diff --git a/db-service/lib/deep-queries.js b/db-service/lib/deep-queries.js index 7d993fa95..2fdbf1041 100644 --- a/db-service/lib/deep-queries.js +++ b/db-service/lib/deep-queries.js @@ -1,6 +1,7 @@ const cds = require('@sap/cds') const { _target_name4 } = require('./SQLService') -const InsertResult = require('../lib/InsertResults') + +const ROOT = Symbol('root') // REVISIT: remove old path with cds^8 let _compareJson @@ -45,20 +46,22 @@ async function onDeep(req, next) { if (query.UPDATE && !beforeData.length) return 0 const queries = getDeepQueries(query, beforeData, target) - const res = await Promise.all(queries.map(query => { - if (query.INSERT) return this.onINSERT({ query }) - if (query.UPDATE) return this.onUPDATE({ query }) - if (query.DELETE) return this.onSIMPLE({ query }) - })) - return ( - beforeData.length || - new InsertResult(query, [ - { - changes: Array.isArray(req.data) ? req.data.length : 1, - ...(res[0]?.results[0]?.lastInsertRowid ? { lastInsertRowid: res[0].results[0].lastInsertRowid } : {}), - }, - ]) - ) + + // first delete, then update, then insert because of potential unique constraints: + // - deletes never trigger unique constraints, but can prevent them -> execute first + // - updates can trigger and prevent unique constraints -> execute second + // - inserts can only trigger unique constraints -> execute last + await Promise.all(Array.from(queries.deletes.values()).map(query => this.onSIMPLE({ query }))) + await Promise.all(queries.updates.map(query => this.onUPDATE({ query }))) + + const rootQuery = queries.inserts.get(ROOT) + queries.inserts.delete(ROOT) + const [rootResult] = await Promise.all([ + rootQuery && this.onINSERT({ query: rootQuery }), + ...Array.from(queries.inserts.values()).map(query => this.onINSERT({ query })), + ]) + + return beforeData.length ||
rootResult } const hasDeep = (q, target) => { @@ -195,7 +198,7 @@ const getDeepQueries = (query, dbData, target) => { diff = [diff] } - return _getDeepQueries(diff, target, true) + return _getDeepQueries(diff, target) } const _hasManagedElements = target => { @@ -205,16 +208,19 @@ const _hasManagedElements = target => { /** * @param {unknown[]} diff * @param {import('@sap/cds/apis/csn').Definition} target - * @param {boolean} [root=false] - * @returns {import('@sap/cds/apis/cqn').Query[]} + * @param {Map} deletes + * @param {Map} inserts + * @param {Object[]} updates + * @param {boolean} [root=true] + * @returns {Object|Boolean} */ -const _getDeepQueries = (diff, target, root = false) => { - const queries = [] - +const _getDeepQueries = (diff, target, deletes = new Map(), inserts = new Map(), updates = [], root = true) => { + // flag to determine if queries were created + let dirty = false for (const diffEntry of diff) { if (diffEntry === undefined) continue - const subQueries = [] + let childrenDirty = false for (const prop in diffEntry) { // handle deep operations @@ -224,9 +230,12 @@ const _getDeepQueries = (diff, target, root = false) => { delete diffEntry[prop] } else if (target.compositions?.[prop]) { const arrayed = Array.isArray(propData) ? 
propData : [propData] - arrayed.forEach(subEntry => { - subQueries.push(..._getDeepQueries([subEntry], target.elements[prop]._target)) - }) + childrenDirty = + arrayed + .map(subEntry => + _getDeepQueries([subEntry], target.elements[prop]._target, deletes, inserts, updates, false), + ) + .some(a => a) || childrenDirty delete diffEntry[prop] } else if (diffEntry[prop] === undefined) { // restore current behavior, if property is undefined, not part of payload @@ -242,12 +251,32 @@ const _getDeepQueries = (diff, target, root = false) => { delete diffEntry._old } - // first calculate subqueries and rm their properties, then build root query if (op === 'create') { - queries.push(INSERT.into(target).entries(diffEntry)) + dirty = true + const id = root ? ROOT : target.name + const insert = inserts.get(id) + if (insert) { + insert.INSERT.entries.push(diffEntry) + } else { + const q = INSERT.into(target).entries(diffEntry) + inserts.set(id, q) + } } else if (op === 'delete') { - queries.push(DELETE.from(target).where(diffEntry)) - } else if (op === 'update' || (op === undefined && (root || subQueries.length) && _hasManagedElements(target))) { + dirty = true + const keys = cds.utils + .Object_keys(target.keys) + .filter(key => !target.keys[key].virtual && !target.keys[key].isAssociation) + + const keyVals = keys.map(k => ({ val: diffEntry[k] })) + const currDelete = deletes.get(target.name) + if (currDelete) currDelete.DELETE.where[2].list.push({ list: keyVals }) + else { + const left = { list: keys.map(k => ({ ref: [k] })) } + const right = { list: [{ list: keyVals }] } + deletes.set(target.name, DELETE.from(target).where([left, 'in', right])) + } + } else if (op === 'update' || (op === undefined && (root || childrenDirty) && _hasManagedElements(target))) { + dirty = true // TODO do we need the where here? 
const keys = target.keys const cqn = UPDATE(target).with(diffEntry) @@ -259,34 +288,16 @@ const _getDeepQueries = (diff, target, root = false) => { delete diffEntry[key] } cqn.with(diffEntry) - queries.push(cqn) + updates.push(cqn) } - - for (const q of subQueries) queries.push(q) } - const insertQueries = new Map() - - return queries.map(q => { - // Merge all INSERT statements for each target - if (q.INSERT) { - const target = q.target - if (insertQueries.has(target)) { - insertQueries.get(target).INSERT.entries.push(...q.INSERT.entries) - return - } else { - insertQueries.set(target, q) - } - } - Object.defineProperty(q, handledDeep, { value: true }) - return q - }) - .filter(a => a) + return root ? { updates, inserts, deletes } : dirty } module.exports = { onDeep, - getDeepQueries, - getExpandForDeep, hasDeep, + getDeepQueries, // only for testing + getExpandForDeep, // only for testing } diff --git a/db-service/test/deep/deep.test.js b/db-service/test/deep/deep.test.js index 6947d6dfc..114a6bec9 100644 --- a/db-service/test/deep/deep.test.js +++ b/db-service/test/deep/deep.test.js @@ -784,20 +784,28 @@ describe('test deep query generation', () => { ], }, ]) - const deepQueries = getDeepQueries(query, [], model.definitions.Root) + const { inserts, updates, deletes } = getDeepQueries(query, [], model.definitions.Root) const expectedInserts = [ INSERT.into(model.definitions.Root) .entries([{ ID: 1 }, { ID: 2 }, { ID: 3 }]), INSERT.into(model.definitions.Child) - .entries([{ ID: 1 }, { ID: 2 }, { ID: 3 }, { ID: 4 }, { ID: 5 }, { ID: 6 }, { ID: 7 }, { ID: 8 }, { ID: 9 }]), + .entries([{ ID: 1 }, { ID: 2 }, { ID: 3 }, { ID: 4 }, { ID: 6 }, { ID: 7 }, { ID: 5 }, { ID: 9 }, { ID: 8 }]), INSERT.into(model.definitions.SubChild) .entries([{ ID: 10 }, { ID: 11 }, { ID: 12 }, { ID: 13 }]), ] + const insertsArray = Array.from(inserts.values()) + const updatesArray = Array.from(updates) + const deletesArray = Array.from(deletes.values()) + expectedInserts.forEach(insert => { 
- expect(deepQueries).toContainEqual(insert) + expect(insertsArray).toContainEqual(insert) }) + + expect(updatesArray.length).toBe(0) + expect(deletesArray.length).toBe(0) + }) test('backlink keys are properly propagated', async () => { diff --git a/test/compliance/UPDATE.test.js b/test/compliance/UPDATE.test.js index 3dc82deac..f49c34999 100644 --- a/test/compliance/UPDATE.test.js +++ b/test/compliance/UPDATE.test.js @@ -1,5 +1,6 @@ const cds = require('../cds.js') const Books = 'complex.associations.Books' +const BooksUnique = 'complex.uniques.Books' describe('UPDATE', () => { describe('entity', () => { @@ -14,39 +15,104 @@ describe('UPDATE', () => { }) }) - describe('where', () => { + describe('with database', () => { cds.test(__dirname + '/resources') - test('flat with or on key', async () => { - const entires = [ - { - ID: 5, - title: 'foo', - }, - { - ID: 6, - title: 'bar', - }, - ] - - const insert = await cds.run(INSERT.into(Books).entries(entires)) - expect(insert.affectedRows).toEqual(2) - - const update = await cds.run( - UPDATE.entity(Books) - .set({ - title: 'foo', - }) - .where({ - ID: 5, - or: { + describe('where', () => { + test('flat with or on key', async () => { + const insert = await cds.run( + INSERT.into(Books).entries([ + { + ID: 5, + title: 'foo', + }, + { ID: 6, + title: 'bar', }, - }), - ) - expect(update).toEqual(2) + ]), + ) + expect(insert.affectedRows).toEqual(2) + + const update = await cds.run( + UPDATE.entity(Books) + .set({ + title: 'foo', + }) + .where({ + ID: 5, + or: { + ID: 6, + }, + }), + ) + expect(update).toEqual(2) + }) + test.skip('missing', () => { + throw new Error('not supported') + }) }) - test.skip('missing', () => { - throw new Error('not supported') + + describe('uniques in deep updates', () => { + test('2nd level unique constraints ', async () => { + // number must be unique for each book + + await cds.tx(async tx => { + await tx.run(DELETE.from(BooksUnique).where({ ID: 1 })) + await expect( + tx.run( + 
INSERT.into(BooksUnique).entries([ + { + ID: 1, + title: 'foo', + pages: [ + { + ID: 1, + number: 1, + }, + { + ID: 2, + number: 1, // unique constraint violation + }, + ], + }, + { + ID: 2, + title: 'bar', + }, + ]), + ), + ).rejects.toBeTruthy() + }) + + await cds.tx(async tx => { + await tx.run(DELETE.from(BooksUnique).where({ ID: 1 })) + const data = { + ID: 1, + title: 'foo', + pages: [ + { + ID: 1, + number: 1, + }, + { + ID: 2, + number: 2, + }, + ], + } + await tx.run(INSERT.into(BooksUnique).entries([data])) + + // Create new entries with conflicting numbers + data.pages[0].ID = 3 + data.pages[1].ID = 4 + await tx.run(UPDATE(BooksUnique).data(data)) // first, old entries are deleted, so no violation + + data.pages[0].ID = 5 + data.pages[0].number = 1 // would fail without the update below first + data.pages[1].number = 999 + await tx.run(UPDATE(BooksUnique).data(data)) + }) + }) }) }) }) diff --git a/test/compliance/resources/db/complex/index.cds b/test/compliance/resources/db/complex/index.cds index e925dbc33..ce7b0e6f3 100644 --- a/test/compliance/resources/db/complex/index.cds +++ b/test/compliance/resources/db/complex/index.cds @@ -3,3 +3,4 @@ namespace complex; using from './computed'; using from './associations'; using from './associationsUnmanaged'; +using from './uniques'; diff --git a/test/compliance/resources/db/complex/uniques.cds b/test/compliance/resources/db/complex/uniques.cds new file mode 100644 index 000000000..3a18477be --- /dev/null +++ b/test/compliance/resources/db/complex/uniques.cds @@ -0,0 +1,14 @@ +namespace complex.uniques; + +entity Books { + key ID : Integer; + title : String(111); + pages : Composition of many Pages on pages.book = $self; +} + +@assert.unique: { number: [number, book] } +entity Pages { + key ID : Integer; + book : Association to Books; + number : Integer; +}