From 3cf79d6e583fc8c10549af0237d4d77e3eaa404d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Martin=20Ad=C3=A1mek?= Date: Thu, 3 Nov 2022 12:09:13 +0100 Subject: [PATCH] feat(core): support composite unique keys in `em.upsert()` Closes #3656 --- packages/core/src/EntityManager.ts | 20 +- packages/core/src/unit-of-work/UnitOfWork.ts | 2 +- .../upsert/__snapshots__/upsert.test.ts.snap | 315 ++++++++++++++++++ tests/features/upsert/upsert.test.ts | 118 ++++++- 4 files changed, 447 insertions(+), 8 deletions(-) diff --git a/packages/core/src/EntityManager.ts b/packages/core/src/EntityManager.ts index 4bbaaed4c7ba..30254aefe822 100644 --- a/packages/core/src/EntityManager.ts +++ b/packages/core/src/EntityManager.ts @@ -530,12 +530,26 @@ export class EntityManager { const unique = meta.props.filter(p => p.unique).map(p => p.name); const propIndex = unique.findIndex(p => data![p] != null); - if (where == null && propIndex >= 0) { - where = { [unique[propIndex]]: data[unique[propIndex]] } as FilterQuery; + if (where == null) { + if (propIndex >= 0) { + where = { [unique[propIndex]]: data[unique[propIndex]] } as FilterQuery; + } else if (meta.uniques.length > 0) { + for (const u of meta.uniques) { + if (Utils.asArray(u.properties).every(p => data![p])) { + where = Utils.asArray(u.properties).reduce((o, key) => { + o[key] = data![key]; + return o; + }, {} as FilterQuery); + break; + } + } + } } if (where == null) { - throw new Error(`Unique property value required for upsert, provide one of: ${meta.primaryKeys.concat(...unique)}`); + const compositeUniqueProps = meta.uniques.map(u => Utils.asArray(u.properties).join(' + ')); + const uniqueProps = meta.primaryKeys.concat(...unique).concat(compositeUniqueProps); + throw new Error(`Unique property value required for upsert, provide one of: ${uniqueProps.join(', ')}`); } data = QueryHelper.processObjectParams(data) as EntityData; diff --git a/packages/core/src/unit-of-work/UnitOfWork.ts b/packages/core/src/unit-of-work/UnitOfWork.ts index 7012cf68cd0c..dd6971c26fc8 100644 --- a/packages/core/src/unit-of-work/UnitOfWork.ts +++ b/packages/core/src/unit-of-work/UnitOfWork.ts @@ -779,7 +779,7 @@ export class UnitOfWork { if (!Utils.isEntity(reference)) { entity[prop.name] = this.em.getReference(prop.type, reference as Primary, { wrapped: !!prop.wrappedReference }) as T[string & keyof T]; } else if (!helper(reference).__initialized && !helper(reference).__em) { - const pk = helper(reference).getSerializedPrimaryKey(); + const pk = helper(reference).getPrimaryKey(); entity[prop.name] = this.em.getReference(prop.type, pk as Primary, { wrapped: !!prop.wrappedReference }) as T[string & keyof T]; } } diff --git a/tests/features/upsert/__snapshots__/upsert.test.ts.snap b/tests/features/upsert/__snapshots__/upsert.test.ts.snap index 3aeed89f3697..32366b0e14c5 100644 --- a/tests/features/upsert/__snapshots__/upsert.test.ts.snap +++ b/tests/features/upsert/__snapshots__/upsert.test.ts.snap @@ -14,6 +14,37 @@ exports[`em.upsert [better-sqlite] em.upsert(Type, data) with PK 1`] = ` ] `; +exports[`em.upsert [better-sqlite] em.upsert(Type, data) with unique composite property (no additional props) 1`] = ` +[ + [ + "[query] insert into \`foo_bar\` (\`author__id\`, \`name\`) values (1, 'fb1') on conflict (\`author__id\`, \`name\`) do nothing returning \`_id\`", + ], + [ + "[query] select \`f0\`.\`_id\` from \`foo_bar\` as \`f0\` where \`f0\`.\`author__id\` = 1 and \`f0\`.\`name\` = 'fb1' limit 1", + ], + [ + "[query] insert into \`foo_bar\` (\`author__id\`, \`name\`) values (2, 
'fb2') on conflict (\`author__id\`, \`name\`) do nothing returning \`_id\`", + ], + [ + "[query] insert into \`foo_bar\` (\`author__id\`, \`name\`) values (3, 'fb3') on conflict (\`author__id\`, \`name\`) do nothing returning \`_id\`", + ], +] +`; + +exports[`em.upsert [better-sqlite] em.upsert(Type, data) with unique composite property (update additional prop) 1`] = ` +[ + [ + "[query] insert into \`foo_bar\` (\`author__id\`, \`name\`, \`prop\`) values (1, 'fb1', 'val 1') on conflict (\`author__id\`, \`name\`) do update set \`prop\` = excluded.\`prop\` returning \`_id\`", + ], + [ + "[query] insert into \`foo_bar\` (\`author__id\`, \`name\`, \`prop\`) values (2, 'fb2', 'val 2') on conflict (\`author__id\`, \`name\`) do update set \`prop\` = excluded.\`prop\` returning \`_id\`", + ], + [ + "[query] insert into \`foo_bar\` (\`author__id\`, \`name\`, \`prop\`) values (3, 'fb3', 'val 3') on conflict (\`author__id\`, \`name\`) do update set \`prop\` = excluded.\`prop\` returning \`_id\`", + ], +] +`; + exports[`em.upsert [better-sqlite] em.upsert(Type, data) with unique property 1`] = ` [ [ @@ -42,6 +73,20 @@ exports[`em.upsert [better-sqlite] em.upsert(entity) 1`] = ` ] `; +exports[`em.upsert [better-sqlite] em.upsert(entity) with unique composite property 1`] = ` +[ + [ + "[query] insert into \`foo_bar\` (\`_id\`, \`author__id\`, \`name\`, \`prop\`) values (1, 1, 'fb1', 'val 1') on conflict (\`_id\`) do update set \`author__id\` = excluded.\`author__id\`, \`name\` = excluded.\`name\`, \`prop\` = excluded.\`prop\` returning \`_id\`", + ], + [ + "[query] insert into \`foo_bar\` (\`_id\`, \`author__id\`, \`name\`, \`prop\`) values (2, 2, 'fb2', 'val 2') on conflict (\`_id\`) do update set \`author__id\` = excluded.\`author__id\`, \`name\` = excluded.\`name\`, \`prop\` = excluded.\`prop\` returning \`_id\`", + ], + [ + "[query] insert into \`foo_bar\` (\`_id\`, \`author__id\`, \`name\`, \`prop\`) values (3, 3, 'fb3', 'val 3') on conflict (\`_id\`) do update set \`author__id\` = excluded.\`author__id\`, \`name\` = excluded.\`name\`, \`prop\` = excluded.\`prop\` returning \`_id\`", + ], +] +`; + exports[`em.upsert [mariadb] em.upsert(Type, data) with PK 1`] = ` [ [ @@ -56,6 +101,52 @@ exports[`em.upsert [mariadb] em.upsert(Type, data) with PK 1`] = ` ] `; +exports[`em.upsert [mariadb] em.upsert(Type, data) with unique composite property (no additional props) 1`] = ` +[ + [ + "[query] insert ignore into \`foo_bar\` (\`author__id\`, \`name\`) values (1, 'fb1')", + ], + [ + "[query] select \`f0\`.\`_id\` from \`foo_bar\` as \`f0\` where \`f0\`.\`author__id\` = 1 and \`f0\`.\`name\` = 'fb1' limit 1", + ], + [ + "[query] insert ignore into \`foo_bar\` (\`author__id\`, \`name\`) values (2, 'fb2')", + ], + [ + "[query] select \`f0\`.\`_id\` from \`foo_bar\` as \`f0\` where \`f0\`.\`author__id\` = 2 and \`f0\`.\`name\` = 'fb2' limit 1", + ], + [ + "[query] insert ignore into \`foo_bar\` (\`author__id\`, \`name\`) values (3, 'fb3')", + ], + [ + "[query] select \`f0\`.\`_id\` from \`foo_bar\` as \`f0\` where \`f0\`.\`author__id\` = 3 and \`f0\`.\`name\` = 'fb3' limit 1", + ], +] +`; + +exports[`em.upsert [mariadb] em.upsert(Type, data) with unique composite property (update additional prop) 1`] = ` +[ + [ + "[query] insert into \`foo_bar\` (\`author__id\`, \`name\`, \`prop\`) values (1, 'fb1', 'val 1') on duplicate key update \`prop\` = values(\`prop\`)", + ], + [ + "[query] select \`f0\`.\`_id\` from \`foo_bar\` as \`f0\` where \`f0\`.\`author__id\` = 1 and \`f0\`.\`name\` = 'fb1' limit 1", + ], + [ + 
"[query] insert into \`foo_bar\` (\`author__id\`, \`name\`, \`prop\`) values (2, 'fb2', 'val 2') on duplicate key update \`prop\` = values(\`prop\`)", + ], + [ + "[query] select \`f0\`.\`_id\` from \`foo_bar\` as \`f0\` where \`f0\`.\`author__id\` = 2 and \`f0\`.\`name\` = 'fb2' limit 1", + ], + [ + "[query] insert into \`foo_bar\` (\`author__id\`, \`name\`, \`prop\`) values (3, 'fb3', 'val 3') on duplicate key update \`prop\` = values(\`prop\`)", + ], + [ + "[query] select \`f0\`.\`_id\` from \`foo_bar\` as \`f0\` where \`f0\`.\`author__id\` = 3 and \`f0\`.\`name\` = 'fb3' limit 1", + ], +] +`; + exports[`em.upsert [mariadb] em.upsert(Type, data) with unique property 1`] = ` [ [ @@ -93,6 +184,20 @@ exports[`em.upsert [mariadb] em.upsert(entity) 1`] = ` ] `; +exports[`em.upsert [mariadb] em.upsert(entity) with unique composite property 1`] = ` +[ + [ + "[query] insert into \`foo_bar\` (\`_id\`, \`author__id\`, \`name\`, \`prop\`) values (1, 1, 'fb1', 'val 1') on duplicate key update \`author__id\` = values(\`author__id\`), \`name\` = values(\`name\`), \`prop\` = values(\`prop\`)", + ], + [ + "[query] insert into \`foo_bar\` (\`_id\`, \`author__id\`, \`name\`, \`prop\`) values (2, 2, 'fb2', 'val 2') on duplicate key update \`author__id\` = values(\`author__id\`), \`name\` = values(\`name\`), \`prop\` = values(\`prop\`)", + ], + [ + "[query] insert into \`foo_bar\` (\`_id\`, \`author__id\`, \`name\`, \`prop\`) values (3, 3, 'fb3', 'val 3') on duplicate key update \`author__id\` = values(\`author__id\`), \`name\` = values(\`name\`), \`prop\` = values(\`prop\`)", + ], +] +`; + exports[`em.upsert [mongo] em.upsert(Type, data) with PK 1`] = ` [ [ @@ -107,6 +212,52 @@ exports[`em.upsert [mongo] em.upsert(Type, data) with PK 1`] = ` ] `; +exports[`em.upsert [mongo] em.upsert(Type, data) with unique composite property (no additional props) 1`] = ` +[ + [ + "[query] db.getCollection('foo-bar').updateMany({ author: 1, name: 'fb1' }, { '$set': { name: 'fb1', author: 1 } }, { upsert: true });", + ], + [ + "[query] db.getCollection('foo-bar').find({ author: 1, name: 'fb1' }, { projection: { _id: 1 } }).limit(1).toArray();", + ], + [ + "[query] db.getCollection('foo-bar').updateMany({ author: 2, name: 'fb2' }, { '$set': { name: 'fb2', author: 2 } }, { upsert: true });", + ], + [ + "[query] db.getCollection('foo-bar').find({ author: 2, name: 'fb2' }, { projection: { _id: 1 } }).limit(1).toArray();", + ], + [ + "[query] db.getCollection('foo-bar').updateMany({ author: 3, name: 'fb3' }, { '$set': { name: 'fb3', author: 3 } }, { upsert: true });", + ], + [ + "[query] db.getCollection('foo-bar').find({ author: 3, name: 'fb3' }, { projection: { _id: 1 } }).limit(1).toArray();", + ], +] +`; + +exports[`em.upsert [mongo] em.upsert(Type, data) with unique composite property (update additional prop) 1`] = ` +[ + [ + "[query] db.getCollection('foo-bar').updateMany({ author: 1, name: 'fb1' }, { '$set': { name: 'fb1', author: 1, prop: 'val 1' } }, { upsert: true });", + ], + [ + "[query] db.getCollection('foo-bar').find({ author: 1, name: 'fb1' }, { projection: { _id: 1 } }).limit(1).toArray();", + ], + [ + "[query] db.getCollection('foo-bar').updateMany({ author: 2, name: 'fb2' }, { '$set': { name: 'fb2', author: 2, prop: 'val 2' } }, { upsert: true });", + ], + [ + "[query] db.getCollection('foo-bar').find({ author: 2, name: 'fb2' }, { projection: { _id: 1 } }).limit(1).toArray();", + ], + [ + "[query] db.getCollection('foo-bar').updateMany({ author: 3, name: 'fb3' }, { '$set': { name: 'fb3', author: 3, prop: 'val 
3' } }, { upsert: true });", + ], + [ + "[query] db.getCollection('foo-bar').find({ author: 3, name: 'fb3' }, { projection: { _id: 1 } }).limit(1).toArray();", + ], +] +`; + exports[`em.upsert [mongo] em.upsert(Type, data) with unique property 1`] = ` [ [ @@ -144,6 +295,20 @@ exports[`em.upsert [mongo] em.upsert(entity) 1`] = ` ] `; +exports[`em.upsert [mongo] em.upsert(entity) with unique composite property 1`] = ` +[ + [ + "[query] db.getCollection('foo-bar').updateMany({ _id: 1 }, { '$set': { _id: 1, author: 1, name: 'fb1', prop: 'val 1' } }, { upsert: true });", + ], + [ + "[query] db.getCollection('foo-bar').updateMany({ _id: 2 }, { '$set': { _id: 2, author: 2, name: 'fb2', prop: 'val 2' } }, { upsert: true });", + ], + [ + "[query] db.getCollection('foo-bar').updateMany({ _id: 3 }, { '$set': { _id: 3, author: 3, name: 'fb3', prop: 'val 3' } }, { upsert: true });", + ], +] +`; + exports[`em.upsert [mysql] em.upsert(Type, data) with PK 1`] = ` [ [ @@ -158,6 +323,52 @@ exports[`em.upsert [mysql] em.upsert(Type, data) with PK 1`] = ` ] `; +exports[`em.upsert [mysql] em.upsert(Type, data) with unique composite property (no additional props) 1`] = ` +[ + [ + "[query] insert ignore into \`foo_bar\` (\`author__id\`, \`name\`) values (1, 'fb1')", + ], + [ + "[query] select \`f0\`.\`_id\` from \`foo_bar\` as \`f0\` where \`f0\`.\`author__id\` = 1 and \`f0\`.\`name\` = 'fb1' limit 1", + ], + [ + "[query] insert ignore into \`foo_bar\` (\`author__id\`, \`name\`) values (2, 'fb2')", + ], + [ + "[query] select \`f0\`.\`_id\` from \`foo_bar\` as \`f0\` where \`f0\`.\`author__id\` = 2 and \`f0\`.\`name\` = 'fb2' limit 1", + ], + [ + "[query] insert ignore into \`foo_bar\` (\`author__id\`, \`name\`) values (3, 'fb3')", + ], + [ + "[query] select \`f0\`.\`_id\` from \`foo_bar\` as \`f0\` where \`f0\`.\`author__id\` = 3 and \`f0\`.\`name\` = 'fb3' limit 1", + ], +] +`; + +exports[`em.upsert [mysql] em.upsert(Type, data) with unique composite property (update additional prop) 1`] = ` +[ + [ + "[query] insert into \`foo_bar\` (\`author__id\`, \`name\`, \`prop\`) values (1, 'fb1', 'val 1') on duplicate key update \`prop\` = values(\`prop\`)", + ], + [ + "[query] select \`f0\`.\`_id\` from \`foo_bar\` as \`f0\` where \`f0\`.\`author__id\` = 1 and \`f0\`.\`name\` = 'fb1' limit 1", + ], + [ + "[query] insert into \`foo_bar\` (\`author__id\`, \`name\`, \`prop\`) values (2, 'fb2', 'val 2') on duplicate key update \`prop\` = values(\`prop\`)", + ], + [ + "[query] select \`f0\`.\`_id\` from \`foo_bar\` as \`f0\` where \`f0\`.\`author__id\` = 2 and \`f0\`.\`name\` = 'fb2' limit 1", + ], + [ + "[query] insert into \`foo_bar\` (\`author__id\`, \`name\`, \`prop\`) values (3, 'fb3', 'val 3') on duplicate key update \`prop\` = values(\`prop\`)", + ], + [ + "[query] select \`f0\`.\`_id\` from \`foo_bar\` as \`f0\` where \`f0\`.\`author__id\` = 3 and \`f0\`.\`name\` = 'fb3' limit 1", + ], +] +`; + exports[`em.upsert [mysql] em.upsert(Type, data) with unique property 1`] = ` [ [ @@ -195,6 +406,20 @@ exports[`em.upsert [mysql] em.upsert(entity) 1`] = ` ] `; +exports[`em.upsert [mysql] em.upsert(entity) with unique composite property 1`] = ` +[ + [ + "[query] insert into \`foo_bar\` (\`_id\`, \`author__id\`, \`name\`, \`prop\`) values (1, 1, 'fb1', 'val 1') on duplicate key update \`author__id\` = values(\`author__id\`), \`name\` = values(\`name\`), \`prop\` = values(\`prop\`)", + ], + [ + "[query] insert into \`foo_bar\` (\`_id\`, \`author__id\`, \`name\`, \`prop\`) values (2, 2, 'fb2', 'val 2') on duplicate key update 
\`author__id\` = values(\`author__id\`), \`name\` = values(\`name\`), \`prop\` = values(\`prop\`)", + ], + [ + "[query] insert into \`foo_bar\` (\`_id\`, \`author__id\`, \`name\`, \`prop\`) values (3, 3, 'fb3', 'val 3') on duplicate key update \`author__id\` = values(\`author__id\`), \`name\` = values(\`name\`), \`prop\` = values(\`prop\`)", + ], +] +`; + exports[`em.upsert [postgresql] em.upsert(Type, data) with PK 1`] = ` [ [ @@ -209,6 +434,37 @@ exports[`em.upsert [postgresql] em.upsert(Type, data) with PK 1`] = ` ] `; +exports[`em.upsert [postgresql] em.upsert(Type, data) with unique composite property (no additional props) 1`] = ` +[ + [ + "[query] insert into "foo_bar" ("author__id", "name") values (1, 'fb1') on conflict ("author__id", "name") do nothing returning "_id"", + ], + [ + "[query] select "f0"."_id" from "foo_bar" as "f0" where "f0"."author__id" = 1 and "f0"."name" = 'fb1' limit 1", + ], + [ + "[query] insert into "foo_bar" ("author__id", "name") values (2, 'fb2') on conflict ("author__id", "name") do nothing returning "_id"", + ], + [ + "[query] insert into "foo_bar" ("author__id", "name") values (3, 'fb3') on conflict ("author__id", "name") do nothing returning "_id"", + ], +] +`; + +exports[`em.upsert [postgresql] em.upsert(Type, data) with unique composite property (update additional prop) 1`] = ` +[ + [ + "[query] insert into "foo_bar" ("author__id", "name", "prop") values (1, 'fb1', 'val 1') on conflict ("author__id", "name") do update set "prop" = excluded."prop" returning "_id"", + ], + [ + "[query] insert into "foo_bar" ("author__id", "name", "prop") values (2, 'fb2', 'val 2') on conflict ("author__id", "name") do update set "prop" = excluded."prop" returning "_id"", + ], + [ + "[query] insert into "foo_bar" ("author__id", "name", "prop") values (3, 'fb3', 'val 3') on conflict ("author__id", "name") do update set "prop" = excluded."prop" returning "_id"", + ], +] +`; + exports[`em.upsert [postgresql] em.upsert(Type, data) with unique property 1`] = ` [ [ @@ -237,6 +493,20 @@ exports[`em.upsert [postgresql] em.upsert(entity) 1`] = ` ] `; +exports[`em.upsert [postgresql] em.upsert(entity) with unique composite property 1`] = ` +[ + [ + "[query] insert into "foo_bar" ("_id", "author__id", "name", "prop") values (1, 1, 'fb1', 'val 1') on conflict ("_id") do update set "author__id" = excluded."author__id", "name" = excluded."name", "prop" = excluded."prop" returning "_id"", + ], + [ + "[query] insert into "foo_bar" ("_id", "author__id", "name", "prop") values (2, 2, 'fb2', 'val 2') on conflict ("_id") do update set "author__id" = excluded."author__id", "name" = excluded."name", "prop" = excluded."prop" returning "_id"", + ], + [ + "[query] insert into "foo_bar" ("_id", "author__id", "name", "prop") values (3, 3, 'fb3', 'val 3') on conflict ("_id") do update set "author__id" = excluded."author__id", "name" = excluded."name", "prop" = excluded."prop" returning "_id"", + ], +] +`; + exports[`em.upsert [sqlite] em.upsert(Type, data) with PK 1`] = ` [ [ @@ -251,6 +521,37 @@ exports[`em.upsert [sqlite] em.upsert(Type, data) with PK 1`] = ` ] `; +exports[`em.upsert [sqlite] em.upsert(Type, data) with unique composite property (no additional props) 1`] = ` +[ + [ + "[query] insert into \`foo_bar\` (\`author__id\`, \`name\`) values (1, 'fb1') on conflict (\`author__id\`, \`name\`) do nothing returning \`_id\`", + ], + [ + "[query] select \`f0\`.\`_id\` from \`foo_bar\` as \`f0\` where \`f0\`.\`author__id\` = 1 and \`f0\`.\`name\` = 'fb1' limit 1", + ], + [ + "[query] insert into 
\`foo_bar\` (\`author__id\`, \`name\`) values (2, 'fb2') on conflict (\`author__id\`, \`name\`) do nothing returning \`_id\`", + ], + [ + "[query] insert into \`foo_bar\` (\`author__id\`, \`name\`) values (3, 'fb3') on conflict (\`author__id\`, \`name\`) do nothing returning \`_id\`", + ], +] +`; + +exports[`em.upsert [sqlite] em.upsert(Type, data) with unique composite property (update additional prop) 1`] = ` +[ + [ + "[query] insert into \`foo_bar\` (\`author__id\`, \`name\`, \`prop\`) values (1, 'fb1', 'val 1') on conflict (\`author__id\`, \`name\`) do update set \`prop\` = excluded.\`prop\` returning \`_id\`", + ], + [ + "[query] insert into \`foo_bar\` (\`author__id\`, \`name\`, \`prop\`) values (2, 'fb2', 'val 2') on conflict (\`author__id\`, \`name\`) do update set \`prop\` = excluded.\`prop\` returning \`_id\`", + ], + [ + "[query] insert into \`foo_bar\` (\`author__id\`, \`name\`, \`prop\`) values (3, 'fb3', 'val 3') on conflict (\`author__id\`, \`name\`) do update set \`prop\` = excluded.\`prop\` returning \`_id\`", + ], +] +`; + exports[`em.upsert [sqlite] em.upsert(Type, data) with unique property 1`] = ` [ [ @@ -278,3 +579,17 @@ exports[`em.upsert [sqlite] em.upsert(entity) 1`] = ` ], ] `; + +exports[`em.upsert [sqlite] em.upsert(entity) with unique composite property 1`] = ` +[ + [ + "[query] insert into \`foo_bar\` (\`_id\`, \`author__id\`, \`name\`, \`prop\`) values (1, 1, 'fb1', 'val 1') on conflict (\`_id\`) do update set \`author__id\` = excluded.\`author__id\`, \`name\` = excluded.\`name\`, \`prop\` = excluded.\`prop\` returning \`_id\`", + ], + [ + "[query] insert into \`foo_bar\` (\`_id\`, \`author__id\`, \`name\`, \`prop\`) values (2, 2, 'fb2', 'val 2') on conflict (\`_id\`) do update set \`author__id\` = excluded.\`author__id\`, \`name\` = excluded.\`name\`, \`prop\` = excluded.\`prop\` returning \`_id\`", + ], + [ + "[query] insert into \`foo_bar\` (\`_id\`, \`author__id\`, \`name\`, \`prop\`) values (3, 3, 'fb3', 'val 3') on conflict (\`_id\`) do update set \`author__id\` = excluded.\`author__id\`, \`name\` = excluded.\`name\`, \`prop\` = excluded.\`prop\` returning \`_id\`", + ], +] +`; diff --git a/tests/features/upsert/upsert.test.ts b/tests/features/upsert/upsert.test.ts index cc30fc4f2d27..edd5c2f5629a 100644 --- a/tests/features/upsert/upsert.test.ts +++ b/tests/features/upsert/upsert.test.ts @@ -1,4 +1,4 @@ -import { MikroORM, Entity, PrimaryKey, ManyToOne, Property, SimpleLogger } from '@mikro-orm/core'; +import { MikroORM, Entity, PrimaryKey, ManyToOne, Property, SimpleLogger, Unique } from '@mikro-orm/core'; import { mockLogger } from '../../helpers'; @Entity() @@ -43,6 +43,31 @@ export class Book { } +@Entity() +@Unique({ properties: ['author', 'name'] }) +export class FooBar { + + static id = 1; + + @PrimaryKey({ name: '_id' }) + id: number = FooBar.id++; + + @ManyToOne(() => Author) + author: Author; + + @Property() + name: string; + + @Property({ nullable: true }) + prop?: string; + + constructor(name: string, author: Author) { + this.name = name; + this.author = author; + } + +} + const options = { 'sqlite': { dbName: ':memory:' }, 'better-sqlite': { dbName: ':memory:' }, @@ -57,7 +82,7 @@ describe.each(Object.keys(options))('em.upsert [%s]', type => { beforeAll(async () => { orm = await MikroORM.init({ - entities: [Author, Book], + entities: [Author, Book, FooBar], type, loggerFactory: options => new SimpleLogger(options), ...options[type], @@ -67,7 +92,7 @@ describe.each(Object.keys(options))('em.upsert [%s]', type => { beforeEach(async () => { 
await orm.schema.clearDatabase(); - Author.id = Book.id = 1; + Author.id = Book.id = FooBar.id = 1; }); afterAll(() => orm.close()); @@ -78,9 +103,16 @@ describe.each(Object.keys(options))('em.upsert [%s]', type => { new Book('b2', new Author('a2', 32)), new Book('b3', new Author('a3', 33)), ]; - await orm.em.persist(books).flush(); + const fooBars = [ + new FooBar('fb1', books[0].author), + new FooBar('fb2', books[1].author), + new FooBar('fb3', books[2].author), + ]; + await orm.em.persist(books).persist(fooBars).flush(); expect(books.map(b => b.id)).toEqual([1, 2, 3]); expect(books.map(b => b.author.id)).toEqual([1, 2, 3]); + expect(fooBars.map(fb => fb.id)).toEqual([1, 2, 3]); + expect(fooBars.map(fb => fb.author.id)).toEqual([1, 2, 3]); return books; } @@ -114,6 +146,35 @@ describe.each(Object.keys(options))('em.upsert [%s]', type => { expect(author22.age).toBe(321); } + async function assertFooBars(fooBars: FooBar[], mock: jest.Mock) { + expect(mock.mock.calls).toMatchSnapshot(); + mock.mockReset(); + await orm.em.flush(); + expect(mock).not.toBeCalled(); + + fooBars[0].prop = '12345'; + await orm.em.flush(); + expect(mock).toBeCalled(); + + orm.em.clear(); + const fooBarsReloaded = await orm.em.find(FooBar, {}, { orderBy: { name: 'asc' } }); + expect(fooBarsReloaded).toHaveLength(3); + + mock.mockReset(); + fooBarsReloaded[1].prop = '12345'; + const fooBar12 = await orm.em.upsert(fooBarsReloaded[0]); // exists + const fooBar22 = await orm.em.upsert(fooBarsReloaded[1]); // exists + const fooBar32 = await orm.em.upsert(fooBarsReloaded[2]); // exists + expect(fooBar12).toBe(fooBarsReloaded[0]); + expect(fooBar22).toBe(fooBarsReloaded[1]); + expect(fooBar32).toBe(fooBarsReloaded[2]); + expect(fooBar22.prop).toBe('12345'); + expect(mock).not.toBeCalled(); + await orm.em.flush(); + await orm.em.refresh(fooBar22); + expect(fooBar22.prop).toBe('12345'); + } + test('em.upsert(Type, data) with PK', async () => { await createEntities(); @@ -161,4 +222,53 @@ describe.each(Object.keys(options))('em.upsert [%s]', type => { await assert(author2, mock); }); + + test('em.upsert(Type, data) with unique composite property (no additional props)', async () => { + await createEntities(); + + await orm.em.nativeDelete(FooBar, [2, 3]); + orm.em.clear(); + + const mock = mockLogger(orm); + const fooBar1 = await orm.em.upsert(FooBar, { name: 'fb1', author: 1 }); // exists + const fooBar2 = await orm.em.upsert(FooBar, { name: 'fb2', author: 2 }); // inserts + const fooBar3 = await orm.em.upsert(FooBar, { name: 'fb3', author: 3 }); // inserts + + await assertFooBars([fooBar1, fooBar2, fooBar3], mock); + }); + + test('em.upsert(Type, data) with unique composite property (update additional prop)', async () => { + await createEntities(); + + await orm.em.nativeDelete(FooBar, [2, 3]); + orm.em.clear(); + + const mock = mockLogger(orm); + const fooBar1 = await orm.em.upsert(FooBar, { name: 'fb1', author: 1, prop: 'val 1' }); // exists + const fooBar2 = await orm.em.upsert(FooBar, { name: 'fb2', author: 2, prop: 'val 2' }); // inserts + const fooBar3 = await orm.em.upsert(FooBar, { name: 'fb3', author: 3, prop: 'val 3' }); // inserts + + await assertFooBars([fooBar1, fooBar2, fooBar3], mock); + }); + + test('em.upsert(entity) with unique composite property', async () => { + await createEntities(); + + await orm.em.nativeDelete(FooBar, [2, 3]); + orm.em.clear(); + + const mock = mockLogger(orm); + const fb1 = orm.em.create(FooBar, { id: 1, name: 'fb1', author: 1, prop: 'val 1' }); + const fb2 = orm.em.create(FooBar, 
{ id: 2, name: 'fb2', author: 2, prop: 'val 2' }); + const fb3 = orm.em.create(FooBar, { id: 3, name: 'fb3', author: 3, prop: 'val 3' }); + const fooBar1 = await orm.em.upsert(FooBar, fb1); // exists + const fooBar2 = await orm.em.upsert(FooBar, fb2); // inserts + const fooBar3 = await orm.em.upsert(FooBar, fb3); // inserts + expect(fb1).toBe(fooBar1); + expect(fb2).toBe(fooBar2); + expect(fb3).toBe(fooBar3); + + await assertFooBars([fooBar1, fooBar2, fooBar3], mock); + }); + });
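
A minimal usage sketch of the behavior this patch enables, based on the FooBar entity declared in the test file above (composite @Unique on `author` + `name`); the initialized EntityManager and pre-existing Author rows are assumed and are not part of this diff:

    // Neither `author` nor `name` is unique on its own, but together they form a
    // composite unique index, so em.upsert() can now derive the `where` condition
    // from the payload instead of throwing
    // "Unique property value required for upsert, provide one of: ...".
    const fb = await orm.em.upsert(FooBar, { author: 1, name: 'fb1', prop: 'val 1' });
    // On the SQL drivers this issues e.g.
    //   insert into `foo_bar` (`author__id`, `name`, `prop`) values (1, 'fb1', 'val 1')
    //   on conflict (`author__id`, `name`) do update set `prop` = excluded.`prop`
    // as captured in the snapshots above.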