diff --git a/.envrc b/.envrc index fe5abd452215..74a3a65ea45f 100644 --- a/.envrc +++ b/.envrc @@ -1,11 +1,11 @@ export SERVER_ROOT=$(pwd) export RUST_LOG_FORMAT=devel export RUST_BACKTRACE=1 -export RUST_LOG=query_engine=debug,query_core=trace,query_connector=debug,sql_query_connector=debug,prisma_models=debug,engineer=info,sql_introspection_connector=debug +export RUST_LOG=query_engine_tests=trace,query_engine=debug,query_core=trace,query_connector=debug,sql_query_connector=debug,prisma_models=debug,engineer=info,sql_introspection_connector=debug export SKIP_CONNECTORS="vitess_5_7,vitess_8_0" # Controls Scala test kit verbosity. Levels are trace, debug, info, error, warning -export LOG_LEVEL=debug +export LOG_LEVEL=trace # Controls Scala test kit test complexity ("simple", "complex") export TEST_MODE=simple diff --git a/Cargo.lock b/Cargo.lock index af5cc72b1c0b..b30a6fd9d732 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -435,7 +435,7 @@ dependencies = [ [[package]] name = "barrel" version = "0.6.6-alpha.0" -source = "git+https://github.com/prisma/barrel.git?branch=mssql-support#5d6a731665acdab0d459be23843c2c54112ff099" +source = "git+https://github.com/prisma/barrel.git?branch=mssql-support#4e84cf3d5013b4c92eb81d7ba90cd1c1c01c6805" [[package]] name = "base-x" @@ -963,6 +963,7 @@ dependencies = [ "colored", "datamodel-connector", "dml", + "enumflags2", "expect-test", "indoc", "itertools 0.10.0", @@ -985,6 +986,7 @@ name = "datamodel-connector" version = "0.1.0" dependencies = [ "dml", + "enumflags2", "itertools 0.10.0", "serde_json", "thiserror", @@ -1056,6 +1058,7 @@ version = "0.1.0" dependencies = [ "chrono", "cuid", + "enumflags2", "native-types", "prisma-value", "serde", @@ -2428,6 +2431,7 @@ version = "0.1.0" dependencies = [ "datamodel-connector", "dml", + "enumflags2", "lazy_static", "native-types", "once_cell", @@ -4050,7 +4054,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e0bccbcf40c8938196944a3da0e133e031a33f4d6b72db3bda3cc556e361905d" dependencies = [ "lazy_static", - "parking_lot 0.10.2", + "parking_lot 0.11.1", "serial_test_derive", ] @@ -4212,6 +4216,7 @@ version = "0.1.0" dependencies = [ "datamodel-connector", "dml", + "enumflags2", "native-types", "once_cell", "regex", diff --git a/datamodel_v2.prisma b/datamodel_v2.prisma index 12ae3fbbcbcc..c956161a9d16 100644 --- a/datamodel_v2.prisma +++ b/datamodel_v2.prisma @@ -1,115 +1,40 @@ -datasource chinook { - provider = "sqlite" - url = "file:./db/Chinook.db?connection_limit=1&socket_timeout=20" +datasource db { + provider = "postgres" + url = "postgresql://postgres:prisma@localhost:5432" } generator js { provider = "prisma-client-js" - previewFeatures = ["microsoftSqlServer", "mongodb", "orderByRelation", "napi", "selectRelationCount", "orderByAggregateGroup"] + previewFeatures = ["microsoftSqlServer", "mongodb", "orderByRelation", "napi", "selectRelationCount", "orderByAggregateGroup", "referentialActions"] } -model Album { - id Int @id @map("AlbumId") - Title String @default("TestDefaultTitle") - ArtistId Int - Tracks Track[] - Artist Artist @relation(fields: [ArtistId], references: [id]) -} - -model Track { - id Int @id @map("TrackId") - Name String - Composer String? - Milliseconds Int - UnitPrice Float - AlbumId Int? - GenreId Int? - MediaTypeId Int - MediaType MediaType @relation(fields: [MediaTypeId], references: [id]) - Genre Genre? @relation(fields: [GenreId], references: [id]) - Album Album? 
@relation(fields: [AlbumId], references: [id]) - InvoiceLines InvoiceLine[] -} - -model MediaType { - id Int @id @map("MediaTypeId") - Name String? - Track Track[] -} +model A { + id String @id + gql String? -model Genre { - id Int @id @map("GenreId") - Name String? - Tracks Track[] -} + bs B[] -model Artist { - id Int @id @map("ArtistId") - Name String? - Albums Album[] + c_id String + c C @relation(fields: [c_id], onDelete: Cascade, references: [id]) } -model Customer { - id Int @id @map("CustomerId") - FirstName String - LastName String - Company String? - Address String? - City String? - State String? - Country String? - PostalCode String? - Phone String? - Fax String? - Email String - SupportRepId Int? - SupportRep Employee? @relation(fields: [SupportRepId], references: [id]) - Invoices Invoice[] -} +model B { + id String @id + gql String? -model Employee { - id Int @id @map("EmployeeId") - FirstName String - LastName String - Title String? - BirthDate DateTime? - HireDate DateTime? - Address String? - City String? - State String? - Country String? - PostalCode String? - Phone String? - Fax String? - Email String? - Customers Customer[] + a_id String + a A @relation(fields: [a_id], references: [id], onDelete: Cascade) } -model Invoice { - id Int @id @map("InvoiceId") - InvoiceDate DateTime - BillingAddress String? - BillingCity String? - BillingState String? - BillingCountry String? - BillingPostalCode String? - Total Float - CustomerId Int - Customer Customer @relation(fields: [CustomerId], references: [id]) - Lines InvoiceLine[] -} +model C { + id String @id + gql String? + ds D[] @relation(onDelete: Cascade) -model InvoiceLine { - id Int @id @map("InvoiceLineId") - UnitPrice Float - Quantity Int - InvoiceId Int - TrackId Int - Invoice Invoice @relation(fields: [InvoiceId], references: [id]) - Track Track @relation(fields: [TrackId], references: [id]) + a A? } -model Playlist { - id Int @id @map("PlaylistId") - Name String? 
+model D { + id String @id + cs C[] } diff --git a/introspection-engine/connectors/sql-introspection-connector/src/calculate_datamodel.rs b/introspection-engine/connectors/sql-introspection-connector/src/calculate_datamodel.rs index 978a34f687b0..42d55d6a492f 100644 --- a/introspection-engine/connectors/sql-introspection-connector/src/calculate_datamodel.rs +++ b/introspection-engine/connectors/sql-introspection-connector/src/calculate_datamodel.rs @@ -1,11 +1,10 @@ -use crate::commenting_out_guardrails::commenting_out_guardrails; -use crate::introspection::introspect; use crate::introspection_helpers::*; use crate::prisma_1_defaults::*; use crate::re_introspection::enrich; use crate::sanitize_datamodel_names::{sanitization_leads_to_duplicate_names, sanitize_datamodel_names}; use crate::version_checker::VersionChecker; use crate::SqlIntrospectionResult; +use crate::{commenting_out_guardrails::commenting_out_guardrails, introspection::introspect}; use datamodel::Datamodel; use introspection_connector::IntrospectionResult; use quaint::connector::SqlFamily; @@ -63,7 +62,7 @@ mod tests { use super::*; use datamodel::{ dml, Datamodel, DefaultValue as DMLDefault, Field, FieldArity, FieldType, Model, NativeTypeInstance, - OnDeleteStrategy, RelationField, RelationInfo, ScalarField, ScalarType, ValueGenerator, + ReferentialAction, RelationField, RelationInfo, ScalarField, ScalarType, ValueGenerator, }; use native_types::{NativeType, PostgresType}; use pretty_assertions::assert_eq; @@ -470,12 +469,15 @@ mod tests { Field::RelationField(RelationField::new( "User", FieldArity::List, + FieldArity::List, RelationInfo { to: "User".to_string(), fields: vec![], references: vec![], name: "CityToUser".to_string(), - on_delete: OnDeleteStrategy::None, + on_delete: None, + on_update: None, + legacy_referential_actions: false, }, )), ], @@ -557,17 +559,26 @@ mod tests { is_commented_out: false, is_ignored: false, }), - Field::RelationField(RelationField::new( - "City", - FieldArity::Required, - RelationInfo { + Field::RelationField(RelationField { + name: "City".into(), + arity: FieldArity::Required, + referential_arity: FieldArity::Required, + documentation: None, + is_generated: false, + is_commented_out: false, + is_ignored: false, + supports_restrict_action: Some(true), + emulates_referential_actions: None, + relation_info: RelationInfo { name: "CityToUser".to_string(), to: "City".to_string(), fields: vec!["city_id".to_string(), "city_name".to_string()], references: vec!["id".to_string(), "name".to_string()], - on_delete: OnDeleteStrategy::None, + on_delete: Some(ReferentialAction::NoAction), + on_update: Some(ReferentialAction::NoAction), + legacy_referential_actions: false, }, - )), + }), ], is_generated: false, indices: vec![], @@ -862,12 +873,15 @@ mod tests { Field::RelationField(RelationField::new( "User", FieldArity::List, + FieldArity::List, RelationInfo { to: "User".to_string(), fields: vec![], references: vec![], name: "CityToUser".to_string(), - on_delete: OnDeleteStrategy::None, + on_delete: None, + on_update: None, + legacy_referential_actions: false, }, )), ], @@ -918,17 +932,26 @@ mod tests { }), ), )), - Field::RelationField(RelationField::new( - "City", - FieldArity::Required, - RelationInfo { + Field::RelationField(RelationField { + name: "City".into(), + arity: FieldArity::Required, + referential_arity: FieldArity::Required, + documentation: None, + is_generated: false, + is_commented_out: false, + is_ignored: false, + supports_restrict_action: Some(true), + emulates_referential_actions: 
None, + relation_info: RelationInfo { name: "CityToUser".to_string(), to: "City".to_string(), fields: vec!["city_id".to_string()], references: vec!["id".to_string()], - on_delete: OnDeleteStrategy::None, + on_delete: Some(ReferentialAction::NoAction), + on_update: Some(ReferentialAction::NoAction), + legacy_referential_actions: false, }, - )), + }), ], is_generated: false, indices: vec![], diff --git a/introspection-engine/connectors/sql-introspection-connector/src/introspection.rs b/introspection-engine/connectors/sql-introspection-connector/src/introspection.rs index b30452a910b6..56afbf634c80 100644 --- a/introspection-engine/connectors/sql-introspection-connector/src/introspection.rs +++ b/introspection-engine/connectors/sql-introspection-connector/src/introspection.rs @@ -41,7 +41,11 @@ pub fn introspect( for foreign_key in &foreign_keys_copy { version_check.has_inline_relations(table); version_check.uses_on_delete(foreign_key, table); - let relation_field = calculate_relation_field(schema, table, foreign_key)?; + + let mut relation_field = calculate_relation_field(schema, table, foreign_key)?; + + relation_field.supports_restrict_action(!sql_family.is_mssql()); + model.add_field(Field::RelationField(relation_field)); }
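The hunk above wires the new flag through introspection: every introspected relation field records whether the connector can use `RESTRICT` (SQL Server cannot, hence `!sql_family.is_mssql()`), and the helpers in the next file translate the database-level foreign-key actions one-to-one into the new `ReferentialAction` values. A minimal sketch of that translation, using stand-in enums rather than the real `sql_schema_describer` and `dml` types:

```rust
// Stand-in enums for illustration only; the real types live in
// sql_schema_describer (ForeignKeyAction) and dml (ReferentialAction).
#[derive(Debug, Clone, Copy, PartialEq)]
enum ForeignKeyAction { NoAction, Restrict, Cascade, SetNull, SetDefault }

#[derive(Debug, Clone, Copy, PartialEq)]
enum ReferentialAction { NoAction, Restrict, Cascade, SetNull, SetDefault }

// Mirrors the `map_action` closure in the next hunk: a straight 1:1 mapping
// from the action stored on the foreign key to the datamodel-level action.
fn map_action(action: ForeignKeyAction) -> ReferentialAction {
    match action {
        ForeignKeyAction::NoAction => ReferentialAction::NoAction,
        ForeignKeyAction::Restrict => ReferentialAction::Restrict,
        ForeignKeyAction::Cascade => ReferentialAction::Cascade,
        ForeignKeyAction::SetNull => ReferentialAction::SetNull,
        ForeignKeyAction::SetDefault => ReferentialAction::SetDefault,
    }
}

fn main() {
    // A foreign key introspected with ON DELETE CASCADE surfaces as
    // `onDelete: Cascade` on the generated relation field.
    assert_eq!(map_action(ForeignKeyAction::Cascade), ReferentialAction::Cascade);
}
```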
diff --git a/introspection-engine/connectors/sql-introspection-connector/src/introspection_helpers.rs b/introspection-engine/connectors/sql-introspection-connector/src/introspection_helpers.rs index 2e75d94b7ddb..364e9fdbad52 100644 --- a/introspection-engine/connectors/sql-introspection-connector/src/introspection_helpers.rs +++ b/introspection-engine/connectors/sql-introspection-connector/src/introspection_helpers.rs @@ -2,13 +2,13 @@ use crate::Dedup; use crate::SqlError; use datamodel::{ common::RelationNames, Datamodel, DefaultValue as DMLDef, FieldArity, FieldType, IndexDefinition, Model, - OnDeleteStrategy, RelationField, RelationInfo, ScalarField, ScalarType, ValueGenerator as VG, + ReferentialAction, RelationField, RelationInfo, ScalarField, ScalarType, ValueGenerator as VG, }; use datamodel_connector::Connector; use quaint::connector::SqlFamily; use sql_datamodel_connector::SqlDatamodelConnectors; -use sql_schema_describer::DefaultKind; use sql_schema_describer::{Column, ColumnArity, ColumnTypeFamily, ForeignKey, Index, IndexType, SqlSchema, Table}; +use sql_schema_describer::{DefaultKind, ForeignKeyAction}; use tracing::debug; //checks @@ -107,7 +107,9 @@ pub fn calculate_many_to_many_field( fields: vec![], to: opposite_foreign_key.referenced_table.clone(), references: opposite_foreign_key.referenced_columns.clone(), - on_delete: OnDeleteStrategy::None, + on_delete: None, + on_update: None, + legacy_referential_actions: false, }; let basename = opposite_foreign_key.referenced_table.clone(); @@ -117,7 +119,7 @@ false => basename, }; - RelationField::new(&name, FieldArity::List, relation_info) + RelationField::new(&name, FieldArity::List, FieldArity::List, relation_info) } pub(crate) fn calculate_index(index: &Index) -> IndexDefinition { @@ -174,12 +176,22 @@ pub(crate) fn calculate_relation_field( ) -> Result<RelationField> { debug!("Handling foreign key {:?}", foreign_key); + let map_action = |action: ForeignKeyAction| match action { + ForeignKeyAction::NoAction => ReferentialAction::NoAction, + ForeignKeyAction::Restrict => ReferentialAction::Restrict, + ForeignKeyAction::Cascade => ReferentialAction::Cascade, + ForeignKeyAction::SetNull => ReferentialAction::SetNull, + ForeignKeyAction::SetDefault => ReferentialAction::SetDefault, + }; + let relation_info = RelationInfo { name: calculate_relation_name(schema, foreign_key, table)?, fields: foreign_key.columns.clone(), to: foreign_key.referenced_table.clone(), references: foreign_key.referenced_columns.clone(), - on_delete: OnDeleteStrategy::None, + on_delete: Some(map_action(foreign_key.on_delete_action)), + on_update: Some(map_action(foreign_key.on_update_action)), + legacy_referential_actions: false, }; let columns: Vec<&Column> = foreign_key @@ -193,7 +205,17 @@ false => FieldArity::Required, }; - Ok(RelationField::new(&foreign_key.referenced_table, arity, relation_info)) + let calculated_arity = match columns.iter().any(|c| c.is_required()) { + true => FieldArity::Required, + false => arity, + }; + + Ok(RelationField::new( + &foreign_key.referenced_table, + arity, + calculated_arity, + relation_info, + )) } pub(crate) fn calculate_backrelation_field( @@ -213,7 +235,9 @@ to: model.name.clone(), fields: vec![], references: vec![], - on_delete: OnDeleteStrategy::None, + on_delete: None, + on_update: None, + legacy_referential_actions: false, }; // unique or id @@ -236,7 +260,7 @@ model.name.clone() }; - Ok(RelationField::new(&name, arity, new_relation_info)) + Ok(RelationField::new(&name, arity, arity, new_relation_info)) } } } @@ -339,7 +363,7 @@ pub(crate) fn calculate_scalar_field_type_with_native_types(column: &Column, fam //fixme move this out of function let connector: Box<dyn Connector> = match family { - SqlFamily::Mysql => Box::new(SqlDatamodelConnectors::mysql()), + SqlFamily::Mysql => Box::new(SqlDatamodelConnectors::mysql(false)), SqlFamily::Postgres => Box::new(SqlDatamodelConnectors::postgres()), SqlFamily::Sqlite => Box::new(SqlDatamodelConnectors::sqlite()), SqlFamily::Mssql => Box::new(SqlDatamodelConnectors::mssql()), diff --git a/introspection-engine/introspection-engine-tests/src/test_api.rs b/introspection-engine/introspection-engine-tests/src/test_api.rs index 13c7e782b7a9..8805a28bd3fa 100644 --- a/introspection-engine/introspection-engine-tests/src/test_api.rs +++ b/introspection-engine/introspection-engine-tests/src/test_api.rs @@ -27,7 +27,9 @@ impl TestApi { let connection_string = args.database_url(); let (database, connection_string): (Quaint, String) = if tags.intersects(Tags::Vitess) { - let me = SqlMigrationConnector::new(&connection_string, None).await.unwrap(); + let me = SqlMigrationConnector::new(&connection_string, BitFlags::all(), None) + .await + .unwrap(); me.reset().await.unwrap(); ( @@ -85,6 +87,10 @@ impl TestApi { self.tags().contains(Tags::Cockroach) } + pub fn is_mysql8(&self) -> bool { + self.tags().contains(Tags::Mysql8) + } + #[tracing::instrument(skip(self, data_model_string))] #[track_caller] pub async fn re_introspect(&self, data_model_string: &str) -> Result<String> { @@ -184,6 +190,7 @@ let parsed_expected = datamodel::parse_datamodel(&self.dm_with_sources(expected_without_header)) .unwrap() .subject; + let parsed_result = datamodel::parse_datamodel(result_with_header).unwrap().subject; let reformatted_expected = diff --git a/introspection-engine/introspection-engine-tests/tests/re_introspection/mod.rs b/introspection-engine/introspection-engine-tests/tests/re_introspection/mod.rs index f8cc93a97b78..e4e1e79f0b7b 100644 --- a/introspection-engine/introspection-engine-tests/tests/re_introspection/mod.rs +++ b/introspection-engine/introspection-engine-tests/tests/re_introspection/mod.rs @@ -2,7 +2,7
@@ use barrel::types; use indoc::formatdoc; use indoc::indoc; use introspection_engine_tests::{assert_eq_json, test_api::*}; -use quaint::prelude::Queryable; +use quaint::prelude::{Queryable, SqlFamily}; use serde_json::json; use test_macros::test_connector; @@ -136,7 +136,7 @@ async fn mapped_model_and_field_name(api: &TestApi) -> TestResult { id Int @id @default(autoincrement()) c_user_id Int @map("user_id") Custom_User Custom_User @relation(fields: [c_user_id], references: [c_id]) - {} + {extra_index} }} model Custom_User {{ @@ -146,7 +146,7 @@ async fn mapped_model_and_field_name(api: &TestApi) -> TestResult { @@map(name: "User") }} "#, - extra_index + extra_index = extra_index ); let final_dm = format!( @@ -155,7 +155,7 @@ async fn mapped_model_and_field_name(api: &TestApi) -> TestResult { id Int @id @default(autoincrement()) c_user_id Int @map("user_id") Custom_User Custom_User @relation(fields: [c_user_id], references: [c_id]) - {} + {extra_index} }} model Custom_User {{ @@ -169,7 +169,7 @@ async fn mapped_model_and_field_name(api: &TestApi) -> TestResult { id Int @id @default(autoincrement()) }} "#, - extra_index + extra_index = extra_index ); api.assert_eq_datamodels(&final_dm, &api.re_introspect(&input_dm).await?); @@ -820,11 +820,12 @@ async fn multiple_changed_relation_names(api: &TestApi) -> TestResult { eveningEmployeeId Int Employee_EmployeeToSchedule_eveningEmployeeId Employee @relation("EmployeeToSchedule_eveningEmployeeId", fields: [eveningEmployeeId], references: [id]) Employee_EmployeeToSchedule_morningEmployeeId Employee @relation("EmployeeToSchedule_morningEmployeeId", fields: [morningEmployeeId], references: [id]) - {} - {} + {idx1} + {idx2} }} "#, - idx1, idx2 + idx1 = idx1, + idx2 = idx2, ); let final_dm = format!( @@ -841,15 +842,16 @@ async fn multiple_changed_relation_names(api: &TestApi) -> TestResult { eveningEmployeeId Int Employee_EmployeeToSchedule_eveningEmployeeId Employee @relation("EmployeeToSchedule_eveningEmployeeId", fields: [eveningEmployeeId], references: [id]) Employee_EmployeeToSchedule_morningEmployeeId Employee @relation("EmployeeToSchedule_morningEmployeeId", fields: [morningEmployeeId], references: [id]) - {} - {} + {idx1} + {idx2} }} model Unrelated {{ id Int @id @default(autoincrement()) }} "#, - idx1, idx2 + idx1 = idx1, + idx2 = idx2, ); api.assert_eq_datamodels(&final_dm, &api.re_introspect(&input_dm).await?); @@ -877,38 +879,37 @@ async fn custom_virtual_relation_field_names(api: &TestApi) -> TestResult { }) .await?; - let input_dm = indoc! {r#" - model Post { + let input_dm = formatdoc! {r#" + model Post {{ id Int @id @default(autoincrement()) user_id Int @unique custom_User User @relation(fields: [user_id], references: [id]) - } + }} - model User { + model User {{ id Int @id @default(autoincrement()) custom_Post Post? - } + }} "#}; - let final_dm = indoc! {r#" - model Post { + let final_dm = formatdoc! {r#" + model Post {{ id Int @id @default(autoincrement()) user_id Int @unique custom_User User @relation(fields: [user_id], references: [id]) - } + }} - model User { + model User {{ id Int @id @default(autoincrement()) custom_Post Post? - } + }} - model Unrelated { + model Unrelated {{ id Int @id @default(autoincrement()) - } - + }} "#}; - api.assert_eq_datamodels(final_dm, &api.re_introspect(input_dm).await?); + api.assert_eq_datamodels(&final_dm, &api.re_introspect(&input_dm).await?); Ok(()) } @@ -1117,47 +1118,47 @@ async fn multiple_changed_relation_names_due_to_mapped_models(api: &TestApi) -> }) .await?; - let input_dm = indoc! 
{r#" - model Post { + let input_dm = formatdoc! {r#" + model Post {{ id Int @id @default(autoincrement()) user_id Int @unique user_id2 Int @unique custom_User Custom_User @relation("CustomRelationName", fields: [user_id], references: [id]) custom_User2 Custom_User @relation("AnotherCustomRelationName", fields: [user_id2], references: [id]) - } + }} - model Custom_User { + model Custom_User {{ id Int @id @default(autoincrement()) custom_Post Post? @relation("CustomRelationName") custom_Post2 Post? @relation("AnotherCustomRelationName") @@map("User") - } + }} "#}; - let final_dm = indoc! {r#" - model Post { + let final_dm = formatdoc! {r#" + model Post {{ id Int @id @default(autoincrement()) user_id Int @unique user_id2 Int @unique custom_User Custom_User @relation("CustomRelationName", fields: [user_id], references: [id]) custom_User2 Custom_User @relation("AnotherCustomRelationName", fields: [user_id2], references: [id]) - } + }} - model Custom_User { + model Custom_User {{ id Int @id @default(autoincrement()) custom_Post Post? @relation("CustomRelationName") custom_Post2 Post? @relation("AnotherCustomRelationName") @@map("User") - } + }} - model Unrelated { + model Unrelated {{ id Int @id @default(autoincrement()) - } + }} "#}; - api.assert_eq_datamodels(final_dm, &api.re_introspect(&input_dm).await?); + api.assert_eq_datamodels(&final_dm, &api.re_introspect(&input_dm).await?); Ok(()) } @@ -1726,3 +1727,274 @@ async fn do_not_try_to_keep_custom_many_to_many_self_relation_names(api: &TestAp Ok(()) } + +#[test_connector] +async fn legacy_referential_actions(api: &TestApi) -> TestResult { + let family = api.sql_family(); + + api.barrel() + .execute(move |migration| { + migration.create_table("a", |t| { + t.add_column("id", types::primary()); + }); + + migration.create_table("b", move |t| { + t.add_column("id", types::primary()); + t.add_column("a_id", types::integer().nullable(false)); + + match family { + SqlFamily::Mssql => { + t.inject_custom( + "CONSTRAINT asdf FOREIGN KEY (a_id) REFERENCES legacy_referential_actions.a(id) ON DELETE NO ACTION ON UPDATE NO ACTION", + ); + } + _ => { + t.inject_custom( + "CONSTRAINT asdf FOREIGN KEY (a_id) REFERENCES a(id) ON DELETE NO ACTION ON UPDATE NO ACTION", + ); + } + } + }); + }) + .await?; + + let extra_index = if api.sql_family().is_mysql() { + r#"@@index([a_id], name: "asdf")"# + } else { + "" + }; + + let input_dm = formatdoc! 
{r#" + model a {{ + id Int @id @default(autoincrement()) + bs b[] @relation("changed") + }} + + model b {{ + id Int @id @default(autoincrement()) + a_id Int + a a @relation("changed", fields: [a_id], references: [id]) + {} + }} + "#, extra_index}; + + api.assert_eq_datamodels(&input_dm, &api.re_introspect(&input_dm).await?); + + Ok(()) +} + +#[test_connector] +async fn referential_actions(api: &TestApi) -> TestResult { + let family = api.sql_family(); + + api.barrel() + .execute(move |migration| { + migration.create_table("a", |t| { + t.add_column("id", types::primary()); + }); + + migration.create_table("b", move |t| { + t.add_column("id", types::primary()); + t.add_column("a_id", types::integer().nullable(false)); + + match family { + SqlFamily::Mssql => { + t.inject_custom( + "CONSTRAINT asdf FOREIGN KEY (a_id) REFERENCES referential_actions.a(id) ON DELETE CASCADE ON UPDATE NO ACTION", + ); + } + _ => { + t.inject_custom( + "CONSTRAINT asdf FOREIGN KEY (a_id) REFERENCES a(id) ON DELETE CASCADE ON UPDATE NO ACTION", + ); + } + } + }); + }) + .await?; + + let extra_index = if api.sql_family().is_mysql() { + r#"@@index([a_id], name: "asdf")"# + } else { + "" + }; + + let input_dm = formatdoc! {r#" + generator client {{ + provider = "prisma-client-js" + previewFeatures = ["referentialActions"] + }} + + model a {{ + id Int @id @default(autoincrement()) + bs b[] + }} + + model b {{ + id Int @id @default(autoincrement()) + a_id Int + a a @relation(fields: [a_id], references: [id], onDelete: Cascade, onUpdate: NoAction) + {} + }} + "#, extra_index}; + + api.assert_eq_datamodels(&input_dm, &api.re_introspect(&input_dm).await?); + + Ok(()) +} + +#[test_connector(tags(Postgres, Mysql, Sqlite))] +async fn default_referential_actions_with_restrict(api: &TestApi) -> TestResult { + api.barrel() + .execute(|migration| { + migration.create_table("a", |t| { + t.add_column("id", types::primary()); + }); + + migration.create_table("b", |t| { + t.add_column("id", types::primary()); + t.add_column("a_id", types::integer().nullable(false)); + t.inject_custom( + "CONSTRAINT asdf FOREIGN KEY (a_id) REFERENCES a(id) ON DELETE RESTRICT ON UPDATE CASCADE", + ); + }); + }) + .await?; + + let extra_index = if api.sql_family().is_mysql() { + r#"@@index([a_id], name: "asdf")"# + } else { + "" + }; + + let input_dm = formatdoc! {r#" + generator client {{ + provider = "prisma-client-js" + previewFeatures = ["referentialActions"] + }} + + model a {{ + id Int @id @default(autoincrement()) + bs b[] + }} + + model b {{ + id Int @id @default(autoincrement()) + a_id Int + a a @relation(fields: [a_id], references: [id]) + {} + }} + "#, extra_index}; + + api.assert_eq_datamodels(&input_dm, &api.re_introspect(&input_dm).await?); + + Ok(()) +} + +#[test_connector(tags(Mssql))] +async fn default_referential_actions_without_restrict(api: &TestApi) -> TestResult { + api.barrel() + .execute(|migration| { + migration.create_table("a", |t| { + t.add_column("id", types::primary()); + }); + + migration.create_table("b", |t| { + t.add_column("id", types::primary()); + t.add_column("a_id", types::integer().nullable(false)); + t.inject_custom( + "CONSTRAINT asdf FOREIGN KEY (a_id) REFERENCES default_referential_actions_without_restrict.a(id) ON DELETE NO ACTION ON UPDATE CASCADE", + ); + }); + }) + .await?; + + let extra_index = if api.sql_family().is_mysql() { + r#"@@index([a_id], name: "asdf")"# + } else { + "" + }; + + let input_dm = formatdoc! 
{r#" + generator client {{ + provider = "prisma-client-js" + previewFeatures = ["referentialActions"] + }} + + model a {{ + id Int @id @default(autoincrement()) + bs b[] + }} + + model b {{ + id Int @id @default(autoincrement()) + a_id Int + a a @relation(fields: [a_id], references: [id]) + {} + }} + "#, extra_index}; + + api.assert_eq_datamodels(&input_dm, &api.re_introspect(&input_dm).await?); + + Ok(()) +} + +#[test_connector] +async fn default_optional_actions(api: &TestApi) -> TestResult { + let family = api.sql_family(); + + api.barrel() + .execute(move |migration| { + migration.create_table("a", |t| { + t.add_column("id", types::primary()); + }); + + migration.create_table("b", move |t| { + t.add_column("id", types::primary()); + t.add_column("a_id", types::integer().nullable(true)); + + match family { + SqlFamily::Mssql => { + t.inject_custom( + "CONSTRAINT asdf FOREIGN KEY (a_id) REFERENCES default_optional_actions.a(id) ON DELETE SET NULL ON UPDATE CASCADE", + ); + } + _ => { + t.inject_custom( + "CONSTRAINT asdf FOREIGN KEY (a_id) REFERENCES a(id) ON DELETE SET NULL ON UPDATE CASCADE", + ); + } + } + }); + }) + .await?; + + let extra_index = if api.sql_family().is_mysql() { + r#"@@index([a_id], name: "asdf")"# + } else { + "" + }; + + let input_dm = formatdoc! {r#" + generator client {{ + provider = "prisma-client-js" + previewFeatures = ["referentialActions"] + }} + + model a {{ + id Int @id @default(autoincrement()) + bs b[] + }} + + model b {{ + id Int @id @default(autoincrement()) + a_id Int? + a a? @relation(fields: [a_id], references: [id]) + {} + }} + "#, extra_index}; + + api.assert_eq_datamodels(&input_dm, &api.re_introspect(&input_dm).await?); + + Ok(()) +} diff --git a/introspection-engine/introspection-engine-tests/tests/relations/mod.rs b/introspection-engine/introspection-engine-tests/tests/relations/mod.rs index 83494f153bd1..9a15797864a5 100644 --- a/introspection-engine/introspection-engine-tests/tests/relations/mod.rs +++ b/introspection-engine/introspection-engine-tests/tests/relations/mod.rs @@ -24,20 +24,20 @@ async fn one_to_one_req_relation(api: &TestApi) -> TestResult { ) .await?; - let dm = indoc! {r##" - model Post { + let dm = formatdoc! {r##" + model Post {{ id Int @id @default(autoincrement()) user_id Int @unique User User @relation(fields: [user_id], references: [id]) - } + }} - model User { + model User {{ id Int @id @default(autoincrement()) Post Post? - } + }} "##}; - api.assert_eq_datamodels(dm, &api.introspect().await?); + api.assert_eq_datamodels(&dm, &api.introspect().await?); Ok(()) } @@ -60,19 +60,19 @@ async fn one_to_one_relation_on_a_singular_primary_key(api: &TestApi) -> TestRes ) .await?; - let dm = indoc! {r##" - model Post { + let dm = formatdoc! {r##" + model Post {{ id Int @unique User User @relation(fields: [id], references: [id]) - } + }} - model User { + model User {{ id Int @id @default(autoincrement()) Post Post? - } + }} "##}; - api.assert_eq_datamodels(dm, &api.introspect().await?); + api.assert_eq_datamodels(&dm, &api.introspect().await?); Ok(()) } @@ -115,23 +115,23 @@ async fn two_one_to_one_relations_between_the_same_models(api: &TestApi) -> Test ) .await?; - let dm = indoc! {r##" - model Post { + let dm = formatdoc! {r##" + model Post {{ id Int @id @default(autoincrement()) user_id Int @unique User_Post_user_idToUser User @relation("Post_user_idToUser", fields: [user_id], references: [id]) User_PostToUser_post_id User? 
@relation("PostToUser_post_id") - } + }} - model User { + model User {{ id Int @id @default(autoincrement()) post_id Int @unique Post_PostToUser_post_id Post @relation("PostToUser_post_id", fields: [post_id], references: [id]) Post_Post_user_idToUser Post? @relation("Post_user_idToUser") - } + }} "##}; - api.assert_eq_datamodels(dm, &api.introspect().await?); + api.assert_eq_datamodels(&dm, &api.introspect().await?); Ok(()) } @@ -155,20 +155,20 @@ async fn a_one_to_one_relation(api: &TestApi) -> TestResult { ) .await?; - let dm = indoc! {r##" - model Post { + let dm = formatdoc! {r##" + model Post {{ id Int @id @default(autoincrement()) user_id Int? @unique User User? @relation(fields: [user_id], references: [id]) - } + }} - model User { + model User {{ id Int @id @default(autoincrement()) Post Post? - } + }} "##}; - api.assert_eq_datamodels(dm, &api.introspect().await?); + api.assert_eq_datamodels(&dm, &api.introspect().await?); Ok(()) } @@ -202,16 +202,16 @@ async fn a_one_to_one_relation_referencing_non_id(api: &TestApi) -> TestResult { let dm = formatdoc! {r##" model Post {{ id Int @id @default(autoincrement()) - user_email String? @unique {} + user_email String? @unique {native_type} User User? @relation(fields: [user_email], references: [email]) }} model User {{ id Int @id @default(autoincrement()) - email String? @unique {} + email String? @unique {native_type} Post Post? }} - "##, native_type, native_type}; + "##, native_type = native_type}; api.assert_eq_datamodels(&dm, &api.introspect().await?); @@ -239,37 +239,37 @@ async fn a_one_to_many_relation(api: &TestApi) -> TestResult { let dm = match api.sql_family() { SqlFamily::Mysql => { - indoc! {r##" - model Post { + formatdoc! {r##" + model Post {{ id Int @id @default(autoincrement()) user_id Int? User User? @relation(fields: [user_id], references: [id]) @@index([user_id], name: "user_id") - } + }} - model User { + model User {{ id Int @id @default(autoincrement()) Post Post[] - } + }} "##} } _ => { - indoc! {r##" - model Post { + formatdoc! {r##" + model Post {{ id Int @id @default(autoincrement()) user_id Int? User User? @relation(fields: [user_id], references: [id]) - } + }} - model User { + model User {{ id Int @id @default(autoincrement()) Post Post[] - } + }} "##} } }; - api.assert_eq_datamodels(dm, &api.introspect().await?); + api.assert_eq_datamodels(&dm, &api.introspect().await?); Ok(()) } @@ -295,37 +295,37 @@ async fn a_one_req_to_many_relation(api: &TestApi) -> TestResult { let dm = match api.sql_family() { SqlFamily::Mysql => { - indoc! {r##" - model Post { + formatdoc! {r##" + model Post {{ id Int @id @default(autoincrement()) user_id Int User User @relation(fields: [user_id], references: [id]) @@index([user_id], name: "user_id") - } + }} - model User { + model User {{ id Int @id @default(autoincrement()) Post Post[] - } + }} "##} } _ => { - indoc! {r##" - model Post { + formatdoc! {r##" + model Post {{ id Int @id @default(autoincrement()) user_id Int User User @relation(fields: [user_id], references: [id]) - } + }} - model User { + model User {{ id Int @id @default(autoincrement()) Post Post[] - } + }} "##} } }; - api.assert_eq_datamodels(dm, &api.introspect().await?); + api.assert_eq_datamodels(&dm, &api.introspect().await?); Ok(()) } @@ -403,13 +403,13 @@ async fn a_many_to_many_relation_with_an_id(api: &TestApi) -> TestResult { let dm = match api.sql_family() { SqlFamily::Mysql => { - indoc! {r##" - model Post { + formatdoc! 
{r##" + model Post {{ id Int @id @default(autoincrement()) PostsToUsers PostsToUsers[] - } + }} - model PostsToUsers { + model PostsToUsers {{ id Int @id @default(autoincrement()) user_id Int post_id Int @@ -417,38 +417,38 @@ async fn a_many_to_many_relation_with_an_id(api: &TestApi) -> TestResult { User User @relation(fields: [user_id], references: [id]) @@index([post_id], name: "post_id") @@index([user_id], name: "user_id") - } + }} - model User { + model User {{ id Int @id @default(autoincrement()) PostsToUsers PostsToUsers[] - } + }} "##} } _ => { - indoc! {r##" - model Post { + formatdoc! {r##" + model Post {{ id Int @id @default(autoincrement()) PostsToUsers PostsToUsers[] - } + }} - model PostsToUsers { + model PostsToUsers {{ id Int @id @default(autoincrement()) user_id Int post_id Int Post Post @relation(fields: [post_id], references: [id]) User User @relation(fields: [user_id], references: [id]) - } + }} - model User { + model User {{ id Int @id @default(autoincrement()) PostsToUsers PostsToUsers[] - } + }} "##} } }; - api.assert_eq_datamodels(dm, &api.introspect().await?); + api.assert_eq_datamodels(&dm, &api.introspect().await?); Ok(()) } @@ -473,8 +473,8 @@ async fn a_self_relation(api: &TestApi) -> TestResult { let dm = match api.sql_family() { SqlFamily::Mysql => { - indoc! {r##" - model User { + formatdoc! {r##" + model User {{ id Int @id @default(autoincrement()) recruited_by Int? direct_report Int? @@ -484,12 +484,12 @@ async fn a_self_relation(api: &TestApi) -> TestResult { other_User_UserToUser_recruited_by User[] @relation("UserToUser_recruited_by") @@index([direct_report], name: "direct_report") @@index([recruited_by], name: "recruited_by") - } + }} "##} } _ => { - indoc! {r##" - model User { + formatdoc! {r##" + model User {{ id Int @id @default(autoincrement()) recruited_by Int? direct_report Int? @@ -497,12 +497,12 @@ async fn a_self_relation(api: &TestApi) -> TestResult { User_UserToUser_recruited_by User? @relation("UserToUser_recruited_by", fields: [recruited_by], references: [id]) other_User_UserToUser_direct_report User[] @relation("UserToUser_direct_report") other_User_UserToUser_recruited_by User[] @relation("UserToUser_recruited_by") - } + }} "##} } }; - api.assert_eq_datamodels(dm, &api.introspect().await?); + api.assert_eq_datamodels(&dm, &api.introspect().await?); Ok(()) } @@ -526,19 +526,19 @@ async fn id_fields_with_foreign_key(api: &TestApi) -> TestResult { ) .await?; - let dm = indoc! {r##" - model Post { + let dm = formatdoc! {r##" + model Post {{ user_id Int @id User User @relation(fields: [user_id], references: [id]) - } + }} - model User { + model User {{ id Int @id @default(autoincrement()) Post Post? - } + }} "##}; - api.assert_eq_datamodels(dm, &api.introspect().await?); + api.assert_eq_datamodels(&dm, &api.introspect().await?); Ok(()) } @@ -569,37 +569,37 @@ async fn duplicate_fks_should_ignore_one_of_them(api: &TestApi) -> TestResult { let dm = match api.sql_family() { SqlFamily::Mysql => { - indoc! {r##" - model Post { + formatdoc! {r##" + model Post {{ id Int @id @default(autoincrement()) user_id Int? User User? @relation(fields: [user_id], references: [id]) @@index([user_id], name: "user_id") - } + }} - model User { + model User {{ id Int @id @default(autoincrement()) Post Post[] - } + }} "##} } _ => { - indoc! {r##" - model Post { + formatdoc! {r##" + model Post {{ id Int @id @default(autoincrement()) user_id Int? User User? 
@relation(fields: [user_id], references: [id]) - } + }} - model User { + model User {{ id Int @id @default(autoincrement()) Post Post[] - } + }} "##} } }; - api.assert_eq_datamodels(dm, &api.introspect().await?); + api.assert_eq_datamodels(&dm, &api.introspect().await?); Ok(()) } @@ -619,20 +619,20 @@ async fn default_values_on_relations(api: &TestApi) -> TestResult { }) .await?; - let dm = indoc! {r##" - model Post { + let dm = formatdoc! {r##" + model Post {{ id Int @id @default(autoincrement()) user_id Int? @default(0) User User? @relation(fields: [user_id], references: [id]) - } + }} - model User { + model User {{ id Int @id @default(autoincrement()) Post Post[] - } + }} "##}; - api.assert_eq_datamodels(dm, &api.introspect().await?); + api.assert_eq_datamodels(&dm, &api.introspect().await?); Ok(()) } @@ -705,54 +705,54 @@ async fn relations_should_avoid_name_clashes(api: &TestApi) -> TestResult { let dm = match api.sql_family() { SqlFamily::Sqlite => { - indoc! {r##" - model x { + formatdoc! {r##" + model x {{ id Int @id @default(autoincrement()) y Int y_xToy y @relation(fields: [y], references: [id]) - } + }} - model y { + model y {{ id Int @id @default(autoincrement()) x Int x_xToy x[] - } + }} "##} } SqlFamily::Mysql => { - indoc! {r##" - model x { + formatdoc! {r##" + model x {{ id Int @id y Int y_xToy y @relation(fields: [y], references: [id]) @@index([y], name: "y") - } + }} - model y { + model y {{ id Int @id x Int x_xToy x[] - } + }} "##} } _ => { - indoc! {r##" - model x { + formatdoc! {r##" + model x {{ id Int @id y Int y_xToy y @relation(fields: [y], references: [id]) - } + }} - model y { + model y {{ id Int @id x Int x_xToy x[] - } + }} "##} } }; - api.assert_eq_datamodels(dm, &api.introspect().await?); + api.assert_eq_datamodels(&dm, &api.introspect().await?); Ok(()) } @@ -800,17 +800,17 @@ async fn relations_should_avoid_name_clashes_2(api: &TestApi) -> TestResult { let dm = match sql_family { SqlFamily::Mysql => { - indoc! { r##" - model x { + formatdoc! { r##" + model x {{ id Int @id @default(autoincrement()) y Int y_x_yToy y @relation("x_yToy", fields: [y], references: [id]) y_xToy_fk_x_1_fk_x_2 y[] @relation("xToy_fk_x_1_fk_x_2") @@unique([id, y], name: "unique_y_id") @@index([y], name: "y") - } + }} - model y { + model y {{ id Int @id @default(autoincrement()) x Int fk_x_1 Int @@ -818,32 +818,32 @@ async fn relations_should_avoid_name_clashes_2(api: &TestApi) -> TestResult { x_xToy_fk_x_1_fk_x_2 x @relation("xToy_fk_x_1_fk_x_2", fields: [fk_x_1, fk_x_2], references: [id, y]) x_x_yToy x[] @relation("x_yToy") @@index([fk_x_1, fk_x_2], name: "fk_x_1") - } + }} "##} } _ => { - indoc! { r##" - model x { + formatdoc! 
{ r##" + model x {{ id Int @id @default(autoincrement()) y Int y_x_yToy y @relation("x_yToy", fields: [y], references: [id]) y_xToy_fk_x_1_fk_x_2 y[] @relation("xToy_fk_x_1_fk_x_2") @@unique([id, y], name: "unique_y_id") - } + }} - model y { + model y {{ id Int @id @default(autoincrement()) x Int fk_x_1 Int fk_x_2 Int x_xToy_fk_x_1_fk_x_2 x @relation("xToy_fk_x_1_fk_x_2", fields: [fk_x_1, fk_x_2], references: [id, y]) x_x_yToy x[] @relation("x_yToy") - } + }} "##} } }; - api.assert_eq_datamodels(dm, &api.introspect().await?); + api.assert_eq_datamodels(&dm, &api.introspect().await?); Ok(()) } @@ -897,7 +897,7 @@ async fn one_to_many_relation_field_names_do_not_conflict_with_many_to_many_rela host_id Int User_EventToUser User @relation(fields: [host_id], references: [id]) User_EventToUserManyToMany User[] @relation("EventToUserManyToMany") - {} + {extra_index} }} model User {{ @@ -906,7 +906,7 @@ async fn one_to_many_relation_field_names_do_not_conflict_with_many_to_many_rela Event_EventToUserManyToMany Event[] @relation("EventToUserManyToMany") }} "#, - extra_index + extra_index = extra_index, ); api.assert_eq_datamodels(&expected_dm, &api.introspect().await?); diff --git a/introspection-engine/introspection-engine-tests/tests/relations_with_compound_fk/mod.rs b/introspection-engine/introspection-engine-tests/tests/relations_with_compound_fk/mod.rs index e7e3568152fb..d4c4dd2588d0 100644 --- a/introspection-engine/introspection-engine-tests/tests/relations_with_compound_fk/mod.rs +++ b/introspection-engine/introspection-engine-tests/tests/relations_with_compound_fk/mod.rs @@ -40,7 +40,7 @@ async fn compound_foreign_keys_for_one_to_one_relations(api: &TestApi) -> TestRe user_age Int? User User? @relation(fields: [user_id, user_age], references: [id, age]) - @@unique([user_id, user_age], name: "{}") + @@unique([user_id, user_age], name: "{constraint_name}") }} model User {{ @@ -51,7 +51,7 @@ async fn compound_foreign_keys_for_one_to_one_relations(api: &TestApi) -> TestRe @@unique([id, age], name: "user_unique") }} "#, - constraint_name + constraint_name = constraint_name, ); api.assert_eq_datamodels(&dm, &api.introspect().await?); @@ -96,7 +96,7 @@ async fn compound_foreign_keys_for_required_one_to_one_relations(api: &TestApi) user_age Int User User @relation(fields: [user_id, user_age], references: [id, age]) - @@unique([user_id, user_age], name: "{}") + @@unique([user_id, user_age], name: "{constraint_name}") }} model User {{ @@ -107,7 +107,7 @@ async fn compound_foreign_keys_for_required_one_to_one_relations(api: &TestApi) @@unique([id, age], name: "user_unique") }} "#, - constraint_name + constraint_name = constraint_name, ); api.assert_eq_datamodels(&dm, &api.introspect().await?); @@ -149,7 +149,7 @@ async fn compound_foreign_keys_for_one_to_many_relations(api: &TestApi) -> TestR user_id Int? user_age Int? User User? @relation(fields: [user_id, user_age], references: [id, age]) - {} + {extra_index} }} model User {{ @@ -160,7 +160,7 @@ async fn compound_foreign_keys_for_one_to_many_relations(api: &TestApi) -> TestR @@unique([id, age], name: "user_unique") }} "#, - extra_index + extra_index = extra_index, ); api.assert_eq_datamodels(&dm, &api.introspect().await?); @@ -202,7 +202,7 @@ async fn compound_foreign_keys_for_one_to_many_relations_with_mixed_requiredness user_id Int user_age Int? User User? 
@relation(fields: [user_id, user_age], references: [id, age]) - {} + {extra_index} }} model User {{ @@ -213,7 +213,7 @@ async fn compound_foreign_keys_for_one_to_many_relations_with_mixed_requiredness @@unique([id, age], name: "user_unique") }} "#, - extra_index + extra_index = extra_index, ); api.assert_eq_datamodels(&dm, &api.introspect().await?); @@ -255,7 +255,7 @@ async fn compound_foreign_keys_for_required_one_to_many_relations(api: &TestApi) user_id Int user_age Int User User @relation(fields: [user_id, user_age], references: [id, age]) - {} + {extra_index} }} model User {{ @@ -266,7 +266,7 @@ async fn compound_foreign_keys_for_required_one_to_many_relations(api: &TestApi) @@unique([id, age], name: "user_unique") }} "#, - extra_index + extra_index = extra_index ); api.assert_eq_datamodels(&dm, &api.introspect().await?); @@ -312,11 +312,12 @@ async fn compound_foreign_keys_for_required_self_relations(api: &TestApi) -> Tes Person Person @relation("PersonToPerson_partner_id_partner_age", fields: [partner_id, partner_age], references: [id, age]) other_Person Person[] @relation("PersonToPerson_partner_id_partner_age") - @@unique([id, age], name: "{}") - {} + @@unique([id, age], name: "{constraint_name}") + {extra_index} }} "#, - constraint_name, extra_index, + constraint_name = constraint_name, + extra_index = extra_index, ); api.assert_eq_datamodels(&dm, &api.introspect().await?); @@ -362,11 +363,12 @@ async fn compound_foreign_keys_for_self_relations(api: &TestApi) -> TestResult { Person Person? @relation("PersonToPerson_partner_id_partner_age", fields: [partner_id, partner_age], references: [id, age]) other_Person Person[] @relation("PersonToPerson_partner_id_partner_age") - @@unique([id, age], name: "{}") - {} + @@unique([id, age], name: "{constraint_name}") + {extra_index} }} "#, - constraint_name, extra_index + constraint_name = constraint_name, + extra_index = extra_index ); api.assert_eq_datamodels(&dm, &api.introspect().await?); @@ -412,11 +414,12 @@ async fn compound_foreign_keys_with_defaults(api: &TestApi) -> TestResult { Person Person @relation("PersonToPerson_partner_id_partner_age", fields: [partner_id, partner_age], references: [id, age]) other_Person Person[] @relation("PersonToPerson_partner_id_partner_age") - @@unique([id, age], name: "{}") - {} + @@unique([id, age], name: "{constraint_name}") + {extra_index} }} "#, - constraint_name, extra_index + constraint_name = constraint_name, + extra_index = extra_index ); api.assert_eq_datamodels(&dm, &api.introspect().await?); @@ -464,7 +467,7 @@ async fn compound_foreign_keys_for_one_to_many_relations_with_non_unique_index(a user_id Int user_age Int User User @relation(fields: [user_id, user_age], references: [id, age]) - {} + {extra_index} }} model User {{ @@ -472,10 +475,11 @@ async fn compound_foreign_keys_for_one_to_many_relations_with_non_unique_index(a age Int Post Post[] - @@unique([id, age], name: "{}") + @@unique([id, age], name: "{constraint_name}") }} "#, - extra_index, constraint_name + extra_index = extra_index, + constraint_name = constraint_name ); api.assert_eq_datamodels(&dm, &api.introspect().await?); @@ -524,10 +528,10 @@ async fn repro_matt_references_on_wrong_side(api: &TestApi) -> TestResult { two Int a a @relation(fields: [one, two], references: [one, two]) - {} + {extra_index} }} "#, - extra_index + extra_index = extra_index, ); api.assert_eq_datamodels(&dm, &api.introspect().await?); @@ -579,10 +583,10 @@ async fn a_compound_fk_pk_with_overlapping_primary_key(api: &TestApi) -> TestRes a a 
@relation(fields: [one, two], references: [one, two]) @@id([dummy, one, two]) - {} + {extra_index} }} "#, - extra_index + extra_index = extra_index, ); api.assert_eq_datamodels(&dm, &api.introspect().await?); @@ -639,7 +643,7 @@ async fn compound_foreign_keys_for_duplicate_one_to_many_relations(api: &TestApi other_user_age Int? User_Post_other_user_id_other_user_ageToUser User? @relation("Post_other_user_id_other_user_ageToUser", fields: [other_user_id, other_user_age], references: [id, age]) User_Post_user_id_user_ageToUser User? @relation("Post_user_id_user_ageToUser", fields: [user_id, user_age], references: [id, age]) - {} + {extra_index} }} model User {{ @@ -648,10 +652,11 @@ async fn compound_foreign_keys_for_duplicate_one_to_many_relations(api: &TestApi Post_Post_other_user_id_other_user_ageToUser Post[] @relation("Post_other_user_id_other_user_ageToUser") Post_Post_user_id_user_ageToUser Post[] @relation("Post_user_id_user_ageToUser") - @@unique([id, age], name: "{}") + @@unique([id, age], name: "{constraint_name}") }} "#, - extra_index, constraint_name + extra_index = extra_index, + constraint_name = constraint_name ); api.assert_eq_datamodels(&dm, &api.introspect().await?); diff --git a/introspection-engine/introspection-engine-tests/tests/remapping_database_names/mod.rs b/introspection-engine/introspection-engine-tests/tests/remapping_database_names/mod.rs index 07d73ed66b0e..2d26cd29bf27 100644 --- a/introspection-engine/introspection-engine-tests/tests/remapping_database_names/mod.rs +++ b/introspection-engine/introspection-engine-tests/tests/remapping_database_names/mod.rs @@ -127,24 +127,24 @@ async fn remapping_models_in_relations(api: &TestApi) -> TestResult { }) .await?; - let dm = { + let dm = formatdoc!( r#" - model Post { + model Post {{ id Int @id @default(autoincrement()) user_id Int @unique User_with_Space User_with_Space @relation(fields: [user_id], references: [id]) - } + }} - model User_with_Space { + model User_with_Space {{ id Int @id @default(autoincrement()) Post Post? @@map("User with Space") - } - "# - }; + }} + "#, + ); - api.assert_eq_datamodels(dm, &api.introspect().await?); + api.assert_eq_datamodels(&dm, &api.introspect().await?); Ok(()) } @@ -167,22 +167,24 @@ async fn remapping_models_in_relations_should_not_map_virtual_fields(api: &TestA }) .await?; - let dm = indoc! {r#" - model Post_With_Space { + let dm = formatdoc!( + r#" + model Post_With_Space {{ id Int @id @default(autoincrement()) user_id Int @unique User User @relation(fields: [user_id], references: [id]) @@map("Post With Space") - } + }} - model User { + model User {{ id Int @id @default(autoincrement()) Post_With_Space Post_With_Space? - } - "#}; + }} + "#, + ); - api.assert_eq_datamodels(dm, &api.introspect().await?); + api.assert_eq_datamodels(&dm, &api.introspect().await?); Ok(()) } @@ -233,7 +235,7 @@ async fn remapping_models_in_compound_relations(api: &TestApi) -> TestResult { user_age Int User_with_Space User_with_Space @relation(fields: [user_id, user_age], references: [id, age]) - @@unique([user_id, user_age], name: "{}") + @@unique([user_id, user_age], name: "{post_constraint}") }} model User_with_Space {{ @@ -242,10 +244,11 @@ async fn remapping_models_in_compound_relations(api: &TestApi) -> TestResult { Post Post? 
@@map("User with Space") - @@unique([id, age], name: "{}") + @@unique([id, age], name: "{user_constraint}") }} "#, - post_constraint, user_constraint + post_constraint = post_constraint, + user_constraint = user_constraint, ); api.assert_eq_datamodels(&dm, &api.introspect().await?); @@ -302,7 +305,7 @@ async fn remapping_fields_in_compound_relations(api: &TestApi) -> TestResult { user_age Int User User @relation(fields: [user_id, user_age], references: [id, age_that_is_invalid]) - @@unique([user_id, user_age], name: "{}") + @@unique([user_id, user_age], name: "{user_post_constraint}") }} model User {{ @@ -310,10 +313,11 @@ async fn remapping_fields_in_compound_relations(api: &TestApi) -> TestResult { age_that_is_invalid Int @map("age-that-is-invalid") Post Post? - @@unique([id, age_that_is_invalid], name: "{}") + @@unique([id, age_that_is_invalid], name: "{user_constraint}") }} "#, - user_post_constraint, user_constraint + user_post_constraint = user_post_constraint, + user_constraint = user_constraint, ); api.assert_eq_datamodels(&dm, &api.introspect().await?); diff --git a/libs/datamodel/connectors/datamodel-connector/Cargo.toml b/libs/datamodel/connectors/datamodel-connector/Cargo.toml index 79671c917d0e..591b58f0b6fc 100644 --- a/libs/datamodel/connectors/datamodel-connector/Cargo.toml +++ b/libs/datamodel/connectors/datamodel-connector/Cargo.toml @@ -12,3 +12,4 @@ dml = { path = "../dml" } thiserror = "1.0" itertools = "0.10" url = "2.2.1" +enumflags2 = "0.7" diff --git a/libs/datamodel/connectors/datamodel-connector/src/empty_connector.rs b/libs/datamodel/connectors/datamodel-connector/src/empty_connector.rs index 4d41ecedb749..8beb6fecabcc 100644 --- a/libs/datamodel/connectors/datamodel-connector/src/empty_connector.rs +++ b/libs/datamodel/connectors/datamodel-connector/src/empty_connector.rs @@ -1,3 +1,6 @@ +use dml::relation_info::ReferentialAction; +use enumflags2::BitFlags; + use crate::{connector_error::ConnectorError, Connector, ConnectorCapability}; /// A [Connector](/trait.Connector.html) implementor meant to @@ -9,6 +12,10 @@ impl Connector for EmptyDatamodelConnector { std::any::type_name::() } + fn referential_actions(&self) -> BitFlags { + BitFlags::empty() + } + fn capabilities(&self) -> &[ConnectorCapability] { &[] } diff --git a/libs/datamodel/connectors/datamodel-connector/src/lib.rs b/libs/datamodel/connectors/datamodel-connector/src/lib.rs index bb790ce37187..c22d6b446052 100644 --- a/libs/datamodel/connectors/datamodel-connector/src/lib.rs +++ b/libs/datamodel/connectors/datamodel-connector/src/lib.rs @@ -8,8 +8,9 @@ pub use empty_connector::EmptyDatamodelConnector; use crate::connector_error::{ConnectorError, ConnectorErrorFactory, ErrorKind}; use dml::{ field::Field, model::Model, native_type_constructor::NativeTypeConstructor, - native_type_instance::NativeTypeInstance, scalars::ScalarType, + native_type_instance::NativeTypeInstance, relation_info::ReferentialAction, scalars::ScalarType, }; +use enumflags2::BitFlags; use std::{borrow::Cow, collections::BTreeMap}; pub trait Connector: Send + Sync { @@ -21,6 +22,16 @@ pub trait Connector: Send + Sync { self.capabilities().contains(&capability) } + fn referential_actions(&self) -> BitFlags; + + fn supports_referential_action(&self, action: ReferentialAction) -> bool { + self.referential_actions().contains(action) + } + + fn emulates_referential_actions(&self) -> bool { + false + } + fn validate_field(&self, field: &Field) -> Result<(), ConnectorError>; fn validate_model(&self, model: &Model) -> Result<(), 
diff --git a/libs/datamodel/connectors/datamodel-connector/src/lib.rs b/libs/datamodel/connectors/datamodel-connector/src/lib.rs index bb790ce37187..c22d6b446052 100644 --- a/libs/datamodel/connectors/datamodel-connector/src/lib.rs +++ b/libs/datamodel/connectors/datamodel-connector/src/lib.rs @@ -8,8 +8,9 @@ pub use empty_connector::EmptyDatamodelConnector; use crate::connector_error::{ConnectorError, ConnectorErrorFactory, ErrorKind}; use dml::{ field::Field, model::Model, native_type_constructor::NativeTypeConstructor, - native_type_instance::NativeTypeInstance, scalars::ScalarType, + native_type_instance::NativeTypeInstance, relation_info::ReferentialAction, scalars::ScalarType, }; +use enumflags2::BitFlags; use std::{borrow::Cow, collections::BTreeMap}; pub trait Connector: Send + Sync { @@ -21,6 +22,16 @@ self.capabilities().contains(&capability) } + fn referential_actions(&self) -> BitFlags<ReferentialAction>; + + fn supports_referential_action(&self, action: ReferentialAction) -> bool { + self.referential_actions().contains(action) + } + + fn emulates_referential_actions(&self) -> bool { + false + } + fn validate_field(&self, field: &Field) -> Result<(), ConnectorError>; fn validate_model(&self, model: &Model) -> Result<(), ConnectorError>; @@ -176,6 +187,7 @@ pub enum ConnectorCapability { AutoIncrementMultipleAllowed, AutoIncrementNonIndexedAllowed, RelationFieldsInArbitraryOrder, + ForeignKeys, // start of Query Engine Capabilities InsensitiveFilters, @@ -193,7 +205,7 @@ /// Contains all capabilities that the connector is able to serve. #[derive(Debug)] pub struct ConnectorCapabilities { - capabilities: Vec<ConnectorCapability>, + pub capabilities: Vec<ConnectorCapability>, } impl ConnectorCapabilities { diff --git a/libs/datamodel/connectors/dml/Cargo.toml b/libs/datamodel/connectors/dml/Cargo.toml index 9a5994630e04..ed9828d150cd 100644 --- a/libs/datamodel/connectors/dml/Cargo.toml +++ b/libs/datamodel/connectors/dml/Cargo.toml @@ -14,8 +14,9 @@ chrono = { version = "0.4.6", features = ["serde"] } serde = { version = "1.0.90", features = ["derive"] } serde_json = { version = "1.0", features = ["float_roundtrip"] } native-types = { path = "../../../native-types" } +enumflags2 = "0.7" [features] # Support for generating default UUID and CUID default values. This implies # random number generation works, so it doesn't compile on targets like wasm32. -default_generators = ["uuid", "cuid"] \ No newline at end of file +default_generators = ["uuid", "cuid"] diff --git a/libs/datamodel/connectors/dml/src/datamodel.rs b/libs/datamodel/connectors/dml/src/datamodel.rs index 8188ae4bc008..440635f636db 100644 --- a/libs/datamodel/connectors/dml/src/datamodel.rs +++ b/libs/datamodel/connectors/dml/src/datamodel.rs @@ -1,4 +1,4 @@ -use crate::field::{Field, FieldType, RelationField, ScalarField}; +use crate::field::{Field, RelationField, ScalarField}; use crate::model::Model; use crate::r#enum::Enum; use crate::relation_info::RelationInfo; @@ -117,7 +117,7 @@ impl Datamodel { let mut fields = vec![]; for model in self.models() { for field in model.scalar_fields() { - if FieldType::Enum(enum_name.to_owned()) == field.field_type { + if field.field_type.is_enum(enum_name) { fields.push((model.name.clone(), field.name.clone())) } } diff --git a/libs/datamodel/connectors/dml/src/field.rs b/libs/datamodel/connectors/dml/src/field.rs index 1fb38306d41b..aac746fe890a 100644 --- a/libs/datamodel/connectors/dml/src/field.rs +++ b/libs/datamodel/connectors/dml/src/field.rs @@ -1,8 +1,11 @@ use super::*; -use crate::default_value::{DefaultValue, ValueGenerator}; use crate::native_type_instance::NativeTypeInstance; use crate::scalars::ScalarType; use crate::traits::{Ignorable, WithDatabaseName, WithName}; +use crate::{ + default_value::{DefaultValue, ValueGenerator}, + relation_info::ReferentialAction, +}; use std::hash::Hash; /// Arity of a Field in a Model. @@ -70,6 +73,10 @@ impl FieldType { self.scalar_type().map(|st| st.is_string()).unwrap_or(false) } + pub fn is_enum(&self, name: &str) -> bool { + matches!(self, Self::Enum(this) if this == name) + } + pub fn scalar_type(&self) -> Option<ScalarType> { match self { FieldType::Scalar(st, _, _) => Some(*st), @@ -100,6 +107,13 @@ impl Field { } } + pub fn as_relation_field_mut(&mut self) -> Option<&mut RelationField> { + match self { + Field::RelationField(ref mut rf) => Some(rf), + _ => None, + } + } + pub fn as_scalar_field(&self) -> Option<&ScalarField> { match self { Field::ScalarField(sf) => Some(sf), @@ -225,7 +239,7 @@ impl WithDatabaseName for Field { } /// Represents a relation field in a model. -#[derive(Debug, PartialEq, Clone)] +#[derive(Debug, Clone)] pub struct RelationField { /// Name of the field. pub name: String, /// The relation info of the field. pub relation_info: RelationInfo, /// The field's arity.
pub arity: FieldArity, + /// The arity of underlying fields for referential actions. + pub referential_arity: FieldArity, + /// Comments associated with this field. pub documentation: Option<String>, /// Indicates if this field has to be ignored by the Client. pub is_ignored: bool, + + /// Is `ON DELETE/UPDATE RESTRICT` allowed. + pub supports_restrict_action: Option<bool>, + + /// Do we run the referential actions in the core instead of the database. + pub emulates_referential_actions: Option<bool>, +} + +impl PartialEq for RelationField { + //ignores the relation name for reintrospection + fn eq(&self, other: &Self) -> bool { + let this_matches = self.name == other.name + && self.arity == other.arity + && self.referential_arity == other.referential_arity + && self.documentation == other.documentation + && self.is_generated == other.is_generated + && self.is_commented_out == other.is_commented_out + && self.is_ignored == other.is_ignored + && self.relation_info == other.relation_info; + + let this_on_delete = self + .relation_info + .on_delete + .unwrap_or_else(|| self.default_on_delete_action()); + + let other_on_delete = other + .relation_info + .on_delete + .unwrap_or_else(|| other.default_on_delete_action()); + + let on_delete_matches = this_on_delete == other_on_delete; + + let this_on_update = self + .relation_info + .on_update + .unwrap_or_else(|| self.default_on_update_action()); + + let other_on_update = other + .relation_info + .on_update + .unwrap_or_else(|| other.default_on_update_action()); + + let on_update_matches = this_on_update == other_on_update; + + this_matches && on_delete_matches && on_update_matches + } } impl RelationField { /// Creates a new field with the given name and type. - pub fn new(name: &str, arity: FieldArity, relation_info: RelationInfo) -> Self { + pub fn new(name: &str, arity: FieldArity, referential_arity: FieldArity, relation_info: RelationInfo) -> Self { RelationField { name: String::from(name), arity, + referential_arity, relation_info, documentation: None, is_generated: false, is_commented_out: false, is_ignored: false, + supports_restrict_action: None, + emulates_referential_actions: None, } } + + /// The default `onDelete` can be `Restrict`. + pub fn supports_restrict_action(&mut self, value: bool) { + self.supports_restrict_action = Some(value); + } + + /// The referential actions should be handled by the core. + pub fn emulates_referential_actions(&mut self, value: bool) { + self.emulates_referential_actions = Some(value); + }
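The two setters above are intended to be called once the active connector is known; combined with `Field::as_relation_field_mut` from the earlier hunk, a caller can stamp restrict support onto a whole datamodel. A hypothetical sketch (the module path and the public `models`/`fields` access are assumptions, not the exact dml API):

```rust
use dml::datamodel::Datamodel;

// Hypothetical post-parsing pass: record on every relation field whether
// the connector can use RESTRICT. On SQL Server `supports_restrict` would
// be false, so required relations default to NoAction instead of Restrict.
fn mark_restrict_support(datamodel: &mut Datamodel, supports_restrict: bool) {
    for model in datamodel.models.iter_mut() {
        for field in model.fields.iter_mut() {
            if let Some(rf) = field.as_relation_field_mut() {
                rf.supports_restrict_action(supports_restrict);
            }
        }
    }
}
```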
     pub fn new_generated(name: &str, info: RelationInfo, required: bool) -> Self {
         let arity = if required {
@@ -270,7 +347,7 @@ impl RelationField {
             FieldArity::Optional
         };

-        let mut field = Self::new(name, arity, info);
+        let mut field = Self::new(name, arity, arity, info);
         field.is_generated = true;

         field
@@ -295,6 +372,26 @@ impl RelationField {
     pub fn is_optional(&self) -> bool {
         self.arity.is_optional()
     }
+
+    pub fn default_on_delete_action(&self) -> ReferentialAction {
+        use ReferentialAction::*;
+
+        match self.referential_arity {
+            FieldArity::Required if self.supports_restrict_action.unwrap_or(true) => Restrict,
+            FieldArity::Required => NoAction,
+            _ => SetNull,
+        }
+    }
+
+    pub fn default_on_update_action(&self) -> ReferentialAction {
+        use ReferentialAction::*;
+
+        match self.referential_arity {
+            _ if !self.emulates_referential_actions.unwrap_or(false) => Cascade,
+            FieldArity::Required => Restrict,
+            _ => SetNull,
+        }
+    }
 }

 /// Represents a scalar field in a model.
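The two `default_*` helpers above are the heart of the feature's fallback behaviour: required relations default to `Restrict` on delete (or `NoAction` where the connector cannot express `RESTRICT`, e.g. SQL Server), optional ones to `SetNull`. A self-contained sketch of that decision table, with the enums reduced to exactly what the rule needs:

```rust
#[derive(Debug, Clone, Copy, PartialEq)]
enum FieldArity { Required, Optional }

#[derive(Debug, Clone, Copy, PartialEq)]
enum ReferentialAction { Cascade, Restrict, NoAction, SetNull, SetDefault }

// Sketch of RelationField::default_on_delete_action: required relations are
// protected (Restrict / NoAction), optional ones are detached (SetNull).
fn default_on_delete(referential_arity: FieldArity, supports_restrict: bool) -> ReferentialAction {
    use ReferentialAction::*;
    match referential_arity {
        FieldArity::Required if supports_restrict => Restrict,
        FieldArity::Required => NoAction,
        _ => SetNull,
    }
}

fn main() {
    use FieldArity::*;
    assert_eq!(default_on_delete(Required, true), ReferentialAction::Restrict);
    assert_eq!(default_on_delete(Required, false), ReferentialAction::NoAction); // e.g. SQL Server
    assert_eq!(default_on_delete(Optional, true), ReferentialAction::SetNull);
}
```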
diff --git a/libs/datamodel/connectors/dml/src/relation_info.rs b/libs/datamodel/connectors/dml/src/relation_info.rs
index 24a50a005c3f..c296f1b78956 100644
--- a/libs/datamodel/connectors/dml/src/relation_info.rs
+++ b/libs/datamodel/connectors/dml/src/relation_info.rs
@@ -1,3 +1,6 @@
+use enumflags2::bitflags;
+use std::fmt;
+
 /// Holds information about a relation field.
 #[derive(Debug, Clone)]
 pub struct RelationInfo {
@@ -11,16 +14,19 @@ pub struct RelationInfo {
     pub name: String,
     /// A strategy indicating what happens when
     /// a related node is deleted.
-    pub on_delete: OnDeleteStrategy,
+    pub on_delete: Option<ReferentialAction>,
+    /// A strategy indicating what happens when
+    /// a related node is updated.
+    pub on_update: Option<ReferentialAction>,
+    /// Set to `true` if the referential actions feature is not in use.
+    /// This prevents the datamodel validator from nagging about the missing
+    /// preview feature when the values are set automatically.
+    pub legacy_referential_actions: bool,
 }

 impl PartialEq for RelationInfo {
-    //ignores the relation name for reintrospection
+    // Ignores the relation name for re-introspection; referential actions
+    // are compared on the relation field instead.
     fn eq(&self, other: &Self) -> bool {
-        self.to == other.to
-            && self.fields == other.fields
-            && self.references == other.references
-            && self.on_delete == other.on_delete
+        self.to == other.to && self.fields == other.fields && self.references == other.references
     }
 }

@@ -33,23 +39,57 @@ impl RelationInfo {
             fields: Vec::new(),
             references: Vec::new(),
             name: String::new(),
-            on_delete: OnDeleteStrategy::None,
+            on_delete: None,
+            on_update: None,
+            legacy_referential_actions: false,
         }
     }
+
+    /// Set referential action legacy mode, skipping the validation errors on
+    /// automatically set actions.
+    pub fn legacy_referential_actions(&mut self) {
+        self.legacy_referential_actions = true;
+    }
 }

 /// Describes what happens when related nodes are deleted.
+#[repr(u8)]
+#[bitflags]
 #[derive(Debug, Copy, PartialEq, Clone)]
-pub enum OnDeleteStrategy {
+pub enum ReferentialAction {
+    /// Deletes record if dependent record is deleted. Updates relation scalar
+    /// fields if referenced scalar fields of the dependent record are updated.
     Cascade,
-    None,
+    /// Prevents operation (both updates and deletes) from succeeding if any
+    /// records are connected. This behavior will always result in a runtime
+    /// error for required relations.
+    Restrict,
+    /// Behavior is database specific. Either defers throwing an integrity check
+    /// error until the end of the transaction or errors immediately. If
+    /// deferred, this makes it possible to temporarily violate integrity in a
+    /// transaction while making sure that subsequent operations in the
+    /// transaction restore integrity.
+    NoAction,
+    /// Sets relation scalar fields to null if the relation is deleted or
+    /// updated. This will always result in a runtime error if one or more of the
+    /// relation scalar fields are required.
+    SetNull,
+    /// Sets relation scalar fields to their default values on update or delete
+    /// of relation. Will always result in a runtime error if no defaults are
+    /// provided for any relation scalar fields.
+    SetDefault,
 }

-impl ToString for OnDeleteStrategy {
-    fn to_string(&self) -> String {
+impl fmt::Display for ReferentialAction {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         match self {
-            OnDeleteStrategy::Cascade => String::from("CASCADE"),
-            OnDeleteStrategy::None => String::from("NONE"),
+            ReferentialAction::Cascade => write!(f, "Cascade"),
+            ReferentialAction::Restrict => write!(f, "Restrict"),
+            ReferentialAction::NoAction => write!(f, "NoAction"),
+            ReferentialAction::SetNull => write!(f, "SetNull"),
+            ReferentialAction::SetDefault => write!(f, "SetDefault"),
         }
     }
 }
diff --git a/libs/datamodel/connectors/mongodb-datamodel-connector/Cargo.toml b/libs/datamodel/connectors/mongodb-datamodel-connector/Cargo.toml
index ef85adb882d3..b34be96ae258 100644
--- a/libs/datamodel/connectors/mongodb-datamodel-connector/Cargo.toml
+++ b/libs/datamodel/connectors/mongodb-datamodel-connector/Cargo.toml
@@ -13,3 +13,4 @@ lazy_static = "1.4"
 native-types = { path = "../../../native-types" }
 once_cell = "1.3"
 serde_json = { version = "1.0", features = ["float_roundtrip"] }
+enumflags2 = "0.7"
diff --git a/libs/datamodel/connectors/mongodb-datamodel-connector/src/lib.rs b/libs/datamodel/connectors/mongodb-datamodel-connector/src/lib.rs
index 55a130fc64d4..e09c6677cf1c 100644
--- a/libs/datamodel/connectors/mongodb-datamodel-connector/src/lib.rs
+++ b/libs/datamodel/connectors/mongodb-datamodel-connector/src/lib.rs
@@ -6,8 +6,9 @@ use datamodel_connector::{
 };
 use dml::{
     default_value::DefaultValue, field::FieldType, native_type_constructor::NativeTypeConstructor,
-    native_type_instance::NativeTypeInstance, traits::WithDatabaseName,
+    native_type_instance::NativeTypeInstance, relation_info::ReferentialAction, traits::WithDatabaseName,
 };
+use enumflags2::BitFlags;
 use mongodb_types::*;
 use native_types::MongoDbType;
 use std::result::Result as StdResult;
@@ -17,10 +18,13 @@ type Result<T> = std::result::Result<T, ConnectorError>;
 pub struct MongoDbDatamodelConnector {
     capabilities: Vec<ConnectorCapability>,
     native_types: Vec<NativeTypeConstructor>,
+    referential_actions: BitFlags<ReferentialAction>,
 }

 impl MongoDbDatamodelConnector {
     pub fn new() -> Self {
+        use ReferentialAction::*;
+
         let capabilities = vec![
             ConnectorCapability::RelationsOverNonUniqueCriteria,
             ConnectorCapability::Json,
@@ -34,10 +38,12 @@ impl MongoDbDatamodelConnector {
         ];

         let native_types = mongodb_types::available_types();
+        let referential_actions = Restrict | SetNull | NoAction | Cascade;

         Self {
             capabilities,
             native_types,
+            referential_actions,
         }
     }
 }
@@ -57,6 +63,14 @@ impl Connector for MongoDbDatamodelConnector {
         &self.capabilities
     }

+    fn referential_actions(&self) -> BitFlags<ReferentialAction> {
+        self.referential_actions
+    }
+
+    fn emulates_referential_actions(&self) -> bool {
+        true
+    }
+
     fn validate_field(&self, field: &dml::field::Field) -> Result<()> {
         // WIP, I don't really know what I'm doing with the dml.
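Connector support is modelled as an `enumflags2::BitFlags<ReferentialAction>` set, so checking whether an action is legal for a connector is a bit-set membership test rather than a match. A minimal sketch assuming only the enumflags2 0.7 API used in this diff (the `#[bitflags]` attribute, `|` composition, and `contains`):

```rust
use enumflags2::{bitflags, BitFlags};

// Mirrors the declaration in relation_info.rs: the attribute turns the
// variants into bit values (1, 2, 4, ...) that compose into a BitFlags set.
#[bitflags]
#[repr(u8)]
#[derive(Debug, Copy, Clone, PartialEq)]
enum ReferentialAction {
    Cascade,
    Restrict,
    NoAction,
    SetNull,
    SetDefault,
}

fn main() {
    use ReferentialAction::*;

    // A connector's supported set, e.g. SQL Server, which cannot express RESTRICT.
    let supported: BitFlags<ReferentialAction> = NoAction | Cascade | SetNull | SetDefault;

    // `supports_referential_action` boils down to a containment check.
    assert!(supported.contains(Cascade));
    assert!(!supported.contains(Restrict));
}
```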
diff --git a/libs/datamodel/connectors/sql-datamodel-connector/Cargo.toml b/libs/datamodel/connectors/sql-datamodel-connector/Cargo.toml
index 1bfe9fe53572..5e59c2a7ab10 100644
--- a/libs/datamodel/connectors/sql-datamodel-connector/Cargo.toml
+++ b/libs/datamodel/connectors/sql-datamodel-connector/Cargo.toml
@@ -13,3 +13,4 @@ native-types = { path = "../../../native-types" }
 serde_json = { version = "1.0", features = ["float_roundtrip"] }
 once_cell = "1.3"
 regex = "1"
+enumflags2 = "0.7"
diff --git a/libs/datamodel/connectors/sql-datamodel-connector/src/lib.rs b/libs/datamodel/connectors/sql-datamodel-connector/src/lib.rs
index 474940276bee..e90ac0243fb2 100644
--- a/libs/datamodel/connectors/sql-datamodel-connector/src/lib.rs
+++ b/libs/datamodel/connectors/sql-datamodel-connector/src/lib.rs
@@ -15,8 +15,8 @@ impl SqlDatamodelConnectors {
         PostgresDatamodelConnector::new()
     }

-    pub fn mysql() -> MySqlDatamodelConnector {
-        MySqlDatamodelConnector::new()
+    pub fn mysql(is_planetscale: bool) -> MySqlDatamodelConnector {
+        MySqlDatamodelConnector::new(is_planetscale)
     }

     pub fn sqlite() -> SqliteDatamodelConnector {
diff --git a/libs/datamodel/connectors/sql-datamodel-connector/src/mssql_datamodel_connector.rs b/libs/datamodel/connectors/sql-datamodel-connector/src/mssql_datamodel_connector.rs
index 6c6bde75808d..9f0020a40c2a 100644
--- a/libs/datamodel/connectors/sql-datamodel-connector/src/mssql_datamodel_connector.rs
+++ b/libs/datamodel/connectors/sql-datamodel-connector/src/mssql_datamodel_connector.rs
@@ -1,14 +1,19 @@
 use datamodel_connector::connector_error::ConnectorError;
 use datamodel_connector::helper::{arg_vec_from_opt, args_vec_from_opt, parse_one_opt_u32, parse_two_opt_u32};
 use datamodel_connector::{Connector, ConnectorCapability};
-use dml::field::{Field, FieldType};
-use dml::model::{IndexType, Model};
-use dml::native_type_constructor::NativeTypeConstructor;
-use dml::native_type_instance::NativeTypeInstance;
-use dml::scalars::ScalarType;
+use dml::{
+    field::{Field, FieldType},
+    model::{IndexType, Model},
+    native_type_constructor::NativeTypeConstructor,
+    native_type_instance::NativeTypeInstance,
+    relation_info::ReferentialAction,
+    scalars::ScalarType,
+};
+use enumflags2::BitFlags;
 use native_types::{MsSqlType, MsSqlTypeParameter};
 use once_cell::sync::Lazy;
 use std::borrow::Cow;
+
 use MsSqlType::*;
 use MsSqlTypeParameter::*;

@@ -44,10 +49,13 @@ const UNIQUE_IDENTIFIER_TYPE_NAME: &str = "UniqueIdentifier";
 pub struct MsSqlDatamodelConnector {
     capabilities: Vec<ConnectorCapability>,
     constructors: Vec<NativeTypeConstructor>,
+    referential_actions: BitFlags<ReferentialAction>,
 }

 impl MsSqlDatamodelConnector {
     pub fn new() -> MsSqlDatamodelConnector {
+        use ReferentialAction::*;
+
         let capabilities = vec![
             ConnectorCapability::AutoIncrementAllowedOnNonId,
             ConnectorCapability::AutoIncrementMultipleAllowed,
@@ -57,6 +65,7 @@ impl MsSqlDatamodelConnector {
             ConnectorCapability::MultipleIndexesWithSameName,
             ConnectorCapability::AutoIncrement,
             ConnectorCapability::CompoundIds,
+            ConnectorCapability::ForeignKeys,
         ];

         let constructors: Vec<NativeTypeConstructor> = vec![
@@ -90,9 +99,12 @@ impl MsSqlDatamodelConnector {
             NativeTypeConstructor::without_args(UNIQUE_IDENTIFIER_TYPE_NAME, vec![ScalarType::String]),
         ];

+        let referential_actions = NoAction | Cascade | SetNull | SetDefault;
+
         MsSqlDatamodelConnector {
             capabilities,
             constructors,
+            referential_actions,
         }
     }

@@ -142,6 +154,10 @@ impl Connector for MsSqlDatamodelConnector {
         &self.capabilities
     }

+    fn referential_actions(&self) -> BitFlags<ReferentialAction> {
+        self.referential_actions
+    }
+
     fn scalar_type_for_native_type(&self, native_type: serde_json::Value) -> ScalarType {
         let native_type: MsSqlType = serde_json::from_value(native_type).unwrap();
diff --git a/libs/datamodel/connectors/sql-datamodel-connector/src/mysql_datamodel_connector.rs b/libs/datamodel/connectors/sql-datamodel-connector/src/mysql_datamodel_connector.rs
index adb54b2fa701..fd93e5eee8dc 100644
--- a/libs/datamodel/connectors/sql-datamodel-connector/src/mysql_datamodel_connector.rs
+++ b/libs/datamodel/connectors/sql-datamodel-connector/src/mysql_datamodel_connector.rs
@@ -1,13 +1,18 @@
-use datamodel_connector::connector_error::ConnectorError;
-use datamodel_connector::helper::{args_vec_from_opt, parse_one_opt_u32, parse_one_u32, parse_two_opt_u32};
-use datamodel_connector::{Connector, ConnectorCapability};
-use dml::field::{Field, FieldType};
-use dml::model::{IndexType, Model};
-use dml::native_type_constructor::NativeTypeConstructor;
-use dml::native_type_instance::NativeTypeInstance;
-use dml::scalars::ScalarType;
-use native_types::MySqlType;
-use native_types::MySqlType::*;
+use datamodel_connector::{
+    connector_error::ConnectorError,
+    helper::{args_vec_from_opt, parse_one_opt_u32, parse_one_u32, parse_two_opt_u32},
+    Connector, ConnectorCapability,
+};
+use dml::{
+    field::{Field, FieldType},
+    model::{IndexType, Model},
+    native_type_constructor::NativeTypeConstructor,
+    native_type_instance::NativeTypeInstance,
+    relation_info::ReferentialAction,
+    scalars::ScalarType,
+};
+use enumflags2::BitFlags;
+use native_types::MySqlType::{self, *};

 const INT_TYPE_NAME: &str = "Int";
 const UNSIGNED_INT_TYPE_NAME: &str = "UnsignedInt";
@@ -56,11 +61,15 @@ const NATIVE_TYPES_THAT_CAN_NOT_BE_USED_IN_KEY_SPECIFICATION: &[&str] = &[
 pub struct MySqlDatamodelConnector {
     capabilities: Vec<ConnectorCapability>,
     constructors: Vec<NativeTypeConstructor>,
+    referential_actions: BitFlags<ReferentialAction>,
+    is_planetscale: bool,
 }

 impl MySqlDatamodelConnector {
-    pub fn new() -> MySqlDatamodelConnector {
-        let capabilities = vec![
+    pub fn new(is_planetscale: bool) -> MySqlDatamodelConnector {
+        use ReferentialAction::*;
+
+        let mut capabilities = vec![
             ConnectorCapability::RelationsOverNonUniqueCriteria,
             ConnectorCapability::Enums,
             ConnectorCapability::Json,
@@ -77,6 +86,10 @@ impl MySqlDatamodelConnector {
             ConnectorCapability::CompoundIds,
         ];

+        if !is_planetscale {
+            capabilities.push(ConnectorCapability::ForeignKeys);
+        }
+
         let int = NativeTypeConstructor::without_args(INT_TYPE_NAME, vec![ScalarType::Int]);
         let unsigned_int = NativeTypeConstructor::without_args(UNSIGNED_INT_TYPE_NAME, vec![ScalarType::Int]);
         let small_int = NativeTypeConstructor::without_args(SMALL_INT_TYPE_NAME, vec![ScalarType::Int]);
@@ -149,9 +162,17 @@ impl MySqlDatamodelConnector {
             json,
         ];

+        let referential_actions = if is_planetscale {
+            Restrict | SetNull
+        } else {
+            Restrict | Cascade | SetNull | NoAction | SetDefault
+        };
+
         MySqlDatamodelConnector {
             capabilities,
             constructors,
+            referential_actions,
+            is_planetscale,
         }
     }
 }
@@ -177,6 +198,14 @@ impl Connector for MySqlDatamodelConnector {
         &self.capabilities
     }

+    fn referential_actions(&self) -> BitFlags<ReferentialAction> {
+        self.referential_actions
+    }
+
+    fn emulates_referential_actions(&self) -> bool {
+        self.is_planetscale
+    }
+
     fn scalar_type_for_native_type(&self, native_type: serde_json::Value) -> ScalarType {
         let native_type: MySqlType = serde_json::from_value(native_type).unwrap();

@@ -424,9 +453,3 @@ impl Connector for MySqlDatamodelConnector {
         Ok(())
     }
 }
-
-impl Default for MySqlDatamodelConnector {
-    fn default() -> Self {
-        Self::new()
-    }
-}
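MySQL is the only connector whose action set depends on configuration: PlanetScale mode has no foreign keys, so only the two actions the query engine can emulate in-core remain, and `emulates_referential_actions` flips to true. A hedged usage sketch — it assumes the crate paths shown in this diff and that `supports_referential_action`/`emulates_referential_actions` are provided methods on the `Connector` trait, as the calls through `active_connector` further down suggest:

```rust
use datamodel_connector::Connector;
use dml::relation_info::ReferentialAction;
use sql_datamodel_connector::SqlDatamodelConnectors;

fn main() {
    // Plain MySQL advertises the full set and relies on database foreign keys.
    let vanilla = SqlDatamodelConnectors::mysql(false);
    assert!(vanilla.supports_referential_action(ReferentialAction::Cascade));
    assert!(!vanilla.emulates_referential_actions());

    // PlanetScale mode drops foreign keys: only the emulatable Restrict and
    // SetNull survive, and emulation is flagged on.
    let planetscale = SqlDatamodelConnectors::mysql(true);
    assert!(!planetscale.supports_referential_action(ReferentialAction::Cascade));
    assert!(planetscale.supports_referential_action(ReferentialAction::SetNull));
    assert!(planetscale.emulates_referential_actions());
}
```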
diff --git a/libs/datamodel/connectors/sql-datamodel-connector/src/postgres_datamodel_connector.rs b/libs/datamodel/connectors/sql-datamodel-connector/src/postgres_datamodel_connector.rs
index 330f127f9cf4..5096d2983850 100644
--- a/libs/datamodel/connectors/sql-datamodel-connector/src/postgres_datamodel_connector.rs
+++ b/libs/datamodel/connectors/sql-datamodel-connector/src/postgres_datamodel_connector.rs
@@ -1,13 +1,18 @@
-use datamodel_connector::connector_error::ConnectorError;
-use datamodel_connector::helper::{arg_vec_from_opt, args_vec_from_opt, parse_one_opt_u32, parse_two_opt_u32};
-use datamodel_connector::{Connector, ConnectorCapability};
-use dml::field::{Field, FieldType};
-use dml::model::Model;
-use dml::native_type_constructor::NativeTypeConstructor;
-use dml::native_type_instance::NativeTypeInstance;
-use dml::scalars::ScalarType;
-use native_types::PostgresType;
-use native_types::PostgresType::*;
+use datamodel_connector::{
+    connector_error::ConnectorError,
+    helper::{arg_vec_from_opt, args_vec_from_opt, parse_one_opt_u32, parse_two_opt_u32},
+    Connector, ConnectorCapability,
+};
+use dml::{
+    field::{Field, FieldType},
+    model::Model,
+    native_type_constructor::NativeTypeConstructor,
+    native_type_instance::NativeTypeInstance,
+    relation_info::ReferentialAction,
+    scalars::ScalarType,
+};
+use enumflags2::BitFlags;
+use native_types::PostgresType::{self, *};

 const SMALL_INT_TYPE_NAME: &str = "SmallInt";
 const INTEGER_TYPE_NAME: &str = "Integer";
@@ -39,11 +44,14 @@ const JSON_B_TYPE_NAME: &str = "JsonB";
 pub struct PostgresDatamodelConnector {
     capabilities: Vec<ConnectorCapability>,
     constructors: Vec<NativeTypeConstructor>,
+    referential_actions: BitFlags<ReferentialAction>,
 }

 //todo should this also contain the pretty printed output for SQL rendering?
 impl PostgresDatamodelConnector {
     pub fn new() -> PostgresDatamodelConnector {
+        use ReferentialAction::*;
+
         let capabilities = vec![
             ConnectorCapability::ScalarLists,
             ConnectorCapability::Enums,
@@ -61,6 +69,7 @@ impl PostgresDatamodelConnector {
             ConnectorCapability::CreateManyWriteableAutoIncId,
             ConnectorCapability::AutoIncrement,
             ConnectorCapability::CompoundIds,
+            ConnectorCapability::ForeignKeys,
         ];

         let small_int = NativeTypeConstructor::without_args(SMALL_INT_TYPE_NAME, vec![ScalarType::Int]);
@@ -120,9 +129,12 @@ impl PostgresDatamodelConnector {
             json_b,
         ];

+        let referential_actions = NoAction | Restrict | Cascade | SetNull | SetDefault;
+
         PostgresDatamodelConnector {
             capabilities,
             constructors,
+            referential_actions,
         }
     }
 }
@@ -148,6 +160,10 @@ impl Connector for PostgresDatamodelConnector {
         &self.capabilities
     }

+    fn referential_actions(&self) -> BitFlags<ReferentialAction> {
+        self.referential_actions
+    }
+
     fn scalar_type_for_native_type(&self, native_type: serde_json::Value) -> ScalarType {
         let native_type: PostgresType = serde_json::from_value(native_type).unwrap();

diff --git a/libs/datamodel/connectors/sql-datamodel-connector/src/sqlite_datamodel_connector.rs b/libs/datamodel/connectors/sql-datamodel-connector/src/sqlite_datamodel_connector.rs
index cfbfa1efb016..de47c0c7ead4 100644
--- a/libs/datamodel/connectors/sql-datamodel-connector/src/sqlite_datamodel_connector.rs
+++ b/libs/datamodel/connectors/sql-datamodel-connector/src/sqlite_datamodel_connector.rs
@@ -1,29 +1,36 @@
 use datamodel_connector::{connector_error::ConnectorError, Connector, ConnectorCapability};
-use dml::model::Model;
-use dml::native_type_constructor::NativeTypeConstructor;
-use dml::native_type_instance::NativeTypeInstance;
-use dml::{field::Field, scalars::ScalarType};
+use dml::{
+    field::Field, model::Model, native_type_constructor::NativeTypeConstructor,
+    native_type_instance::NativeTypeInstance, relation_info::ReferentialAction, scalars::ScalarType,
+};
+use enumflags2::BitFlags;
 use std::borrow::Cow;

 pub struct SqliteDatamodelConnector {
     capabilities: Vec<ConnectorCapability>,
     constructors: Vec<NativeTypeConstructor>,
+    referential_actions: BitFlags<ReferentialAction>,
 }

 impl SqliteDatamodelConnector {
     pub fn new() -> SqliteDatamodelConnector {
+        use ReferentialAction::*;
+
         let capabilities = vec![
             ConnectorCapability::RelationFieldsInArbitraryOrder,
             ConnectorCapability::UpdateableId,
             ConnectorCapability::AutoIncrement,
             ConnectorCapability::CompoundIds,
+            ConnectorCapability::ForeignKeys,
         ];

         let constructors: Vec<NativeTypeConstructor> = vec![];
+        let referential_actions = SetNull | SetDefault | Cascade | Restrict | NoAction;

         SqliteDatamodelConnector {
             capabilities,
             constructors,
+            referential_actions,
         }
     }
 }
@@ -32,10 +39,15 @@ impl Connector for SqliteDatamodelConnector {
     fn name(&self) -> &str {
         "sqlite"
     }
+
     fn capabilities(&self) -> &[ConnectorCapability] {
         &self.capabilities
     }

+    fn referential_actions(&self) -> BitFlags<ReferentialAction> {
+        self.referential_actions
+    }
+
     fn scalar_type_for_native_type(&self, _native_type: serde_json::Value) -> ScalarType {
         unreachable!("No native types on Sqlite");
     }
diff --git a/libs/datamodel/core/Cargo.toml b/libs/datamodel/core/Cargo.toml
index 985868bbbdd8..3538efd06f09 100644
--- a/libs/datamodel/core/Cargo.toml
+++ b/libs/datamodel/core/Cargo.toml
@@ -22,6 +22,7 @@ regex = "1.3.7"
 serde = { version = "1.0.90", features = ["derive"] }
 serde_json = { version = "1.0", features = ["preserve_order", "float_roundtrip"] }
 thiserror = "1.0"
+enumflags2 = "0.7"

 [dev-dependencies]
 clap = "2.33"
diff --git a/libs/datamodel/core/src/ast/reformat/reformatter.rs b/libs/datamodel/core/src/ast/reformat/reformatter.rs
index 176c776a948e..a95c21c69ea8 100644
--- a/libs/datamodel/core/src/ast/reformat/reformatter.rs
+++ b/libs/datamodel/core/src/ast/reformat/reformatter.rs
@@ -1,3 +1,5 @@
+use std::collections::HashSet;
+
 use super::helpers::*;
 use crate::ast::helper::get_sort_index_of_attribute;
 use crate::diagnostics::ValidatedMissingFields;
@@ -33,7 +35,7 @@ impl<'a> Reformatter<'a> {
         let schema_ast = crate::parse_schema_ast(&schema_string)?;
         let validated_datamodel = crate::parse_datamodel_for_formatter(&schema_string)?;

-        let lowerer = crate::transform::dml_to_ast::LowerDmlToAst::new(None);
+        let lowerer = crate::transform::dml_to_ast::LowerDmlToAst::new(None, &HashSet::new());
         let mut result = Vec::new();

         diagnostics.append_warning_vec(validated_datamodel.warnings);
@@ -67,7 +69,7 @@ impl<'a> Reformatter<'a> {
         let validated_datamodel = crate::parse_datamodel_for_formatter(&schema_string)?;
         diagnostics.append_warning_vec(validated_datamodel.warnings);

-        let lowerer = crate::transform::dml_to_ast::LowerDmlToAst::new(None);
+        let lowerer = crate::transform::dml_to_ast::LowerDmlToAst::new(None, &HashSet::new());
         let mut missing_field_attributes = Vec::new();

         for model in validated_datamodel.subject.models() {
@@ -103,7 +105,7 @@ impl<'a> Reformatter<'a> {
         let validated_datamodel = crate::parse_datamodel_for_formatter(&schema_string)?;
         diagnostics.append_warning_vec(validated_datamodel.warnings);

-        let lowerer = crate::transform::dml_to_ast::LowerDmlToAst::new(None);
+        let lowerer = crate::transform::dml_to_ast::LowerDmlToAst::new(None, &HashSet::new());
         let mut missing_relation_attribute_args = Vec::new();

         for model in validated_datamodel.subject.models() {
diff --git a/libs/datamodel/core/src/common/preview_features.rs b/libs/datamodel/core/src/common/preview_features.rs
index f793763585c5..9d9ed1409a47 100644
--- a/libs/datamodel/core/src/common/preview_features.rs
+++ b/libs/datamodel/core/src/common/preview_features.rs
@@ -4,6 +4,8 @@ use PreviewFeature::*;

 macro_rules! features {
     ($( $variant:ident $(,)? ),*) => {
+        #[enumflags2::bitflags]
+        #[repr(u32)]
         #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
         pub enum PreviewFeature {
             $(
@@ -53,6 +55,7 @@ features!(
     OrderByAggregateGroup,
     FilterJson,
     PlanetScaleMode,
+    ReferentialActions,
 );

 // Mapping of which active, deprecated and hidden
@@ -69,6 +72,7 @@ pub static GENERATOR: Lazy<FeatureMap> = Lazy::new(|| {
         OrderByAggregateGroup,
         FilterJson,
         PlanetScaleMode,
+        ReferentialActions,
     ])
     .with_hidden(vec![MongoDb])
     .with_deprecated(vec![
diff --git a/libs/datamodel/core/src/json/dmmf/to_dmmf.rs b/libs/datamodel/core/src/json/dmmf/to_dmmf.rs
index 83d10b9725bc..046a2ae053f2 100644
--- a/libs/datamodel/core/src/json/dmmf/to_dmmf.rs
+++ b/libs/datamodel/core/src/json/dmmf/to_dmmf.rs
@@ -202,7 +202,7 @@ fn get_relation_to_fields(field: &dml::Field) -> Option<Vec<String>> {

 fn get_relation_delete_strategy(field: &dml::Field) -> Option<String> {
     match &field {
-        dml::Field::RelationField(rf) => Some(rf.relation_info.on_delete.to_string()),
+        dml::Field::RelationField(rf) => rf.relation_info.on_delete.map(|ri| ri.to_string()),
         _ => None,
     }
 }
diff --git a/libs/datamodel/core/src/lib.rs b/libs/datamodel/core/src/lib.rs
index 02b474f71aed..39af451045ba 100644
--- a/libs/datamodel/core/src/lib.rs
+++ b/libs/datamodel/core/src/lib.rs
@@ -142,7 +142,7 @@ fn parse_datamodel_internal(
     let generators = GeneratorLoader::load_generators_from_ast(&ast, &mut diagnostics);
     let preview_features = preview_features(&generators);
     let datasources = load_sources(&ast, &preview_features, &mut diagnostics);
-    let validator = ValidationPipeline::new(&datasources);
+    let validator = ValidationPipeline::new(&datasources, &preview_features);

     diagnostics.to_result()?;

@@ -192,7 +192,7 @@ pub fn parse_configuration(schema: &str) -> Result
 fn load_sources(
     ast: &ast::SchemaAst,
-    preview_features: &HashSet<&PreviewFeature>,
+    preview_features: &HashSet<PreviewFeature>,
     diagnostics: &mut Diagnostics,
 ) -> Vec<Datasource> {
     let source_loader = DatasourceLoader::new();
@@ -225,7 +225,7 @@ pub fn render_datamodel_to(
     datamodel: &dml::Datamodel,
     datasource: Option<&Datasource>,
 ) {
-    let lowered = LowerDmlToAst::new(datasource).lower(datamodel);
+    let lowered = LowerDmlToAst::new(datasource, &HashSet::new()).lower(datamodel);

     render_schema_ast_to(stream, &lowered, 2);
 }
@@ -247,7 +247,8 @@ fn render_datamodel_and_config_to(
     datamodel: &dml::Datamodel,
     config: &configuration::Configuration,
 ) {
-    let mut lowered = LowerDmlToAst::new(config.datasources.first()).lower(datamodel);
+    let features = config.preview_features().map(Clone::clone).collect();
+    let mut lowered = LowerDmlToAst::new(config.datasources.first(), &features).lower(datamodel);

     DatasourceSerializer::add_sources_to_ast(config.datasources.as_slice(), &mut lowered);
     GeneratorSerializer::add_generators_to_ast(&config.generators, &mut lowered);
@@ -261,6 +262,9 @@ fn render_schema_ast_to(stream: &mut dyn std::fmt::Write, schema: &ast::SchemaAst,
     renderer.render(schema);
 }

-fn preview_features(generators: &[Generator]) -> HashSet<&PreviewFeature> {
-    generators.iter().flat_map(|gen| gen.preview_features.iter()).collect()
+fn preview_features(generators: &[Generator]) -> HashSet<PreviewFeature> {
+    generators
+        .iter()
+        .flat_map(|gen| gen.preview_features.iter().cloned())
+        .collect()
 }
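`PreviewFeature` becomes a bitflags enum here, but the surrounding plumbing threads it around as a `HashSet<PreviewFeature>` of owned values rather than the previous `&PreviewFeature` borrows. A small sketch of the gating check the validator and standardiser both use (the two-variant enum is a stand-in for the real one):

```rust
use std::collections::HashSet;

// Reduced stand-in for the real PreviewFeature enum in preview_features.rs.
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
enum PreviewFeature {
    ReferentialActions,
    PlanetScaleMode,
}

fn main() {
    // What `preview_features(&generators)` now returns: owned values,
    // cloned out of the generator blocks, instead of references.
    let features: HashSet<PreviewFeature> =
        [PreviewFeature::ReferentialActions].into_iter().collect();

    // The gate used before accepting onDelete/onUpdate arguments.
    if features.contains(&PreviewFeature::ReferentialActions) {
        println!("onDelete/onUpdate arguments are allowed");
    }
    let _ = PreviewFeature::PlanetScaleMode; // second feature shown for shape only
}
```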
diff --git a/libs/datamodel/core/src/transform/ast_to_dml/builtin_datasource_providers.rs b/libs/datamodel/core/src/transform/ast_to_dml/builtin_datasource_providers.rs
index 4e8a901e68d3..9563d7898891 100644
--- a/libs/datamodel/core/src/transform/ast_to_dml/builtin_datasource_providers.rs
+++ b/libs/datamodel/core/src/transform/ast_to_dml/builtin_datasource_providers.rs
@@ -48,11 +48,13 @@ impl DatasourceProvider for PostgresDatasourceProvider {
     }
 }

-pub struct MySqlDatasourceProvider {}
+pub struct MySqlDatasourceProvider {
+    is_planetscale: bool,
+}

 impl MySqlDatasourceProvider {
-    pub fn new() -> Self {
-        Self {}
+    pub fn new(is_planetscale: bool) -> Self {
+        Self { is_planetscale }
     }
 }

@@ -66,7 +68,7 @@ impl DatasourceProvider for MySqlDatasourceProvider {
     }

     fn connector(&self) -> Box<dyn Connector> {
-        Box::new(SqlDatamodelConnectors::mysql())
+        Box::new(SqlDatamodelConnectors::mysql(self.is_planetscale))
     }
 }
diff --git a/libs/datamodel/core/src/transform/ast_to_dml/datasource_loader.rs b/libs/datamodel/core/src/transform/ast_to_dml/datasource_loader.rs
index e2501829b852..7e0095f466f7 100644
--- a/libs/datamodel/core/src/transform/ast_to_dml/datasource_loader.rs
+++ b/libs/datamodel/core/src/transform/ast_to_dml/datasource_loader.rs
@@ -10,9 +10,10 @@ use crate::{
     ast::SourceConfig,
     diagnostics::{DatamodelError, Diagnostics},
 };
-use crate::{ast::Span, common::preview_features::PreviewFeature, configuration::StringFromEnvVar};
 use crate::{
-    ast::{self},
+    ast::{self, Span},
+    common::{preview_features::PreviewFeature, provider_names::*},
+    configuration::StringFromEnvVar,
     Datasource,
 };
 use std::collections::{HashMap, HashSet};
@@ -22,16 +23,12 @@ const SHADOW_DATABASE_URL_KEY: &str = "shadowDatabaseUrl";
 const URL_KEY: &str = "url";

 /// Is responsible for loading and validating Datasources defined in an AST.
-pub struct DatasourceLoader {
-    source_definitions: Vec<Box<dyn DatasourceProvider>>,
-}
+pub struct DatasourceLoader {}

 impl DatasourceLoader {
     #[allow(clippy::new_without_default)]
     pub fn new() -> Self {
-        Self {
-            source_definitions: get_builtin_datasource_providers(),
-        }
+        Self {}
     }

     /// Loads all datasources from the provided schema AST.
@@ -40,7 +37,7 @@ impl DatasourceLoader {
     pub fn load_datasources_from_ast(
         &self,
         ast_schema: &ast::SchemaAst,
-        preview_features: &HashSet<&PreviewFeature>,
+        preview_features: &HashSet<PreviewFeature>,
         diagnostics: &mut Diagnostics,
     ) -> Vec<Datasource> {
         let mut sources = Vec::new();
@@ -67,7 +64,7 @@ impl DatasourceLoader {
     fn lift_datasource(
         &self,
         ast_source: &ast::SourceConfig,
-        preview_features: &HashSet<&PreviewFeature>,
+        preview_features: &HashSet<PreviewFeature>,
         diagnostics: &mut Diagnostics,
     ) -> Option<Datasource> {
         let source_name = &ast_source.name.name;
@@ -161,14 +158,22 @@ impl DatasourceLoader {
         preview_features_guardrail(&args, diagnostics);

         let documentation = ast_source.documentation.as_ref().map(|comment| comment.text.clone());
+        let planet_scale_mode = get_planet_scale_mode_arg(&args, preview_features, ast_source, diagnostics);

-        let datasource_provider = match self.get_datasource_provider(&provider) {
-            Some(provider) => provider,
-            None => {
+        let datasource_provider: Box<dyn DatasourceProvider> = match provider {
+            p if p == MYSQL_SOURCE_NAME => Box::new(MySqlDatasourceProvider::new(planet_scale_mode)),
+            p if p == POSTGRES_SOURCE_NAME || p == POSTGRES_SOURCE_NAME_HEROKU => {
+                Box::new(PostgresDatasourceProvider::new())
+            }
+            p if p == SQLITE_SOURCE_NAME => Box::new(SqliteDatasourceProvider::new()),
+            p if p == MSSQL_SOURCE_NAME => Box::new(MsSqlDatasourceProvider::new()),
+            p if p == MONGODB_SOURCE_NAME => Box::new(MongoDbDatasourceProvider::new()),
+            _ => {
                 diagnostics.push_error(DatamodelError::new_datasource_provider_not_known_error(
                     provider,
                     provider_arg.span(),
                 ));
+
                 return None;
             }
         };
@@ -182,26 +187,9 @@ impl DatasourceLoader {
             documentation,
             active_connector: datasource_provider.connector(),
             shadow_database_url,
-            planet_scale_mode: get_planet_scale_mode_arg(&args, preview_features, ast_source, diagnostics),
+            planet_scale_mode,
         })
     }
-
-    fn get_datasource_provider(&self, provider: &str) -> Option<&dyn DatasourceProvider> {
-        self.source_definitions
-            .iter()
-            .find(|sd| sd.is_provider(provider))
-            .map(|b| b.as_ref())
-    }
-}
-
-fn get_builtin_datasource_providers() -> Vec<Box<dyn DatasourceProvider>> {
-    vec![
-        Box::new(MySqlDatasourceProvider::new()),
-        Box::new(PostgresDatasourceProvider::new()),
-        Box::new(SqliteDatasourceProvider::new()),
-        Box::new(MsSqlDatasourceProvider::new()),
-        Box::new(MongoDbDatasourceProvider::new()),
-    ]
 }

 const PLANET_SCALE_PREVIEW_FEATURE_ERR: &str = r#"
@@ -217,7 +205,7 @@ generator client {

 fn get_planet_scale_mode_arg(
     args: &HashMap<&str, ValueValidator>,
-    preview_features: &HashSet<&PreviewFeature>,
+    preview_features: &HashSet<PreviewFeature>,
     source: &SourceConfig,
     diagnostics: &mut Diagnostics,
 ) -> bool {
diff --git a/libs/datamodel/core/src/transform/ast_to_dml/lift.rs b/libs/datamodel/core/src/transform/ast_to_dml/lift.rs
index 1e259f021064..fda705db27a1 100644
--- a/libs/datamodel/core/src/transform/ast_to_dml/lift.rs
+++ b/libs/datamodel/core/src/transform/ast_to_dml/lift.rs
@@ -1,12 +1,16 @@
-use super::super::attributes::AllAttributes;
-use super::db::ParserDatabase;
-use crate::transform::helpers::ValueValidator;
-use crate::{ast, configuration, dml, Field, FieldType};
+use std::collections::HashSet;
+
+use super::{super::attributes::AllAttributes, db::ParserDatabase};
 use crate::{
-    ast::Identifier,
+    ast::{self, Identifier},
+    common::preview_features::PreviewFeature,
+    configuration,
     diagnostics::{DatamodelError, Diagnostics},
+    dml::{self, ScalarType},
+    transform::helpers::ValueValidator,
+    Datasource, Field, FieldType,
 };
-use crate::{dml::ScalarType, Datasource};
+use ::dml::relation_info::ReferentialAction;
 use datamodel_connector::connector_error::{ConnectorError, ErrorKind};
 use itertools::Itertools;
 use once_cell::sync::Lazy;
@@ -28,9 +32,13 @@ impl<'a> LiftAstToDml<'a> {
     /// the attributes defined by the given sources registered.
     ///
     /// The attributes defined by the given sources will be namespaced.
-    pub(crate) fn new(source: Option<&'a configuration::Datasource>, db: &'a ParserDatabase<'a>) -> LiftAstToDml<'a> {
+    pub(crate) fn new(
+        source: Option<&'a configuration::Datasource>,
+        preview_features: &HashSet<PreviewFeature>,
+        db: &'a ParserDatabase<'a>,
+    ) -> LiftAstToDml<'a> {
         LiftAstToDml {
-            attributes: AllAttributes::new(),
+            attributes: AllAttributes::new(preview_features),
             source,
             db,
         }
@@ -159,7 +167,19 @@ impl<'a> LiftAstToDml<'a> {
         let mut field = match field_type {
             FieldType::Relation(info) => {
                 let arity = self.lift_field_arity(&ast_field.arity);
-                let mut field = dml::RelationField::new(&ast_field.name.name, arity, info);
+
+                let mut field = dml::RelationField::new(&ast_field.name.name, arity, arity, info);
+
+                if let Some(ref source) = self.source {
+                    field.supports_restrict_action(
+                        source
+                            .active_connector
+                            .supports_referential_action(ReferentialAction::Restrict),
+                    );
+
+                    field.emulates_referential_actions(source.active_connector.emulates_referential_actions());
+                }
+
                 field.documentation = ast_field.documentation.clone().map(|comment| comment.text);
                 Field::RelationField(field)
             }
diff --git a/libs/datamodel/core/src/transform/ast_to_dml/standardise_formatting.rs b/libs/datamodel/core/src/transform/ast_to_dml/standardise_formatting.rs
index 346ccdd38831..2998fe280581 100644
--- a/libs/datamodel/core/src/transform/ast_to_dml/standardise_formatting.rs
+++ b/libs/datamodel/core/src/transform/ast_to_dml/standardise_formatting.rs
@@ -1,8 +1,6 @@
 use super::common::*;
 use crate::diagnostics::DatamodelError;
-use crate::{
-    ast, common::NameNormalizer, diagnostics::Diagnostics, dml, Field, OnDeleteStrategy, ScalarField, UniqueCriteria,
-};
+use crate::{ast, common::NameNormalizer, diagnostics::Diagnostics, dml, Field, ScalarField, UniqueCriteria};
 use itertools::Itertools;
 use std::collections::HashMap;

@@ -212,7 +210,9 @@ impl StandardiserForFormatting {
             fields: vec![],
             references: vec![],
             name: rel_info.name.clone(),
-            on_delete: OnDeleteStrategy::None,
+            on_delete: None,
+            on_update: None,
+            legacy_referential_actions: false,
         };

         let mut opposite_relation_field = dml::RelationField::new_generated(&model.name, relation_info, false);
@@ -289,7 +289,9 @@ impl StandardiserForFormatting {
             fields: underlying_field_names,
             references: unique_criteria_field_names,
             name: rel_info.name.clone(),
-            on_delete: OnDeleteStrategy::None,
+            on_delete: None,
+            on_update: None,
+            legacy_referential_actions: false,
         };

         let is_required = all_existing_underlying_fields_on_opposite_model_are_required
diff --git a/libs/datamodel/core/src/transform/ast_to_dml/standardise_parsing.rs b/libs/datamodel/core/src/transform/ast_to_dml/standardise_parsing.rs
index 395b113a77b5..86958b0e70e4 100644
--- a/libs/datamodel/core/src/transform/ast_to_dml/standardise_parsing.rs
+++ b/libs/datamodel/core/src/transform/ast_to_dml/standardise_parsing.rs
@@ -1,23 +1,102 @@
+use std::collections::HashSet;
+
+use ::dml::{field::FieldArity, relation_info::ReferentialAction};
+
 use super::common::*;
-use crate::{common::RelationNames, diagnostics::Diagnostics, dml, Field};
+use crate::{
+    common::{preview_features::PreviewFeature, RelationNames},
+    diagnostics::Diagnostics,
+    dml, Field,
+};

 /// Helper for standardising a datamodel during parsing.
 ///
 /// This will add relation names and M2M references contents
-pub struct StandardiserForParsing {}
+pub struct StandardiserForParsing<'a> {
+    preview_features: &'a HashSet<PreviewFeature>,
+}

-impl StandardiserForParsing {
+impl<'a> StandardiserForParsing<'a> {
     /// Creates a new instance, with all builtin attributes registered.
-    pub fn new() -> Self {
-        StandardiserForParsing {}
+    pub fn new(preview_features: &'a HashSet<PreviewFeature>) -> Self {
+        Self { preview_features }
     }

     pub fn standardise(&self, schema: &mut dml::Datamodel) -> Result<(), Diagnostics> {
         self.name_unnamed_relations(schema);
         self.set_relation_to_field_to_id_if_missing_for_m2m_relations(schema);
+        self.set_referential_arities(schema);
+        self.set_default_referential_actions(schema);
+
         Ok(())
     }

+    fn set_referential_arities(&self, schema: &mut dml::Datamodel) {
+        let mut modifications = Vec::new();
+
+        for (model_id, model) in schema.models().enumerate() {
+            for (field_id, field) in model.fields().enumerate() {
+                match field {
+                    Field::RelationField(field) if field.is_singular() => {
+                        let some_required = field
+                            .relation_info
+                            .fields
+                            .iter()
+                            .flat_map(|name| model.find_field(name))
+                            .any(|field| field.arity().is_required());
+
+                        let arity = if some_required {
+                            FieldArity::Required
+                        } else {
+                            field.arity
+                        };
+
+                        modifications.push((model_id, field_id, arity));
+                    }
+                    _ => (),
+                }
+            }
+        }
+
+        for (model_id, field_id, arity) in modifications {
+            let mut field = schema.models[model_id].fields[field_id]
+                .as_relation_field_mut()
+                .unwrap();
+
+            field.referential_arity = arity;
+        }
+    }
+
+    fn set_default_referential_actions(&self, schema: &mut dml::Datamodel) {
+        if self.preview_features.contains(&PreviewFeature::ReferentialActions) {
+            return;
+        }
+
+        for model in schema.models_mut() {
+            for field in model.fields_mut() {
+                match field {
+                    Field::RelationField(field) if field.is_singular() => {
+                        if field.relation_info.on_delete.is_some() || field.relation_info.on_update.is_some() {
+                            continue;
+                        }
+
+                        field.relation_info.on_update = Some(ReferentialAction::Cascade);
+                        field.relation_info.on_delete = Some(match field.referential_arity {
+                            FieldArity::Required => ReferentialAction::Cascade,
+                            _ => ReferentialAction::SetNull,
+                        });
+
+                        // Keeps the validator from rejecting these implicitly
+                        // set values while the preview feature is disabled.
+                        // Remove this before GA.
+                        field.relation_info.legacy_referential_actions();
+                    }
+                    _ => (),
+                }
+            }
+        }
+    }
+
     /// For M2M relations set the references to the @id fields of the foreign model.
     fn set_relation_to_field_to_id_if_missing_for_m2m_relations(&self, schema: &mut dml::Datamodel) {
         let schema_copy = schema.clone();
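`set_referential_arities` gives a relation a `referential_arity` that can be stricter than its declared arity: if any underlying scalar field is required, the relation counts as required for action defaults, since the database could never `SET NULL` a non-nullable column. A sketch of just that rule:

```rust
#[derive(Debug, Clone, Copy, PartialEq)]
enum FieldArity { Required, Optional }

// Sketch of the computation above: a singular relation is treated as
// required when any of its underlying foreign-key scalar fields is
// required, regardless of the relation field's declared arity.
fn referential_arity(declared: FieldArity, underlying: &[FieldArity]) -> FieldArity {
    if underlying.iter().any(|a| *a == FieldArity::Required) {
        FieldArity::Required
    } else {
        declared
    }
}

fn main() {
    use FieldArity::*;
    assert_eq!(referential_arity(Optional, &[Required]), Required);
    assert_eq!(referential_arity(Optional, &[Optional]), Optional);
}
```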
diff --git a/libs/datamodel/core/src/transform/ast_to_dml/validate.rs b/libs/datamodel/core/src/transform/ast_to_dml/validate.rs
index 4ec6d3312fa3..2206c970ea46 100644
--- a/libs/datamodel/core/src/transform/ast_to_dml/validate.rs
+++ b/libs/datamodel/core/src/transform/ast_to_dml/validate.rs
@@ -1,6 +1,8 @@
 use super::db::ParserDatabase;
 use crate::{
-    ast, configuration,
+    ast,
+    common::preview_features::PreviewFeature,
+    configuration,
     diagnostics::{DatamodelError, Diagnostics},
     dml,
     walkers::ModelWalker,
@@ -16,6 +18,7 @@ use std::collections::{HashMap, HashSet};
 /// When validating, we check if the datamodel is valid, and generate errors otherwise.
 pub struct Validator<'a> {
     source: Option<&'a configuration::Datasource>,
+    preview_features: &'a HashSet<PreviewFeature>,
 }

 /// State error message. Seeing this error means something went really wrong internally. It's the datamodel equivalent of a bluescreen.
@@ -26,8 +29,14 @@ const PRISMA_FORMAT_HINT: &str = "You can run `prisma format` to fix this automatically.";

 impl<'a> Validator<'a> {
     /// Creates a new instance, with all builtin attributes registered.
-    pub(crate) fn new(source: Option<&'a configuration::Datasource>) -> Validator<'a> {
-        Self { source }
+    pub fn new(
+        source: Option<&'a configuration::Datasource>,
+        preview_features: &'a HashSet<PreviewFeature>,
+    ) -> Validator<'a> {
+        Self {
+            source,
+            preview_features,
+        }
     }

     pub(crate) fn validate(&self, db: &ParserDatabase<'_>, schema: &mut dml::Datamodel) -> Result<(), Diagnostics> {
@@ -511,6 +520,32 @@ impl<'a> Validator<'a> {
                     ast_model.find_field(&field.name()).span,
                 ));
             }
+
+            if let dml::Field::RelationField(ref rf) = field {
+                let actions = &[rf.relation_info.on_delete, rf.relation_info.on_update];
+
+                actions.iter().flatten().for_each(|action| {
+                    if !connector.supports_referential_action(*action) {
+                        let allowed_values: Vec<_> = connector
+                            .referential_actions()
+                            .iter()
+                            .map(|f| format!("`{}`", f))
+                            .collect();
+
+                        let message = format!(
+                            "Invalid referential action: `{}`. Allowed values: ({})",
+                            action,
+                            allowed_values.join(", "),
+                        );
+
+                        diagnostics.push_error(DatamodelError::new_attribute_validation_error(
+                            &message,
+                            "relation",
+                            ast_model.find_field(field.name()).span,
+                        ));
+                    }
+                });
+            }
         }
     }

@@ -873,6 +908,36 @@ impl<'a> Validator<'a> {
                 ));
             }

+            if !self.preview_features.contains(&PreviewFeature::ReferentialActions)
+                && (rel_info.on_delete.is_some() || rel_info.on_update.is_some())
+                && !rel_info.legacy_referential_actions
+            {
+                let message = &format!(
+                    "The relation field `{}` on Model `{}` must not specify the `onDelete` or `onUpdate` argument in the {} attribute without enabling the `referentialActions` preview feature.",
+                    &field.name, &model.name, RELATION_ATTRIBUTE_NAME_WITH_AT
+                );
+
+                errors.push_error(DatamodelError::new_attribute_validation_error(
+                    message,
+                    RELATION_ATTRIBUTE_NAME,
+                    field_span,
+                ))
+            } else if field.is_list()
+                && !related_field.is_list()
+                && (rel_info.on_delete.is_some() || rel_info.on_update.is_some())
+            {
+                let message = &format!(
+                    "The relation field `{}` on Model `{}` must not specify the `onDelete` or `onUpdate` argument in the {} attribute. You must only specify it on the opposite field `{}` on model `{}`, or in case of a many to many relation, in an explicit join table.",
+                    &field.name, &model.name, RELATION_ATTRIBUTE_NAME_WITH_AT, &related_field.name, &related_model.name
+                );
+
+                errors.push_error(DatamodelError::new_attribute_validation_error(
+                    message,
+                    RELATION_ATTRIBUTE_NAME,
+                    field_span,
+                ));
+            }
+
             // ONE TO ONE
             if field.is_singular() && related_field.is_singular() {
                 if rel_info.fields.is_empty() && related_field_rel_info.fields.is_empty() {
@@ -908,6 +973,22 @@ impl<'a> Validator<'a> {
                 ));
             }

+            if self.preview_features.contains(&PreviewFeature::ReferentialActions)
+                && (rel_info.on_delete.is_some() || rel_info.on_update.is_some())
+                && (related_field_rel_info.on_delete.is_some() || related_field_rel_info.on_update.is_some())
+            {
+                let message = format!(
+                    "The relation fields `{}` on Model `{}` and `{}` on Model `{}` both provide the `onDelete` or `onUpdate` argument in the {} attribute. You have to provide it only on one of the two fields.",
+                    &field.name, &model.name, &related_field.name, &related_model.name, RELATION_ATTRIBUTE_NAME_WITH_AT
+                );
+
+                errors.push_error(DatamodelError::new_attribute_validation_error(
+                    &message,
+                    RELATION_ATTRIBUTE_NAME,
+                    field_span,
+                ));
+            }
+
             if !rel_info.fields.is_empty() && !related_field_rel_info.fields.is_empty() {
                 errors.push_error(DatamodelError::new_attribute_validation_error(
                     &format!(
diff --git a/libs/datamodel/core/src/transform/ast_to_dml/validation_pipeline.rs b/libs/datamodel/core/src/transform/ast_to_dml/validation_pipeline.rs
index db43c6101c84..e5dd00fb0e00 100644
--- a/libs/datamodel/core/src/transform/ast_to_dml/validation_pipeline.rs
+++ b/libs/datamodel/core/src/transform/ast_to_dml/validation_pipeline.rs
@@ -1,25 +1,35 @@
+use std::collections::HashSet;
+
 use super::db::ParserDatabase;
 use super::*;
-use crate::transform::ast_to_dml::standardise_parsing::StandardiserForParsing;
-use crate::{ast, configuration, diagnostics::Diagnostics, ValidatedDatamodel};
+use crate::{
+    ast, common::preview_features::PreviewFeature, configuration, diagnostics::Diagnostics,
+    transform::ast_to_dml::standardise_parsing::StandardiserForParsing, ValidatedDatamodel,
+};

 /// Is responsible for loading and validating the Datamodel defined in an AST.
 /// Wrapper for all lift and validation steps
 pub struct ValidationPipeline<'a> {
     source: Option<&'a configuration::Datasource>,
+    preview_features: &'a HashSet<PreviewFeature>,
     validator: Validator<'a>,
-    standardiser_for_parsing: StandardiserForParsing,
     standardiser_for_formatting: StandardiserForFormatting,
+    standardiser_for_parsing: StandardiserForParsing<'a>,
 }

 impl<'a, 'b> ValidationPipeline<'a> {
-    pub fn new(sources: &'a [configuration::Datasource]) -> ValidationPipeline<'a> {
+    pub fn new(
+        sources: &'a [configuration::Datasource],
+        preview_features: &'a HashSet<PreviewFeature>,
+    ) -> ValidationPipeline<'a> {
         let source = sources.first();
+
         ValidationPipeline {
             source,
-            validator: Validator::new(source),
+            preview_features,
+            validator: Validator::new(source, preview_features),
             standardiser_for_formatting: StandardiserForFormatting::new(),
-            standardiser_for_parsing: StandardiserForParsing::new(),
+            standardiser_for_parsing: StandardiserForParsing::new(preview_features),
         }
     }

@@ -50,7 +60,7 @@ impl<'a, 'b> ValidationPipeline<'a> {
         }

         // Phase 3: Lift AST to DML.
-        let lifter = LiftAstToDml::new(self.source, &db);
+        let lifter = LiftAstToDml::new(self.source, self.preview_features, &db);

         let mut schema = match lifter.lift() {
             Err(mut err) => {
diff --git a/libs/datamodel/core/src/transform/attributes/mod.rs b/libs/datamodel/core/src/transform/attributes/mod.rs
index e75920efd8fb..d46b7611e7b3 100644
--- a/libs/datamodel/core/src/transform/attributes/mod.rs
+++ b/libs/datamodel/core/src/transform/attributes/mod.rs
@@ -8,9 +8,10 @@ mod relation;
 mod unique_and_index;
 mod updated_at;

-use crate::dml;
+use crate::{common::preview_features::PreviewFeature, dml};
 use attribute_list_validator::AttributeListValidator;
 use attribute_validator::AttributeValidator;
+use std::collections::HashSet;

 /// This is the facade for all attribute validations. It is used within the `ValidationPipeline`.
 pub struct AllAttributes {
@@ -21,9 +22,9 @@ pub struct AllAttributes {
 }

 impl AllAttributes {
-    pub fn new() -> AllAttributes {
+    pub fn new(preview_features: &HashSet<PreviewFeature>) -> AllAttributes {
         AllAttributes {
-            field: new_builtin_field_attributes(),
+            field: new_builtin_field_attributes(preview_features),
             model: new_builtin_model_attributes(),
             enm: new_builtin_enum_attributes(),
             enm_value: new_builtin_enum_value_attributes(),
@@ -31,7 +32,7 @@ impl AllAttributes {
     }
 }

-fn new_builtin_field_attributes() -> AttributeListValidator<dml::Field> {
+fn new_builtin_field_attributes(preview_features: &HashSet<PreviewFeature>) -> AttributeListValidator<dml::Field> {
     let mut validator = AttributeListValidator::<dml::Field>::new();

     // this order of field attributes is used in the formatter as well
@@ -40,7 +41,9 @@ fn new_builtin_field_attributes() -> AttributeListValidator<dml::Field> {
     validator.add(Box::new(default::DefaultAttributeValidator {}));
     validator.add(Box::new(updated_at::UpdatedAtAttributeValidator {}));
     validator.add(Box::new(map::MapAttributeValidatorForField {}));
-    validator.add(Box::new(relation::RelationAttributeValidator {}));
+    validator.add(Box::new(relation::RelationAttributeValidator::new(
+        preview_features.clone(),
+    )));
     validator.add(Box::new(ignore::IgnoreAttributeValidatorForField {}));

     validator
diff --git a/libs/datamodel/core/src/transform/attributes/relation.rs b/libs/datamodel/core/src/transform/attributes/relation.rs
index 7d0cbb56e0f0..b98f78eacd18 100644
--- a/libs/datamodel/core/src/transform/attributes/relation.rs
+++ b/libs/datamodel/core/src/transform/attributes/relation.rs
@@ -1,10 +1,23 @@
+use std::collections::HashSet;
+
 use super::{super::helpers::*, AttributeValidator};
-use crate::common::RelationNames;
-use crate::diagnostics::DatamodelError;
-use crate::{ast, dml, Field};
+use crate::{
+    ast,
+    common::{preview_features::PreviewFeature, RelationNames},
+    diagnostics::DatamodelError,
+    dml, Field,
+};

 /// Prismas builtin `@relation` attribute.
-pub struct RelationAttributeValidator {}
+pub struct RelationAttributeValidator {
+    preview_features: HashSet<PreviewFeature>,
+}
+
+impl RelationAttributeValidator {
+    pub fn new(preview_features: HashSet<PreviewFeature>) -> Self {
+        Self { preview_features }
+    }
+}

 impl AttributeValidator<Field> for RelationAttributeValidator {
     fn attribute_name(&self) -> &'static str {
@@ -32,6 +45,14 @@ impl AttributeValidator<Field> for RelationAttributeValidator {
                 rf.relation_info.fields = base_fields.as_array().to_literal_vec()?;
             }

+            if let Ok(on_delete) = args.arg("onDelete") {
+                rf.relation_info.on_delete = Some(on_delete.as_referential_action()?);
+            }
+
+            if let Ok(on_update) = args.arg("onUpdate") {
+                rf.relation_info.on_update = Some(on_update.as_referential_action()?);
+            }
+
             Ok(())
         } else {
             self.new_attribute_validation_error("Invalid field type, not a relation.", args.span())
@@ -41,9 +62,7 @@ impl AttributeValidator<Field> for RelationAttributeValidator {
     fn serialize(&self, field: &dml::Field, datamodel: &dml::Datamodel) -> Vec<ast::Argument> {
         if let dml::Field::RelationField(rf) = field {
             let mut args = Vec::new();
-
             let relation_info = &rf.relation_info;
-
             let parent_model = datamodel.find_model_by_relation_field_ref(rf).unwrap();

             let related_model = datamodel
@@ -95,11 +114,20 @@ impl AttributeValidator<Field> for RelationAttributeValidator {
                 }
             }

-            if relation_info.on_delete != dml::OnDeleteStrategy::None {
-                args.push(ast::Argument::new_constant(
-                    "onDelete",
-                    &relation_info.on_delete.to_string(),
-                ));
+            if self.preview_features.contains(&PreviewFeature::ReferentialActions) {
+                if let Some(ref_action) = relation_info.on_delete {
+                    if rf.default_on_delete_action() != ref_action {
+                        let expression = ast::Expression::ConstantValue(ref_action.to_string(), ast::Span::empty());
+                        args.push(ast::Argument::new("onDelete", expression));
+                    }
+                }
+
+                if let Some(ref_action) = relation_info.on_update {
+                    if rf.default_on_update_action() != ref_action {
+                        let expression = ast::Expression::ConstantValue(ref_action.to_string(), ast::Span::empty());
+                        args.push(ast::Argument::new("onUpdate", expression));
+                    }
+                }
             }

             if !args.is_empty() {
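Serialization is deliberately lossy in the other direction: `onDelete`/`onUpdate` are only rendered back into the schema when they differ from the computed default, which keeps reformatting from adding redundant arguments to every relation. A reduced sketch of that round-trip rule (enum and defaults inlined for illustration):

```rust
#[derive(Debug, Clone, Copy, PartialEq)]
enum ReferentialAction { Cascade, Restrict, SetNull }

// Sketch: render the argument only when it deviates from the default,
// mirroring the `rf.default_on_delete_action() != ref_action` check above.
fn render_on_delete(configured: Option<ReferentialAction>, default: ReferentialAction) -> Option<String> {
    match configured {
        Some(action) if action != default => Some(format!("onDelete: {:?}", action)),
        _ => None, // omit: either unset or identical to the default
    }
}

fn main() {
    use ReferentialAction::*;
    assert_eq!(render_on_delete(Some(Cascade), Restrict).as_deref(), Some("onDelete: Cascade"));
    assert_eq!(render_on_delete(Some(Restrict), Restrict), None); // default, not rendered
    assert_eq!(render_on_delete(None, Restrict), None);
    let _ = SetNull; // third variant shown for shape only
}
```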
diff --git a/libs/datamodel/core/src/transform/dml_to_ast/lower.rs b/libs/datamodel/core/src/transform/dml_to_ast/lower.rs
index fea88811a412..3abd5771ad30 100644
--- a/libs/datamodel/core/src/transform/dml_to_ast/lower.rs
+++ b/libs/datamodel/core/src/transform/dml_to_ast/lower.rs
@@ -1,4 +1,7 @@
+use std::collections::HashSet;
+
 use super::super::attributes::AllAttributes;
+use crate::common::preview_features::PreviewFeature;
 use crate::{
     ast::{self, Attribute, Span},
     dml, Datasource,
@@ -11,9 +14,9 @@ pub struct LowerDmlToAst<'a> {

 impl<'a> LowerDmlToAst<'a> {
     /// Creates a new instance, with all builtin attributes registered.
-    pub fn new(datasource: Option<&'a Datasource>) -> Self {
+    pub fn new(datasource: Option<&'a Datasource>, preview_features: &HashSet<PreviewFeature>) -> Self {
         Self {
-            attributes: AllAttributes::new(),
+            attributes: AllAttributes::new(preview_features),
             datasource,
         }
     }
@@ -72,6 +75,7 @@ impl<'a> LowerDmlToAst<'a> {
     pub fn lower_field(&self, field: &dml::Field, datamodel: &dml::Datamodel) -> ast::Field {
         let mut attributes = self.attributes.field.serialize(field, datamodel);
+
         if let (Some((scalar_type, native_type)), Some(datasource)) = (
             field.as_scalar_field().and_then(|sf| sf.field_type.as_native_type()),
             self.datasource,
diff --git a/libs/datamodel/core/src/transform/helpers/value_validator.rs b/libs/datamodel/core/src/transform/helpers/value_validator.rs
index ef8ea6ca653a..580dbeea7b28 100644
--- a/libs/datamodel/core/src/transform/helpers/value_validator.rs
+++ b/libs/datamodel/core/src/transform/helpers/value_validator.rs
@@ -8,6 +8,7 @@ use crate::{
 };
 use bigdecimal::BigDecimal;
 use chrono::{DateTime, FixedOffset};
+use dml::relation_info::ReferentialAction;
 use dml::scalars::ScalarType;
 use prisma_value::PrismaValue;
 use std::error;
@@ -164,7 +165,27 @@ impl ValueValidator {
         }
     }

-    /// Unwraps the wrapped value as an array literal.
+    /// Unwraps the wrapped value as a referential action.
+    pub fn as_referential_action(&self) -> Result<ReferentialAction, DatamodelError> {
+        match self.as_constant_literal()?.as_str() {
+            "Cascade" => Ok(ReferentialAction::Cascade),
+            "Restrict" => Ok(ReferentialAction::Restrict),
+            "NoAction" => Ok(ReferentialAction::NoAction),
+            "SetNull" => Ok(ReferentialAction::SetNull),
+            "SetDefault" => Ok(ReferentialAction::SetDefault),
+            s => {
+                let message = format!("Invalid referential action: `{}`", s);
+
+                Err(DatamodelError::AttributeValidationError {
+                    message,
+                    attribute_name: String::from("relation"),
+                    span: self.span(),
+                })
+            }
+        }
+    }
+
+    /// Unwraps the wrapped value as an array literal.
     pub fn as_array(&self) -> Vec<ValueValidator> {
         match &self.value {
             ast::Expression::Array(values, _) => {
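`as_referential_action` is a plain constant-to-enum mapping with a spanned error for anything else. A standalone sketch of the same parse, using `Result<_, String>` in place of `DatamodelError` (which carries span bookkeeping this sketch omits):

```rust
#[derive(Debug, Clone, Copy, PartialEq)]
enum ReferentialAction { Cascade, Restrict, NoAction, SetNull, SetDefault }

// Stand-in for ValueValidator::as_referential_action; the real method
// returns a DatamodelError pointing at the argument's source span.
fn parse_referential_action(s: &str) -> Result<ReferentialAction, String> {
    use ReferentialAction::*;
    match s {
        "Cascade" => Ok(Cascade),
        "Restrict" => Ok(Restrict),
        "NoAction" => Ok(NoAction),
        "SetNull" => Ok(SetNull),
        "SetDefault" => Ok(SetDefault),
        other => Err(format!("Invalid referential action: `{}`", other)),
    }
}

fn main() {
    assert_eq!(parse_referential_action("Cascade"), Ok(ReferentialAction::Cascade));
    assert!(parse_referential_action("MeowMeow").is_err()); // cf. the tests below
}
```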
diff --git a/libs/datamodel/core/src/walkers.rs b/libs/datamodel/core/src/walkers.rs
index 634f8521465c..c4ca3dec9a9a 100644
--- a/libs/datamodel/core/src/walkers.rs
+++ b/libs/datamodel/core/src/walkers.rs
@@ -7,7 +7,7 @@ use crate::{
     },
     NativeTypeInstance, RelationField,
 };
-use dml::scalars::ScalarType;
+use dml::{relation_info::ReferentialAction, scalars::ScalarType};
 use itertools::Itertools;

 /// Iterator over all the models in the schema.
@@ -341,6 +341,22 @@ impl<'a> RelationFieldWalker<'a> {
             }),
         }
     }
+
+    pub fn on_update_action(&self) -> Option<ReferentialAction> {
+        self.get().relation_info.on_update
+    }
+
+    pub fn on_delete_action(&self) -> Option<ReferentialAction> {
+        self.get().relation_info.on_delete
+    }
+
+    pub fn default_on_update_action(&self) -> ReferentialAction {
+        self.get().default_on_update_action()
+    }
+
+    pub fn default_on_delete_action(&self) -> ReferentialAction {
+        self.get().default_on_delete_action()
+    }
 }

 #[derive(Debug, Clone, Copy)]
diff --git a/libs/datamodel/core/tests/attributes/relations/mod.rs b/libs/datamodel/core/tests/attributes/relations/mod.rs
index 0a6e722b5c97..ed67e30dcf26 100644
--- a/libs/datamodel/core/tests/attributes/relations/mod.rs
+++ b/libs/datamodel/core/tests/attributes/relations/mod.rs
@@ -1,3 +1,4 @@
+pub mod referential_actions;
 pub mod relations_negative;
 pub mod relations_new;
 pub mod relations_positive;
diff --git a/libs/datamodel/core/tests/attributes/relations/referential_actions.rs b/libs/datamodel/core/tests/attributes/relations/referential_actions.rs
new file mode 100644
index 000000000000..538d76d1e397
--- /dev/null
+++ b/libs/datamodel/core/tests/attributes/relations/referential_actions.rs
@@ -0,0 +1,412 @@
+use crate::common::*;
+use datamodel::{ast::Span, diagnostics::DatamodelError, ReferentialAction::*};
+use indoc::{formatdoc, indoc};
+
+#[test]
+fn on_delete_actions() {
+    let actions = &[Cascade, Restrict, NoAction, SetNull, SetDefault];
+
+    for action in actions {
+        let dml = formatdoc!(
+            r#"
+            generator client {{
+                provider = "prisma-client-js"
+                previewFeatures = ["referentialActions"]
+            }}
+
+            model A {{
+                id Int @id
+                bs B[]
+            }}
+
+            model B {{
+                id Int @id
+                aId Int
+                a A @relation(fields: [aId], references: [id], onDelete: {})
+            }}
+        "#,
+            action
+        );
+
+        parse(&dml)
+            .assert_has_model("B")
+            .assert_has_relation_field("a")
+            .assert_relation_delete_strategy(*action);
+    }
+}
+
+#[test]
+fn on_update_actions() {
+    let actions = &[Cascade, Restrict, NoAction, SetNull, SetDefault];
+
+    for action in actions {
+        let dml = formatdoc!(
+            r#"
+            generator client {{
+                provider = "prisma-client-js"
+                previewFeatures = ["referentialActions"]
+            }}
+
+            model A {{
+                id Int @id
+                bs B[]
+            }}
+
+            model B {{
+                id Int @id
+                aId Int
+                a A @relation(fields: [aId], references: [id], onUpdate: {})
+            }}
+        "#,
+            action
+        );
+
+        parse(&dml)
+            .assert_has_model("B")
+            .assert_has_relation_field("a")
+            .assert_relation_update_strategy(*action);
+    }
+}
+
+#[test]
+fn actions_on_mongo() {
+    let actions = &[Restrict, SetNull];
+
+    for action in actions {
+        let dml = formatdoc!(
+            r#"
+            datasource db {{
+                provider = "mongodb"
+                url = "mongodb://"
+            }}
+
+            generator client {{
+                provider = "prisma-client-js"
+                previewFeatures = ["referentialActions"]
+            }}
+
+            model A {{
+                id Int @id @map("_id")
+                bs B[]
+            }}
+
+            model B {{
+                id Int @id @map("_id")
+                aId Int
+                a A @relation(fields: [aId], references: [id], onDelete: {action}, onUpdate: {action})
+            }}
+        "#,
+            action = action
+        );
+
+        parse(&dml)
+            .assert_has_model("B")
+            .assert_has_relation_field("a")
+            .assert_relation_delete_strategy(*action)
+            .assert_relation_update_strategy(*action);
+    }
+}
+
+#[test]
+fn actions_on_planetscale() {
+    let actions = &[Restrict, SetNull];
+
+    for action in actions {
+        let dml = formatdoc!(
+            r#"
+            datasource db {{
+                provider = "mysql"
+                planetScaleMode = true
+                url = "mysql://root:prisma@localhost:3306/mydb"
+            }}
+
+            generator client {{
+                provider = "prisma-client-js"
+                previewFeatures = ["planetScaleMode", "referentialActions"]
+            }}
+
+            model A {{
+                id Int @id
+                bs B[]
+            }}
+
+            model B {{
+                id Int @id
+                aId Int
+                a A @relation(fields: [aId], references: [id], onDelete: {action}, onUpdate: {action})
+            }}
+        "#,
+            action = action
+        );
+
+        parse(&dml)
+            .assert_has_model("B")
+            .assert_has_relation_field("a")
+            .assert_relation_delete_strategy(*action)
+            .assert_relation_update_strategy(*action);
+    }
+}
+
+#[test]
+fn invalid_on_delete_action() {
+    let dml = indoc! { r#"
+        generator client {
+            provider = "prisma-client-js"
+            previewFeatures = ["referentialActions"]
+        }
+
+        model A {
+            id Int @id
+            bs B[]
+        }
+
+        model B {
+            id Int @id
+            aId Int
+            a A @relation(fields: [aId], references: [id], onDelete: MeowMeow)
+        }
+    "#};
+
+    parse_error(dml).assert_is(DatamodelError::new_attribute_validation_error(
+        "Invalid referential action: `MeowMeow`",
+        "relation",
+        Span::new(238, 246),
+    ));
+}
+
+#[test]
+fn invalid_on_update_action() {
+    let dml = indoc! { r#"
+        generator client {
+            provider = "prisma-client-js"
+            previewFeatures = ["referentialActions"]
+        }
+
+        model A {
+            id Int @id
+            bs B[]
+        }
+
+        model B {
+            id Int @id
+            aId Int
+            a A @relation(fields: [aId], references: [id], onUpdate: MeowMeow)
+        }
+    "#};
+
+    parse_error(dml).assert_is(DatamodelError::new_attribute_validation_error(
+        "Invalid referential action: `MeowMeow`",
+        "relation",
+        Span::new(238, 246),
+    ));
+}
+
+#[test]
+fn restrict_should_not_work_on_sql_server() {
+    let dml = indoc! { r#"
+        datasource db {
+            provider = "sqlserver"
+            url = "sqlserver://"
+        }
+
+        generator client {
+            provider = "prisma-client-js"
+            previewFeatures = ["referentialActions"]
+        }
+
+        model A {
+            id Int @id
+            bs B[]
+        }
+
+        model B {
+            id Int @id
+            aId Int
+            a A @relation(fields: [aId], references: [id], onUpdate: Restrict, onDelete: Restrict)
+        }
+    "#};
+
+    let message =
+        "Invalid referential action: `Restrict`. Allowed values: (`Cascade`, `NoAction`, `SetNull`, `SetDefault`)";
+
+    parse_error(dml).assert_are(&[
+        DatamodelError::new_attribute_validation_error(&message, "relation", Span::new(252, 339)),
+        DatamodelError::new_attribute_validation_error(&message, "relation", Span::new(252, 339)),
+    ]);
+}
+
+#[test]
+fn actions_should_be_defined_only_from_one_side() {
+    let dml = indoc! { r#"
+        datasource db {
+            provider = "sqlserver"
+            url = "sqlserver://"
+        }
+
+        generator client {
+            provider = "prisma-client-js"
+            previewFeatures = ["referentialActions"]
+        }
+
+        model A {
+            id Int @id
+            b B? @relation(onUpdate: NoAction, onDelete: NoAction)
+        }
+
+        model B {
+            id Int @id
+            aId Int
+            a A @relation(fields: [aId], references: [id], onUpdate: NoAction, onDelete: NoAction)
+        }
+    "#};
+
+    let message1 =
+        "The relation fields `b` on Model `A` and `a` on Model `B` both provide the `onDelete` or `onUpdate` argument in the @relation attribute. You have to provide it only on one of the two fields.";
+
+    let message2 =
+        "The relation fields `a` on Model `B` and `b` on Model `A` both provide the `onDelete` or `onUpdate` argument in the @relation attribute. You have to provide it only on one of the two fields.";
You have to provide it only on one of the two fields."; + + parse_error(dml).assert_are(&[ + DatamodelError::new_attribute_validation_error(&message1, "relation", Span::new(201, 256)), + DatamodelError::new_attribute_validation_error(&message2, "relation", Span::new(300, 387)), + ]); +} + +#[test] +fn concrete_actions_should_not_work_on_planetscale() { + let actions = &[(Cascade, 411), (NoAction, 412), (SetDefault, 414)]; + + for (action, span) in actions { + let dml = formatdoc!( + r#" + datasource db {{ + provider = "mysql" + planetScaleMode = true + url = "mysql://root:prisma@localhost:3306/mydb" + }} + + generator client {{ + provider = "prisma-client-js" + previewFeatures = ["planetScaleMode", "referentialActions"] + }} + + model A {{ + id Int @id @map("_id") + bs B[] + }} + + model B {{ + id Int @id @map("_id") + aId Int + a A @relation(fields: [aId], references: [id], onDelete: {}) + }} + "#, + action + ); + + let message = format!( + "Invalid referential action: `{}`. Allowed values: (`Restrict`, `SetNull`)", + action + ); + + parse_error(&dml).assert_are(&[DatamodelError::new_attribute_validation_error( + &message, + "relation", + Span::new(345, *span), + )]); + } +} + +#[test] +fn on_delete_cannot_be_defined_on_the_wrong_side() { + let dml = indoc! { r#" + datasource db { + provider = "mysql" + url = "mysql://" + } + + generator client { + provider = "prisma-client-js" + previewFeatures = ["referentialActions"] + } + + model A { + id Int @id + bs B[] @relation(onDelete: Restrict) + } + + model B { + id Int @id + aId Int + a A @relation(fields: [aId], references: [id], onDelete: Restrict) + } + "#}; + + let message = + "The relation field `bs` on Model `A` must not specify the `onDelete` or `onUpdate` argument in the @relation attribute. You must only specify it on the opposite field `a` on model `B`, or in case of a many to many relation, in an explicit join table."; + + parse_error(dml).assert_are(&[DatamodelError::new_attribute_validation_error( + &message, + "relation", + Span::new(193, 230), + )]); +} + +#[test] +fn on_update_cannot_be_defined_on_the_wrong_side() { + let dml = indoc! { r#" + datasource db { + provider = "mysql" + url = "mysql://" + } + + generator client { + provider = "prisma-client-js" + previewFeatures = ["referentialActions"] + } + + model A { + id Int @id + bs B[] @relation(onUpdate: Restrict) + } + + model B { + id Int @id + aId Int + a A @relation(fields: [aId], references: [id], onUpdate: Restrict) + } + "#}; + + let message = + "The relation field `bs` on Model `A` must not specify the `onDelete` or `onUpdate` argument in the @relation attribute. You must only specify it on the opposite field `a` on model `B`, or in case of a many to many relation, in an explicit join table."; + + parse_error(dml).assert_are(&[DatamodelError::new_attribute_validation_error( + &message, + "relation", + Span::new(193, 230), + )]); +} + +#[test] +fn referential_actions_without_preview_feature_should_error() { + let dml = indoc! 
{ r#" + model A { + id Int @id + bs B[] + } + + model B { + id Int @id + aId Int + a A @relation(fields: [aId], references: [id], onUpdate: Restrict) + } + "#}; + + let message = "The relation field `a` on Model `B` must not specify the `onDelete` or `onUpdate` argument in the @relation attribute without enabling the `referentialActions` preview feature."; + + parse_error(dml).assert_are(&[DatamodelError::new_attribute_validation_error( + &message, + "relation", + Span::new(80, 147), + )]); +} diff --git a/libs/datamodel/core/tests/common.rs b/libs/datamodel/core/tests/common.rs index c21052e10799..f65f22a63cc2 100644 --- a/libs/datamodel/core/tests/common.rs +++ b/libs/datamodel/core/tests/common.rs @@ -32,7 +32,8 @@ pub trait ScalarFieldAsserts { pub trait RelationFieldAsserts { fn assert_relation_name(&self, t: &str) -> &Self; fn assert_relation_to(&self, t: &str) -> &Self; - fn assert_relation_delete_strategy(&self, t: dml::OnDeleteStrategy) -> &Self; + fn assert_relation_delete_strategy(&self, t: dml::ReferentialAction) -> &Self; + fn assert_relation_update_strategy(&self, t: dml::ReferentialAction) -> &Self; fn assert_relation_referenced_fields(&self, t: &[&str]) -> &Self; fn assert_relation_base_fields(&self, t: &[&str]) -> &Self; fn assert_ignored(&self, state: bool) -> &Self; @@ -202,8 +203,13 @@ impl RelationFieldAsserts for dml::RelationField { self } - fn assert_relation_delete_strategy(&self, t: dml::OnDeleteStrategy) -> &Self { - assert_eq!(self.relation_info.on_delete, t); + fn assert_relation_delete_strategy(&self, t: dml::ReferentialAction) -> &Self { + assert_eq!(self.relation_info.on_delete, Some(t)); + self + } + + fn assert_relation_update_strategy(&self, t: dml::ReferentialAction) -> &Self { + assert_eq!(self.relation_info.on_update, Some(t)); self } diff --git a/libs/datamodel/core/tests/render_to_dmmf/files/general.json b/libs/datamodel/core/tests/render_to_dmmf/files/general.json index ae1cf2e26a04..1b9a5eb9f120 100644 --- a/libs/datamodel/core/tests/render_to_dmmf/files/general.json +++ b/libs/datamodel/core/tests/render_to_dmmf/files/general.json @@ -90,7 +90,6 @@ "relationName": "author", "relationFromFields": [], "relationToFields": [], - "relationOnDelete": "NONE", "isGenerated": false, "isUpdatedAt": false }, @@ -107,7 +106,7 @@ "relationName": "ProfileToUser", "relationFromFields": [], "relationToFields": [], - "relationOnDelete": "NONE", + "relationOnDelete": "SetNull", "isGenerated": false, "isUpdatedAt": false } @@ -165,7 +164,7 @@ "relationToFields": [ "id" ], - "relationOnDelete": "NONE", + "relationOnDelete": "Cascade", "isGenerated": false, "isUpdatedAt": false }, @@ -304,7 +303,7 @@ "relationToFields": [ "id" ], - "relationOnDelete": "NONE", + "relationOnDelete": "Cascade", "isGenerated": false, "isUpdatedAt": false }, @@ -321,7 +320,6 @@ "relationName": "PostToPostToCategory", "relationFromFields": [], "relationToFields": [], - "relationOnDelete": "NONE", "isGenerated": false, "isUpdatedAt": false } @@ -378,7 +376,6 @@ "relationName": "CategoryToPostToCategory", "relationFromFields": [], "relationToFields": [], - "relationOnDelete": "NONE", "isGenerated": false, "isUpdatedAt": false }, @@ -477,7 +474,7 @@ "title", "createdAt" ], - "relationOnDelete": "NONE", + "relationOnDelete": "Cascade", "isGenerated": false, "isUpdatedAt": false }, @@ -498,7 +495,7 @@ "relationToFields": [ "id" ], - "relationOnDelete": "NONE", + "relationOnDelete": "Cascade", "isGenerated": false, "isUpdatedAt": false } @@ -559,7 +556,7 @@ "relationToFields": [ "id" ], - 
"relationOnDelete": "NONE", + "relationOnDelete": "Cascade", "isGenerated": false, "isUpdatedAt": false } @@ -600,7 +597,7 @@ "relationName": "AToB", "relationFromFields": [], "relationToFields": [], - "relationOnDelete": "NONE", + "relationOnDelete": "SetNull", "isGenerated": false, "isUpdatedAt": false } @@ -611,4 +608,4 @@ "uniqueIndexes": [] } ] -} \ No newline at end of file +} diff --git a/libs/datamodel/core/tests/render_to_dmmf/files/without_relation_name.json b/libs/datamodel/core/tests/render_to_dmmf/files/without_relation_name.json index 020ff07769ba..5ba52cd80d02 100644 --- a/libs/datamodel/core/tests/render_to_dmmf/files/without_relation_name.json +++ b/libs/datamodel/core/tests/render_to_dmmf/files/without_relation_name.json @@ -32,7 +32,6 @@ "relationName": "PostToUser", "relationFromFields": [], "relationToFields": [], - "relationOnDelete": "NONE", "isGenerated": false, "isUpdatedAt": false } @@ -90,7 +89,7 @@ "relationToFields": [ "id" ], - "relationOnDelete": "NONE", + "relationOnDelete": "Cascade", "isGenerated": false, "isUpdatedAt": false } @@ -101,4 +100,4 @@ "uniqueIndexes": [] } ] -} \ No newline at end of file +} diff --git a/libs/prisma-models/src/datamodel_converter.rs b/libs/prisma-models/src/datamodel_converter.rs index 8617827127ab..960eea027e90 100644 --- a/libs/prisma-models/src/datamodel_converter.rs +++ b/libs/prisma-models/src/datamodel_converter.rs @@ -98,6 +98,8 @@ impl<'a> DatamodelConverter<'a> { relation_name: relation.name.clone(), relation_side: relation.relation_side(rf), relation_info: rf.relation_info.clone(), + on_delete_default: rf.default_on_delete_action(), + on_update_default: rf.default_on_update_action(), })) } dml::Field::ScalarField(sf) => { @@ -132,8 +134,6 @@ impl<'a> DatamodelConverter<'a> { .filter(|r| r.model_a.is_relation_supported(&r.field_a) && r.model_b.is_relation_supported(&r.field_b)) .map(|r| RelationTemplate { name: r.name(), - model_a_on_delete: OnDelete::SetNull, - model_b_on_delete: OnDelete::SetNull, manifestation: r.manifestation(), model_a_name: r.model_a.name.clone(), model_b_name: r.model_b.name.clone(), diff --git a/libs/prisma-models/src/field/relation.rs b/libs/prisma-models/src/field/relation.rs index 98f0294ca3de..3df44cbdbb1a 100644 --- a/libs/prisma-models/src/field/relation.rs +++ b/libs/prisma-models/src/field/relation.rs @@ -1,5 +1,5 @@ use crate::prelude::*; -use datamodel::{FieldArity, RelationInfo}; +use datamodel::{FieldArity, ReferentialAction, RelationInfo}; use once_cell::sync::OnceCell; use std::{ fmt::Debug, @@ -21,6 +21,8 @@ pub struct RelationFieldTemplate { pub relation_name: String, pub relation_side: RelationSide, pub relation_info: RelationInfo, + pub on_delete_default: ReferentialAction, + pub on_update_default: ReferentialAction, } #[derive(Clone)] @@ -33,6 +35,9 @@ pub struct RelationField { pub relation: OnceCell, pub relation_info: RelationInfo, + pub on_delete_default: ReferentialAction, + pub on_update_default: ReferentialAction, + pub model: ModelWeakRef, pub(crate) fields: OnceCell>, } @@ -112,6 +117,8 @@ impl RelationFieldTemplate { relation: OnceCell::new(), relation_info: self.relation_info, fields: OnceCell::new(), + on_delete_default: self.on_delete_default, + on_update_default: self.on_update_default, }) } } @@ -299,4 +306,8 @@ impl RelationField { pub fn db_names(&self) -> impl Iterator { self.scalar_fields().into_iter().map(|f| f.db_name().to_owned()) } + + pub fn on_delete(&self) -> Option<&ReferentialAction> { + self.relation_info.on_delete.as_ref() + } } diff --git 
a/libs/prisma-models/src/fields.rs b/libs/prisma-models/src/fields.rs index 663b914344f6..09f91b5fcef1 100644 --- a/libs/prisma-models/src/fields.rs +++ b/libs/prisma-models/src/fields.rs @@ -113,21 +113,6 @@ impl Fields { self.relation_weak().iter().map(|f| f.upgrade().unwrap()).collect() } - pub fn cascading_relation(&self) -> Vec> { - self.relation_weak() - .iter() - .map(|f| f.upgrade().unwrap()) - .fold(Vec::new(), |mut acc, rf| { - match rf.relation_side { - RelationSide::A if rf.relation().model_a_on_delete.is_cascade() => acc.push(rf), - RelationSide::B if rf.relation().model_b_on_delete.is_cascade() => acc.push(rf), - _ => (), - } - - acc - }) - } - fn relation_weak(&self) -> &[Weak] { self.relation .get_or_init(|| self.all.iter().fold(Vec::new(), Self::relation_filter)) diff --git a/libs/prisma-models/src/internal_data_model.rs b/libs/prisma-models/src/internal_data_model.rs index 6a12296b7a24..4bc677138b9f 100644 --- a/libs/prisma-models/src/internal_data_model.rs +++ b/libs/prisma-models/src/internal_data_model.rs @@ -147,12 +147,27 @@ impl InternalDataModel { self.version.is_none() } - pub fn fields_requiring_model(&self, model: &ModelRef) -> Vec { + /// Finds all non-list relation fields pointing to the given model. + /// `required` may narrow down the returned fields to required fields only. Returns all on `false`. + pub fn fields_pointing_to_model(&self, model: &ModelRef, required: bool) -> Vec { self.relation_fields() .iter() - .filter(|rf| &rf.related_model() == model) - .filter(|f| f.is_required && !f.is_list) - .map(|f| Arc::clone(f)) + .filter(|rf| &rf.related_model() == model) // All relation fields pointing to `model`. + .filter(|rf| rf.is_inlined_on_enclosing_model()) // Not a list, not a virtual field. + .filter(|rf| !required || rf.is_required) // If only required fields should be returned + .map(Arc::clone) + .collect() + } + + /// Finds all relation fields where the foreign key refers to the given field (as either singular or compound). 
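For context, a minimal sketch of how the new lookup helper just above might be called; the wrapper function and its name are illustrative only and not part of this change:

// Sketch: collect the names of all required relation fields that point at `model`,
// using the `fields_pointing_to_model` helper added in this diff.
fn required_fields_pointing_to(dm: &InternalDataModel, model: &ModelRef) -> Vec<String> {
    dm.fields_pointing_to_model(model, true) // `true` restricts to required fields
        .into_iter()
        .map(|rf| rf.name.clone())
        .collect()
}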
+ pub fn fields_refering_to_field(&self, field: &ScalarFieldRef) -> Vec<RelationFieldRef> { + let model_name = &field.model().name; + + self.relation_fields() + .iter() + .filter(|rf| &rf.relation_info.to == model_name) + .filter(|rf| rf.relation_info.references.contains(&field.name)) + .map(Arc::clone) .collect() } diff --git a/libs/prisma-models/src/relation.rs b/libs/prisma-models/src/relation.rs index e02a11d31587..4b855b9d3408 100644 --- a/libs/prisma-models/src/relation.rs +++ b/libs/prisma-models/src/relation.rs @@ -1,4 +1,5 @@ use crate::prelude::*; +use datamodel::ReferentialAction; use once_cell::sync::OnceCell; use std::{ fmt::Debug, @@ -11,8 +12,6 @@ pub type RelationWeakRef = Weak<Relation>; #[derive(Debug)] pub struct RelationTemplate { pub name: String, - pub model_a_on_delete: OnDelete, - pub model_b_on_delete: OnDelete, pub manifestation: RelationLinkManifestation, pub model_a_name: String, pub model_b_name: String, @@ -26,9 +25,6 @@ pub struct Relation { model_a_name: String, model_b_name: String, - pub model_a_on_delete: OnDelete, - pub model_b_on_delete: OnDelete, - model_a: OnceCell<ModelWeakRef>, model_b: OnceCell<ModelWeakRef>, @@ -46,8 +42,6 @@ impl Debug for Relation { .field("name", &self.name) .field("model_a_name", &self.model_a_name) .field("model_b_name", &self.model_b_name) - .field("model_a_on_delete", &self.model_a_on_delete) - .field("model_b_on_delete", &self.model_b_on_delete) .field("model_a", &self.model_a) .field("model_b", &self.model_b) .field("field_a", &self.field_a) @@ -76,28 +70,6 @@ pub struct RelationTable { pub model_b_column: String, } -#[derive(Debug, Clone, Copy, PartialEq, Eq)] -pub enum OnDelete { - SetNull, - Cascade, -} - -impl OnDelete { - pub fn is_cascade(self) -> bool { - match self { - OnDelete::Cascade => true, - OnDelete::SetNull => false, - } - } - - pub fn is_set_null(self) -> bool { - match self { - OnDelete::Cascade => false, - OnDelete::SetNull => true, - } - } -} - impl RelationTemplate { pub fn build(self, internal_data_model: InternalDataModelWeakRef) -> RelationRef { let relation = Relation { @@ -105,8 +77,6 @@ impl RelationTemplate { manifestation: self.manifestation, model_a_name: self.model_a_name, model_b_name: self.model_b_name, - model_a_on_delete: self.model_a_on_delete, - model_b_on_delete: self.model_b_on_delete, model_a: OnceCell::new(), model_b: OnceCell::new(), field_a: OnceCell::new(), @@ -212,4 +182,13 @@ impl Relation { .upgrade() .expect("InternalDataModel does not exist anymore. Parent internal_data_model is deleted without deleting the child internal_data_model.") } + + /// Retrieves the onDelete policy for this relation. 
+ pub fn on_delete(&self) -> ReferentialAction { + self.field_a() + .on_delete() + .cloned() + .or_else(|| self.field_b().on_delete().cloned()) + .unwrap_or(self.field_a().on_delete_default) + } } diff --git a/libs/sql-ddl/src/mysql.rs b/libs/sql-ddl/src/mysql.rs index 07aff6069f87..c513a3d5cca8 100644 --- a/libs/sql-ddl/src/mysql.rs +++ b/libs/sql-ddl/src/mysql.rs @@ -111,7 +111,7 @@ impl<'a> Display for ForeignKey<'a> { #[derive(Debug)] pub enum ForeignKeyAction { Cascade, - DoNothing, + NoAction, Restrict, SetDefault, SetNull, @@ -122,7 +122,7 @@ impl Display for ForeignKeyAction { let s = match self { ForeignKeyAction::Cascade => "CASCADE", ForeignKeyAction::Restrict => "RESTRICT", - ForeignKeyAction::DoNothing => "DO NOTHING", + ForeignKeyAction::NoAction => "NO ACTION", ForeignKeyAction::SetNull => "SET NULL", ForeignKeyAction::SetDefault => "SET DEFAULT", }; @@ -310,14 +310,14 @@ mod tests { changes: vec![AlterTableClause::AddForeignKey(ForeignKey { constrained_columns: vec!["bestFriendId".into()], constraint_name: Some("myfk".into()), - on_delete: Some(ForeignKeyAction::DoNothing), + on_delete: Some(ForeignKeyAction::NoAction), on_update: Some(ForeignKeyAction::SetNull), referenced_columns: vec!["id".into()], referenced_table: "Dog".into(), })], }; - let expected = "ALTER TABLE `Cat` ADD CONSTRAINT `myfk` FOREIGN KEY (`bestFriendId`) REFERENCES `Dog`(`id`) ON DELETE DO NOTHING ON UPDATE SET NULL"; + let expected = "ALTER TABLE `Cat` ADD CONSTRAINT `myfk` FOREIGN KEY (`bestFriendId`) REFERENCES `Dog`(`id`) ON DELETE NO ACTION ON UPDATE SET NULL"; assert_eq!(alter_table.to_string(), expected); } diff --git a/libs/sql-ddl/src/postgres.rs b/libs/sql-ddl/src/postgres.rs index c2cf73c26822..78ddfbfc406b 100644 --- a/libs/sql-ddl/src/postgres.rs +++ b/libs/sql-ddl/src/postgres.rs @@ -222,7 +222,7 @@ impl Display for ForeignKey<'_> { #[derive(Debug)] pub enum ForeignKeyAction { Cascade, - DoNothing, + NoAction, Restrict, SetDefault, SetNull, @@ -233,7 +233,7 @@ impl Display for ForeignKeyAction { let s = match self { ForeignKeyAction::Cascade => "CASCADE", ForeignKeyAction::Restrict => "RESTRICT", - ForeignKeyAction::DoNothing => "DO NOTHING", + ForeignKeyAction::NoAction => "NO ACTION", ForeignKeyAction::SetNull => "SET NULL", ForeignKeyAction::SetDefault => "SET DEFAULT", }; diff --git a/libs/test-macros/src/lib.rs b/libs/test-macros/src/lib.rs index 3676a1cb27ad..2c18733f6cf1 100644 --- a/libs/test-macros/src/lib.rs +++ b/libs/test-macros/src/lib.rs @@ -35,7 +35,7 @@ pub fn test_connector(attr: TokenStream, input: TokenStream) -> TokenStream { // Then the function body // We take advantage of the function body being the last token tree (surrounded by braces). 
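For context, the `preview_features` attribute parsed below would appear on a test roughly like this; the test name, tag, and body are illustrative, and only the attribute syntax comes from this change:

// Hypothetical test opting into the preview feature; the generated code below
// forwards the strings to `TestApiArgs::new`.
#[test_connector(tags(Postgres), preview_features("referentialActions"))]
fn referential_actions_are_applied(api: TestApi) {
    // test body elided
}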
- let (sig, body): (syn::Signature, proc_macro2::TokenStream) = { + let (sig, body): (Signature, proc_macro2::TokenStream) = { let sig_tokens = input .clone() .into_iter() @@ -54,6 +54,8 @@ pub fn test_connector(attr: TokenStream, input: TokenStream) -> TokenStream { let include_tagged = &attrs.include_tagged; let exclude_tagged = &attrs.exclude_tagged; let capabilities = &attrs.capabilities; + let preview_features = &attrs.preview_features; + let test_function_name = &sig.ident; let test_function_name_lit = sig.ident.to_string(); let (arg_name, arg_type) = match extract_api_arg(&sig) { @@ -72,7 +74,7 @@ pub fn test_connector(attr: TokenStream, input: TokenStream) -> TokenStream { #[test] #ignore_attr fn #test_function_name() { - let args = test_setup::TestApiArgs::new(#test_function_name_lit); + let args = test_setup::TestApiArgs::new(#test_function_name_lit, &[#(#preview_features,)*]); if test_setup::should_skip_test( &args, @@ -94,7 +96,7 @@ pub fn test_connector(attr: TokenStream, input: TokenStream) -> TokenStream { #[test] #ignore_attr fn #test_function_name() { - let args = test_setup::TestApiArgs::new(#test_function_name_lit); + let args = test_setup::TestApiArgs::new(#test_function_name_lit, &[#(#preview_features,)*]); if test_setup::should_skip_test( &args, @@ -118,6 +120,7 @@ struct TestConnectorAttrs { include_tagged: Vec<syn::Path>, exclude_tagged: Vec<syn::Path>, capabilities: Vec<syn::Path>, + preview_features: Vec<LitStr>, ignore_reason: Option<LitStr>, } @@ -127,6 +130,18 @@ impl TestConnectorAttrs { p if p.is_ident("tags") => &mut self.include_tagged, p if p.is_ident("exclude") => &mut self.exclude_tagged, p if p.is_ident("capabilities") => &mut self.capabilities, + p if p.is_ident("preview_features") => { + self.preview_features.reserve(list.nested.len()); + + for item in list.nested { + match item { + NestedMeta::Lit(Lit::Str(s)) => self.preview_features.push(s), + other => return Err(syn::Error::new_spanned(other, "Unexpected argument")), + } + } + + return Ok(()); + } p if p.is_ident("logs") => return Ok(()), // TODO other => return Err(syn::Error::new_spanned(other, "Unexpected argument")), }; diff --git a/libs/test-setup/src/test_api_args.rs b/libs/test-setup/src/test_api_args.rs index 59d4d4c4f7fb..a99ccb208825 100644 --- a/libs/test-setup/src/test_api_args.rs +++ b/libs/test-setup/src/test_api_args.rs @@ -102,17 +102,23 @@ pub(crate) fn db_under_test() -> &'static DbUnderTest { #[derive(Debug)] pub struct TestApiArgs { test_function_name: &'static str, + preview_features: &'static [&'static str], db: &'static DbUnderTest, } impl TestApiArgs { - pub fn new(test_function_name: &'static str) -> Self { + pub fn new(test_function_name: &'static str, preview_features: &'static [&'static str]) -> Self { TestApiArgs { test_function_name, + preview_features, db: db_under_test(), } } + pub fn preview_features(&self) -> &'static [&'static str] { + self.preview_features + } + pub fn test_function_name(&self) -> &'static str { self.test_function_name } diff --git a/migration-engine/connectors/sql-migration-connector/src/flavour.rs b/migration-engine/connectors/sql-migration-connector/src/flavour.rs index 689f560a262b..12e03f1a4ce9 100644 --- a/migration-engine/connectors/sql-migration-connector/src/flavour.rs +++ b/migration-engine/connectors/sql-migration-connector/src/flavour.rs @@ -7,6 +7,7 @@ mod mysql; mod postgres; mod sqlite; +use enumflags2::BitFlags; pub(crate) use mssql::MssqlFlavour; pub(crate) use mysql::MysqlFlavour; pub(crate) use postgres::PostgresFlavour; @@ -17,7 +18,7 @@ use crate::{
sql_renderer::SqlRenderer, sql_schema_calculator::SqlSchemaCalculatorFlavour, sql_schema_differ::SqlSchemaDifferFlavour, SqlMigrationConnector, }; -use datamodel::Datamodel; +use datamodel::{common::preview_features::PreviewFeature, Datamodel}; use migration_connector::{migrations_directory::MigrationDirectory, ConnectorError, ConnectorResult}; use quaint::prelude::{ConnectionInfo, Table}; use sql_schema_describer::SqlSchema; @@ -28,15 +29,19 @@ use std::fmt::Debug; /// reference: https://dev.mysql.com/doc/refman/5.7/en/identifier-length.html pub(crate) const MYSQL_IDENTIFIER_SIZE_LIMIT: usize = 64; -pub(crate) fn from_connection_info(connection_info: &ConnectionInfo) -> Box<dyn SqlFlavour> { +pub(crate) fn from_connection_info( + connection_info: &ConnectionInfo, + preview_features: BitFlags<PreviewFeature>, +) -> Box<dyn SqlFlavour> { match connection_info { - ConnectionInfo::Mysql(url) => Box::new(MysqlFlavour::new(url.clone())), - ConnectionInfo::Postgres(url) => Box::new(PostgresFlavour::new(url.clone())), + ConnectionInfo::Mysql(url) => Box::new(MysqlFlavour::new(url.clone(), preview_features)), + ConnectionInfo::Postgres(url) => Box::new(PostgresFlavour::new(url.clone(), preview_features)), ConnectionInfo::Sqlite { file_path, db_name } => Box::new(SqliteFlavour { file_path: file_path.clone(), attached_name: db_name.clone(), + preview_features, }), - ConnectionInfo::Mssql(url) => Box::new(MssqlFlavour::new(url.clone())), + ConnectionInfo::Mssql(url) => Box::new(MssqlFlavour::new(url.clone(), preview_features)), ConnectionInfo::InMemorySqlite { .. } => unreachable!("SqlFlavour for in-memory SQLite"), } } @@ -92,6 +97,9 @@ pub(crate) trait SqlFlavour: connector: &SqlMigrationConnector, ) -> ConnectorResult<SqlSchema>; + /// The preview features in use. + fn preview_features(&self) -> BitFlags<PreviewFeature>; + /// Table to store applied migrations, the name part. 
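A typical consumer of the new trait method is a gate like the following sketch; it mirrors the check `foreign_keys_match` performs in sql_schema_differ.rs further down, and the free function itself is illustrative:

// Sketch: gating referential-actions behavior on the preview flag.
use datamodel::common::preview_features::PreviewFeature;

fn referential_actions_enabled(flavour: &dyn SqlFlavour) -> bool {
    flavour
        .preview_features()
        .contains(PreviewFeature::ReferentialActions)
}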
fn migrations_table_name(&self) -> &'static str { "_prisma_migrations" diff --git a/migration-engine/connectors/sql-migration-connector/src/flavour/mssql.rs b/migration-engine/connectors/sql-migration-connector/src/flavour/mssql.rs index 715c6d28e7c2..43b7dc389882 100644 --- a/migration-engine/connectors/sql-migration-connector/src/flavour/mssql.rs +++ b/migration-engine/connectors/sql-migration-connector/src/flavour/mssql.rs @@ -2,6 +2,8 @@ use crate::{ connect, connection_wrapper::Connection, error::quaint_error_to_connector_error, SqlFlavour, SqlMigrationConnector, }; use connection_string::JdbcString; +use datamodel::common::preview_features::PreviewFeature; +use enumflags2::BitFlags; use indoc::formatdoc; use migration_connector::{migrations_directory::MigrationDirectory, ConnectorError, ConnectorResult}; use quaint::{connector::MssqlUrl, prelude::Table}; @@ -11,6 +13,7 @@ use user_facing_errors::{introspection_engine::DatabaseSchemaInconsistent, Known pub(crate) struct MssqlFlavour { url: MssqlUrl, + preview_features: BitFlags<PreviewFeature>, } impl std::fmt::Debug for MssqlFlavour { @@ -20,8 +23,8 @@ } impl MssqlFlavour { - pub fn new(url: MssqlUrl) -> Self { - Self { url } + pub fn new(url: MssqlUrl, preview_features: BitFlags<PreviewFeature>) -> Self { + Self { url, preview_features } } fn is_running_on_azure_sql(&self) -> bool { @@ -118,6 +121,10 @@ impl SqlFlavour for MssqlFlavour { (self.schema_name(), self.migrations_table_name()).into() } + fn preview_features(&self) -> BitFlags<PreviewFeature> { + self.preview_features + } + async fn create_database(&self, jdbc_string: &str) -> ConnectorResult<String> { let (db_name, master_uri) = Self::master_url(jdbc_string)?; let conn = connect(&master_uri.to_string()).await?; @@ -384,7 +391,7 @@ mod tests { fn debug_impl_does_not_leak_connection_info() { let url = "sqlserver://myserver:8765;database=master;schema=mydbname;user=SA;password=;trustServerCertificate=true;socket_timeout=60;isolationLevel=READ UNCOMMITTED"; - let flavour = MssqlFlavour::new(MssqlUrl::new(&url).unwrap()); + let flavour = MssqlFlavour::new(MssqlUrl::new(&url).unwrap(), BitFlags::empty()); let debugged = format!("{:?}", flavour); let words = &["myname", "mypassword", "myserver", "8765", "mydbname"]; diff --git a/migration-engine/connectors/sql-migration-connector/src/flavour/mysql.rs b/migration-engine/connectors/sql-migration-connector/src/flavour/mysql.rs index 523caba8586e..608138b209f4 100644 --- a/migration-engine/connectors/sql-migration-connector/src/flavour/mysql.rs +++ b/migration-engine/connectors/sql-migration-connector/src/flavour/mysql.rs @@ -5,7 +5,7 @@ use crate::{ error::{quaint_error_to_connector_error, SystemDatabase}, SqlMigrationConnector, }; -use datamodel::{walkers::walk_scalar_fields, Datamodel}; +use datamodel::{common::preview_features::PreviewFeature, walkers::walk_scalar_fields, Datamodel}; use enumflags2::BitFlags; use indoc::indoc; use migration_connector::{migrations_directory::MigrationDirectory, ConnectorError, ConnectorResult}; @@ -23,6 +23,7 @@ pub(crate) struct MysqlFlavour { url: MysqlUrl, /// See the [Circumstances] enum. 
circumstances: AtomicU8, + preview_features: BitFlags<PreviewFeature>, } impl std::fmt::Debug for MysqlFlavour { @@ -32,10 +33,11 @@ } impl MysqlFlavour { - pub(crate) fn new(url: MysqlUrl) -> Self { + pub(crate) fn new(url: MysqlUrl, preview_features: BitFlags<PreviewFeature>) -> Self { MysqlFlavour { url, circumstances: Default::default(), + preview_features, } } @@ -304,6 +306,10 @@ impl SqlFlavour for MysqlFlavour { Ok(()) } + fn preview_features(&self) -> BitFlags<PreviewFeature> { + self.preview_features + } + fn scan_migration_script(&self, script: &str) { for capture in QUALIFIED_NAME_RE .captures_iter(script) @@ -397,7 +403,7 @@ mod tests { fn debug_impl_does_not_leak_connection_info() { let url = "mysql://myname:mypassword@myserver:8765/mydbname"; - let flavour = MysqlFlavour::new(MysqlUrl::new(url.parse().unwrap()).unwrap()); + let flavour = MysqlFlavour::new(MysqlUrl::new(url.parse().unwrap()).unwrap(), BitFlags::empty()); let debugged = format!("{:?}", flavour); let words = &["myname", "mypassword", "myserver", "8765", "mydbname"]; diff --git a/migration-engine/connectors/sql-migration-connector/src/flavour/postgres.rs b/migration-engine/connectors/sql-migration-connector/src/flavour/postgres.rs index d0504a07f7c0..056ca0fd0bf0 100644 --- a/migration-engine/connectors/sql-migration-connector/src/flavour/postgres.rs +++ b/migration-engine/connectors/sql-migration-connector/src/flavour/postgres.rs @@ -1,6 +1,8 @@ use crate::{ connect, connection_wrapper::Connection, error::quaint_error_to_connector_error, SqlFlavour, SqlMigrationConnector, }; +use datamodel::common::preview_features::PreviewFeature; +use enumflags2::BitFlags; use indoc::indoc; use migration_connector::{migrations_directory::MigrationDirectory, ConnectorError, ConnectorResult}; use quaint::{connector::PostgresUrl, error::ErrorKind as QuaintKind}; @@ -17,6 +19,7 @@ const ADVISORY_LOCK_TIMEOUT: std::time::Duration = std::time::Duration::from_sec pub(crate) struct PostgresFlavour { url: PostgresUrl, + preview_features: BitFlags<PreviewFeature>, } impl std::fmt::Debug for PostgresFlavour { @@ -26,8 +29,8 @@ impl PostgresFlavour { - pub fn new(url: PostgresUrl) -> Self { - Self { url } + pub fn new(url: PostgresUrl, preview_features: BitFlags<PreviewFeature>) -> Self { + Self { url, preview_features } } pub(crate) fn schema_name(&self) -> &str { @@ -276,6 +279,10 @@ impl SqlFlavour for PostgresFlavour { Ok(()) } + fn preview_features(&self) -> BitFlags<PreviewFeature> { + self.preview_features + } + #[tracing::instrument(skip(self, migrations, connection, connector))] async fn sql_schema_from_migration_history( &self, @@ -381,7 +388,7 @@ mod tests { fn debug_impl_does_not_leak_connection_info() { let url = "postgresql://myname:mypassword@myserver:8765/mydbname"; - let flavour = PostgresFlavour::new(PostgresUrl::new(url.parse().unwrap()).unwrap()); + let flavour = PostgresFlavour::new(PostgresUrl::new(url.parse().unwrap()).unwrap(), BitFlags::all()); let debugged = format!("{:?}", flavour); let words = &["myname", "mypassword", "myserver", "8765", "mydbname"]; diff --git a/migration-engine/connectors/sql-migration-connector/src/flavour/sqlite.rs b/migration-engine/connectors/sql-migration-connector/src/flavour/sqlite.rs index 2d674797273c..07e0c9aaee94 100644 --- a/migration-engine/connectors/sql-migration-connector/src/flavour/sqlite.rs +++ b/migration-engine/connectors/sql-migration-connector/src/flavour/sqlite.rs @@ -2,6 +2,8 @@ use crate::{ connect, connection_wrapper::Connection, 
error::quaint_error_to_connector_error, flavour::SqlFlavour, SqlMigrationConnector, }; +use datamodel::common::preview_features::PreviewFeature; +use enumflags2::BitFlags; use indoc::indoc; use migration_connector::{migrations_directory::MigrationDirectory, ConnectorError, ConnectorResult}; use quaint::prelude::ConnectionInfo; @@ -12,6 +14,7 @@ use std::path::Path; pub(crate) struct SqliteFlavour { pub(super) file_path: String, pub(super) attached_name: String, + pub(super) preview_features: BitFlags<PreviewFeature>, } #[async_trait::async_trait] @@ -154,4 +157,8 @@ impl SqlFlavour for SqliteFlavour { Ok(sql_schema) } + + fn preview_features(&self) -> BitFlags<PreviewFeature> { + self.preview_features + } } diff --git a/migration-engine/connectors/sql-migration-connector/src/lib.rs b/migration-engine/connectors/sql-migration-connector/src/lib.rs index 583ef8a71329..8d4b863e1b1b 100644 --- a/migration-engine/connectors/sql-migration-connector/src/lib.rs +++ b/migration-engine/connectors/sql-migration-connector/src/lib.rs @@ -18,7 +18,8 @@ mod sql_schema_differ; use std::env; use connection_wrapper::Connection; -use datamodel::{walkers::walk_models, Configuration, Datamodel}; +use datamodel::{common::preview_features::PreviewFeature, walkers::walk_models, Configuration, Datamodel}; +use enumflags2::BitFlags; use error::quaint_error_to_connector_error; use flavour::SqlFlavour; use migration_connector::{migrations_directory::MigrationDirectory, *}; @@ -39,10 +40,11 @@ impl SqlMigrationConnector { /// Construct and initialize the SQL migration connector. pub async fn new( connection_string: &str, + preview_features: BitFlags<PreviewFeature>, shadow_database_connection_string: Option<String>, ) -> ConnectorResult<Self> { let connection = connect(connection_string).await?; - let flavour = flavour::from_connection_info(connection.connection_info()); + let flavour = flavour::from_connection_info(connection.connection_info(), preview_features); flavour.ensure_connection_validity(&connection).await?; @@ -56,14 +58,14 @@ impl SqlMigrationConnector { /// Create the database corresponding to the connection string, without initializing the connector. pub async fn create_database(database_str: &str) -> ConnectorResult<String> { let connection_info = ConnectionInfo::from_url(database_str).map_err(ConnectorError::url_parse_error)?; - let flavour = flavour::from_connection_info(&connection_info); + let flavour = flavour::from_connection_info(&connection_info, BitFlags::empty()); flavour.create_database(database_str).await } /// Drop the database corresponding to the connection string, without initializing the connector. 
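Callers now thread the preview features through the constructor; a minimal sketch, assuming a reachable database URL (the wrapper function is illustrative):

// Hypothetical caller: constructing the connector with referentialActions enabled.
async fn connector_with_referential_actions(url: &str) -> ConnectorResult<SqlMigrationConnector> {
    // `BitFlags::from` lifts a single flag into a flag set.
    let features = BitFlags::from(PreviewFeature::ReferentialActions);
    SqlMigrationConnector::new(url, features, None).await
}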
pub async fn drop_database(database_str: &str) -> ConnectorResult<()> { let connection_info = ConnectionInfo::from_url(database_str).map_err(ConnectorError::url_parse_error)?; - let flavour = flavour::from_connection_info(&connection_info); + let flavour = flavour::from_connection_info(&connection_info, BitFlags::empty()); flavour.drop_database(database_str).await } @@ -72,7 +74,7 @@ impl SqlMigrationConnector { pub async fn qe_setup(database_str: &str) -> ConnectorResult<()> { let connection_info = ConnectionInfo::from_url(database_str).map_err(ConnectorError::url_parse_error)?; - let flavour = flavour::from_connection_info(&connection_info); + let flavour = flavour::from_connection_info(&connection_info, BitFlags::empty()); flavour.qe_setup(database_str).await } @@ -109,7 +111,6 @@ impl SqlMigrationConnector { let source_schema = self.flavour.describe_schema(connection).await?; let target_schema = SqlSchema::empty(); - let mut steps = Vec::new(); // We drop views here, not in the normal migration process to not @@ -163,7 +164,8 @@ impl SqlMigrationConnector { ) -> SqlMigration { let connection_info = ConnectionInfo::from_url(&from.0.datasources[0].load_url(|key| env::var(key).ok()).unwrap()).unwrap(); - let flavour = flavour::from_connection_info(&connection_info); + + let flavour = flavour::from_connection_info(&connection_info, BitFlags::empty()); let from_sql = sql_schema_calculator::calculate_sql_schema(from, flavour.as_ref()); let to_sql = sql_schema_calculator::calculate_sql_schema(to, flavour.as_ref()); diff --git a/migration-engine/connectors/sql-migration-connector/src/sql_destructive_change_checker/destructive_change_checker_flavour/mysql.rs b/migration-engine/connectors/sql-migration-connector/src/sql_destructive_change_checker/destructive_change_checker_flavour/mysql.rs index 7f5c5d9b16bf..37f0ecb7279c 100644 --- a/migration-engine/connectors/sql-migration-connector/src/sql_destructive_change_checker/destructive_change_checker_flavour/mysql.rs +++ b/migration-engine/connectors/sql-migration-connector/src/sql_destructive_change_checker/destructive_change_checker_flavour/mysql.rs @@ -51,7 +51,7 @@ impl DestructiveChangeCheckerFlavour for MysqlFlavour { return; } - let datamodel_connector = SqlDatamodelConnectors::mysql(); + let datamodel_connector = SqlDatamodelConnectors::mysql(false); let previous_type = match &columns.previous().column_type().native_type { Some(tpe) => datamodel_connector.render_native_type(tpe.clone()), _ => format!("{:?}", columns.previous().column_type_family()), diff --git a/migration-engine/connectors/sql-migration-connector/src/sql_renderer/common.rs b/migration-engine/connectors/sql-migration-connector/src/sql_renderer/common.rs index 3288438e5220..55b7fdc9a9f3 100644 --- a/migration-engine/connectors/sql-migration-connector/src/sql_renderer/common.rs +++ b/migration-engine/connectors/sql-migration-connector/src/sql_renderer/common.rs @@ -67,13 +67,13 @@ pub(crate) fn render_nullability(column: &ColumnWalker<'_>) -> &'static str { } } -pub(crate) fn render_on_delete(on_delete: &ForeignKeyAction) -> &'static str { - match on_delete { - ForeignKeyAction::NoAction => "", - ForeignKeyAction::SetNull => "ON DELETE SET NULL", - ForeignKeyAction::Cascade => "ON DELETE CASCADE", - ForeignKeyAction::SetDefault => "ON DELETE SET DEFAULT", - ForeignKeyAction::Restrict => "ON DELETE RESTRICT", +pub(crate) fn render_referential_action(action: &ForeignKeyAction) -> &'static str { + match action { + ForeignKeyAction::NoAction => "NO ACTION", + 
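// `RESTRICT` and `NO ACTION` both reject the violating write; on MySQL/InnoDB they are equivalent, while on PostgreSQL `NO ACTION` additionally allows the check to be deferred to the end of the transaction. + 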
ForeignKeyAction::Restrict => "RESTRICT", + ForeignKeyAction::Cascade => "CASCADE", + ForeignKeyAction::SetNull => "SET NULL", + ForeignKeyAction::SetDefault => "SET DEFAULT", } } diff --git a/migration-engine/connectors/sql-migration-connector/src/sql_renderer/mssql_renderer.rs b/migration-engine/connectors/sql-migration-connector/src/sql_renderer/mssql_renderer.rs index f2ff252db8b6..cc702c4bdd0f 100644 --- a/migration-engine/connectors/sql-migration-connector/src/sql_renderer/mssql_renderer.rs +++ b/migration-engine/connectors/sql-migration-connector/src/sql_renderer/mssql_renderer.rs @@ -1,7 +1,7 @@ mod alter_table; use super::{ - common::{self, render_on_delete}, + common::{self, render_referential_action}, IteratorJoin, Quoted, SqlRenderer, }; use crate::{ @@ -77,10 +77,11 @@ impl MssqlFlavour { .join(","); format!( - " REFERENCES {}({}) {} ON UPDATE CASCADE", + " REFERENCES {}({}) ON DELETE {} ON UPDATE {}", self.quote_with_schema(&foreign_key.referenced_table().name()), cols, - render_on_delete(&foreign_key.on_delete_action()), + render_referential_action(&foreign_key.on_delete_action()), + render_referential_action(&foreign_key.on_update_action()), ) } } diff --git a/migration-engine/connectors/sql-migration-connector/src/sql_renderer/mysql_renderer.rs b/migration-engine/connectors/sql-migration-connector/src/sql_renderer/mysql_renderer.rs index cb201e5fc949..3e559f7fed41 100644 --- a/migration-engine/connectors/sql-migration-connector/src/sql_renderer/mysql_renderer.rs +++ b/migration-engine/connectors/sql-migration-connector/src/sql_renderer/mysql_renderer.rs @@ -66,14 +66,14 @@ impl SqlRenderer for MysqlFlavour { .collect(), on_delete: Some(match foreign_key.on_delete_action() { ForeignKeyAction::Cascade => ddl::ForeignKeyAction::Cascade, - ForeignKeyAction::NoAction => ddl::ForeignKeyAction::DoNothing, + ForeignKeyAction::NoAction => ddl::ForeignKeyAction::NoAction, ForeignKeyAction::Restrict => ddl::ForeignKeyAction::Restrict, ForeignKeyAction::SetDefault => ddl::ForeignKeyAction::SetDefault, ForeignKeyAction::SetNull => ddl::ForeignKeyAction::SetNull, }), on_update: Some(match foreign_key.on_update_action() { ForeignKeyAction::Cascade => ddl::ForeignKeyAction::Cascade, - ForeignKeyAction::NoAction => ddl::ForeignKeyAction::DoNothing, + ForeignKeyAction::NoAction => ddl::ForeignKeyAction::NoAction, ForeignKeyAction::Restrict => ddl::ForeignKeyAction::Restrict, ForeignKeyAction::SetDefault => ddl::ForeignKeyAction::SetDefault, ForeignKeyAction::SetNull => ddl::ForeignKeyAction::SetNull, diff --git a/migration-engine/connectors/sql-migration-connector/src/sql_renderer/postgres_renderer.rs b/migration-engine/connectors/sql-migration-connector/src/sql_renderer/postgres_renderer.rs index 2b78ecb3e55d..b953bbf8471e 100644 --- a/migration-engine/connectors/sql-migration-connector/src/sql_renderer/postgres_renderer.rs +++ b/migration-engine/connectors/sql-migration-connector/src/sql_renderer/postgres_renderer.rs @@ -52,14 +52,14 @@ impl SqlRenderer for PostgresFlavour { referenced_table: foreign_key.referenced_table().name().into(), on_delete: Some(match foreign_key.on_delete_action() { ForeignKeyAction::Cascade => ddl::ForeignKeyAction::Cascade, - ForeignKeyAction::NoAction => ddl::ForeignKeyAction::DoNothing, + ForeignKeyAction::NoAction => ddl::ForeignKeyAction::NoAction, ForeignKeyAction::Restrict => ddl::ForeignKeyAction::Restrict, ForeignKeyAction::SetDefault => ddl::ForeignKeyAction::SetDefault, ForeignKeyAction::SetNull => ddl::ForeignKeyAction::SetNull, }), on_update: 
Some(match foreign_key.on_update_action() { ForeignKeyAction::Cascade => ddl::ForeignKeyAction::Cascade, - ForeignKeyAction::NoAction => ddl::ForeignKeyAction::DoNothing, + ForeignKeyAction::NoAction => ddl::ForeignKeyAction::NoAction, ForeignKeyAction::Restrict => ddl::ForeignKeyAction::Restrict, ForeignKeyAction::SetDefault => ddl::ForeignKeyAction::SetDefault, ForeignKeyAction::SetNull => ddl::ForeignKeyAction::SetNull, diff --git a/migration-engine/connectors/sql-migration-connector/src/sql_renderer/sqlite_renderer.rs b/migration-engine/connectors/sql-migration-connector/src/sql_renderer/sqlite_renderer.rs index 86a7184c8785..6b37a20e07f6 100644 --- a/migration-engine/connectors/sql-migration-connector/src/sql_renderer/sqlite_renderer.rs +++ b/migration-engine/connectors/sql-migration-connector/src/sql_renderer/sqlite_renderer.rs @@ -100,7 +100,13 @@ impl SqlRenderer for SqliteFlavour { ForeignKeyAction::SetNull => sql_ddl::sqlite::ForeignKeyAction::SetNull, ForeignKeyAction::SetDefault => sql_ddl::sqlite::ForeignKeyAction::SetDefault, }), - on_update: Some(sql_ddl::sqlite::ForeignKeyAction::Cascade), + on_update: Some(match fk.on_update_action() { + ForeignKeyAction::NoAction => sql_ddl::sqlite::ForeignKeyAction::NoAction, + ForeignKeyAction::Restrict => sql_ddl::sqlite::ForeignKeyAction::Restrict, + ForeignKeyAction::Cascade => sql_ddl::sqlite::ForeignKeyAction::Cascade, + ForeignKeyAction::SetNull => sql_ddl::sqlite::ForeignKeyAction::SetNull, + ForeignKeyAction::SetDefault => sql_ddl::sqlite::ForeignKeyAction::SetDefault, + }), }) .collect(), }; diff --git a/migration-engine/connectors/sql-migration-connector/src/sql_schema_calculator.rs b/migration-engine/connectors/sql-migration-connector/src/sql_schema_calculator.rs index c460fb985135..ed31b98c6cae 100644 --- a/migration-engine/connectors/sql-migration-connector/src/sql_schema_calculator.rs +++ b/migration-engine/connectors/sql-migration-connector/src/sql_schema_calculator.rs @@ -3,13 +3,12 @@ mod sql_schema_calculator_flavour; pub(super) use sql_schema_calculator_flavour::SqlSchemaCalculatorFlavour; use crate::{flavour::SqlFlavour, sql_renderer::IteratorJoin}; -use datamodel::{walkers::RelationFieldWalker, Configuration}; use datamodel::{ walkers::{walk_models, walk_relations, ModelWalker, ScalarFieldWalker, TypeWalker}, - Datamodel, DefaultValue, FieldArity, IndexDefinition, IndexType, ScalarType, + Configuration, Datamodel, DefaultValue, FieldArity, IndexDefinition, IndexType, ScalarType, }; use prisma_value::PrismaValue; -use sql_schema_describer::{self as sql, walkers::SqlSchemaExt, ColumnType, ForeignKeyAction}; +use sql_schema_describer::{self as sql, walkers::SqlSchemaExt, ColumnType}; pub(crate) fn calculate_sql_schema( (configuration, datamodel): (&Configuration, &Datamodel), @@ -102,13 +101,13 @@ fn calculate_model_tables<'a>( foreign_keys: Vec::new(), }; - push_inline_relations(model, &mut table); + push_inline_relations(model, &mut table, flavour); table }) } -fn push_inline_relations(model: ModelWalker<'_>, table: &mut sql::Table) { +fn push_inline_relations(model: ModelWalker<'_>, table: &mut sql::Table, flavour: &dyn SqlFlavour) { let relation_fields = model .relation_fields() .filter(|relation_field| !relation_field.is_virtual()); @@ -128,8 +127,8 @@ fn push_inline_relations(model: ModelWalker<'_>, table: &mut sql::Table) { columns: fk_columns, referenced_table: relation_field.referenced_model().database_name().to_owned(), referenced_columns: relation_field.referenced_columns().map(String::from).collect(), - 
on_update_action: sql::ForeignKeyAction::Cascade, - on_delete_action: calculate_on_delete_action(relation_field), + on_update_action: flavour.on_update_action(&relation_field), + on_delete_action: flavour.on_delete_action(&relation_field), }; table.foreign_keys.push(fk); @@ -137,14 +136,6 @@ fn push_inline_relations(model: ModelWalker<'_>, table: &mut sql::Table) { } } -fn calculate_on_delete_action(relation_field: RelationFieldWalker<'_>) -> ForeignKeyAction { - if relation_field.scalar_arities().any(|ar| ar.is_required()) { - sql::ForeignKeyAction::Cascade - } else { - sql::ForeignKeyAction::SetNull - } -} - fn push_one_to_one_relation_unique_index(column_names: &[String], table: &mut sql::Table) { // Don't add a duplicate index. if table diff --git a/migration-engine/connectors/sql-migration-connector/src/sql_schema_calculator/sql_schema_calculator_flavour.rs b/migration-engine/connectors/sql-migration-connector/src/sql_schema_calculator/sql_schema_calculator_flavour.rs index 17534065b26e..53a639816285 100644 --- a/migration-engine/connectors/sql-migration-connector/src/sql_schema_calculator/sql_schema_calculator_flavour.rs +++ b/migration-engine/connectors/sql-migration-connector/src/sql_schema_calculator/sql_schema_calculator_flavour.rs @@ -3,7 +3,11 @@ mod mysql; mod postgres; mod sqlite; -use datamodel::{walkers::ModelWalker, walkers::ScalarFieldWalker, Datamodel, FieldArity, ScalarType}; +use datamodel::{ + walkers::ModelWalker, + walkers::{RelationFieldWalker, ScalarFieldWalker}, + Datamodel, FieldArity, ReferentialAction, ScalarType, +}; use sql_schema_describer::{self as sql, ColumnArity, ColumnType, ColumnTypeFamily}; pub(crate) trait SqlSchemaCalculatorFlavour { @@ -34,6 +38,18 @@ pub(crate) trait SqlSchemaCalculatorFlavour { false } + fn on_update_action(&self, rf: &RelationFieldWalker<'_>) -> sql::ForeignKeyAction { + rf.on_update_action() + .map(convert_referential_action) + .unwrap_or_else(|| convert_referential_action(rf.default_on_update_action())) + } + + fn on_delete_action(&self, rf: &RelationFieldWalker<'_>) -> sql::ForeignKeyAction { + rf.on_delete_action() + .map(convert_referential_action) + .unwrap_or_else(|| convert_referential_action(rf.default_on_delete_action())) + } + fn m2m_foreign_key_action(&self, _model_a: &ModelWalker<'_>, _model_b: &ModelWalker<'_>) -> sql::ForeignKeyAction { sql::ForeignKeyAction::Cascade } @@ -43,3 +59,13 @@ pub(crate) trait SqlSchemaCalculatorFlavour { format!("{}.{}_unique", model_name, field_name) } } + +fn convert_referential_action(action: ReferentialAction) -> sql::ForeignKeyAction { + match action { + ReferentialAction::Cascade => sql::ForeignKeyAction::Cascade, + ReferentialAction::Restrict => sql::ForeignKeyAction::Restrict, + ReferentialAction::NoAction => sql::ForeignKeyAction::NoAction, + ReferentialAction::SetNull => sql::ForeignKeyAction::SetNull, + ReferentialAction::SetDefault => sql::ForeignKeyAction::SetDefault, + } +} diff --git a/migration-engine/connectors/sql-migration-connector/src/sql_schema_calculator/sql_schema_calculator_flavour/mssql.rs b/migration-engine/connectors/sql-migration-connector/src/sql_schema_calculator/sql_schema_calculator_flavour/mssql.rs index 7c465b54aab5..08a12e03fd7d 100644 --- a/migration-engine/connectors/sql-migration-connector/src/sql_schema_calculator/sql_schema_calculator_flavour/mssql.rs +++ b/migration-engine/connectors/sql-migration-connector/src/sql_schema_calculator/sql_schema_calculator_flavour/mssql.rs @@ -1,8 +1,11 @@ use super::SqlSchemaCalculatorFlavour; use 
crate::flavour::MssqlFlavour; -use datamodel::{walkers::ModelWalker, ScalarType}; +use datamodel::{ + walkers::{ModelWalker, RelationFieldWalker}, + ScalarType, +}; use datamodel_connector::Connector; -use sql_schema_describer::ForeignKeyAction; +use sql_schema_describer::{self as sql, ForeignKeyAction}; impl SqlSchemaCalculatorFlavour for MssqlFlavour { fn default_native_type_for_scalar_type(&self, scalar_type: &ScalarType) -> serde_json::Value { @@ -18,6 +21,19 @@ impl SqlSchemaCalculatorFlavour for MssqlFlavour { } } + fn on_delete_action(&self, rf: &RelationFieldWalker<'_>) -> sql::ForeignKeyAction { + let action = rf + .on_delete_action() + .map(super::convert_referential_action) + .unwrap_or_else(|| super::convert_referential_action(rf.default_on_delete_action())); + + if action == ForeignKeyAction::Restrict { + ForeignKeyAction::NoAction + } else { + action + } + } + fn single_field_index_name(&self, model_name: &str, field_name: &str) -> String { format!("{}_{}_unique", model_name, field_name) } diff --git a/migration-engine/connectors/sql-migration-connector/src/sql_schema_calculator/sql_schema_calculator_flavour/mysql.rs b/migration-engine/connectors/sql-migration-connector/src/sql_schema_calculator/sql_schema_calculator_flavour/mysql.rs index 495257cc91e3..04e2b8e65fdb 100644 --- a/migration-engine/connectors/sql-migration-connector/src/sql_schema_calculator/sql_schema_calculator_flavour/mysql.rs +++ b/migration-engine/connectors/sql-migration-connector/src/sql_schema_calculator/sql_schema_calculator_flavour/mysql.rs @@ -33,7 +33,7 @@ impl SqlSchemaCalculatorFlavour for MysqlFlavour { } fn default_native_type_for_scalar_type(&self, scalar_type: &ScalarType) -> serde_json::Value { - sql_datamodel_connector::SqlDatamodelConnectors::mysql().default_native_type_for_scalar_type(scalar_type) + sql_datamodel_connector::SqlDatamodelConnectors::mysql(false).default_native_type_for_scalar_type(scalar_type) } fn enum_column_type(&self, field: &ScalarFieldWalker<'_>, _db_name: &str) -> sql::ColumnType { diff --git a/migration-engine/connectors/sql-migration-connector/src/sql_schema_differ.rs b/migration-engine/connectors/sql-migration-connector/src/sql_schema_differ.rs index a47a9c368464..3ae359d65d63 100644 --- a/migration-engine/connectors/sql-migration-connector/src/sql_schema_differ.rs +++ b/migration-engine/connectors/sql-migration-connector/src/sql_schema_differ.rs @@ -6,6 +6,7 @@ mod sql_schema_differ_flavour; mod table; pub(crate) use column::{ColumnChange, ColumnChanges}; +use datamodel::common::preview_features::PreviewFeature; pub(crate) use sql_schema_differ_flavour::SqlSchemaDifferFlavour; use self::differ_database::DifferDatabase; @@ -542,11 +543,20 @@ fn foreign_keys_match(fks: Pair<&ForeignKeyWalker<'_>>, flavour: &dyn SqlFlavour .interleave(|fk| fk.referenced_column_names()) .all(|pair| pair.previous() == pair.next()); - references_same_table + let matches = references_same_table && references_same_column_count && constrains_same_column_count && constrains_same_columns - && references_same_columns + && references_same_columns; + + if flavour.preview_features().contains(PreviewFeature::ReferentialActions) { + let same_on_delete_action = fks.previous().on_delete_action() == fks.next().on_delete_action(); + let same_on_update_action = fks.previous().on_update_action() == fks.next().on_update_action(); + + matches && same_on_delete_action && same_on_update_action + } else { + matches + } } fn enums_match(previous: &EnumWalker<'_>, next: &EnumWalker<'_>) -> bool { diff --git 
a/migration-engine/core/src/lib.rs b/migration-engine/core/src/lib.rs index fbd6669e1c57..1139b9a90d6a 100644 --- a/migration-engine/core/src/lib.rs +++ b/migration-engine/core/src/lib.rs @@ -13,10 +13,14 @@ pub use api::{rpc_api, GenericApi}; pub use core_error::{CoreError, CoreResult}; use datamodel::{ - common::provider_names::{MSSQL_SOURCE_NAME, MYSQL_SOURCE_NAME, POSTGRES_SOURCE_NAME, SQLITE_SOURCE_NAME}, + common::{ + preview_features::PreviewFeature, + provider_names::{MSSQL_SOURCE_NAME, MYSQL_SOURCE_NAME, POSTGRES_SOURCE_NAME, SQLITE_SOURCE_NAME}, + }, dml::Datamodel, Configuration, Datasource, }; +use enumflags2::BitFlags; use migration_connector::ConnectorError; use sql_migration_connector::SqlMigrationConnector; use std::env; @@ -29,7 +33,7 @@ use mongodb_migration_connector::MongoDbMigrationConnector; /// Top-level constructor for the migration engine API. pub async fn migration_api(datamodel: &str) -> CoreResult<Box<dyn GenericApi>> { - let (source, url, shadow_database_url) = parse_configuration(datamodel)?; + let (source, url, preview_features, shadow_database_url) = parse_configuration(datamodel)?; match source.active_provider.as_str() { #[cfg(feature = "sql")] @@ -59,13 +63,13 @@ pub async fn migration_api(datamodel: &str) -> CoreResult<Box<dyn GenericApi>> { - let connector = SqlMigrationConnector::new(&url, shadow_database_url).await?; + let connector = SqlMigrationConnector::new(&url, preview_features, shadow_database_url).await?; Ok(Box::new(connector)) } @@ -77,7 +81,7 @@ pub async fn migration_api(datamodel: &str) -> CoreResult<Box<dyn GenericApi>> pub async fn create_database(schema: &str) -> CoreResult<String> { - let (source, url, _shadow_database_url) = parse_configuration(schema)?; + let (source, url, _preview_features, _shadow_database_url) = parse_configuration(schema)?; match &source.active_provider { provider @@ -97,7 +101,7 @@ pub async fn create_database(schema: &str) -> CoreResult<String> { /// Drop the database referenced by the passed in Prisma schema. 
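End to end, the feature flows from the schema through `parse_configuration` into the connector; a hypothetical driver, with an illustrative datasource URL and inline schema:

// Hypothetical end-to-end use of `migration_api` with the preview feature enabled.
async fn api_with_referential_actions() -> CoreResult<Box<dyn GenericApi>> {
    let schema = r#"
        datasource db {
          provider = "postgres"
          url      = "postgresql://postgres:prisma@localhost:5432"
        }

        generator js {
          provider        = "prisma-client-js"
          previewFeatures = ["referentialActions"]
        }
    "#;

    migration_api(schema).await
}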
pub async fn drop_database(schema: &str) -> CoreResult<()> { - let (source, url, _shadow_database_url) = parse_configuration(schema)?; + let (source, url, _preview_features, _shadow_database_url) = parse_configuration(schema)?; match &source.active_provider { provider @@ -115,7 +119,7 @@ pub async fn drop_database(schema: &str) -> CoreResult<()> { } } -fn parse_configuration(datamodel: &str) -> CoreResult<(Datasource, String, Option<String>)> { +fn parse_configuration(datamodel: &str) -> CoreResult<(Datasource, String, BitFlags<PreviewFeature>, Option<String>)> { let config = datamodel::parse_configuration(&datamodel) .map(|validated_config| validated_config.subject) .map_err(|err| CoreError::new_schema_parser_error(err.to_pretty_string("schema.prisma", datamodel)))?; @@ -128,13 +132,15 @@ fn parse_configuration(datamodel: &str) -> CoreResult<(Datasource, String, Optio .load_shadow_database_url() .map_err(|err| CoreError::new_schema_parser_error(err.to_pretty_string("schema.prisma", datamodel)))?; + let preview_features = config.preview_features().map(Clone::clone).collect(); + let source = config .datasources .into_iter() .next() .ok_or_else(|| CoreError::from_msg("There is no datasource in the schema.".into()))?; - Ok((source, url, shadow_database_url)) + Ok((source, url, preview_features, shadow_database_url)) } fn parse_schema(schema: &str) -> CoreResult<(Configuration, Datamodel)> { diff --git a/migration-engine/core/src/qe_setup.rs b/migration-engine/core/src/qe_setup.rs index d155e9929282..bb217d822413 100644 --- a/migration-engine/core/src/qe_setup.rs +++ b/migration-engine/core/src/qe_setup.rs @@ -24,11 +24,11 @@ pub enum QueryEngineFlags { /// Database setup for connector-test-kit. pub async fn run(prisma_schema: &str, flags: BitFlags<QueryEngineFlags>) -> CoreResult<()> { - let (source, url, _shadow_database_url) = super::parse_configuration(prisma_schema)?; + let (source, url, preview_features, _shadow_database_url) = super::parse_configuration(prisma_schema)?; let api: Box<dyn GenericApi> = match &source.active_provider { _ if flags.contains(QueryEngineFlags::DatabaseCreationNotAllowed) => { - let api = SqlMigrationConnector::new(&url, None).await?; + let api = SqlMigrationConnector::new(&url, preview_features, None).await?; api.reset().await?; Box::new(api) @@ -44,7 +44,7 @@ pub async fn run(prisma_schema: &str, flags: BitFlags<QueryEngineFlags>) -> Core { // 1. creates schema & database SqlMigrationConnector::qe_setup(&url).await?; - Box::new(SqlMigrationConnector::new(&url, None).await?) + Box::new(SqlMigrationConnector::new(&url, preview_features, None).await?) 
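+ // The preview features parsed from the schema now reach the connector on the query-engine setup path as well, so `referentialActions` is honored during qe_setup.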
} #[cfg(feature = "mongodb")] provider if provider == MONGODB_SOURCE_NAME => { diff --git a/migration-engine/migration-engine-tests/src/assertions.rs b/migration-engine/migration-engine-tests/src/assertions.rs index c65095f7688d..9a67ec9d68c8 100644 --- a/migration-engine/migration-engine-tests/src/assertions.rs +++ b/migration-engine/migration-engine-tests/src/assertions.rs @@ -581,6 +581,7 @@ impl<'a> ForeignKeyAssertion<'a> { Self { fk, tags } } + #[track_caller] pub fn assert_references(self, table: &str, columns: &[&str]) -> Self { assert!( self.is_same_table_name(&self.fk.referenced_table, table) && self.fk.referenced_columns == columns, @@ -594,10 +595,25 @@ impl<'a> ForeignKeyAssertion<'a> { self } - pub fn assert_cascades_on_delete(self) -> Self { + #[track_caller] + pub fn assert_referential_action_on_delete(self, action: ForeignKeyAction) -> Self { + assert!( + self.fk.on_delete_action == action, + "Assertion failed: expected foreign key to {:?} on delete, but got {:?}.", + action, + self.fk.on_delete_action + ); + + self + } + + #[track_caller] + pub fn assert_referential_action_on_update(self, action: ForeignKeyAction) -> Self { assert!( - self.fk.on_delete_action == ForeignKeyAction::Cascade, - "Assertion failed: expected foreign key to cascade on delete." + self.fk.on_update_action == action, + "Assertion failed: expected foreign key to {:?} on update, but got {:?}.", + action, + self.fk.on_update_action ); self diff --git a/migration-engine/migration-engine-tests/src/multi_engine_test_api.rs b/migration-engine/migration-engine-tests/src/multi_engine_test_api.rs index afdef9aca0cd..878399b74188 100644 --- a/migration-engine/migration-engine-tests/src/multi_engine_test_api.rs +++ b/migration-engine/migration-engine-tests/src/multi_engine_test_api.rs @@ -3,6 +3,7 @@ //! A TestApi that is initialized without IO or async code and can instantiate //! multiple migration engines. 
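At call sites the renamed assertions read roughly like the following sketch; model and column names are illustrative, mirroring the test updates below:

// Sketch: asserting both referential actions on a foreign key.
api.assert_schema().assert_table("B", |table| {
    table.assert_fk_on_columns(&["aId"], |fk| {
        fk.assert_references("A", &["id"])
            .assert_referential_action_on_delete(ForeignKeyAction::Cascade)
            .assert_referential_action_on_update(ForeignKeyAction::Cascade)
    })
});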
+use datamodel::common::preview_features::PreviewFeature; pub use test_macros::test_connector; pub use test_setup::{BitFlags, Capabilities, Tags}; @@ -23,6 +24,7 @@ pub struct TestApi { connection_string: String, pub(crate) admin_conn: Quaint, pub(crate) rt: tokio::runtime::Runtime, + preview_features: BitFlags<PreviewFeature>, } impl TestApi { @@ -31,6 +33,12 @@ let rt = test_setup::runtime::test_tokio_runtime(); let tags = args.tags(); + let preview_features = args + .preview_features() + .iter() + .flat_map(|f| PreviewFeature::parse_opt(f)) + .collect(); + let (admin_conn, connection_string) = if tags.contains(Tags::Postgres) { let (_, q, cs) = rt.block_on(args.create_postgres_database()); (q, cs) @@ -38,6 +46,7 @@ let conn = rt .block_on(SqlMigrationConnector::new( args.database_url(), + preview_features, args.shadow_database_url().map(String::from), )) .unwrap(); @@ -69,6 +78,7 @@ connection_string, admin_conn, rt, + preview_features, } } @@ -164,6 +174,7 @@ .rt .block_on(SqlMigrationConnector::new( &connection_string, + self.preview_features, shadow_db_connection_string, )) .unwrap(); diff --git a/migration-engine/migration-engine-tests/src/test_api.rs b/migration-engine/migration-engine-tests/src/test_api.rs index c46283a7c700..8fb03b01c89a 100644 --- a/migration-engine/migration-engine-tests/src/test_api.rs +++ b/migration-engine/migration-engine-tests/src/test_api.rs @@ -11,6 +11,7 @@ mod schema_push; pub use apply_migrations::ApplyMigrations; pub use create_migration::CreateMigration; +use datamodel::common::preview_features::PreviewFeature; pub use dev_diagnostic::DevDiagnostic; pub use diagnose_migration_history::DiagnoseMigrationHistory; pub use evaluate_data_loss::EvaluateDataLoss; @@ -44,13 +45,21 @@ impl TestApi { pub async fn new(args: TestApiArgs) -> Self { let tags = args.tags(); + let shadow_database_url = args.shadow_database_url().map(String::from); + + let preview_features = args + .preview_features() + .iter() + .flat_map(|f| PreviewFeature::parse_opt(f)) + .collect(); + let connection_string = if tags.contains(Tags::Mysql | Tags::Vitess) { let connector = - SqlMigrationConnector::new(args.database_url(), args.shadow_database_url().map(String::from)) + SqlMigrationConnector::new(args.database_url(), preview_features, shadow_database_url.clone()) .await .unwrap(); - connector.reset().await.unwrap(); + connector.reset().await.unwrap(); args.database_url().to_owned() } else if tags.contains(Tags::Mysql) { args.create_mysql_database().await.1 @@ -67,7 +76,7 @@ impl TestApi { unreachable!() }; - let api = SqlMigrationConnector::new(&connection_string, args.shadow_database_url().map(String::from)) + let api = SqlMigrationConnector::new(&connection_string, preview_features, shadow_database_url) .await .unwrap(); diff --git a/migration-engine/migration-engine-tests/tests/migration_tests.rs b/migration-engine/migration-engine-tests/tests/migration_tests.rs index 6367b9b6b1d3..177b1397fc98 100644 --- a/migration-engine/migration-engine-tests/tests/migration_tests.rs +++ b/migration-engine/migration-engine-tests/tests/migration_tests.rs @@ -1109,10 +1109,11 @@ async fn join_tables_between_models_with_compound_primary_keys_must_work(api: &T .assert_has_column("cat_id") .assert_fk_on_columns(&["human_firstName", "human_lastName"], |fk| { fk.assert_references("Human", &["firstName", "lastName"]) - .assert_cascades_on_delete() + .assert_referential_action_on_delete(ForeignKeyAction::Cascade) }) .assert_fk_on_columns(&["cat_id"], |fk| { - 
fk.assert_references("Cat", &["id"]).assert_cascades_on_delete() + fk.assert_references("Cat", &["id"]) + .assert_referential_action_on_delete(ForeignKeyAction::Cascade) }) .assert_indexes_count(2) .assert_index_on_columns(&["cat_id", "human_firstName", "human_lastName"], |idx| { diff --git a/migration-engine/migration-engine-tests/tests/migrations/diagnose_migration_history_tests.rs b/migration-engine/migration-engine-tests/tests/migrations/diagnose_migration_history_tests.rs index 3aa74cea2994..55cc889a72ae 100644 --- a/migration-engine/migration-engine-tests/tests/migrations/diagnose_migration_history_tests.rs +++ b/migration-engine/migration-engine-tests/tests/migrations/diagnose_migration_history_tests.rs @@ -847,12 +847,12 @@ fn shadow_database_creation_error_is_special_cased_mssql(api: TestApi) { api.raw_cmd( " - CREATE LOGIN prismashadowdbtestuser - WITH PASSWORD = '1234batmanZ'; - - CREATE USER prismashadowdbuser FOR LOGIN prismashadowdbtestuser; - - GRANT SELECT TO prismashadowdbuser; + BEGIN TRY + CREATE LOGIN prismashadowdbtestuser WITH PASSWORD = '1234batmanZ'; + GRANT SELECT TO prismashadowdbuser; + END TRY + BEGIN CATCH + END CATCH; ", ); diff --git a/migration-engine/migration-engine-tests/tests/migrations/foreign_keys.rs b/migration-engine/migration-engine-tests/tests/migrations/foreign_keys.rs index ac87ab618394..0d55476a0a7a 100644 --- a/migration-engine/migration-engine-tests/tests/migrations/foreign_keys.rs +++ b/migration-engine/migration-engine-tests/tests/migrations/foreign_keys.rs @@ -1,4 +1,5 @@ use migration_engine_tests::sync_test_api::*; +use sql_schema_describer::ForeignKeyAction; #[test_connector] fn foreign_keys_of_inline_one_to_one_relations_have_a_unique_constraint(api: TestApi) { @@ -204,6 +205,7 @@ fn changing_a_relation_field_to_a_scalar_field_must_work(api: TestApi) { b Int b_rel B @relation(fields: [b], references: [id]) } + model B { id Int @id a A? 
@@ -217,7 +219,8 @@ fn changing_a_relation_field_to_a_scalar_field_must_work(api: TestApi) { .assert_column("b", |col| col.assert_type_is_int()) .assert_foreign_keys_count(1) .assert_fk_on_columns(&["b"], |fk| { - fk.assert_references("B", &["id"]).assert_cascades_on_delete() + fk.assert_references("B", &["id"]) + .assert_referential_action_on_delete(ForeignKeyAction::Cascade) }) }); diff --git a/migration-engine/migration-engine-tests/tests/migrations/postgres.rs b/migration-engine/migration-engine-tests/tests/migrations/postgres.rs index 325f4357ebcf..17306081c256 100644 --- a/migration-engine/migration-engine-tests/tests/migrations/postgres.rs +++ b/migration-engine/migration-engine-tests/tests/migrations/postgres.rs @@ -162,7 +162,7 @@ fn uuids_do_not_generate_drift_issue_5282(api: TestApi) { r#" CREATE EXTENSION IF NOT EXISTS "uuid-ossp"; CREATE TABLE a (id uuid DEFAULT uuid_generate_v4() primary key); - CREATE TABLE b (id uuid DEFAULT uuid_generate_v4() primary key, a_id uuid, CONSTRAINT aaa FOREIGN KEY (a_id) REFERENCES a(id)); + CREATE TABLE b (id uuid DEFAULT uuid_generate_v4() primary key, a_id uuid, CONSTRAINT aaa FOREIGN KEY (a_id) REFERENCES a(id) ON DELETE SET NULL ON UPDATE CASCADE); "# ); diff --git a/migration-engine/migration-engine-tests/tests/migrations/relations.rs b/migration-engine/migration-engine-tests/tests/migrations/relations.rs index c51feaa1d2d1..7b4e0a7058d2 100644 --- a/migration-engine/migration-engine-tests/tests/migrations/relations.rs +++ b/migration-engine/migration-engine-tests/tests/migrations/relations.rs @@ -1,4 +1,6 @@ +use datamodel::ReferentialAction; use migration_engine_tests::sync_test_api::*; +use sql_schema_describer::ForeignKeyAction; #[test_connector] fn adding_a_many_to_many_relation_must_result_in_a_prisma_style_relation_table(api: TestApi) { @@ -22,10 +24,14 @@ fn adding_a_many_to_many_relation_must_result_in_a_prisma_style_relation_table(a .assert_column("A", |col| col.assert_type_is_int()) .assert_column("B", |col| col.assert_type_is_string()) .assert_fk_on_columns(&["A"], |fk| { - fk.assert_references("A", &["id"]).assert_cascades_on_delete() + fk.assert_references("A", &["id"]) + .assert_referential_action_on_update(ForeignKeyAction::Cascade) + .assert_referential_action_on_delete(ForeignKeyAction::Cascade) }) .assert_fk_on_columns(&["B"], |fk| { - fk.assert_references("B", &["id"]).assert_cascades_on_delete() + fk.assert_references("B", &["id"]) + .assert_referential_action_on_update(ForeignKeyAction::Cascade) + .assert_referential_action_on_delete(ForeignKeyAction::Cascade) }) }); } @@ -84,7 +90,9 @@ fn adding_an_inline_relation_must_result_in_a_foreign_key_in_the_model_table(api .assert_column("cid", |c| c.assert_type_is_int().assert_is_nullable()) .assert_foreign_keys_count(2) .assert_fk_on_columns(&["bid"], |fk| { - fk.assert_references("B", &["id"]).assert_cascades_on_delete() + fk.assert_references("B", &["id"]) + .assert_referential_action_on_update(ForeignKeyAction::Cascade) + .assert_referential_action_on_delete(ForeignKeyAction::Cascade) }) .assert_fk_on_columns(&["cid"], |fk| fk.assert_references("C", &["id"])) }); @@ -133,7 +141,9 @@ fn adding_an_inline_relation_to_a_model_with_an_exotic_id_type(api: TestApi) { t.assert_column("b_id", |c| c.assert_type_is_string()) .assert_foreign_keys_count(1) .assert_fk_on_columns(&["b_id"], |fk| { - fk.assert_references("B", &["id"]).assert_cascades_on_delete() + fk.assert_references("B", &["id"]) + .assert_referential_action_on_update(ForeignKeyAction::Cascade) + 
.assert_referential_action_on_delete(ForeignKeyAction::Cascade) }) }); } @@ -203,7 +213,8 @@ fn compound_foreign_keys_should_work_in_correct_order(api: TestApi) { api.assert_schema().assert_table("A", |t| { t.assert_foreign_keys_count(1) .assert_fk_on_columns(&["a", "b", "d"], |fk| { - fk.assert_cascades_on_delete() + fk.assert_referential_action_on_delete(ForeignKeyAction::Cascade) + .assert_referential_action_on_update(ForeignKeyAction::Cascade) .assert_references("B", &["a_id", "b_id", "d_id"]) }) }); @@ -227,7 +238,9 @@ fn moving_an_inline_relation_to_the_other_side_must_work(api: TestApi) { api.schema_push(dm1).send_sync().assert_green_bang(); api.assert_schema().assert_table("A", |t| { t.assert_foreign_keys_count(1).assert_fk_on_columns(&["b_id"], |fk| { - fk.assert_cascades_on_delete().assert_references("B", &["id"]) + fk.assert_referential_action_on_delete(ForeignKeyAction::Cascade) + .assert_referential_action_on_update(ForeignKeyAction::Cascade) + .assert_references("B", &["id"]) }) }); @@ -250,7 +263,9 @@ fn moving_an_inline_relation_to_the_other_side_must_work(api: TestApi) { table .assert_foreign_keys_count(1) .assert_fk_on_columns(&["a_id"], |fk| { - fk.assert_references("A", &["id"]).assert_cascades_on_delete() + fk.assert_references("A", &["id"]) + .assert_referential_action_on_delete(ForeignKeyAction::Cascade) + .assert_referential_action_on_update(ForeignKeyAction::Cascade) }) }) .assert_table("A", |table| table.assert_foreign_keys_count(0).assert_indexes_count(0)); @@ -433,3 +448,466 @@ fn relations_with_mappings_on_referencing_side_can_reference_multiple_fields(api }) }); } + +#[test_connector(preview_features("referentialActions"))] +fn on_delete_referential_actions_should_work(api: TestApi) { + let actions = &[ + (ReferentialAction::SetNull, ForeignKeyAction::SetNull), + (ReferentialAction::Cascade, ForeignKeyAction::Cascade), + (ReferentialAction::NoAction, ForeignKeyAction::NoAction), + ]; + + for (ra, fka) in actions { + let dm = format!( + r#" + generator client {{ + provider = "prisma-client-js" + previewFeatures = ["referentialActions"] + }} + + model A {{ + id Int @id @default(autoincrement()) + b B[] + }} + + model B {{ + id Int @id + aId Int? + a A? @relation(fields: [aId], references: [id], onDelete: {}) + }} + "#, + ra + ); + + api.schema_push(&dm).send_sync().assert_green_bang(); + + api.assert_schema().assert_table("B", |table| { + table.assert_foreign_keys_count(1).assert_fk_on_columns(&["aId"], |fk| { + fk.assert_references("A", &["id"]) + .assert_referential_action_on_delete(*fka) + }) + }); + + api.schema_push("").send_sync().assert_green_bang(); + } +} + +// 5.6 and 5.7 doesn't let you `SET DEFAULT` without setting the default value +// (even if nullable). Maria will silently just use `RESTRICT` instead. 
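A note on the exclusions used here: the comment above is the whole rationale, and the `exclude(...)` list on the test that follows encodes it. As a tiny predicate it might look like this sketch (the connector enum is an illustrative stand-in for the test-setup tags, not engine code):

```rust
// Hypothetical support-matrix helper mirroring the exclude() list below.
#[allow(dead_code)]
#[derive(Debug, PartialEq)]
enum Connector {
    Postgres,
    Sqlite,
    Mysql56,
    Mysql57,
    Mariadb,
    Mssql,
}

fn supports_set_default(connector: &Connector) -> bool {
    !matches!(
        connector,
        // MySQL 5.6/5.7 reject SET DEFAULT without a column default, MariaDB
        // silently downgrades it to RESTRICT, and SQL Server is also skipped.
        Connector::Mysql56 | Connector::Mysql57 | Connector::Mariadb | Connector::Mssql
    )
}

fn main() {
    assert!(supports_set_default(&Connector::Postgres));
    assert!(!supports_set_default(&Connector::Mariadb));
}
```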
+#[test_connector(exclude(Mysql56, Mysql57, Mariadb, Mssql), preview_features("referentialActions"))] +fn on_delete_set_default_should_work(api: TestApi) { + let dm = r#" + generator client { + provider = "prisma-client-js" + previewFeatures = ["referentialActions"] + } + + model A { + id Int @id + b B[] + } + + model B { + id Int @id + aId Int + a A @relation(fields: [aId], references: [id], onDelete: SetDefault) + } + "#; + + api.schema_push(dm).send_sync().assert_green_bang(); + + api.assert_schema().assert_table("B", |table| { + table.assert_foreign_keys_count(1).assert_fk_on_columns(&["aId"], |fk| { + fk.assert_references("A", &["id"]) + .assert_referential_action_on_delete(ForeignKeyAction::SetDefault) + }) + }); +} + +#[test_connector(exclude(Mssql), preview_features("referentialActions"))] +fn on_delete_restrict_should_work(api: TestApi) { + let dm = r#" + generator client { + provider = "prisma-client-js" + previewFeatures = ["referentialActions"] + } + + model A { + id Int @id + b B[] + } + + model B { + id Int @id + aId Int + a A @relation(fields: [aId], references: [id], onDelete: Restrict) + } + "#; + + api.schema_push(dm).send_sync().assert_green_bang(); + + api.assert_schema().assert_table("B", |table| { + table.assert_foreign_keys_count(1).assert_fk_on_columns(&["aId"], |fk| { + fk.assert_references("A", &["id"]) + .assert_referential_action_on_delete(ForeignKeyAction::Restrict) + }) + }); +} + +#[test_connector(preview_features("referentialActions"))] +fn on_update_referential_actions_should_work(api: TestApi) { + let actions = &[ + (ReferentialAction::NoAction, ForeignKeyAction::NoAction), + (ReferentialAction::SetNull, ForeignKeyAction::SetNull), + (ReferentialAction::Cascade, ForeignKeyAction::Cascade), + ]; + + for (ra, fka) in actions { + let dm = format!( + r#" + generator client {{ + provider = "prisma-client-js" + previewFeatures = ["referentialActions"] + }} + + model A {{ + id Int @id @default(autoincrement()) + b B[] + }} + + model B {{ + id Int @id + aId Int? + a A? @relation(fields: [aId], references: [id], onUpdate: {}) + }} + "#, + ra + ); + + api.schema_push(&dm).send_sync().assert_green_bang(); + + api.assert_schema().assert_table("B", |table| { + table.assert_foreign_keys_count(1).assert_fk_on_columns(&["aId"], |fk| { + fk.assert_references("A", &["id"]) + .assert_referential_action_on_update(*fka) + }) + }); + } +} + +// 5.6 and 5.7 doesn't let you `SET DEFAULT` without setting the default value +// (even if nullable). Maria will silently just use `RESTRICT` instead. 
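Both loop tests above splice a `ReferentialAction` into the schema with `format!("... onDelete: {}", ra)` / `onUpdate: {}`, which relies on the action's `Display` impl producing the datamodel spelling. A minimal stand-in for that pattern, assuming a simplified enum rather than the real `datamodel` type (the SET DEFAULT variants continue right after this note):

```rust
// Sketch: render a referential action into a Prisma schema fragment.
use std::fmt;

#[allow(dead_code)]
#[derive(Clone, Copy)]
enum ReferentialAction {
    Cascade,
    SetNull,
    NoAction,
}

impl fmt::Display for ReferentialAction {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // The datamodel spelling of each variant, as used in @relation(...).
        let s = match self {
            ReferentialAction::Cascade => "Cascade",
            ReferentialAction::SetNull => "SetNull",
            ReferentialAction::NoAction => "NoAction",
        };
        f.write_str(s)
    }
}

fn main() {
    let field = format!(
        "a A? @relation(fields: [aId], references: [id], onUpdate: {})",
        ReferentialAction::Cascade
    );
    assert!(field.ends_with("onUpdate: Cascade)"));
}
```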
+#[test_connector(exclude(Mysql56, Mysql57, Mariadb, Mssql), preview_features("referentialActions"))] +fn on_update_set_default_should_work(api: TestApi) { + let dm = r#" + generator client { + provider = "prisma-client-js" + previewFeatures = ["referentialActions"] + } + + model A { + id Int @id + b B[] + } + + model B { + id Int @id + aId Int + a A @relation(fields: [aId], references: [id], onUpdate: SetDefault) + } + "#; + + api.schema_push(dm).send_sync().assert_green_bang(); + + api.assert_schema().assert_table("B", |table| { + table.assert_foreign_keys_count(1).assert_fk_on_columns(&["aId"], |fk| { + fk.assert_references("A", &["id"]) + .assert_referential_action_on_update(ForeignKeyAction::SetDefault) + }) + }); +} + +#[test_connector(exclude(Mssql), preview_features("referentialActions"))] +fn on_update_restrict_should_work(api: TestApi) { + let dm = r#" + generator client { + provider = "prisma-client-js" + previewFeatures = ["referentialActions"] + } + + model A { + id Int @id + b B[] + } + + model B { + id Int @id + aId Int + a A @relation(fields: [aId], references: [id], onUpdate: Restrict) + } + "#; + + api.schema_push(dm).send_sync().assert_green_bang(); + + api.assert_schema().assert_table("B", |table| { + table.assert_foreign_keys_count(1).assert_fk_on_columns(&["aId"], |fk| { + fk.assert_references("A", &["id"]) + .assert_referential_action_on_update(ForeignKeyAction::Restrict) + }) + }); +} + +#[test_connector(exclude(Mssql), preview_features("referentialActions"))] +fn on_delete_required_default_action(api: TestApi) { + let dm = r#" + generator client { + provider = "prisma-client-js" + previewFeatures = ["referentialActions"] + } + + model A { + id Int @id + b B[] + } + + model B { + id Int @id + aId Int + a A @relation(fields: [aId], references: [id]) + } + "#; + + api.schema_push(dm).send_sync().assert_green_bang(); + + api.assert_schema().assert_table("B", |table| { + table.assert_foreign_keys_count(1).assert_fk_on_columns(&["aId"], |fk| { + fk.assert_references("A", &["id"]) + .assert_referential_action_on_delete(ForeignKeyAction::Restrict) + }) + }); +} + +#[test_connector(tags(Mssql), preview_features("referentialActions"))] +fn on_delete_required_default_action_with_no_restrict(api: TestApi) { + let dm = r#" + generator client { + provider = "prisma-client-js" + previewFeatures = ["referentialActions"] + } + + model A { + id Int @id + b B[] + } + + model B { + id Int @id + aId Int + a A @relation(fields: [aId], references: [id]) + } + "#; + + api.schema_push(dm).send_sync().assert_green_bang(); + + api.assert_schema().assert_table("B", |table| { + table.assert_foreign_keys_count(1).assert_fk_on_columns(&["aId"], |fk| { + fk.assert_references("A", &["id"]) + .assert_referential_action_on_delete(ForeignKeyAction::NoAction) + }) + }); +} + +#[test_connector(preview_features("referentialActions"))] +fn on_delete_optional_default_action(api: TestApi) { + let dm = r#" + generator client { + provider = "prisma-client-js" + previewFeatures = ["referentialActions"] + } + + model A { + id Int @id + b B[] + } + + model B { + id Int @id + aId Int? + a A? 
@relation(fields: [aId], references: [id]) + } + "#; + + api.schema_push(dm).send_sync().assert_green_bang(); + + api.assert_schema().assert_table("B", |table| { + table.assert_foreign_keys_count(1).assert_fk_on_columns(&["aId"], |fk| { + fk.assert_references("A", &["id"]) + .assert_referential_action_on_delete(ForeignKeyAction::SetNull) + }) + }); +} + +#[test_connector(preview_features("referentialActions"))] +fn on_delete_compound_optional_optional_default_action(api: TestApi) { + let dm = r#" + generator client { + provider = "prisma-client-js" + previewFeatures = ["referentialActions"] + } + + model A { + id Int @id + id2 Int + b B[] + @@unique([id, id2]) + } + + model B { + id Int @id + aId1 Int? + aId2 Int? + a A? @relation(fields: [aId1, aId2], references: [id, id2]) + } + "#; + + api.schema_push(dm).send_sync().assert_green_bang(); + + api.assert_schema().assert_table("B", |table| { + table + .assert_foreign_keys_count(1) + .assert_fk_on_columns(&["aId1", "aId2"], |fk| { + fk.assert_references("A", &["id", "id2"]) + .assert_referential_action_on_delete(ForeignKeyAction::SetNull) + }) + }); +} + +#[test_connector(exclude(Mssql), preview_features("referentialActions"))] +fn on_delete_compound_required_optional_default_action_with_restrict(api: TestApi) { + let dm = r#" + generator client { + provider = "prisma-client-js" + previewFeatures = ["referentialActions"] + } + + model A { + id Int @id + id2 Int + b B[] + @@unique([id, id2]) + } + + model B { + id Int @id + aId1 Int? + aId2 Int + a A? @relation(fields: [aId1, aId2], references: [id, id2]) + } + "#; + + api.schema_push(dm).send_sync().assert_green_bang(); + + api.assert_schema().assert_table("B", |table| { + table + .assert_foreign_keys_count(1) + .assert_fk_on_columns(&["aId1", "aId2"], |fk| { + fk.assert_references("A", &["id", "id2"]) + .assert_referential_action_on_delete(ForeignKeyAction::Restrict) + }) + }); +} + +#[test_connector(tags(Mssql), preview_features("referentialActions"))] +fn on_delete_compound_required_optional_default_action_without_restrict(api: TestApi) { + let dm = r#" + generator client { + provider = "prisma-client-js" + previewFeatures = ["referentialActions"] + } + + model A { + id Int @id + id2 Int + b B[] + @@unique([id, id2]) + } + + model B { + id Int @id + aId1 Int? + aId2 Int + a A? @relation(fields: [aId1, aId2], references: [id, id2]) + } + "#; + + api.schema_push(dm).send_sync().assert_green_bang(); + + api.assert_schema().assert_table("B", |table| { + table + .assert_foreign_keys_count(1) + .assert_fk_on_columns(&["aId1", "aId2"], |fk| { + fk.assert_references("A", &["id", "id2"]) + .assert_referential_action_on_delete(ForeignKeyAction::NoAction) + }) + }); +} + +#[test_connector(preview_features("referentialActions"))] +fn on_update_optional_default_action(api: TestApi) { + let dm = r#" + generator client { + provider = "prisma-client-js" + previewFeatures = ["referentialActions"] + } + + model A { + id Int @id + b B[] + } + + model B { + id Int @id + aId Int? + a A? 
@relation(fields: [aId], references: [id]) + } + "#; + + api.schema_push(dm).send_sync().assert_green_bang(); + + api.assert_schema().assert_table("B", |table| { + table.assert_foreign_keys_count(1).assert_fk_on_columns(&["aId"], |fk| { + fk.assert_references("A", &["id"]) + .assert_referential_action_on_update(ForeignKeyAction::Cascade) + }) + }); +} + +#[test_connector(preview_features("referentialActions"))] +fn on_update_required_default_action(api: TestApi) { + let dm = r#" + generator client { + provider = "prisma-client-js" + previewFeatures = ["referentialActions"] + } + + model A { + id Int @id + b B[] + } + + model B { + id Int @id + aId Int + a A @relation(fields: [aId], references: [id]) + } + "#; + + api.schema_push(dm).send_sync().assert_green_bang(); + + api.assert_schema().assert_table("B", |table| { + table.assert_foreign_keys_count(1).assert_fk_on_columns(&["aId"], |fk| { + fk.assert_references("A", &["id"]) + .assert_referential_action_on_update(ForeignKeyAction::Cascade) + }) + }); +} diff --git a/migration-engine/migration-engine-tests/tests/native_types/mysql.rs b/migration-engine/migration-engine-tests/tests/native_types/mysql.rs index 0ec122f449a1..1c062505dc30 100644 --- a/migration-engine/migration-engine-tests/tests/native_types/mysql.rs +++ b/migration-engine/migration-engine-tests/tests/native_types/mysql.rs @@ -750,7 +750,7 @@ fn filter_to_types(api: &TestApi, to_types: &'static [&'static str]) -> Cow<'sta #[test_connector(tags(Mysql))] fn safe_casts_with_existing_data_should_work(api: TestApi) { - let connector = sql_datamodel_connector::MySqlDatamodelConnector::new(); + let connector = sql_datamodel_connector::MySqlDatamodelConnector::new(false); let mut dm1 = String::with_capacity(256); let mut dm2 = String::with_capacity(256); let colnames = colnames_for_cases(SAFE_CASTS); @@ -796,7 +796,7 @@ fn safe_casts_with_existing_data_should_work(api: TestApi) { #[test_connector(tags(Mysql))] fn risky_casts_with_existing_data_should_warn(api: TestApi) { - let connector = sql_datamodel_connector::MySqlDatamodelConnector::new(); + let connector = sql_datamodel_connector::MySqlDatamodelConnector::new(false); let mut dm1 = String::with_capacity(256); let mut dm2 = String::with_capacity(256); let colnames = colnames_for_cases(RISKY_CASTS); @@ -859,7 +859,7 @@ fn risky_casts_with_existing_data_should_warn(api: TestApi) { #[test_connector(tags(Mysql))] fn impossible_casts_with_existing_data_should_warn(api: TestApi) { - let connector = sql_datamodel_connector::MySqlDatamodelConnector::new(); + let connector = sql_datamodel_connector::MySqlDatamodelConnector::new(false); let mut dm1 = String::with_capacity(256); let mut dm2 = String::with_capacity(256); let colnames = colnames_for_cases(IMPOSSIBLE_CASTS); diff --git a/prisma-fmt/src/actions.rs b/prisma-fmt/src/actions.rs new file mode 100644 index 000000000000..66c26f89ddbd --- /dev/null +++ b/prisma-fmt/src/actions.rs @@ -0,0 +1,33 @@ +use std::io::{self, Read}; + +pub fn run() { + let mut datamodel_string = String::new(); + + io::stdin() + .read_to_string(&mut datamodel_string) + .expect("Unable to read from stdin."); + + let datamodel_result = datamodel::parse_configuration(&datamodel_string); + + match datamodel_result { + Ok(validated_configuration) => { + if validated_configuration.subject.datasources.len() != 1 { + print!("[]") + } else if let Some(datasource) = validated_configuration.subject.datasources.first() { + let available_referential_actions = datasource + .active_connector + .referential_actions() + .iter() 
+                    .map(|act| format!("{:?}", act))
+                    .collect::<Vec<String>>();
+
+                let json = serde_json::to_string(&available_referential_actions).expect("Failed to render JSON");
+
+                print!("{}", json)
+            } else {
+                print!("[]")
+            }
+        }
+        _ => print!("[]"),
+    }
+}
diff --git a/prisma-fmt/src/main.rs b/prisma-fmt/src/main.rs
index adc43ecf8720..2a938ccb3b82 100644
--- a/prisma-fmt/src/main.rs
+++ b/prisma-fmt/src/main.rs
@@ -1,3 +1,4 @@
+mod actions;
 mod format;
 mod lint;
 mod native;
@@ -45,6 +46,8 @@ pub enum FmtOpts {
     Format(FormatOpts),
     /// Specifies Native Types mode
     NativeTypes,
+    /// List of available referential actions
+    ReferentialActions,
     /// Specifies preview features mode
     PreviewFeatures(PreviewFeaturesOpts),
 }
@@ -62,6 +65,7 @@ fn main() {
         FmtOpts::Lint(opts) => lint::run(opts),
         FmtOpts::Format(opts) => format::run(opts),
         FmtOpts::NativeTypes => native::run(),
+        FmtOpts::ReferentialActions => actions::run(),
         FmtOpts::PreviewFeatures(opts) => preview::run(opts),
     }
 }
diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/mod.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/mod.rs
index 73e346653b62..9ad887944cee 100644
--- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/mod.rs
+++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/mod.rs
@@ -1,3 +1,4 @@
 mod create_many;
 mod cursor;
 mod disconnect;
+mod ref_actions;
diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/mod.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/mod.rs
new file mode 100644
index 000000000000..af76b00948de
--- /dev/null
+++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/mod.rs
@@ -0,0 +1,2 @@
+mod on_delete;
+mod on_update;
diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_delete/cascade.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_delete/cascade.rs
new file mode 100644
index 000000000000..b285453e2852
--- /dev/null
+++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_delete/cascade.rs
@@ -0,0 +1,166 @@
+use indoc::indoc;
+use query_engine_tests::*;
+
+#[test_suite(suite = "cascade_onD_1to1_req", schema(required))]
+mod one2one_req {
+    fn required() -> String {
+        let schema = indoc! {
+            r#"model Parent {
+                #id(id, Int, @id)
+                child Child?
+            }
+
+            model Child {
+                #id(id, Int, @id)
+                parent_id Int
+                parent Parent @relation(fields: [parent_id], references: [id], onDelete: Cascade)
+            }"#
+        };
+
+        schema.to_owned()
+    }
+
+    /// Deleting the parent deletes child as well.
+    #[connector_test]
+    async fn delete_parent(runner: &Runner) -> TestResult<()> {
+        insta::assert_snapshot!(
+          run_query!(runner, r#"mutation { createOneParent(data: { id: 1, child: { create: { id: 1 }}}) { id }}"#),
+          @r###"{"data":{"createOneParent":{"id":1}}}"###
+        );
+
+        insta::assert_snapshot!(
+          run_query!(runner, "mutation { deleteOneParent(where: { id: 1 }) { id }}"),
+          @r###"{"data":{"deleteOneParent":{"id":1}}}"###
+        );
+
+        insta::assert_snapshot!(
+          run_query!(runner, "query { findManyChild { id }}"),
+          @r###"{"data":{"findManyChild":[]}}"###
+        );
+
+        Ok(())
+    }
+}
+
+#[test_suite(suite = "cascade_onD_1to1_opt", schema(optional))]
+mod one2one_opt {
+    fn optional() -> String {
+        let schema = indoc! {
+            r#"model Parent {
+                #id(id, Int, @id)
+                child Child?
+            }
+
+            model Child {
+                #id(id, Int, @id)
+                parent_id Int?
+                parent Parent?
@relation(fields: [parent_id], references: [id], onDelete: Cascade) + }"# + }; + + schema.to_owned() + } + + /// Deleting the parent deletes child as well. + #[connector_test] + async fn delete_parent(runner: &Runner) -> TestResult<()> { + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 1, child: { create: { id: 1 }}}) { id }}"#), + @r###"{"data":{"createOneParent":{"id":1}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, "mutation { deleteOneParent(where: { id: 1 }) { id }}"), + @r###"{"data":{"deleteOneParent":{"id":1}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, "query { findManyChild { id }}"), + @r###"{"data":{"findManyChild":[]}}"### + ); + + Ok(()) + } +} + +#[test_suite(suite = "cascade_onD_1toM_req", schema(required))] +mod one2many_req { + fn required() -> String { + let schema = indoc! { + r#"model Parent { + #id(id, Int, @id) + children Child[] + } + + model Child { + #id(id, Int, @id) + parent_id Int + parent Parent @relation(fields: [parent_id], references: [id], onDelete: Cascade) + }"# + }; + + schema.to_owned() + } + + /// Deleting the parent deletes all children. + #[connector_test] + async fn delete_parent(runner: &Runner) -> TestResult<()> { + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 1, children: { create: [ { id: 1 }, { id: 2 } ] }}) { id }}"#), + @r###"{"data":{"createOneParent":{"id":1}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, "mutation { deleteOneParent(where: { id: 1 }) { id }}"), + @r###"{"data":{"deleteOneParent":{"id":1}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, "query { findManyChild { id }}"), + @r###"{"data":{"findManyChild":[]}}"### + ); + + Ok(()) + } +} + +#[test_suite(suite = "cascade_onD_1toM_opt", schema(optional))] +mod one2many_opt { + fn optional() -> String { + let schema = indoc! { + r#"model Parent { + #id(id, Int, @id) + children Child[] + } + + model Child { + #id(id, Int, @id) + parent_id Int? + parent Parent? @relation(fields: [parent_id], references: [id], onDelete: Cascade) + }"# + }; + + schema.to_owned() + } + + /// Deleting the parent deletes all children. 
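One remark on the schema functions in these suites: the `#id(id, Int, @id)` lines are test-kit template placeholders rather than Prisma syntax, expanded per connector into a concrete id field before the schema is pushed. A purely hypothetical expansion for the SQL connectors, just to illustrate the idea (the real templating lives in the test kit); the tests that follow then run against the expanded schema:

```rust
// Illustrative only: expand the test-kit #id placeholder into plain Prisma
// schema syntax the way a SQL connector might.
fn expand_id_placeholder(template: &str) -> String {
    template.replace("#id(id, Int, @id)", "id Int @id")
}

fn main() {
    let template = "model Parent {\n    #id(id, Int, @id)\n    children Child[]\n}";
    let expanded = expand_id_placeholder(template);
    assert!(expanded.contains("id Int @id"));
}
```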
+ #[connector_test] + async fn delete_parent(runner: &Runner) -> TestResult<()> { + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 1, children: { create: [ { id: 1 }, { id: 2 } ] }}) { id }}"#), + @r###"{"data":{"createOneParent":{"id":1}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, "mutation { deleteOneParent(where: { id: 1 }) { id }}"), + @r###"{"data":{"deleteOneParent":{"id":1}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, "query { findManyChild { id }}"), + @r###"{"data":{"findManyChild":[]}}"### + ); + + Ok(()) + } +} diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_delete/mod.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_delete/mod.rs new file mode 100644 index 000000000000..1b8d08fa7328 --- /dev/null +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_delete/mod.rs @@ -0,0 +1,5 @@ +mod cascade; +mod no_action; +mod restrict; +mod set_default; +mod set_null; diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_delete/no_action.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_delete/no_action.rs new file mode 100644 index 000000000000..ff3b29a5bf61 --- /dev/null +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_delete/no_action.rs @@ -0,0 +1,344 @@ +use indoc::indoc; +use query_engine_tests::*; + +#[test_suite(suite = "noaction_onD_1to1_req", schema(required))] +mod one2one_req { + fn required() -> String { + let schema = indoc! { + r#"model Parent { + #id(id, Int, @id) + child Child? + } + + model Child { + #id(id, Int, @id) + parent_id Int + parent Parent @relation(fields: [parent_id], references: [id], onDelete: NoAction) + }"# + }; + + schema.to_owned() + } + + /// Deleting the parent must fail if a child is connected. + #[connector_test(exclude(MongoDb))] + async fn delete_parent_failure(runner: &Runner) -> TestResult<()> { + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 1, child: { create: { id: 1 }}}) { id }}"#), + @r###"{"data":{"createOneParent":{"id":1}}}"### + ); + + assert_error!( + runner, + "mutation { deleteOneParent(where: { id: 1 }) { id }}", + 2003, + "Foreign key constraint failed on the field" + ); + + assert_error!( + runner, + "mutation { deleteManyParent(where: { id: 1 }) { count }}", + 2003, + "Foreign key constraint failed on the field" + ); + + Ok(()) + } + + /// Deleting the parent leaves the data in a integrity-violating state. + #[connector_test(only(MongoDb))] + async fn delete_parent_violation(runner: &Runner) -> TestResult<()> { + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 1, child: { create: { id: 1 }}}) { id }}"#), + @r###"{"data":{"createOneParent":{"id":1}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, r#"mutation { deleteOneParent(where: { id: 1 }) { id }}"#), + @r###"{"data":{"deleteOneParent":{"id":1}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, r#"query { findManyChild { parent_id }}"#), + @r###"{"data":{"findManyChild":[{"parent_id":1}]}}"### + ); + + Ok(()) + } +} + +#[test_suite(suite = "noaction_onD_1to1_opt", schema(optional), exclude(MongoDb))] +mod one2one_opt { + fn optional() -> String { + let schema = indoc! { + r#"model Parent { + #id(id, Int, @id) + child Child? + } + + model Child { + #id(id, Int, @id) + parent_id Int? 
+ parent Parent? @relation(fields: [parent_id], references: [id], onDelete: NoAction) + }"# + }; + + schema.to_owned() + } + + /// Deleting the parent must fail if a child is connected. + #[connector_test] + async fn delete_parent_failure(runner: &Runner) -> TestResult<()> { + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 1, child: { create: { id: 1 }}}) { id }}"#), + @r###"{"data":{"createOneParent":{"id":1}}}"### + ); + + assert_error!( + runner, + "mutation { deleteOneParent(where: { id: 1 }) { id }}", + 2003, + "Foreign key constraint failed on the field" + ); + + assert_error!( + runner, + "mutation { deleteManyParent(where: { id: 1 }) { count }}", + 2003, + "Foreign key constraint failed on the field" + ); + + Ok(()) + } + + /// Deleting the parent succeeds if no child is connected. + #[connector_test] + async fn delete_parent(runner: &Runner) -> TestResult<()> { + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 1 }) { id }}"#), + @r###"{"data":{"createOneParent":{"id":1}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 2 }) { id }}"#), + @r###"{"data":{"createOneParent":{"id":2}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, "mutation { deleteOneParent(where: { id: 1 }) { id }}"), + @r###"{"data":{"deleteOneParent":{"id":1}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, "mutation { deleteManyParent(where: { id: 2 }) { count }}"), + @r###"{"data":{"deleteManyParent":{"count":1}}}"### + ); + + Ok(()) + } + + /// Deleting the parent leaves the data in a integrity-violating state. + #[connector_test(only(MongoDb))] + async fn delete_parent_violation(runner: &Runner) -> TestResult<()> { + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 1, child: { create: { id: 1 }}}) { id }}"#), + @r###"{"data":{"createOneParent":{"id":1}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, r#"mutation { deleteOneParent(where: { id: 1 }) { id }}"#), + @r###"{"data":{"deleteOneParent":{"id":1}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, r#"query { findManyChild { parent_id }}"#), + @r###"{"data":{"findManyChild":[{"parent_id":1}]}}"### + ); + + Ok(()) + } +} + +#[test_suite(suite = "noaction_onD_1toM_req", schema(required), exclude(MongoDb))] +mod one2many_req { + fn required() -> String { + let schema = indoc! { + r#"model Parent { + #id(id, Int, @id) + children Child[] + } + + model Child { + #id(id, Int, @id) + parent_id Int + parent Parent @relation(fields: [parent_id], references: [id], onDelete: NoAction) + }"# + }; + + schema.to_owned() + } + + /// Deleting the parent must fail if a child is connected. + #[connector_test] + async fn delete_parent_failure(runner: &Runner) -> TestResult<()> { + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 1, children: { create: { id: 1 }}}) { id }}"#), + @r###"{"data":{"createOneParent":{"id":1}}}"### + ); + + assert_error!( + runner, + "mutation { deleteOneParent(where: { id: 1 }) { id }}", + 2003, + "Foreign key constraint failed on the field" + ); + + assert_error!( + runner, + "mutation { deleteManyParent(where: { id: 1 }) { count }}", + 2003, + "Foreign key constraint failed on the field" + ); + + Ok(()) + } + + /// Deleting the parent succeeds if no child is connected. 
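The failure tests above and below all funnel through `assert_error!`: run the mutation, then match the engine's error code and a message substring. A hedged stand-in for the shape of that check (hypothetical types, not the test kit's real API); the success-path test follows this note:

```rust
// Sketch of the check assert_error! performs. EngineError is a stand-in.
struct EngineError {
    code: u32,
    message: String,
}

fn assert_engine_error(result: Result<String, EngineError>, code: u32, needle: &str) {
    match result {
        Ok(data) => panic!("expected error {}, got success: {}", code, data),
        Err(err) => {
            assert_eq!(err.code, code, "unexpected error code");
            assert!(
                err.message.contains(needle),
                "expected {:?} in {:?}",
                needle,
                err.message
            );
        }
    }
}

fn main() {
    let err = EngineError {
        code: 2003,
        message: "Foreign key constraint failed on the field: `parent_id`".into(),
    };

    assert_engine_error(Err(err), 2003, "Foreign key constraint failed on the field");
}
```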
+ #[connector_test] + async fn delete_parent(runner: &Runner) -> TestResult<()> { + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 1 }) { id }}"#), + @r###"{"data":{"createOneParent":{"id":1}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 2 }) { id }}"#), + @r###"{"data":{"createOneParent":{"id":2}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, "mutation { deleteOneParent(where: { id: 1 }) { id }}"), + @r###"{"data":{"deleteOneParent":{"id":1}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, "mutation { deleteManyParent(where: { id: 2 }) { count }}"), + @r###"{"data":{"deleteManyParent":{"count":1}}}"### + ); + + Ok(()) + } + + /// Deleting the parent leaves the data in a integrity-violating state. + #[connector_test(only(MongoDb))] + async fn delete_parent_violation(runner: &Runner) -> TestResult<()> { + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 1, children: { create: { id: 1 }}}) { id }}"#), + @r###"{"data":{"createOneParent":{"id":1}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, r#"mutation { deleteOneParent(where: { id: 1 }) { id }}"#), + @r###"{"data":{"deleteOneParent":{"id":1}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, r#"query { findManyChild { parent_id }}"#), + @r###"{"data":{"findManyChild":[{"parent_id":1}]}}"### + ); + + Ok(()) + } +} + +#[test_suite(suite = "noaction_onD_1toM_opt", schema(optional), exclude(MongoDb))] +mod one2many_opt { + fn optional() -> String { + let schema = indoc! { + r#"model Parent { + #id(id, Int, @id) + children Child[] + } + + model Child { + #id(id, Int, @id) + parent_id Int? + parent Parent? @relation(fields: [parent_id], references: [id], onDelete: NoAction) + }"# + }; + + schema.to_owned() + } + + /// Deleting the parent must fail if a child is connected. + #[connector_test] + async fn delete_parent_failure(runner: &Runner) -> TestResult<()> { + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 1, children: { create: { id: 1 }}}) { id }}"#), + @r###"{"data":{"createOneParent":{"id":1}}}"### + ); + + assert_error!( + runner, + "mutation { deleteOneParent(where: { id: 1 }) { id }}", + 2003, + "Foreign key constraint failed on the field" + ); + + assert_error!( + runner, + "mutation { deleteManyParent(where: { id: 1 }) { count }}", + 2003, + "Foreign key constraint failed on the field" + ); + + Ok(()) + } + + /// Deleting the parent succeeds if no child is connected. + #[connector_test] + async fn delete_parent(runner: &Runner) -> TestResult<()> { + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 1 }) { id }}"#), + @r###"{"data":{"createOneParent":{"id":1}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 2 }) { id }}"#), + @r###"{"data":{"createOneParent":{"id":2}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, "mutation { deleteOneParent(where: { id: 1 }) { id }}"), + @r###"{"data":{"deleteOneParent":{"id":1}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, "mutation { deleteManyParent(where: { id: 2 }) { count }}"), + @r###"{"data":{"deleteManyParent":{"count":1}}}"### + ); + + Ok(()) + } + + /// Deleting the parent leaves the data in a integrity-violating state. 
+ #[connector_test(only(MongoDb))] + async fn delete_parent_violation(runner: &Runner) -> TestResult<()> { + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 1, children: { create: { id: 1 }}}) { id }}"#), + @r###"{"data":{"createOneParent":{"id":1}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, r#"mutation { deleteOneParent(where: { id: 1 }) { id }}"#), + @r###"{"data":{"deleteOneParent":{"id":1}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, r#"query { findManyChild { parent_id }}"#), + @r###"{"data":{"findManyChild":[{"parent_id":1}]}}"### + ); + + Ok(()) + } +} diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_delete/restrict.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_delete/restrict.rs new file mode 100644 index 000000000000..8756b3f05dfa --- /dev/null +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_delete/restrict.rs @@ -0,0 +1,266 @@ +//! SQL Server doesn't support Restrict. + +use indoc::indoc; +use query_engine_tests::*; + +#[test_suite(suite = "restrict_onD_1to1_req", schema(required), exclude(SqlServer))] +mod one2one_req { + fn required() -> String { + let schema = indoc! { + r#"model Parent { + #id(id, Int, @id) + child Child? + } + + model Child { + #id(id, Int, @id) + parent_id Int + parent Parent @relation(fields: [parent_id], references: [id], onDelete: Restrict) + }"# + }; + + schema.to_owned() + } + + /// Deleting the parent must fail if a child is connected. + #[connector_test] + async fn delete_parent_failure(runner: &Runner) -> TestResult<()> { + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 1, child: { create: { id: 1 }}}) { id }}"#), + @r###"{"data":{"createOneParent":{"id":1}}}"### + ); + + match runner.connector() { + ConnectorTag::MongoDb(_) => assert_error!( + runner, + "mutation { deleteOneParent(where: { id: 1 }) { id }}", + 2014, + "The change you are trying to make would violate the required relation 'ChildToParent' between the `Child` and `Parent` models." + ), + _ => assert_error!( + runner, + "mutation { deleteOneParent(where: { id: 1 }) { id }}", + 2003, + "Foreign key constraint failed on the field" + ), + }; + + Ok(()) + } +} + +#[test_suite(suite = "restrict_onD_1to1_opt", schema(optional), exclude(SqlServer))] +mod one2one_opt { + fn optional() -> String { + let schema = indoc! { + r#"model Parent { + #id(id, Int, @id) + child Child? + } + + model Child { + #id(id, Int, @id) + parent_id Int? + parent Parent? @relation(fields: [parent_id], references: [id], onDelete: Restrict) + }"# + }; + + schema.to_owned() + } + + /// Deleting the parent must fail if a child is connected. + #[connector_test] + async fn delete_parent_failure(runner: &Runner) -> TestResult<()> { + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 1, child: { create: { id: 1 }}}) { id }}"#), + @r###"{"data":{"createOneParent":{"id":1}}}"### + ); + + match runner.connector() { + ConnectorTag::MongoDb(_) => assert_error!( + runner, + "mutation { deleteOneParent(where: { id: 1 }) { id }}", + 2014, + "The change you are trying to make would violate the required relation 'ChildToParent' between the `Child` and `Parent` models." 
+ ), + _ => assert_error!( + runner, + "mutation { deleteOneParent(where: { id: 1 }) { id }}", + 2003, + "Foreign key constraint failed on the field" + ), + }; + + Ok(()) + } + + /// Deleting the parent succeeds if no child is connected. + #[connector_test] + async fn delete_parent(runner: &Runner) -> TestResult<()> { + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 1 }) { id }}"#), + @r###"{"data":{"createOneParent":{"id":1}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 2 }) { id }}"#), + @r###"{"data":{"createOneParent":{"id":2}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, "mutation { deleteOneParent(where: { id: 1 }) { id }}"), + @r###"{"data":{"deleteOneParent":{"id":1}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, "mutation { deleteManyParent(where: { id: 2 }) { count }}"), + @r###"{"data":{"deleteManyParent":{"count":1}}}"### + ); + + Ok(()) + } +} + +#[test_suite(suite = "restrict_onD_1toM_req", schema(required), exclude(SqlServer))] +mod one2many_req { + fn required() -> String { + let schema = indoc! { + r#"model Parent { + #id(id, Int, @id) + children Child[] + } + + model Child { + #id(id, Int, @id) + parent_id Int + parent Parent @relation(fields: [parent_id], references: [id], onDelete: Restrict) + }"# + }; + + schema.to_owned() + } + + /// Deleting the parent must fail if a child is connected. + #[connector_test] + async fn delete_parent_failure(runner: &Runner) -> TestResult<()> { + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 1, children: { create: { id: 1 }}}) { id }}"#), + @r###"{"data":{"createOneParent":{"id":1}}}"### + ); + + match runner.connector() { + ConnectorTag::MongoDb(_) => assert_error!( + runner, + "mutation { deleteOneParent(where: { id: 1 }) { id }}", + 2014, + "The change you are trying to make would violate the required relation 'ChildToParent' between the `Child` and `Parent` models." + ), + _ => assert_error!( + runner, + "mutation { deleteOneParent(where: { id: 1 }) { id }}", + 2003, + "Foreign key constraint failed on the field" + ), + }; + + Ok(()) + } + + /// Deleting the parent succeeds if no child is connected. + #[connector_test] + async fn delete_parent(runner: &Runner) -> TestResult<()> { + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 1 }) { id }}"#), + @r###"{"data":{"createOneParent":{"id":1}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 2 }) { id }}"#), + @r###"{"data":{"createOneParent":{"id":2}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, "mutation { deleteOneParent(where: { id: 1 }) { id }}"), + @r###"{"data":{"deleteOneParent":{"id":1}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, "mutation { deleteManyParent(where: { id: 2 }) { count }}"), + @r###"{"data":{"deleteManyParent":{"count":1}}}"### + ); + + Ok(()) + } +} + +#[test_suite(suite = "restrict_onD_1toM_opt", schema(optional), exclude(SqlServer))] +mod one2many_opt { + fn optional() -> String { + let schema = indoc! { + r#"model Parent { + #id(id, Int, @id) + children Child[] + } + + model Child { + #id(id, Int, @id) + parent_id Int? + parent Parent? @relation(fields: [parent_id], references: [id], onDelete: Restrict) + }"# + }; + + schema.to_owned() + } + + /// Deleting the parent must fail if a child is connected. 
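These Restrict tests branch on the connector because the blocked delete surfaces differently: SQL databases reject it at the database level and the engine maps that to P2003, while on MongoDB the engine emulates the check itself and reports a required-relation violation, P2014. As a stand-in table (the enum is not the real `ConnectorTag`); the next failure test exercises both branches:

```rust
// Stand-in for the match on runner.connector() in the tests above and below.
enum Connector {
    MongoDb,
    Sql, // Postgres, MySQL, SQLite, ... in the real ConnectorTag
}

fn expected_blocked_delete_code(connector: &Connector) -> u32 {
    match connector {
        // No database-side FKs: the query engine emulates Restrict itself.
        Connector::MongoDb => 2014,
        // The database rejects the delete; the engine maps it to P2003.
        Connector::Sql => 2003,
    }
}

fn main() {
    assert_eq!(expected_blocked_delete_code(&Connector::MongoDb), 2014);
    assert_eq!(expected_blocked_delete_code(&Connector::Sql), 2003);
}
```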
+ #[connector_test] + async fn delete_parent_failure(runner: &Runner) -> TestResult<()> { + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 1, children: { create: { id: 1 }}}) { id }}"#), + @r###"{"data":{"createOneParent":{"id":1}}}"### + ); + + match runner.connector() { + ConnectorTag::MongoDb(_) => assert_error!( + runner, + "mutation { deleteOneParent(where: { id: 1 }) { id }}", + 2014, + "The change you are trying to make would violate the required relation 'ChildToParent' between the `Child` and `Parent` models." + ), + _ => assert_error!( + runner, + "mutation { deleteOneParent(where: { id: 1 }) { id }}", + 2003, + "Foreign key constraint failed on the field" + ), + }; + + Ok(()) + } + + /// Deleting the parent succeeds if no child is connected. + #[connector_test] + async fn delete_parent(runner: &Runner) -> TestResult<()> { + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 1 }) { id }}"#), + @r###"{"data":{"createOneParent":{"id":1}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 2 }) { id }}"#), + @r###"{"data":{"createOneParent":{"id":2}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, "mutation { deleteOneParent(where: { id: 1 }) { id }}"), + @r###"{"data":{"deleteOneParent":{"id":1}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, "mutation { deleteManyParent(where: { id: 2 }) { count }}"), + @r###"{"data":{"deleteManyParent":{"count":1}}}"### + ); + + Ok(()) + } +} diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_delete/set_default.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_delete/set_default.rs new file mode 100644 index 000000000000..f5932ce10416 --- /dev/null +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_delete/set_default.rs @@ -0,0 +1,411 @@ +//! MySQL doesn't support SetDefault for InnoDB (which is our only supported engine at the moment). +use indoc::indoc; +use query_engine_tests::*; + +#[test_suite(suite = "setdefault_onD_1to1_req", exclude(MongoDb, MySQL))] +mod one2one_req { + fn required_with_default() -> String { + let schema = indoc! { + r#"model Parent { + #id(id, Int, @id) + child Child? + } + + model Child { + #id(id, Int, @id) + parent_id Int @default(2) + parent Parent @relation(fields: [parent_id], references: [id], onDelete: SetDefault) + }"# + }; + + schema.to_owned() + } + + fn required_without_default() -> String { + let schema = indoc! { + r#"model Parent { + #id(id, Int, @id) + child Child? + } + + model Child { + #id(id, Int, @id) + parent_id Int + parent Parent @relation(fields: [parent_id], references: [id], onDelete: SetDefault) + }"# + }; + + schema.to_owned() + } + + /// Deleting the parent reconnects the child to the default. 
+ #[connector_test(schema(required_with_default))] + async fn delete_parent(runner: &Runner) -> TestResult<()> { + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 1, child: { create: { id: 1 }}}) { id }}"#), + @r###"{"data":{"createOneParent":{"id":1}}}"### + ); + + // The default + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 2 }) { id }}"#), + @r###"{"data":{"createOneParent":{"id":2}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, r#"mutation { deleteOneParent(where: { id: 1 }) { id }}"#), + @r###"{"data":{"deleteOneParent":{"id":1}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, r#"query { findManyChild { id parent { id } }}"#), + @r###"{"data":{"findManyChild":[{"id":1,"parent":{"id":2}}]}}"### + ); + + Ok(()) + } + + /// Deleting the parent reconnects the child to the default and fails (the default doesn't exist). + #[connector_test(schema(required_with_default))] + async fn delete_parent_no_exist_fail(runner: &Runner) -> TestResult<()> { + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 1, child: { create: { id: 1 }}}) { id }}"#), + @r###"{"data":{"createOneParent":{"id":1}}}"### + ); + + assert_error!( + runner, + "mutation { deleteOneParent(where: { id: 1 }) { id }}", + 2003, + "Foreign key constraint failed on the field" + ); + + Ok(()) + } + + /// Deleting the parent with no default for SetDefault fails. + /// Only postgres allows setting no default for a SetDefault FK. + #[connector_test(schema(required_without_default), only(Postgres))] + async fn delete_parent_fail(runner: &Runner) -> TestResult<()> { + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 1, child: { create: { id: 1 }}}) { id }}"#), + @r###"{"data":{"createOneParent":{"id":1}}}"### + ); + + assert_error!( + runner, + "mutation { deleteOneParent(where: { id: 1 }) { id }}", + 2011, + "Null constraint violation on the fields" + ); + + Ok(()) + } +} + +#[test_suite(suite = "setdefault_onD_1to1_opt", exclude(MongoDb, MySQL))] +mod one2one_opt { + fn optional_with_default() -> String { + let schema = indoc! { + r#"model Parent { + #id(id, Int, @id) + child Child? + } + + model Child { + #id(id, Int, @id) + parent_id Int? @default(2) + parent Parent? @relation(fields: [parent_id], references: [id], onDelete: SetDefault) + }"# + }; + + schema.to_owned() + } + + fn optional_without_default() -> String { + let schema = indoc! { + r#"model Parent { + #id(id, Int, @id) + child Child? + } + + model Child { + #id(id, Int, @id) + parent_id Int? + parent Parent? @relation(fields: [parent_id], references: [id], onDelete: SetDefault) + }"# + }; + + schema.to_owned() + } + + /// Deleting the parent reconnects the child to the default. 
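The optional-FK module continues below with the same mechanics as the required one above: `ON DELETE SET DEFAULT` rewrites the foreign key to the column default, and the rewrite itself must still pass the FK check. As plain in-memory logic (all values illustrative, matching the `@default(2)` column in these schemas):

```rust
// Sketch of ON DELETE SET DEFAULT after parent 1 has been deleted.
use std::collections::HashSet;

fn main() {
    let parents: HashSet<i32> = HashSet::from([2]); // parent 1 already deleted
    let column_default = 2;

    let child_parent_id = column_default; // SET DEFAULT rewrites the FK

    // FK check after the rewrite: it must reference an existing parent,
    // otherwise the database raises the P2003 error asserted in the
    // *_no_exist_fail tests.
    assert!(parents.contains(&child_parent_id));
}
```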
+ #[connector_test(schema(optional_with_default))] + async fn delete_parent(runner: &Runner) -> TestResult<()> { + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 1, child: { create: { id: 1 }}}) { id }}"#), + @r###"{"data":{"createOneParent":{"id":1}}}"### + ); + + // The default + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 2 }) { id }}"#), + @r###"{"data":{"createOneParent":{"id":2}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, r#"mutation { deleteOneParent(where: { id: 1 }) { id }}"#), + @r###"{"data":{"deleteOneParent":{"id":1}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, r#"query { findManyChild { id parent { id } }}"#), + @r###"{"data":{"findManyChild":[{"id":1,"parent":{"id":2}}]}}"### + ); + + Ok(()) + } + + /// Deleting the parent reconnects the child to the default and fails (the default doesn't exist). + #[connector_test(schema(optional_with_default))] + async fn delete_parent_no_exist_fail(runner: &Runner) -> TestResult<()> { + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 1, child: { create: { id: 1 }}}) { id }}"#), + @r###"{"data":{"createOneParent":{"id":1}}}"### + ); + + assert_error!( + runner, + "mutation { deleteOneParent(where: { id: 1 }) { id }}", + 2003, + "Foreign key constraint failed on the field" + ); + + Ok(()) + } + + /// Deleting the parent with no default for SetDefault nulls the FK. + #[connector_test(schema(optional_without_default), only(Postgres))] + async fn delete_parent_fail(runner: &Runner) -> TestResult<()> { + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 1, child: { create: { id: 1 }}}) { id }}"#), + @r###"{"data":{"createOneParent":{"id":1}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, r#"mutation { deleteOneParent(where: { id: 1 }) { id }}"#), + @r###"{"data":{"deleteOneParent":{"id":1}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, r#"query { findManyChild(where: { id: 1 }) { id parent_id }}"#), + @r###"{"data":{"findManyChild":[{"id":1,"parent_id":null}]}}"### + ); + + Ok(()) + } +} + +#[test_suite(suite = "setdefault_onD_1toM_req", exclude(MongoDb, MySQL))] +mod one2many_req { + fn required_with_default() -> String { + let schema = indoc! { + r#"model Parent { + #id(id, Int, @id) + children Child[] + } + + model Child { + #id(id, Int, @id) + parent_id Int @default(2) + parent Parent @relation(fields: [parent_id], references: [id], onDelete: SetDefault) + }"# + }; + + schema.to_owned() + } + + fn required_without_default() -> String { + let schema = indoc! { + r#"model Parent { + #id(id, Int, @id) + children Child[] + } + + model Child { + #id(id, Int, @id) + parent_id Int + parent Parent @relation(fields: [parent_id], references: [id], onDelete: SetDefault) + }"# + }; + + schema.to_owned() + } + + /// Deleting the parent reconnects the children to the default. 
+ #[connector_test(schema(required_with_default))] + async fn delete_parent(runner: &Runner) -> TestResult<()> { + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 1, children: { create: { id: 1 }}}) { id }}"#), + @r###"{"data":{"createOneParent":{"id":1}}}"### + ); + + // The default + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 2 }) { id }}"#), + @r###"{"data":{"createOneParent":{"id":2}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, r#"mutation { deleteOneParent(where: { id: 1 }) { id }}"#), + @r###"{"data":{"deleteOneParent":{"id":1}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, r#"query { findManyChild { id parent { id } }}"#), + @r###"{"data":{"findManyChild":[{"id":1,"parent":{"id":2}}]}}"### + ); + + Ok(()) + } + + /// Deleting the parent reconnects the child to the default and fails (the default doesn't exist). + #[connector_test(schema(required_with_default))] + async fn delete_parent_no_exist_fail(runner: &Runner) -> TestResult<()> { + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 1, children: { create: { id: 1 }}}) { id }}"#), + @r###"{"data":{"createOneParent":{"id":1}}}"### + ); + + assert_error!( + runner, + "mutation { deleteOneParent(where: { id: 1 }) { id }}", + 2003, + "Foreign key constraint failed on the field" + ); + + Ok(()) + } + + /// Deleting the parent with no default for SetDefault fails. + /// Only postgres allows setting no default for a SetDefault FK. + #[connector_test(schema(required_without_default), only(Postgres))] + async fn delete_parent_fail(runner: &Runner) -> TestResult<()> { + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 1, children: { create: { id: 1 }}}) { id }}"#), + @r###"{"data":{"createOneParent":{"id":1}}}"### + ); + + assert_error!( + runner, + "mutation { deleteOneParent(where: { id: 1 }) { id }}", + 2011, + "Null constraint violation on the fields" + ); + + Ok(()) + } +} + +#[test_suite(suite = "setdefault_onD_1toM_opt", exclude(MongoDb, MySQL))] +mod one2many_opt { + fn optional_with_default() -> String { + let schema = indoc! { + r#"model Parent { + #id(id, Int, @id) + children Child[] + } + + model Child { + #id(id, Int, @id) + parent_id Int? @default(2) + parent Parent? @relation(fields: [parent_id], references: [id], onDelete: SetDefault) + }"# + }; + + schema.to_owned() + } + + fn optional_without_default() -> String { + let schema = indoc! { + r#"model Parent { + #id(id, Int, @id) + children Child[] + } + + model Child { + #id(id, Int, @id) + parent_id Int? + parent Parent? @relation(fields: [parent_id], references: [id], onDelete: SetDefault) + }"# + }; + + schema.to_owned() + } + + /// Deleting the parent reconnects the child to the default. 
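The one-to-many modules that follow repeat the pattern; note also the `*_without_default` variants: when the column has no default, `SET DEFAULT` degenerates to writing NULL, which succeeds for a nullable FK and fails with a null-constraint violation (P2011) for a required one, mirroring the assertions above. A sketch of that branch:

```rust
// Sketch of the two no-column-default outcomes; the error text mirrors the
// assert_error! expectations in the *_without_default tests.
fn set_default_without_column_default(fk_is_nullable: bool) -> Result<Option<i32>, String> {
    if fk_is_nullable {
        Ok(None) // parent_id becomes NULL
    } else {
        Err("Null constraint violation on the fields: (`parent_id`)".to_string())
    }
}

fn main() {
    assert_eq!(set_default_without_column_default(true), Ok(None));
    assert!(set_default_without_column_default(false).is_err());
}
```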
+ #[connector_test(schema(optional_with_default))] + async fn delete_parent(runner: &Runner) -> TestResult<()> { + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 1, children: { create: { id: 1 }}}) { id }}"#), + @r###"{"data":{"createOneParent":{"id":1}}}"### + ); + + // The default + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 2 }) { id }}"#), + @r###"{"data":{"createOneParent":{"id":2}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, r#"mutation { deleteOneParent(where: { id: 1 }) { id }}"#), + @r###"{"data":{"deleteOneParent":{"id":1}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, r#"query { findManyChild { id parent { id } }}"#), + @r###"{"data":{"findManyChild":[{"id":1,"parent":{"id":2}}]}}"### + ); + + Ok(()) + } + + /// Deleting the parent reconnects the child to the default and fails (the default doesn't exist). + #[connector_test(schema(optional_with_default))] + async fn delete_parent_no_exist_fail(runner: &Runner) -> TestResult<()> { + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 1, children: { create: { id: 1 }}}) { id }}"#), + @r###"{"data":{"createOneParent":{"id":1}}}"### + ); + + assert_error!( + runner, + "mutation { deleteOneParent(where: { id: 1 }) { id }}", + 2003, + "Foreign key constraint failed on the field" + ); + + Ok(()) + } + + /// Deleting the parent with no default for SetDefault nulls the FK. + #[connector_test(schema(optional_without_default), only(Postgres))] + async fn delete_parent_fail(runner: &Runner) -> TestResult<()> { + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 1, children: { create: { id: 1 }}}) { id }}"#), + @r###"{"data":{"createOneParent":{"id":1}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, r#"mutation { deleteOneParent(where: { id: 1 }) { id }}"#), + @r###"{"data":{"deleteOneParent":{"id":1}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, r#"query { findManyChild(where: { id: 1 }) { id parent_id }}"#), + @r###"{"data":{"findManyChild":[{"id":1,"parent_id":null}]}}"### + ); + + Ok(()) + } +} diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_delete/set_null.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_delete/set_null.rs new file mode 100644 index 000000000000..5257a5a6b998 --- /dev/null +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_delete/set_null.rs @@ -0,0 +1,178 @@ +//! Only Postgres allows SetNull on a non-nullable FK at all, rest fail during migration. + +use indoc::indoc; +use query_engine_tests::*; + +#[test_suite(suite = "setnull_onD_1to1_req", schema(required), only(Postgres))] +mod one2one_req { + fn required() -> String { + let schema = indoc! { + r#"model Parent { + #id(id, Int, @id) + child Child? + } + + model Child { + #id(id, Int, @id) + parent_id Int + parent Parent @relation(fields: [parent_id], references: [id], onDelete: SetNull) + }"# + }; + + schema.to_owned() + } + + /// Deleting the parent must fail if a child is connected (because of null key violation). 
+ #[connector_test] + async fn delete_parent_failure(runner: &Runner) -> TestResult<()> { + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 1, child: { create: { id: 1 }}}) { id }}"#), + @r###"{"data":{"createOneParent":{"id":1}}}"### + ); + + // `onDelete: SetNull` would cause `null` on `parent_id`, throwing an error. + assert_error!( + runner, + "mutation { deleteOneParent(where: { id: 1 }) { id }}", + 2011, + "Null constraint violation on the fields: (`parent_id`)" + ); + + assert_error!( + runner, + "mutation { deleteManyParent(where: { id: 1 }) { count }}", + 2011, + "Null constraint violation on the fields: (`parent_id`)" + ); + + Ok(()) + } +} + +#[test_suite(suite = "setnull_onD_1to1_opt", schema(optional), exclude(MongoDb))] +mod one2one_opt { + fn optional() -> String { + let schema = indoc! { + r#"model Parent { + #id(id, Int, @id) + child Child? + } + + model Child { + #id(id, Int, @id) + parent_id Int? + parent Parent? @relation(fields: [parent_id], references: [id], onDelete: SetNull) + }"# + }; + + schema.to_owned() + } + + /// Deleting the parent succeeds and sets the FK null. + #[connector_test] + async fn delete_parent(runner: &Runner) -> TestResult<()> { + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 1, child: { create: { id: 1 }}}) { id }}"#), + @r###"{"data":{"createOneParent":{"id":1}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, r#"mutation { deleteOneParent(where: { id: 1 }) { id }}"#), + @r###"{"data":{"deleteOneParent":{"id":1}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, r#"query { findManyChild { id parent_id }}"#), + @r###"{"data":{"findManyChild":[{"id":1,"parent_id":null}]}}"### + ); + + Ok(()) + } +} + +#[test_suite(suite = "setnull_onD_1toM_req", schema(required), only(Postgres))] +mod one2many_req { + fn required() -> String { + let schema = indoc! { + r#"model Parent { + #id(id, Int, @id) + children Child[] + } + + model Child { + #id(id, Int, @id) + parent_id Int + parent Parent @relation(fields: [parent_id], references: [id], onDelete: SetNull) + }"# + }; + + schema.to_owned() + } + + /// Deleting the parent must fail if a child is connected (because of null key violation). + #[connector_test] + async fn delete_parent_failure(runner: &Runner) -> TestResult<()> { + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 1, children: { create: { id: 1 }}}) { id }}"#), + @r###"{"data":{"createOneParent":{"id":1}}}"### + ); + + // `onDelete: SetNull` would cause `null` on `parent_id`, throwing an error. + assert_error!( + runner, + "mutation { deleteOneParent(where: { id: 1 }) { id }}", + 2011, + "Null constraint violation on the fields: (`parent_id`)" + ); + + assert_error!( + runner, + "mutation { deleteManyParent(where: { id: 1 }) { count }}", + 2011, + "Null constraint violation on the fields: (`parent_id`)" + ); + + Ok(()) + } +} + +#[test_suite(suite = "setnull_onD_1toM_opt", schema(optional), exclude(MongoDb))] +mod one2many_opt { + fn optional() -> String { + let schema = indoc! { + r#"model Parent { + #id(id, Int, @id) + children Child[] + } + + model Child { + #id(id, Int, @id) + parent_id Int? + parent Parent? @relation(fields: [parent_id], references: [id], onDelete: SetNull) + }"# + }; + + schema.to_owned() + } + + /// Deleting the parent succeeds and sets the FK null.
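+ /// Here `parent_id Int?` is nullable, so the children are disconnected instead of blocking the delete.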
+ #[connector_test] + async fn delete_parent(runner: &Runner) -> TestResult<()> { + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 1, children: { create: { id: 1 }}}) { id }}"#), + @r###"{"data":{"createOneParent":{"id":1}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, r#"mutation { deleteOneParent(where: { id: 1 }) { id }}"#), + @r###"{"data":{"deleteOneParent":{"id":1}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, r#"query { findManyChild { id parent_id }}"#), + @r###"{"data":{"findManyChild":[{"id":1,"parent_id":null}]}}"### + ); + + Ok(()) + } +} diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_update/cascade.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_update/cascade.rs new file mode 100644 index 000000000000..98f8495a88c4 --- /dev/null +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_update/cascade.rs @@ -0,0 +1,170 @@ +use indoc::indoc; +use query_engine_tests::*; + +#[test_suite(suite = "cascade_onU_1to1_req", schema(required), exclude(MongoDb))] +mod one2one_req { + fn required() -> String { + let schema = indoc! { + r#"model Parent { + #id(id, Int, @id) + uniq String @unique + child Child? + } + + model Child { + #id(id, Int, @id) + parent_uniq String + parent Parent @relation(fields: [parent_uniq], references: [uniq], onUpdate: Cascade) + }"# + }; + + schema.to_owned() + } + + /// Updating the parent updates the child as well. + #[connector_test] + async fn update_parent(runner: &Runner) -> TestResult<()> { + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 1, uniq: "1", child: { create: { id: 1 }}}) { id }}"#), + @r###"{"data":{"createOneParent":{"id":1}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, r#"mutation { updateOneParent(where: { id: 1 }, data: { uniq: "1u" }) { uniq }}"#), + @r###"{"data":{"updateOneParent":{"uniq":"1u"}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, "query { findManyParent { uniq child { parent_uniq } }}"), + @r###"{"data":{"findManyParent":[{"uniq":"1u","child":{"parent_uniq":"1u"}}]}}"### + ); + + Ok(()) + } +} + +#[test_suite(suite = "cascade_onU_1to1_opt", schema(optional), exclude(MongoDb))] +mod one2one_opt { + fn optional() -> String { + let schema = indoc! { + r#"model Parent { + #id(id, Int, @id) + uniq String @unique + child Child? + } + + model Child { + #id(id, Int, @id) + parent_uniq String? + parent Parent? @relation(fields: [parent_uniq], references: [uniq], onUpdate: Cascade) + }"# + }; + + schema.to_owned() + } + + /// Updating the parent updates the child as well. 
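+ /// `onUpdate: Cascade` propagates the new `uniq` value into `parent_uniq` on the connected child.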
+ #[connector_test] + async fn update_parent(runner: &Runner) -> TestResult<()> { + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 1, uniq: "1", child: { create: { id: 1 }}}) { id }}"#), + @r###"{"data":{"createOneParent":{"id":1}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, r#"mutation { updateOneParent(where: { id: 1 }, data: { uniq: "1u" }) { uniq }}"#), + @r###"{"data":{"updateOneParent":{"uniq":"1u"}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, "query { findManyParent { uniq child { parent_uniq } }}"), + @r###"{"data":{"findManyParent":[{"uniq":"1u","child":{"parent_uniq":"1u"}}]}}"### + ); + + Ok(()) + } +} + +#[test_suite(suite = "cascade_onU_1toM_req", schema(required), exclude(MongoDb))] +mod one2many_req { + fn required() -> String { + let schema = indoc! { + r#"model Parent { + #id(id, Int, @id) + uniq String @unique + children Child[] + } + + model Child { + #id(id, Int, @id) + parent_uniq String + parent Parent @relation(fields: [parent_uniq], references: [uniq], onUpdate: Cascade) + }"# + }; + + schema.to_owned() + } + + /// Updating the parent updates the child as well. + #[connector_test] + async fn update_parent(runner: &Runner) -> TestResult<()> { + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 1, uniq: "1", children: { create: { id: 1 }}}) { id }}"#), + @r###"{"data":{"createOneParent":{"id":1}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, r#"mutation { updateOneParent(where: { id: 1 }, data: { uniq: "1u" }) { uniq }}"#), + @r###"{"data":{"updateOneParent":{"uniq":"1u"}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, "query { findManyParent { uniq children { parent_uniq } }}"), + @r###"{"data":{"findManyParent":[{"uniq":"1u","children":[{"parent_uniq":"1u"}]}]}}"### + ); + + Ok(()) + } +} + +#[test_suite(suite = "cascade_onU_1toM_opt", schema(optional), exclude(MongoDb))] +mod one2many_opt { + fn optional() -> String { + let schema = indoc! { + r#"model Parent { + #id(id, Int, @id) + uniq String @unique + children Child[] + } + + model Child { + #id(id, Int, @id) + parent_uniq String? + parent Parent? @relation(fields: [parent_uniq], references: [uniq], onUpdate: Cascade) + }"# + }; + + schema.to_owned() + } + + /// Updating the parent updates the child as well. 
+ #[connector_test] + async fn update_parent(runner: &Runner) -> TestResult<()> { + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 1, uniq: "1", children: { create: { id: 1 }}}) { id }}"#), + @r###"{"data":{"createOneParent":{"id":1}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, r#"mutation { updateOneParent(where: { id: 1 }, data: { uniq: "1u" }) { uniq }}"#), + @r###"{"data":{"updateOneParent":{"uniq":"1u"}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, "query { findManyParent { uniq children { parent_uniq } }}"), + @r###"{"data":{"findManyParent":[{"uniq":"1u","children":[{"parent_uniq":"1u"}]}]}}"### + ); + + Ok(()) + } +} diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_update/mod.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_update/mod.rs new file mode 100644 index 000000000000..1b8d08fa7328 --- /dev/null +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_update/mod.rs @@ -0,0 +1,5 @@ +mod cascade; +mod no_action; +mod restrict; +mod set_default; +mod set_null; diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_update/no_action.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_update/no_action.rs new file mode 100644 index 000000000000..a04234423173 --- /dev/null +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_update/no_action.rs @@ -0,0 +1,350 @@ +use indoc::indoc; +use query_engine_tests::*; + +#[test_suite(suite = "noaction_onU_1to1_req", schema(required))] +mod one2one_req { + fn required() -> String { + let schema = indoc! { + r#"model Parent { + #id(id, Int, @id) + uniq String @unique + child Child? + } + + model Child { + #id(id, Int, @id) + parent_uniq String + parent Parent @relation(fields: [parent_uniq], references: [uniq], onUpdate: NoAction) + }"# + }; + + schema.to_owned() + } + + /// Updating the parent must fail if a child is connected. + #[connector_test(exclude(MongoDb))] + async fn update_parent_failure(runner: &Runner) -> TestResult<()> { + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 1, uniq: "1", child: { create: { id: 1 }}}) { id }}"#), + @r###"{"data":{"createOneParent":{"id":1}}}"### + ); + + assert_error!( + runner, + r#"mutation { updateOneParent(where: { id: 1 }, data: { uniq: "u1" }) { id }}"#, + 2003, + "Foreign key constraint failed on the field" + ); + + assert_error!( + runner, + r#"mutation { updateManyParent(where: { id: 1 }, data: { uniq: "u1" }) { count }}"#, + 2003, + "Foreign key constraint failed on the field" + ); + + Ok(()) + } + + /// Updating the parent leaves the data in an integrity-violating state. + /// All supported databases except MongoDB throw key violations.
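+ /// MongoDB enforces no foreign keys, so the child keeps the now-dangling `parent_uniq` value `"1"`.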
+ #[connector_test(only(MongoDb))] + async fn update_parent_violation(runner: &Runner) -> TestResult<()> { + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 1, uniq: "1", child: { create: { id: 1 }}}) { id }}"#), + @r###"{"data":{"createOneParent":{"id":1}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, r#"mutation { updateOneParent(where: { id: 1 }, data: { uniq: "u1" }) { id }}"#), + @r###"{"data":{"updateOneParent":{"id":1}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, r#"query { findManyChild { parent_uniq }}"#), + @r###"{"data":{"findManyChild":[{"parent_uniq":"1"}]}}"### + ); + + Ok(()) + } +} + +#[test_suite(suite = "noaction_onU_1to1_opt", schema(optional), exclude(MongoDb))] +mod one2one_opt { + fn optional() -> String { + let schema = indoc! { + r#"model Parent { + #id(id, Int, @id) + uniq String @unique + child Child? + } + + model Child { + #id(id, Int, @id) + parent_uniq String? + parent Parent? @relation(fields: [parent_uniq], references: [uniq], onUpdate: NoAction) + }"# + }; + + schema.to_owned() + } + + /// Updating the parent must fail if a child is connected. + #[connector_test] + async fn update_parent_failure(runner: &Runner) -> TestResult<()> { + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 1, uniq: "1", child: { create: { id: 1 }}}) { id }}"#), + @r###"{"data":{"createOneParent":{"id":1}}}"### + ); + + assert_error!( + runner, + r#"mutation { updateOneParent(where: { id: 1 }, data: { uniq: "u1" }) { id }}"#, + 2003, + "Foreign key constraint failed on the field" + ); + + assert_error!( + runner, + r#"mutation { updateManyParent(where: { id: 1 }, data: { uniq: "u1" }) { count }}"#, + 2003, + "Foreign key constraint failed on the field" + ); + + Ok(()) + } + + /// Updating the parent succeeds if no child is connected. + #[connector_test] + async fn update_parent(runner: &Runner) -> TestResult<()> { + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 1, uniq: "1" }) { id }}"#), + @r###"{"data":{"createOneParent":{"id":1}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 2, uniq: "2" }) { id }}"#), + @r###"{"data":{"createOneParent":{"id":2}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, r#"mutation { updateOneParent(where: { id: 1 }, data: { uniq: "u1" }) { id }}"#), + @r###"{"data":{"updateOneParent":{"id":1}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, r#"mutation { updateManyParent(where: { id: 1 }, data: { uniq: "u1" }) { count }}"#), + @r###"{"data":{"updateManyParent":{"count":1}}}"### + ); + + Ok(()) + } + + /// Updating the parent leaves the data in an integrity-violating state. + /// All supported databases except MongoDB throw key violations.
+ #[connector_test(only(MongoDb))] + async fn update_parent_violation(runner: &Runner) -> TestResult<()> { + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 1, uniq: "1", child: { create: { id: 1 }}}) { id }}"#), + @r###"{"data":{"createOneParent":{"id":1}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, r#"mutation { updateOneParent(where: { id: 1 }, data: { uniq: "u1" }) { id }}"#), + @r###"{"data":{"updateOneParent":{"id":1}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, r#"query { findManyChild { parent_uniq }}"#), + @r###"{"data":{"findManyChild":[{"parent_uniq":"1"}]}}"### + ); + + Ok(()) + } +} + +#[test_suite(suite = "noaction_onU_1toM_req", schema(required), exclude(MongoDb))] +mod one2many_req { + fn required() -> String { + let schema = indoc! { + r#"model Parent { + #id(id, Int, @id) + uniq String @unique + children Child[] + } + + model Child { + #id(id, Int, @id) + parent_uniq String + parent Parent @relation(fields: [parent_uniq], references: [uniq], onUpdate: NoAction) + }"# + }; + + schema.to_owned() + } + + /// Updating the parent must fail if a child is connected. + #[connector_test] + async fn update_parent_failure(runner: &Runner) -> TestResult<()> { + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 1, uniq: "1", children: { create: { id: 1 }}}) { id }}"#), + @r###"{"data":{"createOneParent":{"id":1}}}"### + ); + + assert_error!( + runner, + r#"mutation { updateOneParent(where: { id: 1 }, data: { uniq: "u1" }) { id }}"#, + 2003, + "Foreign key constraint failed on the field" + ); + + assert_error!( + runner, + r#"mutation { updateManyParent(where: { id: 1 }, data: { uniq: "u1" }) { count }}"#, + 2003, + "Foreign key constraint failed on the field" + ); + + Ok(()) + } + + /// Updating the parent succeeds if no child is connected. + #[connector_test] + async fn update_parent(runner: &Runner) -> TestResult<()> { + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 1, uniq: "1" }) { id }}"#), + @r###"{"data":{"createOneParent":{"id":1}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 2, uniq: "2" }) { id }}"#), + @r###"{"data":{"createOneParent":{"id":2}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, r#"mutation { updateOneParent(where: { id: 1 }, data: { uniq: "u1" }) { id }}"#), + @r###"{"data":{"updateOneParent":{"id":1}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, r#"mutation { updateManyParent(where: { id: 2 }, data: { uniq: "u2" }) { count }}"#), + @r###"{"data":{"updateManyParent":{"count":1}}}"### + ); + + Ok(()) + } + + /// Updating the parent leaves the data in an integrity-violating state.
+ #[connector_test(only(MongoDb))] + async fn update_parent_violation(runner: &Runner) -> TestResult<()> { + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 1, uniq: "1", children: { create: { id: 1 }}}) { id }}"#), + @r###"{"data":{"createOneParent":{"id":1}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, r#"mutation { updateOneParent(where: { id: 1 }, data: { uniq: "u1" }) { id }}"#), + @r###"{"data":{"updateOneParent":{"id":1}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, r#"query { findManyChild { parent_uniq }}"#), + @r###"{"data":{"findManyChild":[{"parent_uniq":"1"}]}}"### + ); + + Ok(()) + } +} + +#[test_suite(suite = "noaction_onU_1toM_opt", schema(optional), exclude(MongoDb))] +mod one2many_opt { + fn optional() -> String { + let schema = indoc! { + r#"model Parent { + #id(id, Int, @id) + uniq String @unique + children Child[] + } + + model Child { + #id(id, Int, @id) + parent_uniq String? + parent Parent? @relation(fields: [parent_uniq], references: [uniq], onUpdate: NoAction) + }"# + }; + + schema.to_owned() + } + + /// Updating the parent must fail if a child is connected. + #[connector_test] + async fn update_parent_failure(runner: &Runner) -> TestResult<()> { + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 1, uniq: "1", children: { create: { id: 1 }}}) { id }}"#), + @r###"{"data":{"createOneParent":{"id":1}}}"### + ); + + assert_error!( + runner, + r#"mutation { updateOneParent(where: { id: 1 }, data: { uniq: "u1" }) { id }}"#, + 2003, + "Foreign key constraint failed on the field" + ); + + assert_error!( + runner, + r#"mutation { updateManyParent(where: { id: 1 }, data: { uniq: "u1" }) { count }}"#, + 2003, + "Foreign key constraint failed on the field" + ); + + Ok(()) + } + + /// Updating the parent succeeds if no child is connected. + #[connector_test] + async fn update_parent(runner: &Runner) -> TestResult<()> { + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 1, uniq: "1" }) { id }}"#), + @r###"{"data":{"createOneParent":{"id":1}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 2, uniq: "2" }) { id }}"#), + @r###"{"data":{"createOneParent":{"id":2}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, r#"mutation { updateOneParent(where: { id: 1 }, data: { uniq: "u1" }) { id }}"#), + @r###"{"data":{"updateOneParent":{"id":1}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, r#"mutation { updateManyParent(where: { id: 2 }, data: { uniq: "u2" }) { count }}"#), + @r###"{"data":{"updateManyParent":{"count":1}}}"### + ); + + Ok(()) + } + + /// Updating the parent leaves the data in an integrity-violating state.
+ #[connector_test(only(MongoDb))] + async fn update_parent_violation(runner: &Runner) -> TestResult<()> { + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 1, uniq: "1", children: { create: { id: 1 }}}) { id }}"#), + @r###"{"data":{"createOneParent":{"id":1}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, r#"mutation { updateOneParent(where: { id: 1 }, data: { uniq: "u1" }) { id }}"#), + @r###"{"data":{"updateOneParent":{"id":1}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, r#"query { findManyChild { parent_uniq }}"#), + @r###"{"data":{"findManyChild":[{"parent_uniq":"1"}]}}"### + ); + + Ok(()) + } +} diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_update/restrict.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_update/restrict.rs new file mode 100644 index 000000000000..f6fe4215ff6a --- /dev/null +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_update/restrict.rs @@ -0,0 +1,270 @@ +//! SQL Server doesn't support Restrict. + +use indoc::indoc; +use query_engine_tests::*; + +#[test_suite(suite = "restrict_onU_1to1_req", schema(required), exclude(SqlServer, MongoDb))] +mod one2one_req { + fn required() -> String { + let schema = indoc! { + r#"model Parent { + #id(id, Int, @id) + uniq String @unique + child Child? + } + + model Child { + #id(id, Int, @id) + parent_uniq String + parent Parent @relation(fields: [parent_uniq], references: [uniq], onUpdate: Restrict) + }"# + }; + + schema.to_owned() + } + + /// Updating the parent must fail if a child is connected. + #[connector_test] + async fn update_parent_failure(runner: &Runner) -> TestResult<()> { + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 1, uniq: "1", child: { create: { id: 1 }}}) { id }}"#), + @r###"{"data":{"createOneParent":{"id":1}}}"### + ); + + match runner.connector() { + // ConnectorTag::MongoDb(_) => assert_error!( + // runner, + // r#"mutation { updateOneParent(where: { id: 1 }, data: { uniq: "u1" }) { id }}"#, + // 2014, + // "The change you are trying to make would violate the required relation 'ChildToParent' between the `Child` and `Parent` models." + // ), + _ => assert_error!( + runner, + r#"mutation { updateOneParent(where: { id: 1 }, data: { uniq: "u1" }) { id }}"#, + 2003, + "Foreign key constraint failed on the field" + ), + }; + + Ok(()) + } +} + +#[test_suite(suite = "restrict_onU_1to1_opt", schema(optional), exclude(SqlServer, MongoDb))] +mod one2one_opt { + fn optional() -> String { + let schema = indoc! { + r#"model Parent { + #id(id, Int, @id) + uniq String @unique + child Child? + } + + model Child { + #id(id, Int, @id) + parent_uniq String? + parent Parent? @relation(fields: [parent_uniq], references: [uniq], onUpdate: Restrict) + }"# + }; + + schema.to_owned() + } + + /// Updating the parent must fail if a child is connected.
+ #[connector_test] + async fn update_parent_failure(runner: &Runner) -> TestResult<()> { + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 1, uniq: "1", child: { create: { id: 1 }}}) { id }}"#), + @r###"{"data":{"createOneParent":{"id":1}}}"### + ); + + match runner.connector() { + // ConnectorTag::MongoDb(_) => assert_error!( + // runner, + // r#"mutation { updateOneParent(where: { id: 1 }, data: { uniq: "u1" }) { id }}"#, + // 2014, + // "The change you are trying to make would violate the required relation 'ChildToParent' between the `Child` and `Parent` models." + // ), + _ => assert_error!( + runner, + r#"mutation { updateOneParent(where: { id: 1 }, data: { uniq: "u1" }) { id }}"#, + 2003, + "Foreign key constraint failed on the field" + ), + }; + + Ok(()) + } + + /// Updating the parent succeeds if no child is connected. + #[connector_test] + async fn update_parent(runner: &Runner) -> TestResult<()> { + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 1, uniq: "1" }) { id }}"#), + @r###"{"data":{"createOneParent":{"id":1}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 2, uniq: "2" }) { id }}"#), + @r###"{"data":{"createOneParent":{"id":2}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, r#"mutation { updateOneParent(where: { id: 1 }, data: { uniq: "u1" }) { id }}"#), + @r###"{"data":{"updateOneParent":{"id":1}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, r#"mutation { updateManyParent(where: { id: 2 }, data: { uniq: "u2" }) { count }}"#), + @r###"{"data":{"updateManyParent":{"count":1}}}"### + ); + + Ok(()) + } +} + +#[test_suite(suite = "restrict_onU_1toM_req", schema(required), exclude(SqlServer, MongoDb))] +mod one2many_req { + fn required() -> String { + let schema = indoc! { + r#"model Parent { + #id(id, Int, @id) + uniq String @unique + children Child[] + } + + model Child { + #id(id, Int, @id) + parent_uniq String + parent Parent @relation(fields: [parent_uniq], references: [uniq], onUpdate: Restrict) + }"# + }; + + schema.to_owned() + } + + /// Updating the parent must fail if a child is connected. + #[connector_test] + async fn update_parent_failure(runner: &Runner) -> TestResult<()> { + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 1, uniq: "1", children: { create: { id: 1 }}}) { id }}"#), + @r###"{"data":{"createOneParent":{"id":1}}}"### + ); + + match runner.connector() { + // ConnectorTag::MongoDb(_) => assert_error!( + // runner, + // r#"mutation { updateOneParent(where: { id: 1 }, data: { uniq: "u1" }) { id }}"#, + // 2014, + // "The change you are trying to make would violate the required relation 'ChildToParent' between the `Child` and `Parent` models." + // ), + _ => assert_error!( + runner, + r#"mutation { updateOneParent(where: { id: 1 }, data: { uniq: "u1" }) { id }}"#, + 2003, + "Foreign key constraint failed on the field" + ), + }; + + Ok(()) + } + + /// Updating the parent succeeds if no child is connected.
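+ /// `Restrict` only blocks the key change while a referencing child row exists; with no children the update goes through.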
+ #[connector_test] + async fn update_parent(runner: &Runner) -> TestResult<()> { + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 1, uniq: "1" }) { id }}"#), + @r###"{"data":{"createOneParent":{"id":1}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 2, uniq: "2" }) { id }}"#), + @r###"{"data":{"createOneParent":{"id":2}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, r#"mutation { updateOneParent(where: { id: 1 }, data: { uniq: "u1" }) { id }}"#), + @r###"{"data":{"updateOneParent":{"id":1}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, r#"mutation { updateManyParent(where: { id: 2 }, data: { uniq: "u2" }) { count }}"#), + @r###"{"data":{"updateManyParent":{"count":1}}}"### + ); + + Ok(()) + } +} + +#[test_suite(suite = "restrict_onU_1toM_opt", schema(optional), exclude(SqlServer, MongoDb))] +mod one2many_opt { + fn optional() -> String { + let schema = indoc! { + r#"model Parent { + #id(id, Int, @id) + uniq String @unique + children Child[] + } + + model Child { + #id(id, Int, @id) + parent_uniq String? + parent Parent? @relation(fields: [parent_uniq], references: [uniq], onUpdate: Restrict) + }"# + }; + + schema.to_owned() + } + + /// Updating the parent must fail if a child is connected. + #[connector_test] + async fn update_parent_failure(runner: &Runner) -> TestResult<()> { + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 1, uniq: "1", children: { create: { id: 1 }}}) { id }}"#), + @r###"{"data":{"createOneParent":{"id":1}}}"### + ); + + match runner.connector() { + // ConnectorTag::MongoDb(_) => assert_error!( + // runner, + // r#"mutation { updateOneParent(where: { id: 1 }, data: { uniq: "u1" }) { id }}"#, + // 2014, + // "The change you are trying to make would violate the required relation 'ChildToParent' between the `Child` and `Parent` models." + // ), + _ => assert_error!( + runner, + r#"mutation { updateOneParent(where: { id: 1 }, data: { uniq: "u1" }) { id }}"#, + 2003, + "Foreign key constraint failed on the field" + ), + }; + + Ok(()) + } + + /// Updating the parent succeeds if no child is connected. + #[connector_test] + async fn update_parent(runner: &Runner) -> TestResult<()> { + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 1, uniq: "1" }) { id }}"#), + @r###"{"data":{"createOneParent":{"id":1}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 2, uniq: "2" }) { id }}"#), + @r###"{"data":{"createOneParent":{"id":2}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, r#"mutation { updateOneParent(where: { id: 1 }, data: { uniq: "u1" }) { id }}"#), + @r###"{"data":{"updateOneParent":{"id":1}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, r#"mutation { updateManyParent(where: { id: 2 }, data: { uniq: "u2" }) { count }}"#), + @r###"{"data":{"updateManyParent":{"count":1}}}"### + ); + + Ok(()) + } +} diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_update/set_default.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_update/set_default.rs new file mode 100644 index 000000000000..ce39539eca16 --- /dev/null +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_update/set_default.rs @@ -0,0 +1,419 @@ +//!
MySQL doesn't support SetDefault for InnoDB (which is our only supported engine at the moment). +use indoc::indoc; +use query_engine_tests::*; + +#[test_suite(suite = "setdefault_onU_1to1_req", exclude(MongoDb, MySQL))] +mod one2one_req { + fn required_with_default() -> String { + let schema = indoc! { + r#"model Parent { + #id(id, Int, @id) + uniq String @unique + child Child? + } + + model Child { + #id(id, Int, @id) + parent_uniq String @default("2") + parent Parent @relation(fields: [parent_uniq], references: [uniq], onUpdate: SetDefault) + }"# + }; + + schema.to_owned() + } + + fn required_without_default() -> String { + let schema = indoc! { + r#"model Parent { + #id(id, Int, @id) + uniq String @unique + child Child? + } + + model Child { + #id(id, Int, @id) + parent_uniq String + parent Parent @relation(fields: [parent_uniq], references: [uniq], onUpdate: SetDefault) + }"# + }; + + schema.to_owned() + } + + /// Updating the parent reconnects the child to the default. + #[connector_test(schema(required_with_default))] + async fn update_parent(runner: &Runner) -> TestResult<()> { + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 1, uniq: "1", child: { create: { id: 1 }}}) { id }}"#), + @r###"{"data":{"createOneParent":{"id":1}}}"### + ); + + // The default + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 2, uniq: "2" }) { id }}"#), + @r###"{"data":{"createOneParent":{"id":2}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, r#"mutation { updateOneParent(where: { id: 1 }, data: { uniq: "u1" }) { id }}"#), + @r###"{"data":{"updateOneParent":{"id":1}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, r#"query { findManyChild { id parent { id } }}"#), + @r###"{"data":{"findManyChild":[{"id":1,"parent":{"id":2}}]}}"### + ); + + Ok(()) + } + + /// Updating the parent reconnects the child to the default and fails (the default doesn't exist). + #[connector_test(schema(required_with_default))] + async fn update_parent_no_exist_fail(runner: &Runner) -> TestResult<()> { + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 1, uniq: "1", child: { create: { id: 1 }}}) { id }}"#), + @r###"{"data":{"createOneParent":{"id":1}}}"### + ); + + assert_error!( + runner, + r#"mutation { updateOneParent(where: { id: 1 }, data: { uniq: "u1" }) { id }}"#, + 2003, + "Foreign key constraint failed on the field" + ); + + Ok(()) + } + + /// Updating the parent with no default for SetDefault fails. + /// Only postgres allows setting no default for a SetDefault FK. + #[connector_test(schema(required_without_default), only(Postgres))] + async fn update_parent_fail(runner: &Runner) -> TestResult<()> { + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 1, uniq: "1", child: { create: { id: 1 }}}) { id }}"#), + @r###"{"data":{"createOneParent":{"id":1}}}"### + ); + + assert_error!( + runner, + r#"mutation { updateOneParent(where: { id: 1 }, data: { uniq: "u1" }) { id }}"#, + 2011, + "Null constraint violation on the fields" + ); + + Ok(()) + } +} + +#[test_suite(suite = "setdefault_onU_1to1_opt", exclude(MongoDb, MySQL))] +mod one2one_opt { + fn optional_with_default() -> String { + let schema = indoc! { + r#"model Parent { + #id(id, Int, @id) + uniq String @unique + child Child? + } + + model Child { + #id(id, Int, @id) + parent_uniq String? @default("2") + parent Parent? 
@relation(fields: [parent_uniq], references: [uniq], onUpdate: SetDefault) + }"# + }; + + schema.to_owned() + } + + fn optional_without_default() -> String { + let schema = indoc! { + r#"model Parent { + #id(id, Int, @id) + uniq String @unique + child Child? + } + + model Child { + #id(id, Int, @id) + parent_uniq String? + parent Parent? @relation(fields: [parent_uniq], references: [uniq], onUpdate: SetDefault) + }"# + }; + + schema.to_owned() + } + + /// Updating the parent reconnects the child to the default. + #[connector_test(schema(optional_with_default))] + async fn update_parent(runner: &Runner) -> TestResult<()> { + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 1, uniq: "1", child: { create: { id: 1 }}}) { id }}"#), + @r###"{"data":{"createOneParent":{"id":1}}}"### + ); + + // The default + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 2, uniq: "2" }) { id }}"#), + @r###"{"data":{"createOneParent":{"id":2}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, r#"mutation { updateOneParent(where: { id: 1 }, data: { uniq: "u1" }) { id }}"#), + @r###"{"data":{"updateOneParent":{"id":1}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, r#"query { findManyChild { id parent { id } }}"#), + @r###"{"data":{"findManyChild":[{"id":1,"parent":{"id":2}}]}}"### + ); + + Ok(()) + } + + /// Updating the parent reconnects the child to the default and fails (the default doesn't exist). + #[connector_test(schema(optional_with_default))] + async fn update_parent_no_exist_fail(runner: &Runner) -> TestResult<()> { + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 1, uniq: "1", child: { create: { id: 1 }}}) { id }}"#), + @r###"{"data":{"createOneParent":{"id":1}}}"### + ); + + assert_error!( + runner, + r#"mutation { updateOneParent(where: { id: 1 }, data: { uniq: "u1" }) { id }}"#, + 2003, + "Foreign key constraint failed on the field" + ); + + Ok(()) + } + + /// Updating the parent with no default for SetDefault nulls the FK. + #[connector_test(schema(optional_without_default), only(Postgres))] + async fn update_parent_fail(runner: &Runner) -> TestResult<()> { + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 1, uniq: "1", child: { create: { id: 1 }}}) { id }}"#), + @r###"{"data":{"createOneParent":{"id":1}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, r#"mutation { updateOneParent(where: { id: 1 }, data: { uniq: "u1" }) { id }}"#), + @r###"{"data":{"updateOneParent":{"id":1}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, r#"query { findManyChild(where: { id: 1 }) { id parent_uniq }}"#), + @r###"{"data":{"findManyChild":[{"id":1,"parent_uniq":null}]}}"### + ); + + Ok(()) + } +} + +#[test_suite(suite = "setdefault_onU_1toM_req", exclude(MongoDb, MySQL))] +mod one2many_req { + fn required_with_default() -> String { + let schema = indoc! { + r#"model Parent { + #id(id, Int, @id) + uniq String @unique + children Child[] + } + + model Child { + #id(id, Int, @id) + parent_uniq String @default("2") + parent Parent @relation(fields: [parent_uniq], references: [uniq], onUpdate: SetDefault) + }"# + }; + + schema.to_owned() + } + + fn required_without_default() -> String { + let schema = indoc!
{ + r#"model Parent { + #id(id, Int, @id) + uniq String @unique + children Child[] + } + + model Child { + #id(id, Int, @id) + parent_uniq String + parent Parent @relation(fields: [parent_uniq], references: [uniq], onUpdate: SetDefault) + }"# + }; + + schema.to_owned() + } + + /// Updating the parent reconnects the children to the default. + #[connector_test(schema(required_with_default))] + async fn update_parent(runner: &Runner) -> TestResult<()> { + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 1, uniq: "1", children: { create: { id: 1 }}}) { id }}"#), + @r###"{"data":{"createOneParent":{"id":1}}}"### + ); + + // The default + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 2, uniq: "2" }) { id }}"#), + @r###"{"data":{"createOneParent":{"id":2}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, r#"mutation { updateOneParent(where: { id: 1 }, data: { uniq: "u1" }) { id }}"#), + @r###"{"data":{"updateOneParent":{"id":1}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, r#"query { findManyChild { id parent { id } }}"#), + @r###"{"data":{"findManyChild":[{"id":1,"parent":{"id":2}}]}}"### + ); + + Ok(()) + } + + /// Updating the parent reconnects the child to the default and fails (the default doesn't exist). + #[connector_test(schema(required_with_default))] + async fn update_parent_no_exist_fail(runner: &Runner) -> TestResult<()> { + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 1, uniq: "1", children: { create: { id: 1 }}}) { id }}"#), + @r###"{"data":{"createOneParent":{"id":1}}}"### + ); + + assert_error!( + runner, + r#"mutation { updateOneParent(where: { id: 1 }, data: { uniq: "u1" }) { id }}"#, + 2003, + "Foreign key constraint failed on the field" + ); + + Ok(()) + } + + /// Updating the parent with no default for SetDefault fails. + /// Only postgres allows setting no default for a SetDefault FK. + #[connector_test(schema(required_without_default), only(Postgres))] + async fn update_parent_fail(runner: &Runner) -> TestResult<()> { + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 1, uniq: "1", children: { create: { id: 1 }}}) { id }}"#), + @r###"{"data":{"createOneParent":{"id":1}}}"### + ); + + assert_error!( + runner, + r#"mutation { updateOneParent(where: { id: 1 }, data: { uniq: "u1" }) { id }}"#, + 2011, + "Null constraint violation on the fields" + ); + + Ok(()) + } +} + +#[test_suite(suite = "setdefault_onU_1toM_opt", exclude(MongoDb, MySQL))] +mod one2many_opt { + fn optional_with_default() -> String { + let schema = indoc! { + r#"model Parent { + #id(id, Int, @id) + uniq String @unique + children Child[] + } + + model Child { + #id(id, Int, @id) + parent_uniq String? @default("2") + parent Parent? @relation(fields: [parent_uniq], references: [uniq], onUpdate: SetDefault) + }"# + }; + + schema.to_owned() + } + + fn optional_without_default() -> String { + let schema = indoc! { + r#"model Parent { + #id(id, Int, @id) + uniq String @unique + children Child[] + } + + model Child { + #id(id, Int, @id) + parent_uniq String? + parent Parent? @relation(fields: [parent_uniq], references: [uniq], onUpdate: SetDefault) + }"# + }; + + schema.to_owned() + } + + /// Updating the parent reconnects the child to the default. 
+ #[connector_test(schema(optional_with_default))] + async fn update_parent(runner: &Runner) -> TestResult<()> { + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 1, uniq: "1", children: { create: { id: 1 }}}) { id }}"#), + @r###"{"data":{"createOneParent":{"id":1}}}"### + ); + + // The default + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 2, uniq: "2" }) { id }}"#), + @r###"{"data":{"createOneParent":{"id":2}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, r#"mutation { updateOneParent(where: { id: 1 }, data: { uniq: "u1" }) { id }}"#), + @r###"{"data":{"updateOneParent":{"id":1}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, r#"query { findManyChild { id parent { id } }}"#), + @r###"{"data":{"findManyChild":[{"id":1,"parent":{"id":2}}]}}"### + ); + + Ok(()) + } + + /// Updating the parent reconnects the child to the default and fails (the default doesn't exist). + #[connector_test(schema(optional_with_default))] + async fn update_parent_no_exist_fail(runner: &Runner) -> TestResult<()> { + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 1, uniq: "1", children: { create: { id: 1 }}}) { id }}"#), + @r###"{"data":{"createOneParent":{"id":1}}}"### + ); + + assert_error!( + runner, + r#"mutation { updateOneParent(where: { id: 1 }, data: { uniq: "u1" }) { id }}"#, + 2003, + "Foreign key constraint failed on the field" + ); + + Ok(()) + } + + /// Updating the parent with no default for SetDefault nulls the FK. + #[connector_test(schema(optional_without_default), only(Postgres))] + async fn update_parent_fail(runner: &Runner) -> TestResult<()> { + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 1, uniq: "1", children: { create: { id: 1 }}}) { id }}"#), + @r###"{"data":{"createOneParent":{"id":1}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, r#"mutation { updateOneParent(where: { id: 1 }, data: { uniq: "u1" }) { id }}"#), + @r###"{"data":{"updateOneParent":{"id":1}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, r#"query { findManyChild(where: { id: 1 }) { id parent_uniq }}"#), + @r###"{"data":{"findManyChild":[{"id":1,"parent_uniq":null}]}}"### + ); + + Ok(()) + } +} diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_update/set_null.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_update/set_null.rs new file mode 100644 index 000000000000..987641e5f8c9 --- /dev/null +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_update/set_null.rs @@ -0,0 +1,182 @@ +//! Only Postgres allows SetNull on a non-nullable FK at all, rest fail during migration. + +use indoc::indoc; +use query_engine_tests::*; + +#[test_suite(suite = "setnull_onU_1to1_req", schema(required), only(Postgres))] +mod one2one_req { + fn required() -> String { + let schema = indoc! { + r#"model Parent { + #id(id, Int, @id) + uniq String @unique + child Child? + } + + model Child { + #id(id, Int, @id) + parent_uniq String + parent Parent @relation(fields: [parent_uniq], references: [uniq], onUpdate: SetNull) + }"# + }; + + schema.to_owned() + } + + /// Updating the parent must fail if a child is connected (because of null key violation). 
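+ /// Changing `uniq` triggers `SetNull` on `parent_uniq`, which is declared NOT NULL here, so the update is rejected (P2011).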
+ #[connector_test] + async fn update_parent_failure(runner: &Runner) -> TestResult<()> { + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 1, uniq: "1", child: { create: { id: 1 }}}) { id }}"#), + @r###"{"data":{"createOneParent":{"id":1}}}"### + ); + + // `onUpdate: SetNull` would cause `null` on `parent_uniq`, throwing an error. + assert_error!( + runner, + r#"mutation { updateOneParent(where: { id: 1 }, data: { uniq: "u1" }) { id }}"#, + 2011, + "Null constraint violation on the fields: (`parent_uniq`)" + ); + + assert_error!( + runner, + r#"mutation { updateManyParent(where: { id: 1 }, data: { uniq: "u1" }) { count }}"#, + 2011, + "Null constraint violation on the fields: (`parent_uniq`)" + ); + + Ok(()) + } +} + +#[test_suite(suite = "setnull_onU_1to1_opt", schema(optional), exclude(MongoDb))] +mod one2one_opt { + fn optional() -> String { + let schema = indoc! { + r#"model Parent { + #id(id, Int, @id) + uniq String? @unique + child Child? + } + + model Child { + #id(id, Int, @id) + parent_uniq String? + parent Parent? @relation(fields: [parent_uniq], references: [uniq], onUpdate: SetNull) + }"# + }; + + schema.to_owned() + } + + /// Updating the parent succeeds and sets the FK null. + #[connector_test] + async fn update_parent(runner: &Runner) -> TestResult<()> { + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 1, uniq: "1", child: { create: { id: 1 }}}) { id }}"#), + @r###"{"data":{"createOneParent":{"id":1}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, r#"mutation { updateOneParent(where: { id: 1 }, data: { uniq: "u1" }) { id }}"#), + @r###"{"data":{"updateOneParent":{"id":1}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, r#"query { findManyChild { id parent_uniq }}"#), + @r###"{"data":{"findManyChild":[{"id":1,"parent_uniq":null}]}}"### + ); + + Ok(()) + } +} + +#[test_suite(suite = "setnull_onU_1toM_req", schema(required), only(Postgres))] +mod one2many_req { + fn required() -> String { + let schema = indoc! { + r#"model Parent { + #id(id, Int, @id) + uniq String @unique + children Child[] + } + + model Child { + #id(id, Int, @id) + parent_uniq String + parent Parent @relation(fields: [parent_uniq], references: [uniq], onUpdate: SetNull) + }"# + }; + + schema.to_owned() + } + + /// Updating the parent must fail if a child is connected (because of null key violation). + #[connector_test] + async fn update_parent_failure(runner: &Runner) -> TestResult<()> { + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 1, uniq: "1", children: { create: { id: 1 }}}) { id }}"#), + @r###"{"data":{"createOneParent":{"id":1}}}"### + ); + + // `onUpdate: SetNull` would cause `null` on `parent_uniq`, throwing an error. + assert_error!( + runner, + r#"mutation { updateOneParent(where: { id: 1 }, data: { uniq: "u1" }) { id }}"#, + 2011, + "Null constraint violation on the fields: (`parent_uniq`)" + ); + + assert_error!( + runner, + r#"mutation { updateManyParent(where: { id: 1 }, data: { uniq: "u1" }) { count }}"#, + 2011, + "Null constraint violation on the fields: (`parent_uniq`)" + ); + + Ok(()) + } +} + +#[test_suite(suite = "setnull_onU_1toM_opt", schema(optional), exclude(MongoDb))] +mod one2many_opt { + fn optional() -> String { + let schema = indoc! { + r#"model Parent { + #id(id, Int, @id) + uniq String? @unique + children Child[] + } + + model Child { + #id(id, Int, @id) + parent_uniq String? + parent Parent?
@relation(fields: [parent_uniq], references: [uniq], onUpdate: SetNull) + }"# + }; + + schema.to_owned() + } + + /// Updating the parent succeeds and sets the FK null. + #[connector_test] + async fn update_parent(runner: &Runner) -> TestResult<()> { + insta::assert_snapshot!( + run_query!(runner, r#"mutation { createOneParent(data: { id: 1, uniq: "1", children: { create: { id: 1 }}}) { id }}"#), + @r###"{"data":{"createOneParent":{"id":1}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, r#"mutation { updateOneParent(where: { id: 1 }, data: { uniq: "u1" }) { id }}"#), + @r###"{"data":{"updateOneParent":{"id":1}}}"### + ); + + insta::assert_snapshot!( + run_query!(runner, r#"query { findManyChild { id parent_uniq }}"#), + @r###"{"data":{"findManyChild":[{"id":1,"parent_uniq":null}]}}"### + ); + + Ok(()) + } +} diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/delete_many_relations.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/delete_many_relations.rs index ed02706c2e13..893cae7c5298 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/delete_many_relations.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/delete_many_relations.rs @@ -1,93 +1,94 @@ use query_engine_tests::*; // TODO: Finish porting this test suite. Needs a way to port the `schemaWithRelation` method +// Note: Referential actions require a rewrite of these tests. #[test_suite] mod delete_many_rels { - use indoc::indoc; - use query_engine_tests::{assert_error, run_query}; + // use indoc::indoc; + // use query_engine_tests::{assert_error, run_query}; - fn schema_1() -> String { - let schema = indoc! { - r#"model Parent{ - #id(id, Int, @id) - p String @unique - c Child[] - } - - model Child{ - #id(id, Int, @id) - c String @unique - parentId Int - parentReq Parent @relation(fields: [parentId], references: [id]) - }"# - }; + // fn schema_1() -> String { + // let schema = indoc! { + // r#"model Parent{ + // #id(id, Int, @id) + // p String @unique + // c Child[] + // } - schema.to_owned() - } + // model Child{ + // #id(id, Int, @id) + // c String @unique + // parentId Int + // parentReq Parent @relation(fields: [parentId], references: [id]) + // }"# + // }; - // "a PM to C1! relation " should "error when deleting the parent" - #[connector_test(schema(schema_1))] - async fn pm_c1_error_delete_parent(runner: &Runner) -> TestResult<()> { - run_query!( - runner, - r#"mutation { - createOneChild(data: { - id: 1, - c: "c1" - parentReq: { - create: {id: 1, p: "p1"} - } - }){ - id - } - }"# - ); + // schema.to_owned() + // } - assert_error!( - runner, - r#"mutation { - deleteManyParent( - where: { p: { equals: "p1" }} - ){ - count - } - }"#, - 2014, - "The change you are trying to make would violate the required relation 'ChildToParent' between the `Child` and `Parent` models." - ); + // // "a PM to C1!
relation " should "error when deleting the parent" + // #[connector_test(schema(schema_1))] + // async fn pm_c1_error_delete_parent(runner: &Runner) -> TestResult<()> { + // run_query!( + // runner, + // r#"mutation { + // createOneChild(data: { + // id: 1, + // c: "c1" + // parentReq: { + // create: {id: 1, p: "p1"} + // } + // }){ + // id + // } + // }"# + // ); - Ok(()) - } + // assert_error!( + // runner, + // r#"mutation { + // deleteManyParent( + // where: { p: { equals: "p1" }} + // ){ + // count + // } + // }"#, + // 2014, + // "The change you are trying to make would violate the required relation 'ChildToParent' between the `Child` and `Parent` models." + // ); - // "a PM to C1! relation " should "error when deleting the parent with empty filter" - #[connector_test(schema(schema_1))] - async fn pm_c1_error_delete_parent_empty_filter(runner: &Runner) -> TestResult<()> { - run_query!( - runner, - r#"mutation { - createOneChild(data: { - id: 1, - c: "c1" - parentReq: { - create: {id: 1, p: "p1"} - } - }){ - id - } - }"# - ); + // Ok(()) + // } - assert_error!( - runner, - r#"mutation { - deleteManyParent(where: {}){ - count - } - }"#, - 2014, - "The change you are trying to make would violate the required relation 'ChildToParent' between the `Child` and `Parent` models." - ); + // // "a PM to C1! relation " should "error when deleting the parent with empty filter" + // #[connector_test(schema(schema_1))] + // async fn pm_c1_error_delete_parent_empty_filter(runner: &Runner) -> TestResult<()> { + // run_query!( + // runner, + // r#"mutation { + // createOneChild(data: { + // id: 1, + // c: "c1" + // parentReq: { + // create: {id: 1, p: "p1"} + // } + // }){ + // id + // } + // }"# + // ); - Ok(()) - } + // assert_error!( + // runner, + // r#"mutation { + // deleteManyParent(where: {}){ + // count + // } + // }"#, + // 2014, + // "The change you are trying to make would violate the required relation 'ChildToParent' between the `Child` and `Parent` models." + // ); + + // Ok(()) + // } } diff --git a/query-engine/connector-test-kit-rs/query-test-macros/src/test_suite.rs b/query-engine/connector-test-kit-rs/query-test-macros/src/test_suite.rs index 92a7c1e307d1..a51e6b2a2dbe 100644 --- a/query-engine/connector-test-kit-rs/query-test-macros/src/test_suite.rs +++ b/query-engine/connector-test-kit-rs/query-test-macros/src/test_suite.rs @@ -1,3 +1,5 @@ +use std::collections::hash_map::Entry; + use crate::{attr_map::NestedAttrMap, ConnectorTestArgs}; use darling::{FromMeta, ToTokens}; use proc_macro::TokenStream; @@ -54,7 +56,10 @@ pub fn test_suite_impl(attr: TokenStream, input: TokenStream) -> TokenStream { let suite_meta: Meta = parse_quote! 
{ suite = #module_name }; let suite_nested_meta = NestedMeta::from(suite_meta); - module_attrs.insert("suite".to_owned(), suite_nested_meta); + + if let Entry::Vacant(entry) = module_attrs.entry("suite".to_owned()) { + entry.insert(suite_nested_meta); + }; if let Some((_, ref mut items)) = test_module.content { add_module_imports(items); diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/mysql.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/mysql.rs index 38c92ebc0673..10e3cfcb956a 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/mysql.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/mysql.rs @@ -146,6 +146,6 @@ impl ToString for MySqlVersion { } fn mysql_capabilities() -> Vec<ConnectorCapability> { - let dm_connector = MySqlDatamodelConnector::new(); + let dm_connector = MySqlDatamodelConnector::new(false); dm_connector.capabilities().to_owned() } diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/vitess.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/vitess.rs index be5f63b8e104..3a34cf7937f9 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/vitess.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/vitess.rs @@ -112,6 +112,6 @@ impl Display for VitessVersion { } fn vitess_capabilities() -> Vec<ConnectorCapability> { - let dm_connector = MySqlDatamodelConnector::new(); + let dm_connector = MySqlDatamodelConnector::new(true); dm_connector.capabilities().to_owned() } diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/query_result.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/query_result.rs index 427a1e6cab8a..3e3c11045afd 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/query_result.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/query_result.rs @@ -32,7 +32,7 @@ impl QueryResult { let err_code = format!("P{}", err_code); let err_exists = self.errors().into_iter().any(|err| { - let code_matches = dbg!(err.code() == Some(&err_code)); + let code_matches = err.code() == Some(&err_code); let msg_matches = match msg_contains.as_ref() { Some(msg) => err.message().contains(msg), None => true, diff --git a/query-engine/connector-test-kit/src/test/scala/writes/nestedMutations/alreadyConverted/NestedDeleteMutationInsideUpsertSpec.scala b/query-engine/connector-test-kit/src/test/scala/writes/nestedMutations/alreadyConverted/NestedDeleteMutationInsideUpsertSpec.scala index 6d03ef7f9830..98a500a2c642 100644 --- a/query-engine/connector-test-kit/src/test/scala/writes/nestedMutations/alreadyConverted/NestedDeleteMutationInsideUpsertSpec.scala +++ b/query-engine/connector-test-kit/src/test/scala/writes/nestedMutations/alreadyConverted/NestedDeleteMutationInsideUpsertSpec.scala @@ -28,7 +28,7 @@ class NestedDeleteMutationInsideUpsertSpec extends FlatSpec with Matchers with A | } | }){ | ${t.parent.selection} - | childReq{ + | childReq { | ${t.child.selection} | } | } diff --git a/query-engine/connectors/mongodb-query-connector/src/cursor.rs b/query-engine/connectors/mongodb-query-connector/src/cursor.rs index f267daf7c95c..096b57d06208 100644 --- a/query-engine/connectors/mongodb-query-connector/src/cursor.rs +++ b/query-engine/connectors/mongodb-query-connector/src/cursor.rs @@ -92,11 +92,11 @@ fn cursor_conditions(mut order_data: Vec<OrderByData>, reverse: bool) -> Documen and_conditions.push(map_equality_condition(order_data)); } + let
order_data = tail.first().unwrap(); + if head.len() == num_orderings - 1 { - let order_data = tail.first().unwrap(); and_conditions.push(map_orderby_condition(order_data, reverse, true)); } else { - let order_data = tail.first().unwrap(); and_conditions.push(map_orderby_condition(order_data, reverse, false)); } diff --git a/query-engine/connectors/sql-query-connector/src/cursor_condition.rs b/query-engine/connectors/sql-query-connector/src/cursor_condition.rs index 8692e5d4c0c5..ee0586f9731b 100644 --- a/query-engine/connectors/sql-query-connector/src/cursor_condition.rs +++ b/query-engine/connectors/sql-query-connector/src/cursor_condition.rs @@ -167,6 +167,8 @@ pub fn build( )); } + let order_definition = tail.first().unwrap(); + if head.len() == len - 1 { // Special case where we build lte / gte, not lt / gt. // - We use the combination of all order-by fields as comparator for the cursor. // // Said differently, we handle all the cases in which the prefixes are equal to len - 1 to account for possible identical comparators, // but everything else must come strictly "after" the cursor. - let order_definition = tail.first().unwrap(); - and_conditions.push(map_orderby_condition(order_definition, reverse, true)); } else { - let order_definition = tail.first().unwrap(); and_conditions.push(map_orderby_condition(order_definition, reverse, false)); } diff --git a/query-engine/connectors/sql-query-connector/src/database/operations/read.rs b/query-engine/connectors/sql-query-connector/src/database/operations/read.rs index dd455c0a90b0..7c8b6e7fe7d3 100644 --- a/query-engine/connectors/sql-query-connector/src/database/operations/read.rs +++ b/query-engine/connectors/sql-query-connector/src/database/operations/read.rs @@ -81,7 +81,7 @@ pub async fn get_many_records( // The should_batch has been adjusted to reflect that as a band-aid, but deeper investigation is necessary. if query_arguments.should_batch() { // We don't need to order in the database due to us ordering in this function.
- let order = std::mem::replace(&mut query_arguments.order_by, vec![]); + let order = std::mem::take(&mut query_arguments.order_by); let batches = query_arguments.batched(); let mut futures = FuturesUnordered::new(); diff --git a/query-engine/core/src/interpreter/query_interpreters/inmemory_record_processor.rs b/query-engine/core/src/interpreter/query_interpreters/inmemory_record_processor.rs index 819a64bc859a..bd2195cd63f0 100644 --- a/query-engine/core/src/interpreter/query_interpreters/inmemory_record_processor.rs +++ b/query-engine/core/src/interpreter/query_interpreters/inmemory_record_processor.rs @@ -32,7 +32,7 @@ impl InMemoryRecordProcessor { } fn take_abs(&self) -> Option<i64> { - self.take.clone().map(|t| if t < 0 { -t } else { t }) + self.take.map(|t| if t < 0 { -t } else { t }) } /// Checks whether or not we need to take records going backwards in the record list, diff --git a/query-engine/core/src/interpreter/query_interpreters/nested_read.rs b/query-engine/core/src/interpreter/query_interpreters/nested_read.rs index f22d05b03ea8..4e3af658ea9e 100644 --- a/query-engine/core/src/interpreter/query_interpreters/nested_read.rs +++ b/query-engine/core/src/interpreter/query_interpreters/nested_read.rs @@ -139,6 +139,7 @@ pub async fn m2m<'a, 'b>( selected_fields, processor ))] +#[allow(clippy::too_many_arguments)] pub async fn one2m<'a, 'b>( tx: &'a ConnectionLike<'a, 'b>, parent_field: &RelationFieldRef, diff --git a/query-engine/core/src/query_graph/mod.rs b/query-engine/core/src/query_graph/mod.rs index 048e9861800b..f7193812d332 100644 --- a/query-engine/core/src/query_graph/mod.rs +++ b/query-engine/core/src/query_graph/mod.rs @@ -523,7 +523,7 @@ impl QueryGraph { trace!("[Graph][Swap] Before shape: {}", self); } - let mut marked = std::mem::replace(&mut self.marked_node_pairs, vec![]); + let mut marked = std::mem::take(&mut self.marked_node_pairs); marked.reverse(); // Todo: Marked operation order is currently breaking if done bottom-up. Investigate how to fix it. for (parent_node, child_node) in marked { diff --git a/query-engine/core/src/query_graph_builder/builder.rs b/query-engine/core/src/query_graph_builder/builder.rs index 0cb4bd61502f..25442d847d22 100644 --- a/query-engine/core/src/query_graph_builder/builder.rs +++ b/query-engine/core/src/query_graph_builder/builder.rs @@ -4,8 +4,6 @@ use super::*; use crate::{constants::args, query_document::*, query_graph::*, schema::*, IrSerializer}; use prisma_value::PrismaValue; -// TODO: Think about if this is really necessary here, or if the whole code should move into -// the query_document module, possibly already as part of the parser.
pub struct QueryGraphBuilder { pub query_schema: QuerySchemaRef, } @@ -85,9 +83,11 @@ impl QueryGraphBuilder { } #[tracing::instrument(skip(self, field_pair))] + #[rustfmt::skip] fn dispatch_build(&self, field_pair: FieldPair) -> QueryGraphBuilderResult { let query_info = field_pair.schema_field.query_info.as_ref().unwrap(); let parsed_field = field_pair.parsed_field; + let connector_ctx = self.query_schema.context(); let mut graph = match (&query_info.tag, query_info.model.clone()) { (QueryTag::FindUnique, Some(m)) => read::find_unique(parsed_field, m).map(Into::into), @@ -95,13 +95,13 @@ impl QueryGraphBuilder { (QueryTag::FindMany, Some(m)) => read::find_many(parsed_field, m).map(Into::into), (QueryTag::Aggregate, Some(m)) => read::aggregate(parsed_field, m).map(Into::into), (QueryTag::GroupBy, Some(m)) => read::group_by(parsed_field, m).map(Into::into), - (QueryTag::CreateOne, Some(m)) => QueryGraph::root(|g| write::create_record(g, m, parsed_field)), - (QueryTag::CreateMany, Some(m)) => QueryGraph::root(|g| write::create_many_records(g, m, parsed_field)), - (QueryTag::UpdateOne, Some(m)) => QueryGraph::root(|g| write::update_record(g, m, parsed_field)), - (QueryTag::UpdateMany, Some(m)) => QueryGraph::root(|g| write::update_many_records(g, m, parsed_field)), - (QueryTag::UpsertOne, Some(m)) => QueryGraph::root(|g| write::upsert_record(g, m, parsed_field)), - (QueryTag::DeleteOne, Some(m)) => QueryGraph::root(|g| write::delete_record(g, m, parsed_field)), - (QueryTag::DeleteMany, Some(m)) => QueryGraph::root(|g| write::delete_many_records(g, m, parsed_field)), + (QueryTag::CreateOne, Some(m)) => QueryGraph::root(|g| write::create_record(g, connector_ctx, m, parsed_field)), + (QueryTag::CreateMany, Some(m)) => QueryGraph::root(|g| write::create_many_records(g, connector_ctx, m, parsed_field)), + (QueryTag::UpdateOne, Some(m)) => QueryGraph::root(|g| write::update_record(g, connector_ctx, m, parsed_field)), + (QueryTag::UpdateMany, Some(m)) => QueryGraph::root(|g| write::update_many_records(g, connector_ctx, m, parsed_field)), + (QueryTag::UpsertOne, Some(m)) => QueryGraph::root(|g| write::upsert_record(g, connector_ctx, m, parsed_field)), + (QueryTag::DeleteOne, Some(m)) => QueryGraph::root(|g| write::delete_record(g, connector_ctx, m, parsed_field)), + (QueryTag::DeleteMany, Some(m)) => QueryGraph::root(|g| write::delete_many_records(g, connector_ctx, m, parsed_field)), (QueryTag::ExecuteRaw, _) => QueryGraph::root(|g| write::execute_raw(g, parsed_field)), (QueryTag::QueryRaw, _) => QueryGraph::root(|g| write::query_raw(g, parsed_field)), _ => unreachable!("Query builder dispatching failed."), diff --git a/query-engine/core/src/query_graph_builder/write/create.rs b/query-engine/core/src/query_graph_builder/write/create.rs index 2e051cddc590..1faeabd8258e 100644 --- a/query-engine/core/src/query_graph_builder/write/create.rs +++ b/query-engine/core/src/query_graph_builder/write/create.rs @@ -3,7 +3,7 @@ use crate::{ constants::args, query_ast::*, query_graph::{Node, NodeRef, QueryGraph, QueryGraphDependency}, - ArgumentListLookup, ParsedField, ParsedInputList, ParsedInputMap, + ArgumentListLookup, ConnectorContext, ParsedField, ParsedInputList, ParsedInputMap, }; use connector::IdFilter; use prisma_models::ModelRef; @@ -12,7 +12,12 @@ use write_args_parser::*; /// Creates a create record query and adds it to the query graph, together with its nested queries and companion read query.
#[tracing::instrument(skip(graph, model, field))] -pub fn create_record(graph: &mut QueryGraph, model: ModelRef, mut field: ParsedField) -> QueryGraphBuilderResult<()> { +pub fn create_record( + graph: &mut QueryGraph, + connector_ctx: &ConnectorContext, + model: ModelRef, + mut field: ParsedField, +) -> QueryGraphBuilderResult<()> { graph.flag_transactional(); let data_map = match field.arguments.lookup(args::DATA) { @@ -20,7 +25,7 @@ pub fn create_record(graph: &mut QueryGraph, model: ModelRef, mut field: ParsedF None => ParsedInputMap::new(), }; - let create_node = create::create_record_node(graph, Arc::clone(&model), data_map)?; + let create_node = create::create_record_node(graph, connector_ctx, Arc::clone(&model), data_map)?; // Follow-up read query on the write let read_query = read::find_unique(field, model.clone())?; @@ -56,6 +61,7 @@ pub fn create_record(graph: &mut QueryGraph, model: ModelRef, mut field: ParsedF #[tracing::instrument(skip(graph, model, field))] pub fn create_many_records( graph: &mut QueryGraph, + _connector_ctx: &ConnectorContext, model: ModelRef, mut field: ParsedField, ) -> QueryGraphBuilderResult<()> { @@ -95,6 +101,7 @@ pub fn create_many_records( #[tracing::instrument(skip(graph, model, data_map))] pub fn create_record_node( graph: &mut QueryGraph, + connector_ctx: &ConnectorContext, model: ModelRef, data_map: ParsedInputMap, ) -> QueryGraphBuilderResult<NodeRef> { @@ -107,7 +114,7 @@ pub fn create_record_node( let create_node = graph.create_node(Query::Write(WriteQuery::CreateRecord(cr))); for (relation_field, data_map) in create_args.nested { - nested::connect_nested_query(graph, create_node, relation_field, data_map)?; + nested::connect_nested_query(graph, connector_ctx, create_node, relation_field, data_map)?; } Ok(create_node) diff --git a/query-engine/core/src/query_graph_builder/write/delete.rs b/query-engine/core/src/query_graph_builder/write/delete.rs index a15f5efd135a..4299bc1b142b 100644 --- a/query-engine/core/src/query_graph_builder/write/delete.rs +++ b/query-engine/core/src/query_graph_builder/write/delete.rs @@ -3,7 +3,7 @@ use crate::{ constants::args, query_ast::*, query_graph::{QueryGraph, QueryGraphDependency}, - ArgumentListLookup, FilteredQuery, ParsedField, + ArgumentListLookup, ConnectorContext, FilteredQuery, ParsedField, }; use connector::filter::Filter; use prisma_models::ModelRef; @@ -11,7 +11,12 @@ use std::{convert::TryInto, sync::Arc}; /// Creates a top level delete record query and adds it to the query graph.
#[tracing::instrument(skip(graph, model, field))] -pub fn delete_record(graph: &mut QueryGraph, model: ModelRef, mut field: ParsedField) -> QueryGraphBuilderResult<()> { +pub fn delete_record( + graph: &mut QueryGraph, + connector_ctx: &ConnectorContext, + model: ModelRef, + mut field: ParsedField, +) -> QueryGraphBuilderResult<()> { graph.flag_transactional(); let where_arg = field.arguments.lookup(args::WHERE).unwrap(); @@ -28,7 +33,7 @@ pub fn delete_record(graph: &mut QueryGraph, model: ModelRef, mut field: ParsedF })); let delete_node = graph.create_node(delete_query); - utils::insert_deletion_checks(graph, &model, &read_node, &delete_node)?; + utils::insert_emulated_on_delete(graph, connector_ctx, &model, &read_node, &delete_node)?; graph.create_edge( &read_node, @@ -55,6 +60,7 @@ pub fn delete_record(graph: &mut QueryGraph, model: ModelRef, mut field: ParsedF #[tracing::instrument(skip(graph, model, field))] pub fn delete_many_records( graph: &mut QueryGraph, + connector_ctx: &ConnectorContext, model: ModelRef, mut field: ParsedField, ) -> QueryGraphBuilderResult<()> { @@ -76,7 +82,8 @@ pub fn delete_many_records( let read_query_node = graph.create_node(read_query); let delete_many_node = graph.create_node(Query::Write(delete_many)); - utils::insert_deletion_checks(graph, &model, &read_query_node, &delete_many_node)?; + utils::insert_emulated_on_delete(graph, connector_ctx, &model, &read_query_node, &delete_many_node)?; + graph.create_edge( &read_query_node, &delete_many_node, diff --git a/query-engine/core/src/query_graph_builder/write/nested/connect_or_create_nested.rs b/query-engine/core/src/query_graph_builder/write/nested/connect_or_create_nested.rs index 669ad84fe514..1a832fb4ab56 100644 --- a/query-engine/core/src/query_graph_builder/write/nested/connect_or_create_nested.rs +++ b/query-engine/core/src/query_graph_builder/write/nested/connect_or_create_nested.rs @@ -16,6 +16,7 @@ use std::{convert::TryInto, sync::Arc}; #[tracing::instrument(skip(graph, parent_node, parent_relation_field, value, child_model))] pub fn nested_connect_or_create( graph: &mut QueryGraph, + connector_ctx: &ConnectorContext, parent_node: NodeRef, parent_relation_field: &RelationFieldRef, value: ParsedInputValue, @@ -25,11 +26,32 @@ pub fn nested_connect_or_create( let values = utils::coerce_vec(value); if relation.is_many_to_many() { - handle_many_to_many(graph, parent_node, parent_relation_field, values, child_model) + handle_many_to_many( + graph, + connector_ctx, + parent_node, + parent_relation_field, + values, + child_model, + ) } else if relation.is_one_to_many() { - handle_one_to_many(graph, parent_node, parent_relation_field, values, child_model) + handle_one_to_many( + graph, + connector_ctx, + parent_node, + parent_relation_field, + values, + child_model, + ) } else { - handle_one_to_one(graph, parent_node, parent_relation_field, values, child_model) + handle_one_to_one( + graph, + connector_ctx, + parent_node, + parent_relation_field, + values, + child_model, + ) } } @@ -70,6 +92,7 @@ pub fn nested_connect_or_create( #[tracing::instrument(skip(graph, parent_node, parent_relation_field, values, child_model))] fn handle_many_to_many( graph: &mut QueryGraph, + connector_ctx: &ConnectorContext, parent_node: NodeRef, parent_relation_field: &RelationFieldRef, values: Vec<ParsedInputValue>, @@ -91,7 +114,7 @@ fn handle_many_to_many( filter, )); - let create_node = create::create_record_node(graph, Arc::clone(child_model), create_map)?; + let create_node = create::create_record_node(graph, connector_ctx,
Arc::clone(child_model), create_map)?; let if_node = graph.create_node(Flow::default_if()); let connect_exists_node = @@ -127,15 +150,30 @@ fn handle_many_to_many( #[tracing::instrument(skip(graph, parent_node, parent_relation_field, values, child_model))] fn handle_one_to_many( graph: &mut QueryGraph, + connector_ctx: &ConnectorContext, parent_node: NodeRef, parent_relation_field: &RelationFieldRef, values: Vec<ParsedInputValue>, child_model: &ModelRef, ) -> QueryGraphBuilderResult<()> { if parent_relation_field.is_inlined_on_enclosing_model() { - one_to_many_inlined_parent(graph, parent_node, parent_relation_field, values, child_model) + one_to_many_inlined_parent( + graph, + connector_ctx, + parent_node, + parent_relation_field, + values, + child_model, + ) } else { - one_to_many_inlined_child(graph, parent_node, parent_relation_field, values, child_model) + one_to_many_inlined_child( + graph, + connector_ctx, + parent_node, + parent_relation_field, + values, + child_model, + ) } } @@ -143,6 +181,7 @@ fn handle_one_to_many( #[tracing::instrument(skip(graph, parent_node, parent_relation_field, values, child_model))] fn handle_one_to_one( graph: &mut QueryGraph, + connector_ctx: &ConnectorContext, parent_node: NodeRef, parent_relation_field: &RelationFieldRef, mut values: Vec<ParsedInputValue>, child_model: &ModelRef, @@ -162,6 +201,7 @@ fn handle_one_to_one( if parent_relation_field.is_inlined_on_enclosing_model() { one_to_one_inlined_parent( graph, + connector_ctx, parent_node, parent_relation_field, filter, @@ -171,6 +211,7 @@ fn handle_one_to_one( } else { one_to_one_inlined_child( graph, + connector_ctx, parent_node, parent_relation_field, filter, @@ -212,6 +253,7 @@ fn handle_one_to_one( #[tracing::instrument(skip(graph, parent_node, parent_relation_field, values, child_model))] fn one_to_many_inlined_child( graph: &mut QueryGraph, + connector_ctx: &ConnectorContext, parent_node: NodeRef, parent_relation_field: &RelationFieldRef, values: Vec<ParsedInputValue>, @@ -238,7 +280,7 @@ fn one_to_many_inlined_child( let if_node = graph.create_node(Flow::default_if()); let update_child_node = utils::update_records_node_placeholder(graph, filter, Arc::clone(child_model)); - let create_node = create::create_record_node(graph, Arc::clone(child_model), create_map)?; + let create_node = create::create_record_node(graph, connector_ctx, Arc::clone(child_model), create_map)?; graph.create_edge(&parent_node, &read_node, QueryGraphDependency::ExecutionOrder)?; graph.create_edge(&if_node, &update_child_node, QueryGraphDependency::Then)?; @@ -353,6 +395,7 @@ fn one_to_many_inlined_child( #[tracing::instrument(skip(graph, parent_node, parent_relation_field, values, child_model))] fn one_to_many_inlined_parent( graph: &mut QueryGraph, + connector_ctx: &ConnectorContext, parent_node: NodeRef, parent_relation_field: &RelationFieldRef, mut values: Vec<ParsedInputValue>, @@ -381,7 +424,7 @@ fn one_to_many_inlined_parent( graph.create_edge(&parent_node, &read_node, QueryGraphDependency::ExecutionOrder)?; let if_node = graph.create_node(Flow::default_if()); - let create_node = create::create_record_node(graph, Arc::clone(child_model), create_map)?; + let create_node = create::create_record_node(graph, connector_ctx, Arc::clone(child_model), create_map)?; let return_existing = graph.create_node(Flow::Return(None)); let return_create = graph.create_node(Flow::Return(None)); @@ -524,6 +567,7 @@ fn one_to_many_inlined_parent( #[tracing::instrument(skip(graph, parent_node, parent_relation_field, filter, create_data, child_model))] fn one_to_one_inlined_parent( graph: &mut QueryGraph, + connector_ctx:
&ConnectorContext, parent_node: NodeRef, parent_relation_field: &RelationFieldRef, filter: Filter, @@ -543,7 +587,7 @@ fn one_to_one_inlined_parent( graph.create_edge(&parent_node, &read_node, QueryGraphDependency::ExecutionOrder)?; let if_node = graph.create_node(Flow::default_if()); - let create_node = create::create_record_node(graph, Arc::clone(child_model), create_data)?; + let create_node = create::create_record_node(graph, connector_ctx, Arc::clone(child_model), create_data)?; let return_existing = graph.create_node(Flow::Return(None)); let return_create = graph.create_node(Flow::Return(None)); @@ -748,6 +792,7 @@ fn one_to_one_inlined_parent( #[tracing::instrument(skip(graph, parent_node, parent_relation_field, filter, create_data, child_model))] fn one_to_one_inlined_child( graph: &mut QueryGraph, + connector_ctx: &ConnectorContext, parent_node: NodeRef, parent_relation_field: &RelationFieldRef, filter: Filter, @@ -771,7 +816,7 @@ fn one_to_one_inlined_child( graph.create_edge(&parent_node, &read_node, QueryGraphDependency::ExecutionOrder)?; let if_node = graph.create_node(Flow::default_if()); - let create_node = create::create_record_node(graph, Arc::clone(child_model), create_data)?; + let create_node = create::create_record_node(graph, connector_ctx, Arc::clone(child_model), create_data)?; graph.create_edge( &read_node, diff --git a/query-engine/core/src/query_graph_builder/write/nested/create_nested.rs b/query-engine/core/src/query_graph_builder/write/nested/create_nested.rs index a94373738f64..f2d1ef475fbe 100644 --- a/query-engine/core/src/query_graph_builder/write/nested/create_nested.rs +++ b/query-engine/core/src/query_graph_builder/write/nested/create_nested.rs @@ -16,6 +16,7 @@ use std::{convert::TryInto, sync::Arc}; #[tracing::instrument(skip(graph, parent_node, parent_relation_field, value, child_model))] pub fn nested_create( graph: &mut QueryGraph, + connector_ctx: &ConnectorContext, parent_node: NodeRef, parent_relation_field: &RelationFieldRef, value: ParsedInputValue, @@ -26,7 +27,7 @@ pub fn nested_create( // Build all create nodes upfront. 
let creates: Vec<NodeRef> = utils::coerce_vec(value) .into_iter() - .map(|value| create::create_record_node(graph, Arc::clone(child_model), value.try_into()?)) + .map(|value| create::create_record_node(graph, connector_ctx, Arc::clone(child_model), value.try_into()?)) .collect::<QueryGraphBuilderResult<Vec<_>>>()?; if relation.is_many_to_many() { diff --git a/query-engine/core/src/query_graph_builder/write/nested/delete_nested.rs b/query-engine/core/src/query_graph_builder/write/nested/delete_nested.rs index 56787825907a..6fbc3b263ed1 100644 --- a/query-engine/core/src/query_graph_builder/write/nested/delete_nested.rs +++ b/query-engine/core/src/query_graph_builder/write/nested/delete_nested.rs @@ -23,6 +23,7 @@ use std::{convert::TryInto, sync::Arc}; #[tracing::instrument(skip(graph, parent_node, parent_relation_field, value, child_model))] pub fn nested_delete( graph: &mut QueryGraph, + connector_ctx: &ConnectorContext, parent_node: &NodeRef, parent_relation_field: &RelationFieldRef, value: ParsedInputValue, @@ -50,7 +51,13 @@ pub fn nested_delete( let find_child_records_node = utils::insert_find_children_by_parent_node(graph, parent_node, parent_relation_field, or_filter)?; - utils::insert_deletion_checks(graph, child_model, &find_child_records_node, &delete_many_node)?; + utils::insert_emulated_on_delete( + graph, + connector_ctx, + child_model, + &find_child_records_node, + &delete_many_node, + )?; let relation_name = parent_relation_field.relation().name.clone(); let parent_name = parent_relation_field.model().name.clone(); @@ -91,7 +98,13 @@ pub fn nested_delete( record_filter: None, }))); - utils::insert_deletion_checks(graph, child_model, &find_child_records_node, &delete_record_node)?; + utils::insert_emulated_on_delete( + graph, + connector_ctx, + child_model, + &find_child_records_node, + &delete_record_node, + )?; let relation_name = parent_relation_field.relation().name.clone(); let child_model_name = child_model.name.clone(); @@ -127,6 +140,7 @@ pub fn nested_delete( #[tracing::instrument(skip(graph, parent, parent_relation_field, value, child_model))] pub fn nested_delete_many( graph: &mut QueryGraph, + connector_ctx: &ConnectorContext, parent: &NodeRef, parent_relation_field: &RelationFieldRef, value: ParsedInputValue, @@ -147,7 +161,13 @@ pub fn nested_delete_many( }); let delete_many_node = graph.create_node(Query::Write(delete_many)); - utils::insert_deletion_checks(graph, child_model, &find_child_records_node, &delete_many_node)?; + utils::insert_emulated_on_delete( + graph, + connector_ctx, + child_model, + &find_child_records_node, + &delete_many_node, + )?; graph.create_edge( &find_child_records_node, diff --git a/query-engine/core/src/query_graph_builder/write/nested/mod.rs b/query-engine/core/src/query_graph_builder/write/nested/mod.rs index a078709bd935..828c0a246c34 100644 --- a/query-engine/core/src/query_graph_builder/write/nested/mod.rs +++ b/query-engine/core/src/query_graph_builder/write/nested/mod.rs @@ -11,7 +11,7 @@ use super::*; use crate::{ constants::operations, query_graph::{NodeRef, QueryGraph}, - ParsedInputMap, + ConnectorContext, ParsedInputMap, }; use connect_nested::*; use connect_or_create_nested::*; @@ -24,8 +24,10 @@ use update_nested::*; use upsert_nested::*; #[tracing::instrument(skip(graph, parent, parent_relation_field, data_map))] +#[rustfmt::skip] pub fn connect_nested_query( graph: &mut QueryGraph, + connector_ctx: &ConnectorContext, parent: NodeRef, parent_relation_field: RelationFieldRef, data_map: ParsedInputMap, @@ -34,19 +36,17 @@ pub fn connect_nested_query( for
(field_name, value) in data_map { match field_name.as_str() { - operations::CREATE => nested_create(graph, parent, &parent_relation_field, value, &child_model)?, + operations::CREATE => nested_create(graph, connector_ctx,parent, &parent_relation_field, value, &child_model)?, operations::CREATE_MANY => nested_create_many(graph, parent, &parent_relation_field, value, &child_model)?, - operations::UPDATE => nested_update(graph, &parent, &parent_relation_field, value, &child_model)?, - operations::UPSERT => nested_upsert(graph, parent, &parent_relation_field, value)?, - operations::DELETE => nested_delete(graph, &parent, &parent_relation_field, value, &child_model)?, + operations::UPDATE => nested_update(graph, connector_ctx, &parent, &parent_relation_field, value, &child_model)?, + operations::UPSERT => nested_upsert(graph, connector_ctx, parent, &parent_relation_field, value)?, + operations::DELETE => nested_delete(graph, connector_ctx, &parent, &parent_relation_field, value, &child_model)?, operations::CONNECT => nested_connect(graph, parent, &parent_relation_field, value, &child_model)?, operations::DISCONNECT => nested_disconnect(graph, parent, &parent_relation_field, value, &child_model)?, operations::SET => nested_set(graph, &parent, &parent_relation_field, value, &child_model)?, operations::UPDATE_MANY => nested_update_many(graph, &parent, &parent_relation_field, value, &child_model)?, - operations::DELETE_MANY => nested_delete_many(graph, &parent, &parent_relation_field, value, &child_model)?, - operations::CONNECT_OR_CREATE => { - nested_connect_or_create(graph, parent, &parent_relation_field, value, &child_model)? - } + operations::DELETE_MANY => nested_delete_many(graph, connector_ctx, &parent, &parent_relation_field, value, &child_model)?, + operations::CONNECT_OR_CREATE => nested_connect_or_create(graph, connector_ctx, parent, &parent_relation_field, value, &child_model)?, _ => panic!("Unhandled nested operation: {}", field_name), }; } diff --git a/query-engine/core/src/query_graph_builder/write/nested/update_nested.rs b/query-engine/core/src/query_graph_builder/write/nested/update_nested.rs index 9870f21f25b0..0eeff35ff0a7 100644 --- a/query-engine/core/src/query_graph_builder/write/nested/update_nested.rs +++ b/query-engine/core/src/query_graph_builder/write/nested/update_nested.rs @@ -30,6 +30,7 @@ use write_args_parser::*; #[tracing::instrument(skip(graph, parent, parent_relation_field, value, child_model))] pub fn nested_update( graph: &mut QueryGraph, + connector_ctx: &ConnectorContext, parent: &NodeRef, parent_relation_field: &RelationFieldRef, value: ParsedInputValue, @@ -54,8 +55,13 @@ pub fn nested_update( let find_child_records_node = utils::insert_find_children_by_parent_node(graph, parent, parent_relation_field, filter)?; - let update_node = - update::update_record_node(graph, Filter::empty(), Arc::clone(child_model), data.try_into()?)?; + let update_node = update::update_record_node( + graph, + connector_ctx, + Filter::empty(), + Arc::clone(child_model), + data.try_into()?, + )?; let child_model_identifier = parent_relation_field.related_model().primary_identifier(); diff --git a/query-engine/core/src/query_graph_builder/write/nested/upsert_nested.rs b/query-engine/core/src/query_graph_builder/write/nested/upsert_nested.rs index eeec9878e0c9..2670d6ad76b3 100644 --- a/query-engine/core/src/query_graph_builder/write/nested/upsert_nested.rs +++ b/query-engine/core/src/query_graph_builder/write/nested/upsert_nested.rs @@ -87,6 +87,7 @@ use std::{convert::TryInto, 
sync::Arc}; #[tracing::instrument(skip(graph, parent_node, parent_relation_field, value))] pub fn nested_upsert( graph: &mut QueryGraph, + connector_ctx: &ConnectorContext, parent_node: NodeRef, parent_relation_field: &RelationFieldRef, value: ParsedInputValue, @@ -117,9 +118,11 @@ pub fn nested_upsert( utils::insert_find_children_by_parent_node(graph, &parent_node, parent_relation_field, filter)?; let if_node = graph.create_node(Flow::default_if()); - let create_node = create::create_record_node(graph, Arc::clone(&child_model), create_input.try_into()?)?; + let create_node = + create::create_record_node(graph, connector_ctx, Arc::clone(&child_model), create_input.try_into()?)?; let update_node = update::update_record_node( graph, + connector_ctx, Filter::empty(), Arc::clone(&child_model), update_input.try_into()?, diff --git a/query-engine/core/src/query_graph_builder/write/update.rs b/query-engine/core/src/query_graph_builder/write/update.rs index 06c01ccd8051..fbebe951375b 100644 --- a/query-engine/core/src/query_graph_builder/write/update.rs +++ b/query-engine/core/src/query_graph_builder/write/update.rs @@ -1,5 +1,5 @@ use super::*; -use crate::{constants::args, query_graph_builder::write::write_args_parser::*}; +use crate::{constants::args, query_graph_builder::write::write_args_parser::*, ConnectorContext}; use crate::{ query_ast::*, query_graph::{Node, NodeRef, QueryGraph, QueryGraphDependency}, @@ -11,7 +11,12 @@ use std::{convert::TryInto, sync::Arc}; /// Creates an update record query and adds it to the query graph, together with its nested queries and companion read query. #[tracing::instrument(skip(graph, model, field))] -pub fn update_record(graph: &mut QueryGraph, model: ModelRef, mut field: ParsedField) -> QueryGraphBuilderResult<()> { +pub fn update_record( + graph: &mut QueryGraph, + connector_ctx: &ConnectorContext, + model: ModelRef, + mut field: ParsedField, +) -> QueryGraphBuilderResult<()> { // "where" let where_arg: ParsedInputMap = field.arguments.lookup(args::WHERE).unwrap().value.try_into()?; let filter = extract_unique_filter(where_arg, &model)?; @@ -20,7 +25,7 @@ pub fn update_record(graph: &mut QueryGraph, model: ModelRef, mut field: ParsedF let data_argument = field.arguments.lookup(args::DATA).unwrap(); let data_map: ParsedInputMap = data_argument.value.try_into()?; - let update_node = update_record_node(graph, filter, Arc::clone(&model), data_map)?; + let update_node = update_record_node(graph, connector_ctx, filter, Arc::clone(&model), data_map)?; let read_query = read::find_unique(field, model.clone())?; let read_node = graph.create_node(Query::Read(read_query)); @@ -55,6 +60,7 @@ pub fn update_record(graph: &mut QueryGraph, model: ModelRef, mut field: ParsedF #[tracing::instrument(skip(graph, model, field))] pub fn update_many_records( graph: &mut QueryGraph, + connector_ctx: &ConnectorContext, model: ModelRef, mut field: ParsedField, ) -> QueryGraphBuilderResult<()> { @@ -87,6 +93,7 @@ pub fn update_many_records( #[tracing::instrument(skip(graph, filter, model, data_map))] pub fn update_record_node<T>( graph: &mut QueryGraph, + connector_ctx: &ConnectorContext, filter: T, model: ModelRef, data_map: ParsedInputMap, @@ -111,7 +118,7 @@ where let node = graph.create_node(Query::Write(WriteQuery::UpdateRecord(ur))); for (relation_field, data_map) in update_args.nested { - nested::connect_nested_query(graph, node, relation_field, data_map)?; + nested::connect_nested_query(graph, connector_ctx, node, relation_field, data_map)?; } Ok(node) diff --git
a/query-engine/core/src/query_graph_builder/write/upsert.rs b/query-engine/core/src/query_graph_builder/write/upsert.rs index 06af31db83c7..8406a04de137 100644 --- a/query-engine/core/src/query_graph_builder/write/upsert.rs +++ b/query-engine/core/src/query_graph_builder/write/upsert.rs @@ -3,14 +3,19 @@ use crate::{ constants::args, query_ast::*, query_graph::{Flow, Node, QueryGraph, QueryGraphDependency}, - ArgumentListLookup, ParsedField, ParsedInputMap, + ArgumentListLookup, ConnectorContext, ParsedField, ParsedInputMap, }; use connector::IdFilter; use prisma_models::ModelRef; use std::{convert::TryInto, sync::Arc}; #[tracing::instrument(skip(graph, model, field))] -pub fn upsert_record(graph: &mut QueryGraph, model: ModelRef, mut field: ParsedField) -> QueryGraphBuilderResult<()> { +pub fn upsert_record( + graph: &mut QueryGraph, + connector_ctx: &ConnectorContext, + model: ModelRef, + mut field: ParsedField, +) -> QueryGraphBuilderResult<()> { graph.flag_transactional(); let where_arg: ParsedInputMap = field.arguments.lookup(args::WHERE).unwrap().value.try_into()?; @@ -24,8 +29,20 @@ pub fn upsert_record(graph: &mut QueryGraph, model: ModelRef, mut field: ParsedF let read_parent_records = utils::read_ids_infallible(model.clone(), model_id.clone(), filter.clone()); let read_parent_records_node = graph.create_node(read_parent_records); - let create_node = create::create_record_node(graph, Arc::clone(&model), create_argument.value.try_into()?)?; - let update_node = update::update_record_node(graph, filter, Arc::clone(&model), update_argument.value.try_into()?)?; + let create_node = create::create_record_node( + graph, + connector_ctx, + Arc::clone(&model), + create_argument.value.try_into()?, + )?; + + let update_node = update::update_record_node( + graph, + connector_ctx, + filter, + Arc::clone(&model), + update_argument.value.try_into()?, + )?; let read_query = read::find_unique(field, Arc::clone(&model))?; let read_node_create = graph.create_node(Query::Read(read_query.clone())); diff --git a/query-engine/core/src/query_graph_builder/write/utils.rs b/query-engine/core/src/query_graph_builder/write/utils.rs index 0c354679823c..d8ce3c855491 100644 --- a/query-engine/core/src/query_graph_builder/write/utils.rs +++ b/query-engine/core/src/query_graph_builder/write/utils.rs @@ -1,10 +1,11 @@ use crate::{ query_ast::*, query_graph::{Flow, Node, NodeRef, QueryGraph, QueryGraphDependency}, - ParsedInputValue, QueryGraphBuilderError, QueryGraphBuilderResult, + ConnectorContext, ParsedInputValue, QueryGraphBuilderError, QueryGraphBuilderResult, }; -use connector::{Filter, WriteArgs}; -use itertools::Itertools; +use connector::{Filter, RecordFilter, WriteArgs}; +use datamodel::{common::preview_features::PreviewFeature, ReferentialAction}; +use datamodel_connector::ConnectorCapability; use prisma_models::{ModelProjection, ModelRef, RelationFieldRef}; use std::sync::Arc; @@ -265,97 +266,478 @@ pub fn insert_existing_1to1_related_model_checks( Ok(read_existing_children) } -/// Inserts checks into the graph that check all required, non-list relations pointing to -/// the given `model`. Those checks fail at runtime (edges to the `Empty` node) if one or more -/// records are found. Checks are inserted between `parent_node` and `child_node`. +/// Inserts emulated referential actions for `onDelete` into the graph. +/// All relations that refer to the `model` row(s) being deleted are checked for their desired emulation and inserted accordingly. 
+/// Right now, supported modes are `Restrict` and `SetNull` (cascade will follow). +/// Those checks fail at runtime and are inserted between `parent_node` and `child_node`. /// /// This function is usually part of a delete (`deleteOne` or `deleteMany`). /// Expects `parent_node` to return one or more IDs (for records of `model`) to be checked. /// -/// ## Example for a standard delete scenario -/// - We have 2 relations, from `A` and `B` to `model`. -/// - This function inserts the nodes and edges in between `Find Record IDs` (`parent_node`) and -/// `Delete` (`child_node`) into the graph (but not the edge from `Find` to `Delete`, assumed already existing here). +/// The old behavior (pre-referential actions) is preserved for if the ReferentialActions feature flag is disabled, +/// which was basically only the `Restrict` part of this code. /// +/// Resulting graph (all emulations): /// ```text -/// ┌────────────────────┐ -/// │ Find Record IDs to │ -/// ┌──│ Delete │ -/// │ └────────────────────┘ +/// ┌ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ +/// Parent │ +/// ┌ ─│ (ids to delete) ─────────────────┬─────────────────────────────┬────────────────────────────────────────┐ +/// ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ┘ │ │ │ +/// │ │ │ │ │ +/// ▼ ▼ ▼ ▼ +/// │ ┌────────────────────┐ ┌────────────────────┐ ┌────────────────────┐ ┌────────────────────┐ +/// │Find Connected Model│ │Find Connected Model│ │Find Connected Model│ │Find Connected Model│ +/// │ │ A (Restrict) │ │ B (Restrict) │ ┌──│ C (SetNull) │ ┌──│ D (Cascade) │ +/// └────────────────────┘ └────────────────────┘ │ └────────────────────┘ │ └────────────────────┘ +/// │ │ │ │ │ │ │ +/// Fail if│> 0 Fail if│> 0 │ ▼ │ │ +/// │ │ │ │┌ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ │ ▼ +/// ▼ ▼ │ ┌────────────────────┐ │ │┌ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ +/// │ ┌────────────────────┐ ┌────────────────────┐ ││ │ Insert onUpdate │ │ ┌────────────────────┐ │ +/// │ Empty │ │ Empty │ │ │ emulation subtree │ │ ││ │ Insert onDelete │ +/// │ └────────────────────┘ └────────────────────┘ ││ │for relations using │ │ │ emulation subtree │ │ +/// │ │ │ │the foreign key that│ │ ││ │ for all relations │ +/// │ │ │ ││ │ was updated. │ │ │ pointing to D. │ │ +/// │ │ │ └────────────────────┘ │ ││ └────────────────────┘ +/// │ │ │ │└ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ │ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ┘ +/// │ │ │ │ │ │ +/// │ │ │ │ │ │ │ +/// ▼ │ │ ▼ │ ▼ +/// │ ┌ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ │ │ ┌────────────────────┐ │ ┌────────────────────┐ +/// ─▶ Delete │◀────────────────┘ │ │ Update Cs (set FK │ └─▶│ Delete Cs │ +/// └ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ └─▶│ null) │ └────────────────────┘ +/// ▲ └────────────────────┘ │ +/// │ │ │ +/// └─────────────────────────────────────────────────────────┴────────────────────────────────────────┘ +/// ``` +#[tracing::instrument(skip(graph, model_to_delete, parent_node, child_node))] +pub fn insert_emulated_on_delete( + graph: &mut QueryGraph, + connector_ctx: &ConnectorContext, + model_to_delete: &ModelRef, + parent_node: &NodeRef, + child_node: &NodeRef, +) -> QueryGraphBuilderResult<()> { + let has_fks = connector_ctx.capabilities.contains(&ConnectorCapability::ForeignKeys); + let has_ra_feature = connector_ctx.features.contains(&PreviewFeature::ReferentialActions); + + // If the connector supports foreign keys and the new mode is enabled (preview feature), we do not do any checks / emulation. + if has_ra_feature && has_fks { + return Ok(()); + } + + // If it's non-fk dbs, then the emulation will kick in. If it has Fks, then preserve the old behavior (`has_fks` -> only required ones). 
+ let internal_model = model_to_delete.internal_data_model(); + let relation_fields = internal_model.fields_pointing_to_model(model_to_delete, has_fks); + + for rf in relation_fields { + match rf.relation().on_delete() { + // old behavior was to only insert restrict checks. + _ if !has_ra_feature => emulate_restrict(graph, &rf, parent_node, child_node)?, + ReferentialAction::Restrict => emulate_restrict(graph, &rf, parent_node, child_node)?, + ReferentialAction::NoAction => continue, // Explicitly do nothing. + + ReferentialAction::Cascade => { + emulate_on_delete_cascade(graph, &rf, connector_ctx, model_to_delete, parent_node, child_node)? + } + + ReferentialAction::SetNull => { + emulate_set_null(graph, &rf, connector_ctx, model_to_delete, parent_node, child_node)? + } + + x => panic!("Unsupported referential action emulation: {}", x), + }; + } + + Ok(()) +} + +/// Inserts restrict emulations into the graph between `parent_node` and `child_node`. +/// `relation_field` is the relation field pointing to the model to be deleted/updated. +/// +/// +/// ```text +/// ┌ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ +/// Parent │ +/// ┌ ─│ (ids to del/upd) +/// ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ┘ /// │ │ -/// │ ▼ +/// ▼ +/// │ ┌────────────────────┐ +/// │Find Connected Model│ +/// │ │ (Restrict) │ +/// └────────────────────┘ +/// │ │ +/// Fail if│> 0 +/// │ │ +/// ▼ /// │ ┌────────────────────┐ -/// ├─▶│Find Connected Model│ -/// │ │ A │──┐ -/// │ └────────────────────┘ │ +/// │ Empty │ +/// │ └────────────────────┘ +/// │ +/// │ ▼ +/// ┌ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ +/// └ ▶ Delete / Update │ +/// └ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ +/// ``` +pub fn emulate_restrict( + graph: &mut QueryGraph, + relation_field: &RelationFieldRef, + parent_node: &NodeRef, + child_node: &NodeRef, +) -> QueryGraphBuilderResult<()> { + let noop_node = graph.create_node(Node::Empty); + let relation_field = relation_field.related_field(); + let child_model_identifier = relation_field.related_model().primary_identifier(); + let read_node = insert_find_children_by_parent_node(graph, parent_node, &relation_field, Filter::empty())?; + + graph.create_edge( + &read_node, + &noop_node, + QueryGraphDependency::ParentProjection( + child_model_identifier, + Box::new(move |noop_node, child_ids| { + if !child_ids.is_empty() { + return Err(QueryGraphBuilderError::RelationViolation((relation_field).into())); + } + + Ok(noop_node) + }), + ), + )?; + + // Edge from empty node to the child (delete). + graph.create_edge(&noop_node, child_node, QueryGraphDependency::ExecutionOrder)?; + + Ok(()) +} + +/// Inserts cascade emulations into the graph between `parent_node` and `child_node`. +/// `relation_field` is the relation field pointing to the model to be deleted. +/// Recurses into the deletion emulation to ensure that subsequent deletions are handled correctly as well. +/// +/// ```text +/// ┌ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ +/// Parent │ +/// │ (ids to delete) ─ ┐ +/// ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ┘ +/// │ │ +/// ▼ +/// ┌────────────────────┐ │ +/// │Find Connected Model│ +/// ┌──│ (Cascade) │ │ +/// │ └────────────────────┘ /// │ │ │ -/// │ ▼ │ -/// │ ┌────────────────────┐ │ -/// ├─▶│Find Connected Model│ │ Fail if > 0 -/// │ │ B │ │ -/// │ └────────────────────┘ │ -/// │ │Fail if > 0 │ -/// │ ▼ │ +/// │ ▼ +/// │┌ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ │ +/// │ ┌────────────────────┐ │ +/// ││ │ Insert onDelete │ │ +/// │ │ emulation subtree │ │ +/// ││ │ for all relations │ │ +/// │ │ pointing to D. 
│ │ +/// ││ └────────────────────┘ │ +/// │ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ┘ +/// │ │ │ +/// │ ▼ /// │ ┌────────────────────┐ │ -/// ├─▶│ Empty │◀─┘ +/// └─▶│ Delete children │ +/// └────────────────────┘ │ +/// │ +/// ▼ │ +/// ┌ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ +/// Delete │◀ ┘ +/// └ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ +/// ``` +pub fn emulate_on_delete_cascade( + graph: &mut QueryGraph, + relation_field: &RelationFieldRef, // This is the field _on the other model_ for cascade. + connector_ctx: &ConnectorContext, + _model: &ModelRef, + parent_node: &NodeRef, + child_node: &NodeRef, +) -> QueryGraphBuilderResult<()> { + let dependent_model = relation_field.model(); + let parent_relation_field = relation_field.related_field(); + let child_model_identifier = relation_field.related_model().primary_identifier(); + + // Records that need to be deleted for the cascade. + let dependent_records_node = + insert_find_children_by_parent_node(graph, parent_node, &parent_relation_field, Filter::empty())?; + + let delete_query = WriteQuery::DeleteManyRecords(DeleteManyRecords { + model: dependent_model.clone(), + record_filter: RecordFilter::empty(), + }); + + let delete_dependents_node = graph.create_node(Query::Write(delete_query)); + + insert_emulated_on_delete( + graph, + connector_ctx, + &dependent_model, + &dependent_records_node, + &delete_dependents_node, + )?; + + graph.create_edge( + &dependent_records_node, + &delete_dependents_node, + QueryGraphDependency::ParentProjection( + child_model_identifier.clone(), + Box::new(move |mut delete_dependents_node, dependent_ids| { + if let Node::Query(Query::Write(WriteQuery::DeleteManyRecords(ref mut dmr))) = delete_dependents_node { + dmr.record_filter = dependent_ids.into(); + } + + Ok(delete_dependents_node) + }), + ), + )?; + + graph.create_edge( + &delete_dependents_node, + child_node, + QueryGraphDependency::ExecutionOrder, + )?; + + Ok(()) +} + +/// Inserts set null emulations into the graph between `parent_node` and `child_node`. +/// `relation_field` is the relation field pointing to the model to be deleted. +/// Recurses into the deletion emulation to ensure that subsequent deletions are handled correctly as well. +/// +/// ```text +/// ┌ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ +/// Parent │ +/// │ (ids to del/upd) ─ ┐ +/// ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ┘ +/// │ │ +/// ▼ +/// ┌────────────────────┐ │ +/// │Find Connected Model│ +/// ┌──│ (SetNull) │ │ /// │ └────────────────────┘ -/// │ │ +/// │ │ │ /// │ ▼ +/// │┌ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ │ +/// │ ┌────────────────────┐ │ +/// ││ │ Insert onUpdate │ │ +/// │ │ emulation subtree │ │ +/// ││ │for relations using │ │ +/// │ │the foreign key that│ │ +/// ││ │ was updated. 
│ │ +/// │ └────────────────────┘ │ +/// │└ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ │ +/// │ │ +/// │ ▼ │ /// │ ┌────────────────────┐ -/// └─▶│ Delete │ -/// └────────────────────┘ +/// │ │Update children (set│ │ +/// └─▶│ FK null) │ +/// └────────────────────┘ │ +/// │ +/// ▼ │ +/// ┌ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ +/// Delete / Update │◀ ┘ +/// └ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ /// ``` -#[tracing::instrument(skip(graph, model, parent_node, child_node))] -pub fn insert_deletion_checks( +pub fn emulate_set_null( graph: &mut QueryGraph, + relation_field: &RelationFieldRef, + connector_ctx: &ConnectorContext, model: &ModelRef, parent_node: &NodeRef, child_node: &NodeRef, ) -> QueryGraphBuilderResult<()> { - let internal_model = model.internal_data_model(); - let relation_fields = internal_model.fields_requiring_model(model); - let mut check_nodes = vec![]; - - if !relation_fields.is_empty() { - let noop_node = graph.create_node(Node::Empty); - - // We know that the relation can't be a list and must be required on the related model for `model` (see fields_requiring_model). - // For all requiring models (RM), we use the field on `model` to query for existing RM records and error out if at least one exists. - for rf in relation_fields { - let relation_field = rf.related_field(); - let child_model_identifier = relation_field.related_model().primary_identifier(); - let read_node = insert_find_children_by_parent_node(graph, parent_node, &relation_field, Filter::empty())?; - - graph.create_edge( - &read_node, - &noop_node, - QueryGraphDependency::ParentProjection( - child_model_identifier, - Box::new(move |noop_node, child_ids| { - if !child_ids.is_empty() { - return Err(QueryGraphBuilderError::RelationViolation((relation_field).into())); - } - - Ok(noop_node) - }), - ), - )?; - - check_nodes.push(read_node); - } + // let dependent_model = relation_field.model(); + // let parent_relation_field = relation_field.related_field(); + // let child_model_identifier = relation_field.related_model().primary_identifier(); + + // // Records that need to be deleted for the cascade. + // let dependent_records_node = + // insert_find_children_by_parent_node(graph, parent_node, &parent_relation_field, Filter::empty())?; + + // let delete_query = WriteQuery::DeleteManyRecords(DeleteManyRecords { + // model: dependent_model.clone(), + // record_filter: RecordFilter::empty(), + // }); + + // let delete_dependents_node = graph.create_node(Query::Write(delete_query)); + + // insert_emulated_on_delete( + // graph, + // connector_ctx, + // &dependent_model, + // &dependent_records_node, + // &delete_dependents_node, + // )?; + + // graph.create_edge( + // &dependent_records_node, + // &delete_dependents_node, + // QueryGraphDependency::ParentProjection( + // child_model_identifier.clone(), + // Box::new(move |mut delete_dependents_node, dependent_ids| { + // if let Node::Query(Query::Write(WriteQuery::DeleteManyRecords(ref mut dmr))) = delete_dependents_node { + // dmr.record_filter = dependent_ids.into(); + // } + + // Ok(delete_dependents_node) + // }), + // ), + // )?; + + // graph.create_edge( + // &delete_dependents_node, + // child_node, + // QueryGraphDependency::ExecutionOrder, + // )?; + + Ok(()) +} + +/// Inserts emulated referential actions for `onUpdate` into the graph. +/// All relations that refer to the `model` row(s) being deleted are checked for their desired emulation and inserted accordingly. +/// Right now, supported modes are `Restrict` and `SetNull` (cascade will follow). 
+/// Those checks fail at runtime and are inserted between `parent_node` and `child_node`. +/// +/// This function is usually part of a delete (`deleteOne` or `deleteMany`). +/// Expects `parent_node` to return one or more IDs (for records of `model`) to be checked. +/// +/// The old behavior (pre-referential actions) is preserved for if the ReferentialActions feature flag is disabled, +/// which was basically only the `Restrict` part of this code. +/// +/// Resulting graph (all emulations): +/// ```text +/// ┌ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ +/// Parent │ +/// ┌ ─│ (ids to update) ─────────────────┬─────────────────────────────┬────────────────────────────────────────┐ +/// ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ┘ │ │ │ +/// │ │ │ │ │ +/// ▼ ▼ ▼ ▼ +/// │ ┌────────────────────┐ ┌────────────────────┐ ┌────────────────────┐ ┌────────────────────┐ +/// │Find Connected Model│ │Find Connected Model│ │Find Connected Model│ │Find Connected Model│ +/// │ │ A (Restrict) │ │ B (Restrict) │ ┌──│ C (SetNull) │ ┌──│ D (Cascade) │ +/// └────────────────────┘ └────────────────────┘ │ └────────────────────┘ │ └────────────────────┘ +/// │ │ │ │ │ │ │ +/// Fail if│> 0 Fail if│> 0 │ ▼ │ │ +/// │ │ │ │┌ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ │ ▼ +/// ▼ ▼ │ ┌────────────────────┐ │ │┌ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ +/// │ ┌────────────────────┐ ┌────────────────────┐ ││ │ Insert onUpdate │ │ ┌────────────────────┐ │ +/// │ Empty │ │ Empty │ │ │ emulation subtree │ │ ││ │ Insert onUpdate │ +/// │ └────────────────────┘ └────────────────────┘ ││ │for relations using │ │ │ emulation subtree │ │ +/// │ │ │ │the foreign key that│ │ ││ │ for all relations │ +/// │ │ │ ││ │ was updated. │ │ │ pointing to D. │ │ +/// │ │ │ └────────────────────┘ │ ││ └────────────────────┘ +/// │ │ │ │└ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ │ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ┘ +/// │ │ │ │ │ │ +/// │ │ │ │ │ │ │ +/// ▼ │ │ ▼ │ ▼ +/// │ ┌ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ │ │ ┌────────────────────┐ │ ┌────────────────────┐ +/// ─▶ Update │◀────────────────┘ │ │ Update Cs (set FK │ └─▶│ Update Cs │ +/// └ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ └─▶│ null) │ └────────────────────┘ +/// ▲ └────────────────────┘ │ +/// │ │ │ +/// └─────────────────────────────────────────────────────────┴────────────────────────────────────────┘ +/// ``` +#[tracing::instrument(skip(graph, model_to_update, parent_node, child_node))] +pub fn insert_emulated_on_update( + graph: &mut QueryGraph, + connector_ctx: &ConnectorContext, + model_to_update: &ModelRef, + parent_node: &NodeRef, + child_node: &NodeRef, +) -> QueryGraphBuilderResult<()> { + let has_fks = connector_ctx.capabilities.contains(&ConnectorCapability::ForeignKeys); + let has_ra_feature = connector_ctx.features.contains(&PreviewFeature::ReferentialActions); - // Connects all `Find Connected Model` nodes with execution order dependency from the example in the docs. - check_nodes.into_iter().fold1(|prev, next| { - graph - .create_edge(&prev, &next, QueryGraphDependency::ExecutionOrder) - .unwrap(); + // If the connector supports foreign keys and the new mode is enabled (preview feature), we do not do any checks / emulation. + if has_ra_feature && has_fks { + return Ok(()); + } - next - }); + // If it's non-fk dbs, then the emulation will kick in. If it has Fks, then preserve the old behavior (`has_fks` -> only required ones). + let internal_model = model_to_update.internal_data_model(); + let relation_fields = internal_model.fields_pointing_to_model(model_to_update, has_fks); - // Edge from empty node to the child (delete). 
- graph.create_edge(&noop_node, child_node, QueryGraphDependency::ExecutionOrder)?; + // Unwraps are safe as in this stage, no node content can be replaced. + let parent_update_args = extract_update_args(graph.node_content(parent_node).unwrap()); + + for rf in relation_fields { + match rf.relation().on_delete() { + ReferentialAction::Restrict => emulate_restrict(graph, &rf, parent_node, child_node)?, + ReferentialAction::NoAction => continue, // Explicitly do nothing. + + ReferentialAction::Cascade => { + emulate_on_update_cascade(graph, &rf, connector_ctx, model_to_update, parent_node, child_node)? + } + + ReferentialAction::SetNull => { + emulate_set_null(graph, &rf, connector_ctx, model_to_update, parent_node, child_node)? + } + + x => panic!("Unsupported referential action emulation: {}", x), + }; } Ok(()) } + +fn extract_update_args(parent_node: &Node) -> &WriteArgs { + if let Node::Query(Query::Write(q)) = parent_node { + match q { + WriteQuery::UpdateRecord(one) => &one.args, + WriteQuery::UpdateManyRecords(many) => &many.args, + _ => panic!("Parent operation for update emulation is not an update."), + } + } else { + panic!("Parent operation for update emulation is not a query.") + } +} + +/// Inserts cascade emulations into the graph between `parent_node` and `child_node`. +/// `relation_field` is the relation field pointing to the model to be deleted. +/// Recurses into the deletion emulation to ensure that subsequent deletions are handled correctly as well. +/// +/// ```text +/// ┌ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ +/// Parent │ +/// │ (ids to update) ─ ┐ +/// ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ┘ +/// │ │ +/// ▼ +/// ┌────────────────────┐ │ +/// │Find Connected Model│ +/// ┌──│ (Cascade) │ │ +/// │ └────────────────────┘ +/// │ │ │ +/// │ ▼ +/// │┌ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ │ +/// │ ┌────────────────────┐ │ +/// ││ │ Insert onUpdate │ │ +/// │ │ emulation subtree │ │ +/// ││ │ for all relations │ │ +/// │ │ pointing to D. │ │ +/// ││ └────────────────────┘ │ +/// │ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ┘ +/// │ │ │ +/// │ ▼ +/// │ ┌────────────────────┐ │ +/// └─▶│ Update children │ +/// └────────────────────┘ │ +/// │ +/// ▼ │ +/// ┌ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ +/// Update │◀ ┘ +/// └ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ +/// ``` +pub fn emulate_on_update_cascade( + _graph: &mut QueryGraph, + _relation_field: &RelationFieldRef, // This is the field _on the other model_ for cascade. + _connector_ctx: &ConnectorContext, + _model: &ModelRef, + _parent_node: &NodeRef, + _child_node: &NodeRef, +) -> QueryGraphBuilderResult<()> { + Ok(()) +} diff --git a/query-engine/core/src/response_ir/internal.rs b/query-engine/core/src/response_ir/internal.rs index b8defaf11a6e..eed6fce17cca 100644 --- a/query-engine/core/src/response_ir/internal.rs +++ b/query-engine/core/src/response_ir/internal.rs @@ -289,7 +289,7 @@ fn serialize_objects( ) -> crate::Result { // The way our query execution works, we only need to look at nested + lists if we hit an object. // Move nested out of result for separate processing. 
- let nested = std::mem::replace(&mut result.nested, Vec::new()); + let nested = std::mem::take(&mut result.nested); // { <nested field name> -> { parent ID -> items } } let mut nested_mapping: HashMap = process_nested_results(nested, &typ)?; diff --git a/query-engine/core/src/schema/query_schema.rs b/query-engine/core/src/schema/query_schema.rs index d6a979a6abfd..ddcbf1e9e16a 100644 --- a/query-engine/core/src/schema/query_schema.rs +++ b/query-engine/core/src/schema/query_schema.rs @@ -1,4 +1,6 @@ use super::*; +use datamodel::common::preview_features::PreviewFeature; +use datamodel_connector::ConnectorCapability; use fmt::Debug; use prisma_models::{InternalDataModelRef, ModelRef}; use std::{borrow::Borrow, fmt}; @@ -18,16 +20,39 @@ use std::{borrow::Borrow, fmt}; /// Using a QuerySchema should never involve dealing with the strong references. #[derive(Debug)] pub struct QuerySchema { + /// Root query object (read queries). pub query: OutputTypeRef, + + /// Root mutation object (write queries). pub mutation: OutputTypeRef, - /// Stores all strong refs to the input object types. + /// Internal abstraction over the datamodel AST. + pub internal_data_model: InternalDataModelRef, + + /// Information about the connector this schema was built for. + pub context: ConnectorContext, + + /// Internal. Stores all strong Arc refs to the input object types. input_object_types: Vec<InputObjectTypeStrongRef>, - /// Stores all strong refs to the output object types. + /// Internal. Stores all strong Arc refs to the output object types. output_object_types: Vec<ObjectTypeStrongRef>, +} - pub internal_data_model: InternalDataModelRef, +/// Connector meta information, to be used in query execution if necessary. +#[derive(Debug)] +pub struct ConnectorContext { + /// Capabilities of the provider. + pub capabilities: Vec<ConnectorCapability>, + + /// Enabled preview features. + pub features: Vec<PreviewFeature>, +} + +impl ConnectorContext { + pub fn new(capabilities: Vec<ConnectorCapability>, features: Vec<PreviewFeature>) -> Self { + Self { capabilities, features } + } } impl QuerySchema { @@ -37,6 +62,8 @@ impl QuerySchema { input_object_types: Vec<InputObjectTypeStrongRef>, output_object_types: Vec<ObjectTypeStrongRef>, internal_data_model: InternalDataModelRef, + capabilities: Vec<ConnectorCapability>, + features: Vec<PreviewFeature>, ) -> Self { QuerySchema { query, @@ -44,6 +71,7 @@ impl QuerySchema { input_object_types, output_object_types, internal_data_model, + context: ConnectorContext::new(capabilities, features), } } @@ -76,6 +104,10 @@ impl QuerySchema { _ => unreachable!(), } } + + pub fn context(&self) -> &ConnectorContext { + &self.context + } } /// Designates a specific top-level operation on a corresponding model.
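A note on how the new `ConnectorContext` is consumed: the write-graph builders above only skip referential-action emulation when the connector both enforces foreign keys and has the `referentialActions` preview feature enabled; in every other case the emulation subtrees are inserted. A minimal sketch of that gate, using `Capability` and `Feature` as simplified stand-ins for the crate's `ConnectorCapability` and `PreviewFeature` enums (illustrative only, not the crate's actual definitions):

```rust
// Simplified stand-ins for ConnectorCapability / PreviewFeature.
#[derive(PartialEq)]
enum Capability {
    ForeignKeys,
}

#[derive(PartialEq)]
enum Feature {
    ReferentialActions,
}

struct ConnectorContext {
    capabilities: Vec<Capability>,
    features: Vec<Feature>,
}

impl ConnectorContext {
    /// Mirrors the early return in `insert_emulated_on_delete` /
    /// `insert_emulated_on_update`: emulation is skipped only when the
    /// database enforces FKs itself AND the preview flag is enabled.
    fn needs_emulation(&self) -> bool {
        let has_fks = self.capabilities.contains(&Capability::ForeignKeys);
        let has_ra_feature = self.features.contains(&Feature::ReferentialActions);
        !(has_fks && has_ra_feature)
    }
}

fn main() {
    // FK-enforcing connector without the preview flag: the old
    // restrict-only checks are still inserted into the graph.
    let ctx = ConnectorContext {
        capabilities: vec![Capability::ForeignKeys],
        features: vec![],
    };
    assert!(ctx.needs_emulation());

    // FK enforcement plus the preview flag: the database takes over.
    let ctx = ConnectorContext {
        capabilities: vec![Capability::ForeignKeys],
        features: vec![Feature::ReferentialActions],
    };
    assert!(!ctx.needs_emulation());
}
```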
diff --git a/query-engine/core/src/schema_builder/input_types/objects/update_one_objects.rs b/query-engine/core/src/schema_builder/input_types/objects/update_one_objects.rs index 3e5e6d28d426..5d94a61a1692 100644 --- a/query-engine/core/src/schema_builder/input_types/objects/update_one_objects.rs +++ b/query-engine/core/src/schema_builder/input_types/objects/update_one_objects.rs @@ -114,11 +114,24 @@ pub(super) fn scalar_input_fields_for_unchecked_update( vec![] }; + let id_fields = model.fields().id(); let scalar_fields: Vec = model .fields() .scalar() .into_iter() .filter(|sf| !linking_fields.contains(sf)) + .filter(|sf| { + if let Some(ref id_fields) = &id_fields { + // Exclude @@id or @id fields if not updatable + if id_fields.contains(sf) { + ctx.capabilities.contains(ConnectorCapability::UpdateableId) + } else { + true + } + } else { + true + } + }) .collect(); input_fields::scalar_input_fields( diff --git a/query-engine/core/src/schema_builder/mod.rs b/query-engine/core/src/schema_builder/mod.rs index 152c5afad7fe..4d2841ebcf1b 100644 --- a/query-engine/core/src/schema_builder/mod.rs +++ b/query-engine/core/src/schema_builder/mod.rs @@ -164,8 +164,9 @@ pub fn build( internal_data_model, enable_raw_queries, capabilities, - preview_features, + preview_features.clone(), ); + output_types::output_objects::initialize_model_object_type_cache(&mut ctx); let (query_type, query_object_ref) = output_types::query_type::build(&mut ctx); @@ -185,6 +186,8 @@ pub fn build( input_objects, output_objects, ctx.internal_data_model, + ctx.capabilities.capabilities, + preview_features, ) } diff --git a/query-engine/core/src/schema_builder/output_types/mutation_type.rs b/query-engine/core/src/schema_builder/output_types/mutation_type.rs index 18a3704e67e5..ccf2e1a1184b 100644 --- a/query-engine/core/src/schema_builder/output_types/mutation_type.rs +++ b/query-engine/core/src/schema_builder/output_types/mutation_type.rs @@ -48,8 +48,8 @@ pub(crate) fn build(ctx: &mut BuilderContext) -> (OutputType, ObjectTypeStrongRe // implementation note: these need to be in the same function, because these vecs interact: the create inputs will enqueue update inputs, and vice versa. #[tracing::instrument(skip(ctx))] fn create_nested_inputs(ctx: &mut BuilderContext) { - let mut nested_create_inputs_queue = std::mem::replace(&mut ctx.nested_create_inputs_queue, Vec::new()); - let mut nested_update_inputs_queue = std::mem::replace(&mut ctx.nested_update_inputs_queue, Vec::new()); + let mut nested_create_inputs_queue = std::mem::take(&mut ctx.nested_create_inputs_queue); + let mut nested_update_inputs_queue = std::mem::take(&mut ctx.nested_update_inputs_queue); while !(nested_create_inputs_queue.is_empty() && nested_update_inputs_queue.is_empty()) { // Create inputs. 
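Several hunks in this diff (read.rs, query_graph/mod.rs, response_ir/internal.rs, mutation_type.rs, dmmf/schema/mod.rs) replace `std::mem::replace(&mut v, Vec::new())` with `std::mem::take(&mut v)`. The two are equivalent for any `T: Default`; `take` simply spells the default value implicitly, which is presumably the motivation here (it is also what clippy's `mem_replace_with_default` lint suggests). A self-contained illustration:

```rust
fn main() {
    let mut queue = vec![1, 2, 3];

    // Old form: move the vector out, explicitly leaving an empty Vec behind.
    let drained = std::mem::replace(&mut queue, Vec::new());
    assert_eq!(drained, vec![1, 2, 3]);
    assert!(queue.is_empty());

    // New form: identical behavior, leaving T::default() behind.
    queue = vec![4, 5];
    let drained = std::mem::take(&mut queue);
    assert_eq!(drained, vec![4, 5]);
    assert!(queue.is_empty());
}
```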
diff --git a/query-engine/query-engine/src/context.rs b/query-engine/query-engine/src/context.rs index b07c024b0f6f..b584b9aef06b 100644 --- a/query-engine/query-engine/src/context.rs +++ b/query-engine/query-engine/src/context.rs @@ -58,7 +58,6 @@ impl PrismaContext { let url = data_source.load_url(|key| env::var(key).ok())?; // Load executor - let preview_features: Vec<_> = config.preview_features().cloned().collect(); let (db_name, executor) = exec_loader::load(&data_source, &preview_features, &url).await?; diff --git a/query-engine/query-engine/src/tests/test_api.rs b/query-engine/query-engine/src/tests/test_api.rs index 11bfc49545a3..d1f8bb46b57b 100644 --- a/query-engine/query-engine/src/tests/test_api.rs +++ b/query-engine/query-engine/src/tests/test_api.rs @@ -104,22 +104,34 @@ impl TestApi { pub(super) async fn mysql_migration_connector(args: &TestApiArgs) -> (SqlMigrationConnector, String) { let (_db_name, url) = args.create_mysql_database().await; - (SqlMigrationConnector::new(&url, None).await.unwrap(), url) + ( + SqlMigrationConnector::new(&url, BitFlags::all(), None).await.unwrap(), + url, + ) } pub(super) async fn mssql_migration_connector(db_name: &str, args: &TestApiArgs) -> (SqlMigrationConnector, String) { let (_, url) = test_setup::init_mssql_database(args.database_url(), db_name) .await .unwrap(); - (SqlMigrationConnector::new(&url, None).await.unwrap(), url) + ( + SqlMigrationConnector::new(&url, BitFlags::all(), None).await.unwrap(), + url, + ) } pub(super) async fn postgres_migration_connector(args: &TestApiArgs) -> (SqlMigrationConnector, String) { let (_db_name, _, url) = args.create_postgres_database().await; - (SqlMigrationConnector::new(&url, None).await.unwrap(), url) + ( + SqlMigrationConnector::new(&url, BitFlags::all(), None).await.unwrap(), + url, + ) } pub(super) async fn sqlite_migration_connector(db_name: &str) -> (SqlMigrationConnector, String) { let url = sqlite_test_url(db_name); - (SqlMigrationConnector::new(&url, None).await.unwrap(), url) + ( + SqlMigrationConnector::new(&url, BitFlags::all(), None).await.unwrap(), + url, + ) } diff --git a/query-engine/request-handlers/src/dmmf/schema/mod.rs b/query-engine/request-handlers/src/dmmf/schema/mod.rs index b6bc1537cd79..b2c9d5d4fa46 100644 --- a/query-engine/request-handlers/src/dmmf/schema/mod.rs +++ b/query-engine/request-handlers/src/dmmf/schema/mod.rs @@ -29,7 +29,7 @@ impl QuerySchemaRenderer<(DmmfSchema, DmmfOperationMappings)> for DmmfQuerySchem ctx.mark_to_be_rendered(&query_schema); while !ctx.next_pass.is_empty() { - let renderers = std::mem::replace(&mut ctx.next_pass, Vec::new()); + let renderers = std::mem::take(&mut ctx.next_pass); for renderer in renderers { renderer.render(&mut ctx) diff --git a/query-engine/request-handlers/src/tests/dmmf/aggregation.rs b/query-engine/request-handlers/src/tests/dmmf/aggregation.rs index 4dda502899d1..ad8b48e751c0 100644 --- a/query-engine/request-handlers/src/tests/dmmf/aggregation.rs +++ b/query-engine/request-handlers/src/tests/dmmf/aggregation.rs @@ -38,27 +38,27 @@ fn nullable_fields_should_be_nullable_in_group_by_output_types() { parent_type.name.as_str(), ) { (TypeLocation::Scalar, false, _) => match field.name.as_str() { - "required_id" => assert_eq!(is_nullable, false), - "optional_string" => assert_eq!(is_nullable, true), - "required_string" => assert_eq!(is_nullable, false), - "optional_int" => assert_eq!(is_nullable, true), - "required_int" => assert_eq!(is_nullable, false), + "required_id" => assert!(!is_nullable), + "optional_string" => 
assert!(is_nullable), + "required_string" => assert!(!is_nullable), + "optional_int" => assert!(is_nullable), + "required_int" => assert!(!is_nullable), _ => (), }, (TypeLocation::Scalar, true, "BlogCountAggregateOutputType") => match field.name.as_str() { - "required_id" => assert_eq!(is_nullable, false), - "optional_string" => assert_eq!(is_nullable, false), - "required_string" => assert_eq!(is_nullable, false), - "optional_int" => assert_eq!(is_nullable, false), - "required_int" => assert_eq!(is_nullable, false), + "required_id" => assert!(!is_nullable), + "optional_string" => assert!(!is_nullable), + "required_string" => assert!(!is_nullable), + "optional_int" => assert!(!is_nullable), + "required_int" => assert!(!is_nullable), _ => (), }, (TypeLocation::Scalar, true, _) => match field.name.as_str() { - "required_id" => assert_eq!(is_nullable, true), - "optional_string" => assert_eq!(is_nullable, true), - "required_string" => assert_eq!(is_nullable, true), - "optional_int" => assert_eq!(is_nullable, true), - "required_int" => assert_eq!(is_nullable, true), + "required_id" => assert!(is_nullable), + "optional_string" => assert!(is_nullable), + "required_string" => assert!(is_nullable), + "optional_int" => assert!(is_nullable), + "required_int" => assert!(is_nullable), _ => (), }, _ => (), diff --git a/query-engine/request-handlers/src/tests/dmmf/ignored.rs b/query-engine/request-handlers/src/tests/dmmf/ignored.rs index 6ca1302b3a35..5f37a434e73b 100644 --- a/query-engine/request-handlers/src/tests/dmmf/ignored.rs +++ b/query-engine/request-handlers/src/tests/dmmf/ignored.rs @@ -351,7 +351,7 @@ fn ignored_models_should_be_filtered() { let mutation = find_output_type(&dmmf, PRISMA_NAMESPACE, "Mutation"); let has_no_inputs = dmmf.schema.input_object_types.get(PRISMA_NAMESPACE).is_none(); - assert_eq!(has_no_inputs, true); + assert!(has_no_inputs); assert_eq!(query.fields.len(), 0); assert_eq!(mutation.fields.len(), 0); }
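The two test hunks above are a mechanical cleanup in the same spirit: `assert_eq!(x, true)` and `assert_eq!(x, false)` become `assert!(x)` and `assert!(!x)`, the form clippy's `bool_assert_comparison` lint asks for. A tiny illustration:

```rust
fn main() {
    let has_no_inputs = true;

    // Before: comparing a bool against a bool literal.
    assert_eq!(has_no_inputs, true);

    // After: assert the condition directly; intent and failure behavior are the same.
    assert!(has_no_inputs);
}
```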