diff --git a/.rustfmt.toml b/.rustfmt.toml
index 32a9786fa1c4..261644ce7835 100644
--- a/.rustfmt.toml
+++ b/.rustfmt.toml
@@ -1 +1,2 @@
 edition = "2018"
+max_width = 120
diff --git a/migration-engine/connectors/migration-connector/src/destructive_changes_checker.rs b/migration-engine/connectors/migration-connector/src/destructive_changes_checker.rs
index e779ea36d868..37d01dbbdaae 100644
--- a/migration-engine/connectors/migration-connector/src/destructive_changes_checker.rs
+++ b/migration-engine/connectors/migration-connector/src/destructive_changes_checker.rs
@@ -44,6 +44,14 @@ impl DestructiveChangeDiagnostics {
     pub fn has_warnings(&self) -> bool {
         !self.warnings.is_empty()
     }
+
+    pub fn warn_about_unexecutable_migrations(&mut self) {
+        for unexecutable in &self.unexecutable_migrations {
+            self.warnings.push(MigrationWarning {
+                description: unexecutable.description.clone(),
+            });
+        }
+    }
 }
 
 /// A warning emitted by [DestructiveChangesChecker](trait.DestructiveChangesChecker.html). Warnings will
diff --git a/migration-engine/connectors/sql-migration-connector/src/sql_destructive_changes_checker.rs b/migration-engine/connectors/sql-migration-connector/src/sql_destructive_changes_checker.rs
index 764b6d666fe5..f683aaabd00f 100644
--- a/migration-engine/connectors/sql-migration-connector/src/sql_destructive_changes_checker.rs
+++ b/migration-engine/connectors/sql-migration-connector/src/sql_destructive_changes_checker.rs
@@ -349,6 +349,9 @@ impl SqlDestructiveChangesChecker<'_> {
             }
         }
 
+        // Temporary, for better reporting.
+        diagnostics.warn_about_unexecutable_migrations();
+
         Ok(diagnostics)
     }
 }
diff --git a/migration-engine/connectors/sql-migration-connector/src/sql_renderer/mysql_renderer.rs b/migration-engine/connectors/sql-migration-connector/src/sql_renderer/mysql_renderer.rs
index eb4b6ea9cdad..91368b8e1fc5 100644
--- a/migration-engine/connectors/sql-migration-connector/src/sql_renderer/mysql_renderer.rs
+++ b/migration-engine/connectors/sql-migration-connector/src/sql_renderer/mysql_renderer.rs
@@ -1,7 +1,7 @@
 use super::common::*;
 use crate::{sql_schema_helpers::ColumnRef, SqlFamily};
 use sql_schema_describer::*;
-use std::fmt::Write as _;
+use std::{borrow::Cow, fmt::Write as _};
 
 const VARCHAR_LENGTH_PREFIX: &str = "(191)";
 
@@ -58,15 +58,22 @@ impl super::SqlRenderer for MySqlRenderer {
 }
 
 impl MySqlRenderer {
-    fn render_column_type(&self, column: &ColumnRef<'_>) -> anyhow::Result<String> {
+    fn render_column_type(&self, column: &ColumnRef<'_>) -> anyhow::Result<Cow<'static, str>> {
         match &column.column_type().family {
-            ColumnTypeFamily::Boolean => Ok(format!("boolean")),
-            ColumnTypeFamily::DateTime => Ok(format!("datetime(3)")),
-            ColumnTypeFamily::Float => Ok(format!("Decimal(65,30)")),
-            ColumnTypeFamily::Int => Ok(format!("int")),
+            ColumnTypeFamily::Boolean => Ok("boolean".into()),
+            ColumnTypeFamily::DateTime => {
+                // CURRENT_TIMESTAMP has at most second precision.
+                if let Some(DefaultValue::NOW) = column.default() {
+                    return Ok("datetime".into());
+                } else {
+                    Ok("datetime(3)".into())
+                }
+            }
+            ColumnTypeFamily::Float => Ok("Decimal(65,30)".into()),
+            ColumnTypeFamily::Int => Ok("int".into()),
             // we use varchar right now as mediumtext doesn't allow default values
             // a bigger length would not allow to use such a column as primary key
-            ColumnTypeFamily::String => Ok(format!("varchar{}", VARCHAR_LENGTH_PREFIX)),
+            ColumnTypeFamily::String => Ok(format!("varchar{}", VARCHAR_LENGTH_PREFIX).into()),
             ColumnTypeFamily::Enum(enum_name) => {
                 let r#enum = column
                     .schema()
@@ -75,7 +82,7 @@ impl MySqlRenderer {
 
                 let variants: String = r#enum.values.iter().map(quoted_string).join(", ");
 
-                Ok(format!("ENUM({})", variants))
+                Ok(format!("ENUM({})", variants).into())
             }
             x => unimplemented!("{:?} not handled yet", x),
         }
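The switch from `String` to `Cow<'static, str>` above means the fixed type names ("boolean", "int", …) are returned as borrowed statics and only the computed `varchar`/`ENUM` strings allocate. A minimal, self-contained sketch of the pattern — `render_type` and its input are illustrative, not part of this codebase:

use std::borrow::Cow;

// Static type names are borrowed for free; only computed ones allocate.
fn render_type(enum_variants: Option<&[&str]>) -> Cow<'static, str> {
    match enum_variants {
        None => Cow::Borrowed("int"),
        Some(variants) => Cow::Owned(format!("ENUM({})", variants.join(", "))),
    }
}

fn main() {
    assert_eq!(render_type(None), "int");
    assert_eq!(render_type(Some(&["HAPPY", "HUNGRY"])), "ENUM(HAPPY, HUNGRY)");
}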
diff --git a/migration-engine/connectors/sql-migration-connector/src/sql_schema_calculator.rs b/migration-engine/connectors/sql-migration-connector/src/sql_schema_calculator.rs
index 87de876ae868..1bf18e1916f9 100644
--- a/migration-engine/connectors/sql-migration-connector/src/sql_schema_calculator.rs
+++ b/migration-engine/connectors/sql-migration-connector/src/sql_schema_calculator.rs
@@ -1,7 +1,6 @@
 mod datamodel_helpers;
 
 use crate::{error::SqlError, sql_renderer::IteratorJoin, DatabaseInfo, SqlResult};
-use chrono::*;
 use datamodel::common::*;
 use datamodel::*;
 use datamodel_helpers::{FieldRef, ModelRef, TypeRef};
@@ -24,12 +23,17 @@ impl<'a> SqlSchemaCalculator<'a> {
     }
 
     fn calculate_internal(&self) -> SqlResult<sql::SqlSchema> {
-        let mut tables = Vec::new();
-        let model_tables_without_inline_relations = self.calculate_model_tables()?;
-        let mut model_tables = self.add_inline_relations_to_model_tables(model_tables_without_inline_relations)?;
+        let mut tables = Vec::with_capacity(self.data_model.models.len());
+        let model_tables_without_inline_relations = self.calculate_model_tables();
+
+        for result in model_tables_without_inline_relations {
+            let (model, mut table) = result?;
+            self.add_inline_relations_to_model_tables(model, &mut table);
+            tables.push(table);
+        }
+
         let mut relation_tables = self.calculate_relation_tables()?;
-        tables.append(&mut model_tables);
         tables.append(&mut relation_tables);
 
         // guarantee same sorting as in the sql-schema-describer
@@ -89,204 +93,130 @@ impl<'a> SqlSchemaCalculator<'a> {
         }
     }
 
-    fn calculate_model_tables(&self) -> SqlResult<Vec<ModelTable>> {
-        datamodel_helpers::walk_models(self.data_model)
-            .map(|model| {
-                let columns = model
-                    .fields()
-                    .flat_map(|f| match f.field_type() {
-                        TypeRef::Base(_) => Some(sql::Column {
+    fn calculate_model_tables<'iter>(
+        &'iter self,
+    ) -> impl Iterator<Item = SqlResult<(ModelRef<'iter>, sql::Table)>> + 'iter {
+        datamodel_helpers::walk_models(self.data_model).map(move |model| {
+            let columns = model
+                .fields()
+                .flat_map(|f| match f.field_type() {
+                    TypeRef::Base(_) => Some(sql::Column {
+                        name: f.db_name().to_owned(),
+                        tpe: column_type(&f),
+                        default: migration_value_new(&f),
+                        auto_increment: matches!(
+                            f.default_value(),
+                            Some(DefaultValue::Expression(ValueGenerator {
+                                generator: ValueGeneratorFn::Autoincrement,
+                                ..
+                            }))
+                        ),
+                    }),
+                    TypeRef::Enum(r#enum) => {
+                        let enum_db_name = r#enum.db_name();
+                        Some(sql::Column {
                             name: f.db_name().to_owned(),
-                            tpe: column_type(&f),
+                            tpe: enum_column_type(&f, &self.database_info, enum_db_name),
                             default: migration_value_new(&f),
-                            auto_increment: {
-                                match f.default_value() {
-                                    Some(DefaultValue::Expression(ValueGenerator {
-                                        name: _,
-                                        args: _,
-                                        generator: ValueGeneratorFn::Autoincrement,
-                                    })) => true,
-                                    _ => false,
-                                }
-                            },
-                        }),
-                        TypeRef::Enum(r#enum) => {
-                            let enum_db_name = r#enum.db_name();
-                            Some(sql::Column {
-                                name: f.db_name().to_owned(),
-                                tpe: enum_column_type(&f, &self.database_info, enum_db_name),
-                                default: migration_value_new(&f),
-                                auto_increment: false,
-                            })
-                        }
-                        _ => None,
+                            auto_increment: false,
+                        })
+                    }
+                    _ => None,
+                })
+                .collect();
+
+            let primary_key = sql::PrimaryKey {
+                columns: model
+                    .id_fields()
+                    .flat_map(|field| {
+                        field
+                            .data_source_fields()
+                            .into_iter()
+                            .map(|s| s.name.clone())
+                            .collect::<Vec<_>>()
+                            .into_iter()
                     })
+                    .collect(),
+                sequence: None,
+            };
+
+            let single_field_indexes = model.fields().filter_map(|f| {
+                if f.is_unique() {
+                    Some(sql::Index {
+                        name: format!("{}.{}", &model.db_name(), &f.db_name()),
+                        columns: f.data_source_fields().iter().map(|f| f.name.clone()).collect(),
+                        tpe: sql::IndexType::Unique,
+                    })
+                } else {
+                    None
+                }
+            });
+
+            let multiple_field_indexes = model.indexes().map(|index_definition: &IndexDefinition| {
+                let referenced_fields: Vec<FieldRef> = index_definition
+                    .fields
+                    .iter()
+                    .map(|field_name| model.find_field(field_name).expect("Unknown field in index directive."))
                     .collect();
 
-                let primary_key = sql::PrimaryKey {
-                    columns: model
-                        .id_fields()
-                        .flat_map(|field| {
-                            field
-                                .data_source_fields()
-                                .into_iter()
-                                .map(|s| s.name.clone())
-                                .collect::<Vec<_>>()
-                                .into_iter()
-                        })
+                sql::Index {
+                    name: index_definition.name.clone().unwrap_or_else(|| {
+                        format!(
+                            "{}.{}",
+                            &model.db_name(),
+                            referenced_fields.iter().map(|field| field.db_name()).join("_")
+                        )
+                    }),
+                    // The model index definition uses the model field names, but the SQL Index
+                    // wants the column names.
+                    columns: referenced_fields
+                        .iter()
+                        .flat_map(|field| field.data_source_fields().into_iter().map(|f| f.name.clone()))
                         .collect(),
-                    sequence: None,
-                };
-
-                let single_field_indexes = model.fields().filter_map(|f| {
-                    if f.is_unique() {
-                        Some(sql::Index {
-                            name: format!("{}.{}", &model.db_name(), &f.db_name()),
-                            columns: f.data_source_fields().iter().map(|f| f.name.clone()).collect(),
-                            tpe: sql::IndexType::Unique,
-                        })
+                    tpe: if index_definition.tpe == IndexType::Unique {
+                        sql::IndexType::Unique
                     } else {
-                        None
-                    }
-                });
+                        sql::IndexType::Normal
+                    },
+                }
+            });
 
-                let multiple_field_indexes = model.indexes().map(|index_definition: &IndexDefinition| {
-                    let referenced_fields: Vec<FieldRef> = index_definition
-                        .fields
-                        .iter()
-                        .map(|field_name| model.find_field(field_name).expect("Unknown field in index directive."))
-                        .collect();
-
-                    sql::Index {
-                        name: index_definition.name.clone().unwrap_or_else(|| {
-                            format!(
-                                "{}.{}",
-                                &model.db_name(),
-                                referenced_fields.iter().map(|field| field.db_name()).join("_")
-                            )
-                        }),
-                        // The model index definition uses the model field names, but the SQL Index
-                        // wants the column names.
-                        columns: referenced_fields
-                            .iter()
-                            .flat_map(|field| field.data_source_fields().into_iter().map(|f| f.name.clone()))
-                            .collect(),
-                        tpe: if index_definition.tpe == IndexType::Unique {
-                            sql::IndexType::Unique
-                        } else {
-                            sql::IndexType::Normal
-                        },
-                    }
-                });
-
-                let table = sql::Table {
-                    name: model.database_name().to_owned(),
-                    columns,
-                    indices: single_field_indexes.chain(multiple_field_indexes).collect(),
-                    primary_key: Some(primary_key),
-                    foreign_keys: Vec::new(),
-                };
+            let table = sql::Table {
+                name: model.database_name().to_owned(),
+                columns,
+                indices: single_field_indexes.chain(multiple_field_indexes).collect(),
+                primary_key: Some(primary_key),
+                foreign_keys: Vec::new(),
+            };
 
-                Ok(ModelTable {
-                    model: model.model().clone(),
-                    table,
-                })
-            })
-            .collect()
+            Ok((model, table))
+        })
     }
 
-    fn add_inline_relations_to_model_tables(&self, model_tables: Vec<ModelTable>) -> SqlResult<Vec<sql::Table>> {
-        let mut result = Vec::new();
-        let relations = self.calculate_relations();
-        for mut model_table in model_tables {
-            for relation in relations.iter() {
-                match &relation.manifestation {
-                    TempManifestationHolder::Inline {
-                        in_table_of_model,
-                        field: dml_field,
-                        referenced_fields,
-                    } if in_table_of_model == &model_table.model.name => {
-                        let (model, related_model) = if model_table.model == relation.model_a {
-                            (&relation.model_a, &relation.model_b)
-                        } else {
-                            (&relation.model_b, &relation.model_a)
-                        };
-
-                        let (model, related_model) = (
-                            ModelRef {
-                                model: &model,
-                                datamodel: self.data_model,
-                            },
-                            ModelRef {
-                                model: &related_model,
-                                datamodel: self.data_model,
-                            },
-                        );
-
-                        let field = model.fields().find(|f| &f.name() == &dml_field.name).unwrap();
-
-                        let referenced_fields: Vec<FieldRef> = if referenced_fields.is_empty() {
-                            first_unique_criterion(related_model).map_err(SqlError::Generic)?
-                        } else {
-                            let fields: Vec<_> = related_model
-                                .fields()
-                                .filter(|field| {
-                                    referenced_fields
-                                        .iter()
-                                        .any(|referenced| referenced.as_str() == field.name())
-                                })
-                                .collect();
-
-                            if fields.len() != referenced_fields.len() {
-                                return Err(crate::SqlError::Generic(anyhow::anyhow!(
-                                    "References to unknown fields {referenced_fields:?} on `{model_name}`",
-                                    model_name = related_model.name(),
-                                    referenced_fields = referenced_fields,
-                                )));
-                            }
-
-                            fields
-                        };
-
-                        let columns: Vec<sql::Column> = field
-                            .field
-                            .data_source_fields
-                            .iter()
-                            .map(|dsf| sql::Column {
-                                name: dsf.name.clone(),
-                                tpe: column_type_for_scalar_type(&dsf.field_type, column_arity(dsf.arity)),
-                                default: None,
-                                auto_increment: false,
-                            })
-                            .collect();
-
-                        let foreign_key = sql::ForeignKey {
-                            constraint_name: None,
-                            columns: columns.iter().map(|col| col.name.to_owned()).collect(),
-                            referenced_table: related_model.db_name().to_owned(),
-                            referenced_columns: referenced_fields
-                                .iter()
-                                .flat_map(|field| field.data_source_fields().into_iter().map(|f| f.name.clone()))
-                                .collect(),
-                            on_delete_action: match column_arity(field.arity()) {
-                                ColumnArity::Required => sql::ForeignKeyAction::Cascade,
-                                _ => sql::ForeignKeyAction::SetNull,
-                            },
-                        };
-
-                        if relation.is_one_to_one() {
-                            add_one_to_one_relation_unique_index(&mut model_table.table, &columns)
-                        }
-
-                        model_table.table.columns.extend(columns);
-                        model_table.table.foreign_keys.push(foreign_key);
-                    }
-                    _ => {}
-                }
+    fn add_inline_relations_to_model_tables(&self, model: ModelRef<'a>, table: &mut sql::Table) {
+        let relation_fields = model
+            .fields()
+            .filter_map(|field| field.as_relation_field())
+            .filter(|relation_field| !relation_field.is_virtual());
+
+        for relation_field in relation_fields {
+            let fk_columns: Vec<String> = relation_field.referencing_columns().map(String::from).collect();
+
+            // Optional unique index for 1:1 relations.
+            if relation_field.is_one_to_one() {
+                add_one_to_one_relation_unique_index(table, &fk_columns);
+            }
+
+            // Foreign key
+            {
+                let fk = sql::ForeignKey {
+                    constraint_name: None,
+                    columns: fk_columns,
+                    referenced_table: relation_field.referenced_table_name().to_owned(),
+                    referenced_columns: relation_field.referenced_columns().map(String::from).collect(),
+                    on_delete_action: match column_arity(relation_field.arity()) {
+                        ColumnArity::Required => sql::ForeignKeyAction::Cascade,
+                        _ => sql::ForeignKeyAction::SetNull,
+                    },
+                };
+
+                table.foreign_keys.push(fk);
             }
-            result.push(model_table.table);
         }
-        Ok(result)
     }
 
     fn calculate_relation_tables(&self) -> SqlResult<Vec<sql::Table>> {
@@ -294,14 +224,9 @@ impl<'a> SqlSchemaCalculator<'a> {
         for relation in self.calculate_relations().iter() {
             match &relation.manifestation {
                 TempManifestationHolder::Table => {
-                    let model_a = ModelRef {
-                        datamodel: self.data_model,
-                        model: &relation.model_a,
-                    };
-                    let model_b = ModelRef {
-                        datamodel: self.data_model,
-                        model: &relation.model_b,
-                    };
+                    let model_a = ModelRef::new(&relation.model_a, self.data_model);
+                    let model_b = ModelRef::new(&relation.model_b, self.data_model);
+
                     let a_columns = relation_table_columns(&model_a, relation.model_a_column());
                     let b_columns = relation_table_columns(&model_b, relation.model_b_column());
 
@@ -412,20 +337,15 @@ fn relation_table_columns(referenced_model: &ModelRef<'_>, reference_field_name:
     }
 }
 
-#[derive(PartialEq, Debug)]
-struct ModelTable {
-    table: sql::Table,
-    model: Model,
-}
-
 fn migration_value_new(field: &FieldRef<'_>) -> Option<sql_schema_describer::DefaultValue> {
     let value = match (&field.default_value(), field.arity()) {
         (Some(df), _) => match df {
             dml::DefaultValue::Single(s) => s.clone(),
-            dml::DefaultValue::Expression(_) => default_migration_value(&field.field_type()),
+            dml::DefaultValue::Expression(expression) if expression.name == "now" && expression.args.is_empty() => {
+                return Some(sql_schema_describer::DefaultValue::NOW)
+            }
+            dml::DefaultValue::Expression(_) => return None,
         },
-        // This is a temporary hack until we can report impossible unexecutable migrations.
-        (None, FieldArity::Required) => default_migration_value(&field.field_type()),
         (None, _) => return None,
     };
 
@@ -469,33 +389,6 @@ fn migration_value_new(field: &FieldRef<'_>) -> Option<sql_schema_describer::DefaultValue>
     }
 }
 
-fn default_migration_value(field_type: &TypeRef<'_>) -> ScalarValue {
-    match field_type {
-        TypeRef::Base(ScalarType::Boolean) => ScalarValue::Boolean(false),
-        TypeRef::Base(ScalarType::Int) => ScalarValue::Int(0),
-        TypeRef::Base(ScalarType::Float) => ScalarValue::Float(0.0),
-        TypeRef::Base(ScalarType::String) => ScalarValue::String("".to_string()),
-        TypeRef::Base(ScalarType::Decimal) => ScalarValue::Decimal(0.0),
-        TypeRef::Base(ScalarType::DateTime) => {
-            let naive = NaiveDateTime::from_timestamp(0, 0);
-            let datetime: DateTime<Utc> = DateTime::from_utc(naive, Utc);
-            ScalarValue::DateTime(datetime)
-        }
-        TypeRef::Enum(inum) => {
-            let first_value = inum
-                .values()
-                .iter()
-                .next()
-                .ok_or_else(|| anyhow::anyhow!("Enum {} did not contain any values.", inum.name()))
-                .unwrap()
-                .final_database_name();
-
-            ScalarValue::String(first_value.to_string())
-        }
-        _ => unimplemented!("this functions must only be called for scalar fields"),
-    }
-}
-
 fn enum_column_type(field: &FieldRef<'_>, database_info: &DatabaseInfo, db_name: &str) -> sql::ColumnType {
     let arity = column_arity(field.arity());
     match database_info.sql_family() {
@@ -543,12 +436,11 @@ fn column_type_for_scalar_type(scalar_type: &ScalarType, column_arity: ColumnAri
     }
 }
 
-fn add_one_to_one_relation_unique_index(table: &mut sql::Table, columns: &Vec<sql::Column>) {
-    let column_names: Vec<String> = columns.iter().map(|c| c.name.to_owned()).collect();
+fn add_one_to_one_relation_unique_index(table: &mut sql::Table, column_names: &[String]) {
    let columns_suffix = column_names.join("_");
     let index = sql::Index {
         name: format!("{}_{}", table.name, columns_suffix),
-        columns: column_names,
+        columns: column_names.to_owned(),
         tpe: sql::IndexType::Unique,
     };
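The key behavioral change in this file is that `migration_value_new` no longer invents defaults: only a bare `now()` becomes a SQL-level default (`DefaultValue::NOW`), every other expression yields no default. A minimal, hypothetical sketch of that mapping, with simplified stand-ins for the dml and SQL default types (none of these names are from the codebase):

// Simplified stand-ins for dml::DefaultValue and sql_schema_describer::DefaultValue.
#[derive(Debug, PartialEq)]
enum DmlDefault {
    Single(i64),
    Expression { name: String, args: Vec<String> },
}

#[derive(Debug, PartialEq)]
enum SqlDefault {
    Value(i64),
    Now,
}

// Mirrors the shape of `migration_value_new`: only a bare `now()` maps to a
// SQL default; other generated values (cuid(), uuid(), …) produce none.
fn to_sql_default(default: &DmlDefault) -> Option<SqlDefault> {
    match default {
        DmlDefault::Single(v) => Some(SqlDefault::Value(*v)),
        DmlDefault::Expression { name, args } if name == "now" && args.is_empty() => Some(SqlDefault::Now),
        DmlDefault::Expression { .. } => None,
    }
}

fn main() {
    let now = DmlDefault::Expression { name: "now".into(), args: vec![] };
    assert_eq!(to_sql_default(&now), Some(SqlDefault::Now));

    let cuid = DmlDefault::Expression { name: "cuid".into(), args: vec![] };
    assert_eq!(to_sql_default(&cuid), None);
}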
diff --git a/migration-engine/connectors/sql-migration-connector/src/sql_schema_calculator/datamodel_helpers.rs b/migration-engine/connectors/sql-migration-connector/src/sql_schema_calculator/datamodel_helpers.rs
index fef75ae4a77a..389697a61042 100644
--- a/migration-engine/connectors/sql-migration-connector/src/sql_schema_calculator/datamodel_helpers.rs
+++ b/migration-engine/connectors/sql-migration-connector/src/sql_schema_calculator/datamodel_helpers.rs
@@ -1,16 +1,13 @@
 use datamodel::{
     dml::{
-        Datamodel, DefaultValue, Enum, Field, FieldArity, FieldType, IndexDefinition, Model,
-        ScalarType, WithDatabaseName,
+        Datamodel, DefaultValue, Enum, Field, FieldArity, FieldType, IndexDefinition, Model, ScalarType,
+        WithDatabaseName,
     },
-    DataSourceField, EnumValue,
+    DataSourceField, EnumValue, RelationInfo,
 };
 
 pub(crate) fn walk_models<'a>(datamodel: &'a Datamodel) -> impl Iterator<Item = ModelRef<'a>> + 'a {
-    datamodel
-        .models
-        .iter()
-        .map(move |model| ModelRef { datamodel, model })
+    datamodel.models.iter().map(move |model| ModelRef { datamodel, model })
 }
 
 /// Iterator to walk all the fields in the schema, associating them with their parent model.
@@ -26,22 +23,21 @@ pub(super) fn walk_fields<'a>(datamodel: &'a Datamodel) -> impl Iterator<Item = FieldRef<'a>> + 'a {
 
 pub(super) struct ModelRef<'a> {
-    pub(crate) datamodel: &'a Datamodel,
-    pub(crate) model: &'a Model,
+    datamodel: &'a Datamodel,
+    model: &'a Model,
 }
 
 impl<'a> ModelRef<'a> {
+    pub(crate) fn new(model: &'a Model, datamodel: &'a Datamodel) -> Self {
+        ModelRef { datamodel, model }
+    }
+
     pub(super) fn database_name(&self) -> &'a str {
-        self.model
-            .database_name
-            .as_ref()
-            .unwrap_or(&self.model.name)
+        self.model.database_name.as_ref().unwrap_or(&self.model.name)
     }
 
     pub(super) fn db_name(&self) -> &str {
-        self.model
-            .single_database_name()
-            .unwrap_or_else(|| &self.model.name)
+        self.model.single_database_name().unwrap_or_else(|| &self.model.name)
     }
 
     pub(super) fn fields<'b>(&'b self) -> impl Iterator<Item = FieldRef<'a>> + 'b {
@@ -82,11 +78,12 @@ impl<'a> ModelRef<'a> {
             .fields()
             .filter(|field| field.is_id)
             // Compound id models
-            .chain(self.model.id_fields.iter().filter_map(move |field_name| {
+            .chain(
                 self.model
-                    .fields()
-                    .find(|field| field.name.as_str() == field_name)
-            }))
+                    .id_fields
+                    .iter()
+                    .filter_map(move |field_name| self.model.fields().find(|field| field.name.as_str() == field_name)),
+            )
             .map(move |field| FieldRef {
                 datamodel: self.datamodel,
                 model: self.model,
@@ -107,10 +104,11 @@ impl<'a> ModelRef<'a> {
     }
 }
 
+#[derive(Debug, Clone, Copy)]
 pub(super) struct FieldRef<'a> {
     datamodel: &'a Datamodel,
     model: &'a Model,
-    pub field: &'a Field,
+    field: &'a Field,
 }
 
 impl<'a> FieldRef<'a> {
@@ -122,7 +120,7 @@ impl<'a> FieldRef<'a> {
         self.field.single_database_name().unwrap_or(self.name())
     }
 
-    pub(super) fn data_source_fields(&self) -> &[DataSourceField] {
+    pub(super) fn data_source_fields(&self) -> &'a [DataSourceField] {
         &self.field.data_source_fields
     }
 
@@ -141,6 +139,16 @@ impl<'a> FieldRef<'a> {
         }
     }
 
+    pub(super) fn as_relation_field(&self) -> Option<RelationFieldRef<'a>> {
+        match &self.field.field_type {
+            FieldType::Relation(relation_info) => Some(RelationFieldRef {
+                field: *self,
+                relation_info: relation_info,
+            }),
+            _ => None,
+        }
+    }
+
     pub(super) fn is_id(&self) -> bool {
         self.field.is_id
     }
@@ -184,6 +192,86 @@ impl<'a> TypeRef<'a> {
     }
 }
 
+#[derive(Debug)]
+pub(super) struct RelationFieldRef<'a> {
+    field: FieldRef<'a>,
+    relation_info: &'a RelationInfo,
+}
+
+impl<'a> RelationFieldRef<'a> {
+    pub(super) fn arity(&self) -> FieldArity {
+        self.field.arity()
+    }
+
+    pub(crate) fn is_one_to_one(&self) -> bool {
+        self.field.arity().is_singular()
+            && self
+                .opposite_side()
+                .map(|rel| rel.field.arity().is_singular())
+                .unwrap_or(false)
+    }
+
+    pub(crate) fn is_virtual(&self) -> bool {
+        self.relation_info.fields.is_empty()
+    }
+
+    pub(crate) fn opposite_side(&self) -> Option<RelationFieldRef<'a>> {
+        self.referenced_model_ref()
+            .fields()
+            .filter_map(|f| f.as_relation_field())
+            .find(|relation_field| {
+                relation_field.relation_name() == self.relation_name()
+                    && relation_field.referenced_model().name.as_str() == &self.field.model.name
+            })
+    }
+
+    pub(crate) fn referencing_columns<'b>(&'b self) -> impl Iterator<Item = &'a str> {
+        self.field.data_source_fields().iter().map(|field| field.name.as_str())
+    }
+
+    pub(crate) fn referenced_columns<'b>(&'b self) -> impl Iterator<Item = &'a str> + 'b {
+        self.referenced_model()
+            .fields
+            .iter()
+            .filter(move |field| {
+                self.relation_info
+                    .to_fields
+                    .iter()
+                    .any(|to_field| to_field == &field.name)
+            })
+            .flat_map(|field| field.data_source_fields.iter())
+            .map(|dsf| dsf.name.as_str())
+    }
+
+    pub(crate) fn relation_name(&self) -> &'a str {
+        self.relation_info.name.as_ref()
+    }
+
+    pub(crate) fn referenced_table_name(&self) -> &'a str {
+        self.referenced_model().final_single_database_name()
+    }
+
+    fn referenced_model(&self) -> &'a Model {
+        self.field
+            .datamodel
+            .find_model(&self.relation_info.to)
+            .ok_or_else(|| {
+                anyhow::anyhow!(
+                    "Invariant violation: could not find model {} referenced in relation info.",
+                    self.relation_info.to
+                )
+            })
+            .unwrap()
+    }
+
+    fn referenced_model_ref(&self) -> ModelRef<'a> {
+        ModelRef {
+            model: self.referenced_model(),
+            datamodel: self.field.datamodel,
+        }
+    }
+}
+
 #[derive(Debug, Clone, Copy)]
 pub(super) struct EnumRef<'a> {
     pub(super) r#enum: &'a Enum,
@@ -191,18 +279,12 @@ pub(super) struct EnumRef<'a> {
 }
 
 impl<'a> EnumRef<'a> {
-    pub(super) fn name(&self) -> &'a str {
-        &self.r#enum.name
-    }
-
     pub(super) fn values(&self) -> &[EnumValue] {
         &self.r#enum.values
     }
 
     pub(super) fn db_name(&self) -> &'a str {
-        self.r#enum
-            .single_database_name()
-            .unwrap_or(&self.r#enum.name)
+        self.r#enum.single_database_name().unwrap_or(&self.r#enum.name)
    }
 }
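The walker types above (`ModelRef`, `FieldRef`, `RelationFieldRef`) are cheap `Copy` wrappers that pair a node with the datamodel it lives in, so a relation field can resolve what it points to without cloning. A tiny, hypothetical sketch of the same idea, reduced to a slice of (model name, relation target) pairs:

// A cheap Copy ref that carries the context needed to follow references.
#[derive(Clone, Copy)]
struct ModelRef<'a> {
    models: &'a [(&'a str, Option<&'a str>)],
    index: usize,
}

impl<'a> ModelRef<'a> {
    fn name(self) -> &'a str {
        self.models[self.index].0
    }

    // Follow this model's relation to the referenced model, if any.
    fn referenced(self) -> Option<ModelRef<'a>> {
        let target = self.models[self.index].1?;
        let index = self.models.iter().position(|(name, _)| *name == target)?;
        Some(ModelRef { models: self.models, index })
    }
}

fn main() {
    let models = [("Recipe", Some("Category")), ("Category", None)];
    let recipe = ModelRef { models: &models, index: 0 };
    assert_eq!(recipe.referenced().map(|m| m.name()), Some("Category"));
}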
Existing columns: {:?}", column_name, - self.0 - .columns - .iter() - .map(|col| &col.name) - .collect::>() + self.0.columns.iter().map(|col| &col.name).collect::>() ) })?; @@ -235,11 +210,7 @@ impl<'a> TableAssertion<'a> { Ok(self) } - pub fn assert_index_on_columns( - self, - columns: &[&str], - index_assertions: F, - ) -> AssertionResult + pub fn assert_index_on_columns(self, columns: &[&str], index_assertions: F) -> AssertionResult where F: FnOnce(IndexAssertion<'a>) -> AssertionResult>, { @@ -257,16 +228,12 @@ pub struct ColumnAssertion<'a>(&'a Column); impl<'a> ColumnAssertion<'a> { pub fn assert_default(self, expected: Option<&str>) -> AssertionResult { - let found = self - .0 - .default - .as_ref() - .map(|default_value| match default_value { - DefaultValue::VALUE(s) => s, - DefaultValue::SEQUENCE(s) => s, - DefaultValue::DBGENERATED(s) => s, - DefaultValue::NOW => "CURRENT_TIMESTAMP", - }); + let found = self.0.default.as_ref().map(|default_value| match default_value { + DefaultValue::VALUE(s) => s, + DefaultValue::SEQUENCE(s) => s, + DefaultValue::DBGENERATED(s) => s, + DefaultValue::NOW => "CURRENT_TIMESTAMP", + }); anyhow::ensure!( found == expected, diff --git a/migration-engine/migration-engine-tests/src/test_api.rs b/migration-engine/migration-engine-tests/src/test_api.rs index 5890aacdb9be..593eaf6fbdd9 100644 --- a/migration-engine/migration-engine-tests/src/test_api.rs +++ b/migration-engine/migration-engine-tests/src/test_api.rs @@ -198,7 +198,6 @@ pub struct SingleRowInsert<'a> { impl<'a> SingleRowInsert<'a> { pub fn value(mut self, name: &'a str, value: impl Into>) -> Self { - // let insert = std::mem::replace(&mut self.insert, quaint::ast::Insert::single_into("")); self.insert = self.insert.value(name, value); self diff --git a/migration-engine/migration-engine-tests/tests/existing_data/sql_unexecutable_migrations/added_required_field_to_table.rs b/migration-engine/migration-engine-tests/tests/existing_data/sql_unexecutable_migrations/added_required_field_to_table.rs index ad764a91e792..b294eab211c9 100644 --- a/migration-engine/migration-engine-tests/tests/existing_data/sql_unexecutable_migrations/added_required_field_to_table.rs +++ b/migration-engine/migration-engine-tests/tests/existing_data/sql_unexecutable_migrations/added_required_field_to_table.rs @@ -1,6 +1,6 @@ use migration_engine_tests::sql::*; -#[test_each_connector] +#[test_each_connector(tags("sql"))] async fn adding_a_required_field_to_an_existing_table_with_data_without_a_default_is_unexecutable( api: &TestApi, ) -> TestResult { @@ -27,8 +27,11 @@ async fn adding_a_required_field_to_an_existing_table_with_data_without_a_defaul } "#; - // TODO: flip this - api.infer_apply(&dm2).send_assert().await?.assert_green()?; + api.infer_apply(&dm2) + .force(Some(false)) + .send_assert() + .await? + .assert_unexecutable(&[format!("Added the required column `age` to the `Test` table without a default value. 
There are Some(1) rows in this table, it is not possible.")])?; let rows = api.select("Test").column("id").column("name").send_debug().await?; assert_eq!(rows, &[&[r#"Text("abc")"#, r#"Text("george")"#]]); @@ -36,7 +39,7 @@ async fn adding_a_required_field_to_an_existing_table_with_data_without_a_defaul Ok(()) } -#[test_each_connector] +#[test_each_connector(tags("sql"))] async fn adding_a_required_field_with_a_default_to_an_existing_table_works(api: &TestApi) -> TestResult { let dm1 = r#" model Test { diff --git a/migration-engine/migration-engine-tests/tests/existing_data/sql_unexecutable_migrations/added_unimplementable_unique_constraint.rs b/migration-engine/migration-engine-tests/tests/existing_data/sql_unexecutable_migrations/added_unimplementable_unique_constraint.rs index 1867ec777acb..3533228f24d1 100644 --- a/migration-engine/migration-engine-tests/tests/existing_data/sql_unexecutable_migrations/added_unimplementable_unique_constraint.rs +++ b/migration-engine/migration-engine-tests/tests/existing_data/sql_unexecutable_migrations/added_unimplementable_unique_constraint.rs @@ -1,6 +1,7 @@ -// use crate::tests::test_harness::sql::*; +use migration_engine_tests::sql::*; -// #[test_each_connector] +// TODO! +// #[test_each_connector(tags("sql"), log = "debug")] // async fn adding_a_unique_constraint_when_existing_data_does_not_respect_it_is_unexecutable( // api: &TestApi, // ) -> TestResult { @@ -13,17 +14,19 @@ // api.infer_apply(&dm1).send_assert().await?.assert_green()?; -// api.insert("Test") -// .value("id", "abc") -// .value("name", "george") -// .result_raw() -// .await?; +// { +// api.insert("Test") +// .value("id", "abc") +// .value("name", "george") +// .result_raw() +// .await?; -// api.insert("Test") -// .value("id", "def") -// .value("name", "george") -// .result_raw() -// .await?; +// api.insert("Test") +// .value("id", "def") +// .value("name", "george") +// .result_raw() +// .await?; +// } // let dm2 = r#" // model Test { @@ -32,8 +35,11 @@ // } // "#; -// // TODO: flip this -// api.infer_apply(&dm2).send_assert().await?.assert_green()?; +// api.infer_apply(&dm2) +// .force(Some(false)) +// .send_assert() +// .await? 
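These tests lean on the harness's fluent, `Result`-based assertions (`assert_green`, `assert_unexecutable`, …), where every check returns `Ok(self)` so calls chain with `?`. A minimal, hypothetical sketch of that style — `TableAssertion` here is a toy, not the harness type:

// Toy version of the fluent assertion pattern used by the test harness.
struct TableAssertion {
    columns: Vec<String>,
}

impl TableAssertion {
    fn assert_has_column(self, name: &str) -> Result<Self, String> {
        if self.columns.iter().any(|c| c.as_str() == name) {
            Ok(self) // return self so further assertions can chain
        } else {
            Err(format!("column `{}` not found in {:?}", name, self.columns))
        }
    }
}

fn main() -> Result<(), String> {
    let table = TableAssertion {
        columns: vec!["id".into(), "age".into()],
    };
    table.assert_has_column("id")?.assert_has_column("age")?;
    Ok(())
}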
diff --git a/migration-engine/migration-engine-tests/tests/existing_data/sql_unexecutable_migrations/added_unimplementable_unique_constraint.rs b/migration-engine/migration-engine-tests/tests/existing_data/sql_unexecutable_migrations/added_unimplementable_unique_constraint.rs
index 1867ec777acb..3533228f24d1 100644
--- a/migration-engine/migration-engine-tests/tests/existing_data/sql_unexecutable_migrations/added_unimplementable_unique_constraint.rs
+++ b/migration-engine/migration-engine-tests/tests/existing_data/sql_unexecutable_migrations/added_unimplementable_unique_constraint.rs
@@ -1,6 +1,7 @@
-// use crate::tests::test_harness::sql::*;
+use migration_engine_tests::sql::*;
 
-// #[test_each_connector]
+// TODO!
+// #[test_each_connector(tags("sql"), log = "debug")]
 // async fn adding_a_unique_constraint_when_existing_data_does_not_respect_it_is_unexecutable(
 //     api: &TestApi,
 // ) -> TestResult {
@@ -13,17 +14,19 @@
 
 //     api.infer_apply(&dm1).send_assert().await?.assert_green()?;
 
-//     api.insert("Test")
-//         .value("id", "abc")
-//         .value("name", "george")
-//         .result_raw()
-//         .await?;
+//     {
+//         api.insert("Test")
+//             .value("id", "abc")
+//             .value("name", "george")
+//             .result_raw()
+//             .await?;
 
-//     api.insert("Test")
-//         .value("id", "def")
-//         .value("name", "george")
-//         .result_raw()
-//         .await?;
+//         api.insert("Test")
+//             .value("id", "def")
+//             .value("name", "george")
+//             .result_raw()
+//             .await?;
+//     }
 
 //     let dm2 = r#"
 //         model Test {
@@ -32,8 +35,11 @@
 //         }
 //     "#;
 
-//     // TODO: flip this
-//     api.infer_apply(&dm2).send_assert().await?.assert_green()?;
+//     api.infer_apply(&dm2)
+//         .force(Some(false))
+//         .send_assert()
+//         .await?
+//         .assert_unexecutable(&["yo".into()])?;
 
 //     let rows = api.select("Test").column("id").column("name").send_debug().await?;
 //     assert_eq!(
@@ -47,47 +53,46 @@
 //     Ok(())
 // }
 
-// #[test_each_connector]
-// async fn adding_a_unique_constraint_when_existing_data_respects_it_works(api: &TestApi) -> TestResult {
-//     let dm1 = r#"
-//         model Test {
-//             id String @id
-//             name String
-//         }
-//     "#;
-
-//     api.infer_apply(&dm1).send_assert().await?.assert_green()?;
-
-//     api.insert("Test")
-//         .value("id", "abc")
-//         .value("name", "george")
-//         .result_raw()
-//         .await?;
-
-//     api.insert("Test")
-//         .value("id", "def")
-//         .value("name", "georgina")
-//         .result_raw()
-//         .await?;
-
-//     let dm2 = r#"
-//         model Test {
-//             id String @id
-//             name String @unique
-//         }
-//     "#;
-
-//     // TODO: flip this
-//     api.infer_apply(&dm2).send_assert().await?.assert_green()?;
-
-//     let rows = api.select("Test").column("id").column("name").send_debug().await?;
-//     assert_eq!(
-//         rows,
-//         &[
-//             &[r#"Text("abc")"#, r#"Text("george")"#],
-//             &[r#"Text("def")"#, r#"Text("georgina")"#]
-//         ]
-//     );
-
-//     Ok(())
-// }
+#[test_each_connector(tags("sql"))]
+async fn adding_a_unique_constraint_when_existing_data_respects_it_works(api: &TestApi) -> TestResult {
+    let dm1 = r#"
+        model Test {
+            id String @id
+            name String
+        }
+    "#;
+
+    api.infer_apply(&dm1).send_assert().await?.assert_green()?;
+
+    api.insert("Test")
+        .value("id", "abc")
+        .value("name", "george")
+        .result_raw()
+        .await?;
+
+    api.insert("Test")
+        .value("id", "def")
+        .value("name", "georgina")
+        .result_raw()
+        .await?;
+
+    let dm2 = r#"
+        model Test {
+            id String @id
+            name String @unique
+        }
+    "#;
+
+    api.infer_apply(&dm2).send_assert().await?.assert_green()?;
+
+    let rows = api.select("Test").column("id").column("name").send_debug().await?;
+    assert_eq!(
+        rows,
+        &[
+            &[r#"Text("abc")"#, r#"Text("george")"#],
+            &[r#"Text("def")"#, r#"Text("georgina")"#]
+        ]
+    );
+
+    Ok(())
+}
diff --git a/migration-engine/migration-engine-tests/tests/existing_data/sql_unexecutable_migrations/made_optional_field_required.rs b/migration-engine/migration-engine-tests/tests/existing_data/sql_unexecutable_migrations/made_optional_field_required.rs
index ac5201aa4002..b1ad87883665 100644
--- a/migration-engine/migration-engine-tests/tests/existing_data/sql_unexecutable_migrations/made_optional_field_required.rs
+++ b/migration-engine/migration-engine-tests/tests/existing_data/sql_unexecutable_migrations/made_optional_field_required.rs
@@ -1,6 +1,6 @@
 use migration_engine_tests::sql::*;
 
-#[test_each_connector]
+#[test_each_connector(tags("sql"))]
 async fn making_an_optional_field_required_with_data_without_a_default_is_unexecutable(api: &TestApi) -> TestResult {
     let dm1 = r#"
         model Test {
@@ -26,8 +26,9 @@ async fn making_an_optional_field_required_with_data_without_a_default_is_unexec
         }
     "#;
 
-    // TODO: flip this
-    api.infer_apply(&dm2).send_assert().await?.assert_green()?;
+    api.infer_apply(&dm2).send_assert().await?.assert_unexecutable(&[
+        "Made the column `age` on table `Test` required, but there are existing NULL values.".into(),
+    ])?;
 
     api.assert_schema()
         .await?
@@ -65,7 +66,6 @@ async fn making_an_optional_field_required_with_data_with_a_default_works(api: &
         }
     "#;
 
-    // TODO: flip this
    api.infer_apply(&dm2).send_assert().await?.assert_green()?;
 
     api.assert_schema()
@@ -104,7 +104,6 @@ async fn making_an_optional_field_required_on_an_empty_table_works(api: &TestApi
         }
     "#;
 
-    // TODO: flip this
     api.infer_apply(&dm2).send_assert().await?.assert_green()?;
 
     api.assert_schema()
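The unexecutable-migration messages asserted here come out of `DestructiveChangeDiagnostics::warn_about_unexecutable_migrations` from the first file in this patch: until unexecutable migrations get first-class reporting, each one is copied into the warnings list so it still surfaces to the user. A hypothetical miniature of that bridge (the `Diagnostics` type below is a stand-in):

// Stand-in for DestructiveChangeDiagnostics: unexecutable entries are
// temporarily mirrored into the warnings list for better reporting.
#[derive(Default)]
struct Diagnostics {
    warnings: Vec<String>,
    unexecutable: Vec<String>,
}

impl Diagnostics {
    fn warn_about_unexecutable_migrations(&mut self) {
        let copies: Vec<String> = self.unexecutable.clone();
        self.warnings.extend(copies);
    }
}

fn main() {
    let mut diagnostics = Diagnostics::default();
    diagnostics
        .unexecutable
        .push("Made the column `age` on table `Test` required, but there are existing NULL values.".into());
    diagnostics.warn_about_unexecutable_migrations();
    assert_eq!(diagnostics.warnings.len(), 1);
}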
diff --git a/migration-engine/migration-engine-tests/tests/existing_data_tests.rs b/migration-engine/migration-engine-tests/tests/existing_data_tests.rs
index 005d914503e8..fb51dc674e26 100644
--- a/migration-engine/migration-engine-tests/tests/existing_data_tests.rs
+++ b/migration-engine/migration-engine-tests/tests/existing_data_tests.rs
@@ -6,97 +6,6 @@ use pretty_assertions::assert_eq;
 use quaint::ast::*;
 use std::borrow::Cow;
 
-#[test_each_connector]
-async fn adding_a_required_field_if_there_is_data(api: &TestApi) {
-    let dm = r#"
-        model Test {
-            id String @id @default(cuid())
-        }
-
-        enum MyEnum {
-            B
-            A
-        }
-    "#;
-    api.infer_and_apply(&dm).await.sql_schema;
-
-    let insert = Insert::single_into((api.schema_name(), "Test")).value("id", "test");
-    api.database().query(insert.into()).await.unwrap();
-
-    let dm = r#"
-        model Test {
-            id String @id @default(cuid())
-            myint Int
-            myfloat Float
-            boolean Boolean
-            string String
-            dateTime DateTime
-            enum MyEnum
-        }
-
-        enum MyEnum {
-            B
-            A
-        }
-    "#;
-    api.infer_and_apply(&dm).await;
-}
-
-#[test_each_connector]
-async fn adding_a_required_field_must_use_the_default_value_for_migrations(
-    api: &TestApi,
-) -> TestResult {
-    let dm = r#"
-        model Test {
-            id String @id @default(cuid())
-        }
-
-        enum MyEnum {
-            B
-            A
-        }
-    "#;
-
-    api.infer_apply(&dm).send().await?;
-
-    let conn = api.database();
-    let insert = Insert::single_into((api.schema_name(), "Test")).value("id", "test");
-
-    conn.query(insert.into()).await.unwrap();
-
-    let dm = r#"
-        model Test {
-            id String @id @default(cuid())
-            myint Int @default(1)
-            myfloat Float @default(2)
-            boolean Boolean @default(true)
-            string String @default("test_string")
-            dateTime DateTime
-            enum MyEnum @default(A)
-        }
-
-        enum MyEnum {
-            B
-            A
-        }
-    "#;
-    api.infer_and_apply(&dm).await;
-
-    let query = Select::from_table(api.render_table_name("Test"))
-        .column("myint")
-        .column("string")
-        .so_that("id".equals("test"));
-    let result_set = conn.query(query.into()).await.unwrap();
-    let row = result_set
-        .into_iter()
-        .next()
-        .expect("query returned no results");
-    assert_eq!(row["myint"].as_i64().unwrap(), 1);
-    assert_eq!(row["string"].as_str().unwrap(), "test_string");
-
-    Ok(())
-}
-
 #[test_each_connector]
 async fn dropping_a_table_with_rows_should_warn(api: &TestApi) {
     let dm = r#"
@@ -125,8 +34,7 @@ async fn dropping_a_table_with_rows_should_warn(api: &TestApi) {
     assert_eq!(
         migration_output.warnings,
         &[MigrationWarning {
-            description: "You are about to drop the table `Test`, which is not empty (1 rows)."
-                .into()
+            description: "You are about to drop the table `Test`, which is not empty (1 rows).".into()
         }]
     );
 }
@@ -231,9 +139,7 @@ async fn altering_a_column_with_non_null_values_should_warn(api: &TestApi) -> Te
     let migration_output = api.infer_apply(&dm2).send().await?;
     // The schema should not change because the migration should not run if there are warnings
     // and the force flag isn't passed.
-    api.assert_schema()
-        .await?
-        .assert_equals(&original_database_schema)?;
+    api.assert_schema().await?.assert_equals(&original_database_schema)?;
 
     assert_eq!(
         migration_output.warnings,
@@ -278,13 +184,11 @@ async fn column_defaults_can_safely_be_changed(api: &TestApi) -> TestResult {
 
             api.infer_apply(&dm1).force(Some(true)).send().await?;
 
-            api.assert_schema()
-                .await?
-                .assert_table(model_name, |table| {
-                    table.assert_column("name", |column| {
-                        column.assert_default(Some(first_default.unwrap_or("")))
-                    })
-                })?;
+            api.assert_schema().await?.assert_table(model_name, |table| {
+                table.assert_column("name", |column| {
+                    column.assert_default(Some(first_default.unwrap_or("")))
+                })
+            })?;
         }
 
         // Insert data
@@ -305,10 +209,7 @@ async fn column_defaults_can_safely_be_changed(api: &TestApi) -> TestResult {
                 .filter_map(|row| row.get("name").and_then(|val| val.to_string()))
                 .collect();
             // TODO: change this when the defaults hack is removed
-            assert_eq!(
-                &[first_default.unwrap_or(""), "Waterworld"],
-                names.as_slice()
-            );
+            assert_eq!(&[first_default.unwrap_or(""), "Waterworld"], names.as_slice());
         }
 
         // Migrate
@@ -343,18 +244,13 @@ async fn column_defaults_can_safely_be_changed(api: &TestApi) -> TestResult {
                 .into_iter()
                 .filter_map(|row| row.get("name").and_then(|val| val.to_string()))
                 .collect();
-            assert_eq!(
-                &[first_default.unwrap_or(""), "Waterworld"],
-                names.as_slice()
-            );
+            assert_eq!(&[first_default.unwrap_or(""), "Waterworld"], names.as_slice());
 
-            api.assert_schema()
-                .await?
-                .assert_table(model_name, |table| {
-                    table.assert_column("name", |column| {
-                        column.assert_default(Some(second_default.unwrap_or("")))
-                    })
-                })?;
+            api.assert_schema().await?.assert_table(model_name, |table| {
+                table.assert_column("name", |column| {
+                    column.assert_default(Some(second_default.unwrap_or("")))
+                })
+            })?;
         }
     }
 
@@ -401,9 +297,7 @@ async fn changing_a_column_from_required_to_optional_should_work(api: &TestApi)
             "You are about to alter the column `age` on the `Test` table, which still contains 2 non-null values. The data in that column will be lost.",
         );
 
-        api.assert_schema()
-            .await?
-            .assert_equals(&original_database_schema)?;
+        api.assert_schema().await?.assert_equals(&original_database_schema)?;
     } else {
         // On other databases, the migration should be successful.
         anyhow::ensure!(
@@ -412,9 +306,7 @@ async fn changing_a_column_from_required_to_optional_should_work(api: &TestApi)
             migration_output.warnings
         );
 
-        api.assert_schema()
-            .await?
-            .assert_ne(&original_database_schema)?;
+        api.assert_schema().await?.assert_ne(&original_database_schema)?;
     }
 
     // Check that no data was lost.
@@ -461,9 +353,7 @@ async fn changing_a_column_from_optional_to_required_must_warn(api: &TestApi) ->
 
     // The schema should not change because the migration should not run if there are warnings
     // and the force flag isn't passed.
-    api.assert_schema()
-        .await?
-        .assert_equals(&original_database_schema)?;
+    api.assert_schema().await?.assert_equals(&original_database_schema)?;
 
     assert_eq!(
         migration_output.warnings,
@@ -490,7 +380,7 @@ async fn changing_a_column_from_optional_to_required_must_warn(api: &TestApi) ->
     Ok(())
 }
 
-#[test_each_connector]
+#[test_each_connector(tags("sql"))]
 async fn dropping_a_table_referenced_by_foreign_keys_must_work(api: &TestApi) -> TestResult {
     use quaint::ast::*;
 
@@ -502,12 +392,19 @@ async fn dropping_a_table_referenced_by_foreign_keys_must_work(api: &TestApi) ->
 
         model Recipe {
             id Int @id
-            category Category
+            categoryId Int
+            category Category @relation(fields: [categoryId])
         }
     "#;
 
-    let sql_schema = api.infer_and_apply(&dm1).await.sql_schema;
-    assert!(sql_schema.table("Category").is_ok());
+    api.infer_apply(&dm1).send_assert().await?.assert_green()?;
+
+    api.assert_schema()
+        .await?
+        .assert_table("Category", |table| table.assert_columns_count(2))?
+        .assert_table("Recipe", |table| {
+            table.assert_fk_on_columns(&["categoryId"], |fk| fk.assert_references("Category", &["id"]))
+        })?;
 
     let id: i32 = 1;
 
@@ -517,13 +414,10 @@ async fn dropping_a_table_referenced_by_foreign_keys_must_work(api: &TestApi) ->
     api.database().query(insert.into()).await?;
 
     let insert = Insert::single_into(api.render_table_name("Recipe"))
-        .value("category", id)
+        .value("categoryId", id)
         .value("id", id);
 
     api.database().query(insert.into()).await?;
 
-    let fk = sql_schema.table_bang("Recipe").foreign_keys.get(0).unwrap();
-    assert_eq!(fk.referenced_table, "Category");
-
     let dm2 = r#"
         model Recipe {
             id Int @id
@@ -591,18 +485,13 @@ async fn string_columns_do_not_get_arbitrarily_migrated(api: &TestApi) -> TestRe
         row.get("kindle_email").unwrap().as_str().unwrap(),
         "george+kindle@prisma.io"
     );
-    assert_eq!(
-        row.get("email").unwrap().as_str().unwrap(),
-        "george@prisma.io"
-    );
+    assert_eq!(row.get("email").unwrap().as_str().unwrap(), "george@prisma.io");
 
     Ok(())
 }
 
 #[test_each_connector]
-async fn altering_the_type_of_a_column_in_an_empty_table_should_not_warn(
-    api: &TestApi,
-) -> TestResult {
+async fn altering_the_type_of_a_column_in_an_empty_table_should_not_warn(api: &TestApi) -> TestResult {
     let dm1 = r#"
         model User {
             id String @id @default(cuid())
@@ -628,9 +517,7 @@ async fn altering_the_type_of_a_column_in_an_empty_table_should_not_warn(
     api.assert_schema()
         .await?
         .assert_table("User", |table| {
-            table.assert_column("dogs", |col| {
-                col.assert_type_is_string()?.assert_is_required()
-            })
+            table.assert_column("dogs", |col| col.assert_type_is_string()?.assert_is_required())
         })
         .map(drop)
 }
@@ -668,9 +555,7 @@ async fn making_a_column_required_in_an_empty_table_should_not_warn(api: &TestAp
 }
 
 #[test_each_connector]
-async fn altering_the_type_of_a_column_in_a_non_empty_table_always_warns(
-    api: &TestApi,
-) -> TestResult {
+async fn altering_the_type_of_a_column_in_a_non_empty_table_always_warns(api: &TestApi) -> TestResult {
     let dm1 = r#"
         model User {
             id String @id @default(cuid())
@@ -719,9 +604,7 @@ async fn altering_the_type_of_a_column_in_a_non_empty_table_always_warns(
 }
 
 #[test_each_connector(ignore("mysql"))]
-async fn migrating_a_required_column_from_int_to_string_should_warn_and_cast(
-    api: &TestApi,
-) -> TestResult {
+async fn migrating_a_required_column_from_int_to_string_should_warn_and_cast(api: &TestApi) -> TestResult {
     let dm1 = r#"
         model Test {
            id String @id
@@ -740,11 +623,7 @@ async fn migrating_a_required_column_from_int_to_string_should_warn_and_cast(
     let test = api.dump_table("Test").await?;
     let first_row = test.get(0).unwrap();
     assert_eq!(
-        format!(
-            "{:?} {:?}",
-            first_row.get("id"),
-            first_row.get("serialNumber")
-        ),
+        format!("{:?} {:?}", first_row.get("id"), first_row.get("serialNumber")),
         r#"Some(Text("abcd")) Some(Integer(47))"#
     );
 
@@ -783,11 +662,7 @@ async fn migrating_a_required_column_from_int_to_string_should_warn_and_cast(
     let test = api.dump_table("Test").await?;
     let first_row = test.get(0).unwrap();
     assert_eq!(
-        format!(
-            "{:?} {:?}",
-            first_row.get("id"),
-            first_row.get("serialNumber")
-        ),
+        format!("{:?} {:?}", first_row.get("id"), first_row.get("serialNumber")),
         r#"Some(Text("abcd")) Some(Text("47"))"#
     );
 }
@@ -821,16 +696,15 @@ async fn enum_variants_can_be_added_without_data_loss(api: &TestApi) -> TestResu
         .assert_green()?;
 
     {
-        let cat_inserts =
-            quaint::ast::Insert::multi_into(api.render_table_name("Cat"), vec!["id", "mood"])
-                .values((
-                    ParameterizedValue::Text(Cow::Borrowed("felix")),
-                    ParameterizedValue::Enum(Cow::Borrowed("HUNGRY")),
-                ))
-                .values((
-                    ParameterizedValue::Text(Cow::Borrowed("mittens")),
-                    ParameterizedValue::Enum(Cow::Borrowed("HAPPY")),
-                ));
+        let cat_inserts = quaint::ast::Insert::multi_into(api.render_table_name("Cat"), vec!["id", "mood"])
+            .values((
+                ParameterizedValue::Text(Cow::Borrowed("felix")),
+                ParameterizedValue::Enum(Cow::Borrowed("HUNGRY")),
+            ))
+            .values((
+                ParameterizedValue::Text(Cow::Borrowed("mittens")),
+                ParameterizedValue::Enum(Cow::Borrowed("HAPPY")),
+            ));
 
         api.database().query(cat_inserts.into()).await?;
     }
@@ -862,10 +736,8 @@ async fn enum_variants_can_be_added_without_data_loss(api: &TestApi) -> TestResu
     // Assertions
     {
         let cat_data = api.dump_table("Cat").await?;
-        let cat_data: Vec<BTreeMap<String, ParameterizedValue>> = cat_data
-            .into_iter()
-            .map(|row| row.into_iter().collect())
-            .collect();
+        let cat_data: Vec<BTreeMap<String, ParameterizedValue>> =
+            cat_data.into_iter().map(|row| row.into_iter().collect()).collect();
 
         let expected_cat_data = if api.sql_family().is_mysql() {
             vec![
@@ -894,10 +766,8 @@ async fn enum_variants_can_be_added_without_data_loss(api: &TestApi) -> TestResu
         assert_eq!(cat_data, expected_cat_data);
 
         let human_data = api.dump_table("Human").await?;
-        let human_data: Vec<BTreeMap<String, ParameterizedValue>> = human_data
-            .into_iter()
-            .map(|row| row.into_iter().collect())
-            .collect();
+        let human_data: Vec<BTreeMap<String, ParameterizedValue>> =
+            human_data.into_iter().map(|row| row.into_iter().collect()).collect();
         let expected_human_data: Vec<BTreeMap<String, ParameterizedValue>> = Vec::new();
         assert_eq!(human_data, expected_human_data);
@@ -947,16 +817,15 @@ async fn enum_variants_can_be_dropped_without_data_loss(api: &TestApi) -> TestRe
         .assert_green()?;
 
     {
-        let cat_inserts =
-            quaint::ast::Insert::multi_into(api.render_table_name("Cat"), &["id", "mood"])
-                .values((
-                    ParameterizedValue::Text(Cow::Borrowed("felix")),
-                    ParameterizedValue::Enum(Cow::Borrowed("HUNGRY")),
-                ))
-                .values((
-                    ParameterizedValue::Text(Cow::Borrowed("mittens")),
-                    ParameterizedValue::Enum(Cow::Borrowed("HAPPY")),
-                ));
+        let cat_inserts = quaint::ast::Insert::multi_into(api.render_table_name("Cat"), &["id", "mood"])
+            .values((
+                ParameterizedValue::Text(Cow::Borrowed("felix")),
+                ParameterizedValue::Enum(Cow::Borrowed("HUNGRY")),
+            ))
+            .values((
+                ParameterizedValue::Text(Cow::Borrowed("mittens")),
+                ParameterizedValue::Enum(Cow::Borrowed("HAPPY")),
+            ));
 
         api.database().query(cat_inserts.into()).await?;
     }
@@ -987,10 +856,8 @@ async fn enum_variants_can_be_dropped_without_data_loss(api: &TestApi) -> TestRe
     // Assertions
     {
         let cat_data = api.dump_table("Cat").await?;
-        let cat_data: Vec<BTreeMap<String, ParameterizedValue>> = cat_data
-            .into_iter()
-            .map(|row| row.into_iter().collect())
-            .collect();
+        let cat_data: Vec<BTreeMap<String, ParameterizedValue>> =
+            cat_data.into_iter().map(|row| row.into_iter().collect()).collect();
 
         let expected_cat_data = if api.sql_family().is_mysql() {
             vec![
@@ -1019,10 +886,8 @@ async fn enum_variants_can_be_dropped_without_data_loss(api: &TestApi) -> TestRe
         assert_eq!(cat_data, expected_cat_data);
 
         let human_data = api.dump_table("Human").await?;
-        let human_data: Vec<BTreeMap<String, ParameterizedValue>> = human_data
-            .into_iter()
-            .map(|row| row.into_iter().collect())
-            .collect();
+        let human_data: Vec<BTreeMap<String, ParameterizedValue>> =
+            human_data.into_iter().map(|row| row.into_iter().collect()).collect();
         let expected_human_data: Vec<BTreeMap<String, ParameterizedValue>> = Vec::new();
         assert_eq!(human_data, expected_human_data);
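The dump-and-compare pattern in these enum tests converts each row into an ordered map so assertions don't depend on column order. A minimal, hypothetical sketch of that normalization step (plain `String` values stand in for `ParameterizedValue`):

use std::collections::BTreeMap;

fn main() {
    // Rows arrive as (column, value) pairs in driver order…
    let row: Vec<(String, String)> = vec![
        ("mood".into(), "HUNGRY".into()),
        ("id".into(), "felix".into()),
    ];
    // …and become ordered maps, so comparisons ignore column order.
    let normalized: BTreeMap<String, String> = row.into_iter().collect();
    let expected: BTreeMap<String, String> = vec![
        ("id".into(), "felix".into()),
        ("mood".into(), "HUNGRY".into()),
    ]
    .into_iter()
    .collect();
    assert_eq!(normalized, expected);
}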
diff --git a/migration-engine/migration-engine-tests/tests/existing_databases_tests.rs b/migration-engine/migration-engine-tests/tests/existing_databases_tests.rs
index 9ac8c2f3d236..b96e93a3b59d 100644
--- a/migration-engine/migration-engine-tests/tests/existing_databases_tests.rs
+++ b/migration-engine/migration-engine-tests/tests/existing_databases_tests.rs
@@ -76,14 +76,7 @@ async fn creating_a_field_for_an_existing_column_with_a_compatible_type_must_wor
                 t.add_column("id", types::primary());
                 // We add a default because the migration engine always adds defaults to facilitate
                 // migration of required columns.
-                t.add_column(
-                    "title",
-                    if is_mysql {
-                        types::varchar(181).default("")
-                    } else {
-                        types::text().default("")
-                    },
-                );
+                t.add_column("title", if is_mysql { types::varchar(181) } else { types::text() });
             });
         })
         .await?;
@@ -95,8 +88,15 @@ async fn creating_a_field_for_an_existing_column_with_a_compatible_type_must_wor
         }
     "#;
 
-    let result = api.infer_and_apply(&dm).await.sql_schema;
-    assert_eq!(initial_result, result);
+    api.infer_apply(&dm)
+        .force(Some(true))
+        .send_assert()
+        .await?
+        .assert_green()?;
+
+    let final_schema = api.describe_database().await?;
+
+    assert_eq!(initial_result, final_schema);
 
     Ok(())
 }
@@ -380,8 +380,10 @@ async fn removing_a_default_from_a_non_nullable_foreign_key_column_must_warn(api
                 t.add_column("id", types::primary());
                 // Barrel fails to create foreign key columns with defaults (bad SQL).
                 let fk = match sql_family {
-                    SqlFamily::Postgres => r#""user" INTEGER DEFAULT 1, FOREIGN KEY ("user") REFERENCES "User" ("id")"#,
-                    _ => "user INTEGER DEFAULT 1, FOREIGN KEY (user) REFERENCES User (id)",
+                    SqlFamily::Postgres => {
+                        r#""userId" INTEGER DEFAULT 1, FOREIGN KEY ("userId") REFERENCES "User" ("id")"#
+                    }
+                    _ => "userId INTEGER DEFAULT 1, FOREIGN KEY (userId) REFERENCES User (id)",
                 };
 
                 t.inject_custom(fk);
@@ -389,7 +391,12 @@ async fn removing_a_default_from_a_non_nullable_foreign_key_column_must_warn(api
         })
         .await?;
 
-    assert!(sql_schema.table_bang("Blog").column("user").unwrap().default.is_some());
+    assert!(sql_schema
+        .table_bang("Blog")
+        .column("userId")
+        .unwrap()
+        .default
+        .is_some());
 
     let dm = r#"
         model User {
@@ -398,7 +405,8 @@ async fn removing_a_default_from_a_non_nullable_foreign_key_column_must_warn(api
 
         model Blog {
             id Int @id
-            user User
+            userId Int
+            user User @relation(fields: [userId])
         }
     "#;
 
@@ -416,7 +424,7 @@ async fn removing_a_default_from_a_non_nullable_foreign_key_column_must_warn(api
         .send()
         .await?;
 
-    let expected_warning = "The migration is about to remove a default value on the foreign key field `Blog.user`.";
+    let expected_warning = "The migration is about to remove a default value on the foreign key field `Blog.userId`.";
     assert_eq!(
         result.warnings,
         &[migration_connector::MigrationWarning {
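The migration_tests changes that follow are almost all one mechanical datamodel rewrite: a relation field now declares an explicit scalar foreign-key field via `fields: [...]` instead of implying a column. A sketch of the before/after shape, with the schemas as plain strings (model names are illustrative):

fn main() {
    // Old style: the relation field `b` implicitly owned the FK column.
    let before = r#"
        model A {
            id Int @id
            b  B   @relation(references: [id])
        }
    "#;
    // New style: the scalar field `b_id` is the FK column, and the
    // relation field points at it explicitly.
    let after = r#"
        model A {
            id   Int @id
            b_id Int
            b    B   @relation(fields: [b_id], references: [id])
        }
    "#;
    assert_ne!(before, after);
}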
on_delete_action: ForeignKeyAction::Cascade, @@ -305,7 +307,8 @@ async fn changing_a_relation_field_to_a_scalar_field_must_work(api: &TestApi) -> let dm1 = r#" model A { id Int @id - b B @relation(references: [id]) + b Int + b_rel B @relation(fields: [b], references: [id]) } model B { id Int @id @@ -375,7 +378,8 @@ async fn changing_a_scalar_field_to_a_relation_field_must_work(api: &TestApi) { let dm2 = r#" model A { id Int @id - b B @relation(references: [id]) + b Int + b_rel B @relation(fields: [b], references: [id]) } model B { id Int @id @@ -485,20 +489,15 @@ async fn adding_a_many_to_many_relation_with_custom_name_must_work(api: &TestApi ); } -#[test] -#[ignore] -fn providing_an_explicit_link_table_must_work() { - // TODO: implement this once we have decided if this is actually possible in dm v2 - unimplemented!(); -} - #[test_each_connector] -async fn adding_an_inline_relation_must_result_in_a_foreign_key_in_the_model_table(api: &TestApi) { +async fn adding_an_inline_relation_must_result_in_a_foreign_key_in_the_model_table(api: &TestApi) -> TestResult { let dm1 = r#" model A { id Int @id - b B @relation(references: [id]) - c C? @relation(references: [id]) + bid Int + cid Int? + b B @relation(fields: [bid], references: [id]) + c C? @relation(fields: [cid], references: [id]) } model B { @@ -513,11 +512,11 @@ async fn adding_an_inline_relation_must_result_in_a_foreign_key_in_the_model_tab let result = api.infer_and_apply(&dm1).await.sql_schema; let table = result.table_bang("A"); - let b_column = table.column_bang("b"); + let b_column = table.column_bang("bid"); assert_eq!(b_column.tpe.family, ColumnTypeFamily::Int); assert_eq!(b_column.tpe.arity, ColumnArity::Required); - let c_column = table.column_bang("c"); + let c_column = table.column_bang("cid"); assert_eq!(c_column.tpe.family, ColumnTypeFamily::Int); assert_eq!(c_column.tpe.arity, ColumnArity::Nullable); @@ -526,7 +525,7 @@ async fn adding_an_inline_relation_must_result_in_a_foreign_key_in_the_model_tab &[ ForeignKey { constraint_name: match api.sql_family() { - SqlFamily::Postgres => Some("A_b_fkey".to_owned()), + SqlFamily::Postgres => Some("A_bid_fkey".to_owned()), SqlFamily::Mysql => Some("A_ibfk_1".to_owned()), SqlFamily::Sqlite => None, }, @@ -537,7 +536,7 @@ async fn adding_an_inline_relation_must_result_in_a_foreign_key_in_the_model_tab }, ForeignKey { constraint_name: match api.sql_family() { - SqlFamily::Postgres => Some("A_c_fkey".to_owned()), + SqlFamily::Postgres => Some("A_cid_fkey".to_owned()), SqlFamily::Mysql => Some("A_ibfk_2".to_owned()), SqlFamily::Sqlite => None, }, @@ -548,6 +547,8 @@ async fn adding_an_inline_relation_must_result_in_a_foreign_key_in_the_model_tab } ] ); + + Ok(()) } #[test_each_connector] @@ -555,7 +556,8 @@ async fn specifying_a_db_name_for_an_inline_relation_must_work(api: &TestApi) { let dm1 = r#" model A { id Int @id - b B @relation(references: [id]) @map(name: "b_column") + b_id_field Int @map(name: "b_column") + b B @relation(fields: [b_id_field]) } model B { @@ -587,7 +589,8 @@ async fn adding_an_inline_relation_to_a_model_with_an_exotic_id_type(api: &TestA let dm1 = r#" model A { id Int @id - b B @relation(references: [id]) + b_id String + b B @relation(fields: [b_id], references: [id]) } model B { @@ -596,13 +599,13 @@ async fn adding_an_inline_relation_to_a_model_with_an_exotic_id_type(api: &TestA "#; let result = api.infer_and_apply(&dm1).await.sql_schema; let table = result.table_bang("A"); - let column = table.column_bang("b"); + let column = table.column_bang("b_id"); 
assert_eq!(column.tpe.family, ColumnTypeFamily::String); assert_eq!( table.foreign_keys, &[ForeignKey { constraint_name: match api.sql_family() { - SqlFamily::Postgres => Some("A_b_fkey".to_owned()), + SqlFamily::Postgres => Some("A_b_id_fkey".to_owned()), SqlFamily::Mysql => Some("A_ibfk_1".to_owned()), SqlFamily::Sqlite => None, }, @@ -619,16 +622,19 @@ async fn removing_an_inline_relation_must_work(api: &TestApi) -> TestResult { let dm1 = r#" model A { id Int @id - b B @relation(references: [id]) + b_id Int + b B @relation(fields: [b_id], references: [id]) } model B { id Int @id } "#; - let result = api.infer_and_apply(&dm1).await.sql_schema; - let column = result.table_bang("A").column("b"); - assert_eq!(column.is_some(), true); + + api.infer_apply(&dm1).send_assert().await?.assert_green()?; + api.assert_schema() + .await? + .assert_table("A", |table| table.assert_has_column("b_id"))?; let dm2 = r#" model A { @@ -642,15 +648,14 @@ async fn removing_an_inline_relation_must_work(api: &TestApi) -> TestResult { api.infer_apply(dm2).send().await?; - api.assert_schema() - .await? - .assert_table("A", |table| { - table - .assert_foreign_keys_count(0)? - .assert_indexes_count(0)? - .assert_does_not_have_column("b") - }) - .map(drop) + api.assert_schema().await?.assert_table("A", |table| { + table + .assert_foreign_keys_count(0)? + .assert_indexes_count(0)? + .assert_does_not_have_column("b") + })?; + + Ok(()) } #[test_each_connector] @@ -658,7 +663,8 @@ async fn moving_an_inline_relation_to_the_other_side_must_work(api: &TestApi) -> let dm1 = r#" model A { id Int @id - b B @relation(references: [id]) + b_id Int + b B @relation(fields: [b_id], references: [id]) } model B { @@ -671,11 +677,11 @@ async fn moving_an_inline_relation_to_the_other_side_must_work(api: &TestApi) -> table.foreign_keys, &[ForeignKey { constraint_name: match api.sql_family() { - SqlFamily::Postgres => Some("A_b_fkey".to_owned()), + SqlFamily::Postgres => Some("A_b_id_fkey".to_owned()), SqlFamily::Sqlite => None, SqlFamily::Mysql => Some("A_ibfk_1".to_owned()), }, - columns: vec!["b".to_string()], + columns: vec!["b_id".to_string()], referenced_table: "B".to_string(), referenced_columns: vec!["id".to_string()], on_delete_action: ForeignKeyAction::Cascade, @@ -689,7 +695,8 @@ async fn moving_an_inline_relation_to_the_other_side_must_work(api: &TestApi) -> model B { id Int @id - a A @relation(references: [id]) + a_id Int + a A @relation(fields: [a_id], references: [id]) } "#; let result = api.infer_and_apply(&dm2).await.sql_schema; @@ -698,11 +705,11 @@ async fn moving_an_inline_relation_to_the_other_side_must_work(api: &TestApi) -> table.foreign_keys, &[ForeignKey { constraint_name: match api.sql_family() { - SqlFamily::Postgres => Some("B_a_fkey".to_owned()), + SqlFamily::Postgres => Some("B_a_id_fkey".to_owned()), SqlFamily::Sqlite => None, SqlFamily::Mysql => Some("B_ibfk_1".to_owned()), }, - columns: vec!["a".to_string()], + columns: vec!["a_id".to_string()], referenced_table: "A".to_string(), referenced_columns: vec!["id".to_string()], on_delete_action: ForeignKeyAction::Cascade, @@ -1158,7 +1165,8 @@ async fn reserved_sql_key_words_must_work(api: &TestApi) { let dm = r#" model Group { id String @default(cuid()) @id - parent Group? @relation(name: "ChildGroups") + parent_id String? + parent Group? 
@relation(name: "ChildGroups", fields: [parent_id]) childGroups Group[] @relation(name: "ChildGroups") } "#; @@ -1169,11 +1177,11 @@ async fn reserved_sql_key_words_must_work(api: &TestApi) { table.foreign_keys, vec![ForeignKey { constraint_name: match sql_family { - SqlFamily::Postgres => Some("Group_parent_fkey".to_owned()), + SqlFamily::Postgres => Some("Group_parent_id_fkey".to_owned()), SqlFamily::Mysql => Some("Group_ibfk_1".to_owned()), SqlFamily::Sqlite => None, }, - columns: vec!["parent".to_string()], + columns: vec!["parent_id".to_string()], referenced_table: "Group".to_string(), referenced_columns: vec!["id".to_string()], on_delete_action: ForeignKeyAction::SetNull, @@ -1241,11 +1249,12 @@ async fn migrations_with_many_to_many_related_models_must_not_recreate_indexes(a } #[test_each_connector] -async fn removing_a_relation_field_must_work(api: &TestApi) { +async fn removing_a_relation_field_must_work(api: &TestApi) -> TestResult { let dm_1 = r#" model User { id String @default(cuid()) @id - address Address @map("address_name") + address_id String @map("address_name") + address Address @relation(fields: [address_id]) } model Address { @@ -1254,15 +1263,10 @@ async fn removing_a_relation_field_must_work(api: &TestApi) { } "#; - let sql_schema = api.infer_and_apply(&dm_1).await.sql_schema; - - let address_name_field = sql_schema - .table_bang("User") - .columns - .iter() - .find(|col| col.name == "address_name"); - - assert!(address_name_field.is_some()); + api.infer_apply(&dm_1).send_assert().await?.assert_green()?; + api.assert_schema() + .await? + .assert_table("User", |table| table.assert_has_column("address_name"))?; let dm_2 = r#" model User { @@ -1284,6 +1288,8 @@ async fn removing_a_relation_field_must_work(api: &TestApi) { .find(|col| col.name == "address_name"); assert!(address_name_field.is_none()); + + Ok(()) } #[test_each_connector] @@ -1319,9 +1325,12 @@ async fn model_with_multiple_indexes_works(api: &TestApi) -> TestResult { model Like { id Int @id - user User - post Post - comment Comment + user_id Int + user User @relation(fields: [user_id], references: [id]) + post_id Int + post Post @relation(fields: [post_id], references: [id]) + comment_id Int + comment Comment @relation(fields: [comment_id], references: [id]) @@index([post]) @@index([user]) @@ -1347,7 +1356,8 @@ async fn foreign_keys_of_inline_one_to_one_relations_have_a_unique_constraint(ap model Box { id Int @id - cat Cat + cat_id Int + cat Cat @relation(fields: [cat_id], references: [id]) } "#; @@ -1356,8 +1366,8 @@ async fn foreign_keys_of_inline_one_to_one_relations_have_a_unique_constraint(ap let box_table = schema.table_bang("Box"); let expected_indexes = &[Index { - name: "Box_cat".into(), - columns: vec!["cat".into()], + name: "Box_cat_id".into(), + columns: vec!["cat_id".into()], tpe: IndexType::Unique, }]; @@ -1395,7 +1405,7 @@ async fn column_defaults_must_be_migrated(api: &TestApi) -> TestResult { Ok(()) } -#[test_each_connector(log = "debug")] +#[test_each_connector] async fn escaped_string_defaults_are_not_arbitrarily_migrated(api: &TestApi) -> TestResult { use quaint::ast::*; @@ -1478,15 +1488,10 @@ async fn created_at_does_not_get_arbitrarily_migrated(api: &TestApi) -> TestResu api.database().query(insert.into()).await.unwrap(); anyhow::ensure!( - schema - .table_bang("Fruit") - .column_bang("createdAt") - .default - .as_ref() - .unwrap() - .as_value() - .unwrap() - .contains("1970"), + matches!( + schema.table_bang("Fruit").column_bang("createdAt").default, + Some(DefaultValue::NOW) + ), 
"createdAt default is set" ); @@ -1498,7 +1503,7 @@ async fn created_at_does_not_get_arbitrarily_migrated(api: &TestApi) -> TestResu } "#; - let output = api.infer_apply(dm2).send().await?; + let output = api.infer_apply(dm2).send_assert().await?.assert_green()?.into_inner(); anyhow::ensure!(output.warnings.is_empty(), "No warnings"); anyhow::ensure!(output.datamodel_steps.is_empty(), "Migration should be empty"); @@ -1551,11 +1556,12 @@ async fn relations_can_reference_arbitrary_unique_fields(api: &TestApi) -> TestR model Account { id Int @id - user User @relation(references: [email]) + uem String + user User @relation(fields: [uem], references: [email]) } "#; - api.infer_apply(dm).send().await?; + api.infer_apply(dm).send_assert().await?.assert_green()?; let schema = api.describe_database().await?; @@ -1565,7 +1571,7 @@ async fn relations_can_reference_arbitrary_unique_fields(api: &TestApi) -> TestR let fk = fks.iter().next().unwrap(); - assert_eq!(fk.columns, &["user"]); + assert_eq!(fk.columns, &["uem"]); assert_eq!(fk.referenced_table, "User"); assert_eq!(fk.referenced_columns, &["email"]); @@ -1585,7 +1591,8 @@ async fn relations_can_reference_arbitrary_unique_fields_with_maps(api: &TestApi model Account { id Int @id - user User @relation(references: [email]) @map("user-id") + uem String @map("user-id") + user User @relation(fields: [uem], references: [email]) } "#; @@ -1613,7 +1620,9 @@ async fn relations_can_reference_multiple_fields(api: &TestApi) -> TestResult { model Account { id Int @id - user User @relation(references: [email, age]) + usermail String + userage Int + user User @relation(fields: [usermail, userage], references: [email, age]) } "#; @@ -1623,7 +1632,7 @@ async fn relations_can_reference_multiple_fields(api: &TestApi) -> TestResult { schema .assert_table("Account")? .assert_foreign_keys_count(1)? - .assert_fk_on_columns(&["user_email", "user_age"], |fk| { + .assert_fk_on_columns(&["usermail", "userage"], |fk| { fk.assert_references("User", &["email", "age"]) })?; @@ -1639,13 +1648,15 @@ async fn relations_with_mappings_on_both_sides_can_reference_multiple_fields(api age Int @map("birthdays-count") @@unique([email, age]) - @@map("users") } model Account { id Int @id - user User @relation(references: [email, age]) @map(["emergency-mail-fk1", "age-fk2"]) + usermail String @map("emergency-mail-fk-1") + userage Int @map("age-fk2") + + user User @relation(fields: [usermail, userage], references: [email, age]) } "#; @@ -1654,7 +1665,7 @@ async fn relations_with_mappings_on_both_sides_can_reference_multiple_fields(api api.assert_schema().await?.assert_table("Account", |table| { table .assert_foreign_keys_count(1)? - .assert_fk_on_columns(&["emergency-mail-fk1", "age-fk2"], |fk| { + .assert_fk_on_columns(&["emergency-mail-fk-1", "age-fk2"], |fk| { fk.assert_references("users", &["emergency-mail", "birthdays-count"]) }) })?; @@ -1671,13 +1682,14 @@ async fn relations_with_mappings_on_referenced_side_can_reference_multiple_field age Int @map("birthdays-count") @@unique([email, age]) - @@map("users") } model Account { id Int @id - user User @relation(references: [email, age]) + useremail String + userage Int + user User @relation(fields: [useremail, userage], references: [email, age]) } "#; @@ -1686,7 +1698,7 @@ async fn relations_with_mappings_on_referenced_side_can_reference_multiple_field api.assert_schema().await?.assert_table("Account", |table| { table .assert_foreign_keys_count(1)? 
- .assert_fk_on_columns(&["user_emergency-mail", "user_birthdays-count"], |fk| { + .assert_fk_on_columns(&["useremail", "userage"], |fk| { fk.assert_references("users", &["emergency-mail", "birthdays-count"]) }) })?; @@ -1703,13 +1715,14 @@ async fn relations_with_mappings_on_referencing_side_can_reference_multiple_fiel age Int @@unique([email, age]) - @@map("users") } model Account { id Int @id - user User @relation(references: [email, age]) @map(["emergency-mail-fk1", "age-fk2"]) + user_email String @map("emergency-mail-fk1") + user_age Int @map("age-fk2") + user User @relation(fields: [user_email, user_age], references: [email, age]) } "#; @@ -1739,7 +1752,7 @@ async fn foreign_keys_are_added_on_existing_tables(api: &TestApi) -> TestResult } "#; - api.infer_apply(dm1).send().await?; + api.infer_apply(dm1).send_assert().await?.assert_green()?; api.assert_schema() .await? // There should be no foreign keys yet. @@ -1753,15 +1766,16 @@ async fn foreign_keys_are_added_on_existing_tables(api: &TestApi) -> TestResult model Account { id Int @id - user User @relation(references: [email]) + user_email String + user User @relation(fields: [user_email], references: [email]) } "#; - api.infer_apply(dm2).send().await?; + api.infer_apply(dm2).send_assert().await?.assert_green()?; api.assert_schema().await?.assert_table("Account", |table| { table .assert_foreign_keys_count(1)? - .assert_fk_on_columns(&["user"], |fk| fk.assert_references("User", &["email"])) + .assert_fk_on_columns(&["user_email"], |fk| fk.assert_references("User", &["email"])) })?; Ok(()) @@ -1811,7 +1825,7 @@ async fn compound_primary_keys_on_mapped_columns_must_work(api: &TestApi) -> Tes Ok(()) } -#[test_each_connector] +#[test_each_connector(tags("sql"))] async fn references_to_models_with_compound_primary_keys_must_work(api: &TestApi) -> TestResult { let dm = r#" model User { @@ -1824,7 +1838,10 @@ async fn references_to_models_with_compound_primary_keys_must_work(api: &TestApi model Pet { id String @id - human User + human_firstName String + human_lastName String + + human User @relation(fields: [human_firstName, human_lastName]) } "#; diff --git a/migration-engine/migration-engine-tests/tests/migrations/mariadb.rs b/migration-engine/migration-engine-tests/tests/migrations/mariadb.rs index efb09fcf199d..5ee26d565b84 100644 --- a/migration-engine/migration-engine-tests/tests/migrations/mariadb.rs +++ b/migration-engine/migration-engine-tests/tests/migrations/mariadb.rs @@ -13,7 +13,8 @@ async fn foreign_keys_to_indexes_being_renamed_must_work(api: &TestApi) -> TestR model Post { id String @id - author User @relation(references: name) + author String + author_rel User @relation(fields: [author], references: name) } "#; @@ -50,7 +51,8 @@ async fn foreign_keys_to_indexes_being_renamed_must_work(api: &TestApi) -> TestR model Post { id String @id - author User @relation(references: name) + author String + author_rel User @relation(fields: [author], references: name) } "#; diff --git a/migration-engine/migration-engine-tests/tests/migrations/mysql.rs b/migration-engine/migration-engine-tests/tests/migrations/mysql.rs index 88a68c998b01..ebff86e88d0c 100644 --- a/migration-engine/migration-engine-tests/tests/migrations/mysql.rs +++ b/migration-engine/migration-engine-tests/tests/migrations/mysql.rs @@ -8,7 +8,8 @@ async fn indexes_on_foreign_key_fields_are_not_created_twice(api: &TestApi) -> T let schema = r#" model Human { id String @id - cat Cat @relation(references: [name]) + catname String + cat_rel Cat @relation(fields: [catname], 
         }

         model Cat {
@@ -26,9 +27,9 @@ async fn indexes_on_foreign_key_fields_are_not_created_twice(api: &TestApi) -> T
         .assert_table("Human", |table| {
             table
                 .assert_foreign_keys_count(1)?
-                .assert_fk_on_columns(&["cat"], |fk| fk.assert_references("Cat", &["name"]))?
+                .assert_fk_on_columns(&["catname"], |fk| fk.assert_references("Cat", &["name"]))?
                 .assert_indexes_count(1)?
-                .assert_index_on_columns(&["cat"], |idx| idx.assert_is_not_unique())
+                .assert_index_on_columns(&["catname"], |idx| idx.assert_is_not_unique())
         })?
         .into_schema();
diff --git a/migration-engine/migration-engine-tests/tests/migrations/sql.rs b/migration-engine/migration-engine-tests/tests/migrations/sql.rs
index 186ec8870f12..1d22dcb63781 100644
--- a/migration-engine/migration-engine-tests/tests/migrations/sql.rs
+++ b/migration-engine/migration-engine-tests/tests/migrations/sql.rs
@@ -37,7 +37,9 @@ async fn relations_to_models_without_a_primary_key_work(api: &TestApi) -> TestRe

         model PairMetadata {
             id String @id
-            pair Pair
+            pairidx  Int
+            pairname String
+            pair Pair @relation(fields: [pairidx, pairname], references: [index, name])
         }
     "#;

@@ -49,7 +51,7 @@ async fn relations_to_models_without_a_primary_key_work(api: &TestApi) -> TestRe
         .assert_table("PairMetadata", |table| {
             table
                 .assert_pk(|pk| pk.assert_columns(&["id"]))?
-                .assert_fk_on_columns(&["pair_index", "pair_name"], |fk| {
+                .assert_fk_on_columns(&["pairidx", "pairname"], |fk| {
                     fk.assert_references("Pair", &["index", "name"])
                 })
         })?;

@@ -68,7 +70,8 @@ async fn relations_to_models_with_no_pk_and_a_single_unique_required_field_work(

         model PairMetadata {
             id String @id
-            pair Pair
+            pweight Float
+            pair Pair @relation(fields: [pweight], references: [weight])
         }
     "#;

@@ -76,12 +79,11 @@ async fn relations_to_models_with_no_pk_and_a_single_unique_required_field_work(

     api.assert_schema()
         .await?
-        .debug_print()
         .assert_table("Pair", |table| table.assert_has_no_pk())?
         .assert_table("PairMetadata", |table| {
             table
                 .assert_pk(|pk| pk.assert_columns(&["id"]))?
-                .assert_fk_on_columns(&["pair"], |fk| fk.assert_references("Pair", &["weight"]))
+                .assert_fk_on_columns(&["pweight"], |fk| fk.assert_references("Pair", &["weight"]))
         })?;

     Ok(())
@@ -204,7 +206,8 @@ async fn enum_defaults_must_work(api: &TestApi) -> TestResult {
 async fn id_as_part_of_relation_must_work(api: &TestApi) -> TestResult {
     let dm = r##"
         model Cat {
-            nemesis Dog @id
+            nemesis_id String @id
+            nemesis Dog @relation(fields: [nemesis_id], references: [id])
         }

         model Dog {
@@ -216,18 +219,23 @@ async fn id_as_part_of_relation_must_work(api: &TestApi) -> TestResult {

     api.assert_schema().await?.assert_table("Cat", |table| {
         table
-            .assert_pk(|pk| pk.assert_columns(&["nemesis"]))?
-            .assert_fk_on_columns(&["nemesis"], |fk| fk.assert_references("Dog", &["id"]))
+            .assert_pk(|pk| pk.assert_columns(&["nemesis_id"]))?
+ .assert_fk_on_columns(&["nemesis_id"], |fk| fk.assert_references("Dog", &["id"])) })?; Ok(()) } -#[test_each_connector(tags("sql"), log = "debug")] +#[test_each_connector(tags("sql"))] async fn multi_field_id_as_part_of_relation_must_work(api: &TestApi) -> TestResult { let dm = r##" model Cat { - nemesis Dog @id + nemesis_name String + nemesis_weight Int + + nemesis Dog @relation(fields: [nemesis_name, nemesis_weight], references: [name, weight]) + + @@id([nemesis_name, nemesis_weight]) } model Dog { @@ -255,7 +263,11 @@ async fn multi_field_id_as_part_of_relation_must_work(api: &TestApi) -> TestResu async fn remapped_multi_field_id_as_part_of_relation_must_work(api: &TestApi) -> TestResult { let dm = r##" model Cat { - nemesis Dog @map(["dogname", "dogweight"]) @id + nemesis_name String @map("dogname") + nemesis_weight Int @map("dogweight") + nemesis Dog @relation(fields: [nemesis_name, nemesis_weight], references: [name, weight]) + + @@id([nemesis_name, nemesis_weight]) } model Dog { @@ -284,14 +296,19 @@ async fn unique_constraints_on_composite_relation_fields(api: &TestApi) -> TestR let dm = r##" model Parent { id Int @id - child Child @relation(references: [id, c]) @unique + chiid Int + chic String + child Child @relation(fields: [chiid, chic], references: [id, c]) p String + + @@unique([chiid, chic]) } model Child { - id Int @id - c String - parent Parent + id Int @id + c String + parent_id Int + parent Parent @relation(name: "ChildParent", fields: [parent_id], references: [id]) @@unique([id, c]) } @@ -300,7 +317,7 @@ async fn unique_constraints_on_composite_relation_fields(api: &TestApi) -> TestR api.infer_apply(dm).send_assert().await?.assert_green()?; api.assert_schema().await?.assert_table("Parent", |table| { - table.assert_index_on_columns(&["child_id", "child_c"], |idx| idx.assert_is_unique()) + table.assert_index_on_columns(&["chiid", "chic"], |idx| idx.assert_is_unique()) })?; Ok(()) @@ -319,7 +336,9 @@ async fn indexes_on_composite_relation_fields(api: &TestApi) -> TestResult { model SpamList { id Int @id - user User @relation(references: [firstName, lastName]) + ufn String + uln String + user User @relation(fields: [ufn, uln], references: [firstName, lastName]) @@index([user]) } @@ -328,7 +347,7 @@ async fn indexes_on_composite_relation_fields(api: &TestApi) -> TestResult { api.infer_apply(dm).send_assert().await?.assert_green()?; api.assert_schema().await?.assert_table("SpamList", |table| { - table.assert_index_on_columns(&["user_firstName", "user_lastName"], |idx| idx.assert_is_not_unique()) + table.assert_index_on_columns(&["ufn", "uln"], |idx| idx.assert_is_not_unique()) })?; Ok(()) diff --git a/query-engine/prisma/src/tests/type_mappings/mysql_types.rs b/query-engine/prisma/src/tests/type_mappings/mysql_types.rs index 24fa769ffc93..c930ed2a6413 100644 --- a/query-engine/prisma/src/tests/type_mappings/mysql_types.rs +++ b/query-engine/prisma/src/tests/type_mappings/mysql_types.rs @@ -322,7 +322,7 @@ async fn mysql_floats_do_not_lose_precision(api: &TestApi) -> TestResult { Ok(()) } -#[test_each_connector(tags("mysql"), log = "debug")] +#[test_each_connector(tags("mysql"))] async fn all_mysql_identifier_types_work(api: &TestApi) -> TestResult { let identifier_types = &[ ("tinyint", "12", ""), @@ -346,11 +346,7 @@ async fn all_mysql_identifier_types_work(api: &TestApi) -> TestResult { ("text", "\"dolphins\"", "(100)"), ("mediumtext", "\"medium dolphins\"", "(100)"), ("longtext", "\"long dolphins\"", "(100)"), - ( - "enum('pollicle_dogs','jellicle_cats')", - 
"\"jellicle_cats\"", - "", - ), + ("enum('pollicle_dogs','jellicle_cats')", "\"jellicle_cats\"", ""), // ("json", "\"{\\\"name\\\": null}\"", ""), ]; @@ -384,10 +380,7 @@ async fn all_mysql_identifier_types_work(api: &TestApi) -> TestResult { let response = engine.request(query).await; - let expected_response = format!( - r#"{{"data":{{"createOnepk_test":{{"id":{}}}}}}}"#, - identifier_value - ); + let expected_response = format!(r#"{{"data":{{"createOnepk_test":{{"id":{}}}}}}}"#, identifier_value); assert_eq!(response.to_string(), expected_response); } } diff --git a/rustfmt.toml b/rustfmt.toml index 75306517965a..261644ce7835 100644 --- a/rustfmt.toml +++ b/rustfmt.toml @@ -1 +1,2 @@ +edition = "2018" max_width = 120