Skip to content

Commit

Permalink
Preview Flagged Named Constraints (#2099)
Browse files Browse the repository at this point in the history
* introduce NamedConstraints preview flag,
add it to parser db,
start reading id name/ map properties

* cleanups, model level attributes done

* @Index and @unique

* first step of parsing of new attributes done

* transfer db name

* flesh out compatibility hack

* test fixes

* move legacy default name generation from sql schema calculator to parser

* legacy name generation for indices moved

* more index parsing logic

* conditional index rendering

* test cases for map on index

* additional tests for indices

* fix other positive test expectations

* more (failing) tests

* validations for length of db_name,
validations for client name characters

* transfer of test cases

* add client name in use error

* cleanups

* investigate failing mssql test

* activate more id tests

* re-introspection and id positive tests

* re-introspection of client names

* preview flag reintrospection

* lower id names

* fix preview flags in introspection test api

* only generate db names on connectors that support them

* more tests, start parsing map argument for relations

* dry up attributes code

* more refactoring and notes

* lower map argument for relation

* test fixes

* review feedback,
validate and test against both map and name on index

* cleanups, test for having fk map on wrong side

* extra test cases for ids

* more tests, ignore one relation test for now

* moar tests

* pass preview flags in introspection test api

* reorganize some IE tests

* disable named pk test on sqlite and mysql

* introspection tests

* test fixes on non-postgres

* named foreign keys capability

* sqlite fk handling

* start adding more migrate tests,
enable NamedForeignKey capability

* start on migration engine

* always add the datasource to the datamodel in schema_push

* test fixes, schema push naming

* create table constraint test

* test fixes

* test fix

* postgres test fixes

* more postgres tests fixed

* cargo pedantic

* Fixes for sqlite

* do not diff fks on constraint name in legacy mode

* mysql tests

* do not rename pks unless on preview flagged

* some more test fixes

* some mssql test fixes

* more test fixes

* more test fixes

* chugging along

* remaining mssql tests

* pedantic fixes

* port QE changes

* test fix

* test named compounds in rust in the QE

* persist PK client names in the dmmf

* test fix

* spell out free disk space in gh actions

* also log usage at the end

* remove unwanted packages and check the difference

* try docker prune

* increase postgres' shared memory

* cleaning up disk didn't matter

* remove build args for postgres

* specify tmpfs size

* increase size

* 12 and 13 were sharing a disk

* blow up size again

* spelling

* try lower size

* try 3g

* 4 it is then
  • Loading branch information
do4gr committed Aug 3, 2021
1 parent 7a25b4c commit 29ab90f
Show file tree
Hide file tree
Showing 128 changed files with 5,516 additions and 1,690 deletions.
1 change: 0 additions & 1 deletion .github/workflows/migration-engine.yml
Original file line number Diff line number Diff line change
Expand Up @@ -141,7 +141,6 @@ jobs:
CLICOLOR_FORCE: 1
TEST_DATABASE_URL: ${{ matrix.database.url }}


test-windows:
strategy:
fail-fast: false
Expand Down
4 changes: 2 additions & 2 deletions docker-compose.yml
Original file line number Diff line number Diff line change
Expand Up @@ -74,7 +74,7 @@ services:
- "5434:5432"
networks:
- databases
tmpfs: /pgtmpfs12
tmpfs: /pgtmpfs12:size=4g

postgres13:
image: postgres:13
Expand All @@ -87,7 +87,7 @@ services:
- "5435:5432"
networks:
- databases
tmpfs: /pgtmpfs12
tmpfs: /pgtmpfs13

mysql-5-6:
image: mysql:5.6.50
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -50,13 +50,13 @@ pub fn introspect(
}

for index in &table.indices {
model.add_index(calculate_index(index));
model.add_index(calculate_index(index, ctx));
}

if let Some(pk) = &table.primary_key {
model.primary_key = Some(PrimaryKeyDefinition {
name: None,
db_name: None,
db_name: pk.constraint_name.clone(),
fields: pk.columns.clone(),
defined_on_field: pk.columns.len() == 1,
});
Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
use crate::Dedup;
use crate::PreviewFeature::NamedConstraints;
use crate::SqlError;
use datamodel::{
common::RelationNames, Datamodel, DefaultValue as DMLDef, FieldArity, FieldType, IndexDefinition, Model,
Expand Down Expand Up @@ -121,15 +122,22 @@ pub fn calculate_many_to_many_field(
RelationField::new(&name, FieldArity::List, FieldArity::List, relation_info)
}

pub(crate) fn calculate_index(index: &Index) -> IndexDefinition {
pub(crate) fn calculate_index(index: &Index, ctx: &IntrospectionContext) -> IndexDefinition {
debug!("Handling index {:?}", index);
let tpe = match index.tpe {
IndexType::Unique => datamodel::dml::IndexType::Unique,
IndexType::Normal => datamodel::dml::IndexType::Normal,
};

//We do not populate name in client by default. It increases datamodel noise,
//and we would need to sanitize it. Users can give their own names if they want
//and re-introspection will keep them. This is a change in introspection behaviour,
//but due to re-introspection previous datamodels and clients should keep working as before.

let name = (!ctx.preview_features.contains(NamedConstraints)).then(|| index.name.clone());

IndexDefinition {
name: Some(index.name.clone()),
name,
db_name: Some(index.name.clone()),
fields: index.columns.clone(),
tpe,
Expand Down
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
use crate::introspection_helpers::replace_field_names;
use crate::warnings::*;
use crate::PreviewFeature::NamedConstraints;
use crate::SqlFamilyTrait;
use datamodel::{Datamodel, DefaultValue, FieldType, Ignorable, ValueGenerator};
use introspection_connector::{IntrospectionContext, Warning};
Expand Down Expand Up @@ -43,6 +44,68 @@ pub fn enrich(old_data_model: &Datamodel, new_data_model: &mut Datamodel, ctx: &
}
}

let mut changed_index_names = vec![];
let mut changed_primary_key_names = vec![];
if ctx.preview_features.contains(NamedConstraints) {
//custom index names
{
for model in new_data_model.models() {
if let Some(old_model) = &old_data_model.find_model(&model.name) {
for index in &model.indices {
if let Some(old_index) = old_model.indices.iter().find(|old| old.db_name == index.db_name) {
if old_index.name.is_some() {
let mf = ModelAndIndex::new(&model.name, &old_index.db_name.as_ref().unwrap());
changed_index_names.push((mf, old_index.name.clone()))
}
}
}
}
}

//change index name
for changed_index_name in &changed_index_names {
let index = new_data_model
.find_model_mut(&changed_index_name.0.model)
.indices
.iter_mut()
.find(|i| i.db_name == Some(changed_index_name.0.index_db_name.clone()))
.unwrap();
index.name = changed_index_name.1.clone();
}
}

//custom primary key names
{
for model in new_data_model.models() {
if let Some(old_model) = &old_data_model.find_model(&model.name) {
if let Some(primary_key) = &model.primary_key {
if let Some(old_primary_key) = &old_model.primary_key {
if old_primary_key.fields == primary_key.fields
&& (old_primary_key.db_name == primary_key.db_name || primary_key.db_name.is_none())
&& old_primary_key.name.is_some()
{
let mf = Model::new(&model.name);
changed_primary_key_names.push((mf, old_primary_key.name.clone()))
}
}
}
}
}

//change primary key names
for changed_primary_key_name in &changed_primary_key_names {
let pk = new_data_model
.find_model_mut(&changed_primary_key_name.0.model)
.primary_key
.as_mut();

if let Some(primary_key) = pk {
primary_key.name = changed_primary_key_name.1.clone()
}
}
}
}

// @map on fields
let mut changed_scalar_field_names = vec![];
{
Expand Down Expand Up @@ -464,6 +527,16 @@ pub fn enrich(old_data_model: &Datamodel, new_data_model: &mut Datamodel, ctx: &
warnings.push(warning_enriched_with_map_on_model(&models));
}

if !changed_index_names.is_empty() {
let index: Vec<_> = changed_index_names.iter().map(|c| c.0.clone()).collect();
warnings.push(warning_enriched_with_custom_index_names(&index));
}

if !changed_primary_key_names.is_empty() {
let pk: Vec<_> = changed_primary_key_names.iter().map(|c| c.0.clone()).collect();
warnings.push(warning_enriched_with_custom_primary_key_names(&pk));
}

if !changed_scalar_field_names.is_empty() {
let models_and_fields: Vec<_> = changed_scalar_field_names
.iter()
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,21 @@ impl ModelAndField {
}
}

#[derive(Serialize, Debug, Clone)]
pub struct ModelAndIndex {
    pub(crate) model: String,
    pub(crate) index_db_name: String,
}

impl ModelAndIndex {
    /// Identifies an index for warning payloads by the model it belongs to
    /// and its database-level (mapped) index name.
    pub fn new(model: &str, index_db_name: &str) -> Self {
        Self {
            model: model.to_string(),
            index_db_name: index_db_name.to_string(),
        }
    }
}

#[derive(Serialize, Debug)]
pub struct ModelAndFieldAndType {
pub(crate) model: String,
Expand Down Expand Up @@ -204,3 +219,20 @@ pub fn warning_enriched_fields_with_ignore(affected: &[ModelAndField]) -> Warnin
affected: serde_json::to_value(&affected).unwrap(),
}
}

/// Warning (code 17) emitted during re-introspection when indexes were
/// enriched with custom client-side names carried over from the previous
/// Prisma schema.
pub fn warning_enriched_with_custom_index_names(affected: &[ModelAndIndex]) -> Warning {
    let message =
        "These Indices were enriched with custom index names taken from the previous Prisma schema.";

    Warning {
        code: 17,
        message: message.into(),
        affected: serde_json::to_value(&affected).unwrap(),
    }
}

/// Warning (code 18) emitted during re-introspection when models were
/// enriched with custom compound primary-key names carried over from the
/// previous Prisma schema.
pub fn warning_enriched_with_custom_primary_key_names(affected: &[Model]) -> Warning {
    let message =
        "These models were enriched with custom compound id names taken from the previous Prisma schema.";

    Warning {
        code: 18,
        message: message.into(),
        affected: serde_json::to_value(&affected).unwrap(),
    }
}
62 changes: 54 additions & 8 deletions introspection-engine/introspection-engine-tests/src/test_api.rs
Original file line number Diff line number Diff line change
Expand Up @@ -100,8 +100,15 @@ impl TestApi {
async fn test_introspect_internal(&self, data_model: Datamodel) -> ConnectorResult<IntrospectionResult> {
let config = self.configuration();

let preview_features = self
.args
.preview_features()
.iter()
.flat_map(|f| PreviewFeature::parse_opt(f))
.collect();

let ctx = IntrospectionContext {
preview_features: config.preview_features().map(Clone::clone).collect(),
preview_features,
source: config.datasources.into_iter().next().unwrap(),
};

Expand All @@ -116,7 +123,6 @@ impl TestApi {
pub async fn re_introspect(&self, data_model_string: &str) -> Result<String> {
let config = self.configuration();
let data_model = parse_datamodel(data_model_string);

let introspection_result = self.test_introspect_internal(data_model).await?;

let rendering_span = tracing::info_span!("render_datamodel after introspection");
Expand Down Expand Up @@ -196,16 +202,22 @@ impl TestApi {
}

pub fn configuration(&self) -> Configuration {
    // Parse datasource AND generator blocks together so that preview
    // features declared on the generator are part of the configuration.
    // `format!` already goes through `Display`, so no intermediate
    // `.to_string()` allocations are needed for the arguments.
    datamodel::parse_configuration(&format!(
        "{}\n{}",
        self.datasource_block(),
        self.generator_block()
    ))
    .unwrap()
    .subject
}

#[track_caller]
pub fn assert_eq_datamodels(&self, expected_without_header: &str, result_with_header: &str) {
let expected = self.dm_with_sources(expected_without_header);
let parsed_expected = datamodel::parse_datamodel(&expected)
.map_err(|err| err.to_pretty_string("schema.prisma", &expected))
let expected_with_source = self.dm_with_sources(expected_without_header);
let expected_with_generator = self.dm_with_generator_and_preview_flags(&expected_with_source);

let parsed_expected = datamodel::parse_datamodel(&expected_with_generator)
.map_err(|err| err.to_pretty_string("schema.prisma", &expected_with_generator))
.unwrap()
.subject;

Expand All @@ -216,6 +228,9 @@ impl TestApi {
let reformatted_result =
datamodel::render_datamodel_and_config_to_string(&parsed_result, &self.configuration());

println!("{}", reformatted_expected);
println!("{}", reformatted_result);

pretty_assertions::assert_eq!(reformatted_expected, reformatted_result);
}

Expand All @@ -226,6 +241,37 @@ impl TestApi {

out
}

pub fn dm_with_generator_and_preview_flags(&self, schema: &str) -> String {
let mut out = String::with_capacity(320 + schema.len());

write!(out, "{}\n{}", self.generator_block(), schema).unwrap();

out
}

/// Renders a `generator client` block for the test schema. When the test
/// args declare preview features, a `previewFeatures = [...]` line is
/// appended inside the block; otherwise it is omitted entirely.
fn generator_block(&self) -> String {
    // Each feature is wrapped in double quotes for valid PSL list syntax.
    let preview_features: Vec<String> = self
        .args
        .preview_features()
        .iter()
        .map(|pf| format!(r#""{}""#, pf))
        .collect();

    let preview_feature_string = if preview_features.is_empty() {
        String::new()
    } else {
        format!("\npreviewFeatures = [{}]", preview_features.join(", "))
    };

    // Return the formatted block directly (no let-and-return binding).
    format!(
        r#"generator client {{
provider = "prisma-client-js"{}
}}"#,
        preview_feature_string
    )
}
}

#[track_caller]
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,9 +6,11 @@ mod identify_version;
mod lists;
mod model_renames;
mod mssql;
mod named_constraints;
mod native_types;
mod postgres;
mod re_introspection;
mod referential_actions;
mod relations;
mod relations_with_compound_fk;
mod remapping_database_names;
Expand Down

0 comments on commit 29ab90f

Please sign in to comment.