Skip to content

Commit

Permalink
Merge pull request #2102 from prisma/datamodel/lift-off
Browse files Browse the repository at this point in the history
Finish moving validation out of lift.rs
  • Loading branch information
tomhoule committed Jul 19, 2021
2 parents dd92cef + eb8d753 commit 51d2228
Show file tree
Hide file tree
Showing 5 changed files with 94 additions and 124 deletions.
60 changes: 60 additions & 0 deletions libs/datamodel/core/src/transform/ast_to_dml/db/types.rs
@@ -1,6 +1,8 @@
use super::{attributes, context::Context};
use crate::{ast, diagnostics::DatamodelError};
use itertools::Itertools;
use once_cell::sync::Lazy;
use regex::Regex;
use std::{
collections::{BTreeMap, HashMap},
str::FromStr,
Expand Down Expand Up @@ -184,6 +186,10 @@ fn visit_model<'ast>(model_id: ast::ModelId, ast_model: &'ast ast::Model, ctx: &
));
}

if matches!(scalar_field_type, ScalarFieldType::Unsupported) {
validate_unsupported_field_type(ast_field, ast_field.field_type.as_unsupported().unwrap().0, ctx);
}

ctx.db.types.scalar_fields.insert((model_id, field_id), field_data);
}
Err(supported) => ctx.push_error(DatamodelError::new_type_not_found_error(
Expand Down Expand Up @@ -261,6 +267,23 @@ fn detect_alias_cycles(ctx: &mut Context<'_>) {
fn visit_enum<'ast>(enum_id: ast::EnumId, enm: &'ast ast::Enum, ctx: &mut Context<'ast>) {
let mut enum_data = EnumData::default();

if !ctx.db.active_connector().supports_enums() {
ctx.push_error(DatamodelError::new_validation_error(
&format!(
"You defined the enum `{}`. But the current connector does not support enums.",
&enm.name.name
),
enm.span,
));
}

if enm.values.is_empty() {
ctx.push_error(DatamodelError::new_validation_error(
"An enum must have at least one value.",
enm.span,
))
}

for (field_idx, field) in enm.values.iter().enumerate() {
ctx.visit_attributes(&field.attributes, |attributes, ctx| {
// @map
Expand Down Expand Up @@ -328,3 +351,40 @@ fn field_type<'ast>(field: &'ast ast::Field, ctx: &mut Context<'ast>) -> Result<
_ => unreachable!(),
}
}

/// Checks whether an `Unsupported("...")` type literal actually names a native
/// type the active connector understands. If it does, push a validation error
/// telling the user to use the proper `@db.Type` native-type notation instead.
///
/// Does nothing when no datasource is configured or when the literal does not
/// match the `Name(args)suffix` shape.
fn validate_unsupported_field_type(ast_field: &ast::Field, unsupported_lit: &str, ctx: &mut Context<'_>) {
    static TYPE_REGEX: Lazy<Regex> = Lazy::new(|| {
        Regex::new(r#"(?x)
    ^                           # beginning of the string
    (?P<prefix>[^(]+)           # a required prefix that is any character until the first opening brace
    (?:\((?P<params>.*?)\))?    # (optional) an opening parenthesis, a closing parenthesis and captured params in-between
    (?P<suffix>.+)?             # (optional) captured suffix after the params until the end of the string
    $                           # end of the string
    "#).unwrap()
    });

    // Without a datasource there is no connector to check native types against.
    let source = match ctx.db.datasource() {
        Some(source) => source,
        None => return,
    };
    let connector = &source.active_connector;

    let captures = match TYPE_REGEX.captures(unsupported_lit) {
        Some(captures) => captures,
        None => return,
    };

    // The prefix is the candidate native type name, e.g. `VarChar` in `VarChar(255)`.
    // The `prefix` group is required by the pattern, so `unwrap()` cannot fail here.
    let type_name = captures.name("prefix").unwrap().as_str().trim();

    // Comma-separated arguments between the parentheses, if any.
    let type_args: Vec<String> = captures
        .name("params")
        .map(|params| params.as_str().split(',').map(|s| s.trim().to_string()).collect())
        .unwrap_or_default();

    if let Ok(native_type) = connector.parse_native_type(type_name, type_args) {
        let prisma_type = connector.scalar_type_for_native_type(native_type.serialized_native_type.clone());

        let msg = format!(
            "The type `Unsupported(\"{}\")` you specified in the type definition for the field `{}` is supported as a native type by Prisma. Please use the native type notation `{} @{}.{}` for full support.",
            unsupported_lit, ast_field.name.name, prisma_type.to_string(), &source.name, native_type.render()
        );

        ctx.push_error(DatamodelError::new_validation_error(&msg, ast_field.span));
    }
}
108 changes: 17 additions & 91 deletions libs/datamodel/core/src/transform/ast_to_dml/lift.rs
@@ -1,33 +1,24 @@
use crate::Datasource;
use crate::{
ast::{self, WithName},
diagnostics::{DatamodelError, Diagnostics},
dml,
transform::ast_to_dml::db::{self, ParserDatabase, ScalarFieldType},
transform::ast_to_dml::db,
};
use once_cell::sync::Lazy;
use regex::Regex;
use std::collections::HashMap;

/// Helper for lifting a datamodel.
///
/// When lifting, the AST is converted to the real datamodel, and additional
/// semantics are attached.
pub struct LiftAstToDml<'a> {
db: &'a ParserDatabase<'a>,
diagnostics: &'a mut Diagnostics,
/// When lifting, the AST is converted to the Datamodel data structure, and
/// additional semantics are attached.
pub(crate) struct LiftAstToDml<'a> {
db: &'a db::ParserDatabase<'a>,
}

impl<'a> LiftAstToDml<'a> {
/// Creates a new instance, with all builtin attributes and
/// the attributes defined by the given sources registered.
///
/// The attributes defined by the given sources will be namespaced.
pub(crate) fn new(db: &'a ParserDatabase<'a>, diagnostics: &'a mut Diagnostics) -> LiftAstToDml<'a> {
LiftAstToDml { db, diagnostics }
pub(crate) fn new(db: &'a db::ParserDatabase<'a>) -> LiftAstToDml<'a> {
LiftAstToDml { db }
}

pub fn lift(&mut self) -> dml::Datamodel {
pub(crate) fn lift(&self) -> dml::Datamodel {
let mut schema = dml::Datamodel::new();

for (top_id, ast_obj) in self.db.ast().iter_tops() {
Expand All @@ -45,7 +36,7 @@ impl<'a> LiftAstToDml<'a> {
}

/// Internal: Validates a model AST node and lifts it to a DML model.
fn lift_model(&mut self, model_id: ast::ModelId, ast_model: &ast::Model) -> dml::Model {
fn lift_model(&self, model_id: ast::ModelId, ast_model: &ast::Model) -> dml::Model {
let mut model = dml::Model::new(ast_model.name.name.clone(), None);
let model_data = self.db.get_model_data(&model_id).unwrap();

Expand Down Expand Up @@ -155,31 +146,13 @@ impl<'a> LiftAstToDml<'a> {
}

/// Internal: Validates an enum AST node.
fn lift_enum(&mut self, enum_id: ast::EnumId, ast_enum: &ast::Enum) -> dml::Enum {
fn lift_enum(&self, enum_id: ast::EnumId, ast_enum: &ast::Enum) -> dml::Enum {
let mut en = dml::Enum::new(&ast_enum.name.name, vec![]);

if !self.db.active_connector().supports_enums() {
self.diagnostics.push_error(DatamodelError::new_validation_error(
&format!(
"You defined the enum `{}`. But the current connector does not support enums.",
&ast_enum.name.name
),
ast_enum.span,
));
return en;
}

for (value_idx, ast_enum_value) in ast_enum.values.iter().enumerate() {
en.add_value(self.lift_enum_value(ast_enum_value, enum_id, value_idx as u32));
}

if en.values.is_empty() {
self.diagnostics.push_error(DatamodelError::new_validation_error(
"An enum must have at least one value.",
ast_enum.span,
))
}

en.documentation = ast_enum.documentation.clone().map(|comment| comment.text);
en.database_name = self.db.get_enum_database_name(enum_id).map(String::from);
en
Expand Down Expand Up @@ -207,28 +180,25 @@ impl<'a> LiftAstToDml<'a> {
}

fn lift_scalar_field_type(
&mut self,
&self,
ast_field: &ast::Field,
scalar_field_type: &db::ScalarFieldType,
scalar_field_data: &db::ScalarField<'_>,
) -> dml::FieldType {
match scalar_field_type {
ScalarFieldType::Enum(enum_id) => {
db::ScalarFieldType::Enum(enum_id) => {
let enum_name = &self.db.ast()[*enum_id].name.name;
dml::FieldType::Enum(enum_name.to_owned())
}
ScalarFieldType::Unsupported => lift_unsupported_field_type(
ast_field,
ast_field.field_type.as_unsupported().unwrap().0,
self.db.datasource(),
self.diagnostics,
),
ScalarFieldType::Alias(top_id) => {
db::ScalarFieldType::Unsupported => {
dml::FieldType::Unsupported(ast_field.field_type.as_unsupported().unwrap().0.to_owned())
}
db::ScalarFieldType::Alias(top_id) => {
let alias = &self.db.ast()[*top_id];
let scalar_field_type = self.db.alias_scalar_field_type(&top_id);
self.lift_scalar_field_type(alias, scalar_field_type, scalar_field_data)
}
ScalarFieldType::BuiltInScalar(scalar_type) => {
db::ScalarFieldType::BuiltInScalar(scalar_type) => {
let native_type = scalar_field_data.native_type.as_ref().map(|(name, args)| {
self.db
.active_connector()
Expand All @@ -240,47 +210,3 @@ impl<'a> LiftAstToDml<'a> {
}
}
}

/// Lifts an `Unsupported("...")` literal to a DML field type.
///
/// As a side effect, when a datasource is configured and the literal parses as
/// a native type the active connector supports, a validation error is pushed
/// advising the user to switch to the `@db.Type` native-type notation.
/// The returned field type is always `dml::FieldType::Unsupported`.
fn lift_unsupported_field_type(
    ast_field: &ast::Field,
    unsupported_lit: &str,
    source: Option<&Datasource>,
    diagnostics: &mut Diagnostics,
) -> dml::FieldType {
    static TYPE_REGEX: Lazy<Regex> = Lazy::new(|| {
        Regex::new(r#"(?x)
    ^                           # beginning of the string
    (?P<prefix>[^(]+)           # a required prefix that is any character until the first opening brace
    (?:\((?P<params>.*?)\))?    # (optional) an opening parenthesis, a closing parenthesis and captured params in-between
    (?P<suffix>.+)?             # (optional) captured suffix after the params until the end of the string
    $                           # end of the string
    "#).unwrap()
    });

    if let Some(datasource) = source {
        let connector = &datasource.active_connector;

        if let Some(caps) = TYPE_REGEX.captures(unsupported_lit) {
            // Candidate native type name, e.g. `VarChar` in `VarChar(255)`.
            // The `prefix` group is required by the pattern, so `unwrap()` cannot fail.
            let type_name = caps.name("prefix").unwrap().as_str().trim();

            // Comma-separated arguments between the parentheses, if any.
            let type_args: Vec<String> = match caps.name("params") {
                Some(params) => params.as_str().split(',').map(|s| s.trim().to_string()).collect(),
                None => Vec::new(),
            };

            if let Ok(native_type) = connector.parse_native_type(type_name, type_args) {
                let prisma_type = connector.scalar_type_for_native_type(native_type.serialized_native_type.clone());

                let msg = format!(
                    "The type `Unsupported(\"{}\")` you specified in the type definition for the field `{}` is supported as a native type by Prisma. Please use the native type notation `{} @{}.{}` for full support.",
                    unsupported_lit, ast_field.name.name, prisma_type.to_string(), &datasource.name, native_type.render()
                );

                diagnostics.push_error(DatamodelError::new_validation_error(&msg, ast_field.span));
            }
        }
    }

    dml::FieldType::Unsupported(unsupported_lit.into())
}
4 changes: 0 additions & 4 deletions libs/datamodel/core/src/transform/ast_to_dml/mod.rs
Expand Up @@ -12,10 +12,6 @@ mod standardise_parsing;
mod validate;
mod validation_pipeline;

use lift::*;
use standardise_formatting::*;
use validate::*;

pub use datasource_loader::DatasourceLoader;
pub use generator_loader::GeneratorLoader;
pub use validation_pipeline::ValidationPipeline;
@@ -1,13 +1,9 @@
use enumflags2::BitFlags;

use ::dml::{field::FieldArity, relation_info::ReferentialAction};

use super::common::*;
use crate::{
common::{preview_features::PreviewFeature, RelationNames},
diagnostics::Diagnostics,
dml, Field,
};
use enumflags2::BitFlags;

/// Helper for standardising a datamodel during parsing.
///
Expand All @@ -22,13 +18,11 @@ impl StandardiserForParsing {
Self { preview_features }
}

pub fn standardise(&self, schema: &mut dml::Datamodel) -> Result<(), Diagnostics> {
pub fn standardise(&self, schema: &mut dml::Datamodel) {
self.name_unnamed_relations(schema);
self.set_relation_to_field_to_id_if_missing_for_m2m_relations(schema);
self.set_referential_arities(schema);
self.set_default_referential_actions(schema);

Ok(())
}

fn set_referential_arities(&self, schema: &mut dml::Datamodel) {
Expand All @@ -46,7 +40,7 @@ impl StandardiserForParsing {
.any(|field| field.arity().is_required());

let arity = if some_required {
FieldArity::Required
dml::FieldArity::Required
} else {
field.arity
};
Expand Down Expand Up @@ -80,10 +74,10 @@ impl StandardiserForParsing {
continue;
}

field.relation_info.on_update = Some(ReferentialAction::Cascade);
field.relation_info.on_update = Some(dml::ReferentialAction::Cascade);
field.relation_info.on_delete = Some(match field.referential_arity {
FieldArity::Required => ReferentialAction::Cascade,
_ => ReferentialAction::SetNull,
dml::FieldArity::Required => dml::ReferentialAction::Cascade,
_ => dml::ReferentialAction::SetNull,
});

// So our validator won't get a stroke when seeing the
Expand Down
28 changes: 11 additions & 17 deletions libs/datamodel/core/src/transform/ast_to_dml/validation_pipeline.rs
@@ -1,8 +1,9 @@
use super::db::ParserDatabase;
use super::*;
use super::{
db::ParserDatabase, lift::LiftAstToDml, standardise_formatting::StandardiserForFormatting,
standardise_parsing::StandardiserForParsing, validate::Validator,
};
use crate::{
ast, common::preview_features::PreviewFeature, configuration, diagnostics::Diagnostics,
transform::ast_to_dml::standardise_parsing::StandardiserForParsing, ValidatedDatamodel,
ast, common::preview_features::PreviewFeature, configuration, diagnostics::Diagnostics, ValidatedDatamodel,
};
use enumflags2::BitFlags;

Expand Down Expand Up @@ -48,40 +49,33 @@ impl<'a, 'b> ValidationPipeline<'a> {
// Phase 0 is parsing.
// Phase 1 is source block loading.

// Phase 2: Name resolution.
// Phase 2: Make sense of the AST.
let (db, mut diagnostics) = ParserDatabase::new(ast_schema, self.source, diagnostics);

// Early return so that the validator does not have to deal with invalid schemas
diagnostics.to_result()?;

// Phase 3: Lift AST to DML.
let mut schema = LiftAstToDml::new(&db, &mut diagnostics).lift();

// Cannot continue on lifter error.
diagnostics.to_result()?;
// Phase 3: Lift AST to DML. This can't fail.
let mut schema = LiftAstToDml::new(&db).lift();

// Phase 4: Validation
self.validator.validate(&db, &mut schema, &mut diagnostics);

// Early return so that the standardiser does not have to deal with invalid schemas
diagnostics.to_result()?;

// TODO: Move consistency stuff into different module.
// Phase 5: Consistency fixes. These don't fail and always run, during parsing AND formatting
if let Err(mut err) = self.standardiser_for_parsing.standardise(&mut schema) {
diagnostics.append(&mut err);
}
self.standardiser_for_parsing.standardise(&mut schema);

// Transform phase: These only run during formatting.
if relation_transformation_enabled {
if let Err(mut err) = self.standardiser_for_formatting.standardise(ast_schema, &mut schema) {
diagnostics.append(&mut err);
// Early return so that the post validation does not have to deal with invalid schemas
return Err(diagnostics);
}
}

// Early return so that the post validation does not have to deal with invalid schemas
diagnostics.to_result()?;

// Phase 6: Post Standardisation Validation
if let Err(mut err) = self.validator.post_standardisation_validate(ast_schema, &mut schema) {
diagnostics.append(&mut err);
Expand Down

0 comments on commit 51d2228

Please sign in to comment.