Skip to content

Commit

Permalink
Merge pull request #2100 from prisma/parser-db/native-types
Browse files Browse the repository at this point in the history
  • Loading branch information
tomhoule committed Jul 19, 2021
2 parents 354c407 + b77b2ce commit dd92cef
Show file tree
Hide file tree
Showing 12 changed files with 233 additions and 187 deletions.
4 changes: 2 additions & 2 deletions .github/workflows/query-engine.yml
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ jobs:

- uses: actions-rs/toolchain@v1
with:
toolchain: "1.51.0"
toolchain: stable
default: true

- uses: olafurpg/setup-scala@v10
Expand Down Expand Up @@ -78,7 +78,7 @@ jobs:

- uses: actions-rs/toolchain@v1
with:
toolchain: "1.51.0"
toolchain: stable
default: true

- uses: actions/cache@v2
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,6 @@ use datamodel::{Configuration, Datamodel};
use introspection_connector::{
ConnectorResult, DatabaseMetadata, IntrospectionConnector, IntrospectionContext, IntrospectionResult, Version,
};
use introspection_core::rpc::RpcImpl;
use migration_connector::MigrationConnector;
use quaint::{prelude::SqlFamily, single::Quaint};
use sql_introspection_connector::SqlIntrospectionConnector;
Expand Down Expand Up @@ -203,7 +202,9 @@ impl TestApi {

#[track_caller]
pub fn assert_eq_datamodels(&self, expected_without_header: &str, result_with_header: &str) {
let parsed_expected = datamodel::parse_datamodel(&self.dm_with_sources(expected_without_header))
let expected = self.dm_with_sources(expected_without_header);
let parsed_expected = datamodel::parse_datamodel(&expected)
.map_err(|err| err.to_pretty_string("schema.prisma", &expected))
.unwrap()
.subject;

Expand All @@ -228,5 +229,7 @@ impl TestApi {

/// Parse `dm` into a [`Datamodel`], panicking with a pretty,
/// source-annotated error message when the schema is invalid.
///
/// `#[track_caller]` makes the panic location point at the calling
/// test rather than at this helper.
#[track_caller]
fn parse_datamodel(dm: &str) -> Datamodel {
    datamodel::parse_datamodel_or_pretty_error(dm, "schema.prisma")
        .unwrap()
        .subject
}
Original file line number Diff line number Diff line change
Expand Up @@ -1164,7 +1164,7 @@ async fn multiple_changed_relation_names_due_to_mapped_models(api: &TestApi) ->
}

#[test_connector(tags(Postgres))]
async fn virtual_cuid_default(api: &TestApi) -> TestResult {
async fn virtual_cuid_default(api: &TestApi) {
api.barrel()
.execute(|migration| {
migration.create_table("User", |t| {
Expand All @@ -1180,18 +1180,24 @@ async fn virtual_cuid_default(api: &TestApi) -> TestResult {
t.add_column("id", types::primary());
});
})
.await?;
.await
.unwrap();

let input_dm = indoc! {r#"
model User {
let input_dm = format!(
r#"
{datasource}
model User {{
id String @id @default(cuid()) @db.VarChar(30)
non_id String @default(cuid()) @db.VarChar(30)
}
}}
model User2 {
model User2 {{
id String @id @default(uuid()) @db.VarChar(36)
}
"#};
}}
"#,
datasource = api.datasource_block()
);

let final_dm = indoc! {r#"
model User {
Expand All @@ -1208,9 +1214,7 @@ async fn virtual_cuid_default(api: &TestApi) -> TestResult {
}
"#};

api.assert_eq_datamodels(final_dm, &api.re_introspect(input_dm).await?);

Ok(())
api.assert_eq_datamodels(final_dm, &api.re_introspect(&input_dm).await.unwrap());
}

#[test_connector(tags(Postgres))]
Expand Down Expand Up @@ -1282,7 +1286,7 @@ async fn comments_should_be_kept(api: &TestApi) -> TestResult {
}

#[test_connector]
async fn updated_at(api: &TestApi) -> TestResult {
async fn updated_at(api: &TestApi) {
api.barrel()
.execute(|migration| {
migration.create_table("User", move |t| {
Expand All @@ -1294,7 +1298,8 @@ async fn updated_at(api: &TestApi) -> TestResult {
t.add_column("id", types::primary());
});
})
.await?;
.await
.unwrap();

let native_datetime = if api.sql_family().is_postgres() {
"@db.Timestamp(6)"
Expand All @@ -1304,12 +1309,15 @@ async fn updated_at(api: &TestApi) -> TestResult {
""
};
let input_dm = formatdoc! {r#"
{datasource}
model User {{
id Int @id @default(autoincrement())
lastupdated DateTime? @updatedAt {native_datetime}
}}
"#,
native_datetime = native_datetime,
datasource = api.datasource_block(),
};

let final_dm = formatdoc! {r#"
Expand All @@ -1325,13 +1333,11 @@ async fn updated_at(api: &TestApi) -> TestResult {
native_datetime = native_datetime,
};

api.assert_eq_datamodels(&final_dm, &api.re_introspect(&input_dm).await?);

Ok(())
api.assert_eq_datamodels(&final_dm, &api.re_introspect(&input_dm).await.unwrap());
}

#[test_connector(tags(Mssql))]
async fn updated_at_with_native_types_on(api: &TestApi) -> TestResult {
async fn updated_at_with_native_types_on(api: &TestApi) {
api.barrel()
.execute(|migration| {
migration.create_table("User", move |t| {
Expand All @@ -1344,9 +1350,15 @@ async fn updated_at_with_native_types_on(api: &TestApi) -> TestResult {
t.add_column("id", types::primary());
});
})
.await?;
.await
.unwrap();

let input_dm = indoc! {r#"
datasource db {
provider = "sqlserver"
url = env("TEST_DATABASE_URL")
}
model User {
id Int @id
lastupdated DateTime? @updatedAt
Expand All @@ -1366,9 +1378,7 @@ async fn updated_at_with_native_types_on(api: &TestApi) -> TestResult {
}
"#};

api.assert_eq_datamodels(final_dm, &api.re_introspect(input_dm).await?);

Ok(())
api.assert_eq_datamodels(final_dm, &api.re_introspect(input_dm).await.unwrap());
}

#[test_connector]
Expand Down
4 changes: 2 additions & 2 deletions libs/datamodel/core/src/transform/ast_to_dml/db.rs
Original file line number Diff line number Diff line change
Expand Up @@ -7,11 +7,11 @@ mod context;
mod names;
mod types;

pub(crate) use types::ScalarFieldType;
pub(crate) use types::{ScalarField, ScalarFieldType};

use self::{
context::Context,
types::{RelationField, ScalarField, Types},
types::{RelationField, Types},
};
use crate::{ast, diagnostics::Diagnostics, Datasource};
use datamodel_connector::{Connector, EmptyDatamodelConnector};
Expand Down
9 changes: 9 additions & 0 deletions libs/datamodel/core/src/transform/ast_to_dml/db/attributes.rs
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
mod native_types;

use super::{
context::{Arguments, Context},
types::{IndexData, ModelData, RelationField, ScalarField},
Expand Down Expand Up @@ -202,6 +204,13 @@ fn visit_scalar_field_attributes<'ast>(
visit_field_default(args, scalar_field_data, model_id, field_id, ctx);
});

if let ScalarFieldType::BuiltInScalar(scalar_type) = scalar_field_data.r#type {
// native type attributes
attributes.visit_datasource_scoped(ctx, |type_name, args, ctx| {
native_types::visit_native_type_attribute(type_name, args, scalar_type, scalar_field_data, ctx)
});
}

// @unique
attributes.visit_optional_single("unique", ctx, |args, ctx| {
if let Some(source) = &ctx.db.datasource{
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,96 @@
use crate::{
ast,
diagnostics::DatamodelError,
transform::{
ast_to_dml::db::{context::Context, types::ScalarField},
helpers::ValueValidator,
},
};
use datamodel_connector::connector_error::{ConnectorError, ErrorKind};
use dml::scalars::ScalarType;
use itertools::Itertools;

/// Validate a datasource-scoped native type attribute (e.g. `@db.VarChar(30)`)
/// against the active connector and, when valid, record it on the scalar field.
///
/// On any validation failure a diagnostic is pushed and the field is left
/// unchanged.
pub(super) fn visit_native_type_attribute<'ast>(
    type_name: &'ast str,
    attr: &'ast ast::Attribute,
    scalar_type: ScalarType,
    scalar_field: &mut ScalarField<'ast>,
    ctx: &mut Context<'ast>,
) {
    let diagnostics = &mut ctx.diagnostics;

    // Native type arguments are purely positional; render each one as a string.
    let string_args: Vec<String> = attr
        .arguments
        .iter()
        .map(|argument| ValueValidator::new(&argument.value).raw())
        .collect();

    // The connector must know a constructor with this name.
    let constructor = match ctx.db.active_connector().find_native_type_constructor(type_name) {
        Some(constructor) => constructor,
        None => {
            diagnostics.push_error(DatamodelError::new_connector_error(
                &ConnectorError::from_kind(ErrorKind::NativeTypeNameUnknown {
                    native_type: type_name.to_owned(),
                    connector_name: ctx
                        .db
                        .datasource()
                        .map(|ds| ds.active_provider.clone())
                        .unwrap_or_else(|| "Default".to_owned()),
                })
                .to_string(),
                attr.span,
            ));
            return;
        }
    };

    let given = string_args.len();
    let required = constructor._number_of_args;
    let optional = constructor._number_of_optional_args;

    // Too few arguments, or too many for a constructor without optional ones.
    if given < required || (given > required && optional == 0) {
        diagnostics.push_error(DatamodelError::new_argument_count_missmatch_error(
            type_name, required, given, attr.span,
        ));
        return;
    }

    // Too many arguments for a constructor that does accept optional ones.
    if optional > 0 && given > required + optional {
        diagnostics.push_error(DatamodelError::new_connector_error(
            &ConnectorError::from_kind(ErrorKind::OptionalArgumentCountMismatchError {
                native_type: type_name.to_owned(),
                optional_count: optional,
                given_count: given,
            })
            .to_string(),
            attr.span,
        ));
        return;
    }

    // The native type must be compatible with the field's Prisma scalar type.
    if !constructor.prisma_types.contains(&scalar_type) {
        diagnostics.push_error(DatamodelError::new_connector_error(
            &ConnectorError::from_kind(ErrorKind::IncompatibleNativeType {
                native_type: type_name.to_owned(),
                field_type: scalar_type.to_string(),
                expected_types: constructor
                    .prisma_types
                    .iter()
                    .map(|s| s.to_string())
                    .join(" or "),
            })
            .to_string(),
            attr.span,
        ));
        return;
    }

    // Finally, let the connector validate the concrete argument values.
    if let Err(connector_error) =
        ctx.db.active_connector().parse_native_type(type_name, string_args.clone())
    {
        diagnostics.push_error(DatamodelError::new_connector_error(
            &connector_error.to_string(),
            attr.span,
        ));
        return;
    }

    scalar_field.native_type = Some((type_name, string_args))
}
5 changes: 0 additions & 5 deletions libs/datamodel/core/src/transform/ast_to_dml/db/context.rs
Original file line number Diff line number Diff line change
Expand Up @@ -104,11 +104,6 @@ impl<'ast> Context<'ast> {
f(&mut attributes, self);

for attribute in attributes.unused_attributes() {
// Native types...
if attribute.name.name.contains('.') {
continue;
}

self.push_error(DatamodelError::new_attribute_not_known_error(
&attribute.name.name,
attribute.name.span,
Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
use super::*;
use datamodel_connector::connector_error::{ConnectorError, ErrorKind};

#[derive(Default)]
pub(crate) struct Attributes<'ast> {
Expand Down Expand Up @@ -70,4 +71,62 @@ impl<'ast> Attributes<'ast> {
assert!(self.unused_attributes.remove(&attr_idx));
}
}

/// Look for an optional attribute with a name of the form
/// "<datasource_name>.<attribute_name>", and call the passed-in function
/// with the attribute name and the arguments.
///
/// At most one such attribute is expected per field; a duplicate is
/// reported as an error, and a wrong datasource prefix is reported with a
/// suggested correction.
///
/// Also note that native type arguments are treated differently from
/// arguments to other attributes: everywhere else, attributes are named,
/// with a default that can be first, but with native types, arguments are
/// purely positional.
pub(crate) fn visit_datasource_scoped<'ctx>(
&mut self,
ctx: &'ctx mut Context<'ast>,
f: impl FnOnce(&'ast str, &'ast ast::Attribute, &mut Context<'ast>),
) {
// Without a datasource there is nothing to scope against; scoped
// attributes stay in `unused_attributes` and are reported elsewhere.
let datasource = if let Some(ds) = ctx.db.datasource() { ds } else { return };

// Every dotted attribute name is considered datasource-scoped.
let attrs = self
.attributes
.iter()
.enumerate()
.filter(|(_, attr)| attr.name.name.contains('.'));
let mut native_type_attr = None;

// Extract the attribute, validating that:
//
// 1. All scoped attributes are scoped with the right datasource name
// 2. There are no duplicates
for (attr_idx, attr) in attrs {
// Mark the attribute as handled; it must not have been consumed yet.
assert!(self.unused_attributes.remove(&attr_idx));

match attr.name.name.split_once('.') {
// Unreachable: the filter above guarantees the name contains a '.'.
None => unreachable!(),
Some((ds, attr_name)) if ds == datasource.name => {
// `replace` returning Some means we already saw a scoped
// attribute on this field: that is a duplicate.
if native_type_attr.replace((attr, attr_name)).is_some() {
ctx.push_error(DatamodelError::new_duplicate_attribute_error(&ds, attr.span));
}
}
Some((bad_datasource, attr_name)) => {
// Wrong prefix: suggest the same attribute under the
// declared datasource's name.
ctx.push_error(DatamodelError::new_connector_error(
&ConnectorError::from_kind(ErrorKind::InvalidPrefixForNativeTypes {
given_prefix: bad_datasource.to_owned(),
expected_prefix: datasource.name.clone(),
suggestion: [datasource.name.as_str(), attr_name].join("."),
})
.to_string(),
attr.span,
));
}
}
}

let (attr, attr_name) = match native_type_attr {
Some(attr) => attr,
None => return, // early return if absent: it's optional
};

f(attr_name, attr, ctx);
}
}

0 comments on commit dd92cef

Please sign in to comment.