Skip to content

Commit

Permalink
Experimental planetScale mode (#1957)
Browse files Browse the repository at this point in the history
* Say no to barrel

The existing_database_tests suite is gone because it is redundant:
diffing without migrations is tested by a large array of tests, and
diffing in the presence of migrations is done in the respective command
test suites (diagnose_migration_history tests, etc.).

Since this was the last test suite using barrel in the migration engine,
we don't need that dependency anymore.

* Add planetScaleMode datasource arg and preview feature

TODO: allow the argument only when the preview feature is on

* Delete dead code

* Streamline migration core lib.rs

* Implement PlanetScale mode in the migration engine

This is a squashed version of two commits:

- Parse configuration and datamodel together in ME
- Finish ME-side implementation of planetscale mode

* Validate that preview feature is enabled for planetScaleMode = true

* Do not include mongo in migration engine tests

It adds unnecessary extra compile time.
  • Loading branch information
tomhoule committed May 27, 2021
1 parent 5e7f7c5 commit 9b2b3f8
Show file tree
Hide file tree
Showing 39 changed files with 453 additions and 610 deletions.
1 change: 0 additions & 1 deletion Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

12 changes: 6 additions & 6 deletions introspection-engine/introspection-engine-tests/src/test_api.rs
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,8 @@ use quaint::{prelude::SqlFamily, single::Quaint};
use sql_introspection_connector::SqlIntrospectionConnector;
use sql_migration_connector::SqlMigrationConnector;
use sql_schema_describer::SqlSchema;
use test_setup::{sqlite_test_url, TestApiArgs};
use std::fmt::Write;
use test_setup::{sqlite_test_url, DatasourceBlock, TestApiArgs};
use tracing::Instrument;

pub struct TestApi {
Expand Down Expand Up @@ -168,12 +169,12 @@ impl TestApi {
self.args.tags()
}

pub fn datasource_block(&self) -> String {
self.args.datasource_block(&self.connection_string)
pub fn datasource_block(&self) -> DatasourceBlock<'_> {
self.args.datasource_block(&self.connection_string, &[])
}

pub fn configuration(&self) -> Configuration {
datamodel::parse_configuration(&self.datasource_block())
datamodel::parse_configuration(&self.datasource_block().to_string())
.unwrap()
.subject
}
Expand All @@ -196,8 +197,7 @@ impl TestApi {
pub fn dm_with_sources(&self, schema: &str) -> String {
let mut out = String::with_capacity(320 + schema.len());

out.push_str(&self.datasource_block());
out.push_str(schema);
write!(out, "{}\n{}", self.datasource_block(), schema).unwrap();

out
}
Expand Down
7 changes: 7 additions & 0 deletions libs/datamodel/core/src/ast/top.rs
Original file line number Diff line number Diff line change
Expand Up @@ -69,6 +69,13 @@ impl Top {
}
}

/// Returns the generator config if this top-level item is a generator block,
/// `None` for every other kind of top-level item.
pub fn as_generator(&self) -> Option<&GeneratorConfig> {
    if let Top::Generator(generator) = self {
        Some(generator)
    } else {
        None
    }
}

pub fn as_type_alias(&self) -> Option<&Field> {
match self {
Top::Type(r#type) => Some(r#type),
Expand Down
10 changes: 6 additions & 4 deletions libs/datamodel/core/src/common/preview_features.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,8 +4,8 @@ use serde::{Serialize, Serializer};
use PreviewFeature::*;

macro_rules! features {
($( $variant:ident ),*) => {
#[derive(Debug, Copy, Clone, PartialEq)]
($( $variant:ident $(,)? ),*) => {
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub enum PreviewFeature {
$(
$variant,
Expand Down Expand Up @@ -57,7 +57,8 @@ features!(
NApi,
SelectRelationCount,
OrderByAggregateGroup,
FilterJson
FilterJson,
PlanetScaleMode,
);

// Mapping of which active, deprecated and hidden
Expand All @@ -71,7 +72,8 @@ lazy_static! {
NApi,
SelectRelationCount,
OrderByAggregateGroup,
FilterJson
FilterJson,
PlanetScaleMode,
]).with_hidden(vec![
MongoDb
]).with_deprecated(vec![
Expand Down
12 changes: 10 additions & 2 deletions libs/datamodel/core/src/configuration/configuration.rs
Original file line number Diff line number Diff line change
Expand Up @@ -10,18 +10,26 @@ pub struct Configuration {
}

impl Configuration {
pub fn validate_that_one_datasource_is_provided(self) -> Result<Self, Diagnostics> {
pub fn validate_that_one_datasource_is_provided(&self) -> Result<(), Diagnostics> {
if self.datasources.is_empty() {
Err(DatamodelError::new_validation_error(
"You defined no datasource. You must define exactly one datasource.",
crate::ast::Span::new(0, 0),
)
.into())
} else {
Ok(self)
Ok(())
}
}

/// Returns true if PlanetScale mode is enabled on the first datasource.
/// Defaults to `false` when no datasource is defined.
pub fn planet_scale_mode(&self) -> bool {
    match self.datasources.first() {
        Some(source) => source.planet_scale_mode,
        None => false,
    }
}

pub fn preview_features(&self) -> impl Iterator<Item = &PreviewFeature> {
self.generators
.iter()
Expand Down
2 changes: 2 additions & 0 deletions libs/datamodel/core/src/configuration/datasource.rs
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,8 @@ pub struct Datasource {
pub active_connector: Box<dyn Connector>,
/// An optional user-defined shadow database URL.
pub(crate) shadow_database_url: Option<(StringFromEnvVar, Span)>,
/// Whether planetScaleMode = true was provided
pub planet_scale_mode: bool,
}

impl std::fmt::Debug for Datasource {
Expand Down
12 changes: 11 additions & 1 deletion libs/datamodel/core/src/diagnostics/validated.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
use crate::ast::reformat::MissingField;
use crate::diagnostics::DatamodelWarning;
use crate::{common::preview_features::PreviewFeature, diagnostics::DatamodelWarning};
use crate::{Configuration, Datamodel, Datasource, Generator};
use std::collections::HashSet;

#[derive(Debug, PartialEq, Clone)]
pub struct Validated<T> {
Expand All @@ -15,3 +16,12 @@ pub type ValidatedDatasources = Validated<Vec<Datasource>>;
pub type ValidatedGenerator = Validated<Generator>;
pub type ValidatedGenerators = Validated<Vec<Generator>>;
pub type ValidatedMissingFields = Validated<Vec<MissingField>>;

impl ValidatedGenerators {
    /// Returns the union of preview features enabled across all generators
    /// in this validated set (duplicates collapse into the set).
    pub(crate) fn preview_features(&self) -> HashSet<&PreviewFeature> {
        let mut features = HashSet::new();
        for generator in &self.subject {
            features.extend(generator.preview_features.iter());
        }
        features
    }
}
53 changes: 42 additions & 11 deletions libs/datamodel/core/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -90,37 +90,55 @@ pub mod walkers;
pub use crate::dml::*;
pub use configuration::*;

use crate::ast::SchemaAst;
use crate::diagnostics::{ValidatedConfiguration, ValidatedDatamodel, ValidatedDatasources};
use crate::diagnostics::{Validated, ValidatedConfiguration, ValidatedDatamodel, ValidatedDatasources};
use crate::{ast::SchemaAst, common::preview_features::PreviewFeature};
use std::collections::HashSet;
use transform::{
ast_to_dml::{DatasourceLoader, GeneratorLoader, ValidationPipeline},
dml_to_ast::{DatasourceSerializer, GeneratorSerializer, LowerDmlToAst},
};

/// Parse and validate the whole schema: both the configuration
/// (datasources and generators) and the data model are parsed together
/// and returned as a pair, along with any validation warnings.
pub fn parse_schema(schema_str: &str) -> Result<Validated<(Configuration, Datamodel)>, diagnostics::Diagnostics> {
parse_datamodel_internal(schema_str, false)
}

/// Parses and validates a datamodel string, using core attributes only.
pub fn parse_datamodel(datamodel_string: &str) -> Result<ValidatedDatamodel, diagnostics::Diagnostics> {
parse_datamodel_internal(datamodel_string, false)
parse_datamodel_internal(datamodel_string, false).map(|validated| Validated {
subject: validated.subject.1,
warnings: validated.warnings,
})
}

pub fn parse_datamodel_for_formatter(datamodel_string: &str) -> Result<ValidatedDatamodel, diagnostics::Diagnostics> {
parse_datamodel_internal(datamodel_string, true)
parse_datamodel_internal(datamodel_string, true).map(|validated| Validated {
subject: validated.subject.1,
warnings: validated.warnings,
})
}

/// Parses and validates a datamodel string, using core attributes only.
/// In case of an error, a pretty, colorful string is returned.
pub fn parse_datamodel_or_pretty_error(datamodel_string: &str, file_name: &str) -> Result<ValidatedDatamodel, String> {
parse_datamodel_internal(datamodel_string, false).map_err(|err| err.to_pretty_string(file_name, datamodel_string))
parse_datamodel_internal(datamodel_string, false)
.map_err(|err| err.to_pretty_string(file_name, datamodel_string))
.map(|validated| Validated {
subject: validated.subject.1,
warnings: validated.warnings,
})
}

fn parse_datamodel_internal(
datamodel_string: &str,
transform: bool,
) -> Result<ValidatedDatamodel, diagnostics::Diagnostics> {
) -> Result<Validated<(Configuration, Datamodel)>, diagnostics::Diagnostics> {
let mut diagnostics = diagnostics::Diagnostics::new();
let ast = ast::parse_schema(datamodel_string)?;

let sources = load_sources(&ast)?;
let generators = GeneratorLoader::load_generators_from_ast(&ast)?;
let preview_features = generators.preview_features();
let sources = load_sources(&ast, &&preview_features)?;
let validator = ValidationPipeline::new(&sources.subject);

diagnostics.append_warning_vec(sources.warnings);
Expand All @@ -129,7 +147,16 @@ fn parse_datamodel_internal(
match validator.validate(&ast, transform) {
Ok(mut src) => {
src.warnings.append(&mut diagnostics.warnings);
Ok(src)
Ok(Validated {
subject: (
Configuration {
generators: generators.subject,
datasources: sources.subject,
},
src.subject,
),
warnings: src.warnings,
})
}
Err(mut err) => {
diagnostics.append(&mut err);
Expand All @@ -146,8 +173,9 @@ pub fn parse_schema_ast(datamodel_string: &str) -> Result<SchemaAst, diagnostics
pub fn parse_configuration(schema: &str) -> Result<ValidatedConfiguration, diagnostics::Diagnostics> {
let mut warnings = Vec::new();
let ast = ast::parse_schema(schema)?;
let mut validated_sources = load_sources(&ast)?;
let mut validated_generators = GeneratorLoader::load_generators_from_ast(&ast)?;
let preview_features = validated_generators.preview_features();
let mut validated_sources = load_sources(&ast, &preview_features)?;

warnings.append(&mut validated_generators.warnings);
warnings.append(&mut validated_sources.warnings);
Expand All @@ -161,9 +189,12 @@ pub fn parse_configuration(schema: &str) -> Result<ValidatedConfiguration, diagn
})
}

fn load_sources(schema_ast: &SchemaAst) -> Result<ValidatedDatasources, diagnostics::Diagnostics> {
fn load_sources(
schema_ast: &SchemaAst,
preview_features: &HashSet<&PreviewFeature>,
) -> Result<ValidatedDatasources, diagnostics::Diagnostics> {
let source_loader = DatasourceLoader::new();
source_loader.load_datasources_from_ast(&schema_ast)
source_loader.load_datasources_from_ast(&schema_ast, preview_features)
}

//
Expand Down
87 changes: 63 additions & 24 deletions libs/datamodel/core/src/transform/ast_to_dml/datasource_loader.rs
Original file line number Diff line number Diff line change
Expand Up @@ -6,10 +6,16 @@ use super::{
},
datasource_provider::DatasourceProvider,
};
use crate::diagnostics::{DatamodelError, Diagnostics, ValidatedDatasource, ValidatedDatasources};
use crate::{ast, Datasource};
use crate::{ast::Span, configuration::StringFromEnvVar};
use std::collections::HashMap;
use crate::{
ast::SourceConfig,
diagnostics::{DatamodelError, Diagnostics, ValidatedDatasource, ValidatedDatasources},
};
use crate::{ast::Span, common::preview_features::PreviewFeature, configuration::StringFromEnvVar};
use crate::{
ast::{self},
Datasource,
};
use std::collections::{HashMap, HashSet};

const PREVIEW_FEATURES_KEY: &str = "previewFeatures";
const SHADOW_DATABASE_URL_KEY: &str = "shadowDatabaseUrl";
Expand All @@ -31,12 +37,16 @@ impl DatasourceLoader {
/// Loads all datasources from the provided schema AST.
/// - `ignore_datasource_urls`: datasource URLs are not parsed. They are replaced with dummy values.
/// - `datasource_url_overrides`: datasource URLs are not parsed and overridden with the provided ones.
pub fn load_datasources_from_ast(&self, ast_schema: &ast::SchemaAst) -> Result<ValidatedDatasources, Diagnostics> {
pub fn load_datasources_from_ast(
&self,
ast_schema: &ast::SchemaAst,
preview_features: &HashSet<&PreviewFeature>,
) -> Result<ValidatedDatasources, Diagnostics> {
let mut sources = vec![];
let mut diagnostics = Diagnostics::new();

for src in &ast_schema.sources() {
match self.lift_datasource(&src) {
match self.lift_datasource(&src, preview_features) {
Ok(loaded_src) => {
diagnostics.append_warning_vec(loaded_src.warnings);
sources.push(loaded_src.subject)
Expand Down Expand Up @@ -82,7 +92,11 @@ impl DatasourceLoader {
}
}

fn lift_datasource(&self, ast_source: &ast::SourceConfig) -> Result<ValidatedDatasource, Diagnostics> {
fn lift_datasource(
&self,
ast_source: &ast::SourceConfig,
preview_features: &HashSet<&PreviewFeature>,
) -> Result<ValidatedDatasource, Diagnostics> {
let source_name = &ast_source.name.name;
let args: HashMap<_, _> = ast_source
.properties
Expand Down Expand Up @@ -135,30 +149,18 @@ impl DatasourceLoader {

let shadow_database_url: Option<(StringFromEnvVar, Span)> =
if let Some(shadow_database_url_arg) = shadow_database_url_arg.as_ref() {
let shadow_database_url = match shadow_database_url_arg.as_str_from_env() {
Ok(shadow_database_url) => {
Some(shadow_database_url).filter(|s| !s.as_literal().map(|lit| lit.is_empty()).unwrap_or(false))
}
match shadow_database_url_arg.as_str_from_env() {
Ok(shadow_database_url) => Some(shadow_database_url)
.filter(|s| !s.as_literal().map(|lit| lit.is_empty()).unwrap_or(false))
.map(|url| (url, shadow_database_url_arg.span())),

// We intentionally ignore the shadow database URL if it is defined in an env var that is missing.
Err(DatamodelError::EnvironmentFunctionalEvaluationError { .. }) => None,

Err(err) => {
return Err(diagnostics.merge_error(err));
}
};

// Temporarily disabled because of processing/hacks on URLs that make comparing the two URLs unreliable.
// if url.value == shadow_database_url.value {
// return Err(
// diagnostics.merge_error(DatamodelError::new_shadow_database_is_same_as_main_url_error(
// source_name.clone(),
// shadow_database_url_arg.span(),
// )),
// );
// }

shadow_database_url.map(|url| (url, shadow_database_url_arg.span()))
}
} else {
None
};
Expand Down Expand Up @@ -186,6 +188,7 @@ impl DatasourceLoader {
documentation,
active_connector: datasource_provider.connector(),
shadow_database_url,
planet_scale_mode: get_planet_scale_mode_arg(&args, preview_features, ast_source)?,
},
warnings: diagnostics.warnings,
})
Expand All @@ -209,6 +212,42 @@ fn get_builtin_datasource_providers() -> Vec<Box<dyn DatasourceProvider>> {
]
}

/// Error message shown when `planetScaleMode = true` is set on a datasource
/// but the `planetScaleMode` preview feature is not enabled in any generator
/// block. The raw string is rendered verbatim to the user.
const PLANET_SCALE_PREVIEW_FEATURE_ERR: &str = r#"
The `planetScaleMode` option can only be set if the preview feature is enabled in a generator block.
Example:
generator client {
provider = "prisma-client-js"
previewFeatures = ["planetScaleMode"]
}
"#;

/// Reads the `planetScaleMode` argument from a datasource block's arguments.
///
/// Returns `Ok(false)` when the argument is absent. When the argument is
/// present and set to `true`, the `planetScaleMode` preview feature must be
/// enabled in a generator block, otherwise a validation error is returned.
fn get_planet_scale_mode_arg(
    args: &HashMap<&str, ValueValidator>,
    preview_features: &HashSet<&PreviewFeature>,
    source: &SourceConfig,
) -> Result<bool, DatamodelError> {
    // Absent argument means the mode is off.
    let value = match args.get("planetScaleMode") {
        Some(value) => value,
        None => return Ok(false),
    };

    let mode_enabled = value.as_bool()?;

    // Enabling the mode is only allowed with the preview feature turned on.
    if mode_enabled && !preview_features.contains(&PreviewFeature::PlanetScaleMode) {
        return Err(DatamodelError::new_source_validation_error(
            PLANET_SCALE_PREVIEW_FEATURE_ERR,
            &source.name.name,
            value.span(),
        ));
    }

    Ok(mode_enabled)
}

fn preview_features_guardrail(args: &HashMap<&str, ValueValidator>) -> Result<(), DatamodelError> {
args.get(PREVIEW_FEATURES_KEY)
.map(|val| -> Result<_, _> { Ok((val.as_array().to_str_vec()?, val.span())) })
Expand Down

0 comments on commit 9b2b3f8

Please sign in to comment.