diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/batch/select_one_singular.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/batch/select_one_singular.rs
index 3cb40f337ce3..5df3d4a27763 100644
--- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/batch/select_one_singular.rs
+++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/batch/select_one_singular.rs
@@ -3,6 +3,10 @@ use query_engine_tests::*;
 #[test_suite(schema(schema))]
 mod singular_batch {
     use indoc::indoc;
+    use query_engine_tests::{
+        query_core::{BatchDocument, QueryDocument},
+        run_query, Runner, TestResult,
+    };

     fn schema() -> String {
         let schema = indoc! {
@@ -253,6 +257,110 @@ mod singular_batch {
         Ok(())
     }

+    // Regression test for https://github.com/prisma/prisma/issues/16548
+    #[connector_test(schema(schemas::generic))]
+    async fn repro_16548(runner: Runner) -> TestResult<()> {
+        run_query!(&runner, r#"mutation { createOneTestModel(data: { id: 1 }) { id } }"#);
+        run_query!(&runner, r#"mutation { createOneTestModel(data: { id: 2 }) { id } }"#);
+
+        // Working case
+        let (res, compact_doc) = compact_batch(
+            &runner,
+            vec![
+                r#"{ findUniqueTestModelOrThrow(where: { id: 1 }) { id } }"#.to_string(),
+                r#"{ findUniqueTestModelOrThrow(where: { id: 2 }) { id } }"#.to_string(),
+            ],
+        )
+        .await?;
+        insta::assert_snapshot!(
+          res.to_string(),
+          @r###"{"batchResult":[{"data":{"findUniqueTestModelOrThrow":{"id":1}}},{"data":{"findUniqueTestModelOrThrow":{"id":2}}}]}"###
+        );
+        assert_eq!(compact_doc.is_compact(), false);
+
+        // Failing case
+        let (res, compact_doc) = compact_batch(
+            &runner,
+            vec![
+                r#"{ findUniqueTestModelOrThrow(where: { id: 2 }) { id } }"#.to_string(),
+                r#"{ findUniqueTestModelOrThrow(where: { id: 3 }) { id } }"#.to_string(),
+            ],
+        )
+        .await?;
+        insta::assert_snapshot!(
+          res.to_string(),
+          @r###"{"batchResult":[{"data":{"findUniqueTestModelOrThrow":{"id":2}}},{"errors":[{"error":"Error occurred during query execution:\nConnectorError(ConnectorError { user_facing_error: Some(KnownError { message: \"An operation failed because it depends on one or more records that were required but not found. Expected a record, found none.\", meta: Object {\"cause\": String(\"Expected a record, found none.\")}, error_code: \"P2025\" }), kind: RecordDoesNotExist })","user_facing_error":{"is_panic":false,"message":"An operation failed because it depends on one or more records that were required but not found. Expected a record, found none.","meta":{"cause":"Expected a record, found none."},"error_code":"P2025"}}]}]}"###
+        );
+        assert_eq!(compact_doc.is_compact(), false);
+
+        // Mix of findUnique & findUniqueOrThrow
+        let (res, compact_doc) = compact_batch(
+            &runner,
+            vec![
+                r#"{ findUniqueTestModel(where: { id: 3 }) { id } }"#.to_string(),
+                r#"{ findUniqueTestModelOrThrow(where: { id: 2 }) { id } }"#.to_string(),
+            ],
+        )
+        .await?;
+        insta::assert_snapshot!(
+          res.to_string(),
+          @r###"{"batchResult":[{"data":{"findUniqueTestModel":null}},{"data":{"findUniqueTestModelOrThrow":{"id":2}}}]}"###
+        );
+        assert_eq!(compact_doc.is_compact(), false);
+
+        // Mix of findUnique & findUniqueOrThrow
+        let (res, compact_doc) = compact_batch(
+            &runner,
+            vec![
+                r#"{ findUniqueTestModel(where: { id: 2 }) { id } }"#.to_string(),
+                r#"{ findUniqueTestModelOrThrow(where: { id: 4 }) { id } }"#.to_string(),
+            ],
+        )
+        .await?;
+        insta::assert_snapshot!(
+          res.to_string(),
+          @r###"{"batchResult":[{"data":{"findUniqueTestModel":{"id":2}}},{"errors":[{"error":"Error occurred during query execution:\nConnectorError(ConnectorError { user_facing_error: Some(KnownError { message: \"An operation failed because it depends on one or more records that were required but not found. Expected a record, found none.\", meta: Object {\"cause\": String(\"Expected a record, found none.\")}, error_code: \"P2025\" }), kind: RecordDoesNotExist })","user_facing_error":{"is_panic":false,"message":"An operation failed because it depends on one or more records that were required but not found. Expected a record, found none.","meta":{"cause":"Expected a record, found none."},"error_code":"P2025"}}]}]}"###
+        );
+        assert_eq!(compact_doc.is_compact(), false);
+
+        // Mix of findUnique & findUniqueOrThrow
+        let (res, compact_doc) = compact_batch(
+            &runner,
+            vec![
+                r#"{ findUniqueTestModelOrThrow(where: { id: 2 }) { id } }"#.to_string(),
+                r#"{ findUniqueTestModel(where: { id: 3 }) { id } }"#.to_string(),
+            ],
+        )
+        .await?;
+        insta::assert_snapshot!(
+          res.to_string(),
+          @r###"{"batchResult":[{"data":{"findUniqueTestModelOrThrow":{"id":2}}},{"data":{"findUniqueTestModel":null}}]}"###
+        );
+        assert_eq!(compact_doc.is_compact(), false);
+
+        Ok(())
+    }
+
+    async fn compact_batch(runner: &Runner, queries: Vec<String>) -> TestResult<(QueryResult, BatchDocument)> {
+        // Ensure batched queries are valid
+        let res = runner.batch(queries.clone(), false, None).await?;
+        res.assert_success();
+
+        let doc = GraphQlBody::Multi(MultiQuery::new(
+            queries.into_iter().map(Into::into).collect(),
+            false,
+            None,
+        ))
+        .into_doc()
+        .unwrap();
+        let batch = match doc {
+            QueryDocument::Multi(batch) => batch.compact(runner.query_schema()),
+            _ => unreachable!(),
+        };
+
+        Ok((res, batch))
+    }
+
     async fn create_test_data(runner: &Runner) -> TestResult<()> {
         runner
             .query(
diff --git a/query-engine/core/src/query_document/mod.rs b/query-engine/core/src/query_document/mod.rs
index 00047653dbf6..b91ee1cbcaf1 100644
--- a/query-engine/core/src/query_document/mod.rs
+++ b/query-engine/core/src/query_document/mod.rs
@@ -72,7 +72,7 @@ impl BatchDocument {
     /// - non scalar filters (ie: relation filters, boolean operators...)
     /// - any scalar filters that is not `EQUALS`
     fn invalid_compact_filter(op: &Operation, schema: &QuerySchemaRef) -> bool {
-        if !op.is_find_unique() {
+        if !op.is_find_unique(schema) {
             return true;
         }

@@ -95,10 +95,11 @@ impl BatchDocument {
         })
     }

+    /// Checks whether a BatchDocument can be compacted.
     fn can_compact(&self, schema: &QuerySchemaRef) -> bool {
         match self {
             Self::Multi(operations, _) => match operations.split_first() {
-                Some((first, rest)) if first.is_find_unique() => {
+                Some((first, rest)) if first.is_find_unique(schema) => {
                     // If any of the operation has an "invalid" compact filter (see documentation of `invalid_compact_filter`),
                     // we do not compact the queries.
                     let has_invalid_compact_filter =
@@ -109,7 +110,7 @@ impl BatchDocument {
                     }

                     rest.iter().all(|op| {
-                        op.is_find_unique()
+                        op.is_find_unique(schema)
                             && first.name() == op.name()
                             && first.nested_selections().len() == op.nested_selections().len()
                             && first
diff --git a/query-engine/core/src/query_document/operation.rs b/query-engine/core/src/query_document/operation.rs
index 85da860d9b97..8aa54dccc08b 100644
--- a/query-engine/core/src/query_document/operation.rs
+++ b/query-engine/core/src/query_document/operation.rs
@@ -1,4 +1,5 @@
 use super::Selection;
+use schema::QuerySchemaRef;

 #[derive(Debug, Clone)]
 pub enum Operation {
@@ -7,11 +8,11 @@ pub enum Operation {
 }

 impl Operation {
-    pub fn is_find_unique(&self) -> bool {
-        match self {
-            Self::Read(selection) => selection.is_find_unique(),
-            _ => false,
-        }
+    pub fn is_find_unique(&self, schema: &QuerySchemaRef) -> bool {
+        schema
+            .find_query_field(self.name())
+            .map(|field| field.is_find_unique())
+            .unwrap_or(false)
     }

     pub fn into_read(self) -> Option<Selection> {
diff --git a/query-engine/schema/src/output_types.rs b/query-engine/schema/src/output_types.rs
index f9b3b30ddad6..bec07755dbe4 100644
--- a/query-engine/schema/src/output_types.rs
+++ b/query-engine/schema/src/output_types.rs
@@ -191,4 +191,12 @@ impl OutputField {
     pub fn model(&self) -> Option<&ModelRef> {
         self.query_info.as_ref().and_then(|info| info.model.as_ref())
     }
+
+    pub fn is_find_unique(&self) -> bool {
+        matches!(self.query_tag(), Some(&QueryTag::FindUnique))
+    }
+
+    fn query_tag(&self) -> Option<&QueryTag> {
+        self.query_info.as_ref().map(|info| &info.tag)
+    }
 }
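
For readers following the change outside the diff: the compaction check now resolves each batched operation against the query schema and inspects the resolved field's `QueryTag`, rather than deciding from the selection alone. The sketch below is a minimal, self-contained illustration of why that matters for `findUniqueOrThrow`; the types (`QuerySchema`, `QueryField`, `QueryTag`) are stand-ins that only mirror the shape suggested by the diff, not the engine's real `schema` crate, and the name-prefix variant is shown purely as the kind of syntactic check that cannot tell the two operations apart.

```rust
// Illustrative only: hypothetical stand-ins for the schema types, not the real crate.
#[derive(Debug, PartialEq)]
enum QueryTag {
    FindUnique,
    FindUniqueOrThrow,
    FindMany,
}

struct QueryField {
    name: &'static str,
    tag: QueryTag,
}

struct QuerySchema {
    fields: Vec<QueryField>,
}

impl QuerySchema {
    // Look up a top-level query field by its operation name.
    fn find_query_field(&self, name: &str) -> Option<&QueryField> {
        self.fields.iter().find(|f| f.name == name)
    }
}

// A purely syntactic check: it also matches "findUnique...OrThrow" operations.
fn is_find_unique_by_name(op_name: &str) -> bool {
    op_name.starts_with("findUnique")
}

// The schema-driven check: resolve the operation and inspect its tag.
fn is_find_unique_by_tag(schema: &QuerySchema, op_name: &str) -> bool {
    schema
        .find_query_field(op_name)
        .map(|f| f.tag == QueryTag::FindUnique)
        .unwrap_or(false)
}

fn main() {
    let schema = QuerySchema {
        fields: vec![
            QueryField { name: "findUniqueTestModel", tag: QueryTag::FindUnique },
            QueryField { name: "findUniqueTestModelOrThrow", tag: QueryTag::FindUniqueOrThrow },
            QueryField { name: "findManyTestModel", tag: QueryTag::FindMany },
        ],
    };

    // The name-based check treats the OrThrow variant as compactable.
    assert!(is_find_unique_by_name("findUniqueTestModelOrThrow"));
    // The tag-based check does not, so such batches stay uncompacted
    // and each operation can fail (or succeed) on its own.
    assert!(!is_find_unique_by_tag(&schema, "findUniqueTestModelOrThrow"));
    assert!(is_find_unique_by_tag(&schema, "findUniqueTestModel"));
}
```

A side effect of the schema lookup is that an operation name the schema does not know simply yields `false` (see `unwrap_or(false)` in the diff), so such a batch is never compacted.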