diff --git a/.github/workflows/on-push-to-main.yml b/.github/workflows/on-push-to-main.yml index 5c095a39098e..c2967ab9a8a7 100644 --- a/.github/workflows/on-push-to-main.yml +++ b/.github/workflows/on-push-to-main.yml @@ -17,7 +17,7 @@ jobs: steps: - uses: actions/checkout@v4 - - uses: cachix/install-nix-action@v25 + - uses: cachix/install-nix-action@v26 with: # we need internet access for the moment extra_nix_config: | diff --git a/.github/workflows/publish-prisma-schema-wasm.yml b/.github/workflows/publish-prisma-schema-wasm.yml index 9560ebeef3ba..4b5db4c04b30 100644 --- a/.github/workflows/publish-prisma-schema-wasm.yml +++ b/.github/workflows/publish-prisma-schema-wasm.yml @@ -57,7 +57,7 @@ jobs: run: echo "SLACK_FOOTER=<$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID|Click here to go to the job logs>" >> $GITHUB_ENV - name: Slack Notification on Failure if: ${{ failure() }} - uses: rtCamp/action-slack-notify@v2.2.1 + uses: rtCamp/action-slack-notify@v2.3.0 env: SLACK_TITLE: 'Building and publishing @prisma/prisma-schema-wasm failed :x:' SLACK_COLOR: '#FF0000' diff --git a/.github/workflows/publish-query-engine-wasm.yml b/.github/workflows/publish-query-engine-wasm.yml index 41d5d8611b15..bba9dc1eb658 100644 --- a/.github/workflows/publish-query-engine-wasm.yml +++ b/.github/workflows/publish-query-engine-wasm.yml @@ -57,7 +57,7 @@ jobs: run: echo "SLACK_FOOTER=<$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID|Click here to go to the job logs>" >> $GITHUB_ENV - name: Slack Notification on Failure if: ${{ failure() }} - uses: rtCamp/action-slack-notify@v2.2.1 + uses: rtCamp/action-slack-notify@v2.3.0 env: SLACK_TITLE: "Building and publishing @prisma/query-engine-wasm failed :x:" SLACK_COLOR: "#FF0000" diff --git a/.github/workflows/test-query-engine-driver-adapters.yml b/.github/workflows/test-query-engine-driver-adapters.yml index 6362525c053b..ce600021b413 100644 --- 
a/.github/workflows/test-query-engine-driver-adapters.yml +++ b/.github/workflows/test-query-engine-driver-adapters.yml @@ -41,6 +41,8 @@ jobs: setup_task: "dev-neon-wasm" - name: "libsql (wasm)" setup_task: "dev-libsql-wasm" + - name: "d1 (wasm)" + setup_task: "dev-d1" node_version: ["18"] partition: ["1/4", "2/4", "3/4", "4/4"] env: @@ -68,7 +70,7 @@ jobs: node-version: ${{ matrix.node_version }} - name: "Setup pnpm" - uses: pnpm/action-setup@v2 + uses: pnpm/action-setup@v3.0.0 with: version: 8 diff --git a/.github/workflows/wasm-benchmarks.yml b/.github/workflows/wasm-benchmarks.yml index 48aff148a698..0428eaa0518a 100644 --- a/.github/workflows/wasm-benchmarks.yml +++ b/.github/workflows/wasm-benchmarks.yml @@ -30,7 +30,7 @@ jobs: uses: actions/setup-node@v4 - name: "Setup pnpm" - uses: pnpm/action-setup@v2 + uses: pnpm/action-setup@v3.0.0 with: version: 8 diff --git a/.github/workflows/wasm-size.yml b/.github/workflows/wasm-size.yml index e03195a5651c..6adab5063909 100644 --- a/.github/workflows/wasm-size.yml +++ b/.github/workflows/wasm-size.yml @@ -1,4 +1,4 @@ -name: "QE: WASM size" +name: "QE: WASM Query Engine size" on: pull_request: paths-ignore: @@ -114,7 +114,7 @@ jobs: issue-number: ${{ github.event.pull_request.number }} body: | - ### WASM Size + ### WASM Query Engine file Size |Engine | This PR | Base branch | Diff |------------------|----------------------------------------------|--------------------------------------------------|----------------------------------------------- diff --git a/.gitignore b/.gitignore index d401ff68f180..0c2f9ea43181 100644 --- a/.gitignore +++ b/.gitignore @@ -6,6 +6,7 @@ prisma-gpg-private.asc .test_config *.pending-snap .pending.md +dev.db *.class *.log diff --git a/Cargo.lock b/Cargo.lock index b58037af9285..d838995c6f95 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3538,8 +3538,11 @@ dependencies = [ "psl", "quaint", "schema-core", + "serde", + "serde_json", "sql-schema-connector", "test-setup", + "tokio", "url", ] 
@@ -3810,6 +3813,7 @@ dependencies = [ "futures", "indoc 2.0.3", "insta", + "itertools 0.12.0", "once_cell", "paste", "prisma-value", diff --git a/Makefile b/Makefile index e4764c48b9a5..8c08ecaaa173 100644 --- a/Makefile +++ b/Makefile @@ -149,6 +149,12 @@ dev-libsql-wasm: build-qe-wasm build-driver-adapters-kit test-libsql-wasm: dev-libsql-wasm test-qe-st test-driver-adapter-libsql-wasm: test-libsql-wasm +dev-d1: build-qe-wasm build-driver-adapters-kit + cp $(CONFIG_PATH)/cloudflare-d1 $(CONFIG_FILE) + +test-d1: dev-d1 test-qe-st +test-driver-adapter-d1: test-d1 + start-postgres9: docker compose -f docker-compose.yml up --wait -d --remove-orphans postgres9 diff --git a/prisma-fmt/src/code_actions.rs b/prisma-fmt/src/code_actions.rs index 4f072f60b414..371e791e49cd 100644 --- a/prisma-fmt/src/code_actions.rs +++ b/prisma-fmt/src/code_actions.rs @@ -31,8 +31,13 @@ pub(crate) fn available_actions(schema: String, params: CodeActionParams) -> Vec let datasource = config.datasources.first(); - for source in validated_schema.db.ast().sources() { - relation_mode::edit_referential_integrity(&mut actions, &params, validated_schema.db.source(), source) + for source in validated_schema.db.ast_assert_single().sources() { + relation_mode::edit_referential_integrity( + &mut actions, + &params, + validated_schema.db.source_assert_single(), + source, + ) } // models AND views @@ -45,21 +50,27 @@ pub(crate) fn available_actions(schema: String, params: CodeActionParams) -> Vec multi_schema::add_schema_block_attribute_model( &mut actions, &params, - validated_schema.db.source(), + validated_schema.db.source_assert_single(), config, model, ); - multi_schema::add_schema_to_schemas(&mut actions, &params, validated_schema.db.source(), config, model); + multi_schema::add_schema_to_schemas( + &mut actions, + &params, + validated_schema.db.source_assert_single(), + config, + model, + ); } if matches!(datasource, Some(ds) if ds.active_provider == "mongodb") { - mongodb::add_at_map_for_id(&mut actions, &params,
validated_schema.db.source(), model); + mongodb::add_at_map_for_id(&mut actions, &params, validated_schema.db.source_assert_single(), model); mongodb::add_native_for_auto_id( &mut actions, &params, - validated_schema.db.source(), + validated_schema.db.source_assert_single(), model, datasource.unwrap(), ); @@ -71,7 +82,7 @@ pub(crate) fn available_actions(schema: String, params: CodeActionParams) -> Vec multi_schema::add_schema_block_attribute_enum( &mut actions, &params, - validated_schema.db.source(), + validated_schema.db.source_assert_single(), config, enumerator, ) @@ -88,7 +99,7 @@ pub(crate) fn available_actions(schema: String, params: CodeActionParams) -> Vec relations::add_referenced_side_unique( &mut actions, &params, - validated_schema.db.source(), + validated_schema.db.source_assert_single(), complete_relation, ); @@ -96,7 +107,7 @@ pub(crate) fn available_actions(schema: String, params: CodeActionParams) -> Vec relations::add_referencing_side_unique( &mut actions, &params, - validated_schema.db.source(), + validated_schema.db.source_assert_single(), complete_relation, ); } @@ -105,7 +116,7 @@ pub(crate) fn available_actions(schema: String, params: CodeActionParams) -> Vec relations::add_index_for_relation_fields( &mut actions, &params, - validated_schema.db.source(), + validated_schema.db.source_assert_single(), complete_relation.referencing_field(), ); } @@ -114,7 +125,7 @@ pub(crate) fn available_actions(schema: String, params: CodeActionParams) -> Vec relation_mode::replace_set_default_mysql( &mut actions, &params, - validated_schema.db.source(), + validated_schema.db.source_assert_single(), complete_relation, config, ) diff --git a/prisma-fmt/src/code_actions/multi_schema.rs b/prisma-fmt/src/code_actions/multi_schema.rs index 0e47a008a910..aa5aaad05175 100644 --- a/prisma-fmt/src/code_actions/multi_schema.rs +++ b/prisma-fmt/src/code_actions/multi_schema.rs @@ -142,13 +142,12 @@ pub(super) fn add_schema_to_schemas( formatted_attribute, true, // todo: update spans so that we
can just append to the end of the _inside_ of the array. Instead of needing to re-append the `]` or taking the span end -1 - Span::new(span.start, span.end - 1), + Span::new(span.start, span.end - 1, psl::parser_database::FileId::ZERO), params, ) } None => { - let has_properties = datasource.provider_defined() - || datasource.url_defined() + let has_properties = datasource.provider_defined() || datasource.url_defined() || datasource.direct_url_defined() || datasource.shadow_url_defined() || datasource.relation_mode_defined() diff --git a/prisma-fmt/src/get_config.rs b/prisma-fmt/src/get_config.rs index d6de194e1e86..97f714dc456c 100644 --- a/prisma-fmt/src/get_config.rs +++ b/prisma-fmt/src/get_config.rs @@ -1,14 +1,14 @@ -use psl::Diagnostics; +use psl::{Diagnostics, ValidatedSchema}; use serde::Deserialize; use serde_json::json; use std::collections::HashMap; -use crate::validate::SCHEMA_PARSER_ERROR_CODE; +use crate::{schema_file_input::SchemaFileInput, validate::SCHEMA_PARSER_ERROR_CODE}; #[derive(Deserialize, Debug)] #[serde(rename_all = "camelCase")] struct GetConfigParams { - prisma_schema: String, + prisma_schema: SchemaFileInput, #[serde(default)] ignore_env_var_errors: bool, #[serde(default)] @@ -43,29 +43,38 @@ pub(crate) fn get_config(params: &str) -> Result { } fn get_config_impl(params: GetConfigParams) -> Result { - let wrap_get_config_err = |errors: Diagnostics| -> GetConfigError { - use std::fmt::Write as _; - - let mut full_error = errors.to_pretty_string("schema.prisma", &params.prisma_schema); write!(full_error, "\nValidation Error Count: {}", errors.errors().len()).unwrap(); - - GetConfigError { - // this mirrors user_facing_errors::common::SchemaParserError - error_code: Some(SCHEMA_PARSER_ERROR_CODE), - message: full_error, - } - }; - - let mut config = psl::parse_configuration(&params.prisma_schema).map_err(wrap_get_config_err)?; + let mut schema = psl::validate_multi_file(params.prisma_schema.into()); + if schema.diagnostics.has_errors() { + 
return Err(create_get_config_error(&schema, &schema.diagnostics)); + } if !params.ignore_env_var_errors { let overrides: Vec<(_, _)> = params.datasource_overrides.into_iter().collect(); - config + schema + .configuration .resolve_datasource_urls_prisma_fmt(&overrides, |key| params.env.get(key).map(String::from)) - .map_err(wrap_get_config_err)?; + .map_err(|diagnostics| create_get_config_error(&schema, &diagnostics))?; } - Ok(psl::get_config(&config)) + Ok(psl::get_config(&schema.configuration)) +} + +fn create_get_config_error(schema: &ValidatedSchema, diagnostics: &Diagnostics) -> GetConfigError { + use std::fmt::Write as _; + + let mut rendered_diagnostics = schema.render_diagnostics(diagnostics); + write!( + rendered_diagnostics, + "\nValidation Error Count: {}", + diagnostics.errors().len() + ) + .unwrap(); + + GetConfigError { + // this mirrors user_facing_errors::common::SchemaParserError + error_code: Some(SCHEMA_PARSER_ERROR_CODE), + message: rendered_diagnostics, + } } #[cfg(test)] diff --git a/prisma-fmt/src/get_dmmf.rs b/prisma-fmt/src/get_dmmf.rs index 02eec126d17d..151cb7691ee5 100644 --- a/prisma-fmt/src/get_dmmf.rs +++ b/prisma-fmt/src/get_dmmf.rs @@ -1,11 +1,11 @@ use serde::Deserialize; -use crate::validate; +use crate::{schema_file_input::SchemaFileInput, validate}; #[derive(Deserialize, Debug)] #[serde(rename_all = "camelCase")] struct GetDmmfParams { - prisma_schema: String, + prisma_schema: SchemaFileInput, #[serde(default)] no_color: bool, } @@ -18,7 +18,7 @@ pub(crate) fn get_dmmf(params: &str) -> Result { } }; - validate::run(¶ms.prisma_schema, params.no_color).map(|_| dmmf::dmmf_json_from_schema(¶ms.prisma_schema)) + validate::run(params.prisma_schema, params.no_color).map(dmmf::dmmf_json_from_validated_schema) } #[cfg(test)] @@ -90,6 +90,47 @@ mod tests { expected.assert_eq(&response); } + #[test] + fn get_dmmf_multiple_files() { + let schema = vec![ + ( + "a.prisma", + r#" + datasource thedb { + provider = "postgresql" + url = 
env("DBURL") + } + + model A { + id String @id + b_id String @unique + b B @relation(fields: [b_id], references: [id]) + } + "#, + ), + ( + "b.prisma", + r#" + model B { + id String @id + a A? + } + "#, + ), + ]; + + let request = json!({ + "prismaSchema": schema, + }); + + let expected = expect![[ + r#"{"datamodel":{"enums":[],"models":[{"name":"A","dbName":null,"fields":[{"name":"id","kind":"scalar","isList":false,"isRequired":true,"isUnique":false,"isId":true,"isReadOnly":false,"hasDefaultValue":false,"type":"String","isGenerated":false,"isUpdatedAt":false},{"name":"b_id","kind":"scalar","isList":false,"isRequired":true,"isUnique":true,"isId":false,"isReadOnly":true,"hasDefaultValue":false,"type":"String","isGenerated":false,"isUpdatedAt":false},{"name":"b","kind":"object","isList":false,"isRequired":true,"isUnique":false,"isId":false,"isReadOnly":false,"hasDefaultValue":false,"type":"B","relationName":"AToB","relationFromFields":["b_id"],"relationToFields":["id"],"isGenerated":false,"isUpdatedAt":false}],"primaryKey":null,"uniqueFields":[],"uniqueIndexes":[],"isGenerated":false},{"name":"B","dbName":null,"fields":[{"name":"id","kind":"scalar","isList":false,"isRequired":true,"isUnique":false,"isId":true,"isReadOnly":false,"hasDefaultValue":false,"type":"String","isGenerated":false,"isUpdatedAt":false},{"name":"a","kind":"object","isList":false,"isRequired":false,"isUnique":false,"isId":false,"isReadOnly":false,"hasDefaultValue":false,"type":"A","relationName":"AToB","relationFromFields":[],"relationToFields":[],"isGenerated":false,"isUpdatedAt":false}],"primaryKey":null,"uniqueFields":[],"uniqueIndexes":[],"isGenerated":false}],"types":[]},"schema":{"inputObjectTypes":{"prisma":[{"name":"AWhereInput","meta":{"source":"A"},"constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"AND","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"AWhereInput","
namespace":"prisma","location":"inputObjectTypes","isList":true}]},{"name":"OR","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":true}]},{"name":"NOT","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"AWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":true}]},{"name":"id","isRequired":false,"isNullable":false,"inputTypes":[{"type":"StringFilter","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"String","location":"scalar","isList":false}]},{"name":"b_id","isRequired":false,"isNullable":false,"inputTypes":[{"type":"StringFilter","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"String","location":"scalar","isList":false}]},{"name":"b","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BRelationFilter","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"BWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}]},{"name":"AOrderByWithRelationInput","constraints":{"maxNumFields":1,"minNumFields":0},"fields":[{"name":"id","isRequired":false,"isNullable":false,"inputTypes":[{"type":"SortOrder","namespace":"prisma","location":"enumTypes","isList":false}]},{"name":"b_id","isRequired":false,"isNullable":false,"inputTypes":[{"type":"SortOrder","namespace":"prisma","location":"enumTypes","isList":false}]},{"name":"b","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BOrderByWithRelationInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}]},{"name":"AWhereUniqueInput","meta":{"source":"A"},"constraints":{"maxNumFields":null,"minNumFields":1,"fields":["id","b_id"]},"fields":[{"name":"id","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false}]},{"name":"b_id","isRequired":false,"isNulla
ble":false,"inputTypes":[{"type":"String","location":"scalar","isList":false}]},{"name":"AND","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"AWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":true}]},{"name":"OR","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":true}]},{"name":"NOT","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"AWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":true}]},{"name":"b","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BRelationFilter","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"BWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}]},{"name":"AOrderByWithAggregationInput","constraints":{"maxNumFields":1,"minNumFields":0},"fields":[{"name":"id","isRequired":false,"isNullable":false,"inputTypes":[{"type":"SortOrder","namespace":"prisma","location":"enumTypes","isList":false}]},{"name":"b_id","isRequired":false,"isNullable":false,"inputTypes":[{"type":"SortOrder","namespace":"prisma","location":"enumTypes","isList":false}]},{"name":"_count","isRequired":false,"isNullable":false,"inputTypes":[{"type":"ACountOrderByAggregateInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"_max","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AMaxOrderByAggregateInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"_min","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AMinOrderByAggregateInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}]},{"name":"AScalarWhereWithAggregatesInput","meta":{"source":"A"},"constraints":{"maxNumFields":null,"minNumFi
elds":null},"fields":[{"name":"AND","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AScalarWhereWithAggregatesInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"AScalarWhereWithAggregatesInput","namespace":"prisma","location":"inputObjectTypes","isList":true}]},{"name":"OR","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AScalarWhereWithAggregatesInput","namespace":"prisma","location":"inputObjectTypes","isList":true}]},{"name":"NOT","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AScalarWhereWithAggregatesInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"AScalarWhereWithAggregatesInput","namespace":"prisma","location":"inputObjectTypes","isList":true}]},{"name":"id","isRequired":false,"isNullable":false,"inputTypes":[{"type":"StringWithAggregatesFilter","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"String","location":"scalar","isList":false}]},{"name":"b_id","isRequired":false,"isNullable":false,"inputTypes":[{"type":"StringWithAggregatesFilter","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"String","location":"scalar","isList":false}]}]},{"name":"BWhereInput","meta":{"source":"B"},"constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"AND","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"BWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":true}]},{"name":"OR","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":true}]},{"name":"NOT","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"BWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":true}]},{"name":"id","isRequired":
false,"isNullable":false,"inputTypes":[{"type":"StringFilter","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"String","location":"scalar","isList":false}]},{"name":"a","isRequired":false,"isNullable":true,"inputTypes":[{"type":"ANullableRelationFilter","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"AWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"Null","location":"scalar","isList":false}]}]},{"name":"BOrderByWithRelationInput","constraints":{"maxNumFields":1,"minNumFields":0},"fields":[{"name":"id","isRequired":false,"isNullable":false,"inputTypes":[{"type":"SortOrder","namespace":"prisma","location":"enumTypes","isList":false}]},{"name":"a","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AOrderByWithRelationInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}]},{"name":"BWhereUniqueInput","meta":{"source":"B"},"constraints":{"maxNumFields":null,"minNumFields":1,"fields":["id"]},"fields":[{"name":"id","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false}]},{"name":"AND","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"BWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":true}]},{"name":"OR","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":true}]},{"name":"NOT","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"BWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":true}]},{"name":"a","isRequired":false,"isNullable":true,"inputTypes":[{"type":"ANullableRelationFilter","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"AWhereInput","namespace":"p
risma","location":"inputObjectTypes","isList":false},{"type":"Null","location":"scalar","isList":false}]}]},{"name":"BOrderByWithAggregationInput","constraints":{"maxNumFields":1,"minNumFields":0},"fields":[{"name":"id","isRequired":false,"isNullable":false,"inputTypes":[{"type":"SortOrder","namespace":"prisma","location":"enumTypes","isList":false}]},{"name":"_count","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BCountOrderByAggregateInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"_max","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BMaxOrderByAggregateInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"_min","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BMinOrderByAggregateInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}]},{"name":"BScalarWhereWithAggregatesInput","meta":{"source":"B"},"constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"AND","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BScalarWhereWithAggregatesInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"BScalarWhereWithAggregatesInput","namespace":"prisma","location":"inputObjectTypes","isList":true}]},{"name":"OR","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BScalarWhereWithAggregatesInput","namespace":"prisma","location":"inputObjectTypes","isList":true}]},{"name":"NOT","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BScalarWhereWithAggregatesInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"BScalarWhereWithAggregatesInput","namespace":"prisma","location":"inputObjectTypes","isList":true}]},{"name":"id","isRequired":false,"isNullable":false,"inputTypes":[{"type":"StringWithAggregatesFilter","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"String","location":"scalar","isList":false}]}]},{"name":"ACreateInput",
"constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"id","isRequired":true,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false}]},{"name":"b","isRequired":true,"isNullable":false,"inputTypes":[{"type":"BCreateNestedOneWithoutAInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}]},{"name":"AUncheckedCreateInput","constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"id","isRequired":true,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false}]},{"name":"b_id","isRequired":true,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false}]}]},{"name":"AUpdateInput","constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"id","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldUpdateOperationsInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"b","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BUpdateOneRequiredWithoutANestedInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}]},{"name":"AUncheckedUpdateInput","constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"id","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldUpdateOperationsInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"b_id","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldUpdateOperationsInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}]},{"name":"ACreateManyInput","constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"id","isRequired":true,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false}]},{"name":"b_id","isRequire
d":true,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false}]}]},{"name":"AUpdateManyMutationInput","constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"id","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldUpdateOperationsInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}]},{"name":"AUncheckedUpdateManyInput","constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"id","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldUpdateOperationsInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"b_id","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldUpdateOperationsInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}]},{"name":"BCreateInput","constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"id","isRequired":true,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false}]},{"name":"a","isRequired":false,"isNullable":false,"inputTypes":[{"type":"ACreateNestedOneWithoutBInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}]},{"name":"BUncheckedCreateInput","constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"id","isRequired":true,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false}]},{"name":"a","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AUncheckedCreateNestedOneWithoutBInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}]},{"name":"BUpdateInput","constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"id","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"Str
ingFieldUpdateOperationsInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"a","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AUpdateOneWithoutBNestedInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}]},{"name":"BUncheckedUpdateInput","constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"id","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldUpdateOperationsInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"a","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AUncheckedUpdateOneWithoutBNestedInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}]},{"name":"BCreateManyInput","constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"id","isRequired":true,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false}]}]},{"name":"BUpdateManyMutationInput","constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"id","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldUpdateOperationsInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}]},{"name":"BUncheckedUpdateManyInput","constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"id","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldUpdateOperationsInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}]},{"name":"StringFilter","constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"equals","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"in","isRequired"
:false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":true},{"type":"ListStringFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"notIn","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":true},{"type":"ListStringFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"lt","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"lte","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"gt","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"gte","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"contains","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"startsWith","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"endsWith","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"mode","isRequired":false,"isNullable":false,"inputTypes":[{
"type":"QueryMode","namespace":"prisma","location":"enumTypes","isList":false}]},{"name":"not","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"NestedStringFilter","namespace":"prisma","location":"inputObjectTypes","isList":false}]}]},{"name":"BRelationFilter","constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"is","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"isNot","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}]},{"name":"ACountOrderByAggregateInput","constraints":{"maxNumFields":1,"minNumFields":1},"fields":[{"name":"id","isRequired":false,"isNullable":false,"inputTypes":[{"type":"SortOrder","namespace":"prisma","location":"enumTypes","isList":false}]},{"name":"b_id","isRequired":false,"isNullable":false,"inputTypes":[{"type":"SortOrder","namespace":"prisma","location":"enumTypes","isList":false}]}]},{"name":"AMaxOrderByAggregateInput","constraints":{"maxNumFields":1,"minNumFields":1},"fields":[{"name":"id","isRequired":false,"isNullable":false,"inputTypes":[{"type":"SortOrder","namespace":"prisma","location":"enumTypes","isList":false}]},{"name":"b_id","isRequired":false,"isNullable":false,"inputTypes":[{"type":"SortOrder","namespace":"prisma","location":"enumTypes","isList":false}]}]},{"name":"AMinOrderByAggregateInput","constraints":{"maxNumFields":1,"minNumFields":1},"fields":[{"name":"id","isRequired":false,"isNullable":false,"inputTypes":[{"type":"SortOrder","namespace":"prisma","location":"enumTypes","isList":false}]},{"name":"b_id","isRequired":false,"isNullable":false,"inputTypes":[{"type":"SortOrder","namespace":"prisma","location":"enumTypes","isList":false}]}]},{"name":"StringWithAggregatesFilter","constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"na
me":"equals","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"in","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":true},{"type":"ListStringFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"notIn","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":true},{"type":"ListStringFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"lt","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"lte","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"gt","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"gte","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"contains","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"startsWith","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"endsWith","isRequired":false,"isNullable
":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"mode","isRequired":false,"isNullable":false,"inputTypes":[{"type":"QueryMode","namespace":"prisma","location":"enumTypes","isList":false}]},{"name":"not","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"NestedStringWithAggregatesFilter","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"_count","isRequired":false,"isNullable":false,"inputTypes":[{"type":"NestedIntFilter","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"_min","isRequired":false,"isNullable":false,"inputTypes":[{"type":"NestedStringFilter","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"_max","isRequired":false,"isNullable":false,"inputTypes":[{"type":"NestedStringFilter","namespace":"prisma","location":"inputObjectTypes","isList":false}]}]},{"name":"ANullableRelationFilter","constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"is","isRequired":false,"isNullable":true,"inputTypes":[{"type":"AWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"Null","location":"scalar","isList":false}]},{"name":"isNot","isRequired":false,"isNullable":true,"inputTypes":[{"type":"AWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"Null","location":"scalar","isList":false}]}]},{"name":"BCountOrderByAggregateInput","constraints":{"maxNumFields":1,"minNumFields":1},"fields":[{"name":"id","isRequired":false,"isNullable":false,"inputTypes":[{"type":"SortOrder","namespace":"prisma","location":"enumTypes","isList":false}]}]},{"name":"BMaxOrderByAggregateInput","constraints":{"maxNumFields":1,"minNumFields":1},"fields":[{"name":"id","isRequired":false,"isNullable":false,"inputTypes":[{"type":"SortOrder","namespa
ce":"prisma","location":"enumTypes","isList":false}]}]},{"name":"BMinOrderByAggregateInput","constraints":{"maxNumFields":1,"minNumFields":1},"fields":[{"name":"id","isRequired":false,"isNullable":false,"inputTypes":[{"type":"SortOrder","namespace":"prisma","location":"enumTypes","isList":false}]}]},{"name":"BCreateNestedOneWithoutAInput","constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"create","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BCreateWithoutAInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"BUncheckedCreateWithoutAInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"connectOrCreate","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BCreateOrConnectWithoutAInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"connect","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BWhereUniqueInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}]},{"name":"StringFieldUpdateOperationsInput","constraints":{"maxNumFields":1,"minNumFields":1},"fields":[{"name":"set","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false}]}]},{"name":"BUpdateOneRequiredWithoutANestedInput","constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"create","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BCreateWithoutAInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"BUncheckedCreateWithoutAInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"connectOrCreate","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BCreateOrConnectWithoutAInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"upsert","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BUpsertWithoutAInput","namespace":"prisma","location":"inputObjectTypes","isList
":false}]},{"name":"connect","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BWhereUniqueInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"update","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BUpdateToOneWithWhereWithoutAInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"BUpdateWithoutAInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"BUncheckedUpdateWithoutAInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}]},{"name":"ACreateNestedOneWithoutBInput","constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"create","isRequired":false,"isNullable":false,"inputTypes":[{"type":"ACreateWithoutBInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"AUncheckedCreateWithoutBInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"connectOrCreate","isRequired":false,"isNullable":false,"inputTypes":[{"type":"ACreateOrConnectWithoutBInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"connect","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AWhereUniqueInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}]},{"name":"AUncheckedCreateNestedOneWithoutBInput","constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"create","isRequired":false,"isNullable":false,"inputTypes":[{"type":"ACreateWithoutBInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"AUncheckedCreateWithoutBInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"connectOrCreate","isRequired":false,"isNullable":false,"inputTypes":[{"type":"ACreateOrConnectWithoutBInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"connect","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AWhereUniqueInput","namespace":"prism
a","location":"inputObjectTypes","isList":false}]}]},{"name":"AUpdateOneWithoutBNestedInput","constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"create","isRequired":false,"isNullable":false,"inputTypes":[{"type":"ACreateWithoutBInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"AUncheckedCreateWithoutBInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"connectOrCreate","isRequired":false,"isNullable":false,"inputTypes":[{"type":"ACreateOrConnectWithoutBInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"upsert","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AUpsertWithoutBInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"disconnect","isRequired":false,"isNullable":false,"inputTypes":[{"type":"Boolean","location":"scalar","isList":false},{"type":"AWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"delete","isRequired":false,"isNullable":false,"inputTypes":[{"type":"Boolean","location":"scalar","isList":false},{"type":"AWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"connect","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AWhereUniqueInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"update","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AUpdateToOneWithWhereWithoutBInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"AUpdateWithoutBInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"AUncheckedUpdateWithoutBInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}]},{"name":"AUncheckedUpdateOneWithoutBNestedInput","constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"create","isRequired":false,"isNullable":false,"inputTypes":[{"type":"ACreateWithoutBInput","name
space":"prisma","location":"inputObjectTypes","isList":false},{"type":"AUncheckedCreateWithoutBInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"connectOrCreate","isRequired":false,"isNullable":false,"inputTypes":[{"type":"ACreateOrConnectWithoutBInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"upsert","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AUpsertWithoutBInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"disconnect","isRequired":false,"isNullable":false,"inputTypes":[{"type":"Boolean","location":"scalar","isList":false},{"type":"AWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"delete","isRequired":false,"isNullable":false,"inputTypes":[{"type":"Boolean","location":"scalar","isList":false},{"type":"AWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"connect","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AWhereUniqueInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"update","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AUpdateToOneWithWhereWithoutBInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"AUpdateWithoutBInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"AUncheckedUpdateWithoutBInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}]},{"name":"NestedStringFilter","constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"equals","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"in","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":true},{"type":"ListStringFieldRefInput","namespace":"prisma"
,"location":"fieldRefTypes","isList":false}]},{"name":"notIn","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":true},{"type":"ListStringFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"lt","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"lte","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"gt","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"gte","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"contains","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"startsWith","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"endsWith","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"not","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"NestedStringFilter","namespace":"prisma","location":"inputObjectTypes","isList":false}
]}]},{"name":"NestedStringWithAggregatesFilter","constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"equals","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"in","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":true},{"type":"ListStringFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"notIn","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":true},{"type":"ListStringFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"lt","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"lte","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"gt","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"gte","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"contains","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"startsWith","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldRefInpu
t","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"endsWith","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"not","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"NestedStringWithAggregatesFilter","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"_count","isRequired":false,"isNullable":false,"inputTypes":[{"type":"NestedIntFilter","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"_min","isRequired":false,"isNullable":false,"inputTypes":[{"type":"NestedStringFilter","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"_max","isRequired":false,"isNullable":false,"inputTypes":[{"type":"NestedStringFilter","namespace":"prisma","location":"inputObjectTypes","isList":false}]}]},{"name":"NestedIntFilter","constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"equals","isRequired":false,"isNullable":false,"inputTypes":[{"type":"Int","location":"scalar","isList":false},{"type":"IntFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"in","isRequired":false,"isNullable":false,"inputTypes":[{"type":"Int","location":"scalar","isList":true},{"type":"ListIntFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"notIn","isRequired":false,"isNullable":false,"inputTypes":[{"type":"Int","location":"scalar","isList":true},{"type":"ListIntFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"lt","isRequired":false,"isNullable":false,"inputTypes":[{"type":"Int","location":"scalar","isList":false},{"type":"IntFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"lte","isRequired":false,"isNullable":f
alse,"inputTypes":[{"type":"Int","location":"scalar","isList":false},{"type":"IntFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"gt","isRequired":false,"isNullable":false,"inputTypes":[{"type":"Int","location":"scalar","isList":false},{"type":"IntFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"gte","isRequired":false,"isNullable":false,"inputTypes":[{"type":"Int","location":"scalar","isList":false},{"type":"IntFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"not","isRequired":false,"isNullable":false,"inputTypes":[{"type":"Int","location":"scalar","isList":false},{"type":"NestedIntFilter","namespace":"prisma","location":"inputObjectTypes","isList":false}]}]},{"name":"BCreateWithoutAInput","constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"id","isRequired":true,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false}]}]},{"name":"BUncheckedCreateWithoutAInput","constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"id","isRequired":true,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false}]}]},{"name":"BCreateOrConnectWithoutAInput","constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"where","isRequired":true,"isNullable":false,"inputTypes":[{"type":"BWhereUniqueInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"create","isRequired":true,"isNullable":false,"inputTypes":[{"type":"BCreateWithoutAInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"BUncheckedCreateWithoutAInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}]},{"name":"BUpsertWithoutAInput","constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"update","isRequired":true,"isNullable":false,"inputTypes":[{"type":"BUpdateWithoutAInput","namespace":"prisma"
,"location":"inputObjectTypes","isList":false},{"type":"BUncheckedUpdateWithoutAInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"create","isRequired":true,"isNullable":false,"inputTypes":[{"type":"BCreateWithoutAInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"BUncheckedCreateWithoutAInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"where","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}]},{"name":"BUpdateToOneWithWhereWithoutAInput","constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"where","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"data","isRequired":true,"isNullable":false,"inputTypes":[{"type":"BUpdateWithoutAInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"BUncheckedUpdateWithoutAInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}]},{"name":"BUpdateWithoutAInput","constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"id","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldUpdateOperationsInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}]},{"name":"BUncheckedUpdateWithoutAInput","constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"id","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldUpdateOperationsInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}]},{"name":"ACreateWithoutBInput","constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"id","isRequired":true,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar
","isList":false}]}]},{"name":"AUncheckedCreateWithoutBInput","constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"id","isRequired":true,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false}]}]},{"name":"ACreateOrConnectWithoutBInput","constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"where","isRequired":true,"isNullable":false,"inputTypes":[{"type":"AWhereUniqueInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"create","isRequired":true,"isNullable":false,"inputTypes":[{"type":"ACreateWithoutBInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"AUncheckedCreateWithoutBInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}]},{"name":"AUpsertWithoutBInput","constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"update","isRequired":true,"isNullable":false,"inputTypes":[{"type":"AUpdateWithoutBInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"AUncheckedUpdateWithoutBInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"create","isRequired":true,"isNullable":false,"inputTypes":[{"type":"ACreateWithoutBInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"AUncheckedCreateWithoutBInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"where","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}]},{"name":"AUpdateToOneWithWhereWithoutBInput","constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"where","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"data","isRequired":true,"isNullable":false,"inputTypes":[{"type":"AUpdateWithoutBInput","namespace":"prisma","locatio
n":"inputObjectTypes","isList":false},{"type":"AUncheckedUpdateWithoutBInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}]},{"name":"AUpdateWithoutBInput","constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"id","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldUpdateOperationsInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}]},{"name":"AUncheckedUpdateWithoutBInput","constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"id","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldUpdateOperationsInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}]}]},"outputObjectTypes":{"prisma":[{"name":"Query","fields":[{"name":"findFirstA","args":[{"name":"where","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"orderBy","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AOrderByWithRelationInput","namespace":"prisma","location":"inputObjectTypes","isList":true},{"type":"AOrderByWithRelationInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"cursor","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AWhereUniqueInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"take","isRequired":false,"isNullable":false,"inputTypes":[{"type":"Int","location":"scalar","isList":false}]},{"name":"skip","isRequired":false,"isNullable":false,"inputTypes":[{"type":"Int","location":"scalar","isList":false}]},{"name":"distinct","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AScalarFieldEnum","namespace":"prisma","location":"enumTypes","isList":false},{"type":"AScalarFieldEnum","namespace":"prisma","location":"enumTypes","isList":true}]}],"isNullab
le":true,"outputType":{"type":"A","namespace":"model","location":"outputObjectTypes","isList":false}},{"name":"findFirstAOrThrow","args":[{"name":"where","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"orderBy","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AOrderByWithRelationInput","namespace":"prisma","location":"inputObjectTypes","isList":true},{"type":"AOrderByWithRelationInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"cursor","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AWhereUniqueInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"take","isRequired":false,"isNullable":false,"inputTypes":[{"type":"Int","location":"scalar","isList":false}]},{"name":"skip","isRequired":false,"isNullable":false,"inputTypes":[{"type":"Int","location":"scalar","isList":false}]},{"name":"distinct","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AScalarFieldEnum","namespace":"prisma","location":"enumTypes","isList":false},{"type":"AScalarFieldEnum","namespace":"prisma","location":"enumTypes","isList":true}]}],"isNullable":true,"outputType":{"type":"A","namespace":"model","location":"outputObjectTypes","isList":false}},{"name":"findManyA","args":[{"name":"where","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"orderBy","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AOrderByWithRelationInput","namespace":"prisma","location":"inputObjectTypes","isList":true},{"type":"AOrderByWithRelationInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"cursor","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AWhereUniqueInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"take","isRequired":false,
"isNullable":false,"inputTypes":[{"type":"Int","location":"scalar","isList":false}]},{"name":"skip","isRequired":false,"isNullable":false,"inputTypes":[{"type":"Int","location":"scalar","isList":false}]},{"name":"distinct","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AScalarFieldEnum","namespace":"prisma","location":"enumTypes","isList":false},{"type":"AScalarFieldEnum","namespace":"prisma","location":"enumTypes","isList":true}]}],"isNullable":false,"outputType":{"type":"A","namespace":"model","location":"outputObjectTypes","isList":true}},{"name":"aggregateA","args":[{"name":"where","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"orderBy","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AOrderByWithRelationInput","namespace":"prisma","location":"inputObjectTypes","isList":true},{"type":"AOrderByWithRelationInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"cursor","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AWhereUniqueInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"take","isRequired":false,"isNullable":false,"inputTypes":[{"type":"Int","location":"scalar","isList":false}]},{"name":"skip","isRequired":false,"isNullable":false,"inputTypes":[{"type":"Int","location":"scalar","isList":false}]}],"isNullable":false,"outputType":{"type":"AggregateA","namespace":"prisma","location":"outputObjectTypes","isList":false}},{"name":"groupByA","args":[{"name":"where","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"orderBy","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AOrderByWithAggregationInput","namespace":"prisma","location":"inputObjectTypes","isList":true},{"type":"AOrderByWithAggregationInput","namespace":"prisma","location":"inputObjectTy
pes","isList":false}]},{"name":"by","isRequired":true,"isNullable":false,"inputTypes":[{"type":"AScalarFieldEnum","namespace":"prisma","location":"enumTypes","isList":true},{"type":"AScalarFieldEnum","namespace":"prisma","location":"enumTypes","isList":false}]},{"name":"having","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AScalarWhereWithAggregatesInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"take","isRequired":false,"isNullable":false,"inputTypes":[{"type":"Int","location":"scalar","isList":false}]},{"name":"skip","isRequired":false,"isNullable":false,"inputTypes":[{"type":"Int","location":"scalar","isList":false}]}],"isNullable":false,"outputType":{"type":"AGroupByOutputType","namespace":"prisma","location":"outputObjectTypes","isList":true}},{"name":"findUniqueA","args":[{"name":"where","isRequired":true,"isNullable":false,"inputTypes":[{"type":"AWhereUniqueInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}],"isNullable":true,"outputType":{"type":"A","namespace":"model","location":"outputObjectTypes","isList":false}},{"name":"findUniqueAOrThrow","args":[{"name":"where","isRequired":true,"isNullable":false,"inputTypes":[{"type":"AWhereUniqueInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}],"isNullable":true,"outputType":{"type":"A","namespace":"model","location":"outputObjectTypes","isList":false}},{"name":"findFirstB","args":[{"name":"where","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"orderBy","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BOrderByWithRelationInput","namespace":"prisma","location":"inputObjectTypes","isList":true},{"type":"BOrderByWithRelationInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"cursor","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BWhereUniqueInput","namespac
e":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"take","isRequired":false,"isNullable":false,"inputTypes":[{"type":"Int","location":"scalar","isList":false}]},{"name":"skip","isRequired":false,"isNullable":false,"inputTypes":[{"type":"Int","location":"scalar","isList":false}]},{"name":"distinct","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BScalarFieldEnum","namespace":"prisma","location":"enumTypes","isList":false},{"type":"BScalarFieldEnum","namespace":"prisma","location":"enumTypes","isList":true}]}],"isNullable":true,"outputType":{"type":"B","namespace":"model","location":"outputObjectTypes","isList":false}},{"name":"findFirstBOrThrow","args":[{"name":"where","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"orderBy","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BOrderByWithRelationInput","namespace":"prisma","location":"inputObjectTypes","isList":true},{"type":"BOrderByWithRelationInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"cursor","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BWhereUniqueInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"take","isRequired":false,"isNullable":false,"inputTypes":[{"type":"Int","location":"scalar","isList":false}]},{"name":"skip","isRequired":false,"isNullable":false,"inputTypes":[{"type":"Int","location":"scalar","isList":false}]},{"name":"distinct","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BScalarFieldEnum","namespace":"prisma","location":"enumTypes","isList":false},{"type":"BScalarFieldEnum","namespace":"prisma","location":"enumTypes","isList":true}]}],"isNullable":true,"outputType":{"type":"B","namespace":"model","location":"outputObjectTypes","isList":false}},{"name":"findManyB","args":[{"name":"where","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BWher
eInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"orderBy","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BOrderByWithRelationInput","namespace":"prisma","location":"inputObjectTypes","isList":true},{"type":"BOrderByWithRelationInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"cursor","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BWhereUniqueInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"take","isRequired":false,"isNullable":false,"inputTypes":[{"type":"Int","location":"scalar","isList":false}]},{"name":"skip","isRequired":false,"isNullable":false,"inputTypes":[{"type":"Int","location":"scalar","isList":false}]},{"name":"distinct","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BScalarFieldEnum","namespace":"prisma","location":"enumTypes","isList":false},{"type":"BScalarFieldEnum","namespace":"prisma","location":"enumTypes","isList":true}]}],"isNullable":false,"outputType":{"type":"B","namespace":"model","location":"outputObjectTypes","isList":true}},{"name":"aggregateB","args":[{"name":"where","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"orderBy","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BOrderByWithRelationInput","namespace":"prisma","location":"inputObjectTypes","isList":true},{"type":"BOrderByWithRelationInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"cursor","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BWhereUniqueInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"take","isRequired":false,"isNullable":false,"inputTypes":[{"type":"Int","location":"scalar","isList":false}]},{"name":"skip","isRequired":false,"isNullable":false,"inputTypes":[{"type":"Int","location":"scalar","isList":false}]}],"isNullable":f
alse,"outputType":{"type":"AggregateB","namespace":"prisma","location":"outputObjectTypes","isList":false}},{"name":"groupByB","args":[{"name":"where","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"orderBy","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BOrderByWithAggregationInput","namespace":"prisma","location":"inputObjectTypes","isList":true},{"type":"BOrderByWithAggregationInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"by","isRequired":true,"isNullable":false,"inputTypes":[{"type":"BScalarFieldEnum","namespace":"prisma","location":"enumTypes","isList":true},{"type":"BScalarFieldEnum","namespace":"prisma","location":"enumTypes","isList":false}]},{"name":"having","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BScalarWhereWithAggregatesInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"take","isRequired":false,"isNullable":false,"inputTypes":[{"type":"Int","location":"scalar","isList":false}]},{"name":"skip","isRequired":false,"isNullable":false,"inputTypes":[{"type":"Int","location":"scalar","isList":false}]}],"isNullable":false,"outputType":{"type":"BGroupByOutputType","namespace":"prisma","location":"outputObjectTypes","isList":true}},{"name":"findUniqueB","args":[{"name":"where","isRequired":true,"isNullable":false,"inputTypes":[{"type":"BWhereUniqueInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}],"isNullable":true,"outputType":{"type":"B","namespace":"model","location":"outputObjectTypes","isList":false}},{"name":"findUniqueBOrThrow","args":[{"name":"where","isRequired":true,"isNullable":false,"inputTypes":[{"type":"BWhereUniqueInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}],"isNullable":true,"outputType":{"type":"B","namespace":"model","location":"outputObjectTypes","isList":false}}]},{"name":"Mutati
on","fields":[{"name":"createOneA","args":[{"name":"data","isRequired":true,"isNullable":false,"inputTypes":[{"type":"ACreateInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"AUncheckedCreateInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}],"isNullable":false,"outputType":{"type":"A","namespace":"model","location":"outputObjectTypes","isList":false}},{"name":"upsertOneA","args":[{"name":"where","isRequired":true,"isNullable":false,"inputTypes":[{"type":"AWhereUniqueInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"create","isRequired":true,"isNullable":false,"inputTypes":[{"type":"ACreateInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"AUncheckedCreateInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"update","isRequired":true,"isNullable":false,"inputTypes":[{"type":"AUpdateInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"AUncheckedUpdateInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}],"isNullable":false,"outputType":{"type":"A","namespace":"model","location":"outputObjectTypes","isList":false}},{"name":"createManyA","args":[{"name":"data","isRequired":true,"isNullable":false,"inputTypes":[{"type":"ACreateManyInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"ACreateManyInput","namespace":"prisma","location":"inputObjectTypes","isList":true}]},{"name":"skipDuplicates","isRequired":false,"isNullable":false,"inputTypes":[{"type":"Boolean","location":"scalar","isList":false}]}],"isNullable":false,"outputType":{"type":"AffectedRowsOutput","namespace":"prisma","location":"outputObjectTypes","isList":false}},{"name":"deleteOneA","args":[{"name":"where","isRequired":true,"isNullable":false,"inputTypes":[{"type":"AWhereUniqueInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}],"isNullable":true,"o
utputType":{"type":"A","namespace":"model","location":"outputObjectTypes","isList":false}},{"name":"updateOneA","args":[{"name":"data","isRequired":true,"isNullable":false,"inputTypes":[{"type":"AUpdateInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"AUncheckedUpdateInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"where","isRequired":true,"isNullable":false,"inputTypes":[{"type":"AWhereUniqueInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}],"isNullable":true,"outputType":{"type":"A","namespace":"model","location":"outputObjectTypes","isList":false}},{"name":"updateManyA","args":[{"name":"data","isRequired":true,"isNullable":false,"inputTypes":[{"type":"AUpdateManyMutationInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"AUncheckedUpdateManyInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"where","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}],"isNullable":false,"outputType":{"type":"AffectedRowsOutput","namespace":"prisma","location":"outputObjectTypes","isList":false}},{"name":"deleteManyA","args":[{"name":"where","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}],"isNullable":false,"outputType":{"type":"AffectedRowsOutput","namespace":"prisma","location":"outputObjectTypes","isList":false}},{"name":"createOneB","args":[{"name":"data","isRequired":true,"isNullable":false,"inputTypes":[{"type":"BCreateInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"BUncheckedCreateInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}],"isNullable":false,"outputType":{"type":"B","namespace":"model","location":"outputObjectTypes","isList":false}},{"name":"upsertOneB","args":[{"name
":"where","isRequired":true,"isNullable":false,"inputTypes":[{"type":"BWhereUniqueInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"create","isRequired":true,"isNullable":false,"inputTypes":[{"type":"BCreateInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"BUncheckedCreateInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"update","isRequired":true,"isNullable":false,"inputTypes":[{"type":"BUpdateInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"BUncheckedUpdateInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}],"isNullable":false,"outputType":{"type":"B","namespace":"model","location":"outputObjectTypes","isList":false}},{"name":"createManyB","args":[{"name":"data","isRequired":true,"isNullable":false,"inputTypes":[{"type":"BCreateManyInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"BCreateManyInput","namespace":"prisma","location":"inputObjectTypes","isList":true}]},{"name":"skipDuplicates","isRequired":false,"isNullable":false,"inputTypes":[{"type":"Boolean","location":"scalar","isList":false}]}],"isNullable":false,"outputType":{"type":"AffectedRowsOutput","namespace":"prisma","location":"outputObjectTypes","isList":false}},{"name":"deleteOneB","args":[{"name":"where","isRequired":true,"isNullable":false,"inputTypes":[{"type":"BWhereUniqueInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}],"isNullable":true,"outputType":{"type":"B","namespace":"model","location":"outputObjectTypes","isList":false}},{"name":"updateOneB","args":[{"name":"data","isRequired":true,"isNullable":false,"inputTypes":[{"type":"BUpdateInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"BUncheckedUpdateInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"where","isRequired":true,"isNullable":false,"inputTypes":[{"ty
pe":"BWhereUniqueInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}],"isNullable":true,"outputType":{"type":"B","namespace":"model","location":"outputObjectTypes","isList":false}},{"name":"updateManyB","args":[{"name":"data","isRequired":true,"isNullable":false,"inputTypes":[{"type":"BUpdateManyMutationInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"BUncheckedUpdateManyInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"where","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}],"isNullable":false,"outputType":{"type":"AffectedRowsOutput","namespace":"prisma","location":"outputObjectTypes","isList":false}},{"name":"deleteManyB","args":[{"name":"where","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}],"isNullable":false,"outputType":{"type":"AffectedRowsOutput","namespace":"prisma","location":"outputObjectTypes","isList":false}},{"name":"executeRaw","args":[{"name":"query","isRequired":true,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false}]},{"name":"parameters","isRequired":false,"isNullable":false,"inputTypes":[{"type":"Json","location":"scalar","isList":false}]}],"isNullable":false,"outputType":{"type":"Json","location":"scalar","isList":false}},{"name":"queryRaw","args":[{"name":"query","isRequired":true,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false}]},{"name":"parameters","isRequired":false,"isNullable":false,"inputTypes":[{"type":"Json","location":"scalar","isList":false}]}],"isNullable":false,"outputType":{"type":"Json","location":"scalar","isList":false}}]},{"name":"AggregateA","fields":[{"name":"_count","args":[],"isNullable":true,"outputType":{"type":"ACountAggregateOutputType","namespace":"prisma","location":"
outputObjectTypes","isList":false}},{"name":"_min","args":[],"isNullable":true,"outputType":{"type":"AMinAggregateOutputType","namespace":"prisma","location":"outputObjectTypes","isList":false}},{"name":"_max","args":[],"isNullable":true,"outputType":{"type":"AMaxAggregateOutputType","namespace":"prisma","location":"outputObjectTypes","isList":false}}]},{"name":"AGroupByOutputType","fields":[{"name":"id","args":[],"isNullable":false,"outputType":{"type":"String","location":"scalar","isList":false}},{"name":"b_id","args":[],"isNullable":false,"outputType":{"type":"String","location":"scalar","isList":false}},{"name":"_count","args":[],"isNullable":true,"outputType":{"type":"ACountAggregateOutputType","namespace":"prisma","location":"outputObjectTypes","isList":false}},{"name":"_min","args":[],"isNullable":true,"outputType":{"type":"AMinAggregateOutputType","namespace":"prisma","location":"outputObjectTypes","isList":false}},{"name":"_max","args":[],"isNullable":true,"outputType":{"type":"AMaxAggregateOutputType","namespace":"prisma","location":"outputObjectTypes","isList":false}}]},{"name":"AggregateB","fields":[{"name":"_count","args":[],"isNullable":true,"outputType":{"type":"BCountAggregateOutputType","namespace":"prisma","location":"outputObjectTypes","isList":false}},{"name":"_min","args":[],"isNullable":true,"outputType":{"type":"BMinAggregateOutputType","namespace":"prisma","location":"outputObjectTypes","isList":false}},{"name":"_max","args":[],"isNullable":true,"outputType":{"type":"BMaxAggregateOutputType","namespace":"prisma","location":"outputObjectTypes","isList":false}}]},{"name":"BGroupByOutputType","fields":[{"name":"id","args":[],"isNullable":false,"outputType":{"type":"String","location":"scalar","isList":false}},{"name":"_count","args":[],"isNullable":true,"outputType":{"type":"BCountAggregateOutputType","namespace":"prisma","location":"outputObjectTypes","isList":false}},{"name":"_min","args":[],"isNullable":true,"outputType":{"type":"BMinAggregat
eOutputType","namespace":"prisma","location":"outputObjectTypes","isList":false}},{"name":"_max","args":[],"isNullable":true,"outputType":{"type":"BMaxAggregateOutputType","namespace":"prisma","location":"outputObjectTypes","isList":false}}]},{"name":"AffectedRowsOutput","fields":[{"name":"count","args":[],"isNullable":false,"outputType":{"type":"Int","location":"scalar","isList":false}}]},{"name":"ACountAggregateOutputType","fields":[{"name":"id","args":[],"isNullable":false,"outputType":{"type":"Int","location":"scalar","isList":false}},{"name":"b_id","args":[],"isNullable":false,"outputType":{"type":"Int","location":"scalar","isList":false}},{"name":"_all","args":[],"isNullable":false,"outputType":{"type":"Int","location":"scalar","isList":false}}]},{"name":"AMinAggregateOutputType","fields":[{"name":"id","args":[],"isNullable":true,"outputType":{"type":"String","location":"scalar","isList":false}},{"name":"b_id","args":[],"isNullable":true,"outputType":{"type":"String","location":"scalar","isList":false}}]},{"name":"AMaxAggregateOutputType","fields":[{"name":"id","args":[],"isNullable":true,"outputType":{"type":"String","location":"scalar","isList":false}},{"name":"b_id","args":[],"isNullable":true,"outputType":{"type":"String","location":"scalar","isList":false}}]},{"name":"BCountAggregateOutputType","fields":[{"name":"id","args":[],"isNullable":false,"outputType":{"type":"Int","location":"scalar","isList":false}},{"name":"_all","args":[],"isNullable":false,"outputType":{"type":"Int","location":"scalar","isList":false}}]},{"name":"BMinAggregateOutputType","fields":[{"name":"id","args":[],"isNullable":true,"outputType":{"type":"String","location":"scalar","isList":false}}]},{"name":"BMaxAggregateOutputType","fields":[{"name":"id","args":[],"isNullable":true,"outputType":{"type":"String","location":"scalar","isList":false}}]}],"model":[{"name":"A","fields":[{"name":"id","args":[],"isNullable":false,"outputType":{"type":"String","location":"scalar","isList":false}
},{"name":"b_id","args":[],"isNullable":false,"outputType":{"type":"String","location":"scalar","isList":false}},{"name":"b","args":[],"isNullable":false,"outputType":{"type":"B","namespace":"model","location":"outputObjectTypes","isList":false}}]},{"name":"B","fields":[{"name":"id","args":[],"isNullable":false,"outputType":{"type":"String","location":"scalar","isList":false}},{"name":"a","args":[{"name":"where","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}],"isNullable":true,"outputType":{"type":"A","namespace":"model","location":"outputObjectTypes","isList":false}}]}]},"enumTypes":{"prisma":[{"name":"TransactionIsolationLevel","values":["ReadUncommitted","ReadCommitted","RepeatableRead","Serializable"]},{"name":"AScalarFieldEnum","values":["id","b_id"]},{"name":"BScalarFieldEnum","values":["id"]},{"name":"SortOrder","values":["asc","desc"]},{"name":"QueryMode","values":["default","insensitive"]}]},"fieldRefTypes":{"prisma":[{"name":"StringFieldRefInput","allowTypes":[{"type":"String","location":"scalar","isList":false}],"fields":[{"name":"_ref","isRequired":true,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false}]},{"name":"_container","isRequired":true,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false}]}]},{"name":"ListStringFieldRefInput","allowTypes":[{"type":"String","location":"scalar","isList":true}],"fields":[{"name":"_ref","isRequired":true,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false}]},{"name":"_container","isRequired":true,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false}]}]},{"name":"IntFieldRefInput","allowTypes":[{"type":"Int","location":"scalar","isList":false}],"fields":[{"name":"_ref","isRequired":true,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false}]},{"name":"_container",
"isRequired":true,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false}]}]},{"name":"ListIntFieldRefInput","allowTypes":[{"type":"Int","location":"scalar","isList":true}],"fields":[{"name":"_ref","isRequired":true,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false}]},{"name":"_container","isRequired":true,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false}]}]}]}},"mappings":{"modelOperations":[{"model":"A","aggregate":"aggregateA","createMany":"createManyA","createOne":"createOneA","deleteMany":"deleteManyA","deleteOne":"deleteOneA","findFirst":"findFirstA","findFirstOrThrow":"findFirstAOrThrow","findMany":"findManyA","findUnique":"findUniqueA","findUniqueOrThrow":"findUniqueAOrThrow","groupBy":"groupByA","updateMany":"updateManyA","updateOne":"updateOneA","upsertOne":"upsertOneA"},{"model":"B","aggregate":"aggregateB","createMany":"createManyB","createOne":"createOneB","deleteMany":"deleteManyB","deleteOne":"deleteOneB","findFirst":"findFirstB","findFirstOrThrow":"findFirstBOrThrow","findMany":"findManyB","findUnique":"findUniqueB","findUniqueOrThrow":"findUniqueBOrThrow","groupBy":"groupByB","updateMany":"updateManyB","updateOne":"updateOneB","upsertOne":"upsertOneB"}],"otherOperations":{"read":[],"write":["executeRaw","queryRaw"]}}}"# + ]]; + + let response = get_dmmf(&request.to_string()).unwrap(); + expected.assert_eq(&response); + } + #[test] fn get_dmmf_using_both_relation_mode_and_referential_integrity() { let schema = r#" diff --git a/prisma-fmt/src/lib.rs b/prisma-fmt/src/lib.rs index 0449faf52665..c1449b3b2053 100644 --- a/prisma-fmt/src/lib.rs +++ b/prisma-fmt/src/lib.rs @@ -3,8 +3,10 @@ mod code_actions; mod get_config; mod get_dmmf; mod lint; +mod merge_schemas; mod native; mod preview; +mod schema_file_input; mod text_document_completion; mod validate; @@ -89,6 +91,14 @@ pub fn validate(validate_params: String) -> Result<(), String> { 
validate::validate(&validate_params) } +/// Given a list of Prisma schema files (and their locations), returns the merged schema. +/// This is useful for `@prisma/client` generation, where the client needs a single - potentially large - schema, +/// while still allowing the user to split their schema copies into multiple files. +/// Internally, it uses `[validate]`. +pub fn merge_schemas(params: String) -> Result { + merge_schemas::merge_schemas(¶ms) +} + pub fn native_types(schema: String) -> String { native::run(&schema) } @@ -225,7 +235,7 @@ pub(crate) fn range_to_span(range: Range, document: &str) -> ast::Span { let start = position_to_offset(&range.start, document).unwrap(); let end = position_to_offset(&range.end, document).unwrap(); - ast::Span::new(start, end) + ast::Span::new(start, end, psl::parser_database::FileId::ZERO) } /// Gives the LSP position right after the given span. diff --git a/prisma-fmt/src/merge_schemas.rs b/prisma-fmt/src/merge_schemas.rs new file mode 100644 index 000000000000..bcb37922b68d --- /dev/null +++ b/prisma-fmt/src/merge_schemas.rs @@ -0,0 +1,127 @@ +use psl::reformat_validated_schema_into_single; +use serde::Deserialize; + +use crate::schema_file_input::SchemaFileInput; + +#[derive(Debug, Deserialize)] +pub struct MergeSchemasParams { + schema: SchemaFileInput, +} + +pub(crate) fn merge_schemas(params: &str) -> Result { + let params: MergeSchemasParams = match serde_json::from_str(params) { + Ok(params) => params, + Err(serde_err) => { + panic!("Failed to deserialize MergeSchemasParams: {serde_err}"); + } + }; + + let validated_schema = crate::validate::run(params.schema, false)?; + + let indent_width = 2usize; + let merged_schema = reformat_validated_schema_into_single(validated_schema, indent_width).unwrap(); + + Ok(merged_schema) +} + +#[cfg(test)] +mod tests { + use super::*; + use expect_test::expect; + use serde_json::json; + + #[test] + fn merge_two_valid_schemas_succeeds() { + let schema = vec![ + ( + "b.prisma", + r#" 
+ model B { + id String @id + a A? + } + "#, + ), + ( + "a.prisma", + r#" + datasource db { + provider = "postgresql" + url = env("DBURL") + } + + model A { + id String @id + b_id String @unique + b B @relation(fields: [b_id], references: [id]) + } + "#, + ), + ]; + + let request = json!({ + "schema": schema, + }); + + let expected = expect![[r#" + model B { + id String @id + a A? + } + + datasource db { + provider = "postgresql" + url = env("DBURL") + } + + model A { + id String @id + b_id String @unique + b B @relation(fields: [b_id], references: [id]) + } + "#]]; + + let response = merge_schemas(&request.to_string()).unwrap(); + expected.assert_eq(&response); + } + + #[test] + fn merge_two_invalid_schemas_panics() { + let schema = vec![ + ( + "b.prisma", + r#" + model B { + id String @id + a A? + } + "#, + ), + ( + "a.prisma", + r#" + datasource db { + provider = "postgresql" + url = env("DBURL") + } + + model A { + id String @id + b_id String @unique + } + "#, + ), + ]; + + let request = json!({ + "schema": schema, + }); + + let expected = expect![[ + r#"{"error_code":"P1012","message":"\u001b[1;91merror\u001b[0m: \u001b[1mError validating field `a` in model `B`: The relation field `a` on model `B` is missing an opposite relation field on the model `A`. 
Either run `prisma format` or add it manually.\u001b[0m\n \u001b[1;94m-->\u001b[0m \u001b[4mb.prisma:4\u001b[0m\n\u001b[1;94m | \u001b[0m\n\u001b[1;94m 3 | \u001b[0m id String @id\n\u001b[1;94m 4 | \u001b[0m \u001b[1;91ma A?\u001b[0m\n\u001b[1;94m 5 | \u001b[0m }\n\u001b[1;94m | \u001b[0m\n\nValidation Error Count: 1"}"# + ]]; + + let response = merge_schemas(&request.to_string()).unwrap_err(); + expected.assert_eq(&response); + } +} diff --git a/prisma-fmt/src/schema_file_input.rs b/prisma-fmt/src/schema_file_input.rs new file mode 100644 index 000000000000..a7204510ed8b --- /dev/null +++ b/prisma-fmt/src/schema_file_input.rs @@ -0,0 +1,26 @@ +use psl::SourceFile; +use serde::Deserialize; + +/// Struct for supporting multiple files +/// in a backward-compatible way: can either accept +/// a single file contents or vector of (filePath, content) tuples. +/// Can be converted to the input for `psl::validate_multi_file` from +/// any of the variants. +#[derive(Deserialize, Debug)] +#[serde(untagged)] +pub(crate) enum SchemaFileInput { + Single(String), + Multiple(Vec<(String, String)>), +} + +impl From for Vec<(String, SourceFile)> { + fn from(value: SchemaFileInput) -> Self { + match value { + SchemaFileInput::Single(content) => vec![("schema.prisma".to_owned(), content.into())], + SchemaFileInput::Multiple(file_list) => file_list + .into_iter() + .map(|(filename, content)| (filename, content.into())) + .collect(), + } + } +} diff --git a/prisma-fmt/src/text_document_completion.rs b/prisma-fmt/src/text_document_completion.rs index 4df8f3e91471..caca887c6ac6 100644 --- a/prisma-fmt/src/text_document_completion.rs +++ b/prisma-fmt/src/text_document_completion.rs @@ -41,7 +41,7 @@ pub(crate) fn completion(schema: String, params: CompletionParams) -> Completion let db = { let mut diag = Diagnostics::new(); - ParserDatabase::new(source_file, &mut diag) + ParserDatabase::new_single_file(source_file, &mut diag) }; let ctx = CompletionContext { @@ -91,7 +91,7 @@ impl<'a> 
CompletionContext<'a> { } fn push_ast_completions(ctx: CompletionContext<'_>, completion_list: &mut CompletionList) { - match ctx.db.ast().find_at_position(ctx.position) { + match ctx.db.ast_assert_single().find_at_position(ctx.position) { ast::SchemaPosition::Model( _model_id, ast::ModelPosition::Field(_, ast::FieldPosition::Attribute("relation", _, Some(attr_name))), @@ -190,7 +190,7 @@ fn ds_has_prop(ctx: CompletionContext<'_>, prop: &str) -> bool { fn push_namespaces(ctx: CompletionContext<'_>, completion_list: &mut CompletionList) { for (namespace, _) in ctx.namespaces() { - let insert_text = if add_quotes(ctx.params, ctx.db.source()) { + let insert_text = if add_quotes(ctx.params, ctx.db.source_assert_single()) { format!(r#""{namespace}""#) } else { namespace.to_string() diff --git a/prisma-fmt/src/text_document_completion/datasource.rs b/prisma-fmt/src/text_document_completion/datasource.rs index 02b7d9f4377b..22da182868ae 100644 --- a/prisma-fmt/src/text_document_completion/datasource.rs +++ b/prisma-fmt/src/text_document_completion/datasource.rs @@ -144,7 +144,7 @@ pub(super) fn url_env_db_completion(completion_list: &mut CompletionList, kind: _ => unreachable!(), }; - let insert_text = if add_quotes(ctx.params, ctx.db.source()) { + let insert_text = if add_quotes(ctx.params, ctx.db.source_assert_single()) { format!(r#""{text}""#) } else { text.to_owned() diff --git a/prisma-fmt/src/validate.rs b/prisma-fmt/src/validate.rs index 4cc9f88bf8bd..7bbce19e425d 100644 --- a/prisma-fmt/src/validate.rs +++ b/prisma-fmt/src/validate.rs @@ -1,14 +1,17 @@ +use psl::ValidatedSchema; use serde::Deserialize; use serde_json::json; use std::fmt::Write as _; +use crate::schema_file_input::SchemaFileInput; + // this mirrors user_facing_errors::common::SchemaParserError pub(crate) static SCHEMA_PARSER_ERROR_CODE: &str = "P1012"; #[derive(Deserialize, Debug)] #[serde(rename_all = "camelCase")] struct ValidateParams { - prisma_schema: String, + prisma_schema: SchemaFileInput, 
#[serde(default)] no_color: bool, } @@ -21,21 +24,22 @@ pub(crate) fn validate(params: &str) -> Result<(), String> { } }; - run(¶ms.prisma_schema, params.no_color) + run(params.prisma_schema, params.no_color)?; + Ok(()) } -pub fn run(input_schema: &str, no_color: bool) -> Result<(), String> { - let validate_schema = psl::validate(input_schema.into()); +pub fn run(input_schema: SchemaFileInput, no_color: bool) -> Result { + let validate_schema = psl::validate_multi_file(input_schema.into()); let diagnostics = &validate_schema.diagnostics; if !diagnostics.has_errors() { - return Ok(()); + return Ok(validate_schema); } // always colorise output regardless of the environment, which is important for Wasm colored::control::set_override(!no_color); - let mut formatted_error = diagnostics.to_pretty_string("schema.prisma", input_schema); + let mut formatted_error = validate_schema.render_own_diagnostics(); write!( formatted_error, "\nValidation Error Count: {}", @@ -109,6 +113,83 @@ mod tests { validate(&request.to_string()).unwrap(); } + #[test] + fn validate_multiple_files() { + let schema = vec![ + ( + "a.prisma", + r#" + datasource thedb { + provider = "postgresql" + url = env("DBURL") + } + + model A { + id String @id + b_id String @unique + b B @relation(fields: [b_id], references: [id]) + } + "#, + ), + ( + "b.prisma", + r#" + model B { + id String @id + a A? 
+ } + "#, + ), + ]; + + let request = json!({ + "prismaSchema": schema, + }); + + validate(&request.to_string()).unwrap(); + } + + #[test] + fn validate_multiple_files_error() { + let schema = vec![ + ( + "a.prisma", + r#" + datasource thedb { + provider = "postgresql" + url = env("DBURL") + } + + model A { + id String @id + b_id String @unique + b B @relation(fields: [b_id], references: [id]) + } + "#, + ), + ( + "b.prisma", + r#" + model B { + id String @id + a A + } + "#, + ), + ]; + + let request = json!({ + "prismaSchema": schema, + }); + + let expected = expect![[ + r#"{"error_code":"P1012","message":"\u001b[1;91merror\u001b[0m: \u001b[1mError parsing attribute \"@relation\": The relation field `a` on Model `B` is required. This is no longer valid because it's not possible to enforce this constraint on the database level. Please change the field type from `A` to `A?` to fix this.\u001b[0m\n \u001b[1;94m-->\u001b[0m \u001b[4mb.prisma:4\u001b[0m\n\u001b[1;94m | \u001b[0m\n\u001b[1;94m 3 | \u001b[0m id String @id\n\u001b[1;94m 4 | \u001b[0m \u001b[1;91ma A\u001b[0m\n\u001b[1;94m 5 | \u001b[0m }\n\u001b[1;94m | \u001b[0m\n\nValidation Error Count: 1"}"# + ]]; + + let response = validate(&request.to_string()).unwrap_err(); + expected.assert_eq(&response); + } + #[test] fn validate_using_both_relation_mode_and_referential_integrity() { let schema = r#" diff --git a/prisma-fmt/tests/code_actions/test_api.rs b/prisma-fmt/tests/code_actions/test_api.rs index 2be0c978aa82..ff874cf86997 100644 --- a/prisma-fmt/tests/code_actions/test_api.rs +++ b/prisma-fmt/tests/code_actions/test_api.rs @@ -19,8 +19,8 @@ fn parse_schema_diagnostics(file: impl Into) -> Option) -> Option Result<(), JsError> { prisma_fmt::validate(params).map_err(|e| JsError::new(&e)) } +#[wasm_bindgen] +pub fn merge_schemas(input: String) -> Result { + register_panic_hook(); + prisma_fmt::merge_schemas(input).map_err(|e| JsError::new(&e)) +} + #[wasm_bindgen] pub fn native_types(input: String) -> String 
{ register_panic_hook(); diff --git a/psl/diagnostics/src/error.rs b/psl/diagnostics/src/error.rs index c6a16dffdba0..6a11d461a138 100644 --- a/psl/diagnostics/src/error.rs +++ b/psl/diagnostics/src/error.rs @@ -293,6 +293,13 @@ impl DatamodelError { Self::new(msg, span) } + pub fn new_type_for_case_not_found_error(type_name: &str, suggestion: &str, span: Span) -> DatamodelError { + let msg = format!( + "Type \"{type_name}\" is neither a built-in type, nor refers to another model, custom type, or enum. Did you mean \"{suggestion}\"?" + ); + Self::new(msg, span) + } + pub fn new_scalar_type_not_found_error(type_name: &str, span: Span) -> DatamodelError { Self::new(format!("Type \"{type_name}\" is not a built-in type."), span) } diff --git a/psl/diagnostics/src/lib.rs b/psl/diagnostics/src/lib.rs index ba1665dd76ee..0d47d2c3c975 100644 --- a/psl/diagnostics/src/lib.rs +++ b/psl/diagnostics/src/lib.rs @@ -8,5 +8,5 @@ mod warning; pub use collection::Diagnostics; pub use error::DatamodelError; pub use native_type_error_factory::NativeTypeErrorFactory; -pub use span::Span; +pub use span::{FileId, Span}; pub use warning::DatamodelWarning; diff --git a/psl/diagnostics/src/span.rs b/psl/diagnostics/src/span.rs index 574115b42495..42110aa29792 100644 --- a/psl/diagnostics/src/span.rs +++ b/psl/diagnostics/src/span.rs @@ -1,19 +1,34 @@ +/// The stable identifier for a PSL file. +#[derive(Debug, PartialEq, Clone, Copy, Hash, Eq, PartialOrd, Ord)] +pub struct FileId(pub u32); // we can't encapsulate because it would be a circular crate + // dependency between diagnostics and parser-database + +impl FileId { + pub const ZERO: FileId = FileId(0); + pub const MAX: FileId = FileId(u32::MAX); +} + /// Represents a location in a datamodel's text representation. #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub struct Span { pub start: usize, pub end: usize, + pub file_id: FileId, } impl Span { /// Constructor. 
- pub fn new(start: usize, end: usize) -> Span { - Span { start, end } + pub fn new(start: usize, end: usize, file_id: FileId) -> Span { + Span { start, end, file_id } } /// Creates a new empty span. pub fn empty() -> Span { - Span { start: 0, end: 0 } + Span { + start: 0, + end: 0, + file_id: FileId::ZERO, + } } /// Is the given position inside the span? (boundaries included) @@ -27,11 +42,12 @@ impl Span { } } -impl From> for Span { - fn from(s: pest::Span<'_>) -> Self { +impl From<(FileId, pest::Span<'_>)> for Span { + fn from((file_id, s): (FileId, pest::Span<'_>)) -> Self { Span { start: s.start(), end: s.end(), + file_id, } } } diff --git a/psl/parser-database/src/attributes.rs b/psl/parser-database/src/attributes.rs index e944b2fdc8ce..0d0bbfe786d3 100644 --- a/psl/parser-database/src/attributes.rs +++ b/psl/parser-database/src/attributes.rs @@ -23,12 +23,16 @@ pub(super) fn resolve_attributes(ctx: &mut Context<'_>) { visit_relation_field_attributes(rfid, ctx); } - for top in ctx.ast.iter_tops() { + for top in ctx.iter_tops() { match top { - (ast::TopId::Model(model_id), ast::Top::Model(_)) => resolve_model_attributes(model_id, ctx), - (ast::TopId::Enum(enum_id), ast::Top::Enum(ast_enum)) => resolve_enum_attributes(enum_id, ast_enum, ctx), - (ast::TopId::CompositeType(ctid), ast::Top::CompositeType(ct)) => { - resolve_composite_type_attributes(ctid, ct, ctx) + ((file_id, ast::TopId::Model(model_id)), ast::Top::Model(_)) => { + resolve_model_attributes((file_id, model_id), ctx) + } + ((file_id, ast::TopId::Enum(enum_id)), ast::Top::Enum(ast_enum)) => { + resolve_enum_attributes((file_id, enum_id), ast_enum, ctx) + } + ((file_id, ast::TopId::CompositeType(ctid)), ast::Top::CompositeType(ct)) => { + resolve_composite_type_attributes((file_id, ctid), ct, ctx) } _ => (), } @@ -36,14 +40,14 @@ pub(super) fn resolve_attributes(ctx: &mut Context<'_>) { } fn resolve_composite_type_attributes<'db>( - ctid: ast::CompositeTypeId, + ctid: crate::CompositeTypeId, ct: &'db 
ast::CompositeType, ctx: &mut Context<'db>, ) { for (field_id, field) in ct.iter_fields() { let CompositeTypeField { r#type, .. } = ctx.types.composite_type_fields[&(ctid, field_id)]; - ctx.visit_attributes((ctid, field_id).into()); + ctx.visit_attributes((ctid.0, (ctid.1, field_id))); if let ScalarFieldType::BuiltInScalar(_scalar_type) = r#type { // native type attributes @@ -52,7 +56,7 @@ fn resolve_composite_type_attributes<'db>( (ctid, field_id), datasource_name, type_name, - &ctx.ast[args], + &ctx.asts[args], ctx, ) } @@ -74,11 +78,11 @@ fn resolve_composite_type_attributes<'db>( } } -fn resolve_enum_attributes<'db>(enum_id: ast::EnumId, ast_enum: &'db ast::Enum, ctx: &mut Context<'db>) { +fn resolve_enum_attributes<'db>(enum_id: crate::EnumId, ast_enum: &'db ast::Enum, ctx: &mut Context<'db>) { let mut enum_attributes = EnumAttributes::default(); for value_idx in 0..ast_enum.values.len() { - ctx.visit_attributes((enum_id, value_idx as u32).into()); + ctx.visit_attributes((enum_id.0, (enum_id.1, value_idx as u32))); // @map if ctx.visit_optional_single_attr("map") { if let Some(mapped_name) = map::visit_map_attribute(ctx) { @@ -93,7 +97,7 @@ fn resolve_enum_attributes<'db>(enum_id: ast::EnumId, ast_enum: &'db ast::Enum, // Now validate the enum attributes. - ctx.visit_attributes(enum_id.into()); + ctx.visit_attributes(enum_id); // @@map if ctx.visit_optional_single_attr("map") { @@ -114,7 +118,7 @@ fn resolve_enum_attributes<'db>(enum_id: ast::EnumId, ast_enum: &'db ast::Enum, ctx.validate_visited_attributes(); } -fn resolve_model_attributes(model_id: ast::ModelId, ctx: &mut Context<'_>) { +fn resolve_model_attributes(model_id: crate::ModelId, ctx: &mut Context<'_>) { let mut model_attributes = ModelAttributes::default(); // First resolve all the attributes defined on fields **in isolation**. 
@@ -123,7 +127,7 @@ fn resolve_model_attributes(model_id: ast::ModelId, ctx: &mut Context<'_>) { } // Resolve all the attributes defined on the model itself **in isolation**. - ctx.visit_attributes(model_id.into()); + ctx.visit_attributes(model_id); // @@ignore if ctx.visit_optional_single_attr("ignore") { @@ -185,7 +189,7 @@ fn visit_scalar_field_attributes( r#type, .. } = ctx.types[scalar_field_id]; - let ast_model = &ctx.ast[model_id]; + let ast_model = &ctx.asts[model_id]; let ast_field = &ast_model[field_id]; ctx.visit_scalar_field_attributes(model_id, field_id); @@ -240,7 +244,7 @@ fn visit_scalar_field_attributes( if let ScalarFieldType::BuiltInScalar(_scalar_type) = r#type { // native type attributes if let Some((datasource_name, type_name, attribute_id)) = ctx.visit_datasource_scoped() { - let attribute = &ctx.ast[attribute_id]; + let attribute = &ctx.asts[attribute_id]; native_types::visit_model_field_native_type_attribute( scalar_field_id, datasource_name, @@ -297,7 +301,7 @@ fn visit_field_unique(scalar_field_id: ScalarFieldId, model_data: &mut ModelAttr let attribute_id = ctx.current_attribute_id(); model_data.ast_indexes.push(( - attribute_id, + attribute_id.1, IndexAttribute { r#type: IndexType::Unique, fields: vec![FieldWithArgs { @@ -316,8 +320,8 @@ fn visit_field_unique(scalar_field_id: ScalarFieldId, model_data: &mut ModelAttr fn visit_relation_field_attributes(rfid: RelationFieldId, ctx: &mut Context<'_>) { let RelationField { model_id, field_id, .. } = ctx.types[rfid]; - let ast_field = &ctx.ast[model_id][field_id]; - ctx.visit_attributes((model_id, field_id).into()); + let ast_field = &ctx.asts[model_id][field_id]; + ctx.visit_attributes((model_id.0, (model_id.1, field_id))); // @relation // Relation attributes are not required at this stage. 
@@ -364,7 +368,7 @@ fn visit_relation_field_attributes(rfid: RelationFieldId, ctx: &mut Context<'_>) for underlying_field in ctx.types[rfid].fields.iter().flatten() { let ScalarField { model_id, field_id, .. } = ctx.types[*underlying_field]; - suggested_fields.push(ctx.ast[model_id][field_id].name()); + suggested_fields.push(ctx.asts[model_id][field_id].name()); } let suggestion = match suggested_fields.len() { @@ -391,7 +395,7 @@ fn visit_relation_field_attributes(rfid: RelationFieldId, ctx: &mut Context<'_>) ctx.validate_visited_attributes(); } -fn visit_model_ignore(model_id: ast::ModelId, model_data: &mut ModelAttributes, ctx: &mut Context<'_>) { +fn visit_model_ignore(model_id: crate::ModelId, model_data: &mut ModelAttributes, ctx: &mut Context<'_>) { let ignored_field_errors: Vec<_> = ctx .types .range_model_scalar_fields(model_id) @@ -400,7 +404,7 @@ fn visit_model_ignore(model_id: ast::ModelId, model_data: &mut ModelAttributes, DatamodelError::new_attribute_validation_error( "Fields on an already ignored Model do not need an `@ignore` annotation.", "@ignore", - ctx.ast[sf.model_id][sf.field_id].span(), + ctx.asts[sf.model_id][sf.field_id].span(), ) }) .collect(); @@ -413,7 +417,7 @@ fn visit_model_ignore(model_id: ast::ModelId, model_data: &mut ModelAttributes, } /// Validate @@fulltext on models -fn model_fulltext(data: &mut ModelAttributes, model_id: ast::ModelId, ctx: &mut Context<'_>) { +fn model_fulltext(data: &mut ModelAttributes, model_id: crate::ModelId, ctx: &mut Context<'_>) { let mut index_attribute = IndexAttribute { r#type: IndexType::Fulltext, ..Default::default() @@ -440,11 +444,11 @@ fn model_fulltext(data: &mut ModelAttributes, model_id: ast::ModelId, ctx: &mut index_attribute.mapped_name = mapped_name; - data.ast_indexes.push((ctx.current_attribute_id(), index_attribute)); + data.ast_indexes.push((ctx.current_attribute_id().1, index_attribute)); } /// Validate @@index on models. 
-fn model_index(data: &mut ModelAttributes, model_id: ast::ModelId, ctx: &mut Context<'_>) { +fn model_index(data: &mut ModelAttributes, model_id: crate::ModelId, ctx: &mut Context<'_>) { let mut index_attribute = IndexAttribute { r#type: IndexType::Normal, ..Default::default() @@ -514,11 +518,11 @@ fn model_index(data: &mut ModelAttributes, model_id: ast::ModelId, ctx: &mut Con index_attribute.algorithm = algo; index_attribute.clustered = validate_clustering_setting(ctx); - data.ast_indexes.push((ctx.current_attribute_id(), index_attribute)); + data.ast_indexes.push((ctx.current_attribute_id().1, index_attribute)); } /// Validate @@unique on models. -fn model_unique(data: &mut ModelAttributes, model_id: ast::ModelId, ctx: &mut Context<'_>) { +fn model_unique(data: &mut ModelAttributes, model_id: crate::ModelId, ctx: &mut Context<'_>) { let mut index_attribute = IndexAttribute { r#type: IndexType::Unique, ..Default::default() @@ -533,7 +537,7 @@ fn model_unique(data: &mut ModelAttributes, model_id: ast::ModelId, ctx: &mut Co let current_attribute = ctx.current_attribute(); let current_attribute_id = ctx.current_attribute_id(); - let ast_model = &ctx.ast[model_id]; + let ast_model = &ctx.asts[model_id]; let name = get_name_argument(ctx); let mapped_name = { @@ -570,12 +574,12 @@ fn model_unique(data: &mut ModelAttributes, model_id: ast::ModelId, ctx: &mut Co index_attribute.mapped_name = mapped_name; index_attribute.clustered = validate_clustering_setting(ctx); - data.ast_indexes.push((current_attribute_id, index_attribute)); + data.ast_indexes.push((current_attribute_id.1, index_attribute)); } fn common_index_validations( index_data: &mut IndexAttribute, - model_id: ast::ModelId, + model_id: crate::ModelId, resolving: FieldResolvingSetup, ctx: &mut Context<'_>, ) { @@ -599,9 +603,9 @@ fn common_index_validations( if !unresolvable_fields.is_empty() { let fields = unresolvable_fields .iter() - .map(|(top_id, field_name)| match top_id { + .map(|((file_id, top_id), 
field_name)| match top_id { ast::TopId::CompositeType(ctid) => { - let composite_type = &ctx.ast[*ctid].name(); + let composite_type = &ctx.asts[(*file_id, *ctid)].name(); Cow::from(format!("{field_name} in type {composite_type}")) } @@ -616,7 +620,7 @@ fn common_index_validations( if index_data.is_unique() { "unique " } else { "" }, fields.join(", "), ); - let model_name = ctx.ast[model_id].name(); + let model_name = ctx.asts[model_id].name(); DatamodelError::new_model_validation_error(message, "model", model_name, current_attribute.span) }); } @@ -636,7 +640,7 @@ fn common_index_validations( .flatten(); for underlying_field in fields { let ScalarField { model_id, field_id, .. } = ctx.types[*underlying_field]; - suggested_fields.push(ctx.ast[model_id][field_id].name()); + suggested_fields.push(ctx.asts[model_id][field_id].name()); } } @@ -658,7 +662,7 @@ fn common_index_validations( suggestion = suggestion ), "model", - ctx.ast[model_id].name(), + ctx.asts[model_id].name(), current_attribute.span, )); } @@ -667,9 +671,9 @@ fn common_index_validations( } /// @relation validation for relation fields. 
-fn visit_relation(model_id: ast::ModelId, relation_field_id: RelationFieldId, ctx: &mut Context<'_>) { +fn visit_relation(model_id: crate::ModelId, relation_field_id: RelationFieldId, ctx: &mut Context<'_>) { let attr = ctx.current_attribute(); - ctx.types[relation_field_id].relation_attribute = Some(ctx.current_attribute_id()); + ctx.types[relation_field_id].relation_attribute = Some(ctx.current_attribute_id().1); if let Some(fields) = ctx.visit_optional_arg("fields") { let fields = match resolve_field_array_without_args(fields, attr.span, model_id, ctx) { @@ -724,7 +728,7 @@ fn visit_relation(model_id: ast::ModelId, relation_field_id: RelationFieldId, ct unknown_fields, }) => { if !unknown_fields.is_empty() { - let model_name = ctx.ast[ctx.types[relation_field_id].referenced_model].name(); + let model_name = ctx.asts[ctx.types[relation_field_id].referenced_model].name(); let field_names = unknown_fields .into_iter() @@ -742,7 +746,7 @@ fn visit_relation(model_id: ast::ModelId, relation_field_id: RelationFieldId, ct if !relation_fields.is_empty() { let msg = format!( "The argument `references` must refer only to scalar fields in the related model `{}`. But it is referencing the following relation fields: {}", - ctx.ast[ctx.types[relation_field_id].referenced_model].name(), + ctx.asts[ctx.types[relation_field_id].referenced_model].name(), relation_fields.iter().map(|(f, _)| f.name()).collect::>().join(", "), ); ctx.push_error(DatamodelError::new_validation_error(&msg, attr.span)); @@ -806,7 +810,7 @@ enum FieldResolutionError<'ast> { AlreadyDealtWith, ProblematicFields { /// Fields that do not exist on the model. - unknown_fields: Vec<(ast::TopId, &'ast str)>, + unknown_fields: Vec<(crate::TopId, &'ast str)>, /// Fields that exist on the model but are relation fields. 
relation_fields: Vec<(&'ast ast::Field, ast::FieldId)>, }, @@ -818,9 +822,10 @@ enum FieldResolutionError<'ast> { fn resolve_field_array_without_args<'db>( values: &'db ast::Expression, attribute_span: ast::Span, - model_id: ast::ModelId, + model_id: crate::ModelId, ctx: &mut Context<'db>, ) -> Result, FieldResolutionError<'db>> { + let file_id = model_id.0; let constant_array = match coerce_array(values, &coerce::constant, ctx.diagnostics) { Some(values) => values, None => { @@ -831,11 +836,11 @@ fn resolve_field_array_without_args<'db>( let mut field_ids: Vec = Vec::with_capacity(constant_array.len()); let mut unknown_fields = Vec::new(); let mut relation_fields = Vec::new(); - let ast_model = &ctx.ast[model_id]; + let ast_model = &ctx.asts[model_id]; for field_name in constant_array { if field_name.contains('.') { - unknown_fields.push((ast::TopId::Model(model_id), field_name)); + unknown_fields.push(((file_id, ast::TopId::Model(model_id.1)), field_name)); continue; } @@ -843,7 +848,7 @@ fn resolve_field_array_without_args<'db>( let field_id = if let Some(field_id) = ctx.find_model_field(model_id, field_name) { field_id } else { - unknown_fields.push((ast::TopId::Model(model_id), field_name)); + unknown_fields.push(((file_id, ast::TopId::Model(model_id.1)), field_name)); continue; }; @@ -851,7 +856,7 @@ fn resolve_field_array_without_args<'db>( let sfid = if let Some(sfid) = ctx.types.find_model_scalar_field(model_id, field_id) { sfid } else { - relation_fields.push((&ctx.ast[model_id][field_id], field_id)); + relation_fields.push((&ctx.asts[model_id][field_id], field_id)); continue; }; @@ -900,10 +905,11 @@ impl FieldResolvingSetup { fn resolve_field_array_with_args<'db>( values: &'db ast::Expression, attribute_span: ast::Span, - model_id: ast::ModelId, + model_id: crate::ModelId, resolving: FieldResolvingSetup, ctx: &mut Context<'db>, ) -> Result, FieldResolutionError<'db>> { + let file_id = model_id.0; let constant_array = match 
crate::types::index_fields::coerce_field_array_with_args(values, ctx.diagnostics) { Some(values) => values, None => return Err(FieldResolutionError::AlreadyDealtWith), @@ -913,12 +919,12 @@ fn resolve_field_array_with_args<'db>( let mut unknown_fields = Vec::new(); let mut relation_fields = Vec::new(); - let ast_model = &ctx.ast[model_id]; + let ast_model = &ctx.asts[model_id]; 'fields: for attrs in &constant_array { let path = if attrs.field_name.contains('.') { if !resolving.follow_composites() { - unknown_fields.push((ast::TopId::Model(model_id), attrs.field_name)); + unknown_fields.push(((file_id, ast::TopId::Model(model_id.1)), attrs.field_name)); continue 'fields; } @@ -930,7 +936,7 @@ fn resolve_field_array_with_args<'db>( let field_id = match ctx.find_model_field(model_id, field_shard) { Some(field_id) => field_id, None => { - unknown_fields.push((ast::TopId::Model(model_id), field_shard)); + unknown_fields.push(((file_id, ast::TopId::Model(model_id.1)), field_shard)); continue 'fields; } }; @@ -938,14 +944,14 @@ fn resolve_field_array_with_args<'db>( let sfid = if let Some(sfid) = ctx.types.find_model_scalar_field(model_id, field_id) { sfid } else { - relation_fields.push((&ctx.ast[model_id][field_id], field_id)); + relation_fields.push((&ctx.asts[model_id][field_id], field_id)); continue 'fields; }; match &ctx.types[sfid].r#type { ScalarFieldType::CompositeType(ctid) => (IndexFieldPath::new(sfid), ctid), _ => { - unknown_fields.push((ast::TopId::Model(model_id), attrs.field_name)); + unknown_fields.push(((file_id, ast::TopId::Model(model_id.1)), attrs.field_name)); continue 'fields; } } @@ -961,7 +967,7 @@ fn resolve_field_array_with_args<'db>( let field_id = match ctx.find_composite_type_field(*next_type, field_shard) { Some(field_id) => field_id, None => { - unknown_fields.push((ast::TopId::CompositeType(*next_type), field_shard)); + unknown_fields.push(((next_type.0, ast::TopId::CompositeType(next_type.1)), field_shard)); continue 'fields; } }; @@ 
-973,7 +979,7 @@ fn resolve_field_array_with_args<'db>( next_type = ctid; } _ if i < field_count - 1 => { - unknown_fields.push((ast::TopId::Model(model_id), attrs.field_name)); + unknown_fields.push(((model_id.0, ast::TopId::Model(model_id.1)), attrs.field_name)); continue 'fields; } _ => (), @@ -986,12 +992,12 @@ fn resolve_field_array_with_args<'db>( match ctx.types.find_model_scalar_field(model_id, field_id) { Some(sfid) => IndexFieldPath::new(sfid), None => { - relation_fields.push((&ctx.ast[model_id][field_id], field_id)); + relation_fields.push((&ctx.asts[model_id][field_id], field_id)); continue; } } } else { - unknown_fields.push((ast::TopId::Model(model_id), attrs.field_name)); + unknown_fields.push(((model_id.0, ast::TopId::Model(model_id.1)), attrs.field_name)); continue; }; @@ -1000,8 +1006,8 @@ fn resolve_field_array_with_args<'db>( let path_str = match path.field_in_index() { either::Either::Left(_) => Cow::from(attrs.field_name), either::Either::Right((ctid, field_id)) => { - let field_name = &ctx.ast[ctid][field_id].name(); - let composite_type = &ctx.ast[ctid].name(); + let field_name = &ctx.asts[ctid][field_id].name(); + let composite_type = &ctx.asts[ctid].name(); Cow::from(format!("{field_name} in type {composite_type}")) } @@ -1097,13 +1103,17 @@ fn validate_clustering_setting(ctx: &mut Context<'_>) -> Option { /// access their corresponding entries in the attributes map in the database even in the presence /// of name and type resolution errors. This is useful for the language tools. 
pub(super) fn create_default_attributes(ctx: &mut Context<'_>) { - for top in ctx.ast.iter_tops() { + for ((file_id, top), _) in ctx.iter_tops() { match top { - (ast::TopId::Model(model_id), ast::Top::Model(_)) => { - ctx.types.model_attributes.insert(model_id, ModelAttributes::default()); + ast::TopId::Model(model_id) => { + ctx.types + .model_attributes + .insert((file_id, model_id), ModelAttributes::default()); } - (ast::TopId::Enum(enum_id), ast::Top::Enum(_)) => { - ctx.types.enum_attributes.insert(enum_id, EnumAttributes::default()); + ast::TopId::Enum(enum_id) => { + ctx.types + .enum_attributes + .insert((file_id, enum_id), EnumAttributes::default()); } _ => (), } diff --git a/psl/parser-database/src/attributes/default.rs b/psl/parser-database/src/attributes/default.rs index dcd22d316361..e2be240f152c 100644 --- a/psl/parser-database/src/attributes/default.rs +++ b/psl/parser-database/src/attributes/default.rs @@ -9,7 +9,7 @@ use crate::{ /// @default on model scalar fields pub(super) fn visit_model_field_default( scalar_field_id: ScalarFieldId, - model_id: ast::ModelId, + model_id: crate::ModelId, field_id: ast::FieldId, r#type: ScalarFieldType, ctx: &mut Context<'_>, @@ -19,7 +19,7 @@ pub(super) fn visit_model_field_default( Err(err) => return ctx.push_error(err), }; - let ast_model = &ctx.ast[model_id]; + let ast_model = &ctx.asts[model_id]; let ast_field = &ast_model[field_id]; let mapped_name = default_attribute_mapped_name(ctx); @@ -74,7 +74,7 @@ pub(super) fn visit_model_field_default( /// @default on composite type fields pub(super) fn visit_composite_field_default( - ct_id: ast::CompositeTypeId, + ct_id: crate::CompositeTypeId, field_id: ast::FieldId, r#type: ScalarFieldType, ctx: &mut Context<'_>, @@ -84,7 +84,7 @@ pub(super) fn visit_composite_field_default( Err(err) => return ctx.push_error(err), }; - let ast_model = &ctx.ast[ct_id]; + let ast_model = &ctx.asts[ct_id]; let ast_field = &ast_model[field_id]; if 
ctx.visit_optional_arg("map").is_some() { @@ -181,10 +181,10 @@ fn validate_model_builtin_scalar_type_default( value: &ast::Expression, mapped_name: Option, accept: AcceptFn<'_>, - field_id: (ast::ModelId, ast::FieldId), + field_id: (crate::ModelId, ast::FieldId), ctx: &mut Context<'_>, ) { - let arity = ctx.ast[field_id.0][field_id.1].arity; + let arity = ctx.asts[field_id.0][field_id.1].arity; match (scalar_type, value) { // Functions (_, ast::Expression::Function(funcname, _, _)) if funcname == FN_AUTOINCREMENT && mapped_name.is_some() => { @@ -324,9 +324,13 @@ fn validate_invalid_function_default(fn_name: &str, scalar_type: ScalarType, ctx )); } -fn validate_default_value_on_composite_type(ctid: ast::CompositeTypeId, ast_field: &ast::Field, ctx: &mut Context<'_>) { +fn validate_default_value_on_composite_type( + ctid: crate::CompositeTypeId, + ast_field: &ast::Field, + ctx: &mut Context<'_>, +) { let attr = ctx.current_attribute(); - let ct_name = ctx.ast[ctid].name(); + let ct_name = ctx.asts[ctid].name(); ctx.push_error(DatamodelError::new_composite_type_field_validation_error( "Defaults on fields of type composite are not supported. 
Please remove the `@default` attribute.", @@ -395,13 +399,13 @@ fn validate_nanoid_args(args: &[ast::Argument], accept: AcceptFn<'_>, ctx: &mut fn validate_enum_default( found_value: &ast::Expression, - enum_id: ast::EnumId, + enum_id: crate::EnumId, accept: AcceptFn<'_>, ctx: &mut Context<'_>, ) { match found_value { ast::Expression::ConstantValue(enum_value, _) => { - if ctx.ast[enum_id].values.iter().any(|v| v.name() == enum_value) { + if ctx.asts[enum_id].values.iter().any(|v| v.name() == enum_value) { accept(ctx) } else { validate_invalid_default_enum_value(enum_value, ctx); @@ -413,7 +417,7 @@ fn validate_enum_default( fn validate_enum_list_default( found_value: &ast::Expression, - enum_id: ast::EnumId, + enum_id: crate::EnumId, accept: AcceptFn<'_>, ctx: &mut Context<'_>, ) { diff --git a/psl/parser-database/src/attributes/id.rs b/psl/parser-database/src/attributes/id.rs index 96892587c862..13618bbea737 100644 --- a/psl/parser-database/src/attributes/id.rs +++ b/psl/parser-database/src/attributes/id.rs @@ -10,7 +10,7 @@ use crate::{ use std::borrow::Cow; /// @@id on models -pub(super) fn model(model_data: &mut ModelAttributes, model_id: ast::ModelId, ctx: &mut Context<'_>) { +pub(super) fn model(model_data: &mut ModelAttributes, model_id: crate::ModelId, ctx: &mut Context<'_>) { let attr = ctx.current_attribute(); let fields = match ctx.visit_default_arg("fields") { Ok(value) => value, @@ -29,9 +29,9 @@ pub(super) fn model(model_data: &mut ModelAttributes, model_id: ast::ModelId, ct if !unresolvable_fields.is_empty() { let fields_str = unresolvable_fields .into_iter() - .map(|(top_id, field_name)| match top_id { + .map(|((file_id, top_id), field_name)| match top_id { ast::TopId::CompositeType(ctid) => { - let ct_name = &ctx.ast[ctid].name(); + let ct_name = ctx.asts[(file_id, ctid)].name(); Cow::from(format!("{field_name} in type {ct_name}")) } @@ -43,7 +43,7 @@ pub(super) fn model(model_data: &mut ModelAttributes, model_id: ast::ModelId, ct let msg = 
format!("The multi field id declaration refers to the unknown fields {fields_str}."); let error = - DatamodelError::new_model_validation_error(&msg, "model", ctx.ast[model_id].name(), fields.span()); + DatamodelError::new_model_validation_error(&msg, "model", ctx.asts[model_id].name(), fields.span()); ctx.push_error(error); } @@ -60,7 +60,7 @@ pub(super) fn model(model_data: &mut ModelAttributes, model_id: ast::ModelId, ct ctx.push_error(DatamodelError::new_model_validation_error( &msg, "model", - ctx.ast[model_id].name(), + ctx.asts[model_id].name(), attr.span, )); } @@ -69,7 +69,7 @@ pub(super) fn model(model_data: &mut ModelAttributes, model_id: ast::ModelId, ct } }; - let ast_model = &ctx.ast[model_id]; + let ast_model = &ctx.asts[model_id]; // ID attribute fields must reference only required fields. let fields_that_are_not_required: Vec<&str> = resolved_fields @@ -77,7 +77,7 @@ pub(super) fn model(model_data: &mut ModelAttributes, model_id: ast::ModelId, ct .filter_map(|field| match field.path.field_in_index() { either::Either::Left(id) => { let ScalarField { model_id, field_id, .. 
} = ctx.types[id]; - let field = &ctx.ast[model_id][field_id]; + let field = &ctx.asts[model_id][field_id]; if field.arity.is_required() { None @@ -86,7 +86,7 @@ pub(super) fn model(model_data: &mut ModelAttributes, model_id: ast::ModelId, ct } } either::Either::Right((ctid, field_id)) => { - let field = &ctx.ast[ctid][field_id]; + let field = &ctx.asts[ctid][field_id]; if field.arity.is_required() { None @@ -198,7 +198,7 @@ pub(super) fn field<'db>( } pub(super) fn validate_id_field_arities( - model_id: ast::ModelId, + model_id: crate::ModelId, model_attributes: &ModelAttributes, ctx: &mut Context<'_>, ) { @@ -213,7 +213,7 @@ pub(super) fn validate_id_field_arities( }; let ast_field = if let Some(field_id) = pk.source_field { - &ctx.ast[model_id][field_id] + &ctx.asts[model_id][field_id] } else { return; }; @@ -222,7 +222,7 @@ pub(super) fn validate_id_field_arities( ctx.push_error(DatamodelError::new_attribute_validation_error( "Fields that are marked as id must be required.", "@id", - ctx.ast[pk.source_attribute].span, + ctx.asts[pk.source_attribute].span, )) } } diff --git a/psl/parser-database/src/attributes/map.rs b/psl/parser-database/src/attributes/map.rs index b4bf82835eb2..d910447f96cf 100644 --- a/psl/parser-database/src/attributes/map.rs +++ b/psl/parser-database/src/attributes/map.rs @@ -19,7 +19,7 @@ pub(super) fn scalar_field( sfid: ScalarFieldId, ast_model: &ast::Model, ast_field: &ast::Field, - model_id: ast::ModelId, + model_id: crate::ModelId, field_id: ast::FieldId, ctx: &mut Context<'_>, ) { @@ -71,7 +71,7 @@ pub(super) fn scalar_field( pub(super) fn composite_type_field( ct: &ast::CompositeType, ast_field: &ast::Field, - ctid: ast::CompositeTypeId, + ctid: crate::CompositeTypeId, field_id: ast::FieldId, ctx: &mut Context<'_>, ) { diff --git a/psl/parser-database/src/attributes/native_types.rs b/psl/parser-database/src/attributes/native_types.rs index d9deccb99eb9..704df89e23ac 100644 --- a/psl/parser-database/src/attributes/native_types.rs +++ 
b/psl/parser-database/src/attributes/native_types.rs @@ -14,7 +14,7 @@ pub(super) fn visit_model_field_native_type_attribute( } pub(super) fn visit_composite_type_field_native_type_attribute( - id: (ast::CompositeTypeId, ast::FieldId), + id: (crate::CompositeTypeId, ast::FieldId), datasource_name: StringId, type_name: StringId, attr: &ast::Attribute, diff --git a/psl/parser-database/src/context.rs b/psl/parser-database/src/context.rs index 450146953024..6d4d72239824 100644 --- a/psl/parser-database/src/context.rs +++ b/psl/parser-database/src/context.rs @@ -3,7 +3,7 @@ mod attributes; use self::attributes::AttributesValidationState; use crate::{ ast, interner::StringInterner, names::Names, relations::Relations, types::Types, DatamodelError, Diagnostics, - StringId, + InFile, StringId, }; use schema_ast::ast::{Expression, WithName}; use std::collections::{HashMap, HashSet}; @@ -21,7 +21,7 @@ use std::collections::{HashMap, HashSet}; /// /// See `visit_attributes()`. pub(crate) struct Context<'db> { - pub(crate) ast: &'db ast::SchemaAst, + pub(crate) asts: &'db crate::Files, pub(crate) interner: &'db mut StringInterner, pub(crate) names: &'db mut Names, pub(crate) types: &'db mut Types, @@ -30,15 +30,15 @@ pub(crate) struct Context<'db> { attributes: AttributesValidationState, // state machine for attribute validation // @map'ed names indexes. These are not in the db because they are only used for validation. 
- pub(super) mapped_model_scalar_field_names: HashMap<(ast::ModelId, StringId), ast::FieldId>, - pub(super) mapped_composite_type_names: HashMap<(ast::CompositeTypeId, StringId), ast::FieldId>, - pub(super) mapped_enum_names: HashMap, - pub(super) mapped_enum_value_names: HashMap<(ast::EnumId, StringId), u32>, + pub(super) mapped_model_scalar_field_names: HashMap<(crate::ModelId, StringId), ast::FieldId>, + pub(super) mapped_composite_type_names: HashMap<(crate::CompositeTypeId, StringId), ast::FieldId>, + pub(super) mapped_enum_names: HashMap, + pub(super) mapped_enum_value_names: HashMap<(crate::EnumId, StringId), u32>, } impl<'db> Context<'db> { pub(super) fn new( - ast: &'db ast::SchemaAst, + asts: &'db crate::Files, interner: &'db mut StringInterner, names: &'db mut Names, types: &'db mut Types, @@ -46,7 +46,7 @@ impl<'db> Context<'db> { diagnostics: &'db mut Diagnostics, ) -> Self { Context { - ast, + asts, interner, names, types, @@ -68,7 +68,7 @@ impl<'db> Context<'db> { /// Return the attribute currently being validated. Panics if the context is not in the right /// state. #[track_caller] - pub(crate) fn current_attribute_id(&self) -> ast::AttributeId { + pub(crate) fn current_attribute_id(&self) -> crate::AttributeId { self.attributes.attribute.unwrap() } @@ -76,8 +76,7 @@ impl<'db> Context<'db> { /// state. #[track_caller] pub(crate) fn current_attribute(&self) -> &'db ast::Attribute { - let id = self.attributes.attribute.unwrap(); - &self.ast[id] + &self.asts[self.attributes.attribute.unwrap()] } /// Discard arguments without validation. @@ -102,8 +101,8 @@ impl<'db> Context<'db> { /// /// Other than for this peculiarity, this method is identical to /// `visit_attributes()`. 
- pub(super) fn visit_scalar_field_attributes(&mut self, model_id: ast::ModelId, field_id: ast::FieldId) { - self.visit_attributes((model_id, field_id).into()); + pub(super) fn visit_scalar_field_attributes(&mut self, model_id: crate::ModelId, field_id: ast::FieldId) { + self.visit_attributes((model_id.0, (model_id.1, field_id))); } /// All attribute validation should go through `visit_attributes()`. It lets @@ -116,7 +115,11 @@ impl<'db> Context<'db> { /// `validate_visited_arguments()`. Otherwise, Context will helpfully panic. /// - When you are done validating an attribute set, you must call /// `validate_visited_attributes()`. Otherwise, Context will helpfully panic. - pub(super) fn visit_attributes(&mut self, ast_attributes: ast::AttributeContainer) { + pub(super) fn visit_attributes(&mut self, ast_attributes: InFile) + where + T: Into, + { + let ast_attributes: crate::AttributeContainer = (ast_attributes.0, ast_attributes.1.into()); if self.attributes.attributes.is_some() || !self.attributes.unused_attributes.is_empty() { panic!( "`ctx.visit_attributes() called with {:?} while the Context is still validating previous attribute set on {:?}`", @@ -125,7 +128,8 @@ impl<'db> Context<'db> { ); } - self.attributes.set_attributes(ast_attributes, self.ast); + self.attributes + .set_attributes(ast_attributes, &self.asts[ast_attributes.0].2); } /// Look for an optional attribute with a name of the form @@ -136,8 +140,8 @@ impl<'db> Context<'db> { /// arguments to other attributes: everywhere else, attributes are named, /// with a default that can be first, but with native types, arguments are /// purely positional. 
- pub(crate) fn visit_datasource_scoped(&mut self) -> Option<(StringId, StringId, ast::AttributeId)> { - let attrs = iter_attributes(self.attributes.attributes.as_ref(), self.ast) + pub(crate) fn visit_datasource_scoped(&mut self) -> Option<(StringId, StringId, crate::AttributeId)> { + let attrs = iter_attributes(self.attributes.attributes.as_ref(), self.asts) .filter(|(_, attr)| attr.name.name.contains('.')); let mut native_type_attr = None; let diagnostics = &mut self.diagnostics; @@ -172,7 +176,7 @@ impl<'db> Context<'db> { #[must_use] pub(crate) fn visit_optional_single_attr(&mut self, name: &'static str) -> bool { let mut attrs = - iter_attributes(self.attributes.attributes.as_ref(), self.ast).filter(|(_, a)| a.name.name == name); + iter_attributes(self.attributes.attributes.as_ref(), self.asts).filter(|(_, a)| a.name.name == name); let (first_idx, first) = match attrs.next() { Some(first) => first, None => return false, @@ -181,7 +185,7 @@ impl<'db> Context<'db> { if attrs.next().is_some() { for (idx, attr) in - iter_attributes(self.attributes.attributes.as_ref(), self.ast).filter(|(_, a)| a.name.name == name) + iter_attributes(self.attributes.attributes.as_ref(), self.asts).filter(|(_, a)| a.name.name == name) { diagnostics.push_error(DatamodelError::new_duplicate_attribute_error( &attr.name.name, @@ -205,7 +209,7 @@ impl<'db> Context<'db> { let mut has_valid_attribute = false; while !has_valid_attribute { - let first_attr = iter_attributes(self.attributes.attributes.as_ref(), self.ast) + let first_attr = iter_attributes(self.attributes.attributes.as_ref(), self.asts) .filter(|(_, attr)| attr.name.name == name) .find(|(attr_id, _)| self.attributes.unused_attributes.contains(attr_id)); let (attr_id, attr) = if let Some(first_attr) = first_attr { @@ -267,7 +271,7 @@ impl<'db> Context<'db> { /// otherwise. 
pub(crate) fn validate_visited_arguments(&mut self) { let attr = if let Some(attrid) = self.attributes.attribute { - &self.ast[attrid] + &self.asts[attrid] } else { panic!("State error: missing attribute in validate_visited_arguments.") }; @@ -290,7 +294,7 @@ impl<'db> Context<'db> { let diagnostics = &mut self.diagnostics; for attribute_id in &self.attributes.unused_attributes { - let attribute = &self.ast[*attribute_id]; + let attribute = &self.asts[*attribute_id]; diagnostics.push_error(DatamodelError::new_attribute_not_known_error( &attribute.name.name, attribute.span, @@ -308,7 +312,7 @@ impl<'db> Context<'db> { } /// Find a specific field in a specific model. - pub(crate) fn find_model_field(&self, model_id: ast::ModelId, field_name: &str) -> Option { + pub(crate) fn find_model_field(&self, model_id: crate::ModelId, field_name: &str) -> Option { let name = self.interner.lookup(field_name)?; self.names.model_fields.get(&(model_id, name)).cloned() } @@ -316,7 +320,7 @@ impl<'db> Context<'db> { /// Find a specific field in a specific composite type. pub(crate) fn find_composite_type_field( &self, - composite_type_id: ast::CompositeTypeId, + composite_type_id: crate::CompositeTypeId, field_name: &str, ) -> Option { let name = self.interner.lookup(field_name)?; @@ -327,9 +331,15 @@ impl<'db> Context<'db> { .cloned() } + pub(crate) fn iter_tops(&self) -> impl Iterator + 'db { + self.asts + .iter() + .flat_map(|(file_id, _, _, ast)| ast.iter_tops().map(move |(top_id, top)| ((file_id, top_id), top))) + } + /// Starts validating the arguments for an attribute, checking for duplicate arguments in the /// process. Returns whether the attribute is valid enough to be usable. 
- fn set_attribute(&mut self, attribute_id: ast::AttributeId, attribute: &'db ast::Attribute) -> bool { + fn set_attribute(&mut self, attribute_id: crate::AttributeId, attribute: &'db ast::Attribute) -> bool { if self.attributes.attribute.is_some() || !self.attributes.args.is_empty() { panic!("State error: we cannot start validating new arguments before `validate_visited_arguments()` or `discard_arguments()` has been called.\n{:#?}", self.attributes); } @@ -430,13 +440,15 @@ impl<'db> Context<'db> { // Implementation detail. Used for arguments validation. fn iter_attributes<'a, 'ast: 'a>( - attrs: Option<&'a ast::AttributeContainer>, - ast: &'ast ast::SchemaAst, -) -> impl Iterator + 'a { + attrs: Option<&'a crate::AttributeContainer>, + asts: &'ast crate::Files, +) -> impl Iterator + 'a { attrs .into_iter() - .flat_map(move |container| ast[*container].iter().enumerate().map(|a| (a, *container))) - .map(|((idx, attr), container)| (ast::AttributeId::new_in_container(container, idx), attr)) + .flat_map(move |container| asts[*container].iter().enumerate().map(|a| (a, *container))) + .map(|((idx, attr), (file_id, container))| { + ((file_id, ast::AttributeId::new_in_container(container, idx)), attr) + }) } impl std::ops::Index for Context<'_> { diff --git a/psl/parser-database/src/context/attributes.rs b/psl/parser-database/src/context/attributes.rs index 9f35f5cc3644..48b75756004b 100644 --- a/psl/parser-database/src/context/attributes.rs +++ b/psl/parser-database/src/context/attributes.rs @@ -4,17 +4,19 @@ use crate::interner::StringId; #[derive(Default, Debug)] pub(super) struct AttributesValidationState { /// The attributes list being validated. - pub(super) attributes: Option, - pub(super) unused_attributes: HashSet, // the _remaining_ attributes + pub(super) attributes: Option, + pub(super) unused_attributes: HashSet, // the _remaining_ attributes /// The attribute being validated. 
- pub(super) attribute: Option, + pub(super) attribute: Option, pub(super) args: HashMap, usize>, // the _remaining_ arguments of `attribute` } impl AttributesValidationState { - pub(super) fn set_attributes(&mut self, attributes: ast::AttributeContainer, ast: &ast::SchemaAst) { - let attribute_ids = (0..ast[attributes].len()).map(|idx| ast::AttributeId::new_in_container(attributes, idx)); + pub(super) fn set_attributes(&mut self, attributes: crate::AttributeContainer, ast: &ast::SchemaAst) { + let file_id = attributes.0; + let attribute_ids = + (0..ast[attributes.1].len()).map(|idx| (file_id, ast::AttributeId::new_in_container(attributes.1, idx))); self.unused_attributes.clear(); self.unused_attributes.extend(attribute_ids); diff --git a/psl/parser-database/src/files.rs b/psl/parser-database/src/files.rs new file mode 100644 index 000000000000..9aef27d3d70a --- /dev/null +++ b/psl/parser-database/src/files.rs @@ -0,0 +1,44 @@ +use crate::FileId; +use schema_ast::ast; +use std::ops::Index; + +/// The content is a list of (file path, file source text, file AST). +/// +/// The file path can be anything, the PSL implementation will only use it to display the file name +/// in errors. For example, files can come from nested directories. 
+pub(crate) struct Files(pub(super) Vec<(String, schema_ast::SourceFile, ast::SchemaAst)>); + +impl Files { + pub(crate) fn iter(&self) -> impl Iterator { + self.0 + .iter() + .enumerate() + .map(|(idx, (path, contents, ast))| (FileId(idx as u32), path, contents, ast)) + } + + pub(crate) fn into_iter(self) -> impl Iterator { + self.0 + .into_iter() + .enumerate() + .map(|(idx, (path, contents, ast))| (FileId(idx as u32), path, contents, ast)) + } +} + +impl Index for Files { + type Output = (String, schema_ast::SourceFile, ast::SchemaAst); + + fn index(&self, index: crate::FileId) -> &Self::Output { + &self.0[index.0 as usize] + } +} + +impl Index> for Files +where + ast::SchemaAst: Index, +{ + type Output = >::Output; + + fn index(&self, index: crate::InFile) -> &Self::Output { + &self[index.0].2[index.1] + } +} diff --git a/psl/parser-database/src/ids.rs b/psl/parser-database/src/ids.rs new file mode 100644 index 000000000000..55e5836f17fe --- /dev/null +++ b/psl/parser-database/src/ids.rs @@ -0,0 +1,23 @@ +use diagnostics::FileId; +use schema_ast::ast; + +/// An AST identifier with the accompanyin file ID. 
+pub type InFile = (FileId, Id); + +/// See [ast::ModelId] +pub type ModelId = InFile; + +/// See [ast::EnumId] +pub type EnumId = InFile; + +/// See [ast::CompositeTypeId] +pub type CompositeTypeId = InFile; + +/// See [ast::TopId] +pub type TopId = InFile; + +/// See [ast::AttributeId] +pub type AttributeId = InFile; + +/// See [ast::AttributeContainer] +pub type AttributeContainer = InFile; diff --git a/psl/parser-database/src/lib.rs b/psl/parser-database/src/lib.rs index d57ff8c98ddd..61dc685f93b3 100644 --- a/psl/parser-database/src/lib.rs +++ b/psl/parser-database/src/lib.rs @@ -31,12 +31,16 @@ pub mod walkers; mod attributes; mod coerce_expression; mod context; +mod files; +mod ids; mod interner; mod names; mod relations; mod types; pub use coerce_expression::{coerce, coerce_array, coerce_opt}; +pub use diagnostics::FileId; +pub use ids::*; pub use names::is_reserved_type_name; pub use relations::{ManyToManyRelationId, ReferentialAction, RelationId}; pub use schema_ast::{ast, SourceFile}; @@ -45,7 +49,7 @@ pub use types::{ ScalarType, SortOrder, }; -use self::{context::Context, interner::StringId, relations::Relations, types::Types}; +use self::{context::Context, files::Files, interner::StringId, relations::Relations, types::Types}; use diagnostics::{DatamodelError, Diagnostics}; use names::Names; @@ -69,8 +73,7 @@ use names::Names; /// - Global validations are then performed on the mostly validated schema. /// Currently only index name collisions. pub struct ParserDatabase { - ast: ast::SchemaAst, - file: schema_ast::SourceFile, + asts: Files, interner: interner::StringInterner, names: Names, types: Types, @@ -79,14 +82,35 @@ pub struct ParserDatabase { impl ParserDatabase { /// See the docs on [ParserDatabase](/struct.ParserDatabase.html). 
- pub fn new(file: schema_ast::SourceFile, diagnostics: &mut Diagnostics) -> Self { - let ast = schema_ast::parse_schema(file.as_str(), diagnostics); + pub fn new_single_file(file: SourceFile, diagnostics: &mut Diagnostics) -> Self { + Self::new(vec![("schema.prisma".to_owned(), file)], diagnostics) + } + + /// See the docs on [ParserDatabase](/struct.ParserDatabase.html). + pub fn new(schemas: Vec<(String, schema_ast::SourceFile)>, diagnostics: &mut Diagnostics) -> Self { + let asts = schemas + .into_iter() + .enumerate() + .map(|(file_idx, (path, source))| { + let id = FileId(file_idx as u32); + let ast = schema_ast::parse_schema(source.as_str(), diagnostics, id); + (path, source, ast) + }) + .collect(); + let asts = Files(asts); let mut interner = Default::default(); let mut names = Default::default(); let mut types = Default::default(); let mut relations = Default::default(); - let mut ctx = Context::new(&ast, &mut interner, &mut names, &mut types, &mut relations, diagnostics); + let mut ctx = Context::new( + &asts, + &mut interner, + &mut names, + &mut types, + &mut relations, + diagnostics, + ); // First pass: resolve names. names::resolve_names(&mut ctx); @@ -96,8 +120,7 @@ impl ParserDatabase { attributes::create_default_attributes(&mut ctx); return ParserDatabase { - ast, - file, + asts, interner, names, types, @@ -113,8 +136,7 @@ impl ParserDatabase { attributes::create_default_attributes(&mut ctx); return ParserDatabase { - ast, - file, + asts, interner, names, types, @@ -131,8 +153,7 @@ impl ParserDatabase { relations::infer_relations(&mut ctx); ParserDatabase { - ast, - file, + asts, interner, names, types, @@ -140,9 +161,33 @@ impl ParserDatabase { } } - /// The parsed AST. - pub fn ast(&self) -> &ast::SchemaAst { - &self.ast + /// The parsed AST. This methods asserts that there is a single prisma schema file. 
As + /// multi-file schemas are implemented, calls to this methods should be replaced with + /// `ParserDatabase::ast()` and `ParserDatabase::iter_asts()`. + /// TODO: consider removing once the `multiFileSchema` preview feature goes GA. + pub fn ast_assert_single(&self) -> &ast::SchemaAst { + assert_eq!(self.asts.0.len(), 1); + &self.asts.0.first().unwrap().2 + } + + /// Iterate all parsed ASTs. + pub fn iter_asts(&self) -> impl Iterator { + self.asts.iter().map(|(_, _, _, ast)| ast) + } + + /// Iterate all parsed ASTs, consuming parser database + pub fn into_iter_asts(self) -> impl Iterator { + self.asts.into_iter().map(|(_, _, _, ast)| ast) + } + + /// Iterate all file ids + pub fn iter_file_ids(&self) -> impl Iterator + '_ { + self.asts.iter().map(|(file_id, _, _, _)| file_id) + } + + /// A parsed AST. + pub fn ast(&self, file_id: FileId) -> &ast::SchemaAst { + &self.asts[file_id].2 } /// The total number of enums in the schema. This is O(1). @@ -155,9 +200,35 @@ impl ParserDatabase { self.types.model_attributes.len() } + /// The source file contents. This methods asserts that there is a single prisma schema file. + /// As multi-file schemas are implemented, calls to this methods should be replaced with + /// `ParserDatabase::source()` and `ParserDatabase::iter_sources()`. + pub fn source_assert_single(&self) -> &str { + assert_eq!(self.asts.0.len(), 1); + self.asts.0[0].1.as_str() + } + /// The source file contents. - pub fn source(&self) -> &str { - self.file.as_str() + pub fn source(&self, file_id: FileId) -> &str { + self.asts[file_id].1.as_str() + } + + /// Iterate all source file contents. + pub fn iter_sources(&self) -> impl Iterator { + self.asts.iter().map(|ast| ast.2.as_str()) + } + + /// The name of the file. 
+ pub fn file_name(&self, file_id: FileId) -> &str { + self.asts[file_id].0.as_str() + } +} + +impl std::ops::Index for ParserDatabase { + type Output = (String, SourceFile, ast::SchemaAst); + + fn index(&self, index: FileId) -> &Self::Output { + &self.asts[index] } } diff --git a/psl/parser-database/src/names.rs b/psl/parser-database/src/names.rs index 9ed71f98742f..dff646ca5101 100644 --- a/psl/parser-database/src/names.rs +++ b/psl/parser-database/src/names.rs @@ -5,7 +5,7 @@ pub use reserved_model_names::is_reserved_type_name; use crate::{ ast::{self, ConfigBlockProperty, TopId, WithAttributes, WithIdentifier, WithName, WithSpan}, types::ScalarType, - Context, DatamodelError, StringId, + Context, DatamodelError, FileId, StringId, }; use reserved_model_names::{validate_enum_name, validate_model_name}; use rustc_hash::{FxHashMap as HashMap, FxHashSet as HashSet}; @@ -14,13 +14,13 @@ use rustc_hash::{FxHashMap as HashMap, FxHashSet as HashSet}; #[derive(Default)] pub(super) struct Names { /// Models, enums, composite types and type aliases - pub(super) tops: HashMap, + pub(super) tops: HashMap, /// Generators have their own namespace. - pub(super) generators: HashMap, + pub(super) generators: HashMap, /// Datasources have their own namespace. 
- pub(super) datasources: HashMap, - pub(super) model_fields: HashMap<(ast::ModelId, StringId), ast::FieldId>, - pub(super) composite_type_fields: HashMap<(ast::CompositeTypeId, StringId), ast::FieldId>, + pub(super) datasources: HashMap, + pub(super) model_fields: HashMap<(crate::ModelId, StringId), ast::FieldId>, + pub(super) composite_type_fields: HashMap<(crate::CompositeTypeId, StringId), ast::FieldId>, } /// `resolve_names()` is responsible for populating `ParserDatabase.names` and @@ -35,7 +35,7 @@ pub(super) fn resolve_names(ctx: &mut Context<'_>) { let mut tmp_names: HashSet<&str> = HashSet::default(); // throwaway container for duplicate checking let mut names = Names::default(); - for (top_id, top) in ctx.ast.iter_tops() { + for ((file_id, top_id), top) in ctx.iter_tops() { assert_is_not_a_reserved_scalar_type(top.identifier(), ctx); let namespace = match (top_id, top) { @@ -70,7 +70,11 @@ pub(super) fn resolve_names(ctx: &mut Context<'_>) { validate_attribute_identifiers(field, ctx); let field_name_id = ctx.interner.intern(field.name()); - if names.model_fields.insert((model_id, field_name_id), field_id).is_some() { + if names + .model_fields + .insert(((file_id, model_id), field_name_id), field_id) + .is_some() + { ctx.push_error(DatamodelError::new_duplicate_field_error( model.name(), field.name(), @@ -92,7 +96,11 @@ pub(super) fn resolve_names(ctx: &mut Context<'_>) { validate_attribute_identifiers(field, ctx); let field_name_id = ctx.interner.intern(field.name()); - if names.model_fields.insert((model_id, field_name_id), field_id).is_some() { + if names + .model_fields + .insert(((file_id, model_id), field_name_id), field_id) + .is_some() + { ctx.push_error(DatamodelError::new_duplicate_field_error( model.name(), field.name(), @@ -112,7 +120,7 @@ pub(super) fn resolve_names(ctx: &mut Context<'_>) { // Check that there is no duplicate field on the composite type if names .composite_type_fields - .insert((ctid, field_name_id), field_id) + 
.insert(((file_id, ctid), field_name_id), field_id) .is_some() { ctx.push_error(DatamodelError::new_composite_type_duplicate_field_error( @@ -136,16 +144,22 @@ pub(super) fn resolve_names(ctx: &mut Context<'_>) { _ => unreachable!(), }; - insert_name(top_id, top, namespace, ctx) + insert_name(file_id, top_id, top, namespace, ctx) } let _ = std::mem::replace(ctx.names, names); } -fn insert_name(top_id: TopId, top: &ast::Top, namespace: &mut HashMap, ctx: &mut Context<'_>) { +fn insert_name( + file_id: FileId, + top_id: TopId, + top: &ast::Top, + namespace: &mut HashMap, + ctx: &mut Context<'_>, +) { let name = ctx.interner.intern(top.name()); - if let Some(existing) = namespace.insert(name, top_id) { - ctx.push_error(duplicate_top_error(&ctx.ast[existing], top)); + if let Some(existing_top) = namespace.insert(name, (file_id, top_id)) { + ctx.push_error(duplicate_top_error(&ctx.asts[existing_top], top)); } } @@ -159,7 +173,7 @@ fn duplicate_top_error(existing: &ast::Top, duplicate: &ast::Top) -> DatamodelEr } fn assert_is_not_a_reserved_scalar_type(ident: &ast::Identifier, ctx: &mut Context<'_>) { - if ScalarType::try_from_str(&ident.name).is_some() { + if ScalarType::try_from_str(&ident.name, false).is_some() { ctx.push_error(DatamodelError::new_reserved_scalar_type_error(&ident.name, ident.span)); } } diff --git a/psl/parser-database/src/relations.rs b/psl/parser-database/src/relations.rs index 33bc8236cffa..0c1e0a454c69 100644 --- a/psl/parser-database/src/relations.rs +++ b/psl/parser-database/src/relations.rs @@ -2,7 +2,7 @@ use crate::{ ast::{self, WithName}, interner::StringId, walkers::RelationFieldId, - DatamodelError, Diagnostics, + DatamodelError, Diagnostics, FileId, {context::Context, types::RelationField}, }; use enumflags2::bitflags; @@ -75,11 +75,11 @@ pub(crate) struct Relations { /// (model_a, model_b, relation_idx) /// /// This can be interpreted as the relations _from_ a model. 
- forward: BTreeSet<(ast::ModelId, ast::ModelId, RelationId)>, + forward: BTreeSet<(crate::ModelId, crate::ModelId, RelationId)>, /// (model_b, model_a, relation_idx) /// /// This can be interpreted as the relations _to_ a model. - back: BTreeSet<(ast::ModelId, ast::ModelId, RelationId)>, + back: BTreeSet<(crate::ModelId, crate::ModelId, RelationId)>, } impl std::ops::Index for Relations { @@ -117,17 +117,23 @@ impl Relations { /// Iterator over relations where the provided model is model A, or the forward side of the /// relation. #[allow(clippy::wrong_self_convention)] // this is the name we want - pub(crate) fn from_model(&self, model_a_id: ast::ModelId) -> impl Iterator + '_ { + pub(crate) fn from_model(&self, model_a_id: crate::ModelId) -> impl Iterator + '_ { self.forward - .range((model_a_id, ast::ModelId::ZERO, RelationId::MIN)..(model_a_id, ast::ModelId::MAX, RelationId::MAX)) + .range( + (model_a_id, (FileId::ZERO, ast::ModelId::ZERO), RelationId::MIN) + ..(model_a_id, (FileId::MAX, ast::ModelId::MAX), RelationId::MAX), + ) .map(move |(_, _, relation_id)| *relation_id) } /// Iterator over relationss where the provided model is model B, or the backrelation side of /// the relation. - pub(crate) fn to_model(&self, model_a_id: ast::ModelId) -> impl Iterator + '_ { + pub(crate) fn to_model(&self, model_a_id: crate::ModelId) -> impl Iterator + '_ { self.back - .range((model_a_id, ast::ModelId::ZERO, RelationId::MIN)..(model_a_id, ast::ModelId::MAX, RelationId::MAX)) + .range( + (model_a_id, (FileId::ZERO, ast::ModelId::ZERO), RelationId::MIN) + ..(model_a_id, (FileId::MAX, ast::ModelId::MAX), RelationId::MAX), + ) .map(move |(_, _, relation_id)| *relation_id) } } @@ -180,8 +186,8 @@ pub(crate) struct Relation { /// The `name` argument in `@relation`. 
pub(super) relation_name: Option, pub(super) attributes: RelationAttributes, - pub(super) model_a: ast::ModelId, - pub(super) model_b: ast::ModelId, + pub(super) model_a: crate::ModelId, + pub(super) model_b: crate::ModelId, } impl Relation { @@ -209,7 +215,6 @@ impl Relation { // Implementation detail for this module. Should stay private. pub(super) struct RelationEvidence<'db> { pub(super) ast_model: &'db ast::Model, - pub(super) model_id: ast::ModelId, pub(super) ast_field: &'db ast::Field, pub(super) field_id: RelationFieldId, pub(super) is_self_relation: bool, @@ -219,14 +224,26 @@ pub(super) struct RelationEvidence<'db> { pub(super) opposite_relation_field: Option<(RelationFieldId, &'db ast::Field, &'db RelationField)>, } +impl RelationEvidence<'_> { + fn model_id(&self) -> crate::ModelId { + self.relation_field.model_id + } + + fn referenced_model_id(&self) -> crate::ModelId { + self.relation_field.referenced_model + } +} + pub(super) fn relation_evidence<'db>( (relation_field_id, relation_field): (RelationFieldId, &'db RelationField), ctx: &'db Context<'db>, ) -> RelationEvidence<'db> { - let ast = ctx.ast; - let ast_model = &ast[relation_field.model_id]; + let rf = &ctx.types[relation_field_id]; + let referencing_ast = &ctx.asts[rf.model_id.0].2; + let referenced_ast = &ctx.asts[rf.referenced_model.0].2; + let ast_model = &referencing_ast[relation_field.model_id.1]; let ast_field = &ast_model[relation_field.field_id]; - let opposite_model = &ast[relation_field.referenced_model]; + let opposite_model = &referenced_ast[relation_field.referenced_model.1]; let is_self_relation = relation_field.model_id == relation_field.referenced_model; let opposite_relation_field: Option<(RelationFieldId, &ast::Field, &'db RelationField)> = ctx .types @@ -238,7 +255,13 @@ pub(super) fn relation_evidence<'db>( !is_self_relation || opposite_relation_field.field_id != relation_field.field_id }) .find(|(_, opposite_relation_field)| opposite_relation_field.name == 
relation_field.name) - .map(|(opp_field_id, opp_rf)| (opp_field_id, &ast[opp_rf.model_id][opp_rf.field_id], opp_rf)); + .map(|(opp_field_id, opp_rf)| { + ( + opp_field_id, + &referenced_ast[opp_rf.model_id.1][opp_rf.field_id], + opp_rf, + ) + }); let is_two_way_embedded_many_to_many_relation = match (relation_field, opposite_relation_field) { (left, Some((_, _, right))) => left.fields.is_some() || right.fields.is_some(), @@ -247,7 +270,6 @@ pub(super) fn relation_evidence<'db>( RelationEvidence { ast_model, - model_id: relation_field.model_id, ast_field, field_id: relation_field_id, relation_field, @@ -359,7 +381,7 @@ pub(super) fn ingest_relation<'db>(evidence: RelationEvidence<'db>, relations: & match &evidence.relation_field.fields { Some(fields) => { let fields_are_unique = - ctx.types.model_attributes[&evidence.model_id] + ctx.types.model_attributes[&evidence.model_id()] .ast_indexes .iter() .any(|(_, idx)| { @@ -387,14 +409,14 @@ pub(super) fn ingest_relation<'db>(evidence: RelationEvidence<'db>, relations: & RelationAttributes::OneToMany(OneToManyRelationFields::Back(_)) => Relation { attributes: relation_type, relation_name: evidence.relation_field.name, - model_a: evidence.relation_field.referenced_model, - model_b: evidence.model_id, + model_a: evidence.referenced_model_id(), + model_b: evidence.model_id(), }, _ => Relation { attributes: relation_type, relation_name: evidence.relation_field.name, - model_a: evidence.model_id, - model_b: evidence.relation_field.referenced_model, + model_a: evidence.model_id(), + model_b: evidence.referenced_model_id(), }, }; @@ -408,11 +430,11 @@ pub(super) fn ingest_relation<'db>(evidence: RelationEvidence<'db>, relations: & relations .forward - .insert((evidence.model_id, evidence.relation_field.referenced_model, relation_id)); + .insert((evidence.model_id(), evidence.referenced_model_id(), relation_id)); relations .back - .insert((evidence.relation_field.referenced_model, evidence.model_id, relation_id)); + 
.insert((evidence.referenced_model_id(), evidence.model_id(), relation_id)); } /// An action describing the way referential integrity is managed in the system. diff --git a/psl/parser-database/src/types.rs b/psl/parser-database/src/types.rs index 1668243247bb..c7626e08649d 100644 --- a/psl/parser-database/src/types.rs +++ b/psl/parser-database/src/types.rs @@ -8,11 +8,13 @@ use schema_ast::ast::{self, WithName}; use std::{collections::BTreeMap, fmt}; pub(super) fn resolve_types(ctx: &mut Context<'_>) { - for (top_id, top) in ctx.ast.iter_tops() { + for ((file_id, top_id), top) in ctx.iter_tops() { match (top_id, top) { - (ast::TopId::Model(model_id), ast::Top::Model(model)) => visit_model(model_id, model, ctx), + (ast::TopId::Model(model_id), ast::Top::Model(model)) => visit_model((file_id, model_id), model, ctx), (ast::TopId::Enum(_), ast::Top::Enum(enm)) => visit_enum(enm, ctx), - (ast::TopId::CompositeType(ct_id), ast::Top::CompositeType(ct)) => visit_composite_type(ct_id, ct, ctx), + (ast::TopId::CompositeType(ct_id), ast::Top::CompositeType(ct)) => { + visit_composite_type((file_id, ct_id), ct, ctx) + } (_, ast::Top::Source(_)) | (_, ast::Top::Generator(_)) => (), _ => unreachable!(), } @@ -21,13 +23,13 @@ pub(super) fn resolve_types(ctx: &mut Context<'_>) { #[derive(Debug, Default)] pub(super) struct Types { - pub(super) composite_type_fields: BTreeMap<(ast::CompositeTypeId, ast::FieldId), CompositeTypeField>, + pub(super) composite_type_fields: BTreeMap<(crate::CompositeTypeId, ast::FieldId), CompositeTypeField>, scalar_fields: Vec, /// This contains only the relation fields actually present in the schema /// source text. relation_fields: Vec, - pub(super) enum_attributes: HashMap, - pub(super) model_attributes: HashMap, + pub(super) enum_attributes: HashMap, + pub(super) model_attributes: HashMap, /// Sorted array of scalar fields that have an `@default()` attribute with a function that is /// not part of the base Prisma ones. 
This is meant for later validation in the datamodel /// connector. @@ -37,7 +39,7 @@ pub(super) struct Types { impl Types { pub(super) fn find_model_scalar_field( &self, - model_id: ast::ModelId, + model_id: crate::ModelId, field_id: ast::FieldId, ) -> Option { self.scalar_fields @@ -48,7 +50,7 @@ impl Types { pub(super) fn range_model_scalar_fields( &self, - model_id: ast::ModelId, + model_id: crate::ModelId, ) -> impl Iterator + Clone { let start = self.scalar_fields.partition_point(|sf| sf.model_id < model_id); self.scalar_fields[start..] @@ -71,7 +73,7 @@ impl Types { pub(super) fn range_model_scalar_field_ids( &self, - model_id: ast::ModelId, + model_id: crate::ModelId, ) -> impl Iterator + Clone { let end = self.scalar_fields.partition_point(|sf| sf.model_id <= model_id); let start = self.scalar_fields[..end].partition_point(|sf| sf.model_id < model_id); @@ -80,7 +82,7 @@ impl Types { pub(super) fn range_model_relation_fields( &self, - model_id: ast::ModelId, + model_id: crate::ModelId, ) -> impl Iterator + Clone { let first_relation_field_idx = self.relation_fields.partition_point(|rf| rf.model_id < model_id); self.relation_fields[first_relation_field_idx..] 
@@ -90,7 +92,7 @@ impl Types { .map(move |(idx, rf)| (RelationFieldId((first_relation_field_idx + idx) as u32), rf)) } - pub(super) fn refine_field(&self, id: (ast::ModelId, ast::FieldId)) -> Either { + pub(super) fn refine_field(&self, id: (crate::ModelId, ast::FieldId)) -> Either { self.relation_fields .binary_search_by_key(&id, |rf| (rf.model_id, rf.field_id)) .map(|idx| Either::Left(RelationFieldId(idx as u32))) @@ -158,7 +160,7 @@ pub(super) struct CompositeTypeField { #[derive(Debug)] enum FieldType { - Model(ast::ModelId), + Model(crate::ModelId), Scalar(ScalarFieldType), } @@ -177,9 +179,9 @@ impl UnsupportedType { #[derive(Debug, Clone, Copy, PartialEq)] pub enum ScalarFieldType { /// A composite type - CompositeType(ast::CompositeTypeId), + CompositeType(crate::CompositeTypeId), /// An enum - Enum(ast::EnumId), + Enum(crate::EnumId), /// A Prisma scalar type BuiltInScalar(ScalarType), /// An `Unsupported("...")` type @@ -196,7 +198,7 @@ impl ScalarFieldType { } /// Try to interpret this field type as a Composite Type. - pub fn as_composite_type(self) -> Option { + pub fn as_composite_type(self) -> Option { match self { ScalarFieldType::CompositeType(id) => Some(id), _ => None, @@ -204,7 +206,7 @@ impl ScalarFieldType { } /// Try to interpret this field type as an enum. 
- pub fn as_enum(self) -> Option { + pub fn as_enum(self) -> Option { match self { ScalarFieldType::Enum(id) => Some(id), _ => None, @@ -261,12 +263,12 @@ impl ScalarFieldType { pub(crate) struct DefaultAttribute { pub(crate) mapped_name: Option, pub(crate) argument_idx: usize, - pub(crate) default_attribute: ast::AttributeId, + pub(crate) default_attribute: crate::AttributeId, } #[derive(Debug)] pub(crate) struct ScalarField { - pub(crate) model_id: ast::ModelId, + pub(crate) model_id: crate::ModelId, pub(crate) field_id: ast::FieldId, pub(crate) r#type: ScalarFieldType, pub(crate) is_ignored: bool, @@ -284,9 +286,9 @@ pub(crate) struct ScalarField { #[derive(Debug)] pub(crate) struct RelationField { - pub(crate) model_id: ast::ModelId, + pub(crate) model_id: crate::ModelId, pub(crate) field_id: ast::FieldId, - pub(crate) referenced_model: ast::ModelId, + pub(crate) referenced_model: crate::ModelId, pub(crate) on_delete: Option<(crate::ReferentialAction, ast::Span)>, pub(crate) on_update: Option<(crate::ReferentialAction, ast::Span)>, /// The fields _explicitly present_ in the AST. @@ -302,7 +304,7 @@ pub(crate) struct RelationField { } impl RelationField { - fn new(model_id: ast::ModelId, field_id: ast::FieldId, referenced_model: ast::ModelId) -> Self { + fn new(model_id: crate::ModelId, field_id: ast::FieldId, referenced_model: crate::ModelId) -> Self { RelationField { model_id, field_id, @@ -491,7 +493,7 @@ impl IndexAttribute { pub(crate) struct IdAttribute { pub(crate) fields: Vec, pub(super) source_field: Option, - pub(super) source_attribute: ast::AttributeId, + pub(super) source_attribute: crate::AttributeId, pub(super) name: Option, pub(super) mapped_name: Option, pub(super) clustered: Option, @@ -545,7 +547,7 @@ pub struct IndexFieldPath { /// // ^this one is the path. 
in this case a vector of one element /// } /// ``` - path: Vec<(ast::CompositeTypeId, ast::FieldId)>, + path: Vec<(crate::CompositeTypeId, ast::FieldId)>, } impl IndexFieldPath { @@ -553,7 +555,7 @@ impl IndexFieldPath { Self { root, path: Vec::new() } } - pub(crate) fn push_field(&mut self, ctid: ast::CompositeTypeId, field_id: ast::FieldId) { + pub(crate) fn push_field(&mut self, ctid: crate::CompositeTypeId, field_id: ast::FieldId) { self.path.push((ctid, field_id)); } @@ -593,7 +595,7 @@ impl IndexFieldPath { /// @@index([a.field]) /// } /// ``` - pub fn path(&self) -> &[(ast::CompositeTypeId, ast::FieldId)] { + pub fn path(&self) -> &[(crate::CompositeTypeId, ast::FieldId)] { &self.path } @@ -601,10 +603,10 @@ impl IndexFieldPath { /// or in a composite type embedded in the model. Returns the same value as /// the [`root`](Self::root()) method if the field is in a model rather than in a /// composite type. - pub fn field_in_index(&self) -> Either { + pub fn field_in_index(&self) -> Either { self.path .last() - .map(|id| Either::Right(*id)) + .map(|(ct, field)| Either::Right((*ct, *field))) .unwrap_or(Either::Left(self.root)) } } @@ -629,7 +631,7 @@ pub(super) struct EnumAttributes { pub(crate) schema: Option<(StringId, ast::Span)>, } -fn visit_model<'db>(model_id: ast::ModelId, ast_model: &'db ast::Model, ctx: &mut Context<'db>) { +fn visit_model<'db>(model_id: crate::ModelId, ast_model: &'db ast::Model, ctx: &mut Context<'db>) { for (field_id, ast_field) in ast_model.iter_fields() { match field_type(ast_field, ctx) { Ok(FieldType::Model(referenced_model)) => { @@ -648,15 +650,45 @@ fn visit_model<'db>(model_id: ast::ModelId, ast_model: &'db ast::Model, ctx: &mu native_type: None, }); } - Err(supported) => ctx.push_error(DatamodelError::new_type_not_found_error( - supported, - ast_field.field_type.span(), - )), + Err(supported) => { + let top_names: Vec<_> = ctx + .iter_tops() + .filter_map(|(_, top)| match top { + ast::Top::Source(_) | ast::Top::Generator(_) 
=> None, + _ => Some(&top.identifier().name), + }) + .collect(); + + match top_names.iter().find(|&name| name.to_lowercase() == supported) { + Some(ignore_case_match) => { + ctx.push_error(DatamodelError::new_type_for_case_not_found_error( + supported, + ignore_case_match.as_str(), + ast_field.field_type.span(), + )); + } + None => match ScalarType::try_from_str(supported, true) { + Some(ignore_case_match) => { + ctx.push_error(DatamodelError::new_type_for_case_not_found_error( + supported, + ignore_case_match.as_str(), + ast_field.field_type.span(), + )); + } + None => { + ctx.push_error(DatamodelError::new_type_not_found_error( + supported, + ast_field.field_type.span(), + )); + } + }, + } + } } } } -fn visit_composite_type<'db>(ct_id: ast::CompositeTypeId, ct: &'db ast::CompositeType, ctx: &mut Context<'db>) { +fn visit_composite_type<'db>(ct_id: crate::CompositeTypeId, ct: &'db ast::CompositeType, ctx: &mut Context<'db>) { for (field_id, ast_field) in ct.iter_fields() { match field_type(ast_field, ctx) { Ok(FieldType::Scalar(scalar_type)) => { @@ -669,7 +701,7 @@ fn visit_composite_type<'db>(ct_id: ast::CompositeTypeId, ct: &'db ast::Composit ctx.types.composite_type_fields.insert((ct_id, field_id), field); } Ok(FieldType::Model(referenced_model_id)) => { - let referenced_model_name = ctx.ast[referenced_model_id].name(); + let referenced_model_name = ctx.asts[referenced_model_id].name(); ctx.push_error(DatamodelError::new_composite_type_validation_error(&format!("{referenced_model_name} refers to a model, making this a relation field. 
Relation fields inside composite types are not supported."), ct.name(), ast_field.field_type.span())) } Err(supported) => ctx.push_error(DatamodelError::new_type_not_found_error( @@ -699,17 +731,24 @@ fn field_type<'db>(field: &'db ast::Field, ctx: &mut Context<'db>) -> Result Ok(FieldType::Model(model_id)), - Some((ast::TopId::Enum(enum_id), ast::Top::Enum(_))) => Ok(FieldType::Scalar(ScalarFieldType::Enum(enum_id))), - Some((ast::TopId::CompositeType(ctid), ast::Top::CompositeType(_))) => { - Ok(FieldType::Scalar(ScalarFieldType::CompositeType(ctid))) + match ctx + .names + .tops + .get(&supported_string_id) + .map(|id| (id.0, id.1, &ctx.asts[*id])) + { + Some((file_id, ast::TopId::Model(model_id), ast::Top::Model(_))) => Ok(FieldType::Model((file_id, model_id))), + Some((file_id, ast::TopId::Enum(enum_id), ast::Top::Enum(_))) => { + Ok(FieldType::Scalar(ScalarFieldType::Enum((file_id, enum_id)))) } - Some((_, ast::Top::Generator(_))) | Some((_, ast::Top::Source(_))) => unreachable!(), + Some((file_id, ast::TopId::CompositeType(ctid), ast::Top::CompositeType(_))) => { + Ok(FieldType::Scalar(ScalarFieldType::CompositeType((file_id, ctid)))) + } + Some((_, _, ast::Top::Generator(_))) | Some((_, _, ast::Top::Source(_))) => unreachable!(), None => Err(supported), _ => unreachable!(), } @@ -1423,18 +1462,32 @@ impl ScalarType { matches!(self, ScalarType::Bytes) } - pub(crate) fn try_from_str(s: &str) -> Option { - match s { - "Int" => Some(ScalarType::Int), - "BigInt" => Some(ScalarType::BigInt), - "Float" => Some(ScalarType::Float), - "Boolean" => Some(ScalarType::Boolean), - "String" => Some(ScalarType::String), - "DateTime" => Some(ScalarType::DateTime), - "Json" => Some(ScalarType::Json), - "Bytes" => Some(ScalarType::Bytes), - "Decimal" => Some(ScalarType::Decimal), - _ => None, + pub(crate) fn try_from_str(s: &str, ignore_case: bool) -> Option { + match ignore_case { + true => match s.to_lowercase().as_str() { + "int" => Some(ScalarType::Int), + "bigint" => 
Some(ScalarType::BigInt), + "float" => Some(ScalarType::Float), + "boolean" => Some(ScalarType::Boolean), + "string" => Some(ScalarType::String), + "datetime" => Some(ScalarType::DateTime), + "json" => Some(ScalarType::Json), + "bytes" => Some(ScalarType::Bytes), + "decimal" => Some(ScalarType::Decimal), + _ => None, + }, + _ => match s { + "Int" => Some(ScalarType::Int), + "BigInt" => Some(ScalarType::BigInt), + "Float" => Some(ScalarType::Float), + "Boolean" => Some(ScalarType::Boolean), + "String" => Some(ScalarType::String), + "DateTime" => Some(ScalarType::DateTime), + "Json" => Some(ScalarType::Json), + "Bytes" => Some(ScalarType::Bytes), + "Decimal" => Some(ScalarType::Decimal), + _ => None, + }, } } } diff --git a/psl/parser-database/src/walkers.rs b/psl/parser-database/src/walkers.rs index 7ee92e3e3f70..abfe290b5bd6 100644 --- a/psl/parser-database/src/walkers.rs +++ b/psl/parser-database/src/walkers.rs @@ -25,6 +25,8 @@ pub use relation::*; pub use relation_field::*; pub use scalar_field::*; +use crate::{ast, FileId}; + /// A generic walker. Only walkers intantiated with a concrete ID type (`I`) are useful. #[derive(Clone, Copy)] pub struct Walker<'db, I> { @@ -52,12 +54,18 @@ where } impl crate::ParserDatabase { + fn iter_tops(&self) -> impl Iterator + '_ { + self.asts + .iter() + .flat_map(move |(file_id, _, _, ast)| ast.iter_tops().map(move |(top_id, top)| (file_id, top_id, top))) + } + /// Find an enum by name. 
pub fn find_enum<'db>(&'db self, name: &str) -> Option> { self.interner .lookup(name) .and_then(|name_id| self.names.tops.get(&name_id)) - .and_then(|top_id| top_id.as_enum_id()) + .and_then(|(file_id, top_id)| top_id.as_enum_id().map(|id| (*file_id, id))) .map(|enum_id| self.walk(enum_id)) } @@ -66,7 +74,7 @@ impl crate::ParserDatabase { self.interner .lookup(name) .and_then(|name_id| self.names.tops.get(&name_id)) - .and_then(|top_id| top_id.as_model_id()) + .and_then(|(file_id, top_id)| top_id.as_model_id().map(|id| (*file_id, id))) .map(|model_id| self.walk(model_id)) } @@ -77,35 +85,31 @@ impl crate::ParserDatabase { /// Walk all enums in the schema. pub fn walk_enums(&self) -> impl Iterator> { - self.ast() - .iter_tops() - .filter_map(|(top_id, _)| top_id.as_enum_id()) - .map(move |enum_id| Walker { db: self, id: enum_id }) + self.iter_tops() + .filter_map(|(file_id, top_id, _)| top_id.as_enum_id().map(|id| (file_id, id))) + .map(move |enum_id| self.walk(enum_id)) } /// Walk all the models in the schema. pub fn walk_models(&self) -> impl Iterator> + '_ { - self.ast() - .iter_tops() - .filter_map(|(top_id, _)| top_id.as_model_id()) - .map(move |model_id| self.walk(model_id)) + self.iter_tops() + .filter_map(|(file_id, top_id, _)| top_id.as_model_id().map(|id| (file_id, id))) + .map(move |(file_id, model_id)| self.walk((file_id, model_id))) .filter(|m| !m.ast_model().is_view()) } /// Walk all the views in the schema. pub fn walk_views(&self) -> impl Iterator> + '_ { - self.ast() - .iter_tops() - .filter_map(|(top_id, _)| top_id.as_model_id()) + self.iter_tops() + .filter_map(|(file_id, top_id, _)| top_id.as_model_id().map(|id| (file_id, id))) .map(move |model_id| self.walk(model_id)) .filter(|m| m.ast_model().is_view()) } /// Walk all the composite types in the schema. 
pub fn walk_composite_types(&self) -> impl Iterator> + '_ { - self.ast() - .iter_tops() - .filter_map(|(top_id, _)| top_id.as_composite_type_id()) + self.iter_tops() + .filter_map(|(file_id, top_id, _)| top_id.as_composite_type_id().map(|id| (file_id, id))) .map(|id| self.walk(id)) } diff --git a/psl/parser-database/src/walkers/composite_type.rs b/psl/parser-database/src/walkers/composite_type.rs index f22648e286e1..af286e9d0f2d 100644 --- a/psl/parser-database/src/walkers/composite_type.rs +++ b/psl/parser-database/src/walkers/composite_type.rs @@ -1,5 +1,5 @@ use super::Walker; -use crate::{ast, ScalarFieldType, ScalarType}; +use crate::{ast, FileId, ScalarFieldType, ScalarType}; use diagnostics::Span; use schema_ast::ast::{WithDocumentation, WithName}; @@ -17,20 +17,20 @@ use schema_ast::ast::{WithDocumentation, WithName}; /// countryCode String /// } /// ``` -pub type CompositeTypeWalker<'db> = Walker<'db, ast::CompositeTypeId>; +pub type CompositeTypeWalker<'db> = Walker<'db, crate::CompositeTypeId>; /// A field in a composite type. -pub type CompositeTypeFieldWalker<'db> = Walker<'db, (ast::CompositeTypeId, ast::FieldId)>; +pub type CompositeTypeFieldWalker<'db> = Walker<'db, (crate::CompositeTypeId, ast::FieldId)>; impl<'db> CompositeTypeWalker<'db> { /// The ID of the composite type node in the AST. - pub fn composite_type_id(self) -> ast::CompositeTypeId { + pub fn composite_type_id(self) -> (FileId, ast::CompositeTypeId) { self.id } /// The composite type node in the AST. pub fn ast_composite_type(self) -> &'db ast::CompositeType { - &self.db.ast()[self.id] + &self.db.asts[self.id] } /// The name of the composite type in the schema. @@ -53,7 +53,7 @@ impl<'db> CompositeTypeFieldWalker<'db> { /// The AST node for the field. pub fn ast_field(self) -> &'db ast::Field { - &self.db.ast[self.id.0][self.id.1] + &self.db.asts[self.id.0][self.id.1] } /// The composite type containing the field. 
@@ -101,7 +101,10 @@ impl<'db> CompositeTypeFieldWalker<'db> { /// The `@default()` AST attribute on the field, if any. pub fn default_attribute(self) -> Option<&'db ast::Attribute> { - self.field().default.as_ref().map(|d| &self.db.ast[d.default_attribute]) + self.field() + .default + .as_ref() + .map(|d| &self.db.asts[(self.id.0 .0, d.default_attribute.1)]) } /// (attribute scope, native type name, arguments, span) diff --git a/psl/parser-database/src/walkers/enum.rs b/psl/parser-database/src/walkers/enum.rs index c97b420a59fa..07624527bb11 100644 --- a/psl/parser-database/src/walkers/enum.rs +++ b/psl/parser-database/src/walkers/enum.rs @@ -1,11 +1,10 @@ -use schema_ast::ast::{IndentationType, NewlineType}; - use crate::{ast, ast::WithDocumentation, types, walkers::Walker}; +use schema_ast::ast::{IndentationType, NewlineType}; /// An `enum` declaration in the schema. -pub type EnumWalker<'db> = Walker<'db, ast::EnumId>; +pub type EnumWalker<'db> = Walker<'db, crate::EnumId>; /// One value in an `enum` declaration in the schema. -pub type EnumValueWalker<'db> = Walker<'db, (ast::EnumId, usize)>; +pub type EnumValueWalker<'db> = Walker<'db, (crate::EnumId, usize)>; impl<'db> EnumWalker<'db> { fn attributes(self) -> &'db types::EnumAttributes { @@ -19,7 +18,7 @@ impl<'db> EnumWalker<'db> { /// The AST node. pub fn ast_enum(self) -> &'db ast::Enum { - &self.db.ast()[self.id] + &self.db.asts[self.id] } /// The database name of the enum. diff --git a/psl/parser-database/src/walkers/field.rs b/psl/parser-database/src/walkers/field.rs index d8babd993391..87bea6560344 100644 --- a/psl/parser-database/src/walkers/field.rs +++ b/psl/parser-database/src/walkers/field.rs @@ -6,12 +6,12 @@ use crate::{ use schema_ast::ast; /// A model field, scalar or relation. -pub type FieldWalker<'db> = Walker<'db, (ast::ModelId, ast::FieldId)>; +pub type FieldWalker<'db> = Walker<'db, (crate::ModelId, ast::FieldId)>; impl<'db> FieldWalker<'db> { /// The AST node for the field. 
pub fn ast_field(self) -> &'db ast::Field { - &self.db.ast[self.id.0][self.id.1] + &self.db.asts[self.id.0][self.id.1] } /// The field name. @@ -45,20 +45,14 @@ pub enum RefinedFieldWalker<'db> { impl<'db> From> for FieldWalker<'db> { fn from(w: ScalarFieldWalker<'db>) -> Self { let ScalarField { model_id, field_id, .. } = w.db.types[w.id]; - Walker { - db: w.db, - id: (model_id, field_id), - } + w.db.walk((model_id, field_id)) } } impl<'db> From> for FieldWalker<'db> { fn from(w: RelationFieldWalker<'db>) -> Self { let RelationField { model_id, field_id, .. } = w.db.types[w.id]; - Walker { - db: w.db, - id: (model_id, field_id), - } + w.db.walk((model_id, field_id)) } } diff --git a/psl/parser-database/src/walkers/index.rs b/psl/parser-database/src/walkers/index.rs index e75c4c58fc87..63b6b30b7b44 100644 --- a/psl/parser-database/src/walkers/index.rs +++ b/psl/parser-database/src/walkers/index.rs @@ -11,7 +11,7 @@ use crate::{ /// An index, unique or fulltext attribute. #[derive(Copy, Clone)] pub struct IndexWalker<'db> { - pub(crate) model_id: ast::ModelId, + pub(crate) model_id: crate::ModelId, pub(crate) index: ast::AttributeId, pub(crate) db: &'db ParserDatabase, pub(crate) index_attribute: &'db IndexAttribute, @@ -69,7 +69,7 @@ impl<'db> IndexWalker<'db> { /// The AST node of the index/unique attribute. pub fn ast_attribute(self) -> &'db ast::Attribute { - &self.db.ast[self.index] + &self.db.asts[(self.model_id.0, self.index)] } pub(crate) fn attribute(self) -> &'db IndexAttribute { diff --git a/psl/parser-database/src/walkers/model.rs b/psl/parser-database/src/walkers/model.rs index 313efd0ca819..e4290a1a00f7 100644 --- a/psl/parser-database/src/walkers/model.rs +++ b/psl/parser-database/src/walkers/model.rs @@ -12,11 +12,12 @@ use super::{ use crate::{ ast::{self, WithName}, types::ModelAttributes, + FileId, }; use schema_ast::ast::{IndentationType, NewlineType, WithSpan}; /// A `model` declaration in the Prisma schema. 
-pub type ModelWalker<'db> = super::Walker<'db, ast::ModelId>; +pub type ModelWalker<'db> = super::Walker<'db, (FileId, ast::ModelId)>; impl<'db> ModelWalker<'db> { /// The name of the model. @@ -59,14 +60,9 @@ impl<'db> ModelWalker<'db> { .is_some() } - /// The ID of the model in the db - pub fn model_id(self) -> ast::ModelId { - self.id - } - /// The AST node. pub fn ast_model(self) -> &'db ast::Model { - &self.db.ast[self.id] + &self.db.asts[self.id] } /// The parsed attributes. @@ -86,7 +82,7 @@ impl<'db> ModelWalker<'db> { self.attributes() .mapped_name .map(|id| &self.db[id]) - .unwrap_or_else(|| self.db.ast[self.id].name()) + .unwrap_or_else(|| self.ast_model().name()) } /// Used in validation. True only if the model has a single field id. @@ -216,7 +212,7 @@ impl<'db> ModelWalker<'db> { None => return IndentationType::default(), }; - let src = self.db.source(); + let src = self.db.source(self.id.0); let start = field.ast_field().span().start; let mut spaces = 0; @@ -241,7 +237,7 @@ impl<'db> ModelWalker<'db> { None => return NewlineType::default(), }; - let src = self.db.source(); + let src = self.db.source(self.id.0); let start = field.ast_field().span().end - 2; match src.chars().nth(start) { diff --git a/psl/parser-database/src/walkers/model/primary_key.rs b/psl/parser-database/src/walkers/model/primary_key.rs index ba3de30ea633..71792dce770b 100644 --- a/psl/parser-database/src/walkers/model/primary_key.rs +++ b/psl/parser-database/src/walkers/model/primary_key.rs @@ -8,7 +8,7 @@ use crate::{ /// An `@(@)id` attribute in the schema. #[derive(Copy, Clone)] pub struct PrimaryKeyWalker<'db> { - pub(crate) model_id: ast::ModelId, + pub(crate) model_id: crate::ModelId, pub(crate) attribute: &'db IdAttribute, pub(crate) db: &'db ParserDatabase, } @@ -16,7 +16,7 @@ pub struct PrimaryKeyWalker<'db> { impl<'db> PrimaryKeyWalker<'db> { /// The `@(@)id` AST node. 
pub fn ast_attribute(self) -> &'db ast::Attribute { - &self.db.ast[self.attribute.source_attribute] + &self.db.asts[(self.model_id.0, self.attribute.source_attribute.1)] } /// The mapped name of the id. diff --git a/psl/parser-database/src/walkers/relation.rs b/psl/parser-database/src/walkers/relation.rs index 1557633fbc0b..26e3ec61e052 100644 --- a/psl/parser-database/src/walkers/relation.rs +++ b/psl/parser-database/src/walkers/relation.rs @@ -14,7 +14,7 @@ pub type RelationWalker<'db> = Walker<'db, RelationId>; impl<'db> RelationWalker<'db> { /// The models at each end of the relation. [model A, model B]. Can be the same model twice. - pub fn models(self) -> [ast::ModelId; 2] { + pub fn models(self) -> [(FileId, ast::ModelId); 2] { let rel = self.get(); [rel.model_a, rel.model_b] } diff --git a/psl/parser-database/src/walkers/relation_field.rs b/psl/parser-database/src/walkers/relation_field.rs index b96380f03bf6..7f6b2e8037a4 100644 --- a/psl/parser-database/src/walkers/relation_field.rs +++ b/psl/parser-database/src/walkers/relation_field.rs @@ -28,7 +28,7 @@ impl<'db> RelationFieldWalker<'db> { /// The AST node of the field. pub fn ast_field(self) -> &'db ast::Field { let RelationField { model_id, field_id, .. } = self.db.types[self.id]; - &self.db.ast[model_id][field_id] + &self.db.asts[model_id][field_id] } pub(crate) fn attributes(self) -> &'db RelationField { @@ -83,11 +83,12 @@ impl<'db> RelationFieldWalker<'db> { /// The `@relation` attribute in the field AST. pub fn relation_attribute(self) -> Option<&'db ast::Attribute> { - self.attributes().relation_attribute.map(|id| &self.db.ast[id]) + let attrs = self.attributes(); + attrs.relation_attribute.map(|id| &self.db.asts[(attrs.model_id.0, id)]) } /// Does the relation field reference the passed in model? 
- pub fn references_model(self, other: ast::ModelId) -> bool { + pub fn references_model(self, other: crate::ModelId) -> bool { self.attributes().referenced_model == other } diff --git a/psl/parser-database/src/walkers/scalar_field.rs b/psl/parser-database/src/walkers/scalar_field.rs index 9cea79b8485a..7a9a0984584a 100644 --- a/psl/parser-database/src/walkers/scalar_field.rs +++ b/psl/parser-database/src/walkers/scalar_field.rs @@ -19,7 +19,7 @@ impl<'db> ScalarFieldWalker<'db> { /// The field node in the AST. pub fn ast_field(self) -> &'db ast::Field { let ScalarField { model_id, field_id, .. } = self.attributes(); - &self.db.ast[*model_id][*field_id] + &self.db.asts[*model_id][*field_id] } /// Is this field unique? This method will return true if: @@ -53,7 +53,7 @@ impl<'db> ScalarFieldWalker<'db> { .default .as_ref() .map(|d| d.default_attribute) - .map(|id| &self.db.ast[id]) + .map(|id| &self.db.asts[id]) } /// The final database name of the field. See crate docs for explanations on database names. @@ -169,7 +169,7 @@ pub struct DefaultValueWalker<'db> { impl<'db> DefaultValueWalker<'db> { /// The AST node of the attribute. pub fn ast_attribute(self) -> &'db ast::Attribute { - &self.db.ast[self.default.default_attribute] + &self.db.asts[self.default.default_attribute] } /// The value expression in the `@default` attribute. 
@@ -374,7 +374,7 @@ impl<'db> ScalarFieldAttributeWalker<'db> { let mut result = vec![(root_name, None)]; for (ctid, field_id) in path.path() { - let ct = &self.db.ast[*ctid]; + let ct = &self.db.asts[*ctid]; let field = ct[*field_id].name(); result.push((field, Some(ct.name()))); @@ -400,7 +400,7 @@ impl<'db> ScalarFieldAttributeWalker<'db> { let mut result = vec![(root, None)]; for (ctid, field_id) in path.path() { - let ct = &self.db.ast[*ctid]; + let ct = &self.db.asts[*ctid]; let field = &self.db.types.composite_type_fields[&(*ctid, *field_id)] .mapped_name diff --git a/psl/psl-core/src/builtin_connectors/cockroach_datamodel_connector.rs b/psl/psl-core/src/builtin_connectors/cockroach_datamodel_connector.rs index 03b312ba3574..c5c9334fe981 100644 --- a/psl/psl-core/src/builtin_connectors/cockroach_datamodel_connector.rs +++ b/psl/psl-core/src/builtin_connectors/cockroach_datamodel_connector.rs @@ -62,7 +62,8 @@ const CAPABILITIES: ConnectorCapabilities = enumflags2::make_bitflags!(Connector RowIn | DeleteReturning | SupportsFiltersOnRelationsWithoutJoins | - LateralJoin + LateralJoin | + SupportsDefaultInInsert }); const SCALAR_TYPE_DEFAULTS: &[(ScalarType, CockroachType)] = &[ diff --git a/psl/psl-core/src/builtin_connectors/mongodb.rs b/psl/psl-core/src/builtin_connectors/mongodb.rs index 814f3f60fd48..1034521fac1d 100644 --- a/psl/psl-core/src/builtin_connectors/mongodb.rs +++ b/psl/psl-core/src/builtin_connectors/mongodb.rs @@ -31,7 +31,11 @@ const CAPABILITIES: ConnectorCapabilities = enumflags2::make_bitflags!(Connector DefaultValueAuto | TwoWayEmbeddedManyToManyRelation | UndefinedType | - DeleteReturning + DeleteReturning | + // MongoDB does not have a notion of default values for fields. + // This capability is enabled as a performance optimisation to avoid issuing multiple queries + // when using `createMany()` with MongoDB. 
+ SupportsDefaultInInsert }); pub(crate) struct MongoDbDatamodelConnector; diff --git a/psl/psl-core/src/builtin_connectors/mssql_datamodel_connector.rs b/psl/psl-core/src/builtin_connectors/mssql_datamodel_connector.rs index 2146e2b95a1d..9fe851aa94e5 100644 --- a/psl/psl-core/src/builtin_connectors/mssql_datamodel_connector.rs +++ b/psl/psl-core/src/builtin_connectors/mssql_datamodel_connector.rs @@ -51,7 +51,8 @@ const CAPABILITIES: ConnectorCapabilities = enumflags2::make_bitflags!(Connector SupportsTxIsolationRepeatableRead | SupportsTxIsolationSerializable | SupportsTxIsolationSnapshot | - SupportsFiltersOnRelationsWithoutJoins + SupportsFiltersOnRelationsWithoutJoins | + SupportsDefaultInInsert // InsertReturning | DeleteReturning - unimplemented. }); diff --git a/psl/psl-core/src/builtin_connectors/mysql_datamodel_connector.rs b/psl/psl-core/src/builtin_connectors/mysql_datamodel_connector.rs index 4240525bc5e3..1d91e590981a 100644 --- a/psl/psl-core/src/builtin_connectors/mysql_datamodel_connector.rs +++ b/psl/psl-core/src/builtin_connectors/mysql_datamodel_connector.rs @@ -68,7 +68,8 @@ pub const CAPABILITIES: ConnectorCapabilities = enumflags2::make_bitflags!(Conne SupportsTxIsolationSerializable | RowIn | SupportsFiltersOnRelationsWithoutJoins | - CorrelatedSubqueries + CorrelatedSubqueries | + SupportsDefaultInInsert }); const CONSTRAINT_SCOPES: &[ConstraintScope] = &[ConstraintScope::GlobalForeignKey, ConstraintScope::ModelKeyIndex]; diff --git a/psl/psl-core/src/builtin_connectors/postgres_datamodel_connector.rs b/psl/psl-core/src/builtin_connectors/postgres_datamodel_connector.rs index 35bcc30d0244..3bb04eed4514 100644 --- a/psl/psl-core/src/builtin_connectors/postgres_datamodel_connector.rs +++ b/psl/psl-core/src/builtin_connectors/postgres_datamodel_connector.rs @@ -71,7 +71,8 @@ pub const CAPABILITIES: ConnectorCapabilities = enumflags2::make_bitflags!(Conne DistinctOn | DeleteReturning | SupportsFiltersOnRelationsWithoutJoins | - LateralJoin + 
LateralJoin | + SupportsDefaultInInsert }); pub struct PostgresDatamodelConnector; @@ -496,7 +497,7 @@ impl Connector for PostgresDatamodelConnector { let index_field = db .walk_models() .chain(db.walk_views()) - .find(|model| model.model_id() == model_id) + .find(|model| model.id.1 == model_id) .and_then(|model| { model.indexes().find(|index| { index.attribute_id() diff --git a/psl/psl-core/src/builtin_connectors/sqlite_datamodel_connector.rs b/psl/psl-core/src/builtin_connectors/sqlite_datamodel_connector.rs index 4d5febb74b51..b58dd9e2bbd4 100644 --- a/psl/psl-core/src/builtin_connectors/sqlite_datamodel_connector.rs +++ b/psl/psl-core/src/builtin_connectors/sqlite_datamodel_connector.rs @@ -28,7 +28,9 @@ pub const CAPABILITIES: ConnectorCapabilities = enumflags2::make_bitflags!(Conne InsertReturning | DeleteReturning | UpdateReturning | - SupportsFiltersOnRelationsWithoutJoins + SupportsFiltersOnRelationsWithoutJoins | + CreateMany | + CreateManyWriteableAutoIncId }); pub struct SqliteDatamodelConnector; diff --git a/psl/psl-core/src/common/preview_features.rs b/psl/psl-core/src/common/preview_features.rs index 5b6349aa2b94..5c55acd32851 100644 --- a/psl/psl-core/src/common/preview_features.rs +++ b/psl/psl-core/src/common/preview_features.rs @@ -77,7 +77,8 @@ features!( TransactionApi, UncheckedScalarInputs, Views, - RelationJoins + RelationJoins, + PrismaSchemaFolder ); /// Generator preview features (alphabetically sorted) @@ -128,7 +129,7 @@ pub const ALL_PREVIEW_FEATURES: FeatureMap = FeatureMap { | TransactionApi | UncheckedScalarInputs }), - hidden: enumflags2::BitFlags::EMPTY, + hidden: enumflags2::make_bitflags!(PreviewFeature::{PrismaSchemaFolder}), }; #[derive(Debug)] diff --git a/psl/psl-core/src/configuration/configuration_struct.rs b/psl/psl-core/src/configuration/configuration_struct.rs index 3da58f6efdea..41d3d6ebf413 100644 --- a/psl/psl-core/src/configuration/configuration_struct.rs +++ b/psl/psl-core/src/configuration/configuration_struct.rs 
@@ -6,7 +6,7 @@ use crate::{ }; use enumflags2::BitFlags; -#[derive(Debug)] +#[derive(Debug, Default)] pub struct Configuration { pub generators: Vec, pub datasources: Vec, @@ -18,7 +18,7 @@ impl Configuration { if self.datasources.is_empty() { Err(DatamodelError::new_validation_error( "You defined no datasource. You must define exactly one datasource.", - schema_ast::ast::Span::new(0, 0), + schema_ast::ast::Span::new(0, 0, diagnostics::FileId::ZERO), ) .into()) } else { diff --git a/psl/psl-core/src/datamodel_connector/capabilities.rs b/psl/psl-core/src/datamodel_connector/capabilities.rs index b520e53841a2..cf3f36eeea13 100644 --- a/psl/psl-core/src/datamodel_connector/capabilities.rs +++ b/psl/psl-core/src/datamodel_connector/capabilities.rs @@ -74,6 +74,7 @@ capabilities!( InsensitiveFilters, CreateMany, CreateManyWriteableAutoIncId, + SupportsDefaultInInsert, // This capability is set if connector supports using `DEFAULT` instead of a value in the list of `INSERT` arguments. WritableAutoincField, CreateSkipDuplicates, UpdateableId, diff --git a/psl/psl-core/src/lib.rs b/psl/psl-core/src/lib.rs index ca0ce37cc0f1..03e1dca4356f 100644 --- a/psl/psl-core/src/lib.rs +++ b/psl/psl-core/src/lib.rs @@ -20,7 +20,7 @@ pub use crate::{ configuration::{ Configuration, Datasource, DatasourceConnectorData, Generator, GeneratorConfigValue, StringFromEnvVar, }, - reformat::reformat, + reformat::{reformat, reformat_multiple, reformat_validated_schema_into_single}, }; pub use diagnostics; pub use parser_database::{self, is_reserved_type_name}; @@ -52,14 +52,61 @@ impl ValidatedSchema { pub fn relation_mode(&self) -> datamodel_connector::RelationMode { self.relation_mode } + + pub fn render_own_diagnostics(&self) -> String { + self.render_diagnostics(&self.diagnostics) + } + + pub fn render_diagnostics(&self, diagnostics: &Diagnostics) -> String { + let mut out = Vec::new(); + + for error in diagnostics.errors() { + let (file_name, source, _) = &self.db[error.span().file_id]; + 
error.pretty_print(&mut out, file_name, source.as_str()).unwrap(); + } + + String::from_utf8(out).unwrap() + } } /// The most general API for dealing with Prisma schemas. It accumulates what analysis and /// validation information it can, and returns it along with any error and warning diagnostics. pub fn validate(file: SourceFile, connectors: ConnectorRegistry<'_>) -> ValidatedSchema { let mut diagnostics = Diagnostics::new(); - let db = ParserDatabase::new(file, &mut diagnostics); - let configuration = validate_configuration(db.ast(), &mut diagnostics, connectors); + let db = ParserDatabase::new_single_file(file, &mut diagnostics); + let configuration = validate_configuration(db.ast_assert_single(), &mut diagnostics, connectors); + let datasources = &configuration.datasources; + let out = validate::validate(db, datasources, configuration.preview_features(), diagnostics); + + ValidatedSchema { + diagnostics: out.diagnostics, + configuration, + connector: out.connector, + db: out.db, + relation_mode: out.relation_mode, + } +} + +/// The most general API for dealing with Prisma schemas. It accumulates what analysis and +/// validation information it can, and returns it along with any error and warning diagnostics. +pub fn validate_multi_file(files: Vec<(String, SourceFile)>, connectors: ConnectorRegistry<'_>) -> ValidatedSchema { + assert!( + !files.is_empty(), + "psl::validate_multi_file() must be called with at least one file" + ); + let mut diagnostics = Diagnostics::new(); + let db = ParserDatabase::new(files, &mut diagnostics); + + // TODO: the bulk of configuration block analysis should be part of ParserDatabase::new(). 
+ let mut configuration = Configuration::default(); + for ast in db.iter_asts() { + let new_config = validate_configuration(ast, &mut diagnostics, connectors); + + configuration.datasources.extend(new_config.datasources.into_iter()); + configuration.generators.extend(new_config.generators.into_iter()); + configuration.warnings.extend(new_config.warnings.into_iter()); + } + let datasources = &configuration.datasources; let out = validate::validate(db, datasources, configuration.preview_features(), diagnostics); @@ -77,8 +124,8 @@ pub fn validate(file: SourceFile, connectors: ConnectorRegistry<'_>) -> Validate /// computationally or in terms of bundle size (e.g., for `query-engine-wasm`). pub fn parse_without_validation(file: SourceFile, connectors: ConnectorRegistry<'_>) -> ValidatedSchema { let mut diagnostics = Diagnostics::new(); - let db = ParserDatabase::new(file, &mut diagnostics); - let configuration = validate_configuration(db.ast(), &mut diagnostics, connectors); + let db = ParserDatabase::new_single_file(file, &mut diagnostics); + let configuration = validate_configuration(db.ast_assert_single(), &mut diagnostics, connectors); let datasources = &configuration.datasources; let out = validate::parse_without_validation(db, datasources); @@ -97,7 +144,7 @@ pub fn parse_configuration( connectors: ConnectorRegistry<'_>, ) -> Result { let mut diagnostics = Diagnostics::default(); - let ast = schema_ast::parse_schema(schema, &mut diagnostics); + let ast = schema_ast::parse_schema(schema, &mut diagnostics, diagnostics::FileId::ZERO); let out = validate_configuration(&ast, &mut diagnostics, connectors); diagnostics.to_result().map(|_| out) } diff --git a/psl/psl-core/src/reformat.rs b/psl/psl-core/src/reformat.rs index eaf8aa5400b4..a18b32e301b2 100644 --- a/psl/psl-core/src/reformat.rs +++ b/psl/psl-core/src/reformat.rs @@ -1,46 +1,95 @@ -use crate::ParserDatabase; +use crate::{ParserDatabase, ValidatedSchema}; +use diagnostics::FileId; use 
parser_database::{ast::WithSpan, walkers}; use schema_ast::{ast, SourceFile}; -use std::{borrow::Cow, sync::Arc}; +use std::{borrow::Cow, collections::HashMap}; /// Returns either the reformatted schema, or the original input if we can't reformat. This happens /// if and only if the source does not parse to a well formed AST. pub fn reformat(source: &str, indent_width: usize) -> Option { - let file = SourceFile::new_allocated(Arc::from(source.to_owned().into_boxed_str())); + let reformatted = reformat_multiple(vec![("schema.prisma".to_owned(), source.into())], indent_width); - let mut diagnostics = diagnostics::Diagnostics::new(); - let db = parser_database::ParserDatabase::new(file, &mut diagnostics); + reformatted.first().map(|(_, source)| source).cloned() +} + +pub fn reformat_validated_schema_into_single(schema: ValidatedSchema, indent_width: usize) -> Option { + let db = schema.db; + + let source = db + .iter_sources() + .map(|source| source.to_owned()) + .collect::>() + .join("\n"); - let source_to_reformat = if diagnostics.has_errors() { - Cow::Borrowed(source) + schema_ast::reformat(&source, indent_width) +} + +pub fn reformat_multiple(sources: Vec<(String, SourceFile)>, indent_width: usize) -> Vec<(String, String)> { + let mut diagnostics = diagnostics::Diagnostics::new(); + let db = parser_database::ParserDatabase::new(sources, &mut diagnostics); + + if diagnostics.has_errors() { + db.iter_file_ids() + .filter_map(|file_id| { + let formatted_source = schema_ast::reformat(db.source(file_id), indent_width)?; + Some((db.file_name(file_id).to_owned(), formatted_source)) + }) + .collect() } else { - let mut missing_bits = Vec::new(); + let mut missing_bits = HashMap::new(); + let mut ctx = MagicReformatCtx { - original_schema: source, - missing_bits: &mut missing_bits, + missing_bits_map: &mut missing_bits, db: &db, }; + push_missing_fields(&mut ctx); push_missing_attributes(&mut ctx); push_missing_relation_attribute_args(&mut ctx); - 
missing_bits.sort_by_key(|bit| bit.position); + ctx.sort_missing_bits(); - if missing_bits.is_empty() { - Cow::Borrowed(source) - } else { - Cow::Owned(enrich(source, &missing_bits)) - } - }; + db.iter_file_ids() + .filter_map(|file_id| { + let source = if let Some(missing_bits) = ctx.get_missing_bits(file_id) { + Cow::Owned(enrich(db.source(file_id), missing_bits)) + } else { + Cow::Borrowed(db.source(file_id)) + }; + + let formatted_source = schema_ast::reformat(&source, indent_width)?; - schema_ast::reformat(&source_to_reformat, indent_width) + Some((db.file_name(file_id).to_owned(), formatted_source)) + }) + .collect() + } } struct MagicReformatCtx<'a> { - original_schema: &'a str, - missing_bits: &'a mut Vec, + missing_bits_map: &'a mut HashMap>, db: &'a ParserDatabase, } +impl<'a> MagicReformatCtx<'a> { + fn add_missing_bit(&mut self, file_id: FileId, bit: MissingBit) { + self.missing_bits_map.entry(file_id).or_default().push(bit); + } + + fn get_missing_bits(&self, file_id: FileId) -> Option<&Vec> { + let bits_vec = self.missing_bits_map.get(&file_id)?; + if bits_vec.is_empty() { + None + } else { + Some(bits_vec) + } + } + + fn sort_missing_bits(&mut self) { + self.missing_bits_map + .iter_mut() + .for_each(|(_, bits)| bits.sort_by_key(|bit| bit.position)) + } +} + fn enrich(input: &str, missing_bits: &[MissingBit]) -> String { let bits = missing_bits.iter().scan(0usize, |last_insert_position, missing_bit| { let start: usize = *last_insert_position; @@ -109,10 +158,13 @@ fn push_inline_relation_missing_arguments( (", ", "", relation_attribute.span.end - 1) }; - ctx.missing_bits.push(MissingBit { - position, - content: format!("{prefix}{extra_args}{suffix}"), - }); + ctx.add_missing_bit( + relation_attribute.span.file_id, + MissingBit { + position, + content: format!("{prefix}{extra_args}{suffix}"), + }, + ); } } @@ -136,10 +188,14 @@ fn push_missing_relation_attribute(inline_relation: walkers::InlineRelationWalke 
content.push_str(&references_argument(inline_relation)); content.push(')'); - ctx.missing_bits.push(MissingBit { - position: after_type(forward.ast_field().field_type.span().end, ctx.original_schema), - content, - }) + let file_id = forward.ast_field().span().file_id; + ctx.add_missing_bit( + file_id, + MissingBit { + position: after_type(forward.ast_field().field_type.span().end, ctx.db.source(file_id)), + content, + }, + ); } } @@ -167,10 +223,14 @@ fn push_missing_relation_fields(inline: walkers::InlineRelationWalker<'_>, ctx: }; let arity = if inline.is_one_to_one() { "?" } else { "[]" }; - ctx.missing_bits.push(MissingBit { - position: inline.referenced_model().ast_model().span().end - 1, - content: format!("{referencing_model_name} {referencing_model_name}{arity} {ignore}\n"), - }); + let span = inline.referenced_model().ast_model().span(); + ctx.add_missing_bit( + span.file_id, + MissingBit { + position: span.end - 1, + content: format!("{referencing_model_name} {referencing_model_name}{arity} {ignore}\n"), + }, + ); } if inline.forward_relation_field().is_none() { @@ -179,10 +239,14 @@ fn push_missing_relation_fields(inline: walkers::InlineRelationWalker<'_>, ctx: let arity = render_arity(forward_relation_field_arity(inline)); let fields_arg = fields_argument(inline); let references_arg = references_argument(inline); - ctx.missing_bits.push(MissingBit { - position: inline.referencing_model().ast_model().span().end - 1, - content: format!("{field_name} {field_type}{arity} @relation({fields_arg}, {references_arg})\n"), - }) + let span = inline.referencing_model().ast_model().span(); + ctx.add_missing_bit( + span.file_id, + MissingBit { + position: span.end - 1, + content: format!("{field_name} {field_type}{arity} @relation({fields_arg}, {references_arg})\n"), + }, + ) } } @@ -211,13 +275,17 @@ fn push_missing_scalar_fields(inline: walkers::InlineRelationWalker<'_>, ctx: &m let mut attributes: String = String::new(); if let Some((_datasource_name, _type_name, 
_args, span)) = field.blueprint.raw_native_type() { - attributes.push_str(&ctx.original_schema[span.start..span.end]); + attributes.push_str(&ctx.db.source(span.file_id)[span.start..span.end]); } - ctx.missing_bits.push(MissingBit { - position: inline.referencing_model().ast_model().span().end - 1, - content: format!("{field_name} {field_type}{arity} {attributes}\n"), - }); + let span = inline.referencing_model().ast_model().span(); + ctx.add_missing_bit( + span.file_id, + MissingBit { + position: span.end - 1, + content: format!("{field_name} {field_type}{arity} {attributes}\n"), + }, + ); } } diff --git a/psl/psl-core/src/validate/validation_pipeline/validations/composite_types.rs b/psl/psl-core/src/validate/validation_pipeline/validations/composite_types.rs index da0a3db3a515..fbaaa3525a49 100644 --- a/psl/psl-core/src/validate/validation_pipeline/validations/composite_types.rs +++ b/psl/psl-core/src/validate/validation_pipeline/validations/composite_types.rs @@ -2,7 +2,7 @@ use super::default_value; use crate::{datamodel_connector::ConnectorCapability, validate::validation_pipeline::context::Context}; use diagnostics::DatamodelError; use parser_database::{ - ast::{self, WithSpan}, + ast::WithSpan, walkers::{CompositeTypeFieldWalker, CompositeTypeWalker}, ScalarFieldType, }; @@ -11,8 +11,8 @@ use std::{fmt, rc::Rc}; /// Detect compound type chains that form a cycle, that is not broken with either an optional or an /// array type. 
pub(super) fn detect_composite_cycles(ctx: &mut Context<'_>) { - let mut visited: Vec = Vec::new(); - let mut errors: Vec<(ast::CompositeTypeId, DatamodelError)> = Vec::new(); + let mut visited: Vec = Vec::new(); + let mut errors: Vec<(parser_database::CompositeTypeId, DatamodelError)> = Vec::new(); let mut fields_to_traverse: Vec<(CompositeTypeFieldWalker<'_>, Option>>)> = ctx .db diff --git a/psl/psl-core/src/validate/validation_pipeline/validations/constraint_namespace.rs b/psl/psl-core/src/validate/validation_pipeline/validations/constraint_namespace.rs index e4b02ebc9308..495aa9b44670 100644 --- a/psl/psl-core/src/validate/validation_pipeline/validations/constraint_namespace.rs +++ b/psl/psl-core/src/validate/validation_pipeline/validations/constraint_namespace.rs @@ -1,5 +1,4 @@ use crate::datamodel_connector::{walker_ext_traits::*, ConstraintScope}; -use parser_database::ast; use std::{borrow::Cow, collections::HashMap, ops::Deref}; /// A constraint namespace consists of two kinds of namespaces: @@ -10,8 +9,8 @@ use std::{borrow::Cow, collections::HashMap, ops::Deref}; pub(crate) struct ConstraintNamespace<'db> { // (ConstraintScope, schema name, name) -> occurrences global: HashMap<(ConstraintScope, Option<&'db str>, Cow<'db, str>), usize>, - local: HashMap<(ast::ModelId, ConstraintScope, Cow<'db, str>), usize>, - local_custom_name: HashMap<(ast::ModelId, Cow<'db, str>), usize>, + local: HashMap<(parser_database::ModelId, ConstraintScope, Cow<'db, str>), usize>, + local_custom_name: HashMap<(parser_database::ModelId, Cow<'db, str>), usize>, } impl<'db> ConstraintNamespace<'db> { @@ -19,7 +18,7 @@ impl<'db> ConstraintNamespace<'db> { /// local violations in the given model. 
pub(crate) fn constraint_name_scope_violations( &self, - model_id: ast::ModelId, + model_id: parser_database::ModelId, name: ConstraintName<'db>, ctx: &super::Context<'db>, ) -> impl Iterator + '_ { @@ -43,7 +42,7 @@ impl<'db> ConstraintNamespace<'db> { fn local_constraint_name_scope_violations( &self, - model_id: ast::ModelId, + model_id: parser_database::ModelId, name: ConstraintName<'db>, ) -> impl Iterator + '_ { name.possible_scopes().filter(move |scope| { @@ -54,7 +53,11 @@ impl<'db> ConstraintNamespace<'db> { }) } - pub(crate) fn local_custom_name_scope_violations(&self, model_id: ast::ModelId, name: &'db str) -> bool { + pub(crate) fn local_custom_name_scope_violations( + &self, + model_id: parser_database::ModelId, + name: &'db str, + ) -> bool { match self.local_custom_name.get(&(model_id, Cow::from(name))) { Some(count) => *count > 1, None => false, @@ -127,7 +130,7 @@ impl<'db> ConstraintNamespace<'db> { for index in model.indexes() { let counter = self .local - .entry((model.model_id(), scope, index.constraint_name(ctx.connector))) + .entry((model.id, scope, index.constraint_name(ctx.connector))) .or_default(); *counter += 1; @@ -139,7 +142,7 @@ impl<'db> ConstraintNamespace<'db> { pub(super) fn add_local_primary_keys(&mut self, scope: ConstraintScope, ctx: &super::Context<'db>) { for model in ctx.db.walk_models().chain(ctx.db.walk_views()) { if let Some(name) = model.primary_key().and_then(|pk| pk.constraint_name(ctx.connector)) { - let counter = self.local.entry((model.model_id(), scope, name)).or_default(); + let counter = self.local.entry((model.id, scope, name)).or_default(); *counter += 1; } } @@ -149,18 +152,12 @@ impl<'db> ConstraintNamespace<'db> { pub(super) fn add_local_custom_names_for_primary_keys_and_uniques(&mut self, ctx: &super::Context<'db>) { for model in ctx.db.walk_models().chain(ctx.db.walk_views()) { if let Some(name) = model.primary_key().and_then(|pk| pk.name()) { - let counter = self - .local_custom_name - 
.entry((model.model_id(), Cow::from(name))) - .or_default(); + let counter = self.local_custom_name.entry((model.id, Cow::from(name))).or_default(); *counter += 1; } for index in model.indexes() { if let Some(name) = index.name() { - let counter = self - .local_custom_name - .entry((model.model_id(), Cow::from(name))) - .or_default(); + let counter = self.local_custom_name.entry((model.id, Cow::from(name))).or_default(); *counter += 1; } } @@ -175,7 +172,7 @@ impl<'db> ConstraintNamespace<'db> { .filter_map(|r| r.refine().as_inline()) .map(|r| r.constraint_name(ctx.connector)) { - let counter = self.local.entry((model.model_id(), scope, name)).or_default(); + let counter = self.local.entry((model.id, scope, name)).or_default(); *counter += 1; } diff --git a/psl/psl-core/src/validate/validation_pipeline/validations/fields.rs b/psl/psl-core/src/validate/validation_pipeline/validations/fields.rs index 0613fda2a48f..674d8e50d3bc 100644 --- a/psl/psl-core/src/validate/validation_pipeline/validations/fields.rs +++ b/psl/psl-core/src/validate/validation_pipeline/validations/fields.rs @@ -21,7 +21,7 @@ pub(super) fn validate_client_name(field: FieldWalker<'_>, names: &Names<'_>, ct "model" }; - for taken in names.name_taken(model.model_id(), field.name()).into_iter() { + for taken in names.name_taken(model.id, field.name()).into_iter() { match taken { NameTaken::Index => { let message = format!( @@ -82,7 +82,7 @@ pub(super) fn has_a_unique_default_constraint_name( }; for violation in names.constraint_namespace.constraint_name_scope_violations( - field.model().model_id(), + field.model().id, ConstraintName::Default(name.as_ref()), ctx, ) { diff --git a/psl/psl-core/src/validate/validation_pipeline/validations/indexes.rs b/psl/psl-core/src/validate/validation_pipeline/validations/indexes.rs index 9a7ac919fff7..e9bae626f374 100644 --- a/psl/psl-core/src/validate/validation_pipeline/validations/indexes.rs +++ 
b/psl/psl-core/src/validate/validation_pipeline/validations/indexes.rs @@ -14,11 +14,11 @@ pub(super) fn has_a_unique_constraint_name(index: IndexWalker<'_>, names: &super let name = index.constraint_name(ctx.connector); let model = index.model(); - for violation in names.constraint_namespace.constraint_name_scope_violations( - model.model_id(), - ConstraintName::Index(name.as_ref()), - ctx, - ) { + for violation in + names + .constraint_namespace + .constraint_name_scope_violations(model.id, ConstraintName::Index(name.as_ref()), ctx) + { let message = format!( "The given constraint name `{}` has to be unique in the following namespace: {}. Please provide a different name using the `map` argument.", name, @@ -52,7 +52,7 @@ pub(super) fn unique_index_has_a_unique_custom_name_per_model( if let Some(name) = index.name() { if names .constraint_namespace - .local_custom_name_scope_violations(model.model_id(), name.as_ref()) + .local_custom_name_scope_violations(model.id, name.as_ref()) { let message = format!( "The given custom name `{name}` has to be unique on the model. Please provide a different name for the `name` argument." 
diff --git a/psl/psl-core/src/validate/validation_pipeline/validations/models.rs b/psl/psl-core/src/validate/validation_pipeline/validations/models.rs index a8c222c91600..a53063624b2d 100644 --- a/psl/psl-core/src/validate/validation_pipeline/validations/models.rs +++ b/psl/psl-core/src/validate/validation_pipeline/validations/models.rs @@ -1,6 +1,5 @@ use super::database_name::validate_db_name; use crate::{ - ast, datamodel_connector::{walker_ext_traits::*, ConnectorCapability}, diagnostics::DatamodelError, parser_database::ast::{WithName, WithSpan}, @@ -77,7 +76,7 @@ pub(super) fn has_a_unique_primary_key_name(model: ModelWalker<'_>, names: &supe ); for violation in names.constraint_namespace.constraint_name_scope_violations( - model.model_id(), + model.id, super::constraint_namespace::ConstraintName::PrimaryKey(name.as_ref()), ctx, ) { @@ -115,7 +114,7 @@ pub(super) fn has_a_unique_custom_primary_key_name_per_model( if let Some(name) = pk.name() { if names .constraint_namespace - .local_custom_name_scope_violations(model.model_id(), name.as_ref()) + .local_custom_name_scope_violations(model.id, name.as_ref()) { let message = format!( "The given custom name `{name}` has to be unique on the model. Please provide a different name for the `name` argument." 
@@ -362,15 +361,16 @@ pub(super) fn schema_attribute_missing(model: ModelWalker<'_>, ctx: &mut Context pub(super) fn database_name_clashes(ctx: &mut Context<'_>) { // (schema_name, model_database_name) -> ModelId - let mut database_names: HashMap<(Option<&str>, &str), ast::ModelId> = HashMap::with_capacity(ctx.db.models_count()); + let mut database_names: HashMap<(Option<&str>, &str), parser_database::ModelId> = + HashMap::with_capacity(ctx.db.models_count()); for model in ctx.db.walk_models().chain(ctx.db.walk_views()) { let key = (model.schema().map(|(name, _)| name), model.database_name()); - match database_names.insert(key, model.model_id()) { + match database_names.insert(key, model.id) { // Two branches because we want to put the error on the @@map attribute, and it can be // on either model. Some(existing) if model.mapped_name().is_some() => { - let existing_model_name = &ctx.db.ast()[existing].name(); + let existing_model_name = &ctx.db.ast(existing.0)[existing.1].name(); let attribute = model .ast_model() .attributes @@ -385,7 +385,7 @@ pub(super) fn database_name_clashes(ctx: &mut Context<'_>) { )); } Some(existing) => { - let existing_model = &ctx.db.ast()[existing]; + let existing_model = &ctx.db.ast(existing.0)[existing.1]; let attribute = existing_model .attributes .iter() diff --git a/psl/psl-core/src/validate/validation_pipeline/validations/names.rs b/psl/psl-core/src/validate/validation_pipeline/validations/names.rs index 0c818610f082..fdc0afaf7b8b 100644 --- a/psl/psl-core/src/validate/validation_pipeline/validations/names.rs +++ b/psl/psl-core/src/validate/validation_pipeline/validations/names.rs @@ -1,6 +1,8 @@ use super::constraint_namespace::ConstraintNamespace; -use crate::ast::ModelId; -use parser_database::walkers::{RelationFieldId, RelationName}; +use parser_database::{ + walkers::{RelationFieldId, RelationName}, + ModelId, +}; use std::collections::{HashMap, HashSet}; type RelationIdentifier<'db> = (ModelId, ModelId, RelationName<'db>); 
@@ -28,11 +30,11 @@ impl<'db> Names<'db> { let mut primary_key_names: HashMap = HashMap::new(); for model in ctx.db.walk_models().chain(ctx.db.walk_views()) { - let model_id = model.model_id(); + let model_id = model.id; for field in model.relation_fields() { - let model_id = field.model().model_id(); - let related_model_id = field.related_model().model_id(); + let model_id = field.model().id; + let related_model_id = field.related_model().id; let identifier = (model_id, related_model_id, field.relation_name()); let field_ids = relation_names.entry(identifier).or_default(); @@ -51,7 +53,7 @@ impl<'db> Names<'db> { } if let Some(pk) = model.primary_key().and_then(|pk| pk.name()) { - primary_key_names.insert(model.model_id(), pk); + primary_key_names.insert(model.id, pk); } } diff --git a/psl/psl-core/src/validate/validation_pipeline/validations/relation_fields.rs b/psl/psl-core/src/validate/validation_pipeline/validations/relation_fields.rs index 765b6b2bb39f..6d1b9cb51669 100644 --- a/psl/psl-core/src/validate/validation_pipeline/validations/relation_fields.rs +++ b/psl/psl-core/src/validate/validation_pipeline/validations/relation_fields.rs @@ -59,7 +59,7 @@ pub(super) fn ambiguity(field: RelationFieldWalker<'_>, names: &Names<'_>) -> Re let model = field.model(); let related_model = field.related_model(); - let identifier = (model.model_id(), related_model.model_id(), field.relation_name()); + let identifier = (model.id, related_model.id, field.relation_name()); match names.relation_names.get(&identifier) { Some(fields) if fields.len() > 1 => { @@ -267,23 +267,27 @@ pub(super) fn validate_missing_relation_indexes(relation_field: RelationFieldWal // Considers all groups of indexes explicitly declared in the given model. 
// An index group can be: // - a singleton (@unique or @id) - // - an ordered set (@@unique or @@index) - let index_field_groups = model.indexes(); - - let referencing_fields_appear_in_index = index_field_groups - .map(|index_walker| index_walker.fields().map(|index| index.field_id())) - .any(|index_fields_it| { - let fields_it = referencing_fields_it.clone(); - is_leftwise_included_it(fields_it, index_fields_it) - }); - - if !referencing_fields_appear_in_index { - let ast_field = relation_field.ast_field(); - let span = ast_field - .span_for_attribute("relation") - .unwrap_or_else(|| ast_field.span()); - ctx.push_warning(DatamodelWarning::new_missing_index_on_emulated_relation(span)); + // - an ordered set (@@unique, @@index, or @@id) + for index_walker in model.indexes() { + let index_fields_it = index_walker.fields().map(|col| col.field_id()); + let referencing_fields_it = referencing_fields_it.clone(); + if is_leftwise_included_it(referencing_fields_it, index_fields_it) { + return; + } + } + + if let Some(primary_key_walker) = model.primary_key() { + let primary_key_fields_it = primary_key_walker.fields().map(|col| col.field_id()); + if is_leftwise_included_it(referencing_fields_it, primary_key_fields_it) { + return; + } } + + let ast_field = relation_field.ast_field(); + let span = ast_field + .span_for_attribute("relation") + .unwrap_or_else(|| ast_field.span()); + ctx.push_warning(DatamodelWarning::new_missing_index_on_emulated_relation(span)); } } diff --git a/psl/psl-core/src/validate/validation_pipeline/validations/relations.rs b/psl/psl-core/src/validate/validation_pipeline/validations/relations.rs index ec78b9a61a3f..e834fe3b54ea 100644 --- a/psl/psl-core/src/validate/validation_pipeline/validations/relations.rs +++ b/psl/psl-core/src/validate/validation_pipeline/validations/relations.rs @@ -38,7 +38,7 @@ pub(super) fn has_a_unique_constraint_name( let model = relation.referencing_model(); for violation in 
names.constraint_namespace.constraint_name_scope_violations( - model.model_id(), + model.id, ConstraintName::Relation(name.as_ref()), ctx, ) { diff --git a/psl/psl/build.rs b/psl/psl/build.rs index 509b60875998..1b0d560da551 100644 --- a/psl/psl/build.rs +++ b/psl/psl/build.rs @@ -1,19 +1,46 @@ use std::{env, fs, io::Write as _, path}; const VALIDATIONS_ROOT_DIR: &str = "tests/validation"; -const REFORMAT_ROOT_DIR: &str = "tests/reformatter"; +const REFORMAT_SINGLE_FILE_ROOT_DIR: &str = "tests/reformatter"; +const REFORMAT_MULTI_FILE_ROOT_DIR: &str = "tests/reformatter_multi_file"; const CARGO_MANIFEST_DIR: &str = env!("CARGO_MANIFEST_DIR"); fn main() { build_validation_tests(); - build_reformat_tests(); + build_reformat_single_file_tests(); + build_reformat_multi_file_tests(); } -fn build_reformat_tests() { - println!("cargo:rerun-if-changed={REFORMAT_ROOT_DIR}"); +fn build_reformat_multi_file_tests() { + println!("cargo:rerun-if-changed={REFORMAT_MULTI_FILE_ROOT_DIR}"); + let schema_dirs_to_reformat = fs::read_dir(format!("{CARGO_MANIFEST_DIR}/{REFORMAT_MULTI_FILE_ROOT_DIR}")) + .unwrap() + .map(Result::unwrap) + .filter_map(|entry| { + let name = entry.file_name(); + let name = name.to_str().unwrap(); + if name == "." || name == ".." 
|| name.ends_with(".reformatted") { + None + } else { + Some(name.trim_start_matches('/').to_owned()) + } + }); + let mut out_file = out_file("reformat_multi_file_tests.rs"); + for schema_dir in schema_dirs_to_reformat { + let test_name = test_name(&schema_dir); + writeln!( + out_file, + "#[test] fn {test_name}() {{ run_reformat_multi_file_test(\"{schema_dir}\"); }}" + ) + .unwrap(); + } +} + +fn build_reformat_single_file_tests() { + println!("cargo:rerun-if-changed={REFORMAT_SINGLE_FILE_ROOT_DIR}"); let mut all_schemas = Vec::new(); - find_all_schemas("", &mut all_schemas, REFORMAT_ROOT_DIR); + find_all_schemas("", &mut all_schemas, REFORMAT_SINGLE_FILE_ROOT_DIR); let mut out_file = out_file("reformat_tests.rs"); let schemas_to_reformat = all_schemas.iter().filter(|name| !name.ends_with(".reformatted.prisma")); diff --git a/psl/psl/src/lib.rs b/psl/psl/src/lib.rs index 9d7fb8f26168..af78ef19b3b8 100644 --- a/psl/psl/src/lib.rs +++ b/psl/psl/src/lib.rs @@ -12,6 +12,8 @@ pub use psl_core::{ parser_database::{self, SourceFile}, reachable_only_with_capability, reformat, + reformat_multiple, + reformat_validated_schema_into_single, schema_ast, set_config_dir, Configuration, @@ -44,7 +46,7 @@ pub fn parse_schema(file: impl Into) -> Result ValidatedSchema { pub fn parse_without_validation(file: SourceFile, connector_registry: ConnectorRegistry<'_>) -> ValidatedSchema { psl_core::parse_without_validation(file, connector_registry) } +/// The most general API for dealing with Prisma schemas. It accumulates what analysis and +/// validation information it can, and returns it along with any error and warning diagnostics. 
+pub fn validate_multi_file(files: Vec<(String, SourceFile)>) -> ValidatedSchema { + psl_core::validate_multi_file(files, builtin_connectors::BUILTIN_CONNECTORS) +} diff --git a/psl/psl/tests/base/basic.rs b/psl/psl/tests/base/basic.rs index ca806fb6f510..a8c47884c213 100644 --- a/psl/psl/tests/base/basic.rs +++ b/psl/psl/tests/base/basic.rs @@ -239,3 +239,67 @@ fn type_aliases_must_error() { expectation.assert_eq(&error); } + +#[test] +fn must_return_good_error_message_for_type_match() { + let dml = indoc! {r#" + model User { + firstName String + } + model B { + a datetime + b footime + c user + d DB + e JS + } + + datasource db { + provider = "postgresql" + url = env("TEST_DATABASE_URL") + extensions = [citext, pg_trgm] + } + + generator js { + provider = "prisma-client-js" + previewFeatures = ["postgresqlExtensions"] + } + "#}; + + let error = parse_unwrap_err(dml); + + let expected = expect![[r#" + error: Type "datetime" is neither a built-in type, nor refers to another model, custom type, or enum. Did you mean "DateTime"? + --> schema.prisma:5 +  |  +  4 | model B { +  5 |  a datetime +  |  + error: Type "footime" is neither a built-in type, nor refers to another model, custom type, or enum. + --> schema.prisma:6 +  |  +  5 |  a datetime +  6 |  b footime +  |  + error: Type "user" is neither a built-in type, nor refers to another model, custom type, or enum. Did you mean "User"? + --> schema.prisma:7 +  |  +  6 |  b footime +  7 |  c user +  |  + error: Type "DB" is neither a built-in type, nor refers to another model, custom type, or enum. + --> schema.prisma:8 +  |  +  7 |  c user +  8 |  d DB +  |  + error: Type "JS" is neither a built-in type, nor refers to another model, custom type, or enum. 
+ --> schema.prisma:9 +  |  +  8 |  d DB +  9 |  e JS +  |  + "#]]; + + expected.assert_eq(&error); +} diff --git a/psl/psl/tests/common/asserts.rs b/psl/psl/tests/common/asserts.rs index 81d5472d4c16..4278f5cb77e5 100644 --- a/psl/psl/tests/common/asserts.rs +++ b/psl/psl/tests/common/asserts.rs @@ -3,7 +3,7 @@ use std::fmt::Debug; use either::Either::{Left, Right}; use psl::datamodel_connector::Connector; use psl::diagnostics::DatamodelWarning; -use psl::parser_database::{walkers, IndexAlgorithm, OperatorClass, ReferentialAction, ScalarType, SortOrder}; +use psl::parser_database::{walkers, IndexAlgorithm, ModelId, OperatorClass, ReferentialAction, ScalarType, SortOrder}; use psl::schema_ast::ast::WithDocumentation; use psl::schema_ast::ast::{self, FieldArity}; use psl::{Diagnostics, StringFromEnvVar}; @@ -67,7 +67,7 @@ pub(crate) trait CompositeFieldAssert { pub(crate) trait RelationFieldAssert { fn assert_ignored(&self, ignored: bool) -> &Self; - fn assert_relation_to(&self, model_id: ast::ModelId) -> &Self; + fn assert_relation_to(&self, model_id: ModelId) -> &Self; fn assert_relation_delete_strategy(&self, action: ReferentialAction) -> &Self; fn assert_relation_update_strategy(&self, action: ReferentialAction) -> &Self; } @@ -151,7 +151,7 @@ impl<'a> DatamodelAssert<'a> for psl::ValidatedSchema { impl<'a> RelationFieldAssert for walkers::RelationFieldWalker<'a> { #[track_caller] - fn assert_relation_to(&self, model_id: ast::ModelId) -> &Self { + fn assert_relation_to(&self, model_id: ModelId) -> &Self { assert!(self.references_model(model_id)); self } diff --git a/psl/psl/tests/config/nice_warnings.rs b/psl/psl/tests/config/nice_warnings.rs index 4e1c7ed2bfad..955cbbd89fd3 100644 --- a/psl/psl/tests/config/nice_warnings.rs +++ b/psl/psl/tests/config/nice_warnings.rs @@ -14,6 +14,6 @@ fn nice_warning_for_deprecated_generator_preview_feature() { res.warnings.assert_is(DatamodelWarning::new_feature_deprecated( "middlewares", - Span::new(88, 103), + Span::new(88, 
103, psl_core::parser_database::FileId::ZERO), )); } diff --git a/psl/psl/tests/datamodel_tests.rs b/psl/psl/tests/datamodel_tests.rs index b950ff6fc2fd..ba723194a4fd 100644 --- a/psl/psl/tests/datamodel_tests.rs +++ b/psl/psl/tests/datamodel_tests.rs @@ -8,6 +8,7 @@ mod capabilities; mod common; mod config; mod functions; +mod multi_file; mod parsing; mod reformat; mod types; diff --git a/psl/psl/tests/multi_file/basic.rs b/psl/psl/tests/multi_file/basic.rs new file mode 100644 index 000000000000..d5eaf5b8b489 --- /dev/null +++ b/psl/psl/tests/multi_file/basic.rs @@ -0,0 +1,114 @@ +use crate::common::expect; + +fn expect_errors(schemas: &[[&'static str; 2]], expectation: expect_test::Expect) { + let out = psl::validate_multi_file( + schemas + .iter() + .map(|[file_name, contents]| ((*file_name).into(), (*contents).into())) + .collect(), + ); + + let actual = out.render_own_diagnostics(); + expectation.assert_eq(&actual) +} + +#[test] +fn multi_file_errors_single_file() { + let files: &[[&'static str; 2]] = &[["a.prisma", "meow"]]; + + let expected = expect![[r#" + error: Error validating: This line is invalid. It does not start with any known Prisma schema keyword. + --> a.prisma:1 +  |  +  |  +  1 | meow +  |  + "#]]; + expect_errors(files, expected); +} + +#[test] +fn multi_file_errors_two_files() { + let files: &[[&'static str; 2]] = &[ + ["a.prisma", "meow"], + ["b.prisma", "woof woof"], + ["c.prisma", "choo choo"], + ]; + + let expected = expect![[r#" + error: Error validating: This line is invalid. It does not start with any known Prisma schema keyword. + --> a.prisma:1 +  |  +  |  +  1 | meow +  |  + error: Error validating: This line is invalid. It does not start with any known Prisma schema keyword. + --> b.prisma:1 +  |  +  |  +  1 | woof woof +  |  + error: Error validating: This line is invalid. It does not start with any known Prisma schema keyword. 
+ --> c.prisma:1 +  |  +  |  +  1 | choo choo +  |  + "#]]; + expect_errors(files, expected); +} + +#[test] +fn multi_file_errors_relation() { + let files: &[[&'static str; 2]] = &[ + [ + "b.prisma", + r#" +generator client { + provider = "prisma-client-js" +} + +model Post { + id Int @id + test String @db.Text + user_id Int + user User @relation(fields: [user_id], references: [id]) +} +"#, + ], + [ + "a.prisma", + r#" +datasource db { + provider = "postgresql" + url = env("TEST_DATABASE_URL") +} + +model User { + id Int @id + test String @db.FunnyText + post_id Int @unique + post Post +} + +"#, + ], + ]; + + let expected = expect![[r#" + error: Native type FunnyText is not supported for postgresql connector. + --> a.prisma:9 +  |  +  8 |  id Int @id +  9 |  test String @db.FunnyText +  |  + error: Error parsing attribute "@relation": A one-to-one relation must use unique fields on the defining side. Either add an `@unique` attribute to the field `user_id`, or change the relation to one-to-many. + --> b.prisma:10 +  |  +  9 |  user_id Int + 10 |  user User @relation(fields: [user_id], references: [id]) + 11 | } +  |  + "#]]; + expect_errors(files, expected); +} diff --git a/psl/psl/tests/multi_file/mod.rs b/psl/psl/tests/multi_file/mod.rs new file mode 100644 index 000000000000..1bca5f8cba77 --- /dev/null +++ b/psl/psl/tests/multi_file/mod.rs @@ -0,0 +1 @@ +mod basic; diff --git a/psl/psl/tests/panic_with_diff/mod.rs b/psl/psl/tests/panic_with_diff/mod.rs index a66b81643fdc..6360545e8515 100644 --- a/psl/psl/tests/panic_with_diff/mod.rs +++ b/psl/psl/tests/panic_with_diff/mod.rs @@ -1,9 +1,12 @@ -pub(crate) fn panic_with_diff(expected: &str, found: &str) { +pub(crate) fn panic_with_diff(expected: &str, found: &str, name: Option<&str>) { + let title = name + .map(|name| format!("Snapshot '{name}'")) + .unwrap_or("Snapshot".to_owned()); let chunks = dissimilar::diff(expected, found); let diff = format_chunks(chunks); panic!( r#" -Snapshot comparison failed. 
Run the test again with UPDATE_EXPECT=1 in the environment to update the snapshot. +${title} comparison failed. Run the test again with UPDATE_EXPECT=1 in the environment to update the snapshot. ===== EXPECTED ==== {expected} diff --git a/psl/psl/tests/reformat_tests.rs b/psl/psl/tests/reformat_tests.rs index c945ad53c077..c3c1748d8fed 100644 --- a/psl/psl/tests/reformat_tests.rs +++ b/psl/psl/tests/reformat_tests.rs @@ -24,7 +24,7 @@ fn run_reformat_test(test_file_path: &str) { let mut file = fs::File::create(&snapshot_file_name).unwrap(); // truncate file.write_all(reformatted_text.as_bytes()).unwrap(); } else { - panic_with_diff::panic_with_diff(&expected_text, &reformatted_text); + panic_with_diff::panic_with_diff(&expected_text, &reformatted_text, None); } if reformat(&reformatted_text) != reformatted_text { @@ -39,3 +39,72 @@ include!(concat!(env!("OUT_DIR"), "/reformat_tests.rs")); fn reformat(s: &str) -> String { psl::reformat(s, 2).unwrap() } + +mod reformat_multi_file { + use std::{collections::HashMap, fs, io::Write, path}; + + use psl::{reformat_multiple, SourceFile}; + + use crate::panic_with_diff; + + const MULTIFILE_TESTS_ROOT: &str = concat!(env!("CARGO_MANIFEST_DIR"), "/tests/reformatter_multi_file"); + + #[inline(never)] + fn run_reformat_multi_file_test(test_dir_name: &str) { + let dir_path = path::Path::new(MULTIFILE_TESTS_ROOT).join(test_dir_name); + let snapshot_dir_path = path::Path::new(MULTIFILE_TESTS_ROOT).join(format!("{test_dir_name}.reformatted")); + + fs::create_dir_all(&snapshot_dir_path).unwrap(); + let schemas: Vec<_> = read_schemas_from_dir(dir_path).collect(); + + let result = reformat_multiple(schemas, 2); + + let should_update = std::env::var("UPDATE_EXPECT").is_ok(); + let mut snapshot_schemas: HashMap<_, _> = read_schemas_from_dir(&snapshot_dir_path).collect(); + for (path, content) in result { + let content = content.as_str(); + let snapshot_content = snapshot_schemas.remove(&path).unwrap_or_default(); + let snapshot_content 
= snapshot_content.as_str(); + if content == snapshot_content { + continue; + } + + if should_update { + let snapshot_file_path = path::Path::new(&snapshot_dir_path).join(path); + let mut file = fs::File::create(&snapshot_file_path).unwrap(); + file.write_all(content.as_bytes()).unwrap() + } else { + panic_with_diff::panic_with_diff(snapshot_content, content, Some(&path)); + } + } + + // cleanup removed files + for missing_file in snapshot_schemas.keys() { + if should_update { + fs::remove_file(path::Path::new(&snapshot_dir_path).join(missing_file)).unwrap() + } else { + panic!("{missing_file} is present in the snapshot directory, but missing from formatting results") + } + } + } + + fn read_schemas_from_dir(root_dir_path: impl AsRef) -> impl Iterator { + let root_dir_path = root_dir_path.as_ref().to_owned(); + fs::read_dir(&root_dir_path) + .unwrap() + .map(Result::unwrap) + .filter_map(move |entry| { + let file_name = entry.file_name(); + let file_name = file_name.to_str().unwrap(); + if !file_name.ends_with(".prisma") { + None + } else { + let full_path = root_dir_path.clone().join(file_name); + let content = fs::read_to_string(full_path).unwrap(); + Some((file_name.to_owned(), content.into())) + } + }) + } + + include!(concat!(env!("OUT_DIR"), "/reformat_multi_file_tests.rs")); +} diff --git a/psl/psl/tests/reformatter_multi_file/align_blocks.reformatted/User.prisma b/psl/psl/tests/reformatter_multi_file/align_blocks.reformatted/User.prisma new file mode 100644 index 000000000000..5a45cefe0114 --- /dev/null +++ b/psl/psl/tests/reformatter_multi_file/align_blocks.reformatted/User.prisma @@ -0,0 +1,5 @@ +model User { + id Int @id @default(autoincrement()) + name String + age Float +} diff --git a/psl/psl/tests/reformatter_multi_file/align_blocks.reformatted/db.prisma b/psl/psl/tests/reformatter_multi_file/align_blocks.reformatted/db.prisma new file mode 100644 index 000000000000..e4acdefaaa68 --- /dev/null +++ 
b/psl/psl/tests/reformatter_multi_file/align_blocks.reformatted/db.prisma @@ -0,0 +1,9 @@ +datasource db { + provider = "sqlite" + url = env("DATABASE_URL") +} + +generator client { + provider = "prisma-client-js" + previewFeatures = ["prismaSchemaFolder"] +} diff --git a/psl/psl/tests/reformatter_multi_file/align_blocks/User.prisma b/psl/psl/tests/reformatter_multi_file/align_blocks/User.prisma new file mode 100644 index 000000000000..f24cc66e4d25 --- /dev/null +++ b/psl/psl/tests/reformatter_multi_file/align_blocks/User.prisma @@ -0,0 +1,5 @@ +model User { + id Int @id @default( autoincrement()) + name String + age Float +} \ No newline at end of file diff --git a/psl/psl/tests/reformatter_multi_file/align_blocks/db.prisma b/psl/psl/tests/reformatter_multi_file/align_blocks/db.prisma new file mode 100644 index 000000000000..34e89da163e4 --- /dev/null +++ b/psl/psl/tests/reformatter_multi_file/align_blocks/db.prisma @@ -0,0 +1,9 @@ +datasource db { + provider = "sqlite" + url = env("DATABASE_URL") +} + +generator client { + provider = "prisma-client-js" + previewFeatures = ["prismaSchemaFolder"] +} \ No newline at end of file diff --git a/psl/psl/tests/reformatter_multi_file/relation_1_to_1.reformatted/Post.prisma b/psl/psl/tests/reformatter_multi_file/relation_1_to_1.reformatted/Post.prisma new file mode 100644 index 000000000000..84b3d54ea7f5 --- /dev/null +++ b/psl/psl/tests/reformatter_multi_file/relation_1_to_1.reformatted/Post.prisma @@ -0,0 +1,5 @@ +model Post { + id Int @id @default(autoincrement()) + title String + User User? 
+} diff --git a/psl/psl/tests/reformatter_multi_file/relation_1_to_1.reformatted/User.prisma b/psl/psl/tests/reformatter_multi_file/relation_1_to_1.reformatted/User.prisma new file mode 100644 index 000000000000..fc08b7b116d6 --- /dev/null +++ b/psl/psl/tests/reformatter_multi_file/relation_1_to_1.reformatted/User.prisma @@ -0,0 +1,7 @@ +model User { + id Int @id @default(autoincrement()) + name String + age Float + postId Int @unique + post Post @relation(fields: [postId], references: [id]) +} diff --git a/psl/psl/tests/reformatter_multi_file/relation_1_to_1.reformatted/db.prisma b/psl/psl/tests/reformatter_multi_file/relation_1_to_1.reformatted/db.prisma new file mode 100644 index 000000000000..e4acdefaaa68 --- /dev/null +++ b/psl/psl/tests/reformatter_multi_file/relation_1_to_1.reformatted/db.prisma @@ -0,0 +1,9 @@ +datasource db { + provider = "sqlite" + url = env("DATABASE_URL") +} + +generator client { + provider = "prisma-client-js" + previewFeatures = ["prismaSchemaFolder"] +} diff --git a/psl/psl/tests/reformatter_multi_file/relation_1_to_1/Post.prisma b/psl/psl/tests/reformatter_multi_file/relation_1_to_1/Post.prisma new file mode 100644 index 000000000000..149498bbab37 --- /dev/null +++ b/psl/psl/tests/reformatter_multi_file/relation_1_to_1/Post.prisma @@ -0,0 +1,4 @@ +model Post { + id Int @id @default(autoincrement()) + title String +} diff --git a/psl/psl/tests/reformatter_multi_file/relation_1_to_1/User.prisma b/psl/psl/tests/reformatter_multi_file/relation_1_to_1/User.prisma new file mode 100644 index 000000000000..51b16016f509 --- /dev/null +++ b/psl/psl/tests/reformatter_multi_file/relation_1_to_1/User.prisma @@ -0,0 +1,7 @@ +model User { + id Int @id @default(autoincrement()) + name String + age Float + postId Int @unique + post Post @relation(fields: [postId], references: [id]) +} diff --git a/psl/psl/tests/reformatter_multi_file/relation_1_to_1/db.prisma b/psl/psl/tests/reformatter_multi_file/relation_1_to_1/db.prisma new file mode 100644 index 
000000000000..e4acdefaaa68 --- /dev/null +++ b/psl/psl/tests/reformatter_multi_file/relation_1_to_1/db.prisma @@ -0,0 +1,9 @@ +datasource db { + provider = "sqlite" + url = env("DATABASE_URL") +} + +generator client { + provider = "prisma-client-js" + previewFeatures = ["prismaSchemaFolder"] +} diff --git a/psl/psl/tests/reformatter_multi_file/relation_list.reformatted/Post.prisma b/psl/psl/tests/reformatter_multi_file/relation_list.reformatted/Post.prisma new file mode 100644 index 000000000000..93707cea33fd --- /dev/null +++ b/psl/psl/tests/reformatter_multi_file/relation_list.reformatted/Post.prisma @@ -0,0 +1,6 @@ +model Post { + id Int @id @default(autoincrement()) + title String + User User? @relation(fields: [userId], references: [id]) + userId Int? +} diff --git a/psl/psl/tests/reformatter_multi_file/relation_list.reformatted/User.prisma b/psl/psl/tests/reformatter_multi_file/relation_list.reformatted/User.prisma new file mode 100644 index 000000000000..0057debd6bd5 --- /dev/null +++ b/psl/psl/tests/reformatter_multi_file/relation_list.reformatted/User.prisma @@ -0,0 +1,6 @@ +model User { + id Int @id @default(autoincrement()) + name String + age Float + posts Post[] +} diff --git a/psl/psl/tests/reformatter_multi_file/relation_list.reformatted/db.prisma b/psl/psl/tests/reformatter_multi_file/relation_list.reformatted/db.prisma new file mode 100644 index 000000000000..e4acdefaaa68 --- /dev/null +++ b/psl/psl/tests/reformatter_multi_file/relation_list.reformatted/db.prisma @@ -0,0 +1,9 @@ +datasource db { + provider = "sqlite" + url = env("DATABASE_URL") +} + +generator client { + provider = "prisma-client-js" + previewFeatures = ["prismaSchemaFolder"] +} diff --git a/psl/psl/tests/reformatter_multi_file/relation_list/Post.prisma b/psl/psl/tests/reformatter_multi_file/relation_list/Post.prisma new file mode 100644 index 000000000000..149498bbab37 --- /dev/null +++ b/psl/psl/tests/reformatter_multi_file/relation_list/Post.prisma @@ -0,0 +1,4 @@ +model Post { 
+ id Int @id @default(autoincrement()) + title String +} diff --git a/psl/psl/tests/reformatter_multi_file/relation_list/User.prisma b/psl/psl/tests/reformatter_multi_file/relation_list/User.prisma new file mode 100644 index 000000000000..91bb36ac360b --- /dev/null +++ b/psl/psl/tests/reformatter_multi_file/relation_list/User.prisma @@ -0,0 +1,6 @@ +model User { + id Int @id @default(autoincrement()) + name String + age Float + posts Post[] +} diff --git a/psl/psl/tests/reformatter_multi_file/relation_list/db.prisma b/psl/psl/tests/reformatter_multi_file/relation_list/db.prisma new file mode 100644 index 000000000000..e4acdefaaa68 --- /dev/null +++ b/psl/psl/tests/reformatter_multi_file/relation_list/db.prisma @@ -0,0 +1,9 @@ +datasource db { + provider = "sqlite" + url = env("DATABASE_URL") +} + +generator client { + provider = "prisma-client-js" + previewFeatures = ["prismaSchemaFolder"] +} diff --git a/psl/psl/tests/reformatter_multi_file/relation_single.reformatted/Post.prisma b/psl/psl/tests/reformatter_multi_file/relation_single.reformatted/Post.prisma new file mode 100644 index 000000000000..d5524c154ee7 --- /dev/null +++ b/psl/psl/tests/reformatter_multi_file/relation_single.reformatted/Post.prisma @@ -0,0 +1,5 @@ +model Post { + id Int @id @default(autoincrement()) + title String + User User[] +} diff --git a/psl/psl/tests/reformatter_multi_file/relation_single.reformatted/User.prisma b/psl/psl/tests/reformatter_multi_file/relation_single.reformatted/User.prisma new file mode 100644 index 000000000000..a2690c937c76 --- /dev/null +++ b/psl/psl/tests/reformatter_multi_file/relation_single.reformatted/User.prisma @@ -0,0 +1,7 @@ +model User { + id Int @id @default(autoincrement()) + name String + age Float + post Post @relation(fields: [postId], references: [id]) + postId Int +} diff --git a/psl/psl/tests/reformatter_multi_file/relation_single.reformatted/db.prisma b/psl/psl/tests/reformatter_multi_file/relation_single.reformatted/db.prisma new file mode 
100644 index 000000000000..e4acdefaaa68 --- /dev/null +++ b/psl/psl/tests/reformatter_multi_file/relation_single.reformatted/db.prisma @@ -0,0 +1,9 @@ +datasource db { + provider = "sqlite" + url = env("DATABASE_URL") +} + +generator client { + provider = "prisma-client-js" + previewFeatures = ["prismaSchemaFolder"] +} diff --git a/psl/psl/tests/reformatter_multi_file/relation_single/Post.prisma b/psl/psl/tests/reformatter_multi_file/relation_single/Post.prisma new file mode 100644 index 000000000000..149498bbab37 --- /dev/null +++ b/psl/psl/tests/reformatter_multi_file/relation_single/Post.prisma @@ -0,0 +1,4 @@ +model Post { + id Int @id @default(autoincrement()) + title String +} diff --git a/psl/psl/tests/reformatter_multi_file/relation_single/User.prisma b/psl/psl/tests/reformatter_multi_file/relation_single/User.prisma new file mode 100644 index 000000000000..9d892ac43e6e --- /dev/null +++ b/psl/psl/tests/reformatter_multi_file/relation_single/User.prisma @@ -0,0 +1,6 @@ +model User { + id Int @id @default(autoincrement()) + name String + age Float + post Post +} diff --git a/psl/psl/tests/reformatter_multi_file/relation_single/db.prisma b/psl/psl/tests/reformatter_multi_file/relation_single/db.prisma new file mode 100644 index 000000000000..e4acdefaaa68 --- /dev/null +++ b/psl/psl/tests/reformatter_multi_file/relation_single/db.prisma @@ -0,0 +1,9 @@ +datasource db { + provider = "sqlite" + url = env("DATABASE_URL") +} + +generator client { + provider = "prisma-client-js" + previewFeatures = ["prismaSchemaFolder"] +} diff --git a/psl/psl/tests/validation/attributes/index/missing_index_warning/relation_mode_prisma/one_field_at_at_id.prisma b/psl/psl/tests/validation/attributes/index/missing_index_warning/relation_mode_prisma/one_field_at_at_id.prisma new file mode 100644 index 000000000000..5083b79c0d94 --- /dev/null +++ b/psl/psl/tests/validation/attributes/index/missing_index_warning/relation_mode_prisma/one_field_at_at_id.prisma @@ -0,0 +1,19 @@ +// no 
relation index validation warning on relationMode = "prisma" when a referenced field is already in @@id. + +datasource db { + provider = "mysql" + url = env("TEST_DATABASE_URL") + relationMode = "prisma" +} + +model SomeUser { + id Int @id + profile Profile? +} + +model Profile { + id Int + user SomeUser? @relation(fields: [id], references: [id]) + + @@id([id]) +} diff --git a/psl/psl/tests/validation/attributes/index/missing_index_warning/relation_mode_prisma/one_field_at_id.prisma b/psl/psl/tests/validation/attributes/index/missing_index_warning/relation_mode_prisma/one_field_at_id.prisma new file mode 100644 index 000000000000..57de9b5c6fa3 --- /dev/null +++ b/psl/psl/tests/validation/attributes/index/missing_index_warning/relation_mode_prisma/one_field_at_id.prisma @@ -0,0 +1,17 @@ +// no relation index validation warning on relationMode = "prisma" when a referenced field is already in @id. + +datasource db { + provider = "mysql" + url = env("TEST_DATABASE_URL") + relationMode = "prisma" +} + +model SomeUser { + id Int @id + profile Profile? +} + +model Profile { + id Int @id + user SomeUser? @relation(fields: [id], references: [id]) +} diff --git a/psl/psl/tests/validation/attributes/index/missing_index_warning/relation_mode_prisma/three_fields_mixed_id.prisma b/psl/psl/tests/validation/attributes/index/missing_index_warning/relation_mode_prisma/three_fields_mixed_id.prisma new file mode 100644 index 000000000000..10a2d3877fcb --- /dev/null +++ b/psl/psl/tests/validation/attributes/index/missing_index_warning/relation_mode_prisma/three_fields_mixed_id.prisma @@ -0,0 +1,32 @@ +// add missing relation index validation warning on relationMode = "prisma". 
+ +datasource db { + provider = "mysql" + url = env("TEST_DATABASE_URL") + relationMode = "prisma" +} + +model SomeUser { + idA Int + idB Int + idC Int + posts Post[] + + @@id([idA, idB, idC]) +} + +model Post { + userIdA Int @unique + userIdB Int + userIdC Int @unique + user SomeUser @relation(fields: [userIdA, userIdB, userIdC], references: [idA, idB, idC]) + + @@id([userIdA, userIdB]) +} + +// warning: With `relationMode = "prisma"`, no foreign keys are used, so relation fields will not benefit from the index usually created by the relational database under the hood. This can lead to poor performance when querying these fields. We recommend adding an index manually. Learn more at https://pris.ly/d/relation-mode-prisma-indexes"  +// --> schema.prisma:22 +//  |  +// 21 |  userIdC Int @unique +// 22 |  user SomeUser @relation(fields: [userIdA, userIdB, userIdC], references: [idA, idB, idC]) +//  |  diff --git a/psl/psl/tests/validation_tests.rs b/psl/psl/tests/validation_tests.rs index b6efaa4215c1..9859dbbc6bd3 100644 --- a/psl/psl/tests/validation_tests.rs +++ b/psl/psl/tests/validation_tests.rs @@ -10,7 +10,7 @@ fn parse_schema_fail_on_diagnostics(file: impl Into) -> Result Ok(schema), @@ -91,7 +91,7 @@ fn run_validation_test(test_file_path: &str) { return; } - panic_with_diff::panic_with_diff(&last_comment_contents, &diagnostics) + panic_with_diff::panic_with_diff(&last_comment_contents, &diagnostics, None) } include!(concat!(env!("OUT_DIR"), "/validation_tests.rs")); diff --git a/psl/schema-ast/src/ast/identifier.rs b/psl/schema-ast/src/ast/identifier.rs index d1c72732a54e..92eccefecf1a 100644 --- a/psl/schema-ast/src/ast/identifier.rs +++ b/psl/schema-ast/src/ast/identifier.rs @@ -1,4 +1,5 @@ use super::{Span, WithSpan}; +use diagnostics::FileId; /// An identifier. 
#[derive(Debug, Clone, PartialEq)] @@ -9,17 +10,17 @@ pub struct Identifier { pub span: Span, } -impl WithSpan for Identifier { - fn span(&self) -> Span { - self.span - } -} - -impl From> for Identifier { - fn from(pair: pest::iterators::Pair<'_, T>) -> Self { +impl Identifier { + pub(crate) fn new(pair: pest::iterators::Pair<'_, T>, file_id: FileId) -> Self { Identifier { name: pair.as_str().to_owned(), - span: pair.as_span().into(), + span: (file_id, pair.as_span()).into(), } } } + +impl WithSpan for Identifier { + fn span(&self) -> Span { + self.span + } +} diff --git a/psl/schema-ast/src/parser/parse_arguments.rs b/psl/schema-ast/src/parser/parse_arguments.rs index 67b5d930f83b..b2579c6e6cde 100644 --- a/psl/schema-ast/src/parser/parse_arguments.rs +++ b/psl/schema-ast/src/parser/parse_arguments.rs @@ -4,20 +4,25 @@ use super::{ Rule, }; use crate::ast; -use diagnostics::Diagnostics; +use diagnostics::{Diagnostics, FileId}; -pub(crate) fn parse_arguments_list(token: Pair<'_>, arguments: &mut ast::ArgumentsList, diagnostics: &mut Diagnostics) { +pub(crate) fn parse_arguments_list( + token: Pair<'_>, + arguments: &mut ast::ArgumentsList, + diagnostics: &mut Diagnostics, + file_id: FileId, +) { debug_assert_eq!(token.as_rule(), Rule::arguments_list); for current in token.into_inner() { let current_span = current.as_span(); match current.as_rule() { // This is a named arg. - Rule::named_argument => arguments.arguments.push(parse_named_arg(current, diagnostics)), + Rule::named_argument => arguments.arguments.push(parse_named_arg(current, diagnostics, file_id)), // This is an unnamed arg. Rule::expression => arguments.arguments.push(ast::Argument { name: None, - value: parse_expression(current, diagnostics), - span: ast::Span::from(current_span), + value: parse_expression(current, diagnostics, file_id), + span: ast::Span::from((file_id, current_span)), }), // This is an argument without a value. // It is not valid, but we parse it for autocompletion. 
@@ -26,17 +31,19 @@ pub(crate) fn parse_arguments_list(token: Pair<'_>, arguments: &mut ast::Argumen .into_inner() .find(|tok| tok.as_rule() == Rule::identifier) .unwrap(); - arguments.empty_arguments.push(ast::EmptyArgument { name: name.into() }) + arguments.empty_arguments.push(ast::EmptyArgument { + name: ast::Identifier::new(name, file_id), + }) } Rule::trailing_comma => { - arguments.trailing_comma = Some(current.as_span().into()); + arguments.trailing_comma = Some((file_id, current.as_span()).into()); } _ => parsing_catch_all(¤t, "attribute arguments"), } } } -fn parse_named_arg(pair: Pair<'_>, diagnostics: &mut Diagnostics) -> ast::Argument { +fn parse_named_arg(pair: Pair<'_>, diagnostics: &mut Diagnostics, file_id: FileId) -> ast::Argument { debug_assert_eq!(pair.as_rule(), Rule::named_argument); let mut name: Option = None; let mut argument: Option = None; @@ -44,8 +51,8 @@ fn parse_named_arg(pair: Pair<'_>, diagnostics: &mut Diagnostics) -> ast::Argume for current in pair.into_inner() { match current.as_rule() { - Rule::identifier => name = Some(current.into()), - Rule::expression => argument = Some(parse_expression(current, diagnostics)), + Rule::identifier => name = Some(ast::Identifier::new(current, file_id)), + Rule::expression => argument = Some(parse_expression(current, diagnostics, file_id)), _ => parsing_catch_all(¤t, "attribute argument"), } } @@ -54,7 +61,7 @@ fn parse_named_arg(pair: Pair<'_>, diagnostics: &mut Diagnostics) -> ast::Argume (Some(name), Some(value)) => ast::Argument { name: Some(name), value, - span: ast::Span::from(pair_span), + span: ast::Span::from((file_id, pair_span)), }, _ => panic!("Encountered impossible attribute arg during parsing: {pair_str:?}"), } diff --git a/psl/schema-ast/src/parser/parse_attribute.rs b/psl/schema-ast/src/parser/parse_attribute.rs index 16983303097b..6420d796ad6b 100644 --- a/psl/schema-ast/src/parser/parse_attribute.rs +++ b/psl/schema-ast/src/parser/parse_attribute.rs @@ -3,16 +3,21 @@ use 
super::{ Rule, }; use crate::{ast::*, parser::parse_arguments::parse_arguments_list}; +use diagnostics::FileId; -pub(crate) fn parse_attribute(pair: Pair<'_>, diagnostics: &mut diagnostics::Diagnostics) -> Attribute { - let span = Span::from(pair.as_span()); +pub(crate) fn parse_attribute( + pair: Pair<'_>, + diagnostics: &mut diagnostics::Diagnostics, + file_id: FileId, +) -> Attribute { + let span = Span::from((file_id, pair.as_span())); let mut name = None; let mut arguments: ArgumentsList = ArgumentsList::default(); for current in pair.into_inner() { match current.as_rule() { - Rule::path => name = Some(current.into()), - Rule::arguments_list => parse_arguments_list(current, &mut arguments, diagnostics), + Rule::path => name = Some(Identifier::new(current, file_id)), + Rule::arguments_list => parse_arguments_list(current, &mut arguments, diagnostics, file_id), _ => parsing_catch_all(¤t, "attribute"), } } diff --git a/psl/schema-ast/src/parser/parse_composite_type.rs b/psl/schema-ast/src/parser/parse_composite_type.rs index 6ada40e61e16..28873fbf701f 100644 --- a/psl/schema-ast/src/parser/parse_composite_type.rs +++ b/psl/schema-ast/src/parser/parse_composite_type.rs @@ -6,12 +6,13 @@ use super::{ Rule, }; use crate::ast; -use diagnostics::{DatamodelError, Diagnostics, Span}; +use diagnostics::{DatamodelError, Diagnostics, FileId, Span}; pub(crate) fn parse_composite_type( pair: Pair<'_>, doc_comment: Option>, diagnostics: &mut Diagnostics, + file_id: FileId, ) -> ast::CompositeType { let pair_span = pair.as_span(); let mut name: Option = None; @@ -22,53 +23,53 @@ pub(crate) fn parse_composite_type( match current.as_rule() { Rule::BLOCK_OPEN | Rule::BLOCK_CLOSE => {} Rule::TYPE_KEYWORD => (), - Rule::identifier => name = Some(current.into()), + Rule::identifier => name = Some(ast::Identifier::new(current, file_id)), Rule::model_contents => { let mut pending_field_comment: Option> = None; - inner_span = Some(current.as_span().into()); + inner_span = 
Some((file_id, current.as_span()).into()); for item in current.into_inner() { let current_span = item.as_span(); match item.as_rule() { Rule::block_attribute => { - let attr = parse_attribute(item, diagnostics); + let attr = parse_attribute(item, diagnostics, file_id); let err = match attr.name.name.as_str() { "map" => { DatamodelError::new_validation_error( "The name of a composite type is not persisted in the database, therefore it does not need a mapped database name.", - current_span.into(), + (file_id, current_span).into(), ) } "unique" => { DatamodelError::new_validation_error( "A unique constraint should be defined in the model containing the embed.", - current_span.into(), + (file_id, current_span).into(), ) } "index" => { DatamodelError::new_validation_error( "An index should be defined in the model containing the embed.", - current_span.into(), + (file_id, current_span).into(), ) } "fulltext" => { DatamodelError::new_validation_error( "A fulltext index should be defined in the model containing the embed.", - current_span.into(), + (file_id, current_span).into(), ) } "id" => { DatamodelError::new_validation_error( "A composite type cannot define an id.", - current_span.into(), + (file_id, current_span).into(), ) } _ => { DatamodelError::new_validation_error( "A composite type cannot have block-level attributes.", - current_span.into(), + (file_id, current_span).into(), ) } }; @@ -81,6 +82,7 @@ pub(crate) fn parse_composite_type( item, pending_field_comment.take(), diagnostics, + file_id, ) { Ok(field) => { for attr in field.attributes.iter() { @@ -92,7 +94,7 @@ pub(crate) fn parse_composite_type( "Defining `@{name}` attribute for a field in a composite type is not allowed." 
); - DatamodelError::new_validation_error(&msg, current_span.into()) + DatamodelError::new_validation_error(&msg, (file_id, current_span).into()) } _ => continue, }; @@ -107,7 +109,7 @@ pub(crate) fn parse_composite_type( Rule::comment_block => pending_field_comment = Some(item), Rule::BLOCK_LEVEL_CATCH_ALL => diagnostics.push_error(DatamodelError::new_validation_error( "This line is not a valid field or attribute definition.", - item.as_span().into(), + (file_id, item.as_span()).into(), )), _ => parsing_catch_all(&item, "composite type"), } @@ -122,7 +124,7 @@ pub(crate) fn parse_composite_type( name, fields, documentation: doc_comment.and_then(parse_comment_block), - span: ast::Span::from(pair_span), + span: ast::Span::from((file_id, pair_span)), inner_span: inner_span.unwrap(), }, _ => panic!("Encountered impossible model declaration during parsing",), diff --git a/psl/schema-ast/src/parser/parse_enum.rs b/psl/schema-ast/src/parser/parse_enum.rs index 5e5109de1a91..2dc1f8e7e3fd 100644 --- a/psl/schema-ast/src/parser/parse_enum.rs +++ b/psl/schema-ast/src/parser/parse_enum.rs @@ -4,10 +4,15 @@ use super::{ parse_comments::*, Rule, }; -use crate::ast::{Attribute, Comment, Enum, EnumValue, Identifier}; -use diagnostics::{DatamodelError, Diagnostics, Span}; +use crate::ast::{self, Attribute, Comment, Enum, EnumValue, Identifier}; +use diagnostics::{DatamodelError, Diagnostics, FileId, Span}; -pub fn parse_enum(pair: Pair<'_>, doc_comment: Option>, diagnostics: &mut Diagnostics) -> Enum { +pub fn parse_enum( + pair: Pair<'_>, + doc_comment: Option>, + diagnostics: &mut Diagnostics, + file_id: FileId, +) -> Enum { let comment: Option = doc_comment.and_then(parse_comment_block); let pair_span = pair.as_span(); let mut name: Option = None; @@ -19,16 +24,16 @@ pub fn parse_enum(pair: Pair<'_>, doc_comment: Option>, diagnostics: &m for current in pairs { match current.as_rule() { Rule::BLOCK_OPEN | Rule::BLOCK_CLOSE | Rule::ENUM_KEYWORD => {} - Rule::identifier => name = 
Some(current.into()), + Rule::identifier => name = Some(ast::Identifier::new(current, file_id)), Rule::enum_contents => { let mut pending_value_comment = None; - inner_span = Some(current.as_span().into()); + inner_span = Some((file_id, current.as_span()).into()); for item in current.into_inner() { match item.as_rule() { - Rule::block_attribute => attributes.push(parse_attribute(item, diagnostics)), + Rule::block_attribute => attributes.push(parse_attribute(item, diagnostics, file_id)), Rule::enum_value_declaration => { - match parse_enum_value(item, pending_value_comment.take(), diagnostics) { + match parse_enum_value(item, pending_value_comment.take(), diagnostics, file_id) { Ok(enum_value) => values.push(enum_value), Err(err) => diagnostics.push_error(err), } @@ -36,7 +41,7 @@ pub fn parse_enum(pair: Pair<'_>, doc_comment: Option>, diagnostics: &m Rule::comment_block => pending_value_comment = Some(item), Rule::BLOCK_LEVEL_CATCH_ALL => diagnostics.push_error(DatamodelError::new_validation_error( "This line is not an enum value definition.", - item.as_span().into(), + (file_id, item.as_span()).into(), )), _ => parsing_catch_all(&item, "enum"), } @@ -52,7 +57,7 @@ pub fn parse_enum(pair: Pair<'_>, doc_comment: Option>, diagnostics: &m values, attributes, documentation: comment, - span: Span::from(pair_span), + span: Span::from((file_id, pair_span)), inner_span: inner_span.unwrap(), }, _ => panic!("Encountered impossible enum declaration during parsing, name is missing.",), @@ -63,6 +68,7 @@ fn parse_enum_value( pair: Pair<'_>, doc_comment: Option>, diagnostics: &mut Diagnostics, + file_id: FileId, ) -> Result { let (pair_str, pair_span) = (pair.as_str(), pair.as_span()); let mut name: Option = None; @@ -71,8 +77,8 @@ fn parse_enum_value( for current in pair.into_inner() { match current.as_rule() { - Rule::identifier => name = Some(current.into()), - Rule::field_attribute => attributes.push(parse_attribute(current, diagnostics)), + Rule::identifier => name = 
Some(ast::Identifier::new(current, file_id)), + Rule::field_attribute => attributes.push(parse_attribute(current, diagnostics, file_id)), Rule::trailing_comment => { comment = match (comment, parse_trailing_comment(current)) { (None, a) | (a, None) => a, @@ -93,7 +99,7 @@ fn parse_enum_value( name, attributes, documentation: comment, - span: Span::from(pair_span), + span: Span::from((file_id, pair_span)), }), _ => panic!("Encountered impossible enum value declaration during parsing, name is missing: {pair_str:?}",), } diff --git a/psl/schema-ast/src/parser/parse_expression.rs b/psl/schema-ast/src/parser/parse_expression.rs index c5a9b68b17fc..f252bbbc41bc 100644 --- a/psl/schema-ast/src/parser/parse_expression.rs +++ b/psl/schema-ast/src/parser/parse_expression.rs @@ -4,17 +4,21 @@ use super::{ Rule, }; use crate::ast::*; -use diagnostics::{DatamodelError, Diagnostics}; +use diagnostics::{DatamodelError, Diagnostics, FileId}; -pub(crate) fn parse_expression(token: Pair<'_>, diagnostics: &mut diagnostics::Diagnostics) -> Expression { +pub(crate) fn parse_expression( + token: Pair<'_>, + diagnostics: &mut diagnostics::Diagnostics, + file_id: FileId, +) -> Expression { let first_child = token.into_inner().next().unwrap(); - let span = Span::from(first_child.as_span()); + let span = Span::from((file_id, first_child.as_span())); match first_child.as_rule() { Rule::numeric_literal => Expression::NumericValue(first_child.as_str().to_string(), span), - Rule::string_literal => Expression::StringValue(parse_string_literal(first_child, diagnostics), span), + Rule::string_literal => Expression::StringValue(parse_string_literal(first_child, diagnostics, file_id), span), Rule::path => Expression::ConstantValue(first_child.as_str().to_string(), span), - Rule::function_call => parse_function(first_child, diagnostics), - Rule::array_expression => parse_array(first_child, diagnostics), + Rule::function_call => parse_function(first_child, diagnostics, file_id), + 
Rule::array_expression => parse_array(first_child, diagnostics, file_id), _ => unreachable!( "Encountered impossible literal during parsing: {:?}", first_child.tokens() @@ -22,7 +26,7 @@ pub(crate) fn parse_expression(token: Pair<'_>, diagnostics: &mut diagnostics::D } } -fn parse_function(pair: Pair<'_>, diagnostics: &mut Diagnostics) -> Expression { +fn parse_function(pair: Pair<'_>, diagnostics: &mut Diagnostics, file_id: FileId) -> Expression { let mut name: Option = None; let mut arguments = ArgumentsList::default(); let (pair_str, span) = (pair.as_str(), pair.as_span()); @@ -30,32 +34,32 @@ fn parse_function(pair: Pair<'_>, diagnostics: &mut Diagnostics) -> Expression { for current in pair.into_inner() { match current.as_rule() { Rule::path => name = Some(current.as_str().to_string()), - Rule::arguments_list => parse_arguments_list(current, &mut arguments, diagnostics), + Rule::arguments_list => parse_arguments_list(current, &mut arguments, diagnostics, file_id), _ => parsing_catch_all(¤t, "function"), } } match name { - Some(name) => Expression::Function(name, arguments, Span::from(span)), + Some(name) => Expression::Function(name, arguments, Span::from((file_id, span))), _ => unreachable!("Encountered impossible function during parsing: {:?}", pair_str), } } -fn parse_array(token: Pair<'_>, diagnostics: &mut Diagnostics) -> Expression { +fn parse_array(token: Pair<'_>, diagnostics: &mut Diagnostics, file_id: FileId) -> Expression { let mut elements: Vec = vec![]; let span = token.as_span(); for current in token.into_inner() { match current.as_rule() { - Rule::expression => elements.push(parse_expression(current, diagnostics)), + Rule::expression => elements.push(parse_expression(current, diagnostics, file_id)), _ => parsing_catch_all(¤t, "array"), } } - Expression::Array(elements, Span::from(span)) + Expression::Array(elements, Span::from((file_id, span))) } -fn parse_string_literal(token: Pair<'_>, diagnostics: &mut Diagnostics) -> String { +fn 
parse_string_literal(token: Pair<'_>, diagnostics: &mut Diagnostics, file_id: FileId) -> String { assert!(token.as_rule() == Rule::string_literal); let contents = token.clone().into_inner().next().unwrap(); let contents_str = contents.as_str(); @@ -98,6 +102,7 @@ fn parse_string_literal(token: Pair<'_>, diagnostics: &mut Diagnostics) -> Strin &contents_str[start..], contents.as_span().start() + start, diagnostics, + file_id, ); if let Some(char) = char { @@ -109,7 +114,7 @@ fn parse_string_literal(token: Pair<'_>, diagnostics: &mut Diagnostics) -> Strin } } (_, c) => { - let mut final_span: crate::ast::Span = contents.as_span().into(); + let mut final_span: crate::ast::Span = (file_id, contents.as_span()).into(); final_span.start += start; final_span.end = final_span.start + 1 + c.len_utf8(); diagnostics.push_error(DatamodelError::new_static( @@ -132,11 +137,13 @@ fn try_parse_unicode_codepoint( slice: &str, slice_offset: usize, diagnostics: &mut Diagnostics, + file_id: FileId, ) -> (usize, Option) { let unicode_sequence_error = |consumed| { let span = crate::ast::Span { start: slice_offset, end: (slice_offset + slice.len()).min(slice_offset + consumed), + file_id, }; DatamodelError::new_static("Invalid unicode escape sequence.", span) }; diff --git a/psl/schema-ast/src/parser/parse_field.rs b/psl/schema-ast/src/parser/parse_field.rs index 6f11da80aaf5..488a315b66b5 100644 --- a/psl/schema-ast/src/parser/parse_field.rs +++ b/psl/schema-ast/src/parser/parse_field.rs @@ -5,8 +5,8 @@ use super::{ parse_types::parse_field_type, Rule, }; -use crate::ast::*; -use diagnostics::{DatamodelError, Diagnostics}; +use crate::ast::{self, *}; +use diagnostics::{DatamodelError, Diagnostics, FileId}; pub(crate) fn parse_field( model_name: &str, @@ -14,6 +14,7 @@ pub(crate) fn parse_field( pair: Pair<'_>, block_comment: Option>, diagnostics: &mut Diagnostics, + file_id: FileId, ) -> Result { let pair_span = pair.as_span(); let mut name: Option = None; @@ -23,15 +24,15 @@ pub(crate) 
fn parse_field( for current in pair.into_inner() { match current.as_rule() { - Rule::identifier => name = Some(current.into()), - Rule::field_type => field_type = Some(parse_field_type(current, diagnostics)?), + Rule::identifier => name = Some(ast::Identifier::new(current, file_id)), + Rule::field_type => field_type = Some(parse_field_type(current, diagnostics, file_id)?), Rule::LEGACY_COLON => { return Err(DatamodelError::new_legacy_parser_error( "Field declarations don't require a `:`.", - current.as_span().into(), + (file_id, current.as_span()).into(), )) } - Rule::field_attribute => attributes.push(parse_attribute(current, diagnostics)), + Rule::field_attribute => attributes.push(parse_attribute(current, diagnostics, file_id)), Rule::trailing_comment => { comment = match (comment, parse_trailing_comment(current)) { (c, None) | (None, c) => c, @@ -51,13 +52,13 @@ pub(crate) fn parse_field( arity, attributes, documentation: comment, - span: Span::from(pair_span), + span: Span::from((file_id, pair_span)), }), _ => Err(DatamodelError::new_model_validation_error( "This field declaration is invalid. 
It is either missing a name or a type.", container_type, model_name, - pair_span.into(), + (file_id, pair_span).into(), )), } } diff --git a/psl/schema-ast/src/parser/parse_model.rs b/psl/schema-ast/src/parser/parse_model.rs index f2aec884d61f..549ba52c5320 100644 --- a/psl/schema-ast/src/parser/parse_model.rs +++ b/psl/schema-ast/src/parser/parse_model.rs @@ -5,10 +5,15 @@ use super::{ parse_field::parse_field, Rule, }; -use crate::ast::*; -use diagnostics::{DatamodelError, Diagnostics}; +use crate::ast::{self, *}; +use diagnostics::{DatamodelError, Diagnostics, FileId}; -pub(crate) fn parse_model(pair: Pair<'_>, doc_comment: Option>, diagnostics: &mut Diagnostics) -> Model { +pub(crate) fn parse_model( + pair: Pair<'_>, + doc_comment: Option>, + diagnostics: &mut Diagnostics, + file_id: FileId, +) -> Model { let pair_span = pair.as_span(); let mut name: Option = None; let mut attributes: Vec = Vec::new(); @@ -17,19 +22,20 @@ pub(crate) fn parse_model(pair: Pair<'_>, doc_comment: Option>, diagnos for current in pair.into_inner() { match current.as_rule() { Rule::MODEL_KEYWORD | Rule::BLOCK_OPEN | Rule::BLOCK_CLOSE => {} - Rule::identifier => name = Some(current.into()), + Rule::identifier => name = Some(ast::Identifier::new(current, file_id)), Rule::model_contents => { let mut pending_field_comment: Option> = None; for item in current.into_inner() { match item.as_rule() { - Rule::block_attribute => attributes.push(parse_attribute(item, diagnostics)), + Rule::block_attribute => attributes.push(parse_attribute(item, diagnostics, file_id)), Rule::field_declaration => match parse_field( &name.as_ref().unwrap().name, "model", item, pending_field_comment.take(), diagnostics, + file_id, ) { Ok(field) => fields.push(field), Err(err) => diagnostics.push_error(err), @@ -37,7 +43,7 @@ pub(crate) fn parse_model(pair: Pair<'_>, doc_comment: Option>, diagnos Rule::comment_block => pending_field_comment = Some(item), Rule::BLOCK_LEVEL_CATCH_ALL => 
diagnostics.push_error(DatamodelError::new_validation_error( "This line is not a valid field or attribute definition.", - item.as_span().into(), + (file_id, item.as_span()).into(), )), _ => parsing_catch_all(&item, "model"), } @@ -54,7 +60,7 @@ pub(crate) fn parse_model(pair: Pair<'_>, doc_comment: Option>, diagnos attributes, documentation: doc_comment.and_then(parse_comment_block), is_view: false, - span: Span::from(pair_span), + span: Span::from((file_id, pair_span)), }, _ => panic!("Encountered impossible model declaration during parsing",), } diff --git a/psl/schema-ast/src/parser/parse_schema.rs b/psl/schema-ast/src/parser/parse_schema.rs index 6782caab9e44..eb26a48478b0 100644 --- a/psl/schema-ast/src/parser/parse_schema.rs +++ b/psl/schema-ast/src/parser/parse_schema.rs @@ -3,11 +3,11 @@ use super::{ parse_source_and_generator::parse_config_block, parse_view::parse_view, PrismaDatamodelParser, Rule, }; use crate::ast::*; -use diagnostics::{DatamodelError, Diagnostics}; +use diagnostics::{DatamodelError, Diagnostics, FileId}; use pest::Parser; /// Parse a PSL string and return its AST. 
-pub fn parse_schema(datamodel_string: &str, diagnostics: &mut Diagnostics) -> SchemaAst { +pub fn parse_schema(datamodel_string: &str, diagnostics: &mut Diagnostics, file_id: FileId) -> SchemaAst { let datamodel_result = PrismaDatamodelParser::parse(Rule::schema, datamodel_string); match datamodel_result { @@ -24,26 +24,26 @@ pub fn parse_schema(datamodel_string: &str, diagnostics: &mut Diagnostics) -> Sc match keyword.as_rule() { Rule::TYPE_KEYWORD => { - top_level_definitions.push(Top::CompositeType(parse_composite_type(current, pending_block_comment.take(), diagnostics))) + top_level_definitions.push(Top::CompositeType(parse_composite_type(current, pending_block_comment.take(), diagnostics, file_id))) } Rule::MODEL_KEYWORD => { - top_level_definitions.push(Top::Model(parse_model(current, pending_block_comment.take(), diagnostics))) + top_level_definitions.push(Top::Model(parse_model(current, pending_block_comment.take(), diagnostics, file_id))) } Rule::VIEW_KEYWORD => { - top_level_definitions.push(Top::Model(parse_view(current, pending_block_comment.take(), diagnostics))) + top_level_definitions.push(Top::Model(parse_view(current, pending_block_comment.take(), diagnostics, file_id))) } _ => unreachable!(), } }, - Rule::enum_declaration => top_level_definitions.push(Top::Enum(parse_enum(current,pending_block_comment.take(), diagnostics))), + Rule::enum_declaration => top_level_definitions.push(Top::Enum(parse_enum(current,pending_block_comment.take(), diagnostics, file_id))), Rule::config_block => { - top_level_definitions.push(parse_config_block(current, diagnostics)); + top_level_definitions.push(parse_config_block(current, diagnostics, file_id)); }, Rule::type_alias => { let error = DatamodelError::new_validation_error( "Invalid type definition. 
Please check the documentation in https://pris.ly/d/composite-types", - current.as_span().into() + (file_id, current.as_span()).into() ); diagnostics.push_error(error); @@ -62,12 +62,12 @@ pub fn parse_schema(datamodel_string: &str, diagnostics: &mut Diagnostics) -> Sc Rule::EOI => {} Rule::CATCH_ALL => diagnostics.push_error(DatamodelError::new_validation_error( "This line is invalid. It does not start with any known Prisma schema keyword.", - current.as_span().into(), + (file_id, current.as_span()).into(), )), // TODO: Add view when we want it to be more visible as a feature. Rule::arbitrary_block => diagnostics.push_error(DatamodelError::new_validation_error( "This block is invalid. It does not start with any known Prisma schema keyword. Valid keywords include \'model\', \'enum\', \'type\', \'datasource\' and \'generator\'.", - current.as_span().into(), + (file_id, current.as_span()).into(), )), Rule::empty_lines => (), _ => unreachable!(), @@ -89,7 +89,7 @@ pub fn parse_schema(datamodel_string: &str, diagnostics: &mut Diagnostics) -> Sc _ => panic!("Could not construct parsing error. 
This should never happend."), }; - diagnostics.push_error(DatamodelError::new_parser_error(expected, location.into())); + diagnostics.push_error(DatamodelError::new_parser_error(expected, (file_id, location).into())); SchemaAst { tops: Vec::new() } } diff --git a/psl/schema-ast/src/parser/parse_source_and_generator.rs b/psl/schema-ast/src/parser/parse_source_and_generator.rs index d5abb6935fca..4c8285e0b5f6 100644 --- a/psl/schema-ast/src/parser/parse_source_and_generator.rs +++ b/psl/schema-ast/src/parser/parse_source_and_generator.rs @@ -4,11 +4,10 @@ use super::{ parse_expression::parse_expression, Rule, }; -use crate::ast::*; -use diagnostics::{DatamodelError, Diagnostics}; +use crate::ast::{self, *}; +use diagnostics::{DatamodelError, Diagnostics, FileId}; -#[track_caller] -pub(crate) fn parse_config_block(pair: Pair<'_>, diagnostics: &mut Diagnostics) -> Top { +pub(crate) fn parse_config_block(pair: Pair<'_>, diagnostics: &mut Diagnostics, file_id: FileId) -> Top { let pair_span = pair.as_span(); let mut name: Option = None; let mut properties = Vec::new(); @@ -19,10 +18,10 @@ pub(crate) fn parse_config_block(pair: Pair<'_>, diagnostics: &mut Diagnostics) for current in pair.into_inner() { match current.as_rule() { Rule::config_contents => { - inner_span = Some(current.as_span().into()); + inner_span = Some((file_id, current.as_span()).into()); for item in current.into_inner() { match item.as_rule() { - Rule::key_value => properties.push(parse_key_value(item, diagnostics)), + Rule::key_value => properties.push(parse_key_value(item, diagnostics, file_id)), Rule::comment_block => comment = parse_comment_block(item), Rule::BLOCK_LEVEL_CATCH_ALL => { let msg = format!( @@ -30,14 +29,14 @@ pub(crate) fn parse_config_block(pair: Pair<'_>, diagnostics: &mut Diagnostics) kw.unwrap_or("configuration block") ); - let err = DatamodelError::new_validation_error(&msg, item.as_span().into()); + let err = DatamodelError::new_validation_error(&msg, (file_id, 
item.as_span()).into()); diagnostics.push_error(err); } _ => parsing_catch_all(&item, "source"), } } } - Rule::identifier => name = Some(current.into()), + Rule::identifier => name = Some(ast::Identifier::new(current, file_id)), Rule::DATASOURCE_KEYWORD | Rule::GENERATOR_KEYWORD => kw = Some(current.as_str()), Rule::BLOCK_OPEN | Rule::BLOCK_CLOSE => {} @@ -50,28 +49,28 @@ pub(crate) fn parse_config_block(pair: Pair<'_>, diagnostics: &mut Diagnostics) name: name.unwrap(), properties, documentation: comment, - span: Span::from(pair_span), + span: Span::from((file_id, pair_span)), inner_span: inner_span.unwrap(), }), Some("generator") => Top::Generator(GeneratorConfig { name: name.unwrap(), properties, documentation: comment, - span: Span::from(pair_span), + span: Span::from((file_id, pair_span)), }), _ => unreachable!(), } } -fn parse_key_value(pair: Pair<'_>, diagnostics: &mut Diagnostics) -> ConfigBlockProperty { +fn parse_key_value(pair: Pair<'_>, diagnostics: &mut Diagnostics, file_id: FileId) -> ConfigBlockProperty { let mut name: Option = None; let mut value: Option = None; let (pair_span, pair_str) = (pair.as_span(), pair.as_str()); for current in pair.into_inner() { match current.as_rule() { - Rule::identifier => name = Some(current.into()), - Rule::expression => value = Some(parse_expression(current, diagnostics)), + Rule::identifier => name = Some(ast::Identifier::new(current, file_id)), + Rule::expression => value = Some(parse_expression(current, diagnostics, file_id)), Rule::trailing_comment => (), _ => unreachable!( "Encountered impossible source property declaration during parsing: {:?}", @@ -84,7 +83,7 @@ fn parse_key_value(pair: Pair<'_>, diagnostics: &mut Diagnostics) -> ConfigBlock (Some(name), value) => ConfigBlockProperty { name, value, - span: Span::from(pair_span), + span: Span::from((file_id, pair_span)), }, _ => unreachable!( "Encountered impossible source property declaration during parsing: {:?}", diff --git 
a/psl/schema-ast/src/parser/parse_types.rs b/psl/schema-ast/src/parser/parse_types.rs index 7629ae636f82..d22cfe986fd7 100644 --- a/psl/schema-ast/src/parser/parse_types.rs +++ b/psl/schema-ast/src/parser/parse_types.rs @@ -1,47 +1,48 @@ use super::{helpers::Pair, Rule}; use crate::{ast::*, parser::parse_expression::parse_expression}; -use diagnostics::{DatamodelError, Diagnostics}; +use diagnostics::{DatamodelError, Diagnostics, FileId}; pub fn parse_field_type( pair: Pair<'_>, diagnostics: &mut Diagnostics, + file_id: FileId, ) -> Result<(FieldArity, FieldType), DatamodelError> { assert!(pair.as_rule() == Rule::field_type); let current = pair.into_inner().next().unwrap(); match current.as_rule() { Rule::optional_type => Ok(( FieldArity::Optional, - parse_base_type(current.into_inner().next().unwrap(), diagnostics), + parse_base_type(current.into_inner().next().unwrap(), diagnostics, file_id), )), - Rule::base_type => Ok((FieldArity::Required, parse_base_type(current, diagnostics))), + Rule::base_type => Ok((FieldArity::Required, parse_base_type(current, diagnostics, file_id))), Rule::list_type => Ok(( FieldArity::List, - parse_base_type(current.into_inner().next().unwrap(), diagnostics), + parse_base_type(current.into_inner().next().unwrap(), diagnostics, file_id), )), Rule::legacy_required_type => Err(DatamodelError::new_legacy_parser_error( "Fields are required by default, `!` is no longer required.", - current.as_span().into(), + (file_id, current.as_span()).into(), )), Rule::legacy_list_type => Err(DatamodelError::new_legacy_parser_error( "To specify a list, please use `Type[]` instead of `[Type]`.", - current.as_span().into(), + (file_id, current.as_span()).into(), )), Rule::unsupported_optional_list_type => Err(DatamodelError::new_legacy_parser_error( "Optional lists are not supported. 
Use either `Type[]` or `Type?`.", - current.as_span().into(), + (file_id, current.as_span()).into(), )), _ => unreachable!("Encountered impossible field during parsing: {:?}", current.tokens()), } } -fn parse_base_type(pair: Pair<'_>, diagnostics: &mut Diagnostics) -> FieldType { +fn parse_base_type(pair: Pair<'_>, diagnostics: &mut Diagnostics, file_id: FileId) -> FieldType { let current = pair.into_inner().next().unwrap(); match current.as_rule() { Rule::identifier => FieldType::Supported(Identifier { name: current.as_str().to_string(), - span: Span::from(current.as_span()), + span: Span::from((file_id, current.as_span())), }), - Rule::unsupported_type => match parse_expression(current, diagnostics) { + Rule::unsupported_type => match parse_expression(current, diagnostics, file_id) { Expression::StringValue(lit, span) => FieldType::Unsupported(lit, span), _ => unreachable!("Encountered impossible type during parsing"), }, diff --git a/psl/schema-ast/src/parser/parse_view.rs b/psl/schema-ast/src/parser/parse_view.rs index 38066067b7a8..546c6e775c67 100644 --- a/psl/schema-ast/src/parser/parse_view.rs +++ b/psl/schema-ast/src/parser/parse_view.rs @@ -6,9 +6,14 @@ use super::{ Rule, }; use crate::ast::{self, Attribute}; -use diagnostics::{DatamodelError, Diagnostics}; +use diagnostics::{DatamodelError, Diagnostics, FileId}; -pub(crate) fn parse_view(pair: Pair<'_>, doc_comment: Option>, diagnostics: &mut Diagnostics) -> ast::Model { +pub(crate) fn parse_view( + pair: Pair<'_>, + doc_comment: Option>, + diagnostics: &mut Diagnostics, + file_id: FileId, +) -> ast::Model { let pair_span = pair.as_span(); let mut name: Option = None; let mut fields: Vec = vec![]; @@ -17,19 +22,20 @@ pub(crate) fn parse_view(pair: Pair<'_>, doc_comment: Option>, diagnost for current in pair.into_inner() { match current.as_rule() { Rule::VIEW_KEYWORD | Rule::BLOCK_OPEN | Rule::BLOCK_CLOSE => (), - Rule::identifier => name = Some(current.into()), + Rule::identifier => name = 
Some(ast::Identifier::new(current, file_id)), Rule::model_contents => { let mut pending_field_comment: Option> = None; for item in current.into_inner() { match item.as_rule() { - Rule::block_attribute => attributes.push(parse_attribute(item, diagnostics)), + Rule::block_attribute => attributes.push(parse_attribute(item, diagnostics, file_id)), Rule::field_declaration => match parse_field( &name.as_ref().unwrap().name, "view", item, pending_field_comment.take(), diagnostics, + file_id, ) { Ok(field) => fields.push(field), Err(err) => diagnostics.push_error(err), @@ -37,7 +43,7 @@ pub(crate) fn parse_view(pair: Pair<'_>, doc_comment: Option>, diagnost Rule::comment_block => pending_field_comment = Some(item), Rule::BLOCK_LEVEL_CATCH_ALL => diagnostics.push_error(DatamodelError::new_validation_error( "This line is not a valid field or attribute definition.", - item.as_span().into(), + (file_id, item.as_span()).into(), )), _ => parsing_catch_all(&item, "view"), } @@ -54,7 +60,7 @@ pub(crate) fn parse_view(pair: Pair<'_>, doc_comment: Option>, diagnost attributes, documentation: doc_comment.and_then(parse_comment_block), is_view: true, - span: ast::Span::from(pair_span), + span: ast::Span::from((file_id, pair_span)), }, _ => panic!("Encountered impossible model declaration during parsing",), } diff --git a/psl/schema-ast/src/reformat.rs b/psl/schema-ast/src/reformat.rs index 853258226e2f..3492bb0524f0 100644 --- a/psl/schema-ast/src/reformat.rs +++ b/psl/schema-ast/src/reformat.rs @@ -19,6 +19,8 @@ pub fn reformat(input: &str, indent_width: usize) -> Option { renderer.stream.push('\n'); } + // TODO: why do we need to use a `Some` here? + // Also: if we really want to return an `Option`, why do unwrap in `ast.next()`? 
Some(renderer.stream) } diff --git a/psl/schema-ast/src/source_file.rs b/psl/schema-ast/src/source_file.rs index 3d7deafd3a24..63329ad93c39 100644 --- a/psl/schema-ast/src/source_file.rs +++ b/psl/schema-ast/src/source_file.rs @@ -6,6 +6,14 @@ pub struct SourceFile { contents: Contents, } +impl Default for SourceFile { + fn default() -> Self { + Self { + contents: Contents::Static(""), + } + } +} + impl SourceFile { pub fn new_static(content: &'static str) -> Self { Self { diff --git a/quaint/src/connector/sqlite/native/conversion.rs b/quaint/src/connector/sqlite/native/conversion.rs index fced37abca4c..3113d3e81a98 100644 --- a/quaint/src/connector/sqlite/native/conversion.rs +++ b/quaint/src/connector/sqlite/native/conversion.rs @@ -191,9 +191,10 @@ impl<'a> GetRow for SqliteRow<'a> { } } ValueRef::Real(f) if column.is_real() => { - use bigdecimal::{BigDecimal, FromPrimitive}; + use bigdecimal::BigDecimal; + use std::str::FromStr; - Value::numeric(BigDecimal::from_f64(f).unwrap()) + Value::numeric(BigDecimal::from_str(&f.to_string()).unwrap()) } ValueRef::Real(f) => Value::double(f), ValueRef::Text(bytes) if column.is_datetime() => { diff --git a/query-engine/connector-test-kit-rs/qe-setup/Cargo.toml b/query-engine/connector-test-kit-rs/qe-setup/Cargo.toml index 322c9559c6f8..b3b75f294fcc 100644 --- a/query-engine/connector-test-kit-rs/qe-setup/Cargo.toml +++ b/query-engine/connector-test-kit-rs/qe-setup/Cargo.toml @@ -11,6 +11,9 @@ schema-core = { path = "../../../schema-engine/core" } sql-schema-connector = { path = "../../../schema-engine/connectors/sql-schema-connector" } test-setup = { path = "../../../libs/test-setup" } enumflags2.workspace = true +serde.workspace = true +serde_json.workspace = true +tokio.workspace = true connection-string = "*" mongodb = "2.8.0" diff --git a/query-engine/connector-test-kit-rs/qe-setup/src/driver_adapters.rs b/query-engine/connector-test-kit-rs/qe-setup/src/driver_adapters.rs new file mode 100644 index 
000000000000..f6faa46d33f0 --- /dev/null +++ b/query-engine/connector-test-kit-rs/qe-setup/src/driver_adapters.rs @@ -0,0 +1,43 @@ +use serde::{Deserialize, Serialize}; +use std::fmt::{Display, Formatter}; + +#[derive(Debug, Copy, Clone, PartialEq, Serialize, Deserialize)] +pub enum DriverAdapter { + #[serde(rename = "planetscale")] + PlanetScale, + + #[serde(rename = "neon:ws")] + Neon, + + #[serde(rename = "pg")] + Pg, + + #[serde(rename = "libsql")] + LibSQL, + + #[serde(rename = "d1")] + D1, +} + +impl From for DriverAdapter { + fn from(s: String) -> Self { + let s = s.as_str(); + serde_json::from_str(s).unwrap_or_else(|_| panic!("Unknown driver adapter: {}", &s)) + } +} + +impl From for String { + fn from(driver_adapter: DriverAdapter) -> String { + serde_json::value::to_value(driver_adapter) + .ok() + .and_then(|v| v.as_str().map(|v| v.to_owned())) + .unwrap() + } +} + +impl Display for DriverAdapter { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + let s: String = (*self).into(); + write!(f, "{}", s) + } +} diff --git a/query-engine/connector-test-kit-rs/qe-setup/src/lib.rs b/query-engine/connector-test-kit-rs/qe-setup/src/lib.rs index 9756c2efec66..74b2d015f7df 100644 --- a/query-engine/connector-test-kit-rs/qe-setup/src/lib.rs +++ b/query-engine/connector-test-kit-rs/qe-setup/src/lib.rs @@ -1,21 +1,40 @@ //! Query Engine test setup. 
#![allow(clippy::await_holding_lock)] - mod cockroachdb; +pub mod driver_adapters; mod mongodb; mod mssql; mod mysql; mod postgres; +mod providers; +mod sqlite; pub use schema_core::schema_connector::ConnectorError; +use sqlite::sqlite_setup; use self::{cockroachdb::*, mongodb::*, mssql::*, mysql::*, postgres::*}; +use driver_adapters::DriverAdapter; use enumflags2::BitFlags; +use providers::Provider; use psl::{builtin_connectors::*, Datasource}; use schema_core::schema_connector::{ConnectorResult, DiffTarget, SchemaConnector}; use std::env; +pub trait ExternalInitializer<'a> +where + Self: Sized, +{ + #[allow(async_fn_in_trait)] + async fn init_with_migration(&self, script: String) -> Result<(), Box>; + + #[allow(async_fn_in_trait)] + async fn init(&self) -> Result<(), Box>; + + fn url(&self) -> &'a str; + fn datamodel(&self) -> &'a str; +} + fn parse_configuration(datamodel: &str) -> ConnectorResult<(Datasource, String, BitFlags)> { let config = psl::parse_configuration(datamodel) .map_err(|err| ConnectorError::new_schema_parser_error(err.to_pretty_string("schema.prisma", datamodel)))?; @@ -35,30 +54,63 @@ fn parse_configuration(datamodel: &str) -> ConnectorResult<(Datasource, String, Ok((source, url, preview_features)) } +/// Database setup for connector-test-kit-rs with Driver Adapters. +/// If the external driver adapter requires a migration by means of the JavaScript runtime +/// (rather than just the Schema Engine), this function will call [`ExternalInitializer::init_with_migration`]. +/// Otherwise, it will call [`ExternalInitializer::init`], and then proceed with the standard +/// setup based on the Schema Engine. 
+pub async fn setup_external<'a, EI>( + driver_adapter: DriverAdapter, + initializer: EI, + db_schemas: &[&str], +) -> ConnectorResult<()> +where + EI: ExternalInitializer<'a> + ?Sized, +{ + let prisma_schema = initializer.datamodel(); + let (source, url, _preview_features) = parse_configuration(prisma_schema)?; + + if driver_adapter == DriverAdapter::D1 { + // 1. Compute the diff migration script. + std::fs::remove_file(source.url.as_literal().unwrap().trim_start_matches("file:")).ok(); + let mut connector = sql_schema_connector::SqlSchemaConnector::new_sqlite(); + let migration_script = crate::diff(prisma_schema, url, &mut connector).await?; + + // 2. Tell JavaScript to take care of the schema migration. + // This results in a JSON-RPC call to the JS runtime. + // The JSON-RPC machinery is defined in the `[query-tests-setup]` crate, and it + // implements the `ExternalInitializer<'a>` trait. + initializer + .init_with_migration(migration_script) + .await + .map_err(|err| ConnectorError::from_msg(format!("Error migrating with D1 adapter: {}", err)))?; + } else { + setup(prisma_schema, db_schemas).await?; + + // 3. Tell JavaScript to initialize the external test session. + // The schema migration is taken care of by the Schema Engine. + initializer.init().await.map_err(|err| { + ConnectorError::from_msg(format!("Error initializing {} adapter: {}", driver_adapter, err)) + })?; + } + + Ok(()) +} + /// Database setup for connector-test-kit-rs. 
pub async fn setup(prisma_schema: &str, db_schemas: &[&str]) -> ConnectorResult<()> { let (source, url, _preview_features) = parse_configuration(prisma_schema)?; - match &source.active_provider { - provider if [POSTGRES.provider_name()].contains(provider) => { - postgres_setup(url, prisma_schema, db_schemas).await - } - provider if COCKROACH.is_provider(provider) => cockroach_setup(url, prisma_schema).await, - provider if MSSQL.is_provider(provider) => mssql_setup(url, prisma_schema, db_schemas).await, - provider if MYSQL.is_provider(provider) => { - mysql_reset(&url).await?; - let mut connector = sql_schema_connector::SqlSchemaConnector::new_mysql(); - diff_and_apply(prisma_schema, url, &mut connector).await - } - provider if SQLITE.is_provider(provider) => { - std::fs::remove_file(source.url.as_literal().unwrap().trim_start_matches("file:")).ok(); - let mut connector = sql_schema_connector::SqlSchemaConnector::new_sqlite(); - diff_and_apply(prisma_schema, url, &mut connector).await - } - - provider if MONGODB.is_provider(provider) => mongo_setup(prisma_schema, &url).await, + let provider = Provider::try_from(source.active_provider).ok(); - x => unimplemented!("Connector {} is not supported yet", x), + match provider { + Some(Provider::SqlServer) => mssql_setup(url, prisma_schema, db_schemas).await, + Some(Provider::Postgres) => postgres_setup(url, prisma_schema, db_schemas).await, + Some(Provider::Cockroach) => cockroach_setup(url, prisma_schema).await, + Some(Provider::Mysql) => mysql_setup(url, prisma_schema).await, + Some(Provider::Mongo) => mongo_setup(prisma_schema, &url).await, + Some(Provider::Sqlite) => sqlite_setup(source, url, prisma_schema).await, + None => unimplemented!("Connector is not supported yet"), } } @@ -87,7 +139,9 @@ pub async fn teardown(prisma_schema: &str, db_schemas: &[&str]) -> ConnectorResu Ok(()) } -async fn diff_and_apply(schema: &str, url: String, connector: &mut dyn SchemaConnector) -> ConnectorResult<()> { +/// Compute an 
initialisation migration script via +/// `prisma migrate diff --from-empty --to-schema-datamodel $SCHEMA_PATH --script`. +pub(crate) async fn diff(schema: &str, url: String, connector: &mut dyn SchemaConnector) -> ConnectorResult { connector.set_params(schema_core::schema_connector::ConnectorParams { connection_string: url, preview_features: Default::default(), @@ -100,6 +154,15 @@ async fn diff_and_apply(schema: &str, url: String, connector: &mut dyn SchemaCon .database_schema_from_diff_target(DiffTarget::Datamodel(schema.into()), None, None) .await?; let migration = connector.diff(from, to); - let script = connector.render_script(&migration, &Default::default()).unwrap(); + connector.render_script(&migration, &Default::default()) +} + +/// Apply the script returned by [`diff`] against the database. +pub(crate) async fn diff_and_apply( + schema: &str, + url: String, + connector: &mut dyn SchemaConnector, +) -> ConnectorResult<()> { + let script = diff(schema, url, connector).await.unwrap(); connector.db_execute(script).await } diff --git a/query-engine/connector-test-kit-rs/qe-setup/src/mysql.rs b/query-engine/connector-test-kit-rs/qe-setup/src/mysql.rs index cd3f67a300a5..ba67d9ff4583 100644 --- a/query-engine/connector-test-kit-rs/qe-setup/src/mysql.rs +++ b/query-engine/connector-test-kit-rs/qe-setup/src/mysql.rs @@ -5,7 +5,13 @@ use std::{future::Future, pin::Pin, sync::mpsc}; use test_setup::{mysql::mysql_safe_identifier, runtime::run_with_thread_local_runtime as tok}; use url::Url; -pub(crate) async fn mysql_reset(original_url: &str) -> ConnectorResult<()> { +pub(crate) async fn mysql_setup(url: String, prisma_schema: &str) -> ConnectorResult<()> { + mysql_reset(&url).await?; + let mut connector = sql_schema_connector::SqlSchemaConnector::new_mysql(); + crate::diff_and_apply(prisma_schema, url, &mut connector).await +} + +async fn mysql_reset(original_url: &str) -> ConnectorResult<()> { let url = 
Url::parse(original_url).map_err(ConnectorError::url_parse_error)?; let db_name = url.path().trim_start_matches('/'); create_mysql_database(original_url, db_name).await diff --git a/query-engine/connector-test-kit-rs/qe-setup/src/providers.rs b/query-engine/connector-test-kit-rs/qe-setup/src/providers.rs new file mode 100644 index 000000000000..23d9bf4411b6 --- /dev/null +++ b/query-engine/connector-test-kit-rs/qe-setup/src/providers.rs @@ -0,0 +1,60 @@ +use std::fmt::{Display, Formatter}; + +use psl::builtin_connectors::*; +use serde::{Deserialize, Serialize}; + +#[derive(Debug, Copy, Clone, PartialEq, Serialize, Deserialize)] +pub(crate) enum Provider { + #[serde(rename = "postgres")] + Postgres, + + #[serde(rename = "mysql")] + Mysql, + + #[serde(rename = "sqlite")] + Sqlite, + + #[serde(rename = "sqlserver")] + SqlServer, + + #[serde(rename = "mongo")] + Mongo, + + #[serde(rename = "cockroach")] + Cockroach, +} + +impl TryFrom<&str> for Provider { + type Error = String; + + fn try_from(provider: &str) -> Result { + if POSTGRES.is_provider(provider) { + Ok(Provider::Postgres) + } else if MYSQL.is_provider(provider) { + Ok(Provider::Mysql) + } else if SQLITE.is_provider(provider) { + Ok(Provider::Sqlite) + } else if MSSQL.is_provider(provider) { + Ok(Provider::SqlServer) + } else if MONGODB.is_provider(provider) { + Ok(Provider::Mongo) + } else if COCKROACH.is_provider(provider) { + Ok(Provider::Cockroach) + } else { + Err(format!("Connector {} is not supported yet", provider)) + } + } +} + +impl From for String { + fn from(val: Provider) -> Self { + serde_json::to_string(&val).unwrap() + } +} + +impl Display for Provider { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + let s: String = (*self).into(); + write!(f, "{}", s) + } +} diff --git a/query-engine/connector-test-kit-rs/qe-setup/src/sqlite.rs b/query-engine/connector-test-kit-rs/qe-setup/src/sqlite.rs new file mode 100644 index 000000000000..547032944a75 --- /dev/null +++ 
b/query-engine/connector-test-kit-rs/qe-setup/src/sqlite.rs @@ -0,0 +1,7 @@ +use schema_core::schema_connector::ConnectorResult; + +pub(crate) async fn sqlite_setup(source: psl::Datasource, url: String, prisma_schema: &str) -> ConnectorResult<()> { + std::fs::remove_file(source.url.as_literal().unwrap().trim_start_matches("file:")).ok(); + let mut connector = sql_schema_connector::SqlSchemaConnector::new_sqlite(); + crate::diff_and_apply(prisma_schema, url, &mut connector).await +} diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/.gitignore b/query-engine/connector-test-kit-rs/query-engine-tests/.gitignore new file mode 100644 index 000000000000..6f76229c4ee1 --- /dev/null +++ b/query-engine/connector-test-kit-rs/query-engine-tests/.gitignore @@ -0,0 +1,4 @@ +node_modules/ + +# wrangler is used for testing the D1 adapter +.wrangler diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/Cargo.toml b/query-engine/connector-test-kit-rs/query-engine-tests/Cargo.toml index 2ac097a7a187..c60b9cca4593 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/Cargo.toml +++ b/query-engine/connector-test-kit-rs/query-engine-tests/Cargo.toml @@ -27,3 +27,4 @@ paste = "1.0.14" [dev-dependencies] insta = "1.7.1" +itertools.workspace = true diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/src/utils/batch.rs b/query-engine/connector-test-kit-rs/query-engine-tests/src/utils/batch.rs new file mode 100644 index 000000000000..017c9a80dc37 --- /dev/null +++ b/query-engine/connector-test-kit-rs/query-engine-tests/src/utils/batch.rs @@ -0,0 +1,30 @@ +use query_tests_setup::{ + query_core::{BatchDocument, QueryDocument}, + GraphqlBody, MultiQuery, Runner, TestResult, +}; + +use crate::run_query; + +pub async fn compact_batch(runner: &Runner, queries: Vec) -> TestResult { + // Ensure individual queries are valid. Helps to debug tests when writing them. 
+ for q in queries.iter() { + run_query!(runner, q.to_string()); + } + + // Ensure batched queries are valid + runner.batch(queries.clone(), false, None).await?.assert_success(); + + let doc = GraphqlBody::Multi(MultiQuery::new( + queries.into_iter().map(Into::into).collect(), + false, + None, + )) + .into_doc() + .unwrap(); + let batch = match doc { + QueryDocument::Multi(batch) => batch.compact(runner.query_schema()), + _ => unreachable!(), + }; + + Ok(batch.compact(runner.query_schema())) +} diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/src/utils/metrics.rs b/query-engine/connector-test-kit-rs/query-engine-tests/src/utils/metrics.rs new file mode 100644 index 000000000000..df6da0fec9a0 --- /dev/null +++ b/query-engine/connector-test-kit-rs/query-engine-tests/src/utils/metrics.rs @@ -0,0 +1,23 @@ +use serde_json::Value; + +pub fn get_counter(json: &Value, name: &str) -> u64 { + let metric_value = get_metric_value(json, "counters", name); + metric_value.as_u64().unwrap() +} + +pub fn get_gauge(json: &Value, name: &str) -> f64 { + let metric_value = get_metric_value(json, "gauges", name); + metric_value.as_f64().unwrap() +} + +pub fn get_metric_value(json: &Value, metric_type: &str, name: &str) -> serde_json::Value { + let metrics = json.get(metric_type).unwrap().as_array().unwrap(); + let metric = metrics + .iter() + .find(|metric| metric.get("key").unwrap().as_str() == Some(name)) + .unwrap() + .as_object() + .unwrap(); + + metric.get("value").unwrap().clone() +} diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/src/utils/mod.rs b/query-engine/connector-test-kit-rs/query-engine-tests/src/utils/mod.rs index 75d712a27037..a3fadb7d1956 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/src/utils/mod.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/src/utils/mod.rs @@ -1,10 +1,13 @@ +mod batch; mod bytes; mod json; +pub mod metrics; mod querying; mod raw; mod string; mod time; +pub use batch::*; pub 
use bytes::*; pub use raw::*; pub use string::*; diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/create_many.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/create_many.rs index fc3ec925352b..06988cf1de1a 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/create_many.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/create_many.rs @@ -64,7 +64,7 @@ mod cockroachdb { mod single_col { use query_engine_tests::run_query; - #[connector_test(exclude(CockroachDb))] + #[connector_test(exclude(CockroachDb, Sqlite("cfd1")))] async fn foo(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, "mutation { createManyTestModel(data: [{},{}]) { count }}"), diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/interactive_tx.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/interactive_tx.rs index 743c42154db8..bf738076d912 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/interactive_tx.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/interactive_tx.rs @@ -1,7 +1,7 @@ use query_engine_tests::test_suite; use std::borrow::Cow; -#[test_suite(schema(generic))] +#[test_suite(schema(generic), exclude(Sqlite("cfd1")))] mod interactive_tx { use query_engine_tests::*; use tokio::time; @@ -213,7 +213,7 @@ mod interactive_tx { Ok(()) } - #[connector_test(exclude(Vitess("planetscale.js.wasm")))] + #[connector_test(exclude(Vitess("planetscale.js.wasm"), Sqlite("cfd1")))] async fn batch_queries_failure(mut runner: Runner) -> TestResult<()> { // Tx expires after five second. 
let tx_id = runner.start_tx(5000, 5000, None).await?; @@ -568,7 +568,7 @@ mod interactive_tx { } } -#[test_suite(schema(generic))] +#[test_suite(schema(generic), exclude(Sqlite("cfd1")))] mod itx_isolation { use query_engine_tests::*; @@ -576,7 +576,7 @@ mod itx_isolation { // However, there's a bug in the PlanetScale driver adapter: // "Transaction characteristics can't be changed while a transaction is in progress // (errno 1568) (sqlstate 25001) during query: SET TRANSACTION ISOLATION LEVEL SERIALIZABLE" - #[connector_test(exclude(MongoDb, Vitess("planetscale.js", "planetscale.js.wasm")))] + #[connector_test(exclude(MongoDb, Vitess("planetscale.js", "planetscale.js.wasm"), Sqlite("cfd1")))] async fn basic_serializable(mut runner: Runner) -> TestResult<()> { let tx_id = runner.start_tx(5000, 5000, Some("Serializable".to_owned())).await?; runner.set_active_tx(tx_id.clone()); @@ -600,7 +600,7 @@ mod itx_isolation { // On PlanetScale, this fails with: // `InteractiveTransactionError("Error in connector: Error querying the database: Server error: `ERROR 25001 (1568): Transaction characteristics can't be changed while a transaction is in progress'")` - #[connector_test(exclude(MongoDb, Vitess("planetscale.js", "planetscale.js.wasm")))] + #[connector_test(exclude(MongoDb, Vitess("planetscale.js", "planetscale.js.wasm"), Sqlite("cfd1")))] async fn casing_doesnt_matter(mut runner: Runner) -> TestResult<()> { let tx_id = runner.start_tx(5000, 5000, Some("sErIaLiZaBlE".to_owned())).await?; runner.set_active_tx(tx_id.clone()); diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/metrics.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/metrics.rs index 827a35daeac7..7a020f27aa31 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/metrics.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/metrics.rs @@ -5,7 +5,7 @@ use query_engine_tests::test_suite; exclude( Vitess("planetscale.js", 
"planetscale.js.wasm"), Postgres("neon.js", "pg.js", "neon.js.wasm", "pg.js.wasm"), - Sqlite("libsql.js", "libsql.js.wasm") + Sqlite("libsql.js", "libsql.js.wasm", "cfd1") ) )] mod metrics { @@ -14,7 +14,6 @@ mod metrics { }; use query_engine_tests::ConnectorVersion::*; use query_engine_tests::*; - use serde_json::Value; #[connector_test] async fn metrics_are_recorded(runner: Runner) -> TestResult<()> { @@ -30,8 +29,8 @@ mod metrics { let json = runner.get_metrics().to_json(Default::default()); // We cannot assert the full response it will be slightly different per database - let total_queries = get_counter(&json, PRISMA_DATASOURCE_QUERIES_TOTAL); - let total_operations = get_counter(&json, PRISMA_CLIENT_QUERIES_TOTAL); + let total_queries = utils::metrics::get_counter(&json, PRISMA_DATASOURCE_QUERIES_TOTAL); + let total_operations = utils::metrics::get_counter(&json, PRISMA_CLIENT_QUERIES_TOTAL); match runner.connector_version() { Sqlite(_) => assert_eq!(total_queries, 2), @@ -63,7 +62,7 @@ mod metrics { let _ = runner.commit_tx(tx_id).await?; let json = runner.get_metrics().to_json(Default::default()); - let active_transactions = get_gauge(&json, PRISMA_CLIENT_QUERIES_ACTIVE); + let active_transactions = utils::metrics::get_gauge(&json, PRISMA_CLIENT_QUERIES_ACTIVE); assert_eq!(active_transactions, 0.0); let tx_id = runner.start_tx(5000, 5000, None).await?; @@ -80,30 +79,8 @@ mod metrics { let _ = runner.rollback_tx(tx_id.clone()).await?; let json = runner.get_metrics().to_json(Default::default()); - let active_transactions = get_gauge(&json, PRISMA_CLIENT_QUERIES_ACTIVE); + let active_transactions = utils::metrics::get_gauge(&json, PRISMA_CLIENT_QUERIES_ACTIVE); assert_eq!(active_transactions, 0.0); Ok(()) } - - fn get_counter(json: &Value, name: &str) -> u64 { - let metric_value = get_metric_value(json, "counters", name); - metric_value.as_u64().unwrap() - } - - fn get_gauge(json: &Value, name: &str) -> f64 { - let metric_value = get_metric_value(json, 
"gauges", name); - metric_value.as_f64().unwrap() - } - - fn get_metric_value(json: &Value, metric_type: &str, name: &str) -> serde_json::Value { - let metrics = json.get(metric_type).unwrap().as_array().unwrap(); - let metric = metrics - .iter() - .find(|metric| metric.get("key").unwrap().as_str() == Some(name)) - .unwrap() - .as_object() - .unwrap(); - - metric.get("value").unwrap().clone() - } } diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/occ.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/occ.rs index 504c5cb0bee0..0d9bb8bb386f 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/occ.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/occ.rs @@ -112,13 +112,27 @@ mod occ { assert_eq!(booked_user_id, found_booked_user_id); } - // On PlanetScale: - // assertion `left == right` failed - // left: 6 - // right: 1 + // On PlanetScale, this fails with: + // ``` + // assertion `left == right` failed + // left: 6 + // right: 1 + // ``` + // + // On D1, this fails with: + // ``` + // assertion `left == right` failed + // left: 3 + // right: 1 + // ``` #[connector_test( schema(occ_simple), - exclude(MongoDB, CockroachDb, Vitess("planetscale.js", "planetscale.js.wasm")) + exclude( + MongoDB, + CockroachDb, + Vitess("planetscale.js", "planetscale.js.wasm"), + Sqlite("cfd1") + ) )] async fn occ_update_many_test(runner: Runner) -> TestResult<()> { let runner = Arc::new(runner); diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_delete/set_default.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_delete/set_default.rs index d96c3d3576ff..131dbcf89591 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_delete/set_default.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_delete/set_default.rs @@ -69,10 +69,7 @@ mod one2one_req { } 
/// Deleting the parent reconnects the child to the default and fails (the default doesn't exist). - #[connector_test( - schema(required_with_default), - exclude(MongoDb, MySQL, Vitess("planetscale.js", "planetscale.js.wasm")) - )] + #[connector_test(schema(required_with_default))] async fn delete_parent_no_exist_fail(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, r#"mutation { createOneParent(data: { id: 1, child: { create: { id: 1 }}}) { id }}"#), @@ -285,10 +282,7 @@ mod one2many_req { } /// Deleting the parent reconnects the child to the default and fails (the default doesn't exist). - #[connector_test( - schema(required_with_default), - exclude(MongoDb, MySQL, Vitess("planetscale.js", "planetscale.js.wasm")) - )] + #[connector_test(schema(required_with_default))] async fn delete_parent_no_exist_fail(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, r#"mutation { createOneParent(data: { id: 1, children: { create: { id: 1 }}}) { id }}"#), @@ -392,10 +386,7 @@ mod one2many_opt { } /// Deleting the parent reconnects the child to the default and fails (the default doesn't exist). 
- #[connector_test( - schema(optional_with_default), - exclude(MongoDb, MySQL, Vitess("planetscale.js", "planetscale.js.wasm")) - )] + #[connector_test(schema(optional_with_default))] async fn delete_parent_no_exist_fail(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, r#"mutation { createOneParent(data: { id: 1, children: { create: { id: 1 }}}) { id }}"#), diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_update/cascade.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_update/cascade.rs index 5ceb0bbabaec..99cd190e161a 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_update/cascade.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_update/cascade.rs @@ -1,3 +1,4 @@ +//! D1 seems to silently ignore Cascade. use query_engine_tests::*; #[test_suite(suite = "cascade_onU_1to1_req", schema(required), relation_mode = "prisma")] @@ -32,7 +33,13 @@ mod one2one_req { schema.to_owned() } - #[connector_test(schema(required))] + #[connector_test(schema(required), exclude(Sqlite("cfd1")))] + /// On D1, this fails with: + /// + /// ```diff + /// - {"data":{"updateManyParent":{"count":1}}} + /// + {"data":{"updateManyParent":{"count":2}}} + /// ``` async fn update_parent_cascade(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, r#"mutation { @@ -169,8 +176,14 @@ mod one2one_opt { schema.to_owned() } + #[connector_test(schema(optional), exclude(Sqlite("cfd1")))] // Updating the parent updates the child FK as well. 
- #[connector_test(schema(optional))] + // On D1, this fails with: + // + // ```diff + // - {"data":{"updateManyParent":{"count":1}}} + // + {"data":{"updateManyParent":{"count":2}}} + // ``` async fn update_parent_cascade(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, r#"mutation { diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_update/restrict.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_update/restrict.rs index cda3b52c9736..99c3c0b094dc 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_update/restrict.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_update/restrict.rs @@ -1,4 +1,5 @@ //! SQL Server doesn't support Restrict. +//! D1 seems to silently ignore Restrict. use indoc::indoc; use query_engine_tests::*; @@ -254,8 +255,13 @@ mod one2many_req { Ok(()) } + #[connector_test(exclude(Sqlite("cfd1")))] /// Updating the parent succeeds if no child is connected or if the linking fields aren't part of the update payload. - #[connector_test] + /// + /// ```diff + /// - {"data":{"updateManyParent":{"count":1}}} + /// + {"data":{"updateManyParent":{"count":2}}} + /// ``` async fn update_parent(runner: Runner) -> TestResult<()> { create_test_data(&runner).await?; run_query!( @@ -377,8 +383,13 @@ mod one2many_opt { Ok(()) } + #[connector_test(exclude(Sqlite("cfd1")))] /// Updating the parent succeeds if no child is connected or if the linking fields aren't part of the update payload. 
- #[connector_test] + /// + /// ```diff + /// - {"data":{"updateManyParent":{"count":1}}} + /// + {"data":{"updateManyParent":{"count":2}}} + /// ``` async fn update_parent(runner: Runner) -> TestResult<()> { create_test_data(&runner).await?; run_query!( diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_update/set_default.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_update/set_default.rs index b942d6f0bc7b..99c2ffb63a5e 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_update/set_default.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_update/set_default.rs @@ -68,7 +68,7 @@ mod one2one_req { } /// Updating the parent reconnects the child to the default and fails (the default doesn't exist). - #[connector_test(schema(required_with_default), exclude(MongoDb, MySQL, Vitess))] + #[connector_test(schema(required_with_default))] async fn update_parent_no_exist_fail(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, r#"mutation { createOneParent(data: { id: 1, uniq: "1", child: { create: { id: 1 }}}) { id }}"#), @@ -171,7 +171,7 @@ mod one2one_opt { } /// Updating the parent reconnects the child to the default and fails (the default doesn't exist). - #[connector_test(schema(optional_with_default), exclude(MongoDb, MySQL, Vitess))] + #[connector_test(schema(optional_with_default))] async fn update_parent_no_exist_fail(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, r#"mutation { createOneParent(data: { id: 1, uniq: "1", child: { create: { id: 1 }}}) { id }}"#), @@ -276,7 +276,7 @@ mod one2many_req { } /// Updating the parent reconnects the child to the default and fails (the default doesn't exist). 
- #[connector_test(schema(required_with_default), exclude(MongoDb, MySQL, Vitess))] + #[connector_test(schema(required_with_default))] async fn update_parent_no_exist_fail(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, r#"mutation { createOneParent(data: { id: 1, uniq: "1", children: { create: { id: 1 }}}) { id }}"#), @@ -379,7 +379,7 @@ mod one2many_opt { } /// Updating the parent reconnects the child to the default and fails (the default doesn't exist). - #[connector_test(schema(optional_with_default), exclude(MongoDb, MySQL, Vitess))] + #[connector_test(schema(optional_with_default))] async fn update_parent_no_exist_fail(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, r#"mutation { createOneParent(data: { id: 1, uniq: "1", children: { create: { id: 1 }}}) { id }}"#), diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_update/set_null.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_update/set_null.rs index 01dbffad6ca1..8ef0ab0d1e8c 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_update/set_null.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_update/set_null.rs @@ -1,4 +1,5 @@ //! Only Postgres (except CockroachDB) allows SetNull on a non-nullable FK at all, rest fail during migration. +//! D1 also seems to silently ignore Restrict. 
use indoc::indoc; use query_engine_tests::*; @@ -64,7 +65,13 @@ mod one2one_opt { Ok(()) } - #[connector_test] + #[connector_test(exclude(Sqlite("cfd1")))] + // On D1, this fails with: + // + // ```diff + // - {"data":{"updateManyParent":{"count":1}}} + // + {"data":{"updateManyParent":{"count":2}}} + // ``` async fn update_many_parent(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, r#"mutation { createOneParent(data: { id: 1, uniq: "1", child: { create: { id: 1 }}}) { id }}"#), @@ -450,7 +457,13 @@ mod one2many_opt { Ok(()) } - #[connector_test] + #[connector_test(exclude(Sqlite("cfd1")))] + // On D1, this fails with: + // + // ```diff + // - {"data":{"updateManyParent":{"count":1}}} + // + {"data":{"updateManyParent":{"count":2}}} + // ``` async fn update_many_parent(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, r#"mutation { createOneParent(data: { id: 1, uniq: "1", children: { create: { id: 1 }}}) { id }}"#), diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/max_integer.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/max_integer.rs index dd76837d92f1..60d7ca964955 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/max_integer.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/max_integer.rs @@ -157,7 +157,13 @@ mod max_integer { // Specific messages are asserted down below for native types. // MongoDB is excluded because it automatically upcasts a value as an i64 if doesn't fit in an i32. // MySQL 5.6 is excluded because it never overflows but inserts the min or max of the range of the column type instead. - #[connector_test(exclude(MongoDb, MySql(5.6)))] + // D1 doesn't fail. 
+ // + // On D1, this panics with + // ``` + // Expected result to return an error, but found success: {"data":{"createOneTest":{"id":1,"int":2147483648}}} + // ``` + #[connector_test(exclude(MongoDb, MySql(5.6), Sqlite("cfd1")))] async fn unfitted_int_should_fail(runner: Runner) -> TestResult<()> { assert_error!( runner, diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_14001.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_14001.rs index 8b08a70c16c4..9b7b0e514b74 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_14001.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_14001.rs @@ -1,6 +1,6 @@ use query_engine_tests::*; -#[test_suite(schema(schema), exclude(Sqlite))] +#[test_suite(schema(schema))] mod prisma_14001 { fn schema() -> String { r#" diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_5952.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_5952.rs index b851c1c2be64..0281fc60a832 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_5952.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_5952.rs @@ -43,6 +43,48 @@ mod regression { r#"query {findUniqueArtist(where:{firstName_netWorth:{firstName:"George",netWorth:"-0.23660010012409"}}) {firstName netWorth}}"#.to_string(), ]; + let doc = compact_batch(&runner, queries.clone()).await?; + assert!(doc.is_compact()); + + let batch_results = runner.batch(queries, false, None).await?; + insta::assert_snapshot!( + batch_results.to_string(), + @r###"{"batchResult":[{"data":{"findUniqueArtist":{"firstName":"Michael","netWorth":"236600000.12409"}}},{"data":{"findUniqueArtist":{"firstName":"George","netWorth":"-0.23660010012409"}}}]}"### + ); + + Ok(()) + } + + #[connector_test] + async 
fn decimal_find_different_uniques_unquoted(runner: Runner) -> TestResult<()> { + runner + .query(indoc! { + r#"mutation {createOneArtist(data:{ + firstName: "Michael" + netWorth: 236600000.12409 + }){ firstName }}"# + }) + .await? + .assert_success(); + + runner + .query(indoc! { + r#"mutation {createOneArtist(data:{ + firstName: "George" + netWorth: -0.23660010012409 + }){ firstName }}"# + }) + .await? + .assert_success(); + + let queries = vec![ + r#"query {findUniqueArtist(where:{firstName_netWorth:{firstName:"Michael",netWorth:236600000.12409}}) {firstName netWorth}}"#.to_string(), + r#"query {findUniqueArtist(where:{firstName_netWorth:{firstName:"George",netWorth:-0.23660010012409}}) {firstName netWorth}}"#.to_string(), + ]; + + let doc = compact_batch(&runner, queries.clone()).await?; + assert!(doc.is_compact()); + let batch_results = runner.batch(queries, false, None).await?; insta::assert_snapshot!( batch_results.to_string(), diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_7434.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_7434.rs index f7114d249839..166e9e1e4a94 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_7434.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_7434.rs @@ -4,7 +4,7 @@ use query_engine_tests::*; mod not_in_chunking { use query_engine_tests::Runner; - #[connector_test(exclude(CockroachDb))] + #[connector_test(exclude(CockroachDb, Sqlite("cfd1")))] async fn not_in_batch_filter(runner: Runner) -> TestResult<()> { assert_error!( runner, diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/relation_load_strategy.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/relation_load_strategy.rs index 55acc7b30521..9cccd46caeb1 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/relation_load_strategy.rs +++ 
b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/relation_load_strategy.rs @@ -438,8 +438,7 @@ mod relation_load_strategy { count } } - "#, - exclude(Sqlite) + "# ); relation_load_strategy_not_available_test!( diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/group_by_having.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/group_by_having.rs index 15d11967178e..545c44cfe41c 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/group_by_having.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/group_by_having.rs @@ -52,7 +52,13 @@ mod aggr_group_by_having { Ok(()) } - #[connector_test] + #[connector_test(exclude(Sqlite("cfd1")))] + // On D1, this fails with: + // + // ```diff + // - {"data":{"groupByTestModel":[{"string":"group1","_count":{"int":2}}]}} + // + {"data":{"groupByTestModel":[]}} + // ``` async fn having_count_scalar_filter(runner: Runner) -> TestResult<()> { create_row(&runner, r#"{ id: 1, int: 1, string: "group1" }"#).await?; create_row(&runner, r#"{ id: 2, int: 2, string: "group1" }"#).await?; @@ -127,7 +133,13 @@ mod aggr_group_by_having { Ok(()) } - #[connector_test] + #[connector_test(exclude(Sqlite("cfd1")))] + // On D1, this fails with: + // + // ```diff + // - {"data":{"groupByTestModel":[{"string":"group1","_sum":{"float":16.0,"int":16}}]}} + // + {"data":{"groupByTestModel":[]}} + // ``` async fn having_sum_scalar_filter(runner: Runner) -> TestResult<()> { create_row(&runner, r#"{ id: 1, float: 10, int: 10, string: "group1" }"#).await?; create_row(&runner, r#"{ id: 2, float: 6, int: 6, string: "group1" }"#).await?; @@ -196,7 +208,13 @@ mod aggr_group_by_having { Ok(()) } - #[connector_test] + #[connector_test(exclude(Sqlite("cfd1")))] + // On D1, this fails with: + // + // ```diff + // - 
{"data":{"groupByTestModel":[{"string":"group1","_min":{"float":0.0,"int":0}},{"string":"group2","_min":{"float":0.0,"int":0}}]}} + // + {"data":{"groupByTestModel":[]}} + // ``` async fn having_min_scalar_filter(runner: Runner) -> TestResult<()> { create_row(&runner, r#"{ id: 1, float: 10, int: 10, string: "group1" }"#).await?; create_row(&runner, r#"{ id: 2, float: 0, int: 0, string: "group1" }"#).await?; @@ -264,7 +282,13 @@ mod aggr_group_by_having { Ok(()) } - #[connector_test] + #[connector_test(exclude(Sqlite("cfd1")))] + // On D1, this fails with: + // + // ```diff + // - {"data":{"groupByTestModel":[{"string":"group1","_max":{"float":10.0,"int":10}},{"string":"group2","_max":{"float":10.0,"int":10}}]}} + // + {"data":{"groupByTestModel":[]}} + // ``` async fn having_max_scalar_filter(runner: Runner) -> TestResult<()> { create_row(&runner, r#"{ id: 1, float: 10, int: 10, string: "group1" }"#).await?; create_row(&runner, r#"{ id: 2, float: 0, int: 0, string: "group1" }"#).await?; @@ -332,7 +356,13 @@ mod aggr_group_by_having { Ok(()) } - #[connector_test] + #[connector_test(exclude(Sqlite("cfd1")))] + // On D1, this fails with: + // + // ```diff + // - {"data":{"groupByTestModel":[{"string":"group1","_count":{"string":2}}]}} + // + {"data":{"groupByTestModel":[]}} + // ``` async fn having_count_non_numerical_field(runner: Runner) -> TestResult<()> { create_row(&runner, r#"{ id: 1, float: 10, int: 10, string: "group1" }"#).await?; create_row(&runner, r#"{ id: 2, float: 0, int: 0, string: "group1" }"#).await?; @@ -350,7 +380,16 @@ mod aggr_group_by_having { Ok(()) } - #[connector_test] + #[connector_test(exclude(Sqlite("cfd1")))] + // On D1, this panics with: + // + // ``` + // assertion `left == right` failed: Query result: {"data":{"groupByTestModel":[]}} is not part of the expected results: ["{\"data\":{\"groupByTestModel\":[{\"string\":\"group1\"},{\"string\":\"group2\"}]}}", 
"{\"data\":{\"groupByTestModel\":[{\"string\":\"group2\"},{\"string\":\"group1\"}]}}"] for connector SQLite (cfd1) + // left: false + // right: true + // note: run with `RUST_BACKTRACE=1` environment variable to display a backtrace + // FAILED + // ``` async fn having_without_aggr_sel(runner: Runner) -> TestResult<()> { create_row(&runner, r#"{ id: 1, float: 10, int: 10, string: "group1" }"#).await?; create_row(&runner, r#"{ id: 2, float: 0, int: 0, string: "group1" }"#).await?; diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/batching/select_one_compound.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/batching/select_one_compound.rs index 433bcc899081..0d055f591c72 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/batching/select_one_compound.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/batching/select_one_compound.rs @@ -3,7 +3,6 @@ use query_engine_tests::*; #[test_suite(schema(schema), capabilities(AnyId))] mod compound_batch { use indoc::indoc; - use query_engine_tests::query_core::{BatchDocument, QueryDocument}; fn schema() -> String { let schema = indoc! { @@ -384,28 +383,106 @@ mod compound_batch { Ok(()) } - async fn compact_batch(runner: &Runner, queries: Vec) -> TestResult { - // Ensure individual queries are valid. Helps to debug tests when writing them. 
- for q in queries.iter() { - run_query!(runner, q.to_string()); - } - - // Ensure batched queries are valid - runner.batch(queries.clone(), false, None).await?.assert_success(); - - let doc = GraphqlBody::Multi(MultiQuery::new( - queries.into_iter().map(Into::into).collect(), - false, - None, - )) - .into_doc() - .unwrap(); - let batch = match doc { - QueryDocument::Multi(batch) => batch.compact(runner.query_schema()), - _ => unreachable!(), - }; + #[connector_test(schema(common_list_types), capabilities(ScalarLists))] + async fn should_only_batch_if_possible_list_boolean(runner: Runner) -> TestResult<()> { + run_query!( + &runner, + r#"mutation { + createOneTestModel(data: { id: 1, bool: [true, false] }) { id } + }"# + ); + run_query!( + &runner, + r#"mutation { + createOneTestModel(data: { id: 2, bool: [false, true] }) { id } + }"# + ); + + let queries = vec![ + r#"query { + findUniqueTestModel(where: { id: 1, bool: { equals: [true, false] } }) { id, bool } + }"# + .to_string(), + r#"query { + findUniqueTestModel( where: { id: 2, bool: { equals: [false, true] } }) { id, bool } + }"# + .to_string(), + ]; + + // COMPACT: Queries use scalar list + let doc = compact_batch(&runner, queries.clone()).await?; + assert!(doc.is_compact()); - Ok(batch.compact(runner.query_schema())) + let batch_results = runner.batch(queries, false, None).await?; + insta::assert_snapshot!( + batch_results.to_string(), + @r###"{"batchResult":[{"data":{"findUniqueTestModel":{"id":1,"bool":[true,false]}}},{"data":{"findUniqueTestModel":{"id":2,"bool":[false,true]}}}]}"### + ); + + Ok(()) + } + + fn schema_23343() -> String { + let schema = indoc! 
{ r#" + model Post { + id Int + tenantId String + userId Int + text String + + @@unique([tenantId, userId]) + } + "# }; + + schema.to_owned() + } + + #[connector_test(schema(schema_23343))] + async fn batch_23343(runner: Runner) -> TestResult<()> { + create_test_data_23343(&runner).await?; + + let queries = vec![ + r#"query { + findUniquePost(where: { tenantId_userId: { tenantId: "tenant1", userId: 1 }, tenantId: "tenant1" }) + { id, tenantId, userId, text }}"# + .to_string(), + r#"query { + findUniquePost(where: { tenantId_userId: { tenantId: "tenant2", userId: 3 }, tenantId: "tenant2" }) + { id, tenantId, userId, text }}"# + .to_string(), + ]; + + let batch_results = runner.batch(queries, false, None).await?; + insta::assert_snapshot!( + batch_results.to_string(), + @r###"{"batchResult":[{"data":{"findUniquePost":{"id":1,"tenantId":"tenant1","userId":1,"text":"Post 1!"}}},{"data":{"findUniquePost":{"id":3,"tenantId":"tenant2","userId":3,"text":"Post 3!"}}}]}"### + ); + + Ok(()) + } + + async fn create_test_data_23343(runner: &Runner) -> TestResult<()> { + runner + .query(r#"mutation { createOnePost(data: { id: 1, tenantId: "tenant1", userId: 1, text: "Post 1!" }) { id } }"#) + .await? + .assert_success(); + + runner + .query(r#"mutation { createOnePost(data: { id: 2, tenantId: "tenant1", userId: 2, text: "Post 2!" }) { id } }"#) + .await? + .assert_success(); + + runner + .query(r#"mutation { createOnePost(data: { id: 3, tenantId: "tenant2", userId: 3, text: "Post 3!" }) { id } }"#) + .await? + .assert_success(); + + runner + .query(r#"mutation { createOnePost(data: { id: 4, tenantId: "tenant2", userId: 4, text: "Post 4!" }) { id } }"#) + .await? 
+ .assert_success(); + + Ok(()) } async fn create_test_data(runner: &Runner) -> TestResult<()> { diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/batching/select_one_singular.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/batching/select_one_singular.rs index ab8884605b25..257620e1bdbe 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/batching/select_one_singular.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/batching/select_one_singular.rs @@ -348,6 +348,53 @@ mod singular_batch { Ok(()) } + fn boolean_unique() -> String { + let schema = indoc! { + r#" + model User { + #id(id, String, @id) + isManager Boolean? @unique + } + "# + }; + + schema.to_owned() + } + + #[connector_test(schema(boolean_unique))] + async fn batch_boolean(runner: Runner) -> TestResult<()> { + run_query!( + &runner, + r#"mutation { + createOneUser(data: { id: "A", isManager: true }) { id } + }"# + ); + run_query!( + &runner, + r#"mutation { + createOneUser(data: { id: "B", isManager: false }) { id } + }"# + ); + + let (res, compact_doc) = compact_batch( + &runner, + vec![ + r#"{ findUniqueUser(where: { isManager: true }) { id, isManager } }"#.to_string(), + r#"{ findUniqueUser(where: { isManager: false }) { id, isManager } }"#.to_string(), + ], + ) + .await?; + + insta::assert_snapshot!( + res.to_string(), + @r###"{"batchResult":[{"data":{"findUniqueUser":{"id":"A","isManager":true}}},{"data":{"findUniqueUser":{"id":"B","isManager":false}}}]}"### + ); + + assert!(compact_doc.is_compact()); + + Ok(()) + } + // Regression test for https://github.com/prisma/prisma/issues/16548 #[connector_test(schema(schemas::generic))] async fn repro_16548(runner: Runner) -> TestResult<()> { diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/batching/transactional_batch.rs 
b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/batching/transactional_batch.rs index 50fe1372948a..0130b3ee710b 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/batching/transactional_batch.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/batching/transactional_batch.rs @@ -44,7 +44,13 @@ mod transactional { Ok(()) } - #[connector_test] + #[connector_test(exclude(Sqlite("cfd1")))] + // On D1, this fails with: + // + // ```diff + // - {"data":{"findManyModelA":[]}} + // + {"data":{"findManyModelA":[{"id":1}]}} + // ``` async fn one_success_one_fail(runner: Runner) -> TestResult<()> { let queries = vec![ r#"mutation { createOneModelA(data: { id: 1 }) { id }}"#.to_string(), @@ -77,7 +83,13 @@ mod transactional { Ok(()) } - #[connector_test] + #[connector_test(exclude(Sqlite("cfd1")))] + // On D1, this fails with: + // + // ```diff + // - {"data":{"findManyModelB":[]}} + // + {"data":{"findManyModelB":[{"id":1}]}} + // ``` async fn one_query(runner: Runner) -> TestResult<()> { // Existing ModelA in the DB will prevent the nested ModelA creation in the batch. 
insta::assert_snapshot!( diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/chunking.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/chunking.rs index e30c280fe7f0..30d07c1d0651 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/chunking.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/chunking.rs @@ -96,7 +96,7 @@ mod chunking { Ok(()) } - #[connector_test(exclude(MongoDb))] + #[connector_test(exclude(MongoDb, Sqlite("cfd1")))] async fn order_by_aggregation_should_fail(runner: Runner) -> TestResult<()> { create_test_data(&runner).await?; diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/data_types/through_relation.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/data_types/through_relation.rs index 803aabb406cb..ea3cf5460473 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/data_types/through_relation.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/data_types/through_relation.rs @@ -181,25 +181,34 @@ mod scalar_relations { schema.to_owned() } - #[connector_test(schema(schema_decimal), capabilities(DecimalType))] + #[connector_test(schema(schema_decimal), capabilities(DecimalType), exclude(Sqlite("cfd1")))] + // On D1, this fails with: + // + // ```diff + // - {"data":{"findManyParent":[{"id":1,"children":[{"childId":1,"dec":"1"},{"childId":2,"dec":"-1"},{"childId":3,"dec":"123.4567891"},{"childId":4,"dec":"95993.57"}]}]}} + // + {"data":{"findManyParent":[{"id":1,"children":[{"childId":1,"dec":"1"},{"childId":2,"dec":"-1"},{"childId":3,"dec":"123.4567891"},{"childId":4,"dec":"95993.57000000001"}]}]}} + // ``` + // + // Basically, decimals are treated as doubles (and lose precision) due to D1 not providing column type information on queries. 
async fn decimal_type(runner: Runner) -> TestResult<()> { create_child(&runner, r#"{ childId: 1, dec: "1" }"#).await?; create_child(&runner, r#"{ childId: 2, dec: "-1" }"#).await?; create_child(&runner, r#"{ childId: 3, dec: "123.45678910" }"#).await?; + create_child(&runner, r#"{ childId: 4, dec: "95993.57" }"#).await?; create_parent( &runner, - r#"{ id: 1, children: { connect: [{ childId: 1 }, { childId: 2 }, { childId: 3 }] } }"#, + r#"{ id: 1, children: { connect: [{ childId: 1 }, { childId: 2 }, { childId: 3 }, { childId: 4 }] } }"#, ) .await?; insta::assert_snapshot!( run_query!(&runner, r#"{ findManyParent(orderBy: { id: asc }) { id children { childId dec } } }"#), - @r###"{"data":{"findManyParent":[{"id":1,"children":[{"childId":1,"dec":"1"},{"childId":2,"dec":"-1"},{"childId":3,"dec":"123.4567891"}]}]}}"### + @r###"{"data":{"findManyParent":[{"id":1,"children":[{"childId":1,"dec":"1"},{"childId":2,"dec":"-1"},{"childId":3,"dec":"123.4567891"},{"childId":4,"dec":"95993.57"}]}]}}"### ); insta::assert_snapshot!( run_query!(&runner, r#"{ findUniqueParent(where: { id: 1 }) { id children { childId dec } } }"#), - @r###"{"data":{"findUniqueParent":{"id":1,"children":[{"childId":1,"dec":"1"},{"childId":2,"dec":"-1"},{"childId":3,"dec":"123.4567891"}]}}}"### + @r###"{"data":{"findUniqueParent":{"id":1,"children":[{"childId":1,"dec":"1"},{"childId":2,"dec":"-1"},{"childId":3,"dec":"123.4567891"},{"childId":4,"dec":"95993.57"}]}}}"### ); Ok(()) diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/self_relation.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/self_relation.rs index 4c7714bfdf72..1b3cd11df1ff 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/self_relation.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/self_relation.rs @@ -43,8 +43,12 @@ mod self_relation_filters { schema.to_owned() } + 
#[connector_test(exclude(SqlServer, Sqlite("cfd1")))] // Filter Queries along self relations should succeed with one level. - #[connector_test(exclude(SqlServer))] + // On D1, this test fails with a panic: + // ``` + // {"errors":[{"error":"RecordNotFound(\"Expected 1 records to be connected after connect operation on one-to-many relation 'Cuckoo', found 4.\")","user_facing_error":{"is_panic":false,"message":"The required connected records were not found. Expected 1 records to be connected after connect operation on one-to-many relation 'Cuckoo', found 4.","meta":{"details":"Expected 1 records to be connected after connect operation on one-to-many relation 'Cuckoo', found 4."},"error_code":"P2018"}}]} + // ``` async fn l1_query(runner: Runner) -> TestResult<()> { test_data(&runner).await?; @@ -63,7 +67,7 @@ mod self_relation_filters { } // Filter Queries along self relations should succeed with two levels. - #[connector_test(exclude(SqlServer))] + #[connector_test(exclude(SqlServer, Sqlite("cfd1")))] async fn l2_query(runner: Runner) -> TestResult<()> { test_data(&runner).await?; @@ -86,7 +90,7 @@ mod self_relation_filters { } // Filter Queries along OneToOne self relations should succeed with two levels. - #[connector_test(exclude(SqlServer))] + #[connector_test(exclude(SqlServer, Sqlite("cfd1")))] async fn l2_one2one(runner: Runner) -> TestResult<()> { test_data(&runner).await?; @@ -107,7 +111,7 @@ mod self_relation_filters { } // Filter Queries along OneToOne self relations should succeed with null filter. - #[connector_test(exclude(SqlServer))] + #[connector_test(exclude(SqlServer, Sqlite("cfd1")))] async fn one2one_null(runner: Runner) -> TestResult<()> { test_data(&runner).await?; @@ -126,7 +130,7 @@ mod self_relation_filters { } // Filter Queries along OneToOne self relations should succeed with {} filter. 
- #[connector_test(exclude(SqlServer))] + #[connector_test(exclude(SqlServer, Sqlite("cfd1")))] async fn one2one_empty(runner: Runner) -> TestResult<()> { test_data(&runner).await?; @@ -145,7 +149,7 @@ mod self_relation_filters { } // Filter Queries along OneToMany self relations should fail with null filter. - #[connector_test(exclude(SqlServer))] + #[connector_test(exclude(SqlServer, Sqlite("cfd1")))] async fn one2one_null_fail(runner: Runner) -> TestResult<()> { test_data(&runner).await?; @@ -167,7 +171,7 @@ mod self_relation_filters { } // Filter Queries along OneToMany self relations should succeed with empty filter (`{}`). - #[connector_test(exclude(SqlServer))] + #[connector_test(exclude(SqlServer, Sqlite("cfd1")))] async fn one2many_empty(runner: Runner) -> TestResult<()> { test_data(&runner).await?; @@ -186,7 +190,7 @@ mod self_relation_filters { } // Filter Queries along ManyToMany self relations should succeed with valid filter `some`. - #[connector_test(exclude(SqlServer))] + #[connector_test(exclude(SqlServer, Sqlite("cfd1")))] async fn many2many_some(runner: Runner) -> TestResult<()> { test_data(&runner).await?; @@ -208,7 +212,7 @@ mod self_relation_filters { } // Filter Queries along ManyToMany self relations should succeed with valid filter `none`. - #[connector_test(exclude(SqlServer))] + #[connector_test(exclude(SqlServer, Sqlite("cfd1")))] async fn many2many_none(runner: Runner) -> TestResult<()> { test_data(&runner).await?; @@ -227,7 +231,7 @@ mod self_relation_filters { } // Filter Queries along ManyToMany self relations should succeed with valid filter `every`. - #[connector_test(exclude(SqlServer))] + #[connector_test(exclude(SqlServer, Sqlite("cfd1")))] async fn many2many_every(runner: Runner) -> TestResult<()> { test_data(&runner).await?; @@ -246,7 +250,7 @@ mod self_relation_filters { } // Filter Queries along ManyToMany self relations should give an error with null. 
- #[connector_test(exclude(SqlServer))] + #[connector_test(exclude(SqlServer, Sqlite("cfd1")))] async fn many2many_null_error(runner: Runner) -> TestResult<()> { test_data(&runner).await?; @@ -269,7 +273,7 @@ mod self_relation_filters { } // Filter Queries along ManyToMany self relations should succeed with {} filter `some`. - #[connector_test(exclude(SqlServer))] + #[connector_test(exclude(SqlServer, Sqlite("cfd1")))] async fn many2many_empty_some(runner: Runner) -> TestResult<()> { test_data(&runner).await?; @@ -288,7 +292,7 @@ mod self_relation_filters { } // Filter Queries along ManyToMany self relations should succeed with {} filter `none`. - #[connector_test(exclude(SqlServer))] + #[connector_test(exclude(SqlServer, Sqlite("cfd1")))] async fn many2many_empty_none(runner: Runner) -> TestResult<()> { test_data(&runner).await?; @@ -308,7 +312,7 @@ mod self_relation_filters { } // Filter Queries along ManyToMany self relations should succeed with {} filter `every`. - #[connector_test(exclude(SqlServer))] + #[connector_test(exclude(SqlServer, Sqlite("cfd1")))] async fn many2many_empty_every(runner: Runner) -> TestResult<()> { test_data(&runner).await?; @@ -328,7 +332,7 @@ mod self_relation_filters { } // Filter Queries along ManyToOne self relations should succeed valid filter. - #[connector_test(exclude(SqlServer))] + #[connector_test(exclude(SqlServer, Sqlite("cfd1")))] async fn many2one(runner: Runner) -> TestResult<()> { test_data(&runner).await?; @@ -347,7 +351,7 @@ mod self_relation_filters { } // Filter Queries along ManyToOne self relations should succeed with {} filter. - #[connector_test(exclude(SqlServer))] + #[connector_test(exclude(SqlServer, Sqlite("cfd1")))] async fn many2one_empty_filter(runner: Runner) -> TestResult<()> { test_data(&runner).await?; @@ -366,7 +370,7 @@ mod self_relation_filters { } // Filter Queries along ManyToOne self relations should succeed with null filter. 
- #[connector_test(exclude(SqlServer))] + #[connector_test(exclude(SqlServer, Sqlite("cfd1")))] async fn many2one_null_filter(runner: Runner) -> TestResult<()> { test_data(&runner).await?; diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/order_and_pagination/order_by_aggregation.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/order_and_pagination/order_by_aggregation.rs index dac031f788f8..3c94dd50d30c 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/order_and_pagination/order_by_aggregation.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/order_and_pagination/order_by_aggregation.rs @@ -769,7 +769,6 @@ mod order_by_aggr { schema.to_owned() } - // Regression test for: // https://github.com/prisma/prisma/issues/8036 #[connector_test(schema(schema_regression_8036))] async fn count_m2m_records_not_connected(runner: Runner) -> TestResult<()> { run_query!( diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/views.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/views.rs index feab8a87f2fe..961101e18b33 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/views.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/views.rs @@ -1,8 +1,9 @@ use query_engine_tests::*; // https://stackoverflow.com/questions/4380813/how-to-get-rid-of-mysql-error-prepared-statement-needs-to-be-re-prepared -// Looks like there's a bug with create view stmt on MariaDB -#[test_suite(schema(schema), exclude(MongoDb, MySQL("mariadb"), Vitess))] +// Looks like there's a bug with create view stmt on MariaDB. +// On D1, the migration setup fails because Schema Engine doesn't know anything about Driver Adapters. 
+#[test_suite(schema(schema), exclude(MongoDb, MySQL("mariadb"), Vitess, Sqlite("cfd1")))] mod views { use query_engine_tests::{connector_test, run_query, Runner}; diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/raw/sql/typed_output.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/raw/sql/typed_output.rs index c3687ddd9f3e..ac5f27f2a0f7 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/raw/sql/typed_output.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/raw/sql/typed_output.rs @@ -483,7 +483,7 @@ mod typed_output { schema.to_owned() } - #[connector_test(schema(schema_sqlite), only(Sqlite))] + #[connector_test(schema(schema_sqlite), only(Sqlite), exclude(Sqlite("cfd1")))] async fn all_scalars_sqlite(runner: Runner) -> TestResult<()> { create_row( &runner, diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/data_types/bigint.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/data_types/bigint.rs index 469ebd227d49..9f158e37d319 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/data_types/bigint.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/data_types/bigint.rs @@ -16,8 +16,14 @@ mod bigint { schema.to_owned() } - // "Using a BigInt field" should "work" - #[connector_test()] + #[connector_test(exclude(Sqlite("cfd1")))] + // "Using a BigInt field" should "work". 
+ // On D1, this fails with: + // + // ```diff + // - {"data":{"createOneModel":{"field":"123456789012341234"}}} + // + {"data":{"createOneModel":{"field":"123456789012341200"}}} + // ``` async fn using_bigint_field(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, r#"mutation { diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/ids/byoid.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/ids/byoid.rs index 7a85da2e9d33..d3ead4332b65 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/ids/byoid.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/ids/byoid.rs @@ -60,6 +60,9 @@ mod byoid { | query_engine_tests::ConnectorVersion::Vitess(Some(query_tests_setup::VitessVersion::PlanetscaleJsWasm)) => { "constraint: `PRIMARY`" } + query_engine_tests::ConnectorVersion::Sqlite(Some(query_tests_setup::SqliteVersion::CloudflareD1)) => { + "fields: (`UNIQUE constraint failed`)" + } query_engine_tests::ConnectorVersion::Vitess(_) => "(not available)", _ => "fields: (`id`)", }; @@ -92,6 +95,9 @@ mod byoid { | query_engine_tests::ConnectorVersion::Vitess(Some(query_tests_setup::VitessVersion::PlanetscaleJsWasm)) => { "constraint: `PRIMARY`" } + query_engine_tests::ConnectorVersion::Sqlite(Some(query_tests_setup::SqliteVersion::CloudflareD1)) => { + "fields: (`UNIQUE constraint failed`)" + } ConnectorVersion::Vitess(_) => "(not available)", _ => "fields: (`id`)", }; @@ -154,6 +160,9 @@ mod byoid { | query_engine_tests::ConnectorVersion::Vitess(Some(query_tests_setup::VitessVersion::PlanetscaleJsWasm)) => { "constraint: `PRIMARY`" } + query_engine_tests::ConnectorVersion::Sqlite(Some(query_tests_setup::SqliteVersion::CloudflareD1)) => { + "fields: (`UNIQUE constraint failed`)" + } ConnectorVersion::Vitess(_) => "(not available)", _ => "fields: (`id`)", }; @@ -186,6 +195,9 @@ mod byoid { | 
query_engine_tests::ConnectorVersion::Vitess(Some(query_tests_setup::VitessVersion::PlanetscaleJsWasm)) => { "constraint: `PRIMARY`" } + query_engine_tests::ConnectorVersion::Sqlite(Some(query_tests_setup::SqliteVersion::CloudflareD1)) => { + "fields: (`UNIQUE constraint failed`)" + } ConnectorVersion::Vitess(_) => "(not available)", _ => "fields: (`id`)", }; diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/not_using_schema_base/nested_create_many.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/not_using_schema_base/nested_create_many.rs index 3cd6be2eabe2..821b99f9fce8 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/not_using_schema_base/nested_create_many.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/not_using_schema_base/nested_create_many.rs @@ -25,7 +25,7 @@ mod nested_create_many { } // "A basic createMany on a create top level" should "work" - #[connector_test(exclude(Sqlite))] + #[connector_test] async fn create_many_on_create(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, r#"mutation { @@ -53,7 +53,7 @@ mod nested_create_many { } // "A basic createMany on a create top level" should "work" - #[connector_test(exclude(Sqlite))] + #[connector_test] async fn create_many_shorthand_on_create(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, r#"mutation { @@ -78,7 +78,7 @@ mod nested_create_many { // "Nested createMany" should "error on duplicates by default" // TODO(dom): Not working for mongo - #[connector_test(exclude(Sqlite, MongoDb))] + #[connector_test(exclude(MongoDb))] async fn nested_createmany_fail_dups(runner: Runner) -> TestResult<()> { assert_error!( &runner, @@ -140,7 +140,7 @@ mod nested_create_many { // Each DB allows a certain amount of params per single query, and a certain number of rows. 
// We create 1000 nested records. // "Nested createMany" should "allow creating a large number of records (horizontal partitioning check)" - #[connector_test(exclude(Sqlite))] + #[connector_test(exclude(Sqlite("cfd1")))] async fn allow_create_large_number_records(runner: Runner) -> TestResult<()> { let records: Vec<_> = (1..=1000).map(|i| format!(r#"{{ id: {i}, str1: "{i}" }}"#)).collect(); diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/not_using_schema_base/nested_update_inside_update.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/not_using_schema_base/nested_update_inside_update.rs index a543ba7b8f51..b25dc7e8cfa3 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/not_using_schema_base/nested_update_inside_update.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/not_using_schema_base/nested_update_inside_update.rs @@ -590,8 +590,14 @@ mod update_inside_update { // Transactionality + #[connector_test(schema(schema_1), exclude(Sqlite("cfd1")))] // "TRANSACTIONAL: a many to many relation" should "fail gracefully on wrong where and assign error correctly and not execute partially" - #[connector_test(schema(schema_1))] + // On D1, this fails with: + // + // ```diff + // - {"data":{"findUniqueNote":{"text":"Some Text"}}} + // + {"data":{"findUniqueNote":{"text":"Some Changed Text"}}} + // ``` async fn tx_m2m_fail_wrong_where(runner: Runner) -> TestResult<()> { let res = run_query_json!( &runner, diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/create_many.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/create_many.rs index 35a044b1473d..f59aee0756fb 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/create_many.rs +++ 
b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/create_many.rs @@ -1,6 +1,7 @@ use query_engine_tests::*; -#[test_suite(capabilities(CreateMany))] +// TODO: create many returns the wrong count for CFD1 +#[test_suite(capabilities(CreateMany), exclude(Sqlite("cfd1")))] mod create_many { use indoc::indoc; use query_engine_tests::{assert_error, run_query}; @@ -64,7 +65,11 @@ mod create_many { } // Covers: AutoIncrement ID working with basic autonincrement functionality. - #[connector_test(schema(schema_2), capabilities(CreateManyWriteableAutoIncId), exclude(CockroachDb))] + #[connector_test( + schema(schema_2), + capabilities(CreateManyWriteableAutoIncId), + exclude(CockroachDb, Sqlite("cfd1")) + )] async fn basic_create_many_autoincrement(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, r#"mutation { @@ -294,6 +299,181 @@ mod create_many { Ok(()) } + + fn schema_7() -> String { + let schema = indoc! { + r#"model Test { + req Int @id + req_default Int @default(dbgenerated("1")) + req_default_static Int @default(1) + opt Int? + opt_default Int? @default(dbgenerated("1")) + opt_default_static Int? @default(1) + }"# + }; + + schema.to_owned() + } + + #[connector_test(schema(schema_7), only(Sqlite))] + async fn create_many_by_shape(runner: Runner) -> TestResult<()> { + use itertools::Itertools; + + let mut id = 1; + + // Generates a powerset of all combinations of these fields + // In an attempt to ensure that we never generate invalid insert statements + // because of the grouping logic. 
+ for sets in vec!["req_default", "opt", "opt_default"] + .into_iter() + .powerset() + .map(|mut set| { + set.extend_from_slice(&["req"]); + set + }) + .powerset() + { + let data = sets + .into_iter() + .map(|set| { + let res = set.into_iter().map(|field| format!("{field}: {id}")).join(", "); + + id += 1; + + format!("{{ {res} }}") + }) + .join(", "); + + run_query!( + &runner, + format!(r#"mutation {{ createManyTest(data: [{data}]) {{ count }} }}"#) + ); + } + + Ok(()) + } + + // LibSQL & co are ignored because they don't support metrics + #[connector_test(schema(schema_7), only(Sqlite("3")))] + async fn create_many_by_shape_counter_1(runner: Runner) -> TestResult<()> { + use query_engine_metrics::PRISMA_DATASOURCE_QUERIES_TOTAL; + + // Generated queries: + // INSERT INTO `main`.`Test` (`opt`, `req`) VALUES (null, ?), (?, ?) params=[1,2,2] + // INSERT INTO `main`.`Test` (`opt_default`, `opt`, `req`) VALUES (?, null, ?), (?, ?, ?) params=[3,3,6,6,6] + // INSERT INTO `main`.`Test` (`req_default`, `opt_default`, `req`, `opt`) VALUES (?, ?, ?, null), (?, ?, ?, ?) params=[5,5,5,7,7,7,7] + // INSERT INTO `main`.`Test` (`req`, `req_default`, `opt`) VALUES (?, ?, ?) params=[4,4,4] + run_query!( + &runner, + r#"mutation { + createManyTest( + data: [ + { req: 1 } + { opt: 2, req: 2 } + { opt_default: 3, req: 3 } + { req_default: 4, opt: 4, req: 4 } + { req_default: 5, opt_default: 5, req: 5 } + { opt: 6, opt_default: 6, req: 6 } + { req_default: 7, opt: 7, opt_default: 7, req: 7 } + ] + ) { + count + } + }"# + ); + + let json = runner.get_metrics().to_json(Default::default()); + let counter = metrics::get_counter(&json, PRISMA_DATASOURCE_QUERIES_TOTAL); + + match runner.max_bind_values() { + Some(x) if x > 18 => assert_eq!(counter, 6), // 4 queries in total (BEGIN/COMMIT are counted) + // Some queries are being split because of `QUERY_BATCH_SIZE` being set to `10` in dev. 
+ Some(_) => assert_eq!(counter, 7), // 5 queries in total (BEGIN/COMMIT are counted) + _ => panic!("Expected max bind values to be set"), + } + + Ok(()) + } + + // LibSQL & co are ignored because they don't support metrics + #[connector_test(schema(schema_7), only(Sqlite("3")))] + async fn create_many_by_shape_counter_2(runner: Runner) -> TestResult<()> { + use query_engine_metrics::PRISMA_DATASOURCE_QUERIES_TOTAL; + + // Generated queries: + // INSERT INTO `main`.`Test` ( `opt_default_static`, `req_default_static`, `opt`, `req` ) VALUES (?, ?, null, ?), (?, ?, null, ?), (?, ?, null, ?) params=[1,1,1,2,1,2,1,3,3] + // INSERT INTO `main`.`Test` ( `opt_default_static`, `req_default_static`, `opt`, `req` ) VALUES (?, ?, ?, ?), (?, ?, ?, ?) params=[1,1,8,4,1,1,null,5] + // Note: Two queries are generated because QUERY_BATCH_SIZE is set to 10. In production, a single query would be generated for this example. + run_query!( + &runner, + r#"mutation { + createManyTest( + data: [ + { req: 1 } + { req: 2, opt_default_static: 2 }, + { req: 3, req_default_static: 3 }, + { req: 4, opt: 8 }, + { req: 5, opt: null }, + ] + ) { + count + } + }"# + ); + + let json = runner.get_metrics().to_json(Default::default()); + let counter = metrics::get_counter(&json, PRISMA_DATASOURCE_QUERIES_TOTAL); + + match runner.max_bind_values() { + Some(x) if x >= 18 => assert_eq!(counter, 3), // 1 createMany queries (BEGIN/COMMIT are counted) + // Some queries are being split because of `QUERY_BATCH_SIZE` being set to `10` in dev. 
+ Some(_) => assert_eq!(counter, 4), // 2 createMany queries (BEGIN/COMMIT are counted) + _ => panic!("Expected max bind values to be set"), + } + + Ok(()) + } + + // LibSQL & co are ignored because they don't support metrics + #[connector_test(schema(schema_7), only(Sqlite("3")))] + async fn create_many_by_shape_counter_3(runner: Runner) -> TestResult<()> { + use query_engine_metrics::PRISMA_DATASOURCE_QUERIES_TOTAL; + + // Generated queries: + // INSERT INTO `main`.`Test` ( `req_default_static`, `req`, `opt_default`, `opt_default_static` ) VALUES (?, ?, ?, ?) params=[1,6,3,1] + // INSERT INTO `main`.`Test` ( `opt`, `req`, `req_default_static`, `opt_default_static` ) VALUES (null, ?, ?, ?), (null, ?, ?, ?), (null, ?, ?, ?) params=[1,1,1,2,1,2,3,3,1] + // INSERT INTO `main`.`Test` ( `opt`, `req`, `req_default_static`, `opt_default_static` ) VALUES (?, ?, ?, ?), (?, ?, ?, ?) params=[8,4,1,1,null,5,1,1] + // Note: The first two queries are split because QUERY_BATCH_SIZE is set to 10. In production, only two queries would be generated for this example. + run_query!( + &runner, + r#"mutation { + createManyTest( + data: [ + { req: 1 } + { req: 2, opt_default_static: 2 }, + { req: 3, req_default_static: 3 }, + { req: 4, opt: 8 }, + { req: 5, opt: null }, + { req: 6, opt_default: 3 }, + ] + ) { + count + } + }"# + ); + + let json = runner.get_metrics().to_json(Default::default()); + let counter = metrics::get_counter(&json, PRISMA_DATASOURCE_QUERIES_TOTAL); + + match runner.max_bind_values() { + Some(x) if x > 21 => assert_eq!(counter, 4), // 3 createMany queries in total (BEGIN/COMMIT are counted) + // Some queries are being split because of `QUERY_BATCH_SIZE` being set to `10` in dev. 
+ Some(_) => assert_eq!(counter, 5), // 3 createMany queries in total (BEGIN/COMMIT are counted) + _ => panic!("Expected max bind values to be set"), + } + + Ok(()) + } } #[test_suite(schema(json_opt), exclude(MySql(5.6)), capabilities(CreateMany, Json))] diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/delete_many_relations.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/delete_many_relations.rs index 01e4aa7d55b2..ec9508347a6e 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/delete_many_relations.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/delete_many_relations.rs @@ -6,8 +6,19 @@ mod delete_many_rels { use query_engine_tests::{run_query, Runner}; use query_test_macros::relation_link_test; + #[relation_link_test( + on_parent = "ToOneOpt", + on_child = "ToOneOpt", + id_only = true, + exclude(Sqlite("cfd1")) + )] // "a P1 to C1 relation " should "succeed when trying to delete the parent" - #[relation_link_test(on_parent = "ToOneOpt", on_child = "ToOneOpt", id_only = true)] + // On D1, this fails with: + // + // ```diff + // - {"data":{"deleteManyParent":{"count":1}}} + // + {"data":{"deleteManyParent":{"count":3}}} + // ``` async fn p1_c1(runner: &Runner, _t: &DatamodelWithParams) -> TestResult<()> { runner .query(indoc! 
{ r#" @@ -115,8 +126,14 @@ mod delete_many_rels { Ok(()) } - // "a PM to C1 " should "succeed in deleting the parent" - #[relation_link_test(on_parent = "ToMany", on_child = "ToOneOpt")] + #[relation_link_test(on_parent = "ToMany", on_child = "ToOneOpt", exclude(Sqlite("cfd1")))] + // "a PM to C1 relation " should "succeed in deleting the parent" + // On D1, this fails with: + // + // ```diff + // - {"data":{"deleteManyParent":{"count":1}}} + // + {"data":{"deleteManyParent":{"count":3}}} + // ``` async fn pm_c1(runner: &Runner, _t: &DatamodelWithParams) -> TestResult<()> { runner .query(indoc! { r#" @@ -262,8 +279,14 @@ mod delete_many_rels { Ok(()) } - // "a PM to CM relation" should "succeed in deleting the parent" - #[relation_link_test(on_parent = "ToMany", on_child = "ToMany")] + #[relation_link_test(on_parent = "ToMany", on_child = "ToMany", exclude(Sqlite("cfd1")))] + // "a PM to CM relation" should "succeed in deleting the parent" + // On D1, this fails with: + // + // ```diff + // - {"data":{"deleteManyParent":{"count":1}}} + // + {"data":{"deleteManyParent":{"count":3}}} + // ``` async fn pm_cm(runner: &Runner, _t: &DatamodelWithParams) -> TestResult<()> { runner .query(indoc! 
{ r#" @@ -349,8 +372,14 @@ mod delete_many_rels { schema.to_owned() } + #[connector_test(schema(additional_schema), exclude(Sqlite("cfd1")))] // "a PM to CM relation" should "delete the parent from other relations as well" - #[connector_test(schema(additional_schema))] + // On D1, this fails with: + // + // ```diff + // - {"data":{"deleteManyParent":{"count":1}}} + // + {"data":{"deleteManyParent":{"count":3}}} + // ``` async fn pm_cm_other_relations(runner: Runner) -> TestResult<()> { runner .query( diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/config.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/config.rs index f1248e3c4d94..fa7964d4a263 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/config.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/config.rs @@ -2,13 +2,15 @@ use crate::{ CockroachDbConnectorTag, ConnectorTag, ConnectorVersion, MongoDbConnectorTag, MySqlConnectorTag, PostgresConnectorTag, SqlServerConnectorTag, SqliteConnectorTag, TestResult, VitessConnectorTag, }; +use qe_setup::driver_adapters::DriverAdapter; use serde::{Deserialize, Serialize}; use std::{convert::TryFrom, env, fmt::Display, fs::File, io::Read, path::PathBuf}; static TEST_CONFIG_FILE_NAME: &str = ".test_config"; -#[derive(Debug, Deserialize, Clone)] +#[derive(Debug, Deserialize, Default, Clone)] pub enum TestExecutor { + #[default] Napi, Wasm, } @@ -23,8 +25,10 @@ impl Display for TestExecutor { } /// The central test configuration. +/// This struct is a 1:1 mapping to the test config file. +/// After validation, this is used to generate [`TestConfig`] #[derive(Debug, Default, Deserialize)] -pub struct TestConfig { +pub struct TestConfigFromSerde { /// The connector that tests should run for. 
/// Env key: `TEST_CONNECTOR` pub(crate) connector: String, @@ -35,33 +39,156 @@ pub struct TestConfig { #[serde(rename = "version")] pub(crate) connector_version: Option, + /// Indicates whether or not the tests are running in CI context. + /// Env key: `BUILDKITE` + #[serde(default)] + pub(crate) is_ci: bool, + /// An external process to execute the test queries and produced responses for assertion /// Used when testing driver adapters, this process is expected to be a javascript process /// loading the library engine (as a library, or WASM modules) and providing it with a /// driver adapter. - /// Possible values: Napi, Wasm - /// Env key: `EXTERNAL_TEST_EXECUTOR` + /// Env key: `EXTERNAL_TEST_EXECUTOR`. + /// Correctness: if set, [`TestConfigFromSerde::driver_adapter`] must be set as well. pub(crate) external_test_executor: Option, /// The driver adapter to use when running tests, will be forwarded to the external test - /// executor by setting the `DRIVER_ADAPTER` env var when spawning the executor process - pub(crate) driver_adapter: Option, + /// executor by setting the `DRIVER_ADAPTER` env var when spawning the executor process. + /// Correctness: if set, [`TestConfigFromSerde::external_test_executor`] and + /// [`TestConfigFromSerde::driver_adapter_config`] must be set as well. + pub(crate) driver_adapter: Option, /// The driver adapter configuration to forward as a stringified JSON object to the external - /// test executor by setting the `DRIVER_ADAPTER_CONFIG` env var when spawning the executor + /// test executor by setting the `DRIVER_ADAPTER_CONFIG` env var when spawning the executor. + /// Correctness: if set, [`TestConfigFromSerde::driver_adapter`] must be set as well. pub(crate) driver_adapter_config: Option, +} - /// Indicates whether or not the tests are running in CI context. 
- /// Env key: `BUILDKITE` - #[serde(default)] +impl TestConfigFromSerde { + pub fn test_connector(&self) -> TestResult<(ConnectorTag, ConnectorVersion)> { + let version = ConnectorVersion::try_from((self.connector.as_str(), self.connector_version.as_deref()))?; + let tag = match version { + ConnectorVersion::SqlServer(_) => &SqlServerConnectorTag as ConnectorTag, + ConnectorVersion::Postgres(_) => &PostgresConnectorTag, + ConnectorVersion::MySql(_) => &MySqlConnectorTag, + ConnectorVersion::MongoDb(_) => &MongoDbConnectorTag, + ConnectorVersion::Sqlite(_) => &SqliteConnectorTag, + ConnectorVersion::CockroachDb(_) => &CockroachDbConnectorTag, + ConnectorVersion::Vitess(_) => &VitessConnectorTag, + }; + + Ok((tag, version)) + } + + pub(crate) fn validate(&self) { + if self.connector.is_empty() { + exit_with_message("A test connector is required but was not set."); + } + + match self.test_connector().map(|(_, v)| v) { + Ok(ConnectorVersion::Vitess(None)) + | Ok(ConnectorVersion::MySql(None)) + | Ok(ConnectorVersion::SqlServer(None)) + | Ok(ConnectorVersion::MongoDb(None)) + | Ok(ConnectorVersion::CockroachDb(None)) + | Ok(ConnectorVersion::Postgres(None)) + | Ok(ConnectorVersion::Sqlite(None)) => { + exit_with_message("The current test connector requires a version to be set to run."); + } + Ok(ConnectorVersion::Vitess(Some(_))) + | Ok(ConnectorVersion::MySql(Some(_))) + | Ok(ConnectorVersion::SqlServer(Some(_))) + | Ok(ConnectorVersion::MongoDb(Some(_))) + | Ok(ConnectorVersion::CockroachDb(Some(_))) + | Ok(ConnectorVersion::Postgres(Some(_))) + | Ok(ConnectorVersion::Sqlite(Some(_))) => (), + Err(err) => exit_with_message(&err.to_string()), + } + + if self.external_test_executor.is_some() && self.driver_adapter.is_none() { + exit_with_message( + "When using an external test executor, the driver adapter (DRIVER_ADAPTER env var) must be set.", + ); + } + + if self.driver_adapter.is_some() && self.external_test_executor.is_none() { + exit_with_message( + "When using a 
driver adapter, the external test executor (EXTERNAL_TEST_EXECUTOR env var) must be set.", + ); + } + + if self.driver_adapter.is_none() && self.driver_adapter_config.is_some() { + exit_with_message( + "When using a driver adapter config, the driver adapter (DRIVER_ADAPTER env var) must be set.", + ); + } + } +} + +// This struct contains every `driverAdapters`-related configuration entry. +pub(crate) struct WithDriverAdapter { + /// The driver adapter to use when running tests, will be forwarded to the external test + /// executor by setting the `DRIVER_ADAPTER` env var when spawning the executor process. + pub(crate) adapter: DriverAdapter, + + /// An external process to execute the test queries and produced responses for assertion + /// Used when testing driver adapters, this process is expected to be a javascript process + /// loading the library engine (as a library, or WASM modules) and providing it with a + /// driver adapter. + /// Env key: `EXTERNAL_TEST_EXECUTOR`. + pub(crate) test_executor: TestExecutor, + + /// The driver adapter configuration to forward as a stringified JSON object to the external + /// test executor by setting the `DRIVER_ADAPTER_CONFIG` env var when spawning the executor. 
+ pub(crate) config: Option, +} + +impl WithDriverAdapter { + fn json_stringify_config(&self) -> String { + self.config.as_ref().map(|cfg| cfg.json_stringify()).unwrap_or_default() + } +} + +pub struct TestConfig { + pub(crate) connector: String, + pub(crate) connector_version: Option, + pub(crate) with_driver_adapter: Option, pub(crate) is_ci: bool, } +impl From for TestConfig { + fn from(config: TestConfigFromSerde) -> Self { + config.validate(); + + let with_driver_adapter = match config.driver_adapter { + Some(adapter) => Some(WithDriverAdapter { + adapter, + test_executor: config.external_test_executor.unwrap(), + config: config.driver_adapter_config, + }), + None => None, + }; + + Self { + connector: config.connector, + connector_version: config.connector_version, + is_ci: config.is_ci, + with_driver_adapter, + } + } +} + #[derive(Debug, Default, Serialize, Deserialize)] pub(crate) struct DriverAdapterConfig { pub(crate) proxy_url: Option, } +impl DriverAdapterConfig { + fn json_stringify(&self) -> String { + serde_json::to_string(self).unwrap() + } +} + const CONFIG_LOAD_FAILED: &str = r####" ============================================= 🔴 Unable to load config from file or env. 🔴 @@ -117,6 +244,10 @@ impl TestConfig { config } + pub(crate) fn with_driver_adapter(&self) -> Option<&WithDriverAdapter> { + self.with_driver_adapter.as_ref() + } + #[rustfmt::skip] fn log_info(&self) { println!("******************************"); @@ -127,10 +258,10 @@ impl TestConfig { self.connector_version().unwrap_or_default() ); println!("* CI? 
{}", self.is_ci); - if let Some(external_test_executor) = self.external_test_executor.as_ref() { - println!("* External test executor: {}", external_test_executor); - println!("* Driver adapter: {}", self.driver_adapter().unwrap_or_default()); - println!("* Driver adapter config: {}", self.json_stringify_driver_adapter_config()); + if let Some(with_driver_adapter) = self.with_driver_adapter() { + println!("* External test executor: {}", with_driver_adapter.test_executor); + println!("* Driver adapter: {}", with_driver_adapter.adapter); + println!("* Driver adapter config: {}", with_driver_adapter.json_stringify_config()); } println!("******************************"); } @@ -142,7 +273,7 @@ impl TestConfig { .map(|value| serde_json::from_str::(&value).ok()) .unwrap_or_default(); - let driver_adapter = std::env::var("DRIVER_ADAPTER").ok(); + let driver_adapter = std::env::var("DRIVER_ADAPTER").ok().map(DriverAdapter::from); let driver_adapter_config = std::env::var("DRIVER_ADAPTER_CONFIG") .map(|config| serde_json::from_str::(config.as_str()).ok()) .unwrap_or_default(); @@ -150,14 +281,16 @@ impl TestConfig { // Just care for a set value for now. 
let is_ci = std::env::var("BUILDKITE").is_ok(); - connector.map(|connector| Self { - connector, - connector_version, - is_ci, - external_test_executor, - driver_adapter, - driver_adapter_config, - }) + connector + .map(|connector| TestConfigFromSerde { + connector, + connector_version, + is_ci, + external_test_executor, + driver_adapter, + driver_adapter_config, + }) + .map(Self::from) } fn from_file() -> Option { @@ -174,7 +307,8 @@ impl TestConfig { f.read_to_string(&mut config) .ok() - .and_then(|_| serde_json::from_str(&config).ok()) + .and_then(|_| serde_json::from_str::(&config).ok()) + .map(Self::from) }) } @@ -184,8 +318,7 @@ impl TestConfig { pub fn external_test_executor_path(&self) -> Option { const DEFAULT_TEST_EXECUTOR: &str = "query-engine/driver-adapters/executor/script/testd.sh"; - self.external_test_executor - .as_ref() + self.with_driver_adapter() .and_then(|_| { Self::workspace_root().or_else(|| { exit_with_message( @@ -198,30 +331,6 @@ impl TestConfig { } fn validate(&self) { - if self.connector.is_empty() { - exit_with_message("A test connector is required but was not set."); - } - - match self.test_connector().map(|(_, v)| v) { - Ok(ConnectorVersion::Vitess(None)) - | Ok(ConnectorVersion::MySql(None)) - | Ok(ConnectorVersion::SqlServer(None)) - | Ok(ConnectorVersion::MongoDb(None)) - | Ok(ConnectorVersion::CockroachDb(None)) - | Ok(ConnectorVersion::Postgres(None)) - | Ok(ConnectorVersion::Sqlite(None)) => { - exit_with_message("The current test connector requires a version to be set to run."); - } - Ok(ConnectorVersion::Vitess(Some(_))) - | Ok(ConnectorVersion::MySql(Some(_))) - | Ok(ConnectorVersion::SqlServer(Some(_))) - | Ok(ConnectorVersion::MongoDb(Some(_))) - | Ok(ConnectorVersion::CockroachDb(Some(_))) - | Ok(ConnectorVersion::Postgres(Some(_))) - | Ok(ConnectorVersion::Sqlite(Some(_))) => (), - Err(err) => exit_with_message(&err.to_string()), - } - if let Some(file) = self.external_test_executor_path().as_ref() { let path = 
PathBuf::from(file); let md = path.metadata(); @@ -243,24 +352,6 @@ impl TestConfig { } } } - - if self.external_test_executor.is_some() && self.driver_adapter.is_none() { - exit_with_message( - "When using an external test executor, the driver adapter (DRIVER_ADAPTER env var) must be set.", - ); - } - - if self.driver_adapter.is_some() && self.external_test_executor.is_none() { - exit_with_message( - "When using a driver adapter, the external test executor (EXTERNAL_TEST_EXECUTOR env var) must be set.", - ); - } - - if self.driver_adapter.is_none() && self.driver_adapter_config.is_some() { - exit_with_message( - "When using a driver adapter config, the driver adapter (DRIVER_ADAPTER env var) must be set.", - ); - } } pub fn connector(&self) -> &str { @@ -275,18 +366,6 @@ impl TestConfig { self.is_ci } - pub fn external_test_executor(&self) -> Option { - self.external_test_executor.clone() - } - - pub fn driver_adapter(&self) -> Option<&str> { - self.driver_adapter.as_deref() - } - - fn json_stringify_driver_adapter_config(&self) -> String { - serde_json::to_string(&self.driver_adapter_config).unwrap_or_default() - } - pub fn test_connector(&self) -> TestResult<(ConnectorTag, ConnectorVersion)> { let version = ConnectorVersion::try_from((self.connector(), self.connector_version()))?; let tag = match version { @@ -304,18 +383,20 @@ impl TestConfig { #[rustfmt::skip] pub fn for_external_executor(&self) -> Vec<(String, String)> { + let with_driver_adapter = self.with_driver_adapter().unwrap(); + vec!( ( "DRIVER_ADAPTER".to_string(), - self.driver_adapter.clone().unwrap_or_default() + with_driver_adapter.adapter.to_string() ), ( "DRIVER_ADAPTER_CONFIG".to_string(), - self.json_stringify_driver_adapter_config() + with_driver_adapter.json_stringify_config(), ), ( "EXTERNAL_TEST_EXECUTOR".to_string(), - self.external_test_executor.clone().unwrap_or(TestExecutor::Napi).to_string(), + with_driver_adapter.test_executor.to_string(), ), ( 
"PRISMA_DISABLE_QUAINT_EXECUTORS".to_string(), diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/mod.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/mod.rs index a09666794bcc..8912c227c079 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/mod.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/mod.rs @@ -8,6 +8,7 @@ mod sqlite; mod vitess; pub use mysql::MySqlVersion; +pub use sqlite::SqliteVersion; pub use vitess::VitessVersion; pub(crate) use cockroachdb::*; @@ -406,7 +407,7 @@ pub(crate) fn should_run( // FIXME: This skips vitess unless explicitly opted in. Replace with `true` when fixing // https://github.com/prisma/client-planning/issues/332 - CONFIG.external_test_executor().is_some() || !matches!(version, ConnectorVersion::Vitess(_)) + CONFIG.with_driver_adapter().is_some() || !matches!(version, ConnectorVersion::Vitess(_)) } impl TryFrom<(&str, Option<&str>)> for ConnectorVersion { diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/sqlite.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/sqlite.rs index 2173bbdd38f2..d1f185a6cf88 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/sqlite.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/sqlite.rs @@ -31,6 +31,7 @@ pub enum SqliteVersion { V3, LibsqlJsNapi, LibsqlJsWasm, + CloudflareD1, } impl ToString for SqliteVersion { @@ -39,6 +40,7 @@ impl ToString for SqliteVersion { SqliteVersion::V3 => "3".to_string(), SqliteVersion::LibsqlJsNapi => "libsql.js".to_string(), SqliteVersion::LibsqlJsWasm => "libsql.js.wasm".to_string(), + SqliteVersion::CloudflareD1 => "cfd1".to_owned(), } } } @@ -51,6 +53,7 @@ impl TryFrom<&str> for SqliteVersion { "3" => Self::V3, "libsql.js" => Self::LibsqlJsNapi, "libsql.js.wasm" => Self::LibsqlJsWasm, + "cfd1" => Self::CloudflareD1, _ => 
return Err(TestError::parse_error(format!("Unknown SQLite version `{s}`"))), }; Ok(version) diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/runner/mod.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/runner/mod.rs index 6fa095a1a15b..a5b376e4fb68 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/runner/mod.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/runner/mod.rs @@ -1,7 +1,7 @@ mod json_adapter; pub use json_adapter::*; -use serde::Deserialize; +use serde::{Deserialize, Serialize}; use crate::{ executor_process_request, ConnectorTag, ConnectorVersion, QueryResult, TestError, TestLogCapture, TestResult, @@ -48,6 +48,13 @@ impl From for TxResult { } } +#[derive(Deserialize, Debug)] +#[serde(untagged)] +enum StartTransactionResponse { + Ok { id: String }, + Error(user_facing_errors::Error), +} + pub enum RunnerExecutor { // Builtin is a runner that uses the query engine in-process, issuing queries against a // `core::InterpretingExecutor` that uses the particular connector under test in the test suite. @@ -56,30 +63,135 @@ pub enum RunnerExecutor { // External is a runner that uses an external process that responds to queries piped to its STDIN // in JsonRPC format. In particular this is used to test the query engine against a node process // running a library engine configured to use a javascript driver adapter to connect to a database. - // - // In this struct variant, usize represents the index of the schema used for the test suite to - // execute queries against. When the suite starts, a message with the schema and the id is sent to - // the external process, which will create a new instance of the library engine configured to - // access that schema. - // - // Everytime a query is sent to the external process, it's provided the id of the schema, so the - // process knows how to associate the query to the instance of the library engine that will dispatch - // it. 
- External(usize), + External(ExternalExecutor), } -impl RunnerExecutor { - async fn new_external(url: &str, schema: &str) -> TestResult { - static COUNTER: AtomicUsize = AtomicUsize::new(0); - let id = COUNTER.fetch_add(1, std::sync::atomic::Ordering::Relaxed); +/// [`ExternalExecutor::schema_id`] represents the index of the schema used for the test suite to +/// execute queries against. When the suite starts, a message with the schema and the id is sent to +/// the external process, which will create a new instance of the library engine configured to +/// access that schema. +/// +/// Everytime a query is sent to the external process, it's provided the id of the schema, so the +/// process knows how to associate the query to the instance of the library engine that will dispatch +/// it. +#[derive(Copy, Clone)] +pub struct ExternalExecutor { + schema_id: usize, +} + +/// [`ExternalExecutorInitializer`] is responsible for initialising a test session for the external process. +/// The initialisation can happen with or without a migration script, and is performed by submitting the +/// "initializeSchema" JSON-RPC request. +/// [`ExternalExecutorInitializer::schema_id`] is the schema id of the parent [`ExternalExecutor`]. +/// [`ExternalExecutorInitializer::url`] and [`ExternalExecutorInitializer::schema`] are the context +/// necessary for the "initializeSchema" JSON-RPC request. +/// The usage of `&'a str` is to avoid problems with `String` not implementing the `Copy` trait. 
+struct ExternalExecutorInitializer<'a> { + schema_id: usize, + url: &'a str, + schema: &'a str, +} + +impl<'a> qe_setup::ExternalInitializer<'a> for ExternalExecutorInitializer<'a> { + async fn init_with_migration( + &self, + migration_script: String, + ) -> Result<(), Box> { + let migration_script = Some(migration_script); + executor_process_request("initializeSchema", json!({ "schemaId": self.schema_id, "schema": self.schema, "url": self.url, "migrationScript": migration_script })).await?; + Ok(()) + } + async fn init(&self) -> Result<(), Box> { executor_process_request( "initializeSchema", - json!({ "schema": schema, "schemaId": id, "url": url }), + json!({ "schemaId": self.schema_id, "schema": self.schema, "url": self.url }), ) .await?; + Ok(()) + } - Ok(RunnerExecutor::External(id)) + fn url(&self) -> &'a str { + self.url + } + + fn datamodel(&self) -> &'a str { + self.schema + } +} + +impl ExternalExecutor { + /// Request a new schema id to be used for the external process. + /// This operation wraps around on overflow. + fn external_schema_id() -> usize { + static COUNTER: AtomicUsize = AtomicUsize::new(0); + COUNTER.fetch_add(1, std::sync::atomic::Ordering::Relaxed) + } + + fn new() -> Self { + let schema_id = Self::external_schema_id(); + Self { schema_id } + } + + /// Create a temporary initializer for external Driver Adapters. 
+ fn init<'a>(&self, datamodel: &'a str, url: &'a str) -> ExternalExecutorInitializer<'a> { + ExternalExecutorInitializer { + schema_id: self.schema_id, + url, + schema: datamodel, + } + } + + pub(self) async fn query( + &self, + json_query: JsonQuery, + current_tx_id: Option<&TxId>, + ) -> Result> { + let response_str: String = executor_process_request( + "query", + json!({ "schemaId": self.schema_id, "query": json_query, "txId": current_tx_id.map(ToString::to_string) }), + ) + .await?; + let response: QueryResult = serde_json::from_str(&response_str).unwrap(); + Ok(response) + } + + pub(self) async fn start_tx( + &self, + tx_opts: TransactionOptions, + ) -> Result> { + let response: StartTransactionResponse = + executor_process_request("startTx", json!({ "schemaId": self.schema_id, "options": tx_opts })).await?; + Ok(response) + } + + pub(self) async fn commit_tx( + &self, + tx_id: TxId, + ) -> Result> { + let response: TransactionEndResponse = executor_process_request( + "commitTx", + json!({ "schemaId": self.schema_id, "txId": tx_id.to_string() }), + ) + .await?; + Ok(response) + } + + pub(self) async fn rollback_tx( + &self, + tx_id: TxId, + ) -> Result> { + let response: TransactionEndResponse = executor_process_request( + "rollbackTx", + json!({ "schemaId": self.schema_id, "txId": tx_id.to_string() }), + ) + .await?; + Ok(response) + } + + pub(crate) async fn get_logs(&self) -> Result, Box> { + let response: Vec = executor_process_request("getLogs", json!({ "schemaId": self.schema_id })).await?; + Ok(response) } } @@ -98,14 +210,14 @@ pub struct Runner { impl Runner { pub(crate) fn schema_id(&self) -> Option { - match self.executor { + match &self.executor { RunnerExecutor::Builtin(_) => None, - RunnerExecutor::External(schema_id) => Some(schema_id), + RunnerExecutor::External(external) => Some(external.schema_id), } } pub fn prisma_dml(&self) -> &str { - self.query_schema.internal_data_model.schema.db.source() + 
self.query_schema.internal_data_model.schema.db.source_assert_single() } pub fn max_bind_values(&self) -> Option { @@ -120,17 +232,27 @@ impl Runner { metrics: MetricRegistry, log_capture: TestLogCapture, ) -> TestResult { - qe_setup::setup(&datamodel, db_schemas).await?; - let protocol = EngineProtocol::from(&ENGINE_PROTOCOL.to_string()); let schema = psl::parse_schema(&datamodel).unwrap(); let datasource = schema.configuration.datasources.first().unwrap(); let url = datasource.load_url(|key| env::var(key).ok()).unwrap(); - let (executor, db_version) = match crate::CONFIG.external_test_executor() { - Some(_) => (RunnerExecutor::new_external(&url, &datamodel).await?, None), + let (executor, db_version) = match crate::CONFIG.with_driver_adapter() { + Some(with_driver_adapter) => { + let external_executor = ExternalExecutor::new(); + let external_initializer: ExternalExecutorInitializer<'_> = + external_executor.init(&datamodel, url.as_str()); + let executor = RunnerExecutor::External(external_executor); + + qe_setup::setup_external(with_driver_adapter.adapter, external_initializer, db_schemas).await?; + + let database_version = None; + (executor, database_version) + } None => { - let executor = request_handlers::load_executor( + qe_setup::setup(&datamodel, db_schemas).await?; + + let query_executor = request_handlers::load_executor( ConnectorKind::Rust { url: url.to_owned(), datasource, @@ -138,12 +260,12 @@ impl Runner { schema.configuration.preview_features(), ) .await?; - - let connector = executor.primary_connector(); + let connector = query_executor.primary_connector(); let conn = connector.get_connection().await.unwrap(); let database_version = conn.version().await; - (RunnerExecutor::Builtin(executor), database_version) + let executor = RunnerExecutor::Builtin(query_executor); + (executor, database_version) } }; @@ -172,11 +294,9 @@ impl Runner { let executor = match &self.executor { RunnerExecutor::Builtin(e) => e, - RunnerExecutor::External(schema_id) => 
match JsonRequest::from_graphql(&query, self.query_schema()) { + RunnerExecutor::External(external) => match JsonRequest::from_graphql(&query, self.query_schema()) { Ok(json_query) => { - let response_str: String = - executor_process_request("query", json!({ "query": json_query, "schemaId": schema_id, "txId": self.current_tx_id.as_ref().map(ToString::to_string) })).await?; - let mut response: QueryResult = serde_json::from_str(&response_str).unwrap(); + let mut response = external.query(json_query, self.current_tx_id.as_ref()).await?; response.detag(); return Ok(response); } @@ -242,13 +362,8 @@ impl Runner { let executor = match &self.executor { RunnerExecutor::Builtin(e) => e, - RunnerExecutor::External(schema_id) => { - let response_str: String = executor_process_request( - "query", - json!({ "query": query, "schemaId": schema_id, "txId": self.current_tx_id.as_ref().map(ToString::to_string) }), - ) - .await?; - let response: QueryResult = serde_json::from_str(&response_str).unwrap(); + RunnerExecutor::External(external) => { + let response = external.query(query, self.current_tx_id.as_ref()).await?; return Ok(response); } }; @@ -316,7 +431,7 @@ impl Runner { isolation_level: Option, ) -> TestResult { let executor = match &self.executor { - RunnerExecutor::External(schema_id) => { + RunnerExecutor::External(external) => { // Translate the GraphQL query to JSON let batch = queries .into_iter() @@ -328,12 +443,7 @@ impl Runner { false => None, }; let json_query = JsonBody::Batch(JsonBatchQuery { batch, transaction }); - let response_str: String = executor_process_request( - "query", - json!({ "query": json_query, "schemaId": schema_id, "txId": self.current_tx_id.as_ref().map(ToString::to_string) }) - ).await?; - - let mut response: QueryResult = serde_json::from_str(&response_str).unwrap(); + let mut response: QueryResult = external.query(json_query, self.current_tx_id.as_ref()).await?; response.detag(); return Ok(response); } @@ -390,15 +500,8 @@ impl Runner { 
.await?; Ok(id) } - RunnerExecutor::External(schema_id) => { - #[derive(Deserialize, Debug)] - #[serde(untagged)] - enum StartTransactionResponse { - Ok { id: String }, - Error(user_facing_errors::Error), - } - let response: StartTransactionResponse = - executor_process_request("startTx", json!({ "schemaId": schema_id, "options": tx_opts })).await?; + RunnerExecutor::External(external) => { + let response: StartTransactionResponse = external.start_tx(tx_opts).await?; match response { StartTransactionResponse::Ok { id } => Ok(id.into()), @@ -421,11 +524,8 @@ impl Runner { Ok(Ok(())) } } - RunnerExecutor::External(schema_id) => { - let response: TransactionEndResponse = - executor_process_request("commitTx", json!({ "schemaId": schema_id, "txId": tx_id.to_string() })) - .await?; - + RunnerExecutor::External(external) => { + let response = external.commit_tx(tx_id).await?; Ok(response.into()) } } @@ -442,13 +542,8 @@ impl Runner { Ok(Ok(())) } } - RunnerExecutor::External(schema_id) => { - let response: TransactionEndResponse = executor_process_request( - "rollbackTx", - json!({ "schemaId": schema_id, "txId": tx_id.to_string() }), - ) - .await?; - + RunnerExecutor::External(external) => { + let response = external.rollback_tx(tx_id).await?; Ok(response.into()) } } @@ -478,11 +573,8 @@ impl Runner { let mut logs = self.log_capture.get_logs().await; match &self.executor { RunnerExecutor::Builtin(_) => logs, - RunnerExecutor::External(schema_id) => { - let mut external_logs: Vec = - executor_process_request("getLogs", json!({ "schemaId": schema_id })) - .await - .unwrap(); + RunnerExecutor::External(external) => { + let mut external_logs = external.get_logs().await.unwrap(); logs.append(&mut external_logs); logs } diff --git a/query-engine/connector-test-kit-rs/test-configs/cloudflare-d1 b/query-engine/connector-test-kit-rs/test-configs/cloudflare-d1 new file mode 100644 index 000000000000..51f9a52edea3 --- /dev/null +++ 
b/query-engine/connector-test-kit-rs/test-configs/cloudflare-d1 @@ -0,0 +1,6 @@ +{ + "connector": "sqlite", + "version": "cfd1", + "driver_adapter": "d1", + "external_test_executor": "Wasm" +} \ No newline at end of file diff --git a/query-engine/connectors/query-connector/src/write_args.rs b/query-engine/connectors/query-connector/src/write_args.rs index 445037bdbbe2..b02fa873f83c 100644 --- a/query-engine/connectors/query-connector/src/write_args.rs +++ b/query-engine/connectors/query-connector/src/write_args.rs @@ -16,7 +16,7 @@ pub struct WriteArgs { /// Wrapper struct to force a bit of a reflection whether or not the string passed /// to the write arguments is the data source field name, not the model field name. /// Also helps to avoid errors with convenient from-field conversions. -#[derive(Debug, PartialEq, Clone, Hash, Eq)] +#[derive(Debug, PartialEq, Clone, Hash, Eq, PartialOrd, Ord)] pub struct DatasourceFieldName(pub String); impl Deref for DatasourceFieldName { diff --git a/query-engine/connectors/sql-query-connector/src/database/operations/read/coerce.rs b/query-engine/connectors/sql-query-connector/src/database/operations/read/coerce.rs index 51508704ab89..0b2020e933bf 100644 --- a/query-engine/connectors/sql-query-connector/src/database/operations/read/coerce.rs +++ b/query-engine/connectors/sql-query-connector/src/database/operations/read/coerce.rs @@ -1,4 +1,4 @@ -use bigdecimal::{BigDecimal, FromPrimitive, ParseBigDecimalError}; +use bigdecimal::{BigDecimal, ParseBigDecimalError}; use itertools::Itertools; use query_structure::*; use std::{borrow::Cow, io, str::FromStr}; @@ -137,13 +137,9 @@ pub(crate) fn coerce_json_scalar_to_pv(value: serde_json::Value, sf: &ScalarFiel build_conversion_error(sf, &format!("Number({n})"), &format!("{:?}", sf.type_identifier())) })?)), TypeIdentifier::Float | TypeIdentifier::Decimal => { - let bd = n - .as_f64() - .and_then(BigDecimal::from_f64) - .map(|bd| bd.normalized()) - .ok_or_else(|| { - 
build_conversion_error(sf, &format!("Number({n})"), &format!("{:?}", sf.type_identifier())) - })?; + let bd = parse_decimal(&n.to_string()).map_err(|_| { + build_conversion_error(sf, &format!("Number({n})"), &format!("{:?}", sf.type_identifier())) + })?; Ok(PrismaValue::Float(bd)) } diff --git a/query-engine/connectors/sql-query-connector/src/query_builder/write.rs b/query-engine/connectors/sql-query-connector/src/query_builder/write.rs index abcf73cb29c6..c089f0834dcb 100644 --- a/query-engine/connectors/sql-query-connector/src/query_builder/write.rs +++ b/query-engine/connectors/sql-query-connector/src/query_builder/write.rs @@ -59,6 +59,7 @@ pub(crate) fn create_records_nonempty( for field in affected_fields.iter() { let value = arg.take_field_value(field.db_name()); + match value { Some(write_op) => { let value: PrismaValue = write_op @@ -67,7 +68,10 @@ pub(crate) fn create_records_nonempty( row.push(field.value(value, ctx).into()); } - + // We can't use `DEFAULT` for SQLite so we provided an explicit `NULL` instead. 
+ None if !field.is_required() && field.default_value().is_none() => { + row.push(Value::null_int32().raw().into()) + } None => row.push(default_value()), } } diff --git a/query-engine/connectors/sql-query-connector/src/row.rs b/query-engine/connectors/sql-query-connector/src/row.rs index 6f154b1f77dc..68a4d30d5cd5 100644 --- a/query-engine/connectors/sql-query-connector/src/row.rs +++ b/query-engine/connectors/sql-query-connector/src/row.rs @@ -147,6 +147,7 @@ fn row_value_to_prisma_value(p_value: Value, meta: ColumnMetadata<'_>) -> Result ValueType::Boolean(Some(b)) => PrismaValue::Boolean(b), ValueType::Bytes(Some(bytes)) if bytes.as_ref() == [0u8] => PrismaValue::Boolean(false), ValueType::Bytes(Some(bytes)) if bytes.as_ref() == [1u8] => PrismaValue::Boolean(true), + ValueType::Double(Some(i)) => PrismaValue::Boolean(i.to_i64().unwrap() != 0), _ => return Err(create_error(&p_value)), }, TypeIdentifier::Enum(_) => match p_value.typed { diff --git a/query-engine/core/src/interpreter/query_interpreters/write.rs b/query-engine/core/src/interpreter/query_interpreters/write.rs index 6d88c254312a..ad50bbbae0c0 100644 --- a/query-engine/core/src/interpreter/query_interpreters/write.rs +++ b/query-engine/core/src/interpreter/query_interpreters/write.rs @@ -1,9 +1,12 @@ +use std::collections::HashMap; + use crate::{ interpreter::{InterpretationResult, InterpreterError}, query_ast::*, QueryResult, RecordSelection, }; -use connector::{ConnectionLike, NativeUpsert}; +use connector::{ConnectionLike, DatasourceFieldName, NativeUpsert, WriteArgs}; +use query_structure::{ManyRecords, Model}; pub(crate) async fn execute( tx: &mut dyn ConnectionLike, @@ -60,6 +63,10 @@ async fn create_many( q: CreateManyRecords, trace_id: Option, ) -> InterpretationResult { + if q.split_by_shape { + return create_many_split_by_shape(tx, q, trace_id).await; + } + if let Some(selected_fields) = q.selected_fields { let records = tx .create_records_returning(&q.model, q.args, q.skip_duplicates, 
selected_fields.fields, trace_id) @@ -81,6 +88,109 @@ async fn create_many( } } +/// Performs bulk inserts grouped by record shape. +/// +/// This is required to support connectors which do not support `DEFAULT` in the list of values for `INSERT`. +/// See [`create_many_shape`] for more information as to which heuristic we use to group create many entries. +async fn create_many_split_by_shape( + tx: &mut dyn ConnectionLike, + q: CreateManyRecords, + trace_id: Option, +) -> InterpretationResult { + let mut args_by_shape: HashMap> = Default::default(); + let model = &q.model; + + for write_args in q.args { + let shape = create_many_shape(&write_args, model); + + args_by_shape.entry(shape).or_default().push(write_args); + } + + if let Some(selected_fields) = q.selected_fields { + let mut result: Option = None; + for args in args_by_shape.into_values() { + let current_batch = tx + .create_records_returning( + &q.model, + args, + q.skip_duplicates, + selected_fields.fields.clone(), + trace_id.clone(), + ) + .await?; + + if let Some(result) = &mut result { + // We assume that all records have the same set and order of fields, + // since we pass the same `selected_fields.fields` to the + // `create_records_returning()` above. + result.records.extend(current_batch.records.into_iter()); + } else { + result = Some(current_batch); + } + } + + let records = if let Some(result) = result { + result + } else { + // Empty result means that the list of arguments was empty as well. + tx.create_records_returning(&q.model, vec![], q.skip_duplicates, selected_fields.fields, trace_id) + .await? 
+ }; + + let selection = RecordSelection { + name: q.name, + fields: selected_fields.order, + records, + nested: vec![], + model: q.model, + virtual_fields: vec![], + }; + + Ok(QueryResult::RecordSelection(Some(Box::new(selection)))) + } else { + let mut result = 0; + for args in args_by_shape.into_values() { + let affected_records = tx + .create_records(&q.model, args, q.skip_duplicates, trace_id.clone()) + .await?; + result += affected_records; + } + Ok(QueryResult::Count(result)) + } +} + +#[derive(Debug, PartialEq, Eq, Hash)] +struct CreateManyShape(Vec); + +/// Returns a [`CreateManyShape`] that can be used to group CreateMany entries optimally. +/// +/// This is needed for connectors that don't support the `DEFAULT` expression when inserting records in bulk. +/// `DEFAULT` is needed for fields that have a default value that the QueryEngine cannot generate at runtime (@autoincrement(), @dbgenerated()). +/// +/// Two CreateMany entries cannot be grouped together when they contain different fields that require the use of a `DEFAULT` expression. +/// - When they have the same set of fields that require `DEFAULT`, those fields can be ommited entirely from the `INSERT` expression, in which case `DEFAULT` is implied. +/// - When they don't, since all `VALUES` entries of the `INSERT` expression must be the same, we have to split the CreateMany entries into separate `INSERT` expressions. +/// +/// Consequently, if a field has a default value and is _not_ present in the [`WriteArgs`], this constitutes a discriminant that can be used to group CreateMany entries. +/// +/// As such, the [`CreateManyShape`] that we compute for a given CreateMany entry is the set of fields that are _not_ present in the [`WriteArgs`] and that have a default value. +/// Note: This works because the [`crate::QueryDocumentParser`] injects into the CreateMany entries, the default values that _can_ be generated at runtime. 
+/// Note: We can ignore optional fields without default values because they can be inserted as `NULL`. It is a value that the QueryEngine _can_ generate at runtime. +fn create_many_shape(write_args: &WriteArgs, model: &Model) -> CreateManyShape { + let mut shape = Vec::new(); + + for field in model.fields().scalar() { + if !write_args.args.contains_key(field.db_name()) && field.default_value().is_some() { + shape.push(DatasourceFieldName(field.db_name().to_string())); + } + } + + // This ensures that shapes are not dependent on order of fields. + shape.sort_unstable(); + + CreateManyShape(shape) +} + async fn update_one( tx: &mut dyn ConnectionLike, q: UpdateRecord, diff --git a/query-engine/core/src/query_ast/write.rs b/query-engine/core/src/query_ast/write.rs index 76c8ffb81cbc..975a2be877bf 100644 --- a/query-engine/core/src/query_ast/write.rs +++ b/query-engine/core/src/query_ast/write.rs @@ -272,6 +272,14 @@ pub struct CreateManyRecords { /// Fields of created records that client has requested to return. /// `None` if the connector does not support returning the created rows. pub selected_fields: Option, + /// If set to true, connector will perform the operation using multiple bulk `INSERT` queries. + /// One query will be issued per a unique set of fields present in the batch. For example, if + /// `args` contains records: + /// {a: 1, b: 1} + /// {a: 2, b: 2} + /// {a: 3, b: 3, c: 3} + /// Two queries will be issued: one containing first two records and one for the last record. 
+ pub split_by_shape: bool, } #[derive(Debug, Clone)] diff --git a/query-engine/core/src/query_document/mod.rs b/query-engine/core/src/query_document/mod.rs index fa424bc44d6e..575e3074df2f 100644 --- a/query-engine/core/src/query_document/mod.rs +++ b/query-engine/core/src/query_document/mod.rs @@ -37,6 +37,8 @@ use schema::{constants::*, QuerySchema}; use std::collections::HashMap; use user_facing_errors::query_engine::validation::ValidationError; +use self::selection::QueryFilters; + pub(crate) type QueryParserResult = std::result::Result; #[derive(Debug)] @@ -213,21 +215,21 @@ impl CompactedDocument { // The query arguments are extracted here. Combine all query // arguments from the different queries into a one large argument. - let selection_set = selections.iter().fold(SelectionSet::new(), |mut acc, selection| { - // findUnique always has only one argument. We know it must be an object, otherwise this will panic. - let where_obj = selection.arguments()[0] - .1 - .clone() - .into_object() - .expect("Trying to compact a selection with non-object argument"); - let filters = extract_filter(where_obj, &model); - - for (field, filter) in filters { - acc = acc.push(field, filter); - } - - acc - }); + let query_filters = selections + .iter() + .map(|selection| { + // findUnique always has only one argument. We know it must be an object, otherwise this will panic. + let where_obj = selection.arguments()[0] + .1 + .clone() + .into_object() + .expect("Trying to compact a selection with non-object argument"); + let filters = extract_filter(where_obj, &model); + + QueryFilters::new(filters) + }) + .collect(); + let selection_set = SelectionSet::new(query_filters); // We must select all unique fields in the query so we can // match the right response back to the right request later on. 
diff --git a/query-engine/core/src/query_document/selection.rs b/query-engine/core/src/query_document/selection.rs index 206fc95c8315..5b950fc38d3c 100644 --- a/query-engine/core/src/query_document/selection.rs +++ b/query-engine/core/src/query_document/selection.rs @@ -1,8 +1,9 @@ +use std::iter; + use crate::{ArgumentValue, ArgumentValueObject}; use indexmap::IndexMap; use itertools::Itertools; use schema::constants::filters; -use std::borrow::Cow; pub type SelectionArgument = (String, ArgumentValue); @@ -102,106 +103,132 @@ impl Selection { } } +#[derive(Debug, Clone, PartialEq, Default)] +pub struct QueryFilters(Vec<(String, ArgumentValue)>); + +impl QueryFilters { + pub fn new(filters: Vec<(String, ArgumentValue)>) -> Self { + Self(filters) + } + + pub fn keys(&self) -> impl IntoIterator + '_ { + self.0.iter().map(|(key, _)| key.as_str()) + } + + pub fn has_many_keys(&self) -> bool { + self.0.len() > 1 + } + + pub fn get_single_key(&self) -> Option<&(String, ArgumentValue)> { + self.0.first() + } +} + #[derive(Debug, Clone, PartialEq)] -pub enum SelectionSet<'a> { - Single(Cow<'a, str>, Vec), - Multi(Vec>>, Vec>), +pub enum SelectionSet { + Single(QuerySingle), + Many(Vec), Empty, } -impl<'a> Default for SelectionSet<'a> { - fn default() -> Self { - Self::Empty - } -} +#[derive(Debug, Clone, PartialEq)] +pub struct QuerySingle(String, Vec); + +impl QuerySingle { + /// Attempt at building a single query filter from multiple query filters. + /// Returns `None` if one of the query filters have more than one key. 
+ pub fn new(query_filters: &[QueryFilters]) -> Option { + if query_filters.is_empty() { + return None; + } -impl<'a> SelectionSet<'a> { - pub fn new() -> Self { - Self::default() - } + if query_filters.iter().any(|query_filters| query_filters.has_many_keys()) { + return None; + } - pub fn push(self, column: impl Into>, value: ArgumentValue) -> Self { - let column = column.into(); + let first = query_filters.first().unwrap(); + let (key, value) = first.get_single_key().unwrap(); - match self { - Self::Single(key, mut vals) if key == column => { - vals.push(value); - Self::Single(key, vals) - } - Self::Single(key, mut vals) => { - vals.push(value); - Self::Multi(vec![vec![key, column]], vec![vals]) - } - Self::Multi(mut keys, mut vals) => { - match (keys.last_mut(), vals.last_mut()) { - (Some(keys), Some(vals)) if !keys.contains(&column) => { - keys.push(column); - vals.push(value); - } - _ => { - keys.push(vec![column]); - vals.push(vec![value]); - } - } + let mut result = QuerySingle(key.clone(), vec![value.clone()]); - Self::Multi(keys, vals) + for filters in query_filters.iter().skip(1) { + if let Some(single) = QuerySingle::push(result, filters) { + result = single; + } else { + return None; } - Self::Empty => Self::Single(column, vec![value]), } + + Some(result) } - pub fn len(&self) -> usize { - match self { - Self::Single(_, _) => 1, - Self::Multi(v, _) => v.len(), - Self::Empty => 0, + fn push(mut previous: Self, next: &QueryFilters) -> Option { + if next.0.is_empty() { + Some(previous) + // We have already validated that all `QueryFilters` have a single key. + // So we can continue building it. 
+ } else { + let (key, value) = next.0.first().unwrap(); + + // if key matches, push value + if key == &previous.0 { + previous.1.push(value.clone()); + + Some(previous) + } else { + // if key does not match, it's a many + None + } } } +} - pub fn is_single(&self) -> bool { - matches!(self, Self::Single(_, _)) +impl Default for SelectionSet { + fn default() -> Self { + Self::Empty } +} - pub fn is_multi(&self) -> bool { - matches!(self, Self::Multi(_, _)) - } +impl SelectionSet { + pub fn new(filters: Vec) -> Self { + let single = QuerySingle::new(&filters); - pub fn is_empty(&self) -> bool { - self.len() == 0 + match single { + Some(single) => SelectionSet::Single(single), + None if filters.is_empty() => SelectionSet::Empty, + None => SelectionSet::Many(filters), + } } - pub fn keys(&self) -> Vec<&str> { + pub fn keys(&self) -> Box + '_> { match self { - Self::Single(key, _) => vec![key.as_ref()], - Self::Multi(keys, _) => match keys.first() { - Some(keys) => keys.iter().map(|key| key.as_ref()).collect(), - None => Vec::new(), - }, - Self::Empty => Vec::new(), + Self::Single(single) => Box::new(iter::once(single.0.as_str())), + Self::Many(filters) => Box::new(filters.iter().flat_map(|f| f.keys()).unique()), + Self::Empty => Box::new(iter::empty()), } } } -pub struct In<'a> { - selection_set: SelectionSet<'a>, +#[derive(Debug)] +pub struct In { + selection_set: SelectionSet, } -impl<'a> In<'a> { - pub fn new(selection_set: SelectionSet<'a>) -> Self { +impl In { + pub fn new(selection_set: SelectionSet) -> Self { Self { selection_set } } } -impl<'a> From> for ArgumentValue { - fn from(other: In<'a>) -> Self { +impl From for ArgumentValue { + fn from(other: In) -> Self { match other.selection_set { - SelectionSet::Multi(key_sets, val_sets) => { - let key_vals = key_sets.into_iter().zip(val_sets); - - let conjuctive = key_vals.fold(Conjuctive::new(), |acc, (keys, vals)| { - let ands = keys.into_iter().zip(vals).fold(Conjuctive::new(), |acc, (key, val)| { - let mut 
argument = IndexMap::new(); - argument.insert(key.into_owned(), val); + SelectionSet::Many(buckets) => { + let conjuctive = buckets.into_iter().fold(Conjuctive::new(), |acc, bucket| { + // Needed because we flush the last bucket by pushing an empty one, which gets translated to a `Null` as the Conjunctive is empty. + let ands = bucket.0.into_iter().fold(Conjuctive::new(), |acc, (key, value)| { + let mut argument = IndexMap::with_capacity(1); + argument.insert(key.clone(), value); acc.and(argument) }); @@ -211,10 +238,28 @@ impl<'a> From> for ArgumentValue { ArgumentValue::from(conjuctive) } - SelectionSet::Single(key, vals) => ArgumentValue::object([( - key.to_string(), - ArgumentValue::object([(filters::IN.to_owned(), ArgumentValue::list(vals))]), - )]), + SelectionSet::Single(QuerySingle(key, vals)) => { + let is_bool = vals.clone().into_iter().any(|v| match v { + ArgumentValue::Scalar(s) => matches!(s, query_structure::PrismaValue::Boolean(_)), + _ => false, + }); + + if is_bool { + let conjunctive = vals.into_iter().fold(Conjuctive::new(), |acc, val| { + let mut argument = IndexMap::new(); + + argument.insert(key.to_string(), val); + acc.or(argument) + }); + + return ArgumentValue::from(conjunctive); + } + + ArgumentValue::object([( + key.to_string(), + ArgumentValue::object([(filters::IN.to_owned(), ArgumentValue::list(vals))]), + )]) + } SelectionSet::Empty => ArgumentValue::null(), } } diff --git a/query-engine/core/src/query_graph_builder/write/create.rs b/query-engine/core/src/query_graph_builder/write/create.rs index 014910a43aa9..fe0e49a29370 100644 --- a/query-engine/core/src/query_graph_builder/write/create.rs +++ b/query-engine/core/src/query_graph_builder/write/create.rs @@ -66,7 +66,7 @@ pub(crate) fn create_record( /// Creates a create record query and adds it to the query graph, together with it's nested queries and companion read query. 
pub(crate) fn create_many_records( graph: &mut QueryGraph, - _query_schema: &QuerySchema, + query_schema: &QuerySchema, model: Model, mut field: ParsedField<'_>, ) -> QueryGraphBuilderResult<()> { @@ -99,6 +99,7 @@ pub(crate) fn create_many_records( args, skip_duplicates, selected_fields: None, + split_by_shape: !query_schema.has_capability(ConnectorCapability::SupportsDefaultInInsert), }; graph.create_node(Query::Write(WriteQuery::CreateManyRecords(query))); diff --git a/query-engine/core/src/query_graph_builder/write/nested/create_nested.rs b/query-engine/core/src/query_graph_builder/write/nested/create_nested.rs index aaea8d24efde..c3d6196b61e5 100644 --- a/query-engine/core/src/query_graph_builder/write/nested/create_nested.rs +++ b/query-engine/core/src/query_graph_builder/write/nested/create_nested.rs @@ -75,6 +75,7 @@ pub fn nested_create( args: data_maps.into_iter().map(|(args, _nested)| args).collect(), skip_duplicates: false, selected_fields, + split_by_shape: !query_schema.has_capability(ConnectorCapability::SupportsDefaultInInsert), }; let create_many_node = graph.create_node(Query::Write(WriteQuery::CreateManyRecords(query))); @@ -554,6 +555,7 @@ fn handle_one_to_one( pub fn nested_create_many( graph: &mut QueryGraph, + query_schema: &QuerySchema, parent_node: NodeRef, parent_relation_field: &RelationFieldRef, value: ParsedInputValue<'_>, @@ -585,6 +587,7 @@ pub fn nested_create_many( args, skip_duplicates, selected_fields: None, + split_by_shape: !query_schema.has_capability(ConnectorCapability::SupportsDefaultInInsert), }; let create_node = graph.create_node(Query::Write(WriteQuery::CreateManyRecords(query))); diff --git a/query-engine/core/src/query_graph_builder/write/nested/mod.rs b/query-engine/core/src/query_graph_builder/write/nested/mod.rs index 5d0ad21a4c7e..a8f984edbbf4 100644 --- a/query-engine/core/src/query_graph_builder/write/nested/mod.rs +++ b/query-engine/core/src/query_graph_builder/write/nested/mod.rs @@ -36,7 +36,7 @@ pub fn 
connect_nested_query( for (field_name, value) in data_map { match field_name.as_ref() { operations::CREATE => nested_create(graph, query_schema,parent, &parent_relation_field, value, &child_model)?, - operations::CREATE_MANY => nested_create_many(graph, parent, &parent_relation_field, value, &child_model)?, + operations::CREATE_MANY => nested_create_many(graph, query_schema, parent, &parent_relation_field, value, &child_model)?, operations::UPDATE => nested_update(graph, query_schema, &parent, &parent_relation_field, value, &child_model)?, operations::UPSERT => nested_upsert(graph, query_schema, parent, &parent_relation_field, value)?, operations::DELETE => nested_delete(graph, query_schema, &parent, &parent_relation_field, value, &child_model)?, diff --git a/query-engine/dmmf/src/lib.rs b/query-engine/dmmf/src/lib.rs index 340566c83de1..42cfb2757ca4 100644 --- a/query-engine/dmmf/src/lib.rs +++ b/query-engine/dmmf/src/lib.rs @@ -4,6 +4,7 @@ mod serialization_ast; #[cfg(test)] mod tests; +use psl::ValidatedSchema; pub use serialization_ast::DataModelMetaFormat; use ast_builders::schema_to_dmmf; @@ -15,6 +16,11 @@ pub fn dmmf_json_from_schema(schema: &str) -> String { serde_json::to_string(&dmmf).unwrap() } +pub fn dmmf_json_from_validated_schema(schema: ValidatedSchema) -> String { + let dmmf = from_precomputed_parts(&schema::build(Arc::new(schema), true)); + serde_json::to_string(&dmmf).unwrap() +} + pub fn dmmf_from_schema(schema: &str) -> DataModelMetaFormat { let schema = Arc::new(psl::parse_schema(schema).unwrap()); from_precomputed_parts(&schema::build(schema, true)) diff --git a/query-engine/driver-adapters/executor/package.json b/query-engine/driver-adapters/executor/package.json index c215f37b6fd5..e76453877a50 100644 --- a/query-engine/driver-adapters/executor/package.json +++ b/query-engine/driver-adapters/executor/package.json @@ -8,7 +8,9 @@ "description": "", "private": true, "scripts": { - "build": "tsup ./src/testd.ts ./src/bench.ts --format esm 
--dts" + "build": "tsup ./src/testd.ts ./src/bench.ts --format esm --dts", + "test": "node --import tsx ./src/testd.ts", + "clean:d1": "rm -rf ../../connector-test-kit-rs/query-engine-tests/.wrangler" }, "tsup": { "external": [ @@ -22,21 +24,27 @@ "sideEffects": false, "license": "Apache-2.0", "dependencies": { - "query-engine-wasm-latest": "npm:@prisma/query-engine-wasm@latest", - "query-engine-wasm-baseline": "npm:@prisma/query-engine-wasm@0.0.19", + "@effect/schema": "0.64.18", + "@prisma/adapter-d1": "workspace:*", "@prisma/adapter-libsql": "workspace:*", "@prisma/adapter-neon": "workspace:*", "@prisma/adapter-pg": "workspace:*", "@prisma/adapter-planetscale": "workspace:*", - "@prisma/driver-adapter-utils": "workspace:*", "@prisma/bundled-js-drivers": "workspace:*", + "@prisma/driver-adapter-utils": "workspace:*", "mitata": "0.1.11", - "undici": "6.7.0", + "query-engine-wasm-baseline": "npm:@prisma/query-engine-wasm@0.0.19", + "query-engine-wasm-latest": "npm:@prisma/query-engine-wasm@latest", + "ts-pattern": "5.1.0", + "undici": "6.11.1", + "wrangler": "3.44.0", "ws": "8.16.0" }, "devDependencies": { - "@types/node": "20.11.24", + "@cloudflare/workers-types": "4.20240402.0", + "@types/node": "20.12.3", "tsup": "8.0.2", - "typescript": "5.3.3" + "tsx": "4.7.1", + "typescript": "5.4.3" } } diff --git a/query-engine/driver-adapters/executor/src/bench.ts b/query-engine/driver-adapters/executor/src/bench.ts index e168e95a9cab..14923f69cf9e 100644 --- a/query-engine/driver-adapters/executor/src/bench.ts +++ b/query-engine/driver-adapters/executor/src/bench.ts @@ -5,7 +5,7 @@ import { webcrypto } from "node:crypto"; import * as fs from "node:fs/promises"; import path from "node:path"; -import { fileURLToPath } from "node:url"; +import { __dirname } from './utils' import * as qe from "./qe"; @@ -32,9 +32,8 @@ if (!global.crypto) { async function main(): Promise { // read the prisma schema from stdin - const dirname = path.dirname(fileURLToPath(import.meta.url)); var 
datamodel = ( - await fs.readFile(path.resolve(dirname, "..", "bench", "schema.prisma")) + await fs.readFile(path.resolve(__dirname, "..", "bench", "schema.prisma")) ).toString(); const url = process.env.DATABASE_URL; diff --git a/query-engine/driver-adapters/executor/src/driver-adapters-manager/d1.ts b/query-engine/driver-adapters/executor/src/driver-adapters-manager/d1.ts new file mode 100644 index 000000000000..f2a5392587cf --- /dev/null +++ b/query-engine/driver-adapters/executor/src/driver-adapters-manager/d1.ts @@ -0,0 +1,105 @@ +import path from 'node:path' +import * as S from '@effect/schema/Schema' +import { PrismaD1 } from '@prisma/adapter-d1' +import { DriverAdapter } from '@prisma/driver-adapter-utils' +import { getPlatformProxy } from 'wrangler' +import type { D1Database, D1Result } from '@cloudflare/workers-types' + +import { __dirname, runBatch } from '../utils' +import type { ConnectParams, DriverAdaptersManager } from './index' +import type { DriverAdapterTag, EnvForAdapter } from '../types' +import { D1Tables } from '../types/d1' + +const TAG = 'd1' as const satisfies DriverAdapterTag +type TAG = typeof TAG + +export class D1Manager implements DriverAdaptersManager { + #driver: D1Database + #dispose: () => Promise + #adapter?: DriverAdapter + + private constructor(private env: EnvForAdapter, driver: D1Database, dispose: () => Promise) { + this.#driver = driver + this.#dispose = dispose + } + + static async setup(env: EnvForAdapter, migrationScript?: string) { + const { env: cfBindings, dispose } = await getPlatformProxy<{ D1_DATABASE: D1Database }>({ + configPath: path.join(__dirname, "../wrangler.toml"), + }) + + const { D1_DATABASE } = cfBindings + + /* prisma migrate reset */ + console.warn('[D1] Resetting database') + await migrateReset(D1_DATABASE) + + /* prisma migrate diff */ + if (migrationScript) { + console.warn('[D1] Running database migration script') + await migrateDiff(D1_DATABASE, migrationScript) + } + + return new D1Manager(env, 
D1_DATABASE, dispose) + } + + async connect({}: ConnectParams) { + this.#adapter = new PrismaD1(this.#driver) + return this.#adapter + } + + async teardown() { + await this.#dispose() + } +} + +async function migrateDiff(D1_DATABASE: D1Database, migrationScript: string) { + // Note: when running a script with multiple statements, D1 fails with + // `D1_ERROR: A prepared SQL statement must contain only one statement.` + // We thus need to run each statement separately, splitting the script by `;`. + const sqlStatements = migrationScript.split(';') + const preparedStatements = sqlStatements.map((sqlStatement) => D1_DATABASE.prepare(sqlStatement)) + await runBatch(D1_DATABASE, preparedStatements) +} + +async function migrateReset(D1_DATABASE: D1Database) { + let { results: rawTables } = ((await D1_DATABASE.prepare(`PRAGMA main.table_list;`).run()) as D1Result) + let tables = S + .decodeUnknownSync(D1Tables, { onExcessProperty: 'preserve' })(rawTables) + .filter((item) => !['_cf_KV', 'sqlite_schema', 'sqlite_sequence'].includes(item.name)) + + // This may sometimes fail with `D1_ERROR: no such table: sqlite_sequence`, + // so it needs to be outside of the batch transaction. + // From the docs (https://www.sqlite.org/autoinc.html): + // "The sqlite_sequence table is created automatically, if it does not already exist, + // whenever a normal table that contains an AUTOINCREMENT column is created". + try { + await D1_DATABASE.prepare(`DELETE FROM "sqlite_sequence";`).run() + } catch (_) { + // Ignore the error, as the table may not exist. + console.warn('Failed to reset sqlite_sequence table, but continuing with the reset.') + } + + const batch = [] as string[] + + // Allow violating foreign key constraints on the batch transaction. + // The foreign key constraints are automatically re-enabled at the end of the transaction, regardless of it succeeding. 
+ batch.push(`PRAGMA defer_foreign_keys = ${1};`) + + for (const table of tables) { + if (table.type === 'view') { + batch.push(`DROP VIEW IF EXISTS "${table.name}";`) + } else { + batch.push(`DROP TABLE IF EXISTS "${table.name}";`) + } + } + + const statements = batch.map((sql) => D1_DATABASE.prepare(sql)) + const batchResult = await runBatch(D1_DATABASE, statements) + + for (const { error } of batchResult) { + if (error) { + console.error('Error in batch: %O', error) + } + } +} diff --git a/query-engine/driver-adapters/executor/src/driver-adapters-manager/index.ts b/query-engine/driver-adapters/executor/src/driver-adapters-manager/index.ts new file mode 100644 index 000000000000..df5b2f1b47e0 --- /dev/null +++ b/query-engine/driver-adapters/executor/src/driver-adapters-manager/index.ts @@ -0,0 +1,10 @@ +import type { DriverAdapter } from '@prisma/driver-adapter-utils' + +export type ConnectParams = { + url: string +} + +export interface DriverAdaptersManager { + connect: (params: ConnectParams) => Promise + teardown: () => Promise +} diff --git a/query-engine/driver-adapters/executor/src/driver-adapters-manager/libsql.ts b/query-engine/driver-adapters/executor/src/driver-adapters-manager/libsql.ts new file mode 100644 index 000000000000..d86bf435c129 --- /dev/null +++ b/query-engine/driver-adapters/executor/src/driver-adapters-manager/libsql.ts @@ -0,0 +1,28 @@ +import { PrismaLibSQL } from '@prisma/adapter-libsql' +import { libSql } from '@prisma/bundled-js-drivers' +import { DriverAdapter } from '@prisma/driver-adapter-utils' +import type { ConnectParams, DriverAdaptersManager } from './index' +import type { DriverAdapterTag, EnvForAdapter } from '../types' + +const TAG = 'libsql' as const satisfies DriverAdapterTag +type TAG = typeof TAG + +export class LibSQLManager implements DriverAdaptersManager { + #driver?: libSql.Client + #adapter?: DriverAdapter + + private constructor(private env: EnvForAdapter) {} + + static async setup(env: EnvForAdapter) { + return 
new LibSQLManager(env) + } + + async connect({ url }: ConnectParams) { + this.#driver = libSql.createClient({ url, intMode: 'bigint' }) + this.#adapter = new PrismaLibSQL(this.#driver) as DriverAdapter + + return this.#adapter + } + + async teardown() {} +} diff --git a/query-engine/driver-adapters/executor/src/driver-adapters-manager/neon.ws.ts b/query-engine/driver-adapters/executor/src/driver-adapters-manager/neon.ws.ts new file mode 100644 index 000000000000..66cdfc6e5a42 --- /dev/null +++ b/query-engine/driver-adapters/executor/src/driver-adapters-manager/neon.ws.ts @@ -0,0 +1,46 @@ +import { PrismaNeon } from '@prisma/adapter-neon' +import { neon } from '@prisma/bundled-js-drivers' +import { DriverAdapter } from '@prisma/driver-adapter-utils' +import { WebSocket } from 'ws' +import { postgresSchemaName, postgres_options } from '../utils' +import type { DriverAdaptersManager } from './index' +import type { DriverAdapterTag, EnvForAdapter } from '../types' + +const TAG = 'neon:ws' as const satisfies DriverAdapterTag +type TAG = typeof TAG + +type ConnectParams = { + url: string +} + +export class NeonWsManager implements DriverAdaptersManager { + #driver?: neon.Pool + #adapter?: DriverAdapter + + private constructor(private env: EnvForAdapter) {} + + static async setup(env: EnvForAdapter) { + return new NeonWsManager(env) + } + + async connect({ url }: ConnectParams) { + const { proxy_url: proxyUrl } = this.env.DRIVER_ADAPTER_CONFIG + const { neonConfig, Pool } = neon + + neonConfig.wsProxy = () => proxyUrl + neonConfig.webSocketConstructor = WebSocket + neonConfig.useSecureWebSocket = false + neonConfig.pipelineConnect = false + + const schemaName = postgresSchemaName(url) + + this.#driver = new Pool(postgres_options(url)) + this.#adapter = new PrismaNeon(this.#driver, { schema: schemaName }) as DriverAdapter + + return this.#adapter + } + + async teardown() { + await this.#driver?.end() + } +} diff --git 
a/query-engine/driver-adapters/executor/src/driver-adapters-manager/pg.ts b/query-engine/driver-adapters/executor/src/driver-adapters-manager/pg.ts new file mode 100644 index 000000000000..a3b28e119651 --- /dev/null +++ b/query-engine/driver-adapters/executor/src/driver-adapters-manager/pg.ts @@ -0,0 +1,33 @@ +import { PrismaPg } from '@prisma/adapter-pg' +import { pg } from '@prisma/bundled-js-drivers' +import { DriverAdapter } from '@prisma/driver-adapter-utils' +import { postgresSchemaName, postgres_options } from '../utils' +import type { ConnectParams, DriverAdaptersManager } from './index' +import type { DriverAdapterTag, EnvForAdapter } from '../types' + +const TAG = 'pg' as const satisfies DriverAdapterTag +type TAG = typeof TAG + +export class PgManager implements DriverAdaptersManager { + #driver?: pg.Pool + #adapter?: DriverAdapter + + private constructor(private env: EnvForAdapter) {} + + static async setup(env: EnvForAdapter) { + return new PgManager(env) + } + + async connect({ url }: ConnectParams) { + const schemaName = postgresSchemaName(url) + + this.#driver = new pg.Pool(postgres_options(url)) + this.#adapter = new PrismaPg(this.#driver, { schema: schemaName }) as DriverAdapter + + return this.#adapter + } + + async teardown() { + await this.#driver?.end() + } +} diff --git a/query-engine/driver-adapters/executor/src/driver-adapters-manager/planetscale.ts b/query-engine/driver-adapters/executor/src/driver-adapters-manager/planetscale.ts new file mode 100644 index 000000000000..7c1d2314b34a --- /dev/null +++ b/query-engine/driver-adapters/executor/src/driver-adapters-manager/planetscale.ts @@ -0,0 +1,37 @@ +import { PrismaPlanetScale } from '@prisma/adapter-planetscale' +import { planetScale } from '@prisma/bundled-js-drivers' +import { DriverAdapter } from '@prisma/driver-adapter-utils' +import { fetch } from 'undici' +import { copyPathName } from '../utils' +import type { ConnectParams, DriverAdaptersManager } from './index' +import type { 
DriverAdapterTag, EnvForAdapter } from '../types' + +const TAG = 'planetscale' as const satisfies DriverAdapterTag +type TAG = typeof TAG + +export class PlanetScaleManager implements DriverAdaptersManager { + #driver?: planetScale.Client + #adapter?: DriverAdapter + + private constructor(private env: EnvForAdapter) {} + + static async setup(env: EnvForAdapter) { + return new PlanetScaleManager(env) + } + + async connect({ url }: ConnectParams) { + const { proxy_url: proxyUrl } = this.env.DRIVER_ADAPTER_CONFIG + + this.#driver = new planetScale.Client({ + // preserving path name so proxy url would look like real DB url + url: copyPathName({ fromURL: url, toURL: proxyUrl }), + fetch, + }) + + this.#adapter = new PrismaPlanetScale(this.#driver) as DriverAdapter + + return this.#adapter + } + + async teardown() {} +} diff --git a/query-engine/driver-adapters/executor/src/jsonRpc.ts b/query-engine/driver-adapters/executor/src/jsonRpc.ts deleted file mode 100644 index ec734e7b543f..000000000000 --- a/query-engine/driver-adapters/executor/src/jsonRpc.ts +++ /dev/null @@ -1,28 +0,0 @@ -export interface Request { - jsonrpc: '2.0' - method: string - params?: Object, - id: number -} - -export type Response = OkResponse | ErrResponse - -export interface OkResponse { - jsonrpc: '2.0' - result: unknown - error?: never - id: number -} - -export interface ErrResponse { - jsonrpc: '2.0' - error: RpcError - result?: never - id: number -} - -export interface RpcError { - code: number - message: string - data?: unknown -} diff --git a/query-engine/driver-adapters/executor/src/qe.ts b/query-engine/driver-adapters/executor/src/qe.ts index e95f76ff05c5..d9553a684f2b 100644 --- a/query-engine/driver-adapters/executor/src/qe.ts +++ b/query-engine/driver-adapters/executor/src/qe.ts @@ -2,14 +2,12 @@ import type { DriverAdapter } from "@prisma/driver-adapter-utils"; import * as napi from "./engines/Library"; import * as os from "node:os"; import * as path from "node:path"; -import { 
fileURLToPath } from "node:url"; - -const dirname = path.dirname(fileURLToPath(import.meta.url)); +import { __dirname } from './utils' export interface QueryEngine { connect(trace: string): Promise; disconnect(trace: string): Promise; - query(body: string, trace: string, tx_id?: string): Promise; + query(body: string, trace: string, tx_id?: string | null): Promise; startTransaction(input: string, trace: string): Promise; commitTransaction(tx_id: string, trace: string): Promise; rollbackTransaction(tx_id: string, trace: string): Promise; @@ -65,7 +63,7 @@ function loadNapiEngine(): napi.Library { : "debug"; const libQueryEnginePath = path.resolve( - dirname, + __dirname, `../../../../target/${target}/libquery_engine.${libExt}` ); diff --git a/query-engine/driver-adapters/executor/src/recording.ts b/query-engine/driver-adapters/executor/src/recording.ts index 0602cb69dc4e..88b9d369bc23 100644 --- a/query-engine/driver-adapters/executor/src/recording.ts +++ b/query-engine/driver-adapters/executor/src/recording.ts @@ -1,9 +1,9 @@ -import { - type DriverAdapter, - type Query, - type Result, - type ResultSet, -} from "@prisma/driver-adapter-utils"; +import type { + DriverAdapter, + Query, + Result, + ResultSet, +} from "@prisma/driver-adapter-utils" type Recordings = ReturnType; @@ -20,6 +20,7 @@ export function recording(adapter: DriverAdapter) { function recorder(adapter: DriverAdapter, recordings: Recordings) { return { provider: adapter.provider, + adapterName: adapter.adapterName, startTransaction: () => { throw new Error("Not implemented"); }, @@ -34,12 +35,13 @@ function recorder(adapter: DriverAdapter, recordings: Recordings) { executeRaw: async (params) => { throw new Error("Not implemented"); }, - }; + } satisfies DriverAdapter } function replayer(adapter: DriverAdapter, recordings: Recordings) { return { provider: adapter.provider, + adapterName: adapter.adapterName, recordings: recordings, startTransaction: () => { throw new Error("Not implemented"); @@ -53,7 
+55,7 @@ function replayer(adapter: DriverAdapter, recordings: Recordings) { executeRaw: async (params) => { return recordings.getCommandResults(params); }, - }; + } satisfies DriverAdapter & { recordings: Recordings } } function createInMemoryRecordings() { diff --git a/query-engine/driver-adapters/executor/src/testd.ts b/query-engine/driver-adapters/executor/src/testd.ts index 4345887fe659..0c96fb927379 100644 --- a/query-engine/driver-adapters/executor/src/testd.ts +++ b/query-engine/driver-adapters/executor/src/testd.ts @@ -1,38 +1,31 @@ -import * as qe from './qe' import * as readline from 'node:readline' -import * as jsonRpc from './jsonRpc' - -// pg dependencies -import * as prismaPg from '@prisma/adapter-pg' - -// neon dependencies -import { fetch } from 'undici' -import { WebSocket } from 'ws' -import { pg, neon, planetScale, libSql } from '@prisma/bundled-js-drivers' -import * as prismaNeon from '@prisma/adapter-neon' - -// libsql dependencies -import { PrismaLibSQL } from '@prisma/adapter-libsql' +import { match } from 'ts-pattern' +import * as S from '@effect/schema/Schema' +import {bindAdapter, ErrorCapturingDriverAdapter} from '@prisma/driver-adapter-utils' +import { webcrypto } from 'node:crypto' -// planetscale dependencies -import { PrismaPlanetScale } from '@prisma/adapter-planetscale' - - -import {bindAdapter, DriverAdapter, ErrorCapturingDriverAdapter} from "@prisma/driver-adapter-utils"; -import { webcrypto } from 'node:crypto'; +import type { DriverAdaptersManager } from './driver-adapters-manager' +import { jsonRpc, Env, ExternalTestExecutor } from './types' +import * as qe from './qe' +import { PgManager } from './driver-adapters-manager/pg' +import { NeonWsManager } from './driver-adapters-manager/neon.ws' +import { LibSQLManager } from './driver-adapters-manager/libsql' +import { PlanetScaleManager } from './driver-adapters-manager/planetscale' +import { D1Manager } from './driver-adapters-manager/d1' if (!global.crypto) { global.crypto = 
webcrypto as Crypto } - -const SUPPORTED_ADAPTERS: Record Promise> - = { - "pg": pgAdapter, - "neon:ws" : neonWsAdapter, - "libsql": libsqlAdapter, - "planetscale": planetscaleAdapter, - }; +async function initialiseDriverAdapterManager(env: Env, migrationScript?: string): Promise { + return match(env) + .with({ DRIVER_ADAPTER: 'pg' }, async (env) => await PgManager.setup(env)) + .with({ DRIVER_ADAPTER: 'neon:ws' }, async (env) => await NeonWsManager.setup(env)) + .with({ DRIVER_ADAPTER: 'libsql' }, async (env) => await LibSQLManager.setup(env)) + .with({ DRIVER_ADAPTER: 'planetscale' }, async (env) => await PlanetScaleManager.setup(env)) + .with({ DRIVER_ADAPTER: 'd1' }, async (env) => await D1Manager.setup(env, migrationScript)) + .exhaustive() +} // conditional debug logging based on LOG_LEVEL env var const debug = (() => { @@ -49,6 +42,9 @@ const debug = (() => { const err = (...args: any[]) => console.error('[nodejs] ERROR:', ...args); async function main(): Promise { + const env = S.decodeUnknownSync(Env)(process.env) + console.log('[env]', env) + const iface = readline.createInterface({ input: process.stdin, output: process.stdout, @@ -57,10 +53,11 @@ async function main(): Promise { iface.on('line', async (line) => { try { - const request: jsonRpc.Request = JSON.parse(line); // todo: validate + const request = S.decodeSync(jsonRpc.RequestFromString)(line) debug(`Got a request: ${line}`) + try { - const response = await handleRequest(request.method, request.params) + const response = await handleRequest(request, env) respondOk(request.id, response) } catch (err) { debug("[nodejs] Error from request handler: ", err) @@ -71,57 +68,57 @@ async function main(): Promise { } } catch (err) { debug("Received non-json line: ", line); + console.error(err) } }); } const state: Record = {} -async function handleRequest(method: string, params: unknown): Promise { +async function handleRequest({ method, params }: jsonRpc.Request, env: Env): Promise { switch (method) { 
case 'initializeSchema': { - interface InitializeSchemaParams { - schema: string - schemaId: string - url: string, - } - - const castParams = params as InitializeSchemaParams; + const { url, schema, schemaId, migrationScript } = params const logs = [] as string[] - const [engine, adapter] = await initQe(castParams.url, castParams.schema, (log) => { - logs.push(log) - }); - await engine.connect("") - state[castParams.schemaId] = { + const logCallback = (log) => { logs.push(log) } + + const driverAdapterManager = await initialiseDriverAdapterManager(env, migrationScript) + const engineType = env.EXTERNAL_TEST_EXECUTOR ?? 'Napi' + + const { engine, adapter } = await initQe({ + engineType, + url, + driverAdapterManager,schema, + logCallback, + }) + await engine.connect('') + + state[schemaId] = { engine, + driverAdapterManager, adapter, logs } return null } case 'query': { - interface QueryPayload { - query: string - schemaId: number - txId?: string - } - debug("Got `query`", params) - const castParams = params as QueryPayload; - const engine = state[castParams.schemaId].engine - const result = await engine.query(JSON.stringify(castParams.query), "", castParams.txId) + const { query, schemaId, txId } = params + const engine = state[schemaId].engine + const result = await engine.query(JSON.stringify(query), "", txId) const parsedResult = JSON.parse(result) if (parsedResult.errors) { const error = parsedResult.errors[0]?.user_facing_error if (error.error_code === 'P2036') { - const jsError = state[castParams.schemaId].adapter.errorRegistry.consumeError(error.meta.id) + const jsError = state[schemaId].adapter.errorRegistry.consumeError(error.meta.id) if (!jsError) { err(`Something went wrong. 
Engine reported external error with id ${error.meta.id}, but it was not registered.`) } else { @@ -137,59 +134,38 @@ async function handleRequest(method: string, params: unknown): Promise } case 'startTx': { - interface StartTxPayload { - schemaId: number, - options: unknown - } - debug("Got `startTx", params) - const {schemaId, options} = params as StartTxPayload + const { schemaId, options } = params const result = await state[schemaId].engine.startTransaction(JSON.stringify(options), "") return JSON.parse(result) } case 'commitTx': { - interface CommitTxPayload { - schemaId: number, - txId: string, - } - debug("Got `commitTx", params) - const {schemaId, txId} = params as CommitTxPayload + const { schemaId, txId } = params const result = await state[schemaId].engine.commitTransaction(txId, '{}') return JSON.parse(result) } case 'rollbackTx': { - interface RollbackTxPayload { - schemaId: number, - txId: string, - } - debug("Got `rollbackTx", params) - const {schemaId, txId} = params as RollbackTxPayload + const { schemaId, txId } = params const result = await state[schemaId].engine.rollbackTransaction(txId, '{}') return JSON.parse(result) } case 'teardown': { - interface TeardownPayload { - schemaId: number - } - debug("Got `teardown", params) - const castParams = params as TeardownPayload; - await state[castParams.schemaId].engine.disconnect("") - delete state[castParams.schemaId] + const { schemaId } = params + + await state[schemaId].engine.disconnect("") + await state[schemaId].driverAdapterManager.teardown() + delete state[schemaId] return {} } case 'getLogs': { - interface GetLogsPayload { - schemaId: number - } - - const castParams = params as GetLogsPayload - return state[castParams.schemaId].logs + const { schemaId } = params + return state[schemaId].logs } default: { throw new Error(`Unknown method: \`${method}\``) @@ -216,93 +192,29 @@ function respondOk(requestId: number, payload: unknown) { console.log(JSON.stringify(msg)) } -async function 
initQe(url: string, prismaSchema: string, logCallback: qe.QueryLogCallback): Promise<[qe.QueryEngine, ErrorCapturingDriverAdapter]> { - const engineType = process.env.EXTERNAL_TEST_EXECUTOR === "Wasm" ? "Wasm" : "Napi"; - const adapter = await adapterFromEnv(url) as DriverAdapter - const errorCapturingAdapter = bindAdapter(adapter) - const engineInstance = await qe.initQueryEngine(engineType, errorCapturingAdapter, prismaSchema, logCallback, debug) - return [engineInstance, errorCapturingAdapter]; -} - -async function adapterFromEnv(url: string): Promise { - const adapter = process.env.DRIVER_ADAPTER ?? '' - - if (adapter == '') { - throw new Error("DRIVER_ADAPTER is not defined or empty.") - } - - if (!(adapter in SUPPORTED_ADAPTERS)) { - throw new Error(`Unsupported driver adapter: ${adapter}`) - } - - return await SUPPORTED_ADAPTERS[adapter](url) -} - -function postgres_options(url: string): any { - let args: any = {connectionString: url} - const schemaName = postgresSchemaName(url) - if (schemaName != null) { - args.options = `--search_path="${schemaName}"` - } - return args; -} - -function postgresSchemaName(url: string) { - return new URL(url).searchParams.get('schema') ?? undefined -} - -async function pgAdapter(url: string): Promise { - const schemaName = postgresSchemaName(url) - const pool = new pg.Pool(postgres_options(url)) - return new prismaPg.PrismaPg(pool, { - schema: schemaName - }) - -} - -async function neonWsAdapter(url: string): Promise { - const { neonConfig, Pool: NeonPool } = neon - const proxyURL = JSON.parse(process.env.DRIVER_ADAPTER_CONFIG || '{}').proxy_url ?? 
'' - if (proxyURL == '') { - throw new Error("DRIVER_ADAPTER_CONFIG is not defined or empty, but its required for neon adapter."); - } - - neonConfig.wsProxy = () => proxyURL - neonConfig.webSocketConstructor = WebSocket - neonConfig.useSecureWebSocket = false - neonConfig.pipelineConnect = false - - const schemaName = postgresSchemaName(url) - - const pool = new NeonPool(postgres_options(url)) - return new prismaNeon.PrismaNeon(pool, { schema: schemaName }) +type InitQueryEngineParams = { + engineType: ExternalTestExecutor, + driverAdapterManager: DriverAdaptersManager, + url: string, + schema: string, + logCallback: qe.QueryLogCallback } -async function libsqlAdapter(url: string): Promise { - const libsql = libSql.createClient({ url, intMode: 'bigint' }) - return new PrismaLibSQL(libsql) -} - -async function planetscaleAdapter(url: string): Promise { - const proxyUrl = JSON.parse(process.env.DRIVER_ADAPTER_CONFIG || '{}').proxy_url ?? '' - if (proxyUrl == '') { - throw new Error("DRIVER_ADAPTER_CONFIG is not defined or empty, but its required for planetscale adapter."); +async function initQe({ + engineType, + driverAdapterManager, + url, + schema, + logCallback +}: InitQueryEngineParams) { + const adapter = await driverAdapterManager.connect({ url }) + const errorCapturingAdapter = bindAdapter(adapter) + const engineInstance = await qe.initQueryEngine(engineType, errorCapturingAdapter, schema, logCallback, debug) + + return { + engine: engineInstance, + adapter: errorCapturingAdapter, } - - const client = new planetScale.Client({ - // preserving path name so proxy url would look like real DB url - url: copyPathName(url, proxyUrl), - fetch, - }) - - return new PrismaPlanetScale(client) -} - -function copyPathName(fromUrl: string, toUrl: string) { - const toObj = new URL(toUrl) - toObj.pathname = new URL(fromUrl).pathname - - return toObj.toString() } main().catch(err) diff --git a/query-engine/driver-adapters/executor/src/types/d1.ts 
b/query-engine/driver-adapters/executor/src/types/d1.ts new file mode 100644 index 000000000000..52eb39500264 --- /dev/null +++ b/query-engine/driver-adapters/executor/src/types/d1.ts @@ -0,0 +1,27 @@ +import * as S from '@effect/schema/Schema' + +const D1Table = S.union( + S.struct({ + schema: S.union(S.literal('main'), S.string), + name: S.string, + type: S.literal('table', 'view', 'shadow', 'virtual'), + }), + S.struct({ + schema: S.literal('main'), + name: S.literal('sqlite_sequence'), + type: S.literal('table'), + }), + S.struct({ + schema: S.literal('main'), + name: S.literal('_cf_KV'), + type: S.literal('table'), + }), + S.struct({ + schema: S.literal('main'), + name: S.literal('sqlite_schema'), + type: S.literal('table'), + }), +) +export type D1Table = S.Schema.Type + +export const D1Tables = S.array(D1Table) diff --git a/query-engine/driver-adapters/executor/src/types/env.ts b/query-engine/driver-adapters/executor/src/types/env.ts new file mode 100644 index 000000000000..e80e1c87e0fc --- /dev/null +++ b/query-engine/driver-adapters/executor/src/types/env.ts @@ -0,0 +1,46 @@ +import * as S from '@effect/schema/Schema' + +const DriverAdapterConfig = S.struct({ + proxy_url: S.string.pipe(S.nonEmpty({ + message: () => 'proxy_url must not be empty', + })), +}) + +const DriverAdapterConfigFromString = S.transform( + S.string, + DriverAdapterConfig, + (str) => JSON.parse(str), + (config) => JSON.stringify(config), +) + +const EnvPlanetScale = S.struct({ + DRIVER_ADAPTER: S.literal('planetscale'), + DRIVER_ADAPTER_CONFIG: DriverAdapterConfigFromString, +}) + +const EnvNeonWS = S.struct({ + DRIVER_ADAPTER: S.literal('neon:ws'), + DRIVER_ADAPTER_CONFIG: DriverAdapterConfigFromString, +}) + +export const ExternalTestExecutor = S.literal('Wasm', 'Napi') +export type ExternalTestExecutor = S.Schema.Type + +export const Env = S.extend( + S.union( + EnvPlanetScale, + EnvNeonWS, + S.struct({ + DRIVER_ADAPTER: S.literal('pg', 'libsql', 'd1'), + }), + ), + S.struct({ + 
EXTERNAL_TEST_EXECUTOR: S.optional(ExternalTestExecutor), + }), +) + +export type Env = S.Schema.Type + +export type DriverAdapterTag = Env['DRIVER_ADAPTER'] + +export type EnvForAdapter = Env & { readonly DRIVER_ADAPTER: T } diff --git a/query-engine/driver-adapters/executor/src/types/index.ts b/query-engine/driver-adapters/executor/src/types/index.ts new file mode 100644 index 000000000000..1792388e8836 --- /dev/null +++ b/query-engine/driver-adapters/executor/src/types/index.ts @@ -0,0 +1,2 @@ +export * from './env' +export * as jsonRpc from './jsonRpc' diff --git a/query-engine/driver-adapters/executor/src/types/jsonRpc.ts b/query-engine/driver-adapters/executor/src/types/jsonRpc.ts new file mode 100644 index 000000000000..204aab3625c7 --- /dev/null +++ b/query-engine/driver-adapters/executor/src/types/jsonRpc.ts @@ -0,0 +1,130 @@ +import * as S from '@effect/schema/Schema' + +const SchemaId = S.number.pipe(S.int(), S.nonNegative()) +export type SchemaId = S.Schema.Type + +const InitializeSchemaParams = S.struct({ + schemaId: SchemaId, + schema: S.string, + url: S.string, + migrationScript: S.optional(S.string), +}) +export type InitializeSchemaParams = S.Schema.Type + +const InitializeSchema = S.struct({ + method: S.literal('initializeSchema'), + params: InitializeSchemaParams, +}) + +const QueryParams = S.struct({ + schemaId: SchemaId, + query: S.record(S.string, S.unknown), + txId: S.nullable(S.string), +}) +export type QueryParams = S.Schema.Type + +const Query = S.struct({ + method: S.literal('query'), + params: QueryParams, +}) + +const StartTxParams = S.struct({ + schemaId: SchemaId, + options: S.unknown, +}) +export type StartTxParams = S.Schema.Type + +const StartTx = S.struct({ + method: S.literal('startTx'), + params: StartTxParams, +}) + +const CommitTxParams = S.struct({ + schemaId: SchemaId, + txId: S.string, +}) +export type CommitTxParams = S.Schema.Type + +const CommitTx = S.struct({ + method: S.literal('commitTx'), + params: CommitTxParams, 
+}) + +const RollbackTxParams = S.struct({ + schemaId: SchemaId, + txId: S.string, +}) +export type RollbackTxParams = S.Schema.Type + +const RollbackTx = S.struct({ + method: S.literal('rollbackTx'), + params: RollbackTxParams, +}) + +const TeardownParams = S.struct({ + schemaId: SchemaId, +}) +export type TeardownParams = S.Schema.Type + +const TeardownSchema = S.struct({ + method: S.literal('teardown'), + params: TeardownParams, +}) + +const GetLogsParams = S.struct({ + schemaId: SchemaId, +}) +export type GetLogsParams = S.Schema.Type + +const GetLogs = S.struct({ + method: S.literal('getLogs'), + params: GetLogsParams, +}) + +export const Request = S.extend( + S.struct({ + jsonrpc: S.literal('2.0'), + id: S.number.pipe(S.int()), + }), + S.union( + InitializeSchema, + Query, + StartTx, + CommitTx, + RollbackTx, + TeardownSchema, + GetLogs, + ), +) + +export type Request = S.Schema.Type + +export const RequestFromString = S.transform( + S.string, + Request, + (str) => JSON.parse(str), + (request) => JSON.stringify(request), +) +export type RequestFromString = S.Schema.Type + +export type Response = OkResponse | ErrResponse + +export interface OkResponse { + jsonrpc: '2.0' + result: unknown + error?: never + id: number +} + +export interface ErrResponse { + jsonrpc: '2.0' + error: RpcError + result?: never + id: number +} + +export interface RpcError { + code: number + message: string + data?: unknown +} diff --git a/query-engine/driver-adapters/executor/src/utils.ts b/query-engine/driver-adapters/executor/src/utils.ts new file mode 100644 index 000000000000..f46e44dd2d95 --- /dev/null +++ b/query-engine/driver-adapters/executor/src/utils.ts @@ -0,0 +1,42 @@ +import type { D1Database, D1PreparedStatement, D1Result } from '@cloudflare/workers-types' +import path from 'node:path' +import { fileURLToPath } from 'node:url' + +export const __dirname = path.dirname(fileURLToPath(import.meta.url)) + +export function copyPathName({ fromURL, toURL }: { fromURL: string, 
toURL: string }) { + const toObj = new URL(toURL) + toObj.pathname = new URL(fromURL).pathname + + return toObj.toString() +} + +export function postgresSchemaName(url: string) { + return new URL(url).searchParams.get('schema') ?? undefined +} + +type PostgresOptions = { + connectionString: string, options?: string +} + +export function postgres_options(url: string): PostgresOptions { + let args: PostgresOptions = { connectionString: url } + + const schemaName = postgresSchemaName(url) + + if (schemaName != null) { + args.options = `--search_path="${schemaName}"` + } + + return args +} + +// Utility to avoid the `D1_ERROR: No SQL statements detected` error when running +// `D1_DATABASE.batch` with an empty array of statements. +export async function runBatch(D1_DATABASE: D1Database, statements: D1PreparedStatement[]): Promise[]> { + if (statements.length === 0) { + return [] + } + + return D1_DATABASE.batch(statements) +} diff --git a/query-engine/driver-adapters/executor/src/wasm.ts b/query-engine/driver-adapters/executor/src/wasm.ts index c9040b398395..c60d54f398c3 100644 --- a/query-engine/driver-adapters/executor/src/wasm.ts +++ b/query-engine/driver-adapters/executor/src/wasm.ts @@ -3,9 +3,7 @@ import * as wasmMysql from '../../../query-engine-wasm/pkg/mysql/query_engine_bg import * as wasmSqlite from '../../../query-engine-wasm/pkg/sqlite/query_engine_bg.js' import fs from 'node:fs/promises' import path from 'node:path' -import { fileURLToPath } from 'node:url' - -const dirname = path.dirname(fileURLToPath(import.meta.url)) +import { __dirname } from './utils' const wasm = { postgres: wasmPostgres, @@ -13,19 +11,16 @@ const wasm = { sqlite: wasmSqlite } -type EngineName = keyof typeof wasm; +type EngineName = keyof typeof wasm const initializedModules = new Set() - - export async function getEngineForProvider(provider: EngineName) { const engine = wasm[provider] if (!initializedModules.has(provider)) { const subDir = provider === 'postgres' ? 
'postgresql' : provider - const bytes = await fs.readFile(path.resolve(dirname, '..', '..', '..', 'query-engine-wasm', 'pkg', subDir, 'query_engine_bg.wasm')) - console.error(bytes) - const module = new WebAssembly.Module(bytes) + const bytes = await fs.readFile(path.resolve(__dirname, '..', '..', '..', 'query-engine-wasm', 'pkg', subDir, 'query_engine_bg.wasm')) + const module = new WebAssembly.Module(bytes) const instance = new WebAssembly.Instance(module, { './query_engine_bg.js': engine }) engine.__wbg_set_wasm(instance.exports); initializedModules.add(provider) diff --git a/query-engine/driver-adapters/executor/wrangler.toml b/query-engine/driver-adapters/executor/wrangler.toml new file mode 100644 index 000000000000..d60992dce4ac --- /dev/null +++ b/query-engine/driver-adapters/executor/wrangler.toml @@ -0,0 +1,4 @@ +[[d1_databases]] +binding = "D1_DATABASE" # i.e., available in the Worker at env.D1_DATABASE +database_name = "d1-qe" +database_id = "" diff --git a/query-engine/driver-adapters/package.json b/query-engine/driver-adapters/package.json index 6682ebf08ac6..4d7b5a59e716 100644 --- a/query-engine/driver-adapters/package.json +++ b/query-engine/driver-adapters/package.json @@ -9,17 +9,19 @@ }, "license": "Apache-2.0", "scripts": { - "build": "pnpm -r run build", + "build": "pnpm build:prisma && pnpm build:executor", + "build:prisma": "pnpm -r --parallel dev", + "build:executor": "pnpm -r --filter executor build", "lint": "pnpm -r run lint", "clean": "git clean -dXf -e !query-engine/driver-adapters" }, "keywords": [], "author": "", "devDependencies": { - "@types/node": "20.11.24", - "esbuild": "0.20.1", + "@types/node": "20.12.3", + "esbuild": "0.20.2", "tsup": "8.0.2", "tsx": "4.7.1", - "typescript": "5.3.3" + "typescript": "5.4.3" } } diff --git a/query-engine/driver-adapters/pnpm-workspace.yaml b/query-engine/driver-adapters/pnpm-workspace.yaml index 7d2cb5c6d311..c12624bc6a17 100644 --- a/query-engine/driver-adapters/pnpm-workspace.yaml +++ 
b/query-engine/driver-adapters/pnpm-workspace.yaml @@ -1,9 +1,10 @@ packages: + - '../../../prisma/packages/adapter-d1' - '../../../prisma/packages/adapter-libsql' - '../../../prisma/packages/adapter-neon' - - '../../../prisma/packages/adapter-pg' - '../../../prisma/packages/adapter-planetscale' - - '../../../prisma/packages/driver-adapter-utils' - - '../../../prisma/packages/debug' + - '../../../prisma/packages/adapter-pg' - '../../../prisma/packages/bundled-js-drivers' + - '../../../prisma/packages/debug' + - '../../../prisma/packages/driver-adapter-utils' - './executor' diff --git a/query-engine/driver-adapters/src/conversion/js_arg.rs b/query-engine/driver-adapters/src/conversion/js_arg.rs index d6f67ed7716d..6521829bd274 100644 --- a/query-engine/driver-adapters/src/conversion/js_arg.rs +++ b/query-engine/driver-adapters/src/conversion/js_arg.rs @@ -2,6 +2,7 @@ use serde_json::value::Value as JsonValue; #[derive(Debug, PartialEq)] pub enum JSArg { + SafeInt(i32), Value(serde_json::Value), Buffer(Vec), Array(Vec), diff --git a/query-engine/driver-adapters/src/conversion/js_to_quaint.rs b/query-engine/driver-adapters/src/conversion/js_to_quaint.rs index d57b2f5bd7b9..b723cced716e 100644 --- a/query-engine/driver-adapters/src/conversion/js_to_quaint.rs +++ b/query-engine/driver-adapters/src/conversion/js_to_quaint.rs @@ -2,7 +2,7 @@ use std::borrow::Cow; use std::str::FromStr; pub use crate::types::{ColumnType, JSResultSet}; -use quaint::bigdecimal::{BigDecimal, FromPrimitive}; +use quaint::bigdecimal::BigDecimal; use quaint::chrono::{DateTime, NaiveDate, NaiveTime, Utc}; use quaint::{ connector::ResultSet as QuaintResultSet, @@ -137,12 +137,8 @@ pub fn js_value_to_quaint( serde_json::Value::String(s) => BigDecimal::from_str(&s).map(QuaintValue::numeric).map_err(|e| { conversion_error!("invalid numeric value when parsing {s} in column '{column_name}': {e}") }), - serde_json::Value::Number(n) => n - .as_f64() - .and_then(BigDecimal::from_f64) - 
.ok_or(conversion_error!( - "number must be an f64 in column '{column_name}', got {n}" - )) + serde_json::Value::Number(n) => BigDecimal::from_str(&n.to_string()) + .map_err(|_| conversion_error!("number must be an f64 in column '{column_name}', got {n}")) .map(QuaintValue::numeric), serde_json::Value::Null => Ok(QuaintValue::null_numeric()), mismatch => Err(conversion_error!( diff --git a/query-engine/driver-adapters/src/conversion/mysql.rs b/query-engine/driver-adapters/src/conversion/mysql.rs index bd59d3b94ed0..08704b06bccf 100644 --- a/query-engine/driver-adapters/src/conversion/mysql.rs +++ b/query-engine/driver-adapters/src/conversion/mysql.rs @@ -13,6 +13,7 @@ pub fn value_to_js_arg(value: &quaint::Value) -> serde_json::Result { quaint::ValueType::Bytes(Some(bytes)) => JSArg::Buffer(bytes.to_vec()), quaint::ValueType::Date(Some(d)) => JSArg::Value(JsonValue::String(d.format(DATE_FORMAT).to_string())), quaint::ValueType::DateTime(Some(dt)) => JSArg::Value(JsonValue::String(dt.format(DATETIME_FORMAT).to_string())), + quaint::ValueType::Int32(Some(value)) => JSArg::SafeInt(*value), quaint::ValueType::Time(Some(t)) => JSArg::Value(JsonValue::String(t.format(TIME_FORMAT).to_string())), quaint::ValueType::Array(Some(ref items)) => JSArg::Array( items diff --git a/query-engine/driver-adapters/src/conversion/postgres.rs b/query-engine/driver-adapters/src/conversion/postgres.rs index 949cc17e9eba..524834111bce 100644 --- a/query-engine/driver-adapters/src/conversion/postgres.rs +++ b/query-engine/driver-adapters/src/conversion/postgres.rs @@ -14,6 +14,7 @@ pub fn value_to_js_arg(value: &quaint::Value) -> serde_json::Result { (quaint::ValueType::DateTime(Some(dt)), _) => JSArg::Value(JsonValue::String(dt.naive_utc().to_string())), (quaint::ValueType::Json(Some(s)), _) => JSArg::Value(JsonValue::String(serde_json::to_string(s)?)), (quaint::ValueType::Bytes(Some(bytes)), _) => JSArg::Buffer(bytes.to_vec()), + (quaint::ValueType::Int32(Some(value)), _) => 
JSArg::SafeInt(*value), (quaint::ValueType::Numeric(Some(bd)), _) => JSArg::Value(JsonValue::String(bd.to_string())), (quaint::ValueType::Array(Some(items)), _) => JSArg::Array( items diff --git a/query-engine/driver-adapters/src/conversion/sqlite.rs b/query-engine/driver-adapters/src/conversion/sqlite.rs index b11acdca0d7f..af070ec0b2cd 100644 --- a/query-engine/driver-adapters/src/conversion/sqlite.rs +++ b/query-engine/driver-adapters/src/conversion/sqlite.rs @@ -9,6 +9,7 @@ pub fn value_to_js_arg(value: &quaint::Value) -> serde_json::Result { }, quaint::ValueType::Json(Some(s)) => JSArg::Value(s.to_owned()), quaint::ValueType::Bytes(Some(bytes)) => JSArg::Buffer(bytes.to_vec()), + quaint::ValueType::Int32(Some(value)) => JSArg::SafeInt(*value), quaint::ValueType::Array(Some(ref items)) => JSArg::Array( items .iter() diff --git a/query-engine/driver-adapters/src/napi/conversion.rs b/query-engine/driver-adapters/src/napi/conversion.rs index 6cfe445925e3..2fab5a28bb73 100644 --- a/query-engine/driver-adapters/src/napi/conversion.rs +++ b/query-engine/driver-adapters/src/napi/conversion.rs @@ -16,6 +16,7 @@ impl FromNapiValue for JSArg { impl ToNapiValue for JSArg { unsafe fn to_napi_value(env: napi::sys::napi_env, value: Self) -> napi::Result { match value { + JSArg::SafeInt(v) => ToNapiValue::to_napi_value(env, v), JSArg::Value(v) => ToNapiValue::to_napi_value(env, v), JSArg::Buffer(bytes) => { let env = napi::Env::from_raw(env); diff --git a/query-engine/driver-adapters/src/wasm/conversion.rs b/query-engine/driver-adapters/src/wasm/conversion.rs index 73e6a7c30331..d2039210a626 100644 --- a/query-engine/driver-adapters/src/wasm/conversion.rs +++ b/query-engine/driver-adapters/src/wasm/conversion.rs @@ -24,6 +24,7 @@ impl ToJsValue for Query { impl ToJsValue for JSArg { fn to_js_value(&self) -> Result { match self { + JSArg::SafeInt(num) => Ok(JsValue::from(*num)), JSArg::Value(value) => serde_serialize(value), JSArg::Buffer(buf) => { let array = 
Uint8Array::from(buf.as_slice()); diff --git a/query-engine/query-engine-node-api/src/engine.rs b/query-engine/query-engine-node-api/src/engine.rs index 4ca524af699c..d9f5314e2489 100644 --- a/query-engine/query-engine-node-api/src/engine.rs +++ b/query-engine/query-engine-node-api/src/engine.rs @@ -122,7 +122,7 @@ impl QueryEngine { schema .diagnostics .to_result() - .map_err(|err| ApiError::conversion(err, schema.db.source()))?; + .map_err(|err| ApiError::conversion(err, schema.db.source_assert_single()))?; config .resolve_datasource_urls_query_engine( @@ -130,11 +130,11 @@ impl QueryEngine { |key| env.get(key).map(ToString::to_string), ignore_env_var_errors, ) - .map_err(|err| ApiError::conversion(err, schema.db.source()))?; + .map_err(|err| ApiError::conversion(err, schema.db.source_assert_single()))?; config .validate_that_one_datasource_is_provided() - .map_err(|errors| ApiError::conversion(errors, schema.db.source()))?; + .map_err(|errors| ApiError::conversion(errors, schema.db.source_assert_single()))?; let enable_metrics = config.preview_features().contains(PreviewFeature::Metrics); let enable_tracing = config.preview_features().contains(PreviewFeature::Tracing); @@ -203,7 +203,10 @@ impl QueryEngine { builder.native.env.get(key).map(ToString::to_string) }) .map_err(|err| { - crate::error::ApiError::Conversion(err, builder.schema.db.source().to_owned()) + crate::error::ApiError::Conversion( + err, + builder.schema.db.source_assert_single().to_owned(), + ) })?; ConnectorKind::Rust { url, diff --git a/query-engine/query-engine-wasm/analyse/package.json b/query-engine/query-engine-wasm/analyse/package.json index e752ad090781..718129e4f4c9 100644 --- a/query-engine/query-engine-wasm/analyse/package.json +++ b/query-engine/query-engine-wasm/analyse/package.json @@ -10,6 +10,6 @@ "devDependencies": { "ts-node": "10.9.2", "tsx": "4.7.1", - "typescript": "5.4.2" + "typescript": "5.4.3" } } diff --git a/query-engine/query-engine-wasm/analyse/pnpm-lock.yaml 
b/query-engine/query-engine-wasm/analyse/pnpm-lock.yaml index a15028ee9f73..6f0e83bca27b 100644 --- a/query-engine/query-engine-wasm/analyse/pnpm-lock.yaml +++ b/query-engine/query-engine-wasm/analyse/pnpm-lock.yaml @@ -7,13 +7,13 @@ settings: devDependencies: ts-node: specifier: 10.9.2 - version: 10.9.2(@types/node@20.10.8)(typescript@5.4.2) + version: 10.9.2(@types/node@20.10.8)(typescript@5.4.3) tsx: specifier: 4.7.1 version: 4.7.1 typescript: - specifier: 5.4.2 - version: 5.4.2 + specifier: 5.4.3 + version: 5.4.3 packages: @@ -346,7 +346,7 @@ packages: resolution: {integrity: sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw==} dev: true - /ts-node@10.9.2(@types/node@20.10.8)(typescript@5.4.2): + /ts-node@10.9.2(@types/node@20.10.8)(typescript@5.4.3): resolution: {integrity: sha512-f0FFpIdcHgn8zcPSbf1dRevwt047YMnaiJM3u2w2RewrB+fob/zePZcrOyQoLMMO7aBIddLcQIEK5dYjkLnGrQ==} hasBin: true peerDependencies: @@ -372,7 +372,7 @@ packages: create-require: 1.1.1 diff: 4.0.2 make-error: 1.3.6 - typescript: 5.4.2 + typescript: 5.4.3 v8-compile-cache-lib: 3.0.1 yn: 3.1.1 dev: true @@ -388,8 +388,8 @@ packages: fsevents: 2.3.3 dev: true - /typescript@5.4.2: - resolution: {integrity: sha512-+2/g0Fds1ERlP6JsakQQDXjZdZMM+rqpamFZJEKh4kwTIn3iDkgKtby0CeNd5ATNZ4Ry1ax15TMx0W2V+miizQ==} + /typescript@5.4.3: + resolution: {integrity: sha512-KrPd3PKaCLr78MalgiwJnA25Nm8HAmdwN3mYUYZgG/wizIo9EainNVQI9/yDavtVFRN2h3k8uf3GLHuhDMgEHg==} engines: {node: '>=14.17'} hasBin: true dev: true diff --git a/query-engine/query-structure/src/composite_type.rs b/query-engine/query-structure/src/composite_type.rs index 431c033dd195..9bbff74e1290 100644 --- a/query-engine/query-structure/src/composite_type.rs +++ b/query-engine/query-structure/src/composite_type.rs @@ -1,6 +1,7 @@ use crate::{ast, Field}; +use psl::parser_database::CompositeTypeId; -pub type CompositeType = crate::Zipper; +pub type CompositeType = crate::Zipper; impl CompositeType { pub fn 
name(&self) -> &str { diff --git a/query-engine/query-structure/src/field/composite.rs b/query-engine/query-structure/src/field/composite.rs index 30564e5859b7..aebe2b36aadf 100644 --- a/query-engine/query-structure/src/field/composite.rs +++ b/query-engine/query-structure/src/field/composite.rs @@ -1,6 +1,6 @@ use crate::{parent_container::ParentContainer, CompositeType}; use psl::{ - parser_database::ScalarFieldId, + parser_database::{self as db, ScalarFieldId}, schema_ast::ast::{self, FieldArity}, }; use std::fmt::{Debug, Display}; @@ -8,7 +8,7 @@ use std::fmt::{Debug, Display}; #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub enum CompositeFieldId { InModel(ScalarFieldId), - InCompositeType((ast::CompositeTypeId, ast::FieldId)), + InCompositeType((db::CompositeTypeId, ast::FieldId)), } pub type CompositeField = crate::Zipper; diff --git a/query-engine/query-structure/src/field/mod.rs b/query-engine/query-structure/src/field/mod.rs index 39e43f186c13..d8faf404e662 100644 --- a/query-engine/query-structure/src/field/mod.rs +++ b/query-engine/query-structure/src/field/mod.rs @@ -6,8 +6,8 @@ pub use composite::*; pub use relation::*; pub use scalar::*; -use crate::{ast, parent_container::ParentContainer, Model}; -use psl::parser_database::{walkers, ScalarType}; +use crate::{parent_container::ParentContainer, Model}; +use psl::parser_database::{walkers, EnumId, ScalarType}; use std::{borrow::Cow, hash::Hash}; #[derive(Debug, Clone, PartialEq, Eq, Hash)] @@ -143,7 +143,7 @@ pub enum TypeIdentifier { Float, Decimal, Boolean, - Enum(ast::EnumId), + Enum(EnumId), UUID, Json, DateTime, diff --git a/query-engine/query-structure/src/field/scalar.rs b/query-engine/query-structure/src/field/scalar.rs index becd438db276..c03ada0a9b71 100644 --- a/query-engine/query-structure/src/field/scalar.rs +++ b/query-engine/query-structure/src/field/scalar.rs @@ -1,7 +1,7 @@ use crate::{ast, parent_container::ParentContainer, prelude::*, DefaultKind, NativeTypeInstance, 
ValueGenerator}; use chrono::{DateTime, FixedOffset}; use psl::{ - parser_database::{walkers, ScalarFieldType, ScalarType}, + parser_database::{self as db, walkers, ScalarFieldType, ScalarType}, schema_ast::ast::FieldArity, }; use std::fmt::{Debug, Display}; @@ -12,7 +12,7 @@ pub type ScalarFieldRef = ScalarField; #[derive(Debug, Clone, Copy, Hash, PartialEq, Eq)] pub enum ScalarFieldId { InModel(psl::parser_database::ScalarFieldId), - InCompositeType((ast::CompositeTypeId, ast::FieldId)), + InCompositeType((db::CompositeTypeId, ast::FieldId)), } impl ScalarField { diff --git a/query-engine/query-structure/src/internal_data_model.rs b/query-engine/query-structure/src/internal_data_model.rs index 70f8761cbdc0..ce8dd059fa03 100644 --- a/query-engine/query-structure/src/internal_data_model.rs +++ b/query-engine/query-structure/src/internal_data_model.rs @@ -1,5 +1,5 @@ use crate::{prelude::*, CompositeType, InternalEnum}; -use psl::schema_ast::ast; +use psl::parser_database as db; use std::sync::Arc; pub(crate) type InternalDataModelRef = InternalDataModel; @@ -52,11 +52,11 @@ impl InternalDataModel { .ok_or_else(|| DomainError::ModelNotFound { name: name.to_string() }) } - pub fn find_composite_type_by_id(&self, ctid: ast::CompositeTypeId) -> CompositeType { + pub fn find_composite_type_by_id(&self, ctid: db::CompositeTypeId) -> CompositeType { self.clone().zip(ctid) } - pub fn find_model_by_id(&self, model_id: ast::ModelId) -> Model { + pub fn find_model_by_id(&self, model_id: db::ModelId) -> Model { self.clone().zip(model_id) } diff --git a/query-engine/query-structure/src/internal_enum.rs b/query-engine/query-structure/src/internal_enum.rs index 6467adcebf6d..13dfd7206dca 100644 --- a/query-engine/query-structure/src/internal_enum.rs +++ b/query-engine/query-structure/src/internal_enum.rs @@ -1,9 +1,8 @@ use crate::Zipper; +use psl::{parser_database::EnumId, schema_ast::ast::EnumValueId}; -use psl::schema_ast::ast; - -pub type InternalEnum = Zipper; -pub type 
InternalEnumValue = Zipper; +pub type InternalEnum = Zipper; +pub type InternalEnumValue = Zipper; impl InternalEnum { pub fn name(&self) -> &str { diff --git a/query-engine/query-structure/src/model.rs b/query-engine/query-structure/src/model.rs index a2d9fa4ff462..310df1fbe6c3 100644 --- a/query-engine/query-structure/src/model.rs +++ b/query-engine/query-structure/src/model.rs @@ -1,7 +1,7 @@ use crate::prelude::*; -use psl::{parser_database::walkers, schema_ast::ast}; +use psl::parser_database::{walkers, ModelId}; -pub type Model = crate::Zipper; +pub type Model = crate::Zipper; impl Model { pub fn name(&self) -> &str { diff --git a/query-engine/request-handlers/src/handler.rs b/query-engine/request-handlers/src/handler.rs index cd5d887718f0..a8e6ba8b8a9b 100644 --- a/query-engine/request-handlers/src/handler.rs +++ b/query-engine/request-handlers/src/handler.rs @@ -1,5 +1,6 @@ use super::GQLResponse; use crate::{GQLError, PrismaResponse, RequestBody}; +use bigdecimal::BigDecimal; use futures::FutureExt; use indexmap::IndexMap; use query_core::{ @@ -11,7 +12,7 @@ use query_core::{ QueryDocument, QueryExecutor, TxId, }; use query_structure::{parse_datetime, stringify_datetime, PrismaValue}; -use std::{collections::HashMap, fmt, panic::AssertUnwindSafe}; +use std::{collections::HashMap, fmt, panic::AssertUnwindSafe, str::FromStr}; type ArgsToResult = (HashMap, IndexMap); @@ -258,6 +259,10 @@ impl<'a> RequestHandler<'a> { Some(t1) => Self::compare_values(t1, t2), None => left == right, }, + (ArgumentValue::Scalar(PrismaValue::Float(s1)), ArgumentValue::Scalar(PrismaValue::String(s2))) + | (ArgumentValue::Scalar(PrismaValue::String(s2)), ArgumentValue::Scalar(PrismaValue::Float(s1))) => { + BigDecimal::from_str(s2).map(|s2| s2 == *s1).unwrap_or(false) + } (left, right) => left == right, } } diff --git a/query-engine/schema/src/build.rs b/query-engine/schema/src/build.rs index 2970be408b59..b4562757b983 100644 --- a/query-engine/schema/src/build.rs +++ 
b/query-engine/schema/src/build.rs @@ -16,7 +16,7 @@ pub(crate) use output_types::{mutation_type, query_type}; use self::{enum_types::*, utils::*}; use crate::*; use psl::{datamodel_connector::ConnectorCapability, PreviewFeatures}; -use query_structure::{ast, Field as ModelField, Model, RelationFieldRef, TypeIdentifier}; +use query_structure::{Field as ModelField, Model, RelationFieldRef, TypeIdentifier}; pub fn build(schema: Arc, enable_raw_queries: bool) -> QuerySchema { let preview_features = schema.configuration.preview_features(); diff --git a/query-engine/schema/src/build/enum_types.rs b/query-engine/schema/src/build/enum_types.rs index b0ddc66a638d..7401732e939b 100644 --- a/query-engine/schema/src/build/enum_types.rs +++ b/query-engine/schema/src/build/enum_types.rs @@ -1,6 +1,7 @@ use super::*; use crate::EnumType; use constants::{filters, itx, json_null, load_strategy, ordering}; +use psl::parser_database as db; use query_structure::prelude::ParentContainer; pub(crate) fn sort_order_enum() -> EnumType { @@ -16,7 +17,7 @@ pub(crate) fn nulls_order_enum() -> EnumType { ) } -pub(crate) fn map_schema_enum_type(ctx: &'_ QuerySchema, enum_id: ast::EnumId) -> EnumType { +pub(crate) fn map_schema_enum_type(ctx: &'_ QuerySchema, enum_id: db::EnumId) -> EnumType { let ident = Identifier::new_model(IdentifierType::Enum(ctx.internal_data_model.clone().zip(enum_id))); let schema_enum = ctx.internal_data_model.clone().zip(enum_id); diff --git a/query-engine/schema/src/output_types.rs b/query-engine/schema/src/output_types.rs index 32956d01d50b..2b7a86dd5162 100644 --- a/query-engine/schema/src/output_types.rs +++ b/query-engine/schema/src/output_types.rs @@ -1,7 +1,7 @@ use super::*; use fmt::Debug; use once_cell::sync::Lazy; -use query_structure::ast::ModelId; +use psl::parser_database as db; use std::{borrow::Cow, fmt}; #[derive(Debug, Clone)] @@ -120,8 +120,7 @@ pub struct ObjectType<'a> { pub(crate) fields: OutputObjectFields<'a>, // Object types can directly map 
to models. - pub(crate) model: Option, - _heh: (), + pub(crate) model: Option, } impl Debug for ObjectType<'_> { @@ -145,7 +144,6 @@ impl<'a> ObjectType<'a> { identifier, fields: Arc::new(lazy), model: None, - _heh: (), } } @@ -215,7 +213,7 @@ impl<'a> OutputField<'a> { } } - pub fn model(&self) -> Option { + pub fn model(&self) -> Option { self.query_info.as_ref().and_then(|info| info.model) } diff --git a/query-engine/schema/src/query_schema.rs b/query-engine/schema/src/query_schema.rs index e677b10e75a5..ff25c17159fa 100644 --- a/query-engine/schema/src/query_schema.rs +++ b/query-engine/schema/src/query_schema.rs @@ -2,9 +2,9 @@ use crate::{IdentifierType, ObjectType, OutputField}; use psl::{ can_support_relation_load_strategy, datamodel_connector::{Connector, ConnectorCapabilities, ConnectorCapability, JoinStrategySupport, RelationMode}, - has_capability, PreviewFeature, PreviewFeatures, + has_capability, parser_database as db, PreviewFeature, PreviewFeatures, }; -use query_structure::{ast, InternalDataModel}; +use query_structure::InternalDataModel; use std::{collections::HashMap, fmt}; #[derive(Clone, Debug, Hash, Eq, PartialEq)] @@ -218,7 +218,7 @@ impl QuerySchema { /// Designates a specific top-level operation on a corresponding model. 
#[derive(Debug, Clone, PartialEq, Hash, Eq)] pub struct QueryInfo { - pub model: Option, + pub model: Option, pub tag: QueryTag, } diff --git a/renovate.json b/renovate.json index 83ea8d3b2950..6490ec42b53f 100644 --- a/renovate.json +++ b/renovate.json @@ -11,57 +11,35 @@ "sbt": { "enabled": false }, - "schedule": [ - "every weekend" - ], + "schedule": ["every weekend"], "minimumReleaseAge": "7 days", "rangeStrategy": "pin", "separateMinorPatch": true, "configMigration": true, - "ignoreDeps": [ - "query-engine-wasm-baseline", - ], + "ignoreDeps": ["query-engine-wasm-baseline"], "packageRules": [ { - "matchFileNames": [ - "docker-compose.yml" - ], - "matchUpdateTypes": [ - "minor", - "major" - ], + "matchFileNames": ["docker-compose.yml"], + "matchUpdateTypes": ["minor", "major"], "enabled": false }, { "groupName": "Weekly vitess docker image version update", - "matchPackageNames": [ - "vitess/vttestserver" - ], - "schedule": [ - "before 7am on Wednesday" - ] + "matchPackageNames": ["vitess/vttestserver"], + "schedule": ["before 7am on Wednesday"] }, { "groupName": "Prisma Driver Adapters", - "matchPackageNames": [ - "@prisma/driver-adapter-utils" - ], - "matchPackagePrefixes": [ - "@prisma/adapter" - ], - "schedule": [ - "at any time" - ] + "matchPackageNames": ["@prisma/driver-adapter-utils"], + "matchPackagePrefixes": ["@prisma/adapter"], + "schedule": ["at any time"] }, { "groupName": "Driver Adapters directory", "matchFileNames": ["query-engine/driver-adapters/**"] }, { - "matchPackageNames": [ - "node", - "pnpm" - ], + "matchPackageNames": ["node", "pnpm"], "enabled": false } ] diff --git a/schema-engine/connectors/schema-connector/src/introspection_context.rs b/schema-engine/connectors/schema-connector/src/introspection_context.rs index 54f197935bd3..62f116e5ca94 100644 --- a/schema-engine/connectors/schema-connector/src/introspection_context.rs +++ b/schema-engine/connectors/schema-connector/src/introspection_context.rs @@ -38,13 +38,14 @@ impl 
IntrospectionContext { ) -> Self { let mut config_blocks = String::new(); - for source in previous_schema.db.ast().sources() { - config_blocks.push_str(&previous_schema.db.source()[source.span.start..source.span.end]); + for source in previous_schema.db.ast_assert_single().sources() { + config_blocks.push_str(&previous_schema.db.source_assert_single()[source.span.start..source.span.end]); config_blocks.push('\n'); } - for generator in previous_schema.db.ast().generators() { - config_blocks.push_str(&previous_schema.db.source()[generator.span.start..generator.span.end]); + for generator in previous_schema.db.ast_assert_single().generators() { + config_blocks + .push_str(&previous_schema.db.source_assert_single()[generator.span.start..generator.span.end]); config_blocks.push('\n'); } @@ -70,7 +71,7 @@ impl IntrospectionContext { /// The string source of the PSL schema file. pub fn schema_string(&self) -> &str { - self.previous_schema.db.source() + self.previous_schema.db.source_assert_single() } /// The configuration block of the PSL schema file. 
diff --git a/schema-engine/connectors/sql-schema-connector/src/introspection/datamodel_calculator/context.rs b/schema-engine/connectors/sql-schema-connector/src/introspection/datamodel_calculator/context.rs index 32f2ed0a5893..04dcfa7345de 100644 --- a/schema-engine/connectors/sql-schema-connector/src/introspection/datamodel_calculator/context.rs +++ b/schema-engine/connectors/sql-schema-connector/src/introspection/datamodel_calculator/context.rs @@ -11,7 +11,7 @@ use crate::introspection::{ use psl::{ builtin_connectors::*, datamodel_connector::Connector, - parser_database::{ast, walkers}, + parser_database::{self as db, walkers}, Configuration, PreviewFeature, }; use quaint::prelude::SqlFamily; @@ -363,11 +363,11 @@ impl<'a> DatamodelCalculatorContext<'a> { self.introspection_map.relation_names.m2m_relation_name(id) } - pub(crate) fn table_missing_for_model(&self, id: &ast::ModelId) -> bool { + pub(crate) fn table_missing_for_model(&self, id: &db::ModelId) -> bool { self.introspection_map.missing_tables_for_previous_models.contains(id) } - pub(crate) fn view_missing_for_model(&self, id: &ast::ModelId) -> bool { + pub(crate) fn view_missing_for_model(&self, id: &db::ModelId) -> bool { self.introspection_map.missing_views_for_previous_models.contains(id) } diff --git a/schema-engine/connectors/sql-schema-connector/src/introspection/introspection_map.rs b/schema-engine/connectors/sql-schema-connector/src/introspection/introspection_map.rs index 099408e1dcf7..5fd5019213ac 100644 --- a/schema-engine/connectors/sql-schema-connector/src/introspection/introspection_map.rs +++ b/schema-engine/connectors/sql-schema-connector/src/introspection/introspection_map.rs @@ -7,7 +7,7 @@ use crate::introspection::{ introspection_pair::RelationFieldDirection, sanitize_datamodel_names, }; use psl::{ - parser_database::{self, ast, ScalarFieldId}, + parser_database::{self as db, ScalarFieldId}, PreviewFeature, }; use relation_names::RelationNames; @@ -24,15 +24,15 @@ pub(crate) use 
relation_names::RelationName; /// schema. #[derive(Default)] pub(crate) struct IntrospectionMap<'a> { - pub(crate) existing_enums: HashMap, - pub(crate) existing_models: HashMap, - pub(crate) existing_views: HashMap, - pub(crate) missing_tables_for_previous_models: HashSet, - pub(crate) missing_views_for_previous_models: HashSet, + pub(crate) existing_enums: HashMap, + pub(crate) existing_models: HashMap, + pub(crate) existing_views: HashMap, + pub(crate) missing_tables_for_previous_models: HashSet, + pub(crate) missing_views_for_previous_models: HashSet, pub(crate) existing_model_scalar_fields: HashMap, pub(crate) existing_view_scalar_fields: HashMap, - pub(crate) existing_inline_relations: HashMap, - pub(crate) existing_m2m_relations: HashMap, + pub(crate) existing_inline_relations: HashMap, + pub(crate) existing_m2m_relations: HashMap, pub(crate) relation_names: RelationNames<'a>, pub(crate) inline_relation_positions: Vec<(sql::TableId, sql::ForeignKeyId, RelationFieldDirection)>, pub(crate) m2m_relation_positions: Vec<(sql::TableId, sql::ForeignKeyId, RelationFieldDirection)>, diff --git a/schema-engine/connectors/sql-schema-connector/src/introspection/introspection_pair/enumerator.rs b/schema-engine/connectors/sql-schema-connector/src/introspection/introspection_pair/enumerator.rs index b14c2c51ea30..29fff1f18c36 100644 --- a/schema-engine/connectors/sql-schema-connector/src/introspection/introspection_pair/enumerator.rs +++ b/schema-engine/connectors/sql-schema-connector/src/introspection/introspection_pair/enumerator.rs @@ -1,8 +1,8 @@ use super::IntrospectionPair; use crate::introspection::sanitize_datamodel_names::{EnumVariantName, ModelName}; use psl::{ - parser_database::walkers, - schema_ast::ast::{self, WithDocumentation}, + parser_database::{self as db, walkers}, + schema_ast::ast::WithDocumentation, }; use sql_schema_describer as sql; use std::borrow::Cow; @@ -51,7 +51,7 @@ impl<'a> EnumPair<'a> { /// The position of the enum from the PSL, if 
existing. Used for /// sorting the enums in the final introspected data model. - pub(crate) fn previous_position(self) -> Option { + pub(crate) fn previous_position(self) -> Option { self.previous.map(|e| e.id) } diff --git a/schema-engine/connectors/sql-schema-connector/src/introspection/introspection_pair/model.rs b/schema-engine/connectors/sql-schema-connector/src/introspection/introspection_pair/model.rs index 13f3b78f88e0..0e907fdbefcd 100644 --- a/schema-engine/connectors/sql-schema-connector/src/introspection/introspection_pair/model.rs +++ b/schema-engine/connectors/sql-schema-connector/src/introspection/introspection_pair/model.rs @@ -1,7 +1,7 @@ use psl::{ datamodel_connector::walker_ext_traits::IndexWalkerExt, - parser_database::walkers, - schema_ast::ast::{self, WithDocumentation}, + parser_database::{self as db, walkers}, + schema_ast::ast::WithDocumentation, }; use sql::postgres::PostgresSchemaExt; use sql_schema_describer as sql; @@ -18,7 +18,7 @@ pub(crate) type ModelPair<'a> = IntrospectionPair<'a, Option ModelPair<'a> { /// The position of the model from the PSL, if existing. Used for /// sorting the models in the final introspected data model. 
- pub(crate) fn previous_position(self) -> Option { + pub(crate) fn previous_position(self) -> Option { self.previous.map(|m| m.id) } diff --git a/schema-engine/connectors/sql-schema-connector/src/introspection/introspection_pair/view.rs b/schema-engine/connectors/sql-schema-connector/src/introspection/introspection_pair/view.rs index e5b58ebd3cf3..ea7ac6cd30ca 100644 --- a/schema-engine/connectors/sql-schema-connector/src/introspection/introspection_pair/view.rs +++ b/schema-engine/connectors/sql-schema-connector/src/introspection/introspection_pair/view.rs @@ -1,12 +1,10 @@ -use std::borrow::Cow; - +use super::{IdPair, IndexPair, IntrospectionPair, RelationFieldPair, ScalarFieldPair}; use psl::{ - parser_database::walkers, - schema_ast::ast::{self, WithDocumentation}, + parser_database::{self as db, walkers}, + schema_ast::ast::WithDocumentation, }; use sql_schema_describer as sql; - -use super::{IdPair, IndexPair, IntrospectionPair, RelationFieldPair, ScalarFieldPair}; +use std::borrow::Cow; /// Comparing a PSL view (which currently utilizes the /// model structure due to them being completely the same @@ -16,7 +14,7 @@ pub(crate) type ViewPair<'a> = IntrospectionPair<'a, Option ViewPair<'a> { /// The position of the view from the PSL, if existing. Used for /// sorting the views in the final introspected data model. 
- pub(crate) fn previous_position(self) -> Option { + pub(crate) fn previous_position(self) -> Option { self.previous.map(|m| m.id) } diff --git a/schema-engine/connectors/sql-schema-connector/src/introspection/rendering/enums.rs b/schema-engine/connectors/sql-schema-connector/src/introspection/rendering/enums.rs index fe8f2a96807d..11c87ab7de09 100644 --- a/schema-engine/connectors/sql-schema-connector/src/introspection/rendering/enums.rs +++ b/schema-engine/connectors/sql-schema-connector/src/introspection/rendering/enums.rs @@ -5,11 +5,11 @@ use crate::introspection::{ sanitize_datamodel_names, }; use datamodel_renderer::datamodel as renderer; -use psl::parser_database::ast; +use psl::parser_database as db; /// Render all enums. pub(super) fn render<'a>(ctx: &'a DatamodelCalculatorContext<'a>, rendered: &mut renderer::Datamodel<'a>) { - let mut all_enums: Vec<(Option, renderer::Enum<'_>)> = Vec::new(); + let mut all_enums: Vec<(Option, renderer::Enum<'_>)> = Vec::new(); for pair in ctx.enum_pairs() { all_enums.push((pair.previous_position(), render_enum(pair))) diff --git a/schema-engine/connectors/sql-schema-connector/src/sql_schema_calculator.rs b/schema-engine/connectors/sql-schema-connector/src/sql_schema_calculator.rs index 3b36829cfcf0..5ef3bb69529a 100644 --- a/schema-engine/connectors/sql-schema-connector/src/sql_schema_calculator.rs +++ b/schema-engine/connectors/sql-schema-connector/src/sql_schema_calculator.rs @@ -6,7 +6,7 @@ use crate::{flavour::SqlFlavour, SqlDatabaseSchema}; use psl::{ datamodel_connector::walker_ext_traits::*, parser_database::{ - ast, + self as db, ast, walkers::{ModelWalker, ScalarFieldWalker}, ReferentialAction, ScalarFieldType, ScalarType, SortOrder, }, @@ -61,7 +61,7 @@ fn push_model_tables(ctx: &mut Context<'_>) { .schema .describer_schema .push_table(model.database_name().to_owned(), namespace_id, None); - ctx.model_id_to_table_id.insert(model.model_id(), table_id); + ctx.model_id_to_table_id.insert(model.id, table_id); for 
field in model.scalar_fields() { push_column_for_scalar_field(field, table_id, ctx); @@ -138,8 +138,8 @@ fn push_inline_relations(ctx: &mut Context<'_>) { let relation_field = relation .forward_relation_field() .expect("Expecting a complete relation in sql_schmea_calculator"); - let referencing_model = ctx.model_id_to_table_id[&relation_field.model().model_id()]; - let referenced_model = ctx.model_id_to_table_id[&relation.referenced_model().model_id()]; + let referencing_model = ctx.model_id_to_table_id[&relation_field.model().id]; + let referenced_model = ctx.model_id_to_table_id[&relation.referenced_model().id]; let on_delete_action = relation_field.explicit_on_delete().unwrap_or_else(|| { relation_field.default_on_delete_action( ctx.datamodel.configuration.relation_mode().unwrap_or_default(), @@ -193,9 +193,9 @@ fn push_relation_tables(ctx: &mut Context<'_>) { .take(datamodel.configuration.max_identifier_length()) .collect::(); let model_a = m2m.model_a(); - let model_a_table_id = ctx.model_id_to_table_id[&model_a.model_id()]; + let model_a_table_id = ctx.model_id_to_table_id[&model_a.id]; let model_b = m2m.model_b(); - let model_b_table_id = ctx.model_id_to_table_id[&model_b.model_id()]; + let model_b_table_id = ctx.model_id_to_table_id[&model_b.id]; let model_a_column = m2m.column_a_name(); let model_b_column = m2m.column_b_name(); let model_a_id = model_a.primary_key().unwrap().fields().next().unwrap(); @@ -300,7 +300,7 @@ fn push_relation_tables(ctx: &mut Context<'_>) { if ctx.datamodel.relation_mode().uses_foreign_keys() { let fkid = ctx.schema.describer_schema.push_foreign_key( Some(model_a_fk_name), - [table_id, ctx.model_id_to_table_id[&model_a.model_id()]], + [table_id, ctx.model_id_to_table_id[&model_a.id]], [flavour.m2m_foreign_key_action(model_a, model_b); 2], ); @@ -319,7 +319,7 @@ fn push_relation_tables(ctx: &mut Context<'_>) { let fkid = ctx.schema.describer_schema.push_foreign_key( Some(model_b_fk_name), - [table_id, 
ctx.model_id_to_table_id[&model_b.model_id()]], + [table_id, ctx.model_id_to_table_id[&model_b.id]], [flavour.m2m_foreign_key_action(model_a, model_b); 2], ); @@ -354,7 +354,7 @@ fn push_column_for_scalar_field(field: ScalarFieldWalker<'_>, table_id: sql::Tab fn push_column_for_model_enum_scalar_field( field: ScalarFieldWalker<'_>, - enum_id: ast::EnumId, + enum_id: db::EnumId, table_id: sql::TableId, ctx: &mut Context<'_>, ) { @@ -582,8 +582,8 @@ pub(crate) struct Context<'a> { schema: &'a mut SqlDatabaseSchema, flavour: &'a dyn SqlFlavour, schemas: HashMap<&'a str, sql::NamespaceId>, - model_id_to_table_id: HashMap, - enum_ids: HashMap, + model_id_to_table_id: HashMap, + enum_ids: HashMap, } impl Context<'_> { diff --git a/schema-engine/connectors/sql-schema-connector/src/sql_schema_calculator/sql_schema_calculator_flavour/mssql.rs b/schema-engine/connectors/sql-schema-connector/src/sql_schema_calculator/sql_schema_calculator_flavour/mssql.rs index 51a8f5ef54be..7e6b94a761ab 100644 --- a/schema-engine/connectors/sql-schema-connector/src/sql_schema_calculator/sql_schema_calculator_flavour/mssql.rs +++ b/schema-engine/connectors/sql-schema-connector/src/sql_schema_calculator/sql_schema_calculator_flavour/mssql.rs @@ -27,7 +27,7 @@ impl SqlSchemaCalculatorFlavour for MssqlFlavour { let mut data = MssqlSchemaExt::default(); for model in context.datamodel.db.walk_models() { - let table_id = context.model_id_to_table_id[&model.model_id()]; + let table_id = context.model_id_to_table_id[&model.id]; let table = context.schema.walk(table_id); if model .primary_key() diff --git a/schema-engine/connectors/sql-schema-connector/src/sql_schema_calculator/sql_schema_calculator_flavour/postgres.rs b/schema-engine/connectors/sql-schema-connector/src/sql_schema_calculator/sql_schema_calculator_flavour/postgres.rs index 656fe432a970..c2193252be99 100644 --- a/schema-engine/connectors/sql-schema-connector/src/sql_schema_calculator/sql_schema_calculator_flavour/postgres.rs +++ 
b/schema-engine/connectors/sql-schema-connector/src/sql_schema_calculator/sql_schema_calculator_flavour/postgres.rs @@ -69,7 +69,7 @@ impl SqlSchemaCalculatorFlavour for PostgresFlavour { } for model in db.walk_models() { - let table_id = context.model_id_to_table_id[&model.model_id()]; + let table_id = context.model_id_to_table_id[&model.id]; // Add index algorithms and opclasses. for index in model.indexes() { diff --git a/schema-engine/core/src/state.rs b/schema-engine/core/src/state.rs index 9143ef1fb767..c376cb300fba 100644 --- a/schema-engine/core/src/state.rs +++ b/schema-engine/core/src/state.rs @@ -177,7 +177,8 @@ impl EngineState { return Err(ConnectorError::from_msg("Missing --datamodel".to_owned())); }; - self.with_connector_for_schema(schema.db.source(), None, f).await + self.with_connector_for_schema(schema.db.source_assert_single(), None, f) + .await } } diff --git a/schema-engine/sql-introspection-tests/tests/referential_actions/mysql.rs b/schema-engine/sql-introspection-tests/tests/referential_actions/mysql.rs index 7e184686c146..3f7ec20f5423 100644 --- a/schema-engine/sql-introspection-tests/tests/referential_actions/mysql.rs +++ b/schema-engine/sql-introspection-tests/tests/referential_actions/mysql.rs @@ -55,7 +55,7 @@ async fn introspect_set_default_should_warn(api: &mut TestApi) -> TestResult { let warning_messages = schema .diagnostics - .warnings_to_pretty_string("schema.prisma", schema.db.source()); + .warnings_to_pretty_string("schema.prisma", schema.db.source_assert_single()); let expected_validation = expect![[r#" warning: MySQL does not actually support the `SetDefault` referential action, so using it may result in unexpected errors. 
Read more at https://pris.ly/d/mysql-set-default  diff --git a/schema-engine/sql-schema-describer/src/sqlite.rs b/schema-engine/sql-schema-describer/src/sqlite.rs index 1f28958605a2..51f75a90343a 100644 --- a/schema-engine/sql-schema-describer/src/sqlite.rs +++ b/schema-engine/sql-schema-describer/src/sqlite.rs @@ -162,7 +162,7 @@ impl<'a> SqlSchemaDescriber<'a> { (name, r#type, definition) }) - .filter(|(table_name, _, _)| !is_system_table(table_name)); + .filter(|(table_name, _, _)| !is_table_ignored(table_name)); let mut map = IndexMap::default(); @@ -603,18 +603,22 @@ fn unquote_sqlite_string_default(s: &str) -> Cow<'_, str> { } } -/// Returns whether a table is one of the SQLite system tables. -fn is_system_table(table_name: &str) -> bool { - SQLITE_SYSTEM_TABLES - .iter() - .any(|system_table| table_name == *system_table) +/// Returns whether a table is one of the SQLite system tables or a Cloudflare D1 specific table. +fn is_table_ignored(table_name: &str) -> bool { + SQLITE_IGNORED_TABLES.iter().any(|table| table_name == *table) } /// See https://www.sqlite.org/fileformat2.html -const SQLITE_SYSTEM_TABLES: &[&str] = &[ +/// + Cloudflare D1 specific tables +const SQLITE_IGNORED_TABLES: &[&str] = &[ + // SQLite system tables "sqlite_sequence", "sqlite_stat1", "sqlite_stat2", "sqlite_stat3", "sqlite_stat4", + // Cloudflare D1 specific tables + "_cf_KV", + // This is the default but can be configured by the user + "d1_migrations", ];