Merge remote-tracking branch 'upstream/main' into feat/ulid
jacek-prisma committed Dec 18, 2024
2 parents 8bf3a3e + 3dc72ed commit 8f62b60
Showing 29 changed files with 355 additions and 98 deletions.
@@ -43,9 +43,9 @@ const CAPABILITIES: ConnectorCapabilities = enumflags2::make_bitflags!(Connector
Json |
JsonFiltering |
JsonFilteringArrayPath |
JsonArrayContains |
NamedPrimaryKeys |
NamedForeignKeys |
SqlQueryRaw |
RelationFieldsInArbitraryOrder |
ScalarLists |
UpdateableId |
1 change: 0 additions & 1 deletion psl/psl-core/src/builtin_connectors/mongodb.rs
@@ -27,7 +27,6 @@ const CAPABILITIES: ConnectorCapabilities = enumflags2::make_bitflags!(Connector
CompositeTypes |
FullTextIndex |
SortOrderInFullTextIndex |
MongoDbQueryRaw |
DefaultValueAuto |
TwoWayEmbeddedManyToManyRelation |
UndefinedType |
@@ -37,7 +37,6 @@ const CAPABILITIES: ConnectorCapabilities = enumflags2::make_bitflags!(Connector
NamedDefaultValues |
NamedForeignKeys |
NamedPrimaryKeys |
SqlQueryRaw |
ReferenceCycleDetection |
UpdateableId |
PrimaryKeySortOrderDefinition |
@@ -38,11 +38,11 @@ pub const CAPABILITIES: ConnectorCapabilities = enumflags2::make_bitflags!(Conne
JsonFiltering |
JsonFilteringJsonPath |
JsonFilteringAlphanumeric |
JsonArrayContains |
CreateManyWriteableAutoIncId |
AutoIncrement |
CompoundIds |
AnyId |
SqlQueryRaw |
NamedForeignKeys |
AdvancedJsonNullability |
IndexColumnLengthPrefixing |
@@ -47,10 +47,10 @@ pub const CAPABILITIES: ConnectorCapabilities = enumflags2::make_bitflags!(Conne
JsonFilteringArrayPath |
JsonFilteringAlphanumeric |
JsonFilteringAlphanumericFieldRef |
JsonArrayContains |
MultiSchema |
NamedForeignKeys |
NamedPrimaryKeys |
SqlQueryRaw |
RelationFieldsInArbitraryOrder |
ScalarLists |
JsonLists |
@@ -14,7 +14,6 @@ pub const CAPABILITIES: ConnectorCapabilities = enumflags2::make_bitflags!(Conne
AnyId |
AutoIncrement |
CompoundIds |
SqlQueryRaw |
RelationFieldsInArbitraryOrder |
UpdateableId |
ImplicitManyToManyRelation |
@@ -30,7 +29,11 @@ pub const CAPABILITIES: ConnectorCapabilities = enumflags2::make_bitflags!(Conne
UpdateReturning |
SupportsFiltersOnRelationsWithoutJoins |
CreateMany |
CreateManyWriteableAutoIncId
CreateManyWriteableAutoIncId |
Json |
JsonFiltering |
JsonFilteringJsonPath |
AdvancedJsonNullability
});

pub struct SqliteDatamodelConnector;
10 changes: 10 additions & 0 deletions psl/psl-core/src/datamodel_connector.rs
@@ -302,6 +302,16 @@ pub enum Flavour {
Sqlite,
}

impl Flavour {
pub fn is_sql(&self) -> bool {
!self.is_mongo()
}

pub fn is_mongo(&self) -> bool {
matches!(self, Flavour::Mongo)
}
}

impl FromStr for Flavour {
type Err = String;
fn from_str(s: &str) -> Result<Self, Self::Err> {
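
The two helpers added above split connector flavours into MongoDB and everything else. A minimal usage sketch (hypothetical call site, not part of this commit; only `Flavour::is_mongo`/`is_sql` come from the diff):

fn describe(flavour: &Flavour) -> &'static str {
    if flavour.is_mongo() {
        "document database"
    } else {
        // is_sql() is defined above as the negation of is_mongo(),
        // so every non-Mongo flavour is treated as SQL here.
        "SQL database"
    }
}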
3 changes: 1 addition & 2 deletions psl/psl-core/src/datamodel_connector/capabilities.rs
@@ -83,10 +83,9 @@ capabilities!(
JsonFilteringArrayPath, // Connector supports filtering json fields using array path (eg: postgres).
JsonFilteringAlphanumeric, // Connector supports alphanumeric json filters (gt, gte, lt, lte...).
JsonFilteringAlphanumericFieldRef, // Connector supports alphanumeric json filters against a json field reference.
JsonArrayContains, // Connector supports the contains operator for json fields.
CompoundIds,
AnyId, // Any (or combination of) uniques and not only id fields can constitute an id for a model.
SqlQueryRaw,
MongoDbQueryRaw,
NativeFullTextSearch,
NativeFullTextSearchWithoutIndex,
NativeFullTextSearchWithIndex,
2 changes: 1 addition & 1 deletion psl/psl/tests/base/base_types.rs
@@ -184,7 +184,7 @@ fn json_type_must_work_for_some_connectors() {
.assert_has_scalar_field("json")
.assert_scalar_type(ScalarType::Json);

let error = parse_unwrap_err(&format!("{SQLITE_SOURCE}\n{dml}"));
let error = parse_unwrap_err(&format!("{MSSQL_SOURCE}\n{dml}"));

let expectation = expect![[r#"
error: Error validating field `json` in model `User`: Field `json` in model `User` can't be of type Json. The current connector does not support the Json type.
45 changes: 16 additions & 29 deletions psl/psl/tests/capabilities/sqlite.rs
@@ -36,6 +36,22 @@ fn enum_support() {
expectation.assert_eq(&error);
}

#[test]
fn json_support() {
let dml = indoc! {r#"
datasource db {
provider = "sqlite"
url = "file:test.db"
}
model User {
id Int @id
data Json
}
"#};

assert_valid(dml);
}

#[test]
fn scalar_list_support() {
let dml = indoc! {r#"
@@ -108,35 +124,6 @@ fn unique_index_names_support() {
expectation.assert_eq(&error);
}

#[test]
fn json_support() {
let dml = indoc! {r#"
datasource db {
provider = "sqlite"
url = "file:test.db"
}
model User {
id Int @id
data Json
}
"#};

let error = parse_unwrap_err(dml);

let expectation = expect![[r#"
error: Error validating field `data` in model `User`: Field `data` in model `User` can't be of type Json. The current connector does not support the Json type.
--> schema.prisma:8
 | 
 7 |  id Int @id
 8 |  data Json
 9 | }
 | 
"#]];

expectation.assert_eq(&error);
}

#[test]
fn non_unique_relation_criteria_support() {
let dml = indoc! {r#"
14 changes: 7 additions & 7 deletions psl/psl/tests/common/mod.rs
@@ -65,13 +65,6 @@ pub(crate) fn assert_valid(schema: &str) {
}
}

pub(crate) const SQLITE_SOURCE: &str = r#"
datasource db {
provider = "sqlite"
url = "file:dev.db"
}
"#;

pub(crate) const POSTGRES_SOURCE: &str = r#"
datasource db {
provider = "postgres"
@@ -85,3 +78,10 @@ pub(crate) const MYSQL_SOURCE: &str = r#"
url = "mysql://localhost:3306"
}
"#;

pub(crate) const MSSQL_SOURCE: &str = r#"
datasource db {
provider = "sqlserver"
url = "jdbc:sqlserver://localhost:3306"
}
"#;
21 changes: 20 additions & 1 deletion quaint/src/connector/sqlite/native/conversion.rs
@@ -119,7 +119,7 @@ impl TypeIdentifier for &Column<'_> {
}

fn is_json(&self) -> bool {
false
matches!(self.decl_type(), Some("JSONB") | Some("jsonb"))
}

fn is_enum(&self) -> bool {
@@ -148,6 +148,7 @@ impl GetRow for SqliteRow<'_> {
c if c.is_datetime() => Value::null_datetime(),
c if c.is_date() => Value::null_date(),
c if c.is_bool() => Value::null_boolean(),
c if c.is_json() => Value::null_json(),
c => match c.decl_type() {
Some(n) => {
let msg = format!("Value {n} not supported");
@@ -186,6 +187,7 @@ impl GetRow for SqliteRow<'_> {
return Err(Error::builder(kind).build());
}
}
c if c.is_json() => Value::json(serde_json::Value::Number(serde_json::Number::from(i))),
// NOTE: When SQLite does not know what type the return is (for example at explicit values and RETURNING statements) we will 'assume' int64
_ => Value::int64(i),
}
@@ -196,6 +198,9 @@ impl GetRow for SqliteRow<'_> {

Value::numeric(BigDecimal::from_str(&f.to_string()).unwrap())
}
ValueRef::Real(f) if column.is_json() => Value::json(serde_json::Value::Number(
serde_json::Number::from_f64(f).expect("JSON real should always be convertible to serde Number"),
)),
ValueRef::Real(f) => Value::double(f),
ValueRef::Text(bytes) if column.is_datetime() => {
let parse_res = std::str::from_utf8(bytes).map_err(|_| {
@@ -222,6 +227,20 @@ impl GetRow for SqliteRow<'_> {
})
})?
}
ValueRef::Text(bytes) if column.is_json() => {
let json_str = std::str::from_utf8(bytes).map_err(|_| {
Error::builder(ErrorKind::ConversionError(
"Failed to read contents of SQLite JSON column as UTF-8".into(),
))
.build()
})?;

let json_value: serde_json::Value = serde_json::from_str(json_str).map_err(|serde_error| {
Error::builder(ErrorKind::ConversionError(serde_error.to_string().into())).build()
})?;

Value::json(json_value)
}
ValueRef::Text(bytes) => Value::text(String::from_utf8(bytes.to_vec())?),
ValueRef::Blob(bytes) => Value::bytes(bytes.to_owned()),
};
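
The new `ValueRef::Text` branch above decodes the bytes of a JSONB-declared column as UTF-8 and then parses them with serde_json, turning both failure modes into conversion errors. A standalone sketch of that path (illustrative only; `text_cell_to_json` is not a quaint function):

fn text_cell_to_json(bytes: &[u8]) -> Result<serde_json::Value, String> {
    // UTF-8 decode first, then JSON parse, mirroring the branch above.
    let json_str = std::str::from_utf8(bytes)
        .map_err(|_| "Failed to read contents of SQLite JSON column as UTF-8".to_string())?;
    serde_json::from_str(json_str).map_err(|serde_error| serde_error.to_string())
}

// Example: text_cell_to_json(br#"{"ok":true}"#) yields Ok(json!({"ok": true})).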
12 changes: 6 additions & 6 deletions quaint/src/visitor/mysql.rs
@@ -407,7 +407,7 @@ impl<'a> Visitor<'a> for Mysql<'a> {
}
}

#[cfg(any(feature = "postgresql", feature = "mysql"))]
#[cfg(any(feature = "postgresql", feature = "mysql", feature = "sqlite"))]
fn visit_json_extract(&mut self, json_extract: JsonExtract<'a>) -> visitor::Result {
if json_extract.extract_as_string {
self.write("JSON_UNQUOTE(")?;
@@ -431,7 +431,7 @@ impl<'a> Visitor<'a> for Mysql<'a> {
Ok(())
}

#[cfg(any(feature = "postgresql", feature = "mysql"))]
#[cfg(any(feature = "postgresql", feature = "mysql", feature = "sqlite"))]
fn visit_json_array_contains(&mut self, left: Expression<'a>, right: Expression<'a>, not: bool) -> visitor::Result {
self.write("JSON_CONTAINS(")?;
self.visit_expression(left)?;
@@ -446,7 +446,7 @@ impl<'a> Visitor<'a> for Mysql<'a> {
Ok(())
}

#[cfg(any(feature = "postgresql", feature = "mysql"))]
#[cfg(any(feature = "postgresql", feature = "mysql", feature = "sqlite"))]
fn visit_json_type_equals(&mut self, left: Expression<'a>, json_type: JsonType<'a>, not: bool) -> visitor::Result {
self.write("(")?;
self.write("JSON_TYPE")?;
@@ -558,7 +558,7 @@ impl<'a> Visitor<'a> for Mysql<'a> {
Ok(())
}

#[cfg(any(feature = "postgresql", feature = "mysql"))]
#[cfg(any(feature = "postgresql", feature = "mysql", feature = "sqlite"))]
fn visit_json_extract_last_array_item(&mut self, extract: JsonExtractLastArrayElem<'a>) -> visitor::Result {
self.write("JSON_EXTRACT(")?;
self.visit_expression(*extract.expr.clone())?;
@@ -571,7 +571,7 @@ impl<'a> Visitor<'a> for Mysql<'a> {
Ok(())
}

#[cfg(any(feature = "postgresql", feature = "mysql"))]
#[cfg(any(feature = "postgresql", feature = "mysql", feature = "sqlite"))]
fn visit_json_extract_first_array_item(&mut self, extract: JsonExtractFirstArrayElem<'a>) -> visitor::Result {
self.write("JSON_EXTRACT(")?;
self.visit_expression(*extract.expr)?;
@@ -582,7 +582,7 @@ impl<'a> Visitor<'a> for Mysql<'a> {
Ok(())
}

#[cfg(any(feature = "postgresql", feature = "mysql"))]
#[cfg(any(feature = "postgresql", feature = "mysql", feature = "sqlite"))]
fn visit_json_unquote(&mut self, json_unquote: JsonUnquote<'a>) -> visitor::Result {
self.write("JSON_UNQUOTE(")?;
self.visit_expression(*json_unquote.expr)?;
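
The only change in the MySQL visitor hunks above (and in the Postgres visitor below) is that the `cfg` attributes now also list the `sqlite` feature, so these JSON visitor methods are compiled when any of the three features is enabled rather than only `postgresql` or `mysql`. The attribute itself is standard Cargo feature gating (illustrative snippet, not quaint code):

#[cfg(any(feature = "postgresql", feature = "mysql", feature = "sqlite"))]
fn json_support_compiled() -> bool {
    // Present in the build whenever at least one of the listed features is on.
    true
}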
10 changes: 5 additions & 5 deletions quaint/src/visitor/postgres.rs
@@ -406,7 +406,7 @@ impl<'a> Visitor<'a> for Postgres<'a> {
Ok(())
}

#[cfg(any(feature = "postgresql", feature = "mysql"))]
#[cfg(any(feature = "postgresql", feature = "mysql", feature = "sqlite"))]
fn visit_json_extract(&mut self, json_extract: JsonExtract<'a>) -> visitor::Result {
match json_extract.path {
JsonPath::String(_) => panic!("JSON path string notation is not supported for Postgres"),
@@ -445,7 +445,7 @@ impl<'a> Visitor<'a> for Postgres<'a> {
Ok(())
}

#[cfg(any(feature = "postgresql", feature = "mysql"))]
#[cfg(any(feature = "postgresql", feature = "mysql", feature = "sqlite"))]
fn visit_json_unquote(&mut self, json_unquote: JsonUnquote<'a>) -> visitor::Result {
self.write("(")?;
self.visit_expression(*json_unquote.expr)?;
@@ -472,7 +472,7 @@ impl<'a> Visitor<'a> for Postgres<'a> {
Ok(())
}

#[cfg(any(feature = "postgresql", feature = "mysql"))]
#[cfg(any(feature = "postgresql", feature = "mysql", feature = "sqlite"))]
fn visit_json_extract_last_array_item(&mut self, extract: JsonExtractLastArrayElem<'a>) -> visitor::Result {
self.write("(")?;
self.visit_expression(*extract.expr)?;
@@ -482,7 +482,7 @@ impl<'a> Visitor<'a> for Postgres<'a> {
Ok(())
}

#[cfg(any(feature = "postgresql", feature = "mysql"))]
#[cfg(any(feature = "postgresql", feature = "mysql", feature = "sqlite"))]
fn visit_json_extract_first_array_item(&mut self, extract: JsonExtractFirstArrayElem<'a>) -> visitor::Result {
self.write("(")?;
self.visit_expression(*extract.expr)?;
Expand All @@ -492,7 +492,7 @@ impl<'a> Visitor<'a> for Postgres<'a> {
Ok(())
}

#[cfg(any(feature = "postgresql", feature = "mysql"))]
#[cfg(any(feature = "postgresql", feature = "mysql", feature = "sqlite"))]
fn visit_json_type_equals(&mut self, left: Expression<'a>, json_type: JsonType<'a>, not: bool) -> visitor::Result {
self.write("JSONB_TYPEOF")?;
self.write("(")?;