From 6ae51ef595db4d704690196b5168aae535dbb8ba Mon Sep 17 00:00:00 2001 From: yliuuuu Date: Thu, 25 Jul 2024 10:16:17 -0700 Subject: [PATCH] fix matching logic in ddl --- .../planner/internal/transforms/AstToPlan.kt | 1 + .../planner/internal/typer/DdlUtils.kt | 403 +++++++++--------- .../planner/internal/typer/PlanTyper.kt | 59 ++- .../planner/internal/ddl/DDLTestBase.kt | 168 ++++---- .../partiql/planner/internal/ddl/DDLTests.kt | 29 +- 5 files changed, 355 insertions(+), 305 deletions(-) diff --git a/partiql-planner/src/main/kotlin/org/partiql/planner/internal/transforms/AstToPlan.kt b/partiql-planner/src/main/kotlin/org/partiql/planner/internal/transforms/AstToPlan.kt index ed37ea98ae..7e707aae05 100644 --- a/partiql-planner/src/main/kotlin/org/partiql/planner/internal/transforms/AstToPlan.kt +++ b/partiql-planner/src/main/kotlin/org/partiql/planner/internal/transforms/AstToPlan.kt @@ -64,6 +64,7 @@ internal object AstToPlan { } fun convert(binder: Binder): PlanIdentifier.Symbol = when (binder.isRegular) { + // This should be a dead branch true -> identifierSymbol(binder.symbol, PlanIdentifier.CaseSensitivity.INSENSITIVE) false -> identifierSymbol(binder.symbol, PlanIdentifier.CaseSensitivity.SENSITIVE) } diff --git a/partiql-planner/src/main/kotlin/org/partiql/planner/internal/typer/DdlUtils.kt b/partiql-planner/src/main/kotlin/org/partiql/planner/internal/typer/DdlUtils.kt index ee35465583..320c8ff6e8 100644 --- a/partiql-planner/src/main/kotlin/org/partiql/planner/internal/typer/DdlUtils.kt +++ b/partiql-planner/src/main/kotlin/org/partiql/planner/internal/typer/DdlUtils.kt @@ -35,241 +35,246 @@ import org.partiql.types.TimestampType import org.partiql.types.TupleConstraint import org.partiql.value.PartiQLTimestampExperimental -internal class ShapeNormalizer() { - - fun normalize(collectionType: Type.Collection, prefix: String) = - Normalizer.visitTypeCollection(collectionType, Ctx(0, prefix)).first - - internal data class Ctx( - var count: Int, - val prefix: String - ) - - // The normalizer will only lift constraint up, instead of push constraint down - // this is cause the nature of a struct or collection constraint might be declared at attribute level, - // but an attribute level constraint will never be declared at struct/collection level. - private object Normalizer : PlanBaseVisitor>, Ctx>() { - override fun defaultReturn(node: PlanNode, ctx: Ctx): Pair> = throw IllegalArgumentException("Unsupported feature during shape normalization") - - override fun visitType(node: Type, ctx: Ctx): Pair> = - when (node) { - is Type.Atomic -> node to emptyList() - is Type.Collection -> visitTypeCollection(node, ctx) - is Type.Record -> visitTypeRecord(node, ctx) +internal object DdlUtils { + + // TODO: Should Lvalue in the plan just use a string? 
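+    // lvalue is a declared attribute name (a binder, which is always case sensitive
+    // at this stage); rvalue is a reference to it from a PRIMARY KEY or PARTITION BY
+    // clause. A sensitive reference must match the declaration exactly; an insensitive
+    // reference matches ignoring case.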
+    internal fun match(lvalue: Identifier.Symbol, rvalue: Identifier.Symbol) = when (lvalue.caseSensitivity) {
+        Identifier.CaseSensitivity.SENSITIVE ->
+            when (rvalue.caseSensitivity) {
+                Identifier.CaseSensitivity.SENSITIVE -> lvalue.symbol == rvalue.symbol
+                Identifier.CaseSensitivity.INSENSITIVE -> lvalue.symbol.equals(rvalue.symbol, ignoreCase = true)
+            }
+        Identifier.CaseSensitivity.INSENSITIVE -> TODO("Lvalue in this stage should all be case sensitive")
+    }
+
+    internal class ShapeNormalizer {
+
+        fun normalize(collectionType: Type.Collection, prefix: String) =
+            Normalizer.visitTypeCollection(collectionType, Ctx(0, prefix)).first
+
+        internal data class Ctx(
+            var count: Int,
+            val prefix: String
+        )
+
+        // The normalizer only lifts constraints up; it never pushes constraints down.
+        // This is because a struct or collection constraint may be declared at the attribute level,
+        // but an attribute-level constraint will never be declared at the struct/collection level.
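+        // For example (see the DDLTestBase cases below), given
+        //     CREATE TABLE tbl (a INT4 CHECK (a < 0))
+        // the attribute-level CHECK is lifted to the enclosing record and given the
+        // generated name $_"tbl"_0, while UNIQUE/PRIMARY KEY constraints are lifted
+        // to the enclosing collection.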
+ private object Normalizer : PlanBaseVisitor>, Ctx>() { + override fun defaultReturn(node: PlanNode, ctx: Ctx): Pair> = throw IllegalArgumentException("Unsupported feature during shape normalization") + + override fun visitType(node: Type, ctx: Ctx): Pair> = + when (node) { + is Type.Atomic -> node to emptyList() + is Type.Collection -> visitTypeCollection(node, ctx) + is Type.Record -> visitTypeRecord(node, ctx) + } + + override fun visitTypeCollection(node: Type.Collection, ctx: Ctx): Pair> { + if (node.type == null) { + return typeCollection(null, node.isOrdered, node.constraints) to emptyList() + } else { + val (elementType, constraints) = visitType(node.type, ctx) + val named = constraints.map { it.addNameIfNotExists(ctx) } + return typeCollection(elementType, node.isOrdered, named) to emptyList() } - field } - val named = structConstraints.map { it.addNameIfNotExists(ctx) } + override fun visitTypeRecord(node: Type.Record, ctx: Ctx): Pair> { + val structConstraints = mutableListOf() + val collectionConstraints = mutableListOf() - return typeRecord(fields, named) to collectionConstraints - } + // arrange partition on the struct + node.constraints.partitionTo(structConstraints, collectionConstraints) { + it.definition.isStructConstraint() + } - override fun visitTypeRecordField(node: Type.Record.Field, ctx: Ctx): Pair> { - val (carried, attrConstrs) = node.constraints.partition { it.definition.isStructConstraint() || it.definition.isCollectionConstraint() } - val (type, carriedCollectionConstrs) = visitType(node.type, ctx) - val named = attrConstrs.map { it.addNameIfNotExists(ctx) } - return typeRecordField(node.name, type, named, node.isOptional, node.comment) to carriedCollectionConstrs + carried - } + val fields = node.fields.map { f -> + val (field, carried) = visitTypeRecordField(f, ctx) + // arrange carried partition + carried.partitionTo(structConstraints, collectionConstraints) { + it.definition.isStructConstraint() + } + field + } - private fun Constraint.addNameIfNotExists(ctx: Ctx): Constraint { - val named = - if (this.name == null) constraint("\$_${ctx.prefix}_${ctx.count}", this.definition) - else this - ctx.count += 1 - return named - } + val named = structConstraints.map { it.addNameIfNotExists(ctx) } - private fun List.partitionTo(container1: MutableList, container2: MutableList, predicate: (T) -> Boolean) { - val (p1, p2) = this.partition(predicate) - container1.addAll(p1) - container2.addAll(p2) - } + return typeRecord(fields, named) to collectionConstraints + } - private fun Constraint.Definition.isStructConstraint() = - when (this) { - is Constraint.Definition.Unique -> false - is Constraint.Definition.Check -> true - is Constraint.Definition.NotNull -> false - is Constraint.Definition.Nullable -> false + override fun visitTypeRecordField(node: Type.Record.Field, ctx: Ctx): Pair> { + val (carried, attrConstrs) = node.constraints.partition { it.definition.isStructConstraint() || it.definition.isCollectionConstraint() } + val (type, carriedCollectionConstrs) = visitType(node.type, ctx) + val named = attrConstrs.map { it.addNameIfNotExists(ctx) } + return typeRecordField(node.name, type, named, node.isOptional, node.comment) to carriedCollectionConstrs + carried } - private fun Constraint.Definition.isCollectionConstraint() = - when (this) { - is Constraint.Definition.Unique -> true - is Constraint.Definition.Check -> false - is Constraint.Definition.NotNull -> false - is Constraint.Definition.Nullable -> false + private fun Constraint.addNameIfNotExists(ctx: Ctx): 
Constraint { + val named = + if (this.name == null) constraint("\$_${ctx.prefix}_${ctx.count}", this.definition) + else this + ctx.count += 1 + return named } - } -} -internal object ConstraintResolver { - fun resolveTable(type: Type.Collection): BagType { - val type = Visitor.visitTypeCollection(type, Ctx(emptyList())).removeNull() as BagType - return type.copy(type.elementType.removeNull(), type.metas, type.constraints) + private fun List.partitionTo(container1: MutableList, container2: MutableList, predicate: (T) -> Boolean) { + val (p1, p2) = this.partition(predicate) + container1.addAll(p1) + container2.addAll(p2) + } + + private fun Constraint.Definition.isStructConstraint() = + when (this) { + is Constraint.Definition.Unique -> false + is Constraint.Definition.Check -> true + is Constraint.Definition.NotNull -> false + is Constraint.Definition.Nullable -> false + } + + private fun Constraint.Definition.isCollectionConstraint() = + when (this) { + is Constraint.Definition.Unique -> true + is Constraint.Definition.Check -> false + is Constraint.Definition.NotNull -> false + is Constraint.Definition.Nullable -> false + } + } } - private fun StaticType.removeNull() = - this.allTypes.filterNot { it is NullType }.toSet().let { StaticType.unionOf(it).flatten() } + internal object ConstraintResolver { + fun resolveTable(type: Type.Collection): BagType { + val type = Visitor.visitTypeCollection(type, Ctx(emptyList())).removeNull() as BagType + return type.copy(type.elementType.removeNull(), type.metas, type.constraints) + } - data class Ctx( - val primaryKey: List - ) + private fun StaticType.removeNull() = + this.allTypes.filterNot { it is NullType }.toSet().let { StaticType.unionOf(it).flatten() } - object Visitor : PlanBaseVisitor() { - override fun defaultReturn(node: PlanNode, ctx: Ctx): StaticType = throw IllegalArgumentException("Unsupported Feature during constraint resolution") + data class Ctx( + val primaryKey: List + ) - override fun visitTypeAtomic(node: Type.Atomic, ctx: Ctx): StaticType = - node.toStaticType() + object Visitor : PlanBaseVisitor() { + override fun defaultReturn(node: PlanNode, ctx: Ctx): StaticType = throw IllegalArgumentException("Unsupported Feature during constraint resolution") - override fun visitTypeCollection(node: Type.Collection, ctx: Ctx): StaticType { - val elementType = node.type ?: return if (node.isOrdered) StaticType.LIST.asNullable() else StaticType.BAG.asNullable() - // only one pk constraint - val pkConstrs = node.constraints.filter { - val def = it.definition - if (def is Constraint.Definition.Unique) { - def.isPrimaryKey - } else false - } - val pkConstr = when (pkConstrs.size) { - 0 -> null - 1 -> pkConstrs.first() - else -> throw IllegalArgumentException("Only one primary key constraint is allowed") - } - val pkAttr = pkConstr?.let { (it.definition as Constraint.Definition.Unique).attributes } ?: emptyList() - // if associated with PK - // the underlying type must be a non null struct - val resolvedElementType = visitType(elementType, Ctx(pkAttr)).let { - if (pkAttr.isNotEmpty()) { - it.removeNull() - } else it - } - val collectionConstraint = node.constraints.mapNotNull { contr -> - val def = contr.definition - if (def is Constraint.Definition.Unique) { - val attributes = def.attributes.map { it.normalize() }.toSet() - if (def.isPrimaryKey) CollectionConstraint.PrimaryKey(attributes) - else CollectionConstraint.UniqueKey(attributes) - } else null - }.toSet() - return if (node.isOrdered) { - ListType(resolvedElementType, mapOf(), 
collectionConstraint).asNullable() - } else { - BagType(resolvedElementType, mapOf(), collectionConstraint).asNullable() - } - } + override fun visitTypeAtomic(node: Type.Atomic, ctx: Ctx): StaticType = + node.toStaticType() - override fun visitTypeRecord(node: Type.Record, ctx: Ctx): StaticType { - // TODO: For now struct level constraint are only check - // and struct by default is closed and unique - // For now we dump check constraint in struct meta - val constraintMeta = node.constraints.mapNotNull { constr -> - if (constr.definition is Constraint.Definition.Check) { - field(constr.name!!, ionString(constr.definition.sql)) - } else null - }.let { if (it.isNotEmpty()) { mapOf("check_constraints" to ionStructOf(it)) } else emptyMap() } - val seen = mutableSetOf() - val resolvedField = node.fields.map { - StructType.Field( - it.name.normalize(), - visitTypeRecordField(it, ctx), - it.comment?.let { mapOf("comment" to it) } ?: emptyMap() - ).also { field -> - if (!seen.add(field.key)) throw IllegalArgumentException("Duplicated binding name ${field.key}") + override fun visitTypeCollection(node: Type.Collection, ctx: Ctx): StaticType { + val elementType = node.type ?: return if (node.isOrdered) StaticType.LIST.asNullable() else StaticType.BAG.asNullable() + // only one pk constraint + val pkConstrs = node.constraints.filter { + val def = it.definition + if (def is Constraint.Definition.Unique) { + def.isPrimaryKey + } else false + } + val pkConstr = when (pkConstrs.size) { + 0 -> null + 1 -> pkConstrs.first() + else -> throw IllegalArgumentException("Only one primary key constraint is allowed") + } + val pkAttr = pkConstr?.let { (it.definition as Constraint.Definition.Unique).attributes } ?: emptyList() + // if associated with PK + // the underlying type must be a non null struct + val resolvedElementType = visitType(elementType, Ctx(pkAttr)).let { + if (pkAttr.isNotEmpty()) { + it.removeNull() + } else it + } + val collectionConstraint = node.constraints.mapNotNull { contr -> + val def = contr.definition + if (def is Constraint.Definition.Unique) { + val uniqueReference = def.attributes.map { it.symbol }.toSet() + if (def.isPrimaryKey) CollectionConstraint.PrimaryKey(uniqueReference) + else CollectionConstraint.UniqueKey(uniqueReference) + } else null + }.toSet() + return if (node.isOrdered) { + ListType(resolvedElementType, mapOf(), collectionConstraint).asNullable() + } else { + BagType(resolvedElementType, mapOf(), collectionConstraint).asNullable() } } - return StructType( - resolvedField, - true, - listOf(), - setOf( - TupleConstraint.Open(false), - TupleConstraint.UniqueAttrs(true) - ), - constraintMeta - ).asNullable() - } + override fun visitTypeRecord(node: Type.Record, ctx: Ctx): StaticType { + // TODO: For now struct level constraint are only check + // and struct by default is closed and unique + // For now we dump check constraint in struct meta + val constraintMeta = node.constraints.mapNotNull { constr -> + if (constr.definition is Constraint.Definition.Check) { + field(constr.name!!, ionString(constr.definition.sql)) + } else null + }.let { if (it.isNotEmpty()) { mapOf("check_constraints" to ionStructOf(it)) } else emptyMap() } + val seen = mutableSetOf() + val resolvedField = node.fields.map { + StructType.Field( + it.name.symbol, + visitTypeRecordField(it, ctx), + it.comment?.let { mapOf("comment" to it) } ?: emptyMap() + ).also { field -> + if (!seen.add(field.key)) throw IllegalArgumentException("Duplicated binding name ${field.key}") + } + } - override fun 
visitTypeRecordField(node: Type.Record.Field, ctx: Ctx): StaticType { - val isPK = ctx.primaryKey.any { it.isEquivalentTo(node.name) } + return StructType( + resolvedField, + true, + listOf(), + setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true) + ), + constraintMeta + ).asNullable() + } - if (node.isOptional && isPK) throw IllegalArgumentException("Primary key attribute cannot be optional") + override fun visitTypeRecordField(node: Type.Record.Field, ctx: Ctx): StaticType { + val isPK = ctx.primaryKey.any { it.isEquivalentTo(node.name) } - val notNullable = - (node.constraints.any { it.definition is Constraint.Definition.NotNull }) || isPK - val type = visitType(node.type, ctx).let { if (notNullable) it.removeNull() else it } + if (node.isOptional && isPK) throw IllegalArgumentException("Primary key attribute cannot be optional") - return if (node.isOptional) type.asOptional() else type - } + val notNullable = + (node.constraints.any { it.definition is Constraint.Definition.NotNull }) || isPK + val type = visitType(node.type, ctx).let { if (notNullable) it.removeNull() else it } - @OptIn(PartiQLTimestampExperimental::class) - private fun Type.Atomic.toStaticType() = when (this) { - is Type.Atomic.Bool -> BOOL - is Type.Atomic.Int2 -> INT2 - is Type.Atomic.Int4 -> INT4 - is Type.Atomic.Int8 -> INT8 - is Type.Atomic.Int -> INT - is Type.Atomic.Decimal -> - if (this.precision != null) - DecimalType(DecimalType.PrecisionScaleConstraint.Constrained(this.precision, this.scale!!)) - else DECIMAL - is Type.Atomic.Float64 -> FLOAT - - is Type.Atomic.Char -> StringType(StringType.StringLengthConstraint.Constrained(NumberConstraint.Equals(this.length ?: 1))) - is Type.Atomic.Varchar -> - this.length?.let { - StringType(StringType.StringLengthConstraint.Constrained(NumberConstraint.UpTo(it))) - } ?: STRING - - is Type.Atomic.Date -> DATE - is Type.Atomic.Time -> TimeType(precision, false) - is Type.Atomic.TimeWithTz -> TimeType(precision, true) - is Type.Atomic.Timestamp -> TimestampType(precision ?: 6, false) - is Type.Atomic.TimestampWithTz -> TimestampType(precision ?: 6, true) - }.asNullable() - - internal fun Identifier.Symbol.normalize() = - when (this.caseSensitivity) { - Identifier.CaseSensitivity.SENSITIVE -> this.symbol - // TODO: should partiql case normalize the identifier? 
- // Lowercase for now to follow Postgres - Identifier.CaseSensitivity.INSENSITIVE -> this.symbol.lowercase() + return if (node.isOptional) type.asOptional() else type } - private fun Identifier.Symbol.isEquivalentTo(other: Identifier.Symbol): Boolean = when (caseSensitivity) { - Identifier.CaseSensitivity.SENSITIVE -> when (other.caseSensitivity) { - Identifier.CaseSensitivity.SENSITIVE -> symbol.equals(other.symbol) + @OptIn(PartiQLTimestampExperimental::class) + private fun Type.Atomic.toStaticType() = when (this) { + is Type.Atomic.Bool -> BOOL + is Type.Atomic.Int2 -> INT2 + is Type.Atomic.Int4 -> INT4 + is Type.Atomic.Int8 -> INT8 + is Type.Atomic.Int -> INT + is Type.Atomic.Decimal -> + if (this.precision != null) + DecimalType(DecimalType.PrecisionScaleConstraint.Constrained(this.precision, this.scale!!)) + else DECIMAL + is Type.Atomic.Float64 -> FLOAT + + is Type.Atomic.Char -> StringType(StringType.StringLengthConstraint.Constrained(NumberConstraint.Equals(this.length ?: 1))) + is Type.Atomic.Varchar -> + this.length?.let { + StringType(StringType.StringLengthConstraint.Constrained(NumberConstraint.UpTo(it))) + } ?: STRING + + is Type.Atomic.Date -> DATE + is Type.Atomic.Time -> TimeType(precision, false) + is Type.Atomic.TimeWithTz -> TimeType(precision, true) + is Type.Atomic.Timestamp -> TimestampType(precision ?: 6, false) + is Type.Atomic.TimestampWithTz -> TimestampType(precision ?: 6, true) + }.asNullable() + + private fun Identifier.Symbol.isEquivalentTo(other: Identifier.Symbol): Boolean = when (caseSensitivity) { + Identifier.CaseSensitivity.SENSITIVE -> when (other.caseSensitivity) { + Identifier.CaseSensitivity.SENSITIVE -> symbol.equals(other.symbol) + Identifier.CaseSensitivity.INSENSITIVE -> symbol.equals(other.symbol, ignoreCase = true) + } Identifier.CaseSensitivity.INSENSITIVE -> symbol.equals(other.symbol, ignoreCase = true) } - Identifier.CaseSensitivity.INSENSITIVE -> symbol.equals(other.symbol, ignoreCase = true) } } } diff --git a/partiql-planner/src/main/kotlin/org/partiql/planner/internal/typer/PlanTyper.kt b/partiql-planner/src/main/kotlin/org/partiql/planner/internal/typer/PlanTyper.kt index 479fa85400..939c4dc7b8 100644 --- a/partiql-planner/src/main/kotlin/org/partiql/planner/internal/typer/PlanTyper.kt +++ b/partiql-planner/src/main/kotlin/org/partiql/planner/internal/typer/PlanTyper.kt @@ -33,6 +33,7 @@ import org.partiql.planner.internal.ir.constraintDefinitionCheck import org.partiql.planner.internal.ir.constraintDefinitionUnique import org.partiql.planner.internal.ir.ddlOpCreateTable import org.partiql.planner.internal.ir.identifierSymbol +import org.partiql.planner.internal.ir.partitionByAttrList import org.partiql.planner.internal.ir.rel import org.partiql.planner.internal.ir.relBinding import org.partiql.planner.internal.ir.relOpAggregate @@ -71,7 +72,7 @@ import org.partiql.planner.internal.ir.typeCollection import org.partiql.planner.internal.ir.typeRecord import org.partiql.planner.internal.ir.typeRecordField import org.partiql.planner.internal.ir.util.PlanRewriter -import org.partiql.planner.internal.typer.ConstraintResolver.Visitor.normalize +import org.partiql.planner.internal.typer.DdlUtils.match import org.partiql.planner.internal.utils.PlanUtils import org.partiql.spi.BindingCase import org.partiql.spi.BindingName @@ -1482,7 +1483,7 @@ internal class PlanTyper(private val env: Env) { when (node.op) { is DdlOp.CreateTable -> { val op = visitDdlOpCreateTable(node.op, ctx) - val shape = ConstraintResolver.resolveTable(op.shape) + val 
shape = DdlUtils.ConstraintResolver.resolveTable(op.shape) return statementDDL(shape, op) } } @@ -1490,7 +1491,7 @@ internal class PlanTyper(private val env: Env) { override fun visitDdlOpCreateTable(node: DdlOp.CreateTable, ctx: List): DdlOp.CreateTable { val shape = visitTypeCollection(node.shape, ctx) - val normalizedShape = ShapeNormalizer().normalize(shape, node.name.debug()) + val normalizedShape = DdlUtils.ShapeNormalizer().normalize(shape, node.name.debug()) val partitionBy = node.partitionBy?.let { visitPartitionBy(it, (shape.type as Type.Record).fields) } return ddlOpCreateTable( @@ -1543,30 +1544,39 @@ internal class PlanTyper(private val env: Env) { } override fun visitConstraintDefinitionUnique(node: Constraint.Definition.Unique, ctx: List): Constraint.Definition.Unique { + val uniqueReference = node.attributes + // inline primary key - return if (node.attributes.isEmpty()) { + return if (uniqueReference.isEmpty()) { val attr = ctx.first() if (attr.type !is Type.Atomic) throw IllegalArgumentException("Primary Key contains attribute whose type is a complex type: ${attr.name.symbol}") constraintDefinitionUnique(listOf(ctx.first().name), node.isPrimaryKey) } else { - val seen = mutableSetOf() + val seen = mutableSetOf() // instead of invoking the rex typer, we manually check if the attribtue are in the scope - node.attributes.forEach { attr -> + uniqueReference.forEach { rvalue -> val fields = ctx.filter { - it.name.normalize() == attr.normalize() + val lvalue = it.name + match(lvalue, rvalue) } - when (fields.size) { - 0 -> throw IllegalArgumentException("Primary Key contains non-existing attribute - ${attr.symbol}") + val lvalue = when (fields.size) { + 0 -> throw IllegalArgumentException("Primary Key contains non-existing attribute - ${rvalue.symbol}") // check the type 1 -> { val type = fields.first().type - if (type !is Type.Atomic) throw IllegalArgumentException("Primary Key contains attribute whose type is a complex type: ${attr.symbol}") + if (type !is Type.Atomic) throw IllegalArgumentException("Primary Key contains attribute whose type is a complex type: ${rvalue.symbol}") + fields.first().name } - else -> throw IllegalArgumentException("Primary Key contains duplicated attribute: ${attr.symbol}") + else -> throw IllegalArgumentException("Primary Key contains duplicated attribute: ${rvalue.symbol}") } - if (!seen.add(attr.normalize())) throw IllegalArgumentException("Primary Key Clause contains duplicated attribute: ${attr.symbol}") + // added the lvalue that has been referred + // This is: PRIMARY KEY (bar, BAR) + // in partial mode does not get normalize + // but should throw an error + if (!seen.add(lvalue)) throw IllegalArgumentException("Primary Key Clause contains duplicated attribute: ${lvalue.symbol}") } - node + // we rewrite rvalue symbol to be exactly the same as lvalue so we don't have to worry about those in resolve + constraintDefinitionUnique(seen.toList(), node.isPrimaryKey) } } @@ -1621,25 +1631,28 @@ internal class PlanTyper(private val env: Env) { super.visitPartitionBy(node, ctx) as PartitionBy override fun visitPartitionByAttrList(node: PartitionBy.AttrList, ctx: List): PartitionBy.AttrList { - val seen = mutableSetOf() - node.attrs.forEach { attr -> + val partitionReference = node.attrs + val seen = mutableSetOf() + partitionReference.forEach { rvalue -> val fields = ctx.filter { - it.name.normalize() == attr.normalize() + val lvalue = it.name + match(lvalue, rvalue) } - when (fields.size) { - 0 -> throw IllegalArgumentException("Partition By Clause 
contains non-existing attribute - ${attr.symbol}") + val lvalue = when (fields.size) { + 0 -> throw IllegalArgumentException("Partition By Clause contains non-existing attribute - ${rvalue.symbol}") // check the type 1 -> { val field = fields.first() val type = field.type - if (type !is Type.Atomic) throw IllegalArgumentException("Partition By Clause contains attribute whose type is a complex type: ${attr.symbol} : $type") - if (field.isOptional) throw IllegalArgumentException("Partition By Clause contains optional attributes: ${attr.symbol}") + if (type !is Type.Atomic) throw IllegalArgumentException("Partition By Clause contains attribute whose type is a complex type: ${rvalue.symbol} : $type") + if (field.isOptional) throw IllegalArgumentException("Partition By Clause contains optional attributes: ${rvalue.symbol}") + field.name } - else -> throw IllegalArgumentException("Partition By Clause contains ambiguous binding: ${attr.symbol}") + else -> throw IllegalArgumentException("Partition By Clause contains ambiguous binding: ${rvalue.symbol}") } - if (!seen.add(attr.normalize())) throw IllegalArgumentException("Partition By Clause contains duplicated attribute: ${attr.symbol}") + if (!seen.add(lvalue)) throw IllegalArgumentException("Partition By Clause contains duplicated attribute: ${rvalue.symbol}") } - return node + return partitionByAttrList(seen.toList()) } } // HELPERS diff --git a/partiql-planner/src/test/kotlin/org/partiql/planner/internal/ddl/DDLTestBase.kt b/partiql-planner/src/test/kotlin/org/partiql/planner/internal/ddl/DDLTestBase.kt index f89d9e767a..fa289cf2c0 100644 --- a/partiql-planner/src/test/kotlin/org/partiql/planner/internal/ddl/DDLTestBase.kt +++ b/partiql-planner/src/test/kotlin/org/partiql/planner/internal/ddl/DDLTestBase.kt @@ -76,8 +76,8 @@ internal class DDLTestBase { tblProperties: Map = emptyMap() ): TestCase { val typedOp = ddlOpCreateTable( - id(tableName), normalizedShape, - if (partitionByAttrs.isEmpty()) null else partitionByAttrList(partitionByAttrs.map { id(it) }), + idSensitive(tableName), normalizedShape, + if (partitionByAttrs.isEmpty()) null else partitionByAttrList(partitionByAttrs.map { idSensitive(it) }), tblProperties.map { tableProperty(it.key, stringValue(it.value)) } ) val resolved = statementDDL( @@ -85,8 +85,8 @@ internal class DDLTestBase { ) val publicPlan = org.partiql.plan.statementDDL( org.partiql.plan.ddlOpCreateTable( - id(tableName).toPublicPlan(), staticType, - if (partitionByAttrs.isEmpty()) null else org.partiql.plan.partitionByAttrList(partitionByAttrs.map { id(it).toPublicPlan() }), + idSensitive(tableName).toPublicPlan(), staticType, + if (partitionByAttrs.isEmpty()) null else org.partiql.plan.partitionByAttrList(partitionByAttrs.map { idSensitive(it).toPublicPlan() }), tblProperties.map { org.partiql.plan.tableProperty(it.key, stringValue(it.value)) } ) ) @@ -121,8 +121,8 @@ internal class DDLTestBase { description = description, untyped = op, typed = ddlOpCreateTable( - id(tableName), normalizedShape, - if (partitionByAttrs.isEmpty()) null else partitionByAttrList(partitionByAttrs.map { id(it) }), + idSensitive(tableName), normalizedShape, + if (partitionByAttrs.isEmpty()) null else partitionByAttrList(partitionByAttrs.map { idInsensitive(it) }), tblProperties.map { tableProperty(it.key, stringValue(it.value)) } ), resolved = null, @@ -139,8 +139,8 @@ internal class DDLTestBase { tblProperties: Map = emptyMap() ): TestCase { val typedOp = ddlOpCreateTable( - id(tableName), normalizedShape, - if 
(partitionByAttrs.isEmpty()) null else partitionByAttrList(partitionByAttrs.map { id(it) }), + idSensitive(tableName), normalizedShape, + if (partitionByAttrs.isEmpty()) null else partitionByAttrList(partitionByAttrs.map { idInsensitive(it) }), tblProperties.map { tableProperty(it.key, stringValue(it.value)) } ) val resolved = statementDDL( @@ -185,7 +185,7 @@ internal class DDLTestBase { // Convenient val FIELD_A_INT4 = Pair( Type.Record.Field( - id("a"), typeAtomicInt4(), + idSensitive("a"), typeAtomicInt4(), emptyList(), false, null, ), @@ -194,7 +194,7 @@ internal class DDLTestBase { val FIELD_B_INT4 = Pair( Type.Record.Field( - id("b"), typeAtomicInt4(), + idSensitive("b"), typeAtomicInt4(), emptyList(), false, null, ), @@ -203,14 +203,14 @@ internal class DDLTestBase { val FIELD_C_VARCHAR10 = Pair( Type.Record.Field( - id("c"), typeAtomicVarchar(10), + idSensitive("c"), typeAtomicVarchar(10), emptyList(), false, null, ), StructType.Field("c", StaticType.unionOf(StringType(StringType.StringLengthConstraint.Constrained(NumberConstraint.UpTo(10))), StaticType.NULL)) ) - val CONSTR_NAME_ZERO = "\$_${tableName}_0" + val CONSTR_NAME_ZERO = "\$_\"${tableName}\"_0" val COMMENT = "this is a comment" @@ -220,7 +220,7 @@ internal class DDLTestBase { val CONSTRA_A_LT_ZERO = Pair( checkConstraintUnresolved( null, - rex(StaticType.ANY, rexOpVarUnresolved(id("a"), Rex.Op.Var.Scope.LOCAL)), + rex(StaticType.ANY, rexOpVarUnresolved(idInsensitive("a"), Rex.Op.Var.Scope.LOCAL)), rex(StaticType.INT4, rexOpLit(int32Value(0))), "a < 0" ), @@ -233,12 +233,12 @@ internal class DDLTestBase { val CONSTRA_B_LT_ZERO = Pair( checkConstraintUnresolved( null, - rex(StaticType.ANY, rexOpVarUnresolved(id("b"), Rex.Op.Var.Scope.LOCAL)), + rex(StaticType.ANY, rexOpVarUnresolved(idInsensitive("b"), Rex.Op.Var.Scope.LOCAL)), rex(StaticType.INT4, rexOpLit(int32Value(0))), "b < 0" ), checkConstraintResolved( - "\$_tbl_0", rex(StaticType.INT4, rexOpVarLocal(0, 0)), rex(StaticType.INT4, rexOpLit(int32Value(0))), "b < 0" + "\$_\"tbl\"_0", rex(StaticType.INT4, rexOpVarLocal(0, 0)), rex(StaticType.INT4, rexOpLit(int32Value(0))), "b < 0" ) ) @@ -247,7 +247,7 @@ internal class DDLTestBase { TestCase.success( "CREATE TABLE tbl (a INT4)", ddlOpCreateTable( - id(tableName), + idSensitive(tableName), tableInternal(FIELD_A_INT4.first), null, emptyList() @@ -259,14 +259,14 @@ internal class DDLTestBase { TestCase.success( "CREATE TABLE tbl (a INT4 PRIMARY KEY)", ddlOpCreateTable( - id(tableName), + idSensitive(tableName), tableInternal(FIELD_A_INT4.first.withConstraints(listOf(inlinePK(null)))), null, emptyList() ), tableInternal( FIELD_A_INT4.first, - collectionConstraint = listOf(tuplePk("\$_tbl_0", listOf("a"))) + collectionConstraint = listOf(tuplePk("\$_\"tbl\"_0", listOf("a"), true)) ), table(StructType.Field("a", StaticType.INT4), tableConstraint = setOf(CollectionConstraint.PrimaryKey(setOf("a")))), ), @@ -274,7 +274,7 @@ internal class DDLTestBase { TestCase.success( "CREATE TABLE tbl (a OPTIONAL INT4)", ddlOpCreateTable( - id(tableName), + idSensitive(tableName), tableInternal(FIELD_A_INT4.first.asOptional()), null, emptyList() @@ -288,21 +288,21 @@ internal class DDLTestBase { PRIMARY KEY cannot be optional """.trimMargin(), ddlOpCreateTable( - id(tableName), + idSensitive(tableName), tableInternal(FIELD_A_INT4.first.withConstraints(listOf(inlinePK(null))).asOptional()), null, emptyList() ), tableInternal( FIELD_A_INT4.first.asOptional(), - collectionConstraint = listOf(tuplePk("\$_tbl_0", listOf("a"))) + 
collectionConstraint = listOf(tuplePk("\$_\"tbl\"_0", listOf("a"), true)) ), ), TestCase.success( "CREATE TABLE tbl(a INT4 NOT NULL)", ddlOpCreateTable( - id(tableName), + idSensitive(tableName), tableInternal(FIELD_A_INT4.first.withConstraints(listOf(nonNullConstraint(null)),)), null, emptyList() @@ -314,7 +314,7 @@ internal class DDLTestBase { TestCase.success( "CREATE TABLE tbl(a OPTIONAL INT4 NOT NULL)", ddlOpCreateTable( - id(tableName), + idSensitive(tableName), tableInternal(FIELD_A_INT4.first.withConstraints(listOf(nonNullConstraint(null))).asOptional()), null, emptyList() @@ -326,7 +326,7 @@ internal class DDLTestBase { TestCase.success( "CREATE TABLE tbl (a INT4 COMMENT 'this is a comment')", ddlOpCreateTable( - id(tableName), + idSensitive(tableName), tableInternal(FIELD_A_INT4.first.withComment(COMMENT)), null, emptyList() @@ -343,7 +343,7 @@ internal class DDLTestBase { The constraint name is not exposed to public plan """.trimIndent(), ddlOpCreateTable( - id(tableName), + idSensitive(tableName), tableInternal(FIELD_A_INT4.first.withConstraints(listOf(nonNullConstraint("a_not_null")),)), null, emptyList() @@ -362,7 +362,7 @@ internal class DDLTestBase { but will be normalized to struct level. """.trimIndent(), ddlOpCreateTable( - id("tbl"), + idSensitive(tableName), tableInternal( FIELD_A_INT4.first.withConstraints(listOf(CONSTRA_A_LT_ZERO.first)) ), @@ -391,7 +391,7 @@ internal class DDLTestBase { Note that the CHECK Constraint is set to tuple level """.trimIndent(), ddlOpCreateTable( - id(tableName), + idSensitive(tableName), tableInternal( FIELD_A_INT4.first, structConstraints = listOf(CONSTRA_A_LT_ZERO.first) @@ -421,15 +421,15 @@ internal class DDLTestBase { Note that the CHECK Constraint refers to multiple attribute in declared. """.trimIndent(), ddlOpCreateTable( - id(tableName), + idSensitive(tableName), tableInternal( FIELD_A_INT4.first, FIELD_B_INT4.first, structConstraints = listOf( checkConstraintUnresolved( null, - rex(StaticType.ANY, rexOpVarUnresolved(id("a"), Rex.Op.Var.Scope.LOCAL)), - rex(StaticType.ANY, rexOpVarUnresolved(id("b"), Rex.Op.Var.Scope.LOCAL)), + rex(StaticType.ANY, rexOpVarUnresolved(idInsensitive("a"), Rex.Op.Var.Scope.LOCAL)), + rex(StaticType.ANY, rexOpVarUnresolved(idInsensitive("b"), Rex.Op.Var.Scope.LOCAL)), "a < b" ) ) @@ -466,7 +466,7 @@ internal class DDLTestBase { Primary key as tuple level constraint """.trimIndent(), ddlOpCreateTable( - id(tableName), + idSensitive(tableName), tableInternal( FIELD_A_INT4.first, FIELD_B_INT4.first, @@ -478,7 +478,7 @@ internal class DDLTestBase { tableInternal( FIELD_A_INT4.first, FIELD_B_INT4.first, - collectionConstraint = listOf(tuplePk("\$_tbl_0", listOf("a", "b"))) + collectionConstraint = listOf(tuplePk("\$_\"tbl\"_0", listOf("a", "b"), true)) ), table( StructType.Field("a", StaticType.INT4), @@ -497,11 +497,11 @@ internal class DDLTestBase { Primary key contains duplicated attributes """.trimIndent(), ddlOpCreateTable( - id(tableName), + idSensitive(tableName), tableInternal( FIELD_A_INT4.first, FIELD_B_INT4.first, - structConstraints = listOf(tuplePk("\$_tbl_0", listOf("a", "a"))) + structConstraints = listOf(tuplePk("\$_\"tbl\"_0", listOf("a", "a"))) ), null, emptyList() @@ -518,7 +518,7 @@ internal class DDLTestBase { Primary key contains non-existing attributes """.trimIndent(), ddlOpCreateTable( - id(tableName), + idSensitive(tableName), tableInternal( FIELD_A_INT4.first, FIELD_B_INT4.first, @@ -537,7 +537,7 @@ internal class DDLTestBase { Note that the check constraint refers to an 
attribute that is not the attribute being declared """.trimIndent(), ddlOpCreateTable( - id("tbl"), + idSensitive(tableName), tableInternal( FIELD_A_INT4.first.withConstraints(listOf(CONSTRA_B_LT_ZERO.first)) ), @@ -557,7 +557,7 @@ internal class DDLTestBase { the attribute being declared. """.trimIndent(), ddlOpCreateTable( - id(tableName), + idSensitive(tableName), tableInternal( FIELD_A_INT4.first.withConstraints(listOf(CONSTRA_B_LT_ZERO.first)), FIELD_B_INT4.first @@ -575,7 +575,7 @@ internal class DDLTestBase { Duplicated Binding at the same level """.trimIndent(), ddlOpCreateTable( - id(tableName), + idSensitive(tableName), tableInternal(FIELD_A_INT4.first, FIELD_A_INT4.first), null, emptyList() @@ -591,10 +591,10 @@ internal class DDLTestBase { ) """.trimIndent(), ddlOpCreateTable( - id(tableName), + idSensitive(tableName), tableInternal( typeRecordField( - id("nested"), + idSensitive("nested"), typeRecord( listOf(FIELD_A_INT4.first), emptyList() @@ -609,7 +609,7 @@ internal class DDLTestBase { ), tableInternal( typeRecordField( - id("nested"), + idSensitive("nested"), typeRecord( listOf(FIELD_A_INT4.first), emptyList() @@ -636,10 +636,10 @@ internal class DDLTestBase { ) """.trimIndent(), ddlOpCreateTable( - id(tableName), + idSensitive(tableName), tableInternal( typeRecordField( - id("nested"), + idSensitive("nested"), typeRecord( listOf(FIELD_A_INT4.first.withConstraints(listOf(nonNullConstraint(null)))), emptyList() @@ -654,9 +654,9 @@ internal class DDLTestBase { ), tableInternal( typeRecordField( - id("nested"), + idSensitive("nested"), typeRecord( - listOf(FIELD_A_INT4.first.withConstraints(listOf(nonNullConstraint("\$_tbl_0")))), + listOf(FIELD_A_INT4.first.withConstraints(listOf(nonNullConstraint("\$_\"tbl\"_0")))), emptyList() ), emptyList(), @@ -681,10 +681,10 @@ internal class DDLTestBase { ) """.trimIndent(), ddlOpCreateTable( - id(tableName), + idSensitive(tableName), tableInternal( typeRecordField( - id("nested"), + idSensitive("nested"), typeRecord( listOf(FIELD_A_INT4.first.asOptional()), emptyList() @@ -699,7 +699,7 @@ internal class DDLTestBase { ), tableInternal( typeRecordField( - id("nested"), + idSensitive("nested"), typeRecord( listOf(FIELD_A_INT4.first.asOptional()), emptyList() @@ -726,10 +726,10 @@ internal class DDLTestBase { ) """.trimIndent(), ddlOpCreateTable( - id(tableName), + idSensitive(tableName), tableInternal( typeRecordField( - id("nested"), + idSensitive("nested"), typeRecord( listOf(FIELD_A_INT4.first.withComment(COMMENT)), emptyList() @@ -744,7 +744,7 @@ internal class DDLTestBase { ), tableInternal( typeRecordField( - id("nested"), + idSensitive("nested"), typeRecord( listOf(FIELD_A_INT4.first.withComment(COMMENT)), emptyList() @@ -769,10 +769,10 @@ internal class DDLTestBase { ) """.trimIndent(), ddlOpCreateTable( - id(tableName), + idSensitive(tableName), tableInternal( typeRecordField( - id("nested"), + idSensitive("nested"), typeRecord( listOf(FIELD_A_INT4.first), emptyList() @@ -796,10 +796,10 @@ internal class DDLTestBase { We allow this as the two "a"s are in different scope """.trimIndent(), ddlOpCreateTable( - id(tableName), + idSensitive(tableName), tableInternal( typeRecordField( - id("nested"), + idSensitive("nested"), typeRecord( listOf(FIELD_A_INT4.first), emptyList() @@ -815,7 +815,7 @@ internal class DDLTestBase { ), tableInternal( typeRecordField( - id("nested"), + idSensitive("nested"), typeRecord( listOf(FIELD_A_INT4.first), emptyList() @@ -847,10 +847,10 @@ internal class DDLTestBase { Duplicated binding in nested scope 
""".trimIndent(), ddlOpCreateTable( - id(tableName), + idSensitive(tableName), tableInternal( typeRecordField( - id("nested"), + idSensitive("nested"), typeRecord( listOf(FIELD_A_INT4.first, FIELD_A_INT4.first), emptyList() @@ -865,7 +865,7 @@ internal class DDLTestBase { ), tableInternal( typeRecordField( - id("nested"), + idSensitive("nested"), typeRecord( listOf(FIELD_A_INT4.first, FIELD_A_INT4.first), emptyList() @@ -885,10 +885,10 @@ internal class DDLTestBase { ) """.trimIndent(), ddlOpCreateTable( - id(tableName), + idSensitive(tableName), tableInternal( Type.Record.Field( - id("a"), typeCollection(typeAtomicInt4(), true, emptyList()), + idSensitive("a"), typeCollection(typeAtomicInt4(), true, emptyList()), emptyList(), false, null, ), @@ -899,7 +899,7 @@ internal class DDLTestBase { ), tableInternal( Type.Record.Field( - id("a"), typeCollection(typeAtomicInt4(), true, emptyList()), + idSensitive("a"), typeCollection(typeAtomicInt4(), true, emptyList()), emptyList(), false, null, ), @@ -919,10 +919,10 @@ internal class DDLTestBase { This should fail as currently we do not allow setting attribute whose type is collection as primary key """.trimIndent(), ddlOpCreateTable( - id(tableName), + idSensitive(tableName), tableInternal( Type.Record.Field( - id("a"), typeCollection(typeAtomicInt4(), true, emptyList()), + idSensitive("a"), typeCollection(typeAtomicInt4(), true, emptyList()), listOf(inlinePK(null)), false, null, ), @@ -941,10 +941,10 @@ internal class DDLTestBase { COLLECTION OF COLLECTION, this should failed the conversion. """.trimIndent(), ddlOpCreateTable( - id(tableName), + idSensitive(tableName), tableInternal( Type.Record.Field( - id("a"), typeCollection(typeCollection(typeAtomicInt4(), true, emptyList()), true, emptyList()), + idSensitive("a"), typeCollection(typeCollection(typeAtomicInt4(), true, emptyList()), true, emptyList()), emptyList(), false, null, ), @@ -955,7 +955,7 @@ internal class DDLTestBase { ), tableInternal( Type.Record.Field( - id("a"), typeCollection(typeCollection(typeAtomicInt4(), true, emptyList()), true, emptyList()), + idSensitive("a"), typeCollection(typeCollection(typeAtomicInt4(), true, emptyList()), true, emptyList()), emptyList(), false, null, ), @@ -977,10 +977,10 @@ internal class DDLTestBase { Purpose is to explore how we model create a bag(struct(bag(struct))) """.trimIndent(), ddlOpCreateTable( - id(tableName), + idSensitive(tableName), tableInternal( typeRecordField( - id("tbl2"), + idSensitive("tbl2"), typeCollection( typeRecord( listOf(FIELD_A_INT4.first), @@ -999,7 +999,7 @@ internal class DDLTestBase { ), tableInternal( typeRecordField( - id("tbl2"), + idSensitive("tbl2"), typeCollection( typeRecord( listOf(FIELD_A_INT4.first), @@ -1034,10 +1034,10 @@ internal class DDLTestBase { How do we model this? 
""".trimIndent(), ddlOpCreateTable( - id(tableName), + idSensitive(tableName), tableInternal( typeRecordField( - id("tbl2"), + idSensitive("tbl2"), typeCollection( typeRecord( listOf(FIELD_A_INT4.first.withConstraints(listOf(inlinePK(null)))), @@ -1056,14 +1056,14 @@ internal class DDLTestBase { ), tableInternal( typeRecordField( - id("tbl2"), + idSensitive("tbl2"), typeCollection( typeRecord( listOf(FIELD_A_INT4.first), emptyList() ), false, - listOf(tuplePk("\$_tbl_0", listOf("a"))) + listOf(tuplePk("\$_\"tbl\"_0", listOf("a"), true)) ), emptyList(), false, @@ -1087,9 +1087,9 @@ internal class DDLTestBase { TestCase.success( "CREATE TABLE tbl (a INT4) PARTITION BY (a)", ddlOpCreateTable( - id(tableName), + idSensitive(tableName), tableInternal(FIELD_A_INT4.first), - partitionByAttrList(listOf(id("a"))), + partitionByAttrList(listOf(idInsensitive("a"))), emptyList() ), tableInternal(FIELD_A_INT4.first), @@ -1100,9 +1100,9 @@ internal class DDLTestBase { TestCase.failedValidation( "CREATE TABLE tbl (a INT4) PARTITION BY (b)", ddlOpCreateTable( - id(tableName), + idSensitive(tableName), tableInternal(FIELD_A_INT4.first), - partitionByAttrList(listOf(id("b"))), + partitionByAttrList(listOf(idInsensitive("b"))), emptyList() ), ), @@ -1133,7 +1133,9 @@ internal class DDLTestBase { collectionConstraint ) - private fun id(id: String) = identifierSymbol(id, Identifier.CaseSensitivity.INSENSITIVE) + private fun idInsensitive(id: String) = identifierSymbol(id, Identifier.CaseSensitivity.INSENSITIVE) + + private fun idSensitive(id: String) = identifierSymbol(id, Identifier.CaseSensitivity.SENSITIVE) private fun nonNullConstraint(name: String?) = constraint(name, constraintDefinitionNotNull()) @@ -1143,7 +1145,7 @@ internal class DDLTestBase { rex( StaticType.ANY, rexOpCallUnresolved( - id("lt"), + idInsensitive("lt"), listOf(lhs, rhs), ) ), @@ -1201,6 +1203,10 @@ internal class DDLTestBase { private fun inlinePK(name: String?) 
= constraint(name, constraintDefinitionUnique(emptyList(), true)) - private fun tuplePk(name: String?, attrs: List) = constraint(name, constraintDefinitionUnique(attrs.map { id(it) }, true)) + private fun tuplePk(name: String?, attrs: List, sensitive: Boolean = false) = if (sensitive) { + constraint(name, constraintDefinitionUnique(attrs.map { idSensitive(it) }, true)) + } else { + constraint(name, constraintDefinitionUnique(attrs.map { idInsensitive(it) }, true)) + } } } diff --git a/partiql-planner/src/test/kotlin/org/partiql/planner/internal/ddl/DDLTests.kt b/partiql-planner/src/test/kotlin/org/partiql/planner/internal/ddl/DDLTests.kt index c1915a077a..16999f0a72 100644 --- a/partiql-planner/src/test/kotlin/org/partiql/planner/internal/ddl/DDLTests.kt +++ b/partiql-planner/src/test/kotlin/org/partiql/planner/internal/ddl/DDLTests.kt @@ -9,6 +9,7 @@ import org.partiql.plan.DdlOp import org.partiql.plan.Statement import org.partiql.plan.debug.PlanPrinter import org.partiql.planner.PartiQLPlanner +import org.partiql.planner.PartiQLPlannerBuilder import org.partiql.plugins.memory.MemoryCatalog import org.partiql.plugins.memory.MemoryConnector import org.partiql.spi.connector.ConnectorSession @@ -23,7 +24,7 @@ import kotlin.test.assertEquals class DDLTests { private val parser = PartiQLParser.default() - private val planner = PartiQLPlanner.builder().caseNormalize("EXACTCASE").build() + private val planner = PartiQLPlannerBuilder().build() private val catalogName = "TEST" private val catalog = MemoryCatalog.PartiQL().name(catalogName).build() @@ -76,7 +77,7 @@ class DDLTests { primaryKeyFields = emptyList(), constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)), metas = mapOf( - "check_constraints" to ionStructOf(field("\$_\"my_catalog\".\"my_schema\".\"tbl\"_0", ionString("\"a\" <> \"b\""))), + "check_constraints" to ionStructOf(field("\$_my_catalog.my_schema.\"tbl\"_0", ionString("a <> b"))), ) ), metas = mapOf(), @@ -186,4 +187,28 @@ class DDLTests { println(staticType) } + + @Test + fun sanity7() { + val query = """ + CREATE TABLE foo ( + A INT2, + PRIMARY KEY(A) + ) + """.trimIndent() + + val ast = parser.parse(query).root + val plan = planner + .plan(ast, plannerSession) {} + .plan + val res = buildString { + PlanPrinter.append(this, plan) + } + println(res) + + val staticType = + ((plan.statement as Statement.DDL).op as DdlOp.CreateTable).shape + + println(staticType) + } }
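
For illustration, below is a minimal, self-contained sketch of the matching rule this patch introduces. Id and Case are simplified stand-ins for the planner's Identifier.Symbol and Identifier.CaseSensitivity, not the real API; the example only shows how a case-sensitive declaration (lvalue) matches sensitive vs. insensitive references (rvalues).

enum class Case { SENSITIVE, INSENSITIVE }
data class Id(val symbol: String, val case: Case)

// lvalue: the declared attribute (always SENSITIVE at this stage).
// rvalue: the reference from a PRIMARY KEY or PARTITION BY clause.
fun match(lvalue: Id, rvalue: Id): Boolean = when (lvalue.case) {
    Case.SENSITIVE -> when (rvalue.case) {
        Case.SENSITIVE -> lvalue.symbol == rvalue.symbol
        Case.INSENSITIVE -> lvalue.symbol.equals(rvalue.symbol, ignoreCase = true)
    }
    Case.INSENSITIVE -> error("lvalues should all be case sensitive at this stage")
}

fun main() {
    val declared = Id("a", Case.SENSITIVE)               // a INT4
    println(match(declared, Id("A", Case.INSENSITIVE)))  // true:  PRIMARY KEY (A)
    println(match(declared, Id("A", Case.SENSITIVE)))    // false: PRIMARY KEY ("A")
    println(match(declared, Id("a", Case.SENSITIVE)))    // true:  PRIMARY KEY ("a")
    // Both bar and BAR (insensitive) would match a declared "bar", which is why the
    // typer records each matched lvalue and rejects duplicates in PRIMARY KEY (bar, BAR).
}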