diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index e69b3331b..64813934f 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -23,6 +23,7 @@ jobs: steps: - uses: actions/checkout@v2 with: + ref: ${{ github.event.pull_request.head.sha }} submodules: recursive - name: Use Java ${{ matrix.java }} diff --git a/CHANGELOG.md b/CHANGELOG.md index f94504080..d423fdc58 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -31,9 +31,18 @@ Thank you to all who have contributed! - Adds top-level IR node creation functions. - Adds `componentN` functions (destructuring) to IR nodes via Kotlin data classes - Adds public `tag` field to IR nodes for associating metadata +- Adds AST Normalization Pass. +- Adds PartiQLPlanner Interface, which is responsible for translating an AST to a Plan. ### Changed - StaticTypeInferencer and PlanTyper will not raise an error when an expression is inferred to `NULL` or `unionOf(NULL, MISSING)`. In these cases the StaticTypeInferencer and PlanTyper will still raise the Problem Code `ExpressionAlwaysReturnsNullOrMissing` but the severity of the problem has been changed to warning. In the case an expression always returns `MISSING`, problem code `ExpressionAlwaysReturnsMissing` will be raised, which will have problem severity of error. +- **Breaking** The default integer literal type is now 32-bit; if the literal cannot fit in a 32-bit integer, it overflows to 64-bit. +- **BREAKING** `PartiQLValueType` now distinguishes between Arbitrary Precision Decimal and Fixed Precision Decimal. +- **BREAKING** Function Signature Changes. Now Function signature has two subclasses, `Scalar` and `Aggregation`. +- **BREAKING** In the produced plan: + - The new plan is fully resolved and typed. + - Operators will be converted to function calls. + ### Deprecated @@ -41,8 +50,9 @@ Thank you to all who have contributed! - Fixes the CLI hanging on invalid queries. See issue #1230. 
### Removed -- [Breaking] Removed IR factory in favor of static top-level functions. Change `Ast.foo()` +- **Breaking** Removed IR factory in favor of static top-level functions. Change `Ast.foo()` to `foo()` +- **Breaking** Removed `org.partiql.lang.planner.transforms.AstToPlan`. Use `org.partiql.planner.PartiQLPlanner`. ### Security @@ -153,6 +163,8 @@ classes in `:partiql-ast` and `:partiql-plan`. - `org.partiql.lang.errors.ProblemSeverity` -> `org.partiql.errors.ProblemSeverity` - `org.partiql.lang.errors.ProblemHandler` -> `org.partiql.errors.ProblemHandler` - **Breaking** the `sourceLocation` field of `org.partiql.errors.Problem` was changed from `org.partiql.lang.ast.SoureceLocationMeta` to `org.partiql.errors.ProblemLocation`. +- Removed `Nullable Int -> "_${index()}" } + +/** + * Produces a "binder" (AS alias) for an expression following the given rules: + * + * 1. If item is an id, use the last symbol + * 2. If item is a path with a final symbol step, use the symbol — else 4 + * 3. If item is a cast, use the value name + * 4. Else, use item index with prefix _ + * + * See https://github.com/partiql/partiql-lang-kotlin/issues/1122 + */ +public fun Expr.toBinder(index: () -> Int): Identifier.Symbol = when (this) { + is Expr.Var -> this.identifier.toBinder() + is Expr.Path -> this.toBinder(index) + is Expr.Cast -> this.value.toBinder(index) + is Expr.SessionAttribute -> this.attribute.name.uppercase().toBinder() + else -> col(index).toBinder() +} + +/** + * Simple toBinder that uses an int literal rather than a closure. 
+ * + * @param index + * @return + */ +public fun Expr.toBinder(index: Int): Identifier.Symbol = toBinder { index } + +private fun String.toBinder(): Identifier.Symbol = ast { + // Every binder preserves case + identifierSymbol(this@toBinder, Identifier.CaseSensitivity.SENSITIVE) +} + +private fun Identifier.toBinder(): Identifier.Symbol = when (this@toBinder) { + is Identifier.Qualified -> when (steps.isEmpty()) { + true -> root.symbol.toBinder() + else -> steps.last().symbol.toBinder() + } + is Identifier.Symbol -> symbol.toBinder() +} + +@OptIn(PartiQLValueExperimental::class) +private fun Expr.Path.toBinder(index: () -> Int): Identifier.Symbol { + if (steps.isEmpty()) return root.toBinder(index) + return when (val last = steps.last()) { + is Expr.Path.Step.Symbol -> last.symbol.toBinder() + is Expr.Path.Step.Index -> { + val k = last.key + if (k is Expr.Lit && k.value is StringValue) { + k.value.value!!.toBinder() + } else { + col(index).toBinder() + } + } + else -> col(index).toBinder() + } +} diff --git a/partiql-ast/src/main/kotlin/org/partiql/ast/normalize/AstPass.kt b/partiql-ast/src/main/kotlin/org/partiql/ast/normalize/AstPass.kt new file mode 100644 index 000000000..722e0309e --- /dev/null +++ b/partiql-ast/src/main/kotlin/org/partiql/ast/normalize/AstPass.kt @@ -0,0 +1,25 @@ +/* + * Copyright 2022 Amazon.com, Inc. or its affiliates. All rights reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at: + * + * http://aws.amazon.com/apache2.0/ + * + * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific + * language governing permissions and limitations under the License. 
+ */ + +package org.partiql.ast.normalize + +import org.partiql.ast.Statement + +/** + * Wraps a rewriter with a default entry point. + */ +public interface AstPass { + + public fun apply(statement: Statement): Statement +} diff --git a/partiql-ast/src/main/kotlin/org/partiql/ast/normalize/Normalize.kt b/partiql-ast/src/main/kotlin/org/partiql/ast/normalize/Normalize.kt new file mode 100644 index 000000000..c4aadcf42 --- /dev/null +++ b/partiql-ast/src/main/kotlin/org/partiql/ast/normalize/Normalize.kt @@ -0,0 +1,29 @@ +/* + * Copyright 2022 Amazon.com, Inc. or its affiliates. All rights reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at: + * + * http://aws.amazon.com/apache2.0/ + * + * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific + * language governing permissions and limitations under the License. + */ + +package org.partiql.ast.normalize + +import org.partiql.ast.Statement + +/** + * AST normalization + */ +public fun Statement.normalize(): Statement { + // could be a fold, but this is nice for setting breakpoints + var ast = this + ast = NormalizeFromSource.apply(ast) + ast = NormalizeSelect.apply(ast) + ast = NormalizeGroupBy.apply(ast) + return ast +} diff --git a/partiql-ast/src/main/kotlin/org/partiql/ast/normalize/NormalizeFromSource.kt b/partiql-ast/src/main/kotlin/org/partiql/ast/normalize/NormalizeFromSource.kt new file mode 100644 index 000000000..081321f46 --- /dev/null +++ b/partiql-ast/src/main/kotlin/org/partiql/ast/normalize/NormalizeFromSource.kt @@ -0,0 +1,63 @@ +/* + * Copyright 2022 Amazon.com, Inc. or its affiliates. All rights reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). 
+ * You may not use this file except in compliance with the License. + * A copy of the License is located at: + * + * http://aws.amazon.com/apache2.0/ + * + * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific + * language governing permissions and limitations under the License. + */ + +package org.partiql.ast.normalize + +import org.partiql.ast.AstNode +import org.partiql.ast.Expr +import org.partiql.ast.From +import org.partiql.ast.Statement +import org.partiql.ast.fromJoin +import org.partiql.ast.helpers.toBinder +import org.partiql.ast.util.AstRewriter + +/** + * Assign aliases to any FROM source which does not have one. + */ +internal object NormalizeFromSource : AstPass { + + override fun apply(statement: Statement): Statement = Visitor.visitStatement(statement, 0) as Statement + + private object Visitor : AstRewriter() { + + // Each SFW starts the ctx count again. 
+ override fun visitExprSFW(node: Expr.SFW, ctx: Int): AstNode = super.visitExprSFW(node, 0) + + override fun visitStatementDMLBatchLegacy(node: Statement.DML.BatchLegacy, ctx: Int): AstNode = + super.visitStatementDMLBatchLegacy(node, 0) + + override fun visitFrom(node: From, ctx: Int) = super.visitFrom(node, ctx) as From + + override fun visitFromJoin(node: From.Join, ctx: Int): From { + val lhs = visitFrom(node.lhs, ctx) + val rhs = visitFrom(node.rhs, ctx + 1) + val condition = node.condition?.let { visitExpr(it, ctx) as Expr } + return if (lhs !== node.lhs || rhs !== node.rhs || condition !== node.condition) { + fromJoin(lhs, rhs, node.type, condition) + } else { + node + } + } + + override fun visitFromValue(node: From.Value, ctx: Int): From { + val expr = visitExpr(node.expr, ctx) as Expr + val asAlias = node.asAlias ?: expr.toBinder(ctx) + return if (expr !== node.expr || asAlias !== node.asAlias) { + node.copy(expr = expr, asAlias = asAlias) + } else { + node + } + } + } +} diff --git a/partiql-ast/src/main/kotlin/org/partiql/ast/normalize/NormalizeGroupBy.kt b/partiql-ast/src/main/kotlin/org/partiql/ast/normalize/NormalizeGroupBy.kt new file mode 100644 index 000000000..4ef1f701c --- /dev/null +++ b/partiql-ast/src/main/kotlin/org/partiql/ast/normalize/NormalizeGroupBy.kt @@ -0,0 +1,46 @@ +/* + * Copyright 2022 Amazon.com, Inc. or its affiliates. All rights reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at: + * + * http://aws.amazon.com/apache2.0/ + * + * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific + * language governing permissions and limitations under the License. 
+ */ + +package org.partiql.ast.normalize + +import org.partiql.ast.Expr +import org.partiql.ast.GroupBy +import org.partiql.ast.Statement +import org.partiql.ast.groupByKey +import org.partiql.ast.helpers.toBinder +import org.partiql.ast.util.AstRewriter + +/** + * Adds a unique binder to each group key. + */ +object NormalizeGroupBy : AstPass { + + override fun apply(statement: Statement) = Visitor.visitStatement(statement, 0) as Statement + + private object Visitor : AstRewriter() { + + override fun visitGroupByKey(node: GroupBy.Key, ctx: Int): GroupBy.Key { + val expr = visitExpr(node.expr, 0) as Expr + val alias = when (node.asAlias) { + null -> expr.toBinder(ctx) + else -> node.asAlias + } + return if (expr !== node.expr || alias !== node.asAlias) { + groupByKey(expr, alias) + } else { + node + } + } + } +} diff --git a/partiql-ast/src/main/kotlin/org/partiql/ast/normalize/NormalizeSelect.kt b/partiql-ast/src/main/kotlin/org/partiql/ast/normalize/NormalizeSelect.kt new file mode 100644 index 000000000..ea3a5b49f --- /dev/null +++ b/partiql-ast/src/main/kotlin/org/partiql/ast/normalize/NormalizeSelect.kt @@ -0,0 +1,288 @@ +/* + * Copyright Amazon.com, Inc. or its affiliates. All rights reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at: + * + * http://aws.amazon.com/apache2.0/ + * + * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific + * language governing permissions and limitations under the License. 
+ */ + +package org.partiql.ast.normalize + +import org.partiql.ast.AstNode +import org.partiql.ast.Expr +import org.partiql.ast.From +import org.partiql.ast.Identifier +import org.partiql.ast.Select +import org.partiql.ast.Statement +import org.partiql.ast.exprCall +import org.partiql.ast.exprCase +import org.partiql.ast.exprCaseBranch +import org.partiql.ast.exprIsType +import org.partiql.ast.exprLit +import org.partiql.ast.exprStruct +import org.partiql.ast.exprStructField +import org.partiql.ast.exprVar +import org.partiql.ast.helpers.toBinder +import org.partiql.ast.identifierSymbol +import org.partiql.ast.selectProject +import org.partiql.ast.selectProjectItemExpression +import org.partiql.ast.selectValue +import org.partiql.ast.typeStruct +import org.partiql.ast.util.AstRewriter +import org.partiql.value.PartiQLValueExperimental +import org.partiql.value.stringValue + +/** + * Converts SQL-style SELECT to PartiQL SELECT VALUE. + * - If there is a PROJECT ALL, we use the TUPLEUNION. + * - If there is NOT a PROJECT ALL, we use a literal struct. + * + * Here are some example of rewrites: + * + * ``` + * SELECT * + * FROM + * A AS x, + * B AS y AT i + * ``` + * gets rewritten to: + * ``` + * SELECT VALUE TUPLEUNION( + * CASE WHEN x IS STRUCT THEN x ELSE { '_1': x }, + * CASE WHEN y IS STRUCT THEN y ELSE { '_2': y }, + * { 'i': i } + * ) FROM A AS x, B AS y AT i + * ``` + * + * ``` + * SELECT x.*, x.a FROM A AS x + * ``` + * gets rewritten to: + * ``` + * SELECT VALUE TUPLEUNION( + * CASE WHEN x IS STRUCT THEN x ELSE { '_1': x }, + * { 'a': x.a } + * ) FROM A AS x + * ``` + * + * ``` + * SELECT x.a FROM A AS x + * ``` + * gets rewritten to: + * ``` + * SELECT VALUE { + * 'a': x.a + * } FROM A AS x + * ``` + * + * TODO: LET + * + * Requires [NormalizeFromSource]. 
+ */ +internal object NormalizeSelect : AstPass { + + override fun apply(statement: Statement): Statement = Visitor.visitStatement(statement, newCtx()) as Statement + + /** + * Closure for incrementing a derived binding counter + */ + private fun newCtx(): () -> Int = run { + var i = 1; + { i++ } + } + + /** + * The type parameter () -> Int + */ + private object Visitor : AstRewriter<() -> Int>() { + + /** + * This is used to give projections a name. For example: + * ``` + * SELECT t.* FROM t AS t + * ``` + * + * Will get converted into: + * ``` + * SELECT VALUE TUPLEUNION( + * CASE + * WHEN t IS STRUCT THEN t + * ELSE { '_1': t } + * END + * ) + * FROM t AS t + * ``` + * + * In order to produce the struct's key in `{ '_1': t }` above, we use [col] to produce the column name + * given the ordinal. + */ + private val col = { index: Int -> "_${index + 1}" } + + override fun visitExprSFW(node: Expr.SFW, ctx: () -> Int): Expr.SFW { + val sfw = super.visitExprSFW(node, ctx) as Expr.SFW + return when (val select = sfw.select) { + is Select.Star -> sfw.copy(select = visitSelectAll(select, sfw.from)) + else -> sfw + } + } + + override fun visitSelectProject(node: Select.Project, ctx: () -> Int): AstNode { + + // Visit items, adding a binder if necessary + var diff = false + val visitedItems = ArrayList(node.items.size) + node.items.forEach { n -> + val item = visitSelectProjectItem(n, ctx) as Select.Project.Item + if (item !== n) diff = true + visitedItems.add(item) + } + val visitedNode = if (diff) selectProject(visitedItems, node.setq) else node + + // Rewrite selection + return when (node.items.any { it is Select.Project.Item.All }) { + false -> visitSelectProjectWithoutProjectAll(visitedNode) + true -> visitSelectProjectWithProjectAll(visitedNode) + } + } + + override fun visitSelectProjectItemExpression(node: Select.Project.Item.Expression, ctx: () -> Int): Select.Project.Item.Expression { + val expr = visitExpr(node.expr, newCtx()) as Expr + val alias = when 
(node.asAlias) { + null -> expr.toBinder(ctx) + else -> node.asAlias + } + return if (expr != node.expr || alias != node.asAlias) { + selectProjectItemExpression(expr, alias) + } else { + node + } + } + + // Helpers + + /** + * We need to call this from [visitExprSFW] and not override [visitSelectStar] because we need access to the + * [From] aliases. + * + * Note: We assume that [select] and [from] have already been visited. + */ + private fun visitSelectAll(select: Select.Star, from: From): Select.Value { + val tupleUnionArgs = from.aliases().flatMapIndexed { i, binding -> + val asAlias = binding.first + val atAlias = binding.second + val byAlias = binding.third + val atAliasItem = atAlias?.simple()?.let { + val alias = it.asAlias ?: error("The AT alias should be present. This wasn't normalized.") + buildSimpleStruct(it.expr, alias.symbol) + } + val byAliasItem = byAlias?.simple()?.let { + val alias = it.asAlias ?: error("The BY alias should be present. This wasn't normalized.") + buildSimpleStruct(it.expr, alias.symbol) + } + listOfNotNull( + buildCaseWhenStruct(asAlias.star(i).expr, i), + atAliasItem, + byAliasItem + ) + } + return selectValue( + constructor = exprCall( + function = identifierSymbol("TUPLEUNION", Identifier.CaseSensitivity.SENSITIVE), + args = tupleUnionArgs + ), + setq = select.setq + ) + } + + private fun visitSelectProjectWithProjectAll(node: Select.Project): AstNode { + val tupleUnionArgs = node.items.mapIndexed { index, item -> + when (item) { + is Select.Project.Item.All -> buildCaseWhenStruct(item.expr, index) + is Select.Project.Item.Expression -> buildSimpleStruct( + item.expr, + item.asAlias?.symbol + ?: error("The alias should've been here. 
This AST is not normalized.") + ) + } + } + return selectValue( + setq = node.setq, + constructor = exprCall( + function = identifierSymbol("TUPLEUNION", Identifier.CaseSensitivity.SENSITIVE), + args = tupleUnionArgs + ) + ) + } + + @OptIn(PartiQLValueExperimental::class) + private fun visitSelectProjectWithoutProjectAll(node: Select.Project): AstNode { + val structFields = node.items.map { item -> + val itemExpr = item as? Select.Project.Item.Expression ?: error("Expected the projection to be an expression.") + exprStructField( + name = exprLit(stringValue(itemExpr.asAlias?.symbol!!)), + value = item.expr + ) + } + return selectValue( + setq = node.setq, + constructor = exprStruct( + fields = structFields + ) + ) + } + + @OptIn(PartiQLValueExperimental::class) + private fun buildCaseWhenStruct(expr: Expr, index: Int): Expr.Case = exprCase( + expr = null, + branches = listOf( + exprCaseBranch( + condition = exprIsType(expr, typeStruct(), null), + expr = expr + ) + ), + default = buildSimpleStruct(expr, col(index)) + ) + + @OptIn(PartiQLValueExperimental::class) + private fun buildSimpleStruct(expr: Expr, name: String): Expr.Struct = exprStruct( + fields = listOf( + exprStructField( + name = exprLit(stringValue(name)), + value = expr + ) + ) + ) + + private fun From.aliases(): List> = when (this) { + is From.Join -> lhs.aliases() + rhs.aliases() + is From.Value -> { + val asAlias = asAlias?.symbol ?: error("AST not normalized, missing asAlias on FROM source.") + val atAlias = atAlias?.symbol + val byAlias = byAlias?.symbol + listOf(Triple(asAlias, atAlias, byAlias)) + } + } + + // t -> t.* AS _i + private fun String.star(i: Int): Select.Project.Item.Expression { + val expr = exprVar(id(this), Expr.Var.Scope.DEFAULT) + val alias = expr.toBinder(i) + return selectProjectItemExpression(expr, alias) + } + + // t -> t AS t + private fun String.simple(): Select.Project.Item.Expression { + val expr = exprVar(id(this), Expr.Var.Scope.DEFAULT) + val alias = id(this) + return 
selectProjectItemExpression(expr, alias) + } + + private fun id(symbol: String) = identifierSymbol(symbol, Identifier.CaseSensitivity.INSENSITIVE) + } +} diff --git a/partiql-ast/src/main/kotlin/org/partiql/ast/sql/SqlDialect.kt b/partiql-ast/src/main/kotlin/org/partiql/ast/sql/SqlDialect.kt index 1c5be817a..de6e6e17f 100644 --- a/partiql-ast/src/main/kotlin/org/partiql/ast/sql/SqlDialect.kt +++ b/partiql-ast/src/main/kotlin/org/partiql/ast/sql/SqlDialect.kt @@ -48,6 +48,23 @@ public abstract class SqlDialect : AstBaseVisitor() { // IDENTIFIERS & PATHS + /** + * Default behavior is to wrap all SFW queries with parentheses. + * + * @param node + * @param head + */ + public open fun visitExprWrapped(node: Expr, head: SqlBlock) = when (node) { + is Expr.SFW -> { + var h = head + h = h concat "(" + h = visitExprSFW(node, h) + h = h concat ")" + h + } + else -> visitExpr(node, head) + } + override fun visitIdentifierSymbol(node: Identifier.Symbol, head: SqlBlock) = head concat r(node.sql()) override fun visitIdentifierQualified(node: Identifier.Qualified, head: SqlBlock): SqlBlock { @@ -186,7 +203,7 @@ public abstract class SqlDialect : AstBaseVisitor() { } var h = head h = h concat r(op) - return visitExpr(node.expr, h) + return visitExprWrapped(node.expr, h) } override fun visitExprBinary(node: Expr.Binary, head: SqlBlock): SqlBlock { @@ -208,9 +225,9 @@ public abstract class SqlDialect : AstBaseVisitor() { Expr.Binary.Op.BITWISE_AND -> "&" } var h = head - h = visitExpr(node.lhs, h) + h = visitExprWrapped(node.lhs, h) h = h concat r(" $op ") - h = visitExpr(node.rhs, h) + h = visitExprWrapped(node.rhs, h) return h } @@ -228,7 +245,7 @@ public abstract class SqlDialect : AstBaseVisitor() { head concat r(node.attribute.name) override fun visitExprPath(node: Expr.Path, head: SqlBlock): SqlBlock { - var h = visitExpr(node.root, head) + var h = visitExprWrapped(node.root, head) h = node.steps.fold(h) { b, step -> visitExprPathStep(step, b) } return h } @@ -241,7 +258,7 @@ 
public abstract class SqlDialect : AstBaseVisitor() { val key = node.key // use [ ] syntax h = h concat r("[") - h = visitExpr(key, h) + h = visitExprWrapped(key, h) h = h concat r("]") return h } @@ -291,45 +308,45 @@ public abstract class SqlDialect : AstBaseVisitor() { override fun visitExprStructField(node: Expr.Struct.Field, head: SqlBlock): SqlBlock { var h = head - h = visitExpr(node.name, h) + h = visitExprWrapped(node.name, h) h = h concat r(": ") - h = visitExpr(node.value, h) + h = visitExprWrapped(node.value, h) return h } override fun visitExprLike(node: Expr.Like, head: SqlBlock): SqlBlock { var h = head - h = visitExpr(node.value, h) + h = visitExprWrapped(node.value, h) h = h concat if (node.not == true) r(" NOT LIKE ") else r(" LIKE ") - h = visitExpr(node.pattern, h) + h = visitExprWrapped(node.pattern, h) if (node.escape != null) { h = h concat r(" ESCAPE ") - h = visitExpr(node.escape, h) + h = visitExprWrapped(node.escape!!, h) } return h } override fun visitExprBetween(node: Expr.Between, head: SqlBlock): SqlBlock { var h = head - h = visitExpr(node.value, h) + h = visitExprWrapped(node.value, h) h = h concat if (node.not == true) r(" NOT BETWEEN ") else r(" BETWEEN ") - h = visitExpr(node.from, h) + h = visitExprWrapped(node.from, h) h = h concat r(" AND ") - h = visitExpr(node.to, h) + h = visitExprWrapped(node.to, h) return h } override fun visitExprInCollection(node: Expr.InCollection, head: SqlBlock): SqlBlock { var h = head - h = visitExpr(node.lhs, h) + h = visitExprWrapped(node.lhs, h) h = h concat if (node.not == true) r(" NOT IN ") else r(" IN ") - h = visitExpr(node.rhs, h) + h = visitExprWrapped(node.rhs, h) return h } override fun visitExprIsType(node: Expr.IsType, head: SqlBlock): SqlBlock { var h = head - h = visitExpr(node.value, h) + h = visitExprWrapped(node.value, h) h = h concat if (node.not == true) r(" IS NOT ") else r(" IS ") h = visitType(node.type, h) return h @@ -340,7 +357,7 @@ public abstract class SqlDialect : 
AstBaseVisitor() { h = h concat r("CASE") h = when (node.expr) { null -> h - else -> visitExpr(node.expr, h concat r(" ")) + else -> visitExprWrapped(node.expr!!, h concat r(" ")) } // WHEN(s) h = node.branches.fold(h) { acc, branch -> visitExprCaseBranch(branch, acc) } @@ -349,7 +366,7 @@ public abstract class SqlDialect : AstBaseVisitor() { null -> h else -> { h = h concat r(" ELSE ") - visitExpr(node.default, h) + visitExprWrapped(node.default!!, h) } } h = h concat r(" END") @@ -359,9 +376,9 @@ public abstract class SqlDialect : AstBaseVisitor() { override fun visitExprCaseBranch(node: Expr.Case.Branch, head: SqlBlock): SqlBlock { var h = head h = h concat r(" WHEN ") - h = visitExpr(node.condition, h) + h = visitExprWrapped(node.condition, h) h = h concat r(" THEN ") - h = visitExpr(node.expr, h) + h = visitExprWrapped(node.expr, h) return h } @@ -383,14 +400,14 @@ public abstract class SqlDialect : AstBaseVisitor() { override fun visitExprSubstring(node: Expr.Substring, head: SqlBlock): SqlBlock { var h = head h = h concat r("SUBSTRING(") - h = visitExpr(node.value, h) + h = visitExprWrapped(node.value, h) if (node.start != null) { h = h concat r(" FROM ") - h = visitExpr(node.start, h) + h = visitExprWrapped(node.start!!, h) } if (node.length != null) { h = h concat r(" FOR ") - h = visitExpr(node.length, h) + h = visitExprWrapped(node.length!!, h) } h = h concat r(")") return h @@ -399,9 +416,9 @@ public abstract class SqlDialect : AstBaseVisitor() { override fun visitExprPosition(node: Expr.Position, head: SqlBlock): SqlBlock { var h = head h = h concat r("POSITION(") - h = visitExpr(node.lhs, h) + h = visitExprWrapped(node.lhs, h) h = h concat r(" IN ") - h = visitExpr(node.rhs, h) + h = visitExprWrapped(node.rhs, h) h = h concat r(")") return h } @@ -415,10 +432,10 @@ public abstract class SqlDialect : AstBaseVisitor() { } // [ FROM] if (node.chars != null) { - h = visitExpr(node.chars, h) + h = visitExprWrapped(node.chars!!, h) h = h concat r(" FROM ") 
} - h = visitExpr(node.value, h) + h = visitExprWrapped(node.value, h) h = h concat r(")") return h } @@ -426,14 +443,14 @@ public abstract class SqlDialect : AstBaseVisitor() { override fun visitExprOverlay(node: Expr.Overlay, head: SqlBlock): SqlBlock { var h = head h = h concat r("OVERLAY(") - h = visitExpr(node.value, h) + h = visitExprWrapped(node.value, h) h = h concat r(" PLACING ") - h = visitExpr(node.overlay, h) + h = visitExprWrapped(node.overlay, h) h = h concat r(" FROM ") - h = visitExpr(node.start, h) + h = visitExprWrapped(node.start, h) if (node.length != null) { h = h concat r(" FOR ") - h = visitExpr(node.length, h) + h = visitExprWrapped(node.length!!, h) } h = h concat r(")") return h @@ -444,7 +461,7 @@ public abstract class SqlDialect : AstBaseVisitor() { h = h concat r("EXTRACT(") h = h concat r(node.field.name) h = h concat r(" FROM ") - h = visitExpr(node.source, h) + h = visitExprWrapped(node.source, h) h = h concat r(")") return h } @@ -452,7 +469,7 @@ public abstract class SqlDialect : AstBaseVisitor() { override fun visitExprCast(node: Expr.Cast, head: SqlBlock): SqlBlock { var h = head h = h concat r("CAST(") - h = visitExpr(node.value, h) + h = visitExprWrapped(node.value, h) h = h concat r(" AS ") h = visitType(node.asType, h) h = h concat r(")") @@ -462,7 +479,7 @@ public abstract class SqlDialect : AstBaseVisitor() { override fun visitExprCanCast(node: Expr.CanCast, head: SqlBlock): SqlBlock { var h = head h = h concat r("CAN_CAST(") - h = visitExpr(node.value, h) + h = visitExprWrapped(node.value, h) h = h concat r(" AS ") h = visitType(node.asType, h) h = h concat r(")") @@ -472,7 +489,7 @@ public abstract class SqlDialect : AstBaseVisitor() { override fun visitExprCanLosslessCast(node: Expr.CanLosslessCast, head: SqlBlock): SqlBlock { var h = head h = h concat r("CAN_LOSSLESS_CAST(") - h = visitExpr(node.value, h) + h = visitExprWrapped(node.value, h) h = h concat r(" AS ") h = visitType(node.asType, h) h = h concat r(")") @@ 
-484,9 +501,9 @@ public abstract class SqlDialect : AstBaseVisitor() { h = h concat r("DATE_ADD(") h = h concat r(node.field.name) h = h concat r(", ") - h = visitExpr(node.lhs, h) + h = visitExprWrapped(node.lhs, h) h = h concat r(", ") - h = visitExpr(node.rhs, h) + h = visitExprWrapped(node.rhs, h) h = h concat r(")") return h } @@ -496,9 +513,9 @@ public abstract class SqlDialect : AstBaseVisitor() { h = h concat r("DATE_DIFF(") h = h concat r(node.field.name) h = h concat r(", ") - h = visitExpr(node.lhs, h) + h = visitExprWrapped(node.lhs, h) h = h concat r(", ") - h = visitExpr(node.rhs, h) + h = visitExprWrapped(node.rhs, h) h = h concat r(")") return h } @@ -521,9 +538,9 @@ public abstract class SqlDialect : AstBaseVisitor() { null -> {} } var h = head - h = visitExpr(node.lhs, h) + h = visitExprWrapped(node.lhs, h) h = h concat r(" ${op.joinToString(" ")} ") - h = visitExpr(node.rhs, h) + h = visitExprWrapped(node.rhs, h) return h } @@ -538,19 +555,19 @@ public abstract class SqlDialect : AstBaseVisitor() { // LET h = if (node.let != null) visitLet(node.let, h concat r(" ")) else h // WHERE - h = if (node.where != null) visitExpr(node.where, h concat r(" WHERE ")) else h + h = if (node.where != null) visitExprWrapped(node.where, h concat r(" WHERE ")) else h // GROUP BY h = if (node.groupBy != null) visitGroupBy(node.groupBy, h concat r(" ")) else h // HAVING - h = if (node.having != null) visitExpr(node.having, h concat r(" HAVING ")) else h + h = if (node.having != null) visitExprWrapped(node.having, h concat r(" HAVING ")) else h // SET OP h = if (node.setOp != null) visitExprSFWSetOp(node.setOp, h concat r(" ")) else h // ORDER BY h = if (node.orderBy != null) visitOrderBy(node.orderBy, h concat r(" ")) else h // LIMIT - h = if (node.limit != null) visitExpr(node.limit, h concat r(" LIMIT ")) else h + h = if (node.limit != null) visitExprWrapped(node.limit, h concat r(" LIMIT ")) else h // OFFSET - h = if (node.offset != null) visitExpr(node.offset, h 
concat r(" OFFSET ")) else h + h = if (node.offset != null) visitExprWrapped(node.offset, h concat r(" OFFSET ")) else h return h } @@ -576,14 +593,14 @@ public abstract class SqlDialect : AstBaseVisitor() { override fun visitSelectProjectItemAll(node: Select.Project.Item.All, head: SqlBlock): SqlBlock { var h = head - h = visitExpr(node.expr, h) + h = visitExprWrapped(node.expr, h) h = h concat r(".*") return h } override fun visitSelectProjectItemExpression(node: Select.Project.Item.Expression, head: SqlBlock): SqlBlock { var h = head - h = visitExpr(node.expr, h) + h = visitExprWrapped(node.expr, h) h = if (node.asAlias != null) h concat r(" AS ${node.asAlias.sql()}") else h return h } @@ -591,9 +608,9 @@ public abstract class SqlDialect : AstBaseVisitor() { override fun visitSelectPivot(node: Select.Pivot, head: SqlBlock): SqlBlock { var h = head h = h concat r("PIVOT ") - h = visitExpr(node.key, h) + h = visitExprWrapped(node.key, h) h = h concat r(" AT ") - h = visitExpr(node.value, h) + h = visitExprWrapped(node.value, h) return h } @@ -605,7 +622,7 @@ public abstract class SqlDialect : AstBaseVisitor() { } var h = head h = h concat r(select) - h = visitExpr(node.constructor, h) + h = visitExprWrapped(node.constructor, h) return h } @@ -617,10 +634,10 @@ public abstract class SqlDialect : AstBaseVisitor() { From.Value.Type.SCAN -> h From.Value.Type.UNPIVOT -> h concat r("UNPIVOT ") } - h = visitExpr(node.expr, h) - h = if (node.asAlias != null) h concat r(" AS ${node.asAlias.sql()}") else h - h = if (node.atAlias != null) h concat r(" AT ${node.atAlias.sql()}") else h - h = if (node.byAlias != null) h concat r(" BY ${node.byAlias.sql()}") else h + h = visitExprWrapped(node.expr, h) + h = if (node.asAlias != null) h concat r(" AS ${node.asAlias!!.sql()}") else h + h = if (node.atAlias != null) h concat r(" AT ${node.atAlias!!.sql()}") else h + h = if (node.byAlias != null) h concat r(" BY ${node.byAlias!!.sql()}") else h return h } @@ -650,7 +667,7 @@ public 
abstract class SqlDialect : AstBaseVisitor() { override fun visitLetBinding(node: Let.Binding, head: SqlBlock): SqlBlock { var h = head - h = visitExpr(node.expr, h) + h = visitExprWrapped(node.expr, h) h = h concat r(" AS ${node.asAlias.sql()}") return h } @@ -670,8 +687,8 @@ public abstract class SqlDialect : AstBaseVisitor() { override fun visitGroupByKey(node: GroupBy.Key, head: SqlBlock): SqlBlock { var h = head - h = visitExpr(node.expr, h) - h = if (node.asAlias != null) h concat r(" AS ${node.asAlias.sql()}") else h + h = visitExprWrapped(node.expr, h) + h = if (node.asAlias != null) h concat r(" AS ${node.asAlias!!.sql()}") else h return h } @@ -702,7 +719,7 @@ public abstract class SqlDialect : AstBaseVisitor() { override fun visitSort(node: Sort, head: SqlBlock): SqlBlock { var h = head - h = visitExpr(node.expr, h) + h = visitExprWrapped(node.expr, h) h = when (node.dir) { Sort.Dir.ASC -> h concat r(" ASC") Sort.Dir.DESC -> h concat r(" DESC") diff --git a/partiql-ast/src/test/kotlin/org/partiql/ast/normalize/NormalizeSelectTest.kt b/partiql-ast/src/test/kotlin/org/partiql/ast/normalize/NormalizeSelectTest.kt new file mode 100644 index 000000000..aace30761 --- /dev/null +++ b/partiql-ast/src/test/kotlin/org/partiql/ast/normalize/NormalizeSelectTest.kt @@ -0,0 +1,184 @@ +package org.partiql.ast.normalize + +import org.junit.jupiter.api.Test +import org.partiql.ast.Expr +import org.partiql.ast.From +import org.partiql.ast.Identifier +import org.partiql.ast.Select +import org.partiql.ast.builder.ast +import org.partiql.ast.exprLit +import org.partiql.ast.exprVar +import org.partiql.ast.identifierSymbol +import org.partiql.ast.selectProjectItemExpression +import org.partiql.value.PartiQLValueExperimental +import org.partiql.value.int32Value +import org.partiql.value.stringValue +import kotlin.test.assertEquals + +class NormalizeSelectTest { + + /** + * SELECT a, b, c FROM T + * + * SELECT VALUE { + * 'a': a, + * 'b': b, + * 'c': c + * } FROM T + */ + @Test 
+ fun testDerivedBinders_00() { + val input = select( + varItem("a"), + varItem("b"), + varItem("c"), + ) + val expected = selectValue( + "a" to variable("a"), + "b" to variable("b"), + "c" to variable("c"), + ) + val actual = NormalizeSelect.apply(input) + assertEquals(expected, actual) + } + + /** + * SELECT 1, 2, 3 FROM T + * + * SELECT VALUE { + * '_1': 1, + * '_2': 2, + * '_3': 3 + * } FROM T + */ + @Test + fun testDerivedBinders_01() { + val input = select( + litItem(1), + litItem(2), + litItem(3), + ) + val expected = selectValue( + "_1" to lit(1), + "_2" to lit(2), + "_3" to lit(3), + ) + val actual = NormalizeSelect.apply(input) + assertEquals(expected, actual) + } + + /** + * SELECT a, 2, 3 FROM T + * + * SELECT VALUE { + * 'a': a, + * '_1': 2, + * '_2': 3 + * } FROM T + */ + @Test + fun testDerivedBinders_02() { + val input = select( + varItem("a"), + litItem(2), + litItem(3), + ) + val expected = selectValue( + "a" to variable("a"), + "_1" to lit(2), + "_2" to lit(3), + ) + val actual = NormalizeSelect.apply(input) + assertEquals(expected, actual) + } + + /** + * SELECT a AS a, 2 AS b, 3 AS c FROM T + * + * SELECT VALUE { + * 'a': a, + * 'b': 2, + * 'c': 3 + * } FROM T + */ + @Test + fun testDerivedBinders_03() { + val input = select( + varItem("a", "a"), + litItem(2, "b"), + litItem(3, "c"), + ) + val expected = selectValue( + "a" to variable("a"), + "b" to lit(2), + "c" to lit(3), + ) + val actual = NormalizeSelect.apply(input) + assertEquals(expected, actual) + } + + // ----- HELPERS ------------------------- + + private fun variable(name: String) = exprVar( + identifier = identifierSymbol( + symbol = name, + caseSensitivity = Identifier.CaseSensitivity.INSENSITIVE, + ), + scope = Expr.Var.Scope.DEFAULT, + ) + + private fun select(vararg items: Select.Project.Item) = ast { + statementQuery { + expr = exprSFW { + select = selectProject { + this.items += items + } + from = fromValue { + expr = variable("T") + type = From.Value.Type.SCAN + } + } + } + } 
+ + @OptIn(PartiQLValueExperimental::class) + private fun selectValue(vararg items: Pair) = ast { + statementQuery { + expr = exprSFW { + select = selectValue { + constructor = exprStruct { + for ((k, v) in items) { + fields += exprStructField { + name = exprLit(stringValue(k)) + value = v + } + } + } + } + from = fromValue { + expr = exprVar { + identifier = identifierSymbol { + symbol = "T" + caseSensitivity = Identifier.CaseSensitivity.INSENSITIVE + } + scope = Expr.Var.Scope.DEFAULT + } + type = From.Value.Type.SCAN + } + } + } + } + + private fun varItem(symbol: String, asAlias: String? = null) = selectProjectItemExpression( + expr = variable(symbol), + asAlias = asAlias?.let { identifierSymbol(asAlias, Identifier.CaseSensitivity.INSENSITIVE) } + ) + + private fun litItem(value: Int, asAlias: String? = null) = selectProjectItemExpression( + expr = lit(value), + asAlias = asAlias?.let { identifierSymbol(asAlias, Identifier.CaseSensitivity.INSENSITIVE) } + ) + + @OptIn(PartiQLValueExperimental::class) + private fun lit(value: Int) = exprLit(int32Value(value)) +} diff --git a/partiql-ast/src/test/kotlin/org/partiql/ast/sql/SqlDialectTest.kt b/partiql-ast/src/test/kotlin/org/partiql/ast/sql/SqlDialectTest.kt index 89172a29d..e1c2f5035 100644 --- a/partiql-ast/src/test/kotlin/org/partiql/ast/sql/SqlDialectTest.kt +++ b/partiql-ast/src/test/kotlin/org/partiql/ast/sql/SqlDialectTest.kt @@ -164,6 +164,11 @@ class SqlDialectTest { @Execution(ExecutionMode.CONCURRENT) fun testOtherClauses(case: Case) = case.assert() + @ParameterizedTest(name = "subqueries #{index}") + @MethodSource("subqueryCases") + @Execution(ExecutionMode.CONCURRENT) + fun testSubqueries(case: Case) = case.assert() + companion object { private val NULL = exprLit(nullValue()) @@ -1599,6 +1604,35 @@ class SqlDialectTest { }, ) + // These are simple clauses + @JvmStatic + private fun subqueryCases() = listOf( + expect("1 = (SELECT a FROM T)") { + exprBinary { + op = Expr.Binary.Op.EQ + lhs = 
exprLit(int32Value(1)) + rhs = exprSFW { + select = select("a") + from = table("T") + } + } + }, + expect("(1, 2) = (SELECT a FROM T)") { + exprBinary { + op = Expr.Binary.Op.EQ + lhs = exprCollection { + type = Expr.Collection.Type.LIST + values += exprLit(int32Value(1)) + values += exprLit(int32Value(2)) + } + rhs = exprSFW { + select = select("a") + from = table("T") + } + } + }, + ) + private fun expect(expected: String, block: AstBuilder.() -> AstNode): Case { val i = ast(block) return Case.Success(i, expected) diff --git a/partiql-cli/build.gradle.kts b/partiql-cli/build.gradle.kts index 01bdea700..196bd0b5b 100644 --- a/partiql-cli/build.gradle.kts +++ b/partiql-cli/build.gradle.kts @@ -20,7 +20,10 @@ plugins { dependencies { implementation(project(":partiql-lang")) + implementation(project(":partiql-ast")) + implementation(project(":partiql-parser")) implementation(project(":partiql-plan")) + implementation(project(":partiql-planner")) implementation(project(":partiql-types")) implementation(project(":plugins:partiql-local")) implementation(project(":partiql-spi")) diff --git a/partiql-cli/src/main/kotlin/org/partiql/cli/Main.kt b/partiql-cli/src/main/kotlin/org/partiql/cli/Main.kt index 7603534ef..481030278 100644 --- a/partiql-cli/src/main/kotlin/org/partiql/cli/Main.kt +++ b/partiql-cli/src/main/kotlin/org/partiql/cli/Main.kt @@ -15,14 +15,22 @@ package org.partiql.cli +import AstPrinter import com.amazon.ion.system.IonSystemBuilder +import com.amazon.ionelement.api.field +import com.amazon.ionelement.api.ionString +import com.amazon.ionelement.api.ionStructOf import org.partiql.cli.pico.PartiQLCommand +import org.partiql.cli.shell.info import org.partiql.lang.eval.EvaluationSession -import org.partiql.lang.planner.transforms.AstToPlan -import org.partiql.lang.syntax.PartiQLParserBuilder +import org.partiql.parser.PartiQLParserBuilder import org.partiql.plan.debug.PlanPrinter +import org.partiql.planner.PartiQLPlanner +import 
org.partiql.planner.PartiQLPlannerBuilder +import org.partiql.plugins.local.LocalPlugin import picocli.CommandLine import java.io.PrintStream +import java.util.UUID import kotlin.system.exitProcess /** @@ -43,16 +51,41 @@ fun main(args: Array) { */ object Debug { + private const val USER_ID = "DEBUG_USER_ID" + + private val plugins = listOf(LocalPlugin()) + private val catalogs = mapOf( + "local" to ionStructOf( + field("connector_name", ionString("local")), + ) + ) + + private val planner = PartiQLPlannerBuilder().plugins(plugins).build() + private val parser = PartiQLParserBuilder.standard().build() + + // !! + // IMPLEMENT DEBUG BEHAVIOR HERE + // !! @Suppress("UNUSED_PARAMETER") @Throws(Exception::class) fun action(input: String, session: EvaluationSession): String { - // IMPLEMENT DEBUG BEHAVIOR HERE val out = PrintStream(System.out) - val parser = PartiQLParserBuilder.standard().build() - val ast = parser.parseAstStatement(input) - val plan = AstToPlan.transform(ast) - // print plan as tree - PlanPrinter.append(out, plan) + + // Parse + val statement = parser.parse(input).root + out.info("-- AST ----------") + AstPrinter.append(out, statement) + + // Plan + val sess = PartiQLPlanner.Session( + queryId = UUID.randomUUID().toString(), + userId = "debug", + catalogConfig = catalogs, + ) + val result = planner.plan(statement, sess).plan + out.info("-- Plan ----------") + PlanPrinter.append(out, result.statement) + return "OK" } } diff --git a/partiql-cli/src/main/kotlin/org/partiql/cli/format/ast/AstPrinter.kt b/partiql-cli/src/main/kotlin/org/partiql/cli/format/ast/AstPrinter.kt new file mode 100644 index 000000000..d202cf181 --- /dev/null +++ b/partiql-cli/src/main/kotlin/org/partiql/cli/format/ast/AstPrinter.kt @@ -0,0 +1,75 @@ + +import org.partiql.ast.AstNode +import org.partiql.ast.visitor.AstBaseVisitor +import kotlin.reflect.KVisibility +import kotlin.reflect.full.isSubclassOf +import kotlin.reflect.full.memberProperties +import 
kotlin.reflect.jvm.jvmErasure + +/** + * Basic printer for debugging during early development lifecycle + */ +internal object AstPrinter { + + fun toString(ast: AstNode): String = buildString { append(this, ast) } + + fun append(out: Appendable, ast: AstNode) { + val ctx = Args(out) + Visitor.visit(ast, ctx) + } + + // args for a visitor invocation + private class Args( + val out: Appendable, + val levels: Array = emptyArray(), + val last: Boolean = true, + ) { + // leading characters of a tree print + val lead: String = when (levels.size) { + 0 -> "⚬ " + else -> { + val prefix = levels.joinToString("") { if (it) "│ " else " " } + val suffix = if (last) "└──" else "├──" + prefix + suffix + } + } + } + + private object Visitor : AstBaseVisitor() { + + private val EOL = System.lineSeparator() + + override fun defaultReturn(node: AstNode, ctx: Args) = Unit + + override fun defaultVisit(node: AstNode, ctx: Args): Unit = with(ctx) { + out.append(lead) + // print node name + out.append(node::class.simpleName) + // print primitive items + val primitives = node.primitives().filter { it.second != null && it.first != "_id" } + if (primitives.isNotEmpty()) { + out.append("[") + out.append(primitives.joinToString { "${it.first}=${it.second}" }) + out.append("]") + } + out.append(EOL) + // print child nodes + node.children.forEachIndexed { i, child -> + val args = Args(out, levels + !last, last = i == node.children.size - 1) + child.accept(Visitor, args) + } + } + + // joins all primitive properties as strings [ (), ... 
(, ) ] + private fun AstNode.primitives(): List> = javaClass.kotlin.memberProperties + .filter { + val t = it.returnType.jvmErasure + val notChildren = it.name != "children" + val notNode = !t.isSubclassOf(AstNode::class) + // not currently correct + val notCollectionOfNodes = !(t.isSubclassOf(Collection::class)) + notChildren && notNode && notCollectionOfNodes && it.visibility == KVisibility.PUBLIC + } + .map { it.name to it.get(this) } + } +} diff --git a/partiql-cli/src/main/kotlin/org/partiql/cli/shell/Shell.kt b/partiql-cli/src/main/kotlin/org/partiql/cli/shell/Shell.kt index 31efbfb6b..96ddec2af 100644 --- a/partiql-cli/src/main/kotlin/org/partiql/cli/shell/Shell.kt +++ b/partiql-cli/src/main/kotlin/org/partiql/cli/shell/Shell.kt @@ -454,7 +454,7 @@ fun PrintStream.success(string: String) = this.println(ansi(string, SUCCESS)) internal fun PrintStream.error(string: String) = this.println(ansi(string, ERROR)) -internal fun PrintStream.info(string: String) = this.println(ansi(string, INFO)) +fun PrintStream.info(string: String) = this.println(ansi(string, INFO)) fun PrintStream.warn(string: String) = this.println(ansi(string, WARN)) diff --git a/partiql-cli/src/main/kotlin/org/partiql/cli/utils/ServiceLoaderUtil.kt b/partiql-cli/src/main/kotlin/org/partiql/cli/utils/ServiceLoaderUtil.kt index 4b777a88b..3f1581ad3 100644 --- a/partiql-cli/src/main/kotlin/org/partiql/cli/utils/ServiceLoaderUtil.kt +++ b/partiql-cli/src/main/kotlin/org/partiql/cli/utils/ServiceLoaderUtil.kt @@ -152,7 +152,7 @@ class ServiceLoaderUtil { PartiQLValueType.INT32 -> StaticType.INT4.asNullable() PartiQLValueType.INT64 -> StaticType.INT8.asNullable() PartiQLValueType.INT -> StaticType.INT.asNullable() - PartiQLValueType.DECIMAL -> StaticType.DECIMAL.asNullable() + PartiQLValueType.DECIMAL_ARBITRARY -> StaticType.DECIMAL.asNullable() PartiQLValueType.FLOAT32 -> StaticType.FLOAT.asNullable() PartiQLValueType.FLOAT64 -> StaticType.FLOAT.asNullable() PartiQLValueType.CHAR -> 
StaticType.STRING.asNullable() @@ -170,6 +170,7 @@ class ServiceLoaderUtil { PartiQLValueType.LIST -> StaticType.LIST.asNullable() PartiQLValueType.SEXP -> StaticType.SEXP.asNullable() PartiQLValueType.STRUCT -> StaticType.STRUCT.asNullable() + PartiQLValueType.DECIMAL -> TODO() } } @@ -193,7 +194,7 @@ class ServiceLoaderUtil { PartiQLValueType.INT -> (partiqlValue as? IntValue)?.long?.let { newInt(it) } ?: ExprValue.nullValue - PartiQLValueType.DECIMAL -> (partiqlValue as? DecimalValue)?.value?.let { newDecimal(it) } + PartiQLValueType.DECIMAL_ARBITRARY -> (partiqlValue as? DecimalValue)?.value?.let { newDecimal(it) } ?: ExprValue.nullValue PartiQLValueType.FLOAT32 -> (partiqlValue as? Float32Value)?.double?.let { newFloat(it) } @@ -290,6 +291,8 @@ class ServiceLoaderUtil { ) }?.let { newStruct(it, StructOrdering.ORDERED) } ?: ExprValue.nullValue } + + PartiQLValueType.DECIMAL -> TODO() } } @@ -354,11 +357,11 @@ class ServiceLoaderUtil { PartiQLValueType.INT, ExprToPartiQLValueType(exprValue) ) } - PartiQLValueType.DECIMAL -> when (exprValue.type) { + PartiQLValueType.DECIMAL_ARBITRARY -> when (exprValue.type) { ExprValueType.NULL -> decimalValue(null) ExprValueType.DECIMAL -> decimalValue(exprValue.numberValue() as BigDecimal) else -> throw ExprToPartiQLValueTypeMismatchException( - PartiQLValueType.DECIMAL, ExprToPartiQLValueType(exprValue) + PartiQLValueType.DECIMAL_ARBITRARY, ExprToPartiQLValueType(exprValue) ) } PartiQLValueType.FLOAT32 -> when (exprValue.type) { @@ -488,6 +491,8 @@ class ServiceLoaderUtil { PartiQLValueType.STRUCT, ExprToPartiQLValueType(exprValue) ) } + + PartiQLValueType.DECIMAL -> TODO() } } @@ -498,7 +503,7 @@ class ServiceLoaderUtil { ExprValueType.BOOL -> PartiQLValueType.BOOL ExprValueType.INT -> PartiQLValueType.INT ExprValueType.FLOAT -> PartiQLValueType.FLOAT32 - ExprValueType.DECIMAL -> PartiQLValueType.DECIMAL + ExprValueType.DECIMAL -> PartiQLValueType.DECIMAL_ARBITRARY ExprValueType.DATE -> PartiQLValueType.DATE 
ExprValueType.TIMESTAMP -> PartiQLValueType.TIMESTAMP ExprValueType.TIME -> PartiQLValueType.TIME diff --git a/partiql-lang/build.gradle.kts b/partiql-lang/build.gradle.kts index 42ca26e40..3ae38c801 100644 --- a/partiql-lang/build.gradle.kts +++ b/partiql-lang/build.gradle.kts @@ -18,6 +18,7 @@ plugins { id(Plugins.jmh) version Versions.jmh id(Plugins.library) id(Plugins.publish) + id(Plugins.shadow) version Versions.shadow } // Disabled for partiql-lang project. @@ -28,9 +29,13 @@ kotlin { dependencies { api(project(":partiql-ast")) api(project(":partiql-parser")) + api(project(":partiql-plan")) + api(project(":partiql-planner")) api(project(":partiql-spi")) api(project(":partiql-types")) api(project(":partiql-plan")) + api(project(":partiql-planner")) + // api(Deps.ionElement) api(Deps.ionJava) api(Deps.ionSchema) @@ -38,7 +43,8 @@ dependencies { implementation(Deps.csv) implementation(Deps.kotlinReflect) - testImplementation(project(":plugins:partiql-local")) + testImplementation(testFixtures(project(":partiql-planner"))) + testImplementation(project(":plugins:partiql-memory")) testImplementation(project(":lib:isl")) testImplementation(Deps.assertj) testImplementation(Deps.junit4) @@ -72,3 +78,14 @@ tasks.processResources { include("partiql.ion") } } + +tasks.processTestResources { + dependsOn(":partiql-planner:generateResourcePath") + from("${project(":partiql-planner").buildDir}/resources/testFixtures") +} + +tasks.shadowJar { + archiveBaseName.set("shadow") + exclude("**/*.kotlin_metadata") + archiveClassifier.set("") +} diff --git a/partiql-lang/src/main/kotlin/org/partiql/lang/planner/transforms/AstToPlan.kt b/partiql-lang/src/main/kotlin/org/partiql/lang/planner/transforms/AstToPlan.kt deleted file mode 100644 index d79a20f0d..000000000 --- a/partiql-lang/src/main/kotlin/org/partiql/lang/planner/transforms/AstToPlan.kt +++ /dev/null @@ -1,89 +0,0 @@ -package org.partiql.lang.planner.transforms - -import org.partiql.lang.domains.PartiqlAst -import 
org.partiql.lang.eval.CompileOptions -import org.partiql.lang.eval.TypedOpBehavior -import org.partiql.lang.eval.visitors.AggregationVisitorTransform -import org.partiql.lang.eval.visitors.FromSourceAliasVisitorTransform -import org.partiql.lang.eval.visitors.OrderBySortSpecVisitorTransform -import org.partiql.lang.eval.visitors.PartiqlAstSanityValidator -import org.partiql.lang.eval.visitors.PipelinedVisitorTransform -import org.partiql.lang.eval.visitors.SelectListItemAliasVisitorTransform -import org.partiql.lang.eval.visitors.SelectStarVisitorTransform -import org.partiql.lang.planner.transforms.plan.RelConverter -import org.partiql.lang.planner.transforms.plan.RexConverter -import org.partiql.plan.PartiQLPlan -import org.partiql.plan.Rex -import org.partiql.plan.partiQLPlan - -/** - * Translate the PIG AST to an implementation of the PartiQL Plan Representation. - */ -object AstToPlan { - - /** - * Converts a PartiqlAst.Statement to a [PartiQLPlan] - */ - fun transform(statement: PartiqlAst.Statement): PartiQLPlan { - val ast = statement.normalize() - if (ast !is PartiqlAst.Statement.Query) { - unsupported(ast) - } - val root = transform(ast.expr) - return partiQLPlan( - version = PartiQLPlan.Version.PARTIQL_V0, - root = root, - ) - } - - // --- Internal --------------------------------------------- - - /** - * Common place to throw exceptions with access to the AST node. - * Error handling pattern is undecided - */ - internal fun unsupported(node: PartiqlAst.PartiqlAstNode): Nothing { - throw UnsupportedOperationException("node: $node") - } - - /** - * Normalizes a statement AST node. Copied from EvaluatingCompiler, and include the validation. - * - * Notes: - * - AST normalization assumes operating on statement rather than a query statement, but the normalization - * only changes the SFW nodes. There's room to simplify here. Also, you have to enter the transform at - * `transformStatement` or nothing happens. 
I initially had `transformQuery` but that doesn't work because - * the pipelinedVisitorTransform traversal can only be entered on statement. - */ - private fun PartiqlAst.Statement.normalize(): PartiqlAst.Statement { - val transform = PipelinedVisitorTransform( - SelectListItemAliasVisitorTransform(), - FromSourceAliasVisitorTransform(), - OrderBySortSpecVisitorTransform(), - AggregationVisitorTransform(), - SelectStarVisitorTransform() - ) - // normalize - val ast = transform.transformStatement(this) - // validate - val validatorCompileOptions = CompileOptions.build { typedOpBehavior(TypedOpBehavior.HONOR_PARAMETERS) } - PartiqlAstSanityValidator().validate(this, validatorCompileOptions) - return ast - } - - /** - * Convert Partiql.Ast.Expr to a Rex/Rel tree - */ - private fun transform(query: PartiqlAst.Expr): Rex = when (query) { - is PartiqlAst.Expr.Select -> { - // - val rex = RelConverter.convert(query) - rex - } - else -> { - // - val rex = RexConverter.convert(query) - rex - } - } -} diff --git a/partiql-lang/src/main/kotlin/org/partiql/lang/planner/transforms/ObjectHandle.kt b/partiql-lang/src/main/kotlin/org/partiql/lang/planner/transforms/ObjectHandle.kt deleted file mode 100644 index 3bc93c9e0..000000000 --- a/partiql-lang/src/main/kotlin/org/partiql/lang/planner/transforms/ObjectHandle.kt +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Copyright Amazon.com, Inc. or its affiliates. All rights reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at: - * - * http://aws.amazon.com/apache2.0/ - * - * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific - * language governing permissions and limitations under the License. 
- */ - -package org.partiql.lang.planner.transforms - -import org.partiql.spi.connector.ConnectorObjectHandle - -/** - * Represents a [ConnectorObjectHandle], but also adds information relevant to the associated Catalog. - * - * This way, the [ConnectorObjectHandle] can be returned by a [org.partiql.spi.connector.Connector] without the Connector - * knowing which Catalog it is mapped to internally. - */ -internal class ObjectHandle( - val connectorHandle: ConnectorObjectHandle, - val catalogName: String -) diff --git a/partiql-lang/src/main/kotlin/org/partiql/lang/planner/transforms/PartiQLSchemaInferencer.kt b/partiql-lang/src/main/kotlin/org/partiql/lang/planner/transforms/PartiQLSchemaInferencer.kt index 14a6c953a..78df76e91 100644 --- a/partiql-lang/src/main/kotlin/org/partiql/lang/planner/transforms/PartiQLSchemaInferencer.kt +++ b/partiql-lang/src/main/kotlin/org/partiql/lang/planner/transforms/PartiQLSchemaInferencer.kt @@ -25,11 +25,12 @@ import org.partiql.errors.UNKNOWN_PROBLEM_LOCATION import org.partiql.lang.SqlException import org.partiql.lang.planner.PlanningProblemDetails import org.partiql.lang.planner.transforms.PartiQLSchemaInferencer.infer -import org.partiql.lang.planner.transforms.impl.Metadata -import org.partiql.lang.planner.transforms.plan.PlanTyper -import org.partiql.lang.planner.transforms.plan.PlanUtils.grabType -import org.partiql.lang.syntax.PartiQLParserBuilder import org.partiql.lang.util.propertyValueMapOf +import org.partiql.parser.PartiQLParserBuilder +import org.partiql.plan.PartiQLPlan +import org.partiql.plan.Statement +import org.partiql.planner.PartiQLPlanner +import org.partiql.planner.PartiQLPlannerBuilder import org.partiql.spi.Plugin import org.partiql.types.StaticType @@ -69,7 +70,7 @@ public object PartiQLSchemaInferencer { ctx: Context ): StaticType { return try { - inferInternal(query, ctx) + inferInternal(query, ctx).second } catch (t: Throwable) { throw when (t) { is SqlException -> InferenceException( @@ -93,12 
+94,10 @@ public object PartiQLSchemaInferencer { * Context object required for performing schema inference. */ public class Context( - public val session: PlannerSession, - plugins: List, + public val session: PartiQLPlanner.Session, + public val plugins: List, public val problemHandler: ProblemHandler = ProblemThrower() - ) { - internal val metadata = Metadata(plugins, session.catalogConfig) - } + ) public class InferenceException( message: String = "", @@ -134,30 +133,21 @@ public object PartiQLSchemaInferencer { } } - private fun inferInternal(query: String, ctx: Context): StaticType { + internal fun inferInternal(query: String, ctx: Context): Pair { val parser = PartiQLParserBuilder.standard().build() - val ast = parser.parseAstStatement(query) - - // Transform to Plan - val plan = AstToPlan.transform(ast) - val typedPlan = PlanTyper.type( - plan.root, - PlanTyper.Context( - input = null, - session = ctx.session, - metadata = ctx.metadata, - scopingOrder = PlanTyper.ScopingOrder.LEXICAL_THEN_GLOBALS, - customFunctionSignatures = emptyList(), - problemHandler = ctx.problemHandler - ) - ) - - // Convert Logical Plan to Static Type - return typedPlan.grabType() ?: throw InferenceException( - Problem( - UNKNOWN_PROBLEM_LOCATION, - PlanningProblemDetails.CompileError("Unable to infer the output type of plan.") + val planner = PartiQLPlannerBuilder() + .plugins(ctx.plugins) + .build() + val ast = parser.parse(query).root + val plan = planner.plan(ast, ctx.session, ctx.problemHandler::handleProblem).plan + if (plan.statement !is Statement.Query) { + throw InferenceException( + Problem( + UNKNOWN_PROBLEM_LOCATION, + PlanningProblemDetails.CompileError("Invalid statement, only `Statement.Query` supported for schema inference") + ) ) - ) + } + return plan to (plan.statement as Statement.Query).root.type } } diff --git a/partiql-lang/src/main/kotlin/org/partiql/lang/planner/transforms/impl/Metadata.kt 
b/partiql-lang/src/main/kotlin/org/partiql/lang/planner/transforms/impl/Metadata.kt deleted file mode 100644 index b26c83a80..000000000 --- a/partiql-lang/src/main/kotlin/org/partiql/lang/planner/transforms/impl/Metadata.kt +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Copyright Amazon.com, Inc. or its affiliates. All rights reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at: - * - * http://aws.amazon.com/apache2.0/ - * - * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific - * language governing permissions and limitations under the License. - */ - -package org.partiql.lang.planner.transforms.impl - -import com.amazon.ionelement.api.StructElement -import org.partiql.lang.planner.transforms.ObjectHandle -import org.partiql.lang.planner.transforms.PlannerSession -import org.partiql.spi.BindingCase -import org.partiql.spi.BindingName -import org.partiql.spi.BindingPath -import org.partiql.spi.Plugin -import org.partiql.spi.connector.ConnectorMetadata -import org.partiql.spi.connector.ConnectorSession -import org.partiql.spi.connector.Constants -import org.partiql.types.StaticType - -/** - * Acts to consolidate multiple [org.partiql.spi.connector.ConnectorMetadata]'s. 
- */ -internal class Metadata( - private val plugins: List, - private val catalogMap: Map -) { - - private val connectorFactories = plugins.flatMap { it.getConnectorFactories() } - private val connectorMap = catalogMap.toList().associate { - val (catalogName, catalogConfig) = it - catalogName to connectorFactories.first { factory -> - val connectorName = catalogConfig[Constants.CONFIG_KEY_CONNECTOR_NAME].stringValue - factory.getName() == connectorName - }.create(catalogName, catalogConfig) - } - - public fun getObjectHandle(session: PlannerSession, catalog: BindingName, path: BindingPath): ObjectHandle? { - val connectorSession = session.toConnectorSession() - val metadataInfo = getMetadata(session.toConnectorSession(), catalog) ?: return null - return metadataInfo.metadata.getObjectHandle(connectorSession, path)?.let { - ObjectHandle( - connectorHandle = it, - catalogName = metadataInfo.catalogName - ) - } - } - - public fun getObjectDescriptor(session: PlannerSession, handle: ObjectHandle): StaticType { - val connectorSession = session.toConnectorSession() - val metadata = getMetadata(session.toConnectorSession(), BindingName(handle.catalogName, BindingCase.SENSITIVE))!!.metadata - return metadata.getObjectType(connectorSession, handle.connectorHandle)!! - } - - private fun getMetadata(connectorSession: ConnectorSession, catalogName: BindingName): MetadataInformation? 
{ - val catalogKey = catalogMap.keys.firstOrNull { catalogName.isEquivalentTo(it) } ?: return null - val connector = connectorMap[catalogKey] ?: return null - return MetadataInformation(catalogKey, connector.getMetadata(session = connectorSession)) - } - - private class MetadataInformation( - internal val catalogName: String, - internal val metadata: ConnectorMetadata - ) -} diff --git a/partiql-lang/src/main/kotlin/org/partiql/lang/planner/transforms/plan/PlanTyper.kt b/partiql-lang/src/main/kotlin/org/partiql/lang/planner/transforms/plan/PlanTyper.kt deleted file mode 100644 index c87b5a758..000000000 --- a/partiql-lang/src/main/kotlin/org/partiql/lang/planner/transforms/plan/PlanTyper.kt +++ /dev/null @@ -1,1922 +0,0 @@ -/* - * Copyright Amazon.com, Inc. or its affiliates. All rights reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at: - * - * http://aws.amazon.com/apache2.0/ - * - * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific - * language governing permissions and limitations under the License. 
- */ - -package org.partiql.lang.planner.transforms.plan - -import com.amazon.ionelement.api.ElementType -import com.amazon.ionelement.api.StringElement -import com.amazon.ionelement.api.TextElement -import org.partiql.errors.Problem -import org.partiql.errors.ProblemHandler -import org.partiql.errors.UNKNOWN_PROBLEM_LOCATION -import org.partiql.lang.ast.passes.SemanticProblemDetails -import org.partiql.lang.ast.passes.inference.cast -import org.partiql.lang.eval.ExprValueType -import org.partiql.lang.eval.builtins.SCALAR_BUILTINS_DEFAULT -import org.partiql.lang.planner.PlanningProblemDetails -import org.partiql.lang.planner.transforms.PlannerSession -import org.partiql.lang.planner.transforms.impl.Metadata -import org.partiql.lang.planner.transforms.plan.PlanUtils.addType -import org.partiql.lang.planner.transforms.plan.PlanUtils.grabType -import org.partiql.lang.types.FunctionSignature -import org.partiql.lang.types.StaticTypeUtils -import org.partiql.lang.types.TypedOpParameter -import org.partiql.lang.types.UnknownArguments -import org.partiql.lang.util.cartesianProduct -import org.partiql.plan.Arg -import org.partiql.plan.Attribute -import org.partiql.plan.Binding -import org.partiql.plan.Case -import org.partiql.plan.ExcludeExpr -import org.partiql.plan.ExcludeStep -import org.partiql.plan.PlanNode -import org.partiql.plan.Property -import org.partiql.plan.Rel -import org.partiql.plan.Rex -import org.partiql.plan.Step -import org.partiql.plan.attribute -import org.partiql.plan.binding -import org.partiql.plan.rexId -import org.partiql.plan.util.PlanRewriter -import org.partiql.spi.BindingCase -import org.partiql.spi.BindingName -import org.partiql.spi.BindingPath -import org.partiql.types.AnyOfType -import org.partiql.types.AnyType -import org.partiql.types.BagType -import org.partiql.types.BoolType -import org.partiql.types.CollectionType -import org.partiql.types.DecimalType -import org.partiql.types.FloatType -import org.partiql.types.IntType -import 
org.partiql.types.ListType -import org.partiql.types.MissingType -import org.partiql.types.NullType -import org.partiql.types.NumberConstraint -import org.partiql.types.SexpType -import org.partiql.types.SingleType -import org.partiql.types.StaticType -import org.partiql.types.StringType -import org.partiql.types.StructType -import org.partiql.types.SymbolType -import org.partiql.types.TupleConstraint - -/** - * Types a given logical plan. - */ -internal object PlanTyper : PlanRewriter() { - - /** - * Given a [Rex], types the logical plan by adding the output Type Environment to each relational operator. - * - * Along with typing, this also validates expressions for typing issues. - */ - internal fun type(node: Rex, ctx: Context): Rex { - return visitRex(node, ctx) as Rex - } - - /** - * Used for maintaining state through the visitors - */ - internal class Context( - internal val input: Rel?, - internal val session: PlannerSession, - internal val metadata: Metadata, - internal val scopingOrder: ScopingOrder, - internal val customFunctionSignatures: List, - internal val tolerance: MinimumTolerance = MinimumTolerance.FULL, - internal val problemHandler: ProblemHandler - ) { - internal val inputTypeEnv = input?.let { PlanUtils.getTypeEnv(it) } ?: emptyList() - internal val allFunctions: Map> = - (SCALAR_BUILTINS_DEFAULT.map { it.signature.name to it.signature } + customFunctionSignatures.map { it.name to it }) - .groupBy({ it.first }, { it.second }) - } - - /** - * Scoping - */ - internal enum class ScopingOrder { - GLOBALS_THEN_LEXICAL, - LEXICAL_THEN_GLOBALS - } - - /** - * [FULL] -- CANNOT tolerate references to unresolved variables - * [PARTIAL] -- CAN tolerate references to unresolved variables - */ - internal enum class MinimumTolerance { - FULL, - PARTIAL - } - - // - // - // RELATIONAL ALGEBRA OPERATORS - // - // - - override fun visitRelBag(node: Rel.Bag, ctx: Context): PlanNode { - TODO("BAG OPERATORS are not supported by the PartiQLTypeEnvInferencer yet.") 
- } - - override fun visitRel(node: Rel, ctx: Context): Rel = super.visitRel(node, ctx) as Rel - - override fun visitRelJoin(node: Rel.Join, ctx: Context): Rel.Join { - val lhs = visitRel(node.lhs, ctx) - val rhs = typeRel(node.rhs, lhs, ctx) - val newJoin = node.copy( - common = node.common.copy( - typeEnv = lhs.getTypeEnv() + rhs.getTypeEnv(), - ) - ) - val predicateType = when (val condition = node.condition) { - null -> StaticType.BOOL - else -> { - val predicate = typeRex(condition, newJoin, ctx) - // verify `JOIN` predicate is bool. - // If an operand is always missing, an ExpressionAlwaysReturnsMissing error will be raised. - // If an operand is always NULL, or unionOf(NULL, MISSING), an ExpressionAlwaysReturnsNullOrMissing warning will be raised. - // If it could never be a bool, gives an incompatible data type for expression error - assertType(expected = StaticType.BOOL, actual = predicate.grabType() ?: handleMissingType(ctx), ctx) - - // continuation type (even in the case of an error) is [StaticType.BOOL] - StaticType.BOOL - } - } - return newJoin.copy( - condition = node.condition?.addType(predicateType) - ) - } - - /** - * Initial implementation of `EXCLUDE` schema inference. Until an RFC is finalized for `EXCLUDE` - * (https://github.com/partiql/partiql-spec/issues/39), this behavior is considered experimental and subject to - * change. - * - * So far this implementation includes - * - Excluding tuple attrs (e.g. t.a.b.c) - * - Excluding tuple wildcards (e.g. t.a.*.b) - * - Excluding collection indexes (e.g. t.a[0].b -- behavior subject to change; see below discussion) - * - Excluding collection wildcards (e.g. t.a[*].b) - * - * There are still discussion points regarding the following edge cases - * - EXCLUDE on a tuple attribute that doesn't exist -- give an error/warning? - * - currently no error - * - EXCLUDE on a tuple attribute that has duplicates -- give an error/warning? exclude one? exclude both? 
- * - currently excludes both w/ no error - * - EXCLUDE on a collection index as the last step -- mark element type as optional? - * - currently element type as-is - * - EXCLUDE on a collection index w/ remaining path steps -- mark last step's type as optional? - * - currently marks last step's type as optional - * - EXCLUDE on a binding tuple variable (e.g. SELECT ... EXCLUDE t FROM t) -- error? - * - currently a parser error - * - EXCLUDE on a union type -- give an error/warning? no-op? exclude on each type in union? - * - currently exclude on each union type - * - If SELECT list includes an attribute that is excluded, we could consider giving an error in PlanTyper or - * some other semantic pass - * - currently does not give an error - */ - override fun visitRelExclude(node: Rel.Exclude, ctx: Context): Rel.Exclude { - val input = visitRel(node.input, ctx) - val exprs = node.exprs - val typeEnv = input.getTypeEnv() - val newTypeEnv = exprs.fold(typeEnv) { tEnv, expr -> - excludeExpr(tEnv, expr, ctx) - } - return node.copy( - input = input, - common = node.common.copy( - typeEnv = newTypeEnv, - properties = input.getProperties() - ) - ) - } - - private fun attrEqualsExcludeRoot(attr: Attribute, expr: ExcludeExpr): Boolean { - val rootId = expr.root - return attr.name == rootId || (expr.rootCase == Case.INSENSITIVE && attr.name.equals(expr.root, ignoreCase = true)) - } - - private fun excludeExpr(attrs: List, expr: ExcludeExpr, ctx: Context): List { - val resultAttrs = mutableListOf() - val attrsExist = attrs.find { attr -> attrEqualsExcludeRoot(attr, expr) } != null - if (!attrsExist) { - handleUnresolvedExcludeExprRoot(expr.root, ctx) - } - attrs.forEach { attr -> - if (attrEqualsExcludeRoot(attr, expr)) { - if (expr.steps.isEmpty()) { - throw IllegalStateException("Empty `ExcludeExpr.steps` encountered. 
This should have been caught by the parser.") - } else { - val newType = excludeExprSteps(attr.type, expr.steps, lastStepAsOptional = false, ctx) - resultAttrs.add( - attr.copy( - type = newType - ) - ) - } - } else { - resultAttrs.add( - attr - ) - } - } - return resultAttrs - } - - private fun excludeExprSteps(type: StaticType, steps: List, lastStepAsOptional: Boolean, ctx: Context): StaticType { - fun excludeExprStepsStruct(s: StructType, steps: List, lastStepAsOptional: Boolean): StaticType { - val outputFields = mutableListOf() - val first = steps.first() - s.fields.forEach { field -> - when (first) { - is ExcludeStep.TupleAttr -> { - if (field.key == first.attr || (first.case == Case.INSENSITIVE && field.key.equals(first.attr, ignoreCase = true))) { - if (steps.size == 1) { - if (lastStepAsOptional) { - val newField = StructType.Field(field.key, field.value.asOptional()) - outputFields.add(newField) - } - } else { - outputFields.add(StructType.Field(field.key, excludeExprSteps(field.value, steps.drop(1), lastStepAsOptional, ctx))) - } - } else { - outputFields.add(field) - } - } - is ExcludeStep.TupleWildcard -> { - if (steps.size == 1) { - if (lastStepAsOptional) { - val newField = StructType.Field(field.key, field.value.asOptional()) - outputFields.add(newField) - } - } else { - outputFields.add(StructType.Field(field.key, excludeExprSteps(field.value, steps.drop(1), lastStepAsOptional, ctx))) - } - } - else -> { - // currently no change to field.value and no error thrown; could consider an error/warning in - // the future - outputFields.add(StructType.Field(field.key, field.value)) - } - } - } - return s.copy(fields = outputFields) - } - - fun excludeExprStepsCollection(c: CollectionType, steps: List, lastStepAsOptional: Boolean): StaticType { - var elementType = c.elementType - when (steps.first()) { - is ExcludeStep.CollectionIndex -> { - if (steps.size > 1) { - elementType = excludeExprSteps(elementType, steps.drop(1), lastStepAsOptional = true, ctx) - 
} - } - is ExcludeStep.CollectionWildcard -> { - if (steps.size > 1) { - elementType = - excludeExprSteps(elementType, steps.drop(1), lastStepAsOptional = lastStepAsOptional, ctx) - } - // currently no change to elementType if collection wildcard is last element; this behavior could - // change based on RFC definition - } - else -> { - // currently no change to elementType and no error thrown; could consider an error/warning in - // the future - } - } - return when (c) { - is BagType -> c.copy(elementType) - is ListType -> c.copy(elementType) - is SexpType -> c.copy(elementType) - } - } - - return when (type) { - is StructType -> excludeExprStepsStruct(type, steps, lastStepAsOptional) - is CollectionType -> excludeExprStepsCollection(type, steps, lastStepAsOptional) - is AnyOfType -> { - StaticType.unionOf( - type.types.map { - excludeExprSteps(it, steps, lastStepAsOptional, ctx) - }.toSet() - ) - } - else -> type - }.flatten() - } - - override fun visitRelUnpivot(node: Rel.Unpivot, ctx: Context): Rel.Unpivot { - val from = node - - val asSymbolicName = node.alias - ?: error("Unpivot alias is null. 
This wouldn't be the case if FromSourceAliasVisitorTransform was executed first.") - - val value = visitRex(from.value, ctx) as Rex - - val fromExprType = value.grabType() ?: handleMissingType(ctx) - - val valueType = getUnpivotValueType(fromExprType) - val typeEnv = mutableListOf(attribute(asSymbolicName, valueType)) - - from.at?.let { - val valueHasMissing = StaticTypeUtils.getTypeDomain(valueType).contains(ExprValueType.MISSING) - val valueOnlyHasMissing = valueHasMissing && StaticTypeUtils.getTypeDomain(valueType).size == 1 - when { - valueOnlyHasMissing -> { - typeEnv.add(attribute(it, StaticType.MISSING)) - } - valueHasMissing -> { - typeEnv.add(attribute(it, StaticType.STRING.asOptional())) - } - else -> { - typeEnv.add(attribute(it, StaticType.STRING)) - } - } - } - - node.by?.let { TODO("BY variable's inference is not implemented yet.") } - - return from.copy( - common = from.common.copy( - typeEnv = typeEnv - ), - value = value - ) - } - - override fun visitRelAggregate(node: Rel.Aggregate, ctx: Context): PlanNode { - val input = visitRel(node.input, ctx) - val calls = node.calls.map { binding(it.name, typeRex(it.value, input, ctx)) } - val groups = node.groups.map { binding(it.name, typeRex(it.value, input, ctx)) } - return node.copy( - calls = calls, - groups = groups, - common = node.common.copy( - typeEnv = groups.toAttributes(ctx) + calls.toAttributes(ctx) - ) - ) - } - - override fun visitRelProject(node: Rel.Project, ctx: Context): PlanNode { - val input = visitRel(node.input, ctx) - val typeEnv = node.bindings.flatMap { binding -> - val type = inferType(binding.value, input, ctx) - when (binding.value.isProjectAll()) { - true -> { - when (val structType = type as? 
StructType) { - null -> { - handleIncompatibleDataTypeForExprError(StaticType.STRUCT, type, ctx) - listOf(attribute(binding.name, type)) - } - else -> structType.fields.map { entry -> attribute(entry.key, entry.value) } - } - } - false -> listOf(attribute(binding.name, type)) - } - } - return node.copy( - input = input, - common = node.common.copy( - typeEnv = typeEnv, - properties = input.getProperties() - ) - ) - } - - override fun visitRelScan(node: Rel.Scan, ctx: Context): Rel { - val value = visitRex( - node.value, - Context( - ctx.input, - ctx.session, - ctx.metadata, - ScopingOrder.GLOBALS_THEN_LEXICAL, - ctx.customFunctionSignatures, - ctx.tolerance, - ctx.problemHandler - ) - ) as Rex - val asSymbolicName = node.alias ?: error("From Source Alias is null when it should not be.") - val valueType = value.grabType() ?: handleMissingType(ctx) - val sourceType = getElementTypeForFromSource(valueType) - - node.at?.let { TODO("AT is not supported yet.") } - node.by?.let { TODO("BY is not supported yet.") } - - return when (value) { - is Rex.Query.Collection -> when (value.constructor) { - null -> value.rel - else -> { - val typeEnv = listOf(attribute(asSymbolicName, sourceType)) - node.copy( - value = value, - common = node.common.copy( - typeEnv = typeEnv - ) - ) - } - } - else -> { - val typeEnv = listOf(attribute(asSymbolicName, sourceType)) - node.copy( - value = value, - common = node.common.copy( - typeEnv = typeEnv - ) - ) - } - } - } - - override fun visitRelFilter(node: Rel.Filter, ctx: Context): PlanNode { - val input = visitRel(node.input, ctx) - val condition = typeRex(node.condition, input, ctx) - assertType(StaticType.BOOL, condition.grabType() ?: handleMissingType(ctx), ctx) - return node.copy( - condition = condition, - input = input, - common = node.common.copy( - typeEnv = input.getTypeEnv(), - properties = input.getProperties() - ) - ) - } - - override fun visitRelSort(node: Rel.Sort, ctx: Context): PlanNode { - val input = visitRel(node.input, 
ctx) - return node.copy( - input = input, - common = node.common.copy( - typeEnv = input.getTypeEnv(), - properties = setOf(Property.ORDERED) - ) - ) - } - - override fun visitRelFetch(node: Rel.Fetch, ctx: Context): PlanNode { - val input = visitRel(node.input, ctx) - val limit = typeRex(node.limit, input, ctx) - val offset = typeRex(node.offset, input, ctx) - limit.grabType()?.let { assertAsInt(it, ctx) } - offset.grabType()?.let { assertAsInt(it, ctx) } - return node.copy( - input = input, - common = node.common.copy( - typeEnv = input.getTypeEnv(), - properties = input.getProperties() - ), - limit = limit, - offset = offset - ) - } - - // - // - // EXPRESSIONS - // - // - - override fun visitRexQueryScalarPivot(node: Rex.Query.Scalar.Pivot, ctx: Context): PlanNode { - // TODO: This is to match the StaticTypeInferenceVisitorTransform logic, but needs to be changed - return node.copy( - type = StaticType.STRUCT - ) - } - - override fun visitRexQueryScalarSubquery(node: Rex.Query.Scalar.Subquery, ctx: Context): PlanNode { - val query = visitRex(node.query, ctx) as Rex.Query.Collection - // If it is SELECT VALUE, do not coerce. - if (query.constructor != null) { - val type = query.type as? 
CollectionType - return node.copy(query = query, type = type?.elementType?.flatten()) - } - val type = when (val queryType = query.grabType() ?: handleMissingType(ctx)) { - is CollectionType -> queryType.elementType - else -> error("Query collection subqueries should always return a CollectionType.") - } - val resultType = when (type) { - is StructType -> { - if (StaticTypeUtils.isClosedSafe(type) == true && type.fields.size == 1) { - type.fields[0].value - } else { - handleCoercionError(ctx, type) - StaticType.ANY - } - } - else -> { - handleCoercionError(ctx, type) - StaticType.ANY - } - } - return node.copy( - query = query, - type = resultType.flatten() - ) - } - - override fun visitRex(node: Rex, ctx: Context): PlanNode = super.visitRex(node, ctx) - - override fun visitRexAgg(node: Rex.Agg, ctx: Context): PlanNode { - val funcName = node.id - val args = node.args.map { visitRex(it, ctx) as Rex } - // unwrap the type if this is a collectionType - val argType = when (val type = args[0].grabType() ?: handleMissingType(ctx)) { - is CollectionType -> type.elementType - else -> type - } - return node.copy( - type = computeReturnTypeForAggFunc(funcName, argType, ctx), - args = args - ) - } - - private fun computeReturnTypeForAggFunc(funcName: String, elementType: StaticType, ctx: Context): StaticType { - val elementTypes = elementType.allTypes - - fun List.convertMissingToNull() = toMutableSet().apply { - if (contains(StaticType.MISSING)) { - remove(StaticType.MISSING) - add(StaticType.NULL) - } - } - - fun StaticType.isUnknownOrNumeric() = isUnknown() || isNumeric() - - return when (funcName) { - "count" -> StaticType.INT - // In case that any element is MISSING or there is no element, we should return NULL - "max", "min" -> StaticType.unionOf(elementTypes.convertMissingToNull()) - "sum" -> when { - elementTypes.none { it.isUnknownOrNumeric() } -> { - handleInvalidInputTypeForAggFun(funcName, elementType, StaticType.unionOf(StaticType.NULL_OR_MISSING, 
StaticType.NUMERIC).flatten(), ctx) - StaticType.unionOf(StaticType.NULL, StaticType.NUMERIC) - } - // If any single type is mismatched, We should add MISSING to the result types set to indicate there is a chance of data mismatch error - elementTypes.any { !it.isUnknownOrNumeric() } -> StaticType.unionOf( - elementTypes.filter { it.isUnknownOrNumeric() }.toMutableSet().apply { add(StaticType.MISSING) } - ) - // In case that any element is MISSING or there is no element, we should return NULL - else -> StaticType.unionOf(elementTypes.convertMissingToNull()) - } - // "avg" returns DECIMAL or NULL - "avg" -> when { - elementTypes.none { it.isUnknownOrNumeric() } -> { - handleInvalidInputTypeForAggFun(funcName, elementType, StaticType.unionOf(StaticType.NULL_OR_MISSING, StaticType.NUMERIC).flatten(), ctx) - StaticType.unionOf(StaticType.NULL, StaticType.DECIMAL) - } - else -> StaticType.unionOf( - mutableSetOf().apply { - if (elementTypes.any { it.isUnknown() }) { add(StaticType.NULL) } - if (elementTypes.any { it.isNumeric() }) { add(StaticType.DECIMAL) } - // If any single type is mismatched, We should add MISSING to the result types set to indicate there is a chance of data mismatch error - if (elementTypes.any { !it.isUnknownOrNumeric() }) { add(StaticType.MISSING) } - } - ) - } - else -> error("Internal Error: Unsupported aggregate function. 
This probably indicates a parser bug.") - }.flatten() - } - - private fun handleInvalidInputTypeForAggFun(funcName: String, actualType: StaticType, expectedType: StaticType, ctx: Context) { - ctx.problemHandler.handleProblem( - Problem( - sourceLocation = UNKNOWN_PROBLEM_LOCATION, - details = SemanticProblemDetails.InvalidArgumentTypeForFunction( - functionName = funcName, - expectedType = expectedType, - actualType = actualType - ) - ) - ) - } - - override fun visitRexQueryScalar(node: Rex.Query.Scalar, ctx: Context): PlanNode = super.visitRexQueryScalar(node, ctx) - - override fun visitRexQuery(node: Rex.Query, ctx: Context): PlanNode = super.visitRexQuery(node, ctx) - - override fun visitRexQueryCollection(node: Rex.Query.Collection, ctx: Context): PlanNode { - val input = visitRel(node.rel, ctx) - val typeConstructor = when (input.getProperties().contains(Property.ORDERED)) { - true -> { type: StaticType -> ListType(type) } - false -> { type: StaticType -> BagType(type) } - } - return when (val constructor = node.constructor) { - null -> { - node.copy( - rel = input, - type = typeConstructor.invoke( - StructType( - fields = input.getTypeEnv().map { attribute -> - StructType.Field(attribute.name, attribute.type) - }, - contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true), TupleConstraint.Ordered) - ) - ) - ) - } - else -> { - val constructorType = typeRex(constructor, input, ctx).grabType() ?: handleMissingType(ctx) - return node.copy( - type = typeConstructor.invoke(constructorType) - ) - } - } - } - - override fun visitRexPath(node: Rex.Path, ctx: Context): Rex.Path { - val ids = grabFirstIds(node) - val qualifier = ids.getOrNull(0)?.qualifier ?: Rex.Id.Qualifier.UNQUALIFIED - val path = BindingPath(ids.map { rexIdToBindingName(it) }) - val pathAndType = findBind(path, qualifier, ctx) - val remainingFirstIndex = pathAndType.levelsMatched - 1 - val remaining = when (remainingFirstIndex > 
node.steps.lastIndex) { - true -> emptyList() - false -> node.steps.subList(remainingFirstIndex, node.steps.size) - } - var currentType = pathAndType.type - remaining.forEach { pathComponent -> - currentType = when (pathComponent) { - is Step.Key -> { - val type = inferPathComponentExprType(currentType, pathComponent, ctx) - type - } - is Step.Wildcard -> currentType - is Step.Unpivot -> error("Not implemented yet") - } - } - return node.copy( - type = currentType - ) - } - - override fun visitRexId(node: Rex.Id, ctx: Context): Rex.Id { - val bindingPath = BindingPath(listOf(rexIdToBindingName(node))) - return node.copy(type = findBind(bindingPath, node.qualifier, ctx).type) - } - - override fun visitRexBinary(node: Rex.Binary, ctx: Context): Rex.Binary { - val lhs = visitRex(node.lhs, ctx).grabType() ?: handleMissingType(ctx) - val rhs = visitRex(node.rhs, ctx).grabType() ?: handleMissingType(ctx) - val args = listOf(lhs, rhs) - val type = when (node.op) { - Rex.Binary.Op.PLUS, Rex.Binary.Op.MINUS, Rex.Binary.Op.TIMES, Rex.Binary.Op.DIV, Rex.Binary.Op.MODULO -> when (hasValidOperandTypes(args, node.op.name, ctx) { it.isNumeric() }) { - true -> computeReturnTypeForNAry(args, PlanTyper::inferBinaryArithmeticOp) - false -> StaticType.NUMERIC // continuation type to prevent incompatible types and unknown errors from propagating - } - Rex.Binary.Op.BITWISE_AND -> when (hasValidOperandTypes(args, node.op.name, ctx) { it is IntType }) { - true -> computeReturnTypeForNAry(args, PlanTyper::inferBinaryArithmeticOp) - false -> StaticType.unionOf(StaticType.INT2, StaticType.INT4, StaticType.INT8, StaticType.INT) // continuation type to prevent incompatible types and unknown errors from propagating - } - - Rex.Binary.Op.CONCAT -> when (hasValidOperandTypes(args, node.op.name, ctx) { it.isText() }) { - true -> computeReturnTypeForNAry(args, PlanTyper::inferConcatOp) - false -> StaticType.STRING // continuation type to prevent incompatible types and unknown errors from 
propagating - } - Rex.Binary.Op.AND, Rex.Binary.Op.OR -> inferNaryLogicalOp(args, node.op.name, ctx) - Rex.Binary.Op.EQ, Rex.Binary.Op.NEQ -> when (operandsAreComparable(args, node.op.name, ctx)) { - true -> computeReturnTypeForNAry(args, PlanTyper::inferEqNeOp) - false -> StaticType.BOOL // continuation type to prevent incompatible types and unknown errors from propagating - } - Rex.Binary.Op.LT, Rex.Binary.Op.GT, Rex.Binary.Op.LTE, Rex.Binary.Op.GTE -> when (operandsAreComparable(args, node.op.name, ctx)) { - true -> computeReturnTypeForNAry(args, PlanTyper::inferComparatorOp) - false -> StaticType.BOOL // continuation type prevent incompatible types and unknown errors from propagating - } - } - return node.copy(type = type) - } - - override fun visitRexUnary(node: Rex.Unary, ctx: Context): PlanNode { - val valueType = visitRex(node.value, ctx).grabType() ?: handleMissingType(ctx) - val type = when (node.op) { - Rex.Unary.Op.NOT -> when (hasValidOperandTypes(listOf(valueType), node.op.name, ctx) { it is BoolType }) { - true -> computeReturnTypeForUnary(valueType, PlanTyper::inferNotOp) - false -> StaticType.BOOL // continuation type to prevent incompatible types and unknown errors from propagating - } - Rex.Unary.Op.POS -> when (hasValidOperandTypes(listOf(valueType), node.op.name, ctx) { it.isNumeric() }) { - true -> computeReturnTypeForUnary(valueType, PlanTyper::inferUnaryArithmeticOp) - false -> StaticType.NUMERIC - } - Rex.Unary.Op.NEG -> when (hasValidOperandTypes(listOf(valueType), node.op.name, ctx) { it.isNumeric() }) { - true -> computeReturnTypeForUnary(valueType, PlanTyper::inferUnaryArithmeticOp) - false -> StaticType.NUMERIC - } - } - return node.copy(type = type) - } - - // This type comes from RexConverter - override fun visitRexLit(node: Rex.Lit, ctx: Context): Rex.Lit = node - - override fun visitRexCollection(node: Rex.Collection, ctx: Context): PlanNode = super.visitRexCollection(node, ctx) - - override fun visitRexCollectionArray(node: 
Rex.Collection.Array, ctx: Context): PlanNode { - val typedValues = node.values.map { visitRex(it, ctx) as Rex } - val elementType = AnyOfType(typedValues.map { it.grabType() ?: handleMissingType(ctx) }.toSet()).flatten() - return node.copy(type = ListType(elementType), values = typedValues) - } - - override fun visitRexCollectionBag(node: Rex.Collection.Bag, ctx: Context): PlanNode { - val typedValues = node.values.map { visitRex(it, ctx) } - val elementType = AnyOfType(typedValues.map { it.grabType()!! }.toSet()).flatten() - return node.copy(type = BagType(elementType)) - } - - override fun visitRexCall(node: Rex.Call, ctx: Context): Rex.Call { - val processedNode = processRexCall(node, ctx) - visitRexCallManual(processedNode, ctx)?.let { return it } - val funcName = node.id - val signatures = ctx.allFunctions[funcName] - val arguments = processedNode.args.getTypes(ctx) - if (signatures == null) { - handleNoSuchFunctionError(ctx, funcName) - return node.copy(type = StaticType.ANY) - } - - var types: MutableSet = mutableSetOf() - val funcsMatchingArity = signatures.filter { it.arity.contains(arguments.size) } - if (funcsMatchingArity.isEmpty()) { - handleIncorrectNumberOfArgumentsToFunctionCallError(funcName, getMinMaxArities(signatures).first..getMinMaxArities(signatures).second, arguments.size, ctx) - } else { - if (node.type != null) { - return processedNode.copy(type = node.type) - } - for (sign in funcsMatchingArity) { - when (sign.unknownArguments) { - UnknownArguments.PROPAGATE -> types.add(returnTypeForPropagatingFunction(sign, arguments, ctx)) - UnknownArguments.PASS_THRU -> types.add(returnTypeForPassThruFunction(sign, arguments)) - } - } - } - - return processedNode.copy(type = StaticType.unionOf(types).flatten()) - } - - private fun getMinMaxArities(funcs: List): Pair { - val minArity = funcs.map { it.arity.first }.minOrNull() ?: Int.MAX_VALUE - val maxArity = funcs.map { it.arity.last }.maxOrNull() ?: Int.MIN_VALUE - - return Pair(minArity, maxArity) 
- } - - override fun visitRexSwitch(node: Rex.Switch, ctx: Context): PlanNode { - val match = node.match?.let { visitRex(it, ctx) as Rex } - val caseValueType = when (match) { - null -> null - else -> { - val type = match.grabType() ?: handleMissingType(ctx) - // comparison never succeeds if caseValue is an unknown - handleExpressionAlwaysReturnsUnknown(type, ctx) - type - } - } - val check = when (caseValueType) { - null -> { conditionType: StaticType -> - conditionType.allTypes.none { it is BoolType } - } - else -> { conditionType: StaticType -> - !StaticTypeUtils.areStaticTypesComparable(caseValueType, conditionType) - } - } - val branches = node.branches.map { branch -> - val condition = visitRex(branch.condition, ctx) as Rex - val value = visitRex(branch.value, ctx) as Rex - val conditionType = condition.grabType() ?: handleMissingType(ctx) - // comparison never succeeds if whenExpr is unknown -> appropriate warning or error - if (conditionType.isUnknown()) { - handleExpressionAlwaysReturnsUnknown(conditionType, ctx) - } - // if caseValueType is incomparable to whenExprType -> data type mismatch - else if (check.invoke(conditionType)) { - handleIncompatibleDataTypesForOpError( - ctx, - actualTypes = listOfNotNull(caseValueType, conditionType), - op = "CASE" - ) - } - branch.copy(condition = condition, value = value) - } - val valueTypes = branches.map { it.value }.map { it.grabType() ?: handleMissingType(ctx) } - - // keep all the `THEN` expr types even if the comparison doesn't succeed - val default = node.default?.let { visitRex(it, ctx) } - val type = inferCaseWhenBranches(valueTypes, default?.grabType()) - return node.copy( - match = match, - branches = branches, - type = type - ) - } - - override fun visitRexTuple(node: Rex.Tuple, ctx: Context): PlanNode { - val fields = node.fields.map { field -> - field.copy( - name = visitRex(field.name, ctx) as Rex, - value = visitRex(field.value, ctx) as Rex - ) - } - - val structFields = mutableListOf() - var 
closedContent = true - fields.forEach { field -> - when (val name = field.name) { - is Rex.Lit -> - // A field is only included in the StructType if its key is a text literal - if (name.value is TextElement) { - val value = name.value as TextElement - val type = field.value.grabType() ?: handleMissingType(ctx) - structFields.add(StructType.Field(value.textValue, type)) - } - else -> { - // A field with a non-literal key name is not included. - // If the non-literal could be text, StructType will have open content. - val nameType = field.name.grabType() ?: handleMissingType(ctx) - if (nameType.allTypes.any { it.isText() }) { - closedContent = false - } - } - } - } - - val hasDuplicateKeys = structFields - .groupingBy { it.key } - .eachCount() - .any { it.value > 1 } - - return node.copy( - type = StructType( - structFields, - contentClosed = closedContent, - constraints = setOf(TupleConstraint.Open(closedContent.not()), TupleConstraint.UniqueAttrs(hasDuplicateKeys.not())) - ), - fields = fields - ) - } - - override fun visitArgValue(node: Arg.Value, ctx: Context): PlanNode { - return node.copy( - value = visitRex(node.value, ctx) as Rex - ) - } - - // - // - // HELPER METHODS - // - // - - private fun inferCaseWhenBranches(thenExprsTypes: List, elseExpr: StaticType?): StaticType { - val elseExprType = when (elseExpr) { - // If there is no ELSE clause in the expression, it possible that - // none of the WHEN clauses succeed and the output of CASE WHEN expression - // ends up being NULL - null -> StaticType.NULL - else -> elseExpr - } - - if (thenExprsTypes.any { it is AnyType } || elseExprType is AnyType) { - return StaticType.ANY - } - - val possibleTypes = thenExprsTypes + elseExprType - return AnyOfType(possibleTypes.toSet()).flatten() - } - - /** - * Assumes that [node] has been pre-processed. - */ - private fun visitRexCallManual(node: Rex.Call, ctx: Context): Rex.Call? 
{ - return when (node.id) { - RexConverter.Constants.inCollection -> visitRexCallInCollection(node, ctx) - RexConverter.Constants.between -> visitRexCallBetween(node, ctx) - RexConverter.Constants.like, RexConverter.Constants.likeEscape -> visitRexCallLike(node, ctx) - RexConverter.Constants.canCast, RexConverter.Constants.canLosslessCast, RexConverter.Constants.isType -> node.copy(type = StaticType.BOOL) - RexConverter.Constants.coalesce -> visitRexCallCoalesce(node, ctx) - RexConverter.Constants.nullIf -> visitRexCallNullIf(node, ctx) - RexConverter.Constants.cast -> visitRexCallCast(node, ctx) - RexConverter.Constants.outerBagExcept, - RexConverter.Constants.outerBagIntersect, - RexConverter.Constants.outerBagUnion, - RexConverter.Constants.outerSetExcept, - RexConverter.Constants.outerSetIntersect, - RexConverter.Constants.outerSetUnion -> TODO("Bag Operators have not been implemented yet.") - else -> null - } - } - - private fun processRexCall(node: Rex.Call, ctx: Context): Rex.Call { - val args = node.args.visit(ctx) - return node.copy(args = args) - } - - /** - * [node] must be pre-processed - */ - private fun visitRexCallNullIf(node: Rex.Call, ctx: Context): Rex.Call { - // check for comparability of the two arguments to `NULLIF` - operandsAreComparable(node.args.getTypes(ctx), node.id, ctx) - - // output type will be the first argument's types along with `NULL` (even in the case of an error) - val possibleOutputTypes = node.args[0].grabType()?.asNullable() ?: handleMissingType(ctx) - return node.copy(type = possibleOutputTypes) - } - - /** - * [node] must be pre-processed - */ - private fun visitRexCallCast(node: Rex.Call, ctx: Context): Rex.Call { - val sourceType = node.args[0].grabType() ?: handleMissingType(ctx) - val targetType = node.args[1].grabType() ?: handleMissingType(ctx) - val targetTypeParam = targetType.toTypedOpParameter() - val castOutputType = sourceType.cast(targetType).let { - if (targetTypeParam.validationThunk == null) { - // There is 
no additional validation for this parameter, return this type as-is - it - } else { - StaticType.unionOf(StaticType.MISSING, it) - } - } - return node.copy(type = castOutputType) - } - - private fun StaticType.toTypedOpParameter(): TypedOpParameter { - return TypedOpParameter(staticType = this) - } - - /** - * [node] must be pre-processed - */ - private fun visitRexCallCoalesce(node: Rex.Call, ctx: Context): Rex.Call { - var allMissing = true - val outputTypes = mutableSetOf() - - val args = node.args.map { visitArg(it, ctx) } - for (arg in args) { - val staticType = arg.grabType() ?: handleMissingType(ctx) - val staticTypes = staticType.allTypes - outputTypes += staticTypes - // If at least one known type is found, remove null and missing from the result - // It means there is at least one type which doesn't contain unknown types. - if (staticTypes.all { type -> !type.isNullOrMissing() }) { - outputTypes.remove(StaticType.MISSING) - outputTypes.remove(StaticType.NULL) - break - } - if (!staticTypes.contains(StaticType.MISSING)) { - allMissing = false - } - } - // If every argument has MISSING as one of it's types, - // then output should contain MISSING and not otherwise. 
- if (!allMissing) { - outputTypes.remove(StaticType.MISSING) - } - - return node.copy( - type = when (outputTypes.size) { - 1 -> outputTypes.first() - else -> StaticType.unionOf(outputTypes) - } - ) - } - - /** - * [node] must be pre-processed - */ - private fun visitRexCallLike(node: Rex.Call, ctx: Context): Rex.Call { - val argTypes = node.args.getTypes(ctx) - val argsAllTypes = argTypes.map { it.allTypes } - - if (!hasValidOperandTypes(argTypes, "LIKE", ctx) { it.isText() }) { - return node.copy(type = StaticType.BOOL) - } - - val possibleReturnTypes: MutableSet = mutableSetOf() - argsAllTypes.cartesianProduct().forEach { argsChildType -> - val argsSingleType = argsChildType.map { it as SingleType } - when { - // If any one of the operands is null, return NULL - argsSingleType.any { it is NullType } -> possibleReturnTypes.add(StaticType.NULL) - // Arguments for LIKE need to be text type - argsSingleType.all { it.isText() } -> { - possibleReturnTypes.add(StaticType.BOOL) - // If the optional escape character is provided, it can result in failure even if the type is text (string, in this case) - // This is because the escape character needs to be a single character (string with length 1), - // Even if the escape character is of length 1, escape sequence can be incorrect. 
- if (node.args.getOrNull(2) != null) { - possibleReturnTypes.add(StaticType.MISSING) - } - } - else -> possibleReturnTypes.add(StaticType.MISSING) - } - } - - return node.copy(type = StaticType.unionOf(possibleReturnTypes).flatten()) - } - - /** - * [node] must be pre-processed - */ - private fun visitRexCallInCollection(node: Rex.Call, ctx: Context): Rex.Call { - val operands = node.args.getTypes(ctx) - val lhs = operands[0] - val rhs = operands[1] - var errorAdded = false - - // check for an unknown operand type - if (expressionAlwaysReturnsUnknown(operands, ctx)) { - errorAdded = true - } - - // if none of the [rhs] types are [CollectionType]s with comparable element types to [lhs], then data type - // mismatch error - if (!rhs.isUnknown() && rhs.allTypes.none { - it is CollectionType && StaticTypeUtils.areStaticTypesComparable(it.elementType, lhs) - } - ) { - handleIncompatibleDataTypesForOpError(ctx, operands, "IN") - errorAdded = true - } - - return when (errorAdded) { - true -> StaticType.BOOL - false -> computeReturnTypeForNAryIn(operands) - }.let { node.copy(type = it) } - } - - private fun computeReturnTypeForNAryIn(argTypes: List): StaticType { - require(argTypes.size >= 2) { "IN must have at least two args" } - val leftTypes = argTypes.first().allTypes - val rightTypes = argTypes.drop(1).flatMap { it.allTypes } - - val finalTypes = leftTypes - .flatMap { left -> - rightTypes.flatMap { right -> - computeReturnTypeForBinaryIn(left, right).allTypes - } - }.distinct() - - return when (finalTypes.size) { - 1 -> finalTypes.first() - else -> StaticType.unionOf(*finalTypes.toTypedArray()) - } - } - - private fun computeReturnTypeForBinaryIn(left: StaticType, right: StaticType): StaticType = - when (right) { - is NullType -> when (left) { - is MissingType -> StaticType.MISSING - else -> StaticType.NULL - } - is MissingType -> StaticType.MISSING - is CollectionType -> when (left) { - is NullType -> StaticType.NULL - is MissingType -> StaticType.MISSING - else -> 
{ - val rightElemTypes = right.elementType.allTypes - val possibleTypes = mutableSetOf() - if (rightElemTypes.any { it is MissingType }) { - possibleTypes.add(StaticType.MISSING) - } - if (rightElemTypes.any { it is NullType }) { - possibleTypes.add(StaticType.NULL) - } - if (rightElemTypes.any { !it.isNullOrMissing() }) { - possibleTypes.add(StaticType.BOOL) - } - StaticType.unionOf(possibleTypes).flatten() - } - } - else -> when (left) { - is NullType -> StaticType.unionOf(StaticType.NULL, StaticType.MISSING) - else -> StaticType.MISSING - } - } - - /** - * [node] must be pre-processed - */ - private fun visitRexCallBetween(node: Rex.Call, ctx: Context): Rex.Call { - val argTypes = listOf(node.args[0], node.args[1], node.args[2]).getTypes(ctx) - if (!operandsAreComparable(argTypes, node.id, ctx)) { - return node.copy(type = StaticType.BOOL) - } - - val argsAllTypes = argTypes.map { it.allTypes } - val possibleReturnTypes: MutableSet = mutableSetOf() - - argsAllTypes.cartesianProduct().forEach { argsChildType -> - val argsSingleType = argsChildType.map { it as SingleType } - when { - // If any one of the operands is null or missing, return NULL - argsSingleType.any { it is NullType || it is MissingType } -> possibleReturnTypes.add(StaticType.NULL) - StaticTypeUtils.areStaticTypesComparable( - argsSingleType[0], - argsSingleType[1] - ) || StaticTypeUtils.areStaticTypesComparable(argsSingleType[0], argsSingleType[2]) -> possibleReturnTypes.add(StaticType.BOOL) - else -> possibleReturnTypes.add(StaticType.MISSING) - } - } - return node.copy(type = StaticType.unionOf(possibleReturnTypes).flatten()) - } - - private fun List.getTypes(ctx: Context): List = this.map { it.grabType() ?: handleMissingType(ctx) } - - private fun List.visit(ctx: Context): List = this.map { arg -> - when (arg) { - is Arg.Value -> { - val rex = visitRex(arg.value, ctx) as Rex - arg.copy(value = rex) - } - is Arg.Type -> arg - } - } - - /** - * Verifies the given [actual] has type [expected]. 
If [actual] is unknown, a null or missing - * error is given. If [actual] could never be [expected], an incompatible data types for - * expression error is given. - */ - private fun assertType(expected: StaticType, actual: StaticType, ctx: Context) { - // Relates to `verifyExpressionType` - if (actual.isUnknown()) { - } else if (actual.allTypes.none { it == expected }) { - handleIncompatibleDataTypeForExprError( - expectedType = expected, - actualType = actual, - ctx = ctx - ) - } - } - - private fun getElementTypeForFromSource(fromSourceType: StaticType): StaticType = - when (fromSourceType) { - is BagType -> fromSourceType.elementType - is ListType -> fromSourceType.elementType - is AnyType -> StaticType.ANY - is AnyOfType -> AnyOfType(fromSourceType.types.map { getElementTypeForFromSource(it) }.toSet()) - // All the other types coerce into a bag of themselves (including null/missing/sexp). - else -> fromSourceType - } - - private fun Rel.getTypeEnv() = PlanUtils.getTypeEnv(this) - - private fun Rel.getProperties() = this.getCommon().properties - - private fun Rel.getCommon() = when (this) { - is Rel.Aggregate -> this.common - is Rel.Bag -> this.common - is Rel.Fetch -> this.common - is Rel.Filter -> this.common - is Rel.Join -> this.common - is Rel.Project -> this.common - is Rel.Scan -> this.common - is Rel.Sort -> this.common - is Rel.Unpivot -> this.common - is Rel.Exclude -> this.common - } - - private fun inferPathComponentExprType( - previousComponentType: StaticType, - currentPathComponent: Step.Key, - ctx: Context - ): StaticType = - when (previousComponentType) { - is AnyType -> StaticType.ANY - is StructType -> inferStructLookupType( - currentPathComponent, - previousComponentType - ).flatten() - is ListType, - is SexpType -> { - val previous = previousComponentType as CollectionType // help Kotlin's type inference to be more specific - val key = visitRex(currentPathComponent.value, ctx = ctx) - if (key.grabType() is IntType) { - previous.elementType - 
} else { - StaticType.MISSING - } - } - is AnyOfType -> { - when (previousComponentType.types.size) { - 0 -> throw IllegalStateException("Cannot path on an empty StaticType union") - else -> { - val prevTypes = previousComponentType.allTypes - if (prevTypes.any { it is AnyType }) { - StaticType.ANY - } else { - val staticTypes = prevTypes.map { inferPathComponentExprType(it, currentPathComponent, ctx) } - AnyOfType(staticTypes.toSet()).flatten() - } - } - } - } - else -> StaticType.MISSING - } - - private fun inferStructLookupType( - currentPathComponent: Step.Key, - struct: StructType - ): StaticType = - when (val key = currentPathComponent.value) { - is Rex.Lit -> { - if (key.value is StringElement) { - val case = rexCaseToBindingCase(currentPathComponent.case) - ReferenceResolver.inferStructLookup(struct, BindingName(key.value.asAnyElement().stringValue, case)) - ?: when (struct.contentClosed) { - true -> StaticType.MISSING - false -> StaticType.ANY - } - } else { - // Should this branch result in an error? - StaticType.MISSING - } - } - else -> { - StaticType.MISSING - } - } - - private fun rexBindingNameToLangBindingName(name: BindingName) = org.partiql.lang.eval.BindingName( - name.name, - when (name.bindingCase) { - BindingCase.SENSITIVE -> org.partiql.lang.eval.BindingCase.SENSITIVE - BindingCase.INSENSITIVE -> org.partiql.lang.eval.BindingCase.INSENSITIVE - } - ) - - private fun rexIdToBindingName(node: Rex.Id): BindingName = BindingName( - node.name, - rexCaseToBindingCase(node.case) - ) - - private fun List.toAttributes(ctx: Context) = this.map { attribute(it.name, it.grabType() ?: handleMissingType(ctx)) } - - private fun inferConcatOp(leftType: SingleType, rightType: SingleType): SingleType { - fun checkUnconstrainedText(type: SingleType) = type is SymbolType || type is StringType && type.lengthConstraint is StringType.StringLengthConstraint.Unconstrained - - return when { - // Propagate missing as missing. 
Missing has precedence over null - leftType is MissingType || rightType is MissingType -> StaticType.MISSING - leftType is NullType || rightType is NullType -> StaticType.NULL - !leftType.isText() || !rightType.isText() -> StaticType.MISSING - checkUnconstrainedText(leftType) || checkUnconstrainedText(rightType) -> StaticType.STRING - else -> { // Constrained string types (char & varchar) - val leftLength = ((leftType as StringType).lengthConstraint as StringType.StringLengthConstraint.Constrained).length - val rightLength = ((rightType as StringType).lengthConstraint as StringType.StringLengthConstraint.Constrained).length - val sum = leftLength.value + rightLength.value - val newConstraint = when { - leftLength is NumberConstraint.UpTo || rightLength is NumberConstraint.UpTo -> NumberConstraint.UpTo(sum) - else -> NumberConstraint.Equals(sum) - } - StringType(StringType.StringLengthConstraint.Constrained(newConstraint)) - } - } - } - - private fun inferUnaryArithmeticOp(type: SingleType): SingleType = when (type) { - // Propagate NULL or MISSING - is NullType -> StaticType.NULL - is MissingType -> StaticType.MISSING - is DecimalType, is IntType, is FloatType -> type - else -> StaticType.MISSING - } - - private fun computeReturnTypeForUnary( - argStaticType: StaticType, - unaryOpInferencer: (SingleType) -> SingleType - ): StaticType { - val argSingleTypes = argStaticType.allTypes.map { it as SingleType } - val possibleReturnTypes = argSingleTypes.map { st -> unaryOpInferencer(st) } - - return StaticType.unionOf(possibleReturnTypes.toSet()).flatten() - } - - private fun inferNotOp(type: SingleType): SingleType = when (type) { - // Propagate NULL or MISSING - is NullType -> StaticType.NULL - is MissingType -> StaticType.MISSING - is BoolType -> type - else -> StaticType.MISSING - } - - private fun inferNaryLogicalOp(argsStaticType: List, op: String, ctx: Context): StaticType { - return when (hasValidOperandTypes(argsStaticType, op, ctx) { it is BoolType }) { - true 
-> { - val argsSingleTypes = argsStaticType.map { argStaticType -> - argStaticType.allTypes.map { singleType -> singleType as SingleType } - } - val argsSingleTypeCombination = argsSingleTypes.cartesianProduct() - val possibleResultTypes = argsSingleTypeCombination.map { argsSingleType -> - getTypeForNAryLogicalOperations(argsSingleType) - }.toSet() - - StaticType.unionOf(possibleResultTypes).flatten() - } - false -> StaticType.BOOL // continuation type to prevent incompatible types and unknown errors from propagating - } - } - - private fun getTypeForNAryLogicalOperations(args: List): StaticType = when { - // Logical operands need to be of Boolean Type - args.all { it == StaticType.BOOL } -> StaticType.BOOL - // If any of the arguments is boolean, then the return type can be boolean because of short-circuiting - // in logical ops. For e.g. "TRUE OR ANY" returns TRUE. "FALSE AND ANY" returns FALSE. But in the case - // where the other arg is an incompatible type (not an unknown or bool), the result type is MISSING. - args.any { it == StaticType.BOOL } -> when { - // If other argument is missing, then return union(bool, missing) - args.any { it is MissingType } -> AnyOfType(setOf(StaticType.MISSING, StaticType.BOOL)) - // If other argument is null, then return union(bool, null) - args.any { it is NullType } -> AnyOfType(setOf(StaticType.NULL, StaticType.BOOL)) - // If other type is anything other than null or missing, then it is an error case - else -> StaticType.MISSING - } - // If any of the operands is MISSING, return MISSING. 
MISSING has a precedence over NULL - args.any { it is MissingType } -> StaticType.MISSING - // If any of the operands is NULL, return NULL - args.any { it is NullType } -> StaticType.NULL - else -> StaticType.MISSING - } - - private fun computeReturnTypeForNAry( - argsStaticType: List, - binaryOpInferencer: (SingleType, SingleType) -> SingleType - ): StaticType = - argsStaticType.reduce { leftStaticType, rightStaticType -> - val leftSingleTypes = leftStaticType.allTypes.map { it as SingleType } - val rightSingleTypes = rightStaticType.allTypes.map { it as SingleType } - val possibleResultTypes: List = - leftSingleTypes.flatMap { leftSingleType -> - rightSingleTypes.map { rightSingleType -> - binaryOpInferencer(leftSingleType, rightSingleType) - } - } - - StaticType.unionOf(possibleResultTypes.toSet()).flatten() - } - - /** - * Computes return type for functions with [FunctionSignature.unknownArguments] as [UnknownArguments.PROPAGATE] - */ - private fun returnTypeForPropagatingFunction(signature: FunctionSignature, arguments: List, ctx: Context): StaticType { - val requiredArgs = arguments.zip(signature.requiredParameters) - val allArgs = requiredArgs - - return if (functionHasValidArgTypes(signature.name, allArgs, ctx)) { - val finalReturnTypes = signature.returnType.allTypes + allArgs.flatMap { (actualType, expectedType) -> - listOfNotNull( - // if any type is `MISSING`, add `MISSING` to possible return types. - // if the actual type is not a subtype is the expected type, add `MISSING`. 
In the future, may - // want to give a warning that a data type mismatch could occur - // (https://github.com/partiql/partiql-lang-kotlin/issues/507) - StaticType.MISSING.takeIf { - actualType.allTypes.any { it is MissingType } || !StaticTypeUtils.isSubTypeOf( - actualType.filterNullMissing(), - expectedType - ) - }, - // if any type is `NULL`, add `NULL` to possible return types - StaticType.NULL.takeIf { actualType.allTypes.any { it is NullType } } - ) - } - AnyOfType(finalReturnTypes.toSet()).flatten() - } else { - // otherwise, has an invalid arg type and errors. continuation type of [FunctionSignature.returnType] - signature.returnType - } - } - - /** - * For [this] [StaticType], filters out [NullType] and [MissingType] from [AnyOfType]s. Otherwise, returns [this]. - */ - private fun StaticType.filterNullMissing(): StaticType = - when (this) { - is AnyOfType -> AnyOfType(this.types.filter { !it.isNullOrMissing() }.toSet()).flatten() - else -> this - } - - private fun getUnpivotValueType(fromSourceType: StaticType): StaticType = - when (fromSourceType) { - is StructType -> if (fromSourceType.contentClosed) { - AnyOfType(fromSourceType.fields.map { it.value }.toSet()).flatten() - } else { - // Content is open, so value can be of any type - StaticType.ANY - } - is AnyType -> StaticType.ANY - is AnyOfType -> AnyOfType(fromSourceType.types.map { getUnpivotValueType(it) }.toSet()) - // All the other types coerce into a struct of themselves with synthetic key names - else -> fromSourceType - } - - /** - * Returns true if for every pair (expr, expectedType) in [argsWithExpectedTypes], the expr's [StaticType] is - * not an unknown and has a shared type with expectedType. Returns false otherwise. - * - * If an argument has an unknown type, the [SemanticProblemDetails.NullOrMissingFunctionArgument] error is - * handled by [ProblemHandler]. 
If an expr has no shared type with the expectedType, the - * [SemanticProblemDetails.InvalidArgumentTypeForFunction] error is handled by [ProblemHandler]. - */ - private fun functionHasValidArgTypes(functionName: String, argsWithExpectedTypes: List>, ctx: Context): Boolean { - var allArgsValid = true - argsWithExpectedTypes.forEach { (actualType, expectedType) -> - if (actualType.isUnknown()) { - handleNullOrMissingFunctionArgument(functionName, ctx) - allArgsValid = false - } else { - val actualNonUnknownType = actualType.filterNullMissing() - if (StaticTypeUtils.getTypeDomain(actualNonUnknownType).intersect(StaticTypeUtils.getTypeDomain(expectedType)).isEmpty() - ) { - handleInvalidArgumentTypeForFunction( - functionName = functionName, - expectedType = expectedType, - actualType = actualType, - ctx - ) - allArgsValid = false - } - } - } - return allArgsValid - } - - /** - * Computes return type for functions with [FunctionSignature.unknownArguments] as [UnknownArguments.PASS_THRU] - */ - private fun returnTypeForPassThruFunction(signature: FunctionSignature, arguments: List): StaticType { - return when { - matchesAllArguments(arguments, signature) -> signature.returnType - matchesAtLeastOneArgument(arguments, signature) -> StaticType.unionOf(signature.returnType, StaticType.MISSING) - else -> StaticType.MISSING - } - } - - /** - * Function assumes the number of [arguments] passed agrees with the [signature] - * Returns true when all the arguments (required, optional, variadic) are subtypes of the expected arguments for the [signature]. 
- * Returns false otherwise - */ - private fun matchesAllArguments(arguments: List, signature: FunctionSignature): Boolean { - // Checks if the actual StaticType is subtype of expected StaticType ( filtering the null/missing for PROPAGATING functions - fun isSubType(actual: StaticType, expected: StaticType): Boolean { - val lhs = when (signature.unknownArguments) { - UnknownArguments.PROPAGATE -> when (actual) { - is AnyOfType -> actual.copy( - types = actual.types.filter { - !it.isNullOrMissing() - }.toSet() - ) - else -> actual - } - UnknownArguments.PASS_THRU -> actual - } - return StaticTypeUtils.isSubTypeOf(lhs, expected) - } - - val requiredArgumentsMatch = arguments - .zip(signature.requiredParameters) - .all { (actual, expected) -> - isSubType(actual, expected) - } - return requiredArgumentsMatch - } - - internal fun Rex.isProjectAll(): Boolean { - return when (this) { - is Rex.Path -> { - val step = this.steps.lastOrNull() ?: return false - step is Step.Wildcard - } - else -> false - } - } - - /** - * Function assumes the number of [arguments] passed agrees with the [signature] - * - * Returns true if there's at least one valid overlap between actual and expected - * for all the expected arguments (required, optional, variadic) for the [signature]. - * - * Returns false otherwise. - */ - private fun matchesAtLeastOneArgument(arguments: List, signature: FunctionSignature): Boolean { - val requiredArgumentsMatch = arguments - .zip(signature.requiredParameters) - .all { (actual, expected) -> - StaticTypeUtils.getTypeDomain(actual).intersect(StaticTypeUtils.getTypeDomain(expected)).isNotEmpty() - } - return requiredArgumentsMatch - } - - private fun inferEqNeOp(lhs: SingleType, rhs: SingleType): SingleType = when { - // Propagate missing as missing. 
Missing has precedence over null - lhs is MissingType || rhs is MissingType -> StaticType.MISSING - lhs.isNullable() || rhs.isNullable() -> StaticType.NULL - else -> StaticType.BOOL - } - - // LT, LTE, GT, GTE - private fun inferComparatorOp(lhs: SingleType, rhs: SingleType): SingleType = when { - // Propagate missing as missing. Missing has precedence over null - lhs is MissingType || rhs is MissingType -> StaticType.MISSING - lhs is NullType || rhs is NullType -> StaticType.NULL - StaticTypeUtils.areStaticTypesComparable(lhs, rhs) -> StaticType.BOOL - else -> StaticType.MISSING - } - - /** - * Returns true if all of the provided [argsStaticType] are comparable to each other and are not unknown. Otherwise, - * returns false. - * - * If an operand is not comparable to another, the [SemanticProblemDetails.IncompatibleDatatypesForOp] error is - * handled by [ProblemHandler]. - * - * If an operand is always missing, the - * [SemanticProblemDetails.ExpressionAlwaysReturnsMissing] error is handled by [ProblemHandler]. - * - * If an operand is always NULL, or unionOf(NULL, MISSING), the - * [SemanticProblemDetails.ExpressionAlwaysReturnsNullOrMissing] **warning** is handled by [ProblemHandler] - * - * TODO: consider if collection comparison semantics should be different (e.g. errors over warnings, - * more details in error message): https://github.com/partiql/partiql-lang-kotlin/issues/505 - */ - private fun operandsAreComparable(argsStaticType: List, op: String, ctx: Context): Boolean { - var hasValidOperands = true - - // check for comparability of all operands. 
currently only adds one data type mismatch error - outerLoop@ for (i in argsStaticType.indices) { - for (j in i + 1 until argsStaticType.size) { - if (!StaticTypeUtils.areStaticTypesComparable(argsStaticType[i], argsStaticType[j])) { - handleIncompatibleDataTypesForOpError(ctx, argsStaticType, op) - hasValidOperands = false - break@outerLoop - } - } - } - - // check for an unknown operand type - if (expressionAlwaysReturnsUnknown(argsStaticType, ctx)) { - hasValidOperands = false - } - return hasValidOperands - } - - // This could also have been a lookup table of types, however... doing this as a nested `when` allows - // us to not to rely on `.equals` and `.hashcode` implementations of [StaticType], which include metas - // and might introduce unwanted behavior. - private fun inferBinaryArithmeticOp(leftType: SingleType, rightType: SingleType): SingleType = when { - // Propagate missing as missing. Missing has precedence over null - leftType is MissingType || rightType is MissingType -> StaticType.MISSING - leftType is NullType || rightType is NullType -> StaticType.NULL - else -> when (leftType) { - is IntType -> - when (rightType) { - is IntType -> - when { - leftType.rangeConstraint == IntType.IntRangeConstraint.UNCONSTRAINED -> leftType - rightType.rangeConstraint == IntType.IntRangeConstraint.UNCONSTRAINED -> rightType - leftType.rangeConstraint.numBytes > rightType.rangeConstraint.numBytes -> leftType - else -> rightType - } - is FloatType -> StaticType.FLOAT - is DecimalType -> StaticType.DECIMAL // TODO: account for decimal precision - else -> StaticType.MISSING - } - is FloatType -> - when (rightType) { - is IntType -> StaticType.FLOAT - is FloatType -> StaticType.FLOAT - is DecimalType -> StaticType.DECIMAL // TODO: account for decimal precision - else -> StaticType.MISSING - } - is DecimalType -> - when (rightType) { - is IntType -> StaticType.DECIMAL // TODO: account for decimal precision - is FloatType -> StaticType.DECIMAL // TODO: account for 
decimal precision - is DecimalType -> StaticType.DECIMAL // TODO: account for decimal precision - else -> StaticType.MISSING - } - else -> StaticType.MISSING - } - } - - private fun hasValidOperandTypes( - operandsStaticType: List, - op: String, - ctx: Context, - operandTypeValidator: (StaticType) -> Boolean - ): Boolean { - // check for an incompatible operand type - if (operandsStaticType.any { operandStaticType -> !operandStaticType.isUnknown() && operandStaticType.allTypes.none(operandTypeValidator) }) { - handleIncompatibleDataTypesForOpError(ctx, operandsStaticType, op) - } - - // check for an unknown operand type - expressionAlwaysReturnsUnknown(operandsStaticType, ctx) - - return true - } - - private fun assertAsInt(type: StaticType, ctx: Context) { - if (type.flatten().allTypes.any { variant -> variant is IntType }.not()) { - handleIncompatibleDataTypeForExprError(StaticType.INT, type, ctx) - } - } - - private fun StaticType.isNullOrMissing(): Boolean = (this is NullType || this is MissingType) - - internal fun StaticType.isText(): Boolean = (this is SymbolType || this is StringType) - - private fun StaticType.isUnknown(): Boolean = (this.isNullOrMissing() || this == StaticType.NULL_OR_MISSING) - - internal fun StaticType.isNumeric(): Boolean = (this is IntType || this is FloatType || this is DecimalType) - - private fun rexCaseToBindingCase(node: Case): BindingCase = when (node) { - Case.SENSITIVE -> BindingCase.SENSITIVE - Case.INSENSITIVE -> BindingCase.INSENSITIVE - } - - private fun findBind(path: BindingPath, qualifier: Rex.Id.Qualifier, ctx: Context): ReferenceResolver.ResolvedType { - val scopingOrder = when (qualifier) { - Rex.Id.Qualifier.LOCALS_FIRST -> ScopingOrder.LEXICAL_THEN_GLOBALS - Rex.Id.Qualifier.UNQUALIFIED -> ctx.scopingOrder - } - return when (scopingOrder) { - ScopingOrder.GLOBALS_THEN_LEXICAL -> ReferenceResolver.resolveGlobalBind(path, ctx) - ?: ReferenceResolver.resolveLocalBind(path, ctx.inputTypeEnv) - ?: 
handleUnresolvedDescriptor(path.steps.last(), ctx) { - ReferenceResolver.ResolvedType(StaticType.ANY) - } - ScopingOrder.LEXICAL_THEN_GLOBALS -> ReferenceResolver.resolveLocalBind(path, ctx.inputTypeEnv) - ?: ReferenceResolver.resolveGlobalBind(path, ctx) - ?: handleUnresolvedDescriptor(path.steps.last(), ctx) { - ReferenceResolver.ResolvedType(StaticType.ANY) - } - } - } - - private fun handleUnresolvedDescriptor(name: BindingName, ctx: Context, input: () -> T): T { - return when (ctx.tolerance) { - MinimumTolerance.FULL -> { - handleUndefinedVariable(name, ctx) - input.invoke() - } - MinimumTolerance.PARTIAL -> input.invoke() - } - } - - private fun grabFirstIds(node: Rex.Path): List { - if (node.root !is Rex.Id) { return emptyList() } - val steps = node.steps.map { - when (it) { - is Step.Key -> when (val value = it.value) { - is Rex.Lit -> { - val ionElement = value.value.asAnyElement() - when (ionElement.type) { - ElementType.SYMBOL, ElementType.STRING -> { - val stringValue = value.value.asAnyElement().stringValueOrNull - stringValue?.let { str -> - rexId(str, it.case, Rex.Id.Qualifier.UNQUALIFIED, null) - } - } - else -> null - } - } - else -> null - } - else -> null - } - } - val nullPosition = when (val nullIndex = steps.indexOf(null)) { - -1 -> steps.size - else -> nullIndex - } - val firstSteps = steps.subList(0, nullPosition).filterNotNull() - return listOf(node.root as Rex.Id) + firstSteps - } - - private fun inferType(expr: Rex, input: Rel?, ctx: Context): StaticType { - return type( - expr, - Context( - input, - ctx.session, - ctx.metadata, - ScopingOrder.LEXICAL_THEN_GLOBALS, - ctx.customFunctionSignatures, - ctx.tolerance, - ctx.problemHandler - ) - ).grabType() ?: handleMissingType(ctx) - } - - private fun typeRex(expr: Rex, input: Rel?, ctx: Context): Rex { - return type( - expr, - Context( - input, - ctx.session, - ctx.metadata, - ctx.scopingOrder, - ctx.customFunctionSignatures, - ctx.tolerance, - ctx.problemHandler - ) - ) - } - - private fun 
typeRel(rel: Rel, input: Rel?, ctx: Context): Rel { - return visitRel( - rel, - Context( - input, - ctx.session, - ctx.metadata, - ctx.scopingOrder, - ctx.customFunctionSignatures, - ctx.tolerance, - ctx.problemHandler - ) - ) - } - - /** - * If an expression always returns missing, raise a [SemanticProblemDetails.ExpressionAlwaysReturnsMissing] and return true. - * - * If an expression always returns null or union(null, missing), raise a [SemanticProblemDetails.ExpressionAlwaysReturnsNullOrMissing] and return false. - * - * else returns false. - */ - private fun expressionAlwaysReturnsUnknown(types: List, ctx: Context): Boolean { - if (types.any { type -> type is MissingType }) { - handleExpressionAlwaysReturnsMissingError(ctx) - return true - } - - if (types.any { type -> type is NullType || type == StaticType.NULL_OR_MISSING }) { - handleExpressionAlwaysReturnsMissingOrNullWarning(ctx) - } - return false - } - - private fun handleExpressionAlwaysReturnsUnknown(type: StaticType, ctx: Context) { - if (type is MissingType) { - handleExpressionAlwaysReturnsMissingError(ctx) - return - } - - if (type is NullType || type == StaticType.NULL_OR_MISSING) { - handleExpressionAlwaysReturnsMissingOrNullWarning(ctx) - } - } - - private fun handleExpressionAlwaysReturnsMissingOrNullWarning(ctx: Context) { - ctx.problemHandler.handleProblem( - Problem( - sourceLocation = UNKNOWN_PROBLEM_LOCATION, - details = SemanticProblemDetails.ExpressionAlwaysReturnsNullOrMissing - ) - ) - } - - private fun handleExpressionAlwaysReturnsMissingError(ctx: Context) { - ctx.problemHandler.handleProblem( - Problem( - sourceLocation = UNKNOWN_PROBLEM_LOCATION, - details = SemanticProblemDetails.ExpressionAlwaysReturnsMissing - ) - ) - } - - // TODO: https://github.com/partiql/partiql-lang-kotlin/issues/508 consider not working directly with strings for `op` - private fun handleIncompatibleDataTypesForOpError(ctx: Context, actualTypes: List, op: String) { - ctx.problemHandler.handleProblem( - 
Problem( - sourceLocation = UNKNOWN_PROBLEM_LOCATION, - details = SemanticProblemDetails.IncompatibleDatatypesForOp( - actualTypes, - op - ) - ) - ) - } - - private fun handleNoSuchFunctionError(ctx: Context, functionName: String) { - ctx.problemHandler.handleProblem( - Problem( - sourceLocation = UNKNOWN_PROBLEM_LOCATION, - details = SemanticProblemDetails.NoSuchFunction(functionName) - ) - ) - } - - private fun handleIncompatibleDataTypeForExprError(expectedType: StaticType, actualType: StaticType, ctx: Context) { - ctx.problemHandler.handleProblem( - Problem( - sourceLocation = UNKNOWN_PROBLEM_LOCATION, - details = SemanticProblemDetails.IncompatibleDataTypeForExpr(expectedType, actualType) - ) - ) - } - - private fun handleIncorrectNumberOfArgumentsToFunctionCallError( - functionName: String, - expectedArity: IntRange, - actualArgCount: Int, - ctx: Context - ) { - ctx.problemHandler.handleProblem( - Problem( - sourceLocation = UNKNOWN_PROBLEM_LOCATION, - details = SemanticProblemDetails.IncorrectNumberOfArgumentsToFunctionCall( - functionName, - expectedArity, - actualArgCount - ) - ) - ) - } - - private fun handleNullOrMissingFunctionArgument(functionName: String, ctx: Context) { - ctx.problemHandler.handleProblem( - Problem( - sourceLocation = UNKNOWN_PROBLEM_LOCATION, - details = SemanticProblemDetails.NullOrMissingFunctionArgument( - functionName = functionName - ) - ) - ) - } - - private fun handleUndefinedVariable(name: BindingName, ctx: Context) { - ctx.problemHandler.handleProblem( - Problem( - sourceLocation = UNKNOWN_PROBLEM_LOCATION, - details = PlanningProblemDetails.UndefinedVariable(name.name, name.bindingCase == BindingCase.SENSITIVE) - ) - ) - } - - private fun handleInvalidArgumentTypeForFunction(functionName: String, expectedType: StaticType, actualType: StaticType, ctx: Context) { - ctx.problemHandler.handleProblem( - Problem( - sourceLocation = UNKNOWN_PROBLEM_LOCATION, - details = SemanticProblemDetails.InvalidArgumentTypeForFunction( - 
functionName = functionName, - expectedType = expectedType, - actualType = actualType - ) - ) - ) - } - - private fun handleMissingType(ctx: Context): StaticType { - ctx.problemHandler.handleProblem( - Problem( - sourceLocation = UNKNOWN_PROBLEM_LOCATION, - details = PlanningProblemDetails.CompileError("Unable to determine type of node.") - ) - ) - return StaticType.ANY - } - - private fun handleDuplicateAliasesError(ctx: Context) { - ctx.problemHandler.handleProblem( - Problem( - sourceLocation = UNKNOWN_PROBLEM_LOCATION, - details = SemanticProblemDetails.DuplicateAliasesInSelectListItem - ) - ) - } - - private fun handleCoercionError(ctx: Context, actualType: StaticType) { - ctx.problemHandler.handleProblem( - Problem( - sourceLocation = UNKNOWN_PROBLEM_LOCATION, - details = SemanticProblemDetails.CoercionError(actualType) - ) - ) - } - - private fun handleUnresolvedExcludeExprRoot(root: String, ctx: Context) { - ctx.problemHandler.handleProblem( - Problem( - sourceLocation = UNKNOWN_PROBLEM_LOCATION, - details = PlanningProblemDetails.UnresolvedExcludeExprRoot(root) - ) - ) - } -} diff --git a/partiql-lang/src/main/kotlin/org/partiql/lang/planner/transforms/plan/PlanUtils.kt b/partiql-lang/src/main/kotlin/org/partiql/lang/planner/transforms/plan/PlanUtils.kt deleted file mode 100644 index 4da66fcb0..000000000 --- a/partiql-lang/src/main/kotlin/org/partiql/lang/planner/transforms/plan/PlanUtils.kt +++ /dev/null @@ -1,82 +0,0 @@ -/* - * Copyright Amazon.com, Inc. or its affiliates. All rights reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at: - * - * http://aws.amazon.com/apache2.0/ - * - * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the License for the specific - * language governing permissions and limitations under the License. - */ - -package org.partiql.lang.planner.transforms.plan - -import org.partiql.plan.Arg -import org.partiql.plan.Attribute -import org.partiql.plan.Binding -import org.partiql.plan.PlanNode -import org.partiql.plan.Rel -import org.partiql.plan.Rex -import org.partiql.plan.Step -import org.partiql.types.StaticType - -internal object PlanUtils { - internal fun getTypeEnv(input: Rel): List = when (input) { - is Rel.Project -> input.common.typeEnv - is Rel.Aggregate -> input.common.typeEnv - is Rel.Bag -> input.common.typeEnv - is Rel.Fetch -> input.common.typeEnv - is Rel.Filter -> input.common.typeEnv - is Rel.Join -> input.common.typeEnv - is Rel.Scan -> input.common.typeEnv - is Rel.Sort -> input.common.typeEnv - is Rel.Unpivot -> input.common.typeEnv - is Rel.Exclude -> input.common.typeEnv - } - - internal fun Rex.addType(type: StaticType): Rex = when (this) { - is Rex.Agg -> this.copy(type = type) - is Rex.Binary -> this.copy(type = type) - is Rex.Call -> this.copy(type = type) - is Rex.Collection.Array -> this.copy(type = type) - is Rex.Collection.Bag -> this.copy(type = type) - is Rex.Id -> this.copy(type = type) - is Rex.Lit -> this.copy(type = type) - is Rex.Path -> this.copy(type = type) - is Rex.Query.Collection -> this.copy(type = type) - is Rex.Query.Scalar.Pivot -> this.copy(type = type) - is Rex.Query.Scalar.Subquery -> this.copy(type = type) - is Rex.Switch -> this.copy(type = type) - is Rex.Tuple -> this.copy(type = type) - is Rex.Unary -> this.copy(type = type) - } - - internal fun Rex.grabType(): StaticType? 
= when (this) { - is Rex.Agg -> this.type - is Rex.Binary -> this.type - is Rex.Call -> this.type - is Rex.Collection.Array -> this.type - is Rex.Collection.Bag -> this.type - is Rex.Id -> this.type - is Rex.Lit -> this.type - is Rex.Path -> this.type - is Rex.Query.Collection -> this.type - is Rex.Query.Scalar.Pivot -> this.type - is Rex.Tuple -> this.type - is Rex.Unary -> this.type - is Rex.Query.Scalar.Subquery -> this.type - is Rex.Switch -> this.type - } - - internal fun PlanNode.grabType(): StaticType? = when (this) { - is Rex -> this.grabType() - is Arg.Value -> this.value.grabType() - is Arg.Type -> this.type - is Step.Key -> this.value.grabType() - is Binding -> this.value.grabType() - else -> error("Unable to grab static type of $this") - } -} diff --git a/partiql-lang/src/main/kotlin/org/partiql/lang/planner/transforms/plan/ReferenceResolver.kt b/partiql-lang/src/main/kotlin/org/partiql/lang/planner/transforms/plan/ReferenceResolver.kt deleted file mode 100644 index f4f86c3a8..000000000 --- a/partiql-lang/src/main/kotlin/org/partiql/lang/planner/transforms/plan/ReferenceResolver.kt +++ /dev/null @@ -1,130 +0,0 @@ -/* - * Copyright Amazon.com, Inc. or its affiliates. All rights reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at: - * - * http://aws.amazon.com/apache2.0/ - * - * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific - * language governing permissions and limitations under the License. 
- */ - -package org.partiql.lang.planner.transforms.plan - -import org.partiql.plan.Attribute -import org.partiql.spi.BindingCase -import org.partiql.spi.BindingName -import org.partiql.spi.BindingPath -import org.partiql.spi.connector.ConnectorObjectPath -import org.partiql.types.StaticType -import org.partiql.types.StructType -import org.partiql.types.TupleConstraint - -internal object ReferenceResolver { - - internal class ResolvedType( - val type: StaticType, - val levelsMatched: Int = 1 - ) - - /** - * Logic is as follows: - * 1. If Current Catalog and Schema are set, create a Path to the object and attempt to grab handle and schema. - * a. If not found, just try to find the object in the catalog. - * 2. If Current Catalog is not set: - * a. Loop through all catalogs and try to find the object. - * - * TODO: Add global bindings - * TODO: Replace paths with global variable references if found - */ - internal fun resolveGlobalBind(path: BindingPath, ctx: PlanTyper.Context): ResolvedType? { - val currentCatalog = ctx.session.currentCatalog?.let { BindingName(it, BindingCase.SENSITIVE) } - val currentCatalogPath = BindingPath(ctx.session.currentDirectory.map { BindingName(it, BindingCase.SENSITIVE) }) - val absoluteCatalogPath = BindingPath(currentCatalogPath.steps + path.steps) - return when (path.steps.size) { - 0 -> null - 1 -> getDescriptor(ctx, currentCatalog, path, absoluteCatalogPath) - 2 -> getDescriptor(ctx, currentCatalog, path, path) ?: getDescriptor(ctx, currentCatalog, path, absoluteCatalogPath) - else -> { - val inferredCatalog = path.steps[0] - val newPath = BindingPath(path.steps.subList(1, path.steps.size)) - getDescriptor(ctx, inferredCatalog, path, newPath) - ?: getDescriptor(ctx, currentCatalog, path, path) - ?: getDescriptor(ctx, currentCatalog, path, absoluteCatalogPath) - } - } - } - - /** - * Logic is as follows: - * 1. Look through [input] to find the root of the [path]. If found, return. Else, go to step 2. - * 2. 
Look through [input] and grab all [StructType]s. Then, grab the fields of each Struct corresponding to the - * root of [path]. - * - If the Struct if ordered, grab the first matching field. - * - If unordered and if multiple fields found, merge the output type. If no structs contain a matching field, return null. - */ - internal fun resolveLocalBind(path: BindingPath, input: List): ResolvedType? { - if (path.steps.isEmpty()) { return null } - val root: StaticType = input.firstOrNull { - path.steps[0].isEquivalentTo(it.name) - }?.type ?: run { - input.map { it.type }.filterIsInstance().mapNotNull { struct -> - inferStructLookup(struct, path.steps[0]) - }.let { potentialTypes -> - when (potentialTypes.size) { - 1 -> potentialTypes.first() - else -> null - } - } - } ?: return null - return ResolvedType(root) - } - - /** - * Searches for the [key] in the [struct]. If not found, return null - */ - internal fun inferStructLookup( - struct: StructType, - key: BindingName, - ): StaticType? = when (struct.constraints.contains(TupleConstraint.Ordered)) { - true -> struct.fields.firstOrNull { entry -> - key.isEquivalentTo(entry.key) - }?.value - false -> struct.fields.mapNotNull { entry -> - entry.value.takeIf { key.isEquivalentTo(entry.key) } - }.let { valueTypes -> - StaticType.unionOf(valueTypes.toSet()).flatten().takeIf { valueTypes.isNotEmpty() } - } - } - - // - // - // HELPER METHODS - // - // - - private fun getDescriptor(ctx: PlanTyper.Context, catalog: BindingName?, originalPath: BindingPath, catalogPath: BindingPath): ResolvedType? { - return catalog?.let { cat -> - ctx.metadata.getObjectHandle(ctx.session, cat, catalogPath)?.let { handle -> - ctx.metadata.getObjectDescriptor(ctx.session, handle).let { - val matched = calculateMatched(originalPath, catalogPath, handle.connectorHandle.absolutePath) - ResolvedType(it, levelsMatched = matched) - } - } - } - } - - /** - * Logic for determining how many BindingNames were “matched” by the ConnectorMetadata - * 1. 
Matched = RelativePath - Not Found - * 2. Not Found = Input CatalogPath - Output CatalogPath - * 3. Matched = RelativePath - (Input CatalogPath - Output CatalogPath) - * 4. Matched = RelativePath + Output CatalogPath - Input CatalogPath - */ - private fun calculateMatched(originalPath: BindingPath, inputCatalogPath: BindingPath, outputCatalogPath: ConnectorObjectPath): Int { - return originalPath.steps.size + outputCatalogPath.steps.size - inputCatalogPath.steps.size - } -} diff --git a/partiql-lang/src/main/kotlin/org/partiql/lang/planner/transforms/plan/RelConverter.kt b/partiql-lang/src/main/kotlin/org/partiql/lang/planner/transforms/plan/RelConverter.kt deleted file mode 100644 index 26ecb6c87..000000000 --- a/partiql-lang/src/main/kotlin/org/partiql/lang/planner/transforms/plan/RelConverter.kt +++ /dev/null @@ -1,518 +0,0 @@ -package org.partiql.lang.planner.transforms.plan - -import com.amazon.ionelement.api.ionInt -import com.amazon.ionelement.api.ionString -import org.partiql.lang.domains.PartiqlAst -import org.partiql.lang.eval.visitors.VisitorTransformBase -import org.partiql.lang.planner.transforms.plan.RexConverter.convertCase -import org.partiql.plan.Binding -import org.partiql.plan.Case -import org.partiql.plan.ExcludeExpr -import org.partiql.plan.ExcludeStep -import org.partiql.plan.Rel -import org.partiql.plan.Rex -import org.partiql.plan.SortSpec -import org.partiql.plan.binding -import org.partiql.plan.common -import org.partiql.plan.excludeExpr -import org.partiql.plan.excludeStepCollectionIndex -import org.partiql.plan.excludeStepCollectionWildcard -import org.partiql.plan.excludeStepTupleAttr -import org.partiql.plan.excludeStepTupleWildcard -import org.partiql.plan.field -import org.partiql.plan.relAggregate -import org.partiql.plan.relExclude -import org.partiql.plan.relFetch -import org.partiql.plan.relFilter -import org.partiql.plan.relJoin -import org.partiql.plan.relProject -import org.partiql.plan.relScan -import org.partiql.plan.relSort 
-import org.partiql.plan.relUnpivot -import org.partiql.plan.rexAgg -import org.partiql.plan.rexId -import org.partiql.plan.rexLit -import org.partiql.plan.rexQueryCollection -import org.partiql.plan.rexQueryScalarPivot -import org.partiql.plan.rexTuple -import org.partiql.plan.sortSpec -import org.partiql.types.StaticType - -/** - * Lexically scoped state for use in translating an individual SELECT statement. - */ -internal class RelConverter { - - /** - * As of now, the COMMON property of relation operators is under development, so just use empty for now - */ - private val empty = common( - typeEnv = emptyList(), - properties = emptySet(), - metas = emptyMap() - ) - - companion object { - - /** - * Converts a SELECT-FROM-WHERE AST node to a [Rex.Query] - */ - @JvmStatic - fun convert(select: PartiqlAst.Expr.Select): Rex.Query = with(RelConverter()) { - val rel = convertSelect(select) - val rex = when (val projection = select.project) { - // PIVOT ... FROM - is PartiqlAst.Projection.ProjectPivot -> { - rexQueryScalarPivot( - rel = rel, - value = RexConverter.convert(projection.value), - at = RexConverter.convert(projection.key), - type = null - ) - } - // SELECT VALUE ... FROM - is PartiqlAst.Projection.ProjectValue -> { - rexQueryCollection( - rel = rel, - constructor = RexConverter.convert(projection.value), - type = null - ) - } - // SELECT ... FROM - else -> { - rexQueryCollection( - rel = rel, - constructor = null, - type = null - ) - } - } - rex - } - } - - // synthetic binding name counter - private var i = 0 - - // generate a synthetic binding name - private fun nextBindingName(): String = "\$__v${i++}" - - /** - * Translate SFW AST node to a pipeline of [Rel] operators; this skips the final projection. 
- * - * Note: - * - This does not append the final projection - * - The AST doesn't support set operators - * - The Parser doesn't have FETCH syntax - */ - private fun convertSelect(node: PartiqlAst.Expr.Select): Rel { - var sel = node - var rel = convertFrom(sel.from) - rel = convertWhere(rel, sel.where) - // kotlin does not have destructuring assignment - val (_sel, _rel) = convertAgg(rel, sel, sel.group) - sel = _sel - rel = _rel - // transform (possibly rewritten) sel node - rel = convertHaving(rel, sel.having) - rel = convertOrderBy(rel, sel.order) - rel = convertFetch(rel, sel.limit, sel.offset) - rel = convertExclude(rel, sel.excludeClause) - // append SQL projection if present - rel = when (val projection = sel.project) { - is PartiqlAst.Projection.ProjectList -> convertProjectList(rel, projection) - is PartiqlAst.Projection.ProjectStar -> error("AST not normalized, found project star") - else -> rel // skip - } - return rel - } - - private fun convertExclude(input: Rel, excludeOp: PartiqlAst.ExcludeOp?): Rel = when (excludeOp) { - null -> input - else -> { - val exprs = excludeOp.exprs.map { convertExcludeExpr(it) } - relExclude( - common = empty, - input = input, - exprs = exprs, - ) - } - } - - private fun convertExcludeExpr(excludeExpr: PartiqlAst.ExcludeExpr): ExcludeExpr { - val root = excludeExpr.root.name.text - val case = convertCase(excludeExpr.root.case) - val steps = excludeExpr.steps.map { convertExcludeSteps(it) } - return excludeExpr(root, case, steps) - } - - private fun convertExcludeSteps(excludeStep: PartiqlAst.ExcludeStep): ExcludeStep { - return when (excludeStep) { - is PartiqlAst.ExcludeStep.ExcludeCollectionWildcard -> excludeStepCollectionWildcard() - is PartiqlAst.ExcludeStep.ExcludeTupleWildcard -> excludeStepTupleWildcard() - is PartiqlAst.ExcludeStep.ExcludeTupleAttr -> excludeStepTupleAttr(excludeStep.attr.name.text, convertCase(excludeStep.attr.case)) - is PartiqlAst.ExcludeStep.ExcludeCollectionIndex -> 
excludeStepCollectionIndex(excludeStep.index.value.toInt()) - } - } - - /** - * Appends the appropriate [Rel] operator for the given FROM source - */ - private fun convertFrom(from: PartiqlAst.FromSource): Rel = when (from) { - is PartiqlAst.FromSource.Join -> convertJoin(from) - is PartiqlAst.FromSource.Scan -> convertScan(from) - is PartiqlAst.FromSource.Unpivot -> convertUnpivot(from) - } - - /** - * Appends [Rel.Join] where the left and right sides are converted FROM sources - */ - private fun convertJoin(join: PartiqlAst.FromSource.Join): Rel { - val lhs = convertFrom(join.left) - val rhs = convertFrom(join.right) - val condition = if (join.predicate != null) RexConverter.convert(join.predicate!!) else null - return relJoin( - common = empty, - lhs = lhs, - rhs = rhs, - condition = condition, - type = when (join.type) { - is PartiqlAst.JoinType.Full -> Rel.Join.Type.FULL - is PartiqlAst.JoinType.Inner -> Rel.Join.Type.INNER - is PartiqlAst.JoinType.Left -> Rel.Join.Type.LEFT - is PartiqlAst.JoinType.Right -> Rel.Join.Type.RIGHT - } - ) - } - - /** - * Appends [Rel.Scan] which takes no input relational expression - */ - private fun convertScan(scan: PartiqlAst.FromSource.Scan) = relScan( - common = empty, - value = when (val expr = scan.expr) { - is PartiqlAst.Expr.Select -> convert(expr) - else -> RexConverter.convert(scan.expr) - }, - alias = scan.asAlias?.text, - at = scan.atAlias?.text, - by = scan.byAlias?.text - ) - - /** - * Appends [Rel.Unpivot] to range over attribute value pairs - */ - private fun convertUnpivot(scan: PartiqlAst.FromSource.Unpivot) = relUnpivot( - common = empty, - value = RexConverter.convert(scan.expr), - alias = scan.asAlias?.text, - at = scan.atAlias?.text, - by = scan.byAlias?.text - ) - - /** - * Append [Rel.Filter] only if a WHERE condition exists - */ - private fun convertWhere(input: Rel, expr: PartiqlAst.Expr?): Rel = when (expr) { - null -> input - else -> relFilter( - common = empty, - input = input, - condition = 
RexConverter.convert(expr) - ) - } - - /** - * Append [Rel.Aggregate] only if SELECT contains aggregate expressions. - * - * @return Pair is returned where - * 1. Ast.Expr.Select has every Ast.Expr.CallAgg replaced by a synthetic Ast.Expr.Id - * 2. Rel which has the appropriate Rex.Agg calls and Rex groups - */ - private fun convertAgg( - input: Rel, - select: PartiqlAst.Expr.Select, - groupBy: PartiqlAst.GroupBy? - ): Pair { - // Rewrite and extract all aggregations in the SELECT clause - val (sel, aggregations) = AggregationTransform.apply(select) - - // No aggregation planning required for GROUP BY - if (aggregations.isEmpty()) { - if (groupBy != null) { - // As of now, GROUP BY with no aggregations is considered an error. - error("GROUP BY with no aggregations in SELECT clause") - } - return Pair(select, input) - } - - val calls = aggregations.toMutableList() - var groups = emptyList() - var strategy = Rel.Aggregate.Strategy.FULL - - if (groupBy != null) { - // GROUP AS is implemented as an aggregation function - if (groupBy.groupAsAlias != null) { - calls.add(convertGroupAs(groupBy.groupAsAlias!!.text, sel.from)) - } - groups = groupBy.keyList.keys.map { convertGroupByKey(it) } - strategy = when (groupBy.strategy) { - is PartiqlAst.GroupingStrategy.GroupFull -> Rel.Aggregate.Strategy.FULL - is PartiqlAst.GroupingStrategy.GroupPartial -> Rel.Aggregate.Strategy.PARTIAL - } - } - - val rel = relAggregate( - common = empty, - input = input, - calls = calls, - groups = groups, - strategy = strategy - ) - - return Pair(sel, rel) - } - - /** - * Each GROUP BY becomes a binding available in the output tuples of [Rel.Aggregate] - */ - private fun convertGroupByKey(groupKey: PartiqlAst.GroupKey) = binding( - name = groupKey.asAlias?.text ?: error("not normalized, group key $groupKey missing unique name"), - expr = groupKey.expr - ) - - /** - * Append [Rel.Filter] only if a HAVING condition exists - * - * Notes: - * - This currently does not support aggregation 
expressions in the WHERE condition - */ - private fun convertHaving(input: Rel, expr: PartiqlAst.Expr?): Rel = when (expr) { - null -> input - else -> relFilter( - common = empty, - input = input, - condition = RexConverter.convert(expr) - ) - } - - /** - * Append [Rel.Sort] only if an ORDER BY clause is present - */ - private fun convertOrderBy(input: Rel, orderBy: PartiqlAst.OrderBy?) = when (orderBy) { - null -> input - else -> relSort( - common = empty, - input = input, - specs = orderBy.sortSpecs.map { convertSortSpec(it) } - ) - } - - /** - * Append [Rel.Fetch] if there is a LIMIT or LIMIT and OFFSET. - * - * Notes: - * - It's unclear if OFFSET without LIMIT should be allowed in PartiQL, so err for now. - */ - private fun convertFetch( - input: Rel, - limit: PartiqlAst.Expr?, - offset: PartiqlAst.Expr? - ): Rel { - if (limit == null) { - if (offset != null) error("offset without limit") - return input - } - return relFetch( - common = empty, - input = input, - limit = RexConverter.convert(limit), - offset = RexConverter.convert(offset ?: PartiqlAst.Expr.Lit(ionInt(0).asAnyElement())) - ) - } - - /** - * Appends a [Rel.Project] which projects the result of each binding rex into its binding name. - * - * @param input - * @param projection - * @return - */ - private fun convertProjectList(input: Rel, projection: PartiqlAst.Projection.ProjectList) = relProject( - common = empty, - input = input, - bindings = projection.projectItems.bindings() - ) - - /** - * Converts Ast.SortSpec to SortSpec. 
- * - * Notes: - * - ASC NULLS LAST (default) - * - DESC NULLS FIRST (default for DESC) - */ - private fun convertSortSpec(sortSpec: PartiqlAst.SortSpec) = sortSpec( - value = RexConverter.convert(sortSpec.expr), - dir = when (sortSpec.orderingSpec) { - is PartiqlAst.OrderingSpec.Desc -> SortSpec.Dir.DESC - is PartiqlAst.OrderingSpec.Asc -> SortSpec.Dir.ASC - null -> SortSpec.Dir.ASC - }, - nulls = when (sortSpec.nullsSpec) { - is PartiqlAst.NullsSpec.NullsFirst -> SortSpec.Nulls.FIRST - is PartiqlAst.NullsSpec.NullsLast -> SortSpec.Nulls.LAST - null -> SortSpec.Nulls.LAST - } - ) - - /** - * Converts a GROUP AS X clause to a binding of the form: - * ``` - * { 'X': group_as({ 'a_0': e_0, ..., 'a_n': e_n }) } - * ``` - * - * Notes: - * - This was included to be consistent with the existing PartiqlAst and PartiqlLogical representations, - * but perhaps we don't want to represent GROUP AS with an agg function. - */ - private fun convertGroupAs(name: String, from: PartiqlAst.FromSource): Binding { - val fields = from.bindings().map { n -> - field( - name = rexLit(ionString(n), StaticType.STRING), - value = rexId(n, Case.SENSITIVE, Rex.Id.Qualifier.UNQUALIFIED, type = StaticType.STRUCT) - ) - } - return binding( - name = name, - value = rexAgg( - id = "group_as", - args = listOf(rexTuple(fields, StaticType.STRUCT)), - modifier = Rex.Agg.Modifier.ALL, - type = StaticType.STRUCT - ) - ) - } - - /** - * Helper to get all binding names in the FROM clause - */ - private fun PartiqlAst.FromSource.bindings(): List = when (this) { - is PartiqlAst.FromSource.Scan -> { - if (asAlias == null) { - error("not normalized, scan is missing an alias") - } - listOf(asAlias!!.text) - } - is PartiqlAst.FromSource.Join -> left.bindings() + right.bindings() - is PartiqlAst.FromSource.Unpivot -> { - if (asAlias == null) { - error("not normalized, scan is missing an alias") - } - listOf(asAlias!!.text) - } - } - - /** - * Helper to convert ProjectItems to bindings - * - * As of now, bindings 
is just a list, not a tuple. - * Binding and Tuple/Struct will be consolidated. - */ - private fun List.bindings() = map { - when (it) { - is PartiqlAst.ProjectItem.ProjectAll -> { - val path = PartiqlAst.Expr.Path(it.expr, listOf(PartiqlAst.PathStep.PathWildcard())) - val bindingName = when (val expr = it.expr) { - is PartiqlAst.Expr.Id -> expr.name.text - is PartiqlAst.Expr.Lit -> { - when (expr.value.type.isText) { - true -> expr.value.stringValue - false -> nextBindingName() - } - } - else -> nextBindingName() - } - binding(bindingName, path) - } - is PartiqlAst.ProjectItem.ProjectExpr -> binding( - name = it.asAlias?.text ?: error("not normalized"), - expr = it.expr - ) - } - } - - /** - * Rewrites a SFW node replacing all aggregations with a synthetic field name - * - * See AstToLogicalVisitorTransform.kt CallAggregationReplacer from org.partiql.lang.planner.transforms. - * - * ``` - * SELECT g, h, SUM(t.b) AS sumB - * FROM t - * GROUP BY t.a AS g GROUP AS h - * ``` - * - * into: - * - * ``` - * SELECT g, h, $__v0 AS sumB - * FROM t - * GROUP BY t.a AS g GROUP AS h - * ``` - * - * Where $__v0 is the binding name of SUM(t.b) in the aggregation output - * - * Inner object class to have access to current SELECT-FROM-WHERE converter state - */ - @Suppress("PrivatePropertyName") - private val AggregationTransform = object : VisitorTransformBase() { - - private var level = 0 - private var aggregations = mutableListOf() - - fun apply(node: PartiqlAst.Expr.Select): Pair> { - level = 0 - aggregations = mutableListOf() - val select = transformExprSelect(node) as PartiqlAst.Expr.Select - return Pair(select, aggregations) - } - - override fun transformProjectItemProjectExpr_expr(node: PartiqlAst.ProjectItem.ProjectExpr) = - transformExpr(node.expr) - - override fun transformProjectionProjectValue_value(node: PartiqlAst.Projection.ProjectValue) = - transformExpr(node.value) - - override fun transformExprSelect_having(node: PartiqlAst.Expr.Select): PartiqlAst.Expr? 
= - when (val having = node.having) { - null -> null - else -> transformExpr(having) - } - - override fun transformSortSpec_expr(node: PartiqlAst.SortSpec) = transformExpr(node.expr) - - override fun transformExprSelect(node: PartiqlAst.Expr.Select) = - if (level++ == 0) super.transformExprSelect(node) else node - - override fun transformExprCallAgg(node: PartiqlAst.Expr.CallAgg): PartiqlAst.Expr { - val name = nextBindingName() - aggregations.add(binding(name, node)) - return PartiqlAst.build { - id( - name = name, - case = caseInsensitive(), - qualifier = unqualified(), - metas = node.metas - ) - } - } - } - - /** - * Binding helper - */ - private fun binding(name: String, expr: PartiqlAst.Expr) = binding( - name = name, - value = RexConverter.convert(expr) - ) -} diff --git a/partiql-lang/src/main/kotlin/org/partiql/lang/planner/transforms/plan/RexConverter.kt b/partiql-lang/src/main/kotlin/org/partiql/lang/planner/transforms/plan/RexConverter.kt deleted file mode 100644 index fa177fe2e..000000000 --- a/partiql-lang/src/main/kotlin/org/partiql/lang/planner/transforms/plan/RexConverter.kt +++ /dev/null @@ -1,692 +0,0 @@ -package org.partiql.lang.planner.transforms.plan - -import com.amazon.ionelement.api.MetaContainer -import com.amazon.ionelement.api.ionNull -import org.partiql.errors.ErrorCode -import org.partiql.lang.domains.PartiqlAst -import org.partiql.lang.eval.EvaluationSession -import org.partiql.lang.eval.builtins.ExprFunctionCurrentUser -import org.partiql.lang.eval.err -import org.partiql.lang.eval.errorContextFrom -import org.partiql.lang.planner.transforms.AstToPlan -import org.partiql.lang.planner.transforms.plan.PlanTyper.isProjectAll -import org.partiql.plan.Case -import org.partiql.plan.Rel -import org.partiql.plan.Rex -import org.partiql.plan.argType -import org.partiql.plan.argValue -import org.partiql.plan.branch -import org.partiql.plan.field -import org.partiql.plan.rexAgg -import org.partiql.plan.rexBinary -import org.partiql.plan.rexCall 
-import org.partiql.plan.rexCollectionArray -import org.partiql.plan.rexCollectionBag -import org.partiql.plan.rexId -import org.partiql.plan.rexLit -import org.partiql.plan.rexPath -import org.partiql.plan.rexQueryScalarSubquery -import org.partiql.plan.rexSwitch -import org.partiql.plan.rexTuple -import org.partiql.plan.rexUnary -import org.partiql.plan.stepKey -import org.partiql.plan.stepUnpivot -import org.partiql.plan.stepWildcard -import org.partiql.types.StaticType -import java.util.Locale - -/** - * Some workarounds for transforming a PIG tree without having to create another visitor: - * - Using the VisitorFold with ctx struct to create a parameterized return and scoped arguments/context - * - Using walks to control traversal, also walks have generated if/else blocks for sum types so its more useful - */ -@Suppress("PARAMETER_NAME_CHANGED_ON_OVERRIDE") -internal object RexConverter : PartiqlAst.VisitorFold() { - - /** - * Workaround for PIG visitor where: - * - Args != null when Ctx is the accumulator IN - * - Rex != null when Ctx is the accumulator OUT - * - * Destructuring ordering chosen for val (in, out) = ... - * - * @property node Node to invoke the behavior on - * @property rex Return value - */ - data class Ctx( - val node: PartiqlAst.PartiqlAstNode, - var rex: Rex? = null, - ) - - /** - * Read as `val rex = node.accept(visitor = RexVisitor.INSTANCE, args = emptyList())` - * Only works because RexConverter errs for all non Expr AST nodes, and Expr is one sum type. - */ - internal fun convert(node: PartiqlAst.Expr) = RexConverter.walkExpr(node, Ctx(node)).rex!! 
- - /** - * List version of `accept` - */ - private fun convert(nodes: List) = nodes.map { convert(it) } - - /** - * Vararg version of `accept` - */ - private fun convert(vararg nodes: PartiqlAst.Expr) = nodes.map { convert(it) } - - private fun arg(name: String, node: PartiqlAst.PartiqlAstNode) = when (node) { - is PartiqlAst.Expr -> argValue( - name = name, - value = convert(node), - ) - is PartiqlAst.Type -> argType( - name = name, - type = TypeConverter.convert(node) - ) - else -> error("Argument must be of type PartiqlAst.Expr or PartiqlAst.Type, found ${node::class.qualifiedName}") - } - - /** - * Convert a list of arguments to arg0, ...., argN - */ - private fun args(nodes: List) = args(*nodes.toTypedArray()) - - /** - * Convert arguments to arg0, ...., argN - */ - private fun args(vararg nodes: PartiqlAst.PartiqlAstNode?) = - nodes.filterNotNull().mapIndexed { i, arg -> arg("arg$i", arg) } - - /** - * Convert keyword pairs of arguments - */ - private fun args(vararg args: Pair) = args.map { arg(it.first, it.second) } - - /** - * Helper so the visitor "body" looks like it has Rex as the return value - */ - private inline fun visit(node: PartiqlAst.PartiqlAstNode, block: () -> Rex) = Ctx(node, block()) - - /** - * !! DEFAULT VISIT !! 
- * - * The PIG visitor doesn't give us control over the default "visit" - * We can override walkMetas (which appears on every super.walk call) as if it were a default "visit" - * MetaContainer isn't actually a domain node, and we don't have any context as to where the MetaContainer - * is coming from which is why the current node is stuffed into Ctx - */ - override fun walkMetas(node: MetaContainer, ctx: Ctx) = AstToPlan.unsupported(ctx.node) - - override fun walkExprMissing(node: PartiqlAst.Expr.Missing, ctx: Ctx) = visit(node) { - rexLit(ionNull(), StaticType.MISSING) - } - - override fun walkExprLit(node: PartiqlAst.Expr.Lit, ctx: Ctx) = visit(node) { - val ionType = node.value.type.toIonType() - rexLit( - value = node.value, - type = TypeConverter.convert(ionType) - ) - } - - override fun walkExprSessionAttribute(node: PartiqlAst.Expr.SessionAttribute, accumulator: Ctx) = visit(node) { - val functionName = when (node.value.text.uppercase(Locale.getDefault())) { - EvaluationSession.Constants.CURRENT_USER_KEY -> ExprFunctionCurrentUser.FUNCTION_NAME - else -> err( - "Unsupported session attribute: ${node.value.text}", - errorCode = ErrorCode.SEMANTIC_PROBLEM, - errorContext = errorContextFrom(node.metas), - internal = false - ) - } - rexCall( - id = functionName, - args = emptyList(), - type = null - ) - } - - override fun walkExprId(node: PartiqlAst.Expr.Id, ctx: Ctx) = visit(node) { - rexId( - name = node.name.text, - case = convertCase(node.case), - qualifier = when (node.qualifier) { - is PartiqlAst.ScopeQualifier.LocalsFirst -> Rex.Id.Qualifier.LOCALS_FIRST - is PartiqlAst.ScopeQualifier.Unqualified -> Rex.Id.Qualifier.UNQUALIFIED - }, - type = null, - ) - } - - override fun walkExprPath(node: PartiqlAst.Expr.Path, ctx: Ctx) = visit(node) { - rexPath( - root = convert(node.root), - steps = node.steps.map { - when (it) { - is PartiqlAst.PathStep.PathExpr -> stepKey( - value = convert(it.index), - case = convertCase(it.case) - ) - is 
PartiqlAst.PathStep.PathUnpivot -> stepUnpivot() - is PartiqlAst.PathStep.PathWildcard -> stepWildcard() - } - }, - type = null, - ) - } - - override fun walkExprNot(node: PartiqlAst.Expr.Not, ctx: Ctx) = visit(node) { - rexUnary( - value = convert(node.expr), - op = Rex.Unary.Op.NOT, - type = StaticType.BOOL, - ) - } - - override fun walkExprPos(node: PartiqlAst.Expr.Pos, ctx: Ctx) = visit(node) { - rexUnary( - value = convert(node.expr), - op = Rex.Unary.Op.POS, - type = StaticType.NUMERIC, - ) - } - - override fun walkExprNeg(node: PartiqlAst.Expr.Neg, ctx: Ctx) = visit(node) { - rexUnary( - value = convert(node.expr), - op = Rex.Unary.Op.NEG, - type = StaticType.NUMERIC, - ) - } - - override fun walkExprPlus(node: PartiqlAst.Expr.Plus, ctx: Ctx) = visit(node) { - rexBinary( - lhs = convert(node.operands[0]), - rhs = convert(node.operands[1]), - op = Rex.Binary.Op.PLUS, - type = StaticType.NUMERIC, - ) - } - - override fun walkExprMinus(node: PartiqlAst.Expr.Minus, ctx: Ctx) = visit(node) { - rexBinary( - lhs = convert(node.operands[0]), - rhs = convert(node.operands[1]), - op = Rex.Binary.Op.MINUS, - type = StaticType.NUMERIC, - ) - } - - override fun walkExprTimes(node: PartiqlAst.Expr.Times, ctx: Ctx) = visit(node) { - rexBinary( - lhs = convert(node.operands[0]), - rhs = convert(node.operands[1]), - op = Rex.Binary.Op.TIMES, - type = StaticType.NUMERIC, - ) - } - - override fun walkExprDivide(node: PartiqlAst.Expr.Divide, ctx: Ctx) = visit(node) { - rexBinary( - lhs = convert(node.operands[0]), - rhs = convert(node.operands[1]), - op = Rex.Binary.Op.DIV, - type = StaticType.NUMERIC, - ) - } - - override fun walkExprModulo(node: PartiqlAst.Expr.Modulo, ctx: Ctx) = visit(node) { - rexBinary( - lhs = convert(node.operands[0]), - rhs = convert(node.operands[1]), - op = Rex.Binary.Op.MODULO, - type = StaticType.NUMERIC, - ) - } - - override fun walkExprBitwiseAnd(node: PartiqlAst.Expr.BitwiseAnd, accumulator: Ctx) = visit(node) { - rexBinary( - lhs = 
convert(node.operands[0]), - rhs = convert(node.operands[1]), - op = Rex.Binary.Op.BITWISE_AND, - type = StaticType.unionOf( - StaticType.INT2, StaticType.INT4, StaticType.INT8, StaticType.INT - ) - ) - } - - override fun walkExprConcat(node: PartiqlAst.Expr.Concat, ctx: Ctx) = visit(node) { - rexBinary( - lhs = convert(node.operands[0]), - rhs = convert(node.operands[1]), - op = Rex.Binary.Op.CONCAT, - type = StaticType.TEXT, - ) - } - - override fun walkExprAnd(node: PartiqlAst.Expr.And, ctx: Ctx) = visit(node) { - rexBinary( - lhs = convert(node.operands[0]), - rhs = convert(node.operands[1]), - op = Rex.Binary.Op.AND, - type = StaticType.BOOL, - ) - } - - override fun walkExprOr(node: PartiqlAst.Expr.Or, ctx: Ctx) = visit(node) { - rexBinary( - lhs = convert(node.operands[0]), - rhs = convert(node.operands[1]), - op = Rex.Binary.Op.OR, - type = StaticType.BOOL, - ) - } - - override fun walkExprEq(node: PartiqlAst.Expr.Eq, ctx: Ctx) = visit(node) { - val (lhs, rhs) = walkComparisonOperands(node.operands) - rexBinary( - lhs = lhs, - rhs = rhs, - op = Rex.Binary.Op.EQ, - type = StaticType.BOOL, - ) - } - - override fun walkExprNe(node: PartiqlAst.Expr.Ne, ctx: Ctx) = visit(node) { - val (lhs, rhs) = walkComparisonOperands(node.operands) - rexBinary( - lhs = lhs, - rhs = rhs, - op = Rex.Binary.Op.NEQ, - type = StaticType.BOOL, - ) - } - - override fun walkExprGt(node: PartiqlAst.Expr.Gt, ctx: Ctx) = visit(node) { - val (lhs, rhs) = walkComparisonOperands(node.operands) - rexBinary( - lhs = lhs, - rhs = rhs, - op = Rex.Binary.Op.GT, - type = StaticType.BOOL, - ) - } - - override fun walkExprGte(node: PartiqlAst.Expr.Gte, ctx: Ctx) = visit(node) { - val (lhs, rhs) = walkComparisonOperands(node.operands) - rexBinary( - lhs = lhs, - rhs = rhs, - op = Rex.Binary.Op.GTE, - type = StaticType.BOOL, - ) - } - - override fun walkExprLt(node: PartiqlAst.Expr.Lt, ctx: Ctx) = visit(node) { - val (lhs, rhs) = walkComparisonOperands(node.operands) - rexBinary( - lhs = lhs, - rhs 
= rhs, - op = Rex.Binary.Op.LT, - type = StaticType.BOOL, - ) - } - - override fun walkExprLte(node: PartiqlAst.Expr.Lte, ctx: Ctx) = visit(node) { - val (lhs, rhs) = walkComparisonOperands(node.operands) - rexBinary( - lhs = lhs, - rhs = rhs, - op = Rex.Binary.Op.LTE, - type = StaticType.BOOL, - ) - } - - /** - * Converts Comparison Operands. Also coerces them if one of them is an array. - */ - private fun walkComparisonOperands(operands: List): Pair { - var lhs = convert(operands[0]) - var rhs = convert(operands[1]) - if (lhs is Rex.Collection.Array) { rhs = coercePotentialSubquery(rhs) } - if (rhs is Rex.Collection.Array) { lhs = coercePotentialSubquery(lhs) } - return lhs to rhs - } - - /** - * We convert the scalar subquery of a SFW into a scalar subquery of a SELECT VALUE. - */ - private fun coercePotentialSubquery(rex: Rex): Rex { - var rhs = rex - if (rhs is Rex.Query.Scalar.Subquery) { - val sfw = rhs.query as? Rex.Query.Collection ?: error("Malformed plan, all scalar subqueries should hold collection queries") - val constructor = sfw.constructor ?: run { - val relProject = sfw.rel as? 
Rel.Project ?: error("Malformed plan, the top of a plan should be a projection") - getConstructorFromRelProject(relProject) - } - rhs = rhs.copy( - query = rhs.query.copy( - constructor = constructor - ) - ) - } - return rhs - } - - private fun getConstructorFromRelProject(relProject: Rel.Project): Rex { - return when (relProject.bindings.size) { - 0 -> error("The Projection should not have held empty bindings.") - 1 -> { - val binding = relProject.bindings.first() - if (binding.value.isProjectAll()) { - error("Unimplemented feature: coercion of SELECT *.") - } - relProject.bindings.first().value - } - else -> { - if (relProject.bindings.any { it.value.isProjectAll() }) { - error("Unimplemented feature: coercion of SELECT *.") - } - rexCollectionArray( - relProject.bindings.map { it.value }, - type = StaticType.LIST - ) - } - } - } - - override fun walkExprLike(node: PartiqlAst.Expr.Like, ctx: Ctx) = visit(node) { - when (val escape = node.escape) { - null -> rexCall( - id = Constants.like, - args = args( - "value" to node.value, - "pattern" to node.pattern, - ), - type = StaticType.BOOL, - ) - else -> rexCall( - id = Constants.likeEscape, - args = args( - "value" to node.value, - "pattern" to node.pattern, - "escape" to escape, - ), - type = StaticType.BOOL, - ) - } - } - - override fun walkExprBetween(node: PartiqlAst.Expr.Between, ctx: Ctx) = visit(node) { - rexCall( - id = Constants.between, - args = args("value" to node.value, "from" to node.from, "to" to node.to), - type = StaticType.BOOL, - ) - } - - /** - * Here, we must visit the RHS. If it is a scalar subquery, we need to grab the underlying collection. - */ - override fun walkExprInCollection(node: PartiqlAst.Expr.InCollection, ctx: Ctx) = visit(node) { - val lhs = convert(node.operands[0]) - val potentialSubqueryRex = convert(node.operands[1]) - val potentialSubquery = coercePotentialSubquery(potentialSubqueryRex) - val rhs = (potentialSubquery as? 
Rex.Query.Scalar.Subquery)?.query ?: potentialSubquery - rexCall( - id = Constants.inCollection, - args = listOf( - argValue("lhs", lhs), - argValue("rhs", rhs), - ), - type = StaticType.BOOL, - ) - } - - override fun walkExprStruct(node: PartiqlAst.Expr.Struct, ctx: Ctx) = visit(node) { - rexTuple( - fields = node.fields.map { - field( - name = convert(it.first), - value = convert(it.second) - ) - }, - type = StaticType.STRUCT, - ) - } - - override fun walkExprBag(node: PartiqlAst.Expr.Bag, ctx: Ctx) = visit(node) { - rexCollectionBag( - values = convert(node.values), - type = StaticType.BAG, - ) - } - - override fun walkExprList(node: PartiqlAst.Expr.List, ctx: Ctx) = visit(node) { - rexCollectionArray( - values = convert(node.values), - type = StaticType.LIST, - ) - } - - override fun walkExprSexp(node: PartiqlAst.Expr.Sexp, accumulator: Ctx) = visit(node) { - rexCollectionArray( - values = convert(node.values), - type = StaticType.LIST, - ) - } - - override fun walkExprCall(node: PartiqlAst.Expr.Call, ctx: Ctx) = visit(node) { - rexCall( - id = node.funcName.text, - args = args(*node.args.toTypedArray()), - type = null, - ) - } - - override fun walkExprCallAgg(node: PartiqlAst.Expr.CallAgg, ctx: Ctx) = visit(node) { - rexAgg( - id = node.funcName.text, - args = listOf(convert(node.arg)), - modifier = when (node.setq) { - is PartiqlAst.SetQuantifier.All -> Rex.Agg.Modifier.ALL - is PartiqlAst.SetQuantifier.Distinct -> Rex.Agg.Modifier.DISTINCT - }, - type = StaticType.NUMERIC, - ) - } - - override fun walkExprIsType(node: PartiqlAst.Expr.IsType, ctx: Ctx) = visit(node) { - rexCall( - id = Constants.isType, - args = args("value" to node.value, "type" to node.type), - type = StaticType.BOOL, - ) - } - - override fun walkExprSimpleCase(node: PartiqlAst.Expr.SimpleCase, ctx: Ctx) = visit(node) { - rexSwitch( - match = convert(node.expr), - branches = node.cases.pairs.map { - branch( - condition = convert(it.first), - value = convert(it.second), - ) - }, - default = 
if (node.default != null) convert(node.default!!) else null, - type = null - ) - } - - override fun walkExprSearchedCase(node: PartiqlAst.Expr.SearchedCase, ctx: Ctx) = visit(node) { - rexSwitch( - match = null, - branches = node.cases.pairs.map { - branch( - condition = convert(it.first), - value = convert(it.second), - ) - }, - default = if (node.default != null) convert(node.default!!) else null, - type = null - ) - } - - override fun walkExprDate(node: PartiqlAst.Expr.Date, ctx: Ctx): Ctx { - error("Date class undetermined at the moment") - } - - override fun walkExprLitTime(node: PartiqlAst.Expr.LitTime, ctx: Ctx): Ctx { - error("Time class undetermined at the moment") - } - - override fun walkExprBagOp(node: PartiqlAst.Expr.BagOp, ctx: Ctx) = visit(node) { - // Hack for UNION / INTERSECT / EXCEPT because they are missing from the parser - val op = when (node.quantifier) { - is PartiqlAst.SetQuantifier.All -> when (node.op) { - is PartiqlAst.BagOpType.Union, - is PartiqlAst.BagOpType.OuterUnion -> Constants.outerBagUnion - is PartiqlAst.BagOpType.Intersect, - is PartiqlAst.BagOpType.OuterIntersect -> Constants.outerBagIntersect - is PartiqlAst.BagOpType.Except, - is PartiqlAst.BagOpType.OuterExcept -> Constants.outerBagExcept - } - is PartiqlAst.SetQuantifier.Distinct -> when (node.op) { - is PartiqlAst.BagOpType.Union, - is PartiqlAst.BagOpType.OuterUnion -> Constants.outerSetUnion - is PartiqlAst.BagOpType.Intersect, - is PartiqlAst.BagOpType.OuterIntersect -> Constants.outerSetIntersect - is PartiqlAst.BagOpType.Except, - is PartiqlAst.BagOpType.OuterExcept -> Constants.outerSetExcept - } - } - rexCall( - id = op, - args = args("lhs" to node.operands[0], "rhs" to node.operands[1]), - type = StaticType.BAG, - ) - } - - override fun walkExprCast(node: PartiqlAst.Expr.Cast, ctx: Ctx) = visit(node) { - rexCall( - id = Constants.cast, - args = args("value" to node.value, "type" to node.asType), - type = TypeConverter.convert(node.asType), - ) - } - - override 
fun walkExprCanCast(node: PartiqlAst.Expr.CanCast, ctx: Ctx) = visit(node) { - rexCall( - id = Constants.canCast, - args = args("value" to node.value, "type" to node.asType), - type = StaticType.BOOL, - ) - } - - override fun walkExprCanLosslessCast(node: PartiqlAst.Expr.CanLosslessCast, ctx: Ctx) = visit(node) { - rexCall( - id = Constants.canLosslessCast, - args = args("value" to node.value, "type" to node.asType), - type = StaticType.BOOL, - ) - } - - override fun walkExprNullIf(node: PartiqlAst.Expr.NullIf, ctx: Ctx) = visit(node) { - rexCall( - id = Constants.nullIf, - args = args(node.expr1, node.expr2), - type = StaticType.BOOL, - ) - } - - override fun walkExprCoalesce(node: PartiqlAst.Expr.Coalesce, ctx: Ctx) = visit(node) { - rexCall( - id = Constants.coalesce, - args = args(node.args), - type = null, - ) - } - - override fun walkExprSelect(node: PartiqlAst.Expr.Select, ctx: Ctx) = visit(node) { - when (val query = RelConverter.convert(node)) { - is Rex.Query.Collection -> rexQueryScalarSubquery(query, null) - is Rex.Query.Scalar -> query - } - } - - internal fun convertCase(case: PartiqlAst.CaseSensitivity) = when (case) { - is PartiqlAst.CaseSensitivity.CaseInsensitive -> Case.INSENSITIVE - is PartiqlAst.CaseSensitivity.CaseSensitive -> Case.SENSITIVE - } - - internal object Constants { - - // const val unaryNot = "unary_not" - // - // const val unaryPlus = "unary_plus" - // - // const val unaryMinus = "unary_minus" - // - // const val unaryNegate = "unary_negate" - // - // const val binaryAdd = "binary_add" - // - // const val binarySub = "binary_sb" - // - // const val binaryMult = "binary_mult" - // - // const val binaryDiv = "binary_div" - // - // const val binaryMod = "binary_mod" - // - // const val binaryConcat = "binary_concat" - // - // const val binaryAnd = "binary_and" - // - // const val binaryOr = "binary_or" - // - // const val binaryEq = "binary_eq" - // - // const val binaryNeq = "binary_neq" - // - // const val binaryGt = "binary_gt" - 
// - // const val binaryGte = "binary_gte" - // - // const val binaryLt = "binary_lt" - // - // const val binaryLte = "binary_lte" - - const val like = "like" - - const val likeEscape = "like_escape" - - const val between = "between" - - const val inCollection = "in_collection" - - const val isType = "is_type" - - const val outerBagUnion = "outer_bag_union" - - const val outerBagIntersect = "outer_bag_intersect" - - const val outerBagExcept = "outer_bag_except" - - const val outerSetUnion = "outer_set_union" - - const val outerSetIntersect = "outer_set_intersect" - - const val outerSetExcept = "outer_set_except" - - const val cast = "cast" - - const val canCast = "can_cast" - - const val canLosslessCast = "can_lossless_cast" - - const val nullIf = "null_if" - - const val coalesce = "coalesce" - } -} diff --git a/partiql-lang/src/main/kotlin/org/partiql/lang/planner/transforms/plan/TypeConverter.kt b/partiql-lang/src/main/kotlin/org/partiql/lang/planner/transforms/plan/TypeConverter.kt deleted file mode 100644 index fa5c52e1a..000000000 --- a/partiql-lang/src/main/kotlin/org/partiql/lang/planner/transforms/plan/TypeConverter.kt +++ /dev/null @@ -1,135 +0,0 @@ -package org.partiql.lang.planner.transforms.plan - -import com.amazon.ion.IonType -import org.partiql.lang.domains.PartiqlAst -import org.partiql.types.DecimalType -import org.partiql.types.NumberConstraint -import org.partiql.types.StaticType -import org.partiql.types.StringType -import org.partiql.types.TimeType - -/** - * Converts a [PartiqlAst.Type] to [StaticType] - * - * Similar PIG workarounds to RexConverter; we could do a massive if-else like PartiqlPhysicalTypeExtensions. - * but VisitorFold already has the big if-else. - * - * StaticType could use some helper functions. 
- * The conversions (afaik) - * - PartiqlAst.Type to StaticType - * - IonType to StaticType - * both require converting to ExprValueType then StaticType, or converting to Ion Sexp then parsing as PartiQL physical - */ -@Suppress("PARAMETER_NAME_CHANGED_ON_OVERRIDE") -internal object TypeConverter : PartiqlAst.VisitorFold() { - - // Something to appease the input accumulator, we only want the return value - @Suppress("ObjectPropertyName") - private val _ignore = StaticType.NULL - - fun convert(type: PartiqlAst.Type): StaticType = TypeConverter.walkType(type, _ignore) - - fun convert(type: IonType): StaticType = when (type) { - IonType.NULL -> StaticType.NULL - IonType.BOOL -> StaticType.BOOL - IonType.INT -> StaticType.INT - IonType.FLOAT -> StaticType.FLOAT - IonType.DECIMAL -> StaticType.DECIMAL - IonType.TIMESTAMP -> StaticType.TIMESTAMP - IonType.SYMBOL -> StaticType.SYMBOL - IonType.STRING -> StaticType.STRING - IonType.CLOB -> StaticType.CLOB - IonType.BLOB -> StaticType.BLOB - IonType.LIST -> StaticType.LIST - IonType.SEXP -> StaticType.SEXP - IonType.STRUCT -> StaticType.STRUCT - // datagram - else -> error("unexpected Ion type $type") - } - - override fun walkTypeNullType(node: PartiqlAst.Type.NullType, _ignore: StaticType) = StaticType.NULL - - override fun walkTypeBooleanType(node: PartiqlAst.Type.BooleanType, _ignore: StaticType) = StaticType.BOOL - - override fun walkTypeSmallintType(node: PartiqlAst.Type.SmallintType, _ignore: StaticType) = StaticType.INT2 - - override fun walkTypeInteger4Type(node: PartiqlAst.Type.Integer4Type, _ignore: StaticType) = StaticType.INT4 - - override fun walkTypeInteger8Type(node: PartiqlAst.Type.Integer8Type, _ignore: StaticType) = StaticType.INT8 - - override fun walkTypeIntegerType(node: PartiqlAst.Type.IntegerType, _ignore: StaticType) = StaticType.INT - - override fun walkTypeFloatType(node: PartiqlAst.Type.FloatType, _ignore: StaticType) = StaticType.FLOAT - - override fun walkTypeRealType(node: 
PartiqlAst.Type.RealType, _ignore: StaticType) = StaticType.FLOAT - - override fun walkTypeDoublePrecisionType( - node: PartiqlAst.Type.DoublePrecisionType, - _ignore: StaticType - ) = StaticType.FLOAT - - override fun walkTypeDecimalType(node: PartiqlAst.Type.DecimalType, _ignore: StaticType) = when { - node.precision == null && node.scale == null -> StaticType.DECIMAL - else -> DecimalType( - DecimalType.PrecisionScaleConstraint.Constrained( - precision = node.precision!!.value.toInt(), - scale = node.scale?.value?.toInt() ?: 0 - ) - ) - } - - override fun walkTypeNumericType(node: PartiqlAst.Type.NumericType, _ignore: StaticType) = when { - node.precision == null && node.scale == null -> StaticType.DECIMAL - else -> DecimalType( - DecimalType.PrecisionScaleConstraint.Constrained( - precision = node.precision!!.value.toInt(), - scale = node.scale?.value?.toInt() ?: 0 - ) - ) - } - - override fun walkTypeTimestampType(node: PartiqlAst.Type.TimestampType, _ignore: StaticType) = StaticType.TIMESTAMP - - override fun walkTypeCharacterType(node: PartiqlAst.Type.CharacterType, _ignore: StaticType) = StringType( - StringType.StringLengthConstraint.Constrained( - NumberConstraint.Equals(node.length?.value?.toInt() ?: 1) - ) - ) - - override fun walkTypeCharacterVaryingType(node: PartiqlAst.Type.CharacterVaryingType, _ignore: StaticType) = when (val length = node.length) { - null -> StringType(StringType.StringLengthConstraint.Unconstrained) - else -> StringType(StringType.StringLengthConstraint.Constrained(NumberConstraint.UpTo(length.value.toInt()))) - } - - override fun walkTypeMissingType(node: PartiqlAst.Type.MissingType, _ignore: StaticType) = StaticType.MISSING - - override fun walkTypeStringType(node: PartiqlAst.Type.StringType, _ignore: StaticType) = StaticType.STRING - - override fun walkTypeSymbolType(node: PartiqlAst.Type.SymbolType, _ignore: StaticType) = StaticType.SYMBOL - - override fun walkTypeBlobType(node: PartiqlAst.Type.BlobType, _ignore: StaticType) 
= StaticType.BLOB - - override fun walkTypeClobType(node: PartiqlAst.Type.ClobType, _ignore: StaticType) = StaticType.CLOB - - override fun walkTypeDateType(node: PartiqlAst.Type.DateType, _ignore: StaticType) = StaticType.DATE - - override fun walkTypeTimeType(node: PartiqlAst.Type.TimeType, _ignore: StaticType) = TimeType(node.precision?.value?.toInt(), withTimeZone = false) - - override fun walkTypeTimeWithTimeZoneType(node: PartiqlAst.Type.TimeWithTimeZoneType, _ignore: StaticType) = TimeType(node.precision?.value?.toInt(), withTimeZone = false) - - override fun walkTypeStructType(node: PartiqlAst.Type.StructType, _ignore: StaticType) = StaticType.STRUCT - - override fun walkTypeTupleType(node: PartiqlAst.Type.TupleType, _ignore: StaticType) = StaticType.STRUCT - - override fun walkTypeListType(node: PartiqlAst.Type.ListType, _ignore: StaticType) = StaticType.LIST - - override fun walkTypeSexpType(node: PartiqlAst.Type.SexpType, _ignore: StaticType) = StaticType.SEXP - - override fun walkTypeBagType(node: PartiqlAst.Type.BagType, _ignore: StaticType) = StaticType.BAG - - override fun walkTypeAnyType(node: PartiqlAst.Type.AnyType, _ignore: StaticType) = StaticType.ANY - - override fun walkTypeCustomType(node: PartiqlAst.Type.CustomType, _ignore: StaticType): StaticType { - error("custom type not supported in current representation") - } -} diff --git a/partiql-lang/src/test/kotlin/org/partiql/lang/planner/SchemaLoader.kt b/partiql-lang/src/test/kotlin/org/partiql/lang/planner/SchemaLoader.kt new file mode 100644 index 000000000..f6a291841 --- /dev/null +++ b/partiql-lang/src/test/kotlin/org/partiql/lang/planner/SchemaLoader.kt @@ -0,0 +1,229 @@ +package org.partiql.lang.planner + +import com.amazon.ionelement.api.IonElement +import com.amazon.ionelement.api.ListElement +import com.amazon.ionelement.api.StringElement +import com.amazon.ionelement.api.StructElement +import com.amazon.ionelement.api.SymbolElement +import com.amazon.ionelement.api.ionListOf +import 
com.amazon.ionelement.api.ionString +import com.amazon.ionelement.api.ionStructOf +import com.amazon.ionelement.api.ionSymbol +import org.partiql.types.AnyOfType +import org.partiql.types.AnyType +import org.partiql.types.BagType +import org.partiql.types.BlobType +import org.partiql.types.BoolType +import org.partiql.types.ClobType +import org.partiql.types.DateType +import org.partiql.types.DecimalType +import org.partiql.types.FloatType +import org.partiql.types.GraphType +import org.partiql.types.IntType +import org.partiql.types.ListType +import org.partiql.types.MissingType +import org.partiql.types.NullType +import org.partiql.types.SexpType +import org.partiql.types.StaticType +import org.partiql.types.StringType +import org.partiql.types.StructType +import org.partiql.types.SymbolType +import org.partiql.types.TimeType +import org.partiql.types.TimestampType +import org.partiql.types.TupleConstraint + +// TODO: This code is ported from plugins/partiql-local/src/main/kotlin/org/partiql/plugins/local/LocalSchema.kt +// In my opinion, the in-memory connector should be independent of schema file format, +// hence making it inappropriate to leave the code in plugins/partiql-memory +// We need to figure out where to put the code. +object SchemaLoader { +// Use some generated serde eventually + + public inline fun StructElement.getAngry(name: String): T { + val f = getOptional(name) ?: error("Expected field `$name`") + if (f !is T) { + error("Expected field `name` to be of type ${T::class.simpleName}") + } + return f + } + + /** + * Parses an IonElement to a StaticType. + * + * The format used is effectively Avro JSON, but with PartiQL type names. 
+ */ + public fun IonElement.toStaticType(): StaticType { + return when (this) { + is StringElement -> this.toStaticType() + is ListElement -> this.toStaticType() + is StructElement -> this.toStaticType() + else -> error("Invalid element, expected string, list, or struct") + } + } + + // Atomic type + public fun StringElement.toStaticType(): StaticType = when (textValue) { + "any" -> StaticType.ANY + "bool" -> StaticType.BOOL + "int8" -> error("`int8` is currently not supported") + "int16" -> StaticType.INT2 + "int32" -> StaticType.INT4 + "int64" -> StaticType.INT8 + "int" -> StaticType.INT + "decimal" -> StaticType.DECIMAL + "float32" -> StaticType.FLOAT + "float64" -> StaticType.FLOAT + "string" -> StaticType.STRING + "symbol" -> StaticType.SYMBOL + "binary" -> error("`binary` is currently not supported") + "byte" -> error("`byte` is currently not supported") + "blob" -> StaticType.BLOB + "clob" -> StaticType.CLOB + "date" -> StaticType.DATE + "time" -> StaticType.TIME + "timestamp" -> StaticType.TIMESTAMP + "interval" -> error("`interval` is currently not supported") + "bag" -> error("`bag` is not an atomic type") + "list" -> error("`list` is not an atomic type") + "sexp" -> error("`sexp` is not an atomic type") + "struct" -> error("`struct` is not an atomic type") + "null" -> StaticType.NULL + "missing" -> StaticType.MISSING + else -> error("Invalid type `$textValue`") + } + + // Union type + public fun ListElement.toStaticType(): StaticType { + val types = values.map { it.toStaticType() }.toSet() + return StaticType.unionOf(types) + } + + // Complex type + public fun StructElement.toStaticType(): StaticType { + val type = getAngry("type").textValue + return when (type) { + "bag" -> toBagType() + "list" -> toListType() + "sexp" -> toSexpType() + "struct" -> toStructType() + else -> error("Unknown complex type $type") + } + } + + public fun StructElement.toBagType(): StaticType { + val items = getAngry("items").toStaticType() + return BagType(items) + } + + 
public fun StructElement.toListType(): StaticType { + val items = getAngry("items").toStaticType() + return ListType(items) + } + + public fun StructElement.toSexpType(): StaticType { + val items = getAngry("items").toStaticType() + return SexpType(items) + } + + public fun StructElement.toStructType(): StaticType { + // Constraints + var contentClosed = false + val constraintsE = getOptional("constraints") ?: ionListOf() + val constraints = (constraintsE as ListElement).values.map { + assert(it is SymbolElement) + it as SymbolElement + when (it.textValue) { + "ordered" -> TupleConstraint.Ordered + "unique" -> TupleConstraint.UniqueAttrs(true) + "closed" -> { + contentClosed = true + TupleConstraint.Open(false) + } + else -> error("unknown tuple constraint `${it.textValue}`") + } + }.toSet() + // Fields + val fieldsE = getAngry("fields") + val fields = fieldsE.values.map { + assert(it is StructElement) { "field definition must be as struct" } + it as StructElement + val name = it.getAngry("name").textValue + val type = it.getAngry("type").toStaticType() + StructType.Field(name, type) + } + return StructType(fields, contentClosed, constraints = constraints) + } + + public fun StaticType.toIon(): IonElement = when (this) { + is AnyOfType -> this.toIon() + is AnyType -> ionString("any") + is BlobType -> ionString("blob") + is BoolType -> ionString("bool") + is ClobType -> ionString("clob") + is BagType -> this.toIon() + is ListType -> this.toIon() + is SexpType -> this.toIon() + is DateType -> ionString("date") + is DecimalType -> ionString("decimal") + is FloatType -> ionString("float64") + is GraphType -> ionString("graph") + is IntType -> when (this.rangeConstraint) { + IntType.IntRangeConstraint.SHORT -> ionString("int16") + IntType.IntRangeConstraint.INT4 -> ionString("int32") + IntType.IntRangeConstraint.LONG -> ionString("int64") + IntType.IntRangeConstraint.UNCONSTRAINED -> ionString("int") + } + MissingType -> ionString("missing") + is NullType -> 
ionString("null") + is StringType -> ionString("string") // TODO char + is StructType -> this.toIon() + is SymbolType -> ionString("symbol") + is TimeType -> ionString("time") + is TimestampType -> ionString("timestamp") + } + + private fun AnyOfType.toIon(): IonElement { + // create some predictable ordering + val sorted = this.types.sortedWith { t1, t2 -> t1::class.java.simpleName.compareTo(t2::class.java.simpleName) } + val elements = sorted.map { it.toIon() } + return ionListOf(elements) + } + + private fun BagType.toIon(): IonElement = ionStructOf( + "type" to ionString("bag"), + "items" to elementType.toIon() + ) + + private fun ListType.toIon(): IonElement = ionStructOf( + "type" to ionString("list"), + "items" to elementType.toIon() + ) + + private fun SexpType.toIon(): IonElement = ionStructOf( + "type" to ionString("sexp"), + "items" to elementType.toIon() + ) + + private fun StructType.toIon(): IonElement { + val constraintSymbols = mutableListOf() + for (constraint in constraints) { + val c = when (constraint) { + is TupleConstraint.Open -> if (constraint.value) null else ionSymbol("closed") + TupleConstraint.Ordered -> ionSymbol("ordered") + is TupleConstraint.UniqueAttrs -> ionSymbol("unique") + } + if (c != null) constraintSymbols.add(c) + } + val fieldTypes = this.fields.map { + ionStructOf( + "name" to ionString(it.key), + "type" to it.value.toIon(), + ) + } + return ionStructOf( + "type" to ionString("struct"), + "fields" to ionListOf(fieldTypes), + "constraints" to ionListOf(constraintSymbols), + ) + } +} diff --git a/partiql-lang/src/test/kotlin/org/partiql/lang/planner/transforms/PartiQLSchemaInferencerTests.kt b/partiql-lang/src/test/kotlin/org/partiql/lang/planner/transforms/PartiQLSchemaInferencerTests.kt index 5753706e6..dc59a9e84 100644 --- a/partiql-lang/src/test/kotlin/org/partiql/lang/planner/transforms/PartiQLSchemaInferencerTests.kt +++ b/partiql-lang/src/test/kotlin/org/partiql/lang/planner/transforms/PartiQLSchemaInferencerTests.kt 
@@ -3,33 +3,46 @@ package org.partiql.lang.planner.transforms import com.amazon.ionelement.api.field import com.amazon.ionelement.api.ionString import com.amazon.ionelement.api.ionStructOf -import org.junit.jupiter.api.Test +import com.amazon.ionelement.api.loadSingleElement import org.junit.jupiter.api.assertThrows import org.junit.jupiter.api.extension.ExtensionContext +import org.junit.jupiter.api.parallel.Execution +import org.junit.jupiter.api.parallel.ExecutionMode import org.junit.jupiter.params.ParameterizedTest import org.junit.jupiter.params.provider.Arguments import org.junit.jupiter.params.provider.ArgumentsProvider import org.junit.jupiter.params.provider.ArgumentsSource +import org.junit.jupiter.params.provider.MethodSource import org.partiql.annotations.ExperimentalPartiQLSchemaInferencer import org.partiql.errors.Problem -import org.partiql.errors.ProblemSeverity import org.partiql.errors.UNKNOWN_PROBLEM_LOCATION -import org.partiql.lang.ast.passes.SemanticProblemDetails import org.partiql.lang.errors.ProblemCollector -import org.partiql.lang.planner.PlanningProblemDetails +import org.partiql.lang.planner.SchemaLoader.toStaticType import org.partiql.lang.planner.transforms.PartiQLSchemaInferencerTests.ProblemHandler import org.partiql.lang.planner.transforms.PartiQLSchemaInferencerTests.TestCase.ErrorTestCase import org.partiql.lang.planner.transforms.PartiQLSchemaInferencerTests.TestCase.SuccessTestCase import org.partiql.lang.planner.transforms.PartiQLSchemaInferencerTests.TestCase.ThrowingExceptionTestCase -import org.partiql.plan.Rex -import org.partiql.plugins.local.LocalPlugin +import org.partiql.plan.debug.PlanPrinter +import org.partiql.planner.PartiQLPlanner +import org.partiql.planner.PlanningProblemDetails +import org.partiql.planner.test.PartiQLTest +import org.partiql.planner.test.PartiQLTestProvider +import org.partiql.plugins.memory.MemoryCatalog +import org.partiql.plugins.memory.MemoryPlugin import org.partiql.types.AnyOfType import 
org.partiql.types.AnyType import org.partiql.types.BagType import org.partiql.types.ListType +import org.partiql.types.SexpType import org.partiql.types.StaticType +import org.partiql.types.StaticType.Companion.ANY +import org.partiql.types.StaticType.Companion.BAG import org.partiql.types.StaticType.Companion.BOOL +import org.partiql.types.StaticType.Companion.DATE +import org.partiql.types.StaticType.Companion.DECIMAL import org.partiql.types.StaticType.Companion.INT +import org.partiql.types.StaticType.Companion.INT4 +import org.partiql.types.StaticType.Companion.INT8 import org.partiql.types.StaticType.Companion.MISSING import org.partiql.types.StaticType.Companion.NULL import org.partiql.types.StaticType.Companion.STRING @@ -38,35 +51,147 @@ import org.partiql.types.StructType import org.partiql.types.TupleConstraint import java.time.Instant import java.util.stream.Stream -import kotlin.io.path.pathString -import kotlin.io.path.toPath import kotlin.reflect.KClass import kotlin.test.assertEquals import kotlin.test.assertNotNull import kotlin.test.assertTrue class PartiQLSchemaInferencerTests { + private val testProvider = PartiQLTestProvider() + + init { + // load test inputs + testProvider.load() + } + + @ParameterizedTest + @ArgumentsSource(TestProvider::class) + fun test(tc: TestCase) = runTest(tc) + + @ParameterizedTest + @MethodSource("collections") + @Execution(ExecutionMode.CONCURRENT) + fun testCollections(tc: TestCase) = runTest(tc) + + @ParameterizedTest + @MethodSource("selectStar") + @Execution(ExecutionMode.CONCURRENT) + fun testSelectStar(tc: TestCase) = runTest(tc) + + @ParameterizedTest + @MethodSource("sessionVariables") + @Execution(ExecutionMode.CONCURRENT) + fun testSessionVariables(tc: TestCase) = runTest(tc) + + @ParameterizedTest + @MethodSource("bitwiseAnd") + @Execution(ExecutionMode.CONCURRENT) + fun testBitwiseAnd(tc: TestCase) = runTest(tc) + + @ParameterizedTest + @MethodSource("unpivotCases") + @Execution(ExecutionMode.CONCURRENT) 
+ fun testUnpivot(tc: TestCase) = runTest(tc) + + @ParameterizedTest + @MethodSource("joinCases") + @Execution(ExecutionMode.CONCURRENT) + fun testJoins(tc: TestCase) = runTest(tc) + + @ParameterizedTest + @MethodSource("excludeCases") + @Execution(ExecutionMode.CONCURRENT) + fun testExclude(tc: TestCase) = runTest(tc) + + @ParameterizedTest + @MethodSource("orderByCases") + @Execution(ExecutionMode.CONCURRENT) + fun testOrderBy(tc: TestCase) = runTest(tc) + + @ParameterizedTest + @MethodSource("tupleUnionCases") + @Execution(ExecutionMode.CONCURRENT) + fun testTupleUnion(tc: TestCase) = runTest(tc) + + @ParameterizedTest + @MethodSource("aggregationCases") + @Execution(ExecutionMode.CONCURRENT) + fun testAggregations(tc: TestCase) = runTest(tc) + + @ParameterizedTest + @MethodSource("scalarFunctions") + @Execution(ExecutionMode.CONCURRENT) + fun testScalarFunctions(tc: TestCase) = runTest(tc) + + @ParameterizedTest + @MethodSource("pathExpressions") + @Execution(ExecutionMode.CONCURRENT) + fun testPathExpressions(tc: TestCase) = runTest(tc) + + @ParameterizedTest + @MethodSource("caseWhens") + @Execution(ExecutionMode.CONCURRENT) + fun testCaseWhens(tc: TestCase) = runTest(tc) + + @ParameterizedTest + @MethodSource("subqueryCases") + @Execution(ExecutionMode.CONCURRENT) + fun testSubqueries(tc: TestCase) = runTest(tc) + + @ParameterizedTest + @MethodSource("dynamicCalls") + @Execution(ExecutionMode.CONCURRENT) + fun testDynamicCalls(tc: TestCase) = runTest(tc) companion object { + val inputStream = this::class.java.getResourceAsStream("/resource_path.txt")!! - private val root = this::class.java.getResource("/catalogs")!!.toURI().toPath().pathString + val catalogProvider = MemoryCatalog.Provider().also { + val map = mutableMapOf>>() + inputStream.reader().readLines().forEach { path -> + if (path.startsWith("catalogs/default")) { + val schema = this::class.java.getResourceAsStream("/$path")!! 
+ val ion = loadSingleElement(schema.reader().readText()) + val staticType = ion.toStaticType() + val steps = path.split('/').drop(2) // drop the catalogs/default + val catalogName = steps.first() + val subPath = steps + .drop(1) + .joinToString(".") { it.lowercase() } + .let { + it.substring(0, it.length - 4) + } + if (map.containsKey(catalogName)) { + map[catalogName]!!.add(subPath to staticType) + } else { + map[catalogName] = mutableListOf(subPath to staticType) + } + } + } + map.forEach { (k: String, v: MutableList>) -> + it[k] = MemoryCatalog.of(*v.toTypedArray()) + } + } - private val PLUGINS = listOf(LocalPlugin()) + private val PLUGINS = listOf(MemoryPlugin(catalogProvider)) private const val USER_ID = "TEST_USER" private val catalogConfig = mapOf( "aws" to ionStructOf( - field("connector_name", ionString("local")), - field("root", ionString("$root/aws")), + field("connector_name", ionString("memory")), ), "b" to ionStructOf( - field("connector_name", ionString("local")), - field("root", ionString("$root/b")), + field("connector_name", ionString("memory")), ), "db" to ionStructOf( - field("connector_name", ionString("local")), - field("root", ionString("$root/db")), + field("connector_name", ionString("memory")), + ), + "pql" to ionStructOf( + field("connector_name", ionString("memory")), + ), + "subqueries" to ionStructOf( + field("connector_name", ionString("memory")), ), ) @@ -75,22 +200,36 @@ class PartiQLSchemaInferencerTests { const val CATALOG_DB = "db" val DB_SCHEMA_MARKETS = listOf("markets") - val TYPE_BOOL = BOOL - private val TYPE_AWS_DDB_PETS_ID = INT + val TYPE_BOOL = StaticType.BOOL + private val TYPE_AWS_DDB_PETS_ID = INT4 private val TYPE_AWS_DDB_PETS_BREED = STRING - val TABLE_AWS_DDB_PETS_ELEMENT_TYPE = StructType( - fields = mapOf( - "id" to TYPE_AWS_DDB_PETS_ID, - "breed" to TYPE_AWS_DDB_PETS_BREED - ), - contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true), TupleConstraint.Ordered) 
- ) - val TABLE_AWS_DDB_PETS_BAG = BagType( - elementType = TABLE_AWS_DDB_PETS_ELEMENT_TYPE + val TABLE_AWS_DDB_PETS = BagType( + elementType = StructType( + fields = mapOf( + "id" to TYPE_AWS_DDB_PETS_ID, + "breed" to TYPE_AWS_DDB_PETS_BREED + ), + contentClosed = true, + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true), + TupleConstraint.Ordered + ) + ) ) val TABLE_AWS_DDB_PETS_LIST = ListType( - elementType = TABLE_AWS_DDB_PETS_ELEMENT_TYPE + elementType = StructType( + fields = mapOf( + "id" to TYPE_AWS_DDB_PETS_ID, + "breed" to TYPE_AWS_DDB_PETS_BREED + ), + contentClosed = true, + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true), + TupleConstraint.Ordered + ) + ) ) val TABLE_AWS_DDB_B = BagType( StructType( @@ -105,7 +244,7 @@ class PartiQLSchemaInferencerTests { ) val TABLE_AWS_B_B = BagType( StructType( - fields = mapOf("identifier" to INT), + fields = mapOf("identifier" to INT4), contentClosed = true, constraints = setOf( TupleConstraint.Open(false), @@ -114,14 +253,14 @@ class PartiQLSchemaInferencerTests { ) ) ) - val TYPE_B_B_B_B_B = INT + val TYPE_B_B_B_B_B = INT4 private val TYPE_B_B_B_B = StructType( mapOf("b" to TYPE_B_B_B_B_B), contentClosed = true, constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true), TupleConstraint.Ordered) ) - val TYPE_B_B_B_C = INT - val TYPE_B_B_C = INT + val TYPE_B_B_B_C = INT4 + val TYPE_B_B_C = INT4 val TYPE_B_B_B = StructType( fields = mapOf( @@ -135,62 +274,51 @@ class PartiQLSchemaInferencerTests { TupleConstraint.Ordered ) ) - } - - @ParameterizedTest - @ArgumentsSource(TestProvider::class) - fun test(tc: TestCase) = runTest(tc) - - sealed class TestCase { - class SuccessTestCase( - val name: String, - val query: String, - val catalog: String? = null, - val catalogPath: List = emptyList(), - val expected: StaticType, - val warnings: ProblemHandler? 
= null - ) : TestCase() { - override fun toString(): String = "$name : $query" - } - - class ErrorTestCase( - val name: String, - val query: String, - val catalog: String? = null, - val catalogPath: List = emptyList(), - val note: String? = null, - val expected: StaticType? = null, - val problemHandler: ProblemHandler? = null, - ) : TestCase() { - override fun toString(): String = "$name : $query" - } - class ThrowingExceptionTestCase( - val name: String, - val query: String, - val catalog: String? = null, - val catalogPath: List = emptyList(), - val note: String? = null, - val expectedThrowable: KClass - ) : TestCase() { - override fun toString(): String { - return "$name : $query" + private fun assertProblemExists(problem: () -> Problem) = ProblemHandler { problems, ignoreSourceLocation -> + when (ignoreSourceLocation) { + true -> assertTrue("Expected to find ${problem.invoke()} in $problems") { problems.any { it.details == problem.invoke().details } } + false -> assertTrue("Expected to find ${problem.invoke()} in $problems") { problems.any { it == problem.invoke() } } } } - } - class TestProvider : ArgumentsProvider { - override fun provideArguments(context: ExtensionContext?): Stream { - return parameters.map { Arguments.of(it) }.stream() - } + // Tests - private val parameters = listOf( - ErrorTestCase( - name = "Pets should not be accessible #1", - query = "SELECT * FROM pets", + private fun key(name: String) = PartiQLTest.Key("schema_inferencer", name) + + @JvmStatic + fun collections() = listOf( + SuccessTestCase( + name = "Collection BAG", + key = key("collections-01"), + expected = BagType(INT4), + ), + SuccessTestCase( + name = "Collection LIST", + key = key("collections-02"), + expected = ListType(INT4), + ), + SuccessTestCase( + name = "Collection LIST", + key = key("collections-03"), + expected = ListType(INT4), + ), + SuccessTestCase( + name = "Collection SEXP", + key = key("collections-04"), + expected = SexpType(INT4), + ), + SuccessTestCase( + 
name = "SELECT from array", + key = key("collections-05"), + expected = BagType(INT4), + ), + SuccessTestCase( + name = "SELECT from array", + key = key("collections-06"), expected = BagType( StructType( - fields = mapOf("pets" to StaticType.ANY), + fields = listOf(StructType.Field("x", INT4)), contentClosed = true, constraints = setOf( TupleConstraint.Open(false), @@ -198,77 +326,142 @@ class PartiQLSchemaInferencerTests { TupleConstraint.Ordered ) ) - ), - problemHandler = assertProblemExists { - Problem( - UNKNOWN_PROBLEM_LOCATION, - PlanningProblemDetails.UndefinedVariable("pets", false) - ) - } + ) ), - ErrorTestCase( - name = "Pets should not be accessible #2", + ) + + @JvmStatic + fun structs() = listOf() + + @JvmStatic + fun selectStar() = listOf( + SuccessTestCase( + name = "Test #8", catalog = CATALOG_AWS, - query = "SELECT * FROM pets", + query = "SELECT * FROM ddb.pets", + expected = TABLE_AWS_DDB_PETS + ), + SuccessTestCase( + name = "Test #9", + catalog = CATALOG_AWS, + query = "SELECT * FROM b.b", + expected = TABLE_AWS_B_B + ), + SuccessTestCase( + name = "Select star with join", + key = key("sanity-05"), + catalog = "pql", expected = BagType( StructType( - fields = mapOf("pets" to StaticType.ANY), contentClosed = true, constraints = setOf( TupleConstraint.Open(false), - TupleConstraint.UniqueAttrs(true), + TupleConstraint.UniqueAttrs(false), TupleConstraint.Ordered + ), + fields = listOf( + StructType.Field( + "name", + StructType( + fields = listOf( + StructType.Field("first", STRING), + StructType.Field("last", STRING), + ), + contentClosed = true, + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true), + TupleConstraint.Ordered + ), + ) + ), + StructType.Field("ssn", STRING), + StructType.Field("employer", STRING.asNullable()), + StructType.Field("name", STRING), + StructType.Field("tax_id", INT8), + StructType.Field( + "address", + StructType( + fields = listOf( + StructType.Field("street", STRING), + 
StructType.Field("zip", INT4), + ), + contentClosed = true, + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true), + TupleConstraint.Ordered + ) + ) + ), ) ) - ), - problemHandler = assertProblemExists { - Problem( - UNKNOWN_PROBLEM_LOCATION, - PlanningProblemDetails.UndefinedVariable("pets", false) - ) - } + ) ), SuccessTestCase( - name = "Project all explicitly", - catalog = CATALOG_AWS, - catalogPath = listOf("ddb"), - query = "SELECT * FROM pets", - expected = TABLE_AWS_DDB_PETS_BAG + name = "Select star", + key = key("sanity-06"), + catalog = "pql", + expected = BagType( + StructType( + fields = listOf( + StructType.Field("first", STRING), + StructType.Field("last", STRING), + StructType.Field("full_name", STRING), + ), + contentClosed = true, + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true), + TupleConstraint.Ordered + ) + ) + ) ), + ) + + @JvmStatic + fun sessionVariables() = listOf( SuccessTestCase( - name = "Project all implicitly", - catalog = CATALOG_AWS, - catalogPath = listOf("ddb"), - query = "SELECT id, breed FROM pets", - expected = TABLE_AWS_DDB_PETS_BAG + name = "Current User", + query = "CURRENT_USER", + expected = unionOf(STRING, StaticType.NULL) ), SuccessTestCase( - name = "Test #4", - catalog = CATALOG_B, - catalogPath = listOf("b"), - query = "b", - expected = TYPE_B_B_B + name = "Current User Concat", + query = "CURRENT_USER || 'hello'", + expected = unionOf(STRING, StaticType.NULL) ), SuccessTestCase( - name = "Test #5", - catalog = CATALOG_AWS, - catalogPath = listOf("ddb"), - query = "SELECT * FROM b", - expected = TABLE_AWS_DDB_B + name = "Current User in WHERE", + query = "SELECT VALUE a FROM [ 0 ] AS a WHERE CURRENT_USER = 'hello'", + expected = BagType(INT4) ), SuccessTestCase( - name = "Test #6", - catalog = CATALOG_AWS, - catalogPath = listOf("b"), - query = "SELECT * FROM b", - expected = TABLE_AWS_B_B + name = "Current User in WHERE", + query = "SELECT 
VALUE a FROM [ 0 ] AS a WHERE CURRENT_USER = 5", + expected = BagType(INT4), ), - ErrorTestCase( - name = "Test #7", - query = "SELECT * FROM ddb.pets", + SuccessTestCase( + name = "Testing CURRENT_USER and CURRENT_DATE Binders", + query = """ + SELECT + CURRENT_USER, + CURRENT_DATE, + CURRENT_USER AS "curr_user", + CURRENT_DATE AS "curr_date", + CURRENT_USER || ' is my name.' AS name_desc + FROM << 0, 1 >>; + """.trimIndent(), expected = BagType( StructType( - fields = mapOf("pets" to StaticType.ANY), + fields = listOf( + StructType.Field("CURRENT_USER", STRING.asNullable()), + StructType.Field("CURRENT_DATE", DATE), + StructType.Field("curr_user", STRING.asNullable()), + StructType.Field("curr_date", DATE), + StructType.Field("name_desc", STRING.asNullable()), + ), contentClosed = true, constraints = setOf( TupleConstraint.Open(false), @@ -276,518 +469,286 @@ class PartiQLSchemaInferencerTests { TupleConstraint.Ordered ) ) - ), + ) + ), + ErrorTestCase( + name = "Current User (String) PLUS String", + query = "CURRENT_USER + 'hello'", + expected = StaticType.MISSING, problemHandler = assertProblemExists { Problem( UNKNOWN_PROBLEM_LOCATION, - PlanningProblemDetails.UndefinedVariable("pets", false) + PlanningProblemDetails.UnknownFunction( + "plus", + listOf( + unionOf(STRING, StaticType.NULL), + STRING, + ), + ) ) } ), + ) + + @JvmStatic + fun bitwiseAnd() = listOf( SuccessTestCase( - name = "Test #8", - catalog = CATALOG_AWS, - query = "SELECT * FROM ddb.pets", - expected = TABLE_AWS_DDB_PETS_BAG + name = "BITWISE_AND_1", + query = "1 & 2", + expected = INT4 ), SuccessTestCase( - name = "Test #9", - catalog = CATALOG_AWS, - query = "SELECT * FROM b.b", - expected = TABLE_AWS_B_B + name = "BITWISE_AND_2", + query = "CAST(1 AS INT2) & CAST(2 AS INT2)", + expected = StaticType.unionOf(StaticType.INT2, MISSING) ), SuccessTestCase( - name = "Test #10", - catalog = CATALOG_B, - query = "b.b", - expected = TYPE_B_B_B - ), - SuccessTestCase( - name = "Test #11", - catalog 
= CATALOG_B, - catalogPath = listOf("b"), - query = "b.b", - expected = TYPE_B_B_B - ), - SuccessTestCase( - name = "Test #12", - catalog = CATALOG_AWS, - catalogPath = listOf("ddb"), - query = "SELECT * FROM b.b", - expected = TABLE_AWS_B_B - ), - SuccessTestCase( - name = "Test #13", - catalog = CATALOG_AWS, - catalogPath = listOf("ddb"), - query = "SELECT * FROM ddb.b", - expected = TABLE_AWS_DDB_B - ), - SuccessTestCase( - name = "Test #14", - query = "SELECT * FROM aws.ddb.pets", - expected = TABLE_AWS_DDB_PETS_BAG - ), - SuccessTestCase( - name = "Test #15", - catalog = CATALOG_AWS, - query = "SELECT * FROM aws.b.b", - expected = TABLE_AWS_B_B - ), - SuccessTestCase( - name = "Test #16", - catalog = CATALOG_B, - query = "b.b.b", - expected = TYPE_B_B_B - ), - SuccessTestCase( - name = "Test #17", - catalog = CATALOG_B, - query = "b.b.c", - expected = TYPE_B_B_C - ), - SuccessTestCase( - name = "Test #18", - catalog = CATALOG_B, - catalogPath = listOf("b"), - query = "b.b.b", - expected = TYPE_B_B_B - ), - SuccessTestCase( - name = "Test #19", - query = "b.b.b.c", - expected = TYPE_B_B_B_C - ), - SuccessTestCase( - name = "Test #20", - query = "b.b.b.b", - expected = TYPE_B_B_B_B - ), - SuccessTestCase( - name = "Test #21", - catalog = CATALOG_B, - query = "b.b.b.b", - expected = TYPE_B_B_B_B - ), - SuccessTestCase( - name = "Test #22", - catalog = CATALOG_B, - query = "b.b.b.c", - expected = TYPE_B_B_C - ), - SuccessTestCase( - name = "Test #23", - catalog = CATALOG_B, - catalogPath = listOf("b"), - query = "b.b.b.b", - expected = TYPE_B_B_B_B - ), - SuccessTestCase( - name = "Test #24", - query = "b.b.b.b.b", - expected = TYPE_B_B_B_B_B + name = "BITWISE_AND_3", + query = "1 & 2", + expected = INT4 ), SuccessTestCase( - name = "Test #24", - catalog = CATALOG_B, - query = "b.b.b.b.b", - expected = TYPE_B_B_B_B_B + name = "BITWISE_AND_4", + query = "CAST(1 AS INT8) & CAST(2 AS INT8)", + expected = StaticType.INT8 ), SuccessTestCase( - name = "EQ", - catalog = 
CATALOG_DB, - catalogPath = DB_SCHEMA_MARKETS, - query = "order_info.customer_id = 1", - expected = TYPE_BOOL + name = "BITWISE_AND_5", + query = "CAST(1 AS INT2) & 2", + expected = StaticType.unionOf(StaticType.INT4, MISSING) ), SuccessTestCase( - name = "NEQ", - catalog = CATALOG_DB, - catalogPath = DB_SCHEMA_MARKETS, - query = "order_info.customer_id <> 1", - expected = TYPE_BOOL + name = "BITWISE_AND_6", + query = "CAST(1 AS INT2) & CAST(2 AS INT8)", + expected = StaticType.unionOf(StaticType.INT8, MISSING) ), SuccessTestCase( - name = "GEQ", - catalog = CATALOG_DB, - catalogPath = DB_SCHEMA_MARKETS, - query = "order_info.customer_id >= 1", - expected = TYPE_BOOL + name = "BITWISE_AND_7", + query = "CAST(1 AS INT2) & 2", + expected = StaticType.unionOf(INT4, MISSING) ), SuccessTestCase( - name = "GT", - catalog = CATALOG_DB, - catalogPath = DB_SCHEMA_MARKETS, - query = "order_info.customer_id > 1", - expected = TYPE_BOOL + name = "BITWISE_AND_8", + query = "1 & CAST(2 AS INT8)", + expected = StaticType.INT8 ), SuccessTestCase( - name = "LEQ", - catalog = CATALOG_DB, - catalogPath = DB_SCHEMA_MARKETS, - query = "order_info.customer_id <= 1", - expected = TYPE_BOOL + name = "BITWISE_AND_9", + query = "1 & 2", + expected = StaticType.INT4 ), SuccessTestCase( - name = "LT", - catalog = CATALOG_DB, - catalogPath = DB_SCHEMA_MARKETS, - query = "order_info.customer_id < 1", - expected = TYPE_BOOL + name = "BITWISE_AND_10", + query = "CAST(1 AS INT8) & 2", + expected = INT8 ), SuccessTestCase( - name = "IN", - catalog = CATALOG_DB, - catalogPath = DB_SCHEMA_MARKETS, - query = "order_info.customer_id IN (1, 2, 3)", - expected = TYPE_BOOL + name = "BITWISE_AND_NULL_OPERAND", + query = "1 & NULL", + expected = StaticType.NULL, ), ErrorTestCase( - name = "IN Failure", - catalog = CATALOG_DB, - catalogPath = DB_SCHEMA_MARKETS, - query = "order_info.customer_id IN 'hello'", - expected = TYPE_BOOL, + name = "BITWISE_AND_MISSING_OPERAND", + query = "1 & MISSING", + expected = 
StaticType.MISSING, problemHandler = assertProblemExists { Problem( - UNKNOWN_PROBLEM_LOCATION, - SemanticProblemDetails.IncompatibleDatatypesForOp( - listOf(INT, STRING), - "IN" + sourceLocation = UNKNOWN_PROBLEM_LOCATION, + details = PlanningProblemDetails.UnknownFunction( + "bitwise_and", + listOf(INT4, MISSING) ) ) } ), - SuccessTestCase( - name = "BETWEEN", - catalog = CATALOG_DB, - catalogPath = DB_SCHEMA_MARKETS, - query = "order_info.customer_id BETWEEN 1 AND 2", - expected = TYPE_BOOL - ), ErrorTestCase( - name = "BETWEEN Failure", - catalog = CATALOG_DB, - catalogPath = DB_SCHEMA_MARKETS, - query = "order_info.customer_id BETWEEN 1 AND 'a'", - expected = TYPE_BOOL, + name = "BITWISE_AND_NON_INT_OPERAND", + query = "1 & 'NOT AN INT'", + expected = StaticType.MISSING, problemHandler = assertProblemExists { Problem( UNKNOWN_PROBLEM_LOCATION, - SemanticProblemDetails.IncompatibleDatatypesForOp( - listOf( - INT, - INT, - STRING - ), - "between" - ) + PlanningProblemDetails.UnknownFunction("bitwise_and", listOf(INT4, STRING)) ) } ), + ) + + @JvmStatic + fun unpivotCases() = listOf( SuccessTestCase( - name = "LIKE", - catalog = CATALOG_DB, - catalogPath = DB_SCHEMA_MARKETS, - query = "order_info.ship_option LIKE '%ABC%'", - expected = TYPE_BOOL + name = "UNPIVOT", + query = "SELECT VALUE v FROM UNPIVOT { 'a': 2 } AS v AT attr WHERE attr = 'a'", + expected = BagType(INT4) ), - ErrorTestCase( - name = "LIKE Failure", - catalog = CATALOG_DB, - catalogPath = DB_SCHEMA_MARKETS, - query = "order_info.ship_option LIKE 3", - expected = MISSING, - problemHandler = assertProblemExists { - Problem( - UNKNOWN_PROBLEM_LOCATION, - SemanticProblemDetails.IncompatibleDatatypesForOp( - listOf(STRING, INT), - "LIKE" + ) + + @JvmStatic + fun joinCases() = listOf( + SuccessTestCase( + name = "CROSS JOIN", + query = "SELECT * FROM <<{ 'a': 1 }>> AS t1, <<{ 'b': 2.0 }>> AS t2", + expected = BagType( + StructType( + fields = mapOf( + "a" to INT4, + "b" to StaticType.DECIMAL, + ), + 
contentClosed = true, + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true), + TupleConstraint.Ordered ) ) - } + ) ), SuccessTestCase( - name = "Case insensitive", - catalog = CATALOG_DB, - catalogPath = DB_SCHEMA_MARKETS, - query = "order_info.CUSTOMER_ID = 1", - expected = TYPE_BOOL - ), - ErrorTestCase( - name = "Case Sensitive failure", - catalog = CATALOG_DB, - catalogPath = DB_SCHEMA_MARKETS, - query = "order_info.\"CUSTOMER_ID\" = 1", - expected = TYPE_BOOL + name = "LEFT JOIN", + query = "SELECT * FROM <<{ 'a': 1 }>> AS t1 LEFT JOIN <<{ 'b': 2.0 }>> AS t2 ON TRUE", + expected = BagType( + StructType( + fields = mapOf( + "a" to INT4, + "b" to unionOf(NULL, DECIMAL), + ), + contentClosed = true, + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true), + TupleConstraint.Ordered + ) + ) + ) ), SuccessTestCase( - name = "Case Sensitive success", - catalog = CATALOG_DB, - catalogPath = DB_SCHEMA_MARKETS, - query = "order_info.\"customer_id\" = 1", - expected = TYPE_BOOL + name = "LEFT JOIN", + query = "SELECT b, a FROM <<{ 'a': 1 }>> AS t1 LEFT JOIN <<{ 'b': 2.0 }>> AS t2 ON TRUE", + expected = BagType( + StructType( + fields = listOf( + StructType.Field("b", unionOf(NULL, DECIMAL)), + StructType.Field("a", INT4), + ), + contentClosed = true, + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true), + TupleConstraint.Ordered + ) + ) + ) ), SuccessTestCase( - name = "1-Level Junction", - catalog = CATALOG_DB, - catalogPath = DB_SCHEMA_MARKETS, - query = "(order_info.customer_id = 1) AND (order_info.marketplace_id = 2)", - expected = TYPE_BOOL - ), - SuccessTestCase( - name = "2-Level Junction", - catalog = CATALOG_DB, - catalogPath = DB_SCHEMA_MARKETS, - query = "(order_info.customer_id = 1) AND (order_info.marketplace_id = 2) OR (order_info.customer_id = 3) AND (order_info.marketplace_id = 4)", - expected = TYPE_BOOL - ), - ErrorTestCase( - name = "INT and STR 
Comparison", - catalog = CATALOG_DB, - catalogPath = DB_SCHEMA_MARKETS, - query = "order_info.customer_id = 'something'", - expected = TYPE_BOOL, - problemHandler = assertProblemExists { - Problem( - UNKNOWN_PROBLEM_LOCATION, - SemanticProblemDetails.IncompatibleDatatypesForOp( - listOf(INT, STRING), - "EQ" - ) - ) - } - ), - ErrorTestCase( - name = "Nonexisting Comparison", - catalog = CATALOG_DB, - catalogPath = DB_SCHEMA_MARKETS, - query = "non_existing_column = 1", - expected = AnyOfType( - setOf( - MISSING, - NULL, - BOOL - ) - ), - problemHandler = assertProblemExists { - Problem( - UNKNOWN_PROBLEM_LOCATION, - PlanningProblemDetails.UndefinedVariable("non_existing_column", false) - ) - } - ), - ErrorTestCase( - name = "Bad comparison", - catalog = CATALOG_DB, - catalogPath = DB_SCHEMA_MARKETS, - query = "order_info.customer_id = 1 AND 1", - expected = MISSING, - problemHandler = assertProblemExists { - Problem( - UNKNOWN_PROBLEM_LOCATION, - SemanticProblemDetails.IncompatibleDatatypesForOp( - listOf(BOOL, INT), - "AND" - ) - ) - } - ), - ErrorTestCase( - name = "Bad comparison", - catalog = CATALOG_DB, - catalogPath = DB_SCHEMA_MARKETS, - query = "1 AND order_info.customer_id = 1", - expected = MISSING, - problemHandler = assertProblemExists { - Problem( - UNKNOWN_PROBLEM_LOCATION, - SemanticProblemDetails.IncompatibleDatatypesForOp( - listOf(INT, BOOL), - "AND" - ) - ) - } - ), - ErrorTestCase( - name = "Unknown column", - catalog = CATALOG_DB, - catalogPath = DB_SCHEMA_MARKETS, - query = "SELECT unknown_col FROM orders WHERE customer_id = 1", - expected = BagType( - StructType( - fields = mapOf("unknown_col" to AnyType()), - contentClosed = true, - constraints = setOf( - TupleConstraint.Open(false), - TupleConstraint.UniqueAttrs(true), - TupleConstraint.Ordered - ) - ) - ), - problemHandler = assertProblemExists { - Problem( - UNKNOWN_PROBLEM_LOCATION, - PlanningProblemDetails.UndefinedVariable("unknown_col", false) - ) - } - ), - SuccessTestCase( - name = 
"ORDER BY int", - catalog = CATALOG_AWS, - catalogPath = listOf("ddb"), - query = "SELECT * FROM pets ORDER BY id", - expected = TABLE_AWS_DDB_PETS_LIST - ), - SuccessTestCase( - name = "ORDER BY str", - catalog = CATALOG_AWS, - catalogPath = listOf("ddb"), - query = "SELECT * FROM pets ORDER BY breed", - expected = TABLE_AWS_DDB_PETS_LIST - ), - SuccessTestCase( - name = "ORDER BY str", - catalog = CATALOG_AWS, - catalogPath = listOf("ddb"), - query = "SELECT * FROM pets ORDER BY unknown_col", - expected = TABLE_AWS_DDB_PETS_LIST - ), - SuccessTestCase( - name = "LIMIT INT", - catalog = CATALOG_AWS, - catalogPath = listOf("ddb"), - query = "SELECT * FROM pets LIMIT 5", - expected = TABLE_AWS_DDB_PETS_BAG - ), - ErrorTestCase( - name = "LIMIT STR", - catalog = CATALOG_AWS, - catalogPath = listOf("ddb"), - query = "SELECT * FROM pets LIMIT '5'", - expected = TABLE_AWS_DDB_PETS_BAG, - problemHandler = assertProblemExists { - Problem( - UNKNOWN_PROBLEM_LOCATION, - SemanticProblemDetails.IncompatibleDataTypeForExpr(INT, STRING) - ) - } - ), - SuccessTestCase( - name = "OFFSET INT", - catalog = CATALOG_AWS, - catalogPath = listOf("ddb"), - query = "SELECT * FROM pets LIMIT 1 OFFSET 5", - expected = TABLE_AWS_DDB_PETS_BAG - ), - ErrorTestCase( - name = "OFFSET STR", - catalog = CATALOG_AWS, - catalogPath = listOf("ddb"), - query = "SELECT * FROM pets LIMIT 1 OFFSET '5'", - expected = TABLE_AWS_DDB_PETS_BAG, - problemHandler = assertProblemExists { - Problem( - UNKNOWN_PROBLEM_LOCATION, - SemanticProblemDetails.IncompatibleDataTypeForExpr(INT, STRING) - ) - } - ), - SuccessTestCase( - name = "CAST", - catalog = CATALOG_AWS, - catalogPath = listOf("ddb"), - query = "SELECT CAST(breed AS INT) AS cast_breed FROM pets", + name = "LEFT JOIN", + query = "SELECT t1.a, t2.a FROM <<{ 'a': 1 }>> AS t1 LEFT JOIN <<{ 'a': 2.0 }>> AS t2 ON t1.a = t2.a", expected = BagType( StructType( - fields = mapOf("cast_breed" to unionOf(INT, MISSING)), + fields = listOf( + StructType.Field("a", 
INT4), + StructType.Field("a", unionOf(NULL, DECIMAL)), + ), contentClosed = true, constraints = setOf( TupleConstraint.Open(false), - TupleConstraint.UniqueAttrs(true), + TupleConstraint.UniqueAttrs(false), TupleConstraint.Ordered ) ) ) ), SuccessTestCase( - name = "UPPER", - catalog = CATALOG_AWS, - catalogPath = listOf("ddb"), - query = "SELECT UPPER(breed) AS upper_breed FROM pets", + name = "LEFT JOIN ALL", + query = "SELECT * FROM <<{ 'a': 1 }>> AS t1 LEFT JOIN <<{ 'a': 2.0 }>> AS t2 ON t1.a = t2.a", expected = BagType( StructType( - fields = mapOf("upper_breed" to STRING), + fields = listOf( + StructType.Field("a", INT4), + StructType.Field("a", unionOf(NULL, DECIMAL)), + ), contentClosed = true, constraints = setOf( TupleConstraint.Open(false), - TupleConstraint.UniqueAttrs(true), + TupleConstraint.UniqueAttrs(false), TupleConstraint.Ordered ) ) ) ), SuccessTestCase( - name = "Non-tuples", - query = "SELECT a FROM << [ 1, 1.0 ] >> AS a", + name = "LEFT JOIN ALL", + query = """ + SELECT * + FROM + <<{ 'a': 1 }>> AS t1 + LEFT JOIN + <<{ 'a': 2.0 }>> AS t2 + ON t1.a = t2.a + LEFT JOIN + <<{ 'a': 'hello, world' }>> AS t3 + ON t3.a = 'hello' + """, expected = BagType( StructType( - fields = mapOf("a" to ListType(unionOf(INT, StaticType.DECIMAL))), + fields = listOf( + StructType.Field("a", INT4), + StructType.Field("a", unionOf(DECIMAL, NULL)), + StructType.Field("a", unionOf(STRING, NULL)), + ), contentClosed = true, constraints = setOf( TupleConstraint.Open(false), - TupleConstraint.UniqueAttrs(true), + TupleConstraint.UniqueAttrs(false), TupleConstraint.Ordered ) ) ) ), - SuccessTestCase( - name = "Non-tuples in SELECT VALUE", - query = "SELECT VALUE a FROM << [ 1, 1.0 ] >> AS a", - expected = - BagType(ListType(unionOf(INT, StaticType.DECIMAL))) - ), - SuccessTestCase( - name = "SELECT VALUE", - query = "SELECT VALUE [1, 1.0] FROM <<>>", - expected = - BagType(ListType(unionOf(INT, StaticType.DECIMAL))) - ), - SuccessTestCase( - name = "UNPIVOT", - query = 
"SELECT VALUE v FROM UNPIVOT { 'a': 2 } AS v AT attr WHERE attr = 'a'", - expected = - BagType(INT) - - ), - SuccessTestCase( - name = "CROSS JOIN", - query = "SELECT * FROM <<{ 'a': 1 }>> AS t1, <<{ 'b': 2.0 }>> AS t2", + ErrorTestCase( + name = "LEFT JOIN Ambiguous Reference in ON", + query = "SELECT * FROM <<{ 'a': 1 }>> AS t1 LEFT JOIN <<{ 'a': 2.0 }>> AS t2 ON a = 3", expected = BagType( StructType( - fields = mapOf( - "a" to INT, - "b" to StaticType.DECIMAL, + fields = listOf( + StructType.Field("a", INT4), + StructType.Field("a", unionOf(DECIMAL, NULL)), ), contentClosed = true, constraints = setOf( TupleConstraint.Open(false), - TupleConstraint.UniqueAttrs(true), + TupleConstraint.UniqueAttrs(false), TupleConstraint.Ordered ) ) - ) + ), + problemHandler = assertProblemExists { + Problem( + UNKNOWN_PROBLEM_LOCATION, + PlanningProblemDetails.UndefinedVariable("a", false) + ) + } ), + ) + + @JvmStatic + fun excludeCases() = listOf( SuccessTestCase( - name = "LEFT JOIN", - query = "SELECT * FROM <<{ 'a': 1 }>> AS t1 LEFT JOIN <<{ 'b': 2.0 }>> AS t2 ON TRUE", + name = "EXCLUDE SELECT star", + key = key("exclude-01"), expected = BagType( StructType( fields = mapOf( - "a" to INT, - "b" to StaticType.DECIMAL, + "name" to StaticType.STRING, + "custId" to StaticType.INT4, + "address" to StructType( + fields = mapOf( + "city" to StaticType.STRING, + "zipcode" to StaticType.INT4, + "street" to StaticType.STRING, + ), + contentClosed = true, + constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)) + ) ), contentClosed = true, constraints = setOf( @@ -799,13 +760,21 @@ class PartiQLSchemaInferencerTests { ) ), SuccessTestCase( - name = "LEFT JOIN", - query = "SELECT b, a FROM <<{ 'a': 1 }>> AS t1 LEFT JOIN <<{ 'b': 2.0 }>> AS t2 ON TRUE", + name = "EXCLUDE SELECT star multiple paths", + key = key("exclude-02"), expected = BagType( StructType( - fields = listOf( - StructType.Field("b", StaticType.DECIMAL), - StructType.Field("a", INT), + 
fields = mapOf( + "name" to StaticType.STRING, + "custId" to StaticType.INT4, + "address" to StructType( + fields = mapOf( + "city" to StaticType.STRING, + "zipcode" to StaticType.INT4 + ), + contentClosed = true, + constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)) + ) ), contentClosed = true, constraints = setOf( @@ -817,31 +786,44 @@ class PartiQLSchemaInferencerTests { ) ), SuccessTestCase( - name = "LEFT JOIN", - query = "SELECT t1.a, t2.a FROM <<{ 'a': 1 }>> AS t1 LEFT JOIN <<{ 'a': 2.0 }>> AS t2 ON t1.a = t2.a", + name = "EXCLUDE SELECT star list index and list index field", + key = key("exclude-03"), expected = BagType( StructType( - fields = listOf( - StructType.Field("a", INT), - StructType.Field("a", StaticType.DECIMAL), - ), - contentClosed = true, - constraints = setOf( - TupleConstraint.Open(false), - TupleConstraint.UniqueAttrs(true), - TupleConstraint.Ordered - ) - ) - ) - ), - SuccessTestCase( - name = "LEFT JOIN ALL", - query = "SELECT * FROM <<{ 'a': 1 }>> AS t1 LEFT JOIN <<{ 'a': 2.0 }>> AS t2 ON t1.a = t2.a", - expected = BagType( - StructType( - fields = listOf( - StructType.Field("a", INT), - StructType.Field("a", StaticType.DECIMAL), + fields = mapOf( + "a" to StructType( + fields = mapOf( + "b" to StructType( + fields = mapOf( + "c" to ListType( + elementType = StructType( + fields = mapOf( + "field" to AnyOfType( + setOf( + INT4, + MISSING // c[1]'s `field` was excluded + ) + ) + ), + contentClosed = true, + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true) + ) + ) + ) + ), + contentClosed = true, + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true) + ) + ) + ), + contentClosed = true, + constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)) + ), + "foo" to StaticType.STRING ), contentClosed = true, constraints = setOf( @@ -853,24 +835,30 @@ class PartiQLSchemaInferencerTests { ) ), SuccessTestCase( - 
name = "LEFT JOIN ALL", - query = """ - SELECT * - FROM - <<{ 'a': 1 }>> AS t1 - LEFT JOIN - <<{ 'a': 2.0 }>> AS t2 - ON t1.a = t2.a - LEFT JOIN - <<{ 'a': 'hello, world' }>> AS t3 - ON t3.a = 'hello' - """, + name = "EXCLUDE SELECT star collection index as last step", + key = key("exclude-04"), expected = BagType( StructType( - fields = listOf( - StructType.Field("a", INT), - StructType.Field("a", StaticType.DECIMAL), - StructType.Field("a", STRING), + fields = mapOf( + "a" to StructType( + fields = mapOf( + "b" to StructType( + fields = mapOf( + "c" to ListType( + elementType = StaticType.INT4 + ) + ), + contentClosed = true, + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true) + ) + ) + ), + contentClosed = true, + constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)) + ), + "foo" to StaticType.STRING ), contentClosed = true, constraints = setOf( @@ -881,18 +869,16 @@ class PartiQLSchemaInferencerTests { ) ) ), + // EXCLUDE regression test (behavior subject to change pending RFC) SuccessTestCase( - name = "Duplicate fields in struct", - query = """ - SELECT t.a AS a - FROM << - { 'a': 1, 'a': 'hello' } - >> AS t - """, + name = "EXCLUDE SELECT star collection wildcard as last step", + key = key("exclude-05"), expected = BagType( StructType( - fields = listOf( - StructType.Field("a", unionOf(INT, STRING)) + fields = mapOf( + "a" to ListType( + elementType = StaticType.INT4 // empty list but still preserve typing information + ) ), contentClosed = true, constraints = setOf( @@ -904,15 +890,39 @@ class PartiQLSchemaInferencerTests { ) ), SuccessTestCase( - name = "Duplicate fields in ordered STRUCT. NOTE: b.b.d is an ordered struct with two attributes (e). 
First is INT.", - query = """ - SELECT d.e AS e - FROM << b.b.d >> AS d - """, + name = "EXCLUDE SELECT star list wildcard", + key = key("exclude-06"), expected = BagType( StructType( - fields = listOf( - StructType.Field("e", INT) + fields = mapOf( + "a" to StructType( + fields = mapOf( + "b" to StructType( + fields = mapOf( + "c" to ListType( + elementType = StructType( + fields = mapOf( + "field_y" to StaticType.INT4 + ), + contentClosed = true, + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true) + ) + ) + ) + ), + contentClosed = true, + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true) + ) + ) + ), + contentClosed = true, + constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)) + ), + "foo" to StaticType.STRING ), contentClosed = true, constraints = setOf( @@ -923,14 +933,40 @@ class PartiQLSchemaInferencerTests { ) ) ), - ErrorTestCase( - name = "LEFT JOIN Ambiguous Reference in ON", - query = "SELECT * FROM <<{ 'a': 1 }>> AS t1 LEFT JOIN <<{ 'a': 2.0 }>> AS t2 ON a = 3", + SuccessTestCase( + name = "EXCLUDE SELECT star list tuple wildcard", + key = key("exclude-07"), expected = BagType( StructType( - fields = listOf( - StructType.Field("a", INT), - StructType.Field("a", StaticType.DECIMAL), + fields = mapOf( + "a" to StructType( + fields = mapOf( + "b" to StructType( + fields = mapOf( + "c" to ListType( + elementType = StructType( + fields = mapOf( + // all fields gone + ), + contentClosed = true, + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true) + ) + ) + ) + ), + contentClosed = true, + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true) + ) + ) + ), + contentClosed = true, + constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)) + ), + "foo" to StaticType.STRING ), contentClosed = true, constraints = setOf( @@ -939,26 +975,15 @@ class 
PartiQLSchemaInferencerTests { TupleConstraint.Ordered ) ) - ), - problemHandler = assertProblemExists { - Problem( - UNKNOWN_PROBLEM_LOCATION, - PlanningProblemDetails.UndefinedVariable("a", false) - ) - } + ) ), SuccessTestCase( - name = "Duplicate fields in struct", - query = """ - SELECT a AS a - FROM << - { 'a': 1, 'a': 'hello' } - >> AS t - """, - expected = BagType( + name = "EXCLUDE SELECT star order by", + key = key("exclude-08"), + expected = ListType( StructType( - fields = listOf( - StructType.Field("a", unionOf(INT, STRING)) + fields = mapOf( + "foo" to StaticType.STRING ), contentClosed = true, constraints = setOf( @@ -970,15 +995,14 @@ class PartiQLSchemaInferencerTests { ) ), SuccessTestCase( - name = "AGGREGATE over INTS", - query = "SELECT a, COUNT(*) AS c, SUM(a) AS s, MIN(b) AS m FROM << {'a': 1, 'b': 2} >> GROUP BY a", + name = "EXCLUDE SELECT star with JOINs", + key = key("exclude-09"), expected = BagType( StructType( fields = mapOf( - "a" to INT, - "c" to INT, - "s" to INT, - "m" to INT, + "a" to StaticType.INT4, + "b" to StaticType.INT4, + "c" to StaticType.INT4 ), contentClosed = true, constraints = setOf( @@ -990,15 +1014,20 @@ class PartiQLSchemaInferencerTests { ) ), SuccessTestCase( - name = "AGGREGATE over DECIMALS", - query = "SELECT a, COUNT(*) AS c, SUM(a) AS s, MIN(b) AS m FROM << {'a': 1.0, 'b': 2.0}, {'a': 1.0, 'b': 2.0} >> GROUP BY a", + name = "SELECT t.b EXCLUDE ex 1", + key = key("exclude-10"), expected = BagType( StructType( fields = mapOf( - "a" to StaticType.DECIMAL, - "c" to INT, - "s" to StaticType.DECIMAL, - "m" to StaticType.DECIMAL, + "b" to ListType( + elementType = StructType( + fields = mapOf( + "b_2" to StaticType.INT4 + ), + contentClosed = true, + constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)) + ) + ), ), contentClosed = true, constraints = setOf( @@ -1010,964 +1039,524 @@ class PartiQLSchemaInferencerTests { ) ), SuccessTestCase( - name = "Current User", - query = 
"CURRENT_USER", - expected = unionOf(STRING, NULL) - ), - SuccessTestCase( - name = "Trim", - query = "trim(' ')", - expected = STRING - ), - SuccessTestCase( - name = "Current User Concat", - query = "CURRENT_USER || 'hello'", - expected = unionOf(STRING, NULL) - ), - SuccessTestCase( - name = "Current User Concat in WHERE", - query = "SELECT VALUE a FROM [ 0 ] AS a WHERE CURRENT_USER = 'hello'", - expected = BagType(INT) - ), - SuccessTestCase( - name = "TRIM_2", - query = "trim(' ' FROM ' Hello, World! ')", - expected = STRING - ), - SuccessTestCase( - name = "TRIM_1", - query = "trim(' Hello, World! ')", - expected = STRING - ), - SuccessTestCase( - name = "TRIM_3", - query = "trim(LEADING ' ' FROM ' Hello, World! ')", - expected = STRING - ), - SuccessTestCase( - name = "Subquery coercion in top-level expression", - query = "COALESCE(SELECT identifier FROM aws.ddb.b)", - expected = STRING - ), - SuccessTestCase( - name = "Subquery coercion in WHERE. Also showcases conflicting bindings. 
INT vs STRING.", - query = """ - SELECT VALUE identifier - FROM aws.b.b AS b -- aws.b.b.identifier is an INT - WHERE - COALESCE(SELECT identifier AS identifier FROM aws.ddb.b) IS NOT NULL -- aws.ddb.b.identifier is a STRING - """, - expected = BagType(INT) - ), - SuccessTestCase( - name = "Subquery coercion in SFW", - query = """ - SELECT - (SELECT identifier FROM aws.ddb.b) AS some_str, -- identifier is STRING - ('hello' || (SELECT identifier FROM aws.ddb.b)) AS concat_str, -- identifier is STRING - (1 < (SELECT id FROM aws.ddb.pets)) AS one_lt_id, -- id is INT - (1 > (SELECT id FROM aws.ddb.pets)) AS one_gt_id, -- id is INT - (1 <= (SELECT id FROM aws.ddb.pets)) AS one_lte_id, -- id is INT - (1 >= (SELECT id FROM aws.ddb.pets)) AS one_gte_id, -- id is INT - (1 = (SELECT id FROM aws.ddb.pets)) AS one_eq_id, -- id is INT - (1 != (SELECT id FROM aws.ddb.pets)) AS one_ne_id, -- id is INT - ((SELECT id FROM aws.ddb.pets) > 1) AS id_gt_one, -- id is INT - (1 IN (SELECT id FROM aws.ddb.pets)) AS one_in_ids, -- id is INT - ([1, 2] IN (SELECT id, id + 1 FROM aws.ddb.pets)) AS array_in_ids, -- id is INT - ([1, 2] <= (SELECT id, id + 1 FROM aws.ddb.pets)) AS lit_array_lte_ids, -- id is INT - ([1, 2] < (SELECT id, id + 1 FROM aws.ddb.pets)) AS lit_array_lt_ids, -- id is INT - ([1, 2] >= (SELECT id, id + 1 FROM aws.ddb.pets)) AS lit_array_gte_ids, -- id is INT - ([1, 2] > (SELECT id, id + 1 FROM aws.ddb.pets)) AS lit_array_gt_ids, -- id is INT - ([1, 2] = (SELECT id, id + 1 FROM aws.ddb.pets)) AS lit_array_eq_ids, -- id is INT - ([1, 2] != (SELECT id, id + 1 FROM aws.ddb.pets)) AS lit_array_ne_ids -- id is INT - FROM - << 0, 1, 2 >> AS t - """.trimIndent(), + name = "SELECT * EXCLUDE ex 2", + key = key("exclude-11"), expected = BagType( StructType( - fields = listOf( - StructType.Field("some_str", STRING), - StructType.Field("concat_str", STRING), - StructType.Field("one_lt_id", BOOL), - StructType.Field("one_gt_id", BOOL), - StructType.Field("one_lte_id", BOOL), - 
StructType.Field("one_gte_id", BOOL), - StructType.Field("one_eq_id", BOOL), - StructType.Field("one_ne_id", BOOL), - StructType.Field("id_gt_one", BOOL), - StructType.Field("one_in_ids", BOOL), - StructType.Field("array_in_ids", BOOL), - StructType.Field("lit_array_lte_ids", BOOL), - StructType.Field("lit_array_lt_ids", BOOL), - StructType.Field("lit_array_gte_ids", BOOL), - StructType.Field("lit_array_gt_ids", BOOL), - StructType.Field("lit_array_eq_ids", BOOL), - StructType.Field("lit_array_ne_ids", BOOL), + fields = mapOf( + "a" to StructType( + fields = mapOf( + "a_1" to StaticType.INT4, + "a_2" to StaticType.INT4 + ), + contentClosed = true, + constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)) + ), + "b" to ListType( + elementType = StructType( + fields = mapOf( + "b_2" to StaticType.INT4 + ), + contentClosed = true, + constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)) + ) + ), + "c" to StaticType.INT4, + "d" to StaticType.INT4 ), contentClosed = true, constraints = setOf( - TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true), TupleConstraint.Ordered + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true), + TupleConstraint.Ordered ) ) ) ), - ErrorTestCase( - name = "List of Lists on RHS for IN", - query = "1 IN (SELECT id, id + 1 FROM aws.ddb.pets)", - expected = BOOL, - problemHandler = assertProblemExists { - Problem( - sourceLocation = UNKNOWN_PROBLEM_LOCATION, - details = SemanticProblemDetails.IncompatibleDatatypesForOp( - listOf(INT, BagType(ListType(INT))), - "IN" + SuccessTestCase( + name = "SELECT VALUE t.b EXCLUDE", + key = key("exclude-12"), + expected = BagType( + ListType( + elementType = StructType( + fields = mapOf( + "b_2" to StaticType.INT4 + ), + contentClosed = true, + constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)) ) - ) - } + ), + ) ), - ErrorTestCase( - name = "Coercion of select with multiple projections in 
COALESCE", - query = "COALESCE(SELECT id AS id, id + 1 AS increment FROM aws.ddb.pets)", - problemHandler = assertProblemExists { - Problem( - sourceLocation = UNKNOWN_PROBLEM_LOCATION, - details = SemanticProblemDetails.CoercionError( - StructType( - fields = listOf( - StructType.Field("id", INT), - StructType.Field("increment", INT), - ), - contentClosed = true, - constraints = setOf( - TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true), TupleConstraint.Ordered - ) - ), - ) - ) - } - ), - ErrorTestCase( - name = "Lists on LHS for IN", - query = "[1, 2] IN (SELECT id FROM aws.ddb.pets)", - expected = BOOL, - problemHandler = assertProblemExists { - Problem( - sourceLocation = UNKNOWN_PROBLEM_LOCATION, - details = SemanticProblemDetails.IncompatibleDatatypesForOp( - listOf(ListType(INT), BagType(INT)), - "IN" - ) - ) - } - ), - ErrorTestCase( - name = "List of Lists on RHS for LTE", - query = "1 <= (SELECT id AS id, id + 1 AS increment FROM aws.ddb.pets)", - expected = unionOf(MISSING, NULL, BOOL), - problemHandler = assertProblemExists { - Problem( - sourceLocation = UNKNOWN_PROBLEM_LOCATION, - details = SemanticProblemDetails.CoercionError( - StructType( - fields = listOf( - StructType.Field("id", INT), - StructType.Field("increment", INT), - ), - contentClosed = true, - constraints = setOf( - TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true), TupleConstraint.Ordered - ) - ), - ) - ) - } - ), - ThrowingExceptionTestCase( - name = "SELECT * in Subquery", // TODO: This needs to be fixed. - query = "1 IN (SELECT * FROM aws.ddb.pets)", - expectedThrowable = IllegalStateException::class - ), - ThrowingExceptionTestCase( - name = "SELECT * in Subquery with IN coercion rules", // TODO: This needs to be fixed. 
- query = "[1, 2] IN (SELECT * FROM aws.ddb.pets)", - expectedThrowable = IllegalStateException::class - ), - SuccessTestCase( - name = "SELECT * in Subquery with plus -- aws.b.b has one column (INT)", - query = "1 + (SELECT * FROM aws.b.b)", - expected = INT, - ), - ErrorTestCase( - name = "Cannot coerce subquery of multiple columns into single value", - query = "[1, 2] + (SELECT * FROM aws.ddb.pets)", - expected = unionOf(MISSING, NULL), - problemHandler = assertProblemExists { - Problem( - UNKNOWN_PROBLEM_LOCATION, - SemanticProblemDetails.CoercionError( - StructType( - fields = listOf( - StructType.Field("id", INT), - StructType.Field("breed", STRING), - ), - contentClosed = true, - constraints = setOf( - TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true), TupleConstraint.Ordered - ) - ), - ) - ) - } - ), - ErrorTestCase( - name = "SELECT * in Subquery with comparison -- aws.ddb.pets has two columns", - query = "1 < (SELECT * FROM aws.ddb.pets)", - expected = unionOf(MISSING, NULL, BOOL), - problemHandler = assertProblemExists { - Problem( - UNKNOWN_PROBLEM_LOCATION, - SemanticProblemDetails.CoercionError( - StructType( - fields = listOf( - StructType.Field("id", INT), - StructType.Field("breed", STRING), - ), - contentClosed = true, - constraints = setOf( - TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true), TupleConstraint.Ordered - ) - ), - ) - ) - } - ), - SuccessTestCase( - name = "SELECT * in Subquery with comparison -- aws.b.b has one column (INT)", - query = "1 < (SELECT * FROM aws.b.b)", - expected = BOOL, - ), - ErrorTestCase( - name = "SELECT * in Subquery with comparison -- aws.ddb.pets has two columns (INT)", - query = "1 < (SELECT * FROM aws.ddb.pets)", - expected = unionOf(NULL, MISSING, BOOL), - problemHandler = assertProblemExists { - Problem( - UNKNOWN_PROBLEM_LOCATION, - SemanticProblemDetails.CoercionError( - StructType( - fields = listOf( - StructType.Field("id", INT), - StructType.Field("breed", STRING), - ), - 
contentClosed = true, - constraints = setOf( - TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true), TupleConstraint.Ordered - ) - ), - ) - ) - } - ), - ThrowingExceptionTestCase( - name = "SELECT * in multi-column subquery with comparison coercion.", // TODO: This needs to be fixed. - query = "[1, 2] < (SELECT * FROM aws.ddb.pets)", - expectedThrowable = IllegalStateException::class - ), - ThrowingExceptionTestCase( - name = "SELECT * in single-column subquery with comparison coercion.", // TODO: This needs to be fixed. - query = "[1, 2] < (SELECT * FROM aws.b.b)", - expectedThrowable = IllegalStateException::class - ), - ThrowingExceptionTestCase( - name = "SELECT * in Subquery with comparison of array", // TODO: This needs to be fixed. - query = "[1, 2] < (SELECT * FROM aws.ddb.pets)", - expectedThrowable = IllegalStateException::class - ), - ErrorTestCase( - name = "List of Lists on LHS for LTE", - query = "[1,2] <= (SELECT id FROM aws.ddb.pets)", - expected = BOOL, - problemHandler = assertProblemExists { - Problem( - sourceLocation = UNKNOWN_PROBLEM_LOCATION, - details = SemanticProblemDetails.IncompatibleDatatypesForOp( - listOf(ListType(INT), INT), - "LTE" - ) - ) - } - ), - ErrorTestCase( - name = "TRIM_2_error", - query = "trim(2 FROM ' Hello, World! 
')", - expected = STRING, - problemHandler = assertProblemExists { - Problem( - UNKNOWN_PROBLEM_LOCATION, - SemanticProblemDetails.InvalidArgumentTypeForFunction( - "trim", - unionOf(STRING, StaticType.SYMBOL), - INT, - ) - ) - } - ), - ErrorTestCase( - name = "Current User Concat in WHERE", - query = "SELECT VALUE a FROM [ 0 ] AS a WHERE CURRENT_USER = 5", - expected = BagType(INT), - problemHandler = assertProblemExists { - Problem( - UNKNOWN_PROBLEM_LOCATION, - SemanticProblemDetails.IncompatibleDatatypesForOp( - listOf( - unionOf(STRING, NULL), - INT, - ), - Rex.Binary.Op.EQ.name - ) - ) - } - ), - ErrorTestCase( - name = "Current User (String) PLUS String", - query = "CURRENT_USER + 'hello'", - expected = unionOf(MISSING, NULL), - problemHandler = assertProblemExists { - Problem( - UNKNOWN_PROBLEM_LOCATION, - SemanticProblemDetails.IncompatibleDatatypesForOp( - listOf( - unionOf(STRING, NULL), - STRING, - ), - Rex.Binary.Op.PLUS.name - ) - ) - } - ), - // EXCLUDE test cases SuccessTestCase( - name = "EXCLUDE SELECT star", - query = """SELECT * EXCLUDE c.ssn FROM [ - { - 'name': 'Alan', - 'custId': 1, - 'address': { - 'city': 'Seattle', - 'zipcode': 98109, - 'street': '123 Seaplane Dr.' 
- }, - 'ssn': 123456789 - } - ] AS c""", + name = "SELECT * EXCLUDE collection wildcard and nested tuple attr", + key = key("exclude-13"), expected = BagType( StructType( fields = mapOf( - "name" to StaticType.STRING, - "custId" to StaticType.INT, - "address" to StructType( - fields = mapOf( - "city" to StaticType.STRING, - "zipcode" to StaticType.INT, - "street" to StaticType.STRING, + "a" to ListType( + elementType = StructType( + fields = mapOf( + "b" to StructType( + fields = mapOf( + "d" to StaticType.STRING + ), + contentClosed = true, + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true) + ) + ), + ), + contentClosed = true, + constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)) ), - contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)) ) ), contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true), TupleConstraint.Ordered) + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true), + TupleConstraint.Ordered + ) ) ) ), SuccessTestCase( - name = "EXCLUDE SELECT star multiple paths", - query = """SELECT * EXCLUDE c.ssn, c.address.street FROM [ - { - 'name': 'Alan', - 'custId': 1, - 'address': { - 'city': 'Seattle', - 'zipcode': 98109, - 'street': '123 Seaplane Dr.' 
- }, - 'ssn': 123456789 - } - ] AS c""", + name = "SELECT * EXCLUDE collection index and nested tuple attr", + key = key("exclude-14"), expected = BagType( StructType( fields = mapOf( - "name" to StaticType.STRING, - "custId" to StaticType.INT, - "address" to StructType( - fields = mapOf( - "city" to StaticType.STRING, - "zipcode" to StaticType.INT + "a" to ListType( + elementType = StructType( + fields = mapOf( + "b" to StructType( + fields = mapOf( + "c" to StaticType.INT4.asOptional(), + "d" to StaticType.STRING + ), + contentClosed = true, + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true) + ) + ), + ), + contentClosed = true, + constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)) ), - contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)) ) ), contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true), TupleConstraint.Ordered) + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true), + TupleConstraint.Ordered + ) ) ) ), SuccessTestCase( - name = "EXCLUDE SELECT star list index and list index field", - query = """SELECT * - EXCLUDE - t.a.b.c[0], - t.a.b.c[1].field - FROM [{ - 'a': { - 'b': { - 'c': [ - { - 'field': 0 -- c[0] - }, - { - 'field': 1 -- c[1] - }, - { - 'field': 2 -- c[2] - } - ] - } - }, - 'foo': 'bar' - }] AS t""", + name = "SELECT * EXCLUDE collection wildcard and nested tuple wildcard", + key = key("exclude-15"), expected = BagType( StructType( fields = mapOf( - "a" to StructType( - fields = mapOf( - "b" to StructType( - fields = mapOf( - "c" to ListType( - elementType = StructType( - fields = mapOf( - "field" to AnyOfType( - setOf( - StaticType.INT, - StaticType.MISSING // c[1]'s `field` was excluded - ) - ) - ), - contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)) - ) + "a" to ListType( 
+ elementType = StructType( + fields = mapOf( + "b" to StructType( + fields = mapOf(), // empty map; all fields of b excluded + contentClosed = true, + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true) ) ), - contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)) - ) + ), + contentClosed = true, + constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)) ), - contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)) - ), - "foo" to StaticType.STRING + ) ), contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true), TupleConstraint.Ordered) + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true), + TupleConstraint.Ordered + ) ) ) ), SuccessTestCase( - name = "EXCLUDE SELECT star collection index as last step", - query = """SELECT * - EXCLUDE - t.a.b.c[0] - FROM [{ - 'a': { - 'b': { - 'c': [0, 1, 2] - } - }, - 'foo': 'bar' - }] AS t""", + name = "SELECT * EXCLUDE collection index and nested tuple wildcard", + key = key("exclude-16"), expected = BagType( StructType( fields = mapOf( - "a" to StructType( - fields = mapOf( - "b" to StructType( - fields = mapOf( - "c" to ListType( - elementType = StaticType.INT + "a" to ListType( + elementType = StructType( + fields = mapOf( + "b" to StructType( + fields = mapOf( // all fields of b optional + "c" to StaticType.INT4.asOptional(), + "d" to StaticType.STRING.asOptional() + ), + contentClosed = true, + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true) ) ), - contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)) - ) + ), + contentClosed = true, + constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)) ), - contentClosed = true, - constraints = 
setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)) - ), - "foo" to StaticType.STRING + ) ), contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true), TupleConstraint.Ordered) + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true), + TupleConstraint.Ordered + ) ) ) ), - // EXCLUDE regression test (behavior subject to change pending RFC) SuccessTestCase( - name = "EXCLUDE SELECT star collection wildcard as last step", - query = """SELECT * - EXCLUDE - t.a[*] - FROM [{ - 'a': [0, 1, 2] - }] AS t""", + name = "SELECT * EXCLUDE collection wildcard and nested collection wildcard", + key = key("exclude-17"), expected = BagType( StructType( fields = mapOf( "a" to ListType( - elementType = StaticType.INT // empty list but still preserve typing information - ) - ), - contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true), TupleConstraint.Ordered) - ) - ) - ), - SuccessTestCase( - name = "EXCLUDE SELECT star list wildcard", - query = """SELECT * - EXCLUDE - t.a.b.c[*].field_x - FROM [{ - 'a': { - 'b': { - 'c': [ - { -- c[0] - 'field_x': 0, - 'field_y': 0 - }, - { -- c[1] - 'field_x': 1, - 'field_y': 1 - }, - { -- c[2] - 'field_x': 2, - 'field_y': 2 - } - ] - } - }, - 'foo': 'bar' - }] AS t""", - expected = BagType( - StructType( - fields = mapOf( - "a" to StructType( - fields = mapOf( - "b" to StructType( - fields = mapOf( - "c" to ListType( - elementType = StructType( - fields = mapOf( - "field_y" to StaticType.INT - ), - contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)) + elementType = StructType( + fields = mapOf( + "b" to StructType( + fields = mapOf( + "c" to StaticType.INT4, + "d" to ListType( + elementType = StructType( + fields = mapOf( + "f" to StaticType.BOOL + ), + contentClosed = true, + constraints = setOf( + TupleConstraint.Open(false), + 
TupleConstraint.UniqueAttrs(true) + ) + ) ) + ), + contentClosed = true, + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true) ) ), - contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)) - ) + ), + contentClosed = true, + constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)) ), - contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)) - ), - "foo" to StaticType.STRING + ) ), contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true), TupleConstraint.Ordered) + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true), + TupleConstraint.Ordered + ) ) ) ), SuccessTestCase( - name = "EXCLUDE SELECT star list tuple wildcard", - query = """SELECT * - EXCLUDE - t.a.b.c[*].* - FROM [{ - 'a': { - 'b': { - 'c': [ - { -- c[0] - 'field_x': 0, - 'field_y': 0 - }, - { -- c[1] - 'field_x': 1, - 'field_y': 1 - }, - { -- c[2] - 'field_x': 2, - 'field_y': 2 - } - ] - } - }, - 'foo': 'bar' - }] AS t""", + name = "SELECT * EXCLUDE collection index and nested collection wildcard", + key = key("exclude-18"), expected = BagType( StructType( fields = mapOf( - "a" to StructType( - fields = mapOf( - "b" to StructType( - fields = mapOf( - "c" to ListType( - elementType = StructType( - fields = mapOf( - // all fields gone - ), - contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)) + "a" to ListType( + elementType = StructType( + fields = mapOf( + "b" to StructType( + fields = mapOf( + "c" to StaticType.INT4, + "d" to ListType( + elementType = StructType( + fields = mapOf( + "e" to StaticType.STRING.asOptional(), // last step is optional since only a[1]... 
is excluded + "f" to StaticType.BOOL + ), + contentClosed = true, + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true) + ) + ) ) + ), + contentClosed = true, + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true) ) ), - contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)) - ) + ), + contentClosed = true, + constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)) ), - contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)) - ), - "foo" to StaticType.STRING + ) ), contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true), TupleConstraint.Ordered) + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true), + TupleConstraint.Ordered + ) ) ) ), SuccessTestCase( - name = "EXCLUDE SELECT star order by", - query = """SELECT * - EXCLUDE - t.a - FROM [ - { - 'a': 2, - 'foo': 'bar2' - }, - { - 'a': 1, - 'foo': 'bar1' - }, - { - 'a': 3, - 'foo': 'bar3' - } - ] AS t - ORDER BY t.a""", - expected = ListType( + name = "SELECT * EXCLUDE collection index and nested collection index", + key = key("exclude-19"), + expected = BagType( StructType( fields = mapOf( - "foo" to StaticType.STRING + "a" to ListType( + elementType = StructType( + fields = mapOf( + "b" to StructType( + fields = mapOf( + "c" to StaticType.INT4, + "d" to ListType( + elementType = StructType( + fields = mapOf( // same as above + "e" to StaticType.STRING.asOptional(), + "f" to StaticType.BOOL + ), + contentClosed = true, + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true) + ) + ) + ) + ), + contentClosed = true, + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true) + ) + ), + ), + contentClosed = true, + constraints = setOf(TupleConstraint.Open(false), 
TupleConstraint.UniqueAttrs(true)) + ), + ) ), contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true), TupleConstraint.Ordered) + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true), + TupleConstraint.Ordered + ) ) ) ), SuccessTestCase( - name = "EXCLUDE SELECT star with JOINs", - query = """SELECT * - EXCLUDE bar.d - FROM - << - {'a': 1, 'b': 11}, - {'a': 2, 'b': 22} - >> AS foo, - << - {'c': 3, 'd': 33}, - {'c': 4, 'd': 44} - >> AS bar""", + name = "EXCLUDE case sensitive lookup", + key = key("exclude-20"), expected = BagType( StructType( fields = mapOf( - "a" to StaticType.INT, - "b" to StaticType.INT, - "c" to StaticType.INT + "a" to StructType( + fields = mapOf( + "B" to StructType( + fields = mapOf( + "d" to StaticType.STRING + ), + contentClosed = true, + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true) + ) + ), + ), + contentClosed = true, + constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)) + ), ), contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true), TupleConstraint.Ordered) + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true), + TupleConstraint.Ordered + ) ) ) ), SuccessTestCase( - name = "SELECT t.b EXCLUDE ex 1", - query = """SELECT t.b EXCLUDE t.b[*].b_1 - FROM << - { - 'a': {'a_1':1,'a_2':2}, - 'b': [ {'b_1':3,'b_2':4}, {'b_1':5,'b_2':6} ], - 'c': 7, - 'd': 8 - } >> AS t""", + name = "EXCLUDE case sensitive lookup with capitalized and uncapitalized attr", + key = key("exclude-21"), expected = BagType( StructType( fields = mapOf( - "b" to ListType( - elementType = StructType( - fields = mapOf( - "b_2" to StaticType.INT + "a" to StructType( + fields = mapOf( + "B" to StructType( + fields = mapOf( + "C" to StaticType.BOOL, // keep 'C' + "d" to StaticType.STRING + ), + contentClosed = true, + constraints = 
setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true) + ) ), - contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)) - ) + ), + contentClosed = true, + constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)) ), ), contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true), TupleConstraint.Ordered) + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true), + TupleConstraint.Ordered + ) ) ) ), SuccessTestCase( - name = "SELECT * EXCLUDE ex 2", - query = """SELECT * EXCLUDE t.b[*].b_1 - FROM << - { - 'a': {'a_1':1,'a_2':2}, - 'b': [ {'b_1':3,'b_2':4}, {'b_1':5,'b_2':6} ], - 'c': 7, - 'd': 8 - } >> AS t""", + name = "EXCLUDE case sensitive lookup with both capitalized and uncapitalized removed", + key = key("exclude-22"), expected = BagType( StructType( fields = mapOf( "a" to StructType( fields = mapOf( - "a_1" to StaticType.INT, - "a_2" to StaticType.INT + "B" to StructType( + fields = mapOf( + "d" to StaticType.STRING + ), + contentClosed = true, + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true) + ) + ), ), contentClosed = true, constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)) ), - "b" to ListType( - elementType = StructType( - fields = mapOf( - "b_2" to StaticType.INT - ), - contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)) - ) - ), - "c" to StaticType.INT, - "d" to StaticType.INT ), contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true), TupleConstraint.Ordered) - ) - ) - ), - SuccessTestCase( - name = "SELECT VALUE t.b EXCLUDE", - query = """SELECT VALUE t.b EXCLUDE t.b[*].b_1 - FROM << - { - 'a': {'a_1':1,'a_2':2}, - 'b': [ {'b_1':3,'b_2':4}, {'b_1':5,'b_2':6} ], - 'c': 7, - 'd': 8 - } >> AS t""", - 
expected = BagType( - ListType( - elementType = StructType( - fields = mapOf( - "b_2" to StaticType.INT - ), - contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)) + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true), + TupleConstraint.Ordered ) - ), + ) ) ), SuccessTestCase( - name = "SELECT * EXCLUDE collection wildcard and nested tuple attr", - query = """SELECT * EXCLUDE t.a[*].b.c - FROM << - { - 'a': [ - { 'b': { 'c': 0, 'd': 'zero' } }, - { 'b': { 'c': 1, 'd': 'one' } }, - { 'b': { 'c': 2, 'd': 'two' } } - ] - } - >> AS t""", + name = "EXCLUDE with both duplicates", + key = key("exclude-23"), expected = BagType( StructType( fields = mapOf( - "a" to ListType( - elementType = StructType( - fields = mapOf( - "b" to StructType( - fields = mapOf( - "d" to StaticType.STRING - ), - contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)) + "a" to StructType( + fields = mapOf( + "B" to StructType( + fields = mapOf( + // both "c" removed + "d" to StaticType.STRING ), + contentClosed = true, + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(false) + ) // UniqueAttrs set to false ), - contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)) ), - ) + contentClosed = true, + constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)) + ), ), contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true), TupleConstraint.Ordered) + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true), + TupleConstraint.Ordered + ) ) ) ), + // EXCLUDE regression test (behavior subject to change pending RFC) SuccessTestCase( - name = "SELECT * EXCLUDE collection index and nested tuple attr", - query = """SELECT * EXCLUDE t.a[1].b.c - FROM << - { - 'a': [ - { 'b': { 
'c': 0, 'd': 'zero' } }, - { 'b': { 'c': 1, 'd': 'one' } }, - { 'b': { 'c': 2, 'd': 'two' } } - ] - } - >> AS t""", + name = "EXCLUDE with removed attribute later referenced", + key = key("exclude-24"), expected = BagType( StructType( fields = mapOf( - "a" to ListType( - elementType = StructType( - fields = mapOf( - "b" to StructType( - fields = mapOf( - "c" to StaticType.INT.asOptional(), - "d" to StaticType.STRING - ), - contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)) - ), - ), - contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)) - ), - ) + "c" to StaticType.INT4 ), contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true), TupleConstraint.Ordered) + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true), + TupleConstraint.Ordered + ) ) ) ), + // EXCLUDE regression test (behavior subject to change pending RFC) SuccessTestCase( - name = "SELECT * EXCLUDE collection wildcard and nested tuple wildcard", - query = """SELECT * EXCLUDE t.a[*].b.* - FROM << - { - 'a': [ - { 'b': { 'c': 0, 'd': 'zero' } }, - { 'b': { 'c': 1, 'd': 'one' } }, - { 'b': { 'c': 2, 'd': 'two' } } - ] - } - >> AS t""", + name = "EXCLUDE with non-existent attribute reference", + key = key("exclude-25"), expected = BagType( StructType( fields = mapOf( - "a" to ListType( - elementType = StructType( - fields = mapOf( - "b" to StructType( - fields = mapOf(), // empty map; all fields of b excluded - contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)) - ), - ), - contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)) - ), - ) + "a" to StaticType.INT4 ), contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true), TupleConstraint.Ordered) + constraints = 
setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true), + TupleConstraint.Ordered + ) ) ) ), + // EXCLUDE regression test (behavior subject to change pending RFC); could give error/warning SuccessTestCase( - name = "SELECT * EXCLUDE collection index and nested tuple wildcard", - query = """SELECT * EXCLUDE t.a[1].b.* - FROM << - { - 'a': [ - { 'b': { 'c': 0, 'd': 'zero' } }, - { 'b': { 'c': 1, 'd': 'one' } }, - { 'b': { 'c': 2, 'd': 'two' } } - ] - } - >> AS t""", + name = "exclude union of types", + key = key("exclude-26"), expected = BagType( StructType( fields = mapOf( - "a" to ListType( - elementType = StructType( + "t" to StaticType.unionOf( + StructType( fields = mapOf( - "b" to StructType( - fields = mapOf( // all fields of b optional - "c" to StaticType.INT.asOptional(), - "d" to StaticType.STRING.asOptional() + "a" to StructType( + fields = mapOf( + "c" to StaticType.STRING ), contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)) - ), + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true) + ) + ) ), contentClosed = true, constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)) ), - ) - ), - contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true), TupleConstraint.Ordered) - ) - ) - ), - SuccessTestCase( - name = "SELECT * EXCLUDE collection wildcard and nested collection wildcard", - query = """SELECT * EXCLUDE t.a[*].b.d[*].e - FROM << - { - 'a': [ - { 'b': { 'c': 0, 'd': [{'e': 'zero', 'f': true}] } }, - { 'b': { 'c': 1, 'd': [{'e': 'one', 'f': true}] } }, - { 'b': { 'c': 2, 'd': [{'e': 'two', 'f': true}] } } - ] - } - >> AS t""", - expected = BagType( - StructType( - fields = mapOf( - "a" to ListType( - elementType = StructType( + StructType( fields = mapOf( - "b" to StructType( - fields = mapOf( - "c" to StaticType.INT, - "d" to ListType( - elementType = StructType( - 
fields = mapOf( - "f" to StaticType.BOOL - ), - contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)) - ) - ) - ), - contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)) - ), + "a" to StaticType.NULL ), contentClosed = true, constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)) @@ -1975,91 +1564,49 @@ class PartiQLSchemaInferencerTests { ) ), contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true), TupleConstraint.Ordered) + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true), + TupleConstraint.Ordered + ) ) ) ), SuccessTestCase( - name = "SELECT * EXCLUDE collection index and nested collection wildcard", - query = """SELECT * EXCLUDE t.a[1].b.d[*].e - FROM << - { - 'a': [ - { 'b': { 'c': 0, 'd': [{'e': 'zero', 'f': true}] } }, - { 'b': { 'c': 1, 'd': [{'e': 'one', 'f': true}] } }, - { 'b': { 'c': 2, 'd': [{'e': 'two', 'f': true}] } } - ] - } - >> AS t""", + name = "exclude union of types exclude same type", + key = key("exclude-27"), expected = BagType( StructType( fields = mapOf( - "a" to ListType( - elementType = StructType( + "t" to StaticType.unionOf( + StructType( fields = mapOf( - "b" to StructType( + "a" to StructType( fields = mapOf( - "c" to StaticType.INT, - "d" to ListType( - elementType = StructType( - fields = mapOf( - "e" to StaticType.STRING.asOptional(), // last step is optional since only a[1]... 
is excluded - "f" to StaticType.BOOL - ), - contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)) - ) - ) + "c" to StaticType.STRING ), contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)) - ), + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true) + ) + ) ), contentClosed = true, constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)) ), - ) - ), - contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true), TupleConstraint.Ordered) - ) - ) - ), - SuccessTestCase( - name = "SELECT * EXCLUDE collection index and nested collection index", - query = """SELECT * EXCLUDE t.a[1].b.d[0].e - FROM << - { - 'a': [ - { 'b': { 'c': 0, 'd': [{'e': 'zero', 'f': true}] } }, - { 'b': { 'c': 1, 'd': [{'e': 'one', 'f': true}] } }, - { 'b': { 'c': 2, 'd': [{'e': 'two', 'f': true}] } } - ] - } - >> AS t""", - expected = BagType( - StructType( - fields = mapOf( - "a" to ListType( - elementType = StructType( + StructType( fields = mapOf( - "b" to StructType( + "a" to StructType( fields = mapOf( - "c" to StaticType.INT, - "d" to ListType( - elementType = StructType( - fields = mapOf( // same as above - "e" to StaticType.STRING.asOptional(), - "f" to StaticType.BOOL - ), - contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)) - ) - ) + "c" to StaticType.NULL ), contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)) - ), + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true) + ) + ) ), contentClosed = true, constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)) @@ -2067,729 +1614,2034 @@ class PartiQLSchemaInferencerTests { ) ), contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), 
TupleConstraint.UniqueAttrs(true), TupleConstraint.Ordered) + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true), + TupleConstraint.Ordered + ) ) ) ), SuccessTestCase( - name = "EXCLUDE case sensitive lookup", - query = """SELECT * EXCLUDE t."a".b['c'] - FROM << - { - 'a': { - 'B': { - 'c': 0, - 'd': 'foo' - } - } - } - >> AS t""", + name = "exclude union of types exclude different type", + key = key("exclude-28"), expected = BagType( StructType( fields = mapOf( - "a" to StructType( + "t" to StructType( // union gone fields = mapOf( - "B" to StructType( + "a" to StructType( fields = mapOf( - "d" to StaticType.STRING + "b" to StaticType.INT4 ), contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)) - ), + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true) + ) + ) ), contentClosed = true, constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)) - ), + ) ), contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true), TupleConstraint.Ordered) + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true), + TupleConstraint.Ordered + ) ) ) ), + // EXCLUDE regression test (behavior subject to change pending RFC); could give error/warning SuccessTestCase( - name = "EXCLUDE case sensitive lookup with capitalized and uncapitalized attr", - query = """SELECT * EXCLUDE t."a".b['c'] - FROM << - { - 'a': { - 'B': { - 'c': 0, - 'C': true, - 'd': 'foo' - } - } - } - >> AS t""", + name = "invalid exclude collection wildcard", + key = key("exclude-29"), expected = BagType( - StructType( + elementType = StructType( fields = mapOf( "a" to StructType( fields = mapOf( - "B" to StructType( + "b" to StructType( fields = mapOf( - "C" to StaticType.BOOL, // keep 'C' + "c" to StaticType.INT4, "d" to StaticType.STRING ), contentClosed = true, - constraints = 
setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)) - ), + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true) + ) + ) ), contentClosed = true, constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)) - ), + ) ), contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true), TupleConstraint.Ordered) + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true), + TupleConstraint.Ordered + ) ) ) ), + // EXCLUDE regression test (behavior subject to change pending RFC); could give error/warning SuccessTestCase( - name = "EXCLUDE case sensitive lookup with both capitalized and uncapitalized removed", - query = """SELECT * EXCLUDE t."a".b.c - FROM << - { - 'a': { - 'B': { -- both 'c' and 'C' to be removed - 'c': 0, - 'C': true, - 'd': 'foo' - } - } - } - >> AS t""", + name = "invalid exclude collection index", + key = key("exclude-30"), expected = BagType( - StructType( + elementType = StructType( fields = mapOf( "a" to StructType( fields = mapOf( - "B" to StructType( + "b" to StructType( fields = mapOf( + "c" to StaticType.INT4, "d" to StaticType.STRING ), contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)) - ), + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true) + ) + ) ), contentClosed = true, constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)) - ), + ) ), contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true), TupleConstraint.Ordered) + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true), + TupleConstraint.Ordered + ) ) ) ), + // EXCLUDE regression test (behavior subject to change pending RFC); could give error/warning SuccessTestCase( - name = "EXCLUDE with both duplicates", - query = """SELECT * EXCLUDE 
t."a".b.c - FROM << - { - 'a': { - 'B': { - 'c': 0, - 'c': true, - 'd': 'foo' - } - } - } - >> AS t""", + name = "invalid exclude tuple attr", + key = key("exclude-31"), expected = BagType( - StructType( + elementType = StructType( fields = mapOf( - "a" to StructType( - fields = mapOf( - "B" to StructType( - fields = mapOf( - // both "c" removed - "d" to StaticType.STRING - ), - contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(false)) // UniqueAttrs set to false + "a" to ListType( + elementType = StructType( + fields = mapOf( + "b" to StaticType.INT4 ), - ), - contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)) - ), + contentClosed = true, + constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)) + ) + ) ), contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true), TupleConstraint.Ordered) + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true), + TupleConstraint.Ordered + ) ) ) ), - // EXCLUDE regression test (behavior subject to change pending RFC) + // EXCLUDE regression test (behavior subject to change pending RFC); could give error/warning SuccessTestCase( - name = "EXCLUDE with removed attribute later referenced", - query = "SELECT * EXCLUDE t.a, t.a.b FROM << { 'a': { 'b': 1 }, 'c': 2 } >> AS t", + name = "invalid exclude tuple wildcard", + key = key("exclude-32"), expected = BagType( - StructType( + elementType = StructType( fields = mapOf( - "c" to StaticType.INT + "a" to ListType( + elementType = StructType( + fields = mapOf( + "b" to StaticType.INT4 + ), + contentClosed = true, + constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)) + ) + ) ), contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true), TupleConstraint.Ordered) + constraints = setOf( + 
TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true), + TupleConstraint.Ordered + ) ) ) ), - // EXCLUDE regression test (behavior subject to change pending RFC) + // EXCLUDE regression test (behavior subject to change pending RFC); could give error/warning SuccessTestCase( - name = "EXCLUDE with non-existent attribute reference", - query = "SELECT * EXCLUDE t.attr_does_not_exist FROM << { 'a': 1 } >> AS t", + name = "invalid exclude tuple attr step", + key = key("exclude-33"), expected = BagType( - StructType( + elementType = StructType( fields = mapOf( - "a" to StaticType.INT + "a" to BagType( + elementType = StructType( + fields = mapOf( + "b" to StaticType.INT4 + ), + contentClosed = true, + constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)) + ) + ) ), contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true), TupleConstraint.Ordered) + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true), + TupleConstraint.Ordered + ) ) ) ), // EXCLUDE regression test (behavior subject to change pending RFC); could give error/warning - SuccessTestCase( - name = "exclude union of types", - query = """SELECT t EXCLUDE t.a.b - FROM << - { - 'a': { - 'b': 1, -- `b` to be excluded - 'c': 'foo' - } - }, - { - 'a': NULL - } - >> AS t""", + ErrorTestCase( + name = "invalid exclude root", + key = key("exclude-34"), expected = BagType( - StructType( + elementType = StructType( fields = mapOf( - "t" to StaticType.unionOf( - StructType( - fields = mapOf( - "a" to StructType( - fields = mapOf( - "c" to StaticType.STRING - ), - contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)) - ) - ), - contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)) - ), - StructType( + "a" to BagType( + elementType = StructType( fields = mapOf( - "a" to StaticType.NULL + "b" to 
StaticType.INT4 ), contentClosed = true, constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)) - ), + ) ) ), contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true), TupleConstraint.Ordered) + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true), + TupleConstraint.Ordered + ) ) - ) + ), + problemHandler = assertProblemExists { + Problem( + UNKNOWN_PROBLEM_LOCATION, + PlanningProblemDetails.UnresolvedExcludeExprRoot("nonsense") + ) + } ), + // EXCLUDE regression test (behavior subject to change pending RFC); could give error/warning SuccessTestCase( - name = "exclude union of types exclude same type", - query = """SELECT t EXCLUDE t.a.b - FROM << - { - 'a': { - 'b': 1, -- `b` to be excluded - 'c': 'foo' - } - }, - { - 'a': { - 'b': 1, -- `b` to be excluded - 'c': NULL - } - } - >> AS t""", + name = "exclude with unions and last step collection index", + key = key("exclude-35"), expected = BagType( - StructType( + elementType = StructType( fields = mapOf( - "t" to StaticType.unionOf( - StructType( - fields = mapOf( - "a" to StructType( - fields = mapOf( - "c" to StaticType.STRING - ), - contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)) + "a" to ListType( + elementType = StaticType.unionOf( + StructType( + fields = mapOf( + "b" to StaticType.INT4, + "c" to StaticType.INT4.asOptional() + ), + contentClosed = true, + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true) ) ), - contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)) - ), - StructType( - fields = mapOf( - "a" to StructType( - fields = mapOf( - "c" to StaticType.NULL - ), - contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)) + StructType( + fields = mapOf( + "b" to StaticType.INT4, + "c" to 
StaticType.NULL.asOptional() + ), + contentClosed = true, + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true) ) ), - contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)) - ), - ) - ), - contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true), TupleConstraint.Ordered) - ) - ) - ), - SuccessTestCase( - name = "exclude union of types exclude different type", - query = """SELECT t EXCLUDE t.a.c - FROM << - { - 'a': { - 'b': 1, - 'c': 'foo' -- `c` to be excluded - } - }, - { - 'a': { - 'b': 1, - 'c': NULL -- `c` to be excluded - } - } - >> AS t""", - expected = BagType( - StructType( - fields = mapOf( - "t" to StructType( // union gone - fields = mapOf( - "a" to StructType( + StructType( fields = mapOf( - "b" to StaticType.INT + "b" to StaticType.INT4, + "c" to StaticType.DECIMAL.asOptional() ), contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)) + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true) + ) ) - ), - contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)) + ) ) ), contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true), TupleConstraint.Ordered) + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true), + TupleConstraint.Ordered + ) ) ) ), - // EXCLUDE regression test (behavior subject to change pending RFC); could give error/warning SuccessTestCase( - name = "invalid exclude collection wildcard", - query = """SELECT * EXCLUDE t.a[*] - FROM << - { - 'a': { - 'b': { - 'c': 0, - 'd': 'foo' - } - } - } - >> AS t""", + name = "EXCLUDE using a catalog", + catalog = CATALOG_B, + key = key("exclude-36"), expected = BagType( elementType = StructType( fields = mapOf( - "a" to StructType( + "b" to 
StructType( fields = mapOf( - "b" to StructType( - fields = mapOf( - "c" to StaticType.INT, - "d" to StaticType.STRING - ), - contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)) - ) + "b" to StaticType.INT4 ), contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)) - ) + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true), + TupleConstraint.Ordered + ) + ), ), contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true), TupleConstraint.Ordered) + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true), + TupleConstraint.Ordered + ) ) ) ), - // EXCLUDE regression test (behavior subject to change pending RFC); could give error/warning + ) + + @JvmStatic + fun orderByCases() = listOf( SuccessTestCase( - name = "invalid exclude collection index", - query = """SELECT * EXCLUDE t.a[1] - FROM << - { - 'a': { - 'b': { - 'c': 0, - 'd': 'foo' - } - } - } - >> AS t""", + name = "ORDER BY int", + catalog = CATALOG_AWS, + catalogPath = listOf("ddb"), + query = "SELECT * FROM pets ORDER BY id", + expected = TABLE_AWS_DDB_PETS_LIST + ), + SuccessTestCase( + name = "ORDER BY str", + catalog = CATALOG_AWS, + catalogPath = listOf("ddb"), + query = "SELECT * FROM pets ORDER BY breed", + expected = TABLE_AWS_DDB_PETS_LIST + ), + ErrorTestCase( + name = "ORDER BY str", + catalog = CATALOG_AWS, + catalogPath = listOf("ddb"), + query = "SELECT * FROM pets ORDER BY unknown_col", + expected = TABLE_AWS_DDB_PETS_LIST, + problemHandler = assertProblemExists { + Problem( + UNKNOWN_PROBLEM_LOCATION, + PlanningProblemDetails.UndefinedVariable("unknown_col", false) + ) + } + ), + ) + + @JvmStatic + fun tupleUnionCases() = listOf( + SuccessTestCase( + name = "Empty Tuple Union", + query = "TUPLEUNION()", + expected = StructType( + fields = emptyMap(), + contentClosed = true, + 
constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true), + TupleConstraint.Ordered + ) + ) + ), + SuccessTestCase( + name = "Tuple Union with Literal Struct", + query = "TUPLEUNION({ 'a': 1, 'b': 'hello' })", + expected = StructType( + fields = mapOf( + "a" to StaticType.INT4, + "b" to StaticType.STRING, + ), + contentClosed = true, + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true), + ) + ), + ), + SuccessTestCase( + name = "Tuple Union with Literal Struct AND Duplicates", + query = "TUPLEUNION({ 'a': 1, 'a': 'hello' })", + expected = StructType( + fields = listOf( + StructType.Field("a", INT4), + StructType.Field("a", STRING), + ), + contentClosed = true, + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(false), + ) + ), + ), + SuccessTestCase( + name = "Tuple Union with Nested Struct", + query = """ + SELECT VALUE TUPLEUNION( + t.a + ) FROM << + { 'a': { 'b': 1 } } + >> AS t + """, expected = BagType( - elementType = StructType( - fields = mapOf( - "a" to StructType( - fields = mapOf( - "b" to StructType( - fields = mapOf( - "c" to StaticType.INT, - "d" to StaticType.STRING - ), - contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)) - ) - ), - contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)) - ) + StructType( + fields = listOf( + StructType.Field("b", INT4), ), contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true), TupleConstraint.Ordered) + // TODO: This shouldn't be ordered. However, this doesn't come from the TUPLEUNION. It is + // coming from the RexOpSelect. 
+ constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true), + TupleConstraint.Ordered + ) ) - ) + ), ), - // EXCLUDE regression test (behavior subject to change pending RFC); could give error/warning SuccessTestCase( - name = "invalid exclude tuple attr", - query = """SELECT * EXCLUDE t.a.b - FROM << - { - 'a': [ - { 'b': 0 }, - { 'b': 1 }, - { 'b': 2 } - ] - } - >> AS t""", + name = "Tuple Union with Heterogeneous Data", + query = """ + SELECT VALUE TUPLEUNION( + t.a + ) FROM << + { 'a': { 'b': 1 } }, + { 'a': 1 } + >> AS t + """, expected = BagType( - elementType = StructType( - fields = mapOf( - "a" to ListType( - elementType = StructType( - fields = mapOf( - "b" to StaticType.INT - ), - contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)) - ) + unionOf( + MISSING, + StructType( + fields = listOf( + StructType.Field("b", INT4), + ), + contentClosed = true, + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true), ) - ), - contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true), TupleConstraint.Ordered) + ) ) - ) + ), ), - // EXCLUDE regression test (behavior subject to change pending RFC); could give error/warning SuccessTestCase( - name = "invalid exclude tuple wildcard", - query = """SELECT * EXCLUDE t.a.* - FROM << - { - 'a': [ - { 'b': 0 }, - { 'b': 1 }, - { 'b': 2 } - ] - } - >> AS t""", + name = "Tuple Union with Heterogeneous Data (2)", + query = """ + SELECT VALUE TUPLEUNION( + t.a + ) FROM << + { 'a': { 'b': 1 } }, + { 'a': { 'b': 'hello' } }, + { 'a': NULL }, + { 'a': 4.5 }, + { } + >> AS t + """, expected = BagType( - elementType = StructType( - fields = mapOf( - "a" to ListType( - elementType = StructType( - fields = mapOf( - "b" to StaticType.INT - ), - contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)) - ) + unionOf( + NULL, 
+ MISSING, + StructType( + fields = listOf( + StructType.Field("b", INT4), + ), + contentClosed = true, + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true), ) ), + StructType( + fields = listOf( + StructType.Field("b", STRING), + ), + contentClosed = true, + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true), + ) + ) + ) + ), + ), + SuccessTestCase( + name = "Tuple Union with Heterogeneous Data (3)", + query = """ + SELECT VALUE TUPLEUNION( + p.name + ) FROM aws.ddb.persons AS p + """, + expected = BagType( + unionOf( + MISSING, + StructType( + fields = listOf( + StructType.Field("first", STRING), + StructType.Field("last", STRING), + ), + contentClosed = false, + constraints = setOf( + TupleConstraint.Open(true), + TupleConstraint.UniqueAttrs(false), + ) + ), + StructType( + fields = listOf( + StructType.Field("full_name", STRING), + ), + contentClosed = true, + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true), + TupleConstraint.Ordered + ) + ), + ) + ), + ), + SuccessTestCase( + name = "Complex Tuple Union with Heterogeneous Data", + query = """ + SELECT VALUE TUPLEUNION( + p.name, + p.name + ) FROM aws.ddb.persons AS p + """, + expected = BagType( + unionOf( + MISSING, + StructType( + fields = listOf( + StructType.Field("first", STRING), + StructType.Field("last", STRING), + StructType.Field("first", STRING), + StructType.Field("last", STRING), + ), + contentClosed = false, + constraints = setOf( + TupleConstraint.Open(true), + TupleConstraint.UniqueAttrs(false), + ) + ), + StructType( + fields = listOf( + StructType.Field("first", STRING), + StructType.Field("last", STRING), + StructType.Field("full_name", STRING), + ), + contentClosed = false, + constraints = setOf( + TupleConstraint.Open(true), + TupleConstraint.UniqueAttrs(false), + ) + ), + StructType( + fields = listOf( + StructType.Field("full_name", STRING), + StructType.Field("first", 
STRING), + StructType.Field("last", STRING), + ), + contentClosed = false, + constraints = setOf( + TupleConstraint.Open(true), + TupleConstraint.UniqueAttrs(false), + ) + ), + StructType( + fields = listOf( + StructType.Field("full_name", STRING), + StructType.Field("full_name", STRING), + ), + contentClosed = true, + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(false), + TupleConstraint.Ordered + ) + ), + ) + ), + ), + ) + + @JvmStatic + fun caseWhens() = listOf( + SuccessTestCase( + name = "Easy case when", + query = """ + CASE + WHEN FALSE THEN 0 + WHEN TRUE THEN 1 + ELSE 2 + END; + """, + expected = INT4 + ), + SuccessTestCase( + name = "Folded case when to grab the true", + query = """ + CASE + WHEN FALSE THEN 0 + WHEN TRUE THEN 'hello' + END; + """, + expected = STRING + ), + SuccessTestCase( + name = "Boolean case when", + query = """ + CASE 'Hello World' + WHEN 'Hello World' THEN TRUE + ELSE FALSE + END; + """, + expected = BOOL + ), + SuccessTestCase( + name = "Folded out false", + query = """ + CASE + WHEN FALSE THEN 'IMPOSSIBLE TO GET' + ELSE TRUE + END; + """, + expected = BOOL + ), + SuccessTestCase( + name = "Folded out false without default", + query = """ + CASE + WHEN FALSE THEN 'IMPOSSIBLE TO GET' + END; + """, + expected = NULL + ), + SuccessTestCase( + name = "Not folded gives us a nullable without default", + query = """ + CASE 1 + WHEN 1 THEN TRUE + WHEN 2 THEN FALSE + END; + """, + expected = BOOL.asNullable() + ), + SuccessTestCase( + name = "Not folded gives us a nullable without default for query", + query = """ + SELECT + CASE breed + WHEN 'golden retriever' THEN 'fluffy dog' + WHEN 'pitbull' THEN 'short-haired dog' + END AS breed_descriptor + FROM dogs + """, + catalog = "pql", + catalogPath = listOf("main"), + expected = BagType( + StructType( + fields = mapOf( + "breed_descriptor" to STRING.asNullable(), + ), + contentClosed = true, + constraints = setOf( + TupleConstraint.Open(false), + 
TupleConstraint.UniqueAttrs(true), + TupleConstraint.Ordered + ) + ) + ) + ), + SuccessTestCase( + name = "Query", + query = """ + SELECT + CASE breed + WHEN 'golden retriever' THEN 'fluffy dog' + WHEN 'pitbull' THEN 'short-haired dog' + ELSE 'something else' + END AS breed_descriptor + FROM dogs + """, + catalog = "pql", + catalogPath = listOf("main"), + expected = BagType( + StructType( + fields = mapOf( + "breed_descriptor" to STRING, + ), + contentClosed = true, + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true), + TupleConstraint.Ordered + ) + ) + ) + ), + SuccessTestCase( + name = "Query with heterogeneous data", + query = """ + SELECT + CASE breed + WHEN 'golden retriever' THEN 'fluffy dog' + WHEN 'pitbull' THEN 2 + ELSE 2.0 + END AS breed_descriptor + FROM dogs + """, + catalog = "pql", + catalogPath = listOf("main"), + expected = BagType( + StructType( + fields = mapOf( + "breed_descriptor" to unionOf(STRING, INT4, DECIMAL), + ), + contentClosed = true, + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true), + TupleConstraint.Ordered + ) + ) + ) + ), + ) + + @JvmStatic + fun pathExpressions() = listOf( + SuccessTestCase( + name = "Index on literal list", + query = """ + [0, 1, 2, 3][0] + """, + expected = INT4 + ), + SuccessTestCase( + name = "Index on global list", + query = """ + dogs[0].breed + """, + catalog = "pql", + catalogPath = listOf("main"), + expected = STRING + ), + SuccessTestCase( + name = "Index on list attribute of global table", + query = """ + SELECT typical_allergies[0] AS main_allergy FROM dogs + """, + catalog = "pql", + catalogPath = listOf("main"), + expected = BagType( + StructType( + fields = mapOf( + "main_allergy" to STRING, + ), + contentClosed = true, + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true), + TupleConstraint.Ordered + ) + ) + ) + ), + SuccessTestCase( + name = "Pathing into resolved local variable 
without qualification", + query = """ + SELECT address.street AS s FROM employer; + """, + catalog = "pql", + catalogPath = listOf("main"), + expected = BagType( + StructType( + fields = mapOf( + "s" to STRING, + ), + contentClosed = true, + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true), + TupleConstraint.Ordered + ) + ) + ) + ), + SuccessTestCase( + name = "Pathing into resolved local variable without qualification and with sensitivity", + query = """ + SELECT address."street" AS s FROM employer; + """, + catalog = "pql", + catalogPath = listOf("main"), + expected = BagType( + StructType( + fields = mapOf( + "s" to STRING, + ), + contentClosed = true, + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true), + TupleConstraint.Ordered + ) + ) + ) + ), + SuccessTestCase( + name = "Pathing into resolved local variable without qualification and with indexing syntax", + query = """ + SELECT address['street'] AS s FROM employer; + """, + catalog = "pql", + catalogPath = listOf("main"), + expected = BagType( + StructType( + fields = mapOf( + "s" to STRING, + ), + contentClosed = true, + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true), + TupleConstraint.Ordered + ) + ) + ) + ), + SuccessTestCase( + name = "Pathing into resolved local variable without qualification and with indexing syntax and fully-qualified FROM", + query = """ + SELECT e.address['street'] AS s FROM "pql"."main"."employer" AS e; + """, + expected = BagType( + StructType( + fields = mapOf( + "s" to STRING, + ), + contentClosed = true, + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true), + TupleConstraint.Ordered + ) + ) + ) + ), + ErrorTestCase( + name = "Show that we can't use [] to reference a value in a schema. 
It can only be used on tuples.", + query = """ + SELECT VALUE 1 FROM "pql"."main"['employer'] AS e; + """, + expected = BagType(INT4), + problemHandler = assertProblemExists { + Problem( + UNKNOWN_PROBLEM_LOCATION, + PlanningProblemDetails.UndefinedVariable("main", true) + ) + } + ), + ErrorTestCase( + name = "Show that we can't use [] to reference a schema in a catalog. It can only be used on tuples.", + query = """ + SELECT VALUE 1 FROM "pql"['main']."employer" AS e; + """, + expected = BagType(INT4), + problemHandler = assertProblemExists { + Problem( + UNKNOWN_PROBLEM_LOCATION, + PlanningProblemDetails.UndefinedVariable("pql", true) + ) + } + ), + SuccessTestCase( + name = "Tuple indexing syntax on literal tuple with literal string key", + query = """ + { 'aBc': 1, 'AbC': 2.0 }['AbC']; + """, + expected = DECIMAL + ), + // This should fail because the Spec says tuple indexing MUST use a literal string or explicit cast. + ErrorTestCase( + name = "Array indexing syntax on literal tuple with non-literal and non-cast key", + query = """ + { 'aBc': 1, 'AbC': 2.0 }['Ab' || 'C']; + """, + expected = MISSING, + problemHandler = assertProblemExists { + Problem( + sourceLocation = UNKNOWN_PROBLEM_LOCATION, + details = PlanningProblemDetails.ExpressionAlwaysReturnsNullOrMissing + ) + } + ), + // The reason this is ANY is because we do not have support for constant-folding. We don't know what + // CAST('Ab' || 'C' AS STRING) will evaluate to, and therefore, we don't know what the indexing operation + // will return. + SuccessTestCase( + name = "Tuple indexing syntax on literal tuple with explicit cast key", + query = """ + { 'aBc': 1, 'AbC': 2.0 }[CAST('Ab' || 'C' AS STRING)]; + """, + expected = ANY + ), + ) + + @JvmStatic + fun scalarFunctions() = listOf( + SuccessTestCase( + name = "UPPER on binding tuple of literal string", + query = """ + SELECT + UPPER(some_str) AS upper_str + FROM + << { 'some_str': 'hello world!' 
} >> + AS t + """, + expected = BagType( + StructType( + fields = mapOf( + "upper_str" to STRING, + ), + contentClosed = true, + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true), + TupleConstraint.Ordered + ) + ) + ) + ), + SuccessTestCase( + name = "UPPER on literal string", + query = """ + UPPER('hello world') + """, + expected = STRING + ), + SuccessTestCase( + name = "UPPER on global string", + query = """ + UPPER(os) + """, + catalog = "pql", + catalogPath = listOf("main"), + expected = STRING + ), + SuccessTestCase( + name = "UPPER on global string", + query = """ + UPPER(os) + """, + catalog = "pql", + catalogPath = listOf("main"), + expected = STRING + ), + SuccessTestCase( + name = "UPPER on global struct", + query = """ + UPPER(person.ssn) + """, + catalog = "pql", + catalogPath = listOf("main"), + expected = STRING + ), + SuccessTestCase( + name = "UPPER on global nested struct", + query = """ + UPPER(person.name."first") + """, + catalog = "pql", + catalogPath = listOf("main"), + expected = STRING + ), + SuccessTestCase( + name = "UPPER on global table", + query = """ + SELECT UPPER(breed) AS upper_breed + FROM dogs + """, + catalog = "pql", + catalogPath = listOf("main"), + expected = BagType( + StructType( + fields = mapOf( + "upper_breed" to STRING, + ), + contentClosed = true, + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true), + TupleConstraint.Ordered + ) + ) + ) + ), + ) + + @JvmStatic + fun aggregationCases() = listOf( + SuccessTestCase( + name = "AGGREGATE over INTS, without alias", + query = "SELECT a, COUNT(*), SUM(a), MIN(b) FROM << {'a': 1, 'b': 2} >> GROUP BY a", + expected = BagType( + StructType( + fields = mapOf( + "a" to INT4, + "_1" to INT4, + "_2" to INT4.asNullable(), + "_3" to INT4.asNullable(), + ), + contentClosed = true, + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true), + TupleConstraint.Ordered + ) + ) + ) + ), 
+ SuccessTestCase( + name = "AGGREGATE over INTS, with alias", + query = "SELECT a, COUNT(*) AS c, SUM(a) AS s, MIN(b) AS m FROM << {'a': 1, 'b': 2} >> GROUP BY a", + expected = BagType( + StructType( + fields = mapOf( + "a" to INT4, + "c" to INT4, + "s" to INT4.asNullable(), + "m" to INT4.asNullable(), + ), + contentClosed = true, + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true), + TupleConstraint.Ordered + ) + ) + ) + ), + SuccessTestCase( + name = "AGGREGATE over DECIMALS", + query = "SELECT a, COUNT(*) AS c, SUM(a) AS s, MIN(b) AS m FROM << {'a': 1.0, 'b': 2.0}, {'a': 1.0, 'b': 2.0} >> GROUP BY a", + expected = BagType( + StructType( + fields = mapOf( + "a" to StaticType.DECIMAL, + "c" to INT4, + "s" to StaticType.DECIMAL.asNullable(), + "m" to StaticType.DECIMAL.asNullable(), + ), + contentClosed = true, + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true), + TupleConstraint.Ordered + ) + ) + ) + ), + ) + + @JvmStatic + fun dynamicCalls() = listOf( + SuccessTestCase( + name = "unary plus on varying numeric types -- this cannot return missing!", + query = """ + SELECT +t.a AS a + FROM << + { 'a': CAST(1 AS INT8) }, + { 'a': CAST(1 AS INT4) } + >> AS t + """.trimIndent(), + expected = BagType( + StructType( + fields = mapOf( + "a" to unionOf(INT4, INT8), + ), + contentClosed = true, + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true), + TupleConstraint.Ordered + ) + ) + ) + ), + SuccessTestCase( + name = "unary plus on varying numeric types including missing -- this may return missing", + query = """ + SELECT +t.a AS a + FROM << + { 'a': CAST(1 AS INT8) }, + { 'a': CAST(1 AS INT4) }, + { } + >> AS t + """.trimIndent(), + expected = BagType( + StructType( + fields = mapOf( + "a" to unionOf(INT4, INT8, MISSING), + ), + contentClosed = true, + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true), + 
TupleConstraint.Ordered + ) + ) + ) + ), + SuccessTestCase( + name = "unary plus on varying numeric types including string -- this may return missing", + query = """ + SELECT +t.a AS a + FROM << + { 'a': CAST(1 AS INT8) }, + { 'a': CAST(1 AS INT4) }, + { 'a': 'hello world!' } + >> AS t + """.trimIndent(), + expected = BagType( + StructType( + fields = mapOf( + "a" to unionOf(INT4, INT8, MISSING), + ), + contentClosed = true, + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true), + TupleConstraint.Ordered + ) + ) + ) + ), + SuccessTestCase( + name = "binary plus on varying types -- this will return missing if one of the operands is not a number", + query = """ + SELECT t.a + t.b AS c + FROM << + { 'a': CAST(1 AS INT8), 'b': CAST(1.0 AS DECIMAL) }, + { 'a': CAST(1 AS INT4), 'b': TRUE }, + { 'a': 'hello world!!', 'b': DATE '2023-01-01' } + >> AS t + """.trimIndent(), + expected = BagType( + StructType( + fields = mapOf( + "c" to unionOf(MISSING, DECIMAL), + ), + contentClosed = true, + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true), + TupleConstraint.Ordered + ) + ) + ) + ), + ErrorTestCase( + name = """ + unary plus on non-compatible type -- this cannot resolve to a dynamic call since no function + will ever be invoked. + """.trimIndent(), + query = """ + SELECT VALUE +t.a + FROM << + { 'a': 'hello world!' } + >> AS t + """.trimIndent(), + expected = BagType(MISSING), + problemHandler = assertProblemExists { + Problem( + sourceLocation = UNKNOWN_PROBLEM_LOCATION, + details = PlanningProblemDetails.UnknownFunction( + "pos", + listOf(STRING) + ) + ) + } + ), + ErrorTestCase( + name = """ + unary plus on non-compatible union type -- this cannot resolve to a dynamic call since no function + will ever be invoked. + """.trimIndent(), + query = """ + SELECT VALUE +t.a + FROM << + { 'a': 'hello world!' 
}, + { 'a': <<>> } + >> AS t + """.trimIndent(), + expected = BagType(MISSING), + problemHandler = assertProblemExists { + Problem( + sourceLocation = UNKNOWN_PROBLEM_LOCATION, + details = PlanningProblemDetails.UnknownFunction( + "pos", + listOf(unionOf(STRING, BAG)) + ) + ) + } + ), + ErrorTestCase( + name = """ + unary plus on missing type -- this cannot resolve to a dynamic call since no function + will ever be invoked. + """.trimIndent(), + query = """ + SELECT VALUE +t.a + FROM << + { 'NOT_A': 1 } + >> AS t + """.trimIndent(), + expected = BagType(MISSING), + problemHandler = assertProblemExists { + Problem( + sourceLocation = UNKNOWN_PROBLEM_LOCATION, + details = PlanningProblemDetails.UnknownFunction( + "pos", + listOf(MISSING) + ) + ) + } + ), + ) + + @JvmStatic + fun subqueryCases() = listOf( + SuccessTestCase( + name = "Subquery IN collection", + catalog = "subqueries", + key = PartiQLTest.Key("subquery", "subquery-00"), + expected = BagType( + StructType( + fields = mapOf( + "x" to INT4, + ), + contentClosed = true, + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true), + TupleConstraint.Ordered + ) + ) + ) + ), + SuccessTestCase( + name = "Subquery scalar coercion", + catalog = "subqueries", + key = PartiQLTest.Key("subquery", "subquery-01"), + expected = BagType( + StructType( + fields = mapOf( + "x" to INT4, + ), + contentClosed = true, + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true), + TupleConstraint.Ordered + ) + ) + ) + ), + SuccessTestCase( + name = "Subquery simple JOIN", + catalog = "subqueries", + key = PartiQLTest.Key("subquery", "subquery-02"), + expected = BagType( + StructType( + fields = mapOf( + "x" to INT4, + "y" to INT4, + "z" to INT4, + "a" to INT4, + "b" to INT4, + "c" to INT4, + ), + contentClosed = true, + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true), + TupleConstraint.Ordered + ) + ) + ) + ), + SuccessTestCase( 
+ name = "Subquery scalar coercion", + catalog = "subqueries", + key = PartiQLTest.Key("subquery", "subquery-03"), + expected = BOOL, + ), + ) + } + + sealed class TestCase { + fun toIgnored(reason: String) = + when (this) { + is IgnoredTestCase -> this + else -> IgnoredTestCase(this, reason) + } + + class SuccessTestCase( + val name: String, + val key: PartiQLTest.Key? = null, + val query: String? = null, + val catalog: String? = null, + val catalogPath: List = emptyList(), + val expected: StaticType, + val warnings: ProblemHandler? = null, + ) : TestCase() { + override fun toString(): String = "$name : $query" + } + + class ErrorTestCase( + val name: String, + val key: PartiQLTest.Key? = null, + val query: String? = null, + val catalog: String? = null, + val catalogPath: List = emptyList(), + val note: String? = null, + val expected: StaticType? = null, + val problemHandler: ProblemHandler? = null, + ) : TestCase() { + override fun toString(): String = "$name : $query" + } + + class ThrowingExceptionTestCase( + val name: String, + val query: String, + val catalog: String? = null, + val catalogPath: List = emptyList(), + val note: String? 
= null, + val expectedThrowable: KClass, + ) : TestCase() { + override fun toString(): String { + return "$name : $query" + } + } + + class IgnoredTestCase( + val shouldBe: TestCase, + reason: String, + ) : TestCase() { + override fun toString(): String = "Disabled - $shouldBe" + } + } + + class TestProvider : ArgumentsProvider { + override fun provideArguments(context: ExtensionContext?): Stream { + return parameters.map { Arguments.of(it) }.stream() + } + + private val parameters = listOf( + ErrorTestCase( + name = "Pets should not be accessible #1", + query = "SELECT * FROM pets", + expected = BagType( + unionOf( + StructType( + fields = emptyMap(), + contentClosed = false, + constraints = setOf( + TupleConstraint.Open(true), + TupleConstraint.UniqueAttrs(false), + ) + ), + StructType( + fields = mapOf( + "_1" to StaticType.ANY + ), + contentClosed = true, + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true), + ) + ), + ) + ), + problemHandler = assertProblemExists { + Problem( + UNKNOWN_PROBLEM_LOCATION, + PlanningProblemDetails.UndefinedVariable("pets", false) + ) + } + ), + ErrorTestCase( + name = "Pets should not be accessible #2", + catalog = CATALOG_AWS, + query = "SELECT * FROM pets", + expected = BagType( + unionOf( + StructType( + fields = emptyMap(), + contentClosed = false, + constraints = setOf( + TupleConstraint.Open(true), + TupleConstraint.UniqueAttrs(false), + ) + ), + StructType( + fields = mapOf( + "_1" to StaticType.ANY + ), + contentClosed = true, + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true), + ) + ), + ) + ), + problemHandler = assertProblemExists { + Problem( + UNKNOWN_PROBLEM_LOCATION, + PlanningProblemDetails.UndefinedVariable("pets", false) + ) + } + ), + SuccessTestCase( + name = "Project all explicitly", + catalog = CATALOG_AWS, + catalogPath = listOf("ddb"), + query = "SELECT * FROM pets", + expected = TABLE_AWS_DDB_PETS + ), + SuccessTestCase( + name = 
"Project all implicitly", + catalog = CATALOG_AWS, + catalogPath = listOf("ddb"), + query = "SELECT id, breed FROM pets", + expected = TABLE_AWS_DDB_PETS + ), + SuccessTestCase( + name = "Test #4", + catalog = CATALOG_B, + catalogPath = listOf("b"), + query = "b", + expected = TYPE_B_B_B + ), + SuccessTestCase( + name = "Test #5", + catalog = CATALOG_AWS, + catalogPath = listOf("ddb"), + query = "SELECT * FROM b", + expected = TABLE_AWS_DDB_B + ), + SuccessTestCase( + name = "Test #6", + catalog = CATALOG_AWS, + catalogPath = listOf("b"), + query = "SELECT * FROM b", + expected = TABLE_AWS_B_B + ), + ErrorTestCase( + name = "Test #7", + query = "SELECT * FROM ddb.pets", + expected = BagType( + unionOf( + StructType( + fields = emptyMap(), + contentClosed = false, + constraints = setOf( + TupleConstraint.Open(true), + TupleConstraint.UniqueAttrs(false), + ) + ), + StructType( + fields = mapOf( + "_1" to StaticType.ANY + ), + contentClosed = true, + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true), + ) + ), + ) + ), + problemHandler = assertProblemExists { + Problem( + UNKNOWN_PROBLEM_LOCATION, + PlanningProblemDetails.UndefinedVariable("pets", false) + ) + } + ), + SuccessTestCase( + name = "Test #10", + catalog = CATALOG_B, + query = "b.b", + expected = TYPE_B_B_B + ), + SuccessTestCase( + name = "Test #11", + catalog = CATALOG_B, + catalogPath = listOf("b"), + query = "b.b", + expected = TYPE_B_B_B + ), + SuccessTestCase( + name = "Test #12", + catalog = CATALOG_AWS, + catalogPath = listOf("ddb"), + query = "SELECT * FROM b.b", + expected = TABLE_AWS_B_B + ), + SuccessTestCase( + name = "Test #13", + catalog = CATALOG_AWS, + catalogPath = listOf("ddb"), + query = "SELECT * FROM ddb.b", + expected = TABLE_AWS_DDB_B + ), + SuccessTestCase( + name = "Test #14", + query = "SELECT * FROM aws.ddb.pets", + expected = TABLE_AWS_DDB_PETS + ), + SuccessTestCase( + name = "Test #15", + catalog = CATALOG_AWS, + query = "SELECT * FROM 
aws.b.b", + expected = TABLE_AWS_B_B + ), + SuccessTestCase( + name = "Test #16", + catalog = CATALOG_B, + query = "b.b.b", + expected = TYPE_B_B_B + ), + SuccessTestCase( + name = "Test #17", + catalog = CATALOG_B, + query = "b.b.c", + expected = TYPE_B_B_C + ), + SuccessTestCase( + name = "Test #18", + catalog = CATALOG_B, + catalogPath = listOf("b"), + query = "b.b.b", + expected = TYPE_B_B_B + ), + SuccessTestCase( + name = "Test #19", + query = "b.b.b.c", + expected = TYPE_B_B_B_C + ), + SuccessTestCase( + name = "Test #20", + query = "b.b.b.b", + expected = TYPE_B_B_B_B + ), + SuccessTestCase( + name = "Test #21", + catalog = CATALOG_B, + query = "b.b.b.b", + expected = TYPE_B_B_B_B + ), + SuccessTestCase( + name = "Test #22", + catalog = CATALOG_B, + query = "b.b.b.c", + expected = TYPE_B_B_C + ), + SuccessTestCase( + name = "Test #23", + catalog = CATALOG_B, + catalogPath = listOf("b"), + query = "b.b.b.b", + expected = TYPE_B_B_B_B + ), + SuccessTestCase( + name = "Test #24", + query = "b.b.b.b.b", + expected = TYPE_B_B_B_B_B + ), + SuccessTestCase( + name = "Test #24", + catalog = CATALOG_B, + query = "b.b.b.b.b", + expected = TYPE_B_B_B_B_B + ), + SuccessTestCase( + name = "EQ", + catalog = CATALOG_DB, + catalogPath = DB_SCHEMA_MARKETS, + query = "order_info.customer_id = 1", + expected = TYPE_BOOL + ), + SuccessTestCase( + name = "NEQ", + catalog = CATALOG_DB, + catalogPath = DB_SCHEMA_MARKETS, + query = "order_info.customer_id <> 1", + expected = TYPE_BOOL + ), + SuccessTestCase( + name = "GEQ", + catalog = CATALOG_DB, + catalogPath = DB_SCHEMA_MARKETS, + query = "order_info.customer_id >= 1", + expected = TYPE_BOOL + ), + SuccessTestCase( + name = "GT", + catalog = CATALOG_DB, + catalogPath = DB_SCHEMA_MARKETS, + query = "order_info.customer_id > 1", + expected = TYPE_BOOL + ), + SuccessTestCase( + name = "LEQ", + catalog = CATALOG_DB, + catalogPath = DB_SCHEMA_MARKETS, + query = "order_info.customer_id <= 1", + expected = TYPE_BOOL + ), + 
SuccessTestCase( + name = "LT", + catalog = CATALOG_DB, + catalogPath = DB_SCHEMA_MARKETS, + query = "order_info.customer_id < 1", + expected = TYPE_BOOL + ), + SuccessTestCase( + name = "IN", + catalog = CATALOG_DB, + catalogPath = DB_SCHEMA_MARKETS, + query = "order_info.customer_id IN (1, 2, 3)", + expected = TYPE_BOOL + ), + ErrorTestCase( + name = "IN Failure", + catalog = CATALOG_DB, + catalogPath = DB_SCHEMA_MARKETS, + query = "order_info.customer_id IN 'hello'", + expected = MISSING, + problemHandler = assertProblemExists { + Problem( + UNKNOWN_PROBLEM_LOCATION, + PlanningProblemDetails.UnknownFunction( + "in_collection", + listOf(INT4, STRING), + ) + ) + } + ), + SuccessTestCase( + name = "BETWEEN", + catalog = CATALOG_DB, + catalogPath = DB_SCHEMA_MARKETS, + query = "order_info.customer_id BETWEEN 1 AND 2", + expected = TYPE_BOOL + ), + ErrorTestCase( + name = "BETWEEN Failure", + catalog = CATALOG_DB, + catalogPath = DB_SCHEMA_MARKETS, + query = "order_info.customer_id BETWEEN 1 AND 'a'", + expected = MISSING, + problemHandler = assertProblemExists { + Problem( + UNKNOWN_PROBLEM_LOCATION, + PlanningProblemDetails.UnknownFunction( + "between", + listOf( + INT4, + INT4, + STRING + ), + ) + ) + } + ), + SuccessTestCase( + name = "LIKE", + catalog = CATALOG_DB, + catalogPath = DB_SCHEMA_MARKETS, + query = "order_info.ship_option LIKE '%ABC%'", + expected = TYPE_BOOL + ), + ErrorTestCase( + name = "LIKE Failure", + catalog = CATALOG_DB, + catalogPath = DB_SCHEMA_MARKETS, + query = "order_info.ship_option LIKE 3", + expected = MISSING, + problemHandler = assertProblemExists { + Problem( + UNKNOWN_PROBLEM_LOCATION, + PlanningProblemDetails.UnknownFunction( + "like", + listOf(STRING, INT4), + ) + ) + } + ), + SuccessTestCase( + name = "Case Insensitive success", + catalog = CATALOG_DB, + catalogPath = DB_SCHEMA_MARKETS, + query = "order_info.CUSTOMER_ID = 1", + expected = TYPE_BOOL + ), + // MISSING = 1 + ErrorTestCase( + name = "Case Sensitive failure", + 
catalog = CATALOG_DB, + catalogPath = DB_SCHEMA_MARKETS, + query = "order_info.\"CUSTOMER_ID\" = 1", + expected = NULL + ), + SuccessTestCase( + name = "Case Sensitive success", + catalog = CATALOG_DB, + catalogPath = DB_SCHEMA_MARKETS, + query = "order_info.\"customer_id\" = 1", + expected = TYPE_BOOL + ), + SuccessTestCase( + name = "1-Level Junction", + catalog = CATALOG_DB, + catalogPath = DB_SCHEMA_MARKETS, + query = "(order_info.customer_id = 1) AND (order_info.marketplace_id = 2)", + expected = StaticType.unionOf(BOOL, NULL) + ), + SuccessTestCase( + name = "2-Level Junction", + catalog = CATALOG_DB, + catalogPath = DB_SCHEMA_MARKETS, + query = "(order_info.customer_id = 1) AND (order_info.marketplace_id = 2) OR (order_info.customer_id = 3) AND (order_info.marketplace_id = 4)", + expected = StaticType.unionOf(BOOL, NULL) + ), + SuccessTestCase( + name = "INT and STR Comparison", + catalog = CATALOG_DB, + catalogPath = DB_SCHEMA_MARKETS, + query = "order_info.customer_id = 'something'", + expected = TYPE_BOOL, + ), + ErrorTestCase( + name = "Nonexisting Comparison", + catalog = CATALOG_DB, + catalogPath = DB_SCHEMA_MARKETS, + query = "non_existing_column = 1", + // Function resolves to EQ__ANY_ANY__BOOL + // Which can return BOOL Or NULL + expected = StaticType.unionOf(BOOL, NULL), + problemHandler = assertProblemExists { + Problem( + UNKNOWN_PROBLEM_LOCATION, + PlanningProblemDetails.UndefinedVariable("non_existing_column", false) + ) + } + ), + ErrorTestCase( + name = "Bad comparison", + catalog = CATALOG_DB, + catalogPath = DB_SCHEMA_MARKETS, + query = "order_info.customer_id = 1 AND 1", + expected = MISSING, + problemHandler = assertProblemExists { + Problem( + UNKNOWN_PROBLEM_LOCATION, + PlanningProblemDetails.UnknownFunction( + "and", + listOf(StaticType.BOOL, INT4), + ) + ) + } + ), + ErrorTestCase( + name = "Bad comparison", + catalog = CATALOG_DB, + catalogPath = DB_SCHEMA_MARKETS, + query = "1 AND order_info.customer_id = 1", + expected = MISSING, + 
problemHandler = assertProblemExists { + Problem( + UNKNOWN_PROBLEM_LOCATION, + PlanningProblemDetails.UnknownFunction( + "and", + listOf(INT4, StaticType.BOOL), + ) + ) + } + ), + ErrorTestCase( + name = "Unknown column", + catalog = CATALOG_DB, + catalogPath = DB_SCHEMA_MARKETS, + query = "SELECT unknown_col FROM orders WHERE customer_id = 1", + expected = BagType( + StructType( + fields = mapOf("unknown_col" to AnyType()), + contentClosed = true, + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true), + TupleConstraint.Ordered + ) + ) + ), + problemHandler = assertProblemExists { + Problem( + UNKNOWN_PROBLEM_LOCATION, + PlanningProblemDetails.UndefinedVariable("unknown_col", false) + ) + } + ), + SuccessTestCase( + name = "LIMIT INT", + catalog = CATALOG_AWS, + catalogPath = listOf("ddb"), + query = "SELECT * FROM pets LIMIT 5", + expected = TABLE_AWS_DDB_PETS + ), + ErrorTestCase( + name = "LIMIT STR", + catalog = CATALOG_AWS, + catalogPath = listOf("ddb"), + query = "SELECT * FROM pets LIMIT '5'", + expected = TABLE_AWS_DDB_PETS, + problemHandler = assertProblemExists { + Problem( + UNKNOWN_PROBLEM_LOCATION, + PlanningProblemDetails.UnexpectedType(STRING, setOf(INT)) + ) + } + ), + SuccessTestCase( + name = "OFFSET INT", + catalog = CATALOG_AWS, + catalogPath = listOf("ddb"), + query = "SELECT * FROM pets LIMIT 1 OFFSET 5", + expected = TABLE_AWS_DDB_PETS + ), + ErrorTestCase( + name = "OFFSET STR", + catalog = CATALOG_AWS, + catalogPath = listOf("ddb"), + query = "SELECT * FROM pets LIMIT 1 OFFSET '5'", + expected = TABLE_AWS_DDB_PETS, + problemHandler = assertProblemExists { + Problem( + UNKNOWN_PROBLEM_LOCATION, + PlanningProblemDetails.UnexpectedType(STRING, setOf(INT)) + ) + } + ), + SuccessTestCase( + name = "CAST", + catalog = CATALOG_AWS, + catalogPath = listOf("ddb"), + query = "SELECT CAST(breed AS INT) AS cast_breed FROM pets", + expected = BagType( + StructType( + fields = mapOf("cast_breed" to unionOf(INT, 
MISSING)), + contentClosed = true, + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true), + TupleConstraint.Ordered + ) + ) + ) + ), + SuccessTestCase( + name = "UPPER", + catalog = CATALOG_AWS, + catalogPath = listOf("ddb"), + query = "SELECT UPPER(breed) AS upper_breed FROM pets", + expected = BagType( + StructType( + fields = mapOf("upper_breed" to STRING), contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true), TupleConstraint.Ordered) + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true), + TupleConstraint.Ordered + ) ) ) ), - // EXCLUDE regression test (behavior subject to change pending RFC); could give error/warning SuccessTestCase( - name = "invalid exclude tuple attr step", - query = """SELECT * EXCLUDE t.b -- `t.b` does not exist - FROM << - { - 'a': << - { 'b': 0 }, - { 'b': 1 }, - { 'b': 2 } - >> - } - >> AS t""", + name = "Non-tuples", + query = "SELECT a FROM << [ 1, 1.0 ] >> AS a", expected = BagType( - elementType = StructType( - fields = mapOf( - "a" to BagType( - elementType = StructType( - fields = mapOf( - "b" to StaticType.INT - ), - contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)) - ) - ) - ), + StructType( + fields = mapOf("a" to ListType(unionOf(INT4, StaticType.DECIMAL))), contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true), TupleConstraint.Ordered) + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true), + TupleConstraint.Ordered + ) ) ) ), - // EXCLUDE regression test (behavior subject to change pending RFC); could give error/warning - ErrorTestCase( - name = "invalid exclude root", - query = """SELECT * EXCLUDE nonsense.b -- `nonsense` does not exist in binding tuples + SuccessTestCase( + name = "Non-tuples in SELECT VALUE", + query = "SELECT VALUE a FROM << [ 1, 1.0 ] 
>> AS a", + expected = + BagType(ListType(unionOf(INT4, StaticType.DECIMAL))) + ), + SuccessTestCase( + name = "SELECT VALUE", + query = "SELECT VALUE [1, 1.0] FROM <<>>", + expected = + BagType(ListType(unionOf(INT4, StaticType.DECIMAL))) + ), + SuccessTestCase( + name = "Duplicate fields in struct", + query = """ + SELECT t.a AS a FROM << - { - 'a': << - { 'b': 0 }, - { 'b': 1 }, - { 'b': 2 } - >> - } - >> AS t""", + { 'a': 1, 'a': 'hello' } + >> AS t + """, expected = BagType( - elementType = StructType( - fields = mapOf( - "a" to BagType( - elementType = StructType( - fields = mapOf( - "b" to StaticType.INT - ), - contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)) - ) - ) + StructType( + fields = listOf( + StructType.Field("a", unionOf(INT4, STRING)) ), contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true), TupleConstraint.Ordered) - ) - ), - problemHandler = assertProblemExists { - Problem( - UNKNOWN_PROBLEM_LOCATION, - PlanningProblemDetails.UnresolvedExcludeExprRoot("nonsense") + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true), + TupleConstraint.Ordered + ) ) - } + ) ), - // EXCLUDE regression test (behavior subject to change pending RFC); could give error/warning SuccessTestCase( - name = "exclude with unions and last step collection index", - query = """SELECT * EXCLUDE t.a[0].c -- `c`'s type to be unioned with `MISSING` - FROM << - { - 'a': [ - { - 'b': 0, - 'c': 0 - }, - { - 'b': 1, - 'c': NULL - }, - { - 'b': 2, - 'c': 0.1 - } - ] - } - >> AS t""", + name = "Duplicate fields in ordered STRUCT. NOTE: b.b.d is an ordered struct with two attributes (e). 
First is INT4.", + query = """ + SELECT d.e AS e + FROM << b.b.d >> AS d + """, expected = BagType( - elementType = StructType( - fields = mapOf( - "a" to ListType( - elementType = StaticType.unionOf( - StructType( - fields = mapOf( - "b" to StaticType.INT, - "c" to StaticType.INT.asOptional() - ), - contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)) - ), - StructType( - fields = mapOf( - "b" to StaticType.INT, - "c" to StaticType.NULL.asOptional() - ), - contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)) - ), - StructType( - fields = mapOf( - "b" to StaticType.INT, - "c" to StaticType.DECIMAL.asOptional() - ), - contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true)) - ) - ) - ) + StructType( + fields = listOf( + StructType.Field("e", INT4) ), contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true), TupleConstraint.Ordered) + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.UniqueAttrs(true), + TupleConstraint.Ordered + ) ) ) ), SuccessTestCase( - name = "EXCLUDE using a catalog", - catalog = CATALOG_B, - query = "SELECT * EXCLUDE t.c FROM b.b.b AS t", + name = "Duplicate fields in struct", + query = """ + SELECT a AS a + FROM << + { 'a': 1, 'a': 'hello' } + >> AS t + """, expected = BagType( - elementType = StructType( - fields = mapOf( - "b" to StructType( - fields = mapOf( - "b" to StaticType.INT - ), - contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true), TupleConstraint.Ordered) - ), + StructType( + fields = listOf( + StructType.Field("a", unionOf(INT4, STRING)) ), contentClosed = true, - constraints = setOf(TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true), TupleConstraint.Ordered) + constraints = setOf( + TupleConstraint.Open(false), + 
TupleConstraint.UniqueAttrs(true), + TupleConstraint.Ordered + ) ) ) ), SuccessTestCase( - name = "BITWISE_AND_1", - query = "1 & 2", - expected = StaticType.INT - ), - // casting to a parameterized type produced Missing. - SuccessTestCase( - name = "BITWISE_AND_2", - query = "CAST(1 AS INT2) & CAST(2 AS INT2)", - expected = StaticType.unionOf(StaticType.INT2, MISSING) - ), - SuccessTestCase( - name = "BITWISE_AND_3", - query = "CAST(1 AS INT4) & CAST(2 AS INT4)", - expected = StaticType.unionOf(StaticType.INT4, MISSING) - ), - SuccessTestCase( - name = "BITWISE_AND_4", - query = "CAST(1 AS INT8) & CAST(2 AS INT8)", - expected = StaticType.unionOf(StaticType.INT8, MISSING) + name = "Current User", + query = "CURRENT_USER", + expected = unionOf(STRING, NULL) ), SuccessTestCase( - name = "BITWISE_AND_5", - query = "CAST(1 AS INT2) & CAST(2 AS INT4)", - expected = StaticType.unionOf(StaticType.INT4, MISSING) + name = "Trim", + query = "trim(' ')", + expected = STRING ), SuccessTestCase( - name = "BITWISE_AND_6", - query = "CAST(1 AS INT2) & CAST(2 AS INT8)", - expected = StaticType.unionOf(StaticType.INT8, MISSING) + name = "Current User Concat", + query = "CURRENT_USER || 'hello'", + expected = unionOf(STRING, NULL) ), SuccessTestCase( - name = "BITWISE_AND_7", - query = "CAST(1 AS INT2) & 2", - expected = StaticType.unionOf(StaticType.INT, MISSING) + name = "Current User Concat in WHERE", + query = "SELECT VALUE a FROM [ 0 ] AS a WHERE CURRENT_USER = 'hello'", + expected = BagType(INT4) ), SuccessTestCase( - name = "BITWISE_AND_8", - query = "CAST(1 AS INT4) & CAST(2 AS INT8)", - expected = StaticType.unionOf(StaticType.INT8, MISSING) + name = "TRIM_2", + query = "trim(' ' FROM ' Hello, World! ')", + expected = STRING ), SuccessTestCase( - name = "BITWISE_AND_9", - query = "CAST(1 AS INT4) & 2", - expected = StaticType.unionOf(StaticType.INT, MISSING) + name = "TRIM_1", + query = "trim(' Hello, World! 
')", + expected = STRING ), SuccessTestCase( - name = "BITWISE_AND_10", - query = "CAST(1 AS INT8) & 2", - expected = StaticType.unionOf(StaticType.INT, MISSING) - ), - ErrorTestCase( - name = "BITWISE_AND_NULL_OPERAND", - query = "1 & NULL", - expected = StaticType.NULL, - problemHandler = assertProblemExists { - Problem( - UNKNOWN_PROBLEM_LOCATION, - SemanticProblemDetails.ExpressionAlwaysReturnsNullOrMissing - ) - } - ), - ErrorTestCase( - name = "BITWISE_AND_MISSING_OPERAND", - query = "1 & MISSING", - expected = StaticType.MISSING, - problemHandler = assertProblemExists { - Problem( - UNKNOWN_PROBLEM_LOCATION, - SemanticProblemDetails.ExpressionAlwaysReturnsMissing - ) - } + name = "TRIM_3", + query = "trim(LEADING ' ' FROM ' Hello, World! ')", + expected = STRING ), ErrorTestCase( - name = "BITWISE_AND_NON_INT_OPERAND", - query = "1 & 'NOT AN INT'", - expected = StaticType.MISSING, + name = "TRIM_2_error", + query = "trim(2 FROM ' Hello, World! ')", + expected = MISSING, problemHandler = assertProblemExists { Problem( UNKNOWN_PROBLEM_LOCATION, - SemanticProblemDetails.IncompatibleDatatypesForOp( - listOf( - INT, STRING - ), - Rex.Binary.Op.BITWISE_AND.name + PlanningProblemDetails.UnknownFunction( + "trim_chars", + args = listOf(STRING, INT4) ) ) } ), - SuccessTestCase( - name = "NULLIF - NULL first arg", - query = "NULLIF(NULL,'')", - expected = StaticType.NULL, - warnings = assertProblemExists { - Problem( - UNKNOWN_PROBLEM_LOCATION, - SemanticProblemDetails.ExpressionAlwaysReturnsNullOrMissing - ) - } - ), - SuccessTestCase( - name = "NULLIF - NULL second arg", - query = "NULLIF('',NULL)", - expected = StaticType.unionOf(StaticType.NULL, StaticType.STRING) - ) ) - - private fun assertProblemExists(problem: () -> Problem) = ProblemHandler { problems, ignoreSourceLocation -> - when (ignoreSourceLocation) { - true -> assertTrue("Expected to find ${problem.invoke()} in $problems") { problems.any { it.details == problem.invoke().details } } - false -> 
assertTrue("Expected to find ${problem.invoke()} in $problems") { problems.any { it == problem.invoke() } } - } - } } private fun runTest(tc: TestCase) = when (tc) { is SuccessTestCase -> runTest(tc) is ErrorTestCase -> runTest(tc) is ThrowingExceptionTestCase -> runTest(tc) + is TestCase.IgnoredTestCase -> runTest(tc) } @OptIn(ExperimentalPartiQLSchemaInferencer::class) private fun runTest(tc: ThrowingExceptionTestCase) { - val session = PlannerSession( + val session = PartiQLPlanner.Session( tc.query.hashCode().toString(), USER_ID, tc.catalog, @@ -2810,7 +3662,7 @@ class PartiQLSchemaInferencerTests { @OptIn(ExperimentalPartiQLSchemaInferencer::class) private fun runTest(tc: SuccessTestCase) { - val session = PlannerSession( + val session = PartiQLPlanner.Session( tc.query.hashCode().toString(), USER_ID, tc.catalog, @@ -2820,24 +3672,37 @@ class PartiQLSchemaInferencerTests { ) val collector = ProblemCollector() val ctx = PartiQLSchemaInferencer.Context(session, PLUGINS, collector) - val result = PartiQLSchemaInferencer.infer(tc.query, ctx) - assert(collector.problems.none { it.details.severity == ProblemSeverity.ERROR }) { - collector.problems.toString() - } - tc.warnings?.handle(collector.problems, true) + val hasQuery = tc.query != null + val hasKey = tc.key != null + if (hasQuery == hasKey) { + error("Test must have one of either `query` or `key`") + } + val input = tc.query ?: testProvider[tc.key!!]!!.statement - assert(tc.expected == result) { + val result = PartiQLSchemaInferencer.inferInternal(input, ctx) + assert(collector.problems.isEmpty()) { + buildString { + appendLine(collector.problems.toString()) + appendLine() + PlanPrinter.append(this, result.first) + } + } + val actual = result.second + assert(tc.expected == actual) { buildString { - appendLine("Expected: ${tc.expected}") - appendLine("Actual: $result") + appendLine() + appendLine("Expect: ${tc.expected}") + appendLine("Actual: $actual") + appendLine() + PlanPrinter.append(this, result.first) } 
} } @OptIn(ExperimentalPartiQLSchemaInferencer::class) private fun runTest(tc: ErrorTestCase) { - val session = PlannerSession( + val session = PartiQLPlanner.Session( tc.query.hashCode().toString(), USER_ID, tc.catalog, @@ -2847,12 +3712,30 @@ class PartiQLSchemaInferencerTests { ) val collector = ProblemCollector() val ctx = PartiQLSchemaInferencer.Context(session, PLUGINS, collector) - val result = PartiQLSchemaInferencer.infer(tc.query, ctx) + + val hasQuery = tc.query != null + val hasKey = tc.key != null + if (hasQuery == hasKey) { + error("Test must have one of either `query` or `key`") + } + val input = tc.query ?: testProvider[tc.key!!]!!.statement + val result = PartiQLSchemaInferencer.inferInternal(input, ctx) + + assert(collector.problems.isNotEmpty()) { + buildString { + appendLine("Expected to find problems, but none were found.") + appendLine() + PlanPrinter.append(this, result.first) + } + } if (tc.expected != null) { - assert(tc.expected == result) { + assert(tc.expected == result.second) { buildString { - appendLine("Expected: ${tc.expected}") - appendLine("Actual: $result") + appendLine() + appendLine("Expect: ${tc.expected}") + appendLine("Actual: ${result.second}") + appendLine() + PlanPrinter.append(this, result.first) } } } @@ -2862,20 +3745,13 @@ class PartiQLSchemaInferencerTests { tc.problemHandler?.handle(collector.problems, true) } - fun interface ProblemHandler { - fun handle(problems: List, ignoreSourceLocation: Boolean) + private fun runTest(tc: TestCase.IgnoredTestCase) { + assertThrows { + runTest(tc.shouldBe) + } } - @Test - fun test() { - runTest( - ErrorTestCase( - name = "Case Sensitive failure", - catalog = CATALOG_DB, - catalogPath = DB_SCHEMA_MARKETS, - query = "order_info.\"CUSTOMER_ID\" = 1", - expected = TYPE_BOOL - ) - ) + fun interface ProblemHandler { + fun handle(problems: List, ignoreSourceLocation: Boolean) } } diff --git a/partiql-lang/src/test/resources/catalogs/b/b/c.ion 
b/partiql-lang/src/test/resources/catalogs/b/b/c.ion deleted file mode 100644 index a21274f54..000000000 --- a/partiql-lang/src/test/resources/catalogs/b/b/c.ion +++ /dev/null @@ -1 +0,0 @@ -"int" diff --git a/partiql-parser/src/main/kotlin/org/partiql/parser/PartiQLParser.kt b/partiql-parser/src/main/kotlin/org/partiql/parser/PartiQLParser.kt index 9274e8c66..4b8f08616 100644 --- a/partiql-parser/src/main/kotlin/org/partiql/parser/PartiQLParser.kt +++ b/partiql-parser/src/main/kotlin/org/partiql/parser/PartiQLParser.kt @@ -14,7 +14,7 @@ package org.partiql.parser -import org.partiql.ast.AstNode +import org.partiql.ast.Statement public interface PartiQLParser { @@ -23,7 +23,7 @@ public interface PartiQLParser { public data class Result( val source: String, - val root: AstNode, + val root: Statement, val locations: SourceLocations, ) } diff --git a/partiql-parser/src/main/kotlin/org/partiql/parser/impl/PartiQLParserDefault.kt b/partiql-parser/src/main/kotlin/org/partiql/parser/impl/PartiQLParserDefault.kt index 5342e1963..337f98cf4 100644 --- a/partiql-parser/src/main/kotlin/org/partiql/parser/impl/PartiQLParserDefault.kt +++ b/partiql-parser/src/main/kotlin/org/partiql/parser/impl/PartiQLParserDefault.kt @@ -216,6 +216,7 @@ import org.partiql.value.dateValue import org.partiql.value.datetime.DateTimeException import org.partiql.value.datetime.DateTimeValue import org.partiql.value.decimalValue +import org.partiql.value.int32Value import org.partiql.value.int64Value import org.partiql.value.intValue import org.partiql.value.missingValue @@ -435,7 +436,7 @@ internal class PartiQLParserDefault : PartiQLParser { ): PartiQLParser.Result { val locations = SourceLocations.Mutable() val visitor = Visitor(locations, tokens.parameterIndexes) - val root = visitor.visitAs(tree) + val root = visitor.visitAs(tree) as Statement return PartiQLParser.Result( source = source, root = root, @@ -1931,13 +1932,31 @@ internal class PartiQLParserDefault : PartiQLParser { } override fun 
visitLiteralInteger(ctx: GeneratedParser.LiteralIntegerContext) = translate(ctx) { - val n = ctx.LITERAL_INTEGER().text.toInt() - val v = try { - int64Value(n.toLong()) - } catch (_: java.lang.NumberFormatException) { - intValue(n.toBigInteger()) + val n = ctx.LITERAL_INTEGER().text + + // 1st, try parse as int + try { + val v = n.toInt(10) + return@translate exprLit(int32Value(v)) + } catch (ex: NumberFormatException) { + // ignore + } + + // 2nd, try parse as long + try { + val v = n.toLong(10) + return@translate exprLit(int64Value(v)) + } catch (ex: NumberFormatException) { + // ignore + } + + // 3rd, try parse as BigInteger + try { + val v = BigInteger(n) + return@translate exprLit(intValue(v)) + } catch (ex: NumberFormatException) { + throw ex } - exprLit(v) } override fun visitLiteralDate(ctx: GeneratedParser.LiteralDateContext) = translate(ctx) { @@ -2033,6 +2052,7 @@ internal class PartiQLParserDefault : PartiQLParser { val n = ctx.arg0?.text?.toInt() when (ctx.datatype.type) { GeneratedParser.FLOAT -> when (n) { + null -> typeFloat64() 32 -> typeFloat32() 64 -> typeFloat64() else -> throw error(ctx.datatype, "Invalid FLOAT precision. 
Expected 32 or 64") diff --git a/partiql-parser/src/test/kotlin/org/partiql/parser/impl/PartiQLParserSessionAttributeTests.kt b/partiql-parser/src/test/kotlin/org/partiql/parser/impl/PartiQLParserSessionAttributeTests.kt index e1d632ac4..1748db55f 100644 --- a/partiql-parser/src/test/kotlin/org/partiql/parser/impl/PartiQLParserSessionAttributeTests.kt +++ b/partiql-parser/src/test/kotlin/org/partiql/parser/impl/PartiQLParserSessionAttributeTests.kt @@ -8,7 +8,7 @@ import org.partiql.ast.exprLit import org.partiql.ast.exprSessionAttribute import org.partiql.ast.statementQuery import org.partiql.value.PartiQLValueExperimental -import org.partiql.value.int64Value +import org.partiql.value.int32Value import kotlin.test.assertEquals @OptIn(PartiQLValueExperimental::class) @@ -48,7 +48,7 @@ class PartiQLParserSessionAttributeTests { query { exprBinary( op = Expr.Binary.Op.EQ, - lhs = exprLit(int64Value(1)), + lhs = exprLit(int32Value(1)), rhs = exprSessionAttribute(Expr.SessionAttribute.Attribute.CURRENT_USER) ) } diff --git a/partiql-plan/README.md b/partiql-plan/README.md deleted file mode 100644 index 932699cda..000000000 --- a/partiql-plan/README.md +++ /dev/null @@ -1,35 +0,0 @@ -# PartiQL Plan - -## About - -This package contains an early implementation of the PartiQL Plan data structures. Specification work is ongoing, and -this package should be considered experimental. - -## Usage - -The data structures in `org.partiql.plan` can be used via the latest [partiql-lang-kotlin](https://central.sonatype.com/artifact/org.partiql/partiql-lang-kotlin/0.9.3) JAR and using the AstToRel translator (`org.partiql.lang.planner.transforms.AstToRel`). 
- -For example, - -```kotlin -val parser = PartiQLParserBuilder.standard().build() -val ast = parser.parseAstStatement(input) -val plan = AstToPlan.transform(ast) - -println(PlanPrinter.toString(plan)) -``` - -## Generating Sources - -> Have the code generator built, `./gradlew :lib:sprout:install` - -```shell -# running from the package root -./lib/sprout/install/sprout/bin/sprout generate kotlin\ - -o ./ir\ - -p org.partiql.plan\ - -u Plan\ - -m DATA\ - --poems visitor --poems builder \ - ./partiql-plan/src/main/resources/partiql_plan.ion -``` diff --git a/partiql-plan/build.gradle.kts b/partiql-plan/build.gradle.kts index 9ef2e1fd6..63683435a 100644 --- a/partiql-plan/build.gradle.kts +++ b/partiql-plan/build.gradle.kts @@ -21,7 +21,7 @@ plugins { } dependencies { - implementation(project(":partiql-types")) + api(project(":partiql-types")) implementation(Deps.ionElement) implementation(Deps.kotlinReflect) } @@ -51,6 +51,7 @@ val generate = tasks.register("generate") { "--poems", "visitor", "--poems", "builder", "--poems", "util", + "--opt-in", "org.partiql.value.PartiQLValueExperimental", "./src/main/resources/partiql_plan.ion" ) } diff --git a/partiql-plan/src/main/kotlin/org/partiql/plan/debug/PlanPrinter.kt b/partiql-plan/src/main/kotlin/org/partiql/plan/debug/PlanPrinter.kt index 4244a56c8..6a268874b 100644 --- a/partiql-plan/src/main/kotlin/org/partiql/plan/debug/PlanPrinter.kt +++ b/partiql-plan/src/main/kotlin/org/partiql/plan/debug/PlanPrinter.kt @@ -1,6 +1,5 @@ package org.partiql.plan.debug -import org.partiql.plan.Common import org.partiql.plan.PlanNode import org.partiql.plan.Rel import org.partiql.plan.visitor.PlanBaseVisitor @@ -67,7 +66,7 @@ object PlanPrinter { } out.append(EOL) // print child nodes - val children = node.children.filter { it !is Common }.sortedWith(relLast) + val children = node.children.sortedWith(relLast) children.forEachIndexed { i, child -> val args = Args(out, levels + !last, last = i == children.size - 1) 
child.accept(Visitor, args) @@ -78,12 +77,32 @@ object PlanPrinter { private fun PlanNode.primitives(): List> = javaClass.kotlin.memberProperties .filter { val t = it.returnType.jvmErasure - val notChildren = it.name != "children" + val notChildrenOrId = it.name != "children" && it.name != "_id" val notNode = !t.isSubclassOf(PlanNode::class) // not currently correct val notCollectionOfNodes = !(t.isSubclassOf(Collection::class)) - notChildren && notNode && notCollectionOfNodes && it.visibility == KVisibility.PUBLIC + notChildrenOrId && notNode && notCollectionOfNodes && it.visibility == KVisibility.PUBLIC } .map { it.name to it.get(this) } + + // override fun visitIdentifierSymbol(node: Identifier.Symbol, ctx: Args): Unit = with(ctx) { + // out.append(lead) + // val sql = when (node.caseSensitivity) { + // Identifier.CaseSensitivity.SENSITIVE -> "\"${node.symbol}\"" + // Identifier.CaseSensitivity.INSENSITIVE -> node.symbol + // } + // out.append(sql) + // out.append(EOL) + // } + // + // override fun visitIdentifierQualified(node: Identifier.Qualified, ctx: Args): Unit = with(ctx) { + // out.append(lead) + // val root = visitIdentifierSymbol(node.root, ctx) + // val args = Args(out, levels + !last, last = false) + // val steps = node.steps.map { visitIdentifierSymbol(it, args) } + // val sql = (listOf(root) + steps).joinToString(".") + // out.append(sql) + // out.append(EOL) + // } } } diff --git a/partiql-plan/src/main/resources/partiql_plan.ion b/partiql-plan/src/main/resources/partiql_plan.ion index 26ee7b497..170fa2901 100644 --- a/partiql-plan/src/main/resources/partiql_plan.ion +++ b/partiql-plan/src/main/resources/partiql_plan.ion @@ -1,266 +1,320 @@ -// Experimental plan representation - imports::{ kotlin: [ - any::'kotlin.Any', - ion::'com.amazon.ionelement.api.IonElement', + partiql_value::'org.partiql.value.PartiQLValue', static_type::'org.partiql.types.StaticType', + scalar_signature::'org.partiql.types.function.FunctionSignature$Scalar', + 
aggregation_signature::'org.partiql.types.function.FunctionSignature$Aggregation', ], } -// Top-Level structure for a PartiQL Plan parti_q_l_plan::{ - version: [ PARTIQL_V0 ], - root: rex, + globals: list::[global], // (globals ...) + statement: statement, // (statement ...) } -// Grouping of common fields without implications of interfaces or inheritance -common::{ - type_env: list::[attribute], - properties: set::[property], - metas: map::[string,any], -} +// Globals -attribute::{ - name: string, - type: static_type +global::{ + path: '.identifier.qualified', + type: static_type, } -// Relation properties -property::[ - ORDERED, -] +// Functions -// Representation of ` AS ` in projections -binding::{ - name: string, - value: rex, +fn::{ + signature: scalar_signature, } -// Representation of `a_i : e_i` where a and e are expressions -field::{ - name: rex, - value: rex, +agg::{ + signature: aggregation_signature, } -// Identifier case rules -case::[ - SENSITIVE, - INSENSITIVE, -] +// Statements -// Define in `_` once supported -step::[ - key::{ - value: rex, - case: case, +statement::[ + query::{ + root: rex, }, - wildcard::{}, - unpivot::{}, ] -// Define in `_` once supported -sort_spec::{ - value: rex, - dir: [ ASC, DESC ], - nulls: [ FIRST, LAST ], -} +// Identifiers -// Rex.Call Arguments -arg::[ - value::{ - name: optional::string, - value: rex, +identifier::[ + symbol::{ + symbol: string, + case_sensitivity: case_sensitivity, + }, + qualified::{ + root: symbol, + steps: list::[symbol], }, - type::{ - name: optional::string, - type: static_type, - } + _::[ + case_sensitivity::[ + SENSITIVE, + INSENSITIVE, + ], + ], ] -// Exclude expr and step -exclude_expr::{ - root: string, - root_case: case, - steps: list::[exclude_step], -} +// Rex +rex::{ + type: static_type, + op:[ -exclude_step::[ - tuple_attr::{ - attr: string, - case: case, - }, - collection_index::{ - index: int, - }, - tuple_wildcard::{}, - collection_wildcard::{}, -] + lit::{ + value: partiql_value, 
+ }, -branch::{ - condition: rex, - value: rex, -} + var::{ + ref: int, + }, -// Operators that return binding collections -rel::[ - // FROM [AS ] [AT ] [BY ] - scan::{ - common: common, - value: rex, - alias: optional::string, - at: optional::string, - by: optional::string, - }, - // UNPIVOT [AS ] [AT ] [BY ] - unpivot::{ - common: common, - value: rex, - alias: optional::string, - at: optional::string, - by: optional::string, - }, - // WHERE, HAVING - filter::{ - common: common, - input: rel, - condition: rex, - }, - // ORDER BY - sort::{ - common: common, - input: rel, - specs: list::[sort_spec], - }, - // UNION, INTERSECT, EXCEPT - bag::{ - common: common, - lhs: rel, - rhs: rel, - op: [ UNION, INTERSECT, EXCEPT ], - }, - // LIMIT, OFFSET, FETCH - fetch::{ - common: common, - input: rel, - limit: rex, - offset: rex, - }, - // SELECT - project::{ - common: common, - input: rel, - bindings: list::[binding], - }, - // FROM x, y / FROM x JOIN y - join::{ - common: common, - lhs: rel, - rhs: rel, - condition: optional::rex, - type: [ INNER, LEFT, RIGHT, FULL ], - }, - // SELECT [GROUP BY ] - aggregate::{ - common: common, - input: rel, - calls: list::[binding], - groups: list::[binding], - strategy: [ FULL, PARTIAL ], - }, - exclude::{ - common: common, - input: rel, - exprs: list::[exclude_expr], - } -] + global::{ + ref: int, + }, -// Operators that return any value -// Perhaps "id" and "path" can be combined into a single item "ref" -rex::[ - id::{ - name: string, - case: case, - qualifier: [ UNQUALIFIED, LOCALS_FIRST ], - type: optional::static_type - }, - path::{ - root: rex, - steps: list::[step], - type: optional::static_type - }, - lit::{ - value: ion, - type: optional::static_type - }, - unary::{ - value: rex, - op: [ NOT, POS, NEG, ], - type: optional::static_type - }, - binary::{ - lhs: rex, - rhs: rex, - op: [ - PLUS, MINUS, TIMES, DIV, MODULO, CONCAT, BITWISE_AND, - AND, OR, - EQ, NEQ, GTE, GT, LT, LTE, + path::{ + root: rex, + steps: list::[step], + _: 
[ + step::[ + // The key MUST be an integer expression. Ex: a[0], a[1 + 1] + index::{ key: rex }, + + // Case-sensitive lookup. The key MUST be a string expression. Ex: a["b"], a."b", a[CAST(b AS STRING)] + key::{ key: rex }, + + // Case-insensitive lookup. The key MUST be a literal string. Ex: a.b + symbol::{ key: string }, + + // For arrays. Ex: a[*] + // TODO: Do we need this? According to specification: [1,2,3][*] ⇔ SELECT VALUE v FROM [1, 2, 3] AS v + wildcard::{}, + + // For tuples. Ex: a.* + // TODO: Do we need this? According to specification: {'a':1, 'b':2}.* ⇔ SELECT VALUE v FROM UNPIVOT {'a':1, 'b':2} AS v + unpivot::{}, + ], + ], + }, + + call::[ + static::{ + fn: fn, + args: list::[rex] + }, + + // Represents a dynamic function call. If all candidates are exhausted, dynamic calls will return MISSING. + // + // args: represent the original typed arguments. These will eventually be wrapped by coercions from [candidates]. + // candidates: represent the potentially applicable resolved functions with coercions. Each of these candidates + // should be overloaded functions of the same name and number of arguments. 
+ dynamic::{ + args: list::[rex], + candidates: list::[candidate], + _: [ + candidate::{ + fn: fn, + coercions: list::[optional::fn] + } + ] + } ], - type: optional::static_type - }, - call::{ - id: string, - args: list::[arg], - type: optional::static_type, - }, - switch::{ - match: optional::rex, - branches: list::[branch], - default: optional::rex, - type: optional::static_type, - }, - agg::{ - id: string, - args: list::[rex], - modifier: [ ALL, DISTINCT ], - type: optional::static_type - // filter: rex — later SQL feature, not sure if we'll support this - }, - collection::[ - array::{ - values: list::[rex], - type: optional::static_type, + + case::{ + branches: list::[branch], + default: rex, + _: [ + branch::{ + condition: rex, + rex: rex, + }, + ], }, - bag::{ + + collection::{ values: list::[rex], - type: optional::static_type, + }, + + struct::{ + fields: list::[field], + _: [ + field::{ + k: rex, + v: rex, + }, + ], + }, + + pivot::{ + key: rex, + value: rex, + rel: rel, + }, + + subquery::{ + select: select, + coercion: [ SCALAR, ROW ], + }, + + select::{ + constructor: rex, + rel: rel, + }, + + // SELECT v1.*, e2 AS a, v3.* + // + // SELECT VALUE TUPLEUNION( + // CASE WHEN v1 IS TUPLE THEN v1 ELSE {'_1': v1} END, + // {'a':e2 }, + // CASE WHEN v3 IS TUPLE THEN v3 ELSE {'_2': v3} END + // ) + // + // Tuple Union Function Signature: (Array) -> Struct + tuple_union::{ + args: list::[rex], + }, + + err::{ + message: string, }, ], - // - If binding.name is '*' and binding.rex is a struct, the fields of binding.rex are merged to this struct - // - If binding.name is '*' and binding.rex is not a struct, the field _n will be add to this struct where n - // is the ordinal of the field in the final merged struct. - // - Else, add the pair (binding.name, binding.rex) to the final merged struct. 
- tuple::{ - fields: list::[field], - type: optional::static_type +} + +// Rel + +rel::{ + type: { + schema: list::[binding], + props: set::[prop], }, - query::[ - scalar::[ - subquery::{ - query: '.rex.query.collection', // consider changing to query.scalar.subquery - type: optional::static_type - }, - pivot::{ - rel: rel, - value: rex, - at: rex, // consider changing to key - type: optional::static_type - }, - ], - collection::{ - rel: rel, - constructor: optional::rex, // consider further dividing this into query.collection.subquery - type: optional::static_type + op: [ + + scan::{ + rex: rex, + }, + + scan_indexed::{ + rex: rex, + }, + + unpivot::{ + rex: rex, + }, + + distinct::{ + input: rel, + }, + + filter::{ + input: rel, + predicate: rex, + }, + + sort::{ + input: rel, + specs: list::[spec], + _: [ + spec::{ + rex: rex, + order: order, + }, + order::[ + ASC_NULLS_LAST, + ASC_NULLS_FIRST, + DESC_NULLS_LAST, + DESC_NULLS_FIRST, + ], + ], + }, + + union::{ + lhs: rel, + rhs: rel, + }, + + intersect::{ + lhs: rel, + rhs: rel, + }, + + except::{ + lhs: rel, + rhs: rel, + }, + + limit::{ + input: rel, + limit: rex, + }, + + offset::{ + input: rel, + offset: rex, + }, + + project::{ + input: rel, + projections: list::[rex], + }, + + join::{ + lhs: rel, + rhs: rel, + rex: rex, // The Join Expression (required) (can be set to TRUE) + type: [ + INNER, // Inner Join + LEFT, // Left Outer Join + RIGHT, // Right Outer Join + FULL // Full Outer Join + ], + }, + + aggregate::{ + input: rel, + strategy: [ FULL, PARTIAL ], + calls: list::[call], + groups: list::[rex], + _: [ + call::{ + agg: agg, + args: list::[rex], + }, + ], + }, + + exclude::{ + input: rel, + items: list::[item], + _: [ + item::{ + root: '.identifier.symbol', + steps: list::[step], + }, + step::[ + attr::{ + symbol: '.identifier.symbol', + }, + pos::{ + index: int, + }, + struct_wildcard::{}, + collection_wildcard::{}, + ], + ], + }, + + err::{ + message: string, }, ], -] + _: [ + prop::[ + ORDERED, + 
], + binding::{ + name: string, + type: static_type, + }, + ] +} diff --git a/partiql-plan/src/test/kotlin/org/partiql/plan/debug/PlanPrinterTest.kt b/partiql-plan/src/test/kotlin/org/partiql/plan/debug/PlanPrinterTest.kt deleted file mode 100644 index 245258c73..000000000 --- a/partiql-plan/src/test/kotlin/org/partiql/plan/debug/PlanPrinterTest.kt +++ /dev/null @@ -1,26 +0,0 @@ -package org.partiql.plan.debug - -import com.amazon.ionelement.api.ionInt -import org.junit.jupiter.api.Test -import org.partiql.plan.Rex -import org.partiql.plan.builder.plan - -class PlanPrinterTest { - - @Test - fun prettyPrintPlan() { - val root = plan { - // (1 + 2) - 3 - rexBinary { - op = Rex.Binary.Op.MINUS - lhs = rexBinary { - op = Rex.Binary.Op.PLUS - lhs = rexLit(ionInt(1)) - rhs = rexLit(ionInt(2)) - } - rhs = rexLit(ionInt(3)) - } - } - PlanPrinter.append(System.out, root) - } -} diff --git a/partiql-plan/src/test/resources/tests.ion b/partiql-plan/src/test/resources/tests.ion new file mode 100644 index 000000000..93d1c9f0e --- /dev/null +++ b/partiql-plan/src/test/resources/tests.ion @@ -0,0 +1,16 @@ +{ + suite: "PartiQL Plan Ion Representation", + version: (0 1), + cases: [ + paths::[ + { + name: 'rex.path `a.b.c`', + expected: (path ($type 0) + (var ($type 0) 0) ( + (step (lit ($type 7) b)) + (step (lit ($type 7) c)) + )), + } + ] + ] +} diff --git a/partiql-planner/build.gradle.kts b/partiql-planner/build.gradle.kts new file mode 100644 index 000000000..27c29b908 --- /dev/null +++ b/partiql-planner/build.gradle.kts @@ -0,0 +1,72 @@ +import org.jetbrains.dokka.utilities.relativeTo + +/* + * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. 
This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + * + */ + +plugins { + id(Plugins.conventions) + id(Plugins.library) + id(Plugins.testFixtures) +} + +dependencies { + api(project(":partiql-plan")) + api(project(":partiql-types")) + implementation(project(":partiql-ast")) + implementation(project(":partiql-spi")) + implementation(Deps.dotlin) + implementation(Deps.ionElement) + // Test + testImplementation(project(":partiql-parser")) + testImplementation(project(":plugins:partiql-local")) + testImplementation(project(":plugins:partiql-memory")) + // Test Fixtures + testFixturesImplementation(project(":partiql-spi")) +} + +tasks.register("generateResourcePath") { + dependsOn("processTestFixturesResources") + doLast { + val resourceDir = file("src/testFixtures/resources") + val outDir = File("$buildDir/resources/testFixtures") + val fileName = "resource_path.txt" + val pathFile = File(outDir, fileName) + if (pathFile.exists()) { + pathFile.writeText("") // clean up existing text + } + resourceDir.walk().forEach { file -> + if (!file.isDirectory) { + if (file.extension == "ion" || file.extension == "sql") { + val toAppend = file.toURI().relativeTo(resourceDir.toURI()) + pathFile.appendText("$toAppend\n") + } + } + } + + sourceSets { + testFixtures { + resources { + this.srcDirs += pathFile + } + } + } + } +} + +tasks.processTestResources { + dependsOn("generateResourcePath") + from("src/testFixtures/resources") +} diff --git a/partiql-planner/src/main/kotlin/org/partiql/planner/Errors.kt b/partiql-planner/src/main/kotlin/org/partiql/planner/Errors.kt new file mode 100644 index 000000000..98992ae41 --- /dev/null +++ b/partiql-planner/src/main/kotlin/org/partiql/planner/Errors.kt @@ -0,0 +1,136 @@ +package org.partiql.planner + +import org.partiql.errors.ProblemDetails +import 
org.partiql.errors.ProblemSeverity +import org.partiql.types.StaticType + +/** + * Contains detailed information about errors that may occur during query planning. + * + * This information can be used to generate end-user readable error messages and is also easy to assert + * equivalence in unit tests. + */ +sealed class PlanningProblemDetails( + override val severity: ProblemSeverity, + val messageFormatter: () -> String, +) : ProblemDetails { + + override fun toString() = message + override val message: String get() = messageFormatter() + + data class ParseError(val parseErrorMessage: String) : + PlanningProblemDetails(ProblemSeverity.ERROR, { parseErrorMessage }) + + data class CompileError(val errorMessage: String) : + PlanningProblemDetails(ProblemSeverity.ERROR, { errorMessage }) + + data class UndefinedVariable(val variableName: String, val caseSensitive: Boolean) : + PlanningProblemDetails( + ProblemSeverity.ERROR, + { + "Undefined variable '$variableName'." + + quotationHint(caseSensitive) + } + ) + + data class UndefinedDmlTarget(val variableName: String, val caseSensitive: Boolean) : + PlanningProblemDetails( + ProblemSeverity.ERROR, + { + "Data manipulation target table '$variableName' is undefined. " + + "Hint: this must be a name in the global scope. " + + quotationHint(caseSensitive) + } + ) + + data class VariablePreviouslyDefined(val variableName: String) : + PlanningProblemDetails( + ProblemSeverity.ERROR, + { "The variable '$variableName' was previously defined." } + ) + + data class UnimplementedFeature(val featureName: String) : + PlanningProblemDetails( + ProblemSeverity.ERROR, + { "The syntax at this location is valid but utilizes unimplemented PartiQL feature '$featureName'" } + ) + + object InvalidDmlTarget : + PlanningProblemDetails( + ProblemSeverity.ERROR, + { "Expression is not a valid DML target. Hint: only table names are allowed here." 
} + ) + + object InsertValueDisallowed : + PlanningProblemDetails( + ProblemSeverity.ERROR, + { + "Use of `INSERT INTO VALUE ` is not allowed. " + + "Please use the `INSERT INTO
<< >>` form instead." + } + ) + + object InsertValuesDisallowed : + PlanningProblemDetails( + ProblemSeverity.ERROR, + { + "Use of `VALUES (, ...)` with INSERT is not allowed. " + + "Please use the `INSERT INTO
<< , ... >>` form instead." + } + ) + + data class UnexpectedType( + val actualType: StaticType, + val expectedTypes: Set, + ) : PlanningProblemDetails(ProblemSeverity.ERROR, { + "Unexpected type $actualType, expected one of ${expectedTypes.joinToString()}" + }) + + data class UnknownFunction( + val identifier: String, + val args: List, + ) : PlanningProblemDetails(ProblemSeverity.ERROR, { + val types = args.joinToString { "<${it.toString().lowercase()}>" } + "Unknown function `$identifier($types)" + }) + + object ExpressionAlwaysReturnsNullOrMissing : PlanningProblemDetails( + severity = ProblemSeverity.ERROR, + messageFormatter = { "Expression always returns null or missing." } + ) + + data class InvalidArgumentTypeForFunction( + val functionName: String, + val expectedType: StaticType, + val actualType: StaticType, + ) : + PlanningProblemDetails( + severity = ProblemSeverity.ERROR, + messageFormatter = { "Invalid argument type for $functionName. Expected $expectedType but got $actualType" } + ) + + data class IncompatibleTypesForOp( + val actualTypes: List, + val operator: String, + ) : + PlanningProblemDetails( + severity = ProblemSeverity.ERROR, + messageFormatter = { "${actualTypes.joinToString()} is/are incompatible data types for the '$operator' operator." } + ) + + data class UnresolvedExcludeExprRoot(val root: String) : + PlanningProblemDetails( + ProblemSeverity.ERROR, + { "Exclude expression given an unresolvable root '$root'" } + ) +} + +private fun quotationHint(caseSensitive: Boolean) = + if (caseSensitive) { + // Individuals that are new to SQL often try to use double quotes for string literals. + // Let's help them out a bit. + " Hint: did you intend to use single-quotes (') here? Remember that double-quotes (\") denote " + + "quoted identifiers and single-quotes denote strings." 
+ } else { + "" + } diff --git a/partiql-planner/src/main/kotlin/org/partiql/planner/Header.kt b/partiql-planner/src/main/kotlin/org/partiql/planner/Header.kt new file mode 100644 index 000000000..a2595adf1 --- /dev/null +++ b/partiql-planner/src/main/kotlin/org/partiql/planner/Header.kt @@ -0,0 +1,80 @@ +package org.partiql.planner + +import org.partiql.planner.internal.typer.TypeLattice +import org.partiql.types.function.FunctionParameter +import org.partiql.types.function.FunctionSignature +import org.partiql.value.PartiQLValueExperimental +import org.partiql.value.PartiQLValueType + +/** + * A (temporary) place for function definitions; there are whispers of loading this as information_schema. + */ +@OptIn(PartiQLValueExperimental::class) +public abstract class Header { + + /** + * Definition namespace e.g. partiql, spark, redshift, ... + */ + abstract val namespace: String + + /** + * Scalar function signatures available via call syntax. + */ + open val functions: List = emptyList() + + /** + * Hidden scalar function signatures available via operator or special form syntax. + */ + open val operators: List = emptyList() + + /** + * Aggregation function signatures. + */ + open val aggregations: List = emptyList() + + /** + * Type relationships; this is primarily a helper for defining operators. + */ + internal val types: TypeLattice = TypeLattice.partiql() + + /** + * Dump the Header as SQL commands + * + * For functions, output CREATE FUNCTION statements. 
+ */ + override fun toString(): String = buildString { + (functions + operators + aggregations).groupBy { it.name }.forEach { + appendLine("-- [${it.key}] ---------") + appendLine() + it.value.forEach { fn -> appendLine(fn) } + appendLine() + } + } + + // ==================================== + // HELPERS + // ==================================== + + companion object { + + @JvmStatic + internal fun unary(name: String, returns: PartiQLValueType, value: PartiQLValueType) = + FunctionSignature.Scalar( + name = name, + returns = returns, + parameters = listOf(FunctionParameter("value", value)), + isNullable = false, + isNullCall = true + ) + + @JvmStatic + internal fun binary(name: String, returns: PartiQLValueType, lhs: PartiQLValueType, rhs: PartiQLValueType) = + FunctionSignature.Scalar( + name = name, + returns = returns, + parameters = listOf(FunctionParameter("lhs", lhs), FunctionParameter("rhs", rhs)), + isNullable = false, + isNullCall = true + ) + } +} diff --git a/partiql-planner/src/main/kotlin/org/partiql/planner/PartiQLHeader.kt b/partiql-planner/src/main/kotlin/org/partiql/planner/PartiQLHeader.kt new file mode 100644 index 000000000..d08ff47d4 --- /dev/null +++ b/partiql-planner/src/main/kotlin/org/partiql/planner/PartiQLHeader.kt @@ -0,0 +1,756 @@ +package org.partiql.planner + +import org.partiql.ast.DatetimeField +import org.partiql.types.function.FunctionParameter +import org.partiql.types.function.FunctionSignature +import org.partiql.value.PartiQLValueExperimental +import org.partiql.value.PartiQLValueType.ANY +import org.partiql.value.PartiQLValueType.BOOL +import org.partiql.value.PartiQLValueType.CHAR +import org.partiql.value.PartiQLValueType.DATE +import org.partiql.value.PartiQLValueType.DECIMAL +import org.partiql.value.PartiQLValueType.INT +import org.partiql.value.PartiQLValueType.INT32 +import org.partiql.value.PartiQLValueType.INT64 +import org.partiql.value.PartiQLValueType.MISSING +import org.partiql.value.PartiQLValueType.NULL +import 
org.partiql.value.PartiQLValueType.STRING +import org.partiql.value.PartiQLValueType.TIME +import org.partiql.value.PartiQLValueType.TIMESTAMP + +/** + * A header which uses the PartiQL Lang Kotlin default standard library. All functions exist in a global namespace. + * Once we have catalogs with information_schema, the PartiQL Header will be fixed on a specification version and + * user defined functions will be defined within their own schema. + * + */ +@OptIn(PartiQLValueExperimental::class) +object PartiQLHeader : Header() { + + override val namespace: String = "partiql" + + /** + * PartiQL Scalar Functions accessible via call syntax. + */ + override val functions = scalarBuiltins() + + /** + * PartiQL Scalar Functions accessible via special form syntax (unary, binary, infix keywords, etc). + */ + override val operators = listOf( + logical(), + predicate(), + operators(), + special(), + system(), + ).flatten() + + /** + * PartiQL Aggregation Functions accessible via + */ + override val aggregations = aggBuiltins() + + /** + * Generate all unary and binary operator signatures. + */ + private fun operators(): List = listOf( + pos(), + neg(), + plus(), + minus(), + times(), + div(), + mod(), + concat(), + bitwiseAnd(), + ).flatten() + + /** + * Predicate function -- Condition that can be evaluated to a boolean value. + * + * Predicate function IS NULL, IS MISSING, `=`(Equal) does not propagate `MISSING`. 
+ */ + private fun predicate(): List = listOf( + // SQL + // 8.2 - comparison predicate + lt(), + lte(), + gt(), + gte(), + eq(), + + // 8.3 - between predicate + between(), + // 8.4 - in predicate + inCollection(), + // 8.5 - like predicate + like(), + // 8.7 - null predicate + isNull(), + + // PartiQL + isMissing(), // missing predication + isType(), // type predicate + isTypeSingleArg(), + isTypeDoubleArgsInt(), + isTypeTime(), + ).flatten() + + /** + * Logical functions follows the three-valued logic truth table: + * + * |A |B |A AND B|A OR B |NOT A | + * |----|----|-------|-------|------| + * |T |T |T |T |F | + * |T |F |F |T |F | + * |T |U |U |T |F | + * |F |T |F |T |T | + * |F |F |F |F |T | + * |F |U |F |U |T | + * |U |T |U |T |U | + * |U |F |F |U |U | + * |U |U |U |U |U | + * + * 1. The `MISSING` value, when convert to a truth value, becomes a `UNKNOWN`. + * 2. `UNKNOWN` truth value, when converting to PartiQL Value, becomes NULL of boolean type. + */ + private fun logical(): List = listOf( + not(), + and(), + or(), + ).flatten() + + /** + * SQL Builtins (not special forms) + */ + private fun scalarBuiltins(): List = listOf( + upper(), + lower(), + position(), + substring(), + trim(), + utcNow(), + ).flatten() + + /** + * SQL and PartiQL special forms + */ + private fun special(): List = listOf( + position(), + substring(), + trimSpecial(), + overlay(), + extract(), + dateAdd(), + dateDiff(), + ).flatten() + + /** + * System functions (for now, CURRENT_USER and CURRENT_DATE) + * + * @return + */ + private fun system(): List = listOf( + currentUser(), + currentDate(), + ) + + // OPERATORS + + private fun not(): List = listOf( + FunctionSignature.Scalar( + name = "not", + returns = BOOL, + isNullCall = true, + isNullable = false, + parameters = listOf(FunctionParameter("value", BOOL)), + ), + FunctionSignature.Scalar( + name = "not", + returns = BOOL, + isNullCall = true, + isNullable = false, + parameters = listOf(FunctionParameter("value", MISSING)), + ), + 
) + + private fun pos(): List = types.numeric.map { t -> + unary("pos", t, t) + } + + private fun neg(): List = types.numeric.map { t -> + unary("neg", t, t) + } + + private fun eq(): List = types.all.map { t -> + FunctionSignature.Scalar( + name = "eq", + returns = BOOL, + parameters = listOf(FunctionParameter("lhs", t), FunctionParameter("rhs", t)), + isNullable = false, + isNullCall = true, + ) + } + + private fun and(): List = listOf( + FunctionSignature.Scalar( + name = "and", + returns = BOOL, + isNullCall = false, + isNullable = true, + parameters = listOf(FunctionParameter("lhs", BOOL), FunctionParameter("rhs", BOOL)), + ), + FunctionSignature.Scalar( + name = "and", + returns = BOOL, + isNullCall = false, + isNullable = true, + parameters = listOf(FunctionParameter("lhs", MISSING), FunctionParameter("rhs", BOOL)), + ), + FunctionSignature.Scalar( + name = "and", + returns = BOOL, + isNullCall = false, + isNullable = true, + parameters = listOf(FunctionParameter("lhs", BOOL), FunctionParameter("rhs", MISSING)), + ), + FunctionSignature.Scalar( + name = "and", + returns = BOOL, + isNullCall = false, + isNullable = true, + parameters = listOf(FunctionParameter("lhs", MISSING), FunctionParameter("rhs", MISSING)), + ), + ) + + private fun or(): List = listOf( + FunctionSignature.Scalar( + name = "or", + returns = BOOL, + isNullCall = false, + isNullable = true, + parameters = listOf(FunctionParameter("lhs", BOOL), FunctionParameter("rhs", BOOL)), + ), + FunctionSignature.Scalar( + name = "or", + returns = BOOL, + isNullCall = false, + isNullable = true, + parameters = listOf(FunctionParameter("lhs", MISSING), FunctionParameter("rhs", BOOL)), + ), + FunctionSignature.Scalar( + name = "or", + returns = BOOL, + isNullCall = false, + isNullable = true, + parameters = listOf(FunctionParameter("lhs", BOOL), FunctionParameter("rhs", MISSING)), + ), + FunctionSignature.Scalar( + name = "or", + returns = BOOL, + isNullCall = false, + isNullable = true, + parameters = 
listOf(FunctionParameter("lhs", MISSING), FunctionParameter("rhs", MISSING)), + ), + ) + + private fun lt(): List = types.numeric.map { t -> + binary("lt", BOOL, t, t) + } + + private fun lte(): List = types.numeric.map { t -> + binary("lte", BOOL, t, t) + } + + private fun gt(): List = types.numeric.map { t -> + binary("gt", BOOL, t, t) + } + + private fun gte(): List = types.numeric.map { t -> + binary("gte", BOOL, t, t) + } + + private fun plus(): List = types.numeric.map { t -> + binary("plus", t, t, t) + } + + private fun minus(): List = types.numeric.map { t -> + binary("minus", t, t, t) + } + + private fun times(): List = types.numeric.map { t -> + binary("times", t, t, t) + } + + private fun div(): List = types.numeric.map { t -> + binary("divide", t, t, t) + } + + private fun mod(): List = types.numeric.map { t -> + binary("modulo", t, t, t) + } + + private fun concat(): List = types.text.map { t -> + binary("concat", t, t, t) + } + + private fun bitwiseAnd(): List = types.integer.map { t -> + binary("bitwise_and", t, t, t) + } + + // BUILT INS + private fun upper(): List = types.text.map { t -> + FunctionSignature.Scalar( + name = "upper", + returns = t, + parameters = listOf(FunctionParameter("value", t)), + isNullable = false, + isNullCall = true, + ) + } + + private fun lower(): List = types.text.map { t -> + FunctionSignature.Scalar( + name = "lower", + returns = t, + parameters = listOf(FunctionParameter("value", t)), + isNullable = false, + isNullCall = true, + ) + } + + // SPECIAL FORMS + + private fun like(): List = types.text.flatMap { t -> + listOf( + FunctionSignature.Scalar( + name = "like", + returns = BOOL, + parameters = listOf( + FunctionParameter("value", t), + FunctionParameter("pattern", t), + ), + isNullCall = true, + isNullable = false, + ), + FunctionSignature.Scalar( + name = "like_escape", + returns = BOOL, + parameters = listOf( + FunctionParameter("value", t), + FunctionParameter("pattern", t), + FunctionParameter("escape", t), + 
), + isNullCall = true, + isNullable = false, + ), + ) + } + + private fun between(): List = types.numeric.map { t -> + FunctionSignature.Scalar( + name = "between", + returns = BOOL, + parameters = listOf( + FunctionParameter("value", t), + FunctionParameter("lower", t), + FunctionParameter("upper", t), + ), + isNullable = false, + isNullCall = true, + ) + } + + private fun inCollection(): List = types.all.map { element -> + types.collections.map { collection -> + FunctionSignature.Scalar( + name = "in_collection", + returns = BOOL, + parameters = listOf( + FunctionParameter("value", element), + FunctionParameter("collection", collection), + ), + isNullable = false, + isNullCall = true, + ) + } + }.flatten() + + private fun isNull(): List = listOf( + FunctionSignature.Scalar( + name = "is_null", + returns = BOOL, + parameters = listOf( + FunctionParameter("value", ANY) // TODO: Decide if we need to further segment this + ), + isNullCall = false, + isNullable = false + ) + ) + + private fun isMissing(): List = listOf( + FunctionSignature.Scalar( + name = "is_missing", + returns = BOOL, + parameters = listOf( + FunctionParameter("value", ANY) // TODO: Decide if we need to further segment this + ), + isNullCall = false, + isNullable = false + ) + ) + + // To model type assertion, generating a list of assertion function based on the type, + // and the parameter will be the value entered. + // i.e., 1 is INT2 => is_int16(1) + private fun isType(): List = types.all.filterNot { it == NULL || it == MISSING }.flatMap { element -> + types.all.filterNot { it == MISSING || it == ANY }.map { operand -> + FunctionSignature.Scalar( + name = "is_${element.name.lowercase()}", + returns = BOOL, + parameters = listOf( + FunctionParameter("value", operand) + ), + isNullCall = false, // TODO: Should this be true? + isNullable = false + ) + } + } + + // In type assertion, it is possible for types to have args + // i.e., 'a' is CHAR(2) + // we put type parameter before value. 
+ private fun isTypeSingleArg(): List = listOf(CHAR, STRING).flatMap { element -> + types.all.filterNot { it == MISSING }.map { operand -> + FunctionSignature.Scalar( + name = "is_${element.name.lowercase()}", + returns = BOOL, + parameters = listOf( + FunctionParameter("type_parameter_1", INT32), + FunctionParameter("value", operand) + ), + isNullable = false, // TODO: Should this be true? + isNullCall = false + ) + } + } + + private fun isTypeDoubleArgsInt(): List = listOf(DECIMAL).flatMap { element -> + types.all.filterNot { it == MISSING }.map { operand -> + FunctionSignature.Scalar( + name = "is_${element.name.lowercase()}", + returns = BOOL, + parameters = listOf( + FunctionParameter("type_parameter_1", INT32), + FunctionParameter("type_parameter_2", INT32), + FunctionParameter("value", operand) + ), + isNullable = false, + isNullCall = false + ) + } + } + + private fun isTypeTime(): List = listOf(TIME, TIMESTAMP).flatMap { element -> + types.all.filterNot { it == MISSING }.map { operand -> + FunctionSignature.Scalar( + name = "is_${element.name.lowercase()}", + returns = BOOL, + parameters = listOf( + FunctionParameter("type_parameter_1", BOOL), + FunctionParameter("type_parameter_2", INT32), + FunctionParameter("value", operand) // TODO: Decide if we need to further segment this + ), + isNullCall = false, + isNullable = false + ) + } + } + + // SUBSTRING (expression, start[, length]?) + // SUBSTRINGG(expression from start [FOR length]? 
) + private fun substring(): List = types.text.map { t -> + listOf( + FunctionSignature.Scalar( + name = "substring", + returns = t, + parameters = listOf( + FunctionParameter("value", t), + FunctionParameter("start", INT64), + ), + isNullable = false, + isNullCall = true, + ), + FunctionSignature.Scalar( + name = "substring", + returns = t, + parameters = listOf( + FunctionParameter("value", t), + FunctionParameter("start", INT64), + FunctionParameter("end", INT64), + ), + isNullable = false, + isNullCall = true, + ) + ) + }.flatten() + + // position (str1, str2) + // position (str1 in str2) + private fun position(): List = types.text.map { t -> + FunctionSignature.Scalar( + name = "position", + returns = INT64, + parameters = listOf( + FunctionParameter("probe", t), + FunctionParameter("value", t), + ), + isNullable = false, + isNullCall = true, + ) + } + + // trim(str) + private fun trim(): List = types.text.map { t -> + FunctionSignature.Scalar( + name = "trim", + returns = t, + parameters = listOf( + FunctionParameter("value", t), + ), + isNullable = false, + isNullCall = true, + ) + } + + // TODO: We need to add a special form function for TRIM(BOTH FROM value) + private fun trimSpecial(): List = types.text.map { t -> + listOf( + // TRIM(chars FROM value) + // TRIM(both chars from value) + FunctionSignature.Scalar( + name = "trim_chars", + returns = t, + parameters = listOf( + FunctionParameter("value", t), + FunctionParameter("chars", t), + ), + isNullable = false, + isNullCall = true, + ), + // TRIM(LEADING FROM value) + FunctionSignature.Scalar( + name = "trim_leading", + returns = t, + parameters = listOf( + FunctionParameter("value", t), + ), + isNullable = false, + isNullCall = true, + ), + // TRIM(LEADING chars FROM value) + FunctionSignature.Scalar( + name = "trim_leading_chars", + returns = t, + parameters = listOf( + FunctionParameter("value", t), + FunctionParameter("chars", t), + ), + isNullable = false, + isNullCall = true, + ), + // 
TRIM(TRAILING FROM value) + FunctionSignature.Scalar( + name = "trim_trailing", + returns = t, + parameters = listOf( + FunctionParameter("value", t), + ), + isNullable = false, + isNullCall = true, + ), + // TRIM(TRAILING chars FROM value) + FunctionSignature.Scalar( + name = "trim_trailing_chars", + returns = t, + parameters = listOf( + FunctionParameter("value", t), + FunctionParameter("chars", t), + ), + isNullable = false, + isNullCall = true, + ), + ) + }.flatten() + + // TODO + private fun overlay(): List = emptyList() + + // TODO + private fun extract(): List = emptyList() + + private fun dateAdd(): List { + val operators = mutableListOf() + for (field in DatetimeField.values()) { + for (type in types.datetime) { + if (field == DatetimeField.TIMEZONE_HOUR || field == DatetimeField.TIMEZONE_MINUTE) { + continue + } + val signature = FunctionSignature.Scalar( + name = "date_add_${field.name.lowercase()}", + returns = type, + parameters = listOf( + FunctionParameter("interval", INT), + FunctionParameter("datetime", type), + ), + isNullable = false, + isNullCall = true, + ) + operators.add(signature) + } + } + return operators + } + + private fun dateDiff(): List { + val operators = mutableListOf() + for (field in DatetimeField.values()) { + for (type in types.datetime) { + if (field == DatetimeField.TIMEZONE_HOUR || field == DatetimeField.TIMEZONE_MINUTE) { + continue + } + val signature = FunctionSignature.Scalar( + name = "date_diff_${field.name.lowercase()}", + returns = INT64, + parameters = listOf( + FunctionParameter("datetime1", type), + FunctionParameter("datetime2", type), + ), + isNullable = false, + isNullCall = true, + ) + operators.add(signature) + } + } + return operators + } + + private fun utcNow(): List = listOf( + FunctionSignature.Scalar( + name = "utcnow", + returns = TIMESTAMP, + parameters = emptyList(), + isNullable = false, + ) + ) + + private fun currentUser() = FunctionSignature.Scalar( + name = "current_user", + returns = STRING, + 
parameters = emptyList(), + isNullable = true, + ) + + private fun currentDate() = FunctionSignature.Scalar( + name = "current_date", + returns = DATE, + parameters = emptyList(), + isNullable = false, + ) + + // ==================================== + // AGGREGATIONS + // ==================================== + + /** + * SQL and PartiQL Aggregation Builtins + */ + private fun aggBuiltins(): List = listOf( + every(), + any(), + some(), + count(), + min(), + max(), + sum(), + avg(), + ).flatten() + + private fun every() = listOf( + FunctionSignature.Aggregation( + name = "every", + returns = BOOL, + parameters = listOf(FunctionParameter("value", BOOL)), + isNullable = true, + ), + ) + + private fun any() = listOf( + FunctionSignature.Aggregation( + name = "any", + returns = BOOL, + parameters = listOf(FunctionParameter("value", BOOL)), + isNullable = true, + ), + ) + + private fun some() = listOf( + FunctionSignature.Aggregation( + name = "some", + returns = BOOL, + parameters = listOf(FunctionParameter("value", BOOL)), + isNullable = true, + ), + ) + + private fun count() = listOf( + FunctionSignature.Aggregation( + name = "count", + returns = INT32, + parameters = listOf(FunctionParameter("value", ANY)), + isNullable = false, + ), + FunctionSignature.Aggregation( + name = "count_star", + returns = INT32, + parameters = listOf(), + isNullable = false, + ), + ) + + private fun min() = types.numeric.map { + FunctionSignature.Aggregation( + name = "min", + returns = it, + parameters = listOf(FunctionParameter("value", it)), + isNullable = true, + ) + } + + private fun max() = types.numeric.map { + FunctionSignature.Aggregation( + name = "max", + returns = it, + parameters = listOf(FunctionParameter("value", it)), + isNullable = true, + ) + } + + private fun sum() = types.numeric.map { + FunctionSignature.Aggregation( + name = "sum", + returns = it, + parameters = listOf(FunctionParameter("value", it)), + isNullable = true, + ) + } + + private fun avg() = 
types.numeric.map { + FunctionSignature.Aggregation( + name = "avg", + returns = it, + parameters = listOf(FunctionParameter("value", it)), + isNullable = true, + ) + } +} diff --git a/partiql-planner/src/main/kotlin/org/partiql/planner/PartiQLPlanner.kt b/partiql-planner/src/main/kotlin/org/partiql/planner/PartiQLPlanner.kt new file mode 100644 index 000000000..3630ed651 --- /dev/null +++ b/partiql-planner/src/main/kotlin/org/partiql/planner/PartiQLPlanner.kt @@ -0,0 +1,53 @@ +package org.partiql.planner + +import com.amazon.ionelement.api.StructElement +import org.partiql.ast.Statement +import org.partiql.errors.Problem +import org.partiql.errors.ProblemCallback +import org.partiql.plan.PartiQLPlan +import java.time.Instant + +/** + * PartiQLPlanner is responsible for transforming an AST into PartiQL's logical query plan. + */ +public interface PartiQLPlanner { + + /** + * Transform an AST to a [PartiQLPlan]. + * + * @param statement + * @param session + * @param onProblem + * @return + */ + public fun plan(statement: Statement, session: Session, onProblem: ProblemCallback = {}): Result + + /** + * Planner result along with any warnings. + * + * @property plan + */ + public class Result( + val plan: PartiQLPlan, + val problems: List, + ) + + /** + * From [org.partiql.lang.planner.transforms] + * + * @property queryId + * @property userId + * @property currentCatalog + * @property currentDirectory + * @property catalogConfig + * @property instant + */ + public class Session( + public val queryId: String, + public val userId: String, + public val currentCatalog: String? 
= null, + public val currentDirectory: List = emptyList(), + public val catalogConfig: Map = emptyMap(), + public val instant: Instant = Instant.now(), + ) +} diff --git a/partiql-planner/src/main/kotlin/org/partiql/planner/PartiQLPlannerBuilder.kt b/partiql-planner/src/main/kotlin/org/partiql/planner/PartiQLPlannerBuilder.kt new file mode 100644 index 000000000..cec415092 --- /dev/null +++ b/partiql-planner/src/main/kotlin/org/partiql/planner/PartiQLPlannerBuilder.kt @@ -0,0 +1,27 @@ +package org.partiql.planner + +import org.partiql.spi.Plugin + +/** + * PartiQLPlannerBuilder + */ +class PartiQLPlannerBuilder { + + private var headers: MutableList
= mutableListOf(PartiQLHeader) + private var plugins: List = emptyList() + private var passes: List = emptyList() + + fun build(): PartiQLPlanner = PartiQLPlannerDefault(headers, plugins, passes) + + public fun plugins(plugins: List): PartiQLPlannerBuilder = this.apply { + this.plugins = plugins + } + + public fun passes(passes: List): PartiQLPlannerBuilder = this.apply { + this.passes = passes + } + + public fun headers(headers: List
): PartiQLPlannerBuilder = this.apply { + this.headers += headers + } +} diff --git a/partiql-planner/src/main/kotlin/org/partiql/planner/PartiQLPlannerDefault.kt b/partiql-planner/src/main/kotlin/org/partiql/planner/PartiQLPlannerDefault.kt new file mode 100644 index 000000000..e0dcda802 --- /dev/null +++ b/partiql-planner/src/main/kotlin/org/partiql/planner/PartiQLPlannerDefault.kt @@ -0,0 +1,54 @@ +package org.partiql.planner + +import org.partiql.ast.Statement +import org.partiql.ast.normalize.normalize +import org.partiql.errors.ProblemCallback +import org.partiql.planner.internal.Env +import org.partiql.planner.internal.ir.PartiQLVersion +import org.partiql.planner.internal.transforms.AstToPlan +import org.partiql.planner.internal.transforms.PlanTransform +import org.partiql.planner.internal.typer.PlanTyper +import org.partiql.spi.Plugin + +/** + * Default PartiQL logical query planner. + */ +internal class PartiQLPlannerDefault( + private val headers: List
, + private val plugins: List, + private val passes: List, +) : PartiQLPlanner { + + override fun plan( + statement: Statement, + session: PartiQLPlanner.Session, + onProblem: ProblemCallback, + ): PartiQLPlanner.Result { + // 0. Initialize the planning environment + val env = Env(headers, plugins, session) + + // 1. Normalize + val ast = statement.normalize() + + // 2. AST to Rel/Rex + val root = AstToPlan.apply(ast, env) + + // 3. Resolve variables + val typer = PlanTyper(env, onProblem) + val internal = org.partiql.planner.internal.ir.PartiQLPlan( + version = PartiQLVersion.VERSION_0_1, + globals = env.globals, + statement = typer.resolve(root), + ) + + // 4. Assert plan has been resolved — translating to public API + var plan = PlanTransform.visitPartiQLPlan(internal, onProblem) + + // 5. Apply all passes + for (pass in passes) { + plan = pass.apply(plan, onProblem) + } + + return PartiQLPlanner.Result(plan, emptyList()) + } +} diff --git a/partiql-planner/src/main/kotlin/org/partiql/planner/PartiQLPlannerPass.kt b/partiql-planner/src/main/kotlin/org/partiql/planner/PartiQLPlannerPass.kt new file mode 100644 index 000000000..c04f8fbb7 --- /dev/null +++ b/partiql-planner/src/main/kotlin/org/partiql/planner/PartiQLPlannerPass.kt @@ -0,0 +1,9 @@ +package org.partiql.planner + +import org.partiql.errors.ProblemCallback +import org.partiql.plan.PartiQLPlan + +interface PartiQLPlannerPass { + + fun apply(plan: PartiQLPlan, onProblem: ProblemCallback): PartiQLPlan +} diff --git a/partiql-planner/src/main/kotlin/org/partiql/planner/internal/Env.kt b/partiql-planner/src/main/kotlin/org/partiql/planner/internal/Env.kt new file mode 100644 index 000000000..2e8572493 --- /dev/null +++ b/partiql-planner/src/main/kotlin/org/partiql/planner/internal/Env.kt @@ -0,0 +1,437 @@ +package org.partiql.planner.internal + +import org.partiql.planner.Header +import org.partiql.planner.PartiQLPlanner +import org.partiql.planner.internal.ir.Agg +import org.partiql.planner.internal.ir.Fn 
+import org.partiql.planner.internal.ir.Global +import org.partiql.planner.internal.ir.Identifier +import org.partiql.planner.internal.ir.Rel +import org.partiql.planner.internal.ir.Rex +import org.partiql.planner.internal.ir.global +import org.partiql.planner.internal.ir.identifierQualified +import org.partiql.planner.internal.ir.identifierSymbol +import org.partiql.planner.internal.typer.FnResolver +import org.partiql.spi.BindingCase +import org.partiql.spi.BindingName +import org.partiql.spi.BindingPath +import org.partiql.spi.Plugin +import org.partiql.spi.connector.Connector +import org.partiql.spi.connector.ConnectorMetadata +import org.partiql.spi.connector.ConnectorObjectHandle +import org.partiql.spi.connector.ConnectorObjectPath +import org.partiql.spi.connector.ConnectorSession +import org.partiql.spi.connector.Constants +import org.partiql.types.StaticType +import org.partiql.types.StructType +import org.partiql.types.TupleConstraint + +/** + * Handle for associating a catalog with the metadata; pair of catalog to data. + */ +internal typealias Handle = Pair + +/** + * TypeEnv represents the environment in which we type expressions and resolve variables while planning. + * + * TODO TypeEnv should be a stack of locals; also the strategy has been kept here because it's easier to + * pass through the traversal like this, but is conceptually odd to associate with the TypeEnv. 
+ * @property schema + * @property strategy + */ +internal class TypeEnv( + val schema: List, + val strategy: ResolutionStrategy, +) { + + /** + * Return a copy with GLOBAL lookup strategy + */ + fun global() = TypeEnv(schema, ResolutionStrategy.GLOBAL) + + /** + * Return a copy with LOCAL lookup strategy + */ + fun local() = TypeEnv(schema, ResolutionStrategy.LOCAL) + + /** + * Debug string + */ + override fun toString() = buildString { + append("(") + append("strategy=$strategy") + append(", ") + val bindings = "< " + schema.joinToString { "${it.name}: ${it.type}" } + " >" + append("bindings=$bindings") + append(")") + } +} + +/** + * Metadata regarding a resolved variable. + */ +internal sealed interface ResolvedVar { + + public val type: StaticType + public val ordinal: Int + + /** + * Metadata for a resolved local variable. + * + * @property type Resolved StaticType + * @property ordinal Index offset in [TypeEnv] + * @property replacementSteps Path steps to replace. + * @property depth The depth/level of the path match. + */ + class Local( + override val type: StaticType, + override val ordinal: Int, + val rootType: StaticType, + val replacementSteps: List, + val depth: Int + ) : ResolvedVar + + /** + * Metadata for a resolved global variable + * + * @property type Resolved StaticType + * @property ordinal Index offset in the environment `globals` list + * @property depth The depth/level of the path match. 
+ */ + class Global( + override val type: StaticType, + override val ordinal: Int, + val depth: Int, + ) : ResolvedVar +} + +/** + * Variable resolution strategies — https://partiql.org/assets/PartiQL-Specification.pdf#page=35 + * + * | Value | Strategy | Scoping Rules | + * |------------+-----------------------+---------------| + * | LOCAL | local-first lookup | Rules 1, 2 | + * | GLOBAL | global-first lookup | Rule 3 | + */ +internal enum class ResolutionStrategy { + LOCAL, + GLOBAL, +} + +/** + * PartiQL Planner Global Environment of Catalogs backed by given plugins. + * + * @property headers List of namespaced definitions + * @property plugins List of plugins for global resolution + * @property session Session details + */ +internal class Env( + private val headers: List
, + private val plugins: List, + private val session: PartiQLPlanner.Session, +) { + + /** + * Collect the list of all referenced globals during planning. + */ + public val globals = mutableListOf() + + /** + * Encapsulate all function resolving logic within [FnResolver]. + */ + public val fnResolver = FnResolver(headers) + + private val connectorSession = object : ConnectorSession { + override fun getQueryId(): String = session.queryId + override fun getUserId(): String = session.userId + } + + /** + * Map of catalog names to its underlying connector + */ + private val catalogs: Map + + // Initialize connectors + init { + val catalogs = mutableMapOf() + val connectors = plugins.flatMap { it.getConnectorFactories() } + // map catalogs to connectors + for ((catalog, config) in session.catalogConfig) { + // find corresponding connector + val connectorName = config[Constants.CONFIG_KEY_CONNECTOR_NAME].stringValue + val connector = connectors.first { it.getName() == connectorName } + // initialize connector with given config + catalogs[catalog] = connector.create(catalog, config) + } + this.catalogs = catalogs.toMap() + } + + /** + * Leverages a [FunctionResolver] to find a matching function defined in the [Header] scalar function catalog. + */ + internal fun resolveFn(fn: Fn.Unresolved, args: List) = fnResolver.resolveFn(fn, args) + + /** + * Leverages a [FunctionResolver] to find a matching function defined in the [Header] aggregation function catalog. + */ + internal fun resolveAgg(agg: Agg.Unresolved, args: List) = fnResolver.resolveAgg(agg, args) + + /** + * Fetch global object metadata from the given [BindingPath]. + * + * @param catalog Current catalog + * @param path Global identifier path + * @return + */ + internal fun getObjectHandle(catalog: BindingName, path: BindingPath): Handle? 
{ + val metadata = getMetadata(catalog) ?: return null + return metadata.second.getObjectHandle(connectorSession, path)?.let { + metadata.first to it + } + } + + /** + * Fetch a global variable's StaticType given its handle. + * + * @param handle + * @return + */ + internal fun getObjectDescriptor(handle: Handle): StaticType { + val metadata = getMetadata(BindingName(handle.first, BindingCase.SENSITIVE))!!.second + return metadata.getObjectType(connectorSession, handle.second)!! + } + + /** + * Fetch [ConnectorMetadata] given a catalog name. + * + * @param catalogName + * @return + */ + private fun getMetadata(catalogName: BindingName): Handle? { + val catalogKey = catalogs.keys.firstOrNull { catalogName.isEquivalentTo(it) } ?: return null + val connector = catalogs[catalogKey] ?: return null + val metadata = connector.getMetadata(connectorSession) + return catalogKey to metadata + } + + /** + * TODO optimization, check known globals before calling out to connector again + * + * @param catalog + * @param originalPath + * @param catalogPath + * @return + */ + private fun getGlobalType( + catalog: BindingName?, + originalPath: BindingPath, + catalogPath: BindingPath, + ): ResolvedVar? { + return catalog?.let { cat -> + getObjectHandle(cat, catalogPath)?.let { handle -> + getObjectDescriptor(handle).let { type -> + val depth = calculateMatched(originalPath, catalogPath, handle.second.absolutePath) + val qualifiedPath = identifierQualified( + root = handle.first.toIdentifier(), + steps = handle.second.absolutePath.steps.map { it.toIdentifier() } + ) + val global = global(qualifiedPath, type) + globals.add(global) + // Return resolution metadata + ResolvedVar.Global(type, globals.size - 1, depth) + } + } + } + } + + private fun BindingPath.toCaseSensitive(): BindingPath { + return this.copy(steps = this.steps.map { it.copy(bindingCase = BindingCase.SENSITIVE) }) + } + + /** + * Attempt to resolve a [BindingPath] in the global + local type environments. 
+ */ + fun resolve(path: BindingPath, locals: TypeEnv, scope: Rex.Op.Var.Scope): ResolvedVar? { + val strategy = when (scope) { + Rex.Op.Var.Scope.DEFAULT -> locals.strategy + Rex.Op.Var.Scope.LOCAL -> ResolutionStrategy.LOCAL + } + return when (strategy) { + ResolutionStrategy.LOCAL -> { + var type: ResolvedVar? = null + type = type ?: resolveLocalBind(path, locals.schema) + type = type ?: resolveGlobalBind(path) + type + } + ResolutionStrategy.GLOBAL -> { + var type: ResolvedVar? = null + type = type ?: resolveGlobalBind(path) + type = type ?: resolveLocalBind(path, locals.schema) + type + } + } + } + + /** + * Logic is as follows: + * 1. If Current Catalog and Schema are set, create a Path to the object and attempt to grab handle and schema. + * a. If not found, just try to find the object in the catalog. + * 2. If Current Catalog is not set: + * a. Loop through all catalogs and try to find the object. + * + * TODO: Add global bindings + * TODO: Replace paths with global variable references if found + */ + private fun resolveGlobalBind(path: BindingPath): ResolvedVar? { + val currentCatalog = session.currentCatalog?.let { BindingName(it, BindingCase.SENSITIVE) } + val currentCatalogPath = BindingPath(session.currentDirectory.map { BindingName(it, BindingCase.SENSITIVE) }) + val absoluteCatalogPath = BindingPath(currentCatalogPath.steps + path.steps) + val resolvedVar = when (path.steps.size) { + 0 -> null + 1 -> getGlobalType(currentCatalog, path, absoluteCatalogPath) + 2 -> getGlobalType(currentCatalog, path, path) ?: getGlobalType(currentCatalog, path, absoluteCatalogPath) + else -> { + val inferredCatalog = path.steps[0] + val newPath = BindingPath(path.steps.subList(1, path.steps.size)) + getGlobalType(inferredCatalog, path, newPath) + ?: getGlobalType(currentCatalog, path, path) + ?: getGlobalType(currentCatalog, path, absoluteCatalogPath) + } + } + return resolvedVar + } + + /** + * Check locals, else search structs. 
+ */ + private fun resolveLocalBind(path: BindingPath, locals: List): ResolvedVar? { + if (path.steps.isEmpty()) { + return null + } + + // 1. Check locals for root + locals.forEachIndexed { ordinal, binding -> + val root = path.steps[0] + if (root.isEquivalentTo(binding.name)) { + return ResolvedVar.Local(binding.type, ordinal, binding.type, emptyList(), 1) + } + } + + // 2. Check if this variable is referencing a struct field, carrying ordinals + val matches = mutableListOf() + for (ordinal in locals.indices) { + val rootType = locals[ordinal].type + if (rootType is StructType) { + val varType = inferStructLookup(rootType, path) + if (varType != null) { + // we found this path within a struct! + val match = ResolvedVar.Local(varType.resolvedType, ordinal, rootType, varType.replacementPath.steps, varType.replacementPath.steps.size) + matches.add(match) + } + } + } + + // 0 -> no match + // 1 -> resolved + // N -> ambiguous + return when (matches.size) { + 0 -> null + 1 -> matches.single() + else -> null // TODO emit ambiguous error + } + } + + /** + * Searches for the path within the given struct, returning null if not found. + * + * @return a [ResolvedPath] that contains the disambiguated [ResolvedPath.replacementPath] and the path's + * [StaticType]. Returns NULL if unable to find the [path] given the [struct]. + */ + private fun inferStructLookup(struct: StructType, path: BindingPath): ResolvedPath? { + var curr: StaticType = struct + val replacementSteps = path.steps.map { step -> + // Assume ORDERED for now + val currentStruct = curr as? StructType ?: return null + val (replacement, stepType) = inferStructLookup(currentStruct, step) ?: return null + curr = stepType + replacement + } + // Lookup final field + return ResolvedPath( + BindingPath(replacementSteps), + curr + ) + } + + /** + * Represents a disambiguated [BindingPath] and its inferred [StaticType]. 
+ */ + private class ResolvedPath( + val replacementPath: BindingPath, + val resolvedType: StaticType + ) + + /** + * @return a disambiguated [key] and the resulting [StaticType]. + */ + private fun inferStructLookup(struct: StructType, key: BindingName): Pair? { + val isClosed = struct.constraints.contains(TupleConstraint.Open(false)) + val isOrdered = struct.constraints.contains(TupleConstraint.Ordered) + return when { + // 1. Struct is closed and ordered + isClosed && isOrdered -> { + struct.fields.firstOrNull { entry -> key.isEquivalentTo(entry.key) }?.let { + (sensitive(it.key) to it.value) + } + } + // 2. Struct is closed + isClosed -> { + val matches = struct.fields.filter { entry -> key.isEquivalentTo(entry.key) } + when (matches.size) { + 0 -> null + 1 -> matches.first().let { (sensitive(it.key) to it.value) } + else -> { + val firstKey = matches.first().key + val sharedKey = when (matches.all { it.key == firstKey }) { + true -> sensitive(firstKey) + false -> key + } + sharedKey to StaticType.unionOf(matches.map { it.value }.toSet()).flatten() + } + } + } + // 3. Struct is open + else -> null + } + } + + private fun sensitive(str: String): BindingName = BindingName(str, BindingCase.SENSITIVE) + + /** + * Logic for determining how many BindingNames were “matched” by the ConnectorMetadata + * 1. Matched = RelativePath - Not Found + * 2. Not Found = Input CatalogPath - Output CatalogPath + * 3. Matched = RelativePath - (Input CatalogPath - Output CatalogPath) + * 4. 
Matched = RelativePath + Output CatalogPath - Input CatalogPath + */ + private fun calculateMatched( + originalPath: BindingPath, + inputCatalogPath: BindingPath, + outputCatalogPath: ConnectorObjectPath, + ): Int { + return originalPath.steps.size + outputCatalogPath.steps.size - inputCatalogPath.steps.size + } + + private fun String.toIdentifier() = identifierSymbol( + symbol = this, + caseSensitivity = Identifier.CaseSensitivity.SENSITIVE + ) + + private fun BindingName.toIdentifier() = identifierSymbol( + symbol = name, + caseSensitivity = when (bindingCase) { + BindingCase.SENSITIVE -> Identifier.CaseSensitivity.SENSITIVE + BindingCase.INSENSITIVE -> Identifier.CaseSensitivity.INSENSITIVE + } + ) +} diff --git a/partiql-planner/src/main/kotlin/org/partiql/planner/internal/ir/Nodes.kt b/partiql-planner/src/main/kotlin/org/partiql/planner/internal/ir/Nodes.kt new file mode 100644 index 000000000..d357cfbb2 --- /dev/null +++ b/partiql-planner/src/main/kotlin/org/partiql/planner/internal/ir/Nodes.kt @@ -0,0 +1,1418 @@ +@file:OptIn(PartiQLValueExperimental::class) + +package org.partiql.planner.internal.ir + +import org.partiql.planner.internal.ir.builder.AggResolvedBuilder +import org.partiql.planner.internal.ir.builder.AggUnresolvedBuilder +import org.partiql.planner.internal.ir.builder.FnResolvedBuilder +import org.partiql.planner.internal.ir.builder.FnUnresolvedBuilder +import org.partiql.planner.internal.ir.builder.GlobalBuilder +import org.partiql.planner.internal.ir.builder.IdentifierQualifiedBuilder +import org.partiql.planner.internal.ir.builder.IdentifierSymbolBuilder +import org.partiql.planner.internal.ir.builder.PartiQlPlanBuilder +import org.partiql.planner.internal.ir.builder.RelBindingBuilder +import org.partiql.planner.internal.ir.builder.RelBuilder +import org.partiql.planner.internal.ir.builder.RelOpAggregateBuilder +import org.partiql.planner.internal.ir.builder.RelOpAggregateCallBuilder +import 
org.partiql.planner.internal.ir.builder.RelOpDistinctBuilder +import org.partiql.planner.internal.ir.builder.RelOpErrBuilder +import org.partiql.planner.internal.ir.builder.RelOpExceptBuilder +import org.partiql.planner.internal.ir.builder.RelOpExcludeBuilder +import org.partiql.planner.internal.ir.builder.RelOpExcludeItemBuilder +import org.partiql.planner.internal.ir.builder.RelOpExcludeStepAttrBuilder +import org.partiql.planner.internal.ir.builder.RelOpExcludeStepCollectionWildcardBuilder +import org.partiql.planner.internal.ir.builder.RelOpExcludeStepPosBuilder +import org.partiql.planner.internal.ir.builder.RelOpExcludeStepStructWildcardBuilder +import org.partiql.planner.internal.ir.builder.RelOpFilterBuilder +import org.partiql.planner.internal.ir.builder.RelOpIntersectBuilder +import org.partiql.planner.internal.ir.builder.RelOpJoinBuilder +import org.partiql.planner.internal.ir.builder.RelOpLimitBuilder +import org.partiql.planner.internal.ir.builder.RelOpOffsetBuilder +import org.partiql.planner.internal.ir.builder.RelOpProjectBuilder +import org.partiql.planner.internal.ir.builder.RelOpScanBuilder +import org.partiql.planner.internal.ir.builder.RelOpScanIndexedBuilder +import org.partiql.planner.internal.ir.builder.RelOpSortBuilder +import org.partiql.planner.internal.ir.builder.RelOpSortSpecBuilder +import org.partiql.planner.internal.ir.builder.RelOpUnionBuilder +import org.partiql.planner.internal.ir.builder.RelOpUnpivotBuilder +import org.partiql.planner.internal.ir.builder.RelTypeBuilder +import org.partiql.planner.internal.ir.builder.RexBuilder +import org.partiql.planner.internal.ir.builder.RexOpCallDynamicBuilder +import org.partiql.planner.internal.ir.builder.RexOpCallDynamicCandidateBuilder +import org.partiql.planner.internal.ir.builder.RexOpCallStaticBuilder +import org.partiql.planner.internal.ir.builder.RexOpCaseBranchBuilder +import org.partiql.planner.internal.ir.builder.RexOpCaseBuilder +import 
org.partiql.planner.internal.ir.builder.RexOpCollectionBuilder +import org.partiql.planner.internal.ir.builder.RexOpErrBuilder +import org.partiql.planner.internal.ir.builder.RexOpGlobalBuilder +import org.partiql.planner.internal.ir.builder.RexOpLitBuilder +import org.partiql.planner.internal.ir.builder.RexOpPathBuilder +import org.partiql.planner.internal.ir.builder.RexOpPathStepIndexBuilder +import org.partiql.planner.internal.ir.builder.RexOpPathStepSymbolBuilder +import org.partiql.planner.internal.ir.builder.RexOpPathStepUnpivotBuilder +import org.partiql.planner.internal.ir.builder.RexOpPathStepWildcardBuilder +import org.partiql.planner.internal.ir.builder.RexOpPivotBuilder +import org.partiql.planner.internal.ir.builder.RexOpSelectBuilder +import org.partiql.planner.internal.ir.builder.RexOpStructBuilder +import org.partiql.planner.internal.ir.builder.RexOpStructFieldBuilder +import org.partiql.planner.internal.ir.builder.RexOpSubqueryBuilder +import org.partiql.planner.internal.ir.builder.RexOpTupleUnionBuilder +import org.partiql.planner.internal.ir.builder.RexOpVarResolvedBuilder +import org.partiql.planner.internal.ir.builder.RexOpVarUnresolvedBuilder +import org.partiql.planner.internal.ir.builder.StatementQueryBuilder +import org.partiql.planner.internal.ir.visitor.PlanVisitor +import org.partiql.types.StaticType +import org.partiql.types.function.FunctionSignature +import org.partiql.value.PartiQLValue +import org.partiql.value.PartiQLValueExperimental +import kotlin.random.Random + +internal abstract class PlanNode { + @JvmField + internal var tag: String = "Plan-${"%06x".format(Random.nextInt())}" + + internal abstract val children: List + + internal abstract fun accept(visitor: PlanVisitor, ctx: C): R +} + +internal data class PartiQLPlan( + @JvmField + internal val version: PartiQLVersion, + @JvmField + internal val globals: List, + @JvmField + internal val statement: Statement, +) : PlanNode() { + internal override val children: List by lazy { 
+ val kids = mutableListOf() + kids.addAll(globals) + kids.add(statement) + kids.filterNotNull() + } + + internal override fun accept(visitor: PlanVisitor, ctx: C): R = + visitor.visitPartiQLPlan(this, ctx) + + internal companion object { + @JvmStatic + internal fun builder(): PartiQlPlanBuilder = PartiQlPlanBuilder() + } +} + +internal data class Global( + @JvmField + internal val path: Identifier.Qualified, + @JvmField + internal val type: StaticType, +) : PlanNode() { + internal override val children: List by lazy { + val kids = mutableListOf() + kids.add(path) + kids.filterNotNull() + } + + internal override fun accept(visitor: PlanVisitor, ctx: C): R = + visitor.visitGlobal(this, ctx) + + internal companion object { + @JvmStatic + internal fun builder(): GlobalBuilder = GlobalBuilder() + } +} + +internal sealed class Fn : PlanNode() { + internal override fun accept(visitor: PlanVisitor, ctx: C): R = when (this) { + is Resolved -> visitor.visitFnResolved(this, ctx) + is Unresolved -> visitor.visitFnUnresolved(this, ctx) + } + + internal data class Resolved( + @JvmField + internal val signature: FunctionSignature.Scalar, + ) : Fn() { + internal override val children: List = emptyList() + + internal override fun accept(visitor: PlanVisitor, ctx: C): R = + visitor.visitFnResolved(this, ctx) + + internal companion object { + @JvmStatic + internal fun builder(): FnResolvedBuilder = FnResolvedBuilder() + } + } + + internal data class Unresolved( + @JvmField + internal val identifier: Identifier, + @JvmField + internal val isHidden: Boolean, + ) : Fn() { + internal override val children: List by lazy { + val kids = mutableListOf() + kids.add(identifier) + kids.filterNotNull() + } + + internal override fun accept(visitor: PlanVisitor, ctx: C): R = + visitor.visitFnUnresolved(this, ctx) + + internal companion object { + @JvmStatic + internal fun builder(): FnUnresolvedBuilder = FnUnresolvedBuilder() + } + } +} + +internal sealed class Agg : PlanNode() { + internal 
override fun accept(visitor: PlanVisitor, ctx: C): R = when (this) { + is Resolved -> visitor.visitAggResolved(this, ctx) + is Unresolved -> visitor.visitAggUnresolved(this, ctx) + } + + internal data class Resolved( + @JvmField + internal val signature: FunctionSignature.Aggregation, + ) : Agg() { + internal override val children: List = emptyList() + + internal override fun accept(visitor: PlanVisitor, ctx: C): R = + visitor.visitAggResolved(this, ctx) + + internal companion object { + @JvmStatic + internal fun builder(): AggResolvedBuilder = AggResolvedBuilder() + } + } + + internal data class Unresolved( + @JvmField + internal val identifier: Identifier, + ) : Agg() { + internal override val children: List by lazy { + val kids = mutableListOf() + kids.add(identifier) + kids.filterNotNull() + } + + internal override fun accept(visitor: PlanVisitor, ctx: C): R = + visitor.visitAggUnresolved(this, ctx) + + internal companion object { + @JvmStatic + internal fun builder(): AggUnresolvedBuilder = AggUnresolvedBuilder() + } + } +} + +internal sealed class Statement : PlanNode() { + internal override fun accept(visitor: PlanVisitor, ctx: C): R = when (this) { + is Query -> visitor.visitStatementQuery(this, ctx) + } + + internal data class Query( + @JvmField + internal val root: Rex, + ) : Statement() { + internal override val children: List by lazy { + val kids = mutableListOf() + kids.add(root) + kids.filterNotNull() + } + + internal override fun accept(visitor: PlanVisitor, ctx: C): R = + visitor.visitStatementQuery(this, ctx) + + internal companion object { + @JvmStatic + internal fun builder(): StatementQueryBuilder = StatementQueryBuilder() + } + } +} + +internal sealed class Identifier : PlanNode() { + internal override fun accept(visitor: PlanVisitor, ctx: C): R = when (this) { + is Symbol -> visitor.visitIdentifierSymbol(this, ctx) + is Qualified -> visitor.visitIdentifierQualified(this, ctx) + } + + internal enum class CaseSensitivity { + SENSITIVE, + 
INSENSITIVE, + } + + internal data class Symbol( + @JvmField + internal val symbol: String, + @JvmField + internal val caseSensitivity: CaseSensitivity, + ) : Identifier() { + internal override val children: List = emptyList() + + internal override fun accept(visitor: PlanVisitor, ctx: C): R = + visitor.visitIdentifierSymbol(this, ctx) + + internal companion object { + @JvmStatic + internal fun builder(): IdentifierSymbolBuilder = IdentifierSymbolBuilder() + } + } + + internal data class Qualified( + @JvmField + internal val root: Symbol, + @JvmField + internal val steps: List, + ) : Identifier() { + internal override val children: List by lazy { + val kids = mutableListOf() + kids.add(root) + kids.addAll(steps) + kids.filterNotNull() + } + + internal override fun accept(visitor: PlanVisitor, ctx: C): R = + visitor.visitIdentifierQualified(this, ctx) + + internal companion object { + @JvmStatic + internal fun builder(): IdentifierQualifiedBuilder = IdentifierQualifiedBuilder() + } + } +} + +internal data class Rex( + @JvmField + internal val type: StaticType, + @JvmField + internal val op: Op, +) : PlanNode() { + internal override val children: List by lazy { + val kids = mutableListOf() + kids.add(op) + kids.filterNotNull() + } + + internal override fun accept(visitor: PlanVisitor, ctx: C): R = visitor.visitRex( + this, + ctx + ) + + internal sealed class Op : PlanNode() { + internal override fun accept(visitor: PlanVisitor, ctx: C): R = when (this) { + is Lit -> visitor.visitRexOpLit(this, ctx) + is Var -> visitor.visitRexOpVar(this, ctx) + is Global -> visitor.visitRexOpGlobal(this, ctx) + is Path -> visitor.visitRexOpPath(this, ctx) + is Call -> visitor.visitRexOpCall(this, ctx) + is Case -> visitor.visitRexOpCase(this, ctx) + is Collection -> visitor.visitRexOpCollection(this, ctx) + is Struct -> visitor.visitRexOpStruct(this, ctx) + is Pivot -> visitor.visitRexOpPivot(this, ctx) + is Subquery -> visitor.visitRexOpSubquery(this, ctx) + is Select -> 
visitor.visitRexOpSelect(this, ctx) + is TupleUnion -> visitor.visitRexOpTupleUnion(this, ctx) + is Err -> visitor.visitRexOpErr(this, ctx) + } + + internal data class Lit( + @JvmField + internal val `value`: PartiQLValue, + ) : Op() { + internal override val children: List = emptyList() + + internal override fun accept(visitor: PlanVisitor, ctx: C): R = + visitor.visitRexOpLit(this, ctx) + + internal companion object { + @JvmStatic + internal fun builder(): RexOpLitBuilder = RexOpLitBuilder() + } + } + + internal sealed class Var : Op() { + internal override fun accept(visitor: PlanVisitor, ctx: C): R = when (this) { + is Resolved -> visitor.visitRexOpVarResolved(this, ctx) + is Unresolved -> visitor.visitRexOpVarUnresolved(this, ctx) + } + + internal enum class Scope { + DEFAULT, + LOCAL, + } + + internal data class Resolved( + @JvmField + internal val ref: Int, + ) : Var() { + internal override val children: List = emptyList() + + internal override fun accept(visitor: PlanVisitor, ctx: C): R = + visitor.visitRexOpVarResolved(this, ctx) + + internal companion object { + @JvmStatic + internal fun builder(): RexOpVarResolvedBuilder = RexOpVarResolvedBuilder() + } + } + + internal data class Unresolved( + @JvmField + internal val identifier: Identifier, + @JvmField + internal val scope: Scope, + ) : Var() { + internal override val children: List by lazy { + val kids = mutableListOf() + kids.add(identifier) + kids.filterNotNull() + } + + internal override fun accept(visitor: PlanVisitor, ctx: C): R = + visitor.visitRexOpVarUnresolved(this, ctx) + + internal companion object { + @JvmStatic + internal fun builder(): RexOpVarUnresolvedBuilder = RexOpVarUnresolvedBuilder() + } + } + } + + internal data class Global( + @JvmField + internal val ref: Int, + ) : Op() { + internal override val children: List = emptyList() + + internal override fun accept(visitor: PlanVisitor, ctx: C): R = + visitor.visitRexOpGlobal(this, ctx) + + internal companion object { + @JvmStatic + 
internal fun builder(): RexOpGlobalBuilder = RexOpGlobalBuilder() + } + } + + internal data class Path( + @JvmField + internal val root: Rex, + @JvmField + internal val steps: List, + ) : Op() { + internal override val children: List by lazy { + val kids = mutableListOf() + kids.add(root) + kids.addAll(steps) + kids.filterNotNull() + } + + internal override fun accept(visitor: PlanVisitor, ctx: C): R = + visitor.visitRexOpPath(this, ctx) + + internal sealed class Step : PlanNode() { + internal override fun accept(visitor: PlanVisitor, ctx: C): R = when (this) { + is Index -> visitor.visitRexOpPathStepIndex(this, ctx) + is Symbol -> visitor.visitRexOpPathStepSymbol(this, ctx) + is Wildcard -> visitor.visitRexOpPathStepWildcard(this, ctx) + is Unpivot -> visitor.visitRexOpPathStepUnpivot(this, ctx) + is Key -> visitor.visitRexOpPathStepKey(this, ctx) + } + + internal data class Index( + @JvmField + internal val key: Rex, + ) : Step() { + internal override val children: List by lazy { + val kids = mutableListOf() + kids.add(key) + kids.filterNotNull() + } + + internal override fun accept(visitor: PlanVisitor, ctx: C): R = + visitor.visitRexOpPathStepIndex(this, ctx) + + internal companion object { + @JvmStatic + internal fun builder(): RexOpPathStepIndexBuilder = RexOpPathStepIndexBuilder() + } + } + + /** + * This represents a case-sensitive lookup on a tuple. Ex: a['b'] or a[CAST('a' || 'b' AS STRING)]. + * This would normally contain the dot notation for case-sensitive lookup, however, due to + * limitations -- we cannot consolidate these. See [Symbol] for more information. 
+ * + * The main difference is that this does NOT include `a."b"` + */ + internal data class Key( + @JvmField + internal val key: Rex, + ) : Step() { + internal override val children: List by lazy { + val kids = mutableListOf() + kids.add(key) + kids.filterNotNull() + } + + internal override fun accept(visitor: PlanVisitor, ctx: C): R = + visitor.visitRexOpPathStepKey(this, ctx) + + internal companion object { + @JvmStatic + internal fun builder(): RexOpPathStepIndexBuilder = RexOpPathStepIndexBuilder() + } + } + + /** + * This represents a lookup on a tuple. We differentiate a [Key] and a [Symbol] at this point in the + * pipeline because we NEED to retain some syntactic knowledge for the following reason: we cannot + * use the syntactic index operation on a schema -- as it is not synonymous with a tuple. In other words, + * `.""` is not interchangeable with `['']`. + * + * So, in order to temporarily differentiate the `a."b"` from `a['b']` (see [Key]), we need to maintain + * the syntactic difference here. Note that this would potentially be mitigated by typing during the AST to Plan + * transformation. + * + * That being said, this represents a lookup on a tuple such as `a.b` or `a."b"`. 
+ */ + internal data class Symbol( + @JvmField + internal val identifier: Identifier.Symbol, + ) : Step() { + internal override val children: List by lazy { + val kids = mutableListOf() + kids.add(identifier) + kids.filterNotNull() + } + + internal override fun accept(visitor: PlanVisitor, ctx: C): R = + visitor.visitRexOpPathStepSymbol(this, ctx) + + internal companion object { + @JvmStatic + internal fun builder(): RexOpPathStepSymbolBuilder = RexOpPathStepSymbolBuilder() + } + } + + internal data class Wildcard( + @JvmField + internal val ` `: Char = ' ', + ) : Step() { + internal override val children: List = emptyList() + + internal override fun accept(visitor: PlanVisitor, ctx: C): R = + visitor.visitRexOpPathStepWildcard(this, ctx) + + internal companion object { + @JvmStatic + internal fun builder(): RexOpPathStepWildcardBuilder = RexOpPathStepWildcardBuilder() + } + } + + internal data class Unpivot( + @JvmField + internal val ` `: Char = ' ', + ) : Step() { + internal override val children: List = emptyList() + + internal override fun accept(visitor: PlanVisitor, ctx: C): R = + visitor.visitRexOpPathStepUnpivot(this, ctx) + + internal companion object { + @JvmStatic + internal fun builder(): RexOpPathStepUnpivotBuilder = RexOpPathStepUnpivotBuilder() + } + } + } + + internal companion object { + @JvmStatic + internal fun builder(): RexOpPathBuilder = RexOpPathBuilder() + } + } + + internal sealed class Call : Op() { + internal override fun accept(visitor: PlanVisitor, ctx: C): R = when (this) { + is Static -> visitor.visitRexOpCallStatic(this, ctx) + is Dynamic -> visitor.visitRexOpCallDynamic(this, ctx) + } + + internal data class Static( + @JvmField + internal val fn: Fn, + @JvmField + internal val args: List, + ) : Call() { + internal override val children: List by lazy { + val kids = mutableListOf() + kids.add(fn) + kids.addAll(args) + kids.filterNotNull() + } + + internal override fun accept(visitor: PlanVisitor, ctx: C): R = + 
visitor.visitRexOpCallStatic(this, ctx) + + internal companion object { + @JvmStatic + internal fun builder(): RexOpCallStaticBuilder = RexOpCallStaticBuilder() + } + } + + internal data class Dynamic( + @JvmField + internal val args: List, + @JvmField + internal val candidates: List, + ) : Call() { + internal override val children: List by lazy { + val kids = mutableListOf() + kids.addAll(args) + kids.addAll(candidates) + kids.filterNotNull() + } + + internal override fun accept(visitor: PlanVisitor, ctx: C): R = + visitor.visitRexOpCallDynamic(this, ctx) + + internal data class Candidate( + @JvmField + internal val fn: Fn.Resolved, + @JvmField + internal val coercions: List, + ) : PlanNode() { + internal override val children: List by lazy { + val kids = mutableListOf() + kids.add(fn) + kids.addAll(coercions) + kids.filterNotNull() + } + + internal override fun accept(visitor: PlanVisitor, ctx: C): R = + visitor.visitRexOpCallDynamicCandidate(this, ctx) + + internal companion object { + @JvmStatic + internal fun builder(): RexOpCallDynamicCandidateBuilder = + RexOpCallDynamicCandidateBuilder() + } + } + + internal companion object { + @JvmStatic + internal fun builder(): RexOpCallDynamicBuilder = RexOpCallDynamicBuilder() + } + } + } + + internal data class Case( + @JvmField + internal val branches: List, + @JvmField + internal val default: Rex, + ) : Op() { + internal override val children: List by lazy { + val kids = mutableListOf() + kids.addAll(branches) + kids.add(default) + kids.filterNotNull() + } + + internal override fun accept(visitor: PlanVisitor, ctx: C): R = + visitor.visitRexOpCase(this, ctx) + + internal data class Branch( + @JvmField + internal val condition: Rex, + @JvmField + internal val rex: Rex, + ) : PlanNode() { + internal override val children: List by lazy { + val kids = mutableListOf() + kids.add(condition) + kids.add(rex) + kids.filterNotNull() + } + + internal override fun accept(visitor: PlanVisitor, ctx: C): R = + 
visitor.visitRexOpCaseBranch(this, ctx) + + internal companion object { + @JvmStatic + internal fun builder(): RexOpCaseBranchBuilder = RexOpCaseBranchBuilder() + } + } + + internal companion object { + @JvmStatic + internal fun builder(): RexOpCaseBuilder = RexOpCaseBuilder() + } + } + + internal data class Collection( + @JvmField + internal val values: List, + ) : Op() { + internal override val children: List by lazy { + val kids = mutableListOf() + kids.addAll(values) + kids.filterNotNull() + } + + internal override fun accept(visitor: PlanVisitor, ctx: C): R = + visitor.visitRexOpCollection(this, ctx) + + internal companion object { + @JvmStatic + internal fun builder(): RexOpCollectionBuilder = RexOpCollectionBuilder() + } + } + + internal data class Struct( + @JvmField + internal val fields: List, + ) : Op() { + internal override val children: List by lazy { + val kids = mutableListOf() + kids.addAll(fields) + kids.filterNotNull() + } + + internal override fun accept(visitor: PlanVisitor, ctx: C): R = + visitor.visitRexOpStruct(this, ctx) + + internal data class Field( + @JvmField + internal val k: Rex, + @JvmField + internal val v: Rex, + ) : PlanNode() { + internal override val children: List by lazy { + val kids = mutableListOf() + kids.add(k) + kids.add(v) + kids.filterNotNull() + } + + internal override fun accept(visitor: PlanVisitor, ctx: C): R = + visitor.visitRexOpStructField(this, ctx) + + internal companion object { + @JvmStatic + internal fun builder(): RexOpStructFieldBuilder = RexOpStructFieldBuilder() + } + } + + internal companion object { + @JvmStatic + internal fun builder(): RexOpStructBuilder = RexOpStructBuilder() + } + } + + internal data class Pivot( + @JvmField + internal val key: Rex, + @JvmField + internal val `value`: Rex, + @JvmField + internal val rel: Rel, + ) : Op() { + internal override val children: List by lazy { + val kids = mutableListOf() + kids.add(key) + kids.add(value) + kids.add(rel) + kids.filterNotNull() + } + + 
internal override fun accept(visitor: PlanVisitor, ctx: C): R = + visitor.visitRexOpPivot(this, ctx) + + internal companion object { + @JvmStatic + internal fun builder(): RexOpPivotBuilder = RexOpPivotBuilder() + } + } + + internal data class Subquery( + @JvmField + internal val select: Select, + @JvmField + internal val coercion: Coercion, + ) : Op() { + internal override val children: List by lazy { + val kids = mutableListOf() + kids.add(select) + kids.filterNotNull() + } + + internal override fun accept(visitor: PlanVisitor, ctx: C): R = + visitor.visitRexOpSubquery(this, ctx) + + internal enum class Coercion { + SCALAR, + ROW, + } + + internal companion object { + @JvmStatic + internal fun builder(): RexOpSubqueryBuilder = RexOpSubqueryBuilder() + } + } + + internal data class Select( + @JvmField + internal val `constructor`: Rex, + @JvmField + internal val rel: Rel, + ) : Op() { + internal override val children: List by lazy { + val kids = mutableListOf() + kids.add(constructor) + kids.add(rel) + kids.filterNotNull() + } + + internal override fun accept(visitor: PlanVisitor, ctx: C): R = + visitor.visitRexOpSelect(this, ctx) + + internal companion object { + @JvmStatic + internal fun builder(): RexOpSelectBuilder = RexOpSelectBuilder() + } + } + + internal data class TupleUnion( + @JvmField + internal val args: List, + ) : Op() { + internal override val children: List by lazy { + val kids = mutableListOf() + kids.addAll(args) + kids.filterNotNull() + } + + internal override fun accept(visitor: PlanVisitor, ctx: C): R = + visitor.visitRexOpTupleUnion(this, ctx) + + internal companion object { + @JvmStatic + internal fun builder(): RexOpTupleUnionBuilder = RexOpTupleUnionBuilder() + } + } + + internal data class Err( + @JvmField + internal val message: String, + ) : Op() { + internal override val children: List = emptyList() + + internal override fun accept(visitor: PlanVisitor, ctx: C): R = + visitor.visitRexOpErr(this, ctx) + + internal companion object { + 
@JvmStatic + internal fun builder(): RexOpErrBuilder = RexOpErrBuilder() + } + } + } + + internal companion object { + @JvmStatic + internal fun builder(): RexBuilder = RexBuilder() + } +} + +internal data class Rel( + @JvmField + internal val type: Type, + @JvmField + internal val op: Op, +) : PlanNode() { + internal override val children: List by lazy { + val kids = mutableListOf() + kids.add(type) + kids.add(op) + kids.filterNotNull() + } + + internal override fun accept(visitor: PlanVisitor, ctx: C): R = visitor.visitRel( + this, + ctx + ) + + internal enum class Prop { + ORDERED, + } + + internal data class Type( + @JvmField + internal val schema: List, + @JvmField + internal val props: Set, + ) : PlanNode() { + internal override val children: List by lazy { + val kids = mutableListOf() + kids.addAll(schema) + kids.filterNotNull() + } + + internal override fun accept(visitor: PlanVisitor, ctx: C): R = + visitor.visitRelType(this, ctx) + + internal companion object { + @JvmStatic + internal fun builder(): RelTypeBuilder = RelTypeBuilder() + } + } + + internal sealed class Op : PlanNode() { + internal override fun accept(visitor: PlanVisitor, ctx: C): R = when (this) { + is Scan -> visitor.visitRelOpScan(this, ctx) + is ScanIndexed -> visitor.visitRelOpScanIndexed(this, ctx) + is Unpivot -> visitor.visitRelOpUnpivot(this, ctx) + is Distinct -> visitor.visitRelOpDistinct(this, ctx) + is Filter -> visitor.visitRelOpFilter(this, ctx) + is Sort -> visitor.visitRelOpSort(this, ctx) + is Union -> visitor.visitRelOpUnion(this, ctx) + is Intersect -> visitor.visitRelOpIntersect(this, ctx) + is Except -> visitor.visitRelOpExcept(this, ctx) + is Limit -> visitor.visitRelOpLimit(this, ctx) + is Offset -> visitor.visitRelOpOffset(this, ctx) + is Project -> visitor.visitRelOpProject(this, ctx) + is Join -> visitor.visitRelOpJoin(this, ctx) + is Aggregate -> visitor.visitRelOpAggregate(this, ctx) + is Exclude -> visitor.visitRelOpExclude(this, ctx) + is Err -> 
visitor.visitRelOpErr(this, ctx) + } + + internal data class Scan( + @JvmField + internal val rex: Rex, + ) : Op() { + internal override val children: List by lazy { + val kids = mutableListOf() + kids.add(rex) + kids.filterNotNull() + } + + internal override fun accept(visitor: PlanVisitor, ctx: C): R = + visitor.visitRelOpScan(this, ctx) + + internal companion object { + @JvmStatic + internal fun builder(): RelOpScanBuilder = RelOpScanBuilder() + } + } + + internal data class ScanIndexed( + @JvmField + internal val rex: Rex, + ) : Op() { + internal override val children: List by lazy { + val kids = mutableListOf() + kids.add(rex) + kids.filterNotNull() + } + + internal override fun accept(visitor: PlanVisitor, ctx: C): R = + visitor.visitRelOpScanIndexed(this, ctx) + + internal companion object { + @JvmStatic + internal fun builder(): RelOpScanIndexedBuilder = RelOpScanIndexedBuilder() + } + } + + internal data class Unpivot( + @JvmField + internal val rex: Rex, + ) : Op() { + internal override val children: List by lazy { + val kids = mutableListOf() + kids.add(rex) + kids.filterNotNull() + } + + internal override fun accept(visitor: PlanVisitor, ctx: C): R = + visitor.visitRelOpUnpivot(this, ctx) + + internal companion object { + @JvmStatic + internal fun builder(): RelOpUnpivotBuilder = RelOpUnpivotBuilder() + } + } + + internal data class Distinct( + @JvmField + internal val input: Rel, + ) : Op() { + internal override val children: List by lazy { + val kids = mutableListOf() + kids.add(input) + kids.filterNotNull() + } + + internal override fun accept(visitor: PlanVisitor, ctx: C): R = + visitor.visitRelOpDistinct(this, ctx) + + internal companion object { + @JvmStatic + internal fun builder(): RelOpDistinctBuilder = RelOpDistinctBuilder() + } + } + + internal data class Filter( + @JvmField + internal val input: Rel, + @JvmField + internal val predicate: Rex, + ) : Op() { + internal override val children: List by lazy { + val kids = mutableListOf() + 
kids.add(input) + kids.add(predicate) + kids.filterNotNull() + } + + internal override fun accept(visitor: PlanVisitor, ctx: C): R = + visitor.visitRelOpFilter(this, ctx) + + internal companion object { + @JvmStatic + internal fun builder(): RelOpFilterBuilder = RelOpFilterBuilder() + } + } + + internal data class Sort( + @JvmField + internal val input: Rel, + @JvmField + internal val specs: List, + ) : Op() { + internal override val children: List by lazy { + val kids = mutableListOf() + kids.add(input) + kids.addAll(specs) + kids.filterNotNull() + } + + internal override fun accept(visitor: PlanVisitor, ctx: C): R = + visitor.visitRelOpSort(this, ctx) + + internal enum class Order { + ASC_NULLS_LAST, + ASC_NULLS_FIRST, + DESC_NULLS_LAST, + DESC_NULLS_FIRST, + } + + internal data class Spec( + @JvmField + internal val rex: Rex, + @JvmField + internal val order: Order, + ) : PlanNode() { + internal override val children: List by lazy { + val kids = mutableListOf() + kids.add(rex) + kids.filterNotNull() + } + + internal override fun accept(visitor: PlanVisitor, ctx: C): R = + visitor.visitRelOpSortSpec(this, ctx) + + internal companion object { + @JvmStatic + internal fun builder(): RelOpSortSpecBuilder = RelOpSortSpecBuilder() + } + } + + internal companion object { + @JvmStatic + internal fun builder(): RelOpSortBuilder = RelOpSortBuilder() + } + } + + internal data class Union( + @JvmField + internal val lhs: Rel, + @JvmField + internal val rhs: Rel, + ) : Op() { + internal override val children: List by lazy { + val kids = mutableListOf() + kids.add(lhs) + kids.add(rhs) + kids.filterNotNull() + } + + internal override fun accept(visitor: PlanVisitor, ctx: C): R = + visitor.visitRelOpUnion(this, ctx) + + internal companion object { + @JvmStatic + internal fun builder(): RelOpUnionBuilder = RelOpUnionBuilder() + } + } + + internal data class Intersect( + @JvmField + internal val lhs: Rel, + @JvmField + internal val rhs: Rel, + ) : Op() { + internal override val 
children: List by lazy { + val kids = mutableListOf() + kids.add(lhs) + kids.add(rhs) + kids.filterNotNull() + } + + internal override fun accept(visitor: PlanVisitor, ctx: C): R = + visitor.visitRelOpIntersect(this, ctx) + + internal companion object { + @JvmStatic + internal fun builder(): RelOpIntersectBuilder = RelOpIntersectBuilder() + } + } + + internal data class Except( + @JvmField + internal val lhs: Rel, + @JvmField + internal val rhs: Rel, + ) : Op() { + internal override val children: List by lazy { + val kids = mutableListOf() + kids.add(lhs) + kids.add(rhs) + kids.filterNotNull() + } + + internal override fun accept(visitor: PlanVisitor, ctx: C): R = + visitor.visitRelOpExcept(this, ctx) + + internal companion object { + @JvmStatic + internal fun builder(): RelOpExceptBuilder = RelOpExceptBuilder() + } + } + + internal data class Limit( + @JvmField + internal val input: Rel, + @JvmField + internal val limit: Rex, + ) : Op() { + internal override val children: List by lazy { + val kids = mutableListOf() + kids.add(input) + kids.add(limit) + kids.filterNotNull() + } + + internal override fun accept(visitor: PlanVisitor, ctx: C): R = + visitor.visitRelOpLimit(this, ctx) + + internal companion object { + @JvmStatic + internal fun builder(): RelOpLimitBuilder = RelOpLimitBuilder() + } + } + + internal data class Offset( + @JvmField + internal val input: Rel, + @JvmField + internal val offset: Rex, + ) : Op() { + internal override val children: List by lazy { + val kids = mutableListOf() + kids.add(input) + kids.add(offset) + kids.filterNotNull() + } + + internal override fun accept(visitor: PlanVisitor, ctx: C): R = + visitor.visitRelOpOffset(this, ctx) + + internal companion object { + @JvmStatic + internal fun builder(): RelOpOffsetBuilder = RelOpOffsetBuilder() + } + } + + internal data class Project( + @JvmField + internal val input: Rel, + @JvmField + internal val projections: List, + ) : Op() { + internal override val children: List by lazy { + val 
kids = mutableListOf() + kids.add(input) + kids.addAll(projections) + kids.filterNotNull() + } + + internal override fun accept(visitor: PlanVisitor, ctx: C): R = + visitor.visitRelOpProject(this, ctx) + + internal companion object { + @JvmStatic + internal fun builder(): RelOpProjectBuilder = RelOpProjectBuilder() + } + } + + internal data class Join( + @JvmField + internal val lhs: Rel, + @JvmField + internal val rhs: Rel, + @JvmField + internal val rex: Rex, + @JvmField + internal val type: Type, + ) : Op() { + internal override val children: List by lazy { + val kids = mutableListOf() + kids.add(lhs) + kids.add(rhs) + kids.add(rex) + kids.filterNotNull() + } + + internal override fun accept(visitor: PlanVisitor, ctx: C): R = + visitor.visitRelOpJoin(this, ctx) + + internal enum class Type { + INNER, + LEFT, + RIGHT, + FULL, + } + + internal companion object { + @JvmStatic + internal fun builder(): RelOpJoinBuilder = RelOpJoinBuilder() + } + } + + internal data class Aggregate( + @JvmField + internal val input: Rel, + @JvmField + internal val strategy: Strategy, + @JvmField + internal val calls: List, + @JvmField + internal val groups: List, + ) : Op() { + internal override val children: List by lazy { + val kids = mutableListOf() + kids.add(input) + kids.addAll(calls) + kids.addAll(groups) + kids.filterNotNull() + } + + internal override fun accept(visitor: PlanVisitor, ctx: C): R = + visitor.visitRelOpAggregate(this, ctx) + + internal enum class Strategy { + FULL, + PARTIAL, + } + + internal data class Call( + @JvmField + internal val agg: Agg, + @JvmField + internal val args: List, + ) : PlanNode() { + internal override val children: List by lazy { + val kids = mutableListOf() + kids.add(agg) + kids.addAll(args) + kids.filterNotNull() + } + + internal override fun accept(visitor: PlanVisitor, ctx: C): R = + visitor.visitRelOpAggregateCall(this, ctx) + + internal companion object { + @JvmStatic + internal fun builder(): RelOpAggregateCallBuilder = 
RelOpAggregateCallBuilder() + } + } + + internal companion object { + @JvmStatic + internal fun builder(): RelOpAggregateBuilder = RelOpAggregateBuilder() + } + } + + internal data class Exclude( + @JvmField + internal val input: Rel, + @JvmField + internal val items: List, + ) : Op() { + internal override val children: List by lazy { + val kids = mutableListOf() + kids.add(input) + kids.addAll(items) + kids.filterNotNull() + } + + internal override fun accept(visitor: PlanVisitor, ctx: C): R = + visitor.visitRelOpExclude(this, ctx) + + internal data class Item( + @JvmField + internal val root: Identifier.Symbol, + @JvmField + internal val steps: List, + ) : PlanNode() { + internal override val children: List by lazy { + val kids = mutableListOf() + kids.add(root) + kids.addAll(steps) + kids.filterNotNull() + } + + internal override fun accept(visitor: PlanVisitor, ctx: C): R = + visitor.visitRelOpExcludeItem(this, ctx) + + internal companion object { + @JvmStatic + internal fun builder(): RelOpExcludeItemBuilder = RelOpExcludeItemBuilder() + } + } + + internal sealed class Step : PlanNode() { + internal override fun accept(visitor: PlanVisitor, ctx: C): R = when (this) { + is Attr -> visitor.visitRelOpExcludeStepAttr(this, ctx) + is Pos -> visitor.visitRelOpExcludeStepPos(this, ctx) + is StructWildcard -> visitor.visitRelOpExcludeStepStructWildcard(this, ctx) + is CollectionWildcard -> visitor.visitRelOpExcludeStepCollectionWildcard(this, ctx) + } + + internal data class Attr( + @JvmField + internal val symbol: Identifier.Symbol, + ) : Step() { + internal override val children: List by lazy { + val kids = mutableListOf() + kids.add(symbol) + kids.filterNotNull() + } + + internal override fun accept(visitor: PlanVisitor, ctx: C): R = + visitor.visitRelOpExcludeStepAttr(this, ctx) + + internal companion object { + @JvmStatic + internal fun builder(): RelOpExcludeStepAttrBuilder = RelOpExcludeStepAttrBuilder() + } + } + + internal data class Pos( + @JvmField + 
internal val index: Int, + ) : Step() { + internal override val children: List = emptyList() + + internal override fun accept(visitor: PlanVisitor, ctx: C): R = + visitor.visitRelOpExcludeStepPos(this, ctx) + + internal companion object { + @JvmStatic + internal fun builder(): RelOpExcludeStepPosBuilder = RelOpExcludeStepPosBuilder() + } + } + + internal data class StructWildcard( + @JvmField + internal val ` `: Char = ' ', + ) : Step() { + internal override val children: List = emptyList() + + internal override fun accept(visitor: PlanVisitor, ctx: C): R = + visitor.visitRelOpExcludeStepStructWildcard(this, ctx) + + internal companion object { + @JvmStatic + internal fun builder(): RelOpExcludeStepStructWildcardBuilder = + RelOpExcludeStepStructWildcardBuilder() + } + } + + internal data class CollectionWildcard( + @JvmField + internal val ` `: Char = ' ', + ) : Step() { + internal override val children: List = emptyList() + + internal override fun accept(visitor: PlanVisitor, ctx: C): R = + visitor.visitRelOpExcludeStepCollectionWildcard(this, ctx) + + internal companion object { + @JvmStatic + internal fun builder(): RelOpExcludeStepCollectionWildcardBuilder = + RelOpExcludeStepCollectionWildcardBuilder() + } + } + } + + internal companion object { + @JvmStatic + internal fun builder(): RelOpExcludeBuilder = RelOpExcludeBuilder() + } + } + + internal data class Err( + @JvmField + internal val message: String, + ) : Op() { + internal override val children: List = emptyList() + + internal override fun accept(visitor: PlanVisitor, ctx: C): R = + visitor.visitRelOpErr(this, ctx) + + internal companion object { + @JvmStatic + internal fun builder(): RelOpErrBuilder = RelOpErrBuilder() + } + } + } + + internal data class Binding( + @JvmField + internal val name: String, + @JvmField + internal val type: StaticType, + ) : PlanNode() { + internal override val children: List = emptyList() + + internal override fun accept(visitor: PlanVisitor, ctx: C): R = + 
visitor.visitRelBinding(this, ctx) + + internal companion object { + @JvmStatic + internal fun builder(): RelBindingBuilder = RelBindingBuilder() + } + } + + internal companion object { + @JvmStatic + internal fun builder(): RelBuilder = RelBuilder() + } +} + +internal enum class PartiQLVersion { + VERSION_0_0, + VERSION_0_1, +} diff --git a/partiql-planner/src/main/kotlin/org/partiql/planner/internal/ir/Plan.kt b/partiql-planner/src/main/kotlin/org/partiql/planner/internal/ir/Plan.kt new file mode 100644 index 000000000..ddd3b9547 --- /dev/null +++ b/partiql-planner/src/main/kotlin/org/partiql/planner/internal/ir/Plan.kt @@ -0,0 +1,182 @@ +@file:JvmName("Plan") +@file:OptIn(PartiQLValueExperimental::class) + +package org.partiql.planner.internal.ir + +import org.partiql.types.StaticType +import org.partiql.types.function.FunctionSignature +import org.partiql.value.PartiQLValue +import org.partiql.value.PartiQLValueExperimental + +internal fun partiQLPlan( + version: PartiQLVersion, + globals: List, + statement: Statement, +): PartiQLPlan = PartiQLPlan(version, globals, statement) + +internal fun global(path: Identifier.Qualified, type: StaticType): Global = Global(path, type) + +internal fun fnResolved(signature: FunctionSignature.Scalar): Fn.Resolved = Fn.Resolved(signature) + +internal fun fnUnresolved(identifier: Identifier, isHidden: Boolean): Fn.Unresolved = + Fn.Unresolved(identifier, isHidden) + +internal fun aggResolved(signature: FunctionSignature.Aggregation): Agg.Resolved = + Agg.Resolved(signature) + +internal fun aggUnresolved(identifier: Identifier): Agg.Unresolved = Agg.Unresolved(identifier) + +internal fun statementQuery(root: Rex): Statement.Query = Statement.Query(root) + +internal fun identifierSymbol(symbol: String, caseSensitivity: Identifier.CaseSensitivity): + Identifier.Symbol = Identifier.Symbol(symbol, caseSensitivity) + +internal fun identifierQualified(root: Identifier.Symbol, steps: List): + Identifier.Qualified = 
Identifier.Qualified(root, steps) + +internal fun rex(type: StaticType, op: Rex.Op): Rex = Rex(type, op) + +@OptIn(PartiQLValueExperimental::class) +internal fun rexOpLit(`value`: PartiQLValue): Rex.Op.Lit = Rex.Op.Lit(value) + +internal fun rexOpVarResolved(ref: Int): Rex.Op.Var.Resolved = Rex.Op.Var.Resolved(ref) + +internal fun rexOpVarUnresolved(identifier: Identifier, scope: Rex.Op.Var.Scope): + Rex.Op.Var.Unresolved = Rex.Op.Var.Unresolved(identifier, scope) + +internal fun rexOpGlobal(ref: Int): Rex.Op.Global = Rex.Op.Global(ref) + +internal fun rexOpPath(root: Rex, steps: List): Rex.Op.Path = Rex.Op.Path( + root, + steps +) + +internal fun rexOpPathStepIndex(key: Rex): Rex.Op.Path.Step.Index = Rex.Op.Path.Step.Index(key) + +internal fun rexOpPathStepKey(key: Rex): Rex.Op.Path.Step.Key = Rex.Op.Path.Step.Key(key) + +internal fun rexOpPathStepSymbol(identifier: Identifier.Symbol): Rex.Op.Path.Step.Symbol = + Rex.Op.Path.Step.Symbol(identifier) + +internal fun rexOpPathStepWildcard(): Rex.Op.Path.Step.Wildcard = Rex.Op.Path.Step.Wildcard() + +internal fun rexOpPathStepUnpivot(): Rex.Op.Path.Step.Unpivot = Rex.Op.Path.Step.Unpivot() + +internal fun rexOpCallStatic(fn: Fn, args: List): Rex.Op.Call.Static = Rex.Op.Call.Static( + fn, + args +) + +internal fun rexOpCallDynamic(args: List, candidates: List): + Rex.Op.Call.Dynamic = Rex.Op.Call.Dynamic(args, candidates) + +internal fun rexOpCallDynamicCandidate(fn: Fn.Resolved, coercions: List): + Rex.Op.Call.Dynamic.Candidate = Rex.Op.Call.Dynamic.Candidate(fn, coercions) + +internal fun rexOpCase(branches: List, default: Rex): Rex.Op.Case = + Rex.Op.Case(branches, default) + +internal fun rexOpCaseBranch(condition: Rex, rex: Rex): Rex.Op.Case.Branch = + Rex.Op.Case.Branch(condition, rex) + +internal fun rexOpCollection(values: List): Rex.Op.Collection = Rex.Op.Collection(values) + +internal fun rexOpStruct(fields: List): Rex.Op.Struct = Rex.Op.Struct(fields) + +internal fun rexOpStructField(k: Rex, v: Rex): 
Rex.Op.Struct.Field = Rex.Op.Struct.Field(k, v) + +internal fun rexOpPivot( + key: Rex, + `value`: Rex, + rel: Rel, +): Rex.Op.Pivot = Rex.Op.Pivot(key, value, rel) + +internal fun rexOpSubquery(select: Rex.Op.Select, coercion: Rex.Op.Subquery.Coercion): Rex.Op.Subquery = + Rex.Op.Subquery(select, coercion) + +internal fun rexOpSelect(`constructor`: Rex, rel: Rel): Rex.Op.Select = Rex.Op.Select( + constructor, + rel +) + +internal fun rexOpTupleUnion(args: List): Rex.Op.TupleUnion = Rex.Op.TupleUnion(args) + +internal fun rexOpErr(message: String): Rex.Op.Err = Rex.Op.Err(message) + +internal fun rel(type: Rel.Type, op: Rel.Op): Rel = Rel(type, op) + +internal fun relType(schema: List, props: Set): Rel.Type = Rel.Type( + schema, + props +) + +internal fun relOpScan(rex: Rex): Rel.Op.Scan = Rel.Op.Scan(rex) + +internal fun relOpScanIndexed(rex: Rex): Rel.Op.ScanIndexed = Rel.Op.ScanIndexed(rex) + +internal fun relOpUnpivot(rex: Rex): Rel.Op.Unpivot = Rel.Op.Unpivot(rex) + +internal fun relOpDistinct(input: Rel): Rel.Op.Distinct = Rel.Op.Distinct(input) + +internal fun relOpFilter(input: Rel, predicate: Rex): Rel.Op.Filter = Rel.Op.Filter(input, predicate) + +internal fun relOpSort(input: Rel, specs: List): Rel.Op.Sort = Rel.Op.Sort( + input, + specs +) + +internal fun relOpSortSpec(rex: Rex, order: Rel.Op.Sort.Order): Rel.Op.Sort.Spec = + Rel.Op.Sort.Spec(rex, order) + +internal fun relOpUnion(lhs: Rel, rhs: Rel): Rel.Op.Union = Rel.Op.Union(lhs, rhs) + +internal fun relOpIntersect(lhs: Rel, rhs: Rel): Rel.Op.Intersect = Rel.Op.Intersect(lhs, rhs) + +internal fun relOpExcept(lhs: Rel, rhs: Rel): Rel.Op.Except = Rel.Op.Except(lhs, rhs) + +internal fun relOpLimit(input: Rel, limit: Rex): Rel.Op.Limit = Rel.Op.Limit(input, limit) + +internal fun relOpOffset(input: Rel, offset: Rex): Rel.Op.Offset = Rel.Op.Offset(input, offset) + +internal fun relOpProject(input: Rel, projections: List): Rel.Op.Project = Rel.Op.Project( + input, + projections +) + +internal fun 
relOpJoin( + lhs: Rel, + rhs: Rel, + rex: Rex, + type: Rel.Op.Join.Type, +): Rel.Op.Join = Rel.Op.Join(lhs, rhs, rex, type) + +internal fun relOpAggregate( + input: Rel, + strategy: Rel.Op.Aggregate.Strategy, + calls: List, + groups: List, +): Rel.Op.Aggregate = Rel.Op.Aggregate(input, strategy, calls, groups) + +internal fun relOpAggregateCall(agg: Agg, args: List): Rel.Op.Aggregate.Call = + Rel.Op.Aggregate.Call(agg, args) + +internal fun relOpExclude(input: Rel, items: List): Rel.Op.Exclude = + Rel.Op.Exclude(input, items) + +internal fun relOpExcludeItem(root: Identifier.Symbol, steps: List): + Rel.Op.Exclude.Item = Rel.Op.Exclude.Item(root, steps) + +internal fun relOpExcludeStepAttr(symbol: Identifier.Symbol): Rel.Op.Exclude.Step.Attr = + Rel.Op.Exclude.Step.Attr(symbol) + +internal fun relOpExcludeStepPos(index: Int): Rel.Op.Exclude.Step.Pos = Rel.Op.Exclude.Step.Pos(index) + +internal fun relOpExcludeStepStructWildcard(): Rel.Op.Exclude.Step.StructWildcard = + Rel.Op.Exclude.Step.StructWildcard() + +internal fun relOpExcludeStepCollectionWildcard(): Rel.Op.Exclude.Step.CollectionWildcard = + Rel.Op.Exclude.Step.CollectionWildcard() + +internal fun relOpErr(message: String): Rel.Op.Err = Rel.Op.Err(message) + +internal fun relBinding(name: String, type: StaticType): Rel.Binding = Rel.Binding(name, type) diff --git a/partiql-planner/src/main/kotlin/org/partiql/planner/internal/ir/builder/PlanBuilder.kt b/partiql-planner/src/main/kotlin/org/partiql/planner/internal/ir/builder/PlanBuilder.kt new file mode 100644 index 000000000..8ea53d5c5 --- /dev/null +++ b/partiql-planner/src/main/kotlin/org/partiql/planner/internal/ir/builder/PlanBuilder.kt @@ -0,0 +1,562 @@ +@file:Suppress("UNUSED_PARAMETER") @file:OptIn(PartiQLValueExperimental::class) + +package org.partiql.planner.internal.ir.builder + +import org.partiql.planner.internal.ir.Agg +import org.partiql.planner.internal.ir.Fn +import org.partiql.planner.internal.ir.Global +import 
org.partiql.planner.internal.ir.Identifier +import org.partiql.planner.internal.ir.PartiQLPlan +import org.partiql.planner.internal.ir.PartiQLVersion +import org.partiql.planner.internal.ir.PlanNode +import org.partiql.planner.internal.ir.Rel +import org.partiql.planner.internal.ir.Rex +import org.partiql.planner.internal.ir.Statement +import org.partiql.types.StaticType +import org.partiql.types.function.FunctionSignature +import org.partiql.value.PartiQLValue +import org.partiql.value.PartiQLValueExperimental + +internal fun plan(block: PlanBuilder.() -> T) = PlanBuilder().block() + +internal class PlanBuilder { + internal fun partiQLPlan( + version: PartiQLVersion? = null, + globals: MutableList = mutableListOf(), + statement: Statement? = null, + block: PartiQlPlanBuilder.() -> Unit = {}, + ): PartiQLPlan { + val builder = PartiQlPlanBuilder(version, globals, statement) + builder.block() + return builder.build() + } + + internal fun global( + path: Identifier.Qualified? = null, + type: StaticType? = null, + block: GlobalBuilder.() -> Unit = {}, + ): Global { + val builder = GlobalBuilder(path, type) + builder.block() + return builder.build() + } + + internal fun fnResolved( + signature: FunctionSignature.Scalar? = null, + block: FnResolvedBuilder.() -> Unit = {}, + ): Fn.Resolved { + val builder = FnResolvedBuilder(signature) + builder.block() + return builder.build() + } + + internal fun fnUnresolved( + identifier: Identifier? = null, + isHidden: Boolean? = null, + block: FnUnresolvedBuilder.() -> Unit = {}, + ): Fn.Unresolved { + val builder = FnUnresolvedBuilder(identifier, isHidden) + builder.block() + return builder.build() + } + + internal fun aggResolved( + signature: FunctionSignature.Aggregation? = null, + block: AggResolvedBuilder.() -> Unit = {}, + ): Agg.Resolved { + val builder = AggResolvedBuilder(signature) + builder.block() + return builder.build() + } + + internal fun aggUnresolved( + identifier: Identifier? 
= null, + block: AggUnresolvedBuilder.() -> Unit = {}, + ): Agg.Unresolved { + val builder = AggUnresolvedBuilder(identifier) + builder.block() + return builder.build() + } + + internal fun statementQuery(root: Rex? = null, block: StatementQueryBuilder.() -> Unit = {}): Statement.Query { + val builder = StatementQueryBuilder(root) + builder.block() + return builder.build() + } + + internal fun identifierSymbol( + symbol: String? = null, + caseSensitivity: Identifier.CaseSensitivity? = null, + block: IdentifierSymbolBuilder.() -> Unit = {}, + ): Identifier.Symbol { + val builder = IdentifierSymbolBuilder(symbol, caseSensitivity) + builder.block() + return builder.build() + } + + internal fun identifierQualified( + root: Identifier.Symbol? = null, + steps: MutableList = mutableListOf(), + block: IdentifierQualifiedBuilder.() -> Unit = {}, + ): Identifier.Qualified { + val builder = IdentifierQualifiedBuilder(root, steps) + builder.block() + return builder.build() + } + + internal fun rex( + type: StaticType? = null, + op: Rex.Op? = null, + block: RexBuilder.() -> Unit = {}, + ): Rex { + val builder = RexBuilder(type, op) + builder.block() + return builder.build() + } + + @OptIn(PartiQLValueExperimental::class) + internal fun rexOpLit(`value`: PartiQLValue? = null, block: RexOpLitBuilder.() -> Unit = {}): Rex.Op.Lit { + val builder = RexOpLitBuilder(value) + builder.block() + return builder.build() + } + + internal fun rexOpVarResolved( + ref: Int? = null, + block: RexOpVarResolvedBuilder.() -> Unit = {}, + ): Rex.Op.Var.Resolved { + val builder = RexOpVarResolvedBuilder(ref) + builder.block() + return builder.build() + } + + internal fun rexOpVarUnresolved( + identifier: Identifier? = null, + scope: Rex.Op.Var.Scope? = null, + block: RexOpVarUnresolvedBuilder.() -> Unit = {}, + ): Rex.Op.Var.Unresolved { + val builder = RexOpVarUnresolvedBuilder(identifier, scope) + builder.block() + return builder.build() + } + + internal fun rexOpGlobal(ref: Int? 
= null, block: RexOpGlobalBuilder.() -> Unit = {}): Rex.Op.Global { + val builder = RexOpGlobalBuilder(ref) + builder.block() + return builder.build() + } + + internal fun rexOpPath( + root: Rex? = null, + steps: MutableList = mutableListOf(), + block: RexOpPathBuilder.() -> Unit = {}, + ): Rex.Op.Path { + val builder = RexOpPathBuilder(root, steps) + builder.block() + return builder.build() + } + + internal fun rexOpPathStepIndex( + key: Rex? = null, + block: RexOpPathStepIndexBuilder.() -> Unit = {}, + ): Rex.Op.Path.Step.Index { + val builder = RexOpPathStepIndexBuilder(key) + builder.block() + return builder.build() + } + + internal fun rexOpPathStepKey( + key: Rex? = null, + block: RexOpPathStepKeyBuilder.() -> Unit = {}, + ): Rex.Op.Path.Step.Key { + val builder = RexOpPathStepKeyBuilder(key) + builder.block() + return builder.build() + } + + internal fun rexOpPathStepSymbol( + identifier: Identifier.Symbol? = null, + block: RexOpPathStepSymbolBuilder.() -> Unit = {}, + ): Rex.Op.Path.Step.Symbol { + val builder = RexOpPathStepSymbolBuilder(identifier) + builder.block() + return builder.build() + } + + internal fun rexOpPathStepWildcard(block: RexOpPathStepWildcardBuilder.() -> Unit = {}): Rex.Op.Path.Step.Wildcard { + val builder = RexOpPathStepWildcardBuilder() + builder.block() + return builder.build() + } + + internal fun rexOpPathStepUnpivot(block: RexOpPathStepUnpivotBuilder.() -> Unit = {}): Rex.Op.Path.Step.Unpivot { + val builder = RexOpPathStepUnpivotBuilder() + builder.block() + return builder.build() + } + + internal fun rexOpCallStatic( + fn: Fn? 
= null, + args: MutableList = mutableListOf(), + block: RexOpCallStaticBuilder.() -> Unit = {}, + ): Rex.Op.Call.Static { + val builder = RexOpCallStaticBuilder(fn, args) + builder.block() + return builder.build() + } + + internal fun rexOpCallDynamic( + args: MutableList = mutableListOf(), + candidates: MutableList = mutableListOf(), + block: RexOpCallDynamicBuilder.() -> Unit = {}, + ): Rex.Op.Call.Dynamic { + val builder = RexOpCallDynamicBuilder(args, candidates) + builder.block() + return builder.build() + } + + internal fun rexOpCallDynamicCandidate( + fn: Fn.Resolved? = null, + coercions: MutableList = mutableListOf(), + block: RexOpCallDynamicCandidateBuilder.() -> Unit = {}, + ): Rex.Op.Call.Dynamic.Candidate { + val builder = RexOpCallDynamicCandidateBuilder(fn, coercions) + builder.block() + return builder.build() + } + + internal fun rexOpCase( + branches: MutableList = mutableListOf(), + default: Rex? = null, + block: RexOpCaseBuilder.() -> Unit = {}, + ): Rex.Op.Case { + val builder = RexOpCaseBuilder(branches, default) + builder.block() + return builder.build() + } + + internal fun rexOpCaseBranch( + condition: Rex? = null, + rex: Rex? = null, + block: RexOpCaseBranchBuilder.() -> Unit = {}, + ): Rex.Op.Case.Branch { + val builder = RexOpCaseBranchBuilder(condition, rex) + builder.block() + return builder.build() + } + + internal fun rexOpCollection( + values: MutableList = mutableListOf(), + block: RexOpCollectionBuilder.() -> Unit = {}, + ): Rex.Op.Collection { + val builder = RexOpCollectionBuilder(values) + builder.block() + return builder.build() + } + + internal fun rexOpStruct( + fields: MutableList = mutableListOf(), + block: RexOpStructBuilder.() -> Unit = {}, + ): Rex.Op.Struct { + val builder = RexOpStructBuilder(fields) + builder.block() + return builder.build() + } + + internal fun rexOpStructField( + k: Rex? = null, + v: Rex? 
= null, + block: RexOpStructFieldBuilder.() -> Unit = {}, + ): Rex.Op.Struct.Field { + val builder = RexOpStructFieldBuilder(k, v) + builder.block() + return builder.build() + } + + internal fun rexOpPivot( + key: Rex? = null, + `value`: Rex? = null, + rel: Rel? = null, + block: RexOpPivotBuilder.() -> Unit = {}, + ): Rex.Op.Pivot { + val builder = RexOpPivotBuilder(key, value, rel) + builder.block() + return builder.build() + } + + internal fun rexOpSubquery( + select: Rex.Op.Select? = null, + coercion: Rex.Op.Subquery.Coercion? = null, + block: RexOpSubqueryBuilder.() -> Unit = {}, + ): Rex.Op.Subquery { + val builder = RexOpSubqueryBuilder(select, coercion) + builder.block() + return builder.build() + } + + internal fun rexOpSelect( + `constructor`: Rex? = null, + rel: Rel? = null, + block: RexOpSelectBuilder.() -> Unit = {}, + ): Rex.Op.Select { + val builder = RexOpSelectBuilder(constructor, rel) + builder.block() + return builder.build() + } + + internal fun rexOpTupleUnion( + args: MutableList = mutableListOf(), + block: RexOpTupleUnionBuilder.() -> Unit = {}, + ): Rex.Op.TupleUnion { + val builder = RexOpTupleUnionBuilder(args) + builder.block() + return builder.build() + } + + internal fun rexOpErr(message: String? = null, block: RexOpErrBuilder.() -> Unit = {}): Rex.Op.Err { + val builder = RexOpErrBuilder(message) + builder.block() + return builder.build() + } + + internal fun rel( + type: Rel.Type? = null, + op: Rel.Op? = null, + block: RelBuilder.() -> Unit = {}, + ): Rel { + val builder = RelBuilder(type, op) + builder.block() + return builder.build() + } + + internal fun relType( + schema: MutableList = mutableListOf(), + props: MutableSet = mutableSetOf(), + block: RelTypeBuilder.() -> Unit = {}, + ): Rel.Type { + val builder = RelTypeBuilder(schema, props) + builder.block() + return builder.build() + } + + internal fun relOpScan(rex: Rex? 
= null, block: RelOpScanBuilder.() -> Unit = {}): Rel.Op.Scan { + val builder = RelOpScanBuilder(rex) + builder.block() + return builder.build() + } + + internal fun relOpScanIndexed( + rex: Rex? = null, + block: RelOpScanIndexedBuilder.() -> Unit = {}, + ): Rel.Op.ScanIndexed { + val builder = RelOpScanIndexedBuilder(rex) + builder.block() + return builder.build() + } + + internal fun relOpUnpivot(rex: Rex? = null, block: RelOpUnpivotBuilder.() -> Unit = {}): Rel.Op.Unpivot { + val builder = RelOpUnpivotBuilder(rex) + builder.block() + return builder.build() + } + + internal fun relOpDistinct(input: Rel? = null, block: RelOpDistinctBuilder.() -> Unit = {}): Rel.Op.Distinct { + val builder = RelOpDistinctBuilder(input) + builder.block() + return builder.build() + } + + internal fun relOpFilter( + input: Rel? = null, + predicate: Rex? = null, + block: RelOpFilterBuilder.() -> Unit = {}, + ): Rel.Op.Filter { + val builder = RelOpFilterBuilder(input, predicate) + builder.block() + return builder.build() + } + + internal fun relOpSort( + input: Rel? = null, + specs: MutableList = mutableListOf(), + block: RelOpSortBuilder.() -> Unit = {}, + ): Rel.Op.Sort { + val builder = RelOpSortBuilder(input, specs) + builder.block() + return builder.build() + } + + internal fun relOpSortSpec( + rex: Rex? = null, + order: Rel.Op.Sort.Order? = null, + block: RelOpSortSpecBuilder.() -> Unit = {}, + ): Rel.Op.Sort.Spec { + val builder = RelOpSortSpecBuilder(rex, order) + builder.block() + return builder.build() + } + + internal fun relOpUnion( + lhs: Rel? = null, + rhs: Rel? = null, + block: RelOpUnionBuilder.() -> Unit = {}, + ): Rel.Op.Union { + val builder = RelOpUnionBuilder(lhs, rhs) + builder.block() + return builder.build() + } + + internal fun relOpIntersect( + lhs: Rel? = null, + rhs: Rel? 
= null, + block: RelOpIntersectBuilder.() -> Unit = {}, + ): Rel.Op.Intersect { + val builder = RelOpIntersectBuilder(lhs, rhs) + builder.block() + return builder.build() + } + + internal fun relOpExcept( + lhs: Rel? = null, + rhs: Rel? = null, + block: RelOpExceptBuilder.() -> Unit = {}, + ): Rel.Op.Except { + val builder = RelOpExceptBuilder(lhs, rhs) + builder.block() + return builder.build() + } + + internal fun relOpLimit( + input: Rel? = null, + limit: Rex? = null, + block: RelOpLimitBuilder.() -> Unit = {}, + ): Rel.Op.Limit { + val builder = RelOpLimitBuilder(input, limit) + builder.block() + return builder.build() + } + + internal fun relOpOffset( + input: Rel? = null, + offset: Rex? = null, + block: RelOpOffsetBuilder.() -> Unit = {}, + ): Rel.Op.Offset { + val builder = RelOpOffsetBuilder(input, offset) + builder.block() + return builder.build() + } + + internal fun relOpProject( + input: Rel? = null, + projections: MutableList = mutableListOf(), + block: RelOpProjectBuilder.() -> Unit = {}, + ): Rel.Op.Project { + val builder = RelOpProjectBuilder(input, projections) + builder.block() + return builder.build() + } + + internal fun relOpJoin( + lhs: Rel? = null, + rhs: Rel? = null, + rex: Rex? = null, + type: Rel.Op.Join.Type? = null, + block: RelOpJoinBuilder.() -> Unit = {}, + ): Rel.Op.Join { + val builder = RelOpJoinBuilder(lhs, rhs, rex, type) + builder.block() + return builder.build() + } + + internal fun relOpAggregate( + input: Rel? = null, + strategy: Rel.Op.Aggregate.Strategy? = null, + calls: MutableList = mutableListOf(), + groups: MutableList = mutableListOf(), + block: RelOpAggregateBuilder.() -> Unit = {}, + ): Rel.Op.Aggregate { + val builder = RelOpAggregateBuilder(input, strategy, calls, groups) + builder.block() + return builder.build() + } + + internal fun relOpAggregateCall( + agg: Agg? 
= null, + args: MutableList = mutableListOf(), + block: RelOpAggregateCallBuilder.() -> Unit = {}, + ): Rel.Op.Aggregate.Call { + val builder = RelOpAggregateCallBuilder(agg, args) + builder.block() + return builder.build() + } + + internal fun relOpExclude( + input: Rel? = null, + items: MutableList = mutableListOf(), + block: RelOpExcludeBuilder.() -> Unit = {}, + ): Rel.Op.Exclude { + val builder = RelOpExcludeBuilder(input, items) + builder.block() + return builder.build() + } + + internal fun relOpExcludeItem( + root: Identifier.Symbol? = null, + steps: MutableList = mutableListOf(), + block: RelOpExcludeItemBuilder.() -> Unit = {}, + ): Rel.Op.Exclude.Item { + val builder = RelOpExcludeItemBuilder(root, steps) + builder.block() + return builder.build() + } + + internal fun relOpExcludeStepAttr( + symbol: Identifier.Symbol? = null, + block: RelOpExcludeStepAttrBuilder.() -> Unit = {}, + ): Rel.Op.Exclude.Step.Attr { + val builder = RelOpExcludeStepAttrBuilder(symbol) + builder.block() + return builder.build() + } + + internal fun relOpExcludeStepPos( + index: Int? = null, + block: RelOpExcludeStepPosBuilder.() -> Unit = {}, + ): Rel.Op.Exclude.Step.Pos { + val builder = RelOpExcludeStepPosBuilder(index) + builder.block() + return builder.build() + } + + internal fun relOpExcludeStepStructWildcard( + block: RelOpExcludeStepStructWildcardBuilder.() -> Unit = {}, + ): Rel.Op.Exclude.Step.StructWildcard { + val builder = RelOpExcludeStepStructWildcardBuilder() + builder.block() + return builder.build() + } + + internal fun relOpExcludeStepCollectionWildcard( + block: RelOpExcludeStepCollectionWildcardBuilder.() -> Unit = {}, + ): Rel.Op.Exclude.Step.CollectionWildcard { + val builder = RelOpExcludeStepCollectionWildcardBuilder() + builder.block() + return builder.build() + } + + internal fun relOpErr(message: String? 
= null, block: RelOpErrBuilder.() -> Unit = {}): Rel.Op.Err { + val builder = RelOpErrBuilder(message) + builder.block() + return builder.build() + } + + internal fun relBinding( + name: String? = null, + type: StaticType? = null, + block: RelBindingBuilder.() -> Unit = {}, + ): Rel.Binding { + val builder = RelBindingBuilder(name, type) + builder.block() + return builder.build() + } +} diff --git a/partiql-planner/src/main/kotlin/org/partiql/planner/internal/ir/builder/PlanBuilders.kt b/partiql-planner/src/main/kotlin/org/partiql/planner/internal/ir/builder/PlanBuilders.kt new file mode 100644 index 000000000..8f4cf3197 --- /dev/null +++ b/partiql-planner/src/main/kotlin/org/partiql/planner/internal/ir/builder/PlanBuilders.kt @@ -0,0 +1,818 @@ +@file:OptIn(PartiQLValueExperimental::class) + +package org.partiql.planner.internal.ir.builder + +import org.partiql.planner.internal.ir.Agg +import org.partiql.planner.internal.ir.Fn +import org.partiql.planner.internal.ir.Global +import org.partiql.planner.internal.ir.Identifier +import org.partiql.planner.internal.ir.PartiQLPlan +import org.partiql.planner.internal.ir.PartiQLVersion +import org.partiql.planner.internal.ir.Rel +import org.partiql.planner.internal.ir.Rex +import org.partiql.planner.internal.ir.Statement +import org.partiql.types.StaticType +import org.partiql.types.function.FunctionSignature +import org.partiql.value.PartiQLValue +import org.partiql.value.PartiQLValueExperimental + +internal class PartiQlPlanBuilder( + internal var version: PartiQLVersion? = null, + internal var globals: MutableList = mutableListOf(), + internal var statement: Statement? 
= null, +) { + internal fun version(version: PartiQLVersion?): PartiQlPlanBuilder = this.apply { + this.version = version + } + + internal fun globals(globals: MutableList): PartiQlPlanBuilder = this.apply { + this.globals = globals + } + + internal fun statement(statement: Statement?): PartiQlPlanBuilder = this.apply { + this.statement = statement + } + + internal fun build(): PartiQLPlan = PartiQLPlan( + version = version!!, globals = globals, + statement = + statement!! + ) +} + +internal class GlobalBuilder( + internal var path: Identifier.Qualified? = null, + internal var type: StaticType? = null, +) { + internal fun path(path: Identifier.Qualified?): GlobalBuilder = this.apply { + this.path = path + } + + internal fun type(type: StaticType?): GlobalBuilder = this.apply { + this.type = type + } + + internal fun build(): Global = Global(path = path!!, type = type!!) +} + +internal class FnResolvedBuilder( + internal var signature: FunctionSignature.Scalar? = null, +) { + internal fun signature(signature: FunctionSignature.Scalar?): FnResolvedBuilder = this.apply { + this.signature = signature + } + + internal fun build(): Fn.Resolved = Fn.Resolved(signature = signature!!) +} + +internal class FnUnresolvedBuilder( + internal var identifier: Identifier? = null, + internal var isHidden: Boolean? = null, +) { + internal fun identifier(identifier: Identifier?): FnUnresolvedBuilder = this.apply { + this.identifier = identifier + } + + internal fun isHidden(isHidden: Boolean?): FnUnresolvedBuilder = this.apply { + this.isHidden = isHidden + } + + internal fun build(): Fn.Unresolved = Fn.Unresolved( + identifier = identifier!!, + isHidden = + isHidden!! + ) +} + +internal class AggResolvedBuilder( + internal var signature: FunctionSignature.Aggregation? 
= null, +) { + internal fun signature(signature: FunctionSignature.Aggregation?): AggResolvedBuilder = this.apply { + this.signature = signature + } + + internal fun build(): Agg.Resolved = Agg.Resolved(signature = signature!!) +} + +internal class AggUnresolvedBuilder( + internal var identifier: Identifier? = null, +) { + internal fun identifier(identifier: Identifier?): AggUnresolvedBuilder = this.apply { + this.identifier = identifier + } + + internal fun build(): Agg.Unresolved = Agg.Unresolved(identifier = identifier!!) +} + +internal class StatementQueryBuilder( + internal var root: Rex? = null, +) { + internal fun root(root: Rex?): StatementQueryBuilder = this.apply { + this.root = root + } + + internal fun build(): Statement.Query = Statement.Query(root = root!!) +} + +internal class IdentifierSymbolBuilder( + internal var symbol: String? = null, + internal var caseSensitivity: Identifier.CaseSensitivity? = null, +) { + internal fun symbol(symbol: String?): IdentifierSymbolBuilder = this.apply { + this.symbol = symbol + } + + internal fun caseSensitivity(caseSensitivity: Identifier.CaseSensitivity?): IdentifierSymbolBuilder = this.apply { + this.caseSensitivity = caseSensitivity + } + + internal fun build(): Identifier.Symbol = Identifier.Symbol( + symbol = symbol!!, + caseSensitivity = + caseSensitivity!! + ) +} + +internal class IdentifierQualifiedBuilder( + internal var root: Identifier.Symbol? = null, + internal var steps: MutableList = mutableListOf(), +) { + internal fun root(root: Identifier.Symbol?): IdentifierQualifiedBuilder = this.apply { + this.root = root + } + + internal fun steps(steps: MutableList): IdentifierQualifiedBuilder = this.apply { + this.steps = steps + } + + internal fun build(): Identifier.Qualified = Identifier.Qualified(root = root!!, steps = steps) +} + +internal class RexBuilder( + internal var type: StaticType? = null, + internal var op: Rex.Op? 
= null, +) { + internal fun type(type: StaticType?): RexBuilder = this.apply { + this.type = type + } + + internal fun op(op: Rex.Op?): RexBuilder = this.apply { + this.op = op + } + + internal fun build(): Rex = Rex(type = type!!, op = op!!) +} + +internal class RexOpLitBuilder( + internal var `value`: PartiQLValue? = null, +) { + @OptIn(PartiQLValueExperimental::class) + internal fun `value`(`value`: PartiQLValue?): RexOpLitBuilder = this.apply { + this.`value` = `value` + } + + @OptIn(PartiQLValueExperimental::class) + internal fun build(): Rex.Op.Lit = Rex.Op.Lit(value = value!!) +} + +internal class RexOpVarResolvedBuilder( + internal var ref: Int? = null, +) { + internal fun ref(ref: Int?): RexOpVarResolvedBuilder = this.apply { + this.ref = ref + } + + internal fun build(): Rex.Op.Var.Resolved = Rex.Op.Var.Resolved(ref = ref!!) +} + +internal class RexOpVarUnresolvedBuilder( + internal var identifier: Identifier? = null, + internal var scope: Rex.Op.Var.Scope? = null, +) { + internal fun identifier(identifier: Identifier?): RexOpVarUnresolvedBuilder = this.apply { + this.identifier = identifier + } + + internal fun scope(scope: Rex.Op.Var.Scope?): RexOpVarUnresolvedBuilder = this.apply { + this.scope = scope + } + + internal fun build(): Rex.Op.Var.Unresolved = Rex.Op.Var.Unresolved( + identifier = identifier!!, + scope = + scope!! + ) +} + +internal class RexOpGlobalBuilder( + internal var ref: Int? = null, +) { + internal fun ref(ref: Int?): RexOpGlobalBuilder = this.apply { + this.ref = ref + } + + internal fun build(): Rex.Op.Global = Rex.Op.Global(ref = ref!!) +} + +internal class RexOpPathBuilder( + internal var root: Rex? 
= null, + internal var steps: MutableList = mutableListOf(), +) { + internal fun root(root: Rex?): RexOpPathBuilder = this.apply { + this.root = root + } + + internal fun steps(steps: MutableList): RexOpPathBuilder = this.apply { + this.steps = steps + } + + internal fun build(): Rex.Op.Path = Rex.Op.Path(root = root!!, steps = steps) +} + +internal class RexOpPathStepIndexBuilder( + internal var key: Rex? = null, +) { + internal fun key(key: Rex?): RexOpPathStepIndexBuilder = this.apply { + this.key = key + } + + internal fun build(): Rex.Op.Path.Step.Index = Rex.Op.Path.Step.Index(key = key!!) +} + +internal class RexOpPathStepKeyBuilder( + internal var key: Rex? = null, +) { + internal fun key(key: Rex?): RexOpPathStepKeyBuilder = this.apply { + this.key = key + } + + internal fun build(): Rex.Op.Path.Step.Key = Rex.Op.Path.Step.Key(key = key!!) +} + +internal class RexOpPathStepSymbolBuilder( + internal var identifier: Identifier.Symbol? = null, +) { + internal fun identifier(identifier: Identifier.Symbol?): RexOpPathStepSymbolBuilder = this.apply { + this.identifier = identifier + } + + internal fun build(): Rex.Op.Path.Step.Symbol = Rex.Op.Path.Step.Symbol(identifier = identifier!!) +} + +internal class RexOpPathStepWildcardBuilder() { + internal fun build(): Rex.Op.Path.Step.Wildcard = Rex.Op.Path.Step.Wildcard() +} + +internal class RexOpPathStepUnpivotBuilder() { + internal fun build(): Rex.Op.Path.Step.Unpivot = Rex.Op.Path.Step.Unpivot() +} + +internal class RexOpCallStaticBuilder( + internal var fn: Fn? 
= null, + internal var args: MutableList = mutableListOf(), +) { + internal fun fn(fn: Fn?): RexOpCallStaticBuilder = this.apply { + this.fn = fn + } + + internal fun args(args: MutableList): RexOpCallStaticBuilder = this.apply { + this.args = args + } + + internal fun build(): Rex.Op.Call.Static = Rex.Op.Call.Static(fn = fn!!, args = args) +} + +internal class RexOpCallDynamicBuilder( + internal var args: MutableList = mutableListOf(), + internal var candidates: MutableList = mutableListOf(), +) { + internal fun args(args: MutableList): RexOpCallDynamicBuilder = this.apply { + this.args = args + } + + internal fun candidates(candidates: MutableList): + RexOpCallDynamicBuilder = this.apply { + this.candidates = candidates + } + + internal fun build(): Rex.Op.Call.Dynamic = Rex.Op.Call.Dynamic( + args = args, + candidates = + candidates + ) +} + +internal class RexOpCallDynamicCandidateBuilder( + internal var fn: Fn.Resolved? = null, + internal var coercions: MutableList = mutableListOf(), +) { + internal fun fn(fn: Fn.Resolved?): RexOpCallDynamicCandidateBuilder = this.apply { + this.fn = fn + } + + internal fun coercions(coercions: MutableList): RexOpCallDynamicCandidateBuilder = + this.apply { + this.coercions = coercions + } + + internal fun build(): Rex.Op.Call.Dynamic.Candidate = Rex.Op.Call.Dynamic.Candidate( + fn = fn!!, + coercions = coercions + ) +} + +internal class RexOpCaseBuilder( + internal var branches: MutableList = mutableListOf(), + internal var default: Rex? = null, +) { + internal fun branches(branches: MutableList): RexOpCaseBuilder = this.apply { + this.branches = branches + } + + internal fun default(default: Rex?): RexOpCaseBuilder = this.apply { + this.default = default + } + + internal fun build(): Rex.Op.Case = Rex.Op.Case(branches = branches, default = default!!) +} + +internal class RexOpCaseBranchBuilder( + internal var condition: Rex? = null, + internal var rex: Rex? 
= null, +) { + internal fun condition(condition: Rex?): RexOpCaseBranchBuilder = this.apply { + this.condition = condition + } + + internal fun rex(rex: Rex?): RexOpCaseBranchBuilder = this.apply { + this.rex = rex + } + + internal fun build(): Rex.Op.Case.Branch = Rex.Op.Case.Branch(condition = condition!!, rex = rex!!) +} + +internal class RexOpCollectionBuilder( + internal var values: MutableList = mutableListOf(), +) { + internal fun values(values: MutableList): RexOpCollectionBuilder = this.apply { + this.values = values + } + + internal fun build(): Rex.Op.Collection = Rex.Op.Collection(values = values) +} + +internal class RexOpStructBuilder( + internal var fields: MutableList = mutableListOf(), +) { + internal fun fields(fields: MutableList): RexOpStructBuilder = this.apply { + this.fields = fields + } + + internal fun build(): Rex.Op.Struct = Rex.Op.Struct(fields = fields) +} + +internal class RexOpStructFieldBuilder( + internal var k: Rex? = null, + internal var v: Rex? = null, +) { + internal fun k(k: Rex?): RexOpStructFieldBuilder = this.apply { + this.k = k + } + + internal fun v(v: Rex?): RexOpStructFieldBuilder = this.apply { + this.v = v + } + + internal fun build(): Rex.Op.Struct.Field = Rex.Op.Struct.Field(k = k!!, v = v!!) +} + +internal class RexOpPivotBuilder( + internal var key: Rex? = null, + internal var `value`: Rex? = null, + internal var rel: Rel? = null, +) { + internal fun key(key: Rex?): RexOpPivotBuilder = this.apply { + this.key = key + } + + internal fun `value`(`value`: Rex?): RexOpPivotBuilder = this.apply { + this.`value` = `value` + } + + internal fun rel(rel: Rel?): RexOpPivotBuilder = this.apply { + this.rel = rel + } + + internal fun build(): Rex.Op.Pivot = Rex.Op.Pivot(key = key!!, value = value!!, rel = rel!!) +} + +internal class RexOpSubqueryBuilder( + internal var select: Rex.Op.Select? = null, + internal var coercion: Rex.Op.Subquery.Coercion? 
= null, +) { + internal fun select(select: Rex.Op.Select?): RexOpSubqueryBuilder = this.apply { + this.select = select + } + + internal fun coercion(coercion: Rex.Op.Subquery.Coercion?): RexOpSubqueryBuilder = this.apply { + this.coercion = coercion + } + + internal fun build(): Rex.Op.Subquery = Rex.Op.Subquery(select = select!!, coercion = coercion!!) +} + +internal class RexOpSelectBuilder( + internal var `constructor`: Rex? = null, + internal var rel: Rel? = null, +) { + internal fun `constructor`(`constructor`: Rex?): RexOpSelectBuilder = this.apply { + this.`constructor` = `constructor` + } + + internal fun rel(rel: Rel?): RexOpSelectBuilder = this.apply { + this.rel = rel + } + + internal fun build(): Rex.Op.Select = Rex.Op.Select(constructor = constructor!!, rel = rel!!) +} + +internal class RexOpTupleUnionBuilder( + internal var args: MutableList = mutableListOf(), +) { + internal fun args(args: MutableList): RexOpTupleUnionBuilder = this.apply { + this.args = args + } + + internal fun build(): Rex.Op.TupleUnion = Rex.Op.TupleUnion(args = args) +} + +internal class RexOpErrBuilder( + internal var message: String? = null, +) { + internal fun message(message: String?): RexOpErrBuilder = this.apply { + this.message = message + } + + internal fun build(): Rex.Op.Err = Rex.Op.Err(message = message!!) +} + +internal class RelBuilder( + internal var type: Rel.Type? = null, + internal var op: Rel.Op? = null, +) { + internal fun type(type: Rel.Type?): RelBuilder = this.apply { + this.type = type + } + + internal fun op(op: Rel.Op?): RelBuilder = this.apply { + this.op = op + } + + internal fun build(): Rel = Rel(type = type!!, op = op!!) 
+} + +internal class RelTypeBuilder( + internal var schema: MutableList = mutableListOf(), + internal var props: MutableSet = mutableSetOf(), +) { + internal fun schema(schema: MutableList): RelTypeBuilder = this.apply { + this.schema = schema + } + + internal fun props(props: MutableSet): RelTypeBuilder = this.apply { + this.props = props + } + + internal fun build(): Rel.Type = Rel.Type(schema = schema, props = props) +} + +internal class RelOpScanBuilder( + internal var rex: Rex? = null, +) { + internal fun rex(rex: Rex?): RelOpScanBuilder = this.apply { + this.rex = rex + } + + internal fun build(): Rel.Op.Scan = Rel.Op.Scan(rex = rex!!) +} + +internal class RelOpScanIndexedBuilder( + internal var rex: Rex? = null, +) { + internal fun rex(rex: Rex?): RelOpScanIndexedBuilder = this.apply { + this.rex = rex + } + + internal fun build(): Rel.Op.ScanIndexed = Rel.Op.ScanIndexed(rex = rex!!) +} + +internal class RelOpUnpivotBuilder( + internal var rex: Rex? = null, +) { + internal fun rex(rex: Rex?): RelOpUnpivotBuilder = this.apply { + this.rex = rex + } + + internal fun build(): Rel.Op.Unpivot = Rel.Op.Unpivot(rex = rex!!) +} + +internal class RelOpDistinctBuilder( + internal var input: Rel? = null, +) { + internal fun input(input: Rel?): RelOpDistinctBuilder = this.apply { + this.input = input + } + + internal fun build(): Rel.Op.Distinct = Rel.Op.Distinct(input = input!!) +} + +internal class RelOpFilterBuilder( + internal var input: Rel? = null, + internal var predicate: Rex? = null, +) { + internal fun input(input: Rel?): RelOpFilterBuilder = this.apply { + this.input = input + } + + internal fun predicate(predicate: Rex?): RelOpFilterBuilder = this.apply { + this.predicate = predicate + } + + internal fun build(): Rel.Op.Filter = Rel.Op.Filter(input = input!!, predicate = predicate!!) +} + +internal class RelOpSortBuilder( + internal var input: Rel? 
= null, + internal var specs: MutableList = mutableListOf(), +) { + internal fun input(input: Rel?): RelOpSortBuilder = this.apply { + this.input = input + } + + internal fun specs(specs: MutableList): RelOpSortBuilder = this.apply { + this.specs = specs + } + + internal fun build(): Rel.Op.Sort = Rel.Op.Sort(input = input!!, specs = specs) +} + +internal class RelOpSortSpecBuilder( + internal var rex: Rex? = null, + internal var order: Rel.Op.Sort.Order? = null, +) { + internal fun rex(rex: Rex?): RelOpSortSpecBuilder = this.apply { + this.rex = rex + } + + internal fun order(order: Rel.Op.Sort.Order?): RelOpSortSpecBuilder = this.apply { + this.order = order + } + + internal fun build(): Rel.Op.Sort.Spec = Rel.Op.Sort.Spec(rex = rex!!, order = order!!) +} + +internal class RelOpUnionBuilder( + internal var lhs: Rel? = null, + internal var rhs: Rel? = null, +) { + internal fun lhs(lhs: Rel?): RelOpUnionBuilder = this.apply { + this.lhs = lhs + } + + internal fun rhs(rhs: Rel?): RelOpUnionBuilder = this.apply { + this.rhs = rhs + } + + internal fun build(): Rel.Op.Union = Rel.Op.Union(lhs = lhs!!, rhs = rhs!!) +} + +internal class RelOpIntersectBuilder( + internal var lhs: Rel? = null, + internal var rhs: Rel? = null, +) { + internal fun lhs(lhs: Rel?): RelOpIntersectBuilder = this.apply { + this.lhs = lhs + } + + internal fun rhs(rhs: Rel?): RelOpIntersectBuilder = this.apply { + this.rhs = rhs + } + + internal fun build(): Rel.Op.Intersect = Rel.Op.Intersect(lhs = lhs!!, rhs = rhs!!) +} + +internal class RelOpExceptBuilder( + internal var lhs: Rel? = null, + internal var rhs: Rel? = null, +) { + internal fun lhs(lhs: Rel?): RelOpExceptBuilder = this.apply { + this.lhs = lhs + } + + internal fun rhs(rhs: Rel?): RelOpExceptBuilder = this.apply { + this.rhs = rhs + } + + internal fun build(): Rel.Op.Except = Rel.Op.Except(lhs = lhs!!, rhs = rhs!!) +} + +internal class RelOpLimitBuilder( + internal var input: Rel? = null, + internal var limit: Rex? 
= null, +) { + internal fun input(input: Rel?): RelOpLimitBuilder = this.apply { + this.input = input + } + + internal fun limit(limit: Rex?): RelOpLimitBuilder = this.apply { + this.limit = limit + } + + internal fun build(): Rel.Op.Limit = Rel.Op.Limit(input = input!!, limit = limit!!) +} + +internal class RelOpOffsetBuilder( + internal var input: Rel? = null, + internal var offset: Rex? = null, +) { + internal fun input(input: Rel?): RelOpOffsetBuilder = this.apply { + this.input = input + } + + internal fun offset(offset: Rex?): RelOpOffsetBuilder = this.apply { + this.offset = offset + } + + internal fun build(): Rel.Op.Offset = Rel.Op.Offset(input = input!!, offset = offset!!) +} + +internal class RelOpProjectBuilder( + internal var input: Rel? = null, + internal var projections: MutableList = mutableListOf(), +) { + internal fun input(input: Rel?): RelOpProjectBuilder = this.apply { + this.input = input + } + + internal fun projections(projections: MutableList): RelOpProjectBuilder = this.apply { + this.projections = projections + } + + internal fun build(): Rel.Op.Project = Rel.Op.Project(input = input!!, projections = projections) +} + +internal class RelOpJoinBuilder( + internal var lhs: Rel? = null, + internal var rhs: Rel? = null, + internal var rex: Rex? = null, + internal var type: Rel.Op.Join.Type? = null, +) { + internal fun lhs(lhs: Rel?): RelOpJoinBuilder = this.apply { + this.lhs = lhs + } + + internal fun rhs(rhs: Rel?): RelOpJoinBuilder = this.apply { + this.rhs = rhs + } + + internal fun rex(rex: Rex?): RelOpJoinBuilder = this.apply { + this.rex = rex + } + + internal fun type(type: Rel.Op.Join.Type?): RelOpJoinBuilder = this.apply { + this.type = type + } + + internal fun build(): Rel.Op.Join = Rel.Op.Join( + lhs = lhs!!, rhs = rhs!!, rex = rex!!, + type = + type!! + ) +} + +internal class RelOpAggregateBuilder( + internal var input: Rel? = null, + internal var strategy: Rel.Op.Aggregate.Strategy? 
= null, + internal var calls: MutableList = mutableListOf(), + internal var groups: MutableList = mutableListOf(), +) { + internal fun input(input: Rel?): RelOpAggregateBuilder = this.apply { + this.input = input + } + + internal fun strategy(strategy: Rel.Op.Aggregate.Strategy?): RelOpAggregateBuilder = this.apply { + this.strategy = strategy + } + + internal fun calls(calls: MutableList): RelOpAggregateBuilder = this.apply { + this.calls = calls + } + + internal fun groups(groups: MutableList): RelOpAggregateBuilder = this.apply { + this.groups = groups + } + + internal fun build(): Rel.Op.Aggregate = Rel.Op.Aggregate( + input = input!!, strategy = strategy!!, + calls = calls, groups = groups + ) +} + +internal class RelOpAggregateCallBuilder( + internal var agg: Agg? = null, + internal var args: MutableList = mutableListOf(), +) { + internal fun agg(agg: Agg?): RelOpAggregateCallBuilder = this.apply { + this.agg = agg + } + + internal fun args(args: MutableList): RelOpAggregateCallBuilder = this.apply { + this.args = args + } + + internal fun build(): Rel.Op.Aggregate.Call = Rel.Op.Aggregate.Call(agg = agg!!, args = args) +} + +internal class RelOpExcludeBuilder( + internal var input: Rel? = null, + internal var items: MutableList = mutableListOf(), +) { + internal fun input(input: Rel?): RelOpExcludeBuilder = this.apply { + this.input = input + } + + internal fun items(items: MutableList): RelOpExcludeBuilder = this.apply { + this.items = items + } + + internal fun build(): Rel.Op.Exclude = Rel.Op.Exclude(input = input!!, items = items) +} + +internal class RelOpExcludeItemBuilder( + internal var root: Identifier.Symbol? 
= null, + internal var steps: MutableList = mutableListOf(), +) { + internal fun root(root: Identifier.Symbol?): RelOpExcludeItemBuilder = this.apply { + this.root = root + } + + internal fun steps(steps: MutableList): RelOpExcludeItemBuilder = this.apply { + this.steps = steps + } + + internal fun build(): Rel.Op.Exclude.Item = Rel.Op.Exclude.Item(root = root!!, steps = steps) +} + +internal class RelOpExcludeStepAttrBuilder( + internal var symbol: Identifier.Symbol? = null, +) { + internal fun symbol(symbol: Identifier.Symbol?): RelOpExcludeStepAttrBuilder = this.apply { + this.symbol = symbol + } + + internal fun build(): Rel.Op.Exclude.Step.Attr = Rel.Op.Exclude.Step.Attr(symbol = symbol!!) +} + +internal class RelOpExcludeStepPosBuilder( + internal var index: Int? = null, +) { + internal fun index(index: Int?): RelOpExcludeStepPosBuilder = this.apply { + this.index = index + } + + internal fun build(): Rel.Op.Exclude.Step.Pos = Rel.Op.Exclude.Step.Pos(index = index!!) +} + +internal class RelOpExcludeStepStructWildcardBuilder() { + internal fun build(): Rel.Op.Exclude.Step.StructWildcard = Rel.Op.Exclude.Step.StructWildcard() +} + +internal class RelOpExcludeStepCollectionWildcardBuilder() { + internal fun build(): Rel.Op.Exclude.Step.CollectionWildcard = + Rel.Op.Exclude.Step.CollectionWildcard() +} + +internal class RelOpErrBuilder( + internal var message: String? = null, +) { + internal fun message(message: String?): RelOpErrBuilder = this.apply { + this.message = message + } + + internal fun build(): Rel.Op.Err = Rel.Op.Err(message = message!!) +} + +internal class RelBindingBuilder( + internal var name: String? = null, + internal var type: StaticType? = null, +) { + internal fun name(name: String?): RelBindingBuilder = this.apply { + this.name = name + } + + internal fun type(type: StaticType?): RelBindingBuilder = this.apply { + this.type = type + } + + internal fun build(): Rel.Binding = Rel.Binding(name = name!!, type = type!!) 
+} diff --git a/partiql-planner/src/main/kotlin/org/partiql/planner/internal/ir/util/PlanRewriter.kt b/partiql-planner/src/main/kotlin/org/partiql/planner/internal/ir/util/PlanRewriter.kt new file mode 100644 index 000000000..0aae2cd0d --- /dev/null +++ b/partiql-planner/src/main/kotlin/org/partiql/planner/internal/ir/util/PlanRewriter.kt @@ -0,0 +1,588 @@ +@file:Suppress( + "UNUSED_PARAMETER", + "UNUSED_VARIABLE", +) @file:OptIn(PartiQLValueExperimental::class) + +package org.partiql.planner.internal.ir.util + +import org.partiql.planner.internal.ir.Agg +import org.partiql.planner.internal.ir.Fn +import org.partiql.planner.internal.ir.Global +import org.partiql.planner.internal.ir.Identifier +import org.partiql.planner.internal.ir.PartiQLPlan +import org.partiql.planner.internal.ir.PlanNode +import org.partiql.planner.internal.ir.Rel +import org.partiql.planner.internal.ir.Rex +import org.partiql.planner.internal.ir.Statement +import org.partiql.planner.internal.ir.visitor.PlanBaseVisitor +import org.partiql.value.PartiQLValueExperimental + +internal abstract class PlanRewriter : PlanBaseVisitor() { + + override fun defaultReturn(node: PlanNode, ctx: C): PlanNode = node + + private inline fun _visitList( + nodes: List, + ctx: C, + method: (node: T, ctx: C) -> PlanNode, + ): List { + if (nodes.isEmpty()) return nodes + var diff = false + val transformed = ArrayList(nodes.size) + nodes.forEach { + val n = method(it, ctx) as T + if (it !== n) diff = true + transformed.add(n) + } + return if (diff) transformed else nodes + } + + private inline fun _visitListNull( + nodes: List, + ctx: C, + method: (node: T, ctx: C) -> PlanNode, + ): List { + if (nodes.isEmpty()) return nodes + var diff = false + val transformed = ArrayList(nodes.size) + nodes.forEach { + val n = if (it == null) null else method(it, ctx) as T + if (it !== n) diff = true + transformed.add(n) + } + return if (diff) transformed else nodes + } + + private inline fun _visitSet( + nodes: Set, + ctx: C, + 
method: (node: T, ctx: C) -> PlanNode, + ): Set { + if (nodes.isEmpty()) return nodes + var diff = false + val transformed = HashSet(nodes.size) + nodes.forEach { + val n = method(it, ctx) as T + if (it !== n) diff = true + transformed.add(n) + } + return if (diff) transformed else nodes + } + + private inline fun _visitSetNull( + nodes: Set, + ctx: C, + method: (node: T, ctx: C) -> PlanNode, + ): Set { + if (nodes.isEmpty()) return nodes + var diff = false + val transformed = HashSet(nodes.size) + nodes.forEach { + val n = if (it == null) null else method(it, ctx) as T + if (it !== n) diff = true + transformed.add(n) + } + return if (diff) transformed else nodes + } + + override fun visitPartiQLPlan(node: PartiQLPlan, ctx: C): PlanNode { + val version = node.version + val globals = _visitList(node.globals, ctx, ::visitGlobal) + val statement = visitStatement(node.statement, ctx) as Statement + return if (version !== node.version || globals !== node.globals || statement !== node.statement) { + PartiQLPlan(version, globals, statement) + } else { + node + } + } + + override fun visitGlobal(node: Global, ctx: C): PlanNode { + val path = visitIdentifierQualified(node.path, ctx) as Identifier.Qualified + val type = node.type + return if (path !== node.path || type !== node.type) { + Global(path, type) + } else { + node + } + } + + override fun visitFnResolved(node: Fn.Resolved, ctx: C): PlanNode { + val signature = node.signature + return node + } + + override fun visitFnUnresolved(node: Fn.Unresolved, ctx: C): PlanNode { + val identifier = visitIdentifier(node.identifier, ctx) as Identifier + val isHidden = node.isHidden + return if (identifier !== node.identifier || isHidden !== node.isHidden) { + Fn.Unresolved(identifier, isHidden) + } else { + node + } + } + + override fun visitAggResolved(node: Agg.Resolved, ctx: C): PlanNode { + val signature = node.signature + return node + } + + override fun visitAggUnresolved(node: Agg.Unresolved, ctx: C): PlanNode { + val 
identifier = visitIdentifier(node.identifier, ctx) as Identifier + return if (identifier !== node.identifier) { + Agg.Unresolved(identifier) + } else { + node + } + } + + override fun visitStatementQuery(node: Statement.Query, ctx: C): PlanNode { + val root = visitRex(node.root, ctx) as Rex + return if (root !== node.root) { + Statement.Query(root) + } else { + node + } + } + + override fun visitIdentifierSymbol(node: Identifier.Symbol, ctx: C): PlanNode { + val symbol = node.symbol + val caseSensitivity = node.caseSensitivity + return node + } + + override fun visitIdentifierQualified(node: Identifier.Qualified, ctx: C): PlanNode { + val root = visitIdentifierSymbol(node.root, ctx) as Identifier.Symbol + val steps = _visitList(node.steps, ctx, ::visitIdentifierSymbol) + return if (root !== node.root || steps !== node.steps) { + Identifier.Qualified(root, steps) + } else { + node + } + } + + override fun visitRex(node: Rex, ctx: C): PlanNode { + val type = node.type + val op = visitRexOp(node.op, ctx) as Rex.Op + return if (type !== node.type || op !== node.op) { + Rex(type, op) + } else { + node + } + } + + @OptIn(PartiQLValueExperimental::class) + override fun visitRexOpLit(node: Rex.Op.Lit, ctx: C): PlanNode { + val value = node.value + return node + } + + override fun visitRexOpVarResolved(node: Rex.Op.Var.Resolved, ctx: C): PlanNode { + val ref = node.ref + return node + } + + override fun visitRexOpVarUnresolved(node: Rex.Op.Var.Unresolved, ctx: C): PlanNode { + val identifier = visitIdentifier(node.identifier, ctx) as Identifier + val scope = node.scope + return if (identifier !== node.identifier || scope !== node.scope) { + Rex.Op.Var.Unresolved(identifier, scope) + } else { + node + } + } + + override fun visitRexOpGlobal(node: Rex.Op.Global, ctx: C): PlanNode { + val ref = node.ref + return node + } + + override fun visitRexOpPath(node: Rex.Op.Path, ctx: C): PlanNode { + val root = visitRex(node.root, ctx) as Rex + val steps = _visitList(node.steps, ctx, 
::visitRexOpPathStep) + return if (root !== node.root || steps !== node.steps) { + Rex.Op.Path(root, steps) + } else { + node + } + } + + override fun visitRexOpPathStepIndex(node: Rex.Op.Path.Step.Index, ctx: C): PlanNode { + val key = visitRex(node.key, ctx) as Rex + return if (key !== node.key) { + Rex.Op.Path.Step.Index(key) + } else { + node + } + } + + override fun visitRexOpPathStepSymbol(node: Rex.Op.Path.Step.Symbol, ctx: C): PlanNode { + val identifier = visitIdentifierSymbol(node.identifier, ctx) as Identifier.Symbol + return if (identifier !== node.identifier) { + Rex.Op.Path.Step.Symbol(identifier) + } else { + node + } + } + + override fun visitRexOpPathStepWildcard(node: Rex.Op.Path.Step.Wildcard, ctx: C): PlanNode = node + + override fun visitRexOpPathStepUnpivot(node: Rex.Op.Path.Step.Unpivot, ctx: C): PlanNode = node + + override fun visitRexOpCallStatic(node: Rex.Op.Call.Static, ctx: C): PlanNode { + val fn = visitFn(node.fn, ctx) as Fn + val args = _visitList(node.args, ctx, ::visitRex) + return if (fn !== node.fn || args !== node.args) { + Rex.Op.Call.Static(fn, args) + } else { + node + } + } + + override fun visitRexOpCallDynamic(node: Rex.Op.Call.Dynamic, ctx: C): PlanNode { + val args = _visitList(node.args, ctx, ::visitRex) + val candidates = _visitList(node.candidates, ctx, ::visitRexOpCallDynamicCandidate) + return if (args !== node.args || candidates !== node.candidates) { + Rex.Op.Call.Dynamic(args, candidates) + } else { + node + } + } + + override fun visitRexOpCallDynamicCandidate(node: Rex.Op.Call.Dynamic.Candidate, ctx: C): PlanNode { + val fn = visitFnResolved(node.fn, ctx) as Fn.Resolved + val coercions = _visitListNull(node.coercions, ctx, ::visitFnResolved) + return if (fn !== node.fn || coercions !== node.coercions) { + Rex.Op.Call.Dynamic.Candidate(fn, coercions) + } else { + node + } + } + + override fun visitRexOpCase(node: Rex.Op.Case, ctx: C): PlanNode { + val branches = _visitList(node.branches, ctx, 
::visitRexOpCaseBranch) + val default = visitRex(node.default, ctx) as Rex + return if (branches !== node.branches || default !== node.default) { + Rex.Op.Case(branches, default) + } else { + node + } + } + + override fun visitRexOpCaseBranch(node: Rex.Op.Case.Branch, ctx: C): PlanNode { + val condition = visitRex(node.condition, ctx) as Rex + val rex = visitRex(node.rex, ctx) as Rex + return if (condition !== node.condition || rex !== node.rex) { + Rex.Op.Case.Branch(condition, rex) + } else { + node + } + } + + override fun visitRexOpCollection(node: Rex.Op.Collection, ctx: C): PlanNode { + val values = _visitList(node.values, ctx, ::visitRex) + return if (values !== node.values) { + Rex.Op.Collection(values) + } else { + node + } + } + + override fun visitRexOpStruct(node: Rex.Op.Struct, ctx: C): PlanNode { + val fields = _visitList(node.fields, ctx, ::visitRexOpStructField) + return if (fields !== node.fields) { + Rex.Op.Struct(fields) + } else { + node + } + } + + override fun visitRexOpStructField(node: Rex.Op.Struct.Field, ctx: C): PlanNode { + val k = visitRex(node.k, ctx) as Rex + val v = visitRex(node.v, ctx) as Rex + return if (k !== node.k || v !== node.v) { + Rex.Op.Struct.Field(k, v) + } else { + node + } + } + + override fun visitRexOpPivot(node: Rex.Op.Pivot, ctx: C): PlanNode { + val key = visitRex(node.key, ctx) as Rex + val value = visitRex(node.value, ctx) as Rex + val rel = visitRel(node.rel, ctx) as Rel + return if (key !== node.key || value !== node.value || rel !== node.rel) { + Rex.Op.Pivot(key, value, rel) + } else { + node + } + } + + override fun visitRexOpSubquery(node: Rex.Op.Subquery, ctx: C): PlanNode { + val select = visitRexOpSelect(node.select, ctx) as Rex.Op.Select + val coercion = node.coercion + return if (select !== node.select || coercion !== node.coercion) { + Rex.Op.Subquery(select, coercion) + } else { + node + } + } + + override fun visitRexOpSelect(node: Rex.Op.Select, ctx: C): PlanNode { + val constructor = 
visitRex(node.constructor, ctx) as Rex + val rel = visitRel(node.rel, ctx) as Rel + return if (constructor !== node.constructor || rel !== node.rel) { + Rex.Op.Select(constructor, rel) + } else { + node + } + } + + override fun visitRexOpTupleUnion(node: Rex.Op.TupleUnion, ctx: C): PlanNode { + val args = _visitList(node.args, ctx, ::visitRex) + return if (args !== node.args) { + Rex.Op.TupleUnion(args) + } else { + node + } + } + + override fun visitRexOpErr(node: Rex.Op.Err, ctx: C): PlanNode { + val message = node.message + return node + } + + override fun visitRel(node: Rel, ctx: C): PlanNode { + val type = visitRelType(node.type, ctx) as Rel.Type + val op = visitRelOp(node.op, ctx) as Rel.Op + return if (type !== node.type || op !== node.op) { + Rel(type, op) + } else { + node + } + } + + override fun visitRelType(node: Rel.Type, ctx: C): PlanNode { + val schema = _visitList(node.schema, ctx, ::visitRelBinding) + val props = node.props + return if (schema !== node.schema || props !== node.props) { + Rel.Type(schema, props) + } else { + node + } + } + + override fun visitRelOpScan(node: Rel.Op.Scan, ctx: C): PlanNode { + val rex = visitRex(node.rex, ctx) as Rex + return if (rex !== node.rex) { + Rel.Op.Scan(rex) + } else { + node + } + } + + override fun visitRelOpScanIndexed(node: Rel.Op.ScanIndexed, ctx: C): PlanNode { + val rex = visitRex(node.rex, ctx) as Rex + return if (rex !== node.rex) { + Rel.Op.ScanIndexed(rex) + } else { + node + } + } + + override fun visitRelOpUnpivot(node: Rel.Op.Unpivot, ctx: C): PlanNode { + val rex = visitRex(node.rex, ctx) as Rex + return if (rex !== node.rex) { + Rel.Op.Unpivot(rex) + } else { + node + } + } + + override fun visitRelOpDistinct(node: Rel.Op.Distinct, ctx: C): PlanNode { + val input = visitRel(node.input, ctx) as Rel + return if (input !== node.input) { + Rel.Op.Distinct(input) + } else { + node + } + } + + override fun visitRelOpFilter(node: Rel.Op.Filter, ctx: C): PlanNode { + val input = visitRel(node.input, 
ctx) as Rel + val predicate = visitRex(node.predicate, ctx) as Rex + return if (input !== node.input || predicate !== node.predicate) { + Rel.Op.Filter(input, predicate) + } else { + node + } + } + + override fun visitRelOpSort(node: Rel.Op.Sort, ctx: C): PlanNode { + val input = visitRel(node.input, ctx) as Rel + val specs = _visitList(node.specs, ctx, ::visitRelOpSortSpec) + return if (input !== node.input || specs !== node.specs) { + Rel.Op.Sort(input, specs) + } else { + node + } + } + + override fun visitRelOpSortSpec(node: Rel.Op.Sort.Spec, ctx: C): PlanNode { + val rex = visitRex(node.rex, ctx) as Rex + val order = node.order + return if (rex !== node.rex || order !== node.order) { + Rel.Op.Sort.Spec(rex, order) + } else { + node + } + } + + override fun visitRelOpUnion(node: Rel.Op.Union, ctx: C): PlanNode { + val lhs = visitRel(node.lhs, ctx) as Rel + val rhs = visitRel(node.rhs, ctx) as Rel + return if (lhs !== node.lhs || rhs !== node.rhs) { + Rel.Op.Union(lhs, rhs) + } else { + node + } + } + + override fun visitRelOpIntersect(node: Rel.Op.Intersect, ctx: C): PlanNode { + val lhs = visitRel(node.lhs, ctx) as Rel + val rhs = visitRel(node.rhs, ctx) as Rel + return if (lhs !== node.lhs || rhs !== node.rhs) { + Rel.Op.Intersect(lhs, rhs) + } else { + node + } + } + + override fun visitRelOpExcept(node: Rel.Op.Except, ctx: C): PlanNode { + val lhs = visitRel(node.lhs, ctx) as Rel + val rhs = visitRel(node.rhs, ctx) as Rel + return if (lhs !== node.lhs || rhs !== node.rhs) { + Rel.Op.Except(lhs, rhs) + } else { + node + } + } + + override fun visitRelOpLimit(node: Rel.Op.Limit, ctx: C): PlanNode { + val input = visitRel(node.input, ctx) as Rel + val limit = visitRex(node.limit, ctx) as Rex + return if (input !== node.input || limit !== node.limit) { + Rel.Op.Limit(input, limit) + } else { + node + } + } + + override fun visitRelOpOffset(node: Rel.Op.Offset, ctx: C): PlanNode { + val input = visitRel(node.input, ctx) as Rel + val offset = 
visitRex(node.offset, ctx) as Rex + return if (input !== node.input || offset !== node.offset) { + Rel.Op.Offset(input, offset) + } else { + node + } + } + + override fun visitRelOpProject(node: Rel.Op.Project, ctx: C): PlanNode { + val input = visitRel(node.input, ctx) as Rel + val projections = _visitList(node.projections, ctx, ::visitRex) + return if (input !== node.input || projections !== node.projections) { + Rel.Op.Project(input, projections) + } else { + node + } + } + + override fun visitRelOpJoin(node: Rel.Op.Join, ctx: C): PlanNode { + val lhs = visitRel(node.lhs, ctx) as Rel + val rhs = visitRel(node.rhs, ctx) as Rel + val rex = visitRex(node.rex, ctx) as Rex + val type = node.type + return if (lhs !== node.lhs || rhs !== node.rhs || rex !== node.rex || type !== node.type) { + Rel.Op.Join(lhs, rhs, rex, type) + } else { + node + } + } + + override fun visitRelOpAggregate(node: Rel.Op.Aggregate, ctx: C): PlanNode { + val input = visitRel(node.input, ctx) as Rel + val strategy = node.strategy + val calls = _visitList(node.calls, ctx, ::visitRelOpAggregateCall) + val groups = _visitList(node.groups, ctx, ::visitRex) + return if (input !== node.input || strategy !== node.strategy || calls !== node.calls || groups !== node.groups) { + Rel.Op.Aggregate(input, strategy, calls, groups) + } else { + node + } + } + + override fun visitRelOpAggregateCall(node: Rel.Op.Aggregate.Call, ctx: C): PlanNode { + val agg = visitAgg(node.agg, ctx) as Agg + val args = _visitList(node.args, ctx, ::visitRex) + return if (agg !== node.agg || args !== node.args) { + Rel.Op.Aggregate.Call(agg, args) + } else { + node + } + } + + override fun visitRelOpExclude(node: Rel.Op.Exclude, ctx: C): PlanNode { + val input = visitRel(node.input, ctx) as Rel + val items = _visitList(node.items, ctx, ::visitRelOpExcludeItem) + return if (input !== node.input || items !== node.items) { + Rel.Op.Exclude(input, items) + } else { + node + } + } + + override fun visitRelOpExcludeItem(node: 
Rel.Op.Exclude.Item, ctx: C): PlanNode { + val root = visitIdentifierSymbol(node.root, ctx) as Identifier.Symbol + val steps = _visitList(node.steps, ctx, ::visitRelOpExcludeStep) + return if (root !== node.root || steps !== node.steps) { + Rel.Op.Exclude.Item(root, steps) + } else { + node + } + } + + override fun visitRelOpExcludeStepAttr(node: Rel.Op.Exclude.Step.Attr, ctx: C): PlanNode { + val symbol = visitIdentifierSymbol(node.symbol, ctx) as Identifier.Symbol + return if (symbol !== node.symbol) { + Rel.Op.Exclude.Step.Attr(symbol) + } else { + node + } + } + + override fun visitRelOpExcludeStepPos(node: Rel.Op.Exclude.Step.Pos, ctx: C): PlanNode { + val index = node.index + return node + } + + override fun visitRelOpExcludeStepStructWildcard( + node: Rel.Op.Exclude.Step.StructWildcard, + ctx: C, + ): PlanNode = node + + override fun visitRelOpExcludeStepCollectionWildcard( + node: Rel.Op.Exclude.Step.CollectionWildcard, + ctx: C, + ): PlanNode = node + + override fun visitRelOpErr(node: Rel.Op.Err, ctx: C): PlanNode { + val message = node.message + return node + } + + override fun visitRelBinding(node: Rel.Binding, ctx: C): PlanNode { + val name = node.name + val type = node.type + return node + } +} diff --git a/partiql-planner/src/main/kotlin/org/partiql/planner/internal/ir/visitor/PlanBaseVisitor.kt b/partiql-planner/src/main/kotlin/org/partiql/planner/internal/ir/visitor/PlanBaseVisitor.kt new file mode 100644 index 000000000..afe8dac28 --- /dev/null +++ b/partiql-planner/src/main/kotlin/org/partiql/planner/internal/ir/visitor/PlanBaseVisitor.kt @@ -0,0 +1,274 @@ +@file:OptIn(PartiQLValueExperimental::class) + +package org.partiql.planner.internal.ir.visitor + +import org.partiql.planner.internal.ir.Agg +import org.partiql.planner.internal.ir.Fn +import org.partiql.planner.internal.ir.Global +import org.partiql.planner.internal.ir.Identifier +import org.partiql.planner.internal.ir.PartiQLPlan +import org.partiql.planner.internal.ir.PlanNode +import 
org.partiql.planner.internal.ir.Rel +import org.partiql.planner.internal.ir.Rex +import org.partiql.planner.internal.ir.Statement +import org.partiql.value.PartiQLValueExperimental + +internal abstract class PlanBaseVisitor : PlanVisitor { + override fun visit(node: PlanNode, ctx: C): R = node.accept(this, ctx) + + override fun visitPartiQLPlan(node: PartiQLPlan, ctx: C): R = defaultVisit(node, ctx) + + override fun visitGlobal(node: Global, ctx: C): R = defaultVisit(node, ctx) + + override fun visitFn(node: Fn, ctx: C): R = when (node) { + is Fn.Resolved -> visitFnResolved(node, ctx) + is Fn.Unresolved -> visitFnUnresolved(node, ctx) + } + + override fun visitFnResolved(node: Fn.Resolved, ctx: C): R = defaultVisit(node, ctx) + + override fun visitFnUnresolved(node: Fn.Unresolved, ctx: C): R = defaultVisit(node, ctx) + + override fun visitAgg(node: Agg, ctx: C): R = when (node) { + is Agg.Resolved -> visitAggResolved(node, ctx) + is Agg.Unresolved -> visitAggUnresolved(node, ctx) + } + + override fun visitAggResolved(node: Agg.Resolved, ctx: C): R = defaultVisit(node, ctx) + + override fun visitAggUnresolved(node: Agg.Unresolved, ctx: C): R = defaultVisit(node, ctx) + + override fun visitStatement(node: Statement, ctx: C): R = when (node) { + is Statement.Query -> visitStatementQuery(node, ctx) + } + + override fun visitStatementQuery(node: Statement.Query, ctx: C): R = defaultVisit( + node, + ctx + ) + + override fun visitIdentifier(node: Identifier, ctx: C): R = when (node) { + is Identifier.Symbol -> visitIdentifierSymbol(node, ctx) + is Identifier.Qualified -> visitIdentifierQualified(node, ctx) + } + + override fun visitIdentifierSymbol(node: Identifier.Symbol, ctx: C): R = defaultVisit( + node, + ctx + ) + + override fun visitIdentifierQualified(node: Identifier.Qualified, ctx: C): R = + defaultVisit(node, ctx) + + override fun visitRex(node: Rex, ctx: C): R = defaultVisit(node, ctx) + + override fun visitRexOp(node: Rex.Op, ctx: C): R = when (node) { + is 
Rex.Op.Lit -> visitRexOpLit(node, ctx) + is Rex.Op.Var -> visitRexOpVar(node, ctx) + is Rex.Op.Global -> visitRexOpGlobal(node, ctx) + is Rex.Op.Path -> visitRexOpPath(node, ctx) + is Rex.Op.Call -> visitRexOpCall(node, ctx) + is Rex.Op.Case -> visitRexOpCase(node, ctx) + is Rex.Op.Collection -> visitRexOpCollection(node, ctx) + is Rex.Op.Struct -> visitRexOpStruct(node, ctx) + is Rex.Op.Pivot -> visitRexOpPivot(node, ctx) + is Rex.Op.Subquery -> visitRexOpSubquery(node, ctx) + is Rex.Op.Select -> visitRexOpSelect(node, ctx) + is Rex.Op.TupleUnion -> visitRexOpTupleUnion(node, ctx) + is Rex.Op.Err -> visitRexOpErr(node, ctx) + } + + override fun visitRexOpLit(node: Rex.Op.Lit, ctx: C): R = defaultVisit(node, ctx) + + override fun visitRexOpVar(node: Rex.Op.Var, ctx: C): R = when (node) { + is Rex.Op.Var.Resolved -> visitRexOpVarResolved(node, ctx) + is Rex.Op.Var.Unresolved -> visitRexOpVarUnresolved(node, ctx) + } + + override fun visitRexOpVarResolved(node: Rex.Op.Var.Resolved, ctx: C): R = + defaultVisit(node, ctx) + + override fun visitRexOpVarUnresolved(node: Rex.Op.Var.Unresolved, ctx: C): R = + defaultVisit(node, ctx) + + override fun visitRexOpGlobal(node: Rex.Op.Global, ctx: C): R = defaultVisit(node, ctx) + + override fun visitRexOpPath(node: Rex.Op.Path, ctx: C): R = defaultVisit(node, ctx) + + override fun visitRexOpPathStep(node: Rex.Op.Path.Step, ctx: C): R = when (node) { + is Rex.Op.Path.Step.Index -> visitRexOpPathStepIndex(node, ctx) + is Rex.Op.Path.Step.Key -> visitRexOpPathStepKey(node, ctx) + is Rex.Op.Path.Step.Symbol -> visitRexOpPathStepSymbol(node, ctx) + is Rex.Op.Path.Step.Wildcard -> visitRexOpPathStepWildcard(node, ctx) + is Rex.Op.Path.Step.Unpivot -> visitRexOpPathStepUnpivot(node, ctx) + } + + override fun visitRexOpPathStepIndex(node: Rex.Op.Path.Step.Index, ctx: C): R = + defaultVisit(node, ctx) + + override fun visitRexOpPathStepKey(node: Rex.Op.Path.Step.Key, ctx: C): R = + defaultVisit(node, ctx) + + override fun 
visitRexOpPathStepSymbol(node: Rex.Op.Path.Step.Symbol, ctx: C): R = + defaultVisit(node, ctx) + + override fun visitRexOpPathStepWildcard(node: Rex.Op.Path.Step.Wildcard, ctx: C): R = + defaultVisit(node, ctx) + + override fun visitRexOpPathStepUnpivot(node: Rex.Op.Path.Step.Unpivot, ctx: C): R = + defaultVisit(node, ctx) + + override fun visitRexOpCall(node: Rex.Op.Call, ctx: C): R = when (node) { + is Rex.Op.Call.Static -> visitRexOpCallStatic(node, ctx) + is Rex.Op.Call.Dynamic -> visitRexOpCallDynamic(node, ctx) + } + + override fun visitRexOpCallStatic(node: Rex.Op.Call.Static, ctx: C): R = defaultVisit( + node, + ctx + ) + + override fun visitRexOpCallDynamic(node: Rex.Op.Call.Dynamic, ctx: C): R = + defaultVisit(node, ctx) + + override fun visitRexOpCallDynamicCandidate(node: Rex.Op.Call.Dynamic.Candidate, ctx: C): R = + defaultVisit(node, ctx) + + override fun visitRexOpCase(node: Rex.Op.Case, ctx: C): R = defaultVisit(node, ctx) + + override fun visitRexOpCaseBranch(node: Rex.Op.Case.Branch, ctx: C): R = defaultVisit( + node, + ctx + ) + + override fun visitRexOpCollection(node: Rex.Op.Collection, ctx: C): R = defaultVisit( + node, + ctx + ) + + override fun visitRexOpStruct(node: Rex.Op.Struct, ctx: C): R = defaultVisit(node, ctx) + + override fun visitRexOpStructField(node: Rex.Op.Struct.Field, ctx: C): R = + defaultVisit(node, ctx) + + override fun visitRexOpPivot(node: Rex.Op.Pivot, ctx: C): R = defaultVisit(node, ctx) + + override fun visitRexOpSubquery(node: Rex.Op.Subquery, ctx: C): R = defaultVisit(node, ctx) + + override fun visitRexOpSelect(node: Rex.Op.Select, ctx: C): R = defaultVisit(node, ctx) + + override fun visitRexOpTupleUnion(node: Rex.Op.TupleUnion, ctx: C): R = defaultVisit( + node, + ctx + ) + + override fun visitRexOpErr(node: Rex.Op.Err, ctx: C): R = defaultVisit(node, ctx) + + override fun visitRel(node: Rel, ctx: C): R = defaultVisit(node, ctx) + + override fun visitRelType(node: Rel.Type, ctx: C): R = defaultVisit(node, ctx) + + 
override fun visitRelOp(node: Rel.Op, ctx: C): R = when (node) { + is Rel.Op.Scan -> visitRelOpScan(node, ctx) + is Rel.Op.ScanIndexed -> visitRelOpScanIndexed(node, ctx) + is Rel.Op.Unpivot -> visitRelOpUnpivot(node, ctx) + is Rel.Op.Distinct -> visitRelOpDistinct(node, ctx) + is Rel.Op.Filter -> visitRelOpFilter(node, ctx) + is Rel.Op.Sort -> visitRelOpSort(node, ctx) + is Rel.Op.Union -> visitRelOpUnion(node, ctx) + is Rel.Op.Intersect -> visitRelOpIntersect(node, ctx) + is Rel.Op.Except -> visitRelOpExcept(node, ctx) + is Rel.Op.Limit -> visitRelOpLimit(node, ctx) + is Rel.Op.Offset -> visitRelOpOffset(node, ctx) + is Rel.Op.Project -> visitRelOpProject(node, ctx) + is Rel.Op.Join -> visitRelOpJoin(node, ctx) + is Rel.Op.Aggregate -> visitRelOpAggregate(node, ctx) + is Rel.Op.Exclude -> visitRelOpExclude(node, ctx) + is Rel.Op.Err -> visitRelOpErr(node, ctx) + } + + override fun visitRelOpScan(node: Rel.Op.Scan, ctx: C): R = defaultVisit(node, ctx) + + override fun visitRelOpScanIndexed(node: Rel.Op.ScanIndexed, ctx: C): R = + defaultVisit(node, ctx) + + override fun visitRelOpUnpivot(node: Rel.Op.Unpivot, ctx: C): R = defaultVisit(node, ctx) + + override fun visitRelOpDistinct(node: Rel.Op.Distinct, ctx: C): R = defaultVisit(node, ctx) + + override fun visitRelOpFilter(node: Rel.Op.Filter, ctx: C): R = defaultVisit(node, ctx) + + override fun visitRelOpSort(node: Rel.Op.Sort, ctx: C): R = defaultVisit(node, ctx) + + override fun visitRelOpSortSpec(node: Rel.Op.Sort.Spec, ctx: C): R = defaultVisit( + node, + ctx + ) + + override fun visitRelOpUnion(node: Rel.Op.Union, ctx: C): R = defaultVisit(node, ctx) + + override fun visitRelOpIntersect(node: Rel.Op.Intersect, ctx: C): R = defaultVisit( + node, + ctx + ) + + override fun visitRelOpExcept(node: Rel.Op.Except, ctx: C): R = defaultVisit(node, ctx) + + override fun visitRelOpLimit(node: Rel.Op.Limit, ctx: C): R = defaultVisit(node, ctx) + + override fun visitRelOpOffset(node: Rel.Op.Offset, ctx: C): R = 
defaultVisit(node, ctx) + + override fun visitRelOpProject(node: Rel.Op.Project, ctx: C): R = defaultVisit(node, ctx) + + override fun visitRelOpJoin(node: Rel.Op.Join, ctx: C): R = defaultVisit(node, ctx) + + override fun visitRelOpAggregate(node: Rel.Op.Aggregate, ctx: C): R = defaultVisit( + node, + ctx + ) + + override fun visitRelOpAggregateCall(node: Rel.Op.Aggregate.Call, ctx: C): R = + defaultVisit(node, ctx) + + override fun visitRelOpExclude(node: Rel.Op.Exclude, ctx: C): R = defaultVisit(node, ctx) + + override fun visitRelOpExcludeItem(node: Rel.Op.Exclude.Item, ctx: C): R = + defaultVisit(node, ctx) + + override fun visitRelOpExcludeStep(node: Rel.Op.Exclude.Step, ctx: C): R = when (node) { + is Rel.Op.Exclude.Step.Attr -> visitRelOpExcludeStepAttr(node, ctx) + is Rel.Op.Exclude.Step.Pos -> visitRelOpExcludeStepPos(node, ctx) + is Rel.Op.Exclude.Step.StructWildcard -> visitRelOpExcludeStepStructWildcard(node, ctx) + is Rel.Op.Exclude.Step.CollectionWildcard -> visitRelOpExcludeStepCollectionWildcard(node, ctx) + } + + override fun visitRelOpExcludeStepAttr(node: Rel.Op.Exclude.Step.Attr, ctx: C): R = + defaultVisit(node, ctx) + + override fun visitRelOpExcludeStepPos(node: Rel.Op.Exclude.Step.Pos, ctx: C): R = + defaultVisit(node, ctx) + + override fun visitRelOpExcludeStepStructWildcard( + node: Rel.Op.Exclude.Step.StructWildcard, + ctx: C, + ): R = defaultVisit(node, ctx) + + override + fun visitRelOpExcludeStepCollectionWildcard( + node: Rel.Op.Exclude.Step.CollectionWildcard, + ctx: C, + ): R = defaultVisit(node, ctx) + + override fun visitRelOpErr(node: Rel.Op.Err, ctx: C): R = defaultVisit(node, ctx) + + override fun visitRelBinding(node: Rel.Binding, ctx: C): R = defaultVisit(node, ctx) + + internal open fun defaultVisit(node: PlanNode, ctx: C): R { + for (child in node.children) { + child.accept(this, ctx) + } + return defaultReturn(node, ctx) + } + + internal abstract fun defaultReturn(node: PlanNode, ctx: C): R +} diff --git 
a/partiql-planner/src/main/kotlin/org/partiql/planner/internal/ir/visitor/PlanVisitor.kt b/partiql-planner/src/main/kotlin/org/partiql/planner/internal/ir/visitor/PlanVisitor.kt new file mode 100644 index 000000000..f3114e780 --- /dev/null +++ b/partiql-planner/src/main/kotlin/org/partiql/planner/internal/ir/visitor/PlanVisitor.kt @@ -0,0 +1,160 @@ +@file:OptIn(PartiQLValueExperimental::class) + +package org.partiql.planner.internal.ir.visitor + +import org.partiql.planner.internal.ir.Agg +import org.partiql.planner.internal.ir.Fn +import org.partiql.planner.internal.ir.Global +import org.partiql.planner.internal.ir.Identifier +import org.partiql.planner.internal.ir.PartiQLPlan +import org.partiql.planner.internal.ir.PlanNode +import org.partiql.planner.internal.ir.Rel +import org.partiql.planner.internal.ir.Rex +import org.partiql.planner.internal.ir.Statement +import org.partiql.value.PartiQLValueExperimental + +internal interface PlanVisitor { + fun visit(node: PlanNode, ctx: C): R + + fun visitPartiQLPlan(node: PartiQLPlan, ctx: C): R + + fun visitGlobal(node: Global, ctx: C): R + + fun visitFn(node: Fn, ctx: C): R + + fun visitFnResolved(node: Fn.Resolved, ctx: C): R + + fun visitFnUnresolved(node: Fn.Unresolved, ctx: C): R + + fun visitAgg(node: Agg, ctx: C): R + + fun visitAggResolved(node: Agg.Resolved, ctx: C): R + + fun visitAggUnresolved(node: Agg.Unresolved, ctx: C): R + + fun visitStatement(node: Statement, ctx: C): R + + fun visitStatementQuery(node: Statement.Query, ctx: C): R + + fun visitIdentifier(node: Identifier, ctx: C): R + + fun visitIdentifierSymbol(node: Identifier.Symbol, ctx: C): R + + fun visitIdentifierQualified(node: Identifier.Qualified, ctx: C): R + + fun visitRex(node: Rex, ctx: C): R + + fun visitRexOp(node: Rex.Op, ctx: C): R + + fun visitRexOpLit(node: Rex.Op.Lit, ctx: C): R + + fun visitRexOpVar(node: Rex.Op.Var, ctx: C): R + + fun visitRexOpVarResolved(node: Rex.Op.Var.Resolved, ctx: C): R + + fun visitRexOpVarUnresolved(node: 
Rex.Op.Var.Unresolved, ctx: C): R + + fun visitRexOpGlobal(node: Rex.Op.Global, ctx: C): R + + fun visitRexOpPath(node: Rex.Op.Path, ctx: C): R + + fun visitRexOpPathStep(node: Rex.Op.Path.Step, ctx: C): R + + fun visitRexOpPathStepIndex(node: Rex.Op.Path.Step.Index, ctx: C): R + + fun visitRexOpPathStepKey(node: Rex.Op.Path.Step.Key, ctx: C): R + + fun visitRexOpPathStepSymbol(node: Rex.Op.Path.Step.Symbol, ctx: C): R + + fun visitRexOpPathStepWildcard(node: Rex.Op.Path.Step.Wildcard, ctx: C): R + + fun visitRexOpPathStepUnpivot(node: Rex.Op.Path.Step.Unpivot, ctx: C): R + + fun visitRexOpCall(node: Rex.Op.Call, ctx: C): R + + fun visitRexOpCallStatic(node: Rex.Op.Call.Static, ctx: C): R + + fun visitRexOpCallDynamic(node: Rex.Op.Call.Dynamic, ctx: C): R + + fun visitRexOpCallDynamicCandidate(node: Rex.Op.Call.Dynamic.Candidate, ctx: C): R + + fun visitRexOpCase(node: Rex.Op.Case, ctx: C): R + + fun visitRexOpCaseBranch(node: Rex.Op.Case.Branch, ctx: C): R + + fun visitRexOpCollection(node: Rex.Op.Collection, ctx: C): R + + fun visitRexOpStruct(node: Rex.Op.Struct, ctx: C): R + + fun visitRexOpStructField(node: Rex.Op.Struct.Field, ctx: C): R + + fun visitRexOpPivot(node: Rex.Op.Pivot, ctx: C): R + + fun visitRexOpSubquery(node: Rex.Op.Subquery, ctx: C): R + + fun visitRexOpSelect(node: Rex.Op.Select, ctx: C): R + + fun visitRexOpTupleUnion(node: Rex.Op.TupleUnion, ctx: C): R + + fun visitRexOpErr(node: Rex.Op.Err, ctx: C): R + + fun visitRel(node: Rel, ctx: C): R + + fun visitRelType(node: Rel.Type, ctx: C): R + + fun visitRelOp(node: Rel.Op, ctx: C): R + + fun visitRelOpScan(node: Rel.Op.Scan, ctx: C): R + + fun visitRelOpScanIndexed(node: Rel.Op.ScanIndexed, ctx: C): R + + fun visitRelOpUnpivot(node: Rel.Op.Unpivot, ctx: C): R + + fun visitRelOpDistinct(node: Rel.Op.Distinct, ctx: C): R + + fun visitRelOpFilter(node: Rel.Op.Filter, ctx: C): R + + fun visitRelOpSort(node: Rel.Op.Sort, ctx: C): R + + fun visitRelOpSortSpec(node: Rel.Op.Sort.Spec, ctx: C): R + + 
fun visitRelOpUnion(node: Rel.Op.Union, ctx: C): R + + fun visitRelOpIntersect(node: Rel.Op.Intersect, ctx: C): R + + fun visitRelOpExcept(node: Rel.Op.Except, ctx: C): R + + fun visitRelOpLimit(node: Rel.Op.Limit, ctx: C): R + + fun visitRelOpOffset(node: Rel.Op.Offset, ctx: C): R + + fun visitRelOpProject(node: Rel.Op.Project, ctx: C): R + + fun visitRelOpJoin(node: Rel.Op.Join, ctx: C): R + + fun visitRelOpAggregate(node: Rel.Op.Aggregate, ctx: C): R + + fun visitRelOpAggregateCall(node: Rel.Op.Aggregate.Call, ctx: C): R + + fun visitRelOpExclude(node: Rel.Op.Exclude, ctx: C): R + + fun visitRelOpExcludeItem(node: Rel.Op.Exclude.Item, ctx: C): R + + fun visitRelOpExcludeStep(node: Rel.Op.Exclude.Step, ctx: C): R + + fun visitRelOpExcludeStepAttr(node: Rel.Op.Exclude.Step.Attr, ctx: C): R + + fun visitRelOpExcludeStepPos(node: Rel.Op.Exclude.Step.Pos, ctx: C): R + + fun visitRelOpExcludeStepStructWildcard(node: Rel.Op.Exclude.Step.StructWildcard, ctx: C): + R + + fun visitRelOpExcludeStepCollectionWildcard( + node: Rel.Op.Exclude.Step.CollectionWildcard, + ctx: C, + ): R + + fun visitRelOpErr(node: Rel.Op.Err, ctx: C): R + + fun visitRelBinding(node: Rel.Binding, ctx: C): R +} diff --git a/partiql-planner/src/main/kotlin/org/partiql/planner/internal/passes/.gitkeep b/partiql-planner/src/main/kotlin/org/partiql/planner/internal/passes/.gitkeep new file mode 100644 index 000000000..e69de29bb diff --git a/partiql-planner/src/main/kotlin/org/partiql/planner/internal/transforms/AstToPlan.kt b/partiql-planner/src/main/kotlin/org/partiql/planner/internal/transforms/AstToPlan.kt new file mode 100644 index 000000000..050e1bb84 --- /dev/null +++ b/partiql-planner/src/main/kotlin/org/partiql/planner/internal/transforms/AstToPlan.kt @@ -0,0 +1,75 @@ +/* + * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. 
+ * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + * + */ + +package org.partiql.planner.internal.transforms + +import org.partiql.ast.AstNode +import org.partiql.ast.Expr +import org.partiql.ast.visitor.AstBaseVisitor +import org.partiql.planner.internal.Env +import org.partiql.planner.internal.ir.identifierQualified +import org.partiql.planner.internal.ir.identifierSymbol +import org.partiql.planner.internal.ir.statementQuery +import org.partiql.ast.Identifier as AstIdentifier +import org.partiql.ast.Statement as AstStatement +import org.partiql.planner.internal.ir.Identifier as PlanIdentifier +import org.partiql.planner.internal.ir.Statement as PlanStatement + +/** + * Simple translation from AST to an unresolved algebraic IR. 
+ */ +internal object AstToPlan { + + // statement.toPlan() + @JvmStatic + fun apply(statement: AstStatement, env: Env): PlanStatement = statement.accept(ToPlanStatement, env) + + @Suppress("PARAMETER_NAME_CHANGED_ON_OVERRIDE") + private object ToPlanStatement : AstBaseVisitor() { + + override fun defaultReturn(node: AstNode, env: Env) = throw IllegalArgumentException("Unsupported statement") + + override fun visitStatementQuery(node: AstStatement.Query, env: Env): PlanStatement { + val rex = when (val expr = node.expr) { + is Expr.SFW -> RelConverter.apply(expr, env) + else -> RexConverter.apply(expr, env) + } + return statementQuery(rex) + } + } + + // --- Helpers -------------------- + + fun convert(identifier: AstIdentifier): PlanIdentifier = when (identifier) { + is AstIdentifier.Qualified -> convert(identifier) + is AstIdentifier.Symbol -> convert(identifier) + } + + fun convert(identifier: AstIdentifier.Qualified): PlanIdentifier.Qualified { + val root = convert(identifier.root) + val steps = identifier.steps.map { convert(it) } + return identifierQualified(root, steps) + } + + fun convert(identifier: AstIdentifier.Symbol): PlanIdentifier.Symbol { + val symbol = identifier.symbol + val case = when (identifier.caseSensitivity) { + AstIdentifier.CaseSensitivity.SENSITIVE -> PlanIdentifier.CaseSensitivity.SENSITIVE + AstIdentifier.CaseSensitivity.INSENSITIVE -> PlanIdentifier.CaseSensitivity.INSENSITIVE + } + return identifierSymbol(symbol, case) + } +} diff --git a/partiql-planner/src/main/kotlin/org/partiql/planner/internal/transforms/PlanTransform.kt b/partiql-planner/src/main/kotlin/org/partiql/planner/internal/transforms/PlanTransform.kt new file mode 100644 index 000000000..63ffb05af --- /dev/null +++ b/partiql-planner/src/main/kotlin/org/partiql/planner/internal/transforms/PlanTransform.kt @@ -0,0 +1,391 @@ +package org.partiql.planner.internal.transforms + +import org.partiql.errors.ProblemCallback +import org.partiql.plan.PlanNode +import 
org.partiql.plan.partiQLPlan +import org.partiql.plan.rex +import org.partiql.plan.rexOpLit +import org.partiql.plan.rexOpPathStepKey +import org.partiql.plan.rexOpPathStepSymbol +import org.partiql.planner.internal.ir.Agg +import org.partiql.planner.internal.ir.Fn +import org.partiql.planner.internal.ir.Global +import org.partiql.planner.internal.ir.Identifier +import org.partiql.planner.internal.ir.PartiQLPlan +import org.partiql.planner.internal.ir.Rel +import org.partiql.planner.internal.ir.Rex +import org.partiql.planner.internal.ir.Statement +import org.partiql.planner.internal.ir.visitor.PlanBaseVisitor +import org.partiql.types.StaticType +import org.partiql.value.PartiQLValueExperimental +import org.partiql.value.stringValue + +/** + * This is an internal utility to translate from the internal unresolved plan used for typing to the public plan IR. + * At the moment, these data structures are very similar sans-unresolved variants. The internal unresolved plan + * continues to undergo frequent changes as we improve our typing model. This indirection enables a more stable public + * consumable API while guaranteeing resolution safety. + * + * Ideally this class becomes very small as the internal IR will be a thin wrapper over the public API. 
+ */ +internal object PlanTransform : PlanBaseVisitor() { + + override fun defaultReturn(node: org.partiql.planner.internal.ir.PlanNode, ctx: ProblemCallback): PlanNode { + error("Not implemented") + } + + override fun visitPartiQLPlan(node: PartiQLPlan, ctx: ProblemCallback): org.partiql.plan.PartiQLPlan { + val globals = node.globals.map { visitGlobal(it, ctx) } + val statement = visitStatement(node.statement, ctx) + return partiQLPlan(globals, statement) + } + + override fun visitGlobal(node: Global, ctx: ProblemCallback): org.partiql.plan.Global { + val path = visitIdentifierQualified(node.path, ctx) + val type = node.type + return org.partiql.plan.global(path, type) + } + + override fun visitFnResolved(node: Fn.Resolved, ctx: ProblemCallback) = org.partiql.plan.fn(node.signature) + + override fun visitFnUnresolved(node: Fn.Unresolved, ctx: ProblemCallback): org.partiql.plan.Rex.Op { + return org.partiql.plan.Rex.Op.Err("Unresolved function") + } + + override fun visitAgg(node: Agg, ctx: ProblemCallback) = super.visitAgg(node, ctx) as org.partiql.plan.Agg + + override fun visitAggResolved(node: Agg.Resolved, ctx: ProblemCallback) = org.partiql.plan.Agg(node.signature) + + override fun visitAggUnresolved(node: Agg.Unresolved, ctx: ProblemCallback): org.partiql.plan.Rex.Op { + return org.partiql.plan.Rex.Op.Err("Unresolved aggregation") + } + + override fun visitStatement(node: Statement, ctx: ProblemCallback) = + super.visitStatement(node, ctx) as org.partiql.plan.Statement + + override fun visitStatementQuery(node: Statement.Query, ctx: ProblemCallback): org.partiql.plan.Statement.Query { + val root = visitRex(node.root, ctx) + return org.partiql.plan.Statement.Query(root) + } + + override fun visitIdentifier(node: Identifier, ctx: ProblemCallback) = + super.visitIdentifier(node, ctx) as org.partiql.plan.Identifier + + override fun visitIdentifierSymbol(node: Identifier.Symbol, ctx: ProblemCallback) = + org.partiql.plan.Identifier.Symbol( + symbol = 
node.symbol, + caseSensitivity = when (node.caseSensitivity) { + Identifier.CaseSensitivity.SENSITIVE -> org.partiql.plan.Identifier.CaseSensitivity.SENSITIVE + Identifier.CaseSensitivity.INSENSITIVE -> org.partiql.plan.Identifier.CaseSensitivity.INSENSITIVE + } + ) + + override fun visitIdentifierQualified(node: Identifier.Qualified, ctx: ProblemCallback) = + org.partiql.plan.Identifier.Qualified( + root = visitIdentifierSymbol(node.root, ctx), + steps = node.steps.map { visitIdentifierSymbol(it, ctx) } + ) + + // EXPRESSIONS + + override fun visitRex(node: Rex, ctx: ProblemCallback): org.partiql.plan.Rex { + val type = node.type + val op = visitRexOp(node.op, ctx) + return org.partiql.plan.Rex(type, op) + } + + override fun visitRexOp(node: Rex.Op, ctx: ProblemCallback) = super.visitRexOp(node, ctx) as org.partiql.plan.Rex.Op + + @OptIn(PartiQLValueExperimental::class) + override fun visitRexOpLit(node: Rex.Op.Lit, ctx: ProblemCallback) = org.partiql.plan.rexOpLit(node.value) + + override fun visitRexOpVar(node: Rex.Op.Var, ctx: ProblemCallback) = + super.visitRexOpVar(node, ctx) as org.partiql.plan.Rex.Op + + override fun visitRexOpVarResolved(node: Rex.Op.Var.Resolved, ctx: ProblemCallback) = + org.partiql.plan.Rex.Op.Var(node.ref) + + override fun visitRexOpVarUnresolved(node: Rex.Op.Var.Unresolved, ctx: ProblemCallback) = + org.partiql.plan.Rex.Op.Err("Unresolved variable $node") + + override fun visitRexOpGlobal(node: Rex.Op.Global, ctx: ProblemCallback) = org.partiql.plan.Rex.Op.Global(node.ref) + + override fun visitRexOpPath(node: Rex.Op.Path, ctx: ProblemCallback): org.partiql.plan.Rex.Op.Path { + val root = visitRex(node.root, ctx) + val steps = node.steps.map { visitRexOpPathStep(it, ctx) } + return org.partiql.plan.Rex.Op.Path(root, steps) + } + + override fun visitRexOpPathStep(node: Rex.Op.Path.Step, ctx: ProblemCallback) = + super.visit(node, ctx) as org.partiql.plan.Rex.Op.Path.Step + + override fun visitRexOpPathStepIndex(node: 
Rex.Op.Path.Step.Index, ctx: ProblemCallback) = + org.partiql.plan.Rex.Op.Path.Step.Index( + key = visitRex(node.key, ctx), + ) + + @OptIn(PartiQLValueExperimental::class) + override fun visitRexOpPathStepSymbol(node: Rex.Op.Path.Step.Symbol, ctx: ProblemCallback) = when (node.identifier.caseSensitivity) { + Identifier.CaseSensitivity.SENSITIVE -> rexOpPathStepKey(rex(StaticType.STRING, rexOpLit(stringValue(node.identifier.symbol)))) + Identifier.CaseSensitivity.INSENSITIVE -> rexOpPathStepSymbol(node.identifier.symbol) + } + + override fun visitRexOpPathStepKey(node: Rex.Op.Path.Step.Key, ctx: ProblemCallback): PlanNode = rexOpPathStepKey( + key = visitRex(node.key, ctx) + ) + + override fun visitRexOpPathStepWildcard(node: Rex.Op.Path.Step.Wildcard, ctx: ProblemCallback) = + org.partiql.plan.Rex.Op.Path.Step.Wildcard() + + override fun visitRexOpPathStepUnpivot(node: Rex.Op.Path.Step.Unpivot, ctx: ProblemCallback) = + org.partiql.plan.Rex.Op.Path.Step.Unpivot() + + override fun visitRexOpCall(node: Rex.Op.Call, ctx: ProblemCallback) = + super.visitRexOpCall(node, ctx) as org.partiql.plan.Rex.Op + + override fun visitRexOpCallStatic(node: Rex.Op.Call.Static, ctx: ProblemCallback): org.partiql.plan.Rex.Op { + val fn = visitFn(node.fn, ctx) + val args = node.args.map { visitRex(it, ctx) } + return when (fn) { + is org.partiql.plan.Fn -> { + org.partiql.plan.Rex.Op.Call.Static(fn, args) + } + is org.partiql.plan.Rex.Op -> { + // had error + fn + } + else -> { + error("Expected Fn or Err, found $fn") + } + } + } + + override fun visitRexOpCallDynamic(node: Rex.Op.Call.Dynamic, ctx: ProblemCallback): PlanNode { + val candidates = node.candidates.map { + val c = visitRexOpCallDynamicCandidate(it, ctx) + if (c is org.partiql.plan.Rex.Op.Err) return c + c as org.partiql.plan.Rex.Op.Call.Dynamic.Candidate + } + return org.partiql.plan.Rex.Op.Call.Dynamic( + candidates = candidates, + args = node.args.map { visitRex(it, ctx) } + ) + } + + override fun 
visitRexOpCallDynamicCandidate(node: Rex.Op.Call.Dynamic.Candidate, ctx: ProblemCallback): PlanNode { + val fn = visitFn(node.fn, ctx) + if (fn is org.partiql.plan.Rex.Op.Err) return fn + fn as org.partiql.plan.Fn + val coercions = node.coercions.map { + it?.let { + val c = visitFn(it, ctx) + if (c is org.partiql.plan.Rex.Op.Err) return c + c as org.partiql.plan.Fn + } + } + return org.partiql.plan.Rex.Op.Call.Dynamic.Candidate(fn, coercions) + } + + override fun visitRexOpCase(node: Rex.Op.Case, ctx: ProblemCallback) = org.partiql.plan.Rex.Op.Case( + branches = node.branches.map { visitRexOpCaseBranch(it, ctx) }, default = visitRex(node.default, ctx) + ) + + override fun visitRexOpCaseBranch(node: Rex.Op.Case.Branch, ctx: ProblemCallback) = + org.partiql.plan.Rex.Op.Case.Branch( + condition = visitRex(node.condition, ctx), rex = visitRex(node.rex, ctx) + ) + + override fun visitRexOpCollection(node: Rex.Op.Collection, ctx: ProblemCallback) = + org.partiql.plan.Rex.Op.Collection(values = node.values.map { visitRex(it, ctx) }) + + override fun visitRexOpStruct(node: Rex.Op.Struct, ctx: ProblemCallback) = + org.partiql.plan.Rex.Op.Struct(fields = node.fields.map { visitRexOpStructField(it, ctx) }) + + override fun visitRexOpStructField(node: Rex.Op.Struct.Field, ctx: ProblemCallback) = + org.partiql.plan.Rex.Op.Struct.Field( + k = visitRex(node.k, ctx), + v = visitRex(node.v, ctx), + ) + + override fun visitRexOpPivot(node: Rex.Op.Pivot, ctx: ProblemCallback) = org.partiql.plan.Rex.Op.Pivot( + key = visitRex(node.key, ctx), + value = visitRex(node.value, ctx), + rel = visitRel(node.rel, ctx), + ) + + override fun visitRexOpSubquery(node: Rex.Op.Subquery, ctx: ProblemCallback) = org.partiql.plan.Rex.Op.Subquery( + select = visitRexOpSelect(node.select, ctx), + coercion = when (node.coercion) { + Rex.Op.Subquery.Coercion.SCALAR -> org.partiql.plan.Rex.Op.Subquery.Coercion.SCALAR + Rex.Op.Subquery.Coercion.ROW -> org.partiql.plan.Rex.Op.Subquery.Coercion.ROW + } + ) + + 
override fun visitRexOpSelect(node: Rex.Op.Select, ctx: ProblemCallback) = org.partiql.plan.Rex.Op.Select( + constructor = visitRex(node.constructor, ctx), + rel = visitRel(node.rel, ctx), + ) + + override fun visitRexOpTupleUnion(node: Rex.Op.TupleUnion, ctx: ProblemCallback) = + org.partiql.plan.Rex.Op.TupleUnion(args = node.args.map { visitRex(it, ctx) }) + + override fun visitRexOpErr(node: Rex.Op.Err, ctx: ProblemCallback) = org.partiql.plan.Rex.Op.Err(node.message) + + // RELATION OPERATORS + + override fun visitRel(node: Rel, ctx: ProblemCallback) = org.partiql.plan.Rel( + type = visitRelType(node.type, ctx), + op = visitRelOp(node.op, ctx), + ) + + override fun visitRelType(node: Rel.Type, ctx: ProblemCallback) = + org.partiql.plan.Rel.Type( + schema = node.schema.map { visitRelBinding(it, ctx) }, + props = node.props.map { + when (it) { + Rel.Prop.ORDERED -> org.partiql.plan.Rel.Prop.ORDERED + } + }.toSet() + + ) + + override fun visitRelOp(node: Rel.Op, ctx: ProblemCallback) = super.visitRelOp(node, ctx) as org.partiql.plan.Rel.Op + + override fun visitRelOpScan(node: Rel.Op.Scan, ctx: ProblemCallback) = org.partiql.plan.Rel.Op.Scan( + rex = visitRex(node.rex, ctx), + ) + + override fun visitRelOpScanIndexed(node: Rel.Op.ScanIndexed, ctx: ProblemCallback) = + org.partiql.plan.Rel.Op.ScanIndexed( + rex = visitRex(node.rex, ctx), + ) + + override fun visitRelOpUnpivot(node: Rel.Op.Unpivot, ctx: ProblemCallback) = org.partiql.plan.Rel.Op.Unpivot( + rex = visitRex(node.rex, ctx), + ) + + override fun visitRelOpDistinct(node: Rel.Op.Distinct, ctx: ProblemCallback) = org.partiql.plan.Rel.Op.Distinct( + input = visitRel(node.input, ctx), + ) + + override fun visitRelOpFilter(node: Rel.Op.Filter, ctx: ProblemCallback) = org.partiql.plan.Rel.Op.Filter( + input = visitRel(node.input, ctx), + predicate = visitRex(node.predicate, ctx), + ) + + override fun visitRelOpSort(node: Rel.Op.Sort, ctx: ProblemCallback) = + org.partiql.plan.Rel.Op.Sort( + input = 
visitRel(node.input, ctx), + specs = node.specs.map { visitRelOpSortSpec(it, ctx) } + ) + + override fun visitRelOpSortSpec(node: Rel.Op.Sort.Spec, ctx: ProblemCallback) = org.partiql.plan.Rel.Op.Sort.Spec( + rex = visitRex(node.rex, ctx), + order = when (node.order) { + Rel.Op.Sort.Order.ASC_NULLS_LAST -> org.partiql.plan.Rel.Op.Sort.Order.ASC_NULLS_LAST + Rel.Op.Sort.Order.ASC_NULLS_FIRST -> org.partiql.plan.Rel.Op.Sort.Order.ASC_NULLS_FIRST + Rel.Op.Sort.Order.DESC_NULLS_LAST -> org.partiql.plan.Rel.Op.Sort.Order.DESC_NULLS_LAST + Rel.Op.Sort.Order.DESC_NULLS_FIRST -> org.partiql.plan.Rel.Op.Sort.Order.DESC_NULLS_FIRST + } + ) + + override fun visitRelOpUnion(node: Rel.Op.Union, ctx: ProblemCallback) = org.partiql.plan.Rel.Op.Union( + lhs = visitRel(node.lhs, ctx), + rhs = visitRel(node.rhs, ctx), + ) + + override fun visitRelOpIntersect(node: Rel.Op.Intersect, ctx: ProblemCallback) = org.partiql.plan.Rel.Op.Intersect( + lhs = visitRel(node.lhs, ctx), + rhs = visitRel(node.rhs, ctx), + ) + + override fun visitRelOpExcept(node: Rel.Op.Except, ctx: ProblemCallback) = org.partiql.plan.Rel.Op.Except( + lhs = visitRel(node.lhs, ctx), + rhs = visitRel(node.rhs, ctx), + ) + + override fun visitRelOpLimit(node: Rel.Op.Limit, ctx: ProblemCallback) = org.partiql.plan.Rel.Op.Limit( + input = visitRel(node.input, ctx), + limit = visitRex(node.limit, ctx), + ) + + override fun visitRelOpOffset(node: Rel.Op.Offset, ctx: ProblemCallback) = org.partiql.plan.Rel.Op.Offset( + input = visitRel(node.input, ctx), + offset = visitRex(node.offset, ctx), + ) + + override fun visitRelOpProject(node: Rel.Op.Project, ctx: ProblemCallback) = org.partiql.plan.Rel.Op.Project( + input = visitRel(node.input, ctx), + projections = node.projections.map { visitRex(it, ctx) }, + ) + + override fun visitRelOpJoin(node: Rel.Op.Join, ctx: ProblemCallback) = org.partiql.plan.Rel.Op.Join( + lhs = visitRel(node.lhs, ctx), + rhs = visitRel(node.rhs, ctx), + rex = visitRex(node.rex, ctx), + type = when 
(node.type) { + Rel.Op.Join.Type.INNER -> org.partiql.plan.Rel.Op.Join.Type.INNER + Rel.Op.Join.Type.LEFT -> org.partiql.plan.Rel.Op.Join.Type.LEFT + Rel.Op.Join.Type.RIGHT -> org.partiql.plan.Rel.Op.Join.Type.RIGHT + Rel.Op.Join.Type.FULL -> org.partiql.plan.Rel.Op.Join.Type.FULL + } + ) + + override fun visitRelOpAggregate(node: Rel.Op.Aggregate, ctx: ProblemCallback) = org.partiql.plan.Rel.Op.Aggregate( + input = visitRel(node.input, ctx), + strategy = when (node.strategy) { + Rel.Op.Aggregate.Strategy.FULL -> org.partiql.plan.Rel.Op.Aggregate.Strategy.FULL + Rel.Op.Aggregate.Strategy.PARTIAL -> org.partiql.plan.Rel.Op.Aggregate.Strategy.PARTIAL + }, + calls = node.calls.map { visitRelOpAggregateCall(it, ctx) }, + groups = node.groups.map { visitRex(it, ctx) }, + ) + + override fun visitRelOpAggregateCall(node: Rel.Op.Aggregate.Call, ctx: ProblemCallback) = + org.partiql.plan.Rel.Op.Aggregate.Call( + agg = visitAgg(node.agg, ctx), + args = node.args.map { visitRex(it, ctx) }, + ) + + override fun visitRelOpExclude(node: Rel.Op.Exclude, ctx: ProblemCallback) = org.partiql.plan.Rel.Op.Exclude( + input = visitRel(node.input, ctx), + items = node.items.map { visitRelOpExcludeItem(it, ctx) }, + ) + + override fun visitRelOpExcludeItem(node: Rel.Op.Exclude.Item, ctx: ProblemCallback) = + org.partiql.plan.Rel.Op.Exclude.Item( + root = visitIdentifierSymbol(node.root, ctx), + steps = node.steps.map { visitRelOpExcludeStep(it, ctx) }, + ) + + override fun visitRelOpExcludeStep(node: Rel.Op.Exclude.Step, ctx: ProblemCallback) = + super.visit(node, ctx) as org.partiql.plan.Rel.Op.Exclude.Step + + override fun visitRelOpExcludeStepAttr(node: Rel.Op.Exclude.Step.Attr, ctx: ProblemCallback) = + org.partiql.plan.Rel.Op.Exclude.Step.Attr( + symbol = visitIdentifierSymbol(node.symbol, ctx), + ) + + override fun visitRelOpExcludeStepPos(node: Rel.Op.Exclude.Step.Pos, ctx: ProblemCallback) = + org.partiql.plan.Rel.Op.Exclude.Step.Pos( + index = node.index, + ) + + override fun 
visitRelOpExcludeStepStructWildcard( + node: Rel.Op.Exclude.Step.StructWildcard, + ctx: ProblemCallback, + ) = org.partiql.plan.Rel.Op.Exclude.Step.StructWildcard() + + override fun visitRelOpExcludeStepCollectionWildcard( + node: Rel.Op.Exclude.Step.CollectionWildcard, + ctx: ProblemCallback, + ) = org.partiql.plan.Rel.Op.Exclude.Step.CollectionWildcard() + + override fun visitRelOpErr(node: Rel.Op.Err, ctx: ProblemCallback) = org.partiql.plan.Rel.Op.Err(node.message) + + override fun visitRelBinding(node: Rel.Binding, ctx: ProblemCallback) = org.partiql.plan.Rel.Binding( + name = node.name, + type = node.type, + ) + } + \ No newline at end of file diff --git a/partiql-planner/src/main/kotlin/org/partiql/planner/internal/transforms/RelConverter.kt b/partiql-planner/src/main/kotlin/org/partiql/planner/internal/transforms/RelConverter.kt new file mode 100644 index 000000000..15afd9c3b --- /dev/null +++ b/partiql-planner/src/main/kotlin/org/partiql/planner/internal/transforms/RelConverter.kt @@ -0,0 +1,538 @@ +/* + * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. 
+ * + */ + +package org.partiql.planner.internal.transforms + +import org.partiql.ast.AstNode +import org.partiql.ast.Exclude +import org.partiql.ast.Expr +import org.partiql.ast.From +import org.partiql.ast.GroupBy +import org.partiql.ast.OrderBy +import org.partiql.ast.Select +import org.partiql.ast.SetOp +import org.partiql.ast.Sort +import org.partiql.ast.builder.ast +import org.partiql.ast.helpers.toBinder +import org.partiql.ast.util.AstRewriter +import org.partiql.ast.visitor.AstBaseVisitor +import org.partiql.planner.internal.Env +import org.partiql.planner.internal.ir.Rel +import org.partiql.planner.internal.ir.Rex +import org.partiql.planner.internal.ir.aggUnresolved +import org.partiql.planner.internal.ir.rel +import org.partiql.planner.internal.ir.relBinding +import org.partiql.planner.internal.ir.relOpAggregate +import org.partiql.planner.internal.ir.relOpAggregateCall +import org.partiql.planner.internal.ir.relOpErr +import org.partiql.planner.internal.ir.relOpExcept +import org.partiql.planner.internal.ir.relOpExclude +import org.partiql.planner.internal.ir.relOpExcludeItem +import org.partiql.planner.internal.ir.relOpExcludeStepAttr +import org.partiql.planner.internal.ir.relOpExcludeStepCollectionWildcard +import org.partiql.planner.internal.ir.relOpExcludeStepPos +import org.partiql.planner.internal.ir.relOpExcludeStepStructWildcard +import org.partiql.planner.internal.ir.relOpFilter +import org.partiql.planner.internal.ir.relOpIntersect +import org.partiql.planner.internal.ir.relOpJoin +import org.partiql.planner.internal.ir.relOpLimit +import org.partiql.planner.internal.ir.relOpOffset +import org.partiql.planner.internal.ir.relOpProject +import org.partiql.planner.internal.ir.relOpScan +import org.partiql.planner.internal.ir.relOpSort +import org.partiql.planner.internal.ir.relOpSortSpec +import org.partiql.planner.internal.ir.relOpUnion +import org.partiql.planner.internal.ir.relOpUnpivot +import org.partiql.planner.internal.ir.relType +import 
org.partiql.planner.internal.ir.rex +import org.partiql.planner.internal.ir.rexOpLit +import org.partiql.planner.internal.ir.rexOpPivot +import org.partiql.planner.internal.ir.rexOpSelect +import org.partiql.planner.internal.ir.rexOpVarResolved +import org.partiql.types.StaticType +import org.partiql.value.PartiQLValueExperimental +import org.partiql.value.boolValue + +/** + * Lexically scoped state for use in translating an individual SELECT statement. + */ +internal object RelConverter { + + // IGNORE — so we don't have to non-null assert on operator inputs + private val nil = rel(relType(emptyList(), emptySet()), relOpErr("nil")) + + /** + * Here we convert an SFW to composed [Rel]s, then apply the appropriate relation-value projection to get a [Rex]. + */ + internal fun apply(sfw: Expr.SFW, env: Env): Rex { + val rel = sfw.accept(ToRel(env), nil) + val rex = when (val projection = sfw.select) { + // PIVOT ... FROM + is Select.Pivot -> { + val key = projection.key.toRex(env) + val value = projection.value.toRex(env) + val type = (StaticType.STRUCT) + val op = rexOpPivot(key, value, rel) + rex(type, op) + } + // SELECT VALUE ... FROM + is Select.Value -> { + assert(rel.type.schema.size == 1) { + "Expected SELECT VALUE's input to have a single binding. " + + "However, it contained: ${rel.type.schema.map { it.name }}." + } + val constructor = rex(StaticType.ANY, rexOpVarResolved(0)) + val op = rexOpSelect(constructor, rel) + val type = when (rel.type.props.contains(Rel.Prop.ORDERED)) { + true -> (StaticType.LIST) + else -> (StaticType.BAG) + } + rex(type, op) + } + // SELECT * FROM + is Select.Star -> { + throw IllegalArgumentException("AST not normalized") + } + // SELECT ... FROM + is Select.Project -> { + throw IllegalArgumentException("AST not normalized") + } + } + return rex + } + + /** + * Syntax sugar for converting an [Expr] tree to a [Rex] tree. 
+ */ + private fun Expr.toRex(env: Env): Rex = RexConverter.apply(this, env) + + @Suppress("PARAMETER_NAME_CHANGED_ON_OVERRIDE", "LocalVariableName") + private class ToRel(private val env: Env) : AstBaseVisitor() { + + override fun defaultReturn(node: AstNode, input: Rel): Rel = + throw IllegalArgumentException("unsupported rel $node") + + /** + * Translate SFW AST node to a pipeline of [Rel] operators; skip any SELECT VALUE or PIVOT projection. + */ + + override fun visitExprSFW(node: Expr.SFW, input: Rel): Rel { + var sel = node + var rel = visitFrom(sel.from, nil) + rel = convertWhere(rel, sel.where) + // kotlin does not have destructuring reassignment + val (_sel, _rel) = convertAgg(rel, sel, sel.groupBy) + sel = _sel + rel = _rel + // Plan.create (possibly rewritten) sel node + rel = convertHaving(rel, sel.having) + rel = convertSetOp(rel, sel.setOp) + rel = convertOrderBy(rel, sel.orderBy) + rel = convertLimit(rel, sel.limit) + rel = convertOffset(rel, sel.offset) + rel = convertExclude(rel, sel.exclude) + // append SQL projection if present + rel = when (val projection = sel.select) { + is Select.Project -> visitSelectProject(projection, rel) + is Select.Value -> visitSelectValue(projection, rel) + is Select.Star -> error("AST not normalized, found project star") + else -> rel // skip PIVOT and SELECT VALUE + } + return rel + } + + override fun visitSelectProject(node: Select.Project, input: Rel): Rel { + // this ignores aggregations + val schema = mutableListOf() + val props = input.type.props + val projections = mutableListOf() + node.items.forEach { + val (binding, projection) = convertProjectionItem(it) + schema.add(binding) + projections.add(projection) + } + val type = relType(schema, props) + val op = relOpProject(input, projections) + return rel(type, op) + } + + override fun visitSelectValue(node: Select.Value, input: Rel): Rel { + val name = node.constructor.toBinder(1).symbol + val rex = RexConverter.apply(node.constructor, env) + val schema = 
listOf(relBinding(name, rex.type)) + val props = input.type.props + val type = relType(schema, props) + val op = relOpProject(input, projections = listOf(rex)) + return rel(type, op) + } + + override fun visitFromValue(node: From.Value, nil: Rel): Rel { + val rex = RexConverter.apply(node.expr, env) + val binding = when (val a = node.asAlias) { + null -> error("AST not normalized, missing AS alias on $node") + else -> relBinding( + name = a.symbol, + type = rex.type + ) + } + return when (node.type) { + From.Value.Type.SCAN -> { + when (val i = node.atAlias) { + null -> convertScan(rex, binding) + else -> { + val index = relBinding( + name = i.symbol, + type = (StaticType.INT) + ) + convertScanIndexed(rex, binding, index) + } + } + } + From.Value.Type.UNPIVOT -> { + val atAlias = when (val at = node.atAlias) { + null -> error("AST not normalized, missing AT alias on UNPIVOT $node") + else -> relBinding( + name = at.symbol, + type = (StaticType.STRING) + ) + } + convertUnpivot(rex, k = atAlias, v = binding) + } + } + } + + /** + * Appends [Rel.Op.Join] where the left and right sides are converted FROM sources + * + * TODO compute basic schema + */ + @OptIn(PartiQLValueExperimental::class) + override fun visitFromJoin(node: From.Join, nil: Rel): Rel { + val lhs = visitFrom(node.lhs, nil) + val rhs = visitFrom(node.rhs, nil) + val schema = listOf() + val props = emptySet() + val condition = node.condition?.let { RexConverter.apply(it, env) } ?: rex(StaticType.BOOL, rexOpLit(boolValue(true))) + val joinType = when (node.type) { + From.Join.Type.LEFT_OUTER, From.Join.Type.LEFT -> Rel.Op.Join.Type.LEFT + From.Join.Type.RIGHT_OUTER, From.Join.Type.RIGHT -> Rel.Op.Join.Type.RIGHT + From.Join.Type.FULL_OUTER, From.Join.Type.FULL -> Rel.Op.Join.Type.FULL + From.Join.Type.COMMA, + From.Join.Type.INNER, + From.Join.Type.CROSS -> Rel.Op.Join.Type.INNER // Cross Joins are just INNER JOIN ON TRUE + null -> Rel.Op.Join.Type.INNER // a JOIN b ON a.id = b.id <--> a INNER JOIN b ON 
a.id = b.id + } + val type = relType(schema, props) + val op = relOpJoin(lhs, rhs, condition, joinType) + return rel(type, op) + } + + // Helpers + + private fun convertScan(rex: Rex, binding: Rel.Binding): Rel { + val schema = listOf(binding) + val props = emptySet() + val type = relType(schema, props) + val op = relOpScan(rex) + return rel(type, op) + } + + private fun convertScanIndexed(rex: Rex, binding: Rel.Binding, index: Rel.Binding): Rel { + val schema = listOf(binding, index) + val props = setOf(Rel.Prop.ORDERED) + val type = relType(schema, props) + val op = relOpScan(rex) + return rel(type, op) + } + + /** + * Output schema of an UNPIVOT is < k, v > + * + * @param rex + * @param k + * @param v + */ + private fun convertUnpivot(rex: Rex, k: Rel.Binding, v: Rel.Binding): Rel { + val schema = listOf(k, v) + val props = emptySet() + val type = relType(schema, props) + val op = relOpUnpivot(rex) + return rel(type, op) + } + + private fun convertProjectionItem(item: Select.Project.Item) = when (item) { + is Select.Project.Item.All -> convertProjectItemAll(item) + is Select.Project.Item.Expression -> convertProjectItemRex(item) + } + + private fun convertProjectItemAll(item: Select.Project.Item.All): Pair { + throw IllegalArgumentException("AST not normalized") + } + + private fun convertProjectItemRex(item: Select.Project.Item.Expression): Pair { + val name = when (val a = item.asAlias) { + null -> error("AST not normalized, missing AS alias on projection item $item") + else -> a.symbol + } + val rex = RexConverter.apply(item.expr, env) + val binding = relBinding(name, rex.type) + return binding to rex + } + + /** + * Append [Rel.Op.Filter] only if a WHERE condition exists + */ + private fun convertWhere(input: Rel, expr: Expr?): Rel { + if (expr == null) { + return input + } + val type = input.type + val predicate = expr.toRex(env) + val op = relOpFilter(input, predicate) + return rel(type, op) + } + + /** + * Append [Rel.Op.Aggregate] only if SELECT contains 
aggregate expressions. + * + * TODO Set quantifiers + * TODO Group As + * + * @return Pair is returned where + * 1. Ast.Expr.SFW has every Ast.Expr.CallAgg replaced by a synthetic Ast.Expr.Var + * 2. Rel which has the appropriate Rex.Agg calls and groups + */ + private fun convertAgg(input: Rel, select: Expr.SFW, groupBy: GroupBy?): Pair { + // Rewrite and extract all aggregations in the SELECT clause + val (sel, aggregations) = AggregationTransform.apply(select) + + // No aggregation planning required for GROUP BY + if (aggregations.isEmpty()) { + if (groupBy != null) { + // GROUP BY with no aggregations is considered an error. + error("GROUP BY with no aggregations in SELECT clause") + } + return Pair(select, input) + } + + // Build the schema -> (calls... groups...) + val schema = mutableListOf() + val props = emptySet() + + // Build the rel operator + var strategy = Rel.Op.Aggregate.Strategy.FULL + val calls = aggregations.mapIndexed { i, expr -> + val binding = relBinding( + name = syntheticAgg(i), + type = (StaticType.ANY), + ) + schema.add(binding) + val args = expr.args.map { arg -> arg.toRex(env) } + val id = AstToPlan.convert(expr.function) + val fn = aggUnresolved(id) + relOpAggregateCall(fn, args) + } + var groups = emptyList() + if (groupBy != null) { + groups = groupBy.keys.map { + if (it.asAlias == null) { + error("not normalized, group key $it missing unique name") + } + val binding = relBinding( + name = it.asAlias!!.symbol, + type = (StaticType.ANY) + ) + schema.add(binding) + it.expr.toRex(env) + } + strategy = when (groupBy.strategy) { + GroupBy.Strategy.FULL -> Rel.Op.Aggregate.Strategy.FULL + GroupBy.Strategy.PARTIAL -> Rel.Op.Aggregate.Strategy.PARTIAL + } + } + val type = relType(schema, props) + val op = relOpAggregate(input, strategy, calls, groups) + val rel = rel(type, op) + return Pair(sel, rel) + } + + /** + * Append [Rel.Op.Filter] only if a HAVING condition exists + * + * Notes: + * - This currently does not support aggregation 
expressions in the WHERE condition + */ + private fun convertHaving(input: Rel, expr: Expr?): Rel { + if (expr == null) { + return input + } + val type = input.type + val predicate = expr.toRex(env) + val op = relOpFilter(input, predicate) + return rel(type, op) + } + + /** + * Append SQL set operator if present + * + * TODO combine/compare schemas + * TODO set quantifier + */ + private fun convertSetOp(input: Rel, setOp: Expr.SFW.SetOp?): Rel { + if (setOp == null) { + return input + } + val type = input.type.copy(props = emptySet()) + val lhs = input + val rhs = visitExprSFW(setOp.operand, nil) + val op = when (setOp.type.type) { + SetOp.Type.UNION -> relOpUnion(lhs, rhs) + SetOp.Type.INTERSECT -> relOpIntersect(lhs, rhs) + SetOp.Type.EXCEPT -> relOpExcept(lhs, rhs) + } + return rel(type, op) + } + + /** + * Append [Rel.Op.Sort] only if an ORDER BY clause is present + */ + private fun convertOrderBy(input: Rel, orderBy: OrderBy?): Rel { + if (orderBy == null) { + return input + } + val type = input.type.copy(props = setOf(Rel.Prop.ORDERED)) + val specs = orderBy.sorts.map { + val rex = it.expr.toRex(env) + val order = when (it.dir) { + Sort.Dir.DESC -> when (it.nulls) { + Sort.Nulls.LAST -> Rel.Op.Sort.Order.DESC_NULLS_LAST + else -> Rel.Op.Sort.Order.DESC_NULLS_FIRST + } + else -> when (it.nulls) { + Sort.Nulls.FIRST -> Rel.Op.Sort.Order.ASC_NULLS_FIRST + else -> Rel.Op.Sort.Order.ASC_NULLS_LAST + } + } + relOpSortSpec(rex, order) + } + val op = relOpSort(input, specs) + return rel(type, op) + } + + /** + * Append [Rel.Op.Limit] if there is a LIMIT + */ + private fun convertLimit(input: Rel, limit: Expr?): Rel { + if (limit == null) { + return input + } + val type = input.type + val rex = RexConverter.apply(limit, env) + val op = relOpLimit(input, rex) + return rel(type, op) + } + + /** + * Append [Rel.Op.Offset] if there is an OFFSET + */ + private fun convertOffset(input: Rel, offset: Expr?): Rel { + if (offset == null) { + return input + } + val type = 
input.type + val rex = RexConverter.apply(offset, env) + val op = relOpOffset(input, rex) + return rel(type, op) + } + + private fun convertExclude(input: Rel, exclude: Exclude?): Rel { + if (exclude == null) { + return input + } + val type = input.type // PlanTyper handles typing the exclusion + val items = exclude.exprs.map { convertExcludeItem(it) } + val op = relOpExclude(input, items) + return rel(type, op) + } + + private fun convertExcludeItem(expr: Exclude.ExcludeExpr): Rel.Op.Exclude.Item { + val root = AstToPlan.convert(expr.root) + val steps = expr.steps.map { convertExcludeStep(it) } + return relOpExcludeItem(root, steps) + } + + private fun convertExcludeStep(step: Exclude.Step): Rel.Op.Exclude.Step = when (step) { + is Exclude.Step.ExcludeTupleAttr -> relOpExcludeStepAttr(AstToPlan.convert(step.symbol)) + is Exclude.Step.ExcludeCollectionIndex -> relOpExcludeStepPos(step.index) + is Exclude.Step.ExcludeCollectionWildcard -> relOpExcludeStepCollectionWildcard() + is Exclude.Step.ExcludeTupleWildcard -> relOpExcludeStepStructWildcard() + } + + // /** + // * Converts a GROUP AS X clause to a binding of the form: + // * ``` + // * { 'X': group_as({ 'a_0': e_0, ..., 'a_n': e_n }) } + // * ``` + // * + // * Notes: + // * - This was included to be consistent with the existing PartiqlAst and PartiqlLogical representations, + // * but perhaps we don't want to represent GROUP AS with an agg function. 
+ // */ + // private fun convertGroupAs(name: String, from: From): Binding { + // val fields = from.bindings().map { n -> + // Plan.field( + // name = Plan.rexLit(ionString(n), StaticType.STRING), + // value = Plan.rexId(n, Case.SENSITIVE, Rex.Id.Qualifier.UNQUALIFIED, type = StaticType.STRUCT) + // ) + // } + // return Plan.binding( + // name = name, + // value = Plan.rexAgg( + // id = "group_as", + // args = listOf(Plan.rexTuple(fields, StaticType.STRUCT)), + // modifier = Rex.Agg.Modifier.ALL, + // type = StaticType.STRUCT + // ) + // ) + // } + } + + /** + * Rewrites a SELECT node replacing (and extracting) each aggregation `i` with a synthetic field name `$agg_i`. + */ + private object AggregationTransform : AstRewriter>() { + + fun apply(node: Expr.SFW): Pair> { + val aggs = mutableListOf() + val select = super.visitExprSFW(node, aggs) as Expr.SFW + return Pair(select, aggs) + } + + // only rewrite top-level SFW + override fun visitExprSFW(node: Expr.SFW, ctx: MutableList): AstNode = node + + override fun visitExprAgg(node: Expr.Agg, ctx: MutableList) = ast { + val id = identifierSymbol { + symbol = syntheticAgg(ctx.size) + caseSensitivity = org.partiql.ast.Identifier.CaseSensitivity.INSENSITIVE + } + ctx += node + exprVar(id, Expr.Var.Scope.DEFAULT) + } + } + + private fun syntheticAgg(i: Int) = "\$agg_$i" +} diff --git a/partiql-planner/src/main/kotlin/org/partiql/planner/internal/transforms/RexConverter.kt b/partiql-planner/src/main/kotlin/org/partiql/planner/internal/transforms/RexConverter.kt new file mode 100644 index 000000000..5676c9ee5 --- /dev/null +++ b/partiql-planner/src/main/kotlin/org/partiql/planner/internal/transforms/RexConverter.kt @@ -0,0 +1,615 @@ +/* + * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. 
+ * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + * + */ + +package org.partiql.planner.internal.transforms + +import org.partiql.ast.AstNode +import org.partiql.ast.DatetimeField +import org.partiql.ast.Expr +import org.partiql.ast.Type +import org.partiql.ast.visitor.AstBaseVisitor +import org.partiql.planner.internal.Env +import org.partiql.planner.internal.ir.Identifier +import org.partiql.planner.internal.ir.Rex +import org.partiql.planner.internal.ir.builder.plan +import org.partiql.planner.internal.ir.fnUnresolved +import org.partiql.planner.internal.ir.identifierSymbol +import org.partiql.planner.internal.ir.rex +import org.partiql.planner.internal.ir.rexOpCallStatic +import org.partiql.planner.internal.ir.rexOpCollection +import org.partiql.planner.internal.ir.rexOpLit +import org.partiql.planner.internal.ir.rexOpPath +import org.partiql.planner.internal.ir.rexOpPathStepIndex +import org.partiql.planner.internal.ir.rexOpPathStepKey +import org.partiql.planner.internal.ir.rexOpPathStepSymbol +import org.partiql.planner.internal.ir.rexOpPathStepUnpivot +import org.partiql.planner.internal.ir.rexOpPathStepWildcard +import org.partiql.planner.internal.ir.rexOpStruct +import org.partiql.planner.internal.ir.rexOpStructField +import org.partiql.planner.internal.ir.rexOpSubquery +import org.partiql.planner.internal.ir.rexOpTupleUnion +import org.partiql.planner.internal.ir.rexOpVarUnresolved +import org.partiql.planner.internal.typer.toNonNullStaticType +import org.partiql.planner.internal.typer.toStaticType +import org.partiql.types.StaticType +import org.partiql.types.TimeType +import org.partiql.value.PartiQLValueExperimental +import 
org.partiql.value.StringValue +import org.partiql.value.boolValue +import org.partiql.value.int32Value +import org.partiql.value.int64Value +import org.partiql.value.nullValue + +/** + * Converts an AST expression node to a Plan Rex node; ignoring any typing. + */ +internal object RexConverter { + + internal fun apply(expr: Expr, context: Env): Rex = expr.accept(ToRex, context) // expr.toRex() + + @OptIn(PartiQLValueExperimental::class) + @Suppress("PARAMETER_NAME_CHANGED_ON_OVERRIDE") + private object ToRex : AstBaseVisitor() { + + override fun defaultReturn(node: AstNode, context: Env): Rex = + throw IllegalArgumentException("unsupported rex $node") + + override fun visitExprLit(node: Expr.Lit, context: Env): Rex { + val type = when (node.value.isNull) { + true -> node.value.type.toStaticType() + else -> node.value.type.toNonNullStaticType() + } + val op = rexOpLit(node.value) + return rex(type, op) + } + + /** + * !! IMPORTANT !! + * + * This is the top-level visit for handling subquery coercion. The default behavior is to coerce to a scalar. + * In some situations, ie comparison to complex types we may make assertions on the desired type. + * + * It is recommended that every method (except for the exceptional cases) recurse the tree from visitExprCoerce. + * + * - RHS of comparison when LHS is an array or collection expression; and visa-versa + * - It is the collection expression of a FROM clause or JOIN + * - It is the RHS of an IN predicate + * - It is an argument of an OUTER set operator. 
+ * + * @param node + * @param ctx + * @return + */ + private fun visitExprCoerce(node: Expr, ctx: Env, coercion: Rex.Op.Subquery.Coercion = Rex.Op.Subquery.Coercion.SCALAR): Rex { + val rex = super.visitExpr(node, ctx) + return when (rex.op is Rex.Op.Select) { + true -> rex(StaticType.ANY, rexOpSubquery(rex.op as Rex.Op.Select, coercion)) + else -> rex + } + } + + override fun visitExprVar(node: Expr.Var, context: Env): Rex { + val type = (StaticType.ANY) + val identifier = AstToPlan.convert(node.identifier) + val scope = when (node.scope) { + Expr.Var.Scope.DEFAULT -> Rex.Op.Var.Scope.DEFAULT + Expr.Var.Scope.LOCAL -> Rex.Op.Var.Scope.LOCAL + } + val op = rexOpVarUnresolved(identifier, scope) + return rex(type, op) + } + + override fun visitExprUnary(node: Expr.Unary, context: Env): Rex { + val type = (StaticType.ANY) + // Args + val arg = visitExprCoerce(node.expr, context) + val args = listOf(arg) + // Fn + val id = identifierSymbol(node.op.name.lowercase(), Identifier.CaseSensitivity.SENSITIVE) + val fn = fnUnresolved(id, true) + val op = rexOpCallStatic(fn, args) + return rex(type, op) + } + + override fun visitExprBinary(node: Expr.Binary, context: Env): Rex { + val type = (StaticType.ANY) + // Args + val lhs = visitExprCoerce(node.lhs, context) + val rhs = visitExprCoerce(node.rhs, context) + val args = listOf(lhs, rhs) + return when (node.op) { + Expr.Binary.Op.NE -> { + val op = negate(call("eq", lhs, rhs)) + rex(type, op) + } + else -> { + // Fn + val id = identifierSymbol(node.op.name.lowercase(), Identifier.CaseSensitivity.SENSITIVE) + val fn = fnUnresolved(id, true) + // Rex + val op = rexOpCallStatic(fn, args) + rex(type, op) + } + } + } + + override fun visitExprPath(node: Expr.Path, context: Env): Rex { + val type = (StaticType.ANY) + // Args + val root = visitExprCoerce(node.root, context) + val steps = node.steps.map { + when (it) { + is Expr.Path.Step.Index -> { + val key = visitExprCoerce(it.key, context) + when (val astKey = it.key) { + is 
Expr.Lit -> when (astKey.value) { + is StringValue -> rexOpPathStepKey(key) + else -> rexOpPathStepIndex(key) + } + is Expr.Cast -> when (astKey.asType is Type.String) { + true -> rexOpPathStepKey(key) + false -> rexOpPathStepIndex(key) + } + else -> rexOpPathStepIndex(key) + } + } + is Expr.Path.Step.Symbol -> { + val identifier = AstToPlan.convert(it.symbol) + rexOpPathStepSymbol(identifier) + } + is Expr.Path.Step.Unpivot -> rexOpPathStepUnpivot() + is Expr.Path.Step.Wildcard -> rexOpPathStepWildcard() + } + } + // Rex + val op = rexOpPath(root, steps) + return rex(type, op) + } + + override fun visitExprCall(node: Expr.Call, context: Env): Rex { + val type = (StaticType.ANY) + // Fn + val id = AstToPlan.convert(node.function) + if (id is Identifier.Symbol && id.symbol.equals("TUPLEUNION", ignoreCase = true)) { + return visitExprCallTupleUnion(node, context) + } + val fn = fnUnresolved(id, false) + // Args + val args = node.args.map { visitExprCoerce(it, context) } + // Rex + val op = rexOpCallStatic(fn, args) + return rex(type, op) + } + + private fun visitExprCallTupleUnion(node: Expr.Call, context: Env): Rex { + val type = (StaticType.STRUCT) + val args = node.args.map { visitExprCoerce(it, context) }.toMutableList() + val op = rexOpTupleUnion(args) + return rex(type, op) + } + + override fun visitExprCase(node: Expr.Case, context: Env) = plan { + val type = (StaticType.ANY) + val rex = when (node.expr) { + null -> null + else -> visitExprCoerce(node.expr!!, context) // match `rex + } + + // Converts AST CASE (x) WHEN y THEN z --> Plan CASE WHEN x = y THEN z + val id = identifierSymbol(Expr.Binary.Op.EQ.name.lowercase(), Identifier.CaseSensitivity.SENSITIVE) + val fn = fnUnresolved(id, true) + val createBranch: (Rex, Rex) -> Rex.Op.Case.Branch = { condition: Rex, result: Rex -> + val updatedCondition = when (rex) { + null -> condition + else -> rex(type, rexOpCallStatic(fn.copy(), listOf(rex, condition))) + } + rexOpCaseBranch(updatedCondition, result) + } + 
+ val branches = node.branches.map { + val branchCondition = visitExprCoerce(it.condition, context) + val branchRex = visitExprCoerce(it.expr, context) + createBranch(branchCondition, branchRex) + }.toMutableList() + + val defaultRex = when (val default = node.default) { + null -> rex(type = StaticType.NULL, op = rexOpLit(value = nullValue())) + else -> visitExprCoerce(default, context) + } + val op = rexOpCase(branches = branches, default = defaultRex) + rex(type, op) + } + + override fun visitExprCollection(node: Expr.Collection, context: Env): Rex { + val type = when (node.type) { + Expr.Collection.Type.BAG -> StaticType.BAG + Expr.Collection.Type.ARRAY -> StaticType.LIST + Expr.Collection.Type.VALUES -> StaticType.LIST + Expr.Collection.Type.LIST -> StaticType.LIST + Expr.Collection.Type.SEXP -> StaticType.SEXP + } + val values = node.values.map { visitExprCoerce(it, context) } + val op = rexOpCollection(values) + return rex(type, op) + } + + override fun visitExprStruct(node: Expr.Struct, context: Env): Rex { + val type = (StaticType.STRUCT) + val fields = node.fields.map { + val k = visitExprCoerce(it.name, context) + val v = visitExprCoerce(it.value, context) + rexOpStructField(k, v) + } + val op = rexOpStruct(fields) + return rex(type, op) + } + + // SPECIAL FORMS + + /** + * NOT? LIKE ( ESCAPE )? + */ + override fun visitExprLike(node: Expr.Like, ctx: Env): Rex { + val type = StaticType.BOOL + // Args + val arg0 = visitExprCoerce(node.value, ctx) + val arg1 = visitExprCoerce(node.pattern, ctx) + val arg2 = node.escape?.let { visitExprCoerce(it, ctx) } + // Call Variants + var call = when (arg2) { + null -> call("like", arg0, arg1) + else -> call("like_escape", arg0, arg1, arg2) + } + // NOT? + if (node.not == true) { + call = negate(call) + } + return rex(type, call) + } + + /** + * NOT? 
BETWEEN AND + */ + override fun visitExprBetween(node: Expr.Between, ctx: Env): Rex = plan { + val type = StaticType.BOOL + // Args + val arg0 = visitExprCoerce(node.value, ctx) + val arg1 = visitExprCoerce(node.from, ctx) + val arg2 = visitExprCoerce(node.to, ctx) + // Call + var call = call("between", arg0, arg1, arg2) + // NOT? + if (node.not == true) { + call = negate(call) + } + rex(type, call) + } + + /** + * NOT? IN + * + * SQL Spec 1999 section 8.4 + * RVC IN IPV is equivalent to RVC = ANY IPV -> Quantified Comparison Predicate + * Which means: + * Let the expression be T in C, where C is [a1, ..., an] + * T in C is true iff T = a_x is true for any a_x in [a1, ...., an] + * T in C is false iff T = a_x is false for every a_x in [a1, ....., an ] or cardinality of the collection is 0. + * Otherwise, T in C is unknown. + * + */ + override fun visitExprInCollection(node: Expr.InCollection, ctx: Env): Rex { + val type = StaticType.BOOL + // Args + val arg0 = visitExprCoerce(node.lhs, ctx) + val arg1 = visitExpr(node.rhs, ctx) // !! don't insert scalar subquery coercions + + // Call + var call = call("in_collection", arg0, arg1) + // NOT? + if (node.not == true) { + call = negate(call) + } + return rex(type, call) + } + + /** + * IS ? 
+ */ + override fun visitExprIsType(node: Expr.IsType, ctx: Env): Rex { + val type = StaticType.BOOL + // arg + val arg0 = visitExprCoerce(node.value, ctx) + + var call = when (val targetType = node.type) { + is Type.NullType -> call("is_null", arg0) + is Type.Missing -> call("is_missing", arg0) + is Type.Bool -> call("is_bool", arg0) + is Type.Tinyint -> call("is_int8", arg0) + is Type.Smallint, is Type.Int2 -> call("is_int16", arg0) + is Type.Int4 -> call("is_int32", arg0) + is Type.Bigint, is Type.Int8 -> call("is_int64", arg0) + is Type.Int -> call("is_int", arg0) + is Type.Real -> call("is_real", arg0) + is Type.Float32 -> call("is_float32", arg0) + is Type.Float64 -> call("is_float64", arg0) + is Type.Decimal -> call("is_decimal", targetType.precision.toRex(), targetType.scale.toRex(), arg0) + is Type.Numeric -> call("is_numeric", targetType.precision.toRex(), targetType.scale.toRex(), arg0) + is Type.Char -> call("is_char", targetType.length.toRex(), arg0) + is Type.Varchar -> call("is_varchar", targetType.length.toRex(), arg0) + is Type.String -> call("is_string", targetType.length.toRex(), arg0) + is Type.Symbol -> call("is_symbol", arg0) + is Type.Bit -> call("is_bit", arg0) + is Type.BitVarying -> call("is_bitVarying", arg0) + is Type.ByteString -> call("is_byteString", arg0) + is Type.Blob -> call("is_blob", arg0) + is Type.Clob -> call("is_clob", arg0) + is Type.Date -> call("is_date", arg0) + is Type.Time -> call("is_time", arg0) + // TODO: DO we want to seperate with time zone vs without time zone into two different type in the plan? 
+ // leave the parameterized type out for now until the above is answered + is Type.TimeWithTz -> call("is_timeWithTz", arg0) + is Type.Timestamp -> call("is_timestamp", arg0) + is Type.TimestampWithTz -> call("is_timestampWithTz", arg0) + is Type.Interval -> call("is_interval", arg0) + is Type.Bag -> call("is_bag", arg0) + is Type.List -> call("is_list", arg0) + is Type.Sexp -> call("is_sexp", arg0) + is Type.Tuple -> call("is_tuple", arg0) + is Type.Struct -> call("is_struct", arg0) + is Type.Any -> call("is_any", arg0) + is Type.Custom -> call("is_custom", arg0) + } + + if (node.not == true) { + call = negate(call) + } + + return rex(type, call) + } + + // coalesce(expr1, expr2, ... exprN) -> + // CASE + // WHEN expr1 IS NOT NULL THEN EXPR1 + // ... + // WHEN exprn is NOT NULL THEN exprn + // ELSE NULL END + override fun visitExprCoalesce(node: Expr.Coalesce, ctx: Env): Rex = plan { + val type = StaticType.ANY + val createBranch: (Rex) -> Rex.Op.Case.Branch = { expr: Rex -> + val updatedCondition = rex(type, negate(call("is_null", expr))) + rexOpCaseBranch(updatedCondition, expr) + } + + val branches = node.args.map { + createBranch(visitExpr(it, ctx)) + }.toMutableList() + + val defaultRex = rex(type = StaticType.NULL, op = rexOpLit(value = nullValue())) + val op = rexOpCase(branches, defaultRex) + rex(type, op) + } + + // nullIf(expr1, expr2) -> + // CASE + // WHEN expr1 = expr2 THEN NULL + // ELSE expr1 END + override fun visitExprNullIf(node: Expr.NullIf, ctx: Env): Rex = plan { + val type = StaticType.ANY + val expr1 = visitExpr(node.value, ctx) + val expr2 = visitExpr(node.nullifier, ctx) + val id = identifierSymbol(Expr.Binary.Op.EQ.name.lowercase(), Identifier.CaseSensitivity.SENSITIVE) + val fn = fnUnresolved(id, true) + val call = rexOpCallStatic(fn, listOf(expr1, expr2)) + val branches = listOf( + rexOpCaseBranch(rex(type, call), rex(type = StaticType.NULL, op = rexOpLit(value = nullValue()))), + ) + val op = rexOpCase(branches.toMutableList(), expr1) 
+ rex(type, op) + } + + /** + * SUBSTRING( (FROM (FOR )?)? ) + */ + override fun visitExprSubstring(node: Expr.Substring, ctx: Env): Rex { + val type = StaticType.ANY + // Args + val arg0 = visitExprCoerce(node.value, ctx) + val arg1 = node.start?.let { visitExprCoerce(it, ctx) } ?: rex(StaticType.INT, rexOpLit(int64Value(1))) + val arg2 = node.length?.let { visitExprCoerce(it, ctx) } + // Call Variants + val call = when (arg2) { + null -> call("substring", arg0, arg1) + else -> call("substring_length", arg0, arg1, arg2) + } + return rex(type, call) + } + + /** + * POSITION( IN ) + */ + override fun visitExprPosition(node: Expr.Position, ctx: Env): Rex { + val type = StaticType.ANY + // Args + val arg0 = visitExprCoerce(node.lhs, ctx) + val arg1 = visitExprCoerce(node.rhs, ctx) + // Call + val call = call("position", arg0, arg1) + return rex(type, call) + } + + /** + * TRIM([LEADING|TRAILING|BOTH]? ( FROM)? ) + */ + override fun visitExprTrim(node: Expr.Trim, ctx: Env): Rex { + val type = StaticType.TEXT + // Args + val arg0 = visitExprCoerce(node.value, ctx) + val arg1 = node.chars?.let { visitExprCoerce(it, ctx) } + // Call Variants + val call = when (node.spec) { + Expr.Trim.Spec.LEADING -> when (arg1) { + null -> call("trim_leading", arg0) + else -> call("trim_leading_chars", arg0, arg1) + } + Expr.Trim.Spec.TRAILING -> when (arg1) { + null -> call("trim_trailing", arg0) + else -> call("trim_trailing_chars", arg0, arg1) + } + // TODO: We may want to add a trim_both for trim(BOTH FROM arg) + else -> when (arg1) { + null -> callNonHidden("trim", arg0) + else -> call("trim_chars", arg0, arg1) + } + } + return rex(type, call) + } + + override fun visitExprOverlay(node: Expr.Overlay, ctx: Env): Rex { + TODO("SQL Special Form OVERLAY") + } + + override fun visitExprExtract(node: Expr.Extract, ctx: Env): Rex { + TODO("SQL Special Form EXTRACT") + } + + // TODO: Ignoring type parameter now + override fun visitExprCast(node: Expr.Cast, ctx: Env): Rex { + val type = 
node.asType + val arg0 = visitExprCoerce(node.value, ctx) + return when (type) { + is Type.NullType -> rex(StaticType.NULL, call("cast_null", arg0)) + is Type.Missing -> rex(StaticType.MISSING, call("cast_missing", arg0)) + is Type.Bool -> rex(StaticType.BOOL, call("cast_bool", arg0)) + is Type.Tinyint -> TODO("Static Type does not have TINYINT type") + is Type.Smallint, is Type.Int2 -> rex(StaticType.INT2, call("cast_int16", arg0)) + is Type.Int4 -> rex(StaticType.INT4, call("cast_int32", arg0)) + is Type.Bigint, is Type.Int8 -> rex(StaticType.INT8, call("cast_int64", arg0)) + is Type.Int -> rex(StaticType.INT, call("cast_int", arg0)) + is Type.Real -> TODO("Static Type does not have REAL type") + is Type.Float32 -> TODO("Static Type does not have FLOAT32 type") + is Type.Float64 -> rex(StaticType.FLOAT, call("cast_float64", arg0)) + is Type.Decimal -> rex(StaticType.DECIMAL, call("cast_decimal", arg0)) + is Type.Numeric -> rex(StaticType.DECIMAL, call("cast_numeric", arg0)) + is Type.Char -> rex(StaticType.CHAR, call("cast_char", arg0)) + is Type.Varchar -> rex(StaticType.STRING, call("cast_varchar", arg0)) + is Type.String -> rex(StaticType.STRING, call("cast_string", arg0)) + is Type.Symbol -> rex(StaticType.SYMBOL, call("cast_symbol", arg0)) + is Type.Bit -> TODO("Static Type does not have Bit type") + is Type.BitVarying -> TODO("Static Type does not have BitVarying type") + is Type.ByteString -> TODO("Static Type does not have ByteString type") + is Type.Blob -> rex(StaticType.BLOB, call("cast_blob", arg0)) + is Type.Clob -> rex(StaticType.CLOB, call("cast_clob", arg0)) + is Type.Date -> rex(StaticType.DATE, call("cast_date", arg0)) + is Type.Time -> rex(StaticType.TIME, call("cast_time", arg0)) + is Type.TimeWithTz -> rex(TimeType(null, true), call("cast_timeWithTz", arg0)) + is Type.Timestamp -> TODO("Need to rebase main") + is Type.TimestampWithTz -> rex(StaticType.TIMESTAMP, call("cast_timeWithTz", arg0)) + is Type.Interval -> TODO("Static Type does not 
have Interval type") + is Type.Bag -> rex(StaticType.BAG, call("cast_bag", arg0)) + is Type.List -> rex(StaticType.LIST, call("cast_list", arg0)) + is Type.Sexp -> rex(StaticType.SEXP, call("cast_sexp", arg0)) + is Type.Tuple -> rex(StaticType.STRUCT, call("cast_tuple", arg0)) + is Type.Struct -> rex(StaticType.STRUCT, call("cast_struct", arg0)) + is Type.Any -> rex(StaticType.ANY, call("cast_any", arg0)) + is Type.Custom -> TODO("Custom type not supported ") + } + } + + override fun visitExprCanCast(node: Expr.CanCast, ctx: Env): Rex { + TODO("PartiQL Special Form CAN_CAST") + } + + override fun visitExprCanLosslessCast(node: Expr.CanLosslessCast, ctx: Env): Rex { + TODO("PartiQL Special Form CAN_LOSSLESS_CAST") + } + + override fun visitExprDateAdd(node: Expr.DateAdd, ctx: Env): Rex { + val type = StaticType.TIMESTAMP + // Args + val arg0 = visitExprCoerce(node.lhs, ctx) + val arg1 = visitExprCoerce(node.rhs, ctx) + // Call Variants + val call = when (node.field) { + DatetimeField.TIMEZONE_HOUR -> error("Invalid call DATE_ADD(TIMEZONE_HOUR, ...)") + DatetimeField.TIMEZONE_MINUTE -> error("Invalid call DATE_ADD(TIMEZONE_MINUTE, ...)") + else -> call("date_add_${node.field.name.lowercase()}", arg0, arg1) + } + return rex(type, call) + } + + override fun visitExprDateDiff(node: Expr.DateDiff, ctx: Env): Rex { + val type = StaticType.TIMESTAMP + // Args + val arg0 = visitExprCoerce(node.lhs, ctx) + val arg1 = visitExprCoerce(node.rhs, ctx) + // Call Variants + val call = when (node.field) { + DatetimeField.TIMEZONE_HOUR -> error("Invalid call DATE_DIFF(TIMEZONE_HOUR, ...)") + DatetimeField.TIMEZONE_MINUTE -> error("Invalid call DATE_DIFF(TIMEZONE_MINUTE, ...)") + else -> call("date_diff_${node.field.name.lowercase()}", arg0, arg1) + } + return rex(type, call) + } + + override fun visitExprSessionAttribute(node: Expr.SessionAttribute, ctx: Env): Rex { + val type = StaticType.ANY + val fn = node.attribute.name.lowercase() + val call = call(fn) + return rex(type, call) 
+ } + + override fun visitExprSFW(node: Expr.SFW, context: Env): Rex = RelConverter.apply(node, context) + + // Helpers + + private fun bool(v: Boolean): Rex { + val type = StaticType.BOOL + val op = rexOpLit(boolValue(v)) + return rex(type, op) + } + + private fun negate(call: Rex.Op.Call): Rex.Op.Call.Static { + val name = Expr.Unary.Op.NOT.name + val id = identifierSymbol(name.lowercase(), Identifier.CaseSensitivity.SENSITIVE) + val fn = fnUnresolved(id, true) + // wrap + val arg = rex(StaticType.BOOL, call) + // rewrite call + return rexOpCallStatic(fn, listOf(arg)) + } + + /** + * Create a [Rex.Op.Call.Static] node which has a hidden unresolved Function. + * The purpose of having such hidden function is to prevent usage of generated function name in query text. + */ + private fun call(name: String, vararg args: Rex): Rex.Op.Call.Static { + val id = identifierSymbol(name, Identifier.CaseSensitivity.SENSITIVE) + val fn = fnUnresolved(id, true) + return rexOpCallStatic(fn, args.toList()) + } + + /** + * Create a [Rex.Op.Call.Static] node which has a non-hidden unresolved Function. 
+ */ + private fun callNonHidden(name: String, vararg args: Rex): Rex.Op.Call.Static { + val id = identifierSymbol(name, Identifier.CaseSensitivity.SENSITIVE) + val fn = fnUnresolved(id, false) + return rexOpCallStatic(fn, args.toList()) + } + + private fun Int?.toRex() = rex(StaticType.INT4, rexOpLit(int32Value(this))) + } +} diff --git a/partiql-planner/src/main/kotlin/org/partiql/planner/internal/typer/FnResolver.kt b/partiql-planner/src/main/kotlin/org/partiql/planner/internal/typer/FnResolver.kt new file mode 100644 index 000000000..cb844089a --- /dev/null +++ b/partiql-planner/src/main/kotlin/org/partiql/planner/internal/typer/FnResolver.kt @@ -0,0 +1,458 @@ +package org.partiql.planner.internal.typer + +import org.partiql.planner.Header +import org.partiql.planner.internal.ir.Agg +import org.partiql.planner.internal.ir.Fn +import org.partiql.planner.internal.ir.Identifier +import org.partiql.planner.internal.ir.Rex +import org.partiql.types.StaticType +import org.partiql.types.function.FunctionParameter +import org.partiql.types.function.FunctionSignature +import org.partiql.value.PartiQLValueExperimental +import org.partiql.value.PartiQLValueType +import org.partiql.value.PartiQLValueType.ANY +import org.partiql.value.PartiQLValueType.BAG +import org.partiql.value.PartiQLValueType.BINARY +import org.partiql.value.PartiQLValueType.BLOB +import org.partiql.value.PartiQLValueType.BOOL +import org.partiql.value.PartiQLValueType.BYTE +import org.partiql.value.PartiQLValueType.CHAR +import org.partiql.value.PartiQLValueType.CLOB +import org.partiql.value.PartiQLValueType.DATE +import org.partiql.value.PartiQLValueType.DECIMAL +import org.partiql.value.PartiQLValueType.DECIMAL_ARBITRARY +import org.partiql.value.PartiQLValueType.FLOAT32 +import org.partiql.value.PartiQLValueType.FLOAT64 +import org.partiql.value.PartiQLValueType.INT +import org.partiql.value.PartiQLValueType.INT16 +import org.partiql.value.PartiQLValueType.INT32 +import 
org.partiql.value.PartiQLValueType.INT64 +import org.partiql.value.PartiQLValueType.INT8 +import org.partiql.value.PartiQLValueType.INTERVAL +import org.partiql.value.PartiQLValueType.LIST +import org.partiql.value.PartiQLValueType.MISSING +import org.partiql.value.PartiQLValueType.NULL +import org.partiql.value.PartiQLValueType.SEXP +import org.partiql.value.PartiQLValueType.STRING +import org.partiql.value.PartiQLValueType.STRUCT +import org.partiql.value.PartiQLValueType.SYMBOL +import org.partiql.value.PartiQLValueType.TIME +import org.partiql.value.PartiQLValueType.TIMESTAMP + +/** + * Function signature lookup by name. + */ +internal typealias FnMap = Map> + +/** + * Function arguments list. The planner is responsible for mapping arguments to parameters. + */ +internal typealias Args = List + +/** + * Parameter mapping list tells the planner where to insert implicit casts. Null is the identity. + */ +internal typealias Mapping = List + +/** + * Tells us which function matched, and how the arguments are mapped. + */ +internal class Match( + public val signature: T, + public val mapping: Mapping, +) + +/** + * Result of attempting to match an unresolved function. + */ +internal sealed class FnMatch { + + /** + * 7.1 Inputs with wrong types + * It follows that all functions return MISSING when one of their inputs is MISSING + * + * @property signature + * @property mapping + * @property isMissable TRUE when anyone of the arguments _could_ be MISSING. We *always* propagate MISSING. + */ + public data class Ok( + public val signature: T, + public val mapping: Mapping, + public val isMissable: Boolean, + ) : FnMatch() + + /** + * This represents dynamic dispatch. + * + * @property candidates an ordered list of potentially applicable functions to dispatch dynamically. + * @property isMissable TRUE when the argument permutations may not definitively invoke one of the candidates. You + * can think of [isMissable] as being the same as "not exhaustive". 
For example, if we have ABS(INT | STRING), then + * this function call [isMissable] because there isn't an `ABS(STRING)` function signature AKA we haven't exhausted + * all the arguments. On the other hand, take an "exhaustive" scenario: ABS(INT | DEC). In this case, [isMissable] + * is false because we have functions for each potential argument AKA we have exhausted the arguments. + */ + public data class Dynamic( + public val candidates: List>, + public val isMissable: Boolean + ) : FnMatch() + + public data class Error( + public val identifier: Identifier, + public val args: List, + public val candidates: List, + ) : FnMatch() +} + +/** + * Logic for matching signatures to arguments — this class contains all cast/coercion logic. In my opinion, casts + * and coercions should come along with the type lattice. Considering we don't really have this, it is simple enough + * at the moment to keep that information (derived from the current TypeLattice) with the [FnResolver]. + */ +@OptIn(PartiQLValueExperimental::class) +internal class FnResolver(private val headers: List
) { + + /** + * All headers use the same type lattice (we don't have a design for plugging type systems at the moment). + */ + private val types = TypeLattice.partiql() + + /** + * Calculate a queryable map of scalar function signatures. + */ + private val functions: FnMap + + /** + * Calculate a queryable map of scalar function signatures from special forms. + */ + private val operators: FnMap + + /** + * Calculate a queryable map of aggregation function signatures + */ + private val aggregations: FnMap + + /** + * A place to quickly lookup a cast can return missing; lookup by "SPECIFIC" + */ + private val unsafeCastSet: Set + + init { + val (casts, unsafeCasts) = casts() + unsafeCastSet = unsafeCasts + // combine all header definitions + val fns = headers.flatMap { it.functions } + functions = fns.toFnMap() + operators = (headers.flatMap { it.operators } + casts).toFnMap() + aggregations = headers.flatMap { it.aggregations }.toFnMap() + } + + /** + * Group list of [FunctionSignature] by name. + */ + private fun List.toFnMap(): FnMap = this + .distinctBy { it.specific } + .sortedWith(fnPrecedence) + .groupBy { it.name } + + /** + * Leverages a [FnResolver] to find a matching function defined in the [Header] scalar function catalog. + */ + public fun resolveFn(fn: Fn.Unresolved, args: List): FnMatch { + val candidates = lookup(fn) + var canReturnMissing = false + val parameterPermutations = buildArgumentPermutations(args.map { it.type }).mapNotNull { argList -> + argList.mapIndexed { i, arg -> + if (arg.isMissable()) { + canReturnMissing = true + } + // Skip over if we cannot convert type to runtime type. 
+ val argType = arg.toRuntimeTypeOrNull() ?: return@mapNotNull null + FunctionParameter("arg-$i", argType) + } + } + val potentialFunctions = parameterPermutations.mapNotNull { parameters -> + when (val match = match(candidates, parameters)) { + null -> { + canReturnMissing = true + null + } + else -> { + val isMissable = canReturnMissing || isUnsafeCast(match.signature.specific) + FnMatch.Ok(match.signature, match.mapping, isMissable) + } + } + } + // Remove duplicates while maintaining order (precedence). + val orderedUniqueFunctions = potentialFunctions.toSet().toList() + return when (orderedUniqueFunctions.size) { + 0 -> FnMatch.Error(fn.identifier, args, candidates) + 1 -> orderedUniqueFunctions.first() + else -> FnMatch.Dynamic(orderedUniqueFunctions, canReturnMissing) + } + } + + private fun buildArgumentPermutations(args: List): List> { + val flattenedArgs = args.map { it.flatten().allTypes } + return buildArgumentPermutations(flattenedArgs, accumulator = emptyList()) + } + + private fun buildArgumentPermutations( + args: List>, + accumulator: List, + ): List> { + if (args.isEmpty()) { + return listOf(accumulator) + } + val first = args.first() + val rest = when (args.size) { + 1 -> emptyList() + else -> args.subList(1, args.size) + } + return buildList { + first.forEach { argSubType -> + addAll(buildArgumentPermutations(rest, accumulator + listOf(argSubType))) + } + } + } + + /** + * Leverages a [FnResolver] to find a matching function defined in the [Header] aggregation function catalog. 
+ */ + public fun resolveAgg(agg: Agg.Unresolved, args: List): FnMatch { + val candidates = lookup(agg) + var hadMissingArg = false + val parameters = args.mapIndexed { i, arg -> + if (!hadMissingArg && arg.type.isMissable()) { + hadMissingArg = true + } + FunctionParameter("arg-$i", arg.type.toRuntimeType()) + } + val match = match(candidates, parameters) + return when (match) { + null -> FnMatch.Error(agg.identifier, args, candidates) + else -> { + val isMissable = hadMissingArg || isUnsafeCast(match.signature.specific) + FnMatch.Ok(match.signature, match.mapping, isMissable) + } + } + } + + /** + * Functions are sorted by precedence (which is not rigorously defined/specified at the moment). + */ + private fun match(signatures: List, args: Args): Match? { + for (signature in signatures) { + val mapping = match(signature, args) + if (mapping != null) { + return Match(signature, mapping) + } + } + return null + } + + /** + * Attempt to match arguments to the parameters; return the implicit casts if necessary. + * + * TODO we need to constrain the allowable runtime types for an ANY typed parameter. + */ + fun match(signature: FunctionSignature, args: Args): Mapping? { + if (signature.parameters.size != args.size) { + return null + } + val mapping = ArrayList(args.size) + for (i in args.indices) { + val a = args[i] + val p = signature.parameters[i] + when { + // 1. Exact match + a.type == p.type -> mapping.add(null) + // 2. Match ANY, no coercion needed + p.type == ANY -> mapping.add(null) + // 3. Match NULL argument + a.type == NULL -> mapping.add(null) + // 4. 
Check for a coercion + else -> { + val coercion = lookupCoercion(a.type, p.type) + when (coercion) { + null -> return null // short-circuit + else -> mapping.add(coercion) + } + } + } + } + // if all elements requires casting, then no match + // because there must be another function definition that requires no casting + return if (mapping.isEmpty() || mapping.contains(null)) { + // we made a match + mapping + } else { + null + } + } + + /** + * Return a list of all scalar function signatures matching the given identifier. + */ + private fun lookup(ref: Fn.Unresolved): List { + val name = getFnName(ref.identifier) + return when (ref.isHidden) { + true -> operators.getOrDefault(name, emptyList()) + else -> functions.getOrDefault(name, emptyList()) + } + } + + /** + * Return a list of all aggregation function signatures matching the given identifier. + */ + private fun lookup(ref: Agg.Unresolved): List { + val name = getFnName(ref.identifier) + return aggregations.getOrDefault(name, emptyList()) + } + + /** + * Return a normalized function identifier for lookup in our list of function definitions. + */ + private fun getFnName(identifier: Identifier): String = when (identifier) { + is Identifier.Qualified -> throw IllegalArgumentException("Qualified function identifiers not supported") + is Identifier.Symbol -> identifier.symbol.lowercase() + } + + // ==================================== + // CASTS and COERCIONS + // ==================================== + + /** + * Returns the CAST function if exists, else null. + */ + private fun lookupCoercion(valueType: PartiQLValueType, targetType: PartiQLValueType): FunctionSignature.Scalar? 
{ + if (!types.canCoerce(valueType, targetType)) { + return null + } + val name = castName(targetType) + val casts = operators.getOrDefault(name, emptyList()) + for (cast in casts) { + if (cast.parameters.isEmpty()) { + break // should be unreachable + } + if (valueType == cast.parameters[0].type) return cast + } + return null + } + + /** + * Easy lookup of whether this CAST can return MISSING. + */ + private fun isUnsafeCast(specific: String): Boolean = unsafeCastSet.contains(specific) + + /** + * Generate all CAST functions from the given lattice. + * + * @return Pair(0) is the function list, Pair(1) represents the unsafe cast specifics + */ + private fun casts(): Pair, Set> { + val casts = mutableListOf() + val unsafeCastSet = mutableSetOf() + for (t1 in types.types) { + for (t2 in types.types) { + val r = types.graph[t1.ordinal][t2.ordinal] + if (r != null) { + val fn = cast(t1, t2) + casts.add(fn) + if (r.cast == CastType.UNSAFE) unsafeCastSet.add(fn.specific) + } + } + } + return casts to unsafeCastSet + } + + /** + * Define CASTS with some mangled name; CAST(x AS T) -> cast_t(x) + * + * CAST(x AS INT8) -> cast_int64(x) + * + * But what about parameterized types? Are the parameters dropped in casts, or do parameters become arguments? + */ + private fun castName(type: PartiQLValueType) = "cast_${type.name.lowercase()}" + + internal fun cast(operand: PartiQLValueType, target: PartiQLValueType) = + FunctionSignature.Scalar( + name = castName(target), + returns = target, + parameters = listOf( + FunctionParameter("value", operand), + ), + isNullable = false, + isNullCall = true + ) + + companion object { + + // ==================================== + // SORTING + // ==================================== + + // Function precedence comparator + // 1. Fewest args first + // 2. 
Parameters are compared left-to-right + @JvmStatic + private val fnPrecedence = Comparator { fn1, fn2 -> + // Compare number of arguments + if (fn1.parameters.size != fn2.parameters.size) { + return@Comparator fn1.parameters.size - fn2.parameters.size + } + // Compare operand type precedence + for (i in fn1.parameters.indices) { + val p1 = fn1.parameters[i] + val p2 = fn2.parameters[i] + val comparison = p1.compareTo(p2) + if (comparison != 0) return@Comparator comparison + } + // unreachable? + 0 + } + + private fun FunctionParameter.compareTo(other: FunctionParameter): Int = + comparePrecedence(this.type, other.type) + + private fun comparePrecedence(t1: PartiQLValueType, t2: PartiQLValueType): Int { + if (t1 == t2) return 0 + val p1 = precedence[t1]!! + val p2 = precedence[t2]!! + return p1 - p2 + } + + // This simply describes some precedence for ordering functions. + // This is not explicitly defined in the PartiQL Specification!! + // This does not imply the ability to CAST; this defines function resolution behavior. + private val precedence: Map = listOf( + NULL, + MISSING, + BOOL, + INT8, + INT16, + INT32, + INT64, + INT, + DECIMAL, + FLOAT32, + FLOAT64, + DECIMAL_ARBITRARY, // Arbitrary precision decimal has a higher precedence than FLOAT + CHAR, + STRING, + CLOB, + SYMBOL, + BINARY, + BYTE, + BLOB, + DATE, + TIME, + TIMESTAMP, + INTERVAL, + LIST, + SEXP, + BAG, + STRUCT, + ANY, + ).mapIndexed { precedence, type -> type to precedence }.toMap() + } +} diff --git a/partiql-planner/src/main/kotlin/org/partiql/planner/internal/typer/PlanTyper.kt b/partiql-planner/src/main/kotlin/org/partiql/planner/internal/typer/PlanTyper.kt new file mode 100644 index 000000000..06cd0ea13 --- /dev/null +++ b/partiql-planner/src/main/kotlin/org/partiql/planner/internal/typer/PlanTyper.kt @@ -0,0 +1,1463 @@ +/* + * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). 
+ * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + * + */ + +package org.partiql.planner.internal.typer + +import org.partiql.errors.Problem +import org.partiql.errors.ProblemCallback +import org.partiql.errors.UNKNOWN_PROBLEM_LOCATION +import org.partiql.planner.PlanningProblemDetails +import org.partiql.planner.internal.Env +import org.partiql.planner.internal.ResolutionStrategy +import org.partiql.planner.internal.ResolvedVar +import org.partiql.planner.internal.TypeEnv +import org.partiql.planner.internal.ir.Agg +import org.partiql.planner.internal.ir.Fn +import org.partiql.planner.internal.ir.Identifier +import org.partiql.planner.internal.ir.PlanNode +import org.partiql.planner.internal.ir.Rel +import org.partiql.planner.internal.ir.Rex +import org.partiql.planner.internal.ir.Statement +import org.partiql.planner.internal.ir.aggResolved +import org.partiql.planner.internal.ir.fnResolved +import org.partiql.planner.internal.ir.identifierSymbol +import org.partiql.planner.internal.ir.rel +import org.partiql.planner.internal.ir.relBinding +import org.partiql.planner.internal.ir.relOpAggregate +import org.partiql.planner.internal.ir.relOpAggregateCall +import org.partiql.planner.internal.ir.relOpErr +import org.partiql.planner.internal.ir.relOpFilter +import org.partiql.planner.internal.ir.relOpJoin +import org.partiql.planner.internal.ir.relOpLimit +import org.partiql.planner.internal.ir.relOpOffset +import org.partiql.planner.internal.ir.relOpProject +import org.partiql.planner.internal.ir.relOpScan +import org.partiql.planner.internal.ir.relOpSort +import 
org.partiql.planner.internal.ir.relOpUnpivot +import org.partiql.planner.internal.ir.relType +import org.partiql.planner.internal.ir.rex +import org.partiql.planner.internal.ir.rexOpCallDynamic +import org.partiql.planner.internal.ir.rexOpCallDynamicCandidate +import org.partiql.planner.internal.ir.rexOpCallStatic +import org.partiql.planner.internal.ir.rexOpCaseBranch +import org.partiql.planner.internal.ir.rexOpCollection +import org.partiql.planner.internal.ir.rexOpErr +import org.partiql.planner.internal.ir.rexOpGlobal +import org.partiql.planner.internal.ir.rexOpLit +import org.partiql.planner.internal.ir.rexOpPath +import org.partiql.planner.internal.ir.rexOpPathStepSymbol +import org.partiql.planner.internal.ir.rexOpSelect +import org.partiql.planner.internal.ir.rexOpStruct +import org.partiql.planner.internal.ir.rexOpStructField +import org.partiql.planner.internal.ir.rexOpTupleUnion +import org.partiql.planner.internal.ir.rexOpVarResolved +import org.partiql.planner.internal.ir.statementQuery +import org.partiql.planner.internal.ir.util.PlanRewriter +import org.partiql.spi.BindingCase +import org.partiql.spi.BindingName +import org.partiql.spi.BindingPath +import org.partiql.types.AnyOfType +import org.partiql.types.AnyType +import org.partiql.types.BagType +import org.partiql.types.BoolType +import org.partiql.types.CollectionType +import org.partiql.types.IntType +import org.partiql.types.ListType +import org.partiql.types.MissingType +import org.partiql.types.NullType +import org.partiql.types.SexpType +import org.partiql.types.StaticType +import org.partiql.types.StaticType.Companion.ANY +import org.partiql.types.StaticType.Companion.BOOL +import org.partiql.types.StaticType.Companion.MISSING +import org.partiql.types.StaticType.Companion.NULL +import org.partiql.types.StaticType.Companion.STRING +import org.partiql.types.StringType +import org.partiql.types.StructType +import org.partiql.types.TupleConstraint +import 
org.partiql.types.function.FunctionSignature +import org.partiql.value.BoolValue +import org.partiql.value.PartiQLValueExperimental +import org.partiql.value.TextValue +import org.partiql.value.boolValue + +/** + * Rewrites an untyped algebraic translation of the query to be both typed and have resolved variables. + * + * @property env + * @property onProblem + */ +@OptIn(PartiQLValueExperimental::class) +internal class PlanTyper( + private val env: Env, + private val onProblem: ProblemCallback, +) { + + /** + * Rewrite the statement with inferred types and resolved variables + */ + public fun resolve(statement: Statement): Statement { + if (statement !is Statement.Query) { + throw IllegalArgumentException("PartiQLPlanner only supports Query statements") + } + // root TypeEnv has no bindings + val typeEnv = TypeEnv( + schema = emptyList(), + strategy = ResolutionStrategy.GLOBAL, + ) + val root = statement.root.type(typeEnv) + return statementQuery(root) + } + + /** + * Types the relational operators of a query expression. + * + * @property outer represents the outer TypeEnv of a query expression — only used by scan variable resolution. + */ + private inner class RelTyper(private val outer: TypeEnv) : PlanRewriter() { + + override fun visitRel(node: Rel, ctx: Rel.Type?) = visitRelOp(node.op, node.type) as Rel + + /** + * The output schema of a `rel.op.scan` is the single value binding. 
+ */ + override fun visitRelOpScan(node: Rel.Op.Scan, ctx: Rel.Type?): Rel { + // descend, with GLOBAL resolution strategy + val rex = node.rex.type(outer.global()) + // compute rel type + val valueT = getElementTypeForFromSource(rex.type) + val type = ctx!!.copyWithSchema(listOf(valueT)) + // rewrite + val op = relOpScan(rex) + return rel(type, op) + } + + override fun visitRelOpErr(node: Rel.Op.Err, ctx: Rel.Type?): Rel { + val type = ctx ?: relType(emptyList(), emptySet()) + return rel(type, node) + } + + /** + * The output schema of a `rel.op.scan_index` is the value binding and index binding. + */ + override fun visitRelOpScanIndexed(node: Rel.Op.ScanIndexed, ctx: Rel.Type?): Rel { + // descend, with GLOBAL resolution strategy + val rex = node.rex.type(outer.global()) + // compute rel type + val valueT = getElementTypeForFromSource(rex.type) + val indexT = StaticType.INT + val type = ctx!!.copyWithSchema(listOf(valueT, indexT)) + // rewrite + val op = relOpScan(rex) + return rel(type, op) + } + + /** + * TODO handle NULL|STRUCT type + */ + override fun visitRelOpUnpivot(node: Rel.Op.Unpivot, ctx: Rel.Type?): Rel { + // descend, with GLOBAL resolution strategy + val rex = node.rex.type(outer.global()) + + // only UNPIVOT a struct + if (rex.type !is StructType) { + handleUnexpectedType(rex.type, expected = setOf(StaticType.STRUCT)) + return rel(ctx!!, relOpErr("UNPIVOT on non-STRUCT type ${rex.type}")) + } + + // compute element type + val t = rex.type as StructType + val e = if (t.contentClosed) { + StaticType.unionOf(t.fields.map { it.value }.toSet()).flatten() + } else { + StaticType.ANY + } + + // compute rel type + val kType = StaticType.STRING + val vType = e + val type = ctx!!.copyWithSchema(listOf(kType, vType)) + + // rewrite + val op = relOpUnpivot(rex) + return rel(type, op) + } + + override fun visitRelOpDistinct(node: Rel.Op.Distinct, ctx: Rel.Type?): Rel { + TODO("Type RelOp Distinct") + } + + override fun visitRelOpFilter(node: Rel.Op.Filter, ctx: 
Rel.Type?): Rel { + // compute input schema + val input = visitRel(node.input, ctx) + // type sub-nodes + val typeEnv = TypeEnv(input.type.schema, ResolutionStrategy.LOCAL) + val predicate = node.predicate.type(typeEnv) + // compute output schema + val type = input.type + // rewrite + val op = relOpFilter(input, predicate) + return rel(type, op) + } + + override fun visitRelOpSort(node: Rel.Op.Sort, ctx: Rel.Type?): Rel { + // compute input schema + val input = visitRel(node.input, ctx) + // type sub-nodes + val typeEnv = TypeEnv(input.type.schema, ResolutionStrategy.LOCAL) + val specs = node.specs.map { + val rex = it.rex.type(typeEnv) + it.copy(rex) + } + // output schema of a sort is the same as the input + val type = input.type.copy(props = setOf(Rel.Prop.ORDERED)) + // rewrite + val op = relOpSort(input, specs) + return rel(type, op) + } + + override fun visitRelOpUnion(node: Rel.Op.Union, ctx: Rel.Type?): Rel { + TODO("Type RelOp Union") + } + + override fun visitRelOpIntersect(node: Rel.Op.Intersect, ctx: Rel.Type?): Rel { + TODO("Type RelOp Intersect") + } + + override fun visitRelOpExcept(node: Rel.Op.Except, ctx: Rel.Type?): Rel { + TODO("Type RelOp Except") + } + + override fun visitRelOpLimit(node: Rel.Op.Limit, ctx: Rel.Type?): Rel { + // compute input schema + val input = visitRel(node.input, ctx) + // type limit expression using outer scope with global resolution + val typeEnv = outer.global() + val limit = node.limit.type(typeEnv) + // check types + assertAsInt(limit.type) + // compute output schema + val type = input.type + // rewrite + val op = relOpLimit(input, limit) + return rel(type, op) + } + + override fun visitRelOpOffset(node: Rel.Op.Offset, ctx: Rel.Type?): Rel { + // compute input schema + val input = visitRel(node.input, ctx) + // type offset expression using outer scope with global resolution + val typeEnv = outer.global() + val offset = node.offset.type(typeEnv) + // check types + assertAsInt(offset.type) + // compute output schema + 
val type = input.type + // rewrite + val op = relOpOffset(input, offset) + return rel(type, op) + } + + override fun visitRelOpProject(node: Rel.Op.Project, ctx: Rel.Type?): Rel { + // compute input schema + val input = visitRel(node.input, ctx) + // type sub-nodes + val typeEnv = TypeEnv(input.type.schema, ResolutionStrategy.LOCAL) + val projections = node.projections.map { + it.type(typeEnv) + } + // compute output schema + val schema = projections.map { it.type } + val type = ctx!!.copyWithSchema(schema) + // rewrite + val op = relOpProject(input, projections) + return rel(type, op) + } + + override fun visitRelOpJoin(node: Rel.Op.Join, ctx: Rel.Type?): Rel { + // Rewrite LHS and RHS + val lhs = visitRel(node.lhs, ctx) + val rhs = visitRel(node.rhs, ctx) + + // Calculate output schema given JOIN type + val l = lhs.type.schema + val r = rhs.type.schema + val schema = when (node.type) { + Rel.Op.Join.Type.INNER -> l + r + Rel.Op.Join.Type.LEFT -> l + r.pad() + Rel.Op.Join.Type.RIGHT -> l.pad() + r + Rel.Op.Join.Type.FULL -> l.pad() + r.pad() + } + val type = relType(schema, ctx!!.props) + + // Type the condition on the output schema + val condition = node.rex.type(TypeEnv(type.schema, ResolutionStrategy.LOCAL)) + + val op = relOpJoin(lhs, rhs, condition, node.type) + return rel(type, op) + } + + /** + * Initial implementation of `EXCLUDE` schema inference. Until an RFC is finalized for `EXCLUDE` + * (https://github.com/partiql/partiql-spec/issues/39), + * + * This behavior is considered experimental and subject to change. + * + * This implementation includes + * - Excluding tuple bindings (e.g. t.a.b.c) + * - Excluding tuple wildcards (e.g. t.a.*.b) + * - Excluding collection indexes (e.g. t.a[0].b -- behavior subject to change; see below discussion) + * - Excluding collection wildcards (e.g. t.a[*].b) + * + * There are still discussion points regarding the following edge cases: + * - EXCLUDE on a tuple attribute that doesn't exist -- give an error/warning? 
+ * - currently no error + * - EXCLUDE on a tuple attribute that has duplicates -- give an error/warning? exclude one? exclude both? + * - currently excludes both w/ no error + * - EXCLUDE on a collection index as the last step -- mark element type as optional? + * - currently element type as-is + * - EXCLUDE on a collection index w/ remaining path steps -- mark last step's type as optional? + * - currently marks last step's type as optional + * - EXCLUDE on a binding tuple variable (e.g. SELECT ... EXCLUDE t FROM t) -- error? + * - currently a parser error + * - EXCLUDE on a union type -- give an error/warning? no-op? exclude on each type in union? + * - currently exclude on each union type + * - If SELECT list includes an attribute that is excluded, we could consider giving an error in PlanTyper or + * some other semantic pass + * - currently does not give an error + */ + override fun visitRelOpExclude(node: Rel.Op.Exclude, ctx: Rel.Type?): Rel { + // compute input schema + val input = visitRel(node.input, ctx) + + // apply exclusions to the input schema + val init = input.type.schema.map { it.copy() } + val schema = node.items.fold((init)) { bindings, item -> excludeBindings(bindings, item) } + + // rewrite + val type = ctx!!.copy(schema) + return rel(type, node) + } + + override fun visitRelOpAggregate(node: Rel.Op.Aggregate, ctx: Rel.Type?): Rel { + // compute input schema + val input = visitRel(node.input, ctx) + + // type the calls and groups + val typer = RexTyper(locals = TypeEnv(input.type.schema, ResolutionStrategy.LOCAL)) + + // typing of aggregate calls is slightly more complicated because they are not expressions. + val calls = node.calls.mapIndexed { i, call -> + when (val agg = call.agg) { + is Agg.Resolved -> call to ctx!!.schema[i].type + is Agg.Unresolved -> typer.resolveAgg(agg, call.args) + } + } + val groups = node.groups.map { typer.visitRex(it, null) } + + // Compute schema using order (calls...groups...) 
+ val schema = mutableListOf() + schema += calls.map { it.second } + schema += groups.map { it.type } + + // rewrite with typed calls and groups + val type = ctx!!.copyWithSchema(schema) + val op = relOpAggregate( + input = input, + strategy = node.strategy, + calls = calls.map { it.first }, + groups = groups, + ) + return rel(type, op) + } + } + + /** + * Types a PartiQL expression tree. For now, we ignore the pre-existing type. We assume all existing types + * are simply the `any`, so we keep the new type. Ideally we can programmatically calculate the most specific type. + * + * We should consider making the StaticType? parameter non-nullable. + * + * @property locals TypeEnv in which this rex tree is evaluated. + */ + @OptIn(PartiQLValueExperimental::class) + private inner class RexTyper(private val locals: TypeEnv) : PlanRewriter() { + + override fun visitRex(node: Rex, ctx: StaticType?): Rex = visitRexOp(node.op, node.type) as Rex + + override fun visitRexOpLit(node: Rex.Op.Lit, ctx: StaticType?): Rex { + // type comes from RexConverter + return rex(ctx!!, node) + } + + override fun visitRexOpVarResolved(node: Rex.Op.Var.Resolved, ctx: StaticType?): Rex { + assert(node.ref < locals.schema.size) { "Invalid resolved variable (var ${node.ref}) for $locals" } + val type = locals.schema[node.ref].type + return rex(type, node) + } + + override fun visitRexOpVarUnresolved(node: Rex.Op.Var.Unresolved, ctx: StaticType?): Rex { + val path = node.identifier.toBindingPath() + val resolvedVar = env.resolve(path, locals, node.scope) + + if (resolvedVar == null) { + handleUndefinedVariable(path.steps.last()) + return rex(StaticType.ANY, rexOpErr("Undefined variable ${node.identifier}")) + } + val type = resolvedVar.type + val op = when (resolvedVar) { + is ResolvedVar.Global -> rexOpGlobal(resolvedVar.ordinal) + is ResolvedVar.Local -> resolvedLocalPath(resolvedVar) + } + return rex(type, op) + } + + override fun visitRexOpGlobal(node: Rex.Op.Global, ctx: StaticType?): Rex { 
+ val global = env.globals[node.ref] + val type = global.type + return rex(type, node) + } + + /** + * Match path as far as possible (rewriting the steps), then infer based on resolved root and rewritten steps. + */ + override fun visitRexOpPath(node: Rex.Op.Path, ctx: StaticType?): Rex { + val visitedSteps = node.steps.map { visitRexOpPathStep(it, null) as Rex.Op.Path.Step } + // 1. Resolve path prefix + val (root, steps) = when (val rootOp = node.root.op) { + is Rex.Op.Var.Unresolved -> { + // Rewrite the root + val path = rexPathToBindingPath(rootOp, visitedSteps) + val resolvedVar = env.resolve(path, locals, rootOp.scope) + if (resolvedVar == null) { + handleUndefinedVariable(path.steps.last()) + return rex(StaticType.ANY, node) + } + val type = resolvedVar.type + val (op, steps) = when (resolvedVar) { + // Root (and some steps) was a local. Replace the matched nodes with disambiguated steps + // and return the remaining steps to continue typing. + is ResolvedVar.Local -> { + val amountRemaining = (visitedSteps.size + 1) - resolvedVar.depth + val remainingSteps = visitedSteps.takeLast(amountRemaining) + resolvedLocalPath(resolvedVar) to remainingSteps + } + is ResolvedVar.Global -> { + // Root (and some steps) was a global; replace root and re-calculate remaining steps. + val remainingFirstIndex = resolvedVar.depth - 1 + val remaining = when (remainingFirstIndex > visitedSteps.lastIndex) { + true -> emptyList() + false -> visitedSteps.subList(remainingFirstIndex, visitedSteps.size) + } + rexOpGlobal(resolvedVar.ordinal) to remaining + } + } + // rewrite root + rex(type, op) to steps + } + else -> visitRex(node.root, node.root.type) to visitedSteps + } + + // short-circuit if whole path was matched + if (steps.isEmpty()) { + return root + } + + // 2. 
TODO rewrite and type the steps containing expressions + // val typedSteps = steps.map { + // if (it is Rex.Op.Path.Step.Index) { + // val key = visitRex(it.key, null) + // rexOpPathStepIndex(key) + // } else it + // } + + // 3. Walk the steps, determine the path type, and replace each step with the disambiguated equivalent + // (AKA made sensitive, if possible) + var type = root.type + val newSteps = steps.map { step -> + val (stepType, replacementStep) = inferPathStep(type, step) + type = stepType + replacementStep + } + + // 4. Invalid path reference; always MISSING + if (type == StaticType.MISSING) { + handleAlwaysMissing() + return rexErr("Unknown identifier $node") + } + + // 5. Non-missing, root is resolved + return rex(type, rexOpPath(root, newSteps)) + } + + /** + * Resolve and type scalar function calls. + * + * @param node + * @param ctx + * @return + */ + override fun visitRexOpCallStatic(node: Rex.Op.Call.Static, ctx: StaticType?): Rex { + // Already resolved; unreachable but handle gracefully. + if (node.fn is Fn.Resolved) return rex(ctx!!, node) + + // Type the arguments + val fn = node.fn as Fn.Unresolved + val isNotMissable = fn.isNotMissable() + val args = node.args.map { visitRex(it, null) } + + // Try to match the arguments to functions defined in the catalog + return when (val match = env.resolveFn(fn, args)) { + is FnMatch.Ok -> toRexCall(match, args, isNotMissable) + is FnMatch.Dynamic -> { + val types = mutableSetOf() + if (match.isMissable && !isNotMissable) { + types.add(StaticType.MISSING) + } + val candidates = match.candidates.map { candidate -> + val rex = toRexCall(candidate, args, isNotMissable) + val staticCall = rex.op as? Rex.Op.Call.Static ?: error("ToRexCall should always return a static call.") + val resolvedFn = staticCall.fn as? 
Fn.Resolved ?: error("This should have been resolved") + types.add(rex.type) + val coercions = candidate.mapping.map { it?.let { fnResolved(it) } } + rexOpCallDynamicCandidate(fn = resolvedFn, coercions = coercions) + } + val op = rexOpCallDynamic(args = args, candidates = candidates) + rex(type = StaticType.unionOf(types).flatten(), op = op) + } + is FnMatch.Error -> { + handleUnknownFunction(match) + rexErr("Unknown scalar function $fn") + } + } + } + + override fun visitRexOpCallDynamic(node: Rex.Op.Call.Dynamic, ctx: StaticType?): Rex { + return rex(ANY, rexOpErr("Direct dynamic calls are not supported. This should have been a static call.")) + } + + private fun toRexCall(match: FnMatch.Ok, args: List, isNotMissable: Boolean): Rex { + // Found a match! + val newFn = fnResolved(match.signature) + val newArgs = rewriteFnArgs(match.mapping, args) + val returns = newFn.signature.returns + + // 7.1 All functions return MISSING when one of their inputs is MISSING (except `=`) + newArgs.forEach { + if (it.type == MissingType && !isNotMissable) { + handleAlwaysMissing() + return rex(StaticType.MISSING, rexOpCallStatic(newFn, newArgs)) + } + } + + // If a function is NOT Missable (i.e., does not propagate MISSING) + // then treat MISSING as null. 
+ var isMissing = false + var isMissable = false + if (isNotMissable) { + if (newArgs.any { it.type is MissingType }) { + isMissing = true + } else if (newArgs.any { it.type.isMissable() }) { + isMissable = true + } + } + + // Determine the nullability of the return type + var isNull = false // True iff NULL CALL and has a NULL arg + var isNullable = false // True iff NULL CALL and has a NULLABLE arg; or is a NULLABLE operator + if (newFn.signature.isNullCall) { + if (isMissing) { + isNull = true + } else if (isMissable) { + isNullable = true + } else { + for (arg in newArgs) { + if (arg.type is NullType) { + isNull = true + break + } + if (arg.type.isNullable()) { + isNullable = true + break + } + } + } + } + isNullable = isNullable || newFn.signature.isNullable + + // Return type with calculated nullability + var type = when { + isNull -> StaticType.NULL + isNullable -> returns.toStaticType() + else -> returns.toNonNullStaticType() + } + + // Some operators can return MISSING during runtime + if (match.isMissable && !isNotMissable) { + type = StaticType.unionOf(type, StaticType.MISSING) + } + + // Finally, rewrite this node + val op = rexOpCallStatic(newFn, newArgs) + return rex(type.flatten(), op) + } + + override fun visitRexOpCase(node: Rex.Op.Case, ctx: StaticType?): Rex { + // Type branches and prune branches known to never execute + val newBranches = node.branches.map { visitRexOpCaseBranch(it, it.rex.type) } + .filterNot { isLiteralBool(it.condition, false) } + + newBranches.forEach { branch -> + if (canBeBoolean(branch.condition.type).not()) { + onProblem.invoke( + Problem( + UNKNOWN_PROBLEM_LOCATION, + PlanningProblemDetails.IncompatibleTypesForOp(branch.condition.type.allTypes, "CASE_WHEN") + ) + ) + } + } + val default = visitRex(node.default, node.default.type) + + // Calculate final expression (short-circuit to first branch if the condition is always TRUE). 
+ val resultTypes = newBranches.map { it.rex }.map { it.type } + listOf(default.type) + return when (newBranches.size) { + 0 -> default + else -> when (isLiteralBool(newBranches[0].condition, true)) { + true -> newBranches[0].rex + false -> rex( + type = StaticType.unionOf(resultTypes.toSet()).flatten(), + node.copy(branches = newBranches, default = default) + ) + } + } + } + + private fun canBeBoolean(type: StaticType): Boolean { + return type.flatten().allTypes.any { + it is BoolType + } + } + + @OptIn(PartiQLValueExperimental::class) + private fun isLiteralBool(rex: Rex, bool: Boolean): Boolean { + val op = rex.op as? Rex.Op.Lit ?: return false + val value = op.value as? BoolValue ?: return false + return value.value == bool + } + + /** + * We need special handling for: + * ``` + * CASE + * WHEN a IS STRUCT THEN a + * ELSE { 'a': a } + * END + * ``` + * When we type the above, if we know that `a` can be many different types (one of them being a struct), + * then when we see the top-level `a IS STRUCT`, then we can assume that the `a` on the RHS is definitely a + * struct. We handle this by using [foldCaseBranch]. + */ + override fun visitRexOpCaseBranch(node: Rex.Op.Case.Branch, ctx: StaticType?): Rex.Op.Case.Branch { + val visitedCondition = visitRex(node.condition, node.condition.type) + val visitedReturn = visitRex(node.rex, node.rex.type) + return foldCaseBranch(visitedCondition, visitedReturn) + } + + /** + * This takes in a [Rex.Op.Case.Branch.condition] and [Rex.Op.Case.Branch.rex]. If the [condition] is a type check, + * AKA ` IS STRUCT`, then this function will return a new [Rex.Op.Case.Branch] whose [Rex.Op.Case.Branch.rex] + * assumes that the type will always be a struct. The [Rex.Op.Case.Branch.condition] will be replaced by a + * boolean literal if it is KNOWN whether a branch will always/never execute. This can be used to prune the + * branch in subsequent passes. + * + * TODO: Currently, this only folds type checking for STRUCTs. 
We need to add support for all other types. + * + * TODO: I added a check for [Rex.Op.Var.Resolved] as it seemed odd to replace a general expression like: + * `WHEN { 'a': { 'b': 1} }.a IS STRUCT THEN { 'a': { 'b': 1} }.a.b`. We can discuss this later, but I'm + * currently limiting the scope of this intentionally. + */ + private fun foldCaseBranch(condition: Rex, result: Rex): Rex.Op.Case.Branch { + val call = condition.op as? Rex.Op.Call ?: return rexOpCaseBranch(condition, result) + when (call) { + is Rex.Op.Call.Dynamic -> { + val rex = call.candidates.map { candidate -> + val fn = candidate.fn as? Fn.Resolved ?: return rexOpCaseBranch(condition, result) + if (fn.signature.name.equals("is_struct", ignoreCase = true).not()) { + return rexOpCaseBranch(condition, result) + } + val ref = call.args.getOrNull(0) ?: error("IS STRUCT requires an argument.") + // Replace the result's type + val type = AnyOfType(ref.type.allTypes.filterIsInstance().toSet()) + val replacementVal = ref.copy(type = type) + when (ref.op is Rex.Op.Var.Resolved) { + true -> RexReplacer.replace(result, ref, replacementVal) + false -> result + } + } + val type = rex.toUnionType().flatten() + + return rexOpCaseBranch(condition, result.copy(type)) + } + is Rex.Op.Call.Static -> { + val fn = call.fn as? 
Fn.Resolved ?: return rexOpCaseBranch(condition, result) + if (fn.signature.name.equals("is_struct", ignoreCase = true).not()) { + return rexOpCaseBranch(condition, result) + } + val ref = call.args.getOrNull(0) ?: error("IS STRUCT requires an argument.") + val simplifiedCondition = when { + ref.type.allTypes.all { it is StructType } -> rex(StaticType.BOOL, rexOpLit(boolValue(true))) + ref.type.allTypes.none { it is StructType } -> rex(StaticType.BOOL, rexOpLit(boolValue(false))) + else -> condition + } + + // Replace the result's type + val type = AnyOfType(ref.type.allTypes.filterIsInstance().toSet()) + val replacementVal = ref.copy(type = type) + val rex = when (ref.op is Rex.Op.Var.Resolved) { + true -> RexReplacer.replace(result, ref, replacementVal) + false -> result + } + return rexOpCaseBranch(simplifiedCondition, rex) + } + } + } + + override fun visitRexOpCollection(node: Rex.Op.Collection, ctx: StaticType?): Rex { + if (ctx!! !is CollectionType) { + handleUnexpectedType(ctx, setOf(StaticType.LIST, StaticType.BAG, StaticType.SEXP)) + return rex(StaticType.NULL_OR_MISSING, rexOpErr("Expected collection type")) + } + val values = node.values.map { visitRex(it, it.type) } + val t = when (values.size) { + 0 -> ANY + else -> values.toUnionType() + } + val type = when (ctx as CollectionType) { + is BagType -> BagType(t) + is ListType -> ListType(t) + is SexpType -> SexpType(t) + } + return rex(type, rexOpCollection(values)) + } + + @OptIn(PartiQLValueExperimental::class) + override fun visitRexOpStruct(node: Rex.Op.Struct, ctx: StaticType?): Rex { + val fields = node.fields.map { + val k = visitRex(it.k, it.k.type) + val v = visitRex(it.v, it.v.type) + rexOpStructField(k, v) + } + var structIsClosed = true + val structTypeFields = mutableListOf() + val structKeysSeent = mutableSetOf() + for (field in fields) { + when (field.k.op) { + is Rex.Op.Lit -> { + // A field is only included in the StructType if its key is a text literal + val key = field.k.op as 
Rex.Op.Lit + if (key.value is TextValue<*>) { + val name = (key.value as TextValue<*>).string!! + val type = field.v.type + structKeysSeent.add(name) + structTypeFields.add(StructType.Field(name, type)) + } + } + else -> { + if (field.k.type.allTypes.any { it.isText() }) { + // If the non-literal could be text, StructType will have open content. + structIsClosed = false + } else { + // A field with a non-literal key name is not included in the StructType. + } + } + } + } + val type = StructType( + fields = structTypeFields, + contentClosed = structIsClosed, + constraints = setOf( + TupleConstraint.Open(!structIsClosed), + TupleConstraint.UniqueAttrs(structKeysSeent.size == fields.size) + ), + ) + return rex(type, rexOpStruct(fields)) + } + + override fun visitRexOpPivot(node: Rex.Op.Pivot, ctx: StaticType?): Rex { + TODO("Type RexOpPivot") + } + + override fun visitRexOpSubquery(node: Rex.Op.Subquery, ctx: StaticType?): Rex { + val select = visitRexOpSelect(node.select, ctx).op as Rex.Op.Select + val subquery = node.copy(select = select) + return when (node.coercion) { + Rex.Op.Subquery.Coercion.SCALAR -> visitRexOpSubqueryScalar(subquery, select.constructor.type) + Rex.Op.Subquery.Coercion.ROW -> visitRexOpSubqueryRow(subquery, select.constructor.type) + } + } + + /** + * Calculate output type of a row-value subquery. + */ + private fun visitRexOpSubqueryRow(subquery: Rex.Op.Subquery, cons: StaticType): Rex { + if (cons !is StructType) { + return rexErr("Subquery with non-SQL SELECT cannot be coerced to a row-value expression. Found constructor type: $cons") + } + // Do a simple cardinality check for the moment. + // TODO we can only check cardinality if we know we are in a a comparison operator. 
+ // val n = coercion.columns.size + // val m = cons.fields.size + // if (n != m) { + // return rexErr("Cannot coercion subquery with $m attributes to a row-value-expression with $n attributes") + // } + // If we made it this far, then we can coerce this subquery to the desired complex value + val type = StaticType.LIST + val op = subquery + return rex(type, op) + } + + /** + * Calculate output type of a scalar subquery. + */ + private fun visitRexOpSubqueryScalar(subquery: Rex.Op.Subquery, cons: StaticType): Rex { + if (cons !is StructType) { + return rexErr("Subquery with non-SQL SELECT cannot be coerced to a scalar. Found constructor type: $cons") + } + val n = cons.fields.size + if (n != 1) { + return rexErr("SELECT constructor with $n attributes cannot be coerced to a scalar. Found constructor type: $cons") + } + // If we made it this far, then we can coerce this subquery to a scalar + val type = cons.fields.first().value + val op = subquery + return rex(type, op) + } + + override fun visitRexOpSelect(node: Rex.Op.Select, ctx: StaticType?): Rex { + val rel = node.rel.type(locals) + val typeEnv = TypeEnv(rel.type.schema, ResolutionStrategy.LOCAL) + var constructor = node.constructor.type(typeEnv) + var constructorType = constructor.type + // add the ordered property to the constructor + if (constructorType is StructType) { + // TODO: We shouldn't need to copy the ordered constraint. 
+ constructorType = constructorType.copy( + constraints = constructorType.constraints + setOf(TupleConstraint.Ordered) + ) + constructor = rex(constructorType, constructor.op) + } + val type = when (rel.isOrdered()) { + true -> ListType(constructor.type) + else -> BagType(constructor.type) + } + return rex(type, rexOpSelect(constructor, rel)) + } + + override fun visitRexOpTupleUnion(node: Rex.Op.TupleUnion, ctx: StaticType?): Rex { + val args = node.args.map { visitRex(it, ctx) } + val type = when (args.size) { + 0 -> StructType( + fields = emptyMap(), contentClosed = true, + constraints = setOf( + TupleConstraint.Open(false), TupleConstraint.UniqueAttrs(true), TupleConstraint.Ordered + ) + ) + else -> { + val argTypes = args.map { it.type } + val potentialTypes = buildArgumentPermutations(argTypes).map { argumentList -> + calculateTupleUnionOutputType(argumentList) + } + StaticType.unionOf(potentialTypes.toSet()).flatten() + } + } + val op = rexOpTupleUnion(args) + return rex(type, op) + } + + override fun visitRexOpErr(node: Rex.Op.Err, ctx: StaticType?): PlanNode { + val type = ctx ?: StaticType.ANY + return rex(type, node) + } + + // Helpers + + /** + * Given a list of [args], this calculates the output type of `TUPLEUNION(args)`. NOTE: This does NOT handle union + * types intentionally. This function expects that all arguments be flattened, and, if need be, that you invoke + * this function multiple times based on the permutations of arguments. + * + * The signature of TUPLEUNION is: (LIST) -> STRUCT. + * + * If any of the arguments are NULL (or potentially NULL), we return NULL. + * If any of the arguments are non-struct, we return MISSING. + * + * Now, assuming all the other arguments are STRUCT, then we compute the output based on a number of factors: + * - closed content + * - ordering + * - unique attributes + * + * If all arguments are closed content, then the output is closed content. + * If all arguments are ordered, then the output is ordered. 
+ * If all arguments contain unique attributes AND all arguments are closed AND no fields clash, the output has + * unique attributes. + */ + private fun calculateTupleUnionOutputType(args: List): StaticType { + val structFields = mutableListOf() + var structAmount = 0 + var structIsClosed = true + var structIsOrdered = true + var uniqueAttrs = true + val possibleOutputTypes = mutableListOf() + args.forEach { arg -> + when (arg) { + is StructType -> { + structAmount += 1 + structFields.addAll(arg.fields) + structIsClosed = structIsClosed && arg.constraints.contains(TupleConstraint.Open(false)) + structIsOrdered = structIsOrdered && arg.constraints.contains(TupleConstraint.Ordered) + uniqueAttrs = uniqueAttrs && arg.constraints.contains(TupleConstraint.UniqueAttrs(true)) + } + is AnyOfType -> { + onProblem.invoke( + Problem( + UNKNOWN_PROBLEM_LOCATION, + PlanningProblemDetails.CompileError("TupleUnion wasn't normalized to exclude union types.") + ) + ) + possibleOutputTypes.add(StaticType.MISSING) + } + is NullType -> { + return StaticType.NULL + } + else -> { + return StaticType.MISSING + } + } + } + uniqueAttrs = when { + structIsClosed.not() && structAmount > 1 -> false + else -> uniqueAttrs + } + uniqueAttrs = uniqueAttrs && (structFields.size == structFields.distinctBy { it.key }.size) + val orderedConstraint = when (structIsOrdered) { + true -> TupleConstraint.Ordered + false -> null + } + val constraints = setOfNotNull( + TupleConstraint.Open(!structIsClosed), TupleConstraint.UniqueAttrs(uniqueAttrs), orderedConstraint + ) + return StructType( + fields = structFields.map { it }, contentClosed = structIsClosed, constraints = constraints + ) + } + + /** + * We are essentially making permutations of arguments that maintain the same initial ordering. 
For example, + * consider the following args: + * ``` + * [ 0 = UNION(INT, STRING), 1 = (DECIMAL, TIMESTAMP) ] + * ``` + * This function will return: + * ``` + * [ + * [ 0 = INT, 1 = DECIMAL ], + * [ 0 = INT, 1 = TIMESTAMP ], + * [ 0 = STRING, 1 = DECIMAL ], + * [ 0 = STRING, 1 = TIMESTAMP ] + * ] + * ``` + * + * Essentially, this becomes useful specifically in the case of TUPLEUNION, since we can make sure that + * the ordering of argument's attributes remains the same. For example: + * ``` + * TUPLEUNION( UNION(STRUCT(a, b), STRUCT(c)), UNION(STRUCT(d, e), STRUCT(f)) ) + * ``` + * + * Then, the output of the tupleunion will have the output types of all of the below: + * ``` + * TUPLEUNION(STRUCT(a,b), STRUCT(d,e)) --> STRUCT(a, b, d, e) + * TUPLEUNION(STRUCT(a,b), STRUCT(f)) --> STRUCT(a, b, f) + * TUPLEUNION(STRUCT(c), STRUCT(d,e)) --> STRUCT(c, d, e) + * TUPLEUNION(STRUCT(c), STRUCT(f)) --> STRUCT(c, f) + * ``` + */ + private fun buildArgumentPermutations(args: List): Sequence> { + val flattenedArgs = args.map { it.flatten().allTypes } + return buildArgumentPermutations(flattenedArgs, accumulator = emptyList()) + } + + private fun buildArgumentPermutations( + args: List>, + accumulator: List, + ): Sequence> { + if (args.isEmpty()) { + return sequenceOf(accumulator) + } + val first = args.first() + val rest = when (args.size) { + 1 -> emptyList() + else -> args.subList(1, args.size) + } + return sequence { + first.forEach { argSubType -> + yieldAll(buildArgumentPermutations(rest, accumulator + listOf(argSubType))) + } + } + } + + // Helpers + + /** + * @return a [Pair] where the [Pair.first] represents the type of the [step] and the [Pair.second] represents + * the disambiguated [step]. 
+ */ + private fun inferPathStep(type: StaticType, step: Rex.Op.Path.Step): Pair = + when (type) { + is AnyType -> StaticType.ANY to step + is StructType -> inferPathStep(type, step) + is ListType, is SexpType -> inferPathStep(type as CollectionType, step) to step + is AnyOfType -> { + when (type.types.size) { + 0 -> throw IllegalStateException("Cannot path on an empty StaticType union") + else -> { + val prevTypes = type.allTypes + if (prevTypes.any { it is AnyType }) { + StaticType.ANY to step + } else { + val results = prevTypes.map { inferPathStep(it, step) } + val types = results.map { it.first } + val firstResultStep = results.first().second + // replace step only if all are disambiguated + val replacementStep = when (results.map { it.second }.all { it == firstResultStep }) { + true -> firstResultStep + false -> step + } + AnyOfType(types.toSet()).flatten() to replacementStep + } + } + } + } + else -> StaticType.MISSING to step + } + + /** + * @return a [Pair] where the [Pair.first] represents the type of the [step] and the [Pair.second] represents + * the disambiguated [step]. 
+ */ + private fun inferPathStep(struct: StructType, step: Rex.Op.Path.Step): Pair = when (step) { + // { 'a': 1 }[0] should always return missing since tuples cannot be navigated via integer indexes + is Rex.Op.Path.Step.Index -> { + handleAlwaysMissing() + MISSING to step + } + is Rex.Op.Path.Step.Symbol -> { + val (type, replacementId) = inferStructLookup(struct, step.identifier) + type to replacementId.toPathStep() + } + is Rex.Op.Path.Step.Key -> { + if (step.key.type !is StringType) { + error("Expected string but found: ${step.key.type}") + } + if (step.key.op is Rex.Op.Lit) { + val lit = step.key.op.value + if (lit is TextValue<*> && !lit.isNull) { + val id = identifierSymbol(lit.string!!, Identifier.CaseSensitivity.SENSITIVE) + val (type, replacementId) = inferStructLookup(struct, id) + type to replacementId.toPathStep() + } else { + error("Expected text literal, but got $lit") + } + } else { + // cannot infer type of non-literal path step because we don't know its value + // we might improve upon this with some constant folding prior to typing + ANY to step + } + } + is Rex.Op.Path.Step.Unpivot -> error("Unpivot not supported") + is Rex.Op.Path.Step.Wildcard -> error("Wildcard not supported") + } + + private fun Identifier.Symbol.toPathStep() = rexOpPathStepSymbol(this) + + private fun inferPathStep(collection: CollectionType, step: Rex.Op.Path.Step): StaticType { + if (step !is Rex.Op.Path.Step.Index) { + error("Path step on a collection must be an expression") + } + if (step.key.type !is IntType) { + error("Collections must be indexed with integers, found ${step.key.type}") + } + return collection.elementType + } + + /** + * Logic is as follows: + * 1. If [struct] is closed and ordered: + * - If no item is found, return [MissingType] + * - Else, grab first matching item and make sensitive. + * 2. If [struct] is closed + * - AND no item is found, return [MissingType] + * - AND only one item is present -> grab item and make sensitive. 
+ * - AND more than one item is present, keep sensitivity and grab item. + * 3. If [struct] is open, return [AnyType] + * + * @return a [Pair] where the [Pair.first] represents the type of the [step] and the [Pair.second] represents + * the disambiguated [key]. + */ + private fun inferStructLookup(struct: StructType, key: Identifier.Symbol): Pair { + val binding = key.toBindingName() + val isClosed = struct.constraints.contains(TupleConstraint.Open(false)) + val isOrdered = struct.constraints.contains(TupleConstraint.Ordered) + val (name, type) = when { + // 1. Struct is closed and ordered + isClosed && isOrdered -> { + struct.fields.firstOrNull { entry -> binding.isEquivalentTo(entry.key) }?.let { + (sensitive(it.key) to it.value) + } ?: (key to StaticType.MISSING) + } + // 2. Struct is closed + isClosed -> { + val matches = struct.fields.filter { entry -> binding.isEquivalentTo(entry.key) } + when (matches.size) { + 0 -> (key to StaticType.MISSING) + 1 -> matches.first().let { (sensitive(it.key) to it.value) } + else -> { + val firstKey = matches.first().key + val sharedKey = when (matches.all { it.key == firstKey }) { + true -> sensitive(firstKey) + false -> key + } + sharedKey to StaticType.unionOf(matches.map { it.value }.toSet()).flatten() + } + } + } + // 3. Struct is open + else -> (key to StaticType.ANY) + } + return type to name + } + + private fun sensitive(str: String): Identifier.Symbol = + identifierSymbol(str, Identifier.CaseSensitivity.SENSITIVE) + + /** + * Resolution and typing of aggregation function calls. + * + * I've chosen to place this in RexTyper because all arguments will be typed using the same locals. + * There's no need to create new RexTyper instances for each argument. There is no reason to limit aggregations + * to a single argument (covar, corr, pct, etc.) but in practice we typically only have single . 
+ * + * This method is _very_ similar to scalar function resolution, so it is temping to DRY these two out; but the + * separation is cleaner as the typing of NULLS is subtly different. + * + * SQL-99 6.16 General Rules on + * Let TX be the single-column table that is the result of applying the + * to each row of T and eliminating null values <--- all NULL values are eliminated as inputs + */ + public fun resolveAgg(agg: Agg.Unresolved, arguments: List): Pair { + var missingArg = false + val args = arguments.map { + val arg = visitRex(it, null) + if (arg.type.isMissable()) missingArg = true + arg + } + + // + if (missingArg) { + handleAlwaysMissing() + return relOpAggregateCall(agg, listOf(rexErr("MISSING"))) to MissingType + } + + // Try to match the arguments to functions defined in the catalog + return when (val match = env.resolveAgg(agg, args)) { + is FnMatch.Ok -> { + // Found a match! + val newAgg = aggResolved(match.signature) + val newArgs = rewriteFnArgs(match.mapping, args) + val returns = newAgg.signature.returns + + // Return type with calculated nullability + var type = when { + newAgg.signature.isNullable -> returns.toStaticType() + else -> returns.toNonNullStaticType() + } + + // Some operators can return MISSING during runtime + if (match.isMissable) { + type = StaticType.unionOf(type, StaticType.MISSING).flatten() + } + + // Finally, rewrite this node + relOpAggregateCall(newAgg, newArgs) to type + } + is FnMatch.Dynamic -> TODO("Dynamic aggregates not yet supported.") + is FnMatch.Error -> { + handleUnknownFunction(match) + return relOpAggregateCall(agg, listOf(rexErr("MISSING"))) to MissingType + } + } + } + } + + // HELPERS + + private fun Rel.type(typeEnv: TypeEnv): Rel = RelTyper(typeEnv).visitRel(this, null) + + private fun Rex.type(typeEnv: TypeEnv) = RexTyper(typeEnv).visitRex(this, this.type) + + private fun rexErr(message: String) = rex(StaticType.MISSING, rexOpErr(message)) + + /** + * I found decorating the tree with the binding names 
(for resolution) was easier than associating introduced + * bindings with a node via an id->list map. ONLY because right now I don't think we have a good way + * of managing ids when trees are rewritten. + * + * We need a good answer for these questions before going for it: + * - If you copy, should the id should come along for the ride? + * - If someone writes their own pass and forgets to copy the id, then resolution could break. + * + * We may be able to eliminate this issue by keeping everything internal and running the typing pass first. + * This is simple enough for now. + */ + private fun Rel.Type.copyWithSchema(types: List): Rel.Type { + assert(types.size == schema.size) { "Illegal copy, types size does not matching bindings list size" } + return this.copy(schema = schema.mapIndexed { i, binding -> binding.copy(type = types[i]) }) + } + + private fun Identifier.toBindingPath() = when (this) { + is Identifier.Qualified -> this.toBindingPath() + is Identifier.Symbol -> BindingPath(listOf(this.toBindingName())) + } + + private fun Identifier.Qualified.toBindingPath() = BindingPath(steps = steps.map { it.toBindingName() }) + + private fun Identifier.Symbol.toBindingName() = BindingName( + name = symbol, + bindingCase = when (caseSensitivity) { + Identifier.CaseSensitivity.SENSITIVE -> BindingCase.SENSITIVE + Identifier.CaseSensitivity.INSENSITIVE -> BindingCase.INSENSITIVE + } + ) + + private fun Rel.isOrdered(): Boolean = type.props.contains(Rel.Prop.ORDERED) + + /** + * Produce a union type from all the + */ + private fun List.toUnionType(): StaticType = AnyOfType(map { it.type }.toSet()).flatten() + + /** + * Helper function which returns the literal string/symbol steps of a path expression as a [BindingPath]. 
+ * + * TODO this does not handle constant expressions in `[]`, only literals + */ + @OptIn(PartiQLValueExperimental::class) + private fun rexPathToBindingPath(rootOp: Rex.Op.Var.Unresolved, steps: List): BindingPath { + if (rootOp.identifier !is Identifier.Symbol) { + throw IllegalArgumentException("Expected identifier symbol") + } + val bindingRoot = rootOp.identifier.toBindingName() + val bindingSteps = mutableListOf(bindingRoot) + for (step in steps) { + when (step) { + is Rex.Op.Path.Step.Index -> break + is Rex.Op.Path.Step.Symbol -> bindingSteps.add(step.identifier.toBindingName()) + is Rex.Op.Path.Step.Key -> break + else -> break // short-circuit + } + } + return BindingPath(bindingSteps) + } + + private fun getElementTypeForFromSource(fromSourceType: StaticType): StaticType = when (fromSourceType) { + is BagType -> fromSourceType.elementType + is ListType -> fromSourceType.elementType + is AnyType -> StaticType.ANY + is AnyOfType -> AnyOfType(fromSourceType.types.map { getElementTypeForFromSource(it) }.toSet()) + // All the other types coerce into a bag of themselves (including null/missing/sexp). + else -> fromSourceType + } + + /** + * Rewrites function arguments, wrapping in the given function if exists. + */ + private fun rewriteFnArgs(mapping: List, args: List): List { + if (mapping.size != args.size) { + error("Fatal, malformed function mapping") // should be unreachable given how a mapping is generated. 
+ } + val newArgs = mutableListOf() + for (i in mapping.indices) { + var a = args[i] + val m = mapping[i] + if (m != null) { + // rewrite + val type = m.returns.toNonNullStaticType() + val cast = rexOpCallStatic(fnResolved(m), listOf(a)) + a = rex(type, cast) + } + newArgs.add(a) + } + return newArgs + } + + private fun assertAsInt(type: StaticType) { + if (type.flatten().allTypes.any { variant -> variant is IntType }.not()) { + handleUnexpectedType(type, setOf(StaticType.INT)) + } + } + + /** + * Constructs a Rex.Op.Path from a resolved local + */ + private fun resolvedLocalPath(local: ResolvedVar.Local): Rex.Op { + val root = rex(local.rootType, rexOpVarResolved(local.ordinal)) + val steps = local.replacementSteps.map { + val case = when (it.bindingCase) { + BindingCase.SENSITIVE -> Identifier.CaseSensitivity.SENSITIVE + BindingCase.INSENSITIVE -> Identifier.CaseSensitivity.INSENSITIVE + } + val symbol = identifierSymbol(it.name, case) + rexOpPathStepSymbol(symbol) + } + return when (steps.isEmpty()) { + true -> root.op + false -> rexOpPath(root, steps) + } + } + + // ERRORS + + private fun handleUndefinedVariable(name: BindingName) { + onProblem( + Problem( + sourceLocation = UNKNOWN_PROBLEM_LOCATION, + details = PlanningProblemDetails.UndefinedVariable(name.name, name.bindingCase == BindingCase.SENSITIVE) + ) + ) + } + + private fun handleUnexpectedType(actual: StaticType, expected: Set) { + onProblem( + Problem( + sourceLocation = UNKNOWN_PROBLEM_LOCATION, + details = PlanningProblemDetails.UnexpectedType(actual, expected), + ) + ) + } + + private fun handleUnknownFunction(match: FnMatch.Error<*>) { + onProblem( + Problem( + sourceLocation = UNKNOWN_PROBLEM_LOCATION, + details = PlanningProblemDetails.UnknownFunction( + match.identifier.normalize(), + match.args.map { a -> a.type }, + ) + ) + ) + } + + private fun handleAlwaysMissing() { + onProblem( + Problem( + sourceLocation = UNKNOWN_PROBLEM_LOCATION, + details = 
PlanningProblemDetails.ExpressionAlwaysReturnsNullOrMissing + ) + ) + } + + private fun handleUnresolvedExcludeRoot(root: String) { + onProblem( + Problem( + sourceLocation = UNKNOWN_PROBLEM_LOCATION, + details = PlanningProblemDetails.UnresolvedExcludeExprRoot(root) + ) + ) + } + + // HELPERS + + private fun Identifier.normalize(): String = when (this) { + is Identifier.Qualified -> (listOf(root.normalize()) + steps.map { it.normalize() }).joinToString(".") + is Identifier.Symbol -> when (caseSensitivity) { + Identifier.CaseSensitivity.SENSITIVE -> symbol + Identifier.CaseSensitivity.INSENSITIVE -> symbol.lowercase() + } + } + + /** + * Indicates whether the given functions propagate Missing. + * + * Currently, Logical Functions : AND, OR, NOT, IS NULL, IS MISSING + * the equal function, function do not propagate Missing. + */ + private fun Fn.Unresolved.isNotMissable(): Boolean { + return when (identifier) { + is Identifier.Qualified -> false + is Identifier.Symbol -> when ((identifier as Identifier.Symbol).symbol) { + "and" -> true + "or" -> true + "not" -> true + "eq" -> true + "is_null" -> true + "is_missing" -> true + else -> false + } + } + } + + private fun Fn.Unresolved.isTypeAssertion(): Boolean { + return (identifier is Identifier.Symbol && (identifier as Identifier.Symbol).symbol.startsWith("is")) + } + + /** + * This will make all binding values nullables. If the value is a struct, each field will be nullable. + * + * Note, this does not handle union types or nullable struct types. 
+ */ + private fun List.pad() = map { + val type = when (val t = it.type) { + is StructType -> t.withNullableFields() + else -> t.asNullable() + } + relBinding(it.name, type) + } + + private fun StructType.withNullableFields(): StructType { + return copy(fields.map { it.copy(value = it.value.asNullable()) }) + } + + private fun excludeBindings(input: List, item: Rel.Op.Exclude.Item): List { + var matchedRoot = false + val output = input.map { + if (item.root.isEquivalentTo(it.name)) { + matchedRoot = true + // recompute the StaticType of this binding after apply the exclusions + val type = it.type.exclude(item.steps, false) + it.copy(type = type) + } else { + it + } + } + if (!matchedRoot) handleUnresolvedExcludeRoot(item.root.symbol) + return output + } + + private fun Identifier.Symbol.isEquivalentTo(other: String): Boolean = when (caseSensitivity) { + Identifier.CaseSensitivity.SENSITIVE -> symbol.equals(other) + Identifier.CaseSensitivity.INSENSITIVE -> symbol.equals(other, ignoreCase = true) + } +} diff --git a/partiql-planner/src/main/kotlin/org/partiql/planner/internal/typer/RexReplacer.kt b/partiql-planner/src/main/kotlin/org/partiql/planner/internal/typer/RexReplacer.kt new file mode 100644 index 000000000..1c4b41994 --- /dev/null +++ b/partiql-planner/src/main/kotlin/org/partiql/planner/internal/typer/RexReplacer.kt @@ -0,0 +1,44 @@ +/* + * Copyright Amazon.com, Inc. or its affiliates. All rights reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at: + * + * http://aws.amazon.com/apache2.0/ + * + * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific + * language governing permissions and limitations under the License. 
+ */ + +package org.partiql.planner.internal.typer + +import org.partiql.planner.internal.ir.Rex +import org.partiql.planner.internal.ir.rex +import org.partiql.planner.internal.ir.util.PlanRewriter + +/** + * Uses to replace [Rex]'s within an expression tree. + */ +internal object RexReplacer { + + /** + * Within the [Rex] tree of [rex], replaces all instances of [replace] with the [with]. + */ + internal fun replace(rex: Rex, replace: Rex, with: Rex): Rex { + val params = ReplaceParams(replace, with) + return RexReplacerImpl.visitRex(rex, params) + } + + private class ReplaceParams(val replace: Rex, val with: Rex) + + private object RexReplacerImpl : PlanRewriter() { + + override fun visitRex(node: Rex, ctx: ReplaceParams): Rex { + if (node == ctx.replace) { return ctx.with } + val op = visitRexOp(node.op, ctx) as Rex.Op + return if (op !== node.op) rex(node.type, op) else node + } + } +} diff --git a/partiql-planner/src/main/kotlin/org/partiql/planner/internal/typer/TypeLattice.kt b/partiql-planner/src/main/kotlin/org/partiql/planner/internal/typer/TypeLattice.kt new file mode 100644 index 000000000..bdf022269 --- /dev/null +++ b/partiql-planner/src/main/kotlin/org/partiql/planner/internal/typer/TypeLattice.kt @@ -0,0 +1,359 @@ +package org.partiql.planner.internal.typer + +import org.partiql.value.PartiQLValueExperimental +import org.partiql.value.PartiQLValueType +import org.partiql.value.PartiQLValueType.ANY +import org.partiql.value.PartiQLValueType.BAG +import org.partiql.value.PartiQLValueType.BINARY +import org.partiql.value.PartiQLValueType.BLOB +import org.partiql.value.PartiQLValueType.BOOL +import org.partiql.value.PartiQLValueType.BYTE +import org.partiql.value.PartiQLValueType.CHAR +import org.partiql.value.PartiQLValueType.CLOB +import org.partiql.value.PartiQLValueType.DATE +import org.partiql.value.PartiQLValueType.DECIMAL +import org.partiql.value.PartiQLValueType.DECIMAL_ARBITRARY +import org.partiql.value.PartiQLValueType.FLOAT32 +import 
org.partiql.value.PartiQLValueType.FLOAT64 +import org.partiql.value.PartiQLValueType.INT +import org.partiql.value.PartiQLValueType.INT16 +import org.partiql.value.PartiQLValueType.INT32 +import org.partiql.value.PartiQLValueType.INT64 +import org.partiql.value.PartiQLValueType.INT8 +import org.partiql.value.PartiQLValueType.INTERVAL +import org.partiql.value.PartiQLValueType.LIST +import org.partiql.value.PartiQLValueType.MISSING +import org.partiql.value.PartiQLValueType.NULL +import org.partiql.value.PartiQLValueType.SEXP +import org.partiql.value.PartiQLValueType.STRING +import org.partiql.value.PartiQLValueType.STRUCT +import org.partiql.value.PartiQLValueType.SYMBOL +import org.partiql.value.PartiQLValueType.TIME +import org.partiql.value.PartiQLValueType.TIMESTAMP + +/** + * Going with a matrix here (using enum ordinals) as it's simple and avoids walking. + */ +internal typealias TypeGraph = Array> + +/** + * Each edge represents a type relationship + */ +internal data class TypeRelationship(val cast: CastType) + +/** + * An COERCION will be inserted by the compiler during function resolution, an EXPLICIT CAST will never be inserted. + * + * COERCION: Lossless CAST(V AS T) -> T + * EXPLICIT: Lossy CAST(V AS T) -> T + * UNSAFE: CAST(V AS T) -> T|MISSING + */ +internal enum class CastType { COERCION, EXPLICIT, UNSAFE } + +/** + * A place to model type relationships (for now this is to answer CAST inquiries). + * + * Is this indeed a lattice? It's a rather smart sounding word. 
+ */ +@OptIn(PartiQLValueExperimental::class) +internal class TypeLattice private constructor( + public val types: Array, + public val graph: TypeGraph, +) { + + public fun canCoerce(operand: PartiQLValueType, target: PartiQLValueType): Boolean { + return graph[operand][target]?.cast == CastType.COERCION + } + + internal val all = PartiQLValueType.values() + + internal val nullable = listOf( + NULL, // null.null + MISSING, // missing + ) + + internal val integer = listOf( + INT8, + INT16, + INT32, + INT64, + INT, + ) + + internal val numeric = listOf( + INT8, + INT16, + INT32, + INT64, + INT, + DECIMAL_ARBITRARY, + FLOAT32, + FLOAT64, + ) + + internal val text = listOf( + STRING, + SYMBOL, + CLOB, + ) + + internal val collections = listOf( + BAG, + LIST, + SEXP, + ) + + internal val datetime = listOf( + DATE, + TIME, + TIMESTAMP, + ) + + /** + * Dump the graph as an Asciidoc table. + */ + override fun toString(): String = buildString { + appendLine("|===") + appendLine() + // Header + append("| | ").appendLine(types.joinToString("| ")) + // Body + for (t1 in types) { + append("| $t1 ") + for (t2 in types) { + val symbol = when (val r = graph[t1][t2]) { + null -> "X" + else -> when (r.cast) { + CastType.COERCION -> "⬤" + CastType.EXPLICIT -> "◯" + CastType.UNSAFE -> "△" + } + } + append("| $symbol ") + } + appendLine() + } + appendLine() + appendLine("|===") + } + + private operator fun Array.get(t: PartiQLValueType): T = get(t.ordinal) + + companion object { + + private val N = PartiQLValueType.values().size + + private fun relationships(vararg relationships: Pair): Array { + val arr = arrayOfNulls(N) + for (type in relationships) { + arr[type.first] = type.second + } + return arr + } + + private fun coercion(): TypeRelationship = TypeRelationship(CastType.COERCION) + + private fun explicit(): TypeRelationship = TypeRelationship(CastType.EXPLICIT) + + private fun unsafe(): TypeRelationship = TypeRelationship(CastType.UNSAFE) + + private operator fun Array.set(t: 
PartiQLValueType, value: T): Unit = this.set(t.ordinal, value) + + /** + * Build the PartiQL type lattice. + * + * TODO this is incomplete. + */ + public fun partiql(): TypeLattice { + val types = PartiQLValueType.values() + val graph = arrayOfNulls>(N) + for (type in types) { + // initialize all with empty relationships + graph[type] = arrayOfNulls(N) + } + graph[ANY] = relationships( + ANY to coercion() + ) + graph[NULL] = relationships( + NULL to coercion() + ) + graph[MISSING] = relationships( + MISSING to coercion() + ) + graph[BOOL] = relationships( + BOOL to coercion(), + INT8 to explicit(), + INT16 to explicit(), + INT32 to explicit(), + INT64 to explicit(), + INT to explicit(), + DECIMAL to explicit(), + DECIMAL_ARBITRARY to explicit(), + FLOAT32 to explicit(), + FLOAT64 to explicit(), + CHAR to explicit(), + STRING to explicit(), + SYMBOL to explicit(), + ) + graph[INT8] = relationships( + BOOL to explicit(), + INT8 to coercion(), + INT16 to coercion(), + INT32 to coercion(), + INT64 to coercion(), + INT to coercion(), + DECIMAL to explicit(), + DECIMAL_ARBITRARY to coercion(), + FLOAT32 to coercion(), + FLOAT64 to coercion(), + STRING to explicit(), + SYMBOL to explicit(), + ) + graph[INT16] = relationships( + BOOL to explicit(), + INT8 to unsafe(), + INT16 to coercion(), + INT32 to coercion(), + INT64 to coercion(), + INT to coercion(), + DECIMAL to explicit(), + DECIMAL_ARBITRARY to coercion(), + FLOAT32 to coercion(), + FLOAT64 to coercion(), + STRING to explicit(), + SYMBOL to explicit(), + ) + graph[INT32] = relationships( + BOOL to explicit(), + INT8 to unsafe(), + INT16 to unsafe(), + INT32 to coercion(), + INT64 to coercion(), + INT to coercion(), + DECIMAL to explicit(), + DECIMAL_ARBITRARY to coercion(), + FLOAT32 to coercion(), + FLOAT64 to coercion(), + STRING to explicit(), + SYMBOL to explicit(), + ) + graph[INT64] = relationships( + BOOL to explicit(), + INT8 to unsafe(), + INT16 to unsafe(), + INT32 to unsafe(), + INT64 to coercion(), + 
INT to coercion(), + DECIMAL to explicit(), + DECIMAL_ARBITRARY to coercion(), + FLOAT32 to coercion(), + FLOAT64 to coercion(), + STRING to explicit(), + SYMBOL to explicit(), + ) + graph[INT] = relationships( + BOOL to explicit(), + INT8 to unsafe(), + INT16 to unsafe(), + INT32 to unsafe(), + INT64 to unsafe(), + INT to coercion(), + DECIMAL to explicit(), + DECIMAL_ARBITRARY to coercion(), + FLOAT32 to coercion(), + FLOAT64 to coercion(), + STRING to explicit(), + SYMBOL to explicit(), + ) + graph[DECIMAL] = relationships( + INT8 to explicit(), + INT16 to explicit(), + INT32 to explicit(), + INT64 to explicit(), + BOOL to explicit(), + DECIMAL to explicit(), + DECIMAL_ARBITRARY to explicit(), + FLOAT32 to explicit(), + FLOAT64 to explicit(), + STRING to explicit(), + SYMBOL to explicit(), + ) + graph[FLOAT32] = relationships( + BOOL to explicit(), + DECIMAL to explicit(), + DECIMAL_ARBITRARY to coercion(), + FLOAT32 to coercion(), + FLOAT64 to coercion(), + STRING to explicit(), + SYMBOL to explicit(), + ) + graph[FLOAT64] = relationships( + BOOL to explicit(), + DECIMAL to explicit(), + DECIMAL_ARBITRARY to coercion(), + FLOAT64 to coercion(), + STRING to explicit(), + SYMBOL to explicit(), + ) + graph[DECIMAL_ARBITRARY] = relationships( + BOOL to explicit(), + DECIMAL to explicit(), + DECIMAL_ARBITRARY to coercion(), + FLOAT32 to explicit(), + FLOAT64 to explicit(), + STRING to explicit(), + SYMBOL to explicit(), + ) + graph[CHAR] = relationships( + BOOL to explicit(), + CHAR to coercion(), + STRING to coercion(), + SYMBOL to coercion(), + ) + graph[STRING] = relationships( + BOOL to explicit(), + INT8 to unsafe(), + INT16 to unsafe(), + INT32 to unsafe(), + INT64 to unsafe(), + INT to unsafe(), + STRING to coercion(), + SYMBOL to explicit(), + CLOB to coercion(), + ) + graph[SYMBOL] = relationships( + BOOL to explicit(), + STRING to coercion(), + SYMBOL to coercion(), + CLOB to coercion(), + ) + graph[CLOB] = relationships( + CLOB to coercion(), + ) + 
graph[BINARY] = arrayOfNulls(N) + graph[BYTE] = arrayOfNulls(N) + graph[BLOB] = arrayOfNulls(N) + graph[DATE] = arrayOfNulls(N) + graph[TIME] = arrayOfNulls(N) + graph[TIMESTAMP] = arrayOfNulls(N) + graph[INTERVAL] = arrayOfNulls(N) + graph[BAG] = relationships( + BAG to coercion(), + ) + graph[LIST] = relationships( + BAG to coercion(), + SEXP to coercion(), + LIST to coercion(), + ) + graph[SEXP] = relationships( + BAG to coercion(), + SEXP to coercion(), + LIST to coercion(), + ) + graph[STRUCT] = relationships( + STRUCT to coercion(), + ) + return TypeLattice(types, graph.requireNoNulls()) + } + } +} diff --git a/partiql-planner/src/main/kotlin/org/partiql/planner/internal/typer/TypeUtils.kt b/partiql-planner/src/main/kotlin/org/partiql/planner/internal/typer/TypeUtils.kt new file mode 100644 index 000000000..436311682 --- /dev/null +++ b/partiql-planner/src/main/kotlin/org/partiql/planner/internal/typer/TypeUtils.kt @@ -0,0 +1,238 @@ +package org.partiql.planner.internal.typer + +import org.partiql.planner.internal.ir.Identifier +import org.partiql.planner.internal.ir.Rel +import org.partiql.types.AnyOfType +import org.partiql.types.AnyType +import org.partiql.types.BagType +import org.partiql.types.BlobType +import org.partiql.types.BoolType +import org.partiql.types.ClobType +import org.partiql.types.CollectionType +import org.partiql.types.DateType +import org.partiql.types.DecimalType +import org.partiql.types.FloatType +import org.partiql.types.GraphType +import org.partiql.types.IntType +import org.partiql.types.ListType +import org.partiql.types.MissingType +import org.partiql.types.NullType +import org.partiql.types.SexpType +import org.partiql.types.StaticType +import org.partiql.types.StringType +import org.partiql.types.StructType +import org.partiql.types.SymbolType +import org.partiql.types.TimeType +import org.partiql.types.TimestampType +import org.partiql.value.PartiQLValueExperimental +import org.partiql.value.PartiQLValueType + +internal fun 
StaticType.isNullOrMissing(): Boolean = (this is NullType || this is MissingType) + +internal fun StaticType.isNumeric(): Boolean = (this is IntType || this is FloatType || this is DecimalType) + +internal fun StaticType.isExactNumeric(): Boolean = (this is IntType || this is DecimalType) + +internal fun StaticType.isApproxNumeric(): Boolean = (this is FloatType) + +internal fun StaticType.isText(): Boolean = (this is SymbolType || this is StringType) + +internal fun StaticType.isUnknown(): Boolean = (this.isNullOrMissing() || this == StaticType.NULL_OR_MISSING) + +internal fun StaticType.isOptional(): Boolean = when (this) { + is AnyType, MissingType -> true // Any includes Missing type + is AnyOfType -> types.any { it.isOptional() } + else -> false +} + +/** + * Per SQL, runtime types are always nullable + */ +@OptIn(PartiQLValueExperimental::class) +internal fun PartiQLValueType.toStaticType(): StaticType = when (this) { + PartiQLValueType.NULL -> StaticType.NULL + PartiQLValueType.MISSING -> StaticType.MISSING + else -> toNonNullStaticType().asNullable() +} + +@OptIn(PartiQLValueExperimental::class) +internal fun PartiQLValueType.toNonNullStaticType(): StaticType = when (this) { + PartiQLValueType.ANY -> StaticType.ANY + PartiQLValueType.BOOL -> StaticType.BOOL + PartiQLValueType.INT8 -> StaticType.INT2 + PartiQLValueType.INT16 -> StaticType.INT2 + PartiQLValueType.INT32 -> StaticType.INT4 + PartiQLValueType.INT64 -> StaticType.INT8 + PartiQLValueType.INT -> StaticType.INT + PartiQLValueType.DECIMAL_ARBITRARY -> StaticType.DECIMAL + PartiQLValueType.DECIMAL -> StaticType.DECIMAL + PartiQLValueType.FLOAT32 -> StaticType.FLOAT + PartiQLValueType.FLOAT64 -> StaticType.FLOAT + PartiQLValueType.CHAR -> StaticType.CHAR + PartiQLValueType.STRING -> StaticType.STRING + PartiQLValueType.SYMBOL -> StaticType.SYMBOL + PartiQLValueType.BINARY -> TODO() + PartiQLValueType.BYTE -> TODO() + PartiQLValueType.BLOB -> StaticType.BLOB + PartiQLValueType.CLOB -> StaticType.CLOB + 
PartiQLValueType.DATE -> StaticType.DATE + PartiQLValueType.TIME -> StaticType.TIME + PartiQLValueType.TIMESTAMP -> StaticType.TIMESTAMP + PartiQLValueType.INTERVAL -> TODO() + PartiQLValueType.BAG -> StaticType.BAG + PartiQLValueType.LIST -> StaticType.LIST + PartiQLValueType.SEXP -> StaticType.SEXP + PartiQLValueType.STRUCT -> StaticType.STRUCT + PartiQLValueType.NULL -> StaticType.NULL + PartiQLValueType.MISSING -> StaticType.MISSING +} + +@OptIn(PartiQLValueExperimental::class) +internal fun StaticType.toRuntimeType(): PartiQLValueType { + if (this is AnyOfType) { + // handle anyOf(null, T) cases + val t = types.filter { it !is NullType && it !is MissingType } + return if (t.size != 1) { + error("Cannot have a UNION runtime type: $this") + } else { + t.first().asRuntimeType() + } + } + return this.asRuntimeType() +} + +@OptIn(PartiQLValueExperimental::class) +internal fun StaticType.toRuntimeTypeOrNull(): PartiQLValueType? { + return try { + this.toRuntimeType() + } catch (_: Throwable) { + null + } +} + +@OptIn(PartiQLValueExperimental::class) +private fun StaticType.asRuntimeType(): PartiQLValueType = when (this) { + is AnyOfType -> PartiQLValueType.ANY + is AnyType -> PartiQLValueType.ANY + is BlobType -> PartiQLValueType.BLOB + is BoolType -> PartiQLValueType.BOOL + is ClobType -> PartiQLValueType.CLOB + is BagType -> PartiQLValueType.BAG + is ListType -> PartiQLValueType.LIST + is SexpType -> PartiQLValueType.SEXP + is DateType -> PartiQLValueType.DATE + is DecimalType -> PartiQLValueType.DECIMAL_ARBITRARY + is FloatType -> PartiQLValueType.FLOAT64 + is GraphType -> error("Graph type missing from runtime types") + is IntType -> when (this.rangeConstraint) { + IntType.IntRangeConstraint.SHORT -> PartiQLValueType.INT16 + IntType.IntRangeConstraint.INT4 -> PartiQLValueType.INT32 + IntType.IntRangeConstraint.LONG -> PartiQLValueType.INT64 + IntType.IntRangeConstraint.UNCONSTRAINED -> PartiQLValueType.INT + } + MissingType -> PartiQLValueType.MISSING + is 
NullType -> PartiQLValueType.NULL + is StringType -> PartiQLValueType.STRING + is StructType -> PartiQLValueType.STRUCT + is SymbolType -> PartiQLValueType.SYMBOL + is TimeType -> PartiQLValueType.TIME + is TimestampType -> PartiQLValueType.TIMESTAMP +} + +/** + * Applies the given exclusion path to produce the reduced StaticType + * + * @param steps + * @param lastStepOptional + * @return + */ +internal fun StaticType.exclude(steps: List, lastStepOptional: Boolean = true): StaticType = + when (this) { + is StructType -> this.exclude(steps, lastStepOptional) + is CollectionType -> this.exclude(steps, lastStepOptional) + is AnyOfType -> StaticType.unionOf( + this.types.map { it.exclude(steps, lastStepOptional) }.toSet() + ) + else -> this + }.flatten() + +/** + * Applies exclusions to struct fields. + * + * @param steps + * @param lastStepOptional + * @return + */ +internal fun StructType.exclude(steps: List, lastStepOptional: Boolean = true): StaticType { + val step = steps.first() + val output = fields.map { field -> + val newField = if (steps.size == 1) { + if (lastStepOptional) { + StructType.Field(field.key, field.value.asOptional()) + } else { + null + } + } else { + val k = field.key + val v = field.value.exclude(steps.drop(1), lastStepOptional) + StructType.Field(k, v) + } + when (step) { + is Rel.Op.Exclude.Step.Attr -> { + if (step.symbol.isEquivalentTo(field.key)) { + newField + } else { + field + } + } + is Rel.Op.Exclude.Step.StructWildcard -> newField + else -> field + } + }.filterNotNull() + return this.copy(fields = output) +} + +/** + * Applies exclusions to collection element type. 
+ * + * @param steps + * @param lastStepOptional + * @return + */ +internal fun CollectionType.exclude(steps: List, lastStepOptional: Boolean = true): StaticType { + var e = this.elementType + when (steps.first()) { + is Rel.Op.Exclude.Step.Pos -> { + if (steps.size > 1) { + e = e.exclude(steps.drop(1), true) + } + } + is Rel.Op.Exclude.Step.CollectionWildcard -> { + if (steps.size > 1) { + e = e.exclude(steps.drop(1), lastStepOptional) + } + // currently no change to elementType if collection wildcard is last element; this behavior could + // change based on RFC definition + } + else -> { + // currently no change to elementType and no error thrown; could consider an error/warning in + // the future + } + } + return when (this) { + is BagType -> this.copy(e) + is ListType -> this.copy(e) + is SexpType -> this.copy(e) + } +} + +/** + * Compare an identifier to a struct field; handling case-insensitive comparisons. + * + * @param other + * @return + */ +private fun Identifier.Symbol.isEquivalentTo(other: String): Boolean = when (caseSensitivity) { + Identifier.CaseSensitivity.SENSITIVE -> symbol.equals(other) + Identifier.CaseSensitivity.INSENSITIVE -> symbol.equals(other, ignoreCase = true) +} diff --git a/partiql-planner/src/main/resources/builtins.sql b/partiql-planner/src/main/resources/builtins.sql new file mode 100644 index 000000000..a15ead3f6 --- /dev/null +++ b/partiql-planner/src/main/resources/builtins.sql @@ -0,0 +1,1681 @@ +-- [trim] --------- + +CREATE FUNCTION "TRIM"( + VALUE STRING) + RETURNS STRING + SPECIFIC - + RETURN TRIM ( VALUE ); + +CREATE FUNCTION "TRIM"( + VALUE NULLABLE_STRING) + RETURNS NULLABLE_STRING + SPECIFIC - + RETURN TRIM ( VALUE ); + +CREATE FUNCTION "TRIM"( + VALUE SYMBOL) + RETURNS SYMBOL + SPECIFIC - + RETURN TRIM ( VALUE ); + +CREATE FUNCTION "TRIM"( + VALUE NULLABLE_SYMBOL) + RETURNS NULLABLE_SYMBOL + SPECIFIC - + RETURN TRIM ( VALUE ); + + +-- [trim_leading] --------- + +CREATE FUNCTION "TRIM_LEADING"( + VALUE STRING) + 
RETURNS STRING + SPECIFIC - + RETURN TRIM_LEADING ( VALUE ); + +CREATE FUNCTION "TRIM_LEADING"( + VALUE NULLABLE_STRING) + RETURNS NULLABLE_STRING + SPECIFIC - + RETURN TRIM_LEADING ( VALUE ); + +CREATE FUNCTION "TRIM_LEADING"( + VALUE SYMBOL) + RETURNS SYMBOL + SPECIFIC - + RETURN TRIM_LEADING ( VALUE ); + +CREATE FUNCTION "TRIM_LEADING"( + VALUE NULLABLE_SYMBOL) + RETURNS NULLABLE_SYMBOL + SPECIFIC - + RETURN TRIM_LEADING ( VALUE ); + + +-- [trim_trailing] --------- + +CREATE FUNCTION "TRIM_TRAILING"( + VALUE STRING) + RETURNS STRING + SPECIFIC - + RETURN TRIM_TRAILING ( VALUE ); + +CREATE FUNCTION "TRIM_TRAILING"( + VALUE NULLABLE_STRING) + RETURNS NULLABLE_STRING + SPECIFIC - + RETURN TRIM_TRAILING ( VALUE ); + +CREATE FUNCTION "TRIM_TRAILING"( + VALUE SYMBOL) + RETURNS SYMBOL + SPECIFIC - + RETURN TRIM_TRAILING ( VALUE ); + +CREATE FUNCTION "TRIM_TRAILING"( + VALUE NULLABLE_SYMBOL) + RETURNS NULLABLE_SYMBOL + SPECIFIC - + RETURN TRIM_TRAILING ( VALUE ); + + +-- [null_if] --------- + +CREATE FUNCTION "NULL_IF"( + VALUE NULL + NULLIFIER BOOL) + RETURNS NULL + SPECIFIC - + RETURN NULL_IF ( VALUE, NULLIFIER ); + +CREATE FUNCTION "NULL_IF"( + VALUE MISSING + NULLIFIER BOOL) + RETURNS MISSING + SPECIFIC - + RETURN NULL_IF ( VALUE, NULLIFIER ); + +CREATE FUNCTION "NULL_IF"( + VALUE NULLABLE_BOOL + NULLIFIER BOOL) + RETURNS NULLABLE_BOOL + SPECIFIC - + RETURN NULL_IF ( VALUE, NULLIFIER ); + +CREATE FUNCTION "NULL_IF"( + VALUE NULLABLE_INT8 + NULLIFIER BOOL) + RETURNS NULLABLE_INT8 + SPECIFIC - + RETURN NULL_IF ( VALUE, NULLIFIER ); + +CREATE FUNCTION "NULL_IF"( + VALUE NULLABLE_INT16 + NULLIFIER BOOL) + RETURNS NULLABLE_INT16 + SPECIFIC - + RETURN NULL_IF ( VALUE, NULLIFIER ); + +CREATE FUNCTION "NULL_IF"( + VALUE NULLABLE_INT32 + NULLIFIER BOOL) + RETURNS NULLABLE_INT32 + SPECIFIC - + RETURN NULL_IF ( VALUE, NULLIFIER ); + +CREATE FUNCTION "NULL_IF"( + VALUE NULLABLE_INT64 + NULLIFIER BOOL) + RETURNS NULLABLE_INT64 + SPECIFIC - + RETURN NULL_IF ( VALUE, NULLIFIER ); 
+ +CREATE FUNCTION "NULL_IF"( + VALUE NULLABLE_INT + NULLIFIER BOOL) + RETURNS NULLABLE_INT + SPECIFIC - + RETURN NULL_IF ( VALUE, NULLIFIER ); + +CREATE FUNCTION "NULL_IF"( + VALUE NULLABLE_DECIMAL + NULLIFIER BOOL) + RETURNS NULLABLE_DECIMAL + SPECIFIC - + RETURN NULL_IF ( VALUE, NULLIFIER ); + +CREATE FUNCTION "NULL_IF"( + VALUE NULLABLE_FLOAT32 + NULLIFIER BOOL) + RETURNS NULLABLE_FLOAT32 + SPECIFIC - + RETURN NULL_IF ( VALUE, NULLIFIER ); + +CREATE FUNCTION "NULL_IF"( + VALUE NULLABLE_FLOAT64 + NULLIFIER BOOL) + RETURNS NULLABLE_FLOAT64 + SPECIFIC - + RETURN NULL_IF ( VALUE, NULLIFIER ); + +CREATE FUNCTION "NULL_IF"( + VALUE NULLABLE_CHAR + NULLIFIER BOOL) + RETURNS NULLABLE_CHAR + SPECIFIC - + RETURN NULL_IF ( VALUE, NULLIFIER ); + +CREATE FUNCTION "NULL_IF"( + VALUE NULLABLE_STRING + NULLIFIER BOOL) + RETURNS NULLABLE_STRING + SPECIFIC - + RETURN NULL_IF ( VALUE, NULLIFIER ); + +CREATE FUNCTION "NULL_IF"( + VALUE NULLABLE_SYMBOL + NULLIFIER BOOL) + RETURNS NULLABLE_SYMBOL + SPECIFIC - + RETURN NULL_IF ( VALUE, NULLIFIER ); + +CREATE FUNCTION "NULL_IF"( + VALUE NULLABLE_CLOB + NULLIFIER BOOL) + RETURNS NULLABLE_CLOB + SPECIFIC - + RETURN NULL_IF ( VALUE, NULLIFIER ); + +CREATE FUNCTION "NULL_IF"( + VALUE NULLABLE_BINARY + NULLIFIER BOOL) + RETURNS NULLABLE_BINARY + SPECIFIC - + RETURN NULL_IF ( VALUE, NULLIFIER ); + +CREATE FUNCTION "NULL_IF"( + VALUE NULLABLE_BYTE + NULLIFIER BOOL) + RETURNS NULLABLE_BYTE + SPECIFIC - + RETURN NULL_IF ( VALUE, NULLIFIER ); + +CREATE FUNCTION "NULL_IF"( + VALUE NULLABLE_BLOB + NULLIFIER BOOL) + RETURNS NULLABLE_BLOB + SPECIFIC - + RETURN NULL_IF ( VALUE, NULLIFIER ); + +CREATE FUNCTION "NULL_IF"( + VALUE NULLABLE_DATE + NULLIFIER BOOL) + RETURNS NULLABLE_DATE + SPECIFIC - + RETURN NULL_IF ( VALUE, NULLIFIER ); + +CREATE FUNCTION "NULL_IF"( + VALUE NULLABLE_TIME + NULLIFIER BOOL) + RETURNS NULLABLE_TIME + SPECIFIC - + RETURN NULL_IF ( VALUE, NULLIFIER ); + +CREATE FUNCTION "NULL_IF"( + VALUE NULLABLE_TIMESTAMP + NULLIFIER 
BOOL) + RETURNS NULLABLE_TIMESTAMP + SPECIFIC - + RETURN NULL_IF ( VALUE, NULLIFIER ); + +CREATE FUNCTION "NULL_IF"( + VALUE NULLABLE_INTERVAL + NULLIFIER BOOL) + RETURNS NULLABLE_INTERVAL + SPECIFIC - + RETURN NULL_IF ( VALUE, NULLIFIER ); + +CREATE FUNCTION "NULL_IF"( + VALUE NULLABLE_LIST + NULLIFIER BOOL) + RETURNS NULLABLE_LIST + SPECIFIC - + RETURN NULL_IF ( VALUE, NULLIFIER ); + +CREATE FUNCTION "NULL_IF"( + VALUE NULLABLE_BAG + NULLIFIER BOOL) + RETURNS NULLABLE_BAG + SPECIFIC - + RETURN NULL_IF ( VALUE, NULLIFIER ); + +CREATE FUNCTION "NULL_IF"( + VALUE NULLABLE_SEXP + NULLIFIER BOOL) + RETURNS NULLABLE_SEXP + SPECIFIC - + RETURN NULL_IF ( VALUE, NULLIFIER ); + +CREATE FUNCTION "NULL_IF"( + VALUE NULLABLE_STRUCT + NULLIFIER BOOL) + RETURNS NULLABLE_STRUCT + SPECIFIC - + RETURN NULL_IF ( VALUE, NULLIFIER ); + + +-- [in_collection] --------- + +CREATE FUNCTION "IN_COLLECTION"( + VALUE NULL + COLLECTION LIST) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE MISSING + COLLECTION LIST) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE NULL + COLLECTION BAG) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE MISSING + COLLECTION BAG) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE NULL + COLLECTION SEXP) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE MISSING + COLLECTION SEXP) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE BOOL + COLLECTION LIST) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE BOOL + COLLECTION BAG) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION 
); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE BOOL + COLLECTION SEXP) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE NULLABLE_BOOL + COLLECTION LIST) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE NULLABLE_BOOL + COLLECTION BAG) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE NULLABLE_BOOL + COLLECTION SEXP) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE INT8 + COLLECTION LIST) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE INT8 + COLLECTION BAG) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE INT8 + COLLECTION SEXP) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE NULLABLE_INT8 + COLLECTION LIST) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE NULLABLE_INT8 + COLLECTION BAG) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE NULLABLE_INT8 + COLLECTION SEXP) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE INT16 + COLLECTION LIST) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE INT16 + COLLECTION BAG) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE INT16 + COLLECTION SEXP) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE NULLABLE_INT16 + COLLECTION LIST) + RETURNS BOOL + 
SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE NULLABLE_INT16 + COLLECTION BAG) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE NULLABLE_INT16 + COLLECTION SEXP) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE INT32 + COLLECTION LIST) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE INT32 + COLLECTION BAG) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE INT32 + COLLECTION SEXP) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE NULLABLE_INT32 + COLLECTION LIST) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE NULLABLE_INT32 + COLLECTION BAG) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE NULLABLE_INT32 + COLLECTION SEXP) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE INT64 + COLLECTION LIST) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE INT64 + COLLECTION BAG) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE INT64 + COLLECTION SEXP) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE NULLABLE_INT64 + COLLECTION LIST) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE NULLABLE_INT64 + COLLECTION BAG) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION 
"IN_COLLECTION"( + VALUE NULLABLE_INT64 + COLLECTION SEXP) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE INT + COLLECTION LIST) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE INT + COLLECTION BAG) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE INT + COLLECTION SEXP) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE NULLABLE_INT + COLLECTION LIST) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE NULLABLE_INT + COLLECTION BAG) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE NULLABLE_INT + COLLECTION SEXP) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE DECIMAL + COLLECTION LIST) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE DECIMAL + COLLECTION BAG) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE DECIMAL + COLLECTION SEXP) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE NULLABLE_DECIMAL + COLLECTION LIST) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE NULLABLE_DECIMAL + COLLECTION BAG) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE NULLABLE_DECIMAL + COLLECTION SEXP) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE FLOAT32 + COLLECTION LIST) + RETURNS BOOL + SPECIFIC - + 
RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE FLOAT32 + COLLECTION BAG) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE FLOAT32 + COLLECTION SEXP) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE NULLABLE_FLOAT32 + COLLECTION LIST) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE NULLABLE_FLOAT32 + COLLECTION BAG) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE NULLABLE_FLOAT32 + COLLECTION SEXP) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE FLOAT64 + COLLECTION LIST) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE FLOAT64 + COLLECTION BAG) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE FLOAT64 + COLLECTION SEXP) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE NULLABLE_FLOAT64 + COLLECTION LIST) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE NULLABLE_FLOAT64 + COLLECTION BAG) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE NULLABLE_FLOAT64 + COLLECTION SEXP) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE CHAR + COLLECTION LIST) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE CHAR + COLLECTION BAG) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION 
"IN_COLLECTION"( + VALUE CHAR + COLLECTION SEXP) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE NULLABLE_CHAR + COLLECTION LIST) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE NULLABLE_CHAR + COLLECTION BAG) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE NULLABLE_CHAR + COLLECTION SEXP) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE STRING + COLLECTION LIST) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE STRING + COLLECTION BAG) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE STRING + COLLECTION SEXP) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE NULLABLE_STRING + COLLECTION LIST) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE NULLABLE_STRING + COLLECTION BAG) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE NULLABLE_STRING + COLLECTION SEXP) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE SYMBOL + COLLECTION LIST) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE SYMBOL + COLLECTION BAG) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE SYMBOL + COLLECTION SEXP) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE NULLABLE_SYMBOL + COLLECTION LIST) + RETURNS BOOL + SPECIFIC 
- + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE NULLABLE_SYMBOL + COLLECTION BAG) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE NULLABLE_SYMBOL + COLLECTION SEXP) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE CLOB + COLLECTION LIST) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE CLOB + COLLECTION BAG) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE CLOB + COLLECTION SEXP) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE NULLABLE_CLOB + COLLECTION LIST) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE NULLABLE_CLOB + COLLECTION BAG) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE NULLABLE_CLOB + COLLECTION SEXP) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE BINARY + COLLECTION LIST) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE BINARY + COLLECTION BAG) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE BINARY + COLLECTION SEXP) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE NULLABLE_BINARY + COLLECTION LIST) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE NULLABLE_BINARY + COLLECTION BAG) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION 
"IN_COLLECTION"( + VALUE NULLABLE_BINARY + COLLECTION SEXP) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE BYTE + COLLECTION LIST) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE BYTE + COLLECTION BAG) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE BYTE + COLLECTION SEXP) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE NULLABLE_BYTE + COLLECTION LIST) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE NULLABLE_BYTE + COLLECTION BAG) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE NULLABLE_BYTE + COLLECTION SEXP) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE BLOB + COLLECTION LIST) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE BLOB + COLLECTION BAG) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE BLOB + COLLECTION SEXP) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE NULLABLE_BLOB + COLLECTION LIST) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE NULLABLE_BLOB + COLLECTION BAG) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE NULLABLE_BLOB + COLLECTION SEXP) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE DATE + COLLECTION LIST) + RETURNS BOOL + SPECIFIC - + RETURN 
IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE DATE + COLLECTION BAG) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE DATE + COLLECTION SEXP) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE NULLABLE_DATE + COLLECTION LIST) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE NULLABLE_DATE + COLLECTION BAG) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE NULLABLE_DATE + COLLECTION SEXP) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE TIME + COLLECTION LIST) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE TIME + COLLECTION BAG) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE TIME + COLLECTION SEXP) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE NULLABLE_TIME + COLLECTION LIST) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE NULLABLE_TIME + COLLECTION BAG) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE NULLABLE_TIME + COLLECTION SEXP) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE TIMESTAMP + COLLECTION LIST) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE TIMESTAMP + COLLECTION BAG) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE TIMESTAMP + 
COLLECTION SEXP) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE NULLABLE_TIMESTAMP + COLLECTION LIST) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE NULLABLE_TIMESTAMP + COLLECTION BAG) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE NULLABLE_TIMESTAMP + COLLECTION SEXP) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE INTERVAL + COLLECTION LIST) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE INTERVAL + COLLECTION BAG) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE INTERVAL + COLLECTION SEXP) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE NULLABLE_INTERVAL + COLLECTION LIST) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE NULLABLE_INTERVAL + COLLECTION BAG) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE NULLABLE_INTERVAL + COLLECTION SEXP) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE LIST + COLLECTION LIST) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE LIST + COLLECTION BAG) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE LIST + COLLECTION SEXP) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE NULLABLE_LIST + COLLECTION LIST) + RETURNS BOOL + SPECIFIC - + RETURN 
IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE NULLABLE_LIST + COLLECTION BAG) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE NULLABLE_LIST + COLLECTION SEXP) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE BAG + COLLECTION LIST) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE BAG + COLLECTION BAG) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE BAG + COLLECTION SEXP) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE NULLABLE_BAG + COLLECTION LIST) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE NULLABLE_BAG + COLLECTION BAG) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE NULLABLE_BAG + COLLECTION SEXP) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE SEXP + COLLECTION LIST) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE SEXP + COLLECTION BAG) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE SEXP + COLLECTION SEXP) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE NULLABLE_SEXP + COLLECTION LIST) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE NULLABLE_SEXP + COLLECTION BAG) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE NULLABLE_SEXP + 
COLLECTION SEXP) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE STRUCT + COLLECTION LIST) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE STRUCT + COLLECTION BAG) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE STRUCT + COLLECTION SEXP) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE NULLABLE_STRUCT + COLLECTION LIST) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE NULLABLE_STRUCT + COLLECTION BAG) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE NULLABLE_STRUCT + COLLECTION SEXP) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE GRAPH + COLLECTION LIST) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE GRAPH + COLLECTION BAG) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + +CREATE FUNCTION "IN_COLLECTION"( + VALUE GRAPH + COLLECTION SEXP) + RETURNS BOOL + SPECIFIC - + RETURN IN_COLLECTION ( VALUE, COLLECTION ); + + +-- [substring] --------- + +CREATE FUNCTION "SUBSTRING"( + VALUE STRING + START INT64) + RETURNS STRING + SPECIFIC - + RETURN SUBSTRING ( VALUE, START ); + +CREATE FUNCTION "SUBSTRING"( + VALUE NULLABLE_STRING + START INT64) + RETURNS NULLABLE_STRING + SPECIFIC - + RETURN SUBSTRING ( VALUE, START ); + +CREATE FUNCTION "SUBSTRING"( + VALUE SYMBOL + START INT64) + RETURNS SYMBOL + SPECIFIC - + RETURN SUBSTRING ( VALUE, START ); + +CREATE FUNCTION "SUBSTRING"( + VALUE NULLABLE_SYMBOL + START INT64) + RETURNS NULLABLE_SYMBOL + SPECIFIC - + RETURN SUBSTRING ( VALUE, START ); + + +-- 
[like] --------- + +CREATE FUNCTION "LIKE"( + VALUE STRING + PATTERN STRING) + RETURNS BOOL + SPECIFIC - + RETURN LIKE ( VALUE, PATTERN ); + + +-- [position] --------- + +CREATE FUNCTION "POSITION"( + PROBE STRING + VALUE STRING) + RETURNS INT64 + SPECIFIC - + RETURN POSITION ( PROBE, VALUE ); + +CREATE FUNCTION "POSITION"( + PROBE NULLABLE_STRING + VALUE NULLABLE_STRING) + RETURNS INT64 + SPECIFIC - + RETURN POSITION ( PROBE, VALUE ); + +CREATE FUNCTION "POSITION"( + PROBE SYMBOL + VALUE SYMBOL) + RETURNS INT64 + SPECIFIC - + RETURN POSITION ( PROBE, VALUE ); + +CREATE FUNCTION "POSITION"( + PROBE NULLABLE_SYMBOL + VALUE NULLABLE_SYMBOL) + RETURNS INT64 + SPECIFIC - + RETURN POSITION ( PROBE, VALUE ); + + +-- [trim_chars] --------- + +CREATE FUNCTION "TRIM_CHARS"( + VALUE STRING + CHARS STRING) + RETURNS STRING + SPECIFIC - + RETURN TRIM_CHARS ( VALUE, CHARS ); + +CREATE FUNCTION "TRIM_CHARS"( + VALUE NULLABLE_STRING + CHARS NULLABLE_STRING) + RETURNS NULLABLE_STRING + SPECIFIC - + RETURN TRIM_CHARS ( VALUE, CHARS ); + +CREATE FUNCTION "TRIM_CHARS"( + VALUE SYMBOL + CHARS SYMBOL) + RETURNS SYMBOL + SPECIFIC - + RETURN TRIM_CHARS ( VALUE, CHARS ); + +CREATE FUNCTION "TRIM_CHARS"( + VALUE NULLABLE_SYMBOL + CHARS NULLABLE_SYMBOL) + RETURNS NULLABLE_SYMBOL + SPECIFIC - + RETURN TRIM_CHARS ( VALUE, CHARS ); + + +-- [trim_leading_chars] --------- + +CREATE FUNCTION "TRIM_LEADING_CHARS"( + VALUE STRING + CHARS STRING) + RETURNS STRING + SPECIFIC - + RETURN TRIM_LEADING_CHARS ( VALUE, CHARS ); + +CREATE FUNCTION "TRIM_LEADING_CHARS"( + VALUE NULLABLE_STRING + CHARS NULLABLE_STRING) + RETURNS NULLABLE_STRING + SPECIFIC - + RETURN TRIM_LEADING_CHARS ( VALUE, CHARS ); + +CREATE FUNCTION "TRIM_LEADING_CHARS"( + VALUE SYMBOL + CHARS SYMBOL) + RETURNS SYMBOL + SPECIFIC - + RETURN TRIM_LEADING_CHARS ( VALUE, CHARS ); + +CREATE FUNCTION "TRIM_LEADING_CHARS"( + VALUE NULLABLE_SYMBOL + CHARS NULLABLE_SYMBOL) + RETURNS NULLABLE_SYMBOL + SPECIFIC - + RETURN TRIM_LEADING_CHARS ( 
VALUE, CHARS ); + + +-- [trim_trailing_chars] --------- + +CREATE FUNCTION "TRIM_TRAILING_CHARS"( + VALUE STRING + CHARS STRING) + RETURNS STRING + SPECIFIC - + RETURN TRIM_TRAILING_CHARS ( VALUE, CHARS ); + +CREATE FUNCTION "TRIM_TRAILING_CHARS"( + VALUE NULLABLE_STRING + CHARS NULLABLE_STRING) + RETURNS NULLABLE_STRING + SPECIFIC - + RETURN TRIM_TRAILING_CHARS ( VALUE, CHARS ); + +CREATE FUNCTION "TRIM_TRAILING_CHARS"( + VALUE SYMBOL + CHARS SYMBOL) + RETURNS SYMBOL + SPECIFIC - + RETURN TRIM_TRAILING_CHARS ( VALUE, CHARS ); + +CREATE FUNCTION "TRIM_TRAILING_CHARS"( + VALUE NULLABLE_SYMBOL + CHARS NULLABLE_SYMBOL) + RETURNS NULLABLE_SYMBOL + SPECIFIC - + RETURN TRIM_TRAILING_CHARS ( VALUE, CHARS ); + + +-- [between] --------- + +CREATE FUNCTION "BETWEEN"( + VALUE INT8 + LOWER INT8 + UPPER INT8) + RETURNS BOOL + SPECIFIC - + RETURN BETWEEN ( VALUE, LOWER, UPPER ); + +CREATE FUNCTION "BETWEEN"( + VALUE NULLABLE_INT8 + LOWER NULLABLE_INT8 + UPPER NULLABLE_INT8) + RETURNS BOOL + SPECIFIC - + RETURN BETWEEN ( VALUE, LOWER, UPPER ); + +CREATE FUNCTION "BETWEEN"( + VALUE INT16 + LOWER INT16 + UPPER INT16) + RETURNS BOOL + SPECIFIC - + RETURN BETWEEN ( VALUE, LOWER, UPPER ); + +CREATE FUNCTION "BETWEEN"( + VALUE NULLABLE_INT16 + LOWER NULLABLE_INT16 + UPPER NULLABLE_INT16) + RETURNS BOOL + SPECIFIC - + RETURN BETWEEN ( VALUE, LOWER, UPPER ); + +CREATE FUNCTION "BETWEEN"( + VALUE INT32 + LOWER INT32 + UPPER INT32) + RETURNS BOOL + SPECIFIC - + RETURN BETWEEN ( VALUE, LOWER, UPPER ); + +CREATE FUNCTION "BETWEEN"( + VALUE NULLABLE_INT32 + LOWER NULLABLE_INT32 + UPPER NULLABLE_INT32) + RETURNS BOOL + SPECIFIC - + RETURN BETWEEN ( VALUE, LOWER, UPPER ); + +CREATE FUNCTION "BETWEEN"( + VALUE INT64 + LOWER INT64 + UPPER INT64) + RETURNS BOOL + SPECIFIC - + RETURN BETWEEN ( VALUE, LOWER, UPPER ); + +CREATE FUNCTION "BETWEEN"( + VALUE NULLABLE_INT64 + LOWER NULLABLE_INT64 + UPPER NULLABLE_INT64) + RETURNS BOOL + SPECIFIC - + RETURN BETWEEN ( VALUE, LOWER, UPPER ); + +CREATE 
FUNCTION "BETWEEN"( + VALUE INT + LOWER INT + UPPER INT) + RETURNS BOOL + SPECIFIC - + RETURN BETWEEN ( VALUE, LOWER, UPPER ); + +CREATE FUNCTION "BETWEEN"( + VALUE NULLABLE_INT + LOWER NULLABLE_INT + UPPER NULLABLE_INT) + RETURNS BOOL + SPECIFIC - + RETURN BETWEEN ( VALUE, LOWER, UPPER ); + +CREATE FUNCTION "BETWEEN"( + VALUE DECIMAL + LOWER DECIMAL + UPPER DECIMAL) + RETURNS BOOL + SPECIFIC - + RETURN BETWEEN ( VALUE, LOWER, UPPER ); + +CREATE FUNCTION "BETWEEN"( + VALUE NULLABLE_DECIMAL + LOWER NULLABLE_DECIMAL + UPPER NULLABLE_DECIMAL) + RETURNS BOOL + SPECIFIC - + RETURN BETWEEN ( VALUE, LOWER, UPPER ); + +CREATE FUNCTION "BETWEEN"( + VALUE FLOAT32 + LOWER FLOAT32 + UPPER FLOAT32) + RETURNS BOOL + SPECIFIC - + RETURN BETWEEN ( VALUE, LOWER, UPPER ); + +CREATE FUNCTION "BETWEEN"( + VALUE NULLABLE_FLOAT32 + LOWER NULLABLE_FLOAT32 + UPPER NULLABLE_FLOAT32) + RETURNS BOOL + SPECIFIC - + RETURN BETWEEN ( VALUE, LOWER, UPPER ); + +CREATE FUNCTION "BETWEEN"( + VALUE FLOAT64 + LOWER FLOAT64 + UPPER FLOAT64) + RETURNS BOOL + SPECIFIC - + RETURN BETWEEN ( VALUE, LOWER, UPPER ); + +CREATE FUNCTION "BETWEEN"( + VALUE NULLABLE_FLOAT64 + LOWER NULLABLE_FLOAT64 + UPPER NULLABLE_FLOAT64) + RETURNS BOOL + SPECIFIC - + RETURN BETWEEN ( VALUE, LOWER, UPPER ); + + +-- [substring_length] --------- + +CREATE FUNCTION "SUBSTRING_LENGTH"( + VALUE STRING + START INT64 + END INT64) + RETURNS STRING + SPECIFIC - + RETURN SUBSTRING_LENGTH ( VALUE, START, END ); + +CREATE FUNCTION "SUBSTRING_LENGTH"( + VALUE NULLABLE_STRING + START INT64 + END INT64) + RETURNS NULLABLE_STRING + SPECIFIC - + RETURN SUBSTRING_LENGTH ( VALUE, START, END ); + +CREATE FUNCTION "SUBSTRING_LENGTH"( + VALUE SYMBOL + START INT64 + END INT64) + RETURNS SYMBOL + SPECIFIC - + RETURN SUBSTRING_LENGTH ( VALUE, START, END ); + +CREATE FUNCTION "SUBSTRING_LENGTH"( + VALUE NULLABLE_SYMBOL + START INT64 + END INT64) + RETURNS NULLABLE_SYMBOL + SPECIFIC - + RETURN SUBSTRING_LENGTH ( VALUE, START, END ); + + +-- 
[like_escape] --------- + +CREATE FUNCTION "LIKE_ESCAPE"( + VALUE STRING + PATTERN STRING + ESCAPE STRING) + RETURNS BOOL + SPECIFIC - + RETURN LIKE_ESCAPE ( VALUE, PATTERN, ESCAPE ); + diff --git a/partiql-planner/src/main/resources/casts.sql b/partiql-planner/src/main/resources/casts.sql new file mode 100644 index 000000000..d985ddc24 --- /dev/null +++ b/partiql-planner/src/main/resources/casts.sql @@ -0,0 +1,1982 @@ +-- [cast] --------- + +CREATE FUNCTION "CAST"( + VALUE BOOL + TYPE TYPE (BOOL)) + RETURNS BOOL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE BOOL + TYPE TYPE (INT8)) + RETURNS INT8 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE BOOL + TYPE TYPE (INT16)) + RETURNS INT16 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE BOOL + TYPE TYPE (INT32)) + RETURNS INT32 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE BOOL + TYPE TYPE (INT64)) + RETURNS INT64 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE BOOL + TYPE TYPE (INT)) + RETURNS INT + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE BOOL + TYPE TYPE (DECIMAL)) + RETURNS DECIMAL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE BOOL + TYPE TYPE (FLOAT32)) + RETURNS FLOAT32 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE BOOL + TYPE TYPE (FLOAT64)) + RETURNS FLOAT64 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE BOOL + TYPE TYPE (CHAR)) + RETURNS CHAR + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE BOOL + TYPE TYPE (STRING)) + RETURNS STRING + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE BOOL + TYPE TYPE (SYMBOL)) + RETURNS SYMBOL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE BOOL + TYPE TYPE (NULLABLE_BOOL)) + RETURNS NULLABLE_BOOL + 
SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE BOOL + TYPE TYPE (NULLABLE_INT8)) + RETURNS NULLABLE_INT8 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE BOOL + TYPE TYPE (NULLABLE_INT16)) + RETURNS NULLABLE_INT16 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE BOOL + TYPE TYPE (NULLABLE_INT32)) + RETURNS NULLABLE_INT32 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE BOOL + TYPE TYPE (NULLABLE_INT64)) + RETURNS NULLABLE_INT64 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE BOOL + TYPE TYPE (NULLABLE_INT)) + RETURNS NULLABLE_INT + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE BOOL + TYPE TYPE (NULLABLE_DECIMAL)) + RETURNS NULLABLE_DECIMAL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE BOOL + TYPE TYPE (NULLABLE_FLOAT32)) + RETURNS NULLABLE_FLOAT32 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE BOOL + TYPE TYPE (NULLABLE_FLOAT64)) + RETURNS NULLABLE_FLOAT64 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE BOOL + TYPE TYPE (NULLABLE_CHAR)) + RETURNS NULLABLE_CHAR + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE BOOL + TYPE TYPE (NULLABLE_STRING)) + RETURNS NULLABLE_STRING + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE BOOL + TYPE TYPE (NULLABLE_SYMBOL)) + RETURNS NULLABLE_SYMBOL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_BOOL + TYPE TYPE (NULLABLE_BOOL)) + RETURNS NULLABLE_BOOL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_BOOL + TYPE TYPE (NULLABLE_INT8)) + RETURNS NULLABLE_INT8 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_BOOL + TYPE TYPE (NULLABLE_INT16)) + RETURNS NULLABLE_INT16 + SPECIFIC - + RETURN 
CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_BOOL + TYPE TYPE (NULLABLE_INT32)) + RETURNS NULLABLE_INT32 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_BOOL + TYPE TYPE (NULLABLE_INT64)) + RETURNS NULLABLE_INT64 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_BOOL + TYPE TYPE (NULLABLE_INT)) + RETURNS NULLABLE_INT + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_BOOL + TYPE TYPE (NULLABLE_DECIMAL)) + RETURNS NULLABLE_DECIMAL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_BOOL + TYPE TYPE (NULLABLE_FLOAT32)) + RETURNS NULLABLE_FLOAT32 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_BOOL + TYPE TYPE (NULLABLE_FLOAT64)) + RETURNS NULLABLE_FLOAT64 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_BOOL + TYPE TYPE (NULLABLE_CHAR)) + RETURNS NULLABLE_CHAR + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_BOOL + TYPE TYPE (NULLABLE_STRING)) + RETURNS NULLABLE_STRING + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_BOOL + TYPE TYPE (NULLABLE_SYMBOL)) + RETURNS NULLABLE_SYMBOL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT8 + TYPE TYPE (BOOL)) + RETURNS BOOL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT8 + TYPE TYPE (INT8)) + RETURNS INT8 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT8 + TYPE TYPE (INT16)) + RETURNS INT16 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT8 + TYPE TYPE (INT32)) + RETURNS INT32 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT8 + TYPE TYPE (INT64)) + RETURNS INT64 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE 
INT8 + TYPE TYPE (INT)) + RETURNS INT + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT8 + TYPE TYPE (DECIMAL)) + RETURNS DECIMAL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT8 + TYPE TYPE (FLOAT32)) + RETURNS FLOAT32 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT8 + TYPE TYPE (FLOAT64)) + RETURNS FLOAT64 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT8 + TYPE TYPE (STRING)) + RETURNS STRING + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT8 + TYPE TYPE (SYMBOL)) + RETURNS SYMBOL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT8 + TYPE TYPE (NULLABLE_BOOL)) + RETURNS NULLABLE_BOOL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT8 + TYPE TYPE (NULLABLE_INT8)) + RETURNS NULLABLE_INT8 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT8 + TYPE TYPE (NULLABLE_INT16)) + RETURNS NULLABLE_INT16 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT8 + TYPE TYPE (NULLABLE_INT32)) + RETURNS NULLABLE_INT32 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT8 + TYPE TYPE (NULLABLE_INT64)) + RETURNS NULLABLE_INT64 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT8 + TYPE TYPE (NULLABLE_INT)) + RETURNS NULLABLE_INT + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT8 + TYPE TYPE (NULLABLE_DECIMAL)) + RETURNS NULLABLE_DECIMAL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT8 + TYPE TYPE (NULLABLE_FLOAT32)) + RETURNS NULLABLE_FLOAT32 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT8 + TYPE TYPE (NULLABLE_FLOAT64)) + RETURNS NULLABLE_FLOAT64 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT8 + 
TYPE TYPE (NULLABLE_STRING)) + RETURNS NULLABLE_STRING + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT8 + TYPE TYPE (NULLABLE_SYMBOL)) + RETURNS NULLABLE_SYMBOL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_INT8 + TYPE TYPE (NULLABLE_BOOL)) + RETURNS NULLABLE_BOOL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_INT8 + TYPE TYPE (NULLABLE_INT8)) + RETURNS NULLABLE_INT8 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_INT8 + TYPE TYPE (NULLABLE_INT16)) + RETURNS NULLABLE_INT16 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_INT8 + TYPE TYPE (NULLABLE_INT32)) + RETURNS NULLABLE_INT32 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_INT8 + TYPE TYPE (NULLABLE_INT64)) + RETURNS NULLABLE_INT64 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_INT8 + TYPE TYPE (NULLABLE_INT)) + RETURNS NULLABLE_INT + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_INT8 + TYPE TYPE (NULLABLE_DECIMAL)) + RETURNS NULLABLE_DECIMAL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_INT8 + TYPE TYPE (NULLABLE_FLOAT32)) + RETURNS NULLABLE_FLOAT32 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_INT8 + TYPE TYPE (NULLABLE_FLOAT64)) + RETURNS NULLABLE_FLOAT64 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_INT8 + TYPE TYPE (NULLABLE_STRING)) + RETURNS NULLABLE_STRING + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_INT8 + TYPE TYPE (NULLABLE_SYMBOL)) + RETURNS NULLABLE_SYMBOL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT16 + TYPE TYPE (BOOL)) + RETURNS BOOL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + 
+CREATE FUNCTION "CAST"( + VALUE INT16 + TYPE TYPE (INT16)) + RETURNS INT16 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT16 + TYPE TYPE (INT32)) + RETURNS INT32 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT16 + TYPE TYPE (INT64)) + RETURNS INT64 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT16 + TYPE TYPE (INT)) + RETURNS INT + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT16 + TYPE TYPE (DECIMAL)) + RETURNS DECIMAL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT16 + TYPE TYPE (FLOAT32)) + RETURNS FLOAT32 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT16 + TYPE TYPE (FLOAT64)) + RETURNS FLOAT64 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT16 + TYPE TYPE (STRING)) + RETURNS STRING + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT16 + TYPE TYPE (SYMBOL)) + RETURNS SYMBOL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT16 + TYPE TYPE (NULLABLE_BOOL)) + RETURNS NULLABLE_BOOL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT16 + TYPE TYPE (NULLABLE_INT16)) + RETURNS NULLABLE_INT16 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT16 + TYPE TYPE (NULLABLE_INT32)) + RETURNS NULLABLE_INT32 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT16 + TYPE TYPE (NULLABLE_INT64)) + RETURNS NULLABLE_INT64 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT16 + TYPE TYPE (NULLABLE_INT)) + RETURNS NULLABLE_INT + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT16 + TYPE TYPE (NULLABLE_DECIMAL)) + RETURNS NULLABLE_DECIMAL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT16 + TYPE TYPE 
(NULLABLE_FLOAT32)) + RETURNS NULLABLE_FLOAT32 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT16 + TYPE TYPE (NULLABLE_FLOAT64)) + RETURNS NULLABLE_FLOAT64 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT16 + TYPE TYPE (NULLABLE_STRING)) + RETURNS NULLABLE_STRING + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT16 + TYPE TYPE (NULLABLE_SYMBOL)) + RETURNS NULLABLE_SYMBOL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_INT16 + TYPE TYPE (NULLABLE_BOOL)) + RETURNS NULLABLE_BOOL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_INT16 + TYPE TYPE (NULLABLE_INT16)) + RETURNS NULLABLE_INT16 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_INT16 + TYPE TYPE (NULLABLE_INT32)) + RETURNS NULLABLE_INT32 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_INT16 + TYPE TYPE (NULLABLE_INT64)) + RETURNS NULLABLE_INT64 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_INT16 + TYPE TYPE (NULLABLE_INT)) + RETURNS NULLABLE_INT + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_INT16 + TYPE TYPE (NULLABLE_DECIMAL)) + RETURNS NULLABLE_DECIMAL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_INT16 + TYPE TYPE (NULLABLE_FLOAT32)) + RETURNS NULLABLE_FLOAT32 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_INT16 + TYPE TYPE (NULLABLE_FLOAT64)) + RETURNS NULLABLE_FLOAT64 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_INT16 + TYPE TYPE (NULLABLE_STRING)) + RETURNS NULLABLE_STRING + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_INT16 + TYPE TYPE (NULLABLE_SYMBOL)) + RETURNS NULLABLE_SYMBOL + SPECIFIC - + RETURN 
CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT32 + TYPE TYPE (BOOL)) + RETURNS BOOL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT32 + TYPE TYPE (INT32)) + RETURNS INT32 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT32 + TYPE TYPE (INT64)) + RETURNS INT64 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT32 + TYPE TYPE (INT)) + RETURNS INT + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT32 + TYPE TYPE (DECIMAL)) + RETURNS DECIMAL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT32 + TYPE TYPE (FLOAT32)) + RETURNS FLOAT32 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT32 + TYPE TYPE (FLOAT64)) + RETURNS FLOAT64 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT32 + TYPE TYPE (STRING)) + RETURNS STRING + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT32 + TYPE TYPE (SYMBOL)) + RETURNS SYMBOL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT32 + TYPE TYPE (NULLABLE_BOOL)) + RETURNS NULLABLE_BOOL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT32 + TYPE TYPE (NULLABLE_INT32)) + RETURNS NULLABLE_INT32 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT32 + TYPE TYPE (NULLABLE_INT64)) + RETURNS NULLABLE_INT64 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT32 + TYPE TYPE (NULLABLE_INT)) + RETURNS NULLABLE_INT + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT32 + TYPE TYPE (NULLABLE_DECIMAL)) + RETURNS NULLABLE_DECIMAL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT32 + TYPE TYPE (NULLABLE_FLOAT32)) + RETURNS NULLABLE_FLOAT32 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( 
+ VALUE INT32 + TYPE TYPE (NULLABLE_FLOAT64)) + RETURNS NULLABLE_FLOAT64 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT32 + TYPE TYPE (NULLABLE_STRING)) + RETURNS NULLABLE_STRING + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT32 + TYPE TYPE (NULLABLE_SYMBOL)) + RETURNS NULLABLE_SYMBOL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_INT32 + TYPE TYPE (NULLABLE_BOOL)) + RETURNS NULLABLE_BOOL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_INT32 + TYPE TYPE (NULLABLE_INT32)) + RETURNS NULLABLE_INT32 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_INT32 + TYPE TYPE (NULLABLE_INT64)) + RETURNS NULLABLE_INT64 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_INT32 + TYPE TYPE (NULLABLE_INT)) + RETURNS NULLABLE_INT + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_INT32 + TYPE TYPE (NULLABLE_DECIMAL)) + RETURNS NULLABLE_DECIMAL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_INT32 + TYPE TYPE (NULLABLE_FLOAT32)) + RETURNS NULLABLE_FLOAT32 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_INT32 + TYPE TYPE (NULLABLE_FLOAT64)) + RETURNS NULLABLE_FLOAT64 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_INT32 + TYPE TYPE (NULLABLE_STRING)) + RETURNS NULLABLE_STRING + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_INT32 + TYPE TYPE (NULLABLE_SYMBOL)) + RETURNS NULLABLE_SYMBOL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT64 + TYPE TYPE (BOOL)) + RETURNS BOOL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT64 + TYPE TYPE (INT64)) + RETURNS INT64 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + 
+CREATE FUNCTION "CAST"( + VALUE INT64 + TYPE TYPE (INT)) + RETURNS INT + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT64 + TYPE TYPE (DECIMAL)) + RETURNS DECIMAL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT64 + TYPE TYPE (FLOAT32)) + RETURNS FLOAT32 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT64 + TYPE TYPE (FLOAT64)) + RETURNS FLOAT64 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT64 + TYPE TYPE (STRING)) + RETURNS STRING + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT64 + TYPE TYPE (SYMBOL)) + RETURNS SYMBOL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT64 + TYPE TYPE (NULLABLE_BOOL)) + RETURNS NULLABLE_BOOL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT64 + TYPE TYPE (NULLABLE_INT64)) + RETURNS NULLABLE_INT64 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT64 + TYPE TYPE (NULLABLE_INT)) + RETURNS NULLABLE_INT + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT64 + TYPE TYPE (NULLABLE_DECIMAL)) + RETURNS NULLABLE_DECIMAL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT64 + TYPE TYPE (NULLABLE_FLOAT32)) + RETURNS NULLABLE_FLOAT32 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT64 + TYPE TYPE (NULLABLE_FLOAT64)) + RETURNS NULLABLE_FLOAT64 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT64 + TYPE TYPE (NULLABLE_STRING)) + RETURNS NULLABLE_STRING + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT64 + TYPE TYPE (NULLABLE_SYMBOL)) + RETURNS NULLABLE_SYMBOL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_INT64 + TYPE TYPE (NULLABLE_BOOL)) + RETURNS NULLABLE_BOOL + SPECIFIC - + RETURN 
CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_INT64 + TYPE TYPE (NULLABLE_INT64)) + RETURNS NULLABLE_INT64 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_INT64 + TYPE TYPE (NULLABLE_INT)) + RETURNS NULLABLE_INT + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_INT64 + TYPE TYPE (NULLABLE_DECIMAL)) + RETURNS NULLABLE_DECIMAL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_INT64 + TYPE TYPE (NULLABLE_FLOAT32)) + RETURNS NULLABLE_FLOAT32 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_INT64 + TYPE TYPE (NULLABLE_FLOAT64)) + RETURNS NULLABLE_FLOAT64 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_INT64 + TYPE TYPE (NULLABLE_STRING)) + RETURNS NULLABLE_STRING + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_INT64 + TYPE TYPE (NULLABLE_SYMBOL)) + RETURNS NULLABLE_SYMBOL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT + TYPE TYPE (BOOL)) + RETURNS BOOL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT + TYPE TYPE (INT)) + RETURNS INT + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT + TYPE TYPE (DECIMAL)) + RETURNS DECIMAL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT + TYPE TYPE (FLOAT32)) + RETURNS FLOAT32 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT + TYPE TYPE (FLOAT64)) + RETURNS FLOAT64 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT + TYPE TYPE (STRING)) + RETURNS STRING + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT + TYPE TYPE (SYMBOL)) + RETURNS SYMBOL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT + TYPE TYPE (NULLABLE_BOOL)) + 
RETURNS NULLABLE_BOOL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT + TYPE TYPE (NULLABLE_INT)) + RETURNS NULLABLE_INT + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT + TYPE TYPE (NULLABLE_DECIMAL)) + RETURNS NULLABLE_DECIMAL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT + TYPE TYPE (NULLABLE_FLOAT32)) + RETURNS NULLABLE_FLOAT32 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT + TYPE TYPE (NULLABLE_FLOAT64)) + RETURNS NULLABLE_FLOAT64 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT + TYPE TYPE (NULLABLE_STRING)) + RETURNS NULLABLE_STRING + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE INT + TYPE TYPE (NULLABLE_SYMBOL)) + RETURNS NULLABLE_SYMBOL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_INT + TYPE TYPE (NULLABLE_BOOL)) + RETURNS NULLABLE_BOOL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_INT + TYPE TYPE (NULLABLE_INT)) + RETURNS NULLABLE_INT + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_INT + TYPE TYPE (NULLABLE_DECIMAL)) + RETURNS NULLABLE_DECIMAL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_INT + TYPE TYPE (NULLABLE_FLOAT32)) + RETURNS NULLABLE_FLOAT32 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_INT + TYPE TYPE (NULLABLE_FLOAT64)) + RETURNS NULLABLE_FLOAT64 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_INT + TYPE TYPE (NULLABLE_STRING)) + RETURNS NULLABLE_STRING + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_INT + TYPE TYPE (NULLABLE_SYMBOL)) + RETURNS NULLABLE_SYMBOL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE DECIMAL + TYPE TYPE 
(BOOL)) + RETURNS BOOL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE DECIMAL + TYPE TYPE (DECIMAL)) + RETURNS DECIMAL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE DECIMAL + TYPE TYPE (FLOAT32)) + RETURNS FLOAT32 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE DECIMAL + TYPE TYPE (FLOAT64)) + RETURNS FLOAT64 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE DECIMAL + TYPE TYPE (STRING)) + RETURNS STRING + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE DECIMAL + TYPE TYPE (SYMBOL)) + RETURNS SYMBOL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE DECIMAL + TYPE TYPE (NULLABLE_BOOL)) + RETURNS NULLABLE_BOOL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE DECIMAL + TYPE TYPE (NULLABLE_DECIMAL)) + RETURNS NULLABLE_DECIMAL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE DECIMAL + TYPE TYPE (NULLABLE_FLOAT32)) + RETURNS NULLABLE_FLOAT32 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE DECIMAL + TYPE TYPE (NULLABLE_FLOAT64)) + RETURNS NULLABLE_FLOAT64 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE DECIMAL + TYPE TYPE (NULLABLE_STRING)) + RETURNS NULLABLE_STRING + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE DECIMAL + TYPE TYPE (NULLABLE_SYMBOL)) + RETURNS NULLABLE_SYMBOL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_DECIMAL + TYPE TYPE (NULLABLE_BOOL)) + RETURNS NULLABLE_BOOL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_DECIMAL + TYPE TYPE (NULLABLE_DECIMAL)) + RETURNS NULLABLE_DECIMAL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_DECIMAL + TYPE TYPE (NULLABLE_FLOAT32)) + RETURNS NULLABLE_FLOAT32 + SPECIFIC - + 
RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_DECIMAL + TYPE TYPE (NULLABLE_FLOAT64)) + RETURNS NULLABLE_FLOAT64 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_DECIMAL + TYPE TYPE (NULLABLE_STRING)) + RETURNS NULLABLE_STRING + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_DECIMAL + TYPE TYPE (NULLABLE_SYMBOL)) + RETURNS NULLABLE_SYMBOL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE FLOAT32 + TYPE TYPE (BOOL)) + RETURNS BOOL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE FLOAT32 + TYPE TYPE (FLOAT32)) + RETURNS FLOAT32 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE FLOAT32 + TYPE TYPE (FLOAT64)) + RETURNS FLOAT64 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE FLOAT32 + TYPE TYPE (STRING)) + RETURNS STRING + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE FLOAT32 + TYPE TYPE (SYMBOL)) + RETURNS SYMBOL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE FLOAT32 + TYPE TYPE (NULLABLE_BOOL)) + RETURNS NULLABLE_BOOL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE FLOAT32 + TYPE TYPE (NULLABLE_FLOAT32)) + RETURNS NULLABLE_FLOAT32 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE FLOAT32 + TYPE TYPE (NULLABLE_FLOAT64)) + RETURNS NULLABLE_FLOAT64 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE FLOAT32 + TYPE TYPE (NULLABLE_STRING)) + RETURNS NULLABLE_STRING + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE FLOAT32 + TYPE TYPE (NULLABLE_SYMBOL)) + RETURNS NULLABLE_SYMBOL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_FLOAT32 + TYPE TYPE (NULLABLE_BOOL)) + RETURNS NULLABLE_BOOL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE 
FUNCTION "CAST"( + VALUE NULLABLE_FLOAT32 + TYPE TYPE (NULLABLE_FLOAT32)) + RETURNS NULLABLE_FLOAT32 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_FLOAT32 + TYPE TYPE (NULLABLE_FLOAT64)) + RETURNS NULLABLE_FLOAT64 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_FLOAT32 + TYPE TYPE (NULLABLE_STRING)) + RETURNS NULLABLE_STRING + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_FLOAT32 + TYPE TYPE (NULLABLE_SYMBOL)) + RETURNS NULLABLE_SYMBOL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE FLOAT64 + TYPE TYPE (BOOL)) + RETURNS BOOL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE FLOAT64 + TYPE TYPE (FLOAT64)) + RETURNS FLOAT64 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE FLOAT64 + TYPE TYPE (STRING)) + RETURNS STRING + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE FLOAT64 + TYPE TYPE (SYMBOL)) + RETURNS SYMBOL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE FLOAT64 + TYPE TYPE (NULLABLE_BOOL)) + RETURNS NULLABLE_BOOL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE FLOAT64 + TYPE TYPE (NULLABLE_FLOAT64)) + RETURNS NULLABLE_FLOAT64 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE FLOAT64 + TYPE TYPE (NULLABLE_STRING)) + RETURNS NULLABLE_STRING + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE FLOAT64 + TYPE TYPE (NULLABLE_SYMBOL)) + RETURNS NULLABLE_SYMBOL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_FLOAT64 + TYPE TYPE (NULLABLE_BOOL)) + RETURNS NULLABLE_BOOL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_FLOAT64 + TYPE TYPE (NULLABLE_FLOAT64)) + RETURNS NULLABLE_FLOAT64 + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE 
FUNCTION "CAST"( + VALUE NULLABLE_FLOAT64 + TYPE TYPE (NULLABLE_STRING)) + RETURNS NULLABLE_STRING + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_FLOAT64 + TYPE TYPE (NULLABLE_SYMBOL)) + RETURNS NULLABLE_SYMBOL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE CHAR + TYPE TYPE (BOOL)) + RETURNS BOOL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE CHAR + TYPE TYPE (CHAR)) + RETURNS CHAR + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE CHAR + TYPE TYPE (STRING)) + RETURNS STRING + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE CHAR + TYPE TYPE (SYMBOL)) + RETURNS SYMBOL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE CHAR + TYPE TYPE (NULLABLE_BOOL)) + RETURNS NULLABLE_BOOL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE CHAR + TYPE TYPE (NULLABLE_CHAR)) + RETURNS NULLABLE_CHAR + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE CHAR + TYPE TYPE (NULLABLE_STRING)) + RETURNS NULLABLE_STRING + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE CHAR + TYPE TYPE (NULLABLE_SYMBOL)) + RETURNS NULLABLE_SYMBOL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_CHAR + TYPE TYPE (NULLABLE_BOOL)) + RETURNS NULLABLE_BOOL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_CHAR + TYPE TYPE (NULLABLE_CHAR)) + RETURNS NULLABLE_CHAR + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_CHAR + TYPE TYPE (NULLABLE_STRING)) + RETURNS NULLABLE_STRING + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_CHAR + TYPE TYPE (NULLABLE_SYMBOL)) + RETURNS NULLABLE_SYMBOL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE STRING + TYPE TYPE (BOOL)) + RETURNS 
BOOL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE STRING + TYPE TYPE (STRING)) + RETURNS STRING + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE STRING + TYPE TYPE (SYMBOL)) + RETURNS SYMBOL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE STRING + TYPE TYPE (CLOB)) + RETURNS CLOB + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE STRING + TYPE TYPE (NULLABLE_BOOL)) + RETURNS NULLABLE_BOOL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE STRING + TYPE TYPE (NULLABLE_STRING)) + RETURNS NULLABLE_STRING + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE STRING + TYPE TYPE (NULLABLE_SYMBOL)) + RETURNS NULLABLE_SYMBOL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE STRING + TYPE TYPE (NULLABLE_CLOB)) + RETURNS NULLABLE_CLOB + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_STRING + TYPE TYPE (BOOL)) + RETURNS BOOL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_STRING + TYPE TYPE (STRING)) + RETURNS STRING + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_STRING + TYPE TYPE (SYMBOL)) + RETURNS SYMBOL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_STRING + TYPE TYPE (CLOB)) + RETURNS CLOB + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_STRING + TYPE TYPE (NULLABLE_BOOL)) + RETURNS NULLABLE_BOOL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_STRING + TYPE TYPE (NULLABLE_STRING)) + RETURNS NULLABLE_STRING + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_STRING + TYPE TYPE (NULLABLE_SYMBOL)) + RETURNS NULLABLE_SYMBOL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE 
NULLABLE_STRING + TYPE TYPE (NULLABLE_CLOB)) + RETURNS NULLABLE_CLOB + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE SYMBOL + TYPE TYPE (BOOL)) + RETURNS BOOL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE SYMBOL + TYPE TYPE (STRING)) + RETURNS STRING + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE SYMBOL + TYPE TYPE (SYMBOL)) + RETURNS SYMBOL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE SYMBOL + TYPE TYPE (CLOB)) + RETURNS CLOB + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE SYMBOL + TYPE TYPE (NULLABLE_BOOL)) + RETURNS NULLABLE_BOOL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE SYMBOL + TYPE TYPE (NULLABLE_STRING)) + RETURNS NULLABLE_STRING + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE SYMBOL + TYPE TYPE (NULLABLE_SYMBOL)) + RETURNS NULLABLE_SYMBOL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE SYMBOL + TYPE TYPE (NULLABLE_CLOB)) + RETURNS NULLABLE_CLOB + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_SYMBOL + TYPE TYPE (NULLABLE_BOOL)) + RETURNS NULLABLE_BOOL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_SYMBOL + TYPE TYPE (NULLABLE_STRING)) + RETURNS NULLABLE_STRING + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_SYMBOL + TYPE TYPE (NULLABLE_SYMBOL)) + RETURNS NULLABLE_SYMBOL + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_SYMBOL + TYPE TYPE (NULLABLE_CLOB)) + RETURNS NULLABLE_CLOB + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE CLOB + TYPE TYPE (CLOB)) + RETURNS CLOB + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE CLOB + TYPE TYPE (NULLABLE_CLOB)) + RETURNS NULLABLE_CLOB + SPECIFIC - + 
RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_CLOB + TYPE TYPE (NULLABLE_CLOB)) + RETURNS NULLABLE_CLOB + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE LIST + TYPE TYPE (BAG)) + RETURNS BAG + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE LIST + TYPE TYPE (LIST)) + RETURNS LIST + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE LIST + TYPE TYPE (SEXP)) + RETURNS SEXP + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE LIST + TYPE TYPE (NULLABLE_BAG)) + RETURNS NULLABLE_BAG + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE LIST + TYPE TYPE (NULLABLE_LIST)) + RETURNS NULLABLE_LIST + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE LIST + TYPE TYPE (NULLABLE_SEXP)) + RETURNS NULLABLE_SEXP + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_LIST + TYPE TYPE (NULLABLE_BAG)) + RETURNS NULLABLE_BAG + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_LIST + TYPE TYPE (NULLABLE_LIST)) + RETURNS NULLABLE_LIST + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_LIST + TYPE TYPE (NULLABLE_SEXP)) + RETURNS NULLABLE_SEXP + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE BAG + TYPE TYPE (BAG)) + RETURNS BAG + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE BAG + TYPE TYPE (NULLABLE_BAG)) + RETURNS NULLABLE_BAG + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_BAG + TYPE TYPE (NULLABLE_BAG)) + RETURNS NULLABLE_BAG + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE SEXP + TYPE TYPE (BAG)) + RETURNS BAG + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE SEXP + TYPE TYPE (LIST)) + RETURNS LIST + SPECIFIC - + RETURN CAST ( VALUE, 
TYPE ); + +CREATE FUNCTION "CAST"( + VALUE SEXP + TYPE TYPE (SEXP)) + RETURNS SEXP + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE SEXP + TYPE TYPE (NULLABLE_BAG)) + RETURNS NULLABLE_BAG + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE SEXP + TYPE TYPE (NULLABLE_LIST)) + RETURNS NULLABLE_LIST + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE SEXP + TYPE TYPE (NULLABLE_SEXP)) + RETURNS NULLABLE_SEXP + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_SEXP + TYPE TYPE (NULLABLE_BAG)) + RETURNS NULLABLE_BAG + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_SEXP + TYPE TYPE (NULLABLE_LIST)) + RETURNS NULLABLE_LIST + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_SEXP + TYPE TYPE (NULLABLE_SEXP)) + RETURNS NULLABLE_SEXP + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE STRUCT + TYPE TYPE (STRUCT)) + RETURNS STRUCT + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE STRUCT + TYPE TYPE (NULLABLE_STRUCT)) + RETURNS NULLABLE_STRUCT + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); + +CREATE FUNCTION "CAST"( + VALUE NULLABLE_STRUCT + TYPE TYPE (NULLABLE_STRUCT)) + RETURNS NULLABLE_STRUCT + SPECIFIC - + RETURN CAST ( VALUE, TYPE ); diff --git a/partiql-planner/src/main/resources/operators.sql b/partiql-planner/src/main/resources/operators.sql new file mode 100644 index 000000000..a4e17f855 --- /dev/null +++ b/partiql-planner/src/main/resources/operators.sql @@ -0,0 +1,2031 @@ +--- [not] --------- + +CREATE FUNCTION "NOT"( + VALUE BOOL) + RETURNS BOOL + SPECIFIC - + RETURN NOT ( VALUE ); + +CREATE FUNCTION "NOT"( + VALUE BOOL) + RETURNS BOOL + SPECIFIC - + RETURN NOT ( VALUE ); + + +--- [pos] --------- + +CREATE FUNCTION "POS"( + VALUE INT8) + RETURNS INT8 + SPECIFIC - + RETURN POS ( VALUE ); + +CREATE FUNCTION "POS"( + VALUE 
NULLABLE_INT8) + RETURNS NULLABLE_INT8 + SPECIFIC - + RETURN POS ( VALUE ); + +CREATE FUNCTION "POS"( + VALUE INT16) + RETURNS INT16 + SPECIFIC - + RETURN POS ( VALUE ); + +CREATE FUNCTION "POS"( + VALUE NULLABLE_INT16) + RETURNS NULLABLE_INT16 + SPECIFIC - + RETURN POS ( VALUE ); + +CREATE FUNCTION "POS"( + VALUE INT32) + RETURNS INT32 + SPECIFIC - + RETURN POS ( VALUE ); + +CREATE FUNCTION "POS"( + VALUE NULLABLE_INT32) + RETURNS NULLABLE_INT32 + SPECIFIC - + RETURN POS ( VALUE ); + +CREATE FUNCTION "POS"( + VALUE INT64) + RETURNS INT64 + SPECIFIC - + RETURN POS ( VALUE ); + +CREATE FUNCTION "POS"( + VALUE NULLABLE_INT64) + RETURNS NULLABLE_INT64 + SPECIFIC - + RETURN POS ( VALUE ); + +CREATE FUNCTION "POS"( + VALUE INT) + RETURNS INT + SPECIFIC - + RETURN POS ( VALUE ); + +CREATE FUNCTION "POS"( + VALUE NULLABLE_INT) + RETURNS NULLABLE_INT + SPECIFIC - + RETURN POS ( VALUE ); + +CREATE FUNCTION "POS"( + VALUE DECIMAL) + RETURNS DECIMAL + SPECIFIC - + RETURN POS ( VALUE ); + +CREATE FUNCTION "POS"( + VALUE NULLABLE_DECIMAL) + RETURNS NULLABLE_DECIMAL + SPECIFIC - + RETURN POS ( VALUE ); + +CREATE FUNCTION "POS"( + VALUE FLOAT32) + RETURNS FLOAT32 + SPECIFIC - + RETURN POS ( VALUE ); + +CREATE FUNCTION "POS"( + VALUE NULLABLE_FLOAT32) + RETURNS NULLABLE_FLOAT32 + SPECIFIC - + RETURN POS ( VALUE ); + +CREATE FUNCTION "POS"( + VALUE FLOAT64) + RETURNS FLOAT64 + SPECIFIC - + RETURN POS ( VALUE ); + +CREATE FUNCTION "POS"( + VALUE NULLABLE_FLOAT64) + RETURNS NULLABLE_FLOAT64 + SPECIFIC - + RETURN POS ( VALUE ); + + +--- [neg] --------- + +CREATE FUNCTION "NEG"( + VALUE INT8) + RETURNS INT8 + SPECIFIC - + RETURN NEG ( VALUE ); + +CREATE FUNCTION "NEG"( + VALUE NULLABLE_INT8) + RETURNS NULLABLE_INT8 + SPECIFIC - + RETURN NEG ( VALUE ); + +CREATE FUNCTION "NEG"( + VALUE INT16) + RETURNS INT16 + SPECIFIC - + RETURN NEG ( VALUE ); + +CREATE FUNCTION "NEG"( + VALUE NULLABLE_INT16) + RETURNS NULLABLE_INT16 + SPECIFIC - + RETURN NEG ( VALUE ); + +CREATE FUNCTION "NEG"( + 
VALUE INT32) + RETURNS INT32 + SPECIFIC - + RETURN NEG ( VALUE ); + +CREATE FUNCTION "NEG"( + VALUE NULLABLE_INT32) + RETURNS NULLABLE_INT32 + SPECIFIC - + RETURN NEG ( VALUE ); + +CREATE FUNCTION "NEG"( + VALUE INT64) + RETURNS INT64 + SPECIFIC - + RETURN NEG ( VALUE ); + +CREATE FUNCTION "NEG"( + VALUE NULLABLE_INT64) + RETURNS NULLABLE_INT64 + SPECIFIC - + RETURN NEG ( VALUE ); + +CREATE FUNCTION "NEG"( + VALUE INT) + RETURNS INT + SPECIFIC - + RETURN NEG ( VALUE ); + +CREATE FUNCTION "NEG"( + VALUE NULLABLE_INT) + RETURNS NULLABLE_INT + SPECIFIC - + RETURN NEG ( VALUE ); + +CREATE FUNCTION "NEG"( + VALUE DECIMAL) + RETURNS DECIMAL + SPECIFIC - + RETURN NEG ( VALUE ); + +CREATE FUNCTION "NEG"( + VALUE NULLABLE_DECIMAL) + RETURNS NULLABLE_DECIMAL + SPECIFIC - + RETURN NEG ( VALUE ); + +CREATE FUNCTION "NEG"( + VALUE FLOAT32) + RETURNS FLOAT32 + SPECIFIC - + RETURN NEG ( VALUE ); + +CREATE FUNCTION "NEG"( + VALUE NULLABLE_FLOAT32) + RETURNS NULLABLE_FLOAT32 + SPECIFIC - + RETURN NEG ( VALUE ); + +CREATE FUNCTION "NEG"( + VALUE FLOAT64) + RETURNS FLOAT64 + SPECIFIC - + RETURN NEG ( VALUE ); + +CREATE FUNCTION "NEG"( + VALUE NULLABLE_FLOAT64) + RETURNS NULLABLE_FLOAT64 + SPECIFIC - + RETURN NEG ( VALUE ); + + +--- [eq] --------- + +CREATE FUNCTION "EQ"( + LHS NULL + RHS NULL) + RETURNS BOOL + SPECIFIC - + RETURN EQ ( LHS, RHS ); + +CREATE FUNCTION "EQ"( + LHS MISSING + RHS MISSING) + RETURNS BOOL + SPECIFIC - + RETURN EQ ( LHS, RHS ); + +CREATE FUNCTION "EQ"( + LHS BOOL + RHS BOOL) + RETURNS BOOL + SPECIFIC - + RETURN EQ ( LHS, RHS ); + +CREATE FUNCTION "EQ"( + LHS NULLABLE_BOOL + RHS NULLABLE_BOOL) + RETURNS BOOL + SPECIFIC - + RETURN EQ ( LHS, RHS ); + +CREATE FUNCTION "EQ"( + LHS INT8 + RHS INT8) + RETURNS BOOL + SPECIFIC - + RETURN EQ ( LHS, RHS ); + +CREATE FUNCTION "EQ"( + LHS NULLABLE_INT8 + RHS NULLABLE_INT8) + RETURNS BOOL + SPECIFIC - + RETURN EQ ( LHS, RHS ); + +CREATE FUNCTION "EQ"( + LHS INT16 + RHS INT16) + RETURNS BOOL + SPECIFIC - + RETURN EQ ( LHS, 
RHS ); + +CREATE FUNCTION "EQ"( + LHS NULLABLE_INT16 + RHS NULLABLE_INT16) + RETURNS BOOL + SPECIFIC - + RETURN EQ ( LHS, RHS ); + +CREATE FUNCTION "EQ"( + LHS INT32 + RHS INT32) + RETURNS BOOL + SPECIFIC - + RETURN EQ ( LHS, RHS ); + +CREATE FUNCTION "EQ"( + LHS NULLABLE_INT32 + RHS NULLABLE_INT32) + RETURNS BOOL + SPECIFIC - + RETURN EQ ( LHS, RHS ); + +CREATE FUNCTION "EQ"( + LHS INT64 + RHS INT64) + RETURNS BOOL + SPECIFIC - + RETURN EQ ( LHS, RHS ); + +CREATE FUNCTION "EQ"( + LHS NULLABLE_INT64 + RHS NULLABLE_INT64) + RETURNS BOOL + SPECIFIC - + RETURN EQ ( LHS, RHS ); + +CREATE FUNCTION "EQ"( + LHS INT + RHS INT) + RETURNS BOOL + SPECIFIC - + RETURN EQ ( LHS, RHS ); + +CREATE FUNCTION "EQ"( + LHS NULLABLE_INT + RHS NULLABLE_INT) + RETURNS BOOL + SPECIFIC - + RETURN EQ ( LHS, RHS ); + +CREATE FUNCTION "EQ"( + LHS DECIMAL + RHS DECIMAL) + RETURNS BOOL + SPECIFIC - + RETURN EQ ( LHS, RHS ); + +CREATE FUNCTION "EQ"( + LHS NULLABLE_DECIMAL + RHS NULLABLE_DECIMAL) + RETURNS BOOL + SPECIFIC - + RETURN EQ ( LHS, RHS ); + +CREATE FUNCTION "EQ"( + LHS FLOAT32 + RHS FLOAT32) + RETURNS BOOL + SPECIFIC - + RETURN EQ ( LHS, RHS ); + +CREATE FUNCTION "EQ"( + LHS NULLABLE_FLOAT32 + RHS NULLABLE_FLOAT32) + RETURNS BOOL + SPECIFIC - + RETURN EQ ( LHS, RHS ); + +CREATE FUNCTION "EQ"( + LHS FLOAT64 + RHS FLOAT64) + RETURNS BOOL + SPECIFIC - + RETURN EQ ( LHS, RHS ); + +CREATE FUNCTION "EQ"( + LHS NULLABLE_FLOAT64 + RHS NULLABLE_FLOAT64) + RETURNS BOOL + SPECIFIC - + RETURN EQ ( LHS, RHS ); + +CREATE FUNCTION "EQ"( + LHS CHAR + RHS CHAR) + RETURNS BOOL + SPECIFIC - + RETURN EQ ( LHS, RHS ); + +CREATE FUNCTION "EQ"( + LHS NULLABLE_CHAR + RHS NULLABLE_CHAR) + RETURNS BOOL + SPECIFIC - + RETURN EQ ( LHS, RHS ); + +CREATE FUNCTION "EQ"( + LHS SYMBOL + RHS SYMBOL) + RETURNS BOOL + SPECIFIC - + RETURN EQ ( LHS, RHS ); + +CREATE FUNCTION "EQ"( + LHS NULLABLE_SYMBOL + RHS NULLABLE_SYMBOL) + RETURNS BOOL + SPECIFIC - + RETURN EQ ( LHS, RHS ); + +CREATE FUNCTION "EQ"( + LHS STRING + RHS 
STRING) + RETURNS BOOL + SPECIFIC - + RETURN EQ ( LHS, RHS ); + +CREATE FUNCTION "EQ"( + LHS NULLABLE_STRING + RHS NULLABLE_STRING) + RETURNS BOOL + SPECIFIC - + RETURN EQ ( LHS, RHS ); + +CREATE FUNCTION "EQ"( + LHS CLOB + RHS CLOB) + RETURNS BOOL + SPECIFIC - + RETURN EQ ( LHS, RHS ); + +CREATE FUNCTION "EQ"( + LHS NULLABLE_CLOB + RHS NULLABLE_CLOB) + RETURNS BOOL + SPECIFIC - + RETURN EQ ( LHS, RHS ); + +CREATE FUNCTION "EQ"( + LHS BINARY + RHS BINARY) + RETURNS BOOL + SPECIFIC - + RETURN EQ ( LHS, RHS ); + +CREATE FUNCTION "EQ"( + LHS NULLABLE_BINARY + RHS NULLABLE_BINARY) + RETURNS BOOL + SPECIFIC - + RETURN EQ ( LHS, RHS ); + +CREATE FUNCTION "EQ"( + LHS BYTE + RHS BYTE) + RETURNS BOOL + SPECIFIC - + RETURN EQ ( LHS, RHS ); + +CREATE FUNCTION "EQ"( + LHS NULLABLE_BYTE + RHS NULLABLE_BYTE) + RETURNS BOOL + SPECIFIC - + RETURN EQ ( LHS, RHS ); + +CREATE FUNCTION "EQ"( + LHS BLOB + RHS BLOB) + RETURNS BOOL + SPECIFIC - + RETURN EQ ( LHS, RHS ); + +CREATE FUNCTION "EQ"( + LHS NULLABLE_BLOB + RHS NULLABLE_BLOB) + RETURNS BOOL + SPECIFIC - + RETURN EQ ( LHS, RHS ); + +CREATE FUNCTION "EQ"( + LHS DATE + RHS DATE) + RETURNS BOOL + SPECIFIC - + RETURN EQ ( LHS, RHS ); + +CREATE FUNCTION "EQ"( + LHS NULLABLE_DATE + RHS NULLABLE_DATE) + RETURNS BOOL + SPECIFIC - + RETURN EQ ( LHS, RHS ); + +CREATE FUNCTION "EQ"( + LHS TIME + RHS TIME) + RETURNS BOOL + SPECIFIC - + RETURN EQ ( LHS, RHS ); + +CREATE FUNCTION "EQ"( + LHS NULLABLE_TIME + RHS NULLABLE_TIME) + RETURNS BOOL + SPECIFIC - + RETURN EQ ( LHS, RHS ); + +CREATE FUNCTION "EQ"( + LHS TIMESTAMP + RHS TIMESTAMP) + RETURNS BOOL + SPECIFIC - + RETURN EQ ( LHS, RHS ); + +CREATE FUNCTION "EQ"( + LHS NULLABLE_TIMESTAMP + RHS NULLABLE_TIMESTAMP) + RETURNS BOOL + SPECIFIC - + RETURN EQ ( LHS, RHS ); + +CREATE FUNCTION "EQ"( + LHS INTERVAL + RHS INTERVAL) + RETURNS BOOL + SPECIFIC - + RETURN EQ ( LHS, RHS ); + +CREATE FUNCTION "EQ"( + LHS NULLABLE_INTERVAL + RHS NULLABLE_INTERVAL) + RETURNS BOOL + SPECIFIC - + RETURN EQ ( LHS, 
RHS ); + +CREATE FUNCTION "EQ"( + LHS LIST + RHS LIST) + RETURNS BOOL + SPECIFIC - + RETURN EQ ( LHS, RHS ); + +CREATE FUNCTION "EQ"( + LHS NULLABLE_LIST + RHS NULLABLE_LIST) + RETURNS BOOL + SPECIFIC - + RETURN EQ ( LHS, RHS ); + +CREATE FUNCTION "EQ"( + LHS BAG + RHS BAG) + RETURNS BOOL + SPECIFIC - + RETURN EQ ( LHS, RHS ); + +CREATE FUNCTION "EQ"( + LHS NULLABLE_BAG + RHS NULLABLE_BAG) + RETURNS BOOL + SPECIFIC - + RETURN EQ ( LHS, RHS ); + +CREATE FUNCTION "EQ"( + LHS SEXP + RHS SEXP) + RETURNS BOOL + SPECIFIC - + RETURN EQ ( LHS, RHS ); + +CREATE FUNCTION "EQ"( + LHS NULLABLE_SEXP + RHS NULLABLE_SEXP) + RETURNS BOOL + SPECIFIC - + RETURN EQ ( LHS, RHS ); + +CREATE FUNCTION "EQ"( + LHS STRUCT + RHS STRUCT) + RETURNS BOOL + SPECIFIC - + RETURN EQ ( LHS, RHS ); + +CREATE FUNCTION "EQ"( + LHS NULLABLE_STRUCT + RHS NULLABLE_STRUCT) + RETURNS BOOL + SPECIFIC - + RETURN EQ ( LHS, RHS ); + +CREATE FUNCTION "EQ"( + LHS GRAPH + RHS GRAPH) + RETURNS BOOL + SPECIFIC - + RETURN EQ ( LHS, RHS ); + + +--- [neq] --------- + +CREATE FUNCTION "NEQ"( + LHS NULL + RHS NULL) + RETURNS BOOL + SPECIFIC - + RETURN NEQ ( LHS, RHS ); + +CREATE FUNCTION "NEQ"( + LHS MISSING + RHS MISSING) + RETURNS BOOL + SPECIFIC - + RETURN NEQ ( LHS, RHS ); + +CREATE FUNCTION "NEQ"( + LHS BOOL + RHS BOOL) + RETURNS BOOL + SPECIFIC - + RETURN NEQ ( LHS, RHS ); + +CREATE FUNCTION "NEQ"( + LHS NULLABLE_BOOL + RHS NULLABLE_BOOL) + RETURNS BOOL + SPECIFIC - + RETURN NEQ ( LHS, RHS ); + +CREATE FUNCTION "NEQ"( + LHS INT8 + RHS INT8) + RETURNS BOOL + SPECIFIC - + RETURN NEQ ( LHS, RHS ); + +CREATE FUNCTION "NEQ"( + LHS NULLABLE_INT8 + RHS NULLABLE_INT8) + RETURNS BOOL + SPECIFIC - + RETURN NEQ ( LHS, RHS ); + +CREATE FUNCTION "NEQ"( + LHS INT16 + RHS INT16) + RETURNS BOOL + SPECIFIC - + RETURN NEQ ( LHS, RHS ); + +CREATE FUNCTION "NEQ"( + LHS NULLABLE_INT16 + RHS NULLABLE_INT16) + RETURNS BOOL + SPECIFIC - + RETURN NEQ ( LHS, RHS ); + +CREATE FUNCTION "NEQ"( + LHS INT32 + RHS INT32) + RETURNS BOOL + 
SPECIFIC - + RETURN NEQ ( LHS, RHS ); + +CREATE FUNCTION "NEQ"( + LHS NULLABLE_INT32 + RHS NULLABLE_INT32) + RETURNS BOOL + SPECIFIC - + RETURN NEQ ( LHS, RHS ); + +CREATE FUNCTION "NEQ"( + LHS INT64 + RHS INT64) + RETURNS BOOL + SPECIFIC - + RETURN NEQ ( LHS, RHS ); + +CREATE FUNCTION "NEQ"( + LHS NULLABLE_INT64 + RHS NULLABLE_INT64) + RETURNS BOOL + SPECIFIC - + RETURN NEQ ( LHS, RHS ); + +CREATE FUNCTION "NEQ"( + LHS INT + RHS INT) + RETURNS BOOL + SPECIFIC - + RETURN NEQ ( LHS, RHS ); + +CREATE FUNCTION "NEQ"( + LHS NULLABLE_INT + RHS NULLABLE_INT) + RETURNS BOOL + SPECIFIC - + RETURN NEQ ( LHS, RHS ); + +CREATE FUNCTION "NEQ"( + LHS DECIMAL + RHS DECIMAL) + RETURNS BOOL + SPECIFIC - + RETURN NEQ ( LHS, RHS ); + +CREATE FUNCTION "NEQ"( + LHS NULLABLE_DECIMAL + RHS NULLABLE_DECIMAL) + RETURNS BOOL + SPECIFIC - + RETURN NEQ ( LHS, RHS ); + +CREATE FUNCTION "NEQ"( + LHS FLOAT32 + RHS FLOAT32) + RETURNS BOOL + SPECIFIC - + RETURN NEQ ( LHS, RHS ); + +CREATE FUNCTION "NEQ"( + LHS NULLABLE_FLOAT32 + RHS NULLABLE_FLOAT32) + RETURNS BOOL + SPECIFIC - + RETURN NEQ ( LHS, RHS ); + +CREATE FUNCTION "NEQ"( + LHS FLOAT64 + RHS FLOAT64) + RETURNS BOOL + SPECIFIC - + RETURN NEQ ( LHS, RHS ); + +CREATE FUNCTION "NEQ"( + LHS NULLABLE_FLOAT64 + RHS NULLABLE_FLOAT64) + RETURNS BOOL + SPECIFIC - + RETURN NEQ ( LHS, RHS ); + +CREATE FUNCTION "NEQ"( + LHS CHAR + RHS CHAR) + RETURNS BOOL + SPECIFIC - + RETURN NEQ ( LHS, RHS ); + +CREATE FUNCTION "NEQ"( + LHS NULLABLE_CHAR + RHS NULLABLE_CHAR) + RETURNS BOOL + SPECIFIC - + RETURN NEQ ( LHS, RHS ); + +CREATE FUNCTION "NEQ"( + LHS SYMBOL + RHS SYMBOL) + RETURNS BOOL + SPECIFIC - + RETURN NEQ ( LHS, RHS ); + +CREATE FUNCTION "NEQ"( + LHS NULLABLE_SYMBOL + RHS NULLABLE_SYMBOL) + RETURNS BOOL + SPECIFIC - + RETURN NEQ ( LHS, RHS ); + +CREATE FUNCTION "NEQ"( + LHS STRING + RHS STRING) + RETURNS BOOL + SPECIFIC - + RETURN NEQ ( LHS, RHS ); + +CREATE FUNCTION "NEQ"( + LHS NULLABLE_STRING + RHS NULLABLE_STRING) + RETURNS BOOL + SPECIFIC - + 
RETURN NEQ ( LHS, RHS ); + +CREATE FUNCTION "NEQ"( + LHS CLOB + RHS CLOB) + RETURNS BOOL + SPECIFIC - + RETURN NEQ ( LHS, RHS ); + +CREATE FUNCTION "NEQ"( + LHS NULLABLE_CLOB + RHS NULLABLE_CLOB) + RETURNS BOOL + SPECIFIC - + RETURN NEQ ( LHS, RHS ); + +CREATE FUNCTION "NEQ"( + LHS BINARY + RHS BINARY) + RETURNS BOOL + SPECIFIC - + RETURN NEQ ( LHS, RHS ); + +CREATE FUNCTION "NEQ"( + LHS NULLABLE_BINARY + RHS NULLABLE_BINARY) + RETURNS BOOL + SPECIFIC - + RETURN NEQ ( LHS, RHS ); + +CREATE FUNCTION "NEQ"( + LHS BYTE + RHS BYTE) + RETURNS BOOL + SPECIFIC - + RETURN NEQ ( LHS, RHS ); + +CREATE FUNCTION "NEQ"( + LHS NULLABLE_BYTE + RHS NULLABLE_BYTE) + RETURNS BOOL + SPECIFIC - + RETURN NEQ ( LHS, RHS ); + +CREATE FUNCTION "NEQ"( + LHS BLOB + RHS BLOB) + RETURNS BOOL + SPECIFIC - + RETURN NEQ ( LHS, RHS ); + +CREATE FUNCTION "NEQ"( + LHS NULLABLE_BLOB + RHS NULLABLE_BLOB) + RETURNS BOOL + SPECIFIC - + RETURN NEQ ( LHS, RHS ); + +CREATE FUNCTION "NEQ"( + LHS DATE + RHS DATE) + RETURNS BOOL + SPECIFIC - + RETURN NEQ ( LHS, RHS ); + +CREATE FUNCTION "NEQ"( + LHS NULLABLE_DATE + RHS NULLABLE_DATE) + RETURNS BOOL + SPECIFIC - + RETURN NEQ ( LHS, RHS ); + +CREATE FUNCTION "NEQ"( + LHS TIME + RHS TIME) + RETURNS BOOL + SPECIFIC - + RETURN NEQ ( LHS, RHS ); + +CREATE FUNCTION "NEQ"( + LHS NULLABLE_TIME + RHS NULLABLE_TIME) + RETURNS BOOL + SPECIFIC - + RETURN NEQ ( LHS, RHS ); + +CREATE FUNCTION "NEQ"( + LHS TIMESTAMP + RHS TIMESTAMP) + RETURNS BOOL + SPECIFIC - + RETURN NEQ ( LHS, RHS ); + +CREATE FUNCTION "NEQ"( + LHS NULLABLE_TIMESTAMP + RHS NULLABLE_TIMESTAMP) + RETURNS BOOL + SPECIFIC - + RETURN NEQ ( LHS, RHS ); + +CREATE FUNCTION "NEQ"( + LHS INTERVAL + RHS INTERVAL) + RETURNS BOOL + SPECIFIC - + RETURN NEQ ( LHS, RHS ); + +CREATE FUNCTION "NEQ"( + LHS NULLABLE_INTERVAL + RHS NULLABLE_INTERVAL) + RETURNS BOOL + SPECIFIC - + RETURN NEQ ( LHS, RHS ); + +CREATE FUNCTION "NEQ"( + LHS LIST + RHS LIST) + RETURNS BOOL + SPECIFIC - + RETURN NEQ ( LHS, RHS ); + +CREATE FUNCTION 
"NEQ"( + LHS NULLABLE_LIST + RHS NULLABLE_LIST) + RETURNS BOOL + SPECIFIC - + RETURN NEQ ( LHS, RHS ); + +CREATE FUNCTION "NEQ"( + LHS BAG + RHS BAG) + RETURNS BOOL + SPECIFIC - + RETURN NEQ ( LHS, RHS ); + +CREATE FUNCTION "NEQ"( + LHS NULLABLE_BAG + RHS NULLABLE_BAG) + RETURNS BOOL + SPECIFIC - + RETURN NEQ ( LHS, RHS ); + +CREATE FUNCTION "NEQ"( + LHS SEXP + RHS SEXP) + RETURNS BOOL + SPECIFIC - + RETURN NEQ ( LHS, RHS ); + +CREATE FUNCTION "NEQ"( + LHS NULLABLE_SEXP + RHS NULLABLE_SEXP) + RETURNS BOOL + SPECIFIC - + RETURN NEQ ( LHS, RHS ); + +CREATE FUNCTION "NEQ"( + LHS STRUCT + RHS STRUCT) + RETURNS BOOL + SPECIFIC - + RETURN NEQ ( LHS, RHS ); + +CREATE FUNCTION "NEQ"( + LHS NULLABLE_STRUCT + RHS NULLABLE_STRUCT) + RETURNS BOOL + SPECIFIC - + RETURN NEQ ( LHS, RHS ); + +CREATE FUNCTION "NEQ"( + LHS GRAPH + RHS GRAPH) + RETURNS BOOL + SPECIFIC - + RETURN NEQ ( LHS, RHS ); + + +--- [and] --------- + +CREATE FUNCTION "AND"( + LHS BOOL + RHS BOOL) + RETURNS BOOL + SPECIFIC - + RETURN AND ( LHS, RHS ); + +CREATE FUNCTION "AND"( + LHS NULLABLE_BOOL + RHS NULLABLE_BOOL) + RETURNS BOOL + SPECIFIC - + RETURN AND ( LHS, RHS ); + + +--- [or] --------- + +CREATE FUNCTION "OR"( + LHS BOOL + RHS BOOL) + RETURNS BOOL + SPECIFIC - + RETURN OR ( LHS, RHS ); + +CREATE FUNCTION "OR"( + LHS NULLABLE_BOOL + RHS NULLABLE_BOOL) + RETURNS BOOL + SPECIFIC - + RETURN OR ( LHS, RHS ); + + +--- [lt] --------- + +CREATE FUNCTION "LT"( + LHS INT8 + RHS INT8) + RETURNS BOOL + SPECIFIC - + RETURN LT ( LHS, RHS ); + +CREATE FUNCTION "LT"( + LHS NULLABLE_INT8 + RHS NULLABLE_INT8) + RETURNS BOOL + SPECIFIC - + RETURN LT ( LHS, RHS ); + +CREATE FUNCTION "LT"( + LHS INT16 + RHS INT16) + RETURNS BOOL + SPECIFIC - + RETURN LT ( LHS, RHS ); + +CREATE FUNCTION "LT"( + LHS NULLABLE_INT16 + RHS NULLABLE_INT16) + RETURNS BOOL + SPECIFIC - + RETURN LT ( LHS, RHS ); + +CREATE FUNCTION "LT"( + LHS INT32 + RHS INT32) + RETURNS BOOL + SPECIFIC - + RETURN LT ( LHS, RHS ); + +CREATE FUNCTION "LT"( + LHS 
NULLABLE_INT32 + RHS NULLABLE_INT32) + RETURNS BOOL + SPECIFIC - + RETURN LT ( LHS, RHS ); + +CREATE FUNCTION "LT"( + LHS INT64 + RHS INT64) + RETURNS BOOL + SPECIFIC - + RETURN LT ( LHS, RHS ); + +CREATE FUNCTION "LT"( + LHS NULLABLE_INT64 + RHS NULLABLE_INT64) + RETURNS BOOL + SPECIFIC - + RETURN LT ( LHS, RHS ); + +CREATE FUNCTION "LT"( + LHS INT + RHS INT) + RETURNS BOOL + SPECIFIC - + RETURN LT ( LHS, RHS ); + +CREATE FUNCTION "LT"( + LHS NULLABLE_INT + RHS NULLABLE_INT) + RETURNS BOOL + SPECIFIC - + RETURN LT ( LHS, RHS ); + +CREATE FUNCTION "LT"( + LHS DECIMAL + RHS DECIMAL) + RETURNS BOOL + SPECIFIC - + RETURN LT ( LHS, RHS ); + +CREATE FUNCTION "LT"( + LHS NULLABLE_DECIMAL + RHS NULLABLE_DECIMAL) + RETURNS BOOL + SPECIFIC - + RETURN LT ( LHS, RHS ); + +CREATE FUNCTION "LT"( + LHS FLOAT32 + RHS FLOAT32) + RETURNS BOOL + SPECIFIC - + RETURN LT ( LHS, RHS ); + +CREATE FUNCTION "LT"( + LHS NULLABLE_FLOAT32 + RHS NULLABLE_FLOAT32) + RETURNS BOOL + SPECIFIC - + RETURN LT ( LHS, RHS ); + +CREATE FUNCTION "LT"( + LHS FLOAT64 + RHS FLOAT64) + RETURNS BOOL + SPECIFIC - + RETURN LT ( LHS, RHS ); + +CREATE FUNCTION "LT"( + LHS NULLABLE_FLOAT64 + RHS NULLABLE_FLOAT64) + RETURNS BOOL + SPECIFIC - + RETURN LT ( LHS, RHS ); + + +--- [lte] --------- + +CREATE FUNCTION "LTE"( + LHS INT8 + RHS INT8) + RETURNS BOOL + SPECIFIC - + RETURN LTE ( LHS, RHS ); + +CREATE FUNCTION "LTE"( + LHS NULLABLE_INT8 + RHS NULLABLE_INT8) + RETURNS BOOL + SPECIFIC - + RETURN LTE ( LHS, RHS ); + +CREATE FUNCTION "LTE"( + LHS INT16 + RHS INT16) + RETURNS BOOL + SPECIFIC - + RETURN LTE ( LHS, RHS ); + +CREATE FUNCTION "LTE"( + LHS NULLABLE_INT16 + RHS NULLABLE_INT16) + RETURNS BOOL + SPECIFIC - + RETURN LTE ( LHS, RHS ); + +CREATE FUNCTION "LTE"( + LHS INT32 + RHS INT32) + RETURNS BOOL + SPECIFIC - + RETURN LTE ( LHS, RHS ); + +CREATE FUNCTION "LTE"( + LHS NULLABLE_INT32 + RHS NULLABLE_INT32) + RETURNS BOOL + SPECIFIC - + RETURN LTE ( LHS, RHS ); + +CREATE FUNCTION "LTE"( + LHS INT64 + RHS INT64) 
+ RETURNS BOOL + SPECIFIC - + RETURN LTE ( LHS, RHS ); + +CREATE FUNCTION "LTE"( + LHS NULLABLE_INT64 + RHS NULLABLE_INT64) + RETURNS BOOL + SPECIFIC - + RETURN LTE ( LHS, RHS ); + +CREATE FUNCTION "LTE"( + LHS INT + RHS INT) + RETURNS BOOL + SPECIFIC - + RETURN LTE ( LHS, RHS ); + +CREATE FUNCTION "LTE"( + LHS NULLABLE_INT + RHS NULLABLE_INT) + RETURNS BOOL + SPECIFIC - + RETURN LTE ( LHS, RHS ); + +CREATE FUNCTION "LTE"( + LHS DECIMAL + RHS DECIMAL) + RETURNS BOOL + SPECIFIC - + RETURN LTE ( LHS, RHS ); + +CREATE FUNCTION "LTE"( + LHS NULLABLE_DECIMAL + RHS NULLABLE_DECIMAL) + RETURNS BOOL + SPECIFIC - + RETURN LTE ( LHS, RHS ); + +CREATE FUNCTION "LTE"( + LHS FLOAT32 + RHS FLOAT32) + RETURNS BOOL + SPECIFIC - + RETURN LTE ( LHS, RHS ); + +CREATE FUNCTION "LTE"( + LHS NULLABLE_FLOAT32 + RHS NULLABLE_FLOAT32) + RETURNS BOOL + SPECIFIC - + RETURN LTE ( LHS, RHS ); + +CREATE FUNCTION "LTE"( + LHS FLOAT64 + RHS FLOAT64) + RETURNS BOOL + SPECIFIC - + RETURN LTE ( LHS, RHS ); + +CREATE FUNCTION "LTE"( + LHS NULLABLE_FLOAT64 + RHS NULLABLE_FLOAT64) + RETURNS BOOL + SPECIFIC - + RETURN LTE ( LHS, RHS ); + + +--- [gt] --------- + +CREATE FUNCTION "GT"( + LHS INT8 + RHS INT8) + RETURNS BOOL + SPECIFIC - + RETURN GT ( LHS, RHS ); + +CREATE FUNCTION "GT"( + LHS NULLABLE_INT8 + RHS NULLABLE_INT8) + RETURNS BOOL + SPECIFIC - + RETURN GT ( LHS, RHS ); + +CREATE FUNCTION "GT"( + LHS INT16 + RHS INT16) + RETURNS BOOL + SPECIFIC - + RETURN GT ( LHS, RHS ); + +CREATE FUNCTION "GT"( + LHS NULLABLE_INT16 + RHS NULLABLE_INT16) + RETURNS BOOL + SPECIFIC - + RETURN GT ( LHS, RHS ); + +CREATE FUNCTION "GT"( + LHS INT32 + RHS INT32) + RETURNS BOOL + SPECIFIC - + RETURN GT ( LHS, RHS ); + +CREATE FUNCTION "GT"( + LHS NULLABLE_INT32 + RHS NULLABLE_INT32) + RETURNS BOOL + SPECIFIC - + RETURN GT ( LHS, RHS ); + +CREATE FUNCTION "GT"( + LHS INT64 + RHS INT64) + RETURNS BOOL + SPECIFIC - + RETURN GT ( LHS, RHS ); + +CREATE FUNCTION "GT"( + LHS NULLABLE_INT64 + RHS NULLABLE_INT64) + RETURNS BOOL 
+ SPECIFIC - + RETURN GT ( LHS, RHS ); + +CREATE FUNCTION "GT"( + LHS INT + RHS INT) + RETURNS BOOL + SPECIFIC - + RETURN GT ( LHS, RHS ); + +CREATE FUNCTION "GT"( + LHS NULLABLE_INT + RHS NULLABLE_INT) + RETURNS BOOL + SPECIFIC - + RETURN GT ( LHS, RHS ); + +CREATE FUNCTION "GT"( + LHS DECIMAL + RHS DECIMAL) + RETURNS BOOL + SPECIFIC - + RETURN GT ( LHS, RHS ); + +CREATE FUNCTION "GT"( + LHS NULLABLE_DECIMAL + RHS NULLABLE_DECIMAL) + RETURNS BOOL + SPECIFIC - + RETURN GT ( LHS, RHS ); + +CREATE FUNCTION "GT"( + LHS FLOAT32 + RHS FLOAT32) + RETURNS BOOL + SPECIFIC - + RETURN GT ( LHS, RHS ); + +CREATE FUNCTION "GT"( + LHS NULLABLE_FLOAT32 + RHS NULLABLE_FLOAT32) + RETURNS BOOL + SPECIFIC - + RETURN GT ( LHS, RHS ); + +CREATE FUNCTION "GT"( + LHS FLOAT64 + RHS FLOAT64) + RETURNS BOOL + SPECIFIC - + RETURN GT ( LHS, RHS ); + +CREATE FUNCTION "GT"( + LHS NULLABLE_FLOAT64 + RHS NULLABLE_FLOAT64) + RETURNS BOOL + SPECIFIC - + RETURN GT ( LHS, RHS ); + + +--- [gte] --------- + +CREATE FUNCTION "GTE"( + LHS INT8 + RHS INT8) + RETURNS BOOL + SPECIFIC - + RETURN GTE ( LHS, RHS ); + +CREATE FUNCTION "GTE"( + LHS NULLABLE_INT8 + RHS NULLABLE_INT8) + RETURNS BOOL + SPECIFIC - + RETURN GTE ( LHS, RHS ); + +CREATE FUNCTION "GTE"( + LHS INT16 + RHS INT16) + RETURNS BOOL + SPECIFIC - + RETURN GTE ( LHS, RHS ); + +CREATE FUNCTION "GTE"( + LHS NULLABLE_INT16 + RHS NULLABLE_INT16) + RETURNS BOOL + SPECIFIC - + RETURN GTE ( LHS, RHS ); + +CREATE FUNCTION "GTE"( + LHS INT32 + RHS INT32) + RETURNS BOOL + SPECIFIC - + RETURN GTE ( LHS, RHS ); + +CREATE FUNCTION "GTE"( + LHS NULLABLE_INT32 + RHS NULLABLE_INT32) + RETURNS BOOL + SPECIFIC - + RETURN GTE ( LHS, RHS ); + +CREATE FUNCTION "GTE"( + LHS INT64 + RHS INT64) + RETURNS BOOL + SPECIFIC - + RETURN GTE ( LHS, RHS ); + +CREATE FUNCTION "GTE"( + LHS NULLABLE_INT64 + RHS NULLABLE_INT64) + RETURNS BOOL + SPECIFIC - + RETURN GTE ( LHS, RHS ); + +CREATE FUNCTION "GTE"( + LHS INT + RHS INT) + RETURNS BOOL + SPECIFIC - + RETURN GTE ( LHS, RHS 
); + +CREATE FUNCTION "GTE"( + LHS NULLABLE_INT + RHS NULLABLE_INT) + RETURNS BOOL + SPECIFIC - + RETURN GTE ( LHS, RHS ); + +CREATE FUNCTION "GTE"( + LHS DECIMAL + RHS DECIMAL) + RETURNS BOOL + SPECIFIC - + RETURN GTE ( LHS, RHS ); + +CREATE FUNCTION "GTE"( + LHS NULLABLE_DECIMAL + RHS NULLABLE_DECIMAL) + RETURNS BOOL + SPECIFIC - + RETURN GTE ( LHS, RHS ); + +CREATE FUNCTION "GTE"( + LHS FLOAT32 + RHS FLOAT32) + RETURNS BOOL + SPECIFIC - + RETURN GTE ( LHS, RHS ); + +CREATE FUNCTION "GTE"( + LHS NULLABLE_FLOAT32 + RHS NULLABLE_FLOAT32) + RETURNS BOOL + SPECIFIC - + RETURN GTE ( LHS, RHS ); + +CREATE FUNCTION "GTE"( + LHS FLOAT64 + RHS FLOAT64) + RETURNS BOOL + SPECIFIC - + RETURN GTE ( LHS, RHS ); + +CREATE FUNCTION "GTE"( + LHS NULLABLE_FLOAT64 + RHS NULLABLE_FLOAT64) + RETURNS BOOL + SPECIFIC - + RETURN GTE ( LHS, RHS ); + + +--- [plus] --------- + +CREATE FUNCTION "PLUS"( + LHS INT8 + RHS INT8) + RETURNS INT8 + SPECIFIC - + RETURN PLUS ( LHS, RHS ); + +CREATE FUNCTION "PLUS"( + LHS NULLABLE_INT8 + RHS NULLABLE_INT8) + RETURNS NULLABLE_INT8 + SPECIFIC - + RETURN PLUS ( LHS, RHS ); + +CREATE FUNCTION "PLUS"( + LHS INT16 + RHS INT16) + RETURNS INT16 + SPECIFIC - + RETURN PLUS ( LHS, RHS ); + +CREATE FUNCTION "PLUS"( + LHS NULLABLE_INT16 + RHS NULLABLE_INT16) + RETURNS NULLABLE_INT16 + SPECIFIC - + RETURN PLUS ( LHS, RHS ); + +CREATE FUNCTION "PLUS"( + LHS INT32 + RHS INT32) + RETURNS INT32 + SPECIFIC - + RETURN PLUS ( LHS, RHS ); + +CREATE FUNCTION "PLUS"( + LHS NULLABLE_INT32 + RHS NULLABLE_INT32) + RETURNS NULLABLE_INT32 + SPECIFIC - + RETURN PLUS ( LHS, RHS ); + +CREATE FUNCTION "PLUS"( + LHS INT64 + RHS INT64) + RETURNS INT64 + SPECIFIC - + RETURN PLUS ( LHS, RHS ); + +CREATE FUNCTION "PLUS"( + LHS NULLABLE_INT64 + RHS NULLABLE_INT64) + RETURNS NULLABLE_INT64 + SPECIFIC - + RETURN PLUS ( LHS, RHS ); + +CREATE FUNCTION "PLUS"( + LHS INT + RHS INT) + RETURNS INT + SPECIFIC - + RETURN PLUS ( LHS, RHS ); + +CREATE FUNCTION "PLUS"( + LHS NULLABLE_INT + RHS 
NULLABLE_INT) + RETURNS NULLABLE_INT + SPECIFIC - + RETURN PLUS ( LHS, RHS ); + +CREATE FUNCTION "PLUS"( + LHS DECIMAL + RHS DECIMAL) + RETURNS DECIMAL + SPECIFIC - + RETURN PLUS ( LHS, RHS ); + +CREATE FUNCTION "PLUS"( + LHS NULLABLE_DECIMAL + RHS NULLABLE_DECIMAL) + RETURNS NULLABLE_DECIMAL + SPECIFIC - + RETURN PLUS ( LHS, RHS ); + +CREATE FUNCTION "PLUS"( + LHS FLOAT32 + RHS FLOAT32) + RETURNS FLOAT32 + SPECIFIC - + RETURN PLUS ( LHS, RHS ); + +CREATE FUNCTION "PLUS"( + LHS NULLABLE_FLOAT32 + RHS NULLABLE_FLOAT32) + RETURNS NULLABLE_FLOAT32 + SPECIFIC - + RETURN PLUS ( LHS, RHS ); + +CREATE FUNCTION "PLUS"( + LHS FLOAT64 + RHS FLOAT64) + RETURNS FLOAT64 + SPECIFIC - + RETURN PLUS ( LHS, RHS ); + +CREATE FUNCTION "PLUS"( + LHS NULLABLE_FLOAT64 + RHS NULLABLE_FLOAT64) + RETURNS NULLABLE_FLOAT64 + SPECIFIC - + RETURN PLUS ( LHS, RHS ); + + +--- [minus] --------- + +CREATE FUNCTION "MINUS"( + LHS INT8 + RHS INT8) + RETURNS INT8 + SPECIFIC - + RETURN MINUS ( LHS, RHS ); + +CREATE FUNCTION "MINUS"( + LHS NULLABLE_INT8 + RHS NULLABLE_INT8) + RETURNS NULLABLE_INT8 + SPECIFIC - + RETURN MINUS ( LHS, RHS ); + +CREATE FUNCTION "MINUS"( + LHS INT16 + RHS INT16) + RETURNS INT16 + SPECIFIC - + RETURN MINUS ( LHS, RHS ); + +CREATE FUNCTION "MINUS"( + LHS NULLABLE_INT16 + RHS NULLABLE_INT16) + RETURNS NULLABLE_INT16 + SPECIFIC - + RETURN MINUS ( LHS, RHS ); + +CREATE FUNCTION "MINUS"( + LHS INT32 + RHS INT32) + RETURNS INT32 + SPECIFIC - + RETURN MINUS ( LHS, RHS ); + +CREATE FUNCTION "MINUS"( + LHS NULLABLE_INT32 + RHS NULLABLE_INT32) + RETURNS NULLABLE_INT32 + SPECIFIC - + RETURN MINUS ( LHS, RHS ); + +CREATE FUNCTION "MINUS"( + LHS INT64 + RHS INT64) + RETURNS INT64 + SPECIFIC - + RETURN MINUS ( LHS, RHS ); + +CREATE FUNCTION "MINUS"( + LHS NULLABLE_INT64 + RHS NULLABLE_INT64) + RETURNS NULLABLE_INT64 + SPECIFIC - + RETURN MINUS ( LHS, RHS ); + +CREATE FUNCTION "MINUS"( + LHS INT + RHS INT) + RETURNS INT + SPECIFIC - + RETURN MINUS ( LHS, RHS ); + +CREATE FUNCTION "MINUS"( 
+ LHS NULLABLE_INT + RHS NULLABLE_INT) + RETURNS NULLABLE_INT + SPECIFIC - + RETURN MINUS ( LHS, RHS ); + +CREATE FUNCTION "MINUS"( + LHS DECIMAL + RHS DECIMAL) + RETURNS DECIMAL + SPECIFIC - + RETURN MINUS ( LHS, RHS ); + +CREATE FUNCTION "MINUS"( + LHS NULLABLE_DECIMAL + RHS NULLABLE_DECIMAL) + RETURNS NULLABLE_DECIMAL + SPECIFIC - + RETURN MINUS ( LHS, RHS ); + +CREATE FUNCTION "MINUS"( + LHS FLOAT32 + RHS FLOAT32) + RETURNS FLOAT32 + SPECIFIC - + RETURN MINUS ( LHS, RHS ); + +CREATE FUNCTION "MINUS"( + LHS NULLABLE_FLOAT32 + RHS NULLABLE_FLOAT32) + RETURNS NULLABLE_FLOAT32 + SPECIFIC - + RETURN MINUS ( LHS, RHS ); + +CREATE FUNCTION "MINUS"( + LHS FLOAT64 + RHS FLOAT64) + RETURNS FLOAT64 + SPECIFIC - + RETURN MINUS ( LHS, RHS ); + +CREATE FUNCTION "MINUS"( + LHS NULLABLE_FLOAT64 + RHS NULLABLE_FLOAT64) + RETURNS NULLABLE_FLOAT64 + SPECIFIC - + RETURN MINUS ( LHS, RHS ); + + +--- [times] --------- + +CREATE FUNCTION "TIMES"( + LHS INT8 + RHS INT8) + RETURNS INT8 + SPECIFIC - + RETURN TIMES ( LHS, RHS ); + +CREATE FUNCTION "TIMES"( + LHS NULLABLE_INT8 + RHS NULLABLE_INT8) + RETURNS NULLABLE_INT8 + SPECIFIC - + RETURN TIMES ( LHS, RHS ); + +CREATE FUNCTION "TIMES"( + LHS INT16 + RHS INT16) + RETURNS INT16 + SPECIFIC - + RETURN TIMES ( LHS, RHS ); + +CREATE FUNCTION "TIMES"( + LHS NULLABLE_INT16 + RHS NULLABLE_INT16) + RETURNS NULLABLE_INT16 + SPECIFIC - + RETURN TIMES ( LHS, RHS ); + +CREATE FUNCTION "TIMES"( + LHS INT32 + RHS INT32) + RETURNS INT32 + SPECIFIC - + RETURN TIMES ( LHS, RHS ); + +CREATE FUNCTION "TIMES"( + LHS NULLABLE_INT32 + RHS NULLABLE_INT32) + RETURNS NULLABLE_INT32 + SPECIFIC - + RETURN TIMES ( LHS, RHS ); + +CREATE FUNCTION "TIMES"( + LHS INT64 + RHS INT64) + RETURNS INT64 + SPECIFIC - + RETURN TIMES ( LHS, RHS ); + +CREATE FUNCTION "TIMES"( + LHS NULLABLE_INT64 + RHS NULLABLE_INT64) + RETURNS NULLABLE_INT64 + SPECIFIC - + RETURN TIMES ( LHS, RHS ); + +CREATE FUNCTION "TIMES"( + LHS INT + RHS INT) + RETURNS INT + SPECIFIC - + RETURN TIMES ( 
LHS, RHS ); + +CREATE FUNCTION "TIMES"( + LHS NULLABLE_INT + RHS NULLABLE_INT) + RETURNS NULLABLE_INT + SPECIFIC - + RETURN TIMES ( LHS, RHS ); + +CREATE FUNCTION "TIMES"( + LHS DECIMAL + RHS DECIMAL) + RETURNS DECIMAL + SPECIFIC - + RETURN TIMES ( LHS, RHS ); + +CREATE FUNCTION "TIMES"( + LHS NULLABLE_DECIMAL + RHS NULLABLE_DECIMAL) + RETURNS NULLABLE_DECIMAL + SPECIFIC - + RETURN TIMES ( LHS, RHS ); + +CREATE FUNCTION "TIMES"( + LHS FLOAT32 + RHS FLOAT32) + RETURNS FLOAT32 + SPECIFIC - + RETURN TIMES ( LHS, RHS ); + +CREATE FUNCTION "TIMES"( + LHS NULLABLE_FLOAT32 + RHS NULLABLE_FLOAT32) + RETURNS NULLABLE_FLOAT32 + SPECIFIC - + RETURN TIMES ( LHS, RHS ); + +CREATE FUNCTION "TIMES"( + LHS FLOAT64 + RHS FLOAT64) + RETURNS FLOAT64 + SPECIFIC - + RETURN TIMES ( LHS, RHS ); + +CREATE FUNCTION "TIMES"( + LHS NULLABLE_FLOAT64 + RHS NULLABLE_FLOAT64) + RETURNS NULLABLE_FLOAT64 + SPECIFIC - + RETURN TIMES ( LHS, RHS ); + + +--- [div] --------- + +CREATE FUNCTION "DIV"( + LHS INT8 + RHS INT8) + RETURNS INT8 + SPECIFIC - + RETURN DIV ( LHS, RHS ); + +CREATE FUNCTION "DIV"( + LHS NULLABLE_INT8 + RHS NULLABLE_INT8) + RETURNS NULLABLE_INT8 + SPECIFIC - + RETURN DIV ( LHS, RHS ); + +CREATE FUNCTION "DIV"( + LHS INT16 + RHS INT16) + RETURNS INT16 + SPECIFIC - + RETURN DIV ( LHS, RHS ); + +CREATE FUNCTION "DIV"( + LHS NULLABLE_INT16 + RHS NULLABLE_INT16) + RETURNS NULLABLE_INT16 + SPECIFIC - + RETURN DIV ( LHS, RHS ); + +CREATE FUNCTION "DIV"( + LHS INT32 + RHS INT32) + RETURNS INT32 + SPECIFIC - + RETURN DIV ( LHS, RHS ); + +CREATE FUNCTION "DIV"( + LHS NULLABLE_INT32 + RHS NULLABLE_INT32) + RETURNS NULLABLE_INT32 + SPECIFIC - + RETURN DIV ( LHS, RHS ); + +CREATE FUNCTION "DIV"( + LHS INT64 + RHS INT64) + RETURNS INT64 + SPECIFIC - + RETURN DIV ( LHS, RHS ); + +CREATE FUNCTION "DIV"( + LHS NULLABLE_INT64 + RHS NULLABLE_INT64) + RETURNS NULLABLE_INT64 + SPECIFIC - + RETURN DIV ( LHS, RHS ); + +CREATE FUNCTION "DIV"( + LHS INT + RHS INT) + RETURNS INT + SPECIFIC - + RETURN DIV ( 
LHS, RHS ); + +CREATE FUNCTION "DIV"( + LHS NULLABLE_INT + RHS NULLABLE_INT) + RETURNS NULLABLE_INT + SPECIFIC - + RETURN DIV ( LHS, RHS ); + +CREATE FUNCTION "DIV"( + LHS DECIMAL + RHS DECIMAL) + RETURNS DECIMAL + SPECIFIC - + RETURN DIV ( LHS, RHS ); + +CREATE FUNCTION "DIV"( + LHS NULLABLE_DECIMAL + RHS NULLABLE_DECIMAL) + RETURNS NULLABLE_DECIMAL + SPECIFIC - + RETURN DIV ( LHS, RHS ); + +CREATE FUNCTION "DIV"( + LHS FLOAT32 + RHS FLOAT32) + RETURNS FLOAT32 + SPECIFIC - + RETURN DIV ( LHS, RHS ); + +CREATE FUNCTION "DIV"( + LHS NULLABLE_FLOAT32 + RHS NULLABLE_FLOAT32) + RETURNS NULLABLE_FLOAT32 + SPECIFIC - + RETURN DIV ( LHS, RHS ); + +CREATE FUNCTION "DIV"( + LHS FLOAT64 + RHS FLOAT64) + RETURNS FLOAT64 + SPECIFIC - + RETURN DIV ( LHS, RHS ); + +CREATE FUNCTION "DIV"( + LHS NULLABLE_FLOAT64 + RHS NULLABLE_FLOAT64) + RETURNS NULLABLE_FLOAT64 + SPECIFIC - + RETURN DIV ( LHS, RHS ); + + +--- [mod] --------- + +CREATE FUNCTION "MOD"( + LHS INT8 + RHS INT8) + RETURNS INT8 + SPECIFIC - + RETURN MOD ( LHS, RHS ); + +CREATE FUNCTION "MOD"( + LHS NULLABLE_INT8 + RHS NULLABLE_INT8) + RETURNS NULLABLE_INT8 + SPECIFIC - + RETURN MOD ( LHS, RHS ); + +CREATE FUNCTION "MOD"( + LHS INT16 + RHS INT16) + RETURNS INT16 + SPECIFIC - + RETURN MOD ( LHS, RHS ); + +CREATE FUNCTION "MOD"( + LHS NULLABLE_INT16 + RHS NULLABLE_INT16) + RETURNS NULLABLE_INT16 + SPECIFIC - + RETURN MOD ( LHS, RHS ); + +CREATE FUNCTION "MOD"( + LHS INT32 + RHS INT32) + RETURNS INT32 + SPECIFIC - + RETURN MOD ( LHS, RHS ); + +CREATE FUNCTION "MOD"( + LHS NULLABLE_INT32 + RHS NULLABLE_INT32) + RETURNS NULLABLE_INT32 + SPECIFIC - + RETURN MOD ( LHS, RHS ); + +CREATE FUNCTION "MOD"( + LHS INT64 + RHS INT64) + RETURNS INT64 + SPECIFIC - + RETURN MOD ( LHS, RHS ); + +CREATE FUNCTION "MOD"( + LHS NULLABLE_INT64 + RHS NULLABLE_INT64) + RETURNS NULLABLE_INT64 + SPECIFIC - + RETURN MOD ( LHS, RHS ); + +CREATE FUNCTION "MOD"( + LHS INT + RHS INT) + RETURNS INT + SPECIFIC - + RETURN MOD ( LHS, RHS ); + +CREATE 
FUNCTION "MOD"( + LHS NULLABLE_INT + RHS NULLABLE_INT) + RETURNS NULLABLE_INT + SPECIFIC - + RETURN MOD ( LHS, RHS ); + +CREATE FUNCTION "MOD"( + LHS DECIMAL + RHS DECIMAL) + RETURNS DECIMAL + SPECIFIC - + RETURN MOD ( LHS, RHS ); + +CREATE FUNCTION "MOD"( + LHS NULLABLE_DECIMAL + RHS NULLABLE_DECIMAL) + RETURNS NULLABLE_DECIMAL + SPECIFIC - + RETURN MOD ( LHS, RHS ); + +CREATE FUNCTION "MOD"( + LHS FLOAT32 + RHS FLOAT32) + RETURNS FLOAT32 + SPECIFIC - + RETURN MOD ( LHS, RHS ); + +CREATE FUNCTION "MOD"( + LHS NULLABLE_FLOAT32 + RHS NULLABLE_FLOAT32) + RETURNS NULLABLE_FLOAT32 + SPECIFIC - + RETURN MOD ( LHS, RHS ); + +CREATE FUNCTION "MOD"( + LHS FLOAT64 + RHS FLOAT64) + RETURNS FLOAT64 + SPECIFIC - + RETURN MOD ( LHS, RHS ); + +CREATE FUNCTION "MOD"( + LHS NULLABLE_FLOAT64 + RHS NULLABLE_FLOAT64) + RETURNS NULLABLE_FLOAT64 + SPECIFIC - + RETURN MOD ( LHS, RHS ); + + +--- [concat] --------- + +CREATE FUNCTION "CONCAT"( + LHS SYMBOL + RHS SYMBOL) + RETURNS SYMBOL + SPECIFIC - + RETURN CONCAT ( LHS, RHS ); + +CREATE FUNCTION "CONCAT"( + LHS NULLABLE_SYMBOL + RHS NULLABLE_SYMBOL) + RETURNS NULLABLE_SYMBOL + SPECIFIC - + RETURN CONCAT ( LHS, RHS ); + +CREATE FUNCTION "CONCAT"( + LHS STRING + RHS STRING) + RETURNS STRING + SPECIFIC - + RETURN CONCAT ( LHS, RHS ); + +CREATE FUNCTION "CONCAT"( + LHS NULLABLE_STRING + RHS NULLABLE_STRING) + RETURNS NULLABLE_STRING + SPECIFIC - + RETURN CONCAT ( LHS, RHS ); diff --git a/partiql-planner/src/test/kotlin/org/partiql/planner/HeaderTest.kt b/partiql-planner/src/test/kotlin/org/partiql/planner/HeaderTest.kt new file mode 100644 index 000000000..3dad436ca --- /dev/null +++ b/partiql-planner/src/test/kotlin/org/partiql/planner/HeaderTest.kt @@ -0,0 +1,13 @@ +package org.partiql.planner + +import org.junit.jupiter.api.Disabled +import org.junit.jupiter.api.Test + +class HeaderTest { + + @Test + @Disabled + fun print() { + println(PartiQLHeader) + } +} diff --git 
a/partiql-planner/src/test/kotlin/org/partiql/planner/internal/EnvTest.kt b/partiql-planner/src/test/kotlin/org/partiql/planner/internal/EnvTest.kt new file mode 100644 index 000000000..725f0f481 --- /dev/null +++ b/partiql-planner/src/test/kotlin/org/partiql/planner/internal/EnvTest.kt @@ -0,0 +1,99 @@ +package org.partiql.planner.internal + +import com.amazon.ionelement.api.field +import com.amazon.ionelement.api.ionString +import com.amazon.ionelement.api.ionStructOf +import org.junit.jupiter.api.Assertions.assertNull +import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.api.Test +import org.partiql.planner.PartiQLHeader +import org.partiql.planner.PartiQLPlanner +import org.partiql.planner.internal.ir.Global +import org.partiql.planner.internal.ir.Identifier +import org.partiql.planner.internal.ir.Rex +import org.partiql.planner.internal.ir.identifierQualified +import org.partiql.planner.internal.ir.identifierSymbol +import org.partiql.plugins.local.LocalPlugin +import org.partiql.spi.BindingCase +import org.partiql.spi.BindingName +import org.partiql.spi.BindingPath +import org.partiql.types.StaticType +import java.util.Random +import kotlin.io.path.pathString +import kotlin.io.path.toPath +import kotlin.test.assertEquals +import kotlin.test.assertNotNull + +class EnvTest { + + companion object { + private val root = this::class.java.getResource("/catalogs/default")!!.toURI().toPath().pathString + + val catalogConfig = mapOf( + "pql" to ionStructOf( + field("connector_name", ionString("local")), + field("root", ionString("$root/pql")), + ) + ) + + private val EMPTY_TYPE_ENV = TypeEnv(schema = emptyList(), ResolutionStrategy.GLOBAL) + + private val GLOBAL_OS = Global( + path = identifierQualified( + root = identifierSymbol("pql", Identifier.CaseSensitivity.SENSITIVE), + steps = listOf( + identifierSymbol("main", Identifier.CaseSensitivity.SENSITIVE), + identifierSymbol("os", Identifier.CaseSensitivity.SENSITIVE) + ) + ), + type = StaticType.STRING 
+ ) + } + + private lateinit var env: Env + + @BeforeEach + fun init() { + env = Env( + listOf(PartiQLHeader), + listOf(LocalPlugin()), + PartiQLPlanner.Session( + queryId = Random().nextInt().toString(), + userId = "test-user", + currentCatalog = "pql", + currentDirectory = listOf("main"), + catalogConfig = catalogConfig + ) + ) + } + + @Test + fun testGlobalMatchingSensitiveName() { + val path = BindingPath(listOf(BindingName("os", BindingCase.SENSITIVE))) + assertNotNull(env.resolve(path, EMPTY_TYPE_ENV, Rex.Op.Var.Scope.DEFAULT)) + assertEquals(1, env.globals.size) + assert(env.globals.contains(GLOBAL_OS)) + } + + @Test + fun testGlobalMatchingInsensitiveName() { + val path = BindingPath(listOf(BindingName("oS", BindingCase.INSENSITIVE))) + assertNotNull(env.resolve(path, EMPTY_TYPE_ENV, Rex.Op.Var.Scope.DEFAULT)) + assertEquals(1, env.globals.size) + assert(env.globals.contains(GLOBAL_OS)) + } + + @Test + fun testGlobalNotMatchingSensitiveName() { + val path = BindingPath(listOf(BindingName("oS", BindingCase.SENSITIVE))) + assertNull(env.resolve(path, EMPTY_TYPE_ENV, Rex.Op.Var.Scope.DEFAULT)) + assert(env.globals.isEmpty()) + } + + @Test + fun testGlobalNotMatchingInsensitiveName() { + val path = BindingPath(listOf(BindingName("nonexistent", BindingCase.INSENSITIVE))) + assertNull(env.resolve(path, EMPTY_TYPE_ENV, Rex.Op.Var.Scope.DEFAULT)) + assert(env.globals.isEmpty()) + } +} diff --git a/partiql-planner/src/test/kotlin/org/partiql/planner/internal/typer/FunctionResolverTest.kt b/partiql-planner/src/test/kotlin/org/partiql/planner/internal/typer/FunctionResolverTest.kt new file mode 100644 index 000000000..bc220377d --- /dev/null +++ b/partiql-planner/src/test/kotlin/org/partiql/planner/internal/typer/FunctionResolverTest.kt @@ -0,0 +1,122 @@ +package org.partiql.planner.internal.typer + +import org.junit.jupiter.api.Test +import org.junit.jupiter.api.fail +import org.partiql.planner.Header +import org.partiql.planner.PartiQLHeader +import 
org.partiql.types.function.FunctionParameter +import org.partiql.types.function.FunctionSignature +import org.partiql.value.PartiQLValueExperimental +import org.partiql.value.PartiQLValueType + +/** + * As far as testing is concerned, we can stub out all value related things. + * We may be able to pretty-print with string equals to also simplify things. + * Only the "types" of expressions matter, we ignore the underlying ops. + */ +@OptIn(PartiQLValueExperimental::class) +class FunctionResolverTest { + + @Test + fun sanity() { + // 1 + 1.0 -> 2.0 + val fn = Header.binary( + name = "plus", + returns = PartiQLValueType.FLOAT64, + lhs = PartiQLValueType.FLOAT64, + rhs = PartiQLValueType.FLOAT64, + ) + val args = listOf( + FunctionParameter("arg-0", PartiQLValueType.INT32), + FunctionParameter("arg-1", PartiQLValueType.FLOAT64), + ) + val expectedImplicitCasts = listOf(true, false) + val case = Case.Success(fn, args, expectedImplicitCasts) + case.assert() + } + + @Test + fun split() { + val args = listOf( + FunctionParameter("arg-0", PartiQLValueType.STRING), + FunctionParameter("arg-1", PartiQLValueType.STRING), + ) + val expectedImplicitCasts = listOf(false, false) + val case = Case.Success(split, args, expectedImplicitCasts) + case.assert() + } + + companion object { + + val split = FunctionSignature.Scalar( + name = "split", + returns = PartiQLValueType.LIST, + parameters = listOf( + FunctionParameter("value", PartiQLValueType.STRING), + FunctionParameter("delimiter", PartiQLValueType.STRING), + ), + isNullable = false, + ) + + private val myHeader = object : Header() { + + override val namespace: String = "my_header" + + override val functions: List = listOf( + split + ) + } + + private val resolver = FnResolver(listOf(PartiQLHeader, myHeader)) + } + + private sealed class Case { + + abstract fun assert() + + class Success( + private val signature: FunctionSignature, + private val inputs: List, + private val expectedImplicitCast: List, + ) : Case() { + + /** + * 
Assert we match the function, and the appropriate implicit CASTs were returned. + * + * TODO actually look into what the CAST functions are. + */ + override fun assert() { + val mapping = resolver.match(signature, inputs) + val diffs = mutableListOf() + val message = buildString { + appendLine("Given arguments did not match function signature") + appendLine(signature) + appendLine("Input: (${inputs.joinToString()}})") + } + if (mapping == null || mapping.size != expectedImplicitCast.size) { + fail { message } + } + // compare args + for (i in mapping.indices) { + val m = mapping[i] + val shouldCast = expectedImplicitCast[i] + val diff: String? = when { + m == null && shouldCast -> "Arg[$i] is missing an implicit CAST" + m != null && !shouldCast -> "Arg[$i] had implicit CAST but should not" + else -> null + } + if (diff != null) diffs.add(diff) + } + // pretty-print some debug info + if (diffs.isNotEmpty()) { + fail { + buildString { + appendLine(message) + diffs.forEach { appendLine(it) } + } + } + } + } + } + } +} diff --git a/partiql-planner/src/test/kotlin/org/partiql/planner/internal/typer/PartiQLTyperTestBase.kt b/partiql-planner/src/test/kotlin/org/partiql/planner/internal/typer/PartiQLTyperTestBase.kt new file mode 100644 index 000000000..ebe1dd277 --- /dev/null +++ b/partiql-planner/src/test/kotlin/org/partiql/planner/internal/typer/PartiQLTyperTestBase.kt @@ -0,0 +1,144 @@ +package org.partiql.planner.internal.typer + +import com.amazon.ionelement.api.ionString +import com.amazon.ionelement.api.ionStructOf +import org.junit.jupiter.api.DynamicContainer +import org.junit.jupiter.api.DynamicTest +import org.partiql.errors.Problem +import org.partiql.errors.ProblemCallback +import org.partiql.errors.ProblemSeverity +import org.partiql.parser.PartiQLParserBuilder +import org.partiql.plan.Statement +import org.partiql.plan.debug.PlanPrinter +import org.partiql.planner.PartiQLPlanner +import org.partiql.planner.PartiQLPlannerBuilder +import 
org.partiql.planner.test.PartiQLTest +import org.partiql.planner.test.PartiQLTestProvider +import org.partiql.plugins.memory.MemoryCatalog +import org.partiql.plugins.memory.MemoryPlugin +import org.partiql.types.StaticType +import java.util.Random +import java.util.stream.Stream + +abstract class PartiQLTyperTestBase { + sealed class TestResult { + data class Success(val expectedType: StaticType) : TestResult() { + override fun toString(): String = "Success_$expectedType" + } + + object Failure : TestResult() { + override fun toString(): String = "Failure" + } + } + + internal class ProblemCollector : ProblemCallback { + private val problemList = mutableListOf() + + val problems: List + get() = problemList + + val hasErrors: Boolean + get() = problemList.any { it.details.severity == ProblemSeverity.ERROR } + + val hasWarnings: Boolean + get() = problemList.any { it.details.severity == ProblemSeverity.WARNING } + + override fun invoke(problem: Problem) { + problemList.add(problem) + } + } + + companion object { + internal val session: ((String) -> PartiQLPlanner.Session) = { catalog -> + PartiQLPlanner.Session( + queryId = Random().nextInt().toString(), + userId = "test-user", + currentCatalog = catalog, + catalogConfig = mapOf( + catalog to ionStructOf( + "connector_name" to ionString("memory") + ) + ) + ) + } + } + + val inputs = PartiQLTestProvider().apply { load() } + + val testingPipeline: ((String, String, MemoryCatalog.Provider, ProblemCallback) -> PartiQLPlanner.Result) = { query, catalog, catalogProvider, collector -> + val ast = PartiQLParserBuilder.standard().build().parse(query).root + val planner = PartiQLPlannerBuilder().plugins(listOf(MemoryPlugin(catalogProvider))).build() + planner.plan(ast, session(catalog), collector) + } + + fun testGen( + testCategory: String, + tests: List, + argsMap: Map>>, + ): Stream { + val catalogProvider = MemoryCatalog.Provider() + + return tests.map { test -> + val group = test.statement + val children = 
argsMap.flatMap { (key, value) -> + value.mapIndexed { index: Int, types: List -> + val testName = "${testCategory}_${key}_$index" + catalogProvider[testName] = MemoryCatalog.of( + *( + types.mapIndexed { i, t -> + "t${i + 1}" to t + }.toTypedArray() + ) + ) + val displayName = "$group | $testName | $types" + val statement = test.statement + // Assert + DynamicTest.dynamicTest(displayName) { + val pc = ProblemCollector() + if (key is TestResult.Success) { + val result = testingPipeline(statement, testName, catalogProvider, pc) + val root = (result.plan.statement as Statement.Query).root + val actualType = root.type + assert(actualType == key.expectedType) { + buildString { + this.appendLine("expected Type is : ${key.expectedType}") + this.appendLine("actual Type is : $actualType") + PlanPrinter.append(this, result.plan) + } + } + assert(pc.problems.isEmpty()) { + buildString { + this.appendLine("expected success Test case to have no problem") + this.appendLine("actual problems are: ") + pc.problems.forEach { + this.appendLine(it) + } + PlanPrinter.append(this, result.plan) + } + } + } else { + val result = testingPipeline(statement, testName, catalogProvider, pc) + val root = (result.plan.statement as Statement.Query).root + val actualType = root.type + assert(actualType == StaticType.MISSING) { + buildString { + this.appendLine(" expected Type is : MISSING") + this.appendLine("actual Type is : $actualType") + PlanPrinter.append(this, result.plan) + } + } + assert(pc.problems.isNotEmpty()) { + buildString { + this.appendLine("expected success Test case to have problems") + this.appendLine("but received no problems") + PlanPrinter.append(this, result.plan) + } + } + } + } + } + } + DynamicContainer.dynamicContainer(group, children) + }.stream() + } +} diff --git a/partiql-planner/src/test/kotlin/org/partiql/planner/internal/typer/PlanTyperTest.kt b/partiql-planner/src/test/kotlin/org/partiql/planner/internal/typer/PlanTyperTest.kt new file mode 100644 index 
000000000..661e32802 --- /dev/null +++ b/partiql-planner/src/test/kotlin/org/partiql/planner/internal/typer/PlanTyperTest.kt @@ -0,0 +1,572 @@ +package org.partiql.planner.internal.typer + +import com.amazon.ionelement.api.field +import com.amazon.ionelement.api.ionString +import com.amazon.ionelement.api.ionStructOf +import org.junit.jupiter.api.Test +import org.partiql.errors.Problem +import org.partiql.errors.ProblemCallback +import org.partiql.errors.ProblemHandler +import org.partiql.errors.ProblemSeverity +import org.partiql.planner.PartiQLHeader +import org.partiql.planner.PartiQLPlanner +import org.partiql.planner.internal.Env +import org.partiql.planner.internal.ir.Identifier +import org.partiql.planner.internal.ir.Rex +import org.partiql.planner.internal.ir.identifierSymbol +import org.partiql.planner.internal.ir.rex +import org.partiql.planner.internal.ir.rexOpGlobal +import org.partiql.planner.internal.ir.rexOpLit +import org.partiql.planner.internal.ir.rexOpPath +import org.partiql.planner.internal.ir.rexOpPathStepSymbol +import org.partiql.planner.internal.ir.rexOpStruct +import org.partiql.planner.internal.ir.rexOpStructField +import org.partiql.planner.internal.ir.rexOpVarUnresolved +import org.partiql.planner.internal.ir.statementQuery +import org.partiql.plugins.local.LocalPlugin +import org.partiql.types.StaticType +import org.partiql.types.StructType +import org.partiql.types.TupleConstraint +import org.partiql.value.PartiQLValueExperimental +import org.partiql.value.int32Value +import org.partiql.value.stringValue +import java.util.Random +import kotlin.io.path.pathString +import kotlin.io.path.toPath +import kotlin.test.assertEquals + +class PlanTyperTest { + + companion object { + private val root = this::class.java.getResource("/catalogs/default")!!.toURI().toPath().pathString + + private val catalogConfig = mapOf( + "pql" to ionStructOf( + field("connector_name", ionString("local")), + field("root", ionString("$root/pql")), + ) + ) + + 
private val ORDERED_DUPLICATES_STRUCT = StructType( + fields = listOf( + StructType.Field("definition", StaticType.STRING), + StructType.Field("definition", StaticType.FLOAT), + StructType.Field("DEFINITION", StaticType.DECIMAL), + ), + contentClosed = true, + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.Ordered + ) + ) + + private val DUPLICATES_STRUCT = StructType( + fields = listOf( + StructType.Field("definition", StaticType.STRING), + StructType.Field("definition", StaticType.FLOAT), + StructType.Field("DEFINITION", StaticType.DECIMAL), + ), + contentClosed = true, + constraints = setOf( + TupleConstraint.Open(false) + ) + ) + + private val CLOSED_UNION_DUPLICATES_STRUCT = StaticType.unionOf( + StructType( + fields = listOf( + StructType.Field("definition", StaticType.STRING), + StructType.Field("definition", StaticType.FLOAT), + StructType.Field("DEFINITION", StaticType.DECIMAL), + ), + contentClosed = true, + constraints = setOf( + TupleConstraint.Open(false) + ) + ), + StructType( + fields = listOf( + StructType.Field("definition", StaticType.INT2), + StructType.Field("definition", StaticType.INT4), + StructType.Field("DEFINITION", StaticType.INT8), + ), + contentClosed = true, + constraints = setOf( + TupleConstraint.Open(false), + TupleConstraint.Ordered + ) + ), + ) + + private val OPEN_DUPLICATES_STRUCT = StructType( + fields = listOf( + StructType.Field("definition", StaticType.STRING), + StructType.Field("definition", StaticType.FLOAT), + StructType.Field("DEFINITION", StaticType.DECIMAL), + ), + contentClosed = false + ) + + private fun getTyper(): PlanTyperWrapper { + val collector = ProblemCollector() + val env = Env( + listOf(PartiQLHeader), + listOf(LocalPlugin()), + PartiQLPlanner.Session( + queryId = Random().nextInt().toString(), + userId = "test-user", + currentCatalog = "pql", + currentDirectory = listOf("main"), + catalogConfig = catalogConfig + ) + ) + return PlanTyperWrapper(PlanTyper(env, collector), collector) + 
} + } + + private class PlanTyperWrapper( + internal val typer: PlanTyper, + internal val collector: ProblemCollector + ) + + /** + * This is a test to show that we convert: + * ``` + * { 'FiRsT_KeY': { 'sEcoNd_KEY': 5 } }.first_key."sEcoNd_KEY" + * ``` + * to + * ``` + * { 'FiRsT_KeY': { 'sEcoNd_KEY': 5 } }."FiRsT_KeY"."sEcoNd_KEY" + * ``` + * + * It also checks that we type it all correctly as well. + */ + @Test + @OptIn(PartiQLValueExperimental::class) + fun testReplacingStructs() { + val wrapper = getTyper() + val typer = wrapper.typer + val input = statementQuery( + root = rex( + type = StaticType.ANY, + op = rexOpPath( + root = rex( + StaticType.ANY, + rexOpStruct( + fields = listOf( + rexOpStructField( + k = rex(StaticType.STRING, rexOpLit(stringValue("FiRsT_KeY"))), + v = rex( + StaticType.ANY, + rexOpStruct( + fields = listOf( + rexOpStructField( + k = rex(StaticType.STRING, rexOpLit(stringValue("sEcoNd_KEY"))), + v = rex(StaticType.INT4, rexOpLit(int32Value(5))) + ) + ) + ) + ) + ) + ) + ) + ), + steps = listOf( + rexOpPathStepSymbol(identifierSymbol("first_key", Identifier.CaseSensitivity.INSENSITIVE)), + rexOpPathStepSymbol(identifierSymbol("sEcoNd_KEY", Identifier.CaseSensitivity.SENSITIVE)), + ) + ) + ) + ) + val firstKeyStruct = StructType( + fields = mapOf( + "sEcoNd_KEY" to StaticType.INT4 + ), + contentClosed = true, + constraints = setOf( + TupleConstraint.UniqueAttrs(true), + TupleConstraint.Open(false) + ) + ) + val topLevelStruct = StructType( + fields = mapOf( + "FiRsT_KeY" to firstKeyStruct + ), + contentClosed = true, + constraints = setOf( + TupleConstraint.UniqueAttrs(true), + TupleConstraint.Open(false) + ) + ) + val expected = statementQuery( + root = rex( + type = StaticType.INT4, + op = rexOpPath( + root = rex( + type = topLevelStruct, + rexOpStruct( + fields = listOf( + rexOpStructField( + k = rex(StaticType.STRING, rexOpLit(stringValue("FiRsT_KeY"))), + v = rex( + type = firstKeyStruct, + rexOpStruct( + fields = listOf( + 
rexOpStructField( + k = rex(StaticType.STRING, rexOpLit(stringValue("sEcoNd_KEY"))), + v = rex(StaticType.INT4, rexOpLit(int32Value(5))) + ) + ) + ) + ) + ) + ) + ) + ), + steps = listOf( + rexOpPathStepSymbol(identifierSymbol("FiRsT_KeY", Identifier.CaseSensitivity.SENSITIVE)), + rexOpPathStepSymbol(identifierSymbol("sEcoNd_KEY", Identifier.CaseSensitivity.SENSITIVE)), + ) + ) + ) + ) + val actual = typer.resolve(input) + assertEquals(expected, actual) + } + + @Test + fun testOrderedDuplicates() { + val wrapper = getTyper() + val typer = wrapper.typer + val input = statementQuery( + root = rex( + type = StaticType.ANY, + op = rexOpPath( + root = rex( + StaticType.ANY, + rexOpVarUnresolved( + identifierSymbol("closed_ordered_duplicates_struct", Identifier.CaseSensitivity.SENSITIVE), + Rex.Op.Var.Scope.DEFAULT + ) + ), + steps = listOf( + rexOpPathStepSymbol(identifierSymbol("DEFINITION", Identifier.CaseSensitivity.INSENSITIVE)), + ) + ) + ) + ) + val expected = statementQuery( + root = rex( + type = StaticType.STRING, + op = rexOpPath( + root = rex( + ORDERED_DUPLICATES_STRUCT, + rexOpGlobal(0) + ), + steps = listOf( + rexOpPathStepSymbol(identifierSymbol("definition", Identifier.CaseSensitivity.SENSITIVE)), + ) + ) + ) + ) + val actual = typer.resolve(input) + assertEquals(expected, actual) + } + + @Test + fun testOrderedDuplicatesWithSensitivity() { + val wrapper = getTyper() + val typer = wrapper.typer + val input = statementQuery( + root = rex( + type = StaticType.ANY, + op = rexOpPath( + root = rex( + StaticType.ANY, + rexOpVarUnresolved( + identifierSymbol("closed_ordered_duplicates_struct", Identifier.CaseSensitivity.SENSITIVE), + Rex.Op.Var.Scope.DEFAULT + ) + ), + steps = listOf( + rexOpPathStepSymbol(identifierSymbol("DEFINITION", Identifier.CaseSensitivity.SENSITIVE)), + ) + ) + ) + ) + val expected = statementQuery( + root = rex( + type = StaticType.DECIMAL, + op = rexOpPath( + root = rex( + ORDERED_DUPLICATES_STRUCT, + rexOpGlobal(0) + ), + steps = 
listOf( + rexOpPathStepSymbol(identifierSymbol("DEFINITION", Identifier.CaseSensitivity.SENSITIVE)), + ) + ) + ) + ) + val actual = typer.resolve(input) + assertEquals(expected, actual) + } + + @Test + fun testUnorderedDuplicates() { + val wrapper = getTyper() + val typer = wrapper.typer + val input = statementQuery( + root = rex( + type = StaticType.ANY, + op = rexOpPath( + root = rex( + StaticType.ANY, + rexOpVarUnresolved( + identifierSymbol("closed_duplicates_struct", Identifier.CaseSensitivity.SENSITIVE), + Rex.Op.Var.Scope.DEFAULT + ) + ), + steps = listOf( + rexOpPathStepSymbol(identifierSymbol("DEFINITION", Identifier.CaseSensitivity.INSENSITIVE)), + ) + ) + ) + ) + val expected = statementQuery( + root = rex( + type = StaticType.unionOf(StaticType.STRING, StaticType.FLOAT, StaticType.DECIMAL), + op = rexOpPath( + root = rex( + DUPLICATES_STRUCT, + rexOpGlobal(0) + ), + steps = listOf( + rexOpPathStepSymbol(identifierSymbol("DEFINITION", Identifier.CaseSensitivity.INSENSITIVE)), + ) + ) + ) + ) + val actual = typer.resolve(input) + assertEquals(expected, actual) + } + + @Test + fun testUnorderedDuplicatesWithSensitivity() { + val wrapper = getTyper() + val typer = wrapper.typer + val input = statementQuery( + root = rex( + type = StaticType.ANY, + op = rexOpPath( + root = rex( + StaticType.ANY, + rexOpVarUnresolved( + identifierSymbol("closed_duplicates_struct", Identifier.CaseSensitivity.SENSITIVE), + Rex.Op.Var.Scope.DEFAULT + ) + ), + steps = listOf( + rexOpPathStepSymbol(identifierSymbol("DEFINITION", Identifier.CaseSensitivity.SENSITIVE)), + ) + ) + ) + ) + val expected = statementQuery( + root = rex( + type = StaticType.DECIMAL, + op = rexOpPath( + root = rex( + DUPLICATES_STRUCT, + rexOpGlobal(0) + ), + steps = listOf( + rexOpPathStepSymbol(identifierSymbol("DEFINITION", Identifier.CaseSensitivity.SENSITIVE)), + ) + ) + ) + ) + val actual = typer.resolve(input) + assertEquals(expected, actual) + } + + @Test + fun 
testUnorderedDuplicatesWithSensitivityAndDuplicateResults() { + val wrapper = getTyper() + val typer = wrapper.typer + val input = statementQuery( + root = rex( + type = StaticType.ANY, + op = rexOpPath( + root = rex( + StaticType.ANY, + rexOpVarUnresolved( + identifierSymbol("closed_duplicates_struct", Identifier.CaseSensitivity.SENSITIVE), + Rex.Op.Var.Scope.DEFAULT + ) + ), + steps = listOf( + rexOpPathStepSymbol(identifierSymbol("definition", Identifier.CaseSensitivity.SENSITIVE)), + ) + ) + ) + ) + val expected = statementQuery( + root = rex( + type = StaticType.unionOf(StaticType.STRING, StaticType.FLOAT), + op = rexOpPath( + root = rex( + DUPLICATES_STRUCT, + rexOpGlobal(0) + ), + steps = listOf( + rexOpPathStepSymbol(identifierSymbol("definition", Identifier.CaseSensitivity.SENSITIVE)), + ) + ) + ) + ) + val actual = typer.resolve(input) + assertEquals(expected, actual) + } + + @Test + fun testOpenDuplicates() { + val wrapper = getTyper() + val typer = wrapper.typer + val input = statementQuery( + root = rex( + type = StaticType.ANY, + op = rexOpPath( + root = rex( + StaticType.ANY, + rexOpVarUnresolved( + identifierSymbol("open_duplicates_struct", Identifier.CaseSensitivity.SENSITIVE), + Rex.Op.Var.Scope.DEFAULT + ) + ), + steps = listOf( + rexOpPathStepSymbol(identifierSymbol("definition", Identifier.CaseSensitivity.SENSITIVE)), + ) + ) + ) + ) + val expected = statementQuery( + root = rex( + type = StaticType.ANY, + op = rexOpPath( + root = rex( + OPEN_DUPLICATES_STRUCT, + rexOpGlobal(0) + ), + steps = listOf( + rexOpPathStepSymbol(identifierSymbol("definition", Identifier.CaseSensitivity.SENSITIVE)), + ) + ) + ) + ) + val actual = typer.resolve(input) + assertEquals(expected, actual) + } + + @Test + fun testUnionClosedDuplicates() { + val wrapper = getTyper() + val typer = wrapper.typer + val input = statementQuery( + root = rex( + type = StaticType.ANY, + op = rexOpPath( + root = rex( + StaticType.ANY, + rexOpVarUnresolved( + 
identifierSymbol("closed_union_duplicates_struct", Identifier.CaseSensitivity.SENSITIVE), + Rex.Op.Var.Scope.DEFAULT + ) + ), + steps = listOf( + rexOpPathStepSymbol(identifierSymbol("definition", Identifier.CaseSensitivity.INSENSITIVE)), + ) + ) + ) + ) + val expected = statementQuery( + root = rex( + type = StaticType.unionOf(StaticType.STRING, StaticType.FLOAT, StaticType.DECIMAL, StaticType.INT2), + op = rexOpPath( + root = rex( + CLOSED_UNION_DUPLICATES_STRUCT, + rexOpGlobal(0) + ), + steps = listOf( + rexOpPathStepSymbol(identifierSymbol("definition", Identifier.CaseSensitivity.INSENSITIVE)), + ) + ) + ) + ) + val actual = typer.resolve(input) + assertEquals(expected, actual) + } + + @Test + fun testUnionClosedDuplicatesWithSensitivity() { + val wrapper = getTyper() + val typer = wrapper.typer + val input = statementQuery( + root = rex( + type = StaticType.ANY, + op = rexOpPath( + root = rex( + StaticType.ANY, + rexOpVarUnresolved( + identifierSymbol("closed_union_duplicates_struct", Identifier.CaseSensitivity.SENSITIVE), + Rex.Op.Var.Scope.DEFAULT + ) + ), + steps = listOf( + rexOpPathStepSymbol(identifierSymbol("definition", Identifier.CaseSensitivity.SENSITIVE)), + ) + ) + ) + ) + val expected = statementQuery( + root = rex( + type = StaticType.unionOf(StaticType.STRING, StaticType.FLOAT, StaticType.INT2), + op = rexOpPath( + root = rex( + CLOSED_UNION_DUPLICATES_STRUCT, + rexOpGlobal(0) + ), + steps = listOf( + rexOpPathStepSymbol(identifierSymbol("definition", Identifier.CaseSensitivity.SENSITIVE)), + ) + ) + ) + ) + val actual = typer.resolve(input) + assertEquals(expected, actual) + } + + /** + * A [ProblemHandler] that collects all the encountered [Problem]s without throwing. + * + * This is intended to be used when wanting to collect multiple problems that may be encountered (e.g. a static type + * inference pass that can result in multiple errors and/or warnings). This handler does not collect other exceptions + * that may be thrown. 
+ */ + internal class ProblemCollector : ProblemCallback { + private val problemList = mutableListOf() + + val problems: List + get() = problemList + + val hasErrors: Boolean + get() = problemList.any { it.details.severity == ProblemSeverity.ERROR } + + val hasWarnings: Boolean + get() = problemList.any { it.details.severity == ProblemSeverity.WARNING } + + override fun invoke(problem: Problem) { + problemList.add(problem) + } + } +} diff --git a/partiql-planner/src/test/kotlin/org/partiql/planner/internal/typer/TypeLatticeTest.kt b/partiql-planner/src/test/kotlin/org/partiql/planner/internal/typer/TypeLatticeTest.kt new file mode 100644 index 000000000..9d02aaa66 --- /dev/null +++ b/partiql-planner/src/test/kotlin/org/partiql/planner/internal/typer/TypeLatticeTest.kt @@ -0,0 +1,14 @@ +package org.partiql.planner.internal.typer + +import org.junit.jupiter.api.Disabled +import org.junit.jupiter.api.Test + +class TypeLatticeTest { + + @Test + @Disabled + fun latticeAsciidocDump() { + // this test only exists for dumping the type lattice as Asciidoc + println(TypeLattice.partiql()) + } +} diff --git a/partiql-planner/src/test/kotlin/org/partiql/planner/internal/typer/logical/OpLogicalTest.kt b/partiql-planner/src/test/kotlin/org/partiql/planner/internal/typer/logical/OpLogicalTest.kt new file mode 100644 index 000000000..996c318bd --- /dev/null +++ b/partiql-planner/src/test/kotlin/org/partiql/planner/internal/typer/logical/OpLogicalTest.kt @@ -0,0 +1,84 @@ +package org.partiql.planner.internal.typer.logical + +import org.junit.jupiter.api.DynamicContainer +import org.junit.jupiter.api.TestFactory +import org.partiql.planner.internal.typer.PartiQLTyperTestBase +import org.partiql.planner.internal.typer.isUnknown +import org.partiql.planner.util.allSupportedType +import org.partiql.planner.util.cartesianProduct +import org.partiql.types.StaticType +import java.util.stream.Stream + +// TODO: Finalize the semantics for logical operators when operand(s) contain MISSING 
+// For now Logical Operator (NOT, AND, OR) can take missing as operand(s) +// and never returns MISSING +class OpLogicalTest : PartiQLTyperTestBase() { + @TestFactory + fun not(): Stream { + val supportedType = listOf( + StaticType.BOOL, + StaticType.NULL, + StaticType.MISSING, + ) + + val unsupportedType = allSupportedType.filterNot { + supportedType.contains(it) + } + + val tests = listOf( + "expr-02", // Not + ).map { inputs.get("basics", it)!! } + + val argsMap = buildMap { + val successArgs = supportedType.map { t -> listOf(t) }.toSet() + successArgs.forEach { args: List -> + val arg = args.first() + if (arg.isUnknown()) { + (this[TestResult.Success(StaticType.NULL)] ?: setOf(args)).let { + put(TestResult.Success(StaticType.NULL), it + setOf(args)) + } + } else { + (this[TestResult.Success(StaticType.BOOL)] ?: setOf(args)).let { + put(TestResult.Success(StaticType.BOOL), it + setOf(args)) + } + } + Unit + } + + put(TestResult.Failure, unsupportedType.map { t -> listOf(t) }.toSet()) + } + + return super.testGen("not", tests, argsMap) + } + + // TODO: There is no good way to have the inferencer to distinguish whether the logical operator returns + // NULL, OR BOOL, OR UnionOf(Bool, NULL), other than have a lookup table in the inferencer. + @TestFactory + fun booleanConnective(): Stream { + val supportedType = listOf( + StaticType.BOOL, + StaticType.NULL, + StaticType.MISSING + ) + + val tests = listOf( + "expr-00", // OR + "expr-01", // AND + ).map { inputs.get("basics", it)!! 
} + + val argsMap = buildMap { + val successArgs = cartesianProduct(supportedType, supportedType) + val failureArgs = cartesianProduct( + allSupportedType, + allSupportedType + ).filterNot { + successArgs.contains(it) + }.toSet() + + put(TestResult.Success(StaticType.unionOf(StaticType.BOOL, StaticType.NULL)), successArgs) + put(TestResult.Failure, failureArgs) + } + + return super.testGen("booleanConnective", tests, argsMap) + } +} diff --git a/partiql-planner/src/test/kotlin/org/partiql/planner/internal/typer/operator/OpArithmeticTest.kt b/partiql-planner/src/test/kotlin/org/partiql/planner/internal/typer/operator/OpArithmeticTest.kt new file mode 100644 index 000000000..940aa1dd2 --- /dev/null +++ b/partiql-planner/src/test/kotlin/org/partiql/planner/internal/typer/operator/OpArithmeticTest.kt @@ -0,0 +1,63 @@ +package org.partiql.planner.internal.typer.operator + +import org.junit.jupiter.api.DynamicContainer +import org.junit.jupiter.api.TestFactory +import org.partiql.planner.internal.typer.PartiQLTyperTestBase +import org.partiql.planner.util.CastType +import org.partiql.planner.util.allNumberType +import org.partiql.planner.util.allSupportedType +import org.partiql.planner.util.cartesianProduct +import org.partiql.planner.util.castTable +import org.partiql.types.StaticType +import java.util.stream.Stream + +class OpArithmeticTest : PartiQLTyperTestBase() { + @TestFactory + fun arithmetic(): Stream { + val tests = listOf( + "expr-37", + "expr-38", + "expr-39", + "expr-40", + "expr-41", + ).map { inputs.get("basics", it)!! 
} + + val argsMap: Map>> = buildMap { + val successArgs = (allNumberType + listOf(StaticType.NULL)) + .let { cartesianProduct(it, it) } + val failureArgs = cartesianProduct( + allSupportedType, + allSupportedType + ).filterNot { + successArgs.contains(it) + }.toSet() + + successArgs.forEach { args: List -> + val arg0 = args.first() + val arg1 = args[1] + if (args.contains(StaticType.NULL)) { + (this[TestResult.Success(StaticType.NULL)] ?: setOf(args)).let { + put(TestResult.Success(StaticType.NULL), it + setOf(args)) + } + } else if (arg0 == arg1) { + (this[TestResult.Success(arg1)] ?: setOf(args)).let { + put(TestResult.Success(arg1), it + setOf(args)) + } + } else if (castTable(arg1, arg0) == CastType.COERCION) { + (this[TestResult.Success(arg0)] ?: setOf(args)).let { + put(TestResult.Success(arg0), it + setOf(args)) + } + } else { + (this[TestResult.Success(arg1)] ?: setOf(args)).let { + put(TestResult.Success(arg1), it + setOf(args)) + } + } + Unit + } + + put(TestResult.Failure, failureArgs) + } + + return super.testGen("arithmetic", tests, argsMap) + } +} diff --git a/partiql-planner/src/test/kotlin/org/partiql/planner/internal/typer/operator/OpBitwiseAndTest.kt b/partiql-planner/src/test/kotlin/org/partiql/planner/internal/typer/operator/OpBitwiseAndTest.kt new file mode 100644 index 000000000..398ebe805 --- /dev/null +++ b/partiql-planner/src/test/kotlin/org/partiql/planner/internal/typer/operator/OpBitwiseAndTest.kt @@ -0,0 +1,58 @@ +package org.partiql.planner.internal.typer.operator + +import org.junit.jupiter.api.DynamicContainer +import org.junit.jupiter.api.TestFactory +import org.partiql.planner.internal.typer.PartiQLTyperTestBase +import org.partiql.planner.util.CastType +import org.partiql.planner.util.allIntType +import org.partiql.planner.util.allSupportedType +import org.partiql.planner.util.cartesianProduct +import org.partiql.planner.util.castTable +import org.partiql.types.StaticType +import java.util.stream.Stream + +class OpBitwiseAndTest : 
PartiQLTyperTestBase() { + @TestFactory + fun bitwiseAnd(): Stream { + val tests = listOf( + "expr-36" + ).map { inputs.get("basics", it)!! } + + val argsMap = buildMap { + val successArgs = (allIntType + listOf(StaticType.NULL)) + .let { cartesianProduct(it, it) } + val failureArgs = cartesianProduct( + allSupportedType, + allSupportedType + ).filterNot { + successArgs.contains(it) + }.toSet() + + successArgs.forEach { args: List -> + val arg0 = args.first() + val arg1 = args[1] + if (args.contains(StaticType.NULL)) { + (this[TestResult.Success(StaticType.NULL)] ?: setOf(args)).let { + put(TestResult.Success(StaticType.NULL), it + setOf(args)) + } + } else if (arg0 == arg1) { + (this[TestResult.Success(arg1)] ?: setOf(args)).let { + put(TestResult.Success(arg1), it + setOf(args)) + } + } else if (castTable(arg1, arg0) == CastType.COERCION) { + (this[TestResult.Success(arg0)] ?: setOf(args)).let { + put(TestResult.Success(arg0), it + setOf(args)) + } + } else { + (this[TestResult.Success(arg1)] ?: setOf(args)).let { + put(TestResult.Success(arg1), it + setOf(args)) + } + } + Unit + } + put(TestResult.Failure, failureArgs) + } + + return super.testGen("bitwise_and", tests, argsMap) + } +} diff --git a/partiql-planner/src/test/kotlin/org/partiql/planner/internal/typer/operator/OpConcatTest.kt b/partiql-planner/src/test/kotlin/org/partiql/planner/internal/typer/operator/OpConcatTest.kt new file mode 100644 index 000000000..b445d8181 --- /dev/null +++ b/partiql-planner/src/test/kotlin/org/partiql/planner/internal/typer/operator/OpConcatTest.kt @@ -0,0 +1,58 @@ +package org.partiql.planner.internal.typer.operator + +import org.junit.jupiter.api.DynamicContainer +import org.junit.jupiter.api.TestFactory +import org.partiql.planner.internal.typer.PartiQLTyperTestBase +import org.partiql.planner.util.CastType +import org.partiql.planner.util.allSupportedType +import org.partiql.planner.util.allTextType +import org.partiql.planner.util.cartesianProduct +import 
org.partiql.planner.util.castTable +import org.partiql.types.StaticType +import java.util.stream.Stream + +class OpConcatTest : PartiQLTyperTestBase() { + @TestFactory + fun concat(): Stream { + val tests = listOf( + "expr-35" + ).map { inputs.get("basics", it)!! } + + val argsMap = buildMap { + val successArgs = (allTextType + listOf(StaticType.NULL)) + .let { cartesianProduct(it, it) } + val failureArgs = cartesianProduct( + allSupportedType, + allSupportedType + ).filterNot { + successArgs.contains(it) + }.toSet() + + successArgs.forEach { args: List -> + val arg0 = args.first() + val arg1 = args[1] + if (args.contains(StaticType.NULL)) { + (this[TestResult.Success(StaticType.NULL)] ?: setOf(args)).let { + put(TestResult.Success(StaticType.NULL), it + setOf(args)) + } + } else if (arg0 == arg1) { + (this[TestResult.Success(arg1)] ?: setOf(args)).let { + put(TestResult.Success(arg1), it + setOf(args)) + } + } else if (castTable(arg1, arg0) == CastType.COERCION) { + (this[TestResult.Success(arg0)] ?: setOf(args)).let { + put(TestResult.Success(arg0), it + setOf(args)) + } + } else { + (this[TestResult.Success(arg1)] ?: setOf(args)).let { + put(TestResult.Success(arg1), it + setOf(args)) + } + } + Unit + } + put(TestResult.Failure, failureArgs) + } + + return super.testGen("concat", tests, argsMap) + } +} diff --git a/partiql-planner/src/test/kotlin/org/partiql/planner/internal/typer/predicate/OpBetweenTest.kt b/partiql-planner/src/test/kotlin/org/partiql/planner/internal/typer/predicate/OpBetweenTest.kt new file mode 100644 index 000000000..59844fa4d --- /dev/null +++ b/partiql-planner/src/test/kotlin/org/partiql/planner/internal/typer/predicate/OpBetweenTest.kt @@ -0,0 +1,57 @@ +package org.partiql.planner.internal.typer.predicate + +import org.junit.jupiter.api.DynamicContainer +import org.junit.jupiter.api.TestFactory +import org.partiql.planner.internal.typer.PartiQLTyperTestBase +import org.partiql.planner.util.allNumberType +import 
org.partiql.planner.util.allSupportedType +import org.partiql.planner.util.cartesianProduct +import org.partiql.types.StaticType +import java.util.stream.Stream + +// TODO: Finalize the semantics for Between operator when operands contain MISSING +// For now, Between propagates MISSING. +class OpBetweenTest : PartiQLTyperTestBase() { + @TestFactory + fun between(): Stream { + val tests = listOf( + "expr-34", + ).map { inputs.get("basics", it)!! } + + val argsMap = buildMap { + val successArgs = + cartesianProduct( + allNumberType + listOf(StaticType.NULL), + allNumberType + listOf(StaticType.NULL), + allNumberType + listOf(StaticType.NULL), + ) + + val failureArgs = cartesianProduct( + allSupportedType, + allSupportedType, + allSupportedType + ).filterNot { + successArgs.contains(it) + }.toSet() + + successArgs.forEach { args: List -> + val arg0 = args.first() + val arg1 = args[1] + val arg2 = args[2] + if (args.contains(StaticType.NULL)) { + (this[TestResult.Success(StaticType.NULL)] ?: setOf(args)).let { + put(TestResult.Success(StaticType.NULL), it + setOf(args)) + } + } else { + (this[TestResult.Success(StaticType.BOOL)] ?: setOf(args)).let { + put(TestResult.Success(StaticType.BOOL), it + setOf(args)) + } + } + Unit + } + put(TestResult.Failure, failureArgs) + } + + return super.testGen("between", tests, argsMap) + } +} diff --git a/partiql-planner/src/test/kotlin/org/partiql/planner/internal/typer/predicate/OpComparisonTest.kt b/partiql-planner/src/test/kotlin/org/partiql/planner/internal/typer/predicate/OpComparisonTest.kt new file mode 100644 index 000000000..5ad77f979 --- /dev/null +++ b/partiql-planner/src/test/kotlin/org/partiql/planner/internal/typer/predicate/OpComparisonTest.kt @@ -0,0 +1,101 @@ +package org.partiql.planner.internal.typer.predicate + +import org.junit.jupiter.api.DynamicContainer +import org.junit.jupiter.api.TestFactory +import org.partiql.planner.internal.typer.PartiQLTyperTestBase +import org.partiql.planner.util.CastType +import 
org.partiql.planner.util.allSupportedType +import org.partiql.planner.util.cartesianProduct +import org.partiql.planner.util.castTable +import org.partiql.types.StaticType +import java.util.stream.Stream + +// TODO : Behavior when Missing is one operand needs to be finalized +// For now, equal function does not propagates MISSING. +class OpComparisonTest : PartiQLTyperTestBase() { + @TestFactory + fun eq(): Stream { + val tests = listOf( + "expr-07", // Equal + "expr-08", // Not Equal != + "expr-09", // Not Equal <> + ).map { inputs.get("basics", it)!! } + val argsMap = buildMap { + val successArgs = cartesianProduct(allSupportedType, allSupportedType) + + successArgs.forEach { args: List -> + if (args.contains(StaticType.MISSING)) { + (this[TestResult.Success(StaticType.NULL)] ?: setOf(args)).let { + put(TestResult.Success(StaticType.NULL), it + setOf(args)) + } + } else if (args.contains(StaticType.NULL)) { + (this[TestResult.Success(StaticType.NULL)] ?: setOf(args)).let { + put(TestResult.Success(StaticType.NULL), it + setOf(args)) + } + } else { + (this[TestResult.Success(StaticType.BOOL)] ?: setOf(args)).let { + put(TestResult.Success(StaticType.BOOL), it + setOf(args)) + } + } + put(TestResult.Failure, emptySet>()) + } + } + + return super.testGen("eq", tests, argsMap) + } + + @TestFactory + fun comparison(): Stream { + val tests = listOf( + "expr-03", // Less than TODO: Less than currently only support numeric type + "expr-04", // Less than or equal TODO: Less than or equal currently only support numeric type + "expr-05", // Bigger than TODO: Bigger than currently only support numeric type + "expr-06", // Bigger than or equal TODO: Bigger than or equal currently only support numeric type + + ).map { inputs.get("basics", it)!! 
} + + val argsMap = buildMap { + val successArgs = + cartesianProduct( + StaticType.NUMERIC.allTypes + listOf(StaticType.NULL), + StaticType.NUMERIC.allTypes + listOf(StaticType.NULL) + ) + val failureArgs = cartesianProduct( + allSupportedType, + allSupportedType, + ).filterNot { + successArgs.contains(it) + }.toSet() + + successArgs.forEach { args: List -> + val arg0 = args.first() + val arg1 = args[1] + if (args.contains(StaticType.MISSING)) { + (this[TestResult.Success(StaticType.MISSING)] ?: setOf(args)).let { + put(TestResult.Success(StaticType.MISSING), it + setOf(args)) + } + } else if (args.contains(StaticType.NULL)) { + (this[TestResult.Success(StaticType.NULL)] ?: setOf(args)).let { + put(TestResult.Success(StaticType.NULL), it + setOf(args)) + } + } else if (arg0 == arg1) { + (this[TestResult.Success(StaticType.BOOL)] ?: setOf(args)).let { + put(TestResult.Success(StaticType.BOOL), it + setOf(args)) + } + } else if (castTable(arg1, arg0) == CastType.COERCION) { + (this[TestResult.Success(StaticType.BOOL)] ?: setOf(args)).let { + put(TestResult.Success(StaticType.BOOL), it + setOf(args)) + } + } else { + (this[TestResult.Success(StaticType.BOOL)] ?: setOf(args)).let { + put(TestResult.Success(StaticType.BOOL), it + setOf(args)) + } + } + Unit + } + put(TestResult.Failure, failureArgs) + } + + return super.testGen("comparison", tests, argsMap) + } +} diff --git a/partiql-planner/src/test/kotlin/org/partiql/planner/internal/typer/predicate/OpInTest.kt b/partiql-planner/src/test/kotlin/org/partiql/planner/internal/typer/predicate/OpInTest.kt new file mode 100644 index 000000000..04ee00a47 --- /dev/null +++ b/partiql-planner/src/test/kotlin/org/partiql/planner/internal/typer/predicate/OpInTest.kt @@ -0,0 +1,81 @@ +package org.partiql.planner.internal.typer.predicate + +import org.junit.jupiter.api.DynamicContainer +import org.junit.jupiter.api.TestFactory +import org.partiql.planner.internal.typer.PartiQLTyperTestBase +import 
org.partiql.planner.util.allCollectionType +import org.partiql.planner.util.allSupportedType +import org.partiql.planner.util.cartesianProduct +import org.partiql.types.MissingType +import org.partiql.types.StaticType +import java.util.stream.Stream + +class OpInTest : PartiQLTyperTestBase() { + + @TestFactory + fun inSingleArg(): Stream { + val tests = listOf( + "expr-30", // IN ( true ) + ).map { inputs.get("basics", it)!! } + + val argsMap = buildMap { + val successArgs = + allSupportedType + .filterNot { it is MissingType } + .map { t -> listOf(t) } + .toSet() + + successArgs.forEach { args: List -> + if (args.contains(StaticType.NULL)) { + (this[TestResult.Success(StaticType.NULL)] ?: setOf(args)).let { + put(TestResult.Success(StaticType.NULL), it + setOf(args)) + } + } else { + (this[TestResult.Success(StaticType.BOOL)] ?: setOf(args)).let { + put(TestResult.Success(StaticType.BOOL), it + setOf(args)) + } + } + Unit + } + put(TestResult.Failure, emptySet>()) + } + + return super.testGen("in", tests, argsMap) + } + + @TestFactory + fun inDoubleArg(): Stream { + val tests = listOf( + "expr-31", // t1 IN t2 + ).map { inputs.get("basics", it)!! 
} + + val argsMap = buildMap { + val successArgs = cartesianProduct( + allSupportedType.filterNot { it is MissingType }, + (allCollectionType + listOf(StaticType.NULL)) + ) + val failureArgs = cartesianProduct( + allSupportedType, + allSupportedType, + ).filterNot { + successArgs.contains(it) + }.toSet() + + successArgs.forEach { args: List -> + if (args.contains(StaticType.NULL)) { + (this[TestResult.Success(StaticType.NULL)] ?: setOf(args)).let { + put(TestResult.Success(StaticType.NULL), it + setOf(args)) + } + } else { + (this[TestResult.Success(StaticType.BOOL)] ?: setOf(args)).let { + put(TestResult.Success(StaticType.BOOL), it + setOf(args)) + } + } + Unit + } + put(TestResult.Failure, failureArgs) + } + + return super.testGen("in", tests, argsMap) + } +} diff --git a/partiql-planner/src/test/kotlin/org/partiql/planner/internal/typer/predicate/OpIsMissingTest.kt b/partiql-planner/src/test/kotlin/org/partiql/planner/internal/typer/predicate/OpIsMissingTest.kt new file mode 100644 index 000000000..e85d6d5c2 --- /dev/null +++ b/partiql-planner/src/test/kotlin/org/partiql/planner/internal/typer/predicate/OpIsMissingTest.kt @@ -0,0 +1,28 @@ +package org.partiql.planner.internal.typer.predicate + +import org.junit.jupiter.api.DynamicContainer +import org.junit.jupiter.api.TestFactory +import org.partiql.planner.internal.typer.PartiQLTyperTestBase +import org.partiql.planner.util.allSupportedType +import org.partiql.types.StaticType +import java.util.stream.Stream + +class OpIsMissingTest : PartiQLTyperTestBase() { + @TestFactory + fun isMissing(): Stream { + val tests = listOf( + "expr-11" // IS MISSING + ).map { inputs.get("basics", it)!! 
} + + val argsMap = buildMap { + val successArgs = allSupportedType.flatMap { t -> + setOf(listOf(t)) + }.toSet() + + put(TestResult.Success(StaticType.BOOL), successArgs) + put(TestResult.Failure, emptySet>()) + } + + return super.testGen("isMissing", tests, argsMap) + } +} diff --git a/partiql-planner/src/test/kotlin/org/partiql/planner/internal/typer/predicate/OpIsNullTest.kt b/partiql-planner/src/test/kotlin/org/partiql/planner/internal/typer/predicate/OpIsNullTest.kt new file mode 100644 index 000000000..cb806fec1 --- /dev/null +++ b/partiql-planner/src/test/kotlin/org/partiql/planner/internal/typer/predicate/OpIsNullTest.kt @@ -0,0 +1,30 @@ +package org.partiql.planner.internal.typer.predicate + +import org.junit.jupiter.api.DynamicContainer +import org.junit.jupiter.api.TestFactory +import org.partiql.planner.internal.typer.PartiQLTyperTestBase +import org.partiql.planner.util.allSupportedType +import org.partiql.types.StaticType +import java.util.stream.Stream + +// TODO: Finalize the semantics for IS NULL operator when operand is MISSING. +// For now, the IS NULL function can take missing as a operand, and returns TRUE. +class OpIsNullTest : PartiQLTyperTestBase() { + @TestFactory + fun isNull(): Stream { + val tests = listOf( + "expr-10", // IS NULL + ).map { inputs.get("basics", it)!! 
} + + val argsMap = buildMap { + val successArgs = allSupportedType.flatMap { t -> + setOf(listOf(t)) + }.toSet() + + put(TestResult.Success(StaticType.BOOL), successArgs) + put(TestResult.Failure, emptySet>()) + } + + return super.testGen("isNull", tests, argsMap) + } +} diff --git a/partiql-planner/src/test/kotlin/org/partiql/planner/internal/typer/predicate/OpLikeTest.kt b/partiql-planner/src/test/kotlin/org/partiql/planner/internal/typer/predicate/OpLikeTest.kt new file mode 100644 index 000000000..8f7ec051f --- /dev/null +++ b/partiql-planner/src/test/kotlin/org/partiql/planner/internal/typer/predicate/OpLikeTest.kt @@ -0,0 +1,81 @@ +package org.partiql.planner.internal.typer.predicate + +import org.junit.jupiter.api.DynamicContainer +import org.junit.jupiter.api.TestFactory +import org.partiql.planner.internal.typer.PartiQLTyperTestBase +import org.partiql.planner.util.allSupportedType +import org.partiql.planner.util.allTextType +import org.partiql.planner.util.cartesianProduct +import org.partiql.types.StaticType +import java.util.stream.Stream + +class OpLikeTest : PartiQLTyperTestBase() { + @TestFactory + fun likeDoubleArg(): Stream { + val tests = listOf( + "expr-32", // t1 LIKE t2 + ).map { inputs.get("basics", it)!! 
} + + val argsMap = buildMap { + val successArgs = (allTextType + listOf(StaticType.NULL)) + .let { cartesianProduct(it, it) } + val failureArgs = cartesianProduct( + allSupportedType, + allSupportedType + ).filterNot { + successArgs.contains(it) + }.toSet() + + successArgs.forEach { args: List -> + if (args.contains(StaticType.NULL)) { + (this[TestResult.Success(StaticType.NULL)] ?: setOf(args)).let { + put(TestResult.Success(StaticType.NULL), it + setOf(args)) + } + } else { + (this[TestResult.Success(StaticType.BOOL)] ?: setOf(args)).let { + put(TestResult.Success(StaticType.BOOL), it + setOf(args)) + } + } + Unit + } + put(TestResult.Failure, failureArgs) + } + + return super.testGen("like", tests, argsMap) + } + + @TestFactory + fun likeTripleArg(): Stream { + val tests = listOf( + "expr-33", // t1 LIKE t2 ESCAPE t3 + ).map { inputs.get("basics", it)!! } + + val argsMap = buildMap { + val successArgs = (allTextType + listOf(StaticType.NULL)) + .let { cartesianProduct(it, it, it) } + val failureArgs = cartesianProduct( + allSupportedType, + allSupportedType, + allSupportedType, + ).filterNot { + successArgs.contains(it) + }.toSet() + + successArgs.forEach { args: List -> + if (args.contains(StaticType.NULL)) { + (this[TestResult.Success(StaticType.NULL)] ?: setOf(args)).let { + put(TestResult.Success(StaticType.NULL), it + setOf(args)) + } + } else { + (this[TestResult.Success(StaticType.BOOL)] ?: setOf(args)).let { + put(TestResult.Success(StaticType.BOOL), it + setOf(args)) + } + } + Unit + } + put(TestResult.Failure, failureArgs) + } + + return super.testGen("like", tests, argsMap) + } +} diff --git a/partiql-planner/src/test/kotlin/org/partiql/planner/internal/typer/predicate/OpTypeAssertionTest.kt b/partiql-planner/src/test/kotlin/org/partiql/planner/internal/typer/predicate/OpTypeAssertionTest.kt new file mode 100644 index 000000000..d418b7b31 --- /dev/null +++ 
b/partiql-planner/src/test/kotlin/org/partiql/planner/internal/typer/predicate/OpTypeAssertionTest.kt @@ -0,0 +1,31 @@ +package org.partiql.planner.internal.typer.predicate + +import org.junit.jupiter.api.DynamicContainer +import org.junit.jupiter.api.TestFactory +import org.partiql.planner.internal.typer.PartiQLTyperTestBase +import org.partiql.planner.util.allSupportedType +import org.partiql.types.MissingType +import org.partiql.types.StaticType +import java.util.stream.Stream + +class OpTypeAssertionTest : PartiQLTyperTestBase() { + @TestFactory + fun typeAssertion(): Stream { + val tests = buildList { + (12..29).forEach { + this.add("expr-$it") + } + }.map { inputs.get("basics", it)!! } + + val argsMap = buildMap { + val successArgs = allSupportedType.filterNot { it is MissingType }.flatMap { t -> + setOf(listOf(t)) + }.toSet() + val failureArgs = setOf(listOf(MissingType)) + put(TestResult.Success(StaticType.BOOL), successArgs) + put(TestResult.Failure, failureArgs) + } + + return super.testGen("type-assertion", tests, argsMap) + } +} diff --git a/partiql-planner/src/test/kotlin/org/partiql/planner/util/Utils.kt b/partiql-planner/src/test/kotlin/org/partiql/planner/util/Utils.kt new file mode 100644 index 000000000..a94fae460 --- /dev/null +++ b/partiql-planner/src/test/kotlin/org/partiql/planner/util/Utils.kt @@ -0,0 +1,217 @@ +package org.partiql.planner.util + +import org.partiql.types.AnyOfType +import org.partiql.types.AnyType +import org.partiql.types.BagType +import org.partiql.types.BlobType +import org.partiql.types.BoolType +import org.partiql.types.ClobType +import org.partiql.types.DateType +import org.partiql.types.DecimalType +import org.partiql.types.FloatType +import org.partiql.types.GraphType +import org.partiql.types.IntType +import org.partiql.types.ListType +import org.partiql.types.MissingType +import org.partiql.types.NullType +import org.partiql.types.SexpType +import org.partiql.types.StaticType +import org.partiql.types.StringType 
+import org.partiql.types.StructType +import org.partiql.types.SymbolType +import org.partiql.types.TimeType +import org.partiql.types.TimestampType + +fun cartesianProduct(a: List, b: List, vararg lists: List): Set> = + (listOf(a, b).plus(lists)) + .fold(listOf(listOf())) { acc, set -> + acc.flatMap { list -> set.map { element -> list + element } } + }.toSet() + +val allSupportedType = StaticType.ALL_TYPES.filterNot { it == StaticType.GRAPH } + +val allSupportedTypeNotUnknown = allSupportedType.filterNot { it == StaticType.MISSING || it == StaticType.NULL } + +val allCollectionType = listOf(StaticType.LIST, StaticType.BAG, StaticType.SEXP) + +val allTextType = listOf(StaticType.SYMBOL, StaticType.STRING, StaticType.CLOB) + +val allDateTimeType = listOf(StaticType.TIME, StaticType.TIMESTAMP, StaticType.DATE) + +val allNumberType = StaticType.NUMERIC.allTypes + +val allIntType = listOf(StaticType.INT2, StaticType.INT4, StaticType.INT8, StaticType.INT) + +enum class CastType { + COERCION, // lossless + EXPLICIT, // lossy + UNSAFE // fail +} + +val castTable: ((StaticType, StaticType) -> CastType) = { from, to -> + when (from) { + is AnyOfType -> CastType.UNSAFE + is AnyType -> + when (to) { + is AnyType -> CastType.COERCION + else -> CastType.UNSAFE + } + is BlobType -> + when (to) { + is BlobType -> CastType.COERCION + else -> CastType.UNSAFE + } + is BoolType -> + when (to) { + is BoolType, is DecimalType, is FloatType, is IntType -> CastType.COERCION + is StringType, is SymbolType -> CastType.COERCION + else -> CastType.UNSAFE + } + is ClobType -> + when (to) { + is ClobType -> CastType.COERCION + else -> CastType.UNSAFE + } + is BagType -> when (to) { + is BagType -> CastType.COERCION + else -> CastType.UNSAFE + } + is ListType -> when (to) { + is BagType -> CastType.COERCION + else -> CastType.UNSAFE + } + is SexpType -> when (to) { + is BagType -> CastType.COERCION + else -> CastType.UNSAFE + } + is DateType -> when (to) { + is BagType -> CastType.COERCION + 
else -> CastType.UNSAFE + } + is DecimalType -> { + when (val fromPrecisionScaleConstraint = from.precisionScaleConstraint) { + is DecimalType.PrecisionScaleConstraint.Unconstrained -> { + when (to) { + is DecimalType -> { + when (to.precisionScaleConstraint) { + // to arbitrary precision decimal + is DecimalType.PrecisionScaleConstraint.Unconstrained -> CastType.COERCION + // to fixed precision decimal + is DecimalType.PrecisionScaleConstraint.Constrained -> CastType.EXPLICIT + } + } + is FloatType, is IntType -> CastType.EXPLICIT + else -> CastType.UNSAFE + } + } + is DecimalType.PrecisionScaleConstraint.Constrained -> { + // from fixed precision decimal + when (to) { + is DecimalType -> { + when (val toPrecisionScaleConstraint = to.precisionScaleConstraint) { + is DecimalType.PrecisionScaleConstraint.Unconstrained -> CastType.COERCION + is DecimalType.PrecisionScaleConstraint.Constrained -> { + val toPrecision = toPrecisionScaleConstraint.precision + val toScale = toPrecisionScaleConstraint.scale + val fromPrecision = fromPrecisionScaleConstraint.precision + val fromScale = fromPrecisionScaleConstraint.scale + if (fromPrecision >= toPrecision && fromScale >= toScale) { + CastType.COERCION + } else CastType.EXPLICIT + } + } + } + is FloatType -> CastType.COERCION + is IntType -> CastType.EXPLICIT + else -> CastType.UNSAFE + } + } + } + } + is FloatType -> when (to) { + is DecimalType -> CastType.COERCION + is FloatType -> CastType.COERCION + else -> CastType.UNSAFE + } + is GraphType -> when (to) { + is GraphType -> CastType.COERCION + else -> CastType.UNSAFE + } + is IntType -> { + when (to) { + is IntType -> { + when (from.rangeConstraint) { + IntType.IntRangeConstraint.SHORT -> { + when (to.rangeConstraint) { + IntType.IntRangeConstraint.SHORT -> CastType.COERCION + IntType.IntRangeConstraint.INT4 -> CastType.COERCION + IntType.IntRangeConstraint.LONG -> CastType.COERCION + IntType.IntRangeConstraint.UNCONSTRAINED -> CastType.COERCION + } + } + 
IntType.IntRangeConstraint.INT4 -> { + when (to.rangeConstraint) { + IntType.IntRangeConstraint.SHORT -> CastType.UNSAFE + IntType.IntRangeConstraint.INT4 -> CastType.COERCION + IntType.IntRangeConstraint.LONG -> CastType.COERCION + IntType.IntRangeConstraint.UNCONSTRAINED -> CastType.COERCION + } + } + IntType.IntRangeConstraint.LONG -> { + when (to.rangeConstraint) { + IntType.IntRangeConstraint.SHORT -> CastType.UNSAFE + IntType.IntRangeConstraint.INT4 -> CastType.UNSAFE + IntType.IntRangeConstraint.LONG -> CastType.COERCION + IntType.IntRangeConstraint.UNCONSTRAINED -> CastType.COERCION + } + } + IntType.IntRangeConstraint.UNCONSTRAINED -> { + when (to.rangeConstraint) { + IntType.IntRangeConstraint.SHORT -> CastType.UNSAFE + IntType.IntRangeConstraint.INT4 -> CastType.UNSAFE + IntType.IntRangeConstraint.LONG -> CastType.UNSAFE + IntType.IntRangeConstraint.UNCONSTRAINED -> CastType.COERCION + } + } + } + } + is FloatType -> CastType.COERCION + is DecimalType -> CastType.COERCION + else -> CastType.UNSAFE + } + } + MissingType -> when (to) { + is MissingType -> CastType.COERCION + else -> CastType.UNSAFE + } + is NullType -> when (to) { + is NullType -> CastType.COERCION + else -> CastType.UNSAFE + } + is StringType -> + when (to) { + is StringType -> CastType.COERCION + is SymbolType -> CastType.EXPLICIT + is ClobType -> CastType.COERCION + else -> CastType.UNSAFE + } + is StructType -> when (to) { + is StructType -> CastType.COERCION + else -> CastType.UNSAFE + } + is SymbolType -> when (to) { + is SymbolType -> CastType.COERCION + is StringType -> CastType.COERCION + is ClobType -> CastType.COERCION + else -> CastType.UNSAFE + } + is TimeType -> when (to) { + is TimeType -> CastType.COERCION + else -> CastType.UNSAFE + } + is TimestampType -> when (to) { + is TimestampType -> CastType.COERCION + else -> CastType.UNSAFE + } + } +} diff --git a/partiql-planner/src/testFixtures/kotlin/org/partiql/planner/test/PartiQLTest.kt 
b/partiql-planner/src/testFixtures/kotlin/org/partiql/planner/test/PartiQLTest.kt new file mode 100644 index 000000000..0e8f762a2 --- /dev/null +++ b/partiql-planner/src/testFixtures/kotlin/org/partiql/planner/test/PartiQLTest.kt @@ -0,0 +1,38 @@ +/* + * Copyright 2019 Amazon.com, Inc. or its affiliates. All rights reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at: + * + * http://aws.amazon.com/apache2.0/ + * + * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific + * language governing permissions and limitations under the License. + */ + +package org.partiql.planner.test + +/** + * Holding class for test input. + * + * --#[example-test] + * SELECT * FROM example; + */ +public data class PartiQLTest( + public val key: Key, + public val statement: String, +) { + + /** + * Unique test identifier. + * + * @property group + * @property name + */ + public data class Key( + public val group: String, + public val name: String, + ) +} diff --git a/partiql-planner/src/testFixtures/kotlin/org/partiql/planner/test/PartiQLTestProvider.kt b/partiql-planner/src/testFixtures/kotlin/org/partiql/planner/test/PartiQLTestProvider.kt new file mode 100644 index 000000000..086571fe3 --- /dev/null +++ b/partiql-planner/src/testFixtures/kotlin/org/partiql/planner/test/PartiQLTestProvider.kt @@ -0,0 +1,118 @@ +/* + * Copyright 2019 Amazon.com, Inc. or its affiliates. All rights reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at: + * + * http://aws.amazon.com/apache2.0/ + * + * or in the "license" file accompanying this file. 
This file is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific + * language governing permissions and limitations under the License. + */ + +package org.partiql.planner.test + +import java.io.File +import java.io.InputStream +import java.nio.file.Path +import kotlin.io.path.toPath + +/** + * The PartiQLTestProvider is a simple utility for indexing SQL statements within files for re-use across library tests. + */ +class PartiQLTestProvider { + + /** + * Backing map for test input lookup. + */ + private val map: MutableMap = mutableMapOf() + + /** + * Default database of test inputs. + */ + private val default = this::class.java.getResource("/inputs")!!.toURI().toPath() + + /** + * Load test groups from a directory. + */ + public fun load(root: Path? = null) { + if (root != null) { + val dir = root.toFile() + dir.listFiles { f -> f.isDirectory }!!.map { + for (test in load(it)) { + map[test.key] = test + } + } + } else { + // user default resources + val inputStream = this::class.java.getResourceAsStream("/resource_path.txt")!! + inputStream.reader().forEachLine { path -> + val pathSteps = path.split("/") + val outMostDir = pathSteps.first() + if (outMostDir == "inputs") { + val group = pathSteps[pathSteps.size - 2] + val resource = this::class.java.getResourceAsStream("/$path")!! + for (test in load(group, resource)) { + map[test.key] = test + } + } + } + } + } + + /** + * Lookup a test by key + * + * @param key + * @return + */ + public operator fun get(key: PartiQLTest.Key): PartiQLTest? = map[key] + + /** + * Lookup a test by key parts + * + * @param group + * @param name + * @return + */ + public fun get(group: String, name: String): PartiQLTest? 
= get(PartiQLTest.Key(group, name)) + + // load all tests in a directory + private fun load(dir: File) = dir.listFiles()!!.flatMap { load(dir.name, it) } + + // load all tests in a file + private fun load(group: String, file: File): List = load(group, file.inputStream()) + + private fun load(group: String, inputStream: InputStream): List { + val tests = mutableListOf() + var name = "" + val statement = StringBuilder() + for (line in inputStream.reader().readLines()) { + + // start of test + if (line.startsWith("--#[") and line.endsWith("]")) { + name = line.substring(4, line.length - 1) + statement.clear() + } + + if (name.isNotEmpty() && line.isNotBlank()) { + // accumulating test statement + statement.appendLine(line) + } else { + // skip these lines + continue + } + + // Finish & Reset + if (line.endsWith(";")) { + val key = PartiQLTest.Key(group, name) + tests.add(PartiQLTest(key, statement.toString())) + name = "" + statement.clear() + } + } + return tests + } +} diff --git a/partiql-planner/src/testFixtures/resources/README.adoc b/partiql-planner/src/testFixtures/resources/README.adoc new file mode 100644 index 000000000..49ff686ca --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/README.adoc @@ -0,0 +1,139 @@ += PartiQL Test Input Queries +:toc: + +This package contains many input queries to be used in various parts of testing — parsing, planning, transpiling, evaluation. + +== Structure + +Query inputs are grouped by some general category which makes up a "test group". There are many opportunities to add more structure, but the premise +is to simply have a list of input queries, each of which has a unique identifier which is the group name along with a +special pragma comment (ex: `--#[my-test-name]`) above the statement. File names don't matter, only a suite directory +name is used. All statements within a group a treated as one big list regardless of file structure. 
You may choose +to group multiple short statements in one file, or split each query into its own file. + +NOTE: I have not modified the generated TPC-DS schemas so everything is NULLABLE which isn't always the case. See +https://www.tpc.org/tpc_documents_current_versions/pdf/tpc-ds_v2.6.0.pdf + + +== Schemas + +Catalog and schema definition subject to change once PartiQL Value Schema is actually defined. The current version +is an iteration of John's initial work on the local plugin and Avro IDL. I'm finding it verbose which can make things +harder to read. Then again this is a plugin used for testing, so the input schema doesn't matter that much so long as +we can easily parse it. + +.Scalar Types +[source,ion] +---- +// absent +null +missing + +// boolean +bool + +// numeric +int8 +int16 +int32 +int64 +int +decimal +(decimal p s) +float32 +float64 + +// char strings +(char n) +string +(string n) + +// bit strings +(bit n) +binary +(binary n) + +// byte strings +(byte n) +blob +blob(n) + +// date/time types +date +time +(time p) +(time p z) +timestamp +(timestamp p) +(timestamp p z) +(interval p) +---- + +.Collection Types +[source,ion] +---- +bag::[type] // ex: bag::[int] +list::[type] +sexp::[type] +---- + +.Struct Type +[source,ion] +---- +{ f_1: t_1, ..., f_n, t_n } + +open::{ ... } +closed::{ ... } + +// Examples +{ a: int, b: string } + +bag::[ + closed::{ + x: int32, + y: int32, + } +] +---- + +.Union Types +[source,ion] +---- +(t1 | t2 | ... ) + +// Examples +(int32 | null) +---- + +You can place a list of constraints on a type by wrapping in an outer constraint container. 
+ +.Constraints +[source,sql] +---- +CREATE TABLE foo ( + location STRING PRIMARY KEY + x INT + y INT +); +---- + +[source,ion] +---- +bag::[ + { + location: string, + x: int32, + y: int32, + }, + _::( + (primary_key location) + ) +] + +(define my_num + (int _::( + (min 1) + (max 1) + )) +) +---- diff --git a/partiql-planner/src/testFixtures/resources/catalogs/default/aggregations/T.ion b/partiql-planner/src/testFixtures/resources/catalogs/default/aggregations/T.ion new file mode 100644 index 000000000..4a4d5e4a7 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/catalogs/default/aggregations/T.ion @@ -0,0 +1,34 @@ +{ + type: "bag", + items: { + type: "struct", + constraints: [ closed, ordered, unique ], + fields: [ + { + name: "a", + type: "bool", + }, + { + name: "b", + type: "int32", + }, + { + name: "c", + type: "string", + }, + { + name: "d", + type: { + type: "struct", + constraints: [ closed, ordered, unique ], + fields: [ + { + name: "e", + type: "string" + } + ] + }, + } + ] + } +} diff --git a/partiql-lang/src/test/resources/catalogs/aws/b/b.ion b/partiql-planner/src/testFixtures/resources/catalogs/default/aws/b/b.ion similarity index 87% rename from partiql-lang/src/test/resources/catalogs/aws/b/b.ion rename to partiql-planner/src/testFixtures/resources/catalogs/default/aws/b/b.ion index c6ac00734..045423765 100644 --- a/partiql-lang/src/test/resources/catalogs/aws/b/b.ion +++ b/partiql-planner/src/testFixtures/resources/catalogs/default/aws/b/b.ion @@ -6,7 +6,7 @@ fields: [ { name: "identifier", - type: "int", + type: "int32", } ] } diff --git a/partiql-lang/src/test/resources/catalogs/aws/ddb/b.ion b/partiql-planner/src/testFixtures/resources/catalogs/default/aws/ddb/b.ion similarity index 100% rename from partiql-lang/src/test/resources/catalogs/aws/ddb/b.ion rename to partiql-planner/src/testFixtures/resources/catalogs/default/aws/ddb/b.ion diff --git a/partiql-planner/src/testFixtures/resources/catalogs/default/aws/ddb/persons.ion 
b/partiql-planner/src/testFixtures/resources/catalogs/default/aws/ddb/persons.ion new file mode 100644 index 000000000..7b0d9ba58 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/catalogs/default/aws/ddb/persons.ion @@ -0,0 +1,46 @@ +{ + type: "bag", + items: { + type: "struct", + constraints: [ + closed, + unique, + ordered + ], + fields: [ + { + name: "name", + type: [ + "string", + { + type: "struct", + fields: [ + { + name: "first", + type: "string" + }, + { + name: "last", + type: "string" + } + ] + }, + { + type: "struct", + constraints: [ + closed, + unique, + ordered + ], + fields: [ + { + name: "full_name", + type: "string" + }, + ] + }, + ] + }, + ] + } +} diff --git a/partiql-lang/src/test/resources/catalogs/aws/ddb/pets.ion b/partiql-planner/src/testFixtures/resources/catalogs/default/aws/ddb/pets.ion similarity index 90% rename from partiql-lang/src/test/resources/catalogs/aws/ddb/pets.ion rename to partiql-planner/src/testFixtures/resources/catalogs/default/aws/ddb/pets.ion index 14f1c1f5e..11ec58028 100644 --- a/partiql-lang/src/test/resources/catalogs/aws/ddb/pets.ion +++ b/partiql-planner/src/testFixtures/resources/catalogs/default/aws/ddb/pets.ion @@ -6,7 +6,7 @@ fields: [ { name: "id", - type: "int", + type: "int32", }, { name: "breed", diff --git a/partiql-lang/src/test/resources/catalogs/b/b/b.ion b/partiql-planner/src/testFixtures/resources/catalogs/default/b/b/b.ion similarity index 86% rename from partiql-lang/src/test/resources/catalogs/b/b/b.ion rename to partiql-planner/src/testFixtures/resources/catalogs/default/b/b/b.ion index 8508d37b6..0c5e6866b 100644 --- a/partiql-lang/src/test/resources/catalogs/b/b/b.ion +++ b/partiql-planner/src/testFixtures/resources/catalogs/default/b/b/b.ion @@ -10,14 +10,14 @@ fields: [ { name: "b", - type: "int", + type: "int32", } ] } }, { name: "c", - type: "int", + type: "int32", } ] } diff --git a/partiql-planner/src/testFixtures/resources/catalogs/default/b/b/c.ion 
b/partiql-planner/src/testFixtures/resources/catalogs/default/b/b/c.ion new file mode 100644 index 000000000..62d1536e7 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/catalogs/default/b/b/c.ion @@ -0,0 +1 @@ +"int32" diff --git a/partiql-lang/src/test/resources/catalogs/b/b/d.ion b/partiql-planner/src/testFixtures/resources/catalogs/default/b/b/d.ion similarity index 72% rename from partiql-lang/src/test/resources/catalogs/b/b/d.ion rename to partiql-planner/src/testFixtures/resources/catalogs/default/b/b/d.ion index c01b78f40..46f56fef3 100644 --- a/partiql-lang/src/test/resources/catalogs/b/b/d.ion +++ b/partiql-planner/src/testFixtures/resources/catalogs/default/b/b/d.ion @@ -1,10 +1,10 @@ { type: "struct", - constraints: [ ordered ], + constraints: [ closed, ordered ], fields: [ { name: "e", - type: "int", + type: "int32", }, { name: "e", @@ -13,7 +13,7 @@ fields: [ { name: "f", - type: "int", + type: "int32", } ] } diff --git a/partiql-lang/src/test/resources/catalogs/b/c/c.ion b/partiql-planner/src/testFixtures/resources/catalogs/default/b/c/c.ion similarity index 100% rename from partiql-lang/src/test/resources/catalogs/b/c/c.ion rename to partiql-planner/src/testFixtures/resources/catalogs/default/b/c/c.ion diff --git a/partiql-lang/src/test/resources/catalogs/db/markets/order_info.ion b/partiql-planner/src/testFixtures/resources/catalogs/default/db/markets/order_info.ion similarity index 85% rename from partiql-lang/src/test/resources/catalogs/db/markets/order_info.ion rename to partiql-planner/src/testFixtures/resources/catalogs/default/db/markets/order_info.ion index a31ace9aa..2f0658c16 100644 --- a/partiql-lang/src/test/resources/catalogs/db/markets/order_info.ion +++ b/partiql-planner/src/testFixtures/resources/catalogs/default/db/markets/order_info.ion @@ -5,15 +5,15 @@ fields: [ { name: "customer_id", - type: "int", + type: "int32", }, { name: "marketplace_id", - type: "int", + type: "int32", }, { name: "ship_option", type: "string", 
} ] -} +} \ No newline at end of file diff --git a/partiql-lang/src/test/resources/catalogs/db/markets/orders.ion b/partiql-planner/src/testFixtures/resources/catalogs/default/db/markets/orders.ion similarity index 86% rename from partiql-lang/src/test/resources/catalogs/db/markets/orders.ion rename to partiql-planner/src/testFixtures/resources/catalogs/default/db/markets/orders.ion index f42abdb9c..689a4e655 100644 --- a/partiql-lang/src/test/resources/catalogs/db/markets/orders.ion +++ b/partiql-planner/src/testFixtures/resources/catalogs/default/db/markets/orders.ion @@ -6,11 +6,11 @@ fields: [ { name: "customer_id", - type: "int", + type: "int32", }, { name: "marketplace_id", - type: "int", + type: "int32", }, { name: "ship_option", diff --git a/partiql-planner/src/testFixtures/resources/catalogs/default/pql/main/T.ion b/partiql-planner/src/testFixtures/resources/catalogs/default/pql/main/T.ion new file mode 100644 index 000000000..0c8a14920 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/catalogs/default/pql/main/T.ion @@ -0,0 +1,48 @@ +{ + type: "bag", + items: { + type: "struct", + constraints: [ closed, ordered, unique ], + fields: [ + { + name: "a", + type: "bool", + }, + { + name: "b", + type: "int32", + }, + { + name: "c", + type: "string", + }, + { + name: "d", + type: { + type: "struct", + constraints: [ closed, ordered, unique ], + fields: [ + { + name: "e", + type: "string" + } + ] + }, + }, + // path expression tests + { + name: "x", + type: "any", + }, + { + name: "z", + type: "string", + }, + // split + { + name: "v", + type: "string", + } + ] + } +} diff --git a/partiql-planner/src/testFixtures/resources/catalogs/default/pql/main/closed_duplicates_struct.ion b/partiql-planner/src/testFixtures/resources/catalogs/default/pql/main/closed_duplicates_struct.ion new file mode 100644 index 000000000..3344d2c07 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/catalogs/default/pql/main/closed_duplicates_struct.ion @@ -0,0 
+1,20 @@ +{ + type: "struct", + constraints: [ + closed + ], + fields: [ + { + name: "definition", + type: "string", + }, + { + name: "definition", + type: "float32", + }, + { + name: "DEFINITION", + type: "decimal" + } + ] +} diff --git a/partiql-planner/src/testFixtures/resources/catalogs/default/pql/main/closed_ordered_duplicates_struct.ion b/partiql-planner/src/testFixtures/resources/catalogs/default/pql/main/closed_ordered_duplicates_struct.ion new file mode 100644 index 000000000..f06fa335b --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/catalogs/default/pql/main/closed_ordered_duplicates_struct.ion @@ -0,0 +1,21 @@ +{ + type: "struct", + constraints: [ + closed, + ordered + ], + fields: [ + { + name: "definition", + type: "string", + }, + { + name: "definition", + type: "float32", + }, + { + name: "DEFINITION", + type: "decimal" + } + ] +} diff --git a/partiql-planner/src/testFixtures/resources/catalogs/default/pql/main/closed_union_duplicates_struct.ion b/partiql-planner/src/testFixtures/resources/catalogs/default/pql/main/closed_union_duplicates_struct.ion new file mode 100644 index 000000000..0c63d9b5b --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/catalogs/default/pql/main/closed_union_duplicates_struct.ion @@ -0,0 +1,43 @@ +[ + { + type: "struct", + constraints: [ + closed + ], + fields: [ + { + name: "definition", + type: "string", + }, + { + name: "definition", + type: "float32", + }, + { + name: "DEFINITION", + type: "decimal" + } + ] + }, + { + type: "struct", + constraints: [ + closed, + ordered + ], + fields: [ + { + name: "definition", + type: "int16", + }, + { + name: "definition", + type: "int32", + }, + { + name: "DEFINITION", + type: "int64" + } + ] + }, +] diff --git a/partiql-planner/src/testFixtures/resources/catalogs/default/pql/main/dogs.ion b/partiql-planner/src/testFixtures/resources/catalogs/default/pql/main/dogs.ion new file mode 100644 index 000000000..6b1187ea2 --- /dev/null +++ 
b/partiql-planner/src/testFixtures/resources/catalogs/default/pql/main/dogs.ion @@ -0,0 +1,24 @@ +{ + type: "list", + items: { + type: "struct", + constraints: [closed], + fields: [ + { + name: "breed", + type: "string", + }, + { + name: "avg_height", + type: "float32", + }, + { + name: "typical_allergies", + type: { + type: "list", + items: "string" + } + } + ] + } +} diff --git a/partiql-planner/src/testFixtures/resources/catalogs/default/pql/main/employer.ion b/partiql-planner/src/testFixtures/resources/catalogs/default/pql/main/employer.ion new file mode 100644 index 000000000..fdb0eea1c --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/catalogs/default/pql/main/employer.ion @@ -0,0 +1,32 @@ +{ + type: "struct", + name: "employer", + constraints: [ closed, unique, ordered ], + fields: [ + { + name: "name", + type: "string" + }, + { + name: "tax_id", + type: "int64" + }, + { + name: "address", + type: { + type: "struct", + constraints: [ closed, unique, ordered ], + fields: [ + { + name: "street", + type: "string" + }, + { + name: "zip", + type: "int32" + }, + ] + }, + }, + ] +} diff --git a/partiql-planner/src/testFixtures/resources/catalogs/default/pql/main/item.ion b/partiql-planner/src/testFixtures/resources/catalogs/default/pql/main/item.ion new file mode 100644 index 000000000..2622a7b13 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/catalogs/default/pql/main/item.ion @@ -0,0 +1,210 @@ +{ + type: "struct", + name: "item", + constraints: [ closed, unique, ordered ], + fields: [ + { + name: "i_item_sk", + type: [ + "string", + "null" + ] + }, + { + name: "i_item_id", + type: [ + "string", + "null" + ] + }, + { + name: "i_rec", + type: { + type: "struct", + constraints: [ closed, unique, ordered ], + fields: [ + { + name: "i_rec_start_date", + type: [ + "int64", + "null" + ] + }, + { + name: "i_rec_end_date", + type: [ + "int64", + "null" + ] + }, + ] + }, + }, + { + name: "i_item_desc", + type: [ + "string", + "null" + ] + }, + { 
+ name: "pricing", + type: { + type: "struct", + fields: [ + { + name: "i_current_price", + type: [ + "float64", + "null" + ] + }, + { + name: "i_wholesale_cost", + type: [ + "float64", + "null" + ] + }, + ] + }, + }, + { + name: "i_brand_id", + type: [ + "int32", + "null" + ] + }, + { + name: "i_brand", + type: [ + "string", + "null" + ] + }, + { + name: "i_class_id", + type: [ + "int32", + "null" + ] + }, + { + name: "i_class", + type: [ + "string", + "null" + ] + }, + { + name: "i_category_id", + type: [ + "int32", + "null" + ] + }, + { + name: "i_category", + type: [ + "string", + "null" + ] + }, + { + name: "i_manufact_id", + type: [ + "int32", + "null" + ] + }, + { + name: "i_manufact", + type: [ + "string", + "null" + ] + }, + { + name: "i_size", + type: [ + "string", + "null" + ] + }, + { + name: "i_formulation", + type: [ + "string", + "null" + ] + }, + { + name: "i_color", + type: [ + "string", + "null" + ] + }, + { + name: "i_units", + type: [ + "string", + "null" + ] + }, + { + name: "i_container", + type: [ + "string", + "null" + ] + }, + { + name: "manager_info", + type: { + type: "struct", + constraints: [ closed, unique, ordered ], + fields: [ + { + name: "manager_id", + type: [ + "int32" + ] + }, + { + name: "manager_name", + type: ["string", "null"] + }, + { + name: "manager_address", + type: [ + "null", + { + type: "struct", + constraints: [ closed, unique, ordered ], + fields: [ + { + name: "zip_code", + type: "int32" + }, + { + name: "house_number", + type: ["int32", "null"] + } + ] + } + ], + } + ] + }, + }, + { + name: "i_product_name", + type: [ + "string", + "null" + ] + } + ] +} diff --git a/partiql-planner/src/testFixtures/resources/catalogs/default/pql/main/open_duplicates_struct.ion b/partiql-planner/src/testFixtures/resources/catalogs/default/pql/main/open_duplicates_struct.ion new file mode 100644 index 000000000..1335da6a1 --- /dev/null +++ 
b/partiql-planner/src/testFixtures/resources/catalogs/default/pql/main/open_duplicates_struct.ion @@ -0,0 +1,17 @@ +{ + type: "struct", + fields: [ + { + name: "definition", + type: "string", + }, + { + name: "definition", + type: "float32", + }, + { + name: "DEFINITION", + type: "decimal" + } + ] +} diff --git a/partiql-planner/src/testFixtures/resources/catalogs/default/pql/main/os.ion b/partiql-planner/src/testFixtures/resources/catalogs/default/pql/main/os.ion new file mode 100644 index 000000000..ace60fe3b --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/catalogs/default/pql/main/os.ion @@ -0,0 +1,2 @@ +// String representing the current operating system +"string" diff --git a/partiql-planner/src/testFixtures/resources/catalogs/default/pql/main/person.ion b/partiql-planner/src/testFixtures/resources/catalogs/default/pql/main/person.ion new file mode 100644 index 000000000..70afca867 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/catalogs/default/pql/main/person.ion @@ -0,0 +1,35 @@ +{ + type: "struct", + name: "person", + constraints: [ closed, unique, ordered ], + fields: [ + { + name: "name", + type: { + type: "struct", + constraints: [ closed, unique, ordered ], + fields: [ + { + name: "first", + type: "string" + }, + { + name: "last", + type: "string" + }, + ] + }, + }, + { + name: "ssn", + type: "string" + }, + { + name: "employer", + type: [ + "string", + "null" + ] + } + ] +} diff --git a/partiql-planner/src/testFixtures/resources/catalogs/default/pql/numbers.ion b/partiql-planner/src/testFixtures/resources/catalogs/default/pql/numbers.ion new file mode 100644 index 000000000..14c03f713 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/catalogs/default/pql/numbers.ion @@ -0,0 +1,114 @@ +{ + type: "struct", + fields: [ + { + name: "nullable_int16s", + type: { + type: "list", + items: [ + "int16", + "null" + ] + } + }, + { + name: "nullable_int32s", + type: { + type: "list", + items: [ + "int32", + "null" + ] + } 
+ }, + { + name: "nullable_int64s", + type: { + type: "list", + items: [ + "int64", + "null" + ] + } + }, + { + name: "nullable_ints", + type: { + type: "list", + items: [ + "int", + "null" + ] + } + }, + { + name: "int16s", + type: { + type: "list", + items: "int16", + }, + }, + { + name: "int32s", + type: { + type: "list", + items: "int32", + }, + }, + { + name: "int64s", + type: { + type: "list", + items: "int64", + }, + }, + { + name: "ints", + type: { + type: "list", + items: "int", + }, + }, + { + name: "decimals", + type: { + type: "list", + items: "decimal", + }, + }, + { + name: "nullable_float32s", + type: { + type: "list", + items: [ + "float32", + "null" + ] + } + }, + { + name: "nullable_float64s", + type: { + type: "list", + items: [ + "float64", + "null" + ] + } + }, + { + name: "float32s", + type: { + type: "list", + items: "float32", + }, + }, + { + name: "float64s", + type: { + type: "list", + items: "float64", + }, + } + ], +} diff --git a/partiql-planner/src/testFixtures/resources/catalogs/default/pql/points.ion b/partiql-planner/src/testFixtures/resources/catalogs/default/pql/points.ion new file mode 100644 index 000000000..0d18fe569 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/catalogs/default/pql/points.ion @@ -0,0 +1,21 @@ +{ + type: "bag", + items: { + type: "struct", + constraints: [closed], + fields: [ + { + name: "x", + type: "float32", + }, + { + name: "y", + type: "float32", + }, + { + name: "z", + type: "float32", + }, + ] + } +} diff --git a/partiql-planner/src/testFixtures/resources/catalogs/default/subqueries/S.ion b/partiql-planner/src/testFixtures/resources/catalogs/default/subqueries/S.ion new file mode 100644 index 000000000..6491b5611 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/catalogs/default/subqueries/S.ion @@ -0,0 +1,21 @@ +{ + type: "list", + items: { + type: "struct", + constraints: [ closed, unique ], + fields: [ + { + name: "a", + type: "int32" + }, + { + name: "b", + type: "int32" 
+ }, + { + name: "c", + type: "int32" + }, + ] + } +} diff --git a/partiql-planner/src/testFixtures/resources/catalogs/default/subqueries/T.ion b/partiql-planner/src/testFixtures/resources/catalogs/default/subqueries/T.ion new file mode 100644 index 000000000..9782f5316 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/catalogs/default/subqueries/T.ion @@ -0,0 +1,21 @@ +{ + type: "list", + items: { + type: "struct", + constraints: [ closed, unique ], + fields: [ + { + name: "x", + type: "int32" + }, + { + name: "y", + type: "int32" + }, + { + name: "z", + type: "int32" + }, + ] + } +} diff --git a/partiql-planner/src/testFixtures/resources/catalogs/tpc_ds/call_center.ion b/partiql-planner/src/testFixtures/resources/catalogs/tpc_ds/call_center.ion new file mode 100644 index 000000000..ea3dc3c06 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/catalogs/tpc_ds/call_center.ion @@ -0,0 +1,219 @@ +call_center::{ + type: "bag", + items: { + type: "struct", + fields: [ + { + name: "cc_call_center_sk", + type: "string" + }, + { + name: "cc_call_center_id", + type: "string" + }, + { + name: "cc_rec_start_date", + type: [ + "int64", + "null" + ] + }, + { + name: "cc_rec_end_date", + type: [ + "int64", + "null" + ] + }, + { + name: "cc_closed_date_sk", + type: [ + "int32", + "null" + ] + }, + { + name: "cc_open_date_sk", + type: [ + "int32", + "null" + ] + }, + { + name: "cc_name", + type: [ + "string", + "null" + ] + }, + { + name: "cc_class", + type: [ + "string", + "null" + ] + }, + { + name: "cc_employees", + type: [ + "int32", + "null" + ] + }, + { + name: "cc_sq_ft", + type: [ + "int32", + "null" + ] + }, + { + name: "cc_hours", + type: [ + "string", + "null" + ] + }, + { + name: "cc_manager", + type: [ + "string", + "null" + ] + }, + { + name: "cc_mkt_id", + type: [ + "int32", + "null" + ] + }, + { + name: "cc_mkt_class", + type: [ + "string", + "null" + ] + }, + { + name: "cc_mkt_desc", + type: [ + "string", + "null" + ] + }, + { + name: 
"cc_market_manager", + type: [ + "string", + "null" + ] + }, + { + name: "cc_division", + type: [ + "int32", + "null" + ] + }, + { + name: "cc_division_name", + type: [ + "string", + "null" + ] + }, + { + name: "cc_company", + type: [ + "int32", + "null" + ] + }, + { + name: "cc_company_name", + type: [ + "string", + "null" + ] + }, + { + name: "cc_street_number", + type: [ + "string", + "null" + ] + }, + { + name: "cc_street_name", + type: [ + "string", + "null" + ] + }, + { + name: "cc_street_type", + type: [ + "string", + "null" + ] + }, + { + name: "cc_suite_number", + type: [ + "string", + "null" + ] + }, + { + name: "cc_city", + type: [ + "string", + "null" + ] + }, + { + name: "cc_county", + type: [ + "string", + "null" + ] + }, + { + name: "cc_state", + type: [ + "string", + "null" + ] + }, + { + name: "cc_zip", + type: [ + "string", + "null" + ] + }, + { + name: "cc_country", + type: [ + "string", + "null" + ] + }, + { + name: "cc_gmt_offset", + type: [ + "float64", + "null" + ] + }, + { + name: "cc_tax_percentage", + type: [ + "float64", + "null" + ] + } + ] + } +} diff --git a/partiql-planner/src/testFixtures/resources/catalogs/tpc_ds/catalog_page.ion b/partiql-planner/src/testFixtures/resources/catalogs/tpc_ds/catalog_page.ion new file mode 100644 index 000000000..272df0daa --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/catalogs/tpc_ds/catalog_page.ion @@ -0,0 +1,71 @@ +catalog_page::{ + type: "bag", + items: { + type: "struct", + fields: [ + { + name: "cp_catalog_page_sk", + type: [ + "string", + "null" + ] + }, + { + name: "cp_catalog_page_id", + type: [ + "string", + "null" + ] + }, + { + name: "cp_start_date_sk", + type: [ + "int32", + "null" + ] + }, + { + name: "cp_end_date_sk", + type: [ + "int32", + "null" + ] + }, + { + name: "cp_department", + type: [ + "string", + "null" + ] + }, + { + name: "cp_catalog_number", + type: [ + "int32", + "null" + ] + }, + { + name: "cp_catalog_page_number", + type: [ + "int32", + "null" + ] + }, 
+ { + name: "cp_description", + type: [ + "string", + "null" + ] + }, + { + name: "cp_type", + type: [ + "string", + "null" + ] + } + ] + } +} diff --git a/partiql-planner/src/testFixtures/resources/catalogs/tpc_ds/catalog_returns.ion b/partiql-planner/src/testFixtures/resources/catalogs/tpc_ds/catalog_returns.ion new file mode 100644 index 000000000..bd881456c --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/catalogs/tpc_ds/catalog_returns.ion @@ -0,0 +1,197 @@ +catalog_returns::{ + type: "bag", + items: { + type: "struct", + fields: [ + { + name: "cr_returned_date_sk", + type: [ + "string", + "null" + ] + }, + { + name: "cr_returned_time_sk", + type: [ + "string", + "null" + ] + }, + { + name: "cr_item_sk", + type: [ + "string", + "null" + ] + }, + { + name: "cr_refunded_customer_sk", + type: [ + "string", + "null" + ] + }, + { + name: "cr_refunded_cdemo_sk", + type: [ + "string", + "null" + ] + }, + { + name: "cr_refunded_hdemo_sk", + type: [ + "string", + "null" + ] + }, + { + name: "cr_refunded_addr_sk", + type: [ + "string", + "null" + ] + }, + { + name: "cr_returning_customer_sk", + type: [ + "string", + "null" + ] + }, + { + name: "cr_returning_cdemo_sk", + type: [ + "string", + "null" + ] + }, + { + name: "cr_returning_hdemo_sk", + type: [ + "string", + "null" + ] + }, + { + name: "cr_returning_addr_sk", + type: [ + "string", + "null" + ] + }, + { + name: "cr_call_center_sk", + type: [ + "string", + "null" + ] + }, + { + name: "cr_catalog_page_sk", + type: [ + "string", + "null" + ] + }, + { + name: "cr_ship_mode_sk", + type: [ + "string", + "null" + ] + }, + { + name: "cr_warehouse_sk", + type: [ + "string", + "null" + ] + }, + { + name: "cr_reason_sk", + type: [ + "string", + "null" + ] + }, + { + name: "cr_order_number", + type: [ + "string", + "null" + ] + }, + { + name: "cr_return_quantity", + type: [ + "int32", + "null" + ] + }, + { + name: "cr_return_amount", + type: [ + "float64", + "null" + ] + }, + { + name: "cr_return_tax", + type: 
[ + "float64", + "null" + ] + }, + { + name: "cr_return_amt_inc_tax", + type: [ + "float64", + "null" + ] + }, + { + name: "cr_fee", + type: [ + "float64", + "null" + ] + }, + { + name: "cr_return_ship_cost", + type: [ + "float64", + "null" + ] + }, + { + name: "cr_refunded_cash", + type: [ + "float64", + "null" + ] + }, + { + name: "cr_reversed_charge", + type: [ + "float64", + "null" + ] + }, + { + name: "cr_store_credit", + type: [ + "float64", + "null" + ] + }, + { + name: "cr_net_loss", + type: [ + "float64", + "null" + ] + } + ] + } +} diff --git a/partiql-planner/src/testFixtures/resources/catalogs/tpc_ds/catalog_sales.ion b/partiql-planner/src/testFixtures/resources/catalogs/tpc_ds/catalog_sales.ion new file mode 100644 index 000000000..b6f620e99 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/catalogs/tpc_ds/catalog_sales.ion @@ -0,0 +1,240 @@ +catalog_sales::{ + type: "bag", + items: { + type: "struct", + fields: [ + { + name: "cs_sold_date_sk", + type: [ + "string", + "null" + ] + }, + { + name: "cs_sold_time_sk", + type: [ + "string", + "null" + ] + }, + { + name: "cs_ship_date_sk", + type: [ + "string", + "null" + ] + }, + { + name: "cs_bill_customer_sk", + type: [ + "string", + "null" + ] + }, + { + name: "cs_bill_cdemo_sk", + type: [ + "string", + "null" + ] + }, + { + name: "cs_bill_hdemo_sk", + type: [ + "string", + "null" + ] + }, + { + name: "cs_bill_addr_sk", + type: [ + "string", + "null" + ] + }, + { + name: "cs_ship_customer_sk", + type: [ + "string", + "null" + ] + }, + { + name: "cs_ship_cdemo_sk", + type: [ + "string", + "null" + ] + }, + { + name: "cs_ship_hdemo_sk", + type: [ + "string", + "null" + ] + }, + { + name: "cs_ship_addr_sk", + type: [ + "string", + "null" + ] + }, + { + name: "cs_call_center_sk", + type: [ + "string", + "null" + ] + }, + { + name: "cs_catalog_page_sk", + type: [ + "string", + "null" + ] + }, + { + name: "cs_ship_mode_sk", + type: [ + "string", + "null" + ] + }, + { + name: "cs_warehouse_sk", + 
type: [ + "string", + "null" + ] + }, + { + name: "cs_item_sk", + type: "string" + }, + { + name: "cs_promo_sk", + type: [ + "string", + "null" + ] + }, + { + name: "cs_order_number", + type: "string" + }, + { + name: "cs_quantity", + type: [ + "int32", + "null" + ] + }, + { + name: "cs_wholesale_cost", + type: [ + "float64", + "null" + ] + }, + { + name: "cs_list_price", + type: [ + "float64", + "null" + ] + }, + { + name: "cs_sales_price", + type: [ + "float64", + "null" + ] + }, + { + name: "cs_ext_discount_amt", + type: [ + "float64", + "null" + ] + }, + { + name: "cs_ext_sales_price", + type: [ + "float64", + "null" + ] + }, + { + name: "cs_ext_wholesale_cost", + type: [ + "float64", + "null" + ] + }, + { + name: "cs_ext_list_price", + type: [ + "float64", + "null" + ] + }, + { + name: "cs_ext_tax", + type: [ + "float64", + "null" + ] + }, + { + name: "cs_coupon_amt", + type: [ + "float64", + "null" + ] + }, + { + name: "cs_ext_ship_cost", + type: [ + "float64", + "null" + ] + }, + { + name: "cs_net_paid", + type: [ + "float64", + "null" + ] + }, + { + name: "cs_net_paid_inc_tax", + type: [ + "float64", + "null" + ] + }, + { + name: "cs_net_paid_inc_ship", + type: [ + "float64", + "null" + ] + }, + { + name: "cs_net_paid_inc_ship_tax", + type: [ + "float64", + "null" + ] + }, + { + name: "cs_net_profit", + type: [ + "float64", + "null" + ] + } + ] + } +} diff --git a/partiql-planner/src/testFixtures/resources/catalogs/tpc_ds/customer.ion b/partiql-planner/src/testFixtures/resources/catalogs/tpc_ds/customer.ion new file mode 100644 index 000000000..55b76c52e --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/catalogs/tpc_ds/customer.ion @@ -0,0 +1,134 @@ +customer::{ + type: "bag", + items: { + type: "struct", + fields: [ + { + name: "c_customer_sk", + type: [ + "string", + "null" + ] + }, + { + name: "c_customer_id", + type: [ + "string", + "null" + ] + }, + { + name: "c_current_cdemo_sk", + type: [ + "string", + "null" + ] + }, + { + name: 
"c_current_hdemo_sk", + type: [ + "string", + "null" + ] + }, + { + name: "c_current_addr_sk", + type: [ + "string", + "null" + ] + }, + { + name: "c_first_shipto_date_sk", + type: [ + "string", + "null" + ] + }, + { + name: "c_first_sales_date_sk", + type: [ + "string", + "null" + ] + }, + { + name: "c_salutation", + type: [ + "string", + "null" + ] + }, + { + name: "c_first_name", + type: [ + "string", + "null" + ] + }, + { + name: "c_last_name", + type: [ + "string", + "null" + ] + }, + { + name: "c_preferred_cust_flag", + type: [ + "string", + "null" + ] + }, + { + name: "c_birth_day", + type: [ + "int32", + "null" + ] + }, + { + name: "c_birth_month", + type: [ + "int32", + "null" + ] + }, + { + name: "c_birth_year", + type: [ + "int32", + "null" + ] + }, + { + name: "c_birth_country", + type: [ + "string", + "null" + ] + }, + { + name: "c_login", + type: [ + "string", + "null" + ] + }, + { + name: "c_email_address", + type: [ + "string", + "null" + ] + }, + { + name: "c_last_review_date_sk", + type: [ + "string", + "null" + ] + } + ] + } +} diff --git a/partiql-planner/src/testFixtures/resources/catalogs/tpc_ds/customer_address.ion b/partiql-planner/src/testFixtures/resources/catalogs/tpc_ds/customer_address.ion new file mode 100644 index 000000000..7105c7d86 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/catalogs/tpc_ds/customer_address.ion @@ -0,0 +1,99 @@ +customer_address::{ + type: "bag", + items: { + type: "struct", + fields: [ + { + name: "ca_address_sk", + type: [ + "string", + "null" + ] + }, + { + name: "ca_address_id", + type: [ + "string", + "null" + ] + }, + { + name: "ca_street_number", + type: [ + "string", + "null" + ] + }, + { + name: "ca_street_name", + type: [ + "string", + "null" + ] + }, + { + name: "ca_street_type", + type: [ + "string", + "null" + ] + }, + { + name: "ca_suite_number", + type: [ + "string", + "null" + ] + }, + { + name: "ca_city", + type: [ + "string", + "null" + ] + }, + { + name: "ca_county", + type: [ 
+ "string", + "null" + ] + }, + { + name: "ca_state", + type: [ + "string", + "null" + ] + }, + { + name: "ca_zip", + type: [ + "string", + "null" + ] + }, + { + name: "ca_country", + type: [ + "string", + "null" + ] + }, + { + name: "ca_gmt_offset", + type: [ + "float64", + "null" + ] + }, + { + name: "ca_location_type", + type: [ + "string", + "null" + ] + } + ] + } +} diff --git a/partiql-planner/src/testFixtures/resources/catalogs/tpc_ds/customer_demographics.ion b/partiql-planner/src/testFixtures/resources/catalogs/tpc_ds/customer_demographics.ion new file mode 100644 index 000000000..9634b7762 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/catalogs/tpc_ds/customer_demographics.ion @@ -0,0 +1,71 @@ +customer_demographics::{ + type: "bag", + items: { + type: "struct", + fields: [ + { + name: "cd_demo_sk", + type: [ + "string", + "null" + ] + }, + { + name: "cd_gender", + type: [ + "string", + "null" + ] + }, + { + name: "cd_marital_status", + type: [ + "string", + "null" + ] + }, + { + name: "cd_education_status", + type: [ + "string", + "null" + ] + }, + { + name: "cd_purchase_estimate", + type: [ + "int32", + "null" + ] + }, + { + name: "cd_credit_rating", + type: [ + "string", + "null" + ] + }, + { + name: "cd_dep_count", + type: [ + "int32", + "null" + ] + }, + { + name: "cd_dep_employed_count", + type: [ + "int32", + "null" + ] + }, + { + name: "cd_dep_college_count", + type: [ + "int32", + "null" + ] + } + ] + } +} diff --git a/partiql-planner/src/testFixtures/resources/catalogs/tpc_ds/date_dim.ion b/partiql-planner/src/testFixtures/resources/catalogs/tpc_ds/date_dim.ion new file mode 100644 index 000000000..9ae5bb378 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/catalogs/tpc_ds/date_dim.ion @@ -0,0 +1,204 @@ +date_dim::{ + type: "bag", + items: { + type: "struct", + fields: [ + { + name: "d_date_sk", + type: [ + "string", + "null" + ] + }, + { + name: "d_date_id", + type: [ + "string", + "null" + ] + }, + { + name: 
"d_date", + type: [ + "int64", + "null" + ] + }, + { + name: "d_month_seq", + type: [ + "int32", + "null" + ] + }, + { + name: "d_week_seq", + type: [ + "int32", + "null" + ] + }, + { + name: "d_quarter_seq", + type: [ + "int32", + "null" + ] + }, + { + name: "d_year", + type: [ + "int32", + "null" + ] + }, + { + name: "d_dow", + type: [ + "int32", + "null" + ] + }, + { + name: "d_moy", + type: [ + "int32", + "null" + ] + }, + { + name: "d_dom", + type: [ + "int32", + "null" + ] + }, + { + name: "d_qoy", + type: [ + "int32", + "null" + ] + }, + { + name: "d_fy_year", + type: [ + "int32", + "null" + ] + }, + { + name: "d_fy_quarter_seq", + type: [ + "int32", + "null" + ] + }, + { + name: "d_fy_week_seq", + type: [ + "int32", + "null" + ] + }, + { + name: "d_day_name", + type: [ + "string", + "null" + ] + }, + { + name: "d_quarter_name", + type: [ + "string", + "null" + ] + }, + { + name: "d_holiday", + type: [ + "string", + "null" + ] + }, + { + name: "d_weekend", + type: [ + "string", + "null" + ] + }, + { + name: "d_following_holiday", + type: [ + "string", + "null" + ] + }, + { + name: "d_first_dom", + type: [ + "int32", + "null" + ] + }, + { + name: "d_last_dom", + type: [ + "int32", + "null" + ] + }, + { + name: "d_same_day_ly", + type: [ + "int32", + "null" + ] + }, + { + name: "d_same_day_lq", + type: [ + "int32", + "null" + ] + }, + { + name: "d_current_day", + type: [ + "string", + "null" + ] + }, + { + name: "d_current_week", + type: [ + "string", + "null" + ] + }, + { + name: "d_current_month", + type: [ + "string", + "null" + ] + }, + { + name: "d_current_quarter", + type: [ + "string", + "null" + ] + }, + { + name: "d_current_year", + type: [ + "string", + "null" + ] + } + ] + } +} diff --git a/partiql-planner/src/testFixtures/resources/catalogs/tpc_ds/dbgen_version.ion b/partiql-planner/src/testFixtures/resources/catalogs/tpc_ds/dbgen_version.ion new file mode 100644 index 000000000..1f66c8ea3 --- /dev/null +++ 
b/partiql-planner/src/testFixtures/resources/catalogs/tpc_ds/dbgen_version.ion @@ -0,0 +1,36 @@ +dbgen_version::{ + type: "bag", + items: { + type: "struct", + fields: [ + { + name: "dv_version", + type: [ + "string", + "null" + ] + }, + { + name: "dv_create_date", + type: [ + "int64", + "null" + ] + }, + { + name: "dv_create_time", + type: [ + "int64", + "null" + ] + }, + { + name: "dv_cmdline_args", + type: [ + "string", + "null" + ] + } + ] + } +} diff --git a/partiql-planner/src/testFixtures/resources/catalogs/tpc_ds/household_demographics.ion b/partiql-planner/src/testFixtures/resources/catalogs/tpc_ds/household_demographics.ion new file mode 100644 index 000000000..6f361cfb8 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/catalogs/tpc_ds/household_demographics.ion @@ -0,0 +1,43 @@ +household_demographics::{ + type: "bag", + items: { + type: "struct", + fields: [ + { + name: "hd_demo_sk", + type: [ + "string", + "null" + ] + }, + { + name: "hd_income_band_sk", + type: [ + "string", + "null" + ] + }, + { + name: "hd_buy_potential", + type: [ + "string", + "null" + ] + }, + { + name: "hd_dep_count", + type: [ + "int32", + "null" + ] + }, + { + name: "hd_vehicle_count", + type: [ + "int32", + "null" + ] + } + ] + } +} diff --git a/partiql-planner/src/testFixtures/resources/catalogs/tpc_ds/income_band.ion b/partiql-planner/src/testFixtures/resources/catalogs/tpc_ds/income_band.ion new file mode 100644 index 000000000..ccf094c26 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/catalogs/tpc_ds/income_band.ion @@ -0,0 +1,29 @@ +income_band::{ + type: "bag", + items: { + type: "struct", + fields: [ + { + name: "ib_income_band_sk", + type: [ + "string", + "null" + ] + }, + { + name: "ib_lower_bound", + type: [ + "int32", + "null" + ] + }, + { + name: "ib_upper_bound", + type: [ + "int32", + "null" + ] + } + ] + } +} diff --git a/partiql-planner/src/testFixtures/resources/catalogs/tpc_ds/inventory.ion 
b/partiql-planner/src/testFixtures/resources/catalogs/tpc_ds/inventory.ion new file mode 100644 index 000000000..6552d9111 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/catalogs/tpc_ds/inventory.ion @@ -0,0 +1,36 @@ +inventory::{ + type: "bag", + items: { + type: "struct", + fields: [ + { + name: "inv_date_sk", + type: [ + "string", + "null" + ] + }, + { + name: "inv_item_sk", + type: [ + "string", + "null" + ] + }, + { + name: "inv_warehouse_sk", + type: [ + "string", + "null" + ] + }, + { + name: "inv_quantity_on_hand", + type: [ + "int32", + "null" + ] + } + ] + } +} diff --git a/partiql-planner/src/testFixtures/resources/catalogs/tpc_ds/item.ion b/partiql-planner/src/testFixtures/resources/catalogs/tpc_ds/item.ion new file mode 100644 index 000000000..b6cb30233 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/catalogs/tpc_ds/item.ion @@ -0,0 +1,162 @@ +item::{ + type: "bag", + items: { + type: "struct", + fields: [ + { + name: "i_item_sk", + type: [ + "string", + "null" + ] + }, + { + name: "i_item_id", + type: [ + "string", + "null" + ] + }, + { + name: "i_rec_start_date", + type: [ + "int64", + "null" + ] + }, + { + name: "i_rec_end_date", + type: [ + "int64", + "null" + ] + }, + { + name: "i_item_desc", + type: [ + "string", + "null" + ] + }, + { + name: "i_current_price", + type: [ + "float64", + "null" + ] + }, + { + name: "i_wholesale_cost", + type: [ + "float64", + "null" + ] + }, + { + name: "i_brand_id", + type: [ + "int32", + "null" + ] + }, + { + name: "i_brand", + type: [ + "string", + "null" + ] + }, + { + name: "i_class_id", + type: [ + "int32", + "null" + ] + }, + { + name: "i_class", + type: [ + "string", + "null" + ] + }, + { + name: "i_category_id", + type: [ + "int32", + "null" + ] + }, + { + name: "i_category", + type: [ + "string", + "null" + ] + }, + { + name: "i_manufact_id", + type: [ + "int32", + "null" + ] + }, + { + name: "i_manufact", + type: [ + "string", + "null" + ] + }, + { + name: "i_size", + type: 
[ + "string", + "null" + ] + }, + { + name: "i_formulation", + type: [ + "string", + "null" + ] + }, + { + name: "i_color", + type: [ + "string", + "null" + ] + }, + { + name: "i_units", + type: [ + "string", + "null" + ] + }, + { + name: "i_container", + type: [ + "string", + "null" + ] + }, + { + name: "i_manager_id", + type: [ + "int32", + "null" + ] + }, + { + name: "i_product_name", + type: [ + "string", + "null" + ] + } + ] + } +} diff --git a/partiql-planner/src/testFixtures/resources/catalogs/tpc_ds/promotion.ion b/partiql-planner/src/testFixtures/resources/catalogs/tpc_ds/promotion.ion new file mode 100644 index 000000000..be727f19f --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/catalogs/tpc_ds/promotion.ion @@ -0,0 +1,141 @@ +promotion::{ + type: "bag", + items: { + type: "struct", + fields: [ + { + name: "p_promo_sk", + type: [ + "string", + "null" + ] + }, + { + name: "p_promo_id", + type: [ + "string", + "null" + ] + }, + { + name: "p_start_date_sk", + type: [ + "string", + "null" + ] + }, + { + name: "p_end_date_sk", + type: [ + "string", + "null" + ] + }, + { + name: "p_item_sk", + type: [ + "string", + "null" + ] + }, + { + name: "p_cost", + type: [ + "float64", + "null" + ] + }, + { + name: "p_response_targe", + type: [ + "int32", + "null" + ] + }, + { + name: "p_promo_name", + type: [ + "string", + "null" + ] + }, + { + name: "p_channel_dmail", + type: [ + "string", + "null" + ] + }, + { + name: "p_channel_email", + type: [ + "string", + "null" + ] + }, + { + name: "p_channel_catalog", + type: [ + "string", + "null" + ] + }, + { + name: "p_channel_tv", + type: [ + "string", + "null" + ] + }, + { + name: "p_channel_radio", + type: [ + "string", + "null" + ] + }, + { + name: "p_channel_press", + type: [ + "string", + "null" + ] + }, + { + name: "p_channel_event", + type: [ + "string", + "null" + ] + }, + { + name: "p_channel_demo", + type: [ + "string", + "null" + ] + }, + { + name: "p_channel_details", + type: [ + "string", + "null" 
+ ] + }, + { + name: "p_purpose", + type: [ + "string", + "null" + ] + }, + { + name: "p_discount_active", + type: [ + "string", + "null" + ] + } + ] + } +} diff --git a/partiql-planner/src/testFixtures/resources/catalogs/tpc_ds/reason.ion b/partiql-planner/src/testFixtures/resources/catalogs/tpc_ds/reason.ion new file mode 100644 index 000000000..4223e44e7 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/catalogs/tpc_ds/reason.ion @@ -0,0 +1,29 @@ +reason::{ + type: "bag", + items: { + type: "struct", + fields: [ + { + name: "r_reason_sk", + type: [ + "string", + "null" + ] + }, + { + name: "r_reason_id", + type: [ + "string", + "null" + ] + }, + { + name: "r_reason_desc", + type: [ + "string", + "null" + ] + } + ] + } +} diff --git a/partiql-planner/src/testFixtures/resources/catalogs/tpc_ds/ship_mode.ion b/partiql-planner/src/testFixtures/resources/catalogs/tpc_ds/ship_mode.ion new file mode 100644 index 000000000..08ae3fbcb --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/catalogs/tpc_ds/ship_mode.ion @@ -0,0 +1,50 @@ +ship_mode::{ + type: "bag", + items: { + type: "struct", + fields: [ + { + name: "sm_ship_mode_sk", + type: [ + "string", + "null" + ] + }, + { + name: "sm_ship_mode_id", + type: [ + "string", + "null" + ] + }, + { + name: "sm_type", + type: [ + "string", + "null" + ] + }, + { + name: "sm_code", + type: [ + "string", + "null" + ] + }, + { + name: "sm_carrier", + type: [ + "string", + "null" + ] + }, + { + name: "sm_contract", + type: [ + "string", + "null" + ] + } + ] + } +} diff --git a/partiql-planner/src/testFixtures/resources/catalogs/tpc_ds/store.ion b/partiql-planner/src/testFixtures/resources/catalogs/tpc_ds/store.ion new file mode 100644 index 000000000..b0a3566a7 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/catalogs/tpc_ds/store.ion @@ -0,0 +1,205 @@ +store::{ + type: "bag", + items: { + type: "struct", + fields: [ + { + name: "s_store_sk", + type: "string", + }, + { + name: "s_store_id", + 
type: "string" + }, + { + name: "s_rec_start_date", + type: [ + "date", + "null" + ] + }, + { + name: "s_rec_end_date", + type: [ + "date", + "null" + ] + }, + { + name: "s_closed_date_sk", + type: [ + "string", + "null" + ] + }, + { + name: "s_store_name", + type: [ + "string", + "null" + ] + }, + { + name: "s_number_employees", + type: [ + "int32", + "null" + ] + }, + { + name: "s_floor_space", + type: [ + "int32", + "null" + ] + }, + { + name: "s_hours", + type: [ + "string", + "null" + ] + }, + { + name: "s_manager", + type: [ + "string", + "null" + ] + }, + { + name: "s_market_id", + type: [ + "int32", + "null" + ] + }, + { + name: "s_geography_class", + type: [ + "string", + "null" + ] + }, + { + name: "s_market_desc", + type: [ + "string", + "null" + ] + }, + { + name: "s_market_manager", + type: [ + "string", + "null" + ] + }, + { + name: "s_division_id", + type: [ + "int32", + "null" + ] + }, + { + name: "s_division_name", + type: [ + "string", + "null" + ] + }, + { + name: "s_company_id", + type: [ + "int32", + "null" + ] + }, + { + name: "s_company_name", + type: [ + "string", + "null" + ] + }, + { + name: "s_street_number", + type: [ + "string", + "null" + ] + }, + { + name: "s_street_name", + type: [ + "string", + "null" + ] + }, + { + name: "s_street_type", + type: [ + "string", + "null" + ] + }, + { + name: "s_suite_number", + type: [ + "string", + "null" + ] + }, + { + name: "s_city", + type: [ + "string", + "null" + ] + }, + { + name: "s_county", + type: [ + "string", + "null" + ] + }, + { + name: "s_state", + type: [ + "string", + "null" + ] + }, + { + name: "s_zip", + type: [ + "string", + "null" + ] + }, + { + name: "s_country", + type: [ + "string", + "null" + ] + }, + { + name: "s_gmt_offset", + type: [ + "float64", + "null" + ] + }, + { + name: "s_tax_precentage", + type: [ + "float64", + "null" + ] + } + ] + } +} diff --git a/partiql-planner/src/testFixtures/resources/catalogs/tpc_ds/store_returns.ion 
b/partiql-planner/src/testFixtures/resources/catalogs/tpc_ds/store_returns.ion new file mode 100644 index 000000000..55347e7e4 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/catalogs/tpc_ds/store_returns.ion @@ -0,0 +1,142 @@ +store_returns::{ + type: "bag", + items: { + type: "struct", + fields: [ + { + name: "sr_returned_date_sk", + type: [ + "string", + "null" + ] + }, + { + name: "sr_return_time_sk", + type: [ + "string", + "null" + ] + }, + { + name: "sr_item_sk", + type: "string" + }, + { + name: "sr_customer_sk", + type: [ + "string", + "null" + ] + }, + { + name: "sr_cdemo_sk", + type: [ + "string", + "null" + ] + }, + { + name: "sr_hdemo_sk", + type: [ + "string", + "null" + ] + }, + { + name: "sr_addr_sk", + type: [ + "string", + "null" + ] + }, + { + name: "sr_store_sk", + type: [ + "string", + "null" + ] + }, + { + name: "sr_reason_sk", + type: [ + "string", + "null" + ] + }, + { + name: "sr_ticket_number", + type: "string", + }, + { + name: "sr_return_quantity", + type: [ + "int32", + "null" + ] + }, + { + name: "sr_return_amt", + type: [ + "float64", + "null" + ] + }, + { + name: "sr_return_tax", + type: [ + "float64", + "null" + ] + }, + { + name: "sr_return_amt_inc_tax", + type: [ + "float64", + "null" + ] + }, + { + name: "sr_fee", + type: [ + "float64", + "null" + ] + }, + { + name: "sr_return_ship_cost", + type: [ + "float64", + "null" + ] + }, + { + name: "sr_refunded_cash", + type: [ + "float64", + "null" + ] + }, + { + name: "sr_reversed_charge", + type: [ + "float64", + "null" + ] + }, + { + name: "sr_store_credit", + type: [ + "float64", + "null" + ] + }, + { + name: "sr_net_loss", + type: [ + "float64", + "null" + ] + } + ] + } +} diff --git a/partiql-planner/src/testFixtures/resources/catalogs/tpc_ds/store_sales.ion b/partiql-planner/src/testFixtures/resources/catalogs/tpc_ds/store_sales.ion new file mode 100644 index 000000000..64676ed27 --- /dev/null +++ 
b/partiql-planner/src/testFixtures/resources/catalogs/tpc_ds/store_sales.ion @@ -0,0 +1,163 @@ +store_sales::{ + type: "bag", + items: { + type: "struct", + fields: [ + { + name: "ss_sold_date_sk", + type: [ + "date", + "null" + ] + }, + { + name: "ss_sold_time_sk", + type: [ + "string", + "null" + ] + }, + { + name: "ss_item_sk", + type: "string" + }, + { + name: "ss_customer_sk", + type: [ + "string", + "null" + ] + }, + { + name: "ss_cdemo_sk", + type: [ + "string", + "null" + ] + }, + { + name: "ss_hdemo_sk", + type: [ + "string", + "null" + ] + }, + { + name: "ss_addr_sk", + type: [ + "string", + "null" + ] + }, + { + name: "ss_store_sk", + type: [ + "string", + "null" + ] + }, + { + name: "ss_promo_sk", + type: [ + "string", + "null" + ] + }, + { + name: "ss_ticket_number", + type: "string" + }, + { + name: "ss_quantity", + type: [ + "int32", + "null" + ] + }, + { + name: "ss_wholesale_cost", + type: [ + "float64", + "null" + ] + }, + { + name: "ss_list_price", + type: [ + "float64", + "null" + ] + }, + { + name: "ss_sales_price", + type: [ + "float64", + "null" + ] + }, + { + name: "ss_ext_discount_amt", + type: [ + "float64", + "null" + ] + }, + { + name: "ss_ext_sales_price", + type: [ + "float64", + "null" + ] + }, + { + name: "ss_ext_wholesale_cost", + type: [ + "float64", + "null" + ] + }, + { + name: "ss_ext_list_price", + type: [ + "float64", + "null" + ] + }, + { + name: "ss_ext_tax", + type: [ + "float64", + "null" + ] + }, + { + name: "ss_coupon_amt", + type: [ + "float64", + "null" + ] + }, + { + name: "ss_net_paid", + type: [ + "float64", + "null" + ] + }, + { + name: "ss_net_paid_inc_tax", + type: [ + "float64", + "null" + ] + }, + { + name: "ss_net_profit", + type: [ + "float64", + "null" + ] + } + ] + } +} diff --git a/partiql-planner/src/testFixtures/resources/catalogs/tpc_ds/time_dim.ion b/partiql-planner/src/testFixtures/resources/catalogs/tpc_ds/time_dim.ion new file mode 100644 index 000000000..55b5ce4f4 --- /dev/null +++ 
b/partiql-planner/src/testFixtures/resources/catalogs/tpc_ds/time_dim.ion @@ -0,0 +1,78 @@ +time_dim::{ + type: "bag", + items: { + type: "struct", + fields: [ + { + name: "t_time_sk", + type: [ + "string", + "null" + ] + }, + { + name: "t_time_id", + type: [ + "string", + "null" + ] + }, + { + name: "t_time", + type: [ + "int32", + "null" + ] + }, + { + name: "t_hour", + type: [ + "int32", + "null" + ] + }, + { + name: "t_minute", + type: [ + "int32", + "null" + ] + }, + { + name: "t_second", + type: [ + "int32", + "null" + ] + }, + { + name: "t_am_pm", + type: [ + "string", + "null" + ] + }, + { + name: "t_shift", + type: [ + "string", + "null" + ] + }, + { + name: "t_sub_shift", + type: [ + "string", + "null" + ] + }, + { + name: "t_meal_time", + type: [ + "string", + "null" + ] + } + ] + } +} diff --git a/partiql-planner/src/testFixtures/resources/catalogs/tpc_ds/warehouse.ion b/partiql-planner/src/testFixtures/resources/catalogs/tpc_ds/warehouse.ion new file mode 100644 index 000000000..170d486ce --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/catalogs/tpc_ds/warehouse.ion @@ -0,0 +1,106 @@ +warehouse::{ + type: "bag", + items: { + type: "struct", + fields: [ + { + name: "w_warehouse_sk", + type: [ + "string", + "null" + ] + }, + { + name: "w_warehouse_id", + type: [ + "string", + "null" + ] + }, + { + name: "w_warehouse_name", + type: [ + "string", + "null" + ] + }, + { + name: "w_warehouse_sq_ft", + type: [ + "int32", + "null" + ] + }, + { + name: "w_street_number", + type: [ + "string", + "null" + ] + }, + { + name: "w_street_name", + type: [ + "string", + "null" + ] + }, + { + name: "w_street_type", + type: [ + "string", + "null" + ] + }, + { + name: "w_suite_number", + type: [ + "string", + "null" + ] + }, + { + name: "w_city", + type: [ + "string", + "null" + ] + }, + { + name: "w_county", + type: [ + "string", + "null" + ] + }, + { + name: "w_state", + type: [ + "string", + "null" + ] + }, + { + name: "w_zip", + type: [ + "string", + 
"null" + ] + }, + { + name: "w_country", + type: [ + "string", + "null" + ] + }, + { + name: "w_gmt_offset", + type: [ + "float64", + "null" + ] + } + ] + } +} diff --git a/partiql-planner/src/testFixtures/resources/catalogs/tpc_ds/web_page.ion b/partiql-planner/src/testFixtures/resources/catalogs/tpc_ds/web_page.ion new file mode 100644 index 000000000..56b421dc5 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/catalogs/tpc_ds/web_page.ion @@ -0,0 +1,106 @@ +web_page::{ + type: "bag", + items: { + type: "struct", + fields: [ + { + name: "wp_web_page_sk", + type: [ + "string", + "null" + ] + }, + { + name: "wp_web_page_id", + type: [ + "string", + "null" + ] + }, + { + name: "wp_rec_start_date", + type: [ + "int64", + "null" + ] + }, + { + name: "wp_rec_end_date", + type: [ + "int64", + "null" + ] + }, + { + name: "wp_creation_date_sk", + type: [ + "string", + "null" + ] + }, + { + name: "wp_access_date_sk", + type: [ + "string", + "null" + ] + }, + { + name: "wp_autogen_flag", + type: [ + "string", + "null" + ] + }, + { + name: "wp_customer_sk", + type: [ + "string", + "null" + ] + }, + { + name: "wp_url", + type: [ + "string", + "null" + ] + }, + { + name: "wp_type", + type: [ + "string", + "null" + ] + }, + { + name: "wp_char_count", + type: [ + "int32", + "null" + ] + }, + { + name: "wp_link_count", + type: [ + "int32", + "null" + ] + }, + { + name: "wp_image_count", + type: [ + "int32", + "null" + ] + }, + { + name: "wp_max_ad_count", + type: [ + "int32", + "null" + ] + } + ] + } +} diff --git a/partiql-planner/src/testFixtures/resources/catalogs/tpc_ds/web_returns.ion b/partiql-planner/src/testFixtures/resources/catalogs/tpc_ds/web_returns.ion new file mode 100644 index 000000000..336733c5c --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/catalogs/tpc_ds/web_returns.ion @@ -0,0 +1,176 @@ +web_returns::{ + type: "bag", + items: { + type: "struct", + fields: [ + { + name: "wr_returned_date_sk", + type: [ + "string", + "null" + ] + }, 
+ { + name: "wr_returned_time_sk", + type: [ + "string", + "null" + ] + }, + { + name: "wr_item_sk", + type: [ + "string", + "null" + ] + }, + { + name: "wr_refunded_customer_sk", + type: [ + "string", + "null" + ] + }, + { + name: "wr_refunded_cdemo_sk", + type: [ + "string", + "null" + ] + }, + { + name: "wr_refunded_hdemo_sk", + type: [ + "string", + "null" + ] + }, + { + name: "wr_refunded_addr_sk", + type: [ + "string", + "null" + ] + }, + { + name: "wr_returning_customer_sk", + type: [ + "string", + "null" + ] + }, + { + name: "wr_returning_cdemo_sk", + type: [ + "string", + "null" + ] + }, + { + name: "wr_returning_hdemo_sk", + type: [ + "string", + "null" + ] + }, + { + name: "wr_returning_addr_sk", + type: [ + "string", + "null" + ] + }, + { + name: "wr_web_page_sk", + type: [ + "string", + "null" + ] + }, + { + name: "wr_reason_sk", + type: [ + "string", + "null" + ] + }, + { + name: "wr_order_number", + type: [ + "string", + "null" + ] + }, + { + name: "wr_return_quantity", + type: [ + "int32", + "null" + ] + }, + { + name: "wr_return_amt", + type: [ + "float64", + "null" + ] + }, + { + name: "wr_return_tax", + type: [ + "float64", + "null" + ] + }, + { + name: "wr_return_amt_inc_tax", + type: [ + "float64", + "null" + ] + }, + { + name: "wr_fee", + type: [ + "float64", + "null" + ] + }, + { + name: "wr_return_ship_cost", + type: [ + "float64", + "null" + ] + }, + { + name: "wr_refunded_cash", + type: [ + "float64", + "null" + ] + }, + { + name: "wr_reversed_charge", + type: [ + "float64", + "null" + ] + }, + { + name: "wr_account_credit", + type: [ + "float64", + "null" + ] + }, + { + name: "wr_net_loss", + type: [ + "float64", + "null" + ] + } + ] + } +} diff --git a/partiql-planner/src/testFixtures/resources/catalogs/tpc_ds/web_sales.ion b/partiql-planner/src/testFixtures/resources/catalogs/tpc_ds/web_sales.ion new file mode 100644 index 000000000..cb1f35614 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/catalogs/tpc_ds/web_sales.ion 
@@ -0,0 +1,246 @@ +web_sales::{ + type: "bag", + items: { + type: "struct", + fields: [ + { + name: "ws_sold_date_sk", + type: [ + "string", + "null" + ] + }, + { + name: "ws_sold_time_sk", + type: [ + "string", + "null" + ] + }, + { + name: "ws_ship_date_sk", + type: [ + "string", + "null" + ] + }, + { + name: "ws_item_sk", + type: [ + "string", + "null" + ] + }, + { + name: "ws_bill_customer_sk", + type: [ + "string", + "null" + ] + }, + { + name: "ws_bill_cdemo_sk", + type: [ + "string", + "null" + ] + }, + { + name: "ws_bill_hdemo_sk", + type: [ + "string", + "null" + ] + }, + { + name: "ws_bill_addr_sk", + type: [ + "string", + "null" + ] + }, + { + name: "ws_ship_customer_sk", + type: [ + "string", + "null" + ] + }, + { + name: "ws_ship_cdemo_sk", + type: [ + "string", + "null" + ] + }, + { + name: "ws_ship_hdemo_sk", + type: [ + "string", + "null" + ] + }, + { + name: "ws_ship_addr_sk", + type: [ + "string", + "null" + ] + }, + { + name: "ws_web_page_sk", + type: [ + "string", + "null" + ] + }, + { + name: "ws_web_site_sk", + type: [ + "string", + "null" + ] + }, + { + name: "ws_ship_mode_sk", + type: [ + "string", + "null" + ] + }, + { + name: "ws_warehouse_sk", + type: [ + "string", + "null" + ] + }, + { + name: "ws_promo_sk", + type: [ + "string", + "null" + ] + }, + { + name: "ws_order_number", + type: [ + "string", + "null" + ] + }, + { + name: "ws_quantity", + type: [ + "int32", + "null" + ] + }, + { + name: "ws_wholesale_cost", + type: [ + "float64", + "null" + ] + }, + { + name: "ws_list_price", + type: [ + "float64", + "null" + ] + }, + { + name: "ws_sales_price", + type: [ + "float64", + "null" + ] + }, + { + name: "ws_ext_discount_amt", + type: [ + "float64", + "null" + ] + }, + { + name: "ws_ext_sales_price", + type: [ + "float64", + "null" + ] + }, + { + name: "ws_ext_wholesale_cost", + type: [ + "float64", + "null" + ] + }, + { + name: "ws_ext_list_price", + type: [ + "float64", + "null" + ] + }, + { + name: "ws_ext_tax", + type: [ + "float64", 
+ "null" + ] + }, + { + name: "ws_coupon_amt", + type: [ + "float64", + "null" + ] + }, + { + name: "ws_ext_ship_cost", + type: [ + "float64", + "null" + ] + }, + { + name: "ws_net_paid", + type: [ + "float64", + "null" + ] + }, + { + name: "ws_net_paid_inc_tax", + type: [ + "float64", + "null" + ] + }, + { + name: "ws_net_paid_inc_ship", + type: [ + "float64", + "null" + ] + }, + { + name: "ws_net_paid_inc_ship_tax", + type: [ + "float64", + "null" + ] + }, + { + name: "ws_net_profit", + type: [ + "float64", + "null" + ] + } + ] + } +} diff --git a/partiql-planner/src/testFixtures/resources/catalogs/tpc_ds/web_site.ion b/partiql-planner/src/testFixtures/resources/catalogs/tpc_ds/web_site.ion new file mode 100644 index 000000000..372c16bac --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/catalogs/tpc_ds/web_site.ion @@ -0,0 +1,190 @@ +web_site::{ + type: "bag", + items: { + type: "struct", + fields: [ + { + name: "web_site_sk", + type: [ + "string", + "null" + ] + }, + { + name: "web_site_id", + type: [ + "string", + "null" + ] + }, + { + name: "web_rec_start_date", + type: [ + "int64", + "null" + ] + }, + { + name: "web_rec_end_date", + type: [ + "int64", + "null" + ] + }, + { + name: "web_name", + type: [ + "string", + "null" + ] + }, + { + name: "web_open_date_sk", + type: [ + "string", + "null" + ] + }, + { + name: "web_close_date_sk", + type: [ + "string", + "null" + ] + }, + { + name: "web_class", + type: [ + "string", + "null" + ] + }, + { + name: "web_manager", + type: [ + "string", + "null" + ] + }, + { + name: "web_mkt_id", + type: [ + "int32", + "null" + ] + }, + { + name: "web_mkt_class", + type: [ + "string", + "null" + ] + }, + { + name: "web_mkt_desc", + type: [ + "string", + "null" + ] + }, + { + name: "web_market_manager", + type: [ + "string", + "null" + ] + }, + { + name: "web_company_id", + type: [ + "int32", + "null" + ] + }, + { + name: "web_company_name", + type: [ + "string", + "null" + ] + }, + { + name: "web_street_number", + 
type: [ + "string", + "null" + ] + }, + { + name: "web_street_name", + type: [ + "string", + "null" + ] + }, + { + name: "web_street_type", + type: [ + "string", + "null" + ] + }, + { + name: "web_suite_number", + type: [ + "string", + "null" + ] + }, + { + name: "web_city", + type: [ + "string", + "null" + ] + }, + { + name: "web_county", + type: [ + "string", + "null" + ] + }, + { + name: "web_state", + type: [ + "string", + "null" + ] + }, + { + name: "web_zip", + type: [ + "string", + "null" + ] + }, + { + name: "web_country", + type: [ + "string", + "null" + ] + }, + { + name: "web_gmt_offset", + type: [ + "float64", + "null" + ] + }, + { + name: "web_tax_percentage", + type: [ + "float64", + "null" + ] + } + ] + } +} diff --git a/partiql-planner/src/testFixtures/resources/catalogs/tpc_h/.gitkeep b/partiql-planner/src/testFixtures/resources/catalogs/tpc_h/.gitkeep new file mode 100644 index 000000000..e69de29bb diff --git a/partiql-planner/src/testFixtures/resources/inputs/basics/case.sql b/partiql-planner/src/testFixtures/resources/inputs/basics/case.sql new file mode 100644 index 000000000..f7099d53c --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/basics/case.sql @@ -0,0 +1,85 @@ +--#[case-00] +CASE + WHEN FALSE THEN 0 + WHEN TRUE THEN 1 + ELSE 2 +END; + +--#[case-01] +CASE + WHEN 1 = 2 THEN 0 + WHEN 2 = 3 THEN 1 + ELSE 3 +END; + +--#[case-02] +CASE 1 + WHEN 1 THEN 'MATCH!' + ELSE 'NO MATCH!' +END; + +--#[case-03] +CASE 'Hello World' + WHEN 'Hello World' THEN TRUE + ELSE FALSE +END; + +--#[case-04] +SELECT + CASE a + WHEN TRUE THEN 'a IS TRUE' + ELSE 'a MUST BE FALSE' + END AS result +FROM T; + +--#[case-05] +SELECT + CASE + WHEN a = TRUE THEN 'a IS TRUE' + ELSE 'a MUST BE FALSE' + END AS result +FROM T; + +--#[case-06] +SELECT + CASE b + WHEN 10 THEN 'b IS 10' + ELSE 'b IS NOT 10' + END AS result +FROM T; + +--#[case-07] +-- TODO: This is currently failing as we seemingly cannot search for a nested attribute of a global. 
+SELECT + CASE d.e + WHEN 'WATER' THEN 'd.e IS WATER' + ELSE 'd.e IS NOT WATER' + END AS result +FROM T; + +--#[case-08] +SELECT + CASE x + WHEN 'WATER' THEN 'x IS WATER' + WHEN 5 THEN 'x IS 5' + ELSE 'x IS SOMETHING ELSE' + END AS result +FROM T; + +--#[case-09] +-- TODO: When using `x IS STRING` or `x IS DECIMAL`, I found that there are issues with the SqlCalls not receiving +-- the length/precision/scale parameters. This doesn't have to do with CASE_WHEN, but it needs to be addressed. +SELECT + CASE + WHEN x IS INT THEN 'x IS INT' + WHEN x IS STRUCT THEN 'x IS STRUCT' + ELSE 'x IS SOMETHING ELSE' + END AS result +FROM T; + +--#[case-10] +CASE + WHEN FALSE THEN 0 + WHEN FALSE THEN 1 + ELSE 2 +END; diff --git a/partiql-planner/src/testFixtures/resources/inputs/basics/operator.sql b/partiql-planner/src/testFixtures/resources/inputs/basics/operator.sql new file mode 100644 index 000000000..45ee26ba7 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/basics/operator.sql @@ -0,0 +1,133 @@ +--#[expr-00] +t1 OR t2; + +--#[expr-01] +t1 AND t2; + +--#[expr-02] +NOT t1; + +--#[expr-03] +t1 < t2; + +--#[expr-04] +t1 <= t2; + +--#[expr-05] +t1 > t2; + +--#[expr-06] +t1 >= t2; + +--#[expr-07] +t1 = t2; + +--#[expr-08] +t1 != t2; + +--#[expr-09] +t1 <> t2; + +--#[expr-10] +t1 IS NULL; + +--#[expr-11] +t1 IS MISSING; + +--#[expr-12] +t1 IS INT2; + +--#[expr-13] +t1 IS INT4; + +--#[expr-14] +t1 IS INT8; + +--#[expr-15] +t1 IS INT; + +--#[expr-16] +t1 IS DECIMAL; + +--#[expr-17] +t1 IS FLOAT; + +--#[expr-18] +t1 IS BOOL; + +--#[expr-19] +t1 IS SYMBOL; + +--#[expr-20] +t1 IS DATE; + +--#[expr-21] +t1 IS TIME; + +--#[expr-22] +t1 IS TIMESTAMP; + +--#[expr-23] +t1 IS STRING; + +--#[expr-24] +t1 IS CLOB; + +--#[expr-25] +t1 IS BLOB; + +--#[expr-26] +t1 IS LIST; + +--#[expr-27] +t1 IS SEXP; + +--#[expr-28] +t1 IS STRUCT; + +--#[expr-29] +t1 IS BAG; + +--#[expr-30] +t1 IN ( true ); + +--#[expr-31] +t1 IN t2; + +--#[expr-32] +t1 LIKE t2; + +--#[expr-33] +t1 LIKE t2 
ESCAPE t3; + +--#[expr-34] +t1 BETWEEN t2 AND t3; + +--#[expr-35] +t1 || t2; + +--#[expr-36] +t1 & t2; + +--#[expr-37] +t1 + t2; + +--#[expr-38] +t1 - t2; + +--#[expr-39] +t1 % t2; + +--#[expr-40] +t1 * t2; + +--#[expr-41] +t1 / t2; + +--#[expr-66] +CURRENT_USER; + +--#[expr-67] +CURRENT_DATE; + +-- TO BE CONTINUED .... diff --git a/partiql-planner/src/testFixtures/resources/inputs/basics/paths.sql b/partiql-planner/src/testFixtures/resources/inputs/basics/paths.sql new file mode 100644 index 000000000..4837ace0b --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/basics/paths.sql @@ -0,0 +1,135 @@ +-- ---------------------------------------- +-- PartiQL Path Navigation +-- ---------------------------------------- + +--#[paths-00] +-- tuple navigation +x.y; + +--#[paths-01] +-- array navigation with literal +x[0]; + +--#[paths-02] +-- tuple navigation with array notation +x['y']; + +--#[paths-03] +-- tuple navigation (2) +x."y"; + +--#[paths-04] +-- tuple navigation with explicit cast as string +x[CAST(z AS STRING)]; + +-- ---------------------------------------- +-- Composition of Navigation (5 choose 3) +-- ---------------------------------------- + +--#[paths-05] +x.y[0]['y']; + +--#[paths-06] +x.y[0]."y"; + +--#[paths-07] +x.y[0][CAST(z AS STRING)]; + +--#[paths-08] +x.y['y']."y"; + +--#[paths-09] +x.y['y'][CAST(z AS STRING)]; + +--#[paths-10] +x.y."y"[CAST(z AS STRING)]; + +--#[paths-11] +x[0]['y']."y"; + +--#[paths-12] +x[0]['y'][CAST(z AS STRING)]; + +--#[paths-13] +x[0]."y"[CAST(z AS STRING)]; + +--#[paths-14] +x['y']."y"[CAST(z AS STRING)]; + +-- ---------------------------------------- +-- Array Navigation with Expressions +-- ---------------------------------------- + +--#[paths-15] +x[0+1]; + +--#[paths-16] +x[ABS(1)]; + +-- ---------------------------------------- +-- PartiQL Path Navigation (+SFW) +-- ---------------------------------------- + +--#[paths-sfw-00] +-- tuple navigation +SELECT t.x.y AS v FROM t; + +--#[paths-sfw-01] +-- 
array navigation with literal +SELECT t.x[0] AS v FROM t; + +--#[paths-sfw-02] +-- tuple navigation with array notation (1) +SELECT t.x['y'] AS v FROM t; + +--#[paths-sfw-03] +-- tuple navigation with array notation (2) +SELECT t.x."y" AS v FROM t; + +--#[paths-sfw-04] +-- tuple navigation with explicit cast as string +SELECT t.x[CAST(t.z AS STRING)] AS v FROM t; + +-- ---------------------------------------- +-- Composition of Navigation (5 choose 3) +-- ---------------------------------------- + +--#[paths-sfw-05] +SELECT t.x.y[0]['y'] AS v FROM t; + +--#[paths-sfw-06] +SELECT t.x.y[0]."y" AS v FROM t; + +--#[paths-sfw-07] +SELECT t.x.y[0][CAST(t.z AS STRING)] AS v FROM t; + +--#[paths-sfw-08] +SELECT t.x.y['y']."y" AS v FROM t; + +--#[paths-sfw-09] +SELECT t.x.y['y'][CAST(t.z AS STRING)] AS v FROM t; + +--#[paths-sfw-10] +SELECT t.x.y."y"[CAST(t.z AS STRING)] AS v FROM t; + +--#[paths-sfw-11] +SELECT t.x[0]['y']."y" AS v FROM t; + +--#[paths-sfw-12] +SELECT t.x[0]['y'][CAST(t.z AS STRING)] AS v FROM t; + +--#[paths-sfw-13] +SELECT t.x[0]."y"[CAST(t.z AS STRING)] AS v FROM t; + +--#[paths-sfw-14] +SELECT t.x['y']."y"[CAST(t.z AS STRING)] AS v FROM t; + +-- ---------------------------------------- +-- Array Navigation with Expressions +-- ---------------------------------------- + +--#[paths-sfw-15] +SELECT t.x[0 + 1] AS v FROM t; + +--#[paths-sfw-16] +SELECT t.x[ABS(1)] AS v FROM t; diff --git a/partiql-planner/src/testFixtures/resources/inputs/basics/select.sql b/partiql-planner/src/testFixtures/resources/inputs/basics/select.sql new file mode 100644 index 000000000..da7911c59 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/basics/select.sql @@ -0,0 +1,50 @@ +--#[select-00] +SELECT a, b, c FROM T; + +--#[select-01] +SELECT * FROM T; + +--#[select-02] +SELECT VALUE { 'a': a, 'b': b, 'c': c } FROM T; + +--#[select-03] +SELECT VALUE a FROM T; + +--#[select-04] +SELECT * FROM T AS t1, T AS t2; + +--#[select-05] +SELECT t.d.* FROM T; + 
+--#[select-06] +SELECT t, t.d.* FROM T; + +--#[select-07] +SELECT t.d.*, t.d.* FROM T; + +--#[select-08] +SELECT d.* FROM T; + +--#[select-09] +SELECT t.* FROM T; + +--#[select-10] +SELECT t.c || CURRENT_USER FROM T; + +--#[select-11] +SELECT CURRENT_USER FROM T; + +--#[select-12] +SELECT CURRENT_DATE FROM T; + +--#[select-13] +SELECT DATE_DIFF(DAY, CURRENT_DATE, CURRENT_DATE) FROM T; + +--#[select-14] +SELECT DATE_ADD(DAY, 5, CURRENT_DATE) FROM T; + +--#[select-15] +SELECT DATE_ADD(DAY, -5, CURRENT_DATE) FROM T; + +--#[select-16] +SELECT a FROM t; diff --git a/partiql-planner/src/testFixtures/resources/inputs/basics/simple.sql b/partiql-planner/src/testFixtures/resources/inputs/basics/simple.sql new file mode 100644 index 000000000..1c81d335c --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/basics/simple.sql @@ -0,0 +1,31 @@ +-- ------------------ +-- Globals +-- ------------------ + +--#[global-00] +my_global; + +-- ------------------ +-- Literals +-- ------------------ + +--#[sanity-lit-00] +true; + +--#[sanity-lit-01] +1; + +--#[sanity-lit-02] +1.0; + +--#[sanity-lit-03] +'hello'; + +--#[sanity-lit-04] +[ 'a', 'b', 'c' ]; + +--#[sanity-lit-05] +<< 'a', 'b', 'c' >>; + +--#[sanity-lit-06] +{ 'a': 1, 'b': 2, 'c': 3 }; \ No newline at end of file diff --git a/partiql-planner/src/testFixtures/resources/inputs/basics/subquery.sql b/partiql-planner/src/testFixtures/resources/inputs/basics/subquery.sql new file mode 100644 index 000000000..4f3fa8e6e --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/basics/subquery.sql @@ -0,0 +1,17 @@ +-- Scalar subquery coercion +--#[subquery-00] +1 = (SELECT b FROM T); + +-- Row value subquery coercion +--#[subquery-01] +(false, 1) = (SELECT a, b FROM T); + +-- IN collection subquery +--#[subquery-02] +SELECT UPPER(v) FROM T +WHERE b IN (SELECT b FROM T WHERE a); + +-- Scalar subquery coercion with aggregation +--#[subquery-03] +-- 100 = (SELECT MAX(t.b) FROM T as t) +100 = (SELECT COUNT(*) 
FROM T); diff --git a/partiql-planner/src/testFixtures/resources/inputs/schema_inferencer/aggregations.sql b/partiql-planner/src/testFixtures/resources/inputs/schema_inferencer/aggregations.sql new file mode 100644 index 000000000..3e0ce7111 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/schema_inferencer/aggregations.sql @@ -0,0 +1,50 @@ +--#[aggs-00] +SELECT COUNT(*) FROM T; + +--#[aggs-01] +SELECT COUNT(*), COUNT(1), MIN(b), MAX(b), AVG(b) FROM T; + +--#[aggs-02] +SELECT COUNT(*) AS count_star FROM T; + +--#[aggs-03] +SELECT COUNT(*) AS count_star, + COUNT(b) AS count_b, + MIN(b) AS min_b, + MAX(b) AS max_b, + AVG(b) AS avg_b +FROM T; + +--#[aggs-04] +SELECT a, COUNT(*) FROM T GROUP BY a; + +--#[aggs-05] +SELECT COUNT(*), a FROM T GROUP BY a; + +--#[aggs-06] +SELECT a, b, c, MIN(b), MAX(b) FROM T GROUP BY a, b, c; + +--#[aggs-07] +SELECT MIN(b), MAX(b), a, b, c FROM T GROUP BY a, b, c; + +--#[aggs-08] +SELECT a AS _a, COUNT(*) AS count_star FROM T GROUP BY a; + +--#[aggs-09] +SELECT COUNT(*) AS count_star, a AS _a FROM T GROUP BY a; + +--#[aggs-10] +SELECT a AS _a, b AS _b, c AS _c, MIN(b) AS min_b, MAX(b) AS max_b FROM T GROUP BY a, b, c; + +--#[aggs-11] +SELECT MIN(b) AS min_b, MAX(b) AS max_b, a AS _a, b AS _b, c AS _c FROM T GROUP BY a, b, c; + +--#[aggs-12] +SELECT a AS _a, AVG(b) AS avg_b FROM T +GROUP BY a +HAVING a = true; + +--#[aggs-13] +SELECT a AS _a, AVG(b) AS avg_b FROM T +GROUP BY a +HAVING avg_b > 0; diff --git a/partiql-planner/src/testFixtures/resources/inputs/schema_inferencer/collections.sql b/partiql-planner/src/testFixtures/resources/inputs/schema_inferencer/collections.sql new file mode 100644 index 000000000..d06e6be9d --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/schema_inferencer/collections.sql @@ -0,0 +1,23 @@ +--#[collections-01] +-- Collection BAG +<< 1, 2, 3 >>; + +--#[collections-02] +-- Collection LIST +[ 1, 2, 3 ]; + +--#[collections-03] +-- Collection LIST +( 1, 2, 3 ); + 
+--#[collections-04] +-- Collection SEXP +SEXP ( 1, 2, 3 ); + +--#[collections-05] +--SELECT VALUE from array +SELECT VALUE x FROM [ 1, 2, 3 ] as x; + +--#[collections-06] +--SELECT from array +SELECT x FROM [ 1, 2, 3 ] as x; diff --git a/partiql-planner/src/testFixtures/resources/inputs/schema_inferencer/exclude.sql b/partiql-planner/src/testFixtures/resources/inputs/schema_inferencer/exclude.sql new file mode 100644 index 000000000..3c862dd79 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/schema_inferencer/exclude.sql @@ -0,0 +1,473 @@ +--#[exclude-01] +SELECT * EXCLUDE c.ssn FROM [ + { + 'name': 'Alan', + 'custId': 1, + 'address': { + 'city': 'Seattle', + 'zipcode': 98109, + 'street': '123 Seaplane Dr.' + }, + 'ssn': 123456789 + }] AS c; + +--#[exclude-02] +SELECT * EXCLUDE c.ssn, c.address.street FROM [ + { + 'name': 'Alan', + 'custId': 1, + 'address': { + 'city': 'Seattle', + 'zipcode': 98109, + 'street': '123 Seaplane Dr.' + }, + 'ssn': 123456789 + }] AS c; + +--#[exclude-03] +SELECT * EXCLUDE t.a.b.c[0], t.a.b.c[1].field +FROM [{ + 'a': { + 'b': { + 'c': [ + { + 'field': 0 -- c[0] + }, + { + 'field': 1 -- c[1] + }, + { + 'field': 2 -- c[2] + } + ] + } + }, + 'foo': 'bar' + }] AS t; + +--#[exclude-04] +SELECT * + EXCLUDE + t.a.b.c[0] +FROM [{ + 'a': { + 'b': { + 'c': [0, 1, 2] + } + }, + 'foo': 'bar' + }] AS t; + +--#[exclude-05] +SELECT * + EXCLUDE + t.a[*] +FROM [{ + 'a': [0, 1, 2] + }] AS t; + +--#[exclude-06] +SELECT * + EXCLUDE + t.a.b.c[*].field_x +FROM [{ + 'a': { + 'b': { + 'c': [ + { -- c[0] + 'field_x': 0, + 'field_y': 0 + }, + { -- c[1] + 'field_x': 1, + 'field_y': 1 + }, + { -- c[2] + 'field_x': 2, + 'field_y': 2 + } + ] + } + }, + 'foo': 'bar' + }] AS t; + +--#[exclude-07] +SELECT * + EXCLUDE + t.a.b.c[*].* +FROM [{ + 'a': { + 'b': { + 'c': [ + { -- c[0] + 'field_x': 0, + 'field_y': 0 + }, + { -- c[1] + 'field_x': 1, + 'field_y': 1 + }, + { -- c[2] + 'field_x': 2, + 'field_y': 2 + } + ] + } + }, + 'foo': 'bar' + }] AS t; + 
+--#[exclude-08] +SELECT * + EXCLUDE + t.a +FROM [ + { + 'a': 2, + 'foo': 'bar2' + }, + { + 'a': 1, + 'foo': 'bar1' + }, + { + 'a': 3, + 'foo': 'bar3' + } + ] AS t +ORDER BY t.a; + +--#[exclude-09] +SELECT * + EXCLUDE bar.d +FROM + << + {'a': 1, 'b': 11}, + {'a': 2, 'b': 22} + >> AS foo, + << + {'c': 3, 'd': 33}, + {'c': 4, 'd': 44} + >> AS bar; + +--#[exclude-10] +SELECT t.b EXCLUDE t.b[*].b_1 +FROM << + { + 'a': {'a_1':1,'a_2':2}, + 'b': [ {'b_1':3,'b_2':4}, {'b_1':5,'b_2':6} ], + 'c': 7, + 'd': 8 + } >> AS t; + +--#[exclude-11] +SELECT * EXCLUDE t.b[*].b_1 +FROM << + { + 'a': {'a_1':1,'a_2':2}, + 'b': [ {'b_1':3,'b_2':4}, {'b_1':5,'b_2':6} ], + 'c': 7, + 'd': 8 + } >> AS t; + +--#[exclude-12] +SELECT VALUE t.b EXCLUDE t.b[*].b_1 +FROM << + { + 'a': {'a_1':1,'a_2':2}, + 'b': [ {'b_1':3,'b_2':4}, {'b_1':5,'b_2':6} ], + 'c': 7, + 'd': 8 + } >> AS t; + +--#[exclude-13] +SELECT * EXCLUDE t.a[*].b.c +FROM << + { + 'a': [ + { 'b': { 'c': 0, 'd': 'zero' } }, + { 'b': { 'c': 1, 'd': 'one' } }, + { 'b': { 'c': 2, 'd': 'two' } } + ] + } + >> AS t; + +--#[exclude-14] +SELECT * EXCLUDE t.a[1].b.c +FROM << + { + 'a': [ + { 'b': { 'c': 0, 'd': 'zero' } }, + { 'b': { 'c': 1, 'd': 'one' } }, + { 'b': { 'c': 2, 'd': 'two' } } + ] + } + >> AS t; + +--#[exclude-15] +SELECT * EXCLUDE t.a[*].b.* +FROM << + { + 'a': [ + { 'b': { 'c': 0, 'd': 'zero' } }, + { 'b': { 'c': 1, 'd': 'one' } }, + { 'b': { 'c': 2, 'd': 'two' } } + ] + } + >> AS t; + +--#[exclude-16] +SELECT * EXCLUDE t.a[1].b.* +FROM << + { + 'a': [ + { 'b': { 'c': 0, 'd': 'zero' } }, + { 'b': { 'c': 1, 'd': 'one' } }, + { 'b': { 'c': 2, 'd': 'two' } } + ] + } + >> AS t; + +--#[exclude-17] +SELECT * EXCLUDE t.a[*].b.d[*].e +FROM << + { + 'a': [ + { 'b': { 'c': 0, 'd': [{'e': 'zero', 'f': true}] } }, + { 'b': { 'c': 1, 'd': [{'e': 'one', 'f': true}] } }, + { 'b': { 'c': 2, 'd': [{'e': 'two', 'f': true}] } } + ] + } + >> AS t; + +--#[exclude-18] +SELECT * EXCLUDE t.a[1].b.d[*].e +FROM << + { + 'a': [ + { 'b': { 'c': 0, 'd': 
[{'e': 'zero', 'f': true}] } }, + { 'b': { 'c': 1, 'd': [{'e': 'one', 'f': true}] } }, + { 'b': { 'c': 2, 'd': [{'e': 'two', 'f': true}] } } + ] + } + >> AS t; + +--#[exclude-19] +SELECT * EXCLUDE t.a[1].b.d[0].e +FROM << + { + 'a': [ + { 'b': { 'c': 0, 'd': [{'e': 'zero', 'f': true}] } }, + { 'b': { 'c': 1, 'd': [{'e': 'one', 'f': true}] } }, + { 'b': { 'c': 2, 'd': [{'e': 'two', 'f': true}] } } + ] + } + >> AS t; + +--#[exclude-20] +SELECT * EXCLUDE t."a".b['c'] +FROM << + { + 'a': { + 'B': { + 'c': 0, + 'd': 'foo' + } + } + } + >> AS t; + +--#[exclude-21] +SELECT * EXCLUDE t."a".b['c'] +FROM << + { + 'a': { + 'B': { + 'c': 0, + 'C': true, + 'd': 'foo' + } + } + } + >> AS t; + +--#[exclude-22] +SELECT * EXCLUDE t."a".b.c +FROM << + { + 'a': { + 'B': { -- both 'c' and 'C' to be removed + 'c': 0, + 'C': true, + 'd': 'foo' + } + } + } + >> AS t; + +--#[exclude-23] +SELECT * EXCLUDE t."a".b.c +FROM << + { + 'a': { + 'B': { + 'c': 0, + 'c': true, + 'd': 'foo' + } + } + } + >> AS t; + +--#[exclude-24] +SELECT * EXCLUDE t.a, t.a.b FROM << { 'a': { 'b': 1 }, 'c': 2 } >> AS t; + +--#[exclude-25] +SELECT * EXCLUDE t.attr_does_not_exist FROM << { 'a': 1 } >> AS t; + +--#[exclude-26] +SELECT t EXCLUDE t.a.b +FROM << + { + 'a': { + 'b': 1, -- `b` to be excluded + 'c': 'foo' + } + }, + { + 'a': NULL + } + >> AS t; + +--#[exclude-27] +SELECT t EXCLUDE t.a.b +FROM << + { + 'a': { + 'b': 1, -- `b` to be excluded + 'c': 'foo' + } + }, + { + 'a': { + 'b': 1, -- `b` to be excluded + 'c': NULL + } + } + >> AS t; + +--#[exclude-28] +SELECT t EXCLUDE t.a.c +FROM << + { + 'a': { + 'b': 1, + 'c': 'foo' -- `c` to be excluded + } + }, + { + 'a': { + 'b': 1, + 'c': NULL -- `c` to be excluded + } + } + >> AS t; + +--#[exclude-29] +SELECT * EXCLUDE t.a[*] +FROM << + { + 'a': { + 'b': { + 'c': 0, + 'd': 'foo' + } + } + } + >> AS t; + +--#[exclude-30] +SELECT * EXCLUDE t.a[1] +FROM << + { + 'a': { + 'b': { + 'c': 0, + 'd': 'foo' + } + } + } + >> AS t; + +--#[exclude-31] +SELECT * EXCLUDE t.a.b 
+FROM << + { + 'a': [ + { 'b': 0 }, + { 'b': 1 }, + { 'b': 2 } + ] + } + >> AS t; + +--#[exclude-32] +SELECT * EXCLUDE t.a.* +FROM << + { + 'a': [ + { 'b': 0 }, + { 'b': 1 }, + { 'b': 2 } + ] + } + >> AS t; + +--#[exclude-33] +SELECT * EXCLUDE t.b -- `t.b` does not exist +FROM << + { + 'a': << + { 'b': 0 }, + { 'b': 1 }, + { 'b': 2 } + >> + } + >> AS t; + +--#[exclude-34] +-- EXCLUDE regression test (behavior subject to change pending RFC); could give error/warning +SELECT * EXCLUDE nonsense.b -- `nonsense` does not exist in binding tuples +FROM << + { 'a': << + { 'b': 0 }, + { 'b': 1 }, + { 'b': 2 } + >> + } +>> AS t; + +--#[exclude-35] +SELECT * EXCLUDE t.a[0].c -- `c`'s type to be unioned with `MISSING` +FROM << + { + 'a': [ + { + 'b': 0, + 'c': 0 + }, + { + 'b': 1, + 'c': NULL + }, + { + 'b': 2, + 'c': 0.1 + } + ] + } + >> AS t; + +--#[exclude-36] +SELECT * EXCLUDE t.c FROM b.b.b AS t; \ No newline at end of file diff --git a/partiql-planner/src/testFixtures/resources/inputs/schema_inferencer/joins.sql b/partiql-planner/src/testFixtures/resources/inputs/schema_inferencer/joins.sql new file mode 100644 index 000000000..5f8c8b4ec --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/schema_inferencer/joins.sql @@ -0,0 +1,24 @@ +--#[join-01] +SELECT * FROM <<{ 'a': 1 }>> AS t1, <<{ 'b': 2.0 }>> AS t2; + +--#[join-02] +SELECT * FROM <<{ 'a': 1 }>> AS t1 LEFT JOIN <<{ 'b': 2.0 }>> AS t2 ON TRUE; + +--#[join-03] +SELECT b, a FROM <<{ 'a': 1 }>> AS t1 LEFT JOIN <<{ 'b': 2.0 }>> AS t2 ON TRUE; + +--#[join-04] +SELECT t1.a, t2.a FROM <<{ 'a': 1 }>> AS t1 LEFT JOIN <<{ 'a': 2.0 }>> AS t2 ON t1.a = t2.a; + +--#[join-05] +SELECT * FROM <<{ 'a': 1 }>> AS t1 LEFT JOIN <<{ 'a': 2.0 }>> AS t2 ON t1.a = t2.a; + +--#[join-06] +SELECT * FROM + <<{ 'a': 1 }>> AS t1 + LEFT JOIN + <<{ 'a': 2.0 }>> AS t2 + ON t1.a = t2.a + LEFT JOIN + <<{ 'a': 'hello, world' }>> AS t3 + ON t3.a = 'hello'; diff --git 
a/partiql-planner/src/testFixtures/resources/inputs/schema_inferencer/order_by.sql b/partiql-planner/src/testFixtures/resources/inputs/schema_inferencer/order_by.sql new file mode 100644 index 000000000..1fa842ed1 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/schema_inferencer/order_by.sql @@ -0,0 +1,8 @@ +--#[order_by-01] +SELECT * FROM pets ORDER BY id; + +--#[order_by-02] +SELECT * FROM pets ORDER BY breed; + +--#[order_by-03] +SELECT * FROM pets ORDER BY unknown_col; diff --git a/partiql-planner/src/testFixtures/resources/inputs/schema_inferencer/sanity.sql b/partiql-planner/src/testFixtures/resources/inputs/schema_inferencer/sanity.sql new file mode 100644 index 000000000..e4f97c7be --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/schema_inferencer/sanity.sql @@ -0,0 +1,47 @@ +--#[sanity-01] +SELECT ss_ticket_number, ss_quantity, ss_sold_date_sk +FROM store_sales; + +--#[sanity-02] +SELECT ss_ticket_number, ss_quantity, ss_sold_date_sk +FROM store_sales +WHERE ss_sold_date_sk > DATE_ADD(DAY, -30, UTCNOW()); + +--#[sanity-03] +SELECT (ss_wholesale_cost + 10 < ss_list_price) AS x +FROM store_sales; + +--#[sanity-04] +SELECT ss_quantity, -- This is a nullable int32 + CASE (ss_quantity) -- This case statement will always return a non-nullable string + WHEN 0 THEN 'Did not sell anything!' + WHEN 1 THEN 'Sold a single item!' + ELSE 'Sold multiple items!' + END AS ss_quantity_description_1, + CASE (ss_quantity) + WHEN 0 THEN 'Hello' -- sometimes STRING + WHEN 1 THEN 1.0 -- sometimes DECIMAL + WHEN 2 THEN 2 -- sometimes INT + -- There isn't an else here, so the output should be nullable as well. + END AS ss_quantity_description_2, + CASE (ss_quantity) + WHEN 0 THEN 'Hello' -- ss_quantity will be cast to an INT for comparison + WHEN 'not an int32' THEN 'not cast-able' -- cannot be cast! + ELSE 'fallback' + -- There is an ELSE here, so the output should NOT be nullable. 
+ END AS ss_quantity_description_3 +FROM store_sales; + +--#[sanity-05] +SELECT p.*, e.* +FROM + main.person AS p + INNER JOIN + main.employer AS e + ON p.employer = e.name; + +--#[sanity-06] +SELECT + p.name.*, + (p.name."first" || ' ' || p.name."last") AS full_name +FROM main.person AS p; diff --git a/partiql-planner/src/testFixtures/resources/inputs/subquery/non_correlated.sql b/partiql-planner/src/testFixtures/resources/inputs/subquery/non_correlated.sql new file mode 100644 index 000000000..11254c05d --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/subquery/non_correlated.sql @@ -0,0 +1,18 @@ +--#[subquery-00] +SELECT x +FROM T +WHERE x IN (SELECT a FROM S); + +--#[subquery-01] +SELECT x +FROM T +WHERE x > (SELECT MAX(a) FROM S); + +--#[subquery-02] +SELECT t.*, s.* +FROM T AS t + JOIN (SELECT * FROM S) AS s + ON t.x = s.a; + +--#[subquery-03] +1 = (SELECT COUNT(*) FROM T); diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query01.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query01.sql new file mode 100644 index 000000000..b3dd6eb5e --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query01.sql @@ -0,0 +1,23 @@ +--#[tpc-ds-01] +WITH customer_total_return + AS (SELECT sr_customer_sk AS ctr_customer_sk, + sr_store_sk AS ctr_store_sk, + Sum(sr_return_amt) AS ctr_total_return + FROM store_returns, + date_dim + WHERE sr_returned_date_sk = d_date_sk + AND d_year = 2001 + GROUP BY sr_customer_sk, + sr_store_sk) +SELECT c_customer_id +FROM customer_total_return ctr1, + store, + customer +WHERE ctr1.ctr_total_return > (SELECT Avg(ctr_total_return) * 1.2 + FROM customer_total_return ctr2 + WHERE ctr1.ctr_store_sk = ctr2.ctr_store_sk) + AND s_store_sk = ctr1.ctr_store_sk + AND s_state = 'TN' + AND ctr1.ctr_customer_sk = c_customer_sk +ORDER BY c_customer_id +LIMIT 100; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query02.sql 
b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query02.sql new file mode 100644 index 000000000..65e82290f --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query02.sql @@ -0,0 +1,79 @@ +-- start query 2 in stream 0 using template query2.tpl +WITH wscs + AS (SELECT sold_date_sk, + sales_price + FROM (SELECT ws_sold_date_sk sold_date_sk, + ws_ext_sales_price sales_price + FROM web_sales) + UNION ALL + (SELECT cs_sold_date_sk sold_date_sk, + cs_ext_sales_price sales_price + FROM catalog_sales)), + wswscs + AS (SELECT d_week_seq, + Sum(CASE + WHEN ( d_day_name = 'Sunday' ) THEN sales_price + ELSE NULL + END) sun_sales, + Sum(CASE + WHEN ( d_day_name = 'Monday' ) THEN sales_price + ELSE NULL + END) mon_sales, + Sum(CASE + WHEN ( d_day_name = 'Tuesday' ) THEN sales_price + ELSE NULL + END) tue_sales, + Sum(CASE + WHEN ( d_day_name = 'Wednesday' ) THEN sales_price + ELSE NULL + END) wed_sales, + Sum(CASE + WHEN ( d_day_name = 'Thursday' ) THEN sales_price + ELSE NULL + END) thu_sales, + Sum(CASE + WHEN ( d_day_name = 'Friday' ) THEN sales_price + ELSE NULL + END) fri_sales, + Sum(CASE + WHEN ( d_day_name = 'Saturday' ) THEN sales_price + ELSE NULL + END) sat_sales + FROM wscs, + date_dim + WHERE d_date_sk = sold_date_sk + GROUP BY d_week_seq) +SELECT d_week_seq1, + Round(sun_sales1 / sun_sales2, 2), + Round(mon_sales1 / mon_sales2, 2), + Round(tue_sales1 / tue_sales2, 2), + Round(wed_sales1 / wed_sales2, 2), + Round(thu_sales1 / thu_sales2, 2), + Round(fri_sales1 / fri_sales2, 2), + Round(sat_sales1 / sat_sales2, 2) +FROM (SELECT wswscs.d_week_seq d_week_seq1, + sun_sales sun_sales1, + mon_sales mon_sales1, + tue_sales tue_sales1, + wed_sales wed_sales1, + thu_sales thu_sales1, + fri_sales fri_sales1, + sat_sales sat_sales1 + FROM wswscs, + date_dim + WHERE date_dim.d_week_seq = wswscs.d_week_seq + AND d_year = 1998) y, + (SELECT wswscs.d_week_seq d_week_seq2, + sun_sales sun_sales2, + mon_sales mon_sales2, + tue_sales tue_sales2, + 
wed_sales wed_sales2, + thu_sales thu_sales2, + fri_sales fri_sales2, + sat_sales sat_sales2 + FROM wswscs, + date_dim + WHERE date_dim.d_week_seq = wswscs.d_week_seq + AND d_year = 1998 + 1) z +WHERE d_week_seq1 = d_week_seq2 - 53 +ORDER BY d_week_seq1; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query03.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query03.sql new file mode 100644 index 000000000..3b6f4f58f --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query03.sql @@ -0,0 +1,19 @@ +-- start query 3 in stream 0 using template query3.tpl +SELECT dt.d_year, + item.i_brand_id brand_id, + item.i_brand brand, + Sum(ss_ext_discount_amt) sum_agg +FROM date_dim dt, + store_sales, + item +WHERE dt.d_date_sk = store_sales.ss_sold_date_sk + AND store_sales.ss_item_sk = item.i_item_sk + AND item.i_manufact_id = 427 + AND dt.d_moy = 11 +GROUP BY dt.d_year, + item.i_brand, + item.i_brand_id +ORDER BY dt.d_year, + sum_agg DESC, + brand_id +LIMIT 100; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query04.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query04.sql new file mode 100644 index 000000000..026a1bac9 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query04.sql @@ -0,0 +1,152 @@ +-- start query 4 in stream 0 using template query4.tpl +WITH year_total + AS (SELECT c_customer_id customer_id, + c_first_name customer_first_name, + c_last_name customer_last_name, + c_preferred_cust_flag customer_preferred_cust_flag + , + c_birth_country + customer_birth_country, + c_login customer_login, + c_email_address customer_email_address, + d_year dyear, + Sum(( ( ss_ext_list_price - ss_ext_wholesale_cost + - ss_ext_discount_amt + ) + + + ss_ext_sales_price ) / 2) year_total, + 's' sale_type + FROM customer, + store_sales, + date_dim + WHERE c_customer_sk = ss_customer_sk + AND ss_sold_date_sk = d_date_sk + GROUP BY c_customer_id, + c_first_name, 
+ c_last_name, + c_preferred_cust_flag, + c_birth_country, + c_login, + c_email_address, + d_year + UNION ALL + SELECT c_customer_id customer_id, + c_first_name customer_first_name, + c_last_name customer_last_name, + c_preferred_cust_flag + customer_preferred_cust_flag, + c_birth_country customer_birth_country + , + c_login + customer_login, + c_email_address customer_email_address + , + d_year dyear + , + Sum(( ( ( cs_ext_list_price + - cs_ext_wholesale_cost + - cs_ext_discount_amt + ) + + cs_ext_sales_price ) / 2 )) year_total, + 'c' sale_type + FROM customer, + catalog_sales, + date_dim + WHERE c_customer_sk = cs_bill_customer_sk + AND cs_sold_date_sk = d_date_sk + GROUP BY c_customer_id, + c_first_name, + c_last_name, + c_preferred_cust_flag, + c_birth_country, + c_login, + c_email_address, + d_year + UNION ALL + SELECT c_customer_id customer_id, + c_first_name customer_first_name, + c_last_name customer_last_name, + c_preferred_cust_flag + customer_preferred_cust_flag, + c_birth_country customer_birth_country + , + c_login + customer_login, + c_email_address customer_email_address + , + d_year dyear + , + Sum(( ( ( ws_ext_list_price + - ws_ext_wholesale_cost + - ws_ext_discount_amt + ) + + ws_ext_sales_price ) / 2 )) year_total, + 'w' sale_type + FROM customer, + web_sales, + date_dim + WHERE c_customer_sk = ws_bill_customer_sk + AND ws_sold_date_sk = d_date_sk + GROUP BY c_customer_id, + c_first_name, + c_last_name, + c_preferred_cust_flag, + c_birth_country, + c_login, + c_email_address, + d_year) +SELECT t_s_secyear.customer_id, + t_s_secyear.customer_first_name, + t_s_secyear.customer_last_name, + t_s_secyear.customer_preferred_cust_flag +FROM year_total t_s_firstyear, + year_total t_s_secyear, + year_total t_c_firstyear, + year_total t_c_secyear, + year_total t_w_firstyear, + year_total t_w_secyear +WHERE t_s_secyear.customer_id = t_s_firstyear.customer_id + AND t_s_firstyear.customer_id = t_c_secyear.customer_id + AND t_s_firstyear.customer_id = 
t_c_firstyear.customer_id + AND t_s_firstyear.customer_id = t_w_firstyear.customer_id + AND t_s_firstyear.customer_id = t_w_secyear.customer_id + AND t_s_firstyear.sale_type = 's' + AND t_c_firstyear.sale_type = 'c' + AND t_w_firstyear.sale_type = 'w' + AND t_s_secyear.sale_type = 's' + AND t_c_secyear.sale_type = 'c' + AND t_w_secyear.sale_type = 'w' + AND t_s_firstyear.dyear = 2001 + AND t_s_secyear.dyear = 2001 + 1 + AND t_c_firstyear.dyear = 2001 + AND t_c_secyear.dyear = 2001 + 1 + AND t_w_firstyear.dyear = 2001 + AND t_w_secyear.dyear = 2001 + 1 + AND t_s_firstyear.year_total > 0 + AND t_c_firstyear.year_total > 0 + AND t_w_firstyear.year_total > 0 + AND CASE + WHEN t_c_firstyear.year_total > 0 THEN t_c_secyear.year_total / + t_c_firstyear.year_total + ELSE NULL + END > CASE + WHEN t_s_firstyear.year_total > 0 THEN + t_s_secyear.year_total / + t_s_firstyear.year_total + ELSE NULL + END + AND CASE + WHEN t_c_firstyear.year_total > 0 THEN t_c_secyear.year_total / + t_c_firstyear.year_total + ELSE NULL + END > CASE + WHEN t_w_firstyear.year_total > 0 THEN + t_w_secyear.year_total / + t_w_firstyear.year_total + ELSE NULL + END +ORDER BY t_s_secyear.customer_id, + t_s_secyear.customer_first_name, + t_s_secyear.customer_last_name, + t_s_secyear.customer_preferred_cust_flag +LIMIT 100; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query05.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query05.sql new file mode 100644 index 000000000..0823a5507 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query05.sql @@ -0,0 +1,128 @@ +-- start query 5 in stream 0 using template query5.tpl +WITH ssr AS +( + SELECT s_store_id, + Sum(sales_price) AS sales, + Sum(profit) AS profit, + Sum(return_amt) AS returns1, + Sum(net_loss) AS profit_loss + FROM ( + SELECT ss_store_sk AS store_sk, + ss_sold_date_sk AS date_sk, + ss_ext_sales_price AS sales_price, + ss_net_profit AS profit, + Cast(0 AS DECIMAL(7,2)) AS 
return_amt, + Cast(0 AS DECIMAL(7,2)) AS net_loss + FROM store_sales + UNION ALL + SELECT sr_store_sk AS store_sk, + sr_returned_date_sk AS date_sk, + Cast(0 AS DECIMAL(7,2)) AS sales_price, + Cast(0 AS DECIMAL(7,2)) AS profit, + sr_return_amt AS return_amt, + sr_net_loss AS net_loss + FROM store_returns ) salesreturns, + date_dim, + store + WHERE date_sk = d_date_sk + AND d_date BETWEEN Cast('2002-08-22' AS DATE) AND ( + Cast('2002-08-22' AS DATE) + INTERVAL '14' day) + AND store_sk = s_store_sk + GROUP BY s_store_id) , csr AS +( + SELECT cp_catalog_page_id, + sum(sales_price) AS sales, + sum(profit) AS profit, + sum(return_amt) AS returns1, + sum(net_loss) AS profit_loss + FROM ( + SELECT cs_catalog_page_sk AS page_sk, + cs_sold_date_sk AS date_sk, + cs_ext_sales_price AS sales_price, + cs_net_profit AS profit, + cast(0 AS decimal(7,2)) AS return_amt, + cast(0 AS decimal(7,2)) AS net_loss + FROM catalog_sales + UNION ALL + SELECT cr_catalog_page_sk AS page_sk, + cr_returned_date_sk AS date_sk, + cast(0 AS decimal(7,2)) AS sales_price, + cast(0 AS decimal(7,2)) AS profit, + cr_return_amount AS return_amt, + cr_net_loss AS net_loss + FROM catalog_returns ) salesreturns, + date_dim, + catalog_page + WHERE date_sk = d_date_sk + AND d_date BETWEEN cast('2002-08-22' AS date) AND ( + cast('2002-08-22' AS date) + INTERVAL '14' day) + AND page_sk = cp_catalog_page_sk + GROUP BY cp_catalog_page_id) , wsr AS +( + SELECT web_site_id, + sum(sales_price) AS sales, + sum(profit) AS profit, + sum(return_amt) AS returns1, + sum(net_loss) AS profit_loss + FROM ( + SELECT ws_web_site_sk AS wsr_web_site_sk, + ws_sold_date_sk AS date_sk, + ws_ext_sales_price AS sales_price, + ws_net_profit AS profit, + cast(0 AS decimal(7,2)) AS return_amt, + cast(0 AS decimal(7,2)) AS net_loss + FROM web_sales + UNION ALL + SELECT ws_web_site_sk AS wsr_web_site_sk, + wr_returned_date_sk AS date_sk, + cast(0 AS decimal(7,2)) AS sales_price, + cast(0 AS decimal(7,2)) AS profit, + wr_return_amt AS 
return_amt, + wr_net_loss AS net_loss + FROM web_returns + LEFT OUTER JOIN web_sales + ON ( + wr_item_sk = ws_item_sk + AND wr_order_number = ws_order_number) ) salesreturns, + date_dim, + web_site + WHERE date_sk = d_date_sk + AND d_date BETWEEN cast('2002-08-22' AS date) AND ( + cast('2002-08-22' AS date) + INTERVAL '14' day) + AND wsr_web_site_sk = web_site_sk + GROUP BY web_site_id) +SELECT + channel , + id , + sum(sales) AS sales , + sum(returns1) AS returns1 , + sum(profit) AS profit +FROM ( + SELECT 'store channel' AS channel , + 'store' + || s_store_id AS id , + sales , + returns1 , + (profit - profit_loss) AS profit + FROM ssr + UNION ALL + SELECT 'catalog channel' AS channel , + 'catalog_page' + || cp_catalog_page_id AS id , + sales , + returns1 , + (profit - profit_loss) AS profit + FROM csr + UNION ALL + SELECT 'web channel' AS channel , + 'web_site' + || web_site_id AS id , + sales , + returns1 , + (profit - profit_loss) AS profit + FROM wsr ) x +GROUP BY rollup (channel, id) +ORDER BY channel , + id +LIMIT 100; + diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query06.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query06.sql new file mode 100644 index 000000000..1c53aa09f --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query06.sql @@ -0,0 +1,23 @@ +-- start query 6 in stream 0 using template query6.tpl +SELECT a.ca_state state, + Count(*) cnt +FROM customer_address a, + customer c, + store_sales s, + date_dim d, + item i +WHERE a.ca_address_sk = c.c_current_addr_sk + AND c.c_customer_sk = s.ss_customer_sk + AND s.ss_sold_date_sk = d.d_date_sk + AND s.ss_item_sk = i.i_item_sk + AND d.d_month_seq = (SELECT DISTINCT ( d_month_seq ) + FROM date_dim + WHERE d_year = 1998 + AND d_moy = 7) + AND i.i_current_price > 1.2 * (SELECT Avg(j.i_current_price) + FROM item j + WHERE j.i_category = i.i_category) +GROUP BY a.ca_state +HAVING Count(*) >= 10 +ORDER BY cnt +LIMIT 100; diff --git 
a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query07.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query07.sql new file mode 100644 index 000000000..5408624f6 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query07.sql @@ -0,0 +1,24 @@ +-- start query 7 in stream 0 using template query7.tpl +SELECT i_item_id, + Avg(ss_quantity) agg1, + Avg(ss_list_price) agg2, + Avg(ss_coupon_amt) agg3, + Avg(ss_sales_price) agg4 +FROM store_sales, + customer_demographics, + date_dim, + item, + promotion +WHERE ss_sold_date_sk = d_date_sk + AND ss_item_sk = i_item_sk + AND ss_cdemo_sk = cd_demo_sk + AND ss_promo_sk = p_promo_sk + AND cd_gender = 'F' + AND cd_marital_status = 'W' + AND cd_education_status = '2 yr Degree' + AND ( p_channel_email = 'N' + OR p_channel_event = 'N' ) + AND d_year = 1998 +GROUP BY i_item_id +ORDER BY i_item_id +LIMIT 100; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query08.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query08.sql new file mode 100644 index 000000000..88d259880 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query08.sql @@ -0,0 +1,227 @@ +-- start query 8 in stream 0 using template query8.tpl +SELECT s_store_name, + Sum(ss_net_profit) +FROM store_sales, + date_dim, + store, + (SELECT ca_zip + FROM (SELECT Substr(ca_zip, 1, 5) ca_zip + FROM customer_address + WHERE Substr(ca_zip, 1, 5) IN ( '67436', '26121', '38443', + '63157', + '68856', '19485', '86425', + '26741', + '70991', '60899', '63573', + '47556', + '56193', '93314', '87827', + '62017', + '85067', '95390', '48091', + '10261', + '81845', '41790', '42853', + '24675', + '12840', '60065', '84430', + '57451', + '24021', '91735', '75335', + '71935', + '34482', '56943', '70695', + '52147', + '56251', '28411', '86653', + '23005', + '22478', '29031', '34398', + '15365', + '42460', '33337', '59433', + '73943', + '72477', '74081', '74430', + '64605', + '39006', 
'11226', '49057', + '97308', + '42663', '18187', '19768', + '43454', + '32147', '76637', '51975', + '11181', + '45630', '33129', '45995', + '64386', + '55522', '26697', '20963', + '35154', + '64587', '49752', '66386', + '30586', + '59286', '13177', '66646', + '84195', + '74316', '36853', '32927', + '12469', + '11904', '36269', '17724', + '55346', + '12595', '53988', '65439', + '28015', + '63268', '73590', '29216', + '82575', + '69267', '13805', '91678', + '79460', + '94152', '14961', '15419', + '48277', + '62588', '55493', '28360', + '14152', + '55225', '18007', '53705', + '56573', + '80245', '71769', '57348', + '36845', + '13039', '17270', '22363', + '83474', + '25294', '43269', '77666', + '15488', + '99146', '64441', '43338', + '38736', + '62754', '48556', '86057', + '23090', + '38114', '66061', '18910', + '84385', + '23600', '19975', '27883', + '65719', + '19933', '32085', '49731', + '40473', + '27190', '46192', '23949', + '44738', + '12436', '64794', '68741', + '15333', + '24282', '49085', '31844', + '71156', + '48441', '17100', '98207', + '44982', + '20277', '71496', '96299', + '37583', + '22206', '89174', '30589', + '61924', + '53079', '10976', '13104', + '42794', + '54772', '15809', '56434', + '39975', + '13874', '30753', '77598', + '78229', + '59478', '12345', '55547', + '57422', + '42600', '79444', '29074', + '29752', + '21676', '32096', '43044', + '39383', + '37296', '36295', '63077', + '16572', + '31275', '18701', '40197', + '48242', + '27219', '49865', '84175', + '30446', + '25165', '13807', '72142', + '70499', + '70464', '71429', '18111', + '70857', + '29545', '36425', '52706', + '36194', + '42963', '75068', '47921', + '74763', + '90990', '89456', '62073', + '88397', + '73963', '75885', '62657', + '12530', + '81146', '57434', '25099', + '41429', + '98441', '48713', '52552', + '31667', + '14072', '13903', '44709', + '85429', + '58017', '38295', '44875', + '73541', + '30091', '12707', '23762', + '62258', + '33247', '78722', '77431', + '14510', + '35656', 
'72428', '92082', + '35267', + '43759', '24354', '90952', + '11512', + '21242', '22579', '56114', + '32339', + '52282', '41791', '24484', + '95020', + '28408', '99710', '11899', + '43344', + '72915', '27644', '62708', + '74479', + '17177', '32619', '12351', + '91339', + '31169', '57081', '53522', + '16712', + '34419', '71779', '44187', + '46206', + '96099', '61910', '53664', + '12295', + '31837', '33096', '10813', + '63048', + '31732', '79118', '73084', + '72783', + '84952', '46965', '77956', + '39815', + '32311', '75329', '48156', + '30826', + '49661', '13736', '92076', + '74865', + '88149', '92397', '52777', + '68453', + '32012', '21222', '52721', + '24626', + '18210', '42177', '91791', + '75251', + '82075', '44372', '45542', + '20609', + '60115', '17362', '22750', + '90434', + '31852', '54071', '33762', + '14705', + '40718', '56433', '30996', + '40657', + '49056', '23585', '66455', + '41021', + '74736', '72151', '37007', + '21729', + '60177', '84558', '59027', + '93855', + '60022', '86443', '19541', + '86886', + '30532', '39062', '48532', + '34713', + '52077', '22564', '64638', + '15273', + '31677', '36138', '62367', + '60261', + '80213', '42818', '25113', + '72378', + '69802', '69096', '55443', + '28820', + '13848', '78258', '37490', + '30556', + '77380', '28447', '44550', + '26791', + '70609', '82182', '33306', + '43224', + '22322', '86959', '68519', + '14308', + '46501', '81131', '34056', + '61991', + '19896', '87804', '65774', + '92564' ) + INTERSECT + SELECT ca_zip + FROM (SELECT Substr(ca_zip, 1, 5) ca_zip, + Count(*) cnt + FROM customer_address, + customer + WHERE ca_address_sk = c_current_addr_sk + AND c_preferred_cust_flag = 'Y' + GROUP BY ca_zip + HAVING Count(*) > 10)A1)A2) V1 +WHERE ss_store_sk = s_store_sk + AND ss_sold_date_sk = d_date_sk + AND d_qoy = 2 + AND d_year = 2000 + AND ( Substr(s_zip, 1, 2) = Substr(V1.ca_zip, 1, 2) ) +GROUP BY s_store_name +ORDER BY s_store_name +LIMIT 100; diff --git 
a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query09.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query09.sql new file mode 100644 index 000000000..729ac26ba --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query09.sql @@ -0,0 +1,63 @@ +-- start query 9 in stream 0 using template query9.tpl +SELECT CASE + WHEN (SELECT Count(*) + FROM store_sales + WHERE ss_quantity BETWEEN 1 AND 20) > 3672 THEN + (SELECT Avg(ss_ext_list_price) + FROM store_sales + WHERE + ss_quantity BETWEEN 1 AND 20) + ELSE (SELECT Avg(ss_net_profit) + FROM store_sales + WHERE ss_quantity BETWEEN 1 AND 20) + END bucket1, + CASE + WHEN (SELECT Count(*) + FROM store_sales + WHERE ss_quantity BETWEEN 21 AND 40) > 3392 THEN + (SELECT Avg(ss_ext_list_price) + FROM store_sales + WHERE + ss_quantity BETWEEN 21 AND 40) + ELSE (SELECT Avg(ss_net_profit) + FROM store_sales + WHERE ss_quantity BETWEEN 21 AND 40) + END bucket2, + CASE + WHEN (SELECT Count(*) + FROM store_sales + WHERE ss_quantity BETWEEN 41 AND 60) > 32784 THEN + (SELECT Avg(ss_ext_list_price) + FROM store_sales + WHERE + ss_quantity BETWEEN 41 AND 60) + ELSE (SELECT Avg(ss_net_profit) + FROM store_sales + WHERE ss_quantity BETWEEN 41 AND 60) + END bucket3, + CASE + WHEN (SELECT Count(*) + FROM store_sales + WHERE ss_quantity BETWEEN 61 AND 80) > 26032 THEN + (SELECT Avg(ss_ext_list_price) + FROM store_sales + WHERE + ss_quantity BETWEEN 61 AND 80) + ELSE (SELECT Avg(ss_net_profit) + FROM store_sales + WHERE ss_quantity BETWEEN 61 AND 80) + END bucket4, + CASE + WHEN (SELECT Count(*) + FROM store_sales + WHERE ss_quantity BETWEEN 81 AND 100) > 23982 THEN + (SELECT Avg(ss_ext_list_price) + FROM store_sales + WHERE + ss_quantity BETWEEN 81 AND 100) + ELSE (SELECT Avg(ss_net_profit) + FROM store_sales + WHERE ss_quantity BETWEEN 81 AND 100) + END bucket5 +FROM reason +WHERE r_reason_sk = 1; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query10.sql 
b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query10.sql new file mode 100644 index 000000000..c01fa898f --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query10.sql @@ -0,0 +1,62 @@ +-- start query 10 in stream 0 using template query10.tpl +SELECT cd_gender, + cd_marital_status, + cd_education_status, + Count(*) cnt1, + cd_purchase_estimate, + Count(*) cnt2, + cd_credit_rating, + Count(*) cnt3, + cd_dep_count, + Count(*) cnt4, + cd_dep_employed_count, + Count(*) cnt5, + cd_dep_college_count, + Count(*) cnt6 +FROM customer c, + customer_address ca, + customer_demographics +WHERE c.c_current_addr_sk = ca.ca_address_sk + AND ca_county IN ( 'Lycoming County', 'Sheridan County', + 'Kandiyohi County', + 'Pike County', + 'Greene County' ) + AND cd_demo_sk = c.c_current_cdemo_sk + AND EXISTS (SELECT * + FROM store_sales, + date_dim + WHERE c.c_customer_sk = ss_customer_sk + AND ss_sold_date_sk = d_date_sk + AND d_year = 2002 + AND d_moy BETWEEN 4 AND 4 + 3) + AND ( EXISTS (SELECT * + FROM web_sales, + date_dim + WHERE c.c_customer_sk = ws_bill_customer_sk + AND ws_sold_date_sk = d_date_sk + AND d_year = 2002 + AND d_moy BETWEEN 4 AND 4 + 3) + OR EXISTS (SELECT * + FROM catalog_sales, + date_dim + WHERE c.c_customer_sk = cs_ship_customer_sk + AND cs_sold_date_sk = d_date_sk + AND d_year = 2002 + AND d_moy BETWEEN 4 AND 4 + 3) ) +GROUP BY cd_gender, + cd_marital_status, + cd_education_status, + cd_purchase_estimate, + cd_credit_rating, + cd_dep_count, + cd_dep_employed_count, + cd_dep_college_count +ORDER BY cd_gender, + cd_marital_status, + cd_education_status, + cd_purchase_estimate, + cd_credit_rating, + cd_dep_count, + cd_dep_employed_count, + cd_dep_college_count +LIMIT 100; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query11.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query11.sql new file mode 100644 index 000000000..fe31ef50c --- /dev/null +++ 
b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query11.sql @@ -0,0 +1,97 @@ +-- start query 11 in stream 0 using template query11.tpl +WITH year_total + AS (SELECT c_customer_id customer_id, + c_first_name customer_first_name + , + c_last_name + customer_last_name, + c_preferred_cust_flag + customer_preferred_cust_flag + , + c_birth_country + customer_birth_country, + c_login customer_login, + c_email_address + customer_email_address, + d_year dyear, + Sum(ss_ext_list_price - ss_ext_discount_amt) year_total, + 's' sale_type + FROM customer, + store_sales, + date_dim + WHERE c_customer_sk = ss_customer_sk + AND ss_sold_date_sk = d_date_sk + GROUP BY c_customer_id, + c_first_name, + c_last_name, + c_preferred_cust_flag, + c_birth_country, + c_login, + c_email_address, + d_year + UNION ALL + SELECT c_customer_id customer_id, + c_first_name customer_first_name + , + c_last_name + customer_last_name, + c_preferred_cust_flag + customer_preferred_cust_flag + , + c_birth_country + customer_birth_country, + c_login customer_login, + c_email_address + customer_email_address, + d_year dyear, + Sum(ws_ext_list_price - ws_ext_discount_amt) year_total, + 'w' sale_type + FROM customer, + web_sales, + date_dim + WHERE c_customer_sk = ws_bill_customer_sk + AND ws_sold_date_sk = d_date_sk + GROUP BY c_customer_id, + c_first_name, + c_last_name, + c_preferred_cust_flag, + c_birth_country, + c_login, + c_email_address, + d_year) +SELECT t_s_secyear.customer_id, + t_s_secyear.customer_first_name, + t_s_secyear.customer_last_name, + t_s_secyear.customer_birth_country +FROM year_total t_s_firstyear, + year_total t_s_secyear, + year_total t_w_firstyear, + year_total t_w_secyear +WHERE t_s_secyear.customer_id = t_s_firstyear.customer_id + AND t_s_firstyear.customer_id = t_w_secyear.customer_id + AND t_s_firstyear.customer_id = t_w_firstyear.customer_id + AND t_s_firstyear.sale_type = 's' + AND t_w_firstyear.sale_type = 'w' + AND t_s_secyear.sale_type = 's' + AND 
t_w_secyear.sale_type = 'w' + AND t_s_firstyear.dyear = 2001 + AND t_s_secyear.dyear = 2001 + 1 + AND t_w_firstyear.dyear = 2001 + AND t_w_secyear.dyear = 2001 + 1 + AND t_s_firstyear.year_total > 0 + AND t_w_firstyear.year_total > 0 + AND CASE + WHEN t_w_firstyear.year_total > 0 THEN t_w_secyear.year_total / + t_w_firstyear.year_total + ELSE 0.0 + END > CASE + WHEN t_s_firstyear.year_total > 0 THEN + t_s_secyear.year_total / + t_s_firstyear.year_total + ELSE 0.0 + END +ORDER BY t_s_secyear.customer_id, + t_s_secyear.customer_first_name, + t_s_secyear.customer_last_name, + t_s_secyear.customer_birth_country +LIMIT 100; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query12.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query12.sql new file mode 100644 index 000000000..fe325087b --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query12.sql @@ -0,0 +1,31 @@ +-- start query 12 in stream 0 using template query12.tpl +SELECT + i_item_id , + i_item_desc , + i_category , + i_class , + i_current_price , + Sum(ws_ext_sales_price) AS itemrevenue , + Sum(ws_ext_sales_price)*100/Sum(Sum(ws_ext_sales_price)) OVER (partition BY i_class) AS revenueratio +FROM web_sales , + item , + date_dim +WHERE ws_item_sk = i_item_sk +AND i_category IN ('Home', + 'Men', + 'Women') +AND ws_sold_date_sk = d_date_sk +AND d_date BETWEEN Cast('2000-05-11' AS DATE) AND ( + Cast('2000-05-11' AS DATE) + INTERVAL '30' day) +GROUP BY i_item_id , + i_item_desc , + i_category , + i_class , + i_current_price +ORDER BY i_category , + i_class , + i_item_id , + i_item_desc , + revenueratio +LIMIT 100; + diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query13.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query13.sql new file mode 100644 index 000000000..c445aa9df --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query13.sql @@ -0,0 +1,44 @@ +-- start query 13 in stream 0 using 
template query13.tpl +SELECT Avg(ss_quantity), + Avg(ss_ext_sales_price), + Avg(ss_ext_wholesale_cost), + Sum(ss_ext_wholesale_cost) +FROM store_sales, + store, + customer_demographics, + household_demographics, + customer_address, + date_dim +WHERE s_store_sk = ss_store_sk + AND ss_sold_date_sk = d_date_sk + AND d_year = 2001 + AND ( ( ss_hdemo_sk = hd_demo_sk + AND cd_demo_sk = ss_cdemo_sk + AND cd_marital_status = 'U' + AND cd_education_status = 'Advanced Degree' + AND ss_sales_price BETWEEN 100.00 AND 150.00 + AND hd_dep_count = 3 ) + OR ( ss_hdemo_sk = hd_demo_sk + AND cd_demo_sk = ss_cdemo_sk + AND cd_marital_status = 'M' + AND cd_education_status = 'Primary' + AND ss_sales_price BETWEEN 50.00 AND 100.00 + AND hd_dep_count = 1 ) + OR ( ss_hdemo_sk = hd_demo_sk + AND cd_demo_sk = ss_cdemo_sk + AND cd_marital_status = 'D' + AND cd_education_status = 'Secondary' + AND ss_sales_price BETWEEN 150.00 AND 200.00 + AND hd_dep_count = 1 ) ) + AND ( ( ss_addr_sk = ca_address_sk + AND ca_country = 'United States' + AND ca_state IN ( 'AZ', 'NE', 'IA' ) + AND ss_net_profit BETWEEN 100 AND 200 ) + OR ( ss_addr_sk = ca_address_sk + AND ca_country = 'United States' + AND ca_state IN ( 'MS', 'CA', 'NV' ) + AND ss_net_profit BETWEEN 150 AND 300 ) + OR ( ss_addr_sk = ca_address_sk + AND ca_country = 'United States' + AND ca_state IN ( 'GA', 'TX', 'NJ' ) + AND ss_net_profit BETWEEN 50 AND 250 ) ); diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query14.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query14.sql new file mode 100644 index 000000000..40467ce39 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query14.sql @@ -0,0 +1,245 @@ +-- start query 14 in stream 0 using template query14.tpl +WITH cross_items + AS (SELECT i_item_sk ss_item_sk + FROM item, + (SELECT iss.i_brand_id brand_id, + iss.i_class_id class_id, + iss.i_category_id category_id + FROM store_sales, + item iss, + date_dim d1 + WHERE ss_item_sk = 
iss.i_item_sk + AND ss_sold_date_sk = d1.d_date_sk + AND d1.d_year BETWEEN 1999 AND 1999 + 2 + INTERSECT + SELECT ics.i_brand_id, + ics.i_class_id, + ics.i_category_id + FROM catalog_sales, + item ics, + date_dim d2 + WHERE cs_item_sk = ics.i_item_sk + AND cs_sold_date_sk = d2.d_date_sk + AND d2.d_year BETWEEN 1999 AND 1999 + 2 + INTERSECT + SELECT iws.i_brand_id, + iws.i_class_id, + iws.i_category_id + FROM web_sales, + item iws, + date_dim d3 + WHERE ws_item_sk = iws.i_item_sk + AND ws_sold_date_sk = d3.d_date_sk + AND d3.d_year BETWEEN 1999 AND 1999 + 2) + WHERE i_brand_id = brand_id + AND i_class_id = class_id + AND i_category_id = category_id), + avg_sales + AS (SELECT Avg(quantity * list_price) average_sales + FROM (SELECT ss_quantity quantity, + ss_list_price list_price + FROM store_sales, + date_dim + WHERE ss_sold_date_sk = d_date_sk + AND d_year BETWEEN 1999 AND 1999 + 2 + UNION ALL + SELECT cs_quantity quantity, + cs_list_price list_price + FROM catalog_sales, + date_dim + WHERE cs_sold_date_sk = d_date_sk + AND d_year BETWEEN 1999 AND 1999 + 2 + UNION ALL + SELECT ws_quantity quantity, + ws_list_price list_price + FROM web_sales, + date_dim + WHERE ws_sold_date_sk = d_date_sk + AND d_year BETWEEN 1999 AND 1999 + 2) x) +SELECT channel, + i_brand_id, + i_class_id, + i_category_id, + Sum(sales), + Sum(number_sales) +FROM (SELECT 'store' channel, + i_brand_id, + i_class_id, + i_category_id, + Sum(ss_quantity * ss_list_price) sales, + Count(*) number_sales + FROM store_sales, + item, + date_dim + WHERE ss_item_sk IN (SELECT ss_item_sk + FROM cross_items) + AND ss_item_sk = i_item_sk + AND ss_sold_date_sk = d_date_sk + AND d_year = 1999 + 2 + AND d_moy = 11 + GROUP BY i_brand_id, + i_class_id, + i_category_id + HAVING Sum(ss_quantity * ss_list_price) > (SELECT average_sales + FROM avg_sales) + UNION ALL + SELECT 'catalog' channel, + i_brand_id, + i_class_id, + i_category_id, + Sum(cs_quantity * cs_list_price) sales, + Count(*) number_sales + FROM 
catalog_sales, + item, + date_dim + WHERE cs_item_sk IN (SELECT ss_item_sk + FROM cross_items) + AND cs_item_sk = i_item_sk + AND cs_sold_date_sk = d_date_sk + AND d_year = 1999 + 2 + AND d_moy = 11 + GROUP BY i_brand_id, + i_class_id, + i_category_id + HAVING Sum(cs_quantity * cs_list_price) > (SELECT average_sales + FROM avg_sales) + UNION ALL + SELECT 'web' channel, + i_brand_id, + i_class_id, + i_category_id, + Sum(ws_quantity * ws_list_price) sales, + Count(*) number_sales + FROM web_sales, + item, + date_dim + WHERE ws_item_sk IN (SELECT ss_item_sk + FROM cross_items) + AND ws_item_sk = i_item_sk + AND ws_sold_date_sk = d_date_sk + AND d_year = 1999 + 2 + AND d_moy = 11 + GROUP BY i_brand_id, + i_class_id, + i_category_id + HAVING Sum(ws_quantity * ws_list_price) > (SELECT average_sales + FROM avg_sales)) y +GROUP BY rollup ( channel, i_brand_id, i_class_id, i_category_id ) +ORDER BY channel, + i_brand_id, + i_class_id, + i_category_id +LIMIT 100; + +WITH cross_items + AS (SELECT i_item_sk ss_item_sk + FROM item, + (SELECT iss.i_brand_id brand_id, + iss.i_class_id class_id, + iss.i_category_id category_id + FROM store_sales, + item iss, + date_dim d1 + WHERE ss_item_sk = iss.i_item_sk + AND ss_sold_date_sk = d1.d_date_sk + AND d1.d_year BETWEEN 1999 AND 1999 + 2 + INTERSECT + SELECT ics.i_brand_id, + ics.i_class_id, + ics.i_category_id + FROM catalog_sales, + item ics, + date_dim d2 + WHERE cs_item_sk = ics.i_item_sk + AND cs_sold_date_sk = d2.d_date_sk + AND d2.d_year BETWEEN 1999 AND 1999 + 2 + INTERSECT + SELECT iws.i_brand_id, + iws.i_class_id, + iws.i_category_id + FROM web_sales, + item iws, + date_dim d3 + WHERE ws_item_sk = iws.i_item_sk + AND ws_sold_date_sk = d3.d_date_sk + AND d3.d_year BETWEEN 1999 AND 1999 + 2) x + WHERE i_brand_id = brand_id + AND i_class_id = class_id + AND i_category_id = category_id), + avg_sales + AS (SELECT Avg(quantity * list_price) average_sales + FROM (SELECT ss_quantity quantity, + ss_list_price list_price + FROM 
store_sales, + date_dim + WHERE ss_sold_date_sk = d_date_sk + AND d_year BETWEEN 1999 AND 1999 + 2 + UNION ALL + SELECT cs_quantity quantity, + cs_list_price list_price + FROM catalog_sales, + date_dim + WHERE cs_sold_date_sk = d_date_sk + AND d_year BETWEEN 1999 AND 1999 + 2 + UNION ALL + SELECT ws_quantity quantity, + ws_list_price list_price + FROM web_sales, + date_dim + WHERE ws_sold_date_sk = d_date_sk + AND d_year BETWEEN 1999 AND 1999 + 2) x) +SELECT * +FROM (SELECT 'store' channel, + i_brand_id, + i_class_id, + i_category_id, + Sum(ss_quantity * ss_list_price) sales, + Count(*) number_sales + FROM store_sales, + item, + date_dim + WHERE ss_item_sk IN (SELECT ss_item_sk + FROM cross_items) + AND ss_item_sk = i_item_sk + AND ss_sold_date_sk = d_date_sk + AND d_week_seq = (SELECT d_week_seq + FROM date_dim + WHERE d_year = 1999 + 1 + AND d_moy = 12 + AND d_dom = 25) + GROUP BY i_brand_id, + i_class_id, + i_category_id + HAVING Sum(ss_quantity * ss_list_price) > (SELECT average_sales + FROM avg_sales)) this_year, + (SELECT 'store' channel, + i_brand_id, + i_class_id, + i_category_id, + Sum(ss_quantity * ss_list_price) sales, + Count(*) number_sales + FROM store_sales, + item, + date_dim + WHERE ss_item_sk IN (SELECT ss_item_sk + FROM cross_items) + AND ss_item_sk = i_item_sk + AND ss_sold_date_sk = d_date_sk + AND d_week_seq = (SELECT d_week_seq + FROM date_dim + WHERE d_year = 1999 + AND d_moy = 12 + AND d_dom = 25) + GROUP BY i_brand_id, + i_class_id, + i_category_id + HAVING Sum(ss_quantity * ss_list_price) > (SELECT average_sales + FROM avg_sales)) last_year +WHERE this_year.i_brand_id = last_year.i_brand_id + AND this_year.i_class_id = last_year.i_class_id + AND this_year.i_category_id = last_year.i_category_id +ORDER BY this_year.channel, + this_year.i_brand_id, + this_year.i_class_id, + this_year.i_category_id +LIMIT 100; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query15.sql 
b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query15.sql new file mode 100644 index 000000000..89f956ee2 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query15.sql @@ -0,0 +1,20 @@ +-- start query 15 in stream 0 using template query15.tpl +SELECT ca_zip, + Sum(cs_sales_price) +FROM catalog_sales, + customer, + customer_address, + date_dim +WHERE cs_bill_customer_sk = c_customer_sk + AND c_current_addr_sk = ca_address_sk + AND ( Substr(ca_zip, 1, 5) IN ( '85669', '86197', '88274', '83405', + '86475', '85392', '85460', '80348', + '81792' ) + OR ca_state IN ( 'CA', 'WA', 'GA' ) + OR cs_sales_price > 500 ) + AND cs_sold_date_sk = d_date_sk + AND d_qoy = 1 + AND d_year = 1998 +GROUP BY ca_zip +ORDER BY ca_zip +LIMIT 100; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query16.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query16.sql new file mode 100644 index 000000000..c21fd95fa --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query16.sql @@ -0,0 +1,34 @@ +-- start query 16 in stream 0 using template query16.tpl +SELECT + Count(DISTINCT cs_order_number) AS `order count` , + Sum(cs_ext_ship_cost) AS `total shipping cost` , + Sum(cs_net_profit) AS `total net profit` +FROM catalog_sales cs1 , + date_dim , + customer_address , + call_center +WHERE d_date BETWEEN '2002-3-01' AND ( + Cast('2002-3-01' AS DATE) + INTERVAL '60' day) +AND cs1.cs_ship_date_sk = d_date_sk +AND cs1.cs_ship_addr_sk = ca_address_sk +AND ca_state = 'IA' +AND cs1.cs_call_center_sk = cc_call_center_sk +AND cc_county IN ('Williamson County', + 'Williamson County', + 'Williamson County', + 'Williamson County', + 'Williamson County' ) +AND EXISTS + ( + SELECT * + FROM catalog_sales cs2 + WHERE cs1.cs_order_number = cs2.cs_order_number + AND cs1.cs_warehouse_sk <> cs2.cs_warehouse_sk) +AND NOT EXISTS + ( + SELECT * + FROM catalog_returns cr1 + WHERE cs1.cs_order_number = cr1.cr_order_number) +ORDER 
BY count(DISTINCT cs_order_number) +LIMIT 100; + diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query17.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query17.sql new file mode 100644 index 000000000..9871fae66 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query17.sql @@ -0,0 +1,56 @@ +-- start query 17 in stream 0 using template query17.tpl +SELECT i_item_id, + i_item_desc, + s_state, + Count(ss_quantity) AS + store_sales_quantitycount, + Avg(ss_quantity) AS + store_sales_quantityave, + Stddev_samp(ss_quantity) AS + store_sales_quantitystdev, + Stddev_samp(ss_quantity) / Avg(ss_quantity) AS + store_sales_quantitycov, + Count(sr_return_quantity) AS + store_returns_quantitycount, + Avg(sr_return_quantity) AS + store_returns_quantityave, + Stddev_samp(sr_return_quantity) AS + store_returns_quantitystdev, + Stddev_samp(sr_return_quantity) / Avg(sr_return_quantity) AS + store_returns_quantitycov, + Count(cs_quantity) AS + catalog_sales_quantitycount, + Avg(cs_quantity) AS + catalog_sales_quantityave, + Stddev_samp(cs_quantity) / Avg(cs_quantity) AS + catalog_sales_quantitystdev, + Stddev_samp(cs_quantity) / Avg(cs_quantity) AS + catalog_sales_quantitycov +FROM store_sales, + store_returns, + catalog_sales, + date_dim d1, + date_dim d2, + date_dim d3, + store, + item +WHERE d1.d_quarter_name = '1999Q1' + AND d1.d_date_sk = ss_sold_date_sk + AND i_item_sk = ss_item_sk + AND s_store_sk = ss_store_sk + AND ss_customer_sk = sr_customer_sk + AND ss_item_sk = sr_item_sk + AND ss_ticket_number = sr_ticket_number + AND sr_returned_date_sk = d2.d_date_sk + AND d2.d_quarter_name IN ( '1999Q1', '1999Q2', '1999Q3' ) + AND sr_customer_sk = cs_bill_customer_sk + AND sr_item_sk = cs_item_sk + AND cs_sold_date_sk = d3.d_date_sk + AND d3.d_quarter_name IN ( '1999Q1', '1999Q2', '1999Q3' ) +GROUP BY i_item_id, + i_item_desc, + s_state +ORDER BY i_item_id, + i_item_desc, + s_state +LIMIT 100; diff --git 
a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query18.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query18.sql new file mode 100644 index 000000000..d94c68015 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query18.sql @@ -0,0 +1,38 @@ +-- start query 18 in stream 0 using template query18.tpl +SELECT i_item_id, + ca_country, + ca_state, + ca_county, + Avg(Cast(cs_quantity AS NUMERIC(12, 2))) agg1, + Avg(Cast(cs_list_price AS NUMERIC(12, 2))) agg2, + Avg(Cast(cs_coupon_amt AS NUMERIC(12, 2))) agg3, + Avg(Cast(cs_sales_price AS NUMERIC(12, 2))) agg4, + Avg(Cast(cs_net_profit AS NUMERIC(12, 2))) agg5, + Avg(Cast(c_birth_year AS NUMERIC(12, 2))) agg6, + Avg(Cast(cd1.cd_dep_count AS NUMERIC(12, 2))) agg7 +FROM catalog_sales, + customer_demographics cd1, + customer_demographics cd2, + customer, + customer_address, + date_dim, + item +WHERE cs_sold_date_sk = d_date_sk + AND cs_item_sk = i_item_sk + AND cs_bill_cdemo_sk = cd1.cd_demo_sk + AND cs_bill_customer_sk = c_customer_sk + AND cd1.cd_gender = 'F' + AND cd1.cd_education_status = 'Secondary' + AND c_current_cdemo_sk = cd2.cd_demo_sk + AND c_current_addr_sk = ca_address_sk + AND c_birth_month IN ( 8, 4, 2, 5, + 11, 9 ) + AND d_year = 2001 + AND ca_state IN ( 'KS', 'IA', 'AL', 'UT', + 'VA', 'NC', 'TX' ) +GROUP BY rollup ( i_item_id, ca_country, ca_state, ca_county ) +ORDER BY ca_country, + ca_state, + ca_county, + i_item_id +LIMIT 100; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query19.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query19.sql new file mode 100644 index 000000000..d053670b4 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query19.sql @@ -0,0 +1,31 @@ +-- start query 19 in stream 0 using template query19.tpl +SELECT i_brand_id brand_id, + i_brand brand, + i_manufact_id, + i_manufact, + Sum(ss_ext_sales_price) ext_price +FROM date_dim, + store_sales, + item, + customer, + 
customer_address, + store +WHERE d_date_sk = ss_sold_date_sk + AND ss_item_sk = i_item_sk + AND i_manager_id = 38 + AND d_moy = 12 + AND d_year = 1998 + AND ss_customer_sk = c_customer_sk + AND c_current_addr_sk = ca_address_sk + AND Substr(ca_zip, 1, 5) <> Substr(s_zip, 1, 5) + AND ss_store_sk = s_store_sk +GROUP BY i_brand, + i_brand_id, + i_manufact_id, + i_manufact +ORDER BY ext_price DESC, + i_brand, + i_brand_id, + i_manufact_id, + i_manufact +LIMIT 100; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query20.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query20.sql new file mode 100644 index 000000000..b5d060d1a --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query20.sql @@ -0,0 +1,31 @@ +-- start query 20 in stream 0 using template query20.tpl +SELECT + i_item_id , + i_item_desc , + i_category , + i_class , + i_current_price , + Sum(cs_ext_sales_price) AS itemrevenue , + Sum(cs_ext_sales_price)*100/Sum(Sum(cs_ext_sales_price)) OVER (partition BY i_class) AS revenueratio +FROM catalog_sales , + item , + date_dim +WHERE cs_item_sk = i_item_sk +AND i_category IN ('Children', + 'Women', + 'Electronics') +AND cs_sold_date_sk = d_date_sk +AND d_date BETWEEN Cast('2001-02-03' AS DATE) AND ( + Cast('2001-02-03' AS DATE) + INTERVAL '30' day) +GROUP BY i_item_id , + i_item_desc , + i_category , + i_class , + i_current_price +ORDER BY i_category , + i_class , + i_item_id , + i_item_desc , + revenueratio +LIMIT 100; + diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query21.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query21.sql new file mode 100644 index 000000000..1efc3b320 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query21.sql @@ -0,0 +1,39 @@ +-- start query 21 in stream 0 using template query21.tpl +SELECT + * +FROM ( + SELECT w_warehouse_name , + i_item_id , + Sum( + CASE + WHEN ( + Cast(d_date AS DATE) < Cast ('2000-05-13' 
AS DATE)) THEN inv_quantity_on_hand + ELSE 0 + END) AS inv_before , + Sum( + CASE + WHEN ( + Cast(d_date AS DATE) >= Cast ('2000-05-13' AS DATE)) THEN inv_quantity_on_hand + ELSE 0 + END) AS inv_after + FROM inventory , + warehouse , + item , + date_dim + WHERE i_current_price BETWEEN 0.99 AND 1.49 + AND i_item_sk = inv_item_sk + AND inv_warehouse_sk = w_warehouse_sk + AND inv_date_sk = d_date_sk + AND d_date BETWEEN (Cast ('2000-05-13' AS DATE) - INTERVAL '30' day) AND ( + cast ('2000-05-13' AS date) + INTERVAL '30' day) + GROUP BY w_warehouse_name, + i_item_id) x +WHERE ( + CASE + WHEN inv_before > 0 THEN inv_after / inv_before + ELSE NULL + END) BETWEEN 2.0/3.0 AND 3.0/2.0 +ORDER BY w_warehouse_name , + i_item_id +LIMIT 100; + diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query22.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query22.sql new file mode 100644 index 000000000..b3e566fc3 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query22.sql @@ -0,0 +1,21 @@ +-- start query 22 in stream 0 using template query22.tpl +SELECT i_product_name, + i_brand, + i_class, + i_category, + Avg(inv_quantity_on_hand) qoh +FROM inventory, + date_dim, + item, + warehouse +WHERE inv_date_sk = d_date_sk + AND inv_item_sk = i_item_sk + AND inv_warehouse_sk = w_warehouse_sk + AND d_month_seq BETWEEN 1205 AND 1205 + 11 +GROUP BY rollup( i_product_name, i_brand, i_class, i_category ) +ORDER BY qoh, + i_product_name, + i_brand, + i_class, + i_category +LIMIT 100; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query23.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query23.sql new file mode 100644 index 000000000..9e0ef707f --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query23.sql @@ -0,0 +1,136 @@ +-- start query 23 in stream 0 using template query23.tpl +WITH frequent_ss_items + AS (SELECT Substr(i_item_desc, 1, 30) itemdesc, + i_item_sk item_sk, + 
d_date solddate, + Count(*) cnt + FROM store_sales, + date_dim, + item + WHERE ss_sold_date_sk = d_date_sk + AND ss_item_sk = i_item_sk + AND d_year IN ( 1998, 1998 + 1, 1998 + 2, 1998 + 3 ) + GROUP BY Substr(i_item_desc, 1, 30), + i_item_sk, + d_date + HAVING Count(*) > 4), + max_store_sales + AS (SELECT Max(csales) tpcds_cmax + FROM (SELECT c_customer_sk, + Sum(ss_quantity * ss_sales_price) csales + FROM store_sales, + customer, + date_dim + WHERE ss_customer_sk = c_customer_sk + AND ss_sold_date_sk = d_date_sk + AND d_year IN ( 1998, 1998 + 1, 1998 + 2, 1998 + 3 ) + GROUP BY c_customer_sk)), + best_ss_customer + AS (SELECT c_customer_sk, + Sum(ss_quantity * ss_sales_price) ssales + FROM store_sales, + customer + WHERE ss_customer_sk = c_customer_sk + GROUP BY c_customer_sk + HAVING Sum(ss_quantity * ss_sales_price) > + ( 95 / 100.0 ) * (SELECT * + FROM max_store_sales)) +SELECT Sum(sales) +FROM (SELECT cs_quantity * cs_list_price sales + FROM catalog_sales, + date_dim + WHERE d_year = 1998 + AND d_moy = 6 + AND cs_sold_date_sk = d_date_sk + AND cs_item_sk IN (SELECT item_sk + FROM frequent_ss_items) + AND cs_bill_customer_sk IN (SELECT c_customer_sk + FROM best_ss_customer) + UNION ALL + SELECT ws_quantity * ws_list_price sales + FROM web_sales, + date_dim + WHERE d_year = 1998 + AND d_moy = 6 + AND ws_sold_date_sk = d_date_sk + AND ws_item_sk IN (SELECT item_sk + FROM frequent_ss_items) + AND ws_bill_customer_sk IN (SELECT c_customer_sk + FROM best_ss_customer)) LIMIT 100; + +WITH frequent_ss_items + AS (SELECT Substr(i_item_desc, 1, 30) itemdesc, + i_item_sk item_sk, + d_date solddate, + Count(*) cnt + FROM store_sales, + date_dim, + item + WHERE ss_sold_date_sk = d_date_sk + AND ss_item_sk = i_item_sk + AND d_year IN ( 1998, 1998 + 1, 1998 + 2, 1998 + 3 ) + GROUP BY Substr(i_item_desc, 1, 30), + i_item_sk, + d_date + HAVING Count(*) > 4), + max_store_sales + AS (SELECT Max(csales) tpcds_cmax + FROM (SELECT c_customer_sk, + Sum(ss_quantity * ss_sales_price) 
csales + FROM store_sales, + customer, + date_dim + WHERE ss_customer_sk = c_customer_sk + AND ss_sold_date_sk = d_date_sk + AND d_year IN ( 1998, 1998 + 1, 1998 + 2, 1998 + 3 ) + GROUP BY c_customer_sk)), + best_ss_customer + AS (SELECT c_customer_sk, + Sum(ss_quantity * ss_sales_price) ssales + FROM store_sales, + customer + WHERE ss_customer_sk = c_customer_sk + GROUP BY c_customer_sk + HAVING Sum(ss_quantity * ss_sales_price) > + ( 95 / 100.0 ) * (SELECT * + FROM max_store_sales)) +SELECT c_last_name, + c_first_name, + sales +FROM (SELECT c_last_name, + c_first_name, + Sum(cs_quantity * cs_list_price) sales + FROM catalog_sales, + customer, + date_dim + WHERE d_year = 1998 + AND d_moy = 6 + AND cs_sold_date_sk = d_date_sk + AND cs_item_sk IN (SELECT item_sk + FROM frequent_ss_items) + AND cs_bill_customer_sk IN (SELECT c_customer_sk + FROM best_ss_customer) + AND cs_bill_customer_sk = c_customer_sk + GROUP BY c_last_name, + c_first_name + UNION ALL + SELECT c_last_name, + c_first_name, + Sum(ws_quantity * ws_list_price) sales + FROM web_sales, + customer, + date_dim + WHERE d_year = 1998 + AND d_moy = 6 + AND ws_sold_date_sk = d_date_sk + AND ws_item_sk IN (SELECT item_sk + FROM frequent_ss_items) + AND ws_bill_customer_sk IN (SELECT c_customer_sk + FROM best_ss_customer) + AND ws_bill_customer_sk = c_customer_sk + GROUP BY c_last_name, + c_first_name) +ORDER BY c_last_name, + c_first_name, + sales +LIMIT 100; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query24.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query24.sql new file mode 100644 index 000000000..c3922ccdd --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query24.sql @@ -0,0 +1,96 @@ +-- start query 24 in stream 0 using template query24.tpl +WITH ssales + AS (SELECT c_last_name, + c_first_name, + s_store_name, + ca_state, + s_state, + i_color, + i_current_price, + i_manager_id, + i_units, + i_size, + Sum(ss_net_profit) netpaid + FROM 
store_sales, + store_returns, + store, + item, + customer, + customer_address + WHERE ss_ticket_number = sr_ticket_number + AND ss_item_sk = sr_item_sk + AND ss_customer_sk = c_customer_sk + AND ss_item_sk = i_item_sk + AND ss_store_sk = s_store_sk + AND c_birth_country = Upper(ca_country) + AND s_zip = ca_zip + AND s_market_id = 6 + GROUP BY c_last_name, + c_first_name, + s_store_name, + ca_state, + s_state, + i_color, + i_current_price, + i_manager_id, + i_units, + i_size) +SELECT c_last_name, + c_first_name, + s_store_name, + Sum(netpaid) paid +FROM ssales +WHERE i_color = 'papaya' +GROUP BY c_last_name, + c_first_name, + s_store_name +HAVING Sum(netpaid) > (SELECT 0.05 * Avg(netpaid) + FROM ssales); + +WITH ssales + AS (SELECT c_last_name, + c_first_name, + s_store_name, + ca_state, + s_state, + i_color, + i_current_price, + i_manager_id, + i_units, + i_size, + Sum(ss_net_profit) netpaid + FROM store_sales, + store_returns, + store, + item, + customer, + customer_address + WHERE ss_ticket_number = sr_ticket_number + AND ss_item_sk = sr_item_sk + AND ss_customer_sk = c_customer_sk + AND ss_item_sk = i_item_sk + AND ss_store_sk = s_store_sk + AND c_birth_country = Upper(ca_country) + AND s_zip = ca_zip + AND s_market_id = 6 + GROUP BY c_last_name, + c_first_name, + s_store_name, + ca_state, + s_state, + i_color, + i_current_price, + i_manager_id, + i_units, + i_size) +SELECT c_last_name, + c_first_name, + s_store_name, + Sum(netpaid) paid +FROM ssales +WHERE i_color = 'chartreuse' +GROUP BY c_last_name, + c_first_name, + s_store_name +HAVING Sum(netpaid) > (SELECT 0.05 * Avg(netpaid) + FROM ssales); diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query25.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query25.sql new file mode 100644 index 000000000..1d52c6ef8 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query25.sql @@ -0,0 +1,41 @@ +-- start query 25 in stream 0 using template query25.tpl 
+SELECT i_item_id, + i_item_desc, + s_store_id, + s_store_name, + Max(ss_net_profit) AS store_sales_profit, + Max(sr_net_loss) AS store_returns_loss, + Max(cs_net_profit) AS catalog_sales_profit +FROM store_sales, + store_returns, + catalog_sales, + date_dim d1, + date_dim d2, + date_dim d3, + store, + item +WHERE d1.d_moy = 4 + AND d1.d_year = 2001 + AND d1.d_date_sk = ss_sold_date_sk + AND i_item_sk = ss_item_sk + AND s_store_sk = ss_store_sk + AND ss_customer_sk = sr_customer_sk + AND ss_item_sk = sr_item_sk + AND ss_ticket_number = sr_ticket_number + AND sr_returned_date_sk = d2.d_date_sk + AND d2.d_moy BETWEEN 4 AND 10 + AND d2.d_year = 2001 + AND sr_customer_sk = cs_bill_customer_sk + AND sr_item_sk = cs_item_sk + AND cs_sold_date_sk = d3.d_date_sk + AND d3.d_moy BETWEEN 4 AND 10 + AND d3.d_year = 2001 +GROUP BY i_item_id, + i_item_desc, + s_store_id, + s_store_name +ORDER BY i_item_id, + i_item_desc, + s_store_id, + s_store_name +LIMIT 100; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query26.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query26.sql new file mode 100644 index 000000000..8bf38fa04 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query26.sql @@ -0,0 +1,24 @@ +-- start query 26 in stream 0 using template query26.tpl +SELECT i_item_id, + Avg(cs_quantity) agg1, + Avg(cs_list_price) agg2, + Avg(cs_coupon_amt) agg3, + Avg(cs_sales_price) agg4 +FROM catalog_sales, + customer_demographics, + date_dim, + item, + promotion +WHERE cs_sold_date_sk = d_date_sk + AND cs_item_sk = i_item_sk + AND cs_bill_cdemo_sk = cd_demo_sk + AND cs_promo_sk = p_promo_sk + AND cd_gender = 'F' + AND cd_marital_status = 'W' + AND cd_education_status = 'Secondary' + AND ( p_channel_email = 'N' + OR p_channel_event = 'N' ) + AND d_year = 2000 +GROUP BY i_item_id +ORDER BY i_item_id +LIMIT 100; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query27.sql 
b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query27.sql new file mode 100644 index 000000000..1cc646391 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query27.sql @@ -0,0 +1,27 @@ +-- start query 27 in stream 0 using template query27.tpl +SELECT i_item_id, + s_state, + Grouping(s_state) g_state, + Avg(ss_quantity) agg1, + Avg(ss_list_price) agg2, + Avg(ss_coupon_amt) agg3, + Avg(ss_sales_price) agg4 +FROM store_sales, + customer_demographics, + date_dim, + store, + item +WHERE ss_sold_date_sk = d_date_sk + AND ss_item_sk = i_item_sk + AND ss_store_sk = s_store_sk + AND ss_cdemo_sk = cd_demo_sk + AND cd_gender = 'M' + AND cd_marital_status = 'D' + AND cd_education_status = 'College' + AND d_year = 2000 + AND s_state IN ( 'TN', 'TN', 'TN', 'TN', + 'TN', 'TN' ) +GROUP BY rollup ( i_item_id, s_state ) +ORDER BY i_item_id, + s_state +LIMIT 100; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query28.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query28.sql new file mode 100644 index 000000000..ff014304b --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query28.sql @@ -0,0 +1,51 @@ +-- start query 28 in stream 0 using template query28.tpl +SELECT * +FROM (SELECT Avg(ss_list_price) B1_LP, + Count(ss_list_price) B1_CNT, + Count(DISTINCT ss_list_price) B1_CNTD + FROM store_sales + WHERE ss_quantity BETWEEN 0 AND 5 + AND ( ss_list_price BETWEEN 18 AND 18 + 10 + OR ss_coupon_amt BETWEEN 1939 AND 1939 + 1000 + OR ss_wholesale_cost BETWEEN 34 AND 34 + 20 )) B1, + (SELECT Avg(ss_list_price) B2_LP, + Count(ss_list_price) B2_CNT, + Count(DISTINCT ss_list_price) B2_CNTD + FROM store_sales + WHERE ss_quantity BETWEEN 6 AND 10 + AND ( ss_list_price BETWEEN 1 AND 1 + 10 + OR ss_coupon_amt BETWEEN 35 AND 35 + 1000 + OR ss_wholesale_cost BETWEEN 50 AND 50 + 20 )) B2, + (SELECT Avg(ss_list_price) B3_LP, + Count(ss_list_price) B3_CNT, + Count(DISTINCT ss_list_price) B3_CNTD + 
FROM store_sales + WHERE ss_quantity BETWEEN 11 AND 15 + AND ( ss_list_price BETWEEN 91 AND 91 + 10 + OR ss_coupon_amt BETWEEN 1412 AND 1412 + 1000 + OR ss_wholesale_cost BETWEEN 17 AND 17 + 20 )) B3, + (SELECT Avg(ss_list_price) B4_LP, + Count(ss_list_price) B4_CNT, + Count(DISTINCT ss_list_price) B4_CNTD + FROM store_sales + WHERE ss_quantity BETWEEN 16 AND 20 + AND ( ss_list_price BETWEEN 9 AND 9 + 10 + OR ss_coupon_amt BETWEEN 5270 AND 5270 + 1000 + OR ss_wholesale_cost BETWEEN 29 AND 29 + 20 )) B4, + (SELECT Avg(ss_list_price) B5_LP, + Count(ss_list_price) B5_CNT, + Count(DISTINCT ss_list_price) B5_CNTD + FROM store_sales + WHERE ss_quantity BETWEEN 21 AND 25 + AND ( ss_list_price BETWEEN 45 AND 45 + 10 + OR ss_coupon_amt BETWEEN 826 AND 826 + 1000 + OR ss_wholesale_cost BETWEEN 5 AND 5 + 20 )) B5, + (SELECT Avg(ss_list_price) B6_LP, + Count(ss_list_price) B6_CNT, + Count(DISTINCT ss_list_price) B6_CNTD + FROM store_sales + WHERE ss_quantity BETWEEN 26 AND 30 + AND ( ss_list_price BETWEEN 174 AND 174 + 10 + OR ss_coupon_amt BETWEEN 5548 AND 5548 + 1000 + OR ss_wholesale_cost BETWEEN 42 AND 42 + 20 )) B6 +LIMIT 100; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query29.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query29.sql new file mode 100644 index 000000000..537076927 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query29.sql @@ -0,0 +1,40 @@ +-- start query 29 in stream 0 using template query29.tpl +SELECT i_item_id, + i_item_desc, + s_store_id, + s_store_name, + Avg(ss_quantity) AS store_sales_quantity, + Avg(sr_return_quantity) AS store_returns_quantity, + Avg(cs_quantity) AS catalog_sales_quantity +FROM store_sales, + store_returns, + catalog_sales, + date_dim d1, + date_dim d2, + date_dim d3, + store, + item +WHERE d1.d_moy = 4 + AND d1.d_year = 1998 + AND d1.d_date_sk = ss_sold_date_sk + AND i_item_sk = ss_item_sk + AND s_store_sk = ss_store_sk + AND ss_customer_sk = 
sr_customer_sk + AND ss_item_sk = sr_item_sk + AND ss_ticket_number = sr_ticket_number + AND sr_returned_date_sk = d2.d_date_sk + AND d2.d_moy BETWEEN 4 AND 4 + 3 + AND d2.d_year = 1998 + AND sr_customer_sk = cs_bill_customer_sk + AND sr_item_sk = cs_item_sk + AND cs_sold_date_sk = d3.d_date_sk + AND d3.d_year IN ( 1998, 1998 + 1, 1998 + 2 ) +GROUP BY i_item_id, + i_item_desc, + s_store_id, + s_store_name +ORDER BY i_item_id, + i_item_desc, + s_store_id, + s_store_name +LIMIT 100; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query30.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query30.sql new file mode 100644 index 000000000..c4ef74cc7 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query30.sql @@ -0,0 +1,49 @@ +-- start query 30 in stream 0 using template query30.tpl +WITH customer_total_return + AS (SELECT wr_returning_customer_sk AS ctr_customer_sk, + ca_state AS ctr_state, + Sum(wr_return_amt) AS ctr_total_return + FROM web_returns, + date_dim, + customer_address + WHERE wr_returned_date_sk = d_date_sk + AND d_year = 2000 + AND wr_returning_addr_sk = ca_address_sk + GROUP BY wr_returning_customer_sk, + ca_state) +SELECT c_customer_id, + c_salutation, + c_first_name, + c_last_name, + c_preferred_cust_flag, + c_birth_day, + c_birth_month, + c_birth_year, + c_birth_country, + c_login, + c_email_address, + c_last_review_date, + ctr_total_return +FROM customer_total_return ctr1, + customer_address, + customer +WHERE ctr1.ctr_total_return > (SELECT Avg(ctr_total_return) * 1.2 + FROM customer_total_return ctr2 + WHERE ctr1.ctr_state = ctr2.ctr_state) + AND ca_address_sk = c_current_addr_sk + AND ca_state = 'IN' + AND ctr1.ctr_customer_sk = c_customer_sk +ORDER BY c_customer_id, + c_salutation, + c_first_name, + c_last_name, + c_preferred_cust_flag, + c_birth_day, + c_birth_month, + c_birth_year, + c_birth_country, + c_login, + c_email_address, + c_last_review_date, + ctr_total_return +LIMIT 100; 
diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query31.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query31.sql new file mode 100644 index 000000000..66e428fc7 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query31.sql @@ -0,0 +1,73 @@ +-- start query 31 in stream 0 using template query31.tpl +WITH ss + AS (SELECT ca_county, + d_qoy, + d_year, + Sum(ss_ext_sales_price) AS store_sales + FROM store_sales, + date_dim, + customer_address + WHERE ss_sold_date_sk = d_date_sk + AND ss_addr_sk = ca_address_sk + GROUP BY ca_county, + d_qoy, + d_year), + ws + AS (SELECT ca_county, + d_qoy, + d_year, + Sum(ws_ext_sales_price) AS web_sales + FROM web_sales, + date_dim, + customer_address + WHERE ws_sold_date_sk = d_date_sk + AND ws_bill_addr_sk = ca_address_sk + GROUP BY ca_county, + d_qoy, + d_year) +SELECT ss1.ca_county, + ss1.d_year, + ws2.web_sales / ws1.web_sales web_q1_q2_increase, + ss2.store_sales / ss1.store_sales store_q1_q2_increase, + ws3.web_sales / ws2.web_sales web_q2_q3_increase, + ss3.store_sales / ss2.store_sales store_q2_q3_increase +FROM ss ss1, + ss ss2, + ss ss3, + ws ws1, + ws ws2, + ws ws3 +WHERE ss1.d_qoy = 1 + AND ss1.d_year = 2001 + AND ss1.ca_county = ss2.ca_county + AND ss2.d_qoy = 2 + AND ss2.d_year = 2001 + AND ss2.ca_county = ss3.ca_county + AND ss3.d_qoy = 3 + AND ss3.d_year = 2001 + AND ss1.ca_county = ws1.ca_county + AND ws1.d_qoy = 1 + AND ws1.d_year = 2001 + AND ws1.ca_county = ws2.ca_county + AND ws2.d_qoy = 2 + AND ws2.d_year = 2001 + AND ws1.ca_county = ws3.ca_county + AND ws3.d_qoy = 3 + AND ws3.d_year = 2001 + AND CASE + WHEN ws1.web_sales > 0 THEN ws2.web_sales / ws1.web_sales + ELSE NULL + END > CASE + WHEN ss1.store_sales > 0 THEN + ss2.store_sales / ss1.store_sales + ELSE NULL + END + AND CASE + WHEN ws2.web_sales > 0 THEN ws3.web_sales / ws2.web_sales + ELSE NULL + END > CASE + WHEN ss2.store_sales > 0 THEN + ss3.store_sales / ss2.store_sales + ELSE NULL + END 
+ORDER BY ss1.d_year; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query32.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query32.sql new file mode 100644 index 000000000..fbbf771f2 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query32.sql @@ -0,0 +1,22 @@ +-- start query 32 in stream 0 using template query32.tpl +SELECT + Sum(cs_ext_discount_amt) AS `excess discount amount` +FROM catalog_sales , + item , + date_dim +WHERE i_manufact_id = 610 +AND i_item_sk = cs_item_sk +AND d_date BETWEEN '2001-03-04' AND ( + Cast('2001-03-04' AS DATE) + INTERVAL '90' day) +AND d_date_sk = cs_sold_date_sk +AND cs_ext_discount_amt > + ( + SELECT 1.3 * avg(cs_ext_discount_amt) + FROM catalog_sales , + date_dim + WHERE cs_item_sk = i_item_sk + AND d_date BETWEEN '2001-03-04' AND ( + cast('2001-03-04' AS date) + INTERVAL '90' day) + AND d_date_sk = cs_sold_date_sk ) +LIMIT 100; + diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query33.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query33.sql new file mode 100644 index 000000000..1b6fcdc79 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query33.sql @@ -0,0 +1,65 @@ +-- start query 33 in stream 0 using template query33.tpl +WITH ss + AS (SELECT i_manufact_id, + Sum(ss_ext_sales_price) total_sales + FROM store_sales, + date_dim, + customer_address, + item + WHERE i_manufact_id IN (SELECT i_manufact_id + FROM item + WHERE i_category IN ( 'Books' )) + AND ss_item_sk = i_item_sk + AND ss_sold_date_sk = d_date_sk + AND d_year = 1999 + AND d_moy = 3 + AND ss_addr_sk = ca_address_sk + AND ca_gmt_offset = -5 + GROUP BY i_manufact_id), + cs + AS (SELECT i_manufact_id, + Sum(cs_ext_sales_price) total_sales + FROM catalog_sales, + date_dim, + customer_address, + item + WHERE i_manufact_id IN (SELECT i_manufact_id + FROM item + WHERE i_category IN ( 'Books' )) + AND cs_item_sk = i_item_sk + AND cs_sold_date_sk 
= d_date_sk + AND d_year = 1999 + AND d_moy = 3 + AND cs_bill_addr_sk = ca_address_sk + AND ca_gmt_offset = -5 + GROUP BY i_manufact_id), + ws + AS (SELECT i_manufact_id, + Sum(ws_ext_sales_price) total_sales + FROM web_sales, + date_dim, + customer_address, + item + WHERE i_manufact_id IN (SELECT i_manufact_id + FROM item + WHERE i_category IN ( 'Books' )) + AND ws_item_sk = i_item_sk + AND ws_sold_date_sk = d_date_sk + AND d_year = 1999 + AND d_moy = 3 + AND ws_bill_addr_sk = ca_address_sk + AND ca_gmt_offset = -5 + GROUP BY i_manufact_id) +SELECT i_manufact_id, + Sum(total_sales) total_sales +FROM (SELECT * + FROM ss + UNION ALL + SELECT * + FROM cs + UNION ALL + SELECT * + FROM ws) tmp1 +GROUP BY i_manufact_id +ORDER BY total_sales +LIMIT 100; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query34.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query34.sql new file mode 100644 index 000000000..7b5d954df --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query34.sql @@ -0,0 +1,46 @@ +-- start query 34 in stream 0 using template query34.tpl +SELECT c_last_name, + c_first_name, + c_salutation, + c_preferred_cust_flag, + ss_ticket_number, + cnt +FROM (SELECT ss_ticket_number, + ss_customer_sk, + Count(*) cnt + FROM store_sales, + date_dim, + store, + household_demographics + WHERE store_sales.ss_sold_date_sk = date_dim.d_date_sk + AND store_sales.ss_store_sk = store.s_store_sk + AND store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk + AND ( date_dim.d_dom BETWEEN 1 AND 3 + OR date_dim.d_dom BETWEEN 25 AND 28 ) + AND ( household_demographics.hd_buy_potential = '>10000' + OR household_demographics.hd_buy_potential = 'unknown' ) + AND household_demographics.hd_vehicle_count > 0 + AND ( CASE + WHEN household_demographics.hd_vehicle_count > 0 THEN + household_demographics.hd_dep_count / + household_demographics.hd_vehicle_count + ELSE NULL + END ) > 1.2 + AND date_dim.d_year IN ( 1999, 1999 + 1, 
1999 + 2 ) + AND store.s_county IN ( 'Williamson County', 'Williamson County', + 'Williamson County', + 'Williamson County' + , + 'Williamson County', 'Williamson County', + 'Williamson County', + 'Williamson County' + ) + GROUP BY ss_ticket_number, + ss_customer_sk) dn, + customer +WHERE ss_customer_sk = c_customer_sk + AND cnt BETWEEN 15 AND 20 +ORDER BY c_last_name, + c_first_name, + c_salutation, + c_preferred_cust_flag DESC; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query35.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query35.sql new file mode 100644 index 000000000..7d1ce6ae7 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query35.sql @@ -0,0 +1,58 @@ +-- start query 35 in stream 0 using template query35.tpl +SELECT ca_state, + cd_gender, + cd_marital_status, + cd_dep_count, + Count(*) cnt1, + Stddev_samp(cd_dep_count), + Avg(cd_dep_count), + Max(cd_dep_count), + cd_dep_employed_count, + Count(*) cnt2, + Stddev_samp(cd_dep_employed_count), + Avg(cd_dep_employed_count), + Max(cd_dep_employed_count), + cd_dep_college_count, + Count(*) cnt3, + Stddev_samp(cd_dep_college_count), + Avg(cd_dep_college_count), + Max(cd_dep_college_count) +FROM customer c, + customer_address ca, + customer_demographics +WHERE c.c_current_addr_sk = ca.ca_address_sk + AND cd_demo_sk = c.c_current_cdemo_sk + AND EXISTS (SELECT * + FROM store_sales, + date_dim + WHERE c.c_customer_sk = ss_customer_sk + AND ss_sold_date_sk = d_date_sk + AND d_year = 2001 + AND d_qoy < 4) + AND ( EXISTS (SELECT * + FROM web_sales, + date_dim + WHERE c.c_customer_sk = ws_bill_customer_sk + AND ws_sold_date_sk = d_date_sk + AND d_year = 2001 + AND d_qoy < 4) + OR EXISTS (SELECT * + FROM catalog_sales, + date_dim + WHERE c.c_customer_sk = cs_ship_customer_sk + AND cs_sold_date_sk = d_date_sk + AND d_year = 2001 + AND d_qoy < 4) ) +GROUP BY ca_state, + cd_gender, + cd_marital_status, + cd_dep_count, + cd_dep_employed_count, + 
cd_dep_college_count +ORDER BY ca_state, + cd_gender, + cd_marital_status, + cd_dep_count, + cd_dep_employed_count, + cd_dep_college_count +LIMIT 100; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query36.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query36.sql new file mode 100644 index 000000000..300a445bc --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query36.sql @@ -0,0 +1,31 @@ +-- start query 36 in stream 0 using template query36.tpl +SELECT Sum(ss_net_profit) / Sum(ss_ext_sales_price) AS + gross_margin, + i_category, + i_class, + Grouping(i_category) + Grouping(i_class) AS + lochierarchy, + Rank() + OVER ( + partition BY Grouping(i_category)+Grouping(i_class), CASE + WHEN Grouping( + i_class) = 0 THEN i_category END + ORDER BY Sum(ss_net_profit)/Sum(ss_ext_sales_price) ASC) AS + rank_within_parent +FROM store_sales, + date_dim d1, + item, + store +WHERE d1.d_year = 2000 + AND d1.d_date_sk = ss_sold_date_sk + AND i_item_sk = ss_item_sk + AND s_store_sk = ss_store_sk + AND s_state IN ( 'TN', 'TN', 'TN', 'TN', + 'TN', 'TN', 'TN', 'TN' ) +GROUP BY rollup( i_category, i_class ) +ORDER BY lochierarchy DESC, + CASE + WHEN lochierarchy = 0 THEN i_category + END, + rank_within_parent +LIMIT 100; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query37.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query37.sql new file mode 100644 index 000000000..b6b6be029 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query37.sql @@ -0,0 +1,23 @@ +-- start query 37 in stream 0 using template query37.tpl +SELECT + i_item_id , + i_item_desc , + i_current_price +FROM item, + inventory, + date_dim, + catalog_sales +WHERE i_current_price BETWEEN 20 AND 20 + 30 +AND inv_item_sk = i_item_sk +AND d_date_sk=inv_date_sk +AND d_date BETWEEN Cast('1999-03-06' AS DATE) AND ( + Cast('1999-03-06' AS DATE) + INTERVAL '60' day) +AND i_manufact_id IN 
(843,815,850,840) +AND inv_quantity_on_hand BETWEEN 100 AND 500 +AND cs_item_sk = i_item_sk +GROUP BY i_item_id, + i_item_desc, + i_current_price +ORDER BY i_item_id +LIMIT 100; + diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query38.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query38.sql new file mode 100644 index 000000000..1e657d572 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query38.sql @@ -0,0 +1,32 @@ +-- start query 38 in stream 0 using template query38.tpl +SELECT Count(*) +FROM (SELECT DISTINCT c_last_name, + c_first_name, + d_date + FROM store_sales, + date_dim, + customer + WHERE store_sales.ss_sold_date_sk = date_dim.d_date_sk + AND store_sales.ss_customer_sk = customer.c_customer_sk + AND d_month_seq BETWEEN 1188 AND 1188 + 11 + INTERSECT + SELECT DISTINCT c_last_name, + c_first_name, + d_date + FROM catalog_sales, + date_dim, + customer + WHERE catalog_sales.cs_sold_date_sk = date_dim.d_date_sk + AND catalog_sales.cs_bill_customer_sk = customer.c_customer_sk + AND d_month_seq BETWEEN 1188 AND 1188 + 11 + INTERSECT + SELECT DISTINCT c_last_name, + c_first_name, + d_date + FROM web_sales, + date_dim, + customer + WHERE web_sales.ws_sold_date_sk = date_dim.d_date_sk + AND web_sales.ws_bill_customer_sk = customer.c_customer_sk + AND d_month_seq BETWEEN 1188 AND 1188 + 11) hot_cust +LIMIT 100; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query39.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query39.sql new file mode 100644 index 000000000..8625b9889 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query39.sql @@ -0,0 +1,117 @@ +-- start query 39 in stream 0 using template query39.tpl +WITH inv + AS (SELECT w_warehouse_name, + w_warehouse_sk, + i_item_sk, + d_moy, + stdev, + mean, + CASE mean + WHEN 0 THEN NULL + ELSE stdev / mean + END cov + FROM (SELECT w_warehouse_name, + w_warehouse_sk, + i_item_sk, + d_moy, + 
Stddev_samp(inv_quantity_on_hand) stdev, + Avg(inv_quantity_on_hand) mean + FROM inventory, + item, + warehouse, + date_dim + WHERE inv_item_sk = i_item_sk + AND inv_warehouse_sk = w_warehouse_sk + AND inv_date_sk = d_date_sk + AND d_year = 2002 + GROUP BY w_warehouse_name, + w_warehouse_sk, + i_item_sk, + d_moy) foo + WHERE CASE mean + WHEN 0 THEN 0 + ELSE stdev / mean + END > 1) +SELECT inv1.w_warehouse_sk, + inv1.i_item_sk, + inv1.d_moy, + inv1.mean, + inv1.cov, + inv2.w_warehouse_sk, + inv2.i_item_sk, + inv2.d_moy, + inv2.mean, + inv2.cov +FROM inv inv1, + inv inv2 +WHERE inv1.i_item_sk = inv2.i_item_sk + AND inv1.w_warehouse_sk = inv2.w_warehouse_sk + AND inv1.d_moy = 1 + AND inv2.d_moy = 1 + 1 +ORDER BY inv1.w_warehouse_sk, + inv1.i_item_sk, + inv1.d_moy, + inv1.mean, + inv1.cov, + inv2.d_moy, + inv2.mean, + inv2.cov; + +WITH inv + AS (SELECT w_warehouse_name, + w_warehouse_sk, + i_item_sk, + d_moy, + stdev, + mean, + CASE mean + WHEN 0 THEN NULL + ELSE stdev / mean + END cov + FROM (SELECT w_warehouse_name, + w_warehouse_sk, + i_item_sk, + d_moy, + Stddev_samp(inv_quantity_on_hand) stdev, + Avg(inv_quantity_on_hand) mean + FROM inventory, + item, + warehouse, + date_dim + WHERE inv_item_sk = i_item_sk + AND inv_warehouse_sk = w_warehouse_sk + AND inv_date_sk = d_date_sk + AND d_year = 2002 + GROUP BY w_warehouse_name, + w_warehouse_sk, + i_item_sk, + d_moy) foo + WHERE CASE mean + WHEN 0 THEN 0 + ELSE stdev / mean + END > 1) +SELECT inv1.w_warehouse_sk, + inv1.i_item_sk, + inv1.d_moy, + inv1.mean, + inv1.cov, + inv2.w_warehouse_sk, + inv2.i_item_sk, + inv2.d_moy, + inv2.mean, + inv2.cov +FROM inv inv1, + inv inv2 +WHERE inv1.i_item_sk = inv2.i_item_sk + AND inv1.w_warehouse_sk = inv2.w_warehouse_sk + AND inv1.d_moy = 1 + AND inv2.d_moy = 1 + 1 + AND inv1.cov > 1.5 +ORDER BY inv1.w_warehouse_sk, + inv1.i_item_sk, + inv1.d_moy, + inv1.mean, + inv1.cov, + inv2.d_moy, + inv2.mean, + inv2.cov; diff --git 
a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query40.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query40.sql new file mode 100644 index 000000000..ed4a5aab8 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query40.sql @@ -0,0 +1,36 @@ +-- start query 40 in stream 0 using template query40.tpl +SELECT + w_state , + i_item_id , + Sum( + CASE + WHEN ( + Cast(d_date AS DATE) < Cast ('2002-06-01' AS DATE)) THEN cs_sales_price - COALESCE(cr_refunded_cash,0) + ELSE 0 + END) AS sales_before , + Sum( + CASE + WHEN ( + Cast(d_date AS DATE) >= Cast ('2002-06-01' AS DATE)) THEN cs_sales_price - COALESCE(cr_refunded_cash,0) + ELSE 0 + END) AS sales_after +FROM catalog_sales +LEFT OUTER JOIN catalog_returns +ON ( + cs_order_number = cr_order_number + AND cs_item_sk = cr_item_sk) , + warehouse , + item , + date_dim +WHERE i_current_price BETWEEN 0.99 AND 1.49 +AND i_item_sk = cs_item_sk +AND cs_warehouse_sk = w_warehouse_sk +AND cs_sold_date_sk = d_date_sk +AND d_date BETWEEN (Cast ('2002-06-01' AS DATE) - INTERVAL '30' day) AND ( + cast ('2002-06-01' AS date) + INTERVAL '30' day) +GROUP BY w_state, + i_item_id +ORDER BY w_state, + i_item_id +LIMIT 100; + diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query41.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query41.sql new file mode 100644 index 000000000..8fb9149ea --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query41.sql @@ -0,0 +1,66 @@ +-- start query 41 in stream 0 using template query41.tpl +SELECT Distinct(i_product_name) +FROM item i1 +WHERE i_manufact_id BETWEEN 765 AND 765 + 40 + AND (SELECT Count(*) AS item_cnt + FROM item + WHERE ( i_manufact = i1.i_manufact + AND ( ( i_category = 'Women' + AND ( i_color = 'dim' + OR i_color = 'green' ) + AND ( i_units = 'Gross' + OR i_units = 'Dozen' ) + AND ( i_size = 'economy' + OR i_size = 'petite' ) ) + OR ( i_category = 'Women' + AND ( i_color = 
'navajo' + OR i_color = 'aquamarine' ) + AND ( i_units = 'Case' + OR i_units = 'Unknown' ) + AND ( i_size = 'large' + OR i_size = 'N/A' ) ) + OR ( i_category = 'Men' + AND ( i_color = 'indian' + OR i_color = 'dark' ) + AND ( i_units = 'Oz' + OR i_units = 'Lb' ) + AND ( i_size = 'extra large' + OR i_size = 'small' ) ) + OR ( i_category = 'Men' + AND ( i_color = 'peach' + OR i_color = 'purple' ) + AND ( i_units = 'Tbl' + OR i_units = 'Bunch' ) + AND ( i_size = 'economy' + OR i_size = 'petite' ) ) ) ) + OR ( i_manufact = i1.i_manufact + AND ( ( i_category = 'Women' + AND ( i_color = 'orchid' + OR i_color = 'peru' ) + AND ( i_units = 'Carton' + OR i_units = 'Cup' ) + AND ( i_size = 'economy' + OR i_size = 'petite' ) ) + OR ( i_category = 'Women' + AND ( i_color = 'violet' + OR i_color = 'papaya' ) + AND ( i_units = 'Ounce' + OR i_units = 'Box' ) + AND ( i_size = 'large' + OR i_size = 'N/A' ) ) + OR ( i_category = 'Men' + AND ( i_color = 'drab' + OR i_color = 'grey' ) + AND ( i_units = 'Each' + OR i_units = 'N/A' ) + AND ( i_size = 'extra large' + OR i_size = 'small' ) ) + OR ( i_category = 'Men' + AND ( i_color = 'chocolate' + OR i_color = 'antique' ) + AND ( i_units = 'Dram' + OR i_units = 'Gram' ) + AND ( i_size = 'economy' + OR i_size = 'petite' ) ) ) )) > 0 +ORDER BY i_product_name +LIMIT 100; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query42.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query42.sql new file mode 100644 index 000000000..47dab1717 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query42.sql @@ -0,0 +1,21 @@ +-- start query 42 in stream 0 using template query42.tpl +SELECT dt.d_year, + item.i_category_id, + item.i_category, + Sum(ss_ext_sales_price) +FROM date_dim dt, + store_sales, + item +WHERE dt.d_date_sk = store_sales.ss_sold_date_sk + AND store_sales.ss_item_sk = item.i_item_sk + AND item.i_manager_id = 1 + AND dt.d_moy = 12 + AND dt.d_year = 2000 +GROUP BY dt.d_year, + 
item.i_category_id, + item.i_category +ORDER BY Sum(ss_ext_sales_price) DESC, + dt.d_year, + item.i_category_id, + item.i_category +LIMIT 100; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query43.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query43.sql new file mode 100644 index 000000000..dbb40165e --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query43.sql @@ -0,0 +1,50 @@ +-- start query 43 in stream 0 using template query43.tpl +SELECT s_store_name, + s_store_id, + Sum(CASE + WHEN ( d_day_name = 'Sunday' ) THEN ss_sales_price + ELSE NULL + END) sun_sales, + Sum(CASE + WHEN ( d_day_name = 'Monday' ) THEN ss_sales_price + ELSE NULL + END) mon_sales, + Sum(CASE + WHEN ( d_day_name = 'Tuesday' ) THEN ss_sales_price + ELSE NULL + END) tue_sales, + Sum(CASE + WHEN ( d_day_name = 'Wednesday' ) THEN ss_sales_price + ELSE NULL + END) wed_sales, + Sum(CASE + WHEN ( d_day_name = 'Thursday' ) THEN ss_sales_price + ELSE NULL + END) thu_sales, + Sum(CASE + WHEN ( d_day_name = 'Friday' ) THEN ss_sales_price + ELSE NULL + END) fri_sales, + Sum(CASE + WHEN ( d_day_name = 'Saturday' ) THEN ss_sales_price + ELSE NULL + END) sat_sales +FROM date_dim, + store_sales, + store +WHERE d_date_sk = ss_sold_date_sk + AND s_store_sk = ss_store_sk + AND s_gmt_offset = -5 + AND d_year = 2002 +GROUP BY s_store_name, + s_store_id +ORDER BY s_store_name, + s_store_id, + sun_sales, + mon_sales, + tue_sales, + wed_sales, + thu_sales, + fri_sales, + sat_sales +LIMIT 100; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query44.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query44.sql new file mode 100644 index 000000000..fb8bd520e --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query44.sql @@ -0,0 +1,51 @@ +-- start query 44 in stream 0 using template query44.tpl +SELECT asceding.rnk, + i1.i_product_name best_performing, + i2.i_product_name worst_performing 
+FROM (SELECT * + FROM (SELECT item_sk, + Rank() + OVER ( + ORDER BY rank_col ASC) rnk + FROM (SELECT ss_item_sk item_sk, + Avg(ss_net_profit) rank_col + FROM store_sales ss1 + WHERE ss_store_sk = 4 + GROUP BY ss_item_sk + HAVING Avg(ss_net_profit) > 0.9 * + (SELECT Avg(ss_net_profit) + rank_col + FROM store_sales + WHERE ss_store_sk = 4 + AND ss_cdemo_sk IS + NULL + GROUP BY ss_store_sk))V1) + V11 + WHERE rnk < 11) asceding, + (SELECT * + FROM (SELECT item_sk, + Rank() + OVER ( + ORDER BY rank_col DESC) rnk + FROM (SELECT ss_item_sk item_sk, + Avg(ss_net_profit) rank_col + FROM store_sales ss1 + WHERE ss_store_sk = 4 + GROUP BY ss_item_sk + HAVING Avg(ss_net_profit) > 0.9 * + (SELECT Avg(ss_net_profit) + rank_col + FROM store_sales + WHERE ss_store_sk = 4 + AND ss_cdemo_sk IS + NULL + GROUP BY ss_store_sk))V2) + V21 + WHERE rnk < 11) descending, + item i1, + item i2 +WHERE asceding.rnk = descending.rnk + AND i1.i_item_sk = asceding.item_sk + AND i2.i_item_sk = descending.item_sk +ORDER BY asceding.rnk +LIMIT 100; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query45.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query45.sql new file mode 100644 index 000000000..90454bc21 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query45.sql @@ -0,0 +1,28 @@ +-- start query 45 in stream 0 using template query45.tpl +SELECT ca_zip, + ca_state, + Sum(ws_sales_price) +FROM web_sales, + customer, + customer_address, + date_dim, + item +WHERE ws_bill_customer_sk = c_customer_sk + AND c_current_addr_sk = ca_address_sk + AND ws_item_sk = i_item_sk + AND ( Substr(ca_zip, 1, 5) IN ( '85669', '86197', '88274', '83405', + '86475', '85392', '85460', '80348', + '81792' ) + OR i_item_id IN (SELECT i_item_id + FROM item + WHERE i_item_sk IN ( 2, 3, 5, 7, + 11, 13, 17, 19, + 23, 29 )) ) + AND ws_sold_date_sk = d_date_sk + AND d_qoy = 1 + AND d_year = 2000 +GROUP BY ca_zip, + ca_state +ORDER BY ca_zip, + ca_state +LIMIT 100; 
diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query46.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query46.sql new file mode 100644 index 000000000..3562694ed --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query46.sql @@ -0,0 +1,44 @@ +-- start query 46 in stream 0 using template query46.tpl +SELECT c_last_name, + c_first_name, + ca_city, + bought_city, + ss_ticket_number, + amt, + profit +FROM (SELECT ss_ticket_number, + ss_customer_sk, + ca_city bought_city, + Sum(ss_coupon_amt) amt, + Sum(ss_net_profit) profit + FROM store_sales, + date_dim, + store, + household_demographics, + customer_address + WHERE store_sales.ss_sold_date_sk = date_dim.d_date_sk + AND store_sales.ss_store_sk = store.s_store_sk + AND store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk + AND store_sales.ss_addr_sk = customer_address.ca_address_sk + AND ( household_demographics.hd_dep_count = 6 + OR household_demographics.hd_vehicle_count = 0 ) + AND date_dim.d_dow IN ( 6, 0 ) + AND date_dim.d_year IN ( 2000, 2000 + 1, 2000 + 2 ) + AND store.s_city IN ( 'Midway', 'Fairview', 'Fairview', + 'Fairview', + 'Fairview' ) + GROUP BY ss_ticket_number, + ss_customer_sk, + ss_addr_sk, + ca_city) dn, + customer, + customer_address current_addr +WHERE ss_customer_sk = c_customer_sk + AND customer.c_current_addr_sk = current_addr.ca_address_sk + AND current_addr.ca_city <> bought_city +ORDER BY c_last_name, + c_first_name, + ca_city, + bought_city, + ss_ticket_number +LIMIT 100; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query47.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query47.sql new file mode 100644 index 000000000..07e18cb3c --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query47.sql @@ -0,0 +1,72 @@ +-- start query 47 in stream 0 using template query47.tpl +WITH v1 + AS (SELECT i_category, + i_brand, + s_store_name, + s_company_name, + d_year, + 
d_moy, + Sum(ss_sales_price) sum_sales, + Avg(Sum(ss_sales_price)) + OVER ( + partition BY i_category, i_brand, s_store_name, + s_company_name, + d_year) + avg_monthly_sales, + Rank() + OVER ( + partition BY i_category, i_brand, s_store_name, + s_company_name + ORDER BY d_year, d_moy) rn + FROM item, + store_sales, + date_dim, + store + WHERE ss_item_sk = i_item_sk + AND ss_sold_date_sk = d_date_sk + AND ss_store_sk = s_store_sk + AND ( d_year = 1999 + OR ( d_year = 1999 - 1 + AND d_moy = 12 ) + OR ( d_year = 1999 + 1 + AND d_moy = 1 ) ) + GROUP BY i_category, + i_brand, + s_store_name, + s_company_name, + d_year, + d_moy), + v2 + AS (SELECT v1.i_category, + v1.d_year, + v1.d_moy, + v1.avg_monthly_sales, + v1.sum_sales, + v1_lag.sum_sales psum, + v1_lead.sum_sales nsum + FROM v1, + v1 v1_lag, + v1 v1_lead + WHERE v1.i_category = v1_lag.i_category + AND v1.i_category = v1_lead.i_category + AND v1.i_brand = v1_lag.i_brand + AND v1.i_brand = v1_lead.i_brand + AND v1.s_store_name = v1_lag.s_store_name + AND v1.s_store_name = v1_lead.s_store_name + AND v1.s_company_name = v1_lag.s_company_name + AND v1.s_company_name = v1_lead.s_company_name + AND v1.rn = v1_lag.rn + 1 + AND v1.rn = v1_lead.rn - 1) +SELECT * +FROM v2 +WHERE d_year = 1999 + AND avg_monthly_sales > 0 + AND CASE + WHEN avg_monthly_sales > 0 THEN Abs(sum_sales - avg_monthly_sales) + / + avg_monthly_sales + ELSE NULL + END > 0.1 +ORDER BY sum_sales - avg_monthly_sales, + 3 +LIMIT 100; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query48.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query48.sql new file mode 100644 index 000000000..a28febaf6 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query48.sql @@ -0,0 +1,34 @@ +-- start query 48 in stream 0 using template query48.tpl +SELECT Sum (ss_quantity) +FROM store_sales, + store, + customer_demographics, + customer_address, + date_dim +WHERE s_store_sk = ss_store_sk + AND ss_sold_date_sk = 
d_date_sk + AND d_year = 1999 + AND ( ( cd_demo_sk = ss_cdemo_sk + AND cd_marital_status = 'W' + AND cd_education_status = 'Secondary' + AND ss_sales_price BETWEEN 100.00 AND 150.00 ) + OR ( cd_demo_sk = ss_cdemo_sk + AND cd_marital_status = 'M' + AND cd_education_status = 'Advanced Degree' + AND ss_sales_price BETWEEN 50.00 AND 100.00 ) + OR ( cd_demo_sk = ss_cdemo_sk + AND cd_marital_status = 'D' + AND cd_education_status = '2 yr Degree' + AND ss_sales_price BETWEEN 150.00 AND 200.00 ) ) + AND ( ( ss_addr_sk = ca_address_sk + AND ca_country = 'United States' + AND ca_state IN ( 'TX', 'NE', 'MO' ) + AND ss_net_profit BETWEEN 0 AND 2000 ) + OR ( ss_addr_sk = ca_address_sk + AND ca_country = 'United States' + AND ca_state IN ( 'CO', 'TN', 'ND' ) + AND ss_net_profit BETWEEN 150 AND 3000 ) + OR ( ss_addr_sk = ca_address_sk + AND ca_country = 'United States' + AND ca_state IN ( 'OK', 'PA', 'CA' ) + AND ss_net_profit BETWEEN 50 AND 25000 ) ); diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query49.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query49.sql new file mode 100644 index 000000000..81cf93943 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query49.sql @@ -0,0 +1,133 @@ +-- start query 49 in stream 0 using template query49.tpl +SELECT 'web' AS channel, + web.item, + web.return_ratio, + web.return_rank, + web.currency_rank +FROM (SELECT item, + return_ratio, + currency_ratio, + Rank() + OVER ( + ORDER BY return_ratio) AS return_rank, + Rank() + OVER ( + ORDER BY currency_ratio) AS currency_rank + FROM (SELECT ws.ws_item_sk AS + item, + ( Cast(Sum(COALESCE(wr.wr_return_quantity, 0)) AS DEC(15, + 4)) / + Cast( + Sum(COALESCE(ws.ws_quantity, 0)) AS DEC(15, 4)) ) AS + return_ratio, + ( Cast(Sum(COALESCE(wr.wr_return_amt, 0)) AS DEC(15, 4)) + / Cast( + Sum( + COALESCE(ws.ws_net_paid, 0)) AS DEC(15, + 4)) ) AS + currency_ratio + FROM web_sales ws + LEFT OUTER JOIN web_returns wr + ON ( 
ws.ws_order_number = wr.wr_order_number + AND ws.ws_item_sk = wr.wr_item_sk ), + date_dim + WHERE wr.wr_return_amt > 10000 + AND ws.ws_net_profit > 1 + AND ws.ws_net_paid > 0 + AND ws.ws_quantity > 0 + AND ws_sold_date_sk = d_date_sk + AND d_year = 1999 + AND d_moy = 12 + GROUP BY ws.ws_item_sk) in_web) web +WHERE ( web.return_rank <= 10 + OR web.currency_rank <= 10 ) +UNION +SELECT 'catalog' AS channel, + catalog.item, + catalog.return_ratio, + catalog.return_rank, + catalog.currency_rank +FROM (SELECT item, + return_ratio, + currency_ratio, + Rank() + OVER ( + ORDER BY return_ratio) AS return_rank, + Rank() + OVER ( + ORDER BY currency_ratio) AS currency_rank + FROM (SELECT cs.cs_item_sk AS + item, + ( Cast(Sum(COALESCE(cr.cr_return_quantity, 0)) AS DEC(15, + 4)) / + Cast( + Sum(COALESCE(cs.cs_quantity, 0)) AS DEC(15, 4)) ) AS + return_ratio, + ( Cast(Sum(COALESCE(cr.cr_return_amount, 0)) AS DEC(15, 4 + )) / + Cast(Sum( + COALESCE(cs.cs_net_paid, 0)) AS DEC( + 15, 4)) ) AS + currency_ratio + FROM catalog_sales cs + LEFT OUTER JOIN catalog_returns cr + ON ( cs.cs_order_number = cr.cr_order_number + AND cs.cs_item_sk = cr.cr_item_sk ), + date_dim + WHERE cr.cr_return_amount > 10000 + AND cs.cs_net_profit > 1 + AND cs.cs_net_paid > 0 + AND cs.cs_quantity > 0 + AND cs_sold_date_sk = d_date_sk + AND d_year = 1999 + AND d_moy = 12 + GROUP BY cs.cs_item_sk) in_cat) catalog +WHERE ( catalog.return_rank <= 10 + OR catalog.currency_rank <= 10 ) +UNION +SELECT 'store' AS channel, + store.item, + store.return_ratio, + store.return_rank, + store.currency_rank +FROM (SELECT item, + return_ratio, + currency_ratio, + Rank() + OVER ( + ORDER BY return_ratio) AS return_rank, + Rank() + OVER ( + ORDER BY currency_ratio) AS currency_rank + FROM (SELECT sts.ss_item_sk AS + item, + ( Cast(Sum(COALESCE(sr.sr_return_quantity, 0)) AS DEC(15, + 4)) / + Cast( + Sum(COALESCE(sts.ss_quantity, 0)) AS DEC(15, 4)) ) AS + return_ratio, + ( Cast(Sum(COALESCE(sr.sr_return_amt, 0)) AS DEC(15, 4)) + 
/ Cast( + Sum( + COALESCE(sts.ss_net_paid, 0)) AS DEC(15, 4)) ) AS + currency_ratio + FROM store_sales sts + LEFT OUTER JOIN store_returns sr + ON ( sts.ss_ticket_number = + sr.sr_ticket_number + AND sts.ss_item_sk = sr.sr_item_sk ), + date_dim + WHERE sr.sr_return_amt > 10000 + AND sts.ss_net_profit > 1 + AND sts.ss_net_paid > 0 + AND sts.ss_quantity > 0 + AND ss_sold_date_sk = d_date_sk + AND d_year = 1999 + AND d_moy = 12 + GROUP BY sts.ss_item_sk) in_store) store +WHERE ( store.return_rank <= 10 + OR store.currency_rank <= 10 ) +ORDER BY 1, + 4, + 5 +LIMIT 100; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query50.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query50.sql new file mode 100644 index 000000000..fd4bbc799 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query50.sql @@ -0,0 +1,71 @@ +-- start query 50 in stream 0 using template query50.tpl +SELECT s_store_name, + s_company_id, + s_street_number, + s_street_name, + s_street_type, + s_suite_number, + s_city, + s_county, + s_state, + s_zip, + Sum(CASE + WHEN ( sr_returned_date_sk - ss_sold_date_sk <= 30 ) THEN 1 + ELSE 0 + END) AS `30 days`, + Sum(CASE + WHEN ( sr_returned_date_sk - ss_sold_date_sk > 30 ) + AND ( sr_returned_date_sk - ss_sold_date_sk <= 60 ) + THEN 1 + ELSE 0 + END) AS `31-60 days`, + Sum(CASE + WHEN ( sr_returned_date_sk - ss_sold_date_sk > 60 ) + AND ( sr_returned_date_sk - ss_sold_date_sk <= 90 ) + THEN 1 + ELSE 0 + END) AS `61-90 days`, + Sum(CASE + WHEN ( sr_returned_date_sk - ss_sold_date_sk > 90 ) + AND ( sr_returned_date_sk - ss_sold_date_sk <= 120 ) + THEN 1 + ELSE 0 + END) AS `91-120 days`, + Sum(CASE + WHEN ( sr_returned_date_sk - ss_sold_date_sk > 120 ) THEN 1 + ELSE 0 + END) AS `>120 days` +FROM store_sales, + store_returns, + store, + date_dim d1, + date_dim d2 +WHERE d2.d_year = 2002 + AND d2.d_moy = 9 + AND ss_ticket_number = sr_ticket_number + AND ss_item_sk = sr_item_sk + AND ss_sold_date_sk = 
d1.d_date_sk + AND sr_returned_date_sk = d2.d_date_sk + AND ss_customer_sk = sr_customer_sk + AND ss_store_sk = s_store_sk +GROUP BY s_store_name, + s_company_id, + s_street_number, + s_street_name, + s_street_type, + s_suite_number, + s_city, + s_county, + s_state, + s_zip +ORDER BY s_store_name, + s_company_id, + s_street_number, + s_street_name, + s_street_type, + s_suite_number, + s_city, + s_county, + s_state, + s_zip +LIMIT 100; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query51.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query51.sql new file mode 100644 index 000000000..2f84ca3fa --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query51.sql @@ -0,0 +1,55 @@ +-- start query 51 in stream 0 using template query51.tpl +WITH web_v1 AS +( + SELECT ws_item_sk item_sk, + d_date, + sum(Sum(ws_sales_price)) OVER (partition BY ws_item_sk ORDER BY d_date rows BETWEEN UNBOUNDED PRECEDING AND CURRENT row) cume_sales + FROM web_sales , + date_dim + WHERE ws_sold_date_sk=d_date_sk + AND d_month_seq BETWEEN 1192 AND 1192+11 + AND ws_item_sk IS NOT NULL + GROUP BY ws_item_sk, + d_date), store_v1 AS +( + SELECT ss_item_sk item_sk, + d_date, + sum(sum(ss_sales_price)) OVER (partition BY ss_item_sk ORDER BY d_date rows BETWEEN UNBOUNDED PRECEDING AND CURRENT row) cume_sales + FROM store_sales , + date_dim + WHERE ss_sold_date_sk=d_date_sk + AND d_month_seq BETWEEN 1192 AND 1192+11 + AND ss_item_sk IS NOT NULL + GROUP BY ss_item_sk, + d_date) +SELECT + * +FROM ( + SELECT item_sk , + d_date , + web_sales , + store_sales , + max(web_sales) OVER (partition BY item_sk ORDER BY d_date rows BETWEEN UNBOUNDED PRECEDING AND CURRENT row) web_cumulative , + max(store_sales) OVER (partition BY item_sk ORDER BY d_date rows BETWEEN UNBOUNDED PRECEDING AND CURRENT row) store_cumulative + FROM ( + SELECT + CASE + WHEN web.item_sk IS NOT NULL THEN web.item_sk + ELSE store.item_sk + END item_sk , + CASE + WHEN web.d_date IS 
NOT NULL THEN web.d_date + ELSE store.d_date + END d_date , + web.cume_sales web_sales , + store.cume_sales store_sales + FROM web_v1 web + FULL OUTER JOIN store_v1 store + ON ( + web.item_sk = store.item_sk + AND web.d_date = store.d_date) )x )y +WHERE web_cumulative > store_cumulative +ORDER BY item_sk , + d_date +LIMIT 100; + diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query52.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query52.sql new file mode 100644 index 000000000..685cc9e00 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query52.sql @@ -0,0 +1,20 @@ +-- start query 52 in stream 0 using template query52.tpl +SELECT dt.d_year, + item.i_brand_id brand_id, + item.i_brand brand, + Sum(ss_ext_sales_price) ext_price +FROM date_dim dt, + store_sales, + item +WHERE dt.d_date_sk = store_sales.ss_sold_date_sk + AND store_sales.ss_item_sk = item.i_item_sk + AND item.i_manager_id = 1 + AND dt.d_moy = 11 + AND dt.d_year = 1999 +GROUP BY dt.d_year, + item.i_brand, + item.i_brand_id +ORDER BY dt.d_year, + ext_price DESC, + brand_id +LIMIT 100; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query53.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query53.sql new file mode 100644 index 000000000..5bd5ff047 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query53.sql @@ -0,0 +1,46 @@ +-- start query 53 in stream 0 using template query53.tpl +SELECT * +FROM (SELECT i_manufact_id, + Sum(ss_sales_price) sum_sales, + Avg(Sum(ss_sales_price)) + OVER ( + partition BY i_manufact_id) avg_quarterly_sales + FROM item, + store_sales, + date_dim, + store + WHERE ss_item_sk = i_item_sk + AND ss_sold_date_sk = d_date_sk + AND ss_store_sk = s_store_sk + AND d_month_seq IN ( 1199, 1199 + 1, 1199 + 2, 1199 + 3, + 1199 + 4, 1199 + 5, 1199 + 6, 1199 + 7, + 1199 + 8, 1199 + 9, 1199 + 10, 1199 + 11 ) + AND ( ( i_category IN ( 'Books', 'Children', 'Electronics' 
) + AND i_class IN ( 'personal', 'portable', 'reference', + 'self-help' ) + AND i_brand IN ( 'scholaramalgamalg #14', + 'scholaramalgamalg #7' + , + 'exportiunivamalg #9', + 'scholaramalgamalg #9' ) + ) + OR ( i_category IN ( 'Women', 'Music', 'Men' ) + AND i_class IN ( 'accessories', 'classical', + 'fragrances', + 'pants' ) + AND i_brand IN ( 'amalgimporto #1', + 'edu packscholar #1', + 'exportiimporto #1', + 'importoamalg #1' ) ) ) + GROUP BY i_manufact_id, + d_qoy) tmp1 +WHERE CASE + WHEN avg_quarterly_sales > 0 THEN Abs (sum_sales - avg_quarterly_sales) + / + avg_quarterly_sales + ELSE NULL + END > 0.1 +ORDER BY avg_quarterly_sales, + sum_sales, + i_manufact_id +LIMIT 100; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query54.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query54.sql new file mode 100644 index 000000000..951c5dd67 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query54.sql @@ -0,0 +1,57 @@ +-- start query 54 in stream 0 using template query54.tpl +WITH my_customers + AS (SELECT DISTINCT c_customer_sk, + c_current_addr_sk + FROM (SELECT cs_sold_date_sk sold_date_sk, + cs_bill_customer_sk customer_sk, + cs_item_sk item_sk + FROM catalog_sales + UNION ALL + SELECT ws_sold_date_sk sold_date_sk, + ws_bill_customer_sk customer_sk, + ws_item_sk item_sk + FROM web_sales) cs_or_ws_sales, + item, + date_dim, + customer + WHERE sold_date_sk = d_date_sk + AND item_sk = i_item_sk + AND i_category = 'Sports' + AND i_class = 'fitness' + AND c_customer_sk = cs_or_ws_sales.customer_sk + AND d_moy = 5 + AND d_year = 2000), + my_revenue + AS (SELECT c_customer_sk, + Sum(ss_ext_sales_price) AS revenue + FROM my_customers, + store_sales, + customer_address, + store, + date_dim + WHERE c_current_addr_sk = ca_address_sk + AND ca_county = s_county + AND ca_state = s_state + AND ss_sold_date_sk = d_date_sk + AND c_customer_sk = ss_customer_sk + AND d_month_seq BETWEEN (SELECT DISTINCT d_month_seq + 1 
+ FROM date_dim + WHERE d_year = 2000 + AND d_moy = 5) AND + (SELECT DISTINCT + d_month_seq + 3 + FROM date_dim + WHERE d_year = 2000 + AND d_moy = 5) + GROUP BY c_customer_sk), + segments + AS (SELECT Cast(( revenue / 50 ) AS INT) AS segment + FROM my_revenue) +SELECT segment, + Count(*) AS num_customers, + segment * 50 AS segment_base +FROM segments +GROUP BY segment +ORDER BY segment, + num_customers +LIMIT 100; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query55.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query55.sql new file mode 100644 index 000000000..0746ad4ee --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query55.sql @@ -0,0 +1,17 @@ +-- start query 55 in stream 0 using template query55.tpl +SELECT i_brand_id brand_id, + i_brand brand, + Sum(ss_ext_sales_price) ext_price +FROM date_dim, + store_sales, + item +WHERE d_date_sk = ss_sold_date_sk + AND ss_item_sk = i_item_sk + AND i_manager_id = 33 + AND d_moy = 12 + AND d_year = 1998 +GROUP BY i_brand, + i_brand_id +ORDER BY ext_price DESC, + i_brand_id +LIMIT 100; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query56.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query56.sql new file mode 100644 index 000000000..617c209eb --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query56.sql @@ -0,0 +1,68 @@ +-- start query 56 in stream 0 using template query56.tpl +WITH ss + AS (SELECT i_item_id, + Sum(ss_ext_sales_price) total_sales + FROM store_sales, + date_dim, + customer_address, + item + WHERE i_item_id IN (SELECT i_item_id + FROM item + WHERE i_color IN ( 'firebrick', 'rosy', 'white' ) + ) + AND ss_item_sk = i_item_sk + AND ss_sold_date_sk = d_date_sk + AND d_year = 1998 + AND d_moy = 3 + AND ss_addr_sk = ca_address_sk + AND ca_gmt_offset = -6 + GROUP BY i_item_id), + cs + AS (SELECT i_item_id, + Sum(cs_ext_sales_price) total_sales + FROM catalog_sales, + date_dim, + 
customer_address, + item + WHERE i_item_id IN (SELECT i_item_id + FROM item + WHERE i_color IN ( 'firebrick', 'rosy', 'white' ) + ) + AND cs_item_sk = i_item_sk + AND cs_sold_date_sk = d_date_sk + AND d_year = 1998 + AND d_moy = 3 + AND cs_bill_addr_sk = ca_address_sk + AND ca_gmt_offset = -6 + GROUP BY i_item_id), + ws + AS (SELECT i_item_id, + Sum(ws_ext_sales_price) total_sales + FROM web_sales, + date_dim, + customer_address, + item + WHERE i_item_id IN (SELECT i_item_id + FROM item + WHERE i_color IN ( 'firebrick', 'rosy', 'white' ) + ) + AND ws_item_sk = i_item_sk + AND ws_sold_date_sk = d_date_sk + AND d_year = 1998 + AND d_moy = 3 + AND ws_bill_addr_sk = ca_address_sk + AND ca_gmt_offset = -6 + GROUP BY i_item_id) +SELECT i_item_id, + Sum(total_sales) total_sales +FROM (SELECT * + FROM ss + UNION ALL + SELECT * + FROM cs + UNION ALL + SELECT * + FROM ws) tmp1 +GROUP BY i_item_id +ORDER BY total_sales +LIMIT 100; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query57.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query57.sql new file mode 100644 index 000000000..578d15c7e --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query57.sql @@ -0,0 +1,66 @@ +-- start query 57 in stream 0 using template query57.tpl +WITH v1 + AS (SELECT i_category, + i_brand, + cc_name, + d_year, + d_moy, + Sum(cs_sales_price) sum_sales + , + Avg(Sum(cs_sales_price)) + OVER ( + partition BY i_category, i_brand, cc_name, d_year) + avg_monthly_sales + , + Rank() + OVER ( + partition BY i_category, i_brand, cc_name + ORDER BY d_year, d_moy) rn + FROM item, + catalog_sales, + date_dim, + call_center + WHERE cs_item_sk = i_item_sk + AND cs_sold_date_sk = d_date_sk + AND cc_call_center_sk = cs_call_center_sk + AND ( d_year = 2000 + OR ( d_year = 2000 - 1 + AND d_moy = 12 ) + OR ( d_year = 2000 + 1 + AND d_moy = 1 ) ) + GROUP BY i_category, + i_brand, + cc_name, + d_year, + d_moy), + v2 + AS (SELECT v1.i_brand, + v1.d_year, + 
v1.avg_monthly_sales, + v1.sum_sales, + v1_lag.sum_sales psum, + v1_lead.sum_sales nsum + FROM v1, + v1 v1_lag, + v1 v1_lead + WHERE v1.i_category = v1_lag.i_category + AND v1.i_category = v1_lead.i_category + AND v1.i_brand = v1_lag.i_brand + AND v1.i_brand = v1_lead.i_brand + AND v1. cc_name = v1_lag. cc_name + AND v1. cc_name = v1_lead. cc_name + AND v1.rn = v1_lag.rn + 1 + AND v1.rn = v1_lead.rn - 1) +SELECT * +FROM v2 +WHERE d_year = 2000 + AND avg_monthly_sales > 0 + AND CASE + WHEN avg_monthly_sales > 0 THEN Abs(sum_sales - avg_monthly_sales) + / + avg_monthly_sales + ELSE NULL + END > 0.1 +ORDER BY sum_sales - avg_monthly_sales, + 3 +LIMIT 100; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query58.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query58.sql new file mode 100644 index 000000000..1fb22afc4 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query58.sql @@ -0,0 +1,72 @@ +-- start query 58 in stream 0 using template query58.tpl +WITH ss_items + AS (SELECT i_item_id item_id, + Sum(ss_ext_sales_price) ss_item_rev + FROM store_sales, + item, + date_dim + WHERE ss_item_sk = i_item_sk + AND d_date IN (SELECT d_date + FROM date_dim + WHERE d_week_seq = (SELECT d_week_seq + FROM date_dim + WHERE d_date = '2002-02-25' + )) + AND ss_sold_date_sk = d_date_sk + GROUP BY i_item_id), + cs_items + AS (SELECT i_item_id item_id, + Sum(cs_ext_sales_price) cs_item_rev + FROM catalog_sales, + item, + date_dim + WHERE cs_item_sk = i_item_sk + AND d_date IN (SELECT d_date + FROM date_dim + WHERE d_week_seq = (SELECT d_week_seq + FROM date_dim + WHERE d_date = '2002-02-25' + )) + AND cs_sold_date_sk = d_date_sk + GROUP BY i_item_id), + ws_items + AS (SELECT i_item_id item_id, + Sum(ws_ext_sales_price) ws_item_rev + FROM web_sales, + item, + date_dim + WHERE ws_item_sk = i_item_sk + AND d_date IN (SELECT d_date + FROM date_dim + WHERE d_week_seq = (SELECT d_week_seq + FROM date_dim + WHERE d_date = 
'2002-02-25' + )) + AND ws_sold_date_sk = d_date_sk + GROUP BY i_item_id) +SELECT ss_items.item_id, + ss_item_rev, + ss_item_rev / ( ss_item_rev + cs_item_rev + ws_item_rev ) / 3 * + 100 ss_dev, + cs_item_rev, + cs_item_rev / ( ss_item_rev + cs_item_rev + ws_item_rev ) / 3 * + 100 cs_dev, + ws_item_rev, + ws_item_rev / ( ss_item_rev + cs_item_rev + ws_item_rev ) / 3 * + 100 ws_dev, + ( ss_item_rev + cs_item_rev + ws_item_rev ) / 3 + average +FROM ss_items, + cs_items, + ws_items +WHERE ss_items.item_id = cs_items.item_id + AND ss_items.item_id = ws_items.item_id + AND ss_item_rev BETWEEN 0.9 * cs_item_rev AND 1.1 * cs_item_rev + AND ss_item_rev BETWEEN 0.9 * ws_item_rev AND 1.1 * ws_item_rev + AND cs_item_rev BETWEEN 0.9 * ss_item_rev AND 1.1 * ss_item_rev + AND cs_item_rev BETWEEN 0.9 * ws_item_rev AND 1.1 * ws_item_rev + AND ws_item_rev BETWEEN 0.9 * ss_item_rev AND 1.1 * ss_item_rev + AND ws_item_rev BETWEEN 0.9 * cs_item_rev AND 1.1 * cs_item_rev +ORDER BY item_id, + ss_item_rev +LIMIT 100; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query59.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query59.sql new file mode 100644 index 000000000..baf7a0886 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query59.sql @@ -0,0 +1,85 @@ +-- start query 59 in stream 0 using template query59.tpl +WITH wss + AS (SELECT d_week_seq, + ss_store_sk, + Sum(CASE + WHEN ( d_day_name = 'Sunday' ) THEN ss_sales_price + ELSE NULL + END) sun_sales, + Sum(CASE + WHEN ( d_day_name = 'Monday' ) THEN ss_sales_price + ELSE NULL + END) mon_sales, + Sum(CASE + WHEN ( d_day_name = 'Tuesday' ) THEN ss_sales_price + ELSE NULL + END) tue_sales, + Sum(CASE + WHEN ( d_day_name = 'Wednesday' ) THEN ss_sales_price + ELSE NULL + END) wed_sales, + Sum(CASE + WHEN ( d_day_name = 'Thursday' ) THEN ss_sales_price + ELSE NULL + END) thu_sales, + Sum(CASE + WHEN ( d_day_name = 'Friday' ) THEN ss_sales_price + ELSE NULL + END) fri_sales, + 
Sum(CASE + WHEN ( d_day_name = 'Saturday' ) THEN ss_sales_price + ELSE NULL + END) sat_sales + FROM store_sales, + date_dim + WHERE d_date_sk = ss_sold_date_sk + GROUP BY d_week_seq, + ss_store_sk) +SELECT s_store_name1, + s_store_id1, + d_week_seq1, + sun_sales1 / sun_sales2, + mon_sales1 / mon_sales2, + tue_sales1 / tue_sales2, + wed_sales1 / wed_sales2, + thu_sales1 / thu_sales2, + fri_sales1 / fri_sales2, + sat_sales1 / sat_sales2 +FROM (SELECT s_store_name s_store_name1, + wss.d_week_seq d_week_seq1, + s_store_id s_store_id1, + sun_sales sun_sales1, + mon_sales mon_sales1, + tue_sales tue_sales1, + wed_sales wed_sales1, + thu_sales thu_sales1, + fri_sales fri_sales1, + sat_sales sat_sales1 + FROM wss, + store, + date_dim d + WHERE d.d_week_seq = wss.d_week_seq + AND ss_store_sk = s_store_sk + AND d_month_seq BETWEEN 1196 AND 1196 + 11) y, + (SELECT s_store_name s_store_name2, + wss.d_week_seq d_week_seq2, + s_store_id s_store_id2, + sun_sales sun_sales2, + mon_sales mon_sales2, + tue_sales tue_sales2, + wed_sales wed_sales2, + thu_sales thu_sales2, + fri_sales fri_sales2, + sat_sales sat_sales2 + FROM wss, + store, + date_dim d + WHERE d.d_week_seq = wss.d_week_seq + AND ss_store_sk = s_store_sk + AND d_month_seq BETWEEN 1196 + 12 AND 1196 + 23) x +WHERE s_store_id1 = s_store_id2 + AND d_week_seq1 = d_week_seq2 - 52 +ORDER BY s_store_name1, + s_store_id1, + d_week_seq1 +LIMIT 100; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query60.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query60.sql new file mode 100644 index 000000000..a7f480b8a --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query60.sql @@ -0,0 +1,66 @@ +-- start query 60 in stream 0 using template query60.tpl +WITH ss + AS (SELECT i_item_id, + Sum(ss_ext_sales_price) total_sales + FROM store_sales, + date_dim, + customer_address, + item + WHERE i_item_id IN (SELECT i_item_id + FROM item + WHERE i_category IN ( 'Jewelry' )) + AND 
ss_item_sk = i_item_sk + AND ss_sold_date_sk = d_date_sk + AND d_year = 1999 + AND d_moy = 8 + AND ss_addr_sk = ca_address_sk + AND ca_gmt_offset = -6 + GROUP BY i_item_id), + cs + AS (SELECT i_item_id, + Sum(cs_ext_sales_price) total_sales + FROM catalog_sales, + date_dim, + customer_address, + item + WHERE i_item_id IN (SELECT i_item_id + FROM item + WHERE i_category IN ( 'Jewelry' )) + AND cs_item_sk = i_item_sk + AND cs_sold_date_sk = d_date_sk + AND d_year = 1999 + AND d_moy = 8 + AND cs_bill_addr_sk = ca_address_sk + AND ca_gmt_offset = -6 + GROUP BY i_item_id), + ws + AS (SELECT i_item_id, + Sum(ws_ext_sales_price) total_sales + FROM web_sales, + date_dim, + customer_address, + item + WHERE i_item_id IN (SELECT i_item_id + FROM item + WHERE i_category IN ( 'Jewelry' )) + AND ws_item_sk = i_item_sk + AND ws_sold_date_sk = d_date_sk + AND d_year = 1999 + AND d_moy = 8 + AND ws_bill_addr_sk = ca_address_sk + AND ca_gmt_offset = -6 + GROUP BY i_item_id) +SELECT i_item_id, + Sum(total_sales) total_sales +FROM (SELECT * + FROM ss + UNION ALL + SELECT * + FROM cs + UNION ALL + SELECT * + FROM ws) tmp1 +GROUP BY i_item_id +ORDER BY i_item_id, + total_sales +LIMIT 100; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query61.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query61.sql new file mode 100644 index 000000000..bd9c4cd5e --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query61.sql @@ -0,0 +1,47 @@ +-- start query 61 in stream 0 using template query61.tpl +SELECT promotions, + total, + Cast(promotions AS DECIMAL(15, 4)) / + Cast(total AS DECIMAL(15, 4)) * 100 +FROM (SELECT Sum(ss_ext_sales_price) promotions + FROM store_sales, + store, + promotion, + date_dim, + customer, + customer_address, + item + WHERE ss_sold_date_sk = d_date_sk + AND ss_store_sk = s_store_sk + AND ss_promo_sk = p_promo_sk + AND ss_customer_sk = c_customer_sk + AND ca_address_sk = c_current_addr_sk + AND ss_item_sk = 
i_item_sk + AND ca_gmt_offset = -7 + AND i_category = 'Books' + AND ( p_channel_dmail = 'Y' + OR p_channel_email = 'Y' + OR p_channel_tv = 'Y' ) + AND s_gmt_offset = -7 + AND d_year = 2001 + AND d_moy = 12) promotional_sales, + (SELECT Sum(ss_ext_sales_price) total + FROM store_sales, + store, + date_dim, + customer, + customer_address, + item + WHERE ss_sold_date_sk = d_date_sk + AND ss_store_sk = s_store_sk + AND ss_customer_sk = c_customer_sk + AND ca_address_sk = c_current_addr_sk + AND ss_item_sk = i_item_sk + AND ca_gmt_offset = -7 + AND i_category = 'Books' + AND s_gmt_offset = -7 + AND d_year = 2001 + AND d_moy = 12) all_sales +ORDER BY promotions, + total +LIMIT 100; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query62.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query62.sql new file mode 100644 index 000000000..6b8439920 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query62.sql @@ -0,0 +1,45 @@ +-- start query 62 in stream 0 using template query62.tpl +SELECT Substr(w_warehouse_name, 1, 20), + sm_type, + web_name, + Sum(CASE + WHEN ( ws_ship_date_sk - ws_sold_date_sk <= 30 ) THEN 1 + ELSE 0 + END) AS `30 days`, + Sum(CASE + WHEN ( ws_ship_date_sk - ws_sold_date_sk > 30 ) + AND ( ws_ship_date_sk - ws_sold_date_sk <= 60 ) THEN 1 + ELSE 0 + END) AS `31-60 days`, + Sum(CASE + WHEN ( ws_ship_date_sk - ws_sold_date_sk > 60 ) + AND ( ws_ship_date_sk - ws_sold_date_sk <= 90 ) THEN 1 + ELSE 0 + END) AS `61-90 days`, + Sum(CASE + WHEN ( ws_ship_date_sk - ws_sold_date_sk > 90 ) + AND ( ws_ship_date_sk - ws_sold_date_sk <= 120 ) THEN + 1 + ELSE 0 + END) AS `91-120 days`, + Sum(CASE + WHEN ( ws_ship_date_sk - ws_sold_date_sk > 120 ) THEN 1 + ELSE 0 + END) AS `>120 days` +FROM web_sales, + warehouse, + ship_mode, + web_site, + date_dim +WHERE d_month_seq BETWEEN 1222 AND 1222 + 11 + AND ws_ship_date_sk = d_date_sk + AND ws_warehouse_sk = w_warehouse_sk + AND ws_ship_mode_sk = sm_ship_mode_sk + 
AND ws_web_site_sk = web_site_sk +GROUP BY Substr(w_warehouse_name, 1, 20), + sm_type, + web_name +ORDER BY Substr(w_warehouse_name, 1, 20), + sm_type, + web_name +LIMIT 100; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query63.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query63.sql new file mode 100644 index 000000000..3ebb7e726 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query63.sql @@ -0,0 +1,45 @@ +-- start query 63 in stream 0 using template query63.tpl +SELECT * +FROM (SELECT i_manager_id, + Sum(ss_sales_price) sum_sales, + Avg(Sum(ss_sales_price)) + OVER ( + partition BY i_manager_id) avg_monthly_sales + FROM item, + store_sales, + date_dim, + store + WHERE ss_item_sk = i_item_sk + AND ss_sold_date_sk = d_date_sk + AND ss_store_sk = s_store_sk + AND d_month_seq IN ( 1200, 1200 + 1, 1200 + 2, 1200 + 3, + 1200 + 4, 1200 + 5, 1200 + 6, 1200 + 7, + 1200 + 8, 1200 + 9, 1200 + 10, 1200 + 11 ) + AND ( ( i_category IN ( 'Books', 'Children', 'Electronics' ) + AND i_class IN ( 'personal', 'portable', 'reference', + 'self-help' ) + AND i_brand IN ( 'scholaramalgamalg #14', + 'scholaramalgamalg #7' + , + 'exportiunivamalg #9', + 'scholaramalgamalg #9' ) + ) + OR ( i_category IN ( 'Women', 'Music', 'Men' ) + AND i_class IN ( 'accessories', 'classical', + 'fragrances', + 'pants' ) + AND i_brand IN ( 'amalgimporto #1', + 'edu packscholar #1', + 'exportiimporto #1', + 'importoamalg #1' ) ) ) + GROUP BY i_manager_id, + d_moy) tmp1 +WHERE CASE + WHEN avg_monthly_sales > 0 THEN Abs (sum_sales - avg_monthly_sales) / + avg_monthly_sales + ELSE NULL + END > 0.1 +ORDER BY i_manager_id, + avg_monthly_sales, + sum_sales +LIMIT 100; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query64.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query64.sql new file mode 100644 index 000000000..e883b6420 --- /dev/null +++ 
b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query64.sql @@ -0,0 +1,122 @@ +-- start query 64 in stream 0 using template query64.tpl +WITH cs_ui + AS (SELECT cs_item_sk, + Sum(cs_ext_list_price) AS sale, + Sum(cr_refunded_cash + cr_reversed_charge + + cr_store_credit) AS refund + FROM catalog_sales, + catalog_returns + WHERE cs_item_sk = cr_item_sk + AND cs_order_number = cr_order_number + GROUP BY cs_item_sk + HAVING Sum(cs_ext_list_price) > 2 * Sum( + cr_refunded_cash + cr_reversed_charge + + cr_store_credit)), + cross_sales + AS (SELECT i_product_name product_name, + i_item_sk item_sk, + s_store_name store_name, + s_zip store_zip, + ad1.ca_street_number b_street_number, + ad1.ca_street_name b_streen_name, + ad1.ca_city b_city, + ad1.ca_zip b_zip, + ad2.ca_street_number c_street_number, + ad2.ca_street_name c_street_name, + ad2.ca_city c_city, + ad2.ca_zip c_zip, + d1.d_year AS syear, + d2.d_year AS fsyear, + d3.d_year s2year, + Count(*) cnt, + Sum(ss_wholesale_cost) s1, + Sum(ss_list_price) s2, + Sum(ss_coupon_amt) s3 + FROM store_sales, + store_returns, + cs_ui, + date_dim d1, + date_dim d2, + date_dim d3, + store, + customer, + customer_demographics cd1, + customer_demographics cd2, + promotion, + household_demographics hd1, + household_demographics hd2, + customer_address ad1, + customer_address ad2, + income_band ib1, + income_band ib2, + item + WHERE ss_store_sk = s_store_sk + AND ss_sold_date_sk = d1.d_date_sk + AND ss_customer_sk = c_customer_sk + AND ss_cdemo_sk = cd1.cd_demo_sk + AND ss_hdemo_sk = hd1.hd_demo_sk + AND ss_addr_sk = ad1.ca_address_sk + AND ss_item_sk = i_item_sk + AND ss_item_sk = sr_item_sk + AND ss_ticket_number = sr_ticket_number + AND ss_item_sk = cs_ui.cs_item_sk + AND c_current_cdemo_sk = cd2.cd_demo_sk + AND c_current_hdemo_sk = hd2.hd_demo_sk + AND c_current_addr_sk = ad2.ca_address_sk + AND c_first_sales_date_sk = d2.d_date_sk + AND c_first_shipto_date_sk = d3.d_date_sk + AND ss_promo_sk = p_promo_sk + AND 
hd1.hd_income_band_sk = ib1.ib_income_band_sk + AND hd2.hd_income_band_sk = ib2.ib_income_band_sk + AND cd1.cd_marital_status <> cd2.cd_marital_status + AND i_color IN ( 'cyan', 'peach', 'blush', 'frosted', + 'powder', 'orange' ) + AND i_current_price BETWEEN 58 AND 58 + 10 + AND i_current_price BETWEEN 58 + 1 AND 58 + 15 + GROUP BY i_product_name, + i_item_sk, + s_store_name, + s_zip, + ad1.ca_street_number, + ad1.ca_street_name, + ad1.ca_city, + ad1.ca_zip, + ad2.ca_street_number, + ad2.ca_street_name, + ad2.ca_city, + ad2.ca_zip, + d1.d_year, + d2.d_year, + d3.d_year) +SELECT cs1.product_name, + cs1.store_name, + cs1.store_zip, + cs1.b_street_number, + cs1.b_streen_name, + cs1.b_city, + cs1.b_zip, + cs1.c_street_number, + cs1.c_street_name, + cs1.c_city, + cs1.c_zip, + cs1.syear, + cs1.cnt, + cs1.s1, + cs1.s2, + cs1.s3, + cs2.s1, + cs2.s2, + cs2.s3, + cs2.syear, + cs2.cnt +FROM cross_sales cs1, + cross_sales cs2 +WHERE cs1.item_sk = cs2.item_sk + AND cs1.syear = 2001 + AND cs2.syear = 2001 + 1 + AND cs2.cnt <= cs1.cnt + AND cs1.store_name = cs2.store_name + AND cs1.store_zip = cs2.store_zip +ORDER BY cs1.product_name, + cs1.store_name, + cs2.cnt; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query65.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query65.sql new file mode 100644 index 000000000..bf4ea2ce0 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query65.sql @@ -0,0 +1,37 @@ +-- start query 65 in stream 0 using template query65.tpl +SELECT s_store_name, + i_item_desc, + sc.revenue, + i_current_price, + i_wholesale_cost, + i_brand +FROM store, + item, + (SELECT ss_store_sk, + Avg(revenue) AS ave + FROM (SELECT ss_store_sk, + ss_item_sk, + Sum(ss_sales_price) AS revenue + FROM store_sales, + date_dim + WHERE ss_sold_date_sk = d_date_sk + AND d_month_seq BETWEEN 1199 AND 1199 + 11 + GROUP BY ss_store_sk, + ss_item_sk) sa + GROUP BY ss_store_sk) sb, + (SELECT ss_store_sk, + ss_item_sk, + 
Sum(ss_sales_price) AS revenue + FROM store_sales, + date_dim + WHERE ss_sold_date_sk = d_date_sk + AND d_month_seq BETWEEN 1199 AND 1199 + 11 + GROUP BY ss_store_sk, + ss_item_sk) sc +WHERE sb.ss_store_sk = sc.ss_store_sk + AND sc.revenue <= 0.1 * sb.ave + AND s_store_sk = sc.ss_store_sk + AND i_item_sk = sc.ss_item_sk +ORDER BY s_store_name, + i_item_desc +LIMIT 100; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query66.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query66.sql new file mode 100644 index 000000000..5e46266f1 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query66.sql @@ -0,0 +1,306 @@ +-- start query 66 in stream 0 using template query66.tpl +SELECT w_warehouse_name, + w_warehouse_sq_ft, + w_city, + w_county, + w_state, + w_country, + ship_carriers, + year1, + Sum(jan_sales) AS jan_sales, + Sum(feb_sales) AS feb_sales, + Sum(mar_sales) AS mar_sales, + Sum(apr_sales) AS apr_sales, + Sum(may_sales) AS may_sales, + Sum(jun_sales) AS jun_sales, + Sum(jul_sales) AS jul_sales, + Sum(aug_sales) AS aug_sales, + Sum(sep_sales) AS sep_sales, + Sum(oct_sales) AS oct_sales, + Sum(nov_sales) AS nov_sales, + Sum(dec_sales) AS dec_sales, + Sum(jan_sales / w_warehouse_sq_ft) AS jan_sales_per_sq_foot, + Sum(feb_sales / w_warehouse_sq_ft) AS feb_sales_per_sq_foot, + Sum(mar_sales / w_warehouse_sq_ft) AS mar_sales_per_sq_foot, + Sum(apr_sales / w_warehouse_sq_ft) AS apr_sales_per_sq_foot, + Sum(may_sales / w_warehouse_sq_ft) AS may_sales_per_sq_foot, + Sum(jun_sales / w_warehouse_sq_ft) AS jun_sales_per_sq_foot, + Sum(jul_sales / w_warehouse_sq_ft) AS jul_sales_per_sq_foot, + Sum(aug_sales / w_warehouse_sq_ft) AS aug_sales_per_sq_foot, + Sum(sep_sales / w_warehouse_sq_ft) AS sep_sales_per_sq_foot, + Sum(oct_sales / w_warehouse_sq_ft) AS oct_sales_per_sq_foot, + Sum(nov_sales / w_warehouse_sq_ft) AS nov_sales_per_sq_foot, + Sum(dec_sales / w_warehouse_sq_ft) AS dec_sales_per_sq_foot, + Sum(jan_net) 
AS jan_net, + Sum(feb_net) AS feb_net, + Sum(mar_net) AS mar_net, + Sum(apr_net) AS apr_net, + Sum(may_net) AS may_net, + Sum(jun_net) AS jun_net, + Sum(jul_net) AS jul_net, + Sum(aug_net) AS aug_net, + Sum(sep_net) AS sep_net, + Sum(oct_net) AS oct_net, + Sum(nov_net) AS nov_net, + Sum(dec_net) AS dec_net +FROM (SELECT w_warehouse_name, + w_warehouse_sq_ft, + w_city, + w_county, + w_state, + w_country, + 'ZOUROS' + || ',' + || 'ZHOU' AS ship_carriers, + d_year AS year1, + Sum(CASE + WHEN d_moy = 1 THEN ws_ext_sales_price * ws_quantity + ELSE 0 + END) AS jan_sales, + Sum(CASE + WHEN d_moy = 2 THEN ws_ext_sales_price * ws_quantity + ELSE 0 + END) AS feb_sales, + Sum(CASE + WHEN d_moy = 3 THEN ws_ext_sales_price * ws_quantity + ELSE 0 + END) AS mar_sales, + Sum(CASE + WHEN d_moy = 4 THEN ws_ext_sales_price * ws_quantity + ELSE 0 + END) AS apr_sales, + Sum(CASE + WHEN d_moy = 5 THEN ws_ext_sales_price * ws_quantity + ELSE 0 + END) AS may_sales, + Sum(CASE + WHEN d_moy = 6 THEN ws_ext_sales_price * ws_quantity + ELSE 0 + END) AS jun_sales, + Sum(CASE + WHEN d_moy = 7 THEN ws_ext_sales_price * ws_quantity + ELSE 0 + END) AS jul_sales, + Sum(CASE + WHEN d_moy = 8 THEN ws_ext_sales_price * ws_quantity + ELSE 0 + END) AS aug_sales, + Sum(CASE + WHEN d_moy = 9 THEN ws_ext_sales_price * ws_quantity + ELSE 0 + END) AS sep_sales, + Sum(CASE + WHEN d_moy = 10 THEN ws_ext_sales_price * ws_quantity + ELSE 0 + END) AS oct_sales, + Sum(CASE + WHEN d_moy = 11 THEN ws_ext_sales_price * ws_quantity + ELSE 0 + END) AS nov_sales, + Sum(CASE + WHEN d_moy = 12 THEN ws_ext_sales_price * ws_quantity + ELSE 0 + END) AS dec_sales, + Sum(CASE + WHEN d_moy = 1 THEN ws_net_paid_inc_ship * ws_quantity + ELSE 0 + END) AS jan_net, + Sum(CASE + WHEN d_moy = 2 THEN ws_net_paid_inc_ship * ws_quantity + ELSE 0 + END) AS feb_net, + Sum(CASE + WHEN d_moy = 3 THEN ws_net_paid_inc_ship * ws_quantity + ELSE 0 + END) AS mar_net, + Sum(CASE + WHEN d_moy = 4 THEN ws_net_paid_inc_ship * ws_quantity + ELSE 0 + 
END) AS apr_net, + Sum(CASE + WHEN d_moy = 5 THEN ws_net_paid_inc_ship * ws_quantity + ELSE 0 + END) AS may_net, + Sum(CASE + WHEN d_moy = 6 THEN ws_net_paid_inc_ship * ws_quantity + ELSE 0 + END) AS jun_net, + Sum(CASE + WHEN d_moy = 7 THEN ws_net_paid_inc_ship * ws_quantity + ELSE 0 + END) AS jul_net, + Sum(CASE + WHEN d_moy = 8 THEN ws_net_paid_inc_ship * ws_quantity + ELSE 0 + END) AS aug_net, + Sum(CASE + WHEN d_moy = 9 THEN ws_net_paid_inc_ship * ws_quantity + ELSE 0 + END) AS sep_net, + Sum(CASE + WHEN d_moy = 10 THEN ws_net_paid_inc_ship * ws_quantity + ELSE 0 + END) AS oct_net, + Sum(CASE + WHEN d_moy = 11 THEN ws_net_paid_inc_ship * ws_quantity + ELSE 0 + END) AS nov_net, + Sum(CASE + WHEN d_moy = 12 THEN ws_net_paid_inc_ship * ws_quantity + ELSE 0 + END) AS dec_net + FROM web_sales, + warehouse, + date_dim, + time_dim, + ship_mode + WHERE ws_warehouse_sk = w_warehouse_sk + AND ws_sold_date_sk = d_date_sk + AND ws_sold_time_sk = t_time_sk + AND ws_ship_mode_sk = sm_ship_mode_sk + AND d_year = 1998 + AND t_time BETWEEN 7249 AND 7249 + 28800 + AND sm_carrier IN ( 'ZOUROS', 'ZHOU' ) + GROUP BY w_warehouse_name, + w_warehouse_sq_ft, + w_city, + w_county, + w_state, + w_country, + d_year + UNION ALL + SELECT w_warehouse_name, + w_warehouse_sq_ft, + w_city, + w_county, + w_state, + w_country, + 'ZOUROS' + || ',' + || 'ZHOU' AS ship_carriers, + d_year AS year1, + Sum(CASE + WHEN d_moy = 1 THEN cs_ext_sales_price * cs_quantity + ELSE 0 + END) AS jan_sales, + Sum(CASE + WHEN d_moy = 2 THEN cs_ext_sales_price * cs_quantity + ELSE 0 + END) AS feb_sales, + Sum(CASE + WHEN d_moy = 3 THEN cs_ext_sales_price * cs_quantity + ELSE 0 + END) AS mar_sales, + Sum(CASE + WHEN d_moy = 4 THEN cs_ext_sales_price * cs_quantity + ELSE 0 + END) AS apr_sales, + Sum(CASE + WHEN d_moy = 5 THEN cs_ext_sales_price * cs_quantity + ELSE 0 + END) AS may_sales, + Sum(CASE + WHEN d_moy = 6 THEN cs_ext_sales_price * cs_quantity + ELSE 0 + END) AS jun_sales, + Sum(CASE + WHEN d_moy = 7 THEN 
cs_ext_sales_price * cs_quantity + ELSE 0 + END) AS jul_sales, + Sum(CASE + WHEN d_moy = 8 THEN cs_ext_sales_price * cs_quantity + ELSE 0 + END) AS aug_sales, + Sum(CASE + WHEN d_moy = 9 THEN cs_ext_sales_price * cs_quantity + ELSE 0 + END) AS sep_sales, + Sum(CASE + WHEN d_moy = 10 THEN cs_ext_sales_price * cs_quantity + ELSE 0 + END) AS oct_sales, + Sum(CASE + WHEN d_moy = 11 THEN cs_ext_sales_price * cs_quantity + ELSE 0 + END) AS nov_sales, + Sum(CASE + WHEN d_moy = 12 THEN cs_ext_sales_price * cs_quantity + ELSE 0 + END) AS dec_sales, + Sum(CASE + WHEN d_moy = 1 THEN cs_net_paid * cs_quantity + ELSE 0 + END) AS jan_net, + Sum(CASE + WHEN d_moy = 2 THEN cs_net_paid * cs_quantity + ELSE 0 + END) AS feb_net, + Sum(CASE + WHEN d_moy = 3 THEN cs_net_paid * cs_quantity + ELSE 0 + END) AS mar_net, + Sum(CASE + WHEN d_moy = 4 THEN cs_net_paid * cs_quantity + ELSE 0 + END) AS apr_net, + Sum(CASE + WHEN d_moy = 5 THEN cs_net_paid * cs_quantity + ELSE 0 + END) AS may_net, + Sum(CASE + WHEN d_moy = 6 THEN cs_net_paid * cs_quantity + ELSE 0 + END) AS jun_net, + Sum(CASE + WHEN d_moy = 7 THEN cs_net_paid * cs_quantity + ELSE 0 + END) AS jul_net, + Sum(CASE + WHEN d_moy = 8 THEN cs_net_paid * cs_quantity + ELSE 0 + END) AS aug_net, + Sum(CASE + WHEN d_moy = 9 THEN cs_net_paid * cs_quantity + ELSE 0 + END) AS sep_net, + Sum(CASE + WHEN d_moy = 10 THEN cs_net_paid * cs_quantity + ELSE 0 + END) AS oct_net, + Sum(CASE + WHEN d_moy = 11 THEN cs_net_paid * cs_quantity + ELSE 0 + END) AS nov_net, + Sum(CASE + WHEN d_moy = 12 THEN cs_net_paid * cs_quantity + ELSE 0 + END) AS dec_net + FROM catalog_sales, + warehouse, + date_dim, + time_dim, + ship_mode + WHERE cs_warehouse_sk = w_warehouse_sk + AND cs_sold_date_sk = d_date_sk + AND cs_sold_time_sk = t_time_sk + AND cs_ship_mode_sk = sm_ship_mode_sk + AND d_year = 1998 + AND t_time BETWEEN 7249 AND 7249 + 28800 + AND sm_carrier IN ( 'ZOUROS', 'ZHOU' ) + GROUP BY w_warehouse_name, + w_warehouse_sq_ft, + w_city, + w_county, + w_state, 
+ w_country, + d_year) x +GROUP BY w_warehouse_name, + w_warehouse_sq_ft, + w_city, + w_county, + w_state, + w_country, + ship_carriers, + year1 +ORDER BY w_warehouse_name +LIMIT 100; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query67.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query67.sql new file mode 100644 index 000000000..fd137cfb3 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query67.sql @@ -0,0 +1,43 @@ +-- start query 67 in stream 0 using template query67.tpl +select top 100 * +from (select i_category + ,i_class + ,i_brand + ,i_product_name + ,d_year + ,d_qoy + ,d_moy + ,s_store_id + ,sumsales + ,rank() over (partition by i_category order by sumsales desc) rk + from (select i_category + ,i_class + ,i_brand + ,i_product_name + ,d_year + ,d_qoy + ,d_moy + ,s_store_id + ,sum(coalesce(ss_sales_price*ss_quantity,0)) sumsales + from store_sales + ,date_dim + ,store + ,item + where ss_sold_date_sk=d_date_sk + and ss_item_sk=i_item_sk + and ss_store_sk = s_store_sk + and d_month_seq between 1181 and 1181+11 + group by rollup(i_category, i_class, i_brand, i_product_name, d_year, d_qoy, d_moy,s_store_id))dw1) dw2 +where rk <= 100 +order by i_category + ,i_class + ,i_brand + ,i_product_name + ,d_year + ,d_qoy + ,d_moy + ,s_store_id + ,sumsales + ,rk +; + diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query68.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query68.sql new file mode 100644 index 000000000..ce217e5ac --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query68.sql @@ -0,0 +1,41 @@ +-- start query 68 in stream 0 using template query68.tpl +SELECT c_last_name, + c_first_name, + ca_city, + bought_city, + ss_ticket_number, + extended_price, + extended_tax, + list_price +FROM (SELECT ss_ticket_number, + ss_customer_sk, + ca_city bought_city, + Sum(ss_ext_sales_price) extended_price, + Sum(ss_ext_list_price) list_price, + 
Sum(ss_ext_tax) extended_tax + FROM store_sales, + date_dim, + store, + household_demographics, + customer_address + WHERE store_sales.ss_sold_date_sk = date_dim.d_date_sk + AND store_sales.ss_store_sk = store.s_store_sk + AND store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk + AND store_sales.ss_addr_sk = customer_address.ca_address_sk + AND date_dim.d_dom BETWEEN 1 AND 2 + AND ( household_demographics.hd_dep_count = 8 + OR household_demographics.hd_vehicle_count = 3 ) + AND date_dim.d_year IN ( 1998, 1998 + 1, 1998 + 2 ) + AND store.s_city IN ( 'Fairview', 'Midway' ) + GROUP BY ss_ticket_number, + ss_customer_sk, + ss_addr_sk, + ca_city) dn, + customer, + customer_address current_addr +WHERE ss_customer_sk = c_customer_sk + AND customer.c_current_addr_sk = current_addr.ca_address_sk + AND current_addr.ca_city <> bought_city +ORDER BY c_last_name, + ss_ticket_number +LIMIT 100; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query69.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query69.sql new file mode 100644 index 000000000..997f073c1 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query69.sql @@ -0,0 +1,46 @@ +SELECT cd_gender, + cd_marital_status, + cd_education_status, + Count(*) cnt1, + cd_purchase_estimate, + Count(*) cnt2, + cd_credit_rating, + Count(*) cnt3 +FROM customer c, + customer_address ca, + customer_demographics +WHERE c.c_current_addr_sk = ca.ca_address_sk + AND ca_state IN ( 'KS', 'AZ', 'NE' ) + AND cd_demo_sk = c.c_current_cdemo_sk + AND EXISTS (SELECT * + FROM store_sales, + date_dim + WHERE c.c_customer_sk = ss_customer_sk + AND ss_sold_date_sk = d_date_sk + AND d_year = 2004 + AND d_moy BETWEEN 3 AND 3 + 2) + AND ( NOT EXISTS (SELECT * + FROM web_sales, + date_dim + WHERE c.c_customer_sk = ws_bill_customer_sk + AND ws_sold_date_sk = d_date_sk + AND d_year = 2004 + AND d_moy BETWEEN 3 AND 3 + 2) + AND NOT EXISTS (SELECT * + FROM catalog_sales, + date_dim + WHERE 
c.c_customer_sk = cs_ship_customer_sk + AND cs_sold_date_sk = d_date_sk + AND d_year = 2004 + AND d_moy BETWEEN 3 AND 3 + 2) ) +GROUP BY cd_gender, + cd_marital_status, + cd_education_status, + cd_purchase_estimate, + cd_credit_rating +ORDER BY cd_gender, + cd_marital_status, + cd_education_status, + cd_purchase_estimate, + cd_credit_rating +LIMIT 100; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query70.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query70.sql new file mode 100644 index 000000000..bf0884e16 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query70.sql @@ -0,0 +1,40 @@ +-- start query 70 in stream 0 using template query70.tpl +SELECT Sum(ss_net_profit) AS total_sum, + s_state, + s_county, + Grouping(s_state) + Grouping(s_county) AS lochierarchy, + Rank() + OVER ( + partition BY Grouping(s_state)+Grouping(s_county), CASE WHEN + Grouping( + s_county) = 0 THEN s_state END + ORDER BY Sum(ss_net_profit) DESC) AS rank_within_parent +FROM store_sales, + date_dim d1, + store +WHERE d1.d_month_seq BETWEEN 1200 AND 1200 + 11 + AND d1.d_date_sk = ss_sold_date_sk + AND s_store_sk = ss_store_sk + AND s_state IN (SELECT s_state + FROM (SELECT s_state AS + s_state, + Rank() + OVER ( + partition BY s_state + ORDER BY Sum(ss_net_profit) DESC) AS + ranking + FROM store_sales, + store, + date_dim + WHERE d_month_seq BETWEEN 1200 AND 1200 + 11 + AND d_date_sk = ss_sold_date_sk + AND s_store_sk = ss_store_sk + GROUP BY s_state) tmp1 + WHERE ranking <= 5) +GROUP BY rollup( s_state, s_county ) +ORDER BY lochierarchy DESC, + CASE + WHEN lochierarchy = 0 THEN s_state + END, + rank_within_parent +LIMIT 100; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query71.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query71.sql new file mode 100644 index 000000000..43b40bfff --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query71.sql @@ -0,0 +1,48 
@@ +-- start query 71 in stream 0 using template query71.tpl +SELECT i_brand_id brand_id, + i_brand brand, + t_hour, + t_minute, + Sum(ext_price) ext_price +FROM item, + (SELECT ws_ext_sales_price AS ext_price, + ws_sold_date_sk AS sold_date_sk, + ws_item_sk AS sold_item_sk, + ws_sold_time_sk AS time_sk + FROM web_sales, + date_dim + WHERE d_date_sk = ws_sold_date_sk + AND d_moy = 11 + AND d_year = 2001 + UNION ALL + SELECT cs_ext_sales_price AS ext_price, + cs_sold_date_sk AS sold_date_sk, + cs_item_sk AS sold_item_sk, + cs_sold_time_sk AS time_sk + FROM catalog_sales, + date_dim + WHERE d_date_sk = cs_sold_date_sk + AND d_moy = 11 + AND d_year = 2001 + UNION ALL + SELECT ss_ext_sales_price AS ext_price, + ss_sold_date_sk AS sold_date_sk, + ss_item_sk AS sold_item_sk, + ss_sold_time_sk AS time_sk + FROM store_sales, + date_dim + WHERE d_date_sk = ss_sold_date_sk + AND d_moy = 11 + AND d_year = 2001) AS tmp, + time_dim +WHERE sold_item_sk = i_item_sk + AND i_manager_id = 1 + AND time_sk = t_time_sk + AND ( t_meal_time = 'breakfast' + OR t_meal_time = 'dinner' ) +GROUP BY i_brand, + i_brand_id, + t_hour, + t_minute +ORDER BY ext_price DESC, + i_brand_id; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query72.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query72.sql new file mode 100644 index 000000000..05e5f0957 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query72.sql @@ -0,0 +1,49 @@ +-- start query 72 in stream 0 using template query72.tpl +SELECT i_item_desc, + w_warehouse_name, + d1.d_week_seq, + Sum(CASE + WHEN p_promo_sk IS NULL THEN 1 + ELSE 0 + END) no_promo, + Sum(CASE + WHEN p_promo_sk IS NOT NULL THEN 1 + ELSE 0 + END) promo, + Count(*) total_cnt +FROM catalog_sales + JOIN inventory + ON ( cs_item_sk = inv_item_sk ) + JOIN warehouse + ON ( w_warehouse_sk = inv_warehouse_sk ) + JOIN item + ON ( i_item_sk = cs_item_sk ) + JOIN customer_demographics + ON ( cs_bill_cdemo_sk = cd_demo_sk ) 
+ JOIN household_demographics + ON ( cs_bill_hdemo_sk = hd_demo_sk ) + JOIN date_dim d1 + ON ( cs_sold_date_sk = d1.d_date_sk ) + JOIN date_dim d2 + ON ( inv_date_sk = d2.d_date_sk ) + JOIN date_dim d3 + ON ( cs_ship_date_sk = d3.d_date_sk ) + LEFT OUTER JOIN promotion + ON ( cs_promo_sk = p_promo_sk ) + LEFT OUTER JOIN catalog_returns + ON ( cr_item_sk = cs_item_sk + AND cr_order_number = cs_order_number ) +WHERE d1.d_week_seq = d2.d_week_seq + AND inv_quantity_on_hand < cs_quantity + AND d3.d_date > d1.d_date + INTERVAL '5' day + AND hd_buy_potential = '501-1000' + AND d1.d_year = 2002 + AND cd_marital_status = 'M' +GROUP BY i_item_desc, + w_warehouse_name, + d1.d_week_seq +ORDER BY total_cnt DESC, + i_item_desc, + w_warehouse_name, + d_week_seq +LIMIT 100; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query73.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query73.sql new file mode 100644 index 000000000..47b7788fc --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query73.sql @@ -0,0 +1,39 @@ +-- start query 73 in stream 0 using template query73.tpl +SELECT c_last_name, + c_first_name, + c_salutation, + c_preferred_cust_flag, + ss_ticket_number, + cnt +FROM (SELECT ss_ticket_number, + ss_customer_sk, + Count(*) cnt + FROM store_sales, + date_dim, + store, + household_demographics + WHERE store_sales.ss_sold_date_sk = date_dim.d_date_sk + AND store_sales.ss_store_sk = store.s_store_sk + AND store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk + AND date_dim.d_dom BETWEEN 1 AND 2 + AND ( household_demographics.hd_buy_potential = '>10000' + OR household_demographics.hd_buy_potential = '0-500' ) + AND household_demographics.hd_vehicle_count > 0 + AND CASE + WHEN household_demographics.hd_vehicle_count > 0 THEN + household_demographics.hd_dep_count / + household_demographics.hd_vehicle_count + ELSE NULL + END > 1 + AND date_dim.d_year IN ( 2000, 2000 + 1, 2000 + 2 ) + AND store.s_county IN ( 
'Williamson County', 'Williamson County', + 'Williamson County', + 'Williamson County' + ) + GROUP BY ss_ticket_number, + ss_customer_sk) dj, + customer +WHERE ss_customer_sk = c_customer_sk + AND cnt BETWEEN 1 AND 5 +ORDER BY cnt DESC, + c_last_name ASC; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query74.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query74.sql new file mode 100644 index 000000000..5603c8dc8 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query74.sql @@ -0,0 +1,69 @@ +-- start query 74 in stream 0 using template query74.tpl +WITH year_total + AS (SELECT c_customer_id customer_id, + c_first_name customer_first_name, + c_last_name customer_last_name, + d_year AS year1, + Sum(ss_net_paid) year_total, + 's' sale_type + FROM customer, + store_sales, + date_dim + WHERE c_customer_sk = ss_customer_sk + AND ss_sold_date_sk = d_date_sk + AND d_year IN ( 1999, 1999 + 1 ) + GROUP BY c_customer_id, + c_first_name, + c_last_name, + d_year + UNION ALL + SELECT c_customer_id customer_id, + c_first_name customer_first_name, + c_last_name customer_last_name, + d_year AS year1, + Sum(ws_net_paid) year_total, + 'w' sale_type + FROM customer, + web_sales, + date_dim + WHERE c_customer_sk = ws_bill_customer_sk + AND ws_sold_date_sk = d_date_sk + AND d_year IN ( 1999, 1999 + 1 ) + GROUP BY c_customer_id, + c_first_name, + c_last_name, + d_year) +SELECT t_s_secyear.customer_id, + t_s_secyear.customer_first_name, + t_s_secyear.customer_last_name +FROM year_total t_s_firstyear, + year_total t_s_secyear, + year_total t_w_firstyear, + year_total t_w_secyear +WHERE t_s_secyear.customer_id = t_s_firstyear.customer_id + AND t_s_firstyear.customer_id = t_w_secyear.customer_id + AND t_s_firstyear.customer_id = t_w_firstyear.customer_id + AND t_s_firstyear.sale_type = 's' + AND t_w_firstyear.sale_type = 'w' + AND t_s_secyear.sale_type = 's' + AND t_w_secyear.sale_type = 'w' + AND t_s_firstyear.year1 = 1999 + 
AND t_s_secyear.year1 = 1999 + 1 + AND t_w_firstyear.year1 = 1999 + AND t_w_secyear.year1 = 1999 + 1 + AND t_s_firstyear.year_total > 0 + AND t_w_firstyear.year_total > 0 + AND CASE + WHEN t_w_firstyear.year_total > 0 THEN t_w_secyear.year_total / + t_w_firstyear.year_total + ELSE NULL + END > CASE + WHEN t_s_firstyear.year_total > 0 THEN + t_s_secyear.year_total / + t_s_firstyear.year_total + ELSE NULL + END +ORDER BY 1, + 2, + 3 +LIMIT 100; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query75.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query75.sql new file mode 100644 index 000000000..653741a50 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query75.sql @@ -0,0 +1,93 @@ +-- start query 75 in stream 0 using template query75.tpl +WITH all_sales + AS (SELECT d_year, + i_brand_id, + i_class_id, + i_category_id, + i_manufact_id, + Sum(sales_cnt) AS sales_cnt, + Sum(sales_amt) AS sales_amt + FROM (SELECT d_year, + i_brand_id, + i_class_id, + i_category_id, + i_manufact_id, + cs_quantity - COALESCE(cr_return_quantity, 0) AS + sales_cnt, + cs_ext_sales_price - COALESCE(cr_return_amount, 0.0) AS + sales_amt + FROM catalog_sales + JOIN item + ON i_item_sk = cs_item_sk + JOIN date_dim + ON d_date_sk = cs_sold_date_sk + LEFT JOIN catalog_returns + ON ( cs_order_number = cr_order_number + AND cs_item_sk = cr_item_sk ) + WHERE i_category = 'Men' + UNION + SELECT d_year, + i_brand_id, + i_class_id, + i_category_id, + i_manufact_id, + ss_quantity - COALESCE(sr_return_quantity, 0) AS + sales_cnt, + ss_ext_sales_price - COALESCE(sr_return_amt, 0.0) AS + sales_amt + FROM store_sales + JOIN item + ON i_item_sk = ss_item_sk + JOIN date_dim + ON d_date_sk = ss_sold_date_sk + LEFT JOIN store_returns + ON ( ss_ticket_number = sr_ticket_number + AND ss_item_sk = sr_item_sk ) + WHERE i_category = 'Men' + UNION + SELECT d_year, + i_brand_id, + i_class_id, + i_category_id, + i_manufact_id, + ws_quantity - 
COALESCE(wr_return_quantity, 0) AS + sales_cnt, + ws_ext_sales_price - COALESCE(wr_return_amt, 0.0) AS + sales_amt + FROM web_sales + JOIN item + ON i_item_sk = ws_item_sk + JOIN date_dim + ON d_date_sk = ws_sold_date_sk + LEFT JOIN web_returns + ON ( ws_order_number = wr_order_number + AND ws_item_sk = wr_item_sk ) + WHERE i_category = 'Men') sales_detail + GROUP BY d_year, + i_brand_id, + i_class_id, + i_category_id, + i_manufact_id) +SELECT prev_yr.d_year AS prev_year, + curr_yr.d_year AS year1, + curr_yr.i_brand_id, + curr_yr.i_class_id, + curr_yr.i_category_id, + curr_yr.i_manufact_id, + prev_yr.sales_cnt AS prev_yr_cnt, + curr_yr.sales_cnt AS curr_yr_cnt, + curr_yr.sales_cnt - prev_yr.sales_cnt AS sales_cnt_diff, + curr_yr.sales_amt - prev_yr.sales_amt AS sales_amt_diff +FROM all_sales curr_yr, + all_sales prev_yr +WHERE curr_yr.i_brand_id = prev_yr.i_brand_id + AND curr_yr.i_class_id = prev_yr.i_class_id + AND curr_yr.i_category_id = prev_yr.i_category_id + AND curr_yr.i_manufact_id = prev_yr.i_manufact_id + AND curr_yr.d_year = 2002 + AND prev_yr.d_year = 2002 - 1 + AND Cast(curr_yr.sales_cnt AS DECIMAL(17, 2)) / Cast(prev_yr.sales_cnt AS + DECIMAL(17, 2)) + < 0.9 +ORDER BY sales_cnt_diff +LIMIT 100; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query76.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query76.sql new file mode 100644 index 000000000..ebee4a612 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query76.sql @@ -0,0 +1,57 @@ +-- start query 76 in stream 0 using template query76.tpl +SELECT channel, + col_name, + d_year, + d_qoy, + i_category, + Count(*) sales_cnt, + Sum(ext_sales_price) sales_amt +FROM (SELECT 'store' AS channel, + 'ss_hdemo_sk' col_name, + d_year, + d_qoy, + i_category, + ss_ext_sales_price ext_sales_price + FROM store_sales, + item, + date_dim + WHERE ss_hdemo_sk IS NULL + AND ss_sold_date_sk = d_date_sk + AND ss_item_sk = i_item_sk + UNION ALL + SELECT 'web' AS 
channel, + 'ws_ship_hdemo_sk' col_name, + d_year, + d_qoy, + i_category, + ws_ext_sales_price ext_sales_price + FROM web_sales, + item, + date_dim + WHERE ws_ship_hdemo_sk IS NULL + AND ws_sold_date_sk = d_date_sk + AND ws_item_sk = i_item_sk + UNION ALL + SELECT 'catalog' AS channel, + 'cs_warehouse_sk' col_name, + d_year, + d_qoy, + i_category, + cs_ext_sales_price ext_sales_price + FROM catalog_sales, + item, + date_dim + WHERE cs_warehouse_sk IS NULL + AND cs_sold_date_sk = d_date_sk + AND cs_item_sk = i_item_sk) foo +GROUP BY channel, + col_name, + d_year, + d_qoy, + i_category +ORDER BY channel, + col_name, + d_year, + d_qoy, + i_category +LIMIT 100; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query77.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query77.sql new file mode 100644 index 000000000..4e38de926 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query77.sql @@ -0,0 +1,108 @@ + +-- start query 77 in stream 0 using template query77.tpl +WITH ss AS +( + SELECT s_store_sk, + Sum(ss_ext_sales_price) AS sales, + Sum(ss_net_profit) AS profit + FROM store_sales, + date_dim, + store + WHERE ss_sold_date_sk = d_date_sk + AND d_date BETWEEN Cast('2001-08-16' AS DATE) AND ( + Cast('2001-08-16' AS DATE) + INTERVAL '30' day) + AND ss_store_sk = s_store_sk + GROUP BY s_store_sk) , sr AS +( + SELECT s_store_sk, + sum(sr_return_amt) AS returns1, + sum(sr_net_loss) AS profit_loss + FROM store_returns, + date_dim, + store + WHERE sr_returned_date_sk = d_date_sk + AND d_date BETWEEN cast('2001-08-16' AS date) AND ( + cast('2001-08-16' AS date) + INTERVAL '30' day) + AND sr_store_sk = s_store_sk + GROUP BY s_store_sk), cs AS +( + SELECT cs_call_center_sk, + sum(cs_ext_sales_price) AS sales, + sum(cs_net_profit) AS profit + FROM catalog_sales, + date_dim + WHERE cs_sold_date_sk = d_date_sk + AND d_date BETWEEN cast('2001-08-16' AS date) AND ( + cast('2001-08-16' AS date) + INTERVAL '30' day) + GROUP BY 
cs_call_center_sk ), cr AS +( + SELECT cr_call_center_sk, + sum(cr_return_amount) AS returns1, + sum(cr_net_loss) AS profit_loss + FROM catalog_returns, + date_dim + WHERE cr_returned_date_sk = d_date_sk + AND d_date BETWEEN cast('2001-08-16' AS date) AND ( + cast('2001-08-16' AS date) + INTERVAL '30' day) + GROUP BY cr_call_center_sk ), ws AS +( + SELECT wp_web_page_sk, + sum(ws_ext_sales_price) AS sales, + sum(ws_net_profit) AS profit + FROM web_sales, + date_dim, + web_page + WHERE ws_sold_date_sk = d_date_sk + AND d_date BETWEEN cast('2001-08-16' AS date) AND ( + cast('2001-08-16' AS date) + INTERVAL '30' day) + AND ws_web_page_sk = wp_web_page_sk + GROUP BY wp_web_page_sk), wr AS +( + SELECT wp_web_page_sk, + sum(wr_return_amt) AS returns1, + sum(wr_net_loss) AS profit_loss + FROM web_returns, + date_dim, + web_page + WHERE wr_returned_date_sk = d_date_sk + AND d_date BETWEEN cast('2001-08-16' AS date) AND ( + cast('2001-08-16' AS date) + INTERVAL '30' day) + AND wr_web_page_sk = wp_web_page_sk + GROUP BY wp_web_page_sk) +SELECT + channel , + id , + sum(sales) AS sales , + sum(returns1) AS returns1 , + sum(profit) AS profit +FROM ( + SELECT 'store channel' AS channel , + ss.s_store_sk AS id , + sales , + COALESCE(returns1, 0) AS returns1 , + (profit - COALESCE(profit_loss,0)) AS profit + FROM ss + LEFT JOIN sr + ON ss.s_store_sk = sr.s_store_sk + UNION ALL + SELECT 'catalog channel' AS channel , + cs_call_center_sk AS id , + sales , + returns1 , + (profit - profit_loss) AS profit + FROM cs , + cr + UNION ALL + SELECT 'web channel' AS channel , + ws.wp_web_page_sk AS id , + sales , + COALESCE(returns1, 0) returns1 , + (profit - COALESCE(profit_loss,0)) AS profit + FROM ws + LEFT JOIN wr + ON ws.wp_web_page_sk = wr.wp_web_page_sk ) x +GROUP BY rollup (channel, id) +ORDER BY channel , + id +LIMIT 100; + diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query78.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query78.sql new file 
mode 100644 index 000000000..ef577b59a --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query78.sql @@ -0,0 +1,86 @@ +-- start query 78 in stream 0 using template query78.tpl +WITH ws + AS (SELECT d_year AS ws_sold_year, + ws_item_sk, + ws_bill_customer_sk ws_customer_sk, + Sum(ws_quantity) ws_qty, + Sum(ws_wholesale_cost) ws_wc, + Sum(ws_sales_price) ws_sp + FROM web_sales + LEFT JOIN web_returns + ON wr_order_number = ws_order_number + AND ws_item_sk = wr_item_sk + JOIN date_dim + ON ws_sold_date_sk = d_date_sk + WHERE wr_order_number IS NULL + GROUP BY d_year, + ws_item_sk, + ws_bill_customer_sk), + cs + AS (SELECT d_year AS cs_sold_year, + cs_item_sk, + cs_bill_customer_sk cs_customer_sk, + Sum(cs_quantity) cs_qty, + Sum(cs_wholesale_cost) cs_wc, + Sum(cs_sales_price) cs_sp + FROM catalog_sales + LEFT JOIN catalog_returns + ON cr_order_number = cs_order_number + AND cs_item_sk = cr_item_sk + JOIN date_dim + ON cs_sold_date_sk = d_date_sk + WHERE cr_order_number IS NULL + GROUP BY d_year, + cs_item_sk, + cs_bill_customer_sk), + ss + AS (SELECT d_year AS ss_sold_year, + ss_item_sk, + ss_customer_sk, + Sum(ss_quantity) ss_qty, + Sum(ss_wholesale_cost) ss_wc, + Sum(ss_sales_price) ss_sp + FROM store_sales + LEFT JOIN store_returns + ON sr_ticket_number = ss_ticket_number + AND ss_item_sk = sr_item_sk + JOIN date_dim + ON ss_sold_date_sk = d_date_sk + WHERE sr_ticket_number IS NULL + GROUP BY d_year, + ss_item_sk, + ss_customer_sk) +SELECT ss_item_sk, + Round(ss_qty / ( COALESCE(ws_qty + cs_qty, 1) ), 2) ratio, + ss_qty store_qty, + ss_wc + store_wholesale_cost, + ss_sp + store_sales_price, + COALESCE(ws_qty, 0) + COALESCE(cs_qty, 0) + other_chan_qty, + COALESCE(ws_wc, 0) + COALESCE(cs_wc, 0) + other_chan_wholesale_cost, + COALESCE(ws_sp, 0) + COALESCE(cs_sp, 0) + other_chan_sales_price +FROM ss + LEFT JOIN ws + ON ( ws_sold_year = ss_sold_year + AND ws_item_sk = ss_item_sk + AND ws_customer_sk = ss_customer_sk ) + LEFT JOIN cs + ON ( 
cs_sold_year = ss_sold_year + AND cs_item_sk = cs_item_sk + AND cs_customer_sk = ss_customer_sk ) +WHERE COALESCE(ws_qty, 0) > 0 + AND COALESCE(cs_qty, 0) > 0 + AND ss_sold_year = 1999 +ORDER BY ss_item_sk, + ss_qty DESC, + ss_wc DESC, + ss_sp DESC, + other_chan_qty, + other_chan_wholesale_cost, + other_chan_sales_price, + Round(ss_qty / ( COALESCE(ws_qty + cs_qty, 1) ), 2) +LIMIT 100; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query79.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query79.sql new file mode 100644 index 000000000..07c4dbda4 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query79.sql @@ -0,0 +1,35 @@ +-- start query 79 in stream 0 using template query79.tpl +SELECT c_last_name, + c_first_name, + Substr(s_city, 1, 30), + ss_ticket_number, + amt, + profit +FROM (SELECT ss_ticket_number, + ss_customer_sk, + store.s_city, + Sum(ss_coupon_amt) amt, + Sum(ss_net_profit) profit + FROM store_sales, + date_dim, + store, + household_demographics + WHERE store_sales.ss_sold_date_sk = date_dim.d_date_sk + AND store_sales.ss_store_sk = store.s_store_sk + AND store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk + AND ( household_demographics.hd_dep_count = 8 + OR household_demographics.hd_vehicle_count > 4 ) + AND date_dim.d_dow = 1 + AND date_dim.d_year IN ( 2000, 2000 + 1, 2000 + 2 ) + AND store.s_number_employees BETWEEN 200 AND 295 + GROUP BY ss_ticket_number, + ss_customer_sk, + ss_addr_sk, + store.s_city) ms, + customer +WHERE ss_customer_sk = c_customer_sk +ORDER BY c_last_name, + c_first_name, + Substr(s_city, 1, 30), + profit +LIMIT 100; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query80.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query80.sql new file mode 100644 index 000000000..37774cef3 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query80.sql @@ -0,0 +1,106 @@ +-- start query 80 in stream 0 using 
template query80.tpl +WITH ssr AS +( + SELECT s_store_id AS store_id, + Sum(ss_ext_sales_price) AS sales, + Sum(COALESCE(sr_return_amt, 0)) AS returns1, + Sum(ss_net_profit - COALESCE(sr_net_loss, 0)) AS profit + FROM store_sales + LEFT OUTER JOIN store_returns + ON ( + ss_item_sk = sr_item_sk + AND ss_ticket_number = sr_ticket_number), + date_dim, + store, + item, + promotion + WHERE ss_sold_date_sk = d_date_sk + AND d_date BETWEEN Cast('2000-08-26' AS DATE) AND ( + Cast('2000-08-26' AS DATE) + INTERVAL '30' day) + AND ss_store_sk = s_store_sk + AND ss_item_sk = i_item_sk + AND i_current_price > 50 + AND ss_promo_sk = p_promo_sk + AND p_channel_tv = 'N' + GROUP BY s_store_id) , csr AS +( + SELECT cp_catalog_page_id AS catalog_page_id, + sum(cs_ext_sales_price) AS sales, + sum(COALESCE(cr_return_amount, 0)) AS returns1, + sum(cs_net_profit - COALESCE(cr_net_loss, 0)) AS profit + FROM catalog_sales + LEFT OUTER JOIN catalog_returns + ON ( + cs_item_sk = cr_item_sk + AND cs_order_number = cr_order_number), + date_dim, + catalog_page, + item, + promotion + WHERE cs_sold_date_sk = d_date_sk + AND d_date BETWEEN cast('2000-08-26' AS date) AND ( + cast('2000-08-26' AS date) + INTERVAL '30' day) + AND cs_catalog_page_sk = cp_catalog_page_sk + AND cs_item_sk = i_item_sk + AND i_current_price > 50 + AND cs_promo_sk = p_promo_sk + AND p_channel_tv = 'N' + GROUP BY cp_catalog_page_id) , wsr AS +( + SELECT web_site_id, + sum(ws_ext_sales_price) AS sales, + sum(COALESCE(wr_return_amt, 0)) AS returns1, + sum(ws_net_profit - COALESCE(wr_net_loss, 0)) AS profit + FROM web_sales + LEFT OUTER JOIN web_returns + ON ( + ws_item_sk = wr_item_sk + AND ws_order_number = wr_order_number), + date_dim, + web_site, + item, + promotion + WHERE ws_sold_date_sk = d_date_sk + AND d_date BETWEEN cast('2000-08-26' AS date) AND ( + cast('2000-08-26' AS date) + INTERVAL '30' day) + AND ws_web_site_sk = web_site_sk + AND ws_item_sk = i_item_sk + AND i_current_price > 50 + AND ws_promo_sk = p_promo_sk 
+ AND p_channel_tv = 'N' + GROUP BY web_site_id) +SELECT + channel , + id , + sum(sales) AS sales , + sum(returns1) AS returns1 , + sum(profit) AS profit +FROM ( + SELECT 'store channel' AS channel , + 'store' + || store_id AS id , + sales , + returns1 , + profit + FROM ssr + UNION ALL + SELECT 'catalog channel' AS channel , + 'catalog_page' + || catalog_page_id AS id , + sales , + returns1 , + profit + FROM csr + UNION ALL + SELECT 'web channel' AS channel , + 'web_site' + || web_site_id AS id , + sales , + returns1 , + profit + FROM wsr ) x +GROUP BY rollup (channel, id) +ORDER BY channel , + id +LIMIT 100; + diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query81.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query81.sql new file mode 100644 index 000000000..e596c591a --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query81.sql @@ -0,0 +1,56 @@ + +-- start query 81 in stream 0 using template query81.tpl +WITH customer_total_return + AS (SELECT cr_returning_customer_sk AS ctr_customer_sk, + ca_state AS ctr_state, + Sum(cr_return_amt_inc_tax) AS ctr_total_return + FROM catalog_returns, + date_dim, + customer_address + WHERE cr_returned_date_sk = d_date_sk + AND d_year = 1999 + AND cr_returning_addr_sk = ca_address_sk + GROUP BY cr_returning_customer_sk, + ca_state) +SELECT c_customer_id, + c_salutation, + c_first_name, + c_last_name, + ca_street_number, + ca_street_name, + ca_street_type, + ca_suite_number, + ca_city, + ca_county, + ca_state, + ca_zip, + ca_country, + ca_gmt_offset, + ca_location_type, + ctr_total_return +FROM customer_total_return ctr1, + customer_address, + customer +WHERE ctr1.ctr_total_return > (SELECT Avg(ctr_total_return) * 1.2 + FROM customer_total_return ctr2 + WHERE ctr1.ctr_state = ctr2.ctr_state) + AND ca_address_sk = c_current_addr_sk + AND ca_state = 'TX' + AND ctr1.ctr_customer_sk = c_customer_sk +ORDER BY c_customer_id, + c_salutation, + c_first_name, + c_last_name, + 
ca_street_number, + ca_street_name, + ca_street_type, + ca_suite_number, + ca_city, + ca_county, + ca_state, + ca_zip, + ca_country, + ca_gmt_offset, + ca_location_type, + ctr_total_return +LIMIT 100; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query82.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query82.sql new file mode 100644 index 000000000..1a8d9126d --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query82.sql @@ -0,0 +1,24 @@ + +-- start query 82 in stream 0 using template query82.tpl +SELECT + i_item_id , + i_item_desc , + i_current_price +FROM item, + inventory, + date_dim, + store_sales +WHERE i_current_price BETWEEN 63 AND 63+30 +AND inv_item_sk = i_item_sk +AND d_date_sk=inv_date_sk +AND d_date BETWEEN Cast('1998-04-27' AS DATE) AND ( + Cast('1998-04-27' AS DATE) + INTERVAL '60' day) +AND i_manufact_id IN (57,293,427,320) +AND inv_quantity_on_hand BETWEEN 100 AND 500 +AND ss_item_sk = i_item_sk +GROUP BY i_item_id, + i_item_desc, + i_current_price +ORDER BY i_item_id +LIMIT 100; + diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query83.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query83.sql new file mode 100644 index 000000000..380dfd9fd --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query83.sql @@ -0,0 +1,75 @@ +-- start query 83 in stream 0 using template query83.tpl +WITH sr_items + AS (SELECT i_item_id item_id, + Sum(sr_return_quantity) sr_item_qty + FROM store_returns, + item, + date_dim + WHERE sr_item_sk = i_item_sk + AND d_date IN (SELECT d_date + FROM date_dim + WHERE d_week_seq IN (SELECT d_week_seq + FROM date_dim + WHERE + d_date IN ( '1999-06-30', + '1999-08-28', + '1999-11-18' + ))) + AND sr_returned_date_sk = d_date_sk + GROUP BY i_item_id), + cr_items + AS (SELECT i_item_id item_id, + Sum(cr_return_quantity) cr_item_qty + FROM catalog_returns, + item, + date_dim + WHERE cr_item_sk = i_item_sk + AND 
d_date IN (SELECT d_date + FROM date_dim + WHERE d_week_seq IN (SELECT d_week_seq + FROM date_dim + WHERE + d_date IN ( '1999-06-30', + '1999-08-28', + '1999-11-18' + ))) + AND cr_returned_date_sk = d_date_sk + GROUP BY i_item_id), + wr_items + AS (SELECT i_item_id item_id, + Sum(wr_return_quantity) wr_item_qty + FROM web_returns, + item, + date_dim + WHERE wr_item_sk = i_item_sk + AND d_date IN (SELECT d_date + FROM date_dim + WHERE d_week_seq IN (SELECT d_week_seq + FROM date_dim + WHERE + d_date IN ( '1999-06-30', + '1999-08-28', + '1999-11-18' + ))) + AND wr_returned_date_sk = d_date_sk + GROUP BY i_item_id) +SELECT sr_items.item_id, + sr_item_qty, + sr_item_qty / ( sr_item_qty + cr_item_qty + wr_item_qty ) / 3.0 * + 100 sr_dev, + cr_item_qty, + cr_item_qty / ( sr_item_qty + cr_item_qty + wr_item_qty ) / 3.0 * + 100 cr_dev, + wr_item_qty, + wr_item_qty / ( sr_item_qty + cr_item_qty + wr_item_qty ) / 3.0 * + 100 wr_dev, + ( sr_item_qty + cr_item_qty + wr_item_qty ) / 3.0 + average +FROM sr_items, + cr_items, + wr_items +WHERE sr_items.item_id = cr_items.item_id + AND sr_items.item_id = wr_items.item_id +ORDER BY sr_items.item_id, + sr_item_qty +LIMIT 100; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query84.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query84.sql new file mode 100644 index 000000000..f073403ef --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query84.sql @@ -0,0 +1,21 @@ +-- start query 84 in stream 0 using template query84.tpl +SELECT c_customer_id AS customer_id, + c_last_name + || ', ' + || c_first_name AS customername +FROM customer, + customer_address, + customer_demographics, + household_demographics, + income_band, + store_returns +WHERE ca_city = 'Green Acres' + AND c_current_addr_sk = ca_address_sk + AND ib_lower_bound >= 54986 + AND ib_upper_bound <= 54986 + 50000 + AND ib_income_band_sk = hd_income_band_sk + AND cd_demo_sk = c_current_cdemo_sk + AND hd_demo_sk = 
c_current_hdemo_sk + AND sr_cdemo_sk = cd_demo_sk +ORDER BY c_customer_id +LIMIT 100; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query85.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query85.sql new file mode 100644 index 000000000..b821e8aa8 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query85.sql @@ -0,0 +1,52 @@ +-- start query 85 in stream 0 using template query85.tpl +SELECT Substr(r_reason_desc, 1, 20), + Avg(ws_quantity), + Avg(wr_refunded_cash), + Avg(wr_fee) +FROM web_sales, + web_returns, + web_page, + customer_demographics cd1, + customer_demographics cd2, + customer_address, + date_dim, + reason +WHERE ws_web_page_sk = wp_web_page_sk + AND ws_item_sk = wr_item_sk + AND ws_order_number = wr_order_number + AND ws_sold_date_sk = d_date_sk + AND d_year = 2001 + AND cd1.cd_demo_sk = wr_refunded_cdemo_sk + AND cd2.cd_demo_sk = wr_returning_cdemo_sk + AND ca_address_sk = wr_refunded_addr_sk + AND r_reason_sk = wr_reason_sk + AND ( ( cd1.cd_marital_status = 'W' + AND cd1.cd_marital_status = cd2.cd_marital_status + AND cd1.cd_education_status = 'Primary' + AND cd1.cd_education_status = cd2.cd_education_status + AND ws_sales_price BETWEEN 100.00 AND 150.00 ) + OR ( cd1.cd_marital_status = 'D' + AND cd1.cd_marital_status = cd2.cd_marital_status + AND cd1.cd_education_status = 'Secondary' + AND cd1.cd_education_status = cd2.cd_education_status + AND ws_sales_price BETWEEN 50.00 AND 100.00 ) + OR ( cd1.cd_marital_status = 'M' + AND cd1.cd_marital_status = cd2.cd_marital_status + AND cd1.cd_education_status = 'Advanced Degree' + AND cd1.cd_education_status = cd2.cd_education_status + AND ws_sales_price BETWEEN 150.00 AND 200.00 ) ) + AND ( ( ca_country = 'United States' + AND ca_state IN ( 'KY', 'ME', 'IL' ) + AND ws_net_profit BETWEEN 100 AND 200 ) + OR ( ca_country = 'United States' + AND ca_state IN ( 'OK', 'NE', 'MN' ) + AND ws_net_profit BETWEEN 150 AND 300 ) + OR ( ca_country = 'United 
States' + AND ca_state IN ( 'FL', 'WI', 'KS' ) + AND ws_net_profit BETWEEN 50 AND 250 ) ) +GROUP BY r_reason_desc +ORDER BY Substr(r_reason_desc, 1, 20), + Avg(ws_quantity), + Avg(wr_refunded_cash), + Avg(wr_fee) +LIMIT 100; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query86.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query86.sql new file mode 100644 index 000000000..511b33fb2 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query86.sql @@ -0,0 +1,24 @@ +-- start query 86 in stream 0 using template query86.tpl +SELECT Sum(ws_net_paid) AS total_sum, + i_category, + i_class, + Grouping(i_category) + Grouping(i_class) AS lochierarchy, + Rank() + OVER ( + partition BY Grouping(i_category)+Grouping(i_class), CASE + WHEN Grouping( + i_class) = 0 THEN i_category END + ORDER BY Sum(ws_net_paid) DESC) AS rank_within_parent +FROM web_sales, + date_dim d1, + item +WHERE d1.d_month_seq BETWEEN 1183 AND 1183 + 11 + AND d1.d_date_sk = ws_sold_date_sk + AND i_item_sk = ws_item_sk +GROUP BY rollup( i_category, i_class ) +ORDER BY lochierarchy DESC, + CASE + WHEN lochierarchy = 0 THEN i_category + END, + rank_within_parent +LIMIT 100; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query87.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query87.sql new file mode 100644 index 000000000..49c99464d --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query87.sql @@ -0,0 +1,22 @@ +-- start query 87 in stream 0 using template query87.tpl +select count(*) +from ((select distinct c_last_name, c_first_name, d_date + from store_sales, date_dim, customer + where store_sales.ss_sold_date_sk = date_dim.d_date_sk + and store_sales.ss_customer_sk = customer.c_customer_sk + and d_month_seq between 1188 and 1188+11) + except + (select distinct c_last_name, c_first_name, d_date + from catalog_sales, date_dim, customer + where catalog_sales.cs_sold_date_sk = 
date_dim.d_date_sk + and catalog_sales.cs_bill_customer_sk = customer.c_customer_sk + and d_month_seq between 1188 and 1188+11) + except + (select distinct c_last_name, c_first_name, d_date + from web_sales, date_dim, customer + where web_sales.ws_sold_date_sk = date_dim.d_date_sk + and web_sales.ws_bill_customer_sk = customer.c_customer_sk + and d_month_seq between 1188 and 1188+11) +) cool_cust +; + diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query88.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query88.sql new file mode 100644 index 000000000..1f77fe1fd --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query88.sql @@ -0,0 +1,93 @@ +-- start query 88 in stream 0 using template query88.tpl +select * +from + (select count(*) h8_30_to_9 + from store_sales, household_demographics , time_dim, store + where ss_sold_time_sk = time_dim.t_time_sk + and ss_hdemo_sk = household_demographics.hd_demo_sk + and ss_store_sk = s_store_sk + and time_dim.t_hour = 8 + and time_dim.t_minute >= 30 + and ((household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or + (household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or + (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) + and store.s_store_name = 'ese') s1, + (select count(*) h9_to_9_30 + from store_sales, household_demographics , time_dim, store + where ss_sold_time_sk = time_dim.t_time_sk + and ss_hdemo_sk = household_demographics.hd_demo_sk + and ss_store_sk = s_store_sk + and time_dim.t_hour = 9 + and time_dim.t_minute < 30 + and ((household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or + (household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or + (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) + and store.s_store_name = 'ese') 
s2, + (select count(*) h9_30_to_10 + from store_sales, household_demographics , time_dim, store + where ss_sold_time_sk = time_dim.t_time_sk + and ss_hdemo_sk = household_demographics.hd_demo_sk + and ss_store_sk = s_store_sk + and time_dim.t_hour = 9 + and time_dim.t_minute >= 30 + and ((household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or + (household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or + (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) + and store.s_store_name = 'ese') s3, + (select count(*) h10_to_10_30 + from store_sales, household_demographics , time_dim, store + where ss_sold_time_sk = time_dim.t_time_sk + and ss_hdemo_sk = household_demographics.hd_demo_sk + and ss_store_sk = s_store_sk + and time_dim.t_hour = 10 + and time_dim.t_minute < 30 + and ((household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or + (household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or + (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) + and store.s_store_name = 'ese') s4, + (select count(*) h10_30_to_11 + from store_sales, household_demographics , time_dim, store + where ss_sold_time_sk = time_dim.t_time_sk + and ss_hdemo_sk = household_demographics.hd_demo_sk + and ss_store_sk = s_store_sk + and time_dim.t_hour = 10 + and time_dim.t_minute >= 30 + and ((household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or + (household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or + (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) + and store.s_store_name = 'ese') s5, + (select count(*) h11_to_11_30 + from store_sales, household_demographics , time_dim, store + where ss_sold_time_sk = time_dim.t_time_sk + and ss_hdemo_sk = 
household_demographics.hd_demo_sk + and ss_store_sk = s_store_sk + and time_dim.t_hour = 11 + and time_dim.t_minute < 30 + and ((household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or + (household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or + (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) + and store.s_store_name = 'ese') s6, + (select count(*) h11_30_to_12 + from store_sales, household_demographics , time_dim, store + where ss_sold_time_sk = time_dim.t_time_sk + and ss_hdemo_sk = household_demographics.hd_demo_sk + and ss_store_sk = s_store_sk + and time_dim.t_hour = 11 + and time_dim.t_minute >= 30 + and ((household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or + (household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or + (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) + and store.s_store_name = 'ese') s7, + (select count(*) h12_to_12_30 + from store_sales, household_demographics , time_dim, store + where ss_sold_time_sk = time_dim.t_time_sk + and ss_hdemo_sk = household_demographics.hd_demo_sk + and ss_store_sk = s_store_sk + and time_dim.t_hour = 12 + and time_dim.t_minute < 30 + and ((household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or + (household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or + (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) + and store.s_store_name = 'ese') s8 +; + diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query89.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query89.sql new file mode 100644 index 000000000..2004a4fe2 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query89.sql @@ -0,0 +1,40 @@ +-- start query 
89 in stream 0 using template query89.tpl +SELECT * +FROM (SELECT i_category, + i_class, + i_brand, + s_store_name, + s_company_name, + d_moy, + Sum(ss_sales_price) sum_sales, + Avg(Sum(ss_sales_price)) + OVER ( + partition BY i_category, i_brand, s_store_name, s_company_name + ) + avg_monthly_sales + FROM item, + store_sales, + date_dim, + store + WHERE ss_item_sk = i_item_sk + AND ss_sold_date_sk = d_date_sk + AND ss_store_sk = s_store_sk + AND d_year IN ( 2002 ) + AND ( ( i_category IN ( 'Home', 'Men', 'Sports' ) + AND i_class IN ( 'paint', 'accessories', 'fitness' ) ) + OR ( i_category IN ( 'Shoes', 'Jewelry', 'Women' ) + AND i_class IN ( 'mens', 'pendants', 'swimwear' ) ) ) + GROUP BY i_category, + i_class, + i_brand, + s_store_name, + s_company_name, + d_moy) tmp1 +WHERE CASE + WHEN ( avg_monthly_sales <> 0 ) THEN ( + Abs(sum_sales - avg_monthly_sales) / avg_monthly_sales ) + ELSE NULL + END > 0.1 +ORDER BY sum_sales - avg_monthly_sales, + s_store_name +LIMIT 100; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query90.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query90.sql new file mode 100644 index 000000000..5a4778b1b --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query90.sql @@ -0,0 +1,28 @@ + +-- start query 90 in stream 0 using template query90.tpl +SELECT Cast(amc AS DECIMAL(15, 4)) / Cast(pmc AS DECIMAL(15, 4)) + am_pm_ratio +FROM (SELECT Count(*) amc + FROM web_sales, + household_demographics, + time_dim, + web_page + WHERE ws_sold_time_sk = time_dim.t_time_sk + AND ws_ship_hdemo_sk = household_demographics.hd_demo_sk + AND ws_web_page_sk = web_page.wp_web_page_sk + AND time_dim.t_hour BETWEEN 12 AND 12 + 1 + AND household_demographics.hd_dep_count = 8 + AND web_page.wp_char_count BETWEEN 5000 AND 5200) at1, + (SELECT Count(*) pmc + FROM web_sales, + household_demographics, + time_dim, + web_page + WHERE ws_sold_time_sk = time_dim.t_time_sk + AND ws_ship_hdemo_sk = 
household_demographics.hd_demo_sk + AND ws_web_page_sk = web_page.wp_web_page_sk + AND time_dim.t_hour BETWEEN 20 AND 20 + 1 + AND household_demographics.hd_dep_count = 8 + AND web_page.wp_char_count BETWEEN 5000 AND 5200) pt +ORDER BY am_pm_ratio +LIMIT 100; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query91.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query91.sql new file mode 100644 index 000000000..1462192de --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query91.sql @@ -0,0 +1,32 @@ +-- start query 91 in stream 0 using template query91.tpl +SELECT cc_call_center_id Call_Center, + cc_name Call_Center_Name, + cc_manager Manager, + Sum(cr_net_loss) Returns_Loss +FROM call_center, + catalog_returns, + date_dim, + customer, + customer_address, + customer_demographics, + household_demographics +WHERE cr_call_center_sk = cc_call_center_sk + AND cr_returned_date_sk = d_date_sk + AND cr_returning_customer_sk = c_customer_sk + AND cd_demo_sk = c_current_cdemo_sk + AND hd_demo_sk = c_current_hdemo_sk + AND ca_address_sk = c_current_addr_sk + AND d_year = 1999 + AND d_moy = 12 + AND ( ( cd_marital_status = 'M' + AND cd_education_status = 'Unknown' ) + OR ( cd_marital_status = 'W' + AND cd_education_status = 'Advanced Degree' ) ) + AND hd_buy_potential LIKE 'Unknown%' + AND ca_gmt_offset = -7 +GROUP BY cc_call_center_id, + cc_name, + cc_manager, + cd_marital_status, + cd_education_status +ORDER BY Sum(cr_net_loss) DESC; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query92.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query92.sql new file mode 100644 index 000000000..ddbf2e152 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query92.sql @@ -0,0 +1,23 @@ +-- start query 92 in stream 0 using template query92.tpl +SELECT + Sum(ws_ext_discount_amt) AS `Excess Discount Amount` +FROM web_sales , + item , + date_dim +WHERE i_manufact_id = 
718 +AND i_item_sk = ws_item_sk +AND d_date BETWEEN '2002-03-29' AND ( + Cast('2002-03-29' AS DATE) + INTERVAL '90' day) +AND d_date_sk = ws_sold_date_sk +AND ws_ext_discount_amt > + ( + SELECT 1.3 * avg(ws_ext_discount_amt) + FROM web_sales , + date_dim + WHERE ws_item_sk = i_item_sk + AND d_date BETWEEN '2002-03-29' AND ( + cast('2002-03-29' AS date) + INTERVAL '90' day) + AND d_date_sk = ws_sold_date_sk ) +ORDER BY sum(ws_ext_discount_amt) +LIMIT 100; + diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query93.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query93.sql new file mode 100644 index 000000000..1a5aeb81c --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query93.sql @@ -0,0 +1,22 @@ +-- start query 93 in stream 0 using template query93.tpl +SELECT ss_customer_sk, + Sum(act_sales) sumsales +FROM (SELECT ss_item_sk, + ss_ticket_number, + ss_customer_sk, + CASE + WHEN sr_return_quantity IS NOT NULL THEN + ( ss_quantity - sr_return_quantity ) * ss_sales_price + ELSE ( ss_quantity * ss_sales_price ) + END act_sales + FROM store_sales + LEFT OUTER JOIN store_returns + ON ( sr_item_sk = ss_item_sk + AND sr_ticket_number = ss_ticket_number ), + reason + WHERE sr_reason_sk = r_reason_sk + AND r_reason_desc = 'reason 38') t +GROUP BY ss_customer_sk +ORDER BY sumsales, + ss_customer_sk +LIMIT 100; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query94.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query94.sql new file mode 100644 index 000000000..18cc4c0e2 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query94.sql @@ -0,0 +1,30 @@ +-- start query 94 in stream 0 using template query94.tpl +SELECT + Count(DISTINCT ws_order_number) AS `order count` , + Sum(ws_ext_ship_cost) AS `total shipping cost` , + Sum(ws_net_profit) AS `total net profit` +FROM web_sales ws1 , + date_dim , + customer_address , + web_site +WHERE d_date BETWEEN 
'2000-3-01' AND ( + Cast('2000-3-01' AS DATE) + INTERVAL '60' day) +AND ws1.ws_ship_date_sk = d_date_sk +AND ws1.ws_ship_addr_sk = ca_address_sk +AND ca_state = 'MT' +AND ws1.ws_web_site_sk = web_site_sk +AND web_company_name = 'pri' +AND EXISTS + ( + SELECT * + FROM web_sales ws2 + WHERE ws1.ws_order_number = ws2.ws_order_number + AND ws1.ws_warehouse_sk <> ws2.ws_warehouse_sk) +AND NOT EXISTS + ( + SELECT * + FROM web_returns wr1 + WHERE ws1.ws_order_number = wr1.wr_order_number) +ORDER BY count(DISTINCT ws_order_number) +LIMIT 100; + diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query95.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query95.sql new file mode 100644 index 000000000..099c9d2f6 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query95.sql @@ -0,0 +1,38 @@ +-- start query 95 in stream 0 using template query95.tpl +WITH ws_wh AS +( + SELECT ws1.ws_order_number, + ws1.ws_warehouse_sk wh1, + ws2.ws_warehouse_sk wh2 + FROM web_sales ws1, + web_sales ws2 + WHERE ws1.ws_order_number = ws2.ws_order_number + AND ws1.ws_warehouse_sk <> ws2.ws_warehouse_sk) +SELECT + Count(DISTINCT ws_order_number) AS `order count` , + Sum(ws_ext_ship_cost) AS `total shipping cost` , + Sum(ws_net_profit) AS `total net profit` +FROM web_sales ws1 , + date_dim , + customer_address , + web_site +WHERE d_date BETWEEN '2000-4-01' AND ( + Cast('2000-4-01' AS DATE) + INTERVAL '60' day) +AND ws1.ws_ship_date_sk = d_date_sk +AND ws1.ws_ship_addr_sk = ca_address_sk +AND ca_state = 'IN' +AND ws1.ws_web_site_sk = web_site_sk +AND web_company_name = 'pri' +AND ws1.ws_order_number IN + ( + SELECT ws_order_number + FROM ws_wh) +AND ws1.ws_order_number IN + ( + SELECT wr_order_number + FROM web_returns, + ws_wh + WHERE wr_order_number = ws_wh.ws_order_number) +ORDER BY count(DISTINCT ws_order_number) +LIMIT 100; + diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query96.sql 
b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query96.sql new file mode 100644 index 000000000..25bc83e01 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query96.sql @@ -0,0 +1,15 @@ +-- start query 96 in stream 0 using template query96.tpl +SELECT Count(*) +FROM store_sales, + household_demographics, + time_dim, + store +WHERE ss_sold_time_sk = time_dim.t_time_sk + AND ss_hdemo_sk = household_demographics.hd_demo_sk + AND ss_store_sk = s_store_sk + AND time_dim.t_hour = 15 + AND time_dim.t_minute >= 30 + AND household_demographics.hd_dep_count = 7 + AND store.s_store_name = 'ese' +ORDER BY Count(*) +LIMIT 100; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query97.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query97.sql new file mode 100644 index 000000000..58d95211c --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query97.sql @@ -0,0 +1,40 @@ + +-- start query 97 in stream 0 using template query97.tpl +WITH ssci + AS (SELECT ss_customer_sk customer_sk, + ss_item_sk item_sk + FROM store_sales, + date_dim + WHERE ss_sold_date_sk = d_date_sk + AND d_month_seq BETWEEN 1196 AND 1196 + 11 + GROUP BY ss_customer_sk, + ss_item_sk), + csci + AS (SELECT cs_bill_customer_sk customer_sk, + cs_item_sk item_sk + FROM catalog_sales, + date_dim + WHERE cs_sold_date_sk = d_date_sk + AND d_month_seq BETWEEN 1196 AND 1196 + 11 + GROUP BY cs_bill_customer_sk, + cs_item_sk) +SELECT Sum(CASE + WHEN ssci.customer_sk IS NOT NULL + AND csci.customer_sk IS NULL THEN 1 + ELSE 0 + END) store_only, + Sum(CASE + WHEN ssci.customer_sk IS NULL + AND csci.customer_sk IS NOT NULL THEN 1 + ELSE 0 + END) catalog_only, + Sum(CASE + WHEN ssci.customer_sk IS NOT NULL + AND csci.customer_sk IS NOT NULL THEN 1 + ELSE 0 + END) store_and_catalog +FROM ssci + FULL OUTER JOIN csci + ON ( ssci.customer_sk = csci.customer_sk + AND ssci.item_sk = csci.item_sk ) +LIMIT 100; diff --git 
a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query98.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query98.sql new file mode 100644 index 000000000..cfd04a601 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query98.sql @@ -0,0 +1,29 @@ + +-- start query 98 in stream 0 using template query98.tpl +SELECT i_item_id, + i_item_desc, + i_category, + i_class, + i_current_price, + Sum(ss_ext_sales_price) AS itemrevenue, + Sum(ss_ext_sales_price) * 100 / Sum(Sum(ss_ext_sales_price)) + OVER ( + PARTITION BY i_class) AS revenueratio +FROM store_sales, + item, + date_dim +WHERE ss_item_sk = i_item_sk + AND i_category IN ( 'Men', 'Home', 'Electronics' ) + AND ss_sold_date_sk = d_date_sk + AND d_date BETWEEN CAST('2000-05-18' AS DATE) AND ( + CAST('2000-05-18' AS DATE) + INTERVAL '30' DAY ) +GROUP BY i_item_id, + i_item_desc, + i_category, + i_class, + i_current_price +ORDER BY i_category, + i_class, + i_item_id, + i_item_desc, + revenueratio; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query99.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query99.sql new file mode 100644 index 000000000..3acbf5348 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_ds/query99.sql @@ -0,0 +1,47 @@ + + +-- start query 99 in stream 0 using template query99.tpl +SELECT Substr(w_warehouse_name, 1, 20), + sm_type, + cc_name, + Sum(CASE + WHEN ( cs_ship_date_sk - cs_sold_date_sk <= 30 ) THEN 1 + ELSE 0 + END) AS `30 days`, + Sum(CASE + WHEN ( cs_ship_date_sk - cs_sold_date_sk > 30 ) + AND ( cs_ship_date_sk - cs_sold_date_sk <= 60 ) THEN 1 + ELSE 0 + END) AS `31-60 days`, + Sum(CASE + WHEN ( cs_ship_date_sk - cs_sold_date_sk > 60 ) + AND ( cs_ship_date_sk - cs_sold_date_sk <= 90 ) THEN 1 + ELSE 0 + END) AS `61-90 days`, + Sum(CASE + WHEN ( cs_ship_date_sk - cs_sold_date_sk > 90 ) + AND ( cs_ship_date_sk - cs_sold_date_sk <= 120 ) THEN + 1 + ELSE 0 + END) AS `91-120 days`, + 
Sum(CASE + WHEN ( cs_ship_date_sk - cs_sold_date_sk > 120 ) THEN 1 + ELSE 0 + END) AS `>120 days` +FROM catalog_sales, + warehouse, + ship_mode, + call_center, + date_dim +WHERE d_month_seq BETWEEN 1200 AND 1200 + 11 + AND cs_ship_date_sk = d_date_sk + AND cs_warehouse_sk = w_warehouse_sk + AND cs_ship_mode_sk = sm_ship_mode_sk + AND cs_call_center_sk = cc_call_center_sk +GROUP BY Substr(w_warehouse_name, 1, 20), + sm_type, + cc_name +ORDER BY Substr(w_warehouse_name, 1, 20), + sm_type, + cc_name +LIMIT 100; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_h/query01.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_h/query01.sql new file mode 100644 index 000000000..a388f00f3 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_h/query01.sql @@ -0,0 +1,22 @@ +--#[tpc-h-01] +select + l_returnflag, + l_linestatus, + sum(l_quantity) as sum_qty, + sum(l_extendedprice) as sum_base_price, + sum(l_extendedprice * (1 - l_discount)) as sum_disc_price, + sum(l_extendedprice * (1 - l_discount) * (1 + l_tax)) as sum_charge, + avg(l_quantity) as avg_qty, + avg(l_extendedprice) as avg_price, + avg(l_discount) as avg_disc, + count(*) as count_order +from + lineitem +where + l_shipdate <= date '1998-12-01' - interval ':1' day (3) +group by + l_returnflag, + l_linestatus +order by + l_returnflag, + l_linestatus; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_h/query02.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_h/query02.sql new file mode 100644 index 000000000..0d4080ada --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_h/query02.sql @@ -0,0 +1,44 @@ +--#[tpc-h-02] +select + s_acctbal, + s_name, + n_name, + p_partkey, + p_mfgr, + s_address, + s_phone, + s_comment +from + part, + supplier, + partsupp, + nation, + region +where + p_partkey = ps_partkey + and s_suppkey = ps_suppkey + and p_size = :1 + and p_type like '%:2' + and s_nationkey = n_nationkey + and n_regionkey = 
r_regionkey + and r_name = ':3' + and ps_supplycost = ( + select + min(ps_supplycost) + from + partsupp, + supplier, + nation, + region + where + p_partkey = ps_partkey + and s_suppkey = ps_suppkey + and s_nationkey = n_nationkey + and n_regionkey = r_regionkey + and r_name = ':3' + ) +order by + s_acctbal desc, + n_name, + s_name, + p_partkey; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_h/query03.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_h/query03.sql new file mode 100644 index 000000000..611bb9d30 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_h/query03.sql @@ -0,0 +1,23 @@ +--#[tpc-h-03] +select + l_orderkey, + sum(l_extendedprice * (1 - l_discount)) as revenue, + o_orderdate, + o_shippriority +from + customer, + orders, + lineitem +where + c_mktsegment = ':1' + and c_custkey = o_custkey + and l_orderkey = o_orderkey + and o_orderdate < date ':2' + and l_shipdate > date ':2' +group by + l_orderkey, + o_orderdate, + o_shippriority +order by + revenue desc, + o_orderdate; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_h/query04.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_h/query04.sql new file mode 100644 index 000000000..4104fbd6b --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_h/query04.sql @@ -0,0 +1,22 @@ +--#[tpc-h-04] +select + o_orderpriority, + count(*) as order_count +from + orders +where + o_orderdate >= date ':1' + and o_orderdate < date ':1' + interval '3' month + and exists ( + select + * + from + lineitem + where + l_orderkey = o_orderkey + and l_commitdate < l_receiptdate + ) +group by + o_orderpriority +order by + o_orderpriority; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_h/query05.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_h/query05.sql new file mode 100644 index 000000000..bca162d1e --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_h/query05.sql @@ -0,0 
+1,25 @@ +--#[tpc-h-05] +select + n_name, + sum(l_extendedprice * (1 - l_discount)) as revenue +from + customer, + orders, + lineitem, + supplier, + nation, + region +where + c_custkey = o_custkey + and l_orderkey = o_orderkey + and l_suppkey = s_suppkey + and c_nationkey = s_nationkey + and s_nationkey = n_nationkey + and n_regionkey = r_regionkey + and r_name = ':1' + and o_orderdate >= date ':2' + and o_orderdate < date ':2' + interval '1' year +group by + n_name +order by + revenue desc; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_h/query06.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_h/query06.sql new file mode 100644 index 000000000..8698a28a4 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_h/query06.sql @@ -0,0 +1,16 @@ +-- $ID$ +-- TPC-H/TPC-R Forecasting Revenue Change Query (Q6) +-- Functional Query Definition +-- Approved February 1998 +:x +:o +select + sum(l_extendedprice * l_discount) as revenue +from + lineitem +where + l_shipdate >= date ':1' + and l_shipdate < date ':1' + interval '1' year + and l_discount between :2 - 0.01 and :2 + 0.01 + and l_quantity < :3; +:n -1 diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_h/query07.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_h/query07.sql new file mode 100644 index 000000000..624f6bc4e --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_h/query07.sql @@ -0,0 +1,40 @@ +--#[tpc-h-07] +select + supp_nation, + cust_nation, + l_year, + sum(volume) as revenue +from + ( + select + n1.n_name as supp_nation, + n2.n_name as cust_nation, + extract(year from l_shipdate) as l_year, + l_extendedprice * (1 - l_discount) as volume + from + supplier, + lineitem, + orders, + customer, + nation n1, + nation n2 + where + s_suppkey = l_suppkey + and o_orderkey = l_orderkey + and c_custkey = o_custkey + and s_nationkey = n1.n_nationkey + and c_nationkey = n2.n_nationkey + and ( + (n1.n_name = ':1' and 
n2.n_name = ':2') + or (n1.n_name = ':2' and n2.n_name = ':1') + ) + and l_shipdate between date '1995-01-01' and date '1996-12-31' + ) as shipping +group by + supp_nation, + cust_nation, + l_year +order by + supp_nation, + cust_nation, + l_year; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_h/query08.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_h/query08.sql new file mode 100644 index 000000000..b1c30c27c --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_h/query08.sql @@ -0,0 +1,39 @@ +--#[tpc-h-08] +select + o_year, + sum(case + when nation = ':1' then volume + else 0 + end) / sum(volume) as mkt_share +from + ( + select + extract(year from o_orderdate) as o_year, + l_extendedprice * (1 - l_discount) as volume, + n2.n_name as nation + from + part, + supplier, + lineitem, + orders, + customer, + nation n1, + nation n2, + region + where + p_partkey = l_partkey + and s_suppkey = l_suppkey + and l_orderkey = o_orderkey + and o_custkey = c_custkey + and c_nationkey = n1.n_nationkey + and n1.n_regionkey = r_regionkey + and r_name = ':2' + and s_nationkey = n2.n_nationkey + and o_orderdate between date '1995-01-01' and date '1996-12-31' + and p_type = ':3' + ) as all_nations +group by + o_year +order by + o_year; +:n -1 diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_h/query09.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_h/query09.sql new file mode 100644 index 000000000..58dcbe7f7 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_h/query09.sql @@ -0,0 +1,33 @@ +--#[tpc-h-09] +select + nation, + o_year, + sum(amount) as sum_profit +from + ( + select + n_name as nation, + extract(year from o_orderdate) as o_year, + l_extendedprice * (1 - l_discount) - ps_supplycost * l_quantity as amount + from + part, + supplier, + lineitem, + partsupp, + orders, + nation + where + s_suppkey = l_suppkey + and ps_suppkey = l_suppkey + and ps_partkey = l_partkey + and 
p_partkey = l_partkey + and o_orderkey = l_orderkey + and s_nationkey = n_nationkey + and p_name like '%:1%' + ) as profit +group by + nation, + o_year +order by + nation, + o_year desc; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_h/query10.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_h/query10.sql new file mode 100644 index 000000000..300192480 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_h/query10.sql @@ -0,0 +1,32 @@ +--#[tpc-h-10] +select + c_custkey, + c_name, + sum(l_extendedprice * (1 - l_discount)) as revenue, + c_acctbal, + n_name, + c_address, + c_phone, + c_comment +from + customer, + orders, + lineitem, + nation +where + c_custkey = o_custkey + and l_orderkey = o_orderkey + and o_orderdate >= date ':1' + and o_orderdate < date ':1' + interval '3' month + and l_returnflag = 'R' + and c_nationkey = n_nationkey +group by + c_custkey, + c_name, + c_acctbal, + c_phone, + n_name, + c_address, + c_comment +order by + revenue desc; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_h/query11.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_h/query11.sql new file mode 100644 index 000000000..54820dfcd --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_h/query11.sql @@ -0,0 +1,28 @@ +--#[tpch-h-11] +select + ps_partkey, + sum(ps_supplycost * ps_availqty) as value +from + partsupp, + supplier, + nation +where + ps_suppkey = s_suppkey + and s_nationkey = n_nationkey + and n_name = ':1' +group by + ps_partkey having + sum(ps_supplycost * ps_availqty) > ( + select + sum(ps_supplycost * ps_availqty) * :2 + from + partsupp, + supplier, + nation + where + ps_suppkey = s_suppkey + and s_nationkey = n_nationkey + and n_name = ':1' + ) +order by + value desc; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_h/query12.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_h/query12.sql new file mode 100644 index 000000000..5515ddbc2 --- 
/dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_h/query12.sql @@ -0,0 +1,29 @@ +--#[tpc-h-12] +select + l_shipmode, + sum(case + when o_orderpriority = '1-URGENT' + or o_orderpriority = '2-HIGH' + then 1 + else 0 + end) as high_line_count, + sum(case + when o_orderpriority <> '1-URGENT' + and o_orderpriority <> '2-HIGH' + then 1 + else 0 + end) as low_line_count +from + orders, + lineitem +where + o_orderkey = l_orderkey + and l_shipmode in (':1', ':2') + and l_commitdate < l_receiptdate + and l_shipdate < l_commitdate + and l_receiptdate >= date ':3' + and l_receiptdate < date ':3' + interval '1' year +group by + l_shipmode +order by + l_shipmode; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_h/query13.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_h/query13.sql new file mode 100644 index 000000000..adf10eb53 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_h/query13.sql @@ -0,0 +1,21 @@ +--#[tpc-h-13] +select + c_count, + count(*) as custdist +from + ( + select + c_custkey, + count(o_orderkey) + from + customer left outer join orders on + c_custkey = o_custkey + and o_comment not like '%:1%:2%' + group by + c_custkey + ) as c_orders (c_custkey, c_count) +group by + c_count +order by + custdist desc, + c_count desc; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_h/query14.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_h/query14.sql new file mode 100644 index 000000000..e7b50f537 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_h/query14.sql @@ -0,0 +1,14 @@ +--#[tpc-h-14] +select + 100.00 * sum(case + when p_type like 'PROMO%' + then l_extendedprice * (1 - l_discount) + else 0 + end) / sum(l_extendedprice * (1 - l_discount)) as promo_revenue +from + lineitem, + part +where + l_partkey = p_partkey + and l_shipdate >= date ':1' + and l_shipdate < date ':1' + interval '1' month; diff --git 
a/partiql-planner/src/testFixtures/resources/inputs/tpc_h/query15.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_h/query15.sql new file mode 100644 index 000000000..bafd089f2 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_h/query15.sql @@ -0,0 +1,35 @@ +--#[tpch-h-15] +create view revenue:s (supplier_no, total_revenue) as + select + l_suppkey, + sum(l_extendedprice * (1 - l_discount)) + from + lineitem + where + l_shipdate >= date ':1' + and l_shipdate < date ':1' + interval '3' month + group by + l_suppkey; + +:o +select + s_suppkey, + s_name, + s_address, + s_phone, + total_revenue +from + supplier, + revenue:s +where + s_suppkey = supplier_no + and total_revenue = ( + select + max(total_revenue) + from + revenue:s + ) +order by + s_suppkey; + +drop view revenue:s; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_h/query16.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_h/query16.sql new file mode 100644 index 000000000..75f9ccfbf --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_h/query16.sql @@ -0,0 +1,31 @@ +--#[tpc-h-16] +select + p_brand, + p_type, + p_size, + count(distinct ps_suppkey) as supplier_cnt +from + partsupp, + part +where + p_partkey = ps_partkey + and p_brand <> ':1' + and p_type not like ':2%' + and p_size in (:3, :4, :5, :6, :7, :8, :9, :10) + and ps_suppkey not in ( + select + s_suppkey + from + supplier + where + s_comment like '%Customer%Complaints%' + ) +group by + p_brand, + p_type, + p_size +order by + supplier_cnt desc, + p_brand, + p_type, + p_size; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_h/query17.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_h/query17.sql new file mode 100644 index 000000000..f999a6b3e --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_h/query17.sql @@ -0,0 +1,18 @@ +--#[tpc-h-17] +select + sum(l_extendedprice) / 7.0 as avg_yearly +from + lineitem, + part +where 
+ p_partkey = l_partkey + and p_brand = ':1' + and p_container = ':2' + and l_quantity < ( + select + 0.2 * avg(l_quantity) + from + lineitem + where + l_partkey = p_partkey + ); diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_h/query18.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_h/query18.sql new file mode 100644 index 000000000..3aab619d1 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_h/query18.sql @@ -0,0 +1,33 @@ +--#[tpc-h-18] +select + c_name, + c_custkey, + o_orderkey, + o_orderdate, + o_totalprice, + sum(l_quantity) +from + customer, + orders, + lineitem +where + o_orderkey in ( + select + l_orderkey + from + lineitem + group by + l_orderkey having + sum(l_quantity) > :1 + ) + and c_custkey = o_custkey + and o_orderkey = l_orderkey +group by + c_name, + c_custkey, + o_orderkey, + o_orderdate, + o_totalprice +order by + o_totalprice desc, + o_orderdate; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_h/query19.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_h/query19.sql new file mode 100644 index 000000000..abd8dc051 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_h/query19.sql @@ -0,0 +1,36 @@ +--#[tpc-h-19] +select + sum(l_extendedprice* (1 - l_discount)) as revenue +from + lineitem, + part +where + ( + p_partkey = l_partkey + and p_brand = ':1' + and p_container in ('SM CASE', 'SM BOX', 'SM PACK', 'SM PKG') + and l_quantity >= :4 and l_quantity <= :4 + 10 + and p_size between 1 and 5 + and l_shipmode in ('AIR', 'AIR REG') + and l_shipinstruct = 'DELIVER IN PERSON' + ) + or + ( + p_partkey = l_partkey + and p_brand = ':2' + and p_container in ('MED BAG', 'MED BOX', 'MED PKG', 'MED PACK') + and l_quantity >= :5 and l_quantity <= :5 + 10 + and p_size between 1 and 10 + and l_shipmode in ('AIR', 'AIR REG') + and l_shipinstruct = 'DELIVER IN PERSON' + ) + or + ( + p_partkey = l_partkey + and p_brand = ':3' + and p_container in ('LG CASE', 'LG 
BOX', 'LG PACK', 'LG PKG') + and l_quantity >= :6 and l_quantity <= :6 + 10 + and p_size between 1 and 15 + and l_shipmode in ('AIR', 'AIR REG') + and l_shipinstruct = 'DELIVER IN PERSON' + ); diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_h/query20.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_h/query20.sql new file mode 100644 index 000000000..3bd75d6cf --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_h/query20.sql @@ -0,0 +1,38 @@ +--#[tpc-h-20] +select + s_name, + s_address +from + supplier, + nation +where + s_suppkey in ( + select + ps_suppkey + from + partsupp + where + ps_partkey in ( + select + p_partkey + from + part + where + p_name like ':1%' + ) + and ps_availqty > ( + select + 0.5 * sum(l_quantity) + from + lineitem + where + l_partkey = ps_partkey + and l_suppkey = ps_suppkey + and l_shipdate >= date ':2' + and l_shipdate < date ':2' + interval '1' year + ) + ) + and s_nationkey = n_nationkey + and n_name = ':3' +order by + s_name; diff --git a/partiql-planner/src/testFixtures/resources/inputs/tpc_h/query21.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_h/query21.sql new file mode 100644 index 000000000..86e9bbcb7 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_h/query21.sql @@ -0,0 +1,40 @@ +--#[tpc-h-21] +select + s_name, + count(*) as numwait +from + supplier, + lineitem l1, + orders, + nation +where + s_suppkey = l1.l_suppkey + and o_orderkey = l1.l_orderkey + and o_orderstatus = 'F' + and l1.l_receiptdate > l1.l_commitdate + and exists ( + select + * + from + lineitem l2 + where + l2.l_orderkey = l1.l_orderkey + and l2.l_suppkey <> l1.l_suppkey + ) + and not exists ( + select + * + from + lineitem l3 + where + l3.l_orderkey = l1.l_orderkey + and l3.l_suppkey <> l1.l_suppkey + and l3.l_receiptdate > l3.l_commitdate + ) + and s_nationkey = n_nationkey + and n_name = ':1' +group by + s_name +order by + numwait desc, + s_name; diff --git 
a/partiql-planner/src/testFixtures/resources/inputs/tpc_h/query22.sql b/partiql-planner/src/testFixtures/resources/inputs/tpc_h/query22.sql new file mode 100644 index 000000000..ed010b2a2 --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/inputs/tpc_h/query22.sql @@ -0,0 +1,38 @@ +--#[tpc-h-22] +select + cntrycode, + count(*) as numcust, + sum(c_acctbal) as totacctbal +from + ( + select + substring(c_phone from 1 for 2) as cntrycode, + c_acctbal + from + customer + where + substring(c_phone from 1 for 2) in + (':1', ':2', ':3', ':4', ':5', ':6', ':7') + and c_acctbal > ( + select + avg(c_acctbal) + from + customer + where + c_acctbal > 0.00 + and substring(c_phone from 1 for 2) in + (':1', ':2', ':3', ':4', ':5', ':6', ':7') + ) + and not exists ( + select + * + from + orders + where + o_custkey = c_custkey + ) + ) as custsale +group by + cntrycode +order by + cntrycode; diff --git a/partiql-planner/src/testFixtures/resources/tests/aggregations.ion b/partiql-planner/src/testFixtures/resources/tests/aggregations.ion new file mode 100644 index 000000000..2badeb8dd --- /dev/null +++ b/partiql-planner/src/testFixtures/resources/tests/aggregations.ion @@ -0,0 +1,286 @@ +suite::{ + name: "aggregations", + session: { + catalog: "default", + path: [ + "pql" + ], + vars: {}, + }, + tests: { + 'avg(int32|null)': { + statement: ''' + SELECT AVG(n) as "avg" FROM numbers.nullable_int32s AS n + ''', + schema: { + type: "bag", + items: { + type: "struct", + fields: [ + { + name: "avg", + type: [ + "int32", + "null", + ], + }, + ], + }, + }, + }, + 'count(int32|null)': { + statement: ''' + SELECT COUNT(n) as "count" FROM numbers.nullable_int32s AS n + ''', + schema: { + type: "bag", + items: { + type: "struct", + fields: [ + { + name: "count", + type: "int32", + }, + ], + }, + }, + }, + 'min(int32|null)': { + statement: ''' + SELECT MIN(n) as "min" FROM numbers.nullable_int32s AS n + ''', + schema: { + type: "bag", + items: { + type: "struct", + fields: [ + { + name: 
"min", + type: [ + "int32", + "null", + ], + }, + ], + }, + }, + }, + 'max(int32|null)': { + statement: ''' + SELECT MAX(n) as "max" FROM numbers.nullable_int32s AS n + ''', + schema: { + type: "bag", + items: { + type: "struct", + fields: [ + { + name: "max", + type: [ + "int32", + "null", + ], + }, + ], + }, + }, + }, + 'sum(int32|null)': { + statement: ''' + SELECT SUM(n) as "sum" FROM numbers.nullable_int32s AS n + ''', + schema: { + type: "bag", + items: { + type: "struct", + fields: [ + { + name: "sum", + type: [ + "int32", + "null", + ], + }, + ], + }, + }, + }, + 'avg(int32)': { + statement: ''' + SELECT AVG(n) as "avg" FROM numbers.int32s AS n + ''', + schema: { + type: "bag", + items: { + type: "struct", + fields: [ + { + name: "avg", + type: [ + "int32", + "null" + ], + }, + ], + }, + }, + }, + 'count(int32)': { + statement: ''' + SELECT COUNT(n) as "count" FROM numbers.int32s AS n + ''', + schema: { + type: "bag", + items: { + type: "struct", + fields: [ + { + name: "count", + type: "int32", + }, + ], + }, + }, + }, + 'min(int32)': { + statement: ''' + SELECT MIN(n) as "min" FROM numbers.int32s AS n + ''', + schema: { + type: "bag", + items: { + type: "struct", + fields: [ + { + name: "min", + type: [ + "int32", + "null" + ], + }, + ], + }, + }, + }, + 'max(int32)': { + statement: ''' + SELECT MAX(n) as "max" FROM numbers.int32s AS n + ''', + schema: { + type: "bag", + items: { + type: "struct", + fields: [ + { + name: "max", + type: [ + "int32", + "null" + ], + }, + ], + }, + }, + }, + 'sum(int32)': { + statement: ''' + SELECT SUM(n) as "sum" FROM numbers.int32s AS n + ''', + schema: { + type: "bag", + items: { + type: "struct", + fields: [ + { + name: "sum", + type: [ + "int32", + "null" + ], + }, + ], + }, + }, + }, + 'group_by_key': { + statement: ''' + SELECT COUNT(*) as "count", isOdd FROM numbers.int32s AS n + GROUP BY n % 2 = 0 AS isOdd + ''', + schema: { + type: "bag", + items: { + type: "struct", + fields: [ + { + name: "count", + type: 
"int32" + }, + { + name: "isOdd", + type: "bool" + }, + ], + }, + }, + }, + 'group_by_keys_noalias': { + statement: ''' + SELECT AVG(x), y, z FROM points + GROUP BY y, z + ''', + schema: { + type: "bag", + items: { + type: "struct", + fields: [ + { + name: "_1", + type: [ + "float32", + "null" + ], + }, + { + name: "y", + type: "float32" + }, + { + name: "z", + type: "float32" + }, + ], + }, + }, + }, + 'group_by_keys_alias': { + statement: ''' + SELECT AVG(x), a, b FROM points + GROUP BY y as a, z as b + ''', + schema: { + type: "bag", + items: { + type: "struct", + fields: [ + { + name: "_1", + type: [ + "float32", + "null" + ], + }, + { + name: "a", + type: "float32" + }, + { + name: "b", + type: "float32" + }, + ], + }, + }, + }, + }, +} diff --git a/partiql-types/src/main/kotlin/org/partiql/errors/Problem.kt b/partiql-types/src/main/kotlin/org/partiql/errors/Problem.kt index 4b1fdb3bb..a85a77a39 100644 --- a/partiql-types/src/main/kotlin/org/partiql/errors/Problem.kt +++ b/partiql-types/src/main/kotlin/org/partiql/errors/Problem.kt @@ -1,5 +1,10 @@ package org.partiql.errors +/** + * Typeof `ProblemHandler.handleProblem` + */ +public typealias ProblemCallback = (Problem) -> Unit + /** * In general, a [Problem] is a semantic error or warning encountered during compilation of a query. 
* diff --git a/partiql-types/src/main/kotlin/org/partiql/types/StaticType.kt b/partiql-types/src/main/kotlin/org/partiql/types/StaticType.kt index 525c90fcb..d2ef1756f 100644 --- a/partiql-types/src/main/kotlin/org/partiql/types/StaticType.kt +++ b/partiql-types/src/main/kotlin/org/partiql/types/StaticType.kt @@ -55,6 +55,7 @@ public sealed class StaticType { @JvmField public val SYMBOL: SymbolType = SymbolType() @JvmField public val STRING: StringType = StringType() @JvmField public val TEXT: StaticType = unionOf(SYMBOL, STRING) + @JvmField public val CHAR: StaticType = StringType(StringType.StringLengthConstraint.Constrained(NumberConstraint.Equals(1))) @JvmField public val CLOB: ClobType = ClobType() @JvmField public val BLOB: BlobType = BlobType() @JvmField public val LIST: ListType = ListType() @@ -154,6 +155,18 @@ public sealed class StaticType { else -> false } + /** + * Type is missable if it is MISSING or is an AnyOfType that contains a MISSING type + * + * @return + */ + public fun isMissable(): Boolean = + when (this) { + is AnyOfType -> types.any { it.isMissable() } + is AnyType, is MissingType -> true + else -> false + } + /** * Type is optional if it is Any, or Missing, or an AnyOfType that contains Any or Missing type */ @@ -679,7 +692,10 @@ public sealed class TupleConstraint { * The presence of the [Ordered] on a [StructType] represents that the [StructType] is ORDERED. 
The absence of * this constrain represents the opposite -- AKA that the [StructType] is UNORDERED */ - public object Ordered : TupleConstraint() + public object Ordered : TupleConstraint() { + + override fun toString(): String = "Ordered" + } } /** diff --git a/partiql-types/src/main/kotlin/org/partiql/types/function/FunctionSignature.kt b/partiql-types/src/main/kotlin/org/partiql/types/function/FunctionSignature.kt index bc67852ce..e524316e0 100644 --- a/partiql-types/src/main/kotlin/org/partiql/types/function/FunctionSignature.kt +++ b/partiql-types/src/main/kotlin/org/partiql/types/function/FunctionSignature.kt @@ -4,7 +4,6 @@ import org.partiql.value.PartiQLValueExperimental import org.partiql.value.PartiQLValueType /** - * Represents the signature of a PartiQL function. * * The signature includes the names of the function (which allows for function overloading), * the return type, a list of parameters, a flag indicating whether the function is deterministic @@ -13,28 +12,22 @@ import org.partiql.value.PartiQLValueType * @property name Function name * @property returns Operator return type * @property parameters Operator parameters - * @property isDeterministic Flag indicating this function always produces the same output given the same input. - * @property isNullCall Flag indicating if any of the call arguments is NULL, then return NULL. - * @property isNullable Flag indicating this function's operator may return a NULL value. * @property description Optional operator description + * @property isNullable Flag indicating this function's operator may return a NULL value. */ @OptIn(PartiQLValueExperimental::class) -public class FunctionSignature( - public val name: String, - public val returns: PartiQLValueType, - public val parameters: List, - public val isDeterministic: Boolean = true, - public val isNullCall: Boolean = false, - public val isNullable: Boolean = true, - public val description: String? 
= null, +public sealed class FunctionSignature( + @JvmField public val name: String, + @JvmField public val returns: PartiQLValueType, + @JvmField public val parameters: List, + @JvmField public val description: String? = null, + @JvmField public val isNullable: Boolean = true, ) { /** - * String mangling of a function signature to generate a specific identifier. - * - * Format NAME__INPUTS__RETURNS + * Symbolic name of this operator of the form NAME__INPUTS__RETURNS */ - private val specific = buildString { + public val specific: String = buildString { append(name.uppercase()) append("__") append(parameters.joinToString("_") { it.type.name }) @@ -43,72 +36,145 @@ public class FunctionSignature( } /** - * SQL-99 p.542 + * Use the symbolic name for easy debugging + * + * @return */ - private val deterministicCharacteristic = when (isDeterministic) { - true -> "DETERMINISTIC" - else -> "NOT DETERMINISTIC" - } + override fun toString(): String = specific /** - * SQL-99 p.543 + * Represents the signature of a PartiQL scalar function. + * + * @property isDeterministic Flag indicating this function always produces the same output given the same input. + * @property isNullCall Flag indicating if any of the call arguments is NULL, then return NULL. + * @constructor */ - private val nullCallClause = when (isNullCall) { - true -> "RETURNS NULL ON NULL INPUT" - else -> "CALLED ON NULL INPUT" - } + public class Scalar( + name: String, + returns: PartiQLValueType, + parameters: List, + description: String? 
= null, + isNullable: Boolean = true, + @JvmField public val isDeterministic: Boolean = true, + @JvmField public val isNullCall: Boolean = false, + ) : FunctionSignature(name, returns, parameters, description, isNullable) { - override fun toString(): String = buildString { - val fn = name.uppercase() - val indent = " " - append("CREATE FUNCTION \"$fn\" (") - if (parameters.isNotEmpty()) { - val extent = parameters.maxOf { it.name.length } + override fun equals(other: Any?): Boolean { + if (other !is Scalar) return false + if ( + other.name != name || + other.returns != returns || + other.parameters.size != parameters.size || + other.isDeterministic != isDeterministic || + other.isNullCall != isNullCall || + other.isNullable != isNullable + ) { + return false + } + // all other parts equal, compare parameters (ignore names) for (i in parameters.indices) { - val p = parameters[i] - val ws = (extent - p.name.length) + 1 - appendLine() - append(indent).append(p.name.uppercase()).append(" ".repeat(ws)).append(p.type.name) - if (i != parameters.size - 1) append(",") + val p1 = parameters[i] + val p2 = other.parameters[i] + if (p1.type != p2.type) return false } + return true + } + + override fun hashCode(): Int { + var result = name.hashCode() + result = 31 * result + returns.hashCode() + result = 31 * result + parameters.hashCode() + result = 31 * result + isDeterministic.hashCode() + result = 31 * result + isNullCall.hashCode() + result = 31 * result + isNullable.hashCode() + result = 31 * result + (description?.hashCode() ?: 0) + return result + } + + // Logic for writing a [FunctionSignature] using SQL `CREATE FUNCTION` syntax. 
+ + /** + * SQL-99 p.542 + */ + private val deterministicCharacteristic = when (isDeterministic) { + true -> "DETERMINISTIC" + else -> "NOT DETERMINISTIC" } - appendLine(" )") - append(indent).appendLine("RETURNS $returns") - append(indent).appendLine("SPECIFIC $specific") - append(indent).appendLine(deterministicCharacteristic) - append(indent).appendLine(nullCallClause) - append(indent).appendLine("RETURN $fn ( ${parameters.joinToString { it.name.uppercase() }} ) ;") - } - override fun equals(other: Any?): Boolean { - if (other !is FunctionSignature) return false - if ( - other.name != name || - other.returns != returns || - other.isDeterministic != isDeterministic || - other.isNullCall != isNullCall || - other.isNullable != isNullable || - other.parameters.size != parameters.size - ) { - return false + /** + * SQL-99 p.543 + */ + private val nullCallClause = when (isNullCall) { + true -> "RETURNS NULL ON NULL INPUT" + else -> "CALLED ON NULL INPUT" } - // all other parts equal, compare parameters (ignore names) - for (i in parameters.indices) { - val p1 = parameters[i] - val p2 = other.parameters[i] - if (p1.type != p2.type) return false + + public fun sql(): String = buildString { + val fn = name.uppercase() + val indent = " " + append("CREATE FUNCTION \"$fn\" (") + if (parameters.isNotEmpty()) { + val extent = parameters.maxOf { it.name.length } + for (i in parameters.indices) { + val p = parameters[i] + val ws = (extent - p.name.length) + 1 + appendLine() + append(indent).append(p.name.uppercase()).append(" ".repeat(ws)).append(p.type.name) + if (i != parameters.size - 1) append(",") + } + } + appendLine(" )") + append(indent).appendLine("RETURNS $returns") + append(indent).appendLine("SPECIFIC $specific") + append(indent).appendLine(deterministicCharacteristic) + append(indent).appendLine(nullCallClause) + append(indent).appendLine("RETURN $fn ( ${parameters.joinToString { it.name.uppercase() }} ) ;") } - return true } - override fun hashCode(): Int { - var 
result = name.hashCode() - result = 31 * result + returns.hashCode() - result = 31 * result + parameters.hashCode() - result = 31 * result + isDeterministic.hashCode() - result = 31 * result + isNullCall.hashCode() - result = 31 * result + isNullable.hashCode() - result = 31 * result + (description?.hashCode() ?: 0) - return result + /** + * Represents the signature of a PartiQL aggregation function. + * + * @property isDecomposable Flag indicating this aggregation can be decomposed + * @constructor + */ + public class Aggregation( + name: String, + returns: PartiQLValueType, + parameters: List, + description: String? = null, + isNullable: Boolean = true, + @JvmField public val isDecomposable: Boolean = true, + ) : FunctionSignature(name, returns, parameters, description, isNullable) { + + override fun equals(other: Any?): Boolean { + if (other !is Aggregation) return false + if ( + other.name != name || + other.returns != returns || + other.parameters.size != parameters.size || + other.isDecomposable != isDecomposable || + other.isNullable != isNullable + ) { + return false + } + // all other parts equal, compare parameters (ignore names) + for (i in parameters.indices) { + val p1 = parameters[i] + val p2 = other.parameters[i] + if (p1.type != p2.type) return false + } + return true + } + + override fun hashCode(): Int { + var result = name.hashCode() + result = 31 * result + returns.hashCode() + result = 31 * result + parameters.hashCode() + result = 31 * result + isDecomposable.hashCode() + result = 31 * result + isNullable.hashCode() + result = 31 * result + (description?.hashCode() ?: 0) + return result + } } } diff --git a/partiql-types/src/main/kotlin/org/partiql/value/PartiQLValue.kt b/partiql-types/src/main/kotlin/org/partiql/value/PartiQLValue.kt index b4f360eec..2d9ab21d5 100644 --- a/partiql-types/src/main/kotlin/org/partiql/value/PartiQLValue.kt +++ b/partiql-types/src/main/kotlin/org/partiql/value/PartiQLValue.kt @@ -178,7 +178,7 @@ public abstract 
class IntValue : NumericValue() { @PartiQLValueExperimental public abstract class DecimalValue : NumericValue() { - override val type: PartiQLValueType = PartiQLValueType.DECIMAL + override val type: PartiQLValueType = PartiQLValueType.DECIMAL_ARBITRARY abstract override fun copy(annotations: Annotations): DecimalValue diff --git a/partiql-types/src/main/kotlin/org/partiql/value/PartiQLValueType.kt b/partiql-types/src/main/kotlin/org/partiql/value/PartiQLValueType.kt index 93fda483d..118e1e380 100644 --- a/partiql-types/src/main/kotlin/org/partiql/value/PartiQLValueType.kt +++ b/partiql-types/src/main/kotlin/org/partiql/value/PartiQLValueType.kt @@ -11,7 +11,6 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific * language governing permissions and limitations under the License. */ - package org.partiql.value /** @@ -26,7 +25,9 @@ public enum class PartiQLValueType { INT32, INT64, INT, - DECIMAL, + // For now, just distinguish between fixed precision and arbitrary precision + DECIMAL, // TODO: Simple enum class does not have the power to express the parameterized type + DECIMAL_ARBITRARY, // arbitrary precision decimal FLOAT32, FLOAT64, CHAR, diff --git a/partiql-types/src/main/kotlin/org/partiql/value/io/PartiQLValueIonReader.kt b/partiql-types/src/main/kotlin/org/partiql/value/io/PartiQLValueIonReader.kt index 365f466bc..5fcbf5fd1 100644 --- a/partiql-types/src/main/kotlin/org/partiql/value/io/PartiQLValueIonReader.kt +++ b/partiql-types/src/main/kotlin/org/partiql/value/io/PartiQLValueIonReader.kt @@ -465,7 +465,7 @@ internal class PartiQLValueIonReader( val map = mutableMapOf() checkRequiredFieldNameAndPut(reader, map, "hour", PartiQLValueType.INT) checkRequiredFieldNameAndPut(reader, map, "minute", PartiQLValueType.INT) - checkRequiredFieldNameAndPut(reader, map, "second", PartiQLValueType.DECIMAL) + checkRequiredFieldNameAndPut(reader, map, "second", PartiQLValueType.DECIMAL_ARBITRARY) 
checkOptionalFieldNameAndPut(reader, map, "offset", PartiQLValueType.INT) // check remaining if (reader.next() != null) { @@ -500,7 +500,7 @@ internal class PartiQLValueIonReader( checkRequiredFieldNameAndPut(reader, map, "day", PartiQLValueType.INT) checkRequiredFieldNameAndPut(reader, map, "hour", PartiQLValueType.INT) checkRequiredFieldNameAndPut(reader, map, "minute", PartiQLValueType.INT) - checkRequiredFieldNameAndPut(reader, map, "second", PartiQLValueType.DECIMAL) + checkRequiredFieldNameAndPut(reader, map, "second", PartiQLValueType.DECIMAL_ARBITRARY) // check remaining if (reader.next() != null) { throw IllegalArgumentException("excess field in struct") @@ -572,7 +572,7 @@ internal class PartiQLValueIonReader( val v = fromIon(reader) when (expectedType) { PartiQLValueType.INT -> destination[k] = (v as IntValue).value?.intValueExact() - PartiQLValueType.DECIMAL -> destination[k] = (v as DecimalValue).value + PartiQLValueType.DECIMAL_ARBITRARY -> destination[k] = (v as DecimalValue).value else -> throw IllegalArgumentException("$expectedField should be either INT OR DECIMAL") } } else { diff --git a/partiql-types/src/main/kotlin/org/partiql/value/io/PartiQLValueIonWriter.kt b/partiql-types/src/main/kotlin/org/partiql/value/io/PartiQLValueIonWriter.kt index 8bacdc328..b30900c4c 100644 --- a/partiql-types/src/main/kotlin/org/partiql/value/io/PartiQLValueIonWriter.kt +++ b/partiql-types/src/main/kotlin/org/partiql/value/io/PartiQLValueIonWriter.kt @@ -10,7 +10,7 @@ import org.partiql.value.toIon import java.io.OutputStream @OptIn(PartiQLValueExperimental::class) -internal class PartiQLValueIonWriter( +public class PartiQLValueIonWriter internal constructor( private val ionWriter: IonWriter, ) : PartiQLValueWriter { diff --git a/plugins/partiql-local/build.gradle.kts b/plugins/partiql-local/build.gradle.kts index 823e674e4..ceef4f3a4 100644 --- a/plugins/partiql-local/build.gradle.kts +++ b/plugins/partiql-local/build.gradle.kts @@ -1,3 +1,5 @@ +import 
org.gradle.kotlin.dsl.distribution + /* * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. * @@ -15,10 +17,10 @@ plugins { id(Plugins.conventions) + distribution } dependencies { implementation(project(":partiql-spi")) implementation(project(":partiql-types")) - implementation(Deps.gson) } diff --git a/plugins/partiql-local/src/main/kotlin/org/partiql/plugins/local/LocalCatalog.kt b/plugins/partiql-local/src/main/kotlin/org/partiql/plugins/local/LocalCatalog.kt index 0200a7e30..b77f892f1 100644 --- a/plugins/partiql-local/src/main/kotlin/org/partiql/plugins/local/LocalCatalog.kt +++ b/plugins/partiql-local/src/main/kotlin/org/partiql/plugins/local/LocalCatalog.kt @@ -1,6 +1,8 @@ package org.partiql.plugins.local import com.amazon.ionelement.api.loadSingleElement +import org.partiql.spi.BindingCase +import org.partiql.spi.BindingName import org.partiql.spi.BindingPath import org.partiql.types.StaticType import java.io.File @@ -43,6 +45,29 @@ public class LocalCatalog private constructor(private val root: FsTree.D) { return null } + /** + * Provide a list of all objects in this catalog. 
+ */ + public fun listObjects(): List = sequence { search(emptyList(), root) }.toList() + + private suspend fun SequenceScope.search(acc: List, node: FsTree) = + when (node) { + is FsTree.D -> search(acc, node) + is FsTree.T -> search(acc, node) + } + + private suspend fun SequenceScope.search(acc: List, node: FsTree.D) { + val steps = acc + BindingName(node.name, BindingCase.INSENSITIVE) + for (child in node.children) { + search(steps, child) + } + } + + private suspend fun SequenceScope.search(acc: List, node: FsTree.T) { + val steps = acc + BindingName(node.name, BindingCase.INSENSITIVE) + this.yield(BindingPath(steps)) + } + companion object { /** diff --git a/plugins/partiql-local/src/main/kotlin/org/partiql/plugins/local/LocalConnector.kt b/plugins/partiql-local/src/main/kotlin/org/partiql/plugins/local/LocalConnector.kt index 35a819f4a..649520f30 100644 --- a/plugins/partiql-local/src/main/kotlin/org/partiql/plugins/local/LocalConnector.kt +++ b/plugins/partiql-local/src/main/kotlin/org/partiql/plugins/local/LocalConnector.kt @@ -55,6 +55,9 @@ class LocalConnector( private val metadata = Metadata(catalogRoot) + // not yet defined in SPI + public fun listObjects(): List = metadata.listObjects() + override fun getMetadata(session: ConnectorSession): ConnectorMetadata = metadata class Factory : Connector.Factory { @@ -97,5 +100,7 @@ class LocalConnector( value = value, ) } + + internal fun listObjects(): List = catalog.listObjects() } } diff --git a/plugins/partiql-local/src/main/kotlin/org/partiql/plugins/local/functions/Pow.kt b/plugins/partiql-local/src/main/kotlin/org/partiql/plugins/local/functions/Pow.kt index 3ef0482ae..d27b60c27 100644 --- a/plugins/partiql-local/src/main/kotlin/org/partiql/plugins/local/functions/Pow.kt +++ b/plugins/partiql-local/src/main/kotlin/org/partiql/plugins/local/functions/Pow.kt @@ -15,15 +15,15 @@ import org.partiql.value.float64Value object Pow : PartiQLFunction { @OptIn(PartiQLValueExperimental::class) - override val 
signature = FunctionSignature( + override val signature = FunctionSignature.Scalar( name = "test_power", returns = PartiQLValueType.FLOAT64, parameters = listOf( FunctionParameter(name = "base", type = PartiQLValueType.INT8), FunctionParameter(name = "exponent", type = PartiQLValueType.INT8) ), - isDeterministic = true, - description = "Power [base] with [exponent]" + description = "Power [base] with [exponent]", + isDeterministic = true ) @OptIn(PartiQLValueExperimental::class) diff --git a/plugins/partiql-local/src/main/kotlin/org/partiql/plugins/local/functions/TrimLead.kt b/plugins/partiql-local/src/main/kotlin/org/partiql/plugins/local/functions/TrimLead.kt index cf5a3912d..9037ca6c0 100644 --- a/plugins/partiql-local/src/main/kotlin/org/partiql/plugins/local/functions/TrimLead.kt +++ b/plugins/partiql-local/src/main/kotlin/org/partiql/plugins/local/functions/TrimLead.kt @@ -15,14 +15,14 @@ import org.partiql.value.stringValue object TrimLead : PartiQLFunction { @OptIn(PartiQLValueExperimental::class) - override val signature = FunctionSignature( + override val signature = FunctionSignature.Scalar( name = "trim_lead", returns = PartiQLValueType.STRING, parameters = listOf( FunctionParameter(name = "str", type = PartiQLValueType.STRING) ), - isDeterministic = true, - description = "Trims leading whitespace of a [str]." + description = "Trims leading whitespace of a [str].", + isDeterministic = true ) @OptIn(PartiQLValueExperimental::class) diff --git a/plugins/partiql-memory/README.md b/plugins/partiql-memory/README.md new file mode 100644 index 000000000..e26a74f27 --- /dev/null +++ b/plugins/partiql-memory/README.md @@ -0,0 +1,36 @@ +# PartiQL In-Memory Plugin + +This is a PartiQL plugin for in-memory DB. The primary purpose of this plugin is for testing. + +## Provider + +The plugin is backed by a catalog provider. This enables use to easily modify a catalog for testing. 
+ +```kotlin +val provider = MemoryCatalog.Provider() +provider[catalogName] = MemoryCatalog.of( + t1 to StaticType.INT2, + ... +) +``` + +## Catalog path + +The in-memory connector can handle arbitrary depth catalog path: + +```kotlin +val provider = MemoryCatalog.Provider() +provider[catalogName] = MemoryCatalog.of( + "schema.tbl" to StaticType.INT2, +) +``` + +The full path is `catalogName.schema.tbl` + +The lookup logic is identical to localPlugin. + +``` +|_ catalogName + |_ schema + |_ tbl.ion +``` \ No newline at end of file diff --git a/plugins/partiql-memory/build.gradle.kts b/plugins/partiql-memory/build.gradle.kts new file mode 100644 index 000000000..ceef4f3a4 --- /dev/null +++ b/plugins/partiql-memory/build.gradle.kts @@ -0,0 +1,26 @@ +import org.gradle.kotlin.dsl.distribution + +/* + * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. 
+ */ + +plugins { + id(Plugins.conventions) + distribution +} + +dependencies { + implementation(project(":partiql-spi")) + implementation(project(":partiql-types")) +} diff --git a/plugins/partiql-memory/src/main/kotlin/org/partiql/plugins/memory/MemoryCatalog.kt b/plugins/partiql-memory/src/main/kotlin/org/partiql/plugins/memory/MemoryCatalog.kt new file mode 100644 index 000000000..cc6a57842 --- /dev/null +++ b/plugins/partiql-memory/src/main/kotlin/org/partiql/plugins/memory/MemoryCatalog.kt @@ -0,0 +1,76 @@ +package org.partiql.plugins.memory + +import org.partiql.spi.BindingCase +import org.partiql.spi.BindingPath +import org.partiql.spi.connector.ConnectorObjectPath +import org.partiql.types.StaticType + +class MemoryCatalog( + private val map: Map +) { + operator fun get(key: String): StaticType? = map[key] + + public fun lookup(path: BindingPath): MemoryObject? { + val kPath = ConnectorObjectPath( + path.steps.map { + when (it.bindingCase) { + BindingCase.SENSITIVE -> it.name + BindingCase.INSENSITIVE -> it.loweredName + } + } + ) + val k = kPath.steps.joinToString(".") + if (this[k] != null) { + return this[k]?.let { MemoryObject(kPath.steps, it) } + } else { + val candidatePath = this.map.keys.map { it.split(".") } + val kPathIter = kPath.steps.listIterator() + while (kPathIter.hasNext()) { + val currKPath = kPathIter.next() + candidatePath.forEach { + val match = mutableListOf() + val candidateIterator = it.iterator() + while (candidateIterator.hasNext()) { + if (candidateIterator.next() == currKPath) { + match.add(currKPath) + val pathIteratorCopy = kPath.steps.listIterator(kPathIter.nextIndex()) + candidateIterator.forEachRemaining { + val nextPath = pathIteratorCopy.next() + if (it != nextPath) { + match.clear() + return@forEachRemaining + } + match.add(it) + } + } else { + return@forEach + } + } + if (match.isNotEmpty()) { + return this[match.joinToString(".")]?.let { it1 -> + MemoryObject( + match, + it1 + ) + } + } + } + } + return null + } + } + 
+ companion object { + fun of(vararg entities: Pair) = MemoryCatalog(mapOf(*entities)) + } + + class Provider { + private val catalogs = mutableMapOf() + + operator fun get(path: String): MemoryCatalog = catalogs[path] ?: error("invalid catalog path") + + operator fun set(path: String, catalog: MemoryCatalog) { + catalogs[path] = catalog + } + } +} diff --git a/plugins/partiql-memory/src/main/kotlin/org/partiql/plugins/memory/MemoryConnector.kt b/plugins/partiql-memory/src/main/kotlin/org/partiql/plugins/memory/MemoryConnector.kt new file mode 100644 index 000000000..9fce2172e --- /dev/null +++ b/plugins/partiql-memory/src/main/kotlin/org/partiql/plugins/memory/MemoryConnector.kt @@ -0,0 +1,46 @@ +package org.partiql.plugins.memory + +import com.amazon.ionelement.api.StructElement +import org.partiql.spi.BindingPath +import org.partiql.spi.connector.Connector +import org.partiql.spi.connector.ConnectorMetadata +import org.partiql.spi.connector.ConnectorObjectHandle +import org.partiql.spi.connector.ConnectorObjectPath +import org.partiql.spi.connector.ConnectorSession +import org.partiql.types.StaticType + +class MemoryConnector( + val catalog: MemoryCatalog +) : Connector { + + companion object { + const val CONNECTOR_NAME = "memory" + } + + override fun getMetadata(session: ConnectorSession): ConnectorMetadata = Metadata() + + class Factory(private val provider: MemoryCatalog.Provider) : Connector.Factory { + override fun getName(): String = CONNECTOR_NAME + + override fun create(catalogName: String, config: StructElement): Connector { + val catalog = provider[catalogName] + return MemoryConnector(catalog) + } + } + + inner class Metadata : ConnectorMetadata { + + override fun getObjectType(session: ConnectorSession, handle: ConnectorObjectHandle): StaticType? { + val obj = handle.value as MemoryObject + return obj.type + } + + override fun getObjectHandle(session: ConnectorSession, path: BindingPath): ConnectorObjectHandle? 
{ + val value = catalog.lookup(path) ?: return null + return ConnectorObjectHandle( + absolutePath = ConnectorObjectPath(value.path), + value = value, + ) + } + } +} diff --git a/plugins/partiql-memory/src/main/kotlin/org/partiql/plugins/memory/MemoryObject.kt b/plugins/partiql-memory/src/main/kotlin/org/partiql/plugins/memory/MemoryObject.kt new file mode 100644 index 000000000..cbd0fbea6 --- /dev/null +++ b/plugins/partiql-memory/src/main/kotlin/org/partiql/plugins/memory/MemoryObject.kt @@ -0,0 +1,9 @@ +package org.partiql.plugins.memory + +import org.partiql.spi.connector.ConnectorObject +import org.partiql.types.StaticType + +class MemoryObject( + val path: List, + val type: StaticType +) : ConnectorObject diff --git a/plugins/partiql-memory/src/main/kotlin/org/partiql/plugins/memory/MemoryPlugin.kt b/plugins/partiql-memory/src/main/kotlin/org/partiql/plugins/memory/MemoryPlugin.kt new file mode 100644 index 000000000..808f8e72a --- /dev/null +++ b/plugins/partiql-memory/src/main/kotlin/org/partiql/plugins/memory/MemoryPlugin.kt @@ -0,0 +1,13 @@ +package org.partiql.plugins.memory + +import org.partiql.spi.Plugin +import org.partiql.spi.connector.Connector +import org.partiql.spi.function.PartiQLFunction +import org.partiql.spi.function.PartiQLFunctionExperimental + +class MemoryPlugin(val provider: MemoryCatalog.Provider) : Plugin { + override fun getConnectorFactories(): List = listOf(MemoryConnector.Factory(provider)) + + @PartiQLFunctionExperimental + override fun getFunctions(): List = emptyList() +} diff --git a/plugins/partiql-memory/src/test/kotlin/org/partiql/plugins/memory/InMemoryPluginTest.kt b/plugins/partiql-memory/src/test/kotlin/org/partiql/plugins/memory/InMemoryPluginTest.kt new file mode 100644 index 000000000..d2fab9eae --- /dev/null +++ b/plugins/partiql-memory/src/test/kotlin/org/partiql/plugins/memory/InMemoryPluginTest.kt @@ -0,0 +1,159 @@ +package org.partiql.plugins.memory + +import org.junit.jupiter.api.Test +import 
org.partiql.spi.BindingCase +import org.partiql.spi.BindingName +import org.partiql.spi.BindingPath +import org.partiql.spi.connector.ConnectorObjectPath +import org.partiql.spi.connector.ConnectorSession +import org.partiql.types.BagType +import org.partiql.types.StaticType +import org.partiql.types.StructType + +class InMemoryPluginTest { + + private val session = object : ConnectorSession { + override fun getQueryId(): String = "mock_query_id" + override fun getUserId(): String = "mock_user" + } + + companion object { + val provider = MemoryCatalog.Provider().also { + it["test"] = MemoryCatalog.of( + "a" to StaticType.INT2, + "struct" to StructType( + fields = listOf(StructType.Field("a", StaticType.INT2)) + ), + "schema.tbl" to BagType( + StructType( + fields = listOf(StructType.Field("a", StaticType.INT2)) + ) + ) + ) + } + } + + @Test + fun getValue() { + val requested = BindingPath( + listOf( + BindingName("a", BindingCase.INSENSITIVE) + ) + ) + val expected = StaticType.INT2 + + val connector = MemoryConnector(provider["test"]) + + val metadata = connector.Metadata() + + val handle = metadata.getObjectHandle(session, requested) + + val descriptor = metadata.getObjectType(session, handle!!) 
+ + assert(requested.isEquivalentTo(handle.absolutePath)) + assert(expected == descriptor) + } + + @Test + fun getCaseSensitiveValueShouldFail() { + val requested = BindingPath( + listOf( + BindingName("A", BindingCase.SENSITIVE) + ) + ) + + val connector = MemoryConnector(provider["test"]) + + val metadata = connector.Metadata() + + val handle = metadata.getObjectHandle(session, requested) + + assert(null == handle) + } + + @Test + fun accessStruct() { + val requested = BindingPath( + listOf( + BindingName("struct", BindingCase.INSENSITIVE), + BindingName("a", BindingCase.INSENSITIVE) + ) + ) + + val connector = MemoryConnector(provider["test"]) + + val metadata = connector.Metadata() + + val handle = metadata.getObjectHandle(session, requested) + + val descriptor = metadata.getObjectType(session, handle!!) + + val expectConnectorPath = ConnectorObjectPath(listOf("struct")) + + val expectedObjectType = StructType(fields = listOf(StructType.Field("a", StaticType.INT2))) + + assert(expectConnectorPath == handle.absolutePath) + assert(expectedObjectType == descriptor) + } + + @Test + fun pathNavigationSuccess() { + val requested = BindingPath( + listOf( + BindingName("schema", BindingCase.INSENSITIVE), + BindingName("tbl", BindingCase.INSENSITIVE) + ) + ) + + val connector = MemoryConnector(provider["test"]) + + val metadata = connector.Metadata() + + val handle = metadata.getObjectHandle(session, requested) + + val descriptor = metadata.getObjectType(session, handle!!) 
+ + val expectedObjectType = BagType(StructType(fields = listOf(StructType.Field("a", StaticType.INT2)))) + + assert(requested.isEquivalentTo(handle.absolutePath)) + assert(expectedObjectType == descriptor) + } + + @Test + fun pathNavigationSuccess2() { + val requested = BindingPath( + listOf( + BindingName("schema", BindingCase.INSENSITIVE), + BindingName("tbl", BindingCase.INSENSITIVE), + BindingName("a", BindingCase.INSENSITIVE) + ) + ) + + val connector = MemoryConnector(provider["test"]) + + val metadata = connector.Metadata() + + val handle = metadata.getObjectHandle(session, requested) + + val descriptor = metadata.getObjectType(session, handle!!) + + val expectedObjectType = BagType(StructType(fields = listOf(StructType.Field("a", StaticType.INT2)))) + + val expectConnectorPath = ConnectorObjectPath(listOf("schema", "tbl")) + + assert(expectConnectorPath == handle.absolutePath) + assert(expectedObjectType == descriptor) + } + + private fun BindingPath.isEquivalentTo(other: ConnectorObjectPath): Boolean { + if (this.steps.size != other.steps.size) { + return false + } + this.steps.forEachIndexed { index, step -> + if (step.isEquivalentTo(other.steps[index]).not()) { + return false + } + } + return true + } +} diff --git a/settings.gradle.kts b/settings.gradle.kts index 2564e3cf2..0876b5b07 100644 --- a/settings.gradle.kts +++ b/settings.gradle.kts @@ -21,9 +21,11 @@ include( "partiql-lang", "partiql-parser", "partiql-plan", + "partiql-planner", "partiql-spi", "partiql-types", "plugins:partiql-local", + "plugins:partiql-memory", "lib:isl", "lib:sprout", "test:coverage-tests",