From 21695a1dfb33d5cb8bb195397d769222da5a6345 Mon Sep 17 00:00:00 2001 From: kasiafi <30203062+kasiafi@users.noreply.github.com> Date: Wed, 10 Aug 2022 14:11:44 +0200 Subject: [PATCH 01/15] Rename AST nodes to avoid conflicting class names --- .../io/trino/sql/analyzer/StatementAnalyzer.java | 6 +++--- .../src/main/java/io/trino/sql/SqlFormatter.java | 8 ++++---- .../main/java/io/trino/sql/parser/AstBuilder.java | 8 ++++---- .../main/java/io/trino/sql/tree/AstVisitor.java | 4 ++-- .../io/trino/sql/tree/TableFunctionArgument.java | 2 +- ...t.java => TableFunctionDescriptorArgument.java} | 14 +++++++------- ...gument.java => TableFunctionTableArgument.java} | 8 ++++---- .../java/io/trino/sql/parser/TestSqlParser.java | 14 +++++++------- 8 files changed, 32 insertions(+), 32 deletions(-) rename core/trino-parser/src/main/java/io/trino/sql/tree/{DescriptorArgument.java => TableFunctionDescriptorArgument.java} (76%) rename core/trino-parser/src/main/java/io/trino/sql/tree/{TableArgument.java => TableFunctionTableArgument.java} (93%) diff --git a/core/trino-main/src/main/java/io/trino/sql/analyzer/StatementAnalyzer.java b/core/trino-main/src/main/java/io/trino/sql/analyzer/StatementAnalyzer.java index 874f10fc09b2..52dd8a99996b 100644 --- a/core/trino-main/src/main/java/io/trino/sql/analyzer/StatementAnalyzer.java +++ b/core/trino-main/src/main/java/io/trino/sql/analyzer/StatementAnalyzer.java @@ -136,7 +136,6 @@ import io.trino.sql.tree.Delete; import io.trino.sql.tree.Deny; import io.trino.sql.tree.DereferenceExpression; -import io.trino.sql.tree.DescriptorArgument; import io.trino.sql.tree.DropColumn; import io.trino.sql.tree.DropMaterializedView; import io.trino.sql.tree.DropSchema; @@ -220,6 +219,7 @@ import io.trino.sql.tree.Table; import io.trino.sql.tree.TableExecute; import io.trino.sql.tree.TableFunctionArgument; +import io.trino.sql.tree.TableFunctionDescriptorArgument; import io.trino.sql.tree.TableFunctionInvocation; import io.trino.sql.tree.TableSubquery; 
import io.trino.sql.tree.TruncateTable; @@ -1650,7 +1650,7 @@ private Argument analyzeArgument(ArgumentSpecification argumentSpecification, Ta if (argument.getValue() instanceof Relation) { actualType = "table"; } - else if (argument.getValue() instanceof DescriptorArgument) { + else if (argument.getValue() instanceof TableFunctionDescriptorArgument) { actualType = "descriptor"; } else if (argument.getValue() instanceof Expression) { @@ -1677,7 +1677,7 @@ else if (argument.getValue() instanceof Expression) { throw semanticException(NOT_SUPPORTED, argument, "Table arguments are not yet supported for table functions"); } if (argumentSpecification instanceof DescriptorArgumentSpecification) { - if (!(argument.getValue() instanceof DescriptorArgument)) { + if (!(argument.getValue() instanceof TableFunctionDescriptorArgument)) { if (argument.getValue() instanceof FunctionCall && ((FunctionCall) argument.getValue()).getName().hasSuffix(QualifiedName.of("descriptor"))) { // function name is always compared case-insensitive // malformed descriptor which parsed as a function call throw semanticException(INVALID_FUNCTION_ARGUMENT, argument, "Invalid descriptor argument %s. 
Descriptors should be formatted as 'DESCRIPTOR(name [type], ...)'", argumentSpecification.getName()); diff --git a/core/trino-parser/src/main/java/io/trino/sql/SqlFormatter.java b/core/trino-parser/src/main/java/io/trino/sql/SqlFormatter.java index 06f66c949a67..2945d88a84d2 100644 --- a/core/trino-parser/src/main/java/io/trino/sql/SqlFormatter.java +++ b/core/trino-parser/src/main/java/io/trino/sql/SqlFormatter.java @@ -36,7 +36,6 @@ import io.trino.sql.tree.Deny; import io.trino.sql.tree.DescribeInput; import io.trino.sql.tree.DescribeOutput; -import io.trino.sql.tree.DescriptorArgument; import io.trino.sql.tree.DropColumn; import io.trino.sql.tree.DropMaterializedView; import io.trino.sql.tree.DropRole; @@ -121,10 +120,11 @@ import io.trino.sql.tree.SingleColumn; import io.trino.sql.tree.StartTransaction; import io.trino.sql.tree.Table; -import io.trino.sql.tree.TableArgument; import io.trino.sql.tree.TableExecute; import io.trino.sql.tree.TableFunctionArgument; +import io.trino.sql.tree.TableFunctionDescriptorArgument; import io.trino.sql.tree.TableFunctionInvocation; +import io.trino.sql.tree.TableFunctionTableArgument; import io.trino.sql.tree.TableSubquery; import io.trino.sql.tree.TransactionAccessMode; import io.trino.sql.tree.TransactionMode; @@ -282,7 +282,7 @@ private void appendTableFunctionArguments(List arguments, } @Override - protected Void visitTableArgument(TableArgument node, Integer indent) + protected Void visitTableArgument(TableFunctionTableArgument node, Integer indent) { Relation relation = node.getTable(); Relation unaliased = relation instanceof AliasedRelation ? 
((AliasedRelation) relation).getRelation() : relation; @@ -319,7 +319,7 @@ protected Void visitTableArgument(TableArgument node, Integer indent) } @Override - protected Void visitDescriptorArgument(DescriptorArgument node, Integer indent) + protected Void visitDescriptorArgument(TableFunctionDescriptorArgument node, Integer indent) { if (node.getDescriptor().isPresent()) { builder.append(node.getDescriptor().get().getFields().stream() diff --git a/core/trino-parser/src/main/java/io/trino/sql/parser/AstBuilder.java b/core/trino-parser/src/main/java/io/trino/sql/parser/AstBuilder.java index 0a6e47fbdffb..dec6608e3aa3 100644 --- a/core/trino-parser/src/main/java/io/trino/sql/parser/AstBuilder.java +++ b/core/trino-parser/src/main/java/io/trino/sql/parser/AstBuilder.java @@ -220,11 +220,11 @@ import io.trino.sql.tree.SubscriptExpression; import io.trino.sql.tree.SubsetDefinition; import io.trino.sql.tree.Table; -import io.trino.sql.tree.TableArgument; import io.trino.sql.tree.TableElement; import io.trino.sql.tree.TableExecute; import io.trino.sql.tree.TableFunctionArgument; import io.trino.sql.tree.TableFunctionInvocation; +import io.trino.sql.tree.TableFunctionTableArgument; import io.trino.sql.tree.TableSubquery; import io.trino.sql.tree.TimeLiteral; import io.trino.sql.tree.TimestampLiteral; @@ -271,8 +271,6 @@ import static io.trino.sql.parser.SqlBaseParser.TIMESTAMP; import static io.trino.sql.tree.AnchorPattern.Type.PARTITION_END; import static io.trino.sql.tree.AnchorPattern.Type.PARTITION_START; -import static io.trino.sql.tree.DescriptorArgument.descriptorArgument; -import static io.trino.sql.tree.DescriptorArgument.nullDescriptorArgument; import static io.trino.sql.tree.JsonExists.ErrorBehavior.ERROR; import static io.trino.sql.tree.JsonExists.ErrorBehavior.FALSE; import static io.trino.sql.tree.JsonExists.ErrorBehavior.TRUE; @@ -301,6 +299,8 @@ import static io.trino.sql.tree.SkipTo.skipToFirst; import static io.trino.sql.tree.SkipTo.skipToLast; import 
static io.trino.sql.tree.SkipTo.skipToNextRow; +import static io.trino.sql.tree.TableFunctionDescriptorArgument.descriptorArgument; +import static io.trino.sql.tree.TableFunctionDescriptorArgument.nullDescriptorArgument; import static java.lang.String.format; import static java.util.Locale.ENGLISH; import static java.util.Objects.requireNonNull; @@ -1881,7 +1881,7 @@ public Node visitTableArgument(SqlBaseParser.TableArgumentContext context) boolean pruneWhenEmpty = context.PRUNE() != null; - return new TableArgument(getLocation(context), table, partitionBy, orderBy, pruneWhenEmpty); + return new TableFunctionTableArgument(getLocation(context), table, partitionBy, orderBy, pruneWhenEmpty); } @Override diff --git a/core/trino-parser/src/main/java/io/trino/sql/tree/AstVisitor.java b/core/trino-parser/src/main/java/io/trino/sql/tree/AstVisitor.java index 86bf82db3c61..386693e00f0a 100644 --- a/core/trino-parser/src/main/java/io/trino/sql/tree/AstVisitor.java +++ b/core/trino-parser/src/main/java/io/trino/sql/tree/AstVisitor.java @@ -1097,12 +1097,12 @@ protected R visitTableFunctionArgument(TableFunctionArgument node, C context) return visitNode(node, context); } - protected R visitTableArgument(TableArgument node, C context) + protected R visitTableArgument(TableFunctionTableArgument node, C context) { return visitNode(node, context); } - protected R visitDescriptorArgument(DescriptorArgument node, C context) + protected R visitDescriptorArgument(TableFunctionDescriptorArgument node, C context) { return visitNode(node, context); } diff --git a/core/trino-parser/src/main/java/io/trino/sql/tree/TableFunctionArgument.java b/core/trino-parser/src/main/java/io/trino/sql/tree/TableFunctionArgument.java index b403313cfd59..396c2661007e 100644 --- a/core/trino-parser/src/main/java/io/trino/sql/tree/TableFunctionArgument.java +++ b/core/trino-parser/src/main/java/io/trino/sql/tree/TableFunctionArgument.java @@ -33,7 +33,7 @@ public TableFunctionArgument(NodeLocation location, 
Optional name, N super(Optional.of(location)); this.name = requireNonNull(name, "name is null"); requireNonNull(value, "value is null"); - checkArgument(value instanceof TableArgument || value instanceof DescriptorArgument || value instanceof Expression); + checkArgument(value instanceof TableFunctionTableArgument || value instanceof TableFunctionDescriptorArgument || value instanceof Expression); this.value = value; } diff --git a/core/trino-parser/src/main/java/io/trino/sql/tree/DescriptorArgument.java b/core/trino-parser/src/main/java/io/trino/sql/tree/TableFunctionDescriptorArgument.java similarity index 76% rename from core/trino-parser/src/main/java/io/trino/sql/tree/DescriptorArgument.java rename to core/trino-parser/src/main/java/io/trino/sql/tree/TableFunctionDescriptorArgument.java index c5f1f21a1b9a..374b98e43908 100644 --- a/core/trino-parser/src/main/java/io/trino/sql/tree/DescriptorArgument.java +++ b/core/trino-parser/src/main/java/io/trino/sql/tree/TableFunctionDescriptorArgument.java @@ -21,23 +21,23 @@ import static java.util.Objects.requireNonNull; -public class DescriptorArgument +public class TableFunctionDescriptorArgument extends Node { private final Optional descriptor; - public static DescriptorArgument descriptorArgument(NodeLocation location, Descriptor descriptor) + public static TableFunctionDescriptorArgument descriptorArgument(NodeLocation location, Descriptor descriptor) { requireNonNull(descriptor, "descriptor is null"); - return new DescriptorArgument(location, Optional.of(descriptor)); + return new TableFunctionDescriptorArgument(location, Optional.of(descriptor)); } - public static DescriptorArgument nullDescriptorArgument(NodeLocation location) + public static TableFunctionDescriptorArgument nullDescriptorArgument(NodeLocation location) { - return new DescriptorArgument(location, Optional.empty()); + return new TableFunctionDescriptorArgument(location, Optional.empty()); } - private DescriptorArgument(NodeLocation location, 
Optional descriptor) + private TableFunctionDescriptorArgument(NodeLocation location, Optional descriptor) { super(Optional.of(location)); this.descriptor = descriptor; @@ -69,7 +69,7 @@ public boolean equals(Object o) if (o == null || getClass() != o.getClass()) { return false; } - return Objects.equals(descriptor, ((DescriptorArgument) o).descriptor); + return Objects.equals(descriptor, ((TableFunctionDescriptorArgument) o).descriptor); } @Override diff --git a/core/trino-parser/src/main/java/io/trino/sql/tree/TableArgument.java b/core/trino-parser/src/main/java/io/trino/sql/tree/TableFunctionTableArgument.java similarity index 93% rename from core/trino-parser/src/main/java/io/trino/sql/tree/TableArgument.java rename to core/trino-parser/src/main/java/io/trino/sql/tree/TableFunctionTableArgument.java index 6431d9d6b627..e36dc896301b 100644 --- a/core/trino-parser/src/main/java/io/trino/sql/tree/TableArgument.java +++ b/core/trino-parser/src/main/java/io/trino/sql/tree/TableFunctionTableArgument.java @@ -23,7 +23,7 @@ import static io.trino.sql.ExpressionFormatter.formatSortItems; import static java.util.Objects.requireNonNull; -public class TableArgument +public class TableFunctionTableArgument extends Node { private final Relation table; @@ -31,7 +31,7 @@ public class TableArgument private final Optional orderBy; private final boolean pruneWhenEmpty; - public TableArgument( + public TableFunctionTableArgument( NodeLocation location, Relation table, Optional> partitionBy, @@ -92,7 +92,7 @@ public boolean equals(Object o) return false; } - TableArgument other = (TableArgument) o; + TableFunctionTableArgument other = (TableFunctionTableArgument) o; return Objects.equals(table, other.table) && Objects.equals(partitionBy, other.partitionBy) && Objects.equals(orderBy, other.orderBy) && @@ -127,6 +127,6 @@ public boolean shallowEquals(Node o) return false; } - return pruneWhenEmpty == ((TableArgument) o).pruneWhenEmpty; + return pruneWhenEmpty == 
((TableFunctionTableArgument) o).pruneWhenEmpty; } } diff --git a/core/trino-parser/src/test/java/io/trino/sql/parser/TestSqlParser.java b/core/trino-parser/src/test/java/io/trino/sql/parser/TestSqlParser.java index 3d2114dc6bba..8eeccae78c68 100644 --- a/core/trino-parser/src/test/java/io/trino/sql/parser/TestSqlParser.java +++ b/core/trino-parser/src/test/java/io/trino/sql/parser/TestSqlParser.java @@ -187,10 +187,10 @@ import io.trino.sql.tree.SubscriptExpression; import io.trino.sql.tree.SubsetDefinition; import io.trino.sql.tree.Table; -import io.trino.sql.tree.TableArgument; import io.trino.sql.tree.TableExecute; import io.trino.sql.tree.TableFunctionArgument; import io.trino.sql.tree.TableFunctionInvocation; +import io.trino.sql.tree.TableFunctionTableArgument; import io.trino.sql.tree.TableSubquery; import io.trino.sql.tree.TimeLiteral; import io.trino.sql.tree.TimestampLiteral; @@ -256,8 +256,6 @@ import static io.trino.sql.tree.ArithmeticUnaryExpression.positive; import static io.trino.sql.tree.ComparisonExpression.Operator.EQUAL; import static io.trino.sql.tree.DateTimeDataType.Type.TIMESTAMP; -import static io.trino.sql.tree.DescriptorArgument.descriptorArgument; -import static io.trino.sql.tree.DescriptorArgument.nullDescriptorArgument; import static io.trino.sql.tree.FrameBound.Type.CURRENT_ROW; import static io.trino.sql.tree.FrameBound.Type.FOLLOWING; import static io.trino.sql.tree.JsonPathParameter.JsonFormat.JSON; @@ -273,6 +271,8 @@ import static io.trino.sql.tree.SortItem.NullOrdering.UNDEFINED; import static io.trino.sql.tree.SortItem.Ordering.ASCENDING; import static io.trino.sql.tree.SortItem.Ordering.DESCENDING; +import static io.trino.sql.tree.TableFunctionDescriptorArgument.descriptorArgument; +import static io.trino.sql.tree.TableFunctionDescriptorArgument.nullDescriptorArgument; import static io.trino.sql.tree.Trim.Specification.BOTH; import static io.trino.sql.tree.Trim.Specification.LEADING; import static 
io.trino.sql.tree.Trim.Specification.TRAILING; @@ -3921,7 +3921,7 @@ public void testTableFunctionInvocation() new TableFunctionArgument( location(1, 77), Optional.of(new Identifier(location(1, 77), "arg1", false)), - new TableArgument( + new TableFunctionTableArgument( location(1, 85), new AliasedRelation( location(1, 85), @@ -3976,7 +3976,7 @@ public void testTableFunctionTableArgumentAliasing() ImmutableList.of(new TableFunctionArgument( location(1, 30), Optional.of(new Identifier(location(1, 30), "input", false)), - new TableArgument( + new TableFunctionTableArgument( location(1, 39), new Table(location(1, 39), qualifiedName(location(1, 45), "orders")), Optional.empty(), @@ -3992,7 +3992,7 @@ public void testTableFunctionTableArgumentAliasing() ImmutableList.of(new TableFunctionArgument( location(1, 30), Optional.of(new Identifier(location(1, 30), "input", false)), - new TableArgument( + new TableFunctionTableArgument( location(1, 39), new AliasedRelation( location(1, 39), @@ -4012,7 +4012,7 @@ public void testTableFunctionTableArgumentAliasing() ImmutableList.of(new TableFunctionArgument( location(1, 30), Optional.of(new Identifier(location(1, 30), "input", false)), - new TableArgument( + new TableFunctionTableArgument( location(1, 39), new AliasedRelation( location(1, 39), From 8be4a486a525056ef6b361618280ae0185d7211b Mon Sep 17 00:00:00 2001 From: kasiafi <30203062+kasiafi@users.noreply.github.com> Date: Tue, 19 Jul 2022 16:07:15 +0200 Subject: [PATCH 02/15] Fix table function argument resolution --- .../main/java/io/trino/sql/analyzer/StatementAnalyzer.java | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/core/trino-main/src/main/java/io/trino/sql/analyzer/StatementAnalyzer.java b/core/trino-main/src/main/java/io/trino/sql/analyzer/StatementAnalyzer.java index 52dd8a99996b..5228a7974395 100644 --- a/core/trino-main/src/main/java/io/trino/sql/analyzer/StatementAnalyzer.java +++ 
b/core/trino-main/src/main/java/io/trino/sql/analyzer/StatementAnalyzer.java @@ -221,6 +221,7 @@ import io.trino.sql.tree.TableFunctionArgument; import io.trino.sql.tree.TableFunctionDescriptorArgument; import io.trino.sql.tree.TableFunctionInvocation; +import io.trino.sql.tree.TableFunctionTableArgument; import io.trino.sql.tree.TableSubquery; import io.trino.sql.tree.TruncateTable; import io.trino.sql.tree.Union; @@ -1647,7 +1648,7 @@ private Map analyzeArguments(Node node, List Date: Wed, 20 Jul 2022 12:30:25 +0200 Subject: [PATCH 03/15] Refactor empty table treatment into a Node --- .../main/java/io/trino/sql/SqlFormatter.java | 10 +- .../java/io/trino/sql/parser/AstBuilder.java | 12 ++- .../java/io/trino/sql/tree/AstVisitor.java | 5 + .../trino/sql/tree/EmptyTableTreatment.java | 93 +++++++++++++++++++ .../sql/tree/TableFunctionTableArgument.java | 21 ++--- .../io/trino/sql/parser/TestSqlParser.java | 10 +- 6 files changed, 126 insertions(+), 25 deletions(-) create mode 100644 core/trino-parser/src/main/java/io/trino/sql/tree/EmptyTableTreatment.java diff --git a/core/trino-parser/src/main/java/io/trino/sql/SqlFormatter.java b/core/trino-parser/src/main/java/io/trino/sql/SqlFormatter.java index 2945d88a84d2..5078f58f7a43 100644 --- a/core/trino-parser/src/main/java/io/trino/sql/SqlFormatter.java +++ b/core/trino-parser/src/main/java/io/trino/sql/SqlFormatter.java @@ -302,14 +302,10 @@ protected Void visitTableArgument(TableFunctionTableArgument node, Integer inden .map(ExpressionFormatter::formatExpression) .collect(joining(", "))); } - if (node.isPruneWhenEmpty()) { + node.getEmptyTableTreatment().ifPresent(treatment -> { builder.append("\n"); - append(indent, "PRUNE WHEN EMPTY"); - } - else { - builder.append("\n"); - append(indent, "KEEP WHEN EMPTY"); - } + append(indent, treatment.getTreatment().name() + " WHEN EMPTY"); + }); node.getOrderBy().ifPresent(orderBy -> { builder.append("\n"); append(indent, formatOrderBy(orderBy)); diff --git 
a/core/trino-parser/src/main/java/io/trino/sql/parser/AstBuilder.java b/core/trino-parser/src/main/java/io/trino/sql/parser/AstBuilder.java index dec6608e3aa3..4f13e97e9455 100644 --- a/core/trino-parser/src/main/java/io/trino/sql/parser/AstBuilder.java +++ b/core/trino-parser/src/main/java/io/trino/sql/parser/AstBuilder.java @@ -72,6 +72,8 @@ import io.trino.sql.tree.DropTable; import io.trino.sql.tree.DropView; import io.trino.sql.tree.EmptyPattern; +import io.trino.sql.tree.EmptyTableTreatment; +import io.trino.sql.tree.EmptyTableTreatment.Treatment; import io.trino.sql.tree.Except; import io.trino.sql.tree.ExcludedPattern; import io.trino.sql.tree.Execute; @@ -1879,9 +1881,15 @@ public Node visitTableArgument(SqlBaseParser.TableArgumentContext context) orderBy = Optional.of(new OrderBy(visit(context.sortItem(), SortItem.class))); } - boolean pruneWhenEmpty = context.PRUNE() != null; + Optional emptyTableTreatment = Optional.empty(); + if (context.PRUNE() != null) { + emptyTableTreatment = Optional.of(new EmptyTableTreatment(getLocation(context.PRUNE()), Treatment.PRUNE)); + } + else if (context.KEEP() != null) { + emptyTableTreatment = Optional.of(new EmptyTableTreatment(getLocation(context.KEEP()), Treatment.KEEP)); + } - return new TableFunctionTableArgument(getLocation(context), table, partitionBy, orderBy, pruneWhenEmpty); + return new TableFunctionTableArgument(getLocation(context), table, partitionBy, orderBy, emptyTableTreatment); } @Override diff --git a/core/trino-parser/src/main/java/io/trino/sql/tree/AstVisitor.java b/core/trino-parser/src/main/java/io/trino/sql/tree/AstVisitor.java index 386693e00f0a..215b86378d41 100644 --- a/core/trino-parser/src/main/java/io/trino/sql/tree/AstVisitor.java +++ b/core/trino-parser/src/main/java/io/trino/sql/tree/AstVisitor.java @@ -1146,4 +1146,9 @@ protected R visitJsonArray(JsonArray node, C context) { return visitExpression(node, context); } + + protected R visitEmptyTableTreatment(EmptyTableTreatment node, C 
context) + { + return visitNode(node, context); + } } diff --git a/core/trino-parser/src/main/java/io/trino/sql/tree/EmptyTableTreatment.java b/core/trino-parser/src/main/java/io/trino/sql/tree/EmptyTableTreatment.java new file mode 100644 index 000000000000..fc49f28df879 --- /dev/null +++ b/core/trino-parser/src/main/java/io/trino/sql/tree/EmptyTableTreatment.java @@ -0,0 +1,93 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.trino.sql.tree; + +import com.google.common.collect.ImmutableList; + +import java.util.List; +import java.util.Objects; +import java.util.Optional; + +import static com.google.common.base.MoreObjects.toStringHelper; +import static java.util.Objects.requireNonNull; + +public final class EmptyTableTreatment + extends Node +{ + private final Treatment treatment; + + public EmptyTableTreatment(NodeLocation location, Treatment treatment) + { + super(Optional.of(location)); + this.treatment = requireNonNull(treatment, "treatment is null"); + } + + public Treatment getTreatment() + { + return treatment; + } + + @Override + public R accept(AstVisitor visitor, C context) + { + return visitor.visitEmptyTableTreatment(this, context); + } + + @Override + public List getChildren() + { + return ImmutableList.of(); + } + + @Override + public boolean equals(Object obj) + { + if (this == obj) { + return true; + } + if ((obj == null) || (getClass() != obj.getClass())) { + return false; + } + return treatment == ((EmptyTableTreatment) 
obj).treatment; + } + + @Override + public int hashCode() + { + return Objects.hash(treatment); + } + + @Override + public String toString() + { + return toStringHelper(this) + .add("treatment", treatment) + .toString(); + } + + @Override + public boolean shallowEquals(Node other) + { + if (!sameClass(this, other)) { + return false; + } + + return treatment == ((EmptyTableTreatment) other).treatment; + } + + public enum Treatment + { + KEEP, PRUNE + } +} diff --git a/core/trino-parser/src/main/java/io/trino/sql/tree/TableFunctionTableArgument.java b/core/trino-parser/src/main/java/io/trino/sql/tree/TableFunctionTableArgument.java index e36dc896301b..f44b4409ca69 100644 --- a/core/trino-parser/src/main/java/io/trino/sql/tree/TableFunctionTableArgument.java +++ b/core/trino-parser/src/main/java/io/trino/sql/tree/TableFunctionTableArgument.java @@ -29,20 +29,20 @@ public class TableFunctionTableArgument private final Relation table; private final Optional> partitionBy; // it is allowed to partition by empty list private final Optional orderBy; - private final boolean pruneWhenEmpty; + private final Optional emptyTableTreatment; public TableFunctionTableArgument( NodeLocation location, Relation table, Optional> partitionBy, Optional orderBy, - boolean pruneWhenEmpty) + Optional emptyTableTreatment) { super(Optional.of(location)); this.table = requireNonNull(table, "table is null"); this.partitionBy = requireNonNull(partitionBy, "partitionBy is null"); this.orderBy = requireNonNull(orderBy, "orderBy is null"); - this.pruneWhenEmpty = pruneWhenEmpty; + this.emptyTableTreatment = requireNonNull(emptyTableTreatment, "emptyTableTreatment is null"); } public Relation getTable() @@ -60,9 +60,9 @@ public Optional getOrderBy() return orderBy; } - public boolean isPruneWhenEmpty() + public Optional getEmptyTableTreatment() { - return pruneWhenEmpty; + return emptyTableTreatment; } @Override @@ -78,6 +78,7 @@ public List getChildren() builder.add(table); 
partitionBy.ifPresent(builder::addAll); orderBy.ifPresent(builder::add); + emptyTableTreatment.ifPresent(builder::add); return builder.build(); } @@ -96,13 +97,13 @@ public boolean equals(Object o) return Objects.equals(table, other.table) && Objects.equals(partitionBy, other.partitionBy) && Objects.equals(orderBy, other.orderBy) && - pruneWhenEmpty == other.pruneWhenEmpty; + Objects.equals(emptyTableTreatment, other.emptyTableTreatment); } @Override public int hashCode() { - return Objects.hash(table, partitionBy, orderBy, pruneWhenEmpty); + return Objects.hash(table, partitionBy, orderBy, emptyTableTreatment); } @Override @@ -123,10 +124,6 @@ public String toString() @Override public boolean shallowEquals(Node o) { - if (!sameClass(this, o)) { - return false; - } - - return pruneWhenEmpty == ((TableFunctionTableArgument) o).pruneWhenEmpty; + return sameClass(this, o); } } diff --git a/core/trino-parser/src/test/java/io/trino/sql/parser/TestSqlParser.java b/core/trino-parser/src/test/java/io/trino/sql/parser/TestSqlParser.java index 8eeccae78c68..35757563d01a 100644 --- a/core/trino-parser/src/test/java/io/trino/sql/parser/TestSqlParser.java +++ b/core/trino-parser/src/test/java/io/trino/sql/parser/TestSqlParser.java @@ -63,6 +63,7 @@ import io.trino.sql.tree.DropTable; import io.trino.sql.tree.DropView; import io.trino.sql.tree.EmptyPattern; +import io.trino.sql.tree.EmptyTableTreatment; import io.trino.sql.tree.Execute; import io.trino.sql.tree.ExistsPredicate; import io.trino.sql.tree.Explain; @@ -256,6 +257,7 @@ import static io.trino.sql.tree.ArithmeticUnaryExpression.positive; import static io.trino.sql.tree.ComparisonExpression.Operator.EQUAL; import static io.trino.sql.tree.DateTimeDataType.Type.TIMESTAMP; +import static io.trino.sql.tree.EmptyTableTreatment.Treatment.PRUNE; import static io.trino.sql.tree.FrameBound.Type.CURRENT_ROW; import static io.trino.sql.tree.FrameBound.Type.FOLLOWING; import static 
io.trino.sql.tree.JsonPathParameter.JsonFormat.JSON; @@ -3933,7 +3935,7 @@ public void testTableFunctionInvocation() new Identifier(location(1, 112), "c", false))), Optional.of(ImmutableList.of(new Identifier(location(1, 196), "a", false))), Optional.of(new OrderBy(ImmutableList.of(new SortItem(location(1, 360), new Identifier(location(1, 360), "b", false), ASCENDING, LAST)))), - true)), + Optional.of(new EmptyTableTreatment(location(1, 266), PRUNE)))), new TableFunctionArgument( location(1, 425), Optional.of(new Identifier(location(1, 425), "arg2", false)), @@ -3981,7 +3983,7 @@ public void testTableFunctionTableArgumentAliasing() new Table(location(1, 39), qualifiedName(location(1, 45), "orders")), Optional.empty(), Optional.empty(), - false))), + Optional.empty()))), ImmutableList.of()))); // table alias; no column aliases @@ -4001,7 +4003,7 @@ public void testTableFunctionTableArgumentAliasing() null), Optional.empty(), Optional.empty(), - false))), + Optional.empty()))), ImmutableList.of()))); // table alias and column aliases @@ -4024,7 +4026,7 @@ public void testTableFunctionTableArgumentAliasing() new Identifier(location(1, 66), "c", false))), Optional.empty(), Optional.empty(), - false))), + Optional.empty()))), ImmutableList.of()))); } From a93597ff72d98d577b32f5da6ef562ec85a7aad6 Mon Sep 17 00:00:00 2001 From: kasiafi <30203062+kasiafi@users.noreply.github.com> Date: Thu, 21 Jul 2022 22:09:57 +0200 Subject: [PATCH 04/15] Extract method for analyzing scalar arguments --- .../trino/sql/analyzer/StatementAnalyzer.java | 50 ++++++++++--------- 1 file changed, 27 insertions(+), 23 deletions(-) diff --git a/core/trino-main/src/main/java/io/trino/sql/analyzer/StatementAnalyzer.java b/core/trino-main/src/main/java/io/trino/sql/analyzer/StatementAnalyzer.java index 5228a7974395..16ed0c6fd9a0 100644 --- a/core/trino-main/src/main/java/io/trino/sql/analyzer/StatementAnalyzer.java +++ b/core/trino-main/src/main/java/io/trino/sql/analyzer/StatementAnalyzer.java @@ 
-1696,34 +1696,38 @@ else if (argument.getValue() instanceof Expression) { if (argument.getValue() instanceof FunctionCall && ((FunctionCall) argument.getValue()).getName().hasSuffix(QualifiedName.of("descriptor"))) { // function name is always compared case-insensitive throw semanticException(INVALID_FUNCTION_ARGUMENT, argument, "'descriptor' function is not allowed as a table function argument"); } - // inline parameters - Expression inlined = ExpressionTreeRewriter.rewriteWith(new ExpressionRewriter<>() - { - @Override - public Expression rewriteParameter(Parameter node, Void context, ExpressionTreeRewriter treeRewriter) - { - if (analysis.isDescribe()) { - // We cannot handle DESCRIBE when a table function argument involves a parameter. - // In DESCRIBE, the parameter values are not known. We cannot pass a dummy value for a parameter. - // The value of a table function argument can affect the returned relation type. The returned - // relation type can affect the assumed types for other parameters in the query. 
- throw semanticException(NOT_SUPPORTED, node, "DESCRIBE is not supported if a table function uses parameters"); - } - return analysis.getParameters().get(NodeRef.of(node)); - } - }, expression); - Type expectedArgumentType = ((ScalarArgumentSpecification) argumentSpecification).getType(); - // currently, only constant arguments are supported - Object constantValue = ExpressionInterpreter.evaluateConstantExpression(inlined, expectedArgumentType, plannerContext, session, accessControl, analysis.getParameters()); - return ScalarArgument.builder() - .type(expectedArgumentType) - .value(constantValue) - .build(); + return analyzeScalarArgument(expression, ((ScalarArgumentSpecification) argumentSpecification).getType()); } throw new IllegalStateException("Unexpected argument specification: " + argumentSpecification.getClass().getSimpleName()); } + private Argument analyzeScalarArgument(Expression expression, Type type) + { + // inline parameters + Expression inlined = ExpressionTreeRewriter.rewriteWith(new ExpressionRewriter<>() + { + @Override + public Expression rewriteParameter(Parameter node, Void context, ExpressionTreeRewriter treeRewriter) + { + if (analysis.isDescribe()) { + // We cannot handle DESCRIBE when a table function argument involves a parameter. + // In DESCRIBE, the parameter values are not known. We cannot pass a dummy value for a parameter. + // The value of a table function argument can affect the returned relation type. The returned + // relation type can affect the assumed types for other parameters in the query. 
+ throw semanticException(NOT_SUPPORTED, node, "DESCRIBE is not supported if a table function uses parameters"); + } + return analysis.getParameters().get(NodeRef.of(node)); + } + }, expression); + // currently, only constant arguments are supported + Object constantValue = ExpressionInterpreter.evaluateConstantExpression(inlined, type, plannerContext, session, accessControl, analysis.getParameters()); + return ScalarArgument.builder() + .type(type) + .value(constantValue) + .build(); + } + private Argument analyzeDefault(ArgumentSpecification argumentSpecification, Node errorLocation) { if (argumentSpecification.isRequired()) { From 21acef5d028565b919e3eae2df9784fbc2317ae9 Mon Sep 17 00:00:00 2001 From: kasiafi <30203062+kasiafi@users.noreply.github.com> Date: Thu, 21 Jul 2022 22:12:18 +0200 Subject: [PATCH 05/15] Use variable --- .../src/main/java/io/trino/sql/analyzer/StatementAnalyzer.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/trino-main/src/main/java/io/trino/sql/analyzer/StatementAnalyzer.java b/core/trino-main/src/main/java/io/trino/sql/analyzer/StatementAnalyzer.java index 16ed0c6fd9a0..13ee2571182a 100644 --- a/core/trino-main/src/main/java/io/trino/sql/analyzer/StatementAnalyzer.java +++ b/core/trino-main/src/main/java/io/trino/sql/analyzer/StatementAnalyzer.java @@ -1693,7 +1693,7 @@ else if (argument.getValue() instanceof Expression) { } Expression expression = (Expression) argument.getValue(); // 'descriptor' as a function name is not allowed in this context - if (argument.getValue() instanceof FunctionCall && ((FunctionCall) argument.getValue()).getName().hasSuffix(QualifiedName.of("descriptor"))) { // function name is always compared case-insensitive + if (expression instanceof FunctionCall && ((FunctionCall) expression).getName().hasSuffix(QualifiedName.of("descriptor"))) { // function name is always compared case-insensitive throw semanticException(INVALID_FUNCTION_ARGUMENT, argument, "'descriptor' function is not 
allowed as a table function argument"); } return analyzeScalarArgument(expression, ((ScalarArgumentSpecification) argumentSpecification).getType()); From 9771edb626ea47f2333988178107c7a2dc46ec6b Mon Sep 17 00:00:00 2001 From: kasiafi <30203062+kasiafi@users.noreply.github.com> Date: Mon, 1 Aug 2022 14:39:14 +0200 Subject: [PATCH 06/15] Enforce the prune when empty property in TableArgumentSpecification Prune when empty is enforced for a table argument with row semantics. For a table argument with set semantics, keep when empty is the default, and it can be changed to prune when empty. --- .../src/main/java/io/trino/connector/ConnectorServices.java | 3 +++ .../main/java/io/trino/spi/ptf/TableArgumentSpecification.java | 1 + 2 files changed, 4 insertions(+) diff --git a/core/trino-main/src/main/java/io/trino/connector/ConnectorServices.java b/core/trino-main/src/main/java/io/trino/connector/ConnectorServices.java index e94561c179a7..991bf3689bbf 100644 --- a/core/trino-main/src/main/java/io/trino/connector/ConnectorServices.java +++ b/core/trino-main/src/main/java/io/trino/connector/ConnectorServices.java @@ -356,5 +356,8 @@ private static void validateTableFunction(ConnectorTableFunction tableFunction) .filter(TableArgumentSpecification::isRowSemantics) .count(); checkArgument(tableArgumentsWithRowSemantics <= 1, "more than one table argument with row semantics"); + // The 'keep when empty' or 'prune when empty' property must not be explicitly specified for a table argument with row semantics. + // Such a table argument is implicitly 'prune when empty'. The TableArgumentSpecification.Builder enforces the 'prune when empty' property + // for a table argument with row semantics.
} } diff --git a/core/trino-spi/src/main/java/io/trino/spi/ptf/TableArgumentSpecification.java b/core/trino-spi/src/main/java/io/trino/spi/ptf/TableArgumentSpecification.java index 3df865fcd707..c9a1aa16c519 100644 --- a/core/trino-spi/src/main/java/io/trino/spi/ptf/TableArgumentSpecification.java +++ b/core/trino-spi/src/main/java/io/trino/spi/ptf/TableArgumentSpecification.java @@ -70,6 +70,7 @@ public Builder name(String name) public Builder rowSemantics() { this.rowSemantics = true; + this.pruneWhenEmpty = true; return this; } From 15664c845f4958ecffbb0dca2596226ee899c12e Mon Sep 17 00:00:00 2001 From: kasiafi <30203062+kasiafi@users.noreply.github.com> Date: Mon, 1 Aug 2022 14:51:58 +0200 Subject: [PATCH 07/15] Add comment --- .../src/main/java/io/trino/sql/analyzer/StatementAnalyzer.java | 1 + 1 file changed, 1 insertion(+) diff --git a/core/trino-main/src/main/java/io/trino/sql/analyzer/StatementAnalyzer.java b/core/trino-main/src/main/java/io/trino/sql/analyzer/StatementAnalyzer.java index 13ee2571182a..befeaa928be1 100644 --- a/core/trino-main/src/main/java/io/trino/sql/analyzer/StatementAnalyzer.java +++ b/core/trino-main/src/main/java/io/trino/sql/analyzer/StatementAnalyzer.java @@ -1608,6 +1608,7 @@ private Map analyzeArguments(Node node, List argumentSpecificationsByName = new HashMap<>(); for (ArgumentSpecification argumentSpecification : argumentSpecifications) { if (argumentSpecificationsByName.put(argumentSpecification.getName(), argumentSpecification) != null) { + // this should never happen, because the argument names are validated at function registration time throw new IllegalStateException("Duplicate argument specification for name: " + argumentSpecification.getName()); } } From 18e97d406ae89528de5d04c868587f3c6bc2f1c4 Mon Sep 17 00:00:00 2001 From: kasiafi <30203062+kasiafi@users.noreply.github.com> Date: Tue, 2 Aug 2022 11:30:36 +0200 Subject: [PATCH 08/15] Simplify table argument representation in SPI According to the SQL standard, the only 
properties of a table argument used during analysis are: the row type, partitioning, and ordering. Other information, like the prune / keep when empty property, are not involved in the analysis. --- .../java/io/trino/spi/ptf/TableArgument.java | 150 +----------------- 1 file changed, 6 insertions(+), 144 deletions(-) diff --git a/core/trino-spi/src/main/java/io/trino/spi/ptf/TableArgument.java b/core/trino-spi/src/main/java/io/trino/spi/ptf/TableArgument.java index 7c807eb6001c..1f2ece78339b 100644 --- a/core/trino-spi/src/main/java/io/trino/spi/ptf/TableArgument.java +++ b/core/trino-spi/src/main/java/io/trino/spi/ptf/TableArgument.java @@ -20,9 +20,7 @@ import io.trino.spi.type.RowType; import java.util.List; -import java.util.Optional; -import static io.trino.spi.ptf.Preconditions.checkNotNullOrEmpty; import static java.util.Objects.requireNonNull; /** @@ -35,37 +33,19 @@ public class TableArgument extends Argument { - private final Optional name; private final RowType rowType; private final List partitionBy; - private final List orderBy; - private final boolean rowSemantics; - private final boolean pruneWhenEmpty; - private final boolean passThroughColumns; + private final List orderBy; @JsonCreator public TableArgument( - @JsonProperty("name") Optional name, @JsonProperty("rowType") RowType rowType, @JsonProperty("partitionBy") List partitionBy, - @JsonProperty("orderBy") List orderBy, - @JsonProperty("rowSemantics") boolean rowSemantics, - @JsonProperty("pruneWhenEmpty") boolean pruneWhenEmpty, - @JsonProperty("passThroughColumns") boolean passThroughColumns) + @JsonProperty("orderBy") List orderBy) { - this.name = requireNonNull(name, "name is null"); this.rowType = requireNonNull(rowType, "rowType is null"); this.partitionBy = requireNonNull(partitionBy, "partitionBy is null"); this.orderBy = requireNonNull(orderBy, "orderBy is null"); - this.rowSemantics = rowSemantics; - this.pruneWhenEmpty = pruneWhenEmpty; - this.passThroughColumns = passThroughColumns; 
- } - - @JsonProperty - public Optional getName() - { - return name; } @JsonProperty @@ -81,29 +61,11 @@ public List getPartitionBy() } @JsonProperty - public List getOrderBy() + public List getOrderBy() { return orderBy; } - @JsonProperty - public boolean isRowSemantics() - { - return rowSemantics; - } - - @JsonProperty - public boolean isPruneWhenEmpty() - { - return pruneWhenEmpty; - } - - @JsonProperty - public boolean isPassThroughColumns() - { - return passThroughColumns; - } - public static Builder builder() { return new Builder(); @@ -111,22 +73,12 @@ public static Builder builder() public static final class Builder { - private Optional name; private RowType rowType; private List partitionBy = List.of(); - private List orderBy = List.of(); - private boolean rowSemantics; - private boolean pruneWhenEmpty; - private boolean passThroughColumns; + private List orderBy = List.of(); private Builder() {} - public Builder name(Optional name) - { - this.name = name; - return this; - } - public Builder rowType(RowType rowType) { this.rowType = rowType; @@ -139,105 +91,15 @@ public Builder partitionBy(List partitionBy) return this; } - public Builder orderBy(List orderBy) + public Builder orderBy(List orderBy) { this.orderBy = orderBy; return this; } - public Builder rowSemantics(boolean rowSemantics) - { - this.rowSemantics = rowSemantics; - return this; - } - - public Builder pruneWhenEmpty(boolean pruneWhenEmpty) - { - this.pruneWhenEmpty = pruneWhenEmpty; - return this; - } - - public Builder passThroughColumns(boolean passThroughColumns) - { - this.passThroughColumns = passThroughColumns; - return this; - } - public TableArgument build() { - return new TableArgument(name, rowType, partitionBy, orderBy, rowSemantics, pruneWhenEmpty, passThroughColumns); - } - } - - public static class QualifiedName - { - private final String catalogName; - private final String schemaName; - private final String tableName; - - @JsonCreator - public QualifiedName( - 
@JsonProperty("catalogName") String catalogName, - @JsonProperty("schemaName") String schemaName, - @JsonProperty("tableName") String tableName) - { - this.catalogName = checkNotNullOrEmpty(catalogName, "catalogName"); - this.schemaName = checkNotNullOrEmpty(schemaName, "schemaName"); - this.tableName = checkNotNullOrEmpty(tableName, "tableName"); - } - - @JsonProperty - public String getCatalogName() - { - return catalogName; - } - - @JsonProperty - public String getSchemaName() - { - return schemaName; - } - - @JsonProperty - public String getTableName() - { - return tableName; - } - } - - public static class SortItem - { - private final String column; - private final boolean ascending; - private final boolean nullsLast; - - @JsonCreator - public SortItem( - @JsonProperty("column") String column, - @JsonProperty("ascending") boolean ascending, - @JsonProperty("nullsFirst") boolean nullsFirst) - { - this.column = checkNotNullOrEmpty(column, "ordering column"); - this.ascending = ascending; - this.nullsLast = nullsFirst; - } - - @JsonProperty - public String getColumn() - { - return column; - } - - @JsonProperty - public boolean isAscending() - { - return ascending; - } - - @JsonProperty - public boolean isNullsLast() - { - return nullsLast; + return new TableArgument(rowType, partitionBy, orderBy); } } } From 866f83c965500bfc962113db7c65b4c8a7931d0b Mon Sep 17 00:00:00 2001 From: kasiafi <30203062+kasiafi@users.noreply.github.com> Date: Tue, 2 Aug 2022 21:31:22 +0200 Subject: [PATCH 09/15] Record relation names during analysis --- .../main/java/io/trino/sql/analyzer/Analysis.java | 13 +++++++++++++ .../io/trino/sql/analyzer/StatementAnalyzer.java | 4 ++++ 2 files changed, 17 insertions(+) diff --git a/core/trino-main/src/main/java/io/trino/sql/analyzer/Analysis.java b/core/trino-main/src/main/java/io/trino/sql/analyzer/Analysis.java index 2e65779da0e3..58c2400dde08 100644 --- a/core/trino-main/src/main/java/io/trino/sql/analyzer/Analysis.java +++ 
b/core/trino-main/src/main/java/io/trino/sql/analyzer/Analysis.java @@ -65,6 +65,7 @@ import io.trino.sql.tree.Offset; import io.trino.sql.tree.OrderBy; import io.trino.sql.tree.Parameter; +import io.trino.sql.tree.QualifiedName; import io.trino.sql.tree.QuantifiedComparisonExpression; import io.trino.sql.tree.Query; import io.trino.sql.tree.QuerySpecification; @@ -235,6 +236,8 @@ public class Analysis private Optional tableExecuteHandle = Optional.empty(); + // names of tables and aliased relations. All names are resolved case-insensitive. + private final Map, QualifiedName> relationNames = new LinkedHashMap<>(); private final Map, TableFunctionInvocationAnalysis> tableFunctionAnalyses = new LinkedHashMap<>(); public Analysis(@Nullable Statement root, Map, Expression> parameters, QueryType queryType) @@ -1219,6 +1222,16 @@ public TableFunctionInvocationAnalysis getTableFunctionAnalysis(TableFunctionInv return tableFunctionAnalyses.get(NodeRef.of(node)); } + public void setRelationName(Relation relation, QualifiedName name) + { + relationNames.put(NodeRef.of(relation), name); + } + + public QualifiedName getRelationName(Relation relation) + { + return relationNames.get(NodeRef.of(relation)); + } + private boolean isInputTable(Table table) { return !(isUpdateTarget(table) || isInsertTarget(table)); diff --git a/core/trino-main/src/main/java/io/trino/sql/analyzer/StatementAnalyzer.java b/core/trino-main/src/main/java/io/trino/sql/analyzer/StatementAnalyzer.java index befeaa928be1..1ab993dbc68e 100644 --- a/core/trino-main/src/main/java/io/trino/sql/analyzer/StatementAnalyzer.java +++ b/core/trino-main/src/main/java/io/trino/sql/analyzer/StatementAnalyzer.java @@ -1770,6 +1770,7 @@ protected Scope visitTable(Table table, Optional scope) // is this a reference to a WITH query? 
Optional withQuery = createScope(scope).getNamedQuery(table.getName().getSuffix()); if (withQuery.isPresent()) { + analysis.setRelationName(table, table.getName()); return createScopeForCommonTableExpression(table, scope, withQuery.get()); } // is this a recursive reference in expandable WITH query? If so, there's base scope recorded. @@ -1781,11 +1782,13 @@ protected Scope visitTable(Table table, Optional scope) .withRelationType(baseScope.getRelationId(), baseScope.getRelationType()) .build(); analysis.setScope(table, resultScope); + analysis.setRelationName(table, table.getName()); return resultScope; } } QualifiedObjectName name = createQualifiedObjectName(session, table, table.getName()); + analysis.setRelationName(table, QualifiedName.of(name.getCatalogName(), name.getSchemaName(), name.getObjectName())); Optional optionalMaterializedView = metadata.getMaterializedView(session, name); if (optionalMaterializedView.isPresent()) { @@ -2337,6 +2340,7 @@ private ExpressionAnalysis analyzePatternRecognitionExpression(Expression expres @Override protected Scope visitAliasedRelation(AliasedRelation relation, Optional scope) { + analysis.setRelationName(relation, QualifiedName.of(ImmutableList.of(relation.getAlias()))); Scope relationScope = process(relation.getRelation(), scope); // todo this check should be inside of TupleDescriptor.withAlias, but the exception needs the node object From cbde4d7168efddf1d56f906d3d1e50d542d2f851 Mon Sep 17 00:00:00 2001 From: kasiafi <30203062+kasiafi@users.noreply.github.com> Date: Tue, 2 Aug 2022 22:23:35 +0200 Subject: [PATCH 10/15] Remove descriptor mapping from table function analysis and planning This is a change of approach on resolving arguments of table functions. Before this change, the mapping of descriptor arguments to table arguments had to be determined during query analysis. It was too limiting: in that model, input table references were limited to the specified descriptor fields. 
After this change, any input fields can be referenced, and the required fields (channels) are to be determined dynamically during function compilation. One advantage of determining the required fields early was the possibility to prune any columns that were not required by the table function. This should be replaced by adding a way for a table function to declare required fields during query optimization. --- .../trino/sql/analyzer/StatementAnalyzer.java | 14 --- .../io/trino/sql/planner/RelationPlanner.java | 6 +- .../UnaliasSymbolReferences.java | 3 +- .../sql/planner/plan/TableFunctionNode.java | 12 +-- .../sql/planner/planprinter/PlanPrinter.java | 2 +- .../io/trino/spi/ptf/DescriptorMapping.java | 86 ------------------- .../trino/spi/ptf/TableFunctionAnalysis.java | 25 +----- 7 files changed, 7 insertions(+), 141 deletions(-) delete mode 100644 core/trino-spi/src/main/java/io/trino/spi/ptf/DescriptorMapping.java diff --git a/core/trino-main/src/main/java/io/trino/sql/analyzer/StatementAnalyzer.java b/core/trino-main/src/main/java/io/trino/sql/analyzer/StatementAnalyzer.java index 1ab993dbc68e..d67e7677ccab 100644 --- a/core/trino-main/src/main/java/io/trino/sql/analyzer/StatementAnalyzer.java +++ b/core/trino-main/src/main/java/io/trino/sql/analyzer/StatementAnalyzer.java @@ -1505,20 +1505,6 @@ protected Scope visitTableFunctionInvocation(TableFunctionInvocation node, Optio TableFunctionAnalysis functionAnalysis = function.analyze(session.toConnectorSession(catalogHandle), transactionHandle, passedArguments); analysis.setTableFunctionAnalysis(node, new TableFunctionInvocationAnalysis(catalogHandle, function.getName(), passedArguments, functionAnalysis.getHandle(), transactionHandle)); - // TODO handle the DescriptorMapping descriptorsToTables mapping from the TableFunction.Analysis: - // This is a mapping of descriptor arguments to table arguments. 
It consists of two parts: - // - mapping by descriptor field: (arg name of descriptor argument, and position in the descriptor) to (arg name of table argument) - // - mapping by descriptor: (arg name of descriptor argument) to (arg name of table argument) - // 1. get the DescriptorField from the designated DescriptorArgument (or all fields for mapping by descriptor) - // 2. validate there is no DataType specified, - // 3. analyze the Identifier in the scope of the designated table (it is recorded, because args were already analyzed). Disable correlation. - // 4. at this point, the Identifier should be recorded as a column reference to the appropriate table - // 5. record the mapping NameAndPosition -> Identifier - // ... later translate Identifier to Symbol in Planner, and eventually translate it to channel before execution - if (!functionAnalysis.getDescriptorMapping().isEmpty()) { - throw semanticException(NOT_SUPPORTED, node, "Table arguments are not yet supported for table functions"); - } - // TODO process the copartitioning: // 1. validate input table references // 2. 
the copartitioned tables in each set must be partitioned, and have the same number of partitioning columns diff --git a/core/trino-main/src/main/java/io/trino/sql/planner/RelationPlanner.java b/core/trino-main/src/main/java/io/trino/sql/planner/RelationPlanner.java index 70556fd420ce..5aff24baf19c 100644 --- a/core/trino-main/src/main/java/io/trino/sql/planner/RelationPlanner.java +++ b/core/trino-main/src/main/java/io/trino/sql/planner/RelationPlanner.java @@ -22,7 +22,6 @@ import io.trino.metadata.TableFunctionHandle; import io.trino.metadata.TableHandle; import io.trino.spi.connector.ColumnHandle; -import io.trino.spi.ptf.NameAndPosition; import io.trino.spi.type.RowType; import io.trino.spi.type.Type; import io.trino.sql.ExpressionUtils; @@ -337,10 +336,10 @@ protected RelationPlan visitTableFunctionInvocation(TableFunctionInvocation node // - prune when empty property (from the actualArgument) // - pass through columns property (from the actualArgument) // - optional Specification: ordering scheme and partitioning (from the node's argument) <- planned upon the source's RelationPlan (or combined RelationPlan from all sources) + // TODO add - argument name + // TODO add - mapping column name => Symbol // TODO mind the fields without names and duplicate field names in RelationType List sources = ImmutableList.of(); List inputRelationsProperties = ImmutableList.of(); - // TODO rewrite column references to Symbols upon the source's RelationPlan (or combined RelationPlan from all sources) - Map inputDescriptorMappings = ImmutableMap.of(); Scope scope = analysis.getScope(node); // TODO pass columns from input relations, and make sure they have the right qualifier @@ -355,7 +354,6 @@ protected RelationPlan visitTableFunctionInvocation(TableFunctionInvocation node outputSymbols, sources.stream().map(RelationPlan::getRoot).collect(toImmutableList()), inputRelationsProperties, - inputDescriptorMappings, new TableFunctionHandle(functionAnalysis.getCatalogHandle(), 
functionAnalysis.getConnectorTableFunctionHandle(), functionAnalysis.getTransactionHandle())); return new RelationPlan(root, scope, outputSymbols, outerContext); diff --git a/core/trino-main/src/main/java/io/trino/sql/planner/optimizations/UnaliasSymbolReferences.java b/core/trino-main/src/main/java/io/trino/sql/planner/optimizations/UnaliasSymbolReferences.java index 02694735a291..ad2a77d37b42 100644 --- a/core/trino-main/src/main/java/io/trino/sql/planner/optimizations/UnaliasSymbolReferences.java +++ b/core/trino-main/src/main/java/io/trino/sql/planner/optimizations/UnaliasSymbolReferences.java @@ -321,7 +321,7 @@ public PlanAndMappings visitPatternRecognition(PatternRecognitionNode node, Unal @Override public PlanAndMappings visitTableFunction(TableFunctionNode node, UnaliasContext context) { - // TODO rewrite sources, tableArgumentProperties, and inputDescriptorMappings when we add support for input tables + // TODO rewrite sources, and tableArgumentProperties when we add support for input tables Map mapping = new HashMap<>(context.getCorrelationMapping()); SymbolMapper mapper = symbolMapper(mapping); @@ -335,7 +335,6 @@ public PlanAndMappings visitTableFunction(TableFunctionNode node, UnaliasContext newProperOutputs, node.getSources(), node.getTableArgumentProperties(), - node.getInputDescriptorMappings(), node.getHandle()), mapping); } diff --git a/core/trino-main/src/main/java/io/trino/sql/planner/plan/TableFunctionNode.java b/core/trino-main/src/main/java/io/trino/sql/planner/plan/TableFunctionNode.java index 11f925e73ab0..1fa713174fd0 100644 --- a/core/trino-main/src/main/java/io/trino/sql/planner/plan/TableFunctionNode.java +++ b/core/trino-main/src/main/java/io/trino/sql/planner/plan/TableFunctionNode.java @@ -17,7 +17,6 @@ import com.fasterxml.jackson.annotation.JsonProperty; import io.trino.metadata.TableFunctionHandle; import io.trino.spi.ptf.Argument; -import io.trino.spi.ptf.NameAndPosition; import io.trino.sql.planner.Symbol; import 
io.trino.sql.planner.plan.WindowNode.Specification; @@ -38,7 +37,6 @@ public class TableFunctionNode private final List properOutputs; private final List sources; private final List tableArgumentProperties; - private final Map inputDescriptorMappings; private final TableFunctionHandle handle; @JsonCreator @@ -49,7 +47,6 @@ public TableFunctionNode( @JsonProperty("properOutputs") List properOutputs, @JsonProperty("sources") List sources, @JsonProperty("tableArgumentProperties") List tableArgumentProperties, - @JsonProperty("inputDescriptorMappings") Map inputDescriptorMappings, @JsonProperty("handle") TableFunctionHandle handle) { super(id); @@ -58,7 +55,6 @@ public TableFunctionNode( this.properOutputs = requireNonNull(properOutputs, "properOutputs is null"); this.sources = requireNonNull(sources, "sources is null"); this.tableArgumentProperties = requireNonNull(tableArgumentProperties, "tableArgumentProperties is null"); - this.inputDescriptorMappings = requireNonNull(inputDescriptorMappings, "inputDescriptorMappings is null"); this.handle = requireNonNull(handle, "handle is null"); } @@ -86,12 +82,6 @@ public List getTableArgumentProperties() return tableArgumentProperties; } - @JsonProperty - public Map getInputDescriptorMappings() - { - return inputDescriptorMappings; - } - @JsonProperty public TableFunctionHandle getHandle() { @@ -122,7 +112,7 @@ public R accept(PlanVisitor visitor, C context) public PlanNode replaceChildren(List newSources) { checkArgument(sources.size() == newSources.size(), "wrong number of new children"); - return new TableFunctionNode(getId(), name, arguments, properOutputs, newSources, tableArgumentProperties, inputDescriptorMappings, handle); + return new TableFunctionNode(getId(), name, arguments, properOutputs, newSources, tableArgumentProperties, handle); } public static class TableArgumentProperties diff --git a/core/trino-main/src/main/java/io/trino/sql/planner/planprinter/PlanPrinter.java 
b/core/trino-main/src/main/java/io/trino/sql/planner/planprinter/PlanPrinter.java index 460f9f7c2d89..e873a2ed6b82 100644 --- a/core/trino-main/src/main/java/io/trino/sql/planner/planprinter/PlanPrinter.java +++ b/core/trino-main/src/main/java/io/trino/sql/planner/planprinter/PlanPrinter.java @@ -1658,7 +1658,7 @@ public Void visitTableFunction(TableFunctionNode node, Void context) ImmutableMap.of("name", node.getName())); checkArgument( - node.getSources().isEmpty() && node.getTableArgumentProperties().isEmpty() && node.getInputDescriptorMappings().isEmpty(), + node.getSources().isEmpty() && node.getTableArgumentProperties().isEmpty(), "Table or descriptor arguments are not yet supported in PlanPrinter"); node.getArguments().entrySet().stream() diff --git a/core/trino-spi/src/main/java/io/trino/spi/ptf/DescriptorMapping.java b/core/trino-spi/src/main/java/io/trino/spi/ptf/DescriptorMapping.java deleted file mode 100644 index 3a910300c721..000000000000 --- a/core/trino-spi/src/main/java/io/trino/spi/ptf/DescriptorMapping.java +++ /dev/null @@ -1,86 +0,0 @@ -/* - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package io.trino.spi.ptf; - -import io.trino.spi.Experimental; - -import java.util.HashMap; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; - -import static io.trino.spi.ptf.Preconditions.checkArgument; -import static io.trino.spi.ptf.Preconditions.checkNotNullOrEmpty; -import static java.lang.String.format; -import static java.util.Objects.requireNonNull; - -@Experimental(eta = "2022-10-31") -public class DescriptorMapping -{ - public static final DescriptorMapping EMPTY_MAPPING = new DescriptorMappingBuilder().build(); - - private final Map mappingByField; - private final Map mappingByDescriptor; - - private DescriptorMapping(Map mappingByField, Map mappingByDescriptor) - { - this.mappingByField = Map.copyOf(requireNonNull(mappingByField, "mappingByField is null")); - this.mappingByDescriptor = Map.copyOf(requireNonNull(mappingByDescriptor, "mappingByDescriptor is null")); - } - - public Map getMappingByField() - { - return mappingByField; - } - - public Map getMappingByDescriptor() - { - return mappingByDescriptor; - } - - public boolean isEmpty() - { - return mappingByField.isEmpty() && mappingByDescriptor.isEmpty(); - } - - public static class DescriptorMappingBuilder - { - private final Map mappingByField = new HashMap<>(); - private final Map mappingByDescriptor = new HashMap<>(); - private final Set descriptorsMappedByField = new HashSet<>(); - - public DescriptorMappingBuilder mapField(String descriptor, int field, String table) - { - checkNotNullOrEmpty(table, "table"); - checkArgument(!mappingByDescriptor.containsKey(descriptor), format("duplicate mapping for descriptor: %s, field: %s", descriptor, field)); - checkArgument(mappingByField.put(new NameAndPosition(descriptor, field), table) == null, format("duplicate mapping for descriptor: %s, field: %s", descriptor, field)); - descriptorsMappedByField.add(descriptor); - return this; - } - - public DescriptorMappingBuilder mapAllFields(String descriptor, String table) - { - 
checkNotNullOrEmpty(descriptor, "descriptor"); - checkNotNullOrEmpty(table, "table"); - checkArgument(!descriptorsMappedByField.contains(descriptor), "duplicate mapping for field of descriptor: " + descriptor); - checkArgument(mappingByDescriptor.put(descriptor, table) == null, "duplicate mapping for descriptor: " + descriptor); - return this; - } - - public DescriptorMapping build() - { - return new DescriptorMapping(mappingByField, mappingByDescriptor); - } - } -} diff --git a/core/trino-spi/src/main/java/io/trino/spi/ptf/TableFunctionAnalysis.java b/core/trino-spi/src/main/java/io/trino/spi/ptf/TableFunctionAnalysis.java index ae676df25428..a415c54d61f6 100644 --- a/core/trino-spi/src/main/java/io/trino/spi/ptf/TableFunctionAnalysis.java +++ b/core/trino-spi/src/main/java/io/trino/spi/ptf/TableFunctionAnalysis.java @@ -17,7 +17,6 @@ import java.util.Optional; -import static io.trino.spi.ptf.DescriptorMapping.EMPTY_MAPPING; import static io.trino.spi.ptf.Preconditions.checkArgument; import static java.util.Objects.requireNonNull; @@ -29,12 +28,6 @@ * Function, that is, the columns produced by the function, as opposed to the columns passed from the * input tables. The `returnedType` should only be set if the declared returned type is GENERIC_TABLE. *

- * The `descriptorMapping` field is used to inform the Analyzer of the semantics of descriptor arguments. - * Some descriptor arguments (or some of their fields) might be references to columns of the input tables. - * In such case, the Analyzer must be informed of those dependencies. It allows to pass the right values - * (input channels) to the Table Function during execution. It also allows to prune unused input columns - * during the optimization phase. - *

* The `handle` field can be used to carry all information necessary to execute the table function, * gathered at analysis time. Typically, these are the values of the constant arguments, and results * of pre-processing arguments. @@ -43,14 +36,12 @@ public final class TableFunctionAnalysis { private final Optional returnedType; - private final DescriptorMapping descriptorMapping; private final ConnectorTableFunctionHandle handle; - private TableFunctionAnalysis(Optional returnedType, DescriptorMapping descriptorMapping, ConnectorTableFunctionHandle handle) + private TableFunctionAnalysis(Optional returnedType, ConnectorTableFunctionHandle handle) { this.returnedType = requireNonNull(returnedType, "returnedType is null"); returnedType.ifPresent(descriptor -> checkArgument(descriptor.isTyped(), "field types not specified")); - this.descriptorMapping = requireNonNull(descriptorMapping, "descriptorMapping is null"); this.handle = requireNonNull(handle, "handle is null"); } @@ -59,11 +50,6 @@ public Optional getReturnedType() return returnedType; } - public DescriptorMapping getDescriptorMapping() - { - return descriptorMapping; - } - public ConnectorTableFunctionHandle getHandle() { return handle; @@ -77,7 +63,6 @@ public static Builder builder() public static final class Builder { private Descriptor returnedType; - private DescriptorMapping descriptorMapping = EMPTY_MAPPING; private ConnectorTableFunctionHandle handle = new ConnectorTableFunctionHandle() {}; private Builder() {} @@ -88,12 +73,6 @@ public Builder returnedType(Descriptor returnedType) return this; } - public Builder descriptorMapping(DescriptorMapping descriptorMapping) - { - this.descriptorMapping = descriptorMapping; - return this; - } - public Builder handle(ConnectorTableFunctionHandle handle) { this.handle = handle; @@ -102,7 +81,7 @@ public Builder handle(ConnectorTableFunctionHandle handle) public TableFunctionAnalysis build() { - return new 
TableFunctionAnalysis(Optional.ofNullable(returnedType), descriptorMapping, handle); + return new TableFunctionAnalysis(Optional.ofNullable(returnedType), handle); } } } From dc3552f812be96211989b73d60f60a99ab943863 Mon Sep 17 00:00:00 2001 From: kasiafi <30203062+kasiafi@users.noreply.github.com> Date: Mon, 8 Aug 2022 11:32:06 +0200 Subject: [PATCH 11/15] Use Optional.orElseThrow() --- .../src/main/java/io/trino/sql/analyzer/StatementAnalyzer.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/trino-main/src/main/java/io/trino/sql/analyzer/StatementAnalyzer.java b/core/trino-main/src/main/java/io/trino/sql/analyzer/StatementAnalyzer.java index d67e7677ccab..1ea0c0a084ed 100644 --- a/core/trino-main/src/main/java/io/trino/sql/analyzer/StatementAnalyzer.java +++ b/core/trino-main/src/main/java/io/trino/sql/analyzer/StatementAnalyzer.java @@ -1600,7 +1600,7 @@ private Map analyzeArguments(Node node, List uniqueArgumentNames = new HashSet<>(); for (TableFunctionArgument argument : arguments) { - String argumentName = argument.getName().get().getCanonicalValue(); + String argumentName = argument.getName().orElseThrow().getCanonicalValue(); if (!uniqueArgumentNames.add(argumentName)) { throw semanticException(INVALID_FUNCTION_ARGUMENT, argument, "Duplicate argument name: " + argumentName); } From 612ad9bc1a5a3c5b80d5e5aacf7d5f3529a11a19 Mon Sep 17 00:00:00 2001 From: kasiafi <30203062+kasiafi@users.noreply.github.com> Date: Wed, 10 Aug 2022 11:17:47 +0200 Subject: [PATCH 12/15] Add equals() and hashCode() methods for descriptor argument It should make using descriptor arguments easier for the table function author, and allow to compare with NULL_DESCRIPTOR by equality. 
--- .../java/io/trino/spi/ptf/Descriptor.java | 39 +++++++++++++++++++ .../io/trino/spi/ptf/DescriptorArgument.java | 20 ++++++++++ 2 files changed, 59 insertions(+) diff --git a/core/trino-spi/src/main/java/io/trino/spi/ptf/Descriptor.java b/core/trino-spi/src/main/java/io/trino/spi/ptf/Descriptor.java index 11456c2d4e65..48a4d2a12dfe 100644 --- a/core/trino-spi/src/main/java/io/trino/spi/ptf/Descriptor.java +++ b/core/trino-spi/src/main/java/io/trino/spi/ptf/Descriptor.java @@ -21,6 +21,7 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.List; +import java.util.Objects; import java.util.Optional; import java.util.stream.Collectors; @@ -72,6 +73,25 @@ public boolean isTyped() return fields.stream().allMatch(field -> field.type.isPresent()); } + @Override + public boolean equals(Object o) + { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Descriptor that = (Descriptor) o; + return fields.equals(that.fields); + } + + @Override + public int hashCode() + { + return Objects.hash(fields); + } + public static class Field { private final String name; @@ -95,5 +115,24 @@ public Optional getType() { return type; } + + @Override + public boolean equals(Object o) + { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Field field = (Field) o; + return name.equals(field.name) && type.equals(field.type); + } + + @Override + public int hashCode() + { + return Objects.hash(name, type); + } } } diff --git a/core/trino-spi/src/main/java/io/trino/spi/ptf/DescriptorArgument.java b/core/trino-spi/src/main/java/io/trino/spi/ptf/DescriptorArgument.java index 85fca6e2b4ef..9af83d5f22fb 100644 --- a/core/trino-spi/src/main/java/io/trino/spi/ptf/DescriptorArgument.java +++ b/core/trino-spi/src/main/java/io/trino/spi/ptf/DescriptorArgument.java @@ -18,6 +18,7 @@ import io.trino.spi.Experimental; import io.trino.spi.expression.ConnectorExpression; 
+import java.util.Objects; import java.util.Optional; import static java.util.Objects.requireNonNull; @@ -47,6 +48,25 @@ public Optional getDescriptor() return descriptor; } + @Override + public boolean equals(Object o) + { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + DescriptorArgument that = (DescriptorArgument) o; + return descriptor.equals(that.descriptor); + } + + @Override + public int hashCode() + { + return Objects.hash(descriptor); + } + public static Builder builder() { return new Builder(); From 8568ed11b94130c2a76ad1cf99c04d91fd2a0eca Mon Sep 17 00:00:00 2001 From: kasiafi <30203062+kasiafi@users.noreply.github.com> Date: Wed, 10 Aug 2022 11:58:34 +0200 Subject: [PATCH 13/15] Add serialization of table function scalar arguments --- .../java/io/trino/spi/ptf/ScalarArgument.java | 23 +++++++++++++++---- 1 file changed, 19 insertions(+), 4 deletions(-) diff --git a/core/trino-spi/src/main/java/io/trino/spi/ptf/ScalarArgument.java b/core/trino-spi/src/main/java/io/trino/spi/ptf/ScalarArgument.java index 3a981e3f5cbc..f0cea1c7bc57 100644 --- a/core/trino-spi/src/main/java/io/trino/spi/ptf/ScalarArgument.java +++ b/core/trino-spi/src/main/java/io/trino/spi/ptf/ScalarArgument.java @@ -17,8 +17,11 @@ import com.fasterxml.jackson.annotation.JsonProperty; import io.trino.spi.Experimental; import io.trino.spi.expression.ConnectorExpression; +import io.trino.spi.predicate.NullableValue; import io.trino.spi.type.Type; +import javax.annotation.Nullable; + import static java.util.Objects.requireNonNull; /** @@ -37,27 +40,39 @@ public class ScalarArgument private final Type type; // native representation + @Nullable private final Object value; - @JsonCreator - public ScalarArgument(@JsonProperty("type") Type type, @JsonProperty("value") Object value) + public ScalarArgument(Type type, Object value) { this.type = requireNonNull(type, "type is null"); this.value = value; } - @JsonProperty public Type 
getType() { return type; } - @JsonProperty public Object getValue() { return value; } + // deserialization + @JsonCreator + public static ScalarArgument fromNullableValue(@JsonProperty("nullableValue") NullableValue nullableValue) + { + return new ScalarArgument(nullableValue.getType(), nullableValue.getValue()); + } + + // serialization + @JsonProperty + public NullableValue getNullableValue() + { + return new NullableValue(type, value); + } + public static Builder builder() { return new Builder(); From b7460da53871b9b6a77ed08c24afbee1d5f72dd9 Mon Sep 17 00:00:00 2001 From: kasiafi <30203062+kasiafi@users.noreply.github.com> Date: Tue, 6 Sep 2022 09:20:27 +0200 Subject: [PATCH 14/15] Copy a map argument in the constructor --- .../src/main/java/io/trino/sql/analyzer/Analysis.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/trino-main/src/main/java/io/trino/sql/analyzer/Analysis.java b/core/trino-main/src/main/java/io/trino/sql/analyzer/Analysis.java index 58c2400dde08..63dd379afb7d 100644 --- a/core/trino-main/src/main/java/io/trino/sql/analyzer/Analysis.java +++ b/core/trino-main/src/main/java/io/trino/sql/analyzer/Analysis.java @@ -2058,7 +2058,7 @@ public TableFunctionInvocationAnalysis( { this.catalogHandle = requireNonNull(catalogHandle, "catalogHandle is null"); this.functionName = requireNonNull(functionName, "functionName is null"); - this.arguments = requireNonNull(arguments, "arguments is null"); + this.arguments = ImmutableMap.copyOf(arguments); this.connectorTableFunctionHandle = requireNonNull(connectorTableFunctionHandle, "connectorTableFunctionHandle is null"); this.transactionHandle = requireNonNull(transactionHandle, "transactionHandle is null"); } From dbef4d9be37494967496230573ab400e54aab0d9 Mon Sep 17 00:00:00 2001 From: kasiafi <30203062+kasiafi@users.noreply.github.com> Date: Wed, 10 Aug 2022 12:54:34 +0200 Subject: [PATCH 15/15] Analyze table and descriptor arguments --- .../java/io/trino/sql/analyzer/Analysis.java 
| 160 ++++++++ .../trino/sql/analyzer/StatementAnalyzer.java | 351 +++++++++++++++-- .../io/trino/sql/planner/RelationPlanner.java | 12 + .../connector/TestingTableFunctions.java | 131 ++++++- .../io/trino/sql/analyzer/TestAnalyzer.java | 358 +++++++++++++++++- .../java/io/trino/spi/StandardErrorCode.java | 1 + 6 files changed, 969 insertions(+), 44 deletions(-) diff --git a/core/trino-main/src/main/java/io/trino/sql/analyzer/Analysis.java b/core/trino-main/src/main/java/io/trino/sql/analyzer/Analysis.java index 63dd379afb7d..f07c574a7dfe 100644 --- a/core/trino-main/src/main/java/io/trino/sql/analyzer/Analysis.java +++ b/core/trino-main/src/main/java/io/trino/sql/analyzer/Analysis.java @@ -2041,11 +2041,157 @@ public Optional getAtMost() } } + public static class TableArgumentAnalysis + { + private final String argumentName; + private final Optional name; + private final Relation relation; + private final Optional> partitionBy; // it is allowed to partition by empty list + private final Optional orderBy; + private final boolean pruneWhenEmpty; + private final boolean rowSemantics; + private final boolean passThroughColumns; + + private TableArgumentAnalysis( + String argumentName, + Optional name, + Relation relation, + Optional> partitionBy, + Optional orderBy, + boolean pruneWhenEmpty, + boolean rowSemantics, + boolean passThroughColumns) + { + this.argumentName = requireNonNull(argumentName, "argumentName is null"); + this.name = requireNonNull(name, "name is null"); + this.relation = requireNonNull(relation, "relation is null"); + this.partitionBy = requireNonNull(partitionBy, "partitionBy is null").map(ImmutableList::copyOf); + this.orderBy = requireNonNull(orderBy, "orderBy is null"); + this.pruneWhenEmpty = pruneWhenEmpty; + this.rowSemantics = rowSemantics; + this.passThroughColumns = passThroughColumns; + } + + public String getArgumentName() + { + return argumentName; + } + + public Optional getName() + { + return name; + } + + public Relation 
getRelation() + { + return relation; + } + + public Optional> getPartitionBy() + { + return partitionBy; + } + + public Optional getOrderBy() + { + return orderBy; + } + + public boolean isPruneWhenEmpty() + { + return pruneWhenEmpty; + } + + public boolean isRowSemantics() + { + return rowSemantics; + } + + public boolean isPassThroughColumns() + { + return passThroughColumns; + } + + public static Builder builder() + { + return new Builder(); + } + + public static final class Builder + { + private String argumentName; + private Optional name = Optional.empty(); + private Relation relation; + private Optional> partitionBy = Optional.empty(); + private Optional orderBy = Optional.empty(); + private boolean pruneWhenEmpty; + private boolean rowSemantics; + private boolean passThroughColumns; + + private Builder() {} + + public Builder withArgumentName(String argumentName) + { + this.argumentName = argumentName; + return this; + } + + public Builder withName(QualifiedName name) + { + this.name = Optional.of(name); + return this; + } + + public Builder withRelation(Relation relation) + { + this.relation = relation; + return this; + } + + public Builder withPartitionBy(List partitionBy) + { + this.partitionBy = Optional.of(partitionBy); + return this; + } + + public Builder withOrderBy(OrderBy orderBy) + { + this.orderBy = Optional.of(orderBy); + return this; + } + + public Builder withPruneWhenEmpty(boolean pruneWhenEmpty) + { + this.pruneWhenEmpty = pruneWhenEmpty; + return this; + } + + public Builder withRowSemantics(boolean rowSemantics) + { + this.rowSemantics = rowSemantics; + return this; + } + + public Builder withPassThroughColumns(boolean passThroughColumns) + { + this.passThroughColumns = passThroughColumns; + return this; + } + + public TableArgumentAnalysis build() + { + return new TableArgumentAnalysis(argumentName, name, relation, partitionBy, orderBy, pruneWhenEmpty, rowSemantics, passThroughColumns); + } + } + } + public static class 
TableFunctionInvocationAnalysis { private final CatalogHandle catalogHandle; private final String functionName; private final Map arguments; + private final List tableArgumentAnalyses; + private final List> copartitioningLists; private final ConnectorTableFunctionHandle connectorTableFunctionHandle; private final ConnectorTransactionHandle transactionHandle; @@ -2053,12 +2199,16 @@ public TableFunctionInvocationAnalysis( CatalogHandle catalogHandle, String functionName, Map arguments, + List tableArgumentAnalyses, + List> copartitioningLists, ConnectorTableFunctionHandle connectorTableFunctionHandle, ConnectorTransactionHandle transactionHandle) { this.catalogHandle = requireNonNull(catalogHandle, "catalogHandle is null"); this.functionName = requireNonNull(functionName, "functionName is null"); this.arguments = ImmutableMap.copyOf(arguments); + this.tableArgumentAnalyses = ImmutableList.copyOf(tableArgumentAnalyses); + this.copartitioningLists = ImmutableList.copyOf(copartitioningLists); this.connectorTableFunctionHandle = requireNonNull(connectorTableFunctionHandle, "connectorTableFunctionHandle is null"); this.transactionHandle = requireNonNull(transactionHandle, "transactionHandle is null"); } @@ -2078,6 +2228,16 @@ public Map getArguments() return arguments; } + public List getTableArgumentAnalyses() + { + return tableArgumentAnalyses; + } + + public List> getCopartitioningLists() + { + return copartitioningLists; + } + public ConnectorTableFunctionHandle getConnectorTableFunctionHandle() { return connectorTableFunctionHandle; diff --git a/core/trino-main/src/main/java/io/trino/sql/analyzer/StatementAnalyzer.java b/core/trino-main/src/main/java/io/trino/sql/analyzer/StatementAnalyzer.java index 1ea0c0a084ed..77684bbe55ac 100644 --- a/core/trino-main/src/main/java/io/trino/sql/analyzer/StatementAnalyzer.java +++ b/core/trino-main/src/main/java/io/trino/sql/analyzer/StatementAnalyzer.java @@ -22,6 +22,7 @@ import com.google.common.collect.ImmutableSet; import 
com.google.common.collect.Iterables; import com.google.common.collect.ListMultimap; +import com.google.common.collect.Multimap; import com.google.common.collect.Streams; import com.google.common.math.IntMath; import io.airlift.slice.Slice; @@ -74,11 +75,13 @@ import io.trino.spi.ptf.ArgumentSpecification; import io.trino.spi.ptf.ConnectorTableFunction; import io.trino.spi.ptf.Descriptor; +import io.trino.spi.ptf.DescriptorArgument; import io.trino.spi.ptf.DescriptorArgumentSpecification; import io.trino.spi.ptf.ReturnTypeSpecification; import io.trino.spi.ptf.ReturnTypeSpecification.DescribedTable; import io.trino.spi.ptf.ScalarArgument; import io.trino.spi.ptf.ScalarArgumentSpecification; +import io.trino.spi.ptf.TableArgument; import io.trino.spi.ptf.TableArgumentSpecification; import io.trino.spi.ptf.TableFunctionAnalysis; import io.trino.spi.security.AccessDeniedException; @@ -104,6 +107,7 @@ import io.trino.sql.analyzer.Analysis.ResolvedWindow; import io.trino.sql.analyzer.Analysis.SelectExpression; import io.trino.sql.analyzer.Analysis.SourceColumn; +import io.trino.sql.analyzer.Analysis.TableArgumentAnalysis; import io.trino.sql.analyzer.Analysis.TableFunctionInvocationAnalysis; import io.trino.sql.analyzer.Analysis.UnnestAnalysis; import io.trino.sql.analyzer.PatternRecognitionAnalyzer.PatternRecognitionAnalysis; @@ -141,6 +145,7 @@ import io.trino.sql.tree.DropSchema; import io.trino.sql.tree.DropTable; import io.trino.sql.tree.DropView; +import io.trino.sql.tree.EmptyTableTreatment; import io.trino.sql.tree.Except; import io.trino.sql.tree.Execute; import io.trino.sql.tree.Explain; @@ -288,6 +293,7 @@ import static io.trino.spi.StandardErrorCode.FUNCTION_NOT_WINDOW; import static io.trino.spi.StandardErrorCode.INVALID_ARGUMENTS; import static io.trino.spi.StandardErrorCode.INVALID_COLUMN_REFERENCE; +import static io.trino.spi.StandardErrorCode.INVALID_COPARTITIONING; import static io.trino.spi.StandardErrorCode.INVALID_FUNCTION_ARGUMENT; import static 
io.trino.spi.StandardErrorCode.INVALID_LIMIT_CLAUSE; import static io.trino.spi.StandardErrorCode.INVALID_ORDER_BY; @@ -321,6 +327,7 @@ import static io.trino.spi.connector.StandardWarningCode.REDUNDANT_ORDER_BY; import static io.trino.spi.function.FunctionKind.AGGREGATE; import static io.trino.spi.function.FunctionKind.WINDOW; +import static io.trino.spi.ptf.DescriptorArgument.NULL_DESCRIPTOR; import static io.trino.spi.ptf.ReturnTypeSpecification.GenericTable.GENERIC_TABLE; import static io.trino.spi.ptf.ReturnTypeSpecification.OnlyPassThrough.ONLY_PASS_THROUGH; import static io.trino.spi.type.BigintType.BIGINT; @@ -348,6 +355,7 @@ import static io.trino.sql.analyzer.ScopeReferenceExtractor.getReferencesToScope; import static io.trino.sql.analyzer.SemanticExceptions.semanticException; import static io.trino.sql.analyzer.TypeSignatureProvider.fromTypes; +import static io.trino.sql.analyzer.TypeSignatureTranslator.toTypeSignature; import static io.trino.sql.planner.ExpressionInterpreter.evaluateConstantExpression; import static io.trino.sql.tree.BooleanLiteral.TRUE_LITERAL; import static io.trino.sql.tree.DereferenceExpression.getQualifiedName; @@ -1499,24 +1507,26 @@ protected Scope visitTableFunctionInvocation(TableFunctionInvocation node, Optio ConnectorTableFunction function = tableFunctionMetadata.getFunction(); CatalogHandle catalogHandle = tableFunctionMetadata.getCatalogHandle(); - Map passedArguments = analyzeArguments(node, function.getArguments(), node.getArguments()); + Node errorLocation = node; + if (!node.getArguments().isEmpty()) { + errorLocation = node.getArguments().get(0); + } + + ArgumentsAnalysis argumentsAnalysis = analyzeArguments(function.getArguments(), node.getArguments(), scope, errorLocation); ConnectorTransactionHandle transactionHandle = transactionManager.getConnectorTransaction(session.getRequiredTransactionId(), catalogHandle); - TableFunctionAnalysis functionAnalysis = function.analyze(session.toConnectorSession(catalogHandle), 
transactionHandle, passedArguments); - analysis.setTableFunctionAnalysis(node, new TableFunctionInvocationAnalysis(catalogHandle, function.getName(), passedArguments, functionAnalysis.getHandle(), transactionHandle)); - - // TODO process the copartitioning: - // 1. validate input table references - // 2. the copartitioned tables in each set must be partitioned, and have the same number of partitioning columns - // 3. the corresponding columns must be comparable - // 4. within a set, determine and record coercions of the corresponding columns to a common supertype - // Note that if a table is part of multiple copartitioning sets, it might require a different coercion for a column - // per each set. Additionally, there might be another coercion required by the Table Function logic. Also, since - // all partitioning columns are passed-through, we also need an un-coerced copy. - // See ExpressionAnalyzer.sortKeyCoercionsForFrameBoundCalculation for multiple coercions on a column. - if (!node.getCopartitioning().isEmpty()) { - throw semanticException(NOT_SUPPORTED, node, "COPARTITION clause is not yet supported for table functions"); - } + TableFunctionAnalysis functionAnalysis = function.analyze(session.toConnectorSession(catalogHandle), transactionHandle, argumentsAnalysis.getPassedArguments()); + + List> copartitioningLists = analyzeCopartitioning(node.getCopartitioning(), argumentsAnalysis.getTableArgumentAnalyses()); + + analysis.setTableFunctionAnalysis(node, new TableFunctionInvocationAnalysis( + catalogHandle, + function.getName(), + argumentsAnalysis.getPassedArguments(), + argumentsAnalysis.getTableArgumentAnalyses(), + copartitioningLists, + functionAnalysis.getHandle(), + transactionHandle)); // determine the result relation type. 
// The result relation type of a table function consists of: @@ -1544,6 +1554,7 @@ protected Scope visitTableFunctionInvocation(TableFunctionInvocation node, Optio } // currently we don't support input tables, so the output consists of proper columns only + // TODO implement SQL standard ISO/IEC 9075-2, 4.33 SQL-invoked routines, p. 123 List fields = properColumnsDescriptor.getFields().stream() // per spec, field names are mandatory .map(field -> Field.newUnqualified(field.getName(), field.getType().orElseThrow(() -> new IllegalStateException("missing returned type for proper field")))) @@ -1568,19 +1579,14 @@ private Optional resolveTableFunction(TableFunctionInvoca return Optional.empty(); } - private Map analyzeArguments(Node node, List argumentSpecifications, List arguments) + private ArgumentsAnalysis analyzeArguments(List argumentSpecifications, List arguments, Optional scope, Node errorLocation) { - Node errorLocation = node; - if (!arguments.isEmpty()) { - errorLocation = arguments.get(0); - } - if (argumentSpecifications.size() < arguments.size()) { throw semanticException(INVALID_ARGUMENTS, errorLocation, "Too many arguments. 
Expected at most %s arguments, got %s arguments", argumentSpecifications.size(), arguments.size()); } if (argumentSpecifications.isEmpty()) { - return ImmutableMap.of(); + return new ArgumentsAnalysis(ImmutableMap.of(), ImmutableList.of()); } boolean argumentsPassedByName = !arguments.isEmpty() && arguments.stream().allMatch(argument -> argument.getName().isPresent()); @@ -1590,6 +1596,7 @@ private Map analyzeArguments(Node node, List passedArguments = ImmutableMap.builder(); + ImmutableList.Builder tableArgumentAnalyses = ImmutableList.builder(); if (argumentsPassedByName) { Map argumentSpecificationsByName = new HashMap<>(); for (ArgumentSpecification argumentSpecification : argumentSpecifications) { @@ -1608,7 +1615,9 @@ private Map analyzeArguments(Node node, List entry : argumentSpecificationsByName.entrySet()) { @@ -1620,7 +1629,9 @@ private Map analyzeArguments(Node node, List analyzeArguments(Node node, List scope) { String actualType; if (argument.getValue() instanceof TableFunctionTableArgument) { @@ -1656,13 +1667,7 @@ else if (argument.getValue() instanceof Expression) { } throw semanticException(INVALID_FUNCTION_ARGUMENT, argument, "Invalid argument %s. Expected table, got %s", argumentSpecification.getName(), actualType); } - // TODO analyze the argument - // 1. process the Relation - // 2. partitioning and ordering must only apply to tables with set semantics - // 3. validate partitioning and ordering using `validateAndGetInputField()` - // 4. validate the prune when empty property vs argument specification (forbidden for row semantics; override? -> check spec) - // 5. 
return Argument - throw semanticException(NOT_SUPPORTED, argument, "Table arguments are not yet supported for table functions"); + return analyzeTableArgument(argument, (TableArgumentSpecification) argumentSpecification, scope); } if (argumentSpecification instanceof DescriptorArgumentSpecification) { if (!(argument.getValue() instanceof TableFunctionDescriptorArgument)) { @@ -1672,7 +1677,7 @@ else if (argument.getValue() instanceof Expression) { } throw semanticException(INVALID_FUNCTION_ARGUMENT, argument, "Invalid argument %s. Expected descriptor, got %s", argumentSpecification.getName(), actualType); } - throw semanticException(NOT_SUPPORTED, argument, "Descriptor arguments are not yet supported for table functions"); + return analyzeDescriptorArgument((TableFunctionDescriptorArgument) argument.getValue()); } if (argumentSpecification instanceof ScalarArgumentSpecification) { if (!(argument.getValue() instanceof Expression)) { @@ -1689,7 +1694,110 @@ else if (argument.getValue() instanceof Expression) { throw new IllegalStateException("Unexpected argument specification: " + argumentSpecification.getClass().getSimpleName()); } - private Argument analyzeScalarArgument(Expression expression, Type type) + private ArgumentAnalysis analyzeTableArgument(TableFunctionArgument argument, TableArgumentSpecification argumentSpecification, Optional scope) + { + TableFunctionTableArgument tableArgument = (TableFunctionTableArgument) argument.getValue(); + + TableArgument.Builder argumentBuilder = TableArgument.builder(); + TableArgumentAnalysis.Builder analysisBuilder = TableArgumentAnalysis.builder(); + analysisBuilder.withArgumentName(argumentSpecification.getName()); + + // process the relation + Relation relation = tableArgument.getTable(); + analysisBuilder.withRelation(relation); + Scope argumentScope = process(relation, scope); + QualifiedName relationName = analysis.getRelationName(relation); + if (relationName != null) { + analysisBuilder.withName(relationName); + 
} + + argumentBuilder.rowType(RowType.from(argumentScope.getRelationType().getVisibleFields().stream() + .map(field -> new RowType.Field(field.getName(), field.getType())) + .collect(toImmutableList()))); + + // analyze PARTITION BY + if (tableArgument.getPartitionBy().isPresent()) { + if (argumentSpecification.isRowSemantics()) { + throw semanticException(INVALID_FUNCTION_ARGUMENT, argument, "Invalid argument %s. Partitioning specified for table argument with row semantics", argumentSpecification.getName()); + } + List partitionBy = tableArgument.getPartitionBy().get(); + analysisBuilder.withPartitionBy(partitionBy); + partitionBy.stream() + .forEach(partitioningColumn -> { + validateAndGetInputField(partitioningColumn, argumentScope); + Type type = analyzeExpression(partitioningColumn, argumentScope).getType(partitioningColumn); + if (!type.isComparable()) { + throw semanticException(TYPE_MISMATCH, partitioningColumn, "%s is not comparable, and therefore cannot be used in PARTITION BY", type); + } + }); + argumentBuilder.partitionBy(partitionBy.stream() + // each expression is either an Identifier or a DereferenceExpression + .map(Expression::toString) + .collect(toImmutableList())); + } + + // analyze ORDER BY + if (tableArgument.getOrderBy().isPresent()) { + if (argumentSpecification.isRowSemantics()) { + throw semanticException(INVALID_FUNCTION_ARGUMENT, argument, "Invalid argument %s. 
Ordering specified for table argument with row semantics", argumentSpecification.getName()); + } + OrderBy orderBy = tableArgument.getOrderBy().get(); + analysisBuilder.withOrderBy(orderBy); + orderBy.getSortItems().stream() + .map(SortItem::getSortKey) + .forEach(orderingColumn -> { + validateAndGetInputField(orderingColumn, argumentScope); + Type type = analyzeExpression(orderingColumn, argumentScope).getType(orderingColumn); + if (!type.isOrderable()) { + throw semanticException(TYPE_MISMATCH, orderingColumn, "%s is not orderable, and therefore cannot be used in ORDER BY", type); + } + }); + argumentBuilder.orderBy(orderBy.getSortItems().stream() + // each sort key is either an Identifier or a DereferenceExpression + .map(sortItem -> sortItem.getSortKey().toString()) + .collect(toImmutableList())); + } + + // analyze the PRUNE/KEEP WHEN EMPTY property + boolean pruneWhenEmpty = argumentSpecification.isPruneWhenEmpty(); + if (tableArgument.getEmptyTableTreatment().isPresent()) { + if (argumentSpecification.isRowSemantics()) { + throw semanticException(INVALID_FUNCTION_ARGUMENT, tableArgument.getEmptyTableTreatment().get(), "Invalid argument %s. 
Empty behavior specified for table argument with row semantics", argumentSpecification.getName()); + } + pruneWhenEmpty = tableArgument.getEmptyTableTreatment().get().getTreatment() == EmptyTableTreatment.Treatment.PRUNE; + } + analysisBuilder.withPruneWhenEmpty(pruneWhenEmpty); + + // record remaining properties + analysisBuilder.withRowSemantics(argumentSpecification.isRowSemantics()); + analysisBuilder.withPassThroughColumns(argumentSpecification.isPassThroughColumns()); + + return new ArgumentAnalysis(argumentBuilder.build(), Optional.of(analysisBuilder.build())); + } + + private ArgumentAnalysis analyzeDescriptorArgument(TableFunctionDescriptorArgument argument) + { + return new ArgumentAnalysis( + argument.getDescriptor() + .map(descriptor -> DescriptorArgument.builder() + .descriptor(new Descriptor(descriptor.getFields().stream() + .map(field -> new Descriptor.Field( + field.getName().getCanonicalValue(), + field.getType().map(type -> { + try { + return plannerContext.getTypeManager().getType(toTypeSignature(type)); + } + catch (TypeNotFoundException e) { + throw semanticException(TYPE_MISMATCH, type, "Unknown type: %s", type); + } + }))) + .collect(toImmutableList()))) + .build()) + .orElse(NULL_DESCRIPTOR), + Optional.empty()); + } + + private ArgumentAnalysis analyzeScalarArgument(Expression expression, Type type) { // inline parameters Expression inlined = ExpressionTreeRewriter.rewriteWith(new ExpressionRewriter<>() @@ -1709,10 +1817,12 @@ public Expression rewriteParameter(Parameter node, Void context, ExpressionTreeR }, expression); // currently, only constant arguments are supported Object constantValue = ExpressionInterpreter.evaluateConstantExpression(inlined, type, plannerContext, session, accessControl, analysis.getParameters()); - return ScalarArgument.builder() - .type(type) - .value(constantValue) - .build(); + return new ArgumentAnalysis( + ScalarArgument.builder() + .type(type) + .value(constantValue) + .build(), + Optional.empty()); } 
private Argument analyzeDefault(ArgumentSpecification argumentSpecification, Node errorLocation) @@ -1724,7 +1834,9 @@ private Argument analyzeDefault(ArgumentSpecification argumentSpecification, Nod checkArgument(!(argumentSpecification instanceof TableArgumentSpecification), "invalid table argument specification: default set"); if (argumentSpecification instanceof DescriptorArgumentSpecification) { - throw semanticException(NOT_SUPPORTED, errorLocation, "Descriptor arguments are not yet supported for table functions"); + return DescriptorArgument.builder() + .descriptor((Descriptor) argumentSpecification.getDefaultValue()) + .build(); } if (argumentSpecification instanceof ScalarArgumentSpecification) { return ScalarArgument.builder() @@ -1736,6 +1848,119 @@ private Argument analyzeDefault(ArgumentSpecification argumentSpecification, Nod throw new IllegalStateException("Unexpected argument specification: " + argumentSpecification.getClass().getSimpleName()); } + private List> analyzeCopartitioning(List> copartitioning, List tableArgumentAnalyses) + { + // map table arguments by relation names. usa a multimap, because multiple arguments can have the same value, e.g. 
input_1 => tpch.tiny.orders, input_2 => tpch.tiny.orders + ImmutableMultimap.Builder unqualifiedInputsBuilder = ImmutableMultimap.builder(); + ImmutableMultimap.Builder qualifiedInputsBuilder = ImmutableMultimap.builder(); + tableArgumentAnalyses.stream() + .filter(argument -> argument.getName().isPresent()) + .forEach(argument -> { + QualifiedName name = argument.getName().get(); + if (name.getParts().size() == 1) { + unqualifiedInputsBuilder.put(name, argument); + } + else if (name.getParts().size() == 3) { + qualifiedInputsBuilder.put(name, argument); + } + else { + throw new IllegalStateException("relation name should be unqualified or fully qualified"); + } + }); + Multimap unqualifiedInputs = unqualifiedInputsBuilder.build(); + Multimap qualifiedInputs = qualifiedInputsBuilder.build(); + + ImmutableList.Builder> copartitionBuilder = ImmutableList.builder(); + Set referencedArguments = new HashSet<>(); + for (List nameList : copartitioning) { + ImmutableList.Builder copartitionListBuilder = ImmutableList.builder(); + + // resolve copartition tables as references to table arguments + for (QualifiedName name : nameList) { + Collection candidates = emptyList(); + if (name.getParts().size() == 1) { + // try to match unqualified name. 
it might be a reference to a CTE or an aliased relation + candidates = unqualifiedInputs.get(name); + } + if (candidates.isEmpty()) { + // qualify the name using current schema and catalog + QualifiedObjectName fullyQualifiedName = createQualifiedObjectName(session, name.getOriginalParts().get(0), name); + candidates = qualifiedInputs.get(QualifiedName.of(fullyQualifiedName.getCatalogName(), fullyQualifiedName.getSchemaName(), fullyQualifiedName.getObjectName())); + } + if (candidates.isEmpty()) { + throw semanticException(INVALID_COPARTITIONING, name.getOriginalParts().get(0), "No table argument found for name: " + name); + } + if (candidates.size() > 1) { + throw semanticException(INVALID_COPARTITIONING, name.getOriginalParts().get(0), "Ambiguous reference: multiple table arguments found for name: " + name); + } + TableArgumentAnalysis argument = getOnlyElement(candidates); + if (!referencedArguments.add(argument.getArgumentName())) { + // multiple references to argument in COPARTITION clause are implicitly prohibited by + // ISO/IEC TR REPORT 19075-7, p.33, Feature B203, “More than one copartition specification” + throw semanticException(INVALID_COPARTITIONING, name.getOriginalParts().get(0), "Multiple references to table argument: %s in COPARTITION clause", name); + } + copartitionListBuilder.add(argument); + } + List copartitionList = copartitionListBuilder.build(); + + // analyze partitioning columns + copartitionList.stream() + .filter(argument -> argument.getPartitionBy().isEmpty()) + .findFirst().ifPresent(unpartitioned -> { + throw semanticException(INVALID_COPARTITIONING, unpartitioned.getRelation(), "Table %s referenced in COPARTITION clause is not partitioned", unpartitioned.getName().orElseThrow()); + }); + // TODO make sure that copartitioned tables cannot have empty partitioning lists. 
+ // ISO/IEC TR REPORT 19075-7, 4.5 Partitioning and ordering, p.25 is not clear: "With copartitioning, the copartitioned table arguments must have the same number of partitioning columns, + // and corresponding partitioning columns must be comparable. The DBMS effectively performs a full outer equijoin on the copartitioning columns" + copartitionList.stream() + .filter(argument -> argument.getPartitionBy().orElseThrow().isEmpty()) + .findFirst().ifPresent(partitionedOnEmpty -> { + // table is partitioned but no partitioning columns are specified (single partition) + throw semanticException(INVALID_COPARTITIONING, partitionedOnEmpty.getRelation(), "No partitioning columns specified for table %s referenced in COPARTITION clause", partitionedOnEmpty.getName().orElseThrow()); + }); + List> partitioningColumns = copartitionList.stream() + .map(TableArgumentAnalysis::getPartitionBy) + .map(Optional::orElseThrow) + .collect(toImmutableList()); + if (partitioningColumns.stream() + .map(List::size) + .distinct() + .count() > 1) { + throw semanticException(INVALID_COPARTITIONING, nameList.get(0).getOriginalParts().get(0), "Numbers of partitioning columns in copartitioned tables do not match"); + } + + // coerce corresponding copartition columns to common supertype + for (int index = 0; index < partitioningColumns.get(0).size(); index++) { + Type commonSuperType = analysis.getType(partitioningColumns.get(0).get(index)); + // find common supertype + for (List columnList : partitioningColumns) { + Optional superType = typeCoercion.getCommonSuperType(commonSuperType, analysis.getType(columnList.get(index))); + if (superType.isEmpty()) { + throw semanticException(TYPE_MISMATCH, nameList.get(0).getOriginalParts().get(0), "Partitioning columns in copartitioned tables have incompatible types"); + } + commonSuperType = superType.get(); + } + for (List columnList : partitioningColumns) { + Expression column = columnList.get(index); + Type type = analysis.getType(column); + if 
(!type.equals(commonSuperType)) { + if (!typeCoercion.canCoerce(type, commonSuperType)) { + throw semanticException(TYPE_MISMATCH, column, "Cannot coerce column of type %s to common supertype: %s", type.getDisplayName(), commonSuperType.getDisplayName()); + } + analysis.addCoercion(column, commonSuperType, typeCoercion.isTypeOnlyCoercion(type, commonSuperType)); + } + } + } + + // record the resolved copartition arguments by argument names + copartitionBuilder.add(copartitionList.stream() + .map(TableArgumentAnalysis::getArgumentName) + .collect(toImmutableList())); + } + + return copartitionBuilder.build(); + } + private Optional getMaterializedViewStorageTableName(MaterializedViewDefinition viewDefinition) { if (viewDefinition.getStorageTable().isEmpty()) { @@ -4919,4 +5144,48 @@ private static boolean hasScopeAsLocalParent(Scope root, Scope parent) return false; } + + private static final class ArgumentAnalysis + { + private final Argument argument; + private final Optional tableArgumentAnalysis; + + public ArgumentAnalysis(Argument argument, Optional tableArgumentAnalysis) + { + this.argument = requireNonNull(argument, "argument is null"); + this.tableArgumentAnalysis = requireNonNull(tableArgumentAnalysis, "tableArgumentAnalysis is null"); + } + + public Argument getArgument() + { + return argument; + } + + public Optional getTableArgumentAnalysis() + { + return tableArgumentAnalysis; + } + } + + private static final class ArgumentsAnalysis + { + private final Map passedArguments; + private final List tableArgumentAnalyses; + + public ArgumentsAnalysis(Map passedArguments, List tableArgumentAnalyses) + { + this.passedArguments = ImmutableMap.copyOf(requireNonNull(passedArguments, "passedArguments is null")); + this.tableArgumentAnalyses = ImmutableList.copyOf(requireNonNull(tableArgumentAnalyses, "tableArgumentAnalyses is null")); + } + + public Map getPassedArguments() + { + return passedArguments; + } + + public List getTableArgumentAnalyses() + { + return 
tableArgumentAnalyses; + } + } } diff --git a/core/trino-main/src/main/java/io/trino/sql/planner/RelationPlanner.java b/core/trino-main/src/main/java/io/trino/sql/planner/RelationPlanner.java index 5aff24baf19c..19d2d37b0d40 100644 --- a/core/trino-main/src/main/java/io/trino/sql/planner/RelationPlanner.java +++ b/core/trino-main/src/main/java/io/trino/sql/planner/RelationPlanner.java @@ -88,7 +88,9 @@ import io.trino.sql.tree.SubqueryExpression; import io.trino.sql.tree.SubsetDefinition; import io.trino.sql.tree.Table; +import io.trino.sql.tree.TableFunctionDescriptorArgument; import io.trino.sql.tree.TableFunctionInvocation; +import io.trino.sql.tree.TableFunctionTableArgument; import io.trino.sql.tree.TableSubquery; import io.trino.sql.tree.Union; import io.trino.sql.tree.Unnest; @@ -327,6 +329,16 @@ private RelationPlan addColumnMasks(Table table, RelationPlan plan) @Override protected RelationPlan visitTableFunctionInvocation(TableFunctionInvocation node, Void context) { + node.getArguments().stream() + .forEach(argument -> { + if (argument.getValue() instanceof TableFunctionTableArgument) { + throw semanticException(NOT_SUPPORTED, argument, "Table arguments are not yet supported for table functions"); + } + if (argument.getValue() instanceof TableFunctionDescriptorArgument) { + throw semanticException(NOT_SUPPORTED, argument, "Descriptor arguments are not yet supported for table functions"); + } + }); + TableFunctionInvocationAnalysis functionAnalysis = analysis.getTableFunctionAnalysis(node); // TODO handle input relations: diff --git a/core/trino-main/src/test/java/io/trino/connector/TestingTableFunctions.java b/core/trino-main/src/test/java/io/trino/connector/TestingTableFunctions.java index 2b9df7592e9f..3fb954fd9235 100644 --- a/core/trino-main/src/test/java/io/trino/connector/TestingTableFunctions.java +++ b/core/trino-main/src/test/java/io/trino/connector/TestingTableFunctions.java @@ -23,8 +23,10 @@ import io.trino.spi.ptf.Argument; import 
io.trino.spi.ptf.ConnectorTableFunctionHandle; import io.trino.spi.ptf.Descriptor; +import io.trino.spi.ptf.DescriptorArgumentSpecification; import io.trino.spi.ptf.ScalarArgument; import io.trino.spi.ptf.ScalarArgumentSpecification; +import io.trino.spi.ptf.TableArgumentSpecification; import io.trino.spi.ptf.TableFunctionAnalysis; import java.util.List; @@ -39,6 +41,13 @@ public class TestingTableFunctions { + private static final String SCHEMA_NAME = "system"; + private static final ConnectorTableFunctionHandle HANDLE = new ConnectorTableFunctionHandle() {}; + private static final TableFunctionAnalysis ANALYSIS = TableFunctionAnalysis.builder() + .handle(HANDLE) + .returnedType(new Descriptor(ImmutableList.of(new Descriptor.Field("column", Optional.of(BOOLEAN))))) + .build(); + /** * A table function returning a table with single empty column of type BOOLEAN. * The argument `COLUMN` is the column name. @@ -48,7 +57,6 @@ public class TestingTableFunctions public static class SimpleTableFunction extends AbstractConnectorTableFunction { - private static final String SCHEMA_NAME = "system"; private static final String FUNCTION_NAME = "simple_table_function"; private static final String TABLE_NAME = "simple_table"; @@ -101,4 +109,125 @@ public MockConnectorTableHandle getTableHandle() } } } + + public static class TwoScalarArgumentsFunction + extends AbstractConnectorTableFunction + { + public TwoScalarArgumentsFunction() + { + super( + SCHEMA_NAME, + "two_arguments_function", + ImmutableList.of( + ScalarArgumentSpecification.builder() + .name("TEXT") + .type(VARCHAR) + .build(), + ScalarArgumentSpecification.builder() + .name("NUMBER") + .type(BIGINT) + .defaultValue(null) + .build()), + GENERIC_TABLE); + } + + @Override + public TableFunctionAnalysis analyze(ConnectorSession session, ConnectorTransactionHandle transaction, Map arguments) + { + return ANALYSIS; + } + } + + public static class TableArgumentFunction + extends AbstractConnectorTableFunction + { + public 
TableArgumentFunction() + { + super( + SCHEMA_NAME, + "table_argument_function", + ImmutableList.of( + TableArgumentSpecification.builder() + .name("INPUT") + .build()), + GENERIC_TABLE); + } + + @Override + public TableFunctionAnalysis analyze(ConnectorSession session, ConnectorTransactionHandle transaction, Map arguments) + { + return ANALYSIS; + } + } + + public static class TableArgumentRowSemanticsFunction + extends AbstractConnectorTableFunction + { + public TableArgumentRowSemanticsFunction() + { + super( + SCHEMA_NAME, + "table_argument_row_semantics_function", + ImmutableList.of( + TableArgumentSpecification.builder() + .name("INPUT") + .rowSemantics() + .build()), + GENERIC_TABLE); + } + + @Override + public TableFunctionAnalysis analyze(ConnectorSession session, ConnectorTransactionHandle transaction, Map arguments) + { + return ANALYSIS; + } + } + + public static class DescriptorArgumentFunction + extends AbstractConnectorTableFunction + { + public DescriptorArgumentFunction() + { + super( + SCHEMA_NAME, + "descriptor_argument_function", + ImmutableList.of( + DescriptorArgumentSpecification.builder() + .name("SCHEMA") + .defaultValue(null) + .build()), + GENERIC_TABLE); + } + + @Override + public TableFunctionAnalysis analyze(ConnectorSession session, ConnectorTransactionHandle transaction, Map arguments) + { + return ANALYSIS; + } + } + + public static class TwoTableArgumentsFunction + extends AbstractConnectorTableFunction + { + public TwoTableArgumentsFunction() + { + super( + SCHEMA_NAME, + "two_table_arguments_function", + ImmutableList.of( + TableArgumentSpecification.builder() + .name("INPUT1") + .build(), + TableArgumentSpecification.builder() + .name("INPUT2") + .build()), + GENERIC_TABLE); + } + + @Override + public TableFunctionAnalysis analyze(ConnectorSession session, ConnectorTransactionHandle transaction, Map arguments) + { + return ANALYSIS; + } + } } diff --git a/core/trino-main/src/test/java/io/trino/sql/analyzer/TestAnalyzer.java 
b/core/trino-main/src/test/java/io/trino/sql/analyzer/TestAnalyzer.java index 6b0dd3790460..329ad50ca54e 100644 --- a/core/trino-main/src/test/java/io/trino/sql/analyzer/TestAnalyzer.java +++ b/core/trino-main/src/test/java/io/trino/sql/analyzer/TestAnalyzer.java @@ -21,9 +21,15 @@ import io.trino.FeaturesConfig; import io.trino.Session; import io.trino.SystemSessionProperties; +import io.trino.connector.CatalogHandle; import io.trino.connector.CatalogServiceProvider; import io.trino.connector.MockConnectorFactory; import io.trino.connector.StaticConnectorFactory; +import io.trino.connector.TestingTableFunctions.DescriptorArgumentFunction; +import io.trino.connector.TestingTableFunctions.TableArgumentFunction; +import io.trino.connector.TestingTableFunctions.TableArgumentRowSemanticsFunction; +import io.trino.connector.TestingTableFunctions.TwoScalarArgumentsFunction; +import io.trino.connector.TestingTableFunctions.TwoTableArgumentsFunction; import io.trino.execution.DynamicFilterConfig; import io.trino.execution.QueryManagerConfig; import io.trino.execution.TaskManagerConfig; @@ -32,6 +38,7 @@ import io.trino.memory.MemoryManagerConfig; import io.trino.memory.NodeMemoryConfig; import io.trino.metadata.AnalyzePropertyManager; +import io.trino.metadata.CatalogTableFunctions; import io.trino.metadata.ColumnPropertyManager; import io.trino.metadata.InternalFunctionBundle; import io.trino.metadata.MaterializedViewDefinition; @@ -40,7 +47,10 @@ import io.trino.metadata.QualifiedObjectName; import io.trino.metadata.SchemaPropertyManager; import io.trino.metadata.SessionPropertyManager; +import io.trino.metadata.TableFunctionRegistry; import io.trino.metadata.TableHandle; +import io.trino.metadata.TableProceduresPropertyManager; +import io.trino.metadata.TableProceduresRegistry; import io.trino.metadata.TablePropertyManager; import io.trino.metadata.ViewColumn; import io.trino.metadata.ViewDefinition; @@ -65,6 +75,7 @@ import io.trino.spi.type.RowType; import 
io.trino.spi.type.Type; import io.trino.sql.PlannerContext; +import io.trino.sql.parser.ParsingException; import io.trino.sql.parser.ParsingOptions; import io.trino.sql.parser.SqlParser; import io.trino.sql.planner.OptimizerConfig; @@ -76,6 +87,8 @@ import io.trino.testing.TestingMetadata; import io.trino.testing.TestingMetadata.TestingTableHandle; import io.trino.testing.assertions.TrinoExceptionAssert; +import io.trino.transaction.NoOpTransactionManager; +import io.trino.transaction.TransactionId; import io.trino.transaction.TransactionManager; import org.intellij.lang.annotations.Language; import org.testng.annotations.AfterClass; @@ -106,6 +119,7 @@ import static io.trino.spi.StandardErrorCode.FUNCTION_NOT_FOUND; import static io.trino.spi.StandardErrorCode.INVALID_ARGUMENTS; import static io.trino.spi.StandardErrorCode.INVALID_COLUMN_REFERENCE; +import static io.trino.spi.StandardErrorCode.INVALID_COPARTITIONING; import static io.trino.spi.StandardErrorCode.INVALID_FUNCTION_ARGUMENT; import static io.trino.spi.StandardErrorCode.INVALID_LABEL; import static io.trino.spi.StandardErrorCode.INVALID_LIMIT_CLAUSE; @@ -124,6 +138,7 @@ import static io.trino.spi.StandardErrorCode.INVALID_WINDOW_MEASURE; import static io.trino.spi.StandardErrorCode.INVALID_WINDOW_REFERENCE; import static io.trino.spi.StandardErrorCode.MISMATCHED_COLUMN_ALIASES; +import static io.trino.spi.StandardErrorCode.MISSING_ARGUMENT; import static io.trino.spi.StandardErrorCode.MISSING_CATALOG_NAME; import static io.trino.spi.StandardErrorCode.MISSING_COLUMN_ALIASES; import static io.trino.spi.StandardErrorCode.MISSING_COLUMN_NAME; @@ -164,7 +179,6 @@ import static io.trino.spi.type.VarcharType.VARCHAR; import static io.trino.spi.type.VarcharType.createUnboundedVarcharType; import static io.trino.spi.type.VarcharType.createVarcharType; -import static io.trino.sql.analyzer.StatementAnalyzerFactory.createTestingStatementAnalyzerFactory; import static 
io.trino.sql.parser.ParsingOptions.DecimalLiteralTreatment.AS_DECIMAL; import static io.trino.sql.parser.ParsingOptions.DecimalLiteralTreatment.AS_DOUBLE; import static io.trino.testing.TestingAccessControlManager.TestingPrivilegeType.SELECT_COLUMN; @@ -180,6 +194,7 @@ import static java.util.Collections.nCopies; import static java.util.Objects.requireNonNull; import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; @Test(singleThreaded = true) public class TestAnalyzer @@ -6151,6 +6166,321 @@ public void testJsonArrayInAggregationContext() .hasMessage("line 1:8: 'JSON_ARRAY(b ABSENT ON NULL)' must be an aggregate expression or appear in GROUP BY clause"); } + @Test + public void testTableFunctionNotFound() + { + assertFails("SELECT * FROM TABLE(non_existent_table_function())") + .hasErrorCode(FUNCTION_NOT_FOUND) + .hasMessage("line 1:21: Table function non_existent_table_function not registered"); + } + + @Test + public void testTableFunctionArguments() + { + assertFails("SELECT * FROM TABLE(system.two_arguments_function(1, 2, 3))") + .hasErrorCode(INVALID_ARGUMENTS) + .hasMessage("line 1:51: Too many arguments. 
Expected at most 2 arguments, got 3 arguments"); + + analyze("SELECT * FROM TABLE(system.two_arguments_function('foo'))"); + analyze("SELECT * FROM TABLE(system.two_arguments_function(text => 'foo'))"); + analyze("SELECT * FROM TABLE(system.two_arguments_function('foo', 1))"); + analyze("SELECT * FROM TABLE(system.two_arguments_function(text => 'foo', number => 1))"); + + assertFails("SELECT * FROM TABLE(system.two_arguments_function('foo', number => 1))") + .hasErrorCode(INVALID_ARGUMENTS) + .hasMessage("line 1:51: All arguments must be passed by name or all must be passed positionally"); + assertFails("SELECT * FROM TABLE(system.two_arguments_function(text => 'foo', 1))") + .hasErrorCode(INVALID_ARGUMENTS) + .hasMessage("line 1:51: All arguments must be passed by name or all must be passed positionally"); + + assertFails("SELECT * FROM TABLE(system.two_arguments_function(text => 'foo', text => 'bar'))") + .hasErrorCode(INVALID_FUNCTION_ARGUMENT) + .hasMessage("line 1:66: Duplicate argument name: TEXT"); + // argument names are resolved in the canonical form + assertFails("SELECT * FROM TABLE(system.two_arguments_function(text => 'foo', TeXt => 'bar'))") + .hasErrorCode(INVALID_FUNCTION_ARGUMENT) + .hasMessage("line 1:66: Duplicate argument name: TEXT"); + + assertFails("SELECT * FROM TABLE(system.two_arguments_function(text => 'foo', bar => 'bar'))") + .hasErrorCode(INVALID_FUNCTION_ARGUMENT) + .hasMessage("line 1:66: Unexpected argument name: BAR"); + + assertFails("SELECT * FROM TABLE(system.two_arguments_function(number => 1))") + .hasErrorCode(MISSING_ARGUMENT) + .hasMessage("line 1:51: Missing argument: TEXT"); + } + + @Test + public void testTableArgument() + { + // cannot pass a table function as the argument + assertFails("SELECT * FROM TABLE(system.table_argument_function(input => my_schema.my_table_function(1)))") + .hasErrorCode(NOT_SUPPORTED) + .hasMessage("line 1:52: Invalid table argument INPUT. 
Table functions are not allowed as table function arguments"); + + assertThatThrownBy(() -> analyze("SELECT * FROM TABLE(system.table_argument_function(input => my_schema.my_table_function(arg => 1)))")) + .isInstanceOf(ParsingException.class) + .hasMessageContaining("line 1:93: mismatched input '=>'."); + + // cannot pass a table function as the argument, also preceding nested table function with TABLE is incorrect + assertThatThrownBy(() -> analyze("SELECT * FROM TABLE(system.table_argument_function(input => TABLE(my_schema.my_table_function(1))))")) + .isInstanceOf(ParsingException.class) + .hasMessageContaining("line 1:94: mismatched input '('."); + + // a table passed as the argument must be preceded with TABLE + analyze("SELECT * FROM TABLE(system.table_argument_function(input => TABLE(t1)))"); + + assertFails("SELECT * FROM TABLE(system.table_argument_function(input => t1))") + .hasErrorCode(INVALID_FUNCTION_ARGUMENT) + .hasMessage("line 1:52: Invalid argument INPUT. Expected table, got expression"); + + // a query passed as the argument must be preceded with TABLE + analyze("SELECT * FROM TABLE(system.table_argument_function(input => TABLE(SELECT * FROM t1)))"); + + assertThatThrownBy(() -> analyze("SELECT * FROM TABLE(system.table_argument_function(input => SELECT * FROM t1))")) + .isInstanceOf(ParsingException.class) + .hasMessageContaining("line 1:61: mismatched input 'SELECT'."); + + // query passed as the argument is correlated + analyze(""" + SELECT * + FROM + t1 + CROSS JOIN + LATERAL (SELECT * FROM TABLE(system.table_argument_function(input => TABLE(SELECT 1 WHERE a > 0)))) + """); + + // wrong argument type + assertFails("SELECT * FROM TABLE(system.table_argument_function(input => 'foo'))") + .hasErrorCode(INVALID_FUNCTION_ARGUMENT) + .hasMessage("line 1:52: Invalid argument INPUT. 
Expected table, got expression"); + assertFails("SELECT * FROM TABLE(system.table_argument_function(input => DESCRIPTOR(x int, y int)))") + .hasErrorCode(INVALID_FUNCTION_ARGUMENT) + .hasMessage("line 1:52: Invalid argument INPUT. Expected table, got descriptor"); + } + + @Test + public void testTableArgumentProperties() + { + analyze(""" + SELECT * FROM TABLE(system.table_argument_function( + input => TABLE(t1) + PARTITION BY a + KEEP WHEN EMPTY + ORDER BY b)) + """); + + assertFails("SELECT * FROM TABLE(system.table_argument_row_semantics_function(input => TABLE(t1) PARTITION BY a))") + .hasErrorCode(INVALID_FUNCTION_ARGUMENT) + .hasMessage("line 1:66: Invalid argument INPUT. Partitioning specified for table argument with row semantics"); + + assertFails("SELECT * FROM TABLE(system.table_argument_function(input => TABLE(SELECT 1 a) PARTITION BY b))") + .hasErrorCode(COLUMN_NOT_FOUND) + .hasMessage("line 1:92: Column b is not present in the input relation"); + + assertFails("SELECT * FROM TABLE(system.table_argument_function(input => TABLE(SELECT 1 a) ORDER BY 1))") + .hasErrorCode(INVALID_COLUMN_REFERENCE) + .hasMessage("line 1:88: Expected column reference. Actual: 1"); + + assertFails("SELECT * FROM TABLE(system.table_argument_function(input => TABLE(SELECT approx_set(1) a) PARTITION BY a))") + .hasErrorCode(TYPE_MISMATCH) + .hasMessage("line 1:104: HyperLogLog is not comparable, and therefore cannot be used in PARTITION BY"); + + assertFails("SELECT * FROM TABLE(system.table_argument_row_semantics_function(input => TABLE(t1) ORDER BY a))") + .hasErrorCode(INVALID_FUNCTION_ARGUMENT) + .hasMessage("line 1:66: Invalid argument INPUT. 
Ordering specified for table argument with row semantics"); + + assertFails("SELECT * FROM TABLE(system.table_argument_function(input => TABLE(SELECT 1 a) ORDER BY b))") + .hasErrorCode(COLUMN_NOT_FOUND) + .hasMessage("line 1:88: Column b is not present in the input relation"); + + assertFails("SELECT * FROM TABLE(system.table_argument_function(input => TABLE(SELECT 1 a) ORDER BY 1))") + .hasErrorCode(INVALID_COLUMN_REFERENCE) + .hasMessage("line 1:88: Expected column reference. Actual: 1"); + + assertFails("SELECT * FROM TABLE(system.table_argument_function(input => TABLE(SELECT approx_set(1) a) ORDER BY a))") + .hasErrorCode(TYPE_MISMATCH) + .hasMessage("line 1:100: HyperLogLog is not orderable, and therefore cannot be used in ORDER BY"); + + assertFails("SELECT * FROM TABLE(system.table_argument_row_semantics_function(input => TABLE(t1) PRUNE WHEN EMPTY))") + .hasErrorCode(INVALID_FUNCTION_ARGUMENT) + .hasMessage("line 1:85: Invalid argument INPUT. Empty behavior specified for table argument with row semantics"); + + assertFails("SELECT * FROM TABLE(system.table_argument_row_semantics_function(input => TABLE(t1) KEEP WHEN EMPTY))") + .hasErrorCode(INVALID_FUNCTION_ARGUMENT) + .hasMessage("line 1:85: Invalid argument INPUT. Empty behavior specified for table argument with row semantics"); + } + + @Test + public void testDescriptorArgument() + { + analyze("SELECT * FROM TABLE(system.descriptor_argument_function(schema => DESCRIPTOR(x integer, y boolean)))"); + + assertFails("SELECT * FROM TABLE(system.descriptor_argument_function(schema => DESCRIPTOR(1 + 2)))") + .hasErrorCode(INVALID_FUNCTION_ARGUMENT) + .hasMessage("line 1:57: Invalid descriptor argument SCHEMA. Descriptors should be formatted as 'DESCRIPTOR(name [type], ...)'"); + + assertFails("SELECT * FROM TABLE(system.descriptor_argument_function(schema => 1))") + .hasErrorCode(INVALID_FUNCTION_ARGUMENT) + .hasMessage("line 1:57: Invalid argument SCHEMA. 
Expected descriptor, got expression"); + + assertFails("SELECT * FROM TABLE(system.descriptor_argument_function(schema => TABLE(t1)))") + .hasErrorCode(INVALID_FUNCTION_ARGUMENT) + .hasMessage("line 1:57: Invalid argument SCHEMA. Expected descriptor, got table"); + + assertFails("SELECT * FROM TABLE(system.descriptor_argument_function(schema => DESCRIPTOR(x verybigint)))") + .hasErrorCode(TYPE_MISMATCH) + .hasMessage("line 1:80: Unknown type: verybigint"); + } + + @Test + public void testScalarArgument() + { + analyze("SELECT * FROM TABLE(system.two_arguments_function('foo', 1))"); + + assertFails("SELECT * FROM TABLE(system.two_arguments_function(text => 'a', number => DESCRIPTOR(x integer, y boolean)))") + .hasErrorCode(INVALID_FUNCTION_ARGUMENT) + .hasMessage("line 1:64: Invalid argument NUMBER. Expected expression, got descriptor"); + + assertFails("SELECT * FROM TABLE(system.two_arguments_function(text => 'a', number => DESCRIPTOR(1 + 2)))") + .hasErrorCode(INVALID_FUNCTION_ARGUMENT) + .hasMessage("line 1:64: 'descriptor' function is not allowed as a table function argument"); + + assertFails("SELECT * FROM TABLE(system.two_arguments_function(text => 'a', number => TABLE(t1)))") + .hasErrorCode(INVALID_FUNCTION_ARGUMENT) + .hasMessage("line 1:64: Invalid argument NUMBER. Expected expression, got table"); + + assertFails("SELECT * FROM TABLE(system.two_arguments_function(text => 'a', number => (SELECT 1)))") + .hasErrorCode(EXPRESSION_NOT_CONSTANT) + .hasMessage("line 1:74: Constant expression cannot contain a subquery"); + } + + @Test + public void testCopartitioning() + { + // TABLE(t1) is matched by fully qualified name: tpch.s1.t1. It matches the second copartition item s1.t1. + // Aliased relation TABLE(SELECT 1, 2) t1(x, y) is matched by unqualified name. It matches the first copartition item t1. 
+ analyze(""" + SELECT * FROM TABLE(system.two_table_arguments_function( + input1 => TABLE(t1) PARTITION BY (a, b), + input2 => TABLE(SELECT 1, 2) t1(x, y) PARTITION BY (x, y) + COPARTITION (t1, s1.t1))) + """); + + // Copartition items t1, t2 are first matched to arguments by unqualified names, and when no match is found, by fully qualified names. + // TABLE(tpch.s1.t1) is matched by fully qualified name. It matches the first copartition item t1. + // TABLE(s1.t2) is matched by unqualified name: tpch.s1.t2. It matches the second copartition item t2. + analyze(""" + SELECT * FROM TABLE(system.two_table_arguments_function( + input1 => TABLE(tpch.s1.t1) PARTITION BY (a, b), + input2 => TABLE(s1.t2) PARTITION BY (a, b) + COPARTITION (t1, t2))) + """); + + assertFails(""" + SELECT * FROM TABLE(system.two_table_arguments_function( + input1 => TABLE(t1) PARTITION BY (a, b), + input2 => TABLE(t2) PARTITION BY (a, b) + COPARTITION (t1, s1.foo))) + """) + .hasErrorCode(INVALID_COPARTITIONING) + .hasMessage("line 4:22: No table argument found for name: s1.foo"); + + // Both table arguments are matched by fully qualified name: tpch.s1.t1 + assertFails(""" + SELECT * FROM TABLE(system.two_table_arguments_function( + input1 => TABLE(t1) PARTITION BY (a, b), + input2 => TABLE(t1) PARTITION BY (a, b) + COPARTITION (t1, t2))) + """) + .hasErrorCode(INVALID_COPARTITIONING) + .hasMessage("line 4:18: Ambiguous reference: multiple table arguments found for name: t1"); + + // Both table arguments are matched by unqualified name: t1 + assertFails(""" + SELECT * FROM TABLE(system.two_table_arguments_function( + input1 => TABLE(SELECT 1, 2) t1(a, b) PARTITION BY (a, b), + input2 => TABLE(SELECT 3, 4) t1(c, d) PARTITION BY (c, d) + COPARTITION (t1, t2))) + """) + .hasErrorCode(INVALID_COPARTITIONING) + .hasMessage("line 4:18: Ambiguous reference: multiple table arguments found for name: t1"); + + assertFails(""" + SELECT * FROM TABLE(system.two_table_arguments_function( + input1 => 
TABLE(t1) PARTITION BY (a, b), + input2 => TABLE(t2) PARTITION BY (a, b) + COPARTITION (t1, t1))) + """) + .hasErrorCode(INVALID_COPARTITIONING) + .hasMessage("line 4:22: Multiple references to table argument: t1 in COPARTITION clause"); + } + + @Test + public void testCopartitionColumns() + { + assertFails(""" + SELECT * FROM TABLE(system.two_table_arguments_function( + input1 => TABLE(t1), + input2 => TABLE(t2) PARTITION BY (a, b) + COPARTITION (t1, t2))) + """) + .hasErrorCode(INVALID_COPARTITIONING) + .hasMessage("line 2:15: Table tpch.s1.t1 referenced in COPARTITION clause is not partitioned"); + + assertFails(""" + SELECT * FROM TABLE(system.two_table_arguments_function( + input1 => TABLE(t1) PARTITION BY (), + input2 => TABLE(t2) PARTITION BY () + COPARTITION (t1, t2))) + """) + .hasErrorCode(INVALID_COPARTITIONING) + .hasMessage("line 2:15: No partitioning columns specified for table tpch.s1.t1 referenced in COPARTITION clause"); + + assertFails(""" + SELECT * FROM TABLE(system.two_table_arguments_function( + input1 => TABLE(t1) PARTITION BY (a, b), + input2 => TABLE(t2) PARTITION BY (a) + COPARTITION (t1, t2))) + """) + .hasErrorCode(INVALID_COPARTITIONING) + .hasMessage("line 4:18: Numbers of partitioning columns in copartitioned tables do not match"); + + assertFails(""" + SELECT * FROM TABLE(system.two_table_arguments_function( + input1 => TABLE(SELECT 1) t1(a) PARTITION BY (a), + input2 => TABLE(SELECT 'x') t2(b) PARTITION BY (b) + COPARTITION (t1, t2))) + """) + .hasErrorCode(TYPE_MISMATCH) + .hasMessage("line 4:18: Partitioning columns in copartitioned tables have incompatible types"); + } + + @Test + public void testNullArguments() + { + // cannot pass null for table argument + assertFails("SELECT * FROM TABLE(system.table_argument_function(input => null))") + .hasErrorCode(INVALID_FUNCTION_ARGUMENT) + .hasMessage("line 1:52: Invalid argument INPUT. 
Expected table, got expression"); + + // the wrong way to pass null for descriptor + assertFails("SELECT * FROM TABLE(system.descriptor_argument_function(schema => null))") + .hasErrorCode(INVALID_FUNCTION_ARGUMENT) + .hasMessage("line 1:57: Invalid argument SCHEMA. Expected descriptor, got expression"); + + // the right way to pass null for descriptor + analyze("SELECT * FROM TABLE(system.descriptor_argument_function(schema => CAST(null AS DESCRIPTOR)))"); + + // the default value for the argument schema is null + analyze("SELECT * FROM TABLE(system.descriptor_argument_function())"); + + analyze("SELECT * FROM TABLE(system.two_arguments_function(null, null))"); + + // the default value for the second argument is null + analyze("SELECT * FROM TABLE(system.two_arguments_function('a'))"); + } + @BeforeClass public void setup() { @@ -6509,7 +6839,31 @@ private Analyzer createAnalyzer(Session session, AccessControl accessControl) new ColumnPropertyManager(CatalogServiceProvider.fail()), tablePropertyManager, new MaterializedViewPropertyManager(catalogName -> ImmutableMap.of())))); - StatementAnalyzerFactory statementAnalyzerFactory = createTestingStatementAnalyzerFactory(plannerContext, accessControl, tablePropertyManager, analyzePropertyManager); + StatementAnalyzerFactory statementAnalyzerFactory = new StatementAnalyzerFactory( + plannerContext, + new SqlParser(), + accessControl, + new NoOpTransactionManager() + { + // needed to analyze table functions + @Override + public ConnectorTransactionHandle getConnectorTransaction(TransactionId transactionId, CatalogHandle catalogHandle) + { + return new ConnectorTransactionHandle() {}; + } + }, + user -> ImmutableSet.of(), + new TableProceduresRegistry(CatalogServiceProvider.fail("procedures are not supported in testing analyzer")), + new TableFunctionRegistry(catalogName -> new CatalogTableFunctions(ImmutableList.of( + new TwoScalarArgumentsFunction(), + new TableArgumentFunction(), + new 
TableArgumentRowSemanticsFunction(), + new DescriptorArgumentFunction(), + new TwoTableArgumentsFunction()))), + new SessionPropertyManager(), + tablePropertyManager, + analyzePropertyManager, + new TableProceduresPropertyManager(CatalogServiceProvider.fail("procedures are not supported in testing analyzer"))); AnalyzerFactory analyzerFactory = new AnalyzerFactory(statementAnalyzerFactory, statementRewrite); return analyzerFactory.createAnalyzer( session, diff --git a/core/trino-spi/src/main/java/io/trino/spi/StandardErrorCode.java b/core/trino-spi/src/main/java/io/trino/spi/StandardErrorCode.java index 040471bca038..1f78f61f38f3 100644 --- a/core/trino-spi/src/main/java/io/trino/spi/StandardErrorCode.java +++ b/core/trino-spi/src/main/java/io/trino/spi/StandardErrorCode.java @@ -141,6 +141,7 @@ public enum StandardErrorCode INVALID_JSON_LITERAL(117, USER_ERROR), JSON_VALUE_RESULT_ERROR(118, USER_ERROR), MERGE_TARGET_ROW_MULTIPLE_MATCHES(119, USER_ERROR), + INVALID_COPARTITIONING(120, USER_ERROR), GENERIC_INTERNAL_ERROR(65536, INTERNAL_ERROR), TOO_MANY_REQUESTS_FAILED(65537, INTERNAL_ERROR),