diff --git a/core/trino-main/src/main/java/io/trino/connector/ConnectorServices.java b/core/trino-main/src/main/java/io/trino/connector/ConnectorServices.java index e94561c179a7..991bf3689bbf 100644 --- a/core/trino-main/src/main/java/io/trino/connector/ConnectorServices.java +++ b/core/trino-main/src/main/java/io/trino/connector/ConnectorServices.java @@ -356,5 +356,8 @@ private static void validateTableFunction(ConnectorTableFunction tableFunction) .filter(TableArgumentSpecification::isRowSemantics) .count(); checkArgument(tableArgumentsWithRowSemantics <= 1, "more than one table argument with row semantics"); + // The 'keep when empty' or 'prune when empty' property must not be explicitly specified for a table argument with row semantics. + // Such a table argument is implicitly 'prune when empty'. The TableArgumentSpecification.Builder enforces the 'prune when empty' property + // for a table argument with row semantics. } } diff --git a/core/trino-main/src/main/java/io/trino/sql/analyzer/Analysis.java b/core/trino-main/src/main/java/io/trino/sql/analyzer/Analysis.java index 2e65779da0e3..f07c574a7dfe 100644 --- a/core/trino-main/src/main/java/io/trino/sql/analyzer/Analysis.java +++ b/core/trino-main/src/main/java/io/trino/sql/analyzer/Analysis.java @@ -65,6 +65,7 @@ import io.trino.sql.tree.Offset; import io.trino.sql.tree.OrderBy; import io.trino.sql.tree.Parameter; +import io.trino.sql.tree.QualifiedName; import io.trino.sql.tree.QuantifiedComparisonExpression; import io.trino.sql.tree.Query; import io.trino.sql.tree.QuerySpecification; @@ -235,6 +236,8 @@ public class Analysis private Optional tableExecuteHandle = Optional.empty(); + // names of tables and aliased relations. All names are resolved case-insensitive. 
+ private final Map, QualifiedName> relationNames = new LinkedHashMap<>(); private final Map, TableFunctionInvocationAnalysis> tableFunctionAnalyses = new LinkedHashMap<>(); public Analysis(@Nullable Statement root, Map, Expression> parameters, QueryType queryType) @@ -1219,6 +1222,16 @@ public TableFunctionInvocationAnalysis getTableFunctionAnalysis(TableFunctionInv return tableFunctionAnalyses.get(NodeRef.of(node)); } + public void setRelationName(Relation relation, QualifiedName name) + { + relationNames.put(NodeRef.of(relation), name); + } + + public QualifiedName getRelationName(Relation relation) + { + return relationNames.get(NodeRef.of(relation)); + } + private boolean isInputTable(Table table) { return !(isUpdateTarget(table) || isInsertTarget(table)); @@ -2028,11 +2041,157 @@ public Optional getAtMost() } } + public static class TableArgumentAnalysis + { + private final String argumentName; + private final Optional name; + private final Relation relation; + private final Optional> partitionBy; // it is allowed to partition by empty list + private final Optional orderBy; + private final boolean pruneWhenEmpty; + private final boolean rowSemantics; + private final boolean passThroughColumns; + + private TableArgumentAnalysis( + String argumentName, + Optional name, + Relation relation, + Optional> partitionBy, + Optional orderBy, + boolean pruneWhenEmpty, + boolean rowSemantics, + boolean passThroughColumns) + { + this.argumentName = requireNonNull(argumentName, "argumentName is null"); + this.name = requireNonNull(name, "name is null"); + this.relation = requireNonNull(relation, "relation is null"); + this.partitionBy = requireNonNull(partitionBy, "partitionBy is null").map(ImmutableList::copyOf); + this.orderBy = requireNonNull(orderBy, "orderBy is null"); + this.pruneWhenEmpty = pruneWhenEmpty; + this.rowSemantics = rowSemantics; + this.passThroughColumns = passThroughColumns; + } + + public String getArgumentName() + { + return argumentName; + } + + 
public Optional getName() + { + return name; + } + + public Relation getRelation() + { + return relation; + } + + public Optional> getPartitionBy() + { + return partitionBy; + } + + public Optional getOrderBy() + { + return orderBy; + } + + public boolean isPruneWhenEmpty() + { + return pruneWhenEmpty; + } + + public boolean isRowSemantics() + { + return rowSemantics; + } + + public boolean isPassThroughColumns() + { + return passThroughColumns; + } + + public static Builder builder() + { + return new Builder(); + } + + public static final class Builder + { + private String argumentName; + private Optional name = Optional.empty(); + private Relation relation; + private Optional> partitionBy = Optional.empty(); + private Optional orderBy = Optional.empty(); + private boolean pruneWhenEmpty; + private boolean rowSemantics; + private boolean passThroughColumns; + + private Builder() {} + + public Builder withArgumentName(String argumentName) + { + this.argumentName = argumentName; + return this; + } + + public Builder withName(QualifiedName name) + { + this.name = Optional.of(name); + return this; + } + + public Builder withRelation(Relation relation) + { + this.relation = relation; + return this; + } + + public Builder withPartitionBy(List partitionBy) + { + this.partitionBy = Optional.of(partitionBy); + return this; + } + + public Builder withOrderBy(OrderBy orderBy) + { + this.orderBy = Optional.of(orderBy); + return this; + } + + public Builder withPruneWhenEmpty(boolean pruneWhenEmpty) + { + this.pruneWhenEmpty = pruneWhenEmpty; + return this; + } + + public Builder withRowSemantics(boolean rowSemantics) + { + this.rowSemantics = rowSemantics; + return this; + } + + public Builder withPassThroughColumns(boolean passThroughColumns) + { + this.passThroughColumns = passThroughColumns; + return this; + } + + public TableArgumentAnalysis build() + { + return new TableArgumentAnalysis(argumentName, name, relation, partitionBy, orderBy, pruneWhenEmpty, rowSemantics, 
passThroughColumns); + } + } + } + public static class TableFunctionInvocationAnalysis { private final CatalogHandle catalogHandle; private final String functionName; private final Map arguments; + private final List tableArgumentAnalyses; + private final List> copartitioningLists; private final ConnectorTableFunctionHandle connectorTableFunctionHandle; private final ConnectorTransactionHandle transactionHandle; @@ -2040,12 +2199,16 @@ public TableFunctionInvocationAnalysis( CatalogHandle catalogHandle, String functionName, Map arguments, + List tableArgumentAnalyses, + List> copartitioningLists, ConnectorTableFunctionHandle connectorTableFunctionHandle, ConnectorTransactionHandle transactionHandle) { this.catalogHandle = requireNonNull(catalogHandle, "catalogHandle is null"); this.functionName = requireNonNull(functionName, "functionName is null"); - this.arguments = requireNonNull(arguments, "arguments is null"); + this.arguments = ImmutableMap.copyOf(arguments); + this.tableArgumentAnalyses = ImmutableList.copyOf(tableArgumentAnalyses); + this.copartitioningLists = ImmutableList.copyOf(copartitioningLists); this.connectorTableFunctionHandle = requireNonNull(connectorTableFunctionHandle, "connectorTableFunctionHandle is null"); this.transactionHandle = requireNonNull(transactionHandle, "transactionHandle is null"); } @@ -2065,6 +2228,16 @@ public Map getArguments() return arguments; } + public List getTableArgumentAnalyses() + { + return tableArgumentAnalyses; + } + + public List> getCopartitioningLists() + { + return copartitioningLists; + } + public ConnectorTableFunctionHandle getConnectorTableFunctionHandle() { return connectorTableFunctionHandle; diff --git a/core/trino-main/src/main/java/io/trino/sql/analyzer/StatementAnalyzer.java b/core/trino-main/src/main/java/io/trino/sql/analyzer/StatementAnalyzer.java index 874f10fc09b2..77684bbe55ac 100644 --- a/core/trino-main/src/main/java/io/trino/sql/analyzer/StatementAnalyzer.java +++ 
b/core/trino-main/src/main/java/io/trino/sql/analyzer/StatementAnalyzer.java @@ -22,6 +22,7 @@ import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import com.google.common.collect.ListMultimap; +import com.google.common.collect.Multimap; import com.google.common.collect.Streams; import com.google.common.math.IntMath; import io.airlift.slice.Slice; @@ -74,11 +75,13 @@ import io.trino.spi.ptf.ArgumentSpecification; import io.trino.spi.ptf.ConnectorTableFunction; import io.trino.spi.ptf.Descriptor; +import io.trino.spi.ptf.DescriptorArgument; import io.trino.spi.ptf.DescriptorArgumentSpecification; import io.trino.spi.ptf.ReturnTypeSpecification; import io.trino.spi.ptf.ReturnTypeSpecification.DescribedTable; import io.trino.spi.ptf.ScalarArgument; import io.trino.spi.ptf.ScalarArgumentSpecification; +import io.trino.spi.ptf.TableArgument; import io.trino.spi.ptf.TableArgumentSpecification; import io.trino.spi.ptf.TableFunctionAnalysis; import io.trino.spi.security.AccessDeniedException; @@ -104,6 +107,7 @@ import io.trino.sql.analyzer.Analysis.ResolvedWindow; import io.trino.sql.analyzer.Analysis.SelectExpression; import io.trino.sql.analyzer.Analysis.SourceColumn; +import io.trino.sql.analyzer.Analysis.TableArgumentAnalysis; import io.trino.sql.analyzer.Analysis.TableFunctionInvocationAnalysis; import io.trino.sql.analyzer.Analysis.UnnestAnalysis; import io.trino.sql.analyzer.PatternRecognitionAnalyzer.PatternRecognitionAnalysis; @@ -136,12 +140,12 @@ import io.trino.sql.tree.Delete; import io.trino.sql.tree.Deny; import io.trino.sql.tree.DereferenceExpression; -import io.trino.sql.tree.DescriptorArgument; import io.trino.sql.tree.DropColumn; import io.trino.sql.tree.DropMaterializedView; import io.trino.sql.tree.DropSchema; import io.trino.sql.tree.DropTable; import io.trino.sql.tree.DropView; +import io.trino.sql.tree.EmptyTableTreatment; import io.trino.sql.tree.Except; import io.trino.sql.tree.Execute; import 
io.trino.sql.tree.Explain; @@ -220,7 +224,9 @@ import io.trino.sql.tree.Table; import io.trino.sql.tree.TableExecute; import io.trino.sql.tree.TableFunctionArgument; +import io.trino.sql.tree.TableFunctionDescriptorArgument; import io.trino.sql.tree.TableFunctionInvocation; +import io.trino.sql.tree.TableFunctionTableArgument; import io.trino.sql.tree.TableSubquery; import io.trino.sql.tree.TruncateTable; import io.trino.sql.tree.Union; @@ -287,6 +293,7 @@ import static io.trino.spi.StandardErrorCode.FUNCTION_NOT_WINDOW; import static io.trino.spi.StandardErrorCode.INVALID_ARGUMENTS; import static io.trino.spi.StandardErrorCode.INVALID_COLUMN_REFERENCE; +import static io.trino.spi.StandardErrorCode.INVALID_COPARTITIONING; import static io.trino.spi.StandardErrorCode.INVALID_FUNCTION_ARGUMENT; import static io.trino.spi.StandardErrorCode.INVALID_LIMIT_CLAUSE; import static io.trino.spi.StandardErrorCode.INVALID_ORDER_BY; @@ -320,6 +327,7 @@ import static io.trino.spi.connector.StandardWarningCode.REDUNDANT_ORDER_BY; import static io.trino.spi.function.FunctionKind.AGGREGATE; import static io.trino.spi.function.FunctionKind.WINDOW; +import static io.trino.spi.ptf.DescriptorArgument.NULL_DESCRIPTOR; import static io.trino.spi.ptf.ReturnTypeSpecification.GenericTable.GENERIC_TABLE; import static io.trino.spi.ptf.ReturnTypeSpecification.OnlyPassThrough.ONLY_PASS_THROUGH; import static io.trino.spi.type.BigintType.BIGINT; @@ -347,6 +355,7 @@ import static io.trino.sql.analyzer.ScopeReferenceExtractor.getReferencesToScope; import static io.trino.sql.analyzer.SemanticExceptions.semanticException; import static io.trino.sql.analyzer.TypeSignatureProvider.fromTypes; +import static io.trino.sql.analyzer.TypeSignatureTranslator.toTypeSignature; import static io.trino.sql.planner.ExpressionInterpreter.evaluateConstantExpression; import static io.trino.sql.tree.BooleanLiteral.TRUE_LITERAL; import static io.trino.sql.tree.DereferenceExpression.getQualifiedName; @@ -1498,38 
+1507,26 @@ protected Scope visitTableFunctionInvocation(TableFunctionInvocation node, Optio ConnectorTableFunction function = tableFunctionMetadata.getFunction(); CatalogHandle catalogHandle = tableFunctionMetadata.getCatalogHandle(); - Map passedArguments = analyzeArguments(node, function.getArguments(), node.getArguments()); + Node errorLocation = node; + if (!node.getArguments().isEmpty()) { + errorLocation = node.getArguments().get(0); + } + + ArgumentsAnalysis argumentsAnalysis = analyzeArguments(function.getArguments(), node.getArguments(), scope, errorLocation); ConnectorTransactionHandle transactionHandle = transactionManager.getConnectorTransaction(session.getRequiredTransactionId(), catalogHandle); - TableFunctionAnalysis functionAnalysis = function.analyze(session.toConnectorSession(catalogHandle), transactionHandle, passedArguments); - analysis.setTableFunctionAnalysis(node, new TableFunctionInvocationAnalysis(catalogHandle, function.getName(), passedArguments, functionAnalysis.getHandle(), transactionHandle)); - - // TODO handle the DescriptorMapping descriptorsToTables mapping from the TableFunction.Analysis: - // This is a mapping of descriptor arguments to table arguments. It consists of two parts: - // - mapping by descriptor field: (arg name of descriptor argument, and position in the descriptor) to (arg name of table argument) - // - mapping by descriptor: (arg name of descriptor argument) to (arg name of table argument) - // 1. get the DescriptorField from the designated DescriptorArgument (or all fields for mapping by descriptor) - // 2. validate there is no DataType specified, - // 3. analyze the Identifier in the scope of the designated table (it is recorded, because args were already analyzed). Disable correlation. - // 4. at this point, the Identifier should be recorded as a column reference to the appropriate table - // 5. record the mapping NameAndPosition -> Identifier - // ... 
later translate Identifier to Symbol in Planner, and eventually translate it to channel before execution - if (!functionAnalysis.getDescriptorMapping().isEmpty()) { - throw semanticException(NOT_SUPPORTED, node, "Table arguments are not yet supported for table functions"); - } - - // TODO process the copartitioning: - // 1. validate input table references - // 2. the copartitioned tables in each set must be partitioned, and have the same number of partitioning columns - // 3. the corresponding columns must be comparable - // 4. within a set, determine and record coercions of the corresponding columns to a common supertype - // Note that if a table is part of multiple copartitioning sets, it might require a different coercion for a column - // per each set. Additionally, there might be another coercion required by the Table Function logic. Also, since - // all partitioning columns are passed-through, we also need an un-coerced copy. - // See ExpressionAnalyzer.sortKeyCoercionsForFrameBoundCalculation for multiple coercions on a column. - if (!node.getCopartitioning().isEmpty()) { - throw semanticException(NOT_SUPPORTED, node, "COPARTITION clause is not yet supported for table functions"); - } + TableFunctionAnalysis functionAnalysis = function.analyze(session.toConnectorSession(catalogHandle), transactionHandle, argumentsAnalysis.getPassedArguments()); + + List> copartitioningLists = analyzeCopartitioning(node.getCopartitioning(), argumentsAnalysis.getTableArgumentAnalyses()); + + analysis.setTableFunctionAnalysis(node, new TableFunctionInvocationAnalysis( + catalogHandle, + function.getName(), + argumentsAnalysis.getPassedArguments(), + argumentsAnalysis.getTableArgumentAnalyses(), + copartitioningLists, + functionAnalysis.getHandle(), + transactionHandle)); // determine the result relation type. 
// The result relation type of a table function consists of: @@ -1557,6 +1554,7 @@ protected Scope visitTableFunctionInvocation(TableFunctionInvocation node, Optio } // currently we don't support input tables, so the output consists of proper columns only + // TODO implement SQL standard ISO/IEC 9075-2, 4.33 SQL-invoked routines, p. 123 List fields = properColumnsDescriptor.getFields().stream() // per spec, field names are mandatory .map(field -> Field.newUnqualified(field.getName(), field.getType().orElseThrow(() -> new IllegalStateException("missing returned type for proper field")))) @@ -1581,19 +1579,14 @@ private Optional resolveTableFunction(TableFunctionInvoca return Optional.empty(); } - private Map analyzeArguments(Node node, List argumentSpecifications, List arguments) + private ArgumentsAnalysis analyzeArguments(List argumentSpecifications, List arguments, Optional scope, Node errorLocation) { - Node errorLocation = node; - if (!arguments.isEmpty()) { - errorLocation = arguments.get(0); - } - if (argumentSpecifications.size() < arguments.size()) { throw semanticException(INVALID_ARGUMENTS, errorLocation, "Too many arguments. 
Expected at most %s arguments, got %s arguments", argumentSpecifications.size(), arguments.size()); } if (argumentSpecifications.isEmpty()) { - return ImmutableMap.of(); + return new ArgumentsAnalysis(ImmutableMap.of(), ImmutableList.of()); } boolean argumentsPassedByName = !arguments.isEmpty() && arguments.stream().allMatch(argument -> argument.getName().isPresent()); @@ -1603,16 +1596,18 @@ private Map analyzeArguments(Node node, List passedArguments = ImmutableMap.builder(); + ImmutableList.Builder tableArgumentAnalyses = ImmutableList.builder(); if (argumentsPassedByName) { Map argumentSpecificationsByName = new HashMap<>(); for (ArgumentSpecification argumentSpecification : argumentSpecifications) { if (argumentSpecificationsByName.put(argumentSpecification.getName(), argumentSpecification) != null) { + // this should never happen, because the argument names are validated at function registration time throw new IllegalStateException("Duplicate argument specification for name: " + argumentSpecification.getName()); } } Set uniqueArgumentNames = new HashSet<>(); for (TableFunctionArgument argument : arguments) { - String argumentName = argument.getName().get().getCanonicalValue(); + String argumentName = argument.getName().orElseThrow().getCanonicalValue(); if (!uniqueArgumentNames.add(argumentName)) { throw semanticException(INVALID_FUNCTION_ARGUMENT, argument, "Duplicate argument name: " + argumentName); } @@ -1620,7 +1615,9 @@ private Map analyzeArguments(Node node, List entry : argumentSpecificationsByName.entrySet()) { @@ -1632,7 +1629,9 @@ private Map analyzeArguments(Node node, List analyzeArguments(Node node, List scope) { String actualType; - if (argument.getValue() instanceof Relation) { + if (argument.getValue() instanceof TableFunctionTableArgument) { actualType = "table"; } - else if (argument.getValue() instanceof DescriptorArgument) { + else if (argument.getValue() instanceof TableFunctionDescriptorArgument) { actualType = "descriptor"; } else if 
(argument.getValue() instanceof Expression) { @@ -1661,30 +1660,24 @@ else if (argument.getValue() instanceof Expression) { } if (argumentSpecification instanceof TableArgumentSpecification) { - if (!(argument.getValue() instanceof Relation)) { + if (!(argument.getValue() instanceof TableFunctionTableArgument)) { if (argument.getValue() instanceof FunctionCall) { // probably an attempt to pass a table function call, which is not supported, and was parsed as a function call throw semanticException(NOT_SUPPORTED, argument, "Invalid table argument %s. Table functions are not allowed as table function arguments", argumentSpecification.getName()); } throw semanticException(INVALID_FUNCTION_ARGUMENT, argument, "Invalid argument %s. Expected table, got %s", argumentSpecification.getName(), actualType); } - // TODO analyze the argument - // 1. process the Relation - // 2. partitioning and ordering must only apply to tables with set semantics - // 3. validate partitioning and ordering using `validateAndGetInputField()` - // 4. validate the prune when empty property vs argument specification (forbidden for row semantics; override? -> check spec) - // 5. return Argument - throw semanticException(NOT_SUPPORTED, argument, "Table arguments are not yet supported for table functions"); + return analyzeTableArgument(argument, (TableArgumentSpecification) argumentSpecification, scope); } if (argumentSpecification instanceof DescriptorArgumentSpecification) { - if (!(argument.getValue() instanceof DescriptorArgument)) { + if (!(argument.getValue() instanceof TableFunctionDescriptorArgument)) { if (argument.getValue() instanceof FunctionCall && ((FunctionCall) argument.getValue()).getName().hasSuffix(QualifiedName.of("descriptor"))) { // function name is always compared case-insensitive // malformed descriptor which parsed as a function call throw semanticException(INVALID_FUNCTION_ARGUMENT, argument, "Invalid descriptor argument %s. 
Descriptors should be formatted as 'DESCRIPTOR(name [type], ...)'", argumentSpecification.getName()); } throw semanticException(INVALID_FUNCTION_ARGUMENT, argument, "Invalid argument %s. Expected descriptor, got %s", argumentSpecification.getName(), actualType); } - throw semanticException(NOT_SUPPORTED, argument, "Descriptor arguments are not yet supported for table functions"); + return analyzeDescriptorArgument((TableFunctionDescriptorArgument) argument.getValue()); } if (argumentSpecification instanceof ScalarArgumentSpecification) { if (!(argument.getValue() instanceof Expression)) { @@ -1692,37 +1685,146 @@ else if (argument.getValue() instanceof Expression) { } Expression expression = (Expression) argument.getValue(); // 'descriptor' as a function name is not allowed in this context - if (argument.getValue() instanceof FunctionCall && ((FunctionCall) argument.getValue()).getName().hasSuffix(QualifiedName.of("descriptor"))) { // function name is always compared case-insensitive + if (expression instanceof FunctionCall && ((FunctionCall) expression).getName().hasSuffix(QualifiedName.of("descriptor"))) { // function name is always compared case-insensitive throw semanticException(INVALID_FUNCTION_ARGUMENT, argument, "'descriptor' function is not allowed as a table function argument"); } - // inline parameters - Expression inlined = ExpressionTreeRewriter.rewriteWith(new ExpressionRewriter<>() - { - @Override - public Expression rewriteParameter(Parameter node, Void context, ExpressionTreeRewriter treeRewriter) - { - if (analysis.isDescribe()) { - // We cannot handle DESCRIBE when a table function argument involves a parameter. - // In DESCRIBE, the parameter values are not known. We cannot pass a dummy value for a parameter. - // The value of a table function argument can affect the returned relation type. The returned - // relation type can affect the assumed types for other parameters in the query. 
- throw semanticException(NOT_SUPPORTED, node, "DESCRIBE is not supported if a table function uses parameters"); - } - return analysis.getParameters().get(NodeRef.of(node)); - } - }, expression); - Type expectedArgumentType = ((ScalarArgumentSpecification) argumentSpecification).getType(); - // currently, only constant arguments are supported - Object constantValue = ExpressionInterpreter.evaluateConstantExpression(inlined, expectedArgumentType, plannerContext, session, accessControl, analysis.getParameters()); - return ScalarArgument.builder() - .type(expectedArgumentType) - .value(constantValue) - .build(); + return analyzeScalarArgument(expression, ((ScalarArgumentSpecification) argumentSpecification).getType()); } throw new IllegalStateException("Unexpected argument specification: " + argumentSpecification.getClass().getSimpleName()); } + private ArgumentAnalysis analyzeTableArgument(TableFunctionArgument argument, TableArgumentSpecification argumentSpecification, Optional scope) + { + TableFunctionTableArgument tableArgument = (TableFunctionTableArgument) argument.getValue(); + + TableArgument.Builder argumentBuilder = TableArgument.builder(); + TableArgumentAnalysis.Builder analysisBuilder = TableArgumentAnalysis.builder(); + analysisBuilder.withArgumentName(argumentSpecification.getName()); + + // process the relation + Relation relation = tableArgument.getTable(); + analysisBuilder.withRelation(relation); + Scope argumentScope = process(relation, scope); + QualifiedName relationName = analysis.getRelationName(relation); + if (relationName != null) { + analysisBuilder.withName(relationName); + } + + argumentBuilder.rowType(RowType.from(argumentScope.getRelationType().getVisibleFields().stream() + .map(field -> new RowType.Field(field.getName(), field.getType())) + .collect(toImmutableList()))); + + // analyze PARTITION BY + if (tableArgument.getPartitionBy().isPresent()) { + if (argumentSpecification.isRowSemantics()) { + throw 
semanticException(INVALID_FUNCTION_ARGUMENT, argument, "Invalid argument %s. Partitioning specified for table argument with row semantics", argumentSpecification.getName()); + } + List partitionBy = tableArgument.getPartitionBy().get(); + analysisBuilder.withPartitionBy(partitionBy); + partitionBy.stream() + .forEach(partitioningColumn -> { + validateAndGetInputField(partitioningColumn, argumentScope); + Type type = analyzeExpression(partitioningColumn, argumentScope).getType(partitioningColumn); + if (!type.isComparable()) { + throw semanticException(TYPE_MISMATCH, partitioningColumn, "%s is not comparable, and therefore cannot be used in PARTITION BY", type); + } + }); + argumentBuilder.partitionBy(partitionBy.stream() + // each expression is either an Identifier or a DereferenceExpression + .map(Expression::toString) + .collect(toImmutableList())); + } + + // analyze ORDER BY + if (tableArgument.getOrderBy().isPresent()) { + if (argumentSpecification.isRowSemantics()) { + throw semanticException(INVALID_FUNCTION_ARGUMENT, argument, "Invalid argument %s. 
Ordering specified for table argument with row semantics", argumentSpecification.getName()); + } + OrderBy orderBy = tableArgument.getOrderBy().get(); + analysisBuilder.withOrderBy(orderBy); + orderBy.getSortItems().stream() + .map(SortItem::getSortKey) + .forEach(orderingColumn -> { + validateAndGetInputField(orderingColumn, argumentScope); + Type type = analyzeExpression(orderingColumn, argumentScope).getType(orderingColumn); + if (!type.isOrderable()) { + throw semanticException(TYPE_MISMATCH, orderingColumn, "%s is not orderable, and therefore cannot be used in ORDER BY", type); + } + }); + argumentBuilder.orderBy(orderBy.getSortItems().stream() + // each sort key is either an Identifier or a DereferenceExpression + .map(sortItem -> sortItem.getSortKey().toString()) + .collect(toImmutableList())); + } + + // analyze the PRUNE/KEEP WHEN EMPTY property + boolean pruneWhenEmpty = argumentSpecification.isPruneWhenEmpty(); + if (tableArgument.getEmptyTableTreatment().isPresent()) { + if (argumentSpecification.isRowSemantics()) { + throw semanticException(INVALID_FUNCTION_ARGUMENT, tableArgument.getEmptyTableTreatment().get(), "Invalid argument %s. 
Empty behavior specified for table argument with row semantics", argumentSpecification.getName()); + } + pruneWhenEmpty = tableArgument.getEmptyTableTreatment().get().getTreatment() == EmptyTableTreatment.Treatment.PRUNE; + } + analysisBuilder.withPruneWhenEmpty(pruneWhenEmpty); + + // record remaining properties + analysisBuilder.withRowSemantics(argumentSpecification.isRowSemantics()); + analysisBuilder.withPassThroughColumns(argumentSpecification.isPassThroughColumns()); + + return new ArgumentAnalysis(argumentBuilder.build(), Optional.of(analysisBuilder.build())); + } + + private ArgumentAnalysis analyzeDescriptorArgument(TableFunctionDescriptorArgument argument) + { + return new ArgumentAnalysis( + argument.getDescriptor() + .map(descriptor -> DescriptorArgument.builder() + .descriptor(new Descriptor(descriptor.getFields().stream() + .map(field -> new Descriptor.Field( + field.getName().getCanonicalValue(), + field.getType().map(type -> { + try { + return plannerContext.getTypeManager().getType(toTypeSignature(type)); + } + catch (TypeNotFoundException e) { + throw semanticException(TYPE_MISMATCH, type, "Unknown type: %s", type); + } + }))) + .collect(toImmutableList()))) + .build()) + .orElse(NULL_DESCRIPTOR), + Optional.empty()); + } + + private ArgumentAnalysis analyzeScalarArgument(Expression expression, Type type) + { + // inline parameters + Expression inlined = ExpressionTreeRewriter.rewriteWith(new ExpressionRewriter<>() + { + @Override + public Expression rewriteParameter(Parameter node, Void context, ExpressionTreeRewriter treeRewriter) + { + if (analysis.isDescribe()) { + // We cannot handle DESCRIBE when a table function argument involves a parameter. + // In DESCRIBE, the parameter values are not known. We cannot pass a dummy value for a parameter. + // The value of a table function argument can affect the returned relation type. The returned + // relation type can affect the assumed types for other parameters in the query. 
+ throw semanticException(NOT_SUPPORTED, node, "DESCRIBE is not supported if a table function uses parameters"); + } + return analysis.getParameters().get(NodeRef.of(node)); + } + }, expression); + // currently, only constant arguments are supported + Object constantValue = ExpressionInterpreter.evaluateConstantExpression(inlined, type, plannerContext, session, accessControl, analysis.getParameters()); + return new ArgumentAnalysis( + ScalarArgument.builder() + .type(type) + .value(constantValue) + .build(), + Optional.empty()); + } + private Argument analyzeDefault(ArgumentSpecification argumentSpecification, Node errorLocation) { if (argumentSpecification.isRequired()) { @@ -1732,7 +1834,9 @@ private Argument analyzeDefault(ArgumentSpecification argumentSpecification, Nod checkArgument(!(argumentSpecification instanceof TableArgumentSpecification), "invalid table argument specification: default set"); if (argumentSpecification instanceof DescriptorArgumentSpecification) { - throw semanticException(NOT_SUPPORTED, errorLocation, "Descriptor arguments are not yet supported for table functions"); + return DescriptorArgument.builder() + .descriptor((Descriptor) argumentSpecification.getDefaultValue()) + .build(); } if (argumentSpecification instanceof ScalarArgumentSpecification) { return ScalarArgument.builder() @@ -1744,6 +1848,119 @@ private Argument analyzeDefault(ArgumentSpecification argumentSpecification, Nod throw new IllegalStateException("Unexpected argument specification: " + argumentSpecification.getClass().getSimpleName()); } + private List> analyzeCopartitioning(List> copartitioning, List tableArgumentAnalyses) + { + // map table arguments by relation names. use a multimap, because multiple arguments can have the same value, e.g. 
input_1 => tpch.tiny.orders, input_2 => tpch.tiny.orders + ImmutableMultimap.Builder unqualifiedInputsBuilder = ImmutableMultimap.builder(); + ImmutableMultimap.Builder qualifiedInputsBuilder = ImmutableMultimap.builder(); + tableArgumentAnalyses.stream() + .filter(argument -> argument.getName().isPresent()) + .forEach(argument -> { + QualifiedName name = argument.getName().get(); + if (name.getParts().size() == 1) { + unqualifiedInputsBuilder.put(name, argument); + } + else if (name.getParts().size() == 3) { + qualifiedInputsBuilder.put(name, argument); + } + else { + throw new IllegalStateException("relation name should be unqualified or fully qualified"); + } + }); + Multimap unqualifiedInputs = unqualifiedInputsBuilder.build(); + Multimap qualifiedInputs = qualifiedInputsBuilder.build(); + + ImmutableList.Builder> copartitionBuilder = ImmutableList.builder(); + Set referencedArguments = new HashSet<>(); + for (List nameList : copartitioning) { + ImmutableList.Builder copartitionListBuilder = ImmutableList.builder(); + + // resolve copartition tables as references to table arguments + for (QualifiedName name : nameList) { + Collection candidates = emptyList(); + if (name.getParts().size() == 1) { + // try to match unqualified name. 
it might be a reference to a CTE or an aliased relation + candidates = unqualifiedInputs.get(name); + } + if (candidates.isEmpty()) { + // qualify the name using current schema and catalog + QualifiedObjectName fullyQualifiedName = createQualifiedObjectName(session, name.getOriginalParts().get(0), name); + candidates = qualifiedInputs.get(QualifiedName.of(fullyQualifiedName.getCatalogName(), fullyQualifiedName.getSchemaName(), fullyQualifiedName.getObjectName())); + } + if (candidates.isEmpty()) { + throw semanticException(INVALID_COPARTITIONING, name.getOriginalParts().get(0), "No table argument found for name: " + name); + } + if (candidates.size() > 1) { + throw semanticException(INVALID_COPARTITIONING, name.getOriginalParts().get(0), "Ambiguous reference: multiple table arguments found for name: " + name); + } + TableArgumentAnalysis argument = getOnlyElement(candidates); + if (!referencedArguments.add(argument.getArgumentName())) { + // multiple references to argument in COPARTITION clause are implicitly prohibited by + // ISO/IEC TR REPORT 19075-7, p.33, Feature B203, “More than one copartition specification” + throw semanticException(INVALID_COPARTITIONING, name.getOriginalParts().get(0), "Multiple references to table argument: %s in COPARTITION clause", name); + } + copartitionListBuilder.add(argument); + } + List copartitionList = copartitionListBuilder.build(); + + // analyze partitioning columns + copartitionList.stream() + .filter(argument -> argument.getPartitionBy().isEmpty()) + .findFirst().ifPresent(unpartitioned -> { + throw semanticException(INVALID_COPARTITIONING, unpartitioned.getRelation(), "Table %s referenced in COPARTITION clause is not partitioned", unpartitioned.getName().orElseThrow()); + }); + // TODO make sure that copartitioned tables cannot have empty partitioning lists. 
+ // ISO/IEC TR REPORT 19075-7, 4.5 Partitioning and ordering, p.25 is not clear: "With copartitioning, the copartitioned table arguments must have the same number of partitioning columns, + // and corresponding partitioning columns must be comparable. The DBMS effectively performs a full outer equijoin on the copartitioning columns" + copartitionList.stream() + .filter(argument -> argument.getPartitionBy().orElseThrow().isEmpty()) + .findFirst().ifPresent(partitionedOnEmpty -> { + // table is partitioned but no partitioning columns are specified (single partition) + throw semanticException(INVALID_COPARTITIONING, partitionedOnEmpty.getRelation(), "No partitioning columns specified for table %s referenced in COPARTITION clause", partitionedOnEmpty.getName().orElseThrow()); + }); + List> partitioningColumns = copartitionList.stream() + .map(TableArgumentAnalysis::getPartitionBy) + .map(Optional::orElseThrow) + .collect(toImmutableList()); + if (partitioningColumns.stream() + .map(List::size) + .distinct() + .count() > 1) { + throw semanticException(INVALID_COPARTITIONING, nameList.get(0).getOriginalParts().get(0), "Numbers of partitioning columns in copartitioned tables do not match"); + } + + // coerce corresponding copartition columns to common supertype + for (int index = 0; index < partitioningColumns.get(0).size(); index++) { + Type commonSuperType = analysis.getType(partitioningColumns.get(0).get(index)); + // find common supertype + for (List columnList : partitioningColumns) { + Optional superType = typeCoercion.getCommonSuperType(commonSuperType, analysis.getType(columnList.get(index))); + if (superType.isEmpty()) { + throw semanticException(TYPE_MISMATCH, nameList.get(0).getOriginalParts().get(0), "Partitioning columns in copartitioned tables have incompatible types"); + } + commonSuperType = superType.get(); + } + for (List columnList : partitioningColumns) { + Expression column = columnList.get(index); + Type type = analysis.getType(column); + if 
(!type.equals(commonSuperType)) { + if (!typeCoercion.canCoerce(type, commonSuperType)) { + throw semanticException(TYPE_MISMATCH, column, "Cannot coerce column of type %s to common supertype: %s", type.getDisplayName(), commonSuperType.getDisplayName()); + } + analysis.addCoercion(column, commonSuperType, typeCoercion.isTypeOnlyCoercion(type, commonSuperType)); + } + } + } + + // record the resolved copartition arguments by argument names + copartitionBuilder.add(copartitionList.stream() + .map(TableArgumentAnalysis::getArgumentName) + .collect(toImmutableList())); + } + + return copartitionBuilder.build(); + } + private Optional getMaterializedViewStorageTableName(MaterializedViewDefinition viewDefinition) { if (viewDefinition.getStorageTable().isEmpty()) { @@ -1764,6 +1981,7 @@ protected Scope visitTable(Table table, Optional scope) // is this a reference to a WITH query? Optional withQuery = createScope(scope).getNamedQuery(table.getName().getSuffix()); if (withQuery.isPresent()) { + analysis.setRelationName(table, table.getName()); return createScopeForCommonTableExpression(table, scope, withQuery.get()); } // is this a recursive reference in expandable WITH query? If so, there's base scope recorded. 
@@ -1775,11 +1993,13 @@ protected Scope visitTable(Table table, Optional scope) .withRelationType(baseScope.getRelationId(), baseScope.getRelationType()) .build(); analysis.setScope(table, resultScope); + analysis.setRelationName(table, table.getName()); return resultScope; } } QualifiedObjectName name = createQualifiedObjectName(session, table, table.getName()); + analysis.setRelationName(table, QualifiedName.of(name.getCatalogName(), name.getSchemaName(), name.getObjectName())); Optional optionalMaterializedView = metadata.getMaterializedView(session, name); if (optionalMaterializedView.isPresent()) { @@ -2331,6 +2551,7 @@ private ExpressionAnalysis analyzePatternRecognitionExpression(Expression expres @Override protected Scope visitAliasedRelation(AliasedRelation relation, Optional scope) { + analysis.setRelationName(relation, QualifiedName.of(ImmutableList.of(relation.getAlias()))); Scope relationScope = process(relation.getRelation(), scope); // todo this check should be inside of TupleDescriptor.withAlias, but the exception needs the node object @@ -4923,4 +5144,48 @@ private static boolean hasScopeAsLocalParent(Scope root, Scope parent) return false; } + + private static final class ArgumentAnalysis + { + private final Argument argument; + private final Optional tableArgumentAnalysis; + + public ArgumentAnalysis(Argument argument, Optional tableArgumentAnalysis) + { + this.argument = requireNonNull(argument, "argument is null"); + this.tableArgumentAnalysis = requireNonNull(tableArgumentAnalysis, "tableArgumentAnalysis is null"); + } + + public Argument getArgument() + { + return argument; + } + + public Optional getTableArgumentAnalysis() + { + return tableArgumentAnalysis; + } + } + + private static final class ArgumentsAnalysis + { + private final Map passedArguments; + private final List tableArgumentAnalyses; + + public ArgumentsAnalysis(Map passedArguments, List tableArgumentAnalyses) + { + this.passedArguments = 
ImmutableMap.copyOf(requireNonNull(passedArguments, "passedArguments is null")); + this.tableArgumentAnalyses = ImmutableList.copyOf(requireNonNull(tableArgumentAnalyses, "tableArgumentAnalyses is null")); + } + + public Map getPassedArguments() + { + return passedArguments; + } + + public List getTableArgumentAnalyses() + { + return tableArgumentAnalyses; + } + } } diff --git a/core/trino-main/src/main/java/io/trino/sql/planner/RelationPlanner.java b/core/trino-main/src/main/java/io/trino/sql/planner/RelationPlanner.java index 70556fd420ce..19d2d37b0d40 100644 --- a/core/trino-main/src/main/java/io/trino/sql/planner/RelationPlanner.java +++ b/core/trino-main/src/main/java/io/trino/sql/planner/RelationPlanner.java @@ -22,7 +22,6 @@ import io.trino.metadata.TableFunctionHandle; import io.trino.metadata.TableHandle; import io.trino.spi.connector.ColumnHandle; -import io.trino.spi.ptf.NameAndPosition; import io.trino.spi.type.RowType; import io.trino.spi.type.Type; import io.trino.sql.ExpressionUtils; @@ -89,7 +88,9 @@ import io.trino.sql.tree.SubqueryExpression; import io.trino.sql.tree.SubsetDefinition; import io.trino.sql.tree.Table; +import io.trino.sql.tree.TableFunctionDescriptorArgument; import io.trino.sql.tree.TableFunctionInvocation; +import io.trino.sql.tree.TableFunctionTableArgument; import io.trino.sql.tree.TableSubquery; import io.trino.sql.tree.Union; import io.trino.sql.tree.Unnest; @@ -328,6 +329,16 @@ private RelationPlan addColumnMasks(Table table, RelationPlan plan) @Override protected RelationPlan visitTableFunctionInvocation(TableFunctionInvocation node, Void context) { + node.getArguments().stream() + .forEach(argument -> { + if (argument.getValue() instanceof TableFunctionTableArgument) { + throw semanticException(NOT_SUPPORTED, argument, "Table arguments are not yet supported for table functions"); + } + if (argument.getValue() instanceof TableFunctionDescriptorArgument) { + throw semanticException(NOT_SUPPORTED, argument, "Descriptor 
arguments are not yet supported for table functions"); + } + }); + TableFunctionInvocationAnalysis functionAnalysis = analysis.getTableFunctionAnalysis(node); // TODO handle input relations: @@ -337,10 +348,10 @@ protected RelationPlan visitTableFunctionInvocation(TableFunctionInvocation node // - prune when empty property (from the actualArgument) // - pass through columns property (from the actualArgument) // - optional Specification: ordering scheme and partitioning (from the node's argument) <- planned upon the source's RelationPlan (or combined RelationPlan from all sources) + // TODO add - argument name + // TODO add - mapping column name => Symbol // TODO mind the fields without names and duplicate field names in RelationType List sources = ImmutableList.of(); List inputRelationsProperties = ImmutableList.of(); - // TODO rewrite column references to Symbols upon the source's RelationPlan (or combined RelationPlan from all sources) - Map inputDescriptorMappings = ImmutableMap.of(); Scope scope = analysis.getScope(node); // TODO pass columns from input relations, and make sure they have the right qualifier @@ -355,7 +366,6 @@ protected RelationPlan visitTableFunctionInvocation(TableFunctionInvocation node outputSymbols, sources.stream().map(RelationPlan::getRoot).collect(toImmutableList()), inputRelationsProperties, - inputDescriptorMappings, new TableFunctionHandle(functionAnalysis.getCatalogHandle(), functionAnalysis.getConnectorTableFunctionHandle(), functionAnalysis.getTransactionHandle())); return new RelationPlan(root, scope, outputSymbols, outerContext); diff --git a/core/trino-main/src/main/java/io/trino/sql/planner/optimizations/UnaliasSymbolReferences.java b/core/trino-main/src/main/java/io/trino/sql/planner/optimizations/UnaliasSymbolReferences.java index 02694735a291..ad2a77d37b42 100644 --- a/core/trino-main/src/main/java/io/trino/sql/planner/optimizations/UnaliasSymbolReferences.java +++ 
b/core/trino-main/src/main/java/io/trino/sql/planner/optimizations/UnaliasSymbolReferences.java @@ -321,7 +321,7 @@ public PlanAndMappings visitPatternRecognition(PatternRecognitionNode node, Unal @Override public PlanAndMappings visitTableFunction(TableFunctionNode node, UnaliasContext context) { - // TODO rewrite sources, tableArgumentProperties, and inputDescriptorMappings when we add support for input tables + // TODO rewrite sources, and tableArgumentProperties when we add support for input tables Map mapping = new HashMap<>(context.getCorrelationMapping()); SymbolMapper mapper = symbolMapper(mapping); @@ -335,7 +335,6 @@ public PlanAndMappings visitTableFunction(TableFunctionNode node, UnaliasContext newProperOutputs, node.getSources(), node.getTableArgumentProperties(), - node.getInputDescriptorMappings(), node.getHandle()), mapping); } diff --git a/core/trino-main/src/main/java/io/trino/sql/planner/plan/TableFunctionNode.java b/core/trino-main/src/main/java/io/trino/sql/planner/plan/TableFunctionNode.java index 11f925e73ab0..1fa713174fd0 100644 --- a/core/trino-main/src/main/java/io/trino/sql/planner/plan/TableFunctionNode.java +++ b/core/trino-main/src/main/java/io/trino/sql/planner/plan/TableFunctionNode.java @@ -17,7 +17,6 @@ import com.fasterxml.jackson.annotation.JsonProperty; import io.trino.metadata.TableFunctionHandle; import io.trino.spi.ptf.Argument; -import io.trino.spi.ptf.NameAndPosition; import io.trino.sql.planner.Symbol; import io.trino.sql.planner.plan.WindowNode.Specification; @@ -38,7 +37,6 @@ public class TableFunctionNode private final List properOutputs; private final List sources; private final List tableArgumentProperties; - private final Map inputDescriptorMappings; private final TableFunctionHandle handle; @JsonCreator @@ -49,7 +47,6 @@ public TableFunctionNode( @JsonProperty("properOutputs") List properOutputs, @JsonProperty("sources") List sources, @JsonProperty("tableArgumentProperties") List tableArgumentProperties, - 
@JsonProperty("inputDescriptorMappings") Map inputDescriptorMappings, @JsonProperty("handle") TableFunctionHandle handle) { super(id); @@ -58,7 +55,6 @@ public TableFunctionNode( this.properOutputs = requireNonNull(properOutputs, "properOutputs is null"); this.sources = requireNonNull(sources, "sources is null"); this.tableArgumentProperties = requireNonNull(tableArgumentProperties, "tableArgumentProperties is null"); - this.inputDescriptorMappings = requireNonNull(inputDescriptorMappings, "inputDescriptorMappings is null"); this.handle = requireNonNull(handle, "handle is null"); } @@ -86,12 +82,6 @@ public List getTableArgumentProperties() return tableArgumentProperties; } - @JsonProperty - public Map getInputDescriptorMappings() - { - return inputDescriptorMappings; - } - @JsonProperty public TableFunctionHandle getHandle() { @@ -122,7 +112,7 @@ public R accept(PlanVisitor visitor, C context) public PlanNode replaceChildren(List newSources) { checkArgument(sources.size() == newSources.size(), "wrong number of new children"); - return new TableFunctionNode(getId(), name, arguments, properOutputs, newSources, tableArgumentProperties, inputDescriptorMappings, handle); + return new TableFunctionNode(getId(), name, arguments, properOutputs, newSources, tableArgumentProperties, handle); } public static class TableArgumentProperties diff --git a/core/trino-main/src/main/java/io/trino/sql/planner/planprinter/PlanPrinter.java b/core/trino-main/src/main/java/io/trino/sql/planner/planprinter/PlanPrinter.java index 460f9f7c2d89..e873a2ed6b82 100644 --- a/core/trino-main/src/main/java/io/trino/sql/planner/planprinter/PlanPrinter.java +++ b/core/trino-main/src/main/java/io/trino/sql/planner/planprinter/PlanPrinter.java @@ -1658,7 +1658,7 @@ public Void visitTableFunction(TableFunctionNode node, Void context) ImmutableMap.of("name", node.getName())); checkArgument( - node.getSources().isEmpty() && node.getTableArgumentProperties().isEmpty() && 
node.getInputDescriptorMappings().isEmpty(), + node.getSources().isEmpty() && node.getTableArgumentProperties().isEmpty(), "Table or descriptor arguments are not yet supported in PlanPrinter"); node.getArguments().entrySet().stream() diff --git a/core/trino-main/src/test/java/io/trino/connector/TestingTableFunctions.java b/core/trino-main/src/test/java/io/trino/connector/TestingTableFunctions.java index 2b9df7592e9f..3fb954fd9235 100644 --- a/core/trino-main/src/test/java/io/trino/connector/TestingTableFunctions.java +++ b/core/trino-main/src/test/java/io/trino/connector/TestingTableFunctions.java @@ -23,8 +23,10 @@ import io.trino.spi.ptf.Argument; import io.trino.spi.ptf.ConnectorTableFunctionHandle; import io.trino.spi.ptf.Descriptor; +import io.trino.spi.ptf.DescriptorArgumentSpecification; import io.trino.spi.ptf.ScalarArgument; import io.trino.spi.ptf.ScalarArgumentSpecification; +import io.trino.spi.ptf.TableArgumentSpecification; import io.trino.spi.ptf.TableFunctionAnalysis; import java.util.List; @@ -39,6 +41,13 @@ public class TestingTableFunctions { + private static final String SCHEMA_NAME = "system"; + private static final ConnectorTableFunctionHandle HANDLE = new ConnectorTableFunctionHandle() {}; + private static final TableFunctionAnalysis ANALYSIS = TableFunctionAnalysis.builder() + .handle(HANDLE) + .returnedType(new Descriptor(ImmutableList.of(new Descriptor.Field("column", Optional.of(BOOLEAN))))) + .build(); + /** * A table function returning a table with single empty column of type BOOLEAN. * The argument `COLUMN` is the column name. 
@@ -48,7 +57,6 @@ public class TestingTableFunctions public static class SimpleTableFunction extends AbstractConnectorTableFunction { - private static final String SCHEMA_NAME = "system"; private static final String FUNCTION_NAME = "simple_table_function"; private static final String TABLE_NAME = "simple_table"; @@ -101,4 +109,125 @@ public MockConnectorTableHandle getTableHandle() } } } + + public static class TwoScalarArgumentsFunction + extends AbstractConnectorTableFunction + { + public TwoScalarArgumentsFunction() + { + super( + SCHEMA_NAME, + "two_arguments_function", + ImmutableList.of( + ScalarArgumentSpecification.builder() + .name("TEXT") + .type(VARCHAR) + .build(), + ScalarArgumentSpecification.builder() + .name("NUMBER") + .type(BIGINT) + .defaultValue(null) + .build()), + GENERIC_TABLE); + } + + @Override + public TableFunctionAnalysis analyze(ConnectorSession session, ConnectorTransactionHandle transaction, Map arguments) + { + return ANALYSIS; + } + } + + public static class TableArgumentFunction + extends AbstractConnectorTableFunction + { + public TableArgumentFunction() + { + super( + SCHEMA_NAME, + "table_argument_function", + ImmutableList.of( + TableArgumentSpecification.builder() + .name("INPUT") + .build()), + GENERIC_TABLE); + } + + @Override + public TableFunctionAnalysis analyze(ConnectorSession session, ConnectorTransactionHandle transaction, Map arguments) + { + return ANALYSIS; + } + } + + public static class TableArgumentRowSemanticsFunction + extends AbstractConnectorTableFunction + { + public TableArgumentRowSemanticsFunction() + { + super( + SCHEMA_NAME, + "table_argument_row_semantics_function", + ImmutableList.of( + TableArgumentSpecification.builder() + .name("INPUT") + .rowSemantics() + .build()), + GENERIC_TABLE); + } + + @Override + public TableFunctionAnalysis analyze(ConnectorSession session, ConnectorTransactionHandle transaction, Map arguments) + { + return ANALYSIS; + } + } + + public static class 
DescriptorArgumentFunction + extends AbstractConnectorTableFunction + { + public DescriptorArgumentFunction() + { + super( + SCHEMA_NAME, + "descriptor_argument_function", + ImmutableList.of( + DescriptorArgumentSpecification.builder() + .name("SCHEMA") + .defaultValue(null) + .build()), + GENERIC_TABLE); + } + + @Override + public TableFunctionAnalysis analyze(ConnectorSession session, ConnectorTransactionHandle transaction, Map arguments) + { + return ANALYSIS; + } + } + + public static class TwoTableArgumentsFunction + extends AbstractConnectorTableFunction + { + public TwoTableArgumentsFunction() + { + super( + SCHEMA_NAME, + "two_table_arguments_function", + ImmutableList.of( + TableArgumentSpecification.builder() + .name("INPUT1") + .build(), + TableArgumentSpecification.builder() + .name("INPUT2") + .build()), + GENERIC_TABLE); + } + + @Override + public TableFunctionAnalysis analyze(ConnectorSession session, ConnectorTransactionHandle transaction, Map arguments) + { + return ANALYSIS; + } + } } diff --git a/core/trino-main/src/test/java/io/trino/sql/analyzer/TestAnalyzer.java b/core/trino-main/src/test/java/io/trino/sql/analyzer/TestAnalyzer.java index 6b0dd3790460..329ad50ca54e 100644 --- a/core/trino-main/src/test/java/io/trino/sql/analyzer/TestAnalyzer.java +++ b/core/trino-main/src/test/java/io/trino/sql/analyzer/TestAnalyzer.java @@ -21,9 +21,15 @@ import io.trino.FeaturesConfig; import io.trino.Session; import io.trino.SystemSessionProperties; +import io.trino.connector.CatalogHandle; import io.trino.connector.CatalogServiceProvider; import io.trino.connector.MockConnectorFactory; import io.trino.connector.StaticConnectorFactory; +import io.trino.connector.TestingTableFunctions.DescriptorArgumentFunction; +import io.trino.connector.TestingTableFunctions.TableArgumentFunction; +import io.trino.connector.TestingTableFunctions.TableArgumentRowSemanticsFunction; +import io.trino.connector.TestingTableFunctions.TwoScalarArgumentsFunction; +import 
io.trino.connector.TestingTableFunctions.TwoTableArgumentsFunction; import io.trino.execution.DynamicFilterConfig; import io.trino.execution.QueryManagerConfig; import io.trino.execution.TaskManagerConfig; @@ -32,6 +38,7 @@ import io.trino.memory.MemoryManagerConfig; import io.trino.memory.NodeMemoryConfig; import io.trino.metadata.AnalyzePropertyManager; +import io.trino.metadata.CatalogTableFunctions; import io.trino.metadata.ColumnPropertyManager; import io.trino.metadata.InternalFunctionBundle; import io.trino.metadata.MaterializedViewDefinition; @@ -40,7 +47,10 @@ import io.trino.metadata.QualifiedObjectName; import io.trino.metadata.SchemaPropertyManager; import io.trino.metadata.SessionPropertyManager; +import io.trino.metadata.TableFunctionRegistry; import io.trino.metadata.TableHandle; +import io.trino.metadata.TableProceduresPropertyManager; +import io.trino.metadata.TableProceduresRegistry; import io.trino.metadata.TablePropertyManager; import io.trino.metadata.ViewColumn; import io.trino.metadata.ViewDefinition; @@ -65,6 +75,7 @@ import io.trino.spi.type.RowType; import io.trino.spi.type.Type; import io.trino.sql.PlannerContext; +import io.trino.sql.parser.ParsingException; import io.trino.sql.parser.ParsingOptions; import io.trino.sql.parser.SqlParser; import io.trino.sql.planner.OptimizerConfig; @@ -76,6 +87,8 @@ import io.trino.testing.TestingMetadata; import io.trino.testing.TestingMetadata.TestingTableHandle; import io.trino.testing.assertions.TrinoExceptionAssert; +import io.trino.transaction.NoOpTransactionManager; +import io.trino.transaction.TransactionId; import io.trino.transaction.TransactionManager; import org.intellij.lang.annotations.Language; import org.testng.annotations.AfterClass; @@ -106,6 +119,7 @@ import static io.trino.spi.StandardErrorCode.FUNCTION_NOT_FOUND; import static io.trino.spi.StandardErrorCode.INVALID_ARGUMENTS; import static io.trino.spi.StandardErrorCode.INVALID_COLUMN_REFERENCE; +import static 
io.trino.spi.StandardErrorCode.INVALID_COPARTITIONING; import static io.trino.spi.StandardErrorCode.INVALID_FUNCTION_ARGUMENT; import static io.trino.spi.StandardErrorCode.INVALID_LABEL; import static io.trino.spi.StandardErrorCode.INVALID_LIMIT_CLAUSE; @@ -124,6 +138,7 @@ import static io.trino.spi.StandardErrorCode.INVALID_WINDOW_MEASURE; import static io.trino.spi.StandardErrorCode.INVALID_WINDOW_REFERENCE; import static io.trino.spi.StandardErrorCode.MISMATCHED_COLUMN_ALIASES; +import static io.trino.spi.StandardErrorCode.MISSING_ARGUMENT; import static io.trino.spi.StandardErrorCode.MISSING_CATALOG_NAME; import static io.trino.spi.StandardErrorCode.MISSING_COLUMN_ALIASES; import static io.trino.spi.StandardErrorCode.MISSING_COLUMN_NAME; @@ -164,7 +179,6 @@ import static io.trino.spi.type.VarcharType.VARCHAR; import static io.trino.spi.type.VarcharType.createUnboundedVarcharType; import static io.trino.spi.type.VarcharType.createVarcharType; -import static io.trino.sql.analyzer.StatementAnalyzerFactory.createTestingStatementAnalyzerFactory; import static io.trino.sql.parser.ParsingOptions.DecimalLiteralTreatment.AS_DECIMAL; import static io.trino.sql.parser.ParsingOptions.DecimalLiteralTreatment.AS_DOUBLE; import static io.trino.testing.TestingAccessControlManager.TestingPrivilegeType.SELECT_COLUMN; @@ -180,6 +194,7 @@ import static java.util.Collections.nCopies; import static java.util.Objects.requireNonNull; import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; @Test(singleThreaded = true) public class TestAnalyzer @@ -6151,6 +6166,321 @@ public void testJsonArrayInAggregationContext() .hasMessage("line 1:8: 'JSON_ARRAY(b ABSENT ON NULL)' must be an aggregate expression or appear in GROUP BY clause"); } + @Test + public void testTableFunctionNotFound() + { + assertFails("SELECT * FROM TABLE(non_existent_table_function())") + .hasErrorCode(FUNCTION_NOT_FOUND) + .hasMessage("line 1:21: Table 
function non_existent_table_function not registered"); + } + + @Test + public void testTableFunctionArguments() + { + assertFails("SELECT * FROM TABLE(system.two_arguments_function(1, 2, 3))") + .hasErrorCode(INVALID_ARGUMENTS) + .hasMessage("line 1:51: Too many arguments. Expected at most 2 arguments, got 3 arguments"); + + analyze("SELECT * FROM TABLE(system.two_arguments_function('foo'))"); + analyze("SELECT * FROM TABLE(system.two_arguments_function(text => 'foo'))"); + analyze("SELECT * FROM TABLE(system.two_arguments_function('foo', 1))"); + analyze("SELECT * FROM TABLE(system.two_arguments_function(text => 'foo', number => 1))"); + + assertFails("SELECT * FROM TABLE(system.two_arguments_function('foo', number => 1))") + .hasErrorCode(INVALID_ARGUMENTS) + .hasMessage("line 1:51: All arguments must be passed by name or all must be passed positionally"); + assertFails("SELECT * FROM TABLE(system.two_arguments_function(text => 'foo', 1))") + .hasErrorCode(INVALID_ARGUMENTS) + .hasMessage("line 1:51: All arguments must be passed by name or all must be passed positionally"); + + assertFails("SELECT * FROM TABLE(system.two_arguments_function(text => 'foo', text => 'bar'))") + .hasErrorCode(INVALID_FUNCTION_ARGUMENT) + .hasMessage("line 1:66: Duplicate argument name: TEXT"); + // argument names are resolved in the canonical form + assertFails("SELECT * FROM TABLE(system.two_arguments_function(text => 'foo', TeXt => 'bar'))") + .hasErrorCode(INVALID_FUNCTION_ARGUMENT) + .hasMessage("line 1:66: Duplicate argument name: TEXT"); + + assertFails("SELECT * FROM TABLE(system.two_arguments_function(text => 'foo', bar => 'bar'))") + .hasErrorCode(INVALID_FUNCTION_ARGUMENT) + .hasMessage("line 1:66: Unexpected argument name: BAR"); + + assertFails("SELECT * FROM TABLE(system.two_arguments_function(number => 1))") + .hasErrorCode(MISSING_ARGUMENT) + .hasMessage("line 1:51: Missing argument: TEXT"); + } + + @Test + public void testTableArgument() + { + // cannot pass a table 
function as the argument + assertFails("SELECT * FROM TABLE(system.table_argument_function(input => my_schema.my_table_function(1)))") + .hasErrorCode(NOT_SUPPORTED) + .hasMessage("line 1:52: Invalid table argument INPUT. Table functions are not allowed as table function arguments"); + + assertThatThrownBy(() -> analyze("SELECT * FROM TABLE(system.table_argument_function(input => my_schema.my_table_function(arg => 1)))")) + .isInstanceOf(ParsingException.class) + .hasMessageContaining("line 1:93: mismatched input '=>'."); + + // cannot pass a table function as the argument, also preceding nested table function with TABLE is incorrect + assertThatThrownBy(() -> analyze("SELECT * FROM TABLE(system.table_argument_function(input => TABLE(my_schema.my_table_function(1))))")) + .isInstanceOf(ParsingException.class) + .hasMessageContaining("line 1:94: mismatched input '('."); + + // a table passed as the argument must be preceded with TABLE + analyze("SELECT * FROM TABLE(system.table_argument_function(input => TABLE(t1)))"); + + assertFails("SELECT * FROM TABLE(system.table_argument_function(input => t1))") + .hasErrorCode(INVALID_FUNCTION_ARGUMENT) + .hasMessage("line 1:52: Invalid argument INPUT. 
Expected table, got expression"); + + // a query passed as the argument must be preceded with TABLE + analyze("SELECT * FROM TABLE(system.table_argument_function(input => TABLE(SELECT * FROM t1)))"); + + assertThatThrownBy(() -> analyze("SELECT * FROM TABLE(system.table_argument_function(input => SELECT * FROM t1))")) + .isInstanceOf(ParsingException.class) + .hasMessageContaining("line 1:61: mismatched input 'SELECT'."); + + // query passed as the argument is correlated + analyze(""" + SELECT * + FROM + t1 + CROSS JOIN + LATERAL (SELECT * FROM TABLE(system.table_argument_function(input => TABLE(SELECT 1 WHERE a > 0)))) + """); + + // wrong argument type + assertFails("SELECT * FROM TABLE(system.table_argument_function(input => 'foo'))") + .hasErrorCode(INVALID_FUNCTION_ARGUMENT) + .hasMessage("line 1:52: Invalid argument INPUT. Expected table, got expression"); + assertFails("SELECT * FROM TABLE(system.table_argument_function(input => DESCRIPTOR(x int, y int)))") + .hasErrorCode(INVALID_FUNCTION_ARGUMENT) + .hasMessage("line 1:52: Invalid argument INPUT. Expected table, got descriptor"); + } + + @Test + public void testTableArgumentProperties() + { + analyze(""" + SELECT * FROM TABLE(system.table_argument_function( + input => TABLE(t1) + PARTITION BY a + KEEP WHEN EMPTY + ORDER BY b)) + """); + + assertFails("SELECT * FROM TABLE(system.table_argument_row_semantics_function(input => TABLE(t1) PARTITION BY a))") + .hasErrorCode(INVALID_FUNCTION_ARGUMENT) + .hasMessage("line 1:66: Invalid argument INPUT. 
Partitioning specified for table argument with row semantics"); + + assertFails("SELECT * FROM TABLE(system.table_argument_function(input => TABLE(SELECT 1 a) PARTITION BY b))") + .hasErrorCode(COLUMN_NOT_FOUND) + .hasMessage("line 1:92: Column b is not present in the input relation"); + + assertFails("SELECT * FROM TABLE(system.table_argument_function(input => TABLE(SELECT 1 a) ORDER BY 1))") + .hasErrorCode(INVALID_COLUMN_REFERENCE) + .hasMessage("line 1:88: Expected column reference. Actual: 1"); + + assertFails("SELECT * FROM TABLE(system.table_argument_function(input => TABLE(SELECT approx_set(1) a) PARTITION BY a))") + .hasErrorCode(TYPE_MISMATCH) + .hasMessage("line 1:104: HyperLogLog is not comparable, and therefore cannot be used in PARTITION BY"); + + assertFails("SELECT * FROM TABLE(system.table_argument_row_semantics_function(input => TABLE(t1) ORDER BY a))") + .hasErrorCode(INVALID_FUNCTION_ARGUMENT) + .hasMessage("line 1:66: Invalid argument INPUT. Ordering specified for table argument with row semantics"); + + assertFails("SELECT * FROM TABLE(system.table_argument_function(input => TABLE(SELECT 1 a) ORDER BY b))") + .hasErrorCode(COLUMN_NOT_FOUND) + .hasMessage("line 1:88: Column b is not present in the input relation"); + + assertFails("SELECT * FROM TABLE(system.table_argument_function(input => TABLE(SELECT 1 a) ORDER BY 1))") + .hasErrorCode(INVALID_COLUMN_REFERENCE) + .hasMessage("line 1:88: Expected column reference. Actual: 1"); + + assertFails("SELECT * FROM TABLE(system.table_argument_function(input => TABLE(SELECT approx_set(1) a) ORDER BY a))") + .hasErrorCode(TYPE_MISMATCH) + .hasMessage("line 1:100: HyperLogLog is not orderable, and therefore cannot be used in ORDER BY"); + + assertFails("SELECT * FROM TABLE(system.table_argument_row_semantics_function(input => TABLE(t1) PRUNE WHEN EMPTY))") + .hasErrorCode(INVALID_FUNCTION_ARGUMENT) + .hasMessage("line 1:85: Invalid argument INPUT. 
Empty behavior specified for table argument with row semantics"); + + assertFails("SELECT * FROM TABLE(system.table_argument_row_semantics_function(input => TABLE(t1) KEEP WHEN EMPTY))") + .hasErrorCode(INVALID_FUNCTION_ARGUMENT) + .hasMessage("line 1:85: Invalid argument INPUT. Empty behavior specified for table argument with row semantics"); + } + + @Test + public void testDescriptorArgument() + { + analyze("SELECT * FROM TABLE(system.descriptor_argument_function(schema => DESCRIPTOR(x integer, y boolean)))"); + + assertFails("SELECT * FROM TABLE(system.descriptor_argument_function(schema => DESCRIPTOR(1 + 2)))") + .hasErrorCode(INVALID_FUNCTION_ARGUMENT) + .hasMessage("line 1:57: Invalid descriptor argument SCHEMA. Descriptors should be formatted as 'DESCRIPTOR(name [type], ...)'"); + + assertFails("SELECT * FROM TABLE(system.descriptor_argument_function(schema => 1))") + .hasErrorCode(INVALID_FUNCTION_ARGUMENT) + .hasMessage("line 1:57: Invalid argument SCHEMA. Expected descriptor, got expression"); + + assertFails("SELECT * FROM TABLE(system.descriptor_argument_function(schema => TABLE(t1)))") + .hasErrorCode(INVALID_FUNCTION_ARGUMENT) + .hasMessage("line 1:57: Invalid argument SCHEMA. Expected descriptor, got table"); + + assertFails("SELECT * FROM TABLE(system.descriptor_argument_function(schema => DESCRIPTOR(x verybigint)))") + .hasErrorCode(TYPE_MISMATCH) + .hasMessage("line 1:80: Unknown type: verybigint"); + } + + @Test + public void testScalarArgument() + { + analyze("SELECT * FROM TABLE(system.two_arguments_function('foo', 1))"); + + assertFails("SELECT * FROM TABLE(system.two_arguments_function(text => 'a', number => DESCRIPTOR(x integer, y boolean)))") + .hasErrorCode(INVALID_FUNCTION_ARGUMENT) + .hasMessage("line 1:64: Invalid argument NUMBER. 
Expected expression, got descriptor"); + + assertFails("SELECT * FROM TABLE(system.two_arguments_function(text => 'a', number => DESCRIPTOR(1 + 2)))") + .hasErrorCode(INVALID_FUNCTION_ARGUMENT) + .hasMessage("line 1:64: 'descriptor' function is not allowed as a table function argument"); + + assertFails("SELECT * FROM TABLE(system.two_arguments_function(text => 'a', number => TABLE(t1)))") + .hasErrorCode(INVALID_FUNCTION_ARGUMENT) + .hasMessage("line 1:64: Invalid argument NUMBER. Expected expression, got table"); + + assertFails("SELECT * FROM TABLE(system.two_arguments_function(text => 'a', number => (SELECT 1)))") + .hasErrorCode(EXPRESSION_NOT_CONSTANT) + .hasMessage("line 1:74: Constant expression cannot contain a subquery"); + } + + @Test + public void testCopartitioning() + { + // TABLE(t1) is matched by fully qualified name: tpch.s1.t1. It matches the second copartition item s1.t1. + // Aliased relation TABLE(SELECT 1, 2) t1(x, y) is matched by unqualified name. It matches the first copartition item t1. + analyze(""" + SELECT * FROM TABLE(system.two_table_arguments_function( + input1 => TABLE(t1) PARTITION BY (a, b), + input2 => TABLE(SELECT 1, 2) t1(x, y) PARTITION BY (x, y) + COPARTITION (t1, s1.t1))) + """); + + // Copartition items t1, t2 are first matched to arguments by unqualified names, and when no match is found, by fully qualified names. + // TABLE(tpch.s1.t1) is matched by fully qualified name. It matches the first copartition item t1. + // TABLE(s1.t2) is matched by unqualified name: tpch.s1.t2. It matches the second copartition item t2. 
+ analyze(""" + SELECT * FROM TABLE(system.two_table_arguments_function( + input1 => TABLE(tpch.s1.t1) PARTITION BY (a, b), + input2 => TABLE(s1.t2) PARTITION BY (a, b) + COPARTITION (t1, t2))) + """); + + assertFails(""" + SELECT * FROM TABLE(system.two_table_arguments_function( + input1 => TABLE(t1) PARTITION BY (a, b), + input2 => TABLE(t2) PARTITION BY (a, b) + COPARTITION (t1, s1.foo))) + """) + .hasErrorCode(INVALID_COPARTITIONING) + .hasMessage("line 4:22: No table argument found for name: s1.foo"); + + // Both table arguments are matched by fully qualified name: tpch.s1.t1 + assertFails(""" + SELECT * FROM TABLE(system.two_table_arguments_function( + input1 => TABLE(t1) PARTITION BY (a, b), + input2 => TABLE(t1) PARTITION BY (a, b) + COPARTITION (t1, t2))) + """) + .hasErrorCode(INVALID_COPARTITIONING) + .hasMessage("line 4:18: Ambiguous reference: multiple table arguments found for name: t1"); + + // Both table arguments are matched by unqualified name: t1 + assertFails(""" + SELECT * FROM TABLE(system.two_table_arguments_function( + input1 => TABLE(SELECT 1, 2) t1(a, b) PARTITION BY (a, b), + input2 => TABLE(SELECT 3, 4) t1(c, d) PARTITION BY (c, d) + COPARTITION (t1, t2))) + """) + .hasErrorCode(INVALID_COPARTITIONING) + .hasMessage("line 4:18: Ambiguous reference: multiple table arguments found for name: t1"); + + assertFails(""" + SELECT * FROM TABLE(system.two_table_arguments_function( + input1 => TABLE(t1) PARTITION BY (a, b), + input2 => TABLE(t2) PARTITION BY (a, b) + COPARTITION (t1, t1))) + """) + .hasErrorCode(INVALID_COPARTITIONING) + .hasMessage("line 4:22: Multiple references to table argument: t1 in COPARTITION clause"); + } + + @Test + public void testCopartitionColumns() + { + assertFails(""" + SELECT * FROM TABLE(system.two_table_arguments_function( + input1 => TABLE(t1), + input2 => TABLE(t2) PARTITION BY (a, b) + COPARTITION (t1, t2))) + """) + .hasErrorCode(INVALID_COPARTITIONING) + .hasMessage("line 2:15: Table tpch.s1.t1 referenced 
in COPARTITION clause is not partitioned"); + + assertFails(""" + SELECT * FROM TABLE(system.two_table_arguments_function( + input1 => TABLE(t1) PARTITION BY (), + input2 => TABLE(t2) PARTITION BY () + COPARTITION (t1, t2))) + """) + .hasErrorCode(INVALID_COPARTITIONING) + .hasMessage("line 2:15: No partitioning columns specified for table tpch.s1.t1 referenced in COPARTITION clause"); + + assertFails(""" + SELECT * FROM TABLE(system.two_table_arguments_function( + input1 => TABLE(t1) PARTITION BY (a, b), + input2 => TABLE(t2) PARTITION BY (a) + COPARTITION (t1, t2))) + """) + .hasErrorCode(INVALID_COPARTITIONING) + .hasMessage("line 4:18: Numbers of partitioning columns in copartitioned tables do not match"); + + assertFails(""" + SELECT * FROM TABLE(system.two_table_arguments_function( + input1 => TABLE(SELECT 1) t1(a) PARTITION BY (a), + input2 => TABLE(SELECT 'x') t2(b) PARTITION BY (b) + COPARTITION (t1, t2))) + """) + .hasErrorCode(TYPE_MISMATCH) + .hasMessage("line 4:18: Partitioning columns in copartitioned tables have incompatible types"); + } + + @Test + public void testNullArguments() + { + // cannot pass null for table argument + assertFails("SELECT * FROM TABLE(system.table_argument_function(input => null))") + .hasErrorCode(INVALID_FUNCTION_ARGUMENT) + .hasMessage("line 1:52: Invalid argument INPUT. Expected table, got expression"); + + // the wrong way to pass null for descriptor + assertFails("SELECT * FROM TABLE(system.descriptor_argument_function(schema => null))") + .hasErrorCode(INVALID_FUNCTION_ARGUMENT) + .hasMessage("line 1:57: Invalid argument SCHEMA. 
Expected descriptor, got expression"); + + // the right way to pass null for descriptor + analyze("SELECT * FROM TABLE(system.descriptor_argument_function(schema => CAST(null AS DESCRIPTOR)))"); + + // the default value for the argument schema is null + analyze("SELECT * FROM TABLE(system.descriptor_argument_function())"); + + analyze("SELECT * FROM TABLE(system.two_arguments_function(null, null))"); + + // the default value for the second argument is null + analyze("SELECT * FROM TABLE(system.two_arguments_function('a'))"); + } + @BeforeClass public void setup() { @@ -6509,7 +6839,31 @@ private Analyzer createAnalyzer(Session session, AccessControl accessControl) new ColumnPropertyManager(CatalogServiceProvider.fail()), tablePropertyManager, new MaterializedViewPropertyManager(catalogName -> ImmutableMap.of())))); - StatementAnalyzerFactory statementAnalyzerFactory = createTestingStatementAnalyzerFactory(plannerContext, accessControl, tablePropertyManager, analyzePropertyManager); + StatementAnalyzerFactory statementAnalyzerFactory = new StatementAnalyzerFactory( + plannerContext, + new SqlParser(), + accessControl, + new NoOpTransactionManager() + { + // needed to analyze table functions + @Override + public ConnectorTransactionHandle getConnectorTransaction(TransactionId transactionId, CatalogHandle catalogHandle) + { + return new ConnectorTransactionHandle() {}; + } + }, + user -> ImmutableSet.of(), + new TableProceduresRegistry(CatalogServiceProvider.fail("procedures are not supported in testing analyzer")), + new TableFunctionRegistry(catalogName -> new CatalogTableFunctions(ImmutableList.of( + new TwoScalarArgumentsFunction(), + new TableArgumentFunction(), + new TableArgumentRowSemanticsFunction(), + new DescriptorArgumentFunction(), + new TwoTableArgumentsFunction()))), + new SessionPropertyManager(), + tablePropertyManager, + analyzePropertyManager, + new TableProceduresPropertyManager(CatalogServiceProvider.fail("procedures are not supported in testing 
analyzer"))); AnalyzerFactory analyzerFactory = new AnalyzerFactory(statementAnalyzerFactory, statementRewrite); return analyzerFactory.createAnalyzer( session, diff --git a/core/trino-parser/src/main/java/io/trino/sql/SqlFormatter.java b/core/trino-parser/src/main/java/io/trino/sql/SqlFormatter.java index 06f66c949a67..5078f58f7a43 100644 --- a/core/trino-parser/src/main/java/io/trino/sql/SqlFormatter.java +++ b/core/trino-parser/src/main/java/io/trino/sql/SqlFormatter.java @@ -36,7 +36,6 @@ import io.trino.sql.tree.Deny; import io.trino.sql.tree.DescribeInput; import io.trino.sql.tree.DescribeOutput; -import io.trino.sql.tree.DescriptorArgument; import io.trino.sql.tree.DropColumn; import io.trino.sql.tree.DropMaterializedView; import io.trino.sql.tree.DropRole; @@ -121,10 +120,11 @@ import io.trino.sql.tree.SingleColumn; import io.trino.sql.tree.StartTransaction; import io.trino.sql.tree.Table; -import io.trino.sql.tree.TableArgument; import io.trino.sql.tree.TableExecute; import io.trino.sql.tree.TableFunctionArgument; +import io.trino.sql.tree.TableFunctionDescriptorArgument; import io.trino.sql.tree.TableFunctionInvocation; +import io.trino.sql.tree.TableFunctionTableArgument; import io.trino.sql.tree.TableSubquery; import io.trino.sql.tree.TransactionAccessMode; import io.trino.sql.tree.TransactionMode; @@ -282,7 +282,7 @@ private void appendTableFunctionArguments(List arguments, } @Override - protected Void visitTableArgument(TableArgument node, Integer indent) + protected Void visitTableArgument(TableFunctionTableArgument node, Integer indent) { Relation relation = node.getTable(); Relation unaliased = relation instanceof AliasedRelation ? 
((AliasedRelation) relation).getRelation() : relation; @@ -302,14 +302,10 @@ protected Void visitTableArgument(TableArgument node, Integer indent) .map(ExpressionFormatter::formatExpression) .collect(joining(", "))); } - if (node.isPruneWhenEmpty()) { + node.getEmptyTableTreatment().ifPresent(treatment -> { builder.append("\n"); - append(indent, "PRUNE WHEN EMPTY"); - } - else { - builder.append("\n"); - append(indent, "KEEP WHEN EMPTY"); - } + append(indent, treatment.getTreatment().name() + " WHEN EMPTY"); + }); node.getOrderBy().ifPresent(orderBy -> { builder.append("\n"); append(indent, formatOrderBy(orderBy)); @@ -319,7 +315,7 @@ protected Void visitTableArgument(TableArgument node, Integer indent) } @Override - protected Void visitDescriptorArgument(DescriptorArgument node, Integer indent) + protected Void visitDescriptorArgument(TableFunctionDescriptorArgument node, Integer indent) { if (node.getDescriptor().isPresent()) { builder.append(node.getDescriptor().get().getFields().stream() diff --git a/core/trino-parser/src/main/java/io/trino/sql/parser/AstBuilder.java b/core/trino-parser/src/main/java/io/trino/sql/parser/AstBuilder.java index 0a6e47fbdffb..4f13e97e9455 100644 --- a/core/trino-parser/src/main/java/io/trino/sql/parser/AstBuilder.java +++ b/core/trino-parser/src/main/java/io/trino/sql/parser/AstBuilder.java @@ -72,6 +72,8 @@ import io.trino.sql.tree.DropTable; import io.trino.sql.tree.DropView; import io.trino.sql.tree.EmptyPattern; +import io.trino.sql.tree.EmptyTableTreatment; +import io.trino.sql.tree.EmptyTableTreatment.Treatment; import io.trino.sql.tree.Except; import io.trino.sql.tree.ExcludedPattern; import io.trino.sql.tree.Execute; @@ -220,11 +222,11 @@ import io.trino.sql.tree.SubscriptExpression; import io.trino.sql.tree.SubsetDefinition; import io.trino.sql.tree.Table; -import io.trino.sql.tree.TableArgument; import io.trino.sql.tree.TableElement; import io.trino.sql.tree.TableExecute; import io.trino.sql.tree.TableFunctionArgument; 
import io.trino.sql.tree.TableFunctionInvocation; +import io.trino.sql.tree.TableFunctionTableArgument; import io.trino.sql.tree.TableSubquery; import io.trino.sql.tree.TimeLiteral; import io.trino.sql.tree.TimestampLiteral; @@ -271,8 +273,6 @@ import static io.trino.sql.parser.SqlBaseParser.TIMESTAMP; import static io.trino.sql.tree.AnchorPattern.Type.PARTITION_END; import static io.trino.sql.tree.AnchorPattern.Type.PARTITION_START; -import static io.trino.sql.tree.DescriptorArgument.descriptorArgument; -import static io.trino.sql.tree.DescriptorArgument.nullDescriptorArgument; import static io.trino.sql.tree.JsonExists.ErrorBehavior.ERROR; import static io.trino.sql.tree.JsonExists.ErrorBehavior.FALSE; import static io.trino.sql.tree.JsonExists.ErrorBehavior.TRUE; @@ -301,6 +301,8 @@ import static io.trino.sql.tree.SkipTo.skipToFirst; import static io.trino.sql.tree.SkipTo.skipToLast; import static io.trino.sql.tree.SkipTo.skipToNextRow; +import static io.trino.sql.tree.TableFunctionDescriptorArgument.descriptorArgument; +import static io.trino.sql.tree.TableFunctionDescriptorArgument.nullDescriptorArgument; import static java.lang.String.format; import static java.util.Locale.ENGLISH; import static java.util.Objects.requireNonNull; @@ -1879,9 +1881,15 @@ public Node visitTableArgument(SqlBaseParser.TableArgumentContext context) orderBy = Optional.of(new OrderBy(visit(context.sortItem(), SortItem.class))); } - boolean pruneWhenEmpty = context.PRUNE() != null; + Optional emptyTableTreatment = Optional.empty(); + if (context.PRUNE() != null) { + emptyTableTreatment = Optional.of(new EmptyTableTreatment(getLocation(context.PRUNE()), Treatment.PRUNE)); + } + else if (context.KEEP() != null) { + emptyTableTreatment = Optional.of(new EmptyTableTreatment(getLocation(context.KEEP()), Treatment.KEEP)); + } - return new TableArgument(getLocation(context), table, partitionBy, orderBy, pruneWhenEmpty); + return new TableFunctionTableArgument(getLocation(context), table, 
partitionBy, orderBy, emptyTableTreatment); } @Override diff --git a/core/trino-parser/src/main/java/io/trino/sql/tree/AstVisitor.java b/core/trino-parser/src/main/java/io/trino/sql/tree/AstVisitor.java index 86bf82db3c61..215b86378d41 100644 --- a/core/trino-parser/src/main/java/io/trino/sql/tree/AstVisitor.java +++ b/core/trino-parser/src/main/java/io/trino/sql/tree/AstVisitor.java @@ -1097,12 +1097,12 @@ protected R visitTableFunctionArgument(TableFunctionArgument node, C context) return visitNode(node, context); } - protected R visitTableArgument(TableArgument node, C context) + protected R visitTableArgument(TableFunctionTableArgument node, C context) { return visitNode(node, context); } - protected R visitDescriptorArgument(DescriptorArgument node, C context) + protected R visitDescriptorArgument(TableFunctionDescriptorArgument node, C context) { return visitNode(node, context); } @@ -1146,4 +1146,9 @@ protected R visitJsonArray(JsonArray node, C context) { return visitExpression(node, context); } + + protected R visitEmptyTableTreatment(EmptyTableTreatment node, C context) + { + return visitNode(node, context); + } } diff --git a/core/trino-parser/src/main/java/io/trino/sql/tree/EmptyTableTreatment.java b/core/trino-parser/src/main/java/io/trino/sql/tree/EmptyTableTreatment.java new file mode 100644 index 000000000000..fc49f28df879 --- /dev/null +++ b/core/trino-parser/src/main/java/io/trino/sql/tree/EmptyTableTreatment.java @@ -0,0 +1,93 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.trino.sql.tree; + +import com.google.common.collect.ImmutableList; + +import java.util.List; +import java.util.Objects; +import java.util.Optional; + +import static com.google.common.base.MoreObjects.toStringHelper; +import static java.util.Objects.requireNonNull; + +public final class EmptyTableTreatment + extends Node +{ + private final Treatment treatment; + + public EmptyTableTreatment(NodeLocation location, Treatment treatment) + { + super(Optional.of(location)); + this.treatment = requireNonNull(treatment, "treatment is null"); + } + + public Treatment getTreatment() + { + return treatment; + } + + @Override + public R accept(AstVisitor visitor, C context) + { + return visitor.visitEmptyTableTreatment(this, context); + } + + @Override + public List getChildren() + { + return ImmutableList.of(); + } + + @Override + public boolean equals(Object obj) + { + if (this == obj) { + return true; + } + if ((obj == null) || (getClass() != obj.getClass())) { + return false; + } + return treatment == ((EmptyTableTreatment) obj).treatment; + } + + @Override + public int hashCode() + { + return Objects.hash(treatment); + } + + @Override + public String toString() + { + return toStringHelper(this) + .add("treatment", treatment) + .toString(); + } + + @Override + public boolean shallowEquals(Node other) + { + if (!sameClass(this, other)) { + return false; + } + + return treatment == ((EmptyTableTreatment) other).treatment; + } + + public enum Treatment + { + KEEP, PRUNE + } +} diff --git a/core/trino-parser/src/main/java/io/trino/sql/tree/TableFunctionArgument.java b/core/trino-parser/src/main/java/io/trino/sql/tree/TableFunctionArgument.java index b403313cfd59..396c2661007e 100644 --- a/core/trino-parser/src/main/java/io/trino/sql/tree/TableFunctionArgument.java +++ b/core/trino-parser/src/main/java/io/trino/sql/tree/TableFunctionArgument.java @@ -33,7 
+33,7 @@ public TableFunctionArgument(NodeLocation location, Optional name, N super(Optional.of(location)); this.name = requireNonNull(name, "name is null"); requireNonNull(value, "value is null"); - checkArgument(value instanceof TableArgument || value instanceof DescriptorArgument || value instanceof Expression); + checkArgument(value instanceof TableFunctionTableArgument || value instanceof TableFunctionDescriptorArgument || value instanceof Expression); this.value = value; } diff --git a/core/trino-parser/src/main/java/io/trino/sql/tree/DescriptorArgument.java b/core/trino-parser/src/main/java/io/trino/sql/tree/TableFunctionDescriptorArgument.java similarity index 76% rename from core/trino-parser/src/main/java/io/trino/sql/tree/DescriptorArgument.java rename to core/trino-parser/src/main/java/io/trino/sql/tree/TableFunctionDescriptorArgument.java index c5f1f21a1b9a..374b98e43908 100644 --- a/core/trino-parser/src/main/java/io/trino/sql/tree/DescriptorArgument.java +++ b/core/trino-parser/src/main/java/io/trino/sql/tree/TableFunctionDescriptorArgument.java @@ -21,23 +21,23 @@ import static java.util.Objects.requireNonNull; -public class DescriptorArgument +public class TableFunctionDescriptorArgument extends Node { private final Optional descriptor; - public static DescriptorArgument descriptorArgument(NodeLocation location, Descriptor descriptor) + public static TableFunctionDescriptorArgument descriptorArgument(NodeLocation location, Descriptor descriptor) { requireNonNull(descriptor, "descriptor is null"); - return new DescriptorArgument(location, Optional.of(descriptor)); + return new TableFunctionDescriptorArgument(location, Optional.of(descriptor)); } - public static DescriptorArgument nullDescriptorArgument(NodeLocation location) + public static TableFunctionDescriptorArgument nullDescriptorArgument(NodeLocation location) { - return new DescriptorArgument(location, Optional.empty()); + return new TableFunctionDescriptorArgument(location, 
Optional.empty()); } - private DescriptorArgument(NodeLocation location, Optional descriptor) + private TableFunctionDescriptorArgument(NodeLocation location, Optional descriptor) { super(Optional.of(location)); this.descriptor = descriptor; @@ -69,7 +69,7 @@ public boolean equals(Object o) if (o == null || getClass() != o.getClass()) { return false; } - return Objects.equals(descriptor, ((DescriptorArgument) o).descriptor); + return Objects.equals(descriptor, ((TableFunctionDescriptorArgument) o).descriptor); } @Override diff --git a/core/trino-parser/src/main/java/io/trino/sql/tree/TableArgument.java b/core/trino-parser/src/main/java/io/trino/sql/tree/TableFunctionTableArgument.java similarity index 81% rename from core/trino-parser/src/main/java/io/trino/sql/tree/TableArgument.java rename to core/trino-parser/src/main/java/io/trino/sql/tree/TableFunctionTableArgument.java index 6431d9d6b627..f44b4409ca69 100644 --- a/core/trino-parser/src/main/java/io/trino/sql/tree/TableArgument.java +++ b/core/trino-parser/src/main/java/io/trino/sql/tree/TableFunctionTableArgument.java @@ -23,26 +23,26 @@ import static io.trino.sql.ExpressionFormatter.formatSortItems; import static java.util.Objects.requireNonNull; -public class TableArgument +public class TableFunctionTableArgument extends Node { private final Relation table; private final Optional> partitionBy; // it is allowed to partition by empty list private final Optional orderBy; - private final boolean pruneWhenEmpty; + private final Optional emptyTableTreatment; - public TableArgument( + public TableFunctionTableArgument( NodeLocation location, Relation table, Optional> partitionBy, Optional orderBy, - boolean pruneWhenEmpty) + Optional emptyTableTreatment) { super(Optional.of(location)); this.table = requireNonNull(table, "table is null"); this.partitionBy = requireNonNull(partitionBy, "partitionBy is null"); this.orderBy = requireNonNull(orderBy, "orderBy is null"); - this.pruneWhenEmpty = pruneWhenEmpty; + 
this.emptyTableTreatment = requireNonNull(emptyTableTreatment, "emptyTableTreatment is null"); } public Relation getTable() @@ -60,9 +60,9 @@ public Optional getOrderBy() return orderBy; } - public boolean isPruneWhenEmpty() + public Optional getEmptyTableTreatment() { - return pruneWhenEmpty; + return emptyTableTreatment; } @Override @@ -78,6 +78,7 @@ public List getChildren() builder.add(table); partitionBy.ifPresent(builder::addAll); orderBy.ifPresent(builder::add); + emptyTableTreatment.ifPresent(builder::add); return builder.build(); } @@ -92,17 +93,17 @@ public boolean equals(Object o) return false; } - TableArgument other = (TableArgument) o; + TableFunctionTableArgument other = (TableFunctionTableArgument) o; return Objects.equals(table, other.table) && Objects.equals(partitionBy, other.partitionBy) && Objects.equals(orderBy, other.orderBy) && - pruneWhenEmpty == other.pruneWhenEmpty; + Objects.equals(emptyTableTreatment, other.emptyTableTreatment); } @Override public int hashCode() { - return Objects.hash(table, partitionBy, orderBy, pruneWhenEmpty); + return Objects.hash(table, partitionBy, orderBy, emptyTableTreatment); } @Override @@ -123,10 +124,6 @@ public String toString() @Override public boolean shallowEquals(Node o) { - if (!sameClass(this, o)) { - return false; - } - - return pruneWhenEmpty == ((TableArgument) o).pruneWhenEmpty; + return sameClass(this, o); } } diff --git a/core/trino-parser/src/test/java/io/trino/sql/parser/TestSqlParser.java b/core/trino-parser/src/test/java/io/trino/sql/parser/TestSqlParser.java index 3d2114dc6bba..35757563d01a 100644 --- a/core/trino-parser/src/test/java/io/trino/sql/parser/TestSqlParser.java +++ b/core/trino-parser/src/test/java/io/trino/sql/parser/TestSqlParser.java @@ -63,6 +63,7 @@ import io.trino.sql.tree.DropTable; import io.trino.sql.tree.DropView; import io.trino.sql.tree.EmptyPattern; +import io.trino.sql.tree.EmptyTableTreatment; import io.trino.sql.tree.Execute; import 
io.trino.sql.tree.ExistsPredicate; import io.trino.sql.tree.Explain; @@ -187,10 +188,10 @@ import io.trino.sql.tree.SubscriptExpression; import io.trino.sql.tree.SubsetDefinition; import io.trino.sql.tree.Table; -import io.trino.sql.tree.TableArgument; import io.trino.sql.tree.TableExecute; import io.trino.sql.tree.TableFunctionArgument; import io.trino.sql.tree.TableFunctionInvocation; +import io.trino.sql.tree.TableFunctionTableArgument; import io.trino.sql.tree.TableSubquery; import io.trino.sql.tree.TimeLiteral; import io.trino.sql.tree.TimestampLiteral; @@ -256,8 +257,7 @@ import static io.trino.sql.tree.ArithmeticUnaryExpression.positive; import static io.trino.sql.tree.ComparisonExpression.Operator.EQUAL; import static io.trino.sql.tree.DateTimeDataType.Type.TIMESTAMP; -import static io.trino.sql.tree.DescriptorArgument.descriptorArgument; -import static io.trino.sql.tree.DescriptorArgument.nullDescriptorArgument; +import static io.trino.sql.tree.EmptyTableTreatment.Treatment.PRUNE; import static io.trino.sql.tree.FrameBound.Type.CURRENT_ROW; import static io.trino.sql.tree.FrameBound.Type.FOLLOWING; import static io.trino.sql.tree.JsonPathParameter.JsonFormat.JSON; @@ -273,6 +273,8 @@ import static io.trino.sql.tree.SortItem.NullOrdering.UNDEFINED; import static io.trino.sql.tree.SortItem.Ordering.ASCENDING; import static io.trino.sql.tree.SortItem.Ordering.DESCENDING; +import static io.trino.sql.tree.TableFunctionDescriptorArgument.descriptorArgument; +import static io.trino.sql.tree.TableFunctionDescriptorArgument.nullDescriptorArgument; import static io.trino.sql.tree.Trim.Specification.BOTH; import static io.trino.sql.tree.Trim.Specification.LEADING; import static io.trino.sql.tree.Trim.Specification.TRAILING; @@ -3921,7 +3923,7 @@ public void testTableFunctionInvocation() new TableFunctionArgument( location(1, 77), Optional.of(new Identifier(location(1, 77), "arg1", false)), - new TableArgument( + new TableFunctionTableArgument( location(1, 85), new 
AliasedRelation( location(1, 85), @@ -3933,7 +3935,7 @@ public void testTableFunctionInvocation() new Identifier(location(1, 112), "c", false))), Optional.of(ImmutableList.of(new Identifier(location(1, 196), "a", false))), Optional.of(new OrderBy(ImmutableList.of(new SortItem(location(1, 360), new Identifier(location(1, 360), "b", false), ASCENDING, LAST)))), - true)), + Optional.of(new EmptyTableTreatment(location(1, 266), PRUNE)))), new TableFunctionArgument( location(1, 425), Optional.of(new Identifier(location(1, 425), "arg2", false)), @@ -3976,12 +3978,12 @@ public void testTableFunctionTableArgumentAliasing() ImmutableList.of(new TableFunctionArgument( location(1, 30), Optional.of(new Identifier(location(1, 30), "input", false)), - new TableArgument( + new TableFunctionTableArgument( location(1, 39), new Table(location(1, 39), qualifiedName(location(1, 45), "orders")), Optional.empty(), Optional.empty(), - false))), + Optional.empty()))), ImmutableList.of()))); // table alias; no column aliases @@ -3992,7 +3994,7 @@ public void testTableFunctionTableArgumentAliasing() ImmutableList.of(new TableFunctionArgument( location(1, 30), Optional.of(new Identifier(location(1, 30), "input", false)), - new TableArgument( + new TableFunctionTableArgument( location(1, 39), new AliasedRelation( location(1, 39), @@ -4001,7 +4003,7 @@ public void testTableFunctionTableArgumentAliasing() null), Optional.empty(), Optional.empty(), - false))), + Optional.empty()))), ImmutableList.of()))); // table alias and column aliases @@ -4012,7 +4014,7 @@ public void testTableFunctionTableArgumentAliasing() ImmutableList.of(new TableFunctionArgument( location(1, 30), Optional.of(new Identifier(location(1, 30), "input", false)), - new TableArgument( + new TableFunctionTableArgument( location(1, 39), new AliasedRelation( location(1, 39), @@ -4024,7 +4026,7 @@ public void testTableFunctionTableArgumentAliasing() new Identifier(location(1, 66), "c", false))), Optional.empty(), Optional.empty(), 
- false))), + Optional.empty()))), ImmutableList.of()))); } diff --git a/core/trino-spi/src/main/java/io/trino/spi/StandardErrorCode.java b/core/trino-spi/src/main/java/io/trino/spi/StandardErrorCode.java index 040471bca038..1f78f61f38f3 100644 --- a/core/trino-spi/src/main/java/io/trino/spi/StandardErrorCode.java +++ b/core/trino-spi/src/main/java/io/trino/spi/StandardErrorCode.java @@ -141,6 +141,7 @@ public enum StandardErrorCode INVALID_JSON_LITERAL(117, USER_ERROR), JSON_VALUE_RESULT_ERROR(118, USER_ERROR), MERGE_TARGET_ROW_MULTIPLE_MATCHES(119, USER_ERROR), + INVALID_COPARTITIONING(120, USER_ERROR), GENERIC_INTERNAL_ERROR(65536, INTERNAL_ERROR), TOO_MANY_REQUESTS_FAILED(65537, INTERNAL_ERROR), diff --git a/core/trino-spi/src/main/java/io/trino/spi/ptf/Descriptor.java b/core/trino-spi/src/main/java/io/trino/spi/ptf/Descriptor.java index 11456c2d4e65..48a4d2a12dfe 100644 --- a/core/trino-spi/src/main/java/io/trino/spi/ptf/Descriptor.java +++ b/core/trino-spi/src/main/java/io/trino/spi/ptf/Descriptor.java @@ -21,6 +21,7 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.List; +import java.util.Objects; import java.util.Optional; import java.util.stream.Collectors; @@ -72,6 +73,25 @@ public boolean isTyped() return fields.stream().allMatch(field -> field.type.isPresent()); } + @Override + public boolean equals(Object o) + { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Descriptor that = (Descriptor) o; + return fields.equals(that.fields); + } + + @Override + public int hashCode() + { + return Objects.hash(fields); + } + public static class Field { private final String name; @@ -95,5 +115,24 @@ public Optional getType() { return type; } + + @Override + public boolean equals(Object o) + { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Field field = (Field) o; + return name.equals(field.name) && type.equals(field.type); + 
} + + @Override + public int hashCode() + { + return Objects.hash(name, type); + } } } diff --git a/core/trino-spi/src/main/java/io/trino/spi/ptf/DescriptorArgument.java b/core/trino-spi/src/main/java/io/trino/spi/ptf/DescriptorArgument.java index 85fca6e2b4ef..9af83d5f22fb 100644 --- a/core/trino-spi/src/main/java/io/trino/spi/ptf/DescriptorArgument.java +++ b/core/trino-spi/src/main/java/io/trino/spi/ptf/DescriptorArgument.java @@ -18,6 +18,7 @@ import io.trino.spi.Experimental; import io.trino.spi.expression.ConnectorExpression; +import java.util.Objects; import java.util.Optional; import static java.util.Objects.requireNonNull; @@ -47,6 +48,25 @@ public Optional getDescriptor() return descriptor; } + @Override + public boolean equals(Object o) + { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + DescriptorArgument that = (DescriptorArgument) o; + return descriptor.equals(that.descriptor); + } + + @Override + public int hashCode() + { + return Objects.hash(descriptor); + } + public static Builder builder() { return new Builder(); diff --git a/core/trino-spi/src/main/java/io/trino/spi/ptf/DescriptorMapping.java b/core/trino-spi/src/main/java/io/trino/spi/ptf/DescriptorMapping.java deleted file mode 100644 index 3a910300c721..000000000000 --- a/core/trino-spi/src/main/java/io/trino/spi/ptf/DescriptorMapping.java +++ /dev/null @@ -1,86 +0,0 @@ -/* - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package io.trino.spi.ptf; - -import io.trino.spi.Experimental; - -import java.util.HashMap; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; - -import static io.trino.spi.ptf.Preconditions.checkArgument; -import static io.trino.spi.ptf.Preconditions.checkNotNullOrEmpty; -import static java.lang.String.format; -import static java.util.Objects.requireNonNull; - -@Experimental(eta = "2022-10-31") -public class DescriptorMapping -{ - public static final DescriptorMapping EMPTY_MAPPING = new DescriptorMappingBuilder().build(); - - private final Map mappingByField; - private final Map mappingByDescriptor; - - private DescriptorMapping(Map mappingByField, Map mappingByDescriptor) - { - this.mappingByField = Map.copyOf(requireNonNull(mappingByField, "mappingByField is null")); - this.mappingByDescriptor = Map.copyOf(requireNonNull(mappingByDescriptor, "mappingByDescriptor is null")); - } - - public Map getMappingByField() - { - return mappingByField; - } - - public Map getMappingByDescriptor() - { - return mappingByDescriptor; - } - - public boolean isEmpty() - { - return mappingByField.isEmpty() && mappingByDescriptor.isEmpty(); - } - - public static class DescriptorMappingBuilder - { - private final Map mappingByField = new HashMap<>(); - private final Map mappingByDescriptor = new HashMap<>(); - private final Set descriptorsMappedByField = new HashSet<>(); - - public DescriptorMappingBuilder mapField(String descriptor, int field, String table) - { - checkNotNullOrEmpty(table, "table"); - checkArgument(!mappingByDescriptor.containsKey(descriptor), format("duplicate mapping for descriptor: %s, field: %s", descriptor, field)); - checkArgument(mappingByField.put(new NameAndPosition(descriptor, field), table) == null, format("duplicate mapping for descriptor: %s, field: %s", descriptor, field)); - descriptorsMappedByField.add(descriptor); - return this; - } - - public DescriptorMappingBuilder mapAllFields(String descriptor, String table) - { - 
checkNotNullOrEmpty(descriptor, "descriptor"); - checkNotNullOrEmpty(table, "table"); - checkArgument(!descriptorsMappedByField.contains(descriptor), "duplicate mapping for field of descriptor: " + descriptor); - checkArgument(mappingByDescriptor.put(descriptor, table) == null, "duplicate mapping for descriptor: " + descriptor); - return this; - } - - public DescriptorMapping build() - { - return new DescriptorMapping(mappingByField, mappingByDescriptor); - } - } -} diff --git a/core/trino-spi/src/main/java/io/trino/spi/ptf/ScalarArgument.java b/core/trino-spi/src/main/java/io/trino/spi/ptf/ScalarArgument.java index 3a981e3f5cbc..f0cea1c7bc57 100644 --- a/core/trino-spi/src/main/java/io/trino/spi/ptf/ScalarArgument.java +++ b/core/trino-spi/src/main/java/io/trino/spi/ptf/ScalarArgument.java @@ -17,8 +17,11 @@ import com.fasterxml.jackson.annotation.JsonProperty; import io.trino.spi.Experimental; import io.trino.spi.expression.ConnectorExpression; +import io.trino.spi.predicate.NullableValue; import io.trino.spi.type.Type; +import javax.annotation.Nullable; + import static java.util.Objects.requireNonNull; /** @@ -37,27 +40,39 @@ public class ScalarArgument private final Type type; // native representation + @Nullable private final Object value; - @JsonCreator - public ScalarArgument(@JsonProperty("type") Type type, @JsonProperty("value") Object value) + public ScalarArgument(Type type, Object value) { this.type = requireNonNull(type, "type is null"); this.value = value; } - @JsonProperty public Type getType() { return type; } - @JsonProperty public Object getValue() { return value; } + // deserialization + @JsonCreator + public static ScalarArgument fromNullableValue(@JsonProperty("nullableValue") NullableValue nullableValue) + { + return new ScalarArgument(nullableValue.getType(), nullableValue.getValue()); + } + + // serialization + @JsonProperty + public NullableValue getNullableValue() + { + return new NullableValue(type, value); + } + public static Builder 
builder() { return new Builder(); diff --git a/core/trino-spi/src/main/java/io/trino/spi/ptf/TableArgument.java b/core/trino-spi/src/main/java/io/trino/spi/ptf/TableArgument.java index 7c807eb6001c..1f2ece78339b 100644 --- a/core/trino-spi/src/main/java/io/trino/spi/ptf/TableArgument.java +++ b/core/trino-spi/src/main/java/io/trino/spi/ptf/TableArgument.java @@ -20,9 +20,7 @@ import io.trino.spi.type.RowType; import java.util.List; -import java.util.Optional; -import static io.trino.spi.ptf.Preconditions.checkNotNullOrEmpty; import static java.util.Objects.requireNonNull; /** @@ -35,37 +33,19 @@ public class TableArgument extends Argument { - private final Optional name; private final RowType rowType; private final List partitionBy; - private final List orderBy; - private final boolean rowSemantics; - private final boolean pruneWhenEmpty; - private final boolean passThroughColumns; + private final List orderBy; @JsonCreator public TableArgument( - @JsonProperty("name") Optional name, @JsonProperty("rowType") RowType rowType, @JsonProperty("partitionBy") List partitionBy, - @JsonProperty("orderBy") List orderBy, - @JsonProperty("rowSemantics") boolean rowSemantics, - @JsonProperty("pruneWhenEmpty") boolean pruneWhenEmpty, - @JsonProperty("passThroughColumns") boolean passThroughColumns) + @JsonProperty("orderBy") List orderBy) { - this.name = requireNonNull(name, "name is null"); this.rowType = requireNonNull(rowType, "rowType is null"); this.partitionBy = requireNonNull(partitionBy, "partitionBy is null"); this.orderBy = requireNonNull(orderBy, "orderBy is null"); - this.rowSemantics = rowSemantics; - this.pruneWhenEmpty = pruneWhenEmpty; - this.passThroughColumns = passThroughColumns; - } - - @JsonProperty - public Optional getName() - { - return name; } @JsonProperty @@ -81,29 +61,11 @@ public List getPartitionBy() } @JsonProperty - public List getOrderBy() + public List getOrderBy() { return orderBy; } - @JsonProperty - public boolean isRowSemantics() - { - 
return rowSemantics; - } - - @JsonProperty - public boolean isPruneWhenEmpty() - { - return pruneWhenEmpty; - } - - @JsonProperty - public boolean isPassThroughColumns() - { - return passThroughColumns; - } - public static Builder builder() { return new Builder(); @@ -111,22 +73,12 @@ public static Builder builder() public static final class Builder { - private Optional name; private RowType rowType; private List partitionBy = List.of(); - private List orderBy = List.of(); - private boolean rowSemantics; - private boolean pruneWhenEmpty; - private boolean passThroughColumns; + private List orderBy = List.of(); private Builder() {} - public Builder name(Optional name) - { - this.name = name; - return this; - } - public Builder rowType(RowType rowType) { this.rowType = rowType; @@ -139,105 +91,15 @@ public Builder partitionBy(List partitionBy) return this; } - public Builder orderBy(List orderBy) + public Builder orderBy(List orderBy) { this.orderBy = orderBy; return this; } - public Builder rowSemantics(boolean rowSemantics) - { - this.rowSemantics = rowSemantics; - return this; - } - - public Builder pruneWhenEmpty(boolean pruneWhenEmpty) - { - this.pruneWhenEmpty = pruneWhenEmpty; - return this; - } - - public Builder passThroughColumns(boolean passThroughColumns) - { - this.passThroughColumns = passThroughColumns; - return this; - } - public TableArgument build() { - return new TableArgument(name, rowType, partitionBy, orderBy, rowSemantics, pruneWhenEmpty, passThroughColumns); - } - } - - public static class QualifiedName - { - private final String catalogName; - private final String schemaName; - private final String tableName; - - @JsonCreator - public QualifiedName( - @JsonProperty("catalogName") String catalogName, - @JsonProperty("schemaName") String schemaName, - @JsonProperty("tableName") String tableName) - { - this.catalogName = checkNotNullOrEmpty(catalogName, "catalogName"); - this.schemaName = checkNotNullOrEmpty(schemaName, "schemaName"); - 
this.tableName = checkNotNullOrEmpty(tableName, "tableName"); - } - - @JsonProperty - public String getCatalogName() - { - return catalogName; - } - - @JsonProperty - public String getSchemaName() - { - return schemaName; - } - - @JsonProperty - public String getTableName() - { - return tableName; - } - } - - public static class SortItem - { - private final String column; - private final boolean ascending; - private final boolean nullsLast; - - @JsonCreator - public SortItem( - @JsonProperty("column") String column, - @JsonProperty("ascending") boolean ascending, - @JsonProperty("nullsFirst") boolean nullsFirst) - { - this.column = checkNotNullOrEmpty(column, "ordering column"); - this.ascending = ascending; - this.nullsLast = nullsFirst; - } - - @JsonProperty - public String getColumn() - { - return column; - } - - @JsonProperty - public boolean isAscending() - { - return ascending; - } - - @JsonProperty - public boolean isNullsLast() - { - return nullsLast; + return new TableArgument(rowType, partitionBy, orderBy); } } } diff --git a/core/trino-spi/src/main/java/io/trino/spi/ptf/TableArgumentSpecification.java b/core/trino-spi/src/main/java/io/trino/spi/ptf/TableArgumentSpecification.java index 3df865fcd707..c9a1aa16c519 100644 --- a/core/trino-spi/src/main/java/io/trino/spi/ptf/TableArgumentSpecification.java +++ b/core/trino-spi/src/main/java/io/trino/spi/ptf/TableArgumentSpecification.java @@ -70,6 +70,7 @@ public Builder name(String name) public Builder rowSemantics() { this.rowSemantics = true; + this.pruneWhenEmpty = true; return this; } diff --git a/core/trino-spi/src/main/java/io/trino/spi/ptf/TableFunctionAnalysis.java b/core/trino-spi/src/main/java/io/trino/spi/ptf/TableFunctionAnalysis.java index ae676df25428..a415c54d61f6 100644 --- a/core/trino-spi/src/main/java/io/trino/spi/ptf/TableFunctionAnalysis.java +++ b/core/trino-spi/src/main/java/io/trino/spi/ptf/TableFunctionAnalysis.java @@ -17,7 +17,6 @@ import java.util.Optional; -import static 
io.trino.spi.ptf.DescriptorMapping.EMPTY_MAPPING; import static io.trino.spi.ptf.Preconditions.checkArgument; import static java.util.Objects.requireNonNull; @@ -29,12 +28,6 @@ * Function, that is, the columns produced by the function, as opposed to the columns passed from the * input tables. The `returnedType` should only be set if the declared returned type is GENERIC_TABLE. *

- * The `descriptorMapping` field is used to inform the Analyzer of the semantics of descriptor arguments. - * Some descriptor arguments (or some of their fields) might be references to columns of the input tables. - * In such case, the Analyzer must be informed of those dependencies. It allows to pass the right values - * (input channels) to the Table Function during execution. It also allows to prune unused input columns - * during the optimization phase. - *

* The `handle` field can be used to carry all information necessary to execute the table function, * gathered at analysis time. Typically, these are the values of the constant arguments, and results * of pre-processing arguments. @@ -43,14 +36,12 @@ public final class TableFunctionAnalysis { private final Optional returnedType; - private final DescriptorMapping descriptorMapping; private final ConnectorTableFunctionHandle handle; - private TableFunctionAnalysis(Optional returnedType, DescriptorMapping descriptorMapping, ConnectorTableFunctionHandle handle) + private TableFunctionAnalysis(Optional returnedType, ConnectorTableFunctionHandle handle) { this.returnedType = requireNonNull(returnedType, "returnedType is null"); returnedType.ifPresent(descriptor -> checkArgument(descriptor.isTyped(), "field types not specified")); - this.descriptorMapping = requireNonNull(descriptorMapping, "descriptorMapping is null"); this.handle = requireNonNull(handle, "handle is null"); } @@ -59,11 +50,6 @@ public Optional getReturnedType() return returnedType; } - public DescriptorMapping getDescriptorMapping() - { - return descriptorMapping; - } - public ConnectorTableFunctionHandle getHandle() { return handle; @@ -77,7 +63,6 @@ public static Builder builder() public static final class Builder { private Descriptor returnedType; - private DescriptorMapping descriptorMapping = EMPTY_MAPPING; private ConnectorTableFunctionHandle handle = new ConnectorTableFunctionHandle() {}; private Builder() {} @@ -88,12 +73,6 @@ public Builder returnedType(Descriptor returnedType) return this; } - public Builder descriptorMapping(DescriptorMapping descriptorMapping) - { - this.descriptorMapping = descriptorMapping; - return this; - } - public Builder handle(ConnectorTableFunctionHandle handle) { this.handle = handle; @@ -102,7 +81,7 @@ public Builder handle(ConnectorTableFunctionHandle handle) public TableFunctionAnalysis build() { - return new 
TableFunctionAnalysis(Optional.ofNullable(returnedType), descriptorMapping, handle); + return new TableFunctionAnalysis(Optional.ofNullable(returnedType), handle); } } }