diff --git a/DEVELOPMENT.md b/DEVELOPMENT.md index a47a1f9e74e4..6499054c5d15 100644 --- a/DEVELOPMENT.md +++ b/DEVELOPMENT.md @@ -124,6 +124,19 @@ Enable errorprone ([Error Prone Installation#IDEA](https://errorprone.info/docs/ - In ``Java Compiler`` tab, select ``Javac with error-prone`` as the compiler, - Update ``Additional command line parameters`` with ``-XepExcludedPaths:.*/target/generated-(|test-)sources/.* -XepDisableAllChecks -Xep:MissingOverride:ERROR ......`` (for current recommended list of command line parameters, see the top level ``pom.xml``, the definition of the ``errorprone-compiler`` profile. +### Language injection in IDE + +In order to enable language injection inside Intellij IDEA, some code elements can be annotated with the `@org.intellij.lang.annotations.Language` annotation. To make it useful, we recommend: + +- Set the project-wide SQL dialect in ``Languages & Frameworks | SQL Dialects`` - "Generic SQL" is a decent choice here, +- Disable inspection ``SQL | No data source configured``, +- Optionally disable inspection ``Language injection | Language mismatch``. + +Even if the IDE does not support language injection this annotation is useful for documenting the API's intent. Considering the above, we recommend annotating with `@Language`: + +- All API parameters which are expecting to take a `String` containing an SQL statement (or any other language, like regular expressions), +- Local variables which otherwise would not be properly recognized by IDE for language injection. + ## Building the Web UI The Trino Web UI is composed of several React components and is written in JSX and ES6. This source code is compiled and packaged into browser-compatible Javascript, which is then checked in to the Trino source code (in the `dist` folder). You must have [Node.js](https://nodejs.org/en/download/) and [Yarn](https://yarnpkg.com/en/) installed to execute these commands. 
To update this folder after making changes, simply run: diff --git a/core/trino-main/src/main/java/io/trino/connector/ConnectorManager.java b/core/trino-main/src/main/java/io/trino/connector/ConnectorManager.java index 4f75f7154ff2..26df3478a497 100644 --- a/core/trino-main/src/main/java/io/trino/connector/ConnectorManager.java +++ b/core/trino-main/src/main/java/io/trino/connector/ConnectorManager.java @@ -299,6 +299,7 @@ private synchronized void addConnectorInternal(MaterializedConnector connector) .ifPresent(accessControl -> accessControlManager.addCatalogAccessControl(catalogName, accessControl)); metadataManager.getTablePropertyManager().addProperties(catalogName, connector.getTableProperties()); + metadataManager.getMaterializedViewPropertyManager().addProperties(catalogName, connector.getMaterializedViewProperties()); metadataManager.getColumnPropertyManager().addProperties(catalogName, connector.getColumnProperties()); metadataManager.getSchemaPropertyManager().addProperties(catalogName, connector.getSchemaProperties()); metadataManager.getAnalyzePropertyManager().addProperties(catalogName, connector.getAnalyzeProperties()); @@ -328,6 +329,7 @@ private synchronized void removeConnectorInternal(CatalogName catalogName) metadataManager.getProcedureRegistry().removeProcedures(catalogName); accessControlManager.removeCatalogAccessControl(catalogName); metadataManager.getTablePropertyManager().removeProperties(catalogName); + metadataManager.getMaterializedViewPropertyManager().removeProperties(catalogName); metadataManager.getColumnPropertyManager().removeProperties(catalogName); metadataManager.getSchemaPropertyManager().removeProperties(catalogName); metadataManager.getAnalyzePropertyManager().removeProperties(catalogName); @@ -403,6 +405,7 @@ private static class MaterializedConnector private final List eventListeners; private final List> sessionProperties; private final List> tableProperties; + private final List> materializedViewProperties; private final 
List> schemaProperties; private final List> columnProperties; private final List> analyzeProperties; @@ -493,6 +496,10 @@ public MaterializedConnector(CatalogName catalogName, Connector connector) requireNonNull(tableProperties, format("Connector '%s' returned a null table properties set", catalogName)); this.tableProperties = ImmutableList.copyOf(tableProperties); + List> materializedViewProperties = connector.getMaterializedViewProperties(); + requireNonNull(materializedViewProperties, format("Connector '%s' returned a null materialized view properties set", catalogName)); + this.materializedViewProperties = ImmutableList.copyOf(materializedViewProperties); + List> schemaProperties = connector.getSchemaProperties(); requireNonNull(schemaProperties, format("Connector '%s' returned a null schema properties set", catalogName)); this.schemaProperties = ImmutableList.copyOf(schemaProperties); @@ -571,6 +578,11 @@ public List> getTableProperties() return tableProperties; } + public List> getMaterializedViewProperties() + { + return materializedViewProperties; + } + public List> getColumnProperties() { return columnProperties; diff --git a/core/trino-main/src/main/java/io/trino/connector/system/MaterializedViewPropertiesSystemTable.java b/core/trino-main/src/main/java/io/trino/connector/system/MaterializedViewPropertiesSystemTable.java new file mode 100644 index 000000000000..907c6a8ff066 --- /dev/null +++ b/core/trino-main/src/main/java/io/trino/connector/system/MaterializedViewPropertiesSystemTable.java @@ -0,0 +1,29 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.trino.connector.system; + +import io.trino.metadata.Metadata; +import io.trino.transaction.TransactionManager; + +import javax.inject.Inject; + +public class MaterializedViewPropertiesSystemTable + extends AbstractPropertiesSystemTable +{ + @Inject + public MaterializedViewPropertiesSystemTable(TransactionManager transactionManager, Metadata metadata) + { + super("materialized_view_properties", transactionManager, () -> metadata.getMaterializedViewPropertyManager().getAllProperties()); + } +} diff --git a/core/trino-main/src/main/java/io/trino/connector/system/SystemConnectorModule.java b/core/trino-main/src/main/java/io/trino/connector/system/SystemConnectorModule.java index 8ca12e51a3a4..dfda21c2fc0a 100644 --- a/core/trino-main/src/main/java/io/trino/connector/system/SystemConnectorModule.java +++ b/core/trino-main/src/main/java/io/trino/connector/system/SystemConnectorModule.java @@ -52,6 +52,7 @@ public void configure(Binder binder) globalTableBinder.addBinding().to(TableCommentSystemTable.class).in(Scopes.SINGLETON); globalTableBinder.addBinding().to(SchemaPropertiesSystemTable.class).in(Scopes.SINGLETON); globalTableBinder.addBinding().to(TablePropertiesSystemTable.class).in(Scopes.SINGLETON); + globalTableBinder.addBinding().to(MaterializedViewPropertiesSystemTable.class).in(Scopes.SINGLETON); globalTableBinder.addBinding().to(ColumnPropertiesSystemTable.class).in(Scopes.SINGLETON); globalTableBinder.addBinding().to(AnalyzePropertiesSystemTable.class).in(Scopes.SINGLETON); 
globalTableBinder.addBinding().to(TransactionsSystemTable.class).in(Scopes.SINGLETON); diff --git a/core/trino-main/src/main/java/io/trino/execution/CreateMaterializedViewTask.java b/core/trino-main/src/main/java/io/trino/execution/CreateMaterializedViewTask.java index a26bb61e4885..95313bf8b863 100644 --- a/core/trino-main/src/main/java/io/trino/execution/CreateMaterializedViewTask.java +++ b/core/trino-main/src/main/java/io/trino/execution/CreateMaterializedViewTask.java @@ -105,7 +105,7 @@ public ListenableFuture execute( .orElseThrow(() -> new TrinoException(NOT_FOUND, "Catalog does not exist: " + name.getCatalogName())); Map sqlProperties = mapFromProperties(statement.getProperties()); - Map properties = metadata.getTablePropertyManager().getProperties( + Map properties = metadata.getMaterializedViewPropertyManager().getProperties( catalogName, name.getCatalogName(), sqlProperties, diff --git a/core/trino-main/src/main/java/io/trino/metadata/MaterializedViewPropertyManager.java b/core/trino-main/src/main/java/io/trino/metadata/MaterializedViewPropertyManager.java new file mode 100644 index 000000000000..d1b0179f923f --- /dev/null +++ b/core/trino-main/src/main/java/io/trino/metadata/MaterializedViewPropertyManager.java @@ -0,0 +1,25 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.trino.metadata; + +import static io.trino.spi.StandardErrorCode.INVALID_MATERIALIZED_VIEW_PROPERTY; + +public class MaterializedViewPropertyManager + extends AbstractPropertyManager +{ + public MaterializedViewPropertyManager() + { + super("materialized view", INVALID_MATERIALIZED_VIEW_PROPERTY); + } +} diff --git a/core/trino-main/src/main/java/io/trino/metadata/Metadata.java b/core/trino-main/src/main/java/io/trino/metadata/Metadata.java index bb76cd1e54a8..15b098f50dd9 100644 --- a/core/trino-main/src/main/java/io/trino/metadata/Metadata.java +++ b/core/trino-main/src/main/java/io/trino/metadata/Metadata.java @@ -607,6 +607,8 @@ default ResolvedFunction getCoercion(Type fromType, Type toType) TablePropertyManager getTablePropertyManager(); + MaterializedViewPropertyManager getMaterializedViewPropertyManager(); + ColumnPropertyManager getColumnPropertyManager(); AnalyzePropertyManager getAnalyzePropertyManager(); diff --git a/core/trino-main/src/main/java/io/trino/metadata/MetadataManager.java b/core/trino-main/src/main/java/io/trino/metadata/MetadataManager.java index b65adbca7665..018a6bd258af 100644 --- a/core/trino-main/src/main/java/io/trino/metadata/MetadataManager.java +++ b/core/trino-main/src/main/java/io/trino/metadata/MetadataManager.java @@ -195,6 +195,7 @@ public final class MetadataManager private final SessionPropertyManager sessionPropertyManager; private final SchemaPropertyManager schemaPropertyManager; private final TablePropertyManager tablePropertyManager; + private final MaterializedViewPropertyManager materializedViewPropertyManager; private final ColumnPropertyManager columnPropertyManager; private final AnalyzePropertyManager analyzePropertyManager; private final TransactionManager transactionManager; @@ -214,6 +215,7 @@ public MetadataManager( SessionPropertyManager sessionPropertyManager, SchemaPropertyManager schemaPropertyManager, TablePropertyManager tablePropertyManager, + MaterializedViewPropertyManager 
materializedViewPropertyManager, ColumnPropertyManager columnPropertyManager, AnalyzePropertyManager analyzePropertyManager, TransactionManager transactionManager, @@ -230,6 +232,7 @@ public MetadataManager( this.sessionPropertyManager = requireNonNull(sessionPropertyManager, "sessionPropertyManager is null"); this.schemaPropertyManager = requireNonNull(schemaPropertyManager, "schemaPropertyManager is null"); this.tablePropertyManager = requireNonNull(tablePropertyManager, "tablePropertyManager is null"); + this.materializedViewPropertyManager = requireNonNull(materializedViewPropertyManager, "materializedViewPropertyManager is null"); this.columnPropertyManager = requireNonNull(columnPropertyManager, "columnPropertyManager is null"); this.analyzePropertyManager = requireNonNull(analyzePropertyManager, "analyzePropertyManager is null"); this.transactionManager = requireNonNull(transactionManager, "transactionManager is null"); @@ -302,6 +305,7 @@ public static MetadataManager createTestMetadataManager(TransactionManager trans new SessionPropertyManager(), new SchemaPropertyManager(), new TablePropertyManager(), + new MaterializedViewPropertyManager(), new ColumnPropertyManager(), new AnalyzePropertyManager(), transactionManager, @@ -2242,6 +2246,12 @@ public TablePropertyManager getTablePropertyManager() return tablePropertyManager; } + @Override + public MaterializedViewPropertyManager getMaterializedViewPropertyManager() + { + return materializedViewPropertyManager; + } + @Override public ColumnPropertyManager getColumnPropertyManager() { diff --git a/core/trino-main/src/main/java/io/trino/operator/scalar/CharacterStringCasts.java b/core/trino-main/src/main/java/io/trino/operator/scalar/CharacterStringCasts.java index 6439856b06f5..d283f05a8f7f 100644 --- a/core/trino-main/src/main/java/io/trino/operator/scalar/CharacterStringCasts.java +++ b/core/trino-main/src/main/java/io/trino/operator/scalar/CharacterStringCasts.java @@ -13,7 +13,6 @@ */ package 
io.trino.operator.scalar; -import com.google.common.collect.ImmutableList; import io.airlift.slice.Slice; import io.airlift.slice.SliceUtf8; import io.airlift.slice.Slices; @@ -22,12 +21,10 @@ import io.trino.spi.function.OperatorType; import io.trino.spi.function.ScalarOperator; import io.trino.spi.function.SqlType; - -import java.util.ArrayList; -import java.util.List; +import it.unimi.dsi.fastutil.ints.IntArrayList; +import it.unimi.dsi.fastutil.ints.IntList; import static com.google.common.base.Verify.verify; -import static com.google.common.collect.ImmutableList.toImmutableList; import static io.airlift.slice.SliceUtf8.getCodePointAt; import static io.airlift.slice.SliceUtf8.lengthOfCodePoint; import static io.airlift.slice.SliceUtf8.setCodePointAt; @@ -35,7 +32,6 @@ import static io.trino.spi.type.Chars.truncateToLengthAndTrimSpaces; import static io.trino.spi.type.Varchars.truncateToLength; import static java.lang.Math.toIntExact; -import static java.util.Collections.nCopies; public final class CharacterStringCasts { @@ -92,16 +88,14 @@ public static Slice charToVarcharCast(@LiteralParameter("x") Long x, @LiteralPar @LiteralParameters({"x", "y"}) public static Slice varcharToCharSaturatedFloorCast(@LiteralParameter("y") long y, @SqlType("varchar(x)") Slice slice) { - List codePoints = new ArrayList<>(toCodePoints(slice)); + IntList codePoints = toCodePoints(slice); // if Varchar(x) value length (including spaces) is greater than y, we can just truncate it if (codePoints.size() >= y) { // char(y) slice representation doesn't contain trailing spaces - codePoints = trimTrailing(codePoints, ' '); - List codePointsTruncated = codePoints.stream() - .limit(y) - .collect(toImmutableList()); - return codePointsToSliceUtf8(codePointsTruncated); + codePoints.size(Math.min(toIntExact(y), codePoints.size())); + trimTrailing(codePoints, ' '); + return codePointsToSliceUtf8(codePoints); } /* @@ -109,49 +103,52 @@ public static Slice 
varcharToCharSaturatedFloorCast(@LiteralParameter("y") long * We decrement last character in input (in fact, we decrement last non-zero character) and pad the value with * max code point up to y characters. */ - codePoints = trimTrailing(codePoints, '\0'); + trimTrailing(codePoints, '\0'); if (codePoints.isEmpty()) { // No non-zero characters in input and input is shorter than y. Input value is smaller than any char(4) casted back to varchar, so we return the smallest char(4) possible - return codePointsToSliceUtf8(nCopies(toIntExact(y), (int) '\0')); + return Slices.allocate(toIntExact(y)); } - codePoints = new ArrayList<>(codePoints); codePoints.set(codePoints.size() - 1, codePoints.get(codePoints.size() - 1) - 1); - codePoints.addAll(nCopies(toIntExact(y) - codePoints.size(), Character.MAX_CODE_POINT)); + int toAdd = toIntExact(y) - codePoints.size(); + for (int i = 0; i < toAdd; i++) { + codePoints.add(Character.MAX_CODE_POINT); + } - verify(codePoints.get(codePoints.size() - 1) != ' '); // no trailing spaces to trim + verify(codePoints.getInt(codePoints.size() - 1) != ' '); // no trailing spaces to trim return codePointsToSliceUtf8(codePoints); } - private static List trimTrailing(List codePoints, int codePointToTrim) + private static void trimTrailing(IntList codePoints, int codePointToTrim) { int endIndex = codePoints.size(); while (endIndex > 0 && codePoints.get(endIndex - 1) == codePointToTrim) { endIndex--; } - return ImmutableList.copyOf(codePoints.subList(0, endIndex)); + codePoints.size(endIndex); } - private static List toCodePoints(Slice slice) + private static IntList toCodePoints(Slice slice) { - ImmutableList.Builder codePoints = ImmutableList.builder(); + IntList codePoints = new IntArrayList(slice.length()); for (int offset = 0; offset < slice.length(); ) { int codePoint = getCodePointAt(slice, offset); offset += lengthOfCodePoint(slice, offset); codePoints.add(codePoint); } - return codePoints.build(); + return codePoints; } - public static 
Slice codePointsToSliceUtf8(List codePoints) + public static Slice codePointsToSliceUtf8(IntList codePoints) { - int length = codePoints.stream() - .mapToInt(SliceUtf8::lengthOfCodePoint) - .sum(); + int bufferLength = 0; + for (int codePoint : codePoints) { + bufferLength += SliceUtf8.lengthOfCodePoint(codePoint); + } - Slice result = Slices.wrappedBuffer(new byte[length]); + Slice result = Slices.wrappedBuffer(new byte[bufferLength]); int offset = 0; for (int codePoint : codePoints) { setCodePointAt(codePoint, result, offset); diff --git a/core/trino-main/src/main/java/io/trino/server/ServerMainModule.java b/core/trino-main/src/main/java/io/trino/server/ServerMainModule.java index 9c860a86c211..377b9595bb87 100644 --- a/core/trino-main/src/main/java/io/trino/server/ServerMainModule.java +++ b/core/trino-main/src/main/java/io/trino/server/ServerMainModule.java @@ -74,6 +74,7 @@ import io.trino.metadata.ForNodeManager; import io.trino.metadata.HandleJsonModule; import io.trino.metadata.InternalNodeManager; +import io.trino.metadata.MaterializedViewPropertyManager; import io.trino.metadata.Metadata; import io.trino.metadata.MetadataManager; import io.trino.metadata.SchemaPropertyManager; @@ -222,6 +223,9 @@ protected void setup(Binder binder) // table properties binder.bind(TablePropertyManager.class).in(Scopes.SINGLETON); + // materialized view properties + binder.bind(MaterializedViewPropertyManager.class).in(Scopes.SINGLETON); + // column properties binder.bind(ColumnPropertyManager.class).in(Scopes.SINGLETON); diff --git a/core/trino-main/src/main/java/io/trino/sql/analyzer/Analysis.java b/core/trino-main/src/main/java/io/trino/sql/analyzer/Analysis.java index 94f96381c0dd..54c739723a8b 100644 --- a/core/trino-main/src/main/java/io/trino/sql/analyzer/Analysis.java +++ b/core/trino-main/src/main/java/io/trino/sql/analyzer/Analysis.java @@ -20,7 +20,6 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import 
com.google.common.collect.ImmutableSet; -import com.google.common.collect.ListMultimap; import com.google.common.collect.Multimap; import com.google.common.collect.Multiset; import com.google.common.collect.Streams; @@ -74,6 +73,7 @@ import java.util.ArrayDeque; import java.util.ArrayList; import java.util.Collection; +import java.util.Collections; import java.util.Deque; import java.util.HashSet; import java.util.LinkedHashMap; @@ -92,7 +92,6 @@ import static com.google.common.collect.ImmutableSet.toImmutableSet; import static java.lang.String.format; import static java.util.Collections.emptyList; -import static java.util.Collections.unmodifiableList; import static java.util.Collections.unmodifiableMap; import static java.util.Collections.unmodifiableSet; import static java.util.Objects.requireNonNull; @@ -148,11 +147,7 @@ public class Analysis private final Map, Expression> joins = new LinkedHashMap<>(); private final Map, JoinUsingAnalysis> joinUsing = new LinkedHashMap<>(); - - private final ListMultimap, InPredicate> inPredicatesSubqueries = ArrayListMultimap.create(); - private final ListMultimap, SubqueryExpression> scalarSubqueries = ArrayListMultimap.create(); - private final ListMultimap, ExistsPredicate> existsSubqueries = ArrayListMultimap.create(); - private final ListMultimap, QuantifiedComparisonExpression> quantifiedComparisonSubqueries = ArrayListMultimap.create(); + private final Map, SubqueryAnalysis> subqueries = new LinkedHashMap<>(); private final Map, TableEntry> tables = new LinkedHashMap<>(); @@ -427,11 +422,11 @@ public Expression getJoinCriteria(Join join) public void recordSubqueries(Node node, ExpressionAnalysis expressionAnalysis) { - NodeRef key = NodeRef.of(node); - this.inPredicatesSubqueries.putAll(key, dereference(expressionAnalysis.getSubqueryInPredicates())); - this.scalarSubqueries.putAll(key, dereference(expressionAnalysis.getScalarSubqueries())); - this.existsSubqueries.putAll(key, 
dereference(expressionAnalysis.getExistsSubqueries())); - this.quantifiedComparisonSubqueries.putAll(key, dereference(expressionAnalysis.getQuantifiedComparisons())); + SubqueryAnalysis subqueries = this.subqueries.computeIfAbsent(NodeRef.of(node), key -> new SubqueryAnalysis()); + subqueries.addInPredicates(dereference(expressionAnalysis.getSubqueryInPredicates())); + subqueries.addScalarSubqueries(dereference(expressionAnalysis.getScalarSubqueries())); + subqueries.addExistsSubqueries(dereference(expressionAnalysis.getExistsSubqueries())); + subqueries.addQuantifiedComparisons(dereference(expressionAnalysis.getQuantifiedComparisons())); } private List dereference(Collection> nodeRefs) @@ -441,24 +436,9 @@ private List dereference(Collection> nodeRefs) .collect(toImmutableList()); } - public List getInPredicateSubqueries(Node node) - { - return ImmutableList.copyOf(inPredicatesSubqueries.get(NodeRef.of(node))); - } - - public List getScalarSubqueries(Node node) + public SubqueryAnalysis getSubqueries(Node node) { - return ImmutableList.copyOf(scalarSubqueries.get(NodeRef.of(node))); - } - - public List getExistsSubqueries(Node node) - { - return ImmutableList.copyOf(existsSubqueries.get(NodeRef.of(node))); - } - - public List getQuantifiedComparisonSubqueries(Node node) - { - return unmodifiableList(quantifiedComparisonSubqueries.get(NodeRef.of(node))); + return subqueries.computeIfAbsent(NodeRef.of(node), key -> new SubqueryAnalysis()); } public void addWindowDefinition(QuerySpecification query, CanonicalizationAware name, ResolvedWindow window) @@ -1276,6 +1256,54 @@ public Optional getOrdinalityField() } } + public static class SubqueryAnalysis + { + private final List inPredicatesSubqueries = new ArrayList<>(); + private final List scalarSubqueries = new ArrayList<>(); + private final List existsSubqueries = new ArrayList<>(); + private final List quantifiedComparisonSubqueries = new ArrayList<>(); + + public void addInPredicates(List expressions) + { + 
inPredicatesSubqueries.addAll(expressions); + } + + public void addScalarSubqueries(List expressions) + { + scalarSubqueries.addAll(expressions); + } + + public void addExistsSubqueries(List expressions) + { + existsSubqueries.addAll(expressions); + } + + public void addQuantifiedComparisons(List expressions) + { + quantifiedComparisonSubqueries.addAll(expressions); + } + + public List getInPredicatesSubqueries() + { + return Collections.unmodifiableList(inPredicatesSubqueries); + } + + public List getScalarSubqueries() + { + return Collections.unmodifiableList(scalarSubqueries); + } + + public List getExistsSubqueries() + { + return Collections.unmodifiableList(existsSubqueries); + } + + public List getQuantifiedComparisonSubqueries() + { + return Collections.unmodifiableList(quantifiedComparisonSubqueries); + } + } + public static class ResolvedWindow { private final List partitionBy; diff --git a/core/trino-main/src/main/java/io/trino/sql/analyzer/StatementAnalyzer.java b/core/trino-main/src/main/java/io/trino/sql/analyzer/StatementAnalyzer.java index a5251e702a74..38f02aca6dc3 100644 --- a/core/trino-main/src/main/java/io/trino/sql/analyzer/StatementAnalyzer.java +++ b/core/trino-main/src/main/java/io/trino/sql/analyzer/StatementAnalyzer.java @@ -1470,6 +1470,9 @@ private List translateMaterializedViewColumn private Scope createScopeForView(Table table, QualifiedObjectName name, Optional scope, ConnectorViewDefinition view) { + if (!view.isRunAsInvoker() && view.getOwner().isEmpty()) { + throw semanticException(INVALID_VIEW, table, "Owner must be present in view '%s' with SECURITY DEFINER mode", name); + } return createScopeForView(table, name, scope, view.getOriginalSql(), view.getCatalog(), view.getSchema(), view.getOwner(), view.getColumns()); } diff --git a/core/trino-main/src/main/java/io/trino/sql/planner/QueryPlanner.java b/core/trino-main/src/main/java/io/trino/sql/planner/QueryPlanner.java index aa45804b111c..44c052a1ec0f 100644 --- 
a/core/trino-main/src/main/java/io/trino/sql/planner/QueryPlanner.java +++ b/core/trino-main/src/main/java/io/trino/sql/planner/QueryPlanner.java @@ -185,7 +185,7 @@ public RelationPlan plan(Query query) PlanBuilder builder = planQueryBody(query); List orderBy = analysis.getOrderByExpressions(query); - builder = subqueryPlanner.handleSubqueries(builder, orderBy, query); + builder = subqueryPlanner.handleSubqueries(builder, orderBy, analysis.getSubqueries(query)); List selectExpressions = analysis.getSelectExpressions(query); List outputs = selectExpressions.stream() @@ -382,7 +382,7 @@ public RelationPlan plan(QuerySpecification node) List expressions = selectExpressions.stream() .map(SelectExpression::getExpression) .collect(toImmutableList()); - builder = subqueryPlanner.handleSubqueries(builder, expressions, node); + builder = subqueryPlanner.handleSubqueries(builder, expressions, analysis.getSubqueries(node)); if (hasExpressionsToUnfold(selectExpressions)) { // pre-project the folded expressions to preserve any non-deterministic semantics of functions that might be referenced @@ -419,7 +419,7 @@ public RelationPlan plan(QuerySpecification node) } List orderBy = analysis.getOrderByExpressions(node); - builder = subqueryPlanner.handleSubqueries(builder, orderBy, node); + builder = subqueryPlanner.handleSubqueries(builder, orderBy, analysis.getSubqueries(node)); builder = builder.appendProjections(Iterables.concat(orderBy, outputs), symbolAllocator, idAllocator); builder = distinct(builder, node, outputs); @@ -608,7 +608,7 @@ private PlanBuilder filter(PlanBuilder subPlan, Expression predicate, Node node) return subPlan; } - subPlan = subqueryPlanner.handleSubqueries(subPlan, predicate, node); + subPlan = subqueryPlanner.handleSubqueries(subPlan, predicate, analysis.getSubqueries(node)); return subPlan.withNewRoot(new FilterNode(idAllocator.getNextId(), subPlan.getRoot(), subPlan.rewrite(predicate))); } @@ -644,7 +644,7 @@ private PlanBuilder aggregate(PlanBuilder 
subPlan, QuerySpecification node) inputBuilder.addAll(groupingSetAnalysis.getComplexExpressions()); List inputs = inputBuilder.build(); - subPlan = subqueryPlanner.handleSubqueries(subPlan, inputs, node); + subPlan = subqueryPlanner.handleSubqueries(subPlan, inputs, analysis.getSubqueries(node)); subPlan = subPlan.appendProjections(inputs, symbolAllocator, idAllocator); // Add projection to coerce inputs to their site-specific types. @@ -945,7 +945,7 @@ private PlanBuilder window(Node node, PlanBuilder subPlan, List wi List inputs = inputsBuilder.build(); - subPlan = subqueryPlanner.handleSubqueries(subPlan, inputs, node); + subPlan = subqueryPlanner.handleSubqueries(subPlan, inputs, analysis.getSubqueries(node)); subPlan = subPlan.appendProjections(inputs, symbolAllocator, idAllocator); // Add projection to coerce inputs to their site-specific types. diff --git a/core/trino-main/src/main/java/io/trino/sql/planner/RelationPlanner.java b/core/trino-main/src/main/java/io/trino/sql/planner/RelationPlanner.java index 5fc5e0dc825e..32f1c06433d0 100644 --- a/core/trino-main/src/main/java/io/trino/sql/planner/RelationPlanner.java +++ b/core/trino-main/src/main/java/io/trino/sql/planner/RelationPlanner.java @@ -226,7 +226,7 @@ private RelationPlan addRowFilters(Table node, RelationPlan plan) .withScope(analysis.getAccessControlScope(node), plan.getFieldMappings()); // The fields in the access control scope has the same layout as those for the table scope for (Expression filter : filters) { - planBuilder = subqueryPlanner.handleSubqueries(planBuilder, filter, filter); + planBuilder = subqueryPlanner.handleSubqueries(planBuilder, filter, analysis.getSubqueries(filter)); planBuilder = planBuilder.withNewRoot(new FilterNode( idAllocator.getNextId(), @@ -255,7 +255,7 @@ private RelationPlan addColumnMasks(Table table, RelationPlan plan) Field field = plan.getDescriptor().getFieldByIndex(i); for (Expression mask : columnMasks.getOrDefault(field.getName().get(), 
ImmutableList.of())) { - planBuilder = subqueryPlanner.handleSubqueries(planBuilder, mask, mask); + planBuilder = subqueryPlanner.handleSubqueries(planBuilder, mask, analysis.getSubqueries(mask)); Map assignments = new LinkedHashMap<>(); for (Symbol symbol : planBuilder.getRoot().getOutputSymbols()) { @@ -340,6 +340,11 @@ protected RelationPlan visitJoin(Join node, Void context) return planJoinUsing(node, leftPlan, rightPlan); } + return planJoin(analysis.getJoinCriteria(node), node.getType(), analysis.getScope(node), leftPlan, rightPlan, analysis.getSubqueries(node)); + } + + private RelationPlan planJoin(Expression criteria, Join.Type type, Scope scope, RelationPlan leftPlan, RelationPlan rightPlan, Analysis.SubqueryAnalysis subqueries) + { // NOTE: symbols must be in the same order as the outputDescriptor List outputSymbols = ImmutableList.builder() .addAll(leftPlan.getFieldMappings()) @@ -347,26 +352,24 @@ protected RelationPlan visitJoin(Join node, Void context) .build(); PlanBuilder leftPlanBuilder = newPlanBuilder(leftPlan, analysis, lambdaDeclarationToSymbolMap) - .withScope(analysis.getScope(node), outputSymbols); + .withScope(scope, outputSymbols); PlanBuilder rightPlanBuilder = newPlanBuilder(rightPlan, analysis, lambdaDeclarationToSymbolMap) - .withScope(analysis.getScope(node), outputSymbols); + .withScope(scope, outputSymbols); ImmutableList.Builder equiClauses = ImmutableList.builder(); List complexJoinExpressions = new ArrayList<>(); List postInnerJoinConditions = new ArrayList<>(); - RelationType left = analysis.getOutputDescriptor(node.getLeft()); - RelationType right = analysis.getOutputDescriptor(node.getRight()); - - if (node.getType() != CROSS && node.getType() != IMPLICIT) { - Expression criteria = analysis.getJoinCriteria(node); + RelationType left = leftPlan.getDescriptor(); + RelationType right = rightPlan.getDescriptor(); + if (type != CROSS && type != IMPLICIT) { List leftComparisonExpressions = new ArrayList<>(); List 
rightComparisonExpressions = new ArrayList<>(); List joinConditionComparisonOperators = new ArrayList<>(); for (Expression conjunct : ExpressionUtils.extractConjuncts(criteria)) { - if (!isEqualComparisonExpression(conjunct) && node.getType() != INNER) { + if (!isEqualComparisonExpression(conjunct) && type != INNER) { complexJoinExpressions.add(conjunct); continue; } @@ -405,8 +408,8 @@ else if (firstDependencies.stream().allMatch(right::canResolve) && secondDepende } } - leftPlanBuilder = subqueryPlanner.handleSubqueries(leftPlanBuilder, leftComparisonExpressions, node); - rightPlanBuilder = subqueryPlanner.handleSubqueries(rightPlanBuilder, rightComparisonExpressions, node); + leftPlanBuilder = subqueryPlanner.handleSubqueries(leftPlanBuilder, leftComparisonExpressions, subqueries); + rightPlanBuilder = subqueryPlanner.handleSubqueries(rightPlanBuilder, rightComparisonExpressions, subqueries); // Add projections for join criteria leftPlanBuilder = leftPlanBuilder.appendProjections(leftComparisonExpressions, symbolAllocator, idAllocator); @@ -434,7 +437,7 @@ else if (firstDependencies.stream().allMatch(right::canResolve) && secondDepende } PlanNode root = new JoinNode(idAllocator.getNextId(), - JoinNode.Type.typeConvert(node.getType()), + JoinNode.Type.typeConvert(type), leftPlanBuilder.getRoot(), rightPlanBuilder.getRoot(), equiClauses.build(), @@ -449,7 +452,7 @@ else if (firstDependencies.stream().allMatch(right::canResolve) && secondDepende ImmutableMap.of(), Optional.empty()); - if (node.getType() != INNER) { + if (type != INNER) { for (Expression complexExpression : complexJoinExpressions) { Set dependencies = SymbolsExtractor.extractNamesNoSubqueries(complexExpression, analysis.getColumnReferences()); @@ -461,22 +464,22 @@ else if (firstDependencies.stream().allMatch(right::canResolve) && secondDepende // t JOIN u ON t.x = (...) get's planned on the u side // t JOIN u ON t.x + u.x = (...) 
get's planned on an arbitrary side if (dependencies.stream().allMatch(left::canResolve)) { - leftPlanBuilder = subqueryPlanner.handleSubqueries(leftPlanBuilder, complexExpression, node); + leftPlanBuilder = subqueryPlanner.handleSubqueries(leftPlanBuilder, complexExpression, subqueries); } else { - rightPlanBuilder = subqueryPlanner.handleSubqueries(rightPlanBuilder, complexExpression, node); + rightPlanBuilder = subqueryPlanner.handleSubqueries(rightPlanBuilder, complexExpression, subqueries); } } } - TranslationMap translationMap = new TranslationMap(outerContext, analysis.getScope(node), analysis, lambdaDeclarationToSymbolMap, outputSymbols) + TranslationMap translationMap = new TranslationMap(outerContext, scope, analysis, lambdaDeclarationToSymbolMap, outputSymbols) .withAdditionalMappings(leftPlanBuilder.getTranslations().getMappings()) .withAdditionalMappings(rightPlanBuilder.getTranslations().getMappings()); - if (node.getType() != INNER && !complexJoinExpressions.isEmpty()) { + if (type != INNER && !complexJoinExpressions.isEmpty()) { Expression joinedFilterCondition = ExpressionUtils.and(complexJoinExpressions); Expression rewrittenFilterCondition = translationMap.rewrite(joinedFilterCondition); root = new JoinNode(idAllocator.getNextId(), - JoinNode.Type.typeConvert(node.getType()), + JoinNode.Type.typeConvert(type), leftPlanBuilder.getRoot(), rightPlanBuilder.getRoot(), equiClauses.build(), @@ -492,10 +495,10 @@ else if (firstDependencies.stream().allMatch(right::canResolve) && secondDepende Optional.empty()); } - if (node.getType() == INNER) { + if (type == INNER) { // rewrite all the other conditions using output symbols from left + right plan node. 
PlanBuilder rootPlanBuilder = new PlanBuilder(translationMap, root); - rootPlanBuilder = subqueryPlanner.handleSubqueries(rootPlanBuilder, complexJoinExpressions, node); + rootPlanBuilder = subqueryPlanner.handleSubqueries(rootPlanBuilder, complexJoinExpressions, subqueries); for (Expression expression : complexJoinExpressions) { postInnerJoinConditions.add(rootPlanBuilder.rewrite(expression)); @@ -509,7 +512,7 @@ else if (firstDependencies.stream().allMatch(right::canResolve) && secondDepende } } - return new RelationPlan(root, analysis.getScope(node), outputSymbols, outerContext); + return new RelationPlan(root, scope, outputSymbols, outerContext); } private RelationPlan planJoinUsing(Join node, RelationPlan left, RelationPlan right) diff --git a/core/trino-main/src/main/java/io/trino/sql/planner/SubqueryPlanner.java b/core/trino-main/src/main/java/io/trino/sql/planner/SubqueryPlanner.java index a3ceabd88fa7..1e3d25f11f93 100644 --- a/core/trino-main/src/main/java/io/trino/sql/planner/SubqueryPlanner.java +++ b/core/trino-main/src/main/java/io/trino/sql/planner/SubqueryPlanner.java @@ -104,27 +104,27 @@ class SubqueryPlanner this.recursiveSubqueries = recursiveSubqueries; } - public PlanBuilder handleSubqueries(PlanBuilder builder, Collection expressions, Node node) + public PlanBuilder handleSubqueries(PlanBuilder builder, Collection expressions, Analysis.SubqueryAnalysis subqueries) { for (Expression expression : expressions) { - builder = handleSubqueries(builder, expression, node); + builder = handleSubqueries(builder, expression, subqueries); } return builder; } - public PlanBuilder handleSubqueries(PlanBuilder builder, Expression expression, Node node) + public PlanBuilder handleSubqueries(PlanBuilder builder, Expression expression, Analysis.SubqueryAnalysis subqueries) { - for (Cluster cluster : cluster(builder.getScope(), selectSubqueries(builder, expression, analysis.getInPredicateSubqueries(node)))) { - builder = planInPredicate(builder, cluster, node); 
+ for (Cluster cluster : cluster(builder.getScope(), selectSubqueries(builder, expression, subqueries.getInPredicatesSubqueries()))) { + builder = planInPredicate(builder, cluster, subqueries); } - for (Cluster cluster : cluster(builder.getScope(), selectSubqueries(builder, expression, analysis.getScalarSubqueries(node)))) { + for (Cluster cluster : cluster(builder.getScope(), selectSubqueries(builder, expression, subqueries.getScalarSubqueries()))) { builder = planScalarSubquery(builder, cluster); } - for (Cluster cluster : cluster(builder.getScope(), selectSubqueries(builder, expression, analysis.getExistsSubqueries(node)))) { + for (Cluster cluster : cluster(builder.getScope(), selectSubqueries(builder, expression, subqueries.getExistsSubqueries()))) { builder = planExists(builder, cluster); } - for (Cluster cluster : cluster(builder.getScope(), selectSubqueries(builder, expression, analysis.getQuantifiedComparisonSubqueries(node)))) { - builder = planQuantifiedComparison(builder, cluster, node); + for (Cluster cluster : cluster(builder.getScope(), selectSubqueries(builder, expression, subqueries.getQuantifiedComparisonSubqueries()))) { + builder = planQuantifiedComparison(builder, cluster, subqueries); } return builder; @@ -172,7 +172,7 @@ private Collection> cluster(Scope scope, List< .collect(toImmutableList()); } - private PlanBuilder planInPredicate(PlanBuilder subPlan, Cluster cluster, Node node) + private PlanBuilder planInPredicate(PlanBuilder subPlan, Cluster cluster, Analysis.SubqueryAnalysis subqueries) { // Plan one of the predicates from the cluster InPredicate predicate = cluster.getRepresentative(); @@ -181,7 +181,7 @@ private PlanBuilder planInPredicate(PlanBuilder subPlan, Cluster cl SubqueryExpression subquery = (SubqueryExpression) predicate.getValueList(); Symbol output = symbolAllocator.newSymbol(predicate, BOOLEAN); - subPlan = handleSubqueries(subPlan, value, node); + subPlan = handleSubqueries(subPlan, value, subqueries); subPlan = 
planInPredicate(subPlan, value, subquery, output, predicate); return new PlanBuilder( @@ -292,7 +292,7 @@ private RelationPlan planSubquery(Expression subquery, TranslationMap outerConte .process(subquery, null); } - private PlanBuilder planQuantifiedComparison(PlanBuilder subPlan, Cluster cluster, Node node) + private PlanBuilder planQuantifiedComparison(PlanBuilder subPlan, Cluster cluster, Analysis.SubqueryAnalysis subqueries) { // Plan one of the predicates from the cluster QuantifiedComparisonExpression quantifiedComparison = cluster.getRepresentative(); @@ -302,7 +302,7 @@ private PlanBuilder planQuantifiedComparison(PlanBuilder subPlan, Cluster aggregation.getMask().isPresent()) - .anyMatch(isMaskPresent -> isMaskPresent); + .allMatch(aggregation -> aggregation.getMask().isEmpty()); } @Override diff --git a/core/trino-main/src/main/java/io/trino/sql/planner/iterative/rule/ReplaceRedundantJoinWithSource.java b/core/trino-main/src/main/java/io/trino/sql/planner/iterative/rule/ReplaceRedundantJoinWithSource.java index 78af9038eeda..a904254cc344 100644 --- a/core/trino-main/src/main/java/io/trino/sql/planner/iterative/rule/ReplaceRedundantJoinWithSource.java +++ b/core/trino-main/src/main/java/io/trino/sql/planner/iterative/rule/ReplaceRedundantJoinWithSource.java @@ -25,6 +25,7 @@ import java.util.List; import static io.trino.sql.planner.iterative.rule.Util.restrictOutputs; +import static io.trino.sql.planner.optimizations.QueryCardinalityUtil.isAtLeastScalar; import static io.trino.sql.planner.optimizations.QueryCardinalityUtil.isAtMost; import static io.trino.sql.planner.optimizations.QueryCardinalityUtil.isScalar; import static io.trino.sql.planner.plan.Patterns.join; @@ -98,11 +99,11 @@ else if (rightSourceScalarWithNoOutputs) { } break; case FULL: - if (leftSourceScalarWithNoOutputs) { + if (leftSourceScalarWithNoOutputs && isAtLeastScalar(node.getRight(), context.getLookup())) { return Result.ofPlanNode(restrictOutputs(context.getIdAllocator(), 
node.getRight(), ImmutableSet.copyOf(node.getRightOutputSymbols())) .orElse(node.getRight())); } - if (rightSourceScalarWithNoOutputs) { + if (rightSourceScalarWithNoOutputs && isAtLeastScalar(node.getLeft(), context.getLookup())) { return Result.ofPlanNode(restrictOutputs(context.getIdAllocator(), node.getLeft(), ImmutableSet.copyOf(node.getLeftOutputSymbols())) .orElse(node.getLeft())); } diff --git a/core/trino-main/src/main/java/io/trino/sql/rewrite/ShowQueriesRewrite.java b/core/trino-main/src/main/java/io/trino/sql/rewrite/ShowQueriesRewrite.java index 3ca6ef720cf1..8c756e0cbbb4 100644 --- a/core/trino-main/src/main/java/io/trino/sql/rewrite/ShowQueriesRewrite.java +++ b/core/trino-main/src/main/java/io/trino/sql/rewrite/ShowQueriesRewrite.java @@ -84,7 +84,6 @@ import io.trino.sql.tree.TableElement; import io.trino.sql.tree.Values; -import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Map; @@ -106,6 +105,7 @@ import static io.trino.metadata.MetadataUtil.createQualifiedObjectName; import static io.trino.spi.StandardErrorCode.CATALOG_NOT_FOUND; import static io.trino.spi.StandardErrorCode.INVALID_COLUMN_PROPERTY; +import static io.trino.spi.StandardErrorCode.INVALID_MATERIALIZED_VIEW_PROPERTY; import static io.trino.spi.StandardErrorCode.INVALID_SCHEMA_PROPERTY; import static io.trino.spi.StandardErrorCode.INVALID_TABLE_PROPERTY; import static io.trino.spi.StandardErrorCode.INVALID_VIEW; @@ -498,8 +498,12 @@ protected Node visitShowCreate(ShowCreate node, Void context) accessControl.checkCanShowCreateTable(session.toSecurityContext(), new QualifiedObjectName(catalogName.getValue(), schemaName.getValue(), tableName.getValue())); + Map properties = viewDefinition.get().getProperties(); + Map> allMaterializedViewProperties = metadata.getMaterializedViewPropertyManager().getAllProperties().get(new CatalogName(catalogName.getValue())); + List propertyNodes = buildProperties(objectName, Optional.empty(), 
INVALID_MATERIALIZED_VIEW_PROPERTY, properties, allMaterializedViewProperties); + String sql = formatSql(new CreateMaterializedView(Optional.empty(), QualifiedName.of(ImmutableList.of(catalogName, schemaName, tableName)), - query, false, false, new ArrayList<>(), viewDefinition.get().getComment())).trim(); + query, false, false, propertyNodes, viewDefinition.get().getComment())).trim(); return singleValueQuery("Create Materialized View", sql); } diff --git a/core/trino-main/src/main/java/io/trino/testing/LocalQueryRunner.java b/core/trino-main/src/main/java/io/trino/testing/LocalQueryRunner.java index 848e31b9ba24..620c7fb13bdc 100644 --- a/core/trino-main/src/main/java/io/trino/testing/LocalQueryRunner.java +++ b/core/trino-main/src/main/java/io/trino/testing/LocalQueryRunner.java @@ -30,6 +30,7 @@ import io.trino.connector.system.ColumnPropertiesSystemTable; import io.trino.connector.system.GlobalSystemConnector; import io.trino.connector.system.GlobalSystemConnectorFactory; +import io.trino.connector.system.MaterializedViewPropertiesSystemTable; import io.trino.connector.system.NodeSystemTable; import io.trino.connector.system.SchemaPropertiesSystemTable; import io.trino.connector.system.TableCommentSystemTable; @@ -82,6 +83,7 @@ import io.trino.metadata.ColumnPropertyManager; import io.trino.metadata.HandleResolver; import io.trino.metadata.InMemoryNodeManager; +import io.trino.metadata.MaterializedViewPropertyManager; import io.trino.metadata.Metadata; import io.trino.metadata.MetadataManager; import io.trino.metadata.MetadataUtil; @@ -321,6 +323,7 @@ private LocalQueryRunner( new SessionPropertyManager(new SystemSessionProperties(new QueryManagerConfig(), taskManagerConfig, new MemoryManagerConfig(), featuresConfig, new NodeMemoryConfig(), new DynamicFilterConfig(), new NodeSchedulerConfig())), new SchemaPropertyManager(), new TablePropertyManager(), + new MaterializedViewPropertyManager(), new ColumnPropertyManager(), new AnalyzePropertyManager(), 
transactionManager, @@ -371,6 +374,7 @@ private LocalQueryRunner( new TableCommentSystemTable(metadata, accessControl), new SchemaPropertiesSystemTable(transactionManager, metadata), new TablePropertiesSystemTable(transactionManager, metadata), + new MaterializedViewPropertiesSystemTable(transactionManager, metadata), new ColumnPropertiesSystemTable(transactionManager, metadata), new AnalyzePropertiesSystemTable(transactionManager, metadata), new TransactionsSystemTable(metadata, transactionManager)), diff --git a/core/trino-main/src/test/java/io/trino/execution/TestCreateMaterializedViewTask.java b/core/trino-main/src/test/java/io/trino/execution/TestCreateMaterializedViewTask.java new file mode 100644 index 000000000000..4edc9afcfa21 --- /dev/null +++ b/core/trino-main/src/test/java/io/trino/execution/TestCreateMaterializedViewTask.java @@ -0,0 +1,308 @@ + +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.trino.execution; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import io.trino.Session; +import io.trino.connector.CatalogName; +import io.trino.cost.StatsCalculator; +import io.trino.execution.warnings.WarningCollector; +import io.trino.metadata.AbstractMockMetadata; +import io.trino.metadata.Catalog; +import io.trino.metadata.CatalogManager; +import io.trino.metadata.MaterializedViewPropertyManager; +import io.trino.metadata.MetadataManager; +import io.trino.metadata.QualifiedObjectName; +import io.trino.metadata.TableHandle; +import io.trino.metadata.TableMetadata; +import io.trino.metadata.TablePropertyManager; +import io.trino.metadata.TableSchema; +import io.trino.security.AccessControl; +import io.trino.security.AllowAllAccessControl; +import io.trino.spi.TrinoException; +import io.trino.spi.connector.ColumnHandle; +import io.trino.spi.connector.ColumnMetadata; +import io.trino.spi.connector.ConnectorMaterializedViewDefinition; +import io.trino.spi.connector.ConnectorTableMetadata; +import io.trino.spi.connector.ConnectorViewDefinition; +import io.trino.spi.connector.SchemaTableName; +import io.trino.spi.connector.TestingColumnHandle; +import io.trino.spi.resourcegroups.ResourceGroupId; +import io.trino.sql.parser.SqlParser; +import io.trino.sql.planner.TestingConnectorTransactionHandle; +import io.trino.sql.planner.TypeAnalyzer; +import io.trino.sql.tree.AllColumns; +import io.trino.sql.tree.CreateMaterializedView; +import io.trino.sql.tree.Identifier; +import io.trino.sql.tree.Property; +import io.trino.sql.tree.QualifiedName; +import io.trino.sql.tree.StringLiteral; +import io.trino.testing.TestingGroupProvider; +import io.trino.testing.TestingMetadata.TestingTableHandle; +import io.trino.transaction.TransactionManager; +import org.testng.annotations.BeforeMethod; +import org.testng.annotations.Test; + +import java.net.URI; +import java.util.List; +import java.util.Map; +import 
java.util.Optional; +import java.util.concurrent.ConcurrentHashMap; + +import static com.google.common.collect.ImmutableMap.toImmutableMap; +import static com.google.common.util.concurrent.MoreExecutors.directExecutor; +import static io.airlift.concurrent.MoreFutures.getFutureValue; +import static io.trino.cost.StatsCalculatorModule.createNewStatsCalculator; +import static io.trino.metadata.MetadataManager.createTestMetadataManager; +import static io.trino.spi.StandardErrorCode.ALREADY_EXISTS; +import static io.trino.spi.StandardErrorCode.INVALID_MATERIALIZED_VIEW_PROPERTY; +import static io.trino.spi.session.PropertyMetadata.stringProperty; +import static io.trino.spi.type.BigintType.BIGINT; +import static io.trino.spi.type.SmallintType.SMALLINT; +import static io.trino.sql.QueryUtil.selectList; +import static io.trino.sql.QueryUtil.simpleQuery; +import static io.trino.sql.QueryUtil.table; +import static io.trino.testing.TestingSession.createBogusTestingCatalog; +import static io.trino.testing.TestingSession.testSessionBuilder; +import static io.trino.testing.assertions.TrinoExceptionAssert.assertTrinoExceptionThrownBy; +import static io.trino.transaction.InMemoryTransactionManager.createTestTransactionManager; +import static java.util.Objects.requireNonNull; +import static org.testng.Assert.assertEquals; + +@Test(singleThreaded = true) +public class TestCreateMaterializedViewTask +{ + private static final String CATALOG_NAME = "catalog"; + private static final ConnectorTableMetadata MOCK_TABLE = new ConnectorTableMetadata( + new SchemaTableName("schema", "mock_table"), + List.of(new ColumnMetadata("a", SMALLINT), new ColumnMetadata("b", BIGINT)), + ImmutableMap.of("baz", "property_value")); + + private Session testSession; + private MockMetadata metadata; + private TransactionManager transactionManager; + private SqlParser parser; + private StatsCalculator statsCalculator; + private QueryStateMachine queryStateMachine; + + @BeforeMethod + public void setUp() + { 
+ CatalogManager catalogManager = new CatalogManager(); + transactionManager = createTestTransactionManager(catalogManager); + TablePropertyManager tablePropertyManager = new TablePropertyManager(); + MaterializedViewPropertyManager materializedViewPropertyManager = new MaterializedViewPropertyManager(); + Catalog testCatalog = createBogusTestingCatalog(CATALOG_NAME); + catalogManager.registerCatalog(testCatalog); + tablePropertyManager.addProperties( + testCatalog.getConnectorCatalogName(), + ImmutableList.of(stringProperty("baz", "test property", null, false))); + materializedViewPropertyManager.addProperties( + testCatalog.getConnectorCatalogName(), + ImmutableList.of(stringProperty("foo", "test materialized view property", null, false))); + testSession = testSessionBuilder() + .setTransactionId(transactionManager.beginTransaction(false)) + .build(); + metadata = new MockMetadata( + tablePropertyManager, + materializedViewPropertyManager, + testCatalog.getConnectorCatalogName()); + parser = new SqlParser(); + statsCalculator = createNewStatsCalculator(metadata, new TypeAnalyzer(parser, metadata)); + queryStateMachine = stateMachine(transactionManager, createTestMetadataManager(), new AllowAllAccessControl()); + } + + @Test + public void testCreateMaterializedViewIfNotExists() + { + CreateMaterializedView statement = new CreateMaterializedView( + Optional.empty(), + QualifiedName.of("test_mv"), + simpleQuery(selectList(new AllColumns()), table(QualifiedName.of("catalog", "schema", "mock_table"))), + false, + true, + ImmutableList.of(), + Optional.empty()); + + getFutureValue(new CreateMaterializedViewTask(parser, new TestingGroupProvider(), statsCalculator) + .execute(statement, transactionManager, metadata, new AllowAllAccessControl(), queryStateMachine, ImmutableList.of(), WarningCollector.NOOP)); + assertEquals(metadata.getCreateMaterializedViewCallCount(), 1); + } + + @Test + public void testCreateMaterializedViewWithExistingView() + { + 
CreateMaterializedView statement = new CreateMaterializedView( + Optional.empty(), + QualifiedName.of("test_mv"), + simpleQuery(selectList(new AllColumns()), table(QualifiedName.of("catalog", "schema", "mock_table"))), + false, + false, + ImmutableList.of(), + Optional.empty()); + + assertTrinoExceptionThrownBy(() -> getFutureValue(new CreateMaterializedViewTask(parser, new TestingGroupProvider(), statsCalculator) + .execute(statement, transactionManager, metadata, new AllowAllAccessControl(), queryStateMachine, ImmutableList.of(), WarningCollector.NOOP))) + .hasErrorCode(ALREADY_EXISTS) + .hasMessage("Materialized view already exists"); + + assertEquals(metadata.getCreateMaterializedViewCallCount(), 1); + } + + @Test + public void testCreateMaterializedViewWithInvalidProperty() + { + CreateMaterializedView statement = new CreateMaterializedView( + Optional.empty(), + QualifiedName.of("test_mv"), + simpleQuery(selectList(new AllColumns()), table(QualifiedName.of("catalog", "schema", "mock_table"))), + false, + true, + ImmutableList.of(new Property(new Identifier("baz"), new StringLiteral("abc"))), + Optional.empty()); + + assertTrinoExceptionThrownBy(() -> getFutureValue(new CreateMaterializedViewTask(parser, new TestingGroupProvider(), statsCalculator) + .execute(statement, transactionManager, metadata, new AllowAllAccessControl(), queryStateMachine, ImmutableList.of(), WarningCollector.NOOP))) + .hasErrorCode(INVALID_MATERIALIZED_VIEW_PROPERTY) + .hasMessage("Catalog 'catalog' does not support materialized view property 'baz'"); + + assertEquals(metadata.getCreateMaterializedViewCallCount(), 0); + } + + private QueryStateMachine stateMachine(TransactionManager transactionManager, MetadataManager metadata, AccessControl accessControl) + { + return QueryStateMachine.begin( + "test", + Optional.empty(), + testSession, + URI.create("fake://uri"), + new ResourceGroupId("test"), + false, + transactionManager, + accessControl, + directExecutor(), + metadata, + 
WarningCollector.NOOP, + Optional.empty()); + } + + private static class MockMetadata + extends AbstractMockMetadata + { + private final TablePropertyManager tablePropertyManager; + private final MaterializedViewPropertyManager materializedViewPropertyManager; + private final CatalogName catalogHandle; + private final Map materializedViews = new ConcurrentHashMap<>(); + + public MockMetadata( + TablePropertyManager tablePropertyManager, + MaterializedViewPropertyManager materializedViewPropertyManager, + CatalogName catalogHandle) + { + this.tablePropertyManager = requireNonNull(tablePropertyManager, "tablePropertyManager is null"); + this.materializedViewPropertyManager = requireNonNull(materializedViewPropertyManager, "materializedViewPropertyManager is null"); + this.catalogHandle = requireNonNull(catalogHandle, "catalogHandle is null"); + } + + @Override + public void createMaterializedView(Session session, QualifiedObjectName viewName, ConnectorMaterializedViewDefinition definition, boolean replace, boolean ignoreExisting) + { + materializedViews.put(viewName.asSchemaTableName(), definition); + if (!ignoreExisting) { + throw new TrinoException(ALREADY_EXISTS, "Materialized view already exists"); + } + } + + @Override + public TablePropertyManager getTablePropertyManager() + { + return tablePropertyManager; + } + + @Override + public MaterializedViewPropertyManager getMaterializedViewPropertyManager() + { + return materializedViewPropertyManager; + } + + @Override + public Optional getCatalogHandle(Session session, String catalogName) + { + if (catalogHandle.getCatalogName().equals(catalogName)) { + return Optional.of(catalogHandle); + } + return Optional.empty(); + } + + @Override + public TableSchema getTableSchema(Session session, TableHandle tableHandle) + { + return new TableSchema(tableHandle.getCatalogName(), MOCK_TABLE.getTableSchema()); + } + + @Override + public Optional getTableHandle(Session session, QualifiedObjectName tableName) + { + if 
(tableName.asSchemaTableName().equals(MOCK_TABLE.getTable())) { + return Optional.of( + new TableHandle( + new CatalogName(CATALOG_NAME), + new TestingTableHandle(tableName.asSchemaTableName()), + TestingConnectorTransactionHandle.INSTANCE, + Optional.empty())); + } + return Optional.empty(); + } + + @Override + public Map getColumnHandles(Session session, TableHandle tableHandle) + { + return MOCK_TABLE.getColumns().stream() + .collect(toImmutableMap( + ColumnMetadata::getName, + column -> new TestingColumnHandle(column.getName()))); + } + + @Override + public TableMetadata getTableMetadata(Session session, TableHandle tableHandle) + { + if ((tableHandle.getConnectorHandle() instanceof TestingTableHandle)) { + if (((TestingTableHandle) tableHandle.getConnectorHandle()).getTableName().equals(MOCK_TABLE.getTable())) { + return new TableMetadata(new CatalogName("catalog"), MOCK_TABLE); + } + } + + return super.getTableMetadata(session, tableHandle); + } + + @Override + public Optional getMaterializedView(Session session, QualifiedObjectName viewName) + { + return Optional.ofNullable(materializedViews.get(viewName.asSchemaTableName())); + } + + @Override + public Optional getView(Session session, QualifiedObjectName viewName) + { + return Optional.empty(); + } + + public int getCreateMaterializedViewCallCount() + { + return materializedViews.size(); + } + } +} diff --git a/core/trino-main/src/test/java/io/trino/execution/TestCreateTableTask.java b/core/trino-main/src/test/java/io/trino/execution/TestCreateTableTask.java index cfb9f95f8c66..68ccacb579b2 100644 --- a/core/trino-main/src/test/java/io/trino/execution/TestCreateTableTask.java +++ b/core/trino-main/src/test/java/io/trino/execution/TestCreateTableTask.java @@ -23,6 +23,7 @@ import io.trino.metadata.Catalog; import io.trino.metadata.CatalogManager; import io.trino.metadata.ColumnPropertyManager; +import io.trino.metadata.MaterializedViewPropertyManager; import io.trino.metadata.MetadataManager; import 
io.trino.metadata.QualifiedObjectName; import io.trino.metadata.TableHandle; @@ -42,8 +43,11 @@ import io.trino.sql.planner.TestingConnectorTransactionHandle; import io.trino.sql.tree.ColumnDefinition; import io.trino.sql.tree.CreateTable; +import io.trino.sql.tree.Identifier; import io.trino.sql.tree.LikeClause; +import io.trino.sql.tree.Property; import io.trino.sql.tree.QualifiedName; +import io.trino.sql.tree.StringLiteral; import io.trino.sql.tree.TableElement; import io.trino.testing.TestingAccessControlManager; import io.trino.testing.TestingMetadata.TestingTableHandle; @@ -61,6 +65,7 @@ import static io.airlift.concurrent.MoreFutures.getFutureValue; import static io.trino.metadata.MetadataManager.createTestMetadataManager; import static io.trino.spi.StandardErrorCode.ALREADY_EXISTS; +import static io.trino.spi.StandardErrorCode.INVALID_TABLE_PROPERTY; import static io.trino.spi.StandardErrorCode.NOT_SUPPORTED; import static io.trino.spi.connector.ConnectorCapabilities.NOT_NULL_COLUMN_CONSTRAINT; import static io.trino.spi.session.PropertyMetadata.stringProperty; @@ -108,18 +113,23 @@ public void setUp() CatalogManager catalogManager = new CatalogManager(); transactionManager = createTestTransactionManager(catalogManager); TablePropertyManager tablePropertyManager = new TablePropertyManager(); + MaterializedViewPropertyManager materializedViewPropertyManager = new MaterializedViewPropertyManager(); ColumnPropertyManager columnPropertyManager = new ColumnPropertyManager(); Catalog testCatalog = createBogusTestingCatalog(CATALOG_NAME); catalogManager.registerCatalog(testCatalog); tablePropertyManager.addProperties( testCatalog.getConnectorCatalogName(), ImmutableList.of(stringProperty("baz", "test property", null, false))); + materializedViewPropertyManager.addProperties( + testCatalog.getConnectorCatalogName(), + ImmutableList.of(stringProperty("foo", "test materialized view property", null, false))); 
columnPropertyManager.addProperties(testCatalog.getConnectorCatalogName(), ImmutableList.of()); testSession = testSessionBuilder() .setTransactionId(transactionManager.beginTransaction(false)) .build(); metadata = new MockMetadata( tablePropertyManager, + materializedViewPropertyManager, columnPropertyManager, testCatalog.getConnectorCatalogName(), emptySet()); @@ -154,6 +164,22 @@ public void testCreateTableNotExistsFalse() assertEquals(metadata.getCreateTableCallCount(), 1); } + @Test + public void testCreateTableWithMaterializedViewPropertyFails() + { + CreateTable statement = new CreateTable(QualifiedName.of("test_table"), + ImmutableList.of(new ColumnDefinition(identifier("a"), toSqlType(BIGINT), true, emptyList(), Optional.empty())), + false, + ImmutableList.of(new Property(new Identifier("foo"), new StringLiteral("bar"))), + Optional.empty()); + + assertTrinoExceptionThrownBy(() -> getFutureValue(new CreateTableTask().internalExecute(statement, metadata, new AllowAllAccessControl(), testSession, emptyList(), output -> {}))) + .hasErrorCode(INVALID_TABLE_PROPERTY) + .hasMessage("Catalog 'catalog' does not support table property 'foo'"); + + assertEquals(metadata.getCreateTableCallCount(), 0); + } + @Test public void testCreateWithNotNullColumns() { @@ -270,6 +296,7 @@ private static class MockMetadata { private final MetadataManager metadata; private final TablePropertyManager tablePropertyManager; + private final MaterializedViewPropertyManager materializedViewPropertyManager; private final ColumnPropertyManager columnPropertyManager; private final CatalogName catalogHandle; private final List tables = new CopyOnWriteArrayList<>(); @@ -277,11 +304,13 @@ private static class MockMetadata public MockMetadata( TablePropertyManager tablePropertyManager, + MaterializedViewPropertyManager materializedViewPropertyManager, ColumnPropertyManager columnPropertyManager, CatalogName catalogHandle, Set connectorCapabilities) { this.tablePropertyManager = 
requireNonNull(tablePropertyManager, "tablePropertyManager is null"); + this.materializedViewPropertyManager = requireNonNull(materializedViewPropertyManager, "materializedViewPropertyManager is null"); this.columnPropertyManager = requireNonNull(columnPropertyManager, "columnPropertyManager is null"); this.catalogHandle = requireNonNull(catalogHandle, "catalogHandle is null"); this.connectorCapabilities = immutableEnumSet(requireNonNull(connectorCapabilities, "connectorCapabilities is null")); @@ -303,6 +332,12 @@ public TablePropertyManager getTablePropertyManager() return tablePropertyManager; } + @Override + public MaterializedViewPropertyManager getMaterializedViewPropertyManager() + { + return materializedViewPropertyManager; + } + @Override public ColumnPropertyManager getColumnPropertyManager() { diff --git a/core/trino-main/src/test/java/io/trino/metadata/AbstractMockMetadata.java b/core/trino-main/src/test/java/io/trino/metadata/AbstractMockMetadata.java index d1df73ff6e36..94a17d607a01 100644 --- a/core/trino-main/src/test/java/io/trino/metadata/AbstractMockMetadata.java +++ b/core/trino-main/src/test/java/io/trino/metadata/AbstractMockMetadata.java @@ -807,6 +807,12 @@ public TablePropertyManager getTablePropertyManager() throw new UnsupportedOperationException(); } + @Override + public MaterializedViewPropertyManager getMaterializedViewPropertyManager() + { + throw new UnsupportedOperationException(); + } + @Override public ColumnPropertyManager getColumnPropertyManager() { diff --git a/core/trino-main/src/test/java/io/trino/sql/analyzer/TestAnalyzer.java b/core/trino-main/src/test/java/io/trino/sql/analyzer/TestAnalyzer.java index ea968796431f..e0a213245ddb 100644 --- a/core/trino-main/src/test/java/io/trino/sql/analyzer/TestAnalyzer.java +++ b/core/trino-main/src/test/java/io/trino/sql/analyzer/TestAnalyzer.java @@ -3242,7 +3242,7 @@ public void setup() ImmutableList.of(new ViewColumn("a", BIGINT.getTypeId())), Optional.empty(), Optional.empty(), - 
false); + true); inSetupTransaction(session -> metadata.createView( session, tableViewAndMaterializedView, diff --git a/core/trino-main/src/test/java/io/trino/sql/planner/iterative/rule/TestReplaceRedundantJoinWithSource.java b/core/trino-main/src/test/java/io/trino/sql/planner/iterative/rule/TestReplaceRedundantJoinWithSource.java index 8313e8939bca..22d9c029cfdf 100644 --- a/core/trino-main/src/test/java/io/trino/sql/planner/iterative/rule/TestReplaceRedundantJoinWithSource.java +++ b/core/trino-main/src/test/java/io/trino/sql/planner/iterative/rule/TestReplaceRedundantJoinWithSource.java @@ -237,6 +237,32 @@ public void testReplaceFullJoin() expression("b > 0"))) .matches( values(ImmutableList.of("b"), nCopies(10, ImmutableList.of(new NullLiteral())))); + + // Right source is scalar with no outputs. Left source cannot be determined to be at least scalar. + // In such case, FULL join cannot be replaced with left source. The result would be incorrect + // if left source was empty. + tester().assertThat(new ReplaceRedundantJoinWithSource()) + .on(p -> + p.join( + FULL, + p.filter( + expression("a > 5"), + p.values(10, p.symbol("a"))), + p.values(1))) + .doesNotFire(); + + // Left source is scalar with no outputs. Right source cannot be determined to be at least scalar. + // In such case, FULL join cannot be replaced with right source. The result would be incorrect + // if right source was empty. 
+ tester().assertThat(new ReplaceRedundantJoinWithSource()) + .on(p -> + p.join( + FULL, + p.values(1), + p.filter( + expression("a > 5"), + p.values(10, p.symbol("a"))))) + .doesNotFire(); } @Test diff --git a/core/trino-spi/src/main/java/io/trino/spi/StandardErrorCode.java b/core/trino-spi/src/main/java/io/trino/spi/StandardErrorCode.java index 001975266f20..c4935418b014 100644 --- a/core/trino-spi/src/main/java/io/trino/spi/StandardErrorCode.java +++ b/core/trino-spi/src/main/java/io/trino/spi/StandardErrorCode.java @@ -115,6 +115,7 @@ public enum StandardErrorCode DUPLICATE_WINDOW_NAME(92, USER_ERROR), INVALID_WINDOW_REFERENCE(93, USER_ERROR), INVALID_PARTITION_BY(94, USER_ERROR), + INVALID_MATERIALIZED_VIEW_PROPERTY(95, USER_ERROR), GENERIC_INTERNAL_ERROR(65536, INTERNAL_ERROR), TOO_MANY_REQUESTS_FAILED(65537, INTERNAL_ERROR), diff --git a/core/trino-spi/src/main/java/io/trino/spi/connector/Connector.java b/core/trino-spi/src/main/java/io/trino/spi/connector/Connector.java index d018ca689614..81f6796ce212 100644 --- a/core/trino-spi/src/main/java/io/trino/spi/connector/Connector.java +++ b/core/trino-spi/src/main/java/io/trino/spi/connector/Connector.java @@ -140,6 +140,14 @@ default List> getTableProperties() return emptyList(); } + /** + * @return the materialized view properties for this connector + */ + default List> getMaterializedViewProperties() + { + return emptyList(); + } + /** * @return the column properties for this connector */ diff --git a/docs/src/main/sphinx/admin/jmx.rst b/docs/src/main/sphinx/admin/jmx.rst index 780d26a4f5da..cea851d4618f 100644 --- a/docs/src/main/sphinx/admin/jmx.rst +++ b/docs/src/main/sphinx/admin/jmx.rst @@ -40,7 +40,7 @@ Trino cluster and nodes ------------------------ * Active nodes: - ``trino.failureDetector:name=HeartbeatFailureDetector:ActiveCount`` + ``trino.failuredetector:name=HeartbeatFailureDetector:ActiveCount`` * Free memory (general pool): 
``trino.memory:type=ClusterMemoryPool:name=general:FreeDistributedBytes`` diff --git a/docs/src/main/sphinx/connector/system.rst b/docs/src/main/sphinx/connector/system.rst index 55be97681320..f2db09662981 100644 --- a/docs/src/main/sphinx/connector/system.rst +++ b/docs/src/main/sphinx/connector/system.rst @@ -50,6 +50,12 @@ that can be set when creating a new schema. The table properties table contains the list of available properties that can be set when creating a new table. +``metadata.materialized_view_properties`` +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +The materialized view properties table contains the list of available properties +that can be set when creating a new materialized view. + ``metadata.table_comments`` ^^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/plugin/trino-accumulo/pom.xml b/plugin/trino-accumulo/pom.xml index a7261dc6ce44..74a0a0a55ca3 100644 --- a/plugin/trino-accumulo/pom.xml +++ b/plugin/trino-accumulo/pom.xml @@ -270,7 +270,7 @@ com.github.docker-java docker-java-api - 3.2.7 + ${dep.docker-java.version} test diff --git a/plugin/trino-bigquery/src/main/java/io/trino/plugin/bigquery/BigQueryClient.java b/plugin/trino-bigquery/src/main/java/io/trino/plugin/bigquery/BigQueryClient.java index ded50a1cfa94..c6c268862655 100644 --- a/plugin/trino-bigquery/src/main/java/io/trino/plugin/bigquery/BigQueryClient.java +++ b/plugin/trino-bigquery/src/main/java/io/trino/plugin/bigquery/BigQueryClient.java @@ -112,6 +112,11 @@ Optional toRemoteDataset(String projectId, String datasetN } Optional toRemoteTable(String projectId, String remoteDatasetName, String tableName) + { + return toRemoteTable(projectId, remoteDatasetName, tableName, listTables(DatasetId.of(projectId, remoteDatasetName), TABLE, VIEW)); + } + + Optional toRemoteTable(String projectId, String remoteDatasetName, String tableName, Iterable tables) { requireNonNull(projectId, "projectId is null"); requireNonNull(remoteDatasetName, "remoteDatasetName is null"); @@ -129,7 +134,7 @@ 
Optional toRemoteTable(String projectId, String remoteData // cache miss, reload the cache Map> mapping = new HashMap<>(); - for (Table table : listTables(DatasetId.of(projectId, remoteDatasetName), TABLE, VIEW)) { + for (Table table : tables) { mapping.merge( tableIdToLowerCase(table.getTableId()), Optional.of(RemoteDatabaseObject.of(table.getTableId().getTable())), @@ -202,6 +207,16 @@ Table update(TableInfo table) return bigQuery.update(table); } + public void createSchema(DatasetInfo datasetInfo) + { + bigQuery.create(datasetInfo); + } + + public void dropSchema(DatasetId datasetId) + { + bigQuery.delete(datasetId); + } + public void createTable(TableInfo tableInfo) { bigQuery.create(tableInfo); diff --git a/plugin/trino-bigquery/src/main/java/io/trino/plugin/bigquery/BigQueryMetadata.java b/plugin/trino-bigquery/src/main/java/io/trino/plugin/bigquery/BigQueryMetadata.java index 68c850fd9a31..4c965a7e8626 100644 --- a/plugin/trino-bigquery/src/main/java/io/trino/plugin/bigquery/BigQueryMetadata.java +++ b/plugin/trino-bigquery/src/main/java/io/trino/plugin/bigquery/BigQueryMetadata.java @@ -15,6 +15,7 @@ import com.google.cloud.bigquery.BigQueryException; import com.google.cloud.bigquery.DatasetId; +import com.google.cloud.bigquery.DatasetInfo; import com.google.cloud.bigquery.Field; import com.google.cloud.bigquery.Schema; import com.google.cloud.bigquery.StandardTableDefinition; @@ -44,12 +45,14 @@ import io.trino.spi.connector.LimitApplicationResult; import io.trino.spi.connector.ProjectionApplicationResult; import io.trino.spi.connector.RecordCursor; +import io.trino.spi.connector.SchemaNotFoundException; import io.trino.spi.connector.SchemaTableName; import io.trino.spi.connector.SchemaTablePrefix; import io.trino.spi.connector.SystemTable; import io.trino.spi.connector.TableNotFoundException; import io.trino.spi.expression.ConnectorExpression; import io.trino.spi.predicate.TupleDomain; +import io.trino.spi.security.TrinoPrincipal; import 
io.trino.spi.type.Type; import io.trino.spi.type.VarcharType; @@ -64,6 +67,7 @@ import static com.google.cloud.bigquery.TableDefinition.Type.TABLE; import static com.google.cloud.bigquery.TableDefinition.Type.VIEW; +import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.collect.ImmutableList.toImmutableList; import static com.google.common.collect.ImmutableMap.toImmutableMap; import static io.trino.plugin.bigquery.BigQueryType.toField; @@ -144,9 +148,10 @@ public List listTables(ConnectorSession session, Optional tableNames = ImmutableList.builder(); for (String remoteSchemaName : remoteSchemaNames) { - for (Table table : bigQueryClient.listTables(DatasetId.of(projectId, remoteSchemaName), TABLE, VIEW)) { + Iterable
tables = bigQueryClient.listTables(DatasetId.of(projectId, remoteSchemaName), TABLE, VIEW); + for (Table table : tables) { // filter ambiguous tables - boolean isAmbiguous = bigQueryClient.toRemoteTable(projectId, remoteSchemaName, table.getTableId().getTable().toLowerCase(ENGLISH)) + boolean isAmbiguous = bigQueryClient.toRemoteTable(projectId, remoteSchemaName, table.getTableId().getTable().toLowerCase(ENGLISH), tables) .filter(RemoteDatabaseObject::isAmbiguous) .isPresent(); if (!isAmbiguous) { @@ -292,6 +297,23 @@ public ConnectorTableProperties getTableProperties(ConnectorSession session, Con return new ConnectorTableProperties(); } + @Override + public void createSchema(ConnectorSession session, String schemaName, Map properties, TrinoPrincipal owner) + { + checkArgument(properties.isEmpty(), "Can't have properties for schema creation"); + DatasetInfo datasetInfo = DatasetInfo.newBuilder(schemaName).build(); + bigQueryClient.createSchema(datasetInfo); + } + + @Override + public void dropSchema(ConnectorSession session, String schemaName) + { + String remoteSchemaName = bigQueryClient.toRemoteDataset(projectId, schemaName) + .map(RemoteDatabaseObject::getOnlyRemoteName) + .orElseThrow(() -> new SchemaNotFoundException(schemaName)); + bigQueryClient.dropSchema(DatasetId.of(remoteSchemaName)); + } + @Override public void createTable(ConnectorSession session, ConnectorTableMetadata tableMetadata, boolean ignoreExisting) { diff --git a/plugin/trino-bigquery/src/test/java/io/trino/plugin/bigquery/TestBigQueryCaseInsensitiveMapping.java b/plugin/trino-bigquery/src/test/java/io/trino/plugin/bigquery/TestBigQueryCaseInsensitiveMapping.java index 89a71c4bd1fa..6a1d1a756ae8 100644 --- a/plugin/trino-bigquery/src/test/java/io/trino/plugin/bigquery/TestBigQueryCaseInsensitiveMapping.java +++ b/plugin/trino-bigquery/src/test/java/io/trino/plugin/bigquery/TestBigQueryCaseInsensitiveMapping.java @@ -172,6 +172,31 @@ public void testTableNameClash() } } + @Test + public void 
testDropSchema() + { + String schema = "Test_Drop_Case_Sensitive"; + bigQuerySqlExecutor.execute(format("DROP SCHEMA IF EXISTS `%s`", schema)); + bigQuerySqlExecutor.execute(format("CREATE SCHEMA `%s`", schema)); + + assertUpdate("DROP SCHEMA " + schema.toLowerCase(ENGLISH)); + } + + @Test + public void testDropSchemaNameClash() + { + String schema = "Test_Drop_Case_Sensitive_Clash"; + bigQuerySqlExecutor.execute(format("DROP SCHEMA IF EXISTS `%s`", schema)); + bigQuerySqlExecutor.execute(format("DROP SCHEMA IF EXISTS `%s`", schema.toLowerCase(ENGLISH))); + bigQuerySqlExecutor.execute(format("CREATE SCHEMA `%s`", schema)); + bigQuerySqlExecutor.execute(format("CREATE SCHEMA `%s`", schema.toLowerCase(ENGLISH))); + + assertQueryFails("DROP SCHEMA " + schema.toLowerCase(ENGLISH), "Found ambiguous names in BigQuery.*"); + + bigQuerySqlExecutor.execute(format("DROP SCHEMA `%s`", schema)); + bigQuerySqlExecutor.execute(format("DROP SCHEMA `%s`", schema.toLowerCase(ENGLISH))); + } + private AutoCloseable withSchema(String schemaName) { bigQuerySqlExecutor.createDataset(schemaName); diff --git a/plugin/trino-bigquery/src/test/java/io/trino/plugin/bigquery/TestBigQueryIntegrationSmokeTest.java b/plugin/trino-bigquery/src/test/java/io/trino/plugin/bigquery/TestBigQueryIntegrationSmokeTest.java index 2c73bc31f1fd..a5d13a30267a 100644 --- a/plugin/trino-bigquery/src/test/java/io/trino/plugin/bigquery/TestBigQueryIntegrationSmokeTest.java +++ b/plugin/trino-bigquery/src/test/java/io/trino/plugin/bigquery/TestBigQueryIntegrationSmokeTest.java @@ -47,6 +47,39 @@ protected QueryRunner createQueryRunner() ImmutableMap.of()); } + @Test + public void testCreateSchema() + { + String schemaName = "test_create_schema"; + + assertUpdate("DROP SCHEMA IF EXISTS " + schemaName); + + assertUpdate("CREATE SCHEMA " + schemaName); + assertUpdate("CREATE SCHEMA IF NOT EXISTS " + schemaName); + + assertQueryFails( + "CREATE SCHEMA " + schemaName, + format("\\Qline 1:1: Schema 'bigquery.%s' 
already exists\\E", schemaName)); + + assertUpdate("DROP SCHEMA " + schemaName); + } + + @Test + public void testDropSchema() + { + String schemaName = "test_drop_schema"; + + assertUpdate("DROP SCHEMA IF EXISTS " + schemaName); + assertUpdate("CREATE SCHEMA " + schemaName); + + assertUpdate("DROP SCHEMA " + schemaName); + assertUpdate("DROP SCHEMA IF EXISTS " + schemaName); + + assertQueryFails( + "DROP SCHEMA " + schemaName, + format("\\Qline 1:1: Schema 'bigquery.%s' does not exist\\E", schemaName)); + } + @Override public void testDescribeTable() { diff --git a/plugin/trino-cassandra/pom.xml b/plugin/trino-cassandra/pom.xml index 63a6e51628a1..ba428c5d95d5 100644 --- a/plugin/trino-cassandra/pom.xml +++ b/plugin/trino-cassandra/pom.xml @@ -140,6 +140,13 @@ + + io.trino + trino-main + test-jar + test + + io.trino trino-main diff --git a/plugin/trino-cassandra/src/main/java/io/trino/plugin/cassandra/CassandraRecordCursor.java b/plugin/trino-cassandra/src/main/java/io/trino/plugin/cassandra/CassandraRecordCursor.java index 853bd20ed01d..1d6df2919e6e 100644 --- a/plugin/trino-cassandra/src/main/java/io/trino/plugin/cassandra/CassandraRecordCursor.java +++ b/plugin/trino-cassandra/src/main/java/io/trino/plugin/cassandra/CassandraRecordCursor.java @@ -149,6 +149,9 @@ public Type getType(int i) @Override public boolean isNull(int i) { + if (getCassandraType(i) == TIMESTAMP) { + return currentRow.getTimestamp(i) == null; + } return currentRow.isNull(i); } } diff --git a/plugin/trino-cassandra/src/test/java/io/trino/plugin/cassandra/TestCassandraIntegrationSmokeTest.java b/plugin/trino-cassandra/src/test/java/io/trino/plugin/cassandra/TestCassandraIntegrationSmokeTest.java index d0e9e87a85c9..4dbc8aa83533 100644 --- a/plugin/trino-cassandra/src/test/java/io/trino/plugin/cassandra/TestCassandraIntegrationSmokeTest.java +++ b/plugin/trino-cassandra/src/test/java/io/trino/plugin/cassandra/TestCassandraIntegrationSmokeTest.java @@ -519,6 +519,52 @@ public void 
testUnsupportedColumnType() session.execute("DROP KEYSPACE keyspace_6"); } + @Test + public void testNullAndEmptyTimestamp() + { + String tableName = "test_empty_timestamp"; + + session.execute(format("DROP TABLE IF EXISTS %s.%s", KEYSPACE, tableName)); + session.execute(format("CREATE TABLE %s.%s (id int PRIMARY KEY, timestamp_column_with_null timestamp, timestamp_column_with_empty timestamp)", KEYSPACE, tableName)); + session.execute(format("INSERT INTO %s.%s (id, timestamp_column_with_null, timestamp_column_with_empty) VALUES (1, NULL, '')", KEYSPACE, tableName)); + assertContainsEventually(() -> execute(format("SHOW TABLES FROM cassandra.%s LIKE '%s'", KEYSPACE, tableName)), resultBuilder(getSession(), createUnboundedVarcharType()) + .row(tableName) + .build(), new Duration(1, MINUTES)); + + assertThat(query(format("SELECT timestamp_column_with_null FROM %s.%s", KEYSPACE, tableName))) + .matches("VALUES CAST(NULL AS timestamp(3) with time zone)"); + assertThat(query(format("SELECT timestamp_column_with_empty FROM %s.%s", KEYSPACE, tableName))) + .matches("VALUES CAST(NULL AS timestamp(3) with time zone)"); + + assertThat(query(format("SELECT id FROM %s.%s WHERE timestamp_column_with_null IS NULL", KEYSPACE, tableName))) + .matches("VALUES 1"); + assertThat(query(format("SELECT id FROM %s.%s WHERE timestamp_column_with_empty IS NULL", KEYSPACE, tableName))) + .matches("VALUES 1"); + + session.execute(format("DROP TABLE %s.%s", KEYSPACE, tableName)); + } + + @Test + public void testEmptyTimestampClusteringKey() + { + String tableName = "test_empty_timestamp_clustering_key"; + + session.execute(format("DROP TABLE IF EXISTS %s.%s", KEYSPACE, tableName)); + session.execute(format("CREATE TABLE %s.%s (id int, timestamp_column_with_empty timestamp, PRIMARY KEY (id, timestamp_column_with_empty))", KEYSPACE, tableName)); + session.execute(format("INSERT INTO %s.%s (id, timestamp_column_with_empty) VALUES (1, '')", KEYSPACE, tableName)); + assertContainsEventually(() -> 
execute(format("SHOW TABLES FROM cassandra.%s LIKE '%s'", KEYSPACE, tableName)), resultBuilder(getSession(), createUnboundedVarcharType()) + .row(tableName) + .build(), new Duration(1, MINUTES)); + + assertThat(query(format("SELECT timestamp_column_with_empty FROM %s.%s", KEYSPACE, tableName))) + .matches("VALUES CAST(NULL AS timestamp(3) with time zone)"); + + assertThat(query(format("SELECT id FROM %s.%s WHERE timestamp_column_with_empty IS NULL", KEYSPACE, tableName))) + .matches("VALUES 1"); + + session.execute(format("DROP TABLE %s.%s", KEYSPACE, tableName)); + } + @Test public void testNestedCollectionType() { diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveConnector.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveConnector.java index e8b7fad1be5a..555b2c305da5 100644 --- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveConnector.java +++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveConnector.java @@ -60,6 +60,7 @@ public class HiveConnector private final List> schemaProperties; private final List> tableProperties; private final List> analyzeProperties; + private final HiveMaterializedViewMetadata hiveMaterializedViewMetadata; private final ConnectorAccessControl accessControl; private final ClassLoader classLoader; @@ -81,6 +82,7 @@ public HiveConnector( List> schemaProperties, List> tableProperties, List> analyzeProperties, + HiveMaterializedViewMetadata hiveMaterializedViewMetadata, ConnectorAccessControl accessControl, ClassLoader classLoader) { @@ -100,6 +102,7 @@ public HiveConnector( this.schemaProperties = ImmutableList.copyOf(requireNonNull(schemaProperties, "schemaProperties is null")); this.tableProperties = ImmutableList.copyOf(requireNonNull(tableProperties, "tableProperties is null")); this.analyzeProperties = ImmutableList.copyOf(requireNonNull(analyzeProperties, "analyzeProperties is null")); + this.hiveMaterializedViewMetadata = requireNonNull(hiveMaterializedViewMetadata, 
"hiveMaterializedViewMetadata is null"); this.accessControl = requireNonNull(accessControl, "accessControl is null"); this.classLoader = requireNonNull(classLoader, "classLoader is null"); } @@ -178,6 +181,12 @@ public List> getTableProperties() return tableProperties; } + @Override + public List> getMaterializedViewProperties() + { + return hiveMaterializedViewMetadata.getMaterializedViewProperties(); + } + @Override public Iterable getEventListeners() { diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveMaterializedViewMetadata.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveMaterializedViewMetadata.java index 61ea0070196d..b3395f6b2eb3 100644 --- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveMaterializedViewMetadata.java +++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveMaterializedViewMetadata.java @@ -17,11 +17,15 @@ import io.trino.spi.connector.ConnectorSession; import io.trino.spi.connector.MaterializedViewFreshness; import io.trino.spi.connector.SchemaTableName; +import io.trino.spi.session.PropertyMetadata; +import java.util.List; import java.util.Optional; public interface HiveMaterializedViewMetadata { + List> getMaterializedViewProperties(); + void createMaterializedView(ConnectorSession session, SchemaTableName viewName, ConnectorMaterializedViewDefinition definition, boolean replace, boolean ignoreExisting); void dropMaterializedView(ConnectorSession session, SchemaTableName viewName); diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveMetadata.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveMetadata.java index 3ec67ef65f82..b995a5b6108d 100644 --- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveMetadata.java +++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveMetadata.java @@ -66,7 +66,6 @@ import io.trino.spi.connector.ConnectorPartitioningHandle; import io.trino.spi.connector.ConnectorSession; import 
io.trino.spi.connector.ConnectorTableHandle; -import io.trino.spi.connector.ConnectorTableLayoutHandle; import io.trino.spi.connector.ConnectorTableMetadata; import io.trino.spi.connector.ConnectorTablePartitioning; import io.trino.spi.connector.ConnectorTableProperties; @@ -2008,13 +2007,6 @@ public Optional getSchemaOwner(ConnectorSession session, Catalog throw new SchemaNotFoundException(schemaName.getSchemaName()); } - @Override - public boolean supportsMetadataDelete(ConnectorSession session, ConnectorTableHandle tableHandle, ConnectorTableLayoutHandle tableLayoutHandle) - { - HiveTableHandle handle = (HiveTableHandle) tableHandle; - return handle.getTableParameters().isEmpty() || !isFullAcidTable(handle.getTableParameters().get()); - } - @Override public Map getViews(ConnectorSession session, Optional schemaName) { @@ -2155,7 +2147,10 @@ public ColumnHandle getUpdateRowIdColumnHandle(ConnectorSession session, Connect @Override public Optional applyDelete(ConnectorSession session, ConnectorTableHandle handle) { - return Optional.of(handle); + Map parameters = ((HiveTableHandle) handle).getTableParameters() + .orElseThrow(() -> new IllegalStateException("tableParameters missing from handle")); + + return isFullAcidTable(parameters) ? 
Optional.empty() : Optional.of(handle); } @Override diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/InternalHiveConnectorFactory.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/InternalHiveConnectorFactory.java index 045a7131c6cd..fa8b467b5061 100644 --- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/InternalHiveConnectorFactory.java +++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/InternalHiveConnectorFactory.java @@ -131,6 +131,7 @@ public static Connector createConnector(String catalogName, Map Set sessionPropertiesProviders = injector.getInstance(Key.get(new TypeLiteral>() {})); HiveTableProperties hiveTableProperties = injector.getInstance(HiveTableProperties.class); HiveAnalyzeProperties hiveAnalyzeProperties = injector.getInstance(HiveAnalyzeProperties.class); + HiveMaterializedViewMetadata hiveMaterializedViewMetadata = injector.getInstance(HiveMaterializedViewMetadata.class); ConnectorAccessControl accessControl = new ClassLoaderSafeConnectorAccessControl( new SystemTableAwareAccessControl(injector.getInstance(ConnectorAccessControl.class)), classLoader); @@ -156,6 +157,7 @@ public static Connector createConnector(String catalogName, Map HiveSchemaProperties.SCHEMA_PROPERTIES, hiveTableProperties.getTableProperties(), hiveAnalyzeProperties.getAnalyzeProperties(), + hiveMaterializedViewMetadata, accessControl, classLoader); } diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/NoneHiveMaterializedViewMetadata.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/NoneHiveMaterializedViewMetadata.java index 4c41aad30d47..57aa1d8f2d02 100644 --- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/NoneHiveMaterializedViewMetadata.java +++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/NoneHiveMaterializedViewMetadata.java @@ -13,12 +13,15 @@ */ package io.trino.plugin.hive; +import com.google.common.collect.ImmutableList; import io.trino.spi.TrinoException; import 
io.trino.spi.connector.ConnectorMaterializedViewDefinition; import io.trino.spi.connector.ConnectorSession; import io.trino.spi.connector.MaterializedViewFreshness; import io.trino.spi.connector.SchemaTableName; +import io.trino.spi.session.PropertyMetadata; +import java.util.List; import java.util.Optional; import static io.trino.spi.StandardErrorCode.NOT_SUPPORTED; @@ -26,6 +29,12 @@ public class NoneHiveMaterializedViewMetadata implements HiveMaterializedViewMetadata { + @Override + public List> getMaterializedViewProperties() + { + return ImmutableList.of(); + } + @Override public void createMaterializedView(ConnectorSession session, SchemaTableName viewName, ConnectorMaterializedViewDefinition definition, boolean replace, boolean ignoreExisting) { diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/orc/OrcPageSource.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/orc/OrcPageSource.java index 9e2c11480e5f..c945dda25923 100644 --- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/orc/OrcPageSource.java +++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/orc/OrcPageSource.java @@ -229,6 +229,11 @@ static ColumnAdaptation originalFileRowIdColumn(long startingRowId, int bucketId return new OriginalFileRowIdAdaptation(startingRowId, bucketId); } + static ColumnAdaptation updatedRowColumnsWithOriginalFiles(long startingRowId, int bucketId, HiveUpdateProcessor updateProcessor, List dependencyColumns) + { + return new UpdatedRowAdaptationWithOriginalFiles(startingRowId, bucketId, updateProcessor, dependencyColumns); + } + static ColumnAdaptation updatedRowColumns(HiveUpdateProcessor updateProcessor, List dependencyColumns) { return new UpdatedRowAdaptation(updateProcessor, dependencyColumns); @@ -359,6 +364,46 @@ public Block block(Page sourcePage, MaskDeletedRowsFunction maskDeletedRowsFunct } } + /** + * This ColumnAdaptation creates a RowBlock column containing the three + * ACID columns derived from the startingRowId and
bucketId, and a special + * original files transaction block, plus a block containing + * all the columns not changed by the UPDATE statement. + */ + private static final class UpdatedRowAdaptationWithOriginalFiles + implements ColumnAdaptation + { + private final long startingRowId; + private final Block bucketBlock; + private final HiveUpdateProcessor updateProcessor; + private final List nonUpdatedSourceChannels; + + public UpdatedRowAdaptationWithOriginalFiles(long startingRowId, int bucketId, HiveUpdateProcessor updateProcessor, List dependencyColumns) + { + this.startingRowId = startingRowId; + this.bucketBlock = nativeValueToBlock(INTEGER, Long.valueOf(computeBucketValue(bucketId, 0))); + this.updateProcessor = requireNonNull(updateProcessor, "updateProcessor is null"); + requireNonNull(dependencyColumns, "dependencyColumns is null"); + this.nonUpdatedSourceChannels = updateProcessor.makeNonUpdatedSourceChannels(dependencyColumns); + } + + @Override + public Block block(Page sourcePage, MaskDeletedRowsFunction maskDeletedRowsFunction, long filePosition) + { + int positionCount = sourcePage.getPositionCount(); + ImmutableList.Builder originalFilesBlockBuilder = ImmutableList.builder(); + originalFilesBlockBuilder.add( + new RunLengthEncodedBlock(ORIGINAL_FILE_TRANSACTION_ID_BLOCK, positionCount), + createOriginalFilesRowIdBlock(startingRowId, filePosition, positionCount), + new RunLengthEncodedBlock(bucketBlock, positionCount)); + for (int channel = 0; channel < sourcePage.getChannelCount(); channel++) { + originalFilesBlockBuilder.add(sourcePage.getBlock(channel)); + } + Page page = new Page(originalFilesBlockBuilder.build().toArray(new Block[]{})); + return updateProcessor.createUpdateRowBlock(page, nonUpdatedSourceChannels, maskDeletedRowsFunction); + } + } + private static class OriginalFileRowIdAdaptation implements ColumnAdaptation { @@ -380,19 +425,19 @@ public Block block(Page sourcePage, MaskDeletedRowsFunction maskDeletedRowsFunct Optional.empty(), 
new Block[] { new RunLengthEncodedBlock(ORIGINAL_FILE_TRANSACTION_ID_BLOCK, positionCount), - createRowIdBlock(filePosition, positionCount), + createOriginalFilesRowIdBlock(startingRowId, filePosition, positionCount), new RunLengthEncodedBlock(bucketBlock, positionCount) })); return rowBlock; } + } - private Block createRowIdBlock(long filePosition, int positionCount) - { - long[] translatedRowIds = new long[positionCount]; - for (int index = 0; index < positionCount; index++) { - translatedRowIds[index] = filePosition + startingRowId + index; - } - return new LongArrayBlock(positionCount, Optional.empty(), translatedRowIds); + private static Block createOriginalFilesRowIdBlock(long startingRowId, long filePosition, int positionCount) + { + long[] translatedRowIds = new long[positionCount]; + for (int index = 0; index < positionCount; index++) { + translatedRowIds[index] = startingRowId + filePosition + index; } + return new LongArrayBlock(positionCount, Optional.empty(), translatedRowIds); } } diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/orc/OrcPageSourceFactory.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/orc/OrcPageSourceFactory.java index f4aa4dc2f58e..addcc73361cd 100644 --- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/orc/OrcPageSourceFactory.java +++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/orc/OrcPageSourceFactory.java @@ -100,6 +100,7 @@ import static io.trino.plugin.hive.HiveSessionProperties.isUseOrcColumnNames; import static io.trino.plugin.hive.ReaderPageSource.noProjectionAdaptation; import static io.trino.plugin.hive.orc.OrcPageSource.ColumnAdaptation.updatedRowColumns; +import static io.trino.plugin.hive.orc.OrcPageSource.ColumnAdaptation.updatedRowColumnsWithOriginalFiles; import static io.trino.plugin.hive.orc.OrcPageSource.handleException; import static io.trino.plugin.hive.util.HiveUtil.isDeserializerClass; import static io.trino.spi.StandardErrorCode.NOT_SUPPORTED; @@ -416,7 +417,14 @@ 
else if (transaction.isUpdate()) { List dependencyColumns = projections.stream() .filter(HiveColumnHandle::isBaseColumn) .collect(toImmutableList()); - columnAdaptations.add(updatedRowColumns(updateProcessor, dependencyColumns)); + if (originalFile) { + int bucket = bucketNumber.orElse(0); + long startingRowId = originalFileRowId.orElse(0L); + columnAdaptations.add(updatedRowColumnsWithOriginalFiles(startingRowId, bucket, updateProcessor, dependencyColumns)); + } + else { + columnAdaptations.add(updatedRowColumns(updateProcessor, dependencyColumns)); + } } return new OrcPageSource( diff --git a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergConnector.java b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergConnector.java index 122fb6a748eb..f65612cc73f4 100644 --- a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergConnector.java +++ b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergConnector.java @@ -170,6 +170,12 @@ public List> getTableProperties() return tableProperties; } + @Override + public List> getMaterializedViewProperties() + { + return tableProperties; + } + @Override public ConnectorAccessControl getAccessControl() { diff --git a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergMetadata.java b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergMetadata.java index 2befb6cec9b7..6a4ca092f3a2 100644 --- a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergMetadata.java +++ b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergMetadata.java @@ -994,6 +994,7 @@ public Optional getMaterializedView(Connect .orElseThrow(() -> new TrinoException(HIVE_INVALID_METADATA, "No view original text: " + viewName))); String storageTable = materializedView.getParameters().getOrDefault(STORAGE_TABLE, ""); + ConnectorTableMetadata tableMetadata = getTableMetadata(session, new SchemaTableName(viewName.getSchemaName(), storageTable)); return 
Optional.of(new ConnectorMaterializedViewDefinition( definition.getOriginalSql(), Optional.of(storageTable), @@ -1002,7 +1003,7 @@ public Optional getMaterializedView(Connect definition.getColumns(), definition.getComment(), materializedView.getOwner(), - new HashMap<>(materializedView.getParameters()))); + ImmutableMap.copyOf(tableMetadata.getProperties()))); } public Optional getTableToken(ConnectorSession session, ConnectorTableHandle tableHandle) @@ -1059,7 +1060,7 @@ private Map> getMaterializedViewToken(ConnectorSess return viewToken; } - String storageTableName = materializedViewDefinition.get().getProperties().getOrDefault(STORAGE_TABLE, "").toString(); + String storageTableName = Optional.ofNullable(materializedViewDefinition.get().getStorageTable()).orElse(""); org.apache.iceberg.Table icebergTable = getIcebergTable(metastore, hdfsEnvironment, session, new SchemaTableName(name.getSchemaName(), storageTableName)); String dependsOnTables = icebergTable.currentSnapshot().summary().getOrDefault(DEPENDS_ON_TABLES, ""); if (!dependsOnTables.isEmpty()) { diff --git a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestIcebergMaterializedViews.java b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestIcebergMaterializedViews.java index 88fa2dcd9df1..fe99ef8b1afc 100644 --- a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestIcebergMaterializedViews.java +++ b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestIcebergMaterializedViews.java @@ -25,9 +25,12 @@ import org.testng.annotations.BeforeClass; import org.testng.annotations.Test; +import static io.trino.testing.MaterializedResult.DEFAULT_PRECISION; import static io.trino.testing.assertions.Assert.assertEquals; import static io.trino.testing.assertions.Assert.assertFalse; import static io.trino.testing.assertions.Assert.assertTrue; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; public 
class TestIcebergMaterializedViews extends AbstractTestQueryFramework @@ -52,6 +55,44 @@ public void setUp() assertQuery("SELECT count(*) FROM base_table2", "VALUES 3"); } + @Test + public void testCreateWithInvalidPropertyFails() + { + assertThatThrownBy(() -> computeActual("CREATE MATERIALIZED VIEW materialized_view_with_property " + + "WITH (invalid_property = ARRAY['_date']) AS " + + "SELECT _bigint, _date FROM base_table1")) + .hasMessage("Catalog 'iceberg' does not support materialized view property 'invalid_property'"); + } + + @Test + public void testShowCreate() + { + assertUpdate("CREATE MATERIALIZED VIEW materialized_view_with_property " + + "WITH (partitioning = ARRAY['_date']) AS " + + "SELECT _bigint, _date FROM base_table1"); + assertQuery("SELECT COUNT(*) FROM materialized_view_with_property", "VALUES 6"); + assertThat(computeActual("SHOW CREATE MATERIALIZED VIEW materialized_view_with_property").getOnlyValue()) + .isEqualTo( + "CREATE MATERIALIZED VIEW iceberg.tpch.materialized_view_with_property\n" + + "WITH (\n" + + " format = 'ORC',\n" + + " partitioning = ARRAY['_date']\n" + + ") AS\n" + + "SELECT\n" + + " _bigint\n" + + ", _date\n" + + "FROM\n" + + " base_table1"); + assertUpdate("DROP MATERIALIZED VIEW materialized_view_with_property"); + } + + @Test + public void testSystemMaterializedViewProperties() + { + assertThat(computeActual("SELECT * FROM system.metadata.materialized_view_properties WHERE catalog_name = 'iceberg'")) + .contains(new MaterializedRow(DEFAULT_PRECISION, "iceberg", "partitioning", "[]", "array(varchar)", "Partition transforms")); + } + @Test(enabled = false) // TODO https://github.com/trinodb/trino/issues/5892 public void testCreateRefreshSelect() { diff --git a/plugin/trino-oracle/src/main/java/io/trino/plugin/oracle/OracleClient.java b/plugin/trino-oracle/src/main/java/io/trino/plugin/oracle/OracleClient.java index 186a39c2a129..fbbbc4890e0d 100644 --- 
a/plugin/trino-oracle/src/main/java/io/trino/plugin/oracle/OracleClient.java +++ b/plugin/trino-oracle/src/main/java/io/trino/plugin/oracle/OracleClient.java @@ -33,7 +33,6 @@ import io.trino.spi.connector.JoinCondition; import io.trino.spi.connector.SchemaTableName; import io.trino.spi.type.CharType; -import io.trino.spi.type.Chars; import io.trino.spi.type.DecimalType; import io.trino.spi.type.Decimals; import io.trino.spi.type.Type; @@ -306,7 +305,7 @@ else if (precision > Decimals.MAX_PRECISION || actualPrecision <= 0) { return Optional.of(ColumnMapping.sliceMapping( charType, charReadFunction(charType), - oracleCharWriteFunction(charType), + oracleCharWriteFunction(), FULL_PUSHDOWN)); case OracleTypes.VARCHAR: @@ -420,11 +419,10 @@ public static DoubleWriteFunction oracleDoubleWriteFunction() return ((statement, index, value) -> ((OraclePreparedStatement) statement).setBinaryDouble(index, value)); } - private SliceWriteFunction oracleCharWriteFunction(CharType charType) + private SliceWriteFunction oracleCharWriteFunction() { - return (statement, index, value) -> { - statement.setString(index, Chars.padSpaces(value, charType).toStringUtf8()); - }; + return (statement, index, value) -> + ((OraclePreparedStatement) statement).setFixedCHAR(index, value.toStringUtf8()); } @Override diff --git a/plugin/trino-oracle/src/test/java/io/trino/plugin/oracle/AbstractTestOracleTypeMapping.java b/plugin/trino-oracle/src/test/java/io/trino/plugin/oracle/AbstractTestOracleTypeMapping.java index d6fb13b61a27..4acc1e9743e6 100644 --- a/plugin/trino-oracle/src/test/java/io/trino/plugin/oracle/AbstractTestOracleTypeMapping.java +++ b/plugin/trino-oracle/src/test/java/io/trino/plugin/oracle/AbstractTestOracleTypeMapping.java @@ -49,17 +49,13 @@ import static io.trino.plugin.jdbc.TypeHandlingJdbcSessionProperties.UNSUPPORTED_TYPE_HANDLING; import static io.trino.plugin.jdbc.UnsupportedTypeHandling.CONVERT_TO_VARCHAR; import static 
io.trino.plugin.jdbc.UnsupportedTypeHandling.IGNORE; -import static io.trino.plugin.oracle.OracleDataTypes.CharacterSemantics.BYTE; -import static io.trino.plugin.oracle.OracleDataTypes.CharacterSemantics.CHAR; import static io.trino.plugin.oracle.OracleDataTypes.MAX_CHAR_ON_READ; import static io.trino.plugin.oracle.OracleDataTypes.MAX_CHAR_ON_WRITE; import static io.trino.plugin.oracle.OracleDataTypes.MAX_NCHAR; import static io.trino.plugin.oracle.OracleDataTypes.MAX_NVARCHAR2; import static io.trino.plugin.oracle.OracleDataTypes.MAX_VARCHAR2_ON_READ; import static io.trino.plugin.oracle.OracleDataTypes.MAX_VARCHAR2_ON_WRITE; -import static io.trino.plugin.oracle.OracleDataTypes.charDataType; import static io.trino.plugin.oracle.OracleDataTypes.dateDataType; -import static io.trino.plugin.oracle.OracleDataTypes.ncharDataType; import static io.trino.plugin.oracle.OracleDataTypes.oracleTimestamp3TimeZoneDataType; import static io.trino.plugin.oracle.OracleDataTypes.trinoTimestampWithTimeZoneDataType; import static io.trino.plugin.oracle.OracleSessionProperties.NUMBER_DEFAULT_SCALE; @@ -355,17 +351,42 @@ public void testCharReadMapping() @Test public void testCharUnicodeMapping() { - testTypeMapping("char_unicode", - unicodeTests(DataType::charDataType, codePoints(), MAX_CHAR_ON_WRITE)); + SqlDataTypeTest.create() + .addRoundTrip("char(5)", "'攻殻機動隊'", createCharType(5), "CAST('攻殻機動隊' AS char(5))") + .addRoundTrip("char(13)", "'攻殻機動隊'", createCharType(13), "CAST('攻殻機動隊' AS char(13))") + .addRoundTrip(format("char(%d)", MAX_CHAR_ON_WRITE), "'攻殻機動隊'", + createCharType(MAX_CHAR_ON_WRITE), format("CAST('攻殻機動隊' AS char(%d))", MAX_CHAR_ON_WRITE)) + .addRoundTrip("char(1)", "'😂'", createCharType(1), "CAST('😂' AS char(1))") + .addRoundTrip("char(6)", "'😂'", createCharType(6), "CAST('😂' AS char(6))") + .execute(getQueryRunner(), trinoCreateAsSelect("char_unicode")); } @Test public void testCharUnicodeReadMapping() { - testTypeReadMapping("read_char_unicode", - 
unicodeTests(charDataType(CHAR), codePoints(), MAX_CHAR_ON_READ), - unicodeTests(charDataType(BYTE), utf8Bytes(), MAX_CHAR_ON_READ), - unicodeTests(ncharDataType(), String::length, MAX_NCHAR)); + SqlDataTypeTest.create() + // the number of Unicode code points in 攻殻機動隊 is 5, and in 😂 is 1. + .addRoundTrip("char(5 char)", "'攻殻機動隊'", createCharType(5), "CAST('攻殻機動隊' AS CHAR(5))") + .addRoundTrip("char(13 char)", "'攻殻機動隊'", createCharType(13), "CAST('攻殻機動隊' AS CHAR(13))") + .addRoundTrip(format("char(%d char)", MAX_CHAR_ON_READ), "'攻殻機動隊'", + createCharType(MAX_CHAR_ON_READ), format("CAST('攻殻機動隊' AS char(%d))", MAX_CHAR_ON_READ)) + .addRoundTrip("char(1 char)", "'😂'", createCharType(1), "CAST('😂' AS CHAR(1))") + .addRoundTrip("char(6 char)", "'😂'", createCharType(6), "CAST('😂' AS CHAR(6))") + // the number of bytes using charset UTF-8 in 攻殻機動隊 is 15, and in 😂 is 4. + .addRoundTrip("char(15 byte)", "'攻殻機動隊'", createCharType(15), "CAST('攻殻機動隊' AS CHAR(15))") + .addRoundTrip("char(23 byte)", "'攻殻機動隊'", createCharType(23), "CAST('攻殻機動隊' AS CHAR(23))") + .addRoundTrip(format("char(%d byte)", MAX_CHAR_ON_READ), "'攻殻機動隊'", + createCharType(MAX_CHAR_ON_READ), format("CAST('攻殻機動隊' AS CHAR(%d))", MAX_CHAR_ON_READ)) + .addRoundTrip("char(4 byte)", "'😂'", createCharType(4), "CAST('😂' AS CHAR(4))") + .addRoundTrip("char(9 byte)", "'😂'", createCharType(9), "CAST('😂' AS CHAR(9))") + // the length of string in 攻殻機動隊 is 5, and in 😂 is 2. 
+ .addRoundTrip("nchar(5)", "'攻殻機動隊'", createCharType(5), "CAST('攻殻機動隊' AS CHAR(5))") + .addRoundTrip("nchar(13)", "'攻殻機動隊'", createCharType(13), "CAST('攻殻機動隊' AS CHAR(13))") + .addRoundTrip(format("nchar(%d)", MAX_NCHAR), "'攻殻機動隊'", + createCharType(MAX_NCHAR), format("CAST('攻殻機動隊' AS CHAR(%d))", MAX_NCHAR)) + .addRoundTrip("nchar(2)", "'😂'", createCharType(2), "CAST('😂' AS CHAR(2))") + .addRoundTrip("nchar(7)", "'😂'", createCharType(7), "CAST('😂' AS CHAR(7))") + .execute(getQueryRunner(), oracleCreateAndInsert("read_char_unicode")); } private static DataTypeTest unicodeTests(IntFunction> typeConstructor, ToIntFunction stringLength, int maxSize) diff --git a/pom.xml b/pom.xml index bab987d9ff9c..540221face54 100644 --- a/pom.xml +++ b/pom.xml @@ -59,8 +59,8 @@ 183 2.0.0 2.5.1 - 1.15.1 - 3.2.7 + 1.15.3 + 3.2.8 1.0.42 5.5.2 @@ -1264,7 +1264,7 @@ net.java.dev.jna jna - 5.5.0 + 5.8.0 diff --git a/testing/trino-product-tests/src/main/java/io/trino/tests/hive/TestHiveTransactionalTable.java b/testing/trino-product-tests/src/main/java/io/trino/tests/hive/TestHiveTransactionalTable.java index dbbf94537b90..91947fcabb52 100644 --- a/testing/trino-product-tests/src/main/java/io/trino/tests/hive/TestHiveTransactionalTable.java +++ b/testing/trino-product-tests/src/main/java/io/trino/tests/hive/TestHiveTransactionalTable.java @@ -42,6 +42,7 @@ import java.util.Optional; import java.util.concurrent.TimeoutException; import java.util.function.Consumer; +import java.util.regex.Pattern; import java.util.stream.Collectors; import java.util.stream.IntStream; import java.util.stream.Stream; @@ -77,6 +78,11 @@ public class TestHiveTransactionalTable private static final int TEST_TIMEOUT = 15 * 60 * 1000; + // Hive original file path end looks like /000000_0 + // New Trino original file path end looks like /000000_132574635756428963553891918669625313402 + // Older Trino path ends look like /20210416_190616_00000_fsymd_af6f0a3d-5449-4478-a53d-9f9f99c07ed9 + private static final Pattern 
ORIGINAL_FILE_MATCHER = Pattern.compile(".*/\\d+_\\d+(_[^/]+)?$"); + @Inject private TestHiveMetastoreClientFactory testHiveMetastoreClientFactory; @@ -253,6 +259,10 @@ public void testReadFullAcidWithOriginalFiles(boolean isPartitioned, BucketingTy String hivePartitionString = isPartitioned ? " PARTITION (part_col=2) " : ""; onHive().executeQuery("INSERT INTO TABLE " + tableName + hivePartitionString + " VALUES (21, 1)"); onHive().executeQuery("INSERT INTO TABLE " + tableName + hivePartitionString + " VALUES (22, 2)"); + + // verify that the existing rows are stored in original files + verifyOriginalFiles(tableName, "WHERE col = 21"); + onHive().executeQuery("ALTER TABLE " + tableName + " SET " + hiveTableProperties(ACID, bucketingType)); // read with original files @@ -292,6 +302,10 @@ public void testUpdateFullAcidWithOriginalFilesPrestoInserting(boolean isPartiti String hivePartitionString = isPartitioned ? " PARTITION (part_col=2) " : ""; onHive().executeQuery("INSERT INTO TABLE " + tableName + hivePartitionString + " VALUES (21, 1)"); onHive().executeQuery("INSERT INTO TABLE " + tableName + hivePartitionString + " VALUES (22, 2)"); + + // verify that the existing rows are stored in original files + verifyOriginalFiles(tableName, "WHERE col = 21"); + onHive().executeQuery("ALTER TABLE " + tableName + " SET " + hiveTableProperties(ACID, bucketingType)); // read with original files @@ -339,6 +353,9 @@ public void testUpdateFullAcidWithOriginalFilesPrestoInsertingAndDeleting(boolea onHive().executeQuery("INSERT INTO TABLE " + tableName + hivePartitionString + " VALUES (10, 100), (11, 110), (12, 120), (13, 130), (14, 140)"); onHive().executeQuery("INSERT INTO TABLE " + tableName + hivePartitionString + " VALUES (15, 150), (16, 160), (17, 170), (18, 180), (19, 190)"); + // verify that the existing rows are stored in original files + verifyOriginalFiles(tableName, "WHERE col = 10"); + onHive().executeQuery("ALTER TABLE " + tableName + " SET " + 
hiveTableProperties(ACID, bucketingType)); // read with original files @@ -392,6 +409,10 @@ public void testReadInsertOnlyWithOriginalFiles(boolean isPartitioned, Bucketing onHive().executeQuery("INSERT INTO TABLE " + tableName + hivePartitionString + " VALUES (1)"); onHive().executeQuery("INSERT INTO TABLE " + tableName + hivePartitionString + " VALUES (2)"); + + // verify that the existing rows are stored in original files + verifyOriginalFiles(tableName, "WHERE col = 1"); + onHive().executeQuery("ALTER TABLE " + tableName + " SET " + hiveTableProperties(INSERT_ONLY, bucketingType)); // read with original files @@ -1314,15 +1335,17 @@ public void testInsertDeletUpdateWithPrestoAndHive() } @Test(groups = HIVE_TRANSACTIONAL, timeOut = TEST_TIMEOUT) - public void testDeletePartitionedTable() + public void testDeleteFromOriginalFiles() { - withTemporaryTable("delete_partitioned", true, true, NONE, tableName -> { + withTemporaryTable("delete_original_files", true, true, NONE, tableName -> { onTrino().executeQuery(format("CREATE TABLE %s WITH (transactional = true, partitioned_by = ARRAY['regionkey'])" + " AS SELECT nationkey, name, regionkey FROM tpch.tiny.nation", tableName)); - // SELECT before deletion may prime the cache on the Hive side + verifyOriginalFiles(tableName, "WHERE regionkey = 4"); verifySelectForPrestoAndHive("SELECT count(*) FROM " + tableName, "true", row(25)); + verifySelectForPrestoAndHive(format("SELECT nationkey, name FROM %s", tableName), "regionkey = 4", row(4, "EGYPT"), row(10, "IRAN"), row(11, "IRAQ"), row(13, "JORDAN"), row(20, "SAUDI ARABIA")); onTrino().executeQuery(format("DELETE FROM %s WHERE regionkey = 4 AND nationkey %% 10 = 3", tableName)); verifySelectForPrestoAndHive("SELECT count(*) FROM " + tableName, "true", row(24)); + verifySelectForPrestoAndHive(format("SELECT nationkey, name FROM %s", tableName), "regionkey = 4", row(4, "EGYPT"), row(10, "IRAN"), row(11, "IRAQ"), row(20, "SAUDI ARABIA")); }); } @@ -1332,13 +1355,58 @@ 
public void testDeleteWholePartition() withTemporaryTable("delete_partitioned", true, true, NONE, tableName -> { onTrino().executeQuery(format("CREATE TABLE %s WITH (transactional = true, partitioned_by = ARRAY['regionkey'])" + " AS SELECT nationkey, name, regionkey FROM tpch.tiny.nation", tableName)); - // SELECT before deletion may prime the cache on the Hive side + + verifyOriginalFiles(tableName, "WHERE regionkey = 4"); + verifySelectForPrestoAndHive("SELECT count(*) FROM " + tableName, "true", row(25)); + + // verify all partitions exist + assertThat(onTrino().executeQuery(format("SELECT * FROM \"%s$partitions\"", tableName))) + .containsOnly(row(0), row(1), row(2), row(3), row(4)); + + // run delete and verify row count onTrino().executeQuery(format("DELETE FROM %s WHERE regionkey = 4", tableName)); verifySelectForPrestoAndHive("SELECT count(*) FROM " + tableName, "true", row(20)); + + // verify all partitions still exist + assertThat(onTrino().executeQuery(format("SELECT * FROM \"%s$partitions\"", tableName))) + .containsOnly(row(0), row(1), row(2), row(3), row(4)); + }); + } + + @Test(groups = HIVE_TRANSACTIONAL, timeOut = TEST_TIMEOUT) + public void testUpdateOriginalFilesPartitioned() + { + withTemporaryTable("update_original_files", true, true, NONE, tableName -> { + onTrino().executeQuery(format("CREATE TABLE %s WITH (transactional = true, partitioned_by = ARRAY['regionkey'])" + + " AS SELECT nationkey, name, regionkey FROM tpch.tiny.nation", tableName)); + verifyOriginalFiles(tableName, "WHERE regionkey = 4"); + verifySelectForPrestoAndHive("SELECT nationkey, name FROM " + tableName, "regionkey = 4", row(4, "EGYPT"), row(10, "IRAN"), row(11, "IRAQ"), row(13, "JORDAN"), row(20, "SAUDI ARABIA")); + onTrino().executeQuery(format("UPDATE %s SET nationkey = 100 WHERE regionkey = 4 AND nationkey %% 10 = 3", tableName)); + verifySelectForPrestoAndHive("SELECT nationkey, name FROM " + tableName, "regionkey = 4", row(4, "EGYPT"), row(10, "IRAN"), row(11, 
"IRAQ"), row(100, "JORDAN"), row(20, "SAUDI ARABIA")); }); } + @Test(groups = HIVE_TRANSACTIONAL, timeOut = TEST_TIMEOUT) + public void testUpdateOriginalFilesUnpartitioned() + { + withTemporaryTable("update_original_files", true, true, NONE, tableName -> { + onTrino().executeQuery(format("CREATE TABLE %s WITH (transactional = true)" + + " AS SELECT nationkey, name, regionkey FROM tpch.tiny.nation", tableName)); + verifyOriginalFiles(tableName, "WHERE regionkey = 4"); + verifySelectForPrestoAndHive("SELECT nationkey, name, regionkey FROM " + tableName, "nationkey % 10 = 3", row(3, "CANADA", 1), row(13, "JORDAN", 4), row(23, "UNITED KINGDOM", 3)); + onTrino().executeQuery(format("UPDATE %s SET nationkey = nationkey + 100 WHERE nationkey %% 10 = 3", tableName)); + verifySelectForPrestoAndHive("SELECT nationkey, name, regionkey FROM " + tableName, "nationkey % 10 = 3", row(103, "CANADA", 1), row(113, "JORDAN", 4), row(123, "UNITED KINGDOM", 3)); + }); + } + + private void verifyOriginalFiles(String tableName, String whereClause) + { + QueryResult result = onTrino().executeQuery(format("SELECT DISTINCT \"$path\" FROM %s %s", tableName, whereClause)); + String path = (String) result.row(0).get(0); + checkArgument(ORIGINAL_FILE_MATCHER.matcher(path).matches(), "Path should be original file path, but isn't, path: %s", path); + } + @DataProvider public Object[][] insertersProvider() { diff --git a/testing/trino-product-tests/src/main/resources/sql-tests/testcases/system/selectInformationSchemaColumns.result b/testing/trino-product-tests/src/main/resources/sql-tests/testcases/system/selectInformationSchemaColumns.result index 01ea35f2da63..bbd9b989e3d6 100644 --- a/testing/trino-product-tests/src/main/resources/sql-tests/testcases/system/selectInformationSchemaColumns.result +++ b/testing/trino-product-tests/src/main/resources/sql-tests/testcases/system/selectInformationSchemaColumns.result @@ -51,6 +51,11 @@ system| metadata| column_properties| property_name| varchar| YES| 
null| null| system| metadata| column_properties| default_value| varchar| YES| null| null| system| metadata| column_properties| type| varchar| YES| null| null| system| metadata| column_properties| description| varchar| YES| null| null| +system| metadata| materialized_view_properties| catalog_name| varchar| YES| null| null| +system| metadata| materialized_view_properties| property_name| varchar| YES| null| null| +system| metadata| materialized_view_properties| default_value| varchar| YES| null| null| +system| metadata| materialized_view_properties| type| varchar| YES| null| null| +system| metadata| materialized_view_properties| description| varchar| YES| null| null| system| metadata| schema_properties| catalog_name| varchar| YES| null| null| system| metadata| schema_properties| property_name| varchar| YES| null| null| system| metadata| schema_properties| default_value| varchar| YES| null| null| diff --git a/testing/trino-product-tests/src/main/resources/sql-tests/testcases/system/showTablesSystemMetadata.result b/testing/trino-product-tests/src/main/resources/sql-tests/testcases/system/showTablesSystemMetadata.result index 5fd76135c937..3323d137c5e8 100644 --- a/testing/trino-product-tests/src/main/resources/sql-tests/testcases/system/showTablesSystemMetadata.result +++ b/testing/trino-product-tests/src/main/resources/sql-tests/testcases/system/showTablesSystemMetadata.result @@ -3,5 +3,6 @@ catalogs| analyze_properties| schema_properties| table_properties| +materialized_view_properties| column_properties| table_comments| diff --git a/testing/trino-test-jdbc-compatibility-old-server/pom.xml b/testing/trino-test-jdbc-compatibility-old-server/pom.xml index 3fccb1a3447b..42ec34f5e8d6 100644 --- a/testing/trino-test-jdbc-compatibility-old-server/pom.xml +++ b/testing/trino-test-jdbc-compatibility-old-server/pom.xml @@ -75,12 +75,13 @@ org.testcontainers - presto + testcontainers test + org.testcontainers - testcontainers + trino test diff --git 
a/testing/trino-test-jdbc-compatibility-old-server/src/test/java/io/trino/TestJdbcResultSetCompatibilityOldServer.java b/testing/trino-test-jdbc-compatibility-old-server/src/test/java/io/trino/TestJdbcResultSetCompatibilityOldServer.java index 9f395420bccc..6047b4a03e73 100644 --- a/testing/trino-test-jdbc-compatibility-old-server/src/test/java/io/trino/TestJdbcResultSetCompatibilityOldServer.java +++ b/testing/trino-test-jdbc-compatibility-old-server/src/test/java/io/trino/TestJdbcResultSetCompatibilityOldServer.java @@ -16,7 +16,7 @@ import com.google.common.collect.ImmutableList; import com.google.common.io.Resources; import io.trino.jdbc.BaseTestJdbcResultSet; -import org.testcontainers.containers.PrestoContainer; +import org.testcontainers.containers.TrinoContainer; import org.testcontainers.utility.DockerImageName; import org.testng.annotations.AfterClass; import org.testng.annotations.BeforeClass; @@ -50,7 +50,7 @@ public class TestJdbcResultSetCompatibilityOldServer * Empty means that we could not obtain current Trino version and tests defined here will be marked as failed. */ private final Optional testedTrinoVersion; - private PrestoContainer trinoContainer; + private TrinoContainer trinoContainer; @Factory(dataProvider = "testedTrinoVersions") public TestJdbcResultSetCompatibilityOldServer(Optional testedTrinoVersion) @@ -97,11 +97,10 @@ public void setupTrinoContainer() throw new AssertionError("Could not determine current Trino version"); } - // TODO: add TrinoContainer to Testcontainers DockerImageName image = DockerImageName.parse("trinodb/trino") .withTag(testedTrinoVersion.get()) .asCompatibleSubstituteFor("prestosql/presto"); - trinoContainer = new PrestoContainer<>(image); + trinoContainer = new TrinoContainer(image); trinoContainer.start(); // verify that version reported by Trino server matches requested one.