Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@
*/
package io.trino.connector;

import com.google.common.collect.ImmutableList;
import io.trino.FullConnectorSession;
import io.trino.Session;
import io.trino.metadata.MaterializedViewDefinition;
Expand Down Expand Up @@ -54,12 +55,12 @@ public Optional<ConnectorTableSchema> getRelationMetadata(ConnectorSession conne

Optional<MaterializedViewDefinition> materializedView = metadata.getMaterializedView(session, qualifiedName);
if (materializedView.isPresent()) {
return Optional.of(new ConnectorTableSchema(tableName.getSchemaTableName(), toColumnSchema(materializedView.get().getColumns())));
return Optional.of(new ConnectorTableSchema(tableName.getSchemaTableName(), toColumnSchema(materializedView.get().getColumns()), ImmutableList.of()));
}

Optional<ViewDefinition> view = metadata.getView(session, qualifiedName);
if (view.isPresent()) {
return Optional.of(new ConnectorTableSchema(tableName.getSchemaTableName(), toColumnSchema(view.get().getColumns())));
return Optional.of(new ConnectorTableSchema(tableName.getSchemaTableName(), toColumnSchema(view.get().getColumns()), ImmutableList.of()));
}

Optional<TableHandle> tableHandle = metadata.getTableHandle(session, qualifiedName);
Expand Down
22 changes: 17 additions & 5 deletions core/trino-main/src/main/java/io/trino/sql/analyzer/Analysis.java
Original file line number Diff line number Diff line change
Expand Up @@ -89,7 +89,6 @@
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Deque;
import java.util.HashSet;
import java.util.LinkedHashMap;
Expand All @@ -112,6 +111,7 @@
import static java.lang.Boolean.FALSE;
import static java.lang.String.format;
import static java.util.Collections.emptyList;
import static java.util.Collections.unmodifiableList;
import static java.util.Collections.unmodifiableMap;
import static java.util.Collections.unmodifiableSet;
import static java.util.Objects.requireNonNull;
Expand Down Expand Up @@ -212,6 +212,7 @@ public class Analysis

private final Multiset<RowFilterScopeEntry> rowFilterScopes = HashMultiset.create();
private final Map<NodeRef<Table>, List<Expression>> rowFilters = new LinkedHashMap<>();
private final Map<NodeRef<Table>, List<Expression>> checkConstraints = new LinkedHashMap<>();

private final Multiset<ColumnMaskScopeEntry> columnMaskScopes = HashMultiset.create();
private final Map<NodeRef<Table>, Map<String, List<Expression>>> columnMasks = new LinkedHashMap<>();
Expand Down Expand Up @@ -1071,11 +1072,22 @@ public void addRowFilter(Table table, Expression filter)
.add(filter);
}

// Records one analyzed CHECK constraint expression for the given table reference.
// Constraints accumulate per table node in insertion order (backed by an ArrayList
// inside a LinkedHashMap keyed by NodeRef identity).
// NOTE(review): name is plural but each call adds a single constraint — consider renaming.
public void addCheckConstraints(Table table, Expression constraint)
{
checkConstraints.computeIfAbsent(NodeRef.of(table), node -> new ArrayList<>())
.add(constraint);
}

/**
 * Returns the row filter expressions recorded for the given table node, or an
 * empty list when none were registered.
 *
 * <p>Wrapped in {@code unmodifiableList} so callers cannot mutate the internal
 * per-table list — consistent with {@code getCheckConstraints}.
 */
public List<Expression> getRowFilters(Table node)
{
    return unmodifiableList(rowFilters.getOrDefault(NodeRef.of(node), ImmutableList.of()));
}

// Returns an unmodifiable view of the CHECK constraint expressions recorded for the
// given table node; empty list when the table has no recorded constraints.
public List<Expression> getCheckConstraints(Table node)
{
return unmodifiableList(checkConstraints.getOrDefault(NodeRef.of(node), ImmutableList.of()));
}

public boolean hasColumnMask(QualifiedObjectName table, String column, String identity)
{
return columnMaskScopes.contains(new ColumnMaskScopeEntry(table, column, identity));
Expand Down Expand Up @@ -1572,22 +1584,22 @@ public void addQuantifiedComparisons(List<QuantifiedComparisonExpression> expres

// Returns an unmodifiable view of the recorded IN-predicate subqueries.
// (Diff-scrape artifact removed: the old `Collections.unmodifiableList` line and the new
// statically-imported form were both present, which is a duplicate unreachable statement.)
public List<InPredicate> getInPredicatesSubqueries()
{
    return unmodifiableList(inPredicatesSubqueries);
}

// Returns an unmodifiable view of the recorded scalar subqueries.
// (Diff-scrape artifact removed: duplicated old/new return lines collapsed into one.)
public List<SubqueryExpression> getSubqueries()
{
    return unmodifiableList(subqueries);
}

// Returns an unmodifiable view of the recorded EXISTS subqueries.
// (Diff-scrape artifact removed: duplicated old/new return lines collapsed into one.)
public List<ExistsPredicate> getExistsSubqueries()
{
    return unmodifiableList(existsSubqueries);
}

// Returns an unmodifiable view of the recorded quantified-comparison (ANY/ALL/SOME) subqueries.
// (Diff-scrape artifact removed: duplicated old/new return lines collapsed into one.)
public List<QuantifiedComparisonExpression> getQuantifiedComparisonSubqueries()
{
    return unmodifiableList(quantifiedComparisonSubqueries);
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -115,7 +115,6 @@
import io.trino.sql.analyzer.Scope.AsteriskedIdentifierChainBasis;
import io.trino.sql.parser.ParsingException;
import io.trino.sql.parser.SqlParser;
import io.trino.sql.planner.DeterminismEvaluator;
import io.trino.sql.planner.ExpressionInterpreter;
import io.trino.sql.planner.PartitioningHandle;
import io.trino.sql.planner.ScopeAware;
Expand Down Expand Up @@ -297,6 +296,7 @@
import static io.trino.spi.StandardErrorCode.FUNCTION_NOT_FOUND;
import static io.trino.spi.StandardErrorCode.FUNCTION_NOT_WINDOW;
import static io.trino.spi.StandardErrorCode.INVALID_ARGUMENTS;
import static io.trino.spi.StandardErrorCode.INVALID_CHECK_CONSTRAINT;
import static io.trino.spi.StandardErrorCode.INVALID_COLUMN_REFERENCE;
import static io.trino.spi.StandardErrorCode.INVALID_COPARTITIONING;
import static io.trino.spi.StandardErrorCode.INVALID_FUNCTION_ARGUMENT;
Expand Down Expand Up @@ -365,6 +365,8 @@
import static io.trino.sql.analyzer.SemanticExceptions.semanticException;
import static io.trino.sql.analyzer.TypeSignatureProvider.fromTypes;
import static io.trino.sql.analyzer.TypeSignatureTranslator.toTypeSignature;
import static io.trino.sql.planner.DeterminismEvaluator.containsCurrentTimeFunctions;
import static io.trino.sql.planner.DeterminismEvaluator.isDeterministic;
import static io.trino.sql.planner.ExpressionInterpreter.evaluateConstantExpression;
import static io.trino.sql.tree.BooleanLiteral.TRUE_LITERAL;
import static io.trino.sql.tree.DereferenceExpression.getQualifiedName;
Expand Down Expand Up @@ -542,6 +544,7 @@ protected Scope visitInsert(Insert insert, Optional<Scope> scope)
List<ColumnSchema> columns = tableSchema.getColumns().stream()
.filter(column -> !column.isHidden())
.collect(toImmutableList());
List<String> checkConstraints = tableSchema.getTableSchema().getCheckConstraints();
Comment thread
ebyhr marked this conversation as resolved.
Outdated

for (ColumnSchema column : columns) {
if (!accessControl.getColumnMasks(session.toSecurityContext(), targetTable, column.getName(), column.getType()).isEmpty()) {
Expand All @@ -551,7 +554,12 @@ protected Scope visitInsert(Insert insert, Optional<Scope> scope)

Map<String, ColumnHandle> columnHandles = metadata.getColumnHandles(session, targetTableHandle.get());
List<Field> tableFields = analyzeTableOutputFields(insert.getTable(), targetTable, tableSchema, columnHandles);
analyzeFiltersAndMasks(insert.getTable(), targetTable, targetTableHandle, tableFields, session.getIdentity().getUser());
Scope accessControlScope = Scope.builder()
.withRelationType(RelationId.anonymous(), new RelationType(tableFields))
.build();
analyzeFiltersAndMasks(insert.getTable(), targetTable, new RelationType(tableFields), accessControlScope);
analyzeCheckConstraints(insert.getTable(), targetTable, accessControlScope, checkConstraints);
analysis.registerTable(insert.getTable(), targetTableHandle, targetTable, session.getIdentity().getUser(), accessControlScope);

List<String> tableColumns = columns.stream()
.map(ColumnSchema::getName)
Expand Down Expand Up @@ -801,7 +809,12 @@ protected Scope visitDelete(Delete node, Optional<Scope> scope)

analysis.setUpdateType("DELETE");
analysis.setUpdateTarget(tableName, Optional.of(table), Optional.empty());
analyzeFiltersAndMasks(table, tableName, Optional.of(handle), analysis.getScope(table).getRelationType(), session.getIdentity().getUser());
Scope accessControlScope = Scope.builder()
.withRelationType(RelationId.anonymous(), analysis.getScope(table).getRelationType())
.build();
analyzeFiltersAndMasks(table, tableName, analysis.getScope(table).getRelationType(), accessControlScope);
analyzeCheckConstraints(table, tableName, accessControlScope, tableSchema.getTableSchema().getCheckConstraints());
analysis.registerTable(table, Optional.of(handle), tableName, session.getIdentity().getUser(), accessControlScope);

createMergeAnalysis(table, handle, tableSchema, tableScope, tableScope, ImmutableList.of());

Expand Down Expand Up @@ -2188,7 +2201,12 @@ protected Scope visitTable(Table table, Optional<Scope> scope)

List<Field> outputFields = fields.build();

analyzeFiltersAndMasks(table, targetTableName, tableHandle, outputFields, session.getIdentity().getUser());
Scope accessControlScope = Scope.builder()
.withRelationType(RelationId.anonymous(), new RelationType(outputFields))
.build();
analyzeFiltersAndMasks(table, targetTableName, new RelationType(outputFields), accessControlScope);
analyzeCheckConstraints(table, targetTableName, accessControlScope, tableSchema.getTableSchema().getCheckConstraints());
analysis.registerTable(table, tableHandle, targetTableName, session.getIdentity().getUser(), accessControlScope);

Scope tableScope = createAndAssignScope(table, scope, outputFields);

Expand All @@ -2208,17 +2226,8 @@ private void checkStorageTableNotRedirected(QualifiedObjectName source)
});
}

private void analyzeFiltersAndMasks(Table table, QualifiedObjectName name, Optional<TableHandle> tableHandle, List<Field> fields, String authorization)
{
analyzeFiltersAndMasks(table, name, tableHandle, new RelationType(fields), authorization);
}

private void analyzeFiltersAndMasks(Table table, QualifiedObjectName name, Optional<TableHandle> tableHandle, RelationType relationType, String authorization)
private void analyzeFiltersAndMasks(Table table, QualifiedObjectName name, RelationType relationType, Scope accessControlScope)
Copy link
Copy Markdown
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Why did this change? It seems unrelated to this new feature.

Copy link
Copy Markdown
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The same Scope instance needs to be used between row filters and check constraints for the subsequent Analysis#registerTable. Otherwise, PlanSanityChecker throws "Unexpected identifier in logical plan". Do you have alternative suggestions to avoid this change?

Copy link
Copy Markdown
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Ok, that makes sense. Please add that short explanation to the commit message so it doesn't get lost.

{
Scope accessControlScope = Scope.builder()
.withRelationType(RelationId.anonymous(), relationType)
.build();

for (int index = 0; index < relationType.getAllFieldCount(); index++) {
Field field = relationType.getFieldByIndex(index);
if (field.getName().isPresent()) {
Expand All @@ -2232,8 +2241,14 @@ private void analyzeFiltersAndMasks(Table table, QualifiedObjectName name, Optio

accessControl.getRowFilters(session.toSecurityContext(), name)
.forEach(filter -> analyzeRowFilter(session.getIdentity().getUser(), table, name, accessControlScope, filter));
}

analysis.registerTable(table, tableHandle, name, authorization, accessControlScope);
/**
 * Analyzes each raw CHECK constraint SQL text for the given table.
 *
 * <p>Every constraint is wrapped in a {@code ViewExpression} owned by the current
 * session user and scoped to the table's catalog and schema, then delegated to
 * {@code analyzeCheckConstraint} under the shared access-control scope.
 */
private void analyzeCheckConstraints(Table table, QualifiedObjectName name, Scope accessControlScope, List<String> constraints)
{
    String owner = session.getIdentity().getUser();
    Optional<String> catalogName = Optional.of(name.getCatalogName());
    Optional<String> schemaName = Optional.of(name.getSchemaName());
    constraints.forEach(sql ->
            analyzeCheckConstraint(table, name, accessControlScope, new ViewExpression(owner, catalogName, schemaName, sql)));
}

private boolean checkCanSelectFromColumn(QualifiedObjectName name, String column)
Expand Down Expand Up @@ -2375,13 +2390,21 @@ private Scope createScopeForView(

if (storageTable.isPresent()) {
List<Field> storageTableFields = analyzeStorageTable(table, viewFields, storageTable.get());
analyzeFiltersAndMasks(table, name, storageTable, viewFields, session.getIdentity().getUser());
Scope accessControlScope = Scope.builder()
.withRelationType(RelationId.anonymous(), new RelationType(viewFields))
.build();
analyzeFiltersAndMasks(table, name, new RelationType(viewFields), accessControlScope);
analysis.registerTable(table, storageTable, name, session.getIdentity().getUser(), accessControlScope);
analysis.addRelationCoercion(table, viewFields.stream().map(Field::getType).toArray(Type[]::new));
// use storage table output fields as they contain ColumnHandles
return createAndAssignScope(table, scope, storageTableFields);
}

analyzeFiltersAndMasks(table, name, storageTable, viewFields, session.getIdentity().getUser());
Scope accessControlScope = Scope.builder()
.withRelationType(RelationId.anonymous(), new RelationType(viewFields))
.build();
analyzeFiltersAndMasks(table, name, new RelationType(viewFields), accessControlScope);
analysis.registerTable(table, storageTable, name, session.getIdentity().getUser(), accessControlScope);
viewFields.forEach(field -> analysis.addSourceColumns(field, ImmutableSet.of(new SourceColumn(name, field.getName().orElseThrow()))));
analysis.registerNamedQuery(table, query);
return createAndAssignScope(table, scope, viewFields);
Expand Down Expand Up @@ -3174,6 +3197,10 @@ protected Scope visitUpdate(Update update, Optional<Scope> scope)
if (!accessControl.getRowFilters(session.toSecurityContext(), tableName).isEmpty()) {
throw semanticException(NOT_SUPPORTED, update, "Updating a table with a row filter is not supported");
}
if (!tableSchema.getTableSchema().getCheckConstraints().isEmpty()) {
// TODO https://github.com/trinodb/trino/issues/15411 Add support for CHECK constraint to UPDATE statement
throw semanticException(NOT_SUPPORTED, update, "Updating a table with a check constraint is not supported");
Comment on lines 3201 to 3202
Copy link
Copy Markdown
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Nice!

}

// TODO: how to deal with connectors that need to see the pre-image of rows to perform the update without
// flowing that data through the masking logic
Expand Down Expand Up @@ -3301,6 +3328,10 @@ protected Scope visitMerge(Merge merge, Optional<Scope> scope)
if (!accessControl.getRowFilters(session.toSecurityContext(), tableName).isEmpty()) {
throw semanticException(NOT_SUPPORTED, merge, "Cannot merge into a table with row filters");
}
if (!tableSchema.getTableSchema().getCheckConstraints().isEmpty()) {
// TODO https://github.com/trinodb/trino/issues/15411 Add support for CHECK constraint to MERGE statement
throw semanticException(NOT_SUPPORTED, merge, "Cannot merge into a table with check constraints");
}

Scope targetTableScope = analyzer.analyzeForUpdate(relation, scope, UpdateKind.MERGE);
Scope sourceTableScope = process(merge.getSource(), scope);
Expand Down Expand Up @@ -4646,6 +4677,62 @@ private void analyzeRowFilter(String currentIdentity, Table table, QualifiedObje
analysis.addRowFilter(table, expression);
}

private void analyzeCheckConstraint(Table table, QualifiedObjectName name, Scope scope, ViewExpression constraint)
{
Expression expression;
try {
expression = sqlParser.createExpression(constraint.getExpression(), createParsingOptions(session));
}
catch (ParsingException e) {
throw new TrinoException(INVALID_CHECK_CONSTRAINT, extractLocation(table), format("Invalid check constraint for '%s': %s", name, e.getErrorMessage()), e);
}

verifyNoAggregateWindowOrGroupingFunctions(session, metadata, expression, format("Check constraint for '%s'", name));

ExpressionAnalysis expressionAnalysis;
try {
Identity filterIdentity = Identity.forUser(constraint.getIdentity())
.withGroups(groupProvider.getGroups(constraint.getIdentity()))
.build();
expressionAnalysis = ExpressionAnalyzer.analyzeExpression(
createViewSession(constraint.getCatalog(), constraint.getSchema(), filterIdentity, session.getPath()),
plannerContext,
statementAnalyzerFactory,
accessControl,
scope,
analysis,
expression,
warningCollector,
correlationSupport);
}
catch (TrinoException e) {
throw new TrinoException(e::getErrorCode, extractLocation(table), format("Invalid check constraint for '%s': %s", name, e.getRawMessage()), e);
}

// Ensure that the expression doesn't contain non-deterministic functions. This should be "retrospectively deterministic" per SQL standard.
if (!isDeterministic(expression, this::getResolvedFunction)) {
Copy link
Copy Markdown
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

EXPRESSION_NOT_IN_DISTINCT is the wrong error code for this.

throw semanticException(INVALID_CHECK_CONSTRAINT, expression, "Check constraint expression should be deterministic");
}
if (containsCurrentTimeFunctions(expression)) {
throw semanticException(INVALID_CHECK_CONSTRAINT, expression, "Check constraint expression should not contain temporal expression");
}

analysis.recordSubqueries(expression, expressionAnalysis);

Type actualType = expressionAnalysis.getType(expression);
if (!actualType.equals(BOOLEAN)) {
TypeCoercion coercion = new TypeCoercion(plannerContext.getTypeManager()::getType);

if (!coercion.canCoerce(actualType, BOOLEAN)) {
throw new TrinoException(TYPE_MISMATCH, extractLocation(table), format("Expected check constraint for '%s' to be of type BOOLEAN, but was %s", name, actualType), null);
}

analysis.addCoercion(expression, BOOLEAN, coercion.isTypeOnlyCoercion(actualType, BOOLEAN));
Copy link
Copy Markdown
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Is this coercion getting applied?
(see #15744)

}

analysis.addCheckConstraints(table, expression);
}

private void analyzeColumnMask(String currentIdentity, Table table, QualifiedObjectName tableName, Field field, Scope scope, ViewExpression mask)
{
String column = field.getName().orElseThrow();
Expand Down Expand Up @@ -5031,7 +5118,7 @@ private void verifySelectDistinct(QuerySpecification node, List<Expression> orde
}

for (Expression expression : orderByExpressions) {
if (!DeterminismEvaluator.isDeterministic(expression, this::getResolvedFunction)) {
if (!isDeterministic(expression, this::getResolvedFunction)) {
throw semanticException(EXPRESSION_NOT_IN_DISTINCT, expression, "Non deterministic ORDER BY expression is not supported with SELECT DISTINCT");
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@

import io.trino.metadata.Metadata;
import io.trino.metadata.ResolvedFunction;
import io.trino.sql.tree.CurrentTime;
import io.trino.sql.tree.DefaultExpressionTraversalVisitor;
import io.trino.sql.tree.Expression;
import io.trino.sql.tree.FunctionCall;
Expand Down Expand Up @@ -65,4 +66,24 @@ protected Void visitFunctionCall(FunctionCall node, AtomicBoolean deterministic)
return super.visitFunctionCall(node, deterministic);
}
}

/**
 * Returns {@code true} if the given expression tree contains any CurrentTime node.
 *
 * @param expression expression to inspect; must not be null
 */
public static boolean containsCurrentTimeFunctions(Expression expression)
{
    requireNonNull(expression, "expression is null");

    // The visitor flips the flag when it encounters a CurrentTime node during traversal.
    AtomicBoolean foundCurrentTime = new AtomicBoolean();
    new CurrentTimeVisitor().process(expression, foundCurrentTime);
    return foundCurrentTime.get();
}

// Traversal visitor that records in its AtomicBoolean context whether any CurrentTime
// AST node appears in the visited expression.
private static class CurrentTimeVisitor
extends DefaultExpressionTraversalVisitor<AtomicBoolean>
{
@Override
protected Void visitCurrentTime(CurrentTime node, AtomicBoolean currentTime)
{
// Set the flag, then delegate to the default traversal to continue visiting children.
currentTime.set(true);
return super.visitCurrentTime(node, currentTime);
}
}
}
Loading