Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -1292,8 +1292,8 @@ public void testGetColumnsMetadataCalls()
list("TABLE_CAT", "TABLE_SCHEM", "TABLE_NAME", "COLUMN_NAME", "TYPE_NAME")),
list(list(COUNTING_CATALOG, "test_schema1", "test_table1", "column_17", "varchar")),
new MetadataCallsCount()
.withListSchemasCount(2)
.withListTablesCount(3)
.withListSchemasCount(1)
.withListTablesCount(2)
.withGetColumnsCount(1));

// LIKE predicate on schema name and table name, but no predicate on catalog name
Expand All @@ -1306,8 +1306,8 @@ public void testGetColumnsMetadataCalls()
.mapToObj(columnIndex -> list(COUNTING_CATALOG, "test_schema1", "test_table1", "column_" + columnIndex, "varchar"))
.collect(toImmutableList()),
new MetadataCallsCount()
.withListSchemasCount(2)
.withListTablesCount(3)
.withListSchemasCount(1)
.withListTablesCount(2)
.withGetColumnsCount(1));

// LIKE predicate on schema name, but no predicate on catalog name and table name
Expand All @@ -1322,7 +1322,7 @@ public void testGetColumnsMetadataCalls()
.mapToObj(columnIndex -> list(COUNTING_CATALOG, "test_schema1", "test_table" + tableIndex, "column_" + columnIndex, "varchar")))
.collect(toImmutableList()),
new MetadataCallsCount()
.withListSchemasCount(2)
.withListSchemasCount(4)
.withListTablesCount(1)
.withGetColumnsCount(1000));

Expand All @@ -1338,9 +1338,9 @@ public void testGetColumnsMetadataCalls()
.mapToObj(columnIndex -> list(COUNTING_CATALOG, "test_schema" + schemaIndex, "test_table1", "column_" + columnIndex, "varchar")))
.collect(toImmutableList()),
new MetadataCallsCount()
.withListSchemasCount(3)
.withListTablesCount(8)
.withGetTableHandleCount(2)
.withListSchemasCount(5)
.withListTablesCount(5)
.withGetTableHandleCount(8)
.withGetColumnsCount(2));

// Equality predicate on schema name and table name, but no predicate on catalog name
Expand Down Expand Up @@ -1383,7 +1383,7 @@ public void testGetColumnsMetadataCalls()
list("TABLE_CAT", "TABLE_SCHEM", "TABLE_NAME", "COLUMN_NAME", "TYPE_NAME")),
list(),
new MetadataCallsCount()
.withListSchemasCount(2)
.withListSchemasCount(1)
.withListTablesCount(0)
.withGetColumnsCount(0));

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -587,15 +587,15 @@ public PlanOptimizers(
ImmutableSet.of(
new ApplyTableScanRedirection(plannerContext),
new PruneTableScanColumns(metadata),
new PushPredicateIntoTableScan(plannerContext, typeAnalyzer))));
new PushPredicateIntoTableScan(plannerContext, typeAnalyzer, false))));

Set<Rule<?>> pushIntoTableScanRulesExceptJoins = ImmutableSet.<Rule<?>>builder()
.addAll(columnPruningRules)
.addAll(projectionPushdownRules)
.add(new PushProjectionIntoTableScan(plannerContext, typeAnalyzer, scalarStatsCalculator))
.add(new RemoveRedundantIdentityProjections())
.add(new PushLimitIntoTableScan(metadata))
.add(new PushPredicateIntoTableScan(plannerContext, typeAnalyzer))
.add(new PushPredicateIntoTableScan(plannerContext, typeAnalyzer, false))
.add(new PushSampleIntoTableScan(metadata))
.add(new PushAggregationIntoTableScan(plannerContext, typeAnalyzer))
.add(new PushDistinctLimitIntoTableScan(plannerContext, typeAnalyzer))
Expand Down Expand Up @@ -663,7 +663,7 @@ public PlanOptimizers(
costCalculator,
ImmutableSet.<Rule<?>>builder()
.addAll(simplifyOptimizerRules) // Should be always run after PredicatePushDown
.add(new PushPredicateIntoTableScan(plannerContext, typeAnalyzer))
.add(new PushPredicateIntoTableScan(plannerContext, typeAnalyzer, false))
.build()),
new UnaliasSymbolReferences(metadata), // Run again because predicate pushdown and projection pushdown might add more projections
columnPruningOptimizer, // Make sure to run this before index join. Filtered projections may not have all the columns.
Expand Down Expand Up @@ -728,7 +728,7 @@ public PlanOptimizers(
costCalculator,
ImmutableSet.<Rule<?>>builder()
.addAll(simplifyOptimizerRules) // Should be always run after PredicatePushDown
.add(new PushPredicateIntoTableScan(plannerContext, typeAnalyzer))
.add(new PushPredicateIntoTableScan(plannerContext, typeAnalyzer, false))
.build()),
pushProjectionIntoTableScanOptimizer,
// Projection pushdown rules may push reducing projections (e.g. dereferences) below filters for potential
Expand All @@ -742,7 +742,7 @@ public PlanOptimizers(
costCalculator,
ImmutableSet.<Rule<?>>builder()
.addAll(simplifyOptimizerRules) // Should be always run after PredicatePushDown
.add(new PushPredicateIntoTableScan(plannerContext, typeAnalyzer))
.add(new PushPredicateIntoTableScan(plannerContext, typeAnalyzer, false))
.build()),
columnPruningOptimizer,
new IterativeOptimizer(
Expand Down Expand Up @@ -808,6 +808,21 @@ public PlanOptimizers(
// new join nodes without JoinNode.maySkipOutputDuplicates flag set
new OptimizeDuplicateInsensitiveJoins(metadata))));

// Previous invocations of PushPredicateIntoTableScan do not prune using the predicate expression. The invocation in AddExchanges
// does perform this pruning - and we may end up with empty union branches after that. Here we invoke PushPredicateIntoTableScan
// together with rules that remove empty branches, so that pushdown can produce empty ValuesNodes which are then pruned away.
builder.add(new IterativeOptimizer(
plannerContext,
ruleStats,
statsCalculator,
costCalculator,
ImmutableSet.of(
new PushPredicateIntoTableScan(plannerContext, typeAnalyzer, true),
new RemoveEmptyUnionBranches(),
new EvaluateEmptyIntersect(),
new RemoveEmptyExceptBranches(),
new TransformFilteringSemiJoinToInnerJoin())));

if (!forceSingleNode) {
builder.add(new IterativeOptimizer(
plannerContext,
Expand Down Expand Up @@ -895,7 +910,7 @@ public PlanOptimizers(
costCalculator,
ImmutableSet.<Rule<?>>builder()
.addAll(simplifyOptimizerRules) // Should be always run after PredicatePushDown
.add(new PushPredicateIntoTableScan(plannerContext, typeAnalyzer))
.add(new PushPredicateIntoTableScan(plannerContext, typeAnalyzer, false))
.add(new RemoveRedundantPredicateAboveTableScan(plannerContext, typeAnalyzer))
.build()));
builder.add(inlineProjections);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -50,6 +50,7 @@
import io.trino.sql.planner.plan.ValuesNode;
import io.trino.sql.tree.Expression;
import io.trino.sql.tree.NodeRef;
import org.assertj.core.util.VisibleForTesting;

import java.util.ArrayList;
import java.util.List;
Expand Down Expand Up @@ -90,10 +91,13 @@ public class PushPredicateIntoTableScan
private final PlannerContext plannerContext;
private final TypeAnalyzer typeAnalyzer;

public PushPredicateIntoTableScan(PlannerContext plannerContext, TypeAnalyzer typeAnalyzer)
private final boolean pruneWithPredicateExpression;

/**
 * @param pruneWithPredicateExpression when {@code true}, the rule asks
 *         {@code pushFilterIntoTableScan} to also prune using the full predicate
 *         expression rather than only the extracted domain (it is forwarded as the
 *         third argument in {@code apply}); presumably enabled only for the late
 *         empty-branch-pruning optimizer pass — confirm against PlanOptimizers
 */
public PushPredicateIntoTableScan(PlannerContext plannerContext, TypeAnalyzer typeAnalyzer, boolean pruneWithPredicateExpression)
{
    this.plannerContext = requireNonNull(plannerContext, "plannerContext is null");
    this.typeAnalyzer = requireNonNull(typeAnalyzer, "typeAnalyzer is null");
    this.pruneWithPredicateExpression = pruneWithPredicateExpression;
}

@Override
Expand All @@ -116,7 +120,7 @@ public Result apply(FilterNode filterNode, Captures captures, Context context)
Optional<PlanNode> rewritten = pushFilterIntoTableScan(
filterNode,
tableScan,
false,
pruneWithPredicateExpression,
context.getSession(),
context.getSymbolAllocator(),
plannerContext,
Expand Down Expand Up @@ -416,6 +420,12 @@ public static TupleDomain<ColumnHandle> computeEnforced(TupleDomain<ColumnHandle
return TupleDomain.withColumnDomains(enforcedDomainsBuilder.buildOrThrow());
}

// Exposes the pruneWithPredicateExpression flag so tests can distinguish the rule
// instance configured to prune with the full predicate expression.
// NOTE(review): @VisibleForTesting here resolves to org.assertj.core.util.VisibleForTesting,
// a test-library annotation; main code should use com.google.common.annotations.VisibleForTesting.
@VisibleForTesting
public boolean getPruneWithPredicateExpression()
{
return pruneWithPredicateExpression;
}

private static class SplitExpression
{
private final Expression dynamicFilter;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -31,8 +31,11 @@
import io.trino.sql.planner.assertions.BasePlanTest;
import io.trino.sql.planner.assertions.PlanMatchPattern;
import io.trino.sql.planner.assertions.RowNumberSymbolMatcher;
import io.trino.sql.planner.iterative.IterativeOptimizer;
import io.trino.sql.planner.iterative.rule.PushPredicateIntoTableScan;
import io.trino.sql.planner.optimizations.AddLocalExchanges;
import io.trino.sql.planner.optimizations.CheckSubqueryNodesAreRewritten;
import io.trino.sql.planner.optimizations.PlanOptimizer;
import io.trino.sql.planner.plan.AggregationNode;
import io.trino.sql.planner.plan.ApplyNode;
import io.trino.sql.planner.plan.CorrelatedJoinNode;
Expand Down Expand Up @@ -1030,7 +1033,24 @@ public void testDoubleNestedCorrelatedSubqueries()
project(
any(
tableScan("lineitem", ImmutableMap.of("L", "orderkey")))))))),
optimizer -> !(optimizer instanceof AddLocalExchanges || optimizer instanceof CheckSubqueryNodesAreRewritten));
optimizer -> !
(optimizer instanceof AddLocalExchanges
|| optimizer instanceof CheckSubqueryNodesAreRewritten
|| isPushPredicateIntoTableScanWithPrunePredicateOperation(optimizer)));
}

// Returns true when the given optimizer is an IterativeOptimizer containing a
// PushPredicateIntoTableScan rule configured to prune with the predicate expression.
private boolean isPushPredicateIntoTableScanWithPrunePredicateOperation(PlanOptimizer optimizer)
{
    if (!(optimizer instanceof IterativeOptimizer iterativeOptimizer)) {
        return false;
    }
    return iterativeOptimizer.getRules().stream()
            .anyMatch(rule -> rule instanceof PushPredicateIntoTableScan pushPredicateIntoTableScan
                    && pushPredicateIntoTableScan.getPruneWithPredicateExpression());
}

@Test
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -97,7 +97,7 @@ public class TestPushPredicateIntoTableScan
@BeforeClass
public void setUpBeforeClass()
{
pushPredicateIntoTableScan = new PushPredicateIntoTableScan(tester().getPlannerContext(), createTestingTypeAnalyzer(tester().getPlannerContext()));
pushPredicateIntoTableScan = new PushPredicateIntoTableScan(tester().getPlannerContext(), createTestingTypeAnalyzer(tester().getPlannerContext()), false);

CatalogHandle catalogHandle = tester().getCurrentCatalogHandle();
tester().getQueryRunner().createCatalog(MOCK_CATALOG, createMockFactory(), ImmutableMap.of());
Expand Down
Loading