Skip to content
Merged
Original file line number Diff line number Diff line change
Expand Up @@ -858,6 +858,7 @@ public RelNode visitPatterns(Patterns node, CalcitePlanContext context) {
@Override
public RelNode visitEval(Eval node, CalcitePlanContext context) {
visitChildren(node, context);

node.getExpressionList()
.forEach(
expr -> {
Expand Down Expand Up @@ -2245,9 +2246,10 @@ private RelNode mergeTableAndResolveColumnConflict(
@Override
public RelNode visitMultisearch(Multisearch node, CalcitePlanContext context) {
List<RelNode> subsearchNodes = new ArrayList<>();

for (UnresolvedPlan subsearch : node.getSubsearches()) {
UnresolvedPlan prunedSubSearch = subsearch.accept(new EmptySourcePropagateVisitor(), null);
prunedSubSearch.accept(this, context);
analyze(prunedSubSearch, context);
subsearchNodes.add(context.relBuilder.build());
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,11 +16,15 @@
import lombok.extern.log4j.Log4j2;
import org.apache.calcite.jdbc.CalciteSchema;
import org.apache.calcite.plan.RelTraitDef;
import org.apache.calcite.plan.hep.HepPlanner;
import org.apache.calcite.plan.hep.HepProgram;
import org.apache.calcite.plan.hep.HepProgramBuilder;
import org.apache.calcite.rel.RelCollation;
import org.apache.calcite.rel.RelCollations;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.rel.core.Sort;
import org.apache.calcite.rel.logical.LogicalSort;
import org.apache.calcite.rel.rules.FilterMergeRule;
import org.apache.calcite.schema.SchemaPlus;
import org.apache.calcite.sql.parser.SqlParser;
import org.apache.calcite.tools.FrameworkConfig;
Expand Down Expand Up @@ -100,6 +104,7 @@ public void executeWithCalcite(
CalcitePlanContext.create(
buildFrameworkConfig(), SysLimit.fromSettings(settings), queryType);
RelNode relNode = analyze(plan, context);
relNode = mergeAdjacentFilters(relNode);
RelNode optimized = optimize(relNode, context);
RelNode calcitePlan = convertToCalcitePlan(optimized);
executionEngine.execute(calcitePlan, context, listener);
Expand Down Expand Up @@ -145,6 +150,7 @@ public void explainWithCalcite(
context.run(
() -> {
RelNode relNode = analyze(plan, context);
relNode = mergeAdjacentFilters(relNode);
RelNode optimized = optimize(relNode, context);
RelNode calcitePlan = convertToCalcitePlan(optimized);
executionEngine.explain(calcitePlan, format, context, listener);
Expand Down Expand Up @@ -259,6 +265,18 @@ public RelNode analyze(UnresolvedPlan plan, CalcitePlanContext context) {
return getRelNodeVisitor().analyze(plan, context);
}

/**
* Run Calcite FILTER_MERGE once so adjacent filters created during analysis can collapse before
* the rest of optimization.
*/
private RelNode mergeAdjacentFilters(RelNode relNode) {
HepProgram program =
new HepProgramBuilder().addRuleInstance(FilterMergeRule.Config.DEFAULT.toRule()).build();
HepPlanner planner = new HepPlanner(program);
planner.setRoot(relNode);
return planner.findBestExp();
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

not sure performance impact, did u verify?

Copy link
Collaborator Author

@RyanL1997 RyanL1997 Dec 2, 2025

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I just scripted a mini benchmark break down by directly leverage the clickbench IT queries. The following report shows the detailed performance of each planning phase - in summary, performance testing shows filter merge adds only 0.19ms average overhead (10% of planning time, <1% of total query time).

> python3 analyze_performance.py

Analyzing log file: /Users/jiallian/Desktop/opensearch/sql-team/cve-fix/sql/integ-test/build/testclusters/integTest-0/logs/integTest.log
Using test log for query names: /Users/jiallian/Desktop/opensearch/sql-team/cve-fix/sql/performance_results.log

================================================================================
FILTER MERGE PERFORMANCE ANALYSIS
================================================================================

📊 OVERALL STATISTICS (168 queries)
--------------------------------------------------------------------------------
Filter Merge Time:
  Mean:          186 μs  (  0.19 ms)
  Median:        103 μs  (  0.10 ms)
  Std Dev:       197 μs
  Min:            41 μs  (  0.04 ms)
  Max:          1541 μs  (  1.54 ms)

Total Planning Time:
  Mean:         1870 μs  (  1.87 ms)
  Median:       1750 μs  (  1.75 ms)

Filter Merge as % of Planning:
  Mean:       9.87%
  Median:     6.22%
  Max:       47.52%

================================================================================
📈 PERFORMANCE ASSESSMENT
--------------------------------------------------------------------------------
Average overhead: 0.19ms (9.9% of planning)
Recommendation: No optimization needed. Merge immediately.

================================================================================
📊 PERCENTILE ANALYSIS
--------------------------------------------------------------------------------
Filter Merge Time Percentiles:
  p50:    105 μs  (  0.10 ms)
  p95:    477 μs  (  0.48 ms)
  p99:   1541 μs  (  1.54 ms)

================================================================================
⏱️  PLANNING PHASE BREAKDOWN
--------------------------------------------------------------------------------
Phase Averages:
  Analyze:          1672 μs  ( 89.4%)
  Filter Merge:      186 μs  ( 10.0%)  ← THIS IS WHAT WE ADDED
  Optimize:            9 μs  (  0.5%)
  Convert:             0 μs  (  0.0%)
  TOTAL:            1870 μs  (100.0%)

================================================================================
🐢 TOP 10 SLOWEST FILTER MERGE TIMES
--------------------------------------------------------------------------------
Rank   Query                Avg Merge Time       Max Merge Time       % of Planning
--------------------------------------------------------------------------------
1      Query46                  1541 μs ( 1.54ms)       1541 μs ( 1.54ms)    47.5%
2      Query29                   543 μs ( 0.54ms)        543 μs ( 0.54ms)    25.5%
3      Query24                   529 μs ( 0.53ms)        529 μs ( 0.53ms)    24.5%
4      Query54                   513 μs ( 0.51ms)        513 μs ( 0.51ms)    18.8%
5      Query44                   477 μs ( 0.48ms)        477 μs ( 0.48ms)    16.1%
6      Query23                   445 μs ( 0.45ms)        445 μs ( 0.45ms)    22.9%
7      Query15                   390 μs ( 0.39ms)        390 μs ( 0.39ms)    19.9%
8      Query71                   388 μs ( 0.39ms)        388 μs ( 0.39ms)    20.4%
9      Query16                   377 μs ( 0.38ms)        377 μs ( 0.38ms)    17.8%
10     Query55                   351 μs ( 0.35ms)        351 μs ( 0.35ms)    18.9%

================================================================================
📈 DISTRIBUTION ANALYSIS
--------------------------------------------------------------------------------
Filter Merge Time Distribution:
  <100μs                 82 ( 48.8%) ████████████████████████
  100-500μs              78 ( 46.4%) ███████████████████████
  500-1000μs (1ms)        6 (  3.6%) █
  1-5ms                   2 (  1.2%)
  5-10ms                  0 (  0.0%)
  >10ms                   0 (  0.0%)

================================================================================
📄 Detailed CSV exported to: /Users/jiallian/Desktop/opensearch/sql-team/cve-fix/sql/performance_analysis.csv
================================================================================

}

/** Analyze {@link UnresolvedPlan}. */
public LogicalPlan analyze(UnresolvedPlan plan, QueryType queryType) {
return analyzer.analyze(plan, new AnalysisContext(queryType));
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,9 +2,7 @@ calcite:
logical: |
LogicalSystemLimit(fetch=[10000], type=[QUERY_SIZE_LIMIT])
LogicalProject(age=[$8])
LogicalFilter(condition=[>($3, 10000)])
LogicalFilter(condition=[<($8, 40)])
LogicalFilter(condition=[>($8, 30)])
CalciteLogicalIndexScan(table=[[OpenSearch, opensearch-sql_test_index_account]])
LogicalFilter(condition=[AND(SEARCH($8, Sarg[(30..40)]), >($3, 10000))])
CalciteLogicalIndexScan(table=[[OpenSearch, opensearch-sql_test_index_account]])
physical: |
CalciteEnumerableIndexScan(table=[[OpenSearch, opensearch-sql_test_index_account]], PushDownContext=[[PROJECT->[balance, age], FILTER->AND(SEARCH($1, Sarg[(30..40)]), >($0, 10000)), PROJECT->[age], LIMIT->10000], OpenSearchRequestBuilder(sourceBuilder={"from":0,"size":10000,"timeout":"1m","query":{"bool":{"must":[{"range":{"age":{"from":30.0,"to":40.0,"include_lower":false,"include_upper":false,"boost":1.0}}},{"range":{"balance":{"from":10000,"to":null,"include_lower":false,"include_upper":true,"boost":1.0}}}],"adjust_pure_negative":true,"boost":1.0}},"_source":{"includes":["age"],"excludes":[]}}, requestedTotalSize=10000, pageSize=null, startFrom=0)])
Original file line number Diff line number Diff line change
@@ -1,9 +1,8 @@
calcite:
logical: |
LogicalSystemLimit(fetch=[10000], type=[QUERY_SIZE_LIMIT])
LogicalFilter(condition=[<($0, DATE('2018-11-09 00:00:00.000000000':VARCHAR))])
LogicalFilter(condition=[>($0, DATE('2016-12-08 00:00:00.123456789':VARCHAR))])
LogicalProject(yyyy-MM-dd=[$83])
CalciteLogicalIndexScan(table=[[OpenSearch, opensearch-sql_test_index_date_formats]])
LogicalFilter(condition=[AND(>($0, DATE('2016-12-08 00:00:00.123456789':VARCHAR)), <($0, DATE('2018-11-09 00:00:00.000000000':VARCHAR)))])
LogicalProject(yyyy-MM-dd=[$83])
CalciteLogicalIndexScan(table=[[OpenSearch, opensearch-sql_test_index_date_formats]])
physical: |
CalciteEnumerableIndexScan(table=[[OpenSearch, opensearch-sql_test_index_date_formats]], PushDownContext=[[PROJECT->[yyyy-MM-dd], FILTER->SEARCH($0, Sarg[('2016-12-08':VARCHAR..'2018-11-09':VARCHAR)]:VARCHAR), LIMIT->10000], OpenSearchRequestBuilder(sourceBuilder={"from":0,"size":10000,"timeout":"1m","query":{"range":{"yyyy-MM-dd":{"from":"2016-12-08","to":"2018-11-09","include_lower":false,"include_upper":false,"boost":1.0}}},"_source":{"includes":["yyyy-MM-dd"],"excludes":[]}}, requestedTotalSize=10000, pageSize=null, startFrom=0)])
Original file line number Diff line number Diff line change
@@ -1,9 +1,8 @@
calcite:
logical: |
LogicalSystemLimit(fetch=[10000], type=[QUERY_SIZE_LIMIT])
LogicalFilter(condition=[<($0, TIME('2018-11-09 19:00:00.123456789':VARCHAR))])
LogicalFilter(condition=[>($0, TIME('2016-12-08 12:00:00.123456789':VARCHAR))])
LogicalProject(custom_time=[$49])
CalciteLogicalIndexScan(table=[[OpenSearch, opensearch-sql_test_index_date_formats]])
LogicalFilter(condition=[AND(>($0, TIME('2016-12-08 12:00:00.123456789':VARCHAR)), <($0, TIME('2018-11-09 19:00:00.123456789':VARCHAR)))])
LogicalProject(custom_time=[$49])
CalciteLogicalIndexScan(table=[[OpenSearch, opensearch-sql_test_index_date_formats]])
physical: |
CalciteEnumerableIndexScan(table=[[OpenSearch, opensearch-sql_test_index_date_formats]], PushDownContext=[[PROJECT->[custom_time], FILTER->SEARCH($0, Sarg[('12:00:00.123456789':VARCHAR..'19:00:00.123456789':VARCHAR)]:VARCHAR), LIMIT->10000], OpenSearchRequestBuilder(sourceBuilder={"from":0,"size":10000,"timeout":"1m","query":{"range":{"custom_time":{"from":"12:00:00.123456789","to":"19:00:00.123456789","include_lower":false,"include_upper":false,"boost":1.0}}},"_source":{"includes":["custom_time"],"excludes":[]}}, requestedTotalSize=10000, pageSize=null, startFrom=0)])
Original file line number Diff line number Diff line change
Expand Up @@ -2,8 +2,7 @@ calcite:
logical: |
LogicalSystemLimit(fetch=[10000], type=[QUERY_SIZE_LIMIT])
LogicalProject(account_number=[$0], firstname=[$1], address=[$2], birthdate=[$3], gender=[$4], city=[$5], lastname=[$6], balance=[$7], employer=[$8], state=[$9], age=[$10], email=[$11], male=[$12])
LogicalFilter(condition=[<($3, TIMESTAMP('2018-11-09 00:00:00.000000000':VARCHAR))])
LogicalFilter(condition=[>($3, TIMESTAMP('2016-12-08 00:00:00.000000000':VARCHAR))])
CalciteLogicalIndexScan(table=[[OpenSearch, opensearch-sql_test_index_bank]])
LogicalFilter(condition=[AND(>($3, TIMESTAMP('2016-12-08 00:00:00.000000000':VARCHAR)), <($3, TIMESTAMP('2018-11-09 00:00:00.000000000':VARCHAR)))])
CalciteLogicalIndexScan(table=[[OpenSearch, opensearch-sql_test_index_bank]])
physical: |
CalciteEnumerableIndexScan(table=[[OpenSearch, opensearch-sql_test_index_bank]], PushDownContext=[[PROJECT->[account_number, firstname, address, birthdate, gender, city, lastname, balance, employer, state, age, email, male], FILTER->SEARCH($3, Sarg[('2016-12-08 00:00:00':VARCHAR..'2018-11-09 00:00:00':VARCHAR)]:VARCHAR), LIMIT->10000], OpenSearchRequestBuilder(sourceBuilder={"from":0,"size":10000,"timeout":"1m","query":{"range":{"birthdate":{"from":"2016-12-08T00:00:00.000Z","to":"2018-11-09T00:00:00.000Z","include_lower":false,"include_upper":false,"format":"date_time","boost":1.0}}},"_source":{"includes":["account_number","firstname","address","birthdate","gender","city","lastname","balance","employer","state","age","email","male"],"excludes":[]}}, requestedTotalSize=10000, pageSize=null, startFrom=0)])
Original file line number Diff line number Diff line change
Expand Up @@ -2,10 +2,8 @@ calcite:
logical: |
LogicalSystemLimit(fetch=[10000], type=[QUERY_SIZE_LIMIT])
LogicalProject(age=[$8])
LogicalFilter(condition=[>($3, 10000)])
LogicalFilter(condition=[<($8, 40)])
LogicalFilter(condition=[>($8, 30)])
CalciteLogicalIndexScan(table=[[OpenSearch, opensearch-sql_test_index_account]])
LogicalFilter(condition=[AND(SEARCH($8, Sarg[(30..40)]), >($3, 10000))])
CalciteLogicalIndexScan(table=[[OpenSearch, opensearch-sql_test_index_account]])
physical: |
EnumerableLimit(fetch=[10000])
EnumerableCalc(expr#0..16=[{inputs}], expr#17=[Sarg[(30..40)]], expr#18=[SEARCH($t8, $t17)], expr#19=[10000], expr#20=[>($t3, $t19)], expr#21=[AND($t18, $t20)], age=[$t8], $condition=[$t21])
Expand Down
Original file line number Diff line number Diff line change
@@ -1,10 +1,9 @@
calcite:
logical: |
LogicalSystemLimit(fetch=[10000], type=[QUERY_SIZE_LIMIT])
LogicalFilter(condition=[<($0, DATE('2018-11-09 00:00:00.000000000':VARCHAR))])
LogicalFilter(condition=[>($0, DATE('2016-12-08 00:00:00.123456789':VARCHAR))])
LogicalProject(yyyy-MM-dd=[$83])
CalciteLogicalIndexScan(table=[[OpenSearch, opensearch-sql_test_index_date_formats]])
LogicalFilter(condition=[AND(>($0, DATE('2016-12-08 00:00:00.123456789':VARCHAR)), <($0, DATE('2018-11-09 00:00:00.000000000':VARCHAR)))])
LogicalProject(yyyy-MM-dd=[$83])
CalciteLogicalIndexScan(table=[[OpenSearch, opensearch-sql_test_index_date_formats]])
physical: |
EnumerableLimit(fetch=[10000])
EnumerableCalc(expr#0..94=[{inputs}], expr#95=[Sarg[('2016-12-08':VARCHAR..'2018-11-09':VARCHAR)]:VARCHAR], expr#96=[SEARCH($t83, $t95)], yyyy-MM-dd=[$t83], $condition=[$t96])
Expand Down
Original file line number Diff line number Diff line change
@@ -1,10 +1,9 @@
calcite:
logical: |
LogicalSystemLimit(fetch=[10000], type=[QUERY_SIZE_LIMIT])
LogicalFilter(condition=[<($0, TIME('2018-11-09 19:00:00.123456789':VARCHAR))])
LogicalFilter(condition=[>($0, TIME('2016-12-08 12:00:00.123456789':VARCHAR))])
LogicalProject(custom_time=[$49])
CalciteLogicalIndexScan(table=[[OpenSearch, opensearch-sql_test_index_date_formats]])
LogicalFilter(condition=[AND(>($0, TIME('2016-12-08 12:00:00.123456789':VARCHAR)), <($0, TIME('2018-11-09 19:00:00.123456789':VARCHAR)))])
LogicalProject(custom_time=[$49])
CalciteLogicalIndexScan(table=[[OpenSearch, opensearch-sql_test_index_date_formats]])
physical: |
EnumerableLimit(fetch=[10000])
EnumerableCalc(expr#0..94=[{inputs}], expr#95=[Sarg[('12:00:00.123456789':VARCHAR..'19:00:00.123456789':VARCHAR)]:VARCHAR], expr#96=[SEARCH($t49, $t95)], custom_time=[$t49], $condition=[$t96])
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,9 +2,8 @@ calcite:
logical: |
LogicalSystemLimit(fetch=[10000], type=[QUERY_SIZE_LIMIT])
LogicalProject(account_number=[$0], firstname=[$1], address=[$2], birthdate=[$3], gender=[$4], city=[$5], lastname=[$6], balance=[$7], employer=[$8], state=[$9], age=[$10], email=[$11], male=[$12])
LogicalFilter(condition=[<($3, TIMESTAMP('2018-11-09 00:00:00.000000000':VARCHAR))])
LogicalFilter(condition=[>($3, TIMESTAMP('2016-12-08 00:00:00.000000000':VARCHAR))])
CalciteLogicalIndexScan(table=[[OpenSearch, opensearch-sql_test_index_bank]])
LogicalFilter(condition=[AND(>($3, TIMESTAMP('2016-12-08 00:00:00.000000000':VARCHAR)), <($3, TIMESTAMP('2018-11-09 00:00:00.000000000':VARCHAR)))])
CalciteLogicalIndexScan(table=[[OpenSearch, opensearch-sql_test_index_bank]])
physical: |
EnumerableLimit(fetch=[10000])
EnumerableCalc(expr#0..18=[{inputs}], expr#19=[Sarg[('2016-12-08 00:00:00':VARCHAR..'2018-11-09 00:00:00':VARCHAR)]:VARCHAR], expr#20=[SEARCH($t3, $t19)], proj#0..12=[{exprs}], $condition=[$t20])
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,9 +22,13 @@
import lombok.Getter;
import org.apache.calcite.plan.Contexts;
import org.apache.calcite.plan.RelTraitDef;
import org.apache.calcite.plan.hep.HepPlanner;
import org.apache.calcite.plan.hep.HepProgram;
import org.apache.calcite.plan.hep.HepProgramBuilder;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.rel.rel2sql.RelToSqlConverter;
import org.apache.calcite.rel.rel2sql.SqlImplementor;
import org.apache.calcite.rel.rules.FilterMergeRule;
import org.apache.calcite.schema.SchemaPlus;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.parser.SqlParser;
Expand Down Expand Up @@ -101,10 +105,19 @@ public RelNode getRelNode(String ppl) {
Query query = (Query) plan(pplParser, ppl);
planTransformer.analyze(query.getPlan(), context);
RelNode root = context.relBuilder.build();
root = mergeAdjacentFilters(root);
System.out.println(root.explain());
return root;
}

/** Collapses stacks of adjacent LogicalFilter nodes via a single FILTER_MERGE Hep pass. */
private RelNode mergeAdjacentFilters(RelNode relNode) {
  HepProgramBuilder programBuilder = new HepProgramBuilder();
  programBuilder.addRuleInstance(FilterMergeRule.Config.DEFAULT.toRule());
  HepPlanner hepPlanner = new HepPlanner(programBuilder.build());
  hepPlanner.setRoot(relNode);
  return hepPlanner.findBestExp();
}

private Node plan(PPLSyntaxParser parser, String query) {
final AstStatementBuilder builder =
new AstStatementBuilder(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -38,19 +38,18 @@ public void testRegexBasic() {
public void testRegexChainedFilters() {
String ppl = "source=EMP | regex ENAME='A.*' | regex JOB='.*CLERK' | fields ENAME, JOB";
RelNode root = getRelNode(ppl);
// Filter accumulation combines multiple regex conditions into a single Filter with AND
String expectedLogical =
"LogicalProject(ENAME=[$1], JOB=[$2])\n"
+ " LogicalFilter(condition=[REGEXP_CONTAINS($2, '.*CLERK':VARCHAR)])\n"
+ " LogicalFilter(condition=[REGEXP_CONTAINS($1, 'A.*':VARCHAR)])\n"
+ " LogicalTableScan(table=[[scott, EMP]])\n";
+ " LogicalFilter(condition=[AND(REGEXP_CONTAINS($1, 'A.*':VARCHAR),"
+ " REGEXP_CONTAINS($2, '.*CLERK':VARCHAR))])\n"
+ " LogicalTableScan(table=[[scott, EMP]])\n";
verifyLogical(root, expectedLogical);

String expectedSparkSql =
"SELECT `ENAME`, `JOB`\n"
+ "FROM (SELECT *\n"
+ "FROM `scott`.`EMP`\n"
+ "WHERE REGEXP_CONTAINS(`ENAME`, 'A.*')) `t`\n"
+ "WHERE REGEXP_CONTAINS(`JOB`, '.*CLERK')";
+ "WHERE REGEXP_CONTAINS(`ENAME`, 'A.*') AND REGEXP_CONTAINS(`JOB`, '.*CLERK')";
verifyPPLToSparkSQL(root, expectedSparkSql);
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -76,9 +76,8 @@ public void testTrendlineMultipleFields() {
+ " DEPTNO_trendline=[CASE(>(COUNT() OVER (ROWS 1 PRECEDING), 1), /(SUM($7) OVER (ROWS"
+ " 1 PRECEDING), CAST(COUNT($7) OVER (ROWS 1 PRECEDING)):DOUBLE NOT NULL),"
+ " null:NULL)])\n"
+ " LogicalFilter(condition=[IS NOT NULL($7)])\n"
+ " LogicalFilter(condition=[IS NOT NULL($5)])\n"
+ " LogicalTableScan(table=[[scott, EMP]])\n";
+ " LogicalFilter(condition=[AND(IS NOT NULL($5), IS NOT NULL($7))])\n"
+ " LogicalTableScan(table=[[scott, EMP]])\n";
verifyLogical(root, expectedLogical);

String expectedSparkSql =
Expand All @@ -89,10 +88,8 @@ public void testTrendlineMultipleFields() {
+ " BETWEEN 1 PRECEDING AND CURRENT ROW)) > 1 THEN (SUM(`DEPTNO`) OVER (ROWS BETWEEN 1"
+ " PRECEDING AND CURRENT ROW)) / CAST(COUNT(`DEPTNO`) OVER (ROWS BETWEEN 1 PRECEDING"
+ " AND CURRENT ROW) AS DOUBLE) ELSE NULL END `DEPTNO_trendline`\n"
+ "FROM (SELECT *\n"
+ "FROM `scott`.`EMP`\n"
+ "WHERE `SAL` IS NOT NULL) `t`\n"
+ "WHERE `DEPTNO` IS NOT NULL";
+ "WHERE `SAL` IS NOT NULL AND `DEPTNO` IS NOT NULL";
verifyPPLToSparkSQL(root, expectedSparkSql);
}
}
Loading