From ad961ae8b97087050494d26397f4b7ccad9e5788 Mon Sep 17 00:00:00 2001
From: Luigi Dell'Aquila
Date: Fri, 22 Aug 2025 18:11:18 +0200
Subject: [PATCH 01/18] ES|QL: Fix wrong pruning of plans with no output columns
---
.../src/main/resources/drop.csv-spec | 17 +++++++----
.../esql/expression/predicate/Predicates.java | 3 ++
.../esql/optimizer/LogicalPlanOptimizer.java | 6 ++--
.../rules/logical/PruneEmptyAggregates.java | 28 +++++++++++++++++++
.../xpack/esql/session/FieldNameUtils.java | 1 +
.../optimizer/LogicalPlanOptimizerTests.java | 9 +++---
.../esql/session/FieldNameUtilsTests.java | 4 ++-
7 files changed, 55 insertions(+), 13 deletions(-)
create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PruneEmptyAggregates.java
diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/drop.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/drop.csv-spec
index eec8e073e3eec..2c81a143b61d7 100644
--- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/drop.csv-spec
+++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/drop.csv-spec
@@ -49,29 +49,36 @@ b:integer | x:integer
;
dropAllColumns
-from employees | keep height | drop height | eval x = 1;
+from languages | keep language_code | drop language_code | eval x = 1;
x:integer
+1
+1
+1
+1
;
dropAllColumns_WithLimit
from employees | keep height | drop height | eval x = 1 | limit 3;
x:integer
+1
+1
+1
;
dropAllColumns_WithCount
-from employees | keep height | drop height | eval x = 1 | stats c=count(x);
+from languages | keep language_code | drop language_code | eval x = 1 | stats c=count(x);
c:long
-0
+4
;
dropAllColumns_WithStats
-from employees | keep height | drop height | eval x = 1 | stats c=count(x), mi=min(x), s=sum(x);
+from languages | keep language_code | drop language_code | eval x = 1 | stats c=count(x), mi=min(x), s=sum(x);
c:l|mi:i|s:l
-0 |null|null
+4 |1 |4
;
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/Predicates.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/Predicates.java
index 64fd63a844957..b9a58e82a2349 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/Predicates.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/Predicates.java
@@ -145,6 +145,9 @@ public static Tuple<Expression, List<Expression>> extractCommon(List<Expression>
}
splitAnds.add(split);
}
+ if (common == null) {
+ common = List.of();
+ }
List<Expression> trimmed = new ArrayList<>(expressions.size());
final List<Expression> finalCommon = common;
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java
index dac533f872022..fd5bccd9e92b1 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java
@@ -28,7 +28,7 @@
import org.elasticsearch.xpack.esql.optimizer.rules.logical.PropagateNullable;
import org.elasticsearch.xpack.esql.optimizer.rules.logical.PropgateUnmappedFields;
import org.elasticsearch.xpack.esql.optimizer.rules.logical.PruneColumns;
-import org.elasticsearch.xpack.esql.optimizer.rules.logical.PruneEmptyPlans;
+import org.elasticsearch.xpack.esql.optimizer.rules.logical.PruneEmptyAggregates;
import org.elasticsearch.xpack.esql.optimizer.rules.logical.PruneFilters;
import org.elasticsearch.xpack.esql.optimizer.rules.logical.PruneLiteralsInOrderBy;
import org.elasticsearch.xpack.esql.optimizer.rules.logical.PruneRedundantOrderBy;
@@ -166,7 +166,6 @@ protected static Batch<LogicalPlan> operators(boolean local) {
"Operator Optimization",
new CombineProjections(local),
new CombineEvals(),
- new PruneEmptyPlans(),
new PropagateEmptyRelation(),
new FoldNull(),
new SplitInWithFoldableValue(),
@@ -203,7 +202,8 @@ protected static Batch<LogicalPlan> operators(boolean local) {
new PushDownAndCombineOrderBy(),
new PruneRedundantOrderBy(),
new PruneRedundantSortClauses(),
- new PruneLeftJoinOnNullMatchingField()
+ new PruneLeftJoinOnNullMatchingField(),
+ new PruneEmptyAggregates()
);
}
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PruneEmptyAggregates.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PruneEmptyAggregates.java
new file mode 100644
index 0000000000000..f06d936a47934
--- /dev/null
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PruneEmptyAggregates.java
@@ -0,0 +1,28 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.esql.optimizer.rules.logical;
+
+import org.elasticsearch.xpack.esql.plan.logical.Aggregate;
+import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan;
+import org.elasticsearch.xpack.esql.plan.logical.local.EmptyLocalSupplier;
+import org.elasticsearch.xpack.esql.plan.logical.local.LocalRelation;
+
+import java.util.List;
+
+public final class PruneEmptyAggregates extends OptimizerRules.OptimizerRule<Aggregate> {
+ @Override
+ protected LogicalPlan rule(Aggregate agg) {
+ if (agg.aggregates().isEmpty() && agg.groupings().isEmpty()) {
+ // TODO this is wrong, it should return -one- row with -no- columns, but I can't represent it as an array of blocks...
+ // Needs some refactoring to LocalSupplier
+ return new LocalRelation(agg.source(), List.of(), EmptyLocalSupplier.EMPTY);
+ }
+ return agg;
+ }
+
+}
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/FieldNameUtils.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/FieldNameUtils.java
index 844f7cb1989df..be1049dd67234 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/FieldNameUtils.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/FieldNameUtils.java
@@ -237,6 +237,7 @@ public static PreAnalysisResult resolveFieldNames(LogicalPlan parsed, EnrichReso
// there cannot be an empty list of fields, we'll ask the simplest and lightest one instead: _index
return new PreAnalysisResult(enrichResolution, IndexResolver.INDEX_METADATA_FIELD, wildcardJoinIndices);
} else {
+ fieldNames.add(MetadataAttribute.INDEX);
fieldNames.addAll(subfields(fieldNames));
fieldNames.addAll(enrichPolicyMatchFields);
fieldNames.addAll(subfields(enrichPolicyMatchFields));
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java
index b6d66d673ea7a..93a8bbfd8e179 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java
@@ -213,9 +213,10 @@ public void testEmptyProjections() {
| drop salary
""");
- var relation = as(plan, LocalRelation.class);
- assertThat(relation.output(), is(empty()));
- assertThat(relation.supplier().get(), emptyArray());
+ var project = as(plan, EsqlProject.class);
+ assertThat(project.expressions(), is(empty()));
+ var limit = as(project.child(), Limit.class);
+ as(limit.child(), EsRelation.class);
}
public void testEmptyProjectionInStat() {
@@ -224,7 +225,7 @@ public void testEmptyProjectionInStat() {
| stats c = count(salary)
| drop c
""");
-
+ // TODO Wrong! It should return an empty row, not an empty result
var relation = as(plan, LocalRelation.class);
assertThat(relation.output(), is(empty()));
assertThat(relation.supplier().get(), emptyArray());
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/FieldNameUtilsTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/FieldNameUtilsTests.java
index 5f773a69e8664..aaa624d4ca58f 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/FieldNameUtilsTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/FieldNameUtilsTests.java
@@ -7,6 +7,7 @@
package org.elasticsearch.xpack.esql.session;
+import org.apache.lucene.tests.util.LuceneTestCase;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.esql.EsqlTestUtils;
import org.elasticsearch.xpack.esql.action.EsqlCapabilities;
@@ -25,6 +26,7 @@
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
+@LuceneTestCase.AwaitsFix(bugUrl = "")
public class FieldNameUtilsTests extends ESTestCase {
private static final EsqlParser parser = new EsqlParser();
@@ -176,7 +178,7 @@ public void testDateToDate() {
| where birth_date < hire_date
| keep emp_no
| sort emp_no
- | limit 1""", Set.of("birth_date", "birth_date.*", "emp_no", "emp_no.*", "hire_date", "hire_date.*"));
+ | limit 1""", Set.of("_index", "birth_date", "birth_date.*", "emp_no", "emp_no.*", "hire_date", "hire_date.*"));
}
public void testTwoConditionsWithDefault() {
From 21dc299272a98af90162b1b2893d00a6d6fbafdf Mon Sep 17 00:00:00 2001
From: Luigi Dell'Aquila
Date: Fri, 22 Aug 2025 18:28:57 +0200
Subject: [PATCH 02/18] Fix test
---
.../esql/session/FieldNameUtilsTests.java | 1236 +++++++++++------
1 file changed, 810 insertions(+), 426 deletions(-)
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/FieldNameUtilsTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/FieldNameUtilsTests.java
index aaa624d4ca58f..ba1be9f970145 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/FieldNameUtilsTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/FieldNameUtilsTests.java
@@ -7,7 +7,6 @@
package org.elasticsearch.xpack.esql.session;
-import org.apache.lucene.tests.util.LuceneTestCase;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.esql.EsqlTestUtils;
import org.elasticsearch.xpack.esql.action.EsqlCapabilities;
@@ -26,7 +25,6 @@
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
-@LuceneTestCase.AwaitsFix(bugUrl = "")
public class FieldNameUtilsTests extends ESTestCase {
private static final EsqlParser parser = new EsqlParser();
@@ -56,7 +54,7 @@ public void testBasicEvalAndDrop() {
public void testSimple1() {
assertFieldNames(
"from employees | sort emp_no | keep emp_no, still_hired | limit 3",
- Set.of("emp_no", "emp_no.*", "still_hired", "still_hired.*")
+ Set.of("_index", "_index.*", "emp_no", "emp_no.*", "still_hired", "still_hired.*")
);
}
@@ -70,7 +68,7 @@ public void testSimple2() {
public void testDirectFilter() {
assertFieldNames(
"from employees | sort emp_no | where still_hired | keep emp_no | limit 3",
- Set.of("emp_no", "emp_no.*", "still_hired", "still_hired.*")
+ Set.of("_index", "_index.*", "emp_no", "emp_no.*", "still_hired", "still_hired.*")
);
}
@@ -81,14 +79,14 @@ public void testForkEval() {
public void testSort1() {
assertFieldNames(
"from employees | sort still_hired, emp_no | keep emp_no, still_hired | limit 3",
- Set.of("emp_no", "emp_no.*", "still_hired", "still_hired.*")
+ Set.of("_index", "_index.*", "emp_no", "emp_no.*", "still_hired", "still_hired.*")
);
}
public void testStatsBy() {
assertFieldNames(
"from employees | stats avg(salary) by still_hired | sort still_hired",
- Set.of("salary", "salary.*", "still_hired", "still_hired.*")
+ Set.of("_index", "_index.*", "salary", "salary.*", "still_hired", "still_hired.*")
);
}
@@ -96,7 +94,7 @@ public void testStatsByAlwaysTrue() {
assertFieldNames(
"from employees | where first_name is not null | eval always_true = starts_with(first_name, \"\") "
+ "| stats avg(salary) by always_true",
- Set.of("first_name", "first_name.*", "salary", "salary.*")
+ Set.of("_index", "_index.*", "first_name", "first_name.*", "salary", "salary.*")
);
}
@@ -105,7 +103,7 @@ public void testStatsByAlwaysFalse() {
"from employees | where first_name is not null "
+ "| eval always_false = starts_with(first_name, \"nonestartwiththis\") "
+ "| stats avg(salary) by always_false",
- Set.of("first_name", "first_name.*", "salary", "salary.*")
+ Set.of("_index", "_index.*", "first_name", "first_name.*", "salary", "salary.*")
);
}
@@ -113,7 +111,7 @@ public void testIn1() {
assertFieldNames(
"from employees | keep emp_no, is_rehired, still_hired "
+ "| where is_rehired in (still_hired, true) | where is_rehired != still_hired",
- Set.of("emp_no", "emp_no.*", "is_rehired", "is_rehired.*", "still_hired", "still_hired.*")
+ Set.of("_index", "_index.*", "emp_no", "emp_no.*", "is_rehired", "is_rehired.*", "still_hired", "still_hired.*")
);
}
@@ -125,17 +123,20 @@ public void testConvertFromString1() {
| eval rehired_bool = to_boolean(rehired_str)
| eval all_false = to_boolean(first_name)
| drop first_name
- | limit 5""", Set.of("emp_no", "emp_no.*", "is_rehired", "is_rehired.*", "first_name", "first_name.*"));
+ | limit 5""", Set.of("_index", "_index.*", "emp_no", "emp_no.*", "is_rehired", "is_rehired.*", "first_name", "first_name.*"));
}
public void testConvertFromDouble1() {
- assertFieldNames("""
- from employees
- | eval h_2 = height - 2.0, double2bool = to_boolean(h_2)
- | where emp_no in (10036, 10037, 10038)
- | keep emp_no, height, *2bool""", Set.of("height", "height.*", "emp_no", "emp_no.*", "h_2", "h_2.*", "*2bool.*", "*2bool"));
+ assertFieldNames(
+ """
+ from employees
+ | eval h_2 = height - 2.0, double2bool = to_boolean(h_2)
+ | where emp_no in (10036, 10037, 10038)
+ | keep emp_no, height, *2bool""",
+ Set.of("_index", "_index.*", "height", "height.*", "emp_no", "emp_no.*", "h_2", "h_2.*", "*2bool.*", "*2bool")
+ );
// TODO asking for more shouldn't hurt. Can we do better? ("h_2" shouldn't be in the list of fields)
- // Set.of("height", "height.*", "emp_no", "emp_no.*", "*2bool.*", "*2bool"));
+ // Set.of("_index", "_index.*", "height", "height.*", "emp_no", "emp_no.*", "*2bool.*", "*2bool"));
}
public void testConvertFromIntAndLong() {
@@ -143,6 +144,8 @@ public void testConvertFromIntAndLong() {
"from employees | keep emp_no, salary_change*"
+ "| eval int2bool = to_boolean(salary_change.int), long2bool = to_boolean(salary_change.long) | limit 10",
Set.of(
+ "_index",
+ "_index.*",
"emp_no",
"emp_no.*",
"salary_change*",
@@ -158,7 +161,7 @@ public void testIntToInt() {
assertFieldNames("""
from employees
| where emp_no < 10002
- | keep emp_no""", Set.of("emp_no", "emp_no.*"));
+ | keep emp_no""", Set.of("_index", "_index.*", "emp_no", "emp_no.*"));
}
public void testLongToLong() {
@@ -168,7 +171,16 @@ public void testLongToLong() {
| where languages.long < avg_worked_seconds
| limit 1
| keep emp_no""",
- Set.of("emp_no", "emp_no.*", "languages.long", "languages.long.*", "avg_worked_seconds", "avg_worked_seconds.*")
+ Set.of(
+ "_index",
+ "_index.*",
+ "emp_no",
+ "emp_no.*",
+ "languages.long",
+ "languages.long.*",
+ "avg_worked_seconds",
+ "avg_worked_seconds.*"
+ )
);
}
@@ -178,7 +190,7 @@ public void testDateToDate() {
| where birth_date < hire_date
| keep emp_no
| sort emp_no
- | limit 1""", Set.of("_index", "birth_date", "birth_date.*", "emp_no", "emp_no.*", "hire_date", "hire_date.*"));
+ | limit 1""", Set.of("_index", "_index.*", "birth_date", "birth_date.*", "emp_no", "emp_no.*", "hire_date", "hire_date.*"));
}
public void testTwoConditionsWithDefault() {
@@ -186,7 +198,7 @@ public void testTwoConditionsWithDefault() {
from employees
| eval type = case(languages <= 1, "monolingual", languages <= 2, "bilingual", "polyglot")
| keep emp_no, type
- | limit 10""", Set.of("emp_no", "emp_no.*", "languages", "languages.*"));
+ | limit 10""", Set.of("_index", "_index.*", "emp_no", "emp_no.*", "languages", "languages.*"));
}
public void testSingleCondition() {
@@ -194,7 +206,7 @@ public void testSingleCondition() {
from employees
| eval g = case(gender == "F", true)
| keep gender, g
- | limit 10""", Set.of("gender", "gender.*"));
+ | limit 10""", Set.of("_index", "_index.*", "gender", "gender.*"));
}
public void testConditionIsNull() {
@@ -202,24 +214,27 @@ public void testConditionIsNull() {
from employees
| eval g = case(gender == "F", 1, languages > 1, 2, 3)
| keep gender, languages, g
- | limit 25""", Set.of("gender", "gender.*", "languages", "languages.*"));
+ | limit 25""", Set.of("_index", "_index.*", "gender", "gender.*", "languages", "languages.*"));
}
public void testEvalAssign() {
assertFieldNames(
"from employees | sort hire_date | eval x = hire_date | keep emp_no, x | limit 5",
- Set.of("hire_date", "hire_date.*", "emp_no", "emp_no.*")
+ Set.of("_index", "_index.*", "hire_date", "hire_date.*", "emp_no", "emp_no.*")
);
}
public void testMinMax() {
- assertFieldNames("from employees | stats min = min(hire_date), max = max(hire_date)", Set.of("hire_date", "hire_date.*"));
+ assertFieldNames(
+ "from employees | stats min = min(hire_date), max = max(hire_date)",
+ Set.of("_index", "_index.*", "hire_date", "hire_date.*")
+ );
}
public void testEvalDateTruncIntervalExpressionPeriod() {
assertFieldNames(
"from employees | sort hire_date | eval x = date_trunc(hire_date, 1 month) | keep emp_no, hire_date, x | limit 5",
- Set.of("hire_date", "hire_date.*", "emp_no", "emp_no.*")
+ Set.of("_index", "_index.*", "hire_date", "hire_date.*", "emp_no", "emp_no.*")
);
}
@@ -230,7 +245,7 @@ public void testEvalDateTruncGrouping() {
| stats count(emp_no) by y
| sort y
| keep y, `count(emp_no)`
- | limit 5""", Set.of("hire_date", "hire_date.*", "emp_no", "emp_no.*"));
+ | limit 5""", Set.of("_index", "_index.*", "hire_date", "hire_date.*", "emp_no", "emp_no.*"));
}
public void testIn2() {
@@ -240,7 +255,7 @@ public void testIn2() {
| where birth_date not in (x, hire_date)
| keep x, hire_date
| sort x desc
- | limit 4""", Set.of("hire_date", "hire_date.*", "birth_date", "birth_date.*"));
+ | limit 4""", Set.of("_index", "_index.*", "hire_date", "hire_date.*", "birth_date", "birth_date.*"));
}
public void testBucketMonth() {
@@ -249,13 +264,13 @@ public void testBucketMonth() {
| where hire_date >= "1985-01-01T00:00:00Z" and hire_date < "1986-01-01T00:00:00Z"
| eval hd = bucket(hire_date, 20, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z")
| sort hire_date
- | keep hire_date, hd""", Set.of("hire_date", "hire_date.*"));
+ | keep hire_date, hd""", Set.of("_index", "_index.*", "hire_date", "hire_date.*"));
}
public void testBorn_before_today() {
assertFieldNames(
"from employees | where birth_date < now() | sort emp_no asc | keep emp_no, birth_date| limit 1",
- Set.of("birth_date", "birth_date.*", "emp_no", "emp_no.*")
+ Set.of("_index", "_index.*", "birth_date", "birth_date.*", "emp_no", "emp_no.*")
);
}
@@ -265,7 +280,7 @@ public void testBucketMonthInAgg() {
| WHERE hire_date >= "1985-01-01T00:00:00Z" AND hire_date < "1986-01-01T00:00:00Z"
| EVAL bucket = BUCKET(hire_date, 20, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z")
| STATS AVG(salary) BY bucket
- | SORT bucket""", Set.of("salary", "salary.*", "hire_date", "hire_date.*"));
+ | SORT bucket""", Set.of("_index", "_index.*", "salary", "salary.*", "hire_date", "hire_date.*"));
}
public void testEvalDateParseDynamic() {
@@ -276,7 +291,7 @@ public void testEvalDateParseDynamic() {
| eval birth_date_string = date_format("yyyy-MM-dd", birth_date)
| eval new_date = date_parse("yyyy-MM-dd", birth_date_string)
| eval bool = new_date == birth_date
- | keep emp_no, new_date, birth_date, bool""", Set.of("emp_no", "emp_no.*", "birth_date", "birth_date.*"));
+ | keep emp_no, new_date, birth_date, bool""", Set.of("_index", "_index.*", "emp_no", "emp_no.*", "birth_date", "birth_date.*"));
}
public void testDateFields() {
@@ -284,7 +299,7 @@ public void testDateFields() {
from employees
| where emp_no == 10049 or emp_no == 10050
| eval year = date_extract("year", birth_date), month = date_extract("month_of_year", birth_date)
- | keep emp_no, year, month""", Set.of("emp_no", "emp_no.*", "birth_date", "birth_date.*"));
+ | keep emp_no, year, month""", Set.of("_index", "_index.*", "emp_no", "emp_no.*", "birth_date", "birth_date.*"));
}
public void testEvalDissect() {
@@ -294,7 +309,7 @@ public void testEvalDissect() {
| dissect full_name "%{a} %{b}"
| sort emp_no asc
| keep full_name, a, b
- | limit 3""", Set.of("first_name", "first_name.*", "last_name", "last_name.*", "emp_no", "emp_no.*"));
+ | limit 3""", Set.of("_index", "_index.*", "first_name", "first_name.*", "last_name", "last_name.*", "emp_no", "emp_no.*"));
}
public void testDissectExpression() {
@@ -303,7 +318,7 @@ public void testDissectExpression() {
| dissect concat(first_name, " ", last_name) "%{a} %{b}"
| sort emp_no asc
| keep a, b
- | limit 3""", Set.of("first_name", "first_name.*", "last_name", "last_name.*", "emp_no", "emp_no.*"));
+ | limit 3""", Set.of("_index", "_index.*", "first_name", "first_name.*", "last_name", "last_name.*", "emp_no", "emp_no.*"));
}
public void testMultivalueInput1() {
@@ -312,7 +327,7 @@ public void testMultivalueInput1() {
| where emp_no <= 10006
| dissect job_positions "%{a} %{b} %{c}"
| sort emp_no
- | keep emp_no, a, b, c""", Set.of("emp_no", "emp_no.*", "job_positions", "job_positions.*"));
+ | keep emp_no, a, b, c""", Set.of("_index", "_index.*", "emp_no", "emp_no.*", "job_positions", "job_positions.*"));
}
public void testLimitZero() {
@@ -367,14 +382,14 @@ public void testDocsEval() {
| KEEP first_name, last_name, height
| EVAL height_feet = height * 3.281, height_cm = height * 100
| WHERE first_name == "Georgi"
- | LIMIT 1""", Set.of("first_name", "first_name.*", "last_name", "last_name.*", "height", "height.*"));
+ | LIMIT 1""", Set.of("_index", "_index.*", "first_name", "first_name.*", "last_name", "last_name.*", "height", "height.*"));
}
public void testDocsKeepWildcard() {
assertFieldNames("""
FROM employees
| KEEP h*
- | LIMIT 0""", Set.of("h*"));
+ | LIMIT 0""", Set.of("_index", "_index.*", "h*"));
}
public void testDocsKeepDoubleWildcard() {
@@ -385,11 +400,14 @@ public void testDocsKeepDoubleWildcard() {
}
public void testDocsRename() {
- assertFieldNames("""
- FROM employees
- | KEEP first_name, last_name, still_hired
- | RENAME still_hired AS employed
- | LIMIT 0""", Set.of("first_name", "first_name.*", "last_name", "last_name.*", "still_hired", "still_hired.*"));
+ assertFieldNames(
+ """
+ FROM employees
+ | KEEP first_name, last_name, still_hired
+ | RENAME still_hired AS employed
+ | LIMIT 0""",
+ Set.of("_index", "_index.*", "first_name", "first_name.*", "last_name", "last_name.*", "still_hired", "still_hired.*")
+ );
}
public void testDocsRenameMultipleColumns() {
@@ -397,14 +415,14 @@ public void testDocsRenameMultipleColumns() {
FROM employees
| KEEP first_name, last_name
| RENAME first_name AS fn, last_name AS ln
- | LIMIT 0""", Set.of("first_name", "first_name.*", "last_name", "last_name.*"));
+ | LIMIT 0""", Set.of("_index", "_index.*", "first_name", "first_name.*", "last_name", "last_name.*"));
}
public void testDocsStats() {
assertFieldNames("""
FROM employees
| STATS count = COUNT(emp_no) BY languages
- | SORT languages""", Set.of("emp_no", "emp_no.*", "languages", "languages.*"));
+ | SORT languages""", Set.of("_index", "_index.*", "emp_no", "emp_no.*", "languages", "languages.*"));
}
public void testEvalStats() {
@@ -429,13 +447,13 @@ public void testEvalStats() {
assertFieldNames("""
FROM employees
| STATS count = COUNT(*) BY first_name
- | SORT first_name""", Set.of("first_name", "first_name.*"));
+ | SORT first_name""", Set.of("_index", "_index.*", "first_name", "first_name.*"));
assertFieldNames("""
FROM employees
| EVAL y = "a"
| STATS count = COUNT(*) BY x = y
- | SORT x, first_name""", Set.of("first_name", "first_name.*"));
+ | SORT x, first_name""", Set.of("_index", "_index.*", "first_name", "first_name.*"));
assertFieldNames("""
FROM employees
@@ -453,26 +471,26 @@ public void testEvalStats() {
FROM employees
| EVAL y = to_upper(first_name), z = "z"
| STATS count = COUNT(*) BY first_name = to_lower(y), z
- | SORT first_name""", Set.of("first_name", "first_name.*"));
+ | SORT first_name""", Set.of("_index", "_index.*", "first_name", "first_name.*"));
assertFieldNames("""
FROM employees
| EVAL y = "a"
| STATS count = COUNT(*) BY x = y, z = first_name
- | SORT x, z""", Set.of("first_name", "first_name.*"));
+ | SORT x, z""", Set.of("_index", "_index.*", "first_name", "first_name.*"));
assertFieldNames("""
FROM employees
| EVAL y = "a"
| STATS count = COUNT(*) BY x = y, first_name
- | SORT x, first_name""", Set.of("first_name", "first_name.*"));
+ | SORT x, first_name""", Set.of("_index", "_index.*", "first_name", "first_name.*"));
assertFieldNames("""
FROM employees
| EVAL y = "a"
| STATS count = COUNT(first_name) BY x = y
| SORT x
- | DROP first_name""", Set.of("first_name", "first_name.*"));
+ | DROP first_name""", Set.of("_index", "_index.*", "first_name", "first_name.*"));
assertFieldNames("""
FROM employees
@@ -484,14 +502,14 @@ public void testEvalStats() {
FROM employees
| EVAL y = "a"
| STATS count = COUNT(*) BY first_name, y
- | MV_EXPAND first_name""", Set.of("first_name", "first_name.*"));
+ | MV_EXPAND first_name""", Set.of("_index", "_index.*", "first_name", "first_name.*"));
assertFieldNames("""
FROM employees
| MV_EXPAND first_name
| EVAL y = "a"
| STATS count = COUNT(*) BY first_name, y
- | SORT y""", Set.of("first_name", "first_name.*"));
+ | SORT y""", Set.of("_index", "_index.*", "first_name", "first_name.*"));
assertFieldNames("""
FROM employees
@@ -512,7 +530,7 @@ public void testEvalStats() {
| EVAL y = "a"
| STATS count = COUNT(*) BY first_name, y
| STATS count = COUNT(count) by x = y
- | SORT x""", Set.of("first_name", "first_name.*"));
+ | SORT x""", Set.of("_index", "_index.*", "first_name", "first_name.*"));
}
public void testSortWithLimitOne_DropHeight() {
@@ -525,13 +543,13 @@ public void testSortWithLimitOne_DropHeight_WithInlinestats() {
}
public void testDropAllColumns() {
- assertFieldNames("from employees | keep height | drop height | eval x = 1", Set.of("height", "height.*"));
+ assertFieldNames("from employees | keep height | drop height | eval x = 1", Set.of("_index", "_index.*", "height", "height.*"));
}
public void testDropAllColumns_WithStats() {
assertFieldNames(
"from employees | keep height | drop height | eval x = 1 | stats c=count(x), mi=min(x), s=sum(x)",
- Set.of("height", "height.*")
+ Set.of("_index", "_index.*", "height", "height.*")
);
}
@@ -544,18 +562,21 @@ public void testEnrichOn() {
| eval x = to_string(languages)
| enrich languages_policy on x
| keep emp_no, language_name""",
- Set.of("emp_no", "emp_no.*", "languages", "languages.*", "language_name", "language_name.*", "x", "x.*")
+ Set.of("_index", "_index.*", "emp_no", "emp_no.*", "languages", "languages.*", "language_name", "language_name.*", "x", "x.*")
);
}
public void testEnrichOn2() {
- assertFieldNames("""
- from employees
- | eval x = to_string(languages)
- | enrich languages_policy on x
- | keep emp_no, language_name
- | sort emp_no
- | limit 1""", Set.of("emp_no", "emp_no.*", "languages", "languages.*", "language_name", "language_name.*", "x", "x.*"));
+ assertFieldNames(
+ """
+ from employees
+ | eval x = to_string(languages)
+ | enrich languages_policy on x
+ | keep emp_no, language_name
+ | sort emp_no
+ | limit 1""",
+ Set.of("_index", "_index.*", "emp_no", "emp_no.*", "languages", "languages.*", "language_name", "language_name.*", "x", "x.*")
+ );
}
public void testUselessEnrich() {
@@ -567,13 +588,16 @@ public void testUselessEnrich() {
}
public void testSimpleSortLimit() {
- assertFieldNames("""
- from employees
- | eval x = to_string(languages)
- | enrich languages_policy on x
- | keep emp_no, language_name
- | sort emp_no
- | limit 1""", Set.of("languages", "languages.*", "emp_no", "emp_no.*", "language_name", "language_name.*", "x", "x.*"));
+ assertFieldNames(
+ """
+ from employees
+ | eval x = to_string(languages)
+ | enrich languages_policy on x
+ | keep emp_no, language_name
+ | sort emp_no
+ | limit 1""",
+ Set.of("_index", "_index.*", "languages", "languages.*", "emp_no", "emp_no.*", "language_name", "language_name.*", "x", "x.*")
+ );
}
public void testWith() {
@@ -581,7 +605,7 @@ public void testWith() {
"""
from employees | eval x = to_string(languages) | keep emp_no, x | sort emp_no | limit 1
| enrich languages_policy on x with language_name""",
- Set.of("emp_no", "emp_no.*", "languages", "languages.*", "language_name", "language_name.*", "x", "x.*")
+ Set.of("_index", "_index.*", "emp_no", "emp_no.*", "languages", "languages.*", "language_name", "language_name.*", "x", "x.*")
);
}
@@ -590,7 +614,7 @@ public void testWithAlias() {
"""
from employees | sort emp_no | limit 3 | eval x = to_string(languages) | keep emp_no, x
| enrich languages_policy on x with lang = language_name""",
- Set.of("emp_no", "emp_no.*", "languages", "languages.*", "language_name", "language_name.*", "x", "x.*")
+ Set.of("_index", "_index.*", "emp_no", "emp_no.*", "languages", "languages.*", "language_name", "language_name.*", "x", "x.*")
);
}
@@ -599,7 +623,7 @@ public void testWithAliasSort() {
"""
from employees | eval x = to_string(languages) | keep emp_no, x | sort emp_no | limit 3
| enrich languages_policy on x with lang = language_name""",
- Set.of("emp_no", "emp_no.*", "languages", "languages.*", "language_name", "language_name.*", "x", "x.*")
+ Set.of("_index", "_index.*", "emp_no", "emp_no.*", "languages", "languages.*", "language_name", "language_name.*", "x", "x.*")
);
}
@@ -608,7 +632,7 @@ public void testWithAliasAndPlain() {
"""
from employees | sort emp_no desc | limit 3 | eval x = to_string(languages) | keep emp_no, x
| enrich languages_policy on x with lang = language_name, language_name""",
- Set.of("emp_no", "emp_no.*", "languages", "languages.*", "language_name", "language_name.*", "x", "x.*")
+ Set.of("_index", "_index.*", "emp_no", "emp_no.*", "languages", "languages.*", "language_name", "language_name.*", "x", "x.*")
);
}
@@ -617,7 +641,7 @@ public void testWithTwoAliasesSameProp() {
"""
from employees | sort emp_no | limit 1 | eval x = to_string(languages) | keep emp_no, x
| enrich languages_policy on x with lang = language_name, lang2 = language_name""",
- Set.of("emp_no", "emp_no.*", "languages", "languages.*", "language_name", "language_name.*", "x", "x.*")
+ Set.of("_index", "_index.*", "emp_no", "emp_no.*", "languages", "languages.*", "language_name", "language_name.*", "x", "x.*")
);
}
@@ -626,7 +650,7 @@ public void testRedundantWith() {
"""
from employees | sort emp_no | limit 1 | eval x = to_string(languages) | keep emp_no, x
| enrich languages_policy on x with language_name, language_name""",
- Set.of("emp_no", "emp_no.*", "languages", "languages.*", "language_name", "language_name.*", "x", "x.*")
+ Set.of("_index", "_index.*", "emp_no", "emp_no.*", "languages", "languages.*", "language_name", "language_name.*", "x", "x.*")
);
}
@@ -637,7 +661,7 @@ public void testNullInput() {
| where emp_no == 10017
| keep emp_no, gender
| enrich languages_policy on gender with language_name, language_name""",
- Set.of("gender", "gender.*", "emp_no", "emp_no.*", "language_name", "language_name.*")
+ Set.of("_index", "_index.*", "gender", "gender.*", "emp_no", "emp_no.*", "language_name", "language_name.*")
);
}
@@ -649,7 +673,7 @@ public void testConstantNullInput() {
| eval x = to_string(languages)
| keep emp_no, x
| enrich languages_policy on x with language_name, language_name""",
- Set.of("languages", "languages.*", "emp_no", "emp_no.*", "language_name", "language_name.*", "x", "x.*")
+ Set.of("_index", "_index.*", "languages", "languages.*", "emp_no", "emp_no.*", "language_name", "language_name.*", "x", "x.*")
);
}
@@ -663,6 +687,8 @@ public void testEnrichEval() {
| keep emp_no, x, lang, language
| sort emp_no desc | limit 3""",
Set.of(
+ "_index",
+ "_index.*",
"emp_no",
"x",
"lang",
@@ -688,14 +714,27 @@ public void testSimple() {
| where x > 1
| keep emp_no, language_name
| limit 1""",
- Set.of("emp_no", "emp_no.*", "languages", "languages.*", "language_name", "language_name.*", "x", "y", "x.*", "y.*")
+ Set.of(
+ "_index",
+ "_index.*",
+ "emp_no",
+ "emp_no.*",
+ "languages",
+ "languages.*",
+ "language_name",
+ "language_name.*",
+ "x",
+ "y",
+ "x.*",
+ "y.*"
+ )
);
}
public void testEvalNullSort() {
assertFieldNames(
"from employees | eval x = null | sort x asc, emp_no desc | keep emp_no, x, last_name | limit 2",
- Set.of("last_name", "last_name.*", "emp_no", "emp_no.*")
+ Set.of("_index", "_index.*", "last_name", "last_name.*", "emp_no", "emp_no.*")
);
}
@@ -706,7 +745,7 @@ public void testFilterEvalFilter() {
| eval name_len = length(first_name)
| where name_len < 4
| keep first_name
- | sort first_name""", Set.of("emp_no", "emp_no.*", "first_name", "first_name.*"));
+ | sort first_name""", Set.of("_index", "_index.*", "emp_no", "emp_no.*", "first_name", "first_name.*"));
}
public void testEvalWithIsNullIsNotNull() {
@@ -717,7 +756,18 @@ public void testEvalWithIsNullIsNotNull() {
| sort emp_no
| limit 1
| keep *true*, *false*, first_name, last_name""",
- Set.of("emp_no", "emp_no.*", "first_name", "first_name.*", "last_name", "last_name.*", "*true*", "*false*")
+ Set.of(
+ "_index",
+ "_index.*",
+ "emp_no",
+ "emp_no.*",
+ "first_name",
+ "first_name.*",
+ "last_name",
+ "last_name.*",
+ "*true*",
+ "*false*"
+ )
);
}
@@ -725,6 +775,8 @@ public void testInDouble() {
assertFieldNames(
"from employees | keep emp_no, height, height.float, height.half_float, height.scaled_float | where height in (2.03)",
Set.of(
+ "_index",
+ "_index.*",
"emp_no",
"emp_no.*",
"height",
@@ -742,7 +794,7 @@ public void testInDouble() {
public void testConvertFromDatetime() {
assertFieldNames(
"from employees | sort emp_no | eval hire_double = to_double(hire_date) | keep emp_no, hire_date, hire_double | limit 3",
- Set.of("emp_no", "emp_no.*", "hire_date", "hire_date.*")
+ Set.of("_index", "_index.*", "emp_no", "emp_no.*", "hire_date", "hire_date.*")
);
}
@@ -752,7 +804,7 @@ public void testBucket() {
| WHERE hire_date >= "1985-01-01T00:00:00Z" AND hire_date < "1986-01-01T00:00:00Z"
| EVAL bh = bucket(height, 20, 1.41, 2.10)
| SORT hire_date
- | KEEP hire_date, height, bh""", Set.of("hire_date", "hire_date.*", "height", "height.*"));
+ | KEEP hire_date, height, bh""", Set.of("_index", "_index.*", "hire_date", "hire_date.*", "height", "height.*"));
}
public void testEvalGrok() {
@@ -762,7 +814,7 @@ public void testEvalGrok() {
| grok full_name "%{WORD:a} %{WORD:b}"
| sort emp_no asc
| keep full_name, a, b
- | limit 3""", Set.of("first_name", "first_name.*", "last_name", "last_name.*", "emp_no", "emp_no.*"));
+ | limit 3""", Set.of("_index", "_index.*", "first_name", "first_name.*", "last_name", "last_name.*", "emp_no", "emp_no.*"));
}
public void testGrokExpression() {
@@ -771,7 +823,7 @@ public void testGrokExpression() {
| grok concat(first_name, " ", last_name) "%{WORD:a} %{WORD:b}"
| sort emp_no asc
| keep a, b
- | limit 3""", Set.of("first_name", "first_name.*", "last_name", "last_name.*", "emp_no", "emp_no.*"));
+ | limit 3""", Set.of("_index", "_index.*", "first_name", "first_name.*", "last_name", "last_name.*", "emp_no", "emp_no.*"));
}
public void testEvalGrokSort() {
@@ -781,7 +833,7 @@ public void testEvalGrokSort() {
| grok full_name "%{WORD:a} %{WORD:b}"
| sort a asc
| keep full_name, a, b
- | limit 3""", Set.of("first_name", "first_name.*", "last_name", "last_name.*"));
+ | limit 3""", Set.of("_index", "_index.*", "first_name", "first_name.*", "last_name", "last_name.*"));
}
public void testGrokStats() {
@@ -791,7 +843,7 @@ public void testGrokStats() {
| grok x "%{WORD:a} %{WORD:b}"
| stats n = max(emp_no) by a
| keep a, n
- | sort a asc""", Set.of("gender", "gender.*", "emp_no", "emp_no.*"));
+ | sort a asc""", Set.of("_index", "_index.*", "gender", "gender.*", "emp_no", "emp_no.*"));
}
public void testNullOnePattern() {
@@ -799,16 +851,19 @@ public void testNullOnePattern() {
from employees
| where emp_no == 10030
| grok first_name "%{WORD:a}"
- | keep first_name, a""", Set.of("first_name", "first_name.*", "emp_no", "emp_no.*"));
+ | keep first_name, a""", Set.of("_index", "_index.*", "first_name", "first_name.*", "emp_no", "emp_no.*"));
}
public void testMultivalueInput() {
- assertFieldNames("""
- from employees
- | where emp_no <= 10006
- | grok job_positions "%{WORD:a} %{WORD:b} %{WORD:c}"
- | sort emp_no
- | keep emp_no, a, b, c, job_positions""", Set.of("job_positions", "job_positions.*", "emp_no", "emp_no.*"));
+ assertFieldNames(
+ """
+ from employees
+ | where emp_no <= 10006
+ | grok job_positions "%{WORD:a} %{WORD:b} %{WORD:c}"
+ | sort emp_no
+ | keep emp_no, a, b, c, job_positions""",
+ Set.of("_index", "_index.*", "job_positions", "job_positions.*", "emp_no", "emp_no.*")
+ );
}
public void testSelectAll() {
@@ -825,7 +880,7 @@ public void testFilterById_WithInlinestats() {
}
public void testKeepId() {
- assertFieldNames("FROM apps metadata _id | WHERE id == 3 | KEEP _id", Set.of("id", "id.*"));
+ assertFieldNames("FROM apps metadata _id | WHERE id == 3 | KEEP _id", Set.of("_index", "_index.*", "id", "id.*"));
}
public void testIdRangeAndSort() {
@@ -833,19 +888,22 @@ public void testIdRangeAndSort() {
FROM apps metadata _id
| WHERE _id >= "2" AND _id <= "7"
| SORT _id
- | keep id, name, _id""", Set.of("id", "id.*", "name", "name.*"));
+ | keep id, name, _id""", Set.of("_index", "_index.*", "id", "id.*", "name", "name.*"));
}
public void testOrderById() {
- assertFieldNames("FROM apps metadata _id | KEEP _id, name | SORT _id", Set.of("name", "name.*"));
+ assertFieldNames("FROM apps metadata _id | KEEP _id, name | SORT _id", Set.of("_index", "_index.*", "name", "name.*"));
}
public void testOrderByIdDesc() {
- assertFieldNames("FROM apps metadata _id | KEEP _id, name | SORT _id DESC", Set.of("name", "name.*"));
+ assertFieldNames("FROM apps metadata _id | KEEP _id, name | SORT _id DESC", Set.of("_index", "_index.*", "name", "name.*"));
}
public void testConcatId() {
- assertFieldNames("FROM apps metadata _id | eval c = concat(_id, name) | SORT _id | KEEP c", Set.of("name", "name.*"));
+ assertFieldNames(
+ "FROM apps metadata _id | eval c = concat(_id, name) | SORT _id | KEEP c",
+ Set.of("_index", "_index.*", "name", "name.*")
+ );
}
public void testStatsOnId() {
@@ -853,45 +911,60 @@ public void testStatsOnId() {
}
public void testStatsOnIdByGroup() {
- assertFieldNames("FROM apps metadata _id | stats c = count(_id) by name | sort c desc, name | limit 5", Set.of("name", "name.*"));
+ assertFieldNames(
+ "FROM apps metadata _id | stats c = count(_id) by name | sort c desc, name | limit 5",
+ Set.of("_index", "_index.*", "name", "name.*")
+ );
}
public void testSimpleProject() {
assertFieldNames(
"from hosts | keep card, host, ip0, ip1",
- Set.of("card", "card.*", "host", "host.*", "ip0", "ip0.*", "ip1", "ip1.*")
+ Set.of("_index", "_index.*", "card", "card.*", "host", "host.*", "ip0", "ip0.*", "ip1", "ip1.*")
);
}
public void testEquals() {
assertFieldNames(
"from hosts | sort host, card | where ip0 == ip1 | keep card, host",
- Set.of("card", "card.*", "host", "host.*", "ip0", "ip0.*", "ip1", "ip1.*")
+ Set.of("_index", "_index.*", "card", "card.*", "host", "host.*", "ip0", "ip0.*", "ip1", "ip1.*")
);
}
public void testConditional() {
- assertFieldNames("from hosts | eval eq=case(ip0==ip1, ip0, ip1) | keep eq, ip0, ip1", Set.of("ip1", "ip1.*", "ip0", "ip0.*"));
+ assertFieldNames(
+ "from hosts | eval eq=case(ip0==ip1, ip0, ip1) | keep eq, ip0, ip1",
+ Set.of("_index", "_index.*", "ip1", "ip1.*", "ip0", "ip0.*")
+ );
}
public void testWhereWithAverageBySubField() {
assertFieldNames(
"from employees | where languages + 1 == 6 | stats avg(avg_worked_seconds) by languages.long",
- Set.of("languages", "languages.*", "avg_worked_seconds", "avg_worked_seconds.*", "languages.long", "languages.long.*")
+ Set.of(
+ "_index",
+ "_index.*",
+ "languages",
+ "languages.*",
+ "avg_worked_seconds",
+ "avg_worked_seconds.*",
+ "languages.long",
+ "languages.long.*"
+ )
);
}
public void testAverageOfEvalValue() {
assertFieldNames(
"from employees | eval ratio = salary / height | stats avg(ratio)",
- Set.of("salary", "salary.*", "height", "height.*")
+ Set.of("_index", "_index.*", "salary", "salary.*", "height", "height.*")
);
}
public void testTopNProjectEvalProject() {
assertFieldNames(
"from employees | sort salary | limit 1 | keep languages, salary | eval x = languages + 1 | keep x",
- Set.of("salary", "salary.*", "languages", "languages.*")
+ Set.of("_index", "_index.*", "salary", "salary.*", "languages", "languages.*")
);
}
@@ -902,11 +975,14 @@ public void testMvSum() {
| eval salary_change = mv_sum(salary_change.int)
| sort emp_no
| keep emp_no, salary_change.int, salary_change
- | limit 7""", Set.of("emp_no", "emp_no.*", "salary_change.int", "salary_change.int.*"));
+ | limit 7""", Set.of("_index", "_index.*", "emp_no", "emp_no.*", "salary_change.int", "salary_change.int.*"));
}
public void testMetaIndexAliasedInAggs() {
- assertFieldNames("from employees metadata _index | eval _i = _index | stats max = max(emp_no) by _i", Set.of("emp_no", "emp_no.*"));
+ assertFieldNames(
+ "from employees metadata _index | eval _i = _index | stats max = max(emp_no) by _i",
+ Set.of("_index", "_index.*", "emp_no", "emp_no.*")
+ );
}
public void testCoalesceFolding() {
@@ -915,13 +991,13 @@ public void testCoalesceFolding() {
| EVAL foo=COALESCE(true, false, null)
| SORT emp_no ASC
| KEEP emp_no, first_name, foo
- | limit 3""", Set.of("emp_no", "emp_no.*", "first_name", "first_name.*"));
+ | limit 3""", Set.of("_index", "_index.*", "emp_no", "emp_no.*", "first_name", "first_name.*"));
}
public void testRenameEvalProject() {
assertFieldNames(
"from employees | rename languages as x | keep x | eval z = 2 * x | keep x, z | limit 3",
- Set.of("languages", "languages.*")
+ Set.of("_index", "_index.*", "languages", "languages.*")
);
}
@@ -933,27 +1009,38 @@ public void testRenameProjectEval() {
| keep x, y
| eval x2 = x + 1
| eval y2 = y + 2
- | limit 3""", Set.of("languages", "languages.*"));
+ | limit 3""", Set.of("_index", "_index.*", "languages", "languages.*"));
}
public void testRenameWithFilterPushedToES() {
assertFieldNames(
"from employees | rename emp_no as x | keep languages, first_name, last_name, x | where x > 10030 and x < 10040 | limit 5",
- Set.of("emp_no", "emp_no.*", "languages", "languages.*", "first_name", "first_name.*", "last_name", "last_name.*")
+ Set.of(
+ "_index",
+ "_index.*",
+ "emp_no",
+ "emp_no.*",
+ "languages",
+ "languages.*",
+ "first_name",
+ "first_name.*",
+ "last_name",
+ "last_name.*"
+ )
);
}
public void testRenameOverride() {
assertFieldNames(
"from employees | rename emp_no as languages | keep languages, last_name | limit 3",
- Set.of("emp_no", "emp_no.*", "last_name", "last_name.*")
+ Set.of("_index", "_index.*", "emp_no", "emp_no.*", "last_name", "last_name.*")
);
}
public void testProjectRenameDate() {
assertFieldNames(
"from employees | sort hire_date | rename hire_date as x | keep emp_no, x | limit 5",
- Set.of("hire_date", "hire_date.*", "emp_no", "emp_no.*")
+ Set.of("_index", "_index.*", "hire_date", "hire_date.*", "emp_no", "emp_no.*")
);
}
@@ -968,13 +1055,16 @@ public void testRenameDrop() {
}
public void testMaxOfLong() {
- assertFieldNames("from employees | stats l = max(languages.long)", Set.of("languages.long", "languages.long.*"));
+ assertFieldNames(
+ "from employees | stats l = max(languages.long)",
+ Set.of("_index", "_index.*", "languages.long", "languages.long.*")
+ );
}
public void testGroupByAlias() {
assertFieldNames(
"from employees | rename languages as l | keep l, height | stats m = min(height) by l | sort l",
- Set.of("languages", "languages.*", "height", "height.*")
+ Set.of("_index", "_index.*", "languages", "languages.*", "height", "height.*")
);
}
@@ -983,7 +1073,7 @@ public void testByStringAndLong() {
from employees
| eval trunk_worked_seconds = avg_worked_seconds / 100000000 * 100000000
| stats c = count(gender) by gender, trunk_worked_seconds
- | sort c desc""", Set.of("avg_worked_seconds", "avg_worked_seconds.*", "gender", "gender.*"));
+ | sort c desc""", Set.of("_index", "_index.*", "avg_worked_seconds", "avg_worked_seconds.*", "gender", "gender.*"));
}
public void testByStringAndLongWithAlias() {
@@ -993,7 +1083,7 @@ public void testByStringAndLongWithAlias() {
| rename gender as g, trunk_worked_seconds as tws
| keep g, tws
| stats c = count(g) by g, tws
- | sort c desc""", Set.of("avg_worked_seconds", "avg_worked_seconds.*", "gender", "gender.*"));
+ | sort c desc""", Set.of("_index", "_index.*", "avg_worked_seconds", "avg_worked_seconds.*", "gender", "gender.*"));
}
public void testByStringAndString() {
@@ -1002,26 +1092,32 @@ public void testByStringAndString() {
| eval hire_year_str = date_format("yyyy", hire_date)
| stats c = count(gender) by gender, hire_year_str
| sort c desc, gender, hire_year_str
- | where c >= 5""", Set.of("hire_date", "hire_date.*", "gender", "gender.*"));
+ | where c >= 5""", Set.of("_index", "_index.*", "hire_date", "hire_date.*", "gender", "gender.*"));
}
public void testByLongAndLong() {
- assertFieldNames("""
- from employees
- | eval trunk_worked_seconds = avg_worked_seconds / 100000000 * 100000000
- | stats c = count(languages.long) by languages.long, trunk_worked_seconds
- | sort c desc""", Set.of("avg_worked_seconds", "avg_worked_seconds.*", "languages.long", "languages.long.*"));
+ assertFieldNames(
+ """
+ from employees
+ | eval trunk_worked_seconds = avg_worked_seconds / 100000000 * 100000000
+ | stats c = count(languages.long) by languages.long, trunk_worked_seconds
+ | sort c desc""",
+ Set.of("_index", "_index.*", "avg_worked_seconds", "avg_worked_seconds.*", "languages.long", "languages.long.*")
+ );
}
public void testByDateAndKeywordAndIntWithAlias() {
- assertFieldNames("""
- from employees
- | eval d = date_trunc(hire_date, 1 year)
- | rename gender as g, languages as l, emp_no as e
- | keep d, g, l, e
- | stats c = count(e) by d, g, l
- | sort c desc, d, l desc
- | limit 10""", Set.of("hire_date", "hire_date.*", "gender", "gender.*", "languages", "languages.*", "emp_no", "emp_no.*"));
+ assertFieldNames(
+ """
+ from employees
+ | eval d = date_trunc(hire_date, 1 year)
+ | rename gender as g, languages as l, emp_no as e
+ | keep d, g, l, e
+ | stats c = count(e) by d, g, l
+ | sort c desc, d, l desc
+ | limit 10""",
+ Set.of("_index", "_index.*", "hire_date", "hire_date.*", "gender", "gender.*", "languages", "languages.*", "emp_no", "emp_no.*")
+ );
}
public void testCountDistinctOfKeywords() {
@@ -1030,14 +1126,14 @@ public void testCountDistinctOfKeywords() {
from employees
| eval hire_year_str = date_format("yyyy", hire_date)
| stats g = count_distinct(gender), h = count_distinct(hire_year_str)""",
- Set.of("hire_date", "hire_date.*", "gender", "gender.*")
+ Set.of("_index", "_index.*", "hire_date", "hire_date.*", "gender", "gender.*")
);
}
public void testCountDistinctOfIpPrecision() {
assertFieldNames("""
FROM hosts
- | STATS COUNT_DISTINCT(ip0, 80000), COUNT_DISTINCT(ip1, 5)""", Set.of("ip0", "ip0.*", "ip1", "ip1.*"));
+ | STATS COUNT_DISTINCT(ip0, 80000), COUNT_DISTINCT(ip1, 5)""", Set.of("_index", "_index.*", "ip0", "ip0.*", "ip1", "ip1.*"));
}
public void testPercentileOfLong() {
@@ -1045,20 +1141,20 @@ public void testPercentileOfLong() {
"""
from employees
| stats p0 = percentile(salary_change.long, 0), p50 = percentile(salary_change.long, 50)""",
- Set.of("salary_change.long", "salary_change.long.*")
+ Set.of("_index", "_index.*", "salary_change.long", "salary_change.long.*")
);
}
public void testMedianOfInteger() {
assertFieldNames("""
FROM employees
- | STATS MEDIAN(salary), PERCENTILE(salary, 50)""", Set.of("salary", "salary.*"));
+ | STATS MEDIAN(salary), PERCENTILE(salary, 50)""", Set.of("_index", "_index.*", "salary", "salary.*"));
}
public void testMedianAbsoluteDeviation() {
assertFieldNames("""
FROM employees
- | STATS MEDIAN(salary), MEDIAN_ABSOLUTE_DEVIATION(salary)""", Set.of("salary", "salary.*"));
+ | STATS MEDIAN(salary), MEDIAN_ABSOLUTE_DEVIATION(salary)""", Set.of("_index", "_index.*", "salary", "salary.*"));
}
public void testIn3VLWithComputedNull() {
@@ -1071,7 +1167,7 @@ public void testIn3VLWithComputedNull() {
| keep emp_no, job_positions
| eval nil = concat("", null)
| eval is_in = job_positions in ("Accountant", "Internship", nil)""",
- Set.of("job_positions", "job_positions.*", "emp_no", "emp_no.*")
+ Set.of("_index", "_index.*", "job_positions", "job_positions.*", "emp_no", "emp_no.*")
);
}
@@ -1089,21 +1185,21 @@ version > TO_VER("1.1"), "high",
version IS NULL, "none",
"low")
| SORT version DESC NULLS LAST, id DESC
- | KEEP v, version, version_text, id, m, g, i, c""", Set.of("version", "version.*", "id", "id.*"));
+ | KEEP v, version, version_text, id, m, g, i, c""", Set.of("_index", "_index.*", "version", "version.*", "id", "id.*"));
}
public void testLikePrefix() {
assertFieldNames("""
from employees
| where first_name like "Eberhar*"
- | keep emp_no, first_name""", Set.of("emp_no", "emp_no.*", "first_name", "first_name.*"));
+ | keep emp_no, first_name""", Set.of("_index", "_index.*", "emp_no", "emp_no.*", "first_name", "first_name.*"));
}
public void testRLikePrefix() {
assertFieldNames("""
from employees
| where first_name rlike "Aleja.*"
- | keep emp_no""", Set.of("first_name", "first_name.*", "emp_no", "emp_no.*"));
+ | keep emp_no""", Set.of("_index", "_index.*", "first_name", "first_name.*", "emp_no", "emp_no.*"));
}
public void testByUnmentionedLongAndLong() {
@@ -1113,7 +1209,16 @@ public void testByUnmentionedLongAndLong() {
| eval trunk_worked_seconds = avg_worked_seconds / 100000000 * 100000000
| stats c = count(gender) by languages.long, trunk_worked_seconds
| sort c desc""",
- Set.of("avg_worked_seconds", "avg_worked_seconds.*", "languages.long", "languages.long.*", "gender", "gender.*")
+ Set.of(
+ "_index",
+ "_index.*",
+ "avg_worked_seconds",
+ "avg_worked_seconds.*",
+ "languages.long",
+ "languages.long.*",
+ "gender",
+ "gender.*"
+ )
);
}
@@ -1122,7 +1227,7 @@ public void testRenameNopProject() {
from employees
| rename emp_no as emp_no
| keep emp_no, last_name
- | limit 3""", Set.of("emp_no", "emp_no.*", "last_name", "last_name.*"));
+ | limit 3""", Set.of("_index", "_index.*", "emp_no", "emp_no.*", "last_name", "last_name.*"));
}
public void testRename() {
@@ -1130,7 +1235,7 @@ public void testRename() {
from test
| rename emp_no as e
| keep first_name, e
- """, Set.of("emp_no", "emp_no.*", "first_name", "first_name.*"));
+ """, Set.of("_index", "_index.*", "emp_no", "emp_no.*", "first_name", "first_name.*"));
}
public void testChainedRename() {
@@ -1138,9 +1243,12 @@ public void testChainedRename() {
from test
| rename emp_no as r1, r1 as r2, r2 as r3
| keep first_name, r3
- """, Set.of("emp_no", "emp_no.*", "first_name", "first_name.*", "r1", "r1.*", "r2", "r2.*"));// TODO asking for more shouldn't
- // hurt. Can we do better?
- // Set.of("emp_no", "emp_no.*", "first_name", "first_name.*"));
+ """, Set.of("_index", "_index.*", "emp_no", "emp_no.*", "first_name", "first_name.*", "r1", "r1.*", "r2", "r2.*"));// TODO
+ // asking for
+ // more
+ // shouldn't
+ // hurt. Can we do better?
+ // Set.of("_index", "_index.*", "emp_no", "emp_no.*", "first_name", "first_name.*"));
}
public void testChainedRenameReuse() {
@@ -1148,9 +1256,12 @@ public void testChainedRenameReuse() {
from test
| rename emp_no as r1, r1 as r2, r2 as r3, first_name as r1
| keep r1, r3
- """, Set.of("emp_no", "emp_no.*", "first_name", "first_name.*", "r1", "r1.*", "r2", "r2.*"));// TODO asking for more shouldn't
- // hurt. Can we do better?
- // Set.of("emp_no", "emp_no.*", "first_name", "first_name.*"));
+ """, Set.of("_index", "_index.*", "emp_no", "emp_no.*", "first_name", "first_name.*", "r1", "r1.*", "r2", "r2.*"));// TODO
+ // asking for
+ // more
+ // shouldn't
+ // hurt. Can we do better?
+ // Set.of("_index", "_index.*", "emp_no", "emp_no.*", "first_name", "first_name.*"));
}
public void testRenameBackAndForth() {
@@ -1158,8 +1269,8 @@ public void testRenameBackAndForth() {
from test
| rename emp_no as r1, r1 as emp_no
| keep emp_no
- """, Set.of("emp_no", "emp_no.*", "r1", "r1.*"));// TODO asking for more shouldn't hurt. Can we do better?
- // Set.of("emp_no", "emp_no.*"));
+ """, Set.of("_index", "_index.*", "emp_no", "emp_no.*", "r1", "r1.*"));// TODO asking for more shouldn't hurt. Can we do better?
+ // Set.of("_index", "_index.*", "emp_no", "emp_no.*"));
}
public void testRenameReuseAlias() {
@@ -1172,7 +1283,7 @@ public void testRenameReuseAlias() {
public void testIfDuplicateNamesGroupingHasPriority() {
assertFieldNames(
"from employees | stats languages = avg(height), languages = min(height) by languages | sort languages",
- Set.of("height", "height.*", "languages", "languages.*")
+ Set.of("_index", "_index.*", "height", "height.*", "languages", "languages.*")
);
}
@@ -1182,7 +1293,7 @@ public void testCoalesce() {
| EVAL first_name = COALESCE(first_name, "X")
| SORT first_name DESC, emp_no ASC
| KEEP emp_no, first_name
- | limit 10""", Set.of("first_name", "first_name.*", "emp_no", "emp_no.*"));
+ | limit 10""", Set.of("_index", "_index.*", "first_name", "first_name.*", "emp_no", "emp_no.*"));
}
public void testCoalesceBackwards() {
@@ -1191,7 +1302,7 @@ public void testCoalesceBackwards() {
| EVAL first_name = COALESCE("X", first_name)
| SORT first_name DESC, emp_no ASC
| KEEP emp_no, first_name
- | limit 10""", Set.of("first_name", "first_name.*", "emp_no", "emp_no.*"));
+ | limit 10""", Set.of("_index", "_index.*", "first_name", "first_name.*", "emp_no", "emp_no.*"));
}
public void testGroupByVersionCast() {
@@ -1200,7 +1311,7 @@ public void testGroupByVersionCast() {
| EVAL g = TO_VER(CONCAT("1.", TO_STR(version)))
| STATS id = MAX(id) BY g
| SORT id
- | DROP g""", Set.of("version", "version.*", "id", "id.*"));
+ | DROP g""", Set.of("_index", "_index.*", "version", "version.*", "id", "id.*"));
}
public void testCoalesceEndsInNull() {
@@ -1209,17 +1320,29 @@ public void testCoalesceEndsInNull() {
| EVAL first_name = COALESCE(first_name, last_name, null)
| SORT first_name DESC, emp_no ASC
| KEEP emp_no, first_name
- | limit 3""", Set.of("first_name", "first_name.*", "last_name", "last_name.*", "emp_no", "emp_no.*"));
+ | limit 3""", Set.of("_index", "_index.*", "first_name", "first_name.*", "last_name", "last_name.*", "emp_no", "emp_no.*"));
}
public void testMvAvg() {
- assertFieldNames("""
- from employees
- | where emp_no > 10008
- | eval salary_change = mv_avg(salary_change)
- | sort emp_no
- | keep emp_no, salary_change.int, salary_change
- | limit 7""", Set.of("emp_no", "emp_no.*", "salary_change", "salary_change.*", "salary_change.int", "salary_change.int.*"));
+ assertFieldNames(
+ """
+ from employees
+ | where emp_no > 10008
+ | eval salary_change = mv_avg(salary_change)
+ | sort emp_no
+ | keep emp_no, salary_change.int, salary_change
+ | limit 7""",
+ Set.of(
+ "_index",
+ "_index.*",
+ "emp_no",
+ "emp_no.*",
+ "salary_change",
+ "salary_change.*",
+ "salary_change.int",
+ "salary_change.int.*"
+ )
+ );
}
public void testEvalOverride() {
@@ -1228,8 +1351,9 @@ public void testEvalOverride() {
| eval languages = languages + 1
| eval languages = languages + 1
| limit 5
- | keep l*""", Set.of("languages", "languages.*", "l*"));// subtlety here. Keeping only "languages*" can remove any other "l*"
- // named fields
+ | keep l*""", Set.of("_index", "_index.*", "languages", "languages.*", "l*"));// subtlety here. Keeping only "languages*" can
+ // remove any other "l*"
+ // named fields
}
public void testBasicWildcardKeep() {
@@ -1240,7 +1364,7 @@ public void testBasicWildcardKeep2() {
assertFieldNames("""
from test
| keep un*
- """, Set.of("un*"));
+ """, Set.of("_index", "_index.*", "un*"));
}
public void testWildcardKeep() {
@@ -1255,7 +1379,7 @@ public void testProjectThenDropName() {
from test
| keep *name
| drop first_name
- """, Set.of("*name", "*name.*", "first_name", "first_name.*"));
+ """, Set.of("_index", "_index.*", "*name", "*name.*", "first_name", "first_name.*"));
}
public void testProjectAfterDropName() {
@@ -1263,7 +1387,7 @@ public void testProjectAfterDropName() {
from test
| drop first_name
| keep *name
- """, Set.of("*name.*", "*name", "first_name", "first_name.*"));
+ """, Set.of("_index", "_index.*", "*name.*", "*name", "first_name", "first_name.*"));
}
public void testProjectWithMixedQuoting() {
@@ -1271,7 +1395,7 @@ public void testProjectWithMixedQuoting() {
from test
| drop first_name
| keep *`name`
- """, Set.of("*name.*", "*name", "first_name", "first_name.*"));
+ """, Set.of("_index", "_index.*", "*name.*", "*name", "first_name", "first_name.*"));
}
public void testProjectKeepAndDropName() {
@@ -1279,7 +1403,7 @@ public void testProjectKeepAndDropName() {
from test
| drop first_name
| keep last_name
- """, Set.of("last_name", "last_name.*", "first_name", "first_name.*"));
+ """, Set.of("_index", "_index.*", "last_name", "last_name.*", "first_name", "first_name.*"));
}
public void testProjectDropPattern() {
@@ -1328,7 +1452,7 @@ public void testProjectDropPatternAndKeepOthers() {
from test
| drop l*
| keep first_name, salary
- """, Set.of("l*", "first_name", "first_name.*", "salary", "salary.*"));
+ """, Set.of("_index", "_index.*", "l*", "first_name", "first_name.*", "salary", "salary.*"));
}
public void testProjectDropWithQuotedAndUnquotedPatternAndKeepOthers() {
@@ -1336,7 +1460,7 @@ public void testProjectDropWithQuotedAndUnquotedPatternAndKeepOthers() {
from test
| drop `l`*
| keep first_name, salary
- """, Set.of("l*", "first_name", "first_name.*", "salary", "salary.*"));
+ """, Set.of("_index", "_index.*", "l*", "first_name", "first_name.*", "salary", "salary.*"));
}
public void testAliasesThatGetDropped() {
@@ -1362,7 +1486,7 @@ public void testCountAllGrouped() {
| stats c = count(*) by languages
| rename languages as l
| sort l DESC
- """, Set.of("languages", "languages.*"));
+ """, Set.of("_index", "_index.*", "languages", "languages.*"));
}
public void testCountAllAndOtherStatGrouped() {
@@ -1370,7 +1494,7 @@ public void testCountAllAndOtherStatGrouped() {
from test
| stats c = count(*), min = min(emp_no) by languages
| sort languages
- """, Set.of("emp_no", "emp_no.*", "languages", "languages.*"));
+ """, Set.of("_index", "_index.*", "emp_no", "emp_no.*", "languages", "languages.*"));
}
public void testCountAllAndOtherStatGrouped_WithInlinestats() {
@@ -1380,7 +1504,7 @@ public void testCountAllAndOtherStatGrouped_WithInlinestats() {
| inlinestats c = count(*), min = min(emp_no) by languages
| stats c = count(*), min = min(emp_no) by languages
| sort languages
- """, Set.of("emp_no", "emp_no.*", "languages", "languages.*"));
+ """, Set.of("_index", "_index.*", "emp_no", "emp_no.*", "languages", "languages.*"));
}
public void testCountAllWithImplicitNameOtherStatGrouped() {
@@ -1389,7 +1513,7 @@ public void testCountAllWithImplicitNameOtherStatGrouped() {
| stats count(*), min = min(emp_no) by languages
| drop `count(*)`
| sort languages
- """, Set.of("emp_no", "emp_no.*", "languages", "languages.*"));
+ """, Set.of("_index", "_index.*", "emp_no", "emp_no.*", "languages", "languages.*"));
}
public void testDropWithQuotedAndUnquotedName() {
@@ -1398,7 +1522,7 @@ public void testDropWithQuotedAndUnquotedName() {
| stats count(*), min = min(emp_no) by languages
| drop count`(*)`
| sort languages
- """, Set.of("emp_no", "emp_no.*", "languages", "languages.*"));
+ """, Set.of("_index", "_index.*", "emp_no", "emp_no.*", "languages", "languages.*"));
}
public void testCountAllWithEval() {
@@ -1409,7 +1533,7 @@ public void testCountAllWithEval() {
| eval x = min + 1
| stats ca = count(*), cx = count(x) by l
| sort l
- """, Set.of("languages", "languages.*", "salary", "salary.*"));
+ """, Set.of("_index", "_index.*", "languages", "languages.*", "salary", "salary.*"));
}
public void testCountAllWithEval_AndInlinestats() {
@@ -1422,7 +1546,7 @@ public void testCountAllWithEval_AndInlinestats() {
| eval x = min + 1
| stats ca = count(*), cx = count(x) by l
| sort l
- """, Set.of("languages", "languages.*", "salary", "salary.*"));
+ """, Set.of("_index", "_index.*", "languages", "languages.*", "salary", "salary.*"));
}
public void testKeepAfterEval_AndInlinestats() {
@@ -1435,7 +1559,7 @@ public void testKeepAfterEval_AndInlinestats() {
| eval x = min + 1
| keep x, l
| sort l
- """, Set.of("languages", "languages.*", "salary", "salary.*"));
+ """, Set.of("_index", "_index.*", "languages", "languages.*", "salary", "salary.*"));
}
public void testKeepBeforeEval_AndInlinestats() {
@@ -1448,7 +1572,7 @@ public void testKeepBeforeEval_AndInlinestats() {
| eval x = `max(salary)` + 1
| stats min = min(salary) by l
| sort l
- """, Set.of("languages", "languages.*", "salary", "salary.*", "emp_no", "emp_no.*"));
+ """, Set.of("_index", "_index.*", "languages", "languages.*", "salary", "salary.*", "emp_no", "emp_no.*"));
}
public void testStatsBeforeEval_AndInlinestats() {
@@ -1460,7 +1584,7 @@ public void testStatsBeforeEval_AndInlinestats() {
| eval salary = min + 1
| inlinestats max(salary) by l
| sort l
- """, Set.of("languages", "languages.*", "salary", "salary.*"));
+ """, Set.of("_index", "_index.*", "languages", "languages.*", "salary", "salary.*"));
}
public void testStatsBeforeInlinestats() {
@@ -1469,7 +1593,7 @@ public void testStatsBeforeInlinestats() {
from test
| stats min = min(salary) by languages
| inlinestats max(min) by languages
- """, Set.of("languages", "languages.*", "salary", "salary.*"));
+ """, Set.of("_index", "_index.*", "languages", "languages.*", "salary", "salary.*"));
}
public void testKeepBeforeInlinestats() {
@@ -1478,7 +1602,7 @@ public void testKeepBeforeInlinestats() {
from test
| keep languages, salary
| inlinestats max(salary) by languages
- """, Set.of("languages", "languages.*", "salary", "salary.*"));
+ """, Set.of("_index", "_index.*", "languages", "languages.*", "salary", "salary.*"));
}
public void testCountStar() {
@@ -1497,7 +1621,7 @@ public void testEnrichOnDefaultFieldWithKeep() {
| enrich languages_policy
| keep emp_no""",
enrichResolutionWith("language_name"),
- Set.of("emp_no", "emp_no.*", "language_name", "language_name.*"),
+ Set.of("_index", "_index.*", "emp_no", "emp_no.*", "language_name", "language_name.*"),
Set.of()
);
}
@@ -1506,7 +1630,7 @@ public void testDissectOverwriteName() {
assertFieldNames("""
from employees
| dissect first_name "%{first_name} %{more}"
- | keep emp_no, first_name, more""", Set.of("emp_no", "emp_no.*", "first_name", "first_name.*"));
+ | keep emp_no, first_name, more""", Set.of("_index", "_index.*", "emp_no", "emp_no.*", "first_name", "first_name.*"));
}
/**
@@ -1522,7 +1646,7 @@ public void testAvoidGrokAttributesRemoval() {
| drop message
| grok type "%{WORD:b}"
| stats x = max(b)
- | keep x""", Set.of("x", "b", "type", "message", "x.*", "message.*", "type.*", "b.*"));
+ | keep x""", Set.of("_index", "_index.*", "x", "b", "type", "message", "x.*", "message.*", "type.*", "b.*"));
}
public void testAvoidGrokAttributesRemoval2() {
@@ -1534,19 +1658,22 @@ public void testAvoidGrokAttributesRemoval2() {
| lookup join message_types_lookup on message
| stats count = count(*) by type
| keep count
- | sort count""", Set.of("type", "message", "count", "message.*", "type.*", "count.*"));
+ | sort count""", Set.of("_index", "_index.*", "type", "message", "count", "message.*", "type.*", "count.*"));
}
public void testAvoidGrokAttributesRemoval3() {
assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V12.isEnabled());
- assertFieldNames("""
- from sample_data
- | grok message "%{WORD:type}"
- | drop type
- | lookup join message_types_lookup on message
- | stats max = max(event_duration) by type
- | keep max
- | sort max""", Set.of("type", "event_duration", "message", "max", "event_duration.*", "message.*", "type.*", "max.*"));
+ assertFieldNames(
+ """
+ from sample_data
+ | grok message "%{WORD:type}"
+ | drop type
+ | lookup join message_types_lookup on message
+ | stats max = max(event_duration) by type
+ | keep max
+ | sort max""",
+ Set.of("_index", "_index.*", "type", "event_duration", "message", "max", "event_duration.*", "message.*", "type.*", "max.*")
+ );
}
/**
@@ -1561,7 +1688,7 @@ public void testAvoidGrokAttributesRemoval4() {
| drop message
| grok type "%{WORD:b}"
| stats x = max(b)
- | keep x""", Set.of("x", "b", "type", "message", "x.*", "message.*", "type.*", "b.*"));
+ | keep x""", Set.of("_index", "_index.*", "x", "b", "type", "message", "x.*", "message.*", "type.*", "b.*"));
}
/**
@@ -1585,6 +1712,8 @@ public void testAvoidGrokAttributesRemoval5() {
| SORT message DESC
| LIMIT 1""",
Set.of(
+ "_index",
+ "_index.*",
"message",
"type",
"languages",
@@ -1616,6 +1745,8 @@ public void testMetrics() {
assertFieldNames(
query,
Set.of(
+ "_index",
+ "_index.*",
"@timestamp",
"@timestamp.*",
"network.total_bytes_in",
@@ -1633,7 +1764,7 @@ public void testLookupJoin() {
assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V12.isEnabled());
assertFieldNames(
"FROM employees | KEEP languages | RENAME languages AS language_code | LOOKUP JOIN languages_lookup ON language_code",
- Set.of("languages", "languages.*", "language_code", "language_code.*"),
+ Set.of("_index", "_index.*", "languages", "languages.*", "language_code", "language_code.*"),
Set.of("languages_lookup") // Since we have KEEP before the LOOKUP JOIN we need to wildcard the lookup index
);
}
@@ -1647,7 +1778,16 @@ public void testLookupJoinKeep() {
| RENAME languages AS language_code
| LOOKUP JOIN languages_lookup ON language_code
| KEEP languages, language_code, language_name""",
- Set.of("languages", "languages.*", "language_code", "language_code.*", "language_name", "language_name.*"),
+ Set.of(
+ "_index",
+ "_index.*",
+ "languages",
+ "languages.*",
+ "language_code",
+ "language_code.*",
+ "language_name",
+ "language_name.*"
+ ),
Set.of() // Since we have KEEP after the LOOKUP, we can use the global field names instead of wildcarding the lookup index
);
}
@@ -1661,7 +1801,7 @@ public void testLookupJoinKeepWildcard() {
| RENAME languages AS language_code
| LOOKUP JOIN languages_lookup ON language_code
| KEEP language*""",
- Set.of("language*", "languages", "languages.*", "language_code", "language_code.*"),
+ Set.of("_index", "_index.*", "language*", "languages", "languages.*", "language_code", "language_code.*"),
Set.of() // Since we have KEEP after the LOOKUP, we can use the global field names instead of wildcarding the lookup index
);
}
@@ -1688,7 +1828,18 @@ public void testMultiLookupJoinKeepBefore() {
| KEEP @timestamp, client_ip, event_duration, message
| LOOKUP JOIN clientips_lookup ON client_ip
| LOOKUP JOIN message_types_lookup ON message""",
- Set.of("@timestamp", "@timestamp.*", "client_ip", "client_ip.*", "event_duration", "event_duration.*", "message", "message.*"),
+ Set.of(
+ "_index",
+ "_index.*",
+ "@timestamp",
+ "@timestamp.*",
+ "client_ip",
+ "client_ip.*",
+ "event_duration",
+ "event_duration.*",
+ "message",
+ "message.*"
+ ),
Set.of("clientips_lookup", "message_types_lookup") // Since the KEEP is before both JOINS we need to wildcard both indices
);
}
@@ -1703,6 +1854,8 @@ public void testMultiLookupJoinKeepBetween() {
| KEEP @timestamp, client_ip, event_duration, message, env
| LOOKUP JOIN message_types_lookup ON message""",
Set.of(
+ "_index",
+ "_index.*",
"@timestamp",
"@timestamp.*",
"client_ip",
@@ -1728,6 +1881,8 @@ public void testMultiLookupJoinKeepAfter() {
| LOOKUP JOIN message_types_lookup ON message
| KEEP @timestamp, client_ip, event_duration, message, env, type""",
Set.of(
+ "_index",
+ "_index.*",
"@timestamp",
"@timestamp.*",
"client_ip",
@@ -1754,7 +1909,7 @@ public void testMultiLookupJoinKeepAfterWildcard() {
| LOOKUP JOIN clientips_lookup ON client_ip
| LOOKUP JOIN message_types_lookup ON message
| KEEP *env*, *type*""",
- Set.of("*env*", "*type*", "client_ip", "client_ip.*", "message", "message.*"),
+ Set.of("_index", "_index.*", "*env*", "*type*", "client_ip", "client_ip.*", "message", "message.*"),
Set.of() // Since the KEEP is after both JOINs, we can use the global field names
);
}
@@ -1783,7 +1938,18 @@ public void testMultiLookupJoinSameIndexKeepBefore() {
| LOOKUP JOIN clientips_lookup ON client_ip
| EVAL client_ip = message
| LOOKUP JOIN clientips_lookup ON client_ip""",
- Set.of("@timestamp", "@timestamp.*", "client_ip", "client_ip.*", "event_duration", "event_duration.*", "message", "message.*"),
+ Set.of(
+ "_index",
+ "_index.*",
+ "@timestamp",
+ "@timestamp.*",
+ "client_ip",
+ "client_ip.*",
+ "event_duration",
+ "event_duration.*",
+ "message",
+ "message.*"
+ ),
Set.of("clientips_lookup") // Since there is no KEEP after the last JOIN, we need to wildcard the index
);
}
@@ -1799,6 +1965,8 @@ public void testMultiLookupJoinSameIndexKeepBetween() {
| EVAL client_ip = message
| LOOKUP JOIN clientips_lookup ON client_ip""",
Set.of(
+ "_index",
+ "_index.*",
"@timestamp",
"@timestamp.*",
"client_ip",
@@ -1825,6 +1993,8 @@ public void testMultiLookupJoinSameIndexKeepAfter() {
| LOOKUP JOIN clientips_lookup ON client_ip
| KEEP @timestamp, client_ip, event_duration, message, env""",
Set.of(
+ "_index",
+ "_index.*",
"@timestamp",
"@timestamp.*",
"client_ip",
@@ -1844,7 +2014,7 @@ public void testInsist_fieldIsMappedToNonKeywordSingleIndex() {
assumeTrue("UNMAPPED_FIELDS available as snapshot only", EsqlCapabilities.Cap.UNMAPPED_FIELDS.isEnabled());
assertFieldNames(
"FROM partial_mapping_sample_data | INSIST_🐔 client_ip | KEEP @timestamp, client_ip",
- Set.of("@timestamp", "@timestamp.*", "client_ip", "client_ip.*"),
+ Set.of("_index", "_index.*", "@timestamp", "@timestamp.*", "client_ip", "client_ip.*"),
Set.of()
);
}
@@ -1853,7 +2023,7 @@ public void testInsist_fieldIsMappedToKeywordSingleIndex() {
assumeTrue("UNMAPPED_FIELDS available as snapshot only", EsqlCapabilities.Cap.UNMAPPED_FIELDS.isEnabled());
assertFieldNames(
"FROM partial_mapping_sample_data | INSIST_🐔 message | KEEP @timestamp, message",
- Set.of("@timestamp", "@timestamp.*", "message", "message.*"),
+ Set.of("_index", "_index.*", "@timestamp", "@timestamp.*", "message", "message.*"),
Set.of()
);
}
@@ -1862,7 +2032,7 @@ public void testInsist_fieldDoesNotExistSingleIndex() {
assumeTrue("UNMAPPED_FIELDS available as snapshot only", EsqlCapabilities.Cap.UNMAPPED_FIELDS.isEnabled());
assertFieldNames(
"FROM partial_mapping_sample_data | INSIST_🐔 foo | KEEP @timestamp, foo",
- Set.of("@timestamp", "@timestamp.*", "foo", "foo.*"),
+ Set.of("_index", "_index.*", "@timestamp", "@timestamp.*", "foo", "foo.*"),
Set.of()
);
}
@@ -1871,7 +2041,7 @@ public void testInsist_fieldIsUnmappedSingleIndex() {
assumeTrue("UNMAPPED_FIELDS available as snapshot only", EsqlCapabilities.Cap.UNMAPPED_FIELDS.isEnabled());
assertFieldNames(
"FROM partial_mapping_sample_data | INSIST_🐔 unmapped_message | KEEP @timestamp, unmapped_message",
- Set.of("@timestamp", "@timestamp.*", "unmapped_message", "unmapped_message.*"),
+ Set.of("_index", "_index.*", "@timestamp", "@timestamp.*", "unmapped_message", "unmapped_message.*"),
Set.of()
);
}
@@ -1881,6 +2051,8 @@ public void testInsist_multiFieldTestSingleIndex() {
assertFieldNames(
"FROM partial_mapping_sample_data | INSIST_🐔 message, unmapped_message, client_ip, foo | KEEP @timestamp, unmapped_message",
Set.of(
+ "_index",
+ "_index.*",
"@timestamp",
"@timestamp.*",
"message",
@@ -1900,7 +2072,7 @@ public void testInsist_fieldIsMappedToDifferentTypesMultiIndex() {
assumeTrue("UNMAPPED_FIELDS available as snapshot only", EsqlCapabilities.Cap.UNMAPPED_FIELDS.isEnabled());
assertFieldNames(
"FROM sample_data_ts_long, sample_data METADATA _index | INSIST_🐔 @timestamp | KEEP _index, @timestamp",
- Set.of("@timestamp", "@timestamp.*"),
+ Set.of("_index", "_index.*", "@timestamp", "@timestamp.*"),
Set.of()
);
}
@@ -1913,7 +2085,18 @@ public void testInsist_multiFieldMappedMultiIndex() {
| INSIST_🐔 @timestamp, unmapped_message
| INSIST_🐔 message, foo
| KEEP _index, @timestamp, message, foo""",
- Set.of("@timestamp", "@timestamp.*", "message", "message.*", "unmapped_message", "unmapped_message.*", "foo", "foo.*"),
+ Set.of(
+ "_index",
+ "_index.*",
+ "@timestamp",
+ "@timestamp.*",
+ "message",
+ "message.*",
+ "unmapped_message",
+ "unmapped_message.*",
+ "foo",
+ "foo.*"
+ ),
Set.of()
);
}
@@ -1928,42 +2111,62 @@ public void testJoinMaskingKeep() {
| rename type as message
| lookup join message_types_lookup on message
| keep `language.name`""",
- Set.of("language.name", "type", "language_name", "message", "language_name.*", "message.*", "type.*", "language.name.*")
+ Set.of(
+ "_index",
+ "_index.*",
+ "language.name",
+ "type",
+ "language_name",
+ "message",
+ "language_name.*",
+ "message.*",
+ "type.*",
+ "language.name.*"
+ )
);
}
public void testJoinMaskingKeep2() {
- assertFieldNames("""
- from languag*
- | eval type = "foo"
- | rename type as message
- | lookup join message_types_lookup on message
- | rename type as message
- | lookup join message_types_lookup on message
- | keep `language.name`""", Set.of("language.name", "type", "message", "message.*", "type.*", "language.name.*"));
+ assertFieldNames(
+ """
+ from languag*
+ | eval type = "foo"
+ | rename type as message
+ | lookup join message_types_lookup on message
+ | rename type as message
+ | lookup join message_types_lookup on message
+ | keep `language.name`""",
+ Set.of("_index", "_index.*", "language.name", "type", "message", "message.*", "type.*", "language.name.*")
+ );
}
public void testEnrichMaskingEvalOn() {
- assertFieldNames("""
- from employees
- | eval language_name = null
- | enrich languages_policy on languages
- | rename language_name as languages
- | eval languages = length(languages)
- | enrich languages_policy on languages
- | keep emp_no, language_name""", Set.of("emp_no", "language_name", "languages", "language_name.*", "languages.*", "emp_no.*"));
+ assertFieldNames(
+ """
+ from employees
+ | eval language_name = null
+ | enrich languages_policy on languages
+ | rename language_name as languages
+ | eval languages = length(languages)
+ | enrich languages_policy on languages
+ | keep emp_no, language_name""",
+ Set.of("_index", "_index.*", "emp_no", "language_name", "languages", "language_name.*", "languages.*", "emp_no.*")
+ );
}
public void testEnrichAndJoinMaskingEvalWh() {
- assertFieldNames("""
- from employees
- | eval language_name = null
- | enrich languages_policy on languages
- | rename language_name as languages
- | eval languages = length(languages)
- | enrich languages_policy on languages
- | lookup join message_types_lookup on language_name
- | keep emp_no, language_name""", Set.of("emp_no", "language_name", "languages", "language_name.*", "languages.*", "emp_no.*"));
+ assertFieldNames(
+ """
+ from employees
+ | eval language_name = null
+ | enrich languages_policy on languages
+ | rename language_name as languages
+ | eval languages = length(languages)
+ | enrich languages_policy on languages
+ | lookup join message_types_lookup on language_name
+ | keep emp_no, language_name""",
+ Set.of("_index", "_index.*", "emp_no", "language_name", "languages", "language_name.*", "languages.*", "emp_no.*")
+ );
}
public void testDropAgainWithWildcardAfterEval() {
@@ -1973,7 +2176,7 @@ public void testDropAgainWithWildcardAfterEval() {
| drop full_name
| drop *name
| keep emp_no
- """, Set.of("emp_no", "emp_no.*", "*name", "*name.*"));
+ """, Set.of("_index", "_index.*", "emp_no", "emp_no.*", "*name", "*name.*"));
}
public void testDropWildcardFieldsAfterRename() {
@@ -1985,7 +2188,18 @@ public void testDropWildcardFieldsAfterRename() {
| drop first_names
| drop *_names
| keep gender""",
- Set.of("first_name", "first_name.*", "last_name", "last_name.*", "*_names", "*_names.*", "gender", "gender.*")
+ Set.of(
+ "_index",
+ "_index.*",
+ "first_name",
+ "first_name.*",
+ "last_name",
+ "last_name.*",
+ "*_names",
+ "*_names.*",
+ "gender",
+ "gender.*"
+ )
);
}
@@ -2023,7 +2237,19 @@ public void testDropWildcardFieldsAfterLookupJoinsAndKeep() {
| KEEP @timestamp, message, *e*
| SORT @timestamp
| DROP *e""",
- Set.of("client_ip", "client_ip.*", "message", "message.*", "@timestamp", "@timestamp.*", "*e*", "*e", "*e.*"),
+ Set.of(
+ "_index",
+ "_index.*",
+ "client_ip",
+ "client_ip.*",
+ "message",
+ "message.*",
+ "@timestamp",
+ "@timestamp.*",
+ "*e*",
+ "*e",
+ "*e.*"
+ ),
Set.of()
);
}
@@ -2039,7 +2265,19 @@ public void testDropWildcardFieldsAfterLookupJoinKeepLookupJoin() {
| LOOKUP JOIN message_types_lookup ON message
| SORT @timestamp
| DROP *e""",
- Set.of("client_ip", "client_ip.*", "message", "message.*", "@timestamp", "@timestamp.*", "*e*", "*e", "*e.*"),
+ Set.of(
+ "_index",
+ "_index.*",
+ "client_ip",
+ "client_ip.*",
+ "message",
+ "message.*",
+ "@timestamp",
+ "@timestamp.*",
+ "*e*",
+ "*e",
+ "*e.*"
+ ),
Set.of("message_types_lookup")
);
}
@@ -2055,7 +2293,19 @@ public void testDropWildcardFieldsAfterKeepAndLookupJoins() {
| LOOKUP JOIN message_types_lookup ON message
| SORT @timestamp
| DROP *e""",
- Set.of("client_ip", "client_ip.*", "message", "message.*", "@timestamp", "@timestamp.*", "*e*", "*e", "*e.*"),
+ Set.of(
+ "_index",
+ "_index.*",
+ "client_ip",
+ "client_ip.*",
+ "message",
+ "message.*",
+ "@timestamp",
+ "@timestamp.*",
+ "*e*",
+ "*e",
+ "*e.*"
+ ),
Set.of("clientips_lookup", "message_types_lookup")
);
}
@@ -2072,7 +2322,19 @@ public void testDropWildcardFieldsAfterKeepAndLookupJoins2() {
| LOOKUP JOIN message_types_lookup ON message
| SORT @timestamp
| DROP *e, client_ip""",
- Set.of("client_ip", "client_ip.*", "message", "message.*", "@timestamp", "@timestamp.*", "*e*", "*e", "*e.*"),
+ Set.of(
+ "_index",
+ "_index.*",
+ "client_ip",
+ "client_ip.*",
+ "message",
+ "message.*",
+ "@timestamp",
+ "@timestamp.*",
+ "*e*",
+ "*e",
+ "*e.*"
+ ),
Set.of("clientips_lookup", "message_types_lookup")
);
}
@@ -2086,7 +2348,7 @@ public void testForkFieldsWithKeepAfterFork() {
(WHERE d > 1000 AND e == "aaa" | EVAL c = a + 200)
| WHERE x > y
| KEEP a, b, c, d, x
- """, Set.of("a", "x", "y", "c", "d", "e", "e.*", "d.*", "y.*", "x.*", "a.*", "c.*"));
+ """, Set.of("_index", "_index.*", "a", "x", "y", "c", "d", "e", "e.*", "d.*", "y.*", "x.*", "a.*", "c.*"));
}
public void testForkFieldsWithKeepBeforeFork() {
@@ -2098,7 +2360,7 @@ public void testForkFieldsWithKeepBeforeFork() {
| FORK (WHERE c > 1 AND a < 10000 | EVAL d = a + 500)
(WHERE d > 1000 AND e == "aaa" | EVAL c = a + 200)
| WHERE x > y
- """, Set.of("x", "y", "a", "d", "e", "b", "c", "e.*", "d.*", "y.*", "x.*", "a.*", "c.*", "b.*"));
+ """, Set.of("_index", "_index.*", "x", "y", "a", "d", "e", "b", "c", "e.*", "d.*", "y.*", "x.*", "a.*", "c.*", "b.*"));
}
public void testForkFieldsWithNoProjection() {
@@ -2120,7 +2382,7 @@ public void testForkFieldsWithStatsInOneBranch() {
| FORK (WHERE c > 1 AND a < 10000 | EVAL d = a + 500)
(STATS x = count(*), y=min(z))
| WHERE x > y
- """, Set.of("x", "y", "a", "c", "z", "y.*", "x.*", "z.*", "a.*", "c.*"));
+ """, Set.of("_index", "_index.*", "x", "y", "a", "c", "z", "y.*", "x.*", "z.*", "a.*", "c.*"));
}
public void testForkFieldsWithEnrichAndLookupJoins() {
@@ -2136,7 +2398,28 @@ public void testForkFieldsWithEnrichAndLookupJoins() {
| LOOKUP JOIN my_lookup_index ON xyz
| WHERE x > y OR _fork == "fork1"
""",
- Set.of("x", "y", "a", "c", "abc", "b", "def", "z", "xyz", "def.*", "y.*", "x.*", "xyz.*", "z.*", "abc.*", "a.*", "c.*", "b.*"),
+ Set.of(
+ "_index",
+ "_index.*",
+ "x",
+ "y",
+ "a",
+ "c",
+ "abc",
+ "b",
+ "def",
+ "z",
+ "xyz",
+ "def.*",
+ "y.*",
+ "x.*",
+ "xyz.*",
+ "z.*",
+ "abc.*",
+ "a.*",
+ "c.*",
+ "b.*"
+ ),
Set.of("my_lookup_index")
);
}
@@ -2150,7 +2433,7 @@ public void testForkWithStatsInAllBranches() {
(EVAL z = a * b | STATS m = max(z))
(STATS x = count(*), y=min(z))
| WHERE x > y
- """, Set.of("x", "y", "c", "a", "z", "y.*", "x.*", "z.*", "a.*", "c.*"));
+ """, Set.of("_index", "_index.*", "x", "y", "c", "a", "z", "y.*", "x.*", "z.*", "a.*", "c.*"));
}
public void testForkWithStatsInAllBranches1() {
@@ -2159,7 +2442,7 @@ public void testForkWithStatsInAllBranches1() {
| FORK
( STATS x = min(last_name))
( EVAL last_name = first_name | STATS y = max(last_name))
- """, Set.of("first_name", "last_name", "first_name.*", "last_name.*"));
+ """, Set.of("_index", "_index.*", "first_name", "last_name", "first_name.*", "last_name.*"));
}
public void testForkWithStatsInAllBranches2() {
@@ -2168,7 +2451,7 @@ public void testForkWithStatsInAllBranches2() {
| FORK
( EVAL last_name = first_name | STATS y = VALUES(last_name))
( STATS x = VALUES(last_name))
- """, Set.of("first_name", "last_name", "first_name.*", "last_name.*"));
+ """, Set.of("_index", "_index.*", "first_name", "last_name", "first_name.*", "last_name.*"));
}
public void testForkWithStatsAndWhere() {
@@ -2211,7 +2494,7 @@ public void testForkRefs1() {
| FORK
( EVAL x = first_name)
( EVAL x = last_name)
- """, Set.of("first_name", "last_name", "last_name.*", "first_name.*"));
+ """, Set.of("_index", "_index.*", "first_name", "last_name", "last_name.*", "first_name.*"));
}
public void testForkRefs2() {
@@ -2220,7 +2503,7 @@ public void testForkRefs2() {
| FORK
( KEEP first_name | EVAL x = first_name)
( KEEP last_name | EVAL x = last_name)
- """, Set.of("first_name", "last_name", "last_name.*", "first_name.*"));
+ """, Set.of("_index", "_index.*", "first_name", "last_name", "last_name.*", "first_name.*"));
}
public void testForkRefs3() {
@@ -2229,33 +2512,52 @@ public void testForkRefs3() {
| FORK
( KEEP first_name | EVAL last_name = first_name)
( KEEP first_name | EVAL x = first_name)
- """, Set.of("first_name", "first_name.*"));
+ """, Set.of("_index", "_index.*", "first_name", "first_name.*"));
}
public void testForkRef4() {
- assertFieldNames("""
- from employees
- | sort emp_no
- | limit 1
- | FORK
- (eval x = to_string(languages) | enrich languages_policy on x | keep language_name)
- (eval y = to_string(emp_no) | enrich languages_policy on y | keep emp_no)
- """, Set.of("emp_no", "emp_no.*", "languages", "languages.*", "language_name", "language_name.*", "x", "x.*", "y", "y.*"));
+ assertFieldNames(
+ """
+ from employees
+ | sort emp_no
+ | limit 1
+ | FORK
+ (eval x = to_string(languages) | enrich languages_policy on x | keep language_name)
+ (eval y = to_string(emp_no) | enrich languages_policy on y | keep emp_no)
+ """,
+ Set.of(
+ "_index",
+ "_index.*",
+ "emp_no",
+ "emp_no.*",
+ "languages",
+ "languages.*",
+ "language_name",
+ "language_name.*",
+ "x",
+ "x.*",
+ "y",
+ "y.*"
+ )
+ );
}
public void testRerankerAfterFuse() {
assumeTrue("FUSE required", EsqlCapabilities.Cap.FUSE.isEnabled());
assertTrue("FORK required", EsqlCapabilities.Cap.FORK_V9.isEnabled());
- assertFieldNames("""
- FROM books METADATA _id, _index, _score
- | FORK ( WHERE title:"Tolkien" | SORT _score, _id DESC | LIMIT 3 )
- ( WHERE author:"Tolkien" | SORT _score, _id DESC | LIMIT 3 )
- | FUSE
- | RERANK "Tolkien" ON title WITH { "inference_id" : "test_reranker" }
- | EVAL _score=ROUND(_score, 2)
- | SORT _score DESC, book_no ASC
- | LIMIT 2
- | KEEP book_no, title, author, _score""", Set.of("book_no", "title", "author", "title.*", "author.*", "book_no.*"));
+ assertFieldNames(
+ """
+ FROM books METADATA _id, _index, _score
+ | FORK ( WHERE title:"Tolkien" | SORT _score, _id DESC | LIMIT 3 )
+ ( WHERE author:"Tolkien" | SORT _score, _id DESC | LIMIT 3 )
+ | FUSE
+ | RERANK "Tolkien" ON title WITH { "inference_id" : "test_reranker" }
+ | EVAL _score=ROUND(_score, 2)
+ | SORT _score DESC, book_no ASC
+ | LIMIT 2
+ | KEEP book_no, title, author, _score""",
+ Set.of("_index", "_index.*", "book_no", "title", "author", "title.*", "author.*", "book_no.*")
+ );
}
public void testSimpleFuse() {
@@ -2268,7 +2570,7 @@ public void testSimpleFuse() {
| FUSE
| EVAL _score = round(_score, 4)
| KEEP _score, _fork, emp_no
- | SORT _score, _fork, emp_no""", Set.of("emp_no", "emp_no.*"));
+ | SORT _score, _fork, emp_no""", Set.of("_index", "_index.*", "emp_no", "emp_no.*"));
}
public void testFuseWithMatchAndScore() {
@@ -2282,7 +2584,7 @@ public void testFuseWithMatchAndScore() {
| SORT _score DESC, _id, _index
| EVAL _fork = mv_sort(_fork)
| EVAL _score = round(_score, 5)
- | KEEP _score, _fork, _id""", Set.of("title", "author", "title.*", "author.*"));
+ | KEEP _score, _fork, _id""", Set.of("_index", "_index.*", "title", "author", "title.*", "author.*"));
}
public void testFuseWithDisjunctionAndPostFilter() {
@@ -2297,7 +2599,7 @@ public void testFuseWithDisjunctionAndPostFilter() {
| EVAL _fork = mv_sort(_fork)
| EVAL _score = round(_score, 5)
| KEEP _score, _fork, _id
- | WHERE _score > 0.014""", Set.of("title", "author", "title.*", "author.*"));
+ | WHERE _score > 0.014""", Set.of("_index", "_index.*", "title", "author", "title.*", "author.*"));
}
public void testFuseWithStats() {
@@ -2310,38 +2612,44 @@ public void testFuseWithStats() {
( WHERE author:"Ursula K. Le Guin" AND title:"short stories" | SORT _score, _id DESC | LIMIT 3)
| FUSE
| STATS count_fork=COUNT(*) BY _fork
- | SORT _fork""", Set.of("title", "author", "title.*", "author.*"));
+ | SORT _fork""", Set.of("_index", "_index.*", "title", "author", "title.*", "author.*"));
}
public void testFuseWithMultipleForkBranches() {
assumeTrue("FUSE required", EsqlCapabilities.Cap.FUSE.isEnabled());
assertTrue("FORK required", EsqlCapabilities.Cap.FORK_V9.isEnabled());
- assertFieldNames("""
- FROM books METADATA _id, _index, _score
- | FORK (WHERE author:"Keith Faulkner" AND qstr("author:Rory or author:Beverlie") | SORT _score, _id DESC | LIMIT 3)
- (WHERE author:"Ursula K. Le Guin" | SORT _score, _id DESC | LIMIT 3)
- (WHERE title:"Tolkien" AND author:"Tolkien" AND year > 2000 AND mv_count(author) == 1 | SORT _score, _id DESC | LIMIT 3)
- (WHERE match(author, "Keith Faulkner") AND match(author, "Rory Tyger") | SORT _score, _id DESC | LIMIT 3)
- | FUSE
- | SORT _score DESC, _id, _index
- | EVAL _fork = mv_sort(_fork)
- | EVAL _score = round(_score, 4)
- | EVAL title = trim(substring(title, 1, 20))
- | KEEP _score, author, title, _fork""", Set.of("author", "title", "year", "title.*", "author.*", "year.*"));
+ assertFieldNames(
+ """
+ FROM books METADATA _id, _index, _score
+ | FORK (WHERE author:"Keith Faulkner" AND qstr("author:Rory or author:Beverlie") | SORT _score, _id DESC | LIMIT 3)
+ (WHERE author:"Ursula K. Le Guin" | SORT _score, _id DESC | LIMIT 3)
+ (WHERE title:"Tolkien" AND author:"Tolkien" AND year > 2000 AND mv_count(author) == 1 | SORT _score, _id DESC | LIMIT 3)
+ (WHERE match(author, "Keith Faulkner") AND match(author, "Rory Tyger") | SORT _score, _id DESC | LIMIT 3)
+ | FUSE
+ | SORT _score DESC, _id, _index
+ | EVAL _fork = mv_sort(_fork)
+ | EVAL _score = round(_score, 4)
+ | EVAL title = trim(substring(title, 1, 20))
+ | KEEP _score, author, title, _fork""",
+ Set.of("_index", "_index.*", "author", "title", "year", "title.*", "author.*", "year.*")
+ );
}
public void testFuseWithSemanticSearch() {
assumeTrue("FUSE required", EsqlCapabilities.Cap.FUSE.isEnabled());
assertTrue("FORK required", EsqlCapabilities.Cap.FORK_V9.isEnabled());
- assertFieldNames("""
- FROM semantic_text METADATA _id, _score, _index
- | FORK ( WHERE semantic_text_field:"something" | SORT _score DESC | LIMIT 2)
- ( WHERE semantic_text_field:"something else" | SORT _score DESC | LIMIT 2)
- | FUSE
- | SORT _score DESC, _id, _index
- | EVAL _score = round(_score, 4)
- | EVAL _fork = mv_sort(_fork)
- | KEEP _fork, _score, _id, semantic_text_field""", Set.of("semantic_text_field", "semantic_text_field.*"));
+ assertFieldNames(
+ """
+ FROM semantic_text METADATA _id, _score, _index
+ | FORK ( WHERE semantic_text_field:"something" | SORT _score DESC | LIMIT 2)
+ ( WHERE semantic_text_field:"something else" | SORT _score DESC | LIMIT 2)
+ | FUSE
+ | SORT _score DESC, _id, _index
+ | EVAL _score = round(_score, 4)
+ | EVAL _fork = mv_sort(_fork)
+ | KEEP _fork, _score, _id, semantic_text_field""",
+ Set.of("_index", "_index.*", "semantic_text_field", "semantic_text_field.*")
+ );
}
public void testSimpleFork() {
@@ -2350,7 +2658,7 @@ public void testSimpleFork() {
| FORK ( WHERE emp_no == 10001 )
( WHERE emp_no == 10002 )
| KEEP emp_no, _fork
- | SORT emp_no""", Set.of("emp_no", "emp_no.*"));
+ | SORT emp_no""", Set.of("_index", "_index.*", "emp_no", "emp_no.*"));
}
public void testSimpleForkWithStats() {
@@ -2360,16 +2668,19 @@ public void testSimpleForkWithStats() {
| EVAL score = round(_score, 2)
| FORK (SORT score DESC, author | LIMIT 5 | KEEP author, score)
(STATS total = COUNT(*))
- | SORT _fork, score DESC, author""", Set.of("score", "author", "score.*", "author.*"));
+ | SORT _fork, score DESC, author""", Set.of("_index", "_index.*", "score", "author", "score.*", "author.*"));
}
public void testForkWithWhereSortAndLimit() {
- assertFieldNames("""
- FROM employees
- | FORK ( WHERE hire_date < "1985-03-01T00:00:00Z" | SORT first_name | LIMIT 5 )
- ( WHERE hire_date < "1988-03-01T00:00:00Z" | SORT first_name | LIMIT 5 )
- | KEEP emp_no, first_name, _fork
- | SORT emp_no, _fork""", Set.of("emp_no", "first_name", "hire_date", "first_name.*", "hire_date.*", "emp_no.*"));
+ assertFieldNames(
+ """
+ FROM employees
+ | FORK ( WHERE hire_date < "1985-03-01T00:00:00Z" | SORT first_name | LIMIT 5 )
+ ( WHERE hire_date < "1988-03-01T00:00:00Z" | SORT first_name | LIMIT 5 )
+ | KEEP emp_no, first_name, _fork
+ | SORT emp_no, _fork""",
+ Set.of("_index", "_index.*", "emp_no", "first_name", "hire_date", "first_name.*", "hire_date.*", "emp_no.*")
+ );
}
public void testFiveFork() {
@@ -2381,16 +2692,19 @@ public void testFiveFork() {
( WHERE emp_no == 10002 )
( WHERE emp_no == 10001 )
| KEEP _fork, emp_no
- | SORT _fork""", Set.of("emp_no", "emp_no.*"));
+ | SORT _fork""", Set.of("_index", "_index.*", "emp_no", "emp_no.*"));
}
public void testForkWithWhereSortDescAndLimit() {
- assertFieldNames("""
- FROM employees
- | FORK ( WHERE hire_date < "1985-03-01T00:00:00Z" | SORT first_name DESC | LIMIT 2 )
- ( WHERE hire_date < "1988-03-01T00:00:00Z" | SORT first_name DESC NULLS LAST | LIMIT 2 )
- | KEEP _fork, emp_no, first_name
- | SORT _fork, first_name DESC""", Set.of("first_name", "emp_no", "hire_date", "first_name.*", "hire_date.*", "emp_no.*"));
+ assertFieldNames(
+ """
+ FROM employees
+ | FORK ( WHERE hire_date < "1985-03-01T00:00:00Z" | SORT first_name DESC | LIMIT 2 )
+ ( WHERE hire_date < "1988-03-01T00:00:00Z" | SORT first_name DESC NULLS LAST | LIMIT 2 )
+ | KEEP _fork, emp_no, first_name
+ | SORT _fork, first_name DESC""",
+ Set.of("_index", "_index.*", "first_name", "emp_no", "hire_date", "first_name.*", "hire_date.*", "emp_no.*")
+ );
}
public void testForkWithCommonPrefilter() {
@@ -2400,17 +2714,20 @@ public void testForkWithCommonPrefilter() {
| FORK ( SORT emp_no ASC | LIMIT 2 )
( SORT emp_no DESC NULLS LAST | LIMIT 2 )
| KEEP _fork, emp_no
- | SORT _fork, emp_no""", Set.of("emp_no", "emp_no.*"));
+ | SORT _fork, emp_no""", Set.of("_index", "_index.*", "emp_no", "emp_no.*"));
}
public void testForkWithSemanticSearchAndScore() {
- assertFieldNames("""
- FROM semantic_text METADATA _id, _score
- | FORK ( WHERE semantic_text_field:"something" | SORT _score DESC | LIMIT 2)
- ( WHERE semantic_text_field:"something else" | SORT _score DESC | LIMIT 2)
- | EVAL _score = round(_score, 4)
- | SORT _fork, _score, _id
- | KEEP _fork, _score, _id, semantic_text_field""", Set.of("semantic_text_field", "semantic_text_field.*"));
+ assertFieldNames(
+ """
+ FROM semantic_text METADATA _id, _score
+ | FORK ( WHERE semantic_text_field:"something" | SORT _score DESC | LIMIT 2)
+ ( WHERE semantic_text_field:"something else" | SORT _score DESC | LIMIT 2)
+ | EVAL _score = round(_score, 4)
+ | SORT _fork, _score, _id
+ | KEEP _fork, _score, _id, semantic_text_field""",
+ Set.of("_index", "_index.*", "semantic_text_field", "semantic_text_field.*")
+ );
}
public void testForkWithEvals() {
@@ -2419,7 +2736,7 @@ public void testForkWithEvals() {
| FORK (WHERE emp_no == 10048 OR emp_no == 10081 | EVAL x = "abc" | EVAL y = 1)
(WHERE emp_no == 10081 OR emp_no == 10087 | EVAL x = "def" | EVAL z = 2)
| KEEP _fork, emp_no, x, y, z
- | SORT _fork, emp_no""", Set.of("emp_no", "x", "y", "z", "y.*", "x.*", "z.*", "emp_no.*"));
+ | SORT _fork, emp_no""", Set.of("_index", "_index.*", "emp_no", "x", "y", "z", "y.*", "x.*", "z.*", "emp_no.*"));
}
public void testForkWithStats() {
@@ -2430,7 +2747,7 @@ public void testForkWithStats() {
(STATS x = COUNT(*), y = MAX(emp_no), z = MIN(emp_no))
(STATS x = COUNT(*), y = MIN(emp_no))
| KEEP _fork, emp_no, x, y, z
- | SORT _fork, emp_no""", Set.of("emp_no", "x", "y", "z", "y.*", "x.*", "z.*", "emp_no.*"));
+ | SORT _fork, emp_no""", Set.of("_index", "_index.*", "emp_no", "x", "y", "z", "y.*", "x.*", "z.*", "emp_no.*"));
}
public void testForkWithDissect() {
@@ -2445,6 +2762,8 @@ public void testForkWithDissect() {
| KEEP _fork, emp_no, x, y, z, w
| SORT _fork, emp_no""",
Set.of(
+ "_index",
+ "_index.*",
"emp_no",
"x",
"y",
@@ -2477,6 +2796,8 @@ public void testForkWithMixOfCommands() {
| KEEP _fork, emp_no, x, y, z, a
| SORT _fork, emp_no""",
Set.of(
+ "_index",
+ "_index.*",
"emp_no",
"x",
"y",
@@ -2506,7 +2827,7 @@ public void testForkWithFiltersOnConstantValues() {
(STATS x = COUNT(*), y = MIN(emp_no))
| WHERE _fork == "fork2" OR a == "y"
| KEEP _fork, emp_no, x, y, z
- | SORT _fork, emp_no""", Set.of("emp_no", "a", "a.*", "emp_no.*"));
+ | SORT _fork, emp_no""", Set.of("_index", "_index.*", "emp_no", "a", "a.*", "emp_no.*"));
}
public void testForkWithUnsupportedAttributes() {
@@ -2531,6 +2852,8 @@ public void testForkAfterLookupJoin() {
| KEEP _fork, emp_no, language_code, language_name
| SORT _fork, emp_no""",
Set.of(
+ "_index",
+ "_index.*",
"emp_no",
"language_code",
"language_name",
@@ -2559,6 +2882,8 @@ public void testForkBeforeLookupJoin() {
| KEEP _fork, emp_no, language_code, language_name
| SORT _fork, emp_no""",
Set.of(
+ "_index",
+ "_index.*",
"emp_no",
"language_code",
"language_name",
@@ -2586,6 +2911,8 @@ public void testForkBranchWithLookupJoin() {
| KEEP _fork, emp_no, language_code, language_name
| SORT _fork, emp_no""",
Set.of(
+ "_index",
+ "_index.*",
"emp_no",
"language_code",
"language_name",
@@ -2612,37 +2939,43 @@ public void testForkBeforeStats() {
( SORT emp_no ASC | LIMIT 2 | EVAL x = last_name )
( EVAL x = "abc" | EVAL y = "aaa" )
| STATS c = count(*), m = max(_fork)""",
- Set.of("first_name", "emp_no", "last_name", "last_name.*", "first_name.*", "emp_no.*")
+ Set.of("_index", "_index.*", "first_name", "emp_no", "last_name", "last_name.*", "first_name.*", "emp_no.*")
);
}
public void testForkBeforeStatsWithWhere() {
- assertFieldNames("""
- FROM employees
- | WHERE emp_no == 10048 OR emp_no == 10081
- | FORK ( EVAL a = CONCAT(first_name, " ", emp_no::keyword, " ", last_name)
- | DISSECT a "%{x} %{y} %{z}"
- | EVAL y = y::keyword )
- ( STATS x = COUNT(*)::keyword, y = MAX(emp_no)::keyword, z = MIN(emp_no)::keyword )
- ( SORT emp_no ASC | LIMIT 2 | EVAL x = last_name )
- ( EVAL x = "abc" | EVAL y = "aaa" )
- | STATS a = count(*) WHERE _fork == "fork1",
- b = max(_fork)""", Set.of("first_name", "emp_no", "last_name", "last_name.*", "first_name.*", "emp_no.*"));
+ assertFieldNames(
+ """
+ FROM employees
+ | WHERE emp_no == 10048 OR emp_no == 10081
+ | FORK ( EVAL a = CONCAT(first_name, " ", emp_no::keyword, " ", last_name)
+ | DISSECT a "%{x} %{y} %{z}"
+ | EVAL y = y::keyword )
+ ( STATS x = COUNT(*)::keyword, y = MAX(emp_no)::keyword, z = MIN(emp_no)::keyword )
+ ( SORT emp_no ASC | LIMIT 2 | EVAL x = last_name )
+ ( EVAL x = "abc" | EVAL y = "aaa" )
+ | STATS a = count(*) WHERE _fork == "fork1",
+ b = max(_fork)""",
+ Set.of("_index", "_index.*", "first_name", "emp_no", "last_name", "last_name.*", "first_name.*", "emp_no.*")
+ );
}
public void testForkBeforeStatsByWithWhere() {
- assertFieldNames("""
- FROM employees
- | WHERE emp_no == 10048 OR emp_no == 10081
- | FORK ( EVAL a = CONCAT(first_name, " ", emp_no::keyword, " ", last_name)
- | DISSECT a "%{x} %{y} %{z}"
- | EVAL y = y::keyword )
- ( STATS x = COUNT(*)::keyword, y = MAX(emp_no)::keyword, z = MIN(emp_no)::keyword )
- ( SORT emp_no ASC | LIMIT 2 | EVAL x = last_name )
- ( EVAL x = "abc" | EVAL y = "aaa" )
- | STATS a = count(*) WHERE emp_no > 10000,
- b = max(x) WHERE _fork == "fork1" BY _fork
- | SORT _fork""", Set.of("emp_no", "x", "first_name", "last_name", "last_name.*", "x.*", "first_name.*", "emp_no.*"));
+ assertFieldNames(
+ """
+ FROM employees
+ | WHERE emp_no == 10048 OR emp_no == 10081
+ | FORK ( EVAL a = CONCAT(first_name, " ", emp_no::keyword, " ", last_name)
+ | DISSECT a "%{x} %{y} %{z}"
+ | EVAL y = y::keyword )
+ ( STATS x = COUNT(*)::keyword, y = MAX(emp_no)::keyword, z = MIN(emp_no)::keyword )
+ ( SORT emp_no ASC | LIMIT 2 | EVAL x = last_name )
+ ( EVAL x = "abc" | EVAL y = "aaa" )
+ | STATS a = count(*) WHERE emp_no > 10000,
+ b = max(x) WHERE _fork == "fork1" BY _fork
+ | SORT _fork""",
+ Set.of("_index", "_index.*", "emp_no", "x", "first_name", "last_name", "last_name.*", "x.*", "first_name.*", "emp_no.*")
+ );
}
public void testForkAfterDrop() {
@@ -2655,12 +2988,15 @@ public void testForkAfterDrop() {
}
public void testForkBranchWithDrop() {
- assertFieldNames("""
- FROM languages
- | FORK ( EVAL x = 1 | DROP language_code | WHERE language_name == "English" | DROP x )
- ( WHERE language_name != "English" )
- | SORT _fork, language_name
- | KEEP language_name, language_code, _fork""", Set.of("language_name", "language_code", "language_code.*", "language_name.*"));
+ assertFieldNames(
+ """
+ FROM languages
+ | FORK ( EVAL x = 1 | DROP language_code | WHERE language_name == "English" | DROP x )
+ ( WHERE language_name != "English" )
+ | SORT _fork, language_name
+ | KEEP language_name, language_code, _fork""",
+ Set.of("_index", "_index.*", "language_name", "language_code", "language_code.*", "language_name.*")
+ );
}
public void testForkBeforeDrop() {
@@ -2673,11 +3009,14 @@ public void testForkBeforeDrop() {
}
public void testForkBranchWithKeep() {
- assertFieldNames("""
- FROM languages
- | FORK ( WHERE language_name == "English" | KEEP language_name, language_code )
- ( WHERE language_name != "English" )
- | SORT _fork, language_name""", Set.of("language_name", "language_code", "language_code.*", "language_name.*"));
+ assertFieldNames(
+ """
+ FROM languages
+ | FORK ( WHERE language_name == "English" | KEEP language_name, language_code )
+ ( WHERE language_name != "English" )
+ | SORT _fork, language_name""",
+ Set.of("_index", "_index.*", "language_name", "language_code", "language_code.*", "language_name.*")
+ );
}
public void testForkBeforeRename() {
@@ -2690,21 +3029,27 @@ public void testForkBeforeRename() {
}
public void testForkBranchWithRenameAs() {
- assertFieldNames("""
- FROM languages
- | FORK (RENAME language_code AS code | WHERE code == 1 OR code == 2)
- (WHERE language_code == 1 | RENAME language_code AS x)
- | SORT _fork, language_name
- | KEEP code, language_name, x, _fork""", Set.of("language_name", "language_code", "language_code.*", "language_name.*"));
+ assertFieldNames(
+ """
+ FROM languages
+ | FORK (RENAME language_code AS code | WHERE code == 1 OR code == 2)
+ (WHERE language_code == 1 | RENAME language_code AS x)
+ | SORT _fork, language_name
+ | KEEP code, language_name, x, _fork""",
+ Set.of("_index", "_index.*", "language_name", "language_code", "language_code.*", "language_name.*")
+ );
}
public void testForkBranchWithRenameEquals() {
- assertFieldNames("""
- FROM languages
- | FORK (RENAME code = language_code | WHERE code == 1 OR code == 2)
- (WHERE language_code == 1 | RENAME x = language_code)
- | SORT _fork, language_name
- | KEEP code, language_name, x, _fork""", Set.of("language_name", "language_code", "language_code.*", "language_name.*"));
+ assertFieldNames(
+ """
+ FROM languages
+ | FORK (RENAME code = language_code | WHERE code == 1 OR code == 2)
+ (WHERE language_code == 1 | RENAME x = language_code)
+ | SORT _fork, language_name
+ | KEEP code, language_name, x, _fork""",
+ Set.of("_index", "_index.*", "language_name", "language_code", "language_code.*", "language_name.*")
+ );
}
public void testForkAfterRename() {
@@ -2724,7 +3069,7 @@ public void testForkBeforeDissect() {
| EVAL x = concat(gender, " foobar")
| DISSECT x "%{a} %{b}"
| SORT _fork, emp_no
- | KEEP emp_no, gender, x, a, b, _fork""", Set.of("emp_no", "gender", "gender.*", "emp_no.*"));
+ | KEEP emp_no, gender, x, a, b, _fork""", Set.of("_index", "_index.*", "emp_no", "gender", "gender.*", "emp_no.*"));
}
public void testForkBranchWithDissect() {
@@ -2735,7 +3080,7 @@ public void testForkBranchWithDissect() {
| DISSECT x "%{a} %{b}")
(WHERE emp_no == 10081 OR emp_no == 10087)
| SORT _fork, emp_no
- | KEEP emp_no, gender, x, a, b, _fork""", Set.of("emp_no", "gender", "gender.*", "emp_no.*"));
+ | KEEP emp_no, gender, x, a, b, _fork""", Set.of("_index", "_index.*", "emp_no", "gender", "gender.*", "emp_no.*"));
}
public void testForkAfterDissect() {
@@ -2746,7 +3091,7 @@ public void testForkAfterDissect() {
| FORK (WHERE emp_no == 10048 OR emp_no == 10081)
(WHERE emp_no == 10081 OR emp_no == 10087)
| SORT _fork, emp_no
- | KEEP emp_no, gender, x, a, b, _fork""", Set.of("emp_no", "gender", "gender.*", "emp_no.*"));
+ | KEEP emp_no, gender, x, a, b, _fork""", Set.of("_index", "_index.*", "emp_no", "gender", "gender.*", "emp_no.*"));
}
public void testForkAfterEnrich() {
@@ -2760,6 +3105,8 @@ public void testForkAfterEnrich() {
(WHERE city.country.name == "Japan")
| SORT _fork, city.name""",
Set.of(
+ "_index",
+ "_index.*",
"city.name",
"airport",
"city.country.continent.planet.name",
@@ -2782,6 +3129,8 @@ public void testForkBranchWithEnrich() {
(ENRICH city_names ON city.name WITH city.country.continent.planet.name = airport)
| SORT _fork, city.name""",
Set.of(
+ "_index",
+ "_index.*",
"city.name",
"airport",
"city.country.continent.planet.name",
@@ -2805,6 +3154,8 @@ public void testForkBeforeEnrich() {
| ENRICH city_names ON city.name WITH city.country.continent.planet.name = airport
| SORT _fork, city.name""",
Set.of(
+ "_index",
+ "_index.*",
"city.name",
"airport",
"city.country.name",
@@ -2824,7 +3175,7 @@ public void testForkBeforeMvExpand() {
| FORK (WHERE emp_no == 10048 OR emp_no == 10081)
(WHERE emp_no == 10081 OR emp_no == 10087)
| MV_EXPAND job_positions
- | SORT _fork, emp_no, job_positions""", Set.of("emp_no", "job_positions", "job_positions.*", "emp_no.*"));
+ | SORT _fork, emp_no, job_positions""", Set.of("_index", "_index.*", "emp_no", "job_positions", "job_positions.*", "emp_no.*"));
}
public void testForkBranchWithMvExpand() {
@@ -2833,7 +3184,7 @@ public void testForkBranchWithMvExpand() {
| KEEP emp_no, job_positions
| FORK (WHERE emp_no == 10048 OR emp_no == 10081 | MV_EXPAND job_positions)
(WHERE emp_no == 10081 OR emp_no == 10087)
- | SORT _fork, emp_no, job_positions""", Set.of("emp_no", "job_positions", "job_positions.*", "emp_no.*"));
+ | SORT _fork, emp_no, job_positions""", Set.of("_index", "_index.*", "emp_no", "job_positions", "job_positions.*", "emp_no.*"));
}
public void testForkAfterMvExpand() {
@@ -2843,7 +3194,7 @@ public void testForkAfterMvExpand() {
| MV_EXPAND job_positions
| FORK (WHERE emp_no == 10048 OR emp_no == 10081)
(WHERE emp_no == 10081 OR emp_no == 10087)
- | SORT _fork, emp_no, job_positions""", Set.of("emp_no", "job_positions", "job_positions.*", "emp_no.*"));
+ | SORT _fork, emp_no, job_positions""", Set.of("_index", "_index.*", "emp_no", "job_positions", "job_positions.*", "emp_no.*"));
}
public void testForkBeforeInlineStatsIgnore() {
@@ -2856,33 +3207,39 @@ public void testForkBeforeInlineStatsIgnore() {
(WHERE emp_no == 10081 OR emp_no == 10087)
| INLINESTATS max_lang = MAX(languages) BY gender
| SORT emp_no, gender, _fork
- | LIMIT 5""", Set.of("emp_no", "gender", "languages", "gender.*", "languages.*", "emp_no.*"));
+ | LIMIT 5""", Set.of("_index", "_index.*", "emp_no", "gender", "languages", "gender.*", "languages.*", "emp_no.*"));
}
public void testForkBranchWithInlineStatsIgnore() {
assumeTrue("INLINESTATS required", EsqlCapabilities.Cap.INLINESTATS_V9.isEnabled());
assertTrue("FORK required", EsqlCapabilities.Cap.FORK_V9.isEnabled());
- assertFieldNames("""
- FROM employees
- | KEEP emp_no, languages, gender
- | FORK (WHERE emp_no == 10048 OR emp_no == 10081
- | INLINESTATS x = MAX(languages) BY gender)
- (WHERE emp_no == 10081 OR emp_no == 10087
- | INLINESTATS x = MIN(languages))
- (WHERE emp_no == 10012 OR emp_no == 10012)
- | SORT emp_no, gender, _fork""", Set.of("emp_no", "gender", "languages", "gender.*", "languages.*", "emp_no.*"));
+ assertFieldNames(
+ """
+ FROM employees
+ | KEEP emp_no, languages, gender
+ | FORK (WHERE emp_no == 10048 OR emp_no == 10081
+ | INLINESTATS x = MAX(languages) BY gender)
+ (WHERE emp_no == 10081 OR emp_no == 10087
+ | INLINESTATS x = MIN(languages))
+ (WHERE emp_no == 10012 OR emp_no == 10012)
+ | SORT emp_no, gender, _fork""",
+ Set.of("_index", "_index.*", "emp_no", "gender", "languages", "gender.*", "languages.*", "emp_no.*")
+ );
}
public void testForkAfterInlineStatsIgnore() {
assumeTrue("INLINESTATS required", EsqlCapabilities.Cap.INLINESTATS_V9.isEnabled());
assertTrue("FORK required", EsqlCapabilities.Cap.FORK_V9.isEnabled());
- assertFieldNames("""
- FROM employees
- | KEEP emp_no, languages, gender
- | INLINESTATS max_lang = MAX(languages) BY gender
- | FORK (WHERE emp_no == 10048 OR emp_no == 10081)
- (WHERE emp_no == 10081 OR emp_no == 10087)
- | SORT emp_no, gender, _fork""", Set.of("emp_no", "gender", "languages", "gender.*", "languages.*", "emp_no.*"));
+ assertFieldNames(
+ """
+ FROM employees
+ | KEEP emp_no, languages, gender
+ | INLINESTATS max_lang = MAX(languages) BY gender
+ | FORK (WHERE emp_no == 10048 OR emp_no == 10081)
+ (WHERE emp_no == 10081 OR emp_no == 10087)
+ | SORT emp_no, gender, _fork""",
+ Set.of("_index", "_index.*", "emp_no", "gender", "languages", "gender.*", "languages.*", "emp_no.*")
+ );
}
public void testForkBeforeChangePoint() {
@@ -2894,7 +3251,7 @@ public void testForkBeforeChangePoint() {
(WHERE emp_no <= 10100)
| CHANGE_POINT salary ON emp_no
| STATS COUNT() by type
- | SORT type""", Set.of("type", "emp_no", "salary", "type.*", "salary.*", "emp_no.*"));
+ | SORT type""", Set.of("_index", "_index.*", "type", "emp_no", "salary", "type.*", "salary.*", "emp_no.*"));
}
public void testForkBranchWithChangePoint() {
@@ -2906,48 +3263,60 @@ public void testForkBranchWithChangePoint() {
(EVAL salary=CASE(emp_no==10087, 1000000, salary)
| CHANGE_POINT salary ON emp_no)
| STATS COUNT() by type, _fork
- | SORT _fork, type""", Set.of("type", "emp_no", "salary", "type.*", "salary.*", "emp_no.*"));
+ | SORT _fork, type""", Set.of("_index", "_index.*", "type", "emp_no", "salary", "type.*", "salary.*", "emp_no.*"));
}
public void testForkAfterChangePoint() {
- assertFieldNames("""
- FROM employees
- | KEEP emp_no, salary
- | EVAL salary = CASE(emp_no==10042, 1000000, salary)
- | CHANGE_POINT salary ON emp_no
- | FORK (STATS a = COUNT() by type)
- (STATS b = VALUES(type))
- | SORT _fork, a, type, b""", Set.of("a", "type", "b", "emp_no", "salary", "type.*", "a.*", "salary.*", "b.*", "emp_no.*"));
+ assertFieldNames(
+ """
+ FROM employees
+ | KEEP emp_no, salary
+ | EVAL salary = CASE(emp_no==10042, 1000000, salary)
+ | CHANGE_POINT salary ON emp_no
+ | FORK (STATS a = COUNT() by type)
+ (STATS b = VALUES(type))
+ | SORT _fork, a, type, b""",
+ Set.of("_index", "_index.*", "a", "type", "b", "emp_no", "salary", "type.*", "a.*", "salary.*", "b.*", "emp_no.*")
+ );
}
public void testForkBeforeCompletion() {
- assertFieldNames("""
- FROM employees
- | KEEP emp_no, first_name, last_name
- | FORK (WHERE emp_no == 10048 OR emp_no == 10081)
- (WHERE emp_no == 10081 OR emp_no == 10087)
- | COMPLETION x=CONCAT(first_name, " ", last_name) WITH { "inference_id" : "test_completion" }
- | SORT _fork, emp_no""", Set.of("emp_no", "first_name", "last_name", "last_name.*", "first_name.*", "emp_no.*"));
+ assertFieldNames(
+ """
+ FROM employees
+ | KEEP emp_no, first_name, last_name
+ | FORK (WHERE emp_no == 10048 OR emp_no == 10081)
+ (WHERE emp_no == 10081 OR emp_no == 10087)
+ | COMPLETION x=CONCAT(first_name, " ", last_name) WITH { "inference_id" : "test_completion" }
+ | SORT _fork, emp_no""",
+ Set.of("_index", "_index.*", "emp_no", "first_name", "last_name", "last_name.*", "first_name.*", "emp_no.*")
+ );
}
public void testForkBranchWithCompletion() {
- assertFieldNames("""
- FROM employees
- | KEEP emp_no, first_name, last_name
- | FORK (WHERE emp_no == 10048 OR emp_no == 10081
- | COMPLETION x=CONCAT(first_name, " ", last_name) WITH { "inference_id" : "test_completion" })
- (WHERE emp_no == 10081 OR emp_no == 10087)
- | SORT _fork, emp_no""", Set.of("emp_no", "first_name", "last_name", "last_name.*", "first_name.*", "emp_no.*"));
+ assertFieldNames(
+ """
+ FROM employees
+ | KEEP emp_no, first_name, last_name
+ | FORK (WHERE emp_no == 10048 OR emp_no == 10081
+ | COMPLETION x=CONCAT(first_name, " ", last_name) WITH { "inference_id" : "test_completion" })
+ (WHERE emp_no == 10081 OR emp_no == 10087)
+ | SORT _fork, emp_no""",
+ Set.of("_index", "_index.*", "emp_no", "first_name", "last_name", "last_name.*", "first_name.*", "emp_no.*")
+ );
}
public void testForkAfterCompletion() {
- assertFieldNames("""
- FROM employees
- | KEEP emp_no, first_name, last_name
- | COMPLETION x=CONCAT(first_name, " ", last_name) WITH { "inference_id" : "test_completion" }
- | FORK (WHERE emp_no == 10048 OR emp_no == 10081)
- (WHERE emp_no == 10081 OR emp_no == 10087)
- | SORT _fork, emp_no""", Set.of("emp_no", "first_name", "last_name", "last_name.*", "first_name.*", "emp_no.*"));
+ assertFieldNames(
+ """
+ FROM employees
+ | KEEP emp_no, first_name, last_name
+ | COMPLETION x=CONCAT(first_name, " ", last_name) WITH { "inference_id" : "test_completion" }
+ | FORK (WHERE emp_no == 10048 OR emp_no == 10081)
+ (WHERE emp_no == 10081 OR emp_no == 10087)
+ | SORT _fork, emp_no""",
+ Set.of("_index", "_index.*", "emp_no", "first_name", "last_name", "last_name.*", "first_name.*", "emp_no.*")
+ );
}
public void testForkAfterGrok() {
@@ -2958,7 +3327,7 @@ public void testForkAfterGrok() {
| FORK (WHERE emp_no == 10048 OR emp_no == 10081)
(WHERE emp_no == 10081 OR emp_no == 10087)
| SORT _fork, emp_no
- | KEEP emp_no, gender, x, a, b, _fork""", Set.of("emp_no", "gender", "gender.*", "emp_no.*"));
+ | KEEP emp_no, gender, x, a, b, _fork""", Set.of("_index", "_index.*", "emp_no", "gender", "gender.*", "emp_no.*"));
}
public void testForkBranchWithGrok() {
@@ -2972,7 +3341,22 @@ public void testForkBranchWithGrok() {
| GROK b "%{WORD:x} %{WORD:y} %{WORD:z}" )
| KEEP _fork, emp_no, x, y, z
| SORT _fork, emp_no""",
- Set.of("emp_no", "x", "y", "z", "first_name", "last_name", "y.*", "last_name.*", "x.*", "z.*", "first_name.*", "emp_no.*")
+ Set.of(
+ "_index",
+ "_index.*",
+ "emp_no",
+ "x",
+ "y",
+ "z",
+ "first_name",
+ "last_name",
+ "y.*",
+ "last_name.*",
+ "x.*",
+ "z.*",
+ "first_name.*",
+ "emp_no.*"
+ )
);
}
@@ -2984,7 +3368,7 @@ public void testForkBeforeGrok() {
| EVAL x = concat(gender, " foobar")
| GROK x "%{WORD:a} %{WORD:b}"
| SORT _fork, emp_no
- | KEEP emp_no, gender, x, a, b, _fork""", Set.of("emp_no", "gender", "gender.*", "emp_no.*"));
+ | KEEP emp_no, gender, x, a, b, _fork""", Set.of("_index", "_index.*", "emp_no", "gender", "gender.*", "emp_no.*"));
}
private void assertFieldNames(String query, Set expected) {
From de95b791947d8cb48d57d2b7891969157d1fc85a Mon Sep 17 00:00:00 2001
From: Luigi Dell'Aquila
Date: Fri, 22 Aug 2025 18:31:14 +0200
Subject: [PATCH 03/18] Update docs/changelog/133405.yaml
---
docs/changelog/133405.yaml | 5 +++++
1 file changed, 5 insertions(+)
create mode 100644 docs/changelog/133405.yaml
diff --git a/docs/changelog/133405.yaml b/docs/changelog/133405.yaml
new file mode 100644
index 0000000000000..30a86b310e050
--- /dev/null
+++ b/docs/changelog/133405.yaml
@@ -0,0 +1,5 @@
+pr: 133405
+summary: Fix wrong pruning of plans with no output columns
+area: ES|QL
+type: bug
+issues: []
From 42d364faf437300db0ca44fb04d2feaebb7637b4 Mon Sep 17 00:00:00 2001
From: Luigi Dell'Aquila
Date: Fri, 22 Aug 2025 19:57:12 +0200
Subject: [PATCH 04/18] Fix BWC
---
.../esql/qa/testFixtures/src/main/resources/drop.csv-spec | 4 ++++
.../elasticsearch/xpack/esql/action/EsqlCapabilities.java | 8 +++++++-
2 files changed, 11 insertions(+), 1 deletion(-)
diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/drop.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/drop.csv-spec
index 2c81a143b61d7..0298b00781edd 100644
--- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/drop.csv-spec
+++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/drop.csv-spec
@@ -49,6 +49,7 @@ b:integer | x:integer
;
dropAllColumns
+required_capability: fix_no_columns
from languages | keep language_code | drop language_code | eval x = 1;
x:integer
@@ -59,6 +60,7 @@ x:integer
;
dropAllColumns_WithLimit
+required_capability: fix_no_columns
from employees | keep height | drop height | eval x = 1 | limit 3;
x:integer
@@ -68,6 +70,7 @@ x:integer
;
dropAllColumns_WithCount
+required_capability: fix_no_columns
from languages | keep language_code | drop language_code | eval x = 1 | stats c=count(x);
c:long
@@ -75,6 +78,7 @@ c:long
;
dropAllColumns_WithStats
+required_capability: fix_no_columns
from languages | keep language_code | drop language_code | eval x = 1 | stats c=count(x), mi=min(x), s=sum(x);
c:l|mi:i|s:l
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java
index cdef9f8c33cbd..3c94c19237a9d 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java
@@ -1384,7 +1384,13 @@ public enum Cap {
/**
* Support for vector Hamming distance.
*/
- HAMMING_VECTOR_SIMILARITY_FUNCTION(Build.current().isSnapshot());
+ HAMMING_VECTOR_SIMILARITY_FUNCTION(Build.current().isSnapshot()),
+
+ /**
+ * Fix management of plans with no columns
+ * https://github.com/elastic/elasticsearch/issues/120272
+ */
+ FIX_NO_COLUMNS;
private final boolean enabled;
From c2f01d6537d152b2ce8df69b5e99a3bebdedee17 Mon Sep 17 00:00:00 2001
From: Luigi Dell'Aquila
Date: Mon, 25 Aug 2025 10:03:26 +0200
Subject: [PATCH 05/18] Restore original tests
---
.../src/main/resources/drop.csv-spec | 108 +++++++++++++++++-
1 file changed, 102 insertions(+), 6 deletions(-)
diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/drop.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/drop.csv-spec
index 0298b00781edd..0795701a6185d 100644
--- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/drop.csv-spec
+++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/drop.csv-spec
@@ -50,13 +50,109 @@ b:integer | x:integer
dropAllColumns
required_capability: fix_no_columns
-from languages | keep language_code | drop language_code | eval x = 1;
+from employees | keep height | drop height | eval x = 1;
x:integer
1
1
1
1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
;
dropAllColumns_WithLimit
@@ -71,18 +167,18 @@ x:integer
dropAllColumns_WithCount
required_capability: fix_no_columns
-from languages | keep language_code | drop language_code | eval x = 1 | stats c=count(x);
+from employees | keep height | drop height | eval x = 1 | stats c=count(x);
c:long
-4
+100
;
dropAllColumns_WithStats
required_capability: fix_no_columns
-from languages | keep language_code | drop language_code | eval x = 1 | stats c=count(x), mi=min(x), s=sum(x);
+from employees | keep height | drop height | eval x = 1 | stats c=count(x), mi=min(x), s=sum(x);
-c:l|mi:i|s:l
-4 |1 |4
+c:l | mi:i | s:l
+100 | 1 | 100
;
From 764cace7d95917baa52264b788ad3fed5c1f4369 Mon Sep 17 00:00:00 2001
From: Luigi Dell'Aquila
Date: Mon, 25 Aug 2025 11:35:03 +0200
Subject: [PATCH 06/18] Fix tests
---
.../esql/action/CrossClusterLookupJoinIT.java | 25 ++++++++++++++++++-
.../xpack/esql/action/EsqlActionIT.java | 2 +-
2 files changed, 25 insertions(+), 2 deletions(-)
diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterLookupJoinIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterLookupJoinIT.java
index fc6207a6e6872..d6224e1cd8717 100644
--- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterLookupJoinIT.java
+++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterLookupJoinIT.java
@@ -437,7 +437,7 @@ public void testLookupJoinEmptyIndex() throws IOException {
setSkipUnavailable(REMOTE_CLUSTER_1, randomBoolean());
Exception ex;
- for (String index : List.of("values_lookup", "values_lookup_map", "values_lookup_map_lookup")) {
+ for (String index : List.of("values_lookup", "values_lookup_map_lookup")) {
ex = expectThrows(
VerificationException.class,
() -> runQuery("FROM logs-* | LOOKUP JOIN " + index + " ON v | KEEP v", randomBoolean())
@@ -449,6 +449,29 @@ public void testLookupJoinEmptyIndex() throws IOException {
);
assertThat(ex.getMessage(), containsString("Unknown column [v] in right side of join"));
}
+
+ ex = expectThrows(
+ VerificationException.class,
+ () -> runQuery("FROM logs-* | LOOKUP JOIN values_lookup_map ON v | KEEP v", randomBoolean())
+ );
+ assertThat(
+ ex.getMessage(),
+ containsString(
+ "Lookup Join requires a single lookup mode index; "
+ + "[values_lookup_map] resolves to [values_lookup_map] in [standard] mode"
+ )
+ );
+ ex = expectThrows(
+ VerificationException.class,
+ () -> runQuery("FROM c*:logs-* | LOOKUP JOIN values_lookup_map ON v | KEEP v", randomBoolean())
+ );
+ assertThat(
+ ex.getMessage(),
+ containsString(
+ "Lookup Join requires a single lookup mode index; "
+ + "[values_lookup_map] resolves to [cluster-a:values_lookup_map] in [standard] mode"
+ )
+ );
}
public void testLookupJoinIndexMode() throws IOException {
diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java
index 1a5068fd38fcc..4f335733b9b47 100644
--- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java
+++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java
@@ -951,7 +951,7 @@ public void testDropAllColumns() {
logger.info(results);
assertThat(results.columns(), hasSize(1));
assertThat(results.columns(), contains(new ColumnInfoImpl("a", "integer", null)));
- assertThat(getValuesList(results), is(empty()));
+ assertThat(getValuesList(results).size(), is(40));
}
}
From 642b50aa13ab7aab414e3e1d7658322831d71e1b Mon Sep 17 00:00:00 2001
From: Luigi Dell'Aquila
Date: Mon, 25 Aug 2025 14:40:28 +0200
Subject: [PATCH 07/18] Refactor local suppliers to return a Page
---
.../org/elasticsearch/TransportVersions.java | 1 +
.../xpack/esql/EsqlTestUtils.java | 7 ++-
.../xpack/esql/action/EsqlActionIT.java | 2 +-
.../xpack/esql/analysis/Analyzer.java | 3 +-
.../rules/logical/PropagateEmptyRelation.java | 6 +-
.../optimizer/rules/logical/PruneColumns.java | 4 +-
.../rules/logical/PruneEmptyAggregates.java | 7 +--
.../logical/ReplaceRowAsLocalRelation.java | 3 +-
.../ReplaceStatsFilteredAggWithEval.java | 3 +-
.../SubstituteSurrogateAggregations.java | 4 +-
.../esql/plan/logical/join/InlineJoin.java | 8 ++-
.../logical/local/CopyingLocalSupplier.java | 16 ++---
.../logical/local/EmptyLocalSupplier.java | 6 +-
.../logical/local/ImmediateLocalSupplier.java | 42 +++++++++----
.../plan/logical/local/LocalSupplier.java | 7 ++-
.../esql/plan/physical/LocalSourceExec.java | 3 +-
.../esql/planner/LocalExecutionPlanner.java | 8 +--
.../xpack/esql/session/EsqlSession.java | 7 ++-
.../optimizer/LogicalPlanOptimizerTests.java | 59 ++++++++++---------
.../local/CopyingLocalSupplierTests.java | 6 +-
.../local/ImmediateLocalSupplierTests.java | 6 +-
.../LocalRelationSerializationTests.java | 3 +-
.../logical/local/LocalSupplierTests.java | 27 +++++----
23 files changed, 144 insertions(+), 94 deletions(-)
diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java
index 1338339b01173..3e37c24571794 100644
--- a/server/src/main/java/org/elasticsearch/TransportVersions.java
+++ b/server/src/main/java/org/elasticsearch/TransportVersions.java
@@ -368,6 +368,7 @@ static TransportVersion def(int id) {
public static final TransportVersion DATA_STREAM_WRITE_INDEX_ONLY_SETTINGS = def(9_142_0_00);
public static final TransportVersion SCRIPT_RESCORER = def(9_143_0_00);
public static final TransportVersion ESQL_LOOKUP_OPERATOR_EMITTED_ROWS = def(9_144_0_00);
+ public static final TransportVersion ESQL_PLAN_WITH_NO_COLUMNS = def(9_145_0_00);
/*
* STOP! READ THIS FIRST! No, really,
diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java
index 3551a75c65faf..18c79a98412e8 100644
--- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java
+++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java
@@ -34,6 +34,7 @@
import org.elasticsearch.compute.data.DoubleBlock;
import org.elasticsearch.compute.data.IntBlock;
import org.elasticsearch.compute.data.LongBlock;
+import org.elasticsearch.compute.data.Page;
import org.elasticsearch.core.PathUtils;
import org.elasticsearch.core.SuppressForbidden;
import org.elasticsearch.core.Tuple;
@@ -475,7 +476,11 @@ public static LogicalPlan emptySource() {
}
public static LogicalPlan localSource(BlockFactory blockFactory, List fields, List
*/
-public interface LocalSupplier extends Supplier, NamedWriteable {
+public interface LocalSupplier extends Supplier, NamedWriteable {
- static LocalSupplier of(Block[] blocks) {
- return new ImmediateLocalSupplier(blocks);
+ static LocalSupplier of(Page page) {
+ return new ImmediateLocalSupplier(page);
}
}
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/LocalSourceExec.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/LocalSourceExec.java
index 5994ce813c851..eeb7ff2cd173f 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/LocalSourceExec.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/LocalSourceExec.java
@@ -12,6 +12,7 @@
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.compute.data.Block;
+import org.elasticsearch.compute.data.Page;
import org.elasticsearch.xpack.esql.core.expression.Attribute;
import org.elasticsearch.xpack.esql.core.tree.NodeInfo;
import org.elasticsearch.xpack.esql.core.tree.Source;
@@ -60,7 +61,7 @@ public LocalSourceExec(StreamInput in) throws IOException {
*/
public static LocalSupplier readLegacyLocalSupplierFrom(PlanStreamInput in) throws IOException {
Block[] blocks = in.readCachedBlockArray();
- return blocks.length == 0 ? EmptyLocalSupplier.EMPTY : LocalSupplier.of(blocks);
+ return blocks.length == 0 ? EmptyLocalSupplier.EMPTY : LocalSupplier.of(new Page(blocks));
}
@Override
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java
index 5ae15f4c0e844..7821072aca991 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java
@@ -681,7 +681,7 @@ private PhysicalOperation planHashJoin(HashJoinExec join, LocalExecutionPlannerC
}
Layout layout = layoutBuilder.build();
LocalSourceExec localSourceExec = (LocalSourceExec) join.joinData();
- Block[] localData = localSourceExec.supplier().get();
+ Page localData = localSourceExec.supplier().get();
RowInTableLookupOperator.Key[] keys = new RowInTableLookupOperator.Key[join.leftFields().size()];
int[] blockMapping = new int[join.leftFields().size()];
@@ -692,7 +692,7 @@ private PhysicalOperation planHashJoin(HashJoinExec join, LocalExecutionPlannerC
List output = join.joinData().output();
for (int l = 0; l < output.size(); l++) {
if (output.get(l).name().equals(right.name())) {
- localField = localData[l];
+ localField = localData.getBlock(l);
}
}
if (localField == null) {
@@ -713,7 +713,7 @@ private PhysicalOperation planHashJoin(HashJoinExec join, LocalExecutionPlannerC
Block localField = null;
for (int l = 0; l < joinDataOutput.size(); l++) {
if (joinDataOutput.get(l).name().equals(f.name())) {
- localField = localData[l];
+ localField = localData.getBlock(l);
}
}
if (localField == null) {
@@ -808,7 +808,7 @@ private PhysicalOperation planLookupJoin(LookupJoinExec join, LocalExecutionPlan
private PhysicalOperation planLocal(LocalSourceExec localSourceExec, LocalExecutionPlannerContext context) {
Layout.Builder layout = new Layout.Builder();
layout.append(localSourceExec.output());
- LocalSourceOperator.BlockSupplier supplier = () -> localSourceExec.supplier().get();
+ LocalSourceOperator.PageSupplier supplier = () -> localSourceExec.supplier().get();
var operator = new LocalSourceOperator(supplier);
return PhysicalOperation.fromSource(new LocalSourceFactory(() -> operator), layout.build());
}
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java
index 5de03645ed684..4800a281dc318 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java
@@ -219,7 +219,7 @@ public void executeOptimizedPlan(
values.add(List.of("coordinator", "optimizedLogicalPlan", optimizedLogicalPlanString));
values.add(List.of("coordinator", "optimizedPhysicalPlan", physicalPlanString));
var blocks = BlockUtils.fromList(PlannerUtils.NON_BREAKING_BLOCK_FACTORY, values);
- physicalPlan = new LocalSourceExec(Source.EMPTY, fields, LocalSupplier.of(blocks));
+ physicalPlan = new LocalSourceExec(Source.EMPTY, fields, LocalSupplier.of(new Page(blocks)));
planRunner.run(physicalPlan, listener);
} else {
// TODO: this could be snuck into the underlying listener
@@ -304,7 +304,7 @@ private static LocalRelation resultToPlan(LogicalPlan plan, Result result) {
List schema = result.schema();
// if (pages.size() > 1) {
Block[] blocks = SessionUtils.fromPages(schema, pages);
- return new LocalRelation(plan.source(), schema, LocalSupplier.of(blocks));
+ return new LocalRelation(plan.source(), schema, LocalSupplier.of(blocks.length == 0 ? new Page(0) : new Page(blocks)));
}
private LogicalPlan parse(String query, QueryParams params) {
@@ -559,8 +559,9 @@ private ElasticsearchException findFailure(Map LocalSupplierTests.randomBlock(blockSize)).toArray(Block[]::new);
+ return new CopyingLocalSupplier(new Page(blocks));
}
protected void assertOnBWCObject(LocalSupplier testInstance, LocalSupplier bwcDeserializedObject, TransportVersion version) {
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/local/ImmediateLocalSupplierTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/local/ImmediateLocalSupplierTests.java
index 1de9581f4dbc0..165116af0ba5f 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/local/ImmediateLocalSupplierTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/local/ImmediateLocalSupplierTests.java
@@ -9,6 +9,7 @@
import org.elasticsearch.TransportVersion;
import org.elasticsearch.compute.data.Block;
+import org.elasticsearch.compute.data.Page;
import static org.hamcrest.Matchers.equalTo;
@@ -16,8 +17,9 @@ public class ImmediateLocalSupplierTests extends LocalSupplierTests {
@Override
protected LocalSupplier createTestInstance() {
- Block[] blocks = randomList(1, 10, LocalSupplierTests::randomBlock).toArray(Block[]::new);
- return new ImmediateLocalSupplier(blocks);
+ int blockSize = randomInt(1000);
+ Block[] blocks = randomList(1, 10, () -> LocalSupplierTests.randomBlock(blockSize)).toArray(Block[]::new);
+ return new ImmediateLocalSupplier(new Page(blocks));
}
protected void assertOnBWCObject(LocalSupplier testInstance, LocalSupplier bwcDeserializedObject, TransportVersion version) {
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/local/LocalRelationSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/local/LocalRelationSerializationTests.java
index cf59a35799ad1..bf02afa13128c 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/local/LocalRelationSerializationTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/local/LocalRelationSerializationTests.java
@@ -9,6 +9,7 @@
import org.elasticsearch.compute.data.Block;
import org.elasticsearch.compute.data.BlockUtils;
+import org.elasticsearch.compute.data.Page;
import org.elasticsearch.compute.test.TestBlockFactory;
import org.elasticsearch.xpack.esql.core.expression.Attribute;
import org.elasticsearch.xpack.esql.core.tree.Source;
@@ -36,7 +37,7 @@ private static LocalSupplier randomLocalSupplier(List attributes) {
1
);
}
- return LocalSupplier.of(blocks);
+ return LocalSupplier.of(new Page(blocks));
}
@Override
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/local/LocalSupplierTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/local/LocalSupplierTests.java
index 1d144b995711f..8dfd65c58c52a 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/local/LocalSupplierTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/local/LocalSupplierTests.java
@@ -17,6 +17,7 @@
import org.elasticsearch.compute.data.Block;
import org.elasticsearch.compute.data.BlockFactory;
import org.elasticsearch.compute.data.IntBlock;
+import org.elasticsearch.compute.data.Page;
import org.elasticsearch.test.AbstractWireTestCase;
import org.elasticsearch.test.TransportVersionUtils;
import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput;
@@ -95,22 +96,27 @@ public static LocalSupplier randomLocalSupplier() {
}
public static LocalSupplier randomNonEmpty() {
- Block[] blocks = randomList(1, 10, LocalSupplierTests::randomBlock).toArray(Block[]::new);
- return randomBoolean() ? LocalSupplier.of(blocks) : new CopyingLocalSupplier(blocks);
+ int blockSize = randomInt(1000);
+ Block[] blocks = randomList(1, 10, () -> LocalSupplierTests.randomBlock(blockSize)).toArray(Block[]::new);
+ return randomBoolean() ? LocalSupplier.of(new Page(blocks)) : new CopyingLocalSupplier(new Page(blocks));
}
@Override
protected LocalSupplier mutateInstance(LocalSupplier instance) throws IOException {
- Block[] blocks = instance.get();
+ Page page = instance.get();
+ Block[] blocks = new Block[page.getBlockCount()];
+ for (int i = 0; i < page.getBlockCount(); i++) {
+ blocks[i] = page.getBlock(i);
+ }
if (blocks.length > 0 && randomBoolean()) {
if (randomBoolean()) {
return EmptyLocalSupplier.EMPTY;
}
- return LocalSupplier.of(Arrays.copyOf(blocks, blocks.length - 1, Block[].class));
+ return LocalSupplier.of(new Page(page.getPositionCount(), Arrays.copyOf(blocks, blocks.length - 1, Block[].class)));
}
blocks = Arrays.copyOf(blocks, blocks.length + 1, Block[].class);
- blocks[blocks.length - 1] = randomBlock();
- return LocalSupplier.of(blocks);
+ blocks[blocks.length - 1] = randomBlock(page.getPositionCount());
+ return LocalSupplier.of(new Page(blocks));
}
@Override
@@ -118,10 +124,9 @@ protected NamedWriteableRegistry getNamedWriteableRegistry() {
return new NamedWriteableRegistry(PlanWritables.others());
}
- static Block randomBlock() {
- int len = between(1, 1000);
- try (IntBlock.Builder ints = BLOCK_FACTORY.newIntBlockBuilder(len)) {
- for (int i = 0; i < len; i++) {
+ static Block randomBlock(int blockSize) {
+ try (IntBlock.Builder ints = BLOCK_FACTORY.newIntBlockBuilder(blockSize)) {
+ for (int i = 0; i < blockSize; i++) {
ints.appendInt(randomInt());
}
return ints.build();
@@ -130,6 +135,6 @@ static Block randomBlock() {
@Override
protected boolean shouldBeSame(LocalSupplier newInstance) {
- return newInstance.get().length == 0;
+ return newInstance.get().getBlockCount() == 0;
}
}
From 3ae2314394a40edef1f378734837b211234e2fca Mon Sep 17 00:00:00 2001
From: Luigi Dell'Aquila
Date: Mon, 25 Aug 2025 15:43:58 +0200
Subject: [PATCH 08/18] Fix flaky test
---
.../xpack/esql/action/CrossClusterLookupJoinIT.java | 11 -----------
1 file changed, 11 deletions(-)
diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterLookupJoinIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterLookupJoinIT.java
index d6224e1cd8717..b8e85325e39cf 100644
--- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterLookupJoinIT.java
+++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterLookupJoinIT.java
@@ -461,17 +461,6 @@ public void testLookupJoinEmptyIndex() throws IOException {
+ "[values_lookup_map] resolves to [values_lookup_map] in [standard] mode"
)
);
- ex = expectThrows(
- VerificationException.class,
- () -> runQuery("FROM c*:logs-* | LOOKUP JOIN values_lookup_map ON v | KEEP v", randomBoolean())
- );
- assertThat(
- ex.getMessage(),
- containsString(
- "Lookup Join requires a single lookup mode index; "
- + "[values_lookup_map] resolves to [cluster-a:values_lookup_map] in [standard] mode"
- )
- );
}
public void testLookupJoinIndexMode() throws IOException {
From 0421de5f098863ab23a0833ee437654e0d6b3594 Mon Sep 17 00:00:00 2001
From: Luigi Dell'Aquila
Date: Thu, 28 Aug 2025 11:49:36 +0200
Subject: [PATCH 09/18] More tests
---
.../src/main/resources/drop.csv-spec | 136 ++++++------------
.../src/main/resources/lookup-join.csv-spec | 16 +++
.../src/main/resources/sample.csv-spec | 10 ++
.../esql/session/FieldNameUtilsTests.java | 69 +++++----
4 files changed, 113 insertions(+), 118 deletions(-)
diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/drop.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/drop.csv-spec
index 0795701a6185d..3134233f371d3 100644
--- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/drop.csv-spec
+++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/drop.csv-spec
@@ -50,111 +50,61 @@ b:integer | x:integer
dropAllColumns
required_capability: fix_no_columns
-from employees | keep height | drop height | eval x = 1;
+from employees | limit 4 | keep height | drop height | eval x = 1;
x:integer
1
1
1
1
+;
+
+dropEvalKeep
+required_capability: fix_no_columns
+from employees | drop salary | eval salary = 1 | keep salary | limit 4;
+
+salary:integer
1
1
1
1
-1
-1
-1
-1
-1
-1
-1
-1
-1
-1
-1
-1
-1
-1
-1
-1
-1
-1
-1
-1
-1
-1
-1
-1
-1
-1
-1
-1
-1
-1
-1
-1
-1
-1
-1
-1
-1
-1
-1
-1
-1
-1
-1
-1
-1
-1
-1
-1
-1
-1
-1
-1
-1
-1
-1
-1
-1
-1
-1
-1
-1
-1
-1
-1
-1
-1
-1
-1
-1
-1
-1
-1
-1
-1
-1
-1
-1
-1
-1
-1
-1
-1
-1
-1
-1
-1
-1
-1
-1
-1
-1
-1
;
+
+dropEvalStats
+required_capability: fix_no_columns
+from mv_sample* | drop `client_ip`, message | eval `event_duration` = "foo", @timestamp = 1 | stats max(@timestamp) by event_duration;
+
+max(@timestamp):integer | event_duration:keyword
+1 | foo
+;
+
+
+
+dropAllColumnsIndexPattern
+required_capability: fix_no_columns
+from emp* | drop languages | eval languages = 123 | keep languages | limit 4;
+
+languages:integer
+123
+123
+123
+123
+;
+
+
+dropAllColumnsWithMetadata
+required_capability: fix_no_columns
+from employees metadata _index | drop languages | eval languages = 123 | keep languages | limit 4;
+
+languages:integer
+123
+123
+123
+123
+;
+
+
dropAllColumns_WithLimit
required_capability: fix_no_columns
from employees | keep height | drop height | eval x = 1 | limit 3;
diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec
index 8694505411291..d3da62c5afff9 100644
--- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec
+++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec
@@ -5149,3 +5149,19 @@ null | null | bar2 | null | null
null | null | corge | null | null
null | null | fred | null | null
;
+
+lookupAfterDropAllColumns
+required_capability: fix_no_columns
+required_capability: join_lookup_v12
+FROM languages
+| DROP language_code
+| EVAL language_code = 3
+| LOOKUP JOIN languages_lookup ON language_code
+;
+
+ language_code:integer | language_name:keyword
+3 |Spanish
+3 |Spanish
+3 |Spanish
+3 |Spanish
+;
diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/sample.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/sample.csv-spec
index 8ecf1b7d374de..405e618b91707 100644
--- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/sample.csv-spec
+++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/sample.csv-spec
@@ -246,3 +246,13 @@ emp_no:integer
10081
// end::sampleForDocs-result[]
;
+
+
+sampleStatsEval
+required_capability: fix_no_columns
+required_capability: sample_v3
+FROM employees | SAMPLE 0.5 | LIMIT 10 | STATS count = COUNT() | EVAL is_expected = count > 0;
+
+count:long | is_expected:boolean
+10 | true
+;
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/FieldNameUtilsTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/FieldNameUtilsTests.java
index 70c3edc1283cc..f1a04d6d28db6 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/FieldNameUtilsTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/FieldNameUtilsTests.java
@@ -3374,17 +3374,20 @@ public void testForkBeforeGrok() {
public void testImplicitFieldNames() {
assertFieldNames("""
FROM sample_data
- | STATS x = 1 year + TBUCKET(1 day) BY b1d = TBUCKET(1 day)""", Set.of("@timestamp", "@timestamp.*"));
+ | STATS x = 1 year + TBUCKET(1 day) BY b1d = TBUCKET(1 day)""", Set.of("_index", "_index.*", "@timestamp", "@timestamp.*"));
}
public void testKeepTimestampBeforeStats() {
- assertFieldNames("""
- FROM sample_data
- | WHERE event_duration > 0
- | KEEP @timestamp, client_ip
- | STATS count = COUNT(*), avg_dur = AVG(event_duration) BY hour = TBUCKET(1h), client_ip
- | SORT hour ASC
- """, Set.of("@timestamp", "@timestamp.*", "client_ip", "client_ip.*", "event_duration", "event_duration.*"));
+ assertFieldNames(
+ """
+ FROM sample_data
+ | WHERE event_duration > 0
+ | KEEP @timestamp, client_ip
+ | STATS count = COUNT(*), avg_dur = AVG(event_duration) BY hour = TBUCKET(1h), client_ip
+ | SORT hour ASC
+ """,
+ Set.of("_index", "_index.*", "@timestamp", "@timestamp.*", "client_ip", "client_ip.*", "event_duration", "event_duration.*")
+ );
}
public void testKeepAtWildcardBeforeStats() {
@@ -3394,17 +3397,30 @@ public void testKeepAtWildcardBeforeStats() {
| KEEP @*, message
| STATS errors = COUNT() BY day = TBUCKET(1d), message
| SORT day ASC
- """, Set.of("@timestamp", "@timestamp.*", "@*", "message", "message.*"));
+ """, Set.of("_index", "_index.*", "@timestamp", "@timestamp.*", "@*", "message", "message.*"));
}
public void testKeepWildcardBeforeStats() {
- assertFieldNames("""
- FROM sample_data
- | WHERE client_ip IS NOT NULL
- | KEEP *stamp*, client_ip
- | STATS p95 = PERCENTILE(event_duration, 95) BY ten_min = TBUCKET(10min), client_ip
- | SORT ten_min ASC
- """, Set.of("@timestamp", "@timestamp.*", "client_ip", "client_ip.*", "event_duration", "event_duration.*", "*stamp*"));
+ assertFieldNames(
+ """
+ FROM sample_data
+ | WHERE client_ip IS NOT NULL
+ | KEEP *stamp*, client_ip
+ | STATS p95 = PERCENTILE(event_duration, 95) BY ten_min = TBUCKET(10min), client_ip
+ | SORT ten_min ASC
+ """,
+ Set.of(
+ "_index",
+ "_index.*",
+ "@timestamp",
+ "@timestamp.*",
+ "client_ip",
+ "client_ip.*",
+ "event_duration",
+ "event_duration.*",
+ "*stamp*"
+ )
+ );
}
public void testStatsChainingWithTimestampCarriedForward() {
@@ -3415,7 +3431,7 @@ public void testStatsChainingWithTimestampCarriedForward() {
| WHERE day_count > 0
| STATS hour_count = COUNT(), hour_p95 = PERCENTILE(day_p95, 95) BY hour = TBUCKET(1h), day
| SORT day ASC, hour ASC
- """, Set.of("@timestamp", "@timestamp.*", "event_duration", "event_duration.*"));
+ """, Set.of("_index", "_index.*", "@timestamp", "@timestamp.*", "event_duration", "event_duration.*"));
}
public void testStatsChainingWithTimestampEval() {
@@ -3426,17 +3442,20 @@ public void testStatsChainingWithTimestampEval() {
| STATS total = COUNT(*), med = MEDIAN(event_duration) BY d = TBUCKET(1d), message, t
| WHERE total > 5
| STATS day_total = SUM(total), hour_med = MEDIAN(med) BY h = TBUCKET(1h), message
- """, Set.of("@timestamp", "@timestamp.*", "event_duration", "event_duration.*", "message", "message.*"));
+ """, Set.of("_index", "_index.*", "@timestamp", "@timestamp.*", "event_duration", "event_duration.*", "message", "message.*"));
}
public void testStatsChainingWithTimestampCarriedForwardAsByKey() {
- assertFieldNames("""
- FROM sample_data
- | KEEP @timestamp, client_ip, event_duration
- | STATS reqs = COUNT(), max_dur = MAX(event_duration) BY day = TBUCKET(1d), client_ip, @timestamp
- | WHERE max_dur > 1000
- | STATS spikes = COUNT() BY hour = TBUCKET(1h), client_ip, day
- """, Set.of("@timestamp", "@timestamp.*", "event_duration", "event_duration.*", "client_ip", "client_ip.*"));
+ assertFieldNames(
+ """
+ FROM sample_data
+ | KEEP @timestamp, client_ip, event_duration
+ | STATS reqs = COUNT(), max_dur = MAX(event_duration) BY day = TBUCKET(1d), client_ip, @timestamp
+ | WHERE max_dur > 1000
+ | STATS spikes = COUNT() BY hour = TBUCKET(1h), client_ip, day
+ """,
+ Set.of("_index", "_index.*", "@timestamp", "@timestamp.*", "event_duration", "event_duration.*", "client_ip", "client_ip.*")
+ );
}
private void assertFieldNames(String query, Set expected) {
From 34b995a72905c665a5dfe523b84bc240d5c2a805 Mon Sep 17 00:00:00 2001
From: Luigi Dell'Aquila
Date: Thu, 28 Aug 2025 12:35:18 +0200
Subject: [PATCH 10/18] Tests
---
.../qa/testFixtures/src/main/resources/lookup-join.csv-spec | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec
index d3da62c5afff9..62b391c49c336 100644
--- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec
+++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec
@@ -5157,11 +5157,11 @@ FROM languages
| DROP language_code
| EVAL language_code = 3
| LOOKUP JOIN languages_lookup ON language_code
+| LIMIT 3
;
language_code:integer | language_name:keyword
3 |Spanish
3 |Spanish
3 |Spanish
-3 |Spanish
;
From be758560968599a72db114fe13fb6006c374c98b Mon Sep 17 00:00:00 2001
From: Luigi Dell'Aquila
Date: Thu, 28 Aug 2025 15:48:54 +0200
Subject: [PATCH 11/18] More tests
---
.../compute/data/BasicPageTests.java | 42 +++++++++++++++++--
1 file changed, 39 insertions(+), 3 deletions(-)
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicPageTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicPageTests.java
index 55b39766ca350..809f9332f888e 100644
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicPageTests.java
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicPageTests.java
@@ -51,6 +51,15 @@ public void testEqualityAndHashCodeSmallInput() {
);
in.releaseBlocks();
+ in = new Page(10);
+ EqualsHashCodeTestUtils.checkEqualsAndHashCode(
+ in,
+ page -> new Page(10),
+ page -> new Page(8, blockFactory.newConstantIntBlockWith(1, 8)),
+ Page::releaseBlocks
+ );
+ in.releaseBlocks();
+
in = new Page(blockFactory.newIntArrayVector(new int[] {}, 0).asBlock());
EqualsHashCodeTestUtils.checkEqualsAndHashCode(
in,
@@ -133,8 +142,8 @@ public void testEqualityAndHashCode() throws IOException {
return new Page(blocks);
};
- int positions = randomIntBetween(1, 512);
- int blockCount = randomIntBetween(1, 256);
+ int positions = randomIntBetween(0, 512);
+ int blockCount = randomIntBetween(0, 256);
Block[] blocks = new Block[blockCount];
for (int blockIndex = 0; blockIndex < blockCount; blockIndex++) {
blocks[blockIndex] = switch (randomInt(9)) {
@@ -198,6 +207,16 @@ public void testAppend() {
page2.releaseBlocks();
}
+ public void testAppendToEmpty() {
+ Page page1 = new Page(10);
+ Page page2 = page1.appendBlock(blockFactory.newLongArrayVector(LongStream.range(0, 10).toArray(), 10).asBlock());
+ assertThat(0, is(page1.getBlockCount()));
+ assertThat(1, is(page2.getBlockCount()));
+ LongBlock block1 = page2.getBlock(0);
+ IntStream.range(0, 10).forEach(i -> assertThat((long) i, is(block1.getLong(i))));
+ page2.releaseBlocks();
+ }
+
public void testPageSerializationSimple() throws IOException {
IntVector toFilter = blockFactory.newIntArrayVector(IntStream.range(0, 20).toArray(), 20);
Page origPage = new Page(
@@ -248,6 +267,22 @@ public void testPageSerializationSimple() throws IOException {
}
}
+ public void testPageSerializationEmpty() throws IOException {
+ Page origPage = new Page(10);
+ try {
+ Page deserPage = serializeDeserializePage(origPage);
+ try {
+ EqualsHashCodeTestUtils.checkEqualsAndHashCode(origPage, unused -> deserPage);
+ assertEquals(origPage.getBlockCount(), deserPage.getBlockCount());
+ assertEquals(origPage.getPositionCount(), deserPage.getPositionCount());
+ } finally {
+ deserPage.releaseBlocks();
+ }
+ } finally {
+ origPage.releaseBlocks();
+ }
+ }
+
public void testSerializationListPages() throws IOException {
final int positions = randomIntBetween(1, 64);
List origPages = List.of(
@@ -265,7 +300,8 @@ public void testSerializationListPages() throws IOException {
positions
)
),
- new Page(blockFactory.newConstantBytesRefBlockWith(new BytesRef("Hello World"), positions))
+ new Page(blockFactory.newConstantBytesRefBlockWith(new BytesRef("Hello World"), positions)),
+ new Page(10)
);
try {
EqualsHashCodeTestUtils.checkEqualsAndHashCode(origPages, page -> {
From afd03506ee39f2a595d70bcb81320bb15eb2303d Mon Sep 17 00:00:00 2001
From: Luigi Dell'Aquila
Date: Mon, 8 Sep 2025 10:35:39 +0200
Subject: [PATCH 12/18] More tests
---
.../rest/generative/EsqlQueryGenerator.java | 2 +
.../command/pipe/DropAllGenerator.java | 68 +++++++++++++++++++
.../rest-api-spec/test/esql/10_basic.yml | 15 ++++
3 files changed, 85 insertions(+)
create mode 100644 x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/command/pipe/DropAllGenerator.java
diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/EsqlQueryGenerator.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/EsqlQueryGenerator.java
index b6cf5ff3a8d15..6ccbd0bb3fb57 100644
--- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/EsqlQueryGenerator.java
+++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/EsqlQueryGenerator.java
@@ -11,6 +11,7 @@
import org.elasticsearch.xpack.esql.qa.rest.generative.command.CommandGenerator;
import org.elasticsearch.xpack.esql.qa.rest.generative.command.pipe.ChangePointGenerator;
import org.elasticsearch.xpack.esql.qa.rest.generative.command.pipe.DissectGenerator;
+import org.elasticsearch.xpack.esql.qa.rest.generative.command.pipe.DropAllGenerator;
import org.elasticsearch.xpack.esql.qa.rest.generative.command.pipe.DropGenerator;
import org.elasticsearch.xpack.esql.qa.rest.generative.command.pipe.EnrichGenerator;
import org.elasticsearch.xpack.esql.qa.rest.generative.command.pipe.EvalGenerator;
@@ -58,6 +59,7 @@ public record QueryExecuted(String query, int depth, List outputSchema,
ChangePointGenerator.INSTANCE,
DissectGenerator.INSTANCE,
DropGenerator.INSTANCE,
+ DropAllGenerator.INSTANCE,
EnrichGenerator.INSTANCE,
EvalGenerator.INSTANCE,
ForkGenerator.INSTANCE,
diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/command/pipe/DropAllGenerator.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/command/pipe/DropAllGenerator.java
new file mode 100644
index 0000000000000..febab3085d72e
--- /dev/null
+++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/command/pipe/DropAllGenerator.java
@@ -0,0 +1,68 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.esql.qa.rest.generative.command.pipe;
+
+import org.elasticsearch.xpack.esql.qa.rest.generative.EsqlQueryGenerator;
+import org.elasticsearch.xpack.esql.qa.rest.generative.command.CommandGenerator;
+
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+public class DropAllGenerator implements CommandGenerator {
+
+ public static final String DROP_ALL = "drop_all";
+ public static final String DROPPED_COLUMNS = "dropped_columns";
+
+ public static final CommandGenerator INSTANCE = new DropAllGenerator();
+
+ @Override
+ public CommandDescription generate(
+ List previousCommands,
+ List previousOutput,
+ QuerySchema schema
+ ) {
+ Set droppedColumns = new HashSet<>();
+ String name = EsqlQueryGenerator.randomStringField(previousOutput);
+ if (name == null || name.isEmpty()) {
+ return CommandGenerator.EMPTY_DESCRIPTION;
+ }
+
+ String cmdString = " | keep " + name + " | drop " + name;
+ return new CommandDescription(DROP_ALL, this, cmdString, Map.ofEntries(Map.entry(DROPPED_COLUMNS, droppedColumns)));
+ }
+
+ @Override
+ @SuppressWarnings("unchecked")
+ public ValidationResult validateOutput(
+ List previousCommands,
+ CommandDescription commandDescription,
+ List previousColumns,
+ List> previousOutput,
+ List columns,
+ List> output
+ ) {
+ if (commandDescription == EMPTY_DESCRIPTION) {
+ return VALIDATION_OK;
+ }
+
+ if (columns.size() > 0) {
+ return new ValidationResult(
+ false,
+ "Expecting no columns, got ["
+ + columns.stream().map(EsqlQueryGenerator.Column::name).collect(Collectors.joining(", "))
+ + "]"
+ );
+ }
+
+ return VALIDATION_OK;
+ }
+
+}
diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/10_basic.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/10_basic.yml
index b9d20d4cd40cf..2ff4c6482c1c4 100644
--- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/10_basic.yml
+++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/10_basic.yml
@@ -596,3 +596,18 @@ version is not allowed:
- length: {values: 2}
- match: {values.0: ["1981-01-12T15:00:00.000Z","2000-01-01T00:00:00.000Z"]}
- match: {values.1: ["1999-12-31T20:59:00.000Z", null]}
+
+
+---
+"No columns left after drop":
+ - do:
+ allowed_warnings_regex:
+ - "No limit defined, adding default limit of \\[.*\\]"
+ esql.query:
+ body:
+ query: 'from test_range | keep date_1 | drop date_1 | limit 2'
+
+ - length: {columns: 0}
+ - length: {values: 2}
+ - length: {values.0: 0 }
+ - length: {values.1: 0 }
From b42b09a85b23d2c2860b6c6dd632a4127a9fac0d Mon Sep 17 00:00:00 2001
From: Luigi Dell'Aquila
Date: Mon, 8 Sep 2025 11:40:41 +0200
Subject: [PATCH 13/18] Fix pushdown stats and new tests
---
.../physical/local/PushStatsToSource.java | 2 +-
.../rest-api-spec/test/esql/10_basic.yml | 28 +++++++++++++++++++
2 files changed, 29 insertions(+), 1 deletion(-)
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushStatsToSource.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushStatsToSource.java
index 0fff9e233e956..c16be81a405b2 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushStatsToSource.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushStatsToSource.java
@@ -51,7 +51,7 @@ protected PhysicalPlan rule(AggregateExec aggregateExec, LocalPhysicalOptimizerC
// for the moment support pushing count just for one field
List stats = tuple.v2();
- if (stats.size() > 1) {
+ if (stats.size() != 1) {
return aggregateExec;
}
diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/10_basic.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/10_basic.yml
index 2ff4c6482c1c4..f45dab141f14e 100644
--- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/10_basic.yml
+++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/10_basic.yml
@@ -611,3 +611,31 @@ version is not allowed:
- length: {values: 2}
- length: {values.0: 0 }
- length: {values.1: 0 }
+
+
+---
+"Drop all columns after stats":
+ - requires:
+ test_runner_features: [capabilities, allowed_warnings_regex]
+ capabilities:
+ - method: POST
+ path: /_query
+ parameters: []
+ capabilities: [fix_no_columns]
+      reason: "plans with no columns only recently supported"
+
+ - do:
+ allowed_warnings_regex:
+ - "No limit defined, adding default limit of \\[.*\\]"
+ esql.query:
+ body:
+ query: 'from test | stats by color | keep color | drop color'
+
+ - length: {columns: 0}
+ - length: {values: 3}
+ - length: {values.0: 0 }
+ - length: {values.1: 0 }
+ - length: {values.2: 0 }
+
+
+
From 3a388a71e1812d62bfc35cf4a3bd93a1a36067e8 Mon Sep 17 00:00:00 2001
From: Luigi Dell'Aquila
Date: Mon, 8 Sep 2025 12:29:09 +0200
Subject: [PATCH 14/18] BWC
---
.../resources/rest-api-spec/test/esql/10_basic.yml | 9 +++++++++
1 file changed, 9 insertions(+)
diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/10_basic.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/10_basic.yml
index f45dab141f14e..d8c6491ebf6ac 100644
--- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/10_basic.yml
+++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/10_basic.yml
@@ -600,6 +600,15 @@ version is not allowed:
---
"No columns left after drop":
+ - requires:
+ test_runner_features: [capabilities, allowed_warnings_regex]
+ capabilities:
+ - method: POST
+ path: /_query
+ parameters: []
+ capabilities: [fix_no_columns]
+      reason: "plans with no columns only recently supported"
+
- do:
allowed_warnings_regex:
- "No limit defined, adding default limit of \\[.*\\]"
From 710fdccc60faacc9433a5fe0c53551c8193c0c77 Mon Sep 17 00:00:00 2001
From: Luigi Dell'Aquila
Date: Tue, 30 Sep 2025 14:54:28 +0200
Subject: [PATCH 15/18] Fix compile and add transport version
---
.../referable/esql_plan_with_no_columns.csv | 1 +
.../resources/transport/upper_bounds/9.2.csv | 2 +-
.../command/pipe/DropAllGenerator.java | 18 +++++++++---------
.../logical/local/ImmediateLocalSupplier.java | 1 -
.../xpack/esql/session/EsqlSession.java | 16 ++++++++--------
5 files changed, 19 insertions(+), 19 deletions(-)
create mode 100644 server/src/main/resources/transport/definitions/referable/esql_plan_with_no_columns.csv
diff --git a/server/src/main/resources/transport/definitions/referable/esql_plan_with_no_columns.csv b/server/src/main/resources/transport/definitions/referable/esql_plan_with_no_columns.csv
new file mode 100644
index 0000000000000..2752f57900f3e
--- /dev/null
+++ b/server/src/main/resources/transport/definitions/referable/esql_plan_with_no_columns.csv
@@ -0,0 +1 @@
+9183000
diff --git a/server/src/main/resources/transport/upper_bounds/9.2.csv b/server/src/main/resources/transport/upper_bounds/9.2.csv
index 0dde9b0f74618..a87bb55d0b516 100644
--- a/server/src/main/resources/transport/upper_bounds/9.2.csv
+++ b/server/src/main/resources/transport/upper_bounds/9.2.csv
@@ -1 +1 @@
-inference_telemetry_added_semantic_text_stats,9182000
+esql_plan_with_no_columns,9183000
diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/generator/command/pipe/DropAllGenerator.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/generator/command/pipe/DropAllGenerator.java
index c6490f18ab6dd..622c77572fc9c 100644
--- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/generator/command/pipe/DropAllGenerator.java
+++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/generator/command/pipe/DropAllGenerator.java
@@ -8,8 +8,9 @@
package org.elasticsearch.xpack.esql.generator.command.pipe;
import org.elasticsearch.xpack.esql.generator.Column;
-import org.elasticsearch.xpack.esql.qa.rest.generative.EsqlQueryGenerator;
-import org.elasticsearch.xpack.esql.qa.rest.generative.command.CommandGenerator;
+import org.elasticsearch.xpack.esql.generator.EsqlQueryGenerator;
+import org.elasticsearch.xpack.esql.generator.QueryExecutor;
+import org.elasticsearch.xpack.esql.generator.command.CommandGenerator;
import java.util.HashSet;
import java.util.List;
@@ -27,8 +28,9 @@ public class DropAllGenerator implements CommandGenerator {
@Override
public CommandDescription generate(
List previousCommands,
- List previousOutput,
- QuerySchema schema
+ List previousOutput,
+ QuerySchema schema,
+ QueryExecutor executor
) {
Set droppedColumns = new HashSet<>();
String name = EsqlQueryGenerator.randomStringField(previousOutput);
@@ -45,9 +47,9 @@ public CommandDescription generate(
public ValidationResult validateOutput(
List previousCommands,
CommandDescription commandDescription,
- List previousColumns,
+ List previousColumns,
List> previousOutput,
- List columns,
+ List columns,
List> output
) {
if (commandDescription == EMPTY_DESCRIPTION) {
@@ -57,9 +59,7 @@ public ValidationResult validateOutput(
if (columns.size() > 0) {
return new ValidationResult(
false,
- "Expecting no columns, got ["
- + columns.stream().map(Column::name).collect(Collectors.joining(", "))
- + "]"
+ "Expecting no columns, got [" + columns.stream().map(Column::name).collect(Collectors.joining(", ")) + "]"
);
}
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/local/ImmediateLocalSupplier.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/local/ImmediateLocalSupplier.java
index 734cbd6739f11..bc32588f838ff 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/local/ImmediateLocalSupplier.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/local/ImmediateLocalSupplier.java
@@ -8,7 +8,6 @@
package org.elasticsearch.xpack.esql.plan.logical.local;
import org.elasticsearch.TransportVersion;
-import org.elasticsearch.TransportVersions;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java
index 2950ff0c98999..293bbb89dc121 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java
@@ -292,13 +292,13 @@ private void executeSubPlan(
executionInfo.startSubPlans();
runner.run(physicalSubPlan, listener.delegateFailureAndWrap((next, result) -> {
- AtomicReference localRelationBlocks = new AtomicReference<>();
+ AtomicReference localRelationPage = new AtomicReference<>();
try {
// Translate the subquery into a separate, coordinator based plan and the results 'broadcasted' as a local relation
completionInfoAccumulator.accumulate(result.completionInfo());
LocalRelation resultWrapper = resultToPlan(subPlans.stubReplacedSubPlan().source(), result);
- localRelationBlocks.set(resultWrapper.supplier().get());
- var releasingNext = ActionListener.runAfter(next, () -> releaseLocalRelationBlocks(localRelationBlocks));
+ localRelationPage.set(resultWrapper.supplier().get());
+ var releasingNext = ActionListener.runAfter(next, () -> releaseLocalRelationBlocks(localRelationPage));
subPlansResults.add(resultWrapper);
// replace the original logical plan with the backing result
@@ -340,7 +340,7 @@ private void executeSubPlan(
} catch (Exception e) {
// safely release the blocks in case an exception occurs either before, but also after the "final" runner.run() forks off
// the current thread, but with the blocks still referenced
- releaseLocalRelationBlocks(localRelationBlocks);
+ releaseLocalRelationBlocks(localRelationPage);
throw e;
} finally {
Releasables.closeExpectNoException(Releasables.wrap(Iterators.map(result.pages().iterator(), p -> p::releaseBlocks)));
@@ -361,10 +361,10 @@ private LocalRelation resultToPlan(Source planSource, Result result) {
return new LocalRelation(planSource, schema, LocalSupplier.of(blocks.length == 0 ? new Page(0) : new Page(blocks)));
}
- private static void releaseLocalRelationBlocks(AtomicReference localRelationBlocks) {
- Block[] relationBlocks = localRelationBlocks.getAndSet(null);
- if (relationBlocks != null) {
- Releasables.closeExpectNoException(relationBlocks);
+ private static void releaseLocalRelationBlocks(AtomicReference localRelationPage) {
+ Page relationPage = localRelationPage.getAndSet(null);
+ if (relationPage != null) {
+ Releasables.closeExpectNoException(relationPage);
}
}
From edc3890edb6206b241a915784d1f6b0dc5b569ae Mon Sep 17 00:00:00 2001
From: Luigi Dell'Aquila
Date: Tue, 30 Sep 2025 15:22:40 +0200
Subject: [PATCH 16/18] Fix test
---
.../elasticsearch/xpack/esql/session/FieldNameUtilsTests.java | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/FieldNameUtilsTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/FieldNameUtilsTests.java
index 04a3af74eac95..478e139efcb61 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/FieldNameUtilsTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/FieldNameUtilsTests.java
@@ -3201,7 +3201,7 @@ public void testForkBranchWithInlineStatsIgnore() {
FROM employees
| KEEP emp_no, languages, gender
| FORK (WHERE emp_no == 10048 OR emp_no == 10081
- | INLINESTATS x = MAX(languages) BY gender)
+ | INLINE STATS x = MAX(languages) BY gender)
(WHERE emp_no == 10081 OR emp_no == 10087
| INLINE STATS x = MIN(languages))
(WHERE emp_no == 10012 OR emp_no == 10012)
From f8ca703fb95cc816d44544122343f362239f05ee Mon Sep 17 00:00:00 2001
From: Luigi Dell'Aquila
Date: Thu, 2 Oct 2025 10:33:40 +0200
Subject: [PATCH 17/18] More tests
---
.../rest-api-spec/test/esql/10_basic.yml | 168 ++++++++++++++++++
1 file changed, 168 insertions(+)
diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/10_basic.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/10_basic.yml
index e3fbb32843c21..c09e851cb821d 100644
--- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/10_basic.yml
+++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/10_basic.yml
@@ -711,5 +711,173 @@ version is not allowed:
- length: {values.1: 0 }
- length: {values.2: 0 }
+---
+"Drop all columns after eval where false":
+ - requires:
+ test_runner_features: [capabilities, allowed_warnings_regex]
+ capabilities:
+ - method: POST
+ path: /_query
+ parameters: []
+ capabilities: [fix_no_columns]
+      reason: "plans with no columns only recently supported"
+
+ - do:
+ allowed_warnings_regex:
+ - "No limit defined, adding default limit of \\[.*\\]"
+ esql.query:
+ body:
+ query: 'from test | keep color | eval c = 1 | drop c, color | where false'
+
+ - length: {columns: 0}
+ - length: {values: 0}
+
+---
+"Drop all columns with inline stats":
+ - requires:
+ test_runner_features: [capabilities, allowed_warnings_regex]
+ capabilities:
+ - method: POST
+ path: /_query
+ parameters: []
+ capabilities: [fix_no_columns]
+      reason: "plans with no columns only recently supported"
+
+ - do:
+ allowed_warnings_regex:
+ - "No limit defined, adding default limit of \\[.*\\]"
+ esql.query:
+ body:
+ query: 'from test | keep color | drop color | eval x = 1 | inline stats a = count(), b = max(x) | limit 3'
+
+ - length: {columns: 3}
+ - match: { columns.0.name: "x" }
+ - match: { columns.1.name: "a" }
+ - match: { columns.2.name: "b" }
+ - length: {values: 3}
+ - match: {values.0: [1, 40, 1]}
+ - match: {values.1: [1, 40, 1]}
+ - match: {values.2: [1, 40, 1]}
+
+---
+"Drop all columns with inline stats and by":
+ - requires:
+ test_runner_features: [capabilities, allowed_warnings_regex]
+ capabilities:
+ - method: POST
+ path: /_query
+ parameters: []
+ capabilities: [fix_no_columns]
+      reason: "plans with no columns only recently supported"
+
+ - do:
+ allowed_warnings_regex:
+ - "No limit defined, adding default limit of \\[.*\\]"
+ esql.query:
+ body:
+ query: 'from test | keep color | drop color | eval x = 1, y = 1 | inline stats a = count(), b = max(x) by y | limit 3'
+ - length: {columns: 4}
+ - match: { columns.0.name: "x" }
+ - match: { columns.1.name: "a" }
+ - match: { columns.2.name: "b" }
+ - match: { columns.3.name: "y" }
+ - length: {values: 3}
+ - match: {values.0: [1, 40, 1, 1]}
+ - match: {values.1: [1, 40, 1, 1]}
+ - match: {values.2: [1, 40, 1, 1]}
+
+---
+"Drop all columns with multiple indices":
+ - requires:
+ test_runner_features: [ capabilities, allowed_warnings_regex ]
+ capabilities:
+ - method: POST
+ path: /_query
+ parameters: [ ]
+ capabilities: [ fix_no_columns ]
+      reason: "plans with no columns only recently supported"
+
+ - do:
+ allowed_warnings_regex:
+ - "No limit defined, adding default limit of \\[.*\\]"
+ esql.query:
+ body:
+ query: 'from test* | keep color | drop color | eval x = 1, y = 1 | inline stats a = count(), b = max(x) by y | limit 3'
+ - length: { columns: 4 }
+ - match: { columns.0.name: "x" }
+ - match: { columns.1.name: "a" }
+ - match: { columns.2.name: "b" }
+ - match: { columns.3.name: "y" }
+ - length: { values: 3 }
+ - match: {values.0: [1, 43, 1, 1]}
+ - match: {values.1: [1, 43, 1, 1]}
+ - match: {values.2: [1, 43, 1, 1]}
+
+---
+"Drop all columns with fork":
+ - requires:
+ test_runner_features: [ capabilities, allowed_warnings_regex ]
+ capabilities:
+ - method: POST
+ path: /_query
+ parameters: [ ]
+ capabilities: [ fix_no_columns ]
+      reason: "plans with no columns only recently supported"
+
+ - do:
+ allowed_warnings_regex:
+ - "No limit defined, adding default limit of \\[.*\\]"
+ esql.query:
+ body:
+ query: 'from test | keep color | drop color | fork (stats c=count()) (stats c=count())'
+
+ - length: { columns: 2 }
+ - match: { columns.0.name: "c" }
+ - match: { columns.1.name: "_fork" }
+ - length: { values: 2 }
+ - match: {values.0.0: 40}
+ - match: {values.1.0: 40}
+
+
+---
+"Drop all columns with fork drop all":
+ - requires:
+ test_runner_features: [ capabilities, allowed_warnings_regex ]
+ capabilities:
+ - method: POST
+ path: /_query
+ parameters: [ ]
+ capabilities: [ fix_no_columns ]
+      reason: "plans with no columns only recently supported"
+ - do:
+ allowed_warnings_regex:
+ - "No limit defined, adding default limit of \\[.*\\]"
+ esql.query:
+ body:
+ query: 'from test | keep color | drop color | fork (stats c=count()) (stats c=count()) | drop _fork*, c'
+
+ - length: { columns: 0 }
+ - length: { values: 2 }
+
+
+---
+"Drop all columns with fork drop all count":
+ - requires:
+ test_runner_features: [ capabilities, allowed_warnings_regex ]
+ capabilities:
+ - method: POST
+ path: /_query
+ parameters: [ ]
+ capabilities: [ fix_no_columns ]
+      reason: "plans with no columns only recently supported"
+ - do:
+ allowed_warnings_regex:
+ - "No limit defined, adding default limit of \\[.*\\]"
+ esql.query:
+ body:
+ query: 'from test | keep color | drop color | fork (stats c=count()) (stats c=count()) | drop _fork*, c | stats count(*)'
+ - length: { columns: 1 }
+ - length: { values: 1 }
+ - match: {values.0.0: 2}
From e9ebf8bf936dae9e8af1605173f3cf9b35f29111 Mon Sep 17 00:00:00 2001
From: Luigi Dell'Aquila
Date: Thu, 2 Oct 2025 10:51:57 +0200
Subject: [PATCH 18/18] Implement suggestions
---
.../rules/logical/PruneEmptyAggregates.java | 7 +
.../logical/local/CopyingLocalSupplier.java | 8 +-
.../xpack/esql/session/FieldNameUtils.java | 5 +-
.../esql/session/FieldNameUtilsTests.java | 962 +++++++-----------
4 files changed, 369 insertions(+), 613 deletions(-)
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PruneEmptyAggregates.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PruneEmptyAggregates.java
index 953595ca9dc53..116c723fd4de2 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PruneEmptyAggregates.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PruneEmptyAggregates.java
@@ -15,6 +15,13 @@
import java.util.List;
+/**
+ * STATS with no aggregates and no groupings can be replaced with a single, empty row.
+ * This can happen due to expression pruning at optimization time, after all of them are dropped,
+ * e.g.
+ *
+ * STATS a = count(*) by b | drop a, b
+ */
public final class PruneEmptyAggregates extends OptimizerRules.OptimizerRule {
@Override
protected LogicalPlan rule(Aggregate agg) {
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/local/CopyingLocalSupplier.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/local/CopyingLocalSupplier.java
index 3bec9bade79d2..8234605c53f5d 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/local/CopyingLocalSupplier.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/local/CopyingLocalSupplier.java
@@ -20,18 +20,18 @@
import java.io.IOException;
/**
- * A {@link LocalSupplier} that allways creates a new copy of the {@link Block}s initially provided at creation time.
+ * A {@link LocalSupplier} that always creates a new copy of the {@link Page} initially provided at creation time.
* This is created specifically for {@link InlineStats} usage in {@link EsqlSession} for queries that use ROW command.
*
- * The ROW which gets replaced by {@link ReplaceRowAsLocalRelation} with a {@link LocalRelation} will have its blocks
+ * The ROW which gets replaced by {@link ReplaceRowAsLocalRelation} with a {@link LocalRelation} will have its page
* used (and released) at least twice:
* - the {@link LocalRelation} from the left-hand side is used as a source for the right-hand side
* - the same {@link LocalRelation} is then used to continue the execution of the query on the left-hand side
*
* It delegates all its operations to {@link ImmediateLocalSupplier} and, to prevent the double release, it will always
- * create a deep copy of the blocks received in the constructor initially.
+ * create a deep copy of the page received in the constructor initially.
*
- * Example with the flow and the blocks reuse for a query like "row x = 1 | inline stats y = max(x)"
+ * Example with the flow and the page reuse for a query like "row x = 1 | inline stats y = max(x)"
* Step 1:
* Limit[1000[INTEGER],true]
* \_InlineJoin[LEFT,[],[],[]]
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/FieldNameUtils.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/FieldNameUtils.java
index 4d959fec173e2..3b17ac0a4f443 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/FieldNameUtils.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/FieldNameUtils.java
@@ -244,8 +244,9 @@ public static PreAnalysisResult resolveFieldNames(LogicalPlan parsed, boolean ha
// there cannot be an empty list of fields, we'll ask the simplest and lightest one instead: _index
return new PreAnalysisResult(IndexResolver.INDEX_METADATA_FIELD, wildcardJoinIndices);
} else {
- fieldNames.add(MetadataAttribute.INDEX);
- return new PreAnalysisResult(fieldNames.stream().flatMap(FieldNameUtils::withSubfields).collect(toSet()), wildcardJoinIndices);
+ HashSet allFields = new HashSet<>(fieldNames.stream().flatMap(FieldNameUtils::withSubfields).collect(toSet()));
+ allFields.add(MetadataAttribute.INDEX);
+ return new PreAnalysisResult(allFields, wildcardJoinIndices);
}
}
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/FieldNameUtilsTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/FieldNameUtilsTests.java
index 313c50415075b..16d3665db10e0 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/FieldNameUtilsTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/FieldNameUtilsTests.java
@@ -47,7 +47,7 @@ public void testBasicEvalAndDrop() {
public void testSimple1() {
assertFieldNames(
"from employees | sort emp_no | keep emp_no, still_hired | limit 3",
- Set.of("_index", "_index.*", "emp_no", "emp_no.*", "still_hired", "still_hired.*")
+ Set.of("_index", "emp_no", "emp_no.*", "still_hired", "still_hired.*")
);
}
@@ -61,7 +61,7 @@ public void testSimple2() {
public void testDirectFilter() {
assertFieldNames(
"from employees | sort emp_no | where still_hired | keep emp_no | limit 3",
- Set.of("_index", "_index.*", "emp_no", "emp_no.*", "still_hired", "still_hired.*")
+ Set.of("_index", "emp_no", "emp_no.*", "still_hired", "still_hired.*")
);
}
@@ -72,14 +72,14 @@ public void testForkEval() {
public void testSort1() {
assertFieldNames(
"from employees | sort still_hired, emp_no | keep emp_no, still_hired | limit 3",
- Set.of("_index", "_index.*", "emp_no", "emp_no.*", "still_hired", "still_hired.*")
+ Set.of("_index", "emp_no", "emp_no.*", "still_hired", "still_hired.*")
);
}
public void testStatsBy() {
assertFieldNames(
"from employees | stats avg(salary) by still_hired | sort still_hired",
- Set.of("_index", "_index.*", "salary", "salary.*", "still_hired", "still_hired.*")
+ Set.of("_index", "salary", "salary.*", "still_hired", "still_hired.*")
);
}
@@ -87,7 +87,7 @@ public void testStatsByAlwaysTrue() {
assertFieldNames(
"from employees | where first_name is not null | eval always_true = starts_with(first_name, \"\") "
+ "| stats avg(salary) by always_true",
- Set.of("_index", "_index.*", "first_name", "first_name.*", "salary", "salary.*")
+ Set.of("_index", "first_name", "first_name.*", "salary", "salary.*")
);
}
@@ -96,7 +96,7 @@ public void testStatsByAlwaysFalse() {
"from employees | where first_name is not null "
+ "| eval always_false = starts_with(first_name, \"nonestartwiththis\") "
+ "| stats avg(salary) by always_false",
- Set.of("_index", "_index.*", "first_name", "first_name.*", "salary", "salary.*")
+ Set.of("_index", "first_name", "first_name.*", "salary", "salary.*")
);
}
@@ -104,7 +104,7 @@ public void testIn1() {
assertFieldNames(
"from employees | keep emp_no, is_rehired, still_hired "
+ "| where is_rehired in (still_hired, true) | where is_rehired != still_hired",
- Set.of("_index", "_index.*", "emp_no", "emp_no.*", "is_rehired", "is_rehired.*", "still_hired", "still_hired.*")
+ Set.of("_index", "emp_no", "emp_no.*", "is_rehired", "is_rehired.*", "still_hired", "still_hired.*")
);
}
@@ -116,7 +116,7 @@ public void testConvertFromString1() {
| eval rehired_bool = to_boolean(rehired_str)
| eval all_false = to_boolean(first_name)
| drop first_name
- | limit 5""", Set.of("_index", "_index.*", "emp_no", "emp_no.*", "is_rehired", "is_rehired.*", "first_name", "first_name.*"));
+ | limit 5""", Set.of("_index", "emp_no", "emp_no.*", "is_rehired", "is_rehired.*", "first_name", "first_name.*"));
}
public void testConvertFromDouble1() {
@@ -126,10 +126,10 @@ public void testConvertFromDouble1() {
| eval h_2 = height - 2.0, double2bool = to_boolean(h_2)
| where emp_no in (10036, 10037, 10038)
| keep emp_no, height, *2bool""",
- Set.of("_index", "_index.*", "height", "height.*", "emp_no", "emp_no.*", "h_2", "h_2.*", "*2bool.*", "*2bool")
+ Set.of("_index", "height", "height.*", "emp_no", "emp_no.*", "h_2", "h_2.*", "*2bool.*", "*2bool")
);
// TODO asking for more shouldn't hurt. Can we do better? ("h_2" shouldn't be in the list of fields)
- // Set.of("_index", "_index.*", "height", "height.*", "emp_no", "emp_no.*", "*2bool.*", "*2bool"));
+ // Set.of("_index", "height", "height.*", "emp_no", "emp_no.*", "*2bool.*", "*2bool"));
}
public void testConvertFromIntAndLong() {
@@ -138,7 +138,6 @@ public void testConvertFromIntAndLong() {
+ "| eval int2bool = to_boolean(salary_change.int), long2bool = to_boolean(salary_change.long) | limit 10",
Set.of(
"_index",
- "_index.*",
"emp_no",
"emp_no.*",
"salary_change*",
@@ -154,7 +153,7 @@ public void testIntToInt() {
assertFieldNames("""
from employees
| where emp_no < 10002
- | keep emp_no""", Set.of("_index", "_index.*", "emp_no", "emp_no.*"));
+ | keep emp_no""", Set.of("_index", "emp_no", "emp_no.*"));
}
public void testLongToLong() {
@@ -164,16 +163,7 @@ public void testLongToLong() {
| where languages.long < avg_worked_seconds
| limit 1
| keep emp_no""",
- Set.of(
- "_index",
- "_index.*",
- "emp_no",
- "emp_no.*",
- "languages.long",
- "languages.long.*",
- "avg_worked_seconds",
- "avg_worked_seconds.*"
- )
+ Set.of("_index", "emp_no", "emp_no.*", "languages.long", "languages.long.*", "avg_worked_seconds", "avg_worked_seconds.*")
);
}
@@ -183,7 +173,7 @@ public void testDateToDate() {
| where birth_date < hire_date
| keep emp_no
| sort emp_no
- | limit 1""", Set.of("_index", "_index.*", "birth_date", "birth_date.*", "emp_no", "emp_no.*", "hire_date", "hire_date.*"));
+ | limit 1""", Set.of("_index", "birth_date", "birth_date.*", "emp_no", "emp_no.*", "hire_date", "hire_date.*"));
}
public void testTwoConditionsWithDefault() {
@@ -191,7 +181,7 @@ public void testTwoConditionsWithDefault() {
from employees
| eval type = case(languages <= 1, "monolingual", languages <= 2, "bilingual", "polyglot")
| keep emp_no, type
- | limit 10""", Set.of("_index", "_index.*", "emp_no", "emp_no.*", "languages", "languages.*"));
+ | limit 10""", Set.of("_index", "emp_no", "emp_no.*", "languages", "languages.*"));
}
public void testSingleCondition() {
@@ -199,7 +189,7 @@ public void testSingleCondition() {
from employees
| eval g = case(gender == "F", true)
| keep gender, g
- | limit 10""", Set.of("_index", "_index.*", "gender", "gender.*"));
+ | limit 10""", Set.of("_index", "gender", "gender.*"));
}
public void testConditionIsNull() {
@@ -207,27 +197,24 @@ public void testConditionIsNull() {
from employees
| eval g = case(gender == "F", 1, languages > 1, 2, 3)
| keep gender, languages, g
- | limit 25""", Set.of("_index", "_index.*", "gender", "gender.*", "languages", "languages.*"));
+ | limit 25""", Set.of("_index", "gender", "gender.*", "languages", "languages.*"));
}
public void testEvalAssign() {
assertFieldNames(
"from employees | sort hire_date | eval x = hire_date | keep emp_no, x | limit 5",
- Set.of("_index", "_index.*", "hire_date", "hire_date.*", "emp_no", "emp_no.*")
+ Set.of("_index", "hire_date", "hire_date.*", "emp_no", "emp_no.*")
);
}
public void testMinMax() {
- assertFieldNames(
- "from employees | stats min = min(hire_date), max = max(hire_date)",
- Set.of("_index", "_index.*", "hire_date", "hire_date.*")
- );
+ assertFieldNames("from employees | stats min = min(hire_date), max = max(hire_date)", Set.of("_index", "hire_date", "hire_date.*"));
}
public void testEvalDateTruncIntervalExpressionPeriod() {
assertFieldNames(
"from employees | sort hire_date | eval x = date_trunc(hire_date, 1 month) | keep emp_no, hire_date, x | limit 5",
- Set.of("_index", "_index.*", "hire_date", "hire_date.*", "emp_no", "emp_no.*")
+ Set.of("_index", "hire_date", "hire_date.*", "emp_no", "emp_no.*")
);
}
@@ -238,7 +225,7 @@ public void testEvalDateTruncGrouping() {
| stats count(emp_no) by y
| sort y
| keep y, `count(emp_no)`
- | limit 5""", Set.of("_index", "_index.*", "hire_date", "hire_date.*", "emp_no", "emp_no.*"));
+ | limit 5""", Set.of("_index", "hire_date", "hire_date.*", "emp_no", "emp_no.*"));
}
public void testIn2() {
@@ -248,7 +235,7 @@ public void testIn2() {
| where birth_date not in (x, hire_date)
| keep x, hire_date
| sort x desc
- | limit 4""", Set.of("_index", "_index.*", "hire_date", "hire_date.*", "birth_date", "birth_date.*"));
+ | limit 4""", Set.of("_index", "hire_date", "hire_date.*", "birth_date", "birth_date.*"));
}
public void testBucketMonth() {
@@ -257,13 +244,13 @@ public void testBucketMonth() {
| where hire_date >= "1985-01-01T00:00:00Z" and hire_date < "1986-01-01T00:00:00Z"
| eval hd = bucket(hire_date, 20, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z")
| sort hire_date
- | keep hire_date, hd""", Set.of("_index", "_index.*", "hire_date", "hire_date.*"));
+ | keep hire_date, hd""", Set.of("_index", "hire_date", "hire_date.*"));
}
public void testBorn_before_today() {
assertFieldNames(
"from employees | where birth_date < now() | sort emp_no asc | keep emp_no, birth_date| limit 1",
- Set.of("_index", "_index.*", "birth_date", "birth_date.*", "emp_no", "emp_no.*")
+ Set.of("_index", "birth_date", "birth_date.*", "emp_no", "emp_no.*")
);
}
@@ -273,7 +260,7 @@ public void testBucketMonthInAgg() {
| WHERE hire_date >= "1985-01-01T00:00:00Z" AND hire_date < "1986-01-01T00:00:00Z"
| EVAL bucket = BUCKET(hire_date, 20, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z")
| STATS AVG(salary) BY bucket
- | SORT bucket""", Set.of("_index", "_index.*", "salary", "salary.*", "hire_date", "hire_date.*"));
+ | SORT bucket""", Set.of("_index", "salary", "salary.*", "hire_date", "hire_date.*"));
}
public void testEvalDateParseDynamic() {
@@ -284,7 +271,7 @@ public void testEvalDateParseDynamic() {
| eval birth_date_string = date_format("yyyy-MM-dd", birth_date)
| eval new_date = date_parse("yyyy-MM-dd", birth_date_string)
| eval bool = new_date == birth_date
- | keep emp_no, new_date, birth_date, bool""", Set.of("_index", "_index.*", "emp_no", "emp_no.*", "birth_date", "birth_date.*"));
+ | keep emp_no, new_date, birth_date, bool""", Set.of("_index", "emp_no", "emp_no.*", "birth_date", "birth_date.*"));
}
public void testDateFields() {
@@ -292,7 +279,7 @@ public void testDateFields() {
from employees
| where emp_no == 10049 or emp_no == 10050
| eval year = date_extract("year", birth_date), month = date_extract("month_of_year", birth_date)
- | keep emp_no, year, month""", Set.of("_index", "_index.*", "emp_no", "emp_no.*", "birth_date", "birth_date.*"));
+ | keep emp_no, year, month""", Set.of("_index", "emp_no", "emp_no.*", "birth_date", "birth_date.*"));
}
public void testEvalDissect() {
@@ -302,7 +289,7 @@ public void testEvalDissect() {
| dissect full_name "%{a} %{b}"
| sort emp_no asc
| keep full_name, a, b
- | limit 3""", Set.of("_index", "_index.*", "first_name", "first_name.*", "last_name", "last_name.*", "emp_no", "emp_no.*"));
+ | limit 3""", Set.of("_index", "first_name", "first_name.*", "last_name", "last_name.*", "emp_no", "emp_no.*"));
}
public void testDissectExpression() {
@@ -311,7 +298,7 @@ public void testDissectExpression() {
| dissect concat(first_name, " ", last_name) "%{a} %{b}"
| sort emp_no asc
| keep a, b
- | limit 3""", Set.of("_index", "_index.*", "first_name", "first_name.*", "last_name", "last_name.*", "emp_no", "emp_no.*"));
+ | limit 3""", Set.of("_index", "first_name", "first_name.*", "last_name", "last_name.*", "emp_no", "emp_no.*"));
}
public void testMultivalueInput1() {
@@ -320,7 +307,7 @@ public void testMultivalueInput1() {
| where emp_no <= 10006
| dissect job_positions "%{a} %{b} %{c}"
| sort emp_no
- | keep emp_no, a, b, c""", Set.of("_index", "_index.*", "emp_no", "emp_no.*", "job_positions", "job_positions.*"));
+ | keep emp_no, a, b, c""", Set.of("_index", "emp_no", "emp_no.*", "job_positions", "job_positions.*"));
}
public void testLimitZero() {
@@ -375,14 +362,14 @@ public void testDocsEval() {
| KEEP first_name, last_name, height
| EVAL height_feet = height * 3.281, height_cm = height * 100
| WHERE first_name == "Georgi"
- | LIMIT 1""", Set.of("_index", "_index.*", "first_name", "first_name.*", "last_name", "last_name.*", "height", "height.*"));
+ | LIMIT 1""", Set.of("_index", "first_name", "first_name.*", "last_name", "last_name.*", "height", "height.*"));
}
public void testDocsKeepWildcard() {
assertFieldNames("""
FROM employees
| KEEP h*
- | LIMIT 0""", Set.of("_index", "_index.*", "h*"));
+ | LIMIT 0""", Set.of("_index", "h*"));
}
public void testDocsKeepDoubleWildcard() {
@@ -393,14 +380,11 @@ public void testDocsKeepDoubleWildcard() {
}
public void testDocsRename() {
- assertFieldNames(
- """
- FROM employees
- | KEEP first_name, last_name, still_hired
- | RENAME still_hired AS employed
- | LIMIT 0""",
- Set.of("_index", "_index.*", "first_name", "first_name.*", "last_name", "last_name.*", "still_hired", "still_hired.*")
- );
+ assertFieldNames("""
+ FROM employees
+ | KEEP first_name, last_name, still_hired
+ | RENAME still_hired AS employed
+ | LIMIT 0""", Set.of("_index", "first_name", "first_name.*", "last_name", "last_name.*", "still_hired", "still_hired.*"));
}
public void testDocsRenameMultipleColumns() {
@@ -408,14 +392,14 @@ public void testDocsRenameMultipleColumns() {
FROM employees
| KEEP first_name, last_name
| RENAME first_name AS fn, last_name AS ln
- | LIMIT 0""", Set.of("_index", "_index.*", "first_name", "first_name.*", "last_name", "last_name.*"));
+ | LIMIT 0""", Set.of("_index", "first_name", "first_name.*", "last_name", "last_name.*"));
}
public void testDocsStats() {
assertFieldNames("""
FROM employees
| STATS count = COUNT(emp_no) BY languages
- | SORT languages""", Set.of("_index", "_index.*", "emp_no", "emp_no.*", "languages", "languages.*"));
+ | SORT languages""", Set.of("_index", "emp_no", "emp_no.*", "languages", "languages.*"));
}
public void testEvalStats() {
@@ -440,13 +424,13 @@ public void testEvalStats() {
assertFieldNames("""
FROM employees
| STATS count = COUNT(*) BY first_name
- | SORT first_name""", Set.of("_index", "_index.*", "first_name", "first_name.*"));
+ | SORT first_name""", Set.of("_index", "first_name", "first_name.*"));
assertFieldNames("""
FROM employees
| EVAL y = "a"
| STATS count = COUNT(*) BY x = y
- | SORT x, first_name""", Set.of("_index", "_index.*", "first_name", "first_name.*"));
+ | SORT x, first_name""", Set.of("_index", "first_name", "first_name.*"));
assertFieldNames("""
FROM employees
@@ -464,26 +448,26 @@ public void testEvalStats() {
FROM employees
| EVAL y = to_upper(first_name), z = "z"
| STATS count = COUNT(*) BY first_name = to_lower(y), z
- | SORT first_name""", Set.of("_index", "_index.*", "first_name", "first_name.*"));
+ | SORT first_name""", Set.of("_index", "first_name", "first_name.*"));
assertFieldNames("""
FROM employees
| EVAL y = "a"
| STATS count = COUNT(*) BY x = y, z = first_name
- | SORT x, z""", Set.of("_index", "_index.*", "first_name", "first_name.*"));
+ | SORT x, z""", Set.of("_index", "first_name", "first_name.*"));
assertFieldNames("""
FROM employees
| EVAL y = "a"
| STATS count = COUNT(*) BY x = y, first_name
- | SORT x, first_name""", Set.of("_index", "_index.*", "first_name", "first_name.*"));
+ | SORT x, first_name""", Set.of("_index", "first_name", "first_name.*"));
assertFieldNames("""
FROM employees
| EVAL y = "a"
| STATS count = COUNT(first_name) BY x = y
| SORT x
- | DROP first_name""", Set.of("_index", "_index.*", "first_name", "first_name.*"));
+ | DROP first_name""", Set.of("_index", "first_name", "first_name.*"));
assertFieldNames("""
FROM employees
@@ -495,14 +479,14 @@ public void testEvalStats() {
FROM employees
| EVAL y = "a"
| STATS count = COUNT(*) BY first_name, y
- | MV_EXPAND first_name""", Set.of("_index", "_index.*", "first_name", "first_name.*"));
+ | MV_EXPAND first_name""", Set.of("_index", "first_name", "first_name.*"));
assertFieldNames("""
FROM employees
| MV_EXPAND first_name
| EVAL y = "a"
| STATS count = COUNT(*) BY first_name, y
- | SORT y""", Set.of("_index", "_index.*", "first_name", "first_name.*"));
+ | SORT y""", Set.of("_index", "first_name", "first_name.*"));
assertFieldNames("""
FROM employees
@@ -523,7 +507,7 @@ public void testEvalStats() {
| EVAL y = "a"
| STATS count = COUNT(*) BY first_name, y
| STATS count = COUNT(count) by x = y
- | SORT x""", Set.of("_index", "_index.*", "first_name", "first_name.*"));
+ | SORT x""", Set.of("_index", "first_name", "first_name.*"));
}
public void testSortWithLimitOne_DropHeight() {
@@ -536,13 +520,13 @@ public void testSortWithLimitOne_DropHeight_WithInlineStats() {
}
public void testDropAllColumns() {
- assertFieldNames("from employees | keep height | drop height | eval x = 1", Set.of("_index", "_index.*", "height", "height.*"));
+ assertFieldNames("from employees | keep height | drop height | eval x = 1", Set.of("_index", "height", "height.*"));
}
public void testDropAllColumns_WithStats() {
assertFieldNames(
"from employees | keep height | drop height | eval x = 1 | stats c=count(x), mi=min(x), s=sum(x)",
- Set.of("_index", "_index.*", "height", "height.*")
+ Set.of("_index", "height", "height.*")
);
}
@@ -555,7 +539,7 @@ public void testEnrichOn() {
| eval x = to_string(languages)
| enrich languages_policy on x
| keep emp_no, language_name""",
- Set.of("_index", "_index.*", "emp_no", "emp_no.*", "languages", "languages.*", "language_name", "language_name.*", "x", "x.*")
+ Set.of("_index", "emp_no", "emp_no.*", "languages", "languages.*", "language_name", "language_name.*", "x", "x.*")
);
}
@@ -568,7 +552,7 @@ public void testEnrichOn2() {
| keep emp_no, language_name
| sort emp_no
| limit 1""",
- Set.of("_index", "_index.*", "emp_no", "emp_no.*", "languages", "languages.*", "language_name", "language_name.*", "x", "x.*")
+ Set.of("_index", "emp_no", "emp_no.*", "languages", "languages.*", "language_name", "language_name.*", "x", "x.*")
);
}
@@ -589,7 +573,7 @@ public void testSimpleSortLimit() {
| keep emp_no, language_name
| sort emp_no
| limit 1""",
- Set.of("_index", "_index.*", "languages", "languages.*", "emp_no", "emp_no.*", "language_name", "language_name.*", "x", "x.*")
+ Set.of("_index", "languages", "languages.*", "emp_no", "emp_no.*", "language_name", "language_name.*", "x", "x.*")
);
}
@@ -598,7 +582,7 @@ public void testWith() {
"""
from employees | eval x = to_string(languages) | keep emp_no, x | sort emp_no | limit 1
| enrich languages_policy on x with language_name""",
- Set.of("_index", "_index.*", "emp_no", "emp_no.*", "languages", "languages.*", "language_name", "language_name.*", "x", "x.*")
+ Set.of("_index", "emp_no", "emp_no.*", "languages", "languages.*", "language_name", "language_name.*", "x", "x.*")
);
}
@@ -607,7 +591,7 @@ public void testWithAlias() {
"""
from employees | sort emp_no | limit 3 | eval x = to_string(languages) | keep emp_no, x
| enrich languages_policy on x with lang = language_name""",
- Set.of("_index", "_index.*", "emp_no", "emp_no.*", "languages", "languages.*", "language_name", "language_name.*", "x", "x.*")
+ Set.of("_index", "emp_no", "emp_no.*", "languages", "languages.*", "language_name", "language_name.*", "x", "x.*")
);
}
@@ -616,7 +600,7 @@ public void testWithAliasSort() {
"""
from employees | eval x = to_string(languages) | keep emp_no, x | sort emp_no | limit 3
| enrich languages_policy on x with lang = language_name""",
- Set.of("_index", "_index.*", "emp_no", "emp_no.*", "languages", "languages.*", "language_name", "language_name.*", "x", "x.*")
+ Set.of("_index", "emp_no", "emp_no.*", "languages", "languages.*", "language_name", "language_name.*", "x", "x.*")
);
}
@@ -625,7 +609,7 @@ public void testWithAliasAndPlain() {
"""
from employees | sort emp_no desc | limit 3 | eval x = to_string(languages) | keep emp_no, x
| enrich languages_policy on x with lang = language_name, language_name""",
- Set.of("_index", "_index.*", "emp_no", "emp_no.*", "languages", "languages.*", "language_name", "language_name.*", "x", "x.*")
+ Set.of("_index", "emp_no", "emp_no.*", "languages", "languages.*", "language_name", "language_name.*", "x", "x.*")
);
}
@@ -634,7 +618,7 @@ public void testWithTwoAliasesSameProp() {
"""
from employees | sort emp_no | limit 1 | eval x = to_string(languages) | keep emp_no, x
| enrich languages_policy on x with lang = language_name, lang2 = language_name""",
- Set.of("_index", "_index.*", "emp_no", "emp_no.*", "languages", "languages.*", "language_name", "language_name.*", "x", "x.*")
+ Set.of("_index", "emp_no", "emp_no.*", "languages", "languages.*", "language_name", "language_name.*", "x", "x.*")
);
}
@@ -643,7 +627,7 @@ public void testRedundantWith() {
"""
from employees | sort emp_no | limit 1 | eval x = to_string(languages) | keep emp_no, x
| enrich languages_policy on x with language_name, language_name""",
- Set.of("_index", "_index.*", "emp_no", "emp_no.*", "languages", "languages.*", "language_name", "language_name.*", "x", "x.*")
+ Set.of("_index", "emp_no", "emp_no.*", "languages", "languages.*", "language_name", "language_name.*", "x", "x.*")
);
}
@@ -654,7 +638,7 @@ public void testNullInput() {
| where emp_no == 10017
| keep emp_no, gender
| enrich languages_policy on gender with language_name, language_name""",
- Set.of("_index", "_index.*", "gender", "gender.*", "emp_no", "emp_no.*", "language_name", "language_name.*")
+ Set.of("_index", "gender", "gender.*", "emp_no", "emp_no.*", "language_name", "language_name.*")
);
}
@@ -666,7 +650,7 @@ public void testConstantNullInput() {
| eval x = to_string(languages)
| keep emp_no, x
| enrich languages_policy on x with language_name, language_name""",
- Set.of("_index", "_index.*", "languages", "languages.*", "emp_no", "emp_no.*", "language_name", "language_name.*", "x", "x.*")
+ Set.of("_index", "languages", "languages.*", "emp_no", "emp_no.*", "language_name", "language_name.*", "x", "x.*")
);
}
@@ -681,7 +665,6 @@ public void testEnrichEval() {
| sort emp_no desc | limit 3""",
Set.of(
"_index",
- "_index.*",
"emp_no",
"x",
"lang",
@@ -707,27 +690,14 @@ public void testSimple() {
| where x > 1
| keep emp_no, language_name
| limit 1""",
- Set.of(
- "_index",
- "_index.*",
- "emp_no",
- "emp_no.*",
- "languages",
- "languages.*",
- "language_name",
- "language_name.*",
- "x",
- "y",
- "x.*",
- "y.*"
- )
+ Set.of("_index", "emp_no", "emp_no.*", "languages", "languages.*", "language_name", "language_name.*", "x", "y", "x.*", "y.*")
);
}
public void testEvalNullSort() {
assertFieldNames(
"from employees | eval x = null | sort x asc, emp_no desc | keep emp_no, x, last_name | limit 2",
- Set.of("_index", "_index.*", "last_name", "last_name.*", "emp_no", "emp_no.*")
+ Set.of("_index", "last_name", "last_name.*", "emp_no", "emp_no.*")
);
}
@@ -738,7 +708,7 @@ public void testFilterEvalFilter() {
| eval name_len = length(first_name)
| where name_len < 4
| keep first_name
- | sort first_name""", Set.of("_index", "_index.*", "emp_no", "emp_no.*", "first_name", "first_name.*"));
+ | sort first_name""", Set.of("_index", "emp_no", "emp_no.*", "first_name", "first_name.*"));
}
public void testEvalWithIsNullIsNotNull() {
@@ -749,18 +719,7 @@ public void testEvalWithIsNullIsNotNull() {
| sort emp_no
| limit 1
| keep *true*, *false*, first_name, last_name""",
- Set.of(
- "_index",
- "_index.*",
- "emp_no",
- "emp_no.*",
- "first_name",
- "first_name.*",
- "last_name",
- "last_name.*",
- "*true*",
- "*false*"
- )
+ Set.of("_index", "emp_no", "emp_no.*", "first_name", "first_name.*", "last_name", "last_name.*", "*true*", "*false*")
);
}
@@ -769,7 +728,6 @@ public void testInDouble() {
"from employees | keep emp_no, height, height.float, height.half_float, height.scaled_float | where height in (2.03)",
Set.of(
"_index",
- "_index.*",
"emp_no",
"emp_no.*",
"height",
@@ -787,7 +745,7 @@ public void testInDouble() {
public void testConvertFromDatetime() {
assertFieldNames(
"from employees | sort emp_no | eval hire_double = to_double(hire_date) | keep emp_no, hire_date, hire_double | limit 3",
- Set.of("_index", "_index.*", "emp_no", "emp_no.*", "hire_date", "hire_date.*")
+ Set.of("_index", "emp_no", "emp_no.*", "hire_date", "hire_date.*")
);
}
@@ -797,7 +755,7 @@ public void testBucket() {
| WHERE hire_date >= "1985-01-01T00:00:00Z" AND hire_date < "1986-01-01T00:00:00Z"
| EVAL bh = bucket(height, 20, 1.41, 2.10)
| SORT hire_date
- | KEEP hire_date, height, bh""", Set.of("_index", "_index.*", "hire_date", "hire_date.*", "height", "height.*"));
+ | KEEP hire_date, height, bh""", Set.of("_index", "hire_date", "hire_date.*", "height", "height.*"));
}
public void testEvalGrok() {
@@ -807,7 +765,7 @@ public void testEvalGrok() {
| grok full_name "%{WORD:a} %{WORD:b}"
| sort emp_no asc
| keep full_name, a, b
- | limit 3""", Set.of("_index", "_index.*", "first_name", "first_name.*", "last_name", "last_name.*", "emp_no", "emp_no.*"));
+ | limit 3""", Set.of("_index", "first_name", "first_name.*", "last_name", "last_name.*", "emp_no", "emp_no.*"));
}
public void testGrokExpression() {
@@ -816,7 +774,7 @@ public void testGrokExpression() {
| grok concat(first_name, " ", last_name) "%{WORD:a} %{WORD:b}"
| sort emp_no asc
| keep a, b
- | limit 3""", Set.of("_index", "_index.*", "first_name", "first_name.*", "last_name", "last_name.*", "emp_no", "emp_no.*"));
+ | limit 3""", Set.of("_index", "first_name", "first_name.*", "last_name", "last_name.*", "emp_no", "emp_no.*"));
}
public void testEvalGrokSort() {
@@ -826,7 +784,7 @@ public void testEvalGrokSort() {
| grok full_name "%{WORD:a} %{WORD:b}"
| sort a asc
| keep full_name, a, b
- | limit 3""", Set.of("_index", "_index.*", "first_name", "first_name.*", "last_name", "last_name.*"));
+ | limit 3""", Set.of("_index", "first_name", "first_name.*", "last_name", "last_name.*"));
}
public void testGrokStats() {
@@ -836,7 +794,7 @@ public void testGrokStats() {
| grok x "%{WORD:a} %{WORD:b}"
| stats n = max(emp_no) by a
| keep a, n
- | sort a asc""", Set.of("_index", "_index.*", "gender", "gender.*", "emp_no", "emp_no.*"));
+ | sort a asc""", Set.of("_index", "gender", "gender.*", "emp_no", "emp_no.*"));
}
public void testNullOnePattern() {
@@ -844,19 +802,16 @@ public void testNullOnePattern() {
from employees
| where emp_no == 10030
| grok first_name "%{WORD:a}"
- | keep first_name, a""", Set.of("_index", "_index.*", "first_name", "first_name.*", "emp_no", "emp_no.*"));
+ | keep first_name, a""", Set.of("_index", "first_name", "first_name.*", "emp_no", "emp_no.*"));
}
public void testMultivalueInput() {
- assertFieldNames(
- """
- from employees
- | where emp_no <= 10006
- | grok job_positions "%{WORD:a} %{WORD:b} %{WORD:c}"
- | sort emp_no
- | keep emp_no, a, b, c, job_positions""",
- Set.of("_index", "_index.*", "job_positions", "job_positions.*", "emp_no", "emp_no.*")
- );
+ assertFieldNames("""
+ from employees
+ | where emp_no <= 10006
+ | grok job_positions "%{WORD:a} %{WORD:b} %{WORD:c}"
+ | sort emp_no
+ | keep emp_no, a, b, c, job_positions""", Set.of("_index", "job_positions", "job_positions.*", "emp_no", "emp_no.*"));
}
public void testSelectAll() {
@@ -873,7 +828,7 @@ public void testFilterById_WithInlineStats() {
}
public void testKeepId() {
- assertFieldNames("FROM apps metadata _id | WHERE id == 3 | KEEP _id", Set.of("_index", "_index.*", "id", "id.*"));
+ assertFieldNames("FROM apps metadata _id | WHERE id == 3 | KEEP _id", Set.of("_index", "id", "id.*"));
}
public void testIdRangeAndSort() {
@@ -881,22 +836,19 @@ public void testIdRangeAndSort() {
FROM apps metadata _id
| WHERE _id >= "2" AND _id <= "7"
| SORT _id
- | keep id, name, _id""", Set.of("_index", "_index.*", "id", "id.*", "name", "name.*"));
+ | keep id, name, _id""", Set.of("_index", "id", "id.*", "name", "name.*"));
}
public void testOrderById() {
- assertFieldNames("FROM apps metadata _id | KEEP _id, name | SORT _id", Set.of("_index", "_index.*", "name", "name.*"));
+ assertFieldNames("FROM apps metadata _id | KEEP _id, name | SORT _id", Set.of("_index", "name", "name.*"));
}
public void testOrderByIdDesc() {
- assertFieldNames("FROM apps metadata _id | KEEP _id, name | SORT _id DESC", Set.of("_index", "_index.*", "name", "name.*"));
+ assertFieldNames("FROM apps metadata _id | KEEP _id, name | SORT _id DESC", Set.of("_index", "name", "name.*"));
}
public void testConcatId() {
- assertFieldNames(
- "FROM apps metadata _id | eval c = concat(_id, name) | SORT _id | KEEP c",
- Set.of("_index", "_index.*", "name", "name.*")
- );
+ assertFieldNames("FROM apps metadata _id | eval c = concat(_id, name) | SORT _id | KEEP c", Set.of("_index", "name", "name.*"));
}
public void testStatsOnId() {
@@ -906,58 +858,49 @@ public void testStatsOnId() {
public void testStatsOnIdByGroup() {
assertFieldNames(
"FROM apps metadata _id | stats c = count(_id) by name | sort c desc, name | limit 5",
- Set.of("_index", "_index.*", "name", "name.*")
+ Set.of("_index", "name", "name.*")
);
}
public void testSimpleProject() {
assertFieldNames(
"from hosts | keep card, host, ip0, ip1",
- Set.of("_index", "_index.*", "card", "card.*", "host", "host.*", "ip0", "ip0.*", "ip1", "ip1.*")
+ Set.of("_index", "card", "card.*", "host", "host.*", "ip0", "ip0.*", "ip1", "ip1.*")
);
}
public void testEquals() {
assertFieldNames(
"from hosts | sort host, card | where ip0 == ip1 | keep card, host",
- Set.of("_index", "_index.*", "card", "card.*", "host", "host.*", "ip0", "ip0.*", "ip1", "ip1.*")
+ Set.of("_index", "card", "card.*", "host", "host.*", "ip0", "ip0.*", "ip1", "ip1.*")
);
}
public void testConditional() {
assertFieldNames(
"from hosts | eval eq=case(ip0==ip1, ip0, ip1) | keep eq, ip0, ip1",
- Set.of("_index", "_index.*", "ip1", "ip1.*", "ip0", "ip0.*")
+ Set.of("_index", "ip1", "ip1.*", "ip0", "ip0.*")
);
}
public void testWhereWithAverageBySubField() {
assertFieldNames(
"from employees | where languages + 1 == 6 | stats avg(avg_worked_seconds) by languages.long",
- Set.of(
- "_index",
- "_index.*",
- "languages",
- "languages.*",
- "avg_worked_seconds",
- "avg_worked_seconds.*",
- "languages.long",
- "languages.long.*"
- )
+ Set.of("_index", "languages", "languages.*", "avg_worked_seconds", "avg_worked_seconds.*", "languages.long", "languages.long.*")
);
}
public void testAverageOfEvalValue() {
assertFieldNames(
"from employees | eval ratio = salary / height | stats avg(ratio)",
- Set.of("_index", "_index.*", "salary", "salary.*", "height", "height.*")
+ Set.of("_index", "salary", "salary.*", "height", "height.*")
);
}
public void testTopNProjectEvalProject() {
assertFieldNames(
"from employees | sort salary | limit 1 | keep languages, salary | eval x = languages + 1 | keep x",
- Set.of("_index", "_index.*", "salary", "salary.*", "languages", "languages.*")
+ Set.of("_index", "salary", "salary.*", "languages", "languages.*")
);
}
@@ -968,13 +911,13 @@ public void testMvSum() {
| eval salary_change = mv_sum(salary_change.int)
| sort emp_no
| keep emp_no, salary_change.int, salary_change
- | limit 7""", Set.of("_index", "_index.*", "emp_no", "emp_no.*", "salary_change.int", "salary_change.int.*"));
+ | limit 7""", Set.of("_index", "emp_no", "emp_no.*", "salary_change.int", "salary_change.int.*"));
}
public void testMetaIndexAliasedInAggs() {
assertFieldNames(
"from employees metadata _index | eval _i = _index | stats max = max(emp_no) by _i",
- Set.of("_index", "_index.*", "emp_no", "emp_no.*")
+ Set.of("_index", "emp_no", "emp_no.*")
);
}
@@ -984,13 +927,13 @@ public void testCoalesceFolding() {
| EVAL foo=COALESCE(true, false, null)
| SORT emp_no ASC
| KEEP emp_no, first_name, foo
- | limit 3""", Set.of("_index", "_index.*", "emp_no", "emp_no.*", "first_name", "first_name.*"));
+ | limit 3""", Set.of("_index", "emp_no", "emp_no.*", "first_name", "first_name.*"));
}
public void testRenameEvalProject() {
assertFieldNames(
"from employees | rename languages as x | keep x | eval z = 2 * x | keep x, z | limit 3",
- Set.of("_index", "_index.*", "languages", "languages.*")
+ Set.of("_index", "languages", "languages.*")
);
}
@@ -1002,38 +945,27 @@ public void testRenameProjectEval() {
| keep x, y
| eval x2 = x + 1
| eval y2 = y + 2
- | limit 3""", Set.of("_index", "_index.*", "languages", "languages.*"));
+ | limit 3""", Set.of("_index", "languages", "languages.*"));
}
public void testRenameWithFilterPushedToES() {
assertFieldNames(
"from employees | rename emp_no as x | keep languages, first_name, last_name, x | where x > 10030 and x < 10040 | limit 5",
- Set.of(
- "_index",
- "_index.*",
- "emp_no",
- "emp_no.*",
- "languages",
- "languages.*",
- "first_name",
- "first_name.*",
- "last_name",
- "last_name.*"
- )
+ Set.of("_index", "emp_no", "emp_no.*", "languages", "languages.*", "first_name", "first_name.*", "last_name", "last_name.*")
);
}
public void testRenameOverride() {
assertFieldNames(
"from employees | rename emp_no as languages | keep languages, last_name | limit 3",
- Set.of("_index", "_index.*", "emp_no", "emp_no.*", "last_name", "last_name.*")
+ Set.of("_index", "emp_no", "emp_no.*", "last_name", "last_name.*")
);
}
public void testProjectRenameDate() {
assertFieldNames(
"from employees | sort hire_date | rename hire_date as x | keep emp_no, x | limit 5",
- Set.of("_index", "_index.*", "hire_date", "hire_date.*", "emp_no", "emp_no.*")
+ Set.of("_index", "hire_date", "hire_date.*", "emp_no", "emp_no.*")
);
}
@@ -1048,16 +980,13 @@ public void testRenameDrop() {
}
public void testMaxOfLong() {
- assertFieldNames(
- "from employees | stats l = max(languages.long)",
- Set.of("_index", "_index.*", "languages.long", "languages.long.*")
- );
+ assertFieldNames("from employees | stats l = max(languages.long)", Set.of("_index", "languages.long", "languages.long.*"));
}
public void testGroupByAlias() {
assertFieldNames(
"from employees | rename languages as l | keep l, height | stats m = min(height) by l | sort l",
- Set.of("_index", "_index.*", "languages", "languages.*", "height", "height.*")
+ Set.of("_index", "languages", "languages.*", "height", "height.*")
);
}
@@ -1066,7 +995,7 @@ public void testByStringAndLong() {
from employees
| eval trunk_worked_seconds = avg_worked_seconds / 100000000 * 100000000
| stats c = count(gender) by gender, trunk_worked_seconds
- | sort c desc""", Set.of("_index", "_index.*", "avg_worked_seconds", "avg_worked_seconds.*", "gender", "gender.*"));
+ | sort c desc""", Set.of("_index", "avg_worked_seconds", "avg_worked_seconds.*", "gender", "gender.*"));
}
public void testByStringAndLongWithAlias() {
@@ -1076,7 +1005,7 @@ public void testByStringAndLongWithAlias() {
| rename gender as g, trunk_worked_seconds as tws
| keep g, tws
| stats c = count(g) by g, tws
- | sort c desc""", Set.of("_index", "_index.*", "avg_worked_seconds", "avg_worked_seconds.*", "gender", "gender.*"));
+ | sort c desc""", Set.of("_index", "avg_worked_seconds", "avg_worked_seconds.*", "gender", "gender.*"));
}
public void testByStringAndString() {
@@ -1085,18 +1014,15 @@ public void testByStringAndString() {
| eval hire_year_str = date_format("yyyy", hire_date)
| stats c = count(gender) by gender, hire_year_str
| sort c desc, gender, hire_year_str
- | where c >= 5""", Set.of("_index", "_index.*", "hire_date", "hire_date.*", "gender", "gender.*"));
+ | where c >= 5""", Set.of("_index", "hire_date", "hire_date.*", "gender", "gender.*"));
}
public void testByLongAndLong() {
- assertFieldNames(
- """
- from employees
- | eval trunk_worked_seconds = avg_worked_seconds / 100000000 * 100000000
- | stats c = count(languages.long) by languages.long, trunk_worked_seconds
- | sort c desc""",
- Set.of("_index", "_index.*", "avg_worked_seconds", "avg_worked_seconds.*", "languages.long", "languages.long.*")
- );
+ assertFieldNames("""
+ from employees
+ | eval trunk_worked_seconds = avg_worked_seconds / 100000000 * 100000000
+ | stats c = count(languages.long) by languages.long, trunk_worked_seconds
+ | sort c desc""", Set.of("_index", "avg_worked_seconds", "avg_worked_seconds.*", "languages.long", "languages.long.*"));
}
public void testByDateAndKeywordAndIntWithAlias() {
@@ -1109,7 +1035,7 @@ public void testByDateAndKeywordAndIntWithAlias() {
| stats c = count(e) by d, g, l
| sort c desc, d, l desc
| limit 10""",
- Set.of("_index", "_index.*", "hire_date", "hire_date.*", "gender", "gender.*", "languages", "languages.*", "emp_no", "emp_no.*")
+ Set.of("_index", "hire_date", "hire_date.*", "gender", "gender.*", "languages", "languages.*", "emp_no", "emp_no.*")
);
}
@@ -1119,14 +1045,14 @@ public void testCountDistinctOfKeywords() {
from employees
| eval hire_year_str = date_format("yyyy", hire_date)
| stats g = count_distinct(gender), h = count_distinct(hire_year_str)""",
- Set.of("_index", "_index.*", "hire_date", "hire_date.*", "gender", "gender.*")
+ Set.of("_index", "hire_date", "hire_date.*", "gender", "gender.*")
);
}
public void testCountDistinctOfIpPrecision() {
assertFieldNames("""
FROM hosts
- | STATS COUNT_DISTINCT(ip0, 80000), COUNT_DISTINCT(ip1, 5)""", Set.of("_index", "_index.*", "ip0", "ip0.*", "ip1", "ip1.*"));
+ | STATS COUNT_DISTINCT(ip0, 80000), COUNT_DISTINCT(ip1, 5)""", Set.of("_index", "ip0", "ip0.*", "ip1", "ip1.*"));
}
public void testPercentileOfLong() {
@@ -1134,20 +1060,20 @@ public void testPercentileOfLong() {
"""
from employees
| stats p0 = percentile(salary_change.long, 0), p50 = percentile(salary_change.long, 50)""",
- Set.of("_index", "_index.*", "salary_change.long", "salary_change.long.*")
+ Set.of("_index", "salary_change.long", "salary_change.long.*")
);
}
public void testMedianOfInteger() {
assertFieldNames("""
FROM employees
- | STATS MEDIAN(salary), PERCENTILE(salary, 50)""", Set.of("_index", "_index.*", "salary", "salary.*"));
+ | STATS MEDIAN(salary), PERCENTILE(salary, 50)""", Set.of("_index", "salary", "salary.*"));
}
public void testMedianAbsoluteDeviation() {
assertFieldNames("""
FROM employees
- | STATS MEDIAN(salary), MEDIAN_ABSOLUTE_DEVIATION(salary)""", Set.of("_index", "_index.*", "salary", "salary.*"));
+ | STATS MEDIAN(salary), MEDIAN_ABSOLUTE_DEVIATION(salary)""", Set.of("_index", "salary", "salary.*"));
}
public void testIn3VLWithComputedNull() {
@@ -1160,7 +1086,7 @@ public void testIn3VLWithComputedNull() {
| keep emp_no, job_positions
| eval nil = concat("", null)
| eval is_in = job_positions in ("Accountant", "Internship", nil)""",
- Set.of("_index", "_index.*", "job_positions", "job_positions.*", "emp_no", "emp_no.*")
+ Set.of("_index", "job_positions", "job_positions.*", "emp_no", "emp_no.*")
);
}
@@ -1178,21 +1104,21 @@ version > TO_VER("1.1"), "high",
version IS NULL, "none",
"low")
| SORT version DESC NULLS LAST, id DESC
- | KEEP v, version, version_text, id, m, g, i, c""", Set.of("_index", "_index.*", "version", "version.*", "id", "id.*"));
+ | KEEP v, version, version_text, id, m, g, i, c""", Set.of("_index", "version", "version.*", "id", "id.*"));
}
public void testLikePrefix() {
assertFieldNames("""
from employees
| where first_name like "Eberhar*"
- | keep emp_no, first_name""", Set.of("_index", "_index.*", "emp_no", "emp_no.*", "first_name", "first_name.*"));
+ | keep emp_no, first_name""", Set.of("_index", "emp_no", "emp_no.*", "first_name", "first_name.*"));
}
public void testRLikePrefix() {
assertFieldNames("""
from employees
| where first_name rlike "Aleja.*"
- | keep emp_no""", Set.of("_index", "_index.*", "first_name", "first_name.*", "emp_no", "emp_no.*"));
+ | keep emp_no""", Set.of("_index", "first_name", "first_name.*", "emp_no", "emp_no.*"));
}
public void testByUnmentionedLongAndLong() {
@@ -1202,16 +1128,7 @@ public void testByUnmentionedLongAndLong() {
| eval trunk_worked_seconds = avg_worked_seconds / 100000000 * 100000000
| stats c = count(gender) by languages.long, trunk_worked_seconds
| sort c desc""",
- Set.of(
- "_index",
- "_index.*",
- "avg_worked_seconds",
- "avg_worked_seconds.*",
- "languages.long",
- "languages.long.*",
- "gender",
- "gender.*"
- )
+ Set.of("_index", "avg_worked_seconds", "avg_worked_seconds.*", "languages.long", "languages.long.*", "gender", "gender.*")
);
}
@@ -1220,7 +1137,7 @@ public void testRenameNopProject() {
from employees
| rename emp_no as emp_no
| keep emp_no, last_name
- | limit 3""", Set.of("_index", "_index.*", "emp_no", "emp_no.*", "last_name", "last_name.*"));
+ | limit 3""", Set.of("_index", "emp_no", "emp_no.*", "last_name", "last_name.*"));
}
public void testRename() {
@@ -1228,7 +1145,7 @@ public void testRename() {
from test
| rename emp_no as e
| keep first_name, e
- """, Set.of("_index", "_index.*", "emp_no", "emp_no.*", "first_name", "first_name.*"));
+ """, Set.of("_index", "emp_no", "emp_no.*", "first_name", "first_name.*"));
}
public void testChainedRename() {
@@ -1236,12 +1153,12 @@ public void testChainedRename() {
from test
| rename emp_no as r1, r1 as r2, r2 as r3
| keep first_name, r3
- """, Set.of("_index", "_index.*", "emp_no", "emp_no.*", "first_name", "first_name.*", "r1", "r1.*", "r2", "r2.*"));// TODO
- // asking for
- // more
- // shouldn't
+ """, Set.of("_index", "emp_no", "emp_no.*", "first_name", "first_name.*", "r1", "r1.*", "r2", "r2.*"));// TODO
+ // asking for
+ // more
+ // shouldn't
// hurt. Can we do better?
- // Set.of("_index", "_index.*", "emp_no", "emp_no.*", "first_name", "first_name.*"));
+ // Set.of("_index", "emp_no", "emp_no.*", "first_name", "first_name.*"));
}
public void testChainedRenameReuse() {
@@ -1249,12 +1166,12 @@ public void testChainedRenameReuse() {
from test
| rename emp_no as r1, r1 as r2, r2 as r3, first_name as r1
| keep r1, r3
- """, Set.of("_index", "_index.*", "emp_no", "emp_no.*", "first_name", "first_name.*", "r1", "r1.*", "r2", "r2.*"));// TODO
- // asking for
- // more
- // shouldn't
+ """, Set.of("_index", "emp_no", "emp_no.*", "first_name", "first_name.*", "r1", "r1.*", "r2", "r2.*"));// TODO
+ // asking for
+ // more
+ // shouldn't
// hurt. Can we do better?
- // Set.of("_index", "_index.*", "emp_no", "emp_no.*", "first_name", "first_name.*"));
+ // Set.of("_index", "emp_no", "emp_no.*", "first_name", "first_name.*"));
}
public void testRenameBackAndForth() {
@@ -1262,8 +1179,8 @@ public void testRenameBackAndForth() {
from test
| rename emp_no as r1, r1 as emp_no
| keep emp_no
- """, Set.of("_index", "_index.*", "emp_no", "emp_no.*", "r1", "r1.*"));// TODO asking for more shouldn't hurt. Can we do better?
- // Set.of("_index", "_index.*", "emp_no", "emp_no.*"));
+ """, Set.of("_index", "emp_no", "emp_no.*", "r1", "r1.*"));// TODO asking for more shouldn't hurt. Can we do better?
+ // Set.of("_index", "emp_no", "emp_no.*"));
}
public void testRenameReuseAlias() {
@@ -1276,7 +1193,7 @@ public void testRenameReuseAlias() {
public void testIfDuplicateNamesGroupingHasPriority() {
assertFieldNames(
"from employees | stats languages = avg(height), languages = min(height) by languages | sort languages",
- Set.of("_index", "_index.*", "height", "height.*", "languages", "languages.*")
+ Set.of("_index", "height", "height.*", "languages", "languages.*")
);
}
@@ -1286,7 +1203,7 @@ public void testCoalesce() {
| EVAL first_name = COALESCE(first_name, "X")
| SORT first_name DESC, emp_no ASC
| KEEP emp_no, first_name
- | limit 10""", Set.of("_index", "_index.*", "first_name", "first_name.*", "emp_no", "emp_no.*"));
+ | limit 10""", Set.of("_index", "first_name", "first_name.*", "emp_no", "emp_no.*"));
}
public void testCoalesceBackwards() {
@@ -1295,7 +1212,7 @@ public void testCoalesceBackwards() {
| EVAL first_name = COALESCE("X", first_name)
| SORT first_name DESC, emp_no ASC
| KEEP emp_no, first_name
- | limit 10""", Set.of("_index", "_index.*", "first_name", "first_name.*", "emp_no", "emp_no.*"));
+ | limit 10""", Set.of("_index", "first_name", "first_name.*", "emp_no", "emp_no.*"));
}
public void testGroupByVersionCast() {
@@ -1304,7 +1221,7 @@ public void testGroupByVersionCast() {
| EVAL g = TO_VER(CONCAT("1.", TO_STR(version)))
| STATS id = MAX(id) BY g
| SORT id
- | DROP g""", Set.of("_index", "_index.*", "version", "version.*", "id", "id.*"));
+ | DROP g""", Set.of("_index", "version", "version.*", "id", "id.*"));
}
public void testCoalesceEndsInNull() {
@@ -1313,7 +1230,7 @@ public void testCoalesceEndsInNull() {
| EVAL first_name = COALESCE(first_name, last_name, null)
| SORT first_name DESC, emp_no ASC
| KEEP emp_no, first_name
- | limit 3""", Set.of("_index", "_index.*", "first_name", "first_name.*", "last_name", "last_name.*", "emp_no", "emp_no.*"));
+ | limit 3""", Set.of("_index", "first_name", "first_name.*", "last_name", "last_name.*", "emp_no", "emp_no.*"));
}
public void testMvAvg() {
@@ -1325,16 +1242,7 @@ public void testMvAvg() {
| sort emp_no
| keep emp_no, salary_change.int, salary_change
| limit 7""",
- Set.of(
- "_index",
- "_index.*",
- "emp_no",
- "emp_no.*",
- "salary_change",
- "salary_change.*",
- "salary_change.int",
- "salary_change.int.*"
- )
+ Set.of("_index", "emp_no", "emp_no.*", "salary_change", "salary_change.*", "salary_change.int", "salary_change.int.*")
);
}
@@ -1344,8 +1252,8 @@ public void testEvalOverride() {
| eval languages = languages + 1
| eval languages = languages + 1
| limit 5
- | keep l*""", Set.of("_index", "_index.*", "languages", "languages.*", "l*"));// subtlety here. Keeping only "languages*" can
- // remove any other "l*"
+ | keep l*""", Set.of("_index", "languages", "languages.*", "l*"));// subtlety here. Keeping only "languages*" can
+ // remove any other "l*"
// named fields
}
@@ -1357,7 +1265,7 @@ public void testBasicWildcardKeep2() {
assertFieldNames("""
from test
| keep un*
- """, Set.of("_index", "_index.*", "un*"));
+ """, Set.of("_index", "un*"));
}
public void testWildcardKeep() {
@@ -1372,7 +1280,7 @@ public void testProjectThenDropName() {
from test
| keep *name
| drop first_name
- """, Set.of("_index", "_index.*", "*name", "*name.*", "first_name", "first_name.*"));
+ """, Set.of("_index", "*name", "*name.*", "first_name", "first_name.*"));
}
public void testProjectAfterDropName() {
@@ -1380,7 +1288,7 @@ public void testProjectAfterDropName() {
from test
| drop first_name
| keep *name
- """, Set.of("_index", "_index.*", "*name.*", "*name", "first_name", "first_name.*"));
+ """, Set.of("_index", "*name.*", "*name", "first_name", "first_name.*"));
}
public void testProjectWithMixedQuoting() {
@@ -1388,7 +1296,7 @@ public void testProjectWithMixedQuoting() {
from test
| drop first_name
| keep *`name`
- """, Set.of("_index", "_index.*", "*name.*", "*name", "first_name", "first_name.*"));
+ """, Set.of("_index", "*name.*", "*name", "first_name", "first_name.*"));
}
public void testProjectKeepAndDropName() {
@@ -1396,7 +1304,7 @@ public void testProjectKeepAndDropName() {
from test
| drop first_name
| keep last_name
- """, Set.of("_index", "_index.*", "last_name", "last_name.*", "first_name", "first_name.*"));
+ """, Set.of("_index", "last_name", "last_name.*", "first_name", "first_name.*"));
}
public void testProjectDropPattern() {
@@ -1445,7 +1353,7 @@ public void testProjectDropPatternAndKeepOthers() {
from test
| drop l*
| keep first_name, salary
- """, Set.of("_index", "_index.*", "l*", "first_name", "first_name.*", "salary", "salary.*"));
+ """, Set.of("_index", "l*", "first_name", "first_name.*", "salary", "salary.*"));
}
public void testProjectDropWithQuotedAndUnquotedPatternAndKeepOthers() {
@@ -1453,7 +1361,7 @@ public void testProjectDropWithQuotedAndUnquotedPatternAndKeepOthers() {
from test
| drop `l`*
| keep first_name, salary
- """, Set.of("_index", "_index.*", "l*", "first_name", "first_name.*", "salary", "salary.*"));
+ """, Set.of("_index", "l*", "first_name", "first_name.*", "salary", "salary.*"));
}
public void testAliasesThatGetDropped() {
@@ -1479,7 +1387,7 @@ public void testCountAllGrouped() {
| stats c = count(*) by languages
| rename languages as l
| sort l DESC
- """, Set.of("_index", "_index.*", "languages", "languages.*"));
+ """, Set.of("_index", "languages", "languages.*"));
}
public void testCountAllAndOtherStatGrouped() {
@@ -1487,7 +1395,7 @@ public void testCountAllAndOtherStatGrouped() {
from test
| stats c = count(*), min = min(emp_no) by languages
| sort languages
- """, Set.of("_index", "_index.*", "emp_no", "emp_no.*", "languages", "languages.*"));
+ """, Set.of("_index", "emp_no", "emp_no.*", "languages", "languages.*"));
}
public void testCountAllAndOtherStatGrouped_WithInlineStats() {
@@ -1497,7 +1405,7 @@ public void testCountAllAndOtherStatGrouped_WithInlineStats() {
| inline stats c = count(*), min = min(emp_no) by languages
| stats c = count(*), min = min(emp_no) by languages
| sort languages
- """, Set.of("_index", "_index.*", "emp_no", "emp_no.*", "languages", "languages.*"));
+ """, Set.of("_index", "emp_no", "emp_no.*", "languages", "languages.*"));
}
public void testCountAllWithImplicitNameOtherStatGrouped() {
@@ -1506,7 +1414,7 @@ public void testCountAllWithImplicitNameOtherStatGrouped() {
| stats count(*), min = min(emp_no) by languages
| drop `count(*)`
| sort languages
- """, Set.of("_index", "_index.*", "emp_no", "emp_no.*", "languages", "languages.*"));
+ """, Set.of("_index", "emp_no", "emp_no.*", "languages", "languages.*"));
}
public void testDropWithQuotedAndUnquotedName() {
@@ -1515,7 +1423,7 @@ public void testDropWithQuotedAndUnquotedName() {
| stats count(*), min = min(emp_no) by languages
| drop count`(*)`
| sort languages
- """, Set.of("_index", "_index.*", "emp_no", "emp_no.*", "languages", "languages.*"));
+ """, Set.of("_index", "emp_no", "emp_no.*", "languages", "languages.*"));
}
public void testCountAllWithEval() {
@@ -1526,7 +1434,7 @@ public void testCountAllWithEval() {
| eval x = min + 1
| stats ca = count(*), cx = count(x) by l
| sort l
- """, Set.of("_index", "_index.*", "languages", "languages.*", "salary", "salary.*"));
+ """, Set.of("_index", "languages", "languages.*", "salary", "salary.*"));
}
public void testCountAllWithEval_AndInlineStats() {
@@ -1539,7 +1447,7 @@ public void testCountAllWithEval_AndInlineStats() {
| eval x = min + 1
| stats ca = count(*), cx = count(x) by l
| sort l
- """, Set.of("_index", "_index.*", "languages", "languages.*", "salary", "salary.*"));
+ """, Set.of("_index", "languages", "languages.*", "salary", "salary.*"));
}
public void testKeepAfterEval_AndInlineStats() {
@@ -1552,7 +1460,7 @@ public void testKeepAfterEval_AndInlineStats() {
| eval x = min + 1
| keep x, l
| sort l
- """, Set.of("_index", "_index.*", "languages", "languages.*", "salary", "salary.*"));
+ """, Set.of("_index", "languages", "languages.*", "salary", "salary.*"));
}
public void testKeepBeforeEval_AndInlineStats() {
@@ -1565,7 +1473,7 @@ public void testKeepBeforeEval_AndInlineStats() {
| eval x = `max(salary)` + 1
| stats min = min(salary) by l
| sort l
- """, Set.of("_index", "_index.*", "languages", "languages.*", "salary", "salary.*", "emp_no", "emp_no.*"));
+ """, Set.of("_index", "languages", "languages.*", "salary", "salary.*", "emp_no", "emp_no.*"));
}
public void testStatsBeforeEval_AndInlineStats() {
@@ -1577,7 +1485,7 @@ public void testStatsBeforeEval_AndInlineStats() {
| eval salary = min + 1
| inline stats max(salary) by l
| sort l
- """, Set.of("_index", "_index.*", "languages", "languages.*", "salary", "salary.*"));
+ """, Set.of("_index", "languages", "languages.*", "salary", "salary.*"));
}
public void testStatsBeforeInlineStats() {
@@ -1586,7 +1494,7 @@ public void testStatsBeforeInlineStats() {
from test
| stats min = min(salary) by languages
| inline stats max(min) by languages
- """, Set.of("_index", "_index.*", "languages", "languages.*", "salary", "salary.*"));
+ """, Set.of("_index", "languages", "languages.*", "salary", "salary.*"));
}
public void testKeepBeforeInlineStats() {
@@ -1595,7 +1503,7 @@ public void testKeepBeforeInlineStats() {
from test
| keep languages, salary
| inline stats max(salary) by languages
- """, Set.of("_index", "_index.*", "languages", "languages.*", "salary", "salary.*"));
+ """, Set.of("_index", "languages", "languages.*", "salary", "salary.*"));
}
public void testCountStar() {
@@ -1618,7 +1526,7 @@ public void testDissectOverwriteName() {
assertFieldNames("""
from employees
| dissect first_name "%{first_name} %{more}"
- | keep emp_no, first_name, more""", Set.of("_index", "_index.*", "emp_no", "emp_no.*", "first_name", "first_name.*"));
+ | keep emp_no, first_name, more""", Set.of("_index", "emp_no", "emp_no.*", "first_name", "first_name.*"));
}
/**
@@ -1633,7 +1541,7 @@ public void testAvoidGrokAttributesRemoval() {
| drop message
| grok type "%{WORD:b}"
| stats x = max(b)
- | keep x""", Set.of("_index", "_index.*", "x", "b", "type", "message", "x.*", "message.*", "type.*", "b.*"));
+ | keep x""", Set.of("_index", "x", "b", "type", "message", "x.*", "message.*", "type.*", "b.*"));
}
public void testAvoidGrokAttributesRemoval2() {
@@ -1644,7 +1552,7 @@ public void testAvoidGrokAttributesRemoval2() {
| lookup join message_types_lookup on message
| stats count = count(*) by type
| keep count
- | sort count""", Set.of("_index", "_index.*", "type", "message", "count", "message.*", "type.*", "count.*"));
+ | sort count""", Set.of("_index", "type", "message", "count", "message.*", "type.*", "count.*"));
}
public void testAvoidGrokAttributesRemoval3() {
@@ -1657,7 +1565,7 @@ public void testAvoidGrokAttributesRemoval3() {
| stats max = max(event_duration) by type
| keep max
| sort max""",
- Set.of("_index", "_index.*", "type", "event_duration", "message", "max", "event_duration.*", "message.*", "type.*", "max.*")
+ Set.of("_index", "type", "event_duration", "message", "max", "event_duration.*", "message.*", "type.*", "max.*")
);
}
@@ -1672,7 +1580,7 @@ public void testAvoidGrokAttributesRemoval4() {
| drop message
| grok type "%{WORD:b}"
| stats x = max(b)
- | keep x""", Set.of("_index", "_index.*", "x", "b", "type", "message", "x.*", "message.*", "type.*", "b.*"));
+ | keep x""", Set.of("_index", "x", "b", "type", "message", "x.*", "message.*", "type.*", "b.*"));
}
/**
@@ -1696,7 +1604,6 @@ public void testAvoidGrokAttributesRemoval5() {
| LIMIT 1""",
Set.of(
"_index",
- "_index.*",
"message",
"type",
"languages",
@@ -1724,7 +1631,6 @@ public void testMetrics() {
query,
Set.of(
"_index",
- "_index.*",
"@timestamp",
"@timestamp.*",
"network.total_bytes_in",
@@ -1741,7 +1647,7 @@ public void testMetrics() {
public void testLookupJoin() {
assertFieldNames(
"FROM employees | KEEP languages | RENAME languages AS language_code | LOOKUP JOIN languages_lookup ON language_code",
- Set.of("_index", "_index.*", "languages", "languages.*", "language_code", "language_code.*"),
+ Set.of("_index", "languages", "languages.*", "language_code", "language_code.*"),
Set.of("languages_lookup") // Since we have KEEP before the LOOKUP JOIN we need to wildcard the lookup index
);
}
@@ -1754,16 +1660,7 @@ public void testLookupJoinKeep() {
| RENAME languages AS language_code
| LOOKUP JOIN languages_lookup ON language_code
| KEEP languages, language_code, language_name""",
- Set.of(
- "_index",
- "_index.*",
- "languages",
- "languages.*",
- "language_code",
- "language_code.*",
- "language_name",
- "language_name.*"
- ),
+ Set.of("_index", "languages", "languages.*", "language_code", "language_code.*", "language_name", "language_name.*"),
Set.of() // Since we have KEEP after the LOOKUP, we can use the global field names instead of wildcarding the lookup index
);
}
@@ -1776,7 +1673,7 @@ public void testLookupJoinKeepWildcard() {
| RENAME languages AS language_code
| LOOKUP JOIN languages_lookup ON language_code
| KEEP language*""",
- Set.of("_index", "_index.*", "language*", "languages", "languages.*", "language_code", "language_code.*"),
+ Set.of("_index", "language*", "languages", "languages.*", "language_code", "language_code.*"),
Set.of() // Since we have KEEP after the LOOKUP, we can use the global field names instead of wildcarding the lookup index
);
}
@@ -1803,7 +1700,6 @@ public void testMultiLookupJoinKeepBefore() {
| LOOKUP JOIN message_types_lookup ON message""",
Set.of(
"_index",
- "_index.*",
"@timestamp",
"@timestamp.*",
"client_ip",
@@ -1827,7 +1723,6 @@ public void testMultiLookupJoinKeepBetween() {
| LOOKUP JOIN message_types_lookup ON message""",
Set.of(
"_index",
- "_index.*",
"@timestamp",
"@timestamp.*",
"client_ip",
@@ -1853,7 +1748,6 @@ public void testMultiLookupJoinKeepAfter() {
| KEEP @timestamp, client_ip, event_duration, message, env, type""",
Set.of(
"_index",
- "_index.*",
"@timestamp",
"@timestamp.*",
"client_ip",
@@ -1879,7 +1773,7 @@ public void testMultiLookupJoinKeepAfterWildcard() {
| LOOKUP JOIN clientips_lookup ON client_ip
| LOOKUP JOIN message_types_lookup ON message
| KEEP *env*, *type*""",
- Set.of("_index", "_index.*", "*env*", "*type*", "client_ip", "client_ip.*", "message", "message.*"),
+ Set.of("_index", "*env*", "*type*", "client_ip", "client_ip.*", "message", "message.*"),
Set.of() // Since the KEEP is after both JOINs, we can use the global field names
);
}
@@ -1908,7 +1802,6 @@ public void testMultiLookupJoinSameIndexKeepBefore() {
| LOOKUP JOIN clientips_lookup ON client_ip""",
Set.of(
"_index",
- "_index.*",
"@timestamp",
"@timestamp.*",
"client_ip",
@@ -1933,7 +1826,6 @@ public void testMultiLookupJoinSameIndexKeepBetween() {
| LOOKUP JOIN clientips_lookup ON client_ip""",
Set.of(
"_index",
- "_index.*",
"@timestamp",
"@timestamp.*",
"client_ip",
@@ -1960,7 +1852,6 @@ public void testMultiLookupJoinSameIndexKeepAfter() {
| KEEP @timestamp, client_ip, event_duration, message, env""",
Set.of(
"_index",
- "_index.*",
"@timestamp",
"@timestamp.*",
"client_ip",
@@ -1980,7 +1871,7 @@ public void testInsist_fieldIsMappedToNonKeywordSingleIndex() {
assumeTrue("UNMAPPED_FIELDS available as snapshot only", EsqlCapabilities.Cap.UNMAPPED_FIELDS.isEnabled());
assertFieldNames(
"FROM partial_mapping_sample_data | INSIST_🐔 client_ip | KEEP @timestamp, client_ip",
- Set.of("_index", "_index.*", "@timestamp", "@timestamp.*", "client_ip", "client_ip.*"),
+ Set.of("_index", "@timestamp", "@timestamp.*", "client_ip", "client_ip.*"),
Set.of()
);
}
@@ -1989,7 +1880,7 @@ public void testInsist_fieldIsMappedToKeywordSingleIndex() {
assumeTrue("UNMAPPED_FIELDS available as snapshot only", EsqlCapabilities.Cap.UNMAPPED_FIELDS.isEnabled());
assertFieldNames(
"FROM partial_mapping_sample_data | INSIST_🐔 message | KEEP @timestamp, message",
- Set.of("_index", "_index.*", "@timestamp", "@timestamp.*", "message", "message.*"),
+ Set.of("_index", "@timestamp", "@timestamp.*", "message", "message.*"),
Set.of()
);
}
@@ -1998,7 +1889,7 @@ public void testInsist_fieldDoesNotExistSingleIndex() {
assumeTrue("UNMAPPED_FIELDS available as snapshot only", EsqlCapabilities.Cap.UNMAPPED_FIELDS.isEnabled());
assertFieldNames(
"FROM partial_mapping_sample_data | INSIST_🐔 foo | KEEP @timestamp, foo",
- Set.of("_index", "_index.*", "@timestamp", "@timestamp.*", "foo", "foo.*"),
+ Set.of("_index", "@timestamp", "@timestamp.*", "foo", "foo.*"),
Set.of()
);
}
@@ -2007,7 +1898,7 @@ public void testInsist_fieldIsUnmappedSingleIndex() {
assumeTrue("UNMAPPED_FIELDS available as snapshot only", EsqlCapabilities.Cap.UNMAPPED_FIELDS.isEnabled());
assertFieldNames(
"FROM partial_mapping_sample_data | INSIST_🐔 unmapped_message | KEEP @timestamp, unmapped_message",
- Set.of("_index", "_index.*", "@timestamp", "@timestamp.*", "unmapped_message", "unmapped_message.*"),
+ Set.of("_index", "@timestamp", "@timestamp.*", "unmapped_message", "unmapped_message.*"),
Set.of()
);
}
@@ -2018,7 +1909,6 @@ public void testInsist_multiFieldTestSingleIndex() {
"FROM partial_mapping_sample_data | INSIST_🐔 message, unmapped_message, client_ip, foo | KEEP @timestamp, unmapped_message",
Set.of(
"_index",
- "_index.*",
"@timestamp",
"@timestamp.*",
"message",
@@ -2038,7 +1928,7 @@ public void testInsist_fieldIsMappedToDifferentTypesMultiIndex() {
assumeTrue("UNMAPPED_FIELDS available as snapshot only", EsqlCapabilities.Cap.UNMAPPED_FIELDS.isEnabled());
assertFieldNames(
"FROM sample_data_ts_long, sample_data METADATA _index | INSIST_🐔 @timestamp | KEEP _index, @timestamp",
- Set.of("_index", "_index.*", "@timestamp", "@timestamp.*"),
+ Set.of("_index", "@timestamp", "@timestamp.*"),
Set.of()
);
}
@@ -2053,7 +1943,6 @@ public void testInsist_multiFieldMappedMultiIndex() {
| KEEP _index, @timestamp, message, foo""",
Set.of(
"_index",
- "_index.*",
"@timestamp",
"@timestamp.*",
"message",
@@ -2079,7 +1968,6 @@ public void testJoinMaskingKeep() {
| keep `language.name`""",
Set.of(
"_index",
- "_index.*",
"language.name",
"type",
"language_name",
@@ -2093,17 +1981,14 @@ public void testJoinMaskingKeep() {
}
public void testJoinMaskingKeep2() {
- assertFieldNames(
- """
- from languag*
- | eval type = "foo"
- | rename type as message
- | lookup join message_types_lookup on message
- | rename type as message
- | lookup join message_types_lookup on message
- | keep `language.name`""",
- Set.of("_index", "_index.*", "language.name", "type", "message", "message.*", "type.*", "language.name.*")
- );
+ assertFieldNames("""
+ from languag*
+ | eval type = "foo"
+ | rename type as message
+ | lookup join message_types_lookup on message
+ | rename type as message
+ | lookup join message_types_lookup on message
+ | keep `language.name`""", Set.of("_index", "language.name", "type", "message", "message.*", "type.*", "language.name.*"));
}
public void testEnrichMaskingEvalOn() {
@@ -2116,7 +2001,7 @@ public void testEnrichMaskingEvalOn() {
| eval languages = length(languages)
| enrich languages_policy on languages
| keep emp_no, language_name""",
- Set.of("_index", "_index.*", "emp_no", "language_name", "languages", "language_name.*", "languages.*", "emp_no.*")
+ Set.of("_index", "emp_no", "language_name", "languages", "language_name.*", "languages.*", "emp_no.*")
);
}
@@ -2131,7 +2016,7 @@ public void testEnrichAndJoinMaskingEvalWh() {
| enrich languages_policy on languages
| lookup join message_types_lookup on language_name
| keep emp_no, language_name""",
- Set.of("_index", "_index.*", "emp_no", "language_name", "languages", "language_name.*", "languages.*", "emp_no.*")
+ Set.of("_index", "emp_no", "language_name", "languages", "language_name.*", "languages.*", "emp_no.*")
);
}
@@ -2142,7 +2027,7 @@ public void testDropAgainWithWildcardAfterEval() {
| drop full_name
| drop *name
| keep emp_no
- """, Set.of("_index", "_index.*", "emp_no", "emp_no.*", "*name", "*name.*"));
+ """, Set.of("_index", "emp_no", "emp_no.*", "*name", "*name.*"));
}
public void testDropWildcardFieldsAfterRename() {
@@ -2154,18 +2039,7 @@ public void testDropWildcardFieldsAfterRename() {
| drop first_names
| drop *_names
| keep gender""",
- Set.of(
- "_index",
- "_index.*",
- "first_name",
- "first_name.*",
- "last_name",
- "last_name.*",
- "*_names",
- "*_names.*",
- "gender",
- "gender.*"
- )
+ Set.of("_index", "first_name", "first_name.*", "last_name", "last_name.*", "*_names", "*_names.*", "gender", "gender.*")
);
}
@@ -2200,19 +2074,7 @@ public void testDropWildcardFieldsAfterLookupJoinsAndKeep() {
| KEEP @timestamp, message, *e*
| SORT @timestamp
| DROP *e""",
- Set.of(
- "_index",
- "_index.*",
- "client_ip",
- "client_ip.*",
- "message",
- "message.*",
- "@timestamp",
- "@timestamp.*",
- "*e*",
- "*e",
- "*e.*"
- ),
+ Set.of("_index", "client_ip", "client_ip.*", "message", "message.*", "@timestamp", "@timestamp.*", "*e*", "*e", "*e.*"),
Set.of()
);
}
@@ -2227,19 +2089,7 @@ public void testDropWildcardFieldsAfterLookupJoinKeepLookupJoin() {
| LOOKUP JOIN message_types_lookup ON message
| SORT @timestamp
| DROP *e""",
- Set.of(
- "_index",
- "_index.*",
- "client_ip",
- "client_ip.*",
- "message",
- "message.*",
- "@timestamp",
- "@timestamp.*",
- "*e*",
- "*e",
- "*e.*"
- ),
+ Set.of("_index", "client_ip", "client_ip.*", "message", "message.*", "@timestamp", "@timestamp.*", "*e*", "*e", "*e.*"),
Set.of("message_types_lookup")
);
}
@@ -2254,19 +2104,7 @@ public void testDropWildcardFieldsAfterKeepAndLookupJoins() {
| LOOKUP JOIN message_types_lookup ON message
| SORT @timestamp
| DROP *e""",
- Set.of(
- "_index",
- "_index.*",
- "client_ip",
- "client_ip.*",
- "message",
- "message.*",
- "@timestamp",
- "@timestamp.*",
- "*e*",
- "*e",
- "*e.*"
- ),
+ Set.of("_index", "client_ip", "client_ip.*", "message", "message.*", "@timestamp", "@timestamp.*", "*e*", "*e", "*e.*"),
Set.of("clientips_lookup", "message_types_lookup")
);
}
@@ -2282,19 +2120,7 @@ public void testDropWildcardFieldsAfterKeepAndLookupJoins2() {
| LOOKUP JOIN message_types_lookup ON message
| SORT @timestamp
| DROP *e, client_ip""",
- Set.of(
- "_index",
- "_index.*",
- "client_ip",
- "client_ip.*",
- "message",
- "message.*",
- "@timestamp",
- "@timestamp.*",
- "*e*",
- "*e",
- "*e.*"
- ),
+ Set.of("_index", "client_ip", "client_ip.*", "message", "message.*", "@timestamp", "@timestamp.*", "*e*", "*e", "*e.*"),
Set.of("clientips_lookup", "message_types_lookup")
);
}
@@ -2308,7 +2134,7 @@ public void testForkFieldsWithKeepAfterFork() {
(WHERE d > 1000 AND e == "aaa" | EVAL c = a + 200)
| WHERE x > y
| KEEP a, b, c, d, x
- """, Set.of("_index", "_index.*", "a", "x", "y", "c", "d", "e", "e.*", "d.*", "y.*", "x.*", "a.*", "c.*"));
+ """, Set.of("_index", "a", "x", "y", "c", "d", "e", "e.*", "d.*", "y.*", "x.*", "a.*", "c.*"));
}
public void testForkFieldsWithKeepBeforeFork() {
@@ -2320,7 +2146,7 @@ public void testForkFieldsWithKeepBeforeFork() {
| FORK (WHERE c > 1 AND a < 10000 | EVAL d = a + 500)
(WHERE d > 1000 AND e == "aaa" | EVAL c = a + 200)
| WHERE x > y
- """, Set.of("_index", "_index.*", "x", "y", "a", "d", "e", "b", "c", "e.*", "d.*", "y.*", "x.*", "a.*", "c.*", "b.*"));
+ """, Set.of("_index", "x", "y", "a", "d", "e", "b", "c", "e.*", "d.*", "y.*", "x.*", "a.*", "c.*", "b.*"));
}
public void testForkFieldsWithNoProjection() {
@@ -2342,7 +2168,7 @@ public void testForkFieldsWithStatsInOneBranch() {
| FORK (WHERE c > 1 AND a < 10000 | EVAL d = a + 500)
(STATS x = count(*), y=min(z))
| WHERE x > y
- """, Set.of("_index", "_index.*", "x", "y", "a", "c", "z", "y.*", "x.*", "z.*", "a.*", "c.*"));
+ """, Set.of("_index", "x", "y", "a", "c", "z", "y.*", "x.*", "z.*", "a.*", "c.*"));
}
public void testForkFieldsWithEnrichAndLookupJoins() {
@@ -2360,7 +2186,6 @@ public void testForkFieldsWithEnrichAndLookupJoins() {
""",
Set.of(
"_index",
- "_index.*",
"x",
"y",
"a",
@@ -2393,7 +2218,7 @@ public void testForkWithStatsInAllBranches() {
(EVAL z = a * b | STATS m = max(z))
(STATS x = count(*), y=min(z))
| WHERE x > y
- """, Set.of("_index", "_index.*", "x", "y", "c", "a", "z", "y.*", "x.*", "z.*", "a.*", "c.*"));
+ """, Set.of("_index", "x", "y", "c", "a", "z", "y.*", "x.*", "z.*", "a.*", "c.*"));
}
public void testForkWithStatsInAllBranches1() {
@@ -2402,7 +2227,7 @@ public void testForkWithStatsInAllBranches1() {
| FORK
( STATS x = min(last_name))
( EVAL last_name = first_name | STATS y = max(last_name))
- """, Set.of("_index", "_index.*", "first_name", "last_name", "first_name.*", "last_name.*"));
+ """, Set.of("_index", "first_name", "last_name", "first_name.*", "last_name.*"));
}
public void testForkWithStatsInAllBranches2() {
@@ -2411,7 +2236,7 @@ public void testForkWithStatsInAllBranches2() {
| FORK
( EVAL last_name = first_name | STATS y = VALUES(last_name))
( STATS x = VALUES(last_name))
- """, Set.of("_index", "_index.*", "first_name", "last_name", "first_name.*", "last_name.*"));
+ """, Set.of("_index", "first_name", "last_name", "first_name.*", "last_name.*"));
}
public void testForkWithStatsAndWhere() {
@@ -2454,7 +2279,7 @@ public void testForkRefs1() {
| FORK
( EVAL x = first_name)
( EVAL x = last_name)
- """, Set.of("_index", "_index.*", "first_name", "last_name", "last_name.*", "first_name.*"));
+ """, Set.of("_index", "first_name", "last_name", "last_name.*", "first_name.*"));
}
public void testForkRefs2() {
@@ -2463,7 +2288,7 @@ public void testForkRefs2() {
| FORK
( KEEP first_name | EVAL x = first_name)
( KEEP last_name | EVAL x = last_name)
- """, Set.of("_index", "_index.*", "first_name", "last_name", "last_name.*", "first_name.*"));
+ """, Set.of("_index", "first_name", "last_name", "last_name.*", "first_name.*"));
}
public void testForkRefs3() {
@@ -2472,7 +2297,7 @@ public void testForkRefs3() {
| FORK
( KEEP first_name | EVAL last_name = first_name)
( KEEP first_name | EVAL x = first_name)
- """, Set.of("_index", "_index.*", "first_name", "first_name.*"));
+ """, Set.of("_index", "first_name", "first_name.*"));
}
public void testForkRef4() {
@@ -2485,37 +2310,21 @@ public void testForkRef4() {
(eval x = to_string(languages) | enrich languages_policy on x | keep language_name)
(eval y = to_string(emp_no) | enrich languages_policy on y | keep emp_no)
""",
- Set.of(
- "_index",
- "_index.*",
- "emp_no",
- "emp_no.*",
- "languages",
- "languages.*",
- "language_name",
- "language_name.*",
- "x",
- "x.*",
- "y",
- "y.*"
- )
+ Set.of("_index", "emp_no", "emp_no.*", "languages", "languages.*", "language_name", "language_name.*", "x", "x.*", "y", "y.*")
);
}
public void testRerankerAfterFuse() {
- assertFieldNames(
- """
- FROM books METADATA _id, _index, _score
- | FORK ( WHERE title:"Tolkien" | SORT _score, _id DESC | LIMIT 3 )
- ( WHERE author:"Tolkien" | SORT _score, _id DESC | LIMIT 3 )
- | FUSE
- | RERANK "Tolkien" ON title WITH { "inference_id" : "test_reranker" }
- | EVAL _score=ROUND(_score, 2)
- | SORT _score DESC, book_no ASC
- | LIMIT 2
- | KEEP book_no, title, author, _score""",
- Set.of("_index", "_index.*", "book_no", "title", "author", "title.*", "author.*", "book_no.*")
- );
+ assertFieldNames("""
+ FROM books METADATA _id, _index, _score
+ | FORK ( WHERE title:"Tolkien" | SORT _score, _id DESC | LIMIT 3 )
+ ( WHERE author:"Tolkien" | SORT _score, _id DESC | LIMIT 3 )
+ | FUSE
+ | RERANK "Tolkien" ON title WITH { "inference_id" : "test_reranker" }
+ | EVAL _score=ROUND(_score, 2)
+ | SORT _score DESC, book_no ASC
+ | LIMIT 2
+ | KEEP book_no, title, author, _score""", Set.of("_index", "book_no", "title", "author", "title.*", "author.*", "book_no.*"));
}
public void testSimpleFuse() {
@@ -2526,7 +2335,7 @@ public void testSimpleFuse() {
| FUSE
| EVAL _score = round(_score, 4)
| KEEP _score, _fork, emp_no
- | SORT _score, _fork, emp_no""", Set.of("_index", "_index.*", "emp_no", "emp_no.*"));
+ | SORT _score, _fork, emp_no""", Set.of("_index", "emp_no", "emp_no.*"));
}
public void testFuseWithMatchAndScore() {
@@ -2538,7 +2347,7 @@ public void testFuseWithMatchAndScore() {
| SORT _score DESC, _id, _index
| EVAL _fork = mv_sort(_fork)
| EVAL _score = round(_score, 5)
- | KEEP _score, _fork, _id""", Set.of("_index", "_index.*", "title", "author", "title.*", "author.*"));
+ | KEEP _score, _fork, _id""", Set.of("_index", "title", "author", "title.*", "author.*"));
}
public void testFuseWithDisjunctionAndPostFilter() {
@@ -2551,7 +2360,7 @@ public void testFuseWithDisjunctionAndPostFilter() {
| EVAL _fork = mv_sort(_fork)
| EVAL _score = round(_score, 5)
| KEEP _score, _fork, _id
- | WHERE _score > 0.014""", Set.of("_index", "_index.*", "title", "author", "title.*", "author.*"));
+ | WHERE _score > 0.014""", Set.of("_index", "title", "author", "title.*", "author.*"));
}
public void testFuseWithStats() {
@@ -2562,40 +2371,34 @@ public void testFuseWithStats() {
( WHERE author:"Ursula K. Le Guin" AND title:"short stories" | SORT _score, _id DESC | LIMIT 3)
| FUSE
| STATS count_fork=COUNT(*) BY _fork
- | SORT _fork""", Set.of("_index", "_index.*", "title", "author", "title.*", "author.*"));
+ | SORT _fork""", Set.of("_index", "title", "author", "title.*", "author.*"));
}
public void testFuseWithMultipleForkBranches() {
- assertFieldNames(
- """
- FROM books METADATA _id, _index, _score
- | FORK (WHERE author:"Keith Faulkner" AND qstr("author:Rory or author:Beverlie") | SORT _score, _id DESC | LIMIT 3)
- (WHERE author:"Ursula K. Le Guin" | SORT _score, _id DESC | LIMIT 3)
- (WHERE title:"Tolkien" AND author:"Tolkien" AND year > 2000 AND mv_count(author) == 1 | SORT _score, _id DESC | LIMIT 3)
- (WHERE match(author, "Keith Faulkner") AND match(author, "Rory Tyger") | SORT _score, _id DESC | LIMIT 3)
- | FUSE
- | SORT _score DESC, _id, _index
- | EVAL _fork = mv_sort(_fork)
- | EVAL _score = round(_score, 4)
- | EVAL title = trim(substring(title, 1, 20))
- | KEEP _score, author, title, _fork""",
- Set.of("_index", "_index.*", "author", "title", "year", "title.*", "author.*", "year.*")
- );
+ assertFieldNames("""
+ FROM books METADATA _id, _index, _score
+ | FORK (WHERE author:"Keith Faulkner" AND qstr("author:Rory or author:Beverlie") | SORT _score, _id DESC | LIMIT 3)
+ (WHERE author:"Ursula K. Le Guin" | SORT _score, _id DESC | LIMIT 3)
+ (WHERE title:"Tolkien" AND author:"Tolkien" AND year > 2000 AND mv_count(author) == 1 | SORT _score, _id DESC | LIMIT 3)
+ (WHERE match(author, "Keith Faulkner") AND match(author, "Rory Tyger") | SORT _score, _id DESC | LIMIT 3)
+ | FUSE
+ | SORT _score DESC, _id, _index
+ | EVAL _fork = mv_sort(_fork)
+ | EVAL _score = round(_score, 4)
+ | EVAL title = trim(substring(title, 1, 20))
+ | KEEP _score, author, title, _fork""", Set.of("_index", "author", "title", "year", "title.*", "author.*", "year.*"));
}
public void testFuseWithSemanticSearch() {
- assertFieldNames(
- """
- FROM semantic_text METADATA _id, _score, _index
- | FORK ( WHERE semantic_text_field:"something" | SORT _score DESC | LIMIT 2)
- ( WHERE semantic_text_field:"something else" | SORT _score DESC | LIMIT 2)
- | FUSE
- | SORT _score DESC, _id, _index
- | EVAL _score = round(_score, 4)
- | EVAL _fork = mv_sort(_fork)
- | KEEP _fork, _score, _id, semantic_text_field""",
- Set.of("_index", "_index.*", "semantic_text_field", "semantic_text_field.*")
- );
+ assertFieldNames("""
+ FROM semantic_text METADATA _id, _score, _index
+ | FORK ( WHERE semantic_text_field:"something" | SORT _score DESC | LIMIT 2)
+ ( WHERE semantic_text_field:"something else" | SORT _score DESC | LIMIT 2)
+ | FUSE
+ | SORT _score DESC, _id, _index
+ | EVAL _score = round(_score, 4)
+ | EVAL _fork = mv_sort(_fork)
+ | KEEP _fork, _score, _id, semantic_text_field""", Set.of("_index", "semantic_text_field", "semantic_text_field.*"));
}
public void testSimpleFork() {
@@ -2604,7 +2407,7 @@ public void testSimpleFork() {
| FORK ( WHERE emp_no == 10001 )
( WHERE emp_no == 10002 )
| KEEP emp_no, _fork
- | SORT emp_no""", Set.of("_index", "_index.*", "emp_no", "emp_no.*"));
+ | SORT emp_no""", Set.of("_index", "emp_no", "emp_no.*"));
}
public void testSimpleForkWithStats() {
@@ -2614,19 +2417,16 @@ public void testSimpleForkWithStats() {
| EVAL score = round(_score, 2)
| FORK (SORT score DESC, author | LIMIT 5 | KEEP author, score)
(STATS total = COUNT(*))
- | SORT _fork, score DESC, author""", Set.of("_index", "_index.*", "score", "author", "score.*", "author.*"));
+ | SORT _fork, score DESC, author""", Set.of("_index", "score", "author", "score.*", "author.*"));
}
public void testForkWithWhereSortAndLimit() {
- assertFieldNames(
- """
- FROM employees
- | FORK ( WHERE hire_date < "1985-03-01T00:00:00Z" | SORT first_name | LIMIT 5 )
- ( WHERE hire_date < "1988-03-01T00:00:00Z" | SORT first_name | LIMIT 5 )
- | KEEP emp_no, first_name, _fork
- | SORT emp_no, _fork""",
- Set.of("_index", "_index.*", "emp_no", "first_name", "hire_date", "first_name.*", "hire_date.*", "emp_no.*")
- );
+ assertFieldNames("""
+ FROM employees
+ | FORK ( WHERE hire_date < "1985-03-01T00:00:00Z" | SORT first_name | LIMIT 5 )
+ ( WHERE hire_date < "1988-03-01T00:00:00Z" | SORT first_name | LIMIT 5 )
+ | KEEP emp_no, first_name, _fork
+ | SORT emp_no, _fork""", Set.of("_index", "emp_no", "first_name", "hire_date", "first_name.*", "hire_date.*", "emp_no.*"));
}
public void testFiveFork() {
@@ -2638,7 +2438,7 @@ public void testFiveFork() {
( WHERE emp_no == 10002 )
( WHERE emp_no == 10001 )
| KEEP _fork, emp_no
- | SORT _fork""", Set.of("_index", "_index.*", "emp_no", "emp_no.*"));
+ | SORT _fork""", Set.of("_index", "emp_no", "emp_no.*"));
}
public void testForkWithWhereSortDescAndLimit() {
@@ -2649,7 +2449,7 @@ public void testForkWithWhereSortDescAndLimit() {
( WHERE hire_date < "1988-03-01T00:00:00Z" | SORT first_name DESC NULLS LAST | LIMIT 2 )
| KEEP _fork, emp_no, first_name
| SORT _fork, first_name DESC""",
- Set.of("_index", "_index.*", "first_name", "emp_no", "hire_date", "first_name.*", "hire_date.*", "emp_no.*")
+ Set.of("_index", "first_name", "emp_no", "hire_date", "first_name.*", "hire_date.*", "emp_no.*")
);
}
@@ -2660,20 +2460,17 @@ public void testForkWithCommonPrefilter() {
| FORK ( SORT emp_no ASC | LIMIT 2 )
( SORT emp_no DESC NULLS LAST | LIMIT 2 )
| KEEP _fork, emp_no
- | SORT _fork, emp_no""", Set.of("_index", "_index.*", "emp_no", "emp_no.*"));
+ | SORT _fork, emp_no""", Set.of("_index", "emp_no", "emp_no.*"));
}
public void testForkWithSemanticSearchAndScore() {
- assertFieldNames(
- """
- FROM semantic_text METADATA _id, _score
- | FORK ( WHERE semantic_text_field:"something" | SORT _score DESC | LIMIT 2)
- ( WHERE semantic_text_field:"something else" | SORT _score DESC | LIMIT 2)
- | EVAL _score = round(_score, 4)
- | SORT _fork, _score, _id
- | KEEP _fork, _score, _id, semantic_text_field""",
- Set.of("_index", "_index.*", "semantic_text_field", "semantic_text_field.*")
- );
+ assertFieldNames("""
+ FROM semantic_text METADATA _id, _score
+ | FORK ( WHERE semantic_text_field:"something" | SORT _score DESC | LIMIT 2)
+ ( WHERE semantic_text_field:"something else" | SORT _score DESC | LIMIT 2)
+ | EVAL _score = round(_score, 4)
+ | SORT _fork, _score, _id
+ | KEEP _fork, _score, _id, semantic_text_field""", Set.of("_index", "semantic_text_field", "semantic_text_field.*"));
}
public void testForkWithEvals() {
@@ -2682,7 +2479,7 @@ public void testForkWithEvals() {
| FORK (WHERE emp_no == 10048 OR emp_no == 10081 | EVAL x = "abc" | EVAL y = 1)
(WHERE emp_no == 10081 OR emp_no == 10087 | EVAL x = "def" | EVAL z = 2)
| KEEP _fork, emp_no, x, y, z
- | SORT _fork, emp_no""", Set.of("_index", "_index.*", "emp_no", "x", "y", "z", "y.*", "x.*", "z.*", "emp_no.*"));
+ | SORT _fork, emp_no""", Set.of("_index", "emp_no", "x", "y", "z", "y.*", "x.*", "z.*", "emp_no.*"));
}
public void testForkWithStats() {
@@ -2693,7 +2490,7 @@ public void testForkWithStats() {
(STATS x = COUNT(*), y = MAX(emp_no), z = MIN(emp_no))
(STATS x = COUNT(*), y = MIN(emp_no))
| KEEP _fork, emp_no, x, y, z
- | SORT _fork, emp_no""", Set.of("_index", "_index.*", "emp_no", "x", "y", "z", "y.*", "x.*", "z.*", "emp_no.*"));
+ | SORT _fork, emp_no""", Set.of("_index", "emp_no", "x", "y", "z", "y.*", "x.*", "z.*", "emp_no.*"));
}
public void testForkWithDissect() {
@@ -2709,7 +2506,6 @@ public void testForkWithDissect() {
| SORT _fork, emp_no""",
Set.of(
"_index",
- "_index.*",
"emp_no",
"x",
"y",
@@ -2743,7 +2539,6 @@ public void testForkWithMixOfCommands() {
| SORT _fork, emp_no""",
Set.of(
"_index",
- "_index.*",
"emp_no",
"x",
"y",
@@ -2773,7 +2568,7 @@ public void testForkWithFiltersOnConstantValues() {
(STATS x = COUNT(*), y = MIN(emp_no))
| WHERE _fork == "fork2" OR a == "y"
| KEEP _fork, emp_no, x, y, z
- | SORT _fork, emp_no""", Set.of("_index", "_index.*", "emp_no", "a", "a.*", "emp_no.*"));
+ | SORT _fork, emp_no""", Set.of("_index", "emp_no", "a", "a.*", "emp_no.*"));
}
public void testForkWithUnsupportedAttributes() {
@@ -2797,7 +2592,6 @@ public void testForkAfterLookupJoin() {
| SORT _fork, emp_no""",
Set.of(
"_index",
- "_index.*",
"emp_no",
"language_code",
"language_name",
@@ -2825,7 +2619,6 @@ public void testForkBeforeLookupJoin() {
| SORT _fork, emp_no""",
Set.of(
"_index",
- "_index.*",
"emp_no",
"language_code",
"language_name",
@@ -2852,7 +2645,6 @@ public void testForkBranchWithLookupJoin() {
| SORT _fork, emp_no""",
Set.of(
"_index",
- "_index.*",
"emp_no",
"language_code",
"language_name",
@@ -2879,43 +2671,37 @@ public void testForkBeforeStats() {
( SORT emp_no ASC | LIMIT 2 | EVAL x = last_name )
( EVAL x = "abc" | EVAL y = "aaa" )
| STATS c = count(*), m = max(_fork)""",
- Set.of("_index", "_index.*", "first_name", "emp_no", "last_name", "last_name.*", "first_name.*", "emp_no.*")
+ Set.of("_index", "first_name", "emp_no", "last_name", "last_name.*", "first_name.*", "emp_no.*")
);
}
public void testForkBeforeStatsWithWhere() {
- assertFieldNames(
- """
- FROM employees
- | WHERE emp_no == 10048 OR emp_no == 10081
- | FORK ( EVAL a = CONCAT(first_name, " ", emp_no::keyword, " ", last_name)
- | DISSECT a "%{x} %{y} %{z}"
- | EVAL y = y::keyword )
- ( STATS x = COUNT(*)::keyword, y = MAX(emp_no)::keyword, z = MIN(emp_no)::keyword )
- ( SORT emp_no ASC | LIMIT 2 | EVAL x = last_name )
- ( EVAL x = "abc" | EVAL y = "aaa" )
- | STATS a = count(*) WHERE _fork == "fork1",
- b = max(_fork)""",
- Set.of("_index", "_index.*", "first_name", "emp_no", "last_name", "last_name.*", "first_name.*", "emp_no.*")
- );
+ assertFieldNames("""
+ FROM employees
+ | WHERE emp_no == 10048 OR emp_no == 10081
+ | FORK ( EVAL a = CONCAT(first_name, " ", emp_no::keyword, " ", last_name)
+ | DISSECT a "%{x} %{y} %{z}"
+ | EVAL y = y::keyword )
+ ( STATS x = COUNT(*)::keyword, y = MAX(emp_no)::keyword, z = MIN(emp_no)::keyword )
+ ( SORT emp_no ASC | LIMIT 2 | EVAL x = last_name )
+ ( EVAL x = "abc" | EVAL y = "aaa" )
+ | STATS a = count(*) WHERE _fork == "fork1",
+ b = max(_fork)""", Set.of("_index", "first_name", "emp_no", "last_name", "last_name.*", "first_name.*", "emp_no.*"));
}
public void testForkBeforeStatsByWithWhere() {
- assertFieldNames(
- """
- FROM employees
- | WHERE emp_no == 10048 OR emp_no == 10081
- | FORK ( EVAL a = CONCAT(first_name, " ", emp_no::keyword, " ", last_name)
- | DISSECT a "%{x} %{y} %{z}"
- | EVAL y = y::keyword )
- ( STATS x = COUNT(*)::keyword, y = MAX(emp_no)::keyword, z = MIN(emp_no)::keyword )
- ( SORT emp_no ASC | LIMIT 2 | EVAL x = last_name )
- ( EVAL x = "abc" | EVAL y = "aaa" )
- | STATS a = count(*) WHERE emp_no > 10000,
- b = max(x) WHERE _fork == "fork1" BY _fork
- | SORT _fork""",
- Set.of("_index", "_index.*", "emp_no", "x", "first_name", "last_name", "last_name.*", "x.*", "first_name.*", "emp_no.*")
- );
+ assertFieldNames("""
+ FROM employees
+ | WHERE emp_no == 10048 OR emp_no == 10081
+ | FORK ( EVAL a = CONCAT(first_name, " ", emp_no::keyword, " ", last_name)
+ | DISSECT a "%{x} %{y} %{z}"
+ | EVAL y = y::keyword )
+ ( STATS x = COUNT(*)::keyword, y = MAX(emp_no)::keyword, z = MIN(emp_no)::keyword )
+ ( SORT emp_no ASC | LIMIT 2 | EVAL x = last_name )
+ ( EVAL x = "abc" | EVAL y = "aaa" )
+ | STATS a = count(*) WHERE emp_no > 10000,
+ b = max(x) WHERE _fork == "fork1" BY _fork
+ | SORT _fork""", Set.of("_index", "emp_no", "x", "first_name", "last_name", "last_name.*", "x.*", "first_name.*", "emp_no.*"));
}
public void testForkAfterDrop() {
@@ -2935,7 +2721,7 @@ public void testForkBranchWithDrop() {
( WHERE language_name != "English" )
| SORT _fork, language_name
| KEEP language_name, language_code, _fork""",
- Set.of("_index", "_index.*", "language_name", "language_code", "language_code.*", "language_name.*")
+ Set.of("_index", "language_name", "language_code", "language_code.*", "language_name.*")
);
}
@@ -2949,14 +2735,11 @@ public void testForkBeforeDrop() {
}
public void testForkBranchWithKeep() {
- assertFieldNames(
- """
- FROM languages
- | FORK ( WHERE language_name == "English" | KEEP language_name, language_code )
- ( WHERE language_name != "English" )
- | SORT _fork, language_name""",
- Set.of("_index", "_index.*", "language_name", "language_code", "language_code.*", "language_name.*")
- );
+ assertFieldNames("""
+ FROM languages
+ | FORK ( WHERE language_name == "English" | KEEP language_name, language_code )
+ ( WHERE language_name != "English" )
+ | SORT _fork, language_name""", Set.of("_index", "language_name", "language_code", "language_code.*", "language_name.*"));
}
public void testForkBeforeRename() {
@@ -2976,7 +2759,7 @@ public void testForkBranchWithRenameAs() {
(WHERE language_code == 1 | RENAME language_code AS x)
| SORT _fork, language_name
| KEEP code, language_name, x, _fork""",
- Set.of("_index", "_index.*", "language_name", "language_code", "language_code.*", "language_name.*")
+ Set.of("_index", "language_name", "language_code", "language_code.*", "language_name.*")
);
}
@@ -2988,7 +2771,7 @@ public void testForkBranchWithRenameEquals() {
(WHERE language_code == 1 | RENAME x = language_code)
| SORT _fork, language_name
| KEEP code, language_name, x, _fork""",
- Set.of("_index", "_index.*", "language_name", "language_code", "language_code.*", "language_name.*")
+ Set.of("_index", "language_name", "language_code", "language_code.*", "language_name.*")
);
}
@@ -3009,7 +2792,7 @@ public void testForkBeforeDissect() {
| EVAL x = concat(gender, " foobar")
| DISSECT x "%{a} %{b}"
| SORT _fork, emp_no
- | KEEP emp_no, gender, x, a, b, _fork""", Set.of("_index", "_index.*", "emp_no", "gender", "gender.*", "emp_no.*"));
+ | KEEP emp_no, gender, x, a, b, _fork""", Set.of("_index", "emp_no", "gender", "gender.*", "emp_no.*"));
}
public void testForkBranchWithDissect() {
@@ -3020,7 +2803,7 @@ public void testForkBranchWithDissect() {
| DISSECT x "%{a} %{b}")
(WHERE emp_no == 10081 OR emp_no == 10087)
| SORT _fork, emp_no
- | KEEP emp_no, gender, x, a, b, _fork""", Set.of("_index", "_index.*", "emp_no", "gender", "gender.*", "emp_no.*"));
+ | KEEP emp_no, gender, x, a, b, _fork""", Set.of("_index", "emp_no", "gender", "gender.*", "emp_no.*"));
}
public void testForkAfterDissect() {
@@ -3031,7 +2814,7 @@ public void testForkAfterDissect() {
| FORK (WHERE emp_no == 10048 OR emp_no == 10081)
(WHERE emp_no == 10081 OR emp_no == 10087)
| SORT _fork, emp_no
- | KEEP emp_no, gender, x, a, b, _fork""", Set.of("_index", "_index.*", "emp_no", "gender", "gender.*", "emp_no.*"));
+ | KEEP emp_no, gender, x, a, b, _fork""", Set.of("_index", "emp_no", "gender", "gender.*", "emp_no.*"));
}
public void testForkAfterEnrich() {
@@ -3046,7 +2829,6 @@ public void testForkAfterEnrich() {
| SORT _fork, city.name""",
Set.of(
"_index",
- "_index.*",
"city.name",
"airport",
"city.country.continent.planet.name",
@@ -3070,7 +2852,6 @@ public void testForkBranchWithEnrich() {
| SORT _fork, city.name""",
Set.of(
"_index",
- "_index.*",
"city.name",
"airport",
"city.country.continent.planet.name",
@@ -3095,7 +2876,6 @@ public void testForkBeforeEnrich() {
| SORT _fork, city.name""",
Set.of(
"_index",
- "_index.*",
"city.name",
"airport",
"city.country.name",
@@ -3115,7 +2895,7 @@ public void testForkBeforeMvExpand() {
| FORK (WHERE emp_no == 10048 OR emp_no == 10081)
(WHERE emp_no == 10081 OR emp_no == 10087)
| MV_EXPAND job_positions
- | SORT _fork, emp_no, job_positions""", Set.of("_index", "_index.*", "emp_no", "job_positions", "job_positions.*", "emp_no.*"));
+ | SORT _fork, emp_no, job_positions""", Set.of("_index", "emp_no", "job_positions", "job_positions.*", "emp_no.*"));
}
public void testForkBranchWithMvExpand() {
@@ -3124,7 +2904,7 @@ public void testForkBranchWithMvExpand() {
| KEEP emp_no, job_positions
| FORK (WHERE emp_no == 10048 OR emp_no == 10081 | MV_EXPAND job_positions)
(WHERE emp_no == 10081 OR emp_no == 10087)
- | SORT _fork, emp_no, job_positions""", Set.of("_index", "_index.*", "emp_no", "job_positions", "job_positions.*", "emp_no.*"));
+ | SORT _fork, emp_no, job_positions""", Set.of("_index", "emp_no", "job_positions", "job_positions.*", "emp_no.*"));
}
public void testForkAfterMvExpand() {
@@ -3134,7 +2914,7 @@ public void testForkAfterMvExpand() {
| MV_EXPAND job_positions
| FORK (WHERE emp_no == 10048 OR emp_no == 10081)
(WHERE emp_no == 10081 OR emp_no == 10087)
- | SORT _fork, emp_no, job_positions""", Set.of("_index", "_index.*", "emp_no", "job_positions", "job_positions.*", "emp_no.*"));
+ | SORT _fork, emp_no, job_positions""", Set.of("_index", "emp_no", "job_positions", "job_positions.*", "emp_no.*"));
}
public void testForkBeforeInlineStatsIgnore() {
@@ -3146,37 +2926,31 @@ public void testForkBeforeInlineStatsIgnore() {
(WHERE emp_no == 10081 OR emp_no == 10087)
| INLINE STATS max_lang = MAX(languages) BY gender
| SORT emp_no, gender, _fork
- | LIMIT 5""", Set.of("_index", "_index.*", "emp_no", "gender", "languages", "gender.*", "languages.*", "emp_no.*"));
+ | LIMIT 5""", Set.of("_index", "emp_no", "gender", "languages", "gender.*", "languages.*", "emp_no.*"));
}
public void testForkBranchWithInlineStatsIgnore() {
assumeTrue("INLINE STATS required", EsqlCapabilities.Cap.INLINE_STATS.isEnabled());
- assertFieldNames(
- """
- FROM employees
- | KEEP emp_no, languages, gender
- | FORK (WHERE emp_no == 10048 OR emp_no == 10081
- | INLINE STATS x = MAX(languages) BY gender)
- (WHERE emp_no == 10081 OR emp_no == 10087
- | INLINE STATS x = MIN(languages))
- (WHERE emp_no == 10012 OR emp_no == 10012)
- | SORT emp_no, gender, _fork""",
- Set.of("_index", "_index.*", "emp_no", "gender", "languages", "gender.*", "languages.*", "emp_no.*")
- );
+ assertFieldNames("""
+ FROM employees
+ | KEEP emp_no, languages, gender
+ | FORK (WHERE emp_no == 10048 OR emp_no == 10081
+ | INLINE STATS x = MAX(languages) BY gender)
+ (WHERE emp_no == 10081 OR emp_no == 10087
+ | INLINE STATS x = MIN(languages))
+ (WHERE emp_no == 10012 OR emp_no == 10012)
+ | SORT emp_no, gender, _fork""", Set.of("_index", "emp_no", "gender", "languages", "gender.*", "languages.*", "emp_no.*"));
}
public void testForkAfterInlineStatsIgnore() {
assumeTrue("INLINE STATS required", EsqlCapabilities.Cap.INLINE_STATS.isEnabled());
- assertFieldNames(
- """
- FROM employees
- | KEEP emp_no, languages, gender
- | INLINE STATS max_lang = MAX(languages) BY gender
- | FORK (WHERE emp_no == 10048 OR emp_no == 10081)
- (WHERE emp_no == 10081 OR emp_no == 10087)
- | SORT emp_no, gender, _fork""",
- Set.of("_index", "_index.*", "emp_no", "gender", "languages", "gender.*", "languages.*", "emp_no.*")
- );
+ assertFieldNames("""
+ FROM employees
+ | KEEP emp_no, languages, gender
+ | INLINE STATS max_lang = MAX(languages) BY gender
+ | FORK (WHERE emp_no == 10048 OR emp_no == 10081)
+ (WHERE emp_no == 10081 OR emp_no == 10087)
+ | SORT emp_no, gender, _fork""", Set.of("_index", "emp_no", "gender", "languages", "gender.*", "languages.*", "emp_no.*"));
}
public void testForkBeforeChangePoint() {
@@ -3188,7 +2962,7 @@ public void testForkBeforeChangePoint() {
(WHERE emp_no <= 10100)
| CHANGE_POINT salary ON emp_no
| STATS COUNT() by type
- | SORT type""", Set.of("_index", "_index.*", "type", "emp_no", "salary", "type.*", "salary.*", "emp_no.*"));
+ | SORT type""", Set.of("_index", "type", "emp_no", "salary", "type.*", "salary.*", "emp_no.*"));
}
public void testForkBranchWithChangePoint() {
@@ -3200,7 +2974,7 @@ public void testForkBranchWithChangePoint() {
(EVAL salary=CASE(emp_no==10087, 1000000, salary)
| CHANGE_POINT salary ON emp_no)
| STATS COUNT() by type, _fork
- | SORT _fork, type""", Set.of("_index", "_index.*", "type", "emp_no", "salary", "type.*", "salary.*", "emp_no.*"));
+ | SORT _fork, type""", Set.of("_index", "type", "emp_no", "salary", "type.*", "salary.*", "emp_no.*"));
}
public void testForkAfterChangePoint() {
@@ -3213,47 +2987,38 @@ public void testForkAfterChangePoint() {
| FORK (STATS a = COUNT() by type)
(STATS b = VALUES(type))
| SORT _fork, a, type, b""",
- Set.of("_index", "_index.*", "a", "type", "b", "emp_no", "salary", "type.*", "a.*", "salary.*", "b.*", "emp_no.*")
+ Set.of("_index", "a", "type", "b", "emp_no", "salary", "type.*", "a.*", "salary.*", "b.*", "emp_no.*")
);
}
public void testForkBeforeCompletion() {
- assertFieldNames(
- """
- FROM employees
- | KEEP emp_no, first_name, last_name
- | FORK (WHERE emp_no == 10048 OR emp_no == 10081)
- (WHERE emp_no == 10081 OR emp_no == 10087)
- | COMPLETION x=CONCAT(first_name, " ", last_name) WITH { "inference_id" : "test_completion" }
- | SORT _fork, emp_no""",
- Set.of("_index", "_index.*", "emp_no", "first_name", "last_name", "last_name.*", "first_name.*", "emp_no.*")
- );
+ assertFieldNames("""
+ FROM employees
+ | KEEP emp_no, first_name, last_name
+ | FORK (WHERE emp_no == 10048 OR emp_no == 10081)
+ (WHERE emp_no == 10081 OR emp_no == 10087)
+ | COMPLETION x=CONCAT(first_name, " ", last_name) WITH { "inference_id" : "test_completion" }
+ | SORT _fork, emp_no""", Set.of("_index", "emp_no", "first_name", "last_name", "last_name.*", "first_name.*", "emp_no.*"));
}
public void testForkBranchWithCompletion() {
- assertFieldNames(
- """
- FROM employees
- | KEEP emp_no, first_name, last_name
- | FORK (WHERE emp_no == 10048 OR emp_no == 10081
- | COMPLETION x=CONCAT(first_name, " ", last_name) WITH { "inference_id" : "test_completion" })
- (WHERE emp_no == 10081 OR emp_no == 10087)
- | SORT _fork, emp_no""",
- Set.of("_index", "_index.*", "emp_no", "first_name", "last_name", "last_name.*", "first_name.*", "emp_no.*")
- );
+ assertFieldNames("""
+ FROM employees
+ | KEEP emp_no, first_name, last_name
+ | FORK (WHERE emp_no == 10048 OR emp_no == 10081
+ | COMPLETION x=CONCAT(first_name, " ", last_name) WITH { "inference_id" : "test_completion" })
+ (WHERE emp_no == 10081 OR emp_no == 10087)
+ | SORT _fork, emp_no""", Set.of("_index", "emp_no", "first_name", "last_name", "last_name.*", "first_name.*", "emp_no.*"));
}
public void testForkAfterCompletion() {
- assertFieldNames(
- """
- FROM employees
- | KEEP emp_no, first_name, last_name
- | COMPLETION x=CONCAT(first_name, " ", last_name) WITH { "inference_id" : "test_completion" }
- | FORK (WHERE emp_no == 10048 OR emp_no == 10081)
- (WHERE emp_no == 10081 OR emp_no == 10087)
- | SORT _fork, emp_no""",
- Set.of("_index", "_index.*", "emp_no", "first_name", "last_name", "last_name.*", "first_name.*", "emp_no.*")
- );
+ assertFieldNames("""
+ FROM employees
+ | KEEP emp_no, first_name, last_name
+ | COMPLETION x=CONCAT(first_name, " ", last_name) WITH { "inference_id" : "test_completion" }
+ | FORK (WHERE emp_no == 10048 OR emp_no == 10081)
+ (WHERE emp_no == 10081 OR emp_no == 10087)
+ | SORT _fork, emp_no""", Set.of("_index", "emp_no", "first_name", "last_name", "last_name.*", "first_name.*", "emp_no.*"));
}
public void testForkAfterGrok() {
@@ -3264,7 +3029,7 @@ public void testForkAfterGrok() {
| FORK (WHERE emp_no == 10048 OR emp_no == 10081)
(WHERE emp_no == 10081 OR emp_no == 10087)
| SORT _fork, emp_no
- | KEEP emp_no, gender, x, a, b, _fork""", Set.of("_index", "_index.*", "emp_no", "gender", "gender.*", "emp_no.*"));
+ | KEEP emp_no, gender, x, a, b, _fork""", Set.of("_index", "emp_no", "gender", "gender.*", "emp_no.*"));
}
public void testForkBranchWithGrok() {
@@ -3280,7 +3045,6 @@ public void testForkBranchWithGrok() {
| SORT _fork, emp_no""",
Set.of(
"_index",
- "_index.*",
"emp_no",
"x",
"y",
@@ -3305,26 +3069,23 @@ public void testForkBeforeGrok() {
| EVAL x = concat(gender, " foobar")
| GROK x "%{WORD:a} %{WORD:b}"
| SORT _fork, emp_no
- | KEEP emp_no, gender, x, a, b, _fork""", Set.of("_index", "_index.*", "emp_no", "gender", "gender.*", "emp_no.*"));
+ | KEEP emp_no, gender, x, a, b, _fork""", Set.of("_index", "emp_no", "gender", "gender.*", "emp_no.*"));
}
public void testImplicitFieldNames() {
assertFieldNames("""
FROM sample_data
- | STATS x = 1 year + TBUCKET(1 day) BY b1d = TBUCKET(1 day)""", Set.of("_index", "_index.*", "@timestamp", "@timestamp.*"));
+ | STATS x = 1 year + TBUCKET(1 day) BY b1d = TBUCKET(1 day)""", Set.of("_index", "@timestamp", "@timestamp.*"));
}
public void testKeepTimestampBeforeStats() {
- assertFieldNames(
- """
- FROM sample_data
- | WHERE event_duration > 0
- | KEEP @timestamp, client_ip
- | STATS count = COUNT(*), avg_dur = AVG(event_duration) BY hour = TBUCKET(1h), client_ip
- | SORT hour ASC
- """,
- Set.of("_index", "_index.*", "@timestamp", "@timestamp.*", "client_ip", "client_ip.*", "event_duration", "event_duration.*")
- );
+ assertFieldNames("""
+ FROM sample_data
+ | WHERE event_duration > 0
+ | KEEP @timestamp, client_ip
+ | STATS count = COUNT(*), avg_dur = AVG(event_duration) BY hour = TBUCKET(1h), client_ip
+ | SORT hour ASC
+ """, Set.of("_index", "@timestamp", "@timestamp.*", "client_ip", "client_ip.*", "event_duration", "event_duration.*"));
}
public void testKeepAtWildcardBeforeStats() {
@@ -3334,7 +3095,7 @@ public void testKeepAtWildcardBeforeStats() {
| KEEP @*, message
| STATS errors = COUNT() BY day = TBUCKET(1d), message
| SORT day ASC
- """, Set.of("_index", "_index.*", "@timestamp", "@timestamp.*", "@*", "message", "message.*"));
+ """, Set.of("_index", "@timestamp", "@timestamp.*", "@*", "message", "message.*"));
}
public void testKeepWildcardBeforeStats() {
@@ -3346,17 +3107,7 @@ public void testKeepWildcardBeforeStats() {
| STATS p95 = PERCENTILE(event_duration, 95) BY ten_min = TBUCKET(10min), client_ip
| SORT ten_min ASC
""",
- Set.of(
- "_index",
- "_index.*",
- "@timestamp",
- "@timestamp.*",
- "client_ip",
- "client_ip.*",
- "event_duration",
- "event_duration.*",
- "*stamp*"
- )
+ Set.of("_index", "@timestamp", "@timestamp.*", "client_ip", "client_ip.*", "event_duration", "event_duration.*", "*stamp*")
);
}
@@ -3368,7 +3119,7 @@ public void testStatsChainingWithTimestampCarriedForward() {
| WHERE day_count > 0
| STATS hour_count = COUNT(), hour_p95 = PERCENTILE(day_p95, 95) BY hour = TBUCKET(1h), day
| SORT day ASC, hour ASC
- """, Set.of("_index", "_index.*", "@timestamp", "@timestamp.*", "event_duration", "event_duration.*"));
+ """, Set.of("_index", "@timestamp", "@timestamp.*", "event_duration", "event_duration.*"));
}
public void testStatsChainingWithTimestampEval() {
@@ -3379,20 +3130,17 @@ public void testStatsChainingWithTimestampEval() {
| STATS total = COUNT(*), med = MEDIAN(event_duration) BY d = TBUCKET(1d), message, t
| WHERE total > 5
| STATS day_total = SUM(total), hour_med = MEDIAN(med) BY h = TBUCKET(1h), message
- """, Set.of("_index", "_index.*", "@timestamp", "@timestamp.*", "event_duration", "event_duration.*", "message", "message.*"));
+ """, Set.of("_index", "@timestamp", "@timestamp.*", "event_duration", "event_duration.*", "message", "message.*"));
}
public void testStatsChainingWithTimestampCarriedForwardAsByKey() {
- assertFieldNames(
- """
- FROM sample_data
- | KEEP @timestamp, client_ip, event_duration
- | STATS reqs = COUNT(), max_dur = MAX(event_duration) BY day = TBUCKET(1d), client_ip, @timestamp
- | WHERE max_dur > 1000
- | STATS spikes = COUNT() BY hour = TBUCKET(1h), client_ip, day
- """,
- Set.of("_index", "_index.*", "@timestamp", "@timestamp.*", "event_duration", "event_duration.*", "client_ip", "client_ip.*")
- );
+ assertFieldNames("""
+ FROM sample_data
+ | KEEP @timestamp, client_ip, event_duration
+ | STATS reqs = COUNT(), max_dur = MAX(event_duration) BY day = TBUCKET(1d), client_ip, @timestamp
+ | WHERE max_dur > 1000
+ | STATS spikes = COUNT() BY hour = TBUCKET(1h), client_ip, day
+ """, Set.of("_index", "@timestamp", "@timestamp.*", "event_duration", "event_duration.*", "client_ip", "client_ip.*"));
}
private void assertFieldNames(String query, Set expected) {