diff --git a/.github/workflows/sql-odbc-main.yml b/.github/workflows/sql-odbc-main.yml index c14b3354d07..54cdebf1e90 100644 --- a/.github/workflows/sql-odbc-main.yml +++ b/.github/workflows/sql-odbc-main.yml @@ -11,7 +11,7 @@ env: jobs: build-mac: - runs-on: macos-latest + runs-on: macos-10.15 defaults: run: working-directory: sql-odbc diff --git a/.github/workflows/sql-odbc-release-workflow.yml b/.github/workflows/sql-odbc-release-workflow.yml index 6e471248a50..7a594492bf8 100644 --- a/.github/workflows/sql-odbc-release-workflow.yml +++ b/.github/workflows/sql-odbc-release-workflow.yml @@ -16,7 +16,7 @@ env: jobs: build-mac: - runs-on: macos-latest + runs-on: macos-11 defaults: run: working-directory: sql-odbc diff --git a/core/src/main/java/org/opensearch/sql/analysis/ExpressionAnalyzer.java b/core/src/main/java/org/opensearch/sql/analysis/ExpressionAnalyzer.java index 933e68085aa..eed46337873 100644 --- a/core/src/main/java/org/opensearch/sql/analysis/ExpressionAnalyzer.java +++ b/core/src/main/java/org/opensearch/sql/analysis/ExpressionAnalyzer.java @@ -51,6 +51,7 @@ import org.opensearch.sql.ast.expression.Not; import org.opensearch.sql.ast.expression.Or; import org.opensearch.sql.ast.expression.QualifiedName; +import org.opensearch.sql.ast.expression.Span; import org.opensearch.sql.ast.expression.UnresolvedAttribute; import org.opensearch.sql.ast.expression.UnresolvedExpression; import org.opensearch.sql.ast.expression.When; @@ -62,6 +63,7 @@ import org.opensearch.sql.exception.SemanticCheckException; import org.opensearch.sql.expression.DSL; import org.opensearch.sql.expression.Expression; +import org.opensearch.sql.expression.NamedExpression; import org.opensearch.sql.expression.ReferenceExpression; import org.opensearch.sql.expression.aggregation.AggregationState; import org.opensearch.sql.expression.aggregation.Aggregator; @@ -70,6 +72,7 @@ import org.opensearch.sql.expression.function.BuiltinFunctionName; import 
org.opensearch.sql.expression.function.BuiltinFunctionRepository; import org.opensearch.sql.expression.function.FunctionName; +import org.opensearch.sql.expression.span.SpanExpression; import org.opensearch.sql.expression.window.aggregation.AggregateWindowFunction; /** @@ -258,6 +261,14 @@ public Expression visitQualifiedName(QualifiedName node, AnalysisContext context return visitIdentifier(qualifierAnalyzer.unqualified(node), context); } + @Override + public Expression visitSpan(Span node, AnalysisContext context) { + return new SpanExpression( + node.getField().accept(this, context), + node.getValue().accept(this, context), + node.getUnit()); + } + private Expression visitIdentifier(String ident, AnalysisContext context) { TypeEnvironment typeEnv = context.peek(); ReferenceExpression ref = DSL.ref(ident, diff --git a/core/src/main/java/org/opensearch/sql/analysis/NamedExpressionAnalyzer.java b/core/src/main/java/org/opensearch/sql/analysis/NamedExpressionAnalyzer.java index 2ad8731760d..1145c2dd312 100644 --- a/core/src/main/java/org/opensearch/sql/analysis/NamedExpressionAnalyzer.java +++ b/core/src/main/java/org/opensearch/sql/analysis/NamedExpressionAnalyzer.java @@ -32,9 +32,11 @@ import org.opensearch.sql.ast.AbstractNodeVisitor; import org.opensearch.sql.ast.expression.Alias; import org.opensearch.sql.ast.expression.QualifiedName; +import org.opensearch.sql.ast.expression.Span; import org.opensearch.sql.ast.expression.UnresolvedExpression; import org.opensearch.sql.expression.DSL; import org.opensearch.sql.expression.NamedExpression; +import org.opensearch.sql.expression.span.SpanExpression; /** * Analyze the Alias node in the {@link AnalysisContext} to construct the list of diff --git a/core/src/main/java/org/opensearch/sql/ast/AbstractNodeVisitor.java b/core/src/main/java/org/opensearch/sql/ast/AbstractNodeVisitor.java index 7e2cfc90a36..f79145e3518 100644 --- a/core/src/main/java/org/opensearch/sql/ast/AbstractNodeVisitor.java +++ 
b/core/src/main/java/org/opensearch/sql/ast/AbstractNodeVisitor.java @@ -46,6 +46,7 @@ import org.opensearch.sql.ast.expression.Not; import org.opensearch.sql.ast.expression.Or; import org.opensearch.sql.ast.expression.QualifiedName; +import org.opensearch.sql.ast.expression.Span; import org.opensearch.sql.ast.expression.UnresolvedArgument; import org.opensearch.sql.ast.expression.UnresolvedAttribute; import org.opensearch.sql.ast.expression.When; @@ -249,4 +250,8 @@ public T visitUnresolvedArgument(UnresolvedArgument node, C context) { public T visitLimit(Limit node, C context) { return visitChildren(node, context); } + + public T visitSpan(Span node, C context) { + return visitChildren(node, context); + } } diff --git a/core/src/main/java/org/opensearch/sql/ast/dsl/AstDSL.java b/core/src/main/java/org/opensearch/sql/ast/dsl/AstDSL.java index 3b78483736c..4c4250bb866 100644 --- a/core/src/main/java/org/opensearch/sql/ast/dsl/AstDSL.java +++ b/core/src/main/java/org/opensearch/sql/ast/dsl/AstDSL.java @@ -50,6 +50,8 @@ import org.opensearch.sql.ast.expression.Not; import org.opensearch.sql.ast.expression.Or; import org.opensearch.sql.ast.expression.QualifiedName; +import org.opensearch.sql.ast.expression.Span; +import org.opensearch.sql.ast.expression.SpanUnit; import org.opensearch.sql.ast.expression.UnresolvedArgument; import org.opensearch.sql.ast.expression.UnresolvedAttribute; import org.opensearch.sql.ast.expression.UnresolvedExpression; @@ -381,6 +383,10 @@ public static List defaultSortFieldArgs() { return exprList(argument("asc", booleanLiteral(true)), argument("type", nullLiteral())); } + public static Span span(UnresolvedExpression field, UnresolvedExpression value, SpanUnit unit) { + return new Span(field, value, unit); + } + public static Sort sort(UnresolvedPlan input, Field... 
sorts) { return new Sort(input, Arrays.asList(sorts)); } diff --git a/core/src/main/java/org/opensearch/sql/ast/expression/Span.java b/core/src/main/java/org/opensearch/sql/ast/expression/Span.java new file mode 100644 index 00000000000..615a890a7e8 --- /dev/null +++ b/core/src/main/java/org/opensearch/sql/ast/expression/Span.java @@ -0,0 +1,46 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. + * + */ + +package org.opensearch.sql.ast.expression; + +import com.google.common.collect.ImmutableList; +import java.util.List; +import lombok.EqualsAndHashCode; +import lombok.Getter; +import lombok.RequiredArgsConstructor; +import lombok.ToString; +import org.opensearch.sql.ast.AbstractNodeVisitor; + +/** + * Span expression node. + * Params include field expression and the span value. 
+ */ +@Getter +@EqualsAndHashCode(callSuper = false) +@RequiredArgsConstructor +@ToString +public class Span extends UnresolvedExpression { + private final UnresolvedExpression field; + private final UnresolvedExpression value; + private final SpanUnit unit; + + @Override + public List getChild() { + return ImmutableList.of(field, value); + } + + @Override + public R accept(AbstractNodeVisitor nodeVisitor, C context) { + return nodeVisitor.visitSpan(this, context); + } + +} diff --git a/core/src/main/java/org/opensearch/sql/ast/expression/SpanUnit.java b/core/src/main/java/org/opensearch/sql/ast/expression/SpanUnit.java new file mode 100644 index 00000000000..d06e5f63178 --- /dev/null +++ b/core/src/main/java/org/opensearch/sql/ast/expression/SpanUnit.java @@ -0,0 +1,75 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. + * + */ + +package org.opensearch.sql.ast.expression; + +import com.google.common.collect.ImmutableList; +import java.util.List; +import lombok.Getter; +import lombok.RequiredArgsConstructor; + +@Getter +@RequiredArgsConstructor +public enum SpanUnit { + UNKNOWN("unknown"), + NONE(""), + MILLISECOND("ms"), + MS("ms"), + SECOND("s"), + S("s"), + MINUTE("m"), + m("m"), + HOUR("h"), + H("h"), + DAY("d"), + D("d"), + WEEK("w"), + W("w"), + MONTH("M"), + M("M"), + QUARTER("q"), + Q("q"), + YEAR("y"), + Y("y"); + + private final String name; + private static final List SPAN_UNITS; + + static { + ImmutableList.Builder builder = ImmutableList.builder(); + SPAN_UNITS = builder.add(SpanUnit.values()).build(); + } + + /** + * Util method to get span unit given the unit name. 
+ */ + public static SpanUnit of(String unit) { + switch (unit) { + case "": + return NONE; + case "M": + return M; + case "m": + return m; + default: + return SPAN_UNITS.stream() + .filter(v -> unit.equalsIgnoreCase(v.name())) + .findFirst() + .orElse(UNKNOWN); + } + } + + public static String getName(SpanUnit unit) { + return unit.name; + } + +} diff --git a/core/src/main/java/org/opensearch/sql/data/model/ExprValueUtils.java b/core/src/main/java/org/opensearch/sql/data/model/ExprValueUtils.java index b2172e54f16..610a978ea3b 100644 --- a/core/src/main/java/org/opensearch/sql/data/model/ExprValueUtils.java +++ b/core/src/main/java/org/opensearch/sql/data/model/ExprValueUtils.java @@ -86,7 +86,8 @@ public static ExprValue intervalValue(TemporalAmount value) { */ public static ExprValue tupleValue(Map map) { LinkedHashMap valueMap = new LinkedHashMap<>(); - map.forEach((k, v) -> valueMap.put(k, fromObjectValue(v))); + map.forEach((k, v) -> valueMap + .put(k, v instanceof ExprValue ? (ExprValue) v : fromObjectValue(v))); return new ExprTupleValue(valueMap); } diff --git a/core/src/main/java/org/opensearch/sql/expression/DSL.java b/core/src/main/java/org/opensearch/sql/expression/DSL.java index af51d0898ad..4c0ec1262d8 100644 --- a/core/src/main/java/org/opensearch/sql/expression/DSL.java +++ b/core/src/main/java/org/opensearch/sql/expression/DSL.java @@ -29,6 +29,7 @@ import java.util.Arrays; import java.util.Collections; import lombok.RequiredArgsConstructor; +import org.opensearch.sql.ast.expression.SpanUnit; import org.opensearch.sql.data.model.ExprShortValue; import org.opensearch.sql.data.model.ExprValue; import org.opensearch.sql.data.model.ExprValueUtils; @@ -39,6 +40,7 @@ import org.opensearch.sql.expression.conditional.cases.WhenClause; import org.opensearch.sql.expression.function.BuiltinFunctionName; import org.opensearch.sql.expression.function.BuiltinFunctionRepository; +import org.opensearch.sql.expression.span.SpanExpression; import 
org.opensearch.sql.expression.window.ranking.RankingWindowFunction; @RequiredArgsConstructor @@ -128,6 +130,10 @@ public static NamedAggregator named(String name, Aggregator aggregator) { return new NamedAggregator(name, aggregator); } + public static SpanExpression span(Expression field, Expression value, String unit) { + return new SpanExpression(field, value, SpanUnit.of(unit)); + } + public FunctionExpression abs(Expression... expressions) { return function(BuiltinFunctionName.ABS, expressions); } diff --git a/core/src/main/java/org/opensearch/sql/expression/ExpressionNodeVisitor.java b/core/src/main/java/org/opensearch/sql/expression/ExpressionNodeVisitor.java index b8bf9de3da4..18fee6016db 100644 --- a/core/src/main/java/org/opensearch/sql/expression/ExpressionNodeVisitor.java +++ b/core/src/main/java/org/opensearch/sql/expression/ExpressionNodeVisitor.java @@ -27,6 +27,7 @@ package org.opensearch.sql.expression; +import org.opensearch.sql.ast.expression.Span; import org.opensearch.sql.expression.aggregation.Aggregator; import org.opensearch.sql.expression.aggregation.NamedAggregator; import org.opensearch.sql.expression.conditional.cases.CaseClause; diff --git a/core/src/main/java/org/opensearch/sql/expression/span/SpanExpression.java b/core/src/main/java/org/opensearch/sql/expression/span/SpanExpression.java new file mode 100644 index 00000000000..cbaa81756de --- /dev/null +++ b/core/src/main/java/org/opensearch/sql/expression/span/SpanExpression.java @@ -0,0 +1,64 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. 
+ * + */ + +package org.opensearch.sql.expression.span; + +import lombok.EqualsAndHashCode; +import lombok.Getter; +import lombok.RequiredArgsConstructor; +import lombok.ToString; +import org.opensearch.sql.ast.expression.SpanUnit; +import org.opensearch.sql.data.model.ExprValue; +import org.opensearch.sql.data.type.ExprType; +import org.opensearch.sql.expression.Expression; +import org.opensearch.sql.expression.ExpressionNodeVisitor; +import org.opensearch.sql.expression.env.Environment; + +@RequiredArgsConstructor +@Getter +@ToString +@EqualsAndHashCode +public class SpanExpression implements Expression { + private final Expression field; + private final Expression value; + private final SpanUnit unit; + + @Override + public ExprValue valueOf(Environment valueEnv) { + return value.valueOf(valueEnv); + } + + /** + * Return type follows the following table. + * FIELD VALUE RETURN_TYPE + * int/long integer int/long (field type) + * int/long double double + * float/double integer float/double (field type) + * float/double double float/double (field type) + * other any field type + */ + @Override + public ExprType type() { + if (field.type().isCompatible(value.type())) { + return field.type(); + } else if (value.type().isCompatible(field.type())) { + return value.type(); + } else { + return field.type(); + } + } + + @Override + public T accept(ExpressionNodeVisitor visitor, C context) { + return visitor.visitNode(this, context); + } +} diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/AggregationOperator.java b/core/src/main/java/org/opensearch/sql/planner/physical/AggregationOperator.java index 9506462205c..404a649aed0 100644 --- a/core/src/main/java/org/opensearch/sql/planner/physical/AggregationOperator.java +++ b/core/src/main/java/org/opensearch/sql/planner/physical/AggregationOperator.java @@ -26,28 +26,20 @@ package org.opensearch.sql.planner.physical; -import com.google.common.annotations.VisibleForTesting; -import 
com.google.common.collect.ImmutableList; -import java.util.AbstractMap; -import java.util.ArrayList; import java.util.Collections; -import java.util.HashMap; import java.util.Iterator; -import java.util.LinkedHashMap; import java.util.List; -import java.util.Map; -import java.util.stream.Collectors; import lombok.EqualsAndHashCode; import lombok.Getter; -import lombok.RequiredArgsConstructor; import lombok.ToString; -import org.opensearch.sql.data.model.ExprTupleValue; import org.opensearch.sql.data.model.ExprValue; import org.opensearch.sql.expression.Expression; import org.opensearch.sql.expression.NamedExpression; -import org.opensearch.sql.expression.aggregation.AggregationState; import org.opensearch.sql.expression.aggregation.Aggregator; import org.opensearch.sql.expression.aggregation.NamedAggregator; +import org.opensearch.sql.expression.span.SpanExpression; +import org.opensearch.sql.planner.physical.bucket.Group; +import org.opensearch.sql.planner.physical.bucket.SpanBucket; import org.opensearch.sql.storage.bindingtuple.BindingTuple; /** @@ -80,7 +72,8 @@ public AggregationOperator(PhysicalPlan input, List aggregatorL this.input = input; this.aggregatorList = aggregatorList; this.groupByExprList = groupByExprList; - this.group = new Group(); + this.group = groupBySpan(groupByExprList) ? new SpanBucket(aggregatorList, groupByExprList) + : new Group(aggregatorList, groupByExprList); } @Override @@ -113,79 +106,9 @@ public void open() { iterator = group.result().iterator(); } - @VisibleForTesting - @RequiredArgsConstructor - public class Group { - - private final Map>> groupListMap = - new HashMap<>(); - - /** - * Push the BindingTuple to Group. 
Two functions will be applied to each BindingTuple to - * generate the {@link GroupKey} and {@link AggregationState} - * Key = GroupKey(bindingTuple), State = Aggregator(bindingTuple) - */ - public void push(ExprValue inputValue) { - GroupKey groupKey = new GroupKey(inputValue); - groupListMap.computeIfAbsent(groupKey, k -> - aggregatorList.stream() - .map(aggregator -> new AbstractMap.SimpleEntry<>(aggregator, - aggregator.create())) - .collect(Collectors.toList()) - ); - groupListMap.computeIfPresent(groupKey, (key, aggregatorList) -> { - aggregatorList - .forEach(entry -> entry.getKey().iterate(inputValue.bindingTuples(), entry.getValue())); - return aggregatorList; - }); - } - - /** - * Get the list of {@link BindingTuple} for each group. - */ - public List result() { - ImmutableList.Builder resultBuilder = new ImmutableList.Builder<>(); - for (Map.Entry>> - entry : groupListMap.entrySet()) { - LinkedHashMap map = new LinkedHashMap<>(); - map.putAll(entry.getKey().groupKeyMap()); - for (Map.Entry stateEntry : entry.getValue()) { - map.put(stateEntry.getKey().getName(), stateEntry.getValue().result()); - } - resultBuilder.add(ExprTupleValue.fromExprValueMap(map)); - } - return resultBuilder.build(); - } + private boolean groupBySpan(List namedExpressionList) { + return namedExpressionList.size() == 1 + && namedExpressionList.get(0).getDelegated() instanceof SpanExpression; } - /** - * Group Key. - */ - @EqualsAndHashCode - @VisibleForTesting - public class GroupKey { - - private final List groupByValueList; - - /** - * GroupKey constructor. - */ - public GroupKey(ExprValue value) { - this.groupByValueList = new ArrayList<>(); - for (Expression groupExpr : groupByExprList) { - this.groupByValueList.add(groupExpr.valueOf(value.bindingTuples())); - } - } - - /** - * Return the Map of group field and group field value. 
- */ - public LinkedHashMap groupKeyMap() { - LinkedHashMap map = new LinkedHashMap<>(); - for (int i = 0; i < groupByExprList.size(); i++) { - map.put(groupByExprList.get(i).getNameOrAlias(), groupByValueList.get(i)); - } - return map; - } - } } diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/bucket/Group.java b/core/src/main/java/org/opensearch/sql/planner/physical/bucket/Group.java new file mode 100644 index 00000000000..fcbc889f489 --- /dev/null +++ b/core/src/main/java/org/opensearch/sql/planner/physical/bucket/Group.java @@ -0,0 +1,113 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. + * + */ + +package org.opensearch.sql.planner.physical.bucket; + +import com.google.common.annotations.VisibleForTesting; +import com.google.common.collect.ImmutableList; +import java.util.AbstractMap; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; +import lombok.EqualsAndHashCode; +import lombok.Getter; +import lombok.RequiredArgsConstructor; +import org.opensearch.sql.data.model.ExprTupleValue; +import org.opensearch.sql.data.model.ExprValue; +import org.opensearch.sql.expression.Expression; +import org.opensearch.sql.expression.NamedExpression; +import org.opensearch.sql.expression.aggregation.AggregationState; +import org.opensearch.sql.expression.aggregation.NamedAggregator; +import org.opensearch.sql.storage.bindingtuple.BindingTuple; + +@VisibleForTesting +@RequiredArgsConstructor +public class Group { + @Getter + private final List aggregatorList; + @Getter + private final List groupByExprList; + + protected final Map>> groupListMap = + new HashMap<>(); + + /** + * Push the 
BindingTuple to Group. Two functions will be applied to each BindingTuple to + * generate the {@link Key} and {@link AggregationState} + * Key = GroupKey(bindingTuple), State = Aggregator(bindingTuple) + */ + public void push(ExprValue inputValue) { + Key groupKey = new Key(inputValue, groupByExprList); + groupListMap.computeIfAbsent(groupKey, k -> + aggregatorList.stream() + .map(aggregator -> new AbstractMap.SimpleEntry<>(aggregator, + aggregator.create())) + .collect(Collectors.toList()) + ); + groupListMap.computeIfPresent(groupKey, (key, aggregatorList) -> { + aggregatorList + .forEach(entry -> entry.getKey().iterate(inputValue.bindingTuples(), entry.getValue())); + return aggregatorList; + }); + } + + /** + * Get the list of {@link BindingTuple} for each group. + */ + public List result() { + ImmutableList.Builder resultBuilder = new ImmutableList.Builder<>(); + for (Map.Entry>> + entry : groupListMap.entrySet()) { + LinkedHashMap map = new LinkedHashMap<>(entry.getKey().groupKeyMap()); + for (Map.Entry stateEntry : entry.getValue()) { + map.put(stateEntry.getKey().getName(), stateEntry.getValue().result()); + } + resultBuilder.add(ExprTupleValue.fromExprValueMap(map)); + } + return resultBuilder.build(); + } + + /** + * Group Key. + */ + @EqualsAndHashCode + @VisibleForTesting + public static class Key { + private final List groupByValueList; + private final List groupByExprList; + + /** + * GroupKey constructor. + */ + public Key(ExprValue value, List groupByExprList) { + this.groupByValueList = new ArrayList<>(); + this.groupByExprList = groupByExprList; + for (Expression groupExpr : groupByExprList) { + this.groupByValueList.add(groupExpr.valueOf(value.bindingTuples())); + } + } + + /** + * Return the Map of group field and group field value. 
+ */ + public LinkedHashMap groupKeyMap() { + LinkedHashMap map = new LinkedHashMap<>(); + for (int i = 0; i < groupByExprList.size(); i++) { + map.put(groupByExprList.get(i).getNameOrAlias(), groupByValueList.get(i)); + } + return map; + } + } +} diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/bucket/Rounding.java b/core/src/main/java/org/opensearch/sql/planner/physical/bucket/Rounding.java new file mode 100644 index 00000000000..43bfed37c61 --- /dev/null +++ b/core/src/main/java/org/opensearch/sql/planner/physical/bucket/Rounding.java @@ -0,0 +1,641 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. + * + */ + +package org.opensearch.sql.planner.physical.bucket; + +import static org.opensearch.sql.data.type.ExprCoreType.DATE; +import static org.opensearch.sql.data.type.ExprCoreType.DATETIME; +import static org.opensearch.sql.data.type.ExprCoreType.DOUBLE; +import static org.opensearch.sql.data.type.ExprCoreType.LONG; +import static org.opensearch.sql.data.type.ExprCoreType.TIME; +import static org.opensearch.sql.data.type.ExprCoreType.TIMESTAMP; + +import java.time.Instant; +import java.time.LocalDate; +import java.time.LocalDateTime; +import java.time.LocalTime; +import java.time.ZoneId; +import java.time.ZonedDateTime; +import java.time.temporal.ChronoField; +import java.util.Arrays; +import java.util.Map; +import java.util.concurrent.TimeUnit; +import lombok.EqualsAndHashCode; +import lombok.Getter; +import lombok.RequiredArgsConstructor; +import org.opensearch.sql.data.model.ExprDateValue; +import org.opensearch.sql.data.model.ExprDatetimeValue; +import org.opensearch.sql.data.model.ExprTimeValue; +import org.opensearch.sql.data.model.ExprTimestampValue; +import 
org.opensearch.sql.data.model.ExprTupleValue; +import org.opensearch.sql.data.model.ExprValue; +import org.opensearch.sql.data.model.ExprValueUtils; +import org.opensearch.sql.data.type.ExprType; +import org.opensearch.sql.exception.ExpressionEvaluationException; +import org.opensearch.sql.expression.span.SpanExpression; +import org.opensearch.sql.utils.DateTimeUtils; + +/** + * Rounding. + */ +@EqualsAndHashCode +public abstract class Rounding { + @Getter + protected T maxRounded; + @Getter + protected T minRounded; + + /** + * Create Rounding instance. + */ + public static Rounding createRounding(SpanExpression span) { + ExprValue interval = span.getValue().valueOf(null); + ExprType type = span.type(); + + if (LONG.isCompatible(type)) { + return new LongRounding(interval); + } + if (DOUBLE.isCompatible(type)) { + return new DoubleRounding(interval); + } + if (type.equals(DATETIME)) { + return new DatetimeRounding(interval, span.getUnit().getName()); + } + if (type.equals(TIMESTAMP)) { + return new TimestampRounding(interval, span.getUnit().getName()); + } + if (type.equals(DATE)) { + return new DateRounding(interval, span.getUnit().getName()); + } + if (type.equals(TIME)) { + return new TimeRounding(interval, span.getUnit().getName()); + } + return new UnknownRounding(); + } + + public abstract ExprValue round(ExprValue value); + + public abstract Integer locate(ExprValue value); + + public abstract ExprValue[] createBuckets(); + + public abstract ExprValue[] fillBuckets(ExprValue[] buckets, Map map, + String key); + + + static class TimestampRounding extends Rounding { + private final ExprValue interval; + private final DateTimeUnit dateTimeUnit; + + public TimestampRounding(ExprValue interval, String unit) { + this.interval = interval; + this.dateTimeUnit = DateTimeUnit.resolve(unit); + } + + @Override + public ExprValue round(ExprValue var) { + Instant instant = Instant.ofEpochMilli(dateTimeUnit.round(var.timestampValue() + .toEpochMilli(), 
interval.integerValue())); +      updateRounded(instant); +      return new ExprTimestampValue(instant); +    } + +    @Override +    public ExprValue[] createBuckets() { +      if (dateTimeUnit.isMillisBased) { +        int size = (int) ((maxRounded.toEpochMilli() - minRounded.toEpochMilli()) / (interval +            .integerValue() * dateTimeUnit.ratio)) + 1; +        return new ExprValue[size]; +      } else { +        ZonedDateTime maxZonedDateTime = maxRounded.atZone(ZoneId.of("UTC")); +        ZonedDateTime minZonedDateTime = minRounded.atZone(ZoneId.of("UTC")); +        int monthDiff = (maxZonedDateTime.getYear() - minZonedDateTime +            .getYear()) * 12 + maxZonedDateTime.getMonthValue() - minZonedDateTime.getMonthValue(); +        int size = monthDiff / ((int) dateTimeUnit.ratio * interval.integerValue()) + 1; +        return new ExprValue[size]; +      } +    } + +    @Override +    public ExprValue[] fillBuckets(ExprValue[] buckets, +                                   Map map, +                                   String key) { +      for (int id = 0; id < buckets.length; id++) { +        ExprValue tuple = buckets[id]; +        if (tuple == null) { +          long placeHolder; +          if (dateTimeUnit.isMillisBased) { +            placeHolder = minRounded.toEpochMilli() + dateTimeUnit.ratio * interval +                .integerValue() * id; +          } else { +            placeHolder = minRounded.atZone(ZoneId.of("UTC")).plusMonths(dateTimeUnit +                .ratio * interval.integerValue() * id).toInstant().toEpochMilli(); +          } +          map.replace(key, new ExprTimestampValue(Instant.ofEpochMilli(placeHolder))); +          buckets[id] = ExprTupleValue.fromExprValueMap(map); +        } +      } +      return buckets; +    } + +    @Override +    public Integer locate(ExprValue value) { +      if (dateTimeUnit.isMillisBased) { +        long intervalInEpochMillis = dateTimeUnit.ratio; +        return Long.valueOf((value.timestampValue() +            .atZone(ZoneId.of("UTC")).toInstant().toEpochMilli() - minRounded +            .atZone(ZoneId.of("UTC")).toInstant().toEpochMilli()) / (intervalInEpochMillis +            * interval.integerValue())).intValue(); +      } else { +        int monthDiff = (value.dateValue().getYear() - minRounded.atZone(ZoneId.of("UTC")) +            .getYear()) * 12 + value.dateValue().getMonthValue() - minRounded
+ .atZone(ZoneId.of("UTC")).getMonthValue(); + return (int) (monthDiff / (dateTimeUnit.ratio * interval.integerValue())); + } + } + + private void updateRounded(Instant value) { + if (maxRounded == null || value.isAfter(maxRounded)) { + maxRounded = value; + } + if (minRounded == null || value.isBefore(minRounded)) { + minRounded = value; + } + } + } + + + static class DatetimeRounding extends Rounding { + private final ExprValue interval; + private final DateTimeUnit dateTimeUnit; + + public DatetimeRounding(ExprValue interval, String unit) { + this.interval = interval; + this.dateTimeUnit = DateTimeUnit.resolve(unit); + } + + @Override + public ExprValue round(ExprValue var) { + Instant instant = Instant.ofEpochMilli(dateTimeUnit.round(var.datetimeValue() + .atZone(ZoneId.of("UTC")).toInstant().toEpochMilli(), interval.integerValue())); + updateRounded(instant); + return new ExprDatetimeValue(instant.atZone(ZoneId.of("UTC")).toLocalDateTime()); + } + + @Override + public ExprValue[] createBuckets() { + if (dateTimeUnit.isMillisBased) { + int size = (int) ((maxRounded.atZone(ZoneId.of("UTC")).toInstant() + .toEpochMilli() - minRounded.atZone(ZoneId.of("UTC")).toInstant() + .toEpochMilli()) / (interval.integerValue() * dateTimeUnit.ratio)) + 1; + return new ExprValue[size]; + } else { + ZonedDateTime maxZonedDateTime = maxRounded.atZone(ZoneId.of("UTC")); + ZonedDateTime minZonedDateTime = minRounded.atZone(ZoneId.of("UTC")); + int monthDiff = (maxZonedDateTime.getYear() - minZonedDateTime + .getYear()) * 12 + maxZonedDateTime.getMonthValue() - minZonedDateTime.getMonthValue(); + int size = monthDiff / ((int) dateTimeUnit.ratio * interval.integerValue()) + 1; + return new ExprValue[size]; + } + } + + @Override + public ExprValue[] fillBuckets(ExprValue[] buckets, + Map map, + String key) { + for (int id = 0; id < buckets.length; id++) { + ExprValue tuple = buckets[id]; + if (tuple == null) { + long placeHolder; + if (dateTimeUnit.isMillisBased) { + placeHolder = 
minRounded.atZone(ZoneId.of("UTC")).toInstant() + .toEpochMilli() + dateTimeUnit.ratio * interval.integerValue() * id; + } else { + placeHolder = minRounded.atZone(ZoneId.of("UTC")).plusMonths(dateTimeUnit + .ratio * interval.integerValue() * id).toInstant().toEpochMilli(); + } + map.replace(key, new ExprDatetimeValue(Instant.ofEpochMilli(placeHolder) + .atZone(ZoneId.of("UTC")).toLocalDateTime())); + buckets[id] = ExprTupleValue.fromExprValueMap(map); + } + } + return buckets; + } + + @Override + public Integer locate(ExprValue value) { + if (dateTimeUnit.isMillisBased) { + long intervalInEpochMillis = dateTimeUnit.ratio; + return Long.valueOf((value.datetimeValue() + .atZone(ZoneId.of("UTC")).toInstant().toEpochMilli() - minRounded + .atZone(ZoneId.of("UTC")).toInstant().toEpochMilli()) / (intervalInEpochMillis + * interval.integerValue())).intValue(); + } else { + int monthDiff = (value.datetimeValue().getYear() - minRounded.getYear()) * 12 + + value.dateValue().getMonthValue() - minRounded.getMonthValue(); + return (int) (monthDiff / (dateTimeUnit.ratio * interval.integerValue())); + } + } + + private void updateRounded(Instant value) { + if (maxRounded == null || value.isAfter(maxRounded + .atZone(ZoneId.of("UTC")).toInstant())) { + maxRounded = value.atZone(ZoneId.of("UTC")).toLocalDateTime(); + } + if (minRounded == null || value.isBefore(minRounded + .atZone(ZoneId.of("UTC")).toInstant())) { + minRounded = value.atZone(ZoneId.of("UTC")).toLocalDateTime(); + } + } + } + + + static class DateRounding extends Rounding { + private final ExprValue interval; + private final DateTimeUnit dateTimeUnit; + + public DateRounding(ExprValue interval, String unit) { + this.interval = interval; + this.dateTimeUnit = DateTimeUnit.resolve(unit); + } + + @Override + public ExprValue round(ExprValue var) { + Instant instant = Instant.ofEpochMilli(dateTimeUnit.round(var.dateValue().atStartOfDay() + .atZone(ZoneId.of("UTC")).toInstant().toEpochMilli(), 
interval.integerValue())); +      updateRounded(instant); +      return new ExprDateValue(instant.atZone(ZoneId.of("UTC")).toLocalDate()); +    } + +    @Override +    public ExprValue[] createBuckets() { +      if (dateTimeUnit.isMillisBased) { +        int size = (int) ((maxRounded.atStartOfDay().atZone(ZoneId.of("UTC")).toInstant() +            .toEpochMilli() - minRounded.atStartOfDay().atZone(ZoneId.of("UTC")).toInstant() +            .toEpochMilli()) / (interval.integerValue() * dateTimeUnit.ratio)) + 1; +        return new ExprValue[size]; +      } else { +        ZonedDateTime maxZonedDateTime = maxRounded.atStartOfDay().atZone(ZoneId.of("UTC")); +        ZonedDateTime minZonedDateTime = minRounded.atStartOfDay().atZone(ZoneId.of("UTC")); +        int monthDiff = (maxZonedDateTime.getYear() - minZonedDateTime +            .getYear()) * 12 + maxZonedDateTime.getMonthValue() - minZonedDateTime.getMonthValue(); +        int size = monthDiff / ((int) dateTimeUnit.ratio * interval.integerValue()) + 1; +        return new ExprValue[size]; +      } +    } + +    @Override +    public ExprValue[] fillBuckets(ExprValue[] buckets, +                                   Map map, +                                   String key) { +      for (int id = 0; id < buckets.length; id++) { +        ExprValue tuple = buckets[id]; +        if (tuple == null) { +          long placeHolder; +          if (dateTimeUnit.isMillisBased) { +            placeHolder = minRounded.atStartOfDay().atZone(ZoneId.of("UTC")).toInstant() +                .toEpochMilli() + dateTimeUnit.ratio * interval.integerValue() * id; +          } else { +            placeHolder = minRounded.atStartOfDay().atZone(ZoneId.of("UTC")) +                .plusMonths(dateTimeUnit.ratio * interval.integerValue() * id).toInstant() +                .toEpochMilli(); +          } +          map.replace(key, new ExprDateValue(Instant.ofEpochMilli(placeHolder) +              .atZone(ZoneId.of("UTC")).toLocalDate())); +          buckets[id] = ExprTupleValue.fromExprValueMap(map); +        } +      } +      return buckets; +    } + +    @Override +    public Integer locate(ExprValue value) { +      if (dateTimeUnit.isMillisBased) { +        long intervalInEpochMillis = dateTimeUnit.ratio; +        return Long.valueOf((value.dateValue().atStartOfDay() +            .atZone(ZoneId.of("UTC")).toInstant().toEpochMilli() -
minRounded.atStartOfDay() + .atZone(ZoneId.of("UTC")).toInstant().toEpochMilli()) / (intervalInEpochMillis + * interval.integerValue())).intValue(); + } else { + int monthDiff = (value.dateValue().getYear() - minRounded.getYear()) * 12 + + value.dateValue().getMonthValue() - minRounded.getMonthValue(); + return (int) (monthDiff / (dateTimeUnit.ratio * interval.integerValue())); + } + } + + private void updateRounded(Instant value) { + if (maxRounded == null || value.isAfter(maxRounded.atStartOfDay() + .atZone(ZoneId.of("UTC")).toInstant())) { + maxRounded = value.atZone(ZoneId.of("UTC")).toLocalDate(); + } + if (minRounded == null || value.isBefore(minRounded.atStartOfDay() + .atZone(ZoneId.of("UTC")).toInstant())) { + minRounded = value.atZone(ZoneId.of("UTC")).toLocalDate(); + } + } + } + + static class TimeRounding extends Rounding { + private final ExprValue interval; + private final DateTimeUnit dateTimeUnit; + + public TimeRounding(ExprValue interval, String unit) { + this.interval = interval; + this.dateTimeUnit = DateTimeUnit.resolve(unit); + } + + @Override + public ExprValue round(ExprValue var) { + if (dateTimeUnit.id > 4) { + throw new ExpressionEvaluationException(String + .format("Unable to set span unit %s for TIME type", dateTimeUnit.getName())); + } + + Instant instant = Instant.ofEpochMilli(dateTimeUnit.round(var.timeValue().getLong( + ChronoField.MILLI_OF_DAY), interval.integerValue())); + updateRounded(instant); + return new ExprTimeValue(instant.atZone(ZoneId.of("UTC")).toLocalTime()); + } + + @Override + public ExprValue[] createBuckets() { + // local time is converted to timestamp on 1970-01-01 for aggregations + int size = (int) ((maxRounded.atDate(LocalDate.of(1970, 1, 1)) + .atZone(ZoneId.of("UTC")).toInstant().toEpochMilli() - minRounded + .atDate(LocalDate.of(1970, 1, 1)).atZone(ZoneId.of("UTC")).toInstant() + .toEpochMilli()) / (interval.integerValue() * dateTimeUnit.ratio)) + 1; + return new ExprValue[size]; + } + + @Override + public 
ExprValue[] fillBuckets(ExprValue[] buckets, + Map map, + String key) { + for (int id = 0; id < buckets.length; id++) { + ExprValue tuple = buckets[id]; + if (tuple == null) { + long placeHolder = minRounded.atDate(LocalDate.of(1970, 1, 1)) + .atZone(ZoneId.of("UTC")) + .toInstant().toEpochMilli() + dateTimeUnit.ratio * interval.integerValue() * id; + map.replace(key, new ExprTimeValue(Instant.ofEpochMilli(placeHolder) + .atZone(ZoneId.of("UTC")).toLocalTime())); + buckets[id] = ExprTupleValue.fromExprValueMap(map); + } + } + return buckets; + } + + @Override + public Integer locate(ExprValue value) { + long intervalInEpochMillis = dateTimeUnit.ratio; + return Long.valueOf((value.timeValue().atDate(LocalDate.of(1970, 1, 1)) + .atZone(ZoneId.of("UTC")).toInstant().toEpochMilli() - minRounded + .atDate(LocalDate.of(1970, 1, 1)) + .atZone(ZoneId.of("UTC")).toInstant().toEpochMilli()) / (intervalInEpochMillis * interval + .integerValue())).intValue(); + } + + private void updateRounded(Instant value) { + if (maxRounded == null || value.isAfter(maxRounded.atDate(LocalDate.of(1970, 1, 1)) + .atZone(ZoneId.of("UTC")).toInstant())) { + maxRounded = value.atZone(ZoneId.of("UTC")).toLocalTime(); + } + if (minRounded == null) { + minRounded = value.atZone(ZoneId.of("UTC")).toLocalTime(); + } + } + } + + + static class LongRounding extends Rounding { + private final Long longInterval; + + protected LongRounding(ExprValue interval) { + longInterval = interval.longValue(); + } + + @Override + public ExprValue round(ExprValue value) { + long rounded = Math.floorDiv(value.longValue(), longInterval) * longInterval; + updateRounded(rounded); + return ExprValueUtils.longValue(rounded); + } + + @Override + public Integer locate(ExprValue value) { + return Long.valueOf((value.longValue() - minRounded) / longInterval).intValue(); + } + + @Override + public ExprValue[] createBuckets() { + int size = Long.valueOf((maxRounded - minRounded) / longInterval).intValue() + 1; + return new 
ExprValue[size]; + } + + @Override + public ExprValue[] fillBuckets(ExprValue[] buckets, + Map map, + String key) { + for (int id = 0; id < buckets.length; id++) { + ExprValue tuple = buckets[id]; + if (tuple == null) { + map.replace(key, ExprValueUtils.longValue(minRounded + longInterval * id)); + buckets[id] = ExprTupleValue.fromExprValueMap(map); + } + } + return buckets; + } + + private void updateRounded(Long value) { + if (maxRounded == null || value > maxRounded) { + maxRounded = value; + } + if (minRounded == null || value < minRounded) { + minRounded = value; + } + } + } + + + static class DoubleRounding extends Rounding { + private final Double doubleInterval; + + protected DoubleRounding(ExprValue interval) { + doubleInterval = interval.doubleValue(); + } + + @Override + public ExprValue round(ExprValue value) { + double rounded = Double + .valueOf(value.doubleValue() / doubleInterval).intValue() * doubleInterval; + updateRounded(rounded); + return ExprValueUtils.doubleValue(rounded); + } + + @Override + public Integer locate(ExprValue value) { + return Double.valueOf((value.doubleValue() - minRounded) / doubleInterval).intValue(); + } + + @Override + public ExprValue[] createBuckets() { + int size = Double.valueOf((maxRounded - minRounded) / doubleInterval).intValue() + 1; + return new ExprValue[size]; + } + + @Override + public ExprValue[] fillBuckets(ExprValue[] buckets, Map map, + String key) { + for (int id = 0; id < buckets.length; id++) { + ExprValue tuple = buckets[id]; + if (tuple == null) { + map.replace(key, ExprValueUtils.doubleValue(minRounded + doubleInterval * id)); + buckets[id] = ExprTupleValue.fromExprValueMap(map); + } + } + return buckets; + } + + private void updateRounded(Double value) { + if (maxRounded == null || value > maxRounded) { + maxRounded = value; + } + if (minRounded == null || value < minRounded) { + minRounded = value; + } + } + } + + + @RequiredArgsConstructor + static class UnknownRounding extends Rounding { + 
@Override + public ExprValue round(ExprValue var) { + return null; + } + + @Override + public Integer locate(ExprValue value) { + return null; + } + + @Override + public ExprValue[] createBuckets() { + return new ExprValue[0]; + } + + @Override + public ExprValue[] fillBuckets(ExprValue[] buckets, Map map, + String key) { + return new ExprValue[0]; + } + } + + + @RequiredArgsConstructor + public enum DateTimeUnit { + MILLISECOND(1, "ms", true, ChronoField.MILLI_OF_SECOND + .getBaseUnit().getDuration().toMillis()) { + @Override + long round(long utcMillis, int interval) { + return DateTimeUtils.roundFloor(utcMillis, ratio * interval); + } + }, + + SECOND(2, "s", true, ChronoField.SECOND_OF_MINUTE + .getBaseUnit().getDuration().toMillis()) { + @Override + long round(long utcMillis, int interval) { + return DateTimeUtils.roundFloor(utcMillis, ratio * interval); + } + }, + + MINUTE(3, "m", true, ChronoField.MINUTE_OF_HOUR + .getBaseUnit().getDuration().toMillis()) { + @Override + long round(long utcMillis, int interval) { + return DateTimeUtils.roundFloor(utcMillis, ratio * interval); + } + }, + + HOUR(4, "h", true, ChronoField.HOUR_OF_DAY + .getBaseUnit().getDuration().toMillis()) { + @Override + long round(long utcMillis, int interval) { + return DateTimeUtils.roundFloor(utcMillis, ratio * interval); + } + }, + + DAY(5, "d", true, ChronoField.DAY_OF_MONTH + .getBaseUnit().getDuration().toMillis()) { + @Override + long round(long utcMillis, int interval) { + return DateTimeUtils.roundFloor(utcMillis, ratio * interval); + } + }, + + WEEK(6, "w", true, TimeUnit.DAYS.toMillis(7L)) { + @Override + long round(long utcMillis, int interval) { + return DateTimeUtils.roundWeek(utcMillis, interval); + } + }, + + MONTH(7, "M", false, 1) { + @Override + long round(long utcMillis, int interval) { + return DateTimeUtils.roundMonth(utcMillis, interval); + } + }, + + QUARTER(8, "q", false, 3) { + @Override + long round(long utcMillis, int interval) { + return 
DateTimeUtils.roundQuarter(utcMillis, interval); + } + }, + + YEAR(9, "y", false, 12) { + @Override + long round(long utcMillis, int interval) { + return DateTimeUtils.roundYear(utcMillis, interval); + } + }; + + @Getter + private final int id; + @Getter + private final String name; + protected final boolean isMillisBased; + protected final long ratio; + + abstract long round(long utcMillis, int interval); + + /** + * Resolve the date time unit. + */ + public static Rounding.DateTimeUnit resolve(String name) { + switch (name) { + case "M": + return MONTH; + case "m": + return MINUTE; + default: + return Arrays.stream(values()) + .filter(v -> v.getName().equalsIgnoreCase(name)) + .findFirst() + .orElseThrow(() -> new IllegalArgumentException("Unable to resolve unit " + name)); + } + } + } + +} diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/bucket/SpanBucket.java b/core/src/main/java/org/opensearch/sql/planner/physical/bucket/SpanBucket.java new file mode 100644 index 00000000000..f325bf45c0d --- /dev/null +++ b/core/src/main/java/org/opensearch/sql/planner/physical/bucket/SpanBucket.java @@ -0,0 +1,117 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. 
+ * + */ + +package org.opensearch.sql.planner.physical.bucket; + +import com.google.common.annotations.VisibleForTesting; +import com.google.common.collect.ImmutableList; +import java.util.AbstractMap; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; +import lombok.EqualsAndHashCode; +import lombok.Getter; +import org.opensearch.sql.ast.expression.SpanUnit; +import org.opensearch.sql.data.model.ExprTupleValue; +import org.opensearch.sql.data.model.ExprValue; +import org.opensearch.sql.data.model.ExprValueUtils; +import org.opensearch.sql.expression.NamedExpression; +import org.opensearch.sql.expression.aggregation.AggregationState; +import org.opensearch.sql.expression.aggregation.NamedAggregator; +import org.opensearch.sql.expression.span.SpanExpression; + +public class SpanBucket extends Group { + private final List aggregatorList; + private final List groupByExprList; + private final Rounding rounding; + + /** + * SpanBucket Constructor. 
+ */ + public SpanBucket(List aggregatorList, List groupByExprList) { + super(aggregatorList, groupByExprList); + this.aggregatorList = aggregatorList; + this.groupByExprList = groupByExprList; + rounding = Rounding.createRounding(((SpanExpression) groupByExprList.get(0).getDelegated())); + } + + @Override + public void push(ExprValue inputValue) { + Key spanKey = new Key(inputValue, groupByExprList, rounding); + groupListMap.computeIfAbsent(spanKey, k -> + aggregatorList.stream() + .map(aggregator -> new AbstractMap.SimpleEntry<>(aggregator, + aggregator.create())) + .collect(Collectors.toList()) + ); + groupListMap.computeIfPresent(spanKey, (key, aggregatorList) -> { + aggregatorList + .forEach(entry -> entry.getKey().iterate(inputValue.bindingTuples(), entry.getValue())); + return aggregatorList; + }); + } + + @Override + public List result() { + ExprValue[] buckets = rounding.createBuckets(); + LinkedHashMap emptyBucketTuple = new LinkedHashMap<>(); + String spanKey = null; + for (Map.Entry>> + entry : groupListMap.entrySet()) { + LinkedHashMap tupleMap = new LinkedHashMap<>(entry.getKey().groupKeyMap()); + if (spanKey == null) { + spanKey = ((Key) entry.getKey()).namedSpan.getNameOrAlias(); + } + for (Map.Entry stateEntry : entry.getValue()) { + tupleMap.put(stateEntry.getKey().getName(), stateEntry.getValue().result()); + if (emptyBucketTuple.isEmpty()) { + entry.getKey().groupKeyMap().keySet().forEach(key -> emptyBucketTuple.put(key, null)); + emptyBucketTuple.put(stateEntry.getKey().getName(), ExprValueUtils.fromObjectValue(0)); + } + } + int index = rounding.locate(((SpanBucket.Key) entry.getKey()).getRoundedValue()); + buckets[index] = ExprTupleValue.fromExprValueMap(tupleMap); + } + return ImmutableList.copyOf(rounding.fillBuckets(buckets, emptyBucketTuple, spanKey)); + } + + @EqualsAndHashCode(callSuper = false) + @VisibleForTesting + public static class Key extends Group.Key { + @Getter + private final ExprValue roundedValue; + private final 
NamedExpression namedSpan; + + /** + * SpanBucket.Key Constructor. + */ + public Key(ExprValue value, List groupByExprList, Rounding rounding) { + super(value, groupByExprList); + namedSpan = groupByExprList.get(0); + ExprValue actualValue = ((SpanExpression) namedSpan.getDelegated()).getField() + .valueOf(value.bindingTuples()); + roundedValue = rounding.round(actualValue); + } + + /** + * Return the Map of span key and its actual value. + */ + @Override + public LinkedHashMap groupKeyMap() { + LinkedHashMap map = new LinkedHashMap<>(); + map.put(namedSpan.getNameOrAlias(), roundedValue); + return map; + } + } + +} diff --git a/core/src/main/java/org/opensearch/sql/utils/DateTimeUtils.java b/core/src/main/java/org/opensearch/sql/utils/DateTimeUtils.java new file mode 100644 index 00000000000..30abfbd31a3 --- /dev/null +++ b/core/src/main/java/org/opensearch/sql/utils/DateTimeUtils.java @@ -0,0 +1,94 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. + * + */ + +package org.opensearch.sql.utils; + +import java.time.Instant; +import java.time.ZoneId; +import java.time.ZonedDateTime; +import lombok.experimental.UtilityClass; + +@UtilityClass +public class DateTimeUtils { + + /** + * Util method to round the date/time with given unit. + * + * @param utcMillis Date/time value to round, given in utc millis + * @param unitMillis Date/time interval unit in utc millis + * @return Rounded date/time value in utc millis + */ + public static long roundFloor(long utcMillis, long unitMillis) { + return utcMillis - utcMillis % unitMillis; + } + + /** + * Util method to round the date/time in week(s). 
+ * + * @param utcMillis Date/time value to round, given in utc millis + * @param interval Number of weeks as the rounding interval + * @return Rounded date/time value in utc millis + */ + public static long roundWeek(long utcMillis, int interval) { + return roundFloor(utcMillis + 259200000L, 604800000L * interval) - 259200000L; + } + + /** + * Util method to round the date/time in month(s). + * + * @param utcMillis Date/time value to round, given in utc millis + * @param interval Number of months as the rounding interval + * @return Rounded date/time value in utc millis + */ + public static long roundMonth(long utcMillis, int interval) { + ZonedDateTime initDateTime = ZonedDateTime.of(1970, 1, 1, 0, 0, 0, 0, ZoneId.of("UTC")); + ZonedDateTime zonedDateTime = Instant.ofEpochMilli(utcMillis).atZone(ZoneId.of("UTC")) + .plusMonths(interval); + long monthDiff = (zonedDateTime.getYear() - initDateTime.getYear()) * 12L + zonedDateTime + .getMonthValue() - initDateTime.getMonthValue(); + long monthToAdd = (monthDiff / interval - 1) * interval; + return initDateTime.plusMonths(monthToAdd).toInstant().toEpochMilli(); + } + + /** + * Util method to round the date/time in quarter(s). 
+ * + * @param utcMillis Date/time value to round, given in utc millis + * @param interval Number of quarters as the rounding interval + * @return Rounded date/time value in utc millis + */ + public static long roundQuarter(long utcMillis, int interval) { + ZonedDateTime initDateTime = ZonedDateTime.of(1970, 1, 1, 0, 0, 0, 0, ZoneId.of("UTC")); + ZonedDateTime zonedDateTime = Instant.ofEpochMilli(utcMillis).atZone(ZoneId.of("UTC")) + .plusMonths(interval * 3L); + long monthDiff = ((zonedDateTime.getYear() - initDateTime.getYear()) * 12L + zonedDateTime + .getMonthValue() - initDateTime.getMonthValue()); + long monthToAdd = (monthDiff / (interval * 3L) - 1) * interval * 3; + return initDateTime.plusMonths(monthToAdd).toInstant().toEpochMilli(); + } + + /** + * Util method to round the date/time in year(s). + * + * @param utcMillis Date/time value to round, given in utc millis + * @param interval Number of years as the rounding interval + * @return Rounded date/time value in utc millis + */ + public static long roundYear(long utcMillis, int interval) { + ZonedDateTime initDateTime = ZonedDateTime.of(1970, 1, 1, 0, 0, 0, 0, ZoneId.of("UTC")); + ZonedDateTime zonedDateTime = Instant.ofEpochMilli(utcMillis).atZone(ZoneId.of("UTC")); + int yearDiff = zonedDateTime.getYear() - initDateTime.getYear(); + int yearToAdd = (yearDiff / interval) * interval; + return initDateTime.plusYears(yearToAdd).toInstant().toEpochMilli(); + } + +} diff --git a/core/src/test/java/org/opensearch/sql/analysis/ExpressionAnalyzerTest.java b/core/src/test/java/org/opensearch/sql/analysis/ExpressionAnalyzerTest.java index b0b1e7e773e..50e9ed5a9de 100644 --- a/core/src/test/java/org/opensearch/sql/analysis/ExpressionAnalyzerTest.java +++ b/core/src/test/java/org/opensearch/sql/analysis/ExpressionAnalyzerTest.java @@ -46,12 +46,14 @@ import org.opensearch.sql.ast.dsl.AstDSL; import org.opensearch.sql.ast.expression.AllFields; import org.opensearch.sql.ast.expression.DataType; +import 
org.opensearch.sql.ast.expression.SpanUnit; import org.opensearch.sql.ast.expression.UnresolvedExpression; import org.opensearch.sql.common.antlr.SyntaxCheckException; import org.opensearch.sql.data.model.ExprValueUtils; import org.opensearch.sql.exception.SemanticCheckException; import org.opensearch.sql.expression.DSL; import org.opensearch.sql.expression.Expression; +import org.opensearch.sql.expression.NamedExpression; import org.opensearch.sql.expression.config.ExpressionConfig; import org.opensearch.sql.expression.window.aggregation.AggregateWindowFunction; import org.springframework.context.annotation.Configuration; @@ -318,6 +320,14 @@ public void filtered_distinct_count() { ); } + @Test + void visit_span() { + assertAnalyzeEqual( + DSL.span(DSL.ref("integer_value", INTEGER), DSL.literal(1), ""), + AstDSL.span(qualifiedName("integer_value"), intLiteral(1), SpanUnit.NONE) + ); + } + protected Expression analyze(UnresolvedExpression unresolvedExpression) { return expressionAnalyzer.analyze(unresolvedExpression, analysisContext); } diff --git a/core/src/test/java/org/opensearch/sql/analysis/NamedExpressionAnalyzerTest.java b/core/src/test/java/org/opensearch/sql/analysis/NamedExpressionAnalyzerTest.java index bac3c66a9b6..20e74744d9d 100644 --- a/core/src/test/java/org/opensearch/sql/analysis/NamedExpressionAnalyzerTest.java +++ b/core/src/test/java/org/opensearch/sql/analysis/NamedExpressionAnalyzerTest.java @@ -45,7 +45,7 @@ @ContextConfiguration(classes = {ExpressionConfig.class, AnalyzerTestBase.class}) class NamedExpressionAnalyzerTest extends AnalyzerTestBase { @Test - void visit_named_seleteitem() { + void visit_named_select_item() { Alias alias = AstDSL.alias("integer_value", AstDSL.qualifiedName("integer_value")); NamedExpressionAnalyzer analyzer = diff --git a/core/src/test/java/org/opensearch/sql/expression/ExpressionNodeVisitorTest.java b/core/src/test/java/org/opensearch/sql/expression/ExpressionNodeVisitorTest.java index 9c1671ae2b8..f5da116e85a 
100644 --- a/core/src/test/java/org/opensearch/sql/expression/ExpressionNodeVisitorTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/ExpressionNodeVisitorTest.java @@ -64,6 +64,7 @@ void should_return_null_by_default() { INTEGER)).accept(visitor, null)); assertNull(new CaseClause(ImmutableList.of(), null).accept(visitor, null)); assertNull(new WhenClause(literal("test"), literal(10)).accept(visitor, null)); + assertNull(DSL.span(ref("age", INTEGER), literal(1), "").accept(visitor, null)); } @Test diff --git a/core/src/test/java/org/opensearch/sql/expression/NamedExpressionTest.java b/core/src/test/java/org/opensearch/sql/expression/NamedExpressionTest.java index 13994ae8a0d..414d5dce2f9 100644 --- a/core/src/test/java/org/opensearch/sql/expression/NamedExpressionTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/NamedExpressionTest.java @@ -28,10 +28,12 @@ package org.opensearch.sql.expression; import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.opensearch.sql.data.type.ExprCoreType.INTEGER; import org.junit.jupiter.api.DisplayNameGeneration; import org.junit.jupiter.api.DisplayNameGenerator; import org.junit.jupiter.api.Test; +import org.opensearch.sql.expression.span.SpanExpression; @DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) class NamedExpressionTest extends ExpressionTestBase { @@ -62,4 +64,11 @@ void name_an_named_expression() { assertEquals("ten", namedExpression.getNameOrAlias()); } + @Test + void name_a_span_expression() { + SpanExpression span = DSL.span(DSL.ref("integer_value", INTEGER), DSL.literal(1), ""); + NamedExpression named = DSL.named(span); + assertEquals(span, named.getDelegated()); + } + } diff --git a/core/src/test/java/org/opensearch/sql/expression/span/SpanExpressionTest.java b/core/src/test/java/org/opensearch/sql/expression/span/SpanExpressionTest.java new file mode 100644 index 00000000000..04a8491b250 --- /dev/null +++ 
b/core/src/test/java/org/opensearch/sql/expression/span/SpanExpressionTest.java @@ -0,0 +1,48 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. + * + */ + +package org.opensearch.sql.expression.span; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.opensearch.sql.data.type.ExprCoreType.DOUBLE; +import static org.opensearch.sql.data.type.ExprCoreType.INTEGER; +import static org.opensearch.sql.data.type.ExprCoreType.TIMESTAMP; + +import org.junit.jupiter.api.DisplayNameGeneration; +import org.junit.jupiter.api.DisplayNameGenerator; +import org.junit.jupiter.api.Test; +import org.opensearch.sql.data.model.ExprValueUtils; +import org.opensearch.sql.expression.DSL; +import org.opensearch.sql.expression.ExpressionTestBase; + +@DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) +public class SpanExpressionTest extends ExpressionTestBase { + @Test + void span() { + SpanExpression span = DSL.span(DSL.ref("integer_value", INTEGER), DSL.literal(1), ""); + assertEquals(INTEGER, span.type()); + assertEquals(ExprValueUtils.integerValue(1), span.valueOf(valueEnv())); + + span = DSL.span(DSL.ref("integer_value", INTEGER), DSL.literal(1.5), ""); + assertEquals(DOUBLE, span.type()); + assertEquals(ExprValueUtils.doubleValue(1.5), span.valueOf(valueEnv())); + + span = DSL.span(DSL.ref("double_value", DOUBLE), DSL.literal(1), ""); + assertEquals(DOUBLE, span.type()); + assertEquals(ExprValueUtils.doubleValue(1.0), span.valueOf(valueEnv())); + + span = DSL.span(DSL.ref("timestamp_value", TIMESTAMP), DSL.literal(1), "d"); + assertEquals(TIMESTAMP, span.type()); + assertEquals(ExprValueUtils.integerValue(1), span.valueOf(valueEnv())); + } + +} diff --git 
a/core/src/test/java/org/opensearch/sql/planner/physical/AggregationOperatorTest.java b/core/src/test/java/org/opensearch/sql/planner/physical/AggregationOperatorTest.java index d3eef3b06e5..6c4701a887a 100644 --- a/core/src/test/java/org/opensearch/sql/planner/physical/AggregationOperatorTest.java +++ b/core/src/test/java/org/opensearch/sql/planner/physical/AggregationOperatorTest.java @@ -28,15 +28,27 @@ import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.containsInRelativeOrder; import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.opensearch.sql.data.type.ExprCoreType.DATE; +import static org.opensearch.sql.data.type.ExprCoreType.DATETIME; +import static org.opensearch.sql.data.type.ExprCoreType.DOUBLE; +import static org.opensearch.sql.data.type.ExprCoreType.FLOAT; import static org.opensearch.sql.data.type.ExprCoreType.INTEGER; +import static org.opensearch.sql.data.type.ExprCoreType.LONG; import static org.opensearch.sql.data.type.ExprCoreType.STRING; +import static org.opensearch.sql.data.type.ExprCoreType.TIME; +import static org.opensearch.sql.data.type.ExprCoreType.TIMESTAMP; import com.google.common.collect.ImmutableMap; import java.util.Arrays; import java.util.Collections; import java.util.List; import org.junit.jupiter.api.Test; +import org.opensearch.sql.data.model.ExprDateValue; +import org.opensearch.sql.data.model.ExprDatetimeValue; +import org.opensearch.sql.data.model.ExprTimeValue; +import org.opensearch.sql.data.model.ExprTimestampValue; import org.opensearch.sql.data.model.ExprValue; import org.opensearch.sql.data.model.ExprValueUtils; import org.opensearch.sql.expression.DSL; @@ -88,4 +100,391 @@ public void sum_with_one_groups() { ExprValueUtils.tupleValue(ImmutableMap.of("action", "POST", "sum(response)", 700)) )); } + + @Test + public void millisecond_span() { + PhysicalPlan plan = new AggregationOperator(new 
DateTimeTestScan(), + Collections.singletonList(DSL + .named("count", dsl.count(DSL.ref("second", TIMESTAMP)))), + Collections.singletonList(DSL + .named("span", DSL.span(DSL.ref("second", TIMESTAMP), DSL.literal(6 * 1000), "ms")))); + List result = execute(plan); + assertEquals(3, result.size()); + assertThat(result, containsInRelativeOrder( + ExprValueUtils.tupleValue(ImmutableMap + .of("span", new ExprTimestampValue("2021-01-01 00:00:00"), "count", 2)), + ExprValueUtils.tupleValue(ImmutableMap + .of("span", new ExprTimestampValue("2021-01-01 00:00:06"), "count", 0)), + ExprValueUtils.tupleValue(ImmutableMap + .of("span", new ExprTimestampValue("2021-01-01 00:00:12"), "count", 3)) + )); + } + + @Test + public void second_span() { + PhysicalPlan plan = new AggregationOperator(new DateTimeTestScan(), + Collections.singletonList(DSL + .named("count", dsl.count(DSL.ref("second", TIMESTAMP)))), + Collections.singletonList(DSL + .named("span", DSL.span(DSL.ref("second", TIMESTAMP), DSL.literal(6), "s")))); + List result = execute(plan); + assertEquals(3, result.size()); + assertThat(result, containsInRelativeOrder( + ExprValueUtils.tupleValue(ImmutableMap + .of("span", new ExprTimestampValue("2021-01-01 00:00:00"), "count", 2)), + ExprValueUtils.tupleValue(ImmutableMap + .of("span", new ExprTimestampValue("2021-01-01 00:00:06"), "count", 0)), + ExprValueUtils.tupleValue(ImmutableMap + .of("span", new ExprTimestampValue("2021-01-01 00:00:12"), "count", 3)) + )); + } + + @Test + public void minute_span() { + PhysicalPlan plan = new AggregationOperator(new DateTimeTestScan(), + Collections.singletonList(DSL + .named("count", dsl.count(DSL.ref("minute", DATETIME)))), + Collections.singletonList(DSL + .named("span", DSL.span(DSL.ref("minute", DATETIME), DSL.literal(5), "m")))); + List result = execute(plan); + assertEquals(4, result.size()); + assertThat(result, containsInRelativeOrder( + ExprValueUtils.tupleValue(ImmutableMap + .of("span", new ExprDatetimeValue("2020-12-31 
23:50:00"), "count", 1)), + ExprValueUtils.tupleValue(ImmutableMap + .of("span", new ExprDatetimeValue("2020-12-31 23:55:00"), "count", 0)), + ExprValueUtils.tupleValue(ImmutableMap + .of("span", new ExprDatetimeValue("2021-01-01 00:00:00"), "count", 3)), + ExprValueUtils.tupleValue(ImmutableMap + .of("span", new ExprDatetimeValue("2021-01-01 00:05:00"), "count", 1)) + )); + + plan = new AggregationOperator(new DateTimeTestScan(), + Collections.singletonList(DSL + .named("count", dsl.count(DSL.ref("hour", TIME)))), + Collections.singletonList(DSL + .named("span", DSL.span(DSL.ref("hour", TIME), DSL.literal(30), "m")))); + result = execute(plan); + assertEquals(5, result.size()); + assertThat(result, containsInRelativeOrder( + ExprValueUtils.tupleValue(ImmutableMap + .of("span", new ExprTimeValue("17:00:00"), "count", 2)), + ExprValueUtils.tupleValue(ImmutableMap + .of("span", new ExprTimeValue("17:30:00"), "count", 0)), + ExprValueUtils.tupleValue(ImmutableMap + .of("span", new ExprTimeValue("18:00:00"), "count", 1)), + ExprValueUtils.tupleValue(ImmutableMap + .of("span", new ExprTimeValue("18:30:00"), "count", 1)), + ExprValueUtils.tupleValue(ImmutableMap + .of("span", new ExprTimeValue("19:00:00"), "count", 1)) + )); + } + + @Test + public void hour_span() { + PhysicalPlan plan = new AggregationOperator(new DateTimeTestScan(), + Collections.singletonList(DSL + .named("count", dsl.count(DSL.ref("hour", TIME)))), + Collections.singletonList(DSL + .named("span", DSL.span(DSL.ref("hour", TIME), DSL.literal(1), "h")))); + List result = execute(plan); + assertEquals(3, result.size()); + assertThat(result, containsInRelativeOrder( + ExprValueUtils.tupleValue(ImmutableMap + .of("span", new ExprTimeValue("17:00:00"), "count", 2)), + ExprValueUtils.tupleValue(ImmutableMap + .of("span", new ExprTimeValue("18:00:00"), "count", 2)), + ExprValueUtils.tupleValue(ImmutableMap + .of("span", new ExprTimeValue("19:00:00"), "count", 1)) + )); + } + + @Test + public void day_span() { 
+ PhysicalPlan plan = new AggregationOperator(new DateTestScan(), + Collections.singletonList(DSL + .named("count(day)", dsl.count(DSL.ref("day", DATE)))), + Collections.singletonList(DSL + .named("span", DSL.span(DSL.ref("day", DATE), DSL.literal(1), "d")))); + List result = execute(plan); + assertEquals(4, result.size()); + assertThat(result, containsInRelativeOrder( + ExprValueUtils.tupleValue(ImmutableMap + .of("span", new ExprDateValue("2021-01-01"), "count(day)", 2)), + ExprValueUtils.tupleValue(ImmutableMap + .of("span", new ExprDateValue("2021-01-02"), "count(day)", 1)), + ExprValueUtils.tupleValue(ImmutableMap + .of("span", new ExprDateValue("2021-01-03"), "count(day)", 1)), + ExprValueUtils.tupleValue(ImmutableMap + .of("span", new ExprDateValue("2021-01-04"), "count(day)", 1)) + )); + + plan = new AggregationOperator(new DateTestScan(), + Collections.singletonList(DSL + .named("count", dsl.count(DSL.ref("month", DATE)))), + Collections.singletonList(DSL + .named("span", DSL.span(DSL.ref("month", DATE), DSL.literal(30), "d")))); + result = execute(plan); + assertEquals(4, result.size()); + assertThat(result, containsInRelativeOrder( + ExprValueUtils.tupleValue(ImmutableMap + .of("span", new ExprDateValue("2020-12-04"), "count", 1)), + ExprValueUtils.tupleValue(ImmutableMap + .of("span", new ExprDateValue("2021-01-03"), "count", 0)), + ExprValueUtils.tupleValue(ImmutableMap + .of("span", new ExprDateValue("2021-02-02"), "count", 3)), + ExprValueUtils.tupleValue(ImmutableMap + .of("span", new ExprDateValue("2021-03-04"), "count", 1)) + )); + } + + @Test + public void week_span() { + PhysicalPlan plan = new AggregationOperator(new DateTestScan(), + Collections.singletonList(DSL + .named("count", dsl.count(DSL.ref("month", DATE)))), + Collections.singletonList(DSL + .named("span", DSL.span(DSL.ref("month", DATE), DSL.literal(5), "w")))); + List result = execute(plan); + assertEquals(4, result.size()); + assertThat(result, containsInRelativeOrder( + 
ExprValueUtils.tupleValue(ImmutableMap + .of("span", new ExprDateValue("2020-11-16"), "count", 1)), + ExprValueUtils.tupleValue(ImmutableMap + .of("span", new ExprDateValue("2020-12-21"), "count", 0)), + ExprValueUtils.tupleValue(ImmutableMap + .of("span", new ExprDateValue("2021-01-25"), "count", 3)), + ExprValueUtils.tupleValue(ImmutableMap + .of("span", new ExprDateValue("2021-03-01"), "count", 1)) + )); + } + + @Test + public void month_span() { + PhysicalPlan plan = new AggregationOperator(new DateTestScan(), + Collections.singletonList(DSL + .named("count", dsl.count(DSL.ref("month", DATE)))), + Collections.singletonList(DSL + .named("span", DSL.span(DSL.ref("month", DATE), DSL.literal(1), "M")))); + List result = execute(plan); + assertEquals(4, result.size()); + assertThat(result, containsInRelativeOrder( + ExprValueUtils.tupleValue(ImmutableMap + .of("span", new ExprDateValue("2020-12-01"), "count", 1)), + ExprValueUtils.tupleValue(ImmutableMap + .of("span", new ExprDateValue("2021-01-01"), "count", 0)), + ExprValueUtils.tupleValue(ImmutableMap + .of("span", new ExprDateValue("2021-02-01"), "count", 3)), + ExprValueUtils.tupleValue(ImmutableMap + .of("span", new ExprDateValue("2021-03-01"), "count", 1)) + )); + + plan = new AggregationOperator(new DateTestScan(), + Collections.singletonList(DSL + .named("count", dsl.count(DSL.ref("quarter", DATETIME)))), + Collections.singletonList(DSL + .named("span", DSL.span(DSL.ref("quarter", DATETIME), DSL.literal(2), "M")))); + result = execute(plan); + assertEquals(5, result.size()); + assertThat(result, containsInRelativeOrder( + ExprValueUtils.tupleValue(ImmutableMap + .of("span", new ExprDatetimeValue("2020-09-01 00:00:00"), "count", 1)), + ExprValueUtils.tupleValue(ImmutableMap + .of("span", new ExprDatetimeValue("2020-11-01 00:00:00"), "count", 1)), + ExprValueUtils.tupleValue(ImmutableMap + .of("span", new ExprDatetimeValue("2021-01-01 00:00:00"), "count", 1)), + ExprValueUtils.tupleValue(ImmutableMap + 
.of("span", new ExprDatetimeValue("2021-03-01 00:00:00"), "count", 0)), + ExprValueUtils.tupleValue(ImmutableMap + .of("span", new ExprDatetimeValue("2021-05-01 00:00:00"), "count", 2)) + )); + + plan = new AggregationOperator(new DateTestScan(), + Collections.singletonList(DSL + .named("count", dsl.count(DSL.ref("year", TIMESTAMP)))), + Collections.singletonList(DSL + .named("span", DSL.span(DSL.ref("year", TIMESTAMP), DSL.literal(10 * 12), "M")))); + result = execute(plan); + assertEquals(4, result.size()); + assertThat(result, containsInRelativeOrder( + ExprValueUtils.tupleValue(ImmutableMap + .of("span", new ExprTimestampValue("1990-01-01 00:00:00"), "count", 1)), + ExprValueUtils.tupleValue(ImmutableMap + .of("span", new ExprTimestampValue("2000-01-01 00:00:00"), "count", 0)), + ExprValueUtils.tupleValue(ImmutableMap + .of("span", new ExprTimestampValue("2010-01-01 00:00:00"), "count", 3)), + ExprValueUtils.tupleValue(ImmutableMap + .of("span", new ExprTimestampValue("2020-01-01 00:00:00"), "count", 1)) + )); + + } + + @Test + public void quarter_span() { + PhysicalPlan plan = new AggregationOperator(new DateTestScan(), + Collections.singletonList(DSL + .named("count", dsl.count(DSL.ref("quarter", DATETIME)))), + Collections.singletonList(DSL + .named("span", DSL.span(DSL.ref("quarter", DATETIME), DSL.literal(2), "q")))); + List result = execute(plan); + assertEquals(2, result.size()); + assertThat(result, containsInRelativeOrder( + ExprValueUtils.tupleValue(ImmutableMap + .of("span", new ExprDatetimeValue("2020-07-01 00:00:00"), "count", 2)), + ExprValueUtils.tupleValue(ImmutableMap + .of("span", new ExprDatetimeValue("2021-01-01 00:00:00"), "count", 3)) + )); + + plan = new AggregationOperator(new DateTestScan(), + Collections.singletonList(DSL + .named("count", dsl.count(DSL.ref("year", TIMESTAMP)))), + Collections.singletonList(DSL + .named("span", DSL.span(DSL.ref("year", TIMESTAMP), DSL.literal(10 * 4), "q")))); + result = execute(plan); + 
assertEquals(4, result.size()); + assertThat(result, containsInRelativeOrder( + ExprValueUtils.tupleValue(ImmutableMap + .of("span", new ExprTimestampValue("1990-01-01 00:00:00"), "count", 1)), + ExprValueUtils.tupleValue(ImmutableMap + .of("span", new ExprTimestampValue("2000-01-01 00:00:00"), "count", 0)), + ExprValueUtils.tupleValue(ImmutableMap + .of("span", new ExprTimestampValue("2010-01-01 00:00:00"), "count", 3)), + ExprValueUtils.tupleValue(ImmutableMap + .of("span", new ExprTimestampValue("2020-01-01 00:00:00"), "count", 1)) + )); + } + + @Test + public void year_span() { + PhysicalPlan plan = new AggregationOperator(new DateTestScan(), + Collections.singletonList(DSL + .named("count", dsl.count(DSL.ref("year", TIMESTAMP)))), + Collections.singletonList(DSL + .named("span", DSL.span(DSL.ref("year", TIMESTAMP), DSL.literal(10), "y")))); + List result = execute(plan); + assertEquals(4, result.size()); + assertThat(result, containsInRelativeOrder( + ExprValueUtils.tupleValue(ImmutableMap + .of("span", new ExprTimestampValue("1990-01-01 00:00:00"), "count", 1)), + ExprValueUtils.tupleValue(ImmutableMap + .of("span", new ExprTimestampValue("2000-01-01 00:00:00"), "count", 0)), + ExprValueUtils.tupleValue(ImmutableMap + .of("span", new ExprTimestampValue("2010-01-01 00:00:00"), "count", 3)), + ExprValueUtils.tupleValue(ImmutableMap + .of("span", new ExprTimestampValue("2020-01-01 00:00:00"), "count", 1)) + )); + } + + @Test + public void integer_field() { + PhysicalPlan plan = new AggregationOperator(new NumericTestScan(), + Collections.singletonList(DSL.named("count", dsl.count(DSL.ref("integer", INTEGER)))), + Collections.singletonList(DSL.named("span", DSL.span(DSL.ref("integer", INTEGER), DSL + .literal(1), "")))); + List result = execute(plan); + assertEquals(5, result.size()); + assertThat(result, containsInRelativeOrder( + ExprValueUtils.tupleValue(ImmutableMap.of("span", 1, "count", 1)), + ExprValueUtils.tupleValue(ImmutableMap.of("span", 2, "count", 
1)), + ExprValueUtils.tupleValue(ImmutableMap.of("span", 3, "count", 0)), + ExprValueUtils.tupleValue(ImmutableMap.of("span", 4, "count", 0)), + ExprValueUtils.tupleValue(ImmutableMap.of("span", 5, "count", 1)))); + + plan = new AggregationOperator(new NumericTestScan(), + Collections.singletonList(DSL.named("count", dsl.count(DSL.ref("integer", INTEGER)))), + Collections.singletonList(DSL.named("span", DSL.span(DSL.ref("integer", INTEGER), DSL + .literal(1.5), "")))); + result = execute(plan); + assertEquals(4, result.size()); + assertThat(result, containsInRelativeOrder( + ExprValueUtils.tupleValue(ImmutableMap.of("span", 0D, "count", 1)), + ExprValueUtils.tupleValue(ImmutableMap.of("span", 1.5D, "count", 1)), + ExprValueUtils.tupleValue(ImmutableMap.of("span", 3.0D, "count", 0)), + ExprValueUtils.tupleValue(ImmutableMap.of("span", 4.5D, "count", 1)))); + } + + @Test + public void long_field() { + PhysicalPlan plan = new AggregationOperator(new NumericTestScan(), + Collections.singletonList(DSL.named("count", dsl.count(DSL.ref("long", LONG)))), + Collections.singletonList(DSL.named("span", DSL.span(DSL.ref("long", LONG), DSL + .literal(1), "")))); + List result = execute(plan); + assertEquals(5, result.size()); + assertThat(result, containsInRelativeOrder( + ExprValueUtils.tupleValue(ImmutableMap.of("span", 1L, "count", 1)), + ExprValueUtils.tupleValue(ImmutableMap.of("span", 2L, "count", 1)), + ExprValueUtils.tupleValue(ImmutableMap.of("span", 3L, "count", 0)), + ExprValueUtils.tupleValue(ImmutableMap.of("span", 4L, "count", 0)), + ExprValueUtils.tupleValue(ImmutableMap.of("span", 5L, "count", 1)))); + + plan = new AggregationOperator(new NumericTestScan(), + Collections.singletonList(DSL.named("count", dsl.count(DSL.ref("long", LONG)))), + Collections.singletonList(DSL.named("span", DSL.span(DSL.ref("long", LONG), DSL + .literal(1.5), "")))); + result = execute(plan); + assertEquals(4, result.size()); + assertThat(result, containsInRelativeOrder( + 
ExprValueUtils.tupleValue(ImmutableMap.of("span", 0D, "count", 1)), + ExprValueUtils.tupleValue(ImmutableMap.of("span", 1.5D, "count", 1)), + ExprValueUtils.tupleValue(ImmutableMap.of("span", 3.0D, "count", 0)), + ExprValueUtils.tupleValue(ImmutableMap.of("span", 4.5D, "count", 1)))); + } + + @Test + public void float_field() { + PhysicalPlan plan = new AggregationOperator(new NumericTestScan(), + Collections.singletonList(DSL.named("count", dsl.count(DSL.ref("float", FLOAT)))), + Collections.singletonList(DSL.named("span", DSL.span(DSL.ref("float", FLOAT), DSL + .literal(1), "")))); + List result = execute(plan); + assertEquals(5, result.size()); + assertThat(result, containsInRelativeOrder( + ExprValueUtils.tupleValue(ImmutableMap.of("span", 1F, "count", 1)), + ExprValueUtils.tupleValue(ImmutableMap.of("span", 2F, "count", 1)), + ExprValueUtils.tupleValue(ImmutableMap.of("span", 3F, "count", 0)), + ExprValueUtils.tupleValue(ImmutableMap.of("span", 4F, "count", 0)), + ExprValueUtils.tupleValue(ImmutableMap.of("span", 5F, "count", 1)))); + + plan = new AggregationOperator(new NumericTestScan(), + Collections.singletonList(DSL.named("count", dsl.count(DSL.ref("float", FLOAT)))), + Collections.singletonList(DSL.named("span", DSL.span(DSL.ref("float", FLOAT), DSL + .literal(1.5), "")))); + result = execute(plan); + assertEquals(4, result.size()); + assertThat(result, containsInRelativeOrder( + ExprValueUtils.tupleValue(ImmutableMap.of("span", 0D, "count", 1)), + ExprValueUtils.tupleValue(ImmutableMap.of("span", 1.5D, "count", 1)), + ExprValueUtils.tupleValue(ImmutableMap.of("span", 3.0D, "count", 0)), + ExprValueUtils.tupleValue(ImmutableMap.of("span", 4.5D, "count", 1)))); + } + + @Test + public void double_field() { + PhysicalPlan plan = new AggregationOperator(new NumericTestScan(), + Collections.singletonList(DSL.named("count", dsl.count(DSL.ref("double", DOUBLE)))), + Collections.singletonList(DSL.named("span", DSL.span(DSL.ref("double", DOUBLE), DSL + 
.literal(1), "")))); + List result = execute(plan); + assertEquals(5, result.size()); + assertThat(result, containsInRelativeOrder( + ExprValueUtils.tupleValue(ImmutableMap.of("span", 1D, "count", 1)), + ExprValueUtils.tupleValue(ImmutableMap.of("span", 2D, "count", 1)), + ExprValueUtils.tupleValue(ImmutableMap.of("span", 3D, "count", 0)), + ExprValueUtils.tupleValue(ImmutableMap.of("span", 4D, "count", 0)), + ExprValueUtils.tupleValue(ImmutableMap.of("span", 5D, "count", 1)))); + + plan = new AggregationOperator(new NumericTestScan(), + Collections.singletonList(DSL.named("count", dsl.count(DSL.ref("double", DOUBLE)))), + Collections.singletonList(DSL.named("span", DSL.span(DSL.ref("double", DOUBLE), DSL + .literal(1.5), "")))); + result = execute(plan); + assertEquals(4, result.size()); + assertThat(result, containsInRelativeOrder( + ExprValueUtils.tupleValue(ImmutableMap.of("span", 0D, "count", 1)), + ExprValueUtils.tupleValue(ImmutableMap.of("span", 1.5D, "count", 1)), + ExprValueUtils.tupleValue(ImmutableMap.of("span", 3.0D, "count", 0)), + ExprValueUtils.tupleValue(ImmutableMap.of("span", 4.5D, "count", 1)))); + } + } diff --git a/core/src/test/java/org/opensearch/sql/planner/physical/PhysicalPlanTestBase.java b/core/src/test/java/org/opensearch/sql/planner/physical/PhysicalPlanTestBase.java index 749a8d2ed02..22b56fdd598 100644 --- a/core/src/test/java/org/opensearch/sql/planner/physical/PhysicalPlanTestBase.java +++ b/core/src/test/java/org/opensearch/sql/planner/physical/PhysicalPlanTestBase.java @@ -32,6 +32,10 @@ import java.util.List; import java.util.Map; import org.junit.jupiter.api.extension.ExtendWith; +import org.opensearch.sql.data.model.ExprDateValue; +import org.opensearch.sql.data.model.ExprDatetimeValue; +import org.opensearch.sql.data.model.ExprTimeValue; +import org.opensearch.sql.data.model.ExprTimestampValue; import org.opensearch.sql.data.model.ExprValue; import org.opensearch.sql.data.model.ExprValueUtils; import 
org.opensearch.sql.data.type.ExprCoreType; @@ -104,6 +108,75 @@ public class PhysicalPlanTestBase { .put("referer", ExprCoreType.STRING) .build(); + protected static final List dateInputs = new ImmutableList.Builder() + .add(ExprValueUtils.tupleValue(ImmutableMap.of( + "day", new ExprDateValue("2021-01-03"), + "month", new ExprDateValue("2021-02-04"), + "quarter", new ExprDatetimeValue("2021-01-01 12:25:02"), + "year", new ExprTimestampValue("2013-01-01 12:25:02")))) + .add(ExprValueUtils.tupleValue(ImmutableMap.of( + "day", new ExprDateValue("2021-01-01"), + "month", new ExprDateValue("2021-03-17"), + "quarter", new ExprDatetimeValue("2021-05-17 12:25:01"), + "year", new ExprTimestampValue("2021-01-01 12:25:02")))) + .add(ExprValueUtils.tupleValue(ImmutableMap.of( + "day", new ExprDateValue("2021-01-04"), + "month", new ExprDateValue("2021-02-08"), + "quarter", new ExprDatetimeValue("2021-06-08 12:25:02"), + "year", new ExprTimestampValue("2016-01-01 12:25:02")))) + .add(ExprValueUtils.tupleValue(ImmutableMap.of( + "day", new ExprDateValue("2021-01-02"), + "month", new ExprDateValue("2020-12-12"), + "quarter", new ExprDatetimeValue("2020-12-12 12:25:03"), + "year", new ExprTimestampValue("1999-01-01 12:25:02")))) + .add(ExprValueUtils.tupleValue(ImmutableMap.of( + "day", new ExprDateValue("2021-01-01"), + "month", new ExprDateValue("2021-02-28"), + "quarter", new ExprDatetimeValue("2020-09-28 12:25:01"), + "year", new ExprTimestampValue("2018-01-01 12:25:02")))) + .build(); + + protected static final List datetimeInputs = new ImmutableList.Builder() + .add(ExprValueUtils.tupleValue(ImmutableMap.of( + "hour", new ExprTimeValue("17:17:00"), + "minute", new ExprDatetimeValue("2020-12-31 23:54:12"), + "second", new ExprTimestampValue("2021-01-01 00:00:05")))) + .add(ExprValueUtils.tupleValue(ImmutableMap.of( + "hour", new ExprTimeValue("18:17:00"), + "minute", new ExprDatetimeValue("2021-01-01 00:05:12"), + "second", new ExprTimestampValue("2021-01-01 00:00:12")))) + 
.add(ExprValueUtils.tupleValue(ImmutableMap.of( + "hour", new ExprTimeValue("17:15:00"), + "minute", new ExprDatetimeValue("2021-01-01 00:03:12"), + "second", new ExprTimestampValue("2021-01-01 00:00:17")))) + .add(ExprValueUtils.tupleValue(ImmutableMap.of( + "hour", new ExprTimeValue("19:01:00"), + "minute", new ExprDatetimeValue("2021-01-01 00:02:12"), + "second", new ExprTimestampValue("2021-01-01 00:00:03")))) + .add(ExprValueUtils.tupleValue(ImmutableMap.of( + "hour", new ExprTimeValue("18:50:00"), + "minute", new ExprDatetimeValue("2021-01-01 00:00:12"), + "second", new ExprTimestampValue("2021-01-01 00:00:13")))) + .build(); + + protected static final List numericInputs = new ImmutableList.Builder() + .add(ExprValueUtils.tupleValue(ImmutableMap.of( + "integer", 2, + "long", 2L, + "float", 2F, + "double", 2D))) + .add(ExprValueUtils.tupleValue(ImmutableMap.of( + "integer", 1, + "long", 1L, + "float", 1F, + "double", 1D))) + .add(ExprValueUtils.tupleValue(ImmutableMap.of( + "integer", 5, + "long", 5L, + "float", 5F, + "double", 5D))) + .build(); + @Bean protected Environment typeEnv() { return var -> { @@ -182,4 +255,88 @@ public ExprValue next() { return iterator.next(); } } + + protected static class DateTimeTestScan extends PhysicalPlan { + private final Iterator iterator; + + public DateTimeTestScan() { + iterator = datetimeInputs.iterator(); + } + + @Override + public R accept(PhysicalPlanNodeVisitor visitor, C context) { + return null; + } + + @Override + public List getChild() { + return ImmutableList.of(); + } + + @Override + public boolean hasNext() { + return iterator.hasNext(); + } + + @Override + public ExprValue next() { + return iterator.next(); + } + } + + protected static class DateTestScan extends PhysicalPlan { + private final Iterator iterator; + + public DateTestScan() { + iterator = dateInputs.iterator(); + } + + @Override + public R accept(PhysicalPlanNodeVisitor visitor, C context) { + return null; + } + + @Override + public List 
getChild() { + return ImmutableList.of(); + } + + @Override + public boolean hasNext() { + return iterator.hasNext(); + } + + @Override + public ExprValue next() { + return iterator.next(); + } + } + + protected static class NumericTestScan extends PhysicalPlan { + private final Iterator iterator; + + public NumericTestScan() { + iterator = numericInputs.iterator(); + } + + @Override + public R accept(PhysicalPlanNodeVisitor visitor, C context) { + return null; + } + + @Override + public List getChild() { + return ImmutableList.of(); + } + + @Override + public boolean hasNext() { + return iterator.hasNext(); + } + + @Override + public ExprValue next() { + return iterator.next(); + } + } } diff --git a/core/src/test/java/org/opensearch/sql/planner/physical/bucket/RoundingTest.java b/core/src/test/java/org/opensearch/sql/planner/physical/bucket/RoundingTest.java new file mode 100644 index 00000000000..c7dc67afdf4 --- /dev/null +++ b/core/src/test/java/org/opensearch/sql/planner/physical/bucket/RoundingTest.java @@ -0,0 +1,56 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. 
+ * + */ + +package org.opensearch.sql.planner.physical.bucket; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.opensearch.sql.data.type.ExprCoreType.STRING; +import static org.opensearch.sql.data.type.ExprCoreType.TIME; + +import com.google.common.collect.ImmutableMap; +import org.junit.jupiter.api.Test; +import org.opensearch.sql.data.model.ExprTimeValue; +import org.opensearch.sql.data.model.ExprValue; +import org.opensearch.sql.data.model.ExprValueUtils; +import org.opensearch.sql.exception.ExpressionEvaluationException; +import org.opensearch.sql.expression.DSL; +import org.opensearch.sql.expression.span.SpanExpression; + +public class RoundingTest { + @Test + void time_rounding_illegal_span() { + SpanExpression span = DSL.span(DSL.ref("time", TIME), DSL.literal(1), "d"); + Rounding rounding = Rounding.createRounding(span); + assertThrows(ExpressionEvaluationException.class, + () -> rounding.round(new ExprTimeValue("23:30:00"))); + } + + @Test + void round_unknown_type() { + SpanExpression span = DSL.span(DSL.ref("unknown", STRING), DSL.literal(1), ""); + Rounding rounding = Rounding.createRounding(span); + assertNull(rounding.round(ExprValueUtils.integerValue(1))); + assertNull(rounding.locate(ExprValueUtils.integerValue(1))); + assertEquals(0, rounding.createBuckets().length); + assertEquals(0, rounding.fillBuckets(new ExprValue[0], ImmutableMap.of(), "span").length); + } + + @Test + void resolve() { + String illegalUnit = "illegal"; + assertThrows(IllegalArgumentException.class, + () -> Rounding.DateTimeUnit.resolve(illegalUnit), + "Unable to resolve unit " + illegalUnit); + } +} diff --git a/core/src/test/java/org/opensearch/sql/utils/DateTimeUtilsTest.java b/core/src/test/java/org/opensearch/sql/utils/DateTimeUtilsTest.java new file mode 100644 index 00000000000..a77b86e75a0 --- /dev/null +++ 
b/core/src/test/java/org/opensearch/sql/utils/DateTimeUtilsTest.java @@ -0,0 +1,34 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. + * + */ + +package org.opensearch.sql.utils; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import java.time.Instant; +import java.time.LocalDateTime; +import java.time.ZoneId; +import java.util.concurrent.TimeUnit; +import org.junit.jupiter.api.Test; + +public class DateTimeUtilsTest { + @Test + void round() { + long actual = LocalDateTime.parse("2021-09-28T23:40:00").atZone(ZoneId.systemDefault()) + .toInstant().toEpochMilli(); + long rounded = DateTimeUtils.roundFloor(actual, TimeUnit.HOURS.toMillis(1)); + assertEquals( + LocalDateTime.parse("2021-09-28T23:00:00").atZone(ZoneId.systemDefault()).toInstant() + .toEpochMilli(), + Instant.ofEpochMilli(rounded).toEpochMilli()); + } +} diff --git a/docs/user/ppl/cmd/stats.rst b/docs/user/ppl/cmd/stats.rst index ee91d86d147..4f88bb75d6f 100644 --- a/docs/user/ppl/cmd/stats.rst +++ b/docs/user/ppl/cmd/stats.rst @@ -206,6 +206,39 @@ Example:: | 2.8613807855648994 | +--------------------+ + +By Clause +========= + +The by clause could be the fields and expressions like scalar functions and aggregation functions. Besides, the span clause can also be used in the by clause to split specific field into buckets in the same interval, the stats then does the aggregation by these span buckets. + +The span syntax is ``span(field_expr, interval_expr)``, the unit of the interval expression is the natural unit by default. If the field is a date and time type field, and the interval is in date/time units, you will need to specify the unit in the interval expression. 
For example, to split the field ``age`` into buckets by 10 years, it looks like ``span(age, 10)``. And here is another example of time span, the span to split a ``timestamp`` field into hourly intervals, it looks like ``span(timestamp, 1h)``. + +Available time unit: + ++----------------------------+ +| Span Interval Units | ++============================+ +| millisecond (ms) | ++----------------------------+ +| second (s) | ++----------------------------+ +| minute (m, case sensitive) | ++----------------------------+ +| hour (h) | ++----------------------------+ +| day (d) | ++----------------------------+ +| week (w) | ++----------------------------+ +| month (M, case sensitive) | ++----------------------------+ +| quarter (q) | ++----------------------------+ +| year (y) | ++----------------------------+ + + Example 1: Calculate the count of events ======================================== @@ -317,3 +350,19 @@ PPL query:: | 4 | 2 | +-----------------+--------------------------+ +Example 8: Calculate the count by a span +======================================== + +The example gets the count of age by the interval of 10 years. 
+ +PPL query:: + + os> source=accounts | stats count(age) by span(age, 10) as age_span + fetched rows / total rows = 2/2 + +--------------+------------+ + | count(age) | age_span | + |--------------+------------| + | 1 | 20 | + | 3 | 30 | + +--------------+------------+ + diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/StatsCommandIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/StatsCommandIT.java index 4a9603fe6bd..8b24f1c45f1 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/StatsCommandIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/StatsCommandIT.java @@ -27,6 +27,7 @@ package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_ACCOUNT; +import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_BANK; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_BANK_WITH_NULL_VALUES; import static org.opensearch.sql.util.MatcherUtils.rows; import static org.opensearch.sql.util.MatcherUtils.schema; @@ -34,6 +35,10 @@ import static org.opensearch.sql.util.MatcherUtils.verifySchema; import java.io.IOException; +import java.time.LocalDate; +import java.time.ZoneId; +import java.time.ZonedDateTime; +import java.time.format.DateTimeFormatter; import org.json.JSONObject; import org.junit.jupiter.api.Test; @@ -43,6 +48,7 @@ public class StatsCommandIT extends PPLIntegTestCase { public void init() throws IOException { loadIndex(Index.ACCOUNT); loadIndex(Index.BANK_WITH_NULL_VALUES); + loadIndex(Index.BANK); } @Test @@ -185,4 +191,32 @@ public void testStatsWithMissing() throws IOException { verifySchema(response, schema("avg(balance)", null, "double")); verifyDataRows(response, rows(31082.25)); } + + @Test + public void testStatsBySpan() throws IOException { + JSONObject response = executeQuery(String.format( + "source=%s | stats count() by span(age,10)", + TEST_INDEX_BANK)); + verifySchema(response, schema("count()", null, "integer"), schema("span(age,10)", null, 
"integer")); + verifyDataRows(response, rows(1, 20), rows(6, 30)); + } + + @Test + public void testStatsTimeSpan() throws IOException { + JSONObject response = executeQuery(String.format( + "source=%s | stats count() by span(birthdate,1y)", + TEST_INDEX_BANK)); + verifySchema(response, schema("count()", null, "integer"), schema( + "span(birthdate,1y)", null, "timestamp")); + verifyDataRows(response, rows(2, "2017-01-01 00:00:00"), rows(5, "2018-01-01 00:00:00")); + } + + @Test + public void testStatsAliasedSpan() throws IOException { + JSONObject response = executeQuery(String.format( + "source=%s | stats count() by span(age,10) as age_bucket", + TEST_INDEX_BANK)); + verifySchema(response, schema("count()", null, "integer"), schema("age_bucket", null, "integer")); + verifyDataRows(response, rows(1, 20), rows(6, 30)); + } } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/utils/ObjectContent.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/utils/ObjectContent.java index 3e1ee558d17..70b9b7f8127 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/utils/ObjectContent.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/utils/ObjectContent.java @@ -43,24 +43,28 @@ public class ObjectContent implements Content { private final Object value; + /** + * The parse method parses the value as double value, + * since the key values histogram buckets are defaulted to double. 
+ */ @Override public Integer intValue() { - return parseNumberValue(value, Integer::valueOf, Number::intValue); + return parseNumberValue(value, v -> Double.valueOf(v).intValue(), Number::intValue); } @Override public Long longValue() { - return parseNumberValue(value, Long::valueOf, Number::longValue); + return parseNumberValue(value, v -> Double.valueOf(v).longValue(), Number::longValue); } @Override public Short shortValue() { - return parseNumberValue(value, Short::valueOf, Number::shortValue); + return parseNumberValue(value, v -> Double.valueOf(v).shortValue(), Number::shortValue); } @Override public Byte byteValue() { - return parseNumberValue(value, Byte::valueOf, Number::byteValue); + return parseNumberValue(value, v -> Double.valueOf(v).byteValue(), Number::byteValue); } @Override diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/SpanAggregationParser.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/SpanAggregationParser.java new file mode 100644 index 00000000000..be7064a9299 --- /dev/null +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/SpanAggregationParser.java @@ -0,0 +1,52 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. 
+ * + */ + +package org.opensearch.sql.opensearch.response.agg; + +import com.google.common.collect.ImmutableList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import org.opensearch.search.aggregations.Aggregation; +import org.opensearch.search.aggregations.Aggregations; +import org.opensearch.search.aggregations.bucket.histogram.Histogram; + +public class SpanAggregationParser implements OpenSearchAggregationResponseParser { + private final MetricParserHelper metricsParser; + + public SpanAggregationParser(MetricParser metricParser) { + this.metricsParser = new MetricParserHelper(Collections.singletonList(metricParser)); + } + + @Override + public List> parse(Aggregations aggregations) { + ImmutableList.Builder> list = ImmutableList.builder(); + aggregations.asList().forEach(aggregation -> list + .addAll(parseHistogram((Histogram) aggregation))); + return list.build(); + } + + private List> parseHistogram(Histogram histogram) { + ImmutableList.Builder> mapList = ImmutableList.builder(); + histogram.getBuckets().forEach(bucket -> { + Map map = new HashMap<>(); + map.put(histogram.getName(), bucket.getKey().toString()); + Aggregation aggregation = bucket.getAggregations().asList().get(0); + Map metricsAggMap = metricsParser.parse(bucket.getAggregations()); + map.put(aggregation.getName(), metricsAggMap.get(aggregation.getName())); + mapList.add(map); + }); + return mapList.build(); + } + +} diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/AggregationQueryBuilder.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/AggregationQueryBuilder.java index 403f99e593b..baa5575c824 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/AggregationQueryBuilder.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/AggregationQueryBuilder.java @@ -51,12 +51,15 
@@ import org.opensearch.sql.expression.NamedExpression; import org.opensearch.sql.expression.ReferenceExpression; import org.opensearch.sql.expression.aggregation.NamedAggregator; +import org.opensearch.sql.expression.span.SpanExpression; import org.opensearch.sql.opensearch.response.agg.CompositeAggregationParser; import org.opensearch.sql.opensearch.response.agg.MetricParser; import org.opensearch.sql.opensearch.response.agg.NoBucketAggregationParser; import org.opensearch.sql.opensearch.response.agg.OpenSearchAggregationResponseParser; +import org.opensearch.sql.opensearch.response.agg.SpanAggregationParser; import org.opensearch.sql.opensearch.storage.script.aggregation.dsl.BucketAggregationBuilder; import org.opensearch.sql.opensearch.storage.script.aggregation.dsl.MetricAggregationBuilder; +import org.opensearch.sql.opensearch.storage.script.aggregation.dsl.SpanAggregationBuilder; import org.opensearch.sql.opensearch.storage.serialization.ExpressionSerializer; /** @@ -81,10 +84,19 @@ public class AggregationQueryBuilder extends ExpressionNodeVisitor ParsedPercentilesBucket.fromXContent(p, (String) c)) .put(DateHistogramAggregationBuilder.NAME, (p, c) -> ParsedDateHistogram.fromXContent(p, (String) c)) + .put(HistogramAggregationBuilder.NAME, + (p, c) -> ParsedHistogram.fromXContent(p, (String) c)) .put(CompositeAggregationBuilder.NAME, (p, c) -> ParsedComposite.fromXContent(p, (String) c)) .put(FilterAggregationBuilder.NAME, diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/response/OpenSearchAggregationResponseParserTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/response/OpenSearchAggregationResponseParserTest.java index 120d48b6010..05fca860a73 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/response/OpenSearchAggregationResponseParserTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/response/OpenSearchAggregationResponseParserTest.java @@ -49,6 +49,7 @@ import 
org.opensearch.sql.opensearch.response.agg.NoBucketAggregationParser; import org.opensearch.sql.opensearch.response.agg.OpenSearchAggregationResponseParser; import org.opensearch.sql.opensearch.response.agg.SingleValueParser; +import org.opensearch.sql.opensearch.response.agg.SpanAggregationParser; import org.opensearch.sql.opensearch.response.agg.StatsParser; @DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) @@ -292,6 +293,74 @@ void no_bucket_max_and_extended_stats() { contains(entry("esField", 93.71390409320287, "maxField", 360D))); } + @Test + void parse_histogram() { + String response = "{\n" + + " \"histogram#span\":{\n" + + " \"buckets\":[\n" + + " {\n" + + " \"key\":0.0,\n" + + " \"doc_count\":87,\n" + + " \"avg#avg\":{\n" + + " \"value\":48.04521372126437\n" + + " }\n" + + " },\n" + + " {\n" + + " \"key\":1.5,\n" + + " \"doc_count\":4176,\n" + + " \"avg#avg\":{\n" + + " \"value\":68.71430682770594\n" + + " }\n" + + " },\n" + + " {\n" + + " \"key\":3.0,\n" + + " \"doc_count\":412,\n" + + " \"avg#avg\":{\n" + + " \"value\":145.03216019417476\n" + + " }\n" + + " }\n" + + " ]\n" + + " }\n" + + "}"; + OpenSearchAggregationResponseParser parser = new SpanAggregationParser( + new SingleValueParser("avg")); + assertThat(parse(parser, response), contains( + entry("avg", 48.04521372126437, "span", "0.0"), + entry("avg", 68.71430682770594, "span", "1.5"), + entry("avg", 145.03216019417476, "span", "3.0"))); + } + + @Test + void parse_date_histogram() { + String response = "{\n" + + " \"date_histogram#timespan\":{\n" + + " \"buckets\":[\n" + + " {\n" + + " \"key_as_string\":\"2021-07-01T00:00:00.000Z\",\n" + + " \"key\":1625097600000,\n" + + " \"doc_count\":3586,\n" + + " \"value_count#count\":{\n" + + " \"value\":3586\n" + + " }\n" + + " },\n" + + " {\n" + + " \"key_as_string\":\"2021-08-01T00:00:00.000Z\",\n" + + " \"key\":1627776000000,\n" + + " \"doc_count\":1089,\n" + + " \"value_count#count\":{\n" + + " \"value\":1089\n" + + " }\n" + + " }\n" 
+ + " ]\n" + + " }\n" + + "}"; + OpenSearchAggregationResponseParser parser = new SpanAggregationParser( + new SingleValueParser("count")); + assertThat(parse(parser, response), contains( + entry("count", 3586D, "timespan", "2021-07-01T00:00Z"), + entry("count", 1089D, "timespan", "2021-08-01T00:00Z"))); + } + public List> parse(OpenSearchAggregationResponseParser parser, String json) { return parser.parse(fromJson(json)); } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/AggregationQueryBuilderTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/AggregationQueryBuilderTest.java index 62643baad2e..0d0f4856a17 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/AggregationQueryBuilderTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/AggregationQueryBuilderTest.java @@ -39,6 +39,7 @@ import static org.opensearch.sql.expression.DSL.literal; import static org.opensearch.sql.expression.DSL.named; import static org.opensearch.sql.expression.DSL.ref; +import static org.opensearch.sql.expression.DSL.span; import static org.opensearch.sql.opensearch.data.type.OpenSearchDataType.OPENSEARCH_TEXT_KEYWORD; import static org.opensearch.sql.opensearch.utils.Utils.agg; import static org.opensearch.sql.opensearch.utils.Utils.avg; @@ -67,6 +68,7 @@ import org.opensearch.sql.expression.Expression; import org.opensearch.sql.expression.NamedExpression; import org.opensearch.sql.expression.aggregation.AvgAggregator; +import org.opensearch.sql.expression.aggregation.CountAggregator; import org.opensearch.sql.expression.aggregation.NamedAggregator; import org.opensearch.sql.expression.config.ExpressionConfig; import org.opensearch.sql.opensearch.storage.serialization.ExpressionSerializer; @@ -416,6 +418,35 @@ void should_build_type_mapping_without_bucket() { )); } + @Test + void should_build_histogram() { + 
assertEquals( + "{\n" + + " \"SpanExpression(field=age, value=10, unit=NONE)\" : {\n" + + " \"histogram\" : {\n" + + " \"field\" : \"age\",\n" + + " \"interval\" : 10.0,\n" + + " \"offset\" : 0.0,\n" + + " \"order\" : {\n" + + " \"_key\" : \"asc\"\n" + + " },\n" + + " \"keyed\" : false,\n" + + " \"min_doc_count\" : 0\n" + + " },\n" + + " \"aggregations\" : {\n" + + " \"count(a)\" : {\n" + + " \"value_count\" : {\n" + + " \"field\" : \"a\"\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + "}", + buildQuery(Arrays.asList(named("count(a)", new CountAggregator(Arrays.asList(ref( + "a", INTEGER)), INTEGER))), + Arrays.asList(named(span(ref("age", INTEGER), literal(10), ""))))); + } + @SneakyThrows private String buildQuery(List namedAggregatorList, List groupByList) { diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/SpanAggregationBuilderTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/SpanAggregationBuilderTest.java new file mode 100644 index 00000000000..f8a78e3bab0 --- /dev/null +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/SpanAggregationBuilderTest.java @@ -0,0 +1,116 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. 
+ * + */ + +package org.opensearch.sql.opensearch.storage.script.aggregation.dsl; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.opensearch.sql.data.type.ExprCoreType.DATE; +import static org.opensearch.sql.data.type.ExprCoreType.INTEGER; +import static org.opensearch.sql.data.type.ExprCoreType.TIMESTAMP; +import static org.opensearch.sql.expression.DSL.literal; +import static org.opensearch.sql.expression.DSL.named; +import static org.opensearch.sql.expression.DSL.ref; +import static org.opensearch.sql.expression.DSL.span; + +import com.fasterxml.jackson.databind.ObjectMapper; +import lombok.SneakyThrows; +import org.junit.jupiter.api.DisplayNameGeneration; +import org.junit.jupiter.api.DisplayNameGenerator; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.junit.jupiter.MockitoExtension; +import org.opensearch.sql.expression.NamedExpression; + +@DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) +@ExtendWith(MockitoExtension.class) +public class SpanAggregationBuilderTest { + private final SpanAggregationBuilder aggregationBuilder = new SpanAggregationBuilder(); + + @Test + void fixed_interval_time_span() { + assertEquals( + "{\n" + + " \"SpanExpression(field=timestamp, value=1, unit=H)\" : {\n" + + " \"date_histogram\" : {\n" + + " \"field\" : \"timestamp\",\n" + + " \"fixed_interval\" : \"1h\",\n" + + " \"offset\" : 0,\n" + + " \"order\" : {\n" + + " \"_key\" : \"asc\"\n" + + " },\n" + + " \"keyed\" : false,\n" + + " \"min_doc_count\" : 0\n" + + " }\n" + + " }\n" + + "}", + buildQuery(named(span(ref("timestamp", TIMESTAMP), literal(1), "h"))) + ); + } + + @Test + void calendar_interval_time_span() { + assertEquals( + "{\n" + + " \"SpanExpression(field=date, value=1, unit=W)\" : {\n" + + " \"date_histogram\" : {\n" + + " \"field\" : \"date\",\n" + + " \"calendar_interval\" : \"1w\",\n" + + " 
\"offset\" : 0,\n" + + " \"order\" : {\n" + + " \"_key\" : \"asc\"\n" + + " },\n" + + " \"keyed\" : false,\n" + + " \"min_doc_count\" : 0\n" + + " }\n" + + " }\n" + + "}", + buildQuery(named(span(ref("date", DATE), literal(1), "w"))) + ); + } + + @Test + void general_span() { + assertEquals( + "{\n" + + " \"SpanExpression(field=age, value=1, unit=NONE)\" : {\n" + + " \"histogram\" : {\n" + + " \"field\" : \"age\",\n" + + " \"interval\" : 1.0,\n" + + " \"offset\" : 0.0,\n" + + " \"order\" : {\n" + + " \"_key\" : \"asc\"\n" + + " },\n" + + " \"keyed\" : false,\n" + + " \"min_doc_count\" : 0\n" + + " }\n" + + " }\n" + + "}", + buildQuery(named(span(ref("age", INTEGER), literal(1), ""))) + ); + } + + @Test + void invalid_unit() { + NamedExpression namedSpan = named(span(ref("age", INTEGER), literal(1), "invalid_unit")); + assertThrows(IllegalStateException.class, () -> buildQuery(namedSpan)); + } + + @SneakyThrows + private String buildQuery(NamedExpression namedExpression) { + ObjectMapper objectMapper = new ObjectMapper(); + return objectMapper.readTree( + aggregationBuilder.build(namedExpression).toString()) + .toPrettyString(); + } + +} diff --git a/ppl/src/main/antlr/OpenSearchPPLLexer.g4 b/ppl/src/main/antlr/OpenSearchPPLLexer.g4 index cb665f6c887..55331fe2ccd 100644 --- a/ppl/src/main/antlr/OpenSearchPPLLexer.g4 +++ b/ppl/src/main/antlr/OpenSearchPPLLexer.g4 @@ -85,6 +85,7 @@ REGEXP: 'REGEXP'; DATETIME: 'DATETIME'; INTERVAL: 'INTERVAL'; MICROSECOND: 'MICROSECOND'; +MILLISECOND: 'MILLISECOND'; SECOND: 'SECOND'; MINUTE: 'MINUTE'; HOUR: 'HOUR'; @@ -255,6 +256,16 @@ IFNULL: 'IFNULL'; NULLIF: 'NULLIF'; IF: 'IF'; +// SPAN KEYWORDS +SPAN: 'SPAN'; +MS: 'MS'; +S: 'S'; +M: 'M'; +H: 'H'; +W: 'W'; +Q: 'Q'; +Y: 'Y'; + // LITERALS AND VALUES //STRING_LITERAL: DQUOTA_STRING | SQUOTA_STRING | BQUOTA_STRING; diff --git a/ppl/src/main/antlr/OpenSearchPPLParser.g4 b/ppl/src/main/antlr/OpenSearchPPLParser.g4 index fa3c5b64b77..48641e7feb3 100644 --- 
a/ppl/src/main/antlr/OpenSearchPPLParser.g4 +++ b/ppl/src/main/antlr/OpenSearchPPLParser.g4 @@ -66,7 +66,7 @@ statsCommand (ALLNUM EQUAL allnum=booleanLiteral)? (DELIM EQUAL delim=stringLiteral)? statsAggTerm (COMMA statsAggTerm)* - (byClause)? + (byClause | bySpanClause)? (DEDUP_SPLITVALUES EQUAL dedupsplit=booleanLiteral)? ; @@ -118,6 +118,14 @@ byClause : BY fieldList ; +bySpanClause + : BY spanClause (AS alias=qualifiedName)? + ; + +spanClause + : SPAN LT_PRTHS fieldExpression COMMA value=literalValue (unit=timespanUnit)? RT_PRTHS + ; + sortbyClause : sortField (COMMA sortField)* ; @@ -316,6 +324,11 @@ intervalUnit | DAY_SECOND | DAY_MINUTE | DAY_HOUR | YEAR_MONTH ; +timespanUnit + : MS | S | M | H | D | W | Q | Y + | MILLISECOND | SECOND | MINUTE | HOUR | DAY | WEEK | MONTH | QUARTER | YEAR + ; + valueList : LT_PRTHS literalValue (COMMA literalValue)* RT_PRTHS @@ -348,4 +361,5 @@ keywordsCanBeId | statsFunctionName | TIMESTAMP | DATE | TIME | FIRST | LAST + | timespanUnit | SPAN ; diff --git a/ppl/src/main/java/org/opensearch/sql/ppl/parser/AstBuilder.java b/ppl/src/main/java/org/opensearch/sql/ppl/parser/AstBuilder.java index fb812e738f3..46f242ee972 100644 --- a/ppl/src/main/java/org/opensearch/sql/ppl/parser/AstBuilder.java +++ b/ppl/src/main/java/org/opensearch/sql/ppl/parser/AstBuilder.java @@ -166,13 +166,16 @@ public UnresolvedPlan visitStatsCommand(StatsCommandContext ctx) { aggListBuilder.add(alias); } - List groupList = ctx.byClause() == null ? Collections.emptyList() : - ctx.byClause() - .fieldList() - .fieldExpression() - .stream() - .map(groupCtx -> new Alias(getTextInQuery(groupCtx), visitExpression(groupCtx))) - .collect(Collectors.toList()); + List groupList = ctx.byClause() != null + ? ctx.byClause() + .fieldList() + .fieldExpression() + .stream() + .map(groupCtx -> new Alias(getTextInQuery(groupCtx), visitExpression(groupCtx))).collect( + Collectors.toList()) + : ctx.bySpanClause() != null + ? 
Collections.singletonList(visitExpression(ctx.bySpanClause())) + : Collections.emptyList(); Aggregation aggregation = new Aggregation( aggListBuilder.build(), diff --git a/ppl/src/main/java/org/opensearch/sql/ppl/parser/AstExpressionBuilder.java b/ppl/src/main/java/org/opensearch/sql/ppl/parser/AstExpressionBuilder.java index 7da4f90cf0c..8287b2266b4 100644 --- a/ppl/src/main/java/org/opensearch/sql/ppl/parser/AstExpressionBuilder.java +++ b/ppl/src/main/java/org/opensearch/sql/ppl/parser/AstExpressionBuilder.java @@ -32,6 +32,7 @@ import static org.opensearch.sql.ppl.antlr.parser.OpenSearchPPLParser.BinaryArithmeticContext; import static org.opensearch.sql.ppl.antlr.parser.OpenSearchPPLParser.BooleanFunctionCallContext; import static org.opensearch.sql.ppl.antlr.parser.OpenSearchPPLParser.BooleanLiteralContext; +import static org.opensearch.sql.ppl.antlr.parser.OpenSearchPPLParser.BySpanClauseContext; import static org.opensearch.sql.ppl.antlr.parser.OpenSearchPPLParser.CompareExprContext; import static org.opensearch.sql.ppl.antlr.parser.OpenSearchPPLParser.CountAllFunctionCallContext; import static org.opensearch.sql.ppl.antlr.parser.OpenSearchPPLParser.DecimalLiteralContext; @@ -51,6 +52,7 @@ import static org.opensearch.sql.ppl.antlr.parser.OpenSearchPPLParser.ParentheticBinaryArithmeticContext; import static org.opensearch.sql.ppl.antlr.parser.OpenSearchPPLParser.PercentileAggFunctionContext; import static org.opensearch.sql.ppl.antlr.parser.OpenSearchPPLParser.SortFieldContext; +import static org.opensearch.sql.ppl.antlr.parser.OpenSearchPPLParser.SpanClauseContext; import static org.opensearch.sql.ppl.antlr.parser.OpenSearchPPLParser.StatsFunctionCallContext; import static org.opensearch.sql.ppl.antlr.parser.OpenSearchPPLParser.StringLiteralContext; import static org.opensearch.sql.ppl.antlr.parser.OpenSearchPPLParser.TableSourceContext; @@ -64,7 +66,9 @@ import java.util.stream.Collectors; import org.antlr.v4.runtime.ParserRuleContext; import 
org.antlr.v4.runtime.RuleContext; +import org.antlr.v4.runtime.Token; import org.opensearch.sql.ast.expression.AggregateFunction; +import org.opensearch.sql.ast.expression.Alias; import org.opensearch.sql.ast.expression.AllFields; import org.opensearch.sql.ast.expression.And; import org.opensearch.sql.ast.expression.Argument; @@ -80,9 +84,12 @@ import org.opensearch.sql.ast.expression.Not; import org.opensearch.sql.ast.expression.Or; import org.opensearch.sql.ast.expression.QualifiedName; +import org.opensearch.sql.ast.expression.Span; +import org.opensearch.sql.ast.expression.SpanUnit; import org.opensearch.sql.ast.expression.UnresolvedExpression; import org.opensearch.sql.ast.expression.Xor; import org.opensearch.sql.common.utils.StringUtils; +import org.opensearch.sql.ppl.antlr.parser.OpenSearchPPLParser; import org.opensearch.sql.ppl.antlr.parser.OpenSearchPPLParserBaseVisitor; import org.opensearch.sql.ppl.utils.ArgumentFactory; @@ -294,6 +301,19 @@ public UnresolvedExpression visitBooleanLiteral(BooleanLiteralContext ctx) { return new Literal(Boolean.valueOf(ctx.getText()), DataType.BOOLEAN); } + @Override + public UnresolvedExpression visitBySpanClause(BySpanClauseContext ctx) { + String name = ctx.spanClause().getText(); + return ctx.alias != null ? new Alias(name, visit(ctx.spanClause()), StringUtils + .unquoteIdentifier(ctx.alias.getText())) : new Alias(name, visit(ctx.spanClause())); + } + + @Override + public UnresolvedExpression visitSpanClause(SpanClauseContext ctx) { + String unit = ctx.unit != null ? 
ctx.unit.getText() : ""; + return new Span(visit(ctx.fieldExpression()), visit(ctx.value), SpanUnit.of(unit)); + } + private QualifiedName visitIdentifiers(List ctx) { return new QualifiedName( ctx.stream() diff --git a/ppl/src/test/java/org/opensearch/sql/ppl/parser/AstBuilderTest.java b/ppl/src/test/java/org/opensearch/sql/ppl/parser/AstBuilderTest.java index 38d28fe30a4..ad6a1bcab3f 100644 --- a/ppl/src/test/java/org/opensearch/sql/ppl/parser/AstBuilderTest.java +++ b/ppl/src/test/java/org/opensearch/sql/ppl/parser/AstBuilderTest.java @@ -55,6 +55,7 @@ import static org.opensearch.sql.ast.dsl.AstDSL.relation; import static org.opensearch.sql.ast.dsl.AstDSL.rename; import static org.opensearch.sql.ast.dsl.AstDSL.sort; +import static org.opensearch.sql.ast.dsl.AstDSL.span; import static org.opensearch.sql.ast.dsl.AstDSL.stringLiteral; import org.junit.Ignore; @@ -62,6 +63,7 @@ import org.junit.Test; import org.junit.rules.ExpectedException; import org.opensearch.sql.ast.Node; +import org.opensearch.sql.ast.expression.SpanUnit; import org.opensearch.sql.ast.tree.RareTopN.CommandType; import org.opensearch.sql.ppl.antlr.PPLSyntaxParser; @@ -286,6 +288,65 @@ public void testStatsCommandWithNestedFunctions() { )); } + @Test + public void testStatsCommandWithSpan() { + assertEqual("source=t | stats avg(price) by span(timestamp, 1h)", + agg( + relation("t"), + exprList( + alias("avg(price)", aggregate("avg", field("price"))) + ), + emptyList(), + exprList( + alias("span(timestamp,1h)", span(field("timestamp"), intLiteral(1), SpanUnit.H)) + ), + defaultStatsArgs() + )); + + assertEqual("source=t | stats count(a) by span(age, 10)", + agg( + relation("t"), + exprList( + alias("count(a)", aggregate("count", field("a"))) + ), + emptyList(), + exprList( + alias("span(age,10)", span(field("age"), intLiteral(10), SpanUnit.NONE)) + ), + defaultStatsArgs() + )); + } + + @Test + public void testStatsSpanWithAlias() { + assertEqual("source=t | stats avg(price) by span(timestamp, 
1h) as time_span", + agg( + relation("t"), + exprList( + alias("avg(price)", aggregate("avg", field("price"))) + ), + emptyList(), + exprList( + alias("span(timestamp,1h)", span( + field("timestamp"), intLiteral(1), SpanUnit.H), "time_span") + ), + defaultStatsArgs() + )); + + assertEqual("source=t | stats count(a) by span(age, 10) as numeric_span", + agg( + relation("t"), + exprList( + alias("count(a)", aggregate("count", field("a"))) + ), + emptyList(), + exprList(alias("span(age,10)", span( + field("age"), intLiteral(10), SpanUnit.NONE), "numeric_span") + ), + defaultStatsArgs() + )); + } + @Test public void testDedupCommand() { assertEqual("source=t | dedup f1, f2",