, ToXContentFragment {
public static final Version V_2_13_0 = new Version(2130099, org.apache.lucene.util.Version.LUCENE_9_10_0);
public static final Version V_2_13_1 = new Version(2130199, org.apache.lucene.util.Version.LUCENE_9_10_0);
public static final Version V_2_14_0 = new Version(2140099, org.apache.lucene.util.Version.LUCENE_9_10_0);
+ public static final Version V_2_14_1 = new Version(2140199, org.apache.lucene.util.Version.LUCENE_9_10_0);
public static final Version V_2_15_0 = new Version(2150099, org.apache.lucene.util.Version.LUCENE_9_10_0);
- public static final Version V_3_0_0 = new Version(3000099, org.apache.lucene.util.Version.LUCENE_9_11_0);
+ public static final Version V_2_15_1 = new Version(2150199, org.apache.lucene.util.Version.LUCENE_9_10_0);
+ public static final Version V_2_16_0 = new Version(2160099, org.apache.lucene.util.Version.LUCENE_9_11_1);
+ public static final Version V_3_0_0 = new Version(3000099, org.apache.lucene.util.Version.LUCENE_9_12_0);
public static final Version CURRENT = V_3_0_0;
public static Version fromId(int id) {
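The numeric ids above follow the usual OpenSearch encoding of major, minor, revision, and build digits. A hypothetical helper (not part of this diff) that decodes an id under that assumption:

final class VersionIdExample {
    // Assumes id = major * 1_000_000 + minor * 10_000 + revision * 100 + build,
    // e.g. 2140199 decodes to 2.14.1 with build 99.
    static String describe(int id) {
        int major = id / 1_000_000;
        int minor = (id / 10_000) % 100;
        int revision = (id / 100) % 100;
        int build = id % 100;
        return major + "." + minor + "." + revision + " (build " + build + ")";
    }
}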
diff --git a/server/src/main/java/org/opensearch/index/translog/BufferedChecksumStreamInput.java b/libs/core/src/main/java/org/opensearch/core/common/io/stream/BufferedChecksumStreamInput.java
similarity index 96%
rename from server/src/main/java/org/opensearch/index/translog/BufferedChecksumStreamInput.java
rename to libs/core/src/main/java/org/opensearch/core/common/io/stream/BufferedChecksumStreamInput.java
index f75f27b7bcb91..41680961b36e9 100644
--- a/server/src/main/java/org/opensearch/index/translog/BufferedChecksumStreamInput.java
+++ b/libs/core/src/main/java/org/opensearch/core/common/io/stream/BufferedChecksumStreamInput.java
@@ -30,12 +30,10 @@
* GitHub history for details.
*/
-package org.opensearch.index.translog;
+package org.opensearch.core.common.io.stream;
import org.apache.lucene.store.BufferedChecksum;
import org.apache.lucene.util.BitUtil;
-import org.opensearch.core.common.io.stream.FilterStreamInput;
-import org.opensearch.core.common.io.stream.StreamInput;
import java.io.EOFException;
import java.io.IOException;
diff --git a/server/src/main/java/org/opensearch/index/translog/BufferedChecksumStreamOutput.java b/libs/core/src/main/java/org/opensearch/core/common/io/stream/BufferedChecksumStreamOutput.java
similarity index 96%
rename from server/src/main/java/org/opensearch/index/translog/BufferedChecksumStreamOutput.java
rename to libs/core/src/main/java/org/opensearch/core/common/io/stream/BufferedChecksumStreamOutput.java
index 9e96664c79cc5..422f956c0cd47 100644
--- a/server/src/main/java/org/opensearch/index/translog/BufferedChecksumStreamOutput.java
+++ b/libs/core/src/main/java/org/opensearch/core/common/io/stream/BufferedChecksumStreamOutput.java
@@ -30,11 +30,10 @@
* GitHub history for details.
*/
-package org.opensearch.index.translog;
+package org.opensearch.core.common.io.stream;
import org.apache.lucene.store.BufferedChecksum;
import org.opensearch.common.annotation.PublicApi;
-import org.opensearch.core.common.io.stream.StreamOutput;
import java.io.IOException;
import java.util.zip.CRC32;
diff --git a/libs/core/src/main/java/org/opensearch/core/compress/CompressorRegistry.java b/libs/core/src/main/java/org/opensearch/core/compress/CompressorRegistry.java
index af09a7aebba79..711f56c9f3e3b 100644
--- a/libs/core/src/main/java/org/opensearch/core/compress/CompressorRegistry.java
+++ b/libs/core/src/main/java/org/opensearch/core/compress/CompressorRegistry.java
@@ -78,6 +78,19 @@ public static Compressor compressor(final BytesReference bytes) {
return null;
}
+ /**
+ * @param bytes The bytes to check the compression for
+ * @return The detected compressor; if no compressor is detected, the NoneCompressor is returned.
+ */
+ public static Compressor compressorForWritable(final BytesReference bytes) {
+ for (Compressor compressor : registeredCompressors.values()) {
+ if (compressor.isCompressed(bytes) == true) {
+ return compressor;
+ }
+ }
+ return CompressorRegistry.none();
+ }
+
/** Decompress the provided {@link BytesReference}. */
public static BytesReference uncompress(BytesReference bytes) throws IOException {
Compressor compressor = compressor(bytes);
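A minimal sketch of how a caller might rely on the new compressorForWritable: unlike compressor(bytes), it never returns null, so the result (possibly the "none" pass-through compressor) can be used unconditionally. The helper name below is hypothetical.

import java.io.IOException;

import org.opensearch.core.common.bytes.BytesReference;
import org.opensearch.core.compress.Compressor;
import org.opensearch.core.compress.CompressorRegistry;

final class CompressorLookupExample {
    // Hypothetical helper: uncompresses when a compressor is detected; otherwise the
    // NoneCompressor returned by compressorForWritable passes the bytes through.
    static BytesReference uncompressIfNeeded(BytesReference bytes) throws IOException {
        Compressor compressor = CompressorRegistry.compressorForWritable(bytes);
        return compressor.uncompress(bytes);
    }
}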
diff --git a/libs/core/src/main/java/org/opensearch/core/tasks/resourcetracker/ResourceUsageInfo.java b/libs/core/src/main/java/org/opensearch/core/tasks/resourcetracker/ResourceUsageInfo.java
index a278b61894a65..e7b51c3389b52 100644
--- a/libs/core/src/main/java/org/opensearch/core/tasks/resourcetracker/ResourceUsageInfo.java
+++ b/libs/core/src/main/java/org/opensearch/core/tasks/resourcetracker/ResourceUsageInfo.java
@@ -104,6 +104,10 @@ public long getTotalValue() {
return endValue.get() - startValue;
}
+ public long getStartValue() {
+ return startValue;
+ }
+
@Override
public String toString() {
return String.valueOf(getTotalValue());
diff --git a/libs/core/src/main/java/org/opensearch/core/tasks/resourcetracker/TaskResourceInfo.java b/libs/core/src/main/java/org/opensearch/core/tasks/resourcetracker/TaskResourceInfo.java
new file mode 100644
index 0000000000000..373cdbfa7e9a1
--- /dev/null
+++ b/libs/core/src/main/java/org/opensearch/core/tasks/resourcetracker/TaskResourceInfo.java
@@ -0,0 +1,225 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.core.tasks.resourcetracker;
+
+import org.opensearch.common.annotation.PublicApi;
+import org.opensearch.core.ParseField;
+import org.opensearch.core.common.Strings;
+import org.opensearch.core.common.io.stream.StreamInput;
+import org.opensearch.core.common.io.stream.StreamOutput;
+import org.opensearch.core.common.io.stream.Writeable;
+import org.opensearch.core.xcontent.ConstructingObjectParser;
+import org.opensearch.core.xcontent.MediaTypeRegistry;
+import org.opensearch.core.xcontent.ToXContentObject;
+import org.opensearch.core.xcontent.XContentBuilder;
+
+import java.io.IOException;
+import java.util.Objects;
+
+import static org.opensearch.core.xcontent.ConstructingObjectParser.constructorArg;
+
+/**
+ * Task resource usage information with minimal information about the task
+ *
+ * Writeable TaskResourceInfo objects are used to represent resource usage
+ * information of running tasks, which can be propagated to coordinator node
+ * to infer query-level resource usage
+ *
+ * @opensearch.api
+ */
+@PublicApi(since = "2.15.0")
+public class TaskResourceInfo implements Writeable, ToXContentObject {
+ private final String action;
+ private final long taskId;
+ private final long parentTaskId;
+ private final String nodeId;
+ private final TaskResourceUsage taskResourceUsage;
+
+ private static final ParseField ACTION = new ParseField("action");
+ private static final ParseField TASK_ID = new ParseField("taskId");
+ private static final ParseField PARENT_TASK_ID = new ParseField("parentTaskId");
+ private static final ParseField NODE_ID = new ParseField("nodeId");
+ private static final ParseField TASK_RESOURCE_USAGE = new ParseField("taskResourceUsage");
+
+ public TaskResourceInfo(
+ final String action,
+ final long taskId,
+ final long parentTaskId,
+ final String nodeId,
+ final TaskResourceUsage taskResourceUsage
+ ) {
+ this.action = action;
+ this.taskId = taskId;
+ this.parentTaskId = parentTaskId;
+ this.nodeId = nodeId;
+ this.taskResourceUsage = taskResourceUsage;
+ }
+
+ public static final ConstructingObjectParser<TaskResourceInfo, Void> PARSER = new ConstructingObjectParser<>(
+ "task_resource_info",
+ a -> new Builder().setAction((String) a[0])
+ .setTaskId((Long) a[1])
+ .setParentTaskId((Long) a[2])
+ .setNodeId((String) a[3])
+ .setTaskResourceUsage((TaskResourceUsage) a[4])
+ .build()
+ );
+
+ static {
+ PARSER.declareString(constructorArg(), ACTION);
+ PARSER.declareLong(constructorArg(), TASK_ID);
+ PARSER.declareLong(constructorArg(), PARENT_TASK_ID);
+ PARSER.declareString(constructorArg(), NODE_ID);
+ PARSER.declareObject(constructorArg(), TaskResourceUsage.PARSER, TASK_RESOURCE_USAGE);
+ }
+
+ @Override
+ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+ builder.startObject();
+ builder.field(ACTION.getPreferredName(), this.action);
+ builder.field(TASK_ID.getPreferredName(), this.taskId);
+ builder.field(PARENT_TASK_ID.getPreferredName(), this.parentTaskId);
+ builder.field(NODE_ID.getPreferredName(), this.nodeId);
+ builder.startObject(TASK_RESOURCE_USAGE.getPreferredName());
+ this.taskResourceUsage.toXContent(builder, params);
+ builder.endObject();
+ builder.endObject();
+ return builder;
+ }
+
+ /**
+ * Builder for {@link TaskResourceInfo}
+ */
+ public static class Builder {
+ private TaskResourceUsage taskResourceUsage;
+ private String action;
+ private long taskId;
+ private long parentTaskId;
+ private String nodeId;
+
+ public Builder setTaskResourceUsage(final TaskResourceUsage taskResourceUsage) {
+ this.taskResourceUsage = taskResourceUsage;
+ return this;
+ }
+
+ public Builder setAction(final String action) {
+ this.action = action;
+ return this;
+ }
+
+ public Builder setTaskId(final long taskId) {
+ this.taskId = taskId;
+ return this;
+ }
+
+ public Builder setParentTaskId(final long parentTaskId) {
+ this.parentTaskId = parentTaskId;
+ return this;
+ }
+
+ public Builder setNodeId(final String nodeId) {
+ this.nodeId = nodeId;
+ return this;
+ }
+
+ public TaskResourceInfo build() {
+ return new TaskResourceInfo(action, taskId, parentTaskId, nodeId, taskResourceUsage);
+ }
+ }
+
+ /**
+ * Read task info from a stream.
+ *
+ * @param in StreamInput to read
+ * @return {@link TaskResourceInfo}
+ * @throws IOException IOException
+ */
+ public static TaskResourceInfo readFromStream(StreamInput in) throws IOException {
+ return new TaskResourceInfo.Builder().setAction(in.readString())
+ .setTaskId(in.readLong())
+ .setParentTaskId(in.readLong())
+ .setNodeId(in.readString())
+ .setTaskResourceUsage(TaskResourceUsage.readFromStream(in))
+ .build();
+ }
+
+ /**
+ * Get TaskResourceUsage
+ *
+ * @return taskResourceUsage
+ */
+ public TaskResourceUsage getTaskResourceUsage() {
+ return taskResourceUsage;
+ }
+
+ /**
+ * Get parent task id
+ *
+ * @return parent task id
+ */
+ public long getParentTaskId() {
+ return parentTaskId;
+ }
+
+ /**
+ * Get task id
+ * @return task id
+ */
+ public long getTaskId() {
+ return taskId;
+ }
+
+ /**
+ * Get node id
+ * @return node id
+ */
+ public String getNodeId() {
+ return nodeId;
+ }
+
+ /**
+ * Get task action
+ * @return task action
+ */
+ public String getAction() {
+ return action;
+ }
+
+ @Override
+ public void writeTo(StreamOutput out) throws IOException {
+ out.writeString(action);
+ out.writeLong(taskId);
+ out.writeLong(parentTaskId);
+ out.writeString(nodeId);
+ taskResourceUsage.writeTo(out);
+ }
+
+ @Override
+ public String toString() {
+ return Strings.toString(MediaTypeRegistry.JSON, this);
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (obj == null || obj.getClass() != TaskResourceInfo.class) {
+ return false;
+ }
+ TaskResourceInfo other = (TaskResourceInfo) obj;
+ return action.equals(other.action)
+ && taskId == other.taskId
+ && parentTaskId == other.parentTaskId
+ && Objects.equals(nodeId, other.nodeId)
+ && taskResourceUsage.equals(other.taskResourceUsage);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(action, taskId, parentTaskId, nodeId, taskResourceUsage);
+ }
+}
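An illustrative sketch (values made up) of building the new class through its Builder and rendering it as JSON via toString(); it assumes TaskResourceUsage's existing (cpuTimeInNanos, memoryInBytes) constructor.

import org.opensearch.core.tasks.resourcetracker.TaskResourceInfo;
import org.opensearch.core.tasks.resourcetracker.TaskResourceUsage;

final class TaskResourceInfoExample {
    static String asJson() {
        TaskResourceInfo info = new TaskResourceInfo.Builder()
            .setAction("indices:data/read/search")                          // sample action name
            .setTaskId(42L)
            .setParentTaskId(-1L)
            .setNodeId("node-1")
            .setTaskResourceUsage(new TaskResourceUsage(1_000_000L, 2_048L)) // cpu nanos, memory bytes
            .build();
        return info.toString(); // JSON, via Strings.toString(MediaTypeRegistry.JSON, this)
    }
}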
diff --git a/libs/core/src/main/java/org/opensearch/core/xcontent/XContentBuilder.java b/libs/core/src/main/java/org/opensearch/core/xcontent/XContentBuilder.java
index 976f353100c55..552945d085884 100644
--- a/libs/core/src/main/java/org/opensearch/core/xcontent/XContentBuilder.java
+++ b/libs/core/src/main/java/org/opensearch/core/xcontent/XContentBuilder.java
@@ -157,6 +157,9 @@ public static XContentBuilder builder(XContent xContent, Set<String> includes, S
/**
* Returns a string representation of the builder (only applicable for text based xcontent).
+ * Note: calling toString() explicitly, or implicitly (for example from a debugger), may close the
+ * XContentBuilder as a side effect of BytesReference#bytes().
+ * Attempting to write more content after toString() will cause an NPE. Use it with caution.
*/
@Override
public String toString() {
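A short sketch of the caveat documented above, assuming XContentBuilder.builder(MediaTypeRegistry.JSON.xContent()) as a text-based builder: once toString() runs, the builder may be closed and any further write fails.

import org.opensearch.core.xcontent.MediaTypeRegistry;
import org.opensearch.core.xcontent.XContentBuilder;

final class ToStringCaveatExample {
    static String demo() throws Exception {
        XContentBuilder builder = XContentBuilder.builder(MediaTypeRegistry.JSON.xContent());
        builder.startObject().field("a", 1).endObject();
        String rendered = builder.toString(); // may close the builder as a side effect
        // builder.startObject();             // writing here would fail; finish all writes first
        return rendered;
    }
}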
diff --git a/libs/core/src/main/java/org/opensearch/core/xcontent/filtering/FilterPath.java b/libs/core/src/main/java/org/opensearch/core/xcontent/filtering/FilterPath.java
index 5389538a8c7dd..b8da9787165f8 100644
--- a/libs/core/src/main/java/org/opensearch/core/xcontent/filtering/FilterPath.java
+++ b/libs/core/src/main/java/org/opensearch/core/xcontent/filtering/FilterPath.java
@@ -46,7 +46,6 @@
public class FilterPath {
static final FilterPath EMPTY = new FilterPath();
-
private final String filter;
private final String segment;
private final FilterPath next;
@@ -99,32 +98,29 @@ public static FilterPath[] compile(Set<String> filters) {
List<FilterPath> paths = new ArrayList<>();
for (String filter : filters) {
- if (filter != null) {
+ if (filter != null && !filter.isEmpty()) {
filter = filter.trim();
if (filter.length() > 0) {
- paths.add(parse(filter, filter));
+ paths.add(parse(filter));
}
}
}
return paths.toArray(new FilterPath[0]);
}
- private static FilterPath parse(final String filter, final String segment) {
- int end = segment.length();
-
- for (int i = 0; i < end;) {
- char c = segment.charAt(i);
+ private static FilterPath parse(final String filter) {
+ // Split the filter into segments using a regex
+ // that avoids splitting escaped dots.
+ String[] segments = filter.split("(?= 0; i--) {
+ // Replace escaped dots with actual dots in the current segment.
+ String segment = segments[i].replaceAll("\\\\.", ".");
+ next = new FilterPath(filter, segment, next);
}
- return new FilterPath(filter, segment.replaceAll("\\\\.", "."), EMPTY);
+
+ return next;
}
@Override
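The new parse(...) above splits the filter on unescaped dots and then unescapes the remaining ones. A standalone sketch of just that splitting behaviour:

import java.util.Arrays;

final class FilterPathSplitExample {
    public static void main(String[] args) {
        String filter = "foo.bar\\.baz.qux";                  // runtime value: foo.bar\.baz.qux
        String[] segments = filter.split("(?<!\\\\)\\.");     // split on dots not preceded by a backslash
        Object[] unescaped = Arrays.stream(segments)
            .map(s -> s.replaceAll("\\\\.", "."))             // turn the escaped dot back into a plain dot
            .toArray();
        System.out.println(Arrays.toString(unescaped));       // prints [foo, bar.baz, qux]
    }
}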
diff --git a/libs/core/src/test/java/org/opensearch/core/xcontent/filtering/FilterPathTests.java b/libs/core/src/test/java/org/opensearch/core/xcontent/filtering/FilterPathTests.java
index 0c5a17b70a956..d3191609f6119 100644
--- a/libs/core/src/test/java/org/opensearch/core/xcontent/filtering/FilterPathTests.java
+++ b/libs/core/src/test/java/org/opensearch/core/xcontent/filtering/FilterPathTests.java
@@ -35,6 +35,7 @@
import org.opensearch.common.util.set.Sets;
import org.opensearch.test.OpenSearchTestCase;
+import java.util.HashSet;
import java.util.Set;
import static java.util.Collections.singleton;
@@ -369,4 +370,20 @@ public void testMultipleFilterPaths() {
assertThat(filterPath.getSegment(), is(emptyString()));
assertSame(filterPath, FilterPath.EMPTY);
}
+
+ public void testCompileWithEmptyString() {
+ Set filters = new HashSet<>();
+ filters.add("");
+ FilterPath[] filterPaths = FilterPath.compile(filters);
+ assertNotNull(filterPaths);
+ assertEquals(0, filterPaths.length);
+ }
+
+ public void testCompileWithNull() {
+ Set filters = new HashSet<>();
+ filters.add(null);
+ FilterPath[] filterPaths = FilterPath.compile(filters);
+ assertNotNull(filterPaths);
+ assertEquals(0, filterPaths.length);
+ }
}
diff --git a/libs/geo/src/main/java/org/opensearch/geometry/utils/WellKnownText.java b/libs/geo/src/main/java/org/opensearch/geometry/utils/WellKnownText.java
index ed1d63e6d4fef..8ad135b8bc1ca 100644
--- a/libs/geo/src/main/java/org/opensearch/geometry/utils/WellKnownText.java
+++ b/libs/geo/src/main/java/org/opensearch/geometry/utils/WellKnownText.java
@@ -49,8 +49,10 @@
import java.io.StreamTokenizer;
import java.io.StringReader;
import java.text.ParseException;
+import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Collections;
+import java.util.Deque;
import java.util.List;
import java.util.Locale;
@@ -67,6 +69,7 @@ public class WellKnownText {
public static final String RPAREN = ")";
public static final String COMMA = ",";
public static final String NAN = "NaN";
+ public static final int MAX_DEPTH_OF_GEO_COLLECTION = 1000;
private final String NUMBER = "";
private final String EOF = "END-OF-STREAM";
@@ -278,6 +281,16 @@ public Geometry fromWKT(String wkt) throws IOException, ParseException {
*/
private Geometry parseGeometry(StreamTokenizer stream) throws IOException, ParseException {
final String type = nextWord(stream).toLowerCase(Locale.ROOT);
+ switch (type) {
+ case "geometrycollection":
+ return parseGeometryCollection(stream);
+ default:
+ return parseSimpleGeometry(stream, type);
+ }
+ }
+
+ private Geometry parseSimpleGeometry(StreamTokenizer stream, String type) throws IOException, ParseException {
+ assert "geometrycollection".equals(type) == false;
switch (type) {
case "point":
return parsePoint(stream);
@@ -294,7 +307,7 @@ private Geometry parseGeometry(StreamTokenizer stream) throws IOException, Parse
case "bbox":
return parseBBox(stream);
case "geometrycollection":
- return parseGeometryCollection(stream);
+ throw new IllegalStateException("Unexpected type: geometrycollection");
case "circle": // Not part of the standard, but we need it for internal serialization
return parseCircle(stream);
}
@@ -305,12 +318,56 @@ private GeometryCollection<Geometry> parseGeometryCollection(StreamTokenizer str
if (nextEmptyOrOpen(stream).equals(EMPTY)) {
return GeometryCollection.EMPTY;
}
- List<Geometry> shapes = new ArrayList<>();
- shapes.add(parseGeometry(stream));
- while (nextCloserOrComma(stream).equals(COMMA)) {
- shapes.add(parseGeometry(stream));
+
+ List<Geometry> topLevelShapes = new ArrayList<>();
+ Deque<List<Geometry>> deque = new ArrayDeque<>();
+ deque.push(topLevelShapes);
+ boolean isFirstIteration = true;
+ List<Geometry> currentLevelShapes = null;
+ while (!deque.isEmpty()) {
+ List<Geometry> previousShapes = deque.pop();
+ if (currentLevelShapes != null) {
+ previousShapes.add(new GeometryCollection<>(currentLevelShapes));
+ }
+ currentLevelShapes = previousShapes;
+
+ if (isFirstIteration == true) {
+ isFirstIteration = false;
+ } else {
+ if (nextCloserOrComma(stream).equals(COMMA) == false) {
+ // Done with current level, continue with parent level
+ continue;
+ }
+ }
+ while (true) {
+ final String type = nextWord(stream).toLowerCase(Locale.ROOT);
+ if (type.equals("geometrycollection")) {
+ if (nextEmptyOrOpen(stream).equals(EMPTY) == false) {
+ // GEOMETRYCOLLECTION() -> 1 depth, GEOMETRYCOLLECTION(GEOMETRYCOLLECTION()) -> 2 depth
+ // When parsing the top level geometry collection, the queue size is zero.
+ // When max depth is 1, we don't want to push any sub geometry collection in the queue.
+ // Therefore, we subtract 2 from max depth.
+ if (deque.size() >= MAX_DEPTH_OF_GEO_COLLECTION - 2) {
+ throw new IllegalArgumentException(
+ "a geometry collection with a depth greater than " + MAX_DEPTH_OF_GEO_COLLECTION + " is not supported"
+ );
+ }
+ deque.push(currentLevelShapes);
+ currentLevelShapes = new ArrayList<>();
+ continue;
+ }
+ currentLevelShapes.add(GeometryCollection.EMPTY);
+ } else {
+ currentLevelShapes.add(parseSimpleGeometry(stream, type));
+ }
+
+ if (nextCloserOrComma(stream).equals(COMMA) == false) {
+ break;
+ }
+ }
}
- return new GeometryCollection<>(shapes);
+
+ return new GeometryCollection<>(topLevelShapes);
}
private Point parsePoint(StreamTokenizer stream) throws IOException, ParseException {
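A small sketch mirroring the tests below: with the iterative deque-based parser, nested GEOMETRYCOLLECTIONs still parse normally up to MAX_DEPTH_OF_GEO_COLLECTION levels, and anything deeper now throws IllegalArgumentException instead of overflowing the stack.

import org.opensearch.geometry.Geometry;
import org.opensearch.geometry.utils.GeographyValidator;
import org.opensearch.geometry.utils.WellKnownText;

final class NestedCollectionExample {
    static Geometry parse() throws Exception {
        WellKnownText wkt = new WellKnownText(true, new GeographyValidator(true));
        // Two levels of nesting: well within the 1000-level limit.
        return wkt.fromWKT("GEOMETRYCOLLECTION (GEOMETRYCOLLECTION EMPTY, POINT (20.0 10.0))");
    }
}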
diff --git a/libs/geo/src/test/java/org/opensearch/geometry/GeometryCollectionTests.java b/libs/geo/src/test/java/org/opensearch/geometry/GeometryCollectionTests.java
index 631b6456a77da..cd8bb8f585966 100644
--- a/libs/geo/src/test/java/org/opensearch/geometry/GeometryCollectionTests.java
+++ b/libs/geo/src/test/java/org/opensearch/geometry/GeometryCollectionTests.java
@@ -62,6 +62,11 @@ public void testBasicSerialization() throws IOException, ParseException {
assertEquals("GEOMETRYCOLLECTION EMPTY", wkt.toWKT(GeometryCollection.EMPTY));
assertEquals(GeometryCollection.EMPTY, wkt.fromWKT("GEOMETRYCOLLECTION EMPTY)"));
+
+ assertEquals(
+ new GeometryCollection(Arrays.asList(GeometryCollection.EMPTY)),
+ wkt.fromWKT("GEOMETRYCOLLECTION (GEOMETRYCOLLECTION EMPTY)")
+ );
}
@SuppressWarnings("ConstantConditions")
@@ -86,4 +91,29 @@ public void testInitValidation() {
new StandardValidator(true).validate(new GeometryCollection(Collections.singletonList(new Point(20, 10, 30))));
}
+
+ public void testDeeplyNestedGeometryCollection() throws IOException, ParseException {
+ WellKnownText wkt = new WellKnownText(true, new GeographyValidator(true));
+ StringBuilder validGeometryCollectionHead = new StringBuilder("GEOMETRYCOLLECTION");
+ StringBuilder validGeometryCollectionTail = new StringBuilder(" EMPTY");
+ for (int i = 0; i < WellKnownText.MAX_DEPTH_OF_GEO_COLLECTION - 1; i++) {
+ validGeometryCollectionHead.append(" (GEOMETRYCOLLECTION");
+ validGeometryCollectionTail.append(")");
+ }
+ // Expect no exception
+ wkt.fromWKT(validGeometryCollectionHead.append(validGeometryCollectionTail).toString());
+
+ StringBuilder invalidGeometryCollectionHead = new StringBuilder("GEOMETRYCOLLECTION");
+ StringBuilder invalidGeometryCollectionTail = new StringBuilder(" EMPTY");
+ for (int i = 0; i < WellKnownText.MAX_DEPTH_OF_GEO_COLLECTION; i++) {
+ invalidGeometryCollectionHead.append(" (GEOMETRYCOLLECTION");
+ invalidGeometryCollectionTail.append(")");
+ }
+
+ IllegalArgumentException ex = expectThrows(
+ IllegalArgumentException.class,
+ () -> wkt.fromWKT(invalidGeometryCollectionHead.append(invalidGeometryCollectionTail).toString())
+ );
+ assertEquals("a geometry collection with a depth greater than 1000 is not supported", ex.getMessage());
+ }
}
diff --git a/libs/grok/src/main/java/org/opensearch/grok/Grok.java b/libs/grok/src/main/java/org/opensearch/grok/Grok.java
index 7aa3347ba4f4b..aa5b1a936b99d 100644
--- a/libs/grok/src/main/java/org/opensearch/grok/Grok.java
+++ b/libs/grok/src/main/java/org/opensearch/grok/Grok.java
@@ -37,14 +37,18 @@
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
+import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Collections;
+import java.util.Deque;
+import java.util.HashMap;
+import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
-import java.util.Stack;
+import java.util.Set;
import java.util.function.Consumer;
import org.jcodings.specific.UTF8Encoding;
@@ -86,6 +90,7 @@ public final class Grok {
UTF8Encoding.INSTANCE,
Syntax.DEFAULT
);
+ private static final int MAX_PATTERN_DEPTH_SIZE = 500;
private static final int MAX_TO_REGEX_ITERATIONS = 100_000; // sanity limit
@@ -128,7 +133,7 @@ private Grok(
expressionBytes.length,
Option.DEFAULT,
UTF8Encoding.INSTANCE,
- message -> logCallBack.accept(message)
+ logCallBack::accept
);
List<GrokCaptureConfig> captureConfig = new ArrayList<>();
@@ -144,7 +149,7 @@ private Grok(
*/
private void validatePatternBank() {
for (String patternName : patternBank.keySet()) {
- validatePatternBank(patternName, new Stack<>());
+ validatePatternBank(patternName);
}
}
@@ -156,33 +161,84 @@ private void validatePatternBank() {
* a reference to another named pattern. This method will navigate to all these named patterns and
* check for a circular reference.
*/
- private void validatePatternBank(String patternName, Stack<String> path) {
- String pattern = patternBank.get(patternName);
- boolean isSelfReference = pattern.contains("%{" + patternName + "}") || pattern.contains("%{" + patternName + ":");
- if (isSelfReference) {
- throwExceptionForCircularReference(patternName, pattern);
- } else if (path.contains(patternName)) {
- // current pattern name is already in the path, fetch its predecessor
- String prevPatternName = path.pop();
- String prevPattern = patternBank.get(prevPatternName);
- throwExceptionForCircularReference(prevPatternName, prevPattern, patternName, path);
- }
- path.push(patternName);
- for (int i = pattern.indexOf("%{"); i != -1; i = pattern.indexOf("%{", i + 1)) {
- int begin = i + 2;
- int syntaxEndIndex = pattern.indexOf('}', begin);
- if (syntaxEndIndex == -1) {
- throw new IllegalArgumentException("Malformed pattern [" + patternName + "][" + pattern + "]");
+ private void validatePatternBank(String initialPatternName) {
+ Deque<Frame> stack = new ArrayDeque<>();
+ Set<String> visitedPatterns = new HashSet<>();
+ Map<String, List<String>> pathMap = new HashMap<>();
+
+ List<String> initialPath = new ArrayList<>();
+ initialPath.add(initialPatternName);
+ pathMap.put(initialPatternName, initialPath);
+ stack.push(new Frame(initialPatternName, initialPath, 0));
+
+ while (!stack.isEmpty()) {
+ Frame frame = stack.peek();
+ String patternName = frame.patternName;
+ List<String> path = frame.path;
+ int startIndex = frame.startIndex;
+ String pattern = patternBank.get(patternName);
+
+ if (visitedPatterns.contains(patternName)) {
+ stack.pop();
+ continue;
+ }
+
+ visitedPatterns.add(patternName);
+ boolean foundDependency = false;
+
+ for (int i = startIndex; i < pattern.length(); i++) {
+ if (pattern.startsWith("%{", i)) {
+ int begin = i + 2;
+ int syntaxEndIndex = pattern.indexOf('}', begin);
+ if (syntaxEndIndex == -1) {
+ throw new IllegalArgumentException("Malformed pattern [" + patternName + "][" + pattern + "]");
+ }
+
+ int semanticNameIndex = pattern.indexOf(':', begin);
+ int end = semanticNameIndex == -1 ? syntaxEndIndex : Math.min(syntaxEndIndex, semanticNameIndex);
+
+ String dependsOnPattern = pattern.substring(begin, end);
+
+ if (dependsOnPattern.equals(patternName)) {
+ throwExceptionForCircularReference(patternName, pattern);
+ }
+
+ if (pathMap.containsKey(dependsOnPattern)) {
+ throwExceptionForCircularReference(patternName, pattern, dependsOnPattern, path.subList(0, path.size() - 1));
+ }
+
+ List<String> newPath = new ArrayList<>(path);
+ newPath.add(dependsOnPattern);
+ pathMap.put(dependsOnPattern, newPath);
+
+ stack.push(new Frame(dependsOnPattern, newPath, 0));
+ frame.startIndex = i + 1;
+ foundDependency = true;
+ break;
+ }
}
- int semanticNameIndex = pattern.indexOf(':', begin);
- int end = syntaxEndIndex;
- if (semanticNameIndex != -1) {
- end = Math.min(syntaxEndIndex, semanticNameIndex);
+
+ if (!foundDependency) {
+ pathMap.remove(patternName);
+ stack.pop();
+ }
+
+ if (stack.size() > MAX_PATTERN_DEPTH_SIZE) {
+ throw new IllegalArgumentException("Pattern references exceeded maximum depth of " + MAX_PATTERN_DEPTH_SIZE);
}
- String dependsOnPattern = pattern.substring(begin, end);
- validatePatternBank(dependsOnPattern, path);
}
- path.pop();
+ }
+
+ private static class Frame {
+ String patternName;
+ List<String> path;
+ int startIndex;
+
+ Frame(String patternName, List<String> path, int startIndex) {
+ this.patternName = patternName;
+ this.path = path;
+ this.startIndex = startIndex;
+ }
}
private static void throwExceptionForCircularReference(String patternName, String pattern) {
@@ -192,13 +248,13 @@ private static void throwExceptionForCircularReference(String patternName, Strin
private static void throwExceptionForCircularReference(
String patternName,
String pattern,
- String originPatterName,
- Stack<String> path
+ String originPatternName,
+ List<String> path
) {
StringBuilder message = new StringBuilder("circular reference in pattern [");
message.append(patternName).append("][").append(pattern).append("]");
- if (originPatterName != null) {
- message.append(" back to pattern [").append(originPatterName).append("]");
+ if (originPatternName != null) {
+ message.append(" back to pattern [").append(originPatternName).append("]");
}
if (path != null && path.size() > 1) {
message.append(" via patterns [").append(String.join("=>", path)).append("]");
@@ -217,9 +273,7 @@ private String groupMatch(String name, Region region, String pattern) {
int begin = region.getBeg(number);
int end = region.getEnd(number);
return new String(pattern.getBytes(StandardCharsets.UTF_8), begin, end - begin, StandardCharsets.UTF_8);
- } catch (StringIndexOutOfBoundsException e) {
- return null;
- } catch (ValueException e) {
+ } catch (StringIndexOutOfBoundsException | ValueException e) {
return null;
}
}
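A sketch of a bank that the rewritten, iterative validation accepts, assuming the four-argument Grok constructor used in the test below is accessible to the caller: only self references, circular chains, or reference chains deeper than MAX_PATTERN_DEPTH_SIZE (500) are rejected.

import java.util.Map;
import java.util.TreeMap;

import org.opensearch.grok.Grok;

final class GrokBankExample {
    static Grok build() {
        Map<String, String> bank = new TreeMap<>();
        bank.put("NUM", "[0-9]+");
        bank.put("PAIR", "%{NUM}-%{NUM}");   // acyclic, two levels deep: passes validation
        return new Grok(bank, "%{PAIR}", false, message -> {});
    }
}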
diff --git a/libs/grok/src/test/java/org/opensearch/grok/GrokTests.java b/libs/grok/src/test/java/org/opensearch/grok/GrokTests.java
index a37689e051c67..8476d541aa46e 100644
--- a/libs/grok/src/test/java/org/opensearch/grok/GrokTests.java
+++ b/libs/grok/src/test/java/org/opensearch/grok/GrokTests.java
@@ -377,6 +377,16 @@ public void testCircularReference() {
"circular reference in pattern [NAME5][!!!%{NAME1}!!!] back to pattern [NAME1] " + "via patterns [NAME1=>NAME2=>NAME3=>NAME4]",
e.getMessage()
);
+
+ e = expectThrows(IllegalArgumentException.class, () -> {
+ Map<String, String> bank = new TreeMap<>();
+ for (int i = 1; i <= 501; i++) {
+ bank.put("NAME" + i, "!!!%{NAME" + (i + 1) + "}!!!");
+ }
+ String pattern = "%{NAME1}";
+ new Grok(bank, pattern, false, logger::warn);
+ });
+ assertEquals("Pattern references exceeded maximum depth of 500", e.getMessage());
}
public void testMalformedPattern() {
diff --git a/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/DefaultMetricsRegistry.java b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/DefaultMetricsRegistry.java
index c861c21f89fc5..bcf5c163cb91f 100644
--- a/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/DefaultMetricsRegistry.java
+++ b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/DefaultMetricsRegistry.java
@@ -48,6 +48,11 @@ public Closeable createGauge(String name, String description, String unit, Suppl
return metricsTelemetry.createGauge(name, description, unit, valueProvider, tags);
}
+ @Override
+ public Closeable createGauge(String name, String description, String unit, Supplier<TaggedMeasurement> value) {
+ return metricsTelemetry.createGauge(name, description, unit, value);
+ }
+
@Override
public void close() throws IOException {
metricsTelemetry.close();
diff --git a/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/MetricsRegistry.java b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/MetricsRegistry.java
index 3ab3dcf82c7a7..3dc212b1341cc 100644
--- a/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/MetricsRegistry.java
+++ b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/MetricsRegistry.java
@@ -63,4 +63,16 @@ public interface MetricsRegistry extends Closeable {
*/
Closeable createGauge(String name, String description, String unit, Supplier<Double> valueProvider, Tags tags);
+ /**
+ * Creates the Observable Gauge type of Metric. Where the value provider will be called at a certain frequency
+ * to capture the value.
+ *
+ * @param name name of the observable gauge.
+ * @param description any description about the metric.
+ * @param unit unit of the metric.
+ * @param value value provider.
+ * @return closeable to dispose/close the Gauge metric.
+ */
+ Closeable createGauge(String name, String description, String unit, Supplier<TaggedMeasurement> value);
+
}
diff --git a/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/TaggedMeasurement.java b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/TaggedMeasurement.java
new file mode 100644
index 0000000000000..707f2c79c62f2
--- /dev/null
+++ b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/TaggedMeasurement.java
@@ -0,0 +1,53 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.telemetry.metrics;
+
+import org.opensearch.common.annotation.ExperimentalApi;
+import org.opensearch.telemetry.metrics.tags.Tags;
+
+/**
+ * Observable Measurement for the Asynchronous instruments.
+ * @opensearch.experimental
+ */
+@ExperimentalApi
+public final class TaggedMeasurement {
+ private final Double value;
+ private final Tags tags;
+
+ /**
+ * Factory method to create the {@link TaggedMeasurement} object.
+ * @param value value.
+ * @param tags tags to be added per value.
+ * @return tagged measurement TaggedMeasurement
+ */
+ public static TaggedMeasurement create(double value, Tags tags) {
+ return new TaggedMeasurement(value, tags);
+ }
+
+ private TaggedMeasurement(double value, Tags tags) {
+ this.value = value;
+ this.tags = tags;
+ }
+
+ /**
+ * Returns the value.
+ * @return value
+ */
+ public Double getValue() {
+ return value;
+ }
+
+ /**
+ * Returns the tags.
+ * @return tags
+ */
+ public Tags getTags() {
+ return tags;
+ }
+}
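A sketch of the new gauge overload from MetricsRegistry: the supplier returns the value together with its tags, so tags can vary per observation instead of being fixed at registration time. The metric name, unit, and the "queue" tag below are made up for illustration.

import java.io.Closeable;
import java.util.concurrent.atomic.AtomicLong;

import org.opensearch.telemetry.metrics.MetricsRegistry;
import org.opensearch.telemetry.metrics.TaggedMeasurement;
import org.opensearch.telemetry.metrics.tags.Tags;

final class ObservableGaugeExample {
    static Closeable register(MetricsRegistry registry, AtomicLong queueSize) {
        return registry.createGauge(
            "example.queue.size",                 // hypothetical metric name
            "current number of queued items",
            "1",
            () -> TaggedMeasurement.create(queueSize.get(), Tags.create().addTag("queue", "search"))
        );
    }
}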
diff --git a/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/noop/NoopMetricsRegistry.java b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/noop/NoopMetricsRegistry.java
index 9a913d25e872d..7bec136c42ba7 100644
--- a/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/noop/NoopMetricsRegistry.java
+++ b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/noop/NoopMetricsRegistry.java
@@ -12,6 +12,7 @@
import org.opensearch.telemetry.metrics.Counter;
import org.opensearch.telemetry.metrics.Histogram;
import org.opensearch.telemetry.metrics.MetricsRegistry;
+import org.opensearch.telemetry.metrics.TaggedMeasurement;
import org.opensearch.telemetry.metrics.tags.Tags;
import java.io.Closeable;
@@ -52,6 +53,11 @@ public Closeable createGauge(String name, String description, String unit, Suppl
return () -> {};
}
+ @Override
+ public Closeable createGauge(String name, String description, String unit, Supplier<TaggedMeasurement> value) {
+ return () -> {};
+ }
+
@Override
public void close() throws IOException {
diff --git a/libs/telemetry/src/test/java/org/opensearch/telemetry/metrics/DefaultMetricsRegistryTests.java b/libs/telemetry/src/test/java/org/opensearch/telemetry/metrics/DefaultMetricsRegistryTests.java
index 872f697ade09e..e1506eecff6e9 100644
--- a/libs/telemetry/src/test/java/org/opensearch/telemetry/metrics/DefaultMetricsRegistryTests.java
+++ b/libs/telemetry/src/test/java/org/opensearch/telemetry/metrics/DefaultMetricsRegistryTests.java
@@ -79,4 +79,19 @@ public void testGauge() {
assertSame(mockCloseable, closeable);
}
+ @SuppressWarnings("unchecked")
+ public void testGaugeWithValueAndTagSupplier() {
+ Closeable mockCloseable = mock(Closeable.class);
+ when(defaultMeterRegistry.createGauge(any(String.class), any(String.class), any(String.class), any(Supplier.class))).thenReturn(
+ mockCloseable
+ );
+ Closeable closeable = defaultMeterRegistry.createGauge(
+ "org.opensearch.telemetry.metrics.DefaultMeterRegistryTests.testObservableGauge",
+ "test observable gauge",
+ "ms",
+ () -> TaggedMeasurement.create(1.0, Tags.EMPTY)
+ );
+ assertSame(mockCloseable, closeable);
+ }
+
}
diff --git a/libs/x-content/licenses/jackson-core-2.17.0.jar.sha1 b/libs/x-content/licenses/jackson-core-2.17.0.jar.sha1
deleted file mode 100644
index 9b906dbda1656..0000000000000
--- a/libs/x-content/licenses/jackson-core-2.17.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-a6e5058ef9720623c517252d17162f845306ff3a
\ No newline at end of file
diff --git a/libs/x-content/licenses/jackson-core-2.17.2.jar.sha1 b/libs/x-content/licenses/jackson-core-2.17.2.jar.sha1
new file mode 100644
index 0000000000000..e15f2340980bc
--- /dev/null
+++ b/libs/x-content/licenses/jackson-core-2.17.2.jar.sha1
@@ -0,0 +1 @@
+969a35cb35c86512acbadcdbbbfb044c877db814
\ No newline at end of file
diff --git a/libs/x-content/licenses/jackson-dataformat-cbor-2.17.0.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-cbor-2.17.0.jar.sha1
deleted file mode 100644
index 382e20d3d31c1..0000000000000
--- a/libs/x-content/licenses/jackson-dataformat-cbor-2.17.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-6833c8573452d583e4af650a7424d547606b2501
\ No newline at end of file
diff --git a/libs/x-content/licenses/jackson-dataformat-cbor-2.17.2.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-cbor-2.17.2.jar.sha1
new file mode 100644
index 0000000000000..069e088413ef1
--- /dev/null
+++ b/libs/x-content/licenses/jackson-dataformat-cbor-2.17.2.jar.sha1
@@ -0,0 +1 @@
+57fa7c1b5104bbc4599278d13933a937ee058e68
\ No newline at end of file
diff --git a/libs/x-content/licenses/jackson-dataformat-smile-2.17.0.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-smile-2.17.0.jar.sha1
deleted file mode 100644
index d117479166d17..0000000000000
--- a/libs/x-content/licenses/jackson-dataformat-smile-2.17.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-f10183857607fde789490d33ea46372a2d2b0c72
\ No newline at end of file
diff --git a/libs/x-content/licenses/jackson-dataformat-smile-2.17.2.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-smile-2.17.2.jar.sha1
new file mode 100644
index 0000000000000..28d8c8382aed3
--- /dev/null
+++ b/libs/x-content/licenses/jackson-dataformat-smile-2.17.2.jar.sha1
@@ -0,0 +1 @@
+20e956b9b6f67138edd39fab7a506ded19638bcb
\ No newline at end of file
diff --git a/libs/x-content/licenses/jackson-dataformat-yaml-2.17.0.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-yaml-2.17.0.jar.sha1
deleted file mode 100644
index 35242eed9b212..0000000000000
--- a/libs/x-content/licenses/jackson-dataformat-yaml-2.17.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-57a963c6258c49febc11390082d8503f71bb15a9
\ No newline at end of file
diff --git a/libs/x-content/licenses/jackson-dataformat-yaml-2.17.2.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-yaml-2.17.2.jar.sha1
new file mode 100644
index 0000000000000..f3e25b7eb253c
--- /dev/null
+++ b/libs/x-content/licenses/jackson-dataformat-yaml-2.17.2.jar.sha1
@@ -0,0 +1 @@
+78d2c73dbec62044d7cf3b544b2e0d24a1a093b0
\ No newline at end of file
diff --git a/modules/cache-common/src/internalClusterTest/java/org.opensearch.cache.common.tier/TieredSpilloverCacheIT.java b/modules/cache-common/src/internalClusterTest/java/org/opensearch/cache/common/tier/TieredSpilloverCacheIT.java
similarity index 99%
rename from modules/cache-common/src/internalClusterTest/java/org.opensearch.cache.common.tier/TieredSpilloverCacheIT.java
rename to modules/cache-common/src/internalClusterTest/java/org/opensearch/cache/common/tier/TieredSpilloverCacheIT.java
index bfc184cff0566..02be0990eb136 100644
--- a/modules/cache-common/src/internalClusterTest/java/org.opensearch.cache.common.tier/TieredSpilloverCacheIT.java
+++ b/modules/cache-common/src/internalClusterTest/java/org/opensearch/cache/common/tier/TieredSpilloverCacheIT.java
@@ -65,7 +65,7 @@ protected Collection<Class<? extends Plugin>> nodePlugins() {
return Arrays.asList(TieredSpilloverCachePlugin.class, MockDiskCachePlugin.class);
}
- private Settings defaultSettings(String onHeapCacheSizeInBytesOrPecentage) {
+ static Settings defaultSettings(String onHeapCacheSizeInBytesOrPercentage) {
return Settings.builder()
.put(FeatureFlags.PLUGGABLE_CACHE, "true")
.put(
@@ -88,7 +88,7 @@ private Settings defaultSettings(String onHeapCacheSizeInBytesOrPecentage) {
OpenSearchOnHeapCacheSettings.getSettingListForCacheType(CacheType.INDICES_REQUEST_CACHE)
.get(MAXIMUM_SIZE_IN_BYTES_KEY)
.getKey(),
- onHeapCacheSizeInBytesOrPecentage
+ onHeapCacheSizeInBytesOrPercentage
)
.build();
}
diff --git a/modules/cache-common/src/internalClusterTest/java/org/opensearch/cache/common/tier/TieredSpilloverCacheStatsIT.java b/modules/cache-common/src/internalClusterTest/java/org/opensearch/cache/common/tier/TieredSpilloverCacheStatsIT.java
new file mode 100644
index 0000000000000..783b6083e9226
--- /dev/null
+++ b/modules/cache-common/src/internalClusterTest/java/org/opensearch/cache/common/tier/TieredSpilloverCacheStatsIT.java
@@ -0,0 +1,507 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.cache.common.tier;
+
+import org.opensearch.action.admin.cluster.node.stats.NodesStatsRequest;
+import org.opensearch.action.admin.cluster.node.stats.NodesStatsResponse;
+import org.opensearch.action.admin.indices.forcemerge.ForceMergeResponse;
+import org.opensearch.action.admin.indices.stats.CommonStatsFlags;
+import org.opensearch.action.search.SearchResponse;
+import org.opensearch.client.Client;
+import org.opensearch.cluster.metadata.IndexMetadata;
+import org.opensearch.common.cache.CacheType;
+import org.opensearch.common.cache.service.NodeCacheStats;
+import org.opensearch.common.cache.stats.ImmutableCacheStats;
+import org.opensearch.common.cache.stats.ImmutableCacheStatsHolder;
+import org.opensearch.common.settings.Settings;
+import org.opensearch.common.unit.TimeValue;
+import org.opensearch.index.IndexSettings;
+import org.opensearch.index.cache.request.RequestCacheStats;
+import org.opensearch.index.query.QueryBuilders;
+import org.opensearch.indices.IndicesRequestCache;
+import org.opensearch.plugins.Plugin;
+import org.opensearch.test.OpenSearchIntegTestCase;
+import org.opensearch.test.hamcrest.OpenSearchAssertions;
+
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.TimeUnit;
+
+import static org.opensearch.cache.common.tier.TieredSpilloverCacheStatsHolder.TIER_DIMENSION_NAME;
+import static org.opensearch.cache.common.tier.TieredSpilloverCacheStatsHolder.TIER_DIMENSION_VALUE_DISK;
+import static org.opensearch.cache.common.tier.TieredSpilloverCacheStatsHolder.TIER_DIMENSION_VALUE_ON_HEAP;
+import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked;
+import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertSearchResponse;
+
+// Use a single data node to simplify accessing cache stats across different shards.
+@OpenSearchIntegTestCase.ClusterScope(scope = OpenSearchIntegTestCase.Scope.TEST, numDataNodes = 0)
+public class TieredSpilloverCacheStatsIT extends OpenSearchIntegTestCase {
+ @Override
+ protected Collection<Class<? extends Plugin>> nodePlugins() {
+ return Arrays.asList(TieredSpilloverCachePlugin.class, TieredSpilloverCacheIT.MockDiskCachePlugin.class);
+ }
+
+ private final String HEAP_CACHE_SIZE_STRING = "10000B";
+ private final int HEAP_CACHE_SIZE = 10_000;
+ private final String index1Name = "index1";
+ private final String index2Name = "index2";
+
+ /**
+ * Test aggregating by indices
+ */
+ public void testIndicesLevelAggregation() throws Exception {
+ internalCluster().startNodes(
+ 1,
+ Settings.builder()
+ .put(TieredSpilloverCacheIT.defaultSettings(HEAP_CACHE_SIZE_STRING))
+ .put(
+ TieredSpilloverCacheSettings.TOOK_TIME_POLICY_CONCRETE_SETTINGS_MAP.get(CacheType.INDICES_REQUEST_CACHE).getKey(),
+ new TimeValue(0, TimeUnit.SECONDS)
+ )
+ .build()
+ );
+ Client client = client();
+ Map<String, Integer> values = setupCacheForAggregationTests(client);
+
+ ImmutableCacheStatsHolder allLevelsStatsHolder = getNodeCacheStatsResult(
+ client,
+ List.of(IndicesRequestCache.INDEX_DIMENSION_NAME, TIER_DIMENSION_NAME)
+ );
+ ImmutableCacheStatsHolder indicesOnlyStatsHolder = getNodeCacheStatsResult(
+ client,
+ List.of(IndicesRequestCache.INDEX_DIMENSION_NAME)
+ );
+
+ // Get values for indices alone, assert these match for statsHolders that have additional dimensions vs. a statsHolder that only has
+ // the indices dimension
+ ImmutableCacheStats index1ExpectedStats = returnNullIfAllZero(
+ new ImmutableCacheStats(
+ values.get("hitsOnHeapIndex1") + values.get("hitsOnDiskIndex1"),
+ values.get("itemsOnDiskIndex1AfterTest") + values.get("itemsOnHeapIndex1AfterTest"),
+ 0,
+ (values.get("itemsOnDiskIndex1AfterTest") + values.get("itemsOnHeapIndex1AfterTest")) * values.get("singleSearchSize"),
+ values.get("itemsOnDiskIndex1AfterTest") + values.get("itemsOnHeapIndex1AfterTest")
+ )
+ );
+ ImmutableCacheStats index2ExpectedStats = returnNullIfAllZero(
+ new ImmutableCacheStats(
+ values.get("hitsOnHeapIndex2") + values.get("hitsOnDiskIndex2"),
+ values.get("itemsOnDiskIndex2AfterTest") + values.get("itemsOnHeapIndex2AfterTest"),
+ 0,
+ (values.get("itemsOnDiskIndex2AfterTest") + values.get("itemsOnHeapIndex2AfterTest")) * values.get("singleSearchSize"),
+ values.get("itemsOnDiskIndex2AfterTest") + values.get("itemsOnHeapIndex2AfterTest")
+ )
+ );
+
+ for (ImmutableCacheStatsHolder statsHolder : List.of(allLevelsStatsHolder, indicesOnlyStatsHolder)) {
+ assertEquals(index1ExpectedStats, statsHolder.getStatsForDimensionValues(List.of(index1Name)));
+ assertEquals(index2ExpectedStats, statsHolder.getStatsForDimensionValues(List.of(index2Name)));
+ }
+ }
+
+ /**
+ * Test aggregating by indices and tier
+ */
+ public void testIndicesAndTierLevelAggregation() throws Exception {
+ internalCluster().startNodes(
+ 1,
+ Settings.builder()
+ .put(TieredSpilloverCacheIT.defaultSettings(HEAP_CACHE_SIZE_STRING))
+ .put(
+ TieredSpilloverCacheSettings.TOOK_TIME_POLICY_CONCRETE_SETTINGS_MAP.get(CacheType.INDICES_REQUEST_CACHE).getKey(),
+ new TimeValue(0, TimeUnit.SECONDS)
+ )
+ .build()
+ );
+ Client client = client();
+ Map<String, Integer> values = setupCacheForAggregationTests(client);
+
+ ImmutableCacheStatsHolder allLevelsStatsHolder = getNodeCacheStatsResult(
+ client,
+ List.of(IndicesRequestCache.INDEX_DIMENSION_NAME, TIER_DIMENSION_NAME)
+ );
+
+ // Get values broken down by indices+tiers
+ ImmutableCacheStats index1HeapExpectedStats = returnNullIfAllZero(
+ new ImmutableCacheStats(
+ values.get("hitsOnHeapIndex1"),
+ values.get("itemsOnHeapIndex1AfterTest") + values.get("itemsOnDiskIndex1AfterTest") + values.get("hitsOnDiskIndex1"),
+ values.get("itemsOnDiskIndex1AfterTest"),
+ values.get("itemsOnHeapIndex1AfterTest") * values.get("singleSearchSize"),
+ values.get("itemsOnHeapIndex1AfterTest")
+ )
+ );
+ assertEquals(
+ index1HeapExpectedStats,
+ allLevelsStatsHolder.getStatsForDimensionValues(List.of(index1Name, TIER_DIMENSION_VALUE_ON_HEAP))
+ );
+
+ ImmutableCacheStats index2HeapExpectedStats = returnNullIfAllZero(
+ new ImmutableCacheStats(
+ values.get("hitsOnHeapIndex2"),
+ values.get("itemsOnHeapIndex2AfterTest") + values.get("itemsOnDiskIndex2AfterTest") + values.get("hitsOnDiskIndex2"),
+ values.get("itemsOnDiskIndex2AfterTest"),
+ values.get("itemsOnHeapIndex2AfterTest") * values.get("singleSearchSize"),
+ values.get("itemsOnHeapIndex2AfterTest")
+ )
+ );
+ assertEquals(
+ index2HeapExpectedStats,
+ allLevelsStatsHolder.getStatsForDimensionValues(List.of(index2Name, TIER_DIMENSION_VALUE_ON_HEAP))
+ );
+
+ ImmutableCacheStats index1DiskExpectedStats = returnNullIfAllZero(
+ new ImmutableCacheStats(
+ values.get("hitsOnDiskIndex1"),
+ values.get("itemsOnHeapIndex1AfterTest") + values.get("itemsOnDiskIndex1AfterTest"),
+ 0,
+ values.get("itemsOnDiskIndex1AfterTest") * values.get("singleSearchSize"),
+ values.get("itemsOnDiskIndex1AfterTest")
+ )
+ );
+ assertEquals(
+ index1DiskExpectedStats,
+ allLevelsStatsHolder.getStatsForDimensionValues(List.of(index1Name, TIER_DIMENSION_VALUE_DISK))
+ );
+
+ ImmutableCacheStats index2DiskExpectedStats = returnNullIfAllZero(
+ new ImmutableCacheStats(
+ values.get("hitsOnDiskIndex2"),
+ values.get("itemsOnHeapIndex2AfterTest") + values.get("itemsOnDiskIndex2AfterTest"),
+ 0,
+ values.get("itemsOnDiskIndex2AfterTest") * values.get("singleSearchSize"),
+ values.get("itemsOnDiskIndex2AfterTest")
+ )
+ );
+ assertEquals(
+ index2DiskExpectedStats,
+ allLevelsStatsHolder.getStatsForDimensionValues(List.of(index2Name, TIER_DIMENSION_VALUE_DISK))
+ );
+ }
+
+ /**
+ * Test aggregating by tier only
+ */
+ public void testTierLevelAggregation() throws Exception {
+ internalCluster().startNodes(
+ 1,
+ Settings.builder()
+ .put(TieredSpilloverCacheIT.defaultSettings(HEAP_CACHE_SIZE_STRING))
+ .put(
+ TieredSpilloverCacheSettings.TOOK_TIME_POLICY_CONCRETE_SETTINGS_MAP.get(CacheType.INDICES_REQUEST_CACHE).getKey(),
+ new TimeValue(0, TimeUnit.SECONDS)
+ )
+ .build()
+ );
+ Client client = client();
+ Map<String, Integer> values = setupCacheForAggregationTests(client);
+
+ // Get values for tiers alone and check they add correctly across indices
+ ImmutableCacheStatsHolder tiersOnlyStatsHolder = getNodeCacheStatsResult(client, List.of(TIER_DIMENSION_NAME));
+ ImmutableCacheStats totalHeapExpectedStats = returnNullIfAllZero(
+ new ImmutableCacheStats(
+ values.get("hitsOnHeapIndex1") + values.get("hitsOnHeapIndex2"),
+ values.get("itemsOnHeapAfterTest") + values.get("itemsOnDiskAfterTest") + values.get("hitsOnDiskIndex1") + values.get(
+ "hitsOnDiskIndex2"
+ ),
+ values.get("itemsOnDiskAfterTest"),
+ values.get("itemsOnHeapAfterTest") * values.get("singleSearchSize"),
+ values.get("itemsOnHeapAfterTest")
+ )
+ );
+ ImmutableCacheStats heapStats = tiersOnlyStatsHolder.getStatsForDimensionValues(List.of(TIER_DIMENSION_VALUE_ON_HEAP));
+ assertEquals(totalHeapExpectedStats, heapStats);
+ ImmutableCacheStats totalDiskExpectedStats = returnNullIfAllZero(
+ new ImmutableCacheStats(
+ values.get("hitsOnDiskIndex1") + values.get("hitsOnDiskIndex2"),
+ values.get("itemsOnHeapAfterTest") + values.get("itemsOnDiskAfterTest"),
+ 0,
+ values.get("itemsOnDiskAfterTest") * values.get("singleSearchSize"),
+ values.get("itemsOnDiskAfterTest")
+ )
+ );
+ ImmutableCacheStats diskStats = tiersOnlyStatsHolder.getStatsForDimensionValues(List.of(TIER_DIMENSION_VALUE_DISK));
+ assertEquals(totalDiskExpectedStats, diskStats);
+ }
+
+ public void testInvalidLevelsAreIgnored() throws Exception {
+ internalCluster().startNodes(
+ 1,
+ Settings.builder()
+ .put(TieredSpilloverCacheIT.defaultSettings(HEAP_CACHE_SIZE_STRING))
+ .put(
+ TieredSpilloverCacheSettings.TOOK_TIME_POLICY_CONCRETE_SETTINGS_MAP.get(CacheType.INDICES_REQUEST_CACHE).getKey(),
+ new TimeValue(0, TimeUnit.SECONDS)
+ )
+ .build()
+ );
+ Client client = client();
+ Map<String, Integer> values = setupCacheForAggregationTests(client);
+
+ ImmutableCacheStatsHolder allLevelsStatsHolder = getNodeCacheStatsResult(
+ client,
+ List.of(IndicesRequestCache.INDEX_DIMENSION_NAME, TIER_DIMENSION_NAME)
+ );
+ ImmutableCacheStatsHolder indicesOnlyStatsHolder = getNodeCacheStatsResult(
+ client,
+ List.of(IndicesRequestCache.INDEX_DIMENSION_NAME)
+ );
+
+ // Test invalid levels are ignored and permuting the order of levels in the request doesn't matter
+
+ // This should be equivalent to just "indices"
+ ImmutableCacheStatsHolder indicesEquivalentStatsHolder = getNodeCacheStatsResult(
+ client,
+ List.of(IndicesRequestCache.INDEX_DIMENSION_NAME, "unrecognized_dimension")
+ );
+ assertEquals(indicesOnlyStatsHolder, indicesEquivalentStatsHolder);
+
+ // This should be equivalent to "indices", "tier"
+ ImmutableCacheStatsHolder indicesAndTierEquivalentStatsHolder = getNodeCacheStatsResult(
+ client,
+ List.of(TIER_DIMENSION_NAME, "unrecognized_dimension_1", IndicesRequestCache.INDEX_DIMENSION_NAME, "unrecognized_dimension_2")
+ );
+ assertEquals(allLevelsStatsHolder, indicesAndTierEquivalentStatsHolder);
+
+ // This should be equivalent to no levels passed in
+ ImmutableCacheStatsHolder noLevelsEquivalentStatsHolder = getNodeCacheStatsResult(
+ client,
+ List.of("unrecognized_dimension_1", "unrecognized_dimension_2")
+ );
+ ImmutableCacheStatsHolder noLevelsStatsHolder = getNodeCacheStatsResult(client, List.of());
+ assertEquals(noLevelsStatsHolder, noLevelsEquivalentStatsHolder);
+ }
+
+ /**
+ * Check the new stats API returns the same values as the old stats API.
+ */
+ public void testStatsMatchOldApi() throws Exception {
+ internalCluster().startNodes(
+ 1,
+ Settings.builder()
+ .put(TieredSpilloverCacheIT.defaultSettings(HEAP_CACHE_SIZE_STRING))
+ .put(
+ TieredSpilloverCacheSettings.TOOK_TIME_POLICY_CONCRETE_SETTINGS_MAP.get(CacheType.INDICES_REQUEST_CACHE).getKey(),
+ new TimeValue(0, TimeUnit.SECONDS)
+ )
+ .build()
+ );
+ String index = "index";
+ Client client = client();
+ startIndex(client, index);
+
+ // First search one time to see how big a single value will be
+ searchIndex(client, index, 0);
+ // get total stats
+ long singleSearchSize = getTotalStats(client).getSizeInBytes();
+ // Select numbers so we get some values on both heap and disk
+ int itemsOnHeap = HEAP_CACHE_SIZE / (int) singleSearchSize;
+ int itemsOnDisk = 1 + randomInt(30); // The first one we search (to get the size) always goes to disk
+ int expectedEntries = itemsOnHeap + itemsOnDisk;
+
+ for (int i = 1; i < expectedEntries; i++) {
+ // Cause misses
+ searchIndex(client, index, i);
+ }
+ int expectedMisses = itemsOnHeap + itemsOnDisk;
+
+ // Cause some hits
+ int expectedHits = randomIntBetween(itemsOnHeap, expectedEntries); // Select it so some hits come from both tiers
+ for (int i = 0; i < expectedHits; i++) {
+ searchIndex(client, index, i);
+ }
+
+ ImmutableCacheStats totalStats = getNodeCacheStatsResult(client, List.of()).getTotalStats();
+
+ // Check the new stats API values are as expected
+ assertEquals(
+ new ImmutableCacheStats(expectedHits, expectedMisses, 0, expectedEntries * singleSearchSize, expectedEntries),
+ totalStats
+ );
+ // Now check the new stats API values for the cache as a whole match the old stats API values
+ RequestCacheStats oldAPIStats = client.admin()
+ .indices()
+ .prepareStats(index)
+ .setRequestCache(true)
+ .get()
+ .getTotal()
+ .getRequestCache();
+ assertEquals(oldAPIStats.getHitCount(), totalStats.getHits());
+ assertEquals(oldAPIStats.getMissCount(), totalStats.getMisses());
+ assertEquals(oldAPIStats.getEvictions(), totalStats.getEvictions());
+ assertEquals(oldAPIStats.getMemorySizeInBytes(), totalStats.getSizeInBytes());
+ }
+
+ private void startIndex(Client client, String indexName) throws InterruptedException {
+ assertAcked(
+ client.admin()
+ .indices()
+ .prepareCreate(indexName)
+ .setMapping("k", "type=keyword")
+ .setSettings(
+ Settings.builder()
+ .put(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED_SETTING.getKey(), true)
+ .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
+ .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0)
+ // Disable index refreshing to avoid cache being invalidated mid-test
+ .put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), TimeValue.timeValueMillis(-1))
+ .build()
+ )
+ .get()
+ );
+ indexRandom(true, client.prepareIndex(indexName).setSource("k", "hello"));
+ // Force merge the index to ensure there can be no background merges during the subsequent searches that would invalidate the cache
+ ForceMergeResponse forceMergeResponse = client.admin().indices().prepareForceMerge(indexName).setFlush(true).get();
+ ensureSearchable(indexName);
+ }
+
+ private Map<String, Integer> setupCacheForAggregationTests(Client client) throws Exception {
+ startIndex(client, index1Name);
+ startIndex(client, index2Name);
+
+ // First search one time to see how big a single value will be
+ searchIndex(client, index1Name, 0);
+ // get total stats
+ long singleSearchSize = getTotalStats(client).getSizeInBytes();
+ int itemsOnHeapAfterTest = HEAP_CACHE_SIZE / (int) singleSearchSize; // As the heap tier evicts, the items on it after the test will
+ // be the same as its max capacity
+ int itemsOnDiskAfterTest = 1 + randomInt(30); // The first one we search (to get the size) always goes to disk
+
+ // Put some values on heap and disk for each index
+ int itemsOnHeapIndex1AfterTest = randomInt(itemsOnHeapAfterTest);
+ int itemsOnHeapIndex2AfterTest = itemsOnHeapAfterTest - itemsOnHeapIndex1AfterTest;
+ int itemsOnDiskIndex1AfterTest = 1 + randomInt(itemsOnDiskAfterTest - 1);
+ // The first one we search (to get the size) always goes to disk
+ int itemsOnDiskIndex2AfterTest = itemsOnDiskAfterTest - itemsOnDiskIndex1AfterTest;
+ int hitsOnHeapIndex1 = randomInt(itemsOnHeapIndex1AfterTest);
+ int hitsOnDiskIndex1 = randomInt(itemsOnDiskIndex1AfterTest);
+ int hitsOnHeapIndex2 = randomInt(itemsOnHeapIndex2AfterTest);
+ int hitsOnDiskIndex2 = randomInt(itemsOnDiskIndex2AfterTest);
+
+ // Put these values into a map so tests can know what to expect in stats responses
+ Map<String, Integer> expectedValues = new HashMap<>();
+ expectedValues.put("itemsOnHeapIndex1AfterTest", itemsOnHeapIndex1AfterTest);
+ expectedValues.put("itemsOnHeapIndex2AfterTest", itemsOnHeapIndex2AfterTest);
+ expectedValues.put("itemsOnDiskIndex1AfterTest", itemsOnDiskIndex1AfterTest);
+ expectedValues.put("itemsOnDiskIndex2AfterTest", itemsOnDiskIndex2AfterTest);
+ expectedValues.put("hitsOnHeapIndex1", hitsOnHeapIndex1);
+ expectedValues.put("hitsOnDiskIndex1", hitsOnDiskIndex1);
+ expectedValues.put("hitsOnHeapIndex2", hitsOnHeapIndex2);
+ expectedValues.put("hitsOnDiskIndex2", hitsOnDiskIndex2);
+ expectedValues.put("singleSearchSize", (int) singleSearchSize);
+ expectedValues.put("itemsOnDiskAfterTest", itemsOnDiskAfterTest);
+ expectedValues.put("itemsOnHeapAfterTest", itemsOnHeapAfterTest); // Can only pass 10 keys in Map.of() constructor
+
+ // The earliest items (0 - itemsOnDiskAfterTest) are the ones which get evicted to disk
+ for (int i = 1; i < itemsOnDiskIndex1AfterTest; i++) { // Start at 1 as 0 has already been searched
+ searchIndex(client, index1Name, i);
+ }
+ for (int i = itemsOnDiskIndex1AfterTest; i < itemsOnDiskIndex1AfterTest + itemsOnDiskIndex2AfterTest; i++) {
+ searchIndex(client, index2Name, i);
+ }
+ // The remaining items stay on heap
+ for (int i = itemsOnDiskAfterTest; i < itemsOnDiskAfterTest + itemsOnHeapIndex1AfterTest; i++) {
+ searchIndex(client, index1Name, i);
+ }
+ for (int i = itemsOnDiskAfterTest + itemsOnHeapIndex1AfterTest; i < itemsOnDiskAfterTest + itemsOnHeapAfterTest; i++) {
+ searchIndex(client, index2Name, i);
+ }
+
+ // Get some hits on all combinations of indices and tiers
+ for (int i = itemsOnDiskAfterTest; i < itemsOnDiskAfterTest + hitsOnHeapIndex1; i++) {
+ // heap hits for index 1
+ searchIndex(client, index1Name, i);
+ }
+ for (int i = itemsOnDiskAfterTest + itemsOnHeapIndex1AfterTest; i < itemsOnDiskAfterTest + itemsOnHeapIndex1AfterTest
+ + hitsOnHeapIndex2; i++) {
+ // heap hits for index 2
+ searchIndex(client, index2Name, i);
+ }
+ for (int i = 0; i < hitsOnDiskIndex1; i++) {
+ // disk hits for index 1
+ searchIndex(client, index1Name, i);
+ }
+ for (int i = itemsOnDiskIndex1AfterTest; i < itemsOnDiskIndex1AfterTest + hitsOnDiskIndex2; i++) {
+ // disk hits for index 2
+ searchIndex(client, index2Name, i);
+ }
+ return expectedValues;
+ }
+
+ private ImmutableCacheStats returnNullIfAllZero(ImmutableCacheStats expectedStats) {
+ // If the randomly chosen numbers are such that the expected stats would be 0, we actually have not interacted with the cache for
+ // this index.
+ // In this case, we expect the stats holder to have no stats for this node, and therefore we should get null from
+ // statsHolder.getStatsForDimensionValues().
+ // We will not see it in the XContent response.
+ if (expectedStats.equals(new ImmutableCacheStats(0, 0, 0, 0, 0))) {
+ return null;
+ }
+ return expectedStats;
+ }
+
+ // Duplicated from CacheStatsAPIIndicesRequestCacheIT.java, as we can't add a dependency on server.internalClusterTest
+
+ private SearchResponse searchIndex(Client client, String index, int searchSuffix) {
+ SearchResponse resp = client.prepareSearch(index)
+ .setRequestCache(true)
+ .setQuery(QueryBuilders.termQuery("k", "hello" + padWithZeros(4, searchSuffix)))
+ // pad with zeros so request 0 and request 10 have the same size ("0000" and "0010" instead of "0" and "10")
+ .get();
+ assertSearchResponse(resp);
+ OpenSearchAssertions.assertAllSuccessful(resp);
+ return resp;
+ }
+
+ private String padWithZeros(int finalLength, int inputValue) {
+ // Avoid forbidden API String.format()
+ String input = String.valueOf(inputValue);
+ if (input.length() >= finalLength) {
+ return input;
+ }
+ StringBuilder sb = new StringBuilder();
+ while (sb.length() < finalLength - input.length()) {
+ sb.append('0');
+ }
+ sb.append(input);
+ return sb.toString();
+ }
+
+ private ImmutableCacheStats getTotalStats(Client client) throws IOException {
+ ImmutableCacheStatsHolder statsHolder = getNodeCacheStatsResult(client, List.of());
+ return statsHolder.getStatsForDimensionValues(List.of());
+ }
+
+ private static ImmutableCacheStatsHolder getNodeCacheStatsResult(Client client, List aggregationLevels) throws IOException {
+ CommonStatsFlags statsFlags = new CommonStatsFlags();
+ statsFlags.includeAllCacheTypes();
+ String[] flagsLevels;
+ if (aggregationLevels == null) {
+ flagsLevels = null;
+ } else {
+ flagsLevels = aggregationLevels.toArray(new String[0]);
+ }
+ statsFlags.setLevels(flagsLevels);
+
+ NodesStatsResponse nodeStatsResponse = client.admin()
+ .cluster()
+ .prepareNodesStats("data:true")
+ .addMetric(NodesStatsRequest.Metric.CACHE_STATS.metricName())
+ .setIndices(statsFlags)
+ .get();
+ // Can always get the first data node as there's only one in this test suite
+ assertEquals(1, nodeStatsResponse.getNodes().size());
+ NodeCacheStats ncs = nodeStatsResponse.getNodes().get(0).getNodeCacheStats();
+ return ncs.getStatsByCache(CacheType.INDICES_REQUEST_CACHE);
+ }
+}
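
The setup above splits the search suffixes between the two tiers by order: the earliest itemsOnDiskAfterTest suffixes are searched first and eventually spill to disk, the rest stay on heap, and the hit counts are drawn from within those ranges. A minimal sketch of that partitioning arithmetic, using illustrative constants in place of the test's randomized values:

// Sketch only: illustrative constants stand in for the randomized values used in the test above.
public final class TierPartitionSketch {
    public static void main(String[] args) {
        long heapCacheSizeBytes = 10_000L;    // assumed heap tier capacity in bytes
        long singleSearchSize = 500L;         // assumed size of one cached search entry
        int itemsOnHeapAfterTest = (int) (heapCacheSizeBytes / singleSearchSize);
        int itemsOnDiskAfterTest = 1 + 17;    // the first search always spills to disk, plus some extras

        // Suffixes [0, itemsOnDiskAfterTest) are searched first and get evicted to the disk tier;
        // suffixes [itemsOnDiskAfterTest, itemsOnDiskAfterTest + itemsOnHeapAfterTest) stay on heap.
        System.out.println("disk suffixes: 0.." + (itemsOnDiskAfterTest - 1));
        System.out.println("heap suffixes: " + itemsOnDiskAfterTest + ".." + (itemsOnDiskAfterTest + itemsOnHeapAfterTest - 1));
    }
}
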
diff --git a/modules/cache-common/src/main/java/org/opensearch/cache/common/tier/TieredSpilloverCache.java b/modules/cache-common/src/main/java/org/opensearch/cache/common/tier/TieredSpilloverCache.java
index 9942651ccdd67..f69c56808b2a1 100644
--- a/modules/cache-common/src/main/java/org/opensearch/cache/common/tier/TieredSpilloverCache.java
+++ b/modules/cache-common/src/main/java/org/opensearch/cache/common/tier/TieredSpilloverCache.java
@@ -8,6 +8,8 @@
package org.opensearch.cache.common.tier;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import org.opensearch.cache.common.policy.TookTimePolicy;
import org.opensearch.common.annotation.ExperimentalApi;
import org.opensearch.common.cache.CacheType;
@@ -35,9 +37,13 @@
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Objects;
+import java.util.concurrent.CompletableFuture;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ExecutionException;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
+import java.util.function.BiFunction;
import java.util.function.Function;
import java.util.function.Predicate;
import java.util.function.ToLongBiFunction;
@@ -61,6 +67,7 @@ public class TieredSpilloverCache implements ICache {
// Used to avoid caching stale entries in lower tiers.
private static final List SPILLOVER_REMOVAL_REASONS = List.of(RemovalReason.EVICTED, RemovalReason.CAPACITY);
+ private static final Logger logger = LogManager.getLogger(TieredSpilloverCache.class);
private final ICache diskCache;
private final ICache onHeapCache;
@@ -86,6 +93,12 @@ public class TieredSpilloverCache implements ICache {
private final Map, TierInfo> caches;
private final List> policies;
+ /**
+ * This map is used to handle concurrent requests for the same key in computeIfAbsent() to ensure we load the value
+ * only once.
+ */
+ Map<ICacheKey<K>, CompletableFuture<Tuple<ICacheKey<K>, V>>> completableFutureMap = new ConcurrentHashMap<>();
+
TieredSpilloverCache(Builder builder) {
Objects.requireNonNull(builder.onHeapCacheFactory, "onHeap cache builder can't be null");
Objects.requireNonNull(builder.diskCacheFactory, "disk cache builder can't be null");
@@ -119,6 +132,8 @@ public class TieredSpilloverCache implements ICache {
.setValueType(builder.cacheConfig.getValueType())
.setSettings(builder.cacheConfig.getSettings())
.setWeigher(builder.cacheConfig.getWeigher())
+ .setKeySerializer(builder.cacheConfig.getKeySerializer())
+ .setValueSerializer(builder.cacheConfig.getValueSerializer())
.setDimensionNames(builder.cacheConfig.getDimensionNames())
.setStatsTrackingEnabled(false)
.build(),
@@ -180,7 +195,16 @@ public V computeIfAbsent(ICacheKey key, LoadAwareCacheLoader, V>
// and it only has to be loaded one time, we should report one miss and the rest hits. But, if we do stats in
// getValueFromTieredCache(),
// we will see all misses. Instead, handle stats in computeIfAbsent().
- Tuple cacheValueTuple = getValueFromTieredCache(false).apply(key);
+ Tuple cacheValueTuple;
+ CompletableFuture, V>> future = null;
+ try (ReleasableLock ignore = readLock.acquire()) {
+ cacheValueTuple = getValueFromTieredCache(false).apply(key);
+ if (cacheValueTuple == null) {
+ // Only one of the threads will succeed in putting a future into the map for the same key.
+ // The rest will fetch the existing future and wait for it to complete.
+ future = completableFutureMap.putIfAbsent(key, new CompletableFuture<>());
+ }
+ }
List heapDimensionValues = statsHolder.getDimensionsWithTierValue(key.dimensions, TIER_DIMENSION_VALUE_ON_HEAP);
List diskDimensionValues = statsHolder.getDimensionsWithTierValue(key.dimensions, TIER_DIMENSION_VALUE_DISK);
@@ -188,10 +212,7 @@ public V computeIfAbsent(ICacheKey key, LoadAwareCacheLoader, V>
// Add the value to the onHeap cache. We are calling computeIfAbsent which does another get inside.
// This is needed as there can be many requests for the same key at the same time and we only want to load
// the value once.
- V value = null;
- try (ReleasableLock ignore = writeLock.acquire()) {
- value = onHeapCache.computeIfAbsent(key, loader);
- }
+ V value = compute(key, loader, future);
// Handle stats
if (loader.isLoaded()) {
// The value was just computed and added to the cache by this thread. Register a miss for the heap cache, and the disk cache
@@ -220,6 +241,55 @@ public V computeIfAbsent(ICacheKey key, LoadAwareCacheLoader, V>
return cacheValueTuple.v1();
}
+ private V compute(ICacheKey<K> key, LoadAwareCacheLoader<ICacheKey<K>, V> loader, CompletableFuture<Tuple<ICacheKey<K>, V>> future)
+ throws Exception {
+ // Handler for post-processing the result. It takes a key/value tuple or an exception as input; if a value is
+ // present, it is put into the on-heap cache, and the key's future is then removed from the map.
+ BiFunction<Tuple<ICacheKey<K>, V>, Throwable, Void> handler = (pair, ex) -> {
+ if (pair != null) {
+ try (ReleasableLock ignore = writeLock.acquire()) {
+ onHeapCache.put(pair.v1(), pair.v2());
+ } catch (Exception e) {
+ // TODO: Catch specific exceptions to know whether this resulted from cache or underlying removal
+ // listeners/stats. Needs better exception handling at underlying layers. For now, swallowing the
+ // exception.
+ logger.warn("Exception occurred while putting item onto heap cache", e);
+ }
+ } else {
+ if (ex != null) {
+ logger.warn("Exception occurred while trying to compute the value", ex);
+ }
+ }
+ completableFutureMap.remove(key); // Remove the key from the map as it is no longer needed.
+ return null;
+ };
+ V value = null;
+ if (future == null) {
+ future = completableFutureMap.get(key);
+ future.handle(handler);
+ try {
+ value = loader.load(key);
+ } catch (Exception ex) {
+ future.completeExceptionally(ex);
+ throw new ExecutionException(ex);
+ }
+ if (value == null) {
+ NullPointerException npe = new NullPointerException("Loader returned a null value");
+ future.completeExceptionally(npe);
+ throw new ExecutionException(npe);
+ } else {
+ future.complete(new Tuple<>(key, value));
+ }
+ } else {
+ try {
+ value = future.get().v2();
+ } catch (InterruptedException ex) {
+ throw new IllegalStateException(ex);
+ }
+ }
+ return value;
+ }
+
@Override
public void invalidate(ICacheKey key) {
// We are trying to invalidate the key from all caches though it would be present in only of them.
@@ -325,30 +395,48 @@ private Function, Tuple> getValueFromTieredCache(boolean
void handleRemovalFromHeapTier(RemovalNotification, V> notification) {
ICacheKey key = notification.getKey();
boolean wasEvicted = SPILLOVER_REMOVAL_REASONS.contains(notification.getRemovalReason());
- if (caches.get(diskCache).isEnabled() && wasEvicted && evaluatePolicies(notification.getValue())) {
+ boolean countEvictionTowardsTotal = false; // Don't count this eviction towards the cache's total if it ends up in the disk tier
+ boolean exceptionOccurredOnDiskCachePut = false;
+ boolean canCacheOnDisk = caches.get(diskCache).isEnabled() && wasEvicted && evaluatePolicies(notification.getValue());
+ if (canCacheOnDisk) {
try (ReleasableLock ignore = writeLock.acquire()) {
diskCache.put(key, notification.getValue()); // spill over to the disk tier and increment its stats
+ } catch (Exception ex) {
+ // TODO: Catch specific exceptions. Needs better exception handling. We are just swallowing the exception
+ // in this case as it shouldn't cause the upstream request to fail.
+ logger.warn("Exception occurred while putting item to disk cache", ex);
+ exceptionOccurredOnDiskCachePut = true;
}
- updateStatsOnPut(TIER_DIMENSION_VALUE_DISK, key, notification.getValue());
- } else {
+ if (!exceptionOccurredOnDiskCachePut) {
+ updateStatsOnPut(TIER_DIMENSION_VALUE_DISK, key, notification.getValue());
+ }
+ }
+ if (!canCacheOnDisk || exceptionOccurredOnDiskCachePut) {
// If the value is not going to the disk cache, send this notification to the TSC's removal listener
// as the value is leaving the TSC entirely
removalListener.onRemoval(notification);
+ countEvictionTowardsTotal = true;
}
- updateStatsOnRemoval(TIER_DIMENSION_VALUE_ON_HEAP, wasEvicted, key, notification.getValue());
+ updateStatsOnRemoval(TIER_DIMENSION_VALUE_ON_HEAP, wasEvicted, key, notification.getValue(), countEvictionTowardsTotal);
}
void handleRemovalFromDiskTier(RemovalNotification, V> notification) {
// Values removed from the disk tier leave the TSC entirely
removalListener.onRemoval(notification);
boolean wasEvicted = SPILLOVER_REMOVAL_REASONS.contains(notification.getRemovalReason());
- updateStatsOnRemoval(TIER_DIMENSION_VALUE_DISK, wasEvicted, notification.getKey(), notification.getValue());
+ updateStatsOnRemoval(TIER_DIMENSION_VALUE_DISK, wasEvicted, notification.getKey(), notification.getValue(), true);
}
- void updateStatsOnRemoval(String removedFromTierValue, boolean wasEvicted, ICacheKey key, V value) {
+ void updateStatsOnRemoval(
+ String removedFromTierValue,
+ boolean wasEvicted,
+ ICacheKey key,
+ V value,
+ boolean countEvictionTowardsTotal
+ ) {
List dimensionValues = statsHolder.getDimensionsWithTierValue(key.dimensions, removedFromTierValue);
if (wasEvicted) {
- statsHolder.incrementEvictions(dimensionValues);
+ statsHolder.incrementEvictions(dimensionValues, countEvictionTowardsTotal);
}
statsHolder.decrementItems(dimensionValues);
statsHolder.decrementSizeInBytes(dimensionValues, weigher.applyAsLong(key, value));
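
The computeIfAbsent() changes above hinge on a standard pattern: the thread that wins putIfAbsent on the futures map runs the loader, every other caller waits on the same CompletableFuture, and the future is removed from the map once it completes. Below is a stripped-down, self-contained sketch of that pattern; none of the TieredSpilloverCache types are used and all names are illustrative, not the real implementation.

import java.util.Map;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Function;

// Sketch: load a value at most once per key under concurrent callers.
// Note: unlike the real cache, this sketch does not take a lock around the get + putIfAbsent,
// so a rare duplicate load is possible in a race; it is meant only to show the future-based hand-off.
final class SingleLoadCache<K, V> {
    private final Map<K, V> store = new ConcurrentHashMap<>();
    private final Map<K, CompletableFuture<V>> inFlight = new ConcurrentHashMap<>();

    V computeIfAbsent(K key, Function<K, V> loader) throws Exception {
        V existing = store.get(key);
        if (existing != null) {
            return existing;
        }
        CompletableFuture<V> ours = new CompletableFuture<>();
        CompletableFuture<V> prior = inFlight.putIfAbsent(key, ours);
        if (prior != null) {
            return prior.get(); // another thread is loading; wait for its result
        }
        try {
            V value = loader.apply(key);
            if (value == null) {
                throw new NullPointerException("Loader returned a null value");
            }
            store.put(key, value);
            ours.complete(value);
            return value;
        } catch (Exception e) {
            ours.completeExceptionally(e); // waiters observe the failure as an ExecutionException
            throw e;
        } finally {
            inFlight.remove(key); // the future is completed either way; drop it from the map
        }
    }
}

This is also why the concurrency tests below assert that completableFutureMap ends up empty and that a failing loader surfaces to all concurrent callers as an ExecutionException wrapping the original cause.
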
diff --git a/modules/cache-common/src/main/java/org/opensearch/cache/common/tier/TieredSpilloverCacheStatsHolder.java b/modules/cache-common/src/main/java/org/opensearch/cache/common/tier/TieredSpilloverCacheStatsHolder.java
index d17059e8dee94..b40724430454b 100644
--- a/modules/cache-common/src/main/java/org/opensearch/cache/common/tier/TieredSpilloverCacheStatsHolder.java
+++ b/modules/cache-common/src/main/java/org/opensearch/cache/common/tier/TieredSpilloverCacheStatsHolder.java
@@ -105,20 +105,29 @@ public void incrementMisses(List dimensionValues) {
internalIncrement(dimensionValues, missIncrementer, true);
}
+ /**
+ * This method shouldn't be used in this class. Instead, use incrementEvictions(dimensionValues, includeInTotal)
+ * which specifies whether the eviction should be included in the cache's total evictions, or if it should
+ * just count towards that tier's evictions.
+ * @param dimensionValues The dimension values
+ */
@Override
public void incrementEvictions(List dimensionValues) {
- final String tierValue = validateTierDimensionValue(dimensionValues);
+ throw new UnsupportedOperationException(
+ "TieredSpilloverCacheHolder must specify whether to include an eviction in the total cache stats. Use incrementEvictions(List dimensionValues, boolean includeInTotal)"
+ );
+ }
- // If the disk tier is present, only evictions from the disk tier should be included in total values.
+ /**
+ * Increment evictions for this set of dimension values.
+ * @param dimensionValues The dimension values
+ * @param includeInTotal Whether to include this eviction in the total for the whole cache's evictions
+ */
+ public void incrementEvictions(List dimensionValues, boolean includeInTotal) {
+ validateTierDimensionValue(dimensionValues);
+ // If we count this eviction towards the total, we should increment all ancestor nodes. If not, only increment the leaf node.
Consumer evictionsIncrementer = (node) -> {
- if (tierValue.equals(TIER_DIMENSION_VALUE_ON_HEAP) && diskCacheEnabled) {
- // If on-heap tier, increment only the leaf node corresponding to the on heap values; not the total values in its parent
- // nodes
- if (node.isAtLowestLevel()) {
- node.incrementEvictions();
- }
- } else {
- // If disk tier, or on-heap tier with a disabled disk tier, increment the leaf node and its parents
+ if (includeInTotal || node.isAtLowestLevel()) {
node.incrementEvictions();
}
};
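
The replacement incrementEvictions(dimensionValues, includeInTotal) boils down to one rule applied while walking the stats tree: bump every node on the path when the eviction counts towards the total, otherwise bump only the leaf (per-tier) node. A hedged sketch of that rule over a hypothetical node type, not the real stats-holder internals:

import java.util.List;
import java.util.function.Consumer;

// Sketch only: Node is a stand-in for the real stats-tree node type.
public final class EvictionIncrementSketch {
    static final class Node {
        final String name;
        final boolean leaf;
        long evictions;
        Node(String name, boolean leaf) { this.name = name; this.leaf = leaf; }
    }

    // Mirrors the rule used above: ancestors are only incremented when the eviction counts towards the total.
    static void incrementEvictions(List<Node> pathFromRootToLeaf, boolean includeInTotal) {
        Consumer<Node> incrementer = node -> {
            if (includeInTotal || node.leaf) {
                node.evictions++;
            }
        };
        pathFromRootToLeaf.forEach(incrementer);
    }

    public static void main(String[] args) {
        Node root = new Node("total", false);
        Node heapLeaf = new Node("tier=on_heap", true);
        // A heap eviction that spills over to the disk tier should not count towards the total:
        incrementEvictions(List.of(root, heapLeaf), false);
        System.out.println(root.evictions + " total evictions, " + heapLeaf.evictions + " on-heap evictions"); // 0, 1
    }
}
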
diff --git a/modules/cache-common/src/test/java/org/opensearch/cache/common/tier/MockDiskCache.java b/modules/cache-common/src/test/java/org/opensearch/cache/common/tier/MockDiskCache.java
index 2058faa5181b1..69e2060f7ea2f 100644
--- a/modules/cache-common/src/test/java/org/opensearch/cache/common/tier/MockDiskCache.java
+++ b/modules/cache-common/src/test/java/org/opensearch/cache/common/tier/MockDiskCache.java
@@ -141,6 +141,10 @@ public MockDiskCacheFactory(long delay, int maxSize, boolean statsTrackingEnable
@Override
@SuppressWarnings({ "unchecked" })
public ICache create(CacheConfig config, CacheType cacheType, Map cacheFactories) {
+ // As we can't directly run an integration test with the tiered cache and ehcache, check that we receive non-null serializers, as an ehcache disk
+ // cache would require.
+ assert config.getKeySerializer() != null;
+ assert config.getValueSerializer() != null;
return new Builder().setKeySerializer((Serializer) config.getKeySerializer())
.setValueSerializer((Serializer) config.getValueSerializer())
.setMaxSize(maxSize)
diff --git a/modules/cache-common/src/test/java/org/opensearch/cache/common/tier/TieredSpilloverCacheTests.java b/modules/cache-common/src/test/java/org/opensearch/cache/common/tier/TieredSpilloverCacheTests.java
index 6d5ee91326338..c6440a1e1797f 100644
--- a/modules/cache-common/src/test/java/org/opensearch/cache/common/tier/TieredSpilloverCacheTests.java
+++ b/modules/cache-common/src/test/java/org/opensearch/cache/common/tier/TieredSpilloverCacheTests.java
@@ -16,6 +16,7 @@
import org.opensearch.common.cache.RemovalListener;
import org.opensearch.common.cache.RemovalNotification;
import org.opensearch.common.cache.policy.CachedQueryResult;
+import org.opensearch.common.cache.serializer.Serializer;
import org.opensearch.common.cache.settings.CacheSettings;
import org.opensearch.common.cache.stats.ImmutableCacheStats;
import org.opensearch.common.cache.stats.ImmutableCacheStatsHolder;
@@ -32,6 +33,8 @@
import org.junit.Before;
import java.io.IOException;
+import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
@@ -41,8 +44,12 @@
import java.util.UUID;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
import java.util.concurrent.Phaser;
import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Function;
import java.util.function.Predicate;
@@ -53,6 +60,10 @@
import static org.opensearch.cache.common.tier.TieredSpilloverCacheStatsHolder.TIER_DIMENSION_VALUE_DISK;
import static org.opensearch.cache.common.tier.TieredSpilloverCacheStatsHolder.TIER_DIMENSION_VALUE_ON_HEAP;
import static org.opensearch.common.cache.store.settings.OpenSearchOnHeapCacheSettings.MAXIMUM_SIZE_IN_BYTES_KEY;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.Mockito.doThrow;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
public class TieredSpilloverCacheTests extends OpenSearchTestCase {
static final List dimensionNames = List.of("dim1", "dim2", "dim3");
@@ -166,6 +177,8 @@ public void testComputeIfAbsentWithFactoryBasedCacheCreation() throws Exception
.setKeyType(String.class)
.setWeigher((k, v) -> keyValueSize)
.setRemovalListener(removalListener)
+ .setKeySerializer(new StringSerializer())
+ .setValueSerializer(new StringSerializer())
.setSettings(settings)
.setDimensionNames(dimensionNames)
.setCachedResultParser(s -> new CachedQueryResult.PolicyValues(20_000_000L)) // Values will always appear to have taken
@@ -318,6 +331,8 @@ public void testComputeIfAbsentWithEvictionsFromOnHeapCache() throws Exception {
.setKeyType(String.class)
.setWeigher((k, v) -> keyValueSize)
.setRemovalListener(removalListener)
+ .setKeySerializer(new StringSerializer())
+ .setValueSerializer(new StringSerializer())
.setDimensionNames(dimensionNames)
.setSettings(
Settings.builder()
@@ -401,6 +416,7 @@ public void testComputeIfAbsentWithEvictionsFromOnHeapCache() throws Exception {
assertEquals(onHeapCacheHit, getHitsForTier(tieredSpilloverCache, TIER_DIMENSION_VALUE_ON_HEAP));
assertEquals(cacheMiss + numOfItems1, getMissesForTier(tieredSpilloverCache, TIER_DIMENSION_VALUE_DISK));
assertEquals(diskCacheHit, getHitsForTier(tieredSpilloverCache, TIER_DIMENSION_VALUE_DISK));
+ assertEquals(0, tieredSpilloverCache.completableFutureMap.size());
}
public void testComputeIfAbsentWithEvictionsFromTieredCache() throws Exception {
@@ -744,7 +760,7 @@ public void testInvalidateAll() throws Exception {
}
public void testComputeIfAbsentConcurrently() throws Exception {
- int onHeapCacheSize = randomIntBetween(100, 300);
+ int onHeapCacheSize = randomIntBetween(500, 700);
int diskCacheSize = randomIntBetween(200, 400);
int keyValueSize = 50;
@@ -766,7 +782,7 @@ public void testComputeIfAbsentConcurrently() throws Exception {
0
);
- int numberOfSameKeys = randomIntBetween(10, onHeapCacheSize - 1);
+ int numberOfSameKeys = randomIntBetween(400, onHeapCacheSize - 1);
ICacheKey key = getICacheKey(UUID.randomUUID().toString());
String value = UUID.randomUUID().toString();
@@ -795,7 +811,7 @@ public String load(ICacheKey key) {
};
loadAwareCacheLoaderList.add(loadAwareCacheLoader);
phaser.arriveAndAwaitAdvance();
- tieredSpilloverCache.computeIfAbsent(key, loadAwareCacheLoader);
+ assertEquals(value, tieredSpilloverCache.computeIfAbsent(key, loadAwareCacheLoader));
} catch (Exception e) {
throw new RuntimeException(e);
}
@@ -804,7 +820,7 @@ public String load(ICacheKey key) {
threads[i].start();
}
phaser.arriveAndAwaitAdvance();
- countDownLatch.await(); // Wait for rest of tasks to be cancelled.
+ countDownLatch.await();
int numberOfTimesKeyLoaded = 0;
assertEquals(numberOfSameKeys, loadAwareCacheLoaderList.size());
for (int i = 0; i < loadAwareCacheLoaderList.size(); i++) {
@@ -817,6 +833,215 @@ public String load(ICacheKey key) {
// We should see only one heap miss, and the rest hits
assertEquals(1, getMissesForTier(tieredSpilloverCache, TIER_DIMENSION_VALUE_ON_HEAP));
assertEquals(numberOfSameKeys - 1, getHitsForTier(tieredSpilloverCache, TIER_DIMENSION_VALUE_ON_HEAP));
+ assertEquals(0, tieredSpilloverCache.completableFutureMap.size());
+ }
+
+ public void testComputeIfAbsentConcurrentlyWithMultipleKeys() throws Exception {
+ int onHeapCacheSize = randomIntBetween(300, 500);
+ int diskCacheSize = randomIntBetween(600, 700);
+ int keyValueSize = 50;
+
+ MockCacheRemovalListener removalListener = new MockCacheRemovalListener<>();
+ Settings settings = Settings.builder()
+ .put(
+ OpenSearchOnHeapCacheSettings.getSettingListForCacheType(CacheType.INDICES_REQUEST_CACHE)
+ .get(MAXIMUM_SIZE_IN_BYTES_KEY)
+ .getKey(),
+ onHeapCacheSize * keyValueSize + "b"
+ )
+ .build();
+
+ TieredSpilloverCache tieredSpilloverCache = initializeTieredSpilloverCache(
+ keyValueSize,
+ diskCacheSize,
+ removalListener,
+ settings,
+ 0
+ );
+
+ int iterations = 10;
+ int numberOfKeys = 20;
+ List> iCacheKeyList = new ArrayList<>();
+ for (int i = 0; i < numberOfKeys; i++) {
+ ICacheKey key = getICacheKey(UUID.randomUUID().toString());
+ iCacheKeyList.add(key);
+ }
+ ExecutorService executorService = Executors.newFixedThreadPool(8);
+ CountDownLatch countDownLatch = new CountDownLatch(iterations * numberOfKeys); // To wait for all threads to finish.
+
+ List, String>> loadAwareCacheLoaderList = new CopyOnWriteArrayList<>();
+ for (int j = 0; j < numberOfKeys; j++) {
+ int finalJ = j;
+ for (int i = 0; i < iterations; i++) {
+ executorService.submit(() -> {
+ try {
+ LoadAwareCacheLoader, String> loadAwareCacheLoader = new LoadAwareCacheLoader<>() {
+ boolean isLoaded = false;
+
+ @Override
+ public boolean isLoaded() {
+ return isLoaded;
+ }
+
+ @Override
+ public String load(ICacheKey key) {
+ isLoaded = true;
+ return iCacheKeyList.get(finalJ).key;
+ }
+ };
+ loadAwareCacheLoaderList.add(loadAwareCacheLoader);
+ tieredSpilloverCache.computeIfAbsent(iCacheKeyList.get(finalJ), loadAwareCacheLoader);
+ } catch (Exception e) {
+ throw new RuntimeException(e);
+ } finally {
+ countDownLatch.countDown();
+ }
+ });
+ }
+ }
+ countDownLatch.await();
+ int numberOfTimesKeyLoaded = 0;
+ assertEquals(iterations * numberOfKeys, loadAwareCacheLoaderList.size());
+ for (int i = 0; i < loadAwareCacheLoaderList.size(); i++) {
+ LoadAwareCacheLoader, String> loader = loadAwareCacheLoaderList.get(i);
+ if (loader.isLoaded()) {
+ numberOfTimesKeyLoaded++;
+ }
+ }
+ assertEquals(numberOfKeys, numberOfTimesKeyLoaded); // Each key should be loaded only once.
+ // We should see one heap miss per key, and the rest hits
+ assertEquals(numberOfKeys, getMissesForTier(tieredSpilloverCache, TIER_DIMENSION_VALUE_ON_HEAP));
+ assertEquals((iterations * numberOfKeys) - numberOfKeys, getHitsForTier(tieredSpilloverCache, TIER_DIMENSION_VALUE_ON_HEAP));
+ assertEquals(0, tieredSpilloverCache.completableFutureMap.size());
+ executorService.shutdownNow();
+ }
+
+ public void testComputeIfAbsentConcurrentlyAndThrowsException() throws Exception {
+ LoadAwareCacheLoader, String> loadAwareCacheLoader = new LoadAwareCacheLoader<>() {
+ boolean isLoaded = false;
+
+ @Override
+ public boolean isLoaded() {
+ return isLoaded;
+ }
+
+ @Override
+ public String load(ICacheKey key) {
+ throw new RuntimeException("Testing");
+ }
+ };
+ verifyComputeIfAbsentThrowsException(RuntimeException.class, loadAwareCacheLoader, "Testing");
+ }
+
+ @SuppressWarnings({ "rawtypes", "unchecked" })
+ public void testComputeIfAbsentWithOnHeapCacheThrowingExceptionOnPut() throws Exception {
+ int onHeapCacheSize = randomIntBetween(100, 300);
+ int diskCacheSize = randomIntBetween(200, 400);
+ int keyValueSize = 50;
+
+ MockCacheRemovalListener removalListener = new MockCacheRemovalListener<>();
+ Settings settings = Settings.builder()
+ .put(
+ OpenSearchOnHeapCacheSettings.getSettingListForCacheType(CacheType.INDICES_REQUEST_CACHE)
+ .get(MAXIMUM_SIZE_IN_BYTES_KEY)
+ .getKey(),
+ onHeapCacheSize * keyValueSize + "b"
+ )
+ .build();
+ ICache.Factory onHeapCacheFactory = mock(OpenSearchOnHeapCache.OpenSearchOnHeapCacheFactory.class);
+ ICache mockOnHeapCache = mock(ICache.class);
+ when(onHeapCacheFactory.create(any(), any(), any())).thenReturn(mockOnHeapCache);
+ doThrow(new RuntimeException("Testing")).when(mockOnHeapCache).put(any(), any());
+ CacheConfig cacheConfig = getCacheConfig(keyValueSize, settings, removalListener);
+ ICache.Factory mockDiskCacheFactory = new MockDiskCache.MockDiskCacheFactory(0, diskCacheSize, false);
+
+ TieredSpilloverCache tieredSpilloverCache = getTieredSpilloverCache(
+ onHeapCacheFactory,
+ mockDiskCacheFactory,
+ cacheConfig,
+ null,
+ removalListener
+ );
+ String value = "";
+ value = tieredSpilloverCache.computeIfAbsent(getICacheKey("test"), new LoadAwareCacheLoader<>() {
+ @Override
+ public boolean isLoaded() {
+ return false;
+ }
+
+ @Override
+ public String load(ICacheKey key) {
+ return "test";
+ }
+ });
+ assertEquals("test", value);
+ assertEquals(0, tieredSpilloverCache.completableFutureMap.size());
+ }
+
+ @SuppressWarnings({ "rawtypes", "unchecked" })
+ public void testComputeIfAbsentWithDiskCacheThrowingExceptionOnPut() throws Exception {
+ int onHeapCacheSize = 0;
+ int keyValueSize = 50;
+
+ MockCacheRemovalListener removalListener = new MockCacheRemovalListener<>();
+ Settings settings = Settings.builder()
+ .put(
+ OpenSearchOnHeapCacheSettings.getSettingListForCacheType(CacheType.INDICES_REQUEST_CACHE)
+ .get(MAXIMUM_SIZE_IN_BYTES_KEY)
+ .getKey(),
+ onHeapCacheSize * keyValueSize + "b"
+ )
+ .build();
+ ICache.Factory onHeapCacheFactory = new OpenSearchOnHeapCache.OpenSearchOnHeapCacheFactory();
+ CacheConfig cacheConfig = getCacheConfig(keyValueSize, settings, removalListener);
+ ICache.Factory mockDiskCacheFactory = mock(MockDiskCache.MockDiskCacheFactory.class);
+ ICache mockDiskCache = mock(ICache.class);
+ when(mockDiskCacheFactory.create(any(), any(), any())).thenReturn(mockDiskCache);
+ doThrow(new RuntimeException("Test")).when(mockDiskCache).put(any(), any());
+
+ TieredSpilloverCache tieredSpilloverCache = getTieredSpilloverCache(
+ onHeapCacheFactory,
+ mockDiskCacheFactory,
+ cacheConfig,
+ null,
+ removalListener
+ );
+
+ String response = "";
+ response = tieredSpilloverCache.computeIfAbsent(getICacheKey("test"), new LoadAwareCacheLoader<>() {
+ @Override
+ public boolean isLoaded() {
+ return false;
+ }
+
+ @Override
+ public String load(ICacheKey key) {
+ return "test";
+ }
+ });
+ ImmutableCacheStats diskStats = getStatsSnapshotForTier(tieredSpilloverCache, TIER_DIMENSION_VALUE_DISK);
+
+ assertEquals(0, diskStats.getSizeInBytes());
+ assertEquals(1, removalListener.evictionsMetric.count());
+ assertEquals("test", response);
+ assertEquals(0, tieredSpilloverCache.completableFutureMap.size());
+ }
+
+ public void testComputeIfAbsentConcurrentlyWithLoaderReturningNull() throws Exception {
+ LoadAwareCacheLoader, String> loadAwareCacheLoader = new LoadAwareCacheLoader<>() {
+ boolean isLoaded = false;
+
+ @Override
+ public boolean isLoaded() {
+ return isLoaded;
+ }
+
+ @Override
+ public String load(ICacheKey key) {
+ return null;
+ }
+ };
+ verifyComputeIfAbsentThrowsException(NullPointerException.class, loadAwareCacheLoader, "Loader returned a null value");
}
public void testConcurrencyForEvictionFlowFromOnHeapToDiskTier() throws Exception {
@@ -830,6 +1055,8 @@ public void testConcurrencyForEvictionFlowFromOnHeapToDiskTier() throws Exceptio
.setKeyType(String.class)
.setWeigher((k, v) -> 150)
.setRemovalListener(removalListener)
+ .setKeySerializer(new StringSerializer())
+ .setValueSerializer(new StringSerializer())
.setSettings(
Settings.builder()
.put(
@@ -917,14 +1144,14 @@ public void testDiskTierPolicies() throws Exception {
MockCacheRemovalListener removalListener = new MockCacheRemovalListener<>();
TieredSpilloverCache tieredSpilloverCache = intializeTieredSpilloverCache(
keyValueSize,
- 100,
+ keyValueSize * 100,
removalListener,
Settings.builder()
.put(
OpenSearchOnHeapCacheSettings.getSettingListForCacheType(CacheType.INDICES_REQUEST_CACHE)
.get(MAXIMUM_SIZE_IN_BYTES_KEY)
.getKey(),
- onHeapCacheSize * 50 + "b"
+ onHeapCacheSize * keyValueSize + "b"
)
.build(),
0,
@@ -946,6 +1173,7 @@ public void testDiskTierPolicies() throws Exception {
LoadAwareCacheLoader, String> loader = getLoadAwareCacheLoader(keyValuePairs);
+ int expectedEvictions = 0;
for (String key : keyValuePairs.keySet()) {
ICacheKey iCacheKey = getICacheKey(key);
Boolean expectedOutput = expectedOutputs.get(key);
@@ -958,8 +1186,15 @@ public void testDiskTierPolicies() throws Exception {
} else {
// Should miss as heap tier size = 0 and the policy rejected it
assertNull(result);
+ expectedEvictions++;
}
}
+
+ // We expect values that were evicted from the heap tier and not allowed into the disk tier by the policy
+ // to count towards total evictions
+ assertEquals(keyValuePairs.size(), getEvictionsForTier(tieredSpilloverCache, TIER_DIMENSION_VALUE_ON_HEAP));
+ assertEquals(0, getEvictionsForTier(tieredSpilloverCache, TIER_DIMENSION_VALUE_DISK)); // Disk tier is large enough for no evictions
+ assertEquals(expectedEvictions, getTotalStatsSnapshot(tieredSpilloverCache).getEvictions());
}
public void testTookTimePolicyFromFactory() throws Exception {
@@ -1014,6 +1249,8 @@ public void testTookTimePolicyFromFactory() throws Exception {
.setKeyType(String.class)
.setWeigher((k, v) -> keyValueSize)
.setRemovalListener(removalListener)
+ .setKeySerializer(new StringSerializer())
+ .setValueSerializer(new StringSerializer())
.setSettings(settings)
.setMaxSizeInBytes(onHeapCacheSize * keyValueSize)
.setDimensionNames(dimensionNames)
@@ -1389,6 +1626,26 @@ public boolean isLoaded() {
};
}
+ private TieredSpilloverCache getTieredSpilloverCache(
+ ICache.Factory onHeapCacheFactory,
+ ICache.Factory mockDiskCacheFactory,
+ CacheConfig cacheConfig,
+ List> policies,
+ RemovalListener, String> removalListener
+ ) {
+ TieredSpilloverCache.Builder builder = new TieredSpilloverCache.Builder().setCacheType(
+ CacheType.INDICES_REQUEST_CACHE
+ )
+ .setRemovalListener(removalListener)
+ .setOnHeapCacheFactory(onHeapCacheFactory)
+ .setDiskCacheFactory(mockDiskCacheFactory)
+ .setCacheConfig(cacheConfig);
+ if (policies != null) {
+ builder.addPolicies(policies);
+ }
+ return builder.build();
+ }
+
private TieredSpilloverCache initializeTieredSpilloverCache(
int keyValueSize,
int diskCacheSize,
@@ -1415,6 +1672,8 @@ private TieredSpilloverCache intializeTieredSpilloverCache(
.setSettings(settings)
.setDimensionNames(dimensionNames)
.setRemovalListener(removalListener)
+ .setKeySerializer(new StringSerializer())
+ .setValueSerializer(new StringSerializer())
.setSettings(
Settings.builder()
.put(
@@ -1429,17 +1688,34 @@ private TieredSpilloverCache intializeTieredSpilloverCache(
.build();
ICache.Factory mockDiskCacheFactory = new MockDiskCache.MockDiskCacheFactory(diskDeliberateDelay, diskCacheSize, false);
- TieredSpilloverCache.Builder builder = new TieredSpilloverCache.Builder().setCacheType(
- CacheType.INDICES_REQUEST_CACHE
- )
+ return getTieredSpilloverCache(onHeapCacheFactory, mockDiskCacheFactory, cacheConfig, policies, removalListener);
+ }
+
+ private CacheConfig getCacheConfig(
+ int keyValueSize,
+ Settings settings,
+ RemovalListener, String> removalListener
+ ) {
+ return new CacheConfig.Builder().setKeyType(String.class)
+ .setKeyType(String.class)
+ .setWeigher((k, v) -> keyValueSize)
+ .setSettings(settings)
+ .setDimensionNames(dimensionNames)
.setRemovalListener(removalListener)
- .setOnHeapCacheFactory(onHeapCacheFactory)
- .setDiskCacheFactory(mockDiskCacheFactory)
- .setCacheConfig(cacheConfig);
- if (policies != null) {
- builder.addPolicies(policies);
- }
- return builder.build();
+ .setKeySerializer(new StringSerializer())
+ .setValueSerializer(new StringSerializer())
+ .setSettings(
+ Settings.builder()
+ .put(
+ CacheSettings.getConcreteStoreNameSettingForCacheType(CacheType.INDICES_REQUEST_CACHE).getKey(),
+ TieredSpilloverCache.TieredSpilloverCacheFactory.TIERED_SPILLOVER_CACHE_NAME
+ )
+ .put(FeatureFlags.PLUGGABLE_CACHE, "true")
+ .put(settings)
+ .build()
+ )
+ .setClusterSettings(clusterSettings)
+ .build();
}
// Helper functions for extracting tier aggregated stats.
@@ -1479,4 +1755,91 @@ private ImmutableCacheStats getStatsSnapshotForTier(TieredSpilloverCache, ?> t
}
return snapshot;
}
+
+ private void verifyComputeIfAbsentThrowsException(
+ Class<? extends Exception> expectedException,
+ LoadAwareCacheLoader, String> loader,
+ String expectedExceptionMessage
+ ) throws InterruptedException {
+ int onHeapCacheSize = randomIntBetween(100, 300);
+ int diskCacheSize = randomIntBetween(200, 400);
+ int keyValueSize = 50;
+
+ MockCacheRemovalListener removalListener = new MockCacheRemovalListener<>();
+ Settings settings = Settings.builder()
+ .put(
+ OpenSearchOnHeapCacheSettings.getSettingListForCacheType(CacheType.INDICES_REQUEST_CACHE)
+ .get(MAXIMUM_SIZE_IN_BYTES_KEY)
+ .getKey(),
+ onHeapCacheSize * keyValueSize + "b"
+ )
+ .build();
+
+ TieredSpilloverCache tieredSpilloverCache = initializeTieredSpilloverCache(
+ keyValueSize,
+ diskCacheSize,
+ removalListener,
+ settings,
+ 0
+ );
+
+ int numberOfSameKeys = randomIntBetween(10, onHeapCacheSize - 1);
+ ICacheKey key = getICacheKey(UUID.randomUUID().toString());
+ String value = UUID.randomUUID().toString();
+ AtomicInteger exceptionCount = new AtomicInteger();
+
+ Thread[] threads = new Thread[numberOfSameKeys];
+ Phaser phaser = new Phaser(numberOfSameKeys + 1);
+ CountDownLatch countDownLatch = new CountDownLatch(numberOfSameKeys); // To wait for all threads to finish.
+
+ for (int i = 0; i < numberOfSameKeys; i++) {
+ threads[i] = new Thread(() -> {
+ try {
+ phaser.arriveAndAwaitAdvance();
+ tieredSpilloverCache.computeIfAbsent(key, loader);
+ } catch (Exception e) {
+ exceptionCount.incrementAndGet();
+ assertEquals(ExecutionException.class, e.getClass());
+ assertEquals(expectedException, e.getCause().getClass());
+ assertEquals(expectedExceptionMessage, e.getCause().getMessage());
+ } finally {
+ countDownLatch.countDown();
+ }
+ });
+ threads[i].start();
+ }
+ phaser.arriveAndAwaitAdvance();
+ countDownLatch.await(); // Wait for all threads to finish.
+
+ // Verify exception count was equal to number of requests
+ assertEquals(numberOfSameKeys, exceptionCount.get());
+ assertEquals(0, tieredSpilloverCache.completableFutureMap.size());
+ }
+
+ private ImmutableCacheStats getTotalStatsSnapshot(TieredSpilloverCache, ?> tsc) throws IOException {
+ ImmutableCacheStatsHolder cacheStats = tsc.stats(new String[0]);
+ return cacheStats.getStatsForDimensionValues(List.of());
+ }
+
+ // Duplicated here from EhcacheDiskCacheTests.java, as we can't add a dependency on that plugin
+ static class StringSerializer implements Serializer {
+ private final Charset charset = StandardCharsets.UTF_8;
+
+ @Override
+ public byte[] serialize(String object) {
+ return object.getBytes(charset);
+ }
+
+ @Override
+ public String deserialize(byte[] bytes) {
+ if (bytes == null) {
+ return null;
+ }
+ return new String(bytes, charset);
+ }
+
+ public boolean equals(String object, byte[] bytes) {
+ return object.equals(deserialize(bytes));
+ }
+ }
}
diff --git a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/CommunityIdProcessor.java b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/CommunityIdProcessor.java
index c968fb2f6c2da..c84892971c87e 100644
--- a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/CommunityIdProcessor.java
+++ b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/CommunityIdProcessor.java
@@ -29,7 +29,7 @@
* Processor that generating community id flow hash for the network flow tuples, the algorithm is defined in
* Community ID Flow Hashing.
*/
-public class CommunityIdProcessor extends AbstractProcessor {
+public final class CommunityIdProcessor extends AbstractProcessor {
public static final String TYPE = "community_id";
// the version of the community id flow hashing algorithm
private static final String COMMUNITY_ID_HASH_VERSION = "1";
diff --git a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/FingerprintProcessor.java b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/FingerprintProcessor.java
new file mode 100644
index 0000000000000..c2f59bf586c81
--- /dev/null
+++ b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/FingerprintProcessor.java
@@ -0,0 +1,279 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.ingest.common;
+
+import org.opensearch.common.Nullable;
+import org.opensearch.common.hash.MessageDigests;
+import org.opensearch.core.common.Strings;
+import org.opensearch.ingest.AbstractProcessor;
+import org.opensearch.ingest.ConfigurationUtils;
+import org.opensearch.ingest.IngestDocument;
+import org.opensearch.ingest.Processor;
+
+import java.nio.charset.StandardCharsets;
+import java.security.MessageDigest;
+import java.util.Base64;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+import static org.opensearch.ingest.ConfigurationUtils.newConfigurationException;
+
+/**
+ * Processor that generates a hash value for the specified fields, or for all fields not in the specified exclude list
+ */
+public final class FingerprintProcessor extends AbstractProcessor {
+ public static final String TYPE = "fingerprint";
+ // This processor was introduced in 2.16.0. We append the OpenSearch version to the hash method name to ensure
+ // that this processor always generates the same hash value for a given hash method; if the processing logic
+ // of this processor changes in a future version, the version number in the hash method should be increased accordingly.
+ private static final Set HASH_METHODS = Set.of("MD5@2.16.0", "SHA-1@2.16.0", "SHA-256@2.16.0", "SHA3-256@2.16.0");
+
+ // fields used to generate hash value
+ private final List fields;
+ // all fields other than the excluded fields are used to generate hash value
+ private final List excludeFields;
+ // the target field to store the hash value, defaults to fingerprint
+ private final String targetField;
+ // hash method used to generate the hash value, defaults to SHA-1@2.16.0
+ private final String hashMethod;
+ private final boolean ignoreMissing;
+
+ FingerprintProcessor(
+ String tag,
+ String description,
+ @Nullable List fields,
+ @Nullable List excludeFields,
+ String targetField,
+ String hashMethod,
+ boolean ignoreMissing
+ ) {
+ super(tag, description);
+ if (fields != null && !fields.isEmpty()) {
+ if (fields.stream().anyMatch(Strings::isNullOrEmpty)) {
+ throw new IllegalArgumentException("field name in [fields] cannot be null nor empty");
+ }
+ if (excludeFields != null && !excludeFields.isEmpty()) {
+ throw new IllegalArgumentException("either fields or exclude_fields can be set");
+ }
+ }
+ if (excludeFields != null && !excludeFields.isEmpty() && excludeFields.stream().anyMatch(Strings::isNullOrEmpty)) {
+ throw new IllegalArgumentException("field name in [exclude_fields] cannot be null nor empty");
+ }
+
+ if (!HASH_METHODS.contains(hashMethod.toUpperCase(Locale.ROOT))) {
+ throw new IllegalArgumentException("hash method must be MD5@2.16.0, SHA-1@2.16.0 or SHA-256@2.16.0 or SHA3-256@2.16.0");
+ }
+ this.fields = fields;
+ this.excludeFields = excludeFields;
+ this.targetField = targetField;
+ this.hashMethod = hashMethod;
+ this.ignoreMissing = ignoreMissing;
+ }
+
+ public List getFields() {
+ return fields;
+ }
+
+ public List getExcludeFields() {
+ return excludeFields;
+ }
+
+ public String getTargetField() {
+ return targetField;
+ }
+
+ public String getHashMethod() {
+ return hashMethod;
+ }
+
+ public boolean isIgnoreMissing() {
+ return ignoreMissing;
+ }
+
+ @Override
+ public IngestDocument execute(IngestDocument document) {
+ // we should deduplicate and sort the field names to make sure we get a consistent hash value
+ final List sortedFields;
+ Set existingFields = new HashSet<>(document.getSourceAndMetadata().keySet());
+ Set metadataFields = document.getMetadata()
+ .keySet()
+ .stream()
+ .map(IngestDocument.Metadata::getFieldName)
+ .collect(Collectors.toSet());
+ // metadata fields such as _index, _id and _routing are ignored
+ if (fields != null && !fields.isEmpty()) {
+ sortedFields = fields.stream()
+ .distinct()
+ .filter(field -> !metadataFields.contains(field))
+ .sorted()
+ .collect(Collectors.toList());
+ } else if (excludeFields != null && !excludeFields.isEmpty()) {
+ sortedFields = existingFields.stream()
+ .filter(field -> !metadataFields.contains(field) && !excludeFields.contains(field))
+ .sorted()
+ .collect(Collectors.toList());
+ } else {
+ sortedFields = existingFields.stream().filter(field -> !metadataFields.contains(field)).sorted().collect(Collectors.toList());
+ }
+ assert (!sortedFields.isEmpty());
+
+ final StringBuilder concatenatedFields = new StringBuilder();
+ sortedFields.forEach(field -> {
+ if (!document.hasField(field)) {
+ if (ignoreMissing) {
+ return;
+ } else {
+ throw new IllegalArgumentException("field [" + field + "] doesn't exist");
+ }
+ }
+
+ final Object value = document.getFieldValue(field, Object.class);
+ if (value instanceof Map) {
+ @SuppressWarnings("unchecked")
+ Map flattenedMap = toFlattenedMap((Map) value);
+ flattenedMap.entrySet().stream().sorted(Map.Entry.comparingByKey()).forEach(entry -> {
+ String fieldValue = String.valueOf(entry.getValue());
+ concatenatedFields.append("|")
+ .append(field)
+ .append(".")
+ .append(entry.getKey())
+ .append("|")
+ .append(fieldValue.length())
+ .append(":")
+ .append(fieldValue);
+ });
+ } else {
+ String fieldValue = String.valueOf(value);
+ concatenatedFields.append("|").append(field).append("|").append(fieldValue.length()).append(":").append(fieldValue);
+ }
+ });
+ // if none of the specified fields exist and ignore_missing is true, then do nothing
+ if (concatenatedFields.length() == 0) {
+ return document;
+ }
+ concatenatedFields.append("|");
+
+ MessageDigest messageDigest = HashMethod.fromMethodName(hashMethod);
+ assert (messageDigest != null);
+ messageDigest.update(concatenatedFields.toString().getBytes(StandardCharsets.UTF_8));
+ document.setFieldValue(targetField, hashMethod + ":" + Base64.getEncoder().encodeToString(messageDigest.digest()));
+
+ return document;
+ }
+
+ @Override
+ public String getType() {
+ return TYPE;
+ }
+
+ /**
+ * Convert a map containing nested fields to a flattened map,
+ * for example, if the original map is
+ * {
+ * "a": {
+ * "b": 1,
+ * "c": 2
+ * }
+ * }, then the converted map is
+ * {
+ * "a.b": 1,
+ * "a.c": 2
+ * }
+ * @param map the original map which may contain nested fields
+ * @return a flattened map which has only one level of fields
+ */
+ @SuppressWarnings("unchecked")
+ private Map toFlattenedMap(Map map) {
+ Map flattenedMap = new HashMap<>();
+ for (Map.Entry entry : map.entrySet()) {
+ if (entry.getValue() instanceof Map) {
+ toFlattenedMap((Map) entry.getValue()).forEach(
+ (key, value) -> flattenedMap.put(entry.getKey() + "." + key, value)
+ );
+ } else {
+ flattenedMap.put(entry.getKey(), entry.getValue());
+ }
+ }
+ return flattenedMap;
+ }
+
+ /**
+ * The supported hash methods used to generate hash value
+ */
+ enum HashMethod {
+ MD5(MessageDigests.md5()),
+ SHA1(MessageDigests.sha1()),
+ SHA256(MessageDigests.sha256()),
+ SHA3256(MessageDigests.sha3256());
+
+ private final MessageDigest messageDigest;
+
+ HashMethod(MessageDigest messageDigest) {
+ this.messageDigest = messageDigest;
+ }
+
+ public static MessageDigest fromMethodName(String methodName) {
+ String name = methodName.toUpperCase(Locale.ROOT);
+ switch (name) {
+ case "MD5@2.16.0":
+ return MD5.messageDigest;
+ case "SHA-1@2.16.0":
+ return SHA1.messageDigest;
+ case "SHA-256@2.16.0":
+ return SHA256.messageDigest;
+ case "SHA3-256@2.16.0":
+ return SHA3256.messageDigest;
+ default:
+ return null;
+ }
+ }
+ }
+
+ public static final class Factory implements Processor.Factory {
+ @Override
+ public FingerprintProcessor create(
+ Map registry,
+ String processorTag,
+ String description,
+ Map config
+ ) throws Exception {
+ List fields = ConfigurationUtils.readOptionalList(TYPE, processorTag, config, "fields");
+ List excludeFields = ConfigurationUtils.readOptionalList(TYPE, processorTag, config, "exclude_fields");
+ if (fields != null && !fields.isEmpty()) {
+ if (fields.stream().anyMatch(Strings::isNullOrEmpty)) {
+ throw newConfigurationException(TYPE, processorTag, "fields", "field name cannot be null nor empty");
+ }
+ if (excludeFields != null && !excludeFields.isEmpty()) {
+ throw newConfigurationException(TYPE, processorTag, "fields", "either fields or exclude_fields can be set");
+ }
+ }
+ if (excludeFields != null && !excludeFields.isEmpty() && excludeFields.stream().anyMatch(Strings::isNullOrEmpty)) {
+ throw newConfigurationException(TYPE, processorTag, "exclude_fields", "field name cannot be null nor empty");
+ }
+
+ String targetField = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "target_field", "fingerprint");
+ String hashMethod = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "hash_method", "SHA-1@2.16.0");
+ if (!HASH_METHODS.contains(hashMethod.toUpperCase(Locale.ROOT))) {
+ throw newConfigurationException(
+ TYPE,
+ processorTag,
+ "hash_method",
+ "hash method must be MD5@2.16.0, SHA-1@2.16.0, SHA-256@2.16.0 or SHA3-256@2.16.0"
+ );
+ }
+ boolean ignoreMissing = ConfigurationUtils.readBooleanProperty(TYPE, processorTag, config, "ignore_missing", false);
+ return new FingerprintProcessor(processorTag, description, fields, excludeFields, targetField, hashMethod, ignoreMissing);
+ }
+ }
+}
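
The processor above hashes a deterministic concatenation of the sorted field names and values, using the pattern |field|length:value| (nested maps are first flattened to dotted keys), and stores the Base64-encoded digest prefixed with the hash method name. A small standalone sketch of that concatenation and hashing over an already-flattened map; it uses the JDK's MessageDigest with SHA-256 directly rather than OpenSearch's MessageDigests helper, so treat it as illustrative only:

import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.util.Base64;
import java.util.Map;
import java.util.TreeMap;

// Sketch only: a deterministic fingerprint over an already-flattened field map.
public final class FingerprintSketch {
    static String fingerprint(Map<String, Object> flatFields) throws Exception {
        StringBuilder sb = new StringBuilder();
        // TreeMap gives a stable, sorted iteration order over the field names.
        for (Map.Entry<String, Object> e : new TreeMap<>(flatFields).entrySet()) {
            String value = String.valueOf(e.getValue());
            sb.append("|").append(e.getKey()).append("|").append(value.length()).append(":").append(value);
        }
        sb.append("|");
        MessageDigest digest = MessageDigest.getInstance("SHA-256"); // assumed hash algorithm for this sketch
        digest.update(sb.toString().getBytes(StandardCharsets.UTF_8));
        return Base64.getEncoder().encodeToString(digest.digest());
    }

    public static void main(String[] args) throws Exception {
        Map<String, Object> flat = Map.of("a.b", 1, "a.c", 2); // i.e. { "a": { "b": 1, "c": 2 } } flattened
        System.out.println(fingerprint(flat));                 // the same input always yields the same digest
    }
}
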
diff --git a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/IngestCommonModulePlugin.java b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/IngestCommonModulePlugin.java
index 0f8b248fd5af8..5b2db9ff940e7 100644
--- a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/IngestCommonModulePlugin.java
+++ b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/IngestCommonModulePlugin.java
@@ -58,10 +58,20 @@
import java.util.HashMap;
import java.util.List;
import java.util.Map;
+import java.util.Set;
+import java.util.function.Function;
import java.util.function.Supplier;
+import java.util.stream.Collectors;
public class IngestCommonModulePlugin extends Plugin implements ActionPlugin, IngestPlugin {
+ static final Setting> PROCESSORS_ALLOWLIST_SETTING = Setting.listSetting(
+ "ingest.common.processors.allowed",
+ List.of(),
+ Function.identity(),
+ Setting.Property.NodeScope
+ );
+
static final Setting WATCHDOG_INTERVAL = Setting.timeSetting(
"ingest.grok.watchdog.interval",
TimeValue.timeValueSeconds(1),
@@ -77,7 +87,7 @@ public IngestCommonModulePlugin() {}
@Override
public Map getProcessors(Processor.Parameters parameters) {
- Map processors = new HashMap<>();
+ final Map processors = new HashMap<>();
processors.put(DateProcessor.TYPE, new DateProcessor.Factory(parameters.scriptService));
processors.put(SetProcessor.TYPE, new SetProcessor.Factory(parameters.scriptService));
processors.put(AppendProcessor.TYPE, new AppendProcessor.Factory(parameters.scriptService));
@@ -109,7 +119,8 @@ public Map getProcessors(Processor.Parameters paramet
processors.put(CopyProcessor.TYPE, new CopyProcessor.Factory(parameters.scriptService));
processors.put(RemoveByPatternProcessor.TYPE, new RemoveByPatternProcessor.Factory());
processors.put(CommunityIdProcessor.TYPE, new CommunityIdProcessor.Factory());
- return Collections.unmodifiableMap(processors);
+ processors.put(FingerprintProcessor.TYPE, new FingerprintProcessor.Factory());
+ return filterForAllowlistSetting(parameters.env.settings(), processors);
}
@Override
@@ -132,7 +143,7 @@ public List getRestHandlers(
@Override
public List> getSettings() {
- return Arrays.asList(WATCHDOG_INTERVAL, WATCHDOG_MAX_EXECUTION_TIME);
+ return Arrays.asList(WATCHDOG_INTERVAL, WATCHDOG_MAX_EXECUTION_TIME, PROCESSORS_ALLOWLIST_SETTING);
}
private static MatcherWatchdog createGrokThreadWatchdog(Processor.Parameters parameters) {
@@ -146,4 +157,27 @@ private static MatcherWatchdog createGrokThreadWatchdog(Processor.Parameters par
);
}
+ private Map filterForAllowlistSetting(Settings settings, Map map) {
+ if (PROCESSORS_ALLOWLIST_SETTING.exists(settings) == false) {
+ return Map.copyOf(map);
+ }
+ final Set allowlist = Set.copyOf(PROCESSORS_ALLOWLIST_SETTING.get(settings));
+ // Assert that no unknown processors are defined in the allowlist
+ final Set unknownAllowlistProcessors = allowlist.stream()
+ .filter(p -> map.containsKey(p) == false)
+ .collect(Collectors.toUnmodifiableSet());
+ if (unknownAllowlistProcessors.isEmpty() == false) {
+ throw new IllegalArgumentException(
+ "Processor(s) "
+ + unknownAllowlistProcessors
+ + " were defined in ["
+ + PROCESSORS_ALLOWLIST_SETTING.getKey()
+ + "] but do not exist"
+ );
+ }
+ return map.entrySet()
+ .stream()
+ .filter(e -> allowlist.contains(e.getKey()))
+ .collect(Collectors.toUnmodifiableMap(Map.Entry::getKey, Map.Entry::getValue));
+ }
}
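
The filterForAllowlistSetting logic added above is the familiar allowlist pattern: reject any allowlisted name that has no registered processor, then keep only the allowlisted entries. A hedged, standalone sketch of that filter follows; the setting key string is taken from the diff, while the class and method names are illustrative:

import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;

// Sketch only: filter a registry of processors down to an allowlist, rejecting unknown names.
public final class AllowlistFilterSketch {
    static <T> Map<String, T> filter(Set<String> allowlist, Map<String, T> processors) {
        Set<String> unknown = allowlist.stream()
            .filter(name -> processors.containsKey(name) == false)
            .collect(Collectors.toUnmodifiableSet());
        if (unknown.isEmpty() == false) {
            throw new IllegalArgumentException(
                "Processor(s) " + unknown + " were defined in [ingest.common.processors.allowed] but do not exist"
            );
        }
        return processors.entrySet()
            .stream()
            .filter(e -> allowlist.contains(e.getKey()))
            .collect(Collectors.toUnmodifiableMap(Map.Entry::getKey, Map.Entry::getValue));
    }

    public static void main(String[] args) {
        Map<String, String> registry = Map.of("set", "SetProcessor", "fingerprint", "FingerprintProcessor");
        System.out.println(filter(Set.of("fingerprint"), registry)); // {fingerprint=FingerprintProcessor}
    }
}
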
diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/FingerprintProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/FingerprintProcessorFactoryTests.java
new file mode 100644
index 0000000000000..74ad4cade7b37
--- /dev/null
+++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/FingerprintProcessorFactoryTests.java
@@ -0,0 +1,119 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.ingest.common;
+
+import org.opensearch.OpenSearchParseException;
+import org.opensearch.test.OpenSearchTestCase;
+import org.junit.Before;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+
+import static org.hamcrest.CoreMatchers.equalTo;
+
+public class FingerprintProcessorFactoryTests extends OpenSearchTestCase {
+
+ private FingerprintProcessor.Factory factory;
+
+ @Before
+ public void init() {
+ factory = new FingerprintProcessor.Factory();
+ }
+
+ public void testCreate() throws Exception {
+ Map config = new HashMap<>();
+
+ List fields = null;
+ List excludeFields = null;
+ if (randomBoolean()) {
+ fields = List.of(randomAlphaOfLength(10));
+ config.put("fields", fields);
+ } else {
+ excludeFields = List.of(randomAlphaOfLength(10));
+ config.put("exclude_fields", excludeFields);
+ }
+
+ String targetField = null;
+ if (randomBoolean()) {
+ targetField = randomAlphaOfLength(10);
+ }
+ config.put("target_field", targetField);
+
+ boolean ignoreMissing = randomBoolean();
+ config.put("ignore_missing", ignoreMissing);
+ String processorTag = randomAlphaOfLength(10);
+ FingerprintProcessor fingerprintProcessor = factory.create(null, processorTag, null, config);
+ assertThat(fingerprintProcessor.getTag(), equalTo(processorTag));
+ assertThat(fingerprintProcessor.getFields(), equalTo(fields));
+ assertThat(fingerprintProcessor.getExcludeFields(), equalTo(excludeFields));
+ assertThat(fingerprintProcessor.getTargetField(), equalTo(Objects.requireNonNullElse(targetField, "fingerprint")));
+ assertThat(fingerprintProcessor.isIgnoreMissing(), equalTo(ignoreMissing));
+ }
+
+ public void testCreateWithFields() throws Exception {
+ Map config = new HashMap<>();
+ config.put("fields", List.of(randomAlphaOfLength(10)));
+ config.put("exclude_fields", List.of(randomAlphaOfLength(10)));
+ try {
+ factory.create(null, null, null, config);
+ fail("factory create should have failed");
+ } catch (OpenSearchParseException e) {
+ assertThat(e.getMessage(), equalTo("[fields] either fields or exclude_fields can be set"));
+ }
+
+ config = new HashMap<>();
+ List fields = new ArrayList<>();
+ if (randomBoolean()) {
+ fields.add(null);
+ } else {
+ fields.add("");
+ }
+ config.put("fields", fields);
+ try {
+ factory.create(null, null, null, config);
+ fail("factory create should have failed");
+ } catch (OpenSearchParseException e) {
+ assertThat(e.getMessage(), equalTo("[fields] field name cannot be null nor empty"));
+ }
+
+ config = new HashMap<>();
+ List excludeFields = new ArrayList<>();
+ if (randomBoolean()) {
+ excludeFields.add(null);
+ } else {
+ excludeFields.add("");
+ }
+ config.put("exclude_fields", excludeFields);
+ try {
+ factory.create(null, null, null, config);
+ fail("factory create should have failed");
+ } catch (OpenSearchParseException e) {
+ assertThat(e.getMessage(), equalTo("[exclude_fields] field name cannot be null nor empty"));
+ }
+ }
+
+ public void testCreateWithHashMethod() throws Exception {
+ Map config = new HashMap<>();
+ List fields = List.of(randomAlphaOfLength(10));
+ config.put("fields", fields);
+ config.put("hash_method", randomAlphaOfLength(10));
+ try {
+ factory.create(null, null, null, config);
+ fail("factory create should have failed");
+ } catch (OpenSearchParseException e) {
+ assertThat(
+ e.getMessage(),
+ equalTo("[hash_method] hash method must be MD5@2.16.0, SHA-1@2.16.0, SHA-256@2.16.0 or SHA3-256@2.16.0")
+ );
+ }
+ }
+}
diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/FingerprintProcessorTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/FingerprintProcessorTests.java
new file mode 100644
index 0000000000000..67a82f28fb763
--- /dev/null
+++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/FingerprintProcessorTests.java
@@ -0,0 +1,176 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.ingest.common;
+
+import org.opensearch.ingest.IngestDocument;
+import org.opensearch.ingest.Processor;
+import org.opensearch.ingest.RandomDocumentPicks;
+import org.opensearch.test.OpenSearchTestCase;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.stream.Collectors;
+
+import static org.hamcrest.Matchers.equalTo;
+
+public class FingerprintProcessorTests extends OpenSearchTestCase {
+ private final List hashMethods = List.of("MD5@2.16.0", "SHA-1@2.16.0", "SHA-256@2.16.0", "SHA3-256@2.16.0");
+
+ public void testGenerateFingerprint() throws Exception {
+ IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
+ List fields = null;
+ List excludeFields = null;
+ if (randomBoolean()) {
+ fields = new ArrayList<>();
+ for (int i = 0; i < randomIntBetween(1, 10); i++) {
+ fields.add(RandomDocumentPicks.addRandomField(random(), ingestDocument, randomAlphaOfLength(10)));
+ }
+ } else {
+ excludeFields = new ArrayList<>();
+ for (int i = 0; i < randomIntBetween(1, 10); i++) {
+ excludeFields.add(RandomDocumentPicks.addRandomField(random(), ingestDocument, randomAlphaOfLength(10)));
+ }
+ }
+
+ String targetField = "fingerprint";
+ if (randomBoolean()) {
+ targetField = randomAlphaOfLength(10);
+ }
+
+ String hashMethod = randomFrom(hashMethods);
+ Processor processor = createFingerprintProcessor(fields, excludeFields, targetField, hashMethod, false);
+ processor.execute(ingestDocument);
+ assertThat(ingestDocument.hasField(targetField), equalTo(true));
+ }
+
+ public void testCreateFingerprintProcessorFailed() {
+ List<String> fields = new ArrayList<>();
+ if (randomBoolean()) {
+ fields.add(null);
+ } else {
+ fields.add("");
+ }
+ fields.add(randomAlphaOfLength(10));
+
+ assertThrows(
+ "field name in [fields] cannot be null nor empty",
+ IllegalArgumentException.class,
+ () -> createFingerprintProcessor(fields, null, null, randomFrom(hashMethods), false)
+ );
+
+ List<String> excludeFields = new ArrayList<>();
+ if (randomBoolean()) {
+ excludeFields.add(null);
+ } else {
+ excludeFields.add("");
+ }
+ excludeFields.add(randomAlphaOfLength(10));
+
+ assertThrows(
+ "field name in [exclude_fields] cannot be null nor empty",
+ IllegalArgumentException.class,
+ () -> createFingerprintProcessor(null, excludeFields, null, randomFrom(hashMethods), false)
+ );
+
+ assertThrows(
+ "either fields or exclude_fields can be set",
+ IllegalArgumentException.class,
+ () -> createFingerprintProcessor(
+ List.of(randomAlphaOfLength(10)),
+ List.of(randomAlphaOfLength(10)),
+ null,
+ randomFrom(hashMethods),
+ false
+ )
+ );
+
+ assertThrows(
+ "hash method must be MD5@2.16.0, SHA-1@2.16.0, SHA-256@2.16.0 or SHA3-256@2.16.0",
+ IllegalArgumentException.class,
+ () -> createFingerprintProcessor(Collections.emptyList(), null, "fingerprint", randomAlphaOfLength(10), false)
+ );
+ }
+
+ public void testEmptyFieldAndExcludeFields() throws Exception {
+ IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
+ List<String> fields = null;
+ List<String> excludeFields = null;
+ if (randomBoolean()) {
+ fields = new ArrayList<>();
+ } else {
+ excludeFields = new ArrayList<>();
+ }
+ String targetField = "fingerprint";
+ if (randomBoolean()) {
+ targetField = randomAlphaOfLength(10);
+ }
+
+ String hashMethod = randomFrom(hashMethods);
+ Processor processor = createFingerprintProcessor(fields, excludeFields, targetField, hashMethod, false);
+ processor.execute(ingestDocument);
+ assertThat(ingestDocument.hasField(targetField), equalTo(true));
+ }
+
+ public void testIgnoreMissing() throws Exception {
+ IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
+ String nonExistingFieldName = RandomDocumentPicks.randomNonExistingFieldName(random(), ingestDocument);
+ List<String> nonExistingFields = List.of(nonExistingFieldName);
+ Processor processor = createFingerprintProcessor(nonExistingFields, null, "fingerprint", randomFrom(hashMethods), false);
+ assertThrows(
+ "field [" + nonExistingFieldName + "] doesn't exist",
+ IllegalArgumentException.class,
+ () -> processor.execute(ingestDocument)
+ );
+
+ String targetField = "fingerprint";
+ Processor processorWithIgnoreMissing = createFingerprintProcessor(
+ nonExistingFields,
+ null,
+ "fingerprint",
+ randomFrom(hashMethods),
+ true
+ );
+ processorWithIgnoreMissing.execute(ingestDocument);
+ assertThat(ingestDocument.hasField(targetField), equalTo(false));
+ }
+
+ public void testIgnoreMetadataFields() throws Exception {
+ IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
+ List<String> metadataFields = ingestDocument.getMetadata()
+ .keySet()
+ .stream()
+ .map(IngestDocument.Metadata::getFieldName)
+ .collect(Collectors.toList());
+
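+ // including a metadata field in [fields] must not change the fingerprint, since metadata is ignored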
+ String existingFieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, randomAlphaOfLength(10));
+ List<String> fields = List.of(existingFieldName, metadataFields.get(randomIntBetween(0, metadataFields.size() - 1)));
+
+ String targetField = "fingerprint";
+ String algorithm = randomFrom(hashMethods);
+ Processor processor = createFingerprintProcessor(fields, null, targetField, algorithm, false);
+
+ processor.execute(ingestDocument);
+ String fingerprint = ingestDocument.getFieldValue(targetField, String.class);
+
+ processor = createFingerprintProcessor(List.of(existingFieldName), null, targetField, algorithm, false);
+ processor.execute(ingestDocument);
+ assertThat(ingestDocument.getFieldValue(targetField, String.class), equalTo(fingerprint));
+ }
+
+ private FingerprintProcessor createFingerprintProcessor(
+ List<String> fields,
+ List<String> excludeFields,
+ String targetField,
+ String hashMethod,
+ boolean ignoreMissing
+ ) {
+ return new FingerprintProcessor(randomAlphaOfLength(10), null, fields, excludeFields, targetField, hashMethod, ignoreMissing);
+ }
+}
diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/IngestCommonModulePluginTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/IngestCommonModulePluginTests.java
new file mode 100644
index 0000000000000..b0c1e0fdbaa63
--- /dev/null
+++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/IngestCommonModulePluginTests.java
@@ -0,0 +1,109 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.ingest.common;
+
+import org.opensearch.common.settings.Settings;
+import org.opensearch.env.TestEnvironment;
+import org.opensearch.ingest.Processor;
+import org.opensearch.test.OpenSearchTestCase;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Set;
+
+public class IngestCommonModulePluginTests extends OpenSearchTestCase {
+
+ public void testAllowlist() throws IOException {
+ runAllowlistTest(List.of());
+ runAllowlistTest(List.of("date"));
+ runAllowlistTest(List.of("set"));
+ runAllowlistTest(List.of("copy", "date"));
+ runAllowlistTest(List.of("date", "set", "copy"));
+ }
+
+ private void runAllowlistTest(List<String> allowlist) throws IOException {
+ final Settings settings = Settings.builder()
+ .putList(IngestCommonModulePlugin.PROCESSORS_ALLOWLIST_SETTING.getKey(), allowlist)
+ .build();
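+ // only the processors named in the allowlist should be registered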
+ try (IngestCommonModulePlugin plugin = new IngestCommonModulePlugin()) {
+ assertEquals(Set.copyOf(allowlist), plugin.getProcessors(createParameters(settings)).keySet());
+ }
+ }
+
+ public void testAllowlistNotSpecified() throws IOException {
+ final Settings.Builder builder = Settings.builder();
+ builder.remove(IngestCommonModulePlugin.PROCESSORS_ALLOWLIST_SETTING.getKey());
+ final Settings settings = builder.build();
+ try (IngestCommonModulePlugin plugin = new IngestCommonModulePlugin()) {
+ final Set<String> expected = Set.of(
+ "append",
+ "urldecode",
+ "sort",
+ "fail",
+ "trim",
+ "set",
+ "fingerprint",
+ "pipeline",
+ "json",
+ "join",
+ "kv",
+ "bytes",
+ "date",
+ "drop",
+ "community_id",
+ "lowercase",
+ "convert",
+ "copy",
+ "gsub",
+ "dot_expander",
+ "rename",
+ "remove_by_pattern",
+ "html_strip",
+ "remove",
+ "csv",
+ "grok",
+ "date_index_name",
+ "foreach",
+ "script",
+ "dissect",
+ "uppercase",
+ "split"
+ );
+ assertEquals(expected, plugin.getProcessors(createParameters(settings)).keySet());
+ }
+ }
+
+ public void testAllowlistHasNonexistentProcessors() throws IOException {
+ final Settings settings = Settings.builder()
+ .putList(IngestCommonModulePlugin.PROCESSORS_ALLOWLIST_SETTING.getKey(), List.of("threeve"))
+ .build();
+ try (IngestCommonModulePlugin plugin = new IngestCommonModulePlugin()) {
+ IllegalArgumentException e = expectThrows(
+ IllegalArgumentException.class,
+ () -> plugin.getProcessors(createParameters(settings))
+ );
+ assertTrue(e.getMessage(), e.getMessage().contains("threeve"));
+ }
+ }
+
+ private static Processor.Parameters createParameters(Settings settings) {
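+ // these tests only register processors, so everything except the environment can be left null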
+ return new Processor.Parameters(
+ TestEnvironment.newEnvironment(Settings.builder().put(settings).put("path.home", "").build()),
+ null,
+ null,
+ null,
+ () -> 0L,
+ (a, b) -> null,
+ null,
+ null,
+ $ -> {},
+ null
+ );
+ }
+}
diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/10_basic.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/10_basic.yml
index 2a816f0386667..9bf4faf53a999 100644
--- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/10_basic.yml
+++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/10_basic.yml
@@ -86,3 +86,19 @@
- do:
nodes.info: {}
- contains: { nodes.$cluster_manager.ingest.processors: { type: community_id } }
+
+---
+"Fingerprint processor exists":
+ - skip:
+ version: " - 2.15.99"
+ features: contains
+ reason: "fingerprint processor was introduced in 2.16.0 and contains is a newly added assertion"
+ - do:
+ cluster.state: {}
+
+ # Get cluster-manager node id
+ - set: { cluster_manager_node: cluster_manager }
+
+ - do:
+ nodes.info: {}
+ - contains: { nodes.$cluster_manager.ingest.processors: { type: fingerprint } }
diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/190_script_processor.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/190_script_processor.yml
index a66f02d6b6a6d..984c67d39757d 100644
--- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/190_script_processor.yml
+++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/190_script_processor.yml
@@ -278,3 +278,78 @@ teardown:
body: {source_field: "fooBar", foo: {foo: "bar"}}
- match: { error.root_cause.0.type: "illegal_argument_exception" }
- match: { error.root_cause.0.reason: "Iterable object is self-referencing itself (ingest script)" }
+
+---
+"Test painless data types":
+ - do:
+ ingest.put_pipeline:
+ id: "my_pipeline"
+ body: >
+ {
+ "description": "_description",
+ "processors": [
+ {
+ "script" : {
+ "source" : "ctx.byte = (byte)127;ctx.short = (short)32767;ctx.int = (int)2147483647;ctx.long = (long)9223372036854775807L;ctx.float = (float)0.1;ctx.double = (double)0.1;ctx.boolean = (boolean)true"
+ }
+ },
+ {
+ "script" : {
+ "source" : "ctx.other_field = 'other_field'"
+ }
+ }
+ ]
+ }
+ - match: { acknowledged: true }
+
+ - do:
+ index:
+ index: test
+ id: 1
+ pipeline: "my_pipeline"
+ body: {source_field: "FooBar"}
+
+ - do:
+ get:
+ index: test
+ id: 1
+ - match: { _source.byte: 127 }
+ - match: { _source.int: 2147483647 }
+ - match: { _source.long: 9223372036854775807 }
+ - gt: { _source.float: 0.0 }
+ - lt: { _source.float: 0.2 }
+ - gt: { _source.double: 0.0 }
+ - lt: { _source.double: 0.2 }
+ - match: { _source.boolean: true }
+
+---
+"Test char type fails":
+ - do:
+ ingest.put_pipeline:
+ id: "my_pipeline"
+ body: >
+ {
+ "description": "_description",
+ "processors": [
+ {
+ "script" : {
+ "source" : "ctx.char = (char)'a'"
+ }
+ },
+ {
+ "script" : {
+ "source" : "ctx.other_field = 'other_field'"
+ }
+ }
+ ]
+ }
+ - match: { acknowledged: true }
+
+ - do:
+ catch: bad_request
+ index:
+ index: test
+ id: 1
+ pipeline: "my_pipeline"
+ body: {source_field: "FooBar"}
+ - match: { error.root_cause.0.type: "illegal_argument_exception" }
diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/340_fingerprint_processor.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/340_fingerprint_processor.yml
new file mode 100644
index 0000000000000..04568916239f4
--- /dev/null
+++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/340_fingerprint_processor.yml
@@ -0,0 +1,786 @@
+---
+teardown:
+ - do:
+ ingest.delete_pipeline:
+ id: "1"
+ ignore: 404
+
+---
+"Test creat fingerprint processor":
+ - skip:
+ version: " - 2.15.99"
+ reason: "introduced in 2.16.0"
+ - do:
+ catch: /field name cannot be null nor empty/
+ ingest.put_pipeline:
+ id: "1"
+ body: >
+ {
+ "processors": [
+ {
+ "fingerprint" : {
+ "fields": [null]
+ }
+ }
+ ]
+ }
+ - do:
+ catch: /field name cannot be null nor empty/
+ ingest.put_pipeline:
+ id: "1"
+ body: >
+ {
+ "processors": [
+ {
+ "fingerprint" : {
+ "exclude_fields": [""]
+ }
+ }
+ ]
+ }
+ - do:
+ catch: /either fields or exclude\_fields can be set/
+ ingest.put_pipeline:
+ id: "1"
+ body: >
+ {
+ "processors": [
+ {
+ "fingerprint" : {
+ "fields": ["foo"],
+ "exclude_fields": ["bar"]
+ }
+ }
+ ]
+ }
+
+ - do:
+ catch: /hash method must be MD5@2.16.0\, SHA\-1@2.16.0, SHA\-256@2.16.0 or SHA3\-256@2.16.0/
+ ingest.put_pipeline:
+ id: "1"
+ body: >
+ {
+ "processors": [
+ {
+ "fingerprint" : {
+ "fields": ["foo"],
+ "hash_method": "non-existing"
+ }
+ }
+ ]
+ }
+
+ - do:
+ ingest.put_pipeline:
+ id: "1"
+ body: >
+ {
+ "processors": [
+ {
+ "fingerprint" : {
+ "fields" : ["foo"],
+ "target_field" : "fingerprint_field",
+ "hash_method": "SHA-256@2.16.0"
+ }
+ }
+ ]
+ }
+ - match: { acknowledged: true }
+
+ - do:
+ ingest.put_pipeline:
+ id: "1"
+ body: >
+ {
+ "processors": [
+ {
+ "fingerprint" : {
+ }
+ }
+ ]
+ }
+ - match: { acknowledged: true }
+
+---
+"Test fingerprint processor with ignore_missing":
+ - skip:
+ version: " - 2.15.99"
+ reason: "introduced in 2.16.0"
+ - do:
+ ingest.put_pipeline:
+ id: "1"
+ body: >
+ {
+ "processors": [
+ {
+ "fingerprint" : {
+ "fields" : ["foo"]
+ }
+ }
+ ]
+ }
+ - match: { acknowledged: true }
+
+ - do:
+ catch: /field \[foo\] doesn't exist/
+ index:
+ index: test
+ id: 1
+ pipeline: "1"
+ body: {
+ bar: "bar"
+ }
+
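+  # with ignore_missing enabled, the missing "bar" field is skipped and only "foo" contributes to the fingerprint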
+ - do:
+ ingest.put_pipeline:
+ id: "1"
+ body: >
+ {
+ "processors": [
+ {
+ "fingerprint" : {
+ "fields" : ["foo", "bar"],
+ "ignore_missing" : true
+ }
+ }
+ ]
+ }
+ - match: { acknowledged: true }
+
+ - do:
+ index:
+ index: test
+ id: 1
+ pipeline: "1"
+ body: {
+ foo: "foo"
+ }
+ - do:
+ get:
+ index: test
+ id: 1
+ - match: { _source.fingerprint: "SHA-1@2.16.0:YqpBTuHXCPV04j/7lGfWeUl8Tyo=" }
+
+---
+"Test fingerprint processor with custom target field":
+ - skip:
+ version: " - 2.15.99"
+ reason: "introduced in 2.16.0"
+ - do:
+ ingest.put_pipeline:
+ id: "1"
+ body: >
+ {
+ "processors": [
+ {
+ "fingerprint" : {
+ "fields" : ["foo"],
+ "target_field" : "target"
+ }
+ }
+ ]
+ }
+ - match: { acknowledged: true }
+
+ - do:
+ index:
+ index: test
+ id: 1
+ pipeline: "1"
+ body: {
+ foo: "foo"
+ }
+ - do:
+ get:
+ index: test
+ id: 1
+ - match: { _source.target: "SHA-1@2.16.0:YqpBTuHXCPV04j/7lGfWeUl8Tyo=" }
+
+---
+"Test fingerprint processor with non-primitive fields and SHA-1":
+ - skip:
+ version: " - 2.15.99"
+ reason: "introduced in 2.16.0"
+ - do:
+ ingest.put_pipeline:
+ id: "1"
+ body: >
+ {
+ "processors": [
+ {
+ "fingerprint" : {
+ "fields" : ["foo", "bar", "zoo"]
+ }
+ }
+ ]
+ }
+ - match: { acknowledged: true }
+
+ - do:
+ index:
+ index: test
+ id: 1
+ pipeline: "1"
+ body: {
+ foo: [1, 2, 3],
+ bar: {
+ field: {
+ innerField: "inner"
+ }
+ },
+ zoo: null
+ }
+ - do:
+ get:
+ index: test
+ id: 1
+ - match: { _source.fingerprint: "SHA-1@2.16.0:KYJ4pc4ouFmAbgZGp7CfNoykZeo=" }
+
+ - do:
+ ingest.put_pipeline:
+ id: "1"
+ body: >
+ {
+ "processors": [
+ {
+ "fingerprint" : {
+ }
+ }
+ ]
+ }
+ - match: { acknowledged: true }
+
+ - do:
+ index:
+ index: test
+ id: 2
+ pipeline: "1"
+ body: {
+ foo: [1, 2, 3],
+ bar: {
+ field: {
+ innerField: "inner"
+ }
+ },
+ zoo: null
+ }
+ - do:
+ get:
+ index: test
+ id: 2
+ - match: { _source.fingerprint: "SHA-1@2.16.0:KYJ4pc4ouFmAbgZGp7CfNoykZeo=" }
+
+ - do:
+ ingest.put_pipeline:
+ id: "1"
+ body: >
+ {
+ "processors": [
+ {
+ "fingerprint" : {
+ "fields":[]
+ }
+ }
+ ]
+ }
+ - match: { acknowledged: true }
+
+ - do:
+ index:
+ index: test
+ id: 3
+ pipeline: "1"
+ body: {
+ foo: [1, 2, 3],
+ bar: {
+ field: {
+ innerField: "inner"
+ }
+ },
+ zoo: null
+ }
+ - do:
+ get:
+ index: test
+ id: 3
+ - match: { _source.fingerprint: "SHA-1@2.16.0:KYJ4pc4ouFmAbgZGp7CfNoykZeo=" }
+
+ - do:
+ ingest.put_pipeline:
+ id: "1"
+ body: >
+ {
+ "processors": [
+ {
+ "fingerprint" : {
+ "exclude_fields":[]
+ }
+ }
+ ]
+ }
+ - match: { acknowledged: true }
+
+ - do:
+ index:
+ index: test
+ id: 4
+ pipeline: "1"
+ body: {
+ foo: [1, 2, 3],
+ bar: {
+ field: {
+ innerField: "inner"
+ }
+ },
+ zoo: null
+ }
+ - do:
+ get:
+ index: test
+ id: 4
+ - match: { _source.fingerprint: "SHA-1@2.16.0:KYJ4pc4ouFmAbgZGp7CfNoykZeo=" }
+
+---
+"Test fingerprint processor with non-primitive fields and MD5":
+ - skip:
+ version: " - 2.15.99"
+ reason: "introduced in 2.16.0"
+ - do:
+ ingest.put_pipeline:
+ id: "1"
+ body: >
+ {
+ "processors": [
+ {
+ "fingerprint" : {
+ "fields" : ["foo", "bar", "zoo"],
+ "hash_method" : "MD5@2.16.0"
+ }
+ }
+ ]
+ }
+ - match: { acknowledged: true }
+
+ - do:
+ index:
+ index: test
+ id: 1
+ pipeline: "1"
+ body: {
+ foo: [1, 2, 3],
+ bar: {
+ field: {
+ innerField: "inner"
+ }
+ },
+ zoo: null
+ }
+ - do:
+ get:
+ index: test
+ id: 1
+ - match: { _source.fingerprint: "MD5@2.16.0:NovpcJ+MYHzEZtCewcDPTQ==" }
+
+ - do:
+ ingest.put_pipeline:
+ id: "1"
+ body: >
+ {
+ "processors": [
+ {
+ "fingerprint" : {
+ "hash_method" : "MD5@2.16.0"
+ }
+ }
+ ]
+ }
+ - match: { acknowledged: true }
+
+ - do:
+ index:
+ index: test
+ id: 2
+ pipeline: "1"
+ body: {
+ foo: [1, 2, 3],
+ bar: {
+ field: {
+ innerField: "inner"
+ }
+ },
+ zoo: null
+ }
+ - do:
+ get:
+ index: test
+ id: 2
+ - match: { _source.fingerprint: "MD5@2.16.0:NovpcJ+MYHzEZtCewcDPTQ==" }
+
+ - do:
+ ingest.put_pipeline:
+ id: "1"
+ body: >
+ {
+ "processors": [
+ {
+ "fingerprint" : {
+ "fields":[],
+ "hash_method" : "MD5@2.16.0"
+ }
+ }
+ ]
+ }
+ - match: { acknowledged: true }
+
+ - do:
+ index:
+ index: test
+ id: 3
+ pipeline: "1"
+ body: {
+ foo: [1, 2, 3],
+ bar: {
+ field: {
+ innerField: "inner"
+ }
+ },
+ zoo: null
+ }
+ - do:
+ get:
+ index: test
+ id: 3
+ - match: { _source.fingerprint: "MD5@2.16.0:NovpcJ+MYHzEZtCewcDPTQ==" }
+
+ - do:
+ ingest.put_pipeline:
+ id: "1"
+ body: >
+ {
+ "processors": [
+ {
+ "fingerprint" : {
+ "exclude_fields":[],
+ "hash_method" : "MD5@2.16.0"
+ }
+ }
+ ]
+ }
+ - match: { acknowledged: true }
+
+ - do:
+ index:
+ index: test
+ id: 4
+ pipeline: "1"
+ body: {
+ foo: [1, 2, 3],
+ bar: {
+ field: {
+ innerField: "inner"
+ }
+ },
+ zoo: null
+ }
+ - do:
+ get:
+ index: test
+ id: 4
+ - match: { _source.fingerprint: "MD5@2.16.0:NovpcJ+MYHzEZtCewcDPTQ==" }
+
+
+---
+"Test fingerprint processor with non-primitive fields and SHA-256":
+ - skip:
+ version: " - 2.15.99"
+ reason: "introduced in 2.16.0"
+ - do:
+ ingest.put_pipeline:
+ id: "1"
+ body: >
+ {
+ "processors": [
+ {
+ "fingerprint" : {
+ "fields" : ["foo", "bar", "zoo"],
+ "hash_method" : "SHA-256@2.16.0"
+ }
+ }
+ ]
+ }
+ - match: { acknowledged: true }
+
+ - do:
+ index:
+ index: test
+ id: 1
+ pipeline: "1"
+ body: {
+ foo: [1, 2, 3],
+ bar: {
+ field: {
+ innerField: "inner"
+ }
+ },
+ zoo: null
+ }
+ - do:
+ get:
+ index: test
+ id: 1
+ - match: { _source.fingerprint: "SHA-256@2.16.0:Sdlg0BodM3n1my4BvaTfJCPrvHxfrxno0kCLfMaC+XY=" }
+
+ - do:
+ ingest.put_pipeline:
+ id: "1"
+ body: >
+ {
+ "processors": [
+ {
+ "fingerprint" : {
+ "hash_method" : "SHA-256@2.16.0"
+ }
+ }
+ ]
+ }
+ - match: { acknowledged: true }
+
+ - do:
+ index:
+ index: test
+ id: 2
+ pipeline: "1"
+ body: {
+ foo: [1, 2, 3],
+ bar: {
+ field: {
+ innerField: "inner"
+ }
+ },
+ zoo: null
+ }
+ - do:
+ get:
+ index: test
+ id: 2
+ - match: { _source.fingerprint: "SHA-256@2.16.0:Sdlg0BodM3n1my4BvaTfJCPrvHxfrxno0kCLfMaC+XY=" }
+
+ - do:
+ ingest.put_pipeline:
+ id: "1"
+ body: >
+ {
+ "processors": [
+ {
+ "fingerprint" : {
+ "fields":[],
+ "hash_method" : "SHA-256@2.16.0"
+ }
+ }
+ ]
+ }
+ - match: { acknowledged: true }
+
+ - do:
+ index:
+ index: test
+ id: 3
+ pipeline: "1"
+ body: {
+ foo: [1, 2, 3],
+ bar: {
+ field: {
+ innerField: "inner"
+ }
+ },
+ zoo: null
+ }
+ - do:
+ get:
+ index: test
+ id: 3
+ - match: { _source.fingerprint: "SHA-256@2.16.0:Sdlg0BodM3n1my4BvaTfJCPrvHxfrxno0kCLfMaC+XY=" }
+
+ - do:
+ ingest.put_pipeline:
+ id: "1"
+ body: >
+ {
+ "processors": [
+ {
+ "fingerprint" : {
+ "exclude_fields":[],
+ "hash_method" : "SHA-256@2.16.0"
+ }
+ }
+ ]
+ }
+ - match: { acknowledged: true }
+
+ - do:
+ index:
+ index: test
+ id: 4
+ pipeline: "1"
+ body: {
+ foo: [1, 2, 3],
+ bar: {
+ field: {
+ innerField: "inner"
+ }
+ },
+ zoo: null
+ }
+ - do:
+ get:
+ index: test
+ id: 4
+ - match: { _source.fingerprint: "SHA-256@2.16.0:Sdlg0BodM3n1my4BvaTfJCPrvHxfrxno0kCLfMaC+XY=" }
+
+---
+"Test fingerprint processor with non-primitive fields and SHA3-256":
+ - skip:
+ version: " - 2.15.99"
+ reason: "introduced in 2.16.0"
+ - do:
+ ingest.put_pipeline:
+ id: "1"
+ body: >
+ {
+ "processors": [
+ {
+ "fingerprint" : {
+ "fields" : ["foo", "bar", "zoo"],
+ "hash_method" : "SHA3-256@2.16.0"
+ }
+ }
+ ]
+ }
+ - match: { acknowledged: true }
+
+ - do:
+ index:
+ index: test
+ id: 1
+ pipeline: "1"
+ body: {
+ foo: [1, 2, 3],
+ bar: {
+ field: {
+ innerField: "inner"
+ }
+ },
+ zoo: null
+ }
+ - do:
+ get:
+ index: test
+ id: 1
+ - match: { _source.fingerprint: "SHA3-256@2.16.0:+GZCkMLEMkUA/4IrEZEZZYsVMbZdpJ92ppN3wUsFYOI=" }
+
+ - do:
+ ingest.put_pipeline:
+ id: "1"
+ body: >
+ {
+ "processors": [
+ {
+ "fingerprint" : {
+ "hash_method" : "SHA3-256@2.16.0"
+ }
+ }
+ ]
+ }
+ - match: { acknowledged: true }
+
+ - do:
+ index:
+ index: test
+ id: 2
+ pipeline: "1"
+ body: {
+ foo: [1, 2, 3],
+ bar: {
+ field: {
+ innerField: "inner"
+ }
+ },
+ zoo: null
+ }
+ - do:
+ get:
+ index: test
+ id: 2
+ - match: { _source.fingerprint: "SHA3-256@2.16.0:+GZCkMLEMkUA/4IrEZEZZYsVMbZdpJ92ppN3wUsFYOI=" }
+
+ - do:
+ ingest.put_pipeline:
+ id: "1"
+ body: >
+ {
+ "processors": [
+ {
+ "fingerprint" : {
+ "fields":[],
+ "hash_method" : "SHA3-256@2.16.0"
+ }
+ }
+ ]
+ }
+ - match: { acknowledged: true }
+
+ - do:
+ index:
+ index: test
+ id: 3
+ pipeline: "1"
+ body: {
+ foo: [1, 2, 3],
+ bar: {
+ field: {
+ innerField: "inner"
+ }
+ },
+ zoo: null
+ }
+ - do:
+ get:
+ index: test
+ id: 3
+ - match: { _source.fingerprint: "SHA3-256@2.16.0:+GZCkMLEMkUA/4IrEZEZZYsVMbZdpJ92ppN3wUsFYOI=" }
+
+ - do:
+ ingest.put_pipeline:
+ id: "1"
+ body: >
+ {
+ "processors": [
+ {
+ "fingerprint" : {
+ "exclude_fields":[],
+ "hash_method" : "SHA3-256@2.16.0"
+ }
+ }
+ ]
+ }
+ - match: { acknowledged: true }
+
+ - do:
+ index:
+ index: test
+ id: 4
+ pipeline: "1"
+ body: {
+ foo: [1, 2, 3],
+ bar: {
+ field: {
+ innerField: "inner"
+ }
+ },
+ zoo: null
+ }
+ - do:
+ get:
+ index: test
+ id: 4
+ - match: { _source.fingerprint: "SHA3-256@2.16.0:+GZCkMLEMkUA/4IrEZEZZYsVMbZdpJ92ppN3wUsFYOI=" }
diff --git a/modules/ingest-geoip/licenses/jackson-annotations-2.17.0.jar.sha1 b/modules/ingest-geoip/licenses/jackson-annotations-2.17.0.jar.sha1
deleted file mode 100644
index 66bf7ed6ecce8..0000000000000
--- a/modules/ingest-geoip/licenses/jackson-annotations-2.17.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-880a742337010da4c851f843d8cac150e22dff9f
\ No newline at end of file
diff --git a/modules/ingest-geoip/licenses/jackson-annotations-2.17.2.jar.sha1 b/modules/ingest-geoip/licenses/jackson-annotations-2.17.2.jar.sha1
new file mode 100644
index 0000000000000..411e1d62459fd
--- /dev/null
+++ b/modules/ingest-geoip/licenses/jackson-annotations-2.17.2.jar.sha1
@@ -0,0 +1 @@
+147b7b9412ffff24339f8aba080b292448e08698
\ No newline at end of file
diff --git a/modules/ingest-geoip/licenses/jackson-databind-2.17.0.jar.sha1 b/modules/ingest-geoip/licenses/jackson-databind-2.17.0.jar.sha1
deleted file mode 100644
index c0e4bb0c56849..0000000000000
--- a/modules/ingest-geoip/licenses/jackson-databind-2.17.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-7173e9e1d4bc6d7ca03bc4eeedcd548b8b580b34
\ No newline at end of file
diff --git a/modules/ingest-geoip/licenses/jackson-databind-2.17.2.jar.sha1 b/modules/ingest-geoip/licenses/jackson-databind-2.17.2.jar.sha1
new file mode 100644
index 0000000000000..f2b4dbdc5decb
--- /dev/null
+++ b/modules/ingest-geoip/licenses/jackson-databind-2.17.2.jar.sha1
@@ -0,0 +1 @@
+e6deb029e5901e027c129341fac39e515066b68c
\ No newline at end of file
diff --git a/modules/lang-expression/licenses/lucene-expressions-9.11.0-snapshot-fb97840.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-9.11.0-snapshot-fb97840.jar.sha1
deleted file mode 100644
index f4da6e39aeeb8..0000000000000
--- a/modules/lang-expression/licenses/lucene-expressions-9.11.0-snapshot-fb97840.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-ab914b48665f484315b78e4b6787aa42f5966bb6
\ No newline at end of file
diff --git a/modules/lang-expression/licenses/lucene-expressions-9.12.0-snapshot-847316d.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-9.12.0-snapshot-847316d.jar.sha1
new file mode 100644
index 0000000000000..83dd8e657bdd5
--- /dev/null
+++ b/modules/lang-expression/licenses/lucene-expressions-9.12.0-snapshot-847316d.jar.sha1
@@ -0,0 +1 @@
+b866103bbaca4141c152deca9252bd137026dafc
\ No newline at end of file
diff --git a/modules/lang-mustache/build.gradle b/modules/lang-mustache/build.gradle
index bcf5c07ea8c64..a836124f94b41 100644
--- a/modules/lang-mustache/build.gradle
+++ b/modules/lang-mustache/build.gradle
@@ -38,7 +38,7 @@ opensearchplugin {
}
dependencies {
- api "com.github.spullara.mustache.java:compiler:0.9.13"
+ api "com.github.spullara.mustache.java:compiler:0.9.14"
}
restResources {
diff --git a/modules/lang-mustache/licenses/compiler-0.9.13.jar.sha1 b/modules/lang-mustache/licenses/compiler-0.9.13.jar.sha1
deleted file mode 100644
index 70d53aac260eb..0000000000000
--- a/modules/lang-mustache/licenses/compiler-0.9.13.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-60666500a7dce7a5d3e17c09b46ea6f037192bd5
\ No newline at end of file
diff --git a/modules/lang-mustache/licenses/compiler-0.9.14.jar.sha1 b/modules/lang-mustache/licenses/compiler-0.9.14.jar.sha1
new file mode 100644
index 0000000000000..29069ac90817a
--- /dev/null
+++ b/modules/lang-mustache/licenses/compiler-0.9.14.jar.sha1
@@ -0,0 +1 @@
+e6df8b5aabb80d6eb6d8fef312a56d66b7659ba6
\ No newline at end of file
diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/WhenThingsGoWrongTests.java b/modules/lang-painless/src/test/java/org/opensearch/painless/WhenThingsGoWrongTests.java
index 0d498e16154c8..3d48e96117a1c 100644
--- a/modules/lang-painless/src/test/java/org/opensearch/painless/WhenThingsGoWrongTests.java
+++ b/modules/lang-painless/src/test/java/org/opensearch/painless/WhenThingsGoWrongTests.java
@@ -354,6 +354,9 @@ public void testInvalidAssignment() {
assertEquals(iae.getMessage(), "invalid assignment: cannot assign a value to addition operation [+]");
iae = expectScriptThrows(IllegalArgumentException.class, () -> exec("Double.x() = 1;"));
assertEquals(iae.getMessage(), "invalid assignment: cannot assign a value to method call [x/0]");
+
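+ // writes to the script's params map are rejected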
+ expectScriptThrows(UnsupportedOperationException.class, () -> exec("params['modifyingParamsMap'] = 2;"));
+ expectScriptThrows(UnsupportedOperationException.class, () -> exec("params.modifyingParamsMap = 2;"));
}
public void testCannotResolveSymbol() {
diff --git a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/17_update_error.yml b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/17_update_error.yml
index 3d6db1b781caf..fdbc6de37e3ea 100644
--- a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/17_update_error.yml
+++ b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/17_update_error.yml
@@ -13,3 +13,50 @@
- match: { error.root_cause.0.position.offset: 13 }
- match: { error.root_cause.0.position.start: 0 }
- match: { error.root_cause.0.position.end: 38 }
+
+---
+"Test modifying params map from script leads to exception":
+ - skip:
+ features: "node_selector"
+
+ - do:
+ put_script:
+ id: "except"
+ body: {"script": {"lang": "painless", "source": "params.that = 3"}}
+
+ - do:
+ indices.create:
+ index: "test"
+ body:
+ settings:
+ index:
+ number_of_shards: 1
+ number_of_replicas: 0
+ mappings:
+ properties:
+ this:
+ type: "integer"
+ that:
+ type: "integer"
+
+ - do:
+ index:
+ index: "test"
+ id: 1
+ body: {"this": 1, "that": 2}
+
+ - do:
+ catch: /unsupported_operation_exception/
+ node_selector:
+ version: "2.15.0 - "
+ update:
+ index: "test"
+ id: 1
+ body:
+ script:
+ id: "except"
+ params: {"this": 2}
+
+ - match: { error.caused_by.position.offset: 6 }
+ - match: { error.caused_by.position.start: 0 }
+ - match: { error.caused_by.position.end: 15 }
diff --git a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/derived_fields/10_derived_field_index_mapping_definition.yml b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/derived_fields/10_derived_field_index_mapping_definition.yml
new file mode 100644
index 0000000000000..4f700c3b83e8f
--- /dev/null
+++ b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/derived_fields/10_derived_field_index_mapping_definition.yml
@@ -0,0 +1,421 @@
+"Test derived_field supported type using index mapping definition":
+ - skip:
+ version: " - 2.14.99"
+ reason: "derived_field feature was added in 2.15"
+
+ - do:
+ indices.create:
+ index: test
+ body:
+ mappings:
+ properties:
+ text:
+ type: text
+ keyword:
+ type: keyword
+ long:
+ type: long
+ float:
+ type: float
+ double:
+ type: double
+ date:
+ type: date
+ geo:
+ type: geo_point
+ ip:
+ type: ip
+ boolean:
+ type: boolean
+ array_of_long:
+ type: long
+ json_field:
+ type: text
+ derived:
+ derived_text:
+ type: text
+ script: "emit(params._source[\"text\"])"
+ derived_text_prefilter_field:
+ type: text
+ script: "emit(params._source[\"text\"])"
+ prefilter_field: "text"
+ derived_keyword:
+ type: keyword
+ script: "emit(params._source[\"keyword\"])"
+ derived_long:
+ type: long
+ script: "emit(params._source[\"long\"])"
+ derived_float:
+ type: float
+ script: "emit(params._source[\"float\"])"
+ derived_double:
+ type: double
+ script: "emit(params._source[\"double\"])"
+ derived_date:
+ type: date
+ script: "emit(ZonedDateTime.parse(params._source[\"date\"]).toInstant().toEpochMilli())"
+ derived_geo:
+ type: geo_point
+ script: "emit(params._source[\"geo\"][0], params._source[\"geo\"][1])"
+ derived_ip:
+ type: ip
+ script: "emit(params._source[\"ip\"])"
+ derived_boolean:
+ type: boolean
+ script: "emit(params._source[\"boolean\"])"
+ derived_array_of_long:
+ type: long
+ script: "emit(params._source[\"array_of_long\"][0]);emit(params._source[\"array_of_long\"][1]);"
+ derived_object:
+ type: object
+ properties:
+ keyword: keyword
+ script: "emit(params._source[\"json_field\"])"
+ prefilter_field: "json_field"
+
+ - do:
+ index:
+ index: test
+ id: 1
+ body: {
+ text: "peter piper",
+ keyword: "foo",
+ long: 1,
+ float: 1.0,
+ double: 1.0,
+ date: "2017-01-01T00:00:00Z",
+ geo: [0.0, 20.0],
+ ip: "192.168.0.1",
+ boolean: true,
+ array_of_long: [1, 2],
+ json_field: "{\"keyword\":\"json_keyword1\",\"long\":10,\"float\":10.0,\"double\":10.0,\"date\":\"2021-01-01T00:00:00Z\",\"ip\":\"10.0.0.1\",\"boolean\":true, \"array_of_long\": [1, 2]}}"
+ }
+
+ - do:
+ index:
+ index: test
+ id: 2
+ body: {
+ text: "piper picked a peck",
+ keyword: "bar",
+ long: 2,
+ float: 2.0,
+ double: 2.0,
+ date: "2017-01-02T00:00:00Z",
+ geo: [10.0, 30.0],
+ ip: "192.168.0.2",
+ boolean: false,
+ array_of_long: [2, 3],
+ json_field: "{\"keyword\":\"json_keyword2\",\"long\":20,\"float\":20.0,\"double\":20.0,\"date\":\"2021-02-01T00:00:00Z\",\"ip\":\"10.0.0.2\",\"boolean\":false, \"array_of_long\": [2, 3]}}"
+ }
+
+ - do:
+ index:
+ index: test
+ id: 3
+ body: {
+ text: "peck of pickled peppers",
+ keyword: "baz",
+ long: -3,
+ float: -3.0,
+ double: -3.0,
+ date: "2017-01-03T00:00:00Z",
+ geo: [20.0, 40.0],
+ ip: "192.168.0.3",
+ boolean: true,
+ array_of_long: [3, 4],
+ json_field: "{\"keyword\":\"json_keyword3\",\"long\":30,\"float\":30.0,\"double\":30.0,\"date\":\"2021-03-01T00:00:00Z\",\"ip\":\"10.0.0.3\",\"boolean\":true, \"array_of_long\": [3, 4]}"
+ }
+
+ - do:
+ index:
+ index: test
+ id: 4
+ body: {
+ text: "pickled peppers",
+ keyword: "qux",
+ long: 4,
+ float: 4.0,
+ double: 4.0,
+ date: "2017-01-04T00:00:00Z",
+ geo: [30.0, 50.0],
+ ip: "192.168.0.4",
+ boolean: false,
+ array_of_long: [4, 5],
+ json_field: "{\"keyword\":\"json_keyword4\",\"long\":40,\"float\":40.0,\"double\":40.0,\"date\":\"2021-04-01T00:00:00Z\",\"ip\":\"10.0.0.4\",\"boolean\":false, \"array_of_long\": [4, 5]}"
+ }
+
+ - do:
+ index:
+ index: test
+ id: 5
+ body: {
+ text: "peppers",
+ keyword: "quux",
+ long: 5,
+ float: 5.0,
+ double: 5.0,
+ date: "2017-01-05T00:00:00Z",
+ geo: [40.0, 60.0],
+ ip: "192.168.0.5",
+ boolean: true,
+ array_of_long: [5, 6],
+ json_field: "{\"keyword\":\"json_keyword5\",\"long\":50,\"float\":50.0,\"double\":50.0,\"date\":\"2021-05-01T00:00:00Z\",\"ip\":\"10.0.0.5\",\"boolean\":true, \"array_of_long\": [5, 6]}"
+ }
+
+ - do:
+ indices.refresh:
+ index: [test]
+
+ # Tests for derived_text
+ - do:
+ search:
+ rest_total_hits_as_int: true
+ index: test
+ body:
+ query:
+ match_phrase:
+ derived_text:
+ query: "peter piper"
+
+ - match: { hits.total: 1 }
+
+ # Tests for derived_keyword
+ - do:
+ search:
+ rest_total_hits_as_int: true
+ index: test
+ body:
+ query:
+ term:
+ derived_keyword:
+ value: "foo"
+
+ - match: { hits.total: 1 }
+
+ # Tests for derived_long
+ - do:
+ search:
+ rest_total_hits_as_int: true
+ index: test
+ body:
+ query:
+ range:
+ derived_long:
+ gte: 1
+
+ - match: { hits.total: 4 }
+
+ # Tests for derived_float
+ - do:
+ search:
+ rest_total_hits_as_int: true
+ index: test
+ body:
+ query:
+ range:
+ derived_float:
+ gte: 1.0
+
+ - match: { hits.total: 4 }
+
+ # Tests for derived_double
+ - do:
+ search:
+ rest_total_hits_as_int: true
+ index: test
+ body:
+ query:
+ range:
+ derived_double:
+ gte: 1.0
+
+ - match: { hits.total: 4 }
+
+ # Tests for derived_date
+ - do:
+ search:
+ rest_total_hits_as_int: true
+ index: test
+ body:
+ query:
+ range:
+ derived_date:
+ gte: "2017-01-02"
+
+ - match: { hits.total: 4 }
+
+ # Tests for derived_geo
+ - do:
+ search:
+ rest_total_hits_as_int: true
+ index: test
+ body:
+ query:
+ geo_distance:
+ distance: "20km"
+ derived_geo:
+ lat: 0.0
+ lon: 20.0
+
+ - match: { hits.total: 1 }
+
+ # Tests for derived_ip
+ - do:
+ search:
+ rest_total_hits_as_int: true
+ index: test
+ body:
+ query:
+ term:
+ derived_ip:
+ value: "192.168.0.1"
+
+ - match: { hits.total: 1 }
+
+ # Tests for derived_boolean
+ - do:
+ search:
+ rest_total_hits_as_int: true
+ index: test
+ body:
+ query:
+ term:
+ derived_boolean:
+ value: true
+
+ - match: { hits.total: 3 }
+
+ # Tests for derived_array_of_long
+ - do:
+ search:
+ rest_total_hits_as_int: true
+ index: test
+ body:
+ query:
+ range:
+ derived_array_of_long:
+ gte: 3
+
+ - match: { hits.total: 4 }
+
+ # Tests for derived_object.keyword
+ - do:
+ search:
+ rest_total_hits_as_int: true
+ index: test
+ body:
+ query:
+ term:
+ derived_object.keyword:
+ value: "json_keyword1"
+
+ - match: { hits.total: 1 }
+
+ # Tests for derived_object.long
+ - do:
+ search:
+ rest_total_hits_as_int: true
+ index: test
+ body:
+ query:
+ range:
+ derived_object.long:
+ gte: 11
+
+ - match: { hits.total: 4 }
+
+ # Tests for derived_object.float
+ - do:
+ search:
+ rest_total_hits_as_int: true
+ index: test
+ body:
+ query:
+ range:
+ derived_object.float:
+ gte: 10.1
+
+ - match: { hits.total: 4 }
+
+ # Tests for derived_object.double
+ - do:
+ search:
+ rest_total_hits_as_int: true
+ index: test
+ body:
+ query:
+ range:
+ derived_object.double:
+ gte: 10.1
+
+ - match: { hits.total: 4 }
+
+ # Tests for derived_object.date
+ - do:
+ search:
+ rest_total_hits_as_int: true
+ index: test
+ body:
+ query:
+ range:
+ derived_object.date:
+ gte: "2021-03-01"
+
+ - match: { hits.total: 3 }
+
+ # Tests for derived_object.ip
+ - do:
+ search:
+ rest_total_hits_as_int: true
+ index: test
+ body:
+ query:
+ term:
+ derived_object.ip:
+ value: "10.0.0.1"
+
+ - match: { hits.total: 1 }
+
+ # Tests for derived_object.boolean
+ - do:
+ search:
+ rest_total_hits_as_int: true
+ index: test
+ body:
+ query:
+ term:
+ derived_object.boolean:
+ value: true
+
+ - match: { hits.total: 3 }
+
+ # Tests for derived_object.array_of_long
+ - do:
+ search:
+ rest_total_hits_as_int: true
+ index: test
+ body:
+ query:
+ range:
+ derived_object.array_of_long:
+ gte: 3
+
+ - match: { hits.total: 4 }
+
+ # Tests for query string
+ - do:
+ search:
+ rest_total_hits_as_int: true
+ index: test
+ q: "derived_keyword:foo"
+
+ - match: { hits.total: 1 }
+
+ - do:
+ search:
+ rest_total_hits_as_int: true
+ index: test
+ q: derived_object.keyword:json_keyword1
+
+ - match: { hits.total: 1 }
diff --git a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/derived_fields/20_derived_field_put_mapping.yml b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/derived_fields/20_derived_field_put_mapping.yml
new file mode 100644
index 0000000000000..0370fd94e8548
--- /dev/null
+++ b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/derived_fields/20_derived_field_put_mapping.yml
@@ -0,0 +1,123 @@
+---
+"Test create and update mapping for derived fields":
+ - skip:
+ version: " - 2.14.99"
+ reason: "derived_field feature was added in 2.15"
+ - do:
+ indices.create:
+ index: test_index
+
+ - do:
+ indices.put_mapping:
+ index: test_index
+ body:
+ properties:
+ text:
+ type: text
+ json_field:
+ type: text
+ derived:
+ derived_text:
+ type: text
+ script: "emit(params._source[\"text\"])"
+ derived_text_prefilter_field:
+ type: keyword
+ script: "emit(params._source[\"text\"])"
+ prefilter_field: "text"
+ derived_date:
+ type: date
+ script: "emit(params._source[\"keyword\"])"
+ derived_object:
+ type: object
+ properties:
+ keyword: keyword
+ script: "emit(params._source[\"json_field\"])"
+ prefilter_field: "json_field"
+
+ - do:
+ indices.get_mapping:
+ index: test_index
+
+ - match: {test_index.mappings.derived.derived_text.type: text}
+ - match: {test_index.mappings.derived.derived_text_prefilter_field.type: keyword}
+ - match: {test_index.mappings.derived.derived_text_prefilter_field.prefilter_field: text}
+ - match: {test_index.mappings.derived.derived_date.type: date}
+ - match: {test_index.mappings.derived.derived_object.type: object}
+ - match: {test_index.mappings.derived.derived_object.properties.keyword: keyword}
+ - match: {test_index.mappings.derived.derived_object.prefilter_field: json_field}
+
+
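+  # derived field definitions can be updated in place; the mapping below changes their types and options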
+ - do:
+ indices.put_mapping:
+ index: test_index
+ body:
+ properties:
+ text:
+ type: text
+ json_field:
+ type: text
+ derived:
+ derived_text:
+ type: keyword
+ script: "emit(params._source[\"text\"])"
+ derived_text_prefilter_field:
+ type: text
+ script: "emit(params._source[\"text\"])"
+ prefilter_field: "text"
+ derived_date:
+ type: keyword
+ script: "emit(params._source[\"keyword\"])"
+ derived_object:
+ type: object
+ properties:
+ keyword: text
+ script: "emit(params._source[\"text\"])"
+ prefilter_field: "text"
+ format: "dd-MM-yyyy"
+ ignore_malformed: true
+
+ - do:
+ indices.get_mapping:
+ index: test_index
+
+ - match: {test_index.mappings.derived.derived_text.type: keyword}
+ - match: {test_index.mappings.derived.derived_text_prefilter_field.type: text}
+ - match: {test_index.mappings.derived.derived_text_prefilter_field.prefilter_field: text}
+ - match: {test_index.mappings.derived.derived_date.type: keyword}
+ - match: {test_index.mappings.derived.derived_object.type: object}
+ - match: {test_index.mappings.derived.derived_object.properties.keyword: text}
+ - match: {test_index.mappings.derived.derived_object.prefilter_field: text}
+ - match: {test_index.mappings.derived.derived_object.format: "dd-MM-yyyy"}
+ - match: {test_index.mappings.derived.derived_object.ignore_malformed: true}
+
+
+ - do:
+ indices.put_mapping:
+ index: test_index
+ body:
+ properties:
+ text:
+ type: text
+ json_field:
+ type: text
+ derived:
+ derived_object:
+ type: object
+ properties:
+ keyword: keyword
+ script: "emit(params._source[\"json_field\"])"
+ prefilter_field: "json_field"
+ ignore_malformed: false
+
+ - do:
+ indices.get_mapping:
+ index: test_index
+
+ - match: {test_index.mappings.derived.derived_text.type: keyword}
+ - match: {test_index.mappings.derived.derived_text_prefilter_field.type: text}
+ - match: {test_index.mappings.derived.derived_text_prefilter_field.prefilter_field: text}
+ - match: {test_index.mappings.derived.derived_date.type: keyword}
+ - match: {test_index.mappings.derived.derived_object.type: object}
+ - match: {test_index.mappings.derived.derived_object.properties.keyword: keyword}
+ - match: {test_index.mappings.derived.derived_object.prefilter_field: json_field}
+ - is_false: test_index.mappings.derived.derived_object.ignore_malformed
diff --git a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/derived_fields/30_derived_field_search_definition.yml b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/derived_fields/30_derived_field_search_definition.yml
new file mode 100644
index 0000000000000..bb619dce63010
--- /dev/null
+++ b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/derived_fields/30_derived_field_search_definition.yml
@@ -0,0 +1,489 @@
+"Test derived_field supported type using search definition":
+ - skip:
+ version: " - 2.14.99"
+ reason: "derived_field feature was added in 2.15"
+
+ - do:
+ indices.create:
+ index: test
+ body:
+ mappings:
+ properties:
+ text:
+ type: text
+ keyword:
+ type: keyword
+ long:
+ type: long
+ float:
+ type: float
+ double:
+ type: double
+ date:
+ type: date
+ geo:
+ type: geo_point
+ ip:
+ type: ip
+ boolean:
+ type: boolean
+ array_of_long:
+ type: long
+ json_field:
+ type: text
+
+ - do:
+ index:
+ index: test
+ id: 1
+ body: {
+ text: "peter piper",
+ keyword: "foo",
+ long: 1,
+ float: 1.0,
+ double: 1.0,
+ date: "2017-01-01T00:00:00Z",
+ geo: [0.0, 20.0],
+ ip: "192.168.0.1",
+ boolean: true,
+ array_of_long: [1, 2],
+ json_field: "{\"keyword\":\"json_keyword1\",\"long\":10,\"float\":10.0,\"double\":10.0,\"date\":\"2021-01-01T00:00:00Z\",\"ip\":\"10.0.0.1\",\"boolean\":true, \"array_of_long\": [1, 2]}}"
+ }
+
+ - do:
+ index:
+ index: test
+ id: 2
+ body: {
+ text: "piper picked a peck",
+ keyword: "bar",
+ long: 2,
+ float: 2.0,
+ double: 2.0,
+ date: "2017-01-02T00:00:00Z",
+ geo: [10.0, 30.0],
+ ip: "192.168.0.2",
+ boolean: false,
+ array_of_long: [2, 3],
+ json_field: "{\"keyword\":\"json_keyword2\",\"long\":20,\"float\":20.0,\"double\":20.0,\"date\":\"2021-02-01T00:00:00Z\",\"ip\":\"10.0.0.2\",\"boolean\":false, \"array_of_long\": [2, 3]}}"
+ }
+
+ - do:
+ index:
+ index: test
+ id: 3
+ body: {
+ text: "peck of pickled peppers",
+ keyword: "baz",
+ long: -3,
+ float: -3.0,
+ double: -3.0,
+ date: "2017-01-03T00:00:00Z",
+ geo: [20.0, 40.0],
+ ip: "192.168.0.3",
+ boolean: true,
+ array_of_long: [3, 4],
+ json_field: "{\"keyword\":\"json_keyword3\",\"long\":30,\"float\":30.0,\"double\":30.0,\"date\":\"2021-03-01T00:00:00Z\",\"ip\":\"10.0.0.3\",\"boolean\":true, \"array_of_long\": [3, 4]}"
+ }
+
+ - do:
+ index:
+ index: test
+ id: 4
+ body: {
+ text: "pickled peppers",
+ keyword: "qux",
+ long: 4,
+ float: 4.0,
+ double: 4.0,
+ date: "2017-01-04T00:00:00Z",
+ geo: [30.0, 50.0],
+ ip: "192.168.0.4",
+ boolean: false,
+ array_of_long: [4, 5],
+ json_field: "{\"keyword\":\"json_keyword4\",\"long\":40,\"float\":40.0,\"double\":40.0,\"date\":\"2021-04-01T00:00:00Z\",\"ip\":\"10.0.0.4\",\"boolean\":false, \"array_of_long\": [4, 5]}"
+ }
+
+ - do:
+ index:
+ index: test
+ id: 5
+ body: {
+ text: "peppers",
+ keyword: "quux",
+ long: 5,
+ float: 5.0,
+ double: 5.0,
+ date: "2017-01-05T00:00:00Z",
+ geo: [40.0, 60.0],
+ ip: "192.168.0.5",
+ boolean: true,
+ array_of_long: [5, 6],
+ json_field: "{\"keyword\":\"json_keyword5\",\"long\":50,\"float\":50.0,\"double\":50.0,\"date\":\"2021-05-01T00:00:00Z\",\"ip\":\"10.0.0.5\",\"boolean\":true, \"array_of_long\": [5, 6]}"
+ }
+
+ - do:
+ indices.refresh:
+ index: [test]
+
+ # Tests for derived_text
+ - do:
+ search:
+ rest_total_hits_as_int: true
+ index: test
+ body:
+ derived:
+ derived_text:
+ type: text
+ script: "emit(params._source[\"text\"])"
+ query:
+ match_phrase:
+ derived_text:
+ query: "peter piper"
+
+ - match: { hits.total: 1 }
+
+ # Tests for derived_keyword
+ - do:
+ search:
+ rest_total_hits_as_int: true
+ index: test
+ body:
+ derived:
+ derived_keyword:
+ type: keyword
+ script: "emit(params._source[\"keyword\"])"
+ query:
+ term:
+ derived_keyword:
+ value: "foo"
+
+ - match: { hits.total: 1 }
+
+ # Tests for derived_long
+ - do:
+ search:
+ rest_total_hits_as_int: true
+ index: test
+ body:
+ derived:
+ derived_long:
+ type: long
+ script: "emit(params._source[\"long\"])"
+ query:
+ range:
+ derived_long:
+ gte: 1
+
+ - match: { hits.total: 4 }
+
+ # Tests for derived_float
+ - do:
+ search:
+ rest_total_hits_as_int: true
+ index: test
+ body:
+ derived:
+ derived_float:
+ type: float
+ script: "emit(params._source[\"float\"])"
+ query:
+ range:
+ derived_float:
+ gte: 1.0
+
+ - match: { hits.total: 4 }
+
+ # Tests for derived_double
+ - do:
+ search:
+ rest_total_hits_as_int: true
+ index: test
+ body:
+ derived:
+ derived_double:
+ type: double
+ script: "emit(params._source[\"double\"])"
+ query:
+ range:
+ derived_double:
+ gte: 1.0
+
+ - match: { hits.total: 4 }
+
+ # Tests for derived_date
+ - do:
+ search:
+ rest_total_hits_as_int: true
+ index: test
+ body:
+ derived:
+ derived_date:
+ type: date
+ script: "emit(ZonedDateTime.parse(params._source[\"date\"]).toInstant().toEpochMilli())"
+ query:
+ range:
+ derived_date:
+ gte: "2017-01-02"
+
+ - match: { hits.total: 4 }
+
+ # Tests for derived_geo
+ - do:
+ search:
+ rest_total_hits_as_int: true
+ index: test
+ body:
+ derived:
+ derived_geo:
+ type: geo_point
+ script: "emit(params._source[\"geo\"][0], params._source[\"geo\"][1])"
+ query:
+ geo_distance:
+ distance: "20km"
+ derived_geo:
+ lat: 0.0
+ lon: 20.0
+
+ - match: { hits.total: 1 }
+
+ # Tests for derived_ip
+ - do:
+ search:
+ rest_total_hits_as_int: true
+ index: test
+ body:
+ derived:
+ derived_ip:
+ type: ip
+ script: "emit(params._source[\"ip\"])"
+ query:
+ term:
+ derived_ip:
+ value: "192.168.0.1"
+
+ - match: { hits.total: 1 }
+
+ # Tests for derived_boolean
+ - do:
+ search:
+ rest_total_hits_as_int: true
+ index: test
+ body:
+ derived:
+ derived_boolean:
+ type: boolean
+ script: "emit(params._source[\"boolean\"])"
+ query:
+ term:
+ derived_boolean:
+ value: true
+
+ - match: { hits.total: 3 }
+
+ # Tests for derived_array_of_long
+ - do:
+ search:
+ rest_total_hits_as_int: true
+ index: test
+ body:
+ derived:
+ derived_array_of_long:
+ type: long
+ script: "emit(params._source[\"array_of_long\"][0]);emit(params._source[\"array_of_long\"][1]);"
+ query:
+ range:
+ derived_array_of_long:
+ gte: 3
+
+ - match: { hits.total: 4 }
+
+ # Tests for derived_object.keyword
+ - do:
+ search:
+ rest_total_hits_as_int: true
+ index: test
+ body:
+ derived:
+ derived_object:
+ type: object
+ properties:
+ keyword: keyword
+ script: "emit(params._source[\"json_field\"])"
+ prefilter_field: "json_field"
+ query:
+ term:
+ derived_object.keyword:
+ value: "json_keyword1"
+
+ - match: { hits.total: 1 }
+
+ # Tests for derived_object.long
+ - do:
+ search:
+ rest_total_hits_as_int: true
+ index: test
+ body:
+ derived:
+ derived_object:
+ type: object
+ properties:
+ keyword: keyword
+ script: "emit(params._source[\"json_field\"])"
+ prefilter_field: "json_field"
+ query:
+ range:
+ derived_object.long:
+ gte: 11
+
+ - match: { hits.total: 4 }
+
+ # Tests for derived_object.float
+ - do:
+ search:
+ rest_total_hits_as_int: true
+ index: test
+ body:
+ derived:
+ derived_object:
+ type: object
+ properties:
+ keyword: keyword
+ script: "emit(params._source[\"json_field\"])"
+ prefilter_field: "json_field"
+ query:
+ range:
+ derived_object.float:
+ gte: 10.1
+
+ - match: { hits.total: 4 }
+
+ # Tests for derived_object.double
+ - do:
+ search:
+ rest_total_hits_as_int: true
+ index: test
+ body:
+ derived:
+ derived_object:
+ type: object
+ properties:
+ keyword: keyword
+ script: "emit(params._source[\"json_field\"])"
+ prefilter_field: "json_field"
+ query:
+ range:
+ derived_object.double:
+ gte: 10.1
+
+ - match: { hits.total: 4 }
+
+ # Tests for derived_object.date
+ - do:
+ search:
+ rest_total_hits_as_int: true
+ index: test
+ body:
+ derived:
+ derived_object:
+ type: object
+ properties:
+ keyword: keyword
+ script: "emit(params._source[\"json_field\"])"
+ prefilter_field: "json_field"
+ query:
+ range:
+ derived_object.date:
+ gte: "2021-03-01"
+
+ - match: { hits.total: 3 }
+
+ # Tests for derived_object.ip
+ - do:
+ search:
+ rest_total_hits_as_int: true
+ index: test
+ body:
+ derived:
+ derived_object:
+ type: object
+ properties:
+ keyword: keyword
+ script: "emit(params._source[\"json_field\"])"
+ prefilter_field: "json_field"
+ query:
+ term:
+ derived_object.ip:
+ value: "10.0.0.1"
+
+ - match: { hits.total: 1 }
+
+ # Tests for derived_object.boolean
+ - do:
+ search:
+ rest_total_hits_as_int: true
+ index: test
+ body:
+ derived:
+ derived_object:
+ type: object
+ properties:
+ keyword: keyword
+ script: "emit(params._source[\"json_field\"])"
+ prefilter_field: "json_field"
+ query:
+ term:
+ derived_object.boolean:
+ value: true
+
+ - match: { hits.total: 3 }
+
+ # Tests for derived_object.array_of_long
+ - do:
+ search:
+ rest_total_hits_as_int: true
+ index: test
+ body:
+ derived:
+ derived_object:
+ type: object
+ properties:
+ keyword: keyword
+ script: "emit(params._source[\"json_field\"])"
+ prefilter_field: "json_field"
+ query:
+ range:
+ derived_object.array_of_long:
+ gte: 3
+
+ - match: { hits.total: 4 }
+
+ # Tests for query string
+ - do:
+ search:
+ body:
+ derived:
+ derived_keyword:
+ type: keyword
+ script: "emit(params._source[\"keyword\"])"
+ rest_total_hits_as_int: true
+ index: test
+ q: "derived_keyword:foo"
+
+ - match: { hits.total: 1 }
+
+ - do:
+ search:
+ body:
+ derived:
+ derived_object:
+ type: object
+ properties:
+ keyword: keyword
+ script: "emit(params._source[\"json_field\"])"
+ prefilter_field: "json_field"
+ rest_total_hits_as_int: true
+ index: test
+ q: derived_object.keyword:json_keyword1
+
+ - match: { hits.total: 1 }
diff --git a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/derived_fields/40_derived_field_fetch_and_highlight.yml b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/derived_fields/40_derived_field_fetch_and_highlight.yml
new file mode 100644
index 0000000000000..52a897c341419
--- /dev/null
+++ b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/derived_fields/40_derived_field_fetch_and_highlight.yml
@@ -0,0 +1,279 @@
+setup:
+ - skip:
+ version: " - 2.14.99"
+ reason: "derived_field feature was added in 2.15"
+
+---
+"Test basic field retrieval":
+ - do:
+ indices.create:
+ index: test
+ body:
+ mappings:
+ properties:
+ text:
+ type: text
+ keyword:
+ type: keyword
+ long:
+ type: long
+ float:
+ type: float
+ double:
+ type: double
+ date:
+ type: date
+ geo:
+ type: geo_point
+ ip:
+ type: ip
+ boolean:
+ type: boolean
+ array_of_long:
+ type: long
+ json_field:
+ type: text
+ derived:
+ derived_text:
+ type: text
+ script: "emit(params._source[\"text\"])"
+ derived_text_prefilter_field:
+ type: text
+ script: "emit(params._source[\"text\"])"
+ prefilter_field: "text"
+ derived_keyword:
+ type: keyword
+ script: "emit(params._source[\"keyword\"])"
+ derived_long:
+ type: long
+ script: "emit(params._source[\"long\"])"
+ derived_float:
+ type: float
+ script: "emit(params._source[\"float\"])"
+ derived_double:
+ type: double
+ script: "emit(params._source[\"double\"])"
+ derived_date:
+ type: date
+ script: "emit(ZonedDateTime.parse(params._source[\"date\"]).toInstant().toEpochMilli())"
+ derived_geo:
+ type: geo_point
+ script: "emit(params._source[\"geo\"][0], params._source[\"geo\"][1])"
+ derived_ip:
+ type: ip
+ script: "emit(params._source[\"ip\"])"
+ derived_boolean:
+ type: boolean
+ script: "emit(params._source[\"boolean\"])"
+ derived_array_of_long:
+ type: long
+ script: "emit(params._source[\"array_of_long\"][0]);emit(params._source[\"array_of_long\"][1]);"
+ derived_object:
+ type: object
+ properties:
+ keyword: keyword
+ script: "emit(params._source[\"json_field\"])"
+ prefilter_field: "json_field"
+ format: "yyyy-MM-dd"
+
+ - do:
+ index:
+ index: test
+ id: 1
+ body: {
+ text: "peter piper",
+ keyword: "foo",
+ long: 1,
+ float: 1.0,
+ double: 1.0,
+ date: "2017-01-01T00:00:00Z",
+ geo: [0.0, 20.0],
+ ip: "192.168.0.1",
+ boolean: true,
+ array_of_long: [1, 2],
+ json_field: "{\"keyword\":\"json_keyword1\",\"long\":10,\"float\":10.0,\"double\":10.0,\"date\":\"2021-01-01T00:00:00Z\",\"ip\":\"10.0.0.1\",\"boolean\":true, \"array_of_long\": [1, 2]}}"
+ }
+
+ - do:
+ index:
+ index: test
+ id: 2
+ body: {
+ text: "piper picked a peck",
+ keyword: "bar",
+ long: 2,
+ float: 2.0,
+ double: 2.0,
+ date: "2017-01-02T00:00:00Z",
+ geo: [10.0, 30.0],
+ ip: "192.168.0.2",
+ boolean: false,
+ array_of_long: [2, 3],
+ json_field: "{\"keyword\":\"json_keyword2\",\"long\":20,\"float\":20.0,\"double\":20.0,\"date\":\"2021-02-01T00:00:00Z\",\"ip\":\"10.0.0.2\",\"boolean\":false, \"array_of_long\": [2, 3]}}"
+ }
+
+ - do:
+ indices.refresh:
+ index: [test]
+
+ - do:
+ search:
+ index: test
+ body:
+ fields: [derived_text, derived_keyword, derived_long, derived_float, derived_double, derived_date, derived_geo, derived_ip, derived_boolean, derived_array_of_long,
+ derived_object, derived_object.keyword, derived_object.long, derived_object.float, derived_object.double, derived_object.date, derived_object.ip, derived_object.boolean, derived_object.array_of_long]
+
+ - is_true: hits.hits.0._id
+ - is_true: hits.hits.0._source
+
+ - match: { hits.hits.0.fields.derived_text.0: "peter piper" }
+ - match: { hits.hits.0.fields.derived_keyword.0: foo }
+ - match: { hits.hits.0.fields.derived_long.0: 1 }
+ - match: { hits.hits.0.fields.derived_float.0: 1.0 }
+ - match: { hits.hits.0.fields.derived_double.0: 1 }
+ - match: { hits.hits.0.fields.derived_date.0: 2017-01-01T00:00:00.000Z }
+ - match: { hits.hits.0.fields.derived_geo.0.lat: 0.0 }
+ - match: { hits.hits.0.fields.derived_geo.0.lon: 20.0 }
+ - match: { hits.hits.0.fields.derived_ip.0: 192.168.0.1 }
+ - match: { hits.hits.0.fields.derived_array_of_long.0: 1 }
+ - match: { hits.hits.0.fields.derived_array_of_long.1: 2 }
+ - match: { hits.hits.0.fields.derived_object.0: "{\"keyword\":\"json_keyword1\",\"long\":10,\"float\":10.0,\"double\":10.0,\"date\":\"2021-01-01T00:00:00Z\",\"ip\":\"10.0.0.1\",\"boolean\":true, \"array_of_long\": [1, 2]}}" }
+ - match: { hits.hits.0.fields.derived_object\.keyword.0: json_keyword1 }
+ - match: { hits.hits.0.fields.derived_object\.long.0: 10 }
+ - match: { hits.hits.0.fields.derived_object\.float.0: 10.0 }
+ - match: { hits.hits.0.fields.derived_object\.double.0: 10.0 }
+ - match: { hits.hits.0.fields.derived_object\.date.0: 2021-01-01 }
+ - match: { hits.hits.0.fields.derived_object\.ip.0: 10.0.0.1 }
+ - match: { hits.hits.0.fields.derived_object\.boolean.0: true }
+ - match: { hits.hits.0.fields.derived_object\.array_of_long.0: 1 }
+ - match: { hits.hits.0.fields.derived_object\.array_of_long.1: 2 }
+
+ - match: { hits.hits.1.fields.derived_text.0: "piper picked a peck" }
+ - match: { hits.hits.1.fields.derived_keyword.0: bar }
+ - match: { hits.hits.1.fields.derived_long.0: 2 }
+ - match: { hits.hits.1.fields.derived_float.0: 2.0 }
+ - match: { hits.hits.1.fields.derived_double.0: 2 }
+ - match: { hits.hits.1.fields.derived_date.0: 2017-01-02T00:00:00.000Z }
+ - match: { hits.hits.1.fields.derived_geo.0.lat: 10.0 }
+ - match: { hits.hits.1.fields.derived_geo.0.lon: 30.0 }
+ - match: { hits.hits.1.fields.derived_ip.0: 192.168.0.2 }
+ - match: { hits.hits.1.fields.derived_array_of_long.0: 2 }
+ - match: { hits.hits.1.fields.derived_array_of_long.1: 3 }
+ - match: { hits.hits.1.fields.derived_object.0: "{\"keyword\":\"json_keyword2\",\"long\":20,\"float\":20.0,\"double\":20.0,\"date\":\"2021-02-01T00:00:00Z\",\"ip\":\"10.0.0.2\",\"boolean\":false, \"array_of_long\": [2, 3]}}" }
+ - match: { hits.hits.1.fields.derived_object\.keyword.0: json_keyword2 }
+ - match: { hits.hits.1.fields.derived_object\.long.0: 20 }
+ - match: { hits.hits.1.fields.derived_object\.float.0: 20.0 }
+ - match: { hits.hits.1.fields.derived_object\.double.0: 20.0 }
+ - match: { hits.hits.1.fields.derived_object\.date.0: 2021-02-01 }
+ - match: { hits.hits.1.fields.derived_object\.ip.0: 10.0.0.2 }
+ - match: { hits.hits.1.fields.derived_object\.boolean.0: false }
+ - match: { hits.hits.1.fields.derived_object\.array_of_long.0: 2 }
+ - match: { hits.hits.1.fields.derived_object\.array_of_long.1: 3 }
+
+
+---
+"Test highlight":
+ - do:
+ indices.create:
+ index: test
+ body:
+ mappings:
+ properties:
+ text:
+ type: text
+ array_of_text:
+ type: text
+ json_field:
+ type: text
+ derived:
+ derived_text:
+ type: text
+ script: "emit(params._source[\"text\"])"
+ derived_keyword:
+ type: keyword
+ script: "emit(params._source[\"keyword\"])"
+ derived_array_of_text:
+ type: text
+ script: "emit(params._source[\"array_of_text\"][0]);emit(params._source[\"array_of_text\"][1]);"
+ derived_object:
+ type: object
+ properties:
+ array_of_text: text
+ script: "emit(params._source[\"json_field\"])"
+ prefilter_field: "json_field"
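+      # prefilter_field names the indexed source field (json_field) used to pre-filter documents before the derived-field script is evaluated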
+
+ - do:
+ index:
+ index: test
+ id: 1
+ body: {
+ text: "peter piper",
+ keyword: "foo",
+ long: 1,
+ float: 1.0,
+ double: 1.0,
+ date: "2017-01-01T00:00:00Z",
+ geo: [0.0, 20.0],
+ ip: "192.168.0.1",
+ boolean: true,
+ array_of_text: ["The quick brown fox is brown", "The quick brown fox is black"],
+ json_field: "{\"keyword\":\"json_keyword1\",\"long\":10,\"float\":10.0,\"double\":10.0,\"date\":\"2021-01-01T00:00:00Z\",\"ip\":\"10.0.0.1\",\"boolean\":true, \"array_of_text\": [\"The quick brown fox is brown\", \"The quick brown fox is black\"]}}"
+ }
+
+ - do:
+ index:
+ index: test
+ id: 2
+ body: {
+ text: "piper picked a peck",
+ keyword: "bar",
+ long: 2,
+ float: 2.0,
+ double: 2.0,
+ date: "2017-01-02T00:00:00Z",
+ geo: [10.0, 30.0],
+ ip: "192.168.0.2",
+ boolean: false,
+ array_of_text: ["The quick brown fox is brown", "The quick brown fox is black"],
+ json_field: "{\"keyword\":\"json_keyword2\",\"long\":20,\"float\":20.0,\"double\":20.0,\"date\":\"2021-02-01T00:00:00Z\",\"ip\":\"10.0.0.2\",\"boolean\":false, \"array_of_text\": [\"The quick brown fox is brown\", \"The quick brown fox is black\"]}}"
+ }
+
+ - do:
+ indices.refresh:
+ index: [test]
+ - do:
+ search:
+ rest_total_hits_as_int: true
+ body: { "query" : {"multi_match" : { "query" : "piper", "fields" : [ "derived_text"] } },
+ "fields": [derived_text],
+ "highlight" : { "type" : "unified", "fields" : { "derived_text" : {} } }
+ }
+
+ - match: {hits.hits.0.highlight.derived_text.0: "peter piper"}
+
+
+ - do:
+ search:
+ rest_total_hits_as_int: true
+ body: { "query" : {"multi_match" : { "query" : "quick brown", "fields" : [ "derived_array_of_text"] } },
+ "fields": [derived_array_of_text],
+ "highlight" : { "type" : "unified", "fields" : { "derived_array_of_text" : {} } }
+ }
+
+ - match: {hits.hits.0.highlight.derived_array_of_text.0: "The quick brown fox is brown"}
+
+ - do:
+ search:
+ rest_total_hits_as_int: true
+ index: test
+ body:
+ query:
+ match_phrase:
+ derived_object.array_of_text:
+ query: "quick brown"
+ highlight:
+ type: unified
+ fields:
+ derived_object.array_of_text: {}
+
+ - match: {hits.hits.0.highlight.derived_object\.array_of_text.0: "The quick brown fox is brown"}
diff --git a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/derived_fields/50_derived_field_default_analyzer.yml b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/derived_fields/50_derived_field_default_analyzer.yml
new file mode 100644
index 0000000000000..e10c9cb3c133f
--- /dev/null
+++ b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/derived_fields/50_derived_field_default_analyzer.yml
@@ -0,0 +1,105 @@
+---
+"Test default index analyzer simple is applied on derived fields":
+ - do:
+ indices.create:
+ index: test
+ body:
+ settings:
+ index.analysis.analyzer.default.type: simple
+ mappings:
+ properties:
+ text:
+ type: text
+ json_field:
+ type: text
+ derived:
+ derived_text:
+ type: text
+ script: "emit(params._source[\"text\"])"
+ derived_object:
+ type: object
+ properties:
+ array_of_text: text
+ script: "emit(params._source[\"json_field\"])"
+ prefilter_field: "json_field"
+
+ - do:
+ index:
+ index: test
+ id: 1
+ body: {
+ text: "Email: example@example.com, Visit https://example.com for more info.",
+ json_field: "{\"array_of_text\": [\"Email: example@example.com, Visit https://example.com for more info.\", \"Email: example@example.com, Visit https://example.com for more info.\"]}}"
+ }
+
+ - do:
+ indices.refresh:
+ index: [test]
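+  # The simple default analyzer splits "example.com" into [example, com]; a standard-analyzed query keeps it as a single token,
+  # so the first search should find nothing and the second (simple-analyzed) search should match.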
+ - do:
+ search:
+ index: test
+ q: "derived_text:example.com"
+ analyzer: standard
+
+ - match: { hits.total.value: 0 }
+
+ - do:
+ search:
+ index: test
+ q: "derived_text:example.com"
+ analyzer: simple
+
+ - match: { hits.total.value: 1 }
+
+---
+"Test default index analyzer standard is applied on derived fields":
+ - do:
+ indices.create:
+ index: test
+ body:
+ settings:
+ index.analysis.analyzer.default.type: standard
+ mappings:
+ properties:
+ text:
+ type: text
+ json_field:
+ type: text
+ derived:
+ derived_text:
+ type: text
+ script: "emit(params._source[\"text\"])"
+ derived_object:
+ type: object
+ properties:
+ array_of_text: text
+ script: "emit(params._source[\"json_field\"])"
+ prefilter_field: "json_field"
+
+ - do:
+ index:
+ index: test
+ id: 1
+ body: {
+ text: "Email: example@example.com, Visit https://example.com for more info.",
+ json_field: "{\"array_of_text\": [\"Email: example@example.com, Visit https://example.com for more info.\", \"Email: example@example.com, Visit https://example.com for more info.\"]}}"
+ }
+
+ - do:
+ indices.refresh:
+ index: [test]
+ - do:
+ search:
+ index: test
+ q: "derived_object.array_of_text:example.com"
+ analyzer: standard
+
+ - match: { hits.total.value: 1 }
+
+ - do:
+ search:
+ index: test
+ q: "derived_object.array_of_text:example.com"
+ analyzer: simple
+
+ - match: { hits.total.value: 1 }
diff --git a/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/ScaledFloatFieldMapper.java b/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/ScaledFloatFieldMapper.java
index 400d867296e5f..3115dce6c10a5 100644
--- a/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/ScaledFloatFieldMapper.java
+++ b/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/ScaledFloatFieldMapper.java
@@ -35,6 +35,7 @@
import com.fasterxml.jackson.core.JsonParseException;
import org.apache.lucene.document.Field;
+import org.apache.lucene.document.LongPoint;
import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.NumericDocValues;
@@ -165,7 +166,7 @@ public ScaledFloatFieldMapper build(BuilderContext context) {
public static final TypeParser PARSER = new TypeParser((n, c) -> new Builder(n, c.getSettings()));
- public static final class ScaledFloatFieldType extends SimpleMappedFieldType {
+ public static final class ScaledFloatFieldType extends SimpleMappedFieldType implements NumericPointEncoder {
private final double scalingFactor;
private final Double nullValue;
@@ -188,6 +189,21 @@ public ScaledFloatFieldType(String name, double scalingFactor) {
this(name, true, false, true, Collections.emptyMap(), scalingFactor, null);
}
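+    // Scaled floats are stored as longs, so encode the point by applying the scaling factor and rounding,
+    // clamping positive/negative infinity to the representable long range.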
+ @Override
+ public byte[] encodePoint(Number value) {
+ assert value instanceof Double;
+ double doubleValue = (Double) value;
+ byte[] point = new byte[Long.BYTES];
+ if (doubleValue == Double.POSITIVE_INFINITY) {
+ LongPoint.encodeDimension(Long.MAX_VALUE, point, 0);
+ } else if (doubleValue == Double.NEGATIVE_INFINITY) {
+ LongPoint.encodeDimension(Long.MIN_VALUE, point, 0);
+ } else {
+ LongPoint.encodeDimension(Math.round(scale(value)), point, 0);
+ }
+ return point;
+ }
+
public double getScalingFactor() {
return scalingFactor;
}
diff --git a/modules/opensearch-dashboards/src/main/java/org/opensearch/dashboards/OpenSearchDashboardsModulePlugin.java b/modules/opensearch-dashboards/src/main/java/org/opensearch/dashboards/OpenSearchDashboardsModulePlugin.java
index 09fd52ff65c66..6d5020336eb0b 100644
--- a/modules/opensearch-dashboards/src/main/java/org/opensearch/dashboards/OpenSearchDashboardsModulePlugin.java
+++ b/modules/opensearch-dashboards/src/main/java/org/opensearch/dashboards/OpenSearchDashboardsModulePlugin.java
@@ -54,6 +54,7 @@
import org.opensearch.rest.action.admin.indices.RestRefreshAction;
import org.opensearch.rest.action.admin.indices.RestUpdateSettingsAction;
import org.opensearch.rest.action.document.RestBulkAction;
+import org.opensearch.rest.action.document.RestBulkStreamingAction;
import org.opensearch.rest.action.document.RestDeleteAction;
import org.opensearch.rest.action.document.RestGetAction;
import org.opensearch.rest.action.document.RestIndexAction;
@@ -127,6 +128,7 @@ public List<RestHandler> getRestHandlers(
new OpenSearchDashboardsWrappedRestHandler(new RestMultiGetAction(settings)),
new OpenSearchDashboardsWrappedRestHandler(new RestSearchAction()),
new OpenSearchDashboardsWrappedRestHandler(new RestBulkAction(settings)),
+ new OpenSearchDashboardsWrappedRestHandler(new RestBulkStreamingAction(settings)),
new OpenSearchDashboardsWrappedRestHandler(new RestDeleteAction()),
new OpenSearchDashboardsWrappedRestHandler(new RestDeleteByQueryAction()),
diff --git a/modules/repository-url/src/yamlRestTest/java/org/opensearch/repositories/url/RepositoryURLClientYamlTestSuiteIT.java b/modules/repository-url/src/yamlRestTest/java/org/opensearch/repositories/url/RepositoryURLClientYamlTestSuiteIT.java
index 6536d474f5abc..27cef3f7d7251 100644
--- a/modules/repository-url/src/yamlRestTest/java/org/opensearch/repositories/url/RepositoryURLClientYamlTestSuiteIT.java
+++ b/modules/repository-url/src/yamlRestTest/java/org/opensearch/repositories/url/RepositoryURLClientYamlTestSuiteIT.java
@@ -47,6 +47,7 @@
import org.opensearch.core.xcontent.ToXContent;
import org.opensearch.core.xcontent.XContentBuilder;
import org.opensearch.repositories.fs.FsRepository;
+import org.opensearch.test.junit.annotations.TestIssueLogging;
import org.opensearch.test.rest.yaml.ClientYamlTestCandidate;
import org.opensearch.test.rest.yaml.OpenSearchClientYamlSuiteTestCase;
import org.junit.Before;
@@ -63,6 +64,7 @@
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.notNullValue;
+@TestIssueLogging(value = "_root:TRACE", issueUrl = "https://github.com/opensearch-project/OpenSearch/issues/9117")
public class RepositoryURLClientYamlTestSuiteIT extends OpenSearchClientYamlSuiteTestCase {
public RepositoryURLClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) {
diff --git a/modules/search-pipeline-common/src/main/java/org/opensearch/search/pipeline/common/SearchPipelineCommonModulePlugin.java b/modules/search-pipeline-common/src/main/java/org/opensearch/search/pipeline/common/SearchPipelineCommonModulePlugin.java
index 5378a6721efb2..1574621a8200e 100644
--- a/modules/search-pipeline-common/src/main/java/org/opensearch/search/pipeline/common/SearchPipelineCommonModulePlugin.java
+++ b/modules/search-pipeline-common/src/main/java/org/opensearch/search/pipeline/common/SearchPipelineCommonModulePlugin.java
@@ -8,24 +8,61 @@
package org.opensearch.search.pipeline.common;
+import org.opensearch.common.settings.Setting;
+import org.opensearch.common.settings.Settings;
import org.opensearch.plugins.Plugin;
import org.opensearch.plugins.SearchPipelinePlugin;
import org.opensearch.search.pipeline.Processor;
+import org.opensearch.search.pipeline.SearchPhaseResultsProcessor;
import org.opensearch.search.pipeline.SearchRequestProcessor;
import org.opensearch.search.pipeline.SearchResponseProcessor;
+import java.util.List;
import java.util.Map;
+import java.util.Set;
+import java.util.function.Function;
+import java.util.stream.Collectors;
/**
* Plugin providing common search request/response processors for use in search pipelines.
*/
public class SearchPipelineCommonModulePlugin extends Plugin implements SearchPipelinePlugin {
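+    // Optional node-scoped allowlists; when a setting is present, only the listed processor types are registered.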
+ static final Setting<List<String>> REQUEST_PROCESSORS_ALLOWLIST_SETTING = Setting.listSetting(
+ "search.pipeline.common.request.processors.allowed",
+ List.of(),
+ Function.identity(),
+ Setting.Property.NodeScope
+ );
+
+ static final Setting<List<String>> RESPONSE_PROCESSORS_ALLOWLIST_SETTING = Setting.listSetting(
+ "search.pipeline.common.response.processors.allowed",
+ List.of(),
+ Function.identity(),
+ Setting.Property.NodeScope
+ );
+
+ static final Setting<List<String>> SEARCH_PHASE_RESULTS_PROCESSORS_ALLOWLIST_SETTING = Setting.listSetting(
+ "search.pipeline.common.search.phase.results.processors.allowed",
+ List.of(),
+ Function.identity(),
+ Setting.Property.NodeScope
+ );
+
/**
* No constructor needed, but build complains if we don't have a constructor with JavaDoc.
*/
public SearchPipelineCommonModulePlugin() {}
+ @Override
+ public List<Setting<?>> getSettings() {
+ return List.of(
+ REQUEST_PROCESSORS_ALLOWLIST_SETTING,
+ RESPONSE_PROCESSORS_ALLOWLIST_SETTING,
+ SEARCH_PHASE_RESULTS_PROCESSORS_ALLOWLIST_SETTING
+ );
+ }
+
/**
* Returns a map of processor factories.
*
@@ -34,25 +71,62 @@ public SearchPipelineCommonModulePlugin() {}
*/
@Override
public Map<String, Processor.Factory<SearchRequestProcessor>> getRequestProcessors(Parameters parameters) {
- return Map.of(
- FilterQueryRequestProcessor.TYPE,
- new FilterQueryRequestProcessor.Factory(parameters.namedXContentRegistry),
- ScriptRequestProcessor.TYPE,
- new ScriptRequestProcessor.Factory(parameters.scriptService),
- OversampleRequestProcessor.TYPE,
- new OversampleRequestProcessor.Factory()
+ return filterForAllowlistSetting(
+ REQUEST_PROCESSORS_ALLOWLIST_SETTING,
+ parameters.env.settings(),
+ Map.of(
+ FilterQueryRequestProcessor.TYPE,
+ new FilterQueryRequestProcessor.Factory(parameters.namedXContentRegistry),
+ ScriptRequestProcessor.TYPE,
+ new ScriptRequestProcessor.Factory(parameters.scriptService),
+ OversampleRequestProcessor.TYPE,
+ new OversampleRequestProcessor.Factory()
+ )
);
}
@Override
public Map<String, Processor.Factory<SearchResponseProcessor>> getResponseProcessors(Parameters parameters) {
- return Map.of(
- RenameFieldResponseProcessor.TYPE,
- new RenameFieldResponseProcessor.Factory(),
- TruncateHitsResponseProcessor.TYPE,
- new TruncateHitsResponseProcessor.Factory(),
- CollapseResponseProcessor.TYPE,
- new CollapseResponseProcessor.Factory()
+ return filterForAllowlistSetting(
+ RESPONSE_PROCESSORS_ALLOWLIST_SETTING,
+ parameters.env.settings(),
+ Map.of(
+ RenameFieldResponseProcessor.TYPE,
+ new RenameFieldResponseProcessor.Factory(),
+ TruncateHitsResponseProcessor.TYPE,
+ new TruncateHitsResponseProcessor.Factory(),
+ CollapseResponseProcessor.TYPE,
+ new CollapseResponseProcessor.Factory()
+ )
);
}
+
+ @Override
+ public Map<String, Processor.Factory<SearchPhaseResultsProcessor>> getSearchPhaseResultsProcessors(Parameters parameters) {
+ return filterForAllowlistSetting(SEARCH_PHASE_RESULTS_PROCESSORS_ALLOWLIST_SETTING, parameters.env.settings(), Map.of());
+ }
+
+ private <T extends Processor> Map<String, Processor.Factory<T>> filterForAllowlistSetting(
+ Setting<List<String>> allowlistSetting,
+ Settings settings,
+ Map<String, Processor.Factory<T>> map
+ ) {
+ if (allowlistSetting.exists(settings) == false) {
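+ // Allowlist not configured: expose every built-in processor for this category.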
+ return Map.copyOf(map);
+ }
+ final Set<String> allowlist = Set.copyOf(allowlistSetting.get(settings));
+ // Assert that no unknown processors are defined in the allowlist
+ final Set<String> unknownAllowlistProcessors = allowlist.stream()
+ .filter(p -> map.containsKey(p) == false)
+ .collect(Collectors.toUnmodifiableSet());
+ if (unknownAllowlistProcessors.isEmpty() == false) {
+ throw new IllegalArgumentException(
+ "Processor(s) " + unknownAllowlistProcessors + " were defined in [" + allowlistSetting.getKey() + "] but do not exist"
+ );
+ }
+ return map.entrySet()
+ .stream()
+ .filter(e -> allowlist.contains(e.getKey()))
+ .collect(Collectors.toUnmodifiableMap(Map.Entry::getKey, Map.Entry::getValue));
+ }
}
diff --git a/modules/search-pipeline-common/src/test/java/org/opensearch/search/pipeline/common/SearchPipelineCommonModulePluginTests.java b/modules/search-pipeline-common/src/test/java/org/opensearch/search/pipeline/common/SearchPipelineCommonModulePluginTests.java
new file mode 100644
index 0000000000000..519468ebe17ff
--- /dev/null
+++ b/modules/search-pipeline-common/src/test/java/org/opensearch/search/pipeline/common/SearchPipelineCommonModulePluginTests.java
@@ -0,0 +1,106 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.search.pipeline.common;
+
+import org.opensearch.common.settings.Settings;
+import org.opensearch.env.TestEnvironment;
+import org.opensearch.plugins.SearchPipelinePlugin;
+import org.opensearch.test.OpenSearchTestCase;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.function.BiFunction;
+
+public class SearchPipelineCommonModulePluginTests extends OpenSearchTestCase {
+
+ public void testRequestProcessorAllowlist() throws IOException {
+ final String key = SearchPipelineCommonModulePlugin.REQUEST_PROCESSORS_ALLOWLIST_SETTING.getKey();
+ runAllowlistTest(key, List.of(), SearchPipelineCommonModulePlugin::getRequestProcessors);
+ runAllowlistTest(key, List.of("filter_query"), SearchPipelineCommonModulePlugin::getRequestProcessors);
+ runAllowlistTest(key, List.of("script"), SearchPipelineCommonModulePlugin::getRequestProcessors);
+ runAllowlistTest(key, List.of("oversample", "script"), SearchPipelineCommonModulePlugin::getRequestProcessors);
+ runAllowlistTest(key, List.of("filter_query", "script", "oversample"), SearchPipelineCommonModulePlugin::getRequestProcessors);
+
+ final IllegalArgumentException e = expectThrows(
+ IllegalArgumentException.class,
+ () -> runAllowlistTest(key, List.of("foo"), SearchPipelineCommonModulePlugin::getRequestProcessors)
+ );
+ assertTrue(e.getMessage(), e.getMessage().contains("foo"));
+ }
+
+ public void testResponseProcessorAllowlist() throws IOException {
+ final String key = SearchPipelineCommonModulePlugin.RESPONSE_PROCESSORS_ALLOWLIST_SETTING.getKey();
+ runAllowlistTest(key, List.of(), SearchPipelineCommonModulePlugin::getResponseProcessors);
+ runAllowlistTest(key, List.of("rename_field"), SearchPipelineCommonModulePlugin::getResponseProcessors);
+ runAllowlistTest(key, List.of("truncate_hits"), SearchPipelineCommonModulePlugin::getResponseProcessors);
+ runAllowlistTest(key, List.of("collapse", "truncate_hits"), SearchPipelineCommonModulePlugin::getResponseProcessors);
+ runAllowlistTest(
+ key,
+ List.of("rename_field", "truncate_hits", "collapse"),
+ SearchPipelineCommonModulePlugin::getResponseProcessors
+ );
+
+ final IllegalArgumentException e = expectThrows(
+ IllegalArgumentException.class,
+ () -> runAllowlistTest(key, List.of("foo"), SearchPipelineCommonModulePlugin::getResponseProcessors)
+ );
+ assertTrue(e.getMessage(), e.getMessage().contains("foo"));
+ }
+
+ public void testSearchPhaseResultsProcessorAllowlist() throws IOException {
+ final String key = SearchPipelineCommonModulePlugin.SEARCH_PHASE_RESULTS_PROCESSORS_ALLOWLIST_SETTING.getKey();
+ runAllowlistTest(key, List.of(), SearchPipelineCommonModulePlugin::getSearchPhaseResultsProcessors);
+
+ final IllegalArgumentException e = expectThrows(
+ IllegalArgumentException.class,
+ () -> runAllowlistTest(key, List.of("foo"), SearchPipelineCommonModulePlugin::getSearchPhaseResultsProcessors)
+ );
+ assertTrue(e.getMessage(), e.getMessage().contains("foo"));
+ }
+
+ private void runAllowlistTest(
+ String settingKey,
+ List<String> allowlist,
+ BiFunction<SearchPipelineCommonModulePlugin, SearchPipelinePlugin.Parameters, Map<String, ?>> function
+ ) throws IOException {
+ final Settings settings = Settings.builder().putList(settingKey, allowlist).build();
+ try (SearchPipelineCommonModulePlugin plugin = new SearchPipelineCommonModulePlugin()) {
+ assertEquals(Set.copyOf(allowlist), function.apply(plugin, createParameters(settings)).keySet());
+ }
+ }
+
+ public void testAllowlistNotSpecified() throws IOException {
+ final Settings settings = Settings.EMPTY;
+ try (SearchPipelineCommonModulePlugin plugin = new SearchPipelineCommonModulePlugin()) {
+ assertEquals(Set.of("oversample", "filter_query", "script"), plugin.getRequestProcessors(createParameters(settings)).keySet());
+ assertEquals(
+ Set.of("rename_field", "truncate_hits", "collapse"),
+ plugin.getResponseProcessors(createParameters(settings)).keySet()
+ );
+ assertEquals(Set.of(), plugin.getSearchPhaseResultsProcessors(createParameters(settings)).keySet());
+ }
+ }
+
+ private static SearchPipelinePlugin.Parameters createParameters(Settings settings) {
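+ // Only the environment (settings) matters for these tests; the remaining collaborators are never invoked, so they can stay null/no-op.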
+ return new SearchPipelinePlugin.Parameters(
+ TestEnvironment.newEnvironment(Settings.builder().put(settings).put("path.home", "").build()),
+ null,
+ null,
+ null,
+ () -> 0L,
+ (a, b) -> null,
+ null,
+ null,
+ $ -> {},
+ null
+ );
+ }
+}
diff --git a/modules/transport-netty4/build.gradle b/modules/transport-netty4/build.gradle
index 83c4db80b7798..a8a165df637a2 100644
--- a/modules/transport-netty4/build.gradle
+++ b/modules/transport-netty4/build.gradle
@@ -235,11 +235,14 @@ thirdPartyAudit {
'io.netty.util.internal.shaded.org.jctools.queues.BaseMpscLinkedArrayQueueConsumerFields',
'io.netty.util.internal.shaded.org.jctools.queues.BaseMpscLinkedArrayQueueProducerFields',
'io.netty.util.internal.shaded.org.jctools.queues.LinkedQueueNode',
+ 'io.netty.util.internal.shaded.org.jctools.queues.MpmcArrayQueueConsumerIndexField',
+ 'io.netty.util.internal.shaded.org.jctools.queues.MpmcArrayQueueProducerIndexField',
'io.netty.util.internal.shaded.org.jctools.queues.MpscArrayQueueConsumerIndexField',
'io.netty.util.internal.shaded.org.jctools.queues.MpscArrayQueueProducerIndexField',
'io.netty.util.internal.shaded.org.jctools.queues.MpscArrayQueueProducerLimitField',
'io.netty.util.internal.shaded.org.jctools.util.UnsafeAccess',
'io.netty.util.internal.shaded.org.jctools.util.UnsafeRefArrayAccess',
+ 'io.netty.util.internal.shaded.org.jctools.util.UnsafeLongArrayAccess',
'io.netty.handler.ssl.util.OpenJdkSelfSignedCertGenerator',
'io.netty.handler.ssl.util.OpenJdkSelfSignedCertGenerator$1',
'io.netty.handler.ssl.util.OpenJdkSelfSignedCertGenerator$2',
diff --git a/modules/transport-netty4/licenses/netty-buffer-4.1.109.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-buffer-4.1.109.Final.jar.sha1
deleted file mode 100644
index 76b51cdae3867..0000000000000
--- a/modules/transport-netty4/licenses/netty-buffer-4.1.109.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-9d21d602ad7c639fa16b1d26559065d310a34c51
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-buffer-4.1.111.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-buffer-4.1.111.Final.jar.sha1
new file mode 100644
index 0000000000000..6784ac6c3b64f
--- /dev/null
+++ b/modules/transport-netty4/licenses/netty-buffer-4.1.111.Final.jar.sha1
@@ -0,0 +1 @@
+b54863f578939e135d3b3aea610284ae57c188cf
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-codec-4.1.109.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-4.1.109.Final.jar.sha1
deleted file mode 100644
index 1bccee872152d..0000000000000
--- a/modules/transport-netty4/licenses/netty-codec-4.1.109.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-16e0b2beb49318a549d3ba5d66d707bd5daa8c97
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-codec-4.1.111.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-4.1.111.Final.jar.sha1
new file mode 100644
index 0000000000000..3d86194de9213
--- /dev/null
+++ b/modules/transport-netty4/licenses/netty-codec-4.1.111.Final.jar.sha1
@@ -0,0 +1 @@
+a6762ec00a6d268f9980741f5b755838bcd658bf
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-codec-http-4.1.109.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-http-4.1.109.Final.jar.sha1
deleted file mode 100644
index 3423fb94e8497..0000000000000
--- a/modules/transport-netty4/licenses/netty-codec-http-4.1.109.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-6dca43cedc0b2dc6bf57bdc85fce6ffca3e6b72a
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-codec-http-4.1.111.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-http-4.1.111.Final.jar.sha1
new file mode 100644
index 0000000000000..4ef1adb818300
--- /dev/null
+++ b/modules/transport-netty4/licenses/netty-codec-http-4.1.111.Final.jar.sha1
@@ -0,0 +1 @@
+c6ecbc452321e632bf3cea0f9758839b650455c7
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-codec-http2-4.1.109.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-http2-4.1.109.Final.jar.sha1
deleted file mode 100644
index b83ad36222d07..0000000000000
--- a/modules/transport-netty4/licenses/netty-codec-http2-4.1.109.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-6bd4a54b69a81356393f6e4621bad40754f8a5a2
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-codec-http2-4.1.111.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-http2-4.1.111.Final.jar.sha1
new file mode 100644
index 0000000000000..06c86b8fda557
--- /dev/null
+++ b/modules/transport-netty4/licenses/netty-codec-http2-4.1.111.Final.jar.sha1
@@ -0,0 +1 @@
+f0cca5df75bfb4f858d0435f601d8b1cae1de054
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-common-4.1.109.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-common-4.1.109.Final.jar.sha1
deleted file mode 100644
index 5172500557f8b..0000000000000
--- a/modules/transport-netty4/licenses/netty-common-4.1.109.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-da63e54ee1ca69abf4206cb74fadef7f50850911
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-common-4.1.111.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-common-4.1.111.Final.jar.sha1
new file mode 100644
index 0000000000000..16cb1cce7f504
--- /dev/null
+++ b/modules/transport-netty4/licenses/netty-common-4.1.111.Final.jar.sha1
@@ -0,0 +1 @@
+58210befcb31adbcadd5724966a061444db91863
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-handler-4.1.109.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-handler-4.1.109.Final.jar.sha1
deleted file mode 100644
index cabe61b300523..0000000000000
--- a/modules/transport-netty4/licenses/netty-handler-4.1.109.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-9167863307b3c44cc12262e7b5512de3499b9c4a
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-handler-4.1.111.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-handler-4.1.111.Final.jar.sha1
new file mode 100644
index 0000000000000..2f70f791f65ed
--- /dev/null
+++ b/modules/transport-netty4/licenses/netty-handler-4.1.111.Final.jar.sha1
@@ -0,0 +1 @@
+2bc6a58ad2e9e279634b6e55022e8dcd3c175cc4
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-resolver-4.1.109.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-resolver-4.1.109.Final.jar.sha1
deleted file mode 100644
index 14e21cc0cdb60..0000000000000
--- a/modules/transport-netty4/licenses/netty-resolver-4.1.109.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-055485ac976e27c8bb67ee111a8490c58f67b70c
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-resolver-4.1.111.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-resolver-4.1.111.Final.jar.sha1
new file mode 100644
index 0000000000000..621cbf58f3133
--- /dev/null
+++ b/modules/transport-netty4/licenses/netty-resolver-4.1.111.Final.jar.sha1
@@ -0,0 +1 @@
+3493179999f211dc49714319f81da2be86523a3b
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-transport-4.1.109.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-transport-4.1.109.Final.jar.sha1
deleted file mode 100644
index 6b23d0883e31f..0000000000000
--- a/modules/transport-netty4/licenses/netty-transport-4.1.109.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-79e3b07d58ef03c7a860d48f932b720675aa8bd3
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-transport-4.1.111.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-transport-4.1.111.Final.jar.sha1
new file mode 100644
index 0000000000000..ac96e7545ed58
--- /dev/null
+++ b/modules/transport-netty4/licenses/netty-transport-4.1.111.Final.jar.sha1
@@ -0,0 +1 @@
+24e97cf14ea9d80afe4c5ab69066b587fccc154a
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-transport-native-unix-common-4.1.109.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-transport-native-unix-common-4.1.109.Final.jar.sha1
deleted file mode 100644
index 5afeb9627c9b5..0000000000000
--- a/modules/transport-netty4/licenses/netty-transport-native-unix-common-4.1.109.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-da7fe1e6943cbab8ee48df2beadc2c8304f347a2
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-transport-native-unix-common-4.1.111.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-transport-native-unix-common-4.1.111.Final.jar.sha1
new file mode 100644
index 0000000000000..0847ac3034db7
--- /dev/null
+++ b/modules/transport-netty4/licenses/netty-transport-native-unix-common-4.1.111.Final.jar.sha1
@@ -0,0 +1 @@
+acafc128cddafa021bc0b48b0788eb0e118add5e
\ No newline at end of file
diff --git a/plugins/analysis-icu/licenses/lucene-analysis-icu-9.11.0-snapshot-fb97840.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analysis-icu-9.11.0-snapshot-fb97840.jar.sha1
deleted file mode 100644
index 594733c11402c..0000000000000
--- a/plugins/analysis-icu/licenses/lucene-analysis-icu-9.11.0-snapshot-fb97840.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-f9cd7bec33c8cf3b891976cb674ffe9c97f8747f
\ No newline at end of file
diff --git a/plugins/analysis-icu/licenses/lucene-analysis-icu-9.12.0-snapshot-847316d.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analysis-icu-9.12.0-snapshot-847316d.jar.sha1
new file mode 100644
index 0000000000000..80e254ed3d098
--- /dev/null
+++ b/plugins/analysis-icu/licenses/lucene-analysis-icu-9.12.0-snapshot-847316d.jar.sha1
@@ -0,0 +1 @@
+04436942995a4952ce5654126dfb767d6335674e
\ No newline at end of file
diff --git a/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.11.0-snapshot-fb97840.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.11.0-snapshot-fb97840.jar.sha1
deleted file mode 100644
index c46e06b8c87e4..0000000000000
--- a/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.11.0-snapshot-fb97840.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-c244a56bf7cd171a19379c96f1d20c477a34578d
\ No newline at end of file
diff --git a/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.12.0-snapshot-847316d.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.12.0-snapshot-847316d.jar.sha1
new file mode 100644
index 0000000000000..3baed2a6e660b
--- /dev/null
+++ b/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.12.0-snapshot-847316d.jar.sha1
@@ -0,0 +1 @@
+85918e24fc3bf63fcd953807ab2eb3fa55c987c2
\ No newline at end of file
diff --git a/plugins/analysis-nori/licenses/lucene-analysis-nori-9.11.0-snapshot-fb97840.jar.sha1 b/plugins/analysis-nori/licenses/lucene-analysis-nori-9.11.0-snapshot-fb97840.jar.sha1
deleted file mode 100644
index a79c34a127920..0000000000000
--- a/plugins/analysis-nori/licenses/lucene-analysis-nori-9.11.0-snapshot-fb97840.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-da26df43f2b0d7c2dfecbf208cae0772a5e382ca
\ No newline at end of file
diff --git a/plugins/analysis-nori/licenses/lucene-analysis-nori-9.12.0-snapshot-847316d.jar.sha1 b/plugins/analysis-nori/licenses/lucene-analysis-nori-9.12.0-snapshot-847316d.jar.sha1
new file mode 100644
index 0000000000000..4e9327112d412
--- /dev/null
+++ b/plugins/analysis-nori/licenses/lucene-analysis-nori-9.12.0-snapshot-847316d.jar.sha1
@@ -0,0 +1 @@
+15e425e9cc0ab9d65fac3c919199a24dfa3631eb
\ No newline at end of file
diff --git a/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.11.0-snapshot-fb97840.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.11.0-snapshot-fb97840.jar.sha1
deleted file mode 100644
index f2b08020be1ad..0000000000000
--- a/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.11.0-snapshot-fb97840.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-f752ffa5ee4697b04643214236138f3defdee2f4
\ No newline at end of file
diff --git a/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.12.0-snapshot-847316d.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.12.0-snapshot-847316d.jar.sha1
new file mode 100644
index 0000000000000..7e7e9fe5b22b4
--- /dev/null
+++ b/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.12.0-snapshot-847316d.jar.sha1
@@ -0,0 +1 @@
+3d16c18348e7d4a00cb83100c43f3e21239d224e
\ No newline at end of file
diff --git a/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.11.0-snapshot-fb97840.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.11.0-snapshot-fb97840.jar.sha1
deleted file mode 100644
index 969a05905eaf0..0000000000000
--- a/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.11.0-snapshot-fb97840.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-73fe44fe755aef72e7293b2ffdd934beb631429d
\ No newline at end of file
diff --git a/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.12.0-snapshot-847316d.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.12.0-snapshot-847316d.jar.sha1
new file mode 100644
index 0000000000000..98e0ecc9cbb89
--- /dev/null
+++ b/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.12.0-snapshot-847316d.jar.sha1
@@ -0,0 +1 @@
+2ef6d9dffc6816d3cd04a54fe1ee43e13f850a37
\ No newline at end of file
diff --git a/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.11.0-snapshot-fb97840.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.11.0-snapshot-fb97840.jar.sha1
deleted file mode 100644
index fdf0bd39e217e..0000000000000
--- a/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.11.0-snapshot-fb97840.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-c2b48831b25e1c7e8f683a63d1505c2d133256d3
\ No newline at end of file
diff --git a/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.12.0-snapshot-847316d.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.12.0-snapshot-847316d.jar.sha1
new file mode 100644
index 0000000000000..ef675f2b9702e
--- /dev/null
+++ b/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.12.0-snapshot-847316d.jar.sha1
@@ -0,0 +1 @@
+e72b2262f5393d9ff255fb901297d4e6790e9102
\ No newline at end of file
diff --git a/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.11.0-snapshot-fb97840.jar.sha1 b/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.11.0-snapshot-fb97840.jar.sha1
deleted file mode 100644
index 0042415700453..0000000000000
--- a/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.11.0-snapshot-fb97840.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-757f8b29f103f82b6fb6948634e93dd497c9d7a8
\ No newline at end of file
diff --git a/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.12.0-snapshot-847316d.jar.sha1 b/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.12.0-snapshot-847316d.jar.sha1
new file mode 100644
index 0000000000000..d8bbac27fd360
--- /dev/null
+++ b/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.12.0-snapshot-847316d.jar.sha1
@@ -0,0 +1 @@
+416ac44b2e76592c9e85338798cae93c3cf5475e
\ No newline at end of file
diff --git a/plugins/cache-ehcache/build.gradle b/plugins/cache-ehcache/build.gradle
index 4fc5e44f58c3a..5747624e2fb69 100644
--- a/plugins/cache-ehcache/build.gradle
+++ b/plugins/cache-ehcache/build.gradle
@@ -24,6 +24,7 @@ versions << [
dependencies {
api "org.ehcache:ehcache:${versions.ehcache}"
+ api "org.slf4j:slf4j-api:${versions.slf4j}"
}
thirdPartyAudit {
@@ -78,10 +79,9 @@ thirdPartyAudit {
'org.osgi.framework.BundleActivator',
'org.osgi.framework.BundleContext',
'org.osgi.framework.ServiceReference',
- 'org.slf4j.Logger',
- 'org.slf4j.LoggerFactory',
- 'org.slf4j.Marker',
- 'org.slf4j.event.Level'
+ 'org.slf4j.impl.StaticLoggerBinder',
+ 'org.slf4j.impl.StaticMDCBinder',
+ 'org.slf4j.impl.StaticMarkerBinder'
)
}
@@ -90,13 +90,3 @@ tasks.named("bundlePlugin").configure {
into 'config'
}
}
-
-test {
- // TODO: Adding permission in plugin-security.policy doesn't seem to work.
- systemProperty 'tests.security.manager', 'false'
-}
-
-internalClusterTest {
- // TODO: Remove this later once we have a way.
- systemProperty 'tests.security.manager', 'false'
-}
diff --git a/plugins/cache-ehcache/licenses/slf4j-api-1.7.36.jar.sha1 b/plugins/cache-ehcache/licenses/slf4j-api-1.7.36.jar.sha1
new file mode 100644
index 0000000000000..77b9917528382
--- /dev/null
+++ b/plugins/cache-ehcache/licenses/slf4j-api-1.7.36.jar.sha1
@@ -0,0 +1 @@
+6c62681a2f655b49963a5983b8b0950a6120ae14
\ No newline at end of file
diff --git a/plugins/cache-ehcache/licenses/slf4j-api-LICENSE.txt b/plugins/cache-ehcache/licenses/slf4j-api-LICENSE.txt
new file mode 100644
index 0000000000000..54512cc08d16b
--- /dev/null
+++ b/plugins/cache-ehcache/licenses/slf4j-api-LICENSE.txt
@@ -0,0 +1,21 @@
+Copyright (c) 2004-2022 QOS.ch
+All rights reserved.
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/plugins/telemetry-otel/licenses/opentelemetry-extension-incubator-NOTICE.txt b/plugins/cache-ehcache/licenses/slf4j-api-NOTICE.txt
similarity index 100%
rename from plugins/telemetry-otel/licenses/opentelemetry-extension-incubator-NOTICE.txt
rename to plugins/cache-ehcache/licenses/slf4j-api-NOTICE.txt
diff --git a/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/EhcacheDiskCache.java b/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/EhcacheDiskCache.java
index 9a4dce1067b61..b4c62fbf85cb8 100644
--- a/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/EhcacheDiskCache.java
+++ b/plugins/cache-ehcache/src/main/java/org/opensearch/cache/store/disk/EhcacheDiskCache.java
@@ -42,6 +42,8 @@
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
+import java.security.AccessController;
+import java.security.PrivilegedAction;
import java.time.Duration;
import java.util.Arrays;
import java.util.Iterator;
@@ -175,57 +177,60 @@ private EhcacheDiskCache(Builder<K, V> builder) {
@SuppressWarnings({ "rawtypes" })
private Cache buildCache(Duration expireAfterAccess, Builder