diff --git a/modules/percolator/src/internalClusterTest/java/org/elasticsearch/percolator/PercolatorQuerySearchIT.java b/modules/percolator/src/internalClusterTest/java/org/elasticsearch/percolator/PercolatorQuerySearchIT.java
index 5e8ced116a1ff..7782e805da27c 100644
--- a/modules/percolator/src/internalClusterTest/java/org/elasticsearch/percolator/PercolatorQuerySearchIT.java
+++ b/modules/percolator/src/internalClusterTest/java/org/elasticsearch/percolator/PercolatorQuerySearchIT.java
@@ -889,7 +889,11 @@ public void testWithMultiplePercolatorFields() throws Exception {
assertThat(e.getCause().getMessage(), equalTo("a document can only contain one percolator query"));
}
- public void testPercolateQueryWithNestedDocuments() throws Exception {
+ /**
+ * Mapping for percolator tests that use nested "employee" documents.
+ * Includes query (percolator), id (keyword), companyname (text), and employee (nested with name).
+ */
+ private XContentBuilder nestedPercolatorMapping() throws IOException {
XContentBuilder mapping = XContentFactory.jsonBuilder();
mapping.startObject()
.startObject("properties")
@@ -912,7 +916,11 @@ public void testPercolateQueryWithNestedDocuments() throws Exception {
.endObject()
.endObject()
.endObject();
- assertAcked(indicesAdmin().prepareCreate("test").setMapping(mapping));
+ return mapping;
+ }
+
+ public void testPercolateQueryWithNestedDocuments() throws Exception {
+ assertAcked(indicesAdmin().prepareCreate("test").setMapping(nestedPercolatorMapping()));
prepareIndex("test").setId("q1")
.setSource(
jsonBuilder().startObject()
@@ -1368,4 +1376,119 @@ public void testKnnQueryNotSupportedInPercolator() throws IOException {
assertThat(exception.getMessage(), containsString("the [knn] query is unsupported inside a percolator"));
}
+ public void testPercolatorBooleanQueriesWithConcurrency() throws Exception {
+ assertAcked(
+ indicesAdmin().prepareCreate("test")
+ .setSettings(Settings.builder().put(indexSettings()).put("index.number_of_shards", 1))
+ .setMapping("field1", "type=long", "query", "type=percolator")
+ );
+
+ prepareIndex("test").setId("1")
+ .setSource(
+ jsonBuilder().startObject()
+ .field("query", boolQuery().must(rangeQuery("field1").from(10).to(12)).must(rangeQuery("field1").from(12).to(14)))
+ .endObject()
+ )
+ .get();
+ prepareIndex("test").setId("2")
+ .setSource(
+ jsonBuilder().startObject()
+ .field("query", boolQuery().must(rangeQuery("field1").from(3).to(4)).must(rangeQuery("field1").from(4).to(6)))
+ .endObject()
+ )
+ .get();
+ prepareIndex("test").setId("3")
+ .setSource(
+ jsonBuilder().startObject()
+ .field("query", boolQuery().must(rangeQuery("field1").from(10).to(12)).must(rangeQuery("field1").from(12).to(14)))
+ .endObject()
+ )
+ .get();
+ prepareIndex("test").setId("4")
+ .setSource(
+ jsonBuilder().startObject()
+ .field("query", boolQuery().must(rangeQuery("field1").from(3).to(4)).must(rangeQuery("field1").from(4).to(6)))
+ .endObject()
+ )
+ .get();
+ prepareIndex("test").setId("5")
+ .setSource(
+ jsonBuilder().startObject()
+ .field("query", boolQuery().must(rangeQuery("field1").from(10).to(12)).must(rangeQuery("field1").from(12).to(14)))
+ .endObject()
+ )
+ .get();
+ prepareIndex("test").setId("6")
+ .setSource(
+ jsonBuilder().startObject()
+ .field("query", boolQuery().must(rangeQuery("field1").from(3).to(4)).must(rangeQuery("field1").from(4).to(6)))
+ .endObject()
+ )
+ .get();
+
+ indicesAdmin().prepareRefresh().get();
+
+ BytesReference source = BytesReference.bytes(jsonBuilder().startObject().field("field1", 12).endObject());
+ assertResponse(prepareSearch().setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)), response -> {
+ assertHitCount(response, 3);
+ });
+ }
+
+ public void testPercolatorNestedQueriesWithConcurrency() throws Exception {
+ assertAcked(
+ indicesAdmin().prepareCreate("test")
+ .setSettings(Settings.builder().put(indexSettings()).put("index.number_of_shards", 1))
+ .setMapping(nestedPercolatorMapping())
+ );
+
+ QueryBuilder nestedVirginia = QueryBuilders.nestedQuery(
+ "employee",
+ QueryBuilders.matchQuery("employee.name", "virginia potts").operator(Operator.AND),
+ ScoreMode.Avg
+ );
+ QueryBuilder nestedTony = QueryBuilders.nestedQuery(
+ "employee",
+ QueryBuilders.matchQuery("employee.name", "tony stark").operator(Operator.AND),
+ ScoreMode.Avg
+ );
+
+ prepareIndex("test").setId("1").setSource(jsonBuilder().startObject().field("query", nestedVirginia).endObject()).get();
+ prepareIndex("test").setId("2").setSource(jsonBuilder().startObject().field("query", nestedTony).endObject()).get();
+ prepareIndex("test").setId("3").setSource(jsonBuilder().startObject().field("query", nestedVirginia).endObject()).get();
+ prepareIndex("test").setId("4").setSource(jsonBuilder().startObject().field("query", nestedTony).endObject()).get();
+ prepareIndex("test").setId("5").setSource(jsonBuilder().startObject().field("query", nestedVirginia).endObject()).get();
+ prepareIndex("test").setId("6").setSource(jsonBuilder().startObject().field("query", nestedTony).endObject()).get();
+
+ indicesAdmin().prepareRefresh().get();
+
+ BytesReference source = BytesReference.bytes(
+ jsonBuilder().startObject()
+ .startArray("employee")
+ .startObject()
+ .field("name", "virginia potts")
+ .endObject()
+ .startObject()
+ .field("name", "tony stark")
+ .endObject()
+ .endArray()
+ .endObject()
+ );
+ assertResponse(prepareSearch().setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)), response -> {
+ assertHitCount(response, 6);
+ });
+
+ BytesReference sourceVirginiaOnly = BytesReference.bytes(
+ jsonBuilder().startObject()
+ .startArray("employee")
+ .startObject()
+ .field("name", "virginia potts")
+ .endObject()
+ .endArray()
+ .endObject()
+ );
+ assertResponse(prepareSearch().setQuery(new PercolateQueryBuilder("query", sourceVirginiaOnly, XContentType.JSON)), response -> {
+ assertHitCount(response, 3);
+ });
+ }
+
}
diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java
index 5f5c6222f3771..ac3629e179f83 100644
--- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java
+++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java
@@ -495,9 +495,7 @@ protected Analyzer getWrappedAnalyzer(String fieldName) {
PercolatorFieldMapper.PercolatorFieldType pft = (PercolatorFieldMapper.PercolatorFieldType) fieldType;
String queryName = this.name != null ? this.name : pft.name();
- SearchExecutionContext percolateShardContext = wrap(context);
- percolateShardContext = PercolatorFieldMapper.configureContext(percolateShardContext, pft.mapUnmappedFieldsAsText);
- PercolateQuery.QueryStore queryStore = createStore(pft.queryBuilderField, percolateShardContext);
+ PercolateQuery.QueryStore queryStore = createStore(pft.queryBuilderField, pft.mapUnmappedFieldsAsText, context);
return pft.percolateQuery(queryName, queryStore, documents, docSearcher, excludeNestedDocuments, context.indexVersionCreated());
}
@@ -536,7 +534,11 @@ static IndexSearcher createMultiDocumentSearcher(Analyzer analyzer, Collection
{
@@ -547,21 +549,24 @@ static PercolateQuery.QueryStore createStore(MappedFieldType queryBuilderFieldTy
}
return docId -> {
if (binaryDocValues.advanceExact(docId)) {
+ // create a shallow copy and set overrides
+ var percolateShardContext = newPercolateSearchContext(context, mapUnmappedFieldsAsText);
+
BytesRef qbSource = binaryDocValues.binaryValue();
QueryBuilder queryBuilder = readQueryBuilder(qbSource, registry, indexVersion, () -> {
// query builder is written in an incompatible format, fall-back to reading it from source
- if (context.isSourceEnabled() == false) {
+ if (percolateShardContext.isSourceEnabled() == false) {
throw new ElasticsearchException(
"Unable to read percolator query. Original transport version is incompatible and source is "
+ "unavailable on index [{}].",
- context.index().getName()
+ percolateShardContext.index().getName()
);
}
LOGGER.warn(
"Reading percolator query from source. For best performance, reindexing of index [{}] is required.",
- context.index().getName()
+ percolateShardContext.index().getName()
);
- SourceProvider sourceProvider = context.createSourceProvider(new SourceFilter(null, null));
+ SourceProvider sourceProvider = percolateShardContext.createSourceProvider(new SourceFilter(null, null));
Source source = sourceProvider.getSource(ctx, docId);
SourceToParse sourceToParse = new SourceToParse(
String.valueOf(docId),
@@ -569,11 +574,11 @@ static PercolateQuery.QueryStore createStore(MappedFieldType queryBuilderFieldTy
source.sourceContentType()
);
- return context.parseDocument(sourceToParse).rootDoc().getBinaryValue(queryBuilderFieldType.name());
+ return percolateShardContext.parseDocument(sourceToParse).rootDoc().getBinaryValue(queryBuilderFieldType.name());
});
- queryBuilder = Rewriteable.rewrite(queryBuilder, context);
- return queryBuilder.toQuery(context);
+ queryBuilder = Rewriteable.rewrite(queryBuilder, percolateShardContext);
+ return queryBuilder.toQuery(percolateShardContext);
} else {
return null;
}
@@ -623,8 +628,18 @@ private static QueryBuilder readQueryBuilder(
}
}
- static SearchExecutionContext wrap(SearchExecutionContext delegate) {
- return new SearchExecutionContext(delegate) {
+ /**
+ * Create a shallow copy of the {@code source} context with specific
+ * overrides for Percolator usage. Copying avoids mutating the shared
+ * source context, so concurrent percolator queries remain thread safe.
+ * @param source The context to copy
+ * @param mapUnmappedFieldsAsText Controls unmapped fields behavior
+ * @return A copy of the source context with overrides
+ */
+ static SearchExecutionContext newPercolateSearchContext(SearchExecutionContext source, boolean mapUnmappedFieldsAsText) {
+ assert source.getClass().isAnonymousClass() == false
+ : "source must not be an anonymous class as overridden methods will be lost when a new SearchExecutionContext is created";
+ var wrapped = new SearchExecutionContext(source) {
@Override
public IndexReader getIndexReader() {
@@ -658,9 +673,9 @@ public > IFD getForField(
) {
IndexFieldData.Builder builder = fieldType.fielddataBuilder(
new FieldDataContext(
- delegate.getFullyQualifiedIndex().getName(),
- delegate.getIndexSettings(),
- delegate::lookup,
+ source.getFullyQualifiedIndex().getName(),
+ source.getIndexSettings(),
+ source::lookup,
this::sourcePath,
fielddataOperation
)
@@ -670,26 +685,54 @@ public > IFD getForField(
return (IFD) builder.build(cache, circuitBreaker);
}
+ // When expanding wildcard fields for term queries, we don't expand to fields that are empty.
+ // This is sane behavior for typical usage. But for percolator, the fields referenced by the query may not have any terms.
+ // Consequently, we may erroneously skip expanding those term fields.
+ // This override allows mapped field values to expand via wildcard input, even if the field is empty in the shard.
+ @Override
+ public boolean fieldExistsInIndex(String fieldname) {
+ return true;
+ }
+
@Override
public void addNamedQuery(String name, Query query) {
- delegate.addNamedQuery(name, query);
+ source.addNamedQuery(name, query);
}
@Override
public void addCircuitBreakerMemory(long bytes, String label) {
- delegate.addCircuitBreakerMemory(bytes, label);
+ source.addCircuitBreakerMemory(bytes, label);
}
@Override
public long getQueryConstructionMemoryUsed() {
- return delegate.getQueryConstructionMemoryUsed();
+ return source.getQueryConstructionMemoryUsed();
}
@Override
public void releaseQueryConstructionMemory() {
- delegate.releaseQueryConstructionMemory();
+ source.releaseQueryConstructionMemory();
}
};
+
+ // This means that fields in the query need to exist in the mapping prior to registering this query
+ // The reason that this is required, is that if a field doesn't exist then the query assumes defaults, which may be undesired.
+ //
+ // Even worse, when fields mentioned in percolator queries do get added to the mapping after the queries have been registered,
+ // the percolator queries no longer work as expected.
+ //
+ // Query parsing can't introduce new fields in mappings (which happens when registering a percolator query),
+ // because field type can't be inferred from queries (like document do) so the best option here is to disallow
+ // the usage of unmapped fields in percolator queries to avoid unexpected behaviour
+ //
+ // if index.percolator.map_unmapped_fields_as_text is set to true, the query can contain unmapped fields, which will be
+ // mapped as text.
+ wrapped.setAllowUnmappedFields(false);
+ wrapped.setMapUnmappedFieldAsString(mapUnmappedFieldsAsText);
+ // We need to rewrite queries with name to Lucene NamedQuery to find matched sub-queries of percolator query
+ wrapped.setRewriteToNamedQueries();
+
+ return wrapped;
}
@Override
diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java
index 6cd5b527be493..1f952adbd81b0 100644
--- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java
+++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java
@@ -56,7 +56,6 @@
import org.elasticsearch.index.mapper.RangeType;
import org.elasticsearch.index.mapper.SourceValueFetcher;
import org.elasticsearch.index.mapper.ValueFetcher;
-import org.elasticsearch.index.query.FilteredSearchExecutionContext;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryShardException;
import org.elasticsearch.index.query.Rewriteable;
@@ -410,7 +409,7 @@ public void parse(DocumentParserContext context) throws IOException {
throw new IllegalArgumentException("a document can only contain one percolator query");
}
- executionContext = configureContext(executionContext, isMapUnmappedFieldAsText());
+ executionContext = PercolateQueryBuilder.newPercolateSearchContext(executionContext, isMapUnmappedFieldAsText());
QueryBuilder queryBuilder = parseQueryBuilder(context);
// Fetching of terms, shapes and indexed scripts happen during this rewrite:
@@ -512,27 +511,6 @@ void processQuery(Query query, DocumentParserContext context) {
doc.add(new NumericDocValuesField(minimumShouldMatchFieldMapper.fullPath(), result.minimumShouldMatch));
}
- static SearchExecutionContext configureContext(SearchExecutionContext context, boolean mapUnmappedFieldsAsString) {
- SearchExecutionContext wrapped = wrapAllEmptyTextFields(context);
- // This means that fields in the query need to exist in the mapping prior to registering this query
- // The reason that this is required, is that if a field doesn't exist then the query assumes defaults, which may be undesired.
- //
- // Even worse when fields mentioned in percolator queries do go added to map after the queries have been registered
- // then the percolator queries don't work as expected any more.
- //
- // Query parsing can't introduce new fields in mappings (which happens when registering a percolator query),
- // because field type can't be inferred from queries (like document do) so the best option here is to disallow
- // the usage of unmapped fields in percolator queries to avoid unexpected behaviour
- //
- // if index.percolator.map_unmapped_fields_as_string is set to true, query can contain unmapped fields which will be mapped
- // as an analyzed string.
- wrapped.setAllowUnmappedFields(false);
- wrapped.setMapUnmappedFieldAsString(mapUnmappedFieldsAsString);
- // We need to rewrite queries with name to Lucene NamedQuery to find matched sub-queries of percolator query
- wrapped.setRewriteToNamedQueries();
- return wrapped;
- }
-
@Override
public Iterator iterator() {
return Arrays.asList(
@@ -577,17 +555,4 @@ static byte[] encodeRange(String rangeFieldName, byte[] minEncoded, byte[] maxEn
System.arraycopy(maxEncoded, 0, bytes, BinaryRange.BYTES + offset, maxEncoded.length);
return bytes;
}
-
- // When expanding wildcard fields for term queries, we don't expand to fields that are empty.
- // This is sane behavior for typical usage. But for percolator, the fields for the may not have any terms
- // Consequently, we may erroneously skip expanding those term fields.
- // This override allows mapped field values to expand via wildcard input, even if the field is empty in the shard.
- static SearchExecutionContext wrapAllEmptyTextFields(SearchExecutionContext searchExecutionContext) {
- return new FilteredSearchExecutionContext(searchExecutionContext) {
- @Override
- public boolean fieldExistsInIndex(String fieldname) {
- return true;
- }
- };
- }
}
diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/QueryBuilderStoreTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/QueryBuilderStoreTests.java
index 24ec8052c31c9..8c6a248e03110 100644
--- a/modules/percolator/src/test/java/org/elasticsearch/percolator/QueryBuilderStoreTests.java
+++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/QueryBuilderStoreTests.java
@@ -28,11 +28,13 @@
import org.elasticsearch.index.IndexMode;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.IndexVersion;
+import org.elasticsearch.index.IndexVersions;
import org.elasticsearch.index.fielddata.FieldDataContext;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.plain.BytesBinaryIndexFieldData;
import org.elasticsearch.index.mapper.BinaryFieldMapper;
import org.elasticsearch.index.mapper.DocumentParserContext;
+import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.KeywordFieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperBuilderContext;
@@ -50,16 +52,15 @@
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xcontent.NamedXContentRegistry;
import org.elasticsearch.xcontent.XContentParserConfiguration;
-import org.mockito.Mockito;
import java.io.IOException;
+import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.function.BiFunction;
+import java.util.stream.Collectors;
import static org.hamcrest.Matchers.greaterThan;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.when;
public class QueryBuilderStoreTests extends ESTestCase {
@@ -83,7 +84,6 @@ public void testStoringQueryBuilders() throws IOException {
BinaryFieldMapper fieldMapper = PercolatorFieldMapper.Builder.createQueryBuilderFieldBuilder(
MapperBuilderContext.root(false, false)
);
- MappedFieldType.FielddataOperation fielddataOperation = MappedFieldType.FielddataOperation.SEARCH;
try (IndexWriter indexWriter = new IndexWriter(directory, config)) {
for (int i = 0; i < queryBuilders.length; i++) {
@@ -100,18 +100,59 @@ public void testStoringQueryBuilders() throws IOException {
}
}
- SearchExecutionContext searchExecutionContext = mock(SearchExecutionContext.class);
- when(searchExecutionContext.indexVersionCreated()).thenReturn(IndexVersion.current());
- when(searchExecutionContext.getWriteableRegistry()).thenReturn(writableRegistry());
- when(searchExecutionContext.getParserConfig()).thenReturn(parserConfig());
- when(searchExecutionContext.getForField(fieldMapper.fieldType(), fielddataOperation)).thenReturn(
- new BytesBinaryIndexFieldData(fieldMapper.fullPath(), CoreValuesSourceType.KEYWORD)
+ IndexVersion indexVersion = IndexVersion.current();
+ NamedWriteableRegistry writeableRegistry = writableRegistry();
+ XContentParserConfiguration parserConfig = parserConfig();
+ BytesBinaryIndexFieldData fieldData = new BytesBinaryIndexFieldData(fieldMapper.fullPath(), CoreValuesSourceType.KEYWORD);
+ BiFunction> indexFieldDataLookup = (mft, fdc) -> fieldData;
+ Settings indexSettingsSettings = indexSettings(indexVersion, 1, 1).build();
+ IndexSettings indexSettings = new IndexSettings(
+ IndexMetadata.builder("test").settings(indexSettingsSettings).build(),
+ Settings.EMPTY
+ );
+ List fieldNames = Arrays.stream(queryBuilders).map(TermQueryBuilder::fieldName).distinct().toList();
+ List keywordMappers = fieldNames.stream()
+ .map(
+ name -> new KeywordFieldMapper.Builder(name, IndexVersions.UPGRADE_TO_LUCENE_9_12_2).build(
+ MapperBuilderContext.root(false, false)
+ )
+ )
+ .collect(Collectors.toList());
+ MappingLookup mappingLookup = MappingLookup.fromMappers(
+ Mapping.EMPTY,
+ keywordMappers,
+ Collections.emptyList(),
+ IndexMode.STANDARD
+ );
+ SearchExecutionContext searchExecutionContext = new SearchExecutionContext(
+ 0,
+ 0,
+ indexSettings,
+ null,
+ indexFieldDataLookup,
+ null,
+ mappingLookup,
+ null,
+ null,
+ parserConfig,
+ writeableRegistry,
+ null,
+ null,
+ System::currentTimeMillis,
+ null,
+ null,
+ () -> true,
+ null,
+ Collections.emptyMap(),
+ null,
+ MapperMetrics.NOOP
+ );
+
+ PercolateQuery.QueryStore queryStore = PercolateQueryBuilder.createStore(
+ fieldMapper.fieldType(),
+ randomBoolean(),
+ searchExecutionContext
);
- when(searchExecutionContext.getFieldType(Mockito.anyString())).thenAnswer(invocation -> {
- final String fieldName = (String) invocation.getArguments()[0];
- return new KeywordFieldMapper.KeywordFieldType(fieldName);
- });
- PercolateQuery.QueryStore queryStore = PercolateQueryBuilder.createStore(fieldMapper.fieldType(), searchExecutionContext);
try (IndexReader indexReader = DirectoryReader.open(directory)) {
LeafReaderContext leafContext = indexReader.leaves().get(0);
@@ -204,7 +245,11 @@ public void testCircuitBreakerReleasedAfterPerDocumentQueryConstruction() throws
);
SearchExecutionContext searchExecutionContext = new SearchExecutionContext(baseContext, circuitBreaker);
- PercolateQuery.QueryStore queryStore = PercolateQueryBuilder.createStore(fieldMapper.fieldType(), searchExecutionContext);
+ PercolateQuery.QueryStore queryStore = PercolateQueryBuilder.createStore(
+ fieldMapper.fieldType(),
+ randomBoolean(),
+ searchExecutionContext
+ );
try (IndexReader indexReader = DirectoryReader.open(directory)) {
LeafReaderContext leafContext = indexReader.leaves().get(0);
diff --git a/server/src/main/java/org/elasticsearch/index/query/FilteredSearchExecutionContext.java b/server/src/main/java/org/elasticsearch/index/query/FilteredSearchExecutionContext.java
deleted file mode 100644
index 5e26c6cbc99c8..0000000000000
--- a/server/src/main/java/org/elasticsearch/index/query/FilteredSearchExecutionContext.java
+++ /dev/null
@@ -1,393 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the "Elastic License
- * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
- * Public License v 1"; you may not use this file except in compliance with, at
- * your election, the "Elastic License 2.0", the "GNU Affero General Public
- * License v3.0 only", or the "Server Side Public License, v 1".
- */
-
-package org.elasticsearch.index.query;
-
-import org.apache.lucene.analysis.Analyzer;
-import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.LeafReaderContext;
-import org.apache.lucene.search.IndexSearcher;
-import org.apache.lucene.search.Query;
-import org.apache.lucene.search.join.BitSetProducer;
-import org.apache.lucene.search.similarities.Similarity;
-import org.elasticsearch.action.ActionListener;
-import org.elasticsearch.client.internal.Client;
-import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
-import org.elasticsearch.core.Nullable;
-import org.elasticsearch.index.Index;
-import org.elasticsearch.index.IndexSettings;
-import org.elasticsearch.index.IndexVersion;
-import org.elasticsearch.index.analysis.IndexAnalyzers;
-import org.elasticsearch.index.analysis.NamedAnalyzer;
-import org.elasticsearch.index.fielddata.IndexFieldData;
-import org.elasticsearch.index.mapper.DocumentParsingException;
-import org.elasticsearch.index.mapper.MappedFieldType;
-import org.elasticsearch.index.mapper.MappingLookup;
-import org.elasticsearch.index.mapper.NestedLookup;
-import org.elasticsearch.index.mapper.ParsedDocument;
-import org.elasticsearch.index.mapper.SourceLoader;
-import org.elasticsearch.index.mapper.SourceToParse;
-import org.elasticsearch.index.query.support.NestedScope;
-import org.elasticsearch.script.Script;
-import org.elasticsearch.script.ScriptContext;
-import org.elasticsearch.search.NestedDocuments;
-import org.elasticsearch.search.aggregations.support.ValuesSourceRegistry;
-import org.elasticsearch.search.lookup.LeafFieldLookupProvider;
-import org.elasticsearch.search.lookup.SearchLookup;
-import org.elasticsearch.search.lookup.SourceFilter;
-import org.elasticsearch.search.lookup.SourceProvider;
-import org.elasticsearch.xcontent.XContentParserConfiguration;
-
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.function.BiConsumer;
-import java.util.function.Function;
-import java.util.function.Predicate;
-
-/**
- * This is NOT a simple clone of the SearchExecutionContext.
- * While it does "clone-esque" things, it delegates everything it can to the passed search execution context.
- *
- * Do NOT use this if you mean to clone the context as you are planning to make modifications
- */
-public class FilteredSearchExecutionContext extends SearchExecutionContext {
- private final SearchExecutionContext in;
-
- public FilteredSearchExecutionContext(SearchExecutionContext in) {
- super(in);
- this.in = in;
- }
-
- @Override
- public Similarity getSearchSimilarity() {
- return in.getSearchSimilarity();
- }
-
- @Override
- public Similarity getDefaultSimilarity() {
- return in.getDefaultSimilarity();
- }
-
- @Override
- public List defaultFields() {
- return in.defaultFields();
- }
-
- @Override
- public boolean queryStringLenient() {
- return in.queryStringLenient();
- }
-
- @Override
- public boolean queryStringAnalyzeWildcard() {
- return in.queryStringAnalyzeWildcard();
- }
-
- @Override
- public boolean queryStringAllowLeadingWildcard() {
- return in.queryStringAllowLeadingWildcard();
- }
-
- @Override
- public BitSetProducer bitsetFilter(Query filter) {
- return in.bitsetFilter(filter);
- }
-
- @Override
- public > IFD getForField(
- MappedFieldType fieldType,
- MappedFieldType.FielddataOperation fielddataOperation
- ) {
- return in.getForField(fieldType, fielddataOperation);
- }
-
- @Override
- public void addNamedQuery(String name, Query query) {
- in.addNamedQuery(name, query);
- }
-
- @Override
- public Map copyNamedQueries() {
- return in.copyNamedQueries();
- }
-
- @Override
- public ParsedDocument parseDocument(SourceToParse source) throws DocumentParsingException {
- return in.parseDocument(source);
- }
-
- @Override
- public NestedLookup nestedLookup() {
- return in.nestedLookup();
- }
-
- @Override
- public boolean hasMappings() {
- return in.hasMappings();
- }
-
- @Override
- public boolean isFieldMapped(String name) {
- return in.isFieldMapped(name);
- }
-
- @Override
- public boolean isMetadataField(String field) {
- return in.isMetadataField(field);
- }
-
- @Override
- public boolean isMultiField(String field) {
- return in.isMultiField(field);
- }
-
- @Override
- public Set sourcePath(String fullName) {
- return in.sourcePath(fullName);
- }
-
- @Override
- public boolean isSourceEnabled() {
- return in.isSourceEnabled();
- }
-
- @Override
- public boolean isSourceSynthetic() {
- return in.isSourceSynthetic();
- }
-
- @Override
- public SourceLoader newSourceLoader(@Nullable SourceFilter filter, boolean forceSyntheticSource) {
- return in.newSourceLoader(filter, forceSyntheticSource);
- }
-
- @Override
- public MappedFieldType buildAnonymousFieldType(String type) {
- return in.buildAnonymousFieldType(type);
- }
-
- @Override
- public Analyzer getIndexAnalyzer(Function unindexedFieldAnalyzer) {
- return in.getIndexAnalyzer(unindexedFieldAnalyzer);
- }
-
- @Override
- public void setAllowedFields(Predicate allowedFields) {
- in.setAllowedFields(allowedFields);
- }
-
- @Override
- public boolean containsBrokenAnalysis(String field) {
- return in.containsBrokenAnalysis(field);
- }
-
- @Override
- public SearchLookup lookup() {
- return in.lookup();
- }
-
- @Override
- public void setLookupProviders(
- SourceProvider sourceProvider,
- Function fieldLookupProvider
- ) {
- in.setLookupProviders(sourceProvider, fieldLookupProvider);
- }
-
- @Override
- public NestedScope nestedScope() {
- return in.nestedScope();
- }
-
- @Override
- public IndexVersion indexVersionCreated() {
- return in.indexVersionCreated();
- }
-
- @Override
- public boolean indexSortedOnField(String field) {
- return in.indexSortedOnField(field);
- }
-
- @Override
- public ParsedQuery toQuery(QueryBuilder queryBuilder) {
- return in.toQuery(queryBuilder);
- }
-
- @Override
- public Index index() {
- return in.index();
- }
-
- @Override
- public FactoryType compile(Script script, ScriptContext context) {
- return in.compile(script, context);
- }
-
- @Override
- public void disableCache() {
- in.disableCache();
- }
-
- @Override
- public void registerAsyncAction(BiConsumer> asyncAction) {
- in.registerAsyncAction(asyncAction);
- }
-
- @Override
- public void executeAsyncActions(ActionListener listener) {
- in.executeAsyncActions(listener);
- }
-
- @Override
- public int getShardId() {
- return in.getShardId();
- }
-
- @Override
- public int getShardRequestIndex() {
- return in.getShardRequestIndex();
- }
-
- @Override
- public long nowInMillis() {
- return in.nowInMillis();
- }
-
- @Override
- public Client getClient() {
- return in.getClient();
- }
-
- @Override
- public IndexReader getIndexReader() {
- return in.getIndexReader();
- }
-
- @Override
- public IndexSearcher searcher() {
- return in.searcher();
- }
-
- @Override
- public boolean fieldExistsInIndex(String fieldname) {
- return in.fieldExistsInIndex(fieldname);
- }
-
- @Override
- public MappingLookup.CacheKey mappingCacheKey() {
- return in.mappingCacheKey();
- }
-
- @Override
- public NestedDocuments getNestedDocuments() {
- return in.getNestedDocuments();
- }
-
- @Override
- public XContentParserConfiguration getParserConfig() {
- return in.getParserConfig();
- }
-
- @Override
- public CoordinatorRewriteContext convertToCoordinatorRewriteContext() {
- return in.convertToCoordinatorRewriteContext();
- }
-
- @Override
- public QueryRewriteContext convertToIndexMetadataContext() {
- return in.convertToIndexMetadataContext();
- }
-
- @Override
- public DataRewriteContext convertToDataRewriteContext() {
- return in.convertToDataRewriteContext();
- }
-
- @Override
- public MappedFieldType getFieldType(String name) {
- return in.getFieldType(name);
- }
-
- @Override
- protected MappedFieldType fieldType(String name) {
- return in.fieldType(name);
- }
-
- @Override
- public IndexAnalyzers getIndexAnalyzers() {
- return in.getIndexAnalyzers();
- }
-
- @Override
- MappedFieldType failIfFieldMappingNotFound(String name, MappedFieldType fieldMapping) {
- return in.failIfFieldMappingNotFound(name, fieldMapping);
- }
-
- @Override
- public void setAllowUnmappedFields(boolean allowUnmappedFields) {
- in.setAllowUnmappedFields(allowUnmappedFields);
- }
-
- @Override
- public void setMapUnmappedFieldAsString(boolean mapUnmappedFieldAsString) {
- in.setMapUnmappedFieldAsString(mapUnmappedFieldAsString);
- }
-
- @Override
- public NamedWriteableRegistry getWriteableRegistry() {
- return in.getWriteableRegistry();
- }
-
- @Override
- public ValuesSourceRegistry getValuesSourceRegistry() {
- return in.getValuesSourceRegistry();
- }
-
- @Override
- public boolean allowExpensiveQueries() {
- return in.allowExpensiveQueries();
- }
-
- @Override
- public boolean hasAsyncActions() {
- return in.hasAsyncActions();
- }
-
- @Override
- public Index getFullyQualifiedIndex() {
- return in.getFullyQualifiedIndex();
- }
-
- @Override
- public IndexSettings getIndexSettings() {
- return in.getIndexSettings();
- }
-
- @Override
- public boolean indexMatches(String pattern) {
- return in.indexMatches(pattern);
- }
-
- @Override
- public Set getMatchingFieldNames(String pattern) {
- return in.getMatchingFieldNames(pattern);
- }
-
- @Override
- public void setRewriteToNamedQueries() {
- in.setRewriteToNamedQueries();
- }
-
- @Override
- public boolean rewriteToNamedQuery() {
- return in.rewriteToNamedQuery();
- }
-}
diff --git a/server/src/main/java/org/elasticsearch/index/query/SearchExecutionContext.java b/server/src/main/java/org/elasticsearch/index/query/SearchExecutionContext.java
index 635de32c02d7e..9828c9fa71971 100644
--- a/server/src/main/java/org/elasticsearch/index/query/SearchExecutionContext.java
+++ b/server/src/main/java/org/elasticsearch/index/query/SearchExecutionContext.java
@@ -88,6 +88,12 @@
*
* This context is used in several components of search execution, including
* building queries and fetching hits.
+ *
+ * The context is not designed to be thread-safe and is not expected to be
+ * shared between multiple threads. The exception is the percolator, which
+ * runs multiple queries simultaneously with the same context and would
+ * mutate elements of the context that are not thread-safe; to stay safe,
+ * the percolator copies the context before executing each query.
*/
public class SearchExecutionContext extends QueryRewriteContext {