Skip to content
5 changes: 5 additions & 0 deletions docs/changelog/144827.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
area: Search
issues: []
pr: 144827
summary: Fix circuit breaker leak in percolator query construction
type: bug
Original file line number Diff line number Diff line change
Expand Up @@ -674,6 +674,21 @@ public <IFD extends IndexFieldData<?>> IFD getForField(
// Forward named-query registration to the wrapped context unchanged.
public void addNamedQuery(String name, Query query) {
    delegate.addNamedQuery(name, query);
}

// Delegate circuit-breaker accounting so bytes reserved during query construction
// are tracked on the wrapped context rather than silently dropped by this wrapper.
@Override
public void addCircuitBreakerMemory(long bytes, String label) {
    delegate.addCircuitBreakerMemory(bytes, label);
}

// Expose the wrapped context's running total of circuit-breaker bytes
// reserved for query construction.
@Override
public long getQueryConstructionMemoryUsed() {
    return delegate.getQueryConstructionMemoryUsed();
}

// Release on the wrapped context; without this delegation the bytes reserved via
// addCircuitBreakerMemory would leak past the end of the request.
@Override
public void releaseQueryConstructionMemory() {
    delegate.releaseQueryConstructionMemory();
}
};
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,28 +19,45 @@
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.store.Directory;
import org.elasticsearch.TransportVersion;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.breaker.CircuitBreaker;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.core.CheckedFunction;
import org.elasticsearch.index.IndexMode;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.IndexVersion;
import org.elasticsearch.index.fielddata.FieldDataContext;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.plain.BytesBinaryIndexFieldData;
import org.elasticsearch.index.mapper.BinaryFieldMapper;
import org.elasticsearch.index.mapper.DocumentParserContext;
import org.elasticsearch.index.mapper.KeywordFieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperBuilderContext;
import org.elasticsearch.index.mapper.MapperMetrics;
import org.elasticsearch.index.mapper.Mapping;
import org.elasticsearch.index.mapper.MappingLookup;
import org.elasticsearch.index.mapper.TestDocumentParserContext;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.RegexpQueryBuilder;
import org.elasticsearch.index.query.SearchExecutionContext;
import org.elasticsearch.index.query.TermQueryBuilder;
import org.elasticsearch.index.query.WildcardQueryBuilder;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.search.aggregations.support.CoreValuesSourceType;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xcontent.NamedXContentRegistry;
import org.elasticsearch.xcontent.XContentParserConfiguration;
import org.mockito.Mockito;

import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.function.BiFunction;

import static org.hamcrest.Matchers.greaterThan;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

Expand Down Expand Up @@ -108,4 +125,105 @@ public void testStoringQueryBuilders() throws IOException {
}
}
}

/**
 * Verifies the percolator's per-document query construction accounts memory on the
 * circuit breaker while queries are being materialized, and that all of those bytes
 * are given back by the single request-end {@code releaseQueryConstructionMemory()} call.
 * <p>
 * Flow: index several query builders as percolator documents, rebuild them through a
 * {@link PercolateQuery.QueryStore}, and assert the breaker usage (a) grows above the
 * baseline after each per-document {@code apply} and (b) returns exactly to the baseline
 * after the release.
 */
public void testCircuitBreakerReleasedAfterPerDocumentQueryConstruction() throws IOException {
    CircuitBreaker circuitBreaker = newLimitedBreaker(ByteSizeValue.ofMb(100));

    String fieldName = "keyword_field";
    // Wildcard/regexp builders are used because their construction reserves
    // non-trivial memory, making breaker movement observable.
    QueryBuilder[] queryBuilders = new QueryBuilder[] {
        new WildcardQueryBuilder(fieldName, "test*pattern*with*wildcards"),
        new RegexpQueryBuilder(fieldName, ".*test.*regexp.*pattern.*"),
        new WildcardQueryBuilder(fieldName, "another*wildcard*query"),
        new RegexpQueryBuilder(fieldName, "prefix[0-9]+suffix"), };

    try (Directory directory = newDirectory()) {
        IndexWriterConfig config = new IndexWriterConfig(new WhitespaceAnalyzer());
        // Keep one doc per segment layout stable: no merging during the test.
        config.setMergePolicy(NoMergePolicy.INSTANCE);
        BinaryFieldMapper fieldMapper = PercolatorFieldMapper.Builder.createQueryBuilderFieldBuilder(
            MapperBuilderContext.root(false, false)
        );

        IndexVersion indexVersion = IndexVersion.current();
        // Index each query builder as a serialized percolator query document.
        try (IndexWriter indexWriter = new IndexWriter(directory, config)) {
            for (QueryBuilder queryBuilder : queryBuilders) {
                DocumentParserContext documentParserContext = new TestDocumentParserContext();
                PercolatorFieldMapper.createQueryBuilderField(
                    indexVersion,
                    TransportVersion.current(),
                    fieldMapper,
                    queryBuilder,
                    documentParserContext
                );
                indexWriter.addDocument(documentParserContext.doc());
            }
        }

        NamedWriteableRegistry writeableRegistry = writableRegistry();
        XContentParserConfiguration parserConfig = parserConfig();
        Settings indexSettingsSettings = indexSettings(indexVersion, 1, 1).build();
        IndexSettings indexSettings = new IndexSettings(
            IndexMetadata.builder("test").settings(indexSettingsSettings).build(),
            Settings.EMPTY
        );

        // Minimal mapping: the keyword field targeted by the query builders above.
        KeywordFieldMapper keywordMapper = new KeywordFieldMapper.Builder(fieldName, indexSettings.getIndexVersionCreated()).build(
            MapperBuilderContext.root(false, false)
        );
        MappingLookup mappingLookup = MappingLookup.fromMappers(
            Mapping.EMPTY,
            List.of(keywordMapper),
            Collections.emptyList(),
            IndexMode.STANDARD
        );

        // Field-data lookup always resolves to the percolator's binary query field,
        // which is how the query store reads back the serialized builders.
        BytesBinaryIndexFieldData fieldData = new BytesBinaryIndexFieldData(fieldMapper.fullPath(), CoreValuesSourceType.KEYWORD);
        BiFunction<MappedFieldType, FieldDataContext, IndexFieldData<?>> indexFieldDataLookup = (mft, fdc) -> fieldData;

        // Only the pieces exercised by query construction are populated; the rest are null.
        SearchExecutionContext baseContext = new SearchExecutionContext(
            0,
            0,
            indexSettings,
            null,
            indexFieldDataLookup,
            null,
            mappingLookup,
            null,
            null,
            parserConfig,
            writeableRegistry,
            null,
            null,
            System::currentTimeMillis,
            null,
            null,
            () -> true,
            null,
            Collections.emptyMap(),
            null,
            MapperMetrics.NOOP
        );
        // Wrap the base context with the breaker under test so construction bytes hit it.
        SearchExecutionContext searchExecutionContext = new SearchExecutionContext(baseContext, circuitBreaker);

        PercolateQuery.QueryStore queryStore = PercolateQueryBuilder.createStore(fieldMapper.fieldType(), searchExecutionContext);

        try (IndexReader indexReader = DirectoryReader.open(directory)) {
            LeafReaderContext leafContext = indexReader.leaves().get(0);
            CheckedFunction<Integer, Query, IOException> queries = queryStore.getQueries(leafContext);
            // Sanity check: every indexed query builder became one document in this leaf.
            assertEquals(queryBuilders.length, leafContext.reader().numDocs());

            long baselineUsed = circuitBreaker.getUsed();
            for (int i = 0; i < queryBuilders.length; i++) {
                // Materializing the stored query must reserve breaker bytes...
                queries.apply(i);
                assertThat(
                    "CB bytes should still be tracked (not leaked) after document " + i,
                    circuitBreaker.getUsed(),
                    greaterThan(baselineUsed)
                );
            }

            // ...and the single request-end release must return usage exactly to baseline.
            searchExecutionContext.releaseQueryConstructionMemory();
            assertEquals("All CB bytes must be released after the request-end release", baselineUsed, circuitBreaker.getUsed());
        }
    }
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -135,8 +135,12 @@ public Suggestion<? extends Entry<? extends Option>> innerExecute(
.createParser(searchExecutionContext.getParserConfig(), querySource)
) {
QueryBuilder innerQueryBuilder = AbstractQueryBuilder.parseTopLevelQuery(parser);
final ParsedQuery parsedQuery = searchExecutionContext.toQuery(innerQueryBuilder);
collateMatch = Lucene.exists(searcher, parsedQuery.query());
try {
final ParsedQuery parsedQuery = searchExecutionContext.toQuery(innerQueryBuilder);
collateMatch = Lucene.exists(searcher, parsedQuery.query());
} finally {
searchExecutionContext.releaseQueryConstructionMemory();
}
}
}
if (collateMatch == false && collatePrune == false) {
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,135 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the "Elastic License
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
* Public License v 1"; you may not use this file except in compliance with, at
* your election, the "Elastic License 2.0", the "GNU Affero General Public
* License v3.0 only", or the "Server Side Public License, v 1".
*/
package org.elasticsearch.index.query;

import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.breaker.CircuitBreaker;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.index.IndexMode;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.IndexVersion;
import org.elasticsearch.index.mapper.KeywordFieldMapper;
import org.elasticsearch.index.mapper.MapperBuilderContext;
import org.elasticsearch.index.mapper.MapperMetrics;
import org.elasticsearch.index.mapper.Mapping;
import org.elasticsearch.index.mapper.MappingLookup;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.search.fetch.subphase.InnerHitsContext;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xcontent.NamedXContentRegistry;

import java.io.IOException;
import java.util.Collections;
import java.util.List;

import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;

public class InnerHitContextBuilderCircuitBreakerTests extends ESTestCase {

@Override
protected NamedWriteableRegistry writableRegistry() {
SearchModule searchModule = new SearchModule(Settings.EMPTY, Collections.emptyList());
return new NamedWriteableRegistry(searchModule.getNamedWriteables());
}

@Override
protected NamedXContentRegistry xContentRegistry() {
SearchModule searchModule = new SearchModule(Settings.EMPTY, Collections.emptyList());
return new NamedXContentRegistry(searchModule.getNamedXContents());
}

public void testCBTrackedDuringInnerHitsAndReleasedAtRequestEnd() throws IOException {
Directory dir = new ByteBuffersDirectory();
// Empty index – we just need a valid IndexSearcher for the context.
try (IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(new WhitespaceAnalyzer()))) {
// intentionally empty
}

try (DirectoryReader reader = DirectoryReader.open(dir)) {
IndexSearcher searcher = new IndexSearcher(reader);

IndexVersion indexVersion = IndexVersion.current();
Settings indexSettingsSettings = indexSettings(indexVersion, 1, 1).build();
IndexSettings indexSettings = new IndexSettings(
IndexMetadata.builder("test").settings(indexSettingsSettings).build(),
Settings.EMPTY
);
KeywordFieldMapper fieldMapper = new KeywordFieldMapper.Builder("field", indexSettings.getIndexVersionCreated()).build(
MapperBuilderContext.root(false, false)
);
MappingLookup mappingLookup = MappingLookup.fromMappers(
Mapping.EMPTY,
List.of(fieldMapper),
Collections.emptyList(),
IndexMode.STANDARD
);

SearchExecutionContext baseCtx = new SearchExecutionContext(
0,
0,
indexSettings,
null,
null,
null,
mappingLookup,
null,
null,
parserConfig(),
writableRegistry(),
null,
searcher,
System::currentTimeMillis,
null,
null,
() -> true,
null,
Collections.emptyMap(),
null,
MapperMetrics.NOOP
);

CircuitBreaker cb = newLimitedBreaker(ByteSizeValue.ofMb(100));
SearchExecutionContext ctx = new SearchExecutionContext(baseCtx, cb);
QueryBuilder innerQuery = new WildcardQueryBuilder("field", "*test*pattern*");
InnerHitBuilder innerHitBuilder = new InnerHitBuilder("test_inner");
InnerHitContextBuilder builder = new InnerHitContextBuilder(innerQuery, innerHitBuilder, Collections.emptyMap()) {
@Override
protected void doBuild(SearchContext parentSearchContext, InnerHitsContext innerHitsContext) {}
};

InnerHitsContext.InnerHitSubContext subContext = org.mockito.Mockito.mock(InnerHitsContext.InnerHitSubContext.class);

long baselineUsed = cb.getUsed();

int iterations = 5;
for (int i = 0; i < iterations; i++) {
builder.setupInnerHitsContext(ctx, subContext);
assertThat(
"CB bytes must still be tracked (not released early) after iteration " + i,
cb.getUsed(),
greaterThan(baselineUsed)
);
}

ctx.releaseQueryConstructionMemory();
assertThat("All CB bytes must be released after the request-end release", cb.getUsed(), equalTo(baselineUsed));
}
}
}
Loading
Loading