Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
21 commits
Select commit Hold shift + click to select a range
af0a5fb
Fix circuit breaker leak in percolator query construction
drempapis Mar 24, 2026
540da87
Update docs/changelog/144827.yaml
drempapis Mar 24, 2026
886e26a
[CI] Auto commit changes from spotless
Mar 24, 2026
990161e
Merge branch 'main' into fix/cb_leak_percolator
drempapis Mar 24, 2026
c665773
Merge branch 'main' into fix/cb_leak_percolator
drempapis Mar 24, 2026
05868b4
update after review
drempapis Mar 25, 2026
143183c
Merge branch 'fix/cb_leak_percolator' of github.com:drempapis/elastic…
drempapis Mar 25, 2026
30dc8ba
Merge branch 'main' into fix/cb_leak_percolator
drempapis Mar 25, 2026
e0620d6
Merge branch 'main' into fix/cb_leak_percolator
drempapis Mar 25, 2026
65bc3fe
Merge branch 'main' into fix/cb_leak_percolator
drempapis Mar 25, 2026
f5da9d5
Merge branch 'main' into fix/cb_leak_percolator
drempapis Mar 25, 2026
3d0d83a
Merge branch 'main' into fix/cb_leak_percolator
drempapis Mar 25, 2026
7571e86
Merge branch 'main' into fix/cb_leak_percolator
drempapis Mar 25, 2026
b9b05eb
Merge branch 'main' into fix/cb_leak_percolator
drempapis Mar 25, 2026
c25d4ee
Merge branch 'main' into fix/cb_leak_percolator
drempapis Mar 25, 2026
543bf3f
Merge branch 'main' into fix/cb_leak_percolator
drempapis Mar 25, 2026
0bbcfe1
Merge branch 'main' into fix/cb_leak_percolator
drempapis Mar 25, 2026
4685775
Merge branch 'main' into fix/cb_leak_percolator
drempapis Mar 25, 2026
98b94ec
Merge branch 'main' into fix/cb_leak_percolator
drempapis Mar 25, 2026
4c01365
Merge branch 'main' into fix/cb_leak_percolator
drempapis Mar 26, 2026
e6d7a93
Merge branch 'main' into fix/cb_leak_percolator
drempapis Mar 26, 2026
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions docs/changelog/144827.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
area: Search
issues: []
pr: 144827
summary: Fix circuit breaker leak in percolator query construction
type: bug
Original file line number Diff line number Diff line change
Expand Up @@ -699,6 +699,21 @@ public boolean fieldExistsInIndex(String fieldname) {
public void addNamedQuery(String name, Query query) {
source.addNamedQuery(name, query);
}

// Delegate circuit-breaker accounting to the wrapped source context so that
// memory reserved while constructing percolator queries is tracked in a
// single place and can be released once at request end.
@Override
public void addCircuitBreakerMemory(long bytes, String label) {
source.addCircuitBreakerMemory(bytes, label);
}

// Report the bytes currently reserved for query construction by the wrapped
// source context; this wrapper adds no accounting of its own.
@Override
public long getQueryConstructionMemoryUsed() {
return source.getQueryConstructionMemoryUsed();
}

// Release all query-construction bytes held by the wrapped source context.
// Forwarding (rather than no-op'ing) is what prevents the circuit-breaker
// leak this change fixes: the delegating wrapper must not strand the bytes.
@Override
public void releaseQueryConstructionMemory() {
source.releaseQueryConstructionMemory();
}
};

// This means that fields in the query need to exist in the mapping prior to registering this query
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,8 +20,10 @@
import org.apache.lucene.store.Directory;
import org.elasticsearch.TransportVersion;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.breaker.CircuitBreaker;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.core.CheckedFunction;
import org.elasticsearch.index.IndexMode;
import org.elasticsearch.index.IndexSettings;
Expand All @@ -39,8 +41,11 @@
import org.elasticsearch.index.mapper.Mapping;
import org.elasticsearch.index.mapper.MappingLookup;
import org.elasticsearch.index.mapper.TestDocumentParserContext;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.RegexpQueryBuilder;
import org.elasticsearch.index.query.SearchExecutionContext;
import org.elasticsearch.index.query.TermQueryBuilder;
import org.elasticsearch.index.query.WildcardQueryBuilder;
import org.elasticsearch.script.field.BinaryDocValuesField;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.search.aggregations.support.CoreValuesSourceType;
Expand All @@ -56,6 +61,7 @@
import java.util.stream.Collectors;

import static org.elasticsearch.index.query.SearchExecutionContextHelper.SHARD_SEARCH_STATS;
import static org.hamcrest.Matchers.greaterThan;

public class QueryBuilderStoreTests extends ESTestCase {

Expand Down Expand Up @@ -119,7 +125,8 @@ public void testStoringQueryBuilders() throws IOException {
Collections.emptyList(),
IndexMode.STANDARD
);
SearchExecutionContext searchExecutionContext = new SearchExecutionContext(
CircuitBreaker breaker = newLimitedBreaker(ByteSizeValue.ofMb(100));
SearchExecutionContext baseContext = new SearchExecutionContext(
0,
0,
indexSettings,
Expand All @@ -143,6 +150,7 @@ public void testStoringQueryBuilders() throws IOException {
MapperMetrics.NOOP,
SHARD_SEARCH_STATS
);
SearchExecutionContext searchExecutionContext = new SearchExecutionContext(baseContext, breaker);

PercolateQuery.QueryStore queryStore = PercolateQueryBuilder.createStore(
fieldMapper.fieldType(),
Expand All @@ -162,4 +170,114 @@ public void testStoringQueryBuilders() throws IOException {
}
}
}

/**
 * Verifies circuit-breaker accounting around per-document percolator query
 * construction: bytes reserved while materializing each stored query must
 * remain tracked (not leaked or released early) until the single
 * request-end call to {@code releaseQueryConstructionMemory()}, after which
 * usage must return exactly to the baseline.
 */
public void testCircuitBreakerReleasedAfterPerDocumentQueryConstruction() throws IOException {
// Generous 100 MB limit: the test inspects tracked bytes, it must not trip.
CircuitBreaker circuitBreaker = newLimitedBreaker(ByteSizeValue.ofMb(100));

String fieldName = "keyword_field";
// Wildcard/regexp builders chosen because their construction reserves
// non-trivial breaker memory, making the greaterThan(baseline) check meaningful.
QueryBuilder[] queryBuilders = new QueryBuilder[] {
new WildcardQueryBuilder(fieldName, "test*pattern*with*wildcards"),
new RegexpQueryBuilder(fieldName, ".*test.*regexp.*pattern.*"),
new WildcardQueryBuilder(fieldName, "another*wildcard*query"),
new RegexpQueryBuilder(fieldName, "prefix[0-9]+suffix"), };

try (Directory directory = newDirectory()) {
IndexWriterConfig config = new IndexWriterConfig(new WhitespaceAnalyzer());
// Keep one segment per flush so every stored query stays addressable
// by its insertion order in a single leaf.
config.setMergePolicy(NoMergePolicy.INSTANCE);
BinaryFieldMapper fieldMapper = PercolatorFieldMapper.Builder.createQueryBuilderFieldBuilder(
MapperBuilderContext.root(false, false)
);

IndexVersion indexVersion = IndexVersion.current();
// Index one document per query builder, each carrying the serialized
// query in the percolator's binary query-builder field.
try (IndexWriter indexWriter = new IndexWriter(directory, config)) {
for (QueryBuilder queryBuilder : queryBuilders) {
DocumentParserContext documentParserContext = new TestDocumentParserContext();
PercolatorFieldMapper.createQueryBuilderField(
indexVersion,
TransportVersion.current(),
fieldMapper,
queryBuilder,
documentParserContext
);
indexWriter.addDocument(documentParserContext.doc());
}
}

NamedWriteableRegistry writeableRegistry = writableRegistry();
XContentParserConfiguration parserConfig = parserConfig();
Settings indexSettingsSettings = indexSettings(indexVersion, 1, 1).build();
IndexSettings indexSettings = new IndexSettings(
IndexMetadata.builder("test").settings(indexSettingsSettings).build(),
Settings.EMPTY
);

// Minimal mapping containing only the keyword field the stored queries target.
KeywordFieldMapper keywordMapper = new KeywordFieldMapper.Builder(fieldName, indexSettings).build(
MapperBuilderContext.root(false, false)
);
MappingLookup mappingLookup = MappingLookup.fromMappers(
Mapping.EMPTY,
List.of(keywordMapper),
Collections.emptyList(),
IndexMode.STANDARD
);

// Field data over the binary query-builder field so the query store can
// read back the serialized queries from doc values.
BytesBinaryIndexFieldData fieldData = new BytesBinaryIndexFieldData(
fieldMapper.fullPath(),
CoreValuesSourceType.KEYWORD,
BinaryDocValuesField::new
);
BiFunction<MappedFieldType, FieldDataContext, IndexFieldData<?>> indexFieldDataLookup = (mft, fdc) -> fieldData;

// NOTE: positional constructor — argument order must match
// SearchExecutionContext exactly; nulls are unused collaborators.
SearchExecutionContext baseContext = new SearchExecutionContext(
0,
0,
indexSettings,
null,
indexFieldDataLookup,
null,
mappingLookup,
null,
null,
parserConfig,
writeableRegistry,
null,
null,
System::currentTimeMillis,
null,
null,
() -> true,
null,
Collections.emptyMap(),
null,
MapperMetrics.NOOP,
SHARD_SEARCH_STATS
);
// Wrap the base context with the breaker-aware copy constructor under test.
SearchExecutionContext searchExecutionContext = new SearchExecutionContext(baseContext, circuitBreaker);

PercolateQuery.QueryStore queryStore = PercolateQueryBuilder.createStore(
fieldMapper.fieldType(),
false,
searchExecutionContext
);

try (IndexReader indexReader = DirectoryReader.open(directory)) {
LeafReaderContext leafContext = indexReader.leaves().get(0);
CheckedFunction<Integer, Query, IOException> queries = queryStore.getQueries(leafContext);
assertEquals(queryBuilders.length, leafContext.reader().numDocs());

long baselineUsed = circuitBreaker.getUsed();
// Materialize each stored query; after every one the breaker must
// still hold the reserved bytes (i.e. no premature release, no leak
// of untracked memory).
for (int i = 0; i < queryBuilders.length; i++) {
queries.apply(i);
assertThat(
"CB bytes should still be tracked (not leaked) after document " + i,
circuitBreaker.getUsed(),
greaterThan(baselineUsed)
);
}

// Single request-end release must return usage exactly to baseline.
searchExecutionContext.releaseQueryConstructionMemory();
assertEquals("All CB bytes must be released after the request-end release", baselineUsed, circuitBreaker.getUsed());
}
}
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -135,8 +135,12 @@ public Suggestion<? extends Entry<? extends Option>> innerExecute(
.createParser(searchExecutionContext.getParserConfig(), querySource)
) {
QueryBuilder innerQueryBuilder = AbstractQueryBuilder.parseTopLevelQuery(parser);
final ParsedQuery parsedQuery = searchExecutionContext.toQuery(innerQueryBuilder);
collateMatch = Lucene.exists(searcher, parsedQuery.query());
try {
final ParsedQuery parsedQuery = searchExecutionContext.toQuery(innerQueryBuilder);
collateMatch = Lucene.exists(searcher, parsedQuery.query());
} finally {
searchExecutionContext.releaseQueryConstructionMemory();
}
}
}
if (collateMatch == false && collatePrune == false) {
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,137 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the "Elastic License
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
* Public License v 1"; you may not use this file except in compliance with, at
* your election, the "Elastic License 2.0", the "GNU Affero General Public
* License v3.0 only", or the "Server Side Public License, v 1".
*/
package org.elasticsearch.index.query;

import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.breaker.CircuitBreaker;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.index.IndexMode;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.IndexVersion;
import org.elasticsearch.index.mapper.KeywordFieldMapper;
import org.elasticsearch.index.mapper.MapperBuilderContext;
import org.elasticsearch.index.mapper.MapperMetrics;
import org.elasticsearch.index.mapper.Mapping;
import org.elasticsearch.index.mapper.MappingLookup;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.search.fetch.subphase.InnerHitsContext;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xcontent.NamedXContentRegistry;

import java.io.IOException;
import java.util.Collections;
import java.util.List;

import static org.elasticsearch.index.query.SearchExecutionContextHelper.SHARD_SEARCH_STATS;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;

public class InnerHitContextBuilderCircuitBreakerTests extends ESTestCase {

/**
 * Supplies a writeable registry backed by the default search module so query
 * builders used in this test can be (de)serialized.
 */
@Override
protected NamedWriteableRegistry writableRegistry() {
    return new NamedWriteableRegistry(new SearchModule(Settings.EMPTY, Collections.emptyList()).getNamedWriteables());
}

/**
 * Supplies an XContent registry backed by the default search module so query
 * builders used in this test can be parsed from JSON.
 */
@Override
protected NamedXContentRegistry xContentRegistry() {
    return new NamedXContentRegistry(new SearchModule(Settings.EMPTY, Collections.emptyList()).getNamedXContents());
}

/**
 * Verifies circuit-breaker accounting around inner-hits query construction:
 * bytes reserved by repeated {@code setupInnerHitsContext} calls must remain
 * tracked (not released early) and must be fully returned by the single
 * request-end call to {@code releaseQueryConstructionMemory()}.
 */
public void testCBTrackedDuringInnerHitsAndReleasedAtRequestEnd() throws IOException {
    // try-with-resources: the original leaked the Directory (it is Closeable
    // and was never closed, on the success path or on assertion failure).
    try (Directory dir = new ByteBuffersDirectory()) {
        // Empty index – we just need a valid IndexSearcher for the context.
        try (IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(new WhitespaceAnalyzer()))) {
            // intentionally empty
        }

        try (DirectoryReader reader = DirectoryReader.open(dir)) {
            IndexSearcher searcher = new IndexSearcher(reader);

            IndexVersion indexVersion = IndexVersion.current();
            Settings indexSettingsSettings = indexSettings(indexVersion, 1, 1).build();
            IndexSettings indexSettings = new IndexSettings(
                IndexMetadata.builder("test").settings(indexSettingsSettings).build(),
                Settings.EMPTY
            );
            // Minimal mapping with the single keyword field the inner query targets.
            KeywordFieldMapper fieldMapper = new KeywordFieldMapper.Builder("field", indexSettings).build(
                MapperBuilderContext.root(false, false)
            );
            MappingLookup mappingLookup = MappingLookup.fromMappers(
                Mapping.EMPTY,
                List.of(fieldMapper),
                Collections.emptyList(),
                IndexMode.STANDARD
            );

            // NOTE: positional constructor — argument order must match
            // SearchExecutionContext exactly; nulls are unused collaborators.
            SearchExecutionContext baseCtx = new SearchExecutionContext(
                0,
                0,
                indexSettings,
                null,
                null,
                null,
                mappingLookup,
                null,
                null,
                parserConfig(),
                writableRegistry(),
                null,
                searcher,
                System::currentTimeMillis,
                null,
                null,
                () -> true,
                null,
                Collections.emptyMap(),
                null,
                MapperMetrics.NOOP,
                SHARD_SEARCH_STATS
            );

            // Generous limit: the test inspects tracked bytes, it must not trip.
            CircuitBreaker cb = newLimitedBreaker(ByteSizeValue.ofMb(100));
            SearchExecutionContext ctx = new SearchExecutionContext(baseCtx, cb);
            // Wildcard query chosen because its construction reserves non-trivial
            // breaker memory, making the greaterThan(baseline) check meaningful.
            QueryBuilder innerQuery = new WildcardQueryBuilder("field", "*test*pattern*");
            InnerHitBuilder innerHitBuilder = new InnerHitBuilder("test_inner");
            InnerHitContextBuilder builder = new InnerHitContextBuilder(innerQuery, innerHitBuilder, Collections.emptyMap()) {
                @Override
                protected void doBuild(SearchContext parentSearchContext, InnerHitsContext innerHitsContext) {}
            };

            // Only the sub-context's presence matters here, not its behavior.
            InnerHitsContext.InnerHitSubContext subContext = org.mockito.Mockito.mock(InnerHitsContext.InnerHitSubContext.class);

            long baselineUsed = cb.getUsed();

            // After each setup call the breaker must still hold the reserved
            // bytes (i.e. nothing is released before request end).
            int iterations = 5;
            for (int i = 0; i < iterations; i++) {
                builder.setupInnerHitsContext(ctx, subContext);
                assertThat(
                    "CB bytes must still be tracked (not released early) after iteration " + i,
                    cb.getUsed(),
                    greaterThan(baselineUsed)
                );
            }

            // Single request-end release must return usage exactly to baseline.
            ctx.releaseQueryConstructionMemory();
            assertThat("All CB bytes must be released after the request-end release", cb.getUsed(), equalTo(baselineUsed));
        }
    }
}
}
Loading
Loading