From 2302f3c825b2ecccad5ce2b15f9e87dc90c18866 Mon Sep 17 00:00:00 2001
From: Andre van de Ven
Date: Fri, 13 Jun 2025 14:12:54 -0700
Subject: [PATCH 01/95] added timing for entire fetch phase

Signed-off-by: Andre van de Ven
---
 .../org/opensearch/search/SearchService.java  |   6 +
 .../opensearch/search/fetch/FetchPhase.java   | 155 ++++++++++--------
 .../search/profile/ProfileShardResult.java    |  12 ++
 .../opensearch/search/profile/Profilers.java  |   7 +
 .../profile/SearchProfileShardResults.java    |  18 +-
 .../profile/fetch/FetchProfileBreakdown.java  |  18 ++
 .../fetch/FetchProfileShardResult.java        |  74 +++++++++
 .../search/profile/fetch/FetchProfiler.java   |  25 +++
 .../search/profile/fetch/FetchTimingType.java |  22 +++
 .../fetch/InternalFetchProfileTree.java       |  32 ++++
 .../search/profile/fetch/package-info.java    |  11 ++
 11 files changed, 307 insertions(+), 73 deletions(-)
 create mode 100644 server/src/main/java/org/opensearch/search/profile/fetch/FetchProfileBreakdown.java
 create mode 100644 server/src/main/java/org/opensearch/search/profile/fetch/FetchProfileShardResult.java
 create mode 100644 server/src/main/java/org/opensearch/search/profile/fetch/FetchProfiler.java
 create mode 100644 server/src/main/java/org/opensearch/search/profile/fetch/FetchTimingType.java
 create mode 100644 server/src/main/java/org/opensearch/search/profile/fetch/InternalFetchProfileTree.java
 create mode 100644 server/src/main/java/org/opensearch/search/profile/fetch/package-info.java

diff --git a/server/src/main/java/org/opensearch/search/SearchService.java b/server/src/main/java/org/opensearch/search/SearchService.java
index 2dae9de95c7a6..999d352f519d0 100644
--- a/server/src/main/java/org/opensearch/search/SearchService.java
+++ b/server/src/main/java/org/opensearch/search/SearchService.java
@@ -128,7 +128,9 @@
 import org.opensearch.search.internal.ShardSearchContextId;
 import org.opensearch.search.internal.ShardSearchRequest;
 import org.opensearch.search.lookup.SearchLookup;
+import org.opensearch.search.profile.ProfileShardResult;
 import org.opensearch.search.profile.Profilers;
+import org.opensearch.search.profile.SearchProfileShardResults;
 import org.opensearch.search.query.QueryPhase;
 import org.opensearch.search.query.QuerySearchRequest;
 import org.opensearch.search.query.QuerySearchResult;
@@ -756,6 +758,10 @@ private QueryFetchSearchResult executeFetchPhase(ReaderContext reader, SearchCon
         try (SearchOperationListenerExecutor executor = new SearchOperationListenerExecutor(context, true, afterQueryTime)) {
             shortcutDocIdsToLoad(context);
             fetchPhase.execute(context);
+            if (context.getProfilers() != null) {
+                ProfileShardResult shardResults = SearchProfileShardResults.buildShardResults(context.getProfilers(), context.request());
+                context.queryResult().profileResults(shardResults);
+            }
             if (reader.singleSession()) {
                 freeReaderContext(reader.id());
             }
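The guard above only runs when the request was sent with profiling enabled, since context.getProfilers() is null otherwise. For reference, this is roughly how a caller opts in; the classes below are the standard OpenSearch request builders rather than anything added by this patch, and the index name and query are placeholders:

    import org.opensearch.action.search.SearchRequest;
    import org.opensearch.index.query.QueryBuilders;
    import org.opensearch.search.builder.SearchSourceBuilder;

    public class ProfiledSearchRequestSketch {
        public static SearchRequest buildProfiledRequest() {
            // profile(true) is what makes SearchContext#getProfilers() non-null on the shard.
            SearchSourceBuilder source = new SearchSourceBuilder()
                .query(QueryBuilders.matchAllQuery())
                .profile(true);
            return new SearchRequest("my-index").source(source);
        }
    }
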
diff --git a/server/src/main/java/org/opensearch/search/fetch/FetchPhase.java b/server/src/main/java/org/opensearch/search/fetch/FetchPhase.java
index df37b7dbfda98..8dd7e2cb69c88 100644
--- a/server/src/main/java/org/opensearch/search/fetch/FetchPhase.java
+++ b/server/src/main/java/org/opensearch/search/fetch/FetchPhase.java
@@ -70,6 +70,8 @@
 import org.opensearch.search.internal.SearchContext;
 import org.opensearch.search.lookup.SearchLookup;
 import org.opensearch.search.lookup.SourceLookup;
+import org.opensearch.search.profile.Timer;
+import org.opensearch.search.profile.fetch.FetchTimingType;
 
 import java.io.IOException;
 import java.util.ArrayList;
@@ -103,92 +105,103 @@ public FetchPhase(List<FetchSubPhase> fetchSubPhases) {
     }
 
     public void execute(SearchContext context) {
-        if (LOGGER.isTraceEnabled()) {
-            LOGGER.trace("{}", new SearchContextSourcePrinter(context));
+        Timer timer = null;
+        if (context.getProfilers() != null) {
+            timer = context.getProfilers().getFetchProfiler().getQueryBreakdown("fetch").getTimer(FetchTimingType.EXECUTE_FETCH_PHASE);
+            timer.start();
         }
+        try {
+            if (LOGGER.isTraceEnabled()) {
+                LOGGER.trace("{}", new SearchContextSourcePrinter(context));
+            }
 
-        if (context.isCancelled()) {
-            throw new TaskCancelledException("cancelled task with reason: " + context.getTask().getReasonCancelled());
-        }
+            if (context.isCancelled()) {
+                throw new TaskCancelledException("cancelled task with reason: " + context.getTask().getReasonCancelled());
+            }
 
-        if (context.docIdsToLoadSize() == 0) {
-            // no individual hits to process, so we shortcut
-            context.fetchResult()
-                .hits(new SearchHits(new SearchHit[0], context.queryResult().getTotalHits(), context.queryResult().getMaxScore()));
-            return;
-        }
+            if (context.docIdsToLoadSize() == 0) {
+                // no individual hits to process, so we shortcut
+                context.fetchResult()
+                    .hits(new SearchHits(new SearchHit[0], context.queryResult().getTotalHits(), context.queryResult().getMaxScore()));
+                return;
+            }
 
-        DocIdToIndex[] docs = new DocIdToIndex[context.docIdsToLoadSize()];
-        for (int index = 0; index < context.docIdsToLoadSize(); index++) {
-            docs[index] = new DocIdToIndex(context.docIdsToLoad()[context.docIdsToLoadFrom() + index], index);
-        }
-        // make sure that we iterate in doc id order
-        Arrays.sort(docs);
+            DocIdToIndex[] docs = new DocIdToIndex[context.docIdsToLoadSize()];
+            for (int index = 0; index < context.docIdsToLoadSize(); index++) {
+                docs[index] = new DocIdToIndex(context.docIdsToLoad()[context.docIdsToLoadFrom() + index], index);
+            }
+            // make sure that we iterate in doc id order
+            Arrays.sort(docs);
 
-        Map<String, Set<String>> storedToRequestedFields = new HashMap<>();
-        FieldsVisitor fieldsVisitor = createStoredFieldsVisitor(context, storedToRequestedFields);
+            Map<String, Set<String>> storedToRequestedFields = new HashMap<>();
+            FieldsVisitor fieldsVisitor = createStoredFieldsVisitor(context, storedToRequestedFields);
 
-        FetchContext fetchContext = new FetchContext(context);
+            FetchContext fetchContext = new FetchContext(context);
 
-        SearchHit[] hits = new SearchHit[context.docIdsToLoadSize()];
+            SearchHit[] hits = new SearchHit[context.docIdsToLoadSize()];
 
-        List<FetchSubPhaseProcessor> processors = getProcessors(context.shardTarget(), fetchContext);
+            List<FetchSubPhaseProcessor> processors = getProcessors(context.shardTarget(), fetchContext);
 
-        int currentReaderIndex = -1;
-        LeafReaderContext currentReaderContext = null;
-        CheckedBiConsumer<Integer, FieldsVisitor, IOException> fieldReader = null;
-        boolean hasSequentialDocs = hasSequentialDocs(docs);
-        for (int index = 0; index < context.docIdsToLoadSize(); index++) {
-            if (context.isCancelled()) {
-                throw new TaskCancelledException("cancelled task with reason: " + context.getTask().getReasonCancelled());
-            }
-            int docId = docs[index].docId;
-            try {
-                int readerIndex = ReaderUtil.subIndex(docId, context.searcher().getIndexReader().leaves());
-                if (currentReaderIndex != readerIndex) {
-                    currentReaderContext = context.searcher().getIndexReader().leaves().get(readerIndex);
-                    currentReaderIndex = readerIndex;
-                    if (currentReaderContext.reader() instanceof SequentialStoredFieldsLeafReader
-                        && hasSequentialDocs
-                        && docs.length >= 10) {
-                        // All the docs to fetch are adjacent but Lucene stored fields are optimized
-                        // for random access and don't optimize for sequential access - except for merging.
-                        // So we do a little hack here and pretend we're going to do merges in order to
-                        // get better sequential access.
-                        SequentialStoredFieldsLeafReader lf = (SequentialStoredFieldsLeafReader) currentReaderContext.reader();
-                        fieldReader = lf.getSequentialStoredFieldsReader()::document;
-                    } else {
-                        fieldReader = currentReaderContext.reader().storedFields()::document;
+            int currentReaderIndex = -1;
+            LeafReaderContext currentReaderContext = null;
+            CheckedBiConsumer<Integer, FieldsVisitor, IOException> fieldReader = null;
+            boolean hasSequentialDocs = hasSequentialDocs(docs);
+            for (int index = 0; index < context.docIdsToLoadSize(); index++) {
+                if (context.isCancelled()) {
+                    throw new TaskCancelledException("cancelled task with reason: " + context.getTask().getReasonCancelled());
+                }
+                int docId = docs[index].docId;
+                try {
+                    int readerIndex = ReaderUtil.subIndex(docId, context.searcher().getIndexReader().leaves());
+                    if (currentReaderIndex != readerIndex) {
+                        currentReaderContext = context.searcher().getIndexReader().leaves().get(readerIndex);
+                        currentReaderIndex = readerIndex;
+                        if (currentReaderContext.reader() instanceof SequentialStoredFieldsLeafReader
+                            && hasSequentialDocs
+                            && docs.length >= 10) {
+                            // All the docs to fetch are adjacent but Lucene stored fields are optimized
+                            // for random access and don't optimize for sequential access - except for merging.
+                            // So we do a little hack here and pretend we're going to do merges in order to
+                            // get better sequential access.
+                            SequentialStoredFieldsLeafReader lf = (SequentialStoredFieldsLeafReader) currentReaderContext.reader();
+                            fieldReader = lf.getSequentialStoredFieldsReader()::document;
+                        } else {
+                            fieldReader = currentReaderContext.reader().storedFields()::document;
+                        }
+                        for (FetchSubPhaseProcessor processor : processors) {
+                            processor.setNextReader(currentReaderContext);
+                        }
                     }
+                    assert currentReaderContext != null;
+                    HitContext hit = prepareHitContext(
+                        context,
+                        fetchContext.searchLookup(),
+                        fieldsVisitor,
+                        docId,
+                        storedToRequestedFields,
+                        currentReaderContext,
+                        fieldReader
+                    );
                     for (FetchSubPhaseProcessor processor : processors) {
-                        processor.setNextReader(currentReaderContext);
+                        processor.process(hit);
                     }
+                    hits[docs[index].index] = hit.hit();
+                } catch (Exception e) {
+                    throw new FetchPhaseExecutionException(context.shardTarget(), "Error running fetch phase for doc [" + docId + "]", e);
                 }
-                assert currentReaderContext != null;
-                HitContext hit = prepareHitContext(
-                    context,
-                    fetchContext.searchLookup(),
-                    fieldsVisitor,
-                    docId,
-                    storedToRequestedFields,
-                    currentReaderContext,
-                    fieldReader
-                );
-                for (FetchSubPhaseProcessor processor : processors) {
-                    processor.process(hit);
-                }
-                hits[docs[index].index] = hit.hit();
-            } catch (Exception e) {
-                throw new FetchPhaseExecutionException(context.shardTarget(), "Error running fetch phase for doc [" + docId + "]", e);
             }
-        }
-        if (context.isCancelled()) {
-            throw new TaskCancelledException("cancelled task with reason: " + context.getTask().getReasonCancelled());
-        }
-
-        TotalHits totalHits = context.queryResult().getTotalHits();
-        context.fetchResult().hits(new SearchHits(hits, totalHits, context.queryResult().getMaxScore()));
+            if (context.isCancelled()) {
+                throw new TaskCancelledException("cancelled task with reason: " + context.getTask().getReasonCancelled());
+            }
+            TotalHits totalHits = context.queryResult().getTotalHits();
+            context.fetchResult().hits(new SearchHits(hits, totalHits, context.queryResult().getMaxScore()));
+        } finally {
+            if (timer != null) {
+                timer.stop();
+                context.getProfilers().getFetchProfiler().pollLastElement();
+            }
+        }
     }
 
     List<FetchSubPhaseProcessor> getProcessors(SearchShardTarget target, FetchContext context) {
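The whole of execute() is now wrapped in a single profiler timer: the timer is started before any work, the original body runs inside a try block, and the timer is stopped (and the breakdown popped off the profiler) in finally so that cancelled or failing fetches are still recorded. Below is a framework-free sketch of that same pattern using plain System.nanoTime(); PhaseTimer and timeFetch are illustrative stand-ins, not OpenSearch classes:

    // Minimal illustration of the start/try/finally-stop pattern used above.
    final class PhaseTimer {
        private long startNanos;
        private long totalNanos;

        void start() {
            startNanos = System.nanoTime();
        }

        void stop() {
            totalNanos += System.nanoTime() - startNanos;
        }

        long totalTimeNanos() {
            return totalNanos;
        }
    }

    final class FetchPhaseTimingSketch {
        static long timeFetch(Runnable fetchWork, boolean profilingEnabled) {
            PhaseTimer timer = profilingEnabled ? new PhaseTimer() : null;
            if (timer != null) {
                timer.start();
            }
            try {
                fetchWork.run();      // the original fetch logic runs unchanged
            } finally {
                if (timer != null) {
                    timer.stop();     // recorded even if fetchWork throws
                }
            }
            return timer != null ? timer.totalTimeNanos() : 0L;
        }
    }
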
diff --git a/server/src/main/java/org/opensearch/search/profile/ProfileShardResult.java b/server/src/main/java/org/opensearch/search/profile/ProfileShardResult.java
index 8ff622152ee70..84f856332ce3d 100644
--- a/server/src/main/java/org/opensearch/search/profile/ProfileShardResult.java
+++ b/server/src/main/java/org/opensearch/search/profile/ProfileShardResult.java
@@ -37,6 +37,7 @@
 import org.opensearch.core.common.io.stream.StreamOutput;
 import org.opensearch.core.common.io.stream.Writeable;
 import org.opensearch.search.profile.aggregation.AggregationProfileShardResult;
+import org.opensearch.search.profile.fetch.FetchProfileShardResult;
 import org.opensearch.search.profile.query.QueryProfileShardResult;
 
 import java.io.IOException;
@@ -56,14 +57,19 @@ public class ProfileShardResult implements Writeable {
 
     private final AggregationProfileShardResult aggProfileShardResult;
 
+    private final FetchProfileShardResult fetchProfileResult;
+
     private NetworkTime networkTime;
 
     public ProfileShardResult(
         List<QueryProfileShardResult> queryProfileResults,
         AggregationProfileShardResult aggProfileShardResult,
+        FetchProfileShardResult fetchProfileResult,
         NetworkTime networkTime
     ) {
         this.aggProfileShardResult = aggProfileShardResult;
+        this.fetchProfileResult = fetchProfileResult;
         this.queryProfileResults = Collections.unmodifiableList(queryProfileResults);
         this.networkTime = networkTime;
     }
@@ -77,6 +83,7 @@ public ProfileShardResult(StreamInput in) throws IOException {
         }
         this.queryProfileResults = Collections.unmodifiableList(queryProfileResults);
         this.aggProfileShardResult = new AggregationProfileShardResult(in);
+        this.fetchProfileResult = new FetchProfileShardResult(in);
         this.networkTime = new NetworkTime(in);
     }
 
@@ -87,6 +94,7 @@ public void writeTo(StreamOutput out) throws IOException {
             queryShardResult.writeTo(out);
         }
         aggProfileShardResult.writeTo(out);
+        fetchProfileResult.writeTo(out);
         networkTime.writeTo(out);
     }
 
@@ -98,6 +106,10 @@ public AggregationProfileShardResult getAggregationProfileResults() {
         return aggProfileShardResult;
     }
 
+    public FetchProfileShardResult getFetchProfileResult() {
+        return fetchProfileResult;
+    }
+
     public NetworkTime getNetworkTime() {
         return networkTime;
     }
diff --git a/server/src/main/java/org/opensearch/search/profile/Profilers.java b/server/src/main/java/org/opensearch/search/profile/Profilers.java
index 5aaf1d670313e..b394da4c97d2e 100644
--- a/server/src/main/java/org/opensearch/search/profile/Profilers.java
+++ b/server/src/main/java/org/opensearch/search/profile/Profilers.java
@@ -36,6 +36,7 @@
 import org.opensearch.search.internal.ContextIndexSearcher;
 import org.opensearch.search.profile.aggregation.AggregationProfiler;
 import org.opensearch.search.profile.aggregation.ConcurrentAggregationProfiler;
+import org.opensearch.search.profile.fetch.FetchProfiler;
 import org.opensearch.search.profile.query.ConcurrentQueryProfileTree;
 import org.opensearch.search.profile.query.ConcurrentQueryProfiler;
 import org.opensearch.search.profile.query.InternalQueryProfileTree;
@@ -56,6 +57,7 @@ public final class Profilers {
     private final ContextIndexSearcher searcher;
     private final List<QueryProfiler> queryProfilers;
     private final AggregationProfiler aggProfiler;
+    private final FetchProfiler fetchProfiler;
     private final boolean isConcurrentSegmentSearchEnabled;
 
     /** Sole constructor. This {@link Profilers} instance will initially wrap one {@link QueryProfiler}.
      */
@@ -64,6 +66,7 @@ public Profilers(ContextIndexSearcher searcher, boolean isConcurrentSegmentSearc
         this.isConcurrentSegmentSearchEnabled = isConcurrentSegmentSearchEnabled;
         this.queryProfilers = new ArrayList<>();
         this.aggProfiler = isConcurrentSegmentSearchEnabled ? new ConcurrentAggregationProfiler() : new AggregationProfiler();
+        this.fetchProfiler = new FetchProfiler();
         addQueryProfiler();
     }
 
@@ -92,4 +95,8 @@ public AggregationProfiler getAggregationProfiler() {
         return aggProfiler;
     }
 
+    public FetchProfiler getFetchProfiler() {
+        return fetchProfiler;
+    }
+
 }
diff --git a/server/src/main/java/org/opensearch/search/profile/SearchProfileShardResults.java b/server/src/main/java/org/opensearch/search/profile/SearchProfileShardResults.java
index 53cf1f2739e77..44cf9e5d66ba3 100644
--- a/server/src/main/java/org/opensearch/search/profile/SearchProfileShardResults.java
+++ b/server/src/main/java/org/opensearch/search/profile/SearchProfileShardResults.java
@@ -41,6 +41,9 @@
 import org.opensearch.search.internal.ShardSearchRequest;
 import org.opensearch.search.profile.aggregation.AggregationProfileShardResult;
 import org.opensearch.search.profile.aggregation.AggregationProfiler;
+import org.opensearch.search.profile.fetch.FetchProfileShardResult;
+import org.opensearch.search.profile.fetch.FetchProfiler;
+import org.opensearch.search.profile.fetch.FetchTimingType;
 import org.opensearch.search.profile.query.QueryProfileShardResult;
 import org.opensearch.search.profile.query.QueryProfiler;
 
@@ -117,6 +120,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws
             }
             builder.endArray();
             profileShardResult.getAggregationProfileResults().toXContent(builder, params);
+            profileShardResult.getFetchProfileResult().toXContent(builder, params);
             builder.endObject();
         }
         builder.endArray().endObject();
@@ -149,6 +153,7 @@ private static void parseSearchProfileResultsEntry(XContentParser parser, Map
         List<QueryProfileShardResult> queryProfileResults = new ArrayList<>();
         AggregationProfileShardResult aggProfileShardResult = null;
+        FetchProfileShardResult fetchProfileShardResult = null;
         String id = null;
         String currentFieldName = null;
         long inboundNetworkTime = 0;
@@ -173,6 +178,8 @@ private static void parseSearchProfileResultsEntry(XContentParser parser, Map
         List<QueryProfiler> queryProfilers = profilers.getQueryProfilers();
         AggregationProfiler aggProfiler = profilers.getAggregationProfiler();
+        FetchProfiler fetchProfiler = profilers.getFetchProfiler();
         List<QueryProfileShardResult> queryResults = new ArrayList<>(queryProfilers.size());
         for (QueryProfiler queryProfiler : queryProfilers) {
             QueryProfileShardResult result = new QueryProfileShardResult(
@@ -206,11 +214,17 @@ public static ProfileShardResult buildShardResults(Profilers profilers, ShardSea
             queryResults.add(result);
         }
         AggregationProfileShardResult aggResults = new AggregationProfileShardResult(aggProfiler.getTree());
+        long fetchTime = 0L;
+        List<ProfileResult> fetchTree = fetchProfiler.getTree();
+        if (!fetchTree.isEmpty()) {
+            fetchTime = fetchTree.get(0).getTime();
+        }
+        FetchProfileShardResult fetchResult = new FetchProfileShardResult(fetchTime);
         NetworkTime networkTime = new NetworkTime(0, 0);
         if (request != null) {
             networkTime.setInboundNetworkTime(request.getInboundNetworkTime());
             networkTime.setOutboundNetworkTime(request.getOutboundNetworkTime());
         }
-        return new ProfileShardResult(queryResults, aggResults, networkTime);
+        return new ProfileShardResult(queryResults, aggResults, fetchResult, networkTime);
     }
 }
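With the toXContent hook added above, every shard entry in a profiled search response gains a "fetch" object right after the aggregation section, containing the single "time_in_nanos" field written by FetchProfileShardResult. A rough sketch of exercising that serialization in isolation is shown below; the builder construction is one common approach and the package locations follow recent OpenSearch versions, so both may need adjusting, and the printed value is purely illustrative:

    import java.io.ByteArrayOutputStream;
    import java.nio.charset.StandardCharsets;

    import org.opensearch.common.xcontent.XContentFactory;
    import org.opensearch.core.xcontent.ToXContent;
    import org.opensearch.core.xcontent.XContentBuilder;
    import org.opensearch.search.profile.fetch.FetchProfileShardResult;

    public class FetchProfileXContentSketch {
        public static void main(String[] args) throws Exception {
            ByteArrayOutputStream os = new ByteArrayOutputStream();
            try (XContentBuilder builder = XContentFactory.jsonBuilder(os)) {
                builder.startObject();
                // Writes the new section the same way SearchProfileShardResults does per shard.
                new FetchProfileShardResult(1_234_567L).toXContent(builder, ToXContent.EMPTY_PARAMS);
                builder.endObject();
            }
            // Expected shape: {"fetch":{"time_in_nanos":1234567}}
            System.out.println(os.toString(StandardCharsets.UTF_8));
        }
    }
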
diff --git a/server/src/main/java/org/opensearch/search/profile/fetch/FetchProfileBreakdown.java b/server/src/main/java/org/opensearch/search/profile/fetch/FetchProfileBreakdown.java
new file mode 100644
index 0000000000000..31ba37bb0286e
--- /dev/null
+++ b/server/src/main/java/org/opensearch/search/profile/fetch/FetchProfileBreakdown.java
@@ -0,0 +1,18 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.search.profile.fetch;
+
+import org.opensearch.search.profile.AbstractProfileBreakdown;
+
+
+public class FetchProfileBreakdown extends AbstractProfileBreakdown<FetchTimingType> {
+    public FetchProfileBreakdown() {
+        super(FetchTimingType.class);
+    }
+}
diff --git a/server/src/main/java/org/opensearch/search/profile/fetch/FetchProfileShardResult.java b/server/src/main/java/org/opensearch/search/profile/fetch/FetchProfileShardResult.java
new file mode 100644
index 0000000000000..dd5b3a3f246dd
--- /dev/null
+++ b/server/src/main/java/org/opensearch/search/profile/fetch/FetchProfileShardResult.java
@@ -0,0 +1,74 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.search.profile.fetch;
+
+import org.opensearch.common.annotation.ExperimentalApi;
+import org.opensearch.core.common.io.stream.StreamInput;
+import org.opensearch.core.common.io.stream.StreamOutput;
+import org.opensearch.core.common.io.stream.Writeable;
+import org.opensearch.core.xcontent.ToXContentFragment;
+import org.opensearch.core.xcontent.XContentBuilder;
+import org.opensearch.core.xcontent.XContentParser;
+
+import java.io.IOException;
+
+import static org.opensearch.core.xcontent.XContentParserUtils.ensureExpectedToken;
+
+@ExperimentalApi()
+public class FetchProfileShardResult implements Writeable, ToXContentFragment {
+    public static final String FETCH = "fetch";
+    public static final String TIME_IN_NANOS = "time_in_nanos";
+
+    private final long fetchTime;
+
+    public FetchProfileShardResult(long fetchTime) {
+        this.fetchTime = fetchTime;
+    }
+
+    public FetchProfileShardResult(StreamInput in) throws IOException {
+        this.fetchTime = in.readLong();
+    }
+
+    public long getFetchTime() {
+        return fetchTime;
+    }
+
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        out.writeLong(fetchTime);
+    }
+
+    @Override
+    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+        return builder.startObject(FETCH).field(TIME_IN_NANOS, fetchTime).endObject();
+    }
+
+    public static FetchProfileShardResult fromXContent(XContentParser parser) throws IOException {
+        ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser);
+        String currentFieldName = null;
+        long time = 0;
+        XContentParser.Token token;
+        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
+            if (token == XContentParser.Token.FIELD_NAME) {
+                currentFieldName = parser.currentName();
+            } else if (token.isValue()) {
+                if (TIME_IN_NANOS.equals(currentFieldName)) {
+                    time = parser.longValue();
+                } else {
+                    parser.skipChildren();
+                }
+            } else {
+                parser.skipChildren();
+            }
+        }
+        return new FetchProfileShardResult(time);
+    }
+
+
+}
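FetchProfileShardResult is the unit that travels from the data nodes back to the coordinator, so its writeTo method and StreamInput constructor must stay symmetric. A quick round-trip sketch of the new type follows; this is test-style code that is not part of the patch, and it assumes the usual in-memory stream helper org.opensearch.common.io.stream.BytesStreamOutput:

    import org.opensearch.common.io.stream.BytesStreamOutput;
    import org.opensearch.search.profile.fetch.FetchProfileShardResult;

    public class FetchProfileShardResultRoundTrip {
        public static void main(String[] args) throws Exception {
            FetchProfileShardResult original = new FetchProfileShardResult(42_000_000L);

            // Serialize the result the same way the transport layer would.
            BytesStreamOutput out = new BytesStreamOutput();
            original.writeTo(out);

            // Read it back and confirm the timing survives the round trip.
            FetchProfileShardResult copy = new FetchProfileShardResult(out.bytes().streamInput());
            if (copy.getFetchTime() != original.getFetchTime()) {
                throw new AssertionError("fetch time was not preserved");
            }
        }
    }
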
diff --git a/server/src/main/java/org/opensearch/search/profile/fetch/FetchProfiler.java b/server/src/main/java/org/opensearch/search/profile/fetch/FetchProfiler.java
new file mode 100644
index 0000000000000..5eaa30d9fd8ae
--- /dev/null
+++ b/server/src/main/java/org/opensearch/search/profile/fetch/FetchProfiler.java
@@ -0,0 +1,25 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.search.profile.fetch;
+
+import org.opensearch.common.annotation.ExperimentalApi;
+import org.opensearch.search.profile.AbstractProfiler;
+
+/**
+ * Profiler for the fetch phase.
+ */
+@ExperimentalApi()
+public class FetchProfiler extends AbstractProfiler<FetchProfileBreakdown, String> {
+    /**
+     * Creates a new FetchProfiler.
+     */
+    public FetchProfiler() {
+        super(new InternalFetchProfileTree());
+    }
+}
diff --git a/server/src/main/java/org/opensearch/search/profile/fetch/FetchTimingType.java b/server/src/main/java/org/opensearch/search/profile/fetch/FetchTimingType.java
new file mode 100644
index 0000000000000..b61a2dfa82503
--- /dev/null
+++ b/server/src/main/java/org/opensearch/search/profile/fetch/FetchTimingType.java
@@ -0,0 +1,22 @@
+package org.opensearch.search.profile.fetch;/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+import java.util.Locale;
+
+/**
+ * Timing points for fetch phase profiling.
+ */
+public enum FetchTimingType {
+    /** Time spent executing the fetch phase. */
+    EXECUTE_FETCH_PHASE;
+
+    @Override
+    public String toString() {
+        return name().toLowerCase(Locale.ROOT);
+    }
+}
diff --git a/server/src/main/java/org/opensearch/search/profile/fetch/InternalFetchProfileTree.java b/server/src/main/java/org/opensearch/search/profile/fetch/InternalFetchProfileTree.java
new file mode 100644
index 0000000000000..de3a3edbef7f2
--- /dev/null
+++ b/server/src/main/java/org/opensearch/search/profile/fetch/InternalFetchProfileTree.java
@@ -0,0 +1,32 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.search.profile.fetch;
+
+import org.opensearch.search.profile.AbstractInternalProfileTree;
+
+/**
+ * Profiling tree for fetch operations.
+ */
+public class InternalFetchProfileTree extends AbstractInternalProfileTree<FetchProfileBreakdown, String> {
+
+    @Override
+    protected FetchProfileBreakdown createProfileBreakdown() {
+        return new FetchProfileBreakdown();
+    }
+
+    @Override
+    protected String getTypeFromElement(String element) {
+        return element;
+    }
+
+    @Override
+    protected String getDescriptionFromElement(String element) {
+        return element;
+    }
+}
diff --git a/server/src/main/java/org/opensearch/search/profile/fetch/package-info.java b/server/src/main/java/org/opensearch/search/profile/fetch/package-info.java
new file mode 100644
index 0000000000000..ab5f1179e424e
--- /dev/null
+++ b/server/src/main/java/org/opensearch/search/profile/fetch/package-info.java
@@ -0,0 +1,11 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+/**
+ * Profiling classes for the fetch portion of a search request.
+ */
+package org.opensearch.search.profile.fetch;
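Taken together, the flow introduced by this first patch is: FetchPhase starts a timer on the profiler's single "fetch" breakdown, SearchService asks SearchProfileShardResults to fold the profiler trees into a ProfileShardResult, and the total time of the root tree element becomes the FetchProfileShardResult that is serialized per shard. A condensed sketch of that sequence using the classes touched above; the wrapper class and method here are illustrative only, and the sketch runs outside any real SearchContext:

    import java.util.List;

    import org.opensearch.search.profile.ProfileResult;
    import org.opensearch.search.profile.Timer;
    import org.opensearch.search.profile.fetch.FetchProfileShardResult;
    import org.opensearch.search.profile.fetch.FetchProfiler;
    import org.opensearch.search.profile.fetch.FetchTimingType;

    public class FetchProfilingFlowSketch {
        public static FetchProfileShardResult profileFetch(Runnable fetchWork) {
            FetchProfiler profiler = new FetchProfiler();

            // FetchPhase.execute(): one breakdown named "fetch", timed around the whole phase.
            Timer timer = profiler.getQueryBreakdown("fetch").getTimer(FetchTimingType.EXECUTE_FETCH_PHASE);
            timer.start();
            try {
                fetchWork.run();
            } finally {
                timer.stop();
                profiler.pollLastElement();
            }

            // SearchProfileShardResults.buildShardResults(): the root element carries the total time.
            List<ProfileResult> fetchTree = profiler.getTree();
            long fetchTime = fetchTree.isEmpty() ? 0L : fetchTree.get(0).getTime();
            return new FetchProfileShardResult(fetchTime);
        }
    }
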
From 2466119de3a7c9db605bdfbe93b4199fe17b638b Mon Sep 17 00:00:00 2001
From: Andre van de Ven
Date: Tue, 1 Jul 2025 10:22:27 -0700
Subject: [PATCH 02/95] added more granular fetch profiling

Signed-off-by: Andre van de Ven
---
 .idea/runConfigurations/Debug_OpenSearch.xml  |   6 +-
 .../opensearch/search/fetch/FetchPhase.java   | 140 ++++++++++++----
 .../profile/SearchProfileShardResults.java    |   6 +-
 .../fetch/FetchProfileShardResult.java        |  55 +++----
 .../search/profile/fetch/FetchTimingType.java |  18 ++-
 .../opensearch/search/query/QueryPhase.java   |   1 +
 6 files changed, 166 insertions(+), 60 deletions(-)

diff --git a/.idea/runConfigurations/Debug_OpenSearch.xml b/.idea/runConfigurations/Debug_OpenSearch.xml
index 0d8bf59823acf..c18046f873477 100644
--- a/.idea/runConfigurations/Debug_OpenSearch.xml
+++ b/.idea/runConfigurations/Debug_OpenSearch.xml
@@ -6,6 +6,10 @@