diff --git a/docs/changelog/91640.yaml b/docs/changelog/91640.yaml new file mode 100644 index 0000000000000..e581b03d523c8 --- /dev/null +++ b/docs/changelog/91640.yaml @@ -0,0 +1,5 @@ +pr: 91640 +summary: Add profiling plugin +area: Search +type: enhancement +issues: [] diff --git a/x-pack/plugin/profiler/build.gradle b/x-pack/plugin/profiler/build.gradle new file mode 100644 index 0000000000000..80624ec9a1815 --- /dev/null +++ b/x-pack/plugin/profiler/build.gradle @@ -0,0 +1,15 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ +apply plugin: 'elasticsearch.internal-cluster-test' +apply plugin: 'elasticsearch.internal-es-plugin' + +esplugin { + name 'x-pack-profiling' + description 'The profiler plugin adds support for retrieving data from Universal Profiler.' + classname 'org.elasticsearch.xpack.profiler.ProfilingPlugin' +} diff --git a/x-pack/plugin/profiler/src/internalClusterTest/java/org/elasticsearch/xpack/profiler/GetProfilingActionIT.java b/x-pack/plugin/profiler/src/internalClusterTest/java/org/elasticsearch/xpack/profiler/GetProfilingActionIT.java new file mode 100644 index 0000000000000..0c6038186c91b --- /dev/null +++ b/x-pack/plugin/profiler/src/internalClusterTest/java/org/elasticsearch/xpack/profiler/GetProfilingActionIT.java @@ -0,0 +1,315 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.profiler; + +import org.apache.http.entity.ContentType; +import org.apache.http.entity.StringEntity; +import org.apache.logging.log4j.LogManager; +import org.apache.lucene.util.SetOnce; +import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; +import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; +import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; +import org.elasticsearch.action.index.IndexResponse; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.client.Cancellable; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.common.network.NetworkModule; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.PluginsService; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.script.MockScriptPlugin; +import org.elasticsearch.search.lookup.LeafStoredFieldsLookup; +import org.elasticsearch.tasks.CancellableTask; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.tasks.TaskId; +import org.elasticsearch.tasks.TaskInfo; +import org.elasticsearch.tasks.TaskManager; +import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.transport.netty4.Netty4Plugin; +import org.elasticsearch.xcontent.XContentType; +import org.junit.Before; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.CancellationException; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicInteger; +import 
java.util.function.Function; + +import static org.elasticsearch.action.support.ActionTestUtils.wrapAsRestResponseListener; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.instanceOf; + +@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 1) +public class GetProfilingActionIT extends ESIntegTestCase { + @Override + protected Collection> nodePlugins() { + return List.of(ProfilingPlugin.class, ScriptedBlockPlugin.class, getTestTransportPlugin()); + } + + @Override + protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { + return Settings.builder() + .put(super.nodeSettings(nodeOrdinal, otherSettings)) + .put(ProfilingPlugin.PROFILING_ENABLED.getKey(), true) + .put(NetworkModule.TRANSPORT_TYPE_KEY, Netty4Plugin.NETTY_TRANSPORT_NAME) + .put(NetworkModule.HTTP_TYPE_KEY, Netty4Plugin.NETTY_HTTP_TRANSPORT_NAME) + .build(); + } + + @Override + protected boolean addMockHttpTransport() { + return false; // enable http + } + + @Override + protected boolean ignoreExternalCluster() { + return true; + } + + private byte[] read(String resource) throws IOException { + return GetProfilingAction.class.getClassLoader().getResourceAsStream(resource).readAllBytes(); + } + + private void createIndex(String name, String bodyFileName) throws Exception { + client().admin().indices().prepareCreate(name).setSource(read(bodyFileName), XContentType.JSON).execute().get(); + } + + private void indexDoc(String index, String id, Map source) { + IndexResponse indexResponse = client().prepareIndex(index).setId(id).setSource(source).get(); + assertEquals(RestStatus.CREATED, indexResponse.status()); + } + + @Before + public void setupData() throws Exception { + + for (String idx : EventsIndex.indexNames()) { + createIndex(idx, "events.json"); + } + createIndex("profiling-stackframes", "stackframes.json"); + createIndex("profiling-stacktraces", "stacktraces.json"); + createIndex("profiling-executables", 
"executables.json"); + ensureGreen(); + + // ensure that we have this in every index, so we find an event + for (String idx : EventsIndex.indexNames()) { + indexDoc( + idx, + "QjoLteG7HX3VUUXr-J4kHQ", + Map.of("@timestamp", 1668761065, "Stacktrace.id", "QjoLteG7HX3VUUXr-J4kHQ", "Stacktrace.count", 1) + ); + } + + indexDoc( + "profiling-stacktraces", + "QjoLteG7HX3VUUXr-J4kHQ", + Map.of("Stacktrace.frame.ids", "QCCDqjSg3bMK1C4YRK6TiwAAAAAAEIpf", "Stacktrace.frame.types", "AQI") + ); + indexDoc( + "profiling-stackframes", + "QCCDqjSg3bMK1C4YRK6TiwAAAAAAEIpf", + Map.of("Stackframe.function.name", "_raw_spin_unlock_irqrestore") + ); + indexDoc("profiling-executables", "QCCDqjSg3bMK1C4YRK6Tiw", Map.of("Executable.file.name", "libc.so.6")); + + refresh(); + } + + public void testGetProfilingDataUnfiltered() throws Exception { + GetProfilingRequest request = new GetProfilingRequest(1, null); + GetProfilingResponse response = client().execute(GetProfilingAction.INSTANCE, request).get(); + assertEquals(RestStatus.OK, response.status()); + assertEquals(1, response.getTotalFrames()); + assertNotNull(response.getStackTraces()); + StackTrace stackTrace = response.getStackTraces().get("QjoLteG7HX3VUUXr-J4kHQ"); + assertArrayEquals(new int[] { 1083999 }, stackTrace.addressOrLines); + assertArrayEquals(new String[] { "QCCDqjSg3bMK1C4YRK6Tiw" }, stackTrace.fileIds); + assertArrayEquals(new String[] { "QCCDqjSg3bMK1C4YRK6TiwAAAAAAEIpf" }, stackTrace.frameIds); + assertArrayEquals(new int[] { 2 }, stackTrace.typeIds); + + assertNotNull(response.getStackFrames()); + StackFrame stackFrame = response.getStackFrames().get("QCCDqjSg3bMK1C4YRK6TiwAAAAAAEIpf"); + assertEquals("_raw_spin_unlock_irqrestore", stackFrame.functionName); + assertNotNull(response.getStackTraceEvents()); + assertEquals(1, (int) response.getStackTraceEvents().get("QjoLteG7HX3VUUXr-J4kHQ")); + + assertNotNull(response.getExecutables()); + assertNotNull("libc.so.6", 
response.getExecutables().get("QCCDqjSg3bMK1C4YRK6Tiw")); + } + + public void testAutomaticCancellation() throws Exception { + Request restRequest = new Request("POST", "/_profiling/stacktraces"); + restRequest.setEntity(new StringEntity(""" + { + "sample_size": 10000, + "query": { + "bool": { + "filter": [ + { + "script": { + "script": { + "lang": "mockscript", + "source": "search_block", + "params": {} + } + } + } + ] + } + } + } + """, ContentType.APPLICATION_JSON.withCharset(StandardCharsets.UTF_8))); + verifyCancellation(GetProfilingAction.NAME, restRequest); + } + + void verifyCancellation(String action, Request restRequest) throws Exception { + Map nodeIdToName = readNodesInfo(); + List plugins = initBlockFactory(); + + PlainActionFuture future = PlainActionFuture.newFuture(); + Cancellable cancellable = getRestClient().performRequestAsync(restRequest, wrapAsRestResponseListener(future)); + + awaitForBlock(plugins); + Collection profilingTasks = collectProfilingRelatedTasks(action); + cancellable.cancel(); + ensureTasksAreCancelled(profilingTasks, nodeIdToName::get); + + disableBlocks(plugins); + expectThrows(CancellationException.class, future::actionGet); + } + + private static Map readNodesInfo() { + Map nodeIdToName = new HashMap<>(); + NodesInfoResponse nodesInfoResponse = client().admin().cluster().prepareNodesInfo().get(); + assertFalse(nodesInfoResponse.hasFailures()); + for (NodeInfo node : nodesInfoResponse.getNodes()) { + nodeIdToName.put(node.getNode().getId(), node.getNode().getName()); + } + return nodeIdToName; + } + + private static Collection collectProfilingRelatedTasks(String transportAction) { + SetOnce profilingTask = new SetOnce<>(); + Map> taskToParent = new HashMap<>(); + ListTasksResponse listTasksResponse = client().admin().cluster().prepareListTasks().get(); + for (TaskInfo task : listTasksResponse.getTasks()) { + TaskId parentTaskId = task.parentTaskId(); + if (parentTaskId != null) { + if (taskToParent.containsKey(parentTaskId) 
== false) { + taskToParent.put(parentTaskId, new HashSet<>()); + } + taskToParent.get(parentTaskId).add(task.taskId()); + } + if (task.action().equals(transportAction)) { + profilingTask.set(task); + } + } + assertNotNull(profilingTask.get()); + return taskToParent.get(profilingTask.get().taskId()); + } + + private static void ensureTasksAreCancelled(Collection taskIds, Function nodeIdToName) throws Exception { + assertBusy(() -> { + for (TaskId taskId : taskIds) { + String nodeName = nodeIdToName.apply(taskId.getNodeId()); + TaskManager taskManager = internalCluster().getInstance(TransportService.class, nodeName).getTaskManager(); + Task task = taskManager.getTask(taskId.getId()); + assertThat(task, instanceOf(CancellableTask.class)); + assertTrue(((CancellableTask) task).isCancelled()); + } + }); + } + + private static List initBlockFactory() { + List plugins = new ArrayList<>(); + for (PluginsService pluginsService : internalCluster().getDataNodeInstances(PluginsService.class)) { + plugins.addAll(pluginsService.filterPlugins(ScriptedBlockPlugin.class)); + } + for (ScriptedBlockPlugin plugin : plugins) { + plugin.reset(); + plugin.enableBlock(); + // Allow to execute one search and only block starting with the second one. This + // is done so we have at least one child action and can check that all active children + // are cancelled with the parent action. 
+ plugin.setSlack(1); + } + return plugins; + } + + private void awaitForBlock(List plugins) throws Exception { + assertBusy(() -> { + int numberOfBlockedPlugins = 0; + for (ScriptedBlockPlugin plugin : plugins) { + numberOfBlockedPlugins += plugin.hits.get(); + } + logger.info("The plugin blocked on {} shards", numberOfBlockedPlugins); + assertThat(numberOfBlockedPlugins, greaterThan(0)); + }, 10, TimeUnit.SECONDS); + } + + private static void disableBlocks(List plugins) { + for (ScriptedBlockPlugin plugin : plugins) { + plugin.disableBlock(); + } + } + + public static class ScriptedBlockPlugin extends MockScriptPlugin { + static final String SCRIPT_NAME = "search_block"; + + private final AtomicInteger hits = new AtomicInteger(); + + private final AtomicInteger slack = new AtomicInteger(0); + + private final AtomicBoolean shouldBlock = new AtomicBoolean(true); + + void reset() { + hits.set(0); + } + + void disableBlock() { + shouldBlock.set(false); + } + + void enableBlock() { + shouldBlock.set(true); + } + + void setSlack(int slack) { + this.slack.set(slack); + } + + @Override + public Map, Object>> pluginScripts() { + return Collections.singletonMap(SCRIPT_NAME, params -> { + LeafStoredFieldsLookup fieldsLookup = (LeafStoredFieldsLookup) params.get("_fields"); + LogManager.getLogger(GetProfilingActionIT.class).info("Blocking on the document {}", fieldsLookup.get("_id")); + hits.incrementAndGet(); + if (slack.decrementAndGet() < 0) { + try { + waitUntil(() -> shouldBlock.get() == false); + } catch (Exception e) { + throw new RuntimeException(e); + } + } + return true; + }); + } + } +} diff --git a/x-pack/plugin/profiler/src/internalClusterTest/resources/events.json b/x-pack/plugin/profiler/src/internalClusterTest/resources/events.json new file mode 100644 index 0000000000000..855c754191db5 --- /dev/null +++ b/x-pack/plugin/profiler/src/internalClusterTest/resources/events.json @@ -0,0 +1,78 @@ +{ + "settings": { + "index": { + "number_of_shards": "4", + 
"max_result_window": 150000, + "refresh_interval": "10s", + "sort": { + "field": [ + "service.name", + "@timestamp", + "orchestrator.resource.name", + "container.name", + "process.thread.name", + "host.id" + ] + } + }, + "codec": "best_compression" + }, + "mappings": { + "_doc": { + "_source": { + "enabled": false + }, + "properties": { + "ecs.version": { + "type": "keyword", + "index": true + }, + "service.name": { + "type": "keyword" + }, + "@timestamp": { + "type": "date", + "format": "epoch_second" + }, + "host.id": { + "type": "keyword" + }, + "Stacktrace.id": { + "type": "keyword", + "index": false + }, + "orchestrator.resource.name": { + "type": "keyword" + }, + "container.name": { + "type": "keyword" + }, + "process.thread.name": { + "type": "keyword" + }, + "Stacktrace.count": { + "type": "short", + "index": false + }, + "agent.version": { + "type": "keyword" + }, + "host.ip": { + "type": "ip" + }, + "host.ipstring": { + "type": "keyword" + }, + "host.name": { + "type": "keyword" + }, + "os.kernel": { + "type": "keyword" + }, + "tags": { + "type": "keyword" + } + } + } + } +} diff --git a/x-pack/plugin/profiler/src/internalClusterTest/resources/executables.json b/x-pack/plugin/profiler/src/internalClusterTest/resources/executables.json new file mode 100644 index 0000000000000..f5db60318bcf2 --- /dev/null +++ b/x-pack/plugin/profiler/src/internalClusterTest/resources/executables.json @@ -0,0 +1,32 @@ +{ + "settings": { + "index": { + "refresh_interval": "10s" + } + }, + "mappings": { + "_doc": { + "_source": { + "mode": "synthetic" + }, + "properties": { + "ecs.version": { + "type": "keyword", + "index": true + }, + "Executable.build.id": { + "type": "keyword", + "index": true + }, + "Executable.file.name": { + "type": "keyword", + "index": true + }, + "@timestamp": { + "type": "date", + "format": "epoch_second" + } + } + } + } +} diff --git a/x-pack/plugin/profiler/src/internalClusterTest/resources/stackframes.json 
b/x-pack/plugin/profiler/src/internalClusterTest/resources/stackframes.json new file mode 100644 index 0000000000000..179f57214267e --- /dev/null +++ b/x-pack/plugin/profiler/src/internalClusterTest/resources/stackframes.json @@ -0,0 +1,41 @@ +{ + "settings": { + "index": { + "number_of_shards": "16", + "refresh_interval": "10s" + } + }, + "mappings": { + "_doc": { + "_source": { + "mode": "synthetic" + }, + "properties": { + "ecs.version": { + "type": "keyword", + "index": true + }, + "Stackframe.line.number": { + "type": "integer", + "index": false + }, + "Stackframe.file.name": { + "type": "keyword", + "index": false + }, + "Stackframe.source.type": { + "type": "short", + "index": false + }, + "Stackframe.function.name": { + "type": "keyword", + "index": false + }, + "Stackframe.function.offset": { + "type": "integer", + "index": false + } + } + } + } +} diff --git a/x-pack/plugin/profiler/src/internalClusterTest/resources/stacktraces.json b/x-pack/plugin/profiler/src/internalClusterTest/resources/stacktraces.json new file mode 100644 index 0000000000000..0b72dccdfe5c5 --- /dev/null +++ b/x-pack/plugin/profiler/src/internalClusterTest/resources/stacktraces.json @@ -0,0 +1,38 @@ +{ + "settings": { + "index": { + "number_of_shards": "16", + "refresh_interval": "10s", + "sort": { + "field": [ + "Stacktrace.frame.ids" + ] + } + } + }, + "mappings": { + "_doc": { + "_source": { + "mode": "synthetic" + }, + "properties": { + "ecs.version": { + "type": "keyword", + "index": true + }, + "Stacktrace.frame.ids": { + "type": "keyword", + "index": false + }, + "Stacktrace.frame.types": { + "type": "keyword", + "index": false + }, + "@timestamp": { + "type": "date", + "format": "epoch_second" + } + } + } + } +} diff --git a/x-pack/plugin/profiler/src/main/java/org/elasticsearch/xpack/profiler/EventsIndex.java b/x-pack/plugin/profiler/src/main/java/org/elasticsearch/xpack/profiler/EventsIndex.java new file mode 100644 index 0000000000000..441a5bbd6cd2b --- /dev/null +++ 
b/x-pack/plugin/profiler/src/main/java/org/elasticsearch/xpack/profiler/EventsIndex.java @@ -0,0 +1,95 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.profiler; + +import java.util.Collection; +import java.util.Collections; +import java.util.HashSet; +import java.util.Locale; +import java.util.Set; + +public final class EventsIndex { + private static final String PREFIX = "profiling-events"; + private static final String ALL_EVENTS = PREFIX + "-all"; + + private static final int SAMPLING_FACTOR = 5; + + private static final int MIN_EXPONENT = 1; + + private static final int MAX_EXPONENT = 11; + + public static final EventsIndex FULL_INDEX = new EventsIndex(ALL_EVENTS, 1, 1); + + // Start with counting the results in the index down-sampled by 5^6. + // That is in the middle of our down-sampled indexes. + public static final EventsIndex MEDIUM_DOWNSAMPLED = fromFactorAndExponent(SAMPLING_FACTOR, 6); + + private final String name; + + private final int samplingFactor; + + private final int exponent; + + private EventsIndex(String name, int samplingFactor, int exponent) { + this.name = name; + this.samplingFactor = samplingFactor; + this.exponent = exponent; + } + + public String getName() { + return name; + } + + public int getExponent() { + return exponent; + } + + public double getSampleRate() { + return Math.pow(1.0d / samplingFactor, exponent); + } + + public EventsIndex getResampledIndex(long targetSampleSize, long currentSampleSize) { + return EventsIndex.getSampledIndex(targetSampleSize, currentSampleSize, this.getExponent()); + } + + // Return the index that has between targetSampleSize..targetSampleSize*samplingFactor entries. + // The starting point is the number of entries from the profiling-events-5pow index. 
+ private static EventsIndex getSampledIndex(long targetSampleSize, long sampleCountFromInitialExp, int initialExp) { + if (sampleCountFromInitialExp == 0) { + return FULL_INDEX; + } + int exp = initialExp - (int) Math.round( + Math.log((targetSampleSize * SAMPLING_FACTOR) / (double) sampleCountFromInitialExp) / Math.log(SAMPLING_FACTOR) + ) + 1; + + if (exp < MIN_EXPONENT) { + return FULL_INDEX; + } + if (exp > MAX_EXPONENT) { + exp = MAX_EXPONENT; + } + return fromFactorAndExponent(SAMPLING_FACTOR, exp); + } + + private static EventsIndex fromFactorAndExponent(int factor, int exp) { + return new EventsIndex(indexName(factor, exp), factor, exp); + } + + private static String indexName(int factor, int exp) { + return String.format(Locale.ROOT, "%s-%dpow%02d", PREFIX, factor, exp); + } + + public static Collection indexNames() { + Set names = new HashSet<>(); + names.add(EventsIndex.ALL_EVENTS); + for (int exp = MIN_EXPONENT; exp <= MAX_EXPONENT; exp++) { + names.add(indexName(SAMPLING_FACTOR, exp)); + } + return Collections.unmodifiableSet(names); + } +} diff --git a/x-pack/plugin/profiler/src/main/java/org/elasticsearch/xpack/profiler/GetProfilingAction.java b/x-pack/plugin/profiler/src/main/java/org/elasticsearch/xpack/profiler/GetProfilingAction.java new file mode 100644 index 0000000000000..eaec2e4b7739a --- /dev/null +++ b/x-pack/plugin/profiler/src/main/java/org/elasticsearch/xpack/profiler/GetProfilingAction.java @@ -0,0 +1,18 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ +package org.elasticsearch.xpack.profiler; + +import org.elasticsearch.action.ActionType; + +public final class GetProfilingAction extends ActionType { + public static final GetProfilingAction INSTANCE = new GetProfilingAction(); + public static final String NAME = "indices:data/read/profiling"; + + private GetProfilingAction() { + super(NAME, GetProfilingResponse::new); + } +} diff --git a/x-pack/plugin/profiler/src/main/java/org/elasticsearch/xpack/profiler/GetProfilingRequest.java b/x-pack/plugin/profiler/src/main/java/org/elasticsearch/xpack/profiler/GetProfilingRequest.java new file mode 100644 index 0000000000000..13c3b2e38ea45 --- /dev/null +++ b/x-pack/plugin/profiler/src/main/java/org/elasticsearch/xpack/profiler/GetProfilingRequest.java @@ -0,0 +1,183 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ +package org.elasticsearch.xpack.profiler; + +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.IndicesRequest; +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.tasks.CancellableTask; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.tasks.TaskId; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.XContentParser; + +import java.io.IOException; +import java.util.HashSet; +import java.util.Map; +import java.util.Objects; +import java.util.Set; + +import static org.elasticsearch.action.ValidateActions.addValidationError; +import static org.elasticsearch.index.query.AbstractQueryBuilder.parseTopLevelQuery; + +/** + * A request to get profiling details + */ +public class GetProfilingRequest extends ActionRequest implements IndicesRequest { + public static final ParseField QUERY_FIELD = new ParseField("query"); + public static final ParseField SAMPLE_SIZE_FIELD = new ParseField("sample_size"); + + private QueryBuilder query; + + private Integer sampleSize; + + public GetProfilingRequest() { + this(null, null); + } + + public GetProfilingRequest(Integer sampleSize, QueryBuilder query) { + this.sampleSize = sampleSize; + this.query = query; + } + + public GetProfilingRequest(StreamInput in) throws IOException { + this.query = in.readOptionalNamedWriteable(QueryBuilder.class); + this.sampleSize = in.readOptionalInt(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeOptionalNamedWriteable(query); + out.writeOptionalInt(sampleSize); + } + + public Integer getSampleSize() { + return sampleSize; + } + + 
public QueryBuilder getQuery() { + return query; + } + + public void parseXContent(XContentParser parser) throws IOException { + XContentParser.Token token = parser.currentToken(); + String currentFieldName = null; + if (token != XContentParser.Token.START_OBJECT && (token = parser.nextToken()) != XContentParser.Token.START_OBJECT) { + throw new ParsingException( + parser.getTokenLocation(), + "Expected [" + XContentParser.Token.START_OBJECT + "] but found [" + token + "]", + parser.getTokenLocation() + ); + } + + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + currentFieldName = parser.currentName(); + } else if (token.isValue()) { + if (SAMPLE_SIZE_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { + this.sampleSize = parser.intValue(); + } else { + throw new ParsingException( + parser.getTokenLocation(), + "Unknown key for a " + token + " in [" + currentFieldName + "].", + parser.getTokenLocation() + ); + } + } else if (token == XContentParser.Token.START_OBJECT) { + if (QUERY_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { + this.query = parseTopLevelQuery(parser); + } + } else { + throw new ParsingException( + parser.getTokenLocation(), + "Unknown key for a " + token + " in [" + currentFieldName + "].", + parser.getTokenLocation() + ); + } + } + + token = parser.nextToken(); + if (token != null) { + throw new ParsingException(parser.getTokenLocation(), "Unexpected token [" + token + "] found after the main object."); + } + } + + @Override + public ActionRequestValidationException validate() { + ActionRequestValidationException validationException = null; + if (sampleSize == null) { + validationException = addValidationError("[" + SAMPLE_SIZE_FIELD.getPreferredName() + "] is mandatory", validationException); + } else if (sampleSize <= 0) { + validationException = addValidationError( + "[" + SAMPLE_SIZE_FIELD.getPreferredName() + "] must be greater or 
equals than 1, got: " + sampleSize, + validationException + ); + } + return validationException; + } + + @Override + public Task createTask(long id, String type, String action, TaskId parentTaskId, Map headers) { + return new CancellableTask(id, type, action, null, parentTaskId, headers) { + @Override + public String getDescription() { + // generating description lazily since the query could be large + StringBuilder sb = new StringBuilder(); + sb.append("sample_size[").append(sampleSize).append("]"); + if (query == null) { + sb.append(", query[]"); + } else { + sb.append(", query[").append(Strings.toString(query)).append("]"); + } + return sb.toString(); + } + }; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + GetProfilingRequest that = (GetProfilingRequest) o; + return Objects.equals(query, that.query) && Objects.equals(sampleSize, that.sampleSize); + } + + @Override + public int hashCode() { + return Objects.hash(query, sampleSize); + } + + @Override + public String[] indices() { + Set indices = new HashSet<>(); + indices.add("profiling-stacktraces"); + indices.add("profiling-stackframes"); + indices.add("profiling-executables"); + indices.addAll(EventsIndex.indexNames()); + return indices.toArray(new String[0]); + } + + @Override + public IndicesOptions indicesOptions() { + return IndicesOptions.STRICT_EXPAND_OPEN; + } + + @Override + public boolean includeDataStreams() { + return true; + } +} diff --git a/x-pack/plugin/profiler/src/main/java/org/elasticsearch/xpack/profiler/GetProfilingResponse.java b/x-pack/plugin/profiler/src/main/java/org/elasticsearch/xpack/profiler/GetProfilingResponse.java new file mode 100644 index 0000000000000..46f0f0604915e --- /dev/null +++ b/x-pack/plugin/profiler/src/main/java/org/elasticsearch/xpack/profiler/GetProfilingResponse.java @@ -0,0 +1,222 @@ +/* + * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ +package org.elasticsearch.xpack.profiler; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.StatusToXContentObject; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.Map; +import java.util.Objects; + +import static org.elasticsearch.rest.RestStatus.OK; + +public class GetProfilingResponse extends ActionResponse implements StatusToXContentObject { + @Nullable + private final Map stackTraces; + @Nullable + private final Map stackFrames; + @Nullable + private final Map executables; + @Nullable + private final Map stackTraceEvents; + private final int totalFrames; + @Nullable + private final Exception error; + + public GetProfilingResponse(StreamInput in) throws IOException { + this.stackTraces = in.readBoolean() + ? in.readMap( + StreamInput::readString, + i -> new StackTrace(i.readIntArray(), i.readStringArray(), i.readStringArray(), i.readIntArray()) + ) + : null; + this.stackFrames = in.readBoolean() + ? in.readMap( + StreamInput::readString, + i -> new StackFrame( + i.readOptionalString(), + i.readOptionalString(), + i.readOptionalInt(), + i.readOptionalInt(), + i.readOptionalInt() + ) + ) + : null; + this.executables = in.readBoolean() ? in.readMap(StreamInput::readString, StreamInput::readString) : null; + this.stackTraceEvents = in.readBoolean() ? 
in.readMap(StreamInput::readString, StreamInput::readInt) : null; + this.totalFrames = in.readInt(); + this.error = in.readBoolean() ? in.readException() : null; + } + + public GetProfilingResponse( + Map stackTraces, + Map stackFrames, + Map executables, + Map stackTraceEvents, + int totalFrames + ) { + this(stackTraces, stackFrames, executables, stackTraceEvents, totalFrames, null); + } + + public GetProfilingResponse(Exception error) { + this(null, null, null, null, 0, error); + } + + private GetProfilingResponse( + Map stackTraces, + Map stackFrames, + Map executables, + Map stackTraceEvents, + int totalFrames, + Exception error + ) { + this.stackTraces = stackTraces; + this.stackFrames = stackFrames; + this.executables = executables; + this.stackTraceEvents = stackTraceEvents; + this.totalFrames = totalFrames; + this.error = error; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + if (stackTraces != null) { + out.writeBoolean(true); + out.writeMap(stackTraces, StreamOutput::writeString, (o, v) -> { + o.writeIntArray(v.addressOrLines); + o.writeStringArray(v.fileIds); + o.writeStringArray(v.frameIds); + o.writeIntArray(v.typeIds); + }); + } else { + out.writeBoolean(false); + } + if (stackFrames != null) { + out.writeBoolean(true); + out.writeMap(stackFrames, StreamOutput::writeString, (o, v) -> { + o.writeOptionalString(v.fileName); + o.writeOptionalString(v.functionName); + o.writeOptionalInt(v.functionOffset); + o.writeOptionalInt(v.lineNumber); + o.writeOptionalInt(v.sourceType); + }); + } else { + out.writeBoolean(false); + } + if (executables != null) { + out.writeBoolean(true); + out.writeMap(executables, StreamOutput::writeString, StreamOutput::writeString); + } else { + out.writeBoolean(false); + } + if (stackTraceEvents != null) { + out.writeBoolean(true); + out.writeMap(stackTraceEvents, StreamOutput::writeString, StreamOutput::writeInt); + } else { + out.writeBoolean(false); + } + out.writeInt(totalFrames); + if (error 
!= null) { + out.writeBoolean(true); + out.writeException(error); + } else { + out.writeBoolean(false); + } + } + + @Override + public RestStatus status() { + return error != null ? ExceptionsHelper.status(ExceptionsHelper.unwrapCause(error)) : OK; + } + + public Map getStackTraces() { + return stackTraces; + } + + public Map getStackFrames() { + return stackFrames; + } + + public Map getExecutables() { + return executables; + } + + public Map getStackTraceEvents() { + return stackTraceEvents; + } + + public int getTotalFrames() { + return totalFrames; + } + + public Exception getError() { + return error; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + if (stackTraces != null) { + builder.startObject("stack_traces"); + builder.mapContents(stackTraces); + builder.endObject(); + } + if (stackFrames != null) { + builder.startObject("stack_frames"); + builder.mapContents(stackFrames); + builder.endObject(); + } + if (executables != null) { + builder.startObject("executables"); + builder.mapContents(executables); + builder.endObject(); + } + if (stackTraceEvents != null) { + builder.startObject("stack_trace_events"); + builder.mapContents(stackTraceEvents); + builder.endObject(); + } + builder.field("total_frames", totalFrames); + if (error != null) { + builder.startObject("error"); + ElasticsearchException.generateThrowableXContent(builder, params, error); + builder.endObject(); + } + builder.endObject(); + return builder; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + GetProfilingResponse response = (GetProfilingResponse) o; + return totalFrames == response.totalFrames + && Objects.equals(stackTraces, response.stackTraces) + && Objects.equals(stackFrames, response.stackFrames) + && Objects.equals(executables, response.executables) + && 
Objects.equals(stackTraceEvents, response.stackTraceEvents) + && Objects.equals(error, response.error); + } + + @Override + public int hashCode() { + return Objects.hash(stackTraces, stackFrames, executables, stackTraceEvents, totalFrames, error); + } +} diff --git a/x-pack/plugin/profiler/src/main/java/org/elasticsearch/xpack/profiler/ProfilingPlugin.java b/x-pack/plugin/profiler/src/main/java/org/elasticsearch/xpack/profiler/ProfilingPlugin.java new file mode 100644 index 0000000000000..bc6fb4b9dc1d8 --- /dev/null +++ b/x-pack/plugin/profiler/src/main/java/org/elasticsearch/xpack/profiler/ProfilingPlugin.java @@ -0,0 +1,148 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.profiler; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.routing.allocation.decider.AllocationDeciders; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.IndexScopedSettings; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.SettingsFilter; +import org.elasticsearch.env.Environment; +import org.elasticsearch.env.NodeEnvironment; +import org.elasticsearch.plugins.ActionPlugin; +import org.elasticsearch.plugins.Plugin; +import 
org.elasticsearch.repositories.RepositoriesService; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestHandler; +import org.elasticsearch.script.ScriptService; +import org.elasticsearch.search.aggregations.MultiBucketConsumerService; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tracing.Tracer; +import org.elasticsearch.watcher.ResourceWatcherService; +import org.elasticsearch.xcontent.NamedXContentRegistry; + +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.Locale; +import java.util.function.Supplier; + +import static java.util.Collections.singletonList; + +public class ProfilingPlugin extends Plugin implements ActionPlugin { + private static final Logger logger = LogManager.getLogger(ProfilingPlugin.class); + public static final Setting PROFILING_ENABLED = Setting.boolSetting( + "xpack.profiling.enabled", + false, + Setting.Property.NodeScope + ); + private static final int REQUIRED_MAX_BUCKETS = 150_000; + private final Settings settings; + private final boolean enabled; + + public ProfilingPlugin(Settings settings) { + this.settings = settings; + this.enabled = PROFILING_ENABLED.get(settings); + } + + @Override + public Collection createComponents( + Client client, + ClusterService clusterService, + ThreadPool threadPool, + ResourceWatcherService resourceWatcherService, + ScriptService scriptService, + NamedXContentRegistry xContentRegistry, + Environment environment, + NodeEnvironment nodeEnvironment, + NamedWriteableRegistry namedWriteableRegistry, + IndexNameExpressionResolver indexNameExpressionResolver, + Supplier repositoriesServiceSupplier, + Tracer tracer, + AllocationDeciders allocationDeciders + ) { + logger.info("Profiling is {}", enabled ? 
"enabled" : "disabled"); + return super.createComponents( + client, + clusterService, + threadPool, + resourceWatcherService, + scriptService, + xContentRegistry, + environment, + nodeEnvironment, + namedWriteableRegistry, + indexNameExpressionResolver, + repositoriesServiceSupplier, + tracer, + allocationDeciders + ); + } + + @Override + public List getRestHandlers( + final Settings settings, + final RestController restController, + final ClusterSettings clusterSettings, + final IndexScopedSettings indexScopedSettings, + final SettingsFilter settingsFilter, + final IndexNameExpressionResolver indexNameExpressionResolver, + final Supplier nodesInCluster + ) { + if (enabled) { + return singletonList(new RestGetProfilingAction()); + } else { + return Collections.emptyList(); + } + } + + @Override + public List> getSettings() { + return List.of(PROFILING_ENABLED); + } + + @Override + public Settings additionalSettings() { + // workaround until https://github.com/elastic/elasticsearch/issues/91776 is implemented + final Settings.Builder builder = Settings.builder(); + if (enabled) { + if (MultiBucketConsumerService.MAX_BUCKET_SETTING.exists(settings) == false) { + logger.debug("Overriding [{}] to [{}].", MultiBucketConsumerService.MAX_BUCKET_SETTING, REQUIRED_MAX_BUCKETS); + builder.put(MultiBucketConsumerService.MAX_BUCKET_SETTING.getKey(), REQUIRED_MAX_BUCKETS); + } else { + Integer configuredMaxBuckets = MultiBucketConsumerService.MAX_BUCKET_SETTING.get(settings); + if (configuredMaxBuckets != null && configuredMaxBuckets < REQUIRED_MAX_BUCKETS) { + final String message = String.format( + Locale.ROOT, + "Profiling requires [%s] to be set at least to [%d] but was configured to [%d].", + MultiBucketConsumerService.MAX_BUCKET_SETTING.getKey(), + REQUIRED_MAX_BUCKETS, + configuredMaxBuckets + ); + throw new IllegalArgumentException(message); + } + } + } + return builder.build(); + } + + @Override + public List> getActions() { + return List.of(new 
ActionHandler<>(GetProfilingAction.INSTANCE, TransportGetProfilingAction.class)); + } +} diff --git a/x-pack/plugin/profiler/src/main/java/org/elasticsearch/xpack/profiler/RestGetProfilingAction.java b/x-pack/plugin/profiler/src/main/java/org/elasticsearch/xpack/profiler/RestGetProfilingAction.java new file mode 100644 index 0000000000000..3b831f257e710 --- /dev/null +++ b/x-pack/plugin/profiler/src/main/java/org/elasticsearch/xpack/profiler/RestGetProfilingAction.java @@ -0,0 +1,43 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ +package org.elasticsearch.xpack.profiler; + +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.RestCancellableNodeClient; +import org.elasticsearch.rest.action.RestStatusToXContentListener; + +import java.io.IOException; +import java.util.List; + +import static org.elasticsearch.rest.RestRequest.Method.GET; +import static org.elasticsearch.rest.RestRequest.Method.POST; + +public class RestGetProfilingAction extends BaseRestHandler { + @Override + public List routes() { + return List.of(new Route(GET, "/_profiling/stacktraces"), new Route(POST, "/_profiling/stacktraces")); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { + GetProfilingRequest getProfilingRequest = new GetProfilingRequest(); + request.applyContentParser(getProfilingRequest::parseXContent); + + return channel -> { + RestStatusToXContentListener listener = new RestStatusToXContentListener<>(channel); + RestCancellableNodeClient cancelClient = new RestCancellableNodeClient(client, request.getHttpChannel()); + 
cancelClient.execute(GetProfilingAction.INSTANCE, getProfilingRequest, listener); + }; + } + + @Override + public String getName() { + return "get_profiling_action"; + } +} diff --git a/x-pack/plugin/profiler/src/main/java/org/elasticsearch/xpack/profiler/StackFrame.java b/x-pack/plugin/profiler/src/main/java/org/elasticsearch/xpack/profiler/StackFrame.java new file mode 100644 index 0000000000000..928277fbe7e09 --- /dev/null +++ b/x-pack/plugin/profiler/src/main/java/org/elasticsearch/xpack/profiler/StackFrame.java @@ -0,0 +1,75 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.profiler; + +import org.elasticsearch.xcontent.ObjectPath; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.Map; +import java.util.Objects; + +final class StackFrame implements ToXContentObject { + String fileName; + String functionName; + Integer functionOffset; + Integer lineNumber; + Integer sourceType; + + StackFrame(String fileName, String functionName, Integer functionOffset, Integer lineNumber, Integer sourceType) { + this.fileName = fileName; + this.functionName = functionName; + this.functionOffset = functionOffset; + this.lineNumber = lineNumber; + this.sourceType = sourceType; + } + + public static StackFrame fromSource(Map source) { + return new StackFrame( + ObjectPath.eval("Stackframe.file.name", source), + ObjectPath.eval("Stackframe.function.name", source), + ObjectPath.eval("Stackframe.function.offset", source), + ObjectPath.eval("Stackframe.line.number", source), + ObjectPath.eval("Stackframe.source.type", source) + ); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws 
IOException { + builder.startObject(); + builder.field("file_name", this.fileName); + builder.field("function_name", this.functionName); + builder.field("function_offset", this.functionOffset); + builder.field("line_number", this.lineNumber); + builder.field("source_type", this.sourceType); + builder.endObject(); + return builder; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + StackFrame that = (StackFrame) o; + return Objects.equals(fileName, that.fileName) + && Objects.equals(functionName, that.functionName) + && Objects.equals(functionOffset, that.functionOffset) + && Objects.equals(lineNumber, that.lineNumber) + && Objects.equals(sourceType, that.sourceType); + } + + @Override + public int hashCode() { + return Objects.hash(fileName, functionName, functionOffset, lineNumber, sourceType); + } +} diff --git a/x-pack/plugin/profiler/src/main/java/org/elasticsearch/xpack/profiler/StackTrace.java b/x-pack/plugin/profiler/src/main/java/org/elasticsearch/xpack/profiler/StackTrace.java new file mode 100644 index 0000000000000..22c4c71887dca --- /dev/null +++ b/x-pack/plugin/profiler/src/main/java/org/elasticsearch/xpack/profiler/StackTrace.java @@ -0,0 +1,231 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.profiler; + +import org.elasticsearch.xcontent.ObjectPath; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Map; + +final class StackTrace implements ToXContentObject { + int[] addressOrLines; + String[] fileIds; + String[] frameIds; + int[] typeIds; + + StackTrace(int[] addressOrLines, String[] fileIds, String[] frameIds, int[] typeIds) { + this.addressOrLines = addressOrLines; + this.fileIds = fileIds; + this.frameIds = frameIds; + this.typeIds = typeIds; + } + + private static final int BASE64_FRAME_ID_LENGTH = 32; + + private static final String SAFE_BASE64_ENCODER = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz01234456789-_"; + + // tag::noformat + private static final int[] SAFE_BASE64_DECODER = new int[] { + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 62, 0, 0, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 0, 0, 0, 0, + 0, 0, 0, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, + 25, 0, 0, 0, 0, 63, 0, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, + 45, 46, 47, 48, 49, 50, 51, 0, 0, 0, 0, 0 + }; + // end::noformat + + /** + * + * runLengthDecodeBase64Url decodes a run-length encoding for the base64-encoded input string. + * E.g. the string 'BQADAg' is converted into an int array like [0, 0, 0, 0, 0, 2, 2, 2]. + * The motivating intent for this method is to unpack a base64-encoded run-length encoding + * without using intermediate storage. + * + * This method relies on these assumptions and details: + * - array encoded using run-length and base64 always returns string of length 0, 3, or 6 (mod 8) + * - since original array is composed of int, we ignore Unicode codepoints + * + * @param input A base64-encoded string. 
+ * @param size Decoded length of the input. + * @param capacity Capacity of the underlying array (>= size). + * + * @return Corresponding numbers that are encoded in the input. + */ + // package-private for testing + static int[] runLengthDecodeBase64Url(String input, int size, int capacity) { + int[] output = new int[capacity]; + int multipleOf8 = size / 8; + int remainder = size % 8; + + int n; + int count; + int value; + int i; + int j = 0; + + for (i = 0; i < multipleOf8 * 8; i += 8) { + n = (charCodeAt(input, i) << 26) | (charCodeAt(input, i + 1) << 20) | (charCodeAt(input, i + 2) << 14) | (charCodeAt( + input, + i + 3 + ) << 8) | (charCodeAt(input, i + 4) << 2) | (charCodeAt(input, i + 5) >> 4); + + count = (n >> 24) & 0xff; + value = (n >> 16) & 0xff; + + Arrays.fill(output, j, j + count, value); + j += count; + + count = (n >> 8) & 0xff; + value = n & 0xff; + + Arrays.fill(output, j, j + count, value); + j += count; + + n = ((charCodeAt(input, i + 5) & 0xf) << 12) | (charCodeAt(input, i + 6) << 6) | charCodeAt(input, i + 7); + + count = (n >> 8) & 0xff; + value = n & 0xff; + + Arrays.fill(output, j, j + count, value); + j += count; + } + + if (remainder == 6) { + n = (charCodeAt(input, i) << 26) | (charCodeAt(input, i + 1) << 20) | (charCodeAt(input, i + 2) << 14) | (charCodeAt( + input, + i + 3 + ) << 8) | (charCodeAt(input, i + 4) << 2) | (charCodeAt(input, i + 5) >> 4); + + count = (n >> 24) & 0xff; + value = (n >> 16) & 0xff; + + Arrays.fill(output, j, j + count, value); + j += count; + + count = (n >> 8) & 0xff; + value = n & 0xff; + + Arrays.fill(output, j, j + count, value); + j += count; + } else if (remainder == 3) { + n = (charCodeAt(input, i) << 12) | (charCodeAt(input, i + 1) << 6) | charCodeAt(input, i + 2); + n >>= 2; + + count = (n >> 8) & 0xff; + value = n & 0xff; + + Arrays.fill(output, j, j + count, value); + j += count; + } + return output; + } + + // package-private for testing + static int getAddressFromStackFrameID(String frameID) { + 
int address = charCodeAt(frameID, 21) & 0xf; + address <<= 6; + address += charCodeAt(frameID, 22); + address <<= 6; + address += charCodeAt(frameID, 23); + address <<= 6; + address += charCodeAt(frameID, 24); + address <<= 6; + address += charCodeAt(frameID, 25); + address <<= 6; + address += charCodeAt(frameID, 26); + address <<= 6; + address += charCodeAt(frameID, 27); + address <<= 6; + address += charCodeAt(frameID, 28); + address <<= 6; + address += charCodeAt(frameID, 29); + address <<= 6; + address += charCodeAt(frameID, 30); + address <<= 6; + address += charCodeAt(frameID, 31); + return address; + } + + private static int charCodeAt(String input, int i) { + return SAFE_BASE64_DECODER[input.charAt(i) & 0x7f]; + } + + // package-private for testing + static String getFileIDFromStackFrameID(String frameID) { + return frameID.substring(0, 21) + SAFE_BASE64_ENCODER.charAt(frameID.charAt(21) & 0x30); + } + + public static StackTrace fromSource(Map source) { + String inputFrameIDs = ObjectPath.eval("Stacktrace.frame.ids", source); + String inputFrameTypes = ObjectPath.eval("Stacktrace.frame.types", source); + int countsFrameIDs = inputFrameIDs.length() / BASE64_FRAME_ID_LENGTH; + + String[] fileIDs = new String[countsFrameIDs]; + String[] frameIDs = new String[countsFrameIDs]; + int[] addressOrLines = new int[countsFrameIDs]; + + // Step 1: Convert the base64-encoded frameID list into two separate + // lists (frame IDs and file IDs), both of which are also base64-encoded. + // + // To get the frame ID, we grab the next 32 bytes. + // + // To get the file ID, we grab the first 22 bytes of the frame ID. + // However, since the file ID is base64-encoded using 21.33 bytes + // (16 * 4 / 3), then the 22 bytes have an extra 4 bits from the + // address (see diagram in definition of EncodedStackTrace). 
+ for (int i = 0, pos = 0; i < countsFrameIDs; i++, pos += BASE64_FRAME_ID_LENGTH) { + String frameID = inputFrameIDs.substring(pos, pos + BASE64_FRAME_ID_LENGTH); + frameIDs[i] = frameID; + fileIDs[i] = getFileIDFromStackFrameID(frameID); + addressOrLines[i] = getAddressFromStackFrameID(frameID); + } + + // Step 2: Convert the run-length byte encoding into a list of uint8s. + int[] typeIDs = runLengthDecodeBase64Url(inputFrameTypes, inputFrameTypes.length(), countsFrameIDs); + + return new StackTrace(addressOrLines, fileIDs, frameIDs, typeIDs); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.array("address_or_lines", this.addressOrLines); + builder.array("file_ids", this.fileIds); + builder.array("frame_ids", this.frameIds); + builder.array("type_ids", this.typeIds); + builder.endObject(); + return builder; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + StackTrace that = (StackTrace) o; + return Arrays.equals(addressOrLines, that.addressOrLines) + && Arrays.equals(fileIds, that.fileIds) + && Arrays.equals(frameIds, that.frameIds) + && Arrays.equals(typeIds, that.typeIds); + } + + @Override + public int hashCode() { + int result = Arrays.hashCode(addressOrLines); + result = 31 * result + Arrays.hashCode(fileIds); + result = 31 * result + Arrays.hashCode(frameIds); + result = 31 * result + Arrays.hashCode(typeIds); + return result; + } +} diff --git a/x-pack/plugin/profiler/src/main/java/org/elasticsearch/xpack/profiler/TransportGetProfilingAction.java b/x-pack/plugin/profiler/src/main/java/org/elasticsearch/xpack/profiler/TransportGetProfilingAction.java new file mode 100644 index 0000000000000..fa9c9f1fd15f7 --- /dev/null +++ b/x-pack/plugin/profiler/src/main/java/org/elasticsearch/xpack/profiler/TransportGetProfilingAction.java @@ -0,0 +1,338 @@ +/* 
/**
 * Transport action backing {@code GetProfilingAction}. A request is answered in several
 * chained asynchronous steps:
 * 1. count matching events in the medium-downsampled events index to choose a suitably
 *    resampled index for the requested sample size,
 * 2. aggregate events in that index grouped by stacktrace id,
 * 3. multi-get the stacktrace documents for the surviving ids,
 * 4. concurrently multi-get stack frame details and executable names, then respond.
 */
public class TransportGetProfilingAction extends HandledTransportAction {
    private final NodeClient nodeClient;
    private final TransportService transportService;

    @Inject
    public TransportGetProfilingAction(TransportService transportService, ActionFilters actionFilters, NodeClient nodeClient) {
        super(GetProfilingAction.NAME, transportService, actionFilters, GetProfilingRequest::new);
        this.nodeClient = nodeClient;
        this.transportService = transportService;
    }

    @Override
    protected void doExecute(Task submitTask, GetProfilingRequest request, ActionListener submitListener) {
        // Associate all sub-requests with the submitting task so cancellation propagates.
        Client client = new ParentTaskAssigningClient(this.nodeClient, transportService.getLocalNode(), submitTask);
        EventsIndex mediumDownsampled = EventsIndex.MEDIUM_DOWNSAMPLED;
        // Step 1: a zero-hit count query against the medium-downsampled index. The total hit
        // count determines which resampled events index to use for the actual aggregation.
        client.prepareSearch(mediumDownsampled.getName())
            .setSize(0)
            .setQuery(request.getQuery())
            .setTrackTotalHits(true)
            .execute(new ActionListener<>() {
                @Override
                public void onResponse(SearchResponse searchResponse) {
                    long sampleCount = searchResponse.getHits().getTotalHits().value;
                    EventsIndex resampledIndex = mediumDownsampled.getResampledIndex(request.getSampleSize(), sampleCount);
                    searchEventGroupByStackTrace(client, request, resampledIndex, submitListener);
                }

                @Override
                public void onFailure(Exception e) {
                    submitListener.onFailure(e);
                }
            });
    }

    // Step 2: aggregate event counts per stacktrace id in the chosen index and resample
    // them towards the requested sample size; ids whose resampled count drops to zero are
    // discarded. Responds immediately if no events survive.
    private void searchEventGroupByStackTrace(
        Client client,
        GetProfilingRequest request,
        EventsIndex eventsIndex,
        ActionListener submitListener
    ) {
        GetProfilingResponseBuilder responseBuilder = new GetProfilingResponseBuilder();
        client.prepareSearch(eventsIndex.getName())
            .setTrackTotalHits(false)
            .setQuery(request.getQuery())
            .addAggregation(
                new TermsAggregationBuilder("group_by")
                    // 'size' should be max 100k, but might be slightly more. Better be on the safe side.
                    .size(150_000)
                    .field("Stacktrace.id")
                    // 'execution_hint: map' skips the slow building of ordinals that we don't need.
                    // Especially with high cardinality fields, this makes aggregations really slow.
                    .executionHint("map")
                    .subAggregation(new SumAggregationBuilder("count").field("Stacktrace.count"))
            )
            .addAggregation(new SumAggregationBuilder("total_count").field("Stacktrace.count"))
            .execute(new ActionListener<>() {
                @Override
                public void onResponse(SearchResponse searchResponse) {
                    Sum totalCountAgg = searchResponse.getAggregations().get("total_count");
                    long totalCount = Math.round(totalCountAgg.value());
                    Resampler resampler = new Resampler(request, eventsIndex.getSampleRate(), totalCount);
                    StringTerms stacktraces = searchResponse.getAggregations().get("group_by");
                    Map stackTraceEvents = Maps.newHashMapWithExpectedSize(stacktraces.getBuckets().size());
                    for (StringTerms.Bucket bucket : stacktraces.getBuckets()) {
                        Sum count = bucket.getAggregations().get("count");
                        int finalCount = resampler.adjustSampleCount((int) count.value());
                        if (finalCount > 0) {
                            stackTraceEvents.put(bucket.getKeyAsString(), finalCount);
                        }
                    }
                    if (stackTraceEvents.isEmpty() == false) {
                        responseBuilder.setStackTraceEvents(stackTraceEvents);
                        retrieveStackTraces(client, responseBuilder, submitListener);
                    } else {
                        // No surviving events: answer with an (empty) response right away.
                        submitListener.onResponse(responseBuilder.build());
                    }
                }

                @Override
                public void onFailure(Exception e) {
                    submitListener.onFailure(e);
                }
            });
    }

    // Step 3: fetch the stacktrace documents for all surviving ids and collect the
    // referenced frame and executable ids for the detail lookups in step 4.
    // Failed or missing items are silently skipped (best effort).
    private void retrieveStackTraces(
        Client client,
        GetProfilingResponseBuilder responseBuilder,
        ActionListener submitListener
    ) {
        client.prepareMultiGet()
            .addIds("profiling-stacktraces", responseBuilder.getStackTraceEvents().keySet())
            .setRealtime(true)
            .execute(new ActionListener<>() {
                @Override
                public void onResponse(MultiGetResponse multiGetItemResponses) {
                    Map stackTracePerId = new HashMap<>();
                    Set stackFrameIds = new HashSet<>();
                    Set executableIds = new HashSet<>();
                    int totalFrames = 0;
                    for (MultiGetItemResponse trace : multiGetItemResponses) {
                        if (trace.isFailed() == false && trace.getResponse().isExists()) {
                            String id = trace.getId();
                            StackTrace stacktrace = StackTrace.fromSource(trace.getResponse().getSource());
                            stackTracePerId.put(id, stacktrace);
                            totalFrames += stacktrace.frameIds.length;
                            stackFrameIds.addAll(Arrays.asList(stacktrace.frameIds));
                            executableIds.addAll(Arrays.asList(stacktrace.fileIds));
                        }
                    }
                    responseBuilder.setStackTraces(stackTracePerId);
                    responseBuilder.setTotalFrames(totalFrames);
                    retrieveStackTraceDetails(client, responseBuilder, stackFrameIds, executableIds, submitListener);
                }

                @Override
                public void onFailure(Exception e) {
                    submitListener.onFailure(e);
                }
            });
    }

    // Step 4: fetch stack frame details and executable names concurrently; the shared
    // DetailsHandler sends the response once both lookups have completed. Empty id sets
    // are short-circuited with an empty MultiGetResponse so the handler still completes.
    private void retrieveStackTraceDetails(
        Client client,
        GetProfilingResponseBuilder responseBuilder,
        Set stackFrameIds,
        Set executableIds,
        ActionListener submitListener
    ) {

        DetailsHandler handler = new DetailsHandler(responseBuilder, submitListener);

        if (stackFrameIds.isEmpty()) {
            handler.onStackFramesResponse(new MultiGetResponse(new MultiGetItemResponse[0]));
        } else {
            client.prepareMultiGet().addIds("profiling-stackframes", stackFrameIds).setRealtime(true).execute(new ActionListener<>() {
                @Override
                public void onResponse(MultiGetResponse multiGetItemResponses) {
                    handler.onStackFramesResponse(multiGetItemResponses);
                }

                @Override
                public void onFailure(Exception e) {
                    submitListener.onFailure(e);
                }
            });
        }
        // no data dependency - we can do this concurrently
        if (executableIds.isEmpty()) {
            handler.onExecutableDetailsResponse(new MultiGetResponse(new MultiGetItemResponse[0]));
        } else {
            client.prepareMultiGet().addIds("profiling-executables", executableIds).setRealtime(true).execute(new ActionListener<>() {
                @Override
                public void onResponse(MultiGetResponse multiGetItemResponses) {
                    handler.onExecutableDetailsResponse(multiGetItemResponses);
                }

                @Override
                public void onFailure(Exception e) {
                    submitListener.onFailure(e);
                }
            });
        }
    }

    /**
     * Downsamples per-stacktrace event counts when the aggregated total exceeds the
     * requested sample size, then scales surviving counts back up to fully-sampled values.
     */
    private static class Resampler {
        // Whether counts need probabilistic thinning at all.
        private final boolean requiresResampling;

        // RNG used for thinning; null when no resampling is needed.
        private final Random r;

        // Sample rate of the events index the counts came from.
        private final double sampleRate;

        // Probability with which each individual event is kept.
        private final double p;

        Resampler(GetProfilingRequest request, double sampleRate, long totalCount) {
            // Manually reduce sample count if totalCount exceeds sampleSize by 10%.
            if (totalCount > request.getSampleSize() * 1.1) {
                this.requiresResampling = true;
                // Make the RNG predictable to get reproducible results.
                this.r = new Random(request.hashCode());
                this.sampleRate = sampleRate;
                this.p = (double) request.getSampleSize() / totalCount;
            } else {
                this.requiresResampling = false;
                this.r = null;
                this.sampleRate = sampleRate;
                this.p = 1.0d;
            }
        }

        // Thins originalCount by keeping each event with probability p, then scales the
        // survivors back up by 1 / (sampleRate * p) to approximate fully-sampled counts.
        public int adjustSampleCount(int originalCount) {
            if (requiresResampling) {
                int newCount = 0;
                for (int i = 0; i < originalCount; i++) {
                    if (r.nextDouble() < p) {
                        newCount++;
                    }
                }
                if (newCount > 0) {
                    // Adjust the sample counts from down-sampled to fully sampled.
                    // Be aware that downsampling drops entries from stackTraceEvents, so that
                    // the sum of the upscaled count values is less that totalCount.
                    return (int) Math.floor(newCount / (sampleRate * p));
                } else {
                    return 0;
                }
            } else {
                return originalCount;
            }
        }
    }

    /**
     * Collects stack trace details which are retrieved concurrently and sends a response only when all details are known.
     *
     * NOTE(review): publication relies on two volatile fields checked in mayFinish(). If both
     * responses arrive on different threads at nearly the same time, each thread can observe
     * both fields non-null and submitListener.onResponse() may be invoked twice — confirm the
     * listener tolerates double completion, or guard completion with an atomic counter/flag.
     */
    private static class DetailsHandler {
        private final GetProfilingResponseBuilder builder;
        private final ActionListener submitListener;
        private volatile Map executables;
        private volatile Map stackFrames;

        private DetailsHandler(GetProfilingResponseBuilder builder, ActionListener submitListener) {
            this.builder = builder;
            this.submitListener = submitListener;
        }

        // Records stack frame details; failed/missing items are skipped (best effort).
        public void onStackFramesResponse(MultiGetResponse multiGetItemResponses) {
            Map stackFrames = new HashMap<>();
            for (MultiGetItemResponse frame : multiGetItemResponses) {
                if (frame.isFailed() == false && frame.getResponse().isExists()) {
                    stackFrames.put(frame.getId(), StackFrame.fromSource(frame.getResponse().getSource()));
                }
            }
            // publish to object state only when completely done, otherwise mayFinish() could run twice
            this.stackFrames = stackFrames;
            mayFinish();
        }

        // Records executable names; failed/missing items are skipped (best effort).
        public void onExecutableDetailsResponse(MultiGetResponse multiGetItemResponses) {
            Map executables = new HashMap<>();
            for (MultiGetItemResponse executable : multiGetItemResponses) {
                if (executable.isFailed() == false && executable.getResponse().isExists()) {
                    executables.put(executable.getId(), ObjectPath.eval("Executable.file.name", executable.getResponse().getSource()));
                }
            }
            // publish to object state only when completely done, otherwise mayFinish() could run twice
            this.executables = executables;
            mayFinish();
        }

        // Sends the response once both detail lookups have published their results.
        public void mayFinish() {
            if (executables != null && stackFrames != null) {
                builder.setExecutables(executables);
                builder.setStackFrames(stackFrames);
                submitListener.onResponse(builder.build());
            }
        }
    }

    /**
     * Mutable accumulator for the pieces of a GetProfilingResponse gathered across the
     * asynchronous steps above.
     */
    private static class GetProfilingResponseBuilder {
        private Map stackTraces;
        private int totalFrames;
        private Map stackFrames;
        private Map executables;
        private Map stackTraceEvents;
        private Exception error;

        public void setStackTraces(Map stackTraces) {
            this.stackTraces = stackTraces;
        }

        public void setTotalFrames(int totalFrames) {
            this.totalFrames = totalFrames;
        }

        public void setStackFrames(Map stackFrames) {
            this.stackFrames = stackFrames;
        }

        public void setExecutables(Map executables) {
            this.executables = executables;
        }

        public void setStackTraceEvents(Map stackTraceEvents) {
            this.stackTraceEvents = stackTraceEvents;
        }

        public Map getStackTraceEvents() {
            return stackTraceEvents;
        }

        public void setError(Exception error) {
            this.error = error;
        }

        public GetProfilingResponse build() {
            if (error != null) {
                return new GetProfilingResponse(error);
            } else {
                return new GetProfilingResponse(stackTraces, stackFrames, executables, stackTraceEvents, totalFrames);
            }
        }
    }
}
+ */ + +package org.elasticsearch.xpack.profiler; + +import org.elasticsearch.test.ESTestCase; + +public class EventsIndexTests extends ESTestCase { + public void testFullIndex() { + EventsIndex idx = EventsIndex.FULL_INDEX; + assertEquals("profiling-events-all", idx.getName()); + assertEquals(1.0d, idx.getSampleRate(), 1e-3); + } + + public void testResampledIndexSameSize() { + EventsIndex resampledIndex = EventsIndex.MEDIUM_DOWNSAMPLED.getResampledIndex(100, 100); + assertEquals("profiling-events-5pow06", resampledIndex.getName()); + assertEquals(Math.pow(1.0d / 5.0d, 6.0d), resampledIndex.getSampleRate(), 1e-9); + } + + public void testResampledIndexDifferentSizes() { + assertResampledIndex("profiling-events-5pow01", Math.pow(5.0d, 5)); + assertResampledIndex("profiling-events-5pow02", Math.pow(5.0d, 4)); + assertResampledIndex("profiling-events-5pow03", Math.pow(5.0d, 3)); + + assertResampledIndex("profiling-events-5pow04", Math.pow(5.0d, 2)); + assertResampledIndex("profiling-events-5pow05", Math.pow(5.0d, 1)); + + assertResampledIndex("profiling-events-5pow06", Math.pow(5.0d, 0)); + assertResampledIndex("profiling-events-5pow07", Math.pow(5.0d, -1)); + assertResampledIndex("profiling-events-5pow08", Math.pow(5.0d, -2)); + + assertResampledIndex("profiling-events-5pow09", Math.pow(5.0d, -3)); + assertResampledIndex("profiling-events-5pow10", Math.pow(5.0d, -4)); + assertResampledIndex("profiling-events-5pow11", Math.pow(5.0d, -5)); + } + + private void assertResampledIndex(String expectedName, double ratio) { + long currentSampleSize = 10_000_000L; + long targetSampleSize = (long) (currentSampleSize * ratio); + EventsIndex e = EventsIndex.MEDIUM_DOWNSAMPLED; + assertEquals(expectedName, e.getResampledIndex(targetSampleSize, currentSampleSize).getName()); + } +} diff --git a/x-pack/plugin/profiler/src/test/java/org/elasticsearch/xpack/profiler/GetProfilingRequestTests.java 
// File: x-pack/plugin/profiler/src/test/java/org/elasticsearch/xpack/profiler/GetProfilingRequestTests.java

/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

package org.elasticsearch.xpack.profiler;

import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.RangeQueryBuilder;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xcontent.NamedXContentRegistry;
import org.elasticsearch.xcontent.XContentFactory;
import org.elasticsearch.xcontent.XContentParser;

import java.io.IOException;

import static java.util.Collections.emptyList;

/**
 * Tests wire serialization and REST-body parsing of {@link GetProfilingRequest}.
 */
public class GetProfilingRequestTests extends ESTestCase {
    public void testSerialization() throws IOException {
        // Both fields are optional, so cover the present and the absent variants.
        Integer sampleSize = randomBoolean() ? randomIntBetween(0, Integer.MAX_VALUE) : null;
        QueryBuilder query = randomBoolean() ? new BoolQueryBuilder() : null;

        GetProfilingRequest original = new GetProfilingRequest(sampleSize, query);
        try (BytesStreamOutput out = new BytesStreamOutput()) {
            original.writeTo(out);
            try (NamedWriteableAwareStreamInput in = new NamedWriteableAwareStreamInput(out.bytes().streamInput(), writableRegistry())) {
                GetProfilingRequest copy = new GetProfilingRequest(in);
                assertEquals(sampleSize, copy.getSampleSize());
                assertEquals(query, copy.getQuery());
            }
        }
    }

    public void testParseValidXContent() throws IOException {
        try (XContentParser parser = createParser(XContentFactory.jsonBuilder()
            //tag::noformat
            .startObject()
                .field("sample_size", 500)
                .startObject("query")
                    .startObject("range")
                        .startObject("@timestamp")
                            .field("gte", "2022-10-05")
                        .endObject()
                    .endObject()
                .endObject()
            .endObject()
            //end::noformat
        )) {
            GetProfilingRequest request = new GetProfilingRequest();
            request.parseXContent(parser);

            assertEquals(Integer.valueOf(500), request.getSampleSize());
            // A spot-check of the field name suffices; query parsing itself is covered elsewhere.
            assertEquals("@timestamp", ((RangeQueryBuilder) request.getQuery()).fieldName());
        }
    }

    public void testParseXContentUnrecognizedField() throws IOException {
        try (XContentParser parser = createParser(XContentFactory.jsonBuilder()
            //tag::noformat
            .startObject()
                // misspelled on purpose: the correct key is "sample_size"
                .field("sample-size", 500)
                .startObject("query")
                    .startObject("range")
                        .startObject("@timestamp")
                            .field("gte", "2022-10-05")
                        .endObject()
                    .endObject()
                .endObject()
            .endObject()
            //end::noformat
        )) {
            GetProfilingRequest request = new GetProfilingRequest();
            ParsingException ex = expectThrows(ParsingException.class, () -> request.parseXContent(parser));
            assertEquals("Unknown key for a VALUE_NUMBER in [sample-size].", ex.getMessage());
        }
    }

    @Override
    protected NamedXContentRegistry xContentRegistry() {
        // required so that the embedded query can be parsed
        return new NamedXContentRegistry(new SearchModule(Settings.EMPTY, emptyList()).getNamedXContents());
    }

    @Override
    protected NamedWriteableRegistry writableRegistry() {
        // required so that the embedded query can be deserialized
        return new NamedWriteableRegistry(new SearchModule(Settings.EMPTY, emptyList()).getNamedWriteables());
    }
}

// File: x-pack/plugin/profiler/src/test/java/org/elasticsearch/xpack/profiler/GetProfilingResponseTests.java

/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

package org.elasticsearch.xpack.profiler;

import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.test.AbstractWireSerializingTestCase;

import java.util.Map;
import java.util.function.Supplier;

/**
 * Round-trip wire-serialization tests for {@link GetProfilingResponse}.
 * All payload maps are optional, so each one is randomly present or null.
 */
public class GetProfilingResponseTests extends AbstractWireSerializingTestCase<GetProfilingResponse> {
    // Returns the supplied value or null with equal probability. The Supplier
    // variant avoids evaluating the value when null is chosen.
    private <T> T orNull(Supplier<T> value) {
        return randomBoolean() ? value.get() : null;
    }

    private <T> T orNull(T value) {
        return randomBoolean() ? value : null;
    }

    @Override
    protected GetProfilingResponse createTestInstance() {
        int totalFrames = randomIntBetween(1, 100);

        Map<String, StackTrace> stackTraces = orNull(
            Map.of(
                "QjoLteG7HX3VUUXr-J4kHQ",
                new StackTrace(
                    new int[] { 1083999 },
                    new String[] { "QCCDqjSg3bMK1C4YRK6Tiw" },
                    new String[] { "QCCDqjSg3bMK1C4YRK6TiwAAAAAAEIpf" },
                    new int[] { 2 }
                )
            )
        );
        // Every StackFrame field is itself nullable.
        Map<String, StackFrame> stackFrames = orNull(
            Map.of(
                "QCCDqjSg3bMK1C4YRK6TiwAAAAAAEIpf",
                new StackFrame(
                    orNull(() -> randomAlphaOfLength(20)),
                    orNull(() -> randomAlphaOfLength(20)),
                    orNull(() -> randomIntBetween(1, Integer.MAX_VALUE)),
                    orNull(() -> randomIntBetween(1, 30_000)),
                    orNull(() -> randomIntBetween(1, 10))
                )
            )
        );
        Map<String, String> executables = orNull(Map.of("QCCDqjSg3bMK1C4YRK6Tiw", "libc.so.6"));
        Map<String, Integer> stackTraceEvents = orNull(Map.of(randomAlphaOfLength(12), randomIntBetween(1, 200)));

        return new GetProfilingResponse(stackTraces, stackFrames, executables, stackTraceEvents, totalFrames);
    }

    @Override
    protected Writeable.Reader<GetProfilingResponse> instanceReader() {
        return GetProfilingResponse::new;
    }
}
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

package org.elasticsearch.xpack.profiler;

import org.apache.lucene.util.SetOnce;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.test.rest.FakeRestRequest;
import org.elasticsearch.test.rest.RestActionTestCase;
import org.elasticsearch.xcontent.NamedXContentRegistry;
import org.elasticsearch.xcontent.XContentType;
import org.junit.Before;

import java.util.Collections;

import static java.util.Collections.emptyList;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;

/**
 * Verifies that {@link RestGetProfilingAction} turns REST requests into the
 * expected {@link GetProfilingRequest} transport requests.
 */
public class RestGetProfilingActionTests extends RestActionTestCase {
    @Before
    public void setUpAction() {
        controller().registerHandler(new RestGetProfilingAction());
    }

    // Canned empty transport response; these tests only inspect the request side.
    private static GetProfilingResponse emptyResponse() {
        return new GetProfilingResponse(
            Collections.emptyMap(),
            Collections.emptyMap(),
            Collections.emptyMap(),
            Collections.emptyMap(),
            0
        );
    }

    public void testPrepareEmptyRequest() {
        SetOnce<Boolean> executed = new SetOnce<>();
        verifyingClient.setExecuteLocallyVerifier((actionType, transportRequest) -> {
            assertThat(transportRequest, instanceOf(GetProfilingRequest.class));
            GetProfilingRequest profilingRequest = (GetProfilingRequest) transportRequest;
            // An empty JSON body must leave both optional fields unset.
            assertThat(profilingRequest.getSampleSize(), nullValue());
            assertThat(profilingRequest.getQuery(), nullValue());
            executed.set(true);
            return emptyResponse();
        });
        RestRequest restRequest = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.POST)
            .withPath("/_profiling/stacktraces")
            .withContent(new BytesArray("{}"), XContentType.JSON)
            .build();
        dispatchRequest(restRequest);
        assertThat(executed.get(), equalTo(true));
    }

    public void testPrepareParameterizedRequest() {
        SetOnce<Boolean> executed = new SetOnce<>();
        verifyingClient.setExecuteLocallyVerifier((actionType, transportRequest) -> {
            assertThat(transportRequest, instanceOf(GetProfilingRequest.class));
            GetProfilingRequest profilingRequest = (GetProfilingRequest) transportRequest;
            // Both sample size and query from the body must be carried over.
            assertThat(profilingRequest.getSampleSize(), is(10000));
            assertThat(profilingRequest.getQuery(), notNullValue(QueryBuilder.class));
            executed.set(true);
            return emptyResponse();
        });
        RestRequest restRequest = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.POST)
            .withPath("/_profiling/stacktraces")
            .withContent(new BytesArray("""
                {
                  "sample_size": 10000,
                  "query": {
                    "bool": {
                      "filter": [
                        {
                          "range": {
                            "@timestamp": {
                              "gte": "2022-10-05",
                              "lt": "2022-12-05"
                            }
                          }
                        }
                      ]
                    }
                  }
                }
                """), XContentType.JSON)
            .build();
        dispatchRequest(restRequest);
        assertThat(executed.get(), equalTo(true));
    }

    @Override
    protected NamedXContentRegistry xContentRegistry() {
        // required so that the embedded query can be parsed
        return new NamedXContentRegistry(new SearchModule(Settings.EMPTY, emptyList()).getNamedXContents());
    }
}
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

package org.elasticsearch.xpack.profiler;

import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.EqualsHashCodeTestUtils;
import org.elasticsearch.xcontent.ToXContent;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.XContentFactory;
import org.elasticsearch.xcontent.XContentType;

import java.io.IOException;
import java.util.Map;

import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent;

/**
 * Tests source parsing, XContent rendering and equality of {@link StackFrame}.
 */
public class StackFrameTests extends ESTestCase {
    public void testCreateFromSource() {
        // tag::noformat
        StackFrame frame = StackFrame.fromSource(
            Map.of("Stackframe", Map.of(
                "file", Map.of("name", "Main.java"),
                "function", Map.of(
                    "name", "helloWorld",
                    "offset", 31733
                ),
                "line", Map.of("number", 22),
                "source", Map.of("type", 3))
            )
        );
        // end::noformat
        assertEquals(Integer.valueOf(3), frame.sourceType);
        assertEquals("Main.java", frame.fileName);
        assertEquals("helloWorld", frame.functionName);
        assertEquals(Integer.valueOf(31733), frame.functionOffset);
        assertEquals(Integer.valueOf(22), frame.lineNumber);
    }

    public void testToXContent() throws IOException {
        XContentType contentType = randomFrom(XContentType.values());
        XContentBuilder expectedRequest = XContentFactory.contentBuilder(contentType)
            .startObject()
            .field("file_name", "Main.java")
            .field("function_name", "helloWorld")
            .field("function_offset", 31733)
            .field("line_number", 22)
            .field("source_type", 3)
            .endObject();

        XContentBuilder actualRequest = XContentFactory.contentBuilder(contentType);
        // Renamed from "stackTrace": this local is a StackFrame, not a StackTrace.
        StackFrame frame = new StackFrame("Main.java", "helloWorld", 31733, 22, 3);
        frame.toXContent(actualRequest, ToXContent.EMPTY_PARAMS);

        assertToXContentEquivalent(BytesReference.bytes(expectedRequest), BytesReference.bytes(actualRequest), contentType);
    }

    public void testEquality() {
        StackFrame frame = new StackFrame("Main.java", "helloWorld", 31733, 22, 3);
        // The copy function must produce an equal-but-not-same instance.
        EqualsHashCodeTestUtils.checkEqualsAndHashCode(
            frame,
            (o -> new StackFrame(o.fileName, o.functionName, o.functionOffset, o.lineNumber, o.sourceType))
        );
    }
}

// File: x-pack/plugin/profiler/src/test/java/org/elasticsearch/xpack/profiler/StackTraceTests.java

/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

package org.elasticsearch.xpack.profiler;

import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.EqualsHashCodeTestUtils;
import org.elasticsearch.xcontent.ToXContent;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.XContentFactory;
import org.elasticsearch.xcontent.XContentType;

import java.io.IOException;
import java.util.Map;

import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent;

/**
 * Tests frame-ID decoding, run-length decoding, source parsing, XContent
 * rendering and equality of {@link StackTrace}.
 */
public class StackTraceTests extends ESTestCase {
    public void testDecodeFrameId() {
        String frameId = "AAAAAAAAAAUAAAAAAAAB3gAAAAAAD67u";
        // The first 22 characters are the base64 encoded representation of the
        // file-ID tuple (5, 478); the remainder encodes the address.
        assertEquals("AAAAAAAAAAUAAAAAAAAB3g", StackTrace.getFileIDFromStackFrameID(frameId));
        assertEquals(1027822, StackTrace.getAddressFromStackFrameID(frameId));
    }

    public void testRunlengthDecodeUniqueValues() {
        // one occurrence each of the values 0 - 9, in reverse order
        String encodedFrameTypes = "AQkBCAEHAQYBBQEEAQMBAgEBAQA";
        int[] actual = StackTrace.runLengthDecodeBase64Url(encodedFrameTypes, encodedFrameTypes.length(), 10);
        assertArrayEquals(new int[] { 9, 8, 7, 6, 5, 4, 3, 2, 1, 0 }, actual);
    }

    public void testRunlengthDecodeSingleValue() {
        // "4", repeated ten times
        String encodedFrameTypes = "CgQ";
        int[] actual = StackTrace.runLengthDecodeBase64Url(encodedFrameTypes, encodedFrameTypes.length(), 10);
        assertArrayEquals(new int[] { 4, 4, 4, 4, 4, 4, 4, 4, 4, 4 }, actual);
    }

    public void testRunlengthDecodeFillsGap() {
        // "2", repeated three times
        String encodedFrameTypes = "AwI";
        int[] actual = StackTrace.runLengthDecodeBase64Url(encodedFrameTypes, encodedFrameTypes.length(), 5);
        // zeroes should be appended for the last two values which are not present in the encoded representation.
        assertArrayEquals(new int[] { 2, 2, 2, 0, 0 }, actual);
    }

    public void testRunlengthDecodeMixedValue() {
        // two runs: "0" repeated five times, then "2" repeated three times
        // (the bytes of "BQADAg" decode to the pairs (5, 0) and (3, 2))
        String encodedFrameTypes = "BQADAg";
        int[] actual = StackTrace.runLengthDecodeBase64Url(encodedFrameTypes, encodedFrameTypes.length(), 8);
        assertArrayEquals(new int[] { 0, 0, 0, 0, 0, 2, 2, 2 }, actual);
    }

    public void testCreateFromSource() {
        String ids = "AAAAAAAAAAUAAAAAAAAB3gAAAAAAD67u";
        String types = "AQI";
        // tag::noformat
        StackTrace stackTrace = StackTrace.fromSource(
            Map.of("Stacktrace",
                Map.of("frame",
                    Map.of(
                        "ids", ids,
                        "types", types)
                )
            )
        );
        // end::noformat
        assertArrayEquals(new String[] { "AAAAAAAAAAUAAAAAAAAB3gAAAAAAD67u" }, stackTrace.frameIds);
        assertArrayEquals(new String[] { "AAAAAAAAAAUAAAAAAAAB3g" }, stackTrace.fileIds);
        assertArrayEquals(new int[] { 1027822 }, stackTrace.addressOrLines);
        assertArrayEquals(new int[] { 2 }, stackTrace.typeIds);
    }

    public void testToXContent() throws IOException {
        XContentType contentType = randomFrom(XContentType.values());
        XContentBuilder expectedRequest = XContentFactory.contentBuilder(contentType)
            .startObject()
            .array("address_or_lines", new int[] { 1027822 })
            .array("file_ids", "AAAAAAAAAAUAAAAAAAAB3g")
            .array("frame_ids", "AAAAAAAAAAUAAAAAAAAB3gAAAAAAD67u")
            .array("type_ids", new int[] { 2 })
            .endObject();

        XContentBuilder actualRequest = XContentFactory.contentBuilder(contentType);
        StackTrace stackTrace = new StackTrace(
            new int[] { 1027822 },
            new String[] { "AAAAAAAAAAUAAAAAAAAB3g" },
            new String[] { "AAAAAAAAAAUAAAAAAAAB3gAAAAAAD67u" },
            new int[] { 2 }
        );
        stackTrace.toXContent(actualRequest, ToXContent.EMPTY_PARAMS);

        assertToXContentEquivalent(BytesReference.bytes(expectedRequest), BytesReference.bytes(actualRequest), contentType);
    }

    public void testEquality() {
        StackTrace stackTrace = new StackTrace(
            new int[] { 1027822 },
            new String[] { "AAAAAAAAAAUAAAAAAAAB3g" },
            new String[] { "AAAAAAAAAAUAAAAAAAAB3gAAAAAAD67u" },
            new int[] { 2 }
        );

        // Clone the arrays so the copy is equal but does not share state.
        EqualsHashCodeTestUtils.checkEqualsAndHashCode(
            stackTrace,
            (o -> new StackTrace(o.addressOrLines.clone(), o.fileIds.clone(), o.frameIds.clone(), o.typeIds.clone()))
        );
    }
}