diff --git a/hadoop-hdds/client/pom.xml b/hadoop-hdds/client/pom.xml index 917d90f88e86..9c94e152a9f2 100644 --- a/hadoop-hdds/client/pom.xml +++ b/hadoop-hdds/client/pom.xml @@ -32,14 +32,6 @@ com.google.guava guava - - io.opentracing - opentracing-api - - - io.opentracing - opentracing-util - jakarta.annotation jakarta.annotation-api diff --git a/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientGrpc.java b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientGrpc.java index 84d66970e23a..b07cee4097c0 100644 --- a/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientGrpc.java +++ b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientGrpc.java @@ -21,9 +21,6 @@ import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; -import io.opentracing.Scope; -import io.opentracing.Span; -import io.opentracing.util.GlobalTracer; import java.io.IOException; import java.io.InterruptedIOException; import java.util.ArrayList; @@ -536,10 +533,8 @@ public XceiverClientReply sendCommandAsync( ContainerCommandRequestProto request) throws IOException, ExecutionException, InterruptedException { - Span span = GlobalTracer.get() - .buildSpan("XceiverClientGrpc." + request.getCmdType().name()).start(); - - try (Scope ignored = GlobalTracer.get().activateSpan(span)) { + try (TracingUtil.TraceCloseable ignored = TracingUtil.createActivatedSpan( + "XceiverClientGrpc." 
+ request.getCmdType().name())) { ContainerCommandRequestProto.Builder builder = ContainerCommandRequestProto.newBuilder(request) @@ -553,9 +548,6 @@ public XceiverClientReply sendCommandAsync( asyncReply.getResponse().get(); } return asyncReply; - - } finally { - span.finish(); } } diff --git a/hadoop-hdds/common/pom.xml b/hadoop-hdds/common/pom.xml index 3138ac4e1177..0216b808a7cf 100644 --- a/hadoop-hdds/common/pom.xml +++ b/hadoop-hdds/common/pom.xml @@ -68,16 +68,28 @@ picocli - io.jaegertracing - jaeger-core + io.opentelemetry + opentelemetry-api - io.opentracing - opentracing-api + io.opentelemetry + opentelemetry-context - io.opentracing - opentracing-util + io.opentelemetry + opentelemetry-exporter-otlp + + + io.opentelemetry + opentelemetry-sdk + + + io.opentelemetry + opentelemetry-sdk-common + + + io.opentelemetry + opentelemetry-sdk-trace jakarta.annotation @@ -156,11 +168,6 @@ org.yaml snakeyaml - - io.jaegertracing - jaeger-client - runtime - org.apache.ratis ratis-metrics-dropwizard3 diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/storage/ContainerProtocolCalls.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/storage/ContainerProtocolCalls.java index bbedf1e75081..a934fc513720 100644 --- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/storage/ContainerProtocolCalls.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/storage/ContainerProtocolCalls.java @@ -20,9 +20,7 @@ import static java.util.Collections.singletonList; import static org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.Result.BLOCK_TOKEN_VERIFICATION_FAILED; -import io.opentracing.Scope; -import io.opentracing.Span; -import io.opentracing.util.GlobalTracer; +import io.opentelemetry.api.trace.Span; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; @@ -155,17 +153,17 @@ static T tryEachDatanode(Pipeline pipeline, try { return op.apply(d); } catch (IOException e) { - 
Span span = GlobalTracer.get().activeSpan(); + Span span = TracingUtil.getActiveSpan(); if (e instanceof StorageContainerException) { StorageContainerException sce = (StorageContainerException)e; // Block token expired. There's no point retrying other DN. // Throw the exception to request a new block token right away. if (sce.getResult() == BLOCK_TOKEN_VERIFICATION_FAILED) { - span.log("block token verification failed at DN " + d); + span.addEvent("block token verification failed at DN " + d); throw e; } } - span.log("failed to connect to DN " + d); + span.addEvent("failed to connect to DN " + d); excluded.add(d); if (excluded.size() < pipeline.size()) { LOG.warn(toErrorMessage.apply(d) @@ -372,18 +370,15 @@ public static ContainerProtos.ReadChunkResponseProto readChunk( builder.setEncodedToken(token.encodeToUrlString()); } - Span span = GlobalTracer.get() - .buildSpan("readChunk").start(); - try (Scope ignored = GlobalTracer.get().activateSpan(span)) { - span.setTag("offset", chunk.getOffset()) - .setTag("length", chunk.getLen()) - .setTag("block", blockID.toString()); + try (TracingUtil.TraceCloseable ignored = TracingUtil.createActivatedSpan("readChunk")) { + Span span = TracingUtil.getActiveSpan(); + span.setAttribute("offset", chunk.getOffset()) + .setAttribute("length", chunk.getLen()) + .setAttribute("block", blockID.toString()); return tryEachDatanode(xceiverClient.getPipeline(), d -> readChunk(xceiverClient, chunk, blockID, validators, builder, d), d -> toErrorMessage(chunk, blockID, d)); - } finally { - span.finish(); } } @@ -394,8 +389,7 @@ private static ContainerProtos.ReadChunkResponseProto readChunk( DatanodeDetails d) throws IOException { ContainerCommandRequestProto.Builder requestBuilder = builder .setDatanodeUuid(d.getUuidString()); - Span span = GlobalTracer.get().activeSpan(); - String traceId = TracingUtil.exportSpan(span); + String traceId = TracingUtil.exportCurrentSpan(); if (traceId != null) { requestBuilder = 
requestBuilder.setTraceID(traceId); } diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/tracing/GrpcServerInterceptor.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/tracing/GrpcServerInterceptor.java index 044137fab6d4..af1b77facd18 100644 --- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/tracing/GrpcServerInterceptor.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/tracing/GrpcServerInterceptor.java @@ -17,9 +17,8 @@ package org.apache.hadoop.hdds.tracing; -import io.opentracing.Scope; -import io.opentracing.Span; -import io.opentracing.util.GlobalTracer; +import io.opentelemetry.api.trace.Span; +import io.opentelemetry.context.Scope; import org.apache.ratis.thirdparty.io.grpc.ForwardingServerCallListener.SimpleForwardingServerCallListener; import org.apache.ratis.thirdparty.io.grpc.Metadata; import org.apache.ratis.thirdparty.io.grpc.ServerCall; @@ -41,14 +40,15 @@ public Listener interceptCall( next.startCall(call, headers)) { @Override public void onMessage(ReqT message) { + Span span = TracingUtil .importAndCreateSpan( call.getMethodDescriptor().getFullMethodName(), headers.get(GrpcClientInterceptor.TRACING_HEADER)); - try (Scope ignored = GlobalTracer.get().activateSpan(span)) { + try (Scope ignored = span.makeCurrent()) { super.onMessage(message); } finally { - span.finish(); + span.end(); } } }; diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/tracing/StringCodec.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/tracing/StringCodec.java deleted file mode 100644 index f22393a50d3c..000000000000 --- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/tracing/StringCodec.java +++ /dev/null @@ -1,96 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.hdds.tracing; - -import io.jaegertracing.internal.JaegerSpanContext; -import io.jaegertracing.internal.exceptions.EmptyTracerStateStringException; -import io.jaegertracing.internal.exceptions.MalformedTracerStateStringException; -import io.jaegertracing.internal.exceptions.TraceIdOutOfBoundException; -import io.jaegertracing.spi.Codec; -import io.opentracing.propagation.Format; -import java.math.BigInteger; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * A jaeger codec to save the current tracing context as a string. 
- */ -public class StringCodec implements Codec { - - private static final Logger LOG = LoggerFactory.getLogger(StringCodec.class); - public static final StringFormat FORMAT = new StringFormat(); - - @Override - public JaegerSpanContext extract(StringBuilder s) { - if (s == null) { - throw new EmptyTracerStateStringException(); - } - String value = s.toString(); - if (!"".equals(value)) { - String[] parts = value.split(":"); - if (parts.length != 4) { - if (LOG.isDebugEnabled()) { - LOG.debug("MalformedTracerStateString: {}", value); - } - throw new MalformedTracerStateStringException(value); - } else { - String traceId = parts[0]; - if (traceId.length() <= 32 && !traceId.isEmpty()) { - return new JaegerSpanContext(high(traceId), - (new BigInteger(traceId, 16)).longValue(), - (new BigInteger(parts[1], 16)).longValue(), - (new BigInteger(parts[2], 16)).longValue(), - (new BigInteger(parts[3], 16)).byteValue()); - } else { - throw new TraceIdOutOfBoundException( - "Trace id [" + traceId + "] length is not within 1 and 32"); - } - } - } else { - throw new EmptyTracerStateStringException(); - } - } - - @Override - public void inject(JaegerSpanContext context, StringBuilder string) { - int intFlag = context.getFlags() & 255; - string.append(context.getTraceId()) - .append(':').append(Long.toHexString(context.getSpanId())) - .append(':').append(Long.toHexString(context.getParentId())) - .append(':').append(Integer.toHexString(intFlag)); - } - - private static long high(String hexString) { - if (hexString.length() > 16) { - int highLength = hexString.length() - 16; - String highString = hexString.substring(0, highLength); - return (new BigInteger(highString, 16)).longValue(); - } else { - return 0L; - } - } - - /** - * The format to save the context as text. - *

- * Using the mutable StringBuilder instead of plain String. - */ - public static final class StringFormat implements Format { - } - -} diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/tracing/TraceAllMethod.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/tracing/TraceAllMethod.java index b8560c46853a..95e735b8965b 100644 --- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/tracing/TraceAllMethod.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/tracing/TraceAllMethod.java @@ -19,9 +19,6 @@ import static java.util.Collections.emptyMap; -import io.opentracing.Scope; -import io.opentracing.Span; -import io.opentracing.util.GlobalTracer; import java.lang.reflect.InvocationHandler; import java.lang.reflect.Method; import java.util.Arrays; @@ -67,10 +64,7 @@ public Object invoke(Object proxy, Method method, Object[] args) method.getName()); } - Span span = GlobalTracer.get().buildSpan( - name + "." + method.getName()) - .start(); - try (Scope ignored = GlobalTracer.get().activateSpan(span)) { + try (TracingUtil.TraceCloseable ignored = TracingUtil.createActivatedSpan(name + "." 
+ method.getName())) { try { return delegateMethod.invoke(delegate, args); } catch (Exception ex) { @@ -79,8 +73,6 @@ public Object invoke(Object proxy, Method method, Object[] args) } else { throw ex; } - } finally { - span.finish(); } } } diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/tracing/TracingUtil.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/tracing/TracingUtil.java index 867802120ef2..560f3876c114 100644 --- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/tracing/TracingUtil.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/tracing/TracingUtil.java @@ -17,27 +17,44 @@ package org.apache.hadoop.hdds.tracing; -import io.jaegertracing.Configuration; -import io.jaegertracing.internal.JaegerTracer; -import io.opentracing.Scope; -import io.opentracing.Span; -import io.opentracing.SpanContext; -import io.opentracing.Tracer; -import io.opentracing.util.GlobalTracer; +import io.opentelemetry.api.OpenTelemetry; +import io.opentelemetry.api.common.AttributeKey; +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.trace.Span; +import io.opentelemetry.api.trace.StatusCode; +import io.opentelemetry.api.trace.Tracer; +import io.opentelemetry.api.trace.propagation.W3CTraceContextPropagator; +import io.opentelemetry.context.Context; +import io.opentelemetry.context.Scope; +import io.opentelemetry.exporter.otlp.trace.OtlpGrpcSpanExporter; +import io.opentelemetry.sdk.OpenTelemetrySdk; +import io.opentelemetry.sdk.resources.Resource; +import io.opentelemetry.sdk.trace.SdkTracerProvider; +import io.opentelemetry.sdk.trace.export.SimpleSpanProcessor; +import io.opentelemetry.sdk.trace.samplers.Sampler; import java.lang.reflect.Proxy; +import java.util.HashMap; +import java.util.Map; import org.apache.hadoop.hdds.conf.ConfigurationSource; import org.apache.hadoop.hdds.scm.ScmConfigKeys; import org.apache.ratis.util.function.CheckedRunnable; import 
org.apache.ratis.util.function.CheckedSupplier; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Utility class to collect all the tracing helper methods. */ public final class TracingUtil { - + private static final Logger LOG = LoggerFactory.getLogger(TracingUtil.class); private static final String NULL_SPAN_AS_STRING = ""; + private static final String OTEL_EXPORTER_OTLP_ENDPOINT = "OTEL_EXPORTER_OTLP_ENDPOINT"; + private static final String OTEL_EXPORTER_OTLP_ENDPOINT_DEFAULT = "http://localhost:4317"; + private static final String OTEL_TRACES_SAMPLER_ARG = "OTEL_TRACES_SAMPLER_ARG"; + private static final double OTEL_TRACES_SAMPLER_RATIO_DEFAULT = 1.0; private static volatile boolean isInit = false; + private static Tracer tracer = OpenTelemetry.noop().getTracer("noop"); private TracingUtil() { } @@ -47,38 +64,68 @@ private TracingUtil() { */ public static void initTracing( String serviceName, ConfigurationSource conf) { - if (!GlobalTracer.isRegistered() && isTracingEnabled(conf)) { - Configuration config = Configuration.fromEnv(serviceName); - JaegerTracer tracer = config.getTracerBuilder() - .registerExtractor(StringCodec.FORMAT, new StringCodec()) - .registerInjector(StringCodec.FORMAT, new StringCodec()) - .build(); - GlobalTracer.registerIfAbsent(tracer); + if (!isTracingEnabled(conf) || isInit) { + return; + } + + try { + initialize(serviceName); isInit = true; + LOG.info("Initialized tracing service: {}", serviceName); + } catch (Exception e) { + LOG.error("Failed to initialize tracing", e); } } - /** - * Export the active tracing span as a string. - * - * @return encoded tracing context. 
- */ - public static String exportCurrentSpan() { - return exportSpan(GlobalTracer.get().activeSpan()); + private static void initialize(String serviceName) { + String otelEndPoint = System.getenv(OTEL_EXPORTER_OTLP_ENDPOINT); + if (otelEndPoint == null || otelEndPoint.isEmpty()) { + otelEndPoint = OTEL_EXPORTER_OTLP_ENDPOINT_DEFAULT; + } + + double samplerRatio = OTEL_TRACES_SAMPLER_RATIO_DEFAULT; + try { + String sampleStrRatio = System.getenv(OTEL_TRACES_SAMPLER_ARG); + if (sampleStrRatio != null && !sampleStrRatio.isEmpty()) { + samplerRatio = Double.parseDouble(System.getenv(OTEL_TRACES_SAMPLER_ARG)); + } + } catch (NumberFormatException ex) { + // ignore and use the default value. + } + + Resource resource = Resource.create(Attributes.of(AttributeKey.stringKey("service.name"), serviceName)); + OtlpGrpcSpanExporter spanExporter = OtlpGrpcSpanExporter.builder() + .setEndpoint(otelEndPoint) + .build(); + + SimpleSpanProcessor spanProcessor = SimpleSpanProcessor.builder(spanExporter).build(); + SdkTracerProvider tracerProvider = SdkTracerProvider.builder() + .addSpanProcessor(spanProcessor) + .setResource(resource) + .setSampler(Sampler.traceIdRatioBased(samplerRatio)) + .build(); + OpenTelemetry openTelemetry = OpenTelemetrySdk.builder() + .setTracerProvider(tracerProvider) + .build(); + tracer = openTelemetry.getTracer(serviceName); } /** - * Export the specific span as a string. + * Export the active tracing span as a string. * * @return encoded tracing context. 
*/ - public static String exportSpan(Span span) { - if (span != null && isInit) { - StringBuilder builder = new StringBuilder(); - GlobalTracer.get().inject(span.context(), StringCodec.FORMAT, builder); - return builder.toString(); + public static String exportCurrentSpan() { + Span currentSpan = Span.current(); + if (!currentSpan.getSpanContext().isValid()) { + return NULL_SPAN_AS_STRING; } - return NULL_SPAN_AS_STRING; + + StringBuilder builder = new StringBuilder(); + W3CTraceContextPropagator propagator = W3CTraceContextPropagator.getInstance(); + propagator.inject(Context.current(), builder, + (carrier, key, value) -> carrier.append(key).append('=').append(value).append(';')); + return builder.toString(); } /** @@ -87,25 +134,18 @@ public static String exportSpan(Span span) { * @param name name of the newly created scope * @param encodedParent Encoded parent span (could be null or empty) * - * @return OpenTracing scope. + * @return Tracing scope. */ public static Span importAndCreateSpan(String name, String encodedParent) { - Tracer tracer = GlobalTracer.get(); - return tracer.buildSpan(name) - .asChildOf(extractParent(encodedParent, tracer)) - .start(); - } - - private static SpanContext extractParent(String parent, Tracer tracer) { - if (!GlobalTracer.isRegistered()) { - return null; + if (encodedParent == null || encodedParent.isEmpty()) { + return tracer.spanBuilder(name).setNoParent().startSpan(); } - if (parent == null || parent.isEmpty()) { - return null; - } - - return tracer.extract(StringCodec.FORMAT, new StringBuilder(parent)); + W3CTraceContextPropagator propagator = W3CTraceContextPropagator.getInstance(); + Context extract = propagator.extract(Context.current(), encodedParent, new TextExtractor()); + return tracer.spanBuilder(name) + .setParent(extract) + .startSpan(); } /** @@ -142,8 +182,7 @@ public static boolean isTracingEnabled( */ public static void executeInNewSpan(String spanName, CheckedRunnable runnable) throws E { - Span span = 
GlobalTracer.get() - .buildSpan(spanName).start(); + Span span = tracer.spanBuilder(spanName).setNoParent().startSpan(); executeInSpan(span, runnable); } @@ -152,8 +191,7 @@ public static void executeInNewSpan(String spanName, */ public static R executeInNewSpan(String spanName, CheckedSupplier supplier) throws E { - Span span = GlobalTracer.get() - .buildSpan(spanName).start(); + Span span = tracer.spanBuilder(spanName).setNoParent().startSpan(); return executeInSpan(span, supplier); } @@ -163,13 +201,14 @@ public static R executeInNewSpan(String spanName, */ private static R executeInSpan(Span span, CheckedSupplier supplier) throws E { - try (Scope ignored = GlobalTracer.get().activateSpan(span)) { + try (Scope ignored = span.makeCurrent()) { return supplier.get(); } catch (Exception ex) { - span.setTag("failed", true); + span.addEvent("Failed with exception: " + ex.getMessage()); + span.setStatus(StatusCode.ERROR); throw ex; } finally { - span.finish(); + span.end(); } } @@ -178,13 +217,14 @@ private static R executeInSpan(Span span, */ private static void executeInSpan(Span span, CheckedRunnable runnable) throws E { - try (Scope ignored = GlobalTracer.get().activateSpan(span)) { + try (Scope ignored = span.makeCurrent()) { runnable.run(); } catch (Exception ex) { - span.setTag("failed", true); + span.addEvent("Failed with exception: " + ex.getMessage()); + span.setStatus(StatusCode.ERROR); throw ex; } finally { - span.finish(); + span.end(); } } @@ -203,12 +243,60 @@ public static void executeAsChildSpan(String spanName, * This is a simplified way to use span as there is no way to add any tag * in case of Exceptions. 
*/ - public static AutoCloseable createActivatedSpan(String spanName) { - Span span = GlobalTracer.get().buildSpan(spanName).start(); - Scope scope = GlobalTracer.get().activateSpan(span); + public static TraceCloseable createActivatedSpan(String spanName) { + Span span = tracer.spanBuilder(spanName).setNoParent().startSpan(); + Scope scope = span.makeCurrent(); return () -> { scope.close(); - span.finish(); + span.end(); }; } + + public static Span getActiveSpan() { + return Span.current(); + } + + /** + * AutoCloseable interface for tracing span but no exception is thrown in close. + */ + public interface TraceCloseable extends AutoCloseable { + @Override + void close(); + } + + /** + * A TextMapGetter implementation to extract tracing info from String. + */ + public static class TextExtractor implements io.opentelemetry.context.propagation.TextMapGetter { + private Map map = new HashMap<>(); + + @Override + public Iterable keys(String carrier) { + if (map.isEmpty()) { + parse(carrier); + } + return map.keySet(); + } + + @Override + public String get(String carrier, String key) { + if (map.isEmpty()) { + parse(carrier); + } + return map.get(key); + } + + private void parse(String carrier) { + if (carrier == null || carrier.isEmpty()) { + return; + } + String[] parts = carrier.split(";"); + for (String part : parts) { + String[] kv = part.split("="); + if (kv.length == 2) { + map.put(kv[0].trim(), kv[1].trim()); + } + } + } + } } diff --git a/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/tracing/TestStringCodec.java b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/tracing/TestStringCodec.java deleted file mode 100644 index aab23d5da007..000000000000 --- a/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/tracing/TestStringCodec.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.hdds.tracing; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertThrows; - -import io.jaegertracing.internal.JaegerSpanContext; -import io.jaegertracing.internal.exceptions.EmptyTracerStateStringException; -import io.jaegertracing.internal.exceptions.MalformedTracerStateStringException; -import org.junit.jupiter.api.Test; - -class TestStringCodec { - - @Test - void testExtract() { - StringCodec codec = new StringCodec(); - - assertThrows(EmptyTracerStateStringException.class, - () -> codec.extract(null)); - - StringBuilder sb = new StringBuilder().append("123"); - MalformedTracerStateStringException malformedException = - assertThrows(MalformedTracerStateStringException.class, - () -> codec.extract(sb)); - assertEquals("String does not match tracer state format: 123", - malformedException.getMessage()); - - sb.append(":456:789"); - malformedException = - assertThrows(MalformedTracerStateStringException.class, - () -> codec.extract(sb)); - assertEquals("String does not match tracer state format: 123:456:789", - malformedException.getMessage()); - - sb.append(":66"); - JaegerSpanContext context = codec.extract(sb); - StringBuilder injected = new StringBuilder(); - codec.inject(context, 
injected); - - String expectedTraceId = pad("123"); - assertEquals(expectedTraceId, context.getTraceId()); - assertEquals(expectedTraceId + ":456:789:66", injected.toString()); - } - - private static String pad(String s) { - return "0000000000000000".substring(s.length()) + s; - } -} diff --git a/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/tracing/TestTracingUtil.java b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/tracing/TestTracingUtil.java index 6519031b2d31..bc500ddddab2 100644 --- a/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/tracing/TestTracingUtil.java +++ b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/tracing/TestTracingUtil.java @@ -22,9 +22,6 @@ import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.fail; -import io.jaegertracing.Configuration; -import io.jaegertracing.internal.JaegerTracer; -import io.opentracing.util.GlobalTracer; import org.apache.hadoop.hdds.conf.InMemoryConfiguration; import org.apache.hadoop.hdds.conf.MutableConfigurationSource; import org.apache.hadoop.hdds.scm.ScmConfigKeys; @@ -47,10 +44,8 @@ public void testDefaultMethod() { @Test public void testInitTracing() { - Configuration config = Configuration.fromEnv("testInitTracing"); - JaegerTracer tracer = config.getTracerBuilder().build(); - GlobalTracer.registerIfAbsent(tracer); - try (AutoCloseable ignored = TracingUtil.createActivatedSpan("initTracing")) { + TracingUtil.initTracing("testInitTracing", tracingEnabled()); + try (TracingUtil.TraceCloseable ignored = TracingUtil.createActivatedSpan("initTracing")) { exportCurrentSpan(); } catch (Exception e) { fail("Should not get exception"); diff --git a/hadoop-hdds/container-service/pom.xml b/hadoop-hdds/container-service/pom.xml index b46e46e4f2a1..0c455d269591 100644 --- a/hadoop-hdds/container-service/pom.xml +++ b/hadoop-hdds/container-service/pom.xml @@ -79,12 +79,12 @@ netty-transport - io.opentracing - opentracing-api + 
io.opentelemetry + opentelemetry-api - io.opentracing - opentracing-util + io.opentelemetry + opentelemetry-context jakarta.annotation diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/XceiverServerGrpc.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/XceiverServerGrpc.java index d29aaba3c78b..93a342a95c17 100644 --- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/XceiverServerGrpc.java +++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/XceiverServerGrpc.java @@ -19,9 +19,8 @@ import com.google.common.base.Preconditions; import com.google.common.util.concurrent.ThreadFactoryBuilder; -import io.opentracing.Scope; -import io.opentracing.Span; -import io.opentracing.util.GlobalTracer; +import io.opentelemetry.api.trace.Span; +import io.opentelemetry.context.Scope; import java.io.IOException; import java.net.BindException; import java.util.Collections; @@ -222,7 +221,7 @@ public void submitRequest(ContainerCommandRequestProto request, .importAndCreateSpan( "XceiverServerGrpc." 
+ request.getCmdType().name(), request.getTraceID()); - try (Scope scope = GlobalTracer.get().activateSpan(span)) { + try (Scope ignore = span.makeCurrent()) { ContainerProtos.ContainerCommandResponseProto response = storageContainer.dispatch(request, null); if (response.getResult() != ContainerProtos.Result.SUCCESS) { @@ -230,7 +229,7 @@ public void submitRequest(ContainerCommandRequestProto request, response.getResult()); } } finally { - span.finish(); + span.end(); } } diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/ratis/XceiverServerRatis.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/ratis/XceiverServerRatis.java index 02167d750b04..0a242546d2a4 100644 --- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/ratis/XceiverServerRatis.java +++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/ratis/XceiverServerRatis.java @@ -30,9 +30,8 @@ import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.ImmutableList; import com.google.common.util.concurrent.ThreadFactoryBuilder; -import io.opentracing.Scope; -import io.opentracing.Span; -import io.opentracing.util.GlobalTracer; +import io.opentelemetry.api.trace.Span; +import io.opentelemetry.context.Scope; import java.io.File; import java.io.IOException; import java.net.InetSocketAddress; @@ -662,8 +661,7 @@ public void submitRequest(ContainerCommandRequestProto request, .importAndCreateSpan( "XceiverServerRatis." 
+ request.getCmdType().name(), request.getTraceID()); - try (Scope ignored = GlobalTracer.get().activateSpan(span)) { - + try (Scope ignored = span.makeCurrent()) { RaftClientRequest raftClientRequest = createRaftClientRequest(request, pipelineID, RaftClientRequest.writeRequestType()); @@ -679,7 +677,7 @@ } processReply(reply); } finally { - span.finish(); + span.end(); } } diff --git a/hadoop-hdds/docs/content/feature/Observability.md b/hadoop-hdds/docs/content/feature/Observability.md index 117c13186cbf..cc6df3aebe29 100644 --- a/hadoop-hdds/docs/content/feature/Observability.md +++ b/hadoop-hdds/docs/content/feature/Observability.md @@ -97,8 +97,8 @@ Repeat the same for [Object Metrics](https://raw.githubusercontent.com/apache/oz ## Distributed tracing Distributed tracing can help to understand performance bottleneck with visualizing end-to-end performance. - -Ozone uses [jaeger](https://jaegertracing.io) tracing library to collect traces which can send tracing data to any compatible backend (Zipkin, ...). +Ozone makes use of the [OpenTelemetry](https://opentelemetry.io/) API for tracing and exports traces using OTLP over gRPC. +A collector such as [Jaeger](https://jaegertracing.io) can receive traces from Ozone on port 4317, the default OTLP/gRPC port. Tracing is turned off by default, but can be turned on with `hdds.tracing.enabled` from `ozone-site.xml` @@ -109,17 +109,14 @@ Tracing is turned off by default, but can be turned on with `hdds.tracing.enable ``` -Jaeger client can be configured with environment variables as documented [here](https://github.com/jaegertracing/jaeger-client-java/blob/master/jaeger-core/README.md): - -For example: +Below are the configuration steps for setting the collector endpoint and sampling strategy. These environment variables must be set for each Ozone component (OM, SCM, datanode) and for the Ozone client to enable tracing. 
-```shell -JAEGER_SAMPLER_PARAM=0.01 -JAEGER_SAMPLER_TYPE=probabilistic -JAEGER_AGENT_HOST=jaeger +``` +OTEL_EXPORTER_OTLP_ENDPOINT=http://localhost:4317 +OTEL_TRACES_SAMPLER_ARG=0.01 ``` -This configuration will record 1% of the requests to limit the performance overhead. For more information about jaeger sampling [check the documentation](https://www.jaegertracing.io/docs/1.18/sampling/#client-sampling-configuration) +This configuration will record 1% of the requests to limit the performance overhead. ## ozone insight diff --git a/hadoop-hdds/docs/content/feature/Observability.zh.md b/hadoop-hdds/docs/content/feature/Observability.zh.md index 7a5c67b4cdd4..3f706759bade 100644 --- a/hadoop-hdds/docs/content/feature/Observability.zh.md +++ b/hadoop-hdds/docs/content/feature/Observability.zh.md @@ -55,7 +55,8 @@ scrape_configs: ## 分布式跟踪 分布式跟踪可以通过可视化端到端的性能来帮助了解性能瓶颈。 -Ozone 使用 [jaeger](https://jaegertracing.io) 跟踪库收集跟踪,可以将跟踪数据发送到任何兼容的后端(Zipkin,…)。 +Ozone 使用 [OpenTelemetry](https://opentelemetry.io/) API 进行跟踪,并使用 Grpc 格式的 otlp 发送跟踪信息。 +jaeger 跟踪库作为收集器可以通过默认端口 4317(默认)从 Ozone 收集跟踪信息。 默认情况下,跟踪功能是关闭的,可以通过 `ozon-site.xml` 的 `hdds.tracing.enabled` 配置变量打开。 @@ -66,17 +67,14 @@ Ozone 使用 [jaeger](https://jaegertracing.io) 跟踪库收集跟踪,可以 ``` -Jaeger 客户端可以用环境变量进行配置,如[这份](https://github.com/jaegertracing/jaeger-client-java/blob/master/jaeger-core/README.md)文档所述。 +以下是提供收集器端点和采样策略所需的配置。这些是需要为每个 Ozone 组件(OM、SCM、DataNode)和 Ozone 客户端设置的环境变量,以启用 Shell 等跟踪功能。 -例如: - -```shell -JAEGER_SAMPLER_PARAM=0.01 -JAEGER_SAMPLER_TYPE=probabilistic -JAEGER_AGENT_HOST=jaeger +``` +OTEL_EXPORTER_OTLP_ENDPOINT=http://localhost:4317 +OTEL_TRACES_SAMPLER_ARG=0.01 ``` -此配置将记录1%的请求,以限制性能开销。有关 Jaeger 抽样的更多信息,请查看[文档](https://www.jaegertracing.io/docs/1.18/sampling/#client-sampling-configuration)。 +此配置将记录1%的请求,以限制性能开销。 ## Ozone Insight Ozone Insight 是一个用于检查 Ozone 集群当前状态的工具,它可以显示特定组件的日志记录、指标和配置。 @@ -214,4 +212,4 @@ status: VOLUME_ALREADY_EXISTS 实际上 `ozone insight` 是通过 HTTP 
端点来检索所需的信息(`/conf`、`/prom`和`/logLevel`端点),它在安全环境中还不被支持。 - \ No newline at end of file + diff --git a/hadoop-hdds/framework/pom.xml b/hadoop-hdds/framework/pom.xml index 2e05a3b9d819..fed0f112553d 100644 --- a/hadoop-hdds/framework/pom.xml +++ b/hadoop-hdds/framework/pom.xml @@ -93,8 +93,8 @@ - io.opentracing - opentracing-api + io.opentelemetry + opentelemetry-api io.prometheus diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/OzoneProtocolMessageDispatcher.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/OzoneProtocolMessageDispatcher.java index 31f9962d476b..94ac89633738 100644 --- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/OzoneProtocolMessageDispatcher.java +++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/OzoneProtocolMessageDispatcher.java @@ -18,7 +18,7 @@ package org.apache.hadoop.hdds.server; import com.google.protobuf.ServiceException; -import io.opentracing.Span; +import io.opentelemetry.api.trace.Span; import java.util.function.UnaryOperator; import org.apache.hadoop.hdds.tracing.TracingUtil; import org.apache.hadoop.hdds.utils.ProtocolMessageMetrics; @@ -98,7 +98,7 @@ public RESPONSE processRequest( return response; } finally { - span.finish(); + span.end(); } } diff --git a/hadoop-ozone/dist/src/main/compose/ozone/monitoring.conf b/hadoop-ozone/dist/src/main/compose/ozone/monitoring.conf index 7fcf1cc8cbc1..cbd6e665a57f 100644 --- a/hadoop-ozone/dist/src/main/compose/ozone/monitoring.conf +++ b/hadoop-ozone/dist/src/main/compose/ozone/monitoring.conf @@ -19,8 +19,7 @@ OZONE-SITE.XML_hdds.tracing.enabled=true OZONE-SITE.XML_ozone.metastore.rocksdb.statistics=ALL HDFS-SITE.XML_rpc.metrics.quantile.enable=true HDFS-SITE.XML_rpc.metrics.percentiles.intervals=60,300 -JAEGER_SAMPLER_PARAM=1 -JAEGER_SAMPLER_TYPE=const -JAEGER_AGENT_HOST=jaeger +OTEL_EXPORTER_OTLP_ENDPOINT=http://jaeger:4317 +OTEL_TRACES_SAMPLER_ARG=1 #Enable this variable to print out all hadoop rpc 
traffic to the stdout. See http://byteman.jboss.org/ to define your own instrumentation. #BYTEMAN_SCRIPT_URL=https://raw.githubusercontent.com/apache/hadoop/trunk/dev-support/byteman/hadooprpc.btm diff --git a/hadoop-ozone/dist/src/main/compose/ozone/monitoring.yaml b/hadoop-ozone/dist/src/main/compose/ozone/monitoring.yaml index 59a26d55d84b..e977ebeccdba 100644 --- a/hadoop-ozone/dist/src/main/compose/ozone/monitoring.yaml +++ b/hadoop-ozone/dist/src/main/compose/ozone/monitoring.yaml @@ -26,6 +26,8 @@ services: COLLECTOR_ZIPKIN_HTTP_PORT: 9411 ports: - 16686:16686 + - 4317:4317 + - 4318:4318 prometheus: image: prom/prometheus volumes: diff --git a/hadoop-ozone/dist/src/main/k8s/definitions/jaeger/jaeger.yaml b/hadoop-ozone/dist/src/main/k8s/definitions/jaeger/jaeger.yaml index 4796092657ca..22921df2ed92 100644 --- a/hadoop-ozone/dist/src/main/k8s/definitions/jaeger/jaeger.yaml +++ b/hadoop-ozone/dist/src/main/k8s/definitions/jaeger/jaeger.yaml @@ -49,6 +49,8 @@ spec: ports: - containerPort: 16686 name: web + - containerPort: 4317 + name: otlp-grpc env: - name: COLLECTOR_ZIPKIN_HTTP_PORT value: "9411" diff --git a/hadoop-ozone/dist/src/main/k8s/definitions/ozone/definitions/tracing.yaml b/hadoop-ozone/dist/src/main/k8s/definitions/ozone/definitions/tracing.yaml index 007b8d125176..ce0eacd62e13 100644 --- a/hadoop-ozone/dist/src/main/k8s/definitions/ozone/definitions/tracing.yaml +++ b/hadoop-ozone/dist/src/main/k8s/definitions/ozone/definitions/tracing.yaml @@ -25,9 +25,7 @@ description: Enable jaeger tracing - .* - env value: - - name: JAEGER_SAMPLER_TYPE - value: probabilistic - - name: JAEGER_SAMPLER_PARAM + - name: OTEL_TRACES_SAMPLER_ARG value: "0.01" - - name: JAEGER_AGENT_HOST + - name: OTEL_EXPORTER_OTLP_ENDPOINT - value: jaeger-0.jaeger + value: http://jaeger-0.jaeger:4317 diff --git a/hadoop-ozone/dist/src/main/k8s/examples/ozone-dev/datanode-statefulset.yaml b/hadoop-ozone/dist/src/main/k8s/examples/ozone-dev/datanode-statefulset.yaml index b22212ff79ea..9d44145547da 100644 --- 
a/hadoop-ozone/dist/src/main/k8s/examples/ozone-dev/datanode-statefulset.yaml +++ b/hadoop-ozone/dist/src/main/k8s/examples/ozone-dev/datanode-statefulset.yaml @@ -49,11 +49,9 @@ spec: - configMapRef: name: config env: - - name: JAEGER_SAMPLER_TYPE - value: probabilistic - - name: JAEGER_SAMPLER_PARAM + - name: OTEL_TRACES_SAMPLER_ARG value: "0.01" - - name: JAEGER_AGENT_HOST + - name: OTEL_EXPORTER_OTLP_ENDPOINT value: jaeger-0.jaeger volumeMounts: - name: data diff --git a/hadoop-ozone/dist/src/main/k8s/examples/ozone-dev/freon/freon-deployment.yaml b/hadoop-ozone/dist/src/main/k8s/examples/ozone-dev/freon/freon-deployment.yaml index 3d39bf690503..e8ef05ff27e8 100644 --- a/hadoop-ozone/dist/src/main/k8s/examples/ozone-dev/freon/freon-deployment.yaml +++ b/hadoop-ozone/dist/src/main/k8s/examples/ozone-dev/freon/freon-deployment.yaml @@ -45,9 +45,7 @@ spec: - configMapRef: name: config env: - - name: JAEGER_SAMPLER_TYPE - value: probabilistic - - name: JAEGER_SAMPLER_PARAM + - name: OTEL_TRACES_SAMPLER_ARG value: "0.01" - - name: JAEGER_AGENT_HOST + - name: OTEL_EXPORTER_OTLP_ENDPOINT value: jaeger-0.jaeger diff --git a/hadoop-ozone/dist/src/main/k8s/examples/ozone-dev/httpfs-statefulset.yaml b/hadoop-ozone/dist/src/main/k8s/examples/ozone-dev/httpfs-statefulset.yaml index 59abe8547f64..70edd4a00c64 100644 --- a/hadoop-ozone/dist/src/main/k8s/examples/ozone-dev/httpfs-statefulset.yaml +++ b/hadoop-ozone/dist/src/main/k8s/examples/ozone-dev/httpfs-statefulset.yaml @@ -48,11 +48,9 @@ spec: - configMapRef: name: config env: - - name: JAEGER_SAMPLER_TYPE - value: probabilistic - - name: JAEGER_SAMPLER_PARAM + - name: OTEL_TRACES_SAMPLER_ARG value: "0.01" - - name: JAEGER_AGENT_HOST + - name: OTEL_EXPORTER_OTLP_ENDPOINT value: jaeger-0.jaeger volumeMounts: - name: data diff --git a/hadoop-ozone/dist/src/main/k8s/examples/ozone-dev/jaeger-statefulset.yaml b/hadoop-ozone/dist/src/main/k8s/examples/ozone-dev/jaeger-statefulset.yaml index 51410148f77d..45e671a85c52 100644 
--- a/hadoop-ozone/dist/src/main/k8s/examples/ozone-dev/jaeger-statefulset.yaml +++ b/hadoop-ozone/dist/src/main/k8s/examples/ozone-dev/jaeger-statefulset.yaml @@ -37,6 +37,8 @@ spec: ports: - containerPort: 16686 name: web + - containerPort: 4317 + name: otlp-grpc env: - name: COLLECTOR_ZIPKIN_HTTP_PORT value: "9411" diff --git a/hadoop-ozone/dist/src/main/k8s/examples/ozone-dev/om-statefulset.yaml b/hadoop-ozone/dist/src/main/k8s/examples/ozone-dev/om-statefulset.yaml index 36df22c81c98..6cde54dc3439 100644 --- a/hadoop-ozone/dist/src/main/k8s/examples/ozone-dev/om-statefulset.yaml +++ b/hadoop-ozone/dist/src/main/k8s/examples/ozone-dev/om-statefulset.yaml @@ -50,11 +50,9 @@ spec: value: scm-0.scm:9876 - name: ENSURE_OM_INITIALIZED value: /data/metadata/om/current/VERSION - - name: JAEGER_SAMPLER_TYPE - value: probabilistic - - name: JAEGER_SAMPLER_PARAM + - name: OTEL_TRACES_SAMPLER_ARG value: "0.01" - - name: JAEGER_AGENT_HOST + - name: OTEL_EXPORTER_OTLP_ENDPOINT value: jaeger-0.jaeger livenessProbe: tcpSocket: diff --git a/hadoop-ozone/dist/src/main/k8s/examples/ozone-dev/recon-statefulset.yaml b/hadoop-ozone/dist/src/main/k8s/examples/ozone-dev/recon-statefulset.yaml index 6466c29595cf..7a7757c0ceeb 100644 --- a/hadoop-ozone/dist/src/main/k8s/examples/ozone-dev/recon-statefulset.yaml +++ b/hadoop-ozone/dist/src/main/k8s/examples/ozone-dev/recon-statefulset.yaml @@ -48,11 +48,9 @@ spec: env: - name: WAITFOR value: scm-0.scm:9876 - - name: JAEGER_SAMPLER_TYPE - value: probabilistic - - name: JAEGER_SAMPLER_PARAM + - name: OTEL_TRACES_SAMPLER_ARG value: "0.01" - - name: JAEGER_AGENT_HOST + - name: OTEL_EXPORTER_OTLP_ENDPOINT value: jaeger-0.jaeger livenessProbe: tcpSocket: diff --git a/hadoop-ozone/dist/src/main/k8s/examples/ozone-dev/s3g-statefulset.yaml b/hadoop-ozone/dist/src/main/k8s/examples/ozone-dev/s3g-statefulset.yaml index 30b71e0a3528..7e405922fd89 100644 --- a/hadoop-ozone/dist/src/main/k8s/examples/ozone-dev/s3g-statefulset.yaml +++ 
b/hadoop-ozone/dist/src/main/k8s/examples/ozone-dev/s3g-statefulset.yaml @@ -48,11 +48,9 @@ spec: - configMapRef: name: config env: - - name: JAEGER_SAMPLER_TYPE - value: probabilistic - - name: JAEGER_SAMPLER_PARAM + - name: OTEL_TRACES_SAMPLER_ARG value: "0.01" - - name: JAEGER_AGENT_HOST + - name: OTEL_EXPORTER_OTLP_ENDPOINT value: jaeger-0.jaeger volumeMounts: - name: data diff --git a/hadoop-ozone/dist/src/main/k8s/examples/ozone-dev/scm-statefulset.yaml b/hadoop-ozone/dist/src/main/k8s/examples/ozone-dev/scm-statefulset.yaml index 246f8c411e05..1fcbd17d4efe 100644 --- a/hadoop-ozone/dist/src/main/k8s/examples/ozone-dev/scm-statefulset.yaml +++ b/hadoop-ozone/dist/src/main/k8s/examples/ozone-dev/scm-statefulset.yaml @@ -66,11 +66,9 @@ spec: - configMapRef: name: config env: - - name: JAEGER_SAMPLER_TYPE - value: probabilistic - - name: JAEGER_SAMPLER_PARAM + - name: OTEL_TRACES_SAMPLER_ARG value: "0.01" - - name: JAEGER_AGENT_HOST + - name: OTEL_EXPORTER_OTLP_ENDPOINT value: jaeger-0.jaeger volumeMounts: - name: data diff --git a/hadoop-ozone/dist/src/main/license/bin/LICENSE.txt b/hadoop-ozone/dist/src/main/license/bin/LICENSE.txt index 3c0a10e5f619..10f312c8c91a 100644 --- a/hadoop-ozone/dist/src/main/license/bin/LICENSE.txt +++ b/hadoop-ozone/dist/src/main/license/bin/LICENSE.txt @@ -299,8 +299,7 @@ Apache License 2.0 com.jolbox:bonecp com.lmax:disruptor com.nimbusds:nimbus-jose-jwt - com.squareup.okhttp3:okhttp - com.squareup.okio:okio + com.squareup.okhttp3:okhttp-jvm com.squareup.okio:okio-jvm commons-beanutils:commons-beanutils commons-cli:commons-cli @@ -325,10 +324,6 @@ Apache License 2.0 io.grpc:grpc-protobuf-lite io.grpc:grpc-stub io.grpc:grpc-util - io.jaegertracing:jaeger-client - io.jaegertracing:jaeger-core - io.jaegertracing:jaeger-thrift - io.jaegertracing:jaeger-tracerresolver io.netty:netty-buffer io.netty:netty-codec io.netty:netty-codec-http @@ -347,10 +342,19 @@ Apache License 2.0 io.netty:netty-transport-native-unix-common 
io.opencensus:opencensus-api io.opencensus:opencensus-contrib-grpc-metrics - io.opentracing.contrib:opentracing-tracerresolver - io.opentracing:opentracing-api - io.opentracing:opentracing-noop - io.opentracing:opentracing-util + io.opentelemetry:opentelemetry-api + io.opentelemetry:opentelemetry-common + io.opentelemetry:opentelemetry-context + io.opentelemetry:opentelemetry-exporter-common + io.opentelemetry:opentelemetry-exporter-otlp-common + io.opentelemetry:opentelemetry-exporter-otlp + io.opentelemetry:opentelemetry-exporter-sender-okhttp + io.opentelemetry:opentelemetry-sdk + io.opentelemetry:opentelemetry-sdk-common + io.opentelemetry:opentelemetry-sdk-common-extension-autoconfigure-spi + io.opentelemetry:opentelemetry-sdk-logs + io.opentelemetry:opentelemetry-sdk-metrics + io.opentelemetry:opentelemetry-sdk-trace io.perfmark:perfmark-api io.prometheus:simpleclient io.prometheus:simpleclient_common @@ -418,7 +422,6 @@ Apache License 2.0 org.apache.ratis:ratis-shell org.apache.ratis:ratis-thirdparty-misc org.apache.ratis:ratis-tools - org.apache.thrift:libthrift org.apache.zookeeper:zookeeper org.apache.zookeeper:zookeeper-jute org.eclipse.jetty:jetty-client @@ -435,9 +438,6 @@ Apache License 2.0 org.jboss.weld.servlet:weld-servlet-shaded org.jetbrains:annotations org.jetbrains.kotlin:kotlin-stdlib - org.jetbrains.kotlin:kotlin-stdlib-common - org.jetbrains.kotlin:kotlin-stdlib-jdk7 - org.jetbrains.kotlin:kotlin-stdlib-jdk8 org.jheaps:jheaps org.jooq:jooq org.jooq:jooq-codegen diff --git a/hadoop-ozone/dist/src/main/license/jar-report.txt b/hadoop-ozone/dist/src/main/license/jar-report.txt index 9aeb1285178a..efd83ca9d4fb 100644 --- a/hadoop-ozone/dist/src/main/license/jar-report.txt +++ b/hadoop-ozone/dist/src/main/license/jar-report.txt @@ -99,10 +99,6 @@ share/ozone/lib/jackson-datatype-jsr310.jar share/ozone/lib/jackson-jaxrs-base.jar share/ozone/lib/jackson-jaxrs-json-provider.jar share/ozone/lib/jackson-module-jaxb-annotations.jar 
-share/ozone/lib/jaeger-client.jar -share/ozone/lib/jaeger-core.jar -share/ozone/lib/jaeger-thrift.jar -share/ozone/lib/jaeger-tracerresolver.jar share/ozone/lib/jakarta.activation.jar share/ozone/lib/jakarta.activation-api.jar share/ozone/lib/jakarta.annotation-api.jar @@ -173,11 +169,7 @@ share/ozone/lib/kerby-asn1.jar share/ozone/lib/kerby-config.jar share/ozone/lib/kerby-pkix.jar share/ozone/lib/kerby-util.jar -share/ozone/lib/kotlin-stdlib-common.jar -share/ozone/lib/kotlin-stdlib-jdk7.jar -share/ozone/lib/kotlin-stdlib-jdk8.jar share/ozone/lib/kotlin-stdlib.jar -share/ozone/lib/libthrift.jar share/ozone/lib/listenablefuture-empty-to-avoid-conflict-with-guava.jar share/ozone/lib/log4j-api.jar share/ozone/lib/log4j-core.jar @@ -204,13 +196,21 @@ share/ozone/lib/netty-transport-native-epoll.Final-linux-x86_64.jar share/ozone/lib/netty-transport-native-epoll.Final.jar share/ozone/lib/netty-transport-native-unix-common.Final.jar share/ozone/lib/nimbus-jose-jwt.jar -share/ozone/lib/okhttp.jar +share/ozone/lib/okhttp-jvm.jar share/ozone/lib/okio-jvm.jar -share/ozone/lib/okio.jar -share/ozone/lib/opentracing-api.jar -share/ozone/lib/opentracing-noop.jar -share/ozone/lib/opentracing-tracerresolver.jar -share/ozone/lib/opentracing-util.jar +share/ozone/lib/opentelemetry-api.jar +share/ozone/lib/opentelemetry-common.jar +share/ozone/lib/opentelemetry-context.jar +share/ozone/lib/opentelemetry-exporter-common.jar +share/ozone/lib/opentelemetry-exporter-otlp-common.jar +share/ozone/lib/opentelemetry-exporter-otlp.jar +share/ozone/lib/opentelemetry-exporter-sender-okhttp.jar +share/ozone/lib/opentelemetry-sdk-common.jar +share/ozone/lib/opentelemetry-sdk-extension-autoconfigure-spi.jar +share/ozone/lib/opentelemetry-sdk-logs.jar +share/ozone/lib/opentelemetry-sdk-metrics.jar +share/ozone/lib/opentelemetry-sdk-trace.jar +share/ozone/lib/opentelemetry-sdk.jar share/ozone/lib/orc-core.jar share/ozone/lib/orc-shims.jar share/ozone/lib/osgi-resource-locator.jar diff --git 
a/hadoop-ozone/freon/pom.xml b/hadoop-ozone/freon/pom.xml index 670bb651068b..bceadd99f8ca 100644 --- a/hadoop-ozone/freon/pom.xml +++ b/hadoop-ozone/freon/pom.xml @@ -71,12 +71,8 @@ metrics-core - io.opentracing - opentracing-api - - - io.opentracing - opentracing-util + io.opentelemetry + opentelemetry-api org.apache.commons diff --git a/hadoop-ozone/freon/src/main/java/org/apache/hadoop/ozone/freon/BaseFreonGenerator.java b/hadoop-ozone/freon/src/main/java/org/apache/hadoop/ozone/freon/BaseFreonGenerator.java index 3203129ada47..ac58f7185548 100644 --- a/hadoop-ozone/freon/src/main/java/org/apache/hadoop/ozone/freon/BaseFreonGenerator.java +++ b/hadoop-ozone/freon/src/main/java/org/apache/hadoop/ozone/freon/BaseFreonGenerator.java @@ -23,9 +23,7 @@ import com.codahale.metrics.MetricRegistry; import com.codahale.metrics.ScheduledReporter; import com.codahale.metrics.Slf4jReporter; -import io.opentracing.Scope; -import io.opentracing.Span; -import io.opentracing.util.GlobalTracer; +import io.opentelemetry.api.trace.StatusCode; import java.io.IOException; import java.io.InputStream; import java.time.Duration; @@ -49,6 +47,7 @@ import org.apache.hadoop.hdds.conf.TimeDurationUtil; import org.apache.hadoop.hdds.scm.pipeline.Pipeline; import org.apache.hadoop.hdds.scm.protocol.StorageContainerLocationProtocol; +import org.apache.hadoop.hdds.tracing.TracingUtil; import org.apache.hadoop.hdds.utils.HAUtils; import org.apache.hadoop.ipc.ProtobufRpcEngine; import org.apache.hadoop.ipc.RPC; @@ -212,16 +211,14 @@ protected void taskLoopCompleted() { * @param taskId unique ID of the task */ private void tryNextTask(TaskProvider provider, long taskId) { - Span span = GlobalTracer.get().buildSpan(spanName).start(); - try (Scope scope = GlobalTracer.get().activateSpan(span)) { + try (TracingUtil.TraceCloseable ignored = TracingUtil.createActivatedSpan(spanName)) { provider.executeNextTask(taskId); successCounter.incrementAndGet(); } catch (Exception e) { - span.setTag("failure", 
true); + TracingUtil.getActiveSpan().addEvent("failure with exception: " + e.getMessage()); + TracingUtil.getActiveSpan().setStatus(StatusCode.ERROR); failureCounter.incrementAndGet(); LOG.error("Error on executing task {}", taskId, e); - } finally { - span.finish(); } } diff --git a/hadoop-ozone/freon/src/main/java/org/apache/hadoop/ozone/freon/RandomKeyGenerator.java b/hadoop-ozone/freon/src/main/java/org/apache/hadoop/ozone/freon/RandomKeyGenerator.java index b59916f1a50f..87ebf95f0b06 100644 --- a/hadoop-ozone/freon/src/main/java/org/apache/hadoop/ozone/freon/RandomKeyGenerator.java +++ b/hadoop-ozone/freon/src/main/java/org/apache/hadoop/ozone/freon/RandomKeyGenerator.java @@ -742,7 +742,7 @@ private boolean createVolume(int volumeNumber) { String volumeName = "vol-" + volumeNumber + "-" + RandomStringUtils.secure().nextNumeric(5); LOG.trace("Creating volume: {}", volumeName); - try (AutoCloseable scope = TracingUtil + try (TracingUtil.TraceCloseable scope = TracingUtil .createActivatedSpan("createVolume")) { long start = System.nanoTime(); objectStore.createVolume(volumeName); @@ -774,7 +774,7 @@ private boolean createBucket(int globalBucketNumber) { RandomStringUtils.secure().nextNumeric(5); LOG.trace("Creating bucket: {} in volume: {}", bucketName, volume.getName()); - try (AutoCloseable scope = TracingUtil + try (TracingUtil.TraceCloseable scope = TracingUtil .createActivatedSpan("createBucket")) { long start = System.nanoTime(); @@ -817,7 +817,7 @@ private boolean createKey(long globalKeyNumber) { LOG.trace("Adding key: {} in bucket: {} of volume: {}", keyName, bucketName, volumeName); try { - try (AutoCloseable scope = TracingUtil.createActivatedSpan("createKey")) { + try (TracingUtil.TraceCloseable scope = TracingUtil.createActivatedSpan("createKey")) { long keyCreateStart = System.nanoTime(); try (OzoneOutputStream os = bucket.createKey(keyName, keySize.toBytes(), replicationConfig, new HashMap<>())) { @@ -867,7 +867,7 @@ private boolean 
cleanVolume(int volumeNumber) { OzoneVolume volume = getVolume(volumeNumber); String volumeName = volume.getName(); LOG.trace("Cleaning volume: {}", volumeName); - try (AutoCloseable scope = TracingUtil + try (TracingUtil.TraceCloseable scope = TracingUtil .createActivatedSpan("cleanVolume")) { objectStore.deleteVolume(volumeName); numberOfVolumesCleaned.getAndIncrement(); diff --git a/hadoop-ozone/httpfsgateway/pom.xml b/hadoop-ozone/httpfsgateway/pom.xml index f0b96351ddcf..058b60c5a0f4 100644 --- a/hadoop-ozone/httpfsgateway/pom.xml +++ b/hadoop-ozone/httpfsgateway/pom.xml @@ -106,6 +106,11 @@ commons-codec runtime + + javax.annotation + javax.annotation-api + runtime + org.apache.curator curator-framework diff --git a/hadoop-ozone/ozonefs-common/pom.xml b/hadoop-ozone/ozonefs-common/pom.xml index 14b09d87ab77..8483ec4cfda5 100644 --- a/hadoop-ozone/ozonefs-common/pom.xml +++ b/hadoop-ozone/ozonefs-common/pom.xml @@ -34,12 +34,8 @@ guava - io.opentracing - opentracing-api - - - io.opentracing - opentracing-util + io.opentelemetry + opentelemetry-api jakarta.annotation diff --git a/hadoop-ozone/ozonefs-common/src/main/java/org/apache/hadoop/fs/ozone/BasicRootedOzoneFileSystem.java b/hadoop-ozone/ozonefs-common/src/main/java/org/apache/hadoop/fs/ozone/BasicRootedOzoneFileSystem.java index d355f59899d6..ca717f7a8765 100644 --- a/hadoop-ozone/ozonefs-common/src/main/java/org/apache/hadoop/fs/ozone/BasicRootedOzoneFileSystem.java +++ b/hadoop-ozone/ozonefs-common/src/main/java/org/apache/hadoop/fs/ozone/BasicRootedOzoneFileSystem.java @@ -37,8 +37,7 @@ import com.google.common.base.Function; import com.google.common.base.Preconditions; -import io.opentracing.Span; -import io.opentracing.util.GlobalTracer; +import io.opentelemetry.api.trace.Span; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; @@ -246,8 +245,8 @@ public FSDataInputStream open(Path path, int bufferSize) throws IOException { final String key = 
pathToKey(path); return TracingUtil.executeInNewSpan("ofs open", () -> { - Span span = GlobalTracer.get().activeSpan(); - span.setTag("path", key); + Span span = TracingUtil.getActiveSpan(); + span.setAttribute("path", key); return new FSDataInputStream(createFSInputStream(adapter.readFile(key))); }); } @@ -397,9 +396,9 @@ public boolean rename(Path src, Path dst) throws IOException { } private boolean renameInSpan(Path src, Path dst) throws IOException { - Span span = GlobalTracer.get().activeSpan(); - span.setTag("src", src.toString()) - .setTag("dst", dst.toString()); + Span span = TracingUtil.getActiveSpan(); + span.setAttribute("src", src.toString()) + .setAttribute("dst", dst.toString()); incrementCounter(Statistic.INVOCATION_RENAME, 1); statistics.incrementWriteOps(1); if (src.equals(dst)) { diff --git a/hadoop-ozone/ozonefs-common/src/main/java/org/apache/hadoop/fs/ozone/OzoneFSInputStream.java b/hadoop-ozone/ozonefs-common/src/main/java/org/apache/hadoop/fs/ozone/OzoneFSInputStream.java index 236ea4458bef..e4133ae57a59 100644 --- a/hadoop-ozone/ozonefs-common/src/main/java/org/apache/hadoop/fs/ozone/OzoneFSInputStream.java +++ b/hadoop-ozone/ozonefs-common/src/main/java/org/apache/hadoop/fs/ozone/OzoneFSInputStream.java @@ -17,9 +17,6 @@ package org.apache.hadoop.fs.ozone; -import io.opentracing.Scope; -import io.opentracing.Span; -import io.opentracing.util.GlobalTracer; import java.io.EOFException; import java.io.IOException; import java.io.InputStream; @@ -56,33 +53,25 @@ public OzoneFSInputStream(InputStream inputStream, Statistics statistics) { @Override public int read() throws IOException { - Span span = GlobalTracer.get() - .buildSpan("OzoneFSInputStream.read").start(); - try (Scope scope = GlobalTracer.get().activateSpan(span)) { + try (TracingUtil.TraceCloseable ignored = TracingUtil.createActivatedSpan("OzoneFSInputStream.read")) { int byteRead = inputStream.read(); if (statistics != null && byteRead >= 0) { statistics.incrementBytesRead(1); } 
return byteRead; - } finally { - span.finish(); } } @Override public int read(byte[] b, int off, int len) throws IOException { - Span span = GlobalTracer.get() - .buildSpan("OzoneFSInputStream.read").start(); - try (Scope scope = GlobalTracer.get().activateSpan(span)) { - span.setTag("offset", off) - .setTag("length", len); + try (TracingUtil.TraceCloseable ignored = TracingUtil.createActivatedSpan("OzoneFSInputStream.read")) { + TracingUtil.getActiveSpan().setAttribute("offset", off) + .setAttribute("length", len); int bytesRead = inputStream.read(b, off, len); if (statistics != null && bytesRead >= 0) { statistics.incrementBytesRead(bytesRead); } return bytesRead; - } finally { - span.finish(); } } diff --git a/hadoop-ozone/ozonefs-common/src/main/java/org/apache/hadoop/fs/ozone/OzoneFSOutputStream.java b/hadoop-ozone/ozonefs-common/src/main/java/org/apache/hadoop/fs/ozone/OzoneFSOutputStream.java index 534a0dba1d74..b278dd33eb54 100644 --- a/hadoop-ozone/ozonefs-common/src/main/java/org/apache/hadoop/fs/ozone/OzoneFSOutputStream.java +++ b/hadoop-ozone/ozonefs-common/src/main/java/org/apache/hadoop/fs/ozone/OzoneFSOutputStream.java @@ -17,8 +17,6 @@ package org.apache.hadoop.fs.ozone; -import io.opentracing.Span; -import io.opentracing.util.GlobalTracer; import java.io.IOException; import java.io.OutputStream; import org.apache.hadoop.fs.Syncable; @@ -50,8 +48,7 @@ public void write(int b) throws IOException { public void write(byte[] b, int off, int len) throws IOException { TracingUtil.executeInNewSpan("OzoneFSOutputStream.write", () -> { - Span span = GlobalTracer.get().activeSpan(); - span.setTag("length", len); + TracingUtil.getActiveSpan().setAttribute("length", len); outputStream.write(b, off, len); }); } diff --git a/hadoop-ozone/ozonefs-hadoop2/pom.xml b/hadoop-ozone/ozonefs-hadoop2/pom.xml index 311a14bd5e3d..ccc072e7fc75 100644 --- a/hadoop-ozone/ozonefs-hadoop2/pom.xml +++ b/hadoop-ozone/ozonefs-hadoop2/pom.xml @@ -27,14 +27,6 @@ 
org.apache.hadoop.ozone.shaded - - javax.annotation - javax.annotation-api - - - javax.servlet - javax.servlet-api - org.apache.hadoop hadoop-hdfs-client diff --git a/hadoop-ozone/ozonefs-hadoop3/pom.xml b/hadoop-ozone/ozonefs-hadoop3/pom.xml index b34fd0d9eb75..1d26aafc2d9d 100644 --- a/hadoop-ozone/ozonefs-hadoop3/pom.xml +++ b/hadoop-ozone/ozonefs-hadoop3/pom.xml @@ -29,14 +29,6 @@ org.apache.hadoop.ozone.shaded - - javax.annotation - javax.annotation-api - - - javax.servlet - javax.servlet-api - org.apache.hadoop.thirdparty hadoop-shaded-protobuf_3_25 diff --git a/hadoop-ozone/ozonefs/pom.xml b/hadoop-ozone/ozonefs/pom.xml index e9b1820c3ee3..e260d4df7968 100644 --- a/hadoop-ozone/ozonefs/pom.xml +++ b/hadoop-ozone/ozonefs/pom.xml @@ -30,12 +30,8 @@ - io.opentracing - opentracing-api - - - io.opentracing - opentracing-util + io.opentelemetry + opentelemetry-api org.apache.hadoop diff --git a/hadoop-ozone/ozonefs/src/main/java/org/apache/hadoop/fs/ozone/RootedOzoneFileSystem.java b/hadoop-ozone/ozonefs/src/main/java/org/apache/hadoop/fs/ozone/RootedOzoneFileSystem.java index 3a4f481f6fa6..74c4a30e9914 100644 --- a/hadoop-ozone/ozonefs/src/main/java/org/apache/hadoop/fs/ozone/RootedOzoneFileSystem.java +++ b/hadoop-ozone/ozonefs/src/main/java/org/apache/hadoop/fs/ozone/RootedOzoneFileSystem.java @@ -19,7 +19,6 @@ import static org.apache.hadoop.ozone.OzoneConsts.FORCE_LEASE_RECOVERY_ENV; -import io.opentracing.util.GlobalTracer; import java.io.IOException; import java.io.InputStream; import java.net.URI; @@ -145,7 +144,7 @@ public boolean recoverLease(final Path f) throws IOException { } private boolean recoverLeaseTraced(final Path f) throws IOException { - GlobalTracer.get().activeSpan().setTag("path", f.toString()); + TracingUtil.getActiveSpan().setAttribute("path", f.toString()); statistics.incrementWriteOps(1); LOG.trace("recoverLease() path:{}", f); Path qualifiedPath = makeQualified(f); @@ -183,7 +182,7 @@ public boolean isFileClosed(Path f) throws 
IOException { } private boolean isFileClosedTraced(Path f) throws IOException { - GlobalTracer.get().activeSpan().setTag("path", f.toString()); + TracingUtil.getActiveSpan().setAttribute("fs.operation", "isFileClosed"); statistics.incrementWriteOps(1); LOG.trace("isFileClosed() path:{}", f); Path qualifiedPath = makeQualified(f); diff --git a/hadoop-ozone/s3gateway/pom.xml b/hadoop-ozone/s3gateway/pom.xml index b17e218413e7..9b94f4c42734 100644 --- a/hadoop-ozone/s3gateway/pom.xml +++ b/hadoop-ozone/s3gateway/pom.xml @@ -63,16 +63,8 @@ picocli - io.opentracing - opentracing-api - - - io.opentracing - opentracing-noop - - - io.opentracing - opentracing-util + io.opentelemetry + opentelemetry-api jakarta.annotation diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/TracingFilter.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/TracingFilter.java index dd9d7cdf170f..4e95a4849e69 100644 --- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/TracingFilter.java +++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/TracingFilter.java @@ -17,11 +17,6 @@ package org.apache.hadoop.ozone.s3; -import io.opentracing.Scope; -import io.opentracing.ScopeManager; -import io.opentracing.Span; -import io.opentracing.noop.NoopSpan; -import io.opentracing.util.GlobalTracer; import java.io.IOException; import java.io.OutputStream; import javax.ws.rs.container.ContainerRequestContext; @@ -31,18 +26,18 @@ import javax.ws.rs.container.ResourceInfo; import javax.ws.rs.core.Context; import javax.ws.rs.ext.Provider; +import org.apache.hadoop.hdds.tracing.TracingUtil; import org.apache.hadoop.ozone.client.io.WrappedOutputStream; /** - * Filter used to add jaeger tracing span. + * Filter used to add tracing span. 
*/ @Provider public class TracingFilter implements ContainerRequestFilter, ContainerResponseFilter { - public static final String TRACING_SCOPE = "TRACING_SCOPE"; - public static final String TRACING_SPAN = "TRACING_SPAN"; + public static final String TRACING_SPAN_CLOSABLE = "TRACING_SPAN_CLOSABLE"; @Context private ResourceInfo resourceInfo; @@ -51,51 +46,44 @@ public class TracingFilter implements ContainerRequestFilter, public void filter(ContainerRequestContext requestContext) { finishAndCloseActiveSpan(); - Span span = GlobalTracer.get().buildSpan( - resourceInfo.getResourceClass().getSimpleName() + "." + - resourceInfo.getResourceMethod().getName()).start(); - Scope scope = GlobalTracer.get().activateSpan(span); - requestContext.setProperty(TRACING_SCOPE, scope); - requestContext.setProperty(TRACING_SPAN, span); + TracingUtil.TraceCloseable activatedSpan = + TracingUtil.createActivatedSpan(resourceInfo.getResourceClass().getSimpleName() + "." + + resourceInfo.getResourceMethod().getName()); + requestContext.setProperty(TRACING_SPAN_CLOSABLE, activatedSpan); } @Override public void filter(ContainerRequestContext requestContext, ContainerResponseContext responseContext) { - final Scope scope = (Scope) requestContext.getProperty(TRACING_SCOPE); - final Span span = (Span) requestContext.getProperty(TRACING_SPAN); + final TracingUtil.TraceCloseable spanClosable + = (TracingUtil.TraceCloseable) requestContext.getProperty(TRACING_SPAN_CLOSABLE); // HDDS-7064: Operation performed while writing StreamingOutput response // should only be closed once the StreamingOutput callback has completely // written the data to the destination OutputStream out = responseContext.getEntityStream(); - if (out != null && !(span instanceof NoopSpan)) { + if (out != null) { responseContext.setEntityStream(new WrappedOutputStream(out) { @Override public void close() throws IOException { super.close(); - finishAndClose(scope, span); + finishAndClose(spanClosable); } }); } else { - 
finishAndClose(scope, span); + finishAndClose(spanClosable); } } - private static void finishAndClose(Scope scope, Span span) { - if (scope != null) { - scope.close(); - } - if (span != null) { - span.finish(); + private static void finishAndClose(TracingUtil.TraceCloseable spanClosable) { + try { + spanClosable.close(); + } catch (Exception e) { + // Do nothing } finishAndCloseActiveSpan(); } private static void finishAndCloseActiveSpan() { - ScopeManager scopeManager = GlobalTracer.get().scopeManager(); - if (scopeManager != null && scopeManager.activeSpan() != null) { - scopeManager.activeSpan().finish(); - scopeManager.activate(null); - } + TracingUtil.getActiveSpan().end(); } } diff --git a/pom.xml b/pom.xml index b51c4176f40a..ce7bd7dbcbee 100644 --- a/pom.xml +++ b/pom.xml @@ -108,7 +108,6 @@ 1.71.0 2.16.2 0.8.13 - 1.8.1 2.1.1 2.6.1 2.0.2 @@ -178,7 +177,7 @@ 16.14.2 4.12.0 - 0.33.0 + 1.54.0 1.7.1 Joshua Tree 2.1.0-SNAPSHOT @@ -538,35 +537,34 @@ ${dropwizard-metrics.version} - io.jaegertracing - jaeger-client - ${jaeger.version} - - - org.apache.tomcat.embed - tomcat-embed-core - - + io.opentelemetry + opentelemetry-api + ${opentelemetry.version} + + + io.opentelemetry + opentelemetry-context + ${opentelemetry.version} - io.jaegertracing - jaeger-core - ${jaeger.version} + io.opentelemetry + opentelemetry-exporter-otlp + ${opentelemetry.version} - io.opentracing - opentracing-api - ${opentracing.version} + io.opentelemetry + opentelemetry-sdk + ${opentelemetry.version} - io.opentracing - opentracing-noop - ${opentracing.version} + io.opentelemetry + opentelemetry-sdk-common + ${opentelemetry.version} - io.opentracing - opentracing-util - ${opentracing.version} + io.opentelemetry + opentelemetry-sdk-trace + ${opentelemetry.version} io.prometheus