From c1d94aad78dee2bd0bd7ec18414ae91d1dd622ba Mon Sep 17 00:00:00 2001 From: sumitagrawl Date: Tue, 20 May 2025 11:00:33 +0530 Subject: [PATCH 01/16] HDDS-12926. remove *.tmp.* exclusion in DU --- .../src/main/java/org/apache/hadoop/hdds/fs/DUFactory.java | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/fs/DUFactory.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/fs/DUFactory.java index a89e914398c9..266244763cea 100644 --- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/fs/DUFactory.java +++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/fs/DUFactory.java @@ -31,7 +31,6 @@ public class DUFactory implements SpaceUsageCheckFactory { private static final String DU_CACHE_FILE = "scmUsed"; - private static final String EXCLUDE_PATTERN = "*.tmp.*"; private Conf conf; @@ -46,7 +45,7 @@ public SpaceUsageCheckFactory setConfiguration( public SpaceUsageCheckParams paramsFor(File dir) { Duration refreshPeriod = conf.getRefreshPeriod(); - SpaceUsageSource source = new DU(dir, EXCLUDE_PATTERN); + SpaceUsageSource source = new DU(dir, null); SpaceUsagePersistence persistence = new SaveSpaceUsageToFile( new File(dir, DU_CACHE_FILE), refreshPeriod); From 9b5cd3ec766e7600696979f5259c5ac7f340f401 Mon Sep 17 00:00:00 2001 From: Sumit Agrawal Date: Fri, 12 Sep 2025 22:05:18 +0530 Subject: [PATCH 02/16] HDDS-13680. 
migrate tracing to opentelemetry --- hadoop-hdds/client/pom.xml | 8 - .../hadoop/hdds/scm/XceiverClientGrpc.java | 13 +- hadoop-hdds/common/pom.xml | 35 ++-- .../scm/storage/ContainerProtocolCalls.java | 43 ++-- .../hdds/tracing/GrpcServerInterceptor.java | 10 +- .../hadoop/hdds/tracing/StringCodec.java | 96 --------- .../hadoop/hdds/tracing/TraceAllMethod.java | 10 +- .../hadoop/hdds/tracing/TracingUtil.java | 185 ++++++++++++------ .../hadoop/hdds/tracing/TestStringCodec.java | 64 ------ .../hadoop/hdds/tracing/TestTracingUtil.java | 7 +- hadoop-hdds/container-service/pom.xml | 10 +- .../transport/server/XceiverServerGrpc.java | 9 +- .../server/ratis/XceiverServerRatis.java | 10 +- hadoop-hdds/framework/pom.xml | 5 +- .../OzoneProtocolMessageDispatcher.java | 4 +- .../src/main/compose/ozone/monitoring.yaml | 2 + .../dist/src/main/license/bin/LICENSE.txt | 14 +- hadoop-ozone/freon/pom.xml | 9 +- .../ozone/freon/BaseFreonGenerator.java | 13 +- hadoop-ozone/ozonefs-common/pom.xml | 9 +- .../fs/ozone/BasicRootedOzoneFileSystem.java | 13 +- .../hadoop/fs/ozone/OzoneFSInputStream.java | 23 +-- .../hadoop/fs/ozone/OzoneFSOutputStream.java | 5 +- hadoop-ozone/ozonefs-hadoop2/pom.xml | 8 - hadoop-ozone/ozonefs-hadoop3/pom.xml | 8 - hadoop-ozone/ozonefs/pom.xml | 9 +- .../fs/ozone/RootedOzoneFileSystem.java | 5 +- hadoop-ozone/s3gateway/pom.xml | 13 +- .../apache/hadoop/ozone/s3/TracingFilter.java | 47 ++--- pom.xml | 44 ++--- 30 files changed, 280 insertions(+), 451 deletions(-) delete mode 100644 hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/tracing/StringCodec.java delete mode 100644 hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/tracing/TestStringCodec.java diff --git a/hadoop-hdds/client/pom.xml b/hadoop-hdds/client/pom.xml index 917d90f88e86..9c94e152a9f2 100644 --- a/hadoop-hdds/client/pom.xml +++ b/hadoop-hdds/client/pom.xml @@ -32,14 +32,6 @@ com.google.guava guava - - io.opentracing - opentracing-api - - - io.opentracing - opentracing-util - 
jakarta.annotation jakarta.annotation-api diff --git a/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientGrpc.java b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientGrpc.java index 84d66970e23a..9d5b44284ec1 100644 --- a/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientGrpc.java +++ b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientGrpc.java @@ -21,9 +21,6 @@ import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; -import io.opentracing.Scope; -import io.opentracing.Span; -import io.opentracing.util.GlobalTracer; import java.io.IOException; import java.io.InterruptedIOException; import java.util.ArrayList; @@ -536,10 +533,7 @@ public XceiverClientReply sendCommandAsync( ContainerCommandRequestProto request) throws IOException, ExecutionException, InterruptedException { - Span span = GlobalTracer.get() - .buildSpan("XceiverClientGrpc." + request.getCmdType().name()).start(); - - try (Scope ignored = GlobalTracer.get().activateSpan(span)) { + try (AutoCloseable ignored = TracingUtil.createActivatedSpan("XceiverClientGrpc." 
+ request.getCmdType().name())) { ContainerCommandRequestProto.Builder builder = ContainerCommandRequestProto.newBuilder(request) @@ -553,9 +547,8 @@ public XceiverClientReply sendCommandAsync( asyncReply.getResponse().get(); } return asyncReply; - - } finally { - span.finish(); + } catch (Exception e) { + throw new IOException(e); } } diff --git a/hadoop-hdds/common/pom.xml b/hadoop-hdds/common/pom.xml index 3138ac4e1177..80c1b3577bda 100644 --- a/hadoop-hdds/common/pom.xml +++ b/hadoop-hdds/common/pom.xml @@ -68,16 +68,34 @@ picocli - io.jaegertracing - jaeger-core + io.opentelemetry + opentelemetry-api + ${opentelemetry.version} - io.opentracing - opentracing-api + io.opentelemetry + opentelemetry-context + ${opentelemetry.version} - io.opentracing - opentracing-util + io.opentelemetry + opentelemetry-exporter-otlp + ${opentelemetry.version} + + + io.opentelemetry + opentelemetry-sdk + ${opentelemetry.version} + + + io.opentelemetry + opentelemetry-sdk-common + ${opentelemetry.version} + + + io.opentelemetry + opentelemetry-sdk-trace + ${opentelemetry.version} jakarta.annotation @@ -156,11 +174,6 @@ org.yaml snakeyaml - - io.jaegertracing - jaeger-client - runtime - org.apache.ratis ratis-metrics-dropwizard3 diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/storage/ContainerProtocolCalls.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/storage/ContainerProtocolCalls.java index bbedf1e75081..83b95d8afd0f 100644 --- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/storage/ContainerProtocolCalls.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/storage/ContainerProtocolCalls.java @@ -20,9 +20,8 @@ import static java.util.Collections.singletonList; import static org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.Result.BLOCK_TOKEN_VERIFICATION_FAILED; -import io.opentracing.Scope; -import io.opentracing.Span; -import io.opentracing.util.GlobalTracer; +import 
io.opentelemetry.api.trace.Span; +import io.opentelemetry.api.trace.StatusCode; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; @@ -155,17 +154,17 @@ static T tryEachDatanode(Pipeline pipeline, try { return op.apply(d); } catch (IOException e) { - Span span = GlobalTracer.get().activeSpan(); + Span span = TracingUtil.getActiveSpan(); if (e instanceof StorageContainerException) { StorageContainerException sce = (StorageContainerException)e; // Block token expired. There's no point retrying other DN. // Throw the exception to request a new block token right away. if (sce.getResult() == BLOCK_TOKEN_VERIFICATION_FAILED) { - span.log("block token verification failed at DN " + d); + span.addEvent("block token verification failed at DN " + d); throw e; } } - span.log("failed to connect to DN " + d); + span.addEvent("failed to connect to DN " + d); excluded.add(d); if (excluded.size() < pipeline.size()) { LOG.warn(toErrorMessage.apply(d) @@ -372,30 +371,28 @@ public static ContainerProtos.ReadChunkResponseProto readChunk( builder.setEncodedToken(token.encodeToUrlString()); } - Span span = GlobalTracer.get() - .buildSpan("readChunk").start(); - try (Scope ignored = GlobalTracer.get().activateSpan(span)) { - span.setTag("offset", chunk.getOffset()) - .setTag("length", chunk.getLen()) - .setTag("block", blockID.toString()); - return tryEachDatanode(xceiverClient.getPipeline(), - d -> readChunk(xceiverClient, chunk, blockID, - validators, builder, d), - d -> toErrorMessage(chunk, blockID, d)); - } finally { - span.finish(); - } + try (AutoCloseable ignored = TracingUtil.createActivatedSpan("readChunk")) { + Span span = TracingUtil.getActiveSpan(); + span.setAttribute("offset", chunk.getOffset()) + .setAttribute("length", chunk.getLen()) + .setAttribute("block", blockID.toString()); + return tryEachDatanode(xceiverClient.getPipeline(), + d -> readChunk(xceiverClient, chunk, blockID, + validators, builder, d), + d -> toErrorMessage(chunk, 
blockID, d)); + } catch (Exception e) { + throw new IOException(e); + } } - private static ContainerProtos.ReadChunkResponseProto readChunk( + private static ContainerProtos.ReadChunkResponseProto readChunk( XceiverClientSpi xceiverClient, ChunkInfo chunk, DatanodeBlockID blockID, List validators, ContainerCommandRequestProto.Builder builder, DatanodeDetails d) throws IOException { - ContainerCommandRequestProto.Builder requestBuilder = builder + ContainerCommandRequestProto.Builder requestBuilder = builder .setDatanodeUuid(d.getUuidString()); - Span span = GlobalTracer.get().activeSpan(); - String traceId = TracingUtil.exportSpan(span); + String traceId = TracingUtil.exportCurrentSpan(); if (traceId != null) { requestBuilder = requestBuilder.setTraceID(traceId); } diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/tracing/GrpcServerInterceptor.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/tracing/GrpcServerInterceptor.java index 044137fab6d4..af1b77facd18 100644 --- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/tracing/GrpcServerInterceptor.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/tracing/GrpcServerInterceptor.java @@ -17,9 +17,8 @@ package org.apache.hadoop.hdds.tracing; -import io.opentracing.Scope; -import io.opentracing.Span; -import io.opentracing.util.GlobalTracer; +import io.opentelemetry.api.trace.Span; +import io.opentelemetry.context.Scope; import org.apache.ratis.thirdparty.io.grpc.ForwardingServerCallListener.SimpleForwardingServerCallListener; import org.apache.ratis.thirdparty.io.grpc.Metadata; import org.apache.ratis.thirdparty.io.grpc.ServerCall; @@ -41,14 +40,15 @@ public Listener interceptCall( next.startCall(call, headers)) { @Override public void onMessage(ReqT message) { + Span span = TracingUtil .importAndCreateSpan( call.getMethodDescriptor().getFullMethodName(), headers.get(GrpcClientInterceptor.TRACING_HEADER)); - try (Scope ignored = 
GlobalTracer.get().activateSpan(span)) { + try (Scope ignored = span.makeCurrent()) { super.onMessage(message); } finally { - span.finish(); + span.end(); } } }; diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/tracing/StringCodec.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/tracing/StringCodec.java deleted file mode 100644 index f22393a50d3c..000000000000 --- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/tracing/StringCodec.java +++ /dev/null @@ -1,96 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.hdds.tracing; - -import io.jaegertracing.internal.JaegerSpanContext; -import io.jaegertracing.internal.exceptions.EmptyTracerStateStringException; -import io.jaegertracing.internal.exceptions.MalformedTracerStateStringException; -import io.jaegertracing.internal.exceptions.TraceIdOutOfBoundException; -import io.jaegertracing.spi.Codec; -import io.opentracing.propagation.Format; -import java.math.BigInteger; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * A jaeger codec to save the current tracing context as a string. 
- */ -public class StringCodec implements Codec { - - private static final Logger LOG = LoggerFactory.getLogger(StringCodec.class); - public static final StringFormat FORMAT = new StringFormat(); - - @Override - public JaegerSpanContext extract(StringBuilder s) { - if (s == null) { - throw new EmptyTracerStateStringException(); - } - String value = s.toString(); - if (!"".equals(value)) { - String[] parts = value.split(":"); - if (parts.length != 4) { - if (LOG.isDebugEnabled()) { - LOG.debug("MalformedTracerStateString: {}", value); - } - throw new MalformedTracerStateStringException(value); - } else { - String traceId = parts[0]; - if (traceId.length() <= 32 && !traceId.isEmpty()) { - return new JaegerSpanContext(high(traceId), - (new BigInteger(traceId, 16)).longValue(), - (new BigInteger(parts[1], 16)).longValue(), - (new BigInteger(parts[2], 16)).longValue(), - (new BigInteger(parts[3], 16)).byteValue()); - } else { - throw new TraceIdOutOfBoundException( - "Trace id [" + traceId + "] length is not within 1 and 32"); - } - } - } else { - throw new EmptyTracerStateStringException(); - } - } - - @Override - public void inject(JaegerSpanContext context, StringBuilder string) { - int intFlag = context.getFlags() & 255; - string.append(context.getTraceId()) - .append(':').append(Long.toHexString(context.getSpanId())) - .append(':').append(Long.toHexString(context.getParentId())) - .append(':').append(Integer.toHexString(intFlag)); - } - - private static long high(String hexString) { - if (hexString.length() > 16) { - int highLength = hexString.length() - 16; - String highString = hexString.substring(0, highLength); - return (new BigInteger(highString, 16)).longValue(); - } else { - return 0L; - } - } - - /** - * The format to save the context as text. - *

- * Using the mutable StringBuilder instead of plain String. - */ - public static final class StringFormat implements Format { - } - -} diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/tracing/TraceAllMethod.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/tracing/TraceAllMethod.java index b8560c46853a..965574091aee 100644 --- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/tracing/TraceAllMethod.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/tracing/TraceAllMethod.java @@ -19,9 +19,6 @@ import static java.util.Collections.emptyMap; -import io.opentracing.Scope; -import io.opentracing.Span; -import io.opentracing.util.GlobalTracer; import java.lang.reflect.InvocationHandler; import java.lang.reflect.Method; import java.util.Arrays; @@ -67,10 +64,7 @@ public Object invoke(Object proxy, Method method, Object[] args) method.getName()); } - Span span = GlobalTracer.get().buildSpan( - name + "." + method.getName()) - .start(); - try (Scope ignored = GlobalTracer.get().activateSpan(span)) { + try (AutoCloseable ignored = TracingUtil.createActivatedSpan(name + "." 
+ method.getName())) { try { return delegateMethod.invoke(delegate, args); } catch (Exception ex) { @@ -79,8 +73,6 @@ public Object invoke(Object proxy, Method method, Object[] args) } else { throw ex; } - } finally { - span.finish(); } } } diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/tracing/TracingUtil.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/tracing/TracingUtil.java index 867802120ef2..bbb03d32c8b4 100644 --- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/tracing/TracingUtil.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/tracing/TracingUtil.java @@ -17,14 +17,24 @@ package org.apache.hadoop.hdds.tracing; -import io.jaegertracing.Configuration; -import io.jaegertracing.internal.JaegerTracer; -import io.opentracing.Scope; -import io.opentracing.Span; -import io.opentracing.SpanContext; -import io.opentracing.Tracer; -import io.opentracing.util.GlobalTracer; +import io.opentelemetry.api.OpenTelemetry; +import io.opentelemetry.api.common.AttributeKey; +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.trace.Span; +import io.opentelemetry.api.trace.StatusCode; +import io.opentelemetry.api.trace.Tracer; +import io.opentelemetry.api.trace.propagation.W3CTraceContextPropagator; +import io.opentelemetry.context.Context; +import io.opentelemetry.context.Scope; +import io.opentelemetry.exporter.otlp.trace.OtlpGrpcSpanExporter; +import io.opentelemetry.sdk.OpenTelemetrySdk; +import io.opentelemetry.sdk.resources.Resource; +import io.opentelemetry.sdk.trace.SdkTracerProvider; +import io.opentelemetry.sdk.trace.export.SimpleSpanProcessor; import java.lang.reflect.Proxy; +import java.util.HashMap; +import java.util.Map; +import io.opentelemetry.sdk.trace.samplers.Sampler; import org.apache.hadoop.hdds.conf.ConfigurationSource; import org.apache.hadoop.hdds.scm.ScmConfigKeys; import org.apache.ratis.util.function.CheckedRunnable; @@ -34,10 +44,15 @@ * Utility class to collect 
all the tracing helper methods. */ public final class TracingUtil { - private static final String NULL_SPAN_AS_STRING = ""; + private static final String OTEL_EXPORTER_OTLP_ENDPOINT = "OTEL_EXPORTER_OTLP_ENDPOINT"; + private static final String OTEL_EXPORTER_OTLP_ENDPOINT_DEFAULT = "http://localhost:4317"; + private static final String OTEL_TRACES_SAMPLER_ARG = "OTEL_TRACES_SAMPLER_ARG"; + private static final double OTEL_TRACES_SAMPLER_RATIO_DEFAULT = 1.0; private static volatile boolean isInit = false; + private static OpenTelemetry openTelemetry = OpenTelemetry.noop(); + private static Tracer tracer = openTelemetry.getTracer("noop");; private TracingUtil() { } @@ -47,15 +62,41 @@ private TracingUtil() { */ public static void initTracing( String serviceName, ConfigurationSource conf) { - if (!GlobalTracer.isRegistered() && isTracingEnabled(conf)) { - Configuration config = Configuration.fromEnv(serviceName); - JaegerTracer tracer = config.getTracerBuilder() - .registerExtractor(StringCodec.FORMAT, new StringCodec()) - .registerInjector(StringCodec.FORMAT, new StringCodec()) - .build(); - GlobalTracer.registerIfAbsent(tracer); - isInit = true; + if (!isTracingEnabled(conf) || isInit) { + return; + } + + String otelEndPoint = System.getenv(OTEL_EXPORTER_OTLP_ENDPOINT); + if (otelEndPoint == null || otelEndPoint.isEmpty()) { + otelEndPoint = OTEL_EXPORTER_OTLP_ENDPOINT_DEFAULT; + } + + double samplerRatio = OTEL_TRACES_SAMPLER_RATIO_DEFAULT; + try { + String sampleStrRatio = System.getenv(OTEL_TRACES_SAMPLER_ARG); + if (sampleStrRatio == null || sampleStrRatio.isEmpty()) { + samplerRatio = Double.parseDouble(System.getenv(OTEL_TRACES_SAMPLER_ARG)); + } + } catch (NumberFormatException ex) { + // ignore and use the default value. 
} + + Resource resource = Resource.create(Attributes.of(AttributeKey.stringKey("service.name"), serviceName)); + OtlpGrpcSpanExporter spanExporter = OtlpGrpcSpanExporter.builder() + .setEndpoint(otelEndPoint) + .build(); + + SimpleSpanProcessor spanProcessor = SimpleSpanProcessor.builder(spanExporter).build(); + SdkTracerProvider tracerProvider = SdkTracerProvider.builder() + .addSpanProcessor(spanProcessor) + .setResource(resource) + .setSampler(Sampler.traceIdRatioBased(samplerRatio)) + .build(); + openTelemetry = OpenTelemetrySdk.builder() + .setTracerProvider(tracerProvider) + .build(); + tracer = openTelemetry.getTracer(serviceName); + isInit = true; } /** @@ -64,21 +105,16 @@ public static void initTracing( * @return encoded tracing context. */ public static String exportCurrentSpan() { - return exportSpan(GlobalTracer.get().activeSpan()); - } - - /** - * Export the specific span as a string. - * - * @return encoded tracing context. - */ - public static String exportSpan(Span span) { - if (span != null && isInit) { - StringBuilder builder = new StringBuilder(); - GlobalTracer.get().inject(span.context(), StringCodec.FORMAT, builder); - return builder.toString(); + Span currentSpan = Span.current(); + if (!currentSpan.getSpanContext().isValid()) { + return NULL_SPAN_AS_STRING; } - return NULL_SPAN_AS_STRING; + + StringBuilder builder = new StringBuilder(); + W3CTraceContextPropagator propagator = W3CTraceContextPropagator.getInstance(); + propagator.inject(Context.current(), builder, + (carrier, key, value) -> carrier.append(key).append("=").append(value).append(";")); + return builder.toString(); } /** @@ -87,25 +123,18 @@ public static String exportSpan(Span span) { * @param name name of the newly created scope * @param encodedParent Encoded parent span (could be null or empty) * - * @return OpenTracing scope. + * @return Tracing scope. 
*/ public static Span importAndCreateSpan(String name, String encodedParent) { - Tracer tracer = GlobalTracer.get(); - return tracer.buildSpan(name) - .asChildOf(extractParent(encodedParent, tracer)) - .start(); - } - - private static SpanContext extractParent(String parent, Tracer tracer) { - if (!GlobalTracer.isRegistered()) { - return null; - } - - if (parent == null || parent.isEmpty()) { - return null; + if (encodedParent == null || encodedParent.isEmpty()) { + return tracer.spanBuilder(name).setNoParent().startSpan(); } - return tracer.extract(StringCodec.FORMAT, new StringBuilder(parent)); + W3CTraceContextPropagator propagator = W3CTraceContextPropagator.getInstance(); + Context extract = propagator.extract(Context.current(), encodedParent, new TextExtractor()); + return tracer.spanBuilder(name) + .setParent(extract) + .startSpan(); } /** @@ -142,8 +171,7 @@ public static boolean isTracingEnabled( */ public static void executeInNewSpan(String spanName, CheckedRunnable runnable) throws E { - Span span = GlobalTracer.get() - .buildSpan(spanName).start(); + Span span = tracer.spanBuilder(spanName).setNoParent().startSpan(); executeInSpan(span, runnable); } @@ -152,8 +180,7 @@ public static void executeInNewSpan(String spanName, */ public static R executeInNewSpan(String spanName, CheckedSupplier supplier) throws E { - Span span = GlobalTracer.get() - .buildSpan(spanName).start(); + Span span = tracer.spanBuilder(spanName).setNoParent().startSpan(); return executeInSpan(span, supplier); } @@ -163,13 +190,14 @@ public static R executeInNewSpan(String spanName, */ private static R executeInSpan(Span span, CheckedSupplier supplier) throws E { - try (Scope ignored = GlobalTracer.get().activateSpan(span)) { + try (Scope ignored = span.makeCurrent()) { return supplier.get(); } catch (Exception ex) { - span.setTag("failed", true); + span.addEvent("Failed with exception: " + ex.getMessage()); + span.setStatus(StatusCode.ERROR); throw ex; } finally { - span.finish(); + 
span.end(); } } @@ -178,13 +206,14 @@ private static R executeInSpan(Span span, */ private static void executeInSpan(Span span, CheckedRunnable runnable) throws E { - try (Scope ignored = GlobalTracer.get().activateSpan(span)) { + try (Scope ignored = span.makeCurrent()) { runnable.run(); } catch (Exception ex) { - span.setTag("failed", true); + span.addEvent("Failed with exception: " + ex.getMessage()); + span.setStatus(StatusCode.ERROR); throw ex; } finally { - span.finish(); + span.end(); } } @@ -204,11 +233,51 @@ public static void executeAsChildSpan(String spanName, * in case of Exceptions. */ public static AutoCloseable createActivatedSpan(String spanName) { - Span span = GlobalTracer.get().buildSpan(spanName).start(); - Scope scope = GlobalTracer.get().activateSpan(span); + Span span = tracer.spanBuilder(spanName).setNoParent().startSpan(); + Scope scope = span.makeCurrent(); return () -> { scope.close(); - span.finish(); + span.end(); }; } + + public static Span getActiveSpan() { + return Span.current(); + } + + /** + * A TextMapGetter implementation to extract tracing info from String. 
+ */ + public static class TextExtractor implements io.opentelemetry.context.propagation.TextMapGetter { + private Map map = new HashMap<>(); + + @Override + public Iterable keys(String carrier) { + if (map.isEmpty()) { + parse(carrier); + } + return map.keySet(); + } + + @Override + public String get(String carrier, String key) { + if (map.isEmpty()) { + parse(carrier); + } + return map.get(key); + } + + private void parse(String carrier) { + if (carrier == null || carrier.isEmpty()) { + return; + } + String[] parts = carrier.split(";"); + for (String part : parts) { + String[] kv = part.split("="); + if (kv.length == 2) { + map.put(kv[0].trim(), kv[1].trim()); + } + } + } + } } diff --git a/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/tracing/TestStringCodec.java b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/tracing/TestStringCodec.java deleted file mode 100644 index aab23d5da007..000000000000 --- a/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/tracing/TestStringCodec.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.hadoop.hdds.tracing; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertThrows; - -import io.jaegertracing.internal.JaegerSpanContext; -import io.jaegertracing.internal.exceptions.EmptyTracerStateStringException; -import io.jaegertracing.internal.exceptions.MalformedTracerStateStringException; -import org.junit.jupiter.api.Test; - -class TestStringCodec { - - @Test - void testExtract() { - StringCodec codec = new StringCodec(); - - assertThrows(EmptyTracerStateStringException.class, - () -> codec.extract(null)); - - StringBuilder sb = new StringBuilder().append("123"); - MalformedTracerStateStringException malformedException = - assertThrows(MalformedTracerStateStringException.class, - () -> codec.extract(sb)); - assertEquals("String does not match tracer state format: 123", - malformedException.getMessage()); - - sb.append(":456:789"); - malformedException = - assertThrows(MalformedTracerStateStringException.class, - () -> codec.extract(sb)); - assertEquals("String does not match tracer state format: 123:456:789", - malformedException.getMessage()); - - sb.append(":66"); - JaegerSpanContext context = codec.extract(sb); - StringBuilder injected = new StringBuilder(); - codec.inject(context, injected); - - String expectedTraceId = pad("123"); - assertEquals(expectedTraceId, context.getTraceId()); - assertEquals(expectedTraceId + ":456:789:66", injected.toString()); - } - - private static String pad(String s) { - return "0000000000000000".substring(s.length()) + s; - } -} diff --git a/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/tracing/TestTracingUtil.java b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/tracing/TestTracingUtil.java index 6519031b2d31..96e0dec329ab 100644 --- a/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/tracing/TestTracingUtil.java +++ b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/tracing/TestTracingUtil.java @@ 
-22,9 +22,6 @@ import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.fail; -import io.jaegertracing.Configuration; -import io.jaegertracing.internal.JaegerTracer; -import io.opentracing.util.GlobalTracer; import org.apache.hadoop.hdds.conf.InMemoryConfiguration; import org.apache.hadoop.hdds.conf.MutableConfigurationSource; import org.apache.hadoop.hdds.scm.ScmConfigKeys; @@ -47,9 +44,7 @@ public void testDefaultMethod() { @Test public void testInitTracing() { - Configuration config = Configuration.fromEnv("testInitTracing"); - JaegerTracer tracer = config.getTracerBuilder().build(); - GlobalTracer.registerIfAbsent(tracer); + TracingUtil.initTracing("testInitTracing", tracingEnabled()); try (AutoCloseable ignored = TracingUtil.createActivatedSpan("initTracing")) { exportCurrentSpan(); } catch (Exception e) { diff --git a/hadoop-hdds/container-service/pom.xml b/hadoop-hdds/container-service/pom.xml index b46e46e4f2a1..5cdded6e7e69 100644 --- a/hadoop-hdds/container-service/pom.xml +++ b/hadoop-hdds/container-service/pom.xml @@ -79,12 +79,14 @@ netty-transport - io.opentracing - opentracing-api + io.opentelemetry + opentelemetry-api + ${opentelemetry.version} - io.opentracing - opentracing-util + io.opentelemetry + opentelemetry-context + ${opentelemetry.version} jakarta.annotation diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/XceiverServerGrpc.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/XceiverServerGrpc.java index d29aaba3c78b..fb8c38b5d504 100644 --- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/XceiverServerGrpc.java +++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/XceiverServerGrpc.java @@ -19,9 +19,6 @@ import com.google.common.base.Preconditions; import 
com.google.common.util.concurrent.ThreadFactoryBuilder; -import io.opentracing.Scope; -import io.opentracing.Span; -import io.opentracing.util.GlobalTracer; import java.io.IOException; import java.net.BindException; import java.util.Collections; @@ -30,6 +27,8 @@ import java.util.concurrent.ThreadFactory; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; +import io.opentelemetry.api.trace.Span; +import io.opentelemetry.context.Scope; import org.apache.hadoop.hdds.conf.ConfigurationSource; import org.apache.hadoop.hdds.protocol.DatanodeDetails; import org.apache.hadoop.hdds.protocol.DatanodeID; @@ -222,7 +221,7 @@ public void submitRequest(ContainerCommandRequestProto request, .importAndCreateSpan( "XceiverServerGrpc." + request.getCmdType().name(), request.getTraceID()); - try (Scope scope = GlobalTracer.get().activateSpan(span)) { + try (Scope ignore = span.makeCurrent()) { ContainerProtos.ContainerCommandResponseProto response = storageContainer.dispatch(request, null); if (response.getResult() != ContainerProtos.Result.SUCCESS) { @@ -230,7 +229,7 @@ public void submitRequest(ContainerCommandRequestProto request, response.getResult()); } } finally { - span.finish(); + span.end(); } } diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/ratis/XceiverServerRatis.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/ratis/XceiverServerRatis.java index 6661823f9a10..1e5e705fe82f 100644 --- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/ratis/XceiverServerRatis.java +++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/ratis/XceiverServerRatis.java @@ -30,9 +30,6 @@ import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.ImmutableList; import 
com.google.common.util.concurrent.ThreadFactoryBuilder; -import io.opentracing.Scope; -import io.opentracing.Span; -import io.opentracing.util.GlobalTracer; import java.io.File; import java.io.IOException; import java.net.InetSocketAddress; @@ -53,6 +50,8 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import java.util.concurrent.atomic.AtomicLong; +import io.opentelemetry.api.trace.Span; +import io.opentelemetry.context.Scope; import org.apache.hadoop.hdds.DatanodeVersion; import org.apache.hadoop.hdds.HddsConfigKeys; import org.apache.hadoop.hdds.HddsUtils; @@ -662,8 +661,7 @@ public void submitRequest(ContainerCommandRequestProto request, .importAndCreateSpan( "XceiverServerRatis." + request.getCmdType().name(), request.getTraceID()); - try (Scope ignored = GlobalTracer.get().activateSpan(span)) { - + try (Scope ignored = span.makeCurrent()) { RaftClientRequest raftClientRequest = createRaftClientRequest(request, pipelineID, RaftClientRequest.writeRequestType()); @@ -679,7 +677,7 @@ public void submitRequest(ContainerCommandRequestProto request, } processReply(reply); } finally { - span.finish(); + span.end(); } } diff --git a/hadoop-hdds/framework/pom.xml b/hadoop-hdds/framework/pom.xml index 2e05a3b9d819..32e4d4e9ba34 100644 --- a/hadoop-hdds/framework/pom.xml +++ b/hadoop-hdds/framework/pom.xml @@ -93,8 +93,9 @@ - io.opentracing - opentracing-api + io.opentelemetry + opentelemetry-api + ${opentelemetry.version} io.prometheus diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/OzoneProtocolMessageDispatcher.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/OzoneProtocolMessageDispatcher.java index 31f9962d476b..6f46e84fed07 100644 --- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/OzoneProtocolMessageDispatcher.java +++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/OzoneProtocolMessageDispatcher.java @@ -18,8 +18,8 @@ package 
org.apache.hadoop.hdds.server; import com.google.protobuf.ServiceException; -import io.opentracing.Span; import java.util.function.UnaryOperator; +import io.opentelemetry.api.trace.Span; import org.apache.hadoop.hdds.tracing.TracingUtil; import org.apache.hadoop.hdds.utils.ProtocolMessageMetrics; import org.apache.ratis.util.UncheckedAutoCloseable; @@ -98,7 +98,7 @@ public RESPONSE processRequest( return response; } finally { - span.finish(); + span.end(); } } diff --git a/hadoop-ozone/dist/src/main/compose/ozone/monitoring.yaml b/hadoop-ozone/dist/src/main/compose/ozone/monitoring.yaml index 59a26d55d84b..e977ebeccdba 100644 --- a/hadoop-ozone/dist/src/main/compose/ozone/monitoring.yaml +++ b/hadoop-ozone/dist/src/main/compose/ozone/monitoring.yaml @@ -26,6 +26,8 @@ services: COLLECTOR_ZIPKIN_HTTP_PORT: 9411 ports: - 16686:16686 + - 4317:4317 + - 4318:4318 prometheus: image: prom/prometheus volumes: diff --git a/hadoop-ozone/dist/src/main/license/bin/LICENSE.txt b/hadoop-ozone/dist/src/main/license/bin/LICENSE.txt index 3c0a10e5f619..1983fd2eb6dd 100644 --- a/hadoop-ozone/dist/src/main/license/bin/LICENSE.txt +++ b/hadoop-ozone/dist/src/main/license/bin/LICENSE.txt @@ -325,10 +325,6 @@ Apache License 2.0 io.grpc:grpc-protobuf-lite io.grpc:grpc-stub io.grpc:grpc-util - io.jaegertracing:jaeger-client - io.jaegertracing:jaeger-core - io.jaegertracing:jaeger-thrift - io.jaegertracing:jaeger-tracerresolver io.netty:netty-buffer io.netty:netty-codec io.netty:netty-codec-http @@ -347,10 +343,12 @@ Apache License 2.0 io.netty:netty-transport-native-unix-common io.opencensus:opencensus-api io.opencensus:opencensus-contrib-grpc-metrics - io.opentracing.contrib:opentracing-tracerresolver - io.opentracing:opentracing-api - io.opentracing:opentracing-noop - io.opentracing:opentracing-util + io.opentelemetry:opentelemetry-api + io.opentelemetry:opentelemetry-context + io.opentelemetry:opentelemetry-exporter-otlp + io.opentelemetry:opentelemetry-sdk + 
io.opentelemetry:opentelemetry-sdk-common + io.opentelemetry:opentelemetry-sdk-trace io.perfmark:perfmark-api io.prometheus:simpleclient io.prometheus:simpleclient_common diff --git a/hadoop-ozone/freon/pom.xml b/hadoop-ozone/freon/pom.xml index 670bb651068b..ef5d4626d3fc 100644 --- a/hadoop-ozone/freon/pom.xml +++ b/hadoop-ozone/freon/pom.xml @@ -71,12 +71,9 @@ metrics-core - io.opentracing - opentracing-api - - - io.opentracing - opentracing-util + io.opentelemetry + opentelemetry-api + ${opentelemetry.version} org.apache.commons diff --git a/hadoop-ozone/freon/src/main/java/org/apache/hadoop/ozone/freon/BaseFreonGenerator.java b/hadoop-ozone/freon/src/main/java/org/apache/hadoop/ozone/freon/BaseFreonGenerator.java index 3203129ada47..9b5b967e11aa 100644 --- a/hadoop-ozone/freon/src/main/java/org/apache/hadoop/ozone/freon/BaseFreonGenerator.java +++ b/hadoop-ozone/freon/src/main/java/org/apache/hadoop/ozone/freon/BaseFreonGenerator.java @@ -23,9 +23,6 @@ import com.codahale.metrics.MetricRegistry; import com.codahale.metrics.ScheduledReporter; import com.codahale.metrics.Slf4jReporter; -import io.opentracing.Scope; -import io.opentracing.Span; -import io.opentracing.util.GlobalTracer; import java.io.IOException; import java.io.InputStream; import java.time.Duration; @@ -43,12 +40,14 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.stream.Stream; +import io.opentelemetry.api.trace.StatusCode; import org.apache.commons.codec.digest.DigestUtils; import org.apache.commons.lang3.RandomStringUtils; import org.apache.hadoop.hdds.conf.OzoneConfiguration; import org.apache.hadoop.hdds.conf.TimeDurationUtil; import org.apache.hadoop.hdds.scm.pipeline.Pipeline; import org.apache.hadoop.hdds.scm.protocol.StorageContainerLocationProtocol; +import org.apache.hadoop.hdds.tracing.TracingUtil; import org.apache.hadoop.hdds.utils.HAUtils; import org.apache.hadoop.ipc.ProtobufRpcEngine; import org.apache.hadoop.ipc.RPC; @@ -212,16 +211,14 @@ 
protected void taskLoopCompleted() { * @param taskId unique ID of the task */ private void tryNextTask(TaskProvider provider, long taskId) { - Span span = GlobalTracer.get().buildSpan(spanName).start(); - try (Scope scope = GlobalTracer.get().activateSpan(span)) { + try (AutoCloseable ignored = TracingUtil.createActivatedSpan(spanName)) { provider.executeNextTask(taskId); successCounter.incrementAndGet(); } catch (Exception e) { - span.setTag("failure", true); + TracingUtil.getActiveSpan().addEvent("failure with exception: " + e.getMessage()); + TracingUtil.getActiveSpan().setStatus(StatusCode.ERROR); failureCounter.incrementAndGet(); LOG.error("Error on executing task {}", taskId, e); - } finally { - span.finish(); } } diff --git a/hadoop-ozone/ozonefs-common/pom.xml b/hadoop-ozone/ozonefs-common/pom.xml index 14b09d87ab77..5eff439a34a6 100644 --- a/hadoop-ozone/ozonefs-common/pom.xml +++ b/hadoop-ozone/ozonefs-common/pom.xml @@ -34,12 +34,9 @@ guava - io.opentracing - opentracing-api - - - io.opentracing - opentracing-util + io.opentelemetry + opentelemetry-api + ${opentelemetry.version} jakarta.annotation diff --git a/hadoop-ozone/ozonefs-common/src/main/java/org/apache/hadoop/fs/ozone/BasicRootedOzoneFileSystem.java b/hadoop-ozone/ozonefs-common/src/main/java/org/apache/hadoop/fs/ozone/BasicRootedOzoneFileSystem.java index d355f59899d6..93cb18a00159 100644 --- a/hadoop-ozone/ozonefs-common/src/main/java/org/apache/hadoop/fs/ozone/BasicRootedOzoneFileSystem.java +++ b/hadoop-ozone/ozonefs-common/src/main/java/org/apache/hadoop/fs/ozone/BasicRootedOzoneFileSystem.java @@ -37,8 +37,6 @@ import com.google.common.base.Function; import com.google.common.base.Preconditions; -import io.opentracing.Span; -import io.opentracing.util.GlobalTracer; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; @@ -53,6 +51,7 @@ import java.util.List; import java.util.Objects; import java.util.stream.Collectors; +import 
io.opentelemetry.api.trace.Span; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.BlockLocation; import org.apache.hadoop.fs.ContentSummary; @@ -246,8 +245,8 @@ public FSDataInputStream open(Path path, int bufferSize) throws IOException { final String key = pathToKey(path); return TracingUtil.executeInNewSpan("ofs open", () -> { - Span span = GlobalTracer.get().activeSpan(); - span.setTag("path", key); + Span span = TracingUtil.getActiveSpan(); + span.setAttribute("path", key); return new FSDataInputStream(createFSInputStream(adapter.readFile(key))); }); } @@ -397,9 +396,9 @@ public boolean rename(Path src, Path dst) throws IOException { } private boolean renameInSpan(Path src, Path dst) throws IOException { - Span span = GlobalTracer.get().activeSpan(); - span.setTag("src", src.toString()) - .setTag("dst", dst.toString()); + Span span = TracingUtil.getActiveSpan(); + span.setAttribute("src", src.toString()) + .setAttribute("dst", dst.toString()); incrementCounter(Statistic.INVOCATION_RENAME, 1); statistics.incrementWriteOps(1); if (src.equals(dst)) { diff --git a/hadoop-ozone/ozonefs-common/src/main/java/org/apache/hadoop/fs/ozone/OzoneFSInputStream.java b/hadoop-ozone/ozonefs-common/src/main/java/org/apache/hadoop/fs/ozone/OzoneFSInputStream.java index 236ea4458bef..f5d23271a54a 100644 --- a/hadoop-ozone/ozonefs-common/src/main/java/org/apache/hadoop/fs/ozone/OzoneFSInputStream.java +++ b/hadoop-ozone/ozonefs-common/src/main/java/org/apache/hadoop/fs/ozone/OzoneFSInputStream.java @@ -17,9 +17,6 @@ package org.apache.hadoop.fs.ozone; -import io.opentracing.Scope; -import io.opentracing.Span; -import io.opentracing.util.GlobalTracer; import java.io.EOFException; import java.io.IOException; import java.io.InputStream; @@ -56,33 +53,29 @@ public OzoneFSInputStream(InputStream inputStream, Statistics statistics) { @Override public int read() throws IOException { - Span span = GlobalTracer.get() - .buildSpan("OzoneFSInputStream.read").start(); - 
try (Scope scope = GlobalTracer.get().activateSpan(span)) { + try (AutoCloseable ignored = TracingUtil.createActivatedSpan("OzoneFSInputStream.read")) { int byteRead = inputStream.read(); if (statistics != null && byteRead >= 0) { statistics.incrementBytesRead(1); } return byteRead; - } finally { - span.finish(); + } catch (Exception e) { + throw new IOException(e); } } @Override public int read(byte[] b, int off, int len) throws IOException { - Span span = GlobalTracer.get() - .buildSpan("OzoneFSInputStream.read").start(); - try (Scope scope = GlobalTracer.get().activateSpan(span)) { - span.setTag("offset", off) - .setTag("length", len); + try (AutoCloseable ignored = TracingUtil.createActivatedSpan("OzoneFSInputStream.read")) { + TracingUtil.getActiveSpan().setAttribute("offset", off) + .setAttribute("length", len); int bytesRead = inputStream.read(b, off, len); if (statistics != null && bytesRead >= 0) { statistics.incrementBytesRead(bytesRead); } return bytesRead; - } finally { - span.finish(); + } catch (Exception e) { + throw new IOException(e); } } diff --git a/hadoop-ozone/ozonefs-common/src/main/java/org/apache/hadoop/fs/ozone/OzoneFSOutputStream.java b/hadoop-ozone/ozonefs-common/src/main/java/org/apache/hadoop/fs/ozone/OzoneFSOutputStream.java index 534a0dba1d74..b278dd33eb54 100644 --- a/hadoop-ozone/ozonefs-common/src/main/java/org/apache/hadoop/fs/ozone/OzoneFSOutputStream.java +++ b/hadoop-ozone/ozonefs-common/src/main/java/org/apache/hadoop/fs/ozone/OzoneFSOutputStream.java @@ -17,8 +17,6 @@ package org.apache.hadoop.fs.ozone; -import io.opentracing.Span; -import io.opentracing.util.GlobalTracer; import java.io.IOException; import java.io.OutputStream; import org.apache.hadoop.fs.Syncable; @@ -50,8 +48,7 @@ public void write(int b) throws IOException { public void write(byte[] b, int off, int len) throws IOException { TracingUtil.executeInNewSpan("OzoneFSOutputStream.write", () -> { - Span span = GlobalTracer.get().activeSpan(); - 
span.setTag("length", len); + TracingUtil.getActiveSpan().setAttribute("length", len); outputStream.write(b, off, len); }); } diff --git a/hadoop-ozone/ozonefs-hadoop2/pom.xml b/hadoop-ozone/ozonefs-hadoop2/pom.xml index 311a14bd5e3d..ccc072e7fc75 100644 --- a/hadoop-ozone/ozonefs-hadoop2/pom.xml +++ b/hadoop-ozone/ozonefs-hadoop2/pom.xml @@ -27,14 +27,6 @@ org.apache.hadoop.ozone.shaded - - javax.annotation - javax.annotation-api - - - javax.servlet - javax.servlet-api - org.apache.hadoop hadoop-hdfs-client diff --git a/hadoop-ozone/ozonefs-hadoop3/pom.xml b/hadoop-ozone/ozonefs-hadoop3/pom.xml index fe83d1a8f5c4..84f4b233eace 100644 --- a/hadoop-ozone/ozonefs-hadoop3/pom.xml +++ b/hadoop-ozone/ozonefs-hadoop3/pom.xml @@ -33,14 +33,6 @@ com.google.guava guava - - javax.annotation - javax.annotation-api - - - javax.servlet - javax.servlet-api - org.apache.hadoop.thirdparty hadoop-shaded-protobuf_3_25 diff --git a/hadoop-ozone/ozonefs/pom.xml b/hadoop-ozone/ozonefs/pom.xml index 39c866975d0b..a951aeb90741 100644 --- a/hadoop-ozone/ozonefs/pom.xml +++ b/hadoop-ozone/ozonefs/pom.xml @@ -34,12 +34,9 @@ guava - io.opentracing - opentracing-api - - - io.opentracing - opentracing-util + io.opentelemetry + opentelemetry-api + ${opentelemetry.version} org.apache.hadoop diff --git a/hadoop-ozone/ozonefs/src/main/java/org/apache/hadoop/fs/ozone/RootedOzoneFileSystem.java b/hadoop-ozone/ozonefs/src/main/java/org/apache/hadoop/fs/ozone/RootedOzoneFileSystem.java index 31b695b0a074..8d14e27aa5e9 100644 --- a/hadoop-ozone/ozonefs/src/main/java/org/apache/hadoop/fs/ozone/RootedOzoneFileSystem.java +++ b/hadoop-ozone/ozonefs/src/main/java/org/apache/hadoop/fs/ozone/RootedOzoneFileSystem.java @@ -20,7 +20,6 @@ import static org.apache.hadoop.ozone.OzoneConsts.FORCE_LEASE_RECOVERY_ENV; import com.google.common.base.Strings; -import io.opentracing.util.GlobalTracer; import java.io.IOException; import java.io.InputStream; import java.net.URI; @@ -146,7 +145,7 @@ public boolean 
recoverLease(final Path f) throws IOException { } private boolean recoverLeaseTraced(final Path f) throws IOException { - GlobalTracer.get().activeSpan().setTag("path", f.toString()); + TracingUtil.getActiveSpan().setAttribute("path", f.toString()); statistics.incrementWriteOps(1); LOG.trace("recoverLease() path:{}", f); Path qualifiedPath = makeQualified(f); @@ -184,7 +183,7 @@ public boolean isFileClosed(Path f) throws IOException { } private boolean isFileClosedTraced(Path f) throws IOException { - GlobalTracer.get().activeSpan().setTag("path", f.toString()); + TracingUtil.getActiveSpan().setAttribute("fs.operation", "isFileClosed"); statistics.incrementWriteOps(1); LOG.trace("isFileClosed() path:{}", f); Path qualifiedPath = makeQualified(f); diff --git a/hadoop-ozone/s3gateway/pom.xml b/hadoop-ozone/s3gateway/pom.xml index b17e218413e7..b399452b2b35 100644 --- a/hadoop-ozone/s3gateway/pom.xml +++ b/hadoop-ozone/s3gateway/pom.xml @@ -63,16 +63,9 @@ picocli - io.opentracing - opentracing-api - - - io.opentracing - opentracing-noop - - - io.opentracing - opentracing-util + io.opentelemetry + opentelemetry-api + ${opentelemetry.version} jakarta.annotation diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/TracingFilter.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/TracingFilter.java index dd9d7cdf170f..a66833c0f8eb 100644 --- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/TracingFilter.java +++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/TracingFilter.java @@ -17,11 +17,6 @@ package org.apache.hadoop.ozone.s3; -import io.opentracing.Scope; -import io.opentracing.ScopeManager; -import io.opentracing.Span; -import io.opentracing.noop.NoopSpan; -import io.opentracing.util.GlobalTracer; import java.io.IOException; import java.io.OutputStream; import javax.ws.rs.container.ContainerRequestContext; @@ -31,18 +26,18 @@ import javax.ws.rs.container.ResourceInfo; import 
javax.ws.rs.core.Context; import javax.ws.rs.ext.Provider; +import org.apache.hadoop.hdds.tracing.TracingUtil; import org.apache.hadoop.ozone.client.io.WrappedOutputStream; /** - * Filter used to add jaeger tracing span. + * Filter used to add tracing span. */ @Provider public class TracingFilter implements ContainerRequestFilter, ContainerResponseFilter { - public static final String TRACING_SCOPE = "TRACING_SCOPE"; - public static final String TRACING_SPAN = "TRACING_SPAN"; + public static final String TRACING_SPAN_CLOSABLE = "TRACING_SPAN_CLOSABLE"; @Context private ResourceInfo resourceInfo; @@ -51,51 +46,43 @@ public class TracingFilter implements ContainerRequestFilter, public void filter(ContainerRequestContext requestContext) { finishAndCloseActiveSpan(); - Span span = GlobalTracer.get().buildSpan( - resourceInfo.getResourceClass().getSimpleName() + "." + - resourceInfo.getResourceMethod().getName()).start(); - Scope scope = GlobalTracer.get().activateSpan(span); - requestContext.setProperty(TRACING_SCOPE, scope); - requestContext.setProperty(TRACING_SPAN, span); + AutoCloseable activatedSpan = + TracingUtil.createActivatedSpan(resourceInfo.getResourceClass().getSimpleName() + "." 
+ + resourceInfo.getResourceMethod().getName()); + requestContext.setProperty(TRACING_SPAN_CLOSABLE, activatedSpan); } @Override public void filter(ContainerRequestContext requestContext, ContainerResponseContext responseContext) { - final Scope scope = (Scope) requestContext.getProperty(TRACING_SCOPE); - final Span span = (Span) requestContext.getProperty(TRACING_SPAN); + final AutoCloseable spanClosable = (AutoCloseable) requestContext.getProperty(TRACING_SPAN_CLOSABLE); // HDDS-7064: Operation performed while writing StreamingOutput response // should only be closed once the StreamingOutput callback has completely // written the data to the destination OutputStream out = responseContext.getEntityStream(); - if (out != null && !(span instanceof NoopSpan)) { + if (out != null) { responseContext.setEntityStream(new WrappedOutputStream(out) { @Override public void close() throws IOException { super.close(); - finishAndClose(scope, span); + finishAndClose(spanClosable); } }); } else { - finishAndClose(scope, span); + finishAndClose(spanClosable); } } - private static void finishAndClose(Scope scope, Span span) { - if (scope != null) { - scope.close(); - } - if (span != null) { - span.finish(); + private static void finishAndClose(AutoCloseable spanClosable) { + try { + spanClosable.close(); + } catch (Exception e) { + // Do nothing } finishAndCloseActiveSpan(); } private static void finishAndCloseActiveSpan() { - ScopeManager scopeManager = GlobalTracer.get().scopeManager(); - if (scopeManager != null && scopeManager.activeSpan() != null) { - scopeManager.activeSpan().finish(); - scopeManager.activate(null); - } + TracingUtil.getActiveSpan().end(); } } diff --git a/pom.xml b/pom.xml index 21bebc4788ac..856175a3ca08 100644 --- a/pom.xml +++ b/pom.xml @@ -105,7 +105,6 @@ 1.71.0 2.16.2 0.8.13 - 1.8.1 2.1.1 2.6.1 2.0.2 @@ -174,7 +173,7 @@ 16.14.2 4.12.0 - 0.33.0 + 1.54.0 1.7.1 Joshua Tree 2.1.0-SNAPSHOT @@ -534,35 +533,34 @@ ${dropwizard-metrics.version} - 
io.jaegertracing - jaeger-client - ${jaeger.version} - - - org.apache.tomcat.embed - tomcat-embed-core - - + io.opentelemetry + opentelemetry-api + ${opentelemetry.version} + + + io.opentelemetry + opentelemetry-context + ${opentelemetry.version} - io.jaegertracing - jaeger-core - ${jaeger.version} + io.opentelemetry + opentelemetry-exporter-otlp + ${opentelemetry.version} - io.opentracing - opentracing-api - ${opentracing.version} + io.opentelemetry + opentelemetry-sdk + ${opentelemetry.version} - io.opentracing - opentracing-noop - ${opentracing.version} + io.opentelemetry + opentelemetry-sdk-common + ${opentelemetry.version} - io.opentracing - opentracing-util - ${opentracing.version} + io.opentelemetry + opentelemetry-sdk-trace + ${opentelemetry.version} io.prometheus From cc7a7256e74dd56ef242de33ce6cfb53b3f64390 Mon Sep 17 00:00:00 2001 From: Sumit Agrawal Date: Mon, 15 Sep 2025 13:02:30 +0530 Subject: [PATCH 03/16] fix checkstyle, pmd, dependency --- .../scm/storage/ContainerProtocolCalls.java | 29 +++++++++---------- .../hadoop/hdds/tracing/TracingUtil.java | 11 ++++--- .../transport/server/XceiverServerGrpc.java | 4 +-- .../server/ratis/XceiverServerRatis.java | 4 +-- .../OzoneProtocolMessageDispatcher.java | 2 +- .../dist/src/main/license/bin/LICENSE.txt | 8 +++++ .../dist/src/main/license/jar-report.txt | 28 +++++++++--------- .../ozone/freon/BaseFreonGenerator.java | 2 +- .../fs/ozone/BasicRootedOzoneFileSystem.java | 2 +- 9 files changed, 48 insertions(+), 42 deletions(-) diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/storage/ContainerProtocolCalls.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/storage/ContainerProtocolCalls.java index 83b95d8afd0f..577aec27a01a 100644 --- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/storage/ContainerProtocolCalls.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/storage/ContainerProtocolCalls.java @@ -21,7 +21,6 @@ import static 
org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.Result.BLOCK_TOKEN_VERIFICATION_FAILED; import io.opentelemetry.api.trace.Span; -import io.opentelemetry.api.trace.StatusCode; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; @@ -371,26 +370,26 @@ public static ContainerProtos.ReadChunkResponseProto readChunk( builder.setEncodedToken(token.encodeToUrlString()); } - try (AutoCloseable ignored = TracingUtil.createActivatedSpan("readChunk")) { - Span span = TracingUtil.getActiveSpan(); - span.setAttribute("offset", chunk.getOffset()) - .setAttribute("length", chunk.getLen()) - .setAttribute("block", blockID.toString()); - return tryEachDatanode(xceiverClient.getPipeline(), - d -> readChunk(xceiverClient, chunk, blockID, - validators, builder, d), - d -> toErrorMessage(chunk, blockID, d)); - } catch (Exception e) { - throw new IOException(e); - } + try (AutoCloseable ignored = TracingUtil.createActivatedSpan("readChunk")) { + Span span = TracingUtil.getActiveSpan(); + span.setAttribute("offset", chunk.getOffset()) + .setAttribute("length", chunk.getLen()) + .setAttribute("block", blockID.toString()); + return tryEachDatanode(xceiverClient.getPipeline(), + d -> readChunk(xceiverClient, chunk, blockID, + validators, builder, d), + d -> toErrorMessage(chunk, blockID, d)); + } catch (Exception e) { + throw new IOException(e); + } } - private static ContainerProtos.ReadChunkResponseProto readChunk( + private static ContainerProtos.ReadChunkResponseProto readChunk( XceiverClientSpi xceiverClient, ChunkInfo chunk, DatanodeBlockID blockID, List validators, ContainerCommandRequestProto.Builder builder, DatanodeDetails d) throws IOException { - ContainerCommandRequestProto.Builder requestBuilder = builder + ContainerCommandRequestProto.Builder requestBuilder = builder .setDatanodeUuid(d.getUuidString()); String traceId = TracingUtil.exportCurrentSpan(); if (traceId != null) { diff --git 
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/tracing/TracingUtil.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/tracing/TracingUtil.java index bbb03d32c8b4..508fa4a6eefa 100644 --- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/tracing/TracingUtil.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/tracing/TracingUtil.java @@ -29,12 +29,12 @@ import io.opentelemetry.exporter.otlp.trace.OtlpGrpcSpanExporter; import io.opentelemetry.sdk.OpenTelemetrySdk; import io.opentelemetry.sdk.resources.Resource; -import io.opentelemetry.sdk.trace.SdkTracerProvider; import io.opentelemetry.sdk.trace.export.SimpleSpanProcessor; +import io.opentelemetry.sdk.trace.samplers.Sampler; +import io.opentelemetry.sdk.trace.SdkTracerProvider; import java.lang.reflect.Proxy; import java.util.HashMap; import java.util.Map; -import io.opentelemetry.sdk.trace.samplers.Sampler; import org.apache.hadoop.hdds.conf.ConfigurationSource; import org.apache.hadoop.hdds.scm.ScmConfigKeys; import org.apache.ratis.util.function.CheckedRunnable; @@ -51,8 +51,7 @@ public final class TracingUtil { private static final double OTEL_TRACES_SAMPLER_RATIO_DEFAULT = 1.0; private static volatile boolean isInit = false; - private static OpenTelemetry openTelemetry = OpenTelemetry.noop(); - private static Tracer tracer = openTelemetry.getTracer("noop");; + private static Tracer tracer = OpenTelemetry.noop().getTracer("noop"); private TracingUtil() { } @@ -92,7 +91,7 @@ public static void initTracing( .setResource(resource) .setSampler(Sampler.traceIdRatioBased(samplerRatio)) .build(); - openTelemetry = OpenTelemetrySdk.builder() + OpenTelemetry openTelemetry = OpenTelemetrySdk.builder() .setTracerProvider(tracerProvider) .build(); tracer = openTelemetry.getTracer(serviceName); @@ -113,7 +112,7 @@ public static String exportCurrentSpan() { StringBuilder builder = new StringBuilder(); W3CTraceContextPropagator propagator = W3CTraceContextPropagator.getInstance(); 
propagator.inject(Context.current(), builder, - (carrier, key, value) -> carrier.append(key).append("=").append(value).append(";")); + (carrier, key, value) -> carrier.append(key).append('=').append(value).append(';')); return builder.toString(); } diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/XceiverServerGrpc.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/XceiverServerGrpc.java index fb8c38b5d504..93a342a95c17 100644 --- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/XceiverServerGrpc.java +++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/XceiverServerGrpc.java @@ -19,6 +19,8 @@ import com.google.common.base.Preconditions; import com.google.common.util.concurrent.ThreadFactoryBuilder; +import io.opentelemetry.api.trace.Span; +import io.opentelemetry.context.Scope; import java.io.IOException; import java.net.BindException; import java.util.Collections; @@ -27,8 +29,6 @@ import java.util.concurrent.ThreadFactory; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; -import io.opentelemetry.api.trace.Span; -import io.opentelemetry.context.Scope; import org.apache.hadoop.hdds.conf.ConfigurationSource; import org.apache.hadoop.hdds.protocol.DatanodeDetails; import org.apache.hadoop.hdds.protocol.DatanodeID; diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/ratis/XceiverServerRatis.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/ratis/XceiverServerRatis.java index 1e5e705fe82f..4c6d442199ee 100644 --- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/ratis/XceiverServerRatis.java +++ 
b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/ratis/XceiverServerRatis.java @@ -30,6 +30,8 @@ import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.ImmutableList; import com.google.common.util.concurrent.ThreadFactoryBuilder; +import io.opentelemetry.api.trace.Span; +import io.opentelemetry.context.Scope; import java.io.File; import java.io.IOException; import java.net.InetSocketAddress; @@ -50,8 +52,6 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import java.util.concurrent.atomic.AtomicLong; -import io.opentelemetry.api.trace.Span; -import io.opentelemetry.context.Scope; import org.apache.hadoop.hdds.DatanodeVersion; import org.apache.hadoop.hdds.HddsConfigKeys; import org.apache.hadoop.hdds.HddsUtils; diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/OzoneProtocolMessageDispatcher.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/OzoneProtocolMessageDispatcher.java index 6f46e84fed07..94ac89633738 100644 --- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/OzoneProtocolMessageDispatcher.java +++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/OzoneProtocolMessageDispatcher.java @@ -18,8 +18,8 @@ package org.apache.hadoop.hdds.server; import com.google.protobuf.ServiceException; -import java.util.function.UnaryOperator; import io.opentelemetry.api.trace.Span; +import java.util.function.UnaryOperator; import org.apache.hadoop.hdds.tracing.TracingUtil; import org.apache.hadoop.hdds.utils.ProtocolMessageMetrics; import org.apache.ratis.util.UncheckedAutoCloseable; diff --git a/hadoop-ozone/dist/src/main/license/bin/LICENSE.txt b/hadoop-ozone/dist/src/main/license/bin/LICENSE.txt index 1983fd2eb6dd..1582b7760367 100644 --- a/hadoop-ozone/dist/src/main/license/bin/LICENSE.txt +++ b/hadoop-ozone/dist/src/main/license/bin/LICENSE.txt @@ -300,6 
+300,7 @@ Apache License 2.0 com.lmax:disruptor com.nimbusds:nimbus-jose-jwt com.squareup.okhttp3:okhttp + com.squareup.okhttp3:okhttp-jvm com.squareup.okio:okio com.squareup.okio:okio-jvm commons-beanutils:commons-beanutils @@ -344,10 +345,17 @@ Apache License 2.0 io.opencensus:opencensus-api io.opencensus:opencensus-contrib-grpc-metrics io.opentelemetry:opentelemetry-api + io.opentelemetry:opentelemetry-common io.opentelemetry:opentelemetry-context + io.opentelemetry:opentelemetry-exporter-common + io.opentelemetry:opentelemetry-exporter-otlp-common io.opentelemetry:opentelemetry-exporter-otlp + io.opentelemetry:opentelemetry-exporter-sender-okhttp io.opentelemetry:opentelemetry-sdk io.opentelemetry:opentelemetry-sdk-common + io.opentelemetry:opentelemetry-sdk-common-extension-autoconfigure-spi + io.opentelemetry:opentelemetry-sdk-logs + io.opentelemetry:opentelemetry-sdk-metrics io.opentelemetry:opentelemetry-sdk-trace io.perfmark:perfmark-api io.prometheus:simpleclient diff --git a/hadoop-ozone/dist/src/main/license/jar-report.txt b/hadoop-ozone/dist/src/main/license/jar-report.txt index ef45ef12566d..fb12b1d01b40 100644 --- a/hadoop-ozone/dist/src/main/license/jar-report.txt +++ b/hadoop-ozone/dist/src/main/license/jar-report.txt @@ -99,10 +99,6 @@ share/ozone/lib/jackson-datatype-jsr310.jar share/ozone/lib/jackson-jaxrs-base.jar share/ozone/lib/jackson-jaxrs-json-provider.jar share/ozone/lib/jackson-module-jaxb-annotations.jar -share/ozone/lib/jaeger-client.jar -share/ozone/lib/jaeger-core.jar -share/ozone/lib/jaeger-thrift.jar -share/ozone/lib/jaeger-tracerresolver.jar share/ozone/lib/jakarta.activation.jar share/ozone/lib/jakarta.activation-api.jar share/ozone/lib/jakarta.annotation-api.jar @@ -173,11 +169,7 @@ share/ozone/lib/kerby-asn1.jar share/ozone/lib/kerby-config.jar share/ozone/lib/kerby-pkix.jar share/ozone/lib/kerby-util.jar -share/ozone/lib/kotlin-stdlib-common.jar -share/ozone/lib/kotlin-stdlib-jdk7.jar -share/ozone/lib/kotlin-stdlib-jdk8.jar 
share/ozone/lib/kotlin-stdlib.jar -share/ozone/lib/libthrift.jar share/ozone/lib/listenablefuture-empty-to-avoid-conflict-with-guava.jar share/ozone/lib/log4j-api.jar share/ozone/lib/log4j-core.jar @@ -204,13 +196,21 @@ share/ozone/lib/netty-transport-native-epoll.Final-linux-x86_64.jar share/ozone/lib/netty-transport-native-epoll.Final.jar share/ozone/lib/netty-transport-native-unix-common.Final.jar share/ozone/lib/nimbus-jose-jwt.jar -share/ozone/lib/okhttp.jar +share/ozone/lib/okhttp-jvm.jar share/ozone/lib/okio-jvm.jar -share/ozone/lib/okio.jar -share/ozone/lib/opentracing-api.jar -share/ozone/lib/opentracing-noop.jar -share/ozone/lib/opentracing-tracerresolver.jar -share/ozone/lib/opentracing-util.jar +share/ozone/lib/opentelemetry-api.jar +share/ozone/lib/opentelemetry-common.jar +share/ozone/lib/opentelemetry-context.jar +share/ozone/lib/opentelemetry-exporter-common.jar +share/ozone/lib/opentelemetry-exporter-otlp-common.jar +share/ozone/lib/opentelemetry-exporter-otlp.jar +share/ozone/lib/opentelemetry-exporter-sender-okhttp.jar +share/ozone/lib/opentelemetry-sdk-common.jar +share/ozone/lib/opentelemetry-sdk-extension-autoconfigure-spi.jar +share/ozone/lib/opentelemetry-sdk-logs.jar +share/ozone/lib/opentelemetry-sdk-metrics.jar +share/ozone/lib/opentelemetry-sdk-trace.jar +share/ozone/lib/opentelemetry-sdk.jar share/ozone/lib/orc-core.jar share/ozone/lib/orc-shims.jar share/ozone/lib/osgi-resource-locator.jar diff --git a/hadoop-ozone/freon/src/main/java/org/apache/hadoop/ozone/freon/BaseFreonGenerator.java b/hadoop-ozone/freon/src/main/java/org/apache/hadoop/ozone/freon/BaseFreonGenerator.java index 9b5b967e11aa..b3fdc0c1de53 100644 --- a/hadoop-ozone/freon/src/main/java/org/apache/hadoop/ozone/freon/BaseFreonGenerator.java +++ b/hadoop-ozone/freon/src/main/java/org/apache/hadoop/ozone/freon/BaseFreonGenerator.java @@ -23,6 +23,7 @@ import com.codahale.metrics.MetricRegistry; import com.codahale.metrics.ScheduledReporter; import 
com.codahale.metrics.Slf4jReporter; +import io.opentelemetry.api.trace.StatusCode; import java.io.IOException; import java.io.InputStream; import java.time.Duration; @@ -40,7 +41,6 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.stream.Stream; -import io.opentelemetry.api.trace.StatusCode; import org.apache.commons.codec.digest.DigestUtils; import org.apache.commons.lang3.RandomStringUtils; import org.apache.hadoop.hdds.conf.OzoneConfiguration; diff --git a/hadoop-ozone/ozonefs-common/src/main/java/org/apache/hadoop/fs/ozone/BasicRootedOzoneFileSystem.java b/hadoop-ozone/ozonefs-common/src/main/java/org/apache/hadoop/fs/ozone/BasicRootedOzoneFileSystem.java index 93cb18a00159..ca717f7a8765 100644 --- a/hadoop-ozone/ozonefs-common/src/main/java/org/apache/hadoop/fs/ozone/BasicRootedOzoneFileSystem.java +++ b/hadoop-ozone/ozonefs-common/src/main/java/org/apache/hadoop/fs/ozone/BasicRootedOzoneFileSystem.java @@ -37,6 +37,7 @@ import com.google.common.base.Function; import com.google.common.base.Preconditions; +import io.opentelemetry.api.trace.Span; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; @@ -51,7 +52,6 @@ import java.util.List; import java.util.Objects; import java.util.stream.Collectors; -import io.opentelemetry.api.trace.Span; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.BlockLocation; import org.apache.hadoop.fs.ContentSummary; From bc0e11818f3272a1688888380ed6e3150da13b4c Mon Sep 17 00:00:00 2001 From: Sumit Agrawal Date: Mon, 15 Sep 2025 13:09:10 +0530 Subject: [PATCH 04/16] fix dependency --- hadoop-ozone/dist/src/main/license/bin/LICENSE.txt | 6 ------ 1 file changed, 6 deletions(-) diff --git a/hadoop-ozone/dist/src/main/license/bin/LICENSE.txt b/hadoop-ozone/dist/src/main/license/bin/LICENSE.txt index 1582b7760367..10f312c8c91a 100644 --- a/hadoop-ozone/dist/src/main/license/bin/LICENSE.txt +++ 
b/hadoop-ozone/dist/src/main/license/bin/LICENSE.txt @@ -299,9 +299,7 @@ Apache License 2.0 com.jolbox:bonecp com.lmax:disruptor com.nimbusds:nimbus-jose-jwt - com.squareup.okhttp3:okhttp com.squareup.okhttp3:okhttp-jvm - com.squareup.okio:okio com.squareup.okio:okio-jvm commons-beanutils:commons-beanutils commons-cli:commons-cli @@ -424,7 +422,6 @@ Apache License 2.0 org.apache.ratis:ratis-shell org.apache.ratis:ratis-thirdparty-misc org.apache.ratis:ratis-tools - org.apache.thrift:libthrift org.apache.zookeeper:zookeeper org.apache.zookeeper:zookeeper-jute org.eclipse.jetty:jetty-client @@ -441,9 +438,6 @@ Apache License 2.0 org.jboss.weld.servlet:weld-servlet-shaded org.jetbrains:annotations org.jetbrains.kotlin:kotlin-stdlib - org.jetbrains.kotlin:kotlin-stdlib-common - org.jetbrains.kotlin:kotlin-stdlib-jdk7 - org.jetbrains.kotlin:kotlin-stdlib-jdk8 org.jheaps:jheaps org.jooq:jooq org.jooq:jooq-codegen From 94090d83964c471252a087e608d044c77753df95 Mon Sep 17 00:00:00 2001 From: Sumit Agrawal Date: Mon, 15 Sep 2025 13:36:18 +0530 Subject: [PATCH 05/16] fix checkstyle --- .../main/java/org/apache/hadoop/hdds/tracing/TracingUtil.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/tracing/TracingUtil.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/tracing/TracingUtil.java index 508fa4a6eefa..5f75299867cc 100644 --- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/tracing/TracingUtil.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/tracing/TracingUtil.java @@ -29,9 +29,9 @@ import io.opentelemetry.exporter.otlp.trace.OtlpGrpcSpanExporter; import io.opentelemetry.sdk.OpenTelemetrySdk; import io.opentelemetry.sdk.resources.Resource; +import io.opentelemetry.sdk.trace.SdkTracerProvider; import io.opentelemetry.sdk.trace.export.SimpleSpanProcessor; import io.opentelemetry.sdk.trace.samplers.Sampler; -import io.opentelemetry.sdk.trace.SdkTracerProvider; 
import java.lang.reflect.Proxy; import java.util.HashMap; import java.util.Map; From 191a0493eab133e801d35f24a9a94be7904794b3 Mon Sep 17 00:00:00 2001 From: Sumit Agrawal Date: Mon, 15 Sep 2025 20:19:02 +0530 Subject: [PATCH 06/16] fix test cases --- .../apache/hadoop/hdds/tracing/TracingUtil.java | 16 ++++++++++++++-- 1 file changed, 14 insertions(+), 2 deletions(-) diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/tracing/TracingUtil.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/tracing/TracingUtil.java index 5f75299867cc..f9deff620222 100644 --- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/tracing/TracingUtil.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/tracing/TracingUtil.java @@ -39,11 +39,14 @@ import org.apache.hadoop.hdds.scm.ScmConfigKeys; import org.apache.ratis.util.function.CheckedRunnable; import org.apache.ratis.util.function.CheckedSupplier; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Utility class to collect all the tracing helper methods. 
*/ public final class TracingUtil { + private static final Logger LOG = LoggerFactory.getLogger(TracingUtil.class); private static final String NULL_SPAN_AS_STRING = ""; private static final String OTEL_EXPORTER_OTLP_ENDPOINT = "OTEL_EXPORTER_OTLP_ENDPOINT"; private static final String OTEL_EXPORTER_OTLP_ENDPOINT_DEFAULT = "http://localhost:4317"; @@ -65,6 +68,16 @@ public static void initTracing( return; } + try { + initialize(serviceName); + isInit = true; + LOG.info("Initialized tracing service: {}", serviceName); + } catch (Exception e) { + LOG.error("Failed to initialize tracing", e); + } + } + + private static void initialize(String serviceName) { String otelEndPoint = System.getenv(OTEL_EXPORTER_OTLP_ENDPOINT); if (otelEndPoint == null || otelEndPoint.isEmpty()) { otelEndPoint = OTEL_EXPORTER_OTLP_ENDPOINT_DEFAULT; @@ -73,7 +86,7 @@ public static void initTracing( double samplerRatio = OTEL_TRACES_SAMPLER_RATIO_DEFAULT; try { String sampleStrRatio = System.getenv(OTEL_TRACES_SAMPLER_ARG); - if (sampleStrRatio == null || sampleStrRatio.isEmpty()) { + if (sampleStrRatio != null && !sampleStrRatio.isEmpty()) { samplerRatio = Double.parseDouble(System.getenv(OTEL_TRACES_SAMPLER_ARG)); } } catch (NumberFormatException ex) { @@ -95,7 +108,6 @@ public static void initTracing( .setTracerProvider(tracerProvider) .build(); tracer = openTelemetry.getTracer(serviceName); - isInit = true; } /** From 3595cae372e60a5b6797170d5685211e8ac117cd Mon Sep 17 00:00:00 2001 From: Sumit Agrawal Date: Tue, 16 Sep 2025 14:20:02 +0530 Subject: [PATCH 07/16] fix test case failure --- .../java/org/apache/hadoop/hdds/scm/XceiverClientGrpc.java | 2 ++ .../hadoop/hdds/scm/storage/ContainerProtocolCalls.java | 2 ++ .../java/org/apache/hadoop/fs/ozone/OzoneFSInputStream.java | 4 ++++ 3 files changed, 8 insertions(+) diff --git a/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientGrpc.java 
b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientGrpc.java index 9d5b44284ec1..810db306b24b 100644 --- a/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientGrpc.java +++ b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientGrpc.java @@ -547,6 +547,8 @@ public XceiverClientReply sendCommandAsync( asyncReply.getResponse().get(); } return asyncReply; + } catch (IOException e) { + throw e; } catch (Exception e) { throw new IOException(e); } diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/storage/ContainerProtocolCalls.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/storage/ContainerProtocolCalls.java index 577aec27a01a..22b5f323f155 100644 --- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/storage/ContainerProtocolCalls.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/storage/ContainerProtocolCalls.java @@ -379,6 +379,8 @@ public static ContainerProtos.ReadChunkResponseProto readChunk( d -> readChunk(xceiverClient, chunk, blockID, validators, builder, d), d -> toErrorMessage(chunk, blockID, d)); + } catch (IOException e) { + throw e; } catch (Exception e) { throw new IOException(e); } diff --git a/hadoop-ozone/ozonefs-common/src/main/java/org/apache/hadoop/fs/ozone/OzoneFSInputStream.java b/hadoop-ozone/ozonefs-common/src/main/java/org/apache/hadoop/fs/ozone/OzoneFSInputStream.java index f5d23271a54a..5c6e905343e0 100644 --- a/hadoop-ozone/ozonefs-common/src/main/java/org/apache/hadoop/fs/ozone/OzoneFSInputStream.java +++ b/hadoop-ozone/ozonefs-common/src/main/java/org/apache/hadoop/fs/ozone/OzoneFSInputStream.java @@ -59,6 +59,8 @@ public int read() throws IOException { statistics.incrementBytesRead(1); } return byteRead; + } catch (IOException e) { + throw e; } catch (Exception e) { throw new IOException(e); } @@ -74,6 +76,8 @@ public int read(byte[] b, int off, int len) throws IOException { 
statistics.incrementBytesRead(bytesRead); } return bytesRead; + } catch (IOException e) { + throw e; } catch (Exception e) { throw new IOException(e); } From 4d87485d64b2fd9230b015d01ec91480481e25ee Mon Sep 17 00:00:00 2001 From: Sumit Agrawal Date: Tue, 16 Sep 2025 16:39:29 +0530 Subject: [PATCH 08/16] test case failure fix --- .../apache/hadoop/hdds/scm/XceiverClientGrpc.java | 7 ++----- .../hdds/scm/storage/ContainerProtocolCalls.java | 6 +----- .../apache/hadoop/hdds/tracing/TraceAllMethod.java | 2 +- .../org/apache/hadoop/hdds/tracing/TracingUtil.java | 7 ++++++- .../apache/hadoop/hdds/tracing/TestTracingUtil.java | 2 +- .../hadoop/ozone/freon/BaseFreonGenerator.java | 2 +- .../hadoop/ozone/freon/RandomKeyGenerator.java | 8 ++++---- .../apache/hadoop/fs/ozone/OzoneFSInputStream.java | 12 ++---------- .../org/apache/hadoop/ozone/s3/TracingFilter.java | 7 ++++--- 9 files changed, 22 insertions(+), 31 deletions(-) diff --git a/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientGrpc.java b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientGrpc.java index 810db306b24b..b07cee4097c0 100644 --- a/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientGrpc.java +++ b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientGrpc.java @@ -533,7 +533,8 @@ public XceiverClientReply sendCommandAsync( ContainerCommandRequestProto request) throws IOException, ExecutionException, InterruptedException { - try (AutoCloseable ignored = TracingUtil.createActivatedSpan("XceiverClientGrpc." + request.getCmdType().name())) { + try (TracingUtil.TraceCloseable ignored = TracingUtil.createActivatedSpan( + "XceiverClientGrpc." 
+ request.getCmdType().name())) { ContainerCommandRequestProto.Builder builder = ContainerCommandRequestProto.newBuilder(request) @@ -547,10 +548,6 @@ public XceiverClientReply sendCommandAsync( asyncReply.getResponse().get(); } return asyncReply; - } catch (IOException e) { - throw e; - } catch (Exception e) { - throw new IOException(e); } } diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/storage/ContainerProtocolCalls.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/storage/ContainerProtocolCalls.java index 22b5f323f155..a934fc513720 100644 --- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/storage/ContainerProtocolCalls.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/storage/ContainerProtocolCalls.java @@ -370,7 +370,7 @@ public static ContainerProtos.ReadChunkResponseProto readChunk( builder.setEncodedToken(token.encodeToUrlString()); } - try (AutoCloseable ignored = TracingUtil.createActivatedSpan("readChunk")) { + try (TracingUtil.TraceCloseable ignored = TracingUtil.createActivatedSpan("readChunk")) { Span span = TracingUtil.getActiveSpan(); span.setAttribute("offset", chunk.getOffset()) .setAttribute("length", chunk.getLen()) @@ -379,10 +379,6 @@ public static ContainerProtos.ReadChunkResponseProto readChunk( d -> readChunk(xceiverClient, chunk, blockID, validators, builder, d), d -> toErrorMessage(chunk, blockID, d)); - } catch (IOException e) { - throw e; - } catch (Exception e) { - throw new IOException(e); } } diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/tracing/TraceAllMethod.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/tracing/TraceAllMethod.java index 965574091aee..95e735b8965b 100644 --- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/tracing/TraceAllMethod.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/tracing/TraceAllMethod.java @@ -64,7 +64,7 @@ public Object invoke(Object proxy, Method method, 
Object[] args) method.getName()); } - try (AutoCloseable ignored = TracingUtil.createActivatedSpan(name + "." + method.getName())) { + try (TracingUtil.TraceCloseable ignored = TracingUtil.createActivatedSpan(name + "." + method.getName())) { try { return delegateMethod.invoke(delegate, args); } catch (Exception ex) { diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/tracing/TracingUtil.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/tracing/TracingUtil.java index f9deff620222..cd81cfac8726 100644 --- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/tracing/TracingUtil.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/tracing/TracingUtil.java @@ -243,7 +243,7 @@ public static void executeAsChildSpan(String spanName, * This is a simplified way to use span as there is no way to add any tag * in case of Exceptions. */ - public static AutoCloseable createActivatedSpan(String spanName) { + public static TraceCloseable createActivatedSpan(String spanName) { Span span = tracer.spanBuilder(spanName).setNoParent().startSpan(); Scope scope = span.makeCurrent(); return () -> { @@ -256,6 +256,11 @@ public static Span getActiveSpan() { return Span.current(); } + public interface TraceCloseable extends AutoCloseable { + @Override + void close(); + } + /** * A TextMapGetter implementation to extract tracing info from String. 
*/ diff --git a/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/tracing/TestTracingUtil.java b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/tracing/TestTracingUtil.java index 96e0dec329ab..bc500ddddab2 100644 --- a/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/tracing/TestTracingUtil.java +++ b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/tracing/TestTracingUtil.java @@ -45,7 +45,7 @@ public void testDefaultMethod() { @Test public void testInitTracing() { TracingUtil.initTracing("testInitTracing", tracingEnabled()); - try (AutoCloseable ignored = TracingUtil.createActivatedSpan("initTracing")) { + try (TracingUtil.TraceCloseable ignored = TracingUtil.createActivatedSpan("initTracing")) { exportCurrentSpan(); } catch (Exception e) { fail("Should not get exception"); diff --git a/hadoop-ozone/freon/src/main/java/org/apache/hadoop/ozone/freon/BaseFreonGenerator.java b/hadoop-ozone/freon/src/main/java/org/apache/hadoop/ozone/freon/BaseFreonGenerator.java index b3fdc0c1de53..ac58f7185548 100644 --- a/hadoop-ozone/freon/src/main/java/org/apache/hadoop/ozone/freon/BaseFreonGenerator.java +++ b/hadoop-ozone/freon/src/main/java/org/apache/hadoop/ozone/freon/BaseFreonGenerator.java @@ -211,7 +211,7 @@ protected void taskLoopCompleted() { * @param taskId unique ID of the task */ private void tryNextTask(TaskProvider provider, long taskId) { - try (AutoCloseable ignored = TracingUtil.createActivatedSpan(spanName)) { + try (TracingUtil.TraceCloseable ignored = TracingUtil.createActivatedSpan(spanName)) { provider.executeNextTask(taskId); successCounter.incrementAndGet(); } catch (Exception e) { diff --git a/hadoop-ozone/freon/src/main/java/org/apache/hadoop/ozone/freon/RandomKeyGenerator.java b/hadoop-ozone/freon/src/main/java/org/apache/hadoop/ozone/freon/RandomKeyGenerator.java index b59916f1a50f..87ebf95f0b06 100644 --- a/hadoop-ozone/freon/src/main/java/org/apache/hadoop/ozone/freon/RandomKeyGenerator.java +++ 
b/hadoop-ozone/freon/src/main/java/org/apache/hadoop/ozone/freon/RandomKeyGenerator.java @@ -742,7 +742,7 @@ private boolean createVolume(int volumeNumber) { String volumeName = "vol-" + volumeNumber + "-" + RandomStringUtils.secure().nextNumeric(5); LOG.trace("Creating volume: {}", volumeName); - try (AutoCloseable scope = TracingUtil + try (TracingUtil.TraceCloseable scope = TracingUtil .createActivatedSpan("createVolume")) { long start = System.nanoTime(); objectStore.createVolume(volumeName); @@ -774,7 +774,7 @@ private boolean createBucket(int globalBucketNumber) { RandomStringUtils.secure().nextNumeric(5); LOG.trace("Creating bucket: {} in volume: {}", bucketName, volume.getName()); - try (AutoCloseable scope = TracingUtil + try (TracingUtil.TraceCloseable scope = TracingUtil .createActivatedSpan("createBucket")) { long start = System.nanoTime(); @@ -817,7 +817,7 @@ private boolean createKey(long globalKeyNumber) { LOG.trace("Adding key: {} in bucket: {} of volume: {}", keyName, bucketName, volumeName); try { - try (AutoCloseable scope = TracingUtil.createActivatedSpan("createKey")) { + try (TracingUtil.TraceCloseable scope = TracingUtil.createActivatedSpan("createKey")) { long keyCreateStart = System.nanoTime(); try (OzoneOutputStream os = bucket.createKey(keyName, keySize.toBytes(), replicationConfig, new HashMap<>())) { @@ -867,7 +867,7 @@ private boolean cleanVolume(int volumeNumber) { OzoneVolume volume = getVolume(volumeNumber); String volumeName = volume.getName(); LOG.trace("Cleaning volume: {}", volumeName); - try (AutoCloseable scope = TracingUtil + try (TracingUtil.TraceCloseable scope = TracingUtil .createActivatedSpan("cleanVolume")) { objectStore.deleteVolume(volumeName); numberOfVolumesCleaned.getAndIncrement(); diff --git a/hadoop-ozone/ozonefs-common/src/main/java/org/apache/hadoop/fs/ozone/OzoneFSInputStream.java b/hadoop-ozone/ozonefs-common/src/main/java/org/apache/hadoop/fs/ozone/OzoneFSInputStream.java index 5c6e905343e0..e4133ae57a59 
100644 --- a/hadoop-ozone/ozonefs-common/src/main/java/org/apache/hadoop/fs/ozone/OzoneFSInputStream.java +++ b/hadoop-ozone/ozonefs-common/src/main/java/org/apache/hadoop/fs/ozone/OzoneFSInputStream.java @@ -53,22 +53,18 @@ public OzoneFSInputStream(InputStream inputStream, Statistics statistics) { @Override public int read() throws IOException { - try (AutoCloseable ignored = TracingUtil.createActivatedSpan("OzoneFSInputStream.read")) { + try (TracingUtil.TraceCloseable ignored = TracingUtil.createActivatedSpan("OzoneFSInputStream.read")) { int byteRead = inputStream.read(); if (statistics != null && byteRead >= 0) { statistics.incrementBytesRead(1); } return byteRead; - } catch (IOException e) { - throw e; - } catch (Exception e) { - throw new IOException(e); } } @Override public int read(byte[] b, int off, int len) throws IOException { - try (AutoCloseable ignored = TracingUtil.createActivatedSpan("OzoneFSInputStream.read")) { + try (TracingUtil.TraceCloseable ignored = TracingUtil.createActivatedSpan("OzoneFSInputStream.read")) { TracingUtil.getActiveSpan().setAttribute("offset", off) .setAttribute("length", len); int bytesRead = inputStream.read(b, off, len); @@ -76,10 +72,6 @@ public int read(byte[] b, int off, int len) throws IOException { statistics.incrementBytesRead(bytesRead); } return bytesRead; - } catch (IOException e) { - throw e; - } catch (Exception e) { - throw new IOException(e); } } diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/TracingFilter.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/TracingFilter.java index a66833c0f8eb..4e95a4849e69 100644 --- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/TracingFilter.java +++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/TracingFilter.java @@ -46,7 +46,7 @@ public class TracingFilter implements ContainerRequestFilter, public void filter(ContainerRequestContext requestContext) { finishAndCloseActiveSpan(); - 
AutoCloseable activatedSpan = + TracingUtil.TraceCloseable activatedSpan = TracingUtil.createActivatedSpan(resourceInfo.getResourceClass().getSimpleName() + "." + resourceInfo.getResourceMethod().getName()); requestContext.setProperty(TRACING_SPAN_CLOSABLE, activatedSpan); @@ -55,7 +55,8 @@ public void filter(ContainerRequestContext requestContext) { @Override public void filter(ContainerRequestContext requestContext, ContainerResponseContext responseContext) { - final AutoCloseable spanClosable = (AutoCloseable) requestContext.getProperty(TRACING_SPAN_CLOSABLE); + final TracingUtil.TraceCloseable spanClosable + = (TracingUtil.TraceCloseable) requestContext.getProperty(TRACING_SPAN_CLOSABLE); // HDDS-7064: Operation performed while writing StreamingOutput response // should only be closed once the StreamingOutput callback has completely // written the data to the destination @@ -73,7 +74,7 @@ public void close() throws IOException { } } - private static void finishAndClose(AutoCloseable spanClosable) { + private static void finishAndClose(TracingUtil.TraceCloseable spanClosable) { try { spanClosable.close(); } catch (Exception e) { From 12e1a33f7fceee001a8085bc2bf80defb9f80f97 Mon Sep 17 00:00:00 2001 From: Sumit Agrawal Date: Tue, 16 Sep 2025 16:44:00 +0530 Subject: [PATCH 09/16] checkstyle fix --- .../main/java/org/apache/hadoop/hdds/tracing/TracingUtil.java | 3 +++ 1 file changed, 3 insertions(+) diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/tracing/TracingUtil.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/tracing/TracingUtil.java index cd81cfac8726..560f3876c114 100644 --- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/tracing/TracingUtil.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/tracing/TracingUtil.java @@ -256,6 +256,9 @@ public static Span getActiveSpan() { return Span.current(); } + /** + * AutoCloseable interface for tracing span but no exception is thrown in close. 
+ */ public interface TraceCloseable extends AutoCloseable { @Override void close(); From a6dfaa293b25006a53c0cdadd7edbb0dc6d77759 Mon Sep 17 00:00:00 2001 From: Sumit Agrawal Date: Tue, 16 Sep 2025 22:09:45 +0530 Subject: [PATCH 10/16] fix failure --- hadoop-ozone/ozonefs-hadoop2/pom.xml | 8 ++++++++ hadoop-ozone/ozonefs-hadoop3/pom.xml | 8 ++++++++ 2 files changed, 16 insertions(+) diff --git a/hadoop-ozone/ozonefs-hadoop2/pom.xml b/hadoop-ozone/ozonefs-hadoop2/pom.xml index ccc072e7fc75..311a14bd5e3d 100644 --- a/hadoop-ozone/ozonefs-hadoop2/pom.xml +++ b/hadoop-ozone/ozonefs-hadoop2/pom.xml @@ -27,6 +27,14 @@ org.apache.hadoop.ozone.shaded + + javax.annotation + javax.annotation-api + + + javax.servlet + javax.servlet-api + org.apache.hadoop hadoop-hdfs-client diff --git a/hadoop-ozone/ozonefs-hadoop3/pom.xml b/hadoop-ozone/ozonefs-hadoop3/pom.xml index 84f4b233eace..fe83d1a8f5c4 100644 --- a/hadoop-ozone/ozonefs-hadoop3/pom.xml +++ b/hadoop-ozone/ozonefs-hadoop3/pom.xml @@ -33,6 +33,14 @@ com.google.guava guava + + javax.annotation + javax.annotation-api + + + javax.servlet + javax.servlet-api + org.apache.hadoop.thirdparty hadoop-shaded-protobuf_3_25 From 0061ae4aa8f238b5481a812a1a7202940ec9ac90 Mon Sep 17 00:00:00 2001 From: Sumit Agrawal Date: Tue, 16 Sep 2025 22:44:41 +0530 Subject: [PATCH 11/16] update doc and configuration --- .../docs/content/feature/Observability.md | 18 ++++++++---------- .../docs/content/feature/Observability.zh.md | 18 ++++++++---------- .../src/main/compose/ozone/monitoring.conf | 5 ++--- .../main/k8s/definitions/jaeger/jaeger.yaml | 2 ++ .../definitions/ozone/definitions/tracing.yaml | 6 ++---- .../ozone-dev/datanode-statefulset.yaml | 6 ++---- .../ozone-dev/freon/freon-deployment.yaml | 6 ++---- .../examples/ozone-dev/httpfs-statefulset.yaml | 6 ++---- .../examples/ozone-dev/jaeger-statefulset.yaml | 2 ++ .../k8s/examples/ozone-dev/om-statefulset.yaml | 6 ++---- .../examples/ozone-dev/recon-statefulset.yaml | 6 ++---- 
.../examples/ozone-dev/s3g-statefulset.yaml | 6 ++---- .../examples/ozone-dev/scm-statefulset.yaml | 6 ++---- 13 files changed, 38 insertions(+), 55 deletions(-) diff --git a/hadoop-hdds/docs/content/feature/Observability.md b/hadoop-hdds/docs/content/feature/Observability.md index 117c13186cbf..04c977da0bf1 100644 --- a/hadoop-hdds/docs/content/feature/Observability.md +++ b/hadoop-hdds/docs/content/feature/Observability.md @@ -97,8 +97,8 @@ Repeat the same for [Object Metrics](https://raw.githubusercontent.com/apache/oz ## Distributed tracing Distributed tracing can help to understand performance bottleneck with visualizing end-to-end performance. - -Ozone uses [jaeger](https://jaegertracing.io) tracing library to collect traces which can send tracing data to any compatible backend (Zipkin, ...). +Ozone makes use of the [OpenTelemetry](https://opentelemetry.io/) API for tracing and exports traces in OTLP format over gRPC. +A [jaeger](https://jaegertracing.io) collector can receive these traces from Ozone on the default OTLP gRPC port 4317. Tracing is turned off by default, but can be turned on with `hdds.tracing.enabled` from `ozone-site.xml` @@ -109,17 +109,15 @@ Tracing is turned off by default, but can be turned on with `hdds.tracing.enable ``` -Jaeger client can be configured with environment variables as documented [here](https://github.com/jaegertracing/jaeger-client-java/blob/master/jaeger-core/README.md): - -For example: +Below are the configuration to be done to provide collector endpoint and sampling strategy. These are environment variables to be set for each Ozone component (OM, SCM, datanode) +and Ozone Client to enable traces such as shell. -```shell -JAEGER_SAMPLER_PARAM=0.01 -JAEGER_SAMPLER_TYPE=probabilistic -JAEGER_AGENT_HOST=jaeger +``` +OTEL_EXPORTER_OTLP_ENDPOINT=http://localhost:4317 +OTEL_TRACES_SAMPLER_ARG=0.01 ``` -This configuration will record 1% of the requests to limit the performance overhead. 
For more information about jaeger sampling [check the documentation](https://www.jaegertracing.io/docs/1.18/sampling/#client-sampling-configuration) +This configuration will record 1% of the requests to limit the performance overhead. ## ozone insight diff --git a/hadoop-hdds/docs/content/feature/Observability.zh.md b/hadoop-hdds/docs/content/feature/Observability.zh.md index 7a5c67b4cdd4..3f706759bade 100644 --- a/hadoop-hdds/docs/content/feature/Observability.zh.md +++ b/hadoop-hdds/docs/content/feature/Observability.zh.md @@ -55,7 +55,8 @@ scrape_configs: ## 分布式跟踪 分布式跟踪可以通过可视化端到端的性能来帮助了解性能瓶颈。 -Ozone 使用 [jaeger](https://jaegertracing.io) 跟踪库收集跟踪,可以将跟踪数据发送到任何兼容的后端(Zipkin,…)。 +Ozone 使用 [OpenTelemetry](https://opentelemetry.io/) API 进行跟踪,并使用 Grpc 格式的 otlp 发送跟踪信息。 +jaeger 跟踪库作为收集器可以通过默认端口 4317(默认)从 Ozone 收集跟踪信息。 默认情况下,跟踪功能是关闭的,可以通过 `ozon-site.xml` 的 `hdds.tracing.enabled` 配置变量打开。 @@ -66,17 +67,14 @@ Ozone 使用 [jaeger](https://jaegertracing.io) 跟踪库收集跟踪,可以 ``` -Jaeger 客户端可以用环境变量进行配置,如[这份](https://github.com/jaegertracing/jaeger-client-java/blob/master/jaeger-core/README.md)文档所述。 +以下是提供收集器端点和采样策略所需的配置。这些是需要为每个 Ozone 组件(OM、SCM、DataNode)和 Ozone 客户端设置的环境变量,以启用 Shell 等跟踪功能。 -例如: - -```shell -JAEGER_SAMPLER_PARAM=0.01 -JAEGER_SAMPLER_TYPE=probabilistic -JAEGER_AGENT_HOST=jaeger +``` +OTEL_EXPORTER_OTLP_ENDPOINT=http://localhost:4317 +OTEL_TRACES_SAMPLER_ARG=0.01 ``` -此配置将记录1%的请求,以限制性能开销。有关 Jaeger 抽样的更多信息,请查看[文档](https://www.jaegertracing.io/docs/1.18/sampling/#client-sampling-configuration)。 +此配置将记录1%的请求,以限制性能开销。 ## Ozone Insight Ozone Insight 是一个用于检查 Ozone 集群当前状态的工具,它可以显示特定组件的日志记录、指标和配置。 @@ -214,4 +212,4 @@ status: VOLUME_ALREADY_EXISTS 实际上 `ozone insight` 是通过 HTTP 端点来检索所需的信息(`/conf`、`/prom`和`/logLevel`端点),它在安全环境中还不被支持。 - \ No newline at end of file + diff --git a/hadoop-ozone/dist/src/main/compose/ozone/monitoring.conf b/hadoop-ozone/dist/src/main/compose/ozone/monitoring.conf index 7fcf1cc8cbc1..cbd6e665a57f 100644 --- 
a/hadoop-ozone/dist/src/main/compose/ozone/monitoring.conf +++ b/hadoop-ozone/dist/src/main/compose/ozone/monitoring.conf @@ -19,8 +19,7 @@ OZONE-SITE.XML_hdds.tracing.enabled=true OZONE-SITE.XML_ozone.metastore.rocksdb.statistics=ALL HDFS-SITE.XML_rpc.metrics.quantile.enable=true HDFS-SITE.XML_rpc.metrics.percentiles.intervals=60,300 -JAEGER_SAMPLER_PARAM=1 -JAEGER_SAMPLER_TYPE=const -JAEGER_AGENT_HOST=jaeger +OTEL_EXPORTER_OTLP_ENDPOINT=http://jaeger:4317 +OTEL_TRACES_SAMPLER_ARG=1 #Enable this variable to print out all hadoop rpc traffic to the stdout. See http://byteman.jboss.org/ to define your own instrumentation. #BYTEMAN_SCRIPT_URL=https://raw.githubusercontent.com/apache/hadoop/trunk/dev-support/byteman/hadooprpc.btm diff --git a/hadoop-ozone/dist/src/main/k8s/definitions/jaeger/jaeger.yaml b/hadoop-ozone/dist/src/main/k8s/definitions/jaeger/jaeger.yaml index 4796092657ca..22921df2ed92 100644 --- a/hadoop-ozone/dist/src/main/k8s/definitions/jaeger/jaeger.yaml +++ b/hadoop-ozone/dist/src/main/k8s/definitions/jaeger/jaeger.yaml @@ -49,6 +49,8 @@ spec: ports: - containerPort: 16686 name: web + - containerPort: 4317 + name: otlp-grpc env: - name: COLLECTOR_ZIPKIN_HTTP_PORT value: "9411" diff --git a/hadoop-ozone/dist/src/main/k8s/definitions/ozone/definitions/tracing.yaml b/hadoop-ozone/dist/src/main/k8s/definitions/ozone/definitions/tracing.yaml index 007b8d125176..ce0eacd62e13 100644 --- a/hadoop-ozone/dist/src/main/k8s/definitions/ozone/definitions/tracing.yaml +++ b/hadoop-ozone/dist/src/main/k8s/definitions/ozone/definitions/tracing.yaml @@ -25,9 +25,7 @@ description: Enable jaeger tracing - .* - env value: - - name: JAEGER_SAMPLER_TYPE - value: probabilistic - - name: JAEGER_SAMPLER_PARAM + - name: OTEL_TRACES_SAMPLER_ARG value: "0.01" - - name: JAEGER_AGENT_HOST + - name: OTEL_EXPORTER_OTLP_ENDPOINT value: jaeger-0.jaeger diff --git a/hadoop-ozone/dist/src/main/k8s/examples/ozone-dev/datanode-statefulset.yaml 
b/hadoop-ozone/dist/src/main/k8s/examples/ozone-dev/datanode-statefulset.yaml index b22212ff79ea..9d44145547da 100644 --- a/hadoop-ozone/dist/src/main/k8s/examples/ozone-dev/datanode-statefulset.yaml +++ b/hadoop-ozone/dist/src/main/k8s/examples/ozone-dev/datanode-statefulset.yaml @@ -49,11 +49,9 @@ spec: - configMapRef: name: config env: - - name: JAEGER_SAMPLER_TYPE - value: probabilistic - - name: JAEGER_SAMPLER_PARAM + - name: OTEL_TRACES_SAMPLER_ARG value: "0.01" - - name: JAEGER_AGENT_HOST + - name: OTEL_EXPORTER_OTLP_ENDPOINT value: jaeger-0.jaeger volumeMounts: - name: data diff --git a/hadoop-ozone/dist/src/main/k8s/examples/ozone-dev/freon/freon-deployment.yaml b/hadoop-ozone/dist/src/main/k8s/examples/ozone-dev/freon/freon-deployment.yaml index 3d39bf690503..e8ef05ff27e8 100644 --- a/hadoop-ozone/dist/src/main/k8s/examples/ozone-dev/freon/freon-deployment.yaml +++ b/hadoop-ozone/dist/src/main/k8s/examples/ozone-dev/freon/freon-deployment.yaml @@ -45,9 +45,7 @@ spec: - configMapRef: name: config env: - - name: JAEGER_SAMPLER_TYPE - value: probabilistic - - name: JAEGER_SAMPLER_PARAM + - name: OTEL_TRACES_SAMPLER_ARG value: "0.01" - - name: JAEGER_AGENT_HOST + - name: OTEL_EXPORTER_OTLP_ENDPOINT value: jaeger-0.jaeger diff --git a/hadoop-ozone/dist/src/main/k8s/examples/ozone-dev/httpfs-statefulset.yaml b/hadoop-ozone/dist/src/main/k8s/examples/ozone-dev/httpfs-statefulset.yaml index 59abe8547f64..70edd4a00c64 100644 --- a/hadoop-ozone/dist/src/main/k8s/examples/ozone-dev/httpfs-statefulset.yaml +++ b/hadoop-ozone/dist/src/main/k8s/examples/ozone-dev/httpfs-statefulset.yaml @@ -48,11 +48,9 @@ spec: - configMapRef: name: config env: - - name: JAEGER_SAMPLER_TYPE - value: probabilistic - - name: JAEGER_SAMPLER_PARAM + - name: OTEL_TRACES_SAMPLER_ARG value: "0.01" - - name: JAEGER_AGENT_HOST + - name: OTEL_EXPORTER_OTLP_ENDPOINT value: jaeger-0.jaeger volumeMounts: - name: data diff --git 
a/hadoop-ozone/dist/src/main/k8s/examples/ozone-dev/jaeger-statefulset.yaml b/hadoop-ozone/dist/src/main/k8s/examples/ozone-dev/jaeger-statefulset.yaml index 51410148f77d..45e671a85c52 100644 --- a/hadoop-ozone/dist/src/main/k8s/examples/ozone-dev/jaeger-statefulset.yaml +++ b/hadoop-ozone/dist/src/main/k8s/examples/ozone-dev/jaeger-statefulset.yaml @@ -37,6 +37,8 @@ spec: ports: - containerPort: 16686 name: web + - containerPort: 4317 + name: otlp-grpc env: - name: COLLECTOR_ZIPKIN_HTTP_PORT value: "9411" diff --git a/hadoop-ozone/dist/src/main/k8s/examples/ozone-dev/om-statefulset.yaml b/hadoop-ozone/dist/src/main/k8s/examples/ozone-dev/om-statefulset.yaml index 36df22c81c98..6cde54dc3439 100644 --- a/hadoop-ozone/dist/src/main/k8s/examples/ozone-dev/om-statefulset.yaml +++ b/hadoop-ozone/dist/src/main/k8s/examples/ozone-dev/om-statefulset.yaml @@ -50,11 +50,9 @@ spec: value: scm-0.scm:9876 - name: ENSURE_OM_INITIALIZED value: /data/metadata/om/current/VERSION - - name: JAEGER_SAMPLER_TYPE - value: probabilistic - - name: JAEGER_SAMPLER_PARAM + - name: OTEL_TRACES_SAMPLER_ARG value: "0.01" - - name: JAEGER_AGENT_HOST + - name: OTEL_EXPORTER_OTLP_ENDPOINT value: jaeger-0.jaeger livenessProbe: tcpSocket: diff --git a/hadoop-ozone/dist/src/main/k8s/examples/ozone-dev/recon-statefulset.yaml b/hadoop-ozone/dist/src/main/k8s/examples/ozone-dev/recon-statefulset.yaml index 6466c29595cf..7a7757c0ceeb 100644 --- a/hadoop-ozone/dist/src/main/k8s/examples/ozone-dev/recon-statefulset.yaml +++ b/hadoop-ozone/dist/src/main/k8s/examples/ozone-dev/recon-statefulset.yaml @@ -48,11 +48,9 @@ spec: env: - name: WAITFOR value: scm-0.scm:9876 - - name: JAEGER_SAMPLER_TYPE - value: probabilistic - - name: JAEGER_SAMPLER_PARAM + - name: OTEL_TRACES_SAMPLER_ARG value: "0.01" - - name: JAEGER_AGENT_HOST + - name: OTEL_EXPORTER_OTLP_ENDPOINT value: jaeger-0.jaeger livenessProbe: tcpSocket: diff --git a/hadoop-ozone/dist/src/main/k8s/examples/ozone-dev/s3g-statefulset.yaml 
b/hadoop-ozone/dist/src/main/k8s/examples/ozone-dev/s3g-statefulset.yaml index 30b71e0a3528..7e405922fd89 100644 --- a/hadoop-ozone/dist/src/main/k8s/examples/ozone-dev/s3g-statefulset.yaml +++ b/hadoop-ozone/dist/src/main/k8s/examples/ozone-dev/s3g-statefulset.yaml @@ -48,11 +48,9 @@ spec: - configMapRef: name: config env: - - name: JAEGER_SAMPLER_TYPE - value: probabilistic - - name: JAEGER_SAMPLER_PARAM + - name: OTEL_TRACES_SAMPLER_ARG value: "0.01" - - name: JAEGER_AGENT_HOST + - name: OTEL_EXPORTER_OTLP_ENDPOINT value: jaeger-0.jaeger volumeMounts: - name: data diff --git a/hadoop-ozone/dist/src/main/k8s/examples/ozone-dev/scm-statefulset.yaml b/hadoop-ozone/dist/src/main/k8s/examples/ozone-dev/scm-statefulset.yaml index 246f8c411e05..1fcbd17d4efe 100644 --- a/hadoop-ozone/dist/src/main/k8s/examples/ozone-dev/scm-statefulset.yaml +++ b/hadoop-ozone/dist/src/main/k8s/examples/ozone-dev/scm-statefulset.yaml @@ -66,11 +66,9 @@ spec: - configMapRef: name: config env: - - name: JAEGER_SAMPLER_TYPE - value: probabilistic - - name: JAEGER_SAMPLER_PARAM + - name: OTEL_TRACES_SAMPLER_ARG value: "0.01" - - name: JAEGER_AGENT_HOST + - name: OTEL_EXPORTER_OTLP_ENDPOINT value: jaeger-0.jaeger volumeMounts: - name: data From c80be02e418b4e88e72b5c50d0192455796705ef Mon Sep 17 00:00:00 2001 From: Sumit Agrawal Date: Wed, 17 Sep 2025 10:06:19 +0530 Subject: [PATCH 12/16] dependency fix for analyze failure --- hadoop-ozone/httpfsgateway/pom.xml | 4 ++++ hadoop-ozone/ozonefs-hadoop2/pom.xml | 8 -------- 2 files changed, 4 insertions(+), 8 deletions(-) diff --git a/hadoop-ozone/httpfsgateway/pom.xml b/hadoop-ozone/httpfsgateway/pom.xml index f0b96351ddcf..dfd23ed3da1e 100644 --- a/hadoop-ozone/httpfsgateway/pom.xml +++ b/hadoop-ozone/httpfsgateway/pom.xml @@ -60,6 +60,10 @@ jakarta.ws.rs jakarta.ws.rs-api + + javax.annotation + javax.annotation-api + javax.servlet javax.servlet-api diff --git a/hadoop-ozone/ozonefs-hadoop2/pom.xml b/hadoop-ozone/ozonefs-hadoop2/pom.xml index 
311a14bd5e3d..ccc072e7fc75 100644 --- a/hadoop-ozone/ozonefs-hadoop2/pom.xml +++ b/hadoop-ozone/ozonefs-hadoop2/pom.xml @@ -27,14 +27,6 @@ org.apache.hadoop.ozone.shaded - - javax.annotation - javax.annotation-api - - - javax.servlet - javax.servlet-api - org.apache.hadoop hadoop-hdfs-client From 37f4d7dff4c1d6191b987ad62b8dce2461ccc48b Mon Sep 17 00:00:00 2001 From: Sumit Agrawal Date: Wed, 17 Sep 2025 10:13:14 +0530 Subject: [PATCH 13/16] annotation-api as runtime for jetty --- hadoop-ozone/httpfsgateway/pom.xml | 1 + 1 file changed, 1 insertion(+) diff --git a/hadoop-ozone/httpfsgateway/pom.xml b/hadoop-ozone/httpfsgateway/pom.xml index dfd23ed3da1e..50ac697fba81 100644 --- a/hadoop-ozone/httpfsgateway/pom.xml +++ b/hadoop-ozone/httpfsgateway/pom.xml @@ -63,6 +63,7 @@ javax.annotation javax.annotation-api + runtime javax.servlet From 6bb82b402f584e049bcce397976e9990fbecab25 Mon Sep 17 00:00:00 2001 From: Sumit Agrawal Date: Wed, 17 Sep 2025 11:08:38 +0530 Subject: [PATCH 14/16] fix dependency --- hadoop-ozone/httpfsgateway/pom.xml | 10 +++++----- hadoop-ozone/ozonefs-hadoop3/pom.xml | 8 -------- 2 files changed, 5 insertions(+), 13 deletions(-) diff --git a/hadoop-ozone/httpfsgateway/pom.xml b/hadoop-ozone/httpfsgateway/pom.xml index 50ac697fba81..058b60c5a0f4 100644 --- a/hadoop-ozone/httpfsgateway/pom.xml +++ b/hadoop-ozone/httpfsgateway/pom.xml @@ -60,11 +60,6 @@ jakarta.ws.rs jakarta.ws.rs-api - - javax.annotation - javax.annotation-api - runtime - javax.servlet javax.servlet-api @@ -111,6 +106,11 @@ commons-codec runtime + + javax.annotation + javax.annotation-api + runtime + org.apache.curator curator-framework diff --git a/hadoop-ozone/ozonefs-hadoop3/pom.xml b/hadoop-ozone/ozonefs-hadoop3/pom.xml index fe83d1a8f5c4..84f4b233eace 100644 --- a/hadoop-ozone/ozonefs-hadoop3/pom.xml +++ b/hadoop-ozone/ozonefs-hadoop3/pom.xml @@ -33,14 +33,6 @@ com.google.guava guava - - javax.annotation - javax.annotation-api - - - javax.servlet - javax.servlet-api - 
org.apache.hadoop.thirdparty hadoop-shaded-protobuf_3_25 From 66226d394821fcfb3935961c31fed082407a0c42 Mon Sep 17 00:00:00 2001 From: Sumit Agrawal Date: Wed, 17 Sep 2025 19:31:45 +0530 Subject: [PATCH 15/16] fix review comment --- hadoop-hdds/common/pom.xml | 6 ------ hadoop-hdds/container-service/pom.xml | 2 -- hadoop-hdds/framework/pom.xml | 1 - hadoop-ozone/freon/pom.xml | 1 - hadoop-ozone/ozonefs-common/pom.xml | 1 - hadoop-ozone/ozonefs/pom.xml | 1 - hadoop-ozone/s3gateway/pom.xml | 1 - 7 files changed, 13 deletions(-) diff --git a/hadoop-hdds/common/pom.xml b/hadoop-hdds/common/pom.xml index 80c1b3577bda..0216b808a7cf 100644 --- a/hadoop-hdds/common/pom.xml +++ b/hadoop-hdds/common/pom.xml @@ -70,32 +70,26 @@ io.opentelemetry opentelemetry-api - ${opentelemetry.version} io.opentelemetry opentelemetry-context - ${opentelemetry.version} io.opentelemetry opentelemetry-exporter-otlp - ${opentelemetry.version} io.opentelemetry opentelemetry-sdk - ${opentelemetry.version} io.opentelemetry opentelemetry-sdk-common - ${opentelemetry.version} io.opentelemetry opentelemetry-sdk-trace - ${opentelemetry.version} jakarta.annotation diff --git a/hadoop-hdds/container-service/pom.xml b/hadoop-hdds/container-service/pom.xml index 5cdded6e7e69..0c455d269591 100644 --- a/hadoop-hdds/container-service/pom.xml +++ b/hadoop-hdds/container-service/pom.xml @@ -81,12 +81,10 @@ io.opentelemetry opentelemetry-api - ${opentelemetry.version} io.opentelemetry opentelemetry-context - ${opentelemetry.version} jakarta.annotation diff --git a/hadoop-hdds/framework/pom.xml b/hadoop-hdds/framework/pom.xml index 32e4d4e9ba34..fed0f112553d 100644 --- a/hadoop-hdds/framework/pom.xml +++ b/hadoop-hdds/framework/pom.xml @@ -95,7 +95,6 @@ io.opentelemetry opentelemetry-api - ${opentelemetry.version} io.prometheus diff --git a/hadoop-ozone/freon/pom.xml b/hadoop-ozone/freon/pom.xml index ef5d4626d3fc..bceadd99f8ca 100644 --- a/hadoop-ozone/freon/pom.xml +++ b/hadoop-ozone/freon/pom.xml @@ -73,7 
+73,6 @@ io.opentelemetry opentelemetry-api - ${opentelemetry.version} org.apache.commons diff --git a/hadoop-ozone/ozonefs-common/pom.xml b/hadoop-ozone/ozonefs-common/pom.xml index 5eff439a34a6..8483ec4cfda5 100644 --- a/hadoop-ozone/ozonefs-common/pom.xml +++ b/hadoop-ozone/ozonefs-common/pom.xml @@ -36,7 +36,6 @@ io.opentelemetry opentelemetry-api - ${opentelemetry.version} jakarta.annotation diff --git a/hadoop-ozone/ozonefs/pom.xml b/hadoop-ozone/ozonefs/pom.xml index a951aeb90741..84ce096e6f39 100644 --- a/hadoop-ozone/ozonefs/pom.xml +++ b/hadoop-ozone/ozonefs/pom.xml @@ -36,7 +36,6 @@ io.opentelemetry opentelemetry-api - ${opentelemetry.version} org.apache.hadoop diff --git a/hadoop-ozone/s3gateway/pom.xml b/hadoop-ozone/s3gateway/pom.xml index b399452b2b35..9b94f4c42734 100644 --- a/hadoop-ozone/s3gateway/pom.xml +++ b/hadoop-ozone/s3gateway/pom.xml @@ -65,7 +65,6 @@ io.opentelemetry opentelemetry-api - ${opentelemetry.version} jakarta.annotation From 93499e9f43557211be0bae4eefda9155e938147f Mon Sep 17 00:00:00 2001 From: Sumit Agrawal Date: Fri, 19 Sep 2025 21:46:08 +0530 Subject: [PATCH 16/16] fix review comment --- hadoop-hdds/docs/content/feature/Observability.md | 3 +-- hadoop-ozone/ozonefs-hadoop3/pom.xml | 8 -------- 2 files changed, 1 insertion(+), 10 deletions(-) diff --git a/hadoop-hdds/docs/content/feature/Observability.md b/hadoop-hdds/docs/content/feature/Observability.md index 04c977da0bf1..cc6df3aebe29 100644 --- a/hadoop-hdds/docs/content/feature/Observability.md +++ b/hadoop-hdds/docs/content/feature/Observability.md @@ -109,8 +109,7 @@ Tracing is turned off by default, but can be turned on with `hdds.tracing.enable ``` -Below are the configuration to be done to provide collector endpoint and sampling strategy. These are environment variables to be set for each Ozone component (OM, SCM, datanode) -and Ozone Client to enable traces such as shell. +Below are the configuration steps for setting the collector endpoint and sampling strategy. 
Set these environment variables for each Ozone component (OM, SCM, datanode) and for the Ozone client to enable tracing. ``` OTEL_EXPORTER_OTLP_ENDPOINT=http://localhost:4317 diff --git a/hadoop-ozone/ozonefs-hadoop3/pom.xml b/hadoop-ozone/ozonefs-hadoop3/pom.xml index b34fd0d9eb75..1d26aafc2d9d 100644 --- a/hadoop-ozone/ozonefs-hadoop3/pom.xml +++ b/hadoop-ozone/ozonefs-hadoop3/pom.xml @@ -29,14 +29,6 @@ org.apache.hadoop.ozone.shaded - - javax.annotation - javax.annotation-api - - - javax.servlet - javax.servlet-api - org.apache.hadoop.thirdparty hadoop-shaded-protobuf_3_25