diff --git a/hadoop-common-project/hadoop-common/pom.xml b/hadoop-common-project/hadoop-common/pom.xml
index 938d0c4506022..14fd51cc13a73 100644
--- a/hadoop-common-project/hadoop-common/pom.xml
+++ b/hadoop-common-project/hadoop-common/pom.xml
@@ -424,6 +424,11 @@
lz4-java
provided
+    <dependency>
+      <groupId>io.opentelemetry</groupId>
+      <artifactId>opentelemetry-api</artifactId>
+      <version>${opentelemetry.version}</version>
+    </dependency>
diff --git a/hadoop-common-project/hadoop-common/src/main/conf/hadoop-env.sh b/hadoop-common-project/hadoop-common/src/main/conf/hadoop-env.sh
index 01559e1fd773c..287c22d00313e 100644
--- a/hadoop-common-project/hadoop-common/src/main/conf/hadoop-env.sh
+++ b/hadoop-common-project/hadoop-common/src/main/conf/hadoop-env.sh
@@ -432,3 +432,13 @@ export HADOOP_OS_TYPE=${HADOOP_OS_TYPE:-$(uname -s)}
# By default, Hadoop uses jsvc which needs to know to launch a
# server jvm.
# export HADOOP_REGISTRYDNS_SECURE_EXTRA_OPTS="-jvm server"
+
+###
+# OpenTelemetry Trace Exporters Configuration
+###
+# export OPENTELEMETRY_JAVAAGENT_PATH="$(find $HADOOP_HOME/share/hadoop/tools/lib/ -name opentelemetry-javaagent*)"
+# export HADOOP_TRACE_OPTS="-javaagent:$OPENTELEMETRY_JAVAAGENT_PATH -Dotel.traces.exporter=jaeger -Dotel.metrics.exporter=none"
+# export HDFS_NAMENODE_OPTS="$HDFS_NAMENODE_OPTS $HADOOP_TRACE_OPTS -Dotel.resource.attributes=service.name=hdfs-namenode"
+# export HDFS_DATANODE_OPTS="$HDFS_DATANODE_OPTS $HADOOP_TRACE_OPTS -Dotel.resource.attributes=service.name=hdfs-datanode"
+# export HDFS_SECONDARYNAMENODE_OPTS="$HDFS_SECONDARYNAMENODE_OPTS $HADOOP_TRACE_OPTS -Dotel.resource.attributes=service.name=hdfs-secondarynamenode"
+# export HADOOP_CLIENT_OPTS="$HADOOP_CLIENT_OPTS $HADOOP_TRACE_OPTS -Dotel.resource.attributes=service.name=hadoop-client"
\ No newline at end of file
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShell.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShell.java
index 73258661ec191..7acff7452be8b 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShell.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShell.java
@@ -301,7 +301,7 @@ private TableListing createOptionTableListing() {
public int run(String[] argv) {
// initialize FsShell
init();
- Tracer tracer = new Tracer.Builder("FsShell").
+ Tracer tracer = new Tracer.Builder().
conf(TraceUtils.wrapHadoopConf(SHELL_HTRACE_PREFIX, getConf())).
build();
int exitCode = -1;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsTracer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsTracer.java
index 5b48b35566706..5bb2986133496 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsTracer.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsTracer.java
@@ -38,7 +38,7 @@ public final class FsTracer {
public static synchronized Tracer get(Configuration conf) {
if (instance == null) {
- instance = new Tracer.Builder("FSClient").
+ instance = new Tracer.Builder().
conf(TraceUtils.wrapHadoopConf(CommonConfigurationKeys.
FS_CLIENT_HTRACE_PREFIX, conf)).
build();
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
index 53497e9707807..a05f5186d86a7 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
@@ -2901,15 +2901,15 @@ private void processRpcRequest(RpcRequestHeaderProto header,
Span span = null;
if (header.hasTraceInfo()) {
RPCTraceInfoProto traceInfoProto = header.getTraceInfo();
- if (traceInfoProto.hasSpanContext()) {
+      if (!traceInfoProto.getOpenSpanContextMap().isEmpty()) {
if (tracer == null) {
setTracer(Tracer.curThreadTracer());
}
if (tracer != null) {
// If the incoming RPC included tracing info, always continue the
// trace
- SpanContext spanCtx = TraceUtils.byteStringToSpanContext(
- traceInfoProto.getSpanContext());
+ SpanContext spanCtx = TraceUtils.mapToSpanContext(
+ traceInfoProto.getOpenSpanContextMap());
if (spanCtx != null) {
span = tracer.newSpan(
RpcClientUtil.toTraceName(rpcRequest.toString()), spanCtx);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tracing/Span.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tracing/Span.java
index 197b29fa3dfe4..44343e0882623 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tracing/Span.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tracing/Span.java
@@ -17,28 +17,62 @@
*/
package org.apache.hadoop.tracing;
+import io.opentelemetry.context.Scope;
+
import java.io.Closeable;
+/**
+ * Wrapper around the OpenTelemetry {@code Span} class so that callers
+ * do not depend on the OpenTelemetry API directly.
+ */
public class Span implements Closeable {
-
+ private io.opentelemetry.api.trace.Span openSpan;
public Span() {
}
+ public Span(io.opentelemetry.api.trace.Span openSpan){
+ this.openSpan = openSpan;
+ }
+
public Span addKVAnnotation(String key, String value) {
+ if(openSpan != null){
+ openSpan.setAttribute(key, value);
+ }
return this;
}
public Span addTimelineAnnotation(String msg) {
+ if(openSpan != null){
+ openSpan.addEvent(msg);
+ }
return this;
}
public SpanContext getContext() {
+ if(openSpan != null){
+ return new SpanContext(openSpan.getSpanContext());
+ }
return null;
}
public void finish() {
+ close();
}
public void close() {
+ if(openSpan != null){
+ openSpan.end();
+ }
+ }
+
+  /**
+   * Activates this span on the current thread.
+   * @return the scope for this span, or null if there is no backing span
+   */
+ public Scope makeCurrent() {
+ if(openSpan != null){
+ return openSpan.makeCurrent();
+ }
+ return null;
}
}
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tracing/SpanContext.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tracing/SpanContext.java
index 363e94dc85dba..d6140897eda0f 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tracing/SpanContext.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tracing/SpanContext.java
@@ -18,15 +18,76 @@
package org.apache.hadoop.tracing;
import java.io.Closeable;
+import java.util.HashMap;
+import java.util.Map;
+
+import io.opentelemetry.api.trace.TraceFlags;
+import io.opentelemetry.api.trace.TraceState;
+import io.opentelemetry.api.trace.TraceStateBuilder;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Wrapper class for SpanContext to avoid using OpenTracing/OpenTelemetry
* SpanContext class directly for better separation.
*/
-public class SpanContext implements Closeable {
- public SpanContext() {
+public class SpanContext implements Closeable {
+ public static final Logger LOG = LoggerFactory.getLogger(SpanContext.class.getName());
+ private static final String TRACE_ID = "TRACE_ID";
+ private static final String SPAN_ID = "SPAN_ID";
+ private static final String TRACE_FLAGS = "TRACE_FLAGS";
+
+
+ private io.opentelemetry.api.trace.SpanContext openSpanContext;
+ public SpanContext(io.opentelemetry.api.trace.SpanContext openSpanContext) {
+ this.openSpanContext = openSpanContext;
}
public void close() {
+
+ }
+
+  public Map<String, String> getKVSpanContext() {
+ if(openSpanContext != null){
+      // TODO: maybe we should move this to Proto
+      Map<String, String> kvMap = new HashMap<>();
+ kvMap.put(TRACE_ID, openSpanContext.getTraceId());
+ kvMap.put(SPAN_ID, openSpanContext.getSpanId());
+ kvMap.put(TRACE_FLAGS, openSpanContext.getTraceFlags().asHex());
+ kvMap.putAll(openSpanContext.getTraceState().asMap());
+ return kvMap;
+ }
+ return null;
+ }
+
+  static SpanContext buildFromKVMap(Map<String, String> kvMap) {
+ try{
+ String traceId = kvMap.get(TRACE_ID);
+ String spanId = kvMap.get(SPAN_ID);
+ String traceFlagsHex = kvMap.get(TRACE_FLAGS);
+ if(traceId == null || spanId == null || traceFlagsHex == null){
+ return null;
+ }
+ TraceFlags traceFlags = TraceFlags.fromHex(traceFlagsHex, 0);
+ TraceStateBuilder traceStateBuilder = TraceState.builder();
+      for (Map.Entry<String, String> keyValue : kvMap.entrySet()) {
+ if(keyValue.getKey().equals(TRACE_ID) || keyValue.getKey().equals(SPAN_ID) || keyValue.getKey().equals(TRACE_FLAGS)){
+ continue;
+ }
+ traceStateBuilder.put(keyValue.getKey(), keyValue.getValue());
+ }
+ TraceState traceState = traceStateBuilder.build();
+ io.opentelemetry.api.trace.SpanContext spanContext = io.opentelemetry.api.trace.SpanContext.createFromRemoteParent(traceId, spanId, traceFlags, traceState );
+ return new SpanContext(spanContext);
+ } catch (Exception e){
+ LOG.error("Error in processing remote context :", e);
+ return null;
+ }
+
+
+ }
+
+ public io.opentelemetry.api.trace.SpanContext getOpenSpanContext() {
+ return openSpanContext;
}
}
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tracing/TraceScope.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tracing/TraceScope.java
index 2abf9cb7ec2bf..fa5229d38c429 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tracing/TraceScope.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tracing/TraceScope.java
@@ -17,23 +17,32 @@
*/
package org.apache.hadoop.tracing;
+import io.opentelemetry.context.Scope;
+
import java.io.Closeable;
public class TraceScope implements Closeable {
Span span;
-
+ Scope openScope;
public TraceScope(Span span) {
this.span = span;
+ if(span != null){
+ this.openScope = span.makeCurrent();
+ }
}
// Add tag to the span
public void addKVAnnotation(String key, String value) {
+    if (span != null) { span.addKVAnnotation(key, value); }
}
public void addKVAnnotation(String key, Number value) {
+    if (span != null) { span.addKVAnnotation(key, value.toString()); }
}
+
public void addTimelineAnnotation(String msg) {
+    if (span != null) { span.addTimelineAnnotation(msg); }
}
public Span span() {
@@ -51,6 +60,9 @@ public void detach() {
}
public void close() {
+ if(openScope != null){
+ openScope.close();
+ }
if (span != null) {
span.close();
}
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tracing/TraceUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tracing/TraceUtils.java
index b218493780ee1..79301088a8b4f 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tracing/TraceUtils.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tracing/TraceUtils.java
@@ -19,17 +19,20 @@
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.thirdparty.protobuf.ByteString;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.Map;
/**
* This class provides utility functions for tracing.
*/
@InterfaceAudience.Private
public class TraceUtils {
+ public static final Logger LOG = LoggerFactory.getLogger(TraceUtils.class.getName());
static final String DEFAULT_HADOOP_TRACE_PREFIX = "hadoop.htrace.";
- public static TraceConfiguration wrapHadoopConf(final String prefix,
- final Configuration conf) {
+ public static TraceConfiguration wrapHadoopConf(final String prefix, final Configuration conf) {
return null;
}
@@ -37,11 +40,14 @@ public static Tracer createAndRegisterTracer(String name) {
return null;
}
- public static SpanContext byteStringToSpanContext(ByteString byteString) {
- return null;
+  public static SpanContext mapToSpanContext(Map<String, String> kvMap) {
+ return SpanContext.buildFromKVMap(kvMap);
}
- public static ByteString spanContextToByteString(SpanContext context) {
- return null;
+  public static Map<String, String> spanContextToMap(SpanContext context) {
+    if (context == null || context.getKVSpanContext() == null) {
+      return java.util.Collections.emptyMap();
+    }
+    return context.getKVSpanContext();
}
}
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tracing/Tracer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tracing/Tracer.java
index a99b004b542f4..4bfad1eb3eb5f 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tracing/Tracer.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tracing/Tracer.java
@@ -17,22 +17,24 @@
*/
package org.apache.hadoop.tracing;
+import io.opentelemetry.api.GlobalOpenTelemetry;
+import io.opentelemetry.context.Context;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* No-Op Tracer (for now) to remove HTrace without changing too many files.
*/
public class Tracer {
+ public static final Logger LOG = LoggerFactory.getLogger(Tracer.class.getName());
+ private static final String INSTRUMENTATION_NAME = "io.opentelemetry.contrib.hadoop";
// Singleton
- private static final Tracer globalTracer = null;
- private final NullTraceScope nullTraceScope;
- private final String name;
+ private static Tracer globalTracer = null;
+ io.opentelemetry.api.trace.Tracer OTelTracer = GlobalOpenTelemetry.getTracer(INSTRUMENTATION_NAME);
public final static String SPAN_RECEIVER_CLASSES_KEY =
"span.receiver.classes";
- public Tracer(String name) {
- this.name = name;
- nullTraceScope = NullTraceScope.INSTANCE;
- }
+ private Tracer() {}
// Keeping this function at the moment for HTrace compatiblity,
// in fact all threads share a single global tracer for OpenTracing.
@@ -45,53 +47,52 @@ public static Tracer curThreadTracer() {
* @return org.apache.hadoop.tracing.Span
*/
public static Span getCurrentSpan() {
- return null;
+ io.opentelemetry.api.trace.Span span = io.opentelemetry.api.trace.Span.current();
+    return span.getSpanContext().isValid() ? new Span(span) : null;
}
public TraceScope newScope(String description) {
- return nullTraceScope;
+ Span span = new Span(OTelTracer.spanBuilder(description).startSpan());
+ return new TraceScope(span);
}
public Span newSpan(String description, SpanContext spanCtx) {
- return new Span();
+ io.opentelemetry.api.trace.Span parentSpan = io.opentelemetry.api.trace.Span.wrap(spanCtx.getOpenSpanContext());
+ io.opentelemetry.api.trace.Span span = OTelTracer.spanBuilder(description).setParent(Context.current().with(parentSpan)).startSpan();
+ return new Span(span);
}
public TraceScope newScope(String description, SpanContext spanCtx) {
- return nullTraceScope;
+ if(spanCtx == null){
+ return new TraceScope(new Span(io.opentelemetry.api.trace.Span.getInvalid()));
+ }
+ return new TraceScope(newSpan(description, spanCtx));
}
public TraceScope newScope(String description, SpanContext spanCtx,
boolean finishSpanOnClose) {
- return nullTraceScope;
+    return newScope(description, spanCtx);
}
public TraceScope activateSpan(Span span) {
- return nullTraceScope;
+ return new TraceScope(span);
}
public void close() {
}
- public String getName() {
- return name;
- }
-
+
public static class Builder {
- static Tracer globalTracer;
- private String name;
+ static Tracer globalTracer = new Tracer();
- public Builder(final String name) {
- this.name = name;
+ public Builder() {
}
public Builder conf(TraceConfiguration conf) {
return this;
}
- public Tracer build() {
- if (globalTracer == null) {
- globalTracer = new Tracer(name);
- }
+ public synchronized Tracer build() {
return globalTracer;
}
}
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ProtoUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ProtoUtil.java
index 883c19c5e7750..fa0c7ceb8e6be 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ProtoUtil.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ProtoUtil.java
@@ -186,8 +186,8 @@ public static RpcRequestHeaderProto makeRpcRequestHeader(RPC.RpcKind rpcKind,
Span span = Tracer.getCurrentSpan();
if (span != null) {
RPCTraceInfoProto.Builder traceInfoProtoBuilder =
- RPCTraceInfoProto.newBuilder().setSpanContext(
- TraceUtils.spanContextToByteString(span.getContext()));
+ RPCTraceInfoProto.newBuilder().putAllOpenSpanContext(
+ TraceUtils.spanContextToMap(span.getContext()));
result.setTraceInfo(traceInfoProtoBuilder);
}
diff --git a/hadoop-common-project/hadoop-common/src/main/proto/RpcHeader.proto b/hadoop-common-project/hadoop-common/src/main/proto/RpcHeader.proto
index d9becf722e982..04b7075c48228 100644
--- a/hadoop-common-project/hadoop-common/src/main/proto/RpcHeader.proto
+++ b/hadoop-common-project/hadoop-common/src/main/proto/RpcHeader.proto
@@ -64,6 +64,7 @@ message RPCTraceInfoProto {
optional int64 traceId = 1; // parentIdHigh
optional int64 parentId = 2; // parentIdLow
optional bytes spanContext = 3; // Trace SpanContext
+  map<string, string> openSpanContext = 4; // OpenTelemetry SpanContext
}
/**
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/tracing/TestTracer.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/tracing/TestTracer.java
new file mode 100644
index 0000000000000..b971fee35a00f
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/tracing/TestTracer.java
@@ -0,0 +1,34 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.tracing;
+
+import org.apache.hadoop.test.HadoopTestBase;
+import org.junit.Test;
+
+import static org.junit.Assert.*;
+
+public class TestTracer extends HadoopTestBase {
+ @Test
+ public void testNoopTracer(){
+ Tracer tracer = new Tracer.Builder().build();
+ assertNotNull(tracer.OTelTracer);
+ }
+
+}
+
+
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/DataTransferProtoUtil.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/DataTransferProtoUtil.java
index 85ee3ce4f4987..88541724ecf8d 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/DataTransferProtoUtil.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/DataTransferProtoUtil.java
@@ -91,8 +91,8 @@ static BaseHeaderProto buildBaseHeader(ExtendedBlock blk,
Span span = Tracer.getCurrentSpan();
if (span != null) {
DataTransferTraceInfoProto.Builder traceInfoProtoBuilder =
- DataTransferTraceInfoProto.newBuilder().setSpanContext(
- TraceUtils.spanContextToByteString(span.getContext()));
+ DataTransferTraceInfoProto.newBuilder().putAllSpanContext(
+ TraceUtils.spanContextToMap(span.getContext()));
builder.setTraceInfo(traceInfoProtoBuilder);
}
return builder.build();
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/Sender.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/Sender.java
index 3d81a62993efc..cf29410aea0cb 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/Sender.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/Sender.java
@@ -216,8 +216,8 @@ public void releaseShortCircuitFds(SlotId slotId) throws IOException {
Span span = Tracer.getCurrentSpan();
if (span != null) {
DataTransferTraceInfoProto.Builder traceInfoProtoBuilder =
- DataTransferTraceInfoProto.newBuilder().setSpanContext(
- TraceUtils.spanContextToByteString(span.getContext()));
+ DataTransferTraceInfoProto.newBuilder().putAllSpanContext(
+ TraceUtils.spanContextToMap(span.getContext()));
builder.setTraceInfo(traceInfoProtoBuilder);
}
ReleaseShortCircuitAccessRequestProto proto = builder.build();
@@ -232,8 +232,8 @@ public void requestShortCircuitShm(String clientName) throws IOException {
Span span = Tracer.getCurrentSpan();
if (span != null) {
DataTransferTraceInfoProto.Builder traceInfoProtoBuilder =
- DataTransferTraceInfoProto.newBuilder().setSpanContext(
- TraceUtils.spanContextToByteString(span.getContext()));
+ DataTransferTraceInfoProto.newBuilder().putAllSpanContext(
+ TraceUtils.spanContextToMap(span.getContext()));
builder.setTraceInfo(traceInfoProtoBuilder);
}
ShortCircuitShmRequestProto proto = builder.build();
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/datatransfer.proto b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/datatransfer.proto
index 5356cd6961699..a8ec8df788f25 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/datatransfer.proto
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/datatransfer.proto
@@ -61,7 +61,7 @@ message BaseHeaderProto {
message DataTransferTraceInfoProto {
optional uint64 traceId = 1;
optional uint64 parentId = 2;
- optional bytes spanContext = 3;
+  map<string, string> spanContext = 3;
}
message ClientOperationHeaderProto {
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/Receiver.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/Receiver.java
index 8bcfb199ff5a9..058526e1bd61e 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/Receiver.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/Receiver.java
@@ -22,6 +22,7 @@
import java.io.DataInputStream;
import java.io.IOException;
+import java.util.Map;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
@@ -78,13 +79,15 @@ protected final Op readOp() throws IOException {
return Op.read(in);
}
- private TraceScope continueTraceSpan(ByteString spanContextBytes,
+  private TraceScope continueTraceSpan(Map<String, String> spanContextMap,
String description) {
TraceScope scope = null;
- SpanContext spanContext =
- TraceUtils.byteStringToSpanContext(spanContextBytes);
- if (spanContext != null) {
- scope = tracer.newScope(description, spanContext);
+ if(!spanContextMap.isEmpty()){
+ SpanContext spanContext =
+ TraceUtils.mapToSpanContext(spanContextMap);
+ if (spanContext != null) {
+ scope = tracer.newScope(description, spanContext);
+ }
}
return scope;
}
@@ -96,7 +99,7 @@ private TraceScope continueTraceSpan(ClientOperationHeaderProto header,
private TraceScope continueTraceSpan(BaseHeaderProto header,
String description) {
- return continueTraceSpan(header.getTraceInfo().getSpanContext(),
+ return continueTraceSpan(header.getTraceInfo().getSpanContextMap(),
description);
}
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/JournalNode.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/JournalNode.java
index 3e8831d8087c7..951705e0f8e83 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/JournalNode.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/JournalNode.java
@@ -190,7 +190,7 @@ public void setConf(Configuration conf) {
}
if (this.tracer == null) {
- this.tracer = new Tracer.Builder("JournalNode").
+ this.tracer = new Tracer.Builder().
conf(TraceUtils.wrapHadoopConf("journalnode.htrace", conf)).
build();
}
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
index 8fb009dab850a..02d3910d1fa74 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
@@ -476,7 +476,7 @@ public static InetSocketAddress createSocketAddr(String target) {
private final SocketFactory socketFactory;
private static Tracer createTracer(Configuration conf) {
- return new Tracer.Builder("DataNode").
+ return new Tracer.Builder().
conf(TraceUtils.wrapHadoopConf(DATANODE_HTRACE_PREFIX, conf)).
build();
}
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java
index cd74511e6bbca..f56e60259f8ae 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java
@@ -1133,7 +1133,7 @@ public NameNode(Configuration conf) throws IOException {
protected NameNode(Configuration conf, NamenodeRole role)
throws IOException {
super(conf);
- this.tracer = new Tracer.Builder("NameNode").
+ this.tracer = new Tracer.Builder().
conf(TraceUtils.wrapHadoopConf(NAMENODE_HTRACE_PREFIX, conf)).
build();
this.role = role;
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NamenodeFsck.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NamenodeFsck.java
index bc6df0141c646..0f3c4093a19c9 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NamenodeFsck.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NamenodeFsck.java
@@ -230,7 +230,7 @@ public class NamenodeFsck implements DataEncryptionKeyFactory {
this.staleInterval =
conf.getLong(DFSConfigKeys.DFS_NAMENODE_STALE_DATANODE_INTERVAL_KEY,
DFSConfigKeys.DFS_NAMENODE_STALE_DATANODE_INTERVAL_DEFAULT);
- this.tracer = new Tracer.Builder("NamenodeFsck").
+ this.tracer = new Tracer.Builder().
conf(TraceUtils.wrapHadoopConf("namenode.fsck.htrace.", conf)).
build();
diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml
index 3be88553b056f..240043ca74a2f 100644
--- a/hadoop-project/pom.xml
+++ b/hadoop-project/pom.xml
@@ -218,6 +218,9 @@
9.31
v12.22.1
v1.22.5
+ 1.10.11
+    <opentelemetry.version>1.3.0</opentelemetry.version>
+    <opentelemetry-instrumentation.version>1.3.0</opentelemetry-instrumentation.version>
1.10.13
1.20
@@ -974,7 +977,12 @@
-
+      <dependency>
+        <groupId>io.opentelemetry.javaagent</groupId>
+        <artifactId>opentelemetry-javaagent</artifactId>
+        <version>${opentelemetry-instrumentation.version}</version>
+        <classifier>all</classifier>
+      </dependency>
io.netty
netty-all
diff --git a/hadoop-tools/hadoop-streaming/pom.xml b/hadoop-tools/hadoop-streaming/pom.xml
index 33e6ca8ff7491..f3bf67d9fd1e8 100644
--- a/hadoop-tools/hadoop-streaming/pom.xml
+++ b/hadoop-tools/hadoop-streaming/pom.xml
@@ -130,6 +130,12 @@
bcpkix-jdk15on
test
+    <dependency>
+      <groupId>io.opentelemetry.javaagent</groupId>
+      <artifactId>opentelemetry-javaagent</artifactId>
+      <version>${opentelemetry-instrumentation.version}</version>
+      <classifier>all</classifier>
+    </dependency>