
Commit 3ce9405

Experimental option to suppress messaging receive spans (#4187)
* Experimental option to suppress messaging receive spans
* Kafka streams too
* Better conditionals
* Remove oops
* Extract base class for kafka streams tests
* Spotless
1 parent 0f3d0cb commit 3ce9405
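
The option is off by default: the new ExperimentalConfig accessor below returns false unless otel.instrumentation.common.experimental.suppress-messaging-receive-spans is set to true, for example with the same JVM argument the new test tasks in this commit pass. When enabled, the Kafka instrumentations changed here stop emitting the CONSUMER "receive" span and instead parent the "process" span directly on the upstream producer span. A minimal sketch of how the flag is consumed, assuming only the accessor added in this commit (the wrapper class and method below are illustrative, not part of the change):

    import io.opentelemetry.instrumentation.api.config.ExperimentalConfig;

    final class ReceiveSpanSuppression {
      private ReceiveSpanSuppression() {}

      // True when receive spans should be suppressed; backed by the property
      // otel.instrumentation.common.experimental.suppress-messaging-receive-spans (default false).
      static boolean enabled() {
        return ExperimentalConfig.get().suppressMessagingReceiveSpans();
      }
    }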

10 files changed: +518 −110 lines


instrumentation-api/src/main/java/io/opentelemetry/instrumentation/api/config/ExperimentalConfig.java

+5
@@ -28,4 +28,9 @@ public boolean suppressControllerSpans() {
   public boolean suppressViewSpans() {
     return config.getBoolean("otel.instrumentation.common.experimental.suppress-view-spans", false);
   }
+
+  public boolean suppressMessagingReceiveSpans() {
+    return config.getBoolean(
+        "otel.instrumentation.common.experimental.suppress-messaging-receive-spans", false);
+  }
 }

instrumentation/kafka-clients/kafka-clients-0.11/javaagent/build.gradle.kts

+12 −1
@@ -39,10 +39,21 @@ tasks {
     jvmArgs("-Dotel.instrumentation.kafka.client-propagation.enabled=false")
   }

-  named<Test>("test") {
+  val testReceiveSpansDisabled by registering(Test::class) {
+    filter {
+      includeTestsMatching("KafkaClientSuppressReceiveSpansTest")
+      isFailOnNoMatchingTests = false
+    }
+    include("**/KafkaClientSuppressReceiveSpansTest.*")
+    jvmArgs("-Dotel.instrumentation.common.experimental.suppress-messaging-receive-spans=true")
+  }
+
+  test {
     dependsOn(testPropagationDisabled)
+    dependsOn(testReceiveSpansDisabled)
     filter {
       excludeTestsMatching("KafkaClientPropagationDisabledTest")
+      excludeTestsMatching("KafkaClientSuppressReceiveSpansTest")
       isFailOnNoMatchingTests = false
     }
   }

instrumentation/kafka-clients/kafka-clients-0.11/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/kafkaclients/KafkaSingletons.java

+9 −2
@@ -6,6 +6,7 @@
 package io.opentelemetry.javaagent.instrumentation.kafkaclients;

 import io.opentelemetry.api.GlobalOpenTelemetry;
+import io.opentelemetry.instrumentation.api.config.ExperimentalConfig;
 import io.opentelemetry.instrumentation.api.instrumenter.Instrumenter;
 import io.opentelemetry.instrumentation.api.instrumenter.InstrumenterBuilder;
 import io.opentelemetry.instrumentation.api.instrumenter.SpanKindExtractor;
@@ -52,6 +53,7 @@ private static Instrumenter<ReceivedRecords, Void> buildConsumerReceiveInstrumen
             GlobalOpenTelemetry.get(), INSTRUMENTATION_NAME, spanNameExtractor)
         .addAttributesExtractor(attributesExtractor)
         .setTimeExtractors(ReceivedRecords::startTime, (request, response, error) -> request.now())
+        .setDisabled(ExperimentalConfig.get().suppressMessagingReceiveSpans())
         .newInstrumenter(SpanKindExtractor.alwaysConsumer());
   }

@@ -69,12 +71,17 @@ private static Instrumenter<ReceivedRecords, Void> buildConsumerReceiveInstrumen
     if (KafkaConsumerExperimentalAttributesExtractor.isEnabled()) {
       builder.addAttributesExtractor(new KafkaConsumerExperimentalAttributesExtractor());
     }
-    if (KafkaPropagation.isPropagationEnabled()) {
+
+    if (!KafkaPropagation.isPropagationEnabled()) {
+      return builder.newInstrumenter(SpanKindExtractor.alwaysConsumer());
+    } else if (ExperimentalConfig.get().suppressMessagingReceiveSpans()) {
+      return builder.newConsumerInstrumenter(new KafkaHeadersGetter());
+    } else {
       builder.addSpanLinksExtractor(
           SpanLinksExtractor.fromUpstreamRequest(
               GlobalOpenTelemetry.getPropagators(), new KafkaHeadersGetter()));
+      return builder.newInstrumenter(SpanKindExtractor.alwaysConsumer());
     }
-    return builder.newInstrumenter(SpanKindExtractor.alwaysConsumer());
   }

   public static Instrumenter<ProducerRecord<?, ?>, Void> producerInstrumenter() {
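
The rewritten conditional above picks one of three consumer "process" instrumenters: with propagation disabled, a plain CONSUMER span; with receive spans suppressed, a consumer instrumenter that extracts the producer context from the Kafka headers so "process" becomes a child of "send"; otherwise the previous default, a CONSUMER span that links back to the upstream producer span. A standalone sketch of that decision, written as if it sat in the same package as KafkaSingletons (the helper class is illustrative and the import paths for the repo-internal types are assumptions):

    import io.opentelemetry.api.GlobalOpenTelemetry;
    import io.opentelemetry.instrumentation.api.config.ExperimentalConfig;
    import io.opentelemetry.instrumentation.api.instrumenter.Instrumenter;
    import io.opentelemetry.instrumentation.api.instrumenter.InstrumenterBuilder;
    import io.opentelemetry.instrumentation.api.instrumenter.SpanKindExtractor;
    import io.opentelemetry.instrumentation.api.instrumenter.SpanLinksExtractor;
    import org.apache.kafka.clients.consumer.ConsumerRecord;

    final class ConsumerProcessInstrumenterSketch {
      private ConsumerProcessInstrumenterSketch() {}

      static Instrumenter<ConsumerRecord<?, ?>, Void> choose(
          InstrumenterBuilder<ConsumerRecord<?, ?>, Void> builder, boolean propagationEnabled) {
        if (!propagationEnabled) {
          // No producer context is read at all: a standalone CONSUMER span.
          return builder.newInstrumenter(SpanKindExtractor.alwaysConsumer());
        }
        if (ExperimentalConfig.get().suppressMessagingReceiveSpans()) {
          // Receive spans suppressed: the producer context is extracted from the record
          // headers, so the "process" span is parented to the upstream "send" span.
          return builder.newConsumerInstrumenter(new KafkaHeadersGetter());
        }
        // Default: keep the receive span as local parent and add a link to the producer span.
        builder.addSpanLinksExtractor(
            SpanLinksExtractor.fromUpstreamRequest(
                GlobalOpenTelemetry.getPropagators(), new KafkaHeadersGetter()));
        return builder.newInstrumenter(SpanKindExtractor.alwaysConsumer());
      }
    }

The KafkaClientSuppressReceiveSpansTest added below exercises the middle branch: a single trace in which "process" is a direct child of "send", rather than the separate receive span plus span link produced by default.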
@@ -15,7 +15,7 @@ import static io.opentelemetry.api.trace.SpanKind.CONSUMER
 import static io.opentelemetry.api.trace.SpanKind.INTERNAL
 import static io.opentelemetry.api.trace.SpanKind.PRODUCER

-class KafkaClientPropagationEnabledTest extends KafkaClientBaseTest {
+class KafkaClientDefaultTest extends KafkaClientBaseTest {

   def "test kafka produce and consume"() {
     when:
@@ -0,0 +1,182 @@
+/*
+ * Copyright The OpenTelemetry Authors
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+import io.opentelemetry.semconv.trace.attributes.SemanticAttributes
+import org.apache.kafka.clients.producer.ProducerRecord
+import org.apache.kafka.common.TopicPartition
+
+import java.time.Duration
+
+import static io.opentelemetry.api.trace.SpanKind.CONSUMER
+import static io.opentelemetry.api.trace.SpanKind.INTERNAL
+import static io.opentelemetry.api.trace.SpanKind.PRODUCER
+
+class KafkaClientSuppressReceiveSpansTest extends KafkaClientBaseTest {
+
+  def "test kafka produce and consume"() {
+    when:
+    String greeting = "Hello Kafka!"
+    runWithSpan("parent") {
+      producer.send(new ProducerRecord(SHARED_TOPIC, greeting)) { meta, ex ->
+        if (ex == null) {
+          runWithSpan("producer callback") {}
+        } else {
+          runWithSpan("producer exception: " + ex) {}
+        }
+      }
+    }
+
+    then:
+    // check that the message was received
+    def records = consumer.poll(Duration.ofSeconds(5).toMillis())
+    for (record in records) {
+      runWithSpan("processing") {
+        assert record.value() == greeting
+        assert record.key() == null
+      }
+    }
+
+    assertTraces(1) {
+      trace(0, 5) {
+        span(0) {
+          name "parent"
+          kind INTERNAL
+          hasNoParent()
+        }
+        span(1) {
+          name SHARED_TOPIC + " send"
+          kind PRODUCER
+          childOf span(0)
+          attributes {
+            "${SemanticAttributes.MESSAGING_SYSTEM.key}" "kafka"
+            "${SemanticAttributes.MESSAGING_DESTINATION.key}" SHARED_TOPIC
+            "${SemanticAttributes.MESSAGING_DESTINATION_KIND.key}" "topic"
+          }
+        }
+        span(2) {
+          name SHARED_TOPIC + " process"
+          kind CONSUMER
+          childOf span(1)
+          attributes {
+            "${SemanticAttributes.MESSAGING_SYSTEM.key}" "kafka"
+            "${SemanticAttributes.MESSAGING_DESTINATION.key}" SHARED_TOPIC
+            "${SemanticAttributes.MESSAGING_DESTINATION_KIND.key}" "topic"
+            "${SemanticAttributes.MESSAGING_OPERATION.key}" "process"
+            "${SemanticAttributes.MESSAGING_MESSAGE_PAYLOAD_SIZE_BYTES.key}" Long
+            "${SemanticAttributes.MESSAGING_KAFKA_PARTITION.key}" { it >= 0 }
+            "kafka.offset" Long
+            "kafka.record.queue_time_ms" { it >= 0 }
+          }
+        }
+        span(3) {
+          name "processing"
+          childOf span(2)
+        }
+        span(4) {
+          name "producer callback"
+          kind INTERNAL
+          childOf span(0)
+        }
+      }
+    }
+  }
+
+  def "test pass through tombstone"() {
+    when:
+    producer.send(new ProducerRecord<>(SHARED_TOPIC, null))
+
+    then:
+    // check that the message was received
+    def records = consumer.poll(Duration.ofSeconds(5).toMillis())
+    for (record in records) {
+      assert record.value() == null
+      assert record.key() == null
+    }
+
+    assertTraces(1) {
+      trace(0, 2) {
+        span(0) {
+          name SHARED_TOPIC + " send"
+          kind PRODUCER
+          hasNoParent()
+          attributes {
+            "${SemanticAttributes.MESSAGING_SYSTEM.key}" "kafka"
+            "${SemanticAttributes.MESSAGING_DESTINATION.key}" SHARED_TOPIC
+            "${SemanticAttributes.MESSAGING_DESTINATION_KIND.key}" "topic"
+            "${SemanticAttributes.MESSAGING_KAFKA_TOMBSTONE.key}" true
+          }
+        }
+        span(1) {
+          name SHARED_TOPIC + " process"
+          kind CONSUMER
+          childOf span(0)
+          attributes {
+            "${SemanticAttributes.MESSAGING_SYSTEM.key}" "kafka"
+            "${SemanticAttributes.MESSAGING_DESTINATION.key}" SHARED_TOPIC
+            "${SemanticAttributes.MESSAGING_DESTINATION_KIND.key}" "topic"
+            "${SemanticAttributes.MESSAGING_OPERATION.key}" "process"
+            "${SemanticAttributes.MESSAGING_MESSAGE_PAYLOAD_SIZE_BYTES.key}" Long
+            "${SemanticAttributes.MESSAGING_KAFKA_PARTITION.key}" { it >= 0 }
+            "${SemanticAttributes.MESSAGING_KAFKA_TOMBSTONE.key}" true
+            "kafka.offset" Long
+            "kafka.record.queue_time_ms" { it >= 0 }
+          }
+        }
+      }
+    }
+  }
+
+  def "test records(TopicPartition) kafka consume"() {
+    setup:
+    def partition = 0
+
+    when: "send message"
+    def greeting = "Hello from MockConsumer!"
+    producer.send(new ProducerRecord<>(SHARED_TOPIC, partition, null, greeting))
+
+    then: "wait for PRODUCER span"
+    waitForTraces(1)
+
+    when: "receive messages"
+    def consumerRecords = consumer.poll(Duration.ofSeconds(5).toMillis())
+    def recordsInPartition = consumerRecords.records(new TopicPartition(SHARED_TOPIC, partition))
+    for (record in recordsInPartition) {
+      assert record.value() == greeting
+      assert record.key() == null
+    }
+
+    then:
+    assertTraces(1) {
+      trace(0, 2) {
+        span(0) {
+          name SHARED_TOPIC + " send"
+          kind PRODUCER
+          hasNoParent()
+          attributes {
+            "${SemanticAttributes.MESSAGING_SYSTEM.key}" "kafka"
+            "${SemanticAttributes.MESSAGING_DESTINATION.key}" SHARED_TOPIC
+            "${SemanticAttributes.MESSAGING_DESTINATION_KIND.key}" "topic"
+            "${SemanticAttributes.MESSAGING_KAFKA_PARTITION.key}" { it >= 0 }
+          }
+        }
+        span(1) {
+          name SHARED_TOPIC + " process"
+          kind CONSUMER
+          childOf span(0)
+          attributes {
+            "${SemanticAttributes.MESSAGING_SYSTEM.key}" "kafka"
+            "${SemanticAttributes.MESSAGING_DESTINATION.key}" SHARED_TOPIC
+            "${SemanticAttributes.MESSAGING_DESTINATION_KIND.key}" "topic"
+            "${SemanticAttributes.MESSAGING_OPERATION.key}" "process"
+            "${SemanticAttributes.MESSAGING_MESSAGE_PAYLOAD_SIZE_BYTES.key}" Long
+            "${SemanticAttributes.MESSAGING_KAFKA_PARTITION.key}" { it >= 0 }
+            "kafka.offset" Long
+            "kafka.record.queue_time_ms" { it >= 0 }
+          }
+        }
+      }
+    }
+  }
+}

instrumentation/kafka-streams-0.11/javaagent/build.gradle.kts

+18 −1
@@ -25,10 +25,27 @@ dependencies {
 }

 tasks {
-  test {
+  withType<Test>().configureEach {
     usesService(gradle.sharedServices.registrations["testcontainersBuildService"].service)

     // TODO run tests both with and without experimental span attributes
     jvmArgs("-Dotel.instrumentation.kafka.experimental-span-attributes=true")
   }
+
+  val testReceiveSpansDisabled by registering(Test::class) {
+    filter {
+      includeTestsMatching("KafkaStreamsSuppressReceiveSpansTest")
+      isFailOnNoMatchingTests = false
+    }
+    include("**/KafkaStreamsSuppressReceiveSpansTest.*")
+    jvmArgs("-Dotel.instrumentation.common.experimental.suppress-messaging-receive-spans=true")
+  }
+
+  test {
+    dependsOn(testReceiveSpansDisabled)
+    filter {
+      excludeTestsMatching("KafkaStreamsSuppressReceiveSpansTest")
+      isFailOnNoMatchingTests = false
+    }
+  }
 }

instrumentation/kafka-streams-0.11/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/kafkastreams/KafkaStreamsSingletons.java

+8 −2
@@ -6,6 +6,7 @@
 package io.opentelemetry.javaagent.instrumentation.kafkastreams;

 import io.opentelemetry.api.GlobalOpenTelemetry;
+import io.opentelemetry.instrumentation.api.config.ExperimentalConfig;
 import io.opentelemetry.instrumentation.api.instrumenter.Instrumenter;
 import io.opentelemetry.instrumentation.api.instrumenter.InstrumenterBuilder;
 import io.opentelemetry.instrumentation.api.instrumenter.SpanKindExtractor;
@@ -40,12 +41,17 @@ public final class KafkaStreamsSingletons {
     if (KafkaConsumerExperimentalAttributesExtractor.isEnabled()) {
       builder.addAttributesExtractor(new KafkaConsumerExperimentalAttributesExtractor());
     }
-    if (KafkaPropagation.isPropagationEnabled()) {
+
+    if (!KafkaPropagation.isPropagationEnabled()) {
+      return builder.newInstrumenter(SpanKindExtractor.alwaysConsumer());
+    } else if (ExperimentalConfig.get().suppressMessagingReceiveSpans()) {
+      return builder.newConsumerInstrumenter(new KafkaHeadersGetter());
+    } else {
       builder.addSpanLinksExtractor(
          SpanLinksExtractor.fromUpstreamRequest(
              GlobalOpenTelemetry.getPropagators(), new KafkaHeadersGetter()));
+      return builder.newInstrumenter(SpanKindExtractor.alwaysConsumer());
     }
-    return builder.newInstrumenter(SpanKindExtractor.alwaysConsumer());
   }

   public static Instrumenter<ConsumerRecord<?, ?>, Void> instrumenter() {
