
Commit

Merge branch 'master' into jb/jfr_stackdepth
jbachorik authored Dec 13, 2023
2 parents 3b7f05e + 1878db6 commit 12f5d1c
Showing 27 changed files with 538 additions and 70 deletions.
37 changes: 37 additions & 0 deletions .gitlab/benchmarks.yml
@@ -30,3 +30,40 @@ benchmarks:

KUBERNETES_SERVICE_ACCOUNT_OVERWRITE: dd-trace-java
FF_USE_LEGACY_KUBERNETES_EXECUTION_STRATEGY: "true"

.dsm-kafka-benchmarks:
stage: benchmarks
rules:
- if: $CI_PIPELINE_SOURCE != "schedule"
changes:
paths:
- dd-java-agent/instrumentation/kafka*/**/*
compare_to: "master"
when: on_success
- when: manual
tags: ["runner:apm-k8s-tweaked-metal"]
interruptible: true
timeout: 1h
image: 486234852809.dkr.ecr.us-east-1.amazonaws.com/ci/benchmarking-platform:java-dsm-kafka
script:
- git clone --branch java/kafka-dsm-overhead https://gitlab-ci-token:${CI_JOB_TOKEN}@gitlab.ddbuild.io/DataDog/benchmarking-platform.git platform && cd platform
- ./steps/run-benchmarks.sh
artifacts:
name: "artifacts"
when: always
paths:
- platform/artifacts/
expire_in: 3 months
variables:
FF_USE_LEGACY_KUBERNETES_EXECUTION_STRATEGY: "true"

dsm-kafka-producer-benchmark:
extends: .dsm-kafka-benchmarks
variables:
BP_KAFKA_SCENARIO_DIR: producer-benchmark

dsm-kafka-consumer-benchmark:
extends: .dsm-kafka-benchmarks
variables:
BP_KAFKA_SCENARIO_DIR: consumer-benchmark

@@ -98,6 +98,9 @@ public interface KnownAddresses {
// XXX: Not really used yet, but it's a known address and we should not treat it as unknown.
Address<Object> GRPC_SERVER_REQUEST_METADATA = new Address<>("grpc.server.request.metadata");

// XXX: Not really used yet, but it's a known address and we should not treat it as unknown.
Address<Object> GRAPHQL_SERVER_ALL_RESOLVERS = new Address<>("graphql.server.all_resolvers");

Address<String> USER_ID = new Address<>("usr.id");

Address<Map<String, Object>> WAF_CONTEXT_PROCESSOR = new Address<>("waf.context.processor");
@@ -148,6 +151,8 @@ static Address<?> forName(String name) {
return GRPC_SERVER_REQUEST_MESSAGE;
case "grpc.server.request.metadata":
return GRPC_SERVER_REQUEST_METADATA;
case "graphql.server.all_resolvers":
return GRAPHQL_SERVER_ALL_RESOLVERS;
case "usr.id":
return USER_ID;
case "waf.context.processor":
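The new graphql.server.all_resolvers entry is wired into both the constant table and the forName switch above. A minimal usage sketch (hypothetical caller code, not part of this commit) of resolving the address by name:

    // Hypothetical lookup of the newly registered address by name.
    Address<?> addr = KnownAddresses.forName("graphql.server.all_resolvers");
    // forName returns the shared constant, so identity comparison holds.
    assert addr == KnownAddresses.GRAPHQL_SERVER_ALL_RESOLVERS;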
@@ -30,14 +30,15 @@ class KnownAddressesSpecification extends Specification {
'server.request.headers.no_cookies',
'grpc.server.request.message',
'grpc.server.request.metadata',
'graphql.server.all_resolvers',
'usr.id',
'waf.context.processor',
]
}

void 'number of known addresses is expected number'() {
expect:
Address.instanceCount() == 24
Address.instanceCount() == 25
KnownAddresses.WAF_CONTEXT_PROCESSOR.serial == Address.instanceCount() - 1
}
}
@@ -35,21 +35,10 @@ public DBMCompatibleConnectionInstrumentation() {
// Classes to cover all currently supported
// db types for the Database Monitoring product
static final String[] CONCRETE_TYPES = {
// should cover mysql
"com.mysql.jdbc.Connection",
"com.mysql.jdbc.jdbc1.Connection",
"com.mysql.jdbc.jdbc2.Connection",
"com.mysql.jdbc.ConnectionImpl",
"com.mysql.jdbc.JDBC4Connection",
"com.mysql.cj.jdbc.ConnectionImpl",
// should cover Oracle
"oracle.jdbc.driver.PhysicalConnection",
// complete
"org.mariadb.jdbc.MySQLConnection",
// MariaDB Connector/J v2.x
"org.mariadb.jdbc.MariaDbConnection",
// MariaDB Connector/J v3.x
"org.mariadb.jdbc.Connection",
"com.microsoft.sqlserver.jdbc.SQLServerConnection",
// jtds (for SQL Server and Sybase)
"net.sourceforge.jtds.jdbc.ConnectionJDBC2", // 1.2
"net.sourceforge.jtds.jdbc.JtdsConnection", // 1.3
// postgresql seems to be complete
"org.postgresql.jdbc.PgConnection",
"org.postgresql.jdbc1.Connection",
@@ -62,8 +51,8 @@ public DBMCompatibleConnectionInstrumentation() {
"postgresql.Connection",
// EDB version of postgresql
"com.edb.jdbc.PgConnection",
// aws-mysql-jdbc
"software.aws.rds.jdbc.mysql.shading.com.mysql.cj.jdbc.ConnectionImpl",
// should cover Oracle
"oracle.jdbc.driver.PhysicalConnection",
};

@Override
@@ -111,7 +100,7 @@ public static String onEnter(
final DBInfo dbInfo =
JDBCDecorator.parseDBInfo(
connection, InstrumentationContext.get(Connection.class, DBInfo.class));
sql = SQLCommenter.prepend(sql, DECORATE.getDbService(dbInfo));
sql = SQLCommenter.append(sql, DECORATE.getDbService(dbInfo));
return inputSql;
}
return sql;
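The only behavioral change in this advice is where SQLCommenter injects the Database Monitoring comment: this instrumentation now appends it, while the MySQL-oriented instrumentation below prepends it. A simplified illustration of the difference (the real comment carries more key/value pairs than the single dddbs entry shown here):

    // Illustrative only -- simplified comment content, not the exact SQLCommenter output.
    String sql = "SELECT * FROM users";
    String appended  = sql + " /*dddbs='my-service'*/";  // append: comment after the statement
    String prepended = "/*dddbs='my-service'*/ " + sql;  // prepend: comment before the statement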
@@ -24,24 +24,34 @@
import net.bytebuddy.asm.Advice;

@AutoService(Instrumenter.class)
public class SQLServerConnectionInstrumentation extends AbstractConnectionInstrumentation
public class MySQLConnectionInstrumentation extends AbstractConnectionInstrumentation
implements Instrumenter.ForKnownTypes {

/**
* Instrumentation class for connections for SQL Server, which is a Database Monitoring supported
* DB *
* Instrumentation class for connections for MySQL, which is a Database Monitoring supported DB *
*/
public SQLServerConnectionInstrumentation() {
super("jdbc", "dbm-sqlserver");
public MySQLConnectionInstrumentation() {
super("jdbc", "dbm-mysql");
}

// Classes to cover all currently supported
// db types for the Database Monitoring product
static final String[] CONCRETE_TYPES = {
"com.microsoft.sqlserver.jdbc.SQLServerConnection",
// jtds (for SQL Server and Sybase)
"net.sourceforge.jtds.jdbc.ConnectionJDBC2", // 1.2
"net.sourceforge.jtds.jdbc.JtdsConnection", // 1.3
// should cover mysql
"com.mysql.jdbc.Connection",
"com.mysql.jdbc.jdbc1.Connection",
"com.mysql.jdbc.jdbc2.Connection",
"com.mysql.jdbc.ConnectionImpl",
"com.mysql.jdbc.JDBC4Connection",
"com.mysql.cj.jdbc.ConnectionImpl",
// complete
"org.mariadb.jdbc.MySQLConnection",
// MariaDB Connector/J v2.x
"org.mariadb.jdbc.MariaDbConnection",
// MariaDB Connector/J v3.x
"org.mariadb.jdbc.Connection",
// aws-mysql-jdbc
"software.aws.rds.jdbc.mysql.shading.com.mysql.cj.jdbc.ConnectionImpl",
};

@Override
Expand All @@ -63,7 +73,7 @@ public void adviceTransformations(AdviceTransformation transformation) {
.and(takesArgument(0, String.class))
// Also include CallableStatement, which is a subtype of PreparedStatement
.and(returns(hasInterface(named("java.sql.PreparedStatement")))),
SQLServerConnectionInstrumentation.class.getName() + "$ConnectionAdvice");
MySQLConnectionInstrumentation.class.getName() + "$ConnectionAdvice");
}

@Override
Expand All @@ -89,7 +99,7 @@ public static String onEnter(
final DBInfo dbInfo =
JDBCDecorator.parseDBInfo(
connection, InstrumentationContext.get(Connection.class, DBInfo.class));
sql = SQLCommenter.append(sql, DECORATE.getDbService(dbInfo));
sql = SQLCommenter.prepend(sql, DECORATE.getDbService(dbInfo));
return inputSql;
}
return sql;
4 changes: 2 additions & 2 deletions dd-java-agent/instrumentation/maven-3.2.1/build.gradle
@@ -23,7 +23,7 @@ dependencies {
testImplementation group: 'org.apache.maven.resolver', name: 'maven-resolver-transport-http', version: '1.0.3'

latestDepTestImplementation group: 'org.apache.maven', name: 'maven-embedder', version: '+'
latestDepTestImplementation group: 'org.apache.maven.resolver', name: 'maven-resolver-connector-basic', version: '1.+'
latestDepTestImplementation group: 'org.apache.maven.resolver', name: 'maven-resolver-transport-http', version: '1.+'
latestDepTestImplementation group: 'org.apache.maven.resolver', name: 'maven-resolver-connector-basic', version: '+'
latestDepTestImplementation group: 'org.apache.maven.resolver', name: 'maven-resolver-transport-http', version: '+'
latestDepTestImplementation group: 'org.fusesource.jansi', name: 'jansi', version: '+'
}
@@ -166,7 +166,7 @@ private Collection<MavenTestExecution> getTestExecutionsByJvmPath(
MavenSession session, MavenProject project) {
List<MavenTestExecution> testExecutions = new ArrayList<>();
try {
PlexusContainer container = session.getContainer();
PlexusContainer container = MavenUtils.getContainer(session);

MavenPluginManager mavenPluginManager = container.lookup(MavenPluginManager.class);
BuildPluginManager buildPluginManager = container.lookup(BuildPluginManager.class);
@@ -1,5 +1,7 @@
package datadog.trace.instrumentation.maven3;

import datadog.trace.util.MethodHandles;
import java.lang.invoke.MethodHandle;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
@@ -13,6 +15,7 @@
import org.apache.maven.model.Plugin;
import org.apache.maven.plugin.MojoExecution;
import org.apache.maven.project.MavenProject;
import org.codehaus.plexus.PlexusContainer;
import org.codehaus.plexus.logging.Logger;
import org.codehaus.plexus.util.xml.Xpp3Dom;

@@ -273,4 +276,22 @@ private static Xpp3Dom getChild(Xpp3Dom parent, String[] path, boolean createIfN
}
return current;
}

private static final MethodHandles METHOD_HANDLES =
new MethodHandles(PlexusContainer.class.getClassLoader());
private static final MethodHandle SESSION_FIELD =
METHOD_HANDLES.privateFieldGetter(MavenSession.class, "session");
private static final MethodHandle CONTAINER_FIELD =
METHOD_HANDLES.privateFieldGetter(
"org.apache.maven.internal.impl.DefaultSession", "container");

public static PlexusContainer getContainer(MavenSession mavenSession) {
PlexusContainer container = mavenSession.getContainer();
if (container != null) {
return container;
}
Object /* org.apache.maven.internal.impl.DefaultSession */ session =
METHOD_HANDLES.invoke(SESSION_FIELD, mavenSession);
return METHOD_HANDLES.invoke(CONTAINER_FIELD, session);
}
}
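The new getContainer helper first tries MavenSession.getContainer() and only falls back to reflective field access when that returns null, reading the wrapped org.apache.maven.internal.impl.DefaultSession and its container field via the internal datadog.trace.util.MethodHandles helper. A minimal sketch of the same mechanism using only the JDK API (the helper method below is hypothetical; the field and class names come from the hunk above):

    import java.lang.invoke.MethodHandle;
    import java.lang.invoke.MethodHandles;
    import java.lang.reflect.Field;

    // Roughly what privateFieldGetter(...) + invoke(...) do in the diff:
    // build a MethodHandle getter for a private field and read it.
    static Object readPrivateField(Object target, String fieldName) throws Throwable {
      Field f = target.getClass().getDeclaredField(fieldName);
      f.setAccessible(true);
      MethodHandle getter = MethodHandles.lookup().unreflectGetter(f);
      return getter.invoke(target);
    }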
@@ -175,10 +175,10 @@ class OpenTelemetry14Test extends AgentTestRunner {

def expectedLinksTag = """
[
{ traceId: "${traceId}",
spanId: "${spanId}",
traceFlags: 1,
traceState: "string-key=string-value"}
{ trace_id: "${traceId}",
span_id: "${spanId}",
flags: 1,
tracestate: "string-key=string-value"}
]"""

when:
@@ -212,10 +212,10 @@ class OpenTelemetry14Test extends AgentTestRunner {
def traceId = "1234567890abcdef1234567890abcde$it" as String
def spanId = "fedcba098765432$it" as String
def traceState = TraceState.builder().put('string-key', 'string-value'+it).build()
links << """{ traceId: "${traceId}",
spanId: "${spanId}",
traceFlags: 1,
traceState: "string-key=string-value$it"}"""
links << """{ trace_id: "${traceId}",
span_id: "${spanId}",
flags: 1,
tracestate: "string-key=string-value$it"}"""
spanBuilder.addLink(SpanContext.create(traceId, spanId, TraceFlags.getSampled(), traceState))
}
def expectedLinksTag = "[${links.join(',')}]" as String
@@ -244,10 +244,10 @@ class OpenTelemetry14Test extends AgentTestRunner {

def expectedLinksTag = """
[
{ traceId: "${traceId}",
spanId: "${spanId}",
traceFlags: 1,
traceState: "string-key=string-value"
{ trace_id: "${traceId}",
span_id: "${spanId}",
flags: 1,
tracestate: "string-key=string-value"
${ expectedAttributes == null ? "" : ", attributes: " + expectedAttributes }}
]"""

@@ -282,12 +282,12 @@ class OpenTelemetry14Test extends AgentTestRunner {
def traceId = "1234567890abcdef1234567890abcdef" as String
def spanId = "fedcba0987654321" as String

def expectedTraceStateJson = expectedTraceState == null ? '' : ", traceState: \"$expectedTraceState\""
def expectedTraceStateJson = expectedTraceState == null ? '' : ", tracestate: \"$expectedTraceState\""
def expectedLinksTag = """
[
{ traceId: "${traceId}",
spanId: "${spanId}",
traceFlags: 1
{ trace_id: "${traceId}",
span_id: "${spanId}",
flags: 1
$expectedTraceStateJson
}
]"""
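These assertions switch the expected span-link tag to snake_case keys: trace_id, span_id, flags and tracestate. A hypothetical helper (not part of the tracer or of this test) showing how a SpanContext's fields map onto those keys, using the same OpenTelemetry API types the test already uses:

    import io.opentelemetry.api.trace.SpanContext;
    import java.util.stream.Collectors;

    // Hypothetical serializer -- demonstrates only the key names the updated assertions expect.
    static String toLinkJson(SpanContext ctx) {
      String tracestate = ctx.getTraceState().asMap().entrySet().stream()
          .map(e -> e.getKey() + "=" + e.getValue())
          .collect(Collectors.joining(","));
      return "{ trace_id: \"" + ctx.getTraceId() + "\","
          + " span_id: \"" + ctx.getSpanId() + "\","
          + " flags: " + ctx.getTraceFlags().asByte() + ","
          + " tracestate: \"" + tracestate + "\"}";
    }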
@@ -3,6 +3,7 @@
import java.util.ArrayList;
import org.apache.spark.SparkConf;
import org.apache.spark.scheduler.SparkListenerJobStart;
import org.apache.spark.scheduler.StageInfo;

/**
* DatadogSparkListener compiled for Scala 2.12
@@ -36,4 +37,14 @@ protected String getSparkJobName(SparkListenerJobStart jobStart) {
protected int getStageCount(SparkListenerJobStart jobStart) {
return jobStart.stageInfos().length();
}

@Override
protected int[] getStageParentIds(StageInfo info) {
int[] parentIds = new int[info.parentIds().length()];
for (int i = 0; i < parentIds.length; i++) {
parentIds[i] = (int) info.parentIds().apply(i);
}

return parentIds;
}
}
@@ -3,6 +3,7 @@
import java.util.ArrayList;
import org.apache.spark.SparkConf;
import org.apache.spark.scheduler.SparkListenerJobStart;
import org.apache.spark.scheduler.StageInfo;

/**
* DatadogSparkListener compiled for Scala 2.13
@@ -36,4 +37,14 @@ protected String getSparkJobName(SparkListenerJobStart jobStart) {
protected int getStageCount(SparkListenerJobStart jobStart) {
return jobStart.stageInfos().length();
}

@Override
protected int[] getStageParentIds(StageInfo info) {
int[] parentIds = new int[info.parentIds().length()];
for (int i = 0; i < parentIds.length; i++) {
parentIds[i] = (int) info.parentIds().apply(i);
}

return parentIds;
}
}
