Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
52 changes: 51 additions & 1 deletion genai_otel_conformance/classification.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,15 @@ def _has_attr_prefix(attrs: dict[str, object], prefix: str) -> bool:
return any(name == prefix or name.startswith(f"{prefix}.") for name in attrs)


# Values of gen_ai.operation.name that denote memory operations.  Spans whose
# operation name is one of these are classified directly as that span type,
# and are excluded from invoke-agent matching even when agent attributes
# (gen_ai.agent.name / gen_ai.agent.id) are present.
_MEMORY_OP_NAMES = {
    "create_memory_store",
    "search_memory",
    "update_memory",
    "delete_memory",
    "delete_memory_store",
}


class SpanInfo(NamedTuple):
"""Pre-extracted span fields passed to each classifier."""
name_lower: str
Expand Down Expand Up @@ -77,6 +86,7 @@ def _is_invoke_agent_like(ctx: SpanInfo) -> bool:
or (
_has_any_attr(ctx.attrs, "gen_ai.agent.name", "gen_ai.agent.id")
and ctx.op_name != "create_agent"
and ctx.op_name not in _MEMORY_OP_NAMES
)
or _has_any_attr(ctx.attrs, "crewai.agent.id", "crewai.agent.role")
or (
Expand Down Expand Up @@ -146,6 +156,44 @@ def _is_retrieval_span(ctx: SpanInfo) -> bool:
]


def _classify_memory_span(ctx: SpanInfo) -> set[str]:
    """Heuristically map a span onto the memory-operation span types.

    Classification proceeds in order of decreasing confidence: an explicit
    memory operation name wins outright; otherwise discriminating
    ``gen_ai.memory.*`` attributes are consulted; finally, spans that only
    carry a store id are disambiguated by keywords in the span name.
    Returns an empty set when nothing memory-related is detected.
    """
    result: set[str] = set()

    # An explicit operation name is authoritative — no further inspection.
    if ctx.op_name in _MEMORY_OP_NAMES:
        result.add(ctx.op_name)
        return result

    # Without at least one gen_ai.memory.* attribute there is nothing to do.
    if not any(attr.startswith("gen_ai.memory.") for attr in ctx.attrs):
        return result

    lookup = ctx.attrs.get
    if (
        lookup("gen_ai.memory.query.text") is not None
        or lookup("gen_ai.memory.search.result.count") is not None
    ):
        # Query text / result count only appear on search operations.
        result.add("search_memory")
    elif lookup("gen_ai.memory.record.content") is not None:
        # Record content only appears on write (update) operations.
        result.add("update_memory")
    elif lookup("gen_ai.memory.store.id") is not None:
        # Only the store id is present: fall back to span-name keywords,
        # checked from most to least specific.
        name = ctx.name_lower
        if "delete" in name:
            result.add("delete_memory_store" if "store" in name else "delete_memory")
        elif "create" in name or "init" in name:
            result.add("create_memory_store")
        elif any(word in name for word in ("search", "query", "recall")):
            result.add("search_memory")
        elif any(word in name for word in ("update", "add", "remember")):
            result.add("update_memory")

    return result


def _classify_span(span_name: str, span_attrs: dict[str, object]) -> set[str]:
"""Classify a span into span types using heuristics on individual span data."""
ctx = SpanInfo(
Expand All @@ -156,11 +204,13 @@ def _classify_span(span_name: str, span_attrs: dict[str, object]) -> set[str]:
attrs=span_attrs,
)

return {
matched_types = {
span_type
for span_type, predicate in _SPAN_TYPE_CLASSIFIERS
if predicate(ctx)
}
matched_types.update(_classify_memory_span(ctx))
return matched_types


def _infer_operation_name(span_name: str, attrs: dict[str, object]) -> str:
Expand Down
74 changes: 74 additions & 0 deletions genai_otel_conformance/specs.py
Original file line number Diff line number Diff line change
Expand Up @@ -97,6 +97,9 @@ def attrs_for_requirement_level(self, level: RequirementLevel) -> tuple[str, ...
"gen_ai.usage.output_tokens",
]

# Requirement-level attribute lists shared by all memory span-type specs.
_MEMORY_COND_REQUIRED = ["server.port"]
_MEMORY_RECOMMENDED = ["server.address"]

SPAN_TYPE_SPECS: dict[str, SignalTypeSpec] = {
"inference": SignalTypeSpec(
label="Inference",
Expand Down Expand Up @@ -204,6 +207,72 @@ def attrs_for_requirement_level(self, level: RequirementLevel) -> tuple[str, ...
"gen_ai.output.messages",
),
),
"create_memory_store": SignalTypeSpec(
label="Create Memory Store",
discriminator_attrs=frozenset({
"gen_ai.memory.store.id",
}),
required=tuple(_COMMON_REQUIRED + _PROVIDER_REQUIRED),
conditionally_required=tuple(_COMMON_COND_REQUIRED + _MEMORY_COND_REQUIRED + [
"gen_ai.memory.store.id",
]),
recommended=tuple(_MEMORY_RECOMMENDED),
opt_in=(),
),
"search_memory": SignalTypeSpec(
label="Search Memory",
discriminator_attrs=frozenset({
"gen_ai.memory.query.text",
"gen_ai.memory.search.result.count",
}),
required=tuple(_COMMON_REQUIRED + _PROVIDER_REQUIRED),
conditionally_required=tuple(_COMMON_COND_REQUIRED + _MEMORY_COND_REQUIRED + [
"gen_ai.memory.store.id",
]),
recommended=tuple(_MEMORY_RECOMMENDED),
opt_in=(
"gen_ai.memory.query.text",
"gen_ai.memory.search.result.count",
),
),
"update_memory": SignalTypeSpec(
label="Update Memory",
discriminator_attrs=frozenset({
"gen_ai.memory.record.id",
"gen_ai.memory.record.content",
}),
required=tuple(_COMMON_REQUIRED + _PROVIDER_REQUIRED),
conditionally_required=tuple(_COMMON_COND_REQUIRED + _MEMORY_COND_REQUIRED + [
"gen_ai.memory.store.id",
"gen_ai.memory.record.id",
]),
recommended=tuple(_MEMORY_RECOMMENDED),
opt_in=("gen_ai.memory.record.content",),
),
"delete_memory": SignalTypeSpec(
label="Delete Memory",
discriminator_attrs=frozenset({
"gen_ai.memory.store.id",
"gen_ai.memory.record.id",
}),
required=tuple(_COMMON_REQUIRED + _PROVIDER_REQUIRED),
conditionally_required=tuple(_COMMON_COND_REQUIRED + _MEMORY_COND_REQUIRED + [
"gen_ai.memory.store.id",
"gen_ai.memory.record.id",
]),
recommended=tuple(_MEMORY_RECOMMENDED),
opt_in=(),
),
"delete_memory_store": SignalTypeSpec(
label="Delete Memory Store",
discriminator_attrs=frozenset({"gen_ai.memory.store.id"}),
required=tuple(_COMMON_REQUIRED + _PROVIDER_REQUIRED),
conditionally_required=tuple(_COMMON_COND_REQUIRED + _MEMORY_COND_REQUIRED + [
"gen_ai.memory.store.id",
]),
recommended=tuple(_MEMORY_RECOMMENDED),
opt_in=(),
),
}

SPAN_TYPE_ORDER = [
Expand All @@ -215,6 +284,11 @@ def attrs_for_requirement_level(self, level: RequirementLevel) -> tuple[str, ...
"embeddings",
"retrieval",
"execute_tool",
"create_memory_store",
"search_memory",
"update_memory",
"delete_memory",
"delete_memory_store",
]

EVENT_TYPE_SPECS: dict[str, SignalTypeSpec] = {
Expand Down
2 changes: 1 addition & 1 deletion run_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@
lang: python | js | java | dotnet
lib: the library under test (may contain hyphens, e.g. spring-ai)
ecosystem: the instrumentation source — otelcontrib, openllmetry,
openinference, or native
openinference, prototype, or native

Requires:
- Python 3.12+ (for mock server)
Expand Down
2 changes: 2 additions & 0 deletions tests/java/aws-bedrock/build.gradle.kts
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,8 @@ repositories {

dependencies {
implementation("software.amazon.awssdk:bedrockruntime:2.42.13")
implementation("software.amazon.awssdk:bedrockagentcore:2.42.13")
implementation("software.amazon.awssdk:bedrockagentcorecontrol:2.42.13")
implementation("software.amazon.awssdk:apache-client:2.42.13")
implementation("io.opentelemetry.instrumentation:opentelemetry-aws-sdk-2.2:2.26.0-alpha")
implementation("io.opentelemetry:opentelemetry-sdk-extension-autoconfigure:1.60.1")
Expand Down
40 changes: 40 additions & 0 deletions tests/java/aws-bedrock/data-prototype.json
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,46 @@
"gen_ai.usage.output_tokens",
"server.address",
"server.port"
],
"create_memory_store": [
"gen_ai.memory.store.id",
"gen_ai.operation.name",
"gen_ai.provider.name",
"server.address",
"server.port"
],
"search_memory": [
"gen_ai.memory.query.text",
"gen_ai.memory.search.result.count",
"gen_ai.memory.store.id",
"gen_ai.operation.name",
"gen_ai.provider.name",
"server.address",
"server.port"
],
"update_memory": [
"gen_ai.memory.record.content",
"gen_ai.memory.record.id",
"gen_ai.memory.store.id",
"gen_ai.operation.name",
"gen_ai.provider.name",
"server.address",
"server.port"
],
"delete_memory": [
"gen_ai.memory.record.id",
"gen_ai.memory.store.id",
"gen_ai.operation.name",
"gen_ai.provider.name",
"server.address",
"server.port"
],
"delete_memory_store": [
"gen_ai.memory.store.id",
"gen_ai.operation.name",
"gen_ai.provider.name",
"server.address",
"server.port"
]
},
"events": {
Expand Down
Loading