Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions sdk/agentguard/cost.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,8 +30,10 @@ class UnknownModelWarning(UserWarning):
("anthropic", "claude-3-5-sonnet-20241022"): (0.003, 0.015),
("anthropic", "claude-3-5-haiku-20241022"): (0.0008, 0.004),
("anthropic", "claude-3-opus-20240229"): (0.015, 0.075),
("anthropic", "claude-sonnet-4-20250514"): (0.003, 0.015),
("anthropic", "claude-sonnet-4-5-20250929"): (0.003, 0.015),
("anthropic", "claude-haiku-4-5-20251001"): (0.0008, 0.004),
("anthropic", "claude-opus-4-20250515"): (0.015, 0.075),
("anthropic", "claude-opus-4-6"): (0.015, 0.075),
# Google
("google", "gemini-1.5-pro"): (0.00125, 0.005),
Expand Down
8 changes: 4 additions & 4 deletions sdk/agentguard/instrument.py
Original file line number Diff line number Diff line change
Expand Up @@ -236,7 +236,7 @@ def _traced_openai_create(
) -> Any:
"""Shared traced wrapper for sync OpenAI create calls."""
model = kwargs.get("model", "unknown")
with tracer.trace(f"llm.openai.{model}") as ctx:
with tracer.trace(f"llm.openai.{model}", data={"model": model, "provider": "openai"}) as ctx:

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

P2: Serialize model before attaching it to span data

This now stores model in span data without sanitization, so a non-JSON-serializable model value (for example an object/enum passed through wrapper code) will make sink emission fail in TraceContext.__enter__ when JsonlFileSink/StdoutSink calls json.dumps(event), aborting the LLM call before original_create runs. Previously this path only interpolated model into the span name (stringified), so this commit introduces a new hard failure mode for malformed-but-commonly-seen dynamic inputs.

Useful? React with 👍 / 👎.

result = original(*args, **kwargs)
_emit_llm_result(ctx, budget_guard, model, "openai", getattr(result, "usage", None))
return result
Expand Down Expand Up @@ -295,7 +295,7 @@ def _patch_anthropic_instance(client: Any, tracer: Any, budget_guard: Any = None
@functools.wraps(original_create)
def traced_create(*args: Any, **kwargs: Any) -> Any:
    """Traced wrapper for sync Anthropic ``create`` calls.

    Opens a span named after the requested model, runs the original
    ``create``, then forwards the response usage to the budget guard
    via ``_emit_llm_result``.
    """
    model = kwargs.get("model", "unknown")
    # Coerce to str before attaching to span data: sinks json.dumps the
    # event on span entry, so a non-string model value (e.g. an enum or
    # object passed through wrapper code) would otherwise raise and
    # abort the LLM call before original_create ever runs.
    model_name = str(model)
    with tracer.trace(
        f"llm.anthropic.{model_name}",
        data={"model": model_name, "provider": "anthropic"},
    ) as ctx:
        result = original_create(*args, **kwargs)
        # Pass the raw model through unchanged to preserve the existing
        # cost-lookup behavior.
        _emit_llm_result(ctx, budget_guard, model, "anthropic", getattr(result, "usage", None))
        return result
Expand Down Expand Up @@ -436,7 +436,7 @@ def _patch_openai_async_instance(client: Any, tracer: Any, budget_guard: Any = N
@functools.wraps(original_create)
async def traced_create(*args: Any, **kwargs: Any) -> Any:
    """Traced wrapper for async OpenAI ``create`` calls.

    Opens a span named after the requested model, awaits the original
    ``create``, then forwards the response usage to the budget guard
    via ``_emit_llm_result``.
    """
    model = kwargs.get("model", "unknown")
    # Coerce to str before attaching to span data: sinks json.dumps the
    # event on span entry, so a non-string model value (e.g. an enum or
    # object passed through wrapper code) would otherwise raise and
    # abort the LLM call before original_create ever runs.
    model_name = str(model)
    async with tracer.trace(
        f"llm.openai.{model_name}",
        data={"model": model_name, "provider": "openai"},
    ) as ctx:
        result = await original_create(*args, **kwargs)
        # Pass the raw model through unchanged to preserve the existing
        # cost-lookup behavior.
        _emit_llm_result(ctx, budget_guard, model, "openai", getattr(result, "usage", None))
        return result
Expand Down Expand Up @@ -494,7 +494,7 @@ def _patch_anthropic_async_instance(client: Any, tracer: Any, budget_guard: Any
@functools.wraps(original_create)
async def traced_create(*args: Any, **kwargs: Any) -> Any:
    """Traced wrapper for async Anthropic ``create`` calls.

    Opens a span named after the requested model, awaits the original
    ``create``, then forwards the response usage to the budget guard
    via ``_emit_llm_result``.
    """
    model = kwargs.get("model", "unknown")
    # Coerce to str before attaching to span data: sinks json.dumps the
    # event on span entry, so a non-string model value (e.g. an enum or
    # object passed through wrapper code) would otherwise raise and
    # abort the LLM call before original_create ever runs.
    model_name = str(model)
    async with tracer.trace(
        f"llm.anthropic.{model_name}",
        data={"model": model_name, "provider": "anthropic"},
    ) as ctx:
        result = await original_create(*args, **kwargs)
        # Pass the raw model through unchanged to preserve the existing
        # cost-lookup behavior.
        _emit_llm_result(ctx, budget_guard, model, "anthropic", getattr(result, "usage", None))
        return result
Expand Down