Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions packages/ai-semantic-conventions/src/SemanticAttributes.ts
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,9 @@ export const SpanAttributes = {
LLM_CHAT_STOP_SEQUENCES: "llm.chat.stop_sequences",
LLM_REQUEST_FUNCTIONS: "llm.request.functions",

// AI SDK
AI_OPERATION_ID: "ai.operationId",

// Vector DB
VECTOR_DB_VENDOR: "db.system",
VECTOR_DB_QUERY_TOP_K: "db.vector.query.top_k",
Expand Down
69 changes: 63 additions & 6 deletions packages/traceloop-sdk/src/lib/tracing/ai-sdk-transformations.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,10 @@ import { ReadableSpan, Span } from "@opentelemetry/sdk-trace-node";
import {
SpanAttributes,
TraceloopSpanKindValues,
LLMRequestTypeValues,
} from "@traceloop/ai-semantic-conventions";

const { AI_OPERATION_ID } = SpanAttributes;
import {
ATTR_GEN_AI_AGENT_NAME,
ATTR_GEN_AI_COMPLETION,
Expand Down Expand Up @@ -490,27 +493,81 @@ const transformVendor = (attributes: Record<string, any>): void => {
}
};

/**
 * Derives and sets the llm.request.type attribute for AI SDK operations.
 *
 * The request type (e.g., "chat") is determined by inspecting either the span
 * name or the ai.operationId attribute, whichever the caller passes in. This
 * dual approach handles cases where the span name has already been transformed
 * by onSpanStart (e.g., "ai.generateText" -> "run.ai"), ensuring the
 * llm.request.type attribute is still set correctly after prior transformations.
 *
 * Fixes: https://github.com/traceloop/openllmetry-js/issues/882
 */
const transformLlmRequestType = (
  attributes: Record<string, any>,
  nameToCheck?: string,
): void => {
  // Nothing to inspect, or the attribute was already set elsewhere — bail out.
  if (!nameToCheck || attributes[SpanAttributes.LLM_REQUEST_TYPE]) {
    return;
  }

  // AI SDK operations that map to a "chat" request type.
  // Note: completion, rerank are not currently used by AI SDK
  // embedding operations are handled separately by the SDK
  const chatOperations = [
    "generateText",
    "streamText",
    "generateObject",
    "streamObject",
  ];

  if (chatOperations.some((op) => nameToCheck.includes(op))) {
    attributes[SpanAttributes.LLM_REQUEST_TYPE] = LLMRequestTypeValues.CHAT;
  }
};

const transformOperationName = (
attributes: Record<string, any>,
spanName?: string,
): void => {
if (!spanName) return;
// Check ai.operationId attribute first (set by Vercel AI SDK)
// This is more reliable since span name may have been transformed already
const operationIdValue = attributes[AI_OPERATION_ID];

// Ensure operationId is a string before using it (may be non-string in some cases)
const operationId =
typeof operationIdValue === "string" ? operationIdValue : undefined;

// Use operationId if available, otherwise fall back to spanName
const nameToCheck = operationId || spanName;
if (!nameToCheck) return;

let operationName: string | undefined;
if (
spanName.includes("generateText") ||
spanName.includes("streamText") ||
spanName.includes("generateObject") ||
spanName.includes("streamObject")
nameToCheck.includes("generateText") ||
nameToCheck.includes("streamText") ||
nameToCheck.includes("generateObject") ||
nameToCheck.includes("streamObject")
) {
operationName = "chat";
} else if (spanName === "ai.toolCall" || spanName.endsWith(".tool")) {
} else if (
nameToCheck === "ai.toolCall" ||
nameToCheck.endsWith(".tool") ||
spanName === "ai.toolCall" ||
(spanName && spanName.endsWith(".tool"))
) {
operationName = "execute_tool";
}

if (operationName) {
attributes[ATTR_GEN_AI_OPERATION_NAME] = operationName;
}

// Also set llm.request.type for AI SDK spans
transformLlmRequestType(attributes, nameToCheck);
};

const transformModelId = (attributes: Record<string, any>): void => {
Expand Down