OpenInference JS
    Preparing search index...
    AISemanticConventions: {
        EMBEDDING_TEXT: "ai.value";
        EMBEDDING_TEXTS: "ai.values";
        EMBEDDING_VECTOR: "ai.embedding";
        EMBEDDING_VECTORS: "ai.embeddings";
        METADATA: "ai.telemetry.metadata";
        MODEL_ID: "ai.model.id";
        PROMPT: "ai.prompt";
        PROMPT_MESSAGES: "ai.prompt.messages";
        RESPONSE_OBJECT: "ai.response.object";
        RESPONSE_TEXT: "ai.response.text";
        RESPONSE_TOOL_CALLS: "ai.response.toolCalls";
        SETTINGS: "ai.settings";
        TOKEN_COUNT_COMPLETION: "ai.usage.completionTokens";
        TOKEN_COUNT_INPUT: "ai.usage.inputTokens";
        TOKEN_COUNT_OUTPUT: "ai.usage.outputTokens";
        TOKEN_COUNT_PROMPT: "ai.usage.promptTokens";
        TOOL_CALL_ARGS: "ai.toolCall.args";
        TOOL_CALL_ID: "ai.toolCall.id";
        TOOL_CALL_NAME: "ai.toolCall.name";
        TOOL_CALL_RESULT: "ai.toolCall.result";
    } = ...

    The semantic conventions (telemetry span attribute keys such as "ai.prompt" and "ai.usage.promptTokens") used by the Vercel AI SDK.

    Type Declaration

    • EMBEDDING_TEXT: "ai.value"
    • EMBEDDING_TEXTS: "ai.values"
    • EMBEDDING_VECTOR: "ai.embedding"
    • EMBEDDING_VECTORS: "ai.embeddings"
    • METADATA: "ai.telemetry.metadata"
    • MODEL_ID: "ai.model.id"
    • PROMPT: "ai.prompt"
    • PROMPT_MESSAGES: "ai.prompt.messages"
    • RESPONSE_OBJECT: "ai.response.object"
    • RESPONSE_TEXT: "ai.response.text"
    • RESPONSE_TOOL_CALLS: "ai.response.toolCalls"
    • SETTINGS: "ai.settings"
    • TOKEN_COUNT_COMPLETION: "ai.usage.completionTokens"
    • TOKEN_COUNT_INPUT: "ai.usage.inputTokens"
    • TOKEN_COUNT_OUTPUT: "ai.usage.outputTokens"
    • TOKEN_COUNT_PROMPT: "ai.usage.promptTokens"
    • TOOL_CALL_ARGS: "ai.toolCall.args"
    • TOOL_CALL_ID: "ai.toolCall.id"
    • TOOL_CALL_NAME: "ai.toolCall.name"
    • TOOL_CALL_RESULT: "ai.toolCall.result"