Parameters to create a prompt

interface CreatePromptParams {
    client?: Client<paths, `${string}/${string}`>;
    description?: string;
    name: string;
    source_prompt_id?: null | string;
    version: {
        description?: null | string;
        invocation_parameters:
            | {
                anthropic: {
                    max_tokens: number;
                    stop_sequences?: string[];
                    temperature?: number;
                    top_p?: number;
                };
                type: "anthropic";
            }
            | {
                azure_openai: {
                    frequency_penalty?: number;
                    max_completion_tokens?: number;
                    max_tokens?: number;
                    presence_penalty?: number;
                    reasoning_effort?: "low"
                    | "medium"
                    | "high";
                    seed?: number;
                    temperature?: number;
                    top_p?: number;
                };
                type: "azure_openai";
            }
            | {
                google: {
                    frequency_penalty?: number;
                    max_output_tokens?: number;
                    presence_penalty?: number;
                    stop_sequences?: string[];
                    temperature?: number;
                    top_k?: number;
                    top_p?: number;
                };
                type: "google";
            }
            | {
                openai: {
                    frequency_penalty?: number;
                    max_completion_tokens?: number;
                    max_tokens?: number;
                    presence_penalty?: number;
                    reasoning_effort?: "low"
                    | "medium"
                    | "high";
                    seed?: number;
                    temperature?: number;
                    top_p?: number;
                };
                type: "openai";
            };
        model_name: string;
        model_provider: "OPENAI"
        | "AZURE_OPENAI"
        | "ANTHROPIC"
        | "GOOGLE";
        response_format?:
            | null
            | {
                json_schema: {
                    description?: string;
                    name: string;
                    schema?: Record<string, unknown>;
                    strict?: boolean;
                };
                type: "json_schema";
            };
        template: | {
            messages: {
                content: | string
                | (
                    | { text: string; type: "text" }
                    | {
                        tool_call: { arguments: string; name: string; type: "function" };
                        tool_call_id: string;
                        type: "tool_call";
                    }
                    | {
                        tool_call_id: string;
                        tool_result:
                            | null
                            | string
                            | number
                            | boolean
                            | Record<(...), (...)>
                            | (...)[];
                        type: "tool_result";
                    }
                )[];
                role: | "user"
                | "assistant"
                | "model"
                | "ai"
                | "tool"
                | "system"
                | "developer";
            }[];
            type: "chat";
        }
        | { template: string; type: "string" };
        template_format: "MUSTACHE" | "F_STRING" | "NONE";
        template_type: "STR" | "CHAT";
        tools?:
            | null
            | {
                disable_parallel_tool_calls?: boolean;
                tool_choice?: | { type: "none" }
                | { type: "one_or_more" }
                | { function_name: string; type: "specific_function" }
                | { type: "zero_or_more" };
                tools: {
                    function: {
                        description?: string;
                        name: string;
                        parameters?: Record<string, unknown>;
                        strict?: boolean;
                    };
                    type: "function";
                }[];
                type: "tools";
            };
    };
}
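
A minimal sketch of a params object for this interface, assuming an OpenAI chat prompt (the prompt name, model name, and template text below are illustrative, not defaults; the import of CreatePromptParams from the Phoenix client package is omitted):

const params: CreatePromptParams = {
    name: "my-prompt",
    description: "Summarizes a support ticket",
    version: {
        model_provider: "OPENAI",
        model_name: "gpt-4o-mini",
        template_type: "CHAT",
        template_format: "MUSTACHE",
        template: {
            type: "chat",
            messages: [
                { role: "system", content: "You are a concise assistant." },
                { role: "user", content: "Summarize this ticket: {{ticket}}" },
            ],
        },
        invocation_parameters: {
            type: "openai",
            openai: { temperature: 0.2, max_completion_tokens: 512 },
        },
    },
};

An object of this shape is what gets passed to the client's prompt-creation call that accepts CreatePromptParams.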

Properties

client?: Client<paths, `${string}/${string}`>

An instance of the Phoenix client. If not provided, the client will be created using the default configuration.

description?: string

The description of the prompt

name: string

The name of the prompt

source_prompt_id?: null | string

The ID of the source prompt, if any

version: {
    description?: null | string;
    invocation_parameters:
        | {
            anthropic: {
                max_tokens: number;
                stop_sequences?: string[];
                temperature?: number;
                top_p?: number;
            };
            type: "anthropic";
        }
        | {
            azure_openai: {
                frequency_penalty?: number;
                max_completion_tokens?: number;
                max_tokens?: number;
                presence_penalty?: number;
                reasoning_effort?: "low"
                | "medium"
                | "high";
                seed?: number;
                temperature?: number;
                top_p?: number;
            };
            type: "azure_openai";
        }
        | {
            google: {
                frequency_penalty?: number;
                max_output_tokens?: number;
                presence_penalty?: number;
                stop_sequences?: string[];
                temperature?: number;
                top_k?: number;
                top_p?: number;
            };
            type: "google";
        }
        | {
            openai: {
                frequency_penalty?: number;
                max_completion_tokens?: number;
                max_tokens?: number;
                presence_penalty?: number;
                reasoning_effort?: "low"
                | "medium"
                | "high";
                seed?: number;
                temperature?: number;
                top_p?: number;
            };
            type: "openai";
        };
    model_name: string;
    model_provider: "OPENAI"
    | "AZURE_OPENAI"
    | "ANTHROPIC"
    | "GOOGLE";
    response_format?:
        | null
        | {
            json_schema: {
                description?: string;
                name: string;
                schema?: Record<string, unknown>;
                strict?: boolean;
            };
            type: "json_schema";
        };
    template: | {
        messages: {
            content: | string
            | (
                | { text: string; type: "text" }
                | {
                    tool_call: { arguments: string; name: string; type: "function" };
                    tool_call_id: string;
                    type: "tool_call";
                }
                | {
                    tool_call_id: string;
                    tool_result:
                        | null
                        | string
                        | number
                        | boolean
                        | Record<(...), (...)>
                        | (...)[];
                    type: "tool_result";
                }
            )[];
            role: | "user"
            | "assistant"
            | "model"
            | "ai"
            | "tool"
            | "system"
            | "developer";
        }[];
        type: "chat";
    }
    | { template: string; type: "string" };
    template_format: "MUSTACHE" | "F_STRING" | "NONE";
    template_type: "STR" | "CHAT";
    tools?:
        | null
        | {
            disable_parallel_tool_calls?: boolean;
            tool_choice?: | { type: "none" }
            | { type: "one_or_more" }
            | { function_name: string; type: "specific_function" }
            | { type: "zero_or_more" };
            tools: {
                function: {
                    description?: string;
                    name: string;
                    parameters?: Record<string, unknown>;
                    strict?: boolean;
                };
                type: "function";
            }[];
            type: "tools";
        };
}

The prompt version to append to the prompt's version history

Type declaration

  • Optional description?: null | string

    The description of this prompt version

  • invocation_parameters:
        | {
            anthropic: {
                max_tokens: number;
                stop_sequences?: string[];
                temperature?: number;
                top_p?: number;
            };
            type: "anthropic";
        }
        | {
            azure_openai: {
                frequency_penalty?: number;
                max_completion_tokens?: number;
                max_tokens?: number;
                presence_penalty?: number;
                reasoning_effort?: "low"
                | "medium"
                | "high";
                seed?: number;
                temperature?: number;
                top_p?: number;
            };
            type: "azure_openai";
        }
        | {
            google: {
                frequency_penalty?: number;
                max_output_tokens?: number;
                presence_penalty?: number;
                stop_sequences?: string[];
                temperature?: number;
                top_k?: number;
                top_p?: number;
            };
            type: "google";
        }
        | {
            openai: {
                frequency_penalty?: number;
                max_completion_tokens?: number;
                max_tokens?: number;
                presence_penalty?: number;
                reasoning_effort?: "low"
                | "medium"
                | "high";
                seed?: number;
                temperature?: number;
                top_p?: number;
            };
            type: "openai";
        }

    Invocation Parameters
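
    Each union member pairs a type discriminant with a key of the same name holding that provider's parameters; note that max_tokens is required only in the anthropic variant. A minimal sketch for the Anthropic case (values are illustrative):

    const invocationParameters: CreatePromptParams["version"]["invocation_parameters"] = {
        type: "anthropic",
        anthropic: {
            max_tokens: 1024,                    // required for the anthropic variant
            temperature: 0.5,
            stop_sequences: ["\n\nHuman:"],
        },
    };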

  • model_name: string

    Model Name

  • model_provider: "OPENAI" | "AZURE_OPENAI" | "ANTHROPIC" | "GOOGLE"
  • Optional response_format?:
        | null
        | {
            json_schema: {
                description?: string;
                name: string;
                schema?: Record<string, unknown>;
                strict?: boolean;
            };
            type: "json_schema";
        }

    Response Format
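
    A hedged sketch of a structured-output response format (the schema name and fields are illustrative; schema is typed as Record<string, unknown>, so any JSON-serializable object is accepted at compile time):

    const responseFormat: NonNullable<CreatePromptParams["version"]["response_format"]> = {
        type: "json_schema",
        json_schema: {
            name: "ticket_summary",
            description: "Structured summary of a support ticket",
            schema: {
                type: "object",
                properties: {
                    summary: { type: "string" },
                    priority: { type: "string", enum: ["low", "medium", "high"] },
                },
                required: ["summary", "priority"],
            },
            strict: true,
        },
    };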

  • template:
        | {
            messages: {
                content: | string
                | (
                    | { text: string; type: "text" }
                    | {
                        tool_call: { arguments: string; name: string; type: "function" };
                        tool_call_id: string;
                        type: "tool_call";
                    }
                    | {
                        tool_call_id: string;
                        tool_result:
                            | null
                            | string
                            | number
                            | boolean
                            | Record<(...), (...)>
                            | (...)[];
                        type: "tool_result";
                    }
                )[];
                role: | "user"
                | "assistant"
                | "model"
                | "ai"
                | "tool"
                | "system"
                | "developer";
            }[];
            type: "chat";
        }
        | { template: string; type: "string" }

    Template
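
    Two hedged sketches of the template union, one per variant (the {{ticket}} placeholder assumes the MUSTACHE template_format; all text is illustrative):

    const chatTemplate: CreatePromptParams["version"]["template"] = {
        type: "chat",
        messages: [
            { role: "system", content: "You are a support assistant." },
            {
                role: "user",
                content: [{ type: "text", text: "Summarize this ticket: {{ticket}}" }],
            },
        ],
    };

    const stringTemplate: CreatePromptParams["version"]["template"] = {
        type: "string",
        template: "Summarize this ticket: {{ticket}}",
    };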

  • template_format: "MUSTACHE" | "F_STRING" | "NONE"
  • template_type: "STR" | "CHAT"
  • Optional tools?:
        | null
        | {
            disable_parallel_tool_calls?: boolean;
            tool_choice?: | { type: "none" }
            | { type: "one_or_more" }
            | { function_name: string; type: "specific_function" }
            | { type: "zero_or_more" };
            tools: {
                function: {
                    description?: string;
                    name: string;
                    parameters?: Record<string, unknown>;
                    strict?: boolean;
                };
                type: "function";
            }[];
            type: "tools";
        }
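
    A minimal sketch of a tools block (the function name and its JSON-schema parameters are illustrative): tool_choice selects between none, zero_or_more, one_or_more, or a specific_function, and each entry in tools wraps a function definition.

    const tools: NonNullable<CreatePromptParams["version"]["tools"]> = {
        type: "tools",
        tool_choice: { type: "zero_or_more" },
        disable_parallel_tool_calls: false,
        tools: [
            {
                type: "function",
                function: {
                    name: "get_weather",
                    description: "Look up the current weather for a city",
                    parameters: {
                        type: "object",
                        properties: { city: { type: "string" } },
                        required: ["city"],
                    },
                },
            },
        ],
    };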