Arize Phoenix TS
    Preparing search index...

    Parameters to create a prompt

    interface CreatePromptParams {
        client?: {
            config: ClientOptions;
            DELETE: ClientMethod<paths, "delete", `${string}/${string}`>;
            GET: ClientMethod<paths, "get", `${string}/${string}`>;
            HEAD: ClientMethod<paths, "head", `${string}/${string}`>;
            OPTIONS: ClientMethod<paths, "options", `${string}/${string}`>;
            PATCH: ClientMethod<paths, "patch", `${string}/${string}`>;
            POST: ClientMethod<paths, "post", `${string}/${string}`>;
            PUT: ClientMethod<paths, "put", `${string}/${string}`>;
            TRACE: ClientMethod<paths, "trace", `${string}/${string}`>;
            eject(...middleware: Middleware[]): void;
            use(...middleware: Middleware[]): void;
        };
        description?: string;
        metadata?: Record<string, unknown>;
        name: string;
        source_prompt_id?: string | null;
        version: {
            description?: string | null;
            invocation_parameters:
                | {
                    anthropic: {
                        max_tokens: number;
                        stop_sequences?: string[];
                        temperature?: number;
                        thinking?: | { type: "disabled" }
                        | { budget_tokens: number; type: "enabled" };
                        top_p?: number;
                    };
                    type: "anthropic";
                }
                | {
                    aws: { max_tokens?: number; temperature?: number; top_p?: number };
                    type: "aws";
                }
                | {
                    azure_openai: {
                        frequency_penalty?: number;
                        max_completion_tokens?: number;
                        max_tokens?: number;
                        presence_penalty?: number;
                        reasoning_effort?: | "none"
                        | "minimal"
                        | "low"
                        | "medium"
                        | "high"
                        | "xhigh";
                        seed?: number;
                        temperature?: number;
                        top_p?: number;
                    };
                    type: "azure_openai";
                }
                | {
                    deepseek: {
                        frequency_penalty?: number;
                        max_completion_tokens?: number;
                        max_tokens?: number;
                        presence_penalty?: number;
                        reasoning_effort?: | "none"
                        | "minimal"
                        | "low"
                        | "medium"
                        | "high"
                        | "xhigh";
                        seed?: number;
                        temperature?: number;
                        top_p?: number;
                    };
                    type: "deepseek";
                }
                | {
                    google: {
                        frequency_penalty?: number;
                        max_output_tokens?: number;
                        presence_penalty?: number;
                        stop_sequences?: string[];
                        temperature?: number;
                        top_k?: number;
                        top_p?: number;
                    };
                    type: "google";
                }
                | {
                    ollama: {
                        frequency_penalty?: number;
                        max_completion_tokens?: number;
                        max_tokens?: number;
                        presence_penalty?: number;
                        reasoning_effort?: | "none"
                        | "minimal"
                        | "low"
                        | "medium"
                        | "high"
                        | "xhigh";
                        seed?: number;
                        temperature?: number;
                        top_p?: number;
                    };
                    type: "ollama";
                }
                | {
                    openai: {
                        frequency_penalty?: number;
                        max_completion_tokens?: number;
                        max_tokens?: number;
                        presence_penalty?: number;
                        reasoning_effort?: | "none"
                        | "minimal"
                        | "low"
                        | "medium"
                        | "high"
                        | "xhigh";
                        seed?: number;
                        temperature?: number;
                        top_p?: number;
                    };
                    type: "openai";
                }
                | {
                    type: "xai";
                    xai: {
                        frequency_penalty?: number;
                        max_completion_tokens?: number;
                        max_tokens?: number;
                        presence_penalty?: number;
                        reasoning_effort?: | "none"
                        | "minimal"
                        | "low"
                        | "medium"
                        | "high"
                        | "xhigh";
                        seed?: number;
                        temperature?: number;
                        top_p?: number;
                    };
                };
            model_name: string;
            model_provider: | "OPENAI"
            | "AZURE_OPENAI"
            | "ANTHROPIC"
            | "GOOGLE"
            | "DEEPSEEK"
            | "XAI"
            | "OLLAMA"
            | "AWS";
            response_format?: | {
                json_schema: {
                    description?: string;
                    name: string;
                    schema?: { [key: string]: unknown };
                    strict?: boolean;
                };
                type: "json_schema";
            }
            | null;
            template: | {
                messages: {
                    content: | string
                    | (
                        | { text: string; type: "text" }
                        | {
                            tool_call: { arguments: string; name: string; type: "function" };
                            tool_call_id: string;
                            type: "tool_call";
                        }
                        | {
                            tool_call_id: string;
                            tool_result: | string
                            | number
                            | boolean
                            | { [key: ...]: ... }
                            | (...)[]
                            | null;
                            type: "tool_result";
                        }
                    )[];
                    role: | "user"
                    | "assistant"
                    | "model"
                    | "ai"
                    | "tool"
                    | "system"
                    | "developer";
                }[];
                type: "chat";
            }
            | { template: string; type: "string" };
            template_format: "NONE" | "MUSTACHE" | "F_STRING";
            template_type: "STR" | "CHAT";
            tools?:
                | {
                    disable_parallel_tool_calls?: boolean;
                    tool_choice?: | { type: "none" }
                    | { type: "one_or_more" }
                    | { function_name: string; type: "specific_function" }
                    | { type: "zero_or_more" };
                    tools: {
                        function: {
                            description?: string;
                            name: string;
                            parameters?: { [key: string]: unknown };
                            strict?: boolean;
                        };
                        type: "function";
                    }[];
                    type: "tools";
                }
                | null;
        };
    }

    Hierarchy (View Summary)

    Index

    Properties

    client?: {
        config: ClientOptions;
        DELETE: ClientMethod<paths, "delete", `${string}/${string}`>;
        GET: ClientMethod<paths, "get", `${string}/${string}`>;
        HEAD: ClientMethod<paths, "head", `${string}/${string}`>;
        OPTIONS: ClientMethod<paths, "options", `${string}/${string}`>;
        PATCH: ClientMethod<paths, "patch", `${string}/${string}`>;
        POST: ClientMethod<paths, "post", `${string}/${string}`>;
        PUT: ClientMethod<paths, "put", `${string}/${string}`>;
        TRACE: ClientMethod<paths, "trace", `${string}/${string}`>;
        eject(...middleware: Middleware[]): void;
        use(...middleware: Middleware[]): void;
    }

    An instance of the Phoenix client. If not provided, the client will be created using the default configuration.

    Type Declaration

    • config: ClientOptions
    • DELETE: ClientMethod<paths, "delete", `${string}/${string}`>

      Call a DELETE endpoint

    • GET: ClientMethod<paths, "get", `${string}/${string}`>

      Call a GET endpoint

    • HEAD: ClientMethod<paths, "head", `${string}/${string}`>

      Call a HEAD endpoint

    • OPTIONS: ClientMethod<paths, "options", `${string}/${string}`>

      Call an OPTIONS endpoint

    • PATCH: ClientMethod<paths, "patch", `${string}/${string}`>

      Call a PATCH endpoint

    • POST: ClientMethod<paths, "post", `${string}/${string}`>

      Call a POST endpoint

    • PUT: ClientMethod<paths, "put", `${string}/${string}`>

      Call a PUT endpoint

    • TRACE: ClientMethod<paths, "trace", `${string}/${string}`>

      Call a TRACE endpoint

    • eject: function
      • Unregister middleware

        Parameters

        • ...middleware: Middleware[]

        Returns void

    • use: function
      • Register middleware

        Parameters

        • ...middleware: Middleware[]

        Returns void

    description?: string

    The description of the prompt

    metadata?: Record<string, unknown>

    Optional metadata for the prompt as a JSON object

    { "environment": "production", "version": "1.0" }
    
    name: string

    The name of the prompt

    source_prompt_id?: string | null

    Source Prompt Id

    version: {
        description?: string | null;
        invocation_parameters:
            | {
                anthropic: {
                    max_tokens: number;
                    stop_sequences?: string[];
                    temperature?: number;
                    thinking?: | { type: "disabled" }
                    | { budget_tokens: number; type: "enabled" };
                    top_p?: number;
                };
                type: "anthropic";
            }
            | {
                aws: { max_tokens?: number; temperature?: number; top_p?: number };
                type: "aws";
            }
            | {
                azure_openai: {
                    frequency_penalty?: number;
                    max_completion_tokens?: number;
                    max_tokens?: number;
                    presence_penalty?: number;
                    reasoning_effort?: | "none"
                    | "minimal"
                    | "low"
                    | "medium"
                    | "high"
                    | "xhigh";
                    seed?: number;
                    temperature?: number;
                    top_p?: number;
                };
                type: "azure_openai";
            }
            | {
                deepseek: {
                    frequency_penalty?: number;
                    max_completion_tokens?: number;
                    max_tokens?: number;
                    presence_penalty?: number;
                    reasoning_effort?: | "none"
                    | "minimal"
                    | "low"
                    | "medium"
                    | "high"
                    | "xhigh";
                    seed?: number;
                    temperature?: number;
                    top_p?: number;
                };
                type: "deepseek";
            }
            | {
                google: {
                    frequency_penalty?: number;
                    max_output_tokens?: number;
                    presence_penalty?: number;
                    stop_sequences?: string[];
                    temperature?: number;
                    top_k?: number;
                    top_p?: number;
                };
                type: "google";
            }
            | {
                ollama: {
                    frequency_penalty?: number;
                    max_completion_tokens?: number;
                    max_tokens?: number;
                    presence_penalty?: number;
                    reasoning_effort?: | "none"
                    | "minimal"
                    | "low"
                    | "medium"
                    | "high"
                    | "xhigh";
                    seed?: number;
                    temperature?: number;
                    top_p?: number;
                };
                type: "ollama";
            }
            | {
                openai: {
                    frequency_penalty?: number;
                    max_completion_tokens?: number;
                    max_tokens?: number;
                    presence_penalty?: number;
                    reasoning_effort?: | "none"
                    | "minimal"
                    | "low"
                    | "medium"
                    | "high"
                    | "xhigh";
                    seed?: number;
                    temperature?: number;
                    top_p?: number;
                };
                type: "openai";
            }
            | {
                type: "xai";
                xai: {
                    frequency_penalty?: number;
                    max_completion_tokens?: number;
                    max_tokens?: number;
                    presence_penalty?: number;
                    reasoning_effort?: | "none"
                    | "minimal"
                    | "low"
                    | "medium"
                    | "high"
                    | "xhigh";
                    seed?: number;
                    temperature?: number;
                    top_p?: number;
                };
            };
        model_name: string;
        model_provider: | "OPENAI"
        | "AZURE_OPENAI"
        | "ANTHROPIC"
        | "GOOGLE"
        | "DEEPSEEK"
        | "XAI"
        | "OLLAMA"
        | "AWS";
        response_format?: | {
            json_schema: {
                description?: string;
                name: string;
                schema?: { [key: string]: unknown };
                strict?: boolean;
            };
            type: "json_schema";
        }
        | null;
        template: | {
            messages: {
                content: | string
                | (
                    | { text: string; type: "text" }
                    | {
                        tool_call: { arguments: string; name: string; type: "function" };
                        tool_call_id: string;
                        type: "tool_call";
                    }
                    | {
                        tool_call_id: string;
                        tool_result: | string
                        | number
                        | boolean
                        | { [key: ...]: ... }
                        | (...)[]
                        | null;
                        type: "tool_result";
                    }
                )[];
                role: | "user"
                | "assistant"
                | "model"
                | "ai"
                | "tool"
                | "system"
                | "developer";
            }[];
            type: "chat";
        }
        | { template: string; type: "string" };
        template_format: "NONE" | "MUSTACHE" | "F_STRING";
        template_type: "STR" | "CHAT";
        tools?:
            | {
                disable_parallel_tool_calls?: boolean;
                tool_choice?: | { type: "none" }
                | { type: "one_or_more" }
                | { function_name: string; type: "specific_function" }
                | { type: "zero_or_more" };
                tools: {
                    function: {
                        description?: string;
                        name: string;
                        parameters?: { [key: string]: unknown };
                        strict?: boolean;
                    };
                    type: "function";
                }[];
                type: "tools";
            }
            | null;
    }

    The prompt version to push onto the history of the prompt

    Type Declaration

    • Optional description?: string | null

      Description

    • invocation_parameters:
          | {
              anthropic: {
                  max_tokens: number;
                  stop_sequences?: string[];
                  temperature?: number;
                  thinking?: | { type: "disabled" }
                  | { budget_tokens: number; type: "enabled" };
                  top_p?: number;
              };
              type: "anthropic";
          }
          | {
              aws: { max_tokens?: number; temperature?: number; top_p?: number };
              type: "aws";
          }
          | {
              azure_openai: {
                  frequency_penalty?: number;
                  max_completion_tokens?: number;
                  max_tokens?: number;
                  presence_penalty?: number;
                  reasoning_effort?: | "none"
                  | "minimal"
                  | "low"
                  | "medium"
                  | "high"
                  | "xhigh";
                  seed?: number;
                  temperature?: number;
                  top_p?: number;
              };
              type: "azure_openai";
          }
          | {
              deepseek: {
                  frequency_penalty?: number;
                  max_completion_tokens?: number;
                  max_tokens?: number;
                  presence_penalty?: number;
                  reasoning_effort?: | "none"
                  | "minimal"
                  | "low"
                  | "medium"
                  | "high"
                  | "xhigh";
                  seed?: number;
                  temperature?: number;
                  top_p?: number;
              };
              type: "deepseek";
          }
          | {
              google: {
                  frequency_penalty?: number;
                  max_output_tokens?: number;
                  presence_penalty?: number;
                  stop_sequences?: string[];
                  temperature?: number;
                  top_k?: number;
                  top_p?: number;
              };
              type: "google";
          }
          | {
              ollama: {
                  frequency_penalty?: number;
                  max_completion_tokens?: number;
                  max_tokens?: number;
                  presence_penalty?: number;
                  reasoning_effort?: | "none"
                  | "minimal"
                  | "low"
                  | "medium"
                  | "high"
                  | "xhigh";
                  seed?: number;
                  temperature?: number;
                  top_p?: number;
              };
              type: "ollama";
          }
          | {
              openai: {
                  frequency_penalty?: number;
                  max_completion_tokens?: number;
                  max_tokens?: number;
                  presence_penalty?: number;
                  reasoning_effort?: | "none"
                  | "minimal"
                  | "low"
                  | "medium"
                  | "high"
                  | "xhigh";
                  seed?: number;
                  temperature?: number;
                  top_p?: number;
              };
              type: "openai";
          }
          | {
              type: "xai";
              xai: {
                  frequency_penalty?: number;
                  max_completion_tokens?: number;
                  max_tokens?: number;
                  presence_penalty?: number;
                  reasoning_effort?: | "none"
                  | "minimal"
                  | "low"
                  | "medium"
                  | "high"
                  | "xhigh";
                  seed?: number;
                  temperature?: number;
                  top_p?: number;
              };
          }

      Invocation Parameters

    • model_name: string

      Model Name

    • model_provider:
          | "OPENAI"
          | "AZURE_OPENAI"
          | "ANTHROPIC"
          | "GOOGLE"
          | "DEEPSEEK"
          | "XAI"
          | "OLLAMA"
          | "AWS"
    • Optional response_format?:
          | {
              json_schema: {
                  description?: string;
                  name: string;
                  schema?: { [key: string]: unknown };
                  strict?: boolean;
              };
              type: "json_schema";
          }
          | null

      Response Format

    • template:
          | {
              messages: {
                  content: | string
                  | (
                      | { text: string; type: "text" }
                      | {
                          tool_call: { arguments: string; name: string; type: "function" };
                          tool_call_id: string;
                          type: "tool_call";
                      }
                      | {
                          tool_call_id: string;
                          tool_result: | string
                          | number
                          | boolean
                          | { [key: ...]: ... }
                          | (...)[]
                          | null;
                          type: "tool_result";
                      }
                  )[];
                  role: | "user"
                  | "assistant"
                  | "model"
                  | "ai"
                  | "tool"
                  | "system"
                  | "developer";
              }[];
              type: "chat";
          }
          | { template: string; type: "string" }

      Template

    • template_format: "NONE" | "MUSTACHE" | "F_STRING"
    • template_type: "STR" | "CHAT"
    • Optional tools?:
          | {
              disable_parallel_tool_calls?: boolean;
              tool_choice?: | { type: "none" }
              | { type: "one_or_more" }
              | { function_name: string; type: "specific_function" }
              | { type: "zero_or_more" };
              tools: {
                  function: {
                      description?: string;
                      name: string;
                      parameters?: { [key: string]: unknown };
                      strict?: boolean;
                  };
                  type: "function";
              }[];
              type: "tools";
          }
          | null