---
title: "OpenAI Compatible Proxy LLM Configuration"
description: "OpenAI Compatible Proxy LLM"
---

import { ConfigDetail } from "@site/src/components/mdx/ConfigDetail";

<ConfigDetail config={{
  "name": "OpenAICompatibleDeployModelParameters",
  "description": "OpenAI Compatible Proxy LLM",
  "documentationUrl": "https://platform.openai.com/docs/api-reference/chat",
  "parameters": [
    {
      "name": "name",
      "type": "string",
      "required": true,
      "description": "The name of the model."
    },
    {
      "name": "backend",
      "type": "string",
      "required": false,
      "description": "The real model name to pass to the provider. Defaults to None; if None, the value of name is used as the real model name."
    },
    {
      "name": "provider",
      "type": "string",
      "required": false,
      "description": "The provider of the model. If the model is deployed locally, this is the inference type. If the model is deployed in a third-party service, this is the platform name ('proxy/<platform>').",
      "defaultValue": "proxy/openai"
    },
    {
      "name": "verbose",
      "type": "boolean",
      "required": false,
      "description": "Show verbose output.",
      "defaultValue": "False"
    },
    {
      "name": "concurrency",
      "type": "integer",
      "required": false,
      "description": "Model concurrency limit",
      "defaultValue": "100"
    },
    {
      "name": "prompt_template",
      "type": "string",
      "required": false,
      "description": "Prompt template. If None, the prompt template is automatically determined from the model. Only used for local deployment."
    },
    {
      "name": "context_length",
      "type": "integer",
      "required": false,
      "description": "The context length of the OpenAI API. If None, it is determined by the model."
    },
    {
      "name": "reasoning_model",
      "type": "boolean",
      "required": false,
      "description": "Whether the model is a reasoning model. If None, it is automatically determined from the model."
    },
    {
      "name": "api_base",
      "type": "string",
      "required": false,
      "description": "The base url of the OpenAI API.",
      "defaultValue": "${env:OPENAI_API_BASE:-https://api.openai.com/v1}"
    },
    {
      "name": "api_key",
      "type": "string",
      "required": false,
      "description": "The API key of the OpenAI API.",
      "defaultValue": "${env:OPENAI_API_KEY}"
    },
    {
      "name": "api_type",
      "type": "string",
      "required": false,
      "description": "The type of the OpenAI API. If you use Azure, set it to 'azure'."
    },
    {
      "name": "api_version",
      "type": "string",
      "required": false,
      "description": "The version of the OpenAI API."
    },
    {
      "name": "http_proxy",
      "type": "string",
      "required": false,
      "description": "The HTTP or HTTPS proxy to use when connecting to the OpenAI API."
    }
  ]
}} />

