# Configuration version (required)
version: 1.0.0
# Cache settings: Set to true to enable caching
cache: true
# Definition of custom endpoints
endpoints:
  custom:
    # Mandrill
    - name: "Mandrill"
      apiKey: "user_provided"
      baseURL: "https://api.mandrillai.tech/v1"
      models:
        default: ["gpt-4-vision-preview", "gpt-4", "gpt-4-1106-preview", "gpt-3.5-turbo", "gpt-3.5-turbo-1106", "gpt-3.5-turbo-0613", "gpt-3.5-turbo-0301", "gemini-pro", "gemini-pro-vision"]
        fetch: true
      titleConvo: false
      titleModel: "gpt-3.5-turbo"
      summarize: false
      summaryModel: "gpt-3.5-turbo"
      forcePrompt: false
      modelDisplayLabel: "Mandrill"
      iconURL: "https://cdn.discordapp.com/attachments/1171849907812978800/1193980279233187912/Mandrill.png"
    # OpenRouter.ai Example
    - name: "Reverse Proxy"
      # For `apiKey` and `baseURL`, you can use environment variables that you define.
      # recommended environment variables:
      # Known issue: you should not use `OPENROUTER_API_KEY` as it will then override the `openAI` endpoint to use OpenRouter as well.
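      # A minimal sketch (assumed values, not part of this config): instead of
      # "user_provided" below, you could reference an environment variable such as
      # OPENROUTER_KEY and point at OpenRouter's API, e.g.:
      #   apiKey: "${OPENROUTER_KEY}"
      #   baseURL: "https://openrouter.ai/api/v1"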
      apiKey: "user_provided"
      baseURL: "user_provided"
      models:
        default: ["gpt-4-vision-preview", "gpt-4", "gpt-4-1106-preview", "gpt-4-0125-preview", "gpt-3.5-turbo", "gpt-3.5-turbo-1106", "gpt-3.5-turbo-0613", "gpt-4-32k", "gpt-4-0314", "gpt-4-0613", "gpt-4-turbo-2024-04-09", "gpt-4-all"]
        fetch: true
      titleConvo: false
      titleModel: "gpt-3.5-turbo"
      summarize: false
      summaryModel: "gpt-3.5-turbo"
      forcePrompt: false
      modelDisplayLabel: "AI"
# See the Custom Configuration Guide for more information:
# https://docs.librechat.ai/install/configuration/custom_config.html