# Configuration version (required)
version: 1.0.0

# Cache settings: Set to true to enable caching
cache: true

# Definition of custom endpoints
endpoints:
  custom:
    # Mandrill
    - name: "Mandrill"
      apiKey: "user_provided"
      baseURL: "https://api.mandrillai.tech/v1"
      models:
        default: ["gpt-4-vision-preview", "gpt-4", "gpt-4-1106-preview", "gpt-3.5-turbo", "gpt-3.5-turbo-1106", "gpt-3.5-turbo-0613", "gpt-3.5-turbo-0301", "gemini-pro", "gemini-pro-vision"]
        fetch: true
      titleConvo: false
      titleModel: "gpt-3.5-turbo"
      summarize: false
      summaryModel: "gpt-3.5-turbo"
      forcePrompt: false
      modelDisplayLabel: "Mandrill"
      iconURL: "https://cdn.discordapp.com/attachments/1171849907812978800/1193980279233187912/Mandrill.png"
    # OpenRouter.ai Example
    - name: "Reverse Proxy"
      # For `apiKey` and `baseURL`, you can use environment variables that you define.
      # recommended environment variables:
      # Known issue: you should not use `OPENROUTER_API_KEY` as it will then override the `openAI` endpoint to use OpenRouter as well.
      apiKey: "user_provided"
      baseURL: "user_provided"
      models:
        default: ["gpt-4-vision-preview", "gpt-4", "gpt-4-1106-preview", "gpt-3.5-turbo", "gpt-3.5-turbo-1106", "gpt-3.5-turbo-0613", "gpt-4-32k", "gpt-4-0314", "gpt-4-0613"]
        fetch: true
      titleConvo: false
      titleModel: "gpt-3.5-turbo"
      summarize: false
      summaryModel: "gpt-3.5-turbo"
      forcePrompt: false
      modelDisplayLabel: "AI"
# See the Custom Configuration Guide for more information:
# https://docs.librechat.ai/install/configuration/custom_config.html