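# librechat.yaml: LibreChat configuration defining custom endpoints (Groq, Mistral, Perplexity, OpenRouter)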
# Configuration version (required)
version: 1.1.5
# Cache settings: Set to true to enable caching
cache: true
# Definition of custom endpoints
endpoints:
  # assistants:
  #   disableBuilder: false # Disable Assistants Builder Interface by setting to `true`
  #   pollIntervalMs: 750  # Polling interval for checking assistant updates
  #   timeoutMs: 180000  # Timeout for assistant operations
  #   # Should only be one or the other, either `supportedIds` or `excludedIds`
  #   supportedIds: ["asst_supportedAssistantId1", "asst_supportedAssistantId2"]
  #   # excludedIds: ["asst_excludedAssistantId"]
  custom:
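    # Quick reference for the fields used by the endpoints below (see the LibreChat
    # custom-endpoint docs for the authoritative schema):
    #   apiKey            - "${ENV_VAR}" reference, or "user_provided" so each user supplies a key
    #   baseURL           - OpenAI-compatible API base URL
    #   models.default    - models listed without fetching
    #   models.fetch      - fetch the model list from the API when the provider supports it
    #   titleConvo / titleModel  - auto-title conversations with the given model
    #   summarize / summaryModel - optional conversation summarization settings
    #   dropParams        - request parameters stripped for providers that reject them
    #   modelDisplayLabel - label shown for the endpoint in the UI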
    # Groq
    - name: "groq"
      apiKey: "${GROQ_API_KEY}"
      baseURL: "https://api.groq.com/openai/v1/"
      models:
        default:
          [
            'llama3-70b-8192',
            'llama3-8b-8192',
            'llama2-70b-4096',
            'mixtral-8x7b-32768',
            'gemma-7b-it',
          ]
        fetch: true
      titleConvo: true
      titleModel: "mixtral-8x7b-32768"
      summarize: false
      summaryModel: "mixtral-8x7b-32768"
      forcePrompt: false
      modelDisplayLabel: "groq"
    # Mistral AI API
    - name: "Mistral"
      apiKey: "user_provided"
      baseURL: "https://api.mistral.ai/v1"
      models:
        default: ["mistral-tiny", "mistral-small", "mistral-medium", "mistral-large-latest"]
        fetch: true
      titleConvo: true
      titleMethod: "completion"
      titleModel: "open-mistral-7b"
      summarize: false
      summaryModel: "open-mistral-7b"
      forcePrompt: false
      modelDisplayLabel: "Mistral"
      dropParams: ["stop", "user", "frequency_penalty", "presence_penalty"]
    # Perplexity
- name: "Perplexity"
apiKey: "user_provided"
baseURL: "https://api.perplexity.ai/"
models:
default: [
"llama-3-sonar-small-32k-chat",
"llama-3-sonar-small-32k-online",
"llama-3-sonar-large-32k-chat",
"llama-3-sonar-large-32k-online",
"mixtral-8x7b-instruct",
"llama-3-8b-instruct",
"llama-3-70b-instruct"
]
fetch: false # fetching list of models is not supported
titleConvo: true
titleModel: "sonar-medium-chat"
summarize: false
summaryModel: "sonar-medium-chat"
forcePrompt: false
dropParams: ["stop", "frequency_penalty"]
modelDisplayLabel: "Perplexity"
    # OpenRouter
    - name: 'OpenRouter'
      # Known issue: you should not use `OPENROUTER_API_KEY`, as it will then override the `openAI` endpoint to use OpenRouter as well.
      apiKey: '${OPENROUTER_KEY}'
      baseURL: 'https://openrouter.ai/api/v1'
      models:
        default: ['meta-llama/llama-3-70b-instruct']
        fetch: true
      titleConvo: true
      titleModel: 'meta-llama/llama-3-8b-instruct'
      # Recommended: Drop the `stop` parameter from the request, as OpenRouter models use a variety of stop tokens.
      dropParams: ['stop']
      modelDisplayLabel: 'OpenRouter'