Update config.yaml
config.yaml  CHANGED  (+24 -0)
@@ -62,6 +62,30 @@ model_list:
       rpm: 30 # 15 * # of keys
       tpm: 2000000 # 1,000,000 * # of keys
 
+  - model_name: Mistral-large-2411-k1
+    litellm_params:
+      model: github/Mistral-large-2411
+      api_base: https://models.inference.ai.azure.com
+      api_key: os.environ/GITHUB_API_KEY1
+  - model_name: github/Mistral-large-2411
+    litellm_params:
+      model: github/Mistral-large-2411
+      api_key: os.environ/GITHUB_API_KEY1
+      rpm: 30 # 15 * # of keys
+      tpm: 2000000 # 1,000,000 * # of keys
+
+  - model_name: Mistral-large-2411-k2
+    litellm_params:
+      model: github/Mistral-large-2411
+      api_base: https://models.inference.ai.azure.com
+      api_key: os.environ/GITHUB_API_KEY2
+  - model_name: github/Mistral-large-2411
+    litellm_params:
+      model: github/Mistral-large-2411
+      api_key: os.environ/GITHUB_API_KEY2
+      rpm: 30 # 15 * # of keys
+      tpm: 2000000 # 1,000,000 * # of keys
+
   - model_name: gpt-4o-mini-k2
     litellm_params:
       model: github/gpt-4o-mini
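For context, a minimal sketch of how a client could call one of the deployments added in this commit once the proxy loads this config. It assumes the LiteLLM proxy is started with "litellm --config config.yaml" on its default port 4000 and that "sk-1234" is a placeholder proxy key; only the model names come from the diff above.

# Sketch only: assumes a LiteLLM proxy running locally on port 4000
# with "sk-1234" standing in for whatever proxy key is actually configured.
from openai import OpenAI

client = OpenAI(
    base_url="http://localhost:4000",  # LiteLLM proxy endpoint (assumed default)
    api_key="sk-1234",                 # proxy key (hypothetical placeholder)
)

# "Mistral-large-2411-k1" is one of the model_name entries added in this commit.
# Requests to "github/Mistral-large-2411" would instead be spread across the two
# deployments sharing that name (GITHUB_API_KEY1 and GITHUB_API_KEY2).
response = client.chat.completions.create(
    model="Mistral-large-2411-k1",
    messages=[{"role": "user", "content": "Say hello."}],
)
print(response.choices[0].message.content)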