nsarrazin (HF staff) committed
Commit aab7222
1 Parent(s): f02ffb2

Make mixtral the default model for new users (#672)

Files changed (1)
  1. .env.template +28 -28
.env.template CHANGED
@@ -1,6 +1,34 @@
  # template used in production for HuggingChat.

  MODELS=`[
+ {
+   "name" : "mistralai/Mixtral-8x7B-Instruct-v0.1",
+   "description" : "The latest MoE model from Mistral AI! 8x7B and outperforms Llama 2 70B in most benchmarks.",
+   "websiteUrl" : "https://mistral.ai/news/mixtral-of-experts/",
+   "preprompt" : "",
+   "chatPromptTemplate": "<s> {{#each messages}}{{#ifUser}}[INST]{{#if @first}}{{#if @root.preprompt}}{{@root.preprompt}}\n{{/if}}{{/if}} {{content}} [/INST]{{/ifUser}}{{#ifAssistant}} {{content}}</s> {{/ifAssistant}}{{/each}}",
+   "parameters" : {
+     "temperature" : 0.6,
+     "top_p" : 0.95,
+     "repetition_penalty" : 1.2,
+     "top_k" : 50,
+     "truncate" : 24576,
+     "max_new_tokens" : 8192,
+     "stop" : ["</s>"]
+   },
+   "promptExamples" : [
+     {
+       "title": "Write an email from bullet list",
+       "prompt": "As a restaurant owner, write a professional email to the supplier to get these products every week: \n\n- Wine (x10)\n- Eggs (x24)\n- Bread (x12)"
+     }, {
+       "title": "Code a snake game",
+       "prompt": "Code a basic snake game in python, give explanations for each step."
+     }, {
+       "title": "Assist in a task",
+       "prompt": "How do I make a delicious lemon cheesecake?"
+     }
+   ]
+ },
  {
    "name": "meta-llama/Llama-2-70b-chat-hf",
    "description": "The latest and biggest model from Meta, fine-tuned for chat.",
@@ -180,34 +208,6 @@ MODELS=`[
        "prompt": "How do I make a delicious lemon cheesecake?"
      }
    ]
- },
- {
-   "name" : "mistralai/Mixtral-8x7B-Instruct-v0.1",
-   "description" : "The latest MoE model from Mistral AI! 8x7B and outperforms Llama 2 70B in most benchmarks.",
-   "websiteUrl" : "https://mistral.ai/news/mixtral-of-experts/",
-   "preprompt" : "",
-   "chatPromptTemplate": "<s> {{#each messages}}{{#ifUser}}[INST]{{#if @first}}{{#if @root.preprompt}}{{@root.preprompt}}\n{{/if}}{{/if}} {{content}} [/INST]{{/ifUser}}{{#ifAssistant}} {{content}}</s> {{/ifAssistant}}{{/each}}",
-   "parameters" : {
-     "temperature" : 0.6,
-     "top_p" : 0.95,
-     "repetition_penalty" : 1.2,
-     "top_k" : 50,
-     "truncate" : 24576,
-     "max_new_tokens" : 8192,
-     "stop" : ["</s>"]
-   },
-   "promptExamples" : [
-     {
-       "title": "Write an email from bullet list",
-       "prompt": "As a restaurant owner, write a professional email to the supplier to get these products every week: \n\n- Wine (x10)\n- Eggs (x24)\n- Bread (x12)"
-     }, {
-       "title": "Code a snake game",
-       "prompt": "Code a basic snake game in python, give explanations for each step."
-     }, {
-       "title": "Assist in a task",
-       "prompt": "How do I make a delicious lemon cheesecake?"
-     }
-   ]
  }
]`
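Why moving the block changes the default: the commit title and the reorder suggest that the first entry of the MODELS JSON array is what new users get by default. Below is a minimal standalone sketch in TypeScript (not the chat-ui source) of that behavior; the ModelConfig interface and parseModels helper are illustrative names introduced here, and the assumption that models[0] is the default is taken from the commit message rather than verified code.

// Sketch: parse a MODELS-style value and pick the first entry as the default.
// Assumes the backtick delimiters from .env.template have already been stripped,
// leaving plain JSON. Assumption (not verified against chat-ui): the first entry wins.
interface ModelConfig {
  name: string;
  description?: string;
  websiteUrl?: string;
  preprompt?: string;
  chatPromptTemplate?: string;
  parameters?: Record<string, unknown>;
  promptExamples?: { title: string; prompt: string }[];
}

function parseModels(raw: string): ModelConfig[] {
  const models = JSON.parse(raw) as ModelConfig[];
  if (!Array.isArray(models) || models.length === 0) {
    throw new Error("MODELS must be a non-empty JSON array");
  }
  return models;
}

const models = parseModels(process.env.MODELS ?? "[]");
const defaultModel = models[0]; // with this commit, mistralai/Mixtral-8x7B-Instruct-v0.1
console.log(`Default model: ${defaultModel.name}`);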
 
 
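For reference, the chatPromptTemplate in the new entry is a Handlebars-style template that wraps the conversation in Mixtral's [INST] instruction format. As an illustration only (the real rendering is done by the template engine with chat-ui's {{#ifUser}}/{{#ifAssistant}} helpers), the TypeScript sketch below hand-codes roughly the string the template produces; the Message type and renderMixtralPrompt function are hypothetical names, assuming the helpers switch on the message role.

// Approximates the output of the chatPromptTemplate above for a short conversation.
type Message = { from: "user" | "assistant"; content: string };

function renderMixtralPrompt(messages: Message[], preprompt = ""): string {
  let out = "<s> ";
  messages.forEach((m, i) => {
    if (m.from === "user") {
      // The preprompt is only injected into the very first message, per {{#if @first}}.
      const sys = i === 0 && preprompt ? `${preprompt}\n` : "";
      out += `[INST]${sys} ${m.content} [/INST]`;
    } else {
      out += ` ${m.content}</s> `;
    }
  });
  return out;
}

// Example: a two-turn exchange rendered in Mixtral's instruction format.
console.log(
  renderMixtralPrompt([
    { from: "user", content: "Code a basic snake game in python" },
    { from: "assistant", content: "Sure, here is a starting point..." },
    { from: "user", content: "Add a score counter" },
  ])
);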