Commit c51be65 committed by nsarrazin (HF staff)
Parent: 5f4c031

Reorder models in HuggingChat (#1060)

Files changed (1):
  1. .env.template (+57 -56)
.env.template CHANGED
@@ -109,6 +109,36 @@ MODELS=`[
    ]
  },
  {
+   "name" : "NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO",
+   "description" : "Nous Hermes 2 Mixtral 8x7B DPO is the new flagship Nous Research model trained over the Mixtral 8x7B MoE LLM.",
+   "logoUrl": "https://huggingface.co/datasets/huggingchat/models-logo/resolve/main/nous-logo.png",
+   "websiteUrl" : "https://nousresearch.com/",
+   "modelUrl": "https://huggingface.co/NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO",
+   "tokenizer": "NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO",
+   "chatPromptTemplate" : "{{#if @root.preprompt}}<|im_start|>system\n{{@root.preprompt}}<|im_end|>\n{{/if}}{{#each messages}}{{#ifUser}}<|im_start|>user\n{{content}}<|im_end|>\n<|im_start|>assistant\n{{/ifUser}}{{#ifAssistant}}{{content}}<|im_end|>\n{{/ifAssistant}}{{/each}}",
+   "promptExamples": [
+     {
+       "title": "Write an email from bullet list",
+       "prompt": "As a restaurant owner, write a professional email to the supplier to get these products every week: \n\n- Wine (x10)\n- Eggs (x24)\n- Bread (x12)"
+     }, {
+       "title": "Code a snake game",
+       "prompt": "Code a basic snake game in python, give explanations for each step."
+     }, {
+       "title": "Assist in a task",
+       "prompt": "How do I make a delicious lemon cheesecake?"
+     }
+   ],
+   "parameters": {
+     "temperature": 0.7,
+     "top_p": 0.95,
+     "repetition_penalty": 1,
+     "top_k": 50,
+     "truncate": 24576,
+     "max_new_tokens": 2048,
+     "stop": ["<|im_end|>"]
+   }
+ },
+ {
    "name" : "google/gemma-1.1-7b-it",
    "description": "Gemma 7B 1.1 is the latest release in the Gemma family of lightweight models built by Google, trained using a novel RLHF method.",
    "websiteUrl" : "https://blog.google/technology/developers/gemma-open-models/",
@@ -135,14 +165,26 @@ MODELS=`[
      "stop" : ["<end_of_turn>"]
    }
  },
- {
-   "name" : "NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO",
-   "description" : "Nous Hermes 2 Mixtral 8x7B DPO is the new flagship Nous Research model trained over the Mixtral 8x7B MoE LLM.",
-   "logoUrl": "https://huggingface.co/datasets/huggingchat/models-logo/resolve/main/nous-logo.png",
-   "websiteUrl" : "https://nousresearch.com/",
-   "modelUrl": "https://huggingface.co/NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO",
-   "tokenizer": "NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO",
-   "chatPromptTemplate" : "{{#if @root.preprompt}}<|im_start|>system\n{{@root.preprompt}}<|im_end|>\n{{/if}}{{#each messages}}{{#ifUser}}<|im_start|>user\n{{content}}<|im_end|>\n<|im_start|>assistant\n{{/ifUser}}{{#ifAssistant}}{{content}}<|im_end|>\n{{/ifAssistant}}{{/each}}",
+
+ {
+   "name": "mistralai/Mistral-7B-Instruct-v0.2",
+   "displayName": "mistralai/Mistral-7B-Instruct-v0.2",
+   "description": "Mistral 7B is a new Apache 2.0 model, released by Mistral AI that outperforms Llama2 13B in benchmarks.",
+   "logoUrl": "https://huggingface.co/datasets/huggingchat/models-logo/resolve/main/mistral-logo.png",
+   "websiteUrl": "https://mistral.ai/news/announcing-mistral-7b/",
+   "modelUrl": "https://huggingface.co/mistralai/Mistral-7B-Instruct-v0.2",
+   "tokenizer": "mistralai/Mistral-7B-Instruct-v0.2",
+   "preprompt": "",
+   "chatPromptTemplate" : "<s>{{#each messages}}{{#ifUser}}[INST] {{#if @first}}{{#if @root.preprompt}}{{@root.preprompt}}\n{{/if}}{{/if}}{{content}} [/INST]{{/ifUser}}{{#ifAssistant}}{{content}}</s>{{/ifAssistant}}{{/each}}",
+   "parameters": {
+     "temperature": 0.3,
+     "top_p": 0.95,
+     "repetition_penalty": 1.2,
+     "top_k": 50,
+     "truncate": 3072,
+     "max_new_tokens": 1024,
+     "stop": ["</s>"]
+   },
    "promptExamples": [
      {
        "title": "Write an email from bullet list",
@@ -154,25 +196,7 @@ MODELS=`[
        "title": "Assist in a task",
        "prompt": "How do I make a delicious lemon cheesecake?"
      }
-   ],
-   "parameters": {
-     "temperature": 0.7,
-     "top_p": 0.95,
-     "repetition_penalty": 1,
-     "top_k": 50,
-     "truncate": 24576,
-     "max_new_tokens": 2048,
-     "stop": ["<|im_end|>"]
-   }
- },
- {
-   "name": "meta-llama/Meta-Llama-3-8B-Instruct",
-   "tokenizer" : "philschmid/meta-llama-3-tokenizer",
-   "parameters": {
-     "temperature": 0.1,
-     "stop": ["<|eot_id|>"],
-   },
-   "unlisted": true
+   ]
  },
  {
    "name": "microsoft/Phi-3-mini-4k-instruct",
@@ -199,37 +223,14 @@ MODELS=`[
      }
    ]
  },
- {
-   "name": "mistralai/Mistral-7B-Instruct-v0.2",
-   "displayName": "mistralai/Mistral-7B-Instruct-v0.2",
-   "description": "Mistral 7B is a new Apache 2.0 model, released by Mistral AI that outperforms Llama2 13B in benchmarks.",
-   "logoUrl": "https://huggingface.co/datasets/huggingchat/models-logo/resolve/main/mistral-logo.png",
-   "websiteUrl": "https://mistral.ai/news/announcing-mistral-7b/",
-   "modelUrl": "https://huggingface.co/mistralai/Mistral-7B-Instruct-v0.2",
-   "tokenizer": "mistralai/Mistral-7B-Instruct-v0.2",
-   "preprompt": "",
-   "chatPromptTemplate" : "<s>{{#each messages}}{{#ifUser}}[INST] {{#if @first}}{{#if @root.preprompt}}{{@root.preprompt}}\n{{/if}}{{/if}}{{content}} [/INST]{{/ifUser}}{{#ifAssistant}}{{content}}</s>{{/ifAssistant}}{{/each}}",
+ {
+   "name": "meta-llama/Meta-Llama-3-8B-Instruct",
+   "tokenizer" : "philschmid/meta-llama-3-tokenizer",
    "parameters": {
-     "temperature": 0.3,
-     "top_p": 0.95,
-     "repetition_penalty": 1.2,
-     "top_k": 50,
-     "truncate": 3072,
-     "max_new_tokens": 1024,
-     "stop": ["</s>"]
+     "temperature": 0.1,
+     "stop": ["<|eot_id|>"],
    },
-   "promptExamples": [
-     {
-       "title": "Write an email from bullet list",
-       "prompt": "As a restaurant owner, write a professional email to the supplier to get these products every week: \n\n- Wine (x10)\n- Eggs (x24)\n- Bread (x12)"
-     }, {
-       "title": "Code a snake game",
-       "prompt": "Code a basic snake game in python, give explanations for each step."
-     }, {
-       "title": "Assist in a task",
-       "prompt": "How do I make a delicious lemon cheesecake?"
-     }
-   ]
+   "unlisted": true
  }
 ]`
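
Note: MODELS in .env.template is a backtick-delimited array of model configs, and this commit only rearranges its entries (listed models appear in the order they occur; the Llama 3 entry is marked "unlisted": true). Below is a minimal TypeScript sketch of reading such a value and printing the resulting order, for anyone checking the reorder locally. The helper name and the use of the json5 package are illustrative assumptions, not chat-ui's actual loader; json5 is used because the value as written is not strict JSON (e.g. the trailing comma in the Llama 3 entry's "parameters" block above).

```ts
// Sketch: extract the MODELS array from an env template and print model order.
// Assumes the json5 package (npm i json5) to tolerate relaxed JSON such as the
// trailing comma in the Meta-Llama-3-8B-Instruct "parameters" block above.
import { readFileSync } from "node:fs";
import JSON5 from "json5";

interface ModelConfig {
  name: string;
  displayName?: string;
  description?: string;
  unlisted?: boolean;
  parameters?: Record<string, unknown>;
}

function readModels(envPath: string): ModelConfig[] {
  const raw = readFileSync(envPath, "utf8");
  // MODELS is assigned a multi-line value wrapped in backticks.
  const match = raw.match(/^MODELS=`([\s\S]*?)`/m);
  if (!match) throw new Error(`MODELS not found in ${envPath}`);
  return JSON5.parse(match[1]) as ModelConfig[];
}

// Print the order the entries appear in, flagging unlisted ones.
const models = readModels(".env.template");
models.forEach((m, i) => {
  const label = m.displayName ?? m.name;
  console.log(`${i + 1}. ${label}${m.unlisted ? " (unlisted)" : ""}`);
});
```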