Use llama 3 8B for tasks (#1052)
.env.template (+4 -28)
@@ -166,36 +166,12 @@ MODELS=`[
     }
   },
   {
-    "name": "mistralai/Mistral-7B-Instruct-v0.1",
-    "displayName": "mistralai/Mistral-7B-Instruct-v0.1",
-    "description": "Mistral 7B is a new Apache 2.0 model, released by Mistral AI that outperforms Llama2 13B in benchmarks.",
-    "logoUrl": "https://huggingface.co/datasets/huggingchat/models-logo/resolve/main/mistral-logo.png",
-    "websiteUrl": "https://mistral.ai/news/announcing-mistral-7b/",
-    "modelUrl": "https://huggingface.co/mistralai/Mistral-7B-Instruct-v0.1",
-    "tokenizer": "mistralai/Mistral-7B-Instruct-v0.1",
-    "preprompt": "",
-    "chatPromptTemplate" : "<s>{{#each messages}}{{#ifUser}}[INST] {{#if @first}}{{#if @root.preprompt}}{{@root.preprompt}}\n{{/if}}{{/if}}{{content}} [/INST]{{/ifUser}}{{#ifAssistant}}{{content}}</s>{{/ifAssistant}}{{/each}}",
+    "name": "meta-llama/Meta-Llama-3-8B-Instruct",
+    "tokenizer" : "philschmid/meta-llama-3-tokenizer",
     "parameters": {
       "temperature": 0.1,
-      "top_p": 0.95,
-      "repetition_penalty": 1.2,
-      "top_k": 50,
-      "truncate": 3072,
-      "max_new_tokens": 1024,
-      "stop": ["</s>"]
+      "stop": ["<|eot_id|>"],
     },
-    "promptExamples": [
-      {
-        "title": "Write an email from bullet list",
-        "prompt": "As a restaurant owner, write a professional email to the supplier to get these products every week: \n\n- Wine (x10)\n- Eggs (x24)\n- Bread (x12)"
-      }, {
-        "title": "Code a snake game",
-        "prompt": "Code a basic snake game in python, give explanations for each step."
-      }, {
-        "title": "Assist in a task",
-        "prompt": "How do I make a delicious lemon cheesecake?"
-      }
-    ],
     "unlisted": true
   },
   {
@@ -246,7 +222,7 @@ OLD_MODELS=`[
   {"name": "openchat/openchat-3.5-0106"}
 ]`

-TASK_MODEL='meta-llama/Meta-Llama-3-70B-Instruct'
+TASK_MODEL='meta-llama/Meta-Llama-3-8B-Instruct'

 TEXT_EMBEDDING_MODELS = `[
   {