# template used in production for HuggingChat.
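# MODELS is a JSON array with one entry per model served on HuggingChat: UI metadata
# (description, logo, links), the tokenizer or chat prompt template, generation
# parameters, and example prompts surfaced in the UI.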
MODELS=`[
{
"name" : "CohereForAI/c4ai-command-r-plus",
"tokenizer": "Xenova/c4ai-command-r-v01-tokenizer",
"description": "Command R+ is Cohere's latest LLM and is the first open weight model to beat GPT4 in the Chatbot Arena!",
"modelUrl": "https://huggingface.co/CohereForAI/c4ai-command-r-plus",
"websiteUrl": "https://docs.cohere.com/docs/command-r-plus",
"logoUrl": "https://huggingface.co/datasets/huggingchat/models-logo/resolve/main/cohere-logo.png",
"parameters": {
"stop": ["<|END_OF_TURN_TOKEN|>"],
"truncate" : 28672,
"max_new_tokens" : 4096,
"temperature" : 0.3
},
"promptExamples" : [
{
"title": "Write an email from bullet list",
"prompt": "As a restaurant owner, write a professional email to the supplier to get these products every week: \n\n- Wine (x10)\n- Eggs (x24)\n- Bread (x12)"
}, {
"title": "Code a snake game",
"prompt": "Code a basic snake game in python, give explanations for each step."
}, {
"title": "Assist in a task",
"prompt": "How do I make a delicious lemon cheesecake?"
}
]
},
{
"name" : "meta-llama/Meta-Llama-3-70B-Instruct",
"description": "Generation over generation, Meta Llama 3 demonstrates state-of-the-art performance on a wide range of industry benchmarks and offers new capabilities, including improved reasoning.",
"logoUrl": "https://huggingface.co/datasets/huggingchat/models-logo/resolve/main/meta-logo.png",
"modelUrl": "https://huggingface.co/meta-llama/Meta-Llama-3-70B-Instruct",
"websiteUrl": "https://llama.meta.com/llama3/",
"tokenizer" : "philschmid/meta-llama-3-tokenizer",
"promptExamples" : [
{
"title": "Write an email from bullet list",
"prompt": "As a restaurant owner, write a professional email to the supplier to get these products every week: \n\n- Wine (x10)\n- Eggs (x24)\n- Bread (x12)"
}, {
"title": "Code a snake game",
"prompt": "Code a basic snake game in python, give explanations for each step."
}, {
"title": "Assist in a task",
"prompt": "How do I make a delicious lemon cheesecake?"
}
],
"parameters": {
"stop": ["<|eot_id|>"],
"truncate": 6144,
"max_new_tokens": 2047
}
},
{
"name" : "HuggingFaceH4/zephyr-orpo-141b-A35b-v0.1",
"tokenizer": "HuggingFaceH4/zephyr-orpo-141b-A35b-v0.1",
"description": "Zephyr 141B-A35B is a fine-tuned version of Mistral 8x22B, trained using ORPO, a novel alignment algorithm.",
"modelUrl": "https://huggingface.co/HuggingFaceH4/zephyr-orpo-141b-A35b-v0.1",
"websiteUrl": "https://huggingface.co/HuggingFaceH4/zephyr-orpo-141b-A35b-v0.1",
"logoUrl": "https://huggingface.co/datasets/huggingchat/models-logo/resolve/main/zephyr-logo.png",
"parameters": {
"truncate" : 24576,
"max_new_tokens" : 8192,
},
"preprompt" : "You are Zephyr, an assistant developed by KAIST AI, Argilla, and Hugging Face. You should give concise responses to very simple questions, but provide thorough responses to more complex and open-ended questions. You are happy to help with writing, analysis, question answering, math, coding, and all sorts of other tasks.",
"promptExamples" : [
{
"title": "Write a poem",
"prompt": "Write a poem to help me remember the first 10 elements on the periodic table, giving each element its own line."
}, {
"title": "Code a snake game",
"prompt": "Code a basic snake game in python, give explanations for each step."
}, {
"title": "Assist in a task",
"prompt": "How do I make a delicious lemon cheesecake?"
}
]
},
{
"name" : "mistralai/Mixtral-8x7B-Instruct-v0.1",
"description" : "The latest MoE model from Mistral AI! 8x7B and outperforms Llama 2 70B in most benchmarks.",
"logoUrl": "https://huggingface.co/datasets/huggingchat/models-logo/resolve/main/mistral-logo.png",
"websiteUrl" : "https://mistral.ai/news/mixtral-of-experts/",
"modelUrl": "https://huggingface.co/mistralai/Mixtral-8x7B-Instruct-v0.1",
"tokenizer": "mistralai/Mixtral-8x7B-Instruct-v0.1",
"preprompt" : "",
"chatPromptTemplate": "<s> {{#each messages}}{{#ifUser}}[INST]{{#if @first}}{{#if @root.preprompt}}{{@root.preprompt}}\n{{/if}}{{/if}} {{content}} [/INST]{{/ifUser}}{{#ifAssistant}} {{content}}</s> {{/ifAssistant}}{{/each}}",
"parameters" : {
"temperature" : 0.6,
"top_p" : 0.95,
"repetition_penalty" : 1.2,
"top_k" : 50,
"truncate" : 24576,
"max_new_tokens" : 8192,
"stop" : ["</s>"]
},
"promptExamples" : [
{
"title": "Write an email from bullet list",
"prompt": "As a restaurant owner, write a professional email to the supplier to get these products every week: \n\n- Wine (x10)\n- Eggs (x24)\n- Bread (x12)"
}, {
"title": "Code a snake game",
"prompt": "Code a basic snake game in python, give explanations for each step."
}, {
"title": "Assist in a task",
"prompt": "How do I make a delicious lemon cheesecake?"
}
]
},
{
"name" : "NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO",
"description" : "Nous Hermes 2 Mixtral 8x7B DPO is the new flagship Nous Research model trained over the Mixtral 8x7B MoE LLM.",
"logoUrl": "https://huggingface.co/datasets/huggingchat/models-logo/resolve/main/nous-logo.png",
"websiteUrl" : "https://nousresearch.com/",
"modelUrl": "https://huggingface.co/NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO",
"tokenizer": "NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO",
"chatPromptTemplate" : "{{#if @root.preprompt}}<|im_start|>system\n{{@root.preprompt}}<|im_end|>\n{{/if}}{{#each messages}}{{#ifUser}}<|im_start|>user\n{{content}}<|im_end|>\n<|im_start|>assistant\n{{/ifUser}}{{#ifAssistant}}{{content}}<|im_end|>\n{{/ifAssistant}}{{/each}}",
"promptExamples": [
{
"title": "Write an email from bullet list",
"prompt": "As a restaurant owner, write a professional email to the supplier to get these products every week: \n\n- Wine (x10)\n- Eggs (x24)\n- Bread (x12)"
}, {
"title": "Code a snake game",
"prompt": "Code a basic snake game in python, give explanations for each step."
}, {
"title": "Assist in a task",
"prompt": "How do I make a delicious lemon cheesecake?"
}
],
"parameters": {
"temperature": 0.7,
"top_p": 0.95,
"repetition_penalty": 1,
"top_k": 50,
"truncate": 24576,
"max_new_tokens": 2048,
"stop": ["<|im_end|>"]
}
},
{
"name" : "google/gemma-1.1-7b-it",
"description": "Gemma 7B 1.1 is the latest release in the Gemma family of lightweight models built by Google, trained using a novel RLHF method.",
"websiteUrl" : "https://blog.google/technology/developers/gemma-open-models/",
"logoUrl": "https://huggingface.co/datasets/huggingchat/models-logo/resolve/main/google-logo.png",
"modelUrl": "https://huggingface.co/google/gemma-1.1-7b-it",
"preprompt": "",
"chatPromptTemplate" : "{{#each messages}}{{#ifUser}}<start_of_turn>user\n{{#if @first}}{{#if @root.preprompt}}{{@root.preprompt}}\n{{/if}}{{/if}}{{content}}<end_of_turn>\n<start_of_turn>model\n{{/ifUser}}{{#ifAssistant}}{{content}}<end_of_turn>\n{{/ifAssistant}}{{/each}}",
"promptExamples": [
{
"title": "Write an email from bullet list",
"prompt": "As a restaurant owner, write a professional email to the supplier to get these products every week: \n\n- Wine (x10)\n- Eggs (x24)\n- Bread (x12)"
}, {
"title": "Code a snake game",
"prompt": "Code a basic snake game in python, give explanations for each step."
}, {
"title": "Assist in a task",
"prompt": "How do I make a delicious lemon cheesecake?"
}
],
"parameters": {
"do_sample": true,
"truncate": 7168,
"max_new_tokens": 1024,
"stop" : ["<end_of_turn>"]
}
},
{
"name": "mistralai/Mistral-7B-Instruct-v0.2",
"displayName": "mistralai/Mistral-7B-Instruct-v0.2",
"description": "Mistral 7B is a new Apache 2.0 model, released by Mistral AI that outperforms Llama2 13B in benchmarks.",
"logoUrl": "https://huggingface.co/datasets/huggingchat/models-logo/resolve/main/mistral-logo.png",
"websiteUrl": "https://mistral.ai/news/announcing-mistral-7b/",
"modelUrl": "https://huggingface.co/mistralai/Mistral-7B-Instruct-v0.2",
"tokenizer": "mistralai/Mistral-7B-Instruct-v0.2",
"preprompt": "",
"chatPromptTemplate" : "<s>{{#each messages}}{{#ifUser}}[INST] {{#if @first}}{{#if @root.preprompt}}{{@root.preprompt}}\n{{/if}}{{/if}}{{content}} [/INST]{{/ifUser}}{{#ifAssistant}}{{content}}</s>{{/ifAssistant}}{{/each}}",
"parameters": {
"temperature": 0.3,
"top_p": 0.95,
"repetition_penalty": 1.2,
"top_k": 50,
"truncate": 3072,
"max_new_tokens": 1024,
"stop": ["</s>"]
},
"promptExamples": [
{
"title": "Write an email from bullet list",
"prompt": "As a restaurant owner, write a professional email to the supplier to get these products every week: \n\n- Wine (x10)\n- Eggs (x24)\n- Bread (x12)"
}, {
"title": "Code a snake game",
"prompt": "Code a basic snake game in python, give explanations for each step."
}, {
"title": "Assist in a task",
"prompt": "How do I make a delicious lemon cheesecake?"
}
]
},
{
"name": "microsoft/Phi-3-mini-4k-instruct",
"tokenizer": "microsoft/Phi-3-mini-4k-instruct",
"description" : "Phi-3 Mini-4K-Instruct is a 3.8B parameters, lightweight, state-of-the-art open model built upon datasets used for Phi-2.",
"logoUrl": "https://huggingface.co/datasets/huggingchat/models-logo/resolve/main/microsoft-logo.png",
"modelUrl": "https://huggingface.co/microsoft/Phi-3-mini-4k-instruct",
"websiteUrl": "https://azure.microsoft.com/en-us/blog/introducing-phi-3-redefining-whats-possible-with-slms/",
"preprompt": "",
"chatPromptTemplate": "<s>{{preprompt}}{{#each messages}}{{#ifUser}}<|user|>\n{{content}}<|end|>\n<|assistant|>\n{{/ifUser}}{{#ifAssistant}}{{content}}<|end|>\n{{/ifAssistant}}{{/each}}",
"parameters": {
"stop": ["<|end|>", "<|endoftext|>", "<|assistant|>"],
"max_new_tokens": 1024,
"truncate": 3071
},
"promptExamples": [
{
"title": "Write an email from bullet list",
"prompt": "As a restaurant owner, write a professional email to the supplier to get these products every week: \n\n- Wine (x10)\n- Eggs (x24)\n- Bread (x12)"
}, {
"title": "Code a snake game",
"prompt": "Code a basic snake game in python, give explanations for each step."
}, {
"title": "Assist in a task",
"prompt": "How do I make a delicious lemon cheesecake?"
}
]
},
{
"name": "meta-llama/Meta-Llama-3-8B-Instruct",
"tokenizer" : "philschmid/meta-llama-3-tokenizer",
"parameters": {
"temperature": 0.1,
"stop": ["<|eot_id|>"],
"truncate": 1024,
},
"unlisted": true
}
]`
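# Models that are no longer served. Keeping them listed lets existing conversations
# that reference them still resolve, so users can be pointed to a current model instead.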
OLD_MODELS=`[
{"name":"bigcode/starcoder"},
{"name":"OpenAssistant/oasst-sft-6-llama-30b-xor"},
{"name":"HuggingFaceH4/zephyr-7b-alpha"},
{"name":"openchat/openchat_3.5"},
{"name":"openchat/openchat-3.5-1210"},
{"name": "tiiuae/falcon-180B-chat"},
{"name": "codellama/CodeLlama-34b-Instruct-hf"},
{"name": "google/gemma-7b-it"},
{"name": "meta-llama/Llama-2-70b-chat-hf"},
{"name": "codellama/CodeLlama-70b-Instruct-hf"},
{"name": "openchat/openchat-3.5-0106"}
]`
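# Model used for internal tasks such as generating conversation titles and web-search queries.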
TASK_MODEL='meta-llama/Meta-Llama-3-8B-Instruct'
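# Embedding models used for web search / RAG: documents are split into chunks of
# "chunkCharLength" characters and embedded via a Text Embeddings Inference ("tei") endpoint.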
TEXT_EMBEDDING_MODELS = `[
{
"name": "bge-base-en-v1-5-sxa",
"displayName": "bge-base-en-v1-5-sxa",
"chunkCharLength": 512,
"endpoints": [
{ "type": "tei",
"url" : "https://huggingchat-tei.hf.space/"
}
]
}
]`
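# Public-facing app configuration: base path, origin, branding, disclaimers, and analytics.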
APP_BASE="/chat"
PUBLIC_ORIGIN=https://huggingface.co
PUBLIC_SHARE_PREFIX=https://hf.co/chat
PUBLIC_ANNOUNCEMENT_BANNERS=`[]`
PUBLIC_APP_NAME=HuggingChat
PUBLIC_APP_ASSETS=huggingchat
PUBLIC_APP_COLOR=yellow
PUBLIC_APP_DESCRIPTION="Making the community's best AI chat models available to everyone."
PUBLIC_APP_DISCLAIMER_MESSAGE="Disclaimer: AI is an area of active research with known problems such as biased generation and misinformation. Do not use this application for high-stakes decisions or advice."
PUBLIC_APP_DATA_SHARING=0
PUBLIC_APP_DISCLAIMER=1
PUBLIC_PLAUSIBLE_SCRIPT_URL="/js/script.js"
PUBLIC_APPLE_APP_ID=6476778843
# Not part of the .env file, but set as additional variables in the Space
# ADDRESS_HEADER=X-Forwarded-For
# XFF_DEPTH=2
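# Feature flags: community assistants, retrieval (RAG) for assistants, restricting the
# assistants page to featured assistants, and exposing the public API.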
ENABLE_ASSISTANTS=true
ENABLE_ASSISTANTS_RAG=true
REQUIRE_FEATURED_ASSISTANTS=true
EXPOSE_API=true
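# Additional OAuth redirect URLs accepted at login, e.g. the deep link used by the mobile app.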
ALTERNATIVE_REDIRECT_URLS=`[
"huggingchat://login/callback"
]`
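# Domains excluded from web-search results.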
WEBSEARCH_BLOCKLIST=`["youtube.com", "twitter.com"]`