# template used in production for HuggingChat.
MODELS=`[
  {
    "name" : "CohereForAI/c4ai-command-r-plus",
    "tokenizer": "Xenova/c4ai-command-r-v01-tokenizer",
    "description": "Command R+ is Cohere's latest LLM and is the first open weight model to beat GPT4 in the Chatbot Arena!",
    "modelUrl": "https://huggingface.co/CohereForAI/c4ai-command-r-plus",
    "websiteUrl": "https://docs.cohere.com/docs/command-r-plus",
    "logoUrl": "https://huggingface.co/datasets/huggingchat/models-logo/resolve/main/cohere-logo.png",
    "parameters": {
      "stop": ["<|END_OF_TURN_TOKEN|>"],
      "truncate" : 24576,
      "max_new_tokens" : 8192,
      "temperature" : 0.3
    }
  },
  {
    "name" : "mistralai/Mixtral-8x7B-Instruct-v0.1",
    "description" : "The latest MoE model from Mistral AI! 8x7B and outperforms Llama 2 70B in most benchmarks.",
    "logoUrl": "https://huggingface.co/datasets/huggingchat/models-logo/resolve/main/mistral-logo.png",
    "websiteUrl" : "https://mistral.ai/news/mixtral-of-experts/",
    "modelUrl": "https://huggingface.co/mistralai/Mixtral-8x7B-Instruct-v0.1",
    "tokenizer": "mistralai/Mixtral-8x7B-Instruct-v0.1",
    "preprompt" : "",
    "chatPromptTemplate": "<s> {{#each messages}}{{#ifUser}}[INST]{{#if @first}}{{#if @root.preprompt}}{{@root.preprompt}}\n{{/if}}{{/if}} {{content}} [/INST]{{/ifUser}}{{#ifAssistant}} {{content}}</s> {{/ifAssistant}}{{/each}}",
    "parameters" : {
      "temperature" : 0.6,
      "top_p" : 0.95,
      "repetition_penalty" : 1.2,
      "top_k" : 50,
      "truncate" : 24576,
      "max_new_tokens" : 8192,
      "stop" : ["</s>"]
    },
    "promptExamples" : [
      {
        "title": "Write an email from bullet list",
        "prompt": "As a restaurant owner, write a professional email to the supplier to get these products every week: \n\n- Wine (x10)\n- Eggs (x24)\n- Bread (x12)"
      }, {
        "title": "Code a snake game",
        "prompt": "Code a basic snake game in python, give explanations for each step."
      }, {
        "title": "Assist in a task",
        "prompt": "How do I make a delicious lemon cheesecake?"
      }
    ]
  },
  {
    "name" : "google/gemma-1.1-7b-it",
    "description": "Gemma 7B 1.1 is the latest release in the Gemma family of lightweight models built by Google, trained using a novel RLHF method.",
    "websiteUrl" : "https://blog.google/technology/developers/gemma-open-models/",
    "logoUrl": "https://huggingface.co/datasets/huggingchat/models-logo/resolve/main/google-logo.png",
    "modelUrl": "https://huggingface.co/google/gemma-1.1-7b-it",
    "preprompt": "",
    "chatPromptTemplate" : "{{#each messages}}{{#ifUser}}<start_of_turn>user\n{{#if @first}}{{#if @root.preprompt}}{{@root.preprompt}}\n{{/if}}{{/if}}{{content}}<end_of_turn>\n<start_of_turn>model\n{{/ifUser}}{{#ifAssistant}}{{content}}<end_of_turn>\n{{/ifAssistant}}{{/each}}",
    "promptExamples": [
      {
        "title": "Write an email from bullet list",
        "prompt": "As a restaurant owner, write a professional email to the supplier to get these products every week: \n\n- Wine (x10)\n- Eggs (x24)\n- Bread (x12)"
      }, {
        "title": "Code a snake game",
        "prompt": "Code a basic snake game in python, give explanations for each step."
      }, {
        "title": "Assist in a task",
        "prompt": "How do I make a delicious lemon cheesecake?"
      }
    ],
    "parameters": {
      "do_sample": true,
      "truncate": 7168,
      "max_new_tokens": 1024,
      "stop" : ["<end_of_turn>"]
    }
  },
  {
    "name" : "NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO",
    "description" : "Nous Hermes 2 Mixtral 8x7B DPO is the new flagship Nous Research model trained over the Mixtral 8x7B MoE LLM.",
    "logoUrl": "https://huggingface.co/datasets/huggingchat/models-logo/resolve/main/nous-logo.png",
    "websiteUrl" : "https://nousresearch.com/",
    "modelUrl": "https://huggingface.co/NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO",
    "tokenizer": "NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO",
    "chatPromptTemplate" : "{{#if @root.preprompt}}<|im_start|>system\n{{@root.preprompt}}<|im_end|>\n{{/if}}{{#each messages}}{{#ifUser}}<|im_start|>user\n{{content}}<|im_end|>\n<|im_start|>assistant\n{{/ifUser}}{{#ifAssistant}}{{content}}<|im_end|>\n{{/ifAssistant}}{{/each}}",
    "promptExamples": [
      {
        "title": "Write an email from bullet list",
        "prompt": "As a restaurant owner, write a professional email to the supplier to get these products every week: \n\n- Wine (x10)\n- Eggs (x24)\n- Bread (x12)"
      }, {
        "title": "Code a snake game",
        "prompt": "Code a basic snake game in python, give explanations for each step."
      }, {
        "title": "Assist in a task",
        "prompt": "How do I make a delicious lemon cheesecake?"
      }
    ],
    "parameters": {
      "temperature": 0.7,
      "top_p": 0.95,
      "repetition_penalty": 1,
      "top_k": 50,
      "truncate": 24576,
      "max_new_tokens": 2048,
      "stop": ["<|im_end|>"]
    }
  },
  {
    "name": "mistralai/Mistral-7B-Instruct-v0.1",
    "displayName": "mistralai/Mistral-7B-Instruct-v0.1",
    "description": "Mistral 7B is a new Apache 2.0 model, released by Mistral AI that outperforms Llama2 13B in benchmarks.",
    "logoUrl": "https://huggingface.co/datasets/huggingchat/models-logo/resolve/main/mistral-logo.png",
    "websiteUrl": "https://mistral.ai/news/announcing-mistral-7b/",
    "modelUrl": "https://huggingface.co/mistralai/Mistral-7B-Instruct-v0.1",
    "tokenizer": "mistralai/Mistral-7B-Instruct-v0.1",
    "preprompt": "",
    "chatPromptTemplate" : "<s>{{#each messages}}{{#ifUser}}[INST] {{#if @first}}{{#if @root.preprompt}}{{@root.preprompt}}\n{{/if}}{{/if}}{{content}} [/INST]{{/ifUser}}{{#ifAssistant}}{{content}}</s>{{/ifAssistant}}{{/each}}",
    "parameters": {
      "temperature": 0.1,
      "top_p": 0.95,
      "repetition_penalty": 1.2,
      "top_k": 50,
      "truncate": 3072,
      "max_new_tokens": 1024,
      "stop": ["</s>"]
    },
    "promptExamples": [
      {
        "title": "Write an email from bullet list",
        "prompt": "As a restaurant owner, write a professional email to the supplier to get these products every week: \n\n- Wine (x10)\n- Eggs (x24)\n- Bread (x12)"
      }, {
        "title": "Code a snake game",
        "prompt": "Code a basic snake game in python, give explanations for each step."
      }, {
        "title": "Assist in a task",
        "prompt": "How do I make a delicious lemon cheesecake?"
      }
    ],
    "unlisted": true
  },
  {
    "name": "mistralai/Mistral-7B-Instruct-v0.2",
    "displayName": "mistralai/Mistral-7B-Instruct-v0.2",
    "description": "Mistral 7B is a new Apache 2.0 model, released by Mistral AI that outperforms Llama2 13B in benchmarks.",
    "logoUrl": "https://huggingface.co/datasets/huggingchat/models-logo/resolve/main/mistral-logo.png",
    "websiteUrl": "https://mistral.ai/news/announcing-mistral-7b/",
    "modelUrl": "https://huggingface.co/mistralai/Mistral-7B-Instruct-v0.2",
    "tokenizer": "mistralai/Mistral-7B-Instruct-v0.2",
    "preprompt": "",
    "chatPromptTemplate" : "<s>{{#each messages}}{{#ifUser}}[INST] {{#if @first}}{{#if @root.preprompt}}{{@root.preprompt}}\n{{/if}}{{/if}}{{content}} [/INST]{{/ifUser}}{{#ifAssistant}}{{content}}</s>{{/ifAssistant}}{{/each}}",
    "parameters": {
      "temperature": 0.3,
      "top_p": 0.95,
      "repetition_penalty": 1.2,
      "top_k": 50,
      "truncate": 3072,
      "max_new_tokens": 1024,
      "stop": ["</s>"]
    },
    "promptExamples": [
      {
        "title": "Write an email from bullet list",
        "prompt": "As a restaurant owner, write a professional email to the supplier to get these products every week: \n\n- Wine (x10)\n- Eggs (x24)\n- Bread (x12)"
      }, {
        "title": "Code a snake game",
        "prompt": "Code a basic snake game in python, give explanations for each step."
      }, {
        "title": "Assist in a task",
        "prompt": "How do I make a delicious lemon cheesecake?"
      }
    ]
  }
]`
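# Sketch, not part of the production template above: a minimal MODELS entry for a
# self-hosted deployment, kept commented out. The "endpoints" field and the "tgi"
# endpoint type are an assumption based on the chat-ui model-configuration docs;
# check the keys against the chat-ui version you run before enabling it.
# MODELS=`[
#   {
#     "name": "mistralai/Mistral-7B-Instruct-v0.2",
#     "tokenizer": "mistralai/Mistral-7B-Instruct-v0.2",
#     "endpoints": [{ "type": "tgi", "url": "http://127.0.0.1:8080" }],
#     "parameters": { "temperature": 0.3, "truncate": 3072, "max_new_tokens": 1024, "stop": ["</s>"] }
#   }
# ]`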
OLD_MODELS=`[
  {"name":"bigcode/starcoder"},
  {"name":"OpenAssistant/oasst-sft-6-llama-30b-xor"},
  {"name":"HuggingFaceH4/zephyr-7b-alpha"},
  {"name":"openchat/openchat_3.5"},
  {"name":"openchat/openchat-3.5-1210"},
  {"name": "tiiuae/falcon-180B-chat"},
  {"name": "codellama/CodeLlama-34b-Instruct-hf"},
  {"name": "google/gemma-7b-it"},
  {"name": "meta-llama/Llama-2-70b-chat-hf"},
  {"name": "codellama/CodeLlama-70b-Instruct-hf"},
  {"name": "openchat/openchat-3.5-0106"}
]`
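# OLD_MODELS above lists models retired from the selector so that existing conversations
# which used them can still be loaded; TASK_MODEL below is the model used for internal
# tasks such as naming conversations and building web-search queries.
# (Assumed semantics based on the chat-ui documentation; verify against your version.)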
TASK_MODEL='mistralai/Mistral-7B-Instruct-v0.1'
APP_BASE="/chat"
PUBLIC_ORIGIN=https://huggingface.co
PUBLIC_SHARE_PREFIX=https://hf.co/chat
PUBLIC_ANNOUNCEMENT_BANNERS=`[]`
PUBLIC_APP_NAME=HuggingChat
PUBLIC_APP_ASSETS=huggingchat
PUBLIC_APP_COLOR=yellow
PUBLIC_APP_DESCRIPTION="Making the community's best AI chat models available to everyone."
PUBLIC_APP_DISCLAIMER_MESSAGE="Disclaimer: AI is an area of active research with known problems such as biased generation and misinformation. Do not use this application for high-stakes decisions or advice."
PUBLIC_APP_DATA_SHARING=1
PUBLIC_APP_DISCLAIMER=1
PUBLIC_GOOGLE_ANALYTICS_ID=G-8Q63TH4CSL
PUBLIC_PLAUSIBLE_SCRIPT_URL="/js/script.js"
# Not part of the .env but set as other variables in the space
# ADDRESS_HEADER=X-Forwarded-For
# XFF_DEPTH=2
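# ENABLE_ASSISTANTS turns on the community Assistants feature, ENABLE_ASSISTANTS_RAG
# additionally lets assistants use web search, and EXPOSE_API exposes the app's HTTP API
# routes. (Assumed semantics from the chat-ui docs; verify against your version.)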
ENABLE_ASSISTANTS=true
ENABLE_ASSISTANTS_RAG=true
EXPOSE_API=true
ALTERNATIVE_REDIRECT_URLS=`[
  "huggingchat://login/callback"
]`
WEBSEARCH_BLOCKLIST=`["youtube.com", "twitter.com"]`
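# Secrets such as the MongoDB connection string and the Hugging Face access token are
# not part of this template; on the Space they are set as private variables. A purely
# hypothetical local override (for example in .env.local) might look like the commented
# lines below; exact variable names can differ between chat-ui versions.
# MONGODB_URL=mongodb://localhost:27017
# HF_TOKEN=<your Hugging Face access token>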