Spaces:
Paused
Paused
Add CohereForAI/c4ai-command-r-plus-4bit as Command-R-Plus-Lite
#415
by
deleted
- opened
- .env.template +29 -0
.env.template
CHANGED
@@ -1,6 +1,35 @@
|
|
1 |
# template used in production for HuggingChat.
|
2 |
|
3 |
MODELS=`[
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
4 |
{
|
5 |
"name" : "CohereForAI/c4ai-command-r-plus",
|
6 |
"tokenizer": "Xenova/c4ai-command-r-v01-tokenizer",
|
|
|
1 |
# template used in production for HuggingChat.
|
2 |
|
3 |
MODELS=`[
|
4 |
+
{
|
5 |
+
"name" : "CohereForAI/c4ai-command-r-plus-lite",
|
6 |
+
"tokenizer": "Xenova/c4ai-command-r-v01-tokenizer",
|
7 |
+
"description": "Command R+ is Cohere's latest LLM and is the first open-weight model to beat GPT-4 in the Chatbot Arena! This is the 4-bit quantized version with a 64K context window.",
|
8 |
+
"modelUrl": "https://huggingface.co/CohereForAI/c4ai-command-r-plus-4bit",
|
9 |
+
"websiteUrl": "https://docs.cohere.com/docs/command-r-plus",
|
10 |
+
"logoUrl": "https://huggingface.co/datasets/huggingchat/models-logo/resolve/main/cohere-logo.png",
|
11 |
+
"parameters": {
|
12 |
+
"stop": ["<|END_OF_TURN_TOKEN|>"],
|
13 |
+
"top_p" : 0.1,
|
14 |
+
"repetition_penalty" : 0.8,
|
15 |
+
"top_k" : 20,
|
16 |
+
"truncate" : 65536,
|
17 |
+
"max_new_tokens" : 8192,
|
18 |
+
"temperature" : 0.3
|
19 |
+
},
|
20 |
+
"promptExamples" : [
|
21 |
+
{
|
22 |
+
"title": "Write an email from bullet list",
|
23 |
+
"prompt": "As a restaurant owner, write a professional email to the supplier to get these products every week: \n\n- Wine (x10)\n- Eggs (x24)\n- Bread (x12)"
|
24 |
+
}, {
|
25 |
+
"title": "Code a snake game",
|
26 |
+
"prompt": "Code a basic snake game in python, give explanations for each step."
|
27 |
+
}, {
|
28 |
+
"title": "Assist in a task",
|
29 |
+
"prompt": "How do I make a delicious lemon cheesecake?"
|
30 |
+
}
|
31 |
+
]
|
32 |
+
},
|
33 |
{
|
34 |
"name" : "CohereForAI/c4ai-command-r-plus",
|
35 |
"tokenizer": "Xenova/c4ai-command-r-v01-tokenizer",
|