[
  {
    "name": "StableLM 12B",
    "filename": "stablelm-2-12b-chat-Q4_0.gguf",
    "url": "https://huggingface.co/SHMIS/ABS_AI_Helper/resolve/main/stablelm-2-12b-chat-Q4_0.gguf?download=true",
    "promptTemplate": "<|im_start|>user\n%1<|im_end|>\n<|im_start|>assistant\n%2<|im_end|>\n",
    "systemPrompt": "%1\n",
    "stopWord": ["<|im_end|>"],
    "type": "model",
    "requireRam": 17
  },
  {
    "name": "Phi-3 medium",
    "filename": "phi-3-medium-4k-instruct-q4_0.gguf",
    "url": "https://huggingface.co/SHMIS/ABS_AI_Helper/resolve/main/phi-3-medium-4k-instruct-q4_0.gguf?download=true",
    "promptTemplate": "<|user|>\n%1<|end|>\n<|assistant|>\n%2<|end|>\n",
    "systemPrompt": "<|system|>\n%1<|end|>\n",
    "stopWord": ["<|end|>"],
    "type": "model",
    "requireRam": 17
  },
  {
    "name": "vicuna-13b-16k",
    "filename": "vicuna-13b-v1.5-16k.Q4_0.gguf",
    "url": "https://huggingface.co/TheBloke/vicuna-13B-v1.5-16K-GGUF/resolve/main/vicuna-13b-v1.5-16k.Q4_0.gguf?download=true",
    "promptTemplate": "User:\n%1\nAssistant:\n%2\n",
    "systemPrompt": "System:\n%1\n",
    "stopWord": ["<s>", "</s>"],
    "type": "model",
    "requireRam": 17
  },
  {
    "name": "Llama 3 8b neural",
    "filename": "llama-3-neural-chat-v2.2-8B.Q4_0.gguf",
    "url": "https://huggingface.co/SHMIS/ABS_AI_Helper/resolve/main/llama-3-neural-chat-v2.2-8B.Q4_0.gguf?download=true",
    "promptTemplate": "<|im_start|>user\n%1<|im_end|>\n<|im_start|>assistant\n%2<|im_end|>\n",
    "systemPrompt": "<|im_start|>system\n%1<|im_end|>\n",
    "stopWord": ["<|im_end|>"],
    "type": "model",
    "requireRam": 12
  },
  {
    "name": "Llama 3 8b",
    "filename": "Meta-Llama-3-8B-Instruct.Q4_0.gguf",
    "url": "https://huggingface.co/QuantFactory/Meta-Llama-3-8B-Instruct-GGUF/resolve/main/Meta-Llama-3-8B-Instruct.Q4_0.gguf?download=true",
    "promptTemplate": "<|start_header_id|>user<|end_header_id|>\n%1\n<|eot_id|>\n<|start_header_id|>assistant<|end_header_id|>\n%2\n<|eot_id|>\n",
    "systemPrompt": "<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n%1\n<|eot_id|>\n",
    "stopWord": ["<|eot_id|>"],
    "type": "model",
    "requireRam": 12
  },
  {
    "name": "openchat 3.6",
    "filename": "openchat-3.6-8b-20240522.Q4_0.gguf",
    "url": "https://huggingface.co/crusoeai/openchat-3.6-8b-20240522-GGUF/resolve/main/openchat-3.6-8b-20240522.Q4_0.gguf?download=true",
    "promptTemplate": "<|start_header_id|>GPT4 Correct User<|end_header_id|>\n%1<|eot_id|>\n<|start_header_id|>GPT4 Correct Assistant<|end_header_id|>\n%2<|eot_id|>\n",
    "systemPrompt": "<|start_header_id|>GPT4 Correct System<|end_header_id|>\n%1<|eot_id|>\n",
    "stopWord": ["<|eot_id|>"],
    "type": "model",
    "requireRam": 12
  },
  {
    "name": "QWen 1.5 MoE",
    "filename": "Qwen1.5-MoE-A2.7B-Chat.Q4_0.gguf",
    "url": "https://huggingface.co/RichardErkhov/Qwen_-_Qwen1.5-MoE-A2.7B-Chat-gguf/resolve/main/Qwen1.5-MoE-A2.7B-Chat.Q4_0.gguf?download=true",
    "promptTemplate": "<|im_start|>user\n%1\n<|im_end|>\n<|im_start|>assistant\n%2\n<|im_end|>\n",
    "systemPrompt": "<|im_start|>system\n%1\n<|im_end|>\n",
    "stopWord": ["<|im_end|>", "<|im_start|>"],
    "type": "model",
    "requireRam": 12
  },
  {
    "name": "Phi-3 mini nvidia",
    "filename": "phi3-4k-it-nvidia-chat-q4-GGUF.gguf",
    "url": "https://huggingface.co/SHMIS/ABS_AI_Helper/resolve/main/phi3-4k-it-nvidia-chat-q4-GGUF.gguf?download=true",
    "promptTemplate": "<|user|>\n%1<|end|>\n<|assistant|>\n%2<|end|>\n",
    "systemPrompt": "<|system|>\n%1<|end|>\n",
    "stopWord": ["<|end|>"],
    "type": "model",
    "requireRam": 0
  },
  {
    "name": "Phi-3 mini",
    "filename": "Phi-3-mini-128k-instruct.Q4_0.gguf",
    "url": "https://huggingface.co/PrunaAI/Phi-3-mini-128k-instruct-GGUF-Imatrix-smashed/resolve/main/Phi-3-mini-128k-instruct.Q4_0.gguf?download=true",
    "promptTemplate": "<|user|>\n%1<|end|>\n<|assistant|>\n%2<|end|>\n",
    "systemPrompt": "<|system|>\n%1<|end|>\n",
    "stopWord": ["<|end|>"],
    "type": "model",
    "requireRam": 0
  },
  {
    "name": "gemma 2b",
    "filename": "gemma-2b-it.Q4_0.gguf",
    "url": "https://huggingface.co/mlabonne/gemma-2b-it-GGUF/resolve/main/gemma-2b-it.Q4_0.gguf?download=true",
    "promptTemplate": "<start_of_turn>user\n%1\n<end_of_turn>\n<start_of_turn>model\n%2\n<end_of_turn>\n",
    "systemPrompt": "<start_of_turn>system\n%1<end_of_turn>\n",
    "stopWord": ["<end_of_turn>"],
    "type": "model",
    "requireRam": 0
  },
  {
    "name": "QWen 1.5 4b",
    "filename": "qwen1_5-4b-chat-q4_0.gguf",
    "url": "https://huggingface.co/Qwen/Qwen1.5-4B-Chat-GGUF/resolve/main/qwen1_5-4b-chat-q4_0.gguf?download=true",
    "promptTemplate": "<|im_start|>user\n%1<|im_end|>\n<|im_start|>assistant\n%2<|im_end|>\n",
    "systemPrompt": "<|im_start|>system\n%1<|im_end|>\n",
    "stopWord": ["<|im_end|>"],
    "type": "model",
    "requireRam": 0
  },
  {
    "name": "QWen 1.5 1.8b",
    "filename": "qwen1_5-1_8b-chat-q4_0.gguf",
    "url": "https://huggingface.co/Qwen/Qwen1.5-1.8B-Chat-GGUF/resolve/main/qwen1_5-1_8b-chat-q4_0.gguf?download=true",
    "promptTemplate": "<|im_start|>user\n%1<|im_end|>\n<|im_start|>assistant\n%2<|im_end|>\n",
    "systemPrompt": "<|im_start|>system\n%1<|im_end|>\n",
    "stopWord": ["<|im_end|>"],
    "type": "model",
    "requireRam": 0
  },
  {
    "name": "QWen 1.5 0.5b",
    "filename": "qwen1_5-0_5b-chat-q4_0.gguf",
    "url": "https://huggingface.co/Qwen/Qwen1.5-0.5B-Chat-GGUF/resolve/main/qwen1_5-0_5b-chat-q4_0.gguf?download=true",
    "promptTemplate": "<|im_start|>user\n%1<|im_end|>\n<|im_start|>assistant\n%2<|im_end|>\n",
    "systemPrompt": "<|im_start|>system\n%1<|im_end|>\n",
    "stopWord": ["<|im_end|>"],
    "type": "model",
    "requireRam": 0
  },
  {
    "name": "all-MiniLM-L12-v2.F16.gguf",
    "filename": "all-MiniLM-L12-v2.F16.gguf",
    "type": "embedding",
    "requireRam": 0
  },
  {
    "name": "TTS",
    "type": "TTS",
    "url": "https://huggingface.co/SHMIS/ABS_AI_Helper/resolve/main/tts.zip?download=true"
  },
  {
    "name": "STT",
    "type": "STT",
    "url": "https://huggingface.co/SHMIS/ABS_AI_Helper/resolve/main/stt.zip?download=true"
  }
]