Jamie@TitanML committed on
Commit
627b16f
·
1 Parent(s): 3f92378

Update ggml_cache.json

Browse files
Files changed (1) hide show
  1. ggml_cache.json +72 -72
ggml_cache.json CHANGED
@@ -11,6 +11,18 @@
11
  "model_type": "llama",
12
  "tokenizer": "llama-local"
13
  },
 
 
 
 
 
 
 
 
 
 
 
 
14
  "llama-2-7b-chat": {
15
  "repo": "TheBloke/Llama-2-7B-Chat-GGML",
16
  "filename": "llama-2-7b-chat.ggmlv3.q4_0.bin",
@@ -23,6 +35,18 @@
23
  "model_type": "llama",
24
  "tokenizer": "llama-local"
25
  },
 
 
 
 
 
 
 
 
 
 
 
 
26
  "llama-2-13b": {
27
  "repo": "TheBloke/Llama-2-13B-GGML",
28
  "filename": "llama-2-13b.ggmlv3.q4_0.bin",
@@ -35,6 +59,18 @@
35
  "model_type": "llama",
36
  "tokenizer": "llama-local"
37
  },
 
 
 
 
 
 
 
 
 
 
 
 
38
  "llama-2-13b-chat": {
39
  "repo": "TheBloke/Llama-2-13B-Chat-GGML",
40
  "filename": "llama-2-13b-chat.ggmlv3.q4_0.bin",
@@ -47,6 +83,18 @@
47
  "model_type": "llama",
48
  "tokenizer": "llama-local"
49
  },
 
 
 
 
 
 
 
 
 
 
 
 
50
  "llama-2-70b": {
51
  "repo": "TheBloke/Llama-2-70B-GGML",
52
  "filename": "llama-2-70b.ggmlv3.q4_0.bin",
@@ -59,6 +107,18 @@
59
  "model_type": "llama",
60
  "tokenizer": "llama-local"
61
  },
 
 
 
 
 
 
 
 
 
 
 
 
62
  "llama-2-70b-chat": {
63
  "repo": "TheBloke/Llama-2-70B-Chat-GGML",
64
  "filename": "llama-2-70b-chat.ggmlv3.q4_0.bin",
@@ -71,6 +131,18 @@
71
  "model_type": "llama",
72
  "tokenizer": "llama-local"
73
  },
 
 
 
 
 
 
 
 
 
 
 
 
74
  "falcon-7b": {
75
  "repo": "TheBloke/falcon-7b-instruct-GGML",
76
  "filename": "falcon-7b-instruct.ggccv1.q4_0.bin",
@@ -100,77 +172,5 @@
100
  "filename": "ggml-model.bin",
101
  "model_type": "gpt2",
102
  "tokenizer": "gpt2"
103
- },
104
- "llama-2-70b-chat-q2": {
105
- "repo": "TheBloke/Llama-2-70B-Chat-GGML",
106
- "filename": "llama-2-70b-chat.ggmlv3.q2_K.bin",
107
- "model_type": "llama",
108
- "tokenizer": "llama-local"
109
- },
110
- "llama-2-70b-q2": {
111
- "repo": "TheBloke/Llama-2-70B-GGML",
112
- "filename": "llama-2-70b.ggmlv3.q2_K.bin",
113
- "model_type": "llama",
114
- "tokenizer": "llama-local"
115
- },
116
- "llama-2-70b-q3_k_s": {
117
- "repo": "TheBloke/Llama-2-70B-GGML",
118
- "filename": "llama-2-70b.ggmlv3.q3_K_S.bin",
119
- "model_type": "llama",
120
- "tokenizer": "llama-local"
121
- },
122
- "llama-2-70b-q3_k_m": {
123
- "repo": "TheBloke/Llama-2-70B-GGML",
124
- "filename": "llama-2-70b.ggmlv3.q3_K_M.bin",
125
- "model_type": "llama",
126
- "tokenizer": "llama-local"
127
- },
128
- "llama-2-70b-q3_k_l": {
129
- "repo": "TheBloke/Llama-2-70B-GGML",
130
- "filename": "llama-2-70b.ggmlv3.q3_K_L.bin",
131
- "model_type": "llama",
132
- "tokenizer": "llama-local"
133
- },
134
- "llama-2-70b-q4_0": {
135
- "repo": "TheBloke/Llama-2-70B-GGML",
136
- "filename": "llama-2-70b.ggmlv3.q4_0.bin",
137
- "model_type": "llama",
138
- "tokenizer": "llama-local"
139
- },
140
- "llama-2-70b-q4_1": {
141
- "repo": "TheBloke/Llama-2-70B-GGML",
142
- "filename": "llama-2-70b.ggmlv3.q4_1.bin",
143
- "model_type": "llama",
144
- "tokenizer": "llama-local"
145
- },
146
- "llama-2-70b-q4_k_m": {
147
- "repo": "TheBloke/Llama-2-70B-GGML",
148
- "filename": "llama-2-70b.ggmlv3.q4_K_M.bin",
149
- "model_type": "llama",
150
- "tokenizer": "llama-local"
151
- },
152
- "llama-2-70b-q4_k_s": {
153
- "repo": "TheBloke/Llama-2-70B-GGML",
154
- "filename": "llama-2-70b.ggmlv3.q4_K_S.bin",
155
- "model_type": "llama",
156
- "tokenizer": "llama-local"
157
- },
158
- "llama-2-70b-q5_0": {
159
- "repo": "TheBloke/Llama-2-70B-GGML",
160
- "filename": "llama-2-70b.ggmlv3.q5_0.bin",
161
- "model_type": "llama",
162
- "tokenizer": "llama-local"
163
- },
164
- "llama-2-70b-q5_k_m": {
165
- "repo": "TheBloke/Llama-2-70B-GGML",
166
- "filename": "llama-2-70b.ggmlv3.q5_K_M.bin",
167
- "model_type": "llama",
168
- "tokenizer": "llama-local"
169
- },
170
- "llama-2-70b-q5_k_s": {
171
- "repo": "TheBloke/Llama-2-70B-GGML",
172
- "filename": "llama-2-70b.ggmlv3.q5_K_S.bin",
173
- "model_type": "llama",
174
- "tokenizer": "llama-local"
175
  }
176
  }
 
11
  "model_type": "llama",
12
  "tokenizer": "llama-local"
13
  },
14
+ "llama-2-7b-hf": {
15
+ "repo": "TheBloke/Llama-2-7B-GGML",
16
+ "filename": "llama-2-7b.ggmlv3.q4_0.bin",
17
+ "model_type": "llama",
18
+ "tokenizer": "llama-local"
19
+ },
20
+ "meta-llama/llama-2-7b-hf": {
21
+ "repo": "TheBloke/Llama-2-7B-GGML",
22
+ "filename": "llama-2-7b.ggmlv3.q4_0.bin",
23
+ "model_type": "llama",
24
+ "tokenizer": "llama-local"
25
+ },
26
  "llama-2-7b-chat": {
27
  "repo": "TheBloke/Llama-2-7B-Chat-GGML",
28
  "filename": "llama-2-7b-chat.ggmlv3.q4_0.bin",
 
35
  "model_type": "llama",
36
  "tokenizer": "llama-local"
37
  },
38
+ "llama-2-7b-chat-hf": {
39
+ "repo": "TheBloke/Llama-2-7B-Chat-GGML",
40
+ "filename": "llama-2-7b-chat.ggmlv3.q4_0.bin",
41
+ "model_type": "llama",
42
+ "tokenizer": "llama-local"
43
+ },
44
+ "meta-llama/llama-2-7b-chat-hf": {
45
+ "repo": "TheBloke/Llama-2-7B-Chat-GGML",
46
+ "filename": "llama-2-7b-chat.ggmlv3.q4_0.bin",
47
+ "model_type": "llama",
48
+ "tokenizer": "llama-local"
49
+ },
50
  "llama-2-13b": {
51
  "repo": "TheBloke/Llama-2-13B-GGML",
52
  "filename": "llama-2-13b.ggmlv3.q4_0.bin",
 
59
  "model_type": "llama",
60
  "tokenizer": "llama-local"
61
  },
62
+ "llama-2-13b-hf": {
63
+ "repo": "TheBloke/Llama-2-13B-GGML",
64
+ "filename": "llama-2-13b.ggmlv3.q4_0.bin",
65
+ "model_type": "llama",
66
+ "tokenizer": "llama-local"
67
+ },
68
+ "meta-llama/llama-2-13b-hf": {
69
+ "repo": "TheBloke/Llama-2-13B-GGML",
70
+ "filename": "llama-2-13b.ggmlv3.q4_0.bin",
71
+ "model_type": "llama",
72
+ "tokenizer": "llama-local"
73
+ },
74
  "llama-2-13b-chat": {
75
  "repo": "TheBloke/Llama-2-13B-Chat-GGML",
76
  "filename": "llama-2-13b-chat.ggmlv3.q4_0.bin",
 
83
  "model_type": "llama",
84
  "tokenizer": "llama-local"
85
  },
86
+ "llama-2-13b-chat-hf": {
87
+ "repo": "TheBloke/Llama-2-13B-Chat-GGML",
88
+ "filename": "llama-2-13b-chat.ggmlv3.q4_0.bin",
89
+ "model_type": "llama",
90
+ "tokenizer": "llama-local"
91
+ },
92
+ "meta-llama/llama-2-13b-chat-hf": {
93
+ "repo": "TheBloke/Llama-2-13B-Chat-GGML",
94
+ "filename": "llama-2-13b-chat.ggmlv3.q4_0.bin",
95
+ "model_type": "llama",
96
+ "tokenizer": "llama-local"
97
+ },
98
  "llama-2-70b": {
99
  "repo": "TheBloke/Llama-2-70B-GGML",
100
  "filename": "llama-2-70b.ggmlv3.q4_0.bin",
 
107
  "model_type": "llama",
108
  "tokenizer": "llama-local"
109
  },
110
+ "llama-2-70b-hf": {
111
+ "repo": "TheBloke/Llama-2-70B-GGML",
112
+ "filename": "llama-2-70b.ggmlv3.q4_0.bin",
113
+ "model_type": "llama",
114
+ "tokenizer": "llama-local"
115
+ },
116
+ "meta-llama/llama-2-70b-hf": {
117
+ "repo": "TheBloke/Llama-2-70B-GGML",
118
+ "filename": "llama-2-70b.ggmlv3.q4_0.bin",
119
+ "model_type": "llama",
120
+ "tokenizer": "llama-local"
121
+ },
122
  "llama-2-70b-chat": {
123
  "repo": "TheBloke/Llama-2-70B-Chat-GGML",
124
  "filename": "llama-2-70b-chat.ggmlv3.q4_0.bin",
 
131
  "model_type": "llama",
132
  "tokenizer": "llama-local"
133
  },
134
+ "llama-2-70b-chat-hf": {
135
+ "repo": "TheBloke/Llama-2-70B-Chat-GGML",
136
+ "filename": "llama-2-70b-chat.ggmlv3.q4_0.bin",
137
+ "model_type": "llama",
138
+ "tokenizer": "llama-local"
139
+ },
140
+ "meta-llama/llama-2-70b-chat-hf": {
141
+ "repo": "TheBloke/Llama-2-70B-Chat-GGML",
142
+ "filename": "llama-2-70b-chat.ggmlv3.q4_0.bin",
143
+ "model_type": "llama",
144
+ "tokenizer": "llama-local"
145
+ },
146
  "falcon-7b": {
147
  "repo": "TheBloke/falcon-7b-instruct-GGML",
148
  "filename": "falcon-7b-instruct.ggccv1.q4_0.bin",
 
172
  "filename": "ggml-model.bin",
173
  "model_type": "gpt2",
174
  "tokenizer": "gpt2"
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
175
  }
176
  }