LoneStriker committed
Commit e9795ce
1 Parent(s): 72fa393

Upload folder using huggingface_hub

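The commit message above is the default message produced by huggingface_hub's folder upload. A minimal sketch of that kind of upload, assuming a local directory containing the GGUF files; the folder path and repository id are placeholders for illustration, not the actual repo name:

```python
from huggingface_hub import HfApi

api = HfApi()  # picks up the token from `huggingface-cli login` by default

# Push every file in a local folder to a model repository on the Hub.
# Both the folder path and the repo id below are illustrative placeholders.
api.upload_folder(
    folder_path="./Master-Yi-9B-GGUF",
    repo_id="your-username/Master-Yi-9B-GGUF",
    repo_type="model",
)
```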
.gitattributes CHANGED
@@ -1,35 +1,5 @@
- *.7z filter=lfs diff=lfs merge=lfs -text
- *.arrow filter=lfs diff=lfs merge=lfs -text
- *.bin filter=lfs diff=lfs merge=lfs -text
- *.bz2 filter=lfs diff=lfs merge=lfs -text
- *.ckpt filter=lfs diff=lfs merge=lfs -text
- *.ftz filter=lfs diff=lfs merge=lfs -text
- *.gz filter=lfs diff=lfs merge=lfs -text
- *.h5 filter=lfs diff=lfs merge=lfs -text
- *.joblib filter=lfs diff=lfs merge=lfs -text
- *.lfs.* filter=lfs diff=lfs merge=lfs -text
- *.mlmodel filter=lfs diff=lfs merge=lfs -text
- *.model filter=lfs diff=lfs merge=lfs -text
- *.msgpack filter=lfs diff=lfs merge=lfs -text
- *.npy filter=lfs diff=lfs merge=lfs -text
- *.npz filter=lfs diff=lfs merge=lfs -text
- *.onnx filter=lfs diff=lfs merge=lfs -text
- *.ot filter=lfs diff=lfs merge=lfs -text
- *.parquet filter=lfs diff=lfs merge=lfs -text
- *.pb filter=lfs diff=lfs merge=lfs -text
- *.pickle filter=lfs diff=lfs merge=lfs -text
- *.pkl filter=lfs diff=lfs merge=lfs -text
- *.pt filter=lfs diff=lfs merge=lfs -text
- *.pth filter=lfs diff=lfs merge=lfs -text
- *.rar filter=lfs diff=lfs merge=lfs -text
- *.safetensors filter=lfs diff=lfs merge=lfs -text
- saved_model/**/* filter=lfs diff=lfs merge=lfs -text
- *.tar.* filter=lfs diff=lfs merge=lfs -text
- *.tar filter=lfs diff=lfs merge=lfs -text
- *.tflite filter=lfs diff=lfs merge=lfs -text
- *.tgz filter=lfs diff=lfs merge=lfs -text
- *.wasm filter=lfs diff=lfs merge=lfs -text
- *.xz filter=lfs diff=lfs merge=lfs -text
- *.zip filter=lfs diff=lfs merge=lfs -text
- *.zst filter=lfs diff=lfs merge=lfs -text
- *tfevents* filter=lfs diff=lfs merge=lfs -text
+ Master-Yi-9B-Q3_K_L.gguf filter=lfs diff=lfs merge=lfs -text
+ Master-Yi-9B-Q4_K_M.gguf filter=lfs diff=lfs merge=lfs -text
+ Master-Yi-9B-Q5_K_M.gguf filter=lfs diff=lfs merge=lfs -text
+ Master-Yi-9B-Q6_K.gguf filter=lfs diff=lfs merge=lfs -text
+ Master-Yi-9B-Q8_0.gguf filter=lfs diff=lfs merge=lfs -text
 
Master-Yi-9B-Q3_K_L.gguf ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d6aad122349c65764d0014194937e29a57627383d66fcc0f0fab588f142aff03
+ size 4690751840
Master-Yi-9B-Q4_K_M.gguf ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ba231b6084df9504f52df75b0f6babc30ca71a623aaa5576e3b6036887a8be02
+ size 5328957792
Master-Yi-9B-Q5_K_M.gguf ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2b9c2ab821ba4496977e48d36de2e5819a2ff5d87f200eae389860dbb40f91b3
+ size 6258258272
Master-Yi-9B-Q6_K.gguf ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8b0ac99ea3e3e64dc85580aca1f4d4e52c3f124f5c8f0314029a03bae219b3f9
+ size 7245640032
Master-Yi-9B-Q8_0.gguf ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:59e5b90d5800d3908b29a97674b9c7a56a5e17938676248facdc7a6e7dc2e6ea
+ size 9383915872
Master-Yi-9B.webp ADDED
README.md ADDED
@@ -0,0 +1,483 @@
+ ---
+ license: apache-2.0
+ ---
+
+ ## Model Description
+
+ Master is a collection of LLMs trained on human-collected seed questions, with answers regenerated by a mixture of high-performance open-source LLMs.
+
+ **Master-Yi-9B** is trained using the ORPO technique. The model shows strong reasoning ability on coding and math questions.
+
+ **Quantized Version**: [Here](https://huggingface.co/qnguyen3/Master-Yi-9B-GGUF)
+
+ **Master-Yi-9B-Vision**: **Coming Soon**
+
+ ![img](https://huggingface.co/qnguyen3/Master-Yi-9B/resolve/main/Master-Yi-9B.webp)
+
+ ## Prompt Template
+
+ ```
+ <|im_start|>system
+ You are a helpful AI assistant.<|im_end|>
+ <|im_start|>user
+ What is the meaning of life?<|im_end|>
+ <|im_start|>assistant
+ ```
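+
+ The template follows the ChatML convention, so it can also be assembled as a plain string when a chat-template helper is not available. A minimal sketch; the helper name and example question are illustrative, not part of the original card:
+
+ ```python
+ def build_prompt(user_message: str, system_message: str = "You are a helpful AI assistant.") -> str:
+     """Assemble a ChatML-style prompt matching the template above."""
+     return (
+         f"<|im_start|>system\n{system_message}<|im_end|>\n"
+         f"<|im_start|>user\n{user_message}<|im_end|>\n"
+         f"<|im_start|>assistant\n"
+     )
+
+ print(build_prompt("What is the meaning of life?"))
+ ```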
+
+ ## Examples
+
+ ![image/png](https://cdn-uploads.huggingface.co/production/uploads/630430583926de1f7ec62c6b/E27JmdRAMrHQacM50-lBk.png)
+
+ ![image/png](https://cdn-uploads.huggingface.co/production/uploads/630430583926de1f7ec62c6b/z0HS4bxHFQzPe0gZlvCzZ.png)
+
+ ## Inference Code
+
+ ```python
+ from transformers import AutoModelForCausalLM, AutoTokenizer
+
+ device = "cuda"  # the device to load the model onto
+
+ # Load the model this card describes.
+ model = AutoModelForCausalLM.from_pretrained(
+     "qnguyen3/Master-Yi-9B",
+     torch_dtype="auto",
+     device_map="auto"
+ )
+ tokenizer = AutoTokenizer.from_pretrained("qnguyen3/Master-Yi-9B")
+
+ prompt = "What is the meaning of life?"
+ messages = [
+     {"role": "system", "content": "You are a helpful AI assistant."},
+     {"role": "user", "content": prompt}
+ ]
+ text = tokenizer.apply_chat_template(
+     messages,
+     tokenize=False,
+     add_generation_prompt=True
+ )
+ model_inputs = tokenizer([text], return_tensors="pt").to(device)
+
+ generated_ids = model.generate(
+     model_inputs.input_ids,
+     max_new_tokens=1024,
+     eos_token_id=tokenizer.eos_token_id,
+     do_sample=True,      # sampling must be enabled for temperature to take effect
+     temperature=0.25,
+ )
+ generated_ids = [
+     output_ids[len(input_ids):] for input_ids, output_ids in zip(model_inputs.input_ids, generated_ids)
+ ]
+
+ response = tokenizer.batch_decode(generated_ids, skip_special_tokens=True)[0]
+ print(response)
+ ```
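+
+ The GGUF quantizations uploaded in this commit (Q3_K_L through Q8_0) can also be run without transformers, for example with llama-cpp-python. A minimal sketch, assuming llama-cpp-python is installed and one quant has been downloaded locally (e.g. with huggingface_hub's hf_hub_download); the file path, context size, and sampling settings are illustrative:
+
+ ```python
+ from llama_cpp import Llama
+
+ # Path to a locally downloaded quantization (illustrative).
+ llm = Llama(
+     model_path="./Master-Yi-9B-Q4_K_M.gguf",
+     n_ctx=4096,       # context window size
+     n_gpu_layers=-1,  # offload all layers to the GPU when one is available
+ )
+
+ # create_chat_completion uses the chat template embedded in the GGUF metadata when present.
+ out = llm.create_chat_completion(
+     messages=[
+         {"role": "system", "content": "You are a helpful AI assistant."},
+         {"role": "user", "content": "What is the meaning of life?"},
+     ],
+     max_tokens=512,
+     temperature=0.25,
+ )
+ print(out["choices"][0]["message"]["content"])
+ ```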
+
+ ## Benchmarks
+
+ ### Nous Benchmark:
+
+ | Model |AGIEval|GPT4All|TruthfulQA|Bigbench|Average|
+ |---------------------------------------------------|------:|------:|---------:|-------:|------:|
+ |[Master-Yi-9B](https://huggingface.co/qnguyen3/Master-Yi-9B)| 43.55| 71.48| 48.54| 41.43| 51.25|
+
+ ### AGIEval
+ ```
+ | Task |Version| Metric |Value| |Stderr|
+ |------------------------------|------:|--------|----:|---|-----:|
+ |agieval_aqua_rat | 0|acc |35.83|± | 3.01|
+ | | |acc_norm|31.89|± | 2.93|
+ |agieval_logiqa_en | 0|acc |38.25|± | 1.91|
+ | | |acc_norm|37.79|± | 1.90|
+ |agieval_lsat_ar | 0|acc |23.04|± | 2.78|
+ | | |acc_norm|20.43|± | 2.66|
+ |agieval_lsat_lr | 0|acc |48.04|± | 2.21|
+ | | |acc_norm|42.75|± | 2.19|
+ |agieval_lsat_rc | 0|acc |61.34|± | 2.97|
+ | | |acc_norm|52.79|± | 3.05|
+ |agieval_sat_en | 0|acc |79.13|± | 2.84|
+ | | |acc_norm|72.33|± | 3.12|
+ |agieval_sat_en_without_passage| 0|acc |44.17|± | 3.47|
+ | | |acc_norm|42.72|± | 3.45|
+ |agieval_sat_math | 0|acc |52.27|± | 3.38|
+ | | |acc_norm|47.73|± | 3.38|
+
+ Average: 43.55%
+ ```
+
+ ### GPT4All
+ ```
+ | Task |Version| Metric |Value| |Stderr|
+ |-------------|------:|--------|----:|---|-----:|
+ |arc_challenge| 0|acc |54.95|± | 1.45|
+ | | |acc_norm|58.70|± | 1.44|
+ |arc_easy | 0|acc |82.28|± | 0.78|
+ | | |acc_norm|81.10|± | 0.80|
+ |boolq | 1|acc |86.15|± | 0.60|
+ |hellaswag | 0|acc |59.16|± | 0.49|
+ | | |acc_norm|77.53|± | 0.42|
+ |openbookqa | 0|acc |37.40|± | 2.17|
+ | | |acc_norm|44.00|± | 2.22|
+ |piqa | 0|acc |79.00|± | 0.95|
+ | | |acc_norm|80.25|± | 0.93|
+ |winogrande | 0|acc |72.61|± | 1.25|
+
+ Average: 71.48%
+ ```
+
+ ### TruthfulQA
+ ```
+ | Task |Version|Metric|Value| |Stderr|
+ |-------------|------:|------|----:|---|-----:|
+ |truthfulqa_mc| 1|mc1 |33.05|± | 1.65|
+ | | |mc2 |48.54|± | 1.54|
+
+ Average: 48.54%
+ ```
+
+ ### Bigbench
+ ```
+ | Task |Version| Metric |Value| |Stderr|
+ |------------------------------------------------|------:|---------------------|----:|---|-----:|
+ |bigbench_causal_judgement | 0|multiple_choice_grade|54.74|± | 3.62|
+ |bigbench_date_understanding | 0|multiple_choice_grade|68.02|± | 2.43|
+ |bigbench_disambiguation_qa | 0|multiple_choice_grade|40.31|± | 3.06|
+ |bigbench_geometric_shapes | 0|multiple_choice_grade|30.36|± | 2.43|
+ | | |exact_str_match | 2.23|± | 0.78|
+ |bigbench_logical_deduction_five_objects | 0|multiple_choice_grade|26.00|± | 1.96|
+ |bigbench_logical_deduction_seven_objects | 0|multiple_choice_grade|20.71|± | 1.53|
+ |bigbench_logical_deduction_three_objects | 0|multiple_choice_grade|44.00|± | 2.87|
+ |bigbench_movie_recommendation | 0|multiple_choice_grade|35.00|± | 2.14|
+ |bigbench_navigate | 0|multiple_choice_grade|58.40|± | 1.56|
+ |bigbench_reasoning_about_colored_objects | 0|multiple_choice_grade|61.80|± | 1.09|
+ |bigbench_ruin_names | 0|multiple_choice_grade|42.41|± | 2.34|
+ |bigbench_salient_translation_error_detection | 0|multiple_choice_grade|31.56|± | 1.47|
+ |bigbench_snarks | 0|multiple_choice_grade|55.25|± | 3.71|
+ |bigbench_sports_understanding | 0|multiple_choice_grade|69.37|± | 1.47|
+ |bigbench_temporal_sequences | 0|multiple_choice_grade|27.70|± | 1.42|
+ |bigbench_tracking_shuffled_objects_five_objects | 0|multiple_choice_grade|21.36|± | 1.16|
+ |bigbench_tracking_shuffled_objects_seven_objects| 0|multiple_choice_grade|14.69|± | 0.85|
+ |bigbench_tracking_shuffled_objects_three_objects| 0|multiple_choice_grade|44.00|± | 2.87|
+
+ Average: 41.43%
+ ```
+
+ **Average score**: 51.25%
+
+ ### OpenLLM Benchmark:
+
+ | Model |ARC |HellaSwag|MMLU |TruthfulQA|Winogrande|GSM8K|Average|
+ |---------------------------------------------------|---:|--------:|----:|---------:|---------:|----:|------:|
+ |[Master-Yi-9B](https://huggingface.co/qnguyen3/Master-Yi-9B)|61.6| 79.89|69.95| 48.59| 77.35|67.48| 67.48|
+
+ ### ARC
+ ```
+ | Task |Version| Metric | Value | |Stderr|
+ |-------------|------:|--------------------|-------------|---|------|
+ |arc_challenge| 1|acc,none | 0.59| | |
+ | | |acc_stderr,none | 0.01| | |
+ | | |acc_norm,none | 0.62| | |
+ | | |acc_norm_stderr,none| 0.01| | |
+ | | |alias |arc_challenge| | |
+
+ Average: 61.6%
+ ```
+
+ ### HellaSwag
+ ```
+ | Task |Version| Metric | Value | |Stderr|
+ |---------|------:|--------------------|---------|---|------|
+ |hellaswag| 1|acc,none | 0.61| | |
+ | | |acc_stderr,none | 0| | |
+ | | |acc_norm,none | 0.80| | |
+ | | |acc_norm_stderr,none| 0| | |
+ | | |alias |hellaswag| | |
+
+ Average: 79.89%
+ ```
+
+ ### MMLU
+ ```
+ | Task |Version| Metric | Value | |Stderr|
+ |----------------------------------------|-------|---------------|---------------------------------------|---|------|
+ |mmlu |N/A |acc,none | 0.7| | |
+ | | |acc_stderr,none| 0| | |
+ | | |alias |mmlu | | |
+ |mmlu_abstract_algebra | 0|alias | - abstract_algebra | | |
+ | | |acc,none |0.46 | | |
+ | | |acc_stderr,none|0.05 | | |
+ |mmlu_anatomy | 0|alias | - anatomy | | |
+ | | |acc,none |0.64 | | |
+ | | |acc_stderr,none|0.04 | | |
+ |mmlu_astronomy | 0|alias | - astronomy | | |
+ | | |acc,none |0.77 | | |
+ | | |acc_stderr,none|0.03 | | |
+ |mmlu_business_ethics | 0|alias | - business_ethics | | |
+ | | |acc,none |0.76 | | |
+ | | |acc_stderr,none|0.04 | | |
+ |mmlu_clinical_knowledge | 0|alias | - clinical_knowledge | | |
+ | | |acc,none |0.71 | | |
+ | | |acc_stderr,none|0.03 | | |
+ |mmlu_college_biology | 0|alias | - college_biology | | |
+ | | |acc,none |0.82 | | |
+ | | |acc_stderr,none|0.03 | | |
+ |mmlu_college_chemistry | 0|alias | - college_chemistry | | |
+ | | |acc,none |0.52 | | |
+ | | |acc_stderr,none|0.05 | | |
+ |mmlu_college_computer_science | 0|alias | - college_computer_science | | |
+ | | |acc,none |0.56 | | |
+ | | |acc_stderr,none|0.05 | | |
+ |mmlu_college_mathematics | 0|alias | - college_mathematics | | |
+ | | |acc,none |0.44 | | |
+ | | |acc_stderr,none|0.05 | | |
+ |mmlu_college_medicine | 0|alias | - college_medicine | | |
+ | | |acc,none |0.72 | | |
+ | | |acc_stderr,none|0.03 | | |
+ |mmlu_college_physics | 0|alias | - college_physics | | |
+ | | |acc,none |0.45 | | |
+ | | |acc_stderr,none|0.05 | | |
+ |mmlu_computer_security | 0|alias | - computer_security | | |
+ | | |acc,none |0.81 | | |
+ | | |acc_stderr,none|0.04 | | |
+ |mmlu_conceptual_physics | 0|alias | - conceptual_physics | | |
+ | | |acc,none |0.74 | | |
+ | | |acc_stderr,none|0.03 | | |
+ |mmlu_econometrics | 0|alias | - econometrics | | |
+ | | |acc,none |0.65 | | |
+ | | |acc_stderr,none|0.04 | | |
+ |mmlu_electrical_engineering | 0|alias | - electrical_engineering | | |
+ | | |acc,none |0.72 | | |
+ | | |acc_stderr,none|0.04 | | |
+ |mmlu_elementary_mathematics | 0|alias | - elementary_mathematics | | |
+ | | |acc,none |0.62 | | |
+ | | |acc_stderr,none|0.02 | | |
+ |mmlu_formal_logic | 0|alias | - formal_logic | | |
+ | | |acc,none |0.57 | | |
+ | | |acc_stderr,none|0.04 | | |
+ |mmlu_global_facts | 0|alias | - global_facts | | |
+ | | |acc,none |0.46 | | |
+ | | |acc_stderr,none|0.05 | | |
+ |mmlu_high_school_biology | 0|alias | - high_school_biology | | |
+ | | |acc,none |0.86 | | |
+ | | |acc_stderr,none|0.02 | | |
+ |mmlu_high_school_chemistry | 0|alias | - high_school_chemistry | | |
+ | | |acc,none |0.67 | | |
+ | | |acc_stderr,none|0.03 | | |
+ |mmlu_high_school_computer_science | 0|alias | - high_school_computer_science | | |
+ | | |acc,none |0.84 | | |
+ | | |acc_stderr,none|0.04 | | |
+ |mmlu_high_school_european_history | 0|alias | - high_school_european_history | | |
+ | | |acc,none |0.82 | | |
+ | | |acc_stderr,none|0.03 | | |
+ |mmlu_high_school_geography | 0|alias | - high_school_geography | | |
+ | | |acc,none |0.86 | | |
+ | | |acc_stderr,none|0.02 | | |
+ |mmlu_high_school_government_and_politics| 0|alias | - high_school_government_and_politics| | |
+ | | |acc,none |0.90 | | |
+ | | |acc_stderr,none|0.02 | | |
+ |mmlu_high_school_macroeconomics | 0|alias | - high_school_macroeconomics | | |
+ | | |acc,none |0.75 | | |
+ | | |acc_stderr,none|0.02 | | |
+ |mmlu_high_school_mathematics | 0|alias | - high_school_mathematics | | |
+ | | |acc,none |0.43 | | |
+ | | |acc_stderr,none|0.03 | | |
+ |mmlu_high_school_microeconomics | 0|alias | - high_school_microeconomics | | |
+ | | |acc,none |0.86 | | |
+ | | |acc_stderr,none|0.02 | | |
+ |mmlu_high_school_physics | 0|alias | - high_school_physics | | |
+ | | |acc,none |0.45 | | |
+ | | |acc_stderr,none|0.04 | | |
+ |mmlu_high_school_psychology | 0|alias | - high_school_psychology | | |
+ | | |acc,none |0.87 | | |
+ | | |acc_stderr,none|0.01 | | |
+ |mmlu_high_school_statistics | 0|alias | - high_school_statistics | | |
+ | | |acc,none |0.68 | | |
+ | | |acc_stderr,none|0.03 | | |
+ |mmlu_high_school_us_history | 0|alias | - high_school_us_history | | |
+ | | |acc,none |0.85 | | |
+ | | |acc_stderr,none|0.02 | | |
+ |mmlu_high_school_world_history | 0|alias | - high_school_world_history | | |
+ | | |acc,none |0.85 | | |
+ | | |acc_stderr,none|0.02 | | |
+ |mmlu_human_aging | 0|alias | - human_aging | | |
+ | | |acc,none |0.76 | | |
+ | | |acc_stderr,none|0.03 | | |
+ |mmlu_human_sexuality | 0|alias | - human_sexuality | | |
+ | | |acc,none |0.78 | | |
+ | | |acc_stderr,none|0.04 | | |
+ |mmlu_humanities |N/A |alias | - humanities | | |
+ | | |acc,none |0.63 | | |
+ | | |acc_stderr,none|0.01 | | |
+ |mmlu_international_law | 0|alias | - international_law | | |
+ | | |acc,none |0.79 | | |
+ | | |acc_stderr,none|0.04 | | |
+ |mmlu_jurisprudence | 0|alias | - jurisprudence | | |
+ | | |acc,none |0.79 | | |
+ | | |acc_stderr,none|0.04 | | |
+ |mmlu_logical_fallacies | 0|alias | - logical_fallacies | | |
+ | | |acc,none |0.80 | | |
+ | | |acc_stderr,none|0.03 | | |
+ |mmlu_machine_learning | 0|alias | - machine_learning | | |
+ | | |acc,none |0.52 | | |
+ | | |acc_stderr,none|0.05 | | |
+ |mmlu_management | 0|alias | - management | | |
+ | | |acc,none |0.83 | | |
+ | | |acc_stderr,none|0.04 | | |
+ |mmlu_marketing | 0|alias | - marketing | | |
+ | | |acc,none |0.89 | | |
+ | | |acc_stderr,none|0.02 | | |
+ |mmlu_medical_genetics | 0|alias | - medical_genetics | | |
+ | | |acc,none |0.78 | | |
+ | | |acc_stderr,none|0.04 | | |
+ |mmlu_miscellaneous | 0|alias | - miscellaneous | | |
+ | | |acc,none |0.85 | | |
+ | | |acc_stderr,none|0.01 | | |
+ |mmlu_moral_disputes | 0|alias | - moral_disputes | | |
+ | | |acc,none |0.75 | | |
+ | | |acc_stderr,none|0.02 | | |
+ |mmlu_moral_scenarios | 0|alias | - moral_scenarios | | |
+ | | |acc,none |0.48 | | |
+ | | |acc_stderr,none|0.02 | | |
+ |mmlu_nutrition | 0|alias | - nutrition | | |
+ | | |acc,none |0.77 | | |
+ | | |acc_stderr,none|0.02 | | |
+ |mmlu_other |N/A |alias | - other | | |
+ | | |acc,none |0.75 | | |
+ | | |acc_stderr,none|0.01 | | |
+ |mmlu_philosophy | 0|alias | - philosophy | | |
+ | | |acc,none |0.78 | | |
+ | | |acc_stderr,none|0.02 | | |
+ |mmlu_prehistory | 0|alias | - prehistory | | |
+ | | |acc,none |0.77 | | |
+ | | |acc_stderr,none|0.02 | | |
+ |mmlu_professional_accounting | 0|alias | - professional_accounting | | |
+ | | |acc,none |0.57 | | |
+ | | |acc_stderr,none|0.03 | | |
+ |mmlu_professional_law | 0|alias | - professional_law | | |
+ | | |acc,none |0.50 | | |
+ | | |acc_stderr,none|0.01 | | |
+ |mmlu_professional_medicine | 0|alias | - professional_medicine | | |
+ | | |acc,none |0.71 | | |
+ | | |acc_stderr,none|0.03 | | |
+ |mmlu_professional_psychology | 0|alias | - professional_psychology | | |
+ | | |acc,none |0.73 | | |
+ | | |acc_stderr,none|0.02 | | |
+ |mmlu_public_relations | 0|alias | - public_relations | | |
+ | | |acc,none |0.76 | | |
+ | | |acc_stderr,none|0.04 | | |
+ |mmlu_security_studies | 0|alias | - security_studies | | |
+ | | |acc,none |0.78 | | |
+ | | |acc_stderr,none|0.03 | | |
+ |mmlu_social_sciences |N/A |alias | - social_sciences | | |
+ | | |acc,none |0.81 | | |
+ | | |acc_stderr,none|0.01 | | |
+ |mmlu_sociology | 0|alias | - sociology | | |
+ | | |acc,none |0.86 | | |
+ | | |acc_stderr,none|0.02 | | |
+ |mmlu_stem |N/A |alias | - stem | | |
+ | | |acc,none |0.65 | | |
+ | | |acc_stderr,none|0.01 | | |
+ |mmlu_us_foreign_policy | 0|alias | - us_foreign_policy | | |
+ | | |acc,none |0.92 | | |
+ | | |acc_stderr,none|0.03 | | |
+ |mmlu_virology | 0|alias | - virology | | |
+ | | |acc,none |0.58 | | |
+ | | |acc_stderr,none|0.04 | | |
+ |mmlu_world_religions | 0|alias | - world_religions | | |
+ | | |acc,none |0.82 | | |
+ | | |acc_stderr,none|0.03 | | |
+
+ Average: 69.95%
+ ```
+
+ ### TruthfulQA
+ ```
+ | Task |Version| Metric | Value | |Stderr|
+ |--------------|-------|-----------------------|-----------------|---|------|
+ |truthfulqa |N/A |bleu_acc,none | 0.45| | |
+ | | |bleu_acc_stderr,none | 0.02| | |
+ | | |rouge1_acc,none | 0.45| | |
+ | | |rouge1_acc_stderr,none | 0.02| | |
+ | | |rouge2_diff,none | 0.92| | |
+ | | |rouge2_diff_stderr,none| 1.07| | |
+ | | |bleu_max,none | 23.77| | |
+ | | |bleu_max_stderr,none | 0.81| | |
+ | | |rouge2_acc,none | 0.38| | |
+ | | |rouge2_acc_stderr,none | 0.02| | |
+ | | |acc,none | 0.41| | |
+ | | |acc_stderr,none | 0.01| | |
+ | | |rougeL_diff,none | 1.57| | |
+ | | |rougeL_diff_stderr,none| 0.93| | |
+ | | |rougeL_acc,none | 0.46| | |
+ | | |rougeL_acc_stderr,none | 0.02| | |
+ | | |bleu_diff,none | 1.38| | |
+ | | |bleu_diff_stderr,none | 0.75| | |
+ | | |rouge2_max,none | 33.01| | |
+ | | |rouge2_max_stderr,none | 1.05| | |
+ | | |rouge1_diff,none | 1.72| | |
+ | | |rouge1_diff_stderr,none| 0.92| | |
+ | | |rougeL_max,none | 45.25| | |
+ | | |rougeL_max_stderr,none | 0.92| | |
+ | | |rouge1_max,none | 48.29| | |
+ | | |rouge1_max_stderr,none | 0.90| | |
+ | | |alias |truthfulqa | | |
+ |truthfulqa_gen| 3|bleu_max,none | 23.77| | |
+ | | |bleu_max_stderr,none | 0.81| | |
+ | | |bleu_acc,none | 0.45| | |
+ | | |bleu_acc_stderr,none | 0.02| | |
+ | | |bleu_diff,none | 1.38| | |
+ | | |bleu_diff_stderr,none | 0.75| | |
+ | | |rouge1_max,none | 48.29| | |
+ | | |rouge1_max_stderr,none | 0.90| | |
+ | | |rouge1_acc,none | 0.45| | |
+ | | |rouge1_acc_stderr,none | 0.02| | |
+ | | |rouge1_diff,none | 1.72| | |
+ | | |rouge1_diff_stderr,none| 0.92| | |
+ | | |rouge2_max,none | 33.01| | |
+ | | |rouge2_max_stderr,none | 1.05| | |
+ | | |rouge2_acc,none | 0.38| | |
+ | | |rouge2_acc_stderr,none | 0.02| | |
+ | | |rouge2_diff,none | 0.92| | |
+ | | |rouge2_diff_stderr,none| 1.07| | |
+ | | |rougeL_max,none | 45.25| | |
+ | | |rougeL_max_stderr,none | 0.92| | |
+ | | |rougeL_acc,none | 0.46| | |
+ | | |rougeL_acc_stderr,none | 0.02| | |
+ | | |rougeL_diff,none | 1.57| | |
+ | | |rougeL_diff_stderr,none| 0.93| | |
+ | | |alias | - truthfulqa_gen| | |
+ |truthfulqa_mc1| 2|acc,none | 0.33| | |
+ | | |acc_stderr,none | 0.02| | |
+ | | |alias | - truthfulqa_mc1| | |
+ |truthfulqa_mc2| 2|acc,none | 0.49| | |
+ | | |acc_stderr,none | 0.02| | |
+ | | |alias | - truthfulqa_mc2| | |
+
+ Average: 48.59%
+ ```
+
+ ### Winogrande
+ ```
+ | Task |Version| Metric | Value | |Stderr|
+ |----------|------:|---------------|----------|---|------|
+ |winogrande| 1|acc,none | 0.77| | |
+ | | |acc_stderr,none| 0.01| | |
+ | | |alias |winogrande| | |
+
+ Average: 77.35%
+ ```
+
+ ### GSM8K
+ ```
+ |Task |Version| Metric |Value| |Stderr|
+ |-----|------:|-----------------------------------|-----|---|------|
+ |gsm8k| 3|exact_match,strict-match | 0.67| | |
+ | | |exact_match_stderr,strict-match | 0.01| | |
+ | | |exact_match,flexible-extract | 0.68| | |
+ | | |exact_match_stderr,flexible-extract| 0.01| | |
+ | | |alias |gsm8k| | |
+
+ Average: 67.48%
+ ```
+
+ **Average score**: 67.48%