Ubuntu committed
Commit 8d6bb89 • 1 Parent(s): 1e9df44

added 3 evals

Files changed (20)
  1. Cedille/fr-boris/results_2024-10-28T07-50-37.539400.json +18 -18
  2. Qwen/Qwen2.5-3B-Instruct/results_2024-11-03T18-25-56.604265.json +0 -0
  3. Qwen/Qwen2.5-3B-Instruct/results_2024-11-03T18-25-56.604265_norm.json +26 -0
  4. Qwen/Qwen2.5-7B-Instruct/results_2024-11-03T19-24-47.152663.json +0 -0
  5. croissantllm/CroissantLLMChat-v0.1/results_2024-10-23T16-17-20.647816_norm.json → Qwen/Qwen2.5-7B-Instruct/results_2024-11-03T19-24-47.152663_norm.json +8 -8
  6. bofenghuang/vigostral-7b-chat/results_2024-10-28T07-11-45.958853.json +16 -16
  7. bofenghuang/vigostral-7b-chat/results_2024-10-28T07-11-45.958853_norm.json +1 -1
  8. croissantllm/CroissantLLMChat-v0.1/results_2024-11-07T07-31-07.403461_norm.json +26 -0
  9. google/gemma-2-9b/results_2024-10-23T15-43-20.426375_norm.json +1 -1
  10. ibm-granite/granite-3.0-8b-instruct/results_2024-10-26T09-12-37.981264.json +0 -0
  11. ibm-granite/{ibm-granite__granite-3.0-8b-instruct → granite-3.0-8b-instruct}/results_2024-10-26T10-18-22.771337_norm.json +1 -1
  12. jpacifico/Chocolatine-14B-Instruct-DPO-v1.2/results_2024-11-06T22-46-29.057763.json +0 -0
  13. jpacifico/Chocolatine-14B-Instruct-DPO-v1.2/{results_2024-10-26T15-37-41.793620_norm.json → results_2024-11-06T22-46-29.057763_norm.json} +5 -5
  14. jpacifico/Chocolatine-3B-Instruct-DPO-v1.2/results_2024-11-06T21-42-12.123871.json +0 -0
  15. jpacifico/Chocolatine-3B-Instruct-DPO-v1.2/{results_2024-10-26T13-36-16.081824_norm.json → results_2024-11-06T21-42-12.123871_norm.json} +6 -6
  16. meta-llama/Llama-3.1-8B/results_2024-10-26T11-52-10.553665_norm.json +1 -1
  17. microsoft/Phi-3-medium-4k-instruct/results_2024-11-03T22-21-03.380896.json +0 -0
  18. microsoft/Phi-3-medium-4k-instruct/results_2024-11-03T22-21-03.380896_norm.json +26 -0
  19. mistralai/Ministral-8B-Instruct-2410/results_2024-11-06T21-08-53.159172.json +0 -0
  20. mistralai/Ministral-8B-Instruct-2410/{results_2024-10-26T12-54-54.727669_norm.json → results_2024-11-06T21-08-53.159172_norm.json} +6 -6
Cedille/fr-boris/results_2024-10-28T07-50-37.539400.json CHANGED
@@ -153,34 +153,34 @@
       "alias": " - leaderboard_math_hard_fr"
     },
     "leaderboard_math_algebra_hard_fr": {
-      "alias": " - leaderboard_math_algebra_hard_fr",
-      "exact_match,none": 0.005714285714285714,
-      "exact_match_stderr,none": 0.0040348171857270635
+      "alias": " - leaderboard_math_algebra_hard_fr",
+      "exact_match,none": 0.011428571428571429,
+      "exact_match_stderr,none": 0.0056896727396618515
     },
     "leaderboard_math_counting_and_prob_hard_fr": {
-      "alias": " - leaderboard_math_counting_and_prob_hard_fr",
-      "exact_match,none": 0.01020408163265306,
-      "exact_match_stderr,none": 0.007196850575679085
+      "alias": " - leaderboard_math_counting_and_prob_hard_fr",
+      "exact_match,none": 0.00510204081632653,
+      "exact_match_stderr,none": 0.005102040816326531
     },
     "leaderboard_math_geometry_hard_fr": {
-      "alias": " - leaderboard_math_geometry_hard_fr",
-      "exact_match,none": 0.0048543689320388345,
-      "exact_match_stderr,none": 0.004854368932038835
+      "alias": " - leaderboard_math_geometry_hard_fr",
+      "exact_match,none": 0.0,
+      "exact_match_stderr,none": 0.0
     },
     "leaderboard_math_num_theory_hard_fr": {
-      "alias": " - leaderboard_math_num_theory_hard_fr",
-      "exact_match,none": 0.013824884792626729,
-      "exact_match_stderr,none": 0.007944762237164824
+      "alias": " - leaderboard_math_num_theory_hard_fr",
+      "exact_match,none": 0.004608294930875576,
+      "exact_match_stderr,none": 0.004608294930875576
     },
     "leaderboard_math_prealgebra_hard_fr": {
-      "alias": " - leaderboard_math_prealgebra_hard_fr",
-      "exact_match,none": 0.008849557522123894,
-      "exact_match_stderr,none": 0.008849557522123894
+      "alias": " - leaderboard_math_prealgebra_hard_fr",
+      "exact_match,none": 0.0,
+      "exact_match_stderr,none": 0.0
     },
     "leaderboard_math_precalculus_hard_fr": {
-      "alias": " - leaderboard_math_precalculus_hard_fr",
-      "exact_match,none": 0.011904761904761904,
-      "exact_match_stderr,none": 0.011904761904761906
+      "alias": " - leaderboard_math_precalculus_hard_fr",
+      "exact_match,none": 0.0,
+      "exact_match_stderr,none": 0.0
     },
     "leaderboard_mmlu_fr": {
       "alias": " - leaderboard_mmlu_fr",
Qwen/Qwen2.5-3B-Instruct/results_2024-11-03T18-25-56.604265.json ADDED
The diff for this file is too large to render. See raw diff
 
Qwen/Qwen2.5-3B-Instruct/results_2024-11-03T18-25-56.604265_norm.json ADDED
@@ -0,0 +1,26 @@
+{
+  "config": {
+    "model_name": "Qwen/Qwen2.5-3B-Instruct",
+    "model_dtype": "torch.float16"
+  },
+  "results": {
+    "BBH-fr": {
+      "metric_name": 0.20600000000000002
+    },
+    "GPQA-fr": {
+      "metric_name": 0.049100000000000005
+    },
+    "IFEval-fr": {
+      "metric_name": 0.1564
+    },
+    "MUSR-fr": {
+      "metric_name": 0.0208
+    },
+    "MATH Lvl5-fr": {
+      "metric_name": 0.1788
+    },
+    "MMMLU-fr": {
+      "metric_name": 0.5069
+    }
+  }
+}
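Each *_norm.json file added or rewritten in this commit has the same shape as the file above: a "config" object with "model_name" and "model_dtype", and a "results" object mapping each French benchmark (BBH-fr, GPQA-fr, IFEval-fr, MUSR-fr, MATH Lvl5-fr, MMMLU-fr) to a single "metric_name" score. A minimal sketch of reading one of these files with the Python standard library (the path is just one of the files in this commit):

import json

# Load one of the normalized result files added in this commit.
path = "Qwen/Qwen2.5-3B-Instruct/results_2024-11-03T18-25-56.604265_norm.json"
with open(path, encoding="utf-8") as f:
    data = json.load(f)

print(data["config"]["model_name"])  # Qwen/Qwen2.5-3B-Instruct
for task, entry in data["results"].items():
    print(f"{task}: {entry['metric_name']:.4f}")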
Qwen/Qwen2.5-7B-Instruct/results_2024-11-03T19-24-47.152663.json ADDED
The diff for this file is too large to render. See raw diff
 
croissantllm/CroissantLLMChat-v0.1/results_2024-10-23T16-17-20.647816_norm.json → Qwen/Qwen2.5-7B-Instruct/results_2024-11-03T19-24-47.152663_norm.json RENAMED
@@ -1,26 +1,26 @@
 {
   "config": {
-    "model_dtype": "torch.float16",
-    "model_name": "croissantllm/CroissantLLMChat-v0.1"
+    "model_name": "Qwen/Qwen2.5-7B-Instruct",
+    "model_dtype": "torch.float16"
   },
   "results": {
     "BBH-fr": {
-      "metric_name": 0.0307
+      "metric_name": 0.3245
     },
     "GPQA-fr": {
-      "metric_name": 0.0089
+      "metric_name": 0.11320000000000001
     },
     "IFEval-fr": {
-      "metric_name": 0.1117
+      "metric_name": 0.1655
     },
     "MUSR-fr": {
-      "metric_name": 0.0251
+      "metric_name": 0.049
     },
     "MATH Lvl5-fr": {
-      "metric_name": 0.0051
+      "metric_name": 0.3528
     },
     "MMMLU-fr": {
-      "metric_name": 0.1677
+      "metric_name": 0.6006
     }
   }
 }
bofenghuang/vigostral-7b-chat/results_2024-10-28T07-11-45.958853.json CHANGED
@@ -153,32 +153,32 @@
       "alias": " - leaderboard_math_hard_fr"
     },
     "leaderboard_math_algebra_hard_fr": {
-      "alias": " - leaderboard_math_algebra_hard_fr",
-      "exact_match,none": 0.045714285714285714,
-      "exact_match_stderr,none": 0.01118027450902648
+      "alias": " - leaderboard_math_algebra_hard_fr",
+      "exact_match,none": 0.04285714285714286,
+      "exact_match_stderr,none": 0.010841447603579576
     },
     "leaderboard_math_counting_and_prob_hard_fr": {
-      "alias": " - leaderboard_math_counting_and_prob_hard_fr",
-      "exact_match,none": 0.02040816326530612,
-      "exact_match_stderr,none": 0.010125284456261915
+      "alias": " - leaderboard_math_counting_and_prob_hard_fr",
+      "exact_match,none": 0.015306122448979591,
+      "exact_match_stderr,none": 0.008791559199116585
     },
     "leaderboard_math_geometry_hard_fr": {
-      "alias": " - leaderboard_math_geometry_hard_fr",
-      "exact_match,none": 0.014563106796116505,
-      "exact_match_stderr,none": 0.008366898401515055
+      "alias": " - leaderboard_math_geometry_hard_fr",
+      "exact_match,none": 0.019417475728155338,
+      "exact_match_stderr,none": 0.009637436498668234
     },
     "leaderboard_math_num_theory_hard_fr": {
-      "alias": " - leaderboard_math_num_theory_hard_fr",
-      "exact_match,none": 0.02304147465437788,
-      "exact_match_stderr,none": 0.010208603193741367
+      "alias": " - leaderboard_math_num_theory_hard_fr",
+      "exact_match,none": 0.03686635944700461,
+      "exact_match_stderr,none": 0.012821284204351168
     },
     "leaderboard_math_prealgebra_hard_fr": {
-      "alias": " - leaderboard_math_prealgebra_hard_fr",
-      "exact_match,none": 0.07964601769911504,
-      "exact_match_stderr,none": 0.025582941017507025
+      "alias": " - leaderboard_math_prealgebra_hard_fr",
+      "exact_match,none": 0.07079646017699115,
+      "exact_match_stderr,none": 0.024235511394033897
     },
     "leaderboard_math_precalculus_hard_fr": {
-      "alias": " - leaderboard_math_precalculus_hard_fr",
+      "alias": " - leaderboard_math_precalculus_hard_fr",
       "exact_match,none": 0.011904761904761904,
       "exact_match_stderr,none": 0.011904761904761906
     },
bofenghuang/vigostral-7b-chat/results_2024-10-28T07-11-45.958853_norm.json CHANGED
@@ -17,7 +17,7 @@
     "metric_name": 0.055
   },
   "MATH Lvl5-fr": {
-    "metric_name": 0.0325
+    "metric_name": 0.0329
   },
   "MMMLU-fr": {
     "metric_name": 0.43060000000000004
croissantllm/CroissantLLMChat-v0.1/results_2024-11-07T07-31-07.403461_norm.json ADDED
@@ -0,0 +1,26 @@
+{
+  "config": {
+    "model_name": "croissantllm/CroissantLLMChat-v0.1",
+    "model_dtype": "torch.float16"
+  },
+  "results": {
+    "BBH-fr": {
+      "metric_name": 0.030699999999999998
+    },
+    "GPQA-fr": {
+      "metric_name": 0.0078000000000000005
+    },
+    "IFEval-fr": {
+      "metric_name": 0.113
+    },
+    "MUSR-fr": {
+      "metric_name": 0.025099999999999997
+    },
+    "MATH Lvl5-fr": {
+      "metric_name": 0.0075
+    },
+    "MMMLU-fr": {
+      "metric_name": 0.1672
+    }
+  }
+}
google/gemma-2-9b/results_2024-10-23T15-43-20.426375_norm.json CHANGED
@@ -17,7 +17,7 @@
     "metric_name": 0.0583
   },
   "MATH Lvl5-fr": {
-    "metric_name": 0.1066
+    "metric_name": 0.1069
   },
   "MMMLU-fr": {
     "metric_name": 0.5659
ibm-granite/granite-3.0-8b-instruct/results_2024-10-26T09-12-37.981264.json ADDED
The diff for this file is too large to render. See raw diff
 
ibm-granite/{ibm-granite__granite-3.0-8b-instruct → granite-3.0-8b-instruct}/results_2024-10-26T10-18-22.771337_norm.json RENAMED
@@ -17,7 +17,7 @@
     "metric_name": 0.0355
  },
   "MATH Lvl5-fr": {
-    "metric_name": 0.1003
+    "metric_name": 0.0984
   },
   "MMMLU-fr": {
     "metric_name": 0.4316
jpacifico/Chocolatine-14B-Instruct-DPO-v1.2/results_2024-11-06T22-46-29.057763.json ADDED
The diff for this file is too large to render. See raw diff
 
jpacifico/Chocolatine-14B-Instruct-DPO-v1.2/{results_2024-10-26T15-37-41.793620_norm.json → results_2024-11-06T22-46-29.057763_norm.json} RENAMED
@@ -5,19 +5,19 @@
   },
   "results": {
     "BBH-fr": {
-      "metric_name": 0.3988
+      "metric_name": 0.39899999999999997
     },
     "GPQA-fr": {
-      "metric_name": 0.1164
+      "metric_name": 0.1209
     },
     "IFEval-fr": {
-      "metric_name": 0.1231
+      "metric_name": 0.12359999999999999
     },
     "MUSR-fr": {
-      "metric_name": 0.0947
+      "metric_name": 0.092
     },
     "MATH Lvl5-fr": {
-      "metric_name": 0.1988
+      "metric_name": 0.1734
     },
     "MMMLU-fr": {
       "metric_name": 0.6514
jpacifico/Chocolatine-3B-Instruct-DPO-v1.2/results_2024-11-06T21-42-12.123871.json ADDED
The diff for this file is too large to render. See raw diff
 
jpacifico/Chocolatine-3B-Instruct-DPO-v1.2/{results_2024-10-26T13-36-16.081824_norm.json → results_2024-11-06T21-42-12.123871_norm.json} RENAMED
@@ -5,22 +5,22 @@
   },
   "results": {
     "BBH-fr": {
-      "metric_name": 0.2752
+      "metric_name": 0.27449999999999997
     },
     "GPQA-fr": {
-      "metric_name": 0.0726
+      "metric_name": 0.0718
     },
     "IFEval-fr": {
-      "metric_name": 0.132
+      "metric_name": 0.1362
     },
     "MUSR-fr": {
-      "metric_name": 0.0424
+      "metric_name": 0.0355
     },
     "MATH Lvl5-fr": {
-      "metric_name": 0.1259
+      "metric_name": 0.1379
     },
     "MMMLU-fr": {
-      "metric_name": 0.5151
+      "metric_name": 0.5153
     }
   }
 }
meta-llama/Llama-3.1-8B/results_2024-10-26T11-52-10.553665_norm.json CHANGED
@@ -17,7 +17,7 @@
     "metric_name": 0.0387
   },
   "MATH Lvl5-fr": {
-    "metric_name": 0.0321
+    "metric_name": 0.0429
   },
   "MMMLU-fr": {
     "metric_name": 0.47479999999999994
microsoft/Phi-3-medium-4k-instruct/results_2024-11-03T22-21-03.380896.json ADDED
The diff for this file is too large to render. See raw diff
 
microsoft/Phi-3-medium-4k-instruct/results_2024-11-03T22-21-03.380896_norm.json ADDED
@@ -0,0 +1,26 @@
+{
+  "config": {
+    "model_name": "microsoft/Phi-3-medium-4k-instruct",
+    "model_dtype": "torch.float16"
+  },
+  "results": {
+    "BBH-fr": {
+      "metric_name": 0.39590000000000003
+    },
+    "GPQA-fr": {
+      "metric_name": 0.1142
+    },
+    "IFEval-fr": {
+      "metric_name": 0.13369999999999999
+    },
+    "MUSR-fr": {
+      "metric_name": 0.0894
+    },
+    "MATH Lvl5-fr": {
+      "metric_name": 0.1841
+    },
+    "MMMLU-fr": {
+      "metric_name": 0.6520999999999999
+    }
+  }
+}
mistralai/Ministral-8B-Instruct-2410/results_2024-11-06T21-08-53.159172.json ADDED
The diff for this file is too large to render. See raw diff
 
mistralai/Ministral-8B-Instruct-2410/{results_2024-10-26T12-54-54.727669_norm.json → results_2024-11-06T21-08-53.159172_norm.json} RENAMED
@@ -5,22 +5,22 @@
   },
   "results": {
     "BBH-fr": {
-      "metric_name": 0.2152
+      "metric_name": 0.2119
     },
     "GPQA-fr": {
-      "metric_name": 0.11
+      "metric_name": 0.11349999999999999
     },
     "IFEval-fr": {
-      "metric_name": 0.1019
+      "metric_name": 0.1038
     },
     "MUSR-fr": {
-      "metric_name": 0.0749
+      "metric_name": 0.0759
     },
     "MATH Lvl5-fr": {
-      "metric_name": 0.1579
+      "metric_name": 0.1559
     },
     "MMMLU-fr": {
-      "metric_name": 0.494
+      "metric_name": 0.4936
     }
   }
 }
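After this commit, a quick way to skim all normalized results in the repository is to walk the *_norm.json files directly. This is only an illustrative summary: the glob pattern assumes the two-level org/model layout shown in the "Files changed" list, and the plain mean over the six benchmarks is not necessarily the leaderboard's official aggregate:

import glob
import json

rows = []
for path in sorted(glob.glob("*/*/results_*_norm.json")):
    with open(path, encoding="utf-8") as f:
        data = json.load(f)
    scores = {task: v["metric_name"] for task, v in data["results"].items()}
    rows.append((data["config"]["model_name"], scores))

for model, scores in rows:
    mean = sum(scores.values()) / len(scores)
    print(f"{model}: mean over {len(scores)} benchmarks = {mean:.4f}")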