Commit 8d6bb89, committed by Ubuntu
Parent: 1e9df44

added 3 evals
Files changed:
- Cedille/fr-boris/results_2024-10-28T07-50-37.539400.json +18 -18
- Qwen/Qwen2.5-3B-Instruct/results_2024-11-03T18-25-56.604265.json +0 -0
- Qwen/Qwen2.5-3B-Instruct/results_2024-11-03T18-25-56.604265_norm.json +26 -0
- Qwen/Qwen2.5-7B-Instruct/results_2024-11-03T19-24-47.152663.json +0 -0
- croissantllm/CroissantLLMChat-v0.1/results_2024-10-23T16-17-20.647816_norm.json → Qwen/Qwen2.5-7B-Instruct/results_2024-11-03T19-24-47.152663_norm.json +8 -8
- bofenghuang/vigostral-7b-chat/results_2024-10-28T07-11-45.958853.json +16 -16
- bofenghuang/vigostral-7b-chat/results_2024-10-28T07-11-45.958853_norm.json +1 -1
- croissantllm/CroissantLLMChat-v0.1/results_2024-11-07T07-31-07.403461_norm.json +26 -0
- google/gemma-2-9b/results_2024-10-23T15-43-20.426375_norm.json +1 -1
- ibm-granite/granite-3.0-8b-instruct/results_2024-10-26T09-12-37.981264.json +0 -0
- ibm-granite/{ibm-granite__granite-3.0-8b-instruct → granite-3.0-8b-instruct}/results_2024-10-26T10-18-22.771337_norm.json +1 -1
- jpacifico/Chocolatine-14B-Instruct-DPO-v1.2/results_2024-11-06T22-46-29.057763.json +0 -0
- jpacifico/Chocolatine-14B-Instruct-DPO-v1.2/{results_2024-10-26T15-37-41.793620_norm.json → results_2024-11-06T22-46-29.057763_norm.json} +5 -5
- jpacifico/Chocolatine-3B-Instruct-DPO-v1.2/results_2024-11-06T21-42-12.123871.json +0 -0
- jpacifico/Chocolatine-3B-Instruct-DPO-v1.2/{results_2024-10-26T13-36-16.081824_norm.json → results_2024-11-06T21-42-12.123871_norm.json} +6 -6
- meta-llama/Llama-3.1-8B/results_2024-10-26T11-52-10.553665_norm.json +1 -1
- microsoft/Phi-3-medium-4k-instruct/results_2024-11-03T22-21-03.380896.json +0 -0
- microsoft/Phi-3-medium-4k-instruct/results_2024-11-03T22-21-03.380896_norm.json +26 -0
- mistralai/Ministral-8B-Instruct-2410/results_2024-11-06T21-08-53.159172.json +0 -0
- mistralai/Ministral-8B-Instruct-2410/{results_2024-10-26T12-54-54.727669_norm.json → results_2024-11-06T21-08-53.159172_norm.json} +6 -6
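Every `*_norm.json` touched by this commit follows the same small schema, visible in the diffs below: a `config` block with `model_name` and `model_dtype`, and a `results` block mapping each French benchmark (BBH-fr, GPQA-fr, IFEval-fr, MUSR-fr, MATH Lvl5-fr, MMMLU-fr) to a single `metric_name` value. A minimal sketch for collecting those files from a local checkout into one table, assuming the `<org>/<model>/results_<timestamp>_norm.json` layout shown above:

```python
# Sketch (not part of this commit): gather the *_norm.json summaries into rows.
# Assumes a local checkout of this results repo and the schema visible in the
# diffs below: {"config": {"model_name": ...},
#               "results": {"<benchmark>": {"metric_name": <float>}}}.
import json
from pathlib import Path

def load_norm_files(repo_root: str) -> list[dict]:
    rows = []
    # Two directory levels: <org>/<model>/results_<timestamp>_norm.json
    for path in sorted(Path(repo_root).glob("*/*/results_*_norm.json")):
        with open(path, encoding="utf-8") as f:
            data = json.load(f)
        row = {"model": data["config"]["model_name"]}
        for bench, scores in data["results"].items():
            row[bench] = scores["metric_name"]
        rows.append(row)
    return rows

if __name__ == "__main__":
    for row in load_norm_files("."):
        print(row["model"], {k: v for k, v in row.items() if k != "model"})
```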
Cedille/fr-boris/results_2024-10-28T07-50-37.539400.json
CHANGED
@@ -153,34 +153,34 @@
     "alias": " - leaderboard_math_hard_fr"
   },
   "leaderboard_math_algebra_hard_fr": {
-    "alias": "
-    "exact_match,none": 0.
-    "exact_match_stderr,none": 0.
+    "alias": " - leaderboard_math_algebra_hard_fr",
+    "exact_match,none": 0.011428571428571429,
+    "exact_match_stderr,none": 0.0056896727396618515
   },
   "leaderboard_math_counting_and_prob_hard_fr": {
-    "alias": "
-    "exact_match,none": 0.
-    "exact_match_stderr,none": 0.
+    "alias": " - leaderboard_math_counting_and_prob_hard_fr",
+    "exact_match,none": 0.00510204081632653,
+    "exact_match_stderr,none": 0.005102040816326531
   },
   "leaderboard_math_geometry_hard_fr": {
-    "alias": "
-    "exact_match,none": 0.
-    "exact_match_stderr,none": 0.
+    "alias": " - leaderboard_math_geometry_hard_fr",
+    "exact_match,none": 0.0,
+    "exact_match_stderr,none": 0.0
   },
   "leaderboard_math_num_theory_hard_fr": {
-    "alias": "
-    "exact_match,none": 0.
-    "exact_match_stderr,none": 0.
+    "alias": " - leaderboard_math_num_theory_hard_fr",
+    "exact_match,none": 0.004608294930875576,
+    "exact_match_stderr,none": 0.004608294930875576
   },
   "leaderboard_math_prealgebra_hard_fr": {
-    "alias": "
-    "exact_match,none": 0.
-    "exact_match_stderr,none": 0.
+    "alias": " - leaderboard_math_prealgebra_hard_fr",
+    "exact_match,none": 0.0,
+    "exact_match_stderr,none": 0.0
   },
   "leaderboard_math_precalculus_hard_fr": {
-    "alias": "
-    "exact_match,none": 0.
-    "exact_match_stderr,none": 0.
+    "alias": " - leaderboard_math_precalculus_hard_fr",
+    "exact_match,none": 0.0,
+    "exact_match_stderr,none": 0.0
   },
   "leaderboard_mmlu_fr": {
     "alias": " - leaderboard_mmlu_fr",
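In the detailed results file above, each MATH-hard-fr subtask stores its score under `exact_match,none` together with a matching `exact_match_stderr,none`. A minimal sketch for pulling those pairs out of the full file, assuming the usual lm-evaluation-harness layout with a top-level `results` object keyed by task name (the hunk only shows lines 153-186, so that top-level key is an assumption):

```python
# Sketch: list the per-subtask MATH-hard-fr scores from the detailed results
# file above. The top-level "results" dict keyed by task name is assumed from
# the lm-evaluation-harness output format; only part of the file is shown here.
import json

with open("Cedille/fr-boris/results_2024-10-28T07-50-37.539400.json",
          encoding="utf-8") as f:
    detailed = json.load(f)

for task, metrics in detailed["results"].items():
    if task.startswith("leaderboard_math_") and task.endswith("_hard_fr"):
        em = metrics.get("exact_match,none")
        err = metrics.get("exact_match_stderr,none")
        if em is not None:
            print(f"{task}: {em:.4f} ± {err:.4f}")
```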
Qwen/Qwen2.5-3B-Instruct/results_2024-11-03T18-25-56.604265.json
ADDED
The diff for this file is too large to render. See raw diff.
Qwen/Qwen2.5-3B-Instruct/results_2024-11-03T18-25-56.604265_norm.json
ADDED
@@ -0,0 +1,26 @@
+{
+  "config": {
+    "model_name": "Qwen/Qwen2.5-3B-Instruct",
+    "model_dtype": "torch.float16"
+  },
+  "results": {
+    "BBH-fr": {
+      "metric_name": 0.20600000000000002
+    },
+    "GPQA-fr": {
+      "metric_name": 0.049100000000000005
+    },
+    "IFEval-fr": {
+      "metric_name": 0.1564
+    },
+    "MUSR-fr": {
+      "metric_name": 0.0208
+    },
+    "MATH Lvl5-fr": {
+      "metric_name": 0.1788
+    },
+    "MMMLU-fr": {
+      "metric_name": 0.5069
+    }
+  }
+}
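The new `_norm.json` above reduces Qwen2.5-3B-Instruct to six normalized benchmark scores. If the leaderboard's overall score is a plain unweighted mean of these six values (an assumption; the aggregation rule is not part of this commit), it can be recomputed directly from the file:

```python
# Sketch: unweighted mean over the six normalized benchmarks in the file above.
# Whether this leaderboard actually averages them this way is an assumption.
import json

with open("Qwen/Qwen2.5-3B-Instruct/results_2024-11-03T18-25-56.604265_norm.json",
          encoding="utf-8") as f:
    norm = json.load(f)

scores = [entry["metric_name"] for entry in norm["results"].values()]
print(f"{norm['config']['model_name']}: {sum(scores) / len(scores):.4f}")
# (0.2060 + 0.0491 + 0.1564 + 0.0208 + 0.1788 + 0.5069) / 6 ≈ 0.1863
```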
Qwen/Qwen2.5-7B-Instruct/results_2024-11-03T19-24-47.152663.json
ADDED
The diff for this file is too large to render. See raw diff.
croissantllm/CroissantLLMChat-v0.1/results_2024-10-23T16-17-20.647816_norm.json → Qwen/Qwen2.5-7B-Instruct/results_2024-11-03T19-24-47.152663_norm.json
RENAMED
@@ -1,26 +1,26 @@
 {
   "config": {
-    "
-    "
+    "model_name": "Qwen/Qwen2.5-7B-Instruct",
+    "model_dtype": "torch.float16"
   },
   "results": {
     "BBH-fr": {
-      "metric_name": 0.
+      "metric_name": 0.3245
     },
     "GPQA-fr": {
-      "metric_name": 0.
+      "metric_name": 0.11320000000000001
     },
     "IFEval-fr": {
-      "metric_name": 0.
+      "metric_name": 0.1655
     },
     "MUSR-fr": {
-      "metric_name": 0.
+      "metric_name": 0.049
     },
     "MATH Lvl5-fr": {
-      "metric_name": 0.
+      "metric_name": 0.3528
     },
     "MMMLU-fr": {
-      "metric_name": 0.
+      "metric_name": 0.6006
     }
   }
 }
bofenghuang/vigostral-7b-chat/results_2024-10-28T07-11-45.958853.json
CHANGED
@@ -153,32 +153,32 @@
     "alias": " - leaderboard_math_hard_fr"
   },
   "leaderboard_math_algebra_hard_fr": {
-    "alias": "
-    "exact_match,none": 0.
-    "exact_match_stderr,none": 0.
+    "alias": " - leaderboard_math_algebra_hard_fr",
+    "exact_match,none": 0.04285714285714286,
+    "exact_match_stderr,none": 0.010841447603579576
   },
   "leaderboard_math_counting_and_prob_hard_fr": {
-    "alias": "
-    "exact_match,none": 0.
-    "exact_match_stderr,none": 0.
+    "alias": " - leaderboard_math_counting_and_prob_hard_fr",
+    "exact_match,none": 0.015306122448979591,
+    "exact_match_stderr,none": 0.008791559199116585
   },
   "leaderboard_math_geometry_hard_fr": {
-    "alias": "
-    "exact_match,none": 0.
-    "exact_match_stderr,none": 0.
+    "alias": " - leaderboard_math_geometry_hard_fr",
+    "exact_match,none": 0.019417475728155338,
+    "exact_match_stderr,none": 0.009637436498668234
   },
   "leaderboard_math_num_theory_hard_fr": {
-    "alias": "
-    "exact_match,none": 0.
-    "exact_match_stderr,none": 0.
+    "alias": " - leaderboard_math_num_theory_hard_fr",
+    "exact_match,none": 0.03686635944700461,
+    "exact_match_stderr,none": 0.012821284204351168
   },
   "leaderboard_math_prealgebra_hard_fr": {
-    "alias": "
-    "exact_match,none": 0.
-    "exact_match_stderr,none": 0.
+    "alias": " - leaderboard_math_prealgebra_hard_fr",
+    "exact_match,none": 0.07079646017699115,
+    "exact_match_stderr,none": 0.024235511394033897
   },
   "leaderboard_math_precalculus_hard_fr": {
-    "alias": "
+    "alias": " - leaderboard_math_precalculus_hard_fr",
     "exact_match,none": 0.011904761904761904,
     "exact_match_stderr,none": 0.011904761904761906
   },
bofenghuang/vigostral-7b-chat/results_2024-10-28T07-11-45.958853_norm.json
CHANGED
@@ -17,7 +17,7 @@
       "metric_name": 0.055
     },
     "MATH Lvl5-fr": {
-      "metric_name": 0.
+      "metric_name": 0.0329
     },
     "MMMLU-fr": {
       "metric_name": 0.43060000000000004
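The two vigostral diffs are consistent with each other: the `MATH Lvl5-fr` value 0.0329 written to the `_norm.json` above matches the unweighted mean of the six `exact_match,none` subtask scores visible in the detailed hunk. Treating the aggregate as exactly that mean, over exactly those subtasks, is an inference from these numbers rather than something the commit states; a quick check:

```python
# Sketch: verify that 0.0329 is (to 4 decimals) the unweighted mean of the six
# MATH-hard-fr subtask scores from the vigostral diff above. The weighting and
# the subtask set are assumptions inferred from the visible hunk.
subtask_exact_match = {
    "algebra": 0.04285714285714286,
    "counting_and_prob": 0.015306122448979591,
    "geometry": 0.019417475728155338,
    "num_theory": 0.03686635944700461,
    "prealgebra": 0.07079646017699115,
    "precalculus": 0.011904761904761904,  # unchanged line in the hunk
}
mean = sum(subtask_exact_match.values()) / len(subtask_exact_match)
print(round(mean, 4))  # 0.0329, matching the MATH Lvl5-fr value above
```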
croissantllm/CroissantLLMChat-v0.1/results_2024-11-07T07-31-07.403461_norm.json
ADDED
@@ -0,0 +1,26 @@
+{
+  "config": {
+    "model_name": "croissantllm/CroissantLLMChat-v0.1",
+    "model_dtype": "torch.float16"
+  },
+  "results": {
+    "BBH-fr": {
+      "metric_name": 0.030699999999999998
+    },
+    "GPQA-fr": {
+      "metric_name": 0.0078000000000000005
+    },
+    "IFEval-fr": {
+      "metric_name": 0.113
+    },
+    "MUSR-fr": {
+      "metric_name": 0.025099999999999997
+    },
+    "MATH Lvl5-fr": {
+      "metric_name": 0.0075
+    },
+    "MMMLU-fr": {
+      "metric_name": 0.1672
+    }
+  }
+}
google/gemma-2-9b/results_2024-10-23T15-43-20.426375_norm.json
CHANGED
@@ -17,7 +17,7 @@
       "metric_name": 0.0583
     },
     "MATH Lvl5-fr": {
-      "metric_name": 0.
+      "metric_name": 0.1069
     },
     "MMMLU-fr": {
      "metric_name": 0.5659
ibm-granite/granite-3.0-8b-instruct/results_2024-10-26T09-12-37.981264.json
ADDED
The diff for this file is too large to render. See raw diff.
ibm-granite/{ibm-granite__granite-3.0-8b-instruct → granite-3.0-8b-instruct}/results_2024-10-26T10-18-22.771337_norm.json
RENAMED
@@ -17,7 +17,7 @@
       "metric_name": 0.0355
     },
     "MATH Lvl5-fr": {
-      "metric_name": 0.
+      "metric_name": 0.0984
     },
     "MMMLU-fr": {
       "metric_name": 0.4316
jpacifico/Chocolatine-14B-Instruct-DPO-v1.2/results_2024-11-06T22-46-29.057763.json
ADDED
The diff for this file is too large to render. See raw diff.
jpacifico/Chocolatine-14B-Instruct-DPO-v1.2/{results_2024-10-26T15-37-41.793620_norm.json → results_2024-11-06T22-46-29.057763_norm.json}
RENAMED
@@ -5,19 +5,19 @@
   },
   "results": {
     "BBH-fr": {
-      "metric_name": 0.
+      "metric_name": 0.39899999999999997
     },
     "GPQA-fr": {
-      "metric_name": 0.
+      "metric_name": 0.1209
     },
     "IFEval-fr": {
-      "metric_name": 0.
+      "metric_name": 0.12359999999999999
     },
     "MUSR-fr": {
-      "metric_name": 0.
+      "metric_name": 0.092
     },
     "MATH Lvl5-fr": {
-      "metric_name": 0.
+      "metric_name": 0.1734
     },
     "MMMLU-fr": {
       "metric_name": 0.6514
jpacifico/Chocolatine-3B-Instruct-DPO-v1.2/results_2024-11-06T21-42-12.123871.json
ADDED
The diff for this file is too large to render. See raw diff.
jpacifico/Chocolatine-3B-Instruct-DPO-v1.2/{results_2024-10-26T13-36-16.081824_norm.json → results_2024-11-06T21-42-12.123871_norm.json}
RENAMED
@@ -5,22 +5,22 @@
   },
   "results": {
     "BBH-fr": {
-      "metric_name": 0.
+      "metric_name": 0.27449999999999997
     },
     "GPQA-fr": {
-      "metric_name": 0.
+      "metric_name": 0.0718
     },
     "IFEval-fr": {
-      "metric_name": 0.
+      "metric_name": 0.1362
     },
     "MUSR-fr": {
-      "metric_name": 0.
+      "metric_name": 0.0355
     },
     "MATH Lvl5-fr": {
-      "metric_name": 0.
+      "metric_name": 0.1379
     },
     "MMMLU-fr": {
-      "metric_name": 0.
+      "metric_name": 0.5153
     }
   }
 }
meta-llama/Llama-3.1-8B/results_2024-10-26T11-52-10.553665_norm.json
CHANGED
@@ -17,7 +17,7 @@
       "metric_name": 0.0387
     },
     "MATH Lvl5-fr": {
-      "metric_name": 0.
+      "metric_name": 0.0429
     },
     "MMMLU-fr": {
       "metric_name": 0.47479999999999994
microsoft/Phi-3-medium-4k-instruct/results_2024-11-03T22-21-03.380896.json
ADDED
The diff for this file is too large to render. See raw diff.
microsoft/Phi-3-medium-4k-instruct/results_2024-11-03T22-21-03.380896_norm.json
ADDED
@@ -0,0 +1,26 @@
+{
+  "config": {
+    "model_name": "microsoft/Phi-3-medium-4k-instruct",
+    "model_dtype": "torch.float16"
+  },
+  "results": {
+    "BBH-fr": {
+      "metric_name": 0.39590000000000003
+    },
+    "GPQA-fr": {
+      "metric_name": 0.1142
+    },
+    "IFEval-fr": {
+      "metric_name": 0.13369999999999999
+    },
+    "MUSR-fr": {
+      "metric_name": 0.0894
+    },
+    "MATH Lvl5-fr": {
+      "metric_name": 0.1841
+    },
+    "MMMLU-fr": {
+      "metric_name": 0.6520999999999999
+    }
+  }
+}
mistralai/Ministral-8B-Instruct-2410/results_2024-11-06T21-08-53.159172.json
ADDED
The diff for this file is too large to render. See raw diff.
mistralai/Ministral-8B-Instruct-2410/{results_2024-10-26T12-54-54.727669_norm.json → results_2024-11-06T21-08-53.159172_norm.json}
RENAMED
@@ -5,22 +5,22 @@
   },
   "results": {
     "BBH-fr": {
-      "metric_name": 0.
+      "metric_name": 0.2119
     },
     "GPQA-fr": {
-      "metric_name": 0.
+      "metric_name": 0.11349999999999999
     },
     "IFEval-fr": {
-      "metric_name": 0.
+      "metric_name": 0.1038
     },
     "MUSR-fr": {
-      "metric_name": 0.
+      "metric_name": 0.0759
     },
     "MATH Lvl5-fr": {
-      "metric_name": 0.
+      "metric_name": 0.1559
     },
     "MMMLU-fr": {
-      "metric_name": 0.
+      "metric_name": 0.4936
     }
   }
 }