Commit b879a8b (verified) · committed by StarscreamDeceptions · 1 parent: b5658ed

Upload 8 files
Claude-3.5-Sonnet/Claude-3.5-Sonnet/results_2024-11-11 15_46_20.425378.json ADDED
@@ -0,0 +1,89 @@
+ {
+ "results": {
+ "mmlu": {
+ "acc": 0.854321
+ },
+ "mmmlu": {
+ "acc": 0.7739
+ },
+ "cmmlu": {
+ "acc": 0.91234
+ },
+ "mmmlu_ar": {
+ "acc": 0.7847884916678535
+ },
+ "mmmlu_bn": {
+ "acc": 0.7463324312775957
+ },
+ "mmmlu_de": {
+ "acc": 0.8174049280729241
+ },
+ "mmmlu_es": {
+ "acc": 0.8277310924369748
+ },
+ "mmmlu_fr": {
+ "acc": 0.8237430565446518
+ },
+ "mmmlu_hi": {
+ "acc": 0.7595784076342402
+ },
+ "mmmlu_id": {
+ "acc": 0.8049423159094145
+ },
+ "mmmlu_it": {
+ "acc": 0.8166215638797892
+ },
+ "mmmlu_ja": {
+ "acc": 0.7942600769121208
+ },
+ "mmmlu_ko": {
+ "acc": 0.7894886768266629
+ },
+ "mmmlu_pt": {
+ "acc": 0.827303802877083
+ },
+ "mmmlu_sw": {
+ "acc": 0.7136447799458766
+ },
+ "mmmlu_yo": {
+ "acc": 0.5445805440820396
+ },
+ "mmmlu_zh": {
+ "acc": 0.7840763424013673
+ }
+ },
+ "versions": {
+ "mmlu": 0,
+ "mmmlu": 0,
+ "cmmlu": 0,
+ "mmmlu_ar": 0,
+ "mmmlu_bn": 0,
+ "mmmlu_de": 0,
+ "mmmlu_es": 0,
+ "mmmlu_fr": 0,
+ "mmmlu_hi": 0,
+ "mmmlu_id": 0,
+ "mmmlu_it": 0,
+ "mmmlu_ja": 0,
+ "mmmlu_ko": 0,
+ "mmmlu_pt": 0,
+ "mmmlu_sw": 0,
+ "mmmlu_yo": 0,
+ "mmmlu_zh": 0
+ },
+ "config": {
+ "model": "Anthropic/Claude-3.5-Sonnet",
+ "model_args": "instruction-tuned=Anthropic/Claude-3.5-Sonnet,revision=main,dtype=bfloat16",
+ "num_fewshot": 5,
+ "batch_size": 1,
+ "batch_sizes": [],
+ "device": "cpu",
+ "no_cache": true,
+ "limit": 20,
+ "bootstrap_iters": 100000,
+ "description_dict": null,
+ "model_dtype": "bfloat16",
+ "model_name": "Anthropic/Claude-3.5-Sonnet",
+ "model_sha": "main"
+ }
+ }
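
Every file in this upload follows the same schema: a "results" block keyed by task, a "versions" block, and a "config" block. A minimal sketch (not part of the commit; the path below assumes the repository layout shown above) that loads one report with the standard library and prints the per-language MMMLU accuracies:

import json

# Hypothetical path, matching the file added in this commit.
path = "Claude-3.5-Sonnet/Claude-3.5-Sonnet/results_2024-11-11 15_46_20.425378.json"

with open(path, encoding="utf-8") as f:
    report = json.load(f)

# Per-language tasks are named "mmmlu_<lang>"; print each accuracy.
for task, metrics in sorted(report["results"].items()):
    if task.startswith("mmmlu_"):
        lang = task.split("_", 1)[1]  # e.g. "ar", "sw", "zh"
        print(f"{lang}: {metrics['acc']:.4f}")
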
GPT4o-mini/GPT4o-mini/results_2024-11-11 15_46_20.425378.json ADDED
@@ -0,0 +1,89 @@
+ {
+ "results": {
+ "mmlu": {
+ "acc": 0.854321
+ },
+ "mmmlu": {
+ "acc": 62.624371782611355
+ },
+ "cmmlu": {
+ "acc": 0.91234
+ },
+ "mmmlu_ar": {
+ "acc": 0.62647770972795904
+ },
+ "mmmlu_bn": {
+ "acc": 0.5994872525281298
+ },
+ "mmmlu_de": {
+ "acc": 0.6796040450078337
+ },
+ "mmmlu_es": {
+ "acc": 0.6822389972938328
+ },
+ "mmmlu_fr": {
+ "acc": 0.6749038598490243
+ },
+ "mmmlu_hi": {
+ "acc": 0.6221991169349096
+ },
+ "mmmlu_id": {
+ "acc": 0.6613730237857855
+ },
+ "mmmlu_it": {
+ "acc": 0.6826662868537245
+ },
+ "mmmlu_ja": {
+ "acc": 0.644139011536818
+ },
+ "mmmlu_ko": {
+ "acc": 0.6354507904856857
+ },
+ "mmmlu_pt": {
+ "acc": 0.6884346959122632
+ },
+ "mmmlu_sw": {
+ "acc": 0.5311209229454493
+ },
+ "mmmlu_yo": {
+ "acc": 0.3803589232303091
+ },
+ "mmmlu_zh": {
+ "acc": 0.6591653610596782
+ }
+ },
+ "versions": {
+ "mmlu": 0,
+ "mmmlu": 0,
+ "cmmlu": 0,
+ "mmmlu_ar": 0,
+ "mmmlu_bn": 0,
+ "mmmlu_de": 0,
+ "mmmlu_es": 0,
+ "mmmlu_fr": 0,
+ "mmmlu_hi": 0,
+ "mmmlu_id": 0,
+ "mmmlu_it": 0,
+ "mmmlu_ja": 0,
+ "mmmlu_ko": 0,
+ "mmmlu_pt": 0,
+ "mmmlu_sw": 0,
+ "mmmlu_yo": 0,
+ "mmmlu_zh": 0
+ },
+ "config": {
+ "model": "openai/GPT4o-mini",
+ "model_args": "instruction-tuned=openai/GPT4o-mini,revision=main,dtype=bfloat16",
+ "num_fewshot": 5,
+ "batch_size": 1,
+ "batch_sizes": [],
+ "device": "cpu",
+ "no_cache": true,
+ "limit": 20,
+ "bootstrap_iters": 100000,
+ "description_dict": null,
+ "model_dtype": "bfloat16",
+ "model_name": "openai/GPT4o-mini",
+ "model_sha": "main"
+ }
+ }
Macro-72B-Chat/Macro-72B-Chat/results_2024-11-11 15_46_20.425378.json CHANGED
@@ -10,46 +10,46 @@
  "acc": 0.91234
  },
  "mmmlu_ar": {
- "acc": 0.7933
+ "acc": 0.7933342828656886
  },
  "mmmlu_bn": {
- "acc": 0.7656
+ "acc": 0.7656316763993734
  },
  "mmmlu_de": {
- "acc": 0.8067
+ "acc": 0.8066514741489816
  },
  "mmmlu_es": {
- "acc": 0.8256
+ "acc": 0.825594644637516
  },
  "mmmlu_fr": {
- "acc": 0.8067
+ "acc": 0.8066514741489816
  },
  "mmmlu_hi": {
- "acc": 0.7686
+ "acc": 0.768551488391967
  },
  "mmmlu_id": {
- "acc": 0.7920
+ "acc": 0.7919811992593647
  },
  "mmmlu_it": {
- "acc": 0.8158
+ "acc": 0.8157669847600056
  },
  "mmmlu_ja": {
- "acc": 0.7916
+ "acc": 0.791553909699473
  },
  "mmmlu_ko": {
- "acc": 0.7877
+ "acc": 0.7877083036604472
  },
  "mmmlu_pt": {
- "acc": 0.8174
+ "acc": 0.8174049280729241
  },
  "mmmlu_sw": {
- "acc": 0.6367
+ "acc": 0.636732659165361
  },
  "mmmlu_yo": {
- "acc": 0.4396
+ "acc": 0.4396097422019655
  },
  "mmmlu_zh": {
- "acc": 0.8007
+ "acc": 0.800669420310497
  }
  },
  "versions": {
@@ -73,7 +73,7 @@
  },
  "config": {
  "model": "AIDC/Macro-72B-Chat",
- "model_args": "instruction-tuned=AIDC/Macro-72B-Chat,revision=main,dtype=bfloat16",
+ "model_args": "chat-tuned=AIDC/Macro-72B-Chat,revision=main,dtype=bfloat16",
  "num_fewshot": 5,
  "batch_size": 1,
  "batch_sizes": [],
@@ -86,4 +86,4 @@
  "model_name": "AIDC/Macro-72B-Chat",
  "model_sha": "main"
  }
- }
+ }
Macro-7B-Chat/Macro-7B-Chat/results_2024-11-11 15_46_20.425378.json ADDED
@@ -0,0 +1,89 @@
+ {
+ "results": {
+ "mmlu": {
+ "acc": 0.854321
+ },
+ "mmmlu": {
+ "acc": 0.6004791747207358
+ },
+ "cmmlu": {
+ "acc": 0.91234
+ },
+ "mmmlu_ar": {
+ "acc": 0.6056829511465603
+ },
+ "mmmlu_bn": {
+ "acc": 0.5435835351089588
+ },
+ "mmmlu_de": {
+ "acc": 0.6591653610596782
+ },
+ "mmmlu_es": {
+ "acc": 0.6773963822817263
+ },
+ "mmmlu_fr": {
+ "acc": 0.6757584389688078
+ },
+ "mmmlu_hi": {
+ "acc": 0.5434411052556616
+ },
+ "mmmlu_id": {
+ "acc": 0.6234866828087167
+ },
+ "mmmlu_it": {
+ "acc": 0.6541803161942743
+ },
+ "mmmlu_ja": {
+ "acc": 0.641860133884062
+ },
+ "mmmlu_ko": {
+ "acc": 0.6294687366472013
+ },
+ "mmmlu_pt": {
+ "acc": 0.676114513602051
+ },
+ "mmmlu_sw": {
+ "acc": 0.43932488249537105
+ },
+ "mmmlu_yo": {
+ "acc": 0.371813132032474
+ },
+ "mmmlu_zh": {
+ "acc": 0.6654322746047572
+ }
+ },
+ "versions": {
+ "mmlu": 0,
+ "mmmlu": 0,
+ "cmmlu": 0,
+ "mmmlu_ar": 0,
+ "mmmlu_bn": 0,
+ "mmmlu_de": 0,
+ "mmmlu_es": 0,
+ "mmmlu_fr": 0,
+ "mmmlu_hi": 0,
+ "mmmlu_id": 0,
+ "mmmlu_it": 0,
+ "mmmlu_ja": 0,
+ "mmmlu_ko": 0,
+ "mmmlu_pt": 0,
+ "mmmlu_sw": 0,
+ "mmmlu_yo": 0,
+ "mmmlu_zh": 0
+ },
+ "config": {
+ "model": "AIDC/Macro-7B-Chat",
+ "model_args": "chat-tuned=AIDC/Macro-7B-Chat,revision=main,dtype=bfloat16",
+ "num_fewshot": 5,
+ "batch_size": 1,
+ "batch_sizes": [],
+ "device": "cpu",
+ "no_cache": true,
+ "limit": 20,
+ "bootstrap_iters": 100000,
+ "description_dict": null,
+ "model_dtype": "bfloat16",
+ "model_name": "AIDC/Macro-7B-Chat",
+ "model_sha": "main"
+ }
+ }
aya-23-35B/aya-23-35B/results_2024-11-11 15_46_20.425378.json ADDED
@@ -0,0 +1,89 @@
+ {
+ "results": {
+ "mmlu": {
+ "acc": 0.854321
+ },
+ "mmmlu": {
+ "acc": 0.5012716951187255
+ },
+ "cmmlu": {
+ "acc": 0.91234
+ },
+ "mmmlu_ar": {
+ "acc": 0.5183734510753454
+ },
+ "mmmlu_bn": {
+ "acc": 0.3294402506765418
+ },
+ "mmmlu_de": {
+ "acc": 0.5544794188861986
+ },
+ "mmmlu_es": {
+ "acc": 0.5799031476997578
+ },
+ "mmmlu_fr": {
+ "acc": 0.5807577268195414
+ },
+ "mmmlu_hi": {
+ "acc": 0.47578692493946734
+ },
+ "mmmlu_id": {
+ "acc": 0.5549779233727389
+ },
+ "mmmlu_it": {
+ "acc": 0.5781227745335422
+ },
+ "mmmlu_ja": {
+ "acc": 0.5452926933485258
+ },
+ "mmmlu_ko": {
+ "acc": 0.5369605469306367
+ },
+ "mmmlu_pt": {
+ "acc": 0.5832502492522432
+ },
+ "mmmlu_sw": {
+ "acc": 0.3358495940749181
+ },
+ "mmmlu_yo": {
+ "acc": 0.3040165218629825
+ },
+ "mmmlu_zh": {
+ "acc": 0.5405925081897166
+ }
+ },
+ "versions": {
+ "mmlu": 0,
+ "mmmlu": 0,
+ "cmmlu": 0,
+ "mmmlu_ar": 0,
+ "mmmlu_bn": 0,
+ "mmmlu_de": 0,
+ "mmmlu_es": 0,
+ "mmmlu_fr": 0,
+ "mmmlu_hi": 0,
+ "mmmlu_id": 0,
+ "mmmlu_it": 0,
+ "mmmlu_ja": 0,
+ "mmmlu_ko": 0,
+ "mmmlu_pt": 0,
+ "mmmlu_sw": 0,
+ "mmmlu_yo": 0,
+ "mmmlu_zh": 0
+ },
+ "config": {
+ "model": "CohereForAI/aya-23-35B",
+ "model_args": "instruction-tuned=CohereForAI/aya-23-35B,revision=main,dtype=bfloat16",
+ "num_fewshot": 5,
+ "batch_size": 1,
+ "batch_sizes": [],
+ "device": "cpu",
+ "no_cache": true,
+ "limit": 20,
+ "bootstrap_iters": 100000,
+ "description_dict": null,
+ "model_dtype": "float16",
+ "model_name": "CohereForAI/aya-23-35B",
+ "model_sha": "main"
+ }
+ }
aya-23-8B/aya-23-8B/results_2024-11-11 15_46_20.425378.json ADDED
@@ -0,0 +1,89 @@
+ {
+ "results": {
+ "mmlu": {
+ "acc": 0.854321
+ },
+ "mmmlu": {
+ "acc": 0.4096486052047938
+ },
+ "cmmlu": {
+ "acc": 0.91234
+ },
+ "mmmlu_ar": {
+ "acc": 0.42073778664007977
+ },
+ "mmmlu_bn": {
+ "acc": 0.2743198974505056
+ },
+ "mmmlu_de": {
+ "acc": 0.4325594644637516
+ },
+ "mmmlu_es": {
+ "acc": 0.4785643070787637
+ },
+ "mmmlu_fr": {
+ "acc": 0.4687366472012534
+ },
+ "mmmlu_hi": {
+ "acc": 0.3886910696481983
+ },
+ "mmmlu_id": {
+ "acc": 0.46695627403503775
+ },
+ "mmmlu_it": {
+ "acc": 0.4707306651474149
+ },
+ "mmmlu_ja": {
+ "acc": 0.4456630109670987
+ },
+ "mmmlu_ko": {
+ "acc": 0.4364050705027774
+ },
+ "mmmlu_pt": {
+ "acc": 0.46859421734795614
+ },
+ "mmmlu_sw": {
+ "acc": 0.2617148554336989
+ },
+ "mmmlu_yo": {
+ "acc": 0.26442102264634665
+ },
+ "mmmlu_zh": {
+ "acc": 0.45698618430423016
+ }
+ },
+ "versions": {
+ "mmlu": 0,
+ "mmmlu": 0,
+ "cmmlu": 0,
+ "mmmlu_ar": 0,
+ "mmmlu_bn": 0,
+ "mmmlu_de": 0,
+ "mmmlu_es": 0,
+ "mmmlu_fr": 0,
+ "mmmlu_hi": 0,
+ "mmmlu_id": 0,
+ "mmmlu_it": 0,
+ "mmmlu_ja": 0,
+ "mmmlu_ko": 0,
+ "mmmlu_pt": 0,
+ "mmmlu_sw": 0,
+ "mmmlu_yo": 0,
+ "mmmlu_zh": 0
+ },
+ "config": {
+ "model": "CohereForAI/aya-23-8B",
+ "model_args": "instruction-tuned=CohereForAI/aya-23-8B,revision=main,dtype=bfloat16",
+ "num_fewshot": 5,
+ "batch_size": 1,
+ "batch_sizes": [],
+ "device": "cpu",
+ "no_cache": true,
+ "limit": 20,
+ "bootstrap_iters": 100000,
+ "description_dict": null,
+ "model_dtype": "float16",
+ "model_name": "CohereForAI/aya-23-8B",
+ "model_sha": "main"
+ }
+ }
aya-expanse-32b/aya-expanse-32b/results_2024-11-11 15_46_20.425378.json ADDED
@@ -0,0 +1,89 @@
+ {
+ "results": {
+ "mmlu": {
+ "acc": 0.854321
+ },
+ "mmmlu": {
+ "acc": 0.5892373898712027
+ },
+ "cmmlu": {
+ "acc": 0.91234
+ },
+ "mmmlu_ar": {
+ "acc": 0.6156530408773679
+ },
+ "mmmlu_bn": {
+ "acc": 0.4390400227887765
+ },
+ "mmmlu_de": {
+ "acc": 0.6470588235294118
+ },
+ "mmmlu_es": {
+ "acc": 0.6753311494089161
+ },
+ "mmmlu_fr": {
+ "acc": 0.6746190001424298
+ },
+ "mmmlu_hi": {
+ "acc": 0.5874519299245122
+ },
+ "mmmlu_id": {
+ "acc": 0.6542515311209229
+ },
+ "mmmlu_it": {
+ "acc": 0.6645776954849737
+ },
+ "mmmlu_ja": {
+ "acc": 0.6434268622703319
+ },
+ "mmmlu_ko": {
+ "acc": 0.6243412619285003
+ },
+ "mmmlu_pt": {
+ "acc": 0.6719128329297821
+ },
+ "mmmlu_sw": {
+ "acc": 0.38363480985614584
+ },
+ "mmmlu_yo": {
+ "acc": 0.33392679105540524
+ },
+ "mmmlu_zh": {
+ "acc": 0.634097706879362
+ }
+ },
+ "versions": {
+ "mmlu": 0,
+ "mmmlu": 0,
+ "cmmlu": 0,
+ "mmmlu_ar": 0,
+ "mmmlu_bn": 0,
+ "mmmlu_de": 0,
+ "mmmlu_es": 0,
+ "mmmlu_fr": 0,
+ "mmmlu_hi": 0,
+ "mmmlu_id": 0,
+ "mmmlu_it": 0,
+ "mmmlu_ja": 0,
+ "mmmlu_ko": 0,
+ "mmmlu_pt": 0,
+ "mmmlu_sw": 0,
+ "mmmlu_yo": 0,
+ "mmmlu_zh": 0
+ },
+ "config": {
+ "model": "CohereForAI/aya-expanse-32b",
+ "model_args": "instruction-tuned=CohereForAI/aya-expanse-32b,revision=main,dtype=bfloat16",
+ "num_fewshot": 5,
+ "batch_size": 1,
+ "batch_sizes": [],
+ "device": "cpu",
+ "no_cache": true,
+ "limit": 20,
+ "bootstrap_iters": 100000,
+ "description_dict": null,
+ "model_dtype": "float16",
+ "model_name": "CohereForAI/aya-expanse-32b",
+ "model_sha": "main"
+ }
+ }
aya-expanse-8b/aya-expanse-8b/results_2024-11-11 15_46_20.425378.json ADDED
@@ -0,0 +1,89 @@
+ {
+ "results": {
+ "mmlu": {
+ "acc": 0.854321
+ },
+ "mmmlu": {
+ "acc": 0.4819826235578977
+ },
+ "cmmlu": {
+ "acc": 0.91234
+ },
+ "mmmlu_ar": {
+ "acc": 0.4874661729098419
+ },
+ "mmmlu_bn": {
+ "acc": 0.3335707164221621
+ },
+ "mmmlu_de": {
+ "acc": 0.5390969947300954
+ },
+ "mmmlu_es": {
+ "acc": 0.5607463324312776
+ },
+ "mmmlu_fr": {
+ "acc": 0.5550491382993875
+ },
+ "mmmlu_hi": {
+ "acc": 0.4620424440962826
+ },
+ "mmmlu_id": {
+ "acc": 0.5333998005982054
+ },
+ "mmmlu_it": {
+ "acc": 0.5528414755732801
+ },
+ "mmmlu_ja": {
+ "acc": 0.5148839196695627
+ },
+ "mmmlu_ko": {
+ "acc": 0.5066942031049708
+ },
+ "mmmlu_pt": {
+ "acc": 0.5583250249252243
+ },
+ "mmmlu_sw": {
+ "acc": 0.3196125907990315
+ },
+ "mmmlu_yo": {
+ "acc": 0.2988178322176328
+ },
+ "mmmlu_zh": {
+ "acc": 0.5252100840336135
+ }
+ },
+ "versions": {
+ "mmlu": 0,
+ "mmmlu": 0,
+ "cmmlu": 0,
+ "mmmlu_ar": 0,
+ "mmmlu_bn": 0,
+ "mmmlu_de": 0,
+ "mmmlu_es": 0,
+ "mmmlu_fr": 0,
+ "mmmlu_hi": 0,
+ "mmmlu_id": 0,
+ "mmmlu_it": 0,
+ "mmmlu_ja": 0,
+ "mmmlu_ko": 0,
+ "mmmlu_pt": 0,
+ "mmmlu_sw": 0,
+ "mmmlu_yo": 0,
+ "mmmlu_zh": 0
+ },
+ "config": {
+ "model": "CohereForAI/aya-expanse-8b",
+ "model_args": "instruction-tuned=CohereForAI/aya-expanse-8b,revision=main,dtype=bfloat16",
+ "num_fewshot": 5,
+ "batch_size": 1,
+ "batch_sizes": [],
+ "device": "cpu",
+ "no_cache": true,
+ "limit": 20,
+ "bootstrap_iters": 100000,
+ "description_dict": null,
+ "model_dtype": "float16",
+ "model_name": "CohereForAI/aya-expanse-8b",
+ "model_sha": "main"
+ }
+ }
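
To compare the eight uploaded reports, a minimal sketch (assuming it is run from the repository root with the <Model>/<Model>/results_*.json layout used in this commit) that ranks the models by their aggregate "mmmlu" accuracy. One of the uploaded files reports that aggregate on a 0-100 scale rather than as a fraction, so values above 1 are normalized before comparison:

import glob
import json

rows = []
for path in glob.glob("*/*/results_*.json"):
    with open(path, encoding="utf-8") as f:
        report = json.load(f)
    acc = report["results"]["mmmlu"]["acc"]
    if acc > 1:  # e.g. 62.62... reported as a percentage -> 0.6262...
        acc /= 100.0
    rows.append((acc, report["config"]["model_name"]))

# Highest aggregate MMMLU accuracy first.
for acc, name in sorted(rows, reverse=True):
    print(f"{acc:.4f}  {name}")
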