Dataset columns:

| column | dtype | range |
|---|---|---|
| dataset | string | lengths 5 to 115 |
| config | string | lengths 1 to 162 |
| split | string | lengths 1 to 228 |
| num_examples | int64 | 3 to 341M |
| column_name | string | lengths 0 to 77.9k |
| null_count | int64 | 0 to 62.9M |
| null_proportion | float64 | 0 to 1 |
| min | int64 | 0 to 9.25M |
| max | int64 | 0 to 1.07B |
| mean | float64 | 0 to 90.4M |
| median | float64 | 0 to 80.1M |
| std | float64 | 0 to 130M |
| histogram | dict | |
| partial | bool | 2 classes |
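Each row below reports statistics for one column of one dataset/config/split, which appear to be character-length statistics of the values in that column. As a rough, non-authoritative sketch of how such a row could be recomputed, the snippet below uses the `datasets` library and NumPy for the dair-ai/emotion `split` config, `train` split, `text` column; the binning strategy and standard-deviation convention behind the table are assumptions, so the histogram and `std` may not match it exactly.

```python
# Minimal sketch (assumption: the table holds per-column character-length stats).
# Recomputes one row: dair-ai/emotion, config "split", split "train", column "text".
import numpy as np
from datasets import load_dataset

ds = load_dataset("dair-ai/emotion", "split", split="train")
texts = ds["text"]

null_count = sum(t is None for t in texts)
lengths = np.array([len(t) for t in texts if t is not None])
# The binning used for the table below may differ; 10 equal-width bins is an assumption.
hist, bin_edges = np.histogram(lengths, bins=10)

row = {
    "dataset": "dair-ai/emotion",
    "config": "split",
    "split": "train",
    "num_examples": len(texts),
    "column_name": "text",
    "null_count": null_count,
    "null_proportion": null_count / len(texts),
    "min": int(lengths.min()),
    "max": int(lengths.max()),
    "mean": float(lengths.mean()),
    "median": float(np.median(lengths)),
    "std": float(lengths.std()),  # population std; the table may use a different ddof
    "histogram": {"bin_edges": bin_edges.tolist(), "hist": hist.tolist()},
    "partial": False,
}
print(row)
```

The full table of per-column statistics follows.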
| dataset | config | split | num_examples | column_name | null_count | null_proportion | min | max | mean | median | std | histogram | partial |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| dair-ai/emotion | split | train | 16,000 | text | 0 | 0 | 7 | 300 | 96.84581 | 86 | 55.90495 | { "bin_edges": [ 7, 37, 67, 97, 127, 157, 187, 217, 247, 277, 300 ], "hist": [ 1833, 3789, 3611, 2623, 1804, 1048, 635, 353, 213, 91 ] } | false |
| dair-ai/emotion | split | validation | 2,000 | text | 0 | 0 | 11 | 295 | 95.3475 | 85 | 54.82376 | { "bin_edges": [ 11, 40, 69, 98, 127, 156, 185, 214, 243, 272, 295 ], "hist": [ 293, 473, 415, 309, 228, 140, 66, 35, 29, 12 ] } | false |
| dair-ai/emotion | split | test | 2,000 | text | 0 | 0 | 14 | 296 | 96.5865 | 86 | 55.71599 | { "bin_edges": [ 14, 43, 72, 101, 130, 159, 188, 217, 246, 275, 296 ], "hist": [ 346, 466, 377, 308, 214, 144, 69, 45, 24, 7 ] } | false |
| dair-ai/emotion | unsplit | train | 416,809 | text | 0 | 0 | 2 | 830 | 97.0284 | 86 | 56.19823 | { "bin_edges": [ 2, 85, 168, 251, 334, 417, 500, 583, 666, 749, 830 ], "hist": [ 204631, 162639, 42259, 7256, 17, 4, 2, 0, 0, 1 ] } | false |
| allenai/qasper | qasper | train | 888 | abstract | 0 | 0 | 279 | 2,022 | 982.02703 | 959.5 | 294.62389 | { "bin_edges": [ 279, 454, 629, 804, 979, 1154, 1329, 1504, 1679, 1854, 2022 ], "hist": [ 16, 79, 151, 219, 193, 127, 57, 25, 17, 4 ] } | false |
| allenai/qasper | qasper | train | 888 | id | 0 | 0 | 10 | 10 | 10 | 10 | 0 | { "bin_edges": [ 10, 10 ], "hist": [ 888 ] } | false |
| allenai/qasper | qasper | train | 888 | title | 0 | 0 | 12 | 156 | 71.95495 | 70 | 20.95424 | { "bin_edges": [ 12, 27, 42, 57, 72, 87, 102, 117, 132, 147, 156 ], "hist": [ 6, 43, 166, 247, 217, 136, 47, 19, 6, 1 ] } | false |
| allenai/qasper | qasper | validation | 281 | abstract | 0 | 0 | 338 | 1,868 | 953.51957 | 909 | 299.11687 | { "bin_edges": [ 338, 492, 646, 800, 954, 1108, 1262, 1416, 1570, 1724, 1868 ], "hist": [ 9, 31, 47, 76, 40, 30, 28, 9, 6, 5 ] } | false |
| allenai/qasper | qasper | validation | 281 | id | 0 | 0 | 10 | 10 | 10 | 10 | 0 | { "bin_edges": [ 10, 10 ], "hist": [ 281 ] } | false |
| allenai/qasper | qasper | validation | 281 | title | 0 | 0 | 19 | 142 | 71.29181 | 68 | 21.77187 | { "bin_edges": [ 19, 32, 45, 58, 71, 84, 97, 110, 123, 136, 142 ], "hist": [ 7, 18, 47, 78, 63, 31, 16, 16, 4, 1 ] } | false |
| allenai/qasper | qasper | test | 416 | abstract | 0 | 0 | 252 | 1,909 | 930.93029 | 901.5 | 295.71791 | { "bin_edges": [ 252, 418, 584, 750, 916, 1082, 1248, 1414, 1580, 1746, 1909 ], "hist": [ 16, 26, 67, 108, 77, 63, 33, 15, 8, 3 ] } | false |
| allenai/qasper | qasper | test | 416 | id | 0 | 0 | 10 | 10 | 10 | 10 | 0 | { "bin_edges": [ 10, 10 ], "hist": [ 416 ] } | false |
| allenai/qasper | qasper | test | 416 | title | 0 | 0 | 12 | 153 | 71.10337 | 69 | 22.801 | { "bin_edges": [ 12, 27, 42, 57, 72, 87, 102, 117, 132, 147, 153 ], "hist": [ 8, 28, 76, 111, 88, 65, 28, 9, 2, 1 ] } | false |
| llm-book/wrime-sentiment | default | test | 1,781 | datetime | 0 | 0 | 13 | 16 | 14.76811 | 15 | 0.77721 | { "bin_edges": [ 13, 14, 15, 16, 16 ], "hist": [ 82, 546, 856, 297 ] } | false |
| llm-book/wrime-sentiment | default | test | 1,781 | sentence | 0 | 0 | 3 | 154 | 47.29927 | 39 | 33.27888 | { "bin_edges": [ 3, 19, 35, 51, 67, 83, 99, 115, 131, 147, 154 ], "hist": [ 354, 435, 341, 215, 160, 104, 73, 45, 52, 2 ] } | false |
| llm-book/wrime-sentiment | default | train | 20,149 | datetime | 0 | 0 | 13 | 16 | 14.68773 | 15 | 0.76921 | { "bin_edges": [ 13, 14, 15, 16, 16 ], "hist": [ 1201, 6486, 9866, 2596 ] } | false |
| llm-book/wrime-sentiment | default | train | 20,149 | sentence | 0 | 0 | 1 | 173 | 39.52747 | 30 | 30.78759 | { "bin_edges": [ 1, 19, 37, 55, 73, 91, 109, 127, 145, 163, 173 ], "hist": [ 5468, 6483, 3644, 1882, 1005, 623, 444, 541, 56, 3 ] } | false |
| llm-book/wrime-sentiment | default | validation | 1,608 | datetime | 0 | 0 | 13 | 16 | 14.60261 | 15 | 0.80411 | { "bin_edges": [ 13, 14, 15, 16, 16 ], "hist": [ 140, 546, 735, 187 ] } | false |
| llm-book/wrime-sentiment | default | validation | 1,608 | sentence | 0 | 0 | 3 | 163 | 53.86754 | 46 | 34.72008 | { "bin_edges": [ 3, 20, 37, 54, 71, 88, 105, 122, 139, 156, 163 ], "hist": [ 257, 374, 306, 227, 142, 113, 102, 54, 32, 1 ] } | false |
| ttxy/resume_ner | default | test | 477 | label | 0 | 0 | 9 | 1,035 | 160.87631 | 115 | 155.29666 | { "bin_edges": [ 9, 112, 215, 318, 421, 524, 627, 730, 833, 936, 1035 ], "hist": [ 222, 156, 44, 23, 13, 6, 8, 1, 2, 2 ] } | false |
| ttxy/resume_ner | default | test | 477 | text | 0 | 0 | 7 | 333 | 62.31237 | 49 | 52.13344 | { "bin_edges": [ 7, 40, 73, 106, 139, 172, 205, 238, 271, 304, 333 ], "hist": [ 179, 174, 61, 22, 19, 5, 7, 6, 1, 3 ] } | false |
| ttxy/resume_ner | default | train | 3,821 | label | 0 | 0 | 7 | 1,149 | 162.03612 | 121 | 149.21149 | { "bin_edges": [ 7, 122, 237, 352, 467, 582, 697, 812, 927, 1042, 1149 ], "hist": [ 1940, 1173, 329, 175, 89, 55, 35, 17, 7, 1 ] } | false |
| ttxy/resume_ner | default | train | 3,821 | text | 0 | 0 | 5 | 355 | 63.95629 | 53 | 50.26945 | { "bin_edges": [ 5, 41, 77, 113, 149, 185, 221, 257, 293, 329, 355 ], "hist": [ 1312, 1526, 480, 244, 109, 71, 37, 23, 15, 4 ] } | false |
| ttxy/resume_ner | default | validation | 463 | label | 0 | 0 | 9 | 859 | 145.27214 | 109 | 129.88859 | { "bin_edges": [ 9, 95, 181, 267, 353, 439, 525, 611, 697, 783, 859 ], "hist": [ 180, 184, 48, 14, 11, 12, 6, 6, 0, 2 ] } | false |
| ttxy/resume_ner | default | validation | 463 | text | 0 | 0 | 7 | 355 | 59 | 49 | 45.65881 | { "bin_edges": [ 7, 42, 77, 112, 147, 182, 217, 252, 287, 322, 355 ], "hist": [ 184, 186, 49, 17, 15, 3, 5, 3, 0, 1 ] } | false |
| fedryanto/UnibQuADV2 | plain_text | train | 4,010 | context | 0 | 0 | 65 | 3,845 | 1,107.15387 | 827 | 816.68357 | { "bin_edges": [ 65, 444, 823, 1202, 1581, 1960, 2339, 2718, 3097, 3476, 3845 ], "hist": [ 797, 1184, 599, 594, 212, 148, 198, 162, 71, 45 ] } | false |
| fedryanto/UnibQuADV2 | plain_text | train | 4,010 | id | 0 | 0 | 7 | 7 | 7 | 7 | 0 | { "bin_edges": [ 7, 7 ], "hist": [ 4010 ] } | false |
| fedryanto/UnibQuADV2 | plain_text | train | 4,010 | question | 0 | 0 | 11 | 147 | 50.14988 | 47 | 17.43961 | { "bin_edges": [ 11, 25, 39, 53, 67, 81, 95, 109, 123, 137, 147 ], "hist": [ 102, 1035, 1360, 906, 332, 174, 82, 16, 2, 1 ] } | false |
| fedryanto/UnibQuADV2 | plain_text | validation | 1,036 | context | 0 | 0 | 73 | 4,188 | 1,119.86873 | 620 | 1,219.10519 | { "bin_edges": [ 73, 485, 897, 1309, 1721, 2133, 2545, 2957, 3369, 3781, 4188 ], "hist": [ 382, 322, 79, 63, 2, 68, 0, 1, 0, 119 ] } | false |
| fedryanto/UnibQuADV2 | plain_text | validation | 1,036 | id | 0 | 0 | 7 | 7 | 7 | 7 | 0 | { "bin_edges": [ 7, 7 ], "hist": [ 1036 ] } | false |
| fedryanto/UnibQuADV2 | plain_text | validation | 1,036 | question | 0 | 0 | 13 | 120 | 50.65251 | 47 | 16.24953 | { "bin_edges": [ 13, 24, 35, 46, 57, 68, 79, 90, 101, 112, 120 ], "hist": [ 12, 67, 397, 256, 168, 64, 41, 18, 10, 3 ] } | false |
| redwoodresearch/generated_stories | default | train | 3,825 | text | 0 | 0 | 3,130 | 6,715 | 4,849.41752 | 4,919 | 512.67936 | { "bin_edges": [ 3130, 3489, 3848, 4207, 4566, 4925, 5284, 5643, 6002, 6361, 6715 ], "hist": [ 32, 162, 301, 411, 1031, 1228, 508, 123, 25, 4 ] } | false |
| redwoodresearch/generated_stories | default | validation | 675 | text | 0 | 0 | 2,838 | 6,655 | 4,844.32 | 4,915 | 514.73498 | { "bin_edges": [ 2838, 3220, 3602, 3984, 4366, 4748, 5130, 5512, 5894, 6276, 6655 ], "hist": [ 1, 8, 47, 54, 132, 231, 155, 40, 6, 1 ] } | false |
| ceval/ceval-exam | accountant | test | 443 | A | 0 | 0 | 1 | 110 | 17.36795 | 13 | 15.60652 | { "bin_edges": [ 1, 12, 23, 34, 45, 56, 67, 78, 89, 100, 110 ], "hist": [ 212, 96, 77, 32, 11, 9, 3, 2, 0, 1 ] } | false |
| ceval/ceval-exam | accountant | test | 443 | B | 0 | 0 | 1 | 104 | 17.89391 | 13 | 15.71883 | { "bin_edges": [ 1, 12, 23, 34, 45, 56, 67, 78, 89, 100, 104 ], "hist": [ 207, 95, 71, 41, 16, 7, 4, 1, 0, 1 ] } | false |
| ceval/ceval-exam | accountant | test | 443 | C | 0 | 0 | 1 | 114 | 18.18284 | 14 | 16.41089 | { "bin_edges": [ 1, 13, 25, 37, 49, 61, 73, 85, 97, 109, 114 ], "hist": [ 212, 108, 70, 27, 14, 8, 2, 0, 1, 1 ] } | false |
| ceval/ceval-exam | accountant | test | 443 | D | 0 | 0 | 1 | 94 | 17.85553 | 13 | 15.44108 | { "bin_edges": [ 1, 11, 21, 31, 41, 51, 61, 71, 81, 91, 94 ], "hist": [ 194, 97, 69, 42, 24, 9, 4, 3, 0, 1 ] } | false |
| ceval/ceval-exam | accountant | test | 443 | question | 0 | 0 | 14 | 280 | 58.45372 | 39 | 42.39338 | { "bin_edges": [ 14, 41, 68, 95, 122, 149, 176, 203, 230, 257, 280 ], "hist": [ 227, 82, 46, 42, 27, 14, 1, 2, 1, 1 ] } | false |
| ceval/ceval-exam | accountant | val | 49 | A | 0 | 0 | 1 | 78 | 16.63265 | 8 | 17.51986 | { "bin_edges": [ 1, 9, 17, 25, 33, 41, 49, 57, 65, 73, 78 ], "hist": [ 26, 5, 6, 5, 1, 2, 2, 1, 0, 1 ] } | false |
| ceval/ceval-exam | accountant | val | 49 | B | 0 | 0 | 2 | 94 | 17.79592 | 9 | 20.10077 | { "bin_edges": [ 2, 12, 22, 32, 42, 52, 62, 72, 82, 92, 94 ], "hist": [ 28, 9, 5, 0, 3, 2, 0, 0, 1, 1 ] } | false |
| ceval/ceval-exam | accountant | val | 49 | C | 0 | 0 | 1 | 64 | 18.53061 | 10 | 17.97927 | { "bin_edges": [ 1, 8, 15, 22, 29, 36, 43, 50, 57, 64, 64 ], "hist": [ 20, 7, 8, 2, 3, 3, 0, 4, 1, 1 ] } | false |
| ceval/ceval-exam | accountant | val | 49 | D | 0 | 0 | 1 | 75 | 17.46939 | 11 | 16.60585 | { "bin_edges": [ 1, 9, 17, 25, 33, 41, 49, 57, 65, 73, 75 ], "hist": [ 22, 9, 5, 4, 3, 3, 2, 0, 0, 1 ] } | false |
| ceval/ceval-exam | accountant | val | 49 | question | 0 | 0 | 17 | 206 | 59.4898 | 38 | 48.18589 | { "bin_edges": [ 17, 36, 55, 74, 93, 112, 131, 150, 169, 188, 206 ], "hist": [ 23, 8, 5, 3, 4, 2, 0, 1, 1, 2 ] } | false |
| ceval/ceval-exam | advanced_mathematics | test | 173 | A | 0 | 0 | 1 | 215 | 31.00578 | 18 | 35.88637 | { "bin_edges": [ 1, 23, 45, 67, 89, 111, 133, 155, 177, 199, 215 ], "hist": [ 109, 23, 17, 14, 2, 4, 1, 1, 0, 2 ] } | false |
| ceval/ceval-exam | advanced_mathematics | test | 173 | B | 0 | 0 | 1 | 215 | 30.72254 | 18 | 36.1361 | { "bin_edges": [ 1, 23, 45, 67, 89, 111, 133, 155, 177, 199, 215 ], "hist": [ 110, 22, 17, 14, 2, 4, 1, 1, 0, 2 ] } | false |
| ceval/ceval-exam | advanced_mathematics | test | 173 | C | 0 | 0 | 1 | 215 | 30.93642 | 18 | 35.80635 | { "bin_edges": [ 1, 23, 45, 67, 89, 111, 133, 155, 177, 199, 215 ], "hist": [ 110, 22, 17, 14, 2, 4, 2, 0, 0, 2 ] } | false |
| ceval/ceval-exam | advanced_mathematics | test | 173 | D | 0 | 0 | 1 | 215 | 31.77457 | 18 | 35.47552 | { "bin_edges": [ 1, 23, 45, 67, 89, 111, 133, 155, 177, 199, 215 ], "hist": [ 107, 26, 16, 14, 2, 4, 1, 1, 0, 2 ] } | false |
| ceval/ceval-exam | advanced_mathematics | test | 173 | question | 0 | 0 | 39 | 445 | 113.3237 | 96 | 61.42863 | { "bin_edges": [ 39, 80, 121, 162, 203, 244, 285, 326, 367, 408, 445 ], "hist": [ 55, 56, 36, 11, 6, 5, 2, 1, 0, 1 ] } | false |
| ceval/ceval-exam | art_studies | test | 298 | A | 0 | 0 | 1 | 35 | 3.9396 | 3 | 2.61626 | { "bin_edges": [ 1, 5, 9, 13, 17, 21, 25, 29, 33, 35 ], "hist": [ 204, 86, 6, 1, 0, 0, 0, 0, 1 ] } | false |
| ceval/ceval-exam | art_studies | test | 298 | B | 0 | 0 | 1 | 15 | 3.82886 | 3 | 2.01699 | { "bin_edges": [ 1, 3, 5, 7, 9, 11, 13, 15, 15 ], "hist": [ 88, 113, 73, 14, 6, 1, 2, 1 ] } | false |
| ceval/ceval-exam | art_studies | test | 298 | C | 0 | 0 | 1 | 22 | 3.97651 | 3 | 2.28804 | { "bin_edges": [ 1, 4, 7, 10, 13, 16, 19, 22, 22 ], "hist": [ 156, 109, 25, 6, 1, 0, 0, 1 ] } | false |
| ceval/ceval-exam | art_studies | test | 298 | D | 0 | 0 | 1 | 18 | 4.04027 | 3 | 2.23495 | { "bin_edges": [ 1, 3, 5, 7, 9, 11, 13, 15, 17, 18 ], "hist": [ 74, 123, 61, 28, 8, 1, 2, 0, 1 ] } | false |
| ceval/ceval-exam | art_studies | test | 298 | question | 0 | 0 | 10 | 79 | 22.69128 | 20 | 9.73687 | { "bin_edges": [ 10, 17, 24, 31, 38, 45, 52, 59, 66, 73, 79 ], "hist": [ 86, 108, 52, 29, 12, 5, 5, 0, 0, 1 ] } | false |
| ceval/ceval-exam | art_studies | val | 33 | A | 0 | 0 | 1 | 8 | 3.81818 | 3 | 1.81064 | { "bin_edges": [ 1, 2, 3, 4, 5, 6, 7, 8, 8 ], "hist": [ 1, 9, 7, 5, 4, 4, 2, 1 ] } | false |
| ceval/ceval-exam | art_studies | val | 33 | B | 0 | 0 | 1 | 8 | 3.84848 | 3 | 1.83918 | { "bin_edges": [ 1, 2, 3, 4, 5, 6, 7, 8, 8 ], "hist": [ 1, 9, 8, 2, 6, 4, 2, 1 ] } | false |
| ceval/ceval-exam | art_studies | val | 33 | C | 0 | 0 | 1 | 10 | 3.81818 | 3 | 1.9757 | { "bin_edges": [ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 10 ], "hist": [ 1, 9, 8, 5, 3, 4, 2, 0, 0, 1 ] } | false |
| ceval/ceval-exam | art_studies | val | 33 | D | 0 | 0 | 2 | 7 | 3.48485 | 3 | 1.62252 | { "bin_edges": [ 2, 3, 4, 5, 6, 7, 7 ], "hist": [ 12, 9, 3, 5, 1, 3 ] } | false |
| ceval/ceval-exam | art_studies | val | 33 | question | 0 | 0 | 12 | 65 | 23.33333 | 21 | 11.14862 | { "bin_edges": [ 12, 18, 24, 30, 36, 42, 48, 54, 60, 65 ], "hist": [ 12, 9, 6, 3, 1, 0, 1, 0, 1 ] } | false |
| ceval/ceval-exam | basic_medicine | test | 175 | A | 0 | 0 | 1 | 29 | 6.75429 | 5 | 4.64265 | { "bin_edges": [ 1, 4, 7, 10, 13, 16, 19, 22, 25, 28, 29 ], "hist": [ 36, 79, 23, 13, 16, 4, 2, 0, 0, 2 ] } | false |
| ceval/ceval-exam | basic_medicine | test | 175 | B | 0 | 0 | 1 | 28 | 6.80571 | 5 | 4.29608 | { "bin_edges": [ 1, 4, 7, 10, 13, 16, 19, 22, 25, 28, 28 ], "hist": [ 38, 66, 26, 27, 11, 6, 0, 0, 0, 1 ] } | false |
| ceval/ceval-exam | basic_medicine | test | 175 | C | 0 | 0 | 1 | 28 | 7.28571 | 6 | 4.89294 | { "bin_edges": [ 1, 4, 7, 10, 13, 16, 19, 22, 25, 28, 28 ], "hist": [ 32, 64, 45, 8, 13, 7, 1, 4, 0, 1 ] } | false |
| ceval/ceval-exam | basic_medicine | test | 175 | D | 0 | 0 | 2 | 37 | 7.68571 | 6 | 5.57174 | { "bin_edges": [ 2, 6, 10, 14, 18, 22, 26, 30, 34, 37 ], "hist": [ 83, 49, 21, 11, 6, 2, 1, 1, 1 ] } | false |
| ceval/ceval-exam | basic_medicine | test | 175 | question | 0 | 0 | 7 | 66 | 21.19429 | 20 | 7.77167 | { "bin_edges": [ 7, 13, 19, 25, 31, 37, 43, 49, 55, 61, 66 ], "hist": [ 10, 60, 66, 24, 10, 2, 0, 1, 1, 1 ] } | false |
| ceval/ceval-exam | business_administration | test | 301 | A | 0 | 0 | 1 | 44 | 7.15282 | 5 | 7.09342 | { "bin_edges": [ 1, 6, 11, 16, 21, 26, 31, 36, 41, 44 ], "hist": [ 180, 66, 21, 17, 6, 4, 4, 0, 3 ] } | false |
| ceval/ceval-exam | business_administration | test | 301 | B | 0 | 0 | 1 | 74 | 7.37209 | 5 | 8.2487 | { "bin_edges": [ 1, 9, 17, 25, 33, 41, 49, 57, 65, 73, 74 ], "hist": [ 237, 32, 22, 6, 1, 1, 0, 0, 1, 1 ] } | false |
| ceval/ceval-exam | business_administration | test | 301 | C | 0 | 0 | 1 | 82 | 7.55482 | 4 | 9.21454 | { "bin_edges": [ 1, 10, 19, 28, 37, 46, 55, 64, 73, 82, 82 ], "hist": [ 243, 32, 13, 9, 1, 0, 0, 2, 0, 1 ] } | false |
| ceval/ceval-exam | business_administration | test | 301 | D | 0 | 0 | 1 | 71 | 7.49169 | 4 | 8.67779 | { "bin_edges": [ 1, 9, 17, 25, 33, 41, 49, 57, 65, 71 ], "hist": [ 232, 41, 15, 7, 2, 0, 2, 1, 1 ] } | false |
| ceval/ceval-exam | business_administration | test | 301 | question | 0 | 0 | 10 | 281 | 55.6711 | 37 | 47.00718 | { "bin_edges": [ 10, 38, 66, 94, 122, 150, 178, 206, 234, 262, 281 ], "hist": [ 153, 72, 21, 20, 15, 11, 5, 3, 0, 1 ] } | false |
| ceval/ceval-exam | business_administration | val | 33 | A | 0 | 0 | 1 | 46 | 9.33333 | 6 | 9.84463 | { "bin_edges": [ 1, 6, 11, 16, 21, 26, 31, 36, 41, 46, 46 ], "hist": [ 16, 9, 3, 2, 0, 1, 1, 0, 0, 1 ] } | false |
| ceval/ceval-exam | business_administration | val | 33 | B | 0 | 0 | 1 | 45 | 10.60606 | 6 | 10.99699 | { "bin_edges": [ 1, 6, 11, 16, 21, 26, 31, 36, 41, 45 ], "hist": [ 16, 7, 2, 3, 2, 0, 1, 0, 2 ] } | false |
| ceval/ceval-exam | business_administration | val | 33 | C | 0 | 0 | 2 | 62 | 12.54545 | 6 | 15.13706 | { "bin_edges": [ 2, 9, 16, 23, 30, 37, 44, 51, 58, 62 ], "hist": [ 19, 7, 2, 2, 0, 0, 1, 1, 1 ] } | false |
| ceval/ceval-exam | business_administration | val | 33 | D | 0 | 0 | 2 | 55 | 11.36364 | 6 | 12.36886 | { "bin_edges": [ 2, 8, 14, 20, 26, 32, 38, 44, 50, 55 ], "hist": [ 22, 2, 3, 2, 1, 1, 1, 0, 1 ] } | false |
| ceval/ceval-exam | business_administration | val | 33 | question | 0 | 0 | 13 | 171 | 45.66667 | 32 | 39.182 | { "bin_edges": [ 13, 29, 45, 61, 77, 93, 109, 125, 141, 157, 171 ], "hist": [ 14, 10, 2, 1, 2, 1, 1, 0, 1, 1 ] } | false |
| ceval/ceval-exam | chinese_language_and_literature | test | 209 | A | 0 | 0 | 1 | 24 | 5.35885 | 3 | 4.94785 | { "bin_edges": [ 1, 4, 7, 10, 13, 16, 19, 22, 24 ], "hist": [ 105, 55, 15, 11, 10, 8, 1, 4 ] } | false |
| ceval/ceval-exam | chinese_language_and_literature | test | 209 | B | 0 | 0 | 1 | 31 | 5.51196 | 4 | 5.37812 | { "bin_edges": [ 1, 5, 9, 13, 17, 21, 25, 29, 31 ], "hist": [ 133, 35, 20, 10, 6, 2, 1, 2 ] } | false |
| ceval/ceval-exam | chinese_language_and_literature | test | 209 | C | 0 | 0 | 1 | 37 | 5.65072 | 4 | 5.93373 | { "bin_edges": [ 1, 5, 9, 13, 17, 21, 25, 29, 33, 37, 37 ], "hist": [ 142, 28, 15, 8, 7, 5, 1, 2, 0, 1 ] } | false |
| ceval/ceval-exam | chinese_language_and_literature | test | 209 | D | 0 | 0 | 1 | 41 | 5.69378 | 4 | 5.80633 | { "bin_edges": [ 1, 6, 11, 16, 21, 26, 31, 36, 41, 41 ], "hist": [ 152, 27, 13, 10, 4, 1, 1, 0, 1 ] } | false |
| ceval/ceval-exam | chinese_language_and_literature | test | 209 | question | 0 | 0 | 8 | 86 | 23.27273 | 21 | 10.34767 | { "bin_edges": [ 8, 16, 24, 32, 40, 48, 56, 64, 72, 80, 86 ], "hist": [ 34, 105, 34, 20, 10, 4, 0, 1, 0, 1 ] } | false |
| ceval/ceval-exam | civil_servant | test | 429 | A | 0 | 0 | 1 | 85 | 13.5711 | 9 | 12.72876 | { "bin_edges": [ 1, 10, 19, 28, 37, 46, 55, 64, 73, 82, 85 ], "hist": [ 217, 102, 59, 26, 8, 10, 4, 2, 0, 1 ] } | false |
| ceval/ceval-exam | civil_servant | test | 429 | B | 0 | 0 | 1 | 78 | 14.18182 | 10 | 13.41831 | { "bin_edges": [ 1, 9, 17, 25, 33, 41, 49, 57, 65, 73, 78 ], "hist": [ 193, 104, 58, 35, 16, 11, 4, 3, 3, 2 ] } | false |
| ceval/ceval-exam | civil_servant | test | 429 | C | 0 | 0 | 1 | 78 | 13.89977 | 10 | 12.57377 | { "bin_edges": [ 1, 9, 17, 25, 33, 41, 49, 57, 65, 73, 78 ], "hist": [ 186, 114, 55, 41, 14, 7, 5, 4, 2, 1 ] } | false |
| ceval/ceval-exam | civil_servant | test | 429 | D | 0 | 0 | 1 | 69 | 14.71329 | 10 | 13.27581 | { "bin_edges": [ 1, 8, 15, 22, 29, 36, 43, 50, 57, 64, 69 ], "hist": [ 167, 94, 65, 50, 19, 13, 10, 3, 3, 5 ] } | false |
| ceval/ceval-exam | civil_servant | test | 429 | question | 0 | 0 | 9 | 315 | 81.8951 | 64 | 67.62804 | { "bin_edges": [ 9, 40, 71, 102, 133, 164, 195, 226, 257, 288, 315 ], "hist": [ 175, 49, 56, 45, 42, 34, 13, 10, 4, 1 ] } | false |
| ceval/ceval-exam | civil_servant | val | 47 | A | 0 | 0 | 2 | 32 | 11.42553 | 11 | 7.52025 | { "bin_edges": [ 2, 6, 10, 14, 18, 22, 26, 30, 32 ], "hist": [ 11, 10, 10, 6, 5, 2, 2, 1 ] } | false |
| ceval/ceval-exam | civil_servant | val | 47 | B | 0 | 0 | 2 | 50 | 11.76596 | 11 | 8.84704 | { "bin_edges": [ 2, 7, 12, 17, 22, 27, 32, 37, 42, 47, 50 ], "hist": [ 14, 11, 11, 7, 2, 1, 0, 0, 0, 1 ] } | false |
| ceval/ceval-exam | civil_servant | val | 47 | C | 0 | 0 | 2 | 30 | 11.59574 | 11 | 7.25816 | { "bin_edges": [ 2, 5, 8, 11, 14, 17, 20, 23, 26, 29, 30 ], "hist": [ 10, 6, 6, 6, 10, 2, 3, 2, 0, 2 ] } | false |
| ceval/ceval-exam | civil_servant | val | 47 | D | 0 | 0 | 2 | 32 | 12.23404 | 11 | 8.15932 | { "bin_edges": [ 2, 6, 10, 14, 18, 22, 26, 30, 32 ], "hist": [ 12, 11, 6, 6, 6, 2, 2, 2 ] } | false |
| ceval/ceval-exam | civil_servant | val | 47 | question | 0 | 0 | 13 | 283 | 100.97872 | 110 | 72.37508 | { "bin_edges": [ 13, 41, 69, 97, 125, 153, 181, 209, 237, 265, 283 ], "hist": [ 16, 0, 5, 10, 6, 4, 2, 2, 0, 2 ] } | false |
| ceval/ceval-exam | clinical_medicine | test | 200 | A | 0 | 0 | 1 | 32 | 7.91 | 6 | 5.97498 | { "bin_edges": [ 1, 5, 9, 13, 17, 21, 25, 29, 32 ], "hist": [ 62, 77, 28, 13, 10, 3, 5, 2 ] } | false |
| ceval/ceval-exam | clinical_medicine | test | 200 | B | 0 | 0 | 1 | 32 | 7.97 | 6 | 5.65589 | { "bin_edges": [ 1, 5, 9, 13, 17, 21, 25, 29, 32 ], "hist": [ 58, 71, 31, 22, 8, 8, 0, 2 ] } | false |
| ceval/ceval-exam | clinical_medicine | test | 200 | C | 0 | 0 | 1 | 30 | 8.505 | 7 | 5.32369 | { "bin_edges": [ 1, 4, 7, 10, 13, 16, 19, 22, 25, 28, 30 ], "hist": [ 28, 59, 42, 30, 17, 14, 6, 2, 1, 1 ] } | false |
| ceval/ceval-exam | clinical_medicine | test | 200 | D | 0 | 0 | 1 | 26 | 8.63 | 7 | 5.40641 | { "bin_edges": [ 1, 4, 7, 10, 13, 16, 19, 22, 25, 26 ], "hist": [ 31, 57, 41, 28, 14, 19, 4, 5, 1 ] } | false |
| ceval/ceval-exam | clinical_medicine | test | 200 | question | 0 | 0 | 8 | 180 | 34.11 | 21 | 32.06664 | { "bin_edges": [ 8, 26, 44, 62, 80, 98, 116, 134, 152, 170, 180 ], "hist": [ 132, 31, 9, 9, 3, 5, 6, 3, 1, 1 ] } | false |
| ceval/ceval-exam | college_economics | test | 497 | A | 0 | 0 | 1 | 49 | 10.09256 | 8 | 7.23368 | { "bin_edges": [ 1, 6, 11, 16, 21, 26, 31, 36, 41, 46, 49 ], "hist": [ 156, 149, 99, 47, 29, 10, 2, 3, 1, 1 ] } | false |
| ceval/ceval-exam | college_economics | test | 497 | B | 0 | 0 | 1 | 42 | 10.38028 | 8 | 7.30487 | { "bin_edges": [ 1, 6, 11, 16, 21, 26, 31, 36, 41, 42 ], "hist": [ 147, 154, 103, 45, 26, 10, 8, 3, 1 ] } | false |
| ceval/ceval-exam | college_economics | test | 497 | C | 0 | 0 | 1 | 41 | 10.68813 | 9 | 7.68596 | { "bin_edges": [ 1, 6, 11, 16, 21, 26, 31, 36, 41, 41 ], "hist": [ 147, 140, 100, 52, 30, 14, 8, 5, 1 ] } | false |
| ceval/ceval-exam | college_economics | test | 497 | D | 0 | 0 | 1 | 51 | 10.23742 | 8 | 7.48385 | { "bin_edges": [ 1, 7, 13, 19, 25, 31, 37, 43, 49, 51 ], "hist": [ 212, 141, 74, 40, 19, 7, 3, 0, 1 ] } | false |
| ceval/ceval-exam | college_economics | test | 497 | question | 0 | 0 | 8 | 235 | 36.10664 | 27 | 27.96453 | { "bin_edges": [ 8, 31, 54, 77, 100, 123, 146, 169, 192, 215, 235 ], "hist": [ 293, 122, 39, 19, 17, 4, 1, 0, 0, 2 ] } | false |
| ceval/ceval-exam | college_economics | val | 55 | A | 0 | 0 | 1 | 44 | 11.03636 | 8 | 8.91724 | { "bin_edges": [ 1, 6, 11, 16, 21, 26, 31, 36, 41, 44 ], "hist": [ 15, 20, 8, 4, 2, 4, 1, 0, 1 ] } | false |
| ceval/ceval-exam | college_economics | val | 55 | B | 0 | 0 | 1 | 47 | 11.12727 | 9 | 8.15374 | { "bin_edges": [ 1, 6, 11, 16, 21, 26, 31, 36, 41, 46, 47 ], "hist": [ 16, 15, 11, 7, 5, 0, 0, 0, 0, 1 ] } | false |
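Since this card has no other documentation, a few records copied verbatim from the table above can be loaded into pandas for ad-hoc queries; the column subset and the sort key below are only an illustration.

```python
# Sketch: query a handful of records copied from the table above,
# e.g. to rank columns by mean string length.
import pandas as pd

records = [
    {"dataset": "dair-ai/emotion", "config": "split", "split": "train",
     "column_name": "text", "min": 7, "max": 300, "mean": 96.84581},
    {"dataset": "allenai/qasper", "config": "qasper", "split": "train",
     "column_name": "abstract", "min": 279, "max": 2022, "mean": 982.02703},
    {"dataset": "redwoodresearch/generated_stories", "config": "default", "split": "train",
     "column_name": "text", "min": 3130, "max": 6715, "mean": 4849.41752},
]
df = pd.DataFrame(records)
print(df.sort_values("mean", ascending=False))
```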