{
"results": {
"exams_dialy_vi": {
"acc": 0.20347826086956522,
"acc_norm": 0.2382608695652174
},
"exams_hoahoc_vi": {
"acc": 0.21478260869565216,
"acc_norm": 0.22608695652173913
},
"exams_lichsu_vi": {
"acc": 0.1932710280373832,
"acc_norm": 0.22803738317757008
},
"exams_toan_vi": {
"acc": 0.2312280701754386,
"acc_norm": 0.24596491228070175
},
"exams_van_vi": {
"acc": 0.20816901408450705,
"acc_norm": 0.25971830985915495
},
"exams_vatly_vi": {
"acc": 0.20941176470588235,
"acc_norm": 0.23529411764705882
},
"exams_sinhhoc_vi": {
"acc": 0.22903225806451613,
"acc_norm": 0.2532258064516129
},
"exams_vi": {
"acc": 0.210704960835509,
"acc_norm": 0.242088772845952
}
},
"versions": {
"exams_dialy_vi": 0,
"exams_hoahoc_vi": 0,
"exams_lichsu_vi": 0,
"exams_toan_vi": 0,
"exams_van_vi": 0,
"exams_sinhhoc_vi": 0,
"exams_vatly_vi": 0
},
"config": {
"model": "hf-causal",
"model_args": "pretrained=vinai/PhoGPT-7B5-Instruct",
"num_fewshot": 5,
"batch_size": null,
"batch_sizes": [],
"device": "cuda:0",
"no_cache": false,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}