llm_pt_leaderboard_requests/deepseek-ai/deepseek-llm-7b-base_eval_request_False_bfloat16_Original.json
Update status of deepseek-ai/deepseek-llm-7b-base_eval_request_False_bfloat16_Original to FINISHED
f0e6a18
{
  "model": "deepseek-ai/deepseek-llm-7b-base",
  "base_model": "",
  "revision": "main",
  "private": false,
  "precision": "bfloat16",
  "params": 7.0,
  "architectures": "LlamaForCausalLM",
  "weight_type": "Original",
  "status": "FINISHED",
  "submitted_time": "2024-02-05T23:08:46Z",
  "model_type": "🟢 : pretrained",
  "source": "script",
  "job_id": 359,
  "job_start_time": "2024-04-03T04-04-12.490520",
  "main_language": "?",
  "eval_version": "1.1.0",
  "result_metrics": {
    "enem_challenge": 0.4177746675997201,
    "bluex": 0.38386648122392214,
    "oab_exams": 0.35671981776765377,
    "assin2_rte": 0.4641752126120648,
    "assin2_sts": 0.23484125545266332,
    "faquad_nli": 0.4396551724137931,
    "hatebr_offensive": 0.5198841409416219,
    "portuguese_hate_speech": 0.5712846347607053,
    "tweetsentbr": 0.5455775795733948
  },
  "result_metrics_average": 0.4370865513717266,
  "result_metrics_npm": 0.1437023037534111
}
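
For reference, "result_metrics_average" is the unweighted mean of the nine task scores in "result_metrics". Below is a minimal Python sketch (not part of the request file) for loading such a request and cross-checking that field; the local filename used here is an assumption that the JSON was saved under its repository name.

# Load an eval request file and verify its reported average.
import json
import math

# Assumed local path: the file's name within the repository.
with open("deepseek-llm-7b-base_eval_request_False_bfloat16_Original.json") as f:
    request = json.load(f)

# The average field should equal the plain mean of the per-task scores.
scores = list(request["result_metrics"].values())
average = sum(scores) / len(scores)
assert math.isclose(average, request["result_metrics_average"], rel_tol=1e-12)

print(f"{request['model']} [{request['status']}]: average = {average:.4f}")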