llm_pt_leaderboard_requests / NousResearch/Nous-Capybara-34B_eval_request_False_bfloat16_Original.json

Update status of NousResearch/Nous-Capybara-34B_eval_request_False_bfloat16_Original to FINISHED
Commit f16e3a9 (verified)
{
  "model": "NousResearch/Nous-Capybara-34B",
  "base_model": "",
  "revision": "main",
  "private": false,
  "precision": "bfloat16",
  "params": 34.0,
  "architectures": "LlamaForCausalLM",
  "weight_type": "Original",
  "main_language": "English",
  "status": "FINISHED",
  "submitted_time": "2024-04-26T07:21:50Z",
  "model_type": "🔶 : fine-tuned/fp on domain-specific datasets",
  "source": "leaderboard",
  "job_id": 617,
  "job_start_time": "2024-05-16T07-59-45.028987",
  "eval_version": "1.1.0",
  "result_metrics": {
    "enem_challenge": 0.7116864940517844,
    "bluex": 0.6300417246175244,
    "oab_exams": 0.5530751708428246,
    "assin2_rte": 0.9007100934823724,
    "assin2_sts": 0.757100596654299,
    "faquad_nli": 0.7731239092495636,
    "hatebr_offensive": 0.7408626005155765,
    "portuguese_hate_speech": 0.7161125319693095,
    "tweetsentbr": 0.7078849222478135
  },
  "result_metrics_average": 0.7211775604034519,
  "result_metrics_npm": 0.5788836162044235
}
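
For reference, "result_metrics_average" is the plain arithmetic mean of the nine per-task scores under "result_metrics". The snippet below is a minimal sketch that reloads this file and recomputes that mean; the local file path is an assumption (adjust it to where the file sits in a checkout of llm_pt_leaderboard_requests), and "result_metrics_npm" is a normalized aggregate whose per-task baselines are not stored in this file, so it is not recomputed here.

import json

# Assumed local path to this eval-request record.
path = "NousResearch/Nous-Capybara-34B_eval_request_False_bfloat16_Original.json"
with open(path, encoding="utf-8") as f:
    request = json.load(f)

# result_metrics_average is the plain mean of the nine task scores.
scores = list(request["result_metrics"].values())
average = sum(scores) / len(scores)

print(f"recomputed average: {average:.16f}")
print(f"reported average:   {request['result_metrics_average']:.16f}")
assert abs(average - request["result_metrics_average"]) < 1e-12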