llm_pt_leaderboard_requests/HuggingFaceH4/zephyr-7b-gemma-v0.1_eval_request_False_bfloat16_Original.json
Update status of HuggingFaceH4/zephyr-7b-gemma-v0.1_eval_request_False_bfloat16_Original to FINISHED
6ba45a2 (verified)
{
  "model": "HuggingFaceH4/zephyr-7b-gemma-v0.1",
  "base_model": "",
  "revision": "main",
  "private": false,
  "precision": "bfloat16",
  "params": 8.538,
  "architectures": "GemmaForCausalLM",
  "weight_type": "Original",
  "status": "FINISHED",
  "submitted_time": "2024-03-02T00:49:26Z",
  "model_type": "💬 : chat models (RLHF, DPO, IFT, ...)",
  "source": "leaderboard",
  "job_id": 386,
  "job_start_time": "2024-04-04T23-04-13.841492",
  "eval_version": "1.1.0",
  "result_metrics": {
    "enem_challenge": 0.5815255423372988,
    "bluex": 0.47426981919332406,
    "oab_exams": 0.40728929384965834,
    "assin2_rte": 0.8604729280813948,
    "assin2_sts": 0.7259016112950178,
    "faquad_nli": 0.7486076732673268,
    "hatebr_offensive": 0.8755151098901099,
    "portuguese_hate_speech": 0.6244738628649016,
    "tweetsentbr": 0.6159470691844793
  },
  "result_metrics_average": 0.6571114344403901,
  "result_metrics_npm": 0.49463744788470404,
  "main_language": "English"
}
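For reference, "result_metrics_average" is the unweighted mean of the nine task scores in "result_metrics"; "result_metrics_npm" additionally normalizes each score against its task's random baseline, which is not stored in this file, so it cannot be recomputed from the record alone. Below is a minimal Python sketch, assuming the JSON above has been saved locally as eval_request.json (an illustrative filename, not part of this repository), that reloads the record and checks the stored average:

import json

# Load a local copy of the eval request record shown above.
with open("eval_request.json", encoding="utf-8") as f:
    request = json.load(f)

scores = request["result_metrics"]

# The average is the plain mean over all nine task scores.
average = sum(scores.values()) / len(scores)

print(f"recomputed average: {average:.16f}")
print(f"stored average:     {request['result_metrics_average']:.16f}")
assert abs(average - request["result_metrics_average"]) < 1e-12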