Adam Jirkovsky committed
Commit 6d5c270
Parent(s): 4ed7e4d
Remove result files
- czechbench_leaderboard/A_eval_request.json +0 -1
- czechbench_leaderboard/Adam_test_eval_request.json +0 -1
- czechbench_leaderboard/GUI_test_eval_request.json +0 -1
- czechbench_leaderboard/Gemma_test_eval_request.json +0 -1
- czechbench_leaderboard/Ghf_eval_request.json +0 -1
- czechbench_leaderboard/Name_eval_request.json +0 -1
- czechbench_leaderboard/New Link Test_eval_request.json +0 -1
- czechbench_leaderboard/OpenAI placeholder_eval_request.json +0 -1
- czechbench_leaderboard/Test..._eval_request.json +0 -1
- czechbench_leaderboard/aaa_eval_request.json +0 -1
- czechbench_leaderboard/assss_eval_request.json +0 -1
- czechbench_leaderboard/dasdasdas_eval_request.json +0 -1
- czechbench_leaderboard/dddd_eval_request.json +0 -1
- czechbench_leaderboard/monday_test_eval_request.json +0 -1
- czechbench_leaderboard/q_eval_request.json +0 -1
- czechbench_leaderboard/r_eval_request.json +0 -1
- czechbench_leaderboard/rqqq_eval_request.json +0 -1
- czechbench_leaderboard/rrr_eval_request.json +0 -1
- czechbench_leaderboard/test test_eval_request.json +0 -1
- czechbench_leaderboard/test123_x_eval_request.json +0 -1
- czechbench_leaderboard/test3_eval_request.json +0 -1
- czechbench_leaderboard/test_url_eval_request.json +0 -1
- czechbench_leaderboard/upstage_SOLAR-10.7B-Instruct-v1.0_float16_14_eval_request.json +0 -1
- czechbench_leaderboard/vicgalle_Roleplay-Llama-3-8B_float16_14_eval_request.json +0 -1
- czechbench_leaderboard/yyy111xxx_eval_request.json +0 -1
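Each removed file held a single-line eval_request JSON record (eval name, precision, HF model id, contact email, and per-task scores), as shown in the diffs below. Purely as a hedged illustration of this kind of cleanup, and not code from this repository, a stale-request sweep could look like the following sketch; only the czechbench_leaderboard directory name and the *_eval_request.json naming pattern are taken from this commit, everything else is an assumption.

# Hypothetical cleanup sketch (not part of this repo): drop leftover
# *_eval_request.json files from the leaderboard data directory.
import glob
import json
import os

REQUEST_DIR = "czechbench_leaderboard"  # directory touched by this commit

for path in sorted(glob.glob(os.path.join(REQUEST_DIR, "*_eval_request.json"))):
    with open(path, encoding="utf-8") as f:
        record = json.load(f)  # each file is one JSON object on a single line
    print(f"removing {path} (eval_name={record.get('eval_name')!r})")
    os.remove(path)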
czechbench_leaderboard/A_eval_request.json
DELETED
@@ -1 +0,0 @@
{"eval_name": "A", "precision": "other", "hf_model_id": "B", "contact_email": "C", "agree_cs": 0.6586921850079744, "anli_cs": 0.5658333333333333, "arc_challenge_cs": 0.8208191126279863, "arc_easy_cs": 0.9212962962962963, "belebele_cs": 0.9039106145251397, "ctkfacts_cs": 0.6989247311827957, "czechnews_cs": 0.799, "fb_comments_cs": 0.766, "gsm8k_cs": 0.5064442759666414, "klokanek_cs": 0.275990099009901, "mall_reviews_cs": 0.6323333333333333, "mmlu_cs": 0.5926015473887815, "sqad_cs": 0.7532621589561092, "subjectivity_cs": 0.828, "truthfulqa_cs": 0.6403940886699507}
czechbench_leaderboard/Adam_test_eval_request.json
DELETED
@@ -1 +0,0 @@
{"eval_name": "Adam_test", "precision": "bfloat16", "hf_model_id": "meta-llama/Meta-Llama-3.1-8B-Instruct", "contact_email": "test@gmail.com", "agree_cs": 0.5311004784688995, "anli_cs": 0.48333333333333334, "arc_challenge_cs": 0.6544368600682594, "arc_easy_cs": 0.7882996632996633, "belebele_cs": 0.8279329608938547, "ctkfacts_cs": 0.6577060931899642, "czechnews_cs": 0.792, "fb_comments_cs": 0.745, "gsm8k_cs": 0.15390447308567096, "klokanek_cs": 0.24257425742574257, "mall_reviews_cs": 0.6523333333333333, "mmlu_cs": 0.5072533849129593, "sqad_cs": 0.7034400948991696, "subjectivity_cs": 0.845, "truthfulqa_cs": 0.4248768472906404}
czechbench_leaderboard/GUI_test_eval_request.json
DELETED
@@ -1 +0,0 @@
{"eval_name": "GUI_test", "precision": "8bit", "hf_model_id": "", "contact_email": "a@bcd.com", "agree_cs": 0.5311004784688995, "anli_cs": 0.48333333333333334, "arc_challenge_cs": 0.6544368600682594, "arc_easy_cs": 0.7882996632996633, "belebele_cs": 0.8279329608938547, "ctkfacts_cs": 0.6577060931899642, "czechnews_cs": 0.792, "fb_comments_cs": 0.745, "gsm8k_cs": 0.15390447308567096, "klokanek_cs": 0.24257425742574257, "mall_reviews_cs": 0.6523333333333333, "mmlu_cs": 0.5072533849129593, "sqad_cs": 0.7034400948991696, "subjectivity_cs": 0.845, "truthfulqa_cs": 0.4248768472906404}
czechbench_leaderboard/Gemma_test_eval_request.json
DELETED
@@ -1 +0,0 @@
{"eval_name": "Gemma_test", "precision": "bfloat16", "hf_model_id": "google/gemma-2-9b-it", "contact_email": "jirkovsky.adam@gmail.com", "agree_cs": 0.6586921850079744, "anli_cs": 0.5658333333333333, "arc_challenge_cs": 0.8208191126279863, "arc_easy_cs": 0.9212962962962963, "belebele_cs": 0.9039106145251397, "ctkfacts_cs": 0.6989247311827957, "czechnews_cs": 0.799, "fb_comments_cs": 0.766, "gsm8k_cs": 0.5064442759666414, "klokanek_cs": 0.275990099009901, "mall_reviews_cs": 0.6323333333333333, "mmlu_cs": 0.5926015473887815, "sqad_cs": 0.7532621589561092, "subjectivity_cs": 0.828, "truthfulqa_cs": 0.6403940886699507}
czechbench_leaderboard/Ghf_eval_request.json
DELETED
@@ -1 +0,0 @@
{"eval_name": "Ghf", "precision": "other", "hf_model_id": "B", "contact_email": "C", "agree_cs": 0.6586921850079744, "anli_cs": 0.5658333333333333, "arc_challenge_cs": 0.8208191126279863, "arc_easy_cs": 0.9212962962962963, "belebele_cs": 0.9039106145251397, "ctkfacts_cs": 0.6989247311827957, "czechnews_cs": 0.799, "fb_comments_cs": 0.766, "gsm8k_cs": 0.5064442759666414, "klokanek_cs": 0.275990099009901, "mall_reviews_cs": 0.6323333333333333, "mmlu_cs": 0.5926015473887815, "sqad_cs": 0.7532621589561092, "subjectivity_cs": 0.828, "truthfulqa_cs": 0.6403940886699507}
czechbench_leaderboard/Name_eval_request.json
DELETED
@@ -1 +0,0 @@
{"eval_name": "Name", "precision": "other", "hf_model_id": "", "contact_email": "mal", "agree_cs": 0.5311004784688995, "anli_cs": 0.48333333333333334, "arc_challenge_cs": 0.6544368600682594, "arc_easy_cs": 0.7882996632996633, "belebele_cs": 0.8279329608938547, "ctkfacts_cs": 0.6577060931899642, "czechnews_cs": 0.792, "fb_comments_cs": 0.745, "gsm8k_cs": 0.15390447308567096, "klokanek_cs": 0.24257425742574257, "mall_reviews_cs": 0.6523333333333333, "mmlu_cs": 0.5072533849129593, "sqad_cs": 0.7034400948991696, "subjectivity_cs": 0.845, "truthfulqa_cs": 0.4248768472906404}
czechbench_leaderboard/New Link Test_eval_request.json
DELETED
@@ -1 +0,0 @@
{"eval_name": "New Link Test", "precision": "other", "hf_model_id": "https://github.com/jirkoada/czechbench_eval_harness/tree/main/lm_eval/tasks/czechbench#readme", "contact_email": "test@vv.cx", "agree_cs": 0.6586921850079744, "anli_cs": 0.5658333333333333, "arc_challenge_cs": 0.8208191126279863, "arc_easy_cs": 0.9212962962962963, "belebele_cs": 0.9039106145251397, "ctkfacts_cs": 0.6989247311827957, "czechnews_cs": 0.799, "fb_comments_cs": 0.766, "gsm8k_cs": 0.5064442759666414, "klokanek_cs": 0.275990099009901, "mall_reviews_cs": 0.6323333333333333, "mmlu_cs": 0.5926015473887815, "sqad_cs": 0.7532621589561092, "subjectivity_cs": 0.828, "truthfulqa_cs": 0.6403940886699507}
czechbench_leaderboard/OpenAI placeholder_eval_request.json
DELETED
@@ -1 +0,0 @@
{"eval_name": "OpenAI placeholder", "precision": "Other", "hf_model_id": "", "contact_email": "fp@tgl.com", "agree_cs": 0.5311004784688995, "anli_cs": 0.48333333333333334, "arc_challenge_cs": 0.6544368600682594, "arc_easy_cs": 0.7882996632996633, "belebele_cs": 0.8279329608938547, "ctkfacts_cs": 0.6577060931899642, "czechnews_cs": 0.792, "fb_comments_cs": 0.745, "gsm8k_cs": 0.15390447308567096, "klokanek_cs": 0.24257425742574257, "mall_reviews_cs": 0.6523333333333333, "mmlu_cs": 0.5072533849129593, "sqad_cs": 0.7034400948991696, "subjectivity_cs": 0.845, "truthfulqa_cs": 0.4248768472906404}
czechbench_leaderboard/Test..._eval_request.json
DELETED
@@ -1 +0,0 @@
{"eval_name": "Test...", "precision": "other", "hf_model_id": "link", "contact_email": "hhd", "agree_cs": 0.6586921850079744, "anli_cs": 0.5658333333333333, "arc_challenge_cs": 0.8208191126279863, "arc_easy_cs": 0.9212962962962963, "belebele_cs": 0.9039106145251397, "ctkfacts_cs": 0.6989247311827957, "czechnews_cs": 0.799, "fb_comments_cs": 0.766, "gsm8k_cs": 0.5064442759666414, "klokanek_cs": 0.275990099009901, "mall_reviews_cs": 0.6323333333333333, "mmlu_cs": 0.5926015473887815, "sqad_cs": 0.7532621589561092, "subjectivity_cs": 0.828, "truthfulqa_cs": 0.6403940886699507}
czechbench_leaderboard/aaa_eval_request.json
DELETED
@@ -1 +0,0 @@
{"eval_name": "aaa", "agree_cs": 0.0, "anli_cs": 0.0, "arc_challenge_cs": 1.0, "arc_easy_cs": 0.0, "belebele_cs": 0.0, "ctkfacts_cs": 1.0, "czechnews_cs": 1.0, "fb_comments_cs": 0.0, "gsm8k_cs": 0.0, "klokanek_cs": 0.0, "mall_reviews_cs": 1.0, "mmlu_cs": 0.5454545454545454, "sqad_cs": 1.0, "subjectivity_cs": 1.0, "truthfulqa_cs": 0.0}
czechbench_leaderboard/assss_eval_request.json
DELETED
@@ -1 +0,0 @@
{"eval_name": "assss", "agree_cs": 0.0, "anli_cs": 0.0, "arc_challenge_cs": 1.0, "arc_easy_cs": 0.0, "belebele_cs": 0.0, "ctkfacts_cs": 1.0, "czechnews_cs": 1.0, "fb_comments_cs": 0.0, "gsm8k_cs": 0.0, "klokanek_cs": 0.0, "mall_reviews_cs": 1.0, "mmlu_cs": 0.5454545454545454, "sqad_cs": 1.0, "subjectivity_cs": 1.0, "truthfulqa_cs": 0.0}
czechbench_leaderboard/dasdasdas_eval_request.json
DELETED
@@ -1 +0,0 @@
{"eval_name": "dasdasdas", "agree_cs": 0.0, "anli_cs": 0.0, "arc_challenge_cs": 1.0, "arc_easy_cs": 0.0, "belebele_cs": 0.0, "ctkfacts_cs": 1.0, "czechnews_cs": 1.0, "fb_comments_cs": 0.0, "gsm8k_cs": 0.0, "klokanek_cs": 0.0, "mall_reviews_cs": 1.0, "mmlu_cs": 0.5454545454545454, "sqad_cs": 1.0, "subjectivity_cs": 1.0, "truthfulqa_cs": 0.0}
czechbench_leaderboard/dddd_eval_request.json
DELETED
@@ -1 +0,0 @@
{"eval_name": "dddd", "agree_cs": 0.0, "anli_cs": 0.0, "arc_challenge_cs": 1.0, "arc_easy_cs": 0.0, "belebele_cs": 0.0, "ctkfacts_cs": 1.0, "czechnews_cs": 1.0, "fb_comments_cs": 0.0, "gsm8k_cs": 0.0, "klokanek_cs": 0.0, "mall_reviews_cs": 1.0, "mmlu_cs": 0.5454545454545454, "sqad_cs": 1.0, "subjectivity_cs": 1.0, "truthfulqa_cs": 0.0}
czechbench_leaderboard/monday_test_eval_request.json
DELETED
@@ -1 +0,0 @@
{"eval_name": "monday_test", "precision": "bfloat16", "hf_model_id": "monday_test", "contact_email": "monday_test", "agree_cs": 0.6586921850079744, "anli_cs": 0.5658333333333333, "arc_challenge_cs": 0.8208191126279863, "arc_easy_cs": 0.9212962962962963, "belebele_cs": 0.9039106145251397, "ctkfacts_cs": 0.6989247311827957, "czechnews_cs": 0.799, "fb_comments_cs": 0.766, "gsm8k_cs": 0.5064442759666414, "klokanek_cs": 0.275990099009901, "mall_reviews_cs": 0.6323333333333333, "mmlu_cs": 0.5926015473887815, "sqad_cs": 0.7532621589561092, "subjectivity_cs": 0.828, "truthfulqa_cs": 0.6403940886699507}
czechbench_leaderboard/q_eval_request.json
DELETED
@@ -1 +0,0 @@
{"eval_name": "q", "agree_cs": 0.0, "anli_cs": 0.0, "arc_challenge_cs": 1.0, "arc_easy_cs": 0.0, "belebele_cs": 0.0, "ctkfacts_cs": 1.0, "czechnews_cs": 1.0, "fb_comments_cs": 0.0, "gsm8k_cs": 0.0, "klokanek_cs": 0.0, "mall_reviews_cs": 1.0, "mmlu_cs": 0.5454545454545454, "sqad_cs": 1.0, "subjectivity_cs": 1.0, "truthfulqa_cs": 0.0}
czechbench_leaderboard/r_eval_request.json
DELETED
@@ -1 +0,0 @@
{"eval_name": "r", "agree_cs": 0.0, "anli_cs": 0.0, "arc_challenge_cs": 1.0, "arc_easy_cs": 0.0, "belebele_cs": 0.0, "ctkfacts_cs": 1.0, "czechnews_cs": 1.0, "fb_comments_cs": 0.0, "gsm8k_cs": 0.0, "klokanek_cs": 0.0, "mall_reviews_cs": 1.0, "mmlu_cs": 0.5454545454545454, "sqad_cs": 1.0, "subjectivity_cs": 1.0, "truthfulqa_cs": 0.0}
czechbench_leaderboard/rqqq_eval_request.json
DELETED
@@ -1 +0,0 @@
{"eval_name": "rqqq", "agree_cs": 0.0, "anli_cs": 0.0, "arc_challenge_cs": 1.0, "arc_easy_cs": 0.0, "belebele_cs": 0.0, "ctkfacts_cs": 1.0, "czechnews_cs": 1.0, "fb_comments_cs": 0.0, "gsm8k_cs": 0.0, "klokanek_cs": 0.0, "mall_reviews_cs": 1.0, "mmlu_cs": 0.5454545454545454, "sqad_cs": 1.0, "subjectivity_cs": 1.0, "truthfulqa_cs": 0.0}
czechbench_leaderboard/rrr_eval_request.json
DELETED
@@ -1 +0,0 @@
{"eval_name": "rrr", "precision": "4bit", "hf_model_id": "rrr", "contact_email": "rrr", "agree_cs": 0.5311004784688995, "anli_cs": 0.48333333333333334, "arc_challenge_cs": 0.6544368600682594, "arc_easy_cs": 0.7882996632996633, "belebele_cs": 0.8279329608938547, "ctkfacts_cs": 0.6577060931899642, "czechnews_cs": 0.792, "fb_comments_cs": 0.745, "gsm8k_cs": 0.15390447308567096, "klokanek_cs": 0.24257425742574257, "mall_reviews_cs": 0.6523333333333333, "mmlu_cs": 0.5072533849129593, "sqad_cs": 0.7034400948991696, "subjectivity_cs": 0.845, "truthfulqa_cs": 0.4248768472906404}
czechbench_leaderboard/test test_eval_request.json
DELETED
@@ -1 +0,0 @@
{"eval_name": "test test", "precision": "other", "hf_model_id": "hello", "contact_email": "gt", "agree_cs": 0.6586921850079744, "anli_cs": 0.5658333333333333, "arc_challenge_cs": 0.8208191126279863, "arc_easy_cs": 0.9212962962962963, "belebele_cs": 0.9039106145251397, "ctkfacts_cs": 0.6989247311827957, "czechnews_cs": 0.799, "fb_comments_cs": 0.766, "gsm8k_cs": 0.5064442759666414, "klokanek_cs": 0.275990099009901, "mall_reviews_cs": 0.6323333333333333, "mmlu_cs": 0.5926015473887815, "sqad_cs": 0.7532621589561092, "subjectivity_cs": 0.828, "truthfulqa_cs": 0.6403940886699507}
czechbench_leaderboard/test123_x_eval_request.json
DELETED
@@ -1 +0,0 @@
{"eval_name": "test123_x", "agree_cs": 0.0, "anli_cs": 0.0, "arc_challenge_cs": 1.0, "arc_easy_cs": 0.0, "belebele_cs": 0.0, "ctkfacts_cs": 1.0, "czechnews_cs": 1.0, "fb_comments_cs": 0.0, "gsm8k_cs": 0.0, "klokanek_cs": 0.0, "mall_reviews_cs": 1.0, "mmlu_cs": 0.5454545454545454, "sqad_cs": 1.0, "subjectivity_cs": 1.0, "truthfulqa_cs": 0.0}
czechbench_leaderboard/test3_eval_request.json
DELETED
@@ -1 +0,0 @@
{"eval_name": "test3", "agree_cs": 0.0, "anli_cs": 0.0, "arc_challenge_cs": 1.0, "arc_easy_cs": 0.0, "belebele_cs": 0.0, "ctkfacts_cs": 1.0, "czechnews_cs": 1.0, "fb_comments_cs": 0.0, "gsm8k_cs": 0.0, "klokanek_cs": 0.0, "mall_reviews_cs": 1.0, "mmlu_cs": 0.5454545454545454, "sqad_cs": 1.0, "subjectivity_cs": 1.0, "truthfulqa_cs": 0.0}
czechbench_leaderboard/test_url_eval_request.json
DELETED
@@ -1 +0,0 @@
{"eval_name": "test_url", "precision": "bfloat16", "hf_model_id": "http://www.google.com", "contact_email": "email", "agree_cs": 0.6586921850079744, "anli_cs": 0.5658333333333333, "arc_challenge_cs": 0.8208191126279863, "arc_easy_cs": 0.9212962962962963, "belebele_cs": 0.9039106145251397, "ctkfacts_cs": 0.6989247311827957, "czechnews_cs": 0.799, "fb_comments_cs": 0.766, "gsm8k_cs": 0.5064442759666414, "klokanek_cs": 0.275990099009901, "mall_reviews_cs": 0.6323333333333333, "mmlu_cs": 0.5926015473887815, "sqad_cs": 0.7532621589561092, "subjectivity_cs": 0.828, "truthfulqa_cs": 0.6403940886699507}
czechbench_leaderboard/upstage_SOLAR-10.7B-Instruct-v1.0_float16_14_eval_request.json
DELETED
@@ -1 +0,0 @@
{"eval_name": "upstage_SOLAR-10.7B-Instruct-v1.0_float16_14", "agree_cs": 0.0, "anli_cs": 0.0, "arc_challenge_cs": 1.0, "arc_easy_cs": 0.0, "belebele_cs": 0.0, "ctkfacts_cs": 1.0, "czechnews_cs": 1.0, "fb_comments_cs": 0.0, "gsm8k_cs": 0.0, "klokanek_cs": 0.0, "mall_reviews_cs": 1.0, "mmlu_cs": 0.5454545454545454, "mmlu_cs_abstract_algebra": 0.0, "mmlu_cs_anatomy": 1.0, "mmlu_cs_astronomy": 1.0, "mmlu_cs_business_ethics": 1.0, "mmlu_cs_clinical_knowledge": 0.0, "mmlu_cs_college_biology": 0.0, "mmlu_cs_college_chemistry": 0.0, "mmlu_cs_college_computer_science": 0.0, "mmlu_cs_college_mathematics": 0.0, "mmlu_cs_college_medicine": 1.0, "mmlu_cs_college_physics": 1.0, "mmlu_cs_computer_security": 1.0, "mmlu_cs_conceptual_physics": 0.0, "mmlu_cs_econometrics": 1.0, "mmlu_cs_electrical_engineering": 1.0, "mmlu_cs_elementary_mathematics": 0.0, "mmlu_cs_formal_logic": 1.0, "mmlu_cs_global_facts": 1.0, "mmlu_cs_high_school_biology": 0.0, "mmlu_cs_high_school_chemistry": 0.0, "mmlu_cs_high_school_computer_science": 1.0, "mmlu_cs_high_school_european_history": 1.0, "mmlu_cs_high_school_geography": 0.0, "mmlu_cs_high_school_government_and_politics": 0.0, "mmlu_cs_high_school_macroeconomics": 1.0, "mmlu_cs_high_school_mathematics": 0.0, "mmlu_cs_high_school_microeconomics": 0.0, "mmlu_cs_high_school_physics": 1.0, "mmlu_cs_high_school_psychology": 0.0, "mmlu_cs_high_school_statistics": 1.0, "mmlu_cs_high_school_us_history": 0.0, "mmlu_cs_high_school_world_history": 0.0, "mmlu_cs_human_aging": 1.0, "mmlu_cs_human_sexuality": 1.0, "mmlu_cs_international_law": 1.0, "mmlu_cs_jurisprudence": 0.0, "mmlu_cs_logical_fallacies": 0.0, "mmlu_cs_machine_learning": 0.0, "mmlu_cs_management": 1.0, "mmlu_cs_marketing": 1.0, "mmlu_cs_medical_genetics": 1.0, "mmlu_cs_miscellaneous": 1.0, "mmlu_cs_moral_disputes": 0.0, "mmlu_cs_moral_scenarios": 0.0, "mmlu_cs_nutrition": 1.0, "mmlu_cs_philosophy": 1.0, "mmlu_cs_prehistory": 0.0, "mmlu_cs_professional_accounting": 0.0, "mmlu_cs_professional_medicine": 1.0, "mmlu_cs_professional_psychology": 1.0, "mmlu_cs_public_relations": 1.0, "mmlu_cs_security_studies": 1.0, "mmlu_cs_sociology": 1.0, "mmlu_cs_virology": 0.0, "mmlu_cs_world_religions": 1.0, "sqad_cs": 1.0, "subjectivity_cs": 1.0, "truthfulqa_cs": 0.0}
czechbench_leaderboard/vicgalle_Roleplay-Llama-3-8B_float16_14_eval_request.json
DELETED
@@ -1 +0,0 @@
{"eval_name": "vicgalle_Roleplay-Llama-3-8B_float16_14", "agree_cs": 0.0, "anli_cs": 0.0, "arc_challenge_cs": 1.0, "arc_easy_cs": 0.0, "belebele_cs": 0.0, "ctkfacts_cs": 1.0, "czechnews_cs": 1.0, "fb_comments_cs": 0.0, "gsm8k_cs": 0.0, "klokanek_cs": 0.0, "mall_reviews_cs": 1.0, "mmlu_cs": 0.5454545454545454, "mmlu_cs_abstract_algebra": 0.0, "mmlu_cs_anatomy": 1.0, "mmlu_cs_astronomy": 1.0, "mmlu_cs_business_ethics": 1.0, "mmlu_cs_clinical_knowledge": 0.0, "mmlu_cs_college_biology": 0.0, "mmlu_cs_college_chemistry": 0.0, "mmlu_cs_college_computer_science": 0.0, "mmlu_cs_college_mathematics": 0.0, "mmlu_cs_college_medicine": 1.0, "mmlu_cs_college_physics": 1.0, "mmlu_cs_computer_security": 1.0, "mmlu_cs_conceptual_physics": 0.0, "mmlu_cs_econometrics": 1.0, "mmlu_cs_electrical_engineering": 1.0, "mmlu_cs_elementary_mathematics": 0.0, "mmlu_cs_formal_logic": 1.0, "mmlu_cs_global_facts": 1.0, "mmlu_cs_high_school_biology": 0.0, "mmlu_cs_high_school_chemistry": 0.0, "mmlu_cs_high_school_computer_science": 1.0, "mmlu_cs_high_school_european_history": 1.0, "mmlu_cs_high_school_geography": 0.0, "mmlu_cs_high_school_government_and_politics": 0.0, "mmlu_cs_high_school_macroeconomics": 1.0, "mmlu_cs_high_school_mathematics": 0.0, "mmlu_cs_high_school_microeconomics": 0.0, "mmlu_cs_high_school_physics": 1.0, "mmlu_cs_high_school_psychology": 0.0, "mmlu_cs_high_school_statistics": 1.0, "mmlu_cs_high_school_us_history": 0.0, "mmlu_cs_high_school_world_history": 0.0, "mmlu_cs_human_aging": 1.0, "mmlu_cs_human_sexuality": 1.0, "mmlu_cs_international_law": 1.0, "mmlu_cs_jurisprudence": 0.0, "mmlu_cs_logical_fallacies": 0.0, "mmlu_cs_machine_learning": 0.0, "mmlu_cs_management": 1.0, "mmlu_cs_marketing": 1.0, "mmlu_cs_medical_genetics": 1.0, "mmlu_cs_miscellaneous": 1.0, "mmlu_cs_moral_disputes": 0.0, "mmlu_cs_moral_scenarios": 0.0, "mmlu_cs_nutrition": 1.0, "mmlu_cs_philosophy": 1.0, "mmlu_cs_prehistory": 0.0, "mmlu_cs_professional_accounting": 0.0, "mmlu_cs_professional_medicine": 1.0, "mmlu_cs_professional_psychology": 1.0, "mmlu_cs_public_relations": 1.0, "mmlu_cs_security_studies": 1.0, "mmlu_cs_sociology": 1.0, "mmlu_cs_virology": 0.0, "mmlu_cs_world_religions": 1.0, "sqad_cs": 1.0, "subjectivity_cs": 1.0, "truthfulqa_cs": 0.0}
czechbench_leaderboard/yyy111xxx_eval_request.json
DELETED
@@ -1 +0,0 @@
{"eval_name": "yyy111xxx", "precision": "bfloat16", "hf_model_id": "yyy111xxx", "contact_email": "yyy111xxx", "agree_cs": 0.6586921850079744, "anli_cs": 0.5658333333333333, "arc_challenge_cs": 0.8208191126279863, "arc_easy_cs": 0.9212962962962963, "belebele_cs": 0.9039106145251397, "ctkfacts_cs": 0.6989247311827957, "czechnews_cs": 0.799, "fb_comments_cs": 0.766, "gsm8k_cs": 0.5064442759666414, "klokanek_cs": 0.275990099009901, "mall_reviews_cs": 0.6323333333333333, "mmlu_cs": 0.5926015473887815, "sqad_cs": 0.7532621589561092, "subjectivity_cs": 0.828, "truthfulqa_cs": 0.6403940886699507}