{
    "_errors_reason": {
        "CheGeKa": [
            {
                "split": "test",
                "type": "no_split"
            }
        ],
        "MathLogicQA": [
            {
                "doc_id": 739,
                "type": "no_id"
            }
        ],
        "PARus": [
            {
                "type": "no_data_field"
            }
        ],
        "RWSD": [
            {
                "doc_id": 61,
                "type": "doc_output_type_error"
            }
        ],
        "SimpleAr": [
            {
                "example_number": 528,
                "type": "no_id_field_for_doc"
            }
        ],
        "ruEthics": [
            {
                "extension": ".jso",
                "type": "extension"
            }
        ],
        "ruHumanEval": [
            {
                "example_number": 128,
                "type": "no_meta_field_for_doc"
            }
        ],
        "ruOpenBookQA": [
            {
                "type": "no_task"
            }
        ],
        "ruWorldTree": [
            {
                "example_number": 443,
                "type": "no_outputs_field_for_doc"
            }
        ]
    },
    "_global_errors_reason": [],
    "error_reason": {
        "CheGeKa": [
            {
                "comment": "Нет поля test в файле задачи.",
                "split": "test",
                "type": "no_split"
            }
        ],
        "MathLogicQA": [
            {
                "comment": "Нет документа с id в файле задачи.",
                "doc_id": 739,
                "type": "no_id"
            }
        ],
        "PARus": [
            {
                "comment": "Нет поля data в файле задачи.",
                "type": "no_data_field"
            }
        ],
        "RWSD": [
            {
                "comment": "Тип поля outputs не соответствует ожидаемому типу.",
                "doc_id": 61,
                "type": "doc_output_type_error"
            }
        ],
        "SimpleAr": [
            {
                "comment": "Невозможно получить поле id из файла задачи для документа с порядковым номером (не id) example_number.",
                "example_number": 528,
                "type": "no_id_field_for_doc"
            }
        ],
        "ruEthics": [
            {
                "comment": "Неправильное расширение файла задачи внутри архива.",
                "extension": ".jso",
                "type": "extension"
            }
        ],
        "ruHumanEval": [
            {
                "comment": "Невозможно получить поле meta из файла задачи для документа с порядковым номером (не id) example_number.",
                "example_number": 128,
                "type": "no_meta_field_for_doc"
            }
        ],
        "ruOpenBookQA": [
            {
                "comment": "Нет файла задачи внутри архива.",
                "type": "no_task"
            }
        ],
        "ruWorldTree": [
            {
                "comment": "Невозможно получить поле outputs из файла задачи для документа с порядковым номером (не id) example_number.",
                "example_number": 443,
                "type": "no_outputs_field_for_doc"
            }
        ]
    },
    "errors": {
        "CheGeKa": [
            {
                "split": "test",
                "type": "no_split"
            }
        ],
        "MathLogicQA": [
            {
                "doc_id": 739,
                "type": "no_id"
            }
        ],
        "PARus": [
            {
                "type": "no_data_field"
            }
        ],
        "RWSD": [
            {
                "doc_id": 61,
                "type": "doc_output_type_error"
            }
        ],
        "SimpleAr": [
            {
                "example_number": 528,
                "type": "no_id_field_for_doc"
            }
        ],
        "ruEthics": [
            {
                "extension": ".jso",
                "type": "extension"
            }
        ],
        "ruHumanEval": [
            {
                "example_number": 128,
                "type": "no_meta_field_for_doc"
            }
        ],
        "ruOpenBookQA": [
            {
                "type": "no_task"
            }
        ],
        "ruWorldTree": [
            {
                "example_number": 443,
                "type": "no_outputs_field_for_doc"
            }
        ]
    },
    "global_error_reason": [],
    "id": 1234,
    "results": {
        "BPS": {
            "acc": 0.5
        },
        "LCS": {
            "acc": 0.096
        },
        "MultiQ": {
            "em": 0.0033333333333333335,
            "f1": 0.01452582165181418
        },
        "RCB": {
            "acc": 0.3607305936073059,
            "f1_macro": 0.3597139338827318
        },
        "USE": {
            "grade_norm": 0.05588235294117646
        },
        "ruDetox": {
            "fl": 0.5578551230338372,
            "j": 0.3816129346637701,
            "sim": 0.8054497441265253,
            "sta": 0.8405460153513176
        },
        "ruHHH": {
            "acc": 0.5224719101123596,
            "acc.harmless": 0.5517241379310345,
            "acc.helpful": 0.5254237288135594,
            "acc.honest": 0.4918032786885246
        },
        "ruHateSpeech": {
            "acc": 0.4679245283018868,
            "acc.другое": 0.3770491803278688,
            "acc.женщины": 0.46296296296296297,
            "acc.лгбт": 0.5294117647058824,
            "acc.мигранты": 0.8571428571428571,
            "acc.мужчины": 0.5428571428571428,
            "acc.национальность": 0.4594594594594595
        },
        "ruMMLU": {
            "acc": 0.25078043704474506,
            "acc.abstract_algebra": 0.3,
            "acc.anatomy": 0.4,
            "acc.astronomy": 0.1,
            "acc.business_ethics": 0.2,
            "acc.clinical_knowledge": 0.2727272727272727,
            "acc.college_biology": 0.25925925925925924,
            "acc.college_chemistry": 0.2727272727272727,
            "acc.college_computer_science": 0.36363636363636365,
            "acc.college_mathematics": 0.3,
            "acc.college_medicine": 0.27450980392156865,
            "acc.college_physics": 0.2,
            "acc.computer_security": 0.4,
            "acc.conceptual_physics": 0.2,
            "acc.econometrics": 0.36363636363636365,
            "acc.electrical_engineering": 0.1,
            "acc.elementary_mathematics": 0.2,
            "acc.formal_logic": 0.2,
            "acc.global_facts": 0.5,
            "acc.high_school_biology": 0.23809523809523808,
            "acc.high_school_chemistry": 0.2,
            "acc.high_school_computer_science": 0.08333333333333333,
            "acc.high_school_european_history": 0.18181818181818182,
            "acc.high_school_geography": 0.17721518987341772,
            "acc.high_school_government_and_politics": 0.25925925925925924,
            "acc.high_school_macroeconomics": 0.29411764705882354,
            "acc.high_school_mathematics": 0.3,
            "acc.high_school_microeconomics": 0.13333333333333333,
            "acc.high_school_physics": 0.3,
            "acc.high_school_psychology": 0.3125,
            "acc.high_school_statistics": 0.1,
            "acc.high_school_us_history": 0.3,
            "acc.high_school_world_history": 0.375,
            "acc.human_aging": 0.4,
            "acc.human_sexuality": 0.4,
            "acc.international_law": 0.16666666666666666,
            "acc.jurisprudence": 0.3076923076923077,
            "acc.logical_fallacies": 0.2,
            "acc.machine_learning": 0.3,
            "acc.management": 0.2,
            "acc.marketing": 0.2857142857142857,
            "acc.medical_genetics": 0.18181818181818182,
            "acc.miscellaneous": 0.3181818181818182,
            "acc.moral_disputes": 0.2,
            "acc.moral_scenarios": 0.4,
            "acc.nutrition": 0.14285714285714285,
            "acc.philosophy": 0.35294117647058826,
            "acc.prehistory": 0.2,
            "acc.professional_accounting": 0.3,
            "acc.professional_law": 0.1875,
            "acc.professional_medicine": 0.2,
            "acc.professional_psychology": 0.3,
            "acc.public_relations": 0.14285714285714285,
            "acc.security_studies": 0.4,
            "acc.sociology": 0.3,
            "acc.us_foreign_policy": 0.2,
            "acc.virology": 0.1875,
            "acc.world_religions": 0.2692307692307692
        },
        "ruModAr": {
            "acc": 0.0
        },
        "ruMultiAr": {
            "acc": 0.0
        },
        "ruTiE": {
            "acc": 0.46744186046511627
        },
        "total_score": 0.19325072129873672
    },
    "status": "Failed"
}