---
license: apache-2.0
tags:
- merge
---

# openmixtral-4x7b-merged

openmixtral-4x7b-merged is a merge of the following models:

* [openchat/openchat-3.5-1210](https://huggingface.co/openchat/openchat-3.5-1210)
* [beowolx/CodeNinja-1.0-OpenChat-7B](https://huggingface.co/beowolx/CodeNinja-1.0-OpenChat-7B)
* [maywell/PiVoT-0.1-Starling-LM-RP](https://huggingface.co/maywell/PiVoT-0.1-Starling-LM-RP)
* [WizardLM/WizardMath-7B-V1.1](https://huggingface.co/WizardLM/WizardMath-7B-V1.1)

## 🧩 Configuration

```yaml
base_model: mlabonne/Marcoro14-7B-slerp
experts:
  - source_model: openchat/openchat-3.5-1210
    positive_prompts:
      - "chat"
      - "assistant"
      - "tell me"
      - "explain"
  - source_model: beowolx/CodeNinja-1.0-OpenChat-7B
    positive_prompts:
      - "code"
      - "python"
      - "javascript"
      - "programming"
      - "algorithm"
  - source_model: maywell/PiVoT-0.1-Starling-LM-RP
    positive_prompts:
      - "storywriting"
      - "write"
      - "scene"
      - "story"
      - "character"
  - source_model: WizardLM/WizardMath-7B-V1.1
    positive_prompts:
      - "reason"
      - "math"
      - "mathematics"
      - "solve"
      - "count"
tokenizer_source: union
```

## Evaluation Results

Detailed results: https://huggingface.co/datasets/open-llm-leaderboard/details_mychen76__openmixtral-4x7b-merged

```json
{
  "all": { "acc": 0.657144834577193, "acc_stderr": 0.03198053543647407, "acc_norm": 0.6572006879598793, "acc_norm_stderr": 0.0326392415851668, "mc1": 0.44430844553243576, "mc1_stderr": 0.01739458625074317, "mc2": 0.6132594486430695, "mc2_stderr": 0.015532509494332434 },
  "harness|arc:challenge|25": { "acc": 0.6604095563139932, "acc_stderr": 0.013839039762820166, "acc_norm": 0.6945392491467577, "acc_norm_stderr": 0.01346008047800251 },
  "harness|hellaswag|10": { "acc": 0.6901015733917546, "acc_stderr": 0.004615063817741861, "acc_norm": 0.8674566819358693, "acc_norm_stderr": 0.0033838751726700217 },
  "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.38, "acc_stderr": 0.048783173121456316, "acc_norm": 0.38, "acc_norm_stderr": 0.048783173121456316 },
  "harness|hendrycksTest-anatomy|5": { "acc": 0.6444444444444445, "acc_stderr": 0.04135176749720385, "acc_norm": 0.6444444444444445, "acc_norm_stderr": 0.04135176749720385 },
  "harness|hendrycksTest-astronomy|5": { "acc": 0.6776315789473685, "acc_stderr": 0.03803510248351585, "acc_norm": 0.6776315789473685, "acc_norm_stderr": 0.03803510248351585 },
  "harness|hendrycksTest-business_ethics|5": { "acc": 0.63, "acc_stderr": 0.04852365870939099, "acc_norm": 0.63, "acc_norm_stderr": 0.04852365870939099 },
  "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7358490566037735, "acc_stderr": 0.027134291628741713, "acc_norm": 0.7358490566037735, "acc_norm_stderr": 0.027134291628741713 },
  "harness|hendrycksTest-college_biology|5": { "acc": 0.7777777777777778, "acc_stderr": 0.03476590104304134, "acc_norm": 0.7777777777777778, "acc_norm_stderr": 0.03476590104304134 },
  "harness|hendrycksTest-college_chemistry|5": { "acc": 0.49, "acc_stderr": 0.05024183937956911, "acc_norm": 0.49, "acc_norm_stderr": 0.05024183937956911 },
  "harness|hendrycksTest-college_computer_science|5": { "acc": 0.57, "acc_stderr": 0.04975698519562428, "acc_norm": 0.57, "acc_norm_stderr": 0.04975698519562428 },
  "harness|hendrycksTest-college_mathematics|5": { "acc": 0.35, "acc_stderr": 0.047937248544110196, "acc_norm": 0.35, "acc_norm_stderr": 0.047937248544110196 },
  "harness|hendrycksTest-college_medicine|5": { "acc": 0.6763005780346821, "acc_stderr": 0.0356760379963917, "acc_norm": 0.6763005780346821, "acc_norm_stderr": 0.0356760379963917 },
  "harness|hendrycksTest-college_physics|5": { "acc": 0.4117647058823529, "acc_stderr": 0.048971049527263666, "acc_norm": 0.4117647058823529, "acc_norm_stderr": 0.048971049527263666 },
  "harness|hendrycksTest-computer_security|5": { "acc": 0.76, "acc_stderr": 0.04292346959909283, "acc_norm": 0.76, "acc_norm_stderr": 0.04292346959909283 },
  "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5531914893617021, "acc_stderr": 0.0325005368436584, "acc_norm": 0.5531914893617021, "acc_norm_stderr": 0.0325005368436584 },
  "harness|hendrycksTest-econometrics|5": { "acc": 0.5, "acc_stderr": 0.047036043419179864, "acc_norm": 0.5, "acc_norm_stderr": 0.047036043419179864 },
  "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5655172413793104, "acc_stderr": 0.04130740879555497, "acc_norm": 0.5655172413793104, "acc_norm_stderr": 0.04130740879555497 },
  "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.42328042328042326, "acc_stderr": 0.025446365634406793, "acc_norm": 0.42328042328042326, "acc_norm_stderr": 0.025446365634406793 },
  "harness|hendrycksTest-formal_logic|5": { "acc": 0.47619047619047616, "acc_stderr": 0.04467062628403273, "acc_norm": 0.47619047619047616, "acc_norm_stderr": 0.04467062628403273 },
  "harness|hendrycksTest-global_facts|5": { "acc": 0.33, "acc_stderr": 0.047258156262526045, "acc_norm": 0.33, "acc_norm_stderr": 0.047258156262526045 },
  "harness|hendrycksTest-high_school_biology|5": { "acc": 0.8064516129032258, "acc_stderr": 0.022475258525536057, "acc_norm": 0.8064516129032258, "acc_norm_stderr": 0.022475258525536057 },
  "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5123152709359606, "acc_stderr": 0.035169204442208966, "acc_norm": 0.5123152709359606, "acc_norm_stderr": 0.035169204442208966 },
  "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.72, "acc_stderr": 0.04512608598542127, "acc_norm": 0.72, "acc_norm_stderr": 0.04512608598542127 },
  "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7757575757575758, "acc_stderr": 0.03256866661681102, "acc_norm": 0.7757575757575758, "acc_norm_stderr": 0.03256866661681102 },
  "harness|hendrycksTest-high_school_geography|5": { "acc": 0.8080808080808081, "acc_stderr": 0.028057791672989017, "acc_norm": 0.8080808080808081, "acc_norm_stderr": 0.028057791672989017 },
  "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8963730569948186, "acc_stderr": 0.02199531196364424, "acc_norm": 0.8963730569948186, "acc_norm_stderr": 0.02199531196364424 },
  "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6666666666666666, "acc_stderr": 0.023901157979402534, "acc_norm": 0.6666666666666666, "acc_norm_stderr": 0.023901157979402534 },
  "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.34814814814814815, "acc_stderr": 0.029045600290616255, "acc_norm": 0.34814814814814815, "acc_norm_stderr": 0.029045600290616255 },
  "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.7142857142857143, "acc_stderr": 0.029344572500634335, "acc_norm": 0.7142857142857143, "acc_norm_stderr": 0.029344572500634335 },
  "harness|hendrycksTest-high_school_physics|5": { "acc": 0.31788079470198677, "acc_stderr": 0.038020397601079024, "acc_norm": 0.31788079470198677, "acc_norm_stderr": 0.038020397601079024 },
  "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8495412844036697, "acc_stderr": 0.015328563932669237, "acc_norm": 0.8495412844036697, "acc_norm_stderr": 0.015328563932669237 },
  "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5231481481481481, "acc_stderr": 0.03406315360711507, "acc_norm": 0.5231481481481481, "acc_norm_stderr": 0.03406315360711507 },
  "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8480392156862745, "acc_stderr": 0.025195658428931796, "acc_norm": 0.8480392156862745, "acc_norm_stderr": 0.025195658428931796 },
  "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.810126582278481, "acc_stderr": 0.025530100460233504, "acc_norm": 0.810126582278481, "acc_norm_stderr": 0.025530100460233504 },
  "harness|hendrycksTest-human_aging|5": { "acc": 0.695067264573991, "acc_stderr": 0.030898610882477515, "acc_norm": 0.695067264573991, "acc_norm_stderr": 0.030898610882477515 },
  "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7709923664122137, "acc_stderr": 0.036853466317118506, "acc_norm": 0.7709923664122137, "acc_norm_stderr": 0.036853466317118506 },
  "harness|hendrycksTest-international_law|5": { "acc": 0.8016528925619835, "acc_stderr": 0.03640118271990947, "acc_norm": 0.8016528925619835, "acc_norm_stderr": 0.03640118271990947 },
  "harness|hendrycksTest-jurisprudence|5": { "acc": 0.8240740740740741, "acc_stderr": 0.036809181416738807, "acc_norm": 0.8240740740740741, "acc_norm_stderr": 0.036809181416738807 },
  "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.754601226993865, "acc_stderr": 0.03380939813943354, "acc_norm": 0.754601226993865, "acc_norm_stderr": 0.03380939813943354 },
  "harness|hendrycksTest-machine_learning|5": { "acc": 0.4642857142857143, "acc_stderr": 0.04733667890053756, "acc_norm": 0.4642857142857143, "acc_norm_stderr": 0.04733667890053756 },
  "harness|hendrycksTest-management|5": { "acc": 0.7961165048543689, "acc_stderr": 0.039891398595317706, "acc_norm": 0.7961165048543689, "acc_norm_stderr": 0.039891398595317706 },
  "harness|hendrycksTest-marketing|5": { "acc": 0.8846153846153846, "acc_stderr": 0.020930193185179326, "acc_norm": 0.8846153846153846, "acc_norm_stderr": 0.020930193185179326 },
  "harness|hendrycksTest-medical_genetics|5": { "acc": 0.72, "acc_stderr": 0.045126085985421276, "acc_norm": 0.72, "acc_norm_stderr": 0.045126085985421276 },
  "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8288633461047255, "acc_stderr": 0.0134682016140663, "acc_norm": 0.8288633461047255, "acc_norm_stderr": 0.0134682016140663 },
  "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7369942196531792, "acc_stderr": 0.023703099525258172, "acc_norm": 0.7369942196531792, "acc_norm_stderr": 0.023703099525258172 },
  "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.41564245810055866, "acc_stderr": 0.016482782187500676, "acc_norm": 0.41564245810055866, "acc_norm_stderr": 0.016482782187500676 },
  "harness|hendrycksTest-nutrition|5": { "acc": 0.7320261437908496, "acc_stderr": 0.025360603796242557, "acc_norm": 0.7320261437908496, "acc_norm_stderr": 0.025360603796242557 },
  "harness|hendrycksTest-philosophy|5": { "acc": 0.7363344051446945, "acc_stderr": 0.02502553850053234, "acc_norm": 0.7363344051446945, "acc_norm_stderr": 0.02502553850053234 },
  "harness|hendrycksTest-prehistory|5": { "acc": 0.7376543209876543, "acc_stderr": 0.024477222856135107, "acc_norm": 0.7376543209876543, "acc_norm_stderr": 0.024477222856135107 },
  "harness|hendrycksTest-professional_accounting|5": { "acc": 0.5141843971631206, "acc_stderr": 0.02981549448368206, "acc_norm": 0.5141843971631206, "acc_norm_stderr": 0.02981549448368206 },
  "harness|hendrycksTest-professional_law|5": { "acc": 0.46740547588005216, "acc_stderr": 0.012743072942653349, "acc_norm": 0.46740547588005216, "acc_norm_stderr": 0.012743072942653349 },
  "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6801470588235294, "acc_stderr": 0.028332959514031218, "acc_norm": 0.6801470588235294, "acc_norm_stderr": 0.028332959514031218 },
  "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6699346405228758, "acc_stderr": 0.019023726160724553, "acc_norm": 0.6699346405228758, "acc_norm_stderr": 0.019023726160724553 },
  "harness|hendrycksTest-public_relations|5": { "acc": 0.6727272727272727, "acc_stderr": 0.0449429086625209, "acc_norm": 0.6727272727272727, "acc_norm_stderr": 0.0449429086625209 },
  "harness|hendrycksTest-security_studies|5": { "acc": 0.7142857142857143, "acc_stderr": 0.0289205832206756, "acc_norm": 0.7142857142857143, "acc_norm_stderr": 0.0289205832206756 },
  "harness|hendrycksTest-sociology|5": { "acc": 0.845771144278607, "acc_stderr": 0.025538433368578337, "acc_norm": 0.845771144278607, "acc_norm_stderr": 0.025538433368578337 },
  "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.84, "acc_stderr": 0.03684529491774708, "acc_norm": 0.84, "acc_norm_stderr": 0.03684529491774708 },
  "harness|hendrycksTest-virology|5": { "acc": 0.536144578313253, "acc_stderr": 0.038823108508905954, "acc_norm": 0.536144578313253, "acc_norm_stderr": 0.038823108508905954 },
  "harness|hendrycksTest-world_religions|5": { "acc": 0.8245614035087719, "acc_stderr": 0.02917088550072767, "acc_norm": 0.8245614035087719, "acc_norm_stderr": 0.02917088550072767 },
  "harness|truthfulqa:mc|0": { "mc1": 0.44430844553243576, "mc1_stderr": 0.01739458625074317, "mc2": 0.6132594486430695, "mc2_stderr": 0.015532509494332434 },
  "harness|winogrande|5": { "acc": 0.8105761641673244, "acc_stderr": 0.011012790432989245 },
  "harness|gsm8k|5": { "acc": 0.7119029567854435, "acc_stderr": 0.012474469737197916 }
}
```
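These scores come from the Open LLM Leaderboard's run of the EleutherAI LM Evaluation Harness. As a rough sanity check, a single task (25-shot ARC-Challenge) can be re-run locally with the harness's Python API. This is only a sketch: the repository id is inferred from the leaderboard dataset name above, and current `lm-eval` releases may differ from the harness version and prompt setup the leaderboard used, so small deviations from the numbers above are expected.

```python
# Rough reproduction sketch for the 25-shot ARC-Challenge score above.
# Assumes `pip install lm-eval` and that the merge is hosted as
# mychen76/openmixtral-4x7b-merged (inferred from the leaderboard dataset name).
import lm_eval

results = lm_eval.simple_evaluate(
    model="hf",
    model_args="pretrained=mychen76/openmixtral-4x7b-merged,dtype=float16",
    tasks=["arc_challenge"],
    num_fewshot=25,
    batch_size=4,
    device="cuda:0",
)

# Per-task metrics (acc / acc_norm); exact key names vary between harness versions.
print(results["results"]["arc_challenge"])
```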
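For quick experimentation, the merged model loads like any other causal language model in 🤗 Transformers. The snippet below is a minimal sketch; the repository id is an assumption inferred from the leaderboard dataset name and should be adjusted to wherever the merged weights are actually hosted.

```python
# Minimal inference sketch for the merged 4x7B MoE model.
# The repository id is assumed from the leaderboard dataset name above.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "mychen76/openmixtral-4x7b-merged"

tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    torch_dtype=torch.float16,  # four 7B experts: half precision keeps memory manageable
    device_map="auto",
)

prompt = "Explain what a mixture-of-experts language model is in two sentences."
inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
outputs = model.generate(**inputs, max_new_tokens=200, do_sample=True, temperature=0.7)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```

`device_map="auto"` relies on the `accelerate` package to spread the expert weights across the available GPUs (or offload to CPU), which is usually necessary for a 4x7B checkpoint even at half precision.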