Nous-Hermes-llama-2-7b_7b_cluster017_partitioned_v3_standardized_017/checkpoint-2200/trainer_state.json
{
  "best_metric": 0.4432196617126465,
  "best_model_checkpoint": "./output_v2/7b_cluster017_Nous-Hermes-llama-2-7b_partitioned_v3_standardized_017/checkpoint-1600",
  "epoch": 2.6284348864994027,
  "global_step": 2200,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "learning_rate": 0.0002,
      "loss": 0.5801,
      "step": 10
    },
    {
      "epoch": 0.02,
      "learning_rate": 0.0002,
      "loss": 0.6179,
      "step": 20
    },
    {
      "epoch": 0.04,
      "learning_rate": 0.0002,
      "loss": 0.5163,
      "step": 30
    },
    {
      "epoch": 0.05,
      "learning_rate": 0.0002,
      "loss": 0.5249,
      "step": 40
    },
    {
      "epoch": 0.06,
      "learning_rate": 0.0002,
      "loss": 0.5421,
      "step": 50
    },
    {
      "epoch": 0.07,
      "learning_rate": 0.0002,
      "loss": 0.4993,
      "step": 60
    },
    {
      "epoch": 0.08,
      "learning_rate": 0.0002,
      "loss": 0.5421,
      "step": 70
    },
    {
      "epoch": 0.1,
      "learning_rate": 0.0002,
      "loss": 0.4769,
      "step": 80
    },
    {
      "epoch": 0.11,
      "learning_rate": 0.0002,
      "loss": 0.5084,
      "step": 90
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.0002,
      "loss": 0.4731,
      "step": 100
    },
    {
      "epoch": 0.13,
      "learning_rate": 0.0002,
      "loss": 0.5069,
      "step": 110
    },
    {
      "epoch": 0.14,
      "learning_rate": 0.0002,
      "loss": 0.4659,
      "step": 120
    },
    {
      "epoch": 0.16,
      "learning_rate": 0.0002,
      "loss": 0.4863,
      "step": 130
    },
    {
      "epoch": 0.17,
      "learning_rate": 0.0002,
      "loss": 0.5124,
      "step": 140
    },
    {
      "epoch": 0.18,
      "learning_rate": 0.0002,
      "loss": 0.5311,
      "step": 150
    },
    {
      "epoch": 0.19,
      "learning_rate": 0.0002,
      "loss": 0.5032,
      "step": 160
    },
    {
      "epoch": 0.2,
      "learning_rate": 0.0002,
      "loss": 0.5065,
      "step": 170
    },
    {
      "epoch": 0.22,
      "learning_rate": 0.0002,
      "loss": 0.4613,
      "step": 180
    },
    {
      "epoch": 0.23,
      "learning_rate": 0.0002,
      "loss": 0.517,
      "step": 190
    },
    {
      "epoch": 0.24,
      "learning_rate": 0.0002,
      "loss": 0.4761,
      "step": 200
    },
    {
      "epoch": 0.24,
      "eval_loss": 0.4977516829967499,
      "eval_runtime": 178.665,
      "eval_samples_per_second": 5.597,
      "eval_steps_per_second": 2.799,
      "step": 200
    },
    {
      "epoch": 0.24,
      "mmlu_eval_accuracy": 0.4731690276039549,
      "mmlu_eval_accuracy_abstract_algebra": 0.36363636363636365,
      "mmlu_eval_accuracy_anatomy": 0.6428571428571429,
      "mmlu_eval_accuracy_astronomy": 0.4375,
      "mmlu_eval_accuracy_business_ethics": 0.45454545454545453,
      "mmlu_eval_accuracy_clinical_knowledge": 0.41379310344827586,
      "mmlu_eval_accuracy_college_biology": 0.4375,
      "mmlu_eval_accuracy_college_chemistry": 0.125,
      "mmlu_eval_accuracy_college_computer_science": 0.36363636363636365,
      "mmlu_eval_accuracy_college_mathematics": 0.2727272727272727,
      "mmlu_eval_accuracy_college_medicine": 0.4090909090909091,
      "mmlu_eval_accuracy_college_physics": 0.45454545454545453,
      "mmlu_eval_accuracy_computer_security": 0.45454545454545453,
      "mmlu_eval_accuracy_conceptual_physics": 0.4230769230769231,
      "mmlu_eval_accuracy_econometrics": 0.16666666666666666,
      "mmlu_eval_accuracy_electrical_engineering": 0.5625,
      "mmlu_eval_accuracy_elementary_mathematics": 0.34146341463414637,
      "mmlu_eval_accuracy_formal_logic": 0.2857142857142857,
      "mmlu_eval_accuracy_global_facts": 0.6,
      "mmlu_eval_accuracy_high_school_biology": 0.34375,
      "mmlu_eval_accuracy_high_school_chemistry": 0.4090909090909091,
      "mmlu_eval_accuracy_high_school_computer_science": 0.5555555555555556,
      "mmlu_eval_accuracy_high_school_european_history": 0.5,
      "mmlu_eval_accuracy_high_school_geography": 0.7727272727272727,
      "mmlu_eval_accuracy_high_school_government_and_politics": 0.6666666666666666,
      "mmlu_eval_accuracy_high_school_macroeconomics": 0.32558139534883723,
      "mmlu_eval_accuracy_high_school_mathematics": 0.27586206896551724,
      "mmlu_eval_accuracy_high_school_microeconomics": 0.46153846153846156,
      "mmlu_eval_accuracy_high_school_physics": 0.35294117647058826,
      "mmlu_eval_accuracy_high_school_psychology": 0.7333333333333333,
      "mmlu_eval_accuracy_high_school_statistics": 0.391304347826087,
      "mmlu_eval_accuracy_high_school_us_history": 0.6363636363636364,
      "mmlu_eval_accuracy_high_school_world_history": 0.5,
      "mmlu_eval_accuracy_human_aging": 0.6956521739130435,
      "mmlu_eval_accuracy_human_sexuality": 0.4166666666666667,
      "mmlu_eval_accuracy_international_law": 0.6923076923076923,
      "mmlu_eval_accuracy_jurisprudence": 0.45454545454545453,
      "mmlu_eval_accuracy_logical_fallacies": 0.5555555555555556,
      "mmlu_eval_accuracy_machine_learning": 0.2727272727272727,
      "mmlu_eval_accuracy_management": 0.6363636363636364,
      "mmlu_eval_accuracy_marketing": 0.68,
      "mmlu_eval_accuracy_medical_genetics": 0.7272727272727273,
      "mmlu_eval_accuracy_miscellaneous": 0.6511627906976745,
      "mmlu_eval_accuracy_moral_disputes": 0.42105263157894735,
      "mmlu_eval_accuracy_moral_scenarios": 0.23,
      "mmlu_eval_accuracy_nutrition": 0.6363636363636364,
      "mmlu_eval_accuracy_philosophy": 0.47058823529411764,
      "mmlu_eval_accuracy_prehistory": 0.5142857142857142,
      "mmlu_eval_accuracy_professional_accounting": 0.25806451612903225,
      "mmlu_eval_accuracy_professional_law": 0.32941176470588235,
      "mmlu_eval_accuracy_professional_medicine": 0.41935483870967744,
      "mmlu_eval_accuracy_professional_psychology": 0.34782608695652173,
      "mmlu_eval_accuracy_public_relations": 0.5833333333333334,
      "mmlu_eval_accuracy_security_studies": 0.48148148148148145,
      "mmlu_eval_accuracy_sociology": 0.6363636363636364,
      "mmlu_eval_accuracy_us_foreign_policy": 0.5454545454545454,
      "mmlu_eval_accuracy_virology": 0.4444444444444444,
      "mmlu_eval_accuracy_world_religions": 0.7368421052631579,
      "mmlu_loss": 1.201889930774431,
      "step": 200
    },
    {
      "epoch": 0.25,
      "learning_rate": 0.0002,
      "loss": 0.447,
      "step": 210
    },
    {
      "epoch": 0.26,
      "learning_rate": 0.0002,
      "loss": 0.5419,
      "step": 220
    },
    {
      "epoch": 0.27,
      "learning_rate": 0.0002,
      "loss": 0.46,
      "step": 230
    },
    {
      "epoch": 0.29,
      "learning_rate": 0.0002,
      "loss": 0.481,
      "step": 240
    },
    {
      "epoch": 0.3,
      "learning_rate": 0.0002,
      "loss": 0.4279,
      "step": 250
    },
    {
      "epoch": 0.31,
      "learning_rate": 0.0002,
      "loss": 0.462,
      "step": 260
    },
    {
      "epoch": 0.32,
      "learning_rate": 0.0002,
      "loss": 0.4866,
      "step": 270
    },
    {
      "epoch": 0.33,
      "learning_rate": 0.0002,
      "loss": 0.4565,
      "step": 280
    },
    {
      "epoch": 0.35,
      "learning_rate": 0.0002,
      "loss": 0.4579,
      "step": 290
    },
    {
      "epoch": 0.36,
      "learning_rate": 0.0002,
      "loss": 0.4585,
      "step": 300
    },
    {
      "epoch": 0.37,
      "learning_rate": 0.0002,
      "loss": 0.466,
      "step": 310
    },
    {
      "epoch": 0.38,
      "learning_rate": 0.0002,
      "loss": 0.4766,
      "step": 320
    },
    {
      "epoch": 0.39,
      "learning_rate": 0.0002,
      "loss": 0.4682,
      "step": 330
    },
    {
      "epoch": 0.41,
      "learning_rate": 0.0002,
      "loss": 0.4467,
      "step": 340
    },
    {
      "epoch": 0.42,
      "learning_rate": 0.0002,
      "loss": 0.4675,
      "step": 350
    },
    {
      "epoch": 0.43,
      "learning_rate": 0.0002,
      "loss": 0.4816,
      "step": 360
    },
    {
      "epoch": 0.44,
      "learning_rate": 0.0002,
      "loss": 0.4439,
      "step": 370
    },
    {
      "epoch": 0.45,
      "learning_rate": 0.0002,
      "loss": 0.4553,
      "step": 380
    },
    {
      "epoch": 0.47,
      "learning_rate": 0.0002,
      "loss": 0.4707,
      "step": 390
    },
    {
      "epoch": 0.48,
      "learning_rate": 0.0002,
      "loss": 0.4389,
      "step": 400
    },
    {
      "epoch": 0.48,
      "eval_loss": 0.4804040491580963,
      "eval_runtime": 178.9419,
      "eval_samples_per_second": 5.588,
      "eval_steps_per_second": 2.794,
      "step": 400
    },
    {
      "epoch": 0.48,
      "mmlu_eval_accuracy": 0.4686810757119835,
      "mmlu_eval_accuracy_abstract_algebra": 0.2727272727272727,
      "mmlu_eval_accuracy_anatomy": 0.5,
      "mmlu_eval_accuracy_astronomy": 0.4375,
      "mmlu_eval_accuracy_business_ethics": 0.45454545454545453,
      "mmlu_eval_accuracy_clinical_knowledge": 0.41379310344827586,
      "mmlu_eval_accuracy_college_biology": 0.4375,
      "mmlu_eval_accuracy_college_chemistry": 0.125,
      "mmlu_eval_accuracy_college_computer_science": 0.36363636363636365,
      "mmlu_eval_accuracy_college_mathematics": 0.2727272727272727,
      "mmlu_eval_accuracy_college_medicine": 0.36363636363636365,
      "mmlu_eval_accuracy_college_physics": 0.45454545454545453,
      "mmlu_eval_accuracy_computer_security": 0.36363636363636365,
      "mmlu_eval_accuracy_conceptual_physics": 0.4230769230769231,
      "mmlu_eval_accuracy_econometrics": 0.16666666666666666,
      "mmlu_eval_accuracy_electrical_engineering": 0.375,
      "mmlu_eval_accuracy_elementary_mathematics": 0.36585365853658536,
      "mmlu_eval_accuracy_formal_logic": 0.2857142857142857,
      "mmlu_eval_accuracy_global_facts": 0.5,
      "mmlu_eval_accuracy_high_school_biology": 0.34375,
      "mmlu_eval_accuracy_high_school_chemistry": 0.4090909090909091,
      "mmlu_eval_accuracy_high_school_computer_science": 0.6666666666666666,
      "mmlu_eval_accuracy_high_school_european_history": 0.6111111111111112,
      "mmlu_eval_accuracy_high_school_geography": 0.8636363636363636,
      "mmlu_eval_accuracy_high_school_government_and_politics": 0.6666666666666666,
      "mmlu_eval_accuracy_high_school_macroeconomics": 0.3023255813953488,
      "mmlu_eval_accuracy_high_school_mathematics": 0.27586206896551724,
      "mmlu_eval_accuracy_high_school_microeconomics": 0.38461538461538464,
      "mmlu_eval_accuracy_high_school_physics": 0.29411764705882354,
      "mmlu_eval_accuracy_high_school_psychology": 0.7666666666666667,
      "mmlu_eval_accuracy_high_school_statistics": 0.43478260869565216,
      "mmlu_eval_accuracy_high_school_us_history": 0.6818181818181818,
      "mmlu_eval_accuracy_high_school_world_history": 0.5,
      "mmlu_eval_accuracy_human_aging": 0.6956521739130435,
      "mmlu_eval_accuracy_human_sexuality": 0.4166666666666667,
      "mmlu_eval_accuracy_international_law": 0.6923076923076923,
      "mmlu_eval_accuracy_jurisprudence": 0.36363636363636365,
      "mmlu_eval_accuracy_logical_fallacies": 0.5555555555555556,
      "mmlu_eval_accuracy_machine_learning": 0.2727272727272727,
      "mmlu_eval_accuracy_management": 0.5454545454545454,
      "mmlu_eval_accuracy_marketing": 0.76,
      "mmlu_eval_accuracy_medical_genetics": 0.7272727272727273,
      "mmlu_eval_accuracy_miscellaneous": 0.6511627906976745,
      "mmlu_eval_accuracy_moral_disputes": 0.4473684210526316,
      "mmlu_eval_accuracy_moral_scenarios": 0.23,
      "mmlu_eval_accuracy_nutrition": 0.6060606060606061,
      "mmlu_eval_accuracy_philosophy": 0.47058823529411764,
      "mmlu_eval_accuracy_prehistory": 0.5142857142857142,
      "mmlu_eval_accuracy_professional_accounting": 0.2903225806451613,
      "mmlu_eval_accuracy_professional_law": 0.34705882352941175,
      "mmlu_eval_accuracy_professional_medicine": 0.41935483870967744,
      "mmlu_eval_accuracy_professional_psychology": 0.37681159420289856,
      "mmlu_eval_accuracy_public_relations": 0.5833333333333334,
      "mmlu_eval_accuracy_security_studies": 0.5185185185185185,
      "mmlu_eval_accuracy_sociology": 0.6363636363636364,
      "mmlu_eval_accuracy_us_foreign_policy": 0.6363636363636364,
      "mmlu_eval_accuracy_virology": 0.4444444444444444,
      "mmlu_eval_accuracy_world_religions": 0.7368421052631579,
      "mmlu_loss": 1.168120609523422,
      "step": 400
    },
    {
      "epoch": 0.49,
      "learning_rate": 0.0002,
      "loss": 0.49,
      "step": 410
    },
    {
      "epoch": 0.5,
      "learning_rate": 0.0002,
      "loss": 0.4614,
      "step": 420
    },
    {
      "epoch": 0.51,
      "learning_rate": 0.0002,
      "loss": 0.4711,
      "step": 430
    },
    {
      "epoch": 0.53,
      "learning_rate": 0.0002,
      "loss": 0.4557,
      "step": 440
    },
    {
      "epoch": 0.54,
      "learning_rate": 0.0002,
      "loss": 0.4454,
      "step": 450
    },
    {
      "epoch": 0.55,
      "learning_rate": 0.0002,
      "loss": 0.4819,
      "step": 460
    },
    {
      "epoch": 0.56,
      "learning_rate": 0.0002,
      "loss": 0.4694,
      "step": 470
    },
    {
      "epoch": 0.57,
      "learning_rate": 0.0002,
      "loss": 0.4602,
      "step": 480
    },
    {
      "epoch": 0.59,
      "learning_rate": 0.0002,
      "loss": 0.4528,
      "step": 490
    },
    {
      "epoch": 0.6,
      "learning_rate": 0.0002,
      "loss": 0.4415,
      "step": 500
    },
    {
      "epoch": 0.61,
      "learning_rate": 0.0002,
      "loss": 0.4597,
      "step": 510
    },
    {
      "epoch": 0.62,
      "learning_rate": 0.0002,
      "loss": 0.437,
      "step": 520
    },
    {
      "epoch": 0.63,
      "learning_rate": 0.0002,
      "loss": 0.4649,
      "step": 530
    },
    {
      "epoch": 0.65,
      "learning_rate": 0.0002,
      "loss": 0.4552,
      "step": 540
    },
    {
      "epoch": 0.66,
      "learning_rate": 0.0002,
      "loss": 0.4517,
      "step": 550
    },
    {
      "epoch": 0.67,
      "learning_rate": 0.0002,
      "loss": 0.4324,
      "step": 560
    },
    {
      "epoch": 0.68,
      "learning_rate": 0.0002,
      "loss": 0.4473,
      "step": 570
    },
    {
      "epoch": 0.69,
      "learning_rate": 0.0002,
      "loss": 0.4611,
      "step": 580
    },
    {
      "epoch": 0.7,
      "learning_rate": 0.0002,
      "loss": 0.4378,
      "step": 590
    },
    {
      "epoch": 0.72,
      "learning_rate": 0.0002,
      "loss": 0.4337,
      "step": 600
    },
    {
      "epoch": 0.72,
      "eval_loss": 0.4673095643520355,
      "eval_runtime": 178.8409,
      "eval_samples_per_second": 5.592,
      "eval_steps_per_second": 2.796,
      "step": 600
    },
    {
      "epoch": 0.72,
      "mmlu_eval_accuracy": 0.4657957744296099,
      "mmlu_eval_accuracy_abstract_algebra": 0.18181818181818182,
      "mmlu_eval_accuracy_anatomy": 0.5714285714285714,
      "mmlu_eval_accuracy_astronomy": 0.4375,
      "mmlu_eval_accuracy_business_ethics": 0.5454545454545454,
      "mmlu_eval_accuracy_clinical_knowledge": 0.4482758620689655,
      "mmlu_eval_accuracy_college_biology": 0.4375,
      "mmlu_eval_accuracy_college_chemistry": 0.125,
      "mmlu_eval_accuracy_college_computer_science": 0.36363636363636365,
      "mmlu_eval_accuracy_college_mathematics": 0.36363636363636365,
      "mmlu_eval_accuracy_college_medicine": 0.45454545454545453,
      "mmlu_eval_accuracy_college_physics": 0.45454545454545453,
      "mmlu_eval_accuracy_computer_security": 0.45454545454545453,
      "mmlu_eval_accuracy_conceptual_physics": 0.38461538461538464,
      "mmlu_eval_accuracy_econometrics": 0.25,
      "mmlu_eval_accuracy_electrical_engineering": 0.3125,
      "mmlu_eval_accuracy_elementary_mathematics": 0.3170731707317073,
      "mmlu_eval_accuracy_formal_logic": 0.21428571428571427,
      "mmlu_eval_accuracy_global_facts": 0.5,
      "mmlu_eval_accuracy_high_school_biology": 0.40625,
      "mmlu_eval_accuracy_high_school_chemistry": 0.36363636363636365,
      "mmlu_eval_accuracy_high_school_computer_science": 0.4444444444444444,
      "mmlu_eval_accuracy_high_school_european_history": 0.5,
      "mmlu_eval_accuracy_high_school_geography": 0.8181818181818182,
      "mmlu_eval_accuracy_high_school_government_and_politics": 0.6666666666666666,
      "mmlu_eval_accuracy_high_school_macroeconomics": 0.37209302325581395,
      "mmlu_eval_accuracy_high_school_mathematics": 0.27586206896551724,
      "mmlu_eval_accuracy_high_school_microeconomics": 0.5,
      "mmlu_eval_accuracy_high_school_physics": 0.29411764705882354,
      "mmlu_eval_accuracy_high_school_psychology": 0.75,
      "mmlu_eval_accuracy_high_school_statistics": 0.4782608695652174,
      "mmlu_eval_accuracy_high_school_us_history": 0.6363636363636364,
      "mmlu_eval_accuracy_high_school_world_history": 0.5,
      "mmlu_eval_accuracy_human_aging": 0.6956521739130435,
      "mmlu_eval_accuracy_human_sexuality": 0.4166666666666667,
      "mmlu_eval_accuracy_international_law": 0.6923076923076923,
      "mmlu_eval_accuracy_jurisprudence": 0.45454545454545453,
      "mmlu_eval_accuracy_logical_fallacies": 0.5555555555555556,
      "mmlu_eval_accuracy_machine_learning": 0.2727272727272727,
      "mmlu_eval_accuracy_management": 0.5454545454545454,
      "mmlu_eval_accuracy_marketing": 0.68,
      "mmlu_eval_accuracy_medical_genetics": 0.7272727272727273,
      "mmlu_eval_accuracy_miscellaneous": 0.627906976744186,
      "mmlu_eval_accuracy_moral_disputes": 0.5,
      "mmlu_eval_accuracy_moral_scenarios": 0.23,
      "mmlu_eval_accuracy_nutrition": 0.6363636363636364,
      "mmlu_eval_accuracy_philosophy": 0.4411764705882353,
      "mmlu_eval_accuracy_prehistory": 0.5428571428571428,
      "mmlu_eval_accuracy_professional_accounting": 0.25806451612903225,
      "mmlu_eval_accuracy_professional_law": 0.3176470588235294,
      "mmlu_eval_accuracy_professional_medicine": 0.45161290322580644,
      "mmlu_eval_accuracy_professional_psychology": 0.34782608695652173,
      "mmlu_eval_accuracy_public_relations": 0.5,
      "mmlu_eval_accuracy_security_studies": 0.48148148148148145,
      "mmlu_eval_accuracy_sociology": 0.5909090909090909,
      "mmlu_eval_accuracy_us_foreign_policy": 0.6363636363636364,
      "mmlu_eval_accuracy_virology": 0.3888888888888889,
      "mmlu_eval_accuracy_world_religions": 0.7368421052631579,
      "mmlu_loss": 1.1695979737893096,
      "step": 600
    },
    {
      "epoch": 0.73,
      "learning_rate": 0.0002,
      "loss": 0.4255,
      "step": 610
    },
    {
      "epoch": 0.74,
      "learning_rate": 0.0002,
      "loss": 0.4492,
      "step": 620
    },
    {
      "epoch": 0.75,
      "learning_rate": 0.0002,
      "loss": 0.4353,
      "step": 630
    },
    {
      "epoch": 0.76,
      "learning_rate": 0.0002,
      "loss": 0.4388,
      "step": 640
    },
    {
      "epoch": 0.78,
      "learning_rate": 0.0002,
      "loss": 0.4402,
      "step": 650
    },
    {
      "epoch": 0.79,
      "learning_rate": 0.0002,
      "loss": 0.4568,
      "step": 660
    },
    {
      "epoch": 0.8,
      "learning_rate": 0.0002,
      "loss": 0.4703,
      "step": 670
    },
    {
      "epoch": 0.81,
      "learning_rate": 0.0002,
      "loss": 0.4561,
      "step": 680
    },
    {
      "epoch": 0.82,
      "learning_rate": 0.0002,
      "loss": 0.4745,
      "step": 690
    },
    {
      "epoch": 0.84,
      "learning_rate": 0.0002,
      "loss": 0.4384,
      "step": 700
    },
    {
      "epoch": 0.85,
      "learning_rate": 0.0002,
      "loss": 0.4472,
      "step": 710
    },
    {
      "epoch": 0.86,
      "learning_rate": 0.0002,
      "loss": 0.4607,
      "step": 720
    },
    {
      "epoch": 0.87,
      "learning_rate": 0.0002,
      "loss": 0.4876,
      "step": 730
    },
    {
      "epoch": 0.88,
      "learning_rate": 0.0002,
      "loss": 0.4575,
      "step": 740
    },
    {
      "epoch": 0.9,
      "learning_rate": 0.0002,
      "loss": 0.4578,
      "step": 750
    },
    {
      "epoch": 0.91,
      "learning_rate": 0.0002,
      "loss": 0.4417,
      "step": 760
    },
    {
      "epoch": 0.92,
      "learning_rate": 0.0002,
      "loss": 0.4305,
      "step": 770
    },
    {
      "epoch": 0.93,
      "learning_rate": 0.0002,
      "loss": 0.4478,
      "step": 780
    },
    {
      "epoch": 0.94,
      "learning_rate": 0.0002,
      "loss": 0.4395,
      "step": 790
    },
    {
      "epoch": 0.96,
      "learning_rate": 0.0002,
      "loss": 0.4188,
      "step": 800
    },
    {
      "epoch": 0.96,
      "eval_loss": 0.4581112265586853,
      "eval_runtime": 178.9111,
      "eval_samples_per_second": 5.589,
      "eval_steps_per_second": 2.795,
      "step": 800
    },
    {
      "epoch": 0.96,
      "mmlu_eval_accuracy": 0.4708997812184524,
      "mmlu_eval_accuracy_abstract_algebra": 0.36363636363636365,
      "mmlu_eval_accuracy_anatomy": 0.5714285714285714,
      "mmlu_eval_accuracy_astronomy": 0.4375,
      "mmlu_eval_accuracy_business_ethics": 0.5454545454545454,
      "mmlu_eval_accuracy_clinical_knowledge": 0.4482758620689655,
      "mmlu_eval_accuracy_college_biology": 0.4375,
      "mmlu_eval_accuracy_college_chemistry": 0.125,
      "mmlu_eval_accuracy_college_computer_science": 0.36363636363636365,
      "mmlu_eval_accuracy_college_mathematics": 0.36363636363636365,
      "mmlu_eval_accuracy_college_medicine": 0.45454545454545453,
      "mmlu_eval_accuracy_college_physics": 0.5454545454545454,
      "mmlu_eval_accuracy_computer_security": 0.36363636363636365,
      "mmlu_eval_accuracy_conceptual_physics": 0.38461538461538464,
      "mmlu_eval_accuracy_econometrics": 0.25,
      "mmlu_eval_accuracy_electrical_engineering": 0.375,
      "mmlu_eval_accuracy_elementary_mathematics": 0.3170731707317073,
      "mmlu_eval_accuracy_formal_logic": 0.2857142857142857,
      "mmlu_eval_accuracy_global_facts": 0.5,
      "mmlu_eval_accuracy_high_school_biology": 0.40625,
      "mmlu_eval_accuracy_high_school_chemistry": 0.36363636363636365,
      "mmlu_eval_accuracy_high_school_computer_science": 0.4444444444444444,
      "mmlu_eval_accuracy_high_school_european_history": 0.5,
      "mmlu_eval_accuracy_high_school_geography": 0.7727272727272727,
      "mmlu_eval_accuracy_high_school_government_and_politics": 0.6666666666666666,
      "mmlu_eval_accuracy_high_school_macroeconomics": 0.3953488372093023,
      "mmlu_eval_accuracy_high_school_mathematics": 0.2413793103448276,
      "mmlu_eval_accuracy_high_school_microeconomics": 0.5,
      "mmlu_eval_accuracy_high_school_physics": 0.35294117647058826,
      "mmlu_eval_accuracy_high_school_psychology": 0.7333333333333333,
      "mmlu_eval_accuracy_high_school_statistics": 0.43478260869565216,
      "mmlu_eval_accuracy_high_school_us_history": 0.6363636363636364,
      "mmlu_eval_accuracy_high_school_world_history": 0.5,
      "mmlu_eval_accuracy_human_aging": 0.6956521739130435,
      "mmlu_eval_accuracy_human_sexuality": 0.4166666666666667,
      "mmlu_eval_accuracy_international_law": 0.7692307692307693,
      "mmlu_eval_accuracy_jurisprudence": 0.36363636363636365,
      "mmlu_eval_accuracy_logical_fallacies": 0.5555555555555556,
      "mmlu_eval_accuracy_machine_learning": 0.2727272727272727,
      "mmlu_eval_accuracy_management": 0.6363636363636364,
      "mmlu_eval_accuracy_marketing": 0.76,
      "mmlu_eval_accuracy_medical_genetics": 0.7272727272727273,
      "mmlu_eval_accuracy_miscellaneous": 0.6627906976744186,
      "mmlu_eval_accuracy_moral_disputes": 0.5,
      "mmlu_eval_accuracy_moral_scenarios": 0.23,
      "mmlu_eval_accuracy_nutrition": 0.5757575757575758,
      "mmlu_eval_accuracy_philosophy": 0.47058823529411764,
      "mmlu_eval_accuracy_prehistory": 0.5428571428571428,
      "mmlu_eval_accuracy_professional_accounting": 0.1935483870967742,
      "mmlu_eval_accuracy_professional_law": 0.36470588235294116,
      "mmlu_eval_accuracy_professional_medicine": 0.3870967741935484,
      "mmlu_eval_accuracy_professional_psychology": 0.34782608695652173,
      "mmlu_eval_accuracy_public_relations": 0.5,
      "mmlu_eval_accuracy_security_studies": 0.48148148148148145,
      "mmlu_eval_accuracy_sociology": 0.6363636363636364,
      "mmlu_eval_accuracy_us_foreign_policy": 0.5454545454545454,
      "mmlu_eval_accuracy_virology": 0.3888888888888889,
      "mmlu_eval_accuracy_world_religions": 0.7368421052631579,
      "mmlu_loss": 1.1651555353060714,
      "step": 800
    },
    {
      "epoch": 0.97,
      "learning_rate": 0.0002,
      "loss": 0.4485,
      "step": 810
    },
    {
      "epoch": 0.98,
      "learning_rate": 0.0002,
      "loss": 0.4215,
      "step": 820
    },
    {
      "epoch": 0.99,
      "learning_rate": 0.0002,
      "loss": 0.4323,
      "step": 830
    },
    {
      "epoch": 1.0,
      "learning_rate": 0.0002,
      "loss": 0.4166,
      "step": 840
    },
    {
      "epoch": 1.02,
      "learning_rate": 0.0002,
      "loss": 0.363,
      "step": 850
    },
    {
      "epoch": 1.03,
      "learning_rate": 0.0002,
      "loss": 0.425,
      "step": 860
    },
    {
      "epoch": 1.04,
      "learning_rate": 0.0002,
      "loss": 0.3954,
      "step": 870
    },
    {
      "epoch": 1.05,
      "learning_rate": 0.0002,
      "loss": 0.4146,
      "step": 880
    },
    {
      "epoch": 1.06,
      "learning_rate": 0.0002,
      "loss": 0.3617,
      "step": 890
    },
    {
      "epoch": 1.08,
      "learning_rate": 0.0002,
      "loss": 0.3758,
      "step": 900
    },
    {
      "epoch": 1.09,
      "learning_rate": 0.0002,
      "loss": 0.4184,
      "step": 910
    },
    {
      "epoch": 1.1,
      "learning_rate": 0.0002,
      "loss": 0.386,
      "step": 920
    },
    {
      "epoch": 1.11,
      "learning_rate": 0.0002,
      "loss": 0.3808,
      "step": 930
    },
    {
      "epoch": 1.12,
      "learning_rate": 0.0002,
      "loss": 0.3842,
      "step": 940
    },
    {
      "epoch": 1.14,
      "learning_rate": 0.0002,
      "loss": 0.3884,
      "step": 950
    },
    {
      "epoch": 1.15,
      "learning_rate": 0.0002,
      "loss": 0.3743,
      "step": 960
    },
    {
      "epoch": 1.16,
      "learning_rate": 0.0002,
      "loss": 0.3834,
      "step": 970
    },
    {
      "epoch": 1.17,
      "learning_rate": 0.0002,
      "loss": 0.3883,
      "step": 980
    },
    {
      "epoch": 1.18,
      "learning_rate": 0.0002,
      "loss": 0.3831,
      "step": 990
    },
    {
      "epoch": 1.19,
      "learning_rate": 0.0002,
      "loss": 0.3961,
      "step": 1000
    },
    {
      "epoch": 1.19,
      "eval_loss": 0.4571826756000519,
      "eval_runtime": 178.8234,
      "eval_samples_per_second": 5.592,
      "eval_steps_per_second": 2.796,
      "step": 1000
    },
    {
      "epoch": 1.19,
      "mmlu_eval_accuracy": 0.46518318736306025,
      "mmlu_eval_accuracy_abstract_algebra": 0.36363636363636365,
      "mmlu_eval_accuracy_anatomy": 0.5714285714285714,
      "mmlu_eval_accuracy_astronomy": 0.3125,
      "mmlu_eval_accuracy_business_ethics": 0.5454545454545454,
      "mmlu_eval_accuracy_clinical_knowledge": 0.4827586206896552,
      "mmlu_eval_accuracy_college_biology": 0.4375,
      "mmlu_eval_accuracy_college_chemistry": 0.125,
      "mmlu_eval_accuracy_college_computer_science": 0.36363636363636365,
      "mmlu_eval_accuracy_college_mathematics": 0.36363636363636365,
      "mmlu_eval_accuracy_college_medicine": 0.4090909090909091,
      "mmlu_eval_accuracy_college_physics": 0.5454545454545454,
      "mmlu_eval_accuracy_computer_security": 0.36363636363636365,
      "mmlu_eval_accuracy_conceptual_physics": 0.4230769230769231,
      "mmlu_eval_accuracy_econometrics": 0.25,
      "mmlu_eval_accuracy_electrical_engineering": 0.3125,
      "mmlu_eval_accuracy_elementary_mathematics": 0.2682926829268293,
      "mmlu_eval_accuracy_formal_logic": 0.21428571428571427,
      "mmlu_eval_accuracy_global_facts": 0.5,
      "mmlu_eval_accuracy_high_school_biology": 0.40625,
      "mmlu_eval_accuracy_high_school_chemistry": 0.3181818181818182,
      "mmlu_eval_accuracy_high_school_computer_science": 0.4444444444444444,
      "mmlu_eval_accuracy_high_school_european_history": 0.4444444444444444,
      "mmlu_eval_accuracy_high_school_geography": 0.8181818181818182,
      "mmlu_eval_accuracy_high_school_government_and_politics": 0.6190476190476191,
      "mmlu_eval_accuracy_high_school_macroeconomics": 0.32558139534883723,
      "mmlu_eval_accuracy_high_school_mathematics": 0.27586206896551724,
      "mmlu_eval_accuracy_high_school_microeconomics": 0.4230769230769231,
      "mmlu_eval_accuracy_high_school_physics": 0.23529411764705882,
      "mmlu_eval_accuracy_high_school_psychology": 0.7666666666666667,
      "mmlu_eval_accuracy_high_school_statistics": 0.43478260869565216,
      "mmlu_eval_accuracy_high_school_us_history": 0.6363636363636364,
      "mmlu_eval_accuracy_high_school_world_history": 0.5,
      "mmlu_eval_accuracy_human_aging": 0.6521739130434783,
      "mmlu_eval_accuracy_human_sexuality": 0.4166666666666667,
      "mmlu_eval_accuracy_international_law": 0.7692307692307693,
      "mmlu_eval_accuracy_jurisprudence": 0.45454545454545453,
      "mmlu_eval_accuracy_logical_fallacies": 0.5555555555555556,
      "mmlu_eval_accuracy_machine_learning": 0.18181818181818182,
      "mmlu_eval_accuracy_management": 0.6363636363636364,
      "mmlu_eval_accuracy_marketing": 0.68,
      "mmlu_eval_accuracy_medical_genetics": 0.7272727272727273,
      "mmlu_eval_accuracy_miscellaneous": 0.6511627906976745,
      "mmlu_eval_accuracy_moral_disputes": 0.5263157894736842,
      "mmlu_eval_accuracy_moral_scenarios": 0.24,
      "mmlu_eval_accuracy_nutrition": 0.6666666666666666,
      "mmlu_eval_accuracy_philosophy": 0.4411764705882353,
      "mmlu_eval_accuracy_prehistory": 0.5428571428571428,
      "mmlu_eval_accuracy_professional_accounting": 0.22580645161290322,
      "mmlu_eval_accuracy_professional_law": 0.3352941176470588,
      "mmlu_eval_accuracy_professional_medicine": 0.41935483870967744,
      "mmlu_eval_accuracy_professional_psychology": 0.37681159420289856,
      "mmlu_eval_accuracy_public_relations": 0.5,
      "mmlu_eval_accuracy_security_studies": 0.48148148148148145,
      "mmlu_eval_accuracy_sociology": 0.6818181818181818,
      "mmlu_eval_accuracy_us_foreign_policy": 0.7272727272727273,
      "mmlu_eval_accuracy_virology": 0.3888888888888889,
      "mmlu_eval_accuracy_world_religions": 0.7368421052631579,
      "mmlu_loss": 1.1887711029567856,
      "step": 1000
    },
    {
      "epoch": 1.21,
      "learning_rate": 0.0002,
      "loss": 0.4023,
      "step": 1010
    },
    {
      "epoch": 1.22,
      "learning_rate": 0.0002,
      "loss": 0.3878,
      "step": 1020
    },
    {
      "epoch": 1.23,
      "learning_rate": 0.0002,
      "loss": 0.3734,
      "step": 1030
    },
    {
      "epoch": 1.24,
      "learning_rate": 0.0002,
      "loss": 0.3773,
      "step": 1040
    },
    {
      "epoch": 1.25,
      "learning_rate": 0.0002,
      "loss": 0.4206,
      "step": 1050
    },
    {
      "epoch": 1.27,
      "learning_rate": 0.0002,
      "loss": 0.3705,
      "step": 1060
    },
    {
      "epoch": 1.28,
      "learning_rate": 0.0002,
      "loss": 0.3942,
      "step": 1070
    },
    {
      "epoch": 1.29,
      "learning_rate": 0.0002,
      "loss": 0.4041,
      "step": 1080
    },
    {
      "epoch": 1.3,
      "learning_rate": 0.0002,
      "loss": 0.422,
      "step": 1090
    },
    {
      "epoch": 1.31,
      "learning_rate": 0.0002,
      "loss": 0.3907,
      "step": 1100
    },
    {
      "epoch": 1.33,
      "learning_rate": 0.0002,
      "loss": 0.3764,
      "step": 1110
    },
    {
      "epoch": 1.34,
      "learning_rate": 0.0002,
      "loss": 0.4011,
      "step": 1120
    },
    {
      "epoch": 1.35,
      "learning_rate": 0.0002,
      "loss": 0.3779,
      "step": 1130
    },
    {
      "epoch": 1.36,
      "learning_rate": 0.0002,
      "loss": 0.3858,
      "step": 1140
    },
    {
      "epoch": 1.37,
      "learning_rate": 0.0002,
      "loss": 0.4028,
      "step": 1150
    },
    {
      "epoch": 1.39,
      "learning_rate": 0.0002,
      "loss": 0.3845,
      "step": 1160
    },
    {
      "epoch": 1.4,
      "learning_rate": 0.0002,
      "loss": 0.3939,
      "step": 1170
    },
    {
      "epoch": 1.41,
      "learning_rate": 0.0002,
      "loss": 0.3591,
      "step": 1180
    },
    {
      "epoch": 1.42,
      "learning_rate": 0.0002,
      "loss": 0.384,
      "step": 1190
    },
    {
      "epoch": 1.43,
      "learning_rate": 0.0002,
      "loss": 0.4026,
      "step": 1200
    },
    {
      "epoch": 1.43,
      "eval_loss": 0.4532225430011749,
      "eval_runtime": 178.7925,
      "eval_samples_per_second": 5.593,
      "eval_steps_per_second": 2.797,
      "step": 1200
    },
    {
      "epoch": 1.43,
      "mmlu_eval_accuracy": 0.45916720892795637,
      "mmlu_eval_accuracy_abstract_algebra": 0.36363636363636365,
      "mmlu_eval_accuracy_anatomy": 0.5,
      "mmlu_eval_accuracy_astronomy": 0.4375,
      "mmlu_eval_accuracy_business_ethics": 0.5454545454545454,
      "mmlu_eval_accuracy_clinical_knowledge": 0.4482758620689655,
      "mmlu_eval_accuracy_college_biology": 0.3125,
      "mmlu_eval_accuracy_college_chemistry": 0.125,
      "mmlu_eval_accuracy_college_computer_science": 0.45454545454545453,
      "mmlu_eval_accuracy_college_mathematics": 0.36363636363636365,
      "mmlu_eval_accuracy_college_medicine": 0.45454545454545453,
      "mmlu_eval_accuracy_college_physics": 0.5454545454545454,
      "mmlu_eval_accuracy_computer_security": 0.36363636363636365,
      "mmlu_eval_accuracy_conceptual_physics": 0.38461538461538464,
      "mmlu_eval_accuracy_econometrics": 0.16666666666666666,
      "mmlu_eval_accuracy_electrical_engineering": 0.375,
      "mmlu_eval_accuracy_elementary_mathematics": 0.24390243902439024,
      "mmlu_eval_accuracy_formal_logic": 0.21428571428571427,
      "mmlu_eval_accuracy_global_facts": 0.4,
      "mmlu_eval_accuracy_high_school_biology": 0.40625,
      "mmlu_eval_accuracy_high_school_chemistry": 0.3181818181818182,
      "mmlu_eval_accuracy_high_school_computer_science": 0.5555555555555556,
      "mmlu_eval_accuracy_high_school_european_history": 0.6111111111111112,
      "mmlu_eval_accuracy_high_school_geography": 0.7727272727272727,
      "mmlu_eval_accuracy_high_school_government_and_politics": 0.6666666666666666,
      "mmlu_eval_accuracy_high_school_macroeconomics": 0.32558139534883723,
      "mmlu_eval_accuracy_high_school_mathematics": 0.3103448275862069,
      "mmlu_eval_accuracy_high_school_microeconomics": 0.34615384615384615,
      "mmlu_eval_accuracy_high_school_physics": 0.23529411764705882,
      "mmlu_eval_accuracy_high_school_psychology": 0.75,
      "mmlu_eval_accuracy_high_school_statistics": 0.43478260869565216,
      "mmlu_eval_accuracy_high_school_us_history": 0.6818181818181818,
      "mmlu_eval_accuracy_high_school_world_history": 0.5,
      "mmlu_eval_accuracy_human_aging": 0.6956521739130435,
      "mmlu_eval_accuracy_human_sexuality": 0.3333333333333333,
      "mmlu_eval_accuracy_international_law": 0.7692307692307693,
      "mmlu_eval_accuracy_jurisprudence": 0.45454545454545453,
      "mmlu_eval_accuracy_logical_fallacies": 0.5,
      "mmlu_eval_accuracy_machine_learning": 0.18181818181818182,
      "mmlu_eval_accuracy_management": 0.5454545454545454,
      "mmlu_eval_accuracy_marketing": 0.68,
      "mmlu_eval_accuracy_medical_genetics": 0.7272727272727273,
      "mmlu_eval_accuracy_miscellaneous": 0.6744186046511628,
      "mmlu_eval_accuracy_moral_disputes": 0.5526315789473685,
      "mmlu_eval_accuracy_moral_scenarios": 0.26,
      "mmlu_eval_accuracy_nutrition": 0.5757575757575758,
      "mmlu_eval_accuracy_philosophy": 0.4411764705882353,
      "mmlu_eval_accuracy_prehistory": 0.5428571428571428,
      "mmlu_eval_accuracy_professional_accounting": 0.16129032258064516,
      "mmlu_eval_accuracy_professional_law": 0.3352941176470588,
      "mmlu_eval_accuracy_professional_medicine": 0.41935483870967744,
      "mmlu_eval_accuracy_professional_psychology": 0.42028985507246375,
      "mmlu_eval_accuracy_public_relations": 0.5,
      "mmlu_eval_accuracy_security_studies": 0.48148148148148145,
      "mmlu_eval_accuracy_sociology": 0.6363636363636364,
      "mmlu_eval_accuracy_us_foreign_policy": 0.5454545454545454,
      "mmlu_eval_accuracy_virology": 0.3888888888888889,
      "mmlu_eval_accuracy_world_religions": 0.7368421052631579,
      "mmlu_loss": 1.0201030533478714,
      "step": 1200
    },
    {
      "epoch": 1.45,
      "learning_rate": 0.0002,
      "loss": 0.3933,
      "step": 1210
    },
    {
      "epoch": 1.46,
      "learning_rate": 0.0002,
      "loss": 0.3587,
      "step": 1220
    },
    {
      "epoch": 1.47,
      "learning_rate": 0.0002,
      "loss": 0.3688,
      "step": 1230
    },
    {
      "epoch": 1.48,
      "learning_rate": 0.0002,
      "loss": 0.3514,
      "step": 1240
    },
    {
      "epoch": 1.49,
      "learning_rate": 0.0002,
      "loss": 0.3732,
      "step": 1250
    },
    {
      "epoch": 1.51,
      "learning_rate": 0.0002,
      "loss": 0.3918,
      "step": 1260
    },
    {
      "epoch": 1.52,
      "learning_rate": 0.0002,
      "loss": 0.387,
      "step": 1270
    },
    {
      "epoch": 1.53,
      "learning_rate": 0.0002,
      "loss": 0.3821,
      "step": 1280
    },
    {
      "epoch": 1.54,
      "learning_rate": 0.0002,
      "loss": 0.3814,
      "step": 1290
    },
    {
      "epoch": 1.55,
      "learning_rate": 0.0002,
      "loss": 0.3925,
      "step": 1300
    },
    {
      "epoch": 1.57,
      "learning_rate": 0.0002,
      "loss": 0.3949,
      "step": 1310
    },
    {
      "epoch": 1.58,
      "learning_rate": 0.0002,
      "loss": 0.3587,
      "step": 1320
    },
    {
      "epoch": 1.59,
      "learning_rate": 0.0002,
      "loss": 0.3992,
      "step": 1330
    },
    {
      "epoch": 1.6,
      "learning_rate": 0.0002,
      "loss": 0.3987,
      "step": 1340
    },
    {
      "epoch": 1.61,
      "learning_rate": 0.0002,
      "loss": 0.3744,
      "step": 1350
    },
    {
      "epoch": 1.62,
      "learning_rate": 0.0002,
      "loss": 0.3741,
      "step": 1360
    },
    {
      "epoch": 1.64,
      "learning_rate": 0.0002,
      "loss": 0.3756,
      "step": 1370
    },
    {
      "epoch": 1.65,
      "learning_rate": 0.0002,
      "loss": 0.3862,
      "step": 1380
    },
    {
      "epoch": 1.66,
      "learning_rate": 0.0002,
      "loss": 0.361,
      "step": 1390
    },
    {
      "epoch": 1.67,
      "learning_rate": 0.0002,
      "loss": 0.3712,
      "step": 1400
    },
    {
      "epoch": 1.67,
      "eval_loss": 0.4475863575935364,
      "eval_runtime": 179.1443,
      "eval_samples_per_second": 5.582,
      "eval_steps_per_second": 2.791,
      "step": 1400
    },
    {
      "epoch": 1.67,
      "mmlu_eval_accuracy": 0.4631075705147942,
      "mmlu_eval_accuracy_abstract_algebra": 0.2727272727272727,
      "mmlu_eval_accuracy_anatomy": 0.5714285714285714,
      "mmlu_eval_accuracy_astronomy": 0.375,
      "mmlu_eval_accuracy_business_ethics": 0.5454545454545454,
      "mmlu_eval_accuracy_clinical_knowledge": 0.4482758620689655,
      "mmlu_eval_accuracy_college_biology": 0.375,
      "mmlu_eval_accuracy_college_chemistry": 0.125,
      "mmlu_eval_accuracy_college_computer_science": 0.36363636363636365,
      "mmlu_eval_accuracy_college_mathematics": 0.2727272727272727,
      "mmlu_eval_accuracy_college_medicine": 0.36363636363636365,
      "mmlu_eval_accuracy_college_physics": 0.6363636363636364,
      "mmlu_eval_accuracy_computer_security": 0.2727272727272727,
      "mmlu_eval_accuracy_conceptual_physics": 0.4230769230769231,
      "mmlu_eval_accuracy_econometrics": 0.25,
      "mmlu_eval_accuracy_electrical_engineering": 0.4375,
      "mmlu_eval_accuracy_elementary_mathematics": 0.2682926829268293,
      "mmlu_eval_accuracy_formal_logic": 0.21428571428571427,
      "mmlu_eval_accuracy_global_facts": 0.5,
      "mmlu_eval_accuracy_high_school_biology": 0.4375,
      "mmlu_eval_accuracy_high_school_chemistry": 0.36363636363636365,
      "mmlu_eval_accuracy_high_school_computer_science": 0.5555555555555556,
      "mmlu_eval_accuracy_high_school_european_history": 0.5555555555555556,
      "mmlu_eval_accuracy_high_school_geography": 0.8181818181818182,
      "mmlu_eval_accuracy_high_school_government_and_politics": 0.6666666666666666,
      "mmlu_eval_accuracy_high_school_macroeconomics": 0.3023255813953488,
      "mmlu_eval_accuracy_high_school_mathematics": 0.27586206896551724,
      "mmlu_eval_accuracy_high_school_microeconomics": 0.46153846153846156,
      "mmlu_eval_accuracy_high_school_physics": 0.23529411764705882,
      "mmlu_eval_accuracy_high_school_psychology": 0.7833333333333333,
      "mmlu_eval_accuracy_high_school_statistics": 0.4782608695652174,
      "mmlu_eval_accuracy_high_school_us_history": 0.7272727272727273,
      "mmlu_eval_accuracy_high_school_world_history": 0.5384615384615384,
      "mmlu_eval_accuracy_human_aging": 0.6086956521739131,
      "mmlu_eval_accuracy_human_sexuality": 0.4166666666666667,
      "mmlu_eval_accuracy_international_law": 0.6923076923076923,
      "mmlu_eval_accuracy_jurisprudence": 0.45454545454545453,
      "mmlu_eval_accuracy_logical_fallacies": 0.5,
      "mmlu_eval_accuracy_machine_learning": 0.2727272727272727,
      "mmlu_eval_accuracy_management": 0.6363636363636364,
      "mmlu_eval_accuracy_marketing": 0.64,
      "mmlu_eval_accuracy_medical_genetics": 0.7272727272727273,
      "mmlu_eval_accuracy_miscellaneous": 0.6744186046511628,
      "mmlu_eval_accuracy_moral_disputes": 0.5,
      "mmlu_eval_accuracy_moral_scenarios": 0.29,
      "mmlu_eval_accuracy_nutrition": 0.6060606060606061,
      "mmlu_eval_accuracy_philosophy": 0.5294117647058824,
      "mmlu_eval_accuracy_prehistory": 0.45714285714285713,
      "mmlu_eval_accuracy_professional_accounting": 0.16129032258064516,
      "mmlu_eval_accuracy_professional_law": 0.35294117647058826,
      "mmlu_eval_accuracy_professional_medicine": 0.3548387096774194,
      "mmlu_eval_accuracy_professional_psychology": 0.3188405797101449,
      "mmlu_eval_accuracy_public_relations": 0.5,
      "mmlu_eval_accuracy_security_studies": 0.48148148148148145,
      "mmlu_eval_accuracy_sociology": 0.6363636363636364,
      "mmlu_eval_accuracy_us_foreign_policy": 0.5454545454545454,
      "mmlu_eval_accuracy_virology": 0.3888888888888889,
      "mmlu_eval_accuracy_world_religions": 0.7368421052631579,
      "mmlu_loss": 1.180907432194167,
      "step": 1400
    },
    {
      "epoch": 1.68,
      "learning_rate": 0.0002,
      "loss": 0.3929,
      "step": 1410
    },
    {
      "epoch": 1.7,
      "learning_rate": 0.0002,
      "loss": 0.3882,
      "step": 1420
    },
    {
      "epoch": 1.71,
      "learning_rate": 0.0002,
      "loss": 0.3797,
      "step": 1430
    },
    {
      "epoch": 1.72,
      "learning_rate": 0.0002,
      "loss": 0.3992,
      "step": 1440
    },
    {
      "epoch": 1.73,
      "learning_rate": 0.0002,
      "loss": 0.3774,
      "step": 1450
    },
    {
      "epoch": 1.74,
      "learning_rate": 0.0002,
      "loss": 0.3677,
      "step": 1460
    },
    {
      "epoch": 1.76,
      "learning_rate": 0.0002,
      "loss": 0.3697,
      "step": 1470
    },
    {
      "epoch": 1.77,
      "learning_rate": 0.0002,
      "loss": 0.3683,
      "step": 1480
    },
    {
      "epoch": 1.78,
      "learning_rate": 0.0002,
      "loss": 0.3703,
      "step": 1490
    },
    {
      "epoch": 1.79,
      "learning_rate": 0.0002,
      "loss": 0.3953,
      "step": 1500
    },
    {
      "epoch": 1.8,
      "learning_rate": 0.0002,
      "loss": 0.4016,
      "step": 1510
    },
    {
      "epoch": 1.82,
      "learning_rate": 0.0002,
      "loss": 0.374,
      "step": 1520
    },
    {
      "epoch": 1.83,
      "learning_rate": 0.0002,
      "loss": 0.3753,
      "step": 1530
    },
    {
      "epoch": 1.84,
      "learning_rate": 0.0002,
      "loss": 0.3884,
      "step": 1540
    },
    {
      "epoch": 1.85,
      "learning_rate": 0.0002,
      "loss": 0.3588,
      "step": 1550
    },
    {
      "epoch": 1.86,
      "learning_rate": 0.0002,
      "loss": 0.3988,
      "step": 1560
    },
    {
      "epoch": 1.88,
      "learning_rate": 0.0002,
      "loss": 0.3697,
      "step": 1570
    },
    {
      "epoch": 1.89,
      "learning_rate": 0.0002,
      "loss": 0.3937,
      "step": 1580
    },
    {
      "epoch": 1.9,
      "learning_rate": 0.0002,
      "loss": 0.3856,
      "step": 1590
    },
    {
      "epoch": 1.91,
      "learning_rate": 0.0002,
      "loss": 0.363,
      "step": 1600
    },
    {
      "epoch": 1.91,
      "eval_loss": 0.4432196617126465,
      "eval_runtime": 178.8367,
      "eval_samples_per_second": 5.592,
      "eval_steps_per_second": 2.796,
      "step": 1600
    },
    {
      "epoch": 1.91,
      "mmlu_eval_accuracy": 0.46789885257340896,
      "mmlu_eval_accuracy_abstract_algebra": 0.2727272727272727,
      "mmlu_eval_accuracy_anatomy": 0.5,
      "mmlu_eval_accuracy_astronomy": 0.5,
      "mmlu_eval_accuracy_business_ethics": 0.5454545454545454,
      "mmlu_eval_accuracy_clinical_knowledge": 0.4482758620689655,
      "mmlu_eval_accuracy_college_biology": 0.375,
      "mmlu_eval_accuracy_college_chemistry": 0.25,
      "mmlu_eval_accuracy_college_computer_science": 0.2727272727272727,
      "mmlu_eval_accuracy_college_mathematics": 0.36363636363636365,
      "mmlu_eval_accuracy_college_medicine": 0.4090909090909091,
      "mmlu_eval_accuracy_college_physics": 0.45454545454545453,
      "mmlu_eval_accuracy_computer_security": 0.18181818181818182,
      "mmlu_eval_accuracy_conceptual_physics": 0.34615384615384615,
      "mmlu_eval_accuracy_econometrics": 0.16666666666666666,
      "mmlu_eval_accuracy_electrical_engineering": 0.375,
      "mmlu_eval_accuracy_elementary_mathematics": 0.2682926829268293,
      "mmlu_eval_accuracy_formal_logic": 0.21428571428571427,
      "mmlu_eval_accuracy_global_facts": 0.5,
      "mmlu_eval_accuracy_high_school_biology": 0.40625,
      "mmlu_eval_accuracy_high_school_chemistry": 0.4090909090909091,
      "mmlu_eval_accuracy_high_school_computer_science": 0.5555555555555556,
      "mmlu_eval_accuracy_high_school_european_history": 0.6111111111111112,
      "mmlu_eval_accuracy_high_school_geography": 0.8181818181818182,
      "mmlu_eval_accuracy_high_school_government_and_politics": 0.6666666666666666,
      "mmlu_eval_accuracy_high_school_macroeconomics": 0.32558139534883723,
      "mmlu_eval_accuracy_high_school_mathematics": 0.3448275862068966,
      "mmlu_eval_accuracy_high_school_microeconomics": 0.34615384615384615,
      "mmlu_eval_accuracy_high_school_physics": 0.35294117647058826,
      "mmlu_eval_accuracy_high_school_psychology": 0.7833333333333333,
      "mmlu_eval_accuracy_high_school_statistics": 0.391304347826087,
      "mmlu_eval_accuracy_high_school_us_history": 0.6818181818181818,
      "mmlu_eval_accuracy_high_school_world_history": 0.5769230769230769,
      "mmlu_eval_accuracy_human_aging": 0.6086956521739131,
      "mmlu_eval_accuracy_human_sexuality": 0.4166666666666667,
      "mmlu_eval_accuracy_international_law": 0.6923076923076923,
      "mmlu_eval_accuracy_jurisprudence": 0.36363636363636365,
      "mmlu_eval_accuracy_logical_fallacies": 0.5,
      "mmlu_eval_accuracy_machine_learning": 0.36363636363636365,
      "mmlu_eval_accuracy_management": 0.6363636363636364,
      "mmlu_eval_accuracy_marketing": 0.68,
      "mmlu_eval_accuracy_medical_genetics": 0.7272727272727273,
      "mmlu_eval_accuracy_miscellaneous": 0.6744186046511628,
      "mmlu_eval_accuracy_moral_disputes": 0.4473684210526316,
      "mmlu_eval_accuracy_moral_scenarios": 0.25,
      "mmlu_eval_accuracy_nutrition": 0.6060606060606061,
      "mmlu_eval_accuracy_philosophy": 0.5294117647058824,
      "mmlu_eval_accuracy_prehistory": 0.5714285714285714,
      "mmlu_eval_accuracy_professional_accounting": 0.22580645161290322,
      "mmlu_eval_accuracy_professional_law": 0.3411764705882353,
      "mmlu_eval_accuracy_professional_medicine": 0.41935483870967744,
      "mmlu_eval_accuracy_professional_psychology": 0.37681159420289856,
      "mmlu_eval_accuracy_public_relations": 0.5,
      "mmlu_eval_accuracy_security_studies": 0.48148148148148145,
      "mmlu_eval_accuracy_sociology": 0.6363636363636364,
      "mmlu_eval_accuracy_us_foreign_policy": 0.7272727272727273,
      "mmlu_eval_accuracy_virology": 0.4444444444444444,
      "mmlu_eval_accuracy_world_religions": 0.7368421052631579,
      "mmlu_loss": 1.0490072815345877,
      "step": 1600
    },
    {
      "epoch": 1.92,
      "learning_rate": 0.0002,
      "loss": 0.3555,
      "step": 1610
    },
    {
      "epoch": 1.94,
      "learning_rate": 0.0002,
      "loss": 0.4285,
      "step": 1620
    },
    {
      "epoch": 1.95,
      "learning_rate": 0.0002,
      "loss": 0.3708,
      "step": 1630
    },
    {
      "epoch": 1.96,
      "learning_rate": 0.0002,
      "loss": 0.3781,
      "step": 1640
    },
    {
      "epoch": 1.97,
      "learning_rate": 0.0002,
      "loss": 0.368,
      "step": 1650
    },
    {
      "epoch": 1.98,
      "learning_rate": 0.0002,
      "loss": 0.3784,
      "step": 1660
    },
    {
      "epoch": 2.0,
      "learning_rate": 0.0002,
      "loss": 0.3709,
      "step": 1670
    },
    {
      "epoch": 2.01,
      "learning_rate": 0.0002,
      "loss": 0.3228,
      "step": 1680
    },
    {
      "epoch": 2.02,
      "learning_rate": 0.0002,
      "loss": 0.3244,
      "step": 1690
    },
    {
      "epoch": 2.03,
      "learning_rate": 0.0002,
      "loss": 0.304,
      "step": 1700
    },
    {
      "epoch": 2.04,
      "learning_rate": 0.0002,
      "loss": 0.2811,
      "step": 1710
    },
    {
      "epoch": 2.05,
      "learning_rate": 0.0002,
      "loss": 0.3046,
      "step": 1720
    },
    {
      "epoch": 2.07,
      "learning_rate": 0.0002,
      "loss": 0.3062,
      "step": 1730
    },
    {
      "epoch": 2.08,
      "learning_rate": 0.0002,
      "loss": 0.2967,
      "step": 1740
    },
    {
      "epoch": 2.09,
      "learning_rate": 0.0002,
      "loss": 0.2936,
      "step": 1750
    },
    {
      "epoch": 2.1,
      "learning_rate": 0.0002,
      "loss": 0.2899,
      "step": 1760
    },
    {
      "epoch": 2.11,
      "learning_rate": 0.0002,
      "loss": 0.2915,
      "step": 1770
    },
    {
      "epoch": 2.13,
      "learning_rate": 0.0002,
      "loss": 0.3226,
      "step": 1780
    },
    {
      "epoch": 2.14,
      "learning_rate": 0.0002,
      "loss": 0.3043,
      "step": 1790
    },
    {
      "epoch": 2.15,
      "learning_rate": 0.0002,
      "loss": 0.3054,
      "step": 1800
    },
    {
      "epoch": 2.15,
      "eval_loss": 0.45656338334083557,
      "eval_runtime": 179.3489,
      "eval_samples_per_second": 5.576,
      "eval_steps_per_second": 2.788,
      "step": 1800
    },
    {
      "epoch": 2.15,
      "mmlu_eval_accuracy": 0.45495968338399634,
      "mmlu_eval_accuracy_abstract_algebra": 0.18181818181818182,
      "mmlu_eval_accuracy_anatomy": 0.5,
      "mmlu_eval_accuracy_astronomy": 0.375,
      "mmlu_eval_accuracy_business_ethics": 0.5454545454545454,
      "mmlu_eval_accuracy_clinical_knowledge": 0.5172413793103449,
      "mmlu_eval_accuracy_college_biology": 0.4375,
      "mmlu_eval_accuracy_college_chemistry": 0.25,
      "mmlu_eval_accuracy_college_computer_science": 0.36363636363636365,
      "mmlu_eval_accuracy_college_mathematics": 0.36363636363636365,
      "mmlu_eval_accuracy_college_medicine": 0.36363636363636365,
      "mmlu_eval_accuracy_college_physics": 0.45454545454545453,
      "mmlu_eval_accuracy_computer_security": 0.18181818181818182,
      "mmlu_eval_accuracy_conceptual_physics": 0.34615384615384615,
      "mmlu_eval_accuracy_econometrics": 0.16666666666666666,
      "mmlu_eval_accuracy_electrical_engineering": 0.25,
      "mmlu_eval_accuracy_elementary_mathematics": 0.2682926829268293,
      "mmlu_eval_accuracy_formal_logic": 0.14285714285714285,
      "mmlu_eval_accuracy_global_facts": 0.4,
      "mmlu_eval_accuracy_high_school_biology": 0.40625,
      "mmlu_eval_accuracy_high_school_chemistry": 0.4090909090909091,
      "mmlu_eval_accuracy_high_school_computer_science": 0.5555555555555556,
      "mmlu_eval_accuracy_high_school_european_history": 0.6111111111111112,
      "mmlu_eval_accuracy_high_school_geography": 0.7727272727272727,
      "mmlu_eval_accuracy_high_school_government_and_politics": 0.6190476190476191,
      "mmlu_eval_accuracy_high_school_macroeconomics": 0.37209302325581395,
      "mmlu_eval_accuracy_high_school_mathematics": 0.20689655172413793,
      "mmlu_eval_accuracy_high_school_microeconomics": 0.4230769230769231,
      "mmlu_eval_accuracy_high_school_physics": 0.29411764705882354,
      "mmlu_eval_accuracy_high_school_psychology": 0.7666666666666667,
      "mmlu_eval_accuracy_high_school_statistics": 0.391304347826087,
      "mmlu_eval_accuracy_high_school_us_history": 0.6818181818181818,
      "mmlu_eval_accuracy_high_school_world_history": 0.5384615384615384,
      "mmlu_eval_accuracy_human_aging": 0.5652173913043478,
      "mmlu_eval_accuracy_human_sexuality": 0.4166666666666667,
      "mmlu_eval_accuracy_international_law": 0.6923076923076923,
      "mmlu_eval_accuracy_jurisprudence": 0.45454545454545453,
      "mmlu_eval_accuracy_logical_fallacies": 0.5,
      "mmlu_eval_accuracy_machine_learning": 0.36363636363636365,
      "mmlu_eval_accuracy_management": 0.7272727272727273,
      "mmlu_eval_accuracy_marketing": 0.64,
      "mmlu_eval_accuracy_medical_genetics": 0.7272727272727273,
      "mmlu_eval_accuracy_miscellaneous": 0.6627906976744186,
      "mmlu_eval_accuracy_moral_disputes": 0.4473684210526316,
      "mmlu_eval_accuracy_moral_scenarios": 0.27,
      "mmlu_eval_accuracy_nutrition": 0.6060606060606061,
      "mmlu_eval_accuracy_philosophy": 0.5,
      "mmlu_eval_accuracy_prehistory": 0.5428571428571428,
      "mmlu_eval_accuracy_professional_accounting": 0.22580645161290322,
      "mmlu_eval_accuracy_professional_law": 0.3411764705882353,
      "mmlu_eval_accuracy_professional_medicine": 0.41935483870967744,
      "mmlu_eval_accuracy_professional_psychology": 0.34782608695652173,
      "mmlu_eval_accuracy_public_relations": 0.5,
      "mmlu_eval_accuracy_security_studies": 0.5185185185185185,
      "mmlu_eval_accuracy_sociology": 0.5454545454545454,
      "mmlu_eval_accuracy_us_foreign_policy": 0.6363636363636364,
      "mmlu_eval_accuracy_virology": 0.3888888888888889,
      "mmlu_eval_accuracy_world_religions": 0.7368421052631579,
      "mmlu_loss": 1.0489876337213242,
      "step": 1800
    },
    {
      "epoch": 2.16,
      "learning_rate": 0.0002,
      "loss": 0.2987,
      "step": 1810
    },
    {
      "epoch": 2.17,
      "learning_rate": 0.0002,
      "loss": 0.2989,
      "step": 1820
    },
    {
      "epoch": 2.19,
      "learning_rate": 0.0002,
      "loss": 0.3031,
      "step": 1830
    },
    {
      "epoch": 2.2,
      "learning_rate": 0.0002,
      "loss": 0.3111,
      "step": 1840
    },
    {
      "epoch": 2.21,
      "learning_rate": 0.0002,
      "loss": 0.2976,
      "step": 1850
    },
    {
      "epoch": 2.22,
      "learning_rate": 0.0002,
      "loss": 0.2884,
      "step": 1860
    },
    {
      "epoch": 2.23,
      "learning_rate": 0.0002,
      "loss": 0.2923,
      "step": 1870
    },
    {
      "epoch": 2.25,
      "learning_rate": 0.0002,
      "loss": 0.3239,
      "step": 1880
    },
    {
      "epoch": 2.26,
      "learning_rate": 0.0002,
      "loss": 0.3251,
      "step": 1890
    },
    {
      "epoch": 2.27,
      "learning_rate": 0.0002,
      "loss": 0.3147,
      "step": 1900
    },
    {
      "epoch": 2.28,
      "learning_rate": 0.0002,
      "loss": 0.3403,
      "step": 1910
    },
    {
      "epoch": 2.29,
      "learning_rate": 0.0002,
      "loss": 0.3004,
      "step": 1920
    },
    {
      "epoch": 2.31,
      "learning_rate": 0.0002,
      "loss": 0.3127,
      "step": 1930
    },
    {
      "epoch": 2.32,
      "learning_rate": 0.0002,
      "loss": 0.3,
      "step": 1940
    },
    {
      "epoch": 2.33,
      "learning_rate": 0.0002,
      "loss": 0.3137,
      "step": 1950
    },
    {
      "epoch": 2.34,
      "learning_rate": 0.0002,
      "loss": 0.3002,
      "step": 1960
    },
    {
      "epoch": 2.35,
      "learning_rate": 0.0002,
      "loss": 0.3065,
      "step": 1970
    },
    {
      "epoch": 2.37,
      "learning_rate": 0.0002,
      "loss": 0.2968,
      "step": 1980
    },
    {
      "epoch": 2.38,
      "learning_rate": 0.0002,
      "loss": 0.3102,
      "step": 1990
    },
    {
      "epoch": 2.39,
      "learning_rate": 0.0002,
      "loss": 0.3331,
      "step": 2000
    },
    {
      "epoch": 2.39,
      "eval_loss": 0.453141987323761,
      "eval_runtime": 179.2289,
      "eval_samples_per_second": 5.579,
      "eval_steps_per_second": 2.79,
      "step": 2000
    },
    {
      "epoch": 2.39,
      "mmlu_eval_accuracy": 0.45313748079668487,
      "mmlu_eval_accuracy_abstract_algebra": 0.18181818181818182,
      "mmlu_eval_accuracy_anatomy": 0.5,
      "mmlu_eval_accuracy_astronomy": 0.375,
      "mmlu_eval_accuracy_business_ethics": 0.5454545454545454,
      "mmlu_eval_accuracy_clinical_knowledge": 0.4482758620689655,
      "mmlu_eval_accuracy_college_biology": 0.4375,
      "mmlu_eval_accuracy_college_chemistry": 0.25,
      "mmlu_eval_accuracy_college_computer_science": 0.2727272727272727,
      "mmlu_eval_accuracy_college_mathematics": 0.2727272727272727,
      "mmlu_eval_accuracy_college_medicine": 0.4090909090909091,
      "mmlu_eval_accuracy_college_physics": 0.45454545454545453,
      "mmlu_eval_accuracy_computer_security": 0.18181818181818182,
      "mmlu_eval_accuracy_conceptual_physics": 0.3076923076923077,
      "mmlu_eval_accuracy_econometrics": 0.16666666666666666,
      "mmlu_eval_accuracy_electrical_engineering": 0.3125,
      "mmlu_eval_accuracy_elementary_mathematics": 0.2682926829268293,
      "mmlu_eval_accuracy_formal_logic": 0.21428571428571427,
      "mmlu_eval_accuracy_global_facts": 0.5,
      "mmlu_eval_accuracy_high_school_biology": 0.375,
      "mmlu_eval_accuracy_high_school_chemistry": 0.36363636363636365,
      "mmlu_eval_accuracy_high_school_computer_science": 0.5555555555555556,
      "mmlu_eval_accuracy_high_school_european_history": 0.6666666666666666,
      "mmlu_eval_accuracy_high_school_geography": 0.8181818181818182,
      "mmlu_eval_accuracy_high_school_government_and_politics": 0.6190476190476191,
      "mmlu_eval_accuracy_high_school_macroeconomics": 0.37209302325581395,
      "mmlu_eval_accuracy_high_school_mathematics": 0.27586206896551724,
      "mmlu_eval_accuracy_high_school_microeconomics": 0.34615384615384615,
      "mmlu_eval_accuracy_high_school_physics": 0.35294117647058826,
      "mmlu_eval_accuracy_high_school_psychology": 0.7666666666666667,
      "mmlu_eval_accuracy_high_school_statistics": 0.34782608695652173,
      "mmlu_eval_accuracy_high_school_us_history": 0.6818181818181818,
      "mmlu_eval_accuracy_high_school_world_history": 0.5384615384615384,
      "mmlu_eval_accuracy_human_aging": 0.6521739130434783,
      "mmlu_eval_accuracy_human_sexuality": 0.3333333333333333,
      "mmlu_eval_accuracy_international_law": 0.6923076923076923,
      "mmlu_eval_accuracy_jurisprudence": 0.36363636363636365,
      "mmlu_eval_accuracy_logical_fallacies": 0.5,
      "mmlu_eval_accuracy_machine_learning": 0.36363636363636365,
      "mmlu_eval_accuracy_management": 0.6363636363636364,
      "mmlu_eval_accuracy_marketing": 0.64,
      "mmlu_eval_accuracy_medical_genetics": 0.7272727272727273,
      "mmlu_eval_accuracy_miscellaneous": 0.6511627906976745,
      "mmlu_eval_accuracy_moral_disputes": 0.42105263157894735,
      "mmlu_eval_accuracy_moral_scenarios": 0.29,
      "mmlu_eval_accuracy_nutrition": 0.6060606060606061,
      "mmlu_eval_accuracy_philosophy": 0.5588235294117647,
      "mmlu_eval_accuracy_prehistory": 0.5142857142857142,
      "mmlu_eval_accuracy_professional_accounting": 0.22580645161290322,
      "mmlu_eval_accuracy_professional_law": 0.34705882352941175,
      "mmlu_eval_accuracy_professional_medicine": 0.45161290322580644,
      "mmlu_eval_accuracy_professional_psychology": 0.37681159420289856,
      "mmlu_eval_accuracy_public_relations": 0.5,
      "mmlu_eval_accuracy_security_studies": 0.48148148148148145,
      "mmlu_eval_accuracy_sociology": 0.5909090909090909,
      "mmlu_eval_accuracy_us_foreign_policy": 0.5454545454545454,
      "mmlu_eval_accuracy_virology": 0.4444444444444444,
      "mmlu_eval_accuracy_world_religions": 0.7368421052631579,
      "mmlu_loss": 1.14181383113637,
      "step": 2000
    },
{ | |
"epoch": 2.4, | |
"learning_rate": 0.0002, | |
"loss": 0.2785, | |
"step": 2010 | |
}, | |
{ | |
"epoch": 2.41, | |
"learning_rate": 0.0002, | |
"loss": 0.3038, | |
"step": 2020 | |
}, | |
{ | |
"epoch": 2.43, | |
"learning_rate": 0.0002, | |
"loss": 0.3259, | |
"step": 2030 | |
}, | |
{ | |
"epoch": 2.44, | |
"learning_rate": 0.0002, | |
"loss": 0.3308, | |
"step": 2040 | |
}, | |
{ | |
"epoch": 2.45, | |
"learning_rate": 0.0002, | |
"loss": 0.325, | |
"step": 2050 | |
}, | |
{ | |
"epoch": 2.46, | |
"learning_rate": 0.0002, | |
"loss": 0.3271, | |
"step": 2060 | |
}, | |
{ | |
"epoch": 2.47, | |
"learning_rate": 0.0002, | |
"loss": 0.3187, | |
"step": 2070 | |
}, | |
{ | |
"epoch": 2.49, | |
"learning_rate": 0.0002, | |
"loss": 0.3192, | |
"step": 2080 | |
}, | |
{ | |
"epoch": 2.5, | |
"learning_rate": 0.0002, | |
"loss": 0.3227, | |
"step": 2090 | |
}, | |
{ | |
"epoch": 2.51, | |
"learning_rate": 0.0002, | |
"loss": 0.2969, | |
"step": 2100 | |
}, | |
{ | |
"epoch": 2.52, | |
"learning_rate": 0.0002, | |
"loss": 0.334, | |
"step": 2110 | |
}, | |
{ | |
"epoch": 2.53, | |
"learning_rate": 0.0002, | |
"loss": 0.3266, | |
"step": 2120 | |
}, | |
{ | |
"epoch": 2.54, | |
"learning_rate": 0.0002, | |
"loss": 0.3085, | |
"step": 2130 | |
}, | |
{ | |
"epoch": 2.56, | |
"learning_rate": 0.0002, | |
"loss": 0.2917, | |
"step": 2140 | |
}, | |
{ | |
"epoch": 2.57, | |
"learning_rate": 0.0002, | |
"loss": 0.3142, | |
"step": 2150 | |
}, | |
{ | |
"epoch": 2.58, | |
"learning_rate": 0.0002, | |
"loss": 0.2954, | |
"step": 2160 | |
}, | |
{ | |
"epoch": 2.59, | |
"learning_rate": 0.0002, | |
"loss": 0.3341, | |
"step": 2170 | |
}, | |
{ | |
"epoch": 2.6, | |
"learning_rate": 0.0002, | |
"loss": 0.3129, | |
"step": 2180 | |
}, | |
{ | |
"epoch": 2.62, | |
"learning_rate": 0.0002, | |
"loss": 0.2964, | |
"step": 2190 | |
}, | |
{ | |
"epoch": 2.63, | |
"learning_rate": 0.0002, | |
"loss": 0.3069, | |
"step": 2200 | |
}, | |
{ | |
"epoch": 2.63, | |
"eval_loss": 0.45312267541885376, | |
"eval_runtime": 179.1132, | |
"eval_samples_per_second": 5.583, | |
"eval_steps_per_second": 2.792, | |
"step": 2200 | |
}, | |
{ | |
"epoch": 2.63, | |
"mmlu_eval_accuracy": 0.45786090120959316, | |
"mmlu_eval_accuracy_abstract_algebra": 0.45454545454545453, | |
"mmlu_eval_accuracy_anatomy": 0.5, | |
"mmlu_eval_accuracy_astronomy": 0.4375, | |
"mmlu_eval_accuracy_business_ethics": 0.5454545454545454, | |
"mmlu_eval_accuracy_clinical_knowledge": 0.4482758620689655, | |
"mmlu_eval_accuracy_college_biology": 0.4375, | |
"mmlu_eval_accuracy_college_chemistry": 0.25, | |
"mmlu_eval_accuracy_college_computer_science": 0.36363636363636365, | |
"mmlu_eval_accuracy_college_mathematics": 0.2727272727272727, | |
"mmlu_eval_accuracy_college_medicine": 0.36363636363636365, | |
"mmlu_eval_accuracy_college_physics": 0.45454545454545453, | |
"mmlu_eval_accuracy_computer_security": 0.2727272727272727, | |
"mmlu_eval_accuracy_conceptual_physics": 0.34615384615384615, | |
"mmlu_eval_accuracy_econometrics": 0.16666666666666666, | |
"mmlu_eval_accuracy_electrical_engineering": 0.25, | |
"mmlu_eval_accuracy_elementary_mathematics": 0.2926829268292683, | |
"mmlu_eval_accuracy_formal_logic": 0.14285714285714285, | |
"mmlu_eval_accuracy_global_facts": 0.4, | |
"mmlu_eval_accuracy_high_school_biology": 0.4375, | |
"mmlu_eval_accuracy_high_school_chemistry": 0.36363636363636365, | |
"mmlu_eval_accuracy_high_school_computer_science": 0.6666666666666666, | |
"mmlu_eval_accuracy_high_school_european_history": 0.5555555555555556, | |
"mmlu_eval_accuracy_high_school_geography": 0.7727272727272727, | |
"mmlu_eval_accuracy_high_school_government_and_politics": 0.5238095238095238, | |
"mmlu_eval_accuracy_high_school_macroeconomics": 0.37209302325581395, | |
"mmlu_eval_accuracy_high_school_mathematics": 0.3448275862068966, | |
"mmlu_eval_accuracy_high_school_microeconomics": 0.3076923076923077, | |
"mmlu_eval_accuracy_high_school_physics": 0.29411764705882354, | |
"mmlu_eval_accuracy_high_school_psychology": 0.7166666666666667, | |
"mmlu_eval_accuracy_high_school_statistics": 0.391304347826087, | |
"mmlu_eval_accuracy_high_school_us_history": 0.6363636363636364, | |
"mmlu_eval_accuracy_high_school_world_history": 0.5384615384615384, | |
"mmlu_eval_accuracy_human_aging": 0.6086956521739131, | |
"mmlu_eval_accuracy_human_sexuality": 0.4166666666666667, | |
"mmlu_eval_accuracy_international_law": 0.6923076923076923, | |
"mmlu_eval_accuracy_jurisprudence": 0.36363636363636365, | |
"mmlu_eval_accuracy_logical_fallacies": 0.5, | |
"mmlu_eval_accuracy_machine_learning": 0.36363636363636365, | |
"mmlu_eval_accuracy_management": 0.6363636363636364, | |
"mmlu_eval_accuracy_marketing": 0.64, | |
"mmlu_eval_accuracy_medical_genetics": 0.7272727272727273, | |
"mmlu_eval_accuracy_miscellaneous": 0.6976744186046512, | |
"mmlu_eval_accuracy_moral_disputes": 0.5, | |
"mmlu_eval_accuracy_moral_scenarios": 0.27, | |
"mmlu_eval_accuracy_nutrition": 0.6060606060606061, | |
"mmlu_eval_accuracy_philosophy": 0.5588235294117647, | |
"mmlu_eval_accuracy_prehistory": 0.5428571428571428, | |
"mmlu_eval_accuracy_professional_accounting": 0.1935483870967742, | |
"mmlu_eval_accuracy_professional_law": 0.3588235294117647, | |
"mmlu_eval_accuracy_professional_medicine": 0.41935483870967744, | |
"mmlu_eval_accuracy_professional_psychology": 0.391304347826087, | |
"mmlu_eval_accuracy_public_relations": 0.5, | |
"mmlu_eval_accuracy_security_studies": 0.5185185185185185, | |
"mmlu_eval_accuracy_sociology": 0.5454545454545454, | |
"mmlu_eval_accuracy_us_foreign_policy": 0.5454545454545454, | |
"mmlu_eval_accuracy_virology": 0.4444444444444444, | |
"mmlu_eval_accuracy_world_religions": 0.7368421052631579, | |
"mmlu_loss": 1.1481178610855542, | |
"step": 2200 | |
} | |
], | |
"max_steps": 5000, | |
"num_train_epochs": 6, | |
"total_flos": 4.497885537845576e+17, | |
"trial_name": null, | |
"trial_params": null | |
} | |