{
  "best_metric": 0.7390481233596802,
  "best_model_checkpoint": "./qlora-NeurIPS_repo_rerun3/checkpoint-1984",
  "epoch": 1.982017982017982,
  "eval_steps": 64,
  "global_step": 1984,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.0, "learning_rate": 2.0000000000000002e-07, "loss": 1.2637, "step": 1},
    {"epoch": 0.0, "learning_rate": 4.0000000000000003e-07, "loss": 1.4686, "step": 2},
    {"epoch": 0.0, "learning_rate": 6.000000000000001e-07, "loss": 1.5486, "step": 3},
    {"epoch": 0.0, "learning_rate": 8.000000000000001e-07, "loss": 1.3901, "step": 4},
    {"epoch": 0.0, "learning_rate": 1.0000000000000002e-06, "loss": 1.4859, "step": 5},
    {"epoch": 0.01, "learning_rate": 1.2000000000000002e-06, "loss": 1.3576, "step": 6},
    {"epoch": 0.01, "learning_rate": 1.4000000000000001e-06, "loss": 1.344, "step": 7},
    {"epoch": 0.01, "learning_rate": 1.6000000000000001e-06, "loss": 1.2745, "step": 8},
    {"epoch": 0.01, "learning_rate": 1.8000000000000001e-06, "loss": 1.375, "step": 9},
    {"epoch": 0.01, "learning_rate": 2.0000000000000003e-06, "loss": 1.4734, "step": 10},
    {"epoch": 0.01, "learning_rate": 2.2e-06, "loss": 1.2147, "step": 11},
    {"epoch": 0.01, "learning_rate": 2.4000000000000003e-06, "loss": 1.3805, "step": 12},
    {"epoch": 0.01, "learning_rate": 2.6e-06, "loss": 1.2849, "step": 13},
    {"epoch": 0.01, "learning_rate": 2.8000000000000003e-06, "loss": 1.3019, "step": 14},
    {"epoch": 0.01, "learning_rate": 3e-06, "loss": 1.2019, "step": 15},
    {"epoch": 0.02, "learning_rate": 3.2000000000000003e-06, "loss": 1.1411, "step": 16},
    {"epoch": 0.02, "learning_rate": 3.4000000000000005e-06, "loss": 1.3422, "step": 17},
    {"epoch": 0.02, "learning_rate": 3.6000000000000003e-06, "loss": 1.1507, "step": 18},
    {"epoch": 0.02, "learning_rate": 3.8000000000000005e-06, "loss": 1.0728, "step": 19},
    {"epoch": 0.02, "learning_rate": 4.000000000000001e-06, "loss": 1.0169, "step": 20},
    {"epoch": 0.02, "learning_rate": 4.2000000000000004e-06, "loss": 0.9978, "step": 21},
    {"epoch": 0.02, "learning_rate": 4.4e-06, "loss": 1.177, "step": 22},
    {"epoch": 0.02, "learning_rate": 4.600000000000001e-06, "loss": 1.1552, "step": 23},
    {"epoch": 0.02, "learning_rate": 4.800000000000001e-06, "loss": 1.0804, "step": 24},
    {"epoch": 0.02, "learning_rate": 5e-06, "loss": 1.0948, "step": 25},
    {"epoch": 0.03, "learning_rate": 5.2e-06, "loss": 1.0599, "step": 26},
    {"epoch": 0.03, "learning_rate": 5.400000000000001e-06, "loss": 1.0642, "step": 27},
    {"epoch": 0.03, "learning_rate": 5.600000000000001e-06, "loss": 1.0911, "step": 28},
    {"epoch": 0.03, "learning_rate": 5.8e-06, "loss": 1.0905, "step": 29},
    {"epoch": 0.03, "learning_rate": 6e-06, "loss": 0.982, "step": 30},
    {"epoch": 0.03, "learning_rate": 6.200000000000001e-06, "loss": 1.1842, "step": 31},
    {"epoch": 0.03, "learning_rate": 6.4000000000000006e-06, "loss": 1.0504, "step": 32},
    {"epoch": 0.03, "learning_rate": 6.600000000000001e-06, "loss": 0.9268, "step": 33},
    {"epoch": 0.03, "learning_rate": 6.800000000000001e-06, "loss": 1.0374, "step": 34},
    {"epoch": 0.03, "learning_rate": 7e-06, "loss": 1.0, "step": 35},
    {"epoch": 0.04, "learning_rate": 7.2000000000000005e-06, "loss": 1.1074, "step": 36},
    {"epoch": 0.04, "learning_rate": 7.4e-06, "loss": 0.925, "step": 37},
    {"epoch": 0.04, "learning_rate": 7.600000000000001e-06, "loss": 0.991, "step": 38},
    {"epoch": 0.04, "learning_rate": 7.800000000000002e-06, "loss": 0.9742, "step": 39},
    {"epoch": 0.04, "learning_rate": 8.000000000000001e-06, "loss": 1.0463, "step": 40},
    {"epoch": 0.04, "learning_rate": 8.2e-06, "loss": 0.9318, "step": 41},
    {"epoch": 0.04, "learning_rate": 8.400000000000001e-06, "loss": 1.0624, "step": 42},
    {"epoch": 0.04, "learning_rate": 8.6e-06, "loss": 0.9967, "step": 43},
    {"epoch": 0.04, "learning_rate": 8.8e-06, "loss": 1.0015, "step": 44},
    {"epoch": 0.04, "learning_rate": 9e-06, "loss": 0.9986, "step": 45},
    {"epoch": 0.05, "learning_rate": 9.200000000000002e-06, "loss": 0.9808, "step": 46},
    {"epoch": 0.05, "learning_rate": 9.4e-06, "loss": 0.9676, "step": 47},
    {"epoch": 0.05, "learning_rate": 9.600000000000001e-06, "loss": 0.8693, "step": 48},
    {"epoch": 0.05, "learning_rate": 9.800000000000001e-06, "loss": 1.0204, "step": 49},
    {"epoch": 0.05, "learning_rate": 1e-05, "loss": 0.872, "step": 50},
    {"epoch": 0.05, "learning_rate": 1.02e-05, "loss": 0.8843, "step": 51},
    {"epoch": 0.05, "learning_rate": 1.04e-05, "loss": 0.9736, "step": 52},
    {"epoch": 0.05, "learning_rate": 1.0600000000000002e-05, "loss": 0.9687, "step": 53},
    {"epoch": 0.05, "learning_rate": 1.0800000000000002e-05, "loss": 0.899, "step": 54},
    {"epoch": 0.05, "learning_rate": 1.1000000000000001e-05, "loss": 0.9228, "step": 55},
    {"epoch": 0.06, "learning_rate": 1.1200000000000001e-05, "loss": 1.0134, "step": 56},
    {"epoch": 0.06, "learning_rate": 1.14e-05, "loss": 0.7985, "step": 57},
    {"epoch": 0.06, "learning_rate": 1.16e-05, "loss": 0.9846, "step": 58},
    {"epoch": 0.06, "learning_rate": 1.18e-05, "loss": 0.9279, "step": 59},
    {"epoch": 0.06, "learning_rate": 1.2e-05, "loss": 0.9569, "step": 60},
    {"epoch": 0.06, "learning_rate": 1.22e-05, "loss": 0.9017, "step": 61},
    {"epoch": 0.06, "learning_rate": 1.2400000000000002e-05, "loss": 0.8809, "step": 62},
    {"epoch": 0.06, "learning_rate": 1.2600000000000001e-05, "loss": 0.9701, "step": 63},
    {"epoch": 0.06, "learning_rate": 1.2800000000000001e-05, "loss": 0.8555, "step": 64},
    {"epoch": 0.06, "eval_loss": 0.9695873856544495, "eval_runtime": 88.8625, "eval_samples_per_second": 22.507, "eval_steps_per_second": 11.253, "step": 64},
    {"epoch": 0.06, "learning_rate": 1.3000000000000001e-05, "loss": 0.9658, "step": 65},
    {"epoch": 0.07, "learning_rate": 1.3200000000000002e-05, "loss": 1.069, "step": 66},
    {"epoch": 0.07, "learning_rate": 1.3400000000000002e-05, "loss": 0.8698, "step": 67},
    {"epoch": 0.07, "learning_rate": 1.3600000000000002e-05, "loss": 0.8364, "step": 68},
    {"epoch": 0.07, "learning_rate": 1.38e-05, "loss": 0.9105, "step": 69},
    {"epoch": 0.07, "learning_rate": 1.4e-05, "loss": 1.0611, "step": 70},
    {"epoch": 0.07, "learning_rate": 1.4200000000000001e-05, "loss": 0.9089, "step": 71},
    {"epoch": 0.07, "learning_rate": 1.4400000000000001e-05, "loss": 0.9374, "step": 72},
    {"epoch": 0.07, "learning_rate": 1.46e-05, "loss": 0.7666, "step": 73},
    {"epoch": 0.07, "learning_rate": 1.48e-05, "loss": 0.943, "step": 74},
    {"epoch": 0.07, "learning_rate": 1.5000000000000002e-05, "loss": 0.9233, "step": 75},
    {"epoch": 0.08, "learning_rate": 1.5200000000000002e-05, "loss": 0.9147, "step": 76},
    {"epoch": 0.08, "learning_rate": 1.54e-05, "loss": 0.8846, "step": 77},
    {"epoch": 0.08, "learning_rate": 1.5600000000000003e-05, "loss": 0.8737, "step": 78},
    {"epoch": 0.08, "learning_rate": 1.58e-05, "loss": 0.9275, "step": 79},
    {"epoch": 0.08, "learning_rate": 1.6000000000000003e-05, "loss": 0.9828, "step": 80},
    {"epoch": 0.08, "learning_rate": 1.62e-05, "loss": 0.9551, "step": 81},
    {"epoch": 0.08, "learning_rate": 1.64e-05, "loss": 0.8687, "step": 82},
    {"epoch": 0.08, "learning_rate": 1.66e-05, "loss": 0.8925, "step": 83},
    {"epoch": 0.08, "learning_rate": 1.6800000000000002e-05, "loss": 1.0557, "step": 84},
    {"epoch": 0.08, "learning_rate": 1.7e-05, "loss": 0.8112, "step": 85},
    {"epoch": 0.09, "learning_rate": 1.72e-05, "loss": 0.8072, "step": 86},
    {"epoch": 0.09, "learning_rate": 1.7400000000000003e-05, "loss": 1.07, "step": 87},
    {"epoch": 0.09, "learning_rate": 1.76e-05, "loss": 1.0323, "step": 88},
    {"epoch": 0.09, "learning_rate": 1.7800000000000002e-05, "loss": 0.8928, "step": 89},
    {"epoch": 0.09, "learning_rate": 1.8e-05, "loss": 0.8755, "step": 90},
    {"epoch": 0.09, "learning_rate": 1.8200000000000002e-05, "loss": 0.8265, "step": 91},
    {"epoch": 0.09, "learning_rate": 1.8400000000000003e-05, "loss": 0.8791, "step": 92},
    {"epoch": 0.09, "learning_rate": 1.86e-05, "loss": 0.9174, "step": 93},
    {"epoch": 0.09, "learning_rate": 1.88e-05, "loss": 0.9284, "step": 94},
    {"epoch": 0.09, "learning_rate": 1.9e-05, "loss": 0.8523, "step": 95},
    {"epoch": 0.1, "learning_rate": 1.9200000000000003e-05, "loss": 0.8372, "step": 96},
    {"epoch": 0.1, "learning_rate": 1.94e-05, "loss": 0.964, "step": 97},
    {"epoch": 0.1, "learning_rate": 1.9600000000000002e-05, "loss": 0.9476, "step": 98},
    {"epoch": 0.1, "learning_rate": 1.98e-05, "loss": 0.9811, "step": 99},
    {"epoch": 0.1, "learning_rate": 2e-05, "loss": 0.8888, "step": 100},
    {"epoch": 0.1, "learning_rate": 1.999999414434292e-05, "loss": 0.83, "step": 101},
    {"epoch": 0.1, "learning_rate": 1.9999976577378534e-05, "loss": 0.8956, "step": 102},
    {"epoch": 0.1, "learning_rate": 1.9999947299127415e-05, "loss": 0.9507, "step": 103},
    {"epoch": 0.1, "learning_rate": 1.9999906309623852e-05, "loss": 1.0065, "step": 104},
    {"epoch": 0.1, "learning_rate": 1.999985360891585e-05, "loss": 0.8595, "step": 105},
    {"epoch": 0.11, "learning_rate": 1.999978919706513e-05, "loss": 0.8325, "step": 106},
    {"epoch": 0.11, "learning_rate": 1.9999713074147126e-05, "loss": 0.8277, "step": 107},
    {"epoch": 0.11, "learning_rate": 1.9999625240250984e-05, "loss": 0.8909, "step": 108},
    {"epoch": 0.11, "learning_rate": 1.9999525695479575e-05, "loss": 0.8998, "step": 109},
    {"epoch": 0.11, "learning_rate": 1.999941443994947e-05, "loss": 0.7963, "step": 110},
    {"epoch": 0.11, "learning_rate": 1.9999291473790975e-05, "loss": 0.763, "step": 111},
    {"epoch": 0.11, "learning_rate": 1.999915679714809e-05, "loss": 0.7929, "step": 112},
    {"epoch": 0.11, "learning_rate": 1.999901041017855e-05, "loss": 1.0158, "step": 113},
    {"epoch": 0.11, "learning_rate": 1.9998852313053783e-05, "loss": 0.9339, "step": 114},
    {"epoch": 0.11, "learning_rate": 1.9998682505958945e-05, "loss": 0.8396, "step": 115},
    {"epoch": 0.12, "learning_rate": 1.9998500989092904e-05, "loss": 1.0167, "step": 116},
    {"epoch": 0.12, "learning_rate": 1.9998307762668236e-05, "loss": 0.9133, "step": 117},
    {"epoch": 0.12, "learning_rate": 1.9998102826911242e-05, "loss": 0.7984, "step": 118},
    {"epoch": 0.12, "learning_rate": 1.999788618206192e-05, "loss": 0.975, "step": 119},
    {"epoch": 0.12, "learning_rate": 1.9997657828373996e-05, "loss": 0.9946, "step": 120},
    {"epoch": 0.12, "learning_rate": 1.9997417766114896e-05, "loss": 0.9567, "step": 121},
    {"epoch": 0.12, "learning_rate": 1.999716599556577e-05, "loss": 0.8376, "step": 122},
    {"epoch": 0.12, "learning_rate": 1.9996902517021478e-05, "loss": 1.0126, "step": 123},
    {"epoch": 0.12, "learning_rate": 1.999662733079058e-05, "loss": 0.9379, "step": 124},
    {"epoch": 0.12, "learning_rate": 1.9996340437195354e-05, "loss": 0.9359, "step": 125},
    {"epoch": 0.13, "learning_rate": 1.9996041836571794e-05, "loss": 0.9905, "step": 126},
    {"epoch": 0.13, "learning_rate": 1.9995731529269605e-05, "loss": 0.8234, "step": 127},
    {"epoch": 0.13, "learning_rate": 1.999540951565219e-05, "loss": 0.8067, "step": 128},
    {"epoch": 0.13, "eval_loss": 0.9128402471542358, "eval_runtime": 88.8003, "eval_samples_per_second": 22.522, "eval_steps_per_second": 11.261, "step": 128},
    {"epoch": 0.13, "learning_rate": 1.9995075796096674e-05, "loss": 1.0005, "step": 129},
    {"epoch": 0.13, "learning_rate": 1.9994730370993887e-05, "loss": 0.8068, "step": 130},
    {"epoch": 0.13, "learning_rate": 1.9994373240748362e-05, "loss": 0.897, "step": 131},
    {"epoch": 0.13, "learning_rate": 1.999400440577835e-05, "loss": 0.8146, "step": 132},
    {"epoch": 0.13, "learning_rate": 1.9993623866515804e-05, "loss": 0.9914, "step": 133},
    {"epoch": 0.13, "learning_rate": 1.9993231623406385e-05, "loss": 0.9953, "step": 134},
    {"epoch": 0.13, "learning_rate": 1.999282767690946e-05, "loss": 0.8931, "step": 135},
    {"epoch": 0.14, "learning_rate": 1.9992412027498106e-05, "loss": 0.7608, "step": 136},
    {"epoch": 0.14, "learning_rate": 1.99919846756591e-05, "loss": 0.9624, "step": 137},
    {"epoch": 0.14, "learning_rate": 1.999154562189293e-05, "loss": 0.8888, "step": 138},
    {"epoch": 0.14, "learning_rate": 1.9991094866713788e-05, "loss": 0.918, "step": 139},
    {"epoch": 0.14, "learning_rate": 1.999063241064956e-05, "loss": 0.913, "step": 140},
    {"epoch": 0.14, "learning_rate": 1.999015825424185e-05, "loss": 0.9005, "step": 141},
    {"epoch": 0.14, "learning_rate": 1.9989672398045955e-05, "loss": 0.9251, "step": 142},
    {"epoch": 0.14, "learning_rate": 1.9989174842630877e-05, "loss": 0.8851, "step": 143},
    {"epoch": 0.14, "learning_rate": 1.9988665588579314e-05, "loss": 0.9148, "step": 144},
    {"epoch": 0.14, "learning_rate": 1.9988144636487675e-05, "loss": 0.9347, "step": 145},
    {"epoch": 0.15, "learning_rate": 1.9987611986966065e-05, "loss": 0.8989, "step": 146},
    {"epoch": 0.15, "learning_rate": 1.998706764063828e-05, "loss": 0.8544, "step": 147},
    {"epoch": 0.15, "learning_rate": 1.9986511598141825e-05, "loss": 0.9415, "step": 148},
    {"epoch": 0.15, "learning_rate": 1.9985943860127896e-05, "loss": 1.0248, "step": 149},
    {"epoch": 0.15, "learning_rate": 1.9985364427261394e-05, "loss": 0.81, "step": 150},
    {"epoch": 0.15, "learning_rate": 1.998477330022091e-05, "loss": 0.8744, "step": 151},
    {"epoch": 0.15, "learning_rate": 1.9984170479698724e-05, "loss": 0.8918, "step": 152},
    {"epoch": 0.15, "learning_rate": 1.9983555966400828e-05, "loss": 0.8638, "step": 153},
    {"epoch": 0.15, "learning_rate": 1.9982929761046893e-05, "loss": 0.9318, "step": 154},
    {"epoch": 0.15, "learning_rate": 1.9982291864370284e-05, "loss": 0.9215, "step": 155},
    {"epoch": 0.16, "learning_rate": 1.998164227711807e-05, "loss": 0.9694, "step": 156},
    {"epoch": 0.16, "learning_rate": 1.9980981000051e-05, "loss": 0.9123, "step": 157},
    {"epoch": 0.16, "learning_rate": 1.998030803394351e-05, "loss": 0.7667, "step": 158},
    {"epoch": 0.16, "learning_rate": 1.997962337958374e-05, "loss": 0.7759, "step": 159},
    {"epoch": 0.16, "learning_rate": 1.997892703777351e-05, "loss": 0.888, "step": 160},
    {"epoch": 0.16, "learning_rate": 1.9978219009328323e-05, "loss": 0.8957, "step": 161},
    {"epoch": 0.16, "learning_rate": 1.9977499295077375e-05, "loss": 0.8465, "step": 162},
    {"epoch": 0.16, "learning_rate": 1.9976767895863548e-05, "loss": 0.8462, "step": 163},
    {"epoch": 0.16, "learning_rate": 1.9976024812543408e-05, "loss": 0.7286, "step": 164},
    {"epoch": 0.16, "learning_rate": 1.997527004598719e-05, "loss": 0.7593, "step": 165},
    {"epoch": 0.17, "learning_rate": 1.9974503597078845e-05, "loss": 0.7721, "step": 166},
    {"epoch": 0.17, "learning_rate": 1.997372546671597e-05, "loss": 0.8131, "step": 167},
    {"epoch": 0.17, "learning_rate": 1.9972935655809868e-05, "loss": 0.8082, "step": 168},
    {"epoch": 0.17, "learning_rate": 1.9972134165285504e-05, "loss": 0.837, "step": 169},
    {"epoch": 0.17, "learning_rate": 1.997132099608153e-05, "loss": 0.8104, "step": 170},
    {"epoch": 0.17, "learning_rate": 1.997049614915028e-05, "loss": 1.0434, "step": 171},
    {"epoch": 0.17, "learning_rate": 1.9969659625457752e-05, "loss": 0.9215, "step": 172},
    {"epoch": 0.17, "learning_rate": 1.9968811425983624e-05, "loss": 0.857, "step": 173},
    {"epoch": 0.17, "learning_rate": 1.996795155172125e-05, "loss": 0.8125, "step": 174},
    {"epoch": 0.17, "learning_rate": 1.9967080003677664e-05, "loss": 0.8614, "step": 175},
    {"epoch": 0.18, "learning_rate": 1.9966196782873553e-05, "loss": 0.9722, "step": 176},
    {"epoch": 0.18, "learning_rate": 1.9965301890343293e-05, "loss": 0.86, "step": 177},
    {"epoch": 0.18, "learning_rate": 1.9964395327134913e-05, "loss": 0.9142, "step": 178},
    {"epoch": 0.18, "learning_rate": 1.996347709431012e-05, "loss": 0.921, "step": 179},
    {"epoch": 0.18, "learning_rate": 1.996254719294429e-05, "loss": 0.9794, "step": 180},
    {"epoch": 0.18, "learning_rate": 1.996160562412645e-05, "loss": 1.0082, "step": 181},
    {"epoch": 0.18, "learning_rate": 1.996065238895931e-05, "loss": 0.8259, "step": 182},
    {"epoch": 0.18, "learning_rate": 1.995968748855923e-05, "loss": 0.9337, "step": 183},
    {"epoch": 0.18, "learning_rate": 1.9958710924056238e-05, "loss": 0.9283, "step": 184},
    {"epoch": 0.18, "learning_rate": 1.9957722696594018e-05, "loss": 0.9883, "step": 185},
    {"epoch": 0.19, "learning_rate": 1.9956722807329913e-05, "loss": 0.8241, "step": 186},
    {"epoch": 0.19, "learning_rate": 1.9955711257434918e-05, "loss": 0.8128, "step": 187},
    {"epoch": 0.19, "learning_rate": 1.9954688048093705e-05, "loss": 0.7865, "step": 188},
    {"epoch": 0.19, "learning_rate": 1.9953653180504578e-05, "loss": 0.8958, "step": 189},
    {"epoch": 0.19, "learning_rate": 1.9952606655879504e-05, "loss": 0.8916, "step": 190},
    {"epoch": 0.19, "learning_rate": 1.99515484754441e-05, "loss": 0.9097, "step": 191},
    {"epoch": 0.19, "learning_rate": 1.9950478640437635e-05, "loss": 0.9674, "step": 192},
    {"epoch": 0.19, "eval_loss": 0.8853557109832764, "eval_runtime": 88.747, "eval_samples_per_second": 22.536, "eval_steps_per_second": 11.268, "step": 192},
    {"epoch": 0.19, "learning_rate": 1.994939715211303e-05, "loss": 0.8201, "step": 193},
    {"epoch": 0.19, "learning_rate": 1.9948304011736845e-05, "loss": 0.9528, "step": 194},
    {"epoch": 0.19, "learning_rate": 1.9947199220589292e-05, "loss": 0.7663, "step": 195},
    {"epoch": 0.2, "learning_rate": 1.994608277996423e-05, "loss": 0.8377, "step": 196},
    {"epoch": 0.2, "learning_rate": 1.9944954691169155e-05, "loss": 0.7881, "step": 197},
    {"epoch": 0.2, "learning_rate": 1.994381495552521e-05, "loss": 0.8061, "step": 198},
    {"epoch": 0.2, "learning_rate": 1.9942663574367167e-05, "loss": 0.8536, "step": 199},
    {"epoch": 0.2, "learning_rate": 1.9941500549043452e-05, "loss": 0.9349, "step": 200},
    {"epoch": 0.2, "learning_rate": 1.994032588091612e-05, "loss": 0.8602, "step": 201},
    {"epoch": 0.2, "learning_rate": 1.993913957136086e-05, "loss": 0.9261, "step": 202},
    {"epoch": 0.2, "learning_rate": 1.9937941621766998e-05, "loss": 0.8576, "step": 203},
    {"epoch": 0.2, "learning_rate": 1.993673203353749e-05, "loss": 0.7606, "step": 204},
    {"epoch": 0.2, "learning_rate": 1.9935510808088923e-05, "loss": 0.9837, "step": 205},
    {"epoch": 0.21, "learning_rate": 1.9934277946851508e-05, "loss": 0.8546, "step": 206},
    {"epoch": 0.21, "learning_rate": 1.9933033451269095e-05, "loss": 0.8939, "step": 207},
    {"epoch": 0.21, "learning_rate": 1.9931777322799145e-05, "loss": 0.8029, "step": 208},
    {"epoch": 0.21, "learning_rate": 1.9930509562912753e-05, "loss": 0.8467, "step": 209},
    {"epoch": 0.21, "learning_rate": 1.9929230173094633e-05, "loss": 0.9067, "step": 210},
    {"epoch": 0.21, "learning_rate": 1.9927939154843118e-05, "loss": 0.8858, "step": 211},
    {"epoch": 0.21, "learning_rate": 1.9926636509670158e-05, "loss": 0.9363, "step": 212},
    {"epoch": 0.21, "learning_rate": 1.9925322239101325e-05, "loss": 0.8547, "step": 213},
    {"epoch": 0.21, "learning_rate": 1.9923996344675802e-05, "loss": 0.7235, "step": 214},
    {"epoch": 0.21, "learning_rate": 1.992265882794638e-05, "loss": 0.9026, "step": 215},
    {"epoch": 0.22, "learning_rate": 1.992130969047948e-05, "loss": 0.7974, "step": 216},
    {"epoch": 0.22, "learning_rate": 1.9919948933855102e-05, "loss": 1.0075, "step": 217},
    {"epoch": 0.22, "learning_rate": 1.9918576559666883e-05, "loss": 0.829, "step": 218},
    {"epoch": 0.22, "learning_rate": 1.991719256952205e-05, "loss": 0.7842, "step": 219},
    {"epoch": 0.22, "learning_rate": 1.9915796965041438e-05, "loss": 0.8579, "step": 220},
    {"epoch": 0.22, "learning_rate": 1.991438974785948e-05, "loss": 0.9034, "step": 221},
    {"epoch": 0.22, "learning_rate": 1.9912970919624218e-05, "loss": 0.8598, "step": 222},
    {"epoch": 0.22, "learning_rate": 1.9911540481997282e-05, "loss": 0.8372, "step": 223},
    {"epoch": 0.22, "learning_rate": 1.9910098436653903e-05, "loss": 0.8992, "step": 224},
    {"epoch": 0.22, "learning_rate": 1.9908644785282905e-05, "loss": 0.833, "step": 225},
    {"epoch": 0.23, "learning_rate": 1.990717952958671e-05, "loss": 0.7043, "step": 226},
    {"epoch": 0.23, "learning_rate": 1.990570267128132e-05, "loss": 0.8644, "step": 227},
    {"epoch": 0.23, "learning_rate": 1.990421421209633e-05, "loss": 0.7286, "step": 228},
    {"epoch": 0.23, "learning_rate": 1.990271415377492e-05, "loss": 0.8491, "step": 229},
    {"epoch": 0.23, "learning_rate": 1.9901202498073863e-05, "loss": 0.9551, "step": 230},
    {"epoch": 0.23, "learning_rate": 1.98996792467635e-05, "loss": 0.7676, "step": 231},
    {"epoch": 0.23, "learning_rate": 1.9898144401627755e-05, "loss": 0.8254, "step": 232},
    {"epoch": 0.23, "learning_rate": 1.9896597964464145e-05, "loss": 0.905, "step": 233},
    {"epoch": 0.23, "learning_rate": 1.989503993708374e-05, "loss": 0.9504, "step": 234},
    {"epoch": 0.23, "learning_rate": 1.98934703213112e-05, "loss": 0.9368, "step": 235},
    {"epoch": 0.24, "learning_rate": 1.9891889118984753e-05, "loss": 0.9241, "step": 236},
    {"epoch": 0.24, "learning_rate": 1.989029633195619e-05, "loss": 0.8412, "step": 237},
    {"epoch": 0.24, "learning_rate": 1.9888691962090877e-05, "loss": 0.7261, "step": 238},
    {"epoch": 0.24, "learning_rate": 1.988707601126774e-05, "loss": 0.9309, "step": 239},
    {"epoch": 0.24, "learning_rate": 1.9885448481379272e-05, "loss": 0.9372, "step": 240},
    {"epoch": 0.24, "learning_rate": 1.9883809374331522e-05, "loss": 0.9394, "step": 241},
    {"epoch": 0.24, "learning_rate": 1.98821586920441e-05, "loss": 0.8145, "step": 242},
    {"epoch": 0.24, "learning_rate": 1.9880496436450175e-05, "loss": 0.6903, "step": 243},
    {"epoch": 0.24, "learning_rate": 1.9878822609496466e-05, "loss": 0.8398, "step": 244},
    {"epoch": 0.24, "learning_rate": 1.9877137213143242e-05, "loss": 0.7671, "step": 245},
    {"epoch": 0.25, "learning_rate": 1.9875440249364323e-05, "loss": 1.0069, "step": 246},
    {"epoch": 0.25, "learning_rate": 1.987373172014708e-05, "loss": 0.7955, "step": 247},
    {"epoch": 0.25, "learning_rate": 1.987201162749242e-05, "loss": 0.9728, "step": 248},
    {"epoch": 0.25, "learning_rate": 1.9870279973414804e-05, "loss": 0.8648, "step": 249},
    {"epoch": 0.25, "learning_rate": 1.9868536759942226e-05, "loss": 0.9275, "step": 250},
    {"epoch": 0.25, "learning_rate": 1.9866781989116214e-05, "loss": 0.8277, "step": 251},
    {"epoch": 0.25, "learning_rate": 1.9865015662991832e-05, "loss": 1.0017, "step": 252},
    {"epoch": 0.25, "learning_rate": 1.9863237783637692e-05, "loss": 0.8565, "step": 253},
    {"epoch": 0.25, "learning_rate": 1.9861448353135914e-05, "loss": 0.8547, "step": 254},
    {"epoch": 0.25, "learning_rate": 1.9859647373582157e-05, "loss": 0.9581, "step": 255},
    {"epoch": 0.26, "learning_rate": 1.985783484708561e-05, "loss": 0.8914, "step": 256},
    {"epoch": 0.26, "eval_loss": 0.8704828023910522, "eval_runtime": 88.8069, "eval_samples_per_second": 22.521, "eval_steps_per_second": 11.26, "step": 256},
    {"epoch": 0.26, "learning_rate": 1.9856010775768974e-05, "loss": 1.011, "step": 257},
    {"epoch": 0.26, "learning_rate": 1.9854175161768482e-05, "loss": 0.7789, "step": 258},
    {"epoch": 0.26, "learning_rate": 1.9852328007233878e-05, "loss": 0.8411, "step": 259},
    {"epoch": 0.26, "learning_rate": 1.985046931432842e-05, "loss": 0.7958, "step": 260},
    {"epoch": 0.26, "learning_rate": 1.9848599085228878e-05, "loss": 1.1321, "step": 261},
    {"epoch": 0.26, "learning_rate": 1.9846717322125546e-05, "loss": 0.8746, "step": 262},
    {"epoch": 0.26, "learning_rate": 1.9844824027222208e-05, "loss": 0.9167, "step": 263},
    {"epoch": 0.26, "learning_rate": 1.9842919202736165e-05, "loss": 0.759, "step": 264},
    {"epoch": 0.26, "learning_rate": 1.9841002850898217e-05, "loss": 0.9867, "step": 265},
    {"epoch": 0.27, "learning_rate": 1.983907497395266e-05, "loss": 0.7754, "step": 266},
    {"epoch": 0.27, "learning_rate": 1.9837135574157295e-05, "loss": 0.7645, "step": 267},
    {"epoch": 0.27, "learning_rate": 1.9835184653783413e-05, "loss": 0.8518, "step": 268},
    {"epoch": 0.27, "learning_rate": 1.9833222215115796e-05, "loss": 0.9464, "step": 269},
    {"epoch": 0.27, "learning_rate": 1.983124826045272e-05, "loss": 0.6999, "step": 270},
    {"epoch": 0.27, "learning_rate": 1.9829262792105947e-05, "loss": 0.8434, "step": 271},
    {"epoch": 0.27, "learning_rate": 1.9827265812400718e-05, "loss": 0.7315, "step": 272},
    {"epoch": 0.27, "learning_rate": 1.982525732367576e-05, "loss": 0.9114, "step": 273},
    {"epoch": 0.27, "learning_rate": 1.9823237328283277e-05, "loss": 0.7189, "step": 274},
    {"epoch": 0.27, "learning_rate": 1.9821205828588947e-05, "loss": 0.8595, "step": 275},
    {"epoch": 0.28, "learning_rate": 1.981916282697193e-05, "loss": 0.7794, "step": 276},
    {"epoch": 0.28, "learning_rate": 1.981710832582484e-05, "loss": 0.8662, "step": 277},
    {"epoch": 0.28, "learning_rate": 1.9815042327553774e-05, "loss": 0.7789, "step": 278},
    {"epoch": 0.28, "learning_rate": 1.9812964834578286e-05, "loss": 0.7742, "step": 279},
    {"epoch": 0.28, "learning_rate": 1.981087584933139e-05, "loss": 0.7995, "step": 280},
    {"epoch": 0.28, "learning_rate": 1.980877537425957e-05, "loss": 0.9092, "step": 281},
    {"epoch": 0.28, "learning_rate": 1.9806663411822754e-05, "loss": 0.9108, "step": 282},
    {"epoch": 0.28, "learning_rate": 1.9804539964494323e-05, "loss": 0.8854, "step": 283},
    {"epoch": 0.28, "learning_rate": 1.980240503476112e-05, "loss": 1.0661, "step": 284},
    {"epoch": 0.28, "learning_rate": 1.9800258625123426e-05, "loss": 0.8872, "step": 285},
    {"epoch": 0.29, "learning_rate": 1.979810073809497e-05, "loss": 0.8266, "step": 286},
    {"epoch": 0.29, "learning_rate": 1.9795931376202917e-05, "loss": 0.9558, "step": 287},
    {"epoch": 0.29, "learning_rate": 1.979375054198788e-05, "loss": 0.9615, "step": 288},
    {"epoch": 0.29, "learning_rate": 1.9791558238003895e-05, "loss": 0.9973, "step": 289},
    {"epoch": 0.29, "learning_rate": 1.9789354466818446e-05, "loss": 0.8993, "step": 290},
    {"epoch": 0.29, "learning_rate": 1.9787139231012437e-05, "loss": 0.783, "step": 291},
    {"epoch": 0.29, "learning_rate": 1.9784912533180197e-05, "loss": 0.8554, "step": 292},
    {"epoch": 0.29, "learning_rate": 1.9782674375929483e-05, "loss": 0.9284, "step": 293},
    {"epoch": 0.29, "learning_rate": 1.978042476188147e-05, "loss": 0.8688, "step": 294},
    {"epoch": 0.29, "learning_rate": 1.977816369367076e-05, "loss": 0.7681, "step": 295},
    {"epoch": 0.3, "learning_rate": 1.9775891173945348e-05, "loss": 0.9067, "step": 296},
    {"epoch": 0.3, "learning_rate": 1.9773607205366664e-05, "loss": 0.7519, "step": 297},
    {"epoch": 0.3, "learning_rate": 1.9771311790609533e-05, "loss": 0.9549, "step": 298},
    {"epoch": 0.3, "learning_rate": 1.976900493236218e-05, "loss": 0.985, "step": 299},
    {"epoch": 0.3, "learning_rate": 1.976668663332625e-05, "loss": 0.8651, "step": 300},
    {"epoch": 0.3, "learning_rate": 1.976435689621677e-05, "loss": 0.8236, "step": 301},
    {"epoch": 0.3, "learning_rate": 1.9762015723762165e-05, "loss": 0.8703, "step": 302},
    {"epoch": 0.3, "learning_rate": 1.9759663118704263e-05, "loss": 0.8845, "step": 303},
    {"epoch": 0.3, "learning_rate": 1.975729908379827e-05, "loss": 1.624, "step": 304},
    {"epoch": 0.3, "learning_rate": 1.975492362181278e-05, "loss": 0.7728, "step": 305},
    {"epoch": 0.31, "learning_rate": 1.9752536735529774e-05, "loss": 0.7587, "step": 306},
    {"epoch": 0.31, "learning_rate": 1.9750138427744608e-05, "loss": 0.8409, "step": 307},
    {"epoch": 0.31, "learning_rate": 1.9747728701266015e-05, "loss": 0.8308, "step": 308},
    {"epoch": 0.31, "learning_rate": 1.9745307558916104e-05, "loss": 0.8025, "step": 309},
    {"epoch": 0.31, "learning_rate": 1.974287500353035e-05, "loss": 0.7392, "step": 310},
    {"epoch": 0.31, "learning_rate": 1.9740431037957593e-05, "loss": 0.8281, "step": 311},
    {"epoch": 0.31, "learning_rate": 1.973797566506004e-05, "loss": 0.9947, "step": 312},
    {"epoch": 0.31, "learning_rate": 1.9735508887713252e-05, "loss": 0.8923, "step": 313},
    {"epoch": 0.31, "learning_rate": 1.9733030708806157e-05, "loss": 0.8989, "step": 314},
    {"epoch": 0.31, "learning_rate": 1.973054113124102e-05, "loss": 0.9855, "step": 315},
    {"epoch": 0.32, "learning_rate": 1.9728040157933465e-05, "loss": 0.9106, "step": 316},
    {"epoch": 0.32, "learning_rate": 1.9725527791812464e-05, "loss": 0.7688, "step": 317},
    {"epoch": 0.32, "learning_rate": 1.9723004035820327e-05, "loss": 0.8705, "step": 318},
    {"epoch": 0.32, "learning_rate": 1.97204688929127e-05, "loss": 0.7196, "step": 319},
    {"epoch": 0.32, "learning_rate": 1.9717922366058576e-05, "loss": 0.7742, "step": 320},
    {"epoch": 0.32, "eval_loss": 0.8576830625534058, "eval_runtime": 88.7515, "eval_samples_per_second": 22.535, "eval_steps_per_second": 11.267, "step": 320},
    {"epoch": 0.32, "learning_rate": 1.9715364458240263e-05, "loss": 0.8914, "step": 321},
    {"epoch": 0.32, "learning_rate": 1.9712795172453413e-05, "loss": 0.8865, "step": 322},
    {"epoch": 0.32, "learning_rate": 1.9710214511707e-05, "loss": 0.7747, "step": 323},
    {"epoch": 0.32, "learning_rate": 1.970762247902331e-05, "loss": 0.8571, "step": 324},
    {"epoch": 0.32, "learning_rate": 1.9705019077437958e-05, "loss": 0.7771, "step": 325},
    {"epoch": 0.33, "learning_rate": 1.970240430999987e-05, "loss": 0.7629, "step": 326},
    {"epoch": 0.33, "learning_rate": 1.969977817977128e-05, "loss": 0.9229, "step": 327},
    {"epoch": 0.33, "learning_rate": 1.9697140689827733e-05, "loss": 0.8372, "step": 328},
    {"epoch": 0.33, "learning_rate": 1.9694491843258075e-05, "loss": 0.794, "step": 329},
    {"epoch": 0.33, "learning_rate": 1.9691831643164455e-05, "loss": 0.7288, "step": 330},
    {"epoch": 0.33, "learning_rate": 1.9689160092662316e-05, "loss": 0.8428, "step": 331},
    {"epoch": 0.33, "learning_rate": 1.9686477194880393e-05, "loss": 0.7379, "step": 332},
    {"epoch": 0.33, "learning_rate": 1.9683782952960716e-05, "loss": 0.8157, "step": 333},
    {"epoch": 0.33, "learning_rate": 1.9681077370058595e-05, "loss": 0.7902, "step": 334},
    {"epoch": 0.33, "learning_rate": 1.9678360449342622e-05, "loss": 0.8268, "step": 335},
    {"epoch": 0.34, "learning_rate": 1.9675632193994668e-05, "loss": 0.8701, "step": 336},
    {"epoch": 0.34, "learning_rate": 1.967289260720988e-05, "loss": 0.8959, "step": 337},
    {"epoch": 0.34, "learning_rate": 1.9670141692196672e-05, "loss": 0.8847, "step": 338},
    {"epoch": 0.34, "learning_rate": 1.9667379452176728e-05, "loss": 0.9147, "step": 339},
    {"epoch": 0.34, "learning_rate": 1.9664605890384997e-05, "loss": 0.758, "step": 340},
    {"epoch": 0.34, "learning_rate": 1.9661821010069676e-05, "loss": 0.7882, "step": 341},
    {"epoch": 0.34, "learning_rate": 1.9659024814492237e-05, "loss": 0.8319, "step": 342},
    {"epoch": 0.34, "learning_rate": 1.9656217306927386e-05, "loss": 0.8644, "step": 343},
    {"epoch": 0.34, "learning_rate": 1.965339849066308e-05, "loss": 0.8515, "step": 344},
    {"epoch": 0.34, "learning_rate": 1.9650568369000532e-05, "loss": 0.7463, "step": 345},
    {"epoch": 0.35, "learning_rate": 1.9647726945254182e-05, "loss": 0.9596, "step": 346},
    {"epoch": 0.35, "learning_rate": 1.9644874222751706e-05, "loss": 0.8504, "step": 347},
    {"epoch": 0.35, "learning_rate": 1.9642010204834027e-05, "loss": 0.8578, "step": 348},
    {"epoch": 0.35, "learning_rate": 1.9639134894855278e-05, "loss": 0.9057, "step": 349},
    {"epoch": 0.35, "learning_rate": 1.963624829618283e-05, "loss": 0.9923, "step": 350},
    {"epoch": 0.35, "learning_rate": 1.963335041219726e-05, "loss": 0.9635, "step": 351},
    {"epoch": 0.35, "learning_rate": 1.9630441246292386e-05, "loss": 0.8831, "step": 352},
    {"epoch": 0.35, "learning_rate": 1.9627520801875215e-05, "loss": 0.8529, "step": 353},
    {"epoch": 0.35, "learning_rate": 1.962458908236597e-05, "loss": 0.7638, "step": 354},
    {"epoch": 0.35, "learning_rate": 1.962164609119808e-05, "loss": 0.9547, "step": 355},
    {"epoch": 0.36, "learning_rate": 1.961869183181818e-05, "loss": 0.8013, "step": 356},
    {"epoch": 0.36, "learning_rate": 1.961572630768609e-05, "loss": 0.7823, "step": 357},
    {"epoch": 0.36, "learning_rate": 1.9612749522274834e-05, "loss": 1.0659, "step": 358},
    {"epoch": 0.36, "learning_rate": 1.960976147907061e-05, "loss": 0.8875, "step": 359},
    {"epoch": 0.36, "learning_rate": 1.9606762181572814e-05, "loss": 0.8552, "step": 360},
    {"epoch": 0.36, "learning_rate": 1.9603751633294025e-05, "loss": 0.8147, "step": 361},
    {"epoch": 0.36, "learning_rate": 1.960072983775998e-05, "loss": 0.8681, "step": 362},
    {"epoch": 0.36, "learning_rate": 1.9597696798509604e-05, "loss": 0.8248, "step": 363},
    {"epoch": 0.36, "learning_rate": 1.959465251909498e-05, "loss": 0.8806, "step": 364},
    {"epoch": 0.36, "learning_rate": 1.9591597003081363e-05, "loss": 0.8216, "step": 365},
    {"epoch": 0.37, "learning_rate": 1.9588530254047166e-05, "loss": 0.7652, "step": 366},
    {"epoch": 0.37, "learning_rate": 1.958545227558395e-05, "loss": 0.9276, "step": 367},
    {"epoch": 0.37, "learning_rate": 1.9582363071296436e-05, "loss": 0.7528, "step": 368},
    {"epoch": 0.37, "learning_rate": 1.9579262644802486e-05, "loss": 0.9386, "step": 369},
    {"epoch": 0.37, "learning_rate": 1.9576150999733107e-05, "loss": 0.7804, "step": 370},
    {"epoch": 0.37, "learning_rate": 1.9573028139732447e-05, "loss": 0.8627, "step": 371},
    {"epoch": 0.37, "learning_rate": 1.9569894068457783e-05, "loss": 0.8881, "step": 372},
    {"epoch": 0.37, "learning_rate": 1.9566748789579527e-05, "loss": 0.8651, "step": 373},
    {"epoch": 0.37, "learning_rate": 1.9563592306781207e-05, "loss": 0.8104, "step": 374},
    {"epoch": 0.37, "learning_rate": 1.956042462375949e-05, "loss": 0.7141, "step": 375},
    {"epoch": 0.38, "learning_rate": 1.955724574422414e-05, "loss": 0.847, "step": 376},
    {"epoch": 0.38, "learning_rate": 1.955405567189804e-05, "loss": 0.8132, "step": 377},
    {"epoch": 0.38, "learning_rate": 1.9550854410517198e-05, "loss": 0.6904, "step": 378},
    {"epoch": 0.38, "learning_rate": 1.9547641963830698e-05, "loss": 0.7857, "step": 379},
    {"epoch": 0.38, "learning_rate": 1.9544418335600747e-05, "loss": 0.8009, "step": 380},
    {"epoch": 0.38, "learning_rate": 1.9541183529602626e-05, "loss": 0.8208, "step": 381},
    {"epoch": 0.38, "learning_rate": 1.9537937549624734e-05, "loss": 0.9208, "step": 382},
    {"epoch": 0.38, "learning_rate": 1.953468039946852e-05, "loss": 0.773, "step": 383},
    {"epoch": 0.38, "learning_rate": 1.9531412082948554e-05, "loss": 0.8532, "step": 384},
    {"epoch": 0.38, "eval_loss": 0.8470838665962219, "eval_runtime": 88.8518, "eval_samples_per_second": 22.509, "eval_steps_per_second": 11.255, "step": 384},
    {"epoch": 0.38, "learning_rate": 1.9528132603892456e-05, "loss": 0.8237, "step": 385},
    {"epoch": 0.39, "learning_rate": 1.9524841966140923e-05, "loss": 0.9781, "step": 386},
    {"epoch": 0.39, "learning_rate": 1.952154017354773e-05, "loss": 0.8461, "step": 387},
    {"epoch": 0.39, "learning_rate": 1.951822722997971e-05, "loss": 0.6528, "step": 388},
    {"epoch": 0.39, "learning_rate": 1.9514903139316754e-05, "loss": 0.7626, "step": 389},
    {"epoch": 0.39, "learning_rate": 1.9511567905451804e-05, "loss": 0.9037, "step": 390},
    {"epoch": 0.39, "learning_rate": 1.9508221532290864e-05, "loss": 0.802, "step": 391},
    {"epoch": 0.39, "learning_rate": 1.9504864023752974e-05, "loss": 0.7549, "step": 392},
    {"epoch": 0.39, "learning_rate": 1.950149538377022e-05, "loss": 0.8103, "step": 393},
    {"epoch": 0.39, "learning_rate": 1.949811561628772e-05, "loss": 0.8518, "step": 394},
    {"epoch": 0.39, "learning_rate": 1.9494724725263623e-05, "loss": 0.7709, "step": 395},
    {"epoch": 0.4, "learning_rate": 1.9491322714669115e-05, "loss": 0.7942, "step": 396},
    {"epoch": 0.4, "learning_rate": 1.9487909588488393e-05, "loss": 0.9106, "step": 397},
    {"epoch": 0.4, "learning_rate": 1.9484485350718673e-05, "loss": 0.9729, "step": 398},
    {"epoch": 0.4, "learning_rate": 1.9481050005370194e-05, "loss": 0.7691, "step": 399},
    {"epoch": 0.4, "learning_rate": 1.9477603556466195e-05, "loss": 0.8142, "step": 400},
    {"epoch": 0.4, "learning_rate": 1.947414600804292e-05, "loss": 0.8069, "step": 401},
    {"epoch": 0.4, "learning_rate": 1.947067736414961e-05, "loss": 0.8064, "step": 402},
    {"epoch": 0.4, "learning_rate": 1.946719762884851e-05, "loss": 0.8362, "step": 403},
    {"epoch": 0.4, "learning_rate": 1.946370680621484e-05, "loss": 0.9664, "step": 404},
    {"epoch": 0.4, "learning_rate": 1.9460204900336813e-05, "loss": 0.7727, "step": 405},
    {"epoch": 0.41, "learning_rate": 1.9456691915315626e-05, "loss": 0.8621, "step": 406},
    {"epoch": 0.41, "learning_rate": 1.9453167855265442e-05, "loss": 0.8006, "step": 407},
    {"epoch": 0.41, "learning_rate": 1.9449632724313393e-05, "loss": 0.8032, "step": 408},
    {"epoch": 0.41, "learning_rate": 1.9446086526599596e-05, "loss": 0.8343, "step": 409},
    {"epoch": 0.41, "learning_rate": 1.9442529266277104e-05, "loss": 0.79, "step": 410},
    {"epoch": 0.41, "learning_rate": 1.9438960947511938e-05, "loss": 0.6789, "step": 411},
    {"epoch": 0.41, "learning_rate": 1.9435381574483066e-05, "loss": 0.9176, "step": 412},
    {"epoch": 0.41, "learning_rate": 1.9431791151382414e-05, "loss": 1.098, "step": 413},
    {"epoch": 0.41, "learning_rate": 1.942818968241483e-05, "loss": 0.6833, "step": 414},
    {"epoch": 0.41, "learning_rate": 1.942457717179811e-05, "loss": 0.8131, "step": 415},
    {"epoch": 0.42, "learning_rate": 1.9420953623762978e-05, "loss": 1.0117, "step": 416},
    {"epoch": 0.42, "learning_rate": 1.941731904255309e-05, "loss": 0.8052, "step": 417},
    {"epoch": 0.42, "learning_rate": 1.941367343242501e-05, "loss": 0.6806, "step": 418},
    {"epoch": 0.42, "learning_rate": 1.941001679764823e-05, "loss": 0.8556, "step": 419},
    {"epoch": 0.42, "learning_rate": 1.9406349142505153e-05, "loss": 0.8597, "step": 420},
    {"epoch": 0.42, "learning_rate": 1.9402670471291078e-05, "loss": 0.945, "step": 421},
    {"epoch": 0.42, "learning_rate": 1.9398980788314218e-05, "loss": 0.9914, "step": 422},
    {"epoch": 0.42, "learning_rate": 1.9395280097895683e-05, "loss": 0.9351, "step": 423},
    {"epoch": 0.42, "learning_rate": 1.939156840436945e-05, "loss": 0.8328, "step": 424},
    {"epoch": 0.42, "learning_rate": 1.938784571208242e-05, "loss": 0.9441, "step": 425},
    {"epoch": 0.43, "learning_rate": 1.9384112025394338e-05, "loss": 0.7824, "step": 426},
    {"epoch": 0.43, "learning_rate": 1.9380367348677855e-05, "loss": 0.8984, "step": 427},
    {"epoch": 0.43, "learning_rate": 1.9376611686318468e-05, "loss": 0.8352, "step": 428},
    {"epoch": 0.43, "learning_rate": 1.937284504271456e-05, "loss": 0.8329, "step": 429},
    {"epoch": 0.43, "learning_rate": 1.936906742227736e-05, "loss": 0.7788, "step": 430},
    {"epoch": 0.43, "learning_rate": 1.936527882943096e-05, "loss": 0.7815, "step": 431},
    {"epoch": 0.43, "learning_rate": 1.93614792686123e-05, "loss": 0.8727, "step": 432},
    {"epoch": 0.43, "learning_rate": 1.9357668744271173e-05, "loss": 0.873, "step": 433},
    {"epoch": 0.43, "learning_rate": 1.9353847260870193e-05, "loss": 0.7234, "step": 434},
    {"epoch": 0.43, "learning_rate": 1.9350014822884818e-05, "loss": 0.725, "step": 435},
    {"epoch": 0.44, "learning_rate": 1.9346171434803342e-05, "loss": 0.8242, "step": 436},
    {"epoch": 0.44, "learning_rate": 1.934231710112688e-05, "loss": 0.7766, "step": 437},
    {"epoch": 0.44, "learning_rate": 1.933845182636936e-05, "loss": 0.7384, "step": 438},
    {"epoch": 0.44, "learning_rate": 1.9334575615057523e-05, "loss": 0.9487, "step": 439},
    {"epoch": 0.44, "learning_rate": 1.933068847173093e-05, "loss": 0.886, "step": 440},
    {"epoch": 0.44, "learning_rate": 1.9326790400941926e-05, "loss": 0.6873, "step": 441},
    {"epoch": 0.44, "learning_rate": 1.9322881407255673e-05, "loss": 0.7381, "step": 442},
    {"epoch": 0.44, "learning_rate": 1.9318961495250118e-05, "loss": 0.8391, "step": 443},
    {"epoch": 0.44, "learning_rate": 1.9315030669515983e-05, "loss": 0.7742, "step": 444},
    {"epoch": 0.44, "learning_rate": 1.931108893465679e-05, "loss": 0.7162, "step": 445},
    {"epoch": 0.45, "learning_rate": 1.9307136295288823e-05, "loss": 0.6951, "step": 446},
    {"epoch": 0.45, "learning_rate": 1.9303172756041146e-05, "loss": 0.756, "step": 447},
    {"epoch": 0.45, "learning_rate": 1.9299198321555585e-05, "loss": 0.7503, "step": 448},
    {"epoch": 0.45, "eval_loss": 0.8380163311958313, "eval_runtime": 88.8662, "eval_samples_per_second": 22.506, "eval_steps_per_second": 11.253, "step": 448},
    {"epoch": 0.45, "learning_rate": 1.929521299648672e-05, "loss": 0.7756, "step": 449},
    {"epoch": 0.45, "learning_rate": 1.92912167855019e-05, "loss": 0.8496, "step": 450},
    {"epoch": 0.45, "learning_rate": 1.92872096932812e-05, "loss": 0.8852, "step": 451},
    {"epoch": 0.45, "learning_rate": 1.9283191724517462e-05, "loss": 0.727, "step": 452},
    {"epoch": 0.45, "learning_rate": 1.927916288391625e-05, "loss": 0.7063, "step": 453},
    {"epoch": 0.45, "learning_rate": 1.9275123176195868e-05, "loss": 0.8145, "step": 454},
    {"epoch": 0.45, "learning_rate": 1.9271072606087347e-05, "loss": 0.8883, "step": 455},
    {"epoch": 0.46, "learning_rate": 1.9267011178334432e-05, "loss": 0.8512, "step": 456},
    {"epoch": 0.46, "learning_rate": 1.926293889769359e-05, "loss": 0.8944, "step": 457},
    {"epoch": 0.46, "learning_rate": 1.9258855768934e-05, "loss": 0.7907, "step": 458},
    {"epoch": 0.46, "learning_rate": 1.9254761796837535e-05, "loss": 0.8282, "step": 459},
    {"epoch": 0.46, "learning_rate": 1.9250656986198785e-05, "loss": 0.8938, "step": 460},
    {"epoch": 0.46, "learning_rate": 1.9246541341825016e-05, "loss": 0.8208, "step": 461},
    {"epoch": 0.46, "learning_rate": 1.924241486853619e-05, "loss": 0.8071, "step": 462},
    {"epoch": 0.46, "learning_rate": 1.9238277571164947e-05, "loss": 0.8381, "step": 463},
    {"epoch": 0.46, "learning_rate": 1.9234129454556608e-05, "loss": 0.8005, "step": 464},
    {"epoch": 0.46, "learning_rate": 1.9229970523569163e-05, "loss": 0.8503, "step": 465},
    {"epoch": 0.47, "learning_rate": 1.9225800783073267e-05, "loss": 0.8601, "step": 466},
    {"epoch": 0.47, "learning_rate": 1.9221620237952233e-05, "loss": 0.9007, "step": 467},
    {"epoch": 0.47, "learning_rate": 1.921742889310203e-05, "loss": 0.8161, "step": 468},
    {"epoch": 0.47, "learning_rate": 1.9213226753431272e-05, "loss": 0.868, "step": 469},
    {"epoch": 0.47, "learning_rate": 1.9209013823861217e-05, "loss": 0.7736, "step": 470},
    {"epoch": 0.47, "learning_rate": 1.9204790109325762e-05, "loss": 0.6409, "step": 471},
    {"epoch": 0.47, "learning_rate": 1.9200555614771428e-05, "loss": 0.8424, "step": 472},
    {"epoch": 0.47, "learning_rate": 1.9196310345157366e-05, "loss": 0.8458, "step": 473},
    {"epoch": 0.47, "learning_rate": 1.9192054305455347e-05, "loss": 0.8427, "step": 474},
    {"epoch": 0.47, "learning_rate": 1.9187787500649747e-05, "loss": 0.8084, "step": 475},
    {"epoch": 0.48, "learning_rate": 1.918350993573756e-05, "loss": 0.8328, "step": 476},
    {"epoch": 0.48, "learning_rate": 1.917922161572838e-05, "loss": 0.7588, "step": 477},
    {"epoch": 0.48, "learning_rate": 1.9174922545644383e-05, "loss": 0.6648, "step": 478},
    {"epoch": 0.48, "learning_rate": 1.9170612730520352e-05, "loss": 0.9953, "step": 479},
    {"epoch": 0.48, "learning_rate": 1.9166292175403645e-05, "loss": 0.7162, "step": 480},
    {"epoch": 0.48, "learning_rate": 1.9161960885354203e-05, "loss": 0.8221, "step": 481},
    {"epoch": 0.48, "learning_rate": 1.915761886544453e-05, "loss": 0.8139, "step": 482},
    {"epoch": 0.48, "learning_rate": 1.9153266120759707e-05, "loss": 0.9388, "step": 483},
    {"epoch": 0.48, "learning_rate": 1.914890265639737e-05, "loss": 0.8109, "step": 484},
    {"epoch": 0.48, "learning_rate": 1.9144528477467705e-05, "loss": 0.6884, "step": 485},
    {"epoch": 0.49, "learning_rate": 1.9140143589093452e-05, "loss": 0.7648, "step": 486},
    {"epoch": 0.49, "learning_rate": 1.9135747996409893e-05, "loss": 0.8567, "step": 487},
    {"epoch": 0.49, "learning_rate": 1.9131341704564845e-05, "loss": 0.8423, "step": 488},
    {"epoch": 0.49, "learning_rate": 1.9126924718718657e-05, "loss": 0.8746, "step": 489},
    {"epoch": 0.49, "learning_rate": 1.912249704404419e-05, "loss": 0.9769, "step": 490},
    {"epoch": 0.49, "learning_rate": 1.9118058685726843e-05, "loss": 0.8649, "step": 491},
    {"epoch": 0.49, "learning_rate": 1.9113609648964515e-05, "loss": 0.7338, "step": 492},
    {"epoch": 0.49, "learning_rate": 1.910914993896761e-05, "loss": 0.7923, "step": 493},
    {"epoch": 0.49, "learning_rate": 1.9104679560959038e-05, "loss": 0.6997, "step": 494},
    {"epoch": 0.49, "learning_rate": 1.910019852017419e-05, "loss": 0.735, "step": 495},
    {"epoch": 0.5, "learning_rate": 1.9095706821860965e-05, "loss": 0.858, "step": 496},
    {"epoch": 0.5, "learning_rate": 1.909120447127973e-05, "loss": 0.9726, "step": 497},
    {"epoch": 0.5, "learning_rate": 1.908669147370332e-05, "loss": 0.6871, "step": 498},
    {"epoch": 0.5, "learning_rate": 1.9082167834417064e-05, "loss": 0.6674, "step": 499},
    {"epoch": 0.5, "learning_rate": 1.9077633558718723e-05, "loss": 0.9081, "step": 500},
    {"epoch": 0.5, "learning_rate": 1.9073088651918534e-05, "loss": 0.7664, "step": 501},
    {"epoch": 0.5, "learning_rate": 1.906853311933919e-05, "loss": 0.7947, "step": 502},
    {"epoch": 0.5, "learning_rate": 1.9063966966315802e-05, "loss": 0.8494, "step": 503},
    {"epoch": 0.5, "learning_rate": 1.9059390198195947e-05, "loss": 0.7399, "step": 504},
    {"epoch": 0.5, "learning_rate": 1.9054802820339617e-05, "loss": 0.8816, "step": 505},
    {"epoch": 0.51, "learning_rate": 1.9050204838119237e-05, "loss": 0.8472, "step": 506},
    {"epoch": 0.51, "learning_rate": 1.9045596256919647e-05, "loss": 0.843, "step": 507},
    {"epoch": 0.51, "learning_rate": 1.9040977082138103e-05, "loss": 0.7942, "step": 508},
    {"epoch": 0.51, "learning_rate": 1.9036347319184262e-05, "loss": 0.9324, "step": 509},
    {"epoch": 0.51, "learning_rate": 1.9031706973480183e-05, "loss": 0.973, "step": 510},
    {"epoch": 0.51, "learning_rate": 1.902705605046033e-05, "loss": 0.7232, "step": 511},
    {"epoch": 0.51, "learning_rate": 1.902239455557154e-05, "loss": 0.8105, "step": 512},
    {"epoch": 0.51, "eval_loss": 0.8321256637573242, "eval_runtime": 88.8099, "eval_samples_per_second": 22.52, "eval_steps_per_second": 11.26, "step": 512},
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.9017722494273035e-05, |
|
"loss": 0.7939, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.901303987203641e-05, |
|
"loss": 0.8154, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.900834669434564e-05, |
|
"loss": 0.6629, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.9003642966697046e-05, |
|
"loss": 0.8167, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.8998928694599308e-05, |
|
"loss": 0.7762, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.8994203883573468e-05, |
|
"loss": 0.8324, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.8989468539152894e-05, |
|
"loss": 0.7046, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.8984722666883298e-05, |
|
"loss": 0.7878, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.8979966272322717e-05, |
|
"loss": 0.8183, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.897519936104152e-05, |
|
"loss": 0.829, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.8970421938622384e-05, |
|
"loss": 0.8358, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.89656340106603e-05, |
|
"loss": 0.8195, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.8960835582762556e-05, |
|
"loss": 0.9304, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.8956026660548744e-05, |
|
"loss": 0.7643, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.895120724965075e-05, |
|
"loss": 1.0905, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.8946377355712725e-05, |
|
"loss": 0.8015, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.894153698439112e-05, |
|
"loss": 0.8747, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.8936686141354643e-05, |
|
"loss": 0.8012, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.8931824832284267e-05, |
|
"loss": 0.7018, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.8926953062873224e-05, |
|
"loss": 0.899, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.8922070838827e-05, |
|
"loss": 1.0892, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.891717816586332e-05, |
|
"loss": 0.675, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.891227504971214e-05, |
|
"loss": 0.7579, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.8907361496115665e-05, |
|
"loss": 0.7148, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.8902437510828303e-05, |
|
"loss": 0.8114, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.8897503099616687e-05, |
|
"loss": 0.8054, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.889255826825967e-05, |
|
"loss": 0.746, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.8887603022548288e-05, |
|
"loss": 0.8464, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.8882637368285794e-05, |
|
"loss": 0.7673, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.8877661311287617e-05, |
|
"loss": 0.6507, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.8872674857381378e-05, |
|
"loss": 0.7847, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.8867678012406863e-05, |
|
"loss": 0.8659, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.886267078221604e-05, |
|
"loss": 0.7525, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.8857653172673035e-05, |
|
"loss": 0.813, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.885262518965412e-05, |
|
"loss": 0.7647, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.8847586839047728e-05, |
|
"loss": 0.9482, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.8842538126754434e-05, |
|
"loss": 0.9578, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.883747905868694e-05, |
|
"loss": 0.7448, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.8832409640770075e-05, |
|
"loss": 0.856, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.8827329878940805e-05, |
|
"loss": 0.7813, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.8822239779148187e-05, |
|
"loss": 0.9091, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.8817139347353402e-05, |
|
"loss": 0.6702, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.8812028589529726e-05, |
|
"loss": 0.8261, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.8806907511662527e-05, |
|
"loss": 0.8176, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.8801776119749264e-05, |
|
"loss": 0.7524, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.879663441979946e-05, |
|
"loss": 1.1004, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.8791482417834738e-05, |
|
"loss": 0.8084, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.8786320119888753e-05, |
|
"loss": 0.7584, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.878114753200725e-05, |
|
"loss": 0.9296, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.8775964660247996e-05, |
|
"loss": 0.7239, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.877077151068082e-05, |
|
"loss": 0.8256, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.8765568089387588e-05, |
|
"loss": 0.8633, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.8760354402462177e-05, |
|
"loss": 0.8125, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.8755130456010512e-05, |
|
"loss": 0.7046, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.8749896256150516e-05, |
|
"loss": 0.8327, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.8744651809012128e-05, |
|
"loss": 0.7625, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.8739397120737276e-05, |
|
"loss": 0.64, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.87341321974799e-05, |
|
"loss": 0.8003, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.872885704540591e-05, |
|
"loss": 0.8428, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.872357167069321e-05, |
|
"loss": 0.695, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.871827607953166e-05, |
|
"loss": 0.9279, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.8712970278123102e-05, |
|
"loss": 0.8376, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.8707654272681317e-05, |
|
"loss": 0.8851, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.8702328069432055e-05, |
|
"loss": 0.8739, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"eval_loss": 0.8237754702568054, |
|
"eval_runtime": 88.755, |
|
"eval_samples_per_second": 22.534, |
|
"eval_steps_per_second": 11.267, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.8696991674612993e-05, |
|
"loss": 0.8256, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.8691645094473758e-05, |
|
"loss": 0.9051, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.868628833527589e-05, |
|
"loss": 0.7162, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.868092140329286e-05, |
|
"loss": 0.999, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.8675544304810056e-05, |
|
"loss": 0.8956, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.867015704612476e-05, |
|
"loss": 0.7452, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.8664759633546166e-05, |
|
"loss": 0.868, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.865935207339535e-05, |
|
"loss": 0.9255, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.8653934372005277e-05, |
|
"loss": 0.7148, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.8648506535720783e-05, |
|
"loss": 0.9121, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.8643068570898584e-05, |
|
"loss": 0.7663, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.8637620483907247e-05, |
|
"loss": 0.7213, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.8632162281127196e-05, |
|
"loss": 0.7209, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.8626693968950712e-05, |
|
"loss": 0.8532, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.86212155537819e-05, |
|
"loss": 0.7439, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.861572704203671e-05, |
|
"loss": 0.6527, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.8610228440142904e-05, |
|
"loss": 0.9329, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.8604719754540067e-05, |
|
"loss": 0.923, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.85992009916796e-05, |
|
"loss": 0.7381, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.8593672158024697e-05, |
|
"loss": 0.7204, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.8588133260050346e-05, |
|
"loss": 0.6572, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.858258430424333e-05, |
|
"loss": 0.6991, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.8577025297102204e-05, |
|
"loss": 0.8658, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.857145624513729e-05, |
|
"loss": 0.8149, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.8565877154870686e-05, |
|
"loss": 0.8789, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.8560288032836235e-05, |
|
"loss": 0.7343, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.855468888557954e-05, |
|
"loss": 0.7667, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.8549079719657933e-05, |
|
"loss": 0.7589, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.8543460541640487e-05, |
|
"loss": 0.7222, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.8537831358107995e-05, |
|
"loss": 0.7407, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.8532192175652973e-05, |
|
"loss": 0.7959, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.852654300087964e-05, |
|
"loss": 0.7504, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.8520883840403933e-05, |
|
"loss": 0.8011, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.851521470085346e-05, |
|
"loss": 0.6863, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.850953558886754e-05, |
|
"loss": 0.827, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.8503846511097153e-05, |
|
"loss": 0.882, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.8498147474204955e-05, |
|
"loss": 0.7463, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.849243848486527e-05, |
|
"loss": 0.9459, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.8486719549764077e-05, |
|
"loss": 0.8591, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.8480990675598995e-05, |
|
"loss": 0.8529, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.8475251869079292e-05, |
|
"loss": 0.9178, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.8469503136925862e-05, |
|
"loss": 0.9267, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.8463744485871232e-05, |
|
"loss": 0.7316, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.8457975922659535e-05, |
|
"loss": 0.7726, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.8452197454046517e-05, |
|
"loss": 0.8172, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.844640908679952e-05, |
|
"loss": 0.6132, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.844061082769749e-05, |
|
"loss": 0.7698, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.8434802683530947e-05, |
|
"loss": 0.7798, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.842898466110199e-05, |
|
"loss": 0.881, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.8423156767224286e-05, |
|
"loss": 0.8275, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.841731900872307e-05, |
|
"loss": 0.7459, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.8411471392435124e-05, |
|
"loss": 0.9518, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.840561392520877e-05, |
|
"loss": 0.8184, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.8399746613903878e-05, |
|
"loss": 0.9292, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.8393869465391833e-05, |
|
"loss": 0.6926, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.8387982486555557e-05, |
|
"loss": 0.6586, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.8382085684289472e-05, |
|
"loss": 0.6964, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.83761790654995e-05, |
|
"loss": 0.9562, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.8370262637103087e-05, |
|
"loss": 0.8209, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.836433640602913e-05, |
|
"loss": 0.6311, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.8358400379218033e-05, |
|
"loss": 0.7725, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.8352454563621664e-05, |
|
"loss": 0.6367, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.834649896620335e-05, |
|
"loss": 0.8048, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.834053359393788e-05, |
|
"loss": 0.7057, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"eval_loss": 0.8161675930023193, |
|
"eval_runtime": 88.8512, |
|
"eval_samples_per_second": 22.51, |
|
"eval_steps_per_second": 11.255, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.8334558453811492e-05, |
|
"loss": 0.8085, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.832857355282186e-05, |
|
"loss": 0.8123, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.8322578897978082e-05, |
|
"loss": 0.8445, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.8316574496300692e-05, |
|
"loss": 0.8441, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.8310560354821635e-05, |
|
"loss": 0.7833, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.830453648058426e-05, |
|
"loss": 0.7952, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.8298502880643316e-05, |
|
"loss": 0.7673, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.829245956206494e-05, |
|
"loss": 0.8559, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.828640653192665e-05, |
|
"loss": 0.7095, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.8280343797317344e-05, |
|
"loss": 0.7709, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.8274271365337277e-05, |
|
"loss": 0.7622, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.826818924309807e-05, |
|
"loss": 0.6762, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.8262097437722684e-05, |
|
"loss": 0.7914, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.825599595634542e-05, |
|
"loss": 0.817, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.8249884806111927e-05, |
|
"loss": 0.797, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.824376399417915e-05, |
|
"loss": 0.7553, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.8237633527715373e-05, |
|
"loss": 0.7762, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.8231493413900177e-05, |
|
"loss": 0.8008, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.822534365992444e-05, |
|
"loss": 0.8322, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.8219184272990333e-05, |
|
"loss": 0.8479, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.8213015260311305e-05, |
|
"loss": 0.823, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.8206836629112087e-05, |
|
"loss": 0.8383, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.8200648386628664e-05, |
|
"loss": 0.8086, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.819445054010828e-05, |
|
"loss": 0.7702, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.818824309680943e-05, |
|
"loss": 0.8369, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.8182026064001844e-05, |
|
"loss": 0.7653, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.8175799448966483e-05, |
|
"loss": 0.6766, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.8169563258995538e-05, |
|
"loss": 0.7201, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.81633175013924e-05, |
|
"loss": 0.8977, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.8157062183471682e-05, |
|
"loss": 0.7354, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.8150797312559167e-05, |
|
"loss": 0.8104, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.8144522895991857e-05, |
|
"loss": 0.7339, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.813823894111791e-05, |
|
"loss": 0.8117, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.8131945455296667e-05, |
|
"loss": 0.9314, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.8125642445898628e-05, |
|
"loss": 0.6697, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.8119329920305438e-05, |
|
"loss": 0.7683, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.8113007885909903e-05, |
|
"loss": 0.9921, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.810667635011595e-05, |
|
"loss": 0.6722, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.8100335320338644e-05, |
|
"loss": 0.8275, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.8093984804004157e-05, |
|
"loss": 0.7587, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.808762480854979e-05, |
|
"loss": 0.8, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.8081255341423924e-05, |
|
"loss": 0.7504, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.807487641008604e-05, |
|
"loss": 0.6742, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.8068488022006714e-05, |
|
"loss": 0.8, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.8062090184667586e-05, |
|
"loss": 0.759, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.8055682905561353e-05, |
|
"loss": 0.8355, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.8049266192191797e-05, |
|
"loss": 0.7003, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.804284005207372e-05, |
|
"loss": 0.6923, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.8036404492732987e-05, |
|
"loss": 0.7639, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.8029959521706474e-05, |
|
"loss": 0.7846, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.80235051465421e-05, |
|
"loss": 0.8217, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.8017041374798773e-05, |
|
"loss": 0.7391, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.801056821404643e-05, |
|
"loss": 0.7639, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.8004085671865987e-05, |
|
"loss": 1.0368, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.7997593755849355e-05, |
|
"loss": 0.8913, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.7991092473599424e-05, |
|
"loss": 0.8668, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.7984581832730042e-05, |
|
"loss": 0.7934, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.7978061840866036e-05, |
|
"loss": 0.7442, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.7971532505643162e-05, |
|
"loss": 0.821, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.7964993834708132e-05, |
|
"loss": 0.7814, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.79584458357186e-05, |
|
"loss": 1.0184, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.795188851634312e-05, |
|
"loss": 0.8157, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.7945321884261177e-05, |
|
"loss": 0.9535, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.7938745947163166e-05, |
|
"loss": 0.9507, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"eval_loss": 0.8084772825241089, |
|
"eval_runtime": 88.8087, |
|
"eval_samples_per_second": 22.52, |
|
"eval_steps_per_second": 11.26, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.793216071275037e-05, |
|
"loss": 0.6816, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.7925566188734963e-05, |
|
"loss": 0.7804, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.7918962382840005e-05, |
|
"loss": 0.6319, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.791234930279941e-05, |
|
"loss": 0.7329, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.7905726956357973e-05, |
|
"loss": 0.7394, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.789909535127133e-05, |
|
"loss": 0.8789, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.7892454495305958e-05, |
|
"loss": 0.6921, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.7885804396239175e-05, |
|
"loss": 0.8274, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.7879145061859124e-05, |
|
"loss": 0.8109, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.787247649996475e-05, |
|
"loss": 0.6683, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.7865798718365833e-05, |
|
"loss": 0.8605, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.7859111724882915e-05, |
|
"loss": 0.7162, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.7852415527347354e-05, |
|
"loss": 0.8672, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.784571013360128e-05, |
|
"loss": 0.8368, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.7838995551497578e-05, |
|
"loss": 0.73, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.7832271788899917e-05, |
|
"loss": 0.7813, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.78255388536827e-05, |
|
"loss": 0.7463, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.7818796753731088e-05, |
|
"loss": 0.8529, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.7812045496940953e-05, |
|
"loss": 0.8398, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.7805285091218915e-05, |
|
"loss": 0.7883, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.7798515544482293e-05, |
|
"loss": 0.6891, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.7791736864659116e-05, |
|
"loss": 0.6995, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.7784949059688112e-05, |
|
"loss": 0.8135, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.7778152137518686e-05, |
|
"loss": 0.7637, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.7771346106110934e-05, |
|
"loss": 0.7548, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.776453097343561e-05, |
|
"loss": 0.7701, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.775770674747413e-05, |
|
"loss": 0.7618, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.7750873436218555e-05, |
|
"loss": 0.7747, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.77440310476716e-05, |
|
"loss": 0.7513, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.7737179589846595e-05, |
|
"loss": 0.8159, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.77303190707675e-05, |
|
"loss": 0.7794, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.7723449498468882e-05, |
|
"loss": 0.7955, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.7716570880995913e-05, |
|
"loss": 0.8281, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.7709683226404362e-05, |
|
"loss": 0.7063, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.7702786542760574e-05, |
|
"loss": 0.769, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.769588083814147e-05, |
|
"loss": 0.797, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.7688966120634544e-05, |
|
"loss": 0.792, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.7682042398337832e-05, |
|
"loss": 0.7019, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.767510967935993e-05, |
|
"loss": 0.906, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.7668167971819956e-05, |
|
"loss": 0.8094, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.7661217283847567e-05, |
|
"loss": 0.868, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.7654257623582925e-05, |
|
"loss": 0.7853, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.7647288999176717e-05, |
|
"loss": 0.7488, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.764031141879011e-05, |
|
"loss": 0.8747, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.7633324890594773e-05, |
|
"loss": 0.7876, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.7626329422772843e-05, |
|
"loss": 0.7974, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.7619325023516936e-05, |
|
"loss": 0.8486, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.7612311701030123e-05, |
|
"loss": 0.8405, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.7605289463525925e-05, |
|
"loss": 0.6657, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.7598258319228306e-05, |
|
"loss": 0.8031, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.7591218276371663e-05, |
|
"loss": 0.8462, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.7584169343200805e-05, |
|
"loss": 0.7834, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.7577111527970966e-05, |
|
"loss": 0.7107, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.757004483894777e-05, |
|
"loss": 0.8267, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.756296928440724e-05, |
|
"loss": 0.8583, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.755588487263578e-05, |
|
"loss": 0.743, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.7548791611930173e-05, |
|
"loss": 0.7809, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.7541689510597547e-05, |
|
"loss": 0.706, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.7534578576955404e-05, |
|
"loss": 0.688, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.7527458819331588e-05, |
|
"loss": 0.7293, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.7520330246064258e-05, |
|
"loss": 0.9841, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.7513192865501916e-05, |
|
"loss": 0.7393, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.7506046686003375e-05, |
|
"loss": 0.7296, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.7498891715937744e-05, |
|
"loss": 0.8461, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"eval_loss": 0.8009968400001526, |
|
"eval_runtime": 88.857, |
|
"eval_samples_per_second": 22.508, |
|
"eval_steps_per_second": 11.254, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.7491727963684442e-05, |
|
"loss": 0.7201, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.7484555437633157e-05, |
|
"loss": 0.6904, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.7477374146183862e-05, |
|
"loss": 0.7647, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.7470184097746793e-05, |
|
"loss": 0.8107, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.746298530074244e-05, |
|
"loss": 0.8447, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.7455777763601544e-05, |
|
"loss": 0.8187, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.7448561494765078e-05, |
|
"loss": 0.735, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.7441336502684236e-05, |
|
"loss": 0.7639, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.743410279582044e-05, |
|
"loss": 0.8614, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.7426860382645302e-05, |
|
"loss": 0.8216, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.741960927164065e-05, |
|
"loss": 0.8295, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.741234947129848e-05, |
|
"loss": 0.7761, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.740508099012098e-05, |
|
"loss": 0.8462, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.739780383662049e-05, |
|
"loss": 0.7543, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.7390518019319512e-05, |
|
"loss": 0.7176, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.7383223546750704e-05, |
|
"loss": 0.8237, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.7375920427456846e-05, |
|
"loss": 0.6964, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.7368608669990847e-05, |
|
"loss": 0.6851, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.7361288282915743e-05, |
|
"loss": 0.7936, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.735395927480467e-05, |
|
"loss": 0.8793, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.734662165424085e-05, |
|
"loss": 0.8362, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.733927542981761e-05, |
|
"loss": 0.8331, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.7331920610138344e-05, |
|
"loss": 0.7247, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.732455720381651e-05, |
|
"loss": 0.6166, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.7317185219475618e-05, |
|
"loss": 0.8475, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.730980466574924e-05, |
|
"loss": 0.7865, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.730241555128097e-05, |
|
"loss": 0.7173, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.7295017884724436e-05, |
|
"loss": 0.7805, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.728761167474327e-05, |
|
"loss": 0.7015, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.7280196930011124e-05, |
|
"loss": 0.9915, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.7272773659211636e-05, |
|
"loss": 0.8156, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.7265341871038438e-05, |
|
"loss": 0.9031, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.7257901574195116e-05, |
|
"loss": 0.9122, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.725045277739525e-05, |
|
"loss": 0.9201, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.7242995489362352e-05, |
|
"loss": 0.7459, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.7235529718829886e-05, |
|
"loss": 0.7885, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.7228055474541256e-05, |
|
"loss": 0.8055, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.722057276524978e-05, |
|
"loss": 0.742, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.7213081599718694e-05, |
|
"loss": 0.8748, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.720558198672114e-05, |
|
"loss": 0.8843, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.7198073935040148e-05, |
|
"loss": 0.7946, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.7190557453468636e-05, |
|
"loss": 0.8812, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.7183032550809387e-05, |
|
"loss": 0.7318, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.7175499235875053e-05, |
|
"loss": 0.8431, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.716795751748814e-05, |
|
"loss": 0.7337, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.7160407404480982e-05, |
|
"loss": 0.6533, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.715284890569576e-05, |
|
"loss": 0.8323, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.7145282029984472e-05, |
|
"loss": 0.8415, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.7137706786208917e-05, |
|
"loss": 0.6874, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.7130123183240708e-05, |
|
"loss": 0.6417, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.7122531229961235e-05, |
|
"loss": 0.6016, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.7114930935261674e-05, |
|
"loss": 0.7699, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.7107322308042974e-05, |
|
"loss": 0.7771, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.709970535721583e-05, |
|
"loss": 0.8312, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.7092080091700695e-05, |
|
"loss": 0.7228, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.708444652042776e-05, |
|
"loss": 0.8831, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.7076804652336932e-05, |
|
"loss": 0.6569, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.7069154496377855e-05, |
|
"loss": 0.7842, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.706149606150986e-05, |
|
"loss": 0.7687, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.705382935670198e-05, |
|
"loss": 0.9303, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.7046154390932936e-05, |
|
"loss": 0.9519, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.703847117319112e-05, |
|
"loss": 0.9367, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.703077971247459e-05, |
|
"loss": 0.9492, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.7023080017791058e-05, |
|
"loss": 0.7635, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"eval_loss": 0.7974832057952881, |
|
"eval_runtime": 88.8572, |
|
"eval_samples_per_second": 22.508, |
|
"eval_steps_per_second": 11.254, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.701537209815788e-05, |
|
"loss": 0.8036, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.7007655962602038e-05, |
|
"loss": 0.6495, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.6999931620160147e-05, |
|
"loss": 0.8569, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.699219907987842e-05, |
|
"loss": 0.7404, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.6984458350812683e-05, |
|
"loss": 0.899, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.6976709442028346e-05, |
|
"loss": 0.7222, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.69689523626004e-05, |
|
"loss": 0.8308, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.69611871216134e-05, |
|
"loss": 0.6791, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.695341372816147e-05, |
|
"loss": 0.6438, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.694563219134827e-05, |
|
"loss": 0.7315, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.6937842520287007e-05, |
|
"loss": 0.6529, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.693004472410041e-05, |
|
"loss": 0.7812, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.6922238811920714e-05, |
|
"loss": 0.7516, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.6914424792889677e-05, |
|
"loss": 0.8681, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.6906602676158542e-05, |
|
"loss": 0.7108, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.689877247088803e-05, |
|
"loss": 0.8584, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.6890934186248343e-05, |
|
"loss": 0.7973, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.6883087831419144e-05, |
|
"loss": 0.7312, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.6875233415589544e-05, |
|
"loss": 0.7586, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.68673709479581e-05, |
|
"loss": 0.6344, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.6859500437732787e-05, |
|
"loss": 0.7058, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.6851621894131015e-05, |
|
"loss": 0.9206, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.6843735326379584e-05, |
|
"loss": 0.7999, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.6835840743714713e-05, |
|
"loss": 0.7462, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.6827938155381986e-05, |
|
"loss": 0.7467, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.6820027570636385e-05, |
|
"loss": 0.7297, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.6812108998742226e-05, |
|
"loss": 0.6459, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.6804182448973214e-05, |
|
"loss": 0.7415, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.679624793061237e-05, |
|
"loss": 0.5994, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.6788305452952065e-05, |
|
"loss": 0.697, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.678035502529398e-05, |
|
"loss": 0.765, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.677239665694911e-05, |
|
"loss": 0.8578, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.6764430357237752e-05, |
|
"loss": 0.7354, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.6756456135489487e-05, |
|
"loss": 0.8357, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.674847400104318e-05, |
|
"loss": 0.7987, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.674048396324696e-05, |
|
"loss": 0.7137, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.673248603145821e-05, |
|
"loss": 0.7908, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.6724480215043553e-05, |
|
"loss": 0.7826, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.6716466523378865e-05, |
|
"loss": 0.772, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.670844496584922e-05, |
|
"loss": 0.8403, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.6700415551848926e-05, |
|
"loss": 0.8107, |
|
"step": 873 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.6692378290781474e-05, |
|
"loss": 0.7894, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.6684333192059556e-05, |
|
"loss": 0.9713, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.667628026510504e-05, |
|
"loss": 0.6776, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.666821951934896e-05, |
|
"loss": 0.687, |
|
"step": 877 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.6660150964231512e-05, |
|
"loss": 0.6554, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.6652074609202035e-05, |
|
"loss": 0.6541, |
|
"step": 879 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.6643990463718996e-05, |
|
"loss": 0.7934, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.663589853725e-05, |
|
"loss": 0.7852, |
|
"step": 881 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.6627798839271742e-05, |
|
"loss": 0.8408, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.6619691379270052e-05, |
|
"loss": 0.8435, |
|
"step": 883 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.6611576166739814e-05, |
|
"loss": 0.7853, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.6603453211185022e-05, |
|
"loss": 0.8569, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.6595322522118715e-05, |
|
"loss": 0.7326, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.6587184109063002e-05, |
|
"loss": 0.7659, |
|
"step": 887 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.6579037981549036e-05, |
|
"loss": 0.7959, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.6570884149117e-05, |
|
"loss": 0.702, |
|
"step": 889 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.6562722621316102e-05, |
|
"loss": 0.7658, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.655455340770457e-05, |
|
"loss": 0.749, |
|
"step": 891 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.6546376517849617e-05, |
|
"loss": 0.6493, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.6538191961327466e-05, |
|
"loss": 0.7489, |
|
"step": 893 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.65299997477233e-05, |
|
"loss": 0.7172, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.6521799886631286e-05, |
|
"loss": 0.6461, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.6513592387654527e-05, |
|
"loss": 0.7912, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"eval_loss": 0.7900890111923218, |
|
"eval_runtime": 88.8611, |
|
"eval_samples_per_second": 22.507, |
|
"eval_steps_per_second": 11.254, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.6505377260405094e-05, |
|
"loss": 0.6839, |
|
"step": 897 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.6497154514503976e-05, |
|
"loss": 0.7584, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.648892415958109e-05, |
|
"loss": 0.7187, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.648068620527526e-05, |
|
"loss": 0.808, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.647244066123422e-05, |
|
"loss": 0.6535, |
|
"step": 901 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.6464187537114578e-05, |
|
"loss": 0.7975, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.6455926842581834e-05, |
|
"loss": 0.6695, |
|
"step": 903 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.644765858731034e-05, |
|
"loss": 0.6712, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.6439382780983315e-05, |
|
"loss": 0.966, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.643109943329281e-05, |
|
"loss": 0.7732, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.6422808553939723e-05, |
|
"loss": 0.7593, |
|
"step": 907 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.6414510152633752e-05, |
|
"loss": 0.7174, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.6406204239093426e-05, |
|
"loss": 0.851, |
|
"step": 909 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.6397890823046055e-05, |
|
"loss": 0.7415, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.638956991422774e-05, |
|
"loss": 0.6796, |
|
"step": 911 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.6381241522383366e-05, |
|
"loss": 0.7013, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.6372905657266566e-05, |
|
"loss": 0.5842, |
|
"step": 913 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.636456232863974e-05, |
|
"loss": 0.7475, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.6356211546274022e-05, |
|
"loss": 0.821, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.634785331994927e-05, |
|
"loss": 0.9147, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.633948765945407e-05, |
|
"loss": 0.8074, |
|
"step": 917 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.633111457458571e-05, |
|
"loss": 0.7101, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.632273407515017e-05, |
|
"loss": 0.8883, |
|
"step": 919 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.6314346170962118e-05, |
|
"loss": 0.7414, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.6305950871844893e-05, |
|
"loss": 0.874, |
|
"step": 921 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.6297548187630492e-05, |
|
"loss": 0.8274, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.628913812815956e-05, |
|
"loss": 0.7398, |
|
"step": 923 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.628072070328139e-05, |
|
"loss": 0.792, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.627229592285388e-05, |
|
"loss": 0.6166, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.6263863796743572e-05, |
|
"loss": 0.8314, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.6255424334825577e-05, |
|
"loss": 0.6605, |
|
"step": 927 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.6246977546983624e-05, |
|
"loss": 0.693, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.623852344311001e-05, |
|
"loss": 0.8222, |
|
"step": 929 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.6230062033105597e-05, |
|
"loss": 0.8411, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.622159332687981e-05, |
|
"loss": 0.8027, |
|
"step": 931 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.6213117334350625e-05, |
|
"loss": 0.5805, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.6204634065444534e-05, |
|
"loss": 0.7377, |
|
"step": 933 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.6196143530096563e-05, |
|
"loss": 0.8146, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.618764573825024e-05, |
|
"loss": 0.7166, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.6179140699857607e-05, |
|
"loss": 0.749, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.6170628424879167e-05, |
|
"loss": 0.7324, |
|
"step": 937 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.6162108923283925e-05, |
|
"loss": 0.738, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.615358220504933e-05, |
|
"loss": 0.7005, |
|
"step": 939 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.6145048280161294e-05, |
|
"loss": 0.6305, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.613650715861416e-05, |
|
"loss": 0.6826, |
|
"step": 941 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.612795885041071e-05, |
|
"loss": 0.7812, |
|
"step": 942 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.611940336556213e-05, |
|
"loss": 0.7025, |
|
"step": 943 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.611084071408802e-05, |
|
"loss": 0.7932, |
|
"step": 944 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.6102270906016366e-05, |
|
"loss": 0.6892, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.609369395138355e-05, |
|
"loss": 0.6442, |
|
"step": 946 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.60851098602343e-05, |
|
"loss": 0.8101, |
|
"step": 947 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.607651864262173e-05, |
|
"loss": 0.9417, |
|
"step": 948 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.6067920308607266e-05, |
|
"loss": 0.7793, |
|
"step": 949 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.6059314868260703e-05, |
|
"loss": 0.875, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.6050702331660132e-05, |
|
"loss": 0.8437, |
|
"step": 951 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.6042082708891974e-05, |
|
"loss": 0.9148, |
|
"step": 952 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.6033456010050935e-05, |
|
"loss": 0.6749, |
|
"step": 953 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.602482224524001e-05, |
|
"loss": 0.7153, |
|
"step": 954 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.601618142457048e-05, |
|
"loss": 0.6872, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.6007533558161877e-05, |
|
"loss": 0.6991, |
|
"step": 956 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.599887865614199e-05, |
|
"loss": 0.8357, |
|
"step": 957 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.5990216728646847e-05, |
|
"loss": 0.817, |
|
"step": 958 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.5981547785820696e-05, |
|
"loss": 0.7864, |
|
"step": 959 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.597287183781602e-05, |
|
"loss": 0.7218, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"eval_loss": 0.7848817110061646, |
|
"eval_runtime": 88.8132, |
|
"eval_samples_per_second": 22.519, |
|
"eval_steps_per_second": 11.26, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.5964188894793492e-05, |
|
"loss": 0.8142, |
|
"step": 961 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.5955498966921975e-05, |
|
"loss": 0.721, |
|
"step": 962 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.5946802064378512e-05, |
|
"loss": 0.7197, |
|
"step": 963 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.5938098197348333e-05, |
|
"loss": 0.8155, |
|
"step": 964 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.5929387376024798e-05, |
|
"loss": 0.7409, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.592066961060943e-05, |
|
"loss": 0.6935, |
|
"step": 966 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.591194491131187e-05, |
|
"loss": 0.6564, |
|
"step": 967 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.59032132883499e-05, |
|
"loss": 0.8281, |
|
"step": 968 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.5894474751949386e-05, |
|
"loss": 0.6303, |
|
"step": 969 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.588572931234431e-05, |
|
"loss": 0.8682, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.5876976979776727e-05, |
|
"loss": 0.7819, |
|
"step": 971 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.586821776449677e-05, |
|
"loss": 0.8028, |
|
"step": 972 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.5859451676762634e-05, |
|
"loss": 0.6827, |
|
"step": 973 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.5850678726840555e-05, |
|
"loss": 0.6692, |
|
"step": 974 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.5841898925004813e-05, |
|
"loss": 0.7941, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.5833112281537708e-05, |
|
"loss": 0.6954, |
|
"step": 976 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.5824318806729557e-05, |
|
"loss": 0.7135, |
|
"step": 977 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.581551851087867e-05, |
|
"loss": 0.7176, |
|
"step": 978 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.5806711404291356e-05, |
|
"loss": 0.8105, |
|
"step": 979 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.5797897497281888e-05, |
|
"loss": 0.9515, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.5789076800172515e-05, |
|
"loss": 0.9346, |
|
"step": 981 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.5780249323293427e-05, |
|
"loss": 0.8533, |
|
"step": 982 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.577141507698277e-05, |
|
"loss": 0.749, |
|
"step": 983 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.5762574071586592e-05, |
|
"loss": 0.7779, |
|
"step": 984 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.5753726317458884e-05, |
|
"loss": 0.6955, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.574487182496152e-05, |
|
"loss": 0.8278, |
|
"step": 986 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.5736010604464284e-05, |
|
"loss": 0.8578, |
|
"step": 987 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.5727142666344823e-05, |
|
"loss": 0.7986, |
|
"step": 988 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.5718268020988664e-05, |
|
"loss": 0.7456, |
|
"step": 989 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.5709386678789172e-05, |
|
"loss": 0.6496, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.5700498650147577e-05, |
|
"loss": 0.749, |
|
"step": 991 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.5691603945472923e-05, |
|
"loss": 0.8465, |
|
"step": 992 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.568270257518208e-05, |
|
"loss": 0.6407, |
|
"step": 993 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.567379454969972e-05, |
|
"loss": 0.7428, |
|
"step": 994 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.5664879879458316e-05, |
|
"loss": 0.5955, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.5655958574898113e-05, |
|
"loss": 0.7153, |
|
"step": 996 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.5647030646467136e-05, |
|
"loss": 0.7766, |
|
"step": 997 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.563809610462116e-05, |
|
"loss": 0.84, |
|
"step": 998 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.5629154959823706e-05, |
|
"loss": 0.7166, |
|
"step": 999 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.562020722254603e-05, |
|
"loss": 0.8409, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.5611252903267112e-05, |
|
"loss": 0.7234, |
|
"step": 1001 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.5602292012473634e-05, |
|
"loss": 0.4725, |
|
"step": 1002 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.559332456065998e-05, |
|
"loss": 0.6734, |
|
"step": 1003 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.5584350558328204e-05, |
|
"loss": 0.6656, |
|
"step": 1004 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.5575370015988056e-05, |
|
"loss": 0.5797, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.5566382944156924e-05, |
|
"loss": 0.5901, |
|
"step": 1006 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 1.5557389353359846e-05, |
|
"loss": 0.8194, |
|
"step": 1007 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 1.554838925412951e-05, |
|
"loss": 0.6907, |
|
"step": 1008 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 1.5539382657006204e-05, |
|
"loss": 0.5917, |
|
"step": 1009 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 1.5530369572537844e-05, |
|
"loss": 0.615, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 1.552135001127993e-05, |
|
"loss": 0.6122, |
|
"step": 1011 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 1.5512323983795565e-05, |
|
"loss": 0.5004, |
|
"step": 1012 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 1.55032915006554e-05, |
|
"loss": 0.6083, |
|
"step": 1013 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 1.5494252572437667e-05, |
|
"loss": 0.746, |
|
"step": 1014 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 1.548520720972814e-05, |
|
"loss": 0.6606, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 1.547615542312012e-05, |
|
"loss": 0.5767, |
|
"step": 1016 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 1.5467097223214444e-05, |
|
"loss": 0.7176, |
|
"step": 1017 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 1.545803262061946e-05, |
|
"loss": 0.7197, |
|
"step": 1018 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 1.5448961625950996e-05, |
|
"loss": 0.793, |
|
"step": 1019 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 1.543988424983239e-05, |
|
"loss": 0.5471, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 1.5430800502894433e-05, |
|
"loss": 0.6157, |
|
"step": 1021 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 1.54217103957754e-05, |
|
"loss": 0.8016, |
|
"step": 1022 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 1.541261393912098e-05, |
|
"loss": 0.5516, |
|
"step": 1023 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 1.5403511143584342e-05, |
|
"loss": 0.6016, |
|
"step": 1024 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"eval_loss": 0.7883525490760803, |
|
"eval_runtime": 88.8444, |
|
"eval_samples_per_second": 22.511, |
|
"eval_steps_per_second": 11.256, |
|
"step": 1024 |
|
}, |
|
{
"epoch": 1.02,
"learning_rate": 1.539440201982604e-05,
"loss": 0.6044,
"step": 1025
},
{
"epoch": 1.02,
"learning_rate": 1.5385286578514065e-05,
"loss": 0.642,
"step": 1026
},
{
"epoch": 1.03,
"learning_rate": 1.537616483032379e-05,
"loss": 0.6362,
"step": 1027
},
{
"epoch": 1.03,
"learning_rate": 1.5367036785937984e-05,
"loss": 0.6033,
"step": 1028
},
{
"epoch": 1.03,
"learning_rate": 1.535790245604678e-05,
"loss": 0.7156,
"step": 1029
},
{
"epoch": 1.03,
"learning_rate": 1.534876185134769e-05,
"loss": 0.7627,
"step": 1030
},
{
"epoch": 1.03,
"learning_rate": 1.5339614982545557e-05,
"loss": 0.8591,
"step": 1031
},
{
"epoch": 1.03,
"learning_rate": 1.5330461860352566e-05,
"loss": 0.6512,
"step": 1032
},
{
"epoch": 1.03,
"learning_rate": 1.5321302495488227e-05,
"loss": 0.5996,
"step": 1033
},
{
"epoch": 1.03,
"learning_rate": 1.531213689867936e-05,
"loss": 0.6221,
"step": 1034
},
{
"epoch": 1.03,
"learning_rate": 1.5302965080660085e-05,
"loss": 0.6377,
"step": 1035
},
{
"epoch": 1.03,
"learning_rate": 1.5293787052171802e-05,
"loss": 0.6357,
"step": 1036
},
{
"epoch": 1.04,
"learning_rate": 1.528460282396319e-05,
"loss": 0.6439,
"step": 1037
},
{
"epoch": 1.04,
"learning_rate": 1.527541240679019e-05,
"loss": 0.6825,
"step": 1038
},
{
"epoch": 1.04,
"learning_rate": 1.5266215811415985e-05,
"loss": 0.6813,
"step": 1039
},
{
"epoch": 1.04,
"learning_rate": 1.5257013048611003e-05,
"loss": 0.6871,
"step": 1040
},
{
"epoch": 1.04,
"learning_rate": 1.5247804129152881e-05,
"loss": 0.7229,
"step": 1041
},
{
"epoch": 1.04,
"learning_rate": 1.5238589063826477e-05,
"loss": 0.92,
"step": 1042
},
{
"epoch": 1.04,
"learning_rate": 1.5229367863423841e-05,
"loss": 0.6344,
"step": 1043
},
{
"epoch": 1.04,
"learning_rate": 1.5220140538744215e-05,
"loss": 0.7507,
"step": 1044
},
{
"epoch": 1.04,
"learning_rate": 1.5210907100594008e-05,
"loss": 0.7233,
"step": 1045
},
{
"epoch": 1.04,
"learning_rate": 1.5201667559786784e-05,
"loss": 0.7648,
"step": 1046
},
{
"epoch": 1.05,
"learning_rate": 1.5192421927143266e-05,
"loss": 0.7114,
"step": 1047
},
{
"epoch": 1.05,
"learning_rate": 1.5183170213491302e-05,
"loss": 0.6684,
"step": 1048
},
{
"epoch": 1.05,
"learning_rate": 1.5173912429665865e-05,
"loss": 0.9303,
"step": 1049
},
{
"epoch": 1.05,
"learning_rate": 1.5164648586509038e-05,
"loss": 0.6457,
"step": 1050
},
{
"epoch": 1.05,
"learning_rate": 1.5155378694869996e-05,
"loss": 0.7582,
"step": 1051
},
{
"epoch": 1.05,
"learning_rate": 1.5146102765605001e-05,
"loss": 0.6437,
"step": 1052
},
{
"epoch": 1.05,
"learning_rate": 1.5136820809577386e-05,
"loss": 0.5888,
"step": 1053
},
{
"epoch": 1.05,
"learning_rate": 1.512753283765754e-05,
"loss": 0.5791,
"step": 1054
},
{
"epoch": 1.05,
"learning_rate": 1.5118238860722903e-05,
"loss": 0.6762,
"step": 1055
},
{
"epoch": 1.05,
"learning_rate": 1.5108938889657938e-05,
"loss": 0.6464,
"step": 1056
},
{
"epoch": 1.06,
"learning_rate": 1.5099632935354136e-05,
"loss": 0.661,
"step": 1057
},
{
"epoch": 1.06,
"learning_rate": 1.5090321008709992e-05,
"loss": 0.6115,
"step": 1058
},
{
"epoch": 1.06,
"learning_rate": 1.5081003120630993e-05,
"loss": 0.5149,
"step": 1059
},
{
"epoch": 1.06,
"learning_rate": 1.5071679282029616e-05,
"loss": 0.6131,
"step": 1060
},
{
"epoch": 1.06,
"learning_rate": 1.5062349503825298e-05,
"loss": 0.6786,
"step": 1061
},
{
"epoch": 1.06,
"learning_rate": 1.5053013796944433e-05,
"loss": 0.6981,
"step": 1062
},
{
"epoch": 1.06,
"learning_rate": 1.5043672172320367e-05,
"loss": 0.6555,
"step": 1063
},
{
"epoch": 1.06,
"learning_rate": 1.5034324640893365e-05,
"loss": 0.675,
"step": 1064
},
{
"epoch": 1.06,
"learning_rate": 1.5024971213610618e-05,
"loss": 0.5791,
"step": 1065
},
{
"epoch": 1.06,
"learning_rate": 1.5015611901426214e-05,
"loss": 0.6364,
"step": 1066
},
{
"epoch": 1.07,
"learning_rate": 1.5006246715301145e-05,
"loss": 0.7467,
"step": 1067
},
{
"epoch": 1.07,
"learning_rate": 1.4996875666203263e-05,
"loss": 0.7618,
"step": 1068
},
{
"epoch": 1.07,
"learning_rate": 1.4987498765107309e-05,
"loss": 0.5805,
"step": 1069
},
{
"epoch": 1.07,
"learning_rate": 1.4978116022994861e-05,
"loss": 0.5617,
"step": 1070
},
{
"epoch": 1.07,
"learning_rate": 1.4968727450854349e-05,
"loss": 0.6871,
"step": 1071
},
{
"epoch": 1.07,
"learning_rate": 1.4959333059681016e-05,
"loss": 0.4785,
"step": 1072
},
{
"epoch": 1.07,
"learning_rate": 1.4949932860476932e-05,
"loss": 0.6101,
"step": 1073
},
{
"epoch": 1.07,
"learning_rate": 1.4940526864250969e-05,
"loss": 0.5874,
"step": 1074
},
{
"epoch": 1.07,
"learning_rate": 1.493111508201878e-05,
"loss": 0.6337,
"step": 1075
},
{
"epoch": 1.07,
"learning_rate": 1.4921697524802805e-05,
"loss": 0.6763,
"step": 1076
},
{
"epoch": 1.08,
"learning_rate": 1.4912274203632235e-05,
"loss": 0.7148,
"step": 1077
},
{
"epoch": 1.08,
"learning_rate": 1.4902845129543017e-05,
"loss": 0.5686,
"step": 1078
},
{
"epoch": 1.08,
"learning_rate": 1.4893410313577843e-05,
"loss": 0.6511,
"step": 1079
},
{
"epoch": 1.08,
"learning_rate": 1.4883969766786113e-05,
"loss": 0.5792,
"step": 1080
},
{
"epoch": 1.08,
"learning_rate": 1.4874523500223958e-05,
"loss": 0.6251,
"step": 1081
},
{
"epoch": 1.08,
"learning_rate": 1.4865071524954192e-05,
"loss": 0.6641,
"step": 1082
},
{
"epoch": 1.08,
"learning_rate": 1.4855613852046316e-05,
"loss": 0.6197,
"step": 1083
},
{
"epoch": 1.08,
"learning_rate": 1.4846150492576517e-05,
"loss": 0.5618,
"step": 1084
},
{
"epoch": 1.08,
"learning_rate": 1.4836681457627625e-05,
"loss": 0.6289,
"step": 1085
},
{
"epoch": 1.08,
"learning_rate": 1.482720675828913e-05,
"loss": 0.6836,
"step": 1086
},
{
"epoch": 1.09,
"learning_rate": 1.4817726405657145e-05,
"loss": 0.6174,
"step": 1087
},
{
"epoch": 1.09,
"learning_rate": 1.4808240410834413e-05,
"loss": 0.6353,
"step": 1088
},
{
"epoch": 1.09,
"eval_loss": 0.7834817171096802,
"eval_runtime": 88.7572,
"eval_samples_per_second": 22.533,
"eval_steps_per_second": 11.267,
"step": 1088
},
|
{
"epoch": 1.09,
"learning_rate": 1.4798748784930276e-05,
"loss": 0.546,
"step": 1089
},
{
"epoch": 1.09,
"learning_rate": 1.478925153906068e-05,
"loss": 0.6468,
"step": 1090
},
{
"epoch": 1.09,
"learning_rate": 1.4779748684348147e-05,
"loss": 0.5424,
"step": 1091
},
{
"epoch": 1.09,
"learning_rate": 1.4770240231921769e-05,
"loss": 0.6575,
"step": 1092
},
{
"epoch": 1.09,
"learning_rate": 1.4760726192917187e-05,
"loss": 0.5227,
"step": 1093
},
{
"epoch": 1.09,
"learning_rate": 1.47512065784766e-05,
"loss": 0.6903,
"step": 1094
},
{
"epoch": 1.09,
"learning_rate": 1.4741681399748724e-05,
"loss": 0.6109,
"step": 1095
},
{
"epoch": 1.09,
"learning_rate": 1.4732150667888792e-05,
"loss": 0.6478,
"step": 1096
},
{
"epoch": 1.1,
"learning_rate": 1.472261439405855e-05,
"loss": 0.6724,
"step": 1097
},
{
"epoch": 1.1,
"learning_rate": 1.4713072589426221e-05,
"loss": 0.6166,
"step": 1098
},
{
"epoch": 1.1,
"learning_rate": 1.4703525265166516e-05,
"loss": 0.8078,
"step": 1099
},
{
"epoch": 1.1,
"learning_rate": 1.4693972432460607e-05,
"loss": 0.6037,
"step": 1100
},
{
"epoch": 1.1,
"learning_rate": 1.4684414102496112e-05,
"loss": 0.722,
"step": 1101
},
{
"epoch": 1.1,
"learning_rate": 1.4674850286467099e-05,
"loss": 0.694,
"step": 1102
},
{
"epoch": 1.1,
"learning_rate": 1.4665280995574044e-05,
"loss": 0.6105,
"step": 1103
},
{
"epoch": 1.1,
"learning_rate": 1.4655706241023853e-05,
"loss": 0.6524,
"step": 1104
},
{
"epoch": 1.1,
"learning_rate": 1.4646126034029814e-05,
"loss": 0.7021,
"step": 1105
},
{
"epoch": 1.1,
"learning_rate": 1.4636540385811617e-05,
"loss": 0.5738,
"step": 1106
},
{
"epoch": 1.11,
"learning_rate": 1.462694930759531e-05,
"loss": 0.5898,
"step": 1107
},
{
"epoch": 1.11,
"learning_rate": 1.4617352810613307e-05,
"loss": 0.6985,
"step": 1108
},
{
"epoch": 1.11,
"learning_rate": 1.4607750906104365e-05,
"loss": 0.6455,
"step": 1109
},
{
"epoch": 1.11,
"learning_rate": 1.4598143605313582e-05,
"loss": 0.6854,
"step": 1110
},
{
"epoch": 1.11,
"learning_rate": 1.4588530919492364e-05,
"loss": 0.5569,
"step": 1111
},
{
"epoch": 1.11,
"learning_rate": 1.4578912859898436e-05,
"loss": 0.5843,
"step": 1112
},
{
"epoch": 1.11,
"learning_rate": 1.4569289437795799e-05,
"loss": 0.6106,
"step": 1113
},
{
"epoch": 1.11,
"learning_rate": 1.4559660664454753e-05,
"loss": 0.6425,
"step": 1114
},
{
"epoch": 1.11,
"learning_rate": 1.4550026551151858e-05,
"loss": 0.5984,
"step": 1115
},
{
"epoch": 1.11,
"learning_rate": 1.454038710916992e-05,
"loss": 0.5803,
"step": 1116
},
{
"epoch": 1.12,
"learning_rate": 1.4530742349798e-05,
"loss": 0.6532,
"step": 1117
},
{
"epoch": 1.12,
"learning_rate": 1.4521092284331371e-05,
"loss": 0.5944,
"step": 1118
},
{
"epoch": 1.12,
"learning_rate": 1.4511436924071536e-05,
"loss": 0.5596,
"step": 1119
},
{
"epoch": 1.12,
"learning_rate": 1.4501776280326182e-05,
"loss": 0.6126,
"step": 1120
},
{
"epoch": 1.12,
"learning_rate": 1.44921103644092e-05,
"loss": 0.5562,
"step": 1121
},
{
"epoch": 1.12,
"learning_rate": 1.4482439187640646e-05,
"loss": 0.6613,
"step": 1122
},
{
"epoch": 1.12,
"learning_rate": 1.4472762761346738e-05,
"loss": 0.5612,
"step": 1123
},
{
"epoch": 1.12,
"learning_rate": 1.4463081096859837e-05,
"loss": 0.5644,
"step": 1124
},
{
"epoch": 1.12,
"learning_rate": 1.4453394205518457e-05,
"loss": 0.6157,
"step": 1125
},
{
"epoch": 1.12,
"learning_rate": 1.4443702098667208e-05,
"loss": 1.298,
"step": 1126
},
{
"epoch": 1.13,
"learning_rate": 1.4434004787656832e-05,
"loss": 0.6685,
"step": 1127
},
{
"epoch": 1.13,
"learning_rate": 1.4424302283844145e-05,
"loss": 0.6245,
"step": 1128
},
{
"epoch": 1.13,
"learning_rate": 1.441459459859206e-05,
"loss": 0.484,
"step": 1129
},
{
"epoch": 1.13,
"learning_rate": 1.4404881743269547e-05,
"loss": 0.5202,
"step": 1130
},
{
"epoch": 1.13,
"learning_rate": 1.4395163729251645e-05,
"loss": 0.6549,
"step": 1131
},
{
"epoch": 1.13,
"learning_rate": 1.4385440567919417e-05,
"loss": 0.6547,
"step": 1132
},
{
"epoch": 1.13,
"learning_rate": 1.4375712270659966e-05,
"loss": 0.6661,
"step": 1133
},
{
"epoch": 1.13,
"learning_rate": 1.4365978848866403e-05,
"loss": 0.549,
"step": 1134
},
{
"epoch": 1.13,
"learning_rate": 1.4356240313937853e-05,
"loss": 0.5194,
"step": 1135
},
{
"epoch": 1.13,
"learning_rate": 1.434649667727941e-05,
"loss": 0.7109,
"step": 1136
},
{
"epoch": 1.14,
"learning_rate": 1.4336747950302159e-05,
"loss": 0.7517,
"step": 1137
},
{
"epoch": 1.14,
"learning_rate": 1.4326994144423138e-05,
"loss": 0.6625,
"step": 1138
},
{
"epoch": 1.14,
"learning_rate": 1.4317235271065339e-05,
"loss": 0.6192,
"step": 1139
},
{
"epoch": 1.14,
"learning_rate": 1.4307471341657679e-05,
"loss": 0.6246,
"step": 1140
},
{
"epoch": 1.14,
"learning_rate": 1.4297702367635009e-05,
"loss": 0.5272,
"step": 1141
},
{
"epoch": 1.14,
"learning_rate": 1.4287928360438077e-05,
"loss": 0.6396,
"step": 1142
},
{
"epoch": 1.14,
"learning_rate": 1.4278149331513534e-05,
"loss": 0.6444,
"step": 1143
},
{
"epoch": 1.14,
"learning_rate": 1.4268365292313899e-05,
"loss": 0.563,
"step": 1144
},
{
"epoch": 1.14,
"learning_rate": 1.4258576254297579e-05,
"loss": 0.6338,
"step": 1145
},
{
"epoch": 1.14,
"learning_rate": 1.4248782228928815e-05,
"loss": 0.7935,
"step": 1146
},
{
"epoch": 1.15,
"learning_rate": 1.4238983227677705e-05,
"loss": 0.6255,
"step": 1147
},
{
"epoch": 1.15,
"learning_rate": 1.4229179262020162e-05,
"loss": 0.521,
"step": 1148
},
{
"epoch": 1.15,
"learning_rate": 1.4219370343437924e-05,
"loss": 0.6963,
"step": 1149
},
{
"epoch": 1.15,
"learning_rate": 1.4209556483418517e-05,
"loss": 0.696,
"step": 1150
},
{
"epoch": 1.15,
"learning_rate": 1.4199737693455262e-05,
"loss": 0.6494,
"step": 1151
},
{
"epoch": 1.15,
"learning_rate": 1.4189913985047254e-05,
"loss": 0.67,
"step": 1152
},
{
"epoch": 1.15,
"eval_loss": 0.7798218131065369,
"eval_runtime": 88.7999,
"eval_samples_per_second": 22.523,
"eval_steps_per_second": 11.261,
"step": 1152
},
|
{
"epoch": 1.15,
"learning_rate": 1.4180085369699352e-05,
"loss": 0.724,
"step": 1153
},
{
"epoch": 1.15,
"learning_rate": 1.4170251858922144e-05,
"loss": 0.6832,
"step": 1154
},
{
"epoch": 1.15,
"learning_rate": 1.4160413464231975e-05,
"loss": 0.5867,
"step": 1155
},
{
"epoch": 1.15,
"learning_rate": 1.4150570197150889e-05,
"loss": 0.6907,
"step": 1156
},
{
"epoch": 1.16,
"learning_rate": 1.4140722069206651e-05,
"loss": 0.6492,
"step": 1157
},
{
"epoch": 1.16,
"learning_rate": 1.4130869091932715e-05,
"loss": 0.4803,
"step": 1158
},
{
"epoch": 1.16,
"learning_rate": 1.4121011276868203e-05,
"loss": 0.678,
"step": 1159
},
{
"epoch": 1.16,
"learning_rate": 1.4111148635557923e-05,
"loss": 0.6601,
"step": 1160
},
{
"epoch": 1.16,
"learning_rate": 1.4101281179552316e-05,
"loss": 0.6202,
"step": 1161
},
{
"epoch": 1.16,
"learning_rate": 1.4091408920407475e-05,
"loss": 0.6806,
"step": 1162
},
{
"epoch": 1.16,
"learning_rate": 1.4081531869685107e-05,
"loss": 0.5971,
"step": 1163
},
{
"epoch": 1.16,
"learning_rate": 1.4071650038952542e-05,
"loss": 0.6096,
"step": 1164
},
{
"epoch": 1.16,
"learning_rate": 1.4061763439782698e-05,
"loss": 0.7169,
"step": 1165
},
{
"epoch": 1.16,
"learning_rate": 1.4051872083754088e-05,
"loss": 0.5959,
"step": 1166
},
{
"epoch": 1.17,
"learning_rate": 1.4041975982450782e-05,
"loss": 0.5372,
"step": 1167
},
{
"epoch": 1.17,
"learning_rate": 1.4032075147462423e-05,
"loss": 0.5357,
"step": 1168
},
{
"epoch": 1.17,
"learning_rate": 1.4022169590384183e-05,
"loss": 0.7213,
"step": 1169
},
{
"epoch": 1.17,
"learning_rate": 1.4012259322816775e-05,
"loss": 0.7302,
"step": 1170
},
{
"epoch": 1.17,
"learning_rate": 1.4002344356366422e-05,
"loss": 0.7545,
"step": 1171
},
{
"epoch": 1.17,
"learning_rate": 1.3992424702644857e-05,
"loss": 0.5941,
"step": 1172
},
{
"epoch": 1.17,
"learning_rate": 1.3982500373269297e-05,
"loss": 0.6402,
"step": 1173
},
{
"epoch": 1.17,
"learning_rate": 1.3972571379862431e-05,
"loss": 0.6084,
"step": 1174
},
{
"epoch": 1.17,
"learning_rate": 1.3962637734052418e-05,
"loss": 0.5551,
"step": 1175
},
{
"epoch": 1.17,
"learning_rate": 1.3952699447472866e-05,
"loss": 0.6604,
"step": 1176
},
{
"epoch": 1.18,
"learning_rate": 1.394275653176281e-05,
"loss": 0.5899,
"step": 1177
},
{
"epoch": 1.18,
"learning_rate": 1.3932808998566713e-05,
"loss": 0.7172,
"step": 1178
},
{
"epoch": 1.18,
"learning_rate": 1.3922856859534445e-05,
"loss": 0.6294,
"step": 1179
},
{
"epoch": 1.18,
"learning_rate": 1.3912900126321268e-05,
"loss": 0.6395,
"step": 1180
},
{
"epoch": 1.18,
"learning_rate": 1.390293881058782e-05,
"loss": 0.5647,
"step": 1181
},
{
"epoch": 1.18,
"learning_rate": 1.3892972924000118e-05,
"loss": 0.6165,
"step": 1182
},
{
"epoch": 1.18,
"learning_rate": 1.3883002478229521e-05,
"loss": 0.5617,
"step": 1183
},
{
"epoch": 1.18,
"learning_rate": 1.3873027484952735e-05,
"loss": 0.5541,
"step": 1184
},
{
"epoch": 1.18,
"learning_rate": 1.3863047955851781e-05,
"loss": 0.6338,
"step": 1185
},
{
"epoch": 1.18,
"learning_rate": 1.3853063902614006e-05,
"loss": 0.7589,
"step": 1186
},
{
"epoch": 1.19,
"learning_rate": 1.3843075336932042e-05,
"loss": 0.6101,
"step": 1187
},
{
"epoch": 1.19,
"learning_rate": 1.383308227050382e-05,
"loss": 0.6351,
"step": 1188
},
{
"epoch": 1.19,
"learning_rate": 1.3823084715032528e-05,
"loss": 0.7485,
"step": 1189
},
{
"epoch": 1.19,
"learning_rate": 1.381308268222662e-05,
"loss": 0.676,
"step": 1190
},
{
"epoch": 1.19,
"learning_rate": 1.3803076183799788e-05,
"loss": 0.5848,
"step": 1191
},
{
"epoch": 1.19,
"learning_rate": 1.3793065231470961e-05,
"loss": 0.6669,
"step": 1192
},
{
"epoch": 1.19,
"learning_rate": 1.3783049836964276e-05,
"loss": 0.581,
"step": 1193
},
{
"epoch": 1.19,
"learning_rate": 1.3773030012009076e-05,
"loss": 0.6065,
"step": 1194
},
{
"epoch": 1.19,
"learning_rate": 1.3763005768339897e-05,
"loss": 0.5967,
"step": 1195
},
{
"epoch": 1.19,
"learning_rate": 1.375297711769644e-05,
"loss": 0.6504,
"step": 1196
},
{
"epoch": 1.2,
"learning_rate": 1.3742944071823578e-05,
"loss": 0.6016,
"step": 1197
},
{
"epoch": 1.2,
"learning_rate": 1.3732906642471324e-05,
"loss": 0.6782,
"step": 1198
},
{
"epoch": 1.2,
"learning_rate": 1.3722864841394827e-05,
"loss": 0.4762,
"step": 1199
},
{
"epoch": 1.2,
"learning_rate": 1.3712818680354356e-05,
"loss": 0.5421,
"step": 1200
},
{
"epoch": 1.2,
"learning_rate": 1.3702768171115283e-05,
"loss": 0.7471,
"step": 1201
},
{
"epoch": 1.2,
"learning_rate": 1.3692713325448079e-05,
"loss": 0.5636,
"step": 1202
},
{
"epoch": 1.2,
"learning_rate": 1.368265415512829e-05,
"loss": 0.5299,
"step": 1203
},
{
"epoch": 1.2,
"learning_rate": 1.3672590671936524e-05,
"loss": 0.7712,
"step": 1204
},
{
"epoch": 1.2,
"learning_rate": 1.3662522887658443e-05,
"loss": 0.8533,
"step": 1205
},
{
"epoch": 1.2,
"learning_rate": 1.3652450814084741e-05,
"loss": 0.6254,
"step": 1206
},
{
"epoch": 1.21,
"learning_rate": 1.364237446301115e-05,
"loss": 0.7531,
"step": 1207
},
{
"epoch": 1.21,
"learning_rate": 1.3632293846238392e-05,
"loss": 0.8079,
"step": 1208
},
{
"epoch": 1.21,
"learning_rate": 1.36222089755722e-05,
"loss": 0.6848,
"step": 1209
},
{
"epoch": 1.21,
"learning_rate": 1.361211986282328e-05,
"loss": 0.5443,
"step": 1210
},
{
"epoch": 1.21,
"learning_rate": 1.3602026519807305e-05,
"loss": 0.7703,
"step": 1211
},
{
"epoch": 1.21,
"learning_rate": 1.3591928958344914e-05,
"loss": 0.6422,
"step": 1212
},
{
"epoch": 1.21,
"learning_rate": 1.3581827190261674e-05,
"loss": 0.5909,
"step": 1213
},
{
"epoch": 1.21,
"learning_rate": 1.3571721227388083e-05,
"loss": 0.6457,
"step": 1214
},
{
"epoch": 1.21,
"learning_rate": 1.3561611081559556e-05,
"loss": 0.5696,
"step": 1215
},
{
"epoch": 1.21,
"learning_rate": 1.3551496764616392e-05,
"loss": 0.6306,
"step": 1216
},
{
"epoch": 1.21,
"eval_loss": 0.7770239114761353,
"eval_runtime": 88.8543,
"eval_samples_per_second": 22.509,
"eval_steps_per_second": 11.254,
"step": 1216
},
|
{
"epoch": 1.22,
"learning_rate": 1.3541378288403797e-05,
"loss": 0.6107,
"step": 1217
},
{
"epoch": 1.22,
"learning_rate": 1.353125566477183e-05,
"loss": 0.5641,
"step": 1218
},
{
"epoch": 1.22,
"learning_rate": 1.3521128905575416e-05,
"loss": 0.6169,
"step": 1219
},
{
"epoch": 1.22,
"learning_rate": 1.351099802267432e-05,
"loss": 0.6843,
"step": 1220
},
{
"epoch": 1.22,
"learning_rate": 1.3500863027933138e-05,
"loss": 0.6537,
"step": 1221
},
{
"epoch": 1.22,
"learning_rate": 1.3490723933221274e-05,
"loss": 0.4883,
"step": 1222
},
{
"epoch": 1.22,
"learning_rate": 1.3480580750412952e-05,
"loss": 0.6224,
"step": 1223
},
{
"epoch": 1.22,
"learning_rate": 1.3470433491387165e-05,
"loss": 0.6812,
"step": 1224
},
{
"epoch": 1.22,
"learning_rate": 1.3460282168027689e-05,
"loss": 0.5927,
"step": 1225
},
{
"epoch": 1.22,
"learning_rate": 1.3450126792223054e-05,
"loss": 0.7314,
"step": 1226
},
{
"epoch": 1.23,
"learning_rate": 1.3439967375866545e-05,
"loss": 0.7307,
"step": 1227
},
{
"epoch": 1.23,
"learning_rate": 1.342980393085617e-05,
"loss": 0.702,
"step": 1228
},
{
"epoch": 1.23,
"learning_rate": 1.3419636469094659e-05,
"loss": 0.5772,
"step": 1229
},
{
"epoch": 1.23,
"learning_rate": 1.3409465002489449e-05,
"loss": 0.6524,
"step": 1230
},
{
"epoch": 1.23,
"learning_rate": 1.339928954295266e-05,
"loss": 0.5867,
"step": 1231
},
{
"epoch": 1.23,
"learning_rate": 1.3389110102401092e-05,
"loss": 0.4997,
"step": 1232
},
{
"epoch": 1.23,
"learning_rate": 1.3378926692756214e-05,
"loss": 0.5661,
"step": 1233
},
{
"epoch": 1.23,
"learning_rate": 1.336873932594413e-05,
"loss": 0.7669,
"step": 1234
},
{
"epoch": 1.23,
"learning_rate": 1.3358548013895591e-05,
"loss": 0.5901,
"step": 1235
},
{
"epoch": 1.23,
"learning_rate": 1.3348352768545957e-05,
"loss": 0.5802,
"step": 1236
},
{
"epoch": 1.24,
"learning_rate": 1.3338153601835203e-05,
"loss": 0.7795,
"step": 1237
},
{
"epoch": 1.24,
"learning_rate": 1.3327950525707895e-05,
"loss": 0.4799,
"step": 1238
},
{
"epoch": 1.24,
"learning_rate": 1.3317743552113175e-05,
"loss": 0.6811,
"step": 1239
},
{
"epoch": 1.24,
"learning_rate": 1.330753269300475e-05,
"loss": 0.6215,
"step": 1240
},
{
"epoch": 1.24,
"learning_rate": 1.3297317960340874e-05,
"loss": 0.501,
"step": 1241
},
{
"epoch": 1.24,
"learning_rate": 1.3287099366084349e-05,
"loss": 0.731,
"step": 1242
},
{
"epoch": 1.24,
"learning_rate": 1.3276876922202484e-05,
"loss": 0.5944,
"step": 1243
},
{
"epoch": 1.24,
"learning_rate": 1.3266650640667113e-05,
"loss": 0.5098,
"step": 1244
},
{
"epoch": 1.24,
"learning_rate": 1.3256420533454549e-05,
"loss": 0.6702,
"step": 1245
},
{
"epoch": 1.24,
"learning_rate": 1.3246186612545593e-05,
"loss": 0.6353,
"step": 1246
},
{
"epoch": 1.25,
"learning_rate": 1.3235948889925511e-05,
"loss": 0.5516,
"step": 1247
},
{
"epoch": 1.25,
"learning_rate": 1.3225707377584024e-05,
"loss": 0.521,
"step": 1248
},
{
"epoch": 1.25,
"learning_rate": 1.3215462087515285e-05,
"loss": 0.6003,
"step": 1249
},
{
"epoch": 1.25,
"learning_rate": 1.3205213031717881e-05,
"loss": 0.4752,
"step": 1250
},
{
"epoch": 1.25,
"learning_rate": 1.3194960222194796e-05,
"loss": 0.534,
"step": 1251
},
{
"epoch": 1.25,
"learning_rate": 1.3184703670953425e-05,
"loss": 0.5437,
"step": 1252
},
{
"epoch": 1.25,
"learning_rate": 1.3174443390005529e-05,
"loss": 0.5653,
"step": 1253
},
{
"epoch": 1.25,
"learning_rate": 1.3164179391367253e-05,
"loss": 0.5956,
"step": 1254
},
{
"epoch": 1.25,
"learning_rate": 1.3153911687059084e-05,
"loss": 0.7131,
"step": 1255
},
{
"epoch": 1.25,
"learning_rate": 1.3143640289105855e-05,
"loss": 0.7432,
"step": 1256
},
{
"epoch": 1.26,
"learning_rate": 1.313336520953672e-05,
"loss": 0.7356,
"step": 1257
},
{
"epoch": 1.26,
"learning_rate": 1.3123086460385148e-05,
"loss": 0.57,
"step": 1258
},
{
"epoch": 1.26,
"learning_rate": 1.3112804053688907e-05,
"loss": 0.5759,
"step": 1259
},
{
"epoch": 1.26,
"learning_rate": 1.3102518001490049e-05,
"loss": 0.6925,
"step": 1260
},
{
"epoch": 1.26,
"learning_rate": 1.3092228315834889e-05,
"loss": 0.6121,
"step": 1261
},
{
"epoch": 1.26,
"learning_rate": 1.3081935008774003e-05,
"loss": 0.6483,
"step": 1262
},
{
"epoch": 1.26,
"learning_rate": 1.30716380923622e-05,
"loss": 0.6398,
"step": 1263
},
{
"epoch": 1.26,
"learning_rate": 1.3061337578658533e-05,
"loss": 0.627,
"step": 1264
},
{
"epoch": 1.26,
"learning_rate": 1.3051033479726249e-05,
"loss": 0.577,
"step": 1265
},
{
"epoch": 1.26,
"learning_rate": 1.3040725807632805e-05,
"loss": 0.7611,
"step": 1266
},
{
"epoch": 1.27,
"learning_rate": 1.3030414574449842e-05,
"loss": 0.5717,
"step": 1267
},
{
"epoch": 1.27,
"learning_rate": 1.3020099792253163e-05,
"loss": 0.6522,
"step": 1268
},
{
"epoch": 1.27,
"learning_rate": 1.3009781473122738e-05,
"loss": 0.5659,
"step": 1269
},
{
"epoch": 1.27,
"learning_rate": 1.2999459629142672e-05,
"loss": 0.7753,
"step": 1270
},
{
"epoch": 1.27,
"learning_rate": 1.2989134272401205e-05,
"loss": 0.7436,
"step": 1271
},
{
"epoch": 1.27,
"learning_rate": 1.2978805414990683e-05,
"loss": 0.5635,
"step": 1272
},
{
"epoch": 1.27,
"learning_rate": 1.2968473069007556e-05,
"loss": 0.7438,
"step": 1273
},
{
"epoch": 1.27,
"learning_rate": 1.295813724655236e-05,
"loss": 0.6258,
"step": 1274
},
{
"epoch": 1.27,
"learning_rate": 1.2947797959729702e-05,
"loss": 0.6669,
"step": 1275
},
{
"epoch": 1.27,
"learning_rate": 1.2937455220648244e-05,
"loss": 0.5492,
"step": 1276
},
{
"epoch": 1.28,
"learning_rate": 1.2927109041420694e-05,
"loss": 0.7355,
"step": 1277
},
{
"epoch": 1.28,
"learning_rate": 1.2916759434163784e-05,
"loss": 0.6018,
"step": 1278
},
{
"epoch": 1.28,
"learning_rate": 1.2906406410998269e-05,
"loss": 0.6431,
"step": 1279
},
{
"epoch": 1.28,
"learning_rate": 1.2896049984048897e-05,
"loss": 0.6572,
"step": 1280
},
{
"epoch": 1.28,
"eval_loss": 0.7731580138206482,
"eval_runtime": 88.8257,
"eval_samples_per_second": 22.516,
"eval_steps_per_second": 11.258,
"step": 1280
},
|
{
"epoch": 1.28,
"learning_rate": 1.2885690165444407e-05,
"loss": 0.6016,
"step": 1281
},
{
"epoch": 1.28,
"learning_rate": 1.2875326967317502e-05,
"loss": 0.5719,
"step": 1282
},
{
"epoch": 1.28,
"learning_rate": 1.2864960401804858e-05,
"loss": 0.5386,
"step": 1283
},
{
"epoch": 1.28,
"learning_rate": 1.2854590481047078e-05,
"loss": 0.6155,
"step": 1284
},
{
"epoch": 1.28,
"learning_rate": 1.2844217217188705e-05,
"loss": 0.5936,
"step": 1285
},
{
"epoch": 1.28,
"learning_rate": 1.2833840622378197e-05,
"loss": 0.5908,
"step": 1286
},
{
"epoch": 1.29,
"learning_rate": 1.2823460708767908e-05,
"loss": 0.8534,
"step": 1287
},
{
"epoch": 1.29,
"learning_rate": 1.2813077488514077e-05,
"loss": 0.4497,
"step": 1288
},
{
"epoch": 1.29,
"learning_rate": 1.2802690973776823e-05,
"loss": 0.5683,
"step": 1289
},
{
"epoch": 1.29,
"learning_rate": 1.2792301176720118e-05,
"loss": 0.6186,
"step": 1290
},
{
"epoch": 1.29,
"learning_rate": 1.2781908109511788e-05,
"loss": 0.5901,
"step": 1291
},
{
"epoch": 1.29,
"learning_rate": 1.2771511784323467e-05,
"loss": 0.7563,
"step": 1292
},
{
"epoch": 1.29,
"learning_rate": 1.2761112213330628e-05,
"loss": 0.5137,
"step": 1293
},
{
"epoch": 1.29,
"learning_rate": 1.275070940871253e-05,
"loss": 0.6203,
"step": 1294
},
{
"epoch": 1.29,
"learning_rate": 1.2740303382652229e-05,
"loss": 0.5984,
"step": 1295
},
{
"epoch": 1.29,
"learning_rate": 1.2729894147336543e-05,
"loss": 0.5278,
"step": 1296
},
{
"epoch": 1.3,
"learning_rate": 1.271948171495606e-05,
"loss": 0.6971,
"step": 1297
},
{
"epoch": 1.3,
"learning_rate": 1.2709066097705103e-05,
"loss": 0.5036,
"step": 1298
},
{
"epoch": 1.3,
"learning_rate": 1.2698647307781733e-05,
"loss": 0.5206,
"step": 1299
},
{
"epoch": 1.3,
"learning_rate": 1.2688225357387715e-05,
"loss": 0.767,
"step": 1300
},
{
"epoch": 1.3,
"learning_rate": 1.267780025872853e-05,
"loss": 0.7009,
"step": 1301
},
{
"epoch": 1.3,
"learning_rate": 1.2667372024013335e-05,
"loss": 0.6248,
"step": 1302
},
{
"epoch": 1.3,
"learning_rate": 1.2656940665454967e-05,
"loss": 0.5873,
"step": 1303
},
{
"epoch": 1.3,
"learning_rate": 1.264650619526991e-05,
"loss": 0.5126,
"step": 1304
},
{
"epoch": 1.3,
"learning_rate": 1.2636068625678307e-05,
"loss": 0.7721,
"step": 1305
},
{
"epoch": 1.3,
"learning_rate": 1.262562796890392e-05,
"loss": 0.6494,
"step": 1306
},
{
"epoch": 1.31,
"learning_rate": 1.2615184237174127e-05,
"loss": 0.5494,
"step": 1307
},
{
"epoch": 1.31,
"learning_rate": 1.260473744271992e-05,
"loss": 0.6009,
"step": 1308
},
{
"epoch": 1.31,
"learning_rate": 1.2594287597775859e-05,
"loss": 0.5642,
"step": 1309
},
{
"epoch": 1.31,
"learning_rate": 1.258383471458009e-05,
"loss": 0.4378,
"step": 1310
},
{
"epoch": 1.31,
"learning_rate": 1.2573378805374314e-05,
"loss": 0.8139,
"step": 1311
},
{
"epoch": 1.31,
"learning_rate": 1.2562919882403774e-05,
"loss": 0.7645,
"step": 1312
},
{
"epoch": 1.31,
"learning_rate": 1.2552457957917239e-05,
"loss": 0.59,
"step": 1313
},
{
"epoch": 1.31,
"learning_rate": 1.2541993044167002e-05,
"loss": 0.4468,
"step": 1314
},
{
"epoch": 1.31,
"learning_rate": 1.253152515340885e-05,
"loss": 0.5286,
"step": 1315
},
{
"epoch": 1.31,
"learning_rate": 1.2521054297902062e-05,
"loss": 0.6379,
"step": 1316
},
{
"epoch": 1.32,
"learning_rate": 1.2510580489909383e-05,
"loss": 0.7154,
"step": 1317
},
{
"epoch": 1.32,
"learning_rate": 1.250010374169702e-05,
"loss": 0.5415,
"step": 1318
},
{
"epoch": 1.32,
"learning_rate": 1.2489624065534618e-05,
"loss": 0.6146,
"step": 1319
},
{
"epoch": 1.32,
"learning_rate": 1.247914147369526e-05,
"loss": 0.6026,
"step": 1320
},
{
"epoch": 1.32,
"learning_rate": 1.2468655978455437e-05,
"loss": 0.6495,
"step": 1321
},
{
"epoch": 1.32,
"learning_rate": 1.2458167592095043e-05,
"loss": 0.5561,
"step": 1322
},
{
"epoch": 1.32,
"learning_rate": 1.244767632689735e-05,
"loss": 0.5976,
"step": 1323
},
{
"epoch": 1.32,
"learning_rate": 1.2437182195149018e-05,
"loss": 0.5668,
"step": 1324
},
{
"epoch": 1.32,
"learning_rate": 1.2426685209140048e-05,
"loss": 0.5744,
"step": 1325
},
{
"epoch": 1.32,
"learning_rate": 1.2416185381163795e-05,
"loss": 0.57,
"step": 1326
},
{
"epoch": 1.33,
"learning_rate": 1.2405682723516932e-05,
"loss": 0.7311,
"step": 1327
},
{
"epoch": 1.33,
"learning_rate": 1.2395177248499456e-05,
"loss": 0.4972,
"step": 1328
},
{
"epoch": 1.33,
"learning_rate": 1.2384668968414655e-05,
"loss": 0.5834,
"step": 1329
},
{
"epoch": 1.33,
"learning_rate": 1.2374157895569107e-05,
"loss": 0.4149,
"step": 1330
},
{
"epoch": 1.33,
"learning_rate": 1.236364404227266e-05,
"loss": 0.5383,
"step": 1331
},
{
"epoch": 1.33,
"learning_rate": 1.2353127420838422e-05,
"loss": 0.5928,
"step": 1332
},
{
"epoch": 1.33,
"learning_rate": 1.2342608043582731e-05,
"loss": 0.6482,
"step": 1333
},
{
"epoch": 1.33,
"learning_rate": 1.2332085922825165e-05,
"loss": 0.5729,
"step": 1334
},
{
"epoch": 1.33,
"learning_rate": 1.232156107088851e-05,
"loss": 0.8532,
"step": 1335
},
{
"epoch": 1.33,
"learning_rate": 1.231103350009875e-05,
"loss": 0.6038,
"step": 1336
},
{
"epoch": 1.34,
"learning_rate": 1.2300503222785054e-05,
"loss": 0.5516,
"step": 1337
},
{
"epoch": 1.34,
"learning_rate": 1.2289970251279761e-05,
"loss": 0.6649,
"step": 1338
},
{
"epoch": 1.34,
"learning_rate": 1.227943459791836e-05,
"loss": 0.6155,
"step": 1339
},
{
"epoch": 1.34,
"learning_rate": 1.2268896275039495e-05,
"loss": 0.6069,
"step": 1340
},
{
"epoch": 1.34,
"learning_rate": 1.2258355294984919e-05,
"loss": 0.5592,
"step": 1341
},
{
"epoch": 1.34,
"learning_rate": 1.224781167009951e-05,
"loss": 0.6174,
"step": 1342
},
{
"epoch": 1.34,
"learning_rate": 1.2237265412731237e-05,
"loss": 0.6242,
"step": 1343
},
{
"epoch": 1.34,
"learning_rate": 1.222671653523115e-05,
"loss": 0.4869,
"step": 1344
},
{
"epoch": 1.34,
"eval_loss": 0.7695678472518921,
"eval_runtime": 88.7912,
"eval_samples_per_second": 22.525,
"eval_steps_per_second": 11.262,
"step": 1344
},
|
{
"epoch": 1.34,
"learning_rate": 1.2216165049953374e-05,
"loss": 0.7009,
"step": 1345
},
{
"epoch": 1.34,
"learning_rate": 1.2205610969255085e-05,
"loss": 0.5575,
"step": 1346
},
{
"epoch": 1.35,
"learning_rate": 1.2195054305496499e-05,
"loss": 0.5356,
"step": 1347
},
{
"epoch": 1.35,
"learning_rate": 1.2184495071040851e-05,
"loss": 0.6166,
"step": 1348
},
{
"epoch": 1.35,
"learning_rate": 1.2173933278254402e-05,
"loss": 0.7019,
"step": 1349
},
{
"epoch": 1.35,
"learning_rate": 1.216336893950639e-05,
"loss": 0.7516,
"step": 1350
},
{
"epoch": 1.35,
"learning_rate": 1.2152802067169049e-05,
"loss": 0.6056,
"step": 1351
},
{
"epoch": 1.35,
"learning_rate": 1.2142232673617576e-05,
"loss": 0.6766,
"step": 1352
},
{
"epoch": 1.35,
"learning_rate": 1.2131660771230115e-05,
"loss": 0.5917,
"step": 1353
},
{
"epoch": 1.35,
"learning_rate": 1.2121086372387757e-05,
"loss": 0.6151,
"step": 1354
},
{
"epoch": 1.35,
"learning_rate": 1.2110509489474514e-05,
"loss": 0.7001,
"step": 1355
},
{
"epoch": 1.35,
"learning_rate": 1.2099930134877298e-05,
"loss": 0.7416,
"step": 1356
},
{
"epoch": 1.36,
"learning_rate": 1.2089348320985935e-05,
"loss": 0.6118,
"step": 1357
},
{
"epoch": 1.36,
"learning_rate": 1.207876406019311e-05,
"loss": 0.5516,
"step": 1358
},
{
"epoch": 1.36,
"learning_rate": 1.2068177364894388e-05,
"loss": 0.6856,
"step": 1359
},
{
"epoch": 1.36,
"learning_rate": 1.2057588247488179e-05,
"loss": 0.8338,
"step": 1360
},
{
"epoch": 1.36,
"learning_rate": 1.2046996720375732e-05,
"loss": 0.6367,
"step": 1361
},
{
"epoch": 1.36,
"learning_rate": 1.2036402795961117e-05,
"loss": 0.675,
"step": 1362
},
{
"epoch": 1.36,
"learning_rate": 1.2025806486651212e-05,
"loss": 0.6259,
"step": 1363
},
{
"epoch": 1.36,
"learning_rate": 1.2015207804855686e-05,
"loss": 0.7106,
"step": 1364
},
{
"epoch": 1.36,
"learning_rate": 1.200460676298699e-05,
"loss": 0.6907,
"step": 1365
},
{
"epoch": 1.36,
"learning_rate": 1.1994003373460337e-05,
"loss": 0.63,
"step": 1366
},
{
"epoch": 1.37,
"learning_rate": 1.1983397648693687e-05,
"loss": 0.7309,
"step": 1367
},
{
"epoch": 1.37,
"learning_rate": 1.1972789601107742e-05,
"loss": 0.5566,
"step": 1368
},
{
"epoch": 1.37,
"learning_rate": 1.1962179243125918e-05,
"loss": 0.7016,
"step": 1369
},
{
"epoch": 1.37,
"learning_rate": 1.1951566587174335e-05,
"loss": 0.6313,
"step": 1370
},
{
"epoch": 1.37,
"learning_rate": 1.1940951645681814e-05,
"loss": 0.5732,
"step": 1371
},
{
"epoch": 1.37,
"learning_rate": 1.193033443107984e-05,
"loss": 0.626,
"step": 1372
},
{
"epoch": 1.37,
"learning_rate": 1.1919714955802573e-05,
"loss": 0.5893,
"step": 1373
},
{
"epoch": 1.37,
"learning_rate": 1.1909093232286809e-05,
"loss": 0.5866,
"step": 1374
},
{
"epoch": 1.37,
"learning_rate": 1.1898469272971985e-05,
"loss": 0.6023,
"step": 1375
},
{
"epoch": 1.37,
"learning_rate": 1.1887843090300149e-05,
"loss": 0.5154,
"step": 1376
},
{
"epoch": 1.38,
"learning_rate": 1.1877214696715963e-05,
"loss": 0.6451,
"step": 1377
},
{
"epoch": 1.38,
"learning_rate": 1.1866584104666672e-05,
"loss": 0.5227,
"step": 1378
},
{
"epoch": 1.38,
"learning_rate": 1.1855951326602095e-05,
"loss": 0.6644,
"step": 1379
},
{
"epoch": 1.38,
"learning_rate": 1.184531637497461e-05,
"loss": 0.6916,
"step": 1380
},
{
"epoch": 1.38,
"learning_rate": 1.1834679262239149e-05,
"loss": 0.6205,
"step": 1381
},
{
"epoch": 1.38,
"learning_rate": 1.182404000085316e-05,
"loss": 0.6818,
"step": 1382
},
{
"epoch": 1.38,
"learning_rate": 1.1813398603276625e-05,
"loss": 0.6786,
"step": 1383
},
{
"epoch": 1.38,
"learning_rate": 1.1802755081972015e-05,
"loss": 0.6402,
"step": 1384
},
{
"epoch": 1.38,
"learning_rate": 1.1792109449404292e-05,
"loss": 0.5516,
"step": 1385
},
{
"epoch": 1.38,
"learning_rate": 1.1781461718040892e-05,
"loss": 0.6143,
"step": 1386
},
{
"epoch": 1.39,
"learning_rate": 1.1770811900351704e-05,
"loss": 0.5959,
"step": 1387
},
{
"epoch": 1.39,
"learning_rate": 1.1760160008809073e-05,
"loss": 0.5255,
"step": 1388
},
{
"epoch": 1.39,
"learning_rate": 1.1749506055887754e-05,
"loss": 0.5965,
"step": 1389
},
{
"epoch": 1.39,
"learning_rate": 1.1738850054064933e-05,
"loss": 0.565,
"step": 1390
},
{
"epoch": 1.39,
"learning_rate": 1.1728192015820179e-05,
"loss": 0.5639,
"step": 1391
},
{
"epoch": 1.39,
"learning_rate": 1.1717531953635468e-05,
"loss": 0.5898,
"step": 1392
},
{
"epoch": 1.39,
"learning_rate": 1.1706869879995126e-05,
"loss": 0.7147,
"step": 1393
},
{
"epoch": 1.39,
"learning_rate": 1.1696205807385846e-05,
"loss": 0.7228,
"step": 1394
},
{
"epoch": 1.39,
"learning_rate": 1.1685539748296653e-05,
"loss": 0.7181,
"step": 1395
},
{
"epoch": 1.39,
"learning_rate": 1.1674871715218908e-05,
"loss": 0.5916,
"step": 1396
},
{
"epoch": 1.4,
"learning_rate": 1.1664201720646277e-05,
"loss": 0.5727,
"step": 1397
},
{
"epoch": 1.4,
"learning_rate": 1.1653529777074733e-05,
"loss": 0.5375,
"step": 1398
},
{
"epoch": 1.4,
"learning_rate": 1.1642855897002516e-05,
"loss": 0.5874,
"step": 1399
},
{
"epoch": 1.4,
"learning_rate": 1.1632180092930147e-05,
"loss": 0.6945,
"step": 1400
},
{
"epoch": 1.4,
"learning_rate": 1.1621502377360392e-05,
"loss": 0.6145,
"step": 1401
},
{
"epoch": 1.4,
"learning_rate": 1.1610822762798264e-05,
"loss": 0.7252,
"step": 1402
},
{
"epoch": 1.4,
"learning_rate": 1.1600141261750992e-05,
"loss": 0.6266,
"step": 1403
},
{
"epoch": 1.4,
"learning_rate": 1.1589457886728017e-05,
"loss": 0.5836,
"step": 1404
},
{
"epoch": 1.4,
"learning_rate": 1.1578772650240975e-05,
"loss": 0.5595,
"step": 1405
},
{
"epoch": 1.4,
"learning_rate": 1.1568085564803684e-05,
"loss": 0.569,
"step": 1406
},
{
"epoch": 1.41,
"learning_rate": 1.1557396642932123e-05,
"loss": 0.5619,
"step": 1407
},
{
"epoch": 1.41,
"learning_rate": 1.154670589714443e-05,
"loss": 0.6385,
"step": 1408
},
{
"epoch": 1.41,
"eval_loss": 0.7667235136032104,
"eval_runtime": 88.7544,
"eval_samples_per_second": 22.534,
"eval_steps_per_second": 11.267,
"step": 1408
},
|
{
"epoch": 1.41,
"learning_rate": 1.1536013339960868e-05,
"loss": 0.5646,
"step": 1409
},
{
"epoch": 1.41,
"learning_rate": 1.1525318983903826e-05,
"loss": 0.4848,
"step": 1410
},
{
"epoch": 1.41,
"learning_rate": 1.15146228414978e-05,
"loss": 0.6505,
"step": 1411
},
{
"epoch": 1.41,
"learning_rate": 1.1503924925269385e-05,
"loss": 0.7031,
"step": 1412
},
{
"epoch": 1.41,
"learning_rate": 1.1493225247747237e-05,
"loss": 0.7346,
"step": 1413
},
{
"epoch": 1.41,
"learning_rate": 1.1482523821462097e-05,
"loss": 0.7447,
"step": 1414
},
{
"epoch": 1.41,
"learning_rate": 1.1471820658946729e-05,
"loss": 0.5145,
"step": 1415
},
{
"epoch": 1.41,
"learning_rate": 1.1461115772735949e-05,
"loss": 0.58,
"step": 1416
},
{
"epoch": 1.42,
"learning_rate": 1.1450409175366584e-05,
"loss": 0.5342,
"step": 1417
},
{
"epoch": 1.42,
"learning_rate": 1.143970087937747e-05,
"loss": 0.7084,
"step": 1418
},
{
"epoch": 1.42,
"learning_rate": 1.1428990897309424e-05,
"loss": 0.6313,
"step": 1419
},
{
"epoch": 1.42,
"learning_rate": 1.1418279241705244e-05,
"loss": 0.5863,
"step": 1420
},
{
"epoch": 1.42,
"learning_rate": 1.140756592510969e-05,
"loss": 0.6794,
"step": 1421
},
{
"epoch": 1.42,
"learning_rate": 1.1396850960069456e-05,
"loss": 0.6021,
"step": 1422
},
{
"epoch": 1.42,
"learning_rate": 1.1386134359133183e-05,
"loss": 0.5103,
"step": 1423
},
{
"epoch": 1.42,
"learning_rate": 1.1375416134851413e-05,
"loss": 0.5243,
"step": 1424
},
{
"epoch": 1.42,
"learning_rate": 1.1364696299776599e-05,
"loss": 0.6185,
"step": 1425
},
{
"epoch": 1.42,
"learning_rate": 1.1353974866463069e-05,
"loss": 0.6217,
"step": 1426
},
{
"epoch": 1.43,
"learning_rate": 1.1343251847467041e-05,
"loss": 0.6739,
"step": 1427
},
{
"epoch": 1.43,
"learning_rate": 1.1332527255346572e-05,
"loss": 0.6818,
"step": 1428
},
{
"epoch": 1.43,
"learning_rate": 1.1321801102661576e-05,
"loss": 0.6843,
"step": 1429
},
{
"epoch": 1.43,
"learning_rate": 1.1311073401973775e-05,
"loss": 0.5697,
"step": 1430
},
{
"epoch": 1.43,
"learning_rate": 1.130034416584673e-05,
"loss": 0.733,
"step": 1431
},
{
"epoch": 1.43,
"learning_rate": 1.1289613406845776e-05,
"loss": 0.5466,
"step": 1432
},
{
"epoch": 1.43,
"learning_rate": 1.1278881137538052e-05,
"loss": 0.633,
"step": 1433
},
{
"epoch": 1.43,
"learning_rate": 1.1268147370492447e-05,
"loss": 0.6818,
"step": 1434
},
{
"epoch": 1.43,
"learning_rate": 1.1257412118279616e-05,
"loss": 0.5666,
"step": 1435
},
{
"epoch": 1.43,
"learning_rate": 1.1246675393471946e-05,
"loss": 0.6373,
"step": 1436
},
{
"epoch": 1.44,
"learning_rate": 1.1235937208643564e-05,
"loss": 0.7219,
"step": 1437
},
{
"epoch": 1.44,
"learning_rate": 1.1225197576370282e-05,
"loss": 0.7533,
"step": 1438
},
{
"epoch": 1.44,
"learning_rate": 1.121445650922963e-05,
"loss": 0.6122,
"step": 1439
},
{
"epoch": 1.44,
"learning_rate": 1.1203714019800807e-05,
"loss": 0.5603,
"step": 1440
},
{
"epoch": 1.44,
"learning_rate": 1.1192970120664679e-05,
"loss": 0.5279,
"step": 1441
},
{
"epoch": 1.44,
"learning_rate": 1.1182224824403763e-05,
"loss": 0.7464,
"step": 1442
},
{
"epoch": 1.44,
"learning_rate": 1.1171478143602215e-05,
"loss": 0.6312,
"step": 1443
},
{
"epoch": 1.44,
"learning_rate": 1.116073009084581e-05,
"loss": 0.659,
"step": 1444
},
{
"epoch": 1.44,
"learning_rate": 1.114998067872193e-05,
"loss": 0.6779,
"step": 1445
},
{
"epoch": 1.44,
"learning_rate": 1.1139229919819545e-05,
"loss": 0.5554,
"step": 1446
},
{
"epoch": 1.45,
"learning_rate": 1.1128477826729214e-05,
"loss": 0.5,
"step": 1447
},
{
"epoch": 1.45,
"learning_rate": 1.1117724412043046e-05,
"loss": 0.5753,
"step": 1448
},
{
"epoch": 1.45,
"learning_rate": 1.1106969688354705e-05,
"loss": 0.6264,
"step": 1449
},
{
"epoch": 1.45,
"learning_rate": 1.1096213668259386e-05,
"loss": 0.6996,
"step": 1450
},
{
"epoch": 1.45,
"learning_rate": 1.1085456364353801e-05,
"loss": 0.5337,
"step": 1451
},
{
"epoch": 1.45,
"learning_rate": 1.1074697789236167e-05,
"loss": 0.6704,
"step": 1452
},
{
"epoch": 1.45,
"learning_rate": 1.1063937955506188e-05,
"loss": 0.6089,
"step": 1453
},
{
"epoch": 1.45,
"learning_rate": 1.1053176875765042e-05,
"loss": 0.6685,
"step": 1454
},
{
"epoch": 1.45,
"learning_rate": 1.1042414562615375e-05,
"loss": 0.562,
"step": 1455
},
{
"epoch": 1.45,
"learning_rate": 1.1031651028661261e-05,
"loss": 0.5573,
"step": 1456
},
{
"epoch": 1.46,
"learning_rate": 1.1020886286508218e-05,
"loss": 0.6688,
"step": 1457
},
{
"epoch": 1.46,
"learning_rate": 1.1010120348763171e-05,
"loss": 0.5279,
"step": 1458
},
{
"epoch": 1.46,
"learning_rate": 1.099935322803445e-05,
"loss": 0.6145,
"step": 1459
},
{
"epoch": 1.46,
"learning_rate": 1.098858493693177e-05,
"loss": 0.5528,
"step": 1460
},
{
"epoch": 1.46,
"learning_rate": 1.097781548806621e-05,
"loss": 0.5533,
"step": 1461
},
{
"epoch": 1.46,
"learning_rate": 1.0967044894050214e-05,
"loss": 0.7398,
"step": 1462
},
{
"epoch": 1.46,
"learning_rate": 1.095627316749756e-05,
"loss": 0.5449,
"step": 1463
},
{
"epoch": 1.46,
"learning_rate": 1.094550032102336e-05,
"loss": 0.5651,
"step": 1464
},
{
"epoch": 1.46,
"learning_rate": 1.0934726367244026e-05,
"loss": 0.7718,
"step": 1465
},
{
"epoch": 1.46,
"learning_rate": 1.0923951318777282e-05,
"loss": 0.5764,
"step": 1466
},
{
"epoch": 1.47,
"learning_rate": 1.0913175188242118e-05,
"loss": 0.568,
"step": 1467
},
{
"epoch": 1.47,
"learning_rate": 1.0902397988258804e-05,
"loss": 0.5253,
"step": 1468
},
{
"epoch": 1.47,
"learning_rate": 1.0891619731448855e-05,
"loss": 0.5567,
"step": 1469
},
{
"epoch": 1.47,
"learning_rate": 1.0880840430435029e-05,
"loss": 0.5784,
"step": 1470
},
{
"epoch": 1.47,
"learning_rate": 1.08700600978413e-05,
"loss": 0.6781,
"step": 1471
},
{
"epoch": 1.47,
"learning_rate": 1.0859278746292858e-05,
"loss": 0.5973,
"step": 1472
},
{
"epoch": 1.47,
"eval_loss": 0.7631298303604126,
"eval_runtime": 88.8206,
"eval_samples_per_second": 22.517,
"eval_steps_per_second": 11.259,
"step": 1472
},
|
{
"epoch": 1.47,
"learning_rate": 1.0848496388416079e-05,
"loss": 0.6337,
"step": 1473
},
{
"epoch": 1.47,
"learning_rate": 1.0837713036838524e-05,
"loss": 0.6297,
"step": 1474
},
{
"epoch": 1.47,
"learning_rate": 1.0826928704188914e-05,
"loss": 0.6019,
"step": 1475
},
{
"epoch": 1.47,
"learning_rate": 1.081614340309712e-05,
"loss": 0.5909,
"step": 1476
},
{
"epoch": 1.48,
"learning_rate": 1.0805357146194143e-05,
"loss": 0.7228,
"step": 1477
},
{
"epoch": 1.48,
"learning_rate": 1.0794569946112114e-05,
"loss": 0.6465,
"step": 1478
},
{
"epoch": 1.48,
"learning_rate": 1.0783781815484258e-05,
"loss": 0.7455,
"step": 1479
},
{
"epoch": 1.48,
"learning_rate": 1.0772992766944894e-05,
"loss": 0.6007,
"step": 1480
},
{
"epoch": 1.48,
"learning_rate": 1.0762202813129419e-05,
"loss": 0.5761,
"step": 1481
},
{
"epoch": 1.48,
"learning_rate": 1.0751411966674281e-05,
"loss": 0.6086,
"step": 1482
},
{
"epoch": 1.48,
"learning_rate": 1.0740620240216982e-05,
"loss": 0.5995,
"step": 1483
},
{
"epoch": 1.48,
"learning_rate": 1.0729827646396054e-05,
"loss": 0.6325,
"step": 1484
},
{
"epoch": 1.48,
"learning_rate": 1.071903419785104e-05,
"loss": 0.7608,
"step": 1485
},
{
"epoch": 1.48,
"learning_rate": 1.0708239907222487e-05,
"loss": 0.5423,
"step": 1486
},
{
"epoch": 1.49,
"learning_rate": 1.0697444787151927e-05,
"loss": 0.7455,
"step": 1487
},
{
"epoch": 1.49,
"learning_rate": 1.0686648850281869e-05,
"loss": 0.6594,
"step": 1488
},
{
"epoch": 1.49,
"learning_rate": 1.0675852109255767e-05,
"loss": 0.7232,
"step": 1489
},
{
"epoch": 1.49,
"learning_rate": 1.0665054576718028e-05,
"loss": 0.5721,
"step": 1490
},
{
"epoch": 1.49,
"learning_rate": 1.0654256265313982e-05,
"loss": 0.6063,
"step": 1491
},
{
"epoch": 1.49,
"learning_rate": 1.064345718768987e-05,
"loss": 0.5577,
"step": 1492
},
{
"epoch": 1.49,
"learning_rate": 1.0632657356492828e-05,
"loss": 0.5984,
"step": 1493
},
{
"epoch": 1.49,
"learning_rate": 1.0621856784370883e-05,
"loss": 0.597,
"step": 1494
},
{
"epoch": 1.49,
"learning_rate": 1.0611055483972916e-05,
"loss": 0.7085,
"step": 1495
},
{
"epoch": 1.49,
"learning_rate": 1.060025346794868e-05,
"loss": 0.5722,
"step": 1496
},
{
"epoch": 1.5,
"learning_rate": 1.0589450748948749e-05,
"loss": 0.6626,
"step": 1497
},
{
"epoch": 1.5,
"learning_rate": 1.0578647339624526e-05,
"loss": 0.5803,
"step": 1498
},
{
"epoch": 1.5,
"learning_rate": 1.0567843252628225e-05,
"loss": 0.5419,
"step": 1499
},
{
"epoch": 1.5,
"learning_rate": 1.0557038500612853e-05,
"loss": 0.5597,
"step": 1500
},
{
"epoch": 1.5,
"learning_rate": 1.0546233096232192e-05,
"loss": 0.5009,
"step": 1501
},
{
"epoch": 1.5,
"learning_rate": 1.053542705214079e-05,
"loss": 0.6316,
"step": 1502
},
{
"epoch": 1.5,
"learning_rate": 1.0524620380993946e-05,
"loss": 0.6555,
"step": 1503
},
{
"epoch": 1.5,
"learning_rate": 1.0513813095447694e-05,
"loss": 0.64,
"step": 1504
},
{
"epoch": 1.5,
"learning_rate": 1.0503005208158783e-05,
"loss": 0.6681,
"step": 1505
},
{
"epoch": 1.5,
"learning_rate": 1.049219673178467e-05,
"loss": 0.5214,
"step": 1506
},
{
"epoch": 1.51,
"learning_rate": 1.0481387678983503e-05,
"loss": 0.6576,
"step": 1507
},
{
"epoch": 1.51,
"learning_rate": 1.0470578062414098e-05,
"loss": 0.6077,
"step": 1508
},
{
"epoch": 1.51,
"learning_rate": 1.0459767894735945e-05,
"loss": 0.6132,
"step": 1509
},
{
"epoch": 1.51,
"learning_rate": 1.0448957188609162e-05,
|
"loss": 0.5449, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 1.0438145956694514e-05, |
|
"loss": 0.6, |
|
"step": 1511 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 1.042733421165337e-05, |
|
"loss": 0.5882, |
|
"step": 1512 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 1.0416521966147706e-05, |
|
"loss": 0.6499, |
|
"step": 1513 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 1.040570923284008e-05, |
|
"loss": 0.4968, |
|
"step": 1514 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 1.0394896024393626e-05, |
|
"loss": 0.5561, |
|
"step": 1515 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 1.0384082353472033e-05, |
|
"loss": 0.6406, |
|
"step": 1516 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 1.0373268232739531e-05, |
|
"loss": 0.6541, |
|
"step": 1517 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 1.036245367486087e-05, |
|
"loss": 0.6876, |
|
"step": 1518 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 1.0351638692501327e-05, |
|
"loss": 0.5352, |
|
"step": 1519 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 1.0340823298326663e-05, |
|
"loss": 0.5774, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 1.0330007505003127e-05, |
|
"loss": 0.5215, |
|
"step": 1521 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 1.0319191325197436e-05, |
|
"loss": 0.5971, |
|
"step": 1522 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 1.0308374771576755e-05, |
|
"loss": 0.6049, |
|
"step": 1523 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 1.0297557856808689e-05, |
|
"loss": 0.6297, |
|
"step": 1524 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 1.028674059356127e-05, |
|
"loss": 0.6259, |
|
"step": 1525 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 1.0275922994502936e-05, |
|
"loss": 0.6841, |
|
"step": 1526 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 1.0265105072302512e-05, |
|
"loss": 0.6333, |
|
"step": 1527 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 1.0254286839629211e-05, |
|
"loss": 0.5563, |
|
"step": 1528 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 1.0243468309152603e-05, |
|
"loss": 0.5459, |
|
"step": 1529 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 1.0232649493542605e-05, |
|
"loss": 0.5272, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 1.0221830405469481e-05, |
|
"loss": 0.5555, |
|
"step": 1531 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 1.02110110576038e-05, |
|
"loss": 0.6171, |
|
"step": 1532 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 1.0200191462616437e-05, |
|
"loss": 0.5179, |
|
"step": 1533 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 1.0189371633178567e-05, |
|
"loss": 0.6832, |
|
"step": 1534 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 1.0178551581961622e-05, |
|
"loss": 0.565, |
|
"step": 1535 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 1.0167731321637315e-05, |
|
"loss": 0.6059, |
|
"step": 1536 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"eval_loss": 0.7608674168586731, |
|
"eval_runtime": 88.7412, |
|
"eval_samples_per_second": 22.537, |
|
"eval_steps_per_second": 11.269, |
|
"step": 1536 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 1.0156910864877587e-05, |
|
"loss": 0.573, |
|
"step": 1537 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 1.0146090224354617e-05, |
|
"loss": 0.4602, |
|
"step": 1538 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 1.0135269412740791e-05, |
|
"loss": 0.581, |
|
"step": 1539 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 1.012444844270871e-05, |
|
"loss": 0.6084, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 1.0113627326931148e-05, |
|
"loss": 0.4998, |
|
"step": 1541 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 1.0102806078081053e-05, |
|
"loss": 0.6679, |
|
"step": 1542 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 1.0091984708831528e-05, |
|
"loss": 0.6294, |
|
"step": 1543 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 1.0081163231855825e-05, |
|
"loss": 0.5248, |
|
"step": 1544 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 1.0070341659827305e-05, |
|
"loss": 0.6199, |
|
"step": 1545 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 1.0059520005419463e-05, |
|
"loss": 0.5435, |
|
"step": 1546 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 1.0048698281305871e-05, |
|
"loss": 0.7399, |
|
"step": 1547 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 1.003787650016019e-05, |
|
"loss": 0.7372, |
|
"step": 1548 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 1.0027054674656152e-05, |
|
"loss": 0.7732, |
|
"step": 1549 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 1.0016232817467535e-05, |
|
"loss": 0.5891, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 1.0005410941268153e-05, |
|
"loss": 0.5504, |
|
"step": 1551 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 9.994589058731849e-06, |
|
"loss": 0.5565, |
|
"step": 1552 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 9.983767182532468e-06, |
|
"loss": 0.7935, |
|
"step": 1553 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 9.972945325343851e-06, |
|
"loss": 0.5711, |
|
"step": 1554 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 9.962123499839813e-06, |
|
"loss": 0.5962, |
|
"step": 1555 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 9.951301718694134e-06, |
|
"loss": 0.634, |
|
"step": 1556 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 9.94047999458054e-06, |
|
"loss": 0.5461, |
|
"step": 1557 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 9.929658340172698e-06, |
|
"loss": 0.686, |
|
"step": 1558 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 9.918836768144182e-06, |
|
"loss": 0.6784, |
|
"step": 1559 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 9.908015291168472e-06, |
|
"loss": 0.8519, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 9.89719392191895e-06, |
|
"loss": 0.8432, |
|
"step": 1561 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 9.886372673068856e-06, |
|
"loss": 0.6555, |
|
"step": 1562 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 9.875551557291296e-06, |
|
"loss": 0.6079, |
|
"step": 1563 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 9.864730587259209e-06, |
|
"loss": 0.5165, |
|
"step": 1564 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 9.853909775645387e-06, |
|
"loss": 0.7481, |
|
"step": 1565 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 9.843089135122415e-06, |
|
"loss": 0.5803, |
|
"step": 1566 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 9.832268678362687e-06, |
|
"loss": 0.7437, |
|
"step": 1567 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 9.821448418038378e-06, |
|
"loss": 0.8093, |
|
"step": 1568 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 9.810628366821437e-06, |
|
"loss": 0.5449, |
|
"step": 1569 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 9.799808537383566e-06, |
|
"loss": 0.5401, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 9.788988942396204e-06, |
|
"loss": 0.5308, |
|
"step": 1571 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 9.778169594530522e-06, |
|
"loss": 0.775, |
|
"step": 1572 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 9.767350506457397e-06, |
|
"loss": 0.5956, |
|
"step": 1573 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 9.756531690847402e-06, |
|
"loss": 0.6567, |
|
"step": 1574 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 9.74571316037079e-06, |
|
"loss": 0.5888, |
|
"step": 1575 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 9.73489492769749e-06, |
|
"loss": 0.6409, |
|
"step": 1576 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 9.724077005497067e-06, |
|
"loss": 0.6608, |
|
"step": 1577 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 9.713259406438734e-06, |
|
"loss": 0.7137, |
|
"step": 1578 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 9.702442143191311e-06, |
|
"loss": 0.8226, |
|
"step": 1579 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 9.691625228423248e-06, |
|
"loss": 0.7155, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 9.680808674802568e-06, |
|
"loss": 0.5869, |
|
"step": 1581 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 9.669992494996876e-06, |
|
"loss": 0.5947, |
|
"step": 1582 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 9.659176701673339e-06, |
|
"loss": 0.7556, |
|
"step": 1583 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 9.648361307498675e-06, |
|
"loss": 0.5863, |
|
"step": 1584 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 9.637546325139133e-06, |
|
"loss": 0.533, |
|
"step": 1585 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 9.626731767260474e-06, |
|
"loss": 0.5881, |
|
"step": 1586 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 9.615917646527969e-06, |
|
"loss": 0.5451, |
|
"step": 1587 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 9.605103975606376e-06, |
|
"loss": 0.5595, |
|
"step": 1588 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 9.594290767159924e-06, |
|
"loss": 0.5347, |
|
"step": 1589 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 9.583478033852299e-06, |
|
"loss": 0.5816, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 9.572665788346632e-06, |
|
"loss": 0.6841, |
|
"step": 1591 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 9.561854043305488e-06, |
|
"loss": 0.5027, |
|
"step": 1592 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 9.551042811390841e-06, |
|
"loss": 0.5617, |
|
"step": 1593 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 9.54023210526406e-06, |
|
"loss": 0.6192, |
|
"step": 1594 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 9.529421937585902e-06, |
|
"loss": 0.747, |
|
"step": 1595 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 9.5186123210165e-06, |
|
"loss": 0.6287, |
|
"step": 1596 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 9.507803268215331e-06, |
|
"loss": 0.5114, |
|
"step": 1597 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 9.49699479184122e-06, |
|
"loss": 0.566, |
|
"step": 1598 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 9.486186904552308e-06, |
|
"loss": 0.493, |
|
"step": 1599 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 9.475379619006055e-06, |
|
"loss": 0.6974, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"eval_loss": 0.7587617039680481, |
|
"eval_runtime": 88.8538, |
|
"eval_samples_per_second": 22.509, |
|
"eval_steps_per_second": 11.254, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 9.464572947859213e-06, |
|
"loss": 0.6029, |
|
"step": 1601 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 9.453766903767813e-06, |
|
"loss": 0.5481, |
|
"step": 1602 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 9.44296149938715e-06, |
|
"loss": 0.6451, |
|
"step": 1603 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 9.432156747371778e-06, |
|
"loss": 0.6104, |
|
"step": 1604 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 9.421352660375478e-06, |
|
"loss": 0.4375, |
|
"step": 1605 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 9.410549251051256e-06, |
|
"loss": 0.6665, |
|
"step": 1606 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 9.39974653205132e-06, |
|
"loss": 0.5493, |
|
"step": 1607 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 9.388944516027086e-06, |
|
"loss": 0.5839, |
|
"step": 1608 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 9.378143215629123e-06, |
|
"loss": 0.6281, |
|
"step": 1609 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 9.367342643507172e-06, |
|
"loss": 0.5644, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 9.356542812310132e-06, |
|
"loss": 0.5767, |
|
"step": 1611 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 9.345743734686021e-06, |
|
"loss": 0.5628, |
|
"step": 1612 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 9.334945423281973e-06, |
|
"loss": 0.6156, |
|
"step": 1613 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 9.324147890744235e-06, |
|
"loss": 0.5009, |
|
"step": 1614 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 9.313351149718134e-06, |
|
"loss": 0.47, |
|
"step": 1615 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 9.302555212848075e-06, |
|
"loss": 0.6736, |
|
"step": 1616 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 9.291760092777518e-06, |
|
"loss": 0.6041, |
|
"step": 1617 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 9.280965802148963e-06, |
|
"loss": 0.6009, |
|
"step": 1618 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 9.27017235360395e-06, |
|
"loss": 0.5804, |
|
"step": 1619 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 9.259379759783022e-06, |
|
"loss": 0.6353, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 9.248588033325724e-06, |
|
"loss": 0.6637, |
|
"step": 1621 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 9.237797186870583e-06, |
|
"loss": 0.7938, |
|
"step": 1622 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 9.227007233055107e-06, |
|
"loss": 0.6423, |
|
"step": 1623 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 9.216218184515746e-06, |
|
"loss": 0.5514, |
|
"step": 1624 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 9.20543005388789e-06, |
|
"loss": 0.5953, |
|
"step": 1625 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 9.194642853805856e-06, |
|
"loss": 0.6358, |
|
"step": 1626 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 9.183856596902883e-06, |
|
"loss": 0.7492, |
|
"step": 1627 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 9.17307129581109e-06, |
|
"loss": 0.618, |
|
"step": 1628 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 9.162286963161478e-06, |
|
"loss": 0.6069, |
|
"step": 1629 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 9.151503611583923e-06, |
|
"loss": 0.648, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 9.140721253707145e-06, |
|
"loss": 0.5628, |
|
"step": 1631 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 9.129939902158702e-06, |
|
"loss": 0.4935, |
|
"step": 1632 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 9.119159569564976e-06, |
|
"loss": 0.5897, |
|
"step": 1633 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 9.108380268551146e-06, |
|
"loss": 0.5328, |
|
"step": 1634 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 9.0976020117412e-06, |
|
"loss": 0.4907, |
|
"step": 1635 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 9.086824811757886e-06, |
|
"loss": 0.6008, |
|
"step": 1636 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 9.076048681222723e-06, |
|
"loss": 0.5307, |
|
"step": 1637 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 9.065273632755976e-06, |
|
"loss": 0.5802, |
|
"step": 1638 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 9.054499678976645e-06, |
|
"loss": 0.5346, |
|
"step": 1639 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 9.043726832502443e-06, |
|
"loss": 0.6459, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 9.032955105949791e-06, |
|
"loss": 0.6614, |
|
"step": 1641 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 9.022184511933792e-06, |
|
"loss": 0.6337, |
|
"step": 1642 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 9.011415063068233e-06, |
|
"loss": 0.6669, |
|
"step": 1643 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 9.000646771965551e-06, |
|
"loss": 0.566, |
|
"step": 1644 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 8.989879651236834e-06, |
|
"loss": 0.5571, |
|
"step": 1645 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 8.979113713491785e-06, |
|
"loss": 0.5685, |
|
"step": 1646 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 8.968348971338742e-06, |
|
"loss": 0.6185, |
|
"step": 1647 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 8.95758543738463e-06, |
|
"loss": 0.5125, |
|
"step": 1648 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 8.94682312423496e-06, |
|
"loss": 0.6122, |
|
"step": 1649 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 8.936062044493816e-06, |
|
"loss": 0.7062, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 8.925302210763838e-06, |
|
"loss": 0.6403, |
|
"step": 1651 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 8.914543635646204e-06, |
|
"loss": 0.5356, |
|
"step": 1652 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 8.903786331740616e-06, |
|
"loss": 0.542, |
|
"step": 1653 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 8.893030311645297e-06, |
|
"loss": 0.7083, |
|
"step": 1654 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 8.882275587956956e-06, |
|
"loss": 0.5536, |
|
"step": 1655 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 8.871522173270791e-06, |
|
"loss": 0.6085, |
|
"step": 1656 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 8.860770080180455e-06, |
|
"loss": 0.8508, |
|
"step": 1657 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 8.850019321278074e-06, |
|
"loss": 0.627, |
|
"step": 1658 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 8.839269909154193e-06, |
|
"loss": 0.648, |
|
"step": 1659 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 8.82852185639779e-06, |
|
"loss": 0.6204, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 8.817775175596237e-06, |
|
"loss": 0.6613, |
|
"step": 1661 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 8.807029879335323e-06, |
|
"loss": 0.5378, |
|
"step": 1662 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 8.796285980199195e-06, |
|
"loss": 0.5844, |
|
"step": 1663 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 8.785543490770371e-06, |
|
"loss": 0.6088, |
|
"step": 1664 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"eval_loss": 0.7548692226409912, |
|
"eval_runtime": 88.8179, |
|
"eval_samples_per_second": 22.518, |
|
"eval_steps_per_second": 11.259, |
|
"step": 1664 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 8.774802423629718e-06, |
|
"loss": 0.6722, |
|
"step": 1665 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 8.76406279135644e-06, |
|
"loss": 0.5064, |
|
"step": 1666 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 8.753324606528056e-06, |
|
"loss": 0.669, |
|
"step": 1667 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 8.742587881720391e-06, |
|
"loss": 0.4997, |
|
"step": 1668 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 8.731852629507557e-06, |
|
"loss": 0.5546, |
|
"step": 1669 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 8.721118862461952e-06, |
|
"loss": 0.6507, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 8.710386593154226e-06, |
|
"loss": 0.6357, |
|
"step": 1671 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 8.699655834153275e-06, |
|
"loss": 0.5246, |
|
"step": 1672 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 8.688926598026223e-06, |
|
"loss": 0.5624, |
|
"step": 1673 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 8.678198897338428e-06, |
|
"loss": 0.7684, |
|
"step": 1674 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 8.667472744653432e-06, |
|
"loss": 0.7572, |
|
"step": 1675 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 8.656748152532964e-06, |
|
"loss": 0.626, |
|
"step": 1676 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 8.646025133536931e-06, |
|
"loss": 0.5223, |
|
"step": 1677 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 8.635303700223405e-06, |
|
"loss": 0.6707, |
|
"step": 1678 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 8.62458386514859e-06, |
|
"loss": 0.5932, |
|
"step": 1679 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 8.61386564086682e-06, |
|
"loss": 0.6159, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 8.603149039930546e-06, |
|
"loss": 0.5853, |
|
"step": 1681 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 8.592434074890315e-06, |
|
"loss": 0.6028, |
|
"step": 1682 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 8.581720758294758e-06, |
|
"loss": 0.7374, |
|
"step": 1683 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 8.571009102690579e-06, |
|
"loss": 0.5373, |
|
"step": 1684 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 8.560299120622532e-06, |
|
"loss": 0.692, |
|
"step": 1685 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 8.549590824633418e-06, |
|
"loss": 0.5999, |
|
"step": 1686 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 8.538884227264054e-06, |
|
"loss": 0.5304, |
|
"step": 1687 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 8.528179341053273e-06, |
|
"loss": 0.6552, |
|
"step": 1688 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 8.517476178537905e-06, |
|
"loss": 0.6678, |
|
"step": 1689 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 8.506774752252764e-06, |
|
"loss": 0.4632, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 8.496075074730622e-06, |
|
"loss": 0.5528, |
|
"step": 1691 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 8.4853771585022e-06, |
|
"loss": 0.7617, |
|
"step": 1692 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 8.474681016096177e-06, |
|
"loss": 0.731, |
|
"step": 1693 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 8.463986660039136e-06, |
|
"loss": 0.6924, |
|
"step": 1694 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 8.453294102855574e-06, |
|
"loss": 0.5638, |
|
"step": 1695 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 8.442603357067877e-06, |
|
"loss": 0.5566, |
|
"step": 1696 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 8.431914435196317e-06, |
|
"loss": 0.6574, |
|
"step": 1697 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 8.421227349759029e-06, |
|
"loss": 0.6087, |
|
"step": 1698 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 8.410542113271986e-06, |
|
"loss": 0.5996, |
|
"step": 1699 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 8.39985873824901e-06, |
|
"loss": 0.6194, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 8.389177237201739e-06, |
|
"loss": 0.5909, |
|
"step": 1701 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 8.37849762263961e-06, |
|
"loss": 0.635, |
|
"step": 1702 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 8.367819907069857e-06, |
|
"loss": 0.5957, |
|
"step": 1703 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 8.357144102997484e-06, |
|
"loss": 0.5106, |
|
"step": 1704 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 8.34647022292527e-06, |
|
"loss": 0.5881, |
|
"step": 1705 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 8.335798279353724e-06, |
|
"loss": 0.6912, |
|
"step": 1706 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 8.325128284781097e-06, |
|
"loss": 0.5616, |
|
"step": 1707 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 8.314460251703349e-06, |
|
"loss": 0.7468, |
|
"step": 1708 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 8.303794192614159e-06, |
|
"loss": 0.4598, |
|
"step": 1709 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 8.293130120004876e-06, |
|
"loss": 0.6906, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 8.282468046364535e-06, |
|
"loss": 0.6242, |
|
"step": 1711 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 8.271807984179821e-06, |
|
"loss": 0.6417, |
|
"step": 1712 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 8.261149945935072e-06, |
|
"loss": 0.5574, |
|
"step": 1713 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 8.25049394411225e-06, |
|
"loss": 0.6588, |
|
"step": 1714 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 8.239839991190932e-06, |
|
"loss": 0.6316, |
|
"step": 1715 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 8.229188099648295e-06, |
|
"loss": 0.543, |
|
"step": 1716 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 8.218538281959113e-06, |
|
"loss": 0.5491, |
|
"step": 1717 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 8.207890550595711e-06, |
|
"loss": 0.5504, |
|
"step": 1718 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 8.19724491802799e-06, |
|
"loss": 0.617, |
|
"step": 1719 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 8.186601396723378e-06, |
|
"loss": 0.5903, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 8.175959999146843e-06, |
|
"loss": 0.4883, |
|
"step": 1721 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 8.165320737760858e-06, |
|
"loss": 0.746, |
|
"step": 1722 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 8.15468362502539e-06, |
|
"loss": 0.6079, |
|
"step": 1723 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 8.144048673397907e-06, |
|
"loss": 0.5689, |
|
"step": 1724 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 8.133415895333329e-06, |
|
"loss": 0.642, |
|
"step": 1725 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 8.122785303284038e-06, |
|
"loss": 0.6023, |
|
"step": 1726 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 8.112156909699853e-06, |
|
"loss": 0.6083, |
|
"step": 1727 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 8.10153072702802e-06, |
|
"loss": 0.536, |
|
"step": 1728 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"eval_loss": 0.7491377592086792, |
|
"eval_runtime": 88.8028, |
|
"eval_samples_per_second": 22.522, |
|
"eval_steps_per_second": 11.261, |
|
"step": 1728 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 8.090906767713196e-06, |
|
"loss": 0.5937, |
|
"step": 1729 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 8.080285044197432e-06, |
|
"loss": 0.6362, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 8.069665568920161e-06, |
|
"loss": 0.6098, |
|
"step": 1731 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 8.059048354318191e-06, |
|
"loss": 0.5175, |
|
"step": 1732 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 8.048433412825669e-06, |
|
"loss": 0.4532, |
|
"step": 1733 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 8.037820756874087e-06, |
|
"loss": 0.5399, |
|
"step": 1734 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 8.02721039889226e-06, |
|
"loss": 0.6447, |
|
"step": 1735 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 8.016602351306314e-06, |
|
"loss": 0.6332, |
|
"step": 1736 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 8.005996626539668e-06, |
|
"loss": 0.7203, |
|
"step": 1737 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 7.995393237013014e-06, |
|
"loss": 0.5252, |
|
"step": 1738 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 7.984792195144314e-06, |
|
"loss": 0.5284, |
|
"step": 1739 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 7.97419351334879e-06, |
|
"loss": 0.6464, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 7.963597204038885e-06, |
|
"loss": 0.8598, |
|
"step": 1741 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 7.953003279624271e-06, |
|
"loss": 0.5534, |
|
"step": 1742 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 7.942411752511821e-06, |
|
"loss": 0.5558, |
|
"step": 1743 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 7.931822635105613e-06, |
|
"loss": 0.6868, |
|
"step": 1744 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 7.921235939806893e-06, |
|
"loss": 0.5553, |
|
"step": 1745 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 7.91065167901407e-06, |
|
"loss": 0.4812, |
|
"step": 1746 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 7.900069865122702e-06, |
|
"loss": 0.5457, |
|
"step": 1747 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 7.889490510525491e-06, |
|
"loss": 0.6085, |
|
"step": 1748 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 7.878913627612246e-06, |
|
"loss": 0.4942, |
|
"step": 1749 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 7.868339228769888e-06, |
|
"loss": 0.6985, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 7.857767326382428e-06, |
|
"loss": 0.7335, |
|
"step": 1751 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 7.847197932830953e-06, |
|
"loss": 0.7159, |
|
"step": 1752 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 7.836631060493613e-06, |
|
"loss": 0.5312, |
|
"step": 1753 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 7.826066721745604e-06, |
|
"loss": 0.7012, |
|
"step": 1754 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 7.815504928959147e-06, |
|
"loss": 0.5625, |
|
"step": 1755 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 7.804945694503505e-06, |
|
"loss": 0.6072, |
|
"step": 1756 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 7.794389030744919e-06, |
|
"loss": 0.5545, |
|
"step": 1757 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 7.78383495004663e-06, |
|
"loss": 0.4992, |
|
"step": 1758 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 7.773283464768851e-06, |
|
"loss": 0.4736, |
|
"step": 1759 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 7.762734587268767e-06, |
|
"loss": 0.5492, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 7.752188329900492e-06, |
|
"loss": 0.5832, |
|
"step": 1761 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 7.741644705015081e-06, |
|
"loss": 0.4772, |
|
"step": 1762 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 7.731103724960506e-06, |
|
"loss": 0.4948, |
|
"step": 1763 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 7.720565402081642e-06, |
|
"loss": 0.6937, |
|
"step": 1764 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 7.710029748720245e-06, |
|
"loss": 0.5376, |
|
"step": 1765 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 7.69949677721495e-06, |
|
"loss": 0.5318, |
|
"step": 1766 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 7.688966499901253e-06, |
|
"loss": 0.718, |
|
"step": 1767 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 7.678438929111493e-06, |
|
"loss": 0.4683, |
|
"step": 1768 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 7.667914077174839e-06, |
|
"loss": 0.728, |
|
"step": 1769 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 7.65739195641727e-06, |
|
"loss": 0.7365, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 7.646872579161581e-06, |
|
"loss": 0.5135, |
|
"step": 1771 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 7.636355957727343e-06, |
|
"loss": 0.7423, |
|
"step": 1772 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 7.625842104430897e-06, |
|
"loss": 0.4843, |
|
"step": 1773 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 7.6153310315853465e-06, |
|
"loss": 0.6236, |
|
"step": 1774 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 7.604822751500547e-06, |
|
"loss": 0.6085, |
|
"step": 1775 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 7.5943172764830695e-06, |
|
"loss": 0.6936, |
|
"step": 1776 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 7.583814618836208e-06, |
|
"loss": 0.4644, |
|
"step": 1777 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 7.573314790859952e-06, |
|
"loss": 0.6331, |
|
"step": 1778 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 7.562817804850984e-06, |
|
"loss": 0.551, |
|
"step": 1779 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 7.552323673102651e-06, |
|
"loss": 0.575, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 7.541832407904963e-06, |
|
"loss": 0.4448, |
|
"step": 1781 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 7.531344021544566e-06, |
|
"loss": 0.6688, |
|
"step": 1782 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 7.520858526304743e-06, |
|
"loss": 0.5302, |
|
"step": 1783 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 7.510375934465385e-06, |
|
"loss": 0.5422, |
|
"step": 1784 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 7.499896258302985e-06, |
|
"loss": 0.5832, |
|
"step": 1785 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 7.489419510090618e-06, |
|
"loss": 0.5146, |
|
"step": 1786 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 7.47894570209794e-06, |
|
"loss": 0.7329, |
|
"step": 1787 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 7.468474846591151e-06, |
|
"loss": 0.4832, |
|
"step": 1788 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 7.458006955833001e-06, |
|
"loss": 0.5192, |
|
"step": 1789 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 7.447542042082761e-06, |
|
"loss": 0.5345, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 7.437080117596229e-06, |
|
"loss": 0.5503, |
|
"step": 1791 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 7.426621194625687e-06, |
|
"loss": 0.5856, |
|
"step": 1792 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"eval_loss": 0.7484854459762573, |
|
"eval_runtime": 88.7999, |
|
"eval_samples_per_second": 22.523, |
|
"eval_steps_per_second": 11.261, |
|
"step": 1792 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 7.416165285419912e-06, |
|
"loss": 0.6397, |
|
"step": 1793 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 7.405712402224142e-06, |
|
"loss": 0.6284, |
|
"step": 1794 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 7.395262557280083e-06, |
|
"loss": 0.5389, |
|
"step": 1795 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 7.384815762825875e-06, |
|
"loss": 0.4726, |
|
"step": 1796 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 7.374372031096084e-06, |
|
"loss": 0.6793, |
|
"step": 1797 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 7.363931374321698e-06, |
|
"loss": 0.709, |
|
"step": 1798 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 7.353493804730094e-06, |
|
"loss": 0.5181, |
|
"step": 1799 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 7.343059334545039e-06, |
|
"loss": 0.5778, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 7.332627975986664e-06, |
|
"loss": 0.6572, |
|
"step": 1801 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 7.322199741271472e-06, |
|
"loss": 0.6334, |
|
"step": 1802 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 7.311774642612287e-06, |
|
"loss": 0.6137, |
|
"step": 1803 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 7.301352692218272e-06, |
|
"loss": 0.6953, |
|
"step": 1804 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 7.2909339022948965e-06, |
|
"loss": 0.6722, |
|
"step": 1805 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 7.2805182850439425e-06, |
|
"loss": 0.7714, |
|
"step": 1806 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 7.2701058526634594e-06, |
|
"loss": 0.6731, |
|
"step": 1807 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 7.259696617347776e-06, |
|
"loss": 0.6471, |
|
"step": 1808 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 7.2492905912874725e-06, |
|
"loss": 0.5229, |
|
"step": 1809 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 7.238887786669375e-06, |
|
"loss": 0.7308, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 7.228488215676536e-06, |
|
"loss": 0.5403, |
|
"step": 1811 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 7.218091890488217e-06, |
|
"loss": 0.661, |
|
"step": 1812 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 7.207698823279882e-06, |
|
"loss": 0.5324, |
|
"step": 1813 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 7.1973090262231805e-06, |
|
"loss": 0.6866, |
|
"step": 1814 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 7.186922511485927e-06, |
|
"loss": 0.6685, |
|
"step": 1815 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 7.176539291232098e-06, |
|
"loss": 0.5176, |
|
"step": 1816 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 7.166159377621806e-06, |
|
"loss": 0.5331, |
|
"step": 1817 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 7.155782782811297e-06, |
|
"loss": 0.5978, |
|
"step": 1818 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 7.145409518952926e-06, |
|
"loss": 0.6114, |
|
"step": 1819 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 7.135039598195148e-06, |
|
"loss": 0.5659, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 7.124673032682499e-06, |
|
"loss": 0.6158, |
|
"step": 1821 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 7.114309834555597e-06, |
|
"loss": 0.4863, |
|
"step": 1822 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 7.103950015951107e-06, |
|
"loss": 0.572, |
|
"step": 1823 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 7.093593589001737e-06, |
|
"loss": 0.835, |
|
"step": 1824 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 7.083240565836217e-06, |
|
"loss": 0.5681, |
|
"step": 1825 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 7.07289095857931e-06, |
|
"loss": 0.6087, |
|
"step": 1826 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 7.062544779351759e-06, |
|
"loss": 0.601, |
|
"step": 1827 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 7.052202040270301e-06, |
|
"loss": 0.6498, |
|
"step": 1828 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 7.04186275344764e-06, |
|
"loss": 0.6743, |
|
"step": 1829 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 7.031526930992445e-06, |
|
"loss": 0.6173, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 7.02119458500932e-06, |
|
"loss": 0.6137, |
|
"step": 1831 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 7.010865727598799e-06, |
|
"loss": 0.499, |
|
"step": 1832 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 7.000540370857329e-06, |
|
"loss": 0.5361, |
|
"step": 1833 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 6.990218526877266e-06, |
|
"loss": 0.5872, |
|
"step": 1834 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 6.979900207746841e-06, |
|
"loss": 0.6394, |
|
"step": 1835 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 6.969585425550161e-06, |
|
"loss": 0.5442, |
|
"step": 1836 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 6.9592741923671956e-06, |
|
"loss": 0.6822, |
|
"step": 1837 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 6.948966520273753e-06, |
|
"loss": 0.5973, |
|
"step": 1838 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 6.938662421341473e-06, |
|
"loss": 0.6411, |
|
"step": 1839 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 6.928361907637799e-06, |
|
"loss": 0.6912, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 6.9180649912260016e-06, |
|
"loss": 0.6327, |
|
"step": 1841 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 6.907771684165114e-06, |
|
"loss": 0.629, |
|
"step": 1842 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 6.897481998509954e-06, |
|
"loss": 0.6142, |
|
"step": 1843 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 6.887195946311093e-06, |
|
"loss": 0.5129, |
|
"step": 1844 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 6.876913539614853e-06, |
|
"loss": 0.5561, |
|
"step": 1845 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 6.8666347904632845e-06, |
|
"loss": 0.4456, |
|
"step": 1846 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 6.8563597108941516e-06, |
|
"loss": 0.5976, |
|
"step": 1847 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 6.846088312940919e-06, |
|
"loss": 0.6724, |
|
"step": 1848 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 6.835820608632751e-06, |
|
"loss": 0.6095, |
|
"step": 1849 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 6.825556609994475e-06, |
|
"loss": 0.5615, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 6.815296329046581e-06, |
|
"loss": 0.6272, |
|
"step": 1851 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 6.805039777805205e-06, |
|
"loss": 0.8037, |
|
"step": 1852 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 6.794786968282121e-06, |
|
"loss": 0.5641, |
|
"step": 1853 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 6.784537912484719e-06, |
|
"loss": 0.5288, |
|
"step": 1854 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 6.7742926224159815e-06, |
|
"loss": 0.5845, |
|
"step": 1855 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 6.76405111007449e-06, |
|
"loss": 0.585, |
|
"step": 1856 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"eval_loss": 0.7450958490371704, |
|
"eval_runtime": 88.8258, |
|
"eval_samples_per_second": 22.516, |
|
"eval_steps_per_second": 11.258, |
|
"step": 1856 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 6.753813387454409e-06, |
|
"loss": 0.6582, |
|
"step": 1857 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 6.743579466545454e-06, |
|
"loss": 0.5221, |
|
"step": 1858 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 6.73334935933289e-06, |
|
"loss": 0.5411, |
|
"step": 1859 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 6.723123077797515e-06, |
|
"loss": 0.5382, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 6.712900633915653e-06, |
|
"loss": 0.4572, |
|
"step": 1861 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 6.702682039659127e-06, |
|
"loss": 0.5233, |
|
"step": 1862 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 6.692467306995255e-06, |
|
"loss": 0.6738, |
|
"step": 1863 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 6.682256447886828e-06, |
|
"loss": 0.7235, |
|
"step": 1864 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 6.672049474292107e-06, |
|
"loss": 0.6002, |
|
"step": 1865 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 6.6618463981648e-06, |
|
"loss": 0.6311, |
|
"step": 1866 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 6.651647231454046e-06, |
|
"loss": 0.6925, |
|
"step": 1867 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 6.64145198610441e-06, |
|
"loss": 0.4247, |
|
"step": 1868 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 6.631260674055873e-06, |
|
"loss": 0.7743, |
|
"step": 1869 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 6.62107330724379e-06, |
|
"loss": 0.7996, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 6.610889897598911e-06, |
|
"loss": 0.6315, |
|
"step": 1871 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 6.600710457047341e-06, |
|
"loss": 0.435, |
|
"step": 1872 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 6.590534997510554e-06, |
|
"loss": 0.6108, |
|
"step": 1873 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 6.580363530905343e-06, |
|
"loss": 0.62, |
|
"step": 1874 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 6.5701960691438315e-06, |
|
"loss": 0.6667, |
|
"step": 1875 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 6.560032624133458e-06, |
|
"loss": 0.6659, |
|
"step": 1876 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 6.549873207776949e-06, |
|
"loss": 0.5416, |
|
"step": 1877 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 6.539717831972315e-06, |
|
"loss": 0.5268, |
|
"step": 1878 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 6.529566508612837e-06, |
|
"loss": 0.7636, |
|
"step": 1879 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 6.51941924958705e-06, |
|
"loss": 0.6423, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 6.509276066778727e-06, |
|
"loss": 0.628, |
|
"step": 1881 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 6.499136972066868e-06, |
|
"loss": 0.7284, |
|
"step": 1882 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 6.489001977325681e-06, |
|
"loss": 0.4847, |
|
"step": 1883 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 6.478871094424587e-06, |
|
"loss": 0.4753, |
|
"step": 1884 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 6.468744335228173e-06, |
|
"loss": 0.5437, |
|
"step": 1885 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 6.458621711596207e-06, |
|
"loss": 0.5686, |
|
"step": 1886 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 6.448503235383608e-06, |
|
"loss": 0.5128, |
|
"step": 1887 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 6.438388918440448e-06, |
|
"loss": 0.5876, |
|
"step": 1888 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 6.428278772611919e-06, |
|
"loss": 0.5346, |
|
"step": 1889 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 6.41817280973833e-06, |
|
"loss": 0.5722, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 6.4080710416550885e-06, |
|
"loss": 0.5282, |
|
"step": 1891 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 6.3979734801926965e-06, |
|
"loss": 0.5896, |
|
"step": 1892 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 6.3878801371767255e-06, |
|
"loss": 0.5756, |
|
"step": 1893 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 6.377791024427804e-06, |
|
"loss": 0.5612, |
|
"step": 1894 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 6.36770615376161e-06, |
|
"loss": 0.6084, |
|
"step": 1895 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 6.357625536988854e-06, |
|
"loss": 0.8479, |
|
"step": 1896 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 6.34754918591526e-06, |
|
"loss": 0.5956, |
|
"step": 1897 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 6.3374771123415625e-06, |
|
"loss": 0.6559, |
|
"step": 1898 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 6.327409328063479e-06, |
|
"loss": 0.5256, |
|
"step": 1899 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 6.317345844871713e-06, |
|
"loss": 0.5459, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 6.307286674551923e-06, |
|
"loss": 0.5671, |
|
"step": 1901 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 6.29723182888472e-06, |
|
"loss": 0.4931, |
|
"step": 1902 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 6.287181319645645e-06, |
|
"loss": 0.7066, |
|
"step": 1903 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 6.277135158605174e-06, |
|
"loss": 0.6499, |
|
"step": 1904 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 6.267093357528677e-06, |
|
"loss": 0.5588, |
|
"step": 1905 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 6.257055928176425e-06, |
|
"loss": 0.5938, |
|
"step": 1906 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 6.247022882303559e-06, |
|
"loss": 0.6139, |
|
"step": 1907 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 6.236994231660105e-06, |
|
"loss": 0.5468, |
|
"step": 1908 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 6.226969987990926e-06, |
|
"loss": 0.6266, |
|
"step": 1909 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 6.216950163035727e-06, |
|
"loss": 0.5614, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 6.206934768529043e-06, |
|
"loss": 0.6777, |
|
"step": 1911 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 6.196923816200216e-06, |
|
"loss": 0.5049, |
|
"step": 1912 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 6.186917317773384e-06, |
|
"loss": 0.5514, |
|
"step": 1913 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 6.176915284967474e-06, |
|
"loss": 0.5614, |
|
"step": 1914 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 6.166917729496182e-06, |
|
"loss": 0.5849, |
|
"step": 1915 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 6.1569246630679605e-06, |
|
"loss": 0.6687, |
|
"step": 1916 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 6.146936097385999e-06, |
|
"loss": 0.6321, |
|
"step": 1917 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 6.13695204414822e-06, |
|
"loss": 0.6551, |
|
"step": 1918 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 6.126972515047269e-06, |
|
"loss": 0.5871, |
|
"step": 1919 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 6.1169975217704805e-06, |
|
"loss": 0.5143, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"eval_loss": 0.7439701557159424, |
|
"eval_runtime": 88.8154, |
|
"eval_samples_per_second": 22.519, |
|
"eval_steps_per_second": 11.259, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 6.107027075999887e-06, |
|
"loss": 0.5085, |
|
"step": 1921 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 6.097061189412181e-06, |
|
"loss": 0.5354, |
|
"step": 1922 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 6.0870998736787355e-06, |
|
"loss": 0.7136, |
|
"step": 1923 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 6.077143140465558e-06, |
|
"loss": 0.6739, |
|
"step": 1924 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 6.0671910014332895e-06, |
|
"loss": 0.4988, |
|
"step": 1925 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 6.057243468237192e-06, |
|
"loss": 0.6326, |
|
"step": 1926 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 6.0473005525271365e-06, |
|
"loss": 0.4922, |
|
"step": 1927 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 6.037362265947584e-06, |
|
"loss": 0.5836, |
|
"step": 1928 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 6.027428620137574e-06, |
|
"loss": 0.7251, |
|
"step": 1929 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 6.017499626730707e-06, |
|
"loss": 0.6268, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 6.007575297355146e-06, |
|
"loss": 0.5628, |
|
"step": 1931 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 5.99765564363358e-06, |
|
"loss": 0.541, |
|
"step": 1932 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 5.987740677183229e-06, |
|
"loss": 0.5807, |
|
"step": 1933 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 5.977830409615817e-06, |
|
"loss": 0.6026, |
|
"step": 1934 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 5.967924852537579e-06, |
|
"loss": 0.6233, |
|
"step": 1935 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 5.95802401754922e-06, |
|
"loss": 0.5217, |
|
"step": 1936 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 5.9481279162459164e-06, |
|
"loss": 0.6669, |
|
"step": 1937 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 5.938236560217301e-06, |
|
"loss": 0.5845, |
|
"step": 1938 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 5.928349961047459e-06, |
|
"loss": 0.61, |
|
"step": 1939 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 5.918468130314895e-06, |
|
"loss": 0.6591, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 5.908591079592529e-06, |
|
"loss": 0.4925, |
|
"step": 1941 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 5.898718820447685e-06, |
|
"loss": 0.5516, |
|
"step": 1942 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 5.88885136444208e-06, |
|
"loss": 0.4537, |
|
"step": 1943 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 5.878988723131797e-06, |
|
"loss": 0.5765, |
|
"step": 1944 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 5.869130908067291e-06, |
|
"loss": 0.5357, |
|
"step": 1945 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 5.85927793079335e-06, |
|
"loss": 0.5888, |
|
"step": 1946 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 5.849429802849114e-06, |
|
"loss": 0.627, |
|
"step": 1947 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 5.839586535768032e-06, |
|
"loss": 0.4781, |
|
"step": 1948 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 5.829748141077855e-06, |
|
"loss": 0.4761, |
|
"step": 1949 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 5.819914630300653e-06, |
|
"loss": 0.4859, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 5.810086014952747e-06, |
|
"loss": 0.5839, |
|
"step": 1951 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 5.800262306544743e-06, |
|
"loss": 0.568, |
|
"step": 1952 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 5.790443516581488e-06, |
|
"loss": 0.6449, |
|
"step": 1953 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 5.780629656562079e-06, |
|
"loss": 0.7283, |
|
"step": 1954 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 5.770820737979841e-06, |
|
"loss": 0.6201, |
|
"step": 1955 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 5.7610167723222976e-06, |
|
"loss": 0.7511, |
|
"step": 1956 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 5.751217771071185e-06, |
|
"loss": 0.5401, |
|
"step": 1957 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 5.741423745702422e-06, |
|
"loss": 0.5501, |
|
"step": 1958 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 5.731634707686104e-06, |
|
"loss": 0.6013, |
|
"step": 1959 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 5.721850668486475e-06, |
|
"loss": 0.5687, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 5.712071639561923e-06, |
|
"loss": 0.5671, |
|
"step": 1961 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 5.7022976323649944e-06, |
|
"loss": 0.6546, |
|
"step": 1962 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 5.692528658342322e-06, |
|
"loss": 0.5525, |
|
"step": 1963 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 5.682764728934667e-06, |
|
"loss": 0.5449, |
|
"step": 1964 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 5.673005855576863e-06, |
|
"loss": 0.4965, |
|
"step": 1965 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 5.663252049697843e-06, |
|
"loss": 0.572, |
|
"step": 1966 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 5.653503322720594e-06, |
|
"loss": 0.7223, |
|
"step": 1967 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 5.643759686062152e-06, |
|
"loss": 0.5907, |
|
"step": 1968 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 5.634021151133597e-06, |
|
"loss": 0.5713, |
|
"step": 1969 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 5.624287729340039e-06, |
|
"loss": 0.5979, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 5.6145594320805865e-06, |
|
"loss": 0.7585, |
|
"step": 1971 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 5.604836270748362e-06, |
|
"loss": 0.461, |
|
"step": 1972 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 5.595118256730452e-06, |
|
"loss": 0.6431, |
|
"step": 1973 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 5.585405401407944e-06, |
|
"loss": 0.4972, |
|
"step": 1974 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 5.575697716155861e-06, |
|
"loss": 0.5496, |
|
"step": 1975 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 5.5659952123431734e-06, |
|
"loss": 0.5657, |
|
"step": 1976 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 5.556297901332794e-06, |
|
"loss": 0.6389, |
|
"step": 1977 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 5.546605794481546e-06, |
|
"loss": 0.5551, |
|
"step": 1978 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 5.536918903140165e-06, |
|
"loss": 0.4569, |
|
"step": 1979 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 5.5272372386532665e-06, |
|
"loss": 0.5818, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 5.517560812359355e-06, |
|
"loss": 0.6564, |
|
"step": 1981 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 5.507889635590802e-06, |
|
"loss": 0.6242, |
|
"step": 1982 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 5.498223719673818e-06, |
|
"loss": 0.7349, |
|
"step": 1983 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 5.48856307592847e-06, |
|
"loss": 0.5408, |
|
"step": 1984 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"eval_loss": 0.7390481233596802, |
|
"eval_runtime": 88.8545, |
|
"eval_samples_per_second": 22.509, |
|
"eval_steps_per_second": 11.254, |
|
"step": 1984 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 3003, |
|
"num_train_epochs": 3, |
|
"save_steps": 64, |
|
"total_flos": 5.809085963833442e+18, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|