{
  "best_metric": 10.949830055236816,
  "best_model_checkpoint": "miner_id_24/checkpoint-75",
  "epoch": 0.645682001614205,
  "eval_steps": 25,
  "global_step": 75,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.008609093354856066,
      "grad_norm": 10.867487907409668,
      "learning_rate": 3.3333333333333335e-05,
      "loss": 355.0044,
      "step": 1
    },
    {
      "epoch": 0.008609093354856066,
      "eval_loss": 11.096755027770996,
      "eval_runtime": 0.6034,
      "eval_samples_per_second": 82.868,
      "eval_steps_per_second": 21.546,
      "step": 1
    },
    {
      "epoch": 0.017218186709712133,
      "grad_norm": 11.303132057189941,
      "learning_rate": 6.666666666666667e-05,
      "loss": 354.975,
      "step": 2
    },
    {
      "epoch": 0.0258272800645682,
      "grad_norm": 13.843127250671387,
      "learning_rate": 0.0001,
      "loss": 355.0412,
      "step": 3
    },
    {
      "epoch": 0.034436373419424265,
      "grad_norm": 11.937524795532227,
      "learning_rate": 9.99571699711836e-05,
      "loss": 354.924,
      "step": 4
    },
    {
      "epoch": 0.04304546677428033,
      "grad_norm": 12.751911163330078,
      "learning_rate": 9.982876141412856e-05,
      "loss": 354.8232,
      "step": 5
    },
    {
      "epoch": 0.0516545601291364,
      "grad_norm": 12.354517936706543,
      "learning_rate": 9.961501876182148e-05,
      "loss": 354.8041,
      "step": 6
    },
    {
      "epoch": 0.060263653483992465,
      "grad_norm": 13.037909507751465,
      "learning_rate": 9.931634888554937e-05,
      "loss": 354.611,
      "step": 7
    },
    {
      "epoch": 0.06887274683884853,
      "grad_norm": 13.028841972351074,
      "learning_rate": 9.893332032039701e-05,
      "loss": 354.4688,
      "step": 8
    },
    {
      "epoch": 0.0774818401937046,
      "grad_norm": 14.73702335357666,
      "learning_rate": 9.846666218300807e-05,
      "loss": 354.3231,
      "step": 9
    },
    {
      "epoch": 0.08609093354856066,
      "grad_norm": 17.07733917236328,
      "learning_rate": 9.791726278367022e-05,
      "loss": 354.056,
      "step": 10
    },
    {
      "epoch": 0.09470002690341674,
      "grad_norm": 21.84981918334961,
      "learning_rate": 9.728616793536588e-05,
      "loss": 353.632,
      "step": 11
    },
    {
      "epoch": 0.1033091202582728,
      "grad_norm": 25.133949279785156,
      "learning_rate": 9.657457896300791e-05,
      "loss": 353.3881,
      "step": 12
    },
    {
      "epoch": 0.11191821361312887,
      "grad_norm": 18.78001594543457,
      "learning_rate": 9.578385041664925e-05,
      "loss": 353.659,
      "step": 13
    },
    {
      "epoch": 0.12052730696798493,
      "grad_norm": 10.563817024230957,
      "learning_rate": 9.491548749301997e-05,
      "loss": 353.9691,
      "step": 14
    },
    {
      "epoch": 0.129136400322841,
      "grad_norm": 11.821118354797363,
      "learning_rate": 9.397114317029975e-05,
      "loss": 353.8424,
      "step": 15
    },
    {
      "epoch": 0.13774549367769706,
      "grad_norm": 11.502336502075195,
      "learning_rate": 9.295261506157986e-05,
      "loss": 353.7357,
      "step": 16
    },
    {
      "epoch": 0.14635458703255314,
      "grad_norm": 10.819986343383789,
      "learning_rate": 9.186184199300464e-05,
      "loss": 353.6895,
      "step": 17
    },
    {
      "epoch": 0.1549636803874092,
      "grad_norm": 11.171977996826172,
      "learning_rate": 9.070090031310558e-05,
      "loss": 353.5294,
      "step": 18
    },
    {
      "epoch": 0.16357277374226525,
      "grad_norm": 11.021985054016113,
      "learning_rate": 8.947199994035401e-05,
      "loss": 353.4442,
      "step": 19
    },
    {
      "epoch": 0.17218186709712133,
      "grad_norm": 11.427451133728027,
      "learning_rate": 8.817748015645558e-05,
      "loss": 353.2932,
      "step": 20
    },
    {
      "epoch": 0.1807909604519774,
      "grad_norm": 12.078248977661133,
      "learning_rate": 8.681980515339464e-05,
      "loss": 353.0774,
      "step": 21
    },
    {
      "epoch": 0.18940005380683347,
      "grad_norm": 13.572750091552734,
      "learning_rate": 8.540155934270471e-05,
      "loss": 352.7897,
      "step": 22
    },
    {
      "epoch": 0.19800914716168955,
      "grad_norm": 16.139827728271484,
      "learning_rate": 8.392544243589427e-05,
      "loss": 352.1232,
      "step": 23
    },
    {
      "epoch": 0.2066182405165456,
      "grad_norm": 19.074623107910156,
      "learning_rate": 8.239426430539243e-05,
      "loss": 351.4342,
      "step": 24
    },
    {
      "epoch": 0.21522733387140167,
      "grad_norm": 22.747406005859375,
      "learning_rate": 8.081093963579707e-05,
      "loss": 350.7261,
      "step": 25
    },
    {
      "epoch": 0.21522733387140167,
      "eval_loss": 11.010053634643555,
      "eval_runtime": 0.0769,
      "eval_samples_per_second": 650.534,
      "eval_steps_per_second": 169.139,
      "step": 25
    },
    {
      "epoch": 0.22383642722625774,
      "grad_norm": 8.548608779907227,
      "learning_rate": 7.917848237560709e-05,
      "loss": 353.2629,
      "step": 26
    },
    {
      "epoch": 0.2324455205811138,
      "grad_norm": 9.44759750366211,
      "learning_rate": 7.75e-05,
      "loss": 352.9843,
      "step": 27
    },
    {
      "epoch": 0.24105461393596986,
      "grad_norm": 10.709016799926758,
      "learning_rate": 7.577868759557654e-05,
      "loss": 352.6682,
      "step": 28
    },
    {
      "epoch": 0.24966370729082593,
      "grad_norm": 9.533336639404297,
      "learning_rate": 7.401782177833148e-05,
      "loss": 352.8022,
      "step": 29
    },
    {
      "epoch": 0.258272800645682,
      "grad_norm": 9.468610763549805,
      "learning_rate": 7.222075445642904e-05,
      "loss": 352.7345,
      "step": 30
    },
    {
      "epoch": 0.2668818940005381,
      "grad_norm": 9.217055320739746,
      "learning_rate": 7.03909064496551e-05,
      "loss": 352.7065,
      "step": 31
    },
    {
      "epoch": 0.2754909873553941,
      "grad_norm": 9.265793800354004,
      "learning_rate": 6.853176097769229e-05,
      "loss": 352.6331,
      "step": 32
    },
    {
      "epoch": 0.2841000807102502,
      "grad_norm": 9.615739822387695,
      "learning_rate": 6.664685702961344e-05,
      "loss": 352.4907,
      "step": 33
    },
    {
      "epoch": 0.29270917406510627,
      "grad_norm": 10.331357955932617,
      "learning_rate": 6.473978262721463e-05,
      "loss": 352.2647,
      "step": 34
    },
    {
      "epoch": 0.3013182674199623,
      "grad_norm": 12.123662948608398,
      "learning_rate": 6.281416799501188e-05,
      "loss": 351.5476,
      "step": 35
    },
    {
      "epoch": 0.3099273607748184,
      "grad_norm": 14.970151901245117,
      "learning_rate": 6.087367864990233e-05,
      "loss": 350.6174,
      "step": 36
    },
    {
      "epoch": 0.31853645412967446,
      "grad_norm": 18.231613159179688,
      "learning_rate": 5.8922008423644624e-05,
      "loss": 349.7336,
      "step": 37
    },
    {
      "epoch": 0.3271455474845305,
      "grad_norm": 14.60504150390625,
      "learning_rate": 5.696287243144013e-05,
      "loss": 350.7061,
      "step": 38
    },
    {
      "epoch": 0.3357546408393866,
      "grad_norm": 9.001364707946777,
      "learning_rate": 5.500000000000001e-05,
      "loss": 352.3064,
      "step": 39
    },
    {
      "epoch": 0.34436373419424265,
      "grad_norm": 9.125486373901367,
      "learning_rate": 5.303712756855988e-05,
      "loss": 352.185,
      "step": 40
    },
    {
      "epoch": 0.35297282754909876,
      "grad_norm": 8.79714584350586,
      "learning_rate": 5.107799157635538e-05,
      "loss": 352.1189,
      "step": 41
    },
    {
      "epoch": 0.3615819209039548,
      "grad_norm": 8.923425674438477,
      "learning_rate": 4.912632135009769e-05,
      "loss": 352.1105,
      "step": 42
    },
    {
      "epoch": 0.37019101425881085,
      "grad_norm": 8.289681434631348,
      "learning_rate": 4.718583200498814e-05,
      "loss": 352.2823,
      "step": 43
    },
    {
      "epoch": 0.37880010761366695,
      "grad_norm": 8.199746131896973,
      "learning_rate": 4.526021737278538e-05,
      "loss": 352.2198,
      "step": 44
    },
    {
      "epoch": 0.387409200968523,
      "grad_norm": 8.637060165405273,
      "learning_rate": 4.3353142970386564e-05,
      "loss": 352.026,
      "step": 45
    },
    {
      "epoch": 0.3960182943233791,
      "grad_norm": 9.83723258972168,
      "learning_rate": 4.146823902230772e-05,
      "loss": 351.563,
      "step": 46
    },
    {
      "epoch": 0.40462738767823514,
      "grad_norm": 10.942987442016602,
      "learning_rate": 3.960909355034491e-05,
      "loss": 351.0425,
      "step": 47
    },
    {
      "epoch": 0.4132364810330912,
      "grad_norm": 13.6652193069458,
      "learning_rate": 3.777924554357096e-05,
      "loss": 350.1031,
      "step": 48
    },
    {
      "epoch": 0.4218455743879473,
      "grad_norm": 16.773061752319336,
      "learning_rate": 3.598217822166854e-05,
      "loss": 349.162,
      "step": 49
    },
    {
      "epoch": 0.43045466774280333,
      "grad_norm": 20.8967227935791,
      "learning_rate": 3.422131240442349e-05,
      "loss": 348.3621,
      "step": 50
    },
    {
      "epoch": 0.43045466774280333,
      "eval_loss": 10.965777397155762,
      "eval_runtime": 0.0782,
      "eval_samples_per_second": 639.534,
      "eval_steps_per_second": 166.279,
      "step": 50
    },
    {
      "epoch": 0.4390637610976594,
      "grad_norm": 7.474067687988281,
      "learning_rate": 3.250000000000001e-05,
      "loss": 352.3553,
      "step": 51
    },
    {
      "epoch": 0.4476728544525155,
      "grad_norm": 8.300982475280762,
      "learning_rate": 3.082151762439293e-05,
      "loss": 351.9735,
      "step": 52
    },
    {
      "epoch": 0.4562819478073715,
      "grad_norm": 8.844514846801758,
      "learning_rate": 2.9189060364202943e-05,
      "loss": 351.6582,
      "step": 53
    },
    {
      "epoch": 0.4648910411622276,
      "grad_norm": 8.853071212768555,
      "learning_rate": 2.760573569460757e-05,
      "loss": 351.5546,
      "step": 54
    },
    {
      "epoch": 0.47350013451708367,
      "grad_norm": 8.490742683410645,
      "learning_rate": 2.6074557564105727e-05,
      "loss": 351.6604,
      "step": 55
    },
    {
      "epoch": 0.4821092278719397,
      "grad_norm": 7.409176349639893,
      "learning_rate": 2.459844065729529e-05,
      "loss": 352.1426,
      "step": 56
    },
    {
      "epoch": 0.4907183212267958,
      "grad_norm": 7.821872234344482,
      "learning_rate": 2.3180194846605367e-05,
      "loss": 351.8066,
      "step": 57
    },
    {
      "epoch": 0.49932741458165186,
      "grad_norm": 8.349543571472168,
      "learning_rate": 2.1822519843544424e-05,
      "loss": 351.5433,
      "step": 58
    },
    {
      "epoch": 0.5079365079365079,
      "grad_norm": 9.260781288146973,
      "learning_rate": 2.0528000059645997e-05,
      "loss": 351.208,
      "step": 59
    },
    {
      "epoch": 0.516545601291364,
      "grad_norm": 10.735580444335938,
      "learning_rate": 1.9299099686894423e-05,
      "loss": 350.6387,
      "step": 60
    },
    {
      "epoch": 0.5251546946462201,
      "grad_norm": 13.846049308776855,
      "learning_rate": 1.8138158006995364e-05,
      "loss": 349.4801,
      "step": 61
    },
    {
      "epoch": 0.5337637880010762,
      "grad_norm": 17.65458869934082,
      "learning_rate": 1.7047384938420154e-05,
      "loss": 348.2975,
      "step": 62
    },
    {
      "epoch": 0.5423728813559322,
      "grad_norm": 13.394047737121582,
      "learning_rate": 1.602885682970026e-05,
      "loss": 349.9447,
      "step": 63
    },
    {
      "epoch": 0.5509819747107882,
      "grad_norm": 7.800135612487793,
      "learning_rate": 1.5084512506980026e-05,
      "loss": 351.9247,
      "step": 64
    },
    {
      "epoch": 0.5595910680656443,
      "grad_norm": 8.641666412353516,
      "learning_rate": 1.4216149583350754e-05,
      "loss": 351.5002,
      "step": 65
    },
    {
      "epoch": 0.5682001614205004,
      "grad_norm": 8.432456970214844,
      "learning_rate": 1.3425421036992098e-05,
      "loss": 351.3875,
      "step": 66
    },
    {
      "epoch": 0.5768092547753565,
      "grad_norm": 9.028701782226562,
      "learning_rate": 1.2713832064634126e-05,
      "loss": 351.1097,
      "step": 67
    },
    {
      "epoch": 0.5854183481302125,
      "grad_norm": 8.943015098571777,
      "learning_rate": 1.2082737216329794e-05,
      "loss": 351.1015,
      "step": 68
    },
    {
      "epoch": 0.5940274414850686,
      "grad_norm": 7.793323516845703,
      "learning_rate": 1.1533337816991932e-05,
      "loss": 351.5153,
      "step": 69
    },
    {
      "epoch": 0.6026365348399246,
      "grad_norm": 8.137232780456543,
      "learning_rate": 1.1066679679603e-05,
      "loss": 351.4627,
      "step": 70
    },
    {
      "epoch": 0.6112456281947808,
      "grad_norm": 8.868036270141602,
      "learning_rate": 1.0683651114450641e-05,
      "loss": 351.1549,
      "step": 71
    },
    {
      "epoch": 0.6198547215496368,
      "grad_norm": 10.001080513000488,
      "learning_rate": 1.0384981238178534e-05,
      "loss": 350.6647,
      "step": 72
    },
    {
      "epoch": 0.6284638149044929,
      "grad_norm": 12.413795471191406,
      "learning_rate": 1.017123858587145e-05,
      "loss": 349.7495,
      "step": 73
    },
    {
      "epoch": 0.6370729082593489,
      "grad_norm": 16.752674102783203,
      "learning_rate": 1.00428300288164e-05,
      "loss": 348.3455,
      "step": 74
    },
    {
      "epoch": 0.645682001614205,
      "grad_norm": 20.989471435546875,
      "learning_rate": 1e-05,
      "loss": 347.5739,
      "step": 75
    },
    {
      "epoch": 0.645682001614205,
      "eval_loss": 10.949830055236816,
      "eval_runtime": 0.0779,
      "eval_samples_per_second": 641.476,
      "eval_steps_per_second": 166.784,
      "step": 75
    }
  ],
  "logging_steps": 1,
  "max_steps": 75,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 25,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 1,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 6323739033600.0,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}