Instructions to use ugaoo/model_e31d585d with libraries, inference providers, notebooks, and local apps. Follow these links to get started.
- Libraries
- PEFT
How to use ugaoo/model_e31d585d with PEFT:
from peft import PeftModel from transformers import AutoModelForCausalLM base_model = AutoModelForCausalLM.from_pretrained("Qwen/Qwen2.5-7B-Instruct") model = PeftModel.from_pretrained(base_model, "ugaoo/model_e31d585d") - Notebooks
- Google Colab
- Kaggle
| { | |
| "best_metric": null, | |
| "best_model_checkpoint": null, | |
| "epoch": 3.9971469329529246, | |
| "eval_steps": 500, | |
| "global_step": 932, | |
| "is_hyper_param_search": false, | |
| "is_local_process_zero": true, | |
| "is_world_process_zero": true, | |
| "log_history": [ | |
| { | |
| "epoch": 0.0042796005706134095, | |
| "grad_norm": 34.48759460449219, | |
| "learning_rate": 5.0000000000000004e-08, | |
| "loss": 4.9483, | |
| "step": 1 | |
| }, | |
| { | |
| "epoch": 0.008559201141226819, | |
| "grad_norm": 35.18870544433594, | |
| "learning_rate": 1.0000000000000001e-07, | |
| "loss": 5.1789, | |
| "step": 2 | |
| }, | |
| { | |
| "epoch": 0.012838801711840228, | |
| "grad_norm": 34.3349494934082, | |
| "learning_rate": 1.5000000000000002e-07, | |
| "loss": 4.8881, | |
| "step": 3 | |
| }, | |
| { | |
| "epoch": 0.017118402282453638, | |
| "grad_norm": 36.14989471435547, | |
| "learning_rate": 2.0000000000000002e-07, | |
| "loss": 5.0836, | |
| "step": 4 | |
| }, | |
| { | |
| "epoch": 0.021398002853067047, | |
| "grad_norm": 33.93682861328125, | |
| "learning_rate": 2.5000000000000004e-07, | |
| "loss": 4.9619, | |
| "step": 5 | |
| }, | |
| { | |
| "epoch": 0.025677603423680456, | |
| "grad_norm": 34.47760772705078, | |
| "learning_rate": 3.0000000000000004e-07, | |
| "loss": 4.9723, | |
| "step": 6 | |
| }, | |
| { | |
| "epoch": 0.029957203994293864, | |
| "grad_norm": 34.52993392944336, | |
| "learning_rate": 3.5000000000000004e-07, | |
| "loss": 4.9278, | |
| "step": 7 | |
| }, | |
| { | |
| "epoch": 0.034236804564907276, | |
| "grad_norm": 33.86513900756836, | |
| "learning_rate": 4.0000000000000003e-07, | |
| "loss": 4.9141, | |
| "step": 8 | |
| }, | |
| { | |
| "epoch": 0.03851640513552068, | |
| "grad_norm": 32.446311950683594, | |
| "learning_rate": 4.5000000000000003e-07, | |
| "loss": 4.7811, | |
| "step": 9 | |
| }, | |
| { | |
| "epoch": 0.042796005706134094, | |
| "grad_norm": 33.34199905395508, | |
| "learning_rate": 5.000000000000001e-07, | |
| "loss": 4.8566, | |
| "step": 10 | |
| }, | |
| { | |
| "epoch": 0.047075606276747506, | |
| "grad_norm": 34.60908508300781, | |
| "learning_rate": 5.5e-07, | |
| "loss": 4.9406, | |
| "step": 11 | |
| }, | |
| { | |
| "epoch": 0.05135520684736091, | |
| "grad_norm": 34.570987701416016, | |
| "learning_rate": 6.000000000000001e-07, | |
| "loss": 4.8281, | |
| "step": 12 | |
| }, | |
| { | |
| "epoch": 0.05563480741797432, | |
| "grad_norm": 32.070953369140625, | |
| "learning_rate": 6.5e-07, | |
| "loss": 4.6165, | |
| "step": 13 | |
| }, | |
| { | |
| "epoch": 0.05991440798858773, | |
| "grad_norm": 33.586509704589844, | |
| "learning_rate": 7.000000000000001e-07, | |
| "loss": 4.8012, | |
| "step": 14 | |
| }, | |
| { | |
| "epoch": 0.06419400855920114, | |
| "grad_norm": 30.96600341796875, | |
| "learning_rate": 7.5e-07, | |
| "loss": 4.5845, | |
| "step": 15 | |
| }, | |
| { | |
| "epoch": 0.06847360912981455, | |
| "grad_norm": 30.152454376220703, | |
| "learning_rate": 8.000000000000001e-07, | |
| "loss": 4.4356, | |
| "step": 16 | |
| }, | |
| { | |
| "epoch": 0.07275320970042796, | |
| "grad_norm": 28.61970329284668, | |
| "learning_rate": 8.500000000000001e-07, | |
| "loss": 4.359, | |
| "step": 17 | |
| }, | |
| { | |
| "epoch": 0.07703281027104136, | |
| "grad_norm": 27.507383346557617, | |
| "learning_rate": 9.000000000000001e-07, | |
| "loss": 4.4086, | |
| "step": 18 | |
| }, | |
| { | |
| "epoch": 0.08131241084165478, | |
| "grad_norm": 25.110597610473633, | |
| "learning_rate": 9.500000000000001e-07, | |
| "loss": 4.1411, | |
| "step": 19 | |
| }, | |
| { | |
| "epoch": 0.08559201141226819, | |
| "grad_norm": 23.737529754638672, | |
| "learning_rate": 1.0000000000000002e-06, | |
| "loss": 4.0639, | |
| "step": 20 | |
| }, | |
| { | |
| "epoch": 0.0898716119828816, | |
| "grad_norm": 22.502748489379883, | |
| "learning_rate": 1.0500000000000001e-06, | |
| "loss": 3.9744, | |
| "step": 21 | |
| }, | |
| { | |
| "epoch": 0.09415121255349501, | |
| "grad_norm": 21.957761764526367, | |
| "learning_rate": 1.1e-06, | |
| "loss": 3.8115, | |
| "step": 22 | |
| }, | |
| { | |
| "epoch": 0.09843081312410841, | |
| "grad_norm": 20.552080154418945, | |
| "learning_rate": 1.1500000000000002e-06, | |
| "loss": 3.6059, | |
| "step": 23 | |
| }, | |
| { | |
| "epoch": 0.10271041369472182, | |
| "grad_norm": 21.490442276000977, | |
| "learning_rate": 1.2000000000000002e-06, | |
| "loss": 3.5285, | |
| "step": 24 | |
| }, | |
| { | |
| "epoch": 0.10699001426533523, | |
| "grad_norm": 20.63368034362793, | |
| "learning_rate": 1.25e-06, | |
| "loss": 3.3484, | |
| "step": 25 | |
| }, | |
| { | |
| "epoch": 0.11126961483594865, | |
| "grad_norm": 20.170631408691406, | |
| "learning_rate": 1.3e-06, | |
| "loss": 3.0973, | |
| "step": 26 | |
| }, | |
| { | |
| "epoch": 0.11554921540656206, | |
| "grad_norm": 20.217880249023438, | |
| "learning_rate": 1.3500000000000002e-06, | |
| "loss": 2.9758, | |
| "step": 27 | |
| }, | |
| { | |
| "epoch": 0.11982881597717546, | |
| "grad_norm": 18.303476333618164, | |
| "learning_rate": 1.4000000000000001e-06, | |
| "loss": 2.8659, | |
| "step": 28 | |
| }, | |
| { | |
| "epoch": 0.12410841654778887, | |
| "grad_norm": 15.922847747802734, | |
| "learning_rate": 1.45e-06, | |
| "loss": 2.6369, | |
| "step": 29 | |
| }, | |
| { | |
| "epoch": 0.12838801711840228, | |
| "grad_norm": 14.529817581176758, | |
| "learning_rate": 1.5e-06, | |
| "loss": 2.5532, | |
| "step": 30 | |
| }, | |
| { | |
| "epoch": 0.1326676176890157, | |
| "grad_norm": 14.177539825439453, | |
| "learning_rate": 1.5500000000000002e-06, | |
| "loss": 2.4497, | |
| "step": 31 | |
| }, | |
| { | |
| "epoch": 0.1369472182596291, | |
| "grad_norm": 15.144468307495117, | |
| "learning_rate": 1.6000000000000001e-06, | |
| "loss": 2.3432, | |
| "step": 32 | |
| }, | |
| { | |
| "epoch": 0.14122681883024252, | |
| "grad_norm": 14.920907974243164, | |
| "learning_rate": 1.6500000000000003e-06, | |
| "loss": 2.14, | |
| "step": 33 | |
| }, | |
| { | |
| "epoch": 0.14550641940085593, | |
| "grad_norm": 15.700516700744629, | |
| "learning_rate": 1.7000000000000002e-06, | |
| "loss": 2.0247, | |
| "step": 34 | |
| }, | |
| { | |
| "epoch": 0.14978601997146934, | |
| "grad_norm": 15.600655555725098, | |
| "learning_rate": 1.75e-06, | |
| "loss": 1.8893, | |
| "step": 35 | |
| }, | |
| { | |
| "epoch": 0.15406562054208273, | |
| "grad_norm": 15.281895637512207, | |
| "learning_rate": 1.8000000000000001e-06, | |
| "loss": 1.7768, | |
| "step": 36 | |
| }, | |
| { | |
| "epoch": 0.15834522111269614, | |
| "grad_norm": 13.99860668182373, | |
| "learning_rate": 1.85e-06, | |
| "loss": 1.5486, | |
| "step": 37 | |
| }, | |
| { | |
| "epoch": 0.16262482168330955, | |
| "grad_norm": 13.295161247253418, | |
| "learning_rate": 1.9000000000000002e-06, | |
| "loss": 1.427, | |
| "step": 38 | |
| }, | |
| { | |
| "epoch": 0.16690442225392296, | |
| "grad_norm": 12.667908668518066, | |
| "learning_rate": 1.9500000000000004e-06, | |
| "loss": 1.2251, | |
| "step": 39 | |
| }, | |
| { | |
| "epoch": 0.17118402282453637, | |
| "grad_norm": 12.730255126953125, | |
| "learning_rate": 2.0000000000000003e-06, | |
| "loss": 1.1402, | |
| "step": 40 | |
| }, | |
| { | |
| "epoch": 0.1754636233951498, | |
| "grad_norm": 13.135544776916504, | |
| "learning_rate": 2.05e-06, | |
| "loss": 0.9867, | |
| "step": 41 | |
| }, | |
| { | |
| "epoch": 0.1797432239657632, | |
| "grad_norm": 12.429509162902832, | |
| "learning_rate": 2.1000000000000002e-06, | |
| "loss": 0.8455, | |
| "step": 42 | |
| }, | |
| { | |
| "epoch": 0.1840228245363766, | |
| "grad_norm": 11.61819076538086, | |
| "learning_rate": 2.15e-06, | |
| "loss": 0.6935, | |
| "step": 43 | |
| }, | |
| { | |
| "epoch": 0.18830242510699002, | |
| "grad_norm": 10.11789608001709, | |
| "learning_rate": 2.2e-06, | |
| "loss": 0.5486, | |
| "step": 44 | |
| }, | |
| { | |
| "epoch": 0.19258202567760344, | |
| "grad_norm": 8.705474853515625, | |
| "learning_rate": 2.25e-06, | |
| "loss": 0.4345, | |
| "step": 45 | |
| }, | |
| { | |
| "epoch": 0.19686162624821682, | |
| "grad_norm": 7.407937526702881, | |
| "learning_rate": 2.3000000000000004e-06, | |
| "loss": 0.3223, | |
| "step": 46 | |
| }, | |
| { | |
| "epoch": 0.20114122681883023, | |
| "grad_norm": 5.553786754608154, | |
| "learning_rate": 2.35e-06, | |
| "loss": 0.2376, | |
| "step": 47 | |
| }, | |
| { | |
| "epoch": 0.20542082738944364, | |
| "grad_norm": 3.248605966567993, | |
| "learning_rate": 2.4000000000000003e-06, | |
| "loss": 0.198, | |
| "step": 48 | |
| }, | |
| { | |
| "epoch": 0.20970042796005706, | |
| "grad_norm": 3.1356797218322754, | |
| "learning_rate": 2.4500000000000003e-06, | |
| "loss": 0.1706, | |
| "step": 49 | |
| }, | |
| { | |
| "epoch": 0.21398002853067047, | |
| "grad_norm": 2.7913248538970947, | |
| "learning_rate": 2.5e-06, | |
| "loss": 0.1436, | |
| "step": 50 | |
| }, | |
| { | |
| "epoch": 0.21825962910128388, | |
| "grad_norm": 2.2059645652770996, | |
| "learning_rate": 2.55e-06, | |
| "loss": 0.144, | |
| "step": 51 | |
| }, | |
| { | |
| "epoch": 0.2225392296718973, | |
| "grad_norm": 1.5722575187683105, | |
| "learning_rate": 2.6e-06, | |
| "loss": 0.1203, | |
| "step": 52 | |
| }, | |
| { | |
| "epoch": 0.2268188302425107, | |
| "grad_norm": 1.3645284175872803, | |
| "learning_rate": 2.6500000000000005e-06, | |
| "loss": 0.1309, | |
| "step": 53 | |
| }, | |
| { | |
| "epoch": 0.23109843081312412, | |
| "grad_norm": 1.3658019304275513, | |
| "learning_rate": 2.7000000000000004e-06, | |
| "loss": 0.1122, | |
| "step": 54 | |
| }, | |
| { | |
| "epoch": 0.23537803138373753, | |
| "grad_norm": 1.1719582080841064, | |
| "learning_rate": 2.7500000000000004e-06, | |
| "loss": 0.1239, | |
| "step": 55 | |
| }, | |
| { | |
| "epoch": 0.2396576319543509, | |
| "grad_norm": 1.030941367149353, | |
| "learning_rate": 2.8000000000000003e-06, | |
| "loss": 0.1124, | |
| "step": 56 | |
| }, | |
| { | |
| "epoch": 0.24393723252496433, | |
| "grad_norm": 0.7212232947349548, | |
| "learning_rate": 2.85e-06, | |
| "loss": 0.0848, | |
| "step": 57 | |
| }, | |
| { | |
| "epoch": 0.24821683309557774, | |
| "grad_norm": 0.8292589783668518, | |
| "learning_rate": 2.9e-06, | |
| "loss": 0.0976, | |
| "step": 58 | |
| }, | |
| { | |
| "epoch": 0.2524964336661912, | |
| "grad_norm": 1.0893069505691528, | |
| "learning_rate": 2.95e-06, | |
| "loss": 0.0954, | |
| "step": 59 | |
| }, | |
| { | |
| "epoch": 0.25677603423680456, | |
| "grad_norm": 0.8184618353843689, | |
| "learning_rate": 3e-06, | |
| "loss": 0.1029, | |
| "step": 60 | |
| }, | |
| { | |
| "epoch": 0.26105563480741795, | |
| "grad_norm": 0.551844596862793, | |
| "learning_rate": 3.05e-06, | |
| "loss": 0.0846, | |
| "step": 61 | |
| }, | |
| { | |
| "epoch": 0.2653352353780314, | |
| "grad_norm": 0.6214303970336914, | |
| "learning_rate": 3.1000000000000004e-06, | |
| "loss": 0.0855, | |
| "step": 62 | |
| }, | |
| { | |
| "epoch": 0.26961483594864477, | |
| "grad_norm": 0.6127054691314697, | |
| "learning_rate": 3.1500000000000003e-06, | |
| "loss": 0.0858, | |
| "step": 63 | |
| }, | |
| { | |
| "epoch": 0.2738944365192582, | |
| "grad_norm": 0.6121392250061035, | |
| "learning_rate": 3.2000000000000003e-06, | |
| "loss": 0.0913, | |
| "step": 64 | |
| }, | |
| { | |
| "epoch": 0.2781740370898716, | |
| "grad_norm": 0.5743774771690369, | |
| "learning_rate": 3.2500000000000002e-06, | |
| "loss": 0.0813, | |
| "step": 65 | |
| }, | |
| { | |
| "epoch": 0.28245363766048504, | |
| "grad_norm": 0.6720462441444397, | |
| "learning_rate": 3.3000000000000006e-06, | |
| "loss": 0.0871, | |
| "step": 66 | |
| }, | |
| { | |
| "epoch": 0.2867332382310984, | |
| "grad_norm": 0.5387166142463684, | |
| "learning_rate": 3.3500000000000005e-06, | |
| "loss": 0.0722, | |
| "step": 67 | |
| }, | |
| { | |
| "epoch": 0.29101283880171186, | |
| "grad_norm": 0.4062807261943817, | |
| "learning_rate": 3.4000000000000005e-06, | |
| "loss": 0.0699, | |
| "step": 68 | |
| }, | |
| { | |
| "epoch": 0.29529243937232524, | |
| "grad_norm": 0.43954649567604065, | |
| "learning_rate": 3.45e-06, | |
| "loss": 0.0808, | |
| "step": 69 | |
| }, | |
| { | |
| "epoch": 0.2995720399429387, | |
| "grad_norm": 0.5136083960533142, | |
| "learning_rate": 3.5e-06, | |
| "loss": 0.0782, | |
| "step": 70 | |
| }, | |
| { | |
| "epoch": 0.30385164051355207, | |
| "grad_norm": 0.43194347620010376, | |
| "learning_rate": 3.5500000000000003e-06, | |
| "loss": 0.0883, | |
| "step": 71 | |
| }, | |
| { | |
| "epoch": 0.30813124108416545, | |
| "grad_norm": 0.42371511459350586, | |
| "learning_rate": 3.6000000000000003e-06, | |
| "loss": 0.0875, | |
| "step": 72 | |
| }, | |
| { | |
| "epoch": 0.3124108416547789, | |
| "grad_norm": 0.4384445250034332, | |
| "learning_rate": 3.65e-06, | |
| "loss": 0.0842, | |
| "step": 73 | |
| }, | |
| { | |
| "epoch": 0.3166904422253923, | |
| "grad_norm": 0.3909939229488373, | |
| "learning_rate": 3.7e-06, | |
| "loss": 0.0718, | |
| "step": 74 | |
| }, | |
| { | |
| "epoch": 0.3209700427960057, | |
| "grad_norm": 0.33483919501304626, | |
| "learning_rate": 3.7500000000000005e-06, | |
| "loss": 0.0766, | |
| "step": 75 | |
| }, | |
| { | |
| "epoch": 0.3252496433666191, | |
| "grad_norm": 0.6500667929649353, | |
| "learning_rate": 3.8000000000000005e-06, | |
| "loss": 0.0841, | |
| "step": 76 | |
| }, | |
| { | |
| "epoch": 0.32952924393723254, | |
| "grad_norm": 0.42915841937065125, | |
| "learning_rate": 3.85e-06, | |
| "loss": 0.0743, | |
| "step": 77 | |
| }, | |
| { | |
| "epoch": 0.3338088445078459, | |
| "grad_norm": 0.5390797853469849, | |
| "learning_rate": 3.900000000000001e-06, | |
| "loss": 0.0784, | |
| "step": 78 | |
| }, | |
| { | |
| "epoch": 0.33808844507845937, | |
| "grad_norm": 0.4562499225139618, | |
| "learning_rate": 3.95e-06, | |
| "loss": 0.0768, | |
| "step": 79 | |
| }, | |
| { | |
| "epoch": 0.34236804564907275, | |
| "grad_norm": 0.45602527260780334, | |
| "learning_rate": 4.000000000000001e-06, | |
| "loss": 0.076, | |
| "step": 80 | |
| }, | |
| { | |
| "epoch": 0.3466476462196862, | |
| "grad_norm": 0.3802862763404846, | |
| "learning_rate": 4.05e-06, | |
| "loss": 0.0776, | |
| "step": 81 | |
| }, | |
| { | |
| "epoch": 0.3509272467902996, | |
| "grad_norm": 0.3189656138420105, | |
| "learning_rate": 4.1e-06, | |
| "loss": 0.0695, | |
| "step": 82 | |
| }, | |
| { | |
| "epoch": 0.35520684736091296, | |
| "grad_norm": 0.37464478611946106, | |
| "learning_rate": 4.15e-06, | |
| "loss": 0.0732, | |
| "step": 83 | |
| }, | |
| { | |
| "epoch": 0.3594864479315264, | |
| "grad_norm": 0.5200878977775574, | |
| "learning_rate": 4.2000000000000004e-06, | |
| "loss": 0.0646, | |
| "step": 84 | |
| }, | |
| { | |
| "epoch": 0.3637660485021398, | |
| "grad_norm": 0.3902634382247925, | |
| "learning_rate": 4.25e-06, | |
| "loss": 0.0751, | |
| "step": 85 | |
| }, | |
| { | |
| "epoch": 0.3680456490727532, | |
| "grad_norm": 0.4341444969177246, | |
| "learning_rate": 4.3e-06, | |
| "loss": 0.0739, | |
| "step": 86 | |
| }, | |
| { | |
| "epoch": 0.3723252496433666, | |
| "grad_norm": 0.3288861811161041, | |
| "learning_rate": 4.350000000000001e-06, | |
| "loss": 0.0631, | |
| "step": 87 | |
| }, | |
| { | |
| "epoch": 0.37660485021398005, | |
| "grad_norm": 0.33849674463272095, | |
| "learning_rate": 4.4e-06, | |
| "loss": 0.0663, | |
| "step": 88 | |
| }, | |
| { | |
| "epoch": 0.38088445078459343, | |
| "grad_norm": 0.36165380477905273, | |
| "learning_rate": 4.450000000000001e-06, | |
| "loss": 0.0618, | |
| "step": 89 | |
| }, | |
| { | |
| "epoch": 0.38516405135520687, | |
| "grad_norm": 0.5453753471374512, | |
| "learning_rate": 4.5e-06, | |
| "loss": 0.0734, | |
| "step": 90 | |
| }, | |
| { | |
| "epoch": 0.38944365192582026, | |
| "grad_norm": 0.4055081605911255, | |
| "learning_rate": 4.5500000000000005e-06, | |
| "loss": 0.0684, | |
| "step": 91 | |
| }, | |
| { | |
| "epoch": 0.39372325249643364, | |
| "grad_norm": 0.4177473783493042, | |
| "learning_rate": 4.600000000000001e-06, | |
| "loss": 0.0605, | |
| "step": 92 | |
| }, | |
| { | |
| "epoch": 0.3980028530670471, | |
| "grad_norm": 0.35140708088874817, | |
| "learning_rate": 4.65e-06, | |
| "loss": 0.0695, | |
| "step": 93 | |
| }, | |
| { | |
| "epoch": 0.40228245363766046, | |
| "grad_norm": 0.5472511053085327, | |
| "learning_rate": 4.7e-06, | |
| "loss": 0.0743, | |
| "step": 94 | |
| }, | |
| { | |
| "epoch": 0.4065620542082739, | |
| "grad_norm": 0.35946714878082275, | |
| "learning_rate": 4.75e-06, | |
| "loss": 0.0658, | |
| "step": 95 | |
| }, | |
| { | |
| "epoch": 0.4108416547788873, | |
| "grad_norm": 0.3542482852935791, | |
| "learning_rate": 4.800000000000001e-06, | |
| "loss": 0.0707, | |
| "step": 96 | |
| }, | |
| { | |
| "epoch": 0.41512125534950073, | |
| "grad_norm": 0.5081551671028137, | |
| "learning_rate": 4.85e-06, | |
| "loss": 0.0774, | |
| "step": 97 | |
| }, | |
| { | |
| "epoch": 0.4194008559201141, | |
| "grad_norm": 0.3265087902545929, | |
| "learning_rate": 4.9000000000000005e-06, | |
| "loss": 0.0715, | |
| "step": 98 | |
| }, | |
| { | |
| "epoch": 0.42368045649072755, | |
| "grad_norm": 0.4452495574951172, | |
| "learning_rate": 4.95e-06, | |
| "loss": 0.064, | |
| "step": 99 | |
| }, | |
| { | |
| "epoch": 0.42796005706134094, | |
| "grad_norm": 0.33817920088768005, | |
| "learning_rate": 5e-06, | |
| "loss": 0.0674, | |
| "step": 100 | |
| }, | |
| { | |
| "epoch": 0.4322396576319544, | |
| "grad_norm": 0.4477202296257019, | |
| "learning_rate": 4.9999926774868305e-06, | |
| "loss": 0.0701, | |
| "step": 101 | |
| }, | |
| { | |
| "epoch": 0.43651925820256776, | |
| "grad_norm": 0.3426607847213745, | |
| "learning_rate": 4.999970709990216e-06, | |
| "loss": 0.0637, | |
| "step": 102 | |
| }, | |
| { | |
| "epoch": 0.44079885877318115, | |
| "grad_norm": 0.4300517737865448, | |
| "learning_rate": 4.999934097638842e-06, | |
| "loss": 0.0716, | |
| "step": 103 | |
| }, | |
| { | |
| "epoch": 0.4450784593437946, | |
| "grad_norm": 0.29154735803604126, | |
| "learning_rate": 4.9998828406471855e-06, | |
| "loss": 0.066, | |
| "step": 104 | |
| }, | |
| { | |
| "epoch": 0.44935805991440797, | |
| "grad_norm": 0.38544416427612305, | |
| "learning_rate": 4.99981693931551e-06, | |
| "loss": 0.0735, | |
| "step": 105 | |
| }, | |
| { | |
| "epoch": 0.4536376604850214, | |
| "grad_norm": 0.42879173159599304, | |
| "learning_rate": 4.999736394029866e-06, | |
| "loss": 0.0724, | |
| "step": 106 | |
| }, | |
| { | |
| "epoch": 0.4579172610556348, | |
| "grad_norm": 0.45247069001197815, | |
| "learning_rate": 4.999641205262088e-06, | |
| "loss": 0.0624, | |
| "step": 107 | |
| }, | |
| { | |
| "epoch": 0.46219686162624823, | |
| "grad_norm": 0.45127418637275696, | |
| "learning_rate": 4.9995313735697935e-06, | |
| "loss": 0.0635, | |
| "step": 108 | |
| }, | |
| { | |
| "epoch": 0.4664764621968616, | |
| "grad_norm": 0.4134274125099182, | |
| "learning_rate": 4.999406899596378e-06, | |
| "loss": 0.0654, | |
| "step": 109 | |
| }, | |
| { | |
| "epoch": 0.47075606276747506, | |
| "grad_norm": 0.5010129809379578, | |
| "learning_rate": 4.99926778407101e-06, | |
| "loss": 0.0678, | |
| "step": 110 | |
| }, | |
| { | |
| "epoch": 0.47503566333808844, | |
| "grad_norm": 0.7583813667297363, | |
| "learning_rate": 4.999114027808632e-06, | |
| "loss": 0.0729, | |
| "step": 111 | |
| }, | |
| { | |
| "epoch": 0.4793152639087018, | |
| "grad_norm": 0.39662906527519226, | |
| "learning_rate": 4.998945631709948e-06, | |
| "loss": 0.0688, | |
| "step": 112 | |
| }, | |
| { | |
| "epoch": 0.48359486447931527, | |
| "grad_norm": 0.33845070004463196, | |
| "learning_rate": 4.998762596761424e-06, | |
| "loss": 0.0689, | |
| "step": 113 | |
| }, | |
| { | |
| "epoch": 0.48787446504992865, | |
| "grad_norm": 0.6589358448982239, | |
| "learning_rate": 4.998564924035282e-06, | |
| "loss": 0.0664, | |
| "step": 114 | |
| }, | |
| { | |
| "epoch": 0.4921540656205421, | |
| "grad_norm": 0.4025883078575134, | |
| "learning_rate": 4.99835261468949e-06, | |
| "loss": 0.0731, | |
| "step": 115 | |
| }, | |
| { | |
| "epoch": 0.4964336661911555, | |
| "grad_norm": 0.32868894934654236, | |
| "learning_rate": 4.998125669967758e-06, | |
| "loss": 0.0657, | |
| "step": 116 | |
| }, | |
| { | |
| "epoch": 0.5007132667617689, | |
| "grad_norm": 0.3705316483974457, | |
| "learning_rate": 4.997884091199531e-06, | |
| "loss": 0.0668, | |
| "step": 117 | |
| }, | |
| { | |
| "epoch": 0.5049928673323824, | |
| "grad_norm": 0.32613426446914673, | |
| "learning_rate": 4.997627879799981e-06, | |
| "loss": 0.0654, | |
| "step": 118 | |
| }, | |
| { | |
| "epoch": 0.5092724679029957, | |
| "grad_norm": 0.42335274815559387, | |
| "learning_rate": 4.997357037269996e-06, | |
| "loss": 0.0695, | |
| "step": 119 | |
| }, | |
| { | |
| "epoch": 0.5135520684736091, | |
| "grad_norm": 0.39254000782966614, | |
| "learning_rate": 4.997071565196175e-06, | |
| "loss": 0.0663, | |
| "step": 120 | |
| }, | |
| { | |
| "epoch": 0.5178316690442225, | |
| "grad_norm": 0.33641666173934937, | |
| "learning_rate": 4.996771465250814e-06, | |
| "loss": 0.0626, | |
| "step": 121 | |
| }, | |
| { | |
| "epoch": 0.5221112696148359, | |
| "grad_norm": 0.38578540086746216, | |
| "learning_rate": 4.996456739191905e-06, | |
| "loss": 0.064, | |
| "step": 122 | |
| }, | |
| { | |
| "epoch": 0.5263908701854494, | |
| "grad_norm": 0.31561926007270813, | |
| "learning_rate": 4.996127388863116e-06, | |
| "loss": 0.0626, | |
| "step": 123 | |
| }, | |
| { | |
| "epoch": 0.5306704707560628, | |
| "grad_norm": 0.27469775080680847, | |
| "learning_rate": 4.995783416193782e-06, | |
| "loss": 0.0617, | |
| "step": 124 | |
| }, | |
| { | |
| "epoch": 0.5349500713266762, | |
| "grad_norm": 0.2643572986125946, | |
| "learning_rate": 4.9954248231989016e-06, | |
| "loss": 0.0592, | |
| "step": 125 | |
| }, | |
| { | |
| "epoch": 0.5392296718972895, | |
| "grad_norm": 0.32749131321907043, | |
| "learning_rate": 4.995051611979115e-06, | |
| "loss": 0.0597, | |
| "step": 126 | |
| }, | |
| { | |
| "epoch": 0.543509272467903, | |
| "grad_norm": 0.4380977153778076, | |
| "learning_rate": 4.994663784720698e-06, | |
| "loss": 0.0663, | |
| "step": 127 | |
| }, | |
| { | |
| "epoch": 0.5477888730385164, | |
| "grad_norm": 0.30976516008377075, | |
| "learning_rate": 4.994261343695546e-06, | |
| "loss": 0.0634, | |
| "step": 128 | |
| }, | |
| { | |
| "epoch": 0.5520684736091298, | |
| "grad_norm": 0.4299394488334656, | |
| "learning_rate": 4.9938442912611625e-06, | |
| "loss": 0.065, | |
| "step": 129 | |
| }, | |
| { | |
| "epoch": 0.5563480741797432, | |
| "grad_norm": 0.3520357012748718, | |
| "learning_rate": 4.993412629860646e-06, | |
| "loss": 0.059, | |
| "step": 130 | |
| }, | |
| { | |
| "epoch": 0.5606276747503567, | |
| "grad_norm": 0.44977450370788574, | |
| "learning_rate": 4.9929663620226734e-06, | |
| "loss": 0.0639, | |
| "step": 131 | |
| }, | |
| { | |
| "epoch": 0.5649072753209701, | |
| "grad_norm": 0.3896942436695099, | |
| "learning_rate": 4.992505490361487e-06, | |
| "loss": 0.0608, | |
| "step": 132 | |
| }, | |
| { | |
| "epoch": 0.5691868758915835, | |
| "grad_norm": 0.3307473063468933, | |
| "learning_rate": 4.992030017576876e-06, | |
| "loss": 0.0649, | |
| "step": 133 | |
| }, | |
| { | |
| "epoch": 0.5734664764621968, | |
| "grad_norm": 0.27266785502433777, | |
| "learning_rate": 4.991539946454166e-06, | |
| "loss": 0.058, | |
| "step": 134 | |
| }, | |
| { | |
| "epoch": 0.5777460770328102, | |
| "grad_norm": 0.43257489800453186, | |
| "learning_rate": 4.991035279864199e-06, | |
| "loss": 0.0651, | |
| "step": 135 | |
| }, | |
| { | |
| "epoch": 0.5820256776034237, | |
| "grad_norm": 0.33029407262802124, | |
| "learning_rate": 4.990516020763317e-06, | |
| "loss": 0.0601, | |
| "step": 136 | |
| }, | |
| { | |
| "epoch": 0.5863052781740371, | |
| "grad_norm": 0.40888652205467224, | |
| "learning_rate": 4.989982172193346e-06, | |
| "loss": 0.0676, | |
| "step": 137 | |
| }, | |
| { | |
| "epoch": 0.5905848787446505, | |
| "grad_norm": 0.2740240693092346, | |
| "learning_rate": 4.989433737281576e-06, | |
| "loss": 0.0599, | |
| "step": 138 | |
| }, | |
| { | |
| "epoch": 0.5948644793152639, | |
| "grad_norm": 0.3733709156513214, | |
| "learning_rate": 4.988870719240744e-06, | |
| "loss": 0.0571, | |
| "step": 139 | |
| }, | |
| { | |
| "epoch": 0.5991440798858774, | |
| "grad_norm": 0.33992213010787964, | |
| "learning_rate": 4.988293121369016e-06, | |
| "loss": 0.0615, | |
| "step": 140 | |
| }, | |
| { | |
| "epoch": 0.6034236804564908, | |
| "grad_norm": 0.34187227487564087, | |
| "learning_rate": 4.987700947049966e-06, | |
| "loss": 0.0625, | |
| "step": 141 | |
| }, | |
| { | |
| "epoch": 0.6077032810271041, | |
| "grad_norm": 0.35290244221687317, | |
| "learning_rate": 4.987094199752558e-06, | |
| "loss": 0.0606, | |
| "step": 142 | |
| }, | |
| { | |
| "epoch": 0.6119828815977175, | |
| "grad_norm": 0.2595992982387543, | |
| "learning_rate": 4.986472883031124e-06, | |
| "loss": 0.0551, | |
| "step": 143 | |
| }, | |
| { | |
| "epoch": 0.6162624821683309, | |
| "grad_norm": 0.43302714824676514, | |
| "learning_rate": 4.9858370005253435e-06, | |
| "loss": 0.0643, | |
| "step": 144 | |
| }, | |
| { | |
| "epoch": 0.6205420827389444, | |
| "grad_norm": 0.3430291414260864, | |
| "learning_rate": 4.985186555960223e-06, | |
| "loss": 0.0576, | |
| "step": 145 | |
| }, | |
| { | |
| "epoch": 0.6248216833095578, | |
| "grad_norm": 0.5924923419952393, | |
| "learning_rate": 4.984521553146074e-06, | |
| "loss": 0.0679, | |
| "step": 146 | |
| }, | |
| { | |
| "epoch": 0.6291012838801712, | |
| "grad_norm": 0.3352043330669403, | |
| "learning_rate": 4.98384199597849e-06, | |
| "loss": 0.0636, | |
| "step": 147 | |
| }, | |
| { | |
| "epoch": 0.6333808844507846, | |
| "grad_norm": 0.31009000539779663, | |
| "learning_rate": 4.983147888438324e-06, | |
| "loss": 0.0499, | |
| "step": 148 | |
| }, | |
| { | |
| "epoch": 0.637660485021398, | |
| "grad_norm": 0.41210371255874634, | |
| "learning_rate": 4.982439234591665e-06, | |
| "loss": 0.0632, | |
| "step": 149 | |
| }, | |
| { | |
| "epoch": 0.6419400855920114, | |
| "grad_norm": 0.30768534541130066, | |
| "learning_rate": 4.9817160385898145e-06, | |
| "loss": 0.0669, | |
| "step": 150 | |
| }, | |
| { | |
| "epoch": 0.6462196861626248, | |
| "grad_norm": 0.4492046535015106, | |
| "learning_rate": 4.980978304669263e-06, | |
| "loss": 0.0603, | |
| "step": 151 | |
| }, | |
| { | |
| "epoch": 0.6504992867332382, | |
| "grad_norm": 0.3134528696537018, | |
| "learning_rate": 4.9802260371516635e-06, | |
| "loss": 0.0553, | |
| "step": 152 | |
| }, | |
| { | |
| "epoch": 0.6547788873038516, | |
| "grad_norm": 0.33358660340309143, | |
| "learning_rate": 4.979459240443806e-06, | |
| "loss": 0.061, | |
| "step": 153 | |
| }, | |
| { | |
| "epoch": 0.6590584878744651, | |
| "grad_norm": 0.2705594301223755, | |
| "learning_rate": 4.978677919037594e-06, | |
| "loss": 0.0525, | |
| "step": 154 | |
| }, | |
| { | |
| "epoch": 0.6633380884450785, | |
| "grad_norm": 0.4429916739463806, | |
| "learning_rate": 4.977882077510018e-06, | |
| "loss": 0.0595, | |
| "step": 155 | |
| }, | |
| { | |
| "epoch": 0.6676176890156919, | |
| "grad_norm": 0.37663739919662476, | |
| "learning_rate": 4.977071720523125e-06, | |
| "loss": 0.0596, | |
| "step": 156 | |
| }, | |
| { | |
| "epoch": 0.6718972895863052, | |
| "grad_norm": 0.5795411467552185, | |
| "learning_rate": 4.9762468528239945e-06, | |
| "loss": 0.0629, | |
| "step": 157 | |
| }, | |
| { | |
| "epoch": 0.6761768901569187, | |
| "grad_norm": 0.2995544373989105, | |
| "learning_rate": 4.975407479244711e-06, | |
| "loss": 0.0577, | |
| "step": 158 | |
| }, | |
| { | |
| "epoch": 0.6804564907275321, | |
| "grad_norm": 0.2688761055469513, | |
| "learning_rate": 4.974553604702332e-06, | |
| "loss": 0.0612, | |
| "step": 159 | |
| }, | |
| { | |
| "epoch": 0.6847360912981455, | |
| "grad_norm": 0.32369598746299744, | |
| "learning_rate": 4.9736852341988655e-06, | |
| "loss": 0.054, | |
| "step": 160 | |
| }, | |
| { | |
| "epoch": 0.6890156918687589, | |
| "grad_norm": 0.24816952645778656, | |
| "learning_rate": 4.972802372821235e-06, | |
| "loss": 0.0631, | |
| "step": 161 | |
| }, | |
| { | |
| "epoch": 0.6932952924393724, | |
| "grad_norm": 0.33390095829963684, | |
| "learning_rate": 4.97190502574125e-06, | |
| "loss": 0.0629, | |
| "step": 162 | |
| }, | |
| { | |
| "epoch": 0.6975748930099858, | |
| "grad_norm": 0.2538573443889618, | |
| "learning_rate": 4.9709931982155805e-06, | |
| "loss": 0.0585, | |
| "step": 163 | |
| }, | |
| { | |
| "epoch": 0.7018544935805991, | |
| "grad_norm": 0.337729275226593, | |
| "learning_rate": 4.970066895585722e-06, | |
| "loss": 0.0534, | |
| "step": 164 | |
| }, | |
| { | |
| "epoch": 0.7061340941512125, | |
| "grad_norm": 0.3311474621295929, | |
| "learning_rate": 4.9691261232779644e-06, | |
| "loss": 0.0592, | |
| "step": 165 | |
| }, | |
| { | |
| "epoch": 0.7104136947218259, | |
| "grad_norm": 0.4538433849811554, | |
| "learning_rate": 4.968170886803361e-06, | |
| "loss": 0.0607, | |
| "step": 166 | |
| }, | |
| { | |
| "epoch": 0.7146932952924394, | |
| "grad_norm": 0.3233712911605835, | |
| "learning_rate": 4.9672011917577e-06, | |
| "loss": 0.0557, | |
| "step": 167 | |
| }, | |
| { | |
| "epoch": 0.7189728958630528, | |
| "grad_norm": 0.41257205605506897, | |
| "learning_rate": 4.966217043821462e-06, | |
| "loss": 0.0618, | |
| "step": 168 | |
| }, | |
| { | |
| "epoch": 0.7232524964336662, | |
| "grad_norm": 0.36823365092277527, | |
| "learning_rate": 4.965218448759797e-06, | |
| "loss": 0.0619, | |
| "step": 169 | |
| }, | |
| { | |
| "epoch": 0.7275320970042796, | |
| "grad_norm": 0.38311564922332764, | |
| "learning_rate": 4.964205412422487e-06, | |
| "loss": 0.0668, | |
| "step": 170 | |
| }, | |
| { | |
| "epoch": 0.7318116975748931, | |
| "grad_norm": 0.31038492918014526, | |
| "learning_rate": 4.963177940743908e-06, | |
| "loss": 0.0573, | |
| "step": 171 | |
| }, | |
| { | |
| "epoch": 0.7360912981455064, | |
| "grad_norm": 0.2835666835308075, | |
| "learning_rate": 4.962136039742998e-06, | |
| "loss": 0.0649, | |
| "step": 172 | |
| }, | |
| { | |
| "epoch": 0.7403708987161198, | |
| "grad_norm": 0.4525282680988312, | |
| "learning_rate": 4.961079715523228e-06, | |
| "loss": 0.0593, | |
| "step": 173 | |
| }, | |
| { | |
| "epoch": 0.7446504992867332, | |
| "grad_norm": 0.33394986391067505, | |
| "learning_rate": 4.9600089742725545e-06, | |
| "loss": 0.0601, | |
| "step": 174 | |
| }, | |
| { | |
| "epoch": 0.7489300998573466, | |
| "grad_norm": 0.2642901837825775, | |
| "learning_rate": 4.95892382226339e-06, | |
| "loss": 0.0623, | |
| "step": 175 | |
| }, | |
| { | |
| "epoch": 0.7532097004279601, | |
| "grad_norm": 0.34347841143608093, | |
| "learning_rate": 4.95782426585257e-06, | |
| "loss": 0.0568, | |
| "step": 176 | |
| }, | |
| { | |
| "epoch": 0.7574893009985735, | |
| "grad_norm": 0.30288106203079224, | |
| "learning_rate": 4.956710311481303e-06, | |
| "loss": 0.0569, | |
| "step": 177 | |
| }, | |
| { | |
| "epoch": 0.7617689015691869, | |
| "grad_norm": 0.285087913274765, | |
| "learning_rate": 4.955581965675148e-06, | |
| "loss": 0.0534, | |
| "step": 178 | |
| }, | |
| { | |
| "epoch": 0.7660485021398002, | |
| "grad_norm": 0.2861518859863281, | |
| "learning_rate": 4.954439235043966e-06, | |
| "loss": 0.0536, | |
| "step": 179 | |
| }, | |
| { | |
| "epoch": 0.7703281027104137, | |
| "grad_norm": 0.4016513526439667, | |
| "learning_rate": 4.953282126281884e-06, | |
| "loss": 0.0575, | |
| "step": 180 | |
| }, | |
| { | |
| "epoch": 0.7746077032810271, | |
| "grad_norm": 0.4838986098766327, | |
| "learning_rate": 4.9521106461672605e-06, | |
| "loss": 0.0666, | |
| "step": 181 | |
| }, | |
| { | |
| "epoch": 0.7788873038516405, | |
| "grad_norm": 0.270673006772995, | |
| "learning_rate": 4.950924801562636e-06, | |
| "loss": 0.0579, | |
| "step": 182 | |
| }, | |
| { | |
| "epoch": 0.7831669044222539, | |
| "grad_norm": 0.31113702058792114, | |
| "learning_rate": 4.9497245994147e-06, | |
| "loss": 0.0546, | |
| "step": 183 | |
| }, | |
| { | |
| "epoch": 0.7874465049928673, | |
| "grad_norm": 0.2960825264453888, | |
| "learning_rate": 4.948510046754251e-06, | |
| "loss": 0.0553, | |
| "step": 184 | |
| }, | |
| { | |
| "epoch": 0.7917261055634808, | |
| "grad_norm": 0.3720844089984894, | |
| "learning_rate": 4.947281150696151e-06, | |
| "loss": 0.0607, | |
| "step": 185 | |
| }, | |
| { | |
| "epoch": 0.7960057061340942, | |
| "grad_norm": 0.2569741904735565, | |
| "learning_rate": 4.946037918439285e-06, | |
| "loss": 0.0552, | |
| "step": 186 | |
| }, | |
| { | |
| "epoch": 0.8002853067047075, | |
| "grad_norm": 0.3115193843841553, | |
| "learning_rate": 4.944780357266522e-06, | |
| "loss": 0.0544, | |
| "step": 187 | |
| }, | |
| { | |
| "epoch": 0.8045649072753209, | |
| "grad_norm": 0.2828395366668701, | |
| "learning_rate": 4.943508474544667e-06, | |
| "loss": 0.0541, | |
| "step": 188 | |
| }, | |
| { | |
| "epoch": 0.8088445078459344, | |
| "grad_norm": 0.29760900139808655, | |
| "learning_rate": 4.942222277724424e-06, | |
| "loss": 0.0576, | |
| "step": 189 | |
| }, | |
| { | |
| "epoch": 0.8131241084165478, | |
| "grad_norm": 0.3851959705352783, | |
| "learning_rate": 4.940921774340347e-06, | |
| "loss": 0.059, | |
| "step": 190 | |
| }, | |
| { | |
| "epoch": 0.8174037089871612, | |
| "grad_norm": 0.35299891233444214, | |
| "learning_rate": 4.939606972010798e-06, | |
| "loss": 0.0627, | |
| "step": 191 | |
| }, | |
| { | |
| "epoch": 0.8216833095577746, | |
| "grad_norm": 0.30920645594596863, | |
| "learning_rate": 4.9382778784379036e-06, | |
| "loss": 0.053, | |
| "step": 192 | |
| }, | |
| { | |
| "epoch": 0.8259629101283881, | |
| "grad_norm": 0.31357741355895996, | |
| "learning_rate": 4.936934501407507e-06, | |
| "loss": 0.0523, | |
| "step": 193 | |
| }, | |
| { | |
| "epoch": 0.8302425106990015, | |
| "grad_norm": 0.28295785188674927, | |
| "learning_rate": 4.935576848789127e-06, | |
| "loss": 0.0521, | |
| "step": 194 | |
| }, | |
| { | |
| "epoch": 0.8345221112696148, | |
| "grad_norm": 0.3303717076778412, | |
| "learning_rate": 4.934204928535905e-06, | |
| "loss": 0.0564, | |
| "step": 195 | |
| }, | |
| { | |
| "epoch": 0.8388017118402282, | |
| "grad_norm": 0.31010597944259644, | |
| "learning_rate": 4.932818748684565e-06, | |
| "loss": 0.0495, | |
| "step": 196 | |
| }, | |
| { | |
| "epoch": 0.8430813124108416, | |
| "grad_norm": 0.33654817938804626, | |
| "learning_rate": 4.931418317355362e-06, | |
| "loss": 0.0536, | |
| "step": 197 | |
| }, | |
| { | |
| "epoch": 0.8473609129814551, | |
| "grad_norm": 0.35488635301589966, | |
| "learning_rate": 4.9300036427520396e-06, | |
| "loss": 0.054, | |
| "step": 198 | |
| }, | |
| { | |
| "epoch": 0.8516405135520685, | |
| "grad_norm": 0.37233811616897583, | |
| "learning_rate": 4.928574733161775e-06, | |
| "loss": 0.0527, | |
| "step": 199 | |
| }, | |
| { | |
| "epoch": 0.8559201141226819, | |
| "grad_norm": 0.3253094553947449, | |
| "learning_rate": 4.927131596955137e-06, | |
| "loss": 0.0545, | |
| "step": 200 | |
| }, | |
| { | |
| "epoch": 0.8601997146932953, | |
| "grad_norm": 0.345102995634079, | |
| "learning_rate": 4.92567424258603e-06, | |
| "loss": 0.0606, | |
| "step": 201 | |
| }, | |
| { | |
| "epoch": 0.8644793152639088, | |
| "grad_norm": 0.32971855998039246, | |
| "learning_rate": 4.924202678591655e-06, | |
| "loss": 0.0542, | |
| "step": 202 | |
| }, | |
| { | |
| "epoch": 0.8687589158345221, | |
| "grad_norm": 0.30636993050575256, | |
| "learning_rate": 4.922716913592447e-06, | |
| "loss": 0.0509, | |
| "step": 203 | |
| }, | |
| { | |
| "epoch": 0.8730385164051355, | |
| "grad_norm": 0.27384838461875916, | |
| "learning_rate": 4.921216956292033e-06, | |
| "loss": 0.0496, | |
| "step": 204 | |
| }, | |
| { | |
| "epoch": 0.8773181169757489, | |
| "grad_norm": 0.30968862771987915, | |
| "learning_rate": 4.919702815477179e-06, | |
| "loss": 0.0608, | |
| "step": 205 | |
| }, | |
| { | |
| "epoch": 0.8815977175463623, | |
| "grad_norm": 0.25974956154823303, | |
| "learning_rate": 4.918174500017739e-06, | |
| "loss": 0.0543, | |
| "step": 206 | |
| }, | |
| { | |
| "epoch": 0.8858773181169758, | |
| "grad_norm": 0.30472415685653687, | |
| "learning_rate": 4.916632018866599e-06, | |
| "loss": 0.0567, | |
| "step": 207 | |
| }, | |
| { | |
| "epoch": 0.8901569186875892, | |
| "grad_norm": 0.3091549277305603, | |
| "learning_rate": 4.91507538105963e-06, | |
| "loss": 0.0511, | |
| "step": 208 | |
| }, | |
| { | |
| "epoch": 0.8944365192582026, | |
| "grad_norm": 0.26851367950439453, | |
| "learning_rate": 4.9135045957156356e-06, | |
| "loss": 0.0536, | |
| "step": 209 | |
| }, | |
| { | |
| "epoch": 0.8987161198288159, | |
| "grad_norm": 0.25963643193244934, | |
| "learning_rate": 4.911919672036291e-06, | |
| "loss": 0.0509, | |
| "step": 210 | |
| }, | |
| { | |
| "epoch": 0.9029957203994294, | |
| "grad_norm": 0.28279975056648254, | |
| "learning_rate": 4.910320619306094e-06, | |
| "loss": 0.0538, | |
| "step": 211 | |
| }, | |
| { | |
| "epoch": 0.9072753209700428, | |
| "grad_norm": 0.26387840509414673, | |
| "learning_rate": 4.908707446892315e-06, | |
| "loss": 0.0504, | |
| "step": 212 | |
| }, | |
| { | |
| "epoch": 0.9115549215406562, | |
| "grad_norm": 0.2996926009654999, | |
| "learning_rate": 4.907080164244934e-06, | |
| "loss": 0.0497, | |
| "step": 213 | |
| }, | |
| { | |
| "epoch": 0.9158345221112696, | |
| "grad_norm": 0.4072028398513794, | |
| "learning_rate": 4.905438780896589e-06, | |
| "loss": 0.053, | |
| "step": 214 | |
| }, | |
| { | |
| "epoch": 0.920114122681883, | |
| "grad_norm": 0.5946587324142456, | |
| "learning_rate": 4.903783306462522e-06, | |
| "loss": 0.0577, | |
| "step": 215 | |
| }, | |
| { | |
| "epoch": 0.9243937232524965, | |
| "grad_norm": 0.34546515345573425, | |
| "learning_rate": 4.9021137506405205e-06, | |
| "loss": 0.0554, | |
| "step": 216 | |
| }, | |
| { | |
| "epoch": 0.9286733238231099, | |
| "grad_norm": 0.3417337238788605, | |
| "learning_rate": 4.9004301232108585e-06, | |
| "loss": 0.0529, | |
| "step": 217 | |
| }, | |
| { | |
| "epoch": 0.9329529243937232, | |
| "grad_norm": 0.31937581300735474, | |
| "learning_rate": 4.8987324340362445e-06, | |
| "loss": 0.0536, | |
| "step": 218 | |
| }, | |
| { | |
| "epoch": 0.9372325249643366, | |
| "grad_norm": 0.28485214710235596, | |
| "learning_rate": 4.897020693061758e-06, | |
| "loss": 0.0488, | |
| "step": 219 | |
| }, | |
| { | |
| "epoch": 0.9415121255349501, | |
| "grad_norm": 0.3833935558795929, | |
| "learning_rate": 4.895294910314797e-06, | |
| "loss": 0.0577, | |
| "step": 220 | |
| }, | |
| { | |
| "epoch": 0.9457917261055635, | |
| "grad_norm": 0.2772054076194763, | |
| "learning_rate": 4.8935550959050135e-06, | |
| "loss": 0.0506, | |
| "step": 221 | |
| }, | |
| { | |
| "epoch": 0.9500713266761769, | |
| "grad_norm": 0.35764405131340027, | |
| "learning_rate": 4.891801260024262e-06, | |
| "loss": 0.0518, | |
| "step": 222 | |
| }, | |
| { | |
| "epoch": 0.9543509272467903, | |
| "grad_norm": 0.30019721388816833, | |
| "learning_rate": 4.890033412946527e-06, | |
| "loss": 0.0525, | |
| "step": 223 | |
| }, | |
| { | |
| "epoch": 0.9586305278174037, | |
| "grad_norm": 0.3105527460575104, | |
| "learning_rate": 4.888251565027879e-06, | |
| "loss": 0.0552, | |
| "step": 224 | |
| }, | |
| { | |
| "epoch": 0.9629101283880172, | |
| "grad_norm": 0.35943159461021423, | |
| "learning_rate": 4.8864557267064e-06, | |
| "loss": 0.0543, | |
| "step": 225 | |
| }, | |
| { | |
| "epoch": 0.9671897289586305, | |
| "grad_norm": 0.3295605480670929, | |
| "learning_rate": 4.88464590850213e-06, | |
| "loss": 0.0565, | |
| "step": 226 | |
| }, | |
| { | |
| "epoch": 0.9714693295292439, | |
| "grad_norm": 0.31729599833488464, | |
| "learning_rate": 4.882822121017004e-06, | |
| "loss": 0.0524, | |
| "step": 227 | |
| }, | |
| { | |
| "epoch": 0.9757489300998573, | |
| "grad_norm": 0.4707350432872772, | |
| "learning_rate": 4.880984374934788e-06, | |
| "loss": 0.0589, | |
| "step": 228 | |
| }, | |
| { | |
| "epoch": 0.9800285306704708, | |
| "grad_norm": 0.35285064578056335, | |
| "learning_rate": 4.879132681021017e-06, | |
| "loss": 0.0509, | |
| "step": 229 | |
| }, | |
| { | |
| "epoch": 0.9843081312410842, | |
| "grad_norm": 0.32979413866996765, | |
| "learning_rate": 4.877267050122934e-06, | |
| "loss": 0.051, | |
| "step": 230 | |
| }, | |
| { | |
| "epoch": 0.9885877318116976, | |
| "grad_norm": 0.3415399491786957, | |
| "learning_rate": 4.8753874931694254e-06, | |
| "loss": 0.0507, | |
| "step": 231 | |
| }, | |
| { | |
| "epoch": 0.992867332382311, | |
| "grad_norm": 0.3240598738193512, | |
| "learning_rate": 4.8734940211709535e-06, | |
| "loss": 0.0524, | |
| "step": 232 | |
| }, | |
| { | |
| "epoch": 0.9971469329529244, | |
| "grad_norm": 0.31305283308029175, | |
| "learning_rate": 4.871586645219499e-06, | |
| "loss": 0.0528, | |
| "step": 233 | |
| }, | |
| { | |
| "epoch": 1.0042796005706134, | |
| "grad_norm": 0.5571584105491638, | |
| "learning_rate": 4.869665376488491e-06, | |
| "loss": 0.1007, | |
| "step": 234 | |
| }, | |
| { | |
| "epoch": 1.0085592011412268, | |
| "grad_norm": 0.2822844386100769, | |
| "learning_rate": 4.8677302262327386e-06, | |
| "loss": 0.0419, | |
| "step": 235 | |
| }, | |
| { | |
| "epoch": 1.0128388017118402, | |
| "grad_norm": 0.2870626747608185, | |
| "learning_rate": 4.8657812057883755e-06, | |
| "loss": 0.0487, | |
| "step": 236 | |
| }, | |
| { | |
| "epoch": 1.0171184022824535, | |
| "grad_norm": 0.2784869968891144, | |
| "learning_rate": 4.863818326572784e-06, | |
| "loss": 0.0393, | |
| "step": 237 | |
| }, | |
| { | |
| "epoch": 1.0213980028530671, | |
| "grad_norm": 0.26714813709259033, | |
| "learning_rate": 4.8618416000845296e-06, | |
| "loss": 0.0476, | |
| "step": 238 | |
| }, | |
| { | |
| "epoch": 1.0256776034236805, | |
| "grad_norm": 0.30474796891212463, | |
| "learning_rate": 4.859851037903297e-06, | |
| "loss": 0.043, | |
| "step": 239 | |
| }, | |
| { | |
| "epoch": 1.029957203994294, | |
| "grad_norm": 0.3593360483646393, | |
| "learning_rate": 4.857846651689821e-06, | |
| "loss": 0.0522, | |
| "step": 240 | |
| }, | |
| { | |
| "epoch": 1.0342368045649073, | |
| "grad_norm": 0.359836608171463, | |
| "learning_rate": 4.855828453185818e-06, | |
| "loss": 0.0536, | |
| "step": 241 | |
| }, | |
| { | |
| "epoch": 1.0385164051355207, | |
| "grad_norm": 0.3691597878932953, | |
| "learning_rate": 4.853796454213915e-06, | |
| "loss": 0.0432, | |
| "step": 242 | |
| }, | |
| { | |
| "epoch": 1.042796005706134, | |
| "grad_norm": 0.38553348183631897, | |
| "learning_rate": 4.851750666677583e-06, | |
| "loss": 0.0444, | |
| "step": 243 | |
| }, | |
| { | |
| "epoch": 1.0470756062767475, | |
| "grad_norm": 0.37977030873298645, | |
| "learning_rate": 4.849691102561069e-06, | |
| "loss": 0.0485, | |
| "step": 244 | |
| }, | |
| { | |
| "epoch": 1.0513552068473608, | |
| "grad_norm": 0.3164876699447632, | |
| "learning_rate": 4.847617773929319e-06, | |
| "loss": 0.0413, | |
| "step": 245 | |
| }, | |
| { | |
| "epoch": 1.0556348074179742, | |
| "grad_norm": 0.3686058223247528, | |
| "learning_rate": 4.845530692927915e-06, | |
| "loss": 0.048, | |
| "step": 246 | |
| }, | |
| { | |
| "epoch": 1.0599144079885878, | |
| "grad_norm": 0.3639174699783325, | |
| "learning_rate": 4.843429871782999e-06, | |
| "loss": 0.0465, | |
| "step": 247 | |
| }, | |
| { | |
| "epoch": 1.0641940085592012, | |
| "grad_norm": 0.33737680315971375, | |
| "learning_rate": 4.841315322801204e-06, | |
| "loss": 0.0417, | |
| "step": 248 | |
| }, | |
| { | |
| "epoch": 1.0684736091298146, | |
| "grad_norm": 0.31087926030158997, | |
| "learning_rate": 4.8391870583695796e-06, | |
| "loss": 0.0449, | |
| "step": 249 | |
| }, | |
| { | |
| "epoch": 1.072753209700428, | |
| "grad_norm": 0.3133867383003235, | |
| "learning_rate": 4.837045090955522e-06, | |
| "loss": 0.0453, | |
| "step": 250 | |
| }, | |
| { | |
| "epoch": 1.0770328102710414, | |
| "grad_norm": 0.3388800621032715, | |
| "learning_rate": 4.834889433106698e-06, | |
| "loss": 0.0415, | |
| "step": 251 | |
| }, | |
| { | |
| "epoch": 1.0813124108416547, | |
| "grad_norm": 0.3206201195716858, | |
| "learning_rate": 4.8327200974509754e-06, | |
| "loss": 0.048, | |
| "step": 252 | |
| }, | |
| { | |
| "epoch": 1.0855920114122681, | |
| "grad_norm": 0.3376302719116211, | |
| "learning_rate": 4.8305370966963446e-06, | |
| "loss": 0.0435, | |
| "step": 253 | |
| }, | |
| { | |
| "epoch": 1.0898716119828815, | |
| "grad_norm": 0.3998079001903534, | |
| "learning_rate": 4.828340443630847e-06, | |
| "loss": 0.0473, | |
| "step": 254 | |
| }, | |
| { | |
| "epoch": 1.094151212553495, | |
| "grad_norm": 0.36078789830207825, | |
| "learning_rate": 4.826130151122498e-06, | |
| "loss": 0.0455, | |
| "step": 255 | |
| }, | |
| { | |
| "epoch": 1.0984308131241085, | |
| "grad_norm": 0.3395013213157654, | |
| "learning_rate": 4.823906232119217e-06, | |
| "loss": 0.0435, | |
| "step": 256 | |
| }, | |
| { | |
| "epoch": 1.102710413694722, | |
| "grad_norm": 0.31314757466316223, | |
| "learning_rate": 4.8216686996487425e-06, | |
| "loss": 0.0418, | |
| "step": 257 | |
| }, | |
| { | |
| "epoch": 1.1069900142653353, | |
| "grad_norm": 0.3544161021709442, | |
| "learning_rate": 4.8194175668185645e-06, | |
| "loss": 0.046, | |
| "step": 258 | |
| }, | |
| { | |
| "epoch": 1.1112696148359487, | |
| "grad_norm": 0.3597017228603363, | |
| "learning_rate": 4.817152846815843e-06, | |
| "loss": 0.0398, | |
| "step": 259 | |
| }, | |
| { | |
| "epoch": 1.115549215406562, | |
| "grad_norm": 0.39875292778015137, | |
| "learning_rate": 4.814874552907332e-06, | |
| "loss": 0.0433, | |
| "step": 260 | |
| }, | |
| { | |
| "epoch": 1.1198288159771754, | |
| "grad_norm": 0.42275699973106384, | |
| "learning_rate": 4.812582698439299e-06, | |
| "loss": 0.0476, | |
| "step": 261 | |
| }, | |
| { | |
| "epoch": 1.1241084165477888, | |
| "grad_norm": 0.33560910820961, | |
| "learning_rate": 4.810277296837455e-06, | |
| "loss": 0.0383, | |
| "step": 262 | |
| }, | |
| { | |
| "epoch": 1.1283880171184022, | |
| "grad_norm": 0.4616740047931671, | |
| "learning_rate": 4.807958361606864e-06, | |
| "loss": 0.0347, | |
| "step": 263 | |
| }, | |
| { | |
| "epoch": 1.1326676176890156, | |
| "grad_norm": 0.3887166380882263, | |
| "learning_rate": 4.805625906331874e-06, | |
| "loss": 0.041, | |
| "step": 264 | |
| }, | |
| { | |
| "epoch": 1.1369472182596292, | |
| "grad_norm": 0.4357439875602722, | |
| "learning_rate": 4.8032799446760326e-06, | |
| "loss": 0.0423, | |
| "step": 265 | |
| }, | |
| { | |
| "epoch": 1.1412268188302426, | |
| "grad_norm": 0.3627154231071472, | |
| "learning_rate": 4.800920490382008e-06, | |
| "loss": 0.0403, | |
| "step": 266 | |
| }, | |
| { | |
| "epoch": 1.145506419400856, | |
| "grad_norm": 0.40753647685050964, | |
| "learning_rate": 4.798547557271509e-06, | |
| "loss": 0.0386, | |
| "step": 267 | |
| }, | |
| { | |
| "epoch": 1.1497860199714693, | |
| "grad_norm": 0.4159418046474457, | |
| "learning_rate": 4.796161159245202e-06, | |
| "loss": 0.0424, | |
| "step": 268 | |
| }, | |
| { | |
| "epoch": 1.1540656205420827, | |
| "grad_norm": 0.3464265465736389, | |
| "learning_rate": 4.793761310282631e-06, | |
| "loss": 0.0365, | |
| "step": 269 | |
| }, | |
| { | |
| "epoch": 1.158345221112696, | |
| "grad_norm": 0.517615556716919, | |
| "learning_rate": 4.791348024442138e-06, | |
| "loss": 0.0429, | |
| "step": 270 | |
| }, | |
| { | |
| "epoch": 1.1626248216833095, | |
| "grad_norm": 0.369800329208374, | |
| "learning_rate": 4.788921315860776e-06, | |
| "loss": 0.0431, | |
| "step": 271 | |
| }, | |
| { | |
| "epoch": 1.1669044222539229, | |
| "grad_norm": 0.4237928092479706, | |
| "learning_rate": 4.78648119875423e-06, | |
| "loss": 0.0374, | |
| "step": 272 | |
| }, | |
| { | |
| "epoch": 1.1711840228245363, | |
| "grad_norm": 0.38458287715911865, | |
| "learning_rate": 4.784027687416731e-06, | |
| "loss": 0.0437, | |
| "step": 273 | |
| }, | |
| { | |
| "epoch": 1.1754636233951499, | |
| "grad_norm": 0.4061521589756012, | |
| "learning_rate": 4.7815607962209746e-06, | |
| "loss": 0.0412, | |
| "step": 274 | |
| }, | |
| { | |
| "epoch": 1.1797432239657633, | |
| "grad_norm": 0.3805032968521118, | |
| "learning_rate": 4.779080539618036e-06, | |
| "loss": 0.0399, | |
| "step": 275 | |
| }, | |
| { | |
| "epoch": 1.1840228245363766, | |
| "grad_norm": 0.47470155358314514, | |
| "learning_rate": 4.7765869321372835e-06, | |
| "loss": 0.0453, | |
| "step": 276 | |
| }, | |
| { | |
| "epoch": 1.18830242510699, | |
| "grad_norm": 0.3514023423194885, | |
| "learning_rate": 4.7740799883862966e-06, | |
| "loss": 0.0377, | |
| "step": 277 | |
| }, | |
| { | |
| "epoch": 1.1925820256776034, | |
| "grad_norm": 0.37605416774749756, | |
| "learning_rate": 4.771559723050778e-06, | |
| "loss": 0.0414, | |
| "step": 278 | |
| }, | |
| { | |
| "epoch": 1.1968616262482168, | |
| "grad_norm": 0.3633909821510315, | |
| "learning_rate": 4.769026150894468e-06, | |
| "loss": 0.0441, | |
| "step": 279 | |
| }, | |
| { | |
| "epoch": 1.2011412268188302, | |
| "grad_norm": 0.38254550099372864, | |
| "learning_rate": 4.766479286759061e-06, | |
| "loss": 0.044, | |
| "step": 280 | |
| }, | |
| { | |
| "epoch": 1.2054208273894436, | |
| "grad_norm": 0.4671253263950348, | |
| "learning_rate": 4.763919145564111e-06, | |
| "loss": 0.0365, | |
| "step": 281 | |
| }, | |
| { | |
| "epoch": 1.209700427960057, | |
| "grad_norm": 0.3728236258029938, | |
| "learning_rate": 4.761345742306955e-06, | |
| "loss": 0.0393, | |
| "step": 282 | |
| }, | |
| { | |
| "epoch": 1.2139800285306706, | |
| "grad_norm": 0.4013354480266571, | |
| "learning_rate": 4.758759092062615e-06, | |
| "loss": 0.0445, | |
| "step": 283 | |
| }, | |
| { | |
| "epoch": 1.218259629101284, | |
| "grad_norm": 0.432792603969574, | |
| "learning_rate": 4.756159209983714e-06, | |
| "loss": 0.0311, | |
| "step": 284 | |
| }, | |
| { | |
| "epoch": 1.2225392296718973, | |
| "grad_norm": 0.4125058352947235, | |
| "learning_rate": 4.753546111300391e-06, | |
| "loss": 0.0322, | |
| "step": 285 | |
| }, | |
| { | |
| "epoch": 1.2268188302425107, | |
| "grad_norm": 0.6146723031997681, | |
| "learning_rate": 4.750919811320205e-06, | |
| "loss": 0.0463, | |
| "step": 286 | |
| }, | |
| { | |
| "epoch": 1.231098430813124, | |
| "grad_norm": 0.6433658003807068, | |
| "learning_rate": 4.7482803254280485e-06, | |
| "loss": 0.0357, | |
| "step": 287 | |
| }, | |
| { | |
| "epoch": 1.2353780313837375, | |
| "grad_norm": 0.5013871192932129, | |
| "learning_rate": 4.745627669086058e-06, | |
| "loss": 0.0365, | |
| "step": 288 | |
| }, | |
| { | |
| "epoch": 1.2396576319543509, | |
| "grad_norm": 0.38044819235801697, | |
| "learning_rate": 4.742961857833521e-06, | |
| "loss": 0.041, | |
| "step": 289 | |
| }, | |
| { | |
| "epoch": 1.2439372325249642, | |
| "grad_norm": 0.45295485854148865, | |
| "learning_rate": 4.74028290728679e-06, | |
| "loss": 0.0461, | |
| "step": 290 | |
| }, | |
| { | |
| "epoch": 1.2482168330955776, | |
| "grad_norm": 0.4638918340206146, | |
| "learning_rate": 4.737590833139185e-06, | |
| "loss": 0.0434, | |
| "step": 291 | |
| }, | |
| { | |
| "epoch": 1.2524964336661912, | |
| "grad_norm": 0.5618919730186462, | |
| "learning_rate": 4.734885651160903e-06, | |
| "loss": 0.0377, | |
| "step": 292 | |
| }, | |
| { | |
| "epoch": 1.2567760342368046, | |
| "grad_norm": 0.411558598279953, | |
| "learning_rate": 4.732167377198931e-06, | |
| "loss": 0.0374, | |
| "step": 293 | |
| }, | |
| { | |
| "epoch": 1.261055634807418, | |
| "grad_norm": 0.3951723277568817, | |
| "learning_rate": 4.729436027176944e-06, | |
| "loss": 0.0453, | |
| "step": 294 | |
| }, | |
| { | |
| "epoch": 1.2653352353780314, | |
| "grad_norm": 0.3179916441440582, | |
| "learning_rate": 4.726691617095221e-06, | |
| "loss": 0.0403, | |
| "step": 295 | |
| }, | |
| { | |
| "epoch": 1.2696148359486448, | |
| "grad_norm": 0.4432845413684845, | |
| "learning_rate": 4.723934163030545e-06, | |
| "loss": 0.0414, | |
| "step": 296 | |
| }, | |
| { | |
| "epoch": 1.2738944365192582, | |
| "grad_norm": 0.446893572807312, | |
| "learning_rate": 4.72116368113611e-06, | |
| "loss": 0.0356, | |
| "step": 297 | |
| }, | |
| { | |
| "epoch": 1.2781740370898715, | |
| "grad_norm": 0.3918814957141876, | |
| "learning_rate": 4.718380187641429e-06, | |
| "loss": 0.0334, | |
| "step": 298 | |
| }, | |
| { | |
| "epoch": 1.2824536376604851, | |
| "grad_norm": 0.3894858956336975, | |
| "learning_rate": 4.7155836988522364e-06, | |
| "loss": 0.0407, | |
| "step": 299 | |
| }, | |
| { | |
| "epoch": 1.2867332382310983, | |
| "grad_norm": 0.3865683972835541, | |
| "learning_rate": 4.712774231150392e-06, | |
| "loss": 0.0374, | |
| "step": 300 | |
| }, | |
| { | |
| "epoch": 1.291012838801712, | |
| "grad_norm": 0.35576513409614563, | |
| "learning_rate": 4.7099518009937885e-06, | |
| "loss": 0.0349, | |
| "step": 301 | |
| }, | |
| { | |
| "epoch": 1.2952924393723253, | |
| "grad_norm": 0.40566757321357727, | |
| "learning_rate": 4.7071164249162504e-06, | |
| "loss": 0.0433, | |
| "step": 302 | |
| }, | |
| { | |
| "epoch": 1.2995720399429387, | |
| "grad_norm": 0.4917856454849243, | |
| "learning_rate": 4.704268119527441e-06, | |
| "loss": 0.0343, | |
| "step": 303 | |
| }, | |
| { | |
| "epoch": 1.303851640513552, | |
| "grad_norm": 0.3986580967903137, | |
| "learning_rate": 4.701406901512763e-06, | |
| "loss": 0.0344, | |
| "step": 304 | |
| }, | |
| { | |
| "epoch": 1.3081312410841655, | |
| "grad_norm": 0.43434515595436096, | |
| "learning_rate": 4.698532787633262e-06, | |
| "loss": 0.0431, | |
| "step": 305 | |
| }, | |
| { | |
| "epoch": 1.3124108416547788, | |
| "grad_norm": 0.4825822710990906, | |
| "learning_rate": 4.695645794725528e-06, | |
| "loss": 0.0292, | |
| "step": 306 | |
| }, | |
| { | |
| "epoch": 1.3166904422253922, | |
| "grad_norm": 0.3666658103466034, | |
| "learning_rate": 4.692745939701595e-06, | |
| "loss": 0.0383, | |
| "step": 307 | |
| }, | |
| { | |
| "epoch": 1.3209700427960058, | |
| "grad_norm": 0.3598913848400116, | |
| "learning_rate": 4.689833239548844e-06, | |
| "loss": 0.0381, | |
| "step": 308 | |
| }, | |
| { | |
| "epoch": 1.325249643366619, | |
| "grad_norm": 0.3856128752231598, | |
| "learning_rate": 4.686907711329903e-06, | |
| "loss": 0.0344, | |
| "step": 309 | |
| }, | |
| { | |
| "epoch": 1.3295292439372326, | |
| "grad_norm": 0.422134667634964, | |
| "learning_rate": 4.68396937218255e-06, | |
| "loss": 0.0411, | |
| "step": 310 | |
| }, | |
| { | |
| "epoch": 1.333808844507846, | |
| "grad_norm": 0.422688364982605, | |
| "learning_rate": 4.681018239319603e-06, | |
| "loss": 0.0349, | |
| "step": 311 | |
| }, | |
| { | |
| "epoch": 1.3380884450784594, | |
| "grad_norm": 0.37932994961738586, | |
| "learning_rate": 4.678054330028831e-06, | |
| "loss": 0.0325, | |
| "step": 312 | |
| }, | |
| { | |
| "epoch": 1.3423680456490727, | |
| "grad_norm": 0.3773266673088074, | |
| "learning_rate": 4.675077661672845e-06, | |
| "loss": 0.033, | |
| "step": 313 | |
| }, | |
| { | |
| "epoch": 1.3466476462196861, | |
| "grad_norm": 0.39197105169296265, | |
| "learning_rate": 4.6720882516890016e-06, | |
| "loss": 0.0358, | |
| "step": 314 | |
| }, | |
| { | |
| "epoch": 1.3509272467902995, | |
| "grad_norm": 0.4106651544570923, | |
| "learning_rate": 4.669086117589295e-06, | |
| "loss": 0.0322, | |
| "step": 315 | |
| }, | |
| { | |
| "epoch": 1.355206847360913, | |
| "grad_norm": 0.4139370918273926, | |
| "learning_rate": 4.666071276960256e-06, | |
| "loss": 0.0306, | |
| "step": 316 | |
| }, | |
| { | |
| "epoch": 1.3594864479315265, | |
| "grad_norm": 0.39681828022003174, | |
| "learning_rate": 4.663043747462857e-06, | |
| "loss": 0.0346, | |
| "step": 317 | |
| }, | |
| { | |
| "epoch": 1.3637660485021397, | |
| "grad_norm": 0.39305955171585083, | |
| "learning_rate": 4.660003546832395e-06, | |
| "loss": 0.027, | |
| "step": 318 | |
| }, | |
| { | |
| "epoch": 1.3680456490727533, | |
| "grad_norm": 0.41277015209198, | |
| "learning_rate": 4.656950692878397e-06, | |
| "loss": 0.0322, | |
| "step": 319 | |
| }, | |
| { | |
| "epoch": 1.3723252496433667, | |
| "grad_norm": 0.4968262016773224, | |
| "learning_rate": 4.653885203484516e-06, | |
| "loss": 0.0315, | |
| "step": 320 | |
| }, | |
| { | |
| "epoch": 1.37660485021398, | |
| "grad_norm": 0.5269052386283875, | |
| "learning_rate": 4.650807096608419e-06, | |
| "loss": 0.0365, | |
| "step": 321 | |
| }, | |
| { | |
| "epoch": 1.3808844507845934, | |
| "grad_norm": 0.3427152633666992, | |
| "learning_rate": 4.647716390281689e-06, | |
| "loss": 0.0254, | |
| "step": 322 | |
| }, | |
| { | |
| "epoch": 1.3851640513552068, | |
| "grad_norm": 0.42712923884391785, | |
| "learning_rate": 4.644613102609716e-06, | |
| "loss": 0.0337, | |
| "step": 323 | |
| }, | |
| { | |
| "epoch": 1.3894436519258202, | |
| "grad_norm": 0.4049385190010071, | |
| "learning_rate": 4.641497251771593e-06, | |
| "loss": 0.0308, | |
| "step": 324 | |
| }, | |
| { | |
| "epoch": 1.3937232524964336, | |
| "grad_norm": 0.36787325143814087, | |
| "learning_rate": 4.638368856020006e-06, | |
| "loss": 0.0353, | |
| "step": 325 | |
| }, | |
| { | |
| "epoch": 1.3980028530670472, | |
| "grad_norm": 0.43888741731643677, | |
| "learning_rate": 4.635227933681131e-06, | |
| "loss": 0.0281, | |
| "step": 326 | |
| }, | |
| { | |
| "epoch": 1.4022824536376604, | |
| "grad_norm": 0.40592697262763977, | |
| "learning_rate": 4.632074503154524e-06, | |
| "loss": 0.0294, | |
| "step": 327 | |
| }, | |
| { | |
| "epoch": 1.406562054208274, | |
| "grad_norm": 0.3631339371204376, | |
| "learning_rate": 4.628908582913013e-06, | |
| "loss": 0.0298, | |
| "step": 328 | |
| }, | |
| { | |
| "epoch": 1.4108416547788873, | |
| "grad_norm": 0.5272939205169678, | |
| "learning_rate": 4.625730191502594e-06, | |
| "loss": 0.0354, | |
| "step": 329 | |
| }, | |
| { | |
| "epoch": 1.4151212553495007, | |
| "grad_norm": 0.47585079073905945, | |
| "learning_rate": 4.622539347542316e-06, | |
| "loss": 0.0341, | |
| "step": 330 | |
| }, | |
| { | |
| "epoch": 1.4194008559201141, | |
| "grad_norm": 0.4558964967727661, | |
| "learning_rate": 4.619336069724177e-06, | |
| "loss": 0.0363, | |
| "step": 331 | |
| }, | |
| { | |
| "epoch": 1.4236804564907275, | |
| "grad_norm": 0.43466460704803467, | |
| "learning_rate": 4.6161203768130135e-06, | |
| "loss": 0.0367, | |
| "step": 332 | |
| }, | |
| { | |
| "epoch": 1.4279600570613409, | |
| "grad_norm": 0.4535815715789795, | |
| "learning_rate": 4.612892287646386e-06, | |
| "loss": 0.0331, | |
| "step": 333 | |
| }, | |
| { | |
| "epoch": 1.4322396576319543, | |
| "grad_norm": 0.43253347277641296, | |
| "learning_rate": 4.609651821134476e-06, | |
| "loss": 0.028, | |
| "step": 334 | |
| }, | |
| { | |
| "epoch": 1.4365192582025679, | |
| "grad_norm": 0.5027002096176147, | |
| "learning_rate": 4.606398996259971e-06, | |
| "loss": 0.0326, | |
| "step": 335 | |
| }, | |
| { | |
| "epoch": 1.440798858773181, | |
| "grad_norm": 0.5224930644035339, | |
| "learning_rate": 4.603133832077953e-06, | |
| "loss": 0.0336, | |
| "step": 336 | |
| }, | |
| { | |
| "epoch": 1.4450784593437946, | |
| "grad_norm": 0.4419247508049011, | |
| "learning_rate": 4.5998563477157885e-06, | |
| "loss": 0.0324, | |
| "step": 337 | |
| }, | |
| { | |
| "epoch": 1.449358059914408, | |
| "grad_norm": 0.44416379928588867, | |
| "learning_rate": 4.5965665623730145e-06, | |
| "loss": 0.0383, | |
| "step": 338 | |
| }, | |
| { | |
| "epoch": 1.4536376604850214, | |
| "grad_norm": 0.5683357119560242, | |
| "learning_rate": 4.593264495321228e-06, | |
| "loss": 0.0319, | |
| "step": 339 | |
| }, | |
| { | |
| "epoch": 1.4579172610556348, | |
| "grad_norm": 0.4201441705226898, | |
| "learning_rate": 4.589950165903974e-06, | |
| "loss": 0.0343, | |
| "step": 340 | |
| }, | |
| { | |
| "epoch": 1.4621968616262482, | |
| "grad_norm": 0.568689227104187, | |
| "learning_rate": 4.586623593536629e-06, | |
| "loss": 0.0322, | |
| "step": 341 | |
| }, | |
| { | |
| "epoch": 1.4664764621968616, | |
| "grad_norm": 0.4224129617214203, | |
| "learning_rate": 4.583284797706288e-06, | |
| "loss": 0.0277, | |
| "step": 342 | |
| }, | |
| { | |
| "epoch": 1.470756062767475, | |
| "grad_norm": 0.47905975580215454, | |
| "learning_rate": 4.579933797971652e-06, | |
| "loss": 0.037, | |
| "step": 343 | |
| }, | |
| { | |
| "epoch": 1.4750356633380886, | |
| "grad_norm": 0.6059530377388, | |
| "learning_rate": 4.576570613962914e-06, | |
| "loss": 0.0412, | |
| "step": 344 | |
| }, | |
| { | |
| "epoch": 1.4793152639087017, | |
| "grad_norm": 0.4478548765182495, | |
| "learning_rate": 4.573195265381641e-06, | |
| "loss": 0.0261, | |
| "step": 345 | |
| }, | |
| { | |
| "epoch": 1.4835948644793153, | |
| "grad_norm": 0.4309764802455902, | |
| "learning_rate": 4.5698077720006595e-06, | |
| "loss": 0.0298, | |
| "step": 346 | |
| }, | |
| { | |
| "epoch": 1.4878744650499287, | |
| "grad_norm": 0.4392475187778473, | |
| "learning_rate": 4.566408153663943e-06, | |
| "loss": 0.0222, | |
| "step": 347 | |
| }, | |
| { | |
| "epoch": 1.492154065620542, | |
| "grad_norm": 0.39503395557403564, | |
| "learning_rate": 4.5629964302864896e-06, | |
| "loss": 0.0293, | |
| "step": 348 | |
| }, | |
| { | |
| "epoch": 1.4964336661911555, | |
| "grad_norm": 0.46291348338127136, | |
| "learning_rate": 4.5595726218542124e-06, | |
| "loss": 0.029, | |
| "step": 349 | |
| }, | |
| { | |
| "epoch": 1.5007132667617689, | |
| "grad_norm": 0.41241031885147095, | |
| "learning_rate": 4.5561367484238165e-06, | |
| "loss": 0.0332, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 1.5049928673323825, | |
| "grad_norm": 0.5383005142211914, | |
| "learning_rate": 4.552688830122685e-06, | |
| "loss": 0.0312, | |
| "step": 351 | |
| }, | |
| { | |
| "epoch": 1.5092724679029956, | |
| "grad_norm": 0.5965086817741394, | |
| "learning_rate": 4.54922888714876e-06, | |
| "loss": 0.0333, | |
| "step": 352 | |
| }, | |
| { | |
| "epoch": 1.5135520684736092, | |
| "grad_norm": 0.43624430894851685, | |
| "learning_rate": 4.5457569397704226e-06, | |
| "loss": 0.0254, | |
| "step": 353 | |
| }, | |
| { | |
| "epoch": 1.5178316690442224, | |
| "grad_norm": 0.43263742327690125, | |
| "learning_rate": 4.542273008326379e-06, | |
| "loss": 0.023, | |
| "step": 354 | |
| }, | |
| { | |
| "epoch": 1.522111269614836, | |
| "grad_norm": 0.4612899720668793, | |
| "learning_rate": 4.538777113225535e-06, | |
| "loss": 0.0237, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 1.5263908701854494, | |
| "grad_norm": 0.49297091364860535, | |
| "learning_rate": 4.535269274946881e-06, | |
| "loss": 0.0344, | |
| "step": 356 | |
| }, | |
| { | |
| "epoch": 1.5306704707560628, | |
| "grad_norm": 0.5402700304985046, | |
| "learning_rate": 4.531749514039371e-06, | |
| "loss": 0.025, | |
| "step": 357 | |
| }, | |
| { | |
| "epoch": 1.5349500713266762, | |
| "grad_norm": 0.4386838972568512, | |
| "learning_rate": 4.5282178511218006e-06, | |
| "loss": 0.026, | |
| "step": 358 | |
| }, | |
| { | |
| "epoch": 1.5392296718972895, | |
| "grad_norm": 0.37339386343955994, | |
| "learning_rate": 4.5246743068826895e-06, | |
| "loss": 0.0232, | |
| "step": 359 | |
| }, | |
| { | |
| "epoch": 1.5435092724679031, | |
| "grad_norm": 0.5581195950508118, | |
| "learning_rate": 4.521118902080156e-06, | |
| "loss": 0.0356, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 1.5477888730385163, | |
| "grad_norm": 0.5439732074737549, | |
| "learning_rate": 4.517551657541801e-06, | |
| "loss": 0.0313, | |
| "step": 361 | |
| }, | |
| { | |
| "epoch": 1.55206847360913, | |
| "grad_norm": 0.4505188465118408, | |
| "learning_rate": 4.513972594164578e-06, | |
| "loss": 0.0154, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 1.556348074179743, | |
| "grad_norm": 0.44269970059394836, | |
| "learning_rate": 4.510381732914681e-06, | |
| "loss": 0.024, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 1.5606276747503567, | |
| "grad_norm": 1.0567827224731445, | |
| "learning_rate": 4.506779094827409e-06, | |
| "loss": 0.0238, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 1.56490727532097, | |
| "grad_norm": 0.47431910037994385, | |
| "learning_rate": 4.503164701007058e-06, | |
| "loss": 0.0343, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 1.5691868758915835, | |
| "grad_norm": 0.3802037835121155, | |
| "learning_rate": 4.499538572626782e-06, | |
| "loss": 0.0192, | |
| "step": 366 | |
| }, | |
| { | |
| "epoch": 1.5734664764621968, | |
| "grad_norm": 0.37891197204589844, | |
| "learning_rate": 4.495900730928481e-06, | |
| "loss": 0.0243, | |
| "step": 367 | |
| }, | |
| { | |
| "epoch": 1.5777460770328102, | |
| "grad_norm": 0.5435581207275391, | |
| "learning_rate": 4.492251197222669e-06, | |
| "loss": 0.0267, | |
| "step": 368 | |
| }, | |
| { | |
| "epoch": 1.5820256776034238, | |
| "grad_norm": 0.5308198928833008, | |
| "learning_rate": 4.4885899928883536e-06, | |
| "loss": 0.0297, | |
| "step": 369 | |
| }, | |
| { | |
| "epoch": 1.586305278174037, | |
| "grad_norm": 0.5913228988647461, | |
| "learning_rate": 4.4849171393729084e-06, | |
| "loss": 0.0279, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 1.5905848787446506, | |
| "grad_norm": 0.43857505917549133, | |
| "learning_rate": 4.481232658191947e-06, | |
| "loss": 0.0243, | |
| "step": 371 | |
| }, | |
| { | |
| "epoch": 1.5948644793152638, | |
| "grad_norm": 0.41668838262557983, | |
| "learning_rate": 4.4775365709292004e-06, | |
| "loss": 0.0279, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 1.5991440798858774, | |
| "grad_norm": 0.3665734827518463, | |
| "learning_rate": 4.473828899236385e-06, | |
| "loss": 0.0257, | |
| "step": 373 | |
| }, | |
| { | |
| "epoch": 1.6034236804564908, | |
| "grad_norm": 0.3707963526248932, | |
| "learning_rate": 4.470109664833083e-06, | |
| "loss": 0.02, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 1.6077032810271041, | |
| "grad_norm": 0.3789198100566864, | |
| "learning_rate": 4.466378889506607e-06, | |
| "loss": 0.022, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 1.6119828815977175, | |
| "grad_norm": 0.3439970910549164, | |
| "learning_rate": 4.462636595111878e-06, | |
| "loss": 0.0159, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 1.616262482168331, | |
| "grad_norm": 0.4878711700439453, | |
| "learning_rate": 4.458882803571297e-06, | |
| "loss": 0.0231, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 1.6205420827389445, | |
| "grad_norm": 0.4537769854068756, | |
| "learning_rate": 4.455117536874613e-06, | |
| "loss": 0.0262, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 1.6248216833095577, | |
| "grad_norm": 0.537686824798584, | |
| "learning_rate": 4.4513408170788e-06, | |
| "loss": 0.0224, | |
| "step": 379 | |
| }, | |
| { | |
| "epoch": 1.6291012838801713, | |
| "grad_norm": 0.4968257248401642, | |
| "learning_rate": 4.447552666307921e-06, | |
| "loss": 0.0251, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 1.6333808844507844, | |
| "grad_norm": 0.5357342958450317, | |
| "learning_rate": 4.443753106753003e-06, | |
| "loss": 0.0219, | |
| "step": 381 | |
| }, | |
| { | |
| "epoch": 1.637660485021398, | |
| "grad_norm": 0.4279462695121765, | |
| "learning_rate": 4.4399421606719064e-06, | |
| "loss": 0.0279, | |
| "step": 382 | |
| }, | |
| { | |
| "epoch": 1.6419400855920114, | |
| "grad_norm": 0.8014388084411621, | |
| "learning_rate": 4.436119850389193e-06, | |
| "loss": 0.0237, | |
| "step": 383 | |
| }, | |
| { | |
| "epoch": 1.6462196861626248, | |
| "grad_norm": 0.6127387881278992, | |
| "learning_rate": 4.432286198295998e-06, | |
| "loss": 0.0256, | |
| "step": 384 | |
| }, | |
| { | |
| "epoch": 1.6504992867332382, | |
| "grad_norm": 0.3907901346683502, | |
| "learning_rate": 4.428441226849894e-06, | |
| "loss": 0.0221, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 1.6547788873038516, | |
| "grad_norm": 0.4546869695186615, | |
| "learning_rate": 4.424584958574766e-06, | |
| "loss": 0.0226, | |
| "step": 386 | |
| }, | |
| { | |
| "epoch": 1.6590584878744652, | |
| "grad_norm": 0.4860124886035919, | |
| "learning_rate": 4.4207174160606725e-06, | |
| "loss": 0.023, | |
| "step": 387 | |
| }, | |
| { | |
| "epoch": 1.6633380884450784, | |
| "grad_norm": 0.34656503796577454, | |
| "learning_rate": 4.416838621963719e-06, | |
| "loss": 0.0247, | |
| "step": 388 | |
| }, | |
| { | |
| "epoch": 1.667617689015692, | |
| "grad_norm": 0.42502695322036743, | |
| "learning_rate": 4.412948599005924e-06, | |
| "loss": 0.0224, | |
| "step": 389 | |
| }, | |
| { | |
| "epoch": 1.6718972895863051, | |
| "grad_norm": 0.3963342010974884, | |
| "learning_rate": 4.409047369975079e-06, | |
| "loss": 0.0338, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 1.6761768901569187, | |
| "grad_norm": 0.4617026150226593, | |
| "learning_rate": 4.405134957724627e-06, | |
| "loss": 0.0206, | |
| "step": 391 | |
| }, | |
| { | |
| "epoch": 1.6804564907275321, | |
| "grad_norm": 0.36610788106918335, | |
| "learning_rate": 4.401211385173522e-06, | |
| "loss": 0.0276, | |
| "step": 392 | |
| }, | |
| { | |
| "epoch": 1.6847360912981455, | |
| "grad_norm": 0.3438093066215515, | |
| "learning_rate": 4.39727667530609e-06, | |
| "loss": 0.0236, | |
| "step": 393 | |
| }, | |
| { | |
| "epoch": 1.6890156918687589, | |
| "grad_norm": 0.48957353830337524, | |
| "learning_rate": 4.3933308511719055e-06, | |
| "loss": 0.0214, | |
| "step": 394 | |
| }, | |
| { | |
| "epoch": 1.6932952924393723, | |
| "grad_norm": 0.31086310744285583, | |
| "learning_rate": 4.3893739358856465e-06, | |
| "loss": 0.0176, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 1.6975748930099859, | |
| "grad_norm": 0.4718981981277466, | |
| "learning_rate": 4.3854059526269644e-06, | |
| "loss": 0.0262, | |
| "step": 396 | |
| }, | |
| { | |
| "epoch": 1.701854493580599, | |
| "grad_norm": 0.5084271430969238, | |
| "learning_rate": 4.381426924640346e-06, | |
| "loss": 0.0198, | |
| "step": 397 | |
| }, | |
| { | |
| "epoch": 1.7061340941512126, | |
| "grad_norm": 0.39318305253982544, | |
| "learning_rate": 4.377436875234982e-06, | |
| "loss": 0.0203, | |
| "step": 398 | |
| }, | |
| { | |
| "epoch": 1.7104136947218258, | |
| "grad_norm": 0.4365154802799225, | |
| "learning_rate": 4.373435827784623e-06, | |
| "loss": 0.0168, | |
| "step": 399 | |
| }, | |
| { | |
| "epoch": 1.7146932952924394, | |
| "grad_norm": 0.5564635992050171, | |
| "learning_rate": 4.369423805727444e-06, | |
| "loss": 0.0227, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 1.7189728958630528, | |
| "grad_norm": 0.5515264868736267, | |
| "learning_rate": 4.365400832565916e-06, | |
| "loss": 0.0191, | |
| "step": 401 | |
| }, | |
| { | |
| "epoch": 1.7232524964336662, | |
| "grad_norm": 0.8078024983406067, | |
| "learning_rate": 4.361366931866657e-06, | |
| "loss": 0.0284, | |
| "step": 402 | |
| }, | |
| { | |
| "epoch": 1.7275320970042796, | |
| "grad_norm": 0.5032513737678528, | |
| "learning_rate": 4.357322127260299e-06, | |
| "loss": 0.0236, | |
| "step": 403 | |
| }, | |
| { | |
| "epoch": 1.731811697574893, | |
| "grad_norm": 1.0713094472885132, | |
| "learning_rate": 4.353266442441352e-06, | |
| "loss": 0.0245, | |
| "step": 404 | |
| }, | |
| { | |
| "epoch": 1.7360912981455066, | |
| "grad_norm": 0.5070701241493225, | |
| "learning_rate": 4.349199901168058e-06, | |
| "loss": 0.0215, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 1.7403708987161197, | |
| "grad_norm": 0.46457090973854065, | |
| "learning_rate": 4.345122527262261e-06, | |
| "loss": 0.018, | |
| "step": 406 | |
| }, | |
| { | |
| "epoch": 1.7446504992867333, | |
| "grad_norm": 0.4174697995185852, | |
| "learning_rate": 4.341034344609259e-06, | |
| "loss": 0.0185, | |
| "step": 407 | |
| }, | |
| { | |
| "epoch": 1.7489300998573465, | |
| "grad_norm": 0.4278315007686615, | |
| "learning_rate": 4.336935377157668e-06, | |
| "loss": 0.0213, | |
| "step": 408 | |
| }, | |
| { | |
| "epoch": 1.75320970042796, | |
| "grad_norm": 0.7992134690284729, | |
| "learning_rate": 4.332825648919286e-06, | |
| "loss": 0.0185, | |
| "step": 409 | |
| }, | |
| { | |
| "epoch": 1.7574893009985735, | |
| "grad_norm": 0.5143779516220093, | |
| "learning_rate": 4.32870518396894e-06, | |
| "loss": 0.0209, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 1.7617689015691869, | |
| "grad_norm": 0.4616805911064148, | |
| "learning_rate": 4.3245740064443586e-06, | |
| "loss": 0.02, | |
| "step": 411 | |
| }, | |
| { | |
| "epoch": 1.7660485021398002, | |
| "grad_norm": 0.3600054979324341, | |
| "learning_rate": 4.320432140546026e-06, | |
| "loss": 0.019, | |
| "step": 412 | |
| }, | |
| { | |
| "epoch": 1.7703281027104136, | |
| "grad_norm": 0.4483279585838318, | |
| "learning_rate": 4.316279610537032e-06, | |
| "loss": 0.0172, | |
| "step": 413 | |
| }, | |
| { | |
| "epoch": 1.7746077032810272, | |
| "grad_norm": 0.5091086626052856, | |
| "learning_rate": 4.312116440742944e-06, | |
| "loss": 0.0271, | |
| "step": 414 | |
| }, | |
| { | |
| "epoch": 1.7788873038516404, | |
| "grad_norm": 0.4678874611854553, | |
| "learning_rate": 4.307942655551653e-06, | |
| "loss": 0.0152, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 1.783166904422254, | |
| "grad_norm": 0.44703155755996704, | |
| "learning_rate": 4.303758279413239e-06, | |
| "loss": 0.0216, | |
| "step": 416 | |
| }, | |
| { | |
| "epoch": 1.7874465049928672, | |
| "grad_norm": 0.44851160049438477, | |
| "learning_rate": 4.299563336839818e-06, | |
| "loss": 0.021, | |
| "step": 417 | |
| }, | |
| { | |
| "epoch": 1.7917261055634808, | |
| "grad_norm": 0.4456520974636078, | |
| "learning_rate": 4.2953578524054116e-06, | |
| "loss": 0.0162, | |
| "step": 418 | |
| }, | |
| { | |
| "epoch": 1.7960057061340942, | |
| "grad_norm": 0.5209571719169617, | |
| "learning_rate": 4.291141850745788e-06, | |
| "loss": 0.0193, | |
| "step": 419 | |
| }, | |
| { | |
| "epoch": 1.8002853067047075, | |
| "grad_norm": 0.5911619067192078, | |
| "learning_rate": 4.286915356558332e-06, | |
| "loss": 0.019, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 1.804564907275321, | |
| "grad_norm": 0.5851296186447144, | |
| "learning_rate": 4.2826783946018905e-06, | |
| "loss": 0.0141, | |
| "step": 421 | |
| }, | |
| { | |
| "epoch": 1.8088445078459343, | |
| "grad_norm": 0.3961900770664215, | |
| "learning_rate": 4.278430989696631e-06, | |
| "loss": 0.0255, | |
| "step": 422 | |
| }, | |
| { | |
| "epoch": 1.813124108416548, | |
| "grad_norm": 0.7789033055305481, | |
| "learning_rate": 4.274173166723897e-06, | |
| "loss": 0.0124, | |
| "step": 423 | |
| }, | |
| { | |
| "epoch": 1.817403708987161, | |
| "grad_norm": 0.4361071288585663, | |
| "learning_rate": 4.269904950626059e-06, | |
| "loss": 0.0157, | |
| "step": 424 | |
| }, | |
| { | |
| "epoch": 1.8216833095577747, | |
| "grad_norm": 0.43065768480300903, | |
| "learning_rate": 4.265626366406372e-06, | |
| "loss": 0.0223, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 1.825962910128388, | |
| "grad_norm": 0.6288639307022095, | |
| "learning_rate": 4.261337439128829e-06, | |
| "loss": 0.0133, | |
| "step": 426 | |
| }, | |
| { | |
| "epoch": 1.8302425106990015, | |
| "grad_norm": 0.4564136862754822, | |
| "learning_rate": 4.257038193918011e-06, | |
| "loss": 0.014, | |
| "step": 427 | |
| }, | |
| { | |
| "epoch": 1.8345221112696148, | |
| "grad_norm": 0.6153598427772522, | |
| "learning_rate": 4.252728655958939e-06, | |
| "loss": 0.0158, | |
| "step": 428 | |
| }, | |
| { | |
| "epoch": 1.8388017118402282, | |
| "grad_norm": 0.5261392593383789, | |
| "learning_rate": 4.248408850496934e-06, | |
| "loss": 0.0159, | |
| "step": 429 | |
| }, | |
| { | |
| "epoch": 1.8430813124108416, | |
| "grad_norm": 0.5919463038444519, | |
| "learning_rate": 4.244078802837462e-06, | |
| "loss": 0.0201, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 1.847360912981455, | |
| "grad_norm": 0.4920569360256195, | |
| "learning_rate": 4.239738538345988e-06, | |
| "loss": 0.0169, | |
| "step": 431 | |
| }, | |
| { | |
| "epoch": 1.8516405135520686, | |
| "grad_norm": 0.5132091641426086, | |
| "learning_rate": 4.235388082447825e-06, | |
| "loss": 0.0179, | |
| "step": 432 | |
| }, | |
| { | |
| "epoch": 1.8559201141226818, | |
| "grad_norm": 0.49452340602874756, | |
| "learning_rate": 4.231027460627992e-06, | |
| "loss": 0.0166, | |
| "step": 433 | |
| }, | |
| { | |
| "epoch": 1.8601997146932954, | |
| "grad_norm": 0.4602709412574768, | |
| "learning_rate": 4.226656698431057e-06, | |
| "loss": 0.0196, | |
| "step": 434 | |
| }, | |
| { | |
| "epoch": 1.8644793152639088, | |
| "grad_norm": 0.3728005290031433, | |
| "learning_rate": 4.22227582146099e-06, | |
| "loss": 0.0167, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 1.8687589158345221, | |
| "grad_norm": 0.4013611674308777, | |
| "learning_rate": 4.217884855381015e-06, | |
| "loss": 0.019, | |
| "step": 436 | |
| }, | |
| { | |
| "epoch": 1.8730385164051355, | |
| "grad_norm": 0.4389547109603882, | |
| "learning_rate": 4.213483825913457e-06, | |
| "loss": 0.0193, | |
| "step": 437 | |
| }, | |
| { | |
| "epoch": 1.877318116975749, | |
| "grad_norm": 0.3908598721027374, | |
| "learning_rate": 4.209072758839594e-06, | |
| "loss": 0.012, | |
| "step": 438 | |
| }, | |
| { | |
| "epoch": 1.8815977175463623, | |
| "grad_norm": 0.7128845453262329, | |
| "learning_rate": 4.2046516799995034e-06, | |
| "loss": 0.0169, | |
| "step": 439 | |
| }, | |
| { | |
| "epoch": 1.8858773181169757, | |
| "grad_norm": 0.5817690491676331, | |
| "learning_rate": 4.200220615291911e-06, | |
| "loss": 0.0191, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 1.8901569186875893, | |
| "grad_norm": 0.7214891910552979, | |
| "learning_rate": 4.1957795906740404e-06, | |
| "loss": 0.0149, | |
| "step": 441 | |
| }, | |
| { | |
| "epoch": 1.8944365192582024, | |
| "grad_norm": 0.5022482872009277, | |
| "learning_rate": 4.191328632161462e-06, | |
| "loss": 0.0179, | |
| "step": 442 | |
| }, | |
| { | |
| "epoch": 1.898716119828816, | |
| "grad_norm": 0.4441869258880615, | |
| "learning_rate": 4.186867765827935e-06, | |
| "loss": 0.012, | |
| "step": 443 | |
| }, | |
| { | |
| "epoch": 1.9029957203994294, | |
| "grad_norm": 0.493295282125473, | |
| "learning_rate": 4.182397017805264e-06, | |
| "loss": 0.0125, | |
| "step": 444 | |
| }, | |
| { | |
| "epoch": 1.9072753209700428, | |
| "grad_norm": 0.4610918462276459, | |
| "learning_rate": 4.177916414283137e-06, | |
| "loss": 0.0099, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 1.9115549215406562, | |
| "grad_norm": 0.44065865874290466, | |
| "learning_rate": 4.173425981508976e-06, | |
| "loss": 0.0146, | |
| "step": 446 | |
| }, | |
| { | |
| "epoch": 1.9158345221112696, | |
| "grad_norm": 0.415574312210083, | |
| "learning_rate": 4.168925745787784e-06, | |
| "loss": 0.013, | |
| "step": 447 | |
| }, | |
| { | |
| "epoch": 1.920114122681883, | |
| "grad_norm": 0.5749611258506775, | |
| "learning_rate": 4.16441573348199e-06, | |
| "loss": 0.0124, | |
| "step": 448 | |
| }, | |
| { | |
| "epoch": 1.9243937232524964, | |
| "grad_norm": 0.49864593148231506, | |
| "learning_rate": 4.159895971011293e-06, | |
| "loss": 0.016, | |
| "step": 449 | |
| }, | |
| { | |
| "epoch": 1.92867332382311, | |
| "grad_norm": 0.7332733273506165, | |
| "learning_rate": 4.155366484852509e-06, | |
| "loss": 0.0129, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 1.9329529243937231, | |
| "grad_norm": 0.4229061007499695, | |
| "learning_rate": 4.150827301539416e-06, | |
| "loss": 0.0137, | |
| "step": 451 | |
| }, | |
| { | |
| "epoch": 1.9372325249643367, | |
| "grad_norm": 0.4221555292606354, | |
| "learning_rate": 4.146278447662597e-06, | |
| "loss": 0.0124, | |
| "step": 452 | |
| }, | |
| { | |
| "epoch": 1.9415121255349501, | |
| "grad_norm": 0.324710875749588, | |
| "learning_rate": 4.141719949869287e-06, | |
| "loss": 0.0109, | |
| "step": 453 | |
| }, | |
| { | |
| "epoch": 1.9457917261055635, | |
| "grad_norm": 0.5981960296630859, | |
| "learning_rate": 4.137151834863213e-06, | |
| "loss": 0.0143, | |
| "step": 454 | |
| }, | |
| { | |
| "epoch": 1.9500713266761769, | |
| "grad_norm": 0.9933749437332153, | |
| "learning_rate": 4.132574129404442e-06, | |
| "loss": 0.0157, | |
| "step": 455 | |
| }, | |
| { | |
| "epoch": 1.9543509272467903, | |
| "grad_norm": 0.524457573890686, | |
| "learning_rate": 4.127986860309219e-06, | |
| "loss": 0.0133, | |
| "step": 456 | |
| }, | |
| { | |
| "epoch": 1.9586305278174037, | |
| "grad_norm": 0.6651455760002136, | |
| "learning_rate": 4.123390054449818e-06, | |
| "loss": 0.0133, | |
| "step": 457 | |
| }, | |
| { | |
| "epoch": 1.962910128388017, | |
| "grad_norm": 0.6450986266136169, | |
| "learning_rate": 4.118783738754372e-06, | |
| "loss": 0.0184, | |
| "step": 458 | |
| }, | |
| { | |
| "epoch": 1.9671897289586306, | |
| "grad_norm": 0.5285755395889282, | |
| "learning_rate": 4.114167940206729e-06, | |
| "loss": 0.0095, | |
| "step": 459 | |
| }, | |
| { | |
| "epoch": 1.9714693295292438, | |
| "grad_norm": 0.5533472895622253, | |
| "learning_rate": 4.109542685846287e-06, | |
| "loss": 0.018, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 1.9757489300998574, | |
| "grad_norm": 0.5881748199462891, | |
| "learning_rate": 4.104908002767832e-06, | |
| "loss": 0.0128, | |
| "step": 461 | |
| }, | |
| { | |
| "epoch": 1.9800285306704708, | |
| "grad_norm": 0.637514054775238, | |
| "learning_rate": 4.100263918121388e-06, | |
| "loss": 0.0121, | |
| "step": 462 | |
| }, | |
| { | |
| "epoch": 1.9843081312410842, | |
| "grad_norm": 1.390764594078064, | |
| "learning_rate": 4.095610459112051e-06, | |
| "loss": 0.0154, | |
| "step": 463 | |
| }, | |
| { | |
| "epoch": 1.9885877318116976, | |
| "grad_norm": 0.6760232448577881, | |
| "learning_rate": 4.090947652999832e-06, | |
| "loss": 0.015, | |
| "step": 464 | |
| }, | |
| { | |
| "epoch": 1.992867332382311, | |
| "grad_norm": 1.089862585067749, | |
| "learning_rate": 4.086275527099501e-06, | |
| "loss": 0.0114, | |
| "step": 465 | |
| }, | |
| { | |
| "epoch": 1.9971469329529246, | |
| "grad_norm": 0.4834727346897125, | |
| "learning_rate": 4.08159410878042e-06, | |
| "loss": 0.0089, | |
| "step": 466 | |
| }, | |
| { | |
| "epoch": 2.0042796005706136, | |
| "grad_norm": 1.0610970258712769, | |
| "learning_rate": 4.0769034254663856e-06, | |
| "loss": 0.0273, | |
| "step": 467 | |
| }, | |
| { | |
| "epoch": 2.0085592011412268, | |
| "grad_norm": 0.3145875036716461, | |
| "learning_rate": 4.072203504635469e-06, | |
| "loss": 0.0078, | |
| "step": 468 | |
| }, | |
| { | |
| "epoch": 2.0128388017118404, | |
| "grad_norm": 0.37535548210144043, | |
| "learning_rate": 4.0674943738198595e-06, | |
| "loss": 0.0126, | |
| "step": 469 | |
| }, | |
| { | |
| "epoch": 2.0171184022824535, | |
| "grad_norm": 0.4032490849494934, | |
| "learning_rate": 4.062776060605692e-06, | |
| "loss": 0.0143, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 2.021398002853067, | |
| "grad_norm": 0.8238989114761353, | |
| "learning_rate": 4.058048592632896e-06, | |
| "loss": 0.0131, | |
| "step": 471 | |
| }, | |
| { | |
| "epoch": 2.0256776034236803, | |
| "grad_norm": 0.5088585615158081, | |
| "learning_rate": 4.053311997595029e-06, | |
| "loss": 0.0094, | |
| "step": 472 | |
| }, | |
| { | |
| "epoch": 2.029957203994294, | |
| "grad_norm": 0.3052437901496887, | |
| "learning_rate": 4.048566303239115e-06, | |
| "loss": 0.0065, | |
| "step": 473 | |
| }, | |
| { | |
| "epoch": 2.034236804564907, | |
| "grad_norm": 0.3434632420539856, | |
| "learning_rate": 4.04381153736548e-06, | |
| "loss": 0.0121, | |
| "step": 474 | |
| }, | |
| { | |
| "epoch": 2.0385164051355207, | |
| "grad_norm": 0.40094271302223206, | |
| "learning_rate": 4.039047727827594e-06, | |
| "loss": 0.009, | |
| "step": 475 | |
| }, | |
| { | |
| "epoch": 2.0427960057061343, | |
| "grad_norm": 0.5058080554008484, | |
| "learning_rate": 4.034274902531903e-06, | |
| "loss": 0.0095, | |
| "step": 476 | |
| }, | |
| { | |
| "epoch": 2.0470756062767475, | |
| "grad_norm": 0.33334746956825256, | |
| "learning_rate": 4.029493089437669e-06, | |
| "loss": 0.0092, | |
| "step": 477 | |
| }, | |
| { | |
| "epoch": 2.051355206847361, | |
| "grad_norm": 0.3802107572555542, | |
| "learning_rate": 4.0247023165568e-06, | |
| "loss": 0.0064, | |
| "step": 478 | |
| }, | |
| { | |
| "epoch": 2.055634807417974, | |
| "grad_norm": 0.36711373925209045, | |
| "learning_rate": 4.019902611953698e-06, | |
| "loss": 0.0085, | |
| "step": 479 | |
| }, | |
| { | |
| "epoch": 2.059914407988588, | |
| "grad_norm": 0.4605230987071991, | |
| "learning_rate": 4.015094003745082e-06, | |
| "loss": 0.0069, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 2.064194008559201, | |
| "grad_norm": 0.5190231800079346, | |
| "learning_rate": 4.010276520099827e-06, | |
| "loss": 0.0112, | |
| "step": 481 | |
| }, | |
| { | |
| "epoch": 2.0684736091298146, | |
| "grad_norm": 1.0071794986724854, | |
| "learning_rate": 4.005450189238806e-06, | |
| "loss": 0.0048, | |
| "step": 482 | |
| }, | |
| { | |
| "epoch": 2.0727532097004278, | |
| "grad_norm": 0.4913085103034973, | |
| "learning_rate": 4.000615039434715e-06, | |
| "loss": 0.0099, | |
| "step": 483 | |
| }, | |
| { | |
| "epoch": 2.0770328102710414, | |
| "grad_norm": 0.7372696995735168, | |
| "learning_rate": 3.995771099011914e-06, | |
| "loss": 0.0053, | |
| "step": 484 | |
| }, | |
| { | |
| "epoch": 2.081312410841655, | |
| "grad_norm": 0.23670224845409393, | |
| "learning_rate": 3.990918396346254e-06, | |
| "loss": 0.0054, | |
| "step": 485 | |
| }, | |
| { | |
| "epoch": 2.085592011412268, | |
| "grad_norm": 0.7463693022727966, | |
| "learning_rate": 3.98605695986492e-06, | |
| "loss": 0.0058, | |
| "step": 486 | |
| }, | |
| { | |
| "epoch": 2.0898716119828817, | |
| "grad_norm": 0.5333644151687622, | |
| "learning_rate": 3.9811868180462585e-06, | |
| "loss": 0.0059, | |
| "step": 487 | |
| }, | |
| { | |
| "epoch": 2.094151212553495, | |
| "grad_norm": 0.5247513055801392, | |
| "learning_rate": 3.976307999419611e-06, | |
| "loss": 0.008, | |
| "step": 488 | |
| }, | |
| { | |
| "epoch": 2.0984308131241085, | |
| "grad_norm": 0.6092849373817444, | |
| "learning_rate": 3.971420532565148e-06, | |
| "loss": 0.004, | |
| "step": 489 | |
| }, | |
| { | |
| "epoch": 2.1027104136947217, | |
| "grad_norm": 0.48461002111434937, | |
| "learning_rate": 3.966524446113703e-06, | |
| "loss": 0.0096, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 2.1069900142653353, | |
| "grad_norm": 0.6382566690444946, | |
| "learning_rate": 3.961619768746602e-06, | |
| "loss": 0.0083, | |
| "step": 491 | |
| }, | |
| { | |
| "epoch": 2.1112696148359484, | |
| "grad_norm": 0.5699125528335571, | |
| "learning_rate": 3.956706529195494e-06, | |
| "loss": 0.0035, | |
| "step": 492 | |
| }, | |
| { | |
| "epoch": 2.115549215406562, | |
| "grad_norm": 0.31096431612968445, | |
| "learning_rate": 3.9517847562421915e-06, | |
| "loss": 0.0035, | |
| "step": 493 | |
| }, | |
| { | |
| "epoch": 2.1198288159771757, | |
| "grad_norm": 0.35952478647232056, | |
| "learning_rate": 3.946854478718491e-06, | |
| "loss": 0.0078, | |
| "step": 494 | |
| }, | |
| { | |
| "epoch": 2.124108416547789, | |
| "grad_norm": 0.45269617438316345, | |
| "learning_rate": 3.94191572550601e-06, | |
| "loss": 0.0057, | |
| "step": 495 | |
| }, | |
| { | |
| "epoch": 2.1283880171184024, | |
| "grad_norm": 0.3647656738758087, | |
| "learning_rate": 3.936968525536018e-06, | |
| "loss": 0.0068, | |
| "step": 496 | |
| }, | |
| { | |
| "epoch": 2.1326676176890156, | |
| "grad_norm": 0.4928695559501648, | |
| "learning_rate": 3.932012907789262e-06, | |
| "loss": 0.0074, | |
| "step": 497 | |
| }, | |
| { | |
| "epoch": 2.136947218259629, | |
| "grad_norm": 0.6422083973884583, | |
| "learning_rate": 3.927048901295805e-06, | |
| "loss": 0.0064, | |
| "step": 498 | |
| }, | |
| { | |
| "epoch": 2.1412268188302424, | |
| "grad_norm": 0.32455766201019287, | |
| "learning_rate": 3.922076535134849e-06, | |
| "loss": 0.0037, | |
| "step": 499 | |
| }, | |
| { | |
| "epoch": 2.145506419400856, | |
| "grad_norm": 0.4020403027534485, | |
| "learning_rate": 3.917095838434567e-06, | |
| "loss": 0.0056, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 2.1497860199714696, | |
| "grad_norm": 0.40988966822624207, | |
| "learning_rate": 3.9121068403719335e-06, | |
| "loss": 0.0034, | |
| "step": 501 | |
| }, | |
| { | |
| "epoch": 2.1540656205420827, | |
| "grad_norm": 0.31292110681533813, | |
| "learning_rate": 3.90710957017255e-06, | |
| "loss": 0.007, | |
| "step": 502 | |
| }, | |
| { | |
| "epoch": 2.1583452211126963, | |
| "grad_norm": 0.4979025721549988, | |
| "learning_rate": 3.90210405711048e-06, | |
| "loss": 0.005, | |
| "step": 503 | |
| }, | |
| { | |
| "epoch": 2.1626248216833095, | |
| "grad_norm": 0.8948659896850586, | |
| "learning_rate": 3.897090330508071e-06, | |
| "loss": 0.0077, | |
| "step": 504 | |
| }, | |
| { | |
| "epoch": 2.166904422253923, | |
| "grad_norm": 0.5055385231971741, | |
| "learning_rate": 3.8920684197357865e-06, | |
| "loss": 0.0072, | |
| "step": 505 | |
| }, | |
| { | |
| "epoch": 2.1711840228245363, | |
| "grad_norm": 0.36552977561950684, | |
| "learning_rate": 3.887038354212031e-06, | |
| "loss": 0.0056, | |
| "step": 506 | |
| }, | |
| { | |
| "epoch": 2.17546362339515, | |
| "grad_norm": 0.5923222303390503, | |
| "learning_rate": 3.882000163402984e-06, | |
| "loss": 0.0094, | |
| "step": 507 | |
| }, | |
| { | |
| "epoch": 2.179743223965763, | |
| "grad_norm": 0.4993934631347656, | |
| "learning_rate": 3.876953876822418e-06, | |
| "loss": 0.0047, | |
| "step": 508 | |
| }, | |
| { | |
| "epoch": 2.1840228245363766, | |
| "grad_norm": 0.3998502790927887, | |
| "learning_rate": 3.871899524031535e-06, | |
| "loss": 0.0103, | |
| "step": 509 | |
| }, | |
| { | |
| "epoch": 2.18830242510699, | |
| "grad_norm": 0.4463786482810974, | |
| "learning_rate": 3.866837134638786e-06, | |
| "loss": 0.0036, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 2.1925820256776034, | |
| "grad_norm": 0.45763128995895386, | |
| "learning_rate": 3.8617667382997e-06, | |
| "loss": 0.0057, | |
| "step": 511 | |
| }, | |
| { | |
| "epoch": 2.196861626248217, | |
| "grad_norm": 0.4340563118457794, | |
| "learning_rate": 3.856688364716715e-06, | |
| "loss": 0.0048, | |
| "step": 512 | |
| }, | |
| { | |
| "epoch": 2.20114122681883, | |
| "grad_norm": 0.6045655608177185, | |
| "learning_rate": 3.8516020436389945e-06, | |
| "loss": 0.0064, | |
| "step": 513 | |
| }, | |
| { | |
| "epoch": 2.205420827389444, | |
| "grad_norm": 0.42897504568099976, | |
| "learning_rate": 3.846507804862262e-06, | |
| "loss": 0.0079, | |
| "step": 514 | |
| }, | |
| { | |
| "epoch": 2.209700427960057, | |
| "grad_norm": 0.557436466217041, | |
| "learning_rate": 3.8414056782286225e-06, | |
| "loss": 0.005, | |
| "step": 515 | |
| }, | |
| { | |
| "epoch": 2.2139800285306706, | |
| "grad_norm": 0.3902797996997833, | |
| "learning_rate": 3.836295693626386e-06, | |
| "loss": 0.004, | |
| "step": 516 | |
| }, | |
| { | |
| "epoch": 2.2182596291012837, | |
| "grad_norm": 0.4710390567779541, | |
| "learning_rate": 3.831177880989897e-06, | |
| "loss": 0.0076, | |
| "step": 517 | |
| }, | |
| { | |
| "epoch": 2.2225392296718973, | |
| "grad_norm": 0.4178374707698822, | |
| "learning_rate": 3.826052270299356e-06, | |
| "loss": 0.0081, | |
| "step": 518 | |
| }, | |
| { | |
| "epoch": 2.226818830242511, | |
| "grad_norm": 0.7133017778396606, | |
| "learning_rate": 3.820918891580645e-06, | |
| "loss": 0.0086, | |
| "step": 519 | |
| }, | |
| { | |
| "epoch": 2.231098430813124, | |
| "grad_norm": 0.7762539386749268, | |
| "learning_rate": 3.81577777490515e-06, | |
| "loss": 0.0057, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 2.2353780313837377, | |
| "grad_norm": 0.6291279792785645, | |
| "learning_rate": 3.8106289503895866e-06, | |
| "loss": 0.0047, | |
| "step": 521 | |
| }, | |
| { | |
| "epoch": 2.239657631954351, | |
| "grad_norm": 0.3010478913784027, | |
| "learning_rate": 3.805472448195822e-06, | |
| "loss": 0.0064, | |
| "step": 522 | |
| }, | |
| { | |
| "epoch": 2.2439372325249645, | |
| "grad_norm": 0.4451744258403778, | |
| "learning_rate": 3.8003082985307022e-06, | |
| "loss": 0.0049, | |
| "step": 523 | |
| }, | |
| { | |
| "epoch": 2.2482168330955776, | |
| "grad_norm": 0.444017231464386, | |
| "learning_rate": 3.7951365316458698e-06, | |
| "loss": 0.0077, | |
| "step": 524 | |
| }, | |
| { | |
| "epoch": 2.2524964336661912, | |
| "grad_norm": 0.4439545273780823, | |
| "learning_rate": 3.7899571778375886e-06, | |
| "loss": 0.0068, | |
| "step": 525 | |
| }, | |
| { | |
| "epoch": 2.2567760342368044, | |
| "grad_norm": 0.43541809916496277, | |
| "learning_rate": 3.7847702674465693e-06, | |
| "loss": 0.0079, | |
| "step": 526 | |
| }, | |
| { | |
| "epoch": 2.261055634807418, | |
| "grad_norm": 0.3825772702693939, | |
| "learning_rate": 3.779575830857787e-06, | |
| "loss": 0.0082, | |
| "step": 527 | |
| }, | |
| { | |
| "epoch": 2.265335235378031, | |
| "grad_norm": 0.24448992311954498, | |
| "learning_rate": 3.7743738985003053e-06, | |
| "loss": 0.0062, | |
| "step": 528 | |
| }, | |
| { | |
| "epoch": 2.2696148359486448, | |
| "grad_norm": 0.8010409474372864, | |
| "learning_rate": 3.7691645008471e-06, | |
| "loss": 0.0056, | |
| "step": 529 | |
| }, | |
| { | |
| "epoch": 2.2738944365192584, | |
| "grad_norm": 0.2623003125190735, | |
| "learning_rate": 3.763947668414877e-06, | |
| "loss": 0.0038, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 2.2781740370898715, | |
| "grad_norm": 0.25390884280204773, | |
| "learning_rate": 3.7587234317638955e-06, | |
| "loss": 0.007, | |
| "step": 531 | |
| }, | |
| { | |
| "epoch": 2.282453637660485, | |
| "grad_norm": 0.5253174901008606, | |
| "learning_rate": 3.753491821497788e-06, | |
| "loss": 0.006, | |
| "step": 532 | |
| }, | |
| { | |
| "epoch": 2.2867332382310983, | |
| "grad_norm": 0.37569546699523926, | |
| "learning_rate": 3.7482528682633844e-06, | |
| "loss": 0.0071, | |
| "step": 533 | |
| }, | |
| { | |
| "epoch": 2.291012838801712, | |
| "grad_norm": 0.32741880416870117, | |
| "learning_rate": 3.7430066027505267e-06, | |
| "loss": 0.0045, | |
| "step": 534 | |
| }, | |
| { | |
| "epoch": 2.295292439372325, | |
| "grad_norm": 0.6359190344810486, | |
| "learning_rate": 3.7377530556918934e-06, | |
| "loss": 0.0066, | |
| "step": 535 | |
| }, | |
| { | |
| "epoch": 2.2995720399429387, | |
| "grad_norm": 0.5190404057502747, | |
| "learning_rate": 3.732492257862819e-06, | |
| "loss": 0.0054, | |
| "step": 536 | |
| }, | |
| { | |
| "epoch": 2.3038516405135523, | |
| "grad_norm": 0.5946906805038452, | |
| "learning_rate": 3.727224240081113e-06, | |
| "loss": 0.011, | |
| "step": 537 | |
| }, | |
| { | |
| "epoch": 2.3081312410841655, | |
| "grad_norm": 0.6893029808998108, | |
| "learning_rate": 3.7219490332068774e-06, | |
| "loss": 0.0063, | |
| "step": 538 | |
| }, | |
| { | |
| "epoch": 2.312410841654779, | |
| "grad_norm": 0.43083977699279785, | |
| "learning_rate": 3.7166666681423326e-06, | |
| "loss": 0.0024, | |
| "step": 539 | |
| }, | |
| { | |
| "epoch": 2.316690442225392, | |
| "grad_norm": 0.44732487201690674, | |
| "learning_rate": 3.711377175831626e-06, | |
| "loss": 0.0054, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 2.320970042796006, | |
| "grad_norm": 0.3756791055202484, | |
| "learning_rate": 3.7060805872606604e-06, | |
| "loss": 0.0057, | |
| "step": 541 | |
| }, | |
| { | |
| "epoch": 2.325249643366619, | |
| "grad_norm": 0.5223040580749512, | |
| "learning_rate": 3.7007769334569072e-06, | |
| "loss": 0.0042, | |
| "step": 542 | |
| }, | |
| { | |
| "epoch": 2.3295292439372326, | |
| "grad_norm": 0.30271726846694946, | |
| "learning_rate": 3.6954662454892266e-06, | |
| "loss": 0.0019, | |
| "step": 543 | |
| }, | |
| { | |
| "epoch": 2.3338088445078458, | |
| "grad_norm": 0.23703891038894653, | |
| "learning_rate": 3.6901485544676848e-06, | |
| "loss": 0.002, | |
| "step": 544 | |
| }, | |
| { | |
| "epoch": 2.3380884450784594, | |
| "grad_norm": 0.6809226274490356, | |
| "learning_rate": 3.684823891543371e-06, | |
| "loss": 0.0098, | |
| "step": 545 | |
| }, | |
| { | |
| "epoch": 2.3423680456490725, | |
| "grad_norm": 1.772580862045288, | |
| "learning_rate": 3.6794922879082185e-06, | |
| "loss": 0.0053, | |
| "step": 546 | |
| }, | |
| { | |
| "epoch": 2.346647646219686, | |
| "grad_norm": 0.5708749294281006, | |
| "learning_rate": 3.6741537747948148e-06, | |
| "loss": 0.0038, | |
| "step": 547 | |
| }, | |
| { | |
| "epoch": 2.3509272467902997, | |
| "grad_norm": 0.43098023533821106, | |
| "learning_rate": 3.6688083834762277e-06, | |
| "loss": 0.0034, | |
| "step": 548 | |
| }, | |
| { | |
| "epoch": 2.355206847360913, | |
| "grad_norm": 0.4059015214443207, | |
| "learning_rate": 3.663456145265817e-06, | |
| "loss": 0.0021, | |
| "step": 549 | |
| }, | |
| { | |
| "epoch": 2.3594864479315265, | |
| "grad_norm": 0.2948651611804962, | |
| "learning_rate": 3.658097091517048e-06, | |
| "loss": 0.0034, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 2.3637660485021397, | |
| "grad_norm": 0.5517255067825317, | |
| "learning_rate": 3.652731253623315e-06, | |
| "loss": 0.0072, | |
| "step": 551 | |
| }, | |
| { | |
| "epoch": 2.3680456490727533, | |
| "grad_norm": 0.4522925615310669, | |
| "learning_rate": 3.647358663017754e-06, | |
| "loss": 0.0076, | |
| "step": 552 | |
| }, | |
| { | |
| "epoch": 2.3723252496433664, | |
| "grad_norm": 0.6154627203941345, | |
| "learning_rate": 3.6419793511730554e-06, | |
| "loss": 0.0072, | |
| "step": 553 | |
| }, | |
| { | |
| "epoch": 2.37660485021398, | |
| "grad_norm": 0.5896870493888855, | |
| "learning_rate": 3.636593349601286e-06, | |
| "loss": 0.0073, | |
| "step": 554 | |
| }, | |
| { | |
| "epoch": 2.3808844507845937, | |
| "grad_norm": 0.5177700519561768, | |
| "learning_rate": 3.6312006898537007e-06, | |
| "loss": 0.0057, | |
| "step": 555 | |
| }, | |
| { | |
| "epoch": 2.385164051355207, | |
| "grad_norm": 0.3875308632850647, | |
| "learning_rate": 3.6258014035205553e-06, | |
| "loss": 0.0033, | |
| "step": 556 | |
| }, | |
| { | |
| "epoch": 2.3894436519258204, | |
| "grad_norm": 0.4099850654602051, | |
| "learning_rate": 3.6203955222309263e-06, | |
| "loss": 0.0039, | |
| "step": 557 | |
| }, | |
| { | |
| "epoch": 2.3937232524964336, | |
| "grad_norm": 0.3532715439796448, | |
| "learning_rate": 3.6149830776525237e-06, | |
| "loss": 0.0035, | |
| "step": 558 | |
| }, | |
| { | |
| "epoch": 2.398002853067047, | |
| "grad_norm": 0.3508007526397705, | |
| "learning_rate": 3.609564101491505e-06, | |
| "loss": 0.0034, | |
| "step": 559 | |
| }, | |
| { | |
| "epoch": 2.4022824536376604, | |
| "grad_norm": 0.5390321612358093, | |
| "learning_rate": 3.6041386254922885e-06, | |
| "loss": 0.0059, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 2.406562054208274, | |
| "grad_norm": 0.18945489823818207, | |
| "learning_rate": 3.5987066814373718e-06, | |
| "loss": 0.0055, | |
| "step": 561 | |
| }, | |
| { | |
| "epoch": 2.410841654778887, | |
| "grad_norm": 0.3890082836151123, | |
| "learning_rate": 3.593268301147139e-06, | |
| "loss": 0.0026, | |
| "step": 562 | |
| }, | |
| { | |
| "epoch": 2.4151212553495007, | |
| "grad_norm": 0.32187849283218384, | |
| "learning_rate": 3.587823516479679e-06, | |
| "loss": 0.0042, | |
| "step": 563 | |
| }, | |
| { | |
| "epoch": 2.419400855920114, | |
| "grad_norm": 0.19139395654201508, | |
| "learning_rate": 3.582372359330599e-06, | |
| "loss": 0.0032, | |
| "step": 564 | |
| }, | |
| { | |
| "epoch": 2.4236804564907275, | |
| "grad_norm": 0.8424011468887329, | |
| "learning_rate": 3.576914861632833e-06, | |
| "loss": 0.0062, | |
| "step": 565 | |
| }, | |
| { | |
| "epoch": 2.427960057061341, | |
| "grad_norm": 0.3709321916103363, | |
| "learning_rate": 3.5714510553564624e-06, | |
| "loss": 0.0036, | |
| "step": 566 | |
| }, | |
| { | |
| "epoch": 2.4322396576319543, | |
| "grad_norm": 0.8795949220657349, | |
| "learning_rate": 3.5659809725085203e-06, | |
| "loss": 0.0043, | |
| "step": 567 | |
| }, | |
| { | |
| "epoch": 2.436519258202568, | |
| "grad_norm": 0.44260433316230774, | |
| "learning_rate": 3.5605046451328095e-06, | |
| "loss": 0.0034, | |
| "step": 568 | |
| }, | |
| { | |
| "epoch": 2.440798858773181, | |
| "grad_norm": 0.36288049817085266, | |
| "learning_rate": 3.555022105309715e-06, | |
| "loss": 0.0042, | |
| "step": 569 | |
| }, | |
| { | |
| "epoch": 2.4450784593437946, | |
| "grad_norm": 0.5439409613609314, | |
| "learning_rate": 3.5495333851560115e-06, | |
| "loss": 0.0021, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 2.449358059914408, | |
| "grad_norm": 0.4087218940258026, | |
| "learning_rate": 3.5440385168246796e-06, | |
| "loss": 0.0026, | |
| "step": 571 | |
| }, | |
| { | |
| "epoch": 2.4536376604850214, | |
| "grad_norm": 0.2950667142868042, | |
| "learning_rate": 3.5385375325047167e-06, | |
| "loss": 0.0051, | |
| "step": 572 | |
| }, | |
| { | |
| "epoch": 2.457917261055635, | |
| "grad_norm": 0.258934885263443, | |
| "learning_rate": 3.5330304644209456e-06, | |
| "loss": 0.0023, | |
| "step": 573 | |
| }, | |
| { | |
| "epoch": 2.462196861626248, | |
| "grad_norm": 0.25606122612953186, | |
| "learning_rate": 3.5275173448338306e-06, | |
| "loss": 0.0044, | |
| "step": 574 | |
| }, | |
| { | |
| "epoch": 2.466476462196862, | |
| "grad_norm": 0.39192715287208557, | |
| "learning_rate": 3.5219982060392832e-06, | |
| "loss": 0.0026, | |
| "step": 575 | |
| }, | |
| { | |
| "epoch": 2.470756062767475, | |
| "grad_norm": 0.21482016146183014, | |
| "learning_rate": 3.516473080368478e-06, | |
| "loss": 0.0049, | |
| "step": 576 | |
| }, | |
| { | |
| "epoch": 2.4750356633380886, | |
| "grad_norm": 0.3853050768375397, | |
| "learning_rate": 3.5109420001876576e-06, | |
| "loss": 0.0024, | |
| "step": 577 | |
| }, | |
| { | |
| "epoch": 2.4793152639087017, | |
| "grad_norm": 0.46906372904777527, | |
| "learning_rate": 3.5054049978979487e-06, | |
| "loss": 0.0042, | |
| "step": 578 | |
| }, | |
| { | |
| "epoch": 2.4835948644793153, | |
| "grad_norm": 0.40924930572509766, | |
| "learning_rate": 3.4998621059351695e-06, | |
| "loss": 0.0047, | |
| "step": 579 | |
| }, | |
| { | |
| "epoch": 2.4878744650499285, | |
| "grad_norm": 0.7026729583740234, | |
| "learning_rate": 3.49431335676964e-06, | |
| "loss": 0.006, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 2.492154065620542, | |
| "grad_norm": 0.23798321187496185, | |
| "learning_rate": 3.4887587829059887e-06, | |
| "loss": 0.0023, | |
| "step": 581 | |
| }, | |
| { | |
| "epoch": 2.4964336661911553, | |
| "grad_norm": 0.6722045540809631, | |
| "learning_rate": 3.4831984168829706e-06, | |
| "loss": 0.0035, | |
| "step": 582 | |
| }, | |
| { | |
| "epoch": 2.500713266761769, | |
| "grad_norm": 0.429177463054657, | |
| "learning_rate": 3.477632291273268e-06, | |
| "loss": 0.0032, | |
| "step": 583 | |
| }, | |
| { | |
| "epoch": 2.5049928673323825, | |
| "grad_norm": 0.20870012044906616, | |
| "learning_rate": 3.4720604386833024e-06, | |
| "loss": 0.002, | |
| "step": 584 | |
| }, | |
| { | |
| "epoch": 2.5092724679029956, | |
| "grad_norm": 0.23582929372787476, | |
| "learning_rate": 3.4664828917530445e-06, | |
| "loss": 0.0021, | |
| "step": 585 | |
| }, | |
| { | |
| "epoch": 2.5135520684736092, | |
| "grad_norm": 0.3133554458618164, | |
| "learning_rate": 3.460899683155825e-06, | |
| "loss": 0.0042, | |
| "step": 586 | |
| }, | |
| { | |
| "epoch": 2.5178316690442224, | |
| "grad_norm": 0.2973891496658325, | |
| "learning_rate": 3.455310845598137e-06, | |
| "loss": 0.0024, | |
| "step": 587 | |
| }, | |
| { | |
| "epoch": 2.522111269614836, | |
| "grad_norm": 0.29642683267593384, | |
| "learning_rate": 3.4497164118194503e-06, | |
| "loss": 0.0046, | |
| "step": 588 | |
| }, | |
| { | |
| "epoch": 2.526390870185449, | |
| "grad_norm": 0.495253324508667, | |
| "learning_rate": 3.4441164145920174e-06, | |
| "loss": 0.0017, | |
| "step": 589 | |
| }, | |
| { | |
| "epoch": 2.5306704707560628, | |
| "grad_norm": 0.20187705755233765, | |
| "learning_rate": 3.4385108867206803e-06, | |
| "loss": 0.0035, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 2.5349500713266764, | |
| "grad_norm": 0.46516257524490356, | |
| "learning_rate": 3.4328998610426807e-06, | |
| "loss": 0.0018, | |
| "step": 591 | |
| }, | |
| { | |
| "epoch": 2.5392296718972895, | |
| "grad_norm": 0.3390999734401703, | |
| "learning_rate": 3.427283370427466e-06, | |
| "loss": 0.0073, | |
| "step": 592 | |
| }, | |
| { | |
| "epoch": 2.543509272467903, | |
| "grad_norm": 0.34318435192108154, | |
| "learning_rate": 3.4216614477764973e-06, | |
| "loss": 0.0029, | |
| "step": 593 | |
| }, | |
| { | |
| "epoch": 2.5477888730385163, | |
| "grad_norm": 0.22326043248176575, | |
| "learning_rate": 3.416034126023058e-06, | |
| "loss": 0.0038, | |
| "step": 594 | |
| }, | |
| { | |
| "epoch": 2.55206847360913, | |
| "grad_norm": 0.2754570543766022, | |
| "learning_rate": 3.410401438132056e-06, | |
| "loss": 0.002, | |
| "step": 595 | |
| }, | |
| { | |
| "epoch": 2.556348074179743, | |
| "grad_norm": 0.5922912955284119, | |
| "learning_rate": 3.4047634170998374e-06, | |
| "loss": 0.0029, | |
| "step": 596 | |
| }, | |
| { | |
| "epoch": 2.5606276747503567, | |
| "grad_norm": 0.5190448760986328, | |
| "learning_rate": 3.3991200959539894e-06, | |
| "loss": 0.003, | |
| "step": 597 | |
| }, | |
| { | |
| "epoch": 2.5649072753209703, | |
| "grad_norm": 1.955246925354004, | |
| "learning_rate": 3.393471507753146e-06, | |
| "loss": 0.0043, | |
| "step": 598 | |
| }, | |
| { | |
| "epoch": 2.5691868758915835, | |
| "grad_norm": 0.2827969193458557, | |
| "learning_rate": 3.3878176855867974e-06, | |
| "loss": 0.0014, | |
| "step": 599 | |
| }, | |
| { | |
| "epoch": 2.5734664764621966, | |
| "grad_norm": 0.13652905821800232, | |
| "learning_rate": 3.3821586625750915e-06, | |
| "loss": 0.0041, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 2.5777460770328102, | |
| "grad_norm": 1.3642975091934204, | |
| "learning_rate": 3.3764944718686456e-06, | |
| "loss": 0.0012, | |
| "step": 601 | |
| }, | |
| { | |
| "epoch": 2.582025677603424, | |
| "grad_norm": 0.39340218901634216, | |
| "learning_rate": 3.37082514664835e-06, | |
| "loss": 0.0036, | |
| "step": 602 | |
| }, | |
| { | |
| "epoch": 2.586305278174037, | |
| "grad_norm": 0.2879283130168915, | |
| "learning_rate": 3.3651507201251685e-06, | |
| "loss": 0.0015, | |
| "step": 603 | |
| }, | |
| { | |
| "epoch": 2.5905848787446506, | |
| "grad_norm": 0.3106430768966675, | |
| "learning_rate": 3.3594712255399554e-06, | |
| "loss": 0.0033, | |
| "step": 604 | |
| }, | |
| { | |
| "epoch": 2.5948644793152638, | |
| "grad_norm": 0.12472659349441528, | |
| "learning_rate": 3.3537866961632465e-06, | |
| "loss": 0.0007, | |
| "step": 605 | |
| }, | |
| { | |
| "epoch": 2.5991440798858774, | |
| "grad_norm": 0.15480101108551025, | |
| "learning_rate": 3.3480971652950757e-06, | |
| "loss": 0.0018, | |
| "step": 606 | |
| }, | |
| { | |
| "epoch": 2.6034236804564905, | |
| "grad_norm": 0.4321823716163635, | |
| "learning_rate": 3.3424026662647763e-06, | |
| "loss": 0.0024, | |
| "step": 607 | |
| }, | |
| { | |
| "epoch": 2.607703281027104, | |
| "grad_norm": 0.20745764672756195, | |
| "learning_rate": 3.3367032324307815e-06, | |
| "loss": 0.0024, | |
| "step": 608 | |
| }, | |
| { | |
| "epoch": 2.6119828815977177, | |
| "grad_norm": 0.5367287397384644, | |
| "learning_rate": 3.330998897180436e-06, | |
| "loss": 0.0026, | |
| "step": 609 | |
| }, | |
| { | |
| "epoch": 2.616262482168331, | |
| "grad_norm": 0.33065301179885864, | |
| "learning_rate": 3.325289693929795e-06, | |
| "loss": 0.0031, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 2.6205420827389445, | |
| "grad_norm": 0.47890713810920715, | |
| "learning_rate": 3.3195756561234315e-06, | |
| "loss": 0.0017, | |
| "step": 611 | |
| }, | |
| { | |
| "epoch": 2.6248216833095577, | |
| "grad_norm": 0.410844624042511, | |
| "learning_rate": 3.31385681723424e-06, | |
| "loss": 0.0012, | |
| "step": 612 | |
| }, | |
| { | |
| "epoch": 2.6291012838801713, | |
| "grad_norm": 0.17063617706298828, | |
| "learning_rate": 3.3081332107632386e-06, | |
| "loss": 0.002, | |
| "step": 613 | |
| }, | |
| { | |
| "epoch": 2.6333808844507844, | |
| "grad_norm": 0.24861586093902588, | |
| "learning_rate": 3.302404870239374e-06, | |
| "loss": 0.0013, | |
| "step": 614 | |
| }, | |
| { | |
| "epoch": 2.637660485021398, | |
| "grad_norm": 0.22020435333251953, | |
| "learning_rate": 3.2966718292193247e-06, | |
| "loss": 0.0027, | |
| "step": 615 | |
| }, | |
| { | |
| "epoch": 2.6419400855920117, | |
| "grad_norm": 0.30350565910339355, | |
| "learning_rate": 3.2909341212873065e-06, | |
| "loss": 0.003, | |
| "step": 616 | |
| }, | |
| { | |
| "epoch": 2.646219686162625, | |
| "grad_norm": 0.39022594690322876, | |
| "learning_rate": 3.2851917800548726e-06, | |
| "loss": 0.0041, | |
| "step": 617 | |
| }, | |
| { | |
| "epoch": 2.650499286733238, | |
| "grad_norm": 0.41128891706466675, | |
| "learning_rate": 3.279444839160718e-06, | |
| "loss": 0.0016, | |
| "step": 618 | |
| }, | |
| { | |
| "epoch": 2.6547788873038516, | |
| "grad_norm": 0.2784186005592346, | |
| "learning_rate": 3.273693332270484e-06, | |
| "loss": 0.0015, | |
| "step": 619 | |
| }, | |
| { | |
| "epoch": 2.659058487874465, | |
| "grad_norm": 0.3071463108062744, | |
| "learning_rate": 3.2679372930765575e-06, | |
| "loss": 0.0038, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 2.6633380884450784, | |
| "grad_norm": 1.0749564170837402, | |
| "learning_rate": 3.262176755297877e-06, | |
| "loss": 0.0021, | |
| "step": 621 | |
| }, | |
| { | |
| "epoch": 2.667617689015692, | |
| "grad_norm": 0.3864515423774719, | |
| "learning_rate": 3.2564117526797343e-06, | |
| "loss": 0.0051, | |
| "step": 622 | |
| }, | |
| { | |
| "epoch": 2.671897289586305, | |
| "grad_norm": 0.4214929938316345, | |
| "learning_rate": 3.250642318993574e-06, | |
| "loss": 0.003, | |
| "step": 623 | |
| }, | |
| { | |
| "epoch": 2.6761768901569187, | |
| "grad_norm": 0.20947064459323883, | |
| "learning_rate": 3.2448684880368016e-06, | |
| "loss": 0.0021, | |
| "step": 624 | |
| }, | |
| { | |
| "epoch": 2.680456490727532, | |
| "grad_norm": 0.41602402925491333, | |
| "learning_rate": 3.2390902936325768e-06, | |
| "loss": 0.0026, | |
| "step": 625 | |
| }, | |
| { | |
| "epoch": 2.6847360912981455, | |
| "grad_norm": 0.36582034826278687, | |
| "learning_rate": 3.233307769629626e-06, | |
| "loss": 0.0043, | |
| "step": 626 | |
| }, | |
| { | |
| "epoch": 2.689015691868759, | |
| "grad_norm": 0.2663152813911438, | |
| "learning_rate": 3.2275209499020343e-06, | |
| "loss": 0.0018, | |
| "step": 627 | |
| }, | |
| { | |
| "epoch": 2.6932952924393723, | |
| "grad_norm": 0.3802527189254761, | |
| "learning_rate": 3.221729868349053e-06, | |
| "loss": 0.0015, | |
| "step": 628 | |
| }, | |
| { | |
| "epoch": 2.697574893009986, | |
| "grad_norm": 0.2044801265001297, | |
| "learning_rate": 3.2159345588948986e-06, | |
| "loss": 0.0016, | |
| "step": 629 | |
| }, | |
| { | |
| "epoch": 2.701854493580599, | |
| "grad_norm": 0.31439611315727234, | |
| "learning_rate": 3.2101350554885552e-06, | |
| "loss": 0.0012, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 2.7061340941512126, | |
| "grad_norm": 0.40683820843696594, | |
| "learning_rate": 3.2043313921035747e-06, | |
| "loss": 0.0044, | |
| "step": 631 | |
| }, | |
| { | |
| "epoch": 2.710413694721826, | |
| "grad_norm": 0.3958558142185211, | |
| "learning_rate": 3.1985236027378786e-06, | |
| "loss": 0.0008, | |
| "step": 632 | |
| }, | |
| { | |
| "epoch": 2.7146932952924394, | |
| "grad_norm": 0.5626212358474731, | |
| "learning_rate": 3.192711721413558e-06, | |
| "loss": 0.0025, | |
| "step": 633 | |
| }, | |
| { | |
| "epoch": 2.718972895863053, | |
| "grad_norm": 0.4065411388874054, | |
| "learning_rate": 3.1868957821766742e-06, | |
| "loss": 0.0025, | |
| "step": 634 | |
| }, | |
| { | |
| "epoch": 2.723252496433666, | |
| "grad_norm": 0.2741824686527252, | |
| "learning_rate": 3.1810758190970623e-06, | |
| "loss": 0.0009, | |
| "step": 635 | |
| }, | |
| { | |
| "epoch": 2.7275320970042793, | |
| "grad_norm": 0.13432888686656952, | |
| "learning_rate": 3.1752518662681257e-06, | |
| "loss": 0.001, | |
| "step": 636 | |
| }, | |
| { | |
| "epoch": 2.731811697574893, | |
| "grad_norm": 0.17365941405296326, | |
| "learning_rate": 3.1694239578066427e-06, | |
| "loss": 0.0039, | |
| "step": 637 | |
| }, | |
| { | |
| "epoch": 2.7360912981455066, | |
| "grad_norm": 0.2563876509666443, | |
| "learning_rate": 3.1635921278525615e-06, | |
| "loss": 0.0015, | |
| "step": 638 | |
| }, | |
| { | |
| "epoch": 2.7403708987161197, | |
| "grad_norm": 0.17576679587364197, | |
| "learning_rate": 3.157756410568803e-06, | |
| "loss": 0.0013, | |
| "step": 639 | |
| }, | |
| { | |
| "epoch": 2.7446504992867333, | |
| "grad_norm": 0.3357720375061035, | |
| "learning_rate": 3.1519168401410627e-06, | |
| "loss": 0.0046, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 2.7489300998573465, | |
| "grad_norm": 0.34145796298980713, | |
| "learning_rate": 3.1460734507776036e-06, | |
| "loss": 0.0027, | |
| "step": 641 | |
| }, | |
| { | |
| "epoch": 2.75320970042796, | |
| "grad_norm": 0.5672411918640137, | |
| "learning_rate": 3.140226276709063e-06, | |
| "loss": 0.0009, | |
| "step": 642 | |
| }, | |
| { | |
| "epoch": 2.7574893009985733, | |
| "grad_norm": 0.039542071521282196, | |
| "learning_rate": 3.134375352188247e-06, | |
| "loss": 0.0029, | |
| "step": 643 | |
| }, | |
| { | |
| "epoch": 2.761768901569187, | |
| "grad_norm": 0.3622141182422638, | |
| "learning_rate": 3.1285207114899353e-06, | |
| "loss": 0.0011, | |
| "step": 644 | |
| }, | |
| { | |
| "epoch": 2.7660485021398005, | |
| "grad_norm": 0.12562383711338043, | |
| "learning_rate": 3.122662388910673e-06, | |
| "loss": 0.0022, | |
| "step": 645 | |
| }, | |
| { | |
| "epoch": 2.7703281027104136, | |
| "grad_norm": 0.12533040344715118, | |
| "learning_rate": 3.1168004187685765e-06, | |
| "loss": 0.0023, | |
| "step": 646 | |
| }, | |
| { | |
| "epoch": 2.7746077032810272, | |
| "grad_norm": 0.47782161831855774, | |
| "learning_rate": 3.1109348354031284e-06, | |
| "loss": 0.0026, | |
| "step": 647 | |
| }, | |
| { | |
| "epoch": 2.7788873038516404, | |
| "grad_norm": 0.1668027937412262, | |
| "learning_rate": 3.1050656731749767e-06, | |
| "loss": 0.0006, | |
| "step": 648 | |
| }, | |
| { | |
| "epoch": 2.783166904422254, | |
| "grad_norm": 0.23814761638641357, | |
| "learning_rate": 3.0991929664657373e-06, | |
| "loss": 0.0028, | |
| "step": 649 | |
| }, | |
| { | |
| "epoch": 2.787446504992867, | |
| "grad_norm": 0.36936724185943604, | |
| "learning_rate": 3.093316749677788e-06, | |
| "loss": 0.0012, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 2.7917261055634808, | |
| "grad_norm": 0.26975494623184204, | |
| "learning_rate": 3.087437057234066e-06, | |
| "loss": 0.0014, | |
| "step": 651 | |
| }, | |
| { | |
| "epoch": 2.7960057061340944, | |
| "grad_norm": 0.20466478168964386, | |
| "learning_rate": 3.081553923577875e-06, | |
| "loss": 0.0013, | |
| "step": 652 | |
| }, | |
| { | |
| "epoch": 2.8002853067047075, | |
| "grad_norm": 0.17590539157390594, | |
| "learning_rate": 3.0756673831726723e-06, | |
| "loss": 0.0008, | |
| "step": 653 | |
| }, | |
| { | |
| "epoch": 2.8045649072753207, | |
| "grad_norm": 0.16283346712589264, | |
| "learning_rate": 3.0697774705018734e-06, | |
| "loss": 0.0008, | |
| "step": 654 | |
| }, | |
| { | |
| "epoch": 2.8088445078459343, | |
| "grad_norm": 0.17097610235214233, | |
| "learning_rate": 3.063884220068649e-06, | |
| "loss": 0.0011, | |
| "step": 655 | |
| }, | |
| { | |
| "epoch": 2.813124108416548, | |
| "grad_norm": 0.1250288337469101, | |
| "learning_rate": 3.057987666395723e-06, | |
| "loss": 0.0041, | |
| "step": 656 | |
| }, | |
| { | |
| "epoch": 2.817403708987161, | |
| "grad_norm": 0.2146853357553482, | |
| "learning_rate": 3.052087844025168e-06, | |
| "loss": 0.0007, | |
| "step": 657 | |
| }, | |
| { | |
| "epoch": 2.8216833095577747, | |
| "grad_norm": 0.07225178927183151, | |
| "learning_rate": 3.0461847875182056e-06, | |
| "loss": 0.0004, | |
| "step": 658 | |
| }, | |
| { | |
| "epoch": 2.8259629101283883, | |
| "grad_norm": 0.12505510449409485, | |
| "learning_rate": 3.0402785314550037e-06, | |
| "loss": 0.0013, | |
| "step": 659 | |
| }, | |
| { | |
| "epoch": 2.8302425106990015, | |
| "grad_norm": 0.1191864013671875, | |
| "learning_rate": 3.034369110434472e-06, | |
| "loss": 0.0014, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 2.8345221112696146, | |
| "grad_norm": 0.49187931418418884, | |
| "learning_rate": 3.028456559074061e-06, | |
| "loss": 0.0003, | |
| "step": 661 | |
| }, | |
| { | |
| "epoch": 2.8388017118402282, | |
| "grad_norm": 0.19233481585979462, | |
| "learning_rate": 3.0225409120095597e-06, | |
| "loss": 0.0015, | |
| "step": 662 | |
| }, | |
| { | |
| "epoch": 2.843081312410842, | |
| "grad_norm": 0.13615825772285461, | |
| "learning_rate": 3.01662220389489e-06, | |
| "loss": 0.0047, | |
| "step": 663 | |
| }, | |
| { | |
| "epoch": 2.847360912981455, | |
| "grad_norm": 0.33949339389801025, | |
| "learning_rate": 3.0107004694019067e-06, | |
| "loss": 0.0016, | |
| "step": 664 | |
| }, | |
| { | |
| "epoch": 2.8516405135520686, | |
| "grad_norm": 0.22976908087730408, | |
| "learning_rate": 3.0047757432201936e-06, | |
| "loss": 0.0003, | |
| "step": 665 | |
| }, | |
| { | |
| "epoch": 2.8559201141226818, | |
| "grad_norm": 0.06495361775159836, | |
| "learning_rate": 2.9988480600568578e-06, | |
| "loss": 0.0006, | |
| "step": 666 | |
| }, | |
| { | |
| "epoch": 2.8601997146932954, | |
| "grad_norm": 0.19875776767730713, | |
| "learning_rate": 2.9929174546363303e-06, | |
| "loss": 0.0009, | |
| "step": 667 | |
| }, | |
| { | |
| "epoch": 2.8644793152639085, | |
| "grad_norm": 0.08605135977268219, | |
| "learning_rate": 2.9869839617001605e-06, | |
| "loss": 0.0008, | |
| "step": 668 | |
| }, | |
| { | |
| "epoch": 2.868758915834522, | |
| "grad_norm": 0.22537599503993988, | |
| "learning_rate": 2.9810476160068113e-06, | |
| "loss": 0.0012, | |
| "step": 669 | |
| }, | |
| { | |
| "epoch": 2.8730385164051357, | |
| "grad_norm": 0.1354469507932663, | |
| "learning_rate": 2.97510845233146e-06, | |
| "loss": 0.001, | |
| "step": 670 | |
| }, | |
| { | |
| "epoch": 2.877318116975749, | |
| "grad_norm": 0.28886279463768005, | |
| "learning_rate": 2.969166505465789e-06, | |
| "loss": 0.0011, | |
| "step": 671 | |
| }, | |
| { | |
| "epoch": 2.881597717546362, | |
| "grad_norm": 0.35490158200263977, | |
| "learning_rate": 2.963221810217786e-06, | |
| "loss": 0.0002, | |
| "step": 672 | |
| }, | |
| { | |
| "epoch": 2.8858773181169757, | |
| "grad_norm": 0.06331105530261993, | |
| "learning_rate": 2.9572744014115384e-06, | |
| "loss": 0.0012, | |
| "step": 673 | |
| }, | |
| { | |
| "epoch": 2.8901569186875893, | |
| "grad_norm": 1.2145577669143677, | |
| "learning_rate": 2.9513243138870294e-06, | |
| "loss": 0.0013, | |
| "step": 674 | |
| }, | |
| { | |
| "epoch": 2.8944365192582024, | |
| "grad_norm": 0.3513360917568207, | |
| "learning_rate": 2.9453715824999348e-06, | |
| "loss": 0.0026, | |
| "step": 675 | |
| }, | |
| { | |
| "epoch": 2.898716119828816, | |
| "grad_norm": 0.33389878273010254, | |
| "learning_rate": 2.9394162421214167e-06, | |
| "loss": 0.0004, | |
| "step": 676 | |
| }, | |
| { | |
| "epoch": 2.9029957203994297, | |
| "grad_norm": 0.10553620010614395, | |
| "learning_rate": 2.933458327637923e-06, | |
| "loss": 0.0009, | |
| "step": 677 | |
| }, | |
| { | |
| "epoch": 2.907275320970043, | |
| "grad_norm": 0.296312540769577, | |
| "learning_rate": 2.927497873950979e-06, | |
| "loss": 0.0007, | |
| "step": 678 | |
| }, | |
| { | |
| "epoch": 2.911554921540656, | |
| "grad_norm": 0.26307055354118347, | |
| "learning_rate": 2.921534915976985e-06, | |
| "loss": 0.0014, | |
| "step": 679 | |
| }, | |
| { | |
| "epoch": 2.9158345221112696, | |
| "grad_norm": 0.05739770829677582, | |
| "learning_rate": 2.9155694886470124e-06, | |
| "loss": 0.0013, | |
| "step": 680 | |
| }, | |
| { | |
| "epoch": 2.920114122681883, | |
| "grad_norm": 0.3625591993331909, | |
| "learning_rate": 2.909601626906597e-06, | |
| "loss": 0.0013, | |
| "step": 681 | |
| }, | |
| { | |
| "epoch": 2.9243937232524964, | |
| "grad_norm": 0.40288034081459045, | |
| "learning_rate": 2.9036313657155347e-06, | |
| "loss": 0.001, | |
| "step": 682 | |
| }, | |
| { | |
| "epoch": 2.92867332382311, | |
| "grad_norm": 0.9118512868881226, | |
| "learning_rate": 2.8976587400476804e-06, | |
| "loss": 0.0016, | |
| "step": 683 | |
| }, | |
| { | |
| "epoch": 2.932952924393723, | |
| "grad_norm": 0.18625538051128387, | |
| "learning_rate": 2.891683784890737e-06, | |
| "loss": 0.0008, | |
| "step": 684 | |
| }, | |
| { | |
| "epoch": 2.9372325249643367, | |
| "grad_norm": 0.10386449843645096, | |
| "learning_rate": 2.885706535246055e-06, | |
| "loss": 0.0005, | |
| "step": 685 | |
| }, | |
| { | |
| "epoch": 2.94151212553495, | |
| "grad_norm": 0.10134569555521011, | |
| "learning_rate": 2.8797270261284247e-06, | |
| "loss": 0.0014, | |
| "step": 686 | |
| }, | |
| { | |
| "epoch": 2.9457917261055635, | |
| "grad_norm": 0.2270444929599762, | |
| "learning_rate": 2.8737452925658753e-06, | |
| "loss": 0.0006, | |
| "step": 687 | |
| }, | |
| { | |
| "epoch": 2.950071326676177, | |
| "grad_norm": 0.09928806871175766, | |
| "learning_rate": 2.867761369599465e-06, | |
| "loss": 0.0007, | |
| "step": 688 | |
| }, | |
| { | |
| "epoch": 2.9543509272467903, | |
| "grad_norm": 0.4043658375740051, | |
| "learning_rate": 2.8617752922830755e-06, | |
| "loss": 0.001, | |
| "step": 689 | |
| }, | |
| { | |
| "epoch": 2.9586305278174034, | |
| "grad_norm": 0.12267272174358368, | |
| "learning_rate": 2.8557870956832135e-06, | |
| "loss": 0.0011, | |
| "step": 690 | |
| }, | |
| { | |
| "epoch": 2.962910128388017, | |
| "grad_norm": 0.29241859912872314, | |
| "learning_rate": 2.8497968148787953e-06, | |
| "loss": 0.0009, | |
| "step": 691 | |
| }, | |
| { | |
| "epoch": 2.9671897289586306, | |
| "grad_norm": 0.38876116275787354, | |
| "learning_rate": 2.8438044849609508e-06, | |
| "loss": 0.0031, | |
| "step": 692 | |
| }, | |
| { | |
| "epoch": 2.971469329529244, | |
| "grad_norm": 0.36466461420059204, | |
| "learning_rate": 2.8378101410328107e-06, | |
| "loss": 0.0011, | |
| "step": 693 | |
| }, | |
| { | |
| "epoch": 2.9757489300998574, | |
| "grad_norm": 1.4299386739730835, | |
| "learning_rate": 2.8318138182093053e-06, | |
| "loss": 0.0023, | |
| "step": 694 | |
| }, | |
| { | |
| "epoch": 2.980028530670471, | |
| "grad_norm": 0.07230447232723236, | |
| "learning_rate": 2.825815551616957e-06, | |
| "loss": 0.0024, | |
| "step": 695 | |
| }, | |
| { | |
| "epoch": 2.984308131241084, | |
| "grad_norm": 0.268041729927063, | |
| "learning_rate": 2.8198153763936746e-06, | |
| "loss": 0.0023, | |
| "step": 696 | |
| }, | |
| { | |
| "epoch": 2.9885877318116973, | |
| "grad_norm": 0.5011327266693115, | |
| "learning_rate": 2.813813327688547e-06, | |
| "loss": 0.001, | |
| "step": 697 | |
| }, | |
| { | |
| "epoch": 2.992867332382311, | |
| "grad_norm": 3.3417763710021973, | |
| "learning_rate": 2.8078094406616403e-06, | |
| "loss": 0.0033, | |
| "step": 698 | |
| }, | |
| { | |
| "epoch": 2.9971469329529246, | |
| "grad_norm": 0.11345068365335464, | |
| "learning_rate": 2.801803750483787e-06, | |
| "loss": 0.0037, | |
| "step": 699 | |
| }, | |
| { | |
| "epoch": 3.0042796005706136, | |
| "grad_norm": 0.06195438653230667, | |
| "learning_rate": 2.7957962923363835e-06, | |
| "loss": 0.0006, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 3.0085592011412268, | |
| "grad_norm": 0.05268440395593643, | |
| "learning_rate": 2.789787101411183e-06, | |
| "loss": 0.0005, | |
| "step": 701 | |
| }, | |
| { | |
| "epoch": 3.0128388017118404, | |
| "grad_norm": 0.058131493628025055, | |
| "learning_rate": 2.7837762129100887e-06, | |
| "loss": 0.0003, | |
| "step": 702 | |
| }, | |
| { | |
| "epoch": 3.0171184022824535, | |
| "grad_norm": 0.14371389150619507, | |
| "learning_rate": 2.77776366204495e-06, | |
| "loss": 0.0004, | |
| "step": 703 | |
| }, | |
| { | |
| "epoch": 3.021398002853067, | |
| "grad_norm": 0.10436198860406876, | |
| "learning_rate": 2.771749484037352e-06, | |
| "loss": 0.0011, | |
| "step": 704 | |
| }, | |
| { | |
| "epoch": 3.0256776034236803, | |
| "grad_norm": 0.05964021384716034, | |
| "learning_rate": 2.7657337141184137e-06, | |
| "loss": 0.0005, | |
| "step": 705 | |
| }, | |
| { | |
| "epoch": 3.029957203994294, | |
| "grad_norm": 0.04693179205060005, | |
| "learning_rate": 2.759716387528579e-06, | |
| "loss": 0.0005, | |
| "step": 706 | |
| }, | |
| { | |
| "epoch": 3.034236804564907, | |
| "grad_norm": 0.0761813372373581, | |
| "learning_rate": 2.753697539517408e-06, | |
| "loss": 0.0007, | |
| "step": 707 | |
| }, | |
| { | |
| "epoch": 3.0385164051355207, | |
| "grad_norm": 0.07143909484148026, | |
| "learning_rate": 2.7476772053433797e-06, | |
| "loss": 0.0007, | |
| "step": 708 | |
| }, | |
| { | |
| "epoch": 3.0427960057061343, | |
| "grad_norm": 0.06302608549594879, | |
| "learning_rate": 2.7416554202736713e-06, | |
| "loss": 0.0005, | |
| "step": 709 | |
| }, | |
| { | |
| "epoch": 3.0470756062767475, | |
| "grad_norm": 0.04796674847602844, | |
| "learning_rate": 2.735632219583966e-06, | |
| "loss": 0.0004, | |
| "step": 710 | |
| }, | |
| { | |
| "epoch": 3.051355206847361, | |
| "grad_norm": 0.09792087227106094, | |
| "learning_rate": 2.729607638558235e-06, | |
| "loss": 0.0008, | |
| "step": 711 | |
| }, | |
| { | |
| "epoch": 3.055634807417974, | |
| "grad_norm": 0.059060439467430115, | |
| "learning_rate": 2.723581712488538e-06, | |
| "loss": 0.0003, | |
| "step": 712 | |
| }, | |
| { | |
| "epoch": 3.059914407988588, | |
| "grad_norm": 0.12019751220941544, | |
| "learning_rate": 2.7175544766748143e-06, | |
| "loss": 0.0007, | |
| "step": 713 | |
| }, | |
| { | |
| "epoch": 3.064194008559201, | |
| "grad_norm": 0.03258294612169266, | |
| "learning_rate": 2.711525966424674e-06, | |
| "loss": 0.0002, | |
| "step": 714 | |
| }, | |
| { | |
| "epoch": 3.0684736091298146, | |
| "grad_norm": 0.18928246200084686, | |
| "learning_rate": 2.7054962170531936e-06, | |
| "loss": 0.0025, | |
| "step": 715 | |
| }, | |
| { | |
| "epoch": 3.0727532097004278, | |
| "grad_norm": 0.30536651611328125, | |
| "learning_rate": 2.699465263882708e-06, | |
| "loss": 0.0005, | |
| "step": 716 | |
| }, | |
| { | |
| "epoch": 3.0770328102710414, | |
| "grad_norm": 0.08757955580949783, | |
| "learning_rate": 2.693433142242606e-06, | |
| "loss": 0.0003, | |
| "step": 717 | |
| }, | |
| { | |
| "epoch": 3.081312410841655, | |
| "grad_norm": 0.03118630312383175, | |
| "learning_rate": 2.687399887469118e-06, | |
| "loss": 0.0002, | |
| "step": 718 | |
| }, | |
| { | |
| "epoch": 3.085592011412268, | |
| "grad_norm": 0.06996569037437439, | |
| "learning_rate": 2.681365534905115e-06, | |
| "loss": 0.0003, | |
| "step": 719 | |
| }, | |
| { | |
| "epoch": 3.0898716119828817, | |
| "grad_norm": 0.044631022959947586, | |
| "learning_rate": 2.6753301198998977e-06, | |
| "loss": 0.0001, | |
| "step": 720 | |
| }, | |
| { | |
| "epoch": 3.094151212553495, | |
| "grad_norm": 0.02148989960551262, | |
| "learning_rate": 2.6692936778089904e-06, | |
| "loss": 0.0001, | |
| "step": 721 | |
| }, | |
| { | |
| "epoch": 3.0984308131241085, | |
| "grad_norm": 0.10564662516117096, | |
| "learning_rate": 2.6632562439939353e-06, | |
| "loss": 0.0011, | |
| "step": 722 | |
| }, | |
| { | |
| "epoch": 3.1027104136947217, | |
| "grad_norm": 0.031829968094825745, | |
| "learning_rate": 2.657217853822083e-06, | |
| "loss": 0.0003, | |
| "step": 723 | |
| }, | |
| { | |
| "epoch": 3.1069900142653353, | |
| "grad_norm": 0.06434270739555359, | |
| "learning_rate": 2.6511785426663862e-06, | |
| "loss": 0.0005, | |
| "step": 724 | |
| }, | |
| { | |
| "epoch": 3.1112696148359484, | |
| "grad_norm": 0.0329788438975811, | |
| "learning_rate": 2.645138345905194e-06, | |
| "loss": 0.0003, | |
| "step": 725 | |
| }, | |
| { | |
| "epoch": 3.115549215406562, | |
| "grad_norm": 0.03140716999769211, | |
| "learning_rate": 2.6390972989220415e-06, | |
| "loss": 0.0002, | |
| "step": 726 | |
| }, | |
| { | |
| "epoch": 3.1198288159771757, | |
| "grad_norm": 0.11149758100509644, | |
| "learning_rate": 2.6330554371054466e-06, | |
| "loss": 0.0005, | |
| "step": 727 | |
| }, | |
| { | |
| "epoch": 3.124108416547789, | |
| "grad_norm": 0.007250667549669743, | |
| "learning_rate": 2.6270127958487003e-06, | |
| "loss": 0.0001, | |
| "step": 728 | |
| }, | |
| { | |
| "epoch": 3.1283880171184024, | |
| "grad_norm": 0.04689525440335274, | |
| "learning_rate": 2.6209694105496564e-06, | |
| "loss": 0.0003, | |
| "step": 729 | |
| }, | |
| { | |
| "epoch": 3.1326676176890156, | |
| "grad_norm": 0.06441285461187363, | |
| "learning_rate": 2.6149253166105317e-06, | |
| "loss": 0.0005, | |
| "step": 730 | |
| }, | |
| { | |
| "epoch": 3.136947218259629, | |
| "grad_norm": 0.15823382139205933, | |
| "learning_rate": 2.6088805494376913e-06, | |
| "loss": 0.0004, | |
| "step": 731 | |
| }, | |
| { | |
| "epoch": 3.1412268188302424, | |
| "grad_norm": 0.0069446568377316, | |
| "learning_rate": 2.6028351444414447e-06, | |
| "loss": 0.0, | |
| "step": 732 | |
| }, | |
| { | |
| "epoch": 3.145506419400856, | |
| "grad_norm": 0.031451065093278885, | |
| "learning_rate": 2.59678913703584e-06, | |
| "loss": 0.0001, | |
| "step": 733 | |
| }, | |
| { | |
| "epoch": 3.1497860199714696, | |
| "grad_norm": 0.05783890560269356, | |
| "learning_rate": 2.590742562638449e-06, | |
| "loss": 0.0004, | |
| "step": 734 | |
| }, | |
| { | |
| "epoch": 3.1540656205420827, | |
| "grad_norm": 0.10425648093223572, | |
| "learning_rate": 2.584695456670171e-06, | |
| "loss": 0.0006, | |
| "step": 735 | |
| }, | |
| { | |
| "epoch": 3.1583452211126963, | |
| "grad_norm": 0.007683516480028629, | |
| "learning_rate": 2.5786478545550153e-06, | |
| "loss": 0.0001, | |
| "step": 736 | |
| }, | |
| { | |
| "epoch": 3.1626248216833095, | |
| "grad_norm": 0.011570609174668789, | |
| "learning_rate": 2.5725997917198987e-06, | |
| "loss": 0.0001, | |
| "step": 737 | |
| }, | |
| { | |
| "epoch": 3.166904422253923, | |
| "grad_norm": 0.08868878334760666, | |
| "learning_rate": 2.5665513035944373e-06, | |
| "loss": 0.0003, | |
| "step": 738 | |
| }, | |
| { | |
| "epoch": 3.1711840228245363, | |
| "grad_norm": 0.19744367897510529, | |
| "learning_rate": 2.5605024256107384e-06, | |
| "loss": 0.0011, | |
| "step": 739 | |
| }, | |
| { | |
| "epoch": 3.17546362339515, | |
| "grad_norm": 0.07183310389518738, | |
| "learning_rate": 2.5544531932031924e-06, | |
| "loss": 0.0003, | |
| "step": 740 | |
| }, | |
| { | |
| "epoch": 3.179743223965763, | |
| "grad_norm": 0.530739426612854, | |
| "learning_rate": 2.5484036418082676e-06, | |
| "loss": 0.0005, | |
| "step": 741 | |
| }, | |
| { | |
| "epoch": 3.1840228245363766, | |
| "grad_norm": 0.020345600321888924, | |
| "learning_rate": 2.542353806864299e-06, | |
| "loss": 0.0001, | |
| "step": 742 | |
| }, | |
| { | |
| "epoch": 3.18830242510699, | |
| "grad_norm": 0.05388473719358444, | |
| "learning_rate": 2.5363037238112837e-06, | |
| "loss": 0.0002, | |
| "step": 743 | |
| }, | |
| { | |
| "epoch": 3.1925820256776034, | |
| "grad_norm": 0.055131860077381134, | |
| "learning_rate": 2.530253428090671e-06, | |
| "loss": 0.0004, | |
| "step": 744 | |
| }, | |
| { | |
| "epoch": 3.196861626248217, | |
| "grad_norm": 0.11556221544742584, | |
| "learning_rate": 2.5242029551451587e-06, | |
| "loss": 0.0006, | |
| "step": 745 | |
| }, | |
| { | |
| "epoch": 3.20114122681883, | |
| "grad_norm": 0.295624703168869, | |
| "learning_rate": 2.5181523404184794e-06, | |
| "loss": 0.0015, | |
| "step": 746 | |
| }, | |
| { | |
| "epoch": 3.205420827389444, | |
| "grad_norm": 0.08749273419380188, | |
| "learning_rate": 2.5121016193551987e-06, | |
| "loss": 0.0006, | |
| "step": 747 | |
| }, | |
| { | |
| "epoch": 3.209700427960057, | |
| "grad_norm": 0.15132521092891693, | |
| "learning_rate": 2.506050827400505e-06, | |
| "loss": 0.001, | |
| "step": 748 | |
| }, | |
| { | |
| "epoch": 3.2139800285306706, | |
| "grad_norm": 0.09060026705265045, | |
| "learning_rate": 2.5e-06, | |
| "loss": 0.0005, | |
| "step": 749 | |
| }, | |
| { | |
| "epoch": 3.2182596291012837, | |
| "grad_norm": 0.0072489045560359955, | |
| "learning_rate": 2.4939491725994952e-06, | |
| "loss": 0.0001, | |
| "step": 750 | |
| }, | |
| { | |
| "epoch": 3.2225392296718973, | |
| "grad_norm": 0.006529243662953377, | |
| "learning_rate": 2.4878983806448013e-06, | |
| "loss": 0.0001, | |
| "step": 751 | |
| }, | |
| { | |
| "epoch": 3.226818830242511, | |
| "grad_norm": 0.09600796550512314, | |
| "learning_rate": 2.4818476595815206e-06, | |
| "loss": 0.0007, | |
| "step": 752 | |
| }, | |
| { | |
| "epoch": 3.231098430813124, | |
| "grad_norm": 0.007499899715185165, | |
| "learning_rate": 2.4757970448548425e-06, | |
| "loss": 0.0001, | |
| "step": 753 | |
| }, | |
| { | |
| "epoch": 3.2353780313837377, | |
| "grad_norm": 0.09545556455850601, | |
| "learning_rate": 2.4697465719093296e-06, | |
| "loss": 0.0006, | |
| "step": 754 | |
| }, | |
| { | |
| "epoch": 3.239657631954351, | |
| "grad_norm": 0.21487762033939362, | |
| "learning_rate": 2.463696276188717e-06, | |
| "loss": 0.0012, | |
| "step": 755 | |
| }, | |
| { | |
| "epoch": 3.2439372325249645, | |
| "grad_norm": 0.017742682248353958, | |
| "learning_rate": 2.4576461931357016e-06, | |
| "loss": 0.0002, | |
| "step": 756 | |
| }, | |
| { | |
| "epoch": 3.2482168330955776, | |
| "grad_norm": 0.006112521979957819, | |
| "learning_rate": 2.4515963581917328e-06, | |
| "loss": 0.0001, | |
| "step": 757 | |
| }, | |
| { | |
| "epoch": 3.2524964336661912, | |
| "grad_norm": 0.027608409523963928, | |
| "learning_rate": 2.4455468067968076e-06, | |
| "loss": 0.0002, | |
| "step": 758 | |
| }, | |
| { | |
| "epoch": 3.2567760342368044, | |
| "grad_norm": 0.03438973426818848, | |
| "learning_rate": 2.4394975743892624e-06, | |
| "loss": 0.0002, | |
| "step": 759 | |
| }, | |
| { | |
| "epoch": 3.261055634807418, | |
| "grad_norm": 0.02690828964114189, | |
| "learning_rate": 2.4334486964055635e-06, | |
| "loss": 0.0001, | |
| "step": 760 | |
| }, | |
| { | |
| "epoch": 3.265335235378031, | |
| "grad_norm": 0.052413567900657654, | |
| "learning_rate": 2.4274002082801025e-06, | |
| "loss": 0.0003, | |
| "step": 761 | |
| }, | |
| { | |
| "epoch": 3.2696148359486448, | |
| "grad_norm": 0.020108576864004135, | |
| "learning_rate": 2.4213521454449855e-06, | |
| "loss": 0.0001, | |
| "step": 762 | |
| }, | |
| { | |
| "epoch": 3.2738944365192584, | |
| "grad_norm": 0.14481624960899353, | |
| "learning_rate": 2.4153045433298296e-06, | |
| "loss": 0.001, | |
| "step": 763 | |
| }, | |
| { | |
| "epoch": 3.2781740370898715, | |
| "grad_norm": 0.029384953901171684, | |
| "learning_rate": 2.4092574373615514e-06, | |
| "loss": 0.0001, | |
| "step": 764 | |
| }, | |
| { | |
| "epoch": 3.282453637660485, | |
| "grad_norm": 0.067149817943573, | |
| "learning_rate": 2.4032108629641614e-06, | |
| "loss": 0.0003, | |
| "step": 765 | |
| }, | |
| { | |
| "epoch": 3.2867332382310983, | |
| "grad_norm": 0.029277823865413666, | |
| "learning_rate": 2.3971648555585557e-06, | |
| "loss": 0.0002, | |
| "step": 766 | |
| }, | |
| { | |
| "epoch": 3.291012838801712, | |
| "grad_norm": 0.042966265231370926, | |
| "learning_rate": 2.391119450562309e-06, | |
| "loss": 0.0003, | |
| "step": 767 | |
| }, | |
| { | |
| "epoch": 3.295292439372325, | |
| "grad_norm": 0.05120711773633957, | |
| "learning_rate": 2.385074683389469e-06, | |
| "loss": 0.0004, | |
| "step": 768 | |
| }, | |
| { | |
| "epoch": 3.2995720399429387, | |
| "grad_norm": 0.005247802473604679, | |
| "learning_rate": 2.3790305894503436e-06, | |
| "loss": 0.0001, | |
| "step": 769 | |
| }, | |
| { | |
| "epoch": 3.3038516405135523, | |
| "grad_norm": 0.038485050201416016, | |
| "learning_rate": 2.372987204151301e-06, | |
| "loss": 0.0002, | |
| "step": 770 | |
| }, | |
| { | |
| "epoch": 3.3081312410841655, | |
| "grad_norm": 0.0560363307595253, | |
| "learning_rate": 2.3669445628945543e-06, | |
| "loss": 0.0004, | |
| "step": 771 | |
| }, | |
| { | |
| "epoch": 3.312410841654779, | |
| "grad_norm": 0.02810109220445156, | |
| "learning_rate": 2.3609027010779593e-06, | |
| "loss": 0.0002, | |
| "step": 772 | |
| }, | |
| { | |
| "epoch": 3.316690442225392, | |
| "grad_norm": 0.034111492335796356, | |
| "learning_rate": 2.354861654094807e-06, | |
| "loss": 0.0001, | |
| "step": 773 | |
| }, | |
| { | |
| "epoch": 3.320970042796006, | |
| "grad_norm": 0.04070909693837166, | |
| "learning_rate": 2.3488214573336146e-06, | |
| "loss": 0.0002, | |
| "step": 774 | |
| }, | |
| { | |
| "epoch": 3.325249643366619, | |
| "grad_norm": 0.2383888065814972, | |
| "learning_rate": 2.3427821461779172e-06, | |
| "loss": 0.001, | |
| "step": 775 | |
| }, | |
| { | |
| "epoch": 3.3295292439372326, | |
| "grad_norm": 0.029681170359253883, | |
| "learning_rate": 2.336743756006065e-06, | |
| "loss": 0.0002, | |
| "step": 776 | |
| }, | |
| { | |
| "epoch": 3.3338088445078458, | |
| "grad_norm": 0.06831318140029907, | |
| "learning_rate": 2.3307063221910104e-06, | |
| "loss": 0.0003, | |
| "step": 777 | |
| }, | |
| { | |
| "epoch": 3.3380884450784594, | |
| "grad_norm": 0.07744365930557251, | |
| "learning_rate": 2.324669880100103e-06, | |
| "loss": 0.0005, | |
| "step": 778 | |
| }, | |
| { | |
| "epoch": 3.3423680456490725, | |
| "grad_norm": 0.03980100154876709, | |
| "learning_rate": 2.318634465094886e-06, | |
| "loss": 0.0001, | |
| "step": 779 | |
| }, | |
| { | |
| "epoch": 3.346647646219686, | |
| "grad_norm": 0.03938981518149376, | |
| "learning_rate": 2.3126001125308826e-06, | |
| "loss": 0.0001, | |
| "step": 780 | |
| }, | |
| { | |
| "epoch": 3.3509272467902997, | |
| "grad_norm": 0.07077000290155411, | |
| "learning_rate": 2.306566857757395e-06, | |
| "loss": 0.0003, | |
| "step": 781 | |
| }, | |
| { | |
| "epoch": 3.355206847360913, | |
| "grad_norm": 0.21255511045455933, | |
| "learning_rate": 2.300534736117292e-06, | |
| "loss": 0.0011, | |
| "step": 782 | |
| }, | |
| { | |
| "epoch": 3.3594864479315265, | |
| "grad_norm": 0.09193920344114304, | |
| "learning_rate": 2.2945037829468077e-06, | |
| "loss": 0.0004, | |
| "step": 783 | |
| }, | |
| { | |
| "epoch": 3.3637660485021397, | |
| "grad_norm": 0.31514987349510193, | |
| "learning_rate": 2.2884740335753276e-06, | |
| "loss": 0.0003, | |
| "step": 784 | |
| }, | |
| { | |
| "epoch": 3.3680456490727533, | |
| "grad_norm": 0.21466892957687378, | |
| "learning_rate": 2.2824455233251866e-06, | |
| "loss": 0.0008, | |
| "step": 785 | |
| }, | |
| { | |
| "epoch": 3.3723252496433664, | |
| "grad_norm": 0.11863390356302261, | |
| "learning_rate": 2.2764182875114625e-06, | |
| "loss": 0.0006, | |
| "step": 786 | |
| }, | |
| { | |
| "epoch": 3.37660485021398, | |
| "grad_norm": 0.011846931651234627, | |
| "learning_rate": 2.270392361441766e-06, | |
| "loss": 0.0001, | |
| "step": 787 | |
| }, | |
| { | |
| "epoch": 3.3808844507845937, | |
| "grad_norm": 0.2932061553001404, | |
| "learning_rate": 2.2643677804160354e-06, | |
| "loss": 0.0024, | |
| "step": 788 | |
| }, | |
| { | |
| "epoch": 3.385164051355207, | |
| "grad_norm": 0.016983941197395325, | |
| "learning_rate": 2.25834457972633e-06, | |
| "loss": 0.0001, | |
| "step": 789 | |
| }, | |
| { | |
| "epoch": 3.3894436519258204, | |
| "grad_norm": 0.019267873838543892, | |
| "learning_rate": 2.2523227946566215e-06, | |
| "loss": 0.0001, | |
| "step": 790 | |
| }, | |
| { | |
| "epoch": 3.3937232524964336, | |
| "grad_norm": 0.2439136803150177, | |
| "learning_rate": 2.2463024604825924e-06, | |
| "loss": 0.0004, | |
| "step": 791 | |
| }, | |
| { | |
| "epoch": 3.398002853067047, | |
| "grad_norm": 0.15215131640434265, | |
| "learning_rate": 2.2402836124714223e-06, | |
| "loss": 0.0004, | |
| "step": 792 | |
| }, | |
| { | |
| "epoch": 3.4022824536376604, | |
| "grad_norm": 0.18076124787330627, | |
| "learning_rate": 2.234266285881587e-06, | |
| "loss": 0.0015, | |
| "step": 793 | |
| }, | |
| { | |
| "epoch": 3.406562054208274, | |
| "grad_norm": 0.05619816482067108, | |
| "learning_rate": 2.2282505159626484e-06, | |
| "loss": 0.0003, | |
| "step": 794 | |
| }, | |
| { | |
| "epoch": 3.410841654778887, | |
| "grad_norm": 0.1122635155916214, | |
| "learning_rate": 2.2222363379550513e-06, | |
| "loss": 0.0005, | |
| "step": 795 | |
| }, | |
| { | |
| "epoch": 3.4151212553495007, | |
| "grad_norm": 0.1281629353761673, | |
| "learning_rate": 2.2162237870899118e-06, | |
| "loss": 0.0004, | |
| "step": 796 | |
| }, | |
| { | |
| "epoch": 3.419400855920114, | |
| "grad_norm": 0.26655465364456177, | |
| "learning_rate": 2.2102128985888184e-06, | |
| "loss": 0.001, | |
| "step": 797 | |
| }, | |
| { | |
| "epoch": 3.4236804564907275, | |
| "grad_norm": 0.1980491280555725, | |
| "learning_rate": 2.2042037076636173e-06, | |
| "loss": 0.0012, | |
| "step": 798 | |
| }, | |
| { | |
| "epoch": 3.427960057061341, | |
| "grad_norm": 0.016746696084737778, | |
| "learning_rate": 2.198196249516214e-06, | |
| "loss": 0.0001, | |
| "step": 799 | |
| }, | |
| { | |
| "epoch": 3.4322396576319543, | |
| "grad_norm": 0.03385158255696297, | |
| "learning_rate": 2.19219055933836e-06, | |
| "loss": 0.0001, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 3.436519258202568, | |
| "grad_norm": 0.10644563287496567, | |
| "learning_rate": 2.186186672311453e-06, | |
| "loss": 0.0005, | |
| "step": 801 | |
| }, | |
| { | |
| "epoch": 3.440798858773181, | |
| "grad_norm": 0.0988880917429924, | |
| "learning_rate": 2.1801846236063266e-06, | |
| "loss": 0.0004, | |
| "step": 802 | |
| }, | |
| { | |
| "epoch": 3.4450784593437946, | |
| "grad_norm": 0.08092886209487915, | |
| "learning_rate": 2.1741844483830437e-06, | |
| "loss": 0.0003, | |
| "step": 803 | |
| }, | |
| { | |
| "epoch": 3.449358059914408, | |
| "grad_norm": 0.005913181230425835, | |
| "learning_rate": 2.1681861817906955e-06, | |
| "loss": 0.0001, | |
| "step": 804 | |
| }, | |
| { | |
| "epoch": 3.4536376604850214, | |
| "grad_norm": 0.10277076065540314, | |
| "learning_rate": 2.1621898589671897e-06, | |
| "loss": 0.0004, | |
| "step": 805 | |
| }, | |
| { | |
| "epoch": 3.457917261055635, | |
| "grad_norm": 0.02266569808125496, | |
| "learning_rate": 2.15619551503905e-06, | |
| "loss": 0.0001, | |
| "step": 806 | |
| }, | |
| { | |
| "epoch": 3.462196861626248, | |
| "grad_norm": 0.03354337066411972, | |
| "learning_rate": 2.1502031851212047e-06, | |
| "loss": 0.0002, | |
| "step": 807 | |
| }, | |
| { | |
| "epoch": 3.466476462196862, | |
| "grad_norm": 0.05572228133678436, | |
| "learning_rate": 2.1442129043167877e-06, | |
| "loss": 0.0002, | |
| "step": 808 | |
| }, | |
| { | |
| "epoch": 3.470756062767475, | |
| "grad_norm": 0.020030835643410683, | |
| "learning_rate": 2.1382247077169254e-06, | |
| "loss": 0.0001, | |
| "step": 809 | |
| }, | |
| { | |
| "epoch": 3.4750356633380886, | |
| "grad_norm": 0.04167158156633377, | |
| "learning_rate": 2.132238630400536e-06, | |
| "loss": 0.0002, | |
| "step": 810 | |
| }, | |
| { | |
| "epoch": 3.4793152639087017, | |
| "grad_norm": 0.007672482170164585, | |
| "learning_rate": 2.126254707434125e-06, | |
| "loss": 0.0001, | |
| "step": 811 | |
| }, | |
| { | |
| "epoch": 3.4835948644793153, | |
| "grad_norm": 0.05470617488026619, | |
| "learning_rate": 2.1202729738715753e-06, | |
| "loss": 0.0003, | |
| "step": 812 | |
| }, | |
| { | |
| "epoch": 3.4878744650499285, | |
| "grad_norm": 0.08051838725805283, | |
| "learning_rate": 2.114293464753946e-06, | |
| "loss": 0.0002, | |
| "step": 813 | |
| }, | |
| { | |
| "epoch": 3.492154065620542, | |
| "grad_norm": 0.08511485159397125, | |
| "learning_rate": 2.108316215109264e-06, | |
| "loss": 0.0005, | |
| "step": 814 | |
| }, | |
| { | |
| "epoch": 3.4964336661911553, | |
| "grad_norm": 0.21538877487182617, | |
| "learning_rate": 2.1023412599523204e-06, | |
| "loss": 0.0016, | |
| "step": 815 | |
| }, | |
| { | |
| "epoch": 3.500713266761769, | |
| "grad_norm": 0.006761731579899788, | |
| "learning_rate": 2.0963686342844657e-06, | |
| "loss": 0.0001, | |
| "step": 816 | |
| }, | |
| { | |
| "epoch": 3.5049928673323825, | |
| "grad_norm": 0.06276754289865494, | |
| "learning_rate": 2.090398373093404e-06, | |
| "loss": 0.0004, | |
| "step": 817 | |
| }, | |
| { | |
| "epoch": 3.5092724679029956, | |
| "grad_norm": 0.04043642431497574, | |
| "learning_rate": 2.084430511352988e-06, | |
| "loss": 0.0001, | |
| "step": 818 | |
| }, | |
| { | |
| "epoch": 3.5135520684736092, | |
| "grad_norm": 0.06508395075798035, | |
| "learning_rate": 2.0784650840230152e-06, | |
| "loss": 0.0003, | |
| "step": 819 | |
| }, | |
| { | |
| "epoch": 3.5178316690442224, | |
| "grad_norm": 0.024406511336565018, | |
| "learning_rate": 2.072502126049022e-06, | |
| "loss": 0.0001, | |
| "step": 820 | |
| }, | |
| { | |
| "epoch": 3.522111269614836, | |
| "grad_norm": 0.0572439581155777, | |
| "learning_rate": 2.066541672362078e-06, | |
| "loss": 0.0003, | |
| "step": 821 | |
| }, | |
| { | |
| "epoch": 3.526390870185449, | |
| "grad_norm": 0.1223699077963829, | |
| "learning_rate": 2.060583757878584e-06, | |
| "loss": 0.0008, | |
| "step": 822 | |
| }, | |
| { | |
| "epoch": 3.5306704707560628, | |
| "grad_norm": 0.04345296695828438, | |
| "learning_rate": 2.054628417500066e-06, | |
| "loss": 0.0002, | |
| "step": 823 | |
| }, | |
| { | |
| "epoch": 3.5349500713266764, | |
| "grad_norm": 0.0399150550365448, | |
| "learning_rate": 2.0486756861129714e-06, | |
| "loss": 0.0003, | |
| "step": 824 | |
| }, | |
| { | |
| "epoch": 3.5392296718972895, | |
| "grad_norm": 0.045240480452775955, | |
| "learning_rate": 2.042725598588462e-06, | |
| "loss": 0.0002, | |
| "step": 825 | |
| }, | |
| { | |
| "epoch": 3.543509272467903, | |
| "grad_norm": 0.13573598861694336, | |
| "learning_rate": 2.0367781897822147e-06, | |
| "loss": 0.0009, | |
| "step": 826 | |
| }, | |
| { | |
| "epoch": 3.5477888730385163, | |
| "grad_norm": 0.2721225321292877, | |
| "learning_rate": 2.0308334945342117e-06, | |
| "loss": 0.0007, | |
| "step": 827 | |
| }, | |
| { | |
| "epoch": 3.55206847360913, | |
| "grad_norm": 0.0812113881111145, | |
| "learning_rate": 2.0248915476685405e-06, | |
| "loss": 0.0003, | |
| "step": 828 | |
| }, | |
| { | |
| "epoch": 3.556348074179743, | |
| "grad_norm": 0.04231754690408707, | |
| "learning_rate": 2.018952383993189e-06, | |
| "loss": 0.0002, | |
| "step": 829 | |
| }, | |
| { | |
| "epoch": 3.5606276747503567, | |
| "grad_norm": 0.037850312888622284, | |
| "learning_rate": 2.0130160382998403e-06, | |
| "loss": 0.0002, | |
| "step": 830 | |
| }, | |
| { | |
| "epoch": 3.5649072753209703, | |
| "grad_norm": 0.07878874987363815, | |
| "learning_rate": 2.0070825453636705e-06, | |
| "loss": 0.0003, | |
| "step": 831 | |
| }, | |
| { | |
| "epoch": 3.5691868758915835, | |
| "grad_norm": 0.0881073847413063, | |
| "learning_rate": 2.001151939943144e-06, | |
| "loss": 0.0006, | |
| "step": 832 | |
| }, | |
| { | |
| "epoch": 3.5734664764621966, | |
| "grad_norm": 0.1115725189447403, | |
| "learning_rate": 1.9952242567798076e-06, | |
| "loss": 0.0007, | |
| "step": 833 | |
| }, | |
| { | |
| "epoch": 3.5777460770328102, | |
| "grad_norm": 0.12267230451107025, | |
| "learning_rate": 1.989299530598094e-06, | |
| "loss": 0.0003, | |
| "step": 834 | |
| }, | |
| { | |
| "epoch": 3.582025677603424, | |
| "grad_norm": 0.011967863887548447, | |
| "learning_rate": 1.9833777961051107e-06, | |
| "loss": 0.0001, | |
| "step": 835 | |
| }, | |
| { | |
| "epoch": 3.586305278174037, | |
| "grad_norm": 0.041765160858631134, | |
| "learning_rate": 1.977459087990441e-06, | |
| "loss": 0.0002, | |
| "step": 836 | |
| }, | |
| { | |
| "epoch": 3.5905848787446506, | |
| "grad_norm": 0.1046781986951828, | |
| "learning_rate": 1.9715434409259393e-06, | |
| "loss": 0.0005, | |
| "step": 837 | |
| }, | |
| { | |
| "epoch": 3.5948644793152638, | |
| "grad_norm": 0.005457639694213867, | |
| "learning_rate": 1.965630889565529e-06, | |
| "loss": 0.0001, | |
| "step": 838 | |
| }, | |
| { | |
| "epoch": 3.5991440798858774, | |
| "grad_norm": 0.11675374954938889, | |
| "learning_rate": 1.959721468544998e-06, | |
| "loss": 0.0008, | |
| "step": 839 | |
| }, | |
| { | |
| "epoch": 3.6034236804564905, | |
| "grad_norm": 0.009887585416436195, | |
| "learning_rate": 1.9538152124817953e-06, | |
| "loss": 0.0001, | |
| "step": 840 | |
| }, | |
| { | |
| "epoch": 3.607703281027104, | |
| "grad_norm": 0.0734240785241127, | |
| "learning_rate": 1.9479121559748326e-06, | |
| "loss": 0.0002, | |
| "step": 841 | |
| }, | |
| { | |
| "epoch": 3.6119828815977177, | |
| "grad_norm": 0.101195327937603, | |
| "learning_rate": 1.942012333604278e-06, | |
| "loss": 0.0003, | |
| "step": 842 | |
| }, | |
| { | |
| "epoch": 3.616262482168331, | |
| "grad_norm": 0.07921625673770905, | |
| "learning_rate": 1.936115779931351e-06, | |
| "loss": 0.0003, | |
| "step": 843 | |
| }, | |
| { | |
| "epoch": 3.6205420827389445, | |
| "grad_norm": 0.025934092700481415, | |
| "learning_rate": 1.9302225294981274e-06, | |
| "loss": 0.0001, | |
| "step": 844 | |
| }, | |
| { | |
| "epoch": 3.6248216833095577, | |
| "grad_norm": 0.15052072703838348, | |
| "learning_rate": 1.924332616827329e-06, | |
| "loss": 0.0009, | |
| "step": 845 | |
| }, | |
| { | |
| "epoch": 3.6291012838801713, | |
| "grad_norm": 0.1430654525756836, | |
| "learning_rate": 1.9184460764221256e-06, | |
| "loss": 0.0004, | |
| "step": 846 | |
| }, | |
| { | |
| "epoch": 3.6333808844507844, | |
| "grad_norm": 0.2488204538822174, | |
| "learning_rate": 1.9125629427659344e-06, | |
| "loss": 0.0004, | |
| "step": 847 | |
| }, | |
| { | |
| "epoch": 3.637660485021398, | |
| "grad_norm": 0.011839679442346096, | |
| "learning_rate": 1.906683250322213e-06, | |
| "loss": 0.0001, | |
| "step": 848 | |
| }, | |
| { | |
| "epoch": 3.6419400855920117, | |
| "grad_norm": 1.136078119277954, | |
| "learning_rate": 1.9008070335342631e-06, | |
| "loss": 0.0002, | |
| "step": 849 | |
| }, | |
| { | |
| "epoch": 3.646219686162625, | |
| "grad_norm": 0.014907733537256718, | |
| "learning_rate": 1.894934326825023e-06, | |
| "loss": 0.0001, | |
| "step": 850 | |
| }, | |
| { | |
| "epoch": 3.650499286733238, | |
| "grad_norm": 0.04459215700626373, | |
| "learning_rate": 1.8890651645968727e-06, | |
| "loss": 0.0002, | |
| "step": 851 | |
| }, | |
| { | |
| "epoch": 3.6547788873038516, | |
| "grad_norm": 0.016584349796175957, | |
| "learning_rate": 1.8831995812314247e-06, | |
| "loss": 0.0001, | |
| "step": 852 | |
| }, | |
| { | |
| "epoch": 3.659058487874465, | |
| "grad_norm": 0.1303810477256775, | |
| "learning_rate": 1.8773376110893276e-06, | |
| "loss": 0.0004, | |
| "step": 853 | |
| }, | |
| { | |
| "epoch": 3.6633380884450784, | |
| "grad_norm": 0.11380203813314438, | |
| "learning_rate": 1.8714792885100657e-06, | |
| "loss": 0.0007, | |
| "step": 854 | |
| }, | |
| { | |
| "epoch": 3.667617689015692, | |
| "grad_norm": 0.06985165923833847, | |
| "learning_rate": 1.8656246478117529e-06, | |
| "loss": 0.0003, | |
| "step": 855 | |
| }, | |
| { | |
| "epoch": 3.671897289586305, | |
| "grad_norm": 0.005729840602725744, | |
| "learning_rate": 1.859773723290938e-06, | |
| "loss": 0.0001, | |
| "step": 856 | |
| }, | |
| { | |
| "epoch": 3.6761768901569187, | |
| "grad_norm": 0.00528704933822155, | |
| "learning_rate": 1.8539265492223978e-06, | |
| "loss": 0.0001, | |
| "step": 857 | |
| }, | |
| { | |
| "epoch": 3.680456490727532, | |
| "grad_norm": 0.019035734236240387, | |
| "learning_rate": 1.8480831598589384e-06, | |
| "loss": 0.0001, | |
| "step": 858 | |
| }, | |
| { | |
| "epoch": 3.6847360912981455, | |
| "grad_norm": 0.00807551946491003, | |
| "learning_rate": 1.8422435894311973e-06, | |
| "loss": 0.0001, | |
| "step": 859 | |
| }, | |
| { | |
| "epoch": 3.689015691868759, | |
| "grad_norm": 0.02676750347018242, | |
| "learning_rate": 1.8364078721474393e-06, | |
| "loss": 0.0001, | |
| "step": 860 | |
| }, | |
| { | |
| "epoch": 3.6932952924393723, | |
| "grad_norm": 0.06462167203426361, | |
| "learning_rate": 1.8305760421933577e-06, | |
| "loss": 0.0002, | |
| "step": 861 | |
| }, | |
| { | |
| "epoch": 3.697574893009986, | |
| "grad_norm": 0.1425905078649521, | |
| "learning_rate": 1.8247481337318745e-06, | |
| "loss": 0.0008, | |
| "step": 862 | |
| }, | |
| { | |
| "epoch": 3.701854493580599, | |
| "grad_norm": 0.05638245865702629, | |
| "learning_rate": 1.8189241809029387e-06, | |
| "loss": 0.0002, | |
| "step": 863 | |
| }, | |
| { | |
| "epoch": 3.7061340941512126, | |
| "grad_norm": 0.056581661105155945, | |
| "learning_rate": 1.813104217823326e-06, | |
| "loss": 0.0002, | |
| "step": 864 | |
| }, | |
| { | |
| "epoch": 3.710413694721826, | |
| "grad_norm": 0.013416646979749203, | |
| "learning_rate": 1.8072882785864431e-06, | |
| "loss": 0.0001, | |
| "step": 865 | |
| }, | |
| { | |
| "epoch": 3.7146932952924394, | |
| "grad_norm": 0.009981341660022736, | |
| "learning_rate": 1.8014763972621218e-06, | |
| "loss": 0.0001, | |
| "step": 866 | |
| }, | |
| { | |
| "epoch": 3.718972895863053, | |
| "grad_norm": 0.06614923477172852, | |
| "learning_rate": 1.7956686078964257e-06, | |
| "loss": 0.0002, | |
| "step": 867 | |
| }, | |
| { | |
| "epoch": 3.723252496433666, | |
| "grad_norm": 0.031955234706401825, | |
| "learning_rate": 1.7898649445114452e-06, | |
| "loss": 0.0001, | |
| "step": 868 | |
| }, | |
| { | |
| "epoch": 3.7275320970042793, | |
| "grad_norm": 0.003975023049861193, | |
| "learning_rate": 1.7840654411051023e-06, | |
| "loss": 0.0, | |
| "step": 869 | |
| }, | |
| { | |
| "epoch": 3.731811697574893, | |
| "grad_norm": 0.009860747493803501, | |
| "learning_rate": 1.7782701316509482e-06, | |
| "loss": 0.0, | |
| "step": 870 | |
| }, | |
| { | |
| "epoch": 3.7360912981455066, | |
| "grad_norm": 0.052486445754766464, | |
| "learning_rate": 1.7724790500979663e-06, | |
| "loss": 0.0004, | |
| "step": 871 | |
| }, | |
| { | |
| "epoch": 3.7403708987161197, | |
| "grad_norm": 0.042078789323568344, | |
| "learning_rate": 1.7666922303703748e-06, | |
| "loss": 0.0001, | |
| "step": 872 | |
| }, | |
| { | |
| "epoch": 3.7446504992867333, | |
| "grad_norm": 1.635599970817566, | |
| "learning_rate": 1.760909706367423e-06, | |
| "loss": 0.0034, | |
| "step": 873 | |
| }, | |
| { | |
| "epoch": 3.7489300998573465, | |
| "grad_norm": 0.11844348162412643, | |
| "learning_rate": 1.7551315119631993e-06, | |
| "loss": 0.0007, | |
| "step": 874 | |
| }, | |
| { | |
| "epoch": 3.75320970042796, | |
| "grad_norm": 0.020422102883458138, | |
| "learning_rate": 1.7493576810064267e-06, | |
| "loss": 0.0001, | |
| "step": 875 | |
| }, | |
| { | |
| "epoch": 3.7574893009985733, | |
| "grad_norm": 0.004928403068333864, | |
| "learning_rate": 1.7435882473202665e-06, | |
| "loss": 0.0, | |
| "step": 876 | |
| }, | |
| { | |
| "epoch": 3.761768901569187, | |
| "grad_norm": 0.06713037937879562, | |
| "learning_rate": 1.7378232447021238e-06, | |
| "loss": 0.0004, | |
| "step": 877 | |
| }, | |
| { | |
| "epoch": 3.7660485021398005, | |
| "grad_norm": 0.013741104863584042, | |
| "learning_rate": 1.732062706923443e-06, | |
| "loss": 0.0001, | |
| "step": 878 | |
| }, | |
| { | |
| "epoch": 3.7703281027104136, | |
| "grad_norm": 0.038089584559202194, | |
| "learning_rate": 1.7263066677295171e-06, | |
| "loss": 0.0001, | |
| "step": 879 | |
| }, | |
| { | |
| "epoch": 3.7746077032810272, | |
| "grad_norm": 0.21894338726997375, | |
| "learning_rate": 1.7205551608392822e-06, | |
| "loss": 0.0007, | |
| "step": 880 | |
| }, | |
| { | |
| "epoch": 3.7788873038516404, | |
| "grad_norm": 0.07800963521003723, | |
| "learning_rate": 1.7148082199451288e-06, | |
| "loss": 0.0005, | |
| "step": 881 | |
| }, | |
| { | |
| "epoch": 3.783166904422254, | |
| "grad_norm": 0.016615545377135277, | |
| "learning_rate": 1.7090658787126945e-06, | |
| "loss": 0.0001, | |
| "step": 882 | |
| }, | |
| { | |
| "epoch": 3.787446504992867, | |
| "grad_norm": 0.01663312129676342, | |
| "learning_rate": 1.7033281707806761e-06, | |
| "loss": 0.0001, | |
| "step": 883 | |
| }, | |
| { | |
| "epoch": 3.7917261055634808, | |
| "grad_norm": 0.06620707362890244, | |
| "learning_rate": 1.697595129760627e-06, | |
| "loss": 0.0003, | |
| "step": 884 | |
| }, | |
| { | |
| "epoch": 3.7960057061340944, | |
| "grad_norm": 0.08067363500595093, | |
| "learning_rate": 1.6918667892367622e-06, | |
| "loss": 0.0003, | |
| "step": 885 | |
| }, | |
| { | |
| "epoch": 3.8002853067047075, | |
| "grad_norm": 0.04012581333518028, | |
| "learning_rate": 1.68614318276576e-06, | |
| "loss": 0.0002, | |
| "step": 886 | |
| }, | |
| { | |
| "epoch": 3.8045649072753207, | |
| "grad_norm": 0.005692005157470703, | |
| "learning_rate": 1.680424343876569e-06, | |
| "loss": 0.0001, | |
| "step": 887 | |
| }, | |
| { | |
| "epoch": 3.8088445078459343, | |
| "grad_norm": 1.3083586692810059, | |
| "learning_rate": 1.674710306070206e-06, | |
| "loss": 0.001, | |
| "step": 888 | |
| }, | |
| { | |
| "epoch": 3.813124108416548, | |
| "grad_norm": 0.07172267138957977, | |
| "learning_rate": 1.669001102819565e-06, | |
| "loss": 0.0003, | |
| "step": 889 | |
| }, | |
| { | |
| "epoch": 3.817403708987161, | |
| "grad_norm": 0.012305958196520805, | |
| "learning_rate": 1.6632967675692193e-06, | |
| "loss": 0.0001, | |
| "step": 890 | |
| }, | |
| { | |
| "epoch": 3.8216833095577747, | |
| "grad_norm": 0.07895144820213318, | |
| "learning_rate": 1.6575973337352244e-06, | |
| "loss": 0.0002, | |
| "step": 891 | |
| }, | |
| { | |
| "epoch": 3.8259629101283883, | |
| "grad_norm": 0.007915760390460491, | |
| "learning_rate": 1.6519028347049242e-06, | |
| "loss": 0.0001, | |
| "step": 892 | |
| }, | |
| { | |
| "epoch": 3.8302425106990015, | |
| "grad_norm": 0.08689282089471817, | |
| "learning_rate": 1.6462133038367547e-06, | |
| "loss": 0.0004, | |
| "step": 893 | |
| }, | |
| { | |
| "epoch": 3.8345221112696146, | |
| "grad_norm": 0.08332937210798264, | |
| "learning_rate": 1.6405287744600456e-06, | |
| "loss": 0.0003, | |
| "step": 894 | |
| }, | |
| { | |
| "epoch": 3.8388017118402282, | |
| "grad_norm": 0.03776445984840393, | |
| "learning_rate": 1.6348492798748317e-06, | |
| "loss": 0.0001, | |
| "step": 895 | |
| }, | |
| { | |
| "epoch": 3.843081312410842, | |
| "grad_norm": 0.120399609208107, | |
| "learning_rate": 1.629174853351651e-06, | |
| "loss": 0.0007, | |
| "step": 896 | |
| }, | |
| { | |
| "epoch": 3.847360912981455, | |
| "grad_norm": 0.026734810322523117, | |
| "learning_rate": 1.623505528131355e-06, | |
| "loss": 0.0001, | |
| "step": 897 | |
| }, | |
| { | |
| "epoch": 3.8516405135520686, | |
| "grad_norm": 0.08207985013723373, | |
| "learning_rate": 1.6178413374249091e-06, | |
| "loss": 0.0002, | |
| "step": 898 | |
| }, | |
| { | |
| "epoch": 3.8559201141226818, | |
| "grad_norm": 0.0245340708643198, | |
| "learning_rate": 1.6121823144132036e-06, | |
| "loss": 0.0001, | |
| "step": 899 | |
| }, | |
| { | |
| "epoch": 3.8601997146932954, | |
| "grad_norm": 0.015675395727157593, | |
| "learning_rate": 1.6065284922468548e-06, | |
| "loss": 0.0001, | |
| "step": 900 | |
| }, | |
| { | |
| "epoch": 3.8644793152639085, | |
| "grad_norm": 0.020896075293421745, | |
| "learning_rate": 1.6008799040460112e-06, | |
| "loss": 0.0001, | |
| "step": 901 | |
| }, | |
| { | |
| "epoch": 3.868758915834522, | |
| "grad_norm": 0.015819240361452103, | |
| "learning_rate": 1.5952365829001632e-06, | |
| "loss": 0.0001, | |
| "step": 902 | |
| }, | |
| { | |
| "epoch": 3.8730385164051357, | |
| "grad_norm": 0.12879550457000732, | |
| "learning_rate": 1.5895985618679447e-06, | |
| "loss": 0.0003, | |
| "step": 903 | |
| }, | |
| { | |
| "epoch": 3.877318116975749, | |
| "grad_norm": 0.01913272775709629, | |
| "learning_rate": 1.5839658739769432e-06, | |
| "loss": 0.0001, | |
| "step": 904 | |
| }, | |
| { | |
| "epoch": 3.881597717546362, | |
| "grad_norm": 0.055048394948244095, | |
| "learning_rate": 1.5783385522235029e-06, | |
| "loss": 0.0002, | |
| "step": 905 | |
| }, | |
| { | |
| "epoch": 3.8858773181169757, | |
| "grad_norm": 0.09778667241334915, | |
| "learning_rate": 1.5727166295725348e-06, | |
| "loss": 0.0004, | |
| "step": 906 | |
| }, | |
| { | |
| "epoch": 3.8901569186875893, | |
| "grad_norm": 0.16803383827209473, | |
| "learning_rate": 1.5671001389573199e-06, | |
| "loss": 0.0009, | |
| "step": 907 | |
| }, | |
| { | |
| "epoch": 3.8944365192582024, | |
| "grad_norm": 0.06138642504811287, | |
| "learning_rate": 1.5614891132793205e-06, | |
| "loss": 0.0002, | |
| "step": 908 | |
| }, | |
| { | |
| "epoch": 3.898716119828816, | |
| "grad_norm": 0.21488219499588013, | |
| "learning_rate": 1.5558835854079832e-06, | |
| "loss": 0.0012, | |
| "step": 909 | |
| }, | |
| { | |
| "epoch": 3.9029957203994297, | |
| "grad_norm": 0.027981458231806755, | |
| "learning_rate": 1.5502835881805501e-06, | |
| "loss": 0.0001, | |
| "step": 910 | |
| }, | |
| { | |
| "epoch": 3.907275320970043, | |
| "grad_norm": 0.015422360971570015, | |
| "learning_rate": 1.5446891544018632e-06, | |
| "loss": 0.0001, | |
| "step": 911 | |
| }, | |
| { | |
| "epoch": 3.911554921540656, | |
| "grad_norm": 0.04238557070493698, | |
| "learning_rate": 1.539100316844176e-06, | |
| "loss": 0.0002, | |
| "step": 912 | |
| }, | |
| { | |
| "epoch": 3.9158345221112696, | |
| "grad_norm": 0.048075973987579346, | |
| "learning_rate": 1.5335171082469563e-06, | |
| "loss": 0.0003, | |
| "step": 913 | |
| }, | |
| { | |
| "epoch": 3.920114122681883, | |
| "grad_norm": 0.05744335800409317, | |
| "learning_rate": 1.5279395613166987e-06, | |
| "loss": 0.0001, | |
| "step": 914 | |
| }, | |
| { | |
| "epoch": 3.9243937232524964, | |
| "grad_norm": 0.03021991066634655, | |
| "learning_rate": 1.522367708726733e-06, | |
| "loss": 0.0001, | |
| "step": 915 | |
| }, | |
| { | |
| "epoch": 3.92867332382311, | |
| "grad_norm": 0.00741999875754118, | |
| "learning_rate": 1.5168015831170294e-06, | |
| "loss": 0.0001, | |
| "step": 916 | |
| }, | |
| { | |
| "epoch": 3.932952924393723, | |
| "grad_norm": 0.04541436955332756, | |
| "learning_rate": 1.5112412170940113e-06, | |
| "loss": 0.0001, | |
| "step": 917 | |
| }, | |
| { | |
| "epoch": 3.9372325249643367, | |
| "grad_norm": 0.4240614175796509, | |
| "learning_rate": 1.5056866432303618e-06, | |
| "loss": 0.0025, | |
| "step": 918 | |
| }, | |
| { | |
| "epoch": 3.94151212553495, | |
| "grad_norm": 0.03267436847090721, | |
| "learning_rate": 1.500137894064831e-06, | |
| "loss": 0.0001, | |
| "step": 919 | |
| }, | |
| { | |
| "epoch": 3.9457917261055635, | |
| "grad_norm": 0.08648688346147537, | |
| "learning_rate": 1.4945950021020521e-06, | |
| "loss": 0.0002, | |
| "step": 920 | |
| }, | |
| { | |
| "epoch": 3.950071326676177, | |
| "grad_norm": 0.16156277060508728, | |
| "learning_rate": 1.489057999812343e-06, | |
| "loss": 0.0006, | |
| "step": 921 | |
| }, | |
| { | |
| "epoch": 3.9543509272467903, | |
| "grad_norm": 0.04951700568199158, | |
| "learning_rate": 1.483526919631523e-06, | |
| "loss": 0.0002, | |
| "step": 922 | |
| }, | |
| { | |
| "epoch": 3.9586305278174034, | |
| "grad_norm": 0.16433213651180267, | |
| "learning_rate": 1.4780017939607165e-06, | |
| "loss": 0.0006, | |
| "step": 923 | |
| }, | |
| { | |
| "epoch": 3.962910128388017, | |
| "grad_norm": 0.008078474551439285, | |
| "learning_rate": 1.4724826551661702e-06, | |
| "loss": 0.0001, | |
| "step": 924 | |
| }, | |
| { | |
| "epoch": 3.9671897289586306, | |
| "grad_norm": 0.014672919176518917, | |
| "learning_rate": 1.4669695355790552e-06, | |
| "loss": 0.0001, | |
| "step": 925 | |
| }, | |
| { | |
| "epoch": 3.971469329529244, | |
| "grad_norm": 0.06230577826499939, | |
| "learning_rate": 1.4614624674952843e-06, | |
| "loss": 0.0003, | |
| "step": 926 | |
| }, | |
| { | |
| "epoch": 3.9757489300998574, | |
| "grad_norm": 0.012285399250686169, | |
| "learning_rate": 1.4559614831753208e-06, | |
| "loss": 0.0001, | |
| "step": 927 | |
| }, | |
| { | |
| "epoch": 3.980028530670471, | |
| "grad_norm": 0.073011614382267, | |
| "learning_rate": 1.450466614843989e-06, | |
| "loss": 0.0003, | |
| "step": 928 | |
| }, | |
| { | |
| "epoch": 3.984308131241084, | |
| "grad_norm": 0.2925986349582672, | |
| "learning_rate": 1.444977894690286e-06, | |
| "loss": 0.0012, | |
| "step": 929 | |
| }, | |
| { | |
| "epoch": 3.9885877318116973, | |
| "grad_norm": 0.055506620556116104, | |
| "learning_rate": 1.4394953548671909e-06, | |
| "loss": 0.0002, | |
| "step": 930 | |
| }, | |
| { | |
| "epoch": 3.992867332382311, | |
| "grad_norm": 0.006805168464779854, | |
| "learning_rate": 1.434019027491481e-06, | |
| "loss": 0.0001, | |
| "step": 931 | |
| }, | |
| { | |
| "epoch": 3.9971469329529246, | |
| "grad_norm": 0.010725623928010464, | |
| "learning_rate": 1.4285489446435388e-06, | |
| "loss": 0.0001, | |
| "step": 932 | |
| } | |
| ], | |
| "logging_steps": 1, | |
| "max_steps": 1398, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 6, | |
| "save_steps": 233, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": false | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 2.0900534230083174e+18, | |
| "train_batch_size": 4, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |