{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "global_step": 211,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.0, "learning_rate": 2.8571428571428573e-06, "loss": 1.8875, "step": 1 },
    { "epoch": 0.01, "learning_rate": 5.7142857142857145e-06, "loss": 1.8643, "step": 2 },
    { "epoch": 0.01, "learning_rate": 8.571428571428571e-06, "loss": 1.8896, "step": 3 },
    { "epoch": 0.02, "learning_rate": 1.1428571428571429e-05, "loss": 1.6971, "step": 4 },
    { "epoch": 0.02, "learning_rate": 1.4285714285714287e-05, "loss": 1.5649, "step": 5 },
    { "epoch": 0.03, "learning_rate": 1.7142857142857142e-05, "loss": 1.6123, "step": 6 },
    { "epoch": 0.03, "learning_rate": 2e-05, "loss": 1.5796, "step": 7 },
    { "epoch": 0.04, "learning_rate": 1.999881422898077e-05, "loss": 1.5678, "step": 8 },
    { "epoch": 0.04, "learning_rate": 1.999525719713366e-05, "loss": 1.5245, "step": 9 },
    { "epoch": 0.05, "learning_rate": 1.9989329748023728e-05, "loss": 1.5341, "step": 10 },
    { "epoch": 0.05, "learning_rate": 1.9981033287370443e-05, "loss": 1.4827, "step": 11 },
    { "epoch": 0.06, "learning_rate": 1.997036978271433e-05, "loss": 1.4803, "step": 12 },
    { "epoch": 0.06, "learning_rate": 1.9957341762950346e-05, "loss": 1.4347, "step": 13 },
    { "epoch": 0.07, "learning_rate": 1.994195231772815e-05, "loss": 1.4375, "step": 14 },
    { "epoch": 0.07, "learning_rate": 1.992420509671936e-05, "loss": 1.4007, "step": 15 },
    { "epoch": 0.08, "learning_rate": 1.9904104308752053e-05, "loss": 1.416, "step": 16 },
    { "epoch": 0.08, "learning_rate": 1.9881654720812594e-05, "loss": 1.4114, "step": 17 },
    { "epoch": 0.09, "learning_rate": 1.985686165691514e-05, "loss": 1.3866, "step": 18 },
    { "epoch": 0.09, "learning_rate": 1.982973099683902e-05, "loss": 1.3905, "step": 19 },
    { "epoch": 0.09, "learning_rate": 1.980026917473432e-05, "loss": 1.3748, "step": 20 },
    { "epoch": 0.1, "learning_rate": 1.9768483177596008e-05, "loss": 1.3506, "step": 21 },
    { "epoch": 0.1, "learning_rate": 1.9734380543606932e-05, "loss": 1.3386, "step": 22 },
    { "epoch": 0.11, "learning_rate": 1.9697969360350098e-05, "loss": 1.375, "step": 23 },
    { "epoch": 0.11, "learning_rate": 1.9659258262890683e-05, "loss": 1.3462, "step": 24 },
    { "epoch": 0.12, "learning_rate": 1.961825643172819e-05, "loss": 1.3849, "step": 25 },
    { "epoch": 0.12, "learning_rate": 1.9574973590619243e-05, "loss": 1.3286, "step": 26 },
    { "epoch": 0.13, "learning_rate": 1.9529420004271568e-05, "loss": 1.3512, "step": 27 },
    { "epoch": 0.13, "learning_rate": 1.948160647590966e-05, "loss": 1.3521, "step": 28 },
    { "epoch": 0.14, "learning_rate": 1.9431544344712776e-05, "loss": 1.3662, "step": 29 },
    { "epoch": 0.14, "learning_rate": 1.9379245483125783e-05, "loss": 1.3462, "step": 30 },
    { "epoch": 0.15, "learning_rate": 1.932472229404356e-05, "loss": 1.3157, "step": 31 },
    { "epoch": 0.15, "learning_rate": 1.9267987707869605e-05, "loss": 1.3327, "step": 32 },
    { "epoch": 0.16, "learning_rate": 1.920905517944954e-05, "loss": 1.3148, "step": 33 },
    { "epoch": 0.16, "learning_rate": 1.9147938684880213e-05, "loss": 1.3175, "step": 34 },
    { "epoch": 0.17, "learning_rate": 1.9084652718195237e-05, "loss": 1.3443, "step": 35 },
    { "epoch": 0.17, "learning_rate": 1.9019212287927663e-05, "loss": 1.3115, "step": 36 },
    { "epoch": 0.18, "learning_rate": 1.8951632913550625e-05, "loss": 1.3248, "step": 37 },
    { "epoch": 0.18, "learning_rate": 1.888193062179685e-05, "loss": 1.3468, "step": 38 },
    { "epoch": 0.18, "learning_rate": 1.8810121942857848e-05, "loss": 1.3053, "step": 39 },
    { "epoch": 0.19, "learning_rate": 1.8736223906463698e-05, "loss": 1.3432, "step": 40 },
    { "epoch": 0.19, "learning_rate": 1.866025403784439e-05, "loss": 1.3188, "step": 41 },
    { "epoch": 0.2, "learning_rate": 1.8582230353573628e-05, "loss": 1.3094, "step": 42 },
    { "epoch": 0.2, "learning_rate": 1.8502171357296144e-05, "loss": 1.3215, "step": 43 },
    { "epoch": 0.21, "learning_rate": 1.8420096035339454e-05, "loss": 1.2739, "step": 44 },
    { "epoch": 0.21, "learning_rate": 1.8336023852211197e-05, "loss": 1.3058, "step": 45 },
    { "epoch": 0.22, "learning_rate": 1.8249974745983023e-05, "loss": 1.2857, "step": 46 },
    { "epoch": 0.22, "learning_rate": 1.816196912356222e-05, "loss": 1.2839, "step": 47 },
    { "epoch": 0.23, "learning_rate": 1.8072027855852098e-05, "loss": 1.2899, "step": 48 },
    { "epoch": 0.23, "learning_rate": 1.7980172272802398e-05, "loss": 1.2953, "step": 49 },
    { "epoch": 0.24, "learning_rate": 1.7886424158350784e-05, "loss": 1.2847, "step": 50 },
    { "epoch": 0.24, "learning_rate": 1.7790805745256703e-05, "loss": 1.3089, "step": 51 },
    { "epoch": 0.25, "learning_rate": 1.769333970982879e-05, "loss": 1.2763, "step": 52 },
    { "epoch": 0.25, "learning_rate": 1.7594049166547073e-05, "loss": 1.3226, "step": 53 },
    { "epoch": 0.26, "learning_rate": 1.7492957662581297e-05, "loss": 1.2884, "step": 54 },
    { "epoch": 0.26, "learning_rate": 1.7390089172206594e-05, "loss": 1.326, "step": 55 },
    { "epoch": 0.27, "learning_rate": 1.7285468091117904e-05, "loss": 1.2741, "step": 56 },
    { "epoch": 0.27, "learning_rate": 1.717911923064442e-05, "loss": 1.3115, "step": 57 },
    { "epoch": 0.27, "learning_rate": 1.7071067811865477e-05, "loss": 1.2578, "step": 58 },
    { "epoch": 0.28, "learning_rate": 1.696133945962927e-05, "loss": 1.2754, "step": 59 },
    { "epoch": 0.28, "learning_rate": 1.6849960196475808e-05, "loss": 1.3098, "step": 60 },
    { "epoch": 0.29, "learning_rate": 1.6736956436465573e-05, "loss": 1.2831, "step": 61 },
    { "epoch": 0.29, "learning_rate": 1.6622354978915306e-05, "loss": 1.2863, "step": 62 },
    { "epoch": 0.3, "learning_rate": 1.650618300204242e-05, "loss": 1.3345, "step": 63 },
    { "epoch": 0.3, "learning_rate": 1.638846805651961e-05, "loss": 1.2951, "step": 64 },
    { "epoch": 0.31, "learning_rate": 1.626923805894107e-05, "loss": 1.2884, "step": 65 },
    { "epoch": 0.31, "learning_rate": 1.6148521285201926e-05, "loss": 1.2866, "step": 66 },
    { "epoch": 0.32, "learning_rate": 1.6026346363792565e-05, "loss": 1.2499, "step": 67 },
    { "epoch": 0.32, "learning_rate": 1.5902742269009198e-05, "loss": 1.277, "step": 68 },
    { "epoch": 0.33, "learning_rate": 1.5777738314082514e-05, "loss": 1.2836, "step": 69 },
    { "epoch": 0.33, "learning_rate": 1.565136414422592e-05, "loss": 1.28, "step": 70 },
    { "epoch": 0.34, "learning_rate": 1.552364972960506e-05, "loss": 1.2747, "step": 71 },
    { "epoch": 0.34, "learning_rate": 1.5394625358230252e-05, "loss": 1.2783, "step": 72 },
    { "epoch": 0.35, "learning_rate": 1.526432162877356e-05, "loss": 1.2536, "step": 73 },
    { "epoch": 0.35, "learning_rate": 1.5132769443312207e-05, "loss": 1.2757, "step": 74 },
    { "epoch": 0.36, "learning_rate": 1.5000000000000002e-05, "loss": 1.2785, "step": 75 },
    { "epoch": 0.36, "learning_rate": 1.4866044785668563e-05, "loss": 1.239, "step": 76 },
    { "epoch": 0.36, "learning_rate": 1.4730935568360103e-05, "loss": 1.2783, "step": 77 },
    { "epoch": 0.37, "learning_rate": 1.4594704389793477e-05, "loss": 1.2945, "step": 78 },
    { "epoch": 0.37, "learning_rate": 1.4457383557765385e-05, "loss": 1.3282, "step": 79 },
    { "epoch": 0.38, "learning_rate": 1.4319005638488413e-05, "loss": 1.2503, "step": 80 },
    { "epoch": 0.38, "learning_rate": 1.4179603448867836e-05, "loss": 1.2563, "step": 81 },
    { "epoch": 0.39, "learning_rate": 1.403921004871895e-05, "loss": 1.2832, "step": 82 },
    { "epoch": 0.39, "learning_rate": 1.3897858732926794e-05, "loss": 1.2885, "step": 83 },
    { "epoch": 0.4, "learning_rate": 1.3755583023550128e-05, "loss": 1.2307, "step": 84 },
    { "epoch": 0.4, "learning_rate": 1.3612416661871532e-05, "loss": 1.2627, "step": 85 },
    { "epoch": 0.41, "learning_rate": 1.3468393600395526e-05, "loss": 1.2693, "step": 86 },
    { "epoch": 0.41, "learning_rate": 1.3323547994796597e-05, "loss": 1.2647, "step": 87 },
    { "epoch": 0.42, "learning_rate": 1.3177914195819018e-05, "loss": 1.2946, "step": 88 },
    { "epoch": 0.42, "learning_rate": 1.3031526741130435e-05, "loss": 1.2575, "step": 89 },
    { "epoch": 0.43, "learning_rate": 1.2884420347131123e-05, "loss": 1.2744, "step": 90 },
    { "epoch": 0.43, "learning_rate": 1.2736629900720832e-05, "loss": 1.2949, "step": 91 },
    { "epoch": 0.44, "learning_rate": 1.2588190451025209e-05, "loss": 1.2619, "step": 92 },
    { "epoch": 0.44, "learning_rate": 1.2439137201083772e-05, "loss": 1.2577, "step": 93 },
    { "epoch": 0.45, "learning_rate": 1.2289505499501341e-05, "loss": 1.2875, "step": 94 },
    { "epoch": 0.45, "learning_rate": 1.2139330832064975e-05, "loss": 1.2539, "step": 95 },
    { "epoch": 0.45, "learning_rate": 1.1988648813328368e-05, "loss": 1.2455, "step": 96 },
    { "epoch": 0.46, "learning_rate": 1.1837495178165706e-05, "loss": 1.278, "step": 97 },
    { "epoch": 0.46, "learning_rate": 1.1685905773296992e-05, "loss": 1.19, "step": 98 },
    { "epoch": 0.47, "learning_rate": 1.1533916548786856e-05, "loss": 1.2884, "step": 99 },
    { "epoch": 0.47, "learning_rate": 1.1381563549518823e-05, "loss": 1.2803, "step": 100 },
    { "epoch": 0.48, "learning_rate": 1.1228882906647142e-05, "loss": 1.2633, "step": 101 },
    { "epoch": 0.48, "learning_rate": 1.1075910829028116e-05, "loss": 1.2803, "step": 102 },
    { "epoch": 0.49, "learning_rate": 1.092268359463302e-05, "loss": 1.2648, "step": 103 },
    { "epoch": 0.49, "learning_rate": 1.0769237541944639e-05, "loss": 1.2875, "step": 104 },
    { "epoch": 0.5, "learning_rate": 1.0615609061339431e-05, "loss": 1.296, "step": 105 },
    { "epoch": 0.5, "learning_rate": 1.0461834586457398e-05, "loss": 1.2473, "step": 106 },
    { "epoch": 0.51, "learning_rate": 1.0307950585561705e-05, "loss": 1.2367, "step": 107 },
    { "epoch": 0.51, "learning_rate": 1.0153993552890069e-05, "loss": 1.2594, "step": 108 },
    { "epoch": 0.52, "learning_rate": 1e-05, "loss": 1.2455, "step": 109 },
    { "epoch": 0.52, "learning_rate": 9.846006447109934e-06, "loss": 1.2389, "step": 110 },
    { "epoch": 0.53, "learning_rate": 9.692049414438298e-06, "loss": 1.2345, "step": 111 },
    { "epoch": 0.53, "learning_rate": 9.538165413542607e-06, "loss": 1.2333, "step": 112 },
    { "epoch": 0.54, "learning_rate": 9.384390938660572e-06, "loss": 1.2659, "step": 113 },
    { "epoch": 0.54, "learning_rate": 9.230762458055363e-06, "loss": 1.255, "step": 114 },
    { "epoch": 0.55, "learning_rate": 9.07731640536698e-06, "loss": 1.2748, "step": 115 },
    { "epoch": 0.55, "learning_rate": 8.924089170971887e-06, "loss": 1.236, "step": 116 },
    { "epoch": 0.55, "learning_rate": 8.771117093352861e-06, "loss": 1.2205, "step": 117 },
    { "epoch": 0.56, "learning_rate": 8.618436450481182e-06, "loss": 1.2642, "step": 118 },
    { "epoch": 0.56, "learning_rate": 8.466083451213145e-06, "loss": 1.2446, "step": 119 },
    { "epoch": 0.57, "learning_rate": 8.314094226703007e-06, "loss": 1.2335, "step": 120 },
    { "epoch": 0.57, "learning_rate": 8.162504821834296e-06, "loss": 1.2341, "step": 121 },
    { "epoch": 0.58, "learning_rate": 8.011351186671637e-06, "loss": 1.2627, "step": 122 },
    { "epoch": 0.58, "learning_rate": 7.860669167935028e-06, "loss": 1.2627, "step": 123 },
    { "epoch": 0.59, "learning_rate": 7.710494500498662e-06, "loss": 1.2629, "step": 124 },
    { "epoch": 0.59, "learning_rate": 7.560862798916229e-06, "loss": 1.2712, "step": 125 },
    { "epoch": 0.6, "learning_rate": 7.411809548974792e-06, "loss": 1.2291, "step": 126 },
    { "epoch": 0.6, "learning_rate": 7.263370099279173e-06, "loss": 1.2256, "step": 127 },
    { "epoch": 0.61, "learning_rate": 7.115579652868878e-06, "loss": 1.2618, "step": 128 },
    { "epoch": 0.61, "learning_rate": 6.968473258869566e-06, "loss": 1.2276, "step": 129 },
    { "epoch": 0.62, "learning_rate": 6.822085804180985e-06, "loss": 1.2346, "step": 130 },
    { "epoch": 0.62, "learning_rate": 6.6764520052034054e-06, "loss": 1.2408, "step": 131 },
    { "epoch": 0.63, "learning_rate": 6.5316063996044735e-06, "loss": 1.2374, "step": 132 },
    { "epoch": 0.63, "learning_rate": 6.387583338128471e-06, "loss": 1.2408, "step": 133 },
    { "epoch": 0.64, "learning_rate": 6.244416976449875e-06, "loss": 1.2408, "step": 134 },
    { "epoch": 0.64, "learning_rate": 6.102141267073207e-06, "loss": 1.2389, "step": 135 },
    { "epoch": 0.64, "learning_rate": 5.960789951281052e-06, "loss": 1.264, "step": 136 },
    { "epoch": 0.65, "learning_rate": 5.82039655113217e-06, "loss": 1.2144, "step": 137 },
    { "epoch": 0.65, "learning_rate": 5.6809943615115915e-06, "loss": 1.2186, "step": 138 },
    { "epoch": 0.66, "learning_rate": 5.542616442234618e-06, "loss": 1.2578, "step": 139 },
    { "epoch": 0.66, "learning_rate": 5.405295610206525e-06, "loss": 1.2285, "step": 140 },
    { "epoch": 0.67, "learning_rate": 5.269064431639901e-06, "loss": 1.2604, "step": 141 },
    { "epoch": 0.67, "learning_rate": 5.133955214331439e-06, "loss": 1.2013, "step": 142 },
    { "epoch": 0.68, "learning_rate": 5.000000000000003e-06, "loss": 1.248, "step": 143 },
    { "epoch": 0.68, "learning_rate": 4.867230556687797e-06, "loss": 1.2354, "step": 144 },
    { "epoch": 0.69, "learning_rate": 4.7356783712264405e-06, "loss": 1.2128, "step": 145 },
    { "epoch": 0.69, "learning_rate": 4.605374641769752e-06, "loss": 1.2409, "step": 146 },
    { "epoch": 0.7, "learning_rate": 4.476350270394942e-06, "loss": 1.2855, "step": 147 },
    { "epoch": 0.7, "learning_rate": 4.348635855774082e-06, "loss": 1.2478, "step": 148 },
    { "epoch": 0.71, "learning_rate": 4.222261685917489e-06, "loss": 1.2466, "step": 149 },
    { "epoch": 0.71, "learning_rate": 4.097257730990806e-06, "loss": 1.2434, "step": 150 },
    { "epoch": 0.72, "learning_rate": 3.973653636207437e-06, "loss": 1.2537, "step": 151 },
    { "epoch": 0.72, "learning_rate": 3.851478714798076e-06, "loss": 1.2391, "step": 152 },
    { "epoch": 0.73, "learning_rate": 3.730761941058938e-06, "loss": 1.2721, "step": 153 },
    { "epoch": 0.73, "learning_rate": 3.6115319434803897e-06, "loss": 1.2237, "step": 154 },
    { "epoch": 0.73, "learning_rate": 3.493816997957582e-06, "loss": 1.2108, "step": 155 },
    { "epoch": 0.74, "learning_rate": 3.377645021084701e-06, "loss": 1.2401, "step": 156 },
    { "epoch": 0.74, "learning_rate": 3.2630435635344283e-06, "loss": 1.176, "step": 157 },
    { "epoch": 0.75, "learning_rate": 3.150039803524194e-06, "loss": 1.2525, "step": 158 },
    { "epoch": 0.75, "learning_rate": 3.0386605403707347e-06, "loss": 1.2273, "step": 159 },
    { "epoch": 0.76, "learning_rate": 2.9289321881345257e-06, "loss": 1.2215, "step": 160 },
    { "epoch": 0.76, "learning_rate": 2.820880769355582e-06, "loss": 1.2344, "step": 161 },
    { "epoch": 0.77, "learning_rate": 2.7145319088820986e-06, "loss": 1.2307, "step": 162 },
    { "epoch": 0.77, "learning_rate": 2.6099108277934105e-06, "loss": 1.2581, "step": 163 },
    { "epoch": 0.78, "learning_rate": 2.507042337418707e-06, "loss": 1.2203, "step": 164 },
    { "epoch": 0.78, "learning_rate": 2.405950833452928e-06, "loss": 1.2539, "step": 165 },
    { "epoch": 0.79, "learning_rate": 2.306660290171211e-06, "loss": 1.2341, "step": 166 },
    { "epoch": 0.79, "learning_rate": 2.209194254743295e-06, "loss": 1.2397, "step": 167 },
    { "epoch": 0.8, "learning_rate": 2.1135758416492168e-06, "loss": 1.2131, "step": 168 },
    { "epoch": 0.8, "learning_rate": 2.019827727197605e-06, "loss": 1.2319, "step": 169 },
    { "epoch": 0.81, "learning_rate": 1.927972144147905e-06, "loss": 1.2468, "step": 170 },
    { "epoch": 0.81, "learning_rate": 1.8380308764377841e-06, "loss": 1.217, "step": 171 },
    { "epoch": 0.82, "learning_rate": 1.7500252540169782e-06, "loss": 1.2254, "step": 172 },
    { "epoch": 0.82, "learning_rate": 1.663976147788806e-06, "loss": 1.211, "step": 173 },
    { "epoch": 0.82, "learning_rate": 1.5799039646605486e-06, "loss": 1.2048, "step": 174 },
    { "epoch": 0.83, "learning_rate": 1.4978286427038602e-06, "loss": 1.2243, "step": 175 },
    { "epoch": 0.83, "learning_rate": 1.4177696464263725e-06, "loss": 1.1779, "step": 176 },
    { "epoch": 0.84, "learning_rate": 1.339745962155613e-06, "loss": 1.2447, "step": 177 },
    { "epoch": 0.84, "learning_rate": 1.2637760935363053e-06, "loss": 1.2624, "step": 178 },
    { "epoch": 0.85, "learning_rate": 1.1898780571421554e-06, "loss": 1.2074, "step": 179 },
    { "epoch": 0.85, "learning_rate": 1.1180693782031516e-06, "loss": 1.2285, "step": 180 },
    { "epoch": 0.86, "learning_rate": 1.0483670864493777e-06, "loss": 1.2529, "step": 181 },
    { "epoch": 0.86, "learning_rate": 9.807877120723397e-07, "loss": 1.2407, "step": 182 },
    { "epoch": 0.87, "learning_rate": 9.153472818047627e-07, "loss": 1.2512, "step": 183 },
    { "epoch": 0.87, "learning_rate": 8.520613151197899e-07, "loss": 1.2288, "step": 184 },
    { "epoch": 0.88, "learning_rate": 7.909448205504633e-07, "loss": 1.2128, "step": 185 },
    { "epoch": 0.88, "learning_rate": 7.320122921303962e-07, "loss": 1.2383, "step": 186 },
    { "epoch": 0.89, "learning_rate": 6.752777059564431e-07, "loss": 1.2692, "step": 187 },
    { "epoch": 0.89, "learning_rate": 6.207545168742212e-07, "loss": 1.2128, "step": 188 },
    { "epoch": 0.9, "learning_rate": 5.684556552872256e-07, "loss": 1.2152, "step": 189 },
    { "epoch": 0.9, "learning_rate": 5.183935240903415e-07, "loss": 1.2494, "step": 190 },
    { "epoch": 0.91, "learning_rate": 4.7057999572843516e-07, "loss": 1.2492, "step": 191 },
    { "epoch": 0.91, "learning_rate": 4.2502640938075654e-07, "loss": 1.2599, "step": 192 },
    { "epoch": 0.91, "learning_rate": 3.817435682718096e-07, "loss": 1.2451, "step": 193 },
    { "epoch": 0.92, "learning_rate": 3.4074173710931804e-07, "loss": 1.2468, "step": 194 },
    { "epoch": 0.92, "learning_rate": 3.020306396499062e-07, "loss": 1.2331, "step": 195 },
    { "epoch": 0.93, "learning_rate": 2.656194563930714e-07, "loss": 1.2034, "step": 196 },
    { "epoch": 0.93, "learning_rate": 2.315168224039932e-07, "loss": 1.2083, "step": 197 },
    { "epoch": 0.94, "learning_rate": 1.9973082526568155e-07, "loss": 1.2145, "step": 198 },
    { "epoch": 0.94, "learning_rate": 1.7026900316098217e-07, "loss": 1.1815, "step": 199 },
    { "epoch": 0.95, "learning_rate": 1.4313834308486097e-07, "loss": 1.2388, "step": 200 },
    { "epoch": 0.95, "learning_rate": 1.1834527918740624e-07, "loss": 1.2485, "step": 201 },
    { "epoch": 0.96, "learning_rate": 9.589569124794918e-08, "loss": 1.2527, "step": 202 },
    { "epoch": 0.96, "learning_rate": 7.579490328064265e-08, "loss": 1.2448, "step": 203 },
    { "epoch": 0.97, "learning_rate": 5.8047682271855644e-08, "loss": 1.2123, "step": 204 },
    { "epoch": 0.97, "learning_rate": 4.2658237049655325e-08, "loss": 1.259, "step": 205 },
    { "epoch": 0.98, "learning_rate": 2.963021728567106e-08, "loss": 1.2348, "step": 206 },
    { "epoch": 0.98, "learning_rate": 1.896671262955896e-08, "loss": 1.2254, "step": 207 },
    { "epoch": 0.99, "learning_rate": 1.0670251976275803e-08, "loss": 1.2404, "step": 208 },
    { "epoch": 0.99, "learning_rate": 4.74280286634099e-09, "loss": 1.2435, "step": 209 },
    { "epoch": 1.0, "learning_rate": 1.185771019230897e-09, "loss": 1.2122, "step": 210 },
    { "epoch": 1.0, "learning_rate": 0.0, "loss": 1.2199, "step": 211 },
    { "epoch": 1.0, "step": 211, "total_flos": 167544146296832.0, "train_loss": 1.289455922294002, "train_runtime": 2388.2192, "train_samples_per_second": 9.884, "train_steps_per_second": 0.088 }
  ],
  "max_steps": 211,
  "num_train_epochs": 1,
  "total_flos": 167544146296832.0,
  "trial_name": null,
  "trial_params": null
}