{
  "best_metric": 0.4087750315666199,
  "best_model_checkpoint": "deberta-v3-large-lr55-e6-bs4-acc6-dsv3/checkpoint-213",
  "epoch": 12.0,
  "eval_steps": 500,
  "global_step": 512,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.02, "grad_norm": 1.8460181951522827, "learning_rate": 5.496726190476191e-06, "loss": 0.6818, "step": 1 },
    { "epoch": 0.05, "grad_norm": 1.9176546335220337, "learning_rate": 5.493452380952381e-06, "loss": 0.7102, "step": 2 },
    { "epoch": 0.07, "grad_norm": 2.076033353805542, "learning_rate": 5.490178571428571e-06, "loss": 0.712, "step": 3 },
    { "epoch": 0.09, "grad_norm": 0.9934174418449402, "learning_rate": 5.486904761904762e-06, "loss": 0.6877, "step": 4 },
    { "epoch": 0.12, "grad_norm": 3.206833600997925, "learning_rate": 5.483630952380952e-06, "loss": 0.7138, "step": 5 },
    { "epoch": 0.14, "grad_norm": 0.930253803730011, "learning_rate": 5.480357142857143e-06, "loss": 0.675, "step": 6 },
    { "epoch": 0.16, "grad_norm": 1.047216773033142, "learning_rate": 5.477083333333333e-06, "loss": 0.7025, "step": 7 },
    { "epoch": 0.19, "grad_norm": 2.5932674407958984, "learning_rate": 5.473809523809524e-06, "loss": 0.6736, "step": 8 },
    { "epoch": 0.21, "grad_norm": 1.1506260633468628, "learning_rate": 5.4705357142857145e-06, "loss": 0.6687, "step": 9 },
    { "epoch": 0.23, "grad_norm": 2.9675679206848145, "learning_rate": 5.4672619047619045e-06, "loss": 0.7021, "step": 10 },
    { "epoch": 0.26, "grad_norm": 3.620849132537842, "learning_rate": 5.463988095238095e-06, "loss": 0.682, "step": 11 },
    { "epoch": 0.28, "grad_norm": 2.2752580642700195, "learning_rate": 5.460714285714286e-06, "loss": 0.6633, "step": 12 },
    { "epoch": 0.3, "grad_norm": 3.6715457439422607, "learning_rate": 5.457440476190476e-06, "loss": 0.6926, "step": 13 },
    { "epoch": 0.33, "grad_norm": 3.4275119304656982, "learning_rate": 5.454166666666667e-06, "loss": 0.6945, "step": 14 },
    { "epoch": 0.35, "grad_norm": 2.6921539306640625, "learning_rate": 5.450892857142857e-06, "loss": 0.6876, "step": 15 },
    { "epoch": 0.38, "grad_norm": 3.6308276653289795, "learning_rate": 5.4476190476190476e-06, "loss": 0.7249, "step": 16 },
    { "epoch": 0.4, "grad_norm": 1.0595824718475342, "learning_rate": 5.444345238095238e-06, "loss": 0.6804, "step": 17 },
    { "epoch": 0.42, "grad_norm": 2.645174026489258, "learning_rate": 5.441071428571428e-06, "loss": 0.6768, "step": 18 },
    { "epoch": 0.45, "grad_norm": 1.631472110748291, "learning_rate": 5.437797619047619e-06, "loss": 0.6812, "step": 19 },
    { "epoch": 0.47, "grad_norm": 3.3360867500305176, "learning_rate": 5.43452380952381e-06, "loss": 0.6817, "step": 20 },
    { "epoch": 0.49, "grad_norm": 1.531082034111023, "learning_rate": 5.43125e-06, "loss": 0.6624, "step": 21 },
    { "epoch": 0.52, "grad_norm": 2.1303884983062744, "learning_rate": 5.427976190476191e-06, "loss": 0.6926, "step": 22 },
    { "epoch": 0.54, "grad_norm": 2.1816420555114746, "learning_rate": 5.4247023809523815e-06, "loss": 0.7173, "step": 23 },
    { "epoch": 0.56, "grad_norm": 3.382896900177002, "learning_rate": 5.4214285714285714e-06, "loss": 0.7172, "step": 24 },
    { "epoch": 0.59, "grad_norm": 4.177649021148682, "learning_rate": 5.418154761904762e-06, "loss": 0.6932, "step": 25 },
    { "epoch": 0.61, "grad_norm": 6.432918548583984, "learning_rate": 5.414880952380952e-06, "loss": 0.6679, "step": 26 },
    { "epoch": 0.63, "grad_norm": 1.7472172975540161, "learning_rate": 5.411607142857142e-06, "loss": 0.6704, "step": 27 },
    { "epoch": 0.66, "grad_norm": 2.1172163486480713, "learning_rate": 5.408333333333333e-06, "loss": 0.6637, "step": 28 },
    { "epoch": 0.68, "grad_norm": 1.6855010986328125, "learning_rate": 5.405059523809524e-06, "loss": 0.656, "step": 29 },
    { "epoch": 0.7, "grad_norm": 2.4389238357543945, "learning_rate": 5.401785714285714e-06, "loss": 0.6562, "step": 30 },
    { "epoch": 0.73, "grad_norm": 3.0723414421081543, "learning_rate": 5.3985119047619045e-06, "loss": 0.6812, "step": 31 },
    { "epoch": 0.75, "grad_norm": 2.1830461025238037, "learning_rate": 5.3952380952380944e-06, "loss": 0.6448, "step": 32 },
    { "epoch": 0.77, "grad_norm": 3.109797716140747, "learning_rate": 5.391964285714285e-06, "loss": 0.6841, "step": 33 },
    { "epoch": 0.8, "grad_norm": 2.5417990684509277, "learning_rate": 5.388690476190476e-06, "loss": 0.6428, "step": 34 },
    { "epoch": 0.82, "grad_norm": 2.0981853008270264, "learning_rate": 5.385416666666666e-06, "loss": 0.6493, "step": 35 },
    { "epoch": 0.84, "grad_norm": 1.927240014076233, "learning_rate": 5.382142857142857e-06, "loss": 0.6496, "step": 36 },
    { "epoch": 0.87, "grad_norm": 4.362425327301025, "learning_rate": 5.378869047619048e-06, "loss": 0.6666, "step": 37 },
    { "epoch": 0.89, "grad_norm": 6.46484899520874, "learning_rate": 5.3755952380952376e-06, "loss": 0.7947, "step": 38 },
    { "epoch": 0.91, "grad_norm": 3.2664661407470703, "learning_rate": 5.372321428571428e-06, "loss": 0.608, "step": 39 },
    { "epoch": 0.94, "grad_norm": 2.2718191146850586, "learning_rate": 5.369047619047619e-06, "loss": 0.6247, "step": 40 },
    { "epoch": 0.96, "grad_norm": 5.372130870819092, "learning_rate": 5.365773809523809e-06, "loss": 0.7885, "step": 41 },
    { "epoch": 0.98, "grad_norm": 1.7550398111343384, "learning_rate": 5.3625e-06, "loss": 0.6425, "step": 42 },
    { "epoch": 0.98, "eval_accuracy": 0.6681818181818182, "eval_loss": 0.6295949816703796, "eval_runtime": 1.9777, "eval_samples_per_second": 111.242, "eval_steps_per_second": 27.81, "step": 42 },
    { "epoch": 1.01, "grad_norm": 3.6129467487335205, "learning_rate": 5.35922619047619e-06, "loss": 0.7519, "step": 43 },
    { "epoch": 1.03, "grad_norm": 4.161266326904297, "learning_rate": 5.355952380952381e-06, "loss": 0.6534, "step": 44 },
    { "epoch": 1.05, "grad_norm": 2.0768189430236816, "learning_rate": 5.3526785714285715e-06, "loss": 0.6805, "step": 45 },
    { "epoch": 1.08, "grad_norm": 2.847532272338867, "learning_rate": 5.349404761904761e-06, "loss": 0.6691, "step": 46 },
    { "epoch": 1.1, "grad_norm": 2.2965738773345947, "learning_rate": 5.346130952380952e-06, "loss": 0.6695, "step": 47 },
    { "epoch": 1.12, "grad_norm": 2.5308315753936768, "learning_rate": 5.342857142857143e-06, "loss": 0.6215, "step": 48 },
    { "epoch": 1.15, "grad_norm": 2.581547975540161, "learning_rate": 5.339583333333333e-06, "loss": 0.5902, "step": 49 },
    { "epoch": 1.17, "grad_norm": 3.935713768005371, "learning_rate": 5.336309523809524e-06, "loss": 0.5963, "step": 50 },
    { "epoch": 1.2, "grad_norm": 3.4248504638671875, "learning_rate": 5.333035714285714e-06, "loss": 0.6649, "step": 51 },
    { "epoch": 1.22, "grad_norm": 7.237908840179443, "learning_rate": 5.3297619047619045e-06, "loss": 0.7107, "step": 52 },
    { "epoch": 1.24, "grad_norm": 2.9264256954193115, "learning_rate": 5.326488095238095e-06, "loss": 0.6806, "step": 53 },
    { "epoch": 1.27, "grad_norm": 2.699674606323242, "learning_rate": 5.323214285714285e-06, "loss": 0.6276, "step": 54 },
    { "epoch": 1.29, "grad_norm": 3.520171642303467, "learning_rate": 5.319940476190476e-06, "loss": 0.6278, "step": 55 },
    { "epoch": 1.31, "grad_norm": 2.0494229793548584, "learning_rate": 5.316666666666667e-06, "loss": 0.5886, "step": 56 },
    { "epoch": 1.34, "grad_norm": 2.4809341430664062, "learning_rate": 5.313392857142857e-06, "loss": 0.6001, "step": 57 },
    { "epoch": 1.36, "grad_norm": 5.9523820877075195, "learning_rate": 5.310119047619048e-06, "loss": 0.6717, "step": 58 },
    { "epoch": 1.38, "grad_norm": 4.1546502113342285, "learning_rate": 5.306845238095238e-06, "loss": 0.6483, "step": 59 },
    { "epoch": 1.41, "grad_norm": 3.3143973350524902, "learning_rate": 5.303571428571428e-06, "loss": 0.5775, "step": 60 },
    { "epoch": 1.43, "grad_norm": 7.167575359344482, "learning_rate": 5.300297619047619e-06, "loss": 0.6182, "step": 61 },
    { "epoch": 1.45, "grad_norm": 1.888744592666626, "learning_rate": 5.297023809523809e-06, "loss": 0.5694, "step": 62 },
    { "epoch": 1.48, "grad_norm": 4.127984046936035, "learning_rate": 5.29375e-06, "loss": 0.5242, "step": 63 },
    { "epoch": 1.5, "grad_norm": 3.9158403873443604, "learning_rate": 5.290476190476191e-06, "loss": 0.5515, "step": 64 },
    { "epoch": 1.52, "grad_norm": 3.5147557258605957, "learning_rate": 5.287202380952381e-06, "loss": 0.5724, "step": 65 },
    { "epoch": 1.55, "grad_norm": 4.157176494598389, "learning_rate": 5.2839285714285715e-06, "loss": 0.6203, "step": 66 },
    { "epoch": 1.57, "grad_norm": 5.852452754974365, "learning_rate": 5.280654761904762e-06, "loss": 0.6141, "step": 67 },
    { "epoch": 1.59, "grad_norm": 4.920362949371338, "learning_rate": 5.277380952380952e-06, "loss": 0.6591, "step": 68 },
    { "epoch": 1.62, "grad_norm": 13.502717018127441, "learning_rate": 5.274107142857143e-06, "loss": 0.5012, "step": 69 },
    { "epoch": 1.64, "grad_norm": 3.996752977371216, "learning_rate": 5.270833333333333e-06, "loss": 0.5382, "step": 70 },
    { "epoch": 1.66, "grad_norm": 6.804556369781494, "learning_rate": 5.267559523809524e-06, "loss": 0.6235, "step": 71 },
    { "epoch": 1.69, "grad_norm": 3.19149112701416, "learning_rate": 5.264285714285715e-06, "loss": 0.6962, "step": 72 },
    { "epoch": 1.71, "grad_norm": 5.843012809753418, "learning_rate": 5.2610119047619045e-06, "loss": 0.619, "step": 73 },
    { "epoch": 1.73, "grad_norm": 8.353483200073242, "learning_rate": 5.257738095238095e-06, "loss": 0.5508, "step": 74 },
    { "epoch": 1.76, "grad_norm": 4.157430648803711, "learning_rate": 5.254464285714286e-06, "loss": 0.5476, "step": 75 },
    { "epoch": 1.78, "grad_norm": 7.451327800750732, "learning_rate": 5.251190476190476e-06, "loss": 0.563, "step": 76 },
    { "epoch": 1.8, "grad_norm": 2.8933181762695312, "learning_rate": 5.247916666666667e-06, "loss": 0.537, "step": 77 },
    { "epoch": 1.83, "grad_norm": 5.481462478637695, "learning_rate": 5.244642857142858e-06, "loss": 0.4794, "step": 78 },
    { "epoch": 1.85, "grad_norm": 3.3026862144470215, "learning_rate": 5.241369047619047e-06, "loss": 0.5493, "step": 79 },
    { "epoch": 1.88, "grad_norm": 4.270678520202637, "learning_rate": 5.238095238095238e-06, "loss": 0.5098, "step": 80 },
    { "epoch": 1.9, "grad_norm": 6.741820812225342, "learning_rate": 5.234821428571428e-06, "loss": 0.587, "step": 81 },
    { "epoch": 1.92, "grad_norm": 12.857108116149902, "learning_rate": 5.231547619047618e-06, "loss": 0.6178, "step": 82 },
    { "epoch": 1.95, "grad_norm": 14.573826789855957, "learning_rate": 5.228273809523809e-06, "loss": 0.5698, "step": 83 },
    { "epoch": 1.97, "grad_norm": 16.281530380249023, "learning_rate": 5.225e-06, "loss": 0.5929, "step": 84 },
    { "epoch": 1.99, "grad_norm": 8.965470314025879, "learning_rate": 5.22172619047619e-06, "loss": 0.583, "step": 85 },
    { "epoch": 1.99, "eval_accuracy": 0.7, "eval_loss": 0.5743765830993652, "eval_runtime": 1.9707, "eval_samples_per_second": 111.636, "eval_steps_per_second": 27.909, "step": 85 },
    { "epoch": 2.02, "grad_norm": 7.075886249542236, "learning_rate": 5.218452380952381e-06, "loss": 0.5969, "step": 86 },
    { "epoch": 2.04, "grad_norm": 5.647829055786133, "learning_rate": 5.2151785714285715e-06, "loss": 0.5581, "step": 87 },
    { "epoch": 2.06, "grad_norm": 12.890501976013184, "learning_rate": 5.2119047619047615e-06, "loss": 0.5742, "step": 88 },
    { "epoch": 2.09, "grad_norm": 6.206398963928223, "learning_rate": 5.208630952380952e-06, "loss": 0.4763, "step": 89 },
    { "epoch": 2.11, "grad_norm": 15.82968521118164, "learning_rate": 5.205357142857142e-06, "loss": 0.5015, "step": 90 },
    { "epoch": 2.13, "grad_norm": 2.781341314315796, "learning_rate": 5.202083333333333e-06, "loss": 0.6048, "step": 91 },
    { "epoch": 2.16, "grad_norm": 31.130008697509766, "learning_rate": 5.198809523809524e-06, "loss": 0.6021, "step": 92 },
    { "epoch": 2.18, "grad_norm": 41.88652038574219, "learning_rate": 5.195535714285714e-06, "loss": 0.5294, "step": 93 },
    { "epoch": 2.2, "grad_norm": 13.205155372619629, "learning_rate": 5.1922619047619046e-06, "loss": 0.5571, "step": 94 },
    { "epoch": 2.23, "grad_norm": 4.5637054443359375, "learning_rate": 5.188988095238095e-06, "loss": 0.415, "step": 95 },
    { "epoch": 2.25, "grad_norm": 8.613974571228027, "learning_rate": 5.185714285714285e-06, "loss": 0.665, "step": 96 },
    { "epoch": 2.27, "grad_norm": 16.234405517578125, "learning_rate": 5.182440476190476e-06, "loss": 0.5181, "step": 97 },
    { "epoch": 2.3, "grad_norm": 6.523887634277344, "learning_rate": 5.179166666666666e-06, "loss": 0.4848, "step": 98 },
    { "epoch": 2.32, "grad_norm": 9.602444648742676, "learning_rate": 5.175892857142857e-06, "loss": 0.4226, "step": 99 },
    { "epoch": 2.34, "grad_norm": 5.141808032989502, "learning_rate": 5.172619047619048e-06, "loss": 0.4066, "step": 100 },
    { "epoch": 2.37, "grad_norm": 14.732641220092773, "learning_rate": 5.169345238095238e-06, "loss": 0.4309, "step": 101 },
    { "epoch": 2.39, "grad_norm": 13.681814193725586, "learning_rate": 5.166071428571428e-06, "loss": 0.5336, "step": 102 },
    { "epoch": 2.41, "grad_norm": 9.389954566955566, "learning_rate": 5.162797619047619e-06, "loss": 0.4979, "step": 103 },
    { "epoch": 2.44, "grad_norm": 3.370234727859497, "learning_rate": 5.159523809523809e-06, "loss": 0.5064, "step": 104 },
    { "epoch": 2.46, "grad_norm": 6.832080841064453, "learning_rate": 5.15625e-06, "loss": 0.5425, "step": 105 },
    { "epoch": 2.48, "grad_norm": 5.864181041717529, "learning_rate": 5.152976190476191e-06, "loss": 0.4536, "step": 106 },
    { "epoch": 2.51, "grad_norm": 4.189244747161865, "learning_rate": 5.149702380952381e-06, "loss": 0.5783, "step": 107 },
    { "epoch": 2.53, "grad_norm": 13.515569686889648, "learning_rate": 5.1464285714285715e-06, "loss": 0.6054, "step": 108 },
    { "epoch": 2.55, "grad_norm": 8.88909912109375, "learning_rate": 5.1431547619047615e-06, "loss": 0.4679, "step": 109 },
    { "epoch": 2.58, "grad_norm": 12.870144844055176, "learning_rate": 5.139880952380952e-06, "loss": 0.4515, "step": 110 },
    { "epoch": 2.6, "grad_norm": 7.6494927406311035, "learning_rate": 5.136607142857143e-06, "loss": 0.4118, "step": 111 },
    { "epoch": 2.62, "grad_norm": 5.379680633544922, "learning_rate": 5.133333333333333e-06, "loss": 0.4404, "step": 112 },
    { "epoch": 2.65, "grad_norm": 12.46003246307373, "learning_rate": 5.130059523809524e-06, "loss": 0.5413, "step": 113 },
    { "epoch": 2.67, "grad_norm": 16.497268676757812, "learning_rate": 5.126785714285715e-06, "loss": 0.4424, "step": 114 },
    { "epoch": 2.7, "grad_norm": 27.879886627197266, "learning_rate": 5.123511904761905e-06, "loss": 0.6332, "step": 115 },
    { "epoch": 2.72, "grad_norm": 24.27963638305664, "learning_rate": 5.120238095238095e-06, "loss": 0.6507, "step": 116 },
    { "epoch": 2.74, "grad_norm": 9.079182624816895, "learning_rate": 5.116964285714285e-06, "loss": 0.4821, "step": 117 },
    { "epoch": 2.77, "grad_norm": 8.210986137390137, "learning_rate": 5.113690476190476e-06, "loss": 0.4236, "step": 118 },
    { "epoch": 2.79, "grad_norm": 9.538209915161133, "learning_rate": 5.110416666666667e-06, "loss": 0.5458, "step": 119 },
    { "epoch": 2.81, "grad_norm": 11.242250442504883, "learning_rate": 5.107142857142857e-06, "loss": 0.3717, "step": 120 },
    { "epoch": 2.84, "grad_norm": 7.99606466293335, "learning_rate": 5.103869047619048e-06, "loss": 0.463, "step": 121 },
    { "epoch": 2.86, "grad_norm": 8.157175064086914, "learning_rate": 5.1005952380952385e-06, "loss": 0.4463, "step": 122 },
    { "epoch": 2.88, "grad_norm": 6.33786678314209, "learning_rate": 5.0973214285714284e-06, "loss": 0.4483, "step": 123 },
    { "epoch": 2.91, "grad_norm": 12.092632293701172, "learning_rate": 5.094047619047619e-06, "loss": 0.4729, "step": 124 },
    { "epoch": 2.93, "grad_norm": 6.666680335998535, "learning_rate": 5.09077380952381e-06, "loss": 0.4628, "step": 125 },
    { "epoch": 2.95, "grad_norm": 13.772384643554688, "learning_rate": 5.0875e-06, "loss": 0.2814, "step": 126 },
    { "epoch": 2.98, "grad_norm": 28.22318458557129, "learning_rate": 5.084226190476191e-06, "loss": 0.6727, "step": 127 },
    { "epoch": 3.0, "grad_norm": 25.690156936645508, "learning_rate": 5.080952380952381e-06, "loss": 0.4848, "step": 128 },
    { "epoch": 3.0, "eval_accuracy": 0.7227272727272728, "eval_loss": 0.6313565373420715, "eval_runtime": 1.9701, "eval_samples_per_second": 111.669, "eval_steps_per_second": 27.917, "step": 128 },
    { "epoch": 3.02, "grad_norm": 20.357358932495117, "learning_rate": 5.0776785714285715e-06, "loss": 0.6101, "step": 129 },
    { "epoch": 3.05, "grad_norm": 24.226192474365234, "learning_rate": 5.074404761904762e-06, "loss": 0.4901, "step": 130 },
    { "epoch": 3.07, "grad_norm": 21.727924346923828, "learning_rate": 5.071130952380952e-06, "loss": 0.3983, "step": 131 },
    { "epoch": 3.09, "grad_norm": 8.311666488647461, "learning_rate": 5.067857142857142e-06, "loss": 0.4098, "step": 132 },
    { "epoch": 3.12, "grad_norm": 8.897052764892578, "learning_rate": 5.064583333333333e-06, "loss": 0.4665, "step": 133 },
    { "epoch": 3.14, "grad_norm": 24.099576950073242, "learning_rate": 5.061309523809523e-06, "loss": 0.4198, "step": 134 },
    { "epoch": 3.16, "grad_norm": 10.50734806060791, "learning_rate": 5.058035714285714e-06, "loss": 0.3888, "step": 135 },
    { "epoch": 3.19, "grad_norm": 22.82213592529297, "learning_rate": 5.054761904761905e-06, "loss": 0.4525, "step": 136 },
    { "epoch": 3.21, "grad_norm": 37.29859161376953, "learning_rate": 5.0514880952380946e-06, "loss": 0.486, "step": 137 },
    { "epoch": 3.23, "grad_norm": 14.720842361450195, "learning_rate": 5.048214285714285e-06, "loss": 0.4094, "step": 138 },
    { "epoch": 3.26, "grad_norm": 13.443496704101562, "learning_rate": 5.044940476190476e-06, "loss": 0.3444, "step": 139 },
    { "epoch": 3.28, "grad_norm": 10.844743728637695, "learning_rate": 5.041666666666666e-06, "loss": 0.3291, "step": 140 },
    { "epoch": 3.3, "grad_norm": 10.810824394226074, "learning_rate": 5.038392857142857e-06, "loss": 0.4249, "step": 141 },
    { "epoch": 3.33, "grad_norm": 5.066943168640137, "learning_rate": 5.035119047619048e-06, "loss": 0.3591, "step": 142 },
    { "epoch": 3.35, "grad_norm": 75.52027130126953, "learning_rate": 5.031845238095238e-06, "loss": 0.4469, "step": 143 },
    { "epoch": 3.38, "grad_norm": 41.847652435302734, "learning_rate": 5.0285714285714285e-06, "loss": 0.4158, "step": 144 },
    { "epoch": 3.4, "grad_norm": 17.77975082397461, "learning_rate": 5.025297619047618e-06, "loss": 0.2459, "step": 145 },
    { "epoch": 3.42, "grad_norm": 41.41980743408203, "learning_rate": 5.022023809523809e-06, "loss": 0.5155, "step": 146 },
    { "epoch": 3.45, "grad_norm": 42.26784896850586, "learning_rate": 5.01875e-06, "loss": 0.3646, "step": 147 },
    { "epoch": 3.47, "grad_norm": 15.376642227172852, "learning_rate": 5.01547619047619e-06, "loss": 0.411, "step": 148 },
    { "epoch": 3.49, "grad_norm": 17.662681579589844, "learning_rate": 5.012202380952381e-06, "loss": 0.3776, "step": 149 },
    { "epoch": 3.52, "grad_norm": 12.808351516723633, "learning_rate": 5.0089285714285716e-06, "loss": 0.3035, "step": 150 },
    { "epoch": 3.54, "grad_norm": 30.4917049407959, "learning_rate": 5.0056547619047615e-06, "loss": 0.5275, "step": 151 },
    { "epoch": 3.56, "grad_norm": 9.420913696289062, "learning_rate": 5.002380952380952e-06, "loss": 0.4193, "step": 152 },
    { "epoch": 3.59, "grad_norm": 20.13055419921875, "learning_rate": 4.999107142857142e-06, "loss": 0.5197, "step": 153 },
    { "epoch": 3.61, "grad_norm": 3.9424262046813965, "learning_rate": 4.995833333333333e-06, "loss": 0.3119, "step": 154 },
    { "epoch": 3.63, "grad_norm": 4.024766445159912, "learning_rate": 4.992559523809524e-06, "loss": 0.3072, "step": 155 },
    { "epoch": 3.66, "grad_norm": 16.67523956298828, "learning_rate": 4.989285714285714e-06, "loss": 0.4243, "step": 156 },
    { "epoch": 3.68, "grad_norm": 14.94045352935791, "learning_rate": 4.986011904761905e-06, "loss": 0.4539, "step": 157 },
    { "epoch": 3.7, "grad_norm": 14.900248527526855, "learning_rate": 4.9827380952380954e-06, "loss": 0.3834, "step": 158 },
    { "epoch": 3.73, "grad_norm": 59.21234893798828, "learning_rate": 4.979464285714285e-06, "loss": 0.6183, "step": 159 },
    { "epoch": 3.75, "grad_norm": 9.785917282104492, "learning_rate": 4.976190476190476e-06, "loss": 0.6158, "step": 160 },
    { "epoch": 3.77, "grad_norm": 47.627864837646484, "learning_rate": 4.972916666666667e-06, "loss": 0.7627, "step": 161 },
    { "epoch": 3.8, "grad_norm": 33.516265869140625, "learning_rate": 4.969642857142857e-06, "loss": 0.6755, "step": 162 },
    { "epoch": 3.82, "grad_norm": 31.58948516845703, "learning_rate": 4.966369047619048e-06, "loss": 0.3291, "step": 163 },
    { "epoch": 3.84, "grad_norm": 6.718880653381348, "learning_rate": 4.963095238095238e-06, "loss": 0.3233, "step": 164 },
    { "epoch": 3.87, "grad_norm": 17.761388778686523, "learning_rate": 4.9598214285714285e-06, "loss": 0.332, "step": 165 },
    { "epoch": 3.89, "grad_norm": 24.450733184814453, "learning_rate": 4.956547619047619e-06, "loss": 0.4093, "step": 166 },
    { "epoch": 3.91, "grad_norm": 5.664928913116455, "learning_rate": 4.953273809523809e-06, "loss": 0.458, "step": 167 },
    { "epoch": 3.94, "grad_norm": 8.065629005432129, "learning_rate": 4.95e-06, "loss": 0.503, "step": 168 },
    { "epoch": 3.96, "grad_norm": 8.75709056854248, "learning_rate": 4.946726190476191e-06, "loss": 0.4183, "step": 169 },
    { "epoch": 3.98, "grad_norm": 29.46023941040039, "learning_rate": 4.943452380952381e-06, "loss": 0.4187, "step": 170 },
    { "epoch": 3.98, "eval_accuracy": 0.7727272727272727, "eval_loss": 0.48904669284820557, "eval_runtime": 1.961, "eval_samples_per_second": 112.185, "eval_steps_per_second": 28.046, "step": 170 },
    { "epoch": 4.01, "grad_norm": 2.7368314266204834, "learning_rate": 4.940178571428572e-06, "loss": 0.4246, "step": 171 },
    { "epoch": 4.03, "grad_norm": 4.692036151885986, "learning_rate": 4.9369047619047615e-06, "loss": 0.303, "step": 172 },
    { "epoch": 4.05, "grad_norm": 21.547483444213867, "learning_rate": 4.933630952380952e-06, "loss": 0.3867, "step": 173 },
    { "epoch": 4.08, "grad_norm": 8.194944381713867, "learning_rate": 4.930357142857143e-06, "loss": 0.1695, "step": 174 },
    { "epoch": 4.1, "grad_norm": 3.5077579021453857, "learning_rate": 4.927083333333333e-06, "loss": 0.3142, "step": 175 },
    { "epoch": 4.12, "grad_norm": 33.11520004272461, "learning_rate": 4.923809523809524e-06, "loss": 0.2809, "step": 176 },
    { "epoch": 4.15, "grad_norm": 22.623079299926758, "learning_rate": 4.920535714285715e-06, "loss": 0.5169, "step": 177 },
    { "epoch": 4.17, "grad_norm": 50.30414581298828, "learning_rate": 4.917261904761905e-06, "loss": 0.5075, "step": 178 },
    { "epoch": 4.2, "grad_norm": 22.926681518554688, "learning_rate": 4.9139880952380955e-06, "loss": 0.3153, "step": 179 },
    { "epoch": 4.22, "grad_norm": 4.504119396209717, "learning_rate": 4.910714285714286e-06, "loss": 0.3218, "step": 180 },
    { "epoch": 4.24, "grad_norm": 26.036670684814453, "learning_rate": 4.907440476190476e-06, "loss": 0.4977, "step": 181 },
    { "epoch": 4.27, "grad_norm": 3.1559319496154785, "learning_rate": 4.904166666666667e-06, "loss": 0.2435, "step": 182 },
    { "epoch": 4.29, "grad_norm": 10.12259292602539, "learning_rate": 4.900892857142857e-06, "loss": 0.2531, "step": 183 },
    { "epoch": 4.31, "grad_norm": 14.153977394104004, "learning_rate": 4.897619047619047e-06, "loss": 0.2939, "step": 184 },
    { "epoch": 4.34, "grad_norm": 27.15022850036621, "learning_rate": 4.894345238095238e-06, "loss": 0.3503, "step": 185 },
    { "epoch": 4.36, "grad_norm": 14.092424392700195, "learning_rate": 4.8910714285714285e-06, "loss": 0.2999, "step": 186 },
    { "epoch": 4.38, "grad_norm": 21.638959884643555, "learning_rate": 4.8877976190476185e-06, "loss": 0.2351, "step": 187 },
    { "epoch": 4.41, "grad_norm": 10.01887321472168, "learning_rate": 4.884523809523809e-06, "loss": 0.313, "step": 188 },
    { "epoch": 4.43, "grad_norm": 13.962733268737793, "learning_rate": 4.881249999999999e-06, "loss": 0.3914, "step": 189 },
    { "epoch": 4.45, "grad_norm": 12.105456352233887, "learning_rate": 4.87797619047619e-06, "loss": 0.3625, "step": 190 },
    { "epoch": 4.48, "grad_norm": 11.165689468383789, "learning_rate": 4.874702380952381e-06, "loss": 0.425, "step": 191 },
    { "epoch": 4.5, "grad_norm": 13.687613487243652, "learning_rate": 4.871428571428571e-06, "loss": 0.3409, "step": 192 },
    { "epoch": 4.52, "grad_norm": 16.423450469970703, "learning_rate": 4.8681547619047616e-06, "loss": 0.4593, "step": 193 },
    { "epoch": 4.55, "grad_norm": 10.233711242675781, "learning_rate": 4.864880952380952e-06, "loss": 0.2118, "step": 194 },
    { "epoch": 4.57, "grad_norm": 9.759559631347656, "learning_rate": 4.861607142857142e-06, "loss": 0.3881, "step": 195 },
    { "epoch": 4.59, "grad_norm": 16.481748580932617, "learning_rate": 4.858333333333333e-06, "loss": 0.3053, "step": 196 },
    { "epoch": 4.62, "grad_norm": 17.1108455657959, "learning_rate": 4.855059523809524e-06, "loss": 0.4328, "step": 197 },
    { "epoch": 4.64, "grad_norm": 18.17218589782715, "learning_rate": 4.851785714285714e-06, "loss": 0.3355, "step": 198 },
    { "epoch": 4.66, "grad_norm": 10.150218963623047, "learning_rate": 4.848511904761905e-06, "loss": 0.244, "step": 199 },
    { "epoch": 4.69, "grad_norm": 8.062572479248047, "learning_rate": 4.845238095238095e-06, "loss": 0.2057, "step": 200 },
    { "epoch": 4.71, "grad_norm": 11.194631576538086, "learning_rate": 4.8419642857142854e-06, "loss": 0.2364, "step": 201 },
    { "epoch": 4.73, "grad_norm": 14.064760208129883, "learning_rate": 4.838690476190476e-06, "loss": 0.2463, "step": 202 },
    { "epoch": 4.76, "grad_norm": 18.526512145996094, "learning_rate": 4.835416666666666e-06, "loss": 0.2456, "step": 203 },
    { "epoch": 4.78, "grad_norm": 17.577804565429688, "learning_rate": 4.832142857142857e-06, "loss": 0.2601, "step": 204 },
    { "epoch": 4.8, "grad_norm": 11.843798637390137, "learning_rate": 4.828869047619048e-06, "loss": 0.1637, "step": 205 },
    { "epoch": 4.83, "grad_norm": 12.153199195861816, "learning_rate": 4.825595238095238e-06, "loss": 0.1723, "step": 206 },
    { "epoch": 4.85, "grad_norm": 10.96668529510498, "learning_rate": 4.8223214285714285e-06, "loss": 0.3315, "step": 207 },
    { "epoch": 4.88, "grad_norm": 8.555688858032227, "learning_rate": 4.8190476190476185e-06, "loss": 0.2321, "step": 208 },
    { "epoch": 4.9, "grad_norm": 12.00627326965332, "learning_rate": 4.815773809523809e-06, "loss": 0.2672, "step": 209 },
    { "epoch": 4.92, "grad_norm": 22.342628479003906, "learning_rate": 4.8125e-06, "loss": 0.4607, "step": 210 },
    { "epoch": 4.95, "grad_norm": 25.33961296081543, "learning_rate": 4.80922619047619e-06, "loss": 0.2068, "step": 211 },
    { "epoch": 4.97, "grad_norm": 22.976959228515625, "learning_rate": 4.805952380952381e-06, "loss": 0.3447, "step": 212 },
    { "epoch": 4.99, "grad_norm": 19.871631622314453, "learning_rate": 4.802678571428572e-06, "loss": 0.1563, "step": 213 },
    { "epoch": 4.99, "eval_accuracy": 0.8545454545454545, "eval_loss": 0.4087750315666199, "eval_runtime": 1.9518, "eval_samples_per_second": 112.716, "eval_steps_per_second": 28.179, "step": 213 },
    { "epoch": 5.02, "grad_norm": 6.280614852905273, "learning_rate": 4.799404761904762e-06, "loss": 0.1367, "step": 214 },
    { "epoch": 5.04, "grad_norm": 18.553333282470703, "learning_rate": 4.796130952380952e-06, "loss": 0.3876, "step": 215 },
    { "epoch": 5.06, "grad_norm": 10.598409652709961, "learning_rate": 4.792857142857143e-06, "loss": 0.2598, "step": 216 },
    { "epoch": 5.09, "grad_norm": 28.292373657226562, "learning_rate": 4.789583333333333e-06, "loss": 0.1943, "step": 217 },
    { "epoch": 5.11, "grad_norm": 29.251375198364258, "learning_rate": 4.786309523809524e-06, "loss": 0.3162, "step": 218 },
    { "epoch": 5.13, "grad_norm": 54.16463088989258, "learning_rate": 4.783035714285714e-06, "loss": 0.3793, "step": 219 },
    { "epoch": 5.16, "grad_norm": 13.832413673400879, "learning_rate": 4.779761904761905e-06, "loss": 0.134, "step": 220 },
    { "epoch": 5.18, "grad_norm": 17.206539154052734, "learning_rate": 4.7764880952380955e-06, "loss": 0.3387, "step": 221 },
    { "epoch": 5.2, "grad_norm": 22.453044891357422, "learning_rate": 4.7732142857142855e-06, "loss": 0.2228, "step": 222 },
    { "epoch": 5.23, "grad_norm": 34.940757751464844, "learning_rate": 4.769940476190476e-06, "loss": 0.2947, "step": 223 },
    { "epoch": 5.25, "grad_norm": 10.684017181396484, "learning_rate": 4.766666666666667e-06, "loss": 0.303, "step": 224 },
    { "epoch": 5.27, "grad_norm": 12.175122261047363, "learning_rate": 4.763392857142857e-06, "loss": 0.2265, "step": 225 },
    { "epoch": 5.3, "grad_norm": 7.9341139793396, "learning_rate": 4.760119047619048e-06, "loss": 0.2761, "step": 226 },
    { "epoch": 5.32, "grad_norm": 28.212831497192383, "learning_rate": 4.756845238095238e-06, "loss": 0.3097, "step": 227 },
    { "epoch": 5.34, "grad_norm": 31.275775909423828, "learning_rate": 4.7535714285714286e-06, "loss": 0.3684, "step": 228 },
    { "epoch": 5.37, "grad_norm": 23.49386978149414, "learning_rate": 4.750297619047619e-06, "loss": 0.221, "step": 229 },
    { "epoch": 5.39, "grad_norm": 25.68425941467285, "learning_rate": 4.747023809523809e-06, "loss": 0.6258, "step": 230 },
    { "epoch": 5.41, "grad_norm": 25.499523162841797, "learning_rate": 4.74375e-06, "loss": 0.3263, "step": 231 },
    { "epoch": 5.44, "grad_norm": 17.70143699645996, "learning_rate": 4.740476190476191e-06, "loss": 0.2089, "step": 232 },
    { "epoch": 5.46, "grad_norm": 14.780723571777344, "learning_rate": 4.737202380952381e-06, "loss": 0.1825, "step": 233 },
    { "epoch": 5.48, "grad_norm": 5.02520227432251, "learning_rate": 4.733928571428572e-06, "loss": 0.2236, "step": 234 },
    { "epoch": 5.51, "grad_norm": 26.148418426513672, "learning_rate": 4.7306547619047625e-06, "loss": 0.2121, "step": 235 },
    { "epoch": 5.53, "grad_norm": 36.72833251953125, "learning_rate": 4.727380952380952e-06, "loss": 0.4922, "step": 236 },
    { "epoch": 5.55, "grad_norm": 16.5570125579834, "learning_rate": 4.724107142857142e-06, "loss": 0.25, "step": 237 },
    { "epoch": 5.58, "grad_norm": 36.25120544433594, "learning_rate": 4.720833333333333e-06, "loss": 0.4475, "step": 238 },
    { "epoch": 5.6, "grad_norm": 13.410614013671875, "learning_rate": 4.717559523809523e-06, "loss": 0.2065, "step": 239 },
    { "epoch": 5.62, "grad_norm": 18.893632888793945, "learning_rate": 4.714285714285714e-06, "loss": 0.2649, "step": 240 },
    { "epoch": 5.65, "grad_norm": 9.750955581665039, "learning_rate": 4.711011904761905e-06, "loss": 0.2054, "step": 241 },
    { "epoch": 5.67, "grad_norm": 9.189886093139648, "learning_rate": 4.707738095238095e-06, "loss": 0.1694, "step": 242 },
    { "epoch": 5.7, "grad_norm": 6.946423530578613, "learning_rate": 4.7044642857142855e-06, "loss": 0.186, "step": 243 },
    { "epoch": 5.72, "grad_norm": 12.687536239624023, "learning_rate": 4.701190476190476e-06, "loss": 0.1277, "step": 244 },
    { "epoch": 5.74, "grad_norm": 30.949155807495117, "learning_rate": 4.697916666666666e-06, "loss": 0.2875, "step": 245 },
    { "epoch": 5.77, "grad_norm": 11.202219009399414, "learning_rate": 4.694642857142857e-06, "loss": 0.2124, "step": 246 },
    { "epoch": 5.79, "grad_norm": 2.138655185699463, "learning_rate": 4.691369047619047e-06, "loss": 0.067, "step": 247 },
    { "epoch": 5.81, "grad_norm": 10.137479782104492, "learning_rate": 4.688095238095238e-06, "loss": 0.1081, "step": 248 },
    { "epoch": 5.84, "grad_norm": 10.702754020690918, "learning_rate": 4.684821428571429e-06, "loss": 0.1297, "step": 249 },
    { "epoch": 5.86, "grad_norm": 11.548544883728027, "learning_rate": 4.6815476190476185e-06, "loss": 0.1005, "step": 250 },
    { "epoch": 5.88, "grad_norm": 22.75307273864746, "learning_rate": 4.678273809523809e-06, "loss": 0.1593, "step": 251 },
    { "epoch": 5.91, "grad_norm": 9.360298156738281, "learning_rate": 4.675e-06, "loss": 0.0642, "step": 252 },
    { "epoch": 5.93, "grad_norm": 19.112125396728516, "learning_rate": 4.67172619047619e-06, "loss": 0.2293, "step": 253 },
    { "epoch": 5.95, "grad_norm": 28.329204559326172, "learning_rate": 4.668452380952381e-06, "loss": 0.2197, "step": 254 },
    { "epoch": 5.98, "grad_norm": 18.537385940551758, "learning_rate": 4.665178571428571e-06, "loss": 0.3401, "step": 255 },
    { "epoch": 6.0, "grad_norm": 38.293357849121094, "learning_rate": 4.661904761904762e-06, "loss": 0.3536, "step": 256 },
    { "epoch": 6.0, "eval_accuracy": 0.8454545454545455, "eval_loss": 0.488827645778656, "eval_runtime": 1.962, "eval_samples_per_second": 112.13, "eval_steps_per_second": 28.033, "step": 256 },
    { "epoch": 6.02, "grad_norm": 4.5169901847839355, "learning_rate": 4.6586309523809524e-06, "loss": 0.0597, "step": 257 },
    { "epoch": 6.05, "grad_norm": 8.103499412536621, "learning_rate": 4.655357142857142e-06, "loss": 0.0922, "step": 258 },
    { "epoch": 6.07, "grad_norm": 24.916545867919922, "learning_rate": 4.652083333333333e-06, "loss": 0.1184, "step": 259 },
    { "epoch": 6.09, "grad_norm": 20.035037994384766, "learning_rate": 4.648809523809524e-06, "loss": 0.2025, "step": 260 },
    { "epoch": 6.12, "grad_norm": 6.929393291473389, "learning_rate": 4.645535714285714e-06, "loss": 0.2023, "step": 261 },
    { "epoch": 6.14, "grad_norm": 15.74714183807373, "learning_rate": 4.642261904761905e-06, "loss": 0.2039, "step": 262 },
    { "epoch": 6.16, "grad_norm": 10.057015419006348, "learning_rate": 4.6389880952380955e-06, "loss": 0.0961, "step": 263 },
    { "epoch": 6.19, "grad_norm": 1.1386276483535767, "learning_rate": 4.6357142857142855e-06, "loss": 0.0461, "step": 264 },
    { "epoch": 6.21, "grad_norm": 3.349055767059326, "learning_rate": 4.632440476190476e-06, "loss": 0.181, "step": 265 },
    { "epoch": 6.23, "grad_norm": 47.79439163208008, "learning_rate": 4.629166666666666e-06, "loss": 0.1307, "step": 266 },
    { "epoch": 6.26, "grad_norm": 8.228836059570312, "learning_rate": 4.625892857142857e-06, "loss": 0.2784, "step": 267 },
    { "epoch": 6.28, "grad_norm": 9.270495414733887, "learning_rate": 4.622619047619048e-06, "loss": 0.1872, "step": 268 },
    { "epoch": 6.3, "grad_norm": 0.9865421652793884, "learning_rate": 4.619345238095238e-06, "loss": 0.0396, "step": 269 },
    { "epoch": 6.33, "grad_norm": 46.00654602050781, "learning_rate": 4.616071428571429e-06, "loss": 0.2206, "step": 270 },
    { "epoch": 6.35, "grad_norm": 35.620689392089844, "learning_rate": 4.612797619047619e-06, "loss": 0.0977, "step": 271 },
    { "epoch": 6.38, "grad_norm": 16.84827995300293, "learning_rate": 4.609523809523809e-06, "loss": 0.0652, "step": 272 },
    { "epoch": 6.4, "grad_norm": 11.778346061706543, "learning_rate": 4.60625e-06, "loss": 0.0619, "step": 273 },
    { "epoch": 6.42, "grad_norm": 12.670008659362793, "learning_rate": 4.60297619047619e-06, "loss": 0.1638, "step": 274 },
    { "epoch": 6.45, "grad_norm": 3.050675392150879, "learning_rate": 4.599702380952381e-06, "loss": 0.1787, "step": 275 },
    { "epoch": 6.47, "grad_norm": 9.351134300231934, "learning_rate": 4.596428571428572e-06, "loss": 0.2906, "step": 276 },
    { "epoch": 6.49, "grad_norm": 18.061622619628906, "learning_rate": 4.593154761904762e-06, "loss": 0.1441, "step": 277 },
    { "epoch": 6.52, "grad_norm": 5.385902404785156, "learning_rate": 4.5898809523809525e-06, "loss": 0.0326, "step": 278 },
    { "epoch": 6.54, "grad_norm": 1.4941680431365967, "learning_rate": 4.586607142857143e-06, "loss": 0.0304, "step": 279 },
    { "epoch": 6.56, "grad_norm": 8.055174827575684, "learning_rate": 4.583333333333333e-06, "loss": 0.3047, "step": 280 },
    { "epoch": 6.59, "grad_norm": 11.676218032836914, "learning_rate": 4.580059523809524e-06, "loss": 0.4685, "step": 281 },
    { "epoch": 6.61, "grad_norm": 3.277010917663574, "learning_rate": 4.576785714285715e-06, "loss": 0.0433, "step": 282 },
    { "epoch": 6.63, "grad_norm": 14.3773832321167, "learning_rate": 4.573511904761905e-06, "loss": 0.3162, "step": 283 },
    { "epoch": 6.66, "grad_norm": 5.165549278259277, "learning_rate": 4.5702380952380956e-06, "loss": 0.1855, "step": 284 },
    { "epoch": 6.68, "grad_norm": 23.437685012817383, "learning_rate": 4.5669642857142855e-06, "loss": 0.12, "step": 285 },
    { "epoch": 6.7, "grad_norm": 3.0389316082000732, "learning_rate": 4.563690476190476e-06, "loss": 0.0439, "step": 286 },
    { "epoch": 6.73, "grad_norm": 35.046241760253906, "learning_rate": 4.560416666666667e-06, "loss": 0.1244, "step": 287 },
    { "epoch": 6.75, "grad_norm": 28.730819702148438, "learning_rate": 4.557142857142857e-06, "loss": 0.2398, "step": 288 },
    { "epoch": 6.77, "grad_norm": 19.567346572875977, "learning_rate": 4.553869047619047e-06, "loss": 0.0952, "step": 289 },
    { "epoch": 6.8, "grad_norm": 17.299070358276367, "learning_rate": 4.550595238095238e-06, "loss": 0.2421, "step": 290 },
    { "epoch": 6.82, "grad_norm": 4.0813422203063965, "learning_rate": 4.547321428571428e-06, "loss": 0.1717, "step": 291 },
    { "epoch": 6.84, "grad_norm": 15.247352600097656, "learning_rate": 4.544047619047619e-06, "loss": 0.1287, "step": 292 },
    { "epoch": 6.87, "grad_norm": 22.969430923461914, "learning_rate": 4.540773809523809e-06, "loss": 0.0944, "step": 293 },
    { "epoch": 6.89, "grad_norm": 14.135355949401855, "learning_rate": 4.537499999999999e-06, "loss": 0.0501, "step": 294 },
    { "epoch": 6.91, "grad_norm": 6.1480607986450195, "learning_rate": 4.53422619047619e-06, "loss": 0.0491, "step": 295 },
    { "epoch": 6.94, "grad_norm": 5.824121952056885, "learning_rate": 4.530952380952381e-06, "loss": 0.4394, "step": 296 },
    { "epoch": 6.96, "grad_norm": 3.691617727279663, "learning_rate": 4.527678571428571e-06, "loss": 0.0313, "step": 297 },
    { "epoch": 6.98, "grad_norm": 1.4261701107025146, "learning_rate": 4.524404761904762e-06, "loss": 0.1709, "step": 298 },
    { "epoch": 6.98, "eval_accuracy": 0.85, "eval_loss": 0.5375058054924011, "eval_runtime": 1.9645, "eval_samples_per_second": 111.989, "eval_steps_per_second": 27.997, "step": 298 },
    { "epoch": 7.01, "grad_norm": 37.57842254638672, "learning_rate": 4.5211309523809525e-06, "loss": 0.299, "step": 299 },
    { "epoch": 7.03, "grad_norm": 19.698734283447266, "learning_rate": 4.5178571428571424e-06, "loss": 0.3081, "step": 300 },
    { "epoch": 7.05, "grad_norm": 0.6386685371398926, "learning_rate": 4.514583333333333e-06, "loss": 0.024, "step": 301 },
    { "epoch": 7.08, "grad_norm": 1.1291331052780151, "learning_rate": 4.511309523809523e-06, "loss": 0.0343, "step": 302 },
    { "epoch": 7.1, "grad_norm": 3.5354042053222656, "learning_rate": 4.508035714285714e-06, "loss": 0.1509, "step": 303 },
    { "epoch": 7.12, "grad_norm": 0.7393876314163208, "learning_rate": 4.504761904761905e-06, "loss": 0.0237, "step": 304 },
    { "epoch": 7.15, "grad_norm": 2.4936347007751465, "learning_rate": 4.501488095238095e-06, "loss": 0.0299, "step": 305 },
    { "epoch": 7.17, "grad_norm": 28.873003005981445, "learning_rate": 4.4982142857142855e-06, "loss": 0.0815, "step": 306 },
    { "epoch": 7.2, "grad_norm": 53.65907669067383, "learning_rate": 4.494940476190476e-06, "loss": 0.1271, "step": 307 },
    { "epoch": 7.22, "grad_norm": 0.4335859715938568, "learning_rate": 4.491666666666666e-06, "loss": 0.0183, "step": 308 },
    { "epoch": 7.24, "grad_norm": 2.026947259902954, "learning_rate": 4.488392857142857e-06, "loss": 0.1534, "step": 309 },
    { "epoch": 7.27, "grad_norm": 0.4929918348789215, "learning_rate": 4.485119047619047e-06, "loss": 0.0192, "step": 310 },
    { "epoch": 7.29, "grad_norm": 24.899736404418945, "learning_rate": 4.481845238095238e-06, "loss": 0.0524, "step": 311 },
    { "epoch": 7.31, "grad_norm": 0.6321877837181091, "learning_rate": 4.478571428571429e-06, "loss": 0.0205, "step": 312 },
    { "epoch": 7.34, "grad_norm": 1.5749492645263672, "learning_rate": 4.475297619047619e-06, "loss": 0.1671, "step": 313 },
    { "epoch": 7.36, "grad_norm": 0.4469403624534607, "learning_rate": 4.472023809523809e-06, "loss": 0.0154, "step": 314 },
    { "epoch": 7.38, "grad_norm": 10.251415252685547, "learning_rate": 4.46875e-06, "loss": 0.1924, "step": 315 },
    { "epoch": 7.41, "grad_norm": 4.918264865875244, "learning_rate": 4.46547619047619e-06, "loss": 0.499, "step": 316 },
    { "epoch": 7.43, "grad_norm": 0.3266950845718384, "learning_rate": 4.462202380952381e-06, "loss": 0.0131, "step": 317 },
    { "epoch": 7.45, "grad_norm": 11.629830360412598, "learning_rate": 4.458928571428572e-06, "loss": 0.3543, "step": 318 },
    { "epoch": 7.48, "grad_norm": 35.164634704589844, "learning_rate": 4.455654761904762e-06, "loss": 0.1511, "step": 319 },
    { "epoch": 7.5, "grad_norm": 0.5766786932945251, "learning_rate": 4.4523809523809525e-06, "loss": 0.0165, "step": 320 },
    { "epoch": 7.52, "grad_norm": 30.383398056030273, "learning_rate": 4.4491071428571425e-06, "loss": 0.152, "step": 321 },
    { "epoch": 7.55, "grad_norm": 26.80906867980957, "learning_rate": 4.445833333333333e-06, "loss": 0.2207, "step": 322 },
    { "epoch": 7.57, "grad_norm": 0.6242391467094421, "learning_rate": 4.442559523809524e-06, "loss": 0.0129, "step": 323 },
    { "epoch": 7.59, "grad_norm": 8.999675750732422, "learning_rate": 4.439285714285714e-06, "loss": 0.1625, "step": 324 },
    { "epoch": 7.62, "grad_norm": 0.30194684863090515, "learning_rate": 4.436011904761905e-06, "loss": 0.0107, "step": 325 },
    { "epoch": 7.64, "grad_norm": 11.341878890991211, "learning_rate": 4.432738095238096e-06, "loss": 0.1591, "step": 326 },
    { "epoch": 7.66, "grad_norm": 1.6119003295898438, "learning_rate": 4.4294642857142856e-06, "loss": 0.0128, "step": 327 },
    { "epoch": 7.69, "grad_norm": 58.44689178466797, "learning_rate": 4.426190476190476e-06, "loss": 0.1724, "step": 328 },
    { "epoch": 7.71, "grad_norm": 1.108935832977295, "learning_rate": 4.422916666666666e-06, "loss": 0.0137, "step": 329 },
    { "epoch": 7.73, "grad_norm": 0.3343135118484497, "learning_rate": 4.419642857142857e-06, "loss": 0.0115, "step": 330 },
    { "epoch": 7.76, "grad_norm": 2.9117400646209717, "learning_rate": 4.416369047619048e-06, "loss": 0.0137, "step": 331 },
    { "epoch": 7.78, "grad_norm": 0.3950476348400116, "learning_rate": 4.413095238095238e-06, "loss": 0.01, "step": 332 },
    { "epoch": 7.8, "grad_norm": 0.46693718433380127, "learning_rate": 4.409821428571429e-06, "loss": 0.0109, "step": 333 },
    { "epoch": 7.83, "grad_norm": 0.3062843680381775, "learning_rate": 4.4065476190476195e-06, "loss": 0.0082, "step": 334 },
    { "epoch": 7.85, "grad_norm": 66.0848388671875, "learning_rate": 4.4032738095238094e-06, "loss": 0.0962, "step": 335 },
    { "epoch": 7.88, "grad_norm": 0.5519910454750061, "learning_rate": 4.4e-06, "loss": 0.0084, "step": 336 },
    { "epoch": 7.9, "grad_norm": 0.20682856440544128, "learning_rate": 4.396726190476191e-06, "loss": 0.0077, "step": 337 },
    { "epoch": 7.92, "grad_norm": 23.140066146850586, "learning_rate": 4.393452380952381e-06, "loss": 0.0632, "step": 338 },
    { "epoch": 7.95, "grad_norm": 1.7450965642929077, "learning_rate": 4.390178571428572e-06, "loss": 0.0097, "step": 339 },
    { "epoch": 7.97, "grad_norm": 14.685700416564941, "learning_rate": 4.386904761904762e-06, "loss": 0.1616, "step": 340 },
    { "epoch": 7.99, "grad_norm": 0.1476498693227768, "learning_rate": 4.3836309523809525e-06, "loss": 0.0067, "step": 341 },
    { "epoch": 7.99, "eval_accuracy": 0.8863636363636364, "eval_loss": 0.5613145232200623, "eval_runtime": 1.9469, "eval_samples_per_second": 113.002, "eval_steps_per_second": 28.251, "step": 341 },
    { "epoch": 8.02, "grad_norm": 0.2792944312095642, "learning_rate": 4.3803571428571425e-06, "loss": 0.0069, "step": 342 },
    { "epoch": 8.04, "grad_norm": 0.5173578262329102, "learning_rate": 4.377083333333333e-06, "loss": 0.0073, "step": 343 },
    { "epoch": 8.06, "grad_norm": 2.705310344696045, "learning_rate": 4.373809523809523e-06, "loss": 0.0116, "step": 344 },
    { "epoch": 8.09, "grad_norm": 9.17689037322998, "learning_rate": 4.370535714285714e-06, "loss": 0.1767, "step": 345 },
    { "epoch": 8.11, "grad_norm": 3.7868285179138184, "learning_rate": 4.367261904761904e-06, "loss": 0.0087, "step": 346 },
    { "epoch": 8.13, "grad_norm": 0.3689323663711548, "learning_rate": 4.363988095238095e-06, "loss": 0.0061, "step": 347 },
    { "epoch": 8.16, "grad_norm": 0.20344328880310059, "learning_rate": 4.360714285714286e-06, "loss": 0.0057, "step": 348 },
    { "epoch": 8.18, "grad_norm": 37.69071578979492, "learning_rate": 4.3574404761904755e-06, "loss": 0.2596, "step": 349 },
    { "epoch": 8.2, "grad_norm": 0.16801539063453674, "learning_rate": 4.354166666666666e-06, "loss": 0.0051, "step": 350 },
    { "epoch": 8.23, "grad_norm": 5.060468673706055, "learning_rate": 4.350892857142857e-06, "loss": 0.2096, "step": 351 },
    { "epoch": 8.25, "grad_norm": 1.643596887588501, "learning_rate": 4.347619047619047e-06, "loss": 0.0076, "step": 352 },
    { "epoch": 8.27, "grad_norm": 0.7824279069900513, "learning_rate": 4.344345238095238e-06, "loss": 0.0059, "step": 353 },
    { "epoch": 8.3, "grad_norm": 2.7976343631744385, "learning_rate": 4.341071428571429e-06, "loss": 0.0067, "step": 354 },
    { "epoch": 8.32, "grad_norm": 13.010490417480469, "learning_rate": 4.337797619047619e-06, "loss": 0.3109, "step": 355 },
    { "epoch": 8.34, "grad_norm": 0.10449977964162827, "learning_rate": 4.3345238095238094e-06, "loss": 0.0039, "step": 356 },
    { "epoch": 8.37, "grad_norm": 0.4954240024089813, "learning_rate": 4.331249999999999e-06, "loss": 0.0064, "step": 357 },
    { "epoch": 8.39, "grad_norm": 1.7399401664733887, "learning_rate": 4.32797619047619e-06, "loss": 0.0056, "step": 358 },
    { "epoch": 8.41, "grad_norm": 7.1369147300720215, "learning_rate": 4.324702380952381e-06, "loss": 0.2385, "step": 359 },
    { "epoch": 8.44, "grad_norm": 23.376176834106445, "learning_rate": 4.321428571428571e-06, "loss": 0.3003, "step": 360 },
    { "epoch": 8.46, "grad_norm": 0.34425580501556396, "learning_rate": 4.318154761904762e-06, "loss": 0.0044, "step": 361 },
    { "epoch": 8.48, "grad_norm": 0.21413090825080872, "learning_rate": 4.3148809523809526e-06, "loss": 0.004, "step": 362 },
    { "epoch": 8.51, "grad_norm": 3.548433303833008, "learning_rate": 4.3116071428571425e-06, "loss": 0.123, "step": 363 },
    { "epoch": 8.53, "grad_norm": 1.6423828601837158, "learning_rate": 4.308333333333333e-06, "loss": 0.2496, "step": 364 },
    { "epoch": 8.55, "grad_norm": 2.283780813217163, "learning_rate": 4.305059523809523e-06, "loss": 0.2397, "step": 365 },
    { "epoch": 8.58, "grad_norm": 0.362973153591156, "learning_rate": 4.301785714285714e-06, "loss": 0.0064, "step": 366 },
    { "epoch": 8.6, "grad_norm": 19.265830993652344, "learning_rate": 4.298511904761905e-06, "loss": 0.0201, "step": 367 },
    { "epoch": 8.62, "grad_norm": 0.20540741086006165, "learning_rate": 4.295238095238095e-06, "loss": 0.0047, "step": 368 },
    { "epoch": 8.65, "grad_norm": 3.41279935836792, "learning_rate": 4.291964285714286e-06, "loss": 0.0088, "step": 369 },
    { "epoch": 8.67, "grad_norm": 10.215970039367676, "learning_rate": 4.288690476190476e-06, "loss": 0.0162, "step": 370 },
    { "epoch": 8.7, "grad_norm": 0.7728336453437805, "learning_rate": 4.285416666666666e-06, "loss": 0.008, "step": 371 },
    { "epoch": 8.72, "grad_norm": 2.603362798690796, "learning_rate": 4.282142857142857e-06, "loss": 0.0122, "step": 372 },
    { "epoch": 8.74, "grad_norm": 58.749488830566406, "learning_rate": 4.278869047619048e-06, "loss": 0.304, "step": 373 },
    { "epoch": 8.77, "grad_norm": 8.352728843688965, "learning_rate": 4.275595238095238e-06, "loss": 0.1469, "step": 374 },
    { "epoch": 8.79, "grad_norm": 0.2930574417114258, "learning_rate": 4.272321428571429e-06, "loss": 0.0067, "step": 375 },
    { "epoch": 8.81, "grad_norm": 0.6395189166069031, "learning_rate": 4.269047619047619e-06, "loss": 0.0114, "step": 376 },
    { "epoch": 8.84, "grad_norm": 21.4405517578125, "learning_rate": 4.2657738095238095e-06, "loss": 0.0245, "step": 377 },
    { "epoch": 8.86, "grad_norm": 20.98697280883789, "learning_rate": 4.2625e-06, "loss": 0.2003, "step": 378 },
    { "epoch": 8.88, "grad_norm": 0.16704310476779938, "learning_rate": 4.25922619047619e-06, "loss": 0.0045, "step": 379 },
    { "epoch": 8.91, "grad_norm": 0.30824193358421326, "learning_rate": 4.255952380952381e-06, "loss": 0.0063, "step": 380 },
    { "epoch": 8.93, "grad_norm": 38.134681701660156, "learning_rate": 4.252678571428572e-06, "loss": 0.0395, "step": 381 },
    { "epoch": 8.95, "grad_norm": 0.32065123319625854, "learning_rate": 4.249404761904762e-06, "loss": 0.0055, "step": 382 },
    { "epoch": 8.98, "grad_norm": 2.818316698074341, "learning_rate": 4.2461309523809526e-06, "loss": 0.0077, "step": 383 },
    { "epoch": 9.0, "grad_norm": 8.902009010314941, "learning_rate": 4.2428571428571425e-06, "loss": 0.0103, "step": 384 },
    { "epoch": 9.0, "eval_accuracy": 0.8772727272727273, "eval_loss": 0.6385995149612427, "eval_runtime": 1.9711, "eval_samples_per_second": 111.612, "eval_steps_per_second": 27.903, "step": 384 },
    { "epoch": 9.02, "grad_norm": 50.5201301574707, "learning_rate": 4.239583333333333e-06, "loss": 0.1206, "step": 385 },
    { "epoch": 9.05, "grad_norm": 0.10049311071634293, "learning_rate": 4.236309523809524e-06, "loss": 0.0038, "step": 386 },
    { "epoch": 9.07, "grad_norm": 0.11850426346063614, "learning_rate": 4.233035714285714e-06, "loss": 0.0038, "step": 387 },
    { "epoch": 9.09, "grad_norm": 0.10592693090438843, "learning_rate": 4.229761904761905e-06, "loss": 0.0037, "step": 388 },
    { "epoch": 9.12, "grad_norm": 0.4136406481266022, "learning_rate": 4.226488095238096e-06, "loss": 0.0049, "step": 389 },
    { "epoch": 9.14, "grad_norm": 0.09526017308235168, "learning_rate": 4.223214285714286e-06, "loss": 0.0034, "step": 390 },
    { "epoch": 9.16, "grad_norm": 0.08932331204414368, "learning_rate": 4.2199404761904764e-06, "loss": 0.0034, "step": 391 },
    { "epoch": 9.19, "grad_norm": 12.363450050354004, "learning_rate": 4.216666666666667e-06, "loss": 0.1276, "step": 392 },
    { "epoch": 9.21, "grad_norm": 0.19276930391788483, "learning_rate": 4.213392857142857e-06, "loss": 0.0036, "step": 393 },
|
{ |
|
"epoch": 9.23, |
|
"grad_norm": 0.10427949577569962, |
|
"learning_rate": 4.210119047619047e-06, |
|
"loss": 0.0041, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 9.26, |
|
"grad_norm": 0.07943836599588394, |
|
"learning_rate": 4.206845238095238e-06, |
|
"loss": 0.0029, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 9.28, |
|
"grad_norm": 0.18391607701778412, |
|
"learning_rate": 4.203571428571428e-06, |
|
"loss": 0.0036, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 9.3, |
|
"grad_norm": 0.3423359990119934, |
|
"learning_rate": 4.200297619047619e-06, |
|
"loss": 0.0037, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 9.33, |
|
"grad_norm": 41.31951904296875, |
|
"learning_rate": 4.1970238095238095e-06, |
|
"loss": 0.215, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 9.35, |
|
"grad_norm": 0.1459679901599884, |
|
"learning_rate": 4.1937499999999994e-06, |
|
"loss": 0.0035, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 9.38, |
|
"grad_norm": 0.0786140188574791, |
|
"learning_rate": 4.19047619047619e-06, |
|
"loss": 0.0027, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 9.4, |
|
"grad_norm": 1.5436954498291016, |
|
"learning_rate": 4.18720238095238e-06, |
|
"loss": 0.086, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 9.42, |
|
"grad_norm": 1.1575084924697876, |
|
"learning_rate": 4.183928571428571e-06, |
|
"loss": 0.0048, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 9.45, |
|
"grad_norm": 0.07273612916469574, |
|
"learning_rate": 4.180654761904762e-06, |
|
"loss": 0.0027, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 9.47, |
|
"grad_norm": 2.0359814167022705, |
|
"learning_rate": 4.177380952380952e-06, |
|
"loss": 0.2268, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 9.49, |
|
"grad_norm": 0.06620334088802338, |
|
"learning_rate": 4.1741071428571426e-06, |
|
"loss": 0.0024, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 9.52, |
|
"grad_norm": 0.9721193313598633, |
|
"learning_rate": 4.170833333333333e-06, |
|
"loss": 0.0038, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 9.54, |
|
"grad_norm": 1.0129694938659668, |
|
"learning_rate": 4.167559523809523e-06, |
|
"loss": 0.0047, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 9.56, |
|
"grad_norm": 2.4980669021606445, |
|
"learning_rate": 4.164285714285714e-06, |
|
"loss": 0.2284, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 9.59, |
|
"grad_norm": 0.1482830047607422, |
|
"learning_rate": 4.161011904761905e-06, |
|
"loss": 0.0033, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 9.61, |
|
"grad_norm": 96.48265075683594, |
|
"learning_rate": 4.157738095238095e-06, |
|
"loss": 0.0618, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 9.63, |
|
"grad_norm": 0.1294354349374771, |
|
"learning_rate": 4.154464285714286e-06, |
|
"loss": 0.0031, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 9.66, |
|
"grad_norm": 55.35332107543945, |
|
"learning_rate": 4.151190476190476e-06, |
|
"loss": 0.1618, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 9.68, |
|
"grad_norm": 3.1412291526794434, |
|
"learning_rate": 4.147916666666666e-06, |
|
"loss": 0.1818, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 9.7, |
|
"grad_norm": 2.8581583499908447, |
|
"learning_rate": 4.144642857142857e-06, |
|
"loss": 0.0895, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 9.73, |
|
"grad_norm": 0.1566578447818756, |
|
"learning_rate": 4.141369047619047e-06, |
|
"loss": 0.004, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 9.75, |
|
"grad_norm": 0.20300696790218353, |
|
"learning_rate": 4.138095238095238e-06, |
|
"loss": 0.0045, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 9.77, |
|
"grad_norm": 28.00294303894043, |
|
"learning_rate": 4.134821428571429e-06, |
|
"loss": 0.0955, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 9.8, |
|
"grad_norm": 4.9537553787231445, |
|
"learning_rate": 4.131547619047619e-06, |
|
"loss": 0.0108, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 9.82, |
|
"grad_norm": 1.2664889097213745, |
|
"learning_rate": 4.1282738095238095e-06, |
|
"loss": 0.0144, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 9.84, |
|
"grad_norm": 21.156566619873047, |
|
"learning_rate": 4.125e-06, |
|
"loss": 0.0984, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 9.87, |
|
"grad_norm": 14.108423233032227, |
|
"learning_rate": 4.12172619047619e-06, |
|
"loss": 0.0181, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 9.89, |
|
"grad_norm": 0.3327002227306366, |
|
"learning_rate": 4.118452380952381e-06, |
|
"loss": 0.0059, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 9.91, |
|
"grad_norm": 0.26034343242645264, |
|
"learning_rate": 4.115178571428571e-06, |
|
"loss": 0.0047, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 9.94, |
|
"grad_norm": 0.19648778438568115, |
|
"learning_rate": 4.111904761904762e-06, |
|
"loss": 0.0043, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 9.96, |
|
"grad_norm": 95.51915740966797, |
|
"learning_rate": 4.108630952380953e-06, |
|
"loss": 0.0498, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 9.98, |
|
"grad_norm": 0.3881193995475769, |
|
"learning_rate": 4.1053571428571426e-06, |
|
"loss": 0.0037, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 9.98, |
|
"eval_accuracy": 0.8636363636363636, |
|
"eval_loss": 0.6795494556427002, |
|
"eval_runtime": 1.9632, |
|
"eval_samples_per_second": 112.063, |
|
"eval_steps_per_second": 28.016, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 10.01, |
|
"grad_norm": 117.20142364501953, |
|
"learning_rate": 4.102083333333333e-06, |
|
"loss": 0.0613, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 10.03, |
|
"grad_norm": 0.3005952537059784, |
|
"learning_rate": 4.098809523809524e-06, |
|
"loss": 0.0036, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 10.05, |
|
"grad_norm": 0.22291749715805054, |
|
"learning_rate": 4.095535714285714e-06, |
|
"loss": 0.004, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 10.08, |
|
"grad_norm": 0.16808579862117767, |
|
"learning_rate": 4.092261904761905e-06, |
|
"loss": 0.0035, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 10.1, |
|
"grad_norm": 0.153959259390831, |
|
"learning_rate": 4.088988095238095e-06, |
|
"loss": 0.0036, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 10.12, |
|
"grad_norm": 0.32211458683013916, |
|
"learning_rate": 4.085714285714286e-06, |
|
"loss": 0.0038, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 10.15, |
|
"grad_norm": 1.331497311592102, |
|
"learning_rate": 4.0824404761904765e-06, |
|
"loss": 0.0044, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 10.17, |
|
"grad_norm": 0.26392656564712524, |
|
"learning_rate": 4.0791666666666664e-06, |
|
"loss": 0.0032, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 10.2, |
|
"grad_norm": 0.24153968691825867, |
|
"learning_rate": 4.075892857142857e-06, |
|
"loss": 0.0029, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 10.22, |
|
"grad_norm": 52.36206817626953, |
|
"learning_rate": 4.072619047619048e-06, |
|
"loss": 0.0645, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 10.24, |
|
"grad_norm": 0.12544922530651093, |
|
"learning_rate": 4.069345238095238e-06, |
|
"loss": 0.0025, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 10.27, |
|
"grad_norm": 0.14890314638614655, |
|
"learning_rate": 4.066071428571429e-06, |
|
"loss": 0.0024, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 10.29, |
|
"grad_norm": 0.050663262605667114, |
|
"learning_rate": 4.06279761904762e-06, |
|
"loss": 0.002, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 10.31, |
|
"grad_norm": 0.07523107528686523, |
|
"learning_rate": 4.0595238095238095e-06, |
|
"loss": 0.0023, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 10.34, |
|
"grad_norm": 0.04829227551817894, |
|
"learning_rate": 4.05625e-06, |
|
"loss": 0.0017, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 10.36, |
|
"grad_norm": 0.0691770538687706, |
|
"learning_rate": 4.05297619047619e-06, |
|
"loss": 0.0023, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 10.38, |
|
"grad_norm": 0.06425254791975021, |
|
"learning_rate": 4.049702380952381e-06, |
|
"loss": 0.002, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 10.41, |
|
"grad_norm": 0.061768341809511185, |
|
"learning_rate": 4.046428571428572e-06, |
|
"loss": 0.0021, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 10.43, |
|
"grad_norm": 2.5151658058166504, |
|
"learning_rate": 4.043154761904762e-06, |
|
"loss": 0.1136, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 10.45, |
|
"grad_norm": 0.042002830654382706, |
|
"learning_rate": 4.039880952380953e-06, |
|
"loss": 0.0016, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 10.48, |
|
"grad_norm": 0.064276322722435, |
|
"learning_rate": 4.036607142857143e-06, |
|
"loss": 0.0022, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 10.5, |
|
"grad_norm": 0.06543342024087906, |
|
"learning_rate": 4.0333333333333326e-06, |
|
"loss": 0.0018, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 10.52, |
|
"grad_norm": 0.04980894550681114, |
|
"learning_rate": 4.030059523809523e-06, |
|
"loss": 0.0017, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 10.55, |
|
"grad_norm": 0.11392680555582047, |
|
"learning_rate": 4.026785714285714e-06, |
|
"loss": 0.0024, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 10.57, |
|
"grad_norm": 0.15568284690380096, |
|
"learning_rate": 4.023511904761904e-06, |
|
"loss": 0.0021, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 10.59, |
|
"grad_norm": 0.05342809110879898, |
|
"learning_rate": 4.020238095238095e-06, |
|
"loss": 0.0018, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 10.62, |
|
"grad_norm": 9.609216690063477, |
|
"learning_rate": 4.016964285714286e-06, |
|
"loss": 0.1925, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 10.64, |
|
"grad_norm": 18.55946922302246, |
|
"learning_rate": 4.013690476190476e-06, |
|
"loss": 0.0191, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 10.66, |
|
"grad_norm": 1.1658024787902832, |
|
"learning_rate": 4.0104166666666665e-06, |
|
"loss": 0.0035, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 10.69, |
|
"grad_norm": 0.062038108706474304, |
|
"learning_rate": 4.007142857142857e-06, |
|
"loss": 0.0017, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 10.71, |
|
"grad_norm": 0.1960655152797699, |
|
"learning_rate": 4.003869047619047e-06, |
|
"loss": 0.0018, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 10.73, |
|
"grad_norm": 0.9941339492797852, |
|
"learning_rate": 4.000595238095238e-06, |
|
"loss": 0.0802, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 10.76, |
|
"grad_norm": 0.2770668864250183, |
|
"learning_rate": 3.997321428571428e-06, |
|
"loss": 0.002, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 10.78, |
|
"grad_norm": 0.07205989211797714, |
|
"learning_rate": 3.994047619047619e-06, |
|
"loss": 0.0018, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 10.8, |
|
"grad_norm": 0.17621174454689026, |
|
"learning_rate": 3.9907738095238096e-06, |
|
"loss": 0.0026, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 10.83, |
|
"grad_norm": 0.6454585790634155, |
|
"learning_rate": 3.9874999999999995e-06, |
|
"loss": 0.0023, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 10.85, |
|
"grad_norm": 0.9011671543121338, |
|
"learning_rate": 3.98422619047619e-06, |
|
"loss": 0.0775, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 10.88, |
|
"grad_norm": 0.19559772312641144, |
|
"learning_rate": 3.980952380952381e-06, |
|
"loss": 0.0018, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 10.9, |
|
"grad_norm": 0.06330468505620956, |
|
"learning_rate": 3.977678571428571e-06, |
|
"loss": 0.0016, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 10.92, |
|
"grad_norm": 0.05253860726952553, |
|
"learning_rate": 3.974404761904762e-06, |
|
"loss": 0.0015, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 10.95, |
|
"grad_norm": 0.16123701632022858, |
|
"learning_rate": 3.971130952380952e-06, |
|
"loss": 0.0019, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 10.97, |
|
"grad_norm": 2.395909070968628, |
|
"learning_rate": 3.967857142857143e-06, |
|
"loss": 0.2863, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 10.99, |
|
"grad_norm": 2.7706680297851562, |
|
"learning_rate": 3.964583333333333e-06, |
|
"loss": 0.2554, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 10.99, |
|
"eval_accuracy": 0.8590909090909091, |
|
"eval_loss": 0.861154317855835, |
|
"eval_runtime": 1.9879, |
|
"eval_samples_per_second": 110.668, |
|
"eval_steps_per_second": 27.667, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 11.02, |
|
"grad_norm": 0.11244534701108932, |
|
"learning_rate": 3.961309523809523e-06, |
|
"loss": 0.0019, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 11.04, |
|
"grad_norm": 0.27306148409843445, |
|
"learning_rate": 3.958035714285714e-06, |
|
"loss": 0.0025, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 11.06, |
|
"grad_norm": 0.06686495989561081, |
|
"learning_rate": 3.954761904761905e-06, |
|
"loss": 0.0017, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 11.09, |
|
"grad_norm": 0.07426581531763077, |
|
"learning_rate": 3.951488095238095e-06, |
|
"loss": 0.002, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 11.11, |
|
"grad_norm": 0.06276604533195496, |
|
"learning_rate": 3.948214285714286e-06, |
|
"loss": 0.0017, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 11.13, |
|
"grad_norm": 114.83184051513672, |
|
"learning_rate": 3.9449404761904765e-06, |
|
"loss": 0.0446, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 11.16, |
|
"grad_norm": 0.07929594069719315, |
|
"learning_rate": 3.9416666666666665e-06, |
|
"loss": 0.0019, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 11.18, |
|
"grad_norm": 3.3116915225982666, |
|
"learning_rate": 3.938392857142857e-06, |
|
"loss": 0.2205, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 11.2, |
|
"grad_norm": 0.2510410249233246, |
|
"learning_rate": 3.935119047619047e-06, |
|
"loss": 0.0028, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 11.23, |
|
"grad_norm": 0.11125629395246506, |
|
"learning_rate": 3.931845238095238e-06, |
|
"loss": 0.0021, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 11.25, |
|
"grad_norm": 0.33865758776664734, |
|
"learning_rate": 3.928571428571429e-06, |
|
"loss": 0.0036, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 11.27, |
|
"grad_norm": 1.4806885719299316, |
|
"learning_rate": 3.925297619047619e-06, |
|
"loss": 0.0062, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 11.3, |
|
"grad_norm": 0.4448486268520355, |
|
"learning_rate": 3.92202380952381e-06, |
|
"loss": 0.0026, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 11.32, |
|
"grad_norm": 3.580939769744873, |
|
"learning_rate": 3.91875e-06, |
|
"loss": 0.1848, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 11.34, |
|
"grad_norm": 0.07441940158605576, |
|
"learning_rate": 3.91547619047619e-06, |
|
"loss": 0.0019, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 11.37, |
|
"grad_norm": 0.05166240781545639, |
|
"learning_rate": 3.912202380952381e-06, |
|
"loss": 0.0017, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 11.39, |
|
"grad_norm": 0.08746666461229324, |
|
"learning_rate": 3.908928571428571e-06, |
|
"loss": 0.0021, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 11.41, |
|
"grad_norm": 0.2311132699251175, |
|
"learning_rate": 3.905654761904762e-06, |
|
"loss": 0.0029, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 11.44, |
|
"grad_norm": 0.10750468820333481, |
|
"learning_rate": 3.902380952380953e-06, |
|
"loss": 0.002, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 11.46, |
|
"grad_norm": 0.29891398549079895, |
|
"learning_rate": 3.899107142857143e-06, |
|
"loss": 0.003, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 11.48, |
|
"grad_norm": 0.13320636749267578, |
|
"learning_rate": 3.8958333333333334e-06, |
|
"loss": 0.0025, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 11.51, |
|
"grad_norm": 0.11678657680749893, |
|
"learning_rate": 3.892559523809524e-06, |
|
"loss": 0.0028, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 11.53, |
|
"grad_norm": 0.15591733157634735, |
|
"learning_rate": 3.889285714285714e-06, |
|
"loss": 0.0021, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 11.55, |
|
"grad_norm": 35.02848434448242, |
|
"learning_rate": 3.886011904761905e-06, |
|
"loss": 0.0162, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 11.58, |
|
"grad_norm": 14.46584415435791, |
|
"learning_rate": 3.882738095238096e-06, |
|
"loss": 0.0833, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 11.6, |
|
"grad_norm": 0.19305245578289032, |
|
"learning_rate": 3.879464285714286e-06, |
|
"loss": 0.002, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 11.62, |
|
"grad_norm": 0.814598023891449, |
|
"learning_rate": 3.8761904761904765e-06, |
|
"loss": 0.0697, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 11.65, |
|
"grad_norm": 0.03925960883498192, |
|
"learning_rate": 3.8729166666666665e-06, |
|
"loss": 0.0015, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 11.67, |
|
"grad_norm": 0.07029083371162415, |
|
"learning_rate": 3.869642857142857e-06, |
|
"loss": 0.0017, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 11.7, |
|
"grad_norm": 0.048684656620025635, |
|
"learning_rate": 3.866369047619047e-06, |
|
"loss": 0.0016, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 11.72, |
|
"grad_norm": 0.07016553729772568, |
|
"learning_rate": 3.863095238095238e-06, |
|
"loss": 0.002, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 11.74, |
|
"grad_norm": 0.10889595001935959, |
|
"learning_rate": 3.859821428571428e-06, |
|
"loss": 0.0018, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 11.77, |
|
"grad_norm": 0.04481940343976021, |
|
"learning_rate": 3.856547619047619e-06, |
|
"loss": 0.0015, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 11.79, |
|
"grad_norm": 0.05054694786667824, |
|
"learning_rate": 3.853273809523809e-06, |
|
"loss": 0.0016, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 11.81, |
|
"grad_norm": 0.07786347717046738, |
|
"learning_rate": 3.8499999999999996e-06, |
|
"loss": 0.0015, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 11.84, |
|
"grad_norm": 0.10747917741537094, |
|
"learning_rate": 3.84672619047619e-06, |
|
"loss": 0.002, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 11.86, |
|
"grad_norm": 3.685096263885498, |
|
"learning_rate": 3.84345238095238e-06, |
|
"loss": 0.0047, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 11.88, |
|
"grad_norm": 0.8624734878540039, |
|
"learning_rate": 3.840178571428571e-06, |
|
"loss": 0.0707, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 11.91, |
|
"grad_norm": 0.03694400563836098, |
|
"learning_rate": 3.836904761904762e-06, |
|
"loss": 0.0014, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 11.93, |
|
"grad_norm": 0.08262762427330017, |
|
"learning_rate": 3.833630952380952e-06, |
|
"loss": 0.0015, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 11.95, |
|
"grad_norm": 0.06174321845173836, |
|
"learning_rate": 3.830357142857143e-06, |
|
"loss": 0.0014, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 11.98, |
|
"grad_norm": 0.04213571548461914, |
|
"learning_rate": 3.8270833333333335e-06, |
|
"loss": 0.0013, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 12.0, |
|
"grad_norm": 4.956207275390625, |
|
"learning_rate": 3.823809523809523e-06, |
|
"loss": 0.2121, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 12.0, |
|
"eval_accuracy": 0.8909090909090909, |
|
"eval_loss": 0.7056229114532471, |
|
"eval_runtime": 1.9577, |
|
"eval_samples_per_second": 112.375, |
|
"eval_steps_per_second": 28.094, |
|
"step": 512 |
|
} |
  ],
  "logging_steps": 1,
  "max_steps": 1680,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 40,
  "save_steps": 500,
  "total_flos": 5202308973417984.0,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}
|