InLegalLLaMA-Instruct / trainer_state.json
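This is the Hugging Face Trainer state for the InLegalLLaMA-Instruct fine-tune: `log_history` records training loss, gradient norm and learning rate every 5 optimizer steps over roughly 3 epochs (2724 global steps, eval every 500 steps). Below is a minimal sketch for inspecting the log, assuming the file is saved locally as trainer_state.json and that matplotlib is available; note that Python's json module tolerates the bare NaN that appears in some grad_norm entries, while strict JSON parsers may not.

```python
# Minimal sketch: load this trainer_state.json and plot the logged
# training loss and learning-rate schedule against the global step.
# Field names (log_history, step, loss, learning_rate) are the ones
# used in this file; file path and plotting choices are assumptions.
import json

import matplotlib.pyplot as plt

with open("trainer_state.json") as f:
    state = json.load(f)  # Python's json accepts the NaN grad_norm values

# Keep only the periodic training-log entries (those carrying a loss).
logs = [e for e in state["log_history"] if "loss" in e]
steps = [e["step"] for e in logs]
losses = [e["loss"] for e in logs]
lrs = [e["learning_rate"] for e in logs]

fig, (ax_loss, ax_lr) = plt.subplots(2, 1, sharex=True, figsize=(8, 6))
ax_loss.plot(steps, losses)
ax_loss.set_ylabel("training loss")
ax_lr.plot(steps, lrs)
ax_lr.set_ylabel("learning rate")
ax_lr.set_xlabel("global step")
plt.tight_layout()
plt.show()
```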
{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 2.999174236168456,
"eval_steps": 500,
"global_step": 2724,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.01,
"grad_norm": 4.723361968994141,
"learning_rate": 1.5000000000000002e-07,
"loss": 3.607,
"step": 5
},
{
"epoch": 0.01,
"grad_norm": 2.801504611968994,
"learning_rate": 3.0000000000000004e-07,
"loss": 3.6149,
"step": 10
},
{
"epoch": 0.02,
"grad_norm": 5.641049861907959,
"learning_rate": 4.5e-07,
"loss": 3.933,
"step": 15
},
{
"epoch": 0.02,
"grad_norm": 5.0696587562561035,
"learning_rate": 6.000000000000001e-07,
"loss": 3.5431,
"step": 20
},
{
"epoch": 0.03,
"grad_norm": 6.305172920227051,
"learning_rate": 7.5e-07,
"loss": 3.5036,
"step": 25
},
{
"epoch": 0.03,
"grad_norm": 5.998531818389893,
"learning_rate": 9e-07,
"loss": 3.7618,
"step": 30
},
{
"epoch": 0.04,
"grad_norm": 4.491389751434326,
"learning_rate": 1.0500000000000001e-06,
"loss": 3.3489,
"step": 35
},
{
"epoch": 0.04,
"grad_norm": 5.935448169708252,
"learning_rate": 1.2000000000000002e-06,
"loss": 3.9085,
"step": 40
},
{
"epoch": 0.05,
"grad_norm": 5.686679363250732,
"learning_rate": 1.35e-06,
"loss": 3.6536,
"step": 45
},
{
"epoch": 0.06,
"grad_norm": 4.586507797241211,
"learning_rate": 1.5e-06,
"loss": 3.4313,
"step": 50
},
{
"epoch": 0.06,
"grad_norm": 4.8034772872924805,
"learning_rate": 1.65e-06,
"loss": 3.3082,
"step": 55
},
{
"epoch": 0.07,
"grad_norm": 5.47461462020874,
"learning_rate": 1.8e-06,
"loss": 3.5749,
"step": 60
},
{
"epoch": 0.07,
"grad_norm": 6.304111957550049,
"learning_rate": 1.95e-06,
"loss": 3.5684,
"step": 65
},
{
"epoch": 0.08,
"grad_norm": 6.649425983428955,
"learning_rate": 2.1000000000000002e-06,
"loss": 3.25,
"step": 70
},
{
"epoch": 0.08,
"grad_norm": 6.285486698150635,
"learning_rate": 2.25e-06,
"loss": 3.0361,
"step": 75
},
{
"epoch": 0.09,
"grad_norm": 8.364280700683594,
"learning_rate": 2.4000000000000003e-06,
"loss": 3.4918,
"step": 80
},
{
"epoch": 0.09,
"grad_norm": 8.540417671203613,
"learning_rate": 2.55e-06,
"loss": 3.2424,
"step": 85
},
{
"epoch": 0.1,
"grad_norm": 3.8771581649780273,
"learning_rate": 2.7e-06,
"loss": 2.7386,
"step": 90
},
{
"epoch": 0.1,
"grad_norm": 6.713310241699219,
"learning_rate": 2.8500000000000002e-06,
"loss": 2.5229,
"step": 95
},
{
"epoch": 0.11,
"grad_norm": 7.163495063781738,
"learning_rate": 3e-06,
"loss": 2.3543,
"step": 100
},
{
"epoch": 0.12,
"grad_norm": 6.106879234313965,
"learning_rate": 3.15e-06,
"loss": 2.1774,
"step": 105
},
{
"epoch": 0.12,
"grad_norm": 6.056279182434082,
"learning_rate": 3.3e-06,
"loss": 2.102,
"step": 110
},
{
"epoch": 0.13,
"grad_norm": 6.470534324645996,
"learning_rate": 3.4500000000000004e-06,
"loss": 1.8336,
"step": 115
},
{
"epoch": 0.13,
"grad_norm": 5.0930376052856445,
"learning_rate": 3.6e-06,
"loss": 1.4314,
"step": 120
},
{
"epoch": 0.14,
"grad_norm": 4.7424397468566895,
"learning_rate": 3.75e-06,
"loss": 1.3507,
"step": 125
},
{
"epoch": 0.14,
"grad_norm": 4.056792736053467,
"learning_rate": 3.9e-06,
"loss": 1.2108,
"step": 130
},
{
"epoch": 0.15,
"grad_norm": 3.122858762741089,
"learning_rate": 4.05e-06,
"loss": 0.7046,
"step": 135
},
{
"epoch": 0.15,
"grad_norm": 2.3814079761505127,
"learning_rate": 4.2000000000000004e-06,
"loss": 0.7918,
"step": 140
},
{
"epoch": 0.16,
"grad_norm": 3.257763385772705,
"learning_rate": 4.35e-06,
"loss": 0.8256,
"step": 145
},
{
"epoch": 0.17,
"grad_norm": 2.871866464614868,
"learning_rate": 4.5e-06,
"loss": 0.6759,
"step": 150
},
{
"epoch": 0.17,
"grad_norm": 3.0206196308135986,
"learning_rate": 4.65e-06,
"loss": 0.7936,
"step": 155
},
{
"epoch": 0.18,
"grad_norm": 2.52962589263916,
"learning_rate": 4.800000000000001e-06,
"loss": 0.5798,
"step": 160
},
{
"epoch": 0.18,
"grad_norm": 1.2966934442520142,
"learning_rate": 4.95e-06,
"loss": 0.6066,
"step": 165
},
{
"epoch": 0.19,
"grad_norm": 1.9257962703704834,
"learning_rate": 5.1e-06,
"loss": 0.6452,
"step": 170
},
{
"epoch": 0.19,
"grad_norm": 2.903165340423584,
"learning_rate": 5.25e-06,
"loss": 0.6818,
"step": 175
},
{
"epoch": 0.2,
"grad_norm": 2.3169002532958984,
"learning_rate": 5.4e-06,
"loss": 0.7669,
"step": 180
},
{
"epoch": 0.2,
"grad_norm": 2.465702533721924,
"learning_rate": 5.55e-06,
"loss": 0.5847,
"step": 185
},
{
"epoch": 0.21,
"grad_norm": 2.0977537631988525,
"learning_rate": 5.7000000000000005e-06,
"loss": 0.6548,
"step": 190
},
{
"epoch": 0.21,
"grad_norm": 2.9947569370269775,
"learning_rate": 5.850000000000001e-06,
"loss": 0.6992,
"step": 195
},
{
"epoch": 0.22,
"grad_norm": 2.6077871322631836,
"learning_rate": 6e-06,
"loss": 0.6392,
"step": 200
},
{
"epoch": 0.23,
"grad_norm": 2.76350736618042,
"learning_rate": 6.1499999999999996e-06,
"loss": 0.5201,
"step": 205
},
{
"epoch": 0.23,
"grad_norm": 1.9421017169952393,
"learning_rate": 6.3e-06,
"loss": 0.5464,
"step": 210
},
{
"epoch": 0.24,
"grad_norm": 1.9033386707305908,
"learning_rate": 6.45e-06,
"loss": 0.567,
"step": 215
},
{
"epoch": 0.24,
"grad_norm": 2.0922622680664062,
"learning_rate": 6.6e-06,
"loss": 0.6302,
"step": 220
},
{
"epoch": 0.25,
"grad_norm": 2.7234456539154053,
"learning_rate": 6.750000000000001e-06,
"loss": 0.8481,
"step": 225
},
{
"epoch": 0.25,
"grad_norm": 2.4856104850769043,
"learning_rate": 6.900000000000001e-06,
"loss": 0.5044,
"step": 230
},
{
"epoch": 0.26,
"grad_norm": 2.776841163635254,
"learning_rate": 7.049999999999999e-06,
"loss": 0.4163,
"step": 235
},
{
"epoch": 0.26,
"grad_norm": 1.360962986946106,
"learning_rate": 7.2e-06,
"loss": 0.5952,
"step": 240
},
{
"epoch": 0.27,
"grad_norm": 1.9951967000961304,
"learning_rate": 7.35e-06,
"loss": 0.4798,
"step": 245
},
{
"epoch": 0.28,
"grad_norm": 1.2397233247756958,
"learning_rate": 7.5e-06,
"loss": 0.3873,
"step": 250
},
{
"epoch": 0.28,
"grad_norm": 1.793684720993042,
"learning_rate": 7.65e-06,
"loss": 0.5231,
"step": 255
},
{
"epoch": 0.29,
"grad_norm": 4.247454643249512,
"learning_rate": 7.8e-06,
"loss": 0.4825,
"step": 260
},
{
"epoch": 0.29,
"grad_norm": 2.6619770526885986,
"learning_rate": 7.95e-06,
"loss": 0.4743,
"step": 265
},
{
"epoch": 0.3,
"grad_norm": 2.1707732677459717,
"learning_rate": 8.1e-06,
"loss": 0.3325,
"step": 270
},
{
"epoch": 0.3,
"grad_norm": 1.470760464668274,
"learning_rate": 8.25e-06,
"loss": 0.6128,
"step": 275
},
{
"epoch": 0.31,
"grad_norm": 2.647947072982788,
"learning_rate": 8.400000000000001e-06,
"loss": 0.6124,
"step": 280
},
{
"epoch": 0.31,
"grad_norm": 5.327182769775391,
"learning_rate": 8.55e-06,
"loss": 0.5567,
"step": 285
},
{
"epoch": 0.32,
"grad_norm": 2.314610719680786,
"learning_rate": 8.7e-06,
"loss": 0.4464,
"step": 290
},
{
"epoch": 0.32,
"grad_norm": 2.571409225463867,
"learning_rate": 8.85e-06,
"loss": 0.5127,
"step": 295
},
{
"epoch": 0.33,
"grad_norm": 2.6803112030029297,
"learning_rate": 9e-06,
"loss": 0.4206,
"step": 300
},
{
"epoch": 0.34,
"grad_norm": 1.8423502445220947,
"learning_rate": 9.15e-06,
"loss": 0.4846,
"step": 305
},
{
"epoch": 0.34,
"grad_norm": 1.603908658027649,
"learning_rate": 9.3e-06,
"loss": 0.5434,
"step": 310
},
{
"epoch": 0.35,
"grad_norm": 2.6442675590515137,
"learning_rate": 9.450000000000001e-06,
"loss": 0.56,
"step": 315
},
{
"epoch": 0.35,
"grad_norm": 4.274781703948975,
"learning_rate": 9.600000000000001e-06,
"loss": 0.4854,
"step": 320
},
{
"epoch": 0.36,
"grad_norm": 3.596944570541382,
"learning_rate": 9.75e-06,
"loss": 0.6743,
"step": 325
},
{
"epoch": 0.36,
"grad_norm": 2.466118574142456,
"learning_rate": 9.9e-06,
"loss": 0.5149,
"step": 330
},
{
"epoch": 0.37,
"grad_norm": 1.8980193138122559,
"learning_rate": 1.005e-05,
"loss": 0.5721,
"step": 335
},
{
"epoch": 0.37,
"grad_norm": 1.9017683267593384,
"learning_rate": 1.02e-05,
"loss": 0.4637,
"step": 340
},
{
"epoch": 0.38,
"grad_norm": 2.220961809158325,
"learning_rate": 1.035e-05,
"loss": 0.4672,
"step": 345
},
{
"epoch": 0.39,
"grad_norm": 2.758296489715576,
"learning_rate": 1.05e-05,
"loss": 0.4269,
"step": 350
},
{
"epoch": 0.39,
"grad_norm": 1.6211260557174683,
"learning_rate": 1.065e-05,
"loss": 0.4208,
"step": 355
},
{
"epoch": 0.4,
"grad_norm": 1.5319430828094482,
"learning_rate": 1.08e-05,
"loss": 0.5012,
"step": 360
},
{
"epoch": 0.4,
"grad_norm": 2.5233569145202637,
"learning_rate": 1.095e-05,
"loss": 0.2889,
"step": 365
},
{
"epoch": 0.41,
"grad_norm": 1.5497692823410034,
"learning_rate": 1.11e-05,
"loss": 0.3052,
"step": 370
},
{
"epoch": 0.41,
"grad_norm": 3.8792238235473633,
"learning_rate": 1.125e-05,
"loss": 0.6235,
"step": 375
},
{
"epoch": 0.42,
"grad_norm": 2.3589935302734375,
"learning_rate": 1.1400000000000001e-05,
"loss": 0.5508,
"step": 380
},
{
"epoch": 0.42,
"grad_norm": 2.8691861629486084,
"learning_rate": 1.1550000000000001e-05,
"loss": 0.4495,
"step": 385
},
{
"epoch": 0.43,
"grad_norm": 2.3704771995544434,
"learning_rate": 1.1700000000000001e-05,
"loss": 0.4749,
"step": 390
},
{
"epoch": 0.43,
"grad_norm": 2.102457046508789,
"learning_rate": 1.185e-05,
"loss": 0.4324,
"step": 395
},
{
"epoch": 0.44,
"grad_norm": 6.848997116088867,
"learning_rate": 1.2e-05,
"loss": 0.4796,
"step": 400
},
{
"epoch": 0.45,
"grad_norm": 1.6692250967025757,
"learning_rate": 1.215e-05,
"loss": 0.3403,
"step": 405
},
{
"epoch": 0.45,
"grad_norm": 2.407634973526001,
"learning_rate": 1.2299999999999999e-05,
"loss": 0.4553,
"step": 410
},
{
"epoch": 0.46,
"grad_norm": 2.6080591678619385,
"learning_rate": 1.245e-05,
"loss": 0.3708,
"step": 415
},
{
"epoch": 0.46,
"grad_norm": 2.4701273441314697,
"learning_rate": 1.26e-05,
"loss": 0.3112,
"step": 420
},
{
"epoch": 0.47,
"grad_norm": 2.7998530864715576,
"learning_rate": 1.275e-05,
"loss": 0.3935,
"step": 425
},
{
"epoch": 0.47,
"grad_norm": 2.0203258991241455,
"learning_rate": 1.29e-05,
"loss": 0.3994,
"step": 430
},
{
"epoch": 0.48,
"grad_norm": 2.6193652153015137,
"learning_rate": 1.305e-05,
"loss": 0.5095,
"step": 435
},
{
"epoch": 0.48,
"grad_norm": 1.9212027788162231,
"learning_rate": 1.32e-05,
"loss": 0.3615,
"step": 440
},
{
"epoch": 0.49,
"grad_norm": 3.352229595184326,
"learning_rate": 1.3350000000000001e-05,
"loss": 0.6153,
"step": 445
},
{
"epoch": 0.5,
"grad_norm": 1.7962478399276733,
"learning_rate": 1.3500000000000001e-05,
"loss": 0.6112,
"step": 450
},
{
"epoch": 0.5,
"grad_norm": 4.245258331298828,
"learning_rate": 1.3650000000000001e-05,
"loss": 0.5036,
"step": 455
},
{
"epoch": 0.51,
"grad_norm": 1.9633055925369263,
"learning_rate": 1.3800000000000002e-05,
"loss": 0.5212,
"step": 460
},
{
"epoch": 0.51,
"grad_norm": 2.65956711769104,
"learning_rate": 1.395e-05,
"loss": 0.4709,
"step": 465
},
{
"epoch": 0.52,
"grad_norm": 1.38247811794281,
"learning_rate": 1.4099999999999999e-05,
"loss": 0.7051,
"step": 470
},
{
"epoch": 0.52,
"grad_norm": 1.2942965030670166,
"learning_rate": 1.4249999999999999e-05,
"loss": 0.5219,
"step": 475
},
{
"epoch": 0.53,
"grad_norm": 1.6196554899215698,
"learning_rate": 1.44e-05,
"loss": 0.4838,
"step": 480
},
{
"epoch": 0.53,
"grad_norm": 2.9407804012298584,
"learning_rate": 1.455e-05,
"loss": 0.4163,
"step": 485
},
{
"epoch": 0.54,
"grad_norm": 2.036487340927124,
"learning_rate": 1.47e-05,
"loss": 0.3517,
"step": 490
},
{
"epoch": 0.55,
"grad_norm": 4.017712116241455,
"learning_rate": 1.485e-05,
"loss": 0.3407,
"step": 495
},
{
"epoch": 0.55,
"grad_norm": 1.1127279996871948,
"learning_rate": 1.5e-05,
"loss": 0.5691,
"step": 500
},
{
"epoch": 0.56,
"grad_norm": 2.890312910079956,
"learning_rate": 1.515e-05,
"loss": 0.3602,
"step": 505
},
{
"epoch": 0.56,
"grad_norm": 2.6266424655914307,
"learning_rate": 1.53e-05,
"loss": 0.2707,
"step": 510
},
{
"epoch": 0.57,
"grad_norm": 1.9139853715896606,
"learning_rate": 1.545e-05,
"loss": 0.4348,
"step": 515
},
{
"epoch": 0.57,
"grad_norm": 1.4371010065078735,
"learning_rate": 1.56e-05,
"loss": 0.4869,
"step": 520
},
{
"epoch": 0.58,
"grad_norm": 2.060549020767212,
"learning_rate": 1.575e-05,
"loss": 0.446,
"step": 525
},
{
"epoch": 0.58,
"grad_norm": 2.383223295211792,
"learning_rate": 1.59e-05,
"loss": 0.4541,
"step": 530
},
{
"epoch": 0.59,
"grad_norm": 3.066714286804199,
"learning_rate": 1.605e-05,
"loss": 0.654,
"step": 535
},
{
"epoch": 0.59,
"grad_norm": 1.502321481704712,
"learning_rate": 1.62e-05,
"loss": 0.6171,
"step": 540
},
{
"epoch": 0.6,
"grad_norm": 3.31970477104187,
"learning_rate": 1.635e-05,
"loss": 0.4521,
"step": 545
},
{
"epoch": 0.61,
"grad_norm": 2.2695887088775635,
"learning_rate": 1.65e-05,
"loss": 0.5606,
"step": 550
},
{
"epoch": 0.61,
"grad_norm": 1.9954079389572144,
"learning_rate": 1.665e-05,
"loss": 0.4517,
"step": 555
},
{
"epoch": 0.62,
"grad_norm": 3.29400372505188,
"learning_rate": 1.6800000000000002e-05,
"loss": 0.6205,
"step": 560
},
{
"epoch": 0.62,
"grad_norm": 2.401451587677002,
"learning_rate": 1.695e-05,
"loss": 0.5424,
"step": 565
},
{
"epoch": 0.63,
"grad_norm": 3.064767837524414,
"learning_rate": 1.71e-05,
"loss": 0.5388,
"step": 570
},
{
"epoch": 0.63,
"grad_norm": 2.0694704055786133,
"learning_rate": 1.725e-05,
"loss": 0.3733,
"step": 575
},
{
"epoch": 0.64,
"grad_norm": 1.1812100410461426,
"learning_rate": 1.74e-05,
"loss": 0.4173,
"step": 580
},
{
"epoch": 0.64,
"grad_norm": 1.7811343669891357,
"learning_rate": 1.755e-05,
"loss": 0.7022,
"step": 585
},
{
"epoch": 0.65,
"grad_norm": 1.5441101789474487,
"learning_rate": 1.77e-05,
"loss": 0.6539,
"step": 590
},
{
"epoch": 0.66,
"grad_norm": 2.039987087249756,
"learning_rate": 1.785e-05,
"loss": 0.3081,
"step": 595
},
{
"epoch": 0.66,
"grad_norm": 2.5088701248168945,
"learning_rate": 1.8e-05,
"loss": 0.3314,
"step": 600
},
{
"epoch": 0.67,
"grad_norm": 2.6933419704437256,
"learning_rate": 1.815e-05,
"loss": 0.5642,
"step": 605
},
{
"epoch": 0.67,
"grad_norm": 1.9799847602844238,
"learning_rate": 1.83e-05,
"loss": 0.3405,
"step": 610
},
{
"epoch": 0.68,
"grad_norm": 2.641113758087158,
"learning_rate": 1.845e-05,
"loss": 0.4678,
"step": 615
},
{
"epoch": 0.68,
"grad_norm": 1.5051275491714478,
"learning_rate": 1.86e-05,
"loss": 0.5578,
"step": 620
},
{
"epoch": 0.69,
"grad_norm": 1.5316920280456543,
"learning_rate": 1.8750000000000002e-05,
"loss": 0.5345,
"step": 625
},
{
"epoch": 0.69,
"grad_norm": 0.8923272490501404,
"learning_rate": 1.8900000000000002e-05,
"loss": 0.4797,
"step": 630
},
{
"epoch": 0.7,
"grad_norm": 1.6083885431289673,
"learning_rate": 1.9050000000000002e-05,
"loss": 0.4117,
"step": 635
},
{
"epoch": 0.7,
"grad_norm": 1.3130501508712769,
"learning_rate": 1.9200000000000003e-05,
"loss": 0.4314,
"step": 640
},
{
"epoch": 0.71,
"grad_norm": 1.2643862962722778,
"learning_rate": 1.935e-05,
"loss": 0.3503,
"step": 645
},
{
"epoch": 0.72,
"grad_norm": 2.3432857990264893,
"learning_rate": 1.95e-05,
"loss": 0.4664,
"step": 650
},
{
"epoch": 0.72,
"grad_norm": 1.6133382320404053,
"learning_rate": 1.965e-05,
"loss": 0.5415,
"step": 655
},
{
"epoch": 0.73,
"grad_norm": 2.1628336906433105,
"learning_rate": 1.98e-05,
"loss": 0.3583,
"step": 660
},
{
"epoch": 0.73,
"grad_norm": 1.1896861791610718,
"learning_rate": 1.995e-05,
"loss": 0.3906,
"step": 665
},
{
"epoch": 0.74,
"grad_norm": 1.399902582168579,
"learning_rate": 2.01e-05,
"loss": 0.5711,
"step": 670
},
{
"epoch": 0.74,
"grad_norm": 1.9324532747268677,
"learning_rate": 2.025e-05,
"loss": 0.447,
"step": 675
},
{
"epoch": 0.75,
"grad_norm": 1.4938113689422607,
"learning_rate": 2.04e-05,
"loss": 0.6049,
"step": 680
},
{
"epoch": 0.75,
"grad_norm": 0.6185623407363892,
"learning_rate": 2.055e-05,
"loss": 0.44,
"step": 685
},
{
"epoch": 0.76,
"grad_norm": 1.0526788234710693,
"learning_rate": 2.07e-05,
"loss": 0.5259,
"step": 690
},
{
"epoch": 0.77,
"grad_norm": 1.5097798109054565,
"learning_rate": 2.085e-05,
"loss": 0.4513,
"step": 695
},
{
"epoch": 0.77,
"grad_norm": 1.4816968441009521,
"learning_rate": 2.1e-05,
"loss": 0.6861,
"step": 700
},
{
"epoch": 0.78,
"grad_norm": 2.5907509326934814,
"learning_rate": 2.115e-05,
"loss": 0.5984,
"step": 705
},
{
"epoch": 0.78,
"grad_norm": 1.7099113464355469,
"learning_rate": 2.13e-05,
"loss": 0.2523,
"step": 710
},
{
"epoch": 0.79,
"grad_norm": 1.2753609418869019,
"learning_rate": 2.145e-05,
"loss": 0.5409,
"step": 715
},
{
"epoch": 0.79,
"grad_norm": 0.9064136147499084,
"learning_rate": 2.16e-05,
"loss": 0.3004,
"step": 720
},
{
"epoch": 0.8,
"grad_norm": 1.4393867254257202,
"learning_rate": 2.175e-05,
"loss": 0.4156,
"step": 725
},
{
"epoch": 0.8,
"grad_norm": 1.8317877054214478,
"learning_rate": 2.19e-05,
"loss": 0.5019,
"step": 730
},
{
"epoch": 0.81,
"grad_norm": 1.2695151567459106,
"learning_rate": 2.205e-05,
"loss": 0.6344,
"step": 735
},
{
"epoch": 0.81,
"grad_norm": 0.9795133471488953,
"learning_rate": 2.22e-05,
"loss": 0.4634,
"step": 740
},
{
"epoch": 0.82,
"grad_norm": 2.9170186519622803,
"learning_rate": 2.235e-05,
"loss": 0.5323,
"step": 745
},
{
"epoch": 0.83,
"grad_norm": 1.4109234809875488,
"learning_rate": 2.25e-05,
"loss": 0.4622,
"step": 750
},
{
"epoch": 0.83,
"grad_norm": 1.0275310277938843,
"learning_rate": 2.265e-05,
"loss": 0.4223,
"step": 755
},
{
"epoch": 0.84,
"grad_norm": 2.929098129272461,
"learning_rate": 2.2800000000000002e-05,
"loss": 0.4939,
"step": 760
},
{
"epoch": 0.84,
"grad_norm": 1.730406641960144,
"learning_rate": 2.2950000000000002e-05,
"loss": 0.6818,
"step": 765
},
{
"epoch": 0.85,
"grad_norm": 0.8586449027061462,
"learning_rate": 2.3100000000000002e-05,
"loss": 0.5406,
"step": 770
},
{
"epoch": 0.85,
"grad_norm": 1.2498224973678589,
"learning_rate": 2.3250000000000003e-05,
"loss": 0.3736,
"step": 775
},
{
"epoch": 0.86,
"grad_norm": 0.6030346751213074,
"learning_rate": 2.3400000000000003e-05,
"loss": 0.5458,
"step": 780
},
{
"epoch": 0.86,
"grad_norm": 1.4750096797943115,
"learning_rate": 2.3550000000000003e-05,
"loss": 0.5234,
"step": 785
},
{
"epoch": 0.87,
"grad_norm": 1.3055192232131958,
"learning_rate": 2.37e-05,
"loss": 0.5177,
"step": 790
},
{
"epoch": 0.88,
"grad_norm": 1.990779995918274,
"learning_rate": 2.385e-05,
"loss": 0.4851,
"step": 795
},
{
"epoch": 0.88,
"grad_norm": 2.6766905784606934,
"learning_rate": 2.4e-05,
"loss": 0.5505,
"step": 800
},
{
"epoch": 0.89,
"grad_norm": 2.486478090286255,
"learning_rate": 2.415e-05,
"loss": 0.4049,
"step": 805
},
{
"epoch": 0.89,
"grad_norm": 1.191136360168457,
"learning_rate": 2.43e-05,
"loss": 0.3537,
"step": 810
},
{
"epoch": 0.9,
"grad_norm": 1.3190600872039795,
"learning_rate": 2.4449999999999998e-05,
"loss": 0.5542,
"step": 815
},
{
"epoch": 0.9,
"grad_norm": 1.4110169410705566,
"learning_rate": 2.4599999999999998e-05,
"loss": 0.4228,
"step": 820
},
{
"epoch": 0.91,
"grad_norm": 2.436192512512207,
"learning_rate": 2.475e-05,
"loss": 0.5922,
"step": 825
},
{
"epoch": 0.91,
"grad_norm": 2.8494598865509033,
"learning_rate": 2.49e-05,
"loss": 0.6034,
"step": 830
},
{
"epoch": 0.92,
"grad_norm": 1.5710737705230713,
"learning_rate": 2.505e-05,
"loss": 0.2955,
"step": 835
},
{
"epoch": 0.92,
"grad_norm": 2.6547062397003174,
"learning_rate": 2.52e-05,
"loss": 0.4286,
"step": 840
},
{
"epoch": 0.93,
"grad_norm": 1.1925177574157715,
"learning_rate": 2.535e-05,
"loss": 0.4727,
"step": 845
},
{
"epoch": 0.94,
"grad_norm": 1.3642709255218506,
"learning_rate": 2.55e-05,
"loss": 0.406,
"step": 850
},
{
"epoch": 0.94,
"grad_norm": 1.1482707262039185,
"learning_rate": 2.565e-05,
"loss": 0.4667,
"step": 855
},
{
"epoch": 0.95,
"grad_norm": 2.0437512397766113,
"learning_rate": 2.58e-05,
"loss": 0.5288,
"step": 860
},
{
"epoch": 0.95,
"grad_norm": 1.0377088785171509,
"learning_rate": 2.595e-05,
"loss": 0.3604,
"step": 865
},
{
"epoch": 0.96,
"grad_norm": 2.0166819095611572,
"learning_rate": 2.61e-05,
"loss": 0.4019,
"step": 870
},
{
"epoch": 0.96,
"grad_norm": 1.1202322244644165,
"learning_rate": 2.625e-05,
"loss": 0.397,
"step": 875
},
{
"epoch": 0.97,
"grad_norm": 1.1196401119232178,
"learning_rate": 2.64e-05,
"loss": 0.3056,
"step": 880
},
{
"epoch": 0.97,
"grad_norm": 1.8753234148025513,
"learning_rate": 2.655e-05,
"loss": 0.3347,
"step": 885
},
{
"epoch": 0.98,
"grad_norm": 2.1503918170928955,
"learning_rate": 2.6700000000000002e-05,
"loss": 0.6701,
"step": 890
},
{
"epoch": 0.99,
"grad_norm": 1.2472453117370605,
"learning_rate": 2.6850000000000002e-05,
"loss": 0.513,
"step": 895
},
{
"epoch": 0.99,
"grad_norm": 1.2424818277359009,
"learning_rate": 2.7000000000000002e-05,
"loss": 0.3437,
"step": 900
},
{
"epoch": 1.0,
"grad_norm": 1.36464524269104,
"learning_rate": 2.7150000000000003e-05,
"loss": 0.5147,
"step": 905
},
{
"epoch": 1.0,
"grad_norm": 2.0129966735839844,
"learning_rate": 2.7300000000000003e-05,
"loss": 0.5189,
"step": 910
},
{
"epoch": 1.01,
"grad_norm": 3.107072114944458,
"learning_rate": 2.7450000000000003e-05,
"loss": 0.527,
"step": 915
},
{
"epoch": 1.01,
"grad_norm": 1.9841065406799316,
"learning_rate": 2.7600000000000003e-05,
"loss": 0.3458,
"step": 920
},
{
"epoch": 1.02,
"grad_norm": 1.4352362155914307,
"learning_rate": 2.7750000000000004e-05,
"loss": 0.4081,
"step": 925
},
{
"epoch": 1.02,
"grad_norm": 1.5732287168502808,
"learning_rate": 2.79e-05,
"loss": 0.6428,
"step": 930
},
{
"epoch": 1.03,
"grad_norm": 1.5462892055511475,
"learning_rate": 2.805e-05,
"loss": 0.5453,
"step": 935
},
{
"epoch": 1.03,
"grad_norm": 2.118161201477051,
"learning_rate": 2.8199999999999998e-05,
"loss": 0.5021,
"step": 940
},
{
"epoch": 1.04,
"grad_norm": 2.953655481338501,
"learning_rate": 2.8349999999999998e-05,
"loss": 0.4168,
"step": 945
},
{
"epoch": 1.05,
"grad_norm": 0.539341390132904,
"learning_rate": 2.8499999999999998e-05,
"loss": 0.5838,
"step": 950
},
{
"epoch": 1.05,
"grad_norm": 0.8679121732711792,
"learning_rate": 2.865e-05,
"loss": 0.4803,
"step": 955
},
{
"epoch": 1.06,
"grad_norm": 0.6618215441703796,
"learning_rate": 2.88e-05,
"loss": 0.4741,
"step": 960
},
{
"epoch": 1.06,
"grad_norm": 1.349791407585144,
"learning_rate": 2.895e-05,
"loss": 0.539,
"step": 965
},
{
"epoch": 1.07,
"grad_norm": 0.9783216714859009,
"learning_rate": 2.91e-05,
"loss": 0.4964,
"step": 970
},
{
"epoch": 1.07,
"grad_norm": 0.7639239430427551,
"learning_rate": 2.925e-05,
"loss": 0.305,
"step": 975
},
{
"epoch": 1.08,
"grad_norm": 1.376918911933899,
"learning_rate": 2.94e-05,
"loss": 0.415,
"step": 980
},
{
"epoch": 1.08,
"grad_norm": 0.8644680380821228,
"learning_rate": 2.955e-05,
"loss": 0.4427,
"step": 985
},
{
"epoch": 1.09,
"grad_norm": 0.9307350516319275,
"learning_rate": 2.97e-05,
"loss": 0.3651,
"step": 990
},
{
"epoch": 1.1,
"grad_norm": 1.0594663619995117,
"learning_rate": 2.985e-05,
"loss": 0.3768,
"step": 995
},
{
"epoch": 1.1,
"grad_norm": 1.9165922403335571,
"learning_rate": 3e-05,
"loss": 0.3997,
"step": 1000
},
{
"epoch": 1.11,
"grad_norm": 1.4573155641555786,
"learning_rate": 2.999937737939374e-05,
"loss": 0.2581,
"step": 1005
},
{
"epoch": 1.11,
"grad_norm": 1.2896963357925415,
"learning_rate": 2.9997509569262485e-05,
"loss": 0.3903,
"step": 1010
},
{
"epoch": 1.12,
"grad_norm": 1.1831902265548706,
"learning_rate": 2.9994396724664514e-05,
"loss": 0.49,
"step": 1015
},
{
"epoch": 1.12,
"grad_norm": 2.8245599269866943,
"learning_rate": 2.9990039104015984e-05,
"loss": 0.4482,
"step": 1020
},
{
"epoch": 1.13,
"grad_norm": 1.2168867588043213,
"learning_rate": 2.998443706906948e-05,
"loss": 0.5133,
"step": 1025
},
{
"epoch": 1.13,
"grad_norm": 1.0247105360031128,
"learning_rate": 2.9977591084883992e-05,
"loss": 0.4762,
"step": 1030
},
{
"epoch": 1.14,
"grad_norm": 0.7047167420387268,
"learning_rate": 2.9969501719786296e-05,
"loss": 0.6164,
"step": 1035
},
{
"epoch": 1.15,
"grad_norm": 1.1248735189437866,
"learning_rate": 2.9960169645323774e-05,
"loss": 0.3757,
"step": 1040
},
{
"epoch": 1.15,
"grad_norm": 0.8849346041679382,
"learning_rate": 2.9949595636208678e-05,
"loss": 0.3092,
"step": 1045
},
{
"epoch": 1.16,
"grad_norm": 1.1959189176559448,
"learning_rate": 2.9937780570253807e-05,
"loss": 0.3765,
"step": 1050
},
{
"epoch": 1.16,
"grad_norm": 0.845991313457489,
"learning_rate": 2.9924725428299625e-05,
"loss": 0.5164,
"step": 1055
},
{
"epoch": 1.17,
"grad_norm": 0.6847428679466248,
"learning_rate": 2.991043129413285e-05,
"loss": 0.439,
"step": 1060
},
{
"epoch": 1.17,
"grad_norm": 0.9967267513275146,
"learning_rate": 2.9894899354396496e-05,
"loss": 0.4436,
"step": 1065
},
{
"epoch": 1.18,
"grad_norm": 0.8753231167793274,
"learning_rate": 2.9878130898491303e-05,
"loss": 0.3638,
"step": 1070
},
{
"epoch": 1.18,
"grad_norm": 0.7683019042015076,
"learning_rate": 2.9860127318468782e-05,
"loss": 0.5267,
"step": 1075
},
{
"epoch": 1.19,
"grad_norm": 1.2402276992797852,
"learning_rate": 2.9840890108915572e-05,
"loss": 0.66,
"step": 1080
},
{
"epoch": 1.19,
"grad_norm": 0.9644812345504761,
"learning_rate": 2.9820420866829433e-05,
"loss": 0.1985,
"step": 1085
},
{
"epoch": 1.2,
"grad_norm": 1.2949899435043335,
"learning_rate": 2.979872129148661e-05,
"loss": 0.4225,
"step": 1090
},
{
"epoch": 1.21,
"grad_norm": 1.2814561128616333,
"learning_rate": 2.9775793184300798e-05,
"loss": 0.2877,
"step": 1095
},
{
"epoch": 1.21,
"grad_norm": 2.1358540058135986,
"learning_rate": 2.9751638448673612e-05,
"loss": 0.609,
"step": 1100
},
{
"epoch": 1.22,
"grad_norm": NaN,
"learning_rate": 2.9731432831792347e-05,
"loss": 0.5548,
"step": 1105
},
{
"epoch": 1.22,
"grad_norm": 1.0352325439453125,
"learning_rate": 2.9705075286477642e-05,
"loss": 0.5043,
"step": 1110
},
{
"epoch": 1.23,
"grad_norm": 1.3879212141036987,
"learning_rate": 2.9677496983444274e-05,
"loss": 0.3721,
"step": 1115
},
{
"epoch": 1.23,
"grad_norm": 1.3792262077331543,
"learning_rate": 2.9648700212134885e-05,
"loss": 0.4842,
"step": 1120
},
{
"epoch": 1.24,
"grad_norm": 1.1040356159210205,
"learning_rate": 2.961868736314456e-05,
"loss": 0.4203,
"step": 1125
},
{
"epoch": 1.24,
"grad_norm": 1.0859042406082153,
"learning_rate": 2.9587460928022404e-05,
"loss": 0.4129,
"step": 1130
},
{
"epoch": 1.25,
"grad_norm": 0.9380947351455688,
"learning_rate": 2.9555023499064677e-05,
"loss": 0.3209,
"step": 1135
},
{
"epoch": 1.26,
"grad_norm": 0.6131200194358826,
"learning_rate": 2.9521377769099603e-05,
"loss": 0.4211,
"step": 1140
},
{
"epoch": 1.26,
"grad_norm": 1.2759392261505127,
"learning_rate": 2.948652653126382e-05,
"loss": 0.7952,
"step": 1145
},
{
"epoch": 1.27,
"grad_norm": 2.88456130027771,
"learning_rate": 2.9450472678770505e-05,
"loss": 0.5054,
"step": 1150
},
{
"epoch": 1.27,
"grad_norm": 0.833156943321228,
"learning_rate": 2.9413219204669195e-05,
"loss": 0.37,
"step": 1155
},
{
"epoch": 1.28,
"grad_norm": 1.2013107538223267,
"learning_rate": 2.9374769201597305e-05,
"loss": 0.4595,
"step": 1160
},
{
"epoch": 1.28,
"grad_norm": 1.4866896867752075,
"learning_rate": 2.9335125861523395e-05,
"loss": 0.318,
"step": 1165
},
{
"epoch": 1.29,
"grad_norm": 3.088609218597412,
"learning_rate": 2.9294292475482192e-05,
"loss": 0.5222,
"step": 1170
},
{
"epoch": 1.29,
"grad_norm": 1.1780695915222168,
"learning_rate": 2.9252272433301376e-05,
"loss": 0.6204,
"step": 1175
},
{
"epoch": 1.3,
"grad_norm": 0.6830143928527832,
"learning_rate": 2.920906922332016e-05,
"loss": 0.5025,
"step": 1180
},
{
"epoch": 1.3,
"grad_norm": 1.2696876525878906,
"learning_rate": 2.9164686432099713e-05,
"loss": 0.3402,
"step": 1185
},
{
"epoch": 1.31,
"grad_norm": 0.5633934736251831,
"learning_rate": 2.9119127744125428e-05,
"loss": 0.4001,
"step": 1190
},
{
"epoch": 1.32,
"grad_norm": 0.7630770802497864,
"learning_rate": 2.9072396941501023e-05,
"loss": 0.4415,
"step": 1195
},
{
"epoch": 1.32,
"grad_norm": 1.4518195390701294,
"learning_rate": 2.9024497903634584e-05,
"loss": 0.5651,
"step": 1200
},
{
"epoch": 1.33,
"grad_norm": 1.5180546045303345,
"learning_rate": 2.8975434606916515e-05,
"loss": 0.4689,
"step": 1205
},
{
"epoch": 1.33,
"grad_norm": 1.4250961542129517,
"learning_rate": 2.8925211124389423e-05,
"loss": 0.2893,
"step": 1210
},
{
"epoch": 1.34,
"grad_norm": 1.938849687576294,
"learning_rate": 2.887383162540999e-05,
"loss": 0.3407,
"step": 1215
},
{
"epoch": 1.34,
"grad_norm": 1.0794405937194824,
"learning_rate": 2.8821300375302858e-05,
"loss": 0.374,
"step": 1220
},
{
"epoch": 1.35,
"grad_norm": 1.5490267276763916,
"learning_rate": 2.876762173500653e-05,
"loss": 0.5285,
"step": 1225
},
{
"epoch": 1.35,
"grad_norm": 1.1713478565216064,
"learning_rate": 2.8712800160711353e-05,
"loss": 0.5842,
"step": 1230
},
{
"epoch": 1.36,
"grad_norm": 1.5063586235046387,
"learning_rate": 2.8656840203489565e-05,
"loss": 0.4172,
"step": 1235
},
{
"epoch": 1.37,
"grad_norm": 0.477967232465744,
"learning_rate": 2.8599746508917496e-05,
"loss": 0.4052,
"step": 1240
},
{
"epoch": 1.37,
"grad_norm": 1.269463062286377,
"learning_rate": 2.8541523816689917e-05,
"loss": 0.5223,
"step": 1245
},
{
"epoch": 1.38,
"grad_norm": 0.9727627038955688,
"learning_rate": 2.8482176960226555e-05,
"loss": 0.3865,
"step": 1250
},
{
"epoch": 1.38,
"grad_norm": 1.1779907941818237,
"learning_rate": 2.842171086627083e-05,
"loss": 0.4907,
"step": 1255
},
{
"epoch": 1.39,
"grad_norm": 1.321874976158142,
"learning_rate": 2.8360130554480895e-05,
"loss": 0.4799,
"step": 1260
},
{
"epoch": 1.39,
"grad_norm": 1.128961205482483,
"learning_rate": 2.829744113701289e-05,
"loss": 0.2856,
"step": 1265
},
{
"epoch": 1.4,
"grad_norm": 0.7564502358436584,
"learning_rate": 2.8233647818096562e-05,
"loss": 0.4064,
"step": 1270
},
{
"epoch": 1.4,
"grad_norm": 1.3942064046859741,
"learning_rate": 2.8168755893603233e-05,
"loss": 0.4904,
"step": 1275
},
{
"epoch": 1.41,
"grad_norm": 0.8835464715957642,
"learning_rate": 2.8102770750606147e-05,
"loss": 0.4211,
"step": 1280
},
{
"epoch": 1.41,
"grad_norm": 1.4010355472564697,
"learning_rate": 2.8035697866933277e-05,
"loss": 0.3368,
"step": 1285
},
{
"epoch": 1.42,
"grad_norm": 0.8351401686668396,
"learning_rate": 2.7967542810712548e-05,
"loss": 0.4576,
"step": 1290
},
{
"epoch": 1.43,
"grad_norm": 1.1750423908233643,
"learning_rate": 2.789831123990962e-05,
"loss": 0.2992,
"step": 1295
},
{
"epoch": 1.43,
"grad_norm": 0.9141637682914734,
"learning_rate": 2.7828008901858175e-05,
"loss": 0.5354,
"step": 1300
},
{
"epoch": 1.44,
"grad_norm": 1.4612001180648804,
"learning_rate": 2.775664163278278e-05,
"loss": 0.4124,
"step": 1305
},
{
"epoch": 1.44,
"grad_norm": 0.8740494847297668,
"learning_rate": 2.7684215357314428e-05,
"loss": 0.4102,
"step": 1310
},
{
"epoch": 1.45,
"grad_norm": 1.0325684547424316,
"learning_rate": 2.7610736087998648e-05,
"loss": 0.3746,
"step": 1315
},
{
"epoch": 1.45,
"grad_norm": 1.2742629051208496,
"learning_rate": 2.7536209924796407e-05,
"loss": 0.5872,
"step": 1320
},
{
"epoch": 1.46,
"grad_norm": 0.6541928052902222,
"learning_rate": 2.7460643054577684e-05,
"loss": 0.4492,
"step": 1325
},
{
"epoch": 1.46,
"grad_norm": 0.9362463355064392,
"learning_rate": 2.7384041750607895e-05,
"loss": 0.3903,
"step": 1330
},
{
"epoch": 1.47,
"grad_norm": 1.3968509435653687,
"learning_rate": 2.7306412372027082e-05,
"loss": 0.557,
"step": 1335
},
{
"epoch": 1.48,
"grad_norm": 1.368221640586853,
"learning_rate": 2.7227761363322006e-05,
"loss": 0.446,
"step": 1340
},
{
"epoch": 1.48,
"grad_norm": 1.2684072256088257,
"learning_rate": 2.7148095253791174e-05,
"loss": 0.6294,
"step": 1345
},
{
"epoch": 1.49,
"grad_norm": 1.466238260269165,
"learning_rate": 2.706742065700276e-05,
"loss": 0.4555,
"step": 1350
},
{
"epoch": 1.49,
"grad_norm": 1.2599902153015137,
"learning_rate": 2.6985744270245627e-05,
"loss": 0.4199,
"step": 1355
},
{
"epoch": 1.5,
"grad_norm": 0.8082879185676575,
"learning_rate": 2.690307287397329e-05,
"loss": 0.3501,
"step": 1360
},
{
"epoch": 1.5,
"grad_norm": 1.3563495874404907,
"learning_rate": 2.681941333124107e-05,
"loss": 0.4526,
"step": 1365
},
{
"epoch": 1.51,
"grad_norm": 0.7122817039489746,
"learning_rate": 2.6734772587136324e-05,
"loss": 0.4239,
"step": 1370
},
{
"epoch": 1.51,
"grad_norm": 1.2698917388916016,
"learning_rate": 2.664915766820191e-05,
"loss": 0.3165,
"step": 1375
},
{
"epoch": 1.52,
"grad_norm": 1.7459849119186401,
"learning_rate": 2.656257568185286e-05,
"loss": 0.3812,
"step": 1380
},
{
"epoch": 1.52,
"grad_norm": 0.7778897285461426,
"learning_rate": 2.6475033815786353e-05,
"loss": 0.4149,
"step": 1385
},
{
"epoch": 1.53,
"grad_norm": 1.0717759132385254,
"learning_rate": 2.6386539337385012e-05,
"loss": 0.515,
"step": 1390
},
{
"epoch": 1.54,
"grad_norm": 0.9191924929618835,
"learning_rate": 2.629709959311361e-05,
"loss": 0.3977,
"step": 1395
},
{
"epoch": 1.54,
"grad_norm": 1.0447479486465454,
"learning_rate": 2.62067220079092e-05,
"loss": 0.335,
"step": 1400
},
{
"epoch": 1.55,
"grad_norm": 1.2984446287155151,
"learning_rate": 2.6115414084564682e-05,
"loss": 0.5425,
"step": 1405
},
{
"epoch": 1.55,
"grad_norm": 0.9010884761810303,
"learning_rate": 2.6023183403106014e-05,
"loss": 0.4465,
"step": 1410
},
{
"epoch": 1.56,
"grad_norm": 1.8324975967407227,
"learning_rate": 2.59300376201629e-05,
"loss": 0.3131,
"step": 1415
},
{
"epoch": 1.56,
"grad_norm": 1.0951191186904907,
"learning_rate": 2.583598446833319e-05,
"loss": 0.4113,
"step": 1420
},
{
"epoch": 1.57,
"grad_norm": 0.9710536003112793,
"learning_rate": 2.5741031755540932e-05,
"loss": 0.368,
"step": 1425
},
{
"epoch": 1.57,
"grad_norm": 0.8006061911582947,
"learning_rate": 2.564518736438821e-05,
"loss": 0.5833,
"step": 1430
},
{
"epoch": 1.58,
"grad_norm": 0.47641128301620483,
"learning_rate": 2.5548459251500747e-05,
"loss": 0.5812,
"step": 1435
},
{
"epoch": 1.59,
"grad_norm": 0.873199999332428,
"learning_rate": 2.5450855446867384e-05,
"loss": 0.5852,
"step": 1440
},
{
"epoch": 1.59,
"grad_norm": 1.6408307552337646,
"learning_rate": 2.5352384053173453e-05,
"loss": 0.4245,
"step": 1445
},
{
"epoch": 1.6,
"grad_norm": 0.8571744561195374,
"learning_rate": 2.5253053245128135e-05,
"loss": 0.3346,
"step": 1450
},
{
"epoch": 1.6,
"grad_norm": 0.6984312534332275,
"learning_rate": 2.5152871268785813e-05,
"loss": 0.3652,
"step": 1455
},
{
"epoch": 1.61,
"grad_norm": 0.6111830472946167,
"learning_rate": 2.5051846440861545e-05,
"loss": 0.2863,
"step": 1460
},
{
"epoch": 1.61,
"grad_norm": 1.2058312892913818,
"learning_rate": 2.4949987148040608e-05,
"loss": 0.5868,
"step": 1465
},
{
"epoch": 1.62,
"grad_norm": 0.940091609954834,
"learning_rate": 2.4847301846282277e-05,
"loss": 0.4137,
"step": 1470
},
{
"epoch": 1.62,
"grad_norm": 1.831299901008606,
"learning_rate": 2.474379906011788e-05,
"loss": 0.4452,
"step": 1475
},
{
"epoch": 1.63,
"grad_norm": 0.8370305299758911,
"learning_rate": 2.4639487381943075e-05,
"loss": 0.4705,
"step": 1480
},
{
"epoch": 1.64,
"grad_norm": 1.1725839376449585,
"learning_rate": 2.4534375471304563e-05,
"loss": 0.3157,
"step": 1485
},
{
"epoch": 1.64,
"grad_norm": 1.3950480222702026,
"learning_rate": 2.442847205418122e-05,
"loss": 0.4179,
"step": 1490
},
{
"epoch": 1.65,
"grad_norm": 1.3916176557540894,
"learning_rate": 2.4321785922259685e-05,
"loss": 0.5267,
"step": 1495
},
{
"epoch": 1.65,
"grad_norm": 1.6205462217330933,
"learning_rate": 2.42143259322045e-05,
"loss": 0.5793,
"step": 1500
},
{
"epoch": 1.66,
"grad_norm": 1.0348162651062012,
"learning_rate": 2.4106101004922893e-05,
"loss": 0.4365,
"step": 1505
},
{
"epoch": 1.66,
"grad_norm": 0.5200983285903931,
"learning_rate": 2.3997120124824178e-05,
"loss": 0.3882,
"step": 1510
},
{
"epoch": 1.67,
"grad_norm": 1.8840643167495728,
"learning_rate": 2.3887392339073898e-05,
"loss": 0.3899,
"step": 1515
},
{
"epoch": 1.67,
"grad_norm": 1.1195874214172363,
"learning_rate": 2.3776926756842787e-05,
"loss": 0.3071,
"step": 1520
},
{
"epoch": 1.68,
"grad_norm": 0.9049373269081116,
"learning_rate": 2.3665732548550558e-05,
"loss": 0.3486,
"step": 1525
},
{
"epoch": 1.68,
"grad_norm": 1.0285577774047852,
"learning_rate": 2.3553818945104588e-05,
"loss": 0.4388,
"step": 1530
},
{
"epoch": 1.69,
"grad_norm": 1.3187785148620605,
"learning_rate": 2.3441195237133624e-05,
"loss": 0.4953,
"step": 1535
},
{
"epoch": 1.7,
"grad_norm": 1.222777247428894,
"learning_rate": 2.3327870774216515e-05,
"loss": 0.4594,
"step": 1540
},
{
"epoch": 1.7,
"grad_norm": 0.6450194120407104,
"learning_rate": 2.3213854964106035e-05,
"loss": 0.4665,
"step": 1545
},
{
"epoch": 1.71,
"grad_norm": 1.990315318107605,
"learning_rate": 2.309915727194789e-05,
"loss": 0.3313,
"step": 1550
},
{
"epoch": 1.71,
"grad_norm": 1.7571722269058228,
"learning_rate": 2.2983787219494964e-05,
"loss": 0.3571,
"step": 1555
},
{
"epoch": 1.72,
"grad_norm": 0.9484684467315674,
"learning_rate": 2.2867754384316865e-05,
"loss": 0.4045,
"step": 1560
},
{
"epoch": 1.72,
"grad_norm": 0.9541623592376709,
"learning_rate": 2.2751068399004806e-05,
"loss": 0.5165,
"step": 1565
},
{
"epoch": 1.73,
"grad_norm": 1.3149410486221313,
"learning_rate": 2.2633738950371984e-05,
"loss": 0.3404,
"step": 1570
},
{
"epoch": 1.73,
"grad_norm": 0.7877027988433838,
"learning_rate": 2.2515775778649386e-05,
"loss": 0.3857,
"step": 1575
},
{
"epoch": 1.74,
"grad_norm": 0.8127877116203308,
"learning_rate": 2.2397188676677217e-05,
"loss": 0.5441,
"step": 1580
},
{
"epoch": 1.75,
"grad_norm": 1.8573230504989624,
"learning_rate": 2.227798748909191e-05,
"loss": 0.4012,
"step": 1585
},
{
"epoch": 1.75,
"grad_norm": 1.7995444536209106,
"learning_rate": 2.2158182111508904e-05,
"loss": 0.3189,
"step": 1590
},
{
"epoch": 1.76,
"grad_norm": 0.9786883592605591,
"learning_rate": 2.20377824897011e-05,
"loss": 0.4802,
"step": 1595
},
{
"epoch": 1.76,
"grad_norm": 0.9040762186050415,
"learning_rate": 2.191679861877323e-05,
"loss": 0.4147,
"step": 1600
},
{
"epoch": 1.77,
"grad_norm": 0.962874174118042,
"learning_rate": 2.179524054233211e-05,
"loss": 0.3856,
"step": 1605
},
{
"epoch": 1.77,
"grad_norm": 1.2043737173080444,
"learning_rate": 2.1673118351652843e-05,
"loss": 0.5292,
"step": 1610
},
{
"epoch": 1.78,
"grad_norm": 0.5223831534385681,
"learning_rate": 2.1550442184841072e-05,
"loss": 0.4305,
"step": 1615
},
{
"epoch": 1.78,
"grad_norm": 0.9191122055053711,
"learning_rate": 2.1427222225991383e-05,
"loss": 0.472,
"step": 1620
},
{
"epoch": 1.79,
"grad_norm": 1.261473536491394,
"learning_rate": 2.130346870434184e-05,
"loss": 0.4266,
"step": 1625
},
{
"epoch": 1.79,
"grad_norm": 1.202399730682373,
"learning_rate": 2.1179191893424797e-05,
"loss": 0.3269,
"step": 1630
},
{
"epoch": 1.8,
"grad_norm": 0.7132131457328796,
"learning_rate": 2.105440211021404e-05,
"loss": 0.2125,
"step": 1635
},
{
"epoch": 1.81,
"grad_norm": 1.0556139945983887,
"learning_rate": 2.09291097142683e-05,
"loss": 0.3205,
"step": 1640
},
{
"epoch": 1.81,
"grad_norm": 0.7523159980773926,
"learning_rate": 2.0803325106871234e-05,
"loss": 0.3239,
"step": 1645
},
{
"epoch": 1.82,
"grad_norm": 1.4122850894927979,
"learning_rate": 2.0677058730168e-05,
"loss": 0.4792,
"step": 1650
},
{
"epoch": 1.82,
"grad_norm": 1.4816789627075195,
"learning_rate": 2.055032106629831e-05,
"loss": 0.6169,
"step": 1655
},
{
"epoch": 1.83,
"grad_norm": 0.8011027574539185,
"learning_rate": 2.0423122636526325e-05,
"loss": 0.4804,
"step": 1660
},
{
"epoch": 1.83,
"grad_norm": 0.6560605764389038,
"learning_rate": 2.0295474000367173e-05,
"loss": 0.3411,
"step": 1665
},
{
"epoch": 1.84,
"grad_norm": 1.2358989715576172,
"learning_rate": 2.0167385754710346e-05,
"loss": 0.3476,
"step": 1670
},
{
"epoch": 1.84,
"grad_norm": 1.342943787574768,
"learning_rate": 2.0038868532940003e-05,
"loss": 0.3981,
"step": 1675
},
{
"epoch": 1.85,
"grad_norm": 1.1446216106414795,
"learning_rate": 1.990993300405222e-05,
"loss": 0.406,
"step": 1680
},
{
"epoch": 1.86,
"grad_norm": 1.2379969358444214,
"learning_rate": 1.9780589871769272e-05,
"loss": 0.4369,
"step": 1685
},
{
"epoch": 1.86,
"grad_norm": 1.4717028141021729,
"learning_rate": 1.9650849873651102e-05,
"loss": 0.5346,
"step": 1690
},
{
"epoch": 1.87,
"grad_norm": 0.824568510055542,
"learning_rate": 1.952072378020387e-05,
"loss": 0.3544,
"step": 1695
},
{
"epoch": 1.87,
"grad_norm": 1.0470279455184937,
"learning_rate": 1.9390222393985866e-05,
"loss": 0.2773,
"step": 1700
},
{
"epoch": 1.88,
"grad_norm": 0.9954514503479004,
"learning_rate": 1.9259356548710722e-05,
"loss": 0.3972,
"step": 1705
},
{
"epoch": 1.88,
"grad_norm": 0.49930986762046814,
"learning_rate": 1.912813710834803e-05,
"loss": 0.3004,
"step": 1710
},
{
"epoch": 1.89,
"grad_norm": 0.7324523329734802,
"learning_rate": 1.8996574966221453e-05,
"loss": 0.7305,
"step": 1715
},
{
"epoch": 1.89,
"grad_norm": 0.9635807275772095,
"learning_rate": 1.886468104410442e-05,
"loss": 0.4353,
"step": 1720
},
{
"epoch": 1.9,
"grad_norm": 1.794856309890747,
"learning_rate": 1.873246629131343e-05,
"loss": 0.3805,
"step": 1725
},
{
"epoch": 1.9,
"grad_norm": 1.0289092063903809,
"learning_rate": 1.8599941683799087e-05,
"loss": 0.4203,
"step": 1730
},
{
"epoch": 1.91,
"grad_norm": 0.6733255982398987,
"learning_rate": 1.846711822323492e-05,
"loss": 0.3358,
"step": 1735
},
{
"epoch": 1.92,
"grad_norm": 0.44114547967910767,
"learning_rate": 1.8334006936104077e-05,
"loss": 0.4077,
"step": 1740
},
{
"epoch": 1.92,
"grad_norm": 1.2705578804016113,
"learning_rate": 1.8200618872783917e-05,
"loss": 0.2637,
"step": 1745
},
{
"epoch": 1.93,
"grad_norm": 1.1656275987625122,
"learning_rate": 1.8066965106628698e-05,
"loss": 0.2701,
"step": 1750
},
{
"epoch": 1.93,
"grad_norm": 1.0239408016204834,
"learning_rate": 1.7933056733050267e-05,
"loss": 0.3862,
"step": 1755
},
{
"epoch": 1.94,
"grad_norm": 1.3315787315368652,
"learning_rate": 1.7798904868596994e-05,
"loss": 0.4242,
"step": 1760
},
{
"epoch": 1.94,
"grad_norm": 1.190731406211853,
"learning_rate": 1.7664520650030903e-05,
"loss": 0.1688,
"step": 1765
},
{
"epoch": 1.95,
"grad_norm": 1.2880607843399048,
"learning_rate": 1.7529915233403145e-05,
"loss": 0.3489,
"step": 1770
},
{
"epoch": 1.95,
"grad_norm": 0.499487966299057,
"learning_rate": 1.7395099793127865e-05,
"loss": 0.4446,
"step": 1775
},
{
"epoch": 1.96,
"grad_norm": 1.118751049041748,
"learning_rate": 1.726008552105455e-05,
"loss": 0.4587,
"step": 1780
},
{
"epoch": 1.97,
"grad_norm": 1.0625907182693481,
"learning_rate": 1.712488362553893e-05,
"loss": 0.6011,
"step": 1785
},
{
"epoch": 1.97,
"grad_norm": 0.5367158055305481,
"learning_rate": 1.6989505330512484e-05,
"loss": 0.292,
"step": 1790
},
{
"epoch": 1.98,
"grad_norm": 0.6846025586128235,
"learning_rate": 1.6853961874550698e-05,
"loss": 0.396,
"step": 1795
},
{
"epoch": 1.98,
"grad_norm": 1.0111944675445557,
"learning_rate": 1.6718264509940076e-05,
"loss": 0.3749,
"step": 1800
},
{
"epoch": 1.99,
"grad_norm": 1.2397122383117676,
"learning_rate": 1.6582424501743996e-05,
"loss": 0.2241,
"step": 1805
},
{
"epoch": 1.99,
"grad_norm": 1.4244718551635742,
"learning_rate": 1.644645312686757e-05,
"loss": 0.3892,
"step": 1810
},
{
"epoch": 2.0,
"grad_norm": 2.0830795764923096,
"learning_rate": 1.631036167312144e-05,
"loss": 0.4523,
"step": 1815
},
{
"epoch": 2.0,
"grad_norm": 0.6778771281242371,
"learning_rate": 1.617416143828473e-05,
"loss": 0.3769,
"step": 1820
},
{
"epoch": 2.01,
"grad_norm": 0.6887485384941101,
"learning_rate": 1.603786372916714e-05,
"loss": 0.4108,
"step": 1825
},
{
"epoch": 2.01,
"grad_norm": 1.05043625831604,
"learning_rate": 1.5901479860670323e-05,
"loss": 0.4774,
"step": 1830
},
{
"epoch": 2.02,
"grad_norm": 0.8635022044181824,
"learning_rate": 1.576502115484852e-05,
"loss": 0.46,
"step": 1835
},
{
"epoch": 2.03,
"grad_norm": 0.7978819608688354,
"learning_rate": 1.5628498939968686e-05,
"loss": 0.4967,
"step": 1840
},
{
"epoch": 2.03,
"grad_norm": 0.591788113117218,
"learning_rate": 1.549192454957005e-05,
"loss": 0.4264,
"step": 1845
},
{
"epoch": 2.04,
"grad_norm": 0.7562071084976196,
"learning_rate": 1.5355309321523237e-05,
"loss": 0.5705,
"step": 1850
},
{
"epoch": 2.04,
"grad_norm": 0.5904907584190369,
"learning_rate": 1.5218664597089071e-05,
"loss": 0.6413,
"step": 1855
},
{
"epoch": 2.05,
"grad_norm": 1.1089377403259277,
"learning_rate": 1.508200171997704e-05,
"loss": 0.4454,
"step": 1860
},
{
"epoch": 2.05,
"grad_norm": 0.8817293643951416,
"learning_rate": 1.4945332035403587e-05,
"loss": 0.4438,
"step": 1865
},
{
"epoch": 2.06,
"grad_norm": 1.1409955024719238,
"learning_rate": 1.4808666889150303e-05,
"loss": 0.4391,
"step": 1870
},
{
"epoch": 2.06,
"grad_norm": 0.8762648701667786,
"learning_rate": 1.4672017626622023e-05,
"loss": 0.3385,
"step": 1875
},
{
"epoch": 2.07,
"grad_norm": 0.9974231123924255,
"learning_rate": 1.4535395591904958e-05,
"loss": 0.3412,
"step": 1880
},
{
"epoch": 2.08,
"grad_norm": 0.7490105032920837,
"learning_rate": 1.439881212682499e-05,
"loss": 0.4903,
"step": 1885
},
{
"epoch": 2.08,
"grad_norm": 1.0734269618988037,
"learning_rate": 1.4262278570006103e-05,
"loss": 0.5753,
"step": 1890
},
{
"epoch": 2.09,
"grad_norm": 1.4163916110992432,
"learning_rate": 1.4125806255929076e-05,
"loss": 0.2856,
"step": 1895
},
{
"epoch": 2.09,
"grad_norm": 0.6236549019813538,
"learning_rate": 1.3989406513990575e-05,
"loss": 0.286,
"step": 1900
},
{
"epoch": 2.1,
"grad_norm": 0.8455921411514282,
"learning_rate": 1.38530906675626e-05,
"loss": 0.4819,
"step": 1905
},
{
"epoch": 2.1,
"grad_norm": 0.7641191482543945,
"learning_rate": 1.3716870033052476e-05,
"loss": 0.497,
"step": 1910
},
{
"epoch": 2.11,
"grad_norm": 0.9699380993843079,
"learning_rate": 1.358075591896341e-05,
"loss": 0.1826,
"step": 1915
},
{
"epoch": 2.11,
"grad_norm": 1.1498082876205444,
"learning_rate": 1.34447596249557e-05,
"loss": 0.2888,
"step": 1920
},
{
"epoch": 2.12,
"grad_norm": 1.7836366891860962,
"learning_rate": 1.3308892440908678e-05,
"loss": 0.5541,
"step": 1925
},
{
"epoch": 2.12,
"grad_norm": 1.6860030889511108,
"learning_rate": 1.3173165645983482e-05,
"loss": 0.3234,
"step": 1930
},
{
"epoch": 2.13,
"grad_norm": 0.7591866850852966,
"learning_rate": 1.3037590507686695e-05,
"loss": 0.5428,
"step": 1935
},
{
"epoch": 2.14,
"grad_norm": 1.1677800416946411,
"learning_rate": 1.2902178280934948e-05,
"loss": 0.3434,
"step": 1940
},
{
"epoch": 2.14,
"grad_norm": 1.8365012407302856,
"learning_rate": 1.2766940207120609e-05,
"loss": 0.2709,
"step": 1945
},
{
"epoch": 2.15,
"grad_norm": 0.8034731149673462,
"learning_rate": 1.2631887513178544e-05,
"loss": 0.5227,
"step": 1950
},
{
"epoch": 2.15,
"grad_norm": 0.7503282427787781,
"learning_rate": 1.249703141065411e-05,
"loss": 0.3255,
"step": 1955
},
{
"epoch": 2.16,
"grad_norm": 1.1902406215667725,
"learning_rate": 1.2362383094772413e-05,
"loss": 0.518,
"step": 1960
},
{
"epoch": 2.16,
"grad_norm": 0.50048828125,
"learning_rate": 1.2227953743508935e-05,
"loss": 0.412,
"step": 1965
},
{
"epoch": 2.17,
"grad_norm": 1.088340163230896,
"learning_rate": 1.209375451666156e-05,
"loss": 0.3134,
"step": 1970
},
{
"epoch": 2.17,
"grad_norm": 0.9749330878257751,
"learning_rate": 1.1959796554924155e-05,
"loss": 0.2601,
"step": 1975
},
{
"epoch": 2.18,
"grad_norm": 1.1269935369491577,
"learning_rate": 1.1826090978961706e-05,
"loss": 0.4026,
"step": 1980
},
{
"epoch": 2.19,
"grad_norm": 1.243455171585083,
"learning_rate": 1.1692648888487106e-05,
"loss": 0.3724,
"step": 1985
},
{
"epoch": 2.19,
"grad_norm": 1.3622498512268066,
"learning_rate": 1.1559481361339723e-05,
"loss": 0.3994,
"step": 1990
},
{
"epoch": 2.2,
"grad_norm": 1.5660467147827148,
"learning_rate": 1.142659945256576e-05,
"loss": 0.4255,
"step": 1995
},
{
"epoch": 2.2,
"grad_norm": 1.1115527153015137,
"learning_rate": 1.12940141935005e-05,
"loss": 0.4976,
"step": 2000
},
{
"epoch": 2.21,
"grad_norm": 0.8586636781692505,
"learning_rate": 1.1161736590852522e-05,
"loss": 0.2858,
"step": 2005
},
{
"epoch": 2.21,
"grad_norm": 2.1333322525024414,
"learning_rate": 1.1029777625789982e-05,
"loss": 0.3974,
"step": 2010
},
{
"epoch": 2.22,
"grad_norm": 0.8023208379745483,
"learning_rate": 1.0898148253028992e-05,
"loss": 0.4768,
"step": 2015
},
{
"epoch": 2.22,
"grad_norm": 1.3136727809906006,
"learning_rate": 1.0766859399924198e-05,
"loss": 0.3854,
"step": 2020
},
{
"epoch": 2.23,
"grad_norm": 1.2942872047424316,
"learning_rate": 1.0635921965561644e-05,
"loss": 0.6315,
"step": 2025
},
{
"epoch": 2.24,
"grad_norm": 1.1733945608139038,
"learning_rate": 1.0505346819853966e-05,
"loss": 0.2202,
"step": 2030
},
{
"epoch": 2.24,
"grad_norm": 1.2896422147750854,
"learning_rate": 1.0375144802638011e-05,
"loss": 0.3692,
"step": 2035
},
{
"epoch": 2.25,
"grad_norm": 1.188212275505066,
"learning_rate": 1.024532672277497e-05,
"loss": 0.4869,
"step": 2040
},
{
"epoch": 2.25,
"grad_norm": 1.2566425800323486,
"learning_rate": 1.0115903357253056e-05,
"loss": 0.3254,
"step": 2045
},
{
"epoch": 2.26,
"grad_norm": 1.7713689804077148,
"learning_rate": 9.986885450292837e-06,
"loss": 0.3681,
"step": 2050
},
{
"epoch": 2.26,
"grad_norm": 1.208200454711914,
"learning_rate": 9.858283712455311e-06,
"loss": 0.2643,
"step": 2055
},
{
"epoch": 2.27,
"grad_norm": 0.957243025302887,
"learning_rate": 9.730108819752737e-06,
"loss": 0.2966,
"step": 2060
},
{
"epoch": 2.27,
"grad_norm": 0.9913397431373596,
"learning_rate": 9.602371412762371e-06,
"loss": 0.3375,
"step": 2065
},
{
"epoch": 2.28,
"grad_norm": 1.0104014873504639,
"learning_rate": 9.475082095743123e-06,
"loss": 0.3598,
"step": 2070
},
{
"epoch": 2.28,
"grad_norm": 0.6341925859451294,
"learning_rate": 9.348251435755213e-06,
"loss": 0.41,
"step": 2075
},
{
"epoch": 2.29,
"grad_norm": 0.6854486465454102,
"learning_rate": 9.221889961782971e-06,
"loss": 0.3253,
"step": 2080
},
{
"epoch": 2.3,
"grad_norm": 1.3789153099060059,
"learning_rate": 9.096008163860735e-06,
"loss": 0.4887,
"step": 2085
},
{
"epoch": 2.3,
"grad_norm": 0.9915759563446045,
"learning_rate": 8.97061649220201e-06,
"loss": 0.3067,
"step": 2090
},
{
"epoch": 2.31,
"grad_norm": 0.6337260603904724,
"learning_rate": 8.84572535633195e-06,
"loss": 0.4107,
"step": 2095
},
{
"epoch": 2.31,
"grad_norm": 1.3694593906402588,
"learning_rate": 8.721345124223193e-06,
"loss": 0.2412,
"step": 2100
},
{
"epoch": 2.32,
"grad_norm": 1.0332863330841064,
"learning_rate": 8.597486121435126e-06,
"loss": 0.5425,
"step": 2105
},
{
"epoch": 2.32,
"grad_norm": 0.7652474641799927,
"learning_rate": 8.474158630256755e-06,
"loss": 0.5445,
"step": 2110
},
{
"epoch": 2.33,
"grad_norm": 1.368090271949768,
"learning_rate": 8.351372888853045e-06,
"loss": 0.4415,
"step": 2115
},
{
"epoch": 2.33,
"grad_norm": 1.044106125831604,
"learning_rate": 8.229139090415035e-06,
"loss": 0.4247,
"step": 2120
},
{
"epoch": 2.34,
"grad_norm": 0.9483750462532043,
"learning_rate": 8.107467382313608e-06,
"loss": 0.3865,
"step": 2125
},
{
"epoch": 2.35,
"grad_norm": 0.7410837411880493,
"learning_rate": 7.986367865257129e-06,
"loss": 0.3543,
"step": 2130
},
{
"epoch": 2.35,
"grad_norm": 0.6833118200302124,
"learning_rate": 7.865850592452894e-06,
"loss": 0.1985,
"step": 2135
},
{
"epoch": 2.36,
"grad_norm": 0.7763660550117493,
"learning_rate": 7.745925568772554e-06,
"loss": 0.4385,
"step": 2140
},
{
"epoch": 2.36,
"grad_norm": 1.189669132232666,
"learning_rate": 7.626602749921589e-06,
"loss": 0.4026,
"step": 2145
},
{
"epoch": 2.37,
"grad_norm": 1.6189225912094116,
"learning_rate": 7.5078920416127495e-06,
"loss": 0.3927,
"step": 2150
},
{
"epoch": 2.37,
"grad_norm": 1.7551486492156982,
"learning_rate": 7.389803298743809e-06,
"loss": 0.5965,
"step": 2155
},
{
"epoch": 2.38,
"grad_norm": 1.2080819606781006,
"learning_rate": 7.272346324579388e-06,
"loss": 0.2134,
"step": 2160
},
{
"epoch": 2.38,
"grad_norm": 1.190613865852356,
"learning_rate": 7.1555308699371416e-06,
"loss": 0.4577,
"step": 2165
},
{
"epoch": 2.39,
"grad_norm": 1.435311198234558,
"learning_rate": 7.039366632378304e-06,
"loss": 0.2424,
"step": 2170
},
{
"epoch": 2.39,
"grad_norm": 0.7552112936973572,
"learning_rate": 6.9238632554026084e-06,
"loss": 0.2778,
"step": 2175
},
{
"epoch": 2.4,
"grad_norm": 0.6694565415382385,
"learning_rate": 6.809030327647728e-06,
"loss": 0.4163,
"step": 2180
},
{
"epoch": 2.41,
"grad_norm": 1.4150924682617188,
"learning_rate": 6.694877382093284e-06,
"loss": 0.278,
"step": 2185
},
{
"epoch": 2.41,
"grad_norm": 1.9875400066375732,
"learning_rate": 6.5814138952694305e-06,
"loss": 0.4271,
"step": 2190
},
{
"epoch": 2.42,
"grad_norm": 1.8704239130020142,
"learning_rate": 6.468649286470153e-06,
"loss": 0.3261,
"step": 2195
},
{
"epoch": 2.42,
"grad_norm": 0.6322317123413086,
"learning_rate": 6.356592916971343e-06,
"loss": 0.4293,
"step": 2200
},
{
"epoch": 2.43,
"grad_norm": 0.6502904891967773,
"learning_rate": 6.2452540892536245e-06,
"loss": 0.3743,
"step": 2205
},
{
"epoch": 2.43,
"grad_norm": 3.024371385574341,
"learning_rate": 6.1346420462301125e-06,
"loss": 0.4385,
"step": 2210
},
{
"epoch": 2.44,
"grad_norm": 0.7644482254981995,
"learning_rate": 6.024765970479122e-06,
"loss": 0.2991,
"step": 2215
},
{
"epoch": 2.44,
"grad_norm": 0.7494433522224426,
"learning_rate": 5.915634983481835e-06,
"loss": 0.4962,
"step": 2220
},
{
"epoch": 2.45,
"grad_norm": 2.201181411743164,
"learning_rate": 5.807258144865086e-06,
"loss": 0.4794,
"step": 2225
},
{
"epoch": 2.46,
"grad_norm": 0.9114490151405334,
"learning_rate": 5.699644451649277e-06,
"loss": 0.2898,
"step": 2230
},
{
"epoch": 2.46,
"grad_norm": 1.601459264755249,
"learning_rate": 5.59280283750146e-06,
"loss": 0.3268,
"step": 2235
},
{
"epoch": 2.47,
"grad_norm": 1.5998916625976562,
"learning_rate": 5.486742171993705e-06,
"loss": 0.4436,
"step": 2240
},
{
"epoch": 2.47,
"grad_norm": 0.8929490447044373,
"learning_rate": 5.381471259866807e-06,
"loss": 0.359,
"step": 2245
},
{
"epoch": 2.48,
"grad_norm": 0.9474959969520569,
"learning_rate": 5.276998840299308e-06,
"loss": 0.484,
"step": 2250
},
{
"epoch": 2.48,
"grad_norm": 1.7978712320327759,
"learning_rate": 5.1733335861820305e-06,
"loss": 0.2033,
"step": 2255
},
{
"epoch": 2.49,
"grad_norm": 0.9559624195098877,
"learning_rate": 5.070484103398103e-06,
"loss": 0.3781,
"step": 2260
},
{
"epoch": 2.49,
"grad_norm": 0.7897327542304993,
"learning_rate": 4.968458930108495e-06,
"loss": 0.3866,
"step": 2265
},
{
"epoch": 2.5,
"grad_norm": 1.8434853553771973,
"learning_rate": 4.867266536043235e-06,
"loss": 0.487,
"step": 2270
},
{
"epoch": 2.5,
"grad_norm": 1.959332823753357,
"learning_rate": 4.766915321798297e-06,
"loss": 0.5033,
"step": 2275
},
{
"epoch": 2.51,
"grad_norm": 1.3424344062805176,
"learning_rate": 4.667413618138192e-06,
"loss": 0.6175,
"step": 2280
},
{
"epoch": 2.52,
"grad_norm": 1.1145857572555542,
"learning_rate": 4.568769685304388e-06,
"loss": 0.2437,
"step": 2285
},
{
"epoch": 2.52,
"grad_norm": 1.2104226350784302,
"learning_rate": 4.470991712329597e-06,
"loss": 0.4342,
"step": 2290
},
{
"epoch": 2.53,
"grad_norm": 0.958635687828064,
"learning_rate": 4.374087816357923e-06,
"loss": 0.3341,
"step": 2295
},
{
"epoch": 2.53,
"grad_norm": 1.011942982673645,
"learning_rate": 4.278066041971024e-06,
"loss": 0.573,
"step": 2300
},
{
"epoch": 2.54,
"grad_norm": 0.9302467107772827,
"learning_rate": 4.1829343605202895e-06,
"loss": 0.4891,
"step": 2305
},
{
"epoch": 2.54,
"grad_norm": 0.598698079586029,
"learning_rate": 4.088700669465074e-06,
"loss": 0.2921,
"step": 2310
},
{
"epoch": 2.55,
"grad_norm": 1.0277636051177979,
"learning_rate": 3.995372791717092e-06,
"loss": 0.3742,
"step": 2315
},
{
"epoch": 2.55,
"grad_norm": 1.5416873693466187,
"learning_rate": 3.902958474990987e-06,
"loss": 0.2199,
"step": 2320
},
{
"epoch": 2.56,
"grad_norm": 1.640619158744812,
"learning_rate": 3.8114653911611452e-06,
"loss": 0.3463,
"step": 2325
},
{
"epoch": 2.57,
"grad_norm": 0.802915632724762,
"learning_rate": 3.720901135624807e-06,
"loss": 0.3876,
"step": 2330
},
{
"epoch": 2.57,
"grad_norm": 0.6372286677360535,
"learning_rate": 3.6312732266715374e-06,
"loss": 0.3851,
"step": 2335
},
{
"epoch": 2.58,
"grad_norm": 1.0239226818084717,
"learning_rate": 3.5425891048590693e-06,
"loss": 0.5176,
"step": 2340
},
{
"epoch": 2.58,
"grad_norm": 1.344281792640686,
"learning_rate": 3.454856132395623e-06,
"loss": 0.4344,
"step": 2345
},
{
"epoch": 2.59,
"grad_norm": 1.4836249351501465,
"learning_rate": 3.368081592528739e-06,
"loss": 0.3258,
"step": 2350
},
{
"epoch": 2.59,
"grad_norm": 0.48362186551094055,
"learning_rate": 3.2822726889406296e-06,
"loss": 0.3484,
"step": 2355
},
{
"epoch": 2.6,
"grad_norm": 1.674930214881897,
"learning_rate": 3.197436545150168e-06,
"loss": 0.2531,
"step": 2360
},
{
"epoch": 2.6,
"grad_norm": 1.283719539642334,
"learning_rate": 3.113580203921533e-06,
"loss": 0.4796,
"step": 2365
},
{
"epoch": 2.61,
"grad_norm": 1.3754234313964844,
"learning_rate": 3.030710626679522e-06,
"loss": 0.3232,
"step": 2370
},
{
"epoch": 2.61,
"grad_norm": 1.8028233051300049,
"learning_rate": 2.9488346929316546e-06,
"loss": 0.4161,
"step": 2375
},
{
"epoch": 2.62,
"grad_norm": 1.4452279806137085,
"learning_rate": 2.8679591996970728e-06,
"loss": 0.2511,
"step": 2380
},
{
"epoch": 2.63,
"grad_norm": 0.9447839856147766,
"learning_rate": 2.7880908609422563e-06,
"loss": 0.2869,
"step": 2385
},
{
"epoch": 2.63,
"grad_norm": 0.5915132164955139,
"learning_rate": 2.7092363070236686e-06,
"loss": 0.3261,
"step": 2390
},
{
"epoch": 2.64,
"grad_norm": 1.6882514953613281,
"learning_rate": 2.631402084137337e-06,
"loss": 0.2105,
"step": 2395
},
{
"epoch": 2.64,
"grad_norm": 1.2579067945480347,
"learning_rate": 2.5545946537753994e-06,
"loss": 0.2521,
"step": 2400
},
{
"epoch": 2.65,
"grad_norm": 2.2939579486846924,
"learning_rate": 2.478820392189699e-06,
"loss": 0.3847,
"step": 2405
},
{
"epoch": 2.65,
"grad_norm": 1.4380046129226685,
"learning_rate": 2.4040855898624677e-06,
"loss": 0.3619,
"step": 2410
},
{
"epoch": 2.66,
"grad_norm": 2.006512403488159,
"learning_rate": 2.3303964509840916e-06,
"loss": 0.5343,
"step": 2415
},
{
"epoch": 2.66,
"grad_norm": 0.8354169726371765,
"learning_rate": 2.25775909293808e-06,
"loss": 0.2209,
"step": 2420
},
{
"epoch": 2.67,
"grad_norm": 2.4486894607543945,
"learning_rate": 2.1861795457932236e-06,
"loss": 0.4199,
"step": 2425
},
{
"epoch": 2.68,
"grad_norm": 2.1726739406585693,
"learning_rate": 2.1156637518029943e-06,
"loss": 0.3883,
"step": 2430
},
{
"epoch": 2.68,
"grad_norm": 1.3366342782974243,
"learning_rate": 2.0462175649122407e-06,
"loss": 0.4508,
"step": 2435
},
{
"epoch": 2.69,
"grad_norm": 1.2342681884765625,
"learning_rate": 1.9778467502712334e-06,
"loss": 0.4942,
"step": 2440
},
{
"epoch": 2.69,
"grad_norm": 0.9087796211242676,
"learning_rate": 1.9105569837570457e-06,
"loss": 0.3828,
"step": 2445
},
{
"epoch": 2.7,
"grad_norm": 1.1849201917648315,
"learning_rate": 1.844353851502371e-06,
"loss": 0.3724,
"step": 2450
},
{
"epoch": 2.7,
"grad_norm": 1.1934826374053955,
"learning_rate": 1.779242849431793e-06,
"loss": 0.376,
"step": 2455
},
{
"epoch": 2.71,
"grad_norm": 1.01549232006073,
"learning_rate": 1.7152293828055194e-06,
"loss": 0.4463,
"step": 2460
},
{
"epoch": 2.71,
"grad_norm": 1.314704179763794,
"learning_rate": 1.6523187657706672e-06,
"loss": 0.4637,
"step": 2465
},
{
"epoch": 2.72,
"grad_norm": 1.545615553855896,
"learning_rate": 1.5905162209201135e-06,
"loss": 0.3411,
"step": 2470
},
{
"epoch": 2.73,
"grad_norm": 0.7735347747802734,
"learning_rate": 1.529826878858912e-06,
"loss": 0.3245,
"step": 2475
},
{
"epoch": 2.73,
"grad_norm": 1.3003946542739868,
"learning_rate": 1.4702557777783904e-06,
"loss": 0.3665,
"step": 2480
},
{
"epoch": 2.74,
"grad_norm": 1.4502391815185547,
"learning_rate": 1.4118078630378906e-06,
"loss": 0.3318,
"step": 2485
},
{
"epoch": 2.74,
"grad_norm": 0.8701452016830444,
"learning_rate": 1.3544879867542275e-06,
"loss": 0.2807,
"step": 2490
},
{
"epoch": 2.75,
"grad_norm": 1.2284448146820068,
"learning_rate": 1.2983009073988832e-06,
"loss": 0.3733,
"step": 2495
},
{
"epoch": 2.75,
"grad_norm": 0.8479854464530945,
"learning_rate": 1.2432512894029823e-06,
"loss": 0.3448,
"step": 2500
},
{
"epoch": 2.76,
"grad_norm": 0.9034194350242615,
"learning_rate": 1.1893437027700604e-06,
"loss": 0.3248,
"step": 2505
},
{
"epoch": 2.76,
"grad_norm": 0.9006950259208679,
"learning_rate": 1.136582622696684e-06,
"loss": 0.389,
"step": 2510
},
{
"epoch": 2.77,
"grad_norm": 0.8429726362228394,
"learning_rate": 1.0849724292009467e-06,
"loss": 0.3469,
"step": 2515
},
{
"epoch": 2.77,
"grad_norm": 1.1029099225997925,
"learning_rate": 1.034517406758842e-06,
"loss": 0.2766,
"step": 2520
},
{
"epoch": 2.78,
"grad_norm": 1.1907734870910645,
"learning_rate": 9.852217439485893e-07,
"loss": 0.341,
"step": 2525
},
{
"epoch": 2.79,
"grad_norm": 0.5092141628265381,
"learning_rate": 9.3708953310292e-07,
"loss": 0.6436,
"step": 2530
},
{
"epoch": 2.79,
"grad_norm": 1.428278923034668,
"learning_rate": 8.901247699693399e-07,
"loss": 0.2796,
"step": 2535
},
{
"epoch": 2.8,
"grad_norm": 0.8995442986488342,
"learning_rate": 8.443313533784175e-07,
"loss": 0.4055,
"step": 2540
},
{
"epoch": 2.8,
"grad_norm": 0.4836633801460266,
"learning_rate": 7.997130849201323e-07,
"loss": 0.3853,
"step": 2545
},
{
"epoch": 2.81,
"grad_norm": 2.5244317054748535,
"learning_rate": 7.56273668628264e-07,
"loss": 0.5473,
"step": 2550
},
{
"epoch": 2.81,
"grad_norm": 2.398171901702881,
"learning_rate": 7.140167106729017e-07,
"loss": 0.4997,
"step": 2555
},
{
"epoch": 2.82,
"grad_norm": 0.6999978423118591,
"learning_rate": 6.729457190610888e-07,
"loss": 0.3492,
"step": 2560
},
{
"epoch": 2.82,
"grad_norm": 1.3380895853042603,
"learning_rate": 6.330641033455837e-07,
"loss": 0.3369,
"step": 2565
},
{
"epoch": 2.83,
"grad_norm": 1.4348106384277344,
"learning_rate": 5.943751743418179e-07,
"loss": 0.2401,
"step": 2570
},
{
"epoch": 2.84,
"grad_norm": 1.0490989685058594,
"learning_rate": 5.568821438530519e-07,
"loss": 0.2856,
"step": 2575
},
{
"epoch": 2.84,
"grad_norm": 2.1060283184051514,
"learning_rate": 5.205881244037352e-07,
"loss": 0.2513,
"step": 2580
},
{
"epoch": 2.85,
"grad_norm": 1.4565911293029785,
"learning_rate": 4.85496128981116e-07,
"loss": 0.545,
"step": 2585
},
{
"epoch": 2.85,
"grad_norm": 0.6282710433006287,
"learning_rate": 4.5160907078512894e-07,
"loss": 0.4935,
"step": 2590
},
{
"epoch": 2.86,
"grad_norm": 1.2574007511138916,
"learning_rate": 4.189297629865335e-07,
"loss": 0.5524,
"step": 2595
},
{
"epoch": 2.86,
"grad_norm": 2.5676729679107666,
"learning_rate": 3.874609184933847e-07,
"loss": 0.3217,
"step": 2600
},
{
"epoch": 2.87,
"grad_norm": 1.900389552116394,
"learning_rate": 3.5720514972582776e-07,
"loss": 0.203,
"step": 2605
},
{
"epoch": 2.87,
"grad_norm": 1.0852973461151123,
"learning_rate": 3.281649683992033e-07,
"loss": 0.3713,
"step": 2610
},
{
"epoch": 2.88,
"grad_norm": 1.8293077945709229,
"learning_rate": 3.0034278531555105e-07,
"loss": 0.426,
"step": 2615
},
{
"epoch": 2.88,
"grad_norm": 1.1292721033096313,
"learning_rate": 2.737409101634747e-07,
"loss": 0.4578,
"step": 2620
},
{
"epoch": 2.89,
"grad_norm": 0.7649122476577759,
"learning_rate": 2.4836155132638874e-07,
"loss": 0.2586,
"step": 2625
},
{
"epoch": 2.9,
"grad_norm": 1.4662760496139526,
"learning_rate": 2.2420681569919998e-07,
"loss": 0.4307,
"step": 2630
},
{
"epoch": 2.9,
"grad_norm": 1.294457197189331,
"learning_rate": 2.012787085133927e-07,
"loss": 0.4343,
"step": 2635
},
{
"epoch": 2.91,
"grad_norm": 0.9145516157150269,
"learning_rate": 1.7957913317056817e-07,
"loss": 0.4964,
"step": 2640
},
{
"epoch": 2.91,
"grad_norm": 0.9060572981834412,
"learning_rate": 1.5910989108442632e-07,
"loss": 0.3721,
"step": 2645
},
{
"epoch": 2.92,
"grad_norm": 1.0683475732803345,
"learning_rate": 1.398726815312218e-07,
"loss": 0.5016,
"step": 2650
},
{
"epoch": 2.92,
"grad_norm": 0.8625506162643433,
"learning_rate": 1.2186910150869867e-07,
"loss": 0.4209,
"step": 2655
},
{
"epoch": 2.93,
"grad_norm": 1.0874123573303223,
"learning_rate": 1.0510064560350974e-07,
"loss": 0.44,
"step": 2660
},
{
"epoch": 2.93,
"grad_norm": 1.2623063325881958,
"learning_rate": 8.956870586714739e-08,
"loss": 0.3736,
"step": 2665
},
{
"epoch": 2.94,
"grad_norm": 1.6609416007995605,
"learning_rate": 7.527457170037776e-08,
"loss": 0.5932,
"step": 2670
},
{
"epoch": 2.95,
"grad_norm": 1.819159984588623,
"learning_rate": 6.221942974619476e-08,
"loss": 0.4801,
"step": 2675
},
{
"epoch": 2.95,
"grad_norm": 1.391512155532837,
"learning_rate": 5.0404363791322074e-08,
"loss": 0.4572,
"step": 2680
},
{
"epoch": 2.96,
"grad_norm": 1.40304696559906,
"learning_rate": 3.983035467622853e-08,
"loss": 0.2913,
"step": 2685
},
{
"epoch": 2.96,
"grad_norm": 2.047868013381958,
"learning_rate": 3.049828021370826e-08,
"loss": 0.2785,
"step": 2690
},
{
"epoch": 2.97,
"grad_norm": 1.6269519329071045,
"learning_rate": 2.2408915116008956e-08,
"loss": 0.1878,
"step": 2695
},
{
"epoch": 2.97,
"grad_norm": 1.4585676193237305,
"learning_rate": 1.5562930930519968e-08,
"loss": 0.697,
"step": 2700
},
{
"epoch": 2.98,
"grad_norm": 1.2848623991012573,
"learning_rate": 9.960895984016949e-09,
"loss": 0.3595,
"step": 2705
},
{
"epoch": 2.98,
"grad_norm": 1.122453212738037,
"learning_rate": 5.603275335484548e-09,
"loss": 0.3628,
"step": 2710
},
{
"epoch": 2.99,
"grad_norm": 1.7388654947280884,
"learning_rate": 2.490430737512317e-09,
"loss": 0.2751,
"step": 2715
},
{
"epoch": 2.99,
"grad_norm": 0.46183499693870544,
"learning_rate": 6.22620606258728e-10,
"loss": 0.357,
"step": 2720
},
{
"epoch": 3.0,
"step": 2724,
"total_flos": 1.004173813492482e+18,
"train_loss": 0.5588170812518586,
"train_runtime": 28902.9641,
"train_samples_per_second": 1.508,
"train_steps_per_second": 0.094
}
],
"logging_steps": 5,
"max_steps": 2724,
"num_input_tokens_seen": 0,
"num_train_epochs": 3,
"save_steps": 100,
"total_flos": 1.004173813492482e+18,
"train_batch_size": 2,
"trial_name": null,
"trial_params": null
}