qd-zh-phi-1_5 / trainer_state.json
{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 3.9999777750613963,
"eval_steps": 500,
"global_step": 51422,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.01,
"grad_norm": 6.53427791595459,
"learning_rate": 2.0000000000000002e-07,
"loss": 4.9702,
"step": 100
},
{
"epoch": 0.02,
"grad_norm": 3.246262788772583,
"learning_rate": 4.0000000000000003e-07,
"loss": 3.894,
"step": 200
},
{
"epoch": 0.02,
"grad_norm": 3.0513827800750732,
"learning_rate": 6.000000000000001e-07,
"loss": 3.374,
"step": 300
},
{
"epoch": 0.03,
"grad_norm": 3.749248504638672,
"learning_rate": 8.000000000000001e-07,
"loss": 3.0036,
"step": 400
},
{
"epoch": 0.04,
"grad_norm": 3.22784423828125,
"learning_rate": 1.0000000000000002e-06,
"loss": 2.7402,
"step": 500
},
{
"epoch": 0.05,
"grad_norm": 2.726270914077759,
"learning_rate": 1.2000000000000002e-06,
"loss": 2.516,
"step": 600
},
{
"epoch": 0.05,
"grad_norm": 4.169952392578125,
"learning_rate": 1.4000000000000001e-06,
"loss": 2.3578,
"step": 700
},
{
"epoch": 0.06,
"grad_norm": 3.3691835403442383,
"learning_rate": 1.6000000000000001e-06,
"loss": 2.2373,
"step": 800
},
{
"epoch": 0.07,
"grad_norm": 2.555671453475952,
"learning_rate": 1.8000000000000001e-06,
"loss": 2.1324,
"step": 900
},
{
"epoch": 0.08,
"grad_norm": 2.965663433074951,
"learning_rate": 2.0000000000000003e-06,
"loss": 2.003,
"step": 1000
},
{
"epoch": 0.09,
"grad_norm": 3.139995813369751,
"learning_rate": 2.2e-06,
"loss": 1.9204,
"step": 1100
},
{
"epoch": 0.09,
"grad_norm": 3.4419736862182617,
"learning_rate": 2.4000000000000003e-06,
"loss": 1.8174,
"step": 1200
},
{
"epoch": 0.1,
"grad_norm": 3.3173742294311523,
"learning_rate": 2.6e-06,
"loss": 1.7202,
"step": 1300
},
{
"epoch": 0.11,
"grad_norm": 3.725154161453247,
"learning_rate": 2.8000000000000003e-06,
"loss": 1.6697,
"step": 1400
},
{
"epoch": 0.12,
"grad_norm": 2.7185709476470947,
"learning_rate": 3e-06,
"loss": 1.5936,
"step": 1500
},
{
"epoch": 0.12,
"grad_norm": 3.5915303230285645,
"learning_rate": 3.2000000000000003e-06,
"loss": 1.541,
"step": 1600
},
{
"epoch": 0.13,
"grad_norm": 3.4957127571105957,
"learning_rate": 3.4000000000000005e-06,
"loss": 1.478,
"step": 1700
},
{
"epoch": 0.14,
"grad_norm": 3.322261333465576,
"learning_rate": 3.6000000000000003e-06,
"loss": 1.4561,
"step": 1800
},
{
"epoch": 0.15,
"grad_norm": 2.6218302249908447,
"learning_rate": 3.8000000000000005e-06,
"loss": 1.3999,
"step": 1900
},
{
"epoch": 0.16,
"grad_norm": 2.7259745597839355,
"learning_rate": 4.000000000000001e-06,
"loss": 1.3631,
"step": 2000
},
{
"epoch": 0.16,
"grad_norm": 2.891881227493286,
"learning_rate": 4.2000000000000004e-06,
"loss": 1.3468,
"step": 2100
},
{
"epoch": 0.17,
"grad_norm": 2.940047025680542,
"learning_rate": 4.4e-06,
"loss": 1.3039,
"step": 2200
},
{
"epoch": 0.18,
"grad_norm": 2.9909629821777344,
"learning_rate": 4.600000000000001e-06,
"loss": 1.2905,
"step": 2300
},
{
"epoch": 0.19,
"grad_norm": 2.4165866374969482,
"learning_rate": 4.800000000000001e-06,
"loss": 1.2583,
"step": 2400
},
{
"epoch": 0.19,
"grad_norm": 2.3624277114868164,
"learning_rate": 5e-06,
"loss": 1.2188,
"step": 2500
},
{
"epoch": 0.2,
"grad_norm": 2.2490053176879883,
"learning_rate": 5.2e-06,
"loss": 1.2105,
"step": 2600
},
{
"epoch": 0.21,
"grad_norm": 2.5461137294769287,
"learning_rate": 5.400000000000001e-06,
"loss": 1.1861,
"step": 2700
},
{
"epoch": 0.22,
"grad_norm": 2.602668523788452,
"learning_rate": 5.600000000000001e-06,
"loss": 1.1658,
"step": 2800
},
{
"epoch": 0.23,
"grad_norm": 2.377206563949585,
"learning_rate": 5.8e-06,
"loss": 1.1425,
"step": 2900
},
{
"epoch": 0.23,
"grad_norm": 2.560969114303589,
"learning_rate": 6e-06,
"loss": 1.129,
"step": 3000
},
{
"epoch": 0.24,
"grad_norm": 2.227200746536255,
"learning_rate": 6.200000000000001e-06,
"loss": 1.1131,
"step": 3100
},
{
"epoch": 0.25,
"grad_norm": 1.9198317527770996,
"learning_rate": 6.4000000000000006e-06,
"loss": 1.1162,
"step": 3200
},
{
"epoch": 0.26,
"grad_norm": 2.1105523109436035,
"learning_rate": 6.600000000000001e-06,
"loss": 1.0905,
"step": 3300
},
{
"epoch": 0.26,
"grad_norm": 2.541492223739624,
"learning_rate": 6.800000000000001e-06,
"loss": 1.0809,
"step": 3400
},
{
"epoch": 0.27,
"grad_norm": 2.3046059608459473,
"learning_rate": 7e-06,
"loss": 1.0542,
"step": 3500
},
{
"epoch": 0.28,
"grad_norm": 2.2684073448181152,
"learning_rate": 7.2000000000000005e-06,
"loss": 1.0375,
"step": 3600
},
{
"epoch": 0.29,
"grad_norm": 1.9288487434387207,
"learning_rate": 7.4e-06,
"loss": 1.0226,
"step": 3700
},
{
"epoch": 0.3,
"grad_norm": 1.7624694108963013,
"learning_rate": 7.600000000000001e-06,
"loss": 1.0133,
"step": 3800
},
{
"epoch": 0.3,
"grad_norm": 2.0840542316436768,
"learning_rate": 7.800000000000002e-06,
"loss": 1.0289,
"step": 3900
},
{
"epoch": 0.31,
"grad_norm": 2.0796873569488525,
"learning_rate": 8.000000000000001e-06,
"loss": 0.9929,
"step": 4000
},
{
"epoch": 0.32,
"grad_norm": 1.5572433471679688,
"learning_rate": 8.2e-06,
"loss": 1.0076,
"step": 4100
},
{
"epoch": 0.33,
"grad_norm": 1.9323557615280151,
"learning_rate": 8.400000000000001e-06,
"loss": 0.975,
"step": 4200
},
{
"epoch": 0.33,
"grad_norm": 2.0943708419799805,
"learning_rate": 8.6e-06,
"loss": 0.979,
"step": 4300
},
{
"epoch": 0.34,
"grad_norm": 1.8976091146469116,
"learning_rate": 8.8e-06,
"loss": 0.9674,
"step": 4400
},
{
"epoch": 0.35,
"grad_norm": 2.4065496921539307,
"learning_rate": 9e-06,
"loss": 0.9684,
"step": 4500
},
{
"epoch": 0.36,
"grad_norm": 1.681624412536621,
"learning_rate": 9.200000000000002e-06,
"loss": 0.963,
"step": 4600
},
{
"epoch": 0.37,
"grad_norm": 2.0208232402801514,
"learning_rate": 9.4e-06,
"loss": 0.9387,
"step": 4700
},
{
"epoch": 0.37,
"grad_norm": 2.300846815109253,
"learning_rate": 9.600000000000001e-06,
"loss": 0.9294,
"step": 4800
},
{
"epoch": 0.38,
"grad_norm": 1.7484502792358398,
"learning_rate": 9.800000000000001e-06,
"loss": 0.9081,
"step": 4900
},
{
"epoch": 0.39,
"grad_norm": 1.68325936794281,
"learning_rate": 1e-05,
"loss": 0.9291,
"step": 5000
},
{
"epoch": 0.4,
"grad_norm": 1.8952064514160156,
"learning_rate": 9.983129481231549e-06,
"loss": 0.9288,
"step": 5100
},
{
"epoch": 0.4,
"grad_norm": 2.0792810916900635,
"learning_rate": 9.966258962463097e-06,
"loss": 0.9003,
"step": 5200
},
{
"epoch": 0.41,
"grad_norm": 1.7303239107131958,
"learning_rate": 9.949388443694644e-06,
"loss": 0.8995,
"step": 5300
},
{
"epoch": 0.42,
"grad_norm": 1.9337693452835083,
"learning_rate": 9.932517924926192e-06,
"loss": 0.8959,
"step": 5400
},
{
"epoch": 0.43,
"grad_norm": 1.4797320365905762,
"learning_rate": 9.91564740615774e-06,
"loss": 0.8672,
"step": 5500
},
{
"epoch": 0.44,
"grad_norm": 1.5944349765777588,
"learning_rate": 9.898776887389287e-06,
"loss": 0.8745,
"step": 5600
},
{
"epoch": 0.44,
"grad_norm": 1.4109618663787842,
"learning_rate": 9.881906368620837e-06,
"loss": 0.8463,
"step": 5700
},
{
"epoch": 0.45,
"grad_norm": 1.4643152952194214,
"learning_rate": 9.865035849852383e-06,
"loss": 0.8498,
"step": 5800
},
{
"epoch": 0.46,
"grad_norm": 1.895533800125122,
"learning_rate": 9.848165331083931e-06,
"loss": 0.8532,
"step": 5900
},
{
"epoch": 0.47,
"grad_norm": 1.6388016939163208,
"learning_rate": 9.83129481231548e-06,
"loss": 0.8575,
"step": 6000
},
{
"epoch": 0.47,
"grad_norm": 1.7428650856018066,
"learning_rate": 9.814424293547026e-06,
"loss": 0.8313,
"step": 6100
},
{
"epoch": 0.48,
"grad_norm": 1.4427262544631958,
"learning_rate": 9.797553774778576e-06,
"loss": 0.8231,
"step": 6200
},
{
"epoch": 0.49,
"grad_norm": 1.5858181715011597,
"learning_rate": 9.780683256010123e-06,
"loss": 0.8339,
"step": 6300
},
{
"epoch": 0.5,
"grad_norm": 1.4243927001953125,
"learning_rate": 9.76381273724167e-06,
"loss": 0.8417,
"step": 6400
},
{
"epoch": 0.51,
"grad_norm": 1.7441660165786743,
"learning_rate": 9.746942218473219e-06,
"loss": 0.8076,
"step": 6500
},
{
"epoch": 0.51,
"grad_norm": 1.4151054620742798,
"learning_rate": 9.730071699704765e-06,
"loss": 0.8001,
"step": 6600
},
{
"epoch": 0.52,
"grad_norm": 1.41933012008667,
"learning_rate": 9.713201180936315e-06,
"loss": 0.8197,
"step": 6700
},
{
"epoch": 0.53,
"grad_norm": 1.4041861295700073,
"learning_rate": 9.696330662167862e-06,
"loss": 0.8003,
"step": 6800
},
{
"epoch": 0.54,
"grad_norm": 1.3906748294830322,
"learning_rate": 9.67946014339941e-06,
"loss": 0.8003,
"step": 6900
},
{
"epoch": 0.54,
"grad_norm": 1.2998430728912354,
"learning_rate": 9.662589624630958e-06,
"loss": 0.7968,
"step": 7000
},
{
"epoch": 0.55,
"grad_norm": 1.5463752746582031,
"learning_rate": 9.645719105862507e-06,
"loss": 0.7947,
"step": 7100
},
{
"epoch": 0.56,
"grad_norm": 1.5098308324813843,
"learning_rate": 9.628848587094055e-06,
"loss": 0.787,
"step": 7200
},
{
"epoch": 0.57,
"grad_norm": 1.5164891481399536,
"learning_rate": 9.611978068325601e-06,
"loss": 0.7911,
"step": 7300
},
{
"epoch": 0.58,
"grad_norm": 1.2971314191818237,
"learning_rate": 9.59510754955715e-06,
"loss": 0.7857,
"step": 7400
},
{
"epoch": 0.58,
"grad_norm": 1.7399604320526123,
"learning_rate": 9.578237030788698e-06,
"loss": 0.7822,
"step": 7500
},
{
"epoch": 0.59,
"grad_norm": 1.19468092918396,
"learning_rate": 9.561366512020246e-06,
"loss": 0.7716,
"step": 7600
},
{
"epoch": 0.6,
"grad_norm": 1.3369121551513672,
"learning_rate": 9.544495993251794e-06,
"loss": 0.7698,
"step": 7700
},
{
"epoch": 0.61,
"grad_norm": 1.3297808170318604,
"learning_rate": 9.52762547448334e-06,
"loss": 0.7585,
"step": 7800
},
{
"epoch": 0.61,
"grad_norm": 1.6198933124542236,
"learning_rate": 9.510754955714889e-06,
"loss": 0.785,
"step": 7900
},
{
"epoch": 0.62,
"grad_norm": 1.6185461282730103,
"learning_rate": 9.493884436946437e-06,
"loss": 0.7557,
"step": 8000
},
{
"epoch": 0.63,
"grad_norm": 1.4298818111419678,
"learning_rate": 9.477013918177985e-06,
"loss": 0.7663,
"step": 8100
},
{
"epoch": 0.64,
"grad_norm": 1.4085651636123657,
"learning_rate": 9.460143399409532e-06,
"loss": 0.7593,
"step": 8200
},
{
"epoch": 0.65,
"grad_norm": 1.2686153650283813,
"learning_rate": 9.44327288064108e-06,
"loss": 0.7644,
"step": 8300
},
{
"epoch": 0.65,
"grad_norm": 1.5299006700515747,
"learning_rate": 9.426402361872628e-06,
"loss": 0.748,
"step": 8400
},
{
"epoch": 0.66,
"grad_norm": 1.4610661268234253,
"learning_rate": 9.409531843104176e-06,
"loss": 0.7499,
"step": 8500
},
{
"epoch": 0.67,
"grad_norm": 1.314367651939392,
"learning_rate": 9.392661324335725e-06,
"loss": 0.7423,
"step": 8600
},
{
"epoch": 0.68,
"grad_norm": 1.218855857849121,
"learning_rate": 9.375790805567271e-06,
"loss": 0.7302,
"step": 8700
},
{
"epoch": 0.68,
"grad_norm": 1.2473499774932861,
"learning_rate": 9.35892028679882e-06,
"loss": 0.735,
"step": 8800
},
{
"epoch": 0.69,
"grad_norm": 1.3518482446670532,
"learning_rate": 9.342049768030368e-06,
"loss": 0.7351,
"step": 8900
},
{
"epoch": 0.7,
"grad_norm": 1.2447612285614014,
"learning_rate": 9.325179249261916e-06,
"loss": 0.7291,
"step": 9000
},
{
"epoch": 0.71,
"grad_norm": 1.4226117134094238,
"learning_rate": 9.308308730493464e-06,
"loss": 0.7206,
"step": 9100
},
{
"epoch": 0.72,
"grad_norm": 1.238205909729004,
"learning_rate": 9.29143821172501e-06,
"loss": 0.7318,
"step": 9200
},
{
"epoch": 0.72,
"grad_norm": 1.318700909614563,
"learning_rate": 9.274567692956559e-06,
"loss": 0.7231,
"step": 9300
},
{
"epoch": 0.73,
"grad_norm": 1.363457441329956,
"learning_rate": 9.257697174188107e-06,
"loss": 0.7275,
"step": 9400
},
{
"epoch": 0.74,
"grad_norm": 1.4574874639511108,
"learning_rate": 9.240826655419655e-06,
"loss": 0.7112,
"step": 9500
},
{
"epoch": 0.75,
"grad_norm": 1.3727684020996094,
"learning_rate": 9.223956136651203e-06,
"loss": 0.7322,
"step": 9600
},
{
"epoch": 0.75,
"grad_norm": 1.2791839838027954,
"learning_rate": 9.20708561788275e-06,
"loss": 0.7205,
"step": 9700
},
{
"epoch": 0.76,
"grad_norm": 1.625589370727539,
"learning_rate": 9.190215099114298e-06,
"loss": 0.7063,
"step": 9800
},
{
"epoch": 0.77,
"grad_norm": 1.3039993047714233,
"learning_rate": 9.173344580345846e-06,
"loss": 0.7106,
"step": 9900
},
{
"epoch": 0.78,
"grad_norm": 1.4416632652282715,
"learning_rate": 9.156474061577395e-06,
"loss": 0.7194,
"step": 10000
},
{
"epoch": 0.79,
"grad_norm": 1.3791232109069824,
"learning_rate": 9.139603542808943e-06,
"loss": 0.7112,
"step": 10100
},
{
"epoch": 0.79,
"grad_norm": 1.4265581369400024,
"learning_rate": 9.12273302404049e-06,
"loss": 0.7068,
"step": 10200
},
{
"epoch": 0.8,
"grad_norm": 1.2084933519363403,
"learning_rate": 9.10586250527204e-06,
"loss": 0.7096,
"step": 10300
},
{
"epoch": 0.81,
"grad_norm": 1.3753681182861328,
"learning_rate": 9.088991986503586e-06,
"loss": 0.6937,
"step": 10400
},
{
"epoch": 0.82,
"grad_norm": 1.122209906578064,
"learning_rate": 9.072121467735134e-06,
"loss": 0.7097,
"step": 10500
},
{
"epoch": 0.82,
"grad_norm": 1.412460446357727,
"learning_rate": 9.055250948966682e-06,
"loss": 0.709,
"step": 10600
},
{
"epoch": 0.83,
"grad_norm": 1.2995516061782837,
"learning_rate": 9.038380430198229e-06,
"loss": 0.6902,
"step": 10700
},
{
"epoch": 0.84,
"grad_norm": 1.1382883787155151,
"learning_rate": 9.021509911429777e-06,
"loss": 0.693,
"step": 10800
},
{
"epoch": 0.85,
"grad_norm": 1.3806873559951782,
"learning_rate": 9.004639392661325e-06,
"loss": 0.6812,
"step": 10900
},
{
"epoch": 0.86,
"grad_norm": 1.1994802951812744,
"learning_rate": 8.987768873892873e-06,
"loss": 0.6913,
"step": 11000
},
{
"epoch": 0.86,
"grad_norm": 1.3595366477966309,
"learning_rate": 8.970898355124422e-06,
"loss": 0.6942,
"step": 11100
},
{
"epoch": 0.87,
"grad_norm": 1.1623262166976929,
"learning_rate": 8.954027836355968e-06,
"loss": 0.6924,
"step": 11200
},
{
"epoch": 0.88,
"grad_norm": 1.1356381177902222,
"learning_rate": 8.937157317587516e-06,
"loss": 0.6877,
"step": 11300
},
{
"epoch": 0.89,
"grad_norm": 1.3679704666137695,
"learning_rate": 8.920286798819064e-06,
"loss": 0.6821,
"step": 11400
},
{
"epoch": 0.89,
"grad_norm": 1.1613571643829346,
"learning_rate": 8.903416280050613e-06,
"loss": 0.6764,
"step": 11500
},
{
"epoch": 0.9,
"grad_norm": 1.484339714050293,
"learning_rate": 8.886545761282161e-06,
"loss": 0.6864,
"step": 11600
},
{
"epoch": 0.91,
"grad_norm": 1.2525767087936401,
"learning_rate": 8.869675242513707e-06,
"loss": 0.6846,
"step": 11700
},
{
"epoch": 0.92,
"grad_norm": 1.2930700778961182,
"learning_rate": 8.852804723745256e-06,
"loss": 0.6931,
"step": 11800
},
{
"epoch": 0.93,
"grad_norm": 1.113165259361267,
"learning_rate": 8.835934204976804e-06,
"loss": 0.6718,
"step": 11900
},
{
"epoch": 0.93,
"grad_norm": 1.2441542148590088,
"learning_rate": 8.81906368620835e-06,
"loss": 0.6678,
"step": 12000
},
{
"epoch": 0.94,
"grad_norm": 1.3137460947036743,
"learning_rate": 8.8021931674399e-06,
"loss": 0.6702,
"step": 12100
},
{
"epoch": 0.95,
"grad_norm": 1.0759252309799194,
"learning_rate": 8.785322648671447e-06,
"loss": 0.663,
"step": 12200
},
{
"epoch": 0.96,
"grad_norm": 1.074950933456421,
"learning_rate": 8.768452129902995e-06,
"loss": 0.6618,
"step": 12300
},
{
"epoch": 0.96,
"grad_norm": 1.2644020318984985,
"learning_rate": 8.751581611134543e-06,
"loss": 0.673,
"step": 12400
},
{
"epoch": 0.97,
"grad_norm": 1.118434190750122,
"learning_rate": 8.73471109236609e-06,
"loss": 0.6555,
"step": 12500
},
{
"epoch": 0.98,
"grad_norm": 1.443091869354248,
"learning_rate": 8.71784057359764e-06,
"loss": 0.6647,
"step": 12600
},
{
"epoch": 0.99,
"grad_norm": 1.4388694763183594,
"learning_rate": 8.700970054829186e-06,
"loss": 0.6655,
"step": 12700
},
{
"epoch": 1.0,
"grad_norm": 1.119398593902588,
"learning_rate": 8.684099536060734e-06,
"loss": 0.6651,
"step": 12800
},
{
"epoch": 1.0,
"eval_loss": 0.6430252194404602,
"eval_runtime": 233.0784,
"eval_samples_per_second": 27.665,
"eval_steps_per_second": 27.665,
"step": 12855
},
{
"epoch": 1.0,
"grad_norm": 1.409602165222168,
"learning_rate": 8.667229017292283e-06,
"loss": 0.6307,
"step": 12900
},
{
"epoch": 1.01,
"grad_norm": 1.1905642747879028,
"learning_rate": 8.650358498523829e-06,
"loss": 0.6197,
"step": 13000
},
{
"epoch": 1.02,
"grad_norm": 1.2009611129760742,
"learning_rate": 8.633487979755379e-06,
"loss": 0.6075,
"step": 13100
},
{
"epoch": 1.03,
"grad_norm": 1.1692153215408325,
"learning_rate": 8.616617460986926e-06,
"loss": 0.6136,
"step": 13200
},
{
"epoch": 1.03,
"grad_norm": 1.0197334289550781,
"learning_rate": 8.599746942218474e-06,
"loss": 0.6116,
"step": 13300
},
{
"epoch": 1.04,
"grad_norm": 1.1080808639526367,
"learning_rate": 8.582876423450022e-06,
"loss": 0.6093,
"step": 13400
},
{
"epoch": 1.05,
"grad_norm": 1.5718735456466675,
"learning_rate": 8.566005904681568e-06,
"loss": 0.6097,
"step": 13500
},
{
"epoch": 1.06,
"grad_norm": 1.2772808074951172,
"learning_rate": 8.549135385913118e-06,
"loss": 0.6008,
"step": 13600
},
{
"epoch": 1.07,
"grad_norm": 1.1503815650939941,
"learning_rate": 8.532264867144665e-06,
"loss": 0.6213,
"step": 13700
},
{
"epoch": 1.07,
"grad_norm": 1.1865904331207275,
"learning_rate": 8.515394348376213e-06,
"loss": 0.6041,
"step": 13800
},
{
"epoch": 1.08,
"grad_norm": 1.2000179290771484,
"learning_rate": 8.498523829607761e-06,
"loss": 0.61,
"step": 13900
},
{
"epoch": 1.09,
"grad_norm": 1.3241515159606934,
"learning_rate": 8.48165331083931e-06,
"loss": 0.6043,
"step": 14000
},
{
"epoch": 1.1,
"grad_norm": 1.1904330253601074,
"learning_rate": 8.464782792070858e-06,
"loss": 0.6166,
"step": 14100
},
{
"epoch": 1.1,
"grad_norm": 1.0845420360565186,
"learning_rate": 8.447912273302404e-06,
"loss": 0.6107,
"step": 14200
},
{
"epoch": 1.11,
"grad_norm": 1.2940455675125122,
"learning_rate": 8.431041754533953e-06,
"loss": 0.6131,
"step": 14300
},
{
"epoch": 1.12,
"grad_norm": 1.211099624633789,
"learning_rate": 8.4141712357655e-06,
"loss": 0.6092,
"step": 14400
},
{
"epoch": 1.13,
"grad_norm": 1.245797872543335,
"learning_rate": 8.397300716997049e-06,
"loss": 0.6114,
"step": 14500
},
{
"epoch": 1.14,
"grad_norm": 1.1073272228240967,
"learning_rate": 8.380430198228595e-06,
"loss": 0.6072,
"step": 14600
},
{
"epoch": 1.14,
"grad_norm": 1.2218642234802246,
"learning_rate": 8.363559679460144e-06,
"loss": 0.6036,
"step": 14700
},
{
"epoch": 1.15,
"grad_norm": 1.3461518287658691,
"learning_rate": 8.346689160691692e-06,
"loss": 0.6037,
"step": 14800
},
{
"epoch": 1.16,
"grad_norm": 1.400412917137146,
"learning_rate": 8.32981864192324e-06,
"loss": 0.6032,
"step": 14900
},
{
"epoch": 1.17,
"grad_norm": 1.209768533706665,
"learning_rate": 8.312948123154788e-06,
"loss": 0.6133,
"step": 15000
},
{
"epoch": 1.17,
"grad_norm": 1.2482277154922485,
"learning_rate": 8.296077604386335e-06,
"loss": 0.6027,
"step": 15100
},
{
"epoch": 1.18,
"grad_norm": 1.2215890884399414,
"learning_rate": 8.279207085617883e-06,
"loss": 0.6087,
"step": 15200
},
{
"epoch": 1.19,
"grad_norm": 1.1370948553085327,
"learning_rate": 8.262336566849431e-06,
"loss": 0.6073,
"step": 15300
},
{
"epoch": 1.2,
"grad_norm": 1.1755813360214233,
"learning_rate": 8.24546604808098e-06,
"loss": 0.6039,
"step": 15400
},
{
"epoch": 1.21,
"grad_norm": 1.0347890853881836,
"learning_rate": 8.228595529312528e-06,
"loss": 0.5935,
"step": 15500
},
{
"epoch": 1.21,
"grad_norm": 1.3201394081115723,
"learning_rate": 8.211725010544074e-06,
"loss": 0.6063,
"step": 15600
},
{
"epoch": 1.22,
"grad_norm": 1.3351361751556396,
"learning_rate": 8.194854491775622e-06,
"loss": 0.6092,
"step": 15700
},
{
"epoch": 1.23,
"grad_norm": 1.4285881519317627,
"learning_rate": 8.17798397300717e-06,
"loss": 0.6022,
"step": 15800
},
{
"epoch": 1.24,
"grad_norm": 1.106252908706665,
"learning_rate": 8.161113454238719e-06,
"loss": 0.6197,
"step": 15900
},
{
"epoch": 1.24,
"grad_norm": 1.1707721948623657,
"learning_rate": 8.144242935470267e-06,
"loss": 0.6063,
"step": 16000
},
{
"epoch": 1.25,
"grad_norm": 1.1436829566955566,
"learning_rate": 8.127372416701814e-06,
"loss": 0.6026,
"step": 16100
},
{
"epoch": 1.26,
"grad_norm": 0.986708402633667,
"learning_rate": 8.110501897933362e-06,
"loss": 0.5977,
"step": 16200
},
{
"epoch": 1.27,
"grad_norm": 1.2486974000930786,
"learning_rate": 8.09363137916491e-06,
"loss": 0.6115,
"step": 16300
},
{
"epoch": 1.28,
"grad_norm": 1.1223987340927124,
"learning_rate": 8.076760860396458e-06,
"loss": 0.5952,
"step": 16400
},
{
"epoch": 1.28,
"grad_norm": 1.2212467193603516,
"learning_rate": 8.059890341628006e-06,
"loss": 0.6029,
"step": 16500
},
{
"epoch": 1.29,
"grad_norm": 1.3288586139678955,
"learning_rate": 8.043019822859553e-06,
"loss": 0.6076,
"step": 16600
},
{
"epoch": 1.3,
"grad_norm": 1.1246837377548218,
"learning_rate": 8.026149304091101e-06,
"loss": 0.5966,
"step": 16700
},
{
"epoch": 1.31,
"grad_norm": 1.0421278476715088,
"learning_rate": 8.00927878532265e-06,
"loss": 0.5926,
"step": 16800
},
{
"epoch": 1.31,
"grad_norm": 1.2226579189300537,
"learning_rate": 7.992408266554198e-06,
"loss": 0.5908,
"step": 16900
},
{
"epoch": 1.32,
"grad_norm": 1.2911863327026367,
"learning_rate": 7.975537747785746e-06,
"loss": 0.5961,
"step": 17000
},
{
"epoch": 1.33,
"grad_norm": 1.2572822570800781,
"learning_rate": 7.958667229017292e-06,
"loss": 0.5996,
"step": 17100
},
{
"epoch": 1.34,
"grad_norm": 0.9904419779777527,
"learning_rate": 7.94179671024884e-06,
"loss": 0.5768,
"step": 17200
},
{
"epoch": 1.35,
"grad_norm": 1.0573612451553345,
"learning_rate": 7.924926191480389e-06,
"loss": 0.5923,
"step": 17300
},
{
"epoch": 1.35,
"grad_norm": 1.045145869255066,
"learning_rate": 7.908055672711937e-06,
"loss": 0.5846,
"step": 17400
},
{
"epoch": 1.36,
"grad_norm": 1.1190580129623413,
"learning_rate": 7.891185153943485e-06,
"loss": 0.5939,
"step": 17500
},
{
"epoch": 1.37,
"grad_norm": 1.1604979038238525,
"learning_rate": 7.874314635175032e-06,
"loss": 0.5852,
"step": 17600
},
{
"epoch": 1.38,
"grad_norm": 1.1649556159973145,
"learning_rate": 7.85744411640658e-06,
"loss": 0.5931,
"step": 17700
},
{
"epoch": 1.38,
"grad_norm": 1.163870096206665,
"learning_rate": 7.840573597638128e-06,
"loss": 0.5932,
"step": 17800
},
{
"epoch": 1.39,
"grad_norm": 1.2127817869186401,
"learning_rate": 7.823703078869675e-06,
"loss": 0.586,
"step": 17900
},
{
"epoch": 1.4,
"grad_norm": 1.225334644317627,
"learning_rate": 7.806832560101225e-06,
"loss": 0.5983,
"step": 18000
},
{
"epoch": 1.41,
"grad_norm": 1.2414904832839966,
"learning_rate": 7.789962041332771e-06,
"loss": 0.596,
"step": 18100
},
{
"epoch": 1.42,
"grad_norm": 1.1808518171310425,
"learning_rate": 7.77309152256432e-06,
"loss": 0.5792,
"step": 18200
},
{
"epoch": 1.42,
"grad_norm": 1.1521859169006348,
"learning_rate": 7.756221003795868e-06,
"loss": 0.5985,
"step": 18300
},
{
"epoch": 1.43,
"grad_norm": 1.1075421571731567,
"learning_rate": 7.739350485027414e-06,
"loss": 0.5938,
"step": 18400
},
{
"epoch": 1.44,
"grad_norm": 1.0834968090057373,
"learning_rate": 7.722479966258964e-06,
"loss": 0.5762,
"step": 18500
},
{
"epoch": 1.45,
"grad_norm": 1.2507002353668213,
"learning_rate": 7.70560944749051e-06,
"loss": 0.5868,
"step": 18600
},
{
"epoch": 1.45,
"grad_norm": 1.0832693576812744,
"learning_rate": 7.688738928722059e-06,
"loss": 0.5809,
"step": 18700
},
{
"epoch": 1.46,
"grad_norm": 1.0033252239227295,
"learning_rate": 7.671868409953607e-06,
"loss": 0.5772,
"step": 18800
},
{
"epoch": 1.47,
"grad_norm": 1.1204442977905273,
"learning_rate": 7.654997891185153e-06,
"loss": 0.5866,
"step": 18900
},
{
"epoch": 1.48,
"grad_norm": 1.1747297048568726,
"learning_rate": 7.638127372416703e-06,
"loss": 0.5757,
"step": 19000
},
{
"epoch": 1.49,
"grad_norm": 1.2900995016098022,
"learning_rate": 7.62125685364825e-06,
"loss": 0.5837,
"step": 19100
},
{
"epoch": 1.49,
"grad_norm": 1.1278128623962402,
"learning_rate": 7.604386334879799e-06,
"loss": 0.5743,
"step": 19200
},
{
"epoch": 1.5,
"grad_norm": 1.3102425336837769,
"learning_rate": 7.587515816111346e-06,
"loss": 0.5862,
"step": 19300
},
{
"epoch": 1.51,
"grad_norm": 1.0361497402191162,
"learning_rate": 7.570645297342894e-06,
"loss": 0.5675,
"step": 19400
},
{
"epoch": 1.52,
"grad_norm": 1.135097861289978,
"learning_rate": 7.553774778574442e-06,
"loss": 0.5808,
"step": 19500
},
{
"epoch": 1.52,
"grad_norm": 1.302728295326233,
"learning_rate": 7.536904259805989e-06,
"loss": 0.5829,
"step": 19600
},
{
"epoch": 1.53,
"grad_norm": 1.2034662961959839,
"learning_rate": 7.520033741037538e-06,
"loss": 0.5732,
"step": 19700
},
{
"epoch": 1.54,
"grad_norm": 1.2533587217330933,
"learning_rate": 7.503163222269086e-06,
"loss": 0.5739,
"step": 19800
},
{
"epoch": 1.55,
"grad_norm": 1.3044142723083496,
"learning_rate": 7.486292703500633e-06,
"loss": 0.5793,
"step": 19900
},
{
"epoch": 1.56,
"grad_norm": 1.0934174060821533,
"learning_rate": 7.469422184732181e-06,
"loss": 0.5713,
"step": 20000
},
{
"epoch": 1.56,
"grad_norm": 1.4974043369293213,
"learning_rate": 7.452551665963729e-06,
"loss": 0.5781,
"step": 20100
},
{
"epoch": 1.57,
"grad_norm": 0.9929585456848145,
"learning_rate": 7.435681147195278e-06,
"loss": 0.5762,
"step": 20200
},
{
"epoch": 1.58,
"grad_norm": 1.0563292503356934,
"learning_rate": 7.418810628426825e-06,
"loss": 0.5685,
"step": 20300
},
{
"epoch": 1.59,
"grad_norm": 1.000756859779358,
"learning_rate": 7.401940109658372e-06,
"loss": 0.5797,
"step": 20400
},
{
"epoch": 1.59,
"grad_norm": 1.2806288003921509,
"learning_rate": 7.385069590889921e-06,
"loss": 0.5755,
"step": 20500
},
{
"epoch": 1.6,
"grad_norm": 1.249469518661499,
"learning_rate": 7.368199072121468e-06,
"loss": 0.5877,
"step": 20600
},
{
"epoch": 1.61,
"grad_norm": 1.0450996160507202,
"learning_rate": 7.351328553353016e-06,
"loss": 0.5717,
"step": 20700
},
{
"epoch": 1.62,
"grad_norm": 0.9555745720863342,
"learning_rate": 7.3344580345845635e-06,
"loss": 0.5834,
"step": 20800
},
{
"epoch": 1.63,
"grad_norm": 1.139003038406372,
"learning_rate": 7.317587515816113e-06,
"loss": 0.5745,
"step": 20900
},
{
"epoch": 1.63,
"grad_norm": 1.217157006263733,
"learning_rate": 7.30071699704766e-06,
"loss": 0.5644,
"step": 21000
},
{
"epoch": 1.64,
"grad_norm": 1.2491965293884277,
"learning_rate": 7.283846478279207e-06,
"loss": 0.5695,
"step": 21100
},
{
"epoch": 1.65,
"grad_norm": 1.1394678354263306,
"learning_rate": 7.2669759595107556e-06,
"loss": 0.5671,
"step": 21200
},
{
"epoch": 1.66,
"grad_norm": 1.1305979490280151,
"learning_rate": 7.250105440742303e-06,
"loss": 0.5637,
"step": 21300
},
{
"epoch": 1.66,
"grad_norm": 1.0464513301849365,
"learning_rate": 7.233234921973852e-06,
"loss": 0.5725,
"step": 21400
},
{
"epoch": 1.67,
"grad_norm": 1.0836619138717651,
"learning_rate": 7.216364403205399e-06,
"loss": 0.5606,
"step": 21500
},
{
"epoch": 1.68,
"grad_norm": 1.1601755619049072,
"learning_rate": 7.199493884436947e-06,
"loss": 0.568,
"step": 21600
},
{
"epoch": 1.69,
"grad_norm": 1.1347750425338745,
"learning_rate": 7.182623365668495e-06,
"loss": 0.5474,
"step": 21700
},
{
"epoch": 1.7,
"grad_norm": 1.3478429317474365,
"learning_rate": 7.165752846900042e-06,
"loss": 0.5604,
"step": 21800
},
{
"epoch": 1.7,
"grad_norm": 1.279989242553711,
"learning_rate": 7.148882328131591e-06,
"loss": 0.5588,
"step": 21900
},
{
"epoch": 1.71,
"grad_norm": 1.1383345127105713,
"learning_rate": 7.132011809363139e-06,
"loss": 0.5697,
"step": 22000
},
{
"epoch": 1.72,
"grad_norm": 1.3981715440750122,
"learning_rate": 7.115141290594686e-06,
"loss": 0.574,
"step": 22100
},
{
"epoch": 1.73,
"grad_norm": 1.0683062076568604,
"learning_rate": 7.098270771826234e-06,
"loss": 0.5602,
"step": 22200
},
{
"epoch": 1.73,
"grad_norm": 1.1539549827575684,
"learning_rate": 7.081400253057782e-06,
"loss": 0.551,
"step": 22300
},
{
"epoch": 1.74,
"grad_norm": 1.2667831182479858,
"learning_rate": 7.064529734289331e-06,
"loss": 0.5615,
"step": 22400
},
{
"epoch": 1.75,
"grad_norm": 1.0187904834747314,
"learning_rate": 7.047659215520878e-06,
"loss": 0.5614,
"step": 22500
},
{
"epoch": 1.76,
"grad_norm": 1.0305335521697998,
"learning_rate": 7.0307886967524255e-06,
"loss": 0.5591,
"step": 22600
},
{
"epoch": 1.77,
"grad_norm": 1.2172187566757202,
"learning_rate": 7.013918177983974e-06,
"loss": 0.5628,
"step": 22700
},
{
"epoch": 1.77,
"grad_norm": 1.045708179473877,
"learning_rate": 6.997047659215521e-06,
"loss": 0.5583,
"step": 22800
},
{
"epoch": 1.78,
"grad_norm": 1.0602800846099854,
"learning_rate": 6.98017714044707e-06,
"loss": 0.5586,
"step": 22900
},
{
"epoch": 1.79,
"grad_norm": 1.1277014017105103,
"learning_rate": 6.9633066216786175e-06,
"loss": 0.5588,
"step": 23000
},
{
"epoch": 1.8,
"grad_norm": 1.283036231994629,
"learning_rate": 6.946436102910165e-06,
"loss": 0.5622,
"step": 23100
},
{
"epoch": 1.8,
"grad_norm": 1.3365052938461304,
"learning_rate": 6.929565584141713e-06,
"loss": 0.5596,
"step": 23200
},
{
"epoch": 1.81,
"grad_norm": 1.0396366119384766,
"learning_rate": 6.91269506537326e-06,
"loss": 0.5603,
"step": 23300
},
{
"epoch": 1.82,
"grad_norm": 1.184523344039917,
"learning_rate": 6.895824546604809e-06,
"loss": 0.5552,
"step": 23400
},
{
"epoch": 1.83,
"grad_norm": 1.3032324314117432,
"learning_rate": 6.878954027836357e-06,
"loss": 0.5586,
"step": 23500
},
{
"epoch": 1.84,
"grad_norm": 1.157319188117981,
"learning_rate": 6.862083509067904e-06,
"loss": 0.564,
"step": 23600
},
{
"epoch": 1.84,
"grad_norm": 1.1763290166854858,
"learning_rate": 6.8452129902994524e-06,
"loss": 0.5528,
"step": 23700
},
{
"epoch": 1.85,
"grad_norm": 1.0526578426361084,
"learning_rate": 6.828342471531e-06,
"loss": 0.5578,
"step": 23800
},
{
"epoch": 1.86,
"grad_norm": 1.0352507829666138,
"learning_rate": 6.811471952762548e-06,
"loss": 0.5531,
"step": 23900
},
{
"epoch": 1.87,
"grad_norm": 1.1414486169815063,
"learning_rate": 6.794601433994095e-06,
"loss": 0.5638,
"step": 24000
},
{
"epoch": 1.87,
"grad_norm": 1.1250499486923218,
"learning_rate": 6.777730915225644e-06,
"loss": 0.5553,
"step": 24100
},
{
"epoch": 1.88,
"grad_norm": 1.178361415863037,
"learning_rate": 6.760860396457192e-06,
"loss": 0.5667,
"step": 24200
},
{
"epoch": 1.89,
"grad_norm": 1.283947467803955,
"learning_rate": 6.743989877688739e-06,
"loss": 0.5554,
"step": 24300
},
{
"epoch": 1.9,
"grad_norm": 1.2320690155029297,
"learning_rate": 6.727119358920287e-06,
"loss": 0.5508,
"step": 24400
},
{
"epoch": 1.91,
"grad_norm": 1.1178127527236938,
"learning_rate": 6.710248840151835e-06,
"loss": 0.5614,
"step": 24500
},
{
"epoch": 1.91,
"grad_norm": 1.1042382717132568,
"learning_rate": 6.693378321383384e-06,
"loss": 0.5507,
"step": 24600
},
{
"epoch": 1.92,
"grad_norm": 1.0952810049057007,
"learning_rate": 6.676507802614931e-06,
"loss": 0.5437,
"step": 24700
},
{
"epoch": 1.93,
"grad_norm": 1.3677794933319092,
"learning_rate": 6.6596372838464785e-06,
"loss": 0.5684,
"step": 24800
},
{
"epoch": 1.94,
"grad_norm": 1.3078778982162476,
"learning_rate": 6.642766765078027e-06,
"loss": 0.5619,
"step": 24900
},
{
"epoch": 1.94,
"grad_norm": 1.1470273733139038,
"learning_rate": 6.625896246309574e-06,
"loss": 0.5356,
"step": 25000
},
{
"epoch": 1.95,
"grad_norm": 1.2592968940734863,
"learning_rate": 6.609025727541123e-06,
"loss": 0.5516,
"step": 25100
},
{
"epoch": 1.96,
"grad_norm": 1.0444315671920776,
"learning_rate": 6.5921552087726706e-06,
"loss": 0.5463,
"step": 25200
},
{
"epoch": 1.97,
"grad_norm": 1.1364483833312988,
"learning_rate": 6.575284690004218e-06,
"loss": 0.5443,
"step": 25300
},
{
"epoch": 1.98,
"grad_norm": 1.1489980220794678,
"learning_rate": 6.558414171235766e-06,
"loss": 0.544,
"step": 25400
},
{
"epoch": 1.98,
"grad_norm": 0.92986661195755,
"learning_rate": 6.5415436524673135e-06,
"loss": 0.5501,
"step": 25500
},
{
"epoch": 1.99,
"grad_norm": 1.272945761680603,
"learning_rate": 6.5246731336988626e-06,
"loss": 0.5443,
"step": 25600
},
{
"epoch": 2.0,
"grad_norm": 1.163329005241394,
"learning_rate": 6.50780261493041e-06,
"loss": 0.5564,
"step": 25700
},
{
"epoch": 2.0,
"eval_loss": 0.5525972843170166,
"eval_runtime": 233.1479,
"eval_samples_per_second": 27.656,
"eval_steps_per_second": 27.656,
"step": 25711
},
{
"epoch": 2.01,
"grad_norm": 0.9832913875579834,
"learning_rate": 6.490932096161957e-06,
"loss": 0.495,
"step": 25800
},
{
"epoch": 2.01,
"grad_norm": 1.2738430500030518,
"learning_rate": 6.4740615773935055e-06,
"loss": 0.4882,
"step": 25900
},
{
"epoch": 2.02,
"grad_norm": 1.1017522811889648,
"learning_rate": 6.457191058625053e-06,
"loss": 0.4908,
"step": 26000
},
{
"epoch": 2.03,
"grad_norm": 1.2149347066879272,
"learning_rate": 6.440320539856602e-06,
"loss": 0.4937,
"step": 26100
},
{
"epoch": 2.04,
"grad_norm": 1.1412744522094727,
"learning_rate": 6.423450021088149e-06,
"loss": 0.4829,
"step": 26200
},
{
"epoch": 2.05,
"grad_norm": 1.1836376190185547,
"learning_rate": 6.406579502319697e-06,
"loss": 0.4886,
"step": 26300
},
{
"epoch": 2.05,
"grad_norm": 1.1491622924804688,
"learning_rate": 6.389708983551245e-06,
"loss": 0.4807,
"step": 26400
},
{
"epoch": 2.06,
"grad_norm": 1.2772555351257324,
"learning_rate": 6.372838464782792e-06,
"loss": 0.4943,
"step": 26500
},
{
"epoch": 2.07,
"grad_norm": 1.2873830795288086,
"learning_rate": 6.3559679460143405e-06,
"loss": 0.4876,
"step": 26600
},
{
"epoch": 2.08,
"grad_norm": 1.2151273488998413,
"learning_rate": 6.339097427245889e-06,
"loss": 0.4835,
"step": 26700
},
{
"epoch": 2.08,
"grad_norm": 1.0475809574127197,
"learning_rate": 6.322226908477436e-06,
"loss": 0.4903,
"step": 26800
},
{
"epoch": 2.09,
"grad_norm": 1.1041380167007446,
"learning_rate": 6.305356389708984e-06,
"loss": 0.4875,
"step": 26900
},
{
"epoch": 2.1,
"grad_norm": 1.3321166038513184,
"learning_rate": 6.288485870940532e-06,
"loss": 0.4854,
"step": 27000
},
{
"epoch": 2.11,
"grad_norm": 1.1235756874084473,
"learning_rate": 6.27161535217208e-06,
"loss": 0.4888,
"step": 27100
},
{
"epoch": 2.12,
"grad_norm": 1.0493980646133423,
"learning_rate": 6.254744833403627e-06,
"loss": 0.4921,
"step": 27200
},
{
"epoch": 2.12,
"grad_norm": 1.0204726457595825,
"learning_rate": 6.237874314635175e-06,
"loss": 0.4791,
"step": 27300
},
{
"epoch": 2.13,
"grad_norm": 1.2959028482437134,
"learning_rate": 6.221003795866724e-06,
"loss": 0.484,
"step": 27400
},
{
"epoch": 2.14,
"grad_norm": 1.1229135990142822,
"learning_rate": 6.204133277098271e-06,
"loss": 0.5006,
"step": 27500
},
{
"epoch": 2.15,
"grad_norm": 1.0745277404785156,
"learning_rate": 6.187262758329819e-06,
"loss": 0.4859,
"step": 27600
},
{
"epoch": 2.15,
"grad_norm": 1.222716212272644,
"learning_rate": 6.170392239561367e-06,
"loss": 0.4891,
"step": 27700
},
{
"epoch": 2.16,
"grad_norm": 1.3341572284698486,
"learning_rate": 6.153521720792916e-06,
"loss": 0.4915,
"step": 27800
},
{
"epoch": 2.17,
"grad_norm": 1.187410831451416,
"learning_rate": 6.136651202024463e-06,
"loss": 0.4834,
"step": 27900
},
{
"epoch": 2.18,
"grad_norm": 1.1536892652511597,
"learning_rate": 6.11978068325601e-06,
"loss": 0.486,
"step": 28000
},
{
"epoch": 2.19,
"grad_norm": 1.3513904809951782,
"learning_rate": 6.102910164487559e-06,
"loss": 0.4947,
"step": 28100
},
{
"epoch": 2.19,
"grad_norm": 1.2405853271484375,
"learning_rate": 6.086039645719106e-06,
"loss": 0.4928,
"step": 28200
},
{
"epoch": 2.2,
"grad_norm": 1.4121474027633667,
"learning_rate": 6.069169126950655e-06,
"loss": 0.4767,
"step": 28300
},
{
"epoch": 2.21,
"grad_norm": 1.161752462387085,
"learning_rate": 6.052298608182202e-06,
"loss": 0.4922,
"step": 28400
},
{
"epoch": 2.22,
"grad_norm": 1.2579442262649536,
"learning_rate": 6.03542808941375e-06,
"loss": 0.4773,
"step": 28500
},
{
"epoch": 2.22,
"grad_norm": 1.140579104423523,
"learning_rate": 6.018557570645298e-06,
"loss": 0.4798,
"step": 28600
},
{
"epoch": 2.23,
"grad_norm": 1.1399654150009155,
"learning_rate": 6.001687051876845e-06,
"loss": 0.4944,
"step": 28700
},
{
"epoch": 2.24,
"grad_norm": 1.1394907236099243,
"learning_rate": 5.984816533108394e-06,
"loss": 0.4868,
"step": 28800
},
{
"epoch": 2.25,
"grad_norm": 1.4349123239517212,
"learning_rate": 5.967946014339942e-06,
"loss": 0.4897,
"step": 28900
},
{
"epoch": 2.26,
"grad_norm": 1.0745654106140137,
"learning_rate": 5.951075495571489e-06,
"loss": 0.4857,
"step": 29000
},
{
"epoch": 2.26,
"grad_norm": 1.0100363492965698,
"learning_rate": 5.934204976803037e-06,
"loss": 0.4881,
"step": 29100
},
{
"epoch": 2.27,
"grad_norm": 1.0638195276260376,
"learning_rate": 5.917334458034585e-06,
"loss": 0.4868,
"step": 29200
},
{
"epoch": 2.28,
"grad_norm": 1.222130537033081,
"learning_rate": 5.900463939266134e-06,
"loss": 0.4772,
"step": 29300
},
{
"epoch": 2.29,
"grad_norm": 1.444646954536438,
"learning_rate": 5.883593420497681e-06,
"loss": 0.4865,
"step": 29400
},
{
"epoch": 2.29,
"grad_norm": 1.2834380865097046,
"learning_rate": 5.8667229017292285e-06,
"loss": 0.4883,
"step": 29500
},
{
"epoch": 2.3,
"grad_norm": 1.1390889883041382,
"learning_rate": 5.849852382960777e-06,
"loss": 0.4838,
"step": 29600
},
{
"epoch": 2.31,
"grad_norm": 1.1462106704711914,
"learning_rate": 5.832981864192324e-06,
"loss": 0.4879,
"step": 29700
},
{
"epoch": 2.32,
"grad_norm": 1.1867048740386963,
"learning_rate": 5.816111345423872e-06,
"loss": 0.485,
"step": 29800
},
{
"epoch": 2.33,
"grad_norm": 1.1438149213790894,
"learning_rate": 5.7992408266554205e-06,
"loss": 0.4926,
"step": 29900
},
{
"epoch": 2.33,
"grad_norm": 1.0708352327346802,
"learning_rate": 5.782370307886968e-06,
"loss": 0.476,
"step": 30000
},
{
"epoch": 2.34,
"grad_norm": 1.059994101524353,
"learning_rate": 5.765499789118516e-06,
"loss": 0.4819,
"step": 30100
},
{
"epoch": 2.35,
"grad_norm": 1.1846622228622437,
"learning_rate": 5.7486292703500635e-06,
"loss": 0.4796,
"step": 30200
},
{
"epoch": 2.36,
"grad_norm": 1.2288451194763184,
"learning_rate": 5.731758751581612e-06,
"loss": 0.4826,
"step": 30300
},
{
"epoch": 2.36,
"grad_norm": 1.278354525566101,
"learning_rate": 5.714888232813159e-06,
"loss": 0.4767,
"step": 30400
},
{
"epoch": 2.37,
"grad_norm": 1.0636687278747559,
"learning_rate": 5.698017714044707e-06,
"loss": 0.4746,
"step": 30500
},
{
"epoch": 2.38,
"grad_norm": 1.18000328540802,
"learning_rate": 5.6811471952762555e-06,
"loss": 0.4774,
"step": 30600
},
{
"epoch": 2.39,
"grad_norm": 1.2274439334869385,
"learning_rate": 5.664276676507803e-06,
"loss": 0.4838,
"step": 30700
},
{
"epoch": 2.4,
"grad_norm": 1.0425608158111572,
"learning_rate": 5.647406157739351e-06,
"loss": 0.4758,
"step": 30800
},
{
"epoch": 2.4,
"grad_norm": 1.2052890062332153,
"learning_rate": 5.630535638970898e-06,
"loss": 0.4833,
"step": 30900
},
{
"epoch": 2.41,
"grad_norm": 1.1569069623947144,
"learning_rate": 5.613665120202446e-06,
"loss": 0.4865,
"step": 31000
},
{
"epoch": 2.42,
"grad_norm": 1.067058801651001,
"learning_rate": 5.596794601433995e-06,
"loss": 0.4748,
"step": 31100
},
{
"epoch": 2.43,
"grad_norm": 1.1886574029922485,
"learning_rate": 5.579924082665542e-06,
"loss": 0.4787,
"step": 31200
},
{
"epoch": 2.43,
"grad_norm": 1.2338231801986694,
"learning_rate": 5.56305356389709e-06,
"loss": 0.4817,
"step": 31300
},
{
"epoch": 2.44,
"grad_norm": 1.4264863729476929,
"learning_rate": 5.546183045128638e-06,
"loss": 0.4735,
"step": 31400
},
{
"epoch": 2.45,
"grad_norm": 1.0503981113433838,
"learning_rate": 5.529312526360187e-06,
"loss": 0.4852,
"step": 31500
},
{
"epoch": 2.46,
"grad_norm": 1.3074897527694702,
"learning_rate": 5.512442007591734e-06,
"loss": 0.4863,
"step": 31600
},
{
"epoch": 2.47,
"grad_norm": 1.3505380153656006,
"learning_rate": 5.495571488823282e-06,
"loss": 0.4894,
"step": 31700
},
{
"epoch": 2.47,
"grad_norm": 1.0360243320465088,
"learning_rate": 5.47870097005483e-06,
"loss": 0.4878,
"step": 31800
},
{
"epoch": 2.48,
"grad_norm": 1.166276216506958,
"learning_rate": 5.461830451286377e-06,
"loss": 0.4804,
"step": 31900
},
{
"epoch": 2.49,
"grad_norm": 1.0366944074630737,
"learning_rate": 5.444959932517926e-06,
"loss": 0.4717,
"step": 32000
},
{
"epoch": 2.5,
"grad_norm": 1.046561360359192,
"learning_rate": 5.428089413749474e-06,
"loss": 0.4863,
"step": 32100
},
{
"epoch": 2.5,
"grad_norm": 1.0516040325164795,
"learning_rate": 5.411218894981021e-06,
"loss": 0.4785,
"step": 32200
},
{
"epoch": 2.51,
"grad_norm": 1.2917454242706299,
"learning_rate": 5.394348376212569e-06,
"loss": 0.4892,
"step": 32300
},
{
"epoch": 2.52,
"grad_norm": 1.0712103843688965,
"learning_rate": 5.3774778574441165e-06,
"loss": 0.4893,
"step": 32400
},
{
"epoch": 2.53,
"grad_norm": 1.0713152885437012,
"learning_rate": 5.360607338675666e-06,
"loss": 0.4828,
"step": 32500
},
{
"epoch": 2.54,
"grad_norm": 1.0355168581008911,
"learning_rate": 5.343736819907213e-06,
"loss": 0.4813,
"step": 32600
},
{
"epoch": 2.54,
"grad_norm": 1.2146625518798828,
"learning_rate": 5.32686630113876e-06,
"loss": 0.4879,
"step": 32700
},
{
"epoch": 2.55,
"grad_norm": 1.3223153352737427,
"learning_rate": 5.3099957823703085e-06,
"loss": 0.4832,
"step": 32800
},
{
"epoch": 2.56,
"grad_norm": 1.0482958555221558,
"learning_rate": 5.293125263601856e-06,
"loss": 0.477,
"step": 32900
},
{
"epoch": 2.57,
"grad_norm": 1.1204655170440674,
"learning_rate": 5.276254744833404e-06,
"loss": 0.4861,
"step": 33000
},
{
"epoch": 2.57,
"grad_norm": 1.2434940338134766,
"learning_rate": 5.2593842260649515e-06,
"loss": 0.474,
"step": 33100
},
{
"epoch": 2.58,
"grad_norm": 1.0437453985214233,
"learning_rate": 5.2425137072965e-06,
"loss": 0.4664,
"step": 33200
},
{
"epoch": 2.59,
"grad_norm": 1.5683417320251465,
"learning_rate": 5.225643188528048e-06,
"loss": 0.4691,
"step": 33300
},
{
"epoch": 2.6,
"grad_norm": 1.290700912475586,
"learning_rate": 5.208772669759595e-06,
"loss": 0.4841,
"step": 33400
},
{
"epoch": 2.61,
"grad_norm": 1.2382111549377441,
"learning_rate": 5.1919021509911435e-06,
"loss": 0.4804,
"step": 33500
},
{
"epoch": 2.61,
"grad_norm": 1.090914011001587,
"learning_rate": 5.175031632222691e-06,
"loss": 0.4776,
"step": 33600
},
{
"epoch": 2.62,
"grad_norm": 1.215134620666504,
"learning_rate": 5.158161113454238e-06,
"loss": 0.4751,
"step": 33700
},
{
"epoch": 2.63,
"grad_norm": 1.1768503189086914,
"learning_rate": 5.141290594685787e-06,
"loss": 0.4809,
"step": 33800
},
{
"epoch": 2.64,
"grad_norm": 1.1772345304489136,
"learning_rate": 5.124420075917335e-06,
"loss": 0.4826,
"step": 33900
},
{
"epoch": 2.64,
"grad_norm": 0.980823278427124,
"learning_rate": 5.107549557148883e-06,
"loss": 0.4792,
"step": 34000
},
{
"epoch": 2.65,
"grad_norm": 1.1386022567749023,
"learning_rate": 5.09067903838043e-06,
"loss": 0.477,
"step": 34100
},
{
"epoch": 2.66,
"grad_norm": 1.2393611669540405,
"learning_rate": 5.073808519611978e-06,
"loss": 0.4845,
"step": 34200
},
{
"epoch": 2.67,
"grad_norm": 1.2329386472702026,
"learning_rate": 5.056938000843527e-06,
"loss": 0.4876,
"step": 34300
},
{
"epoch": 2.68,
"grad_norm": 1.2180235385894775,
"learning_rate": 5.040067482075074e-06,
"loss": 0.4768,
"step": 34400
},
{
"epoch": 2.68,
"grad_norm": 1.026029109954834,
"learning_rate": 5.023196963306622e-06,
"loss": 0.4841,
"step": 34500
},
{
"epoch": 2.69,
"grad_norm": 1.1008986234664917,
"learning_rate": 5.00632644453817e-06,
"loss": 0.4706,
"step": 34600
},
{
"epoch": 2.7,
"grad_norm": 1.0781484842300415,
"learning_rate": 4.989455925769718e-06,
"loss": 0.4735,
"step": 34700
},
{
"epoch": 2.71,
"grad_norm": 1.0506776571273804,
"learning_rate": 4.972585407001266e-06,
"loss": 0.4773,
"step": 34800
},
{
"epoch": 2.71,
"grad_norm": 1.1269252300262451,
"learning_rate": 4.955714888232813e-06,
"loss": 0.4688,
"step": 34900
},
{
"epoch": 2.72,
"grad_norm": 1.0111608505249023,
"learning_rate": 4.938844369464361e-06,
"loss": 0.4782,
"step": 35000
},
{
"epoch": 2.73,
"grad_norm": 1.0887819528579712,
"learning_rate": 4.921973850695909e-06,
"loss": 0.4741,
"step": 35100
},
{
"epoch": 2.74,
"grad_norm": 1.1558033227920532,
"learning_rate": 4.905103331927457e-06,
"loss": 0.4818,
"step": 35200
},
{
"epoch": 2.75,
"grad_norm": 1.4611961841583252,
"learning_rate": 4.888232813159005e-06,
"loss": 0.4729,
"step": 35300
},
{
"epoch": 2.75,
"grad_norm": 1.0571033954620361,
"learning_rate": 4.871362294390553e-06,
"loss": 0.4747,
"step": 35400
},
{
"epoch": 2.76,
"grad_norm": 0.9912450909614563,
"learning_rate": 4.8544917756221e-06,
"loss": 0.4773,
"step": 35500
},
{
"epoch": 2.77,
"grad_norm": 1.04233717918396,
"learning_rate": 4.837621256853648e-06,
"loss": 0.4713,
"step": 35600
},
{
"epoch": 2.78,
"grad_norm": 1.1826248168945312,
"learning_rate": 4.820750738085197e-06,
"loss": 0.479,
"step": 35700
},
{
"epoch": 2.78,
"grad_norm": 1.2568957805633545,
"learning_rate": 4.803880219316745e-06,
"loss": 0.4657,
"step": 35800
},
{
"epoch": 2.79,
"grad_norm": 1.2568626403808594,
"learning_rate": 4.787009700548292e-06,
"loss": 0.4855,
"step": 35900
},
{
"epoch": 2.8,
"grad_norm": 1.1952085494995117,
"learning_rate": 4.77013918177984e-06,
"loss": 0.4741,
"step": 36000
},
{
"epoch": 2.81,
"grad_norm": 1.2562224864959717,
"learning_rate": 4.753268663011388e-06,
"loss": 0.4738,
"step": 36100
},
{
"epoch": 2.82,
"grad_norm": 1.166925311088562,
"learning_rate": 4.736398144242936e-06,
"loss": 0.4841,
"step": 36200
},
{
"epoch": 2.82,
"grad_norm": 1.2542935609817505,
"learning_rate": 4.719527625474483e-06,
"loss": 0.4782,
"step": 36300
},
{
"epoch": 2.83,
"grad_norm": 1.1650253534317017,
"learning_rate": 4.7026571067060315e-06,
"loss": 0.4747,
"step": 36400
},
{
"epoch": 2.84,
"grad_norm": 1.2341094017028809,
"learning_rate": 4.68578658793758e-06,
"loss": 0.4852,
"step": 36500
},
{
"epoch": 2.85,
"grad_norm": 1.3932220935821533,
"learning_rate": 4.668916069169127e-06,
"loss": 0.4725,
"step": 36600
},
{
"epoch": 2.85,
"grad_norm": 1.3615128993988037,
"learning_rate": 4.652045550400675e-06,
"loss": 0.4794,
"step": 36700
},
{
"epoch": 2.86,
"grad_norm": 1.3352348804473877,
"learning_rate": 4.635175031632223e-06,
"loss": 0.4755,
"step": 36800
},
{
"epoch": 2.87,
"grad_norm": 1.2848610877990723,
"learning_rate": 4.618304512863771e-06,
"loss": 0.4873,
"step": 36900
},
{
"epoch": 2.88,
"grad_norm": 1.153571605682373,
"learning_rate": 4.601433994095319e-06,
"loss": 0.4767,
"step": 37000
},
{
"epoch": 2.89,
"grad_norm": 1.2806872129440308,
"learning_rate": 4.5845634753268665e-06,
"loss": 0.4732,
"step": 37100
},
{
"epoch": 2.89,
"grad_norm": 1.292242169380188,
"learning_rate": 4.567692956558415e-06,
"loss": 0.4803,
"step": 37200
},
{
"epoch": 2.9,
"grad_norm": 1.2197422981262207,
"learning_rate": 4.550822437789962e-06,
"loss": 0.4726,
"step": 37300
},
{
"epoch": 2.91,
"grad_norm": 1.2034332752227783,
"learning_rate": 4.53395191902151e-06,
"loss": 0.4832,
"step": 37400
},
{
"epoch": 2.92,
"grad_norm": 1.4561015367507935,
"learning_rate": 4.5170814002530585e-06,
"loss": 0.4797,
"step": 37500
},
{
"epoch": 2.92,
"grad_norm": 1.1991673707962036,
"learning_rate": 4.500210881484606e-06,
"loss": 0.4837,
"step": 37600
},
{
"epoch": 2.93,
"grad_norm": 1.1463394165039062,
"learning_rate": 4.483340362716154e-06,
"loss": 0.471,
"step": 37700
},
{
"epoch": 2.94,
"grad_norm": 1.0985455513000488,
"learning_rate": 4.4664698439477014e-06,
"loss": 0.4671,
"step": 37800
},
{
"epoch": 2.95,
"grad_norm": 1.1526215076446533,
"learning_rate": 4.44959932517925e-06,
"loss": 0.4727,
"step": 37900
},
{
"epoch": 2.96,
"grad_norm": 1.0876762866973877,
"learning_rate": 4.432728806410798e-06,
"loss": 0.468,
"step": 38000
},
{
"epoch": 2.96,
"grad_norm": 1.0995726585388184,
"learning_rate": 4.415858287642345e-06,
"loss": 0.4752,
"step": 38100
},
{
"epoch": 2.97,
"grad_norm": 1.0579547882080078,
"learning_rate": 4.398987768873893e-06,
"loss": 0.4739,
"step": 38200
},
{
"epoch": 2.98,
"grad_norm": 1.174473762512207,
"learning_rate": 4.382117250105441e-06,
"loss": 0.4749,
"step": 38300
},
{
"epoch": 2.99,
"grad_norm": 1.066528081893921,
"learning_rate": 4.365246731336989e-06,
"loss": 0.4721,
"step": 38400
},
{
"epoch": 2.99,
"grad_norm": 0.9816131591796875,
"learning_rate": 4.348376212568537e-06,
"loss": 0.4703,
"step": 38500
},
{
"epoch": 3.0,
"eval_loss": 0.524797260761261,
"eval_runtime": 233.3906,
"eval_samples_per_second": 27.628,
"eval_steps_per_second": 27.628,
"step": 38566
},
{
"epoch": 3.0,
"grad_norm": 1.1473281383514404,
"learning_rate": 4.331505693800085e-06,
"loss": 0.4575,
"step": 38600
},
{
"epoch": 3.01,
"grad_norm": 1.112509846687317,
"learning_rate": 4.314635175031632e-06,
"loss": 0.4101,
"step": 38700
},
{
"epoch": 3.02,
"grad_norm": 1.1065998077392578,
"learning_rate": 4.29776465626318e-06,
"loss": 0.406,
"step": 38800
},
{
"epoch": 3.03,
"grad_norm": 1.0503835678100586,
"learning_rate": 4.280894137494728e-06,
"loss": 0.4111,
"step": 38900
},
{
"epoch": 3.03,
"grad_norm": 1.226361632347107,
"learning_rate": 4.264023618726277e-06,
"loss": 0.4126,
"step": 39000
},
{
"epoch": 3.04,
"grad_norm": 1.2307040691375732,
"learning_rate": 4.247153099957824e-06,
"loss": 0.4006,
"step": 39100
},
{
"epoch": 3.05,
"grad_norm": 1.4101603031158447,
"learning_rate": 4.230282581189372e-06,
"loss": 0.4157,
"step": 39200
},
{
"epoch": 3.06,
"grad_norm": 1.257118582725525,
"learning_rate": 4.2134120624209196e-06,
"loss": 0.4178,
"step": 39300
},
{
"epoch": 3.06,
"grad_norm": 1.1911427974700928,
"learning_rate": 4.196541543652468e-06,
"loss": 0.4109,
"step": 39400
},
{
"epoch": 3.07,
"grad_norm": 1.1281373500823975,
"learning_rate": 4.179671024884015e-06,
"loss": 0.4091,
"step": 39500
},
{
"epoch": 3.08,
"grad_norm": 1.3192293643951416,
"learning_rate": 4.162800506115563e-06,
"loss": 0.408,
"step": 39600
},
{
"epoch": 3.09,
"grad_norm": 1.245911955833435,
"learning_rate": 4.145929987347112e-06,
"loss": 0.4117,
"step": 39700
},
{
"epoch": 3.1,
"grad_norm": 1.466209053993225,
"learning_rate": 4.129059468578659e-06,
"loss": 0.412,
"step": 39800
},
{
"epoch": 3.1,
"grad_norm": 1.4812861680984497,
"learning_rate": 4.112188949810207e-06,
"loss": 0.4169,
"step": 39900
},
{
"epoch": 3.11,
"grad_norm": 1.10277259349823,
"learning_rate": 4.0953184310417545e-06,
"loss": 0.4141,
"step": 40000
},
{
"epoch": 3.12,
"grad_norm": 1.393477439880371,
"learning_rate": 4.078447912273303e-06,
"loss": 0.411,
"step": 40100
},
{
"epoch": 3.13,
"grad_norm": 1.0994014739990234,
"learning_rate": 4.061577393504851e-06,
"loss": 0.4061,
"step": 40200
},
{
"epoch": 3.13,
"grad_norm": 1.2751548290252686,
"learning_rate": 4.044706874736398e-06,
"loss": 0.4209,
"step": 40300
},
{
"epoch": 3.14,
"grad_norm": 1.2419929504394531,
"learning_rate": 4.0278363559679465e-06,
"loss": 0.4099,
"step": 40400
},
{
"epoch": 3.15,
"grad_norm": 1.1361491680145264,
"learning_rate": 4.010965837199494e-06,
"loss": 0.4087,
"step": 40500
},
{
"epoch": 3.16,
"grad_norm": 1.077528715133667,
"learning_rate": 3.994095318431042e-06,
"loss": 0.4124,
"step": 40600
},
{
"epoch": 3.17,
"grad_norm": 1.264989972114563,
"learning_rate": 3.97722479966259e-06,
"loss": 0.413,
"step": 40700
},
{
"epoch": 3.17,
"grad_norm": 1.3401693105697632,
"learning_rate": 3.960354280894138e-06,
"loss": 0.4127,
"step": 40800
},
{
"epoch": 3.18,
"grad_norm": 1.2370538711547852,
"learning_rate": 3.943483762125686e-06,
"loss": 0.4106,
"step": 40900
},
{
"epoch": 3.19,
"grad_norm": 1.1465864181518555,
"learning_rate": 3.926613243357233e-06,
"loss": 0.4135,
"step": 41000
},
{
"epoch": 3.2,
"grad_norm": 1.1296664476394653,
"learning_rate": 3.9097427245887815e-06,
"loss": 0.403,
"step": 41100
},
{
"epoch": 3.2,
"grad_norm": 1.2683963775634766,
"learning_rate": 3.89287220582033e-06,
"loss": 0.4145,
"step": 41200
},
{
"epoch": 3.21,
"grad_norm": 1.3670600652694702,
"learning_rate": 3.876001687051877e-06,
"loss": 0.4159,
"step": 41300
},
{
"epoch": 3.22,
"grad_norm": 1.1410002708435059,
"learning_rate": 3.8591311682834244e-06,
"loss": 0.4066,
"step": 41400
},
{
"epoch": 3.23,
"grad_norm": 1.1826266050338745,
"learning_rate": 3.842260649514973e-06,
"loss": 0.4101,
"step": 41500
},
{
"epoch": 3.24,
"grad_norm": 1.0869300365447998,
"learning_rate": 3.825390130746521e-06,
"loss": 0.4116,
"step": 41600
},
{
"epoch": 3.24,
"grad_norm": 1.2107648849487305,
"learning_rate": 3.8085196119780687e-06,
"loss": 0.4083,
"step": 41700
},
{
"epoch": 3.25,
"grad_norm": 1.4168275594711304,
"learning_rate": 3.791649093209617e-06,
"loss": 0.4065,
"step": 41800
},
{
"epoch": 3.26,
"grad_norm": 1.3980125188827515,
"learning_rate": 3.7747785744411642e-06,
"loss": 0.4034,
"step": 41900
},
{
"epoch": 3.27,
"grad_norm": 1.1646445989608765,
"learning_rate": 3.757908055672712e-06,
"loss": 0.4078,
"step": 42000
},
{
"epoch": 3.27,
"grad_norm": 1.3632497787475586,
"learning_rate": 3.7410375369042602e-06,
"loss": 0.411,
"step": 42100
},
{
"epoch": 3.28,
"grad_norm": 1.249964714050293,
"learning_rate": 3.724167018135808e-06,
"loss": 0.4138,
"step": 42200
},
{
"epoch": 3.29,
"grad_norm": 1.3631513118743896,
"learning_rate": 3.707296499367356e-06,
"loss": 0.4045,
"step": 42300
},
{
"epoch": 3.3,
"grad_norm": 1.2958687543869019,
"learning_rate": 3.6904259805989036e-06,
"loss": 0.4131,
"step": 42400
},
{
"epoch": 3.31,
"grad_norm": 1.4446603059768677,
"learning_rate": 3.6735554618304514e-06,
"loss": 0.4199,
"step": 42500
},
{
"epoch": 3.31,
"grad_norm": 1.2070785760879517,
"learning_rate": 3.656684943061999e-06,
"loss": 0.4159,
"step": 42600
},
{
"epoch": 3.32,
"grad_norm": 1.514217734336853,
"learning_rate": 3.6398144242935474e-06,
"loss": 0.4079,
"step": 42700
},
{
"epoch": 3.33,
"grad_norm": 1.194019079208374,
"learning_rate": 3.622943905525095e-06,
"loss": 0.4165,
"step": 42800
},
{
"epoch": 3.34,
"grad_norm": 1.4232854843139648,
"learning_rate": 3.6060733867566434e-06,
"loss": 0.4159,
"step": 42900
},
{
"epoch": 3.34,
"grad_norm": 1.1158806085586548,
"learning_rate": 3.5892028679881908e-06,
"loss": 0.4142,
"step": 43000
},
{
"epoch": 3.35,
"grad_norm": 1.1942150592803955,
"learning_rate": 3.5723323492197386e-06,
"loss": 0.4101,
"step": 43100
},
{
"epoch": 3.36,
"grad_norm": 1.254410982131958,
"learning_rate": 3.5554618304512868e-06,
"loss": 0.4042,
"step": 43200
},
{
"epoch": 3.37,
"grad_norm": 1.163241982460022,
"learning_rate": 3.5385913116828346e-06,
"loss": 0.4084,
"step": 43300
},
{
"epoch": 3.38,
"grad_norm": 1.1720082759857178,
"learning_rate": 3.5217207929143828e-06,
"loss": 0.4109,
"step": 43400
},
{
"epoch": 3.38,
"grad_norm": 1.2809245586395264,
"learning_rate": 3.50485027414593e-06,
"loss": 0.4086,
"step": 43500
},
{
"epoch": 3.39,
"grad_norm": 1.3495714664459229,
"learning_rate": 3.487979755377478e-06,
"loss": 0.4077,
"step": 43600
},
{
"epoch": 3.4,
"grad_norm": 1.273619532585144,
"learning_rate": 3.471109236609026e-06,
"loss": 0.4099,
"step": 43700
},
{
"epoch": 3.41,
"grad_norm": 1.223887324333191,
"learning_rate": 3.454238717840574e-06,
"loss": 0.4075,
"step": 43800
},
{
"epoch": 3.41,
"grad_norm": 1.131531834602356,
"learning_rate": 3.4373681990721217e-06,
"loss": 0.4105,
"step": 43900
},
{
"epoch": 3.42,
"grad_norm": 1.5390982627868652,
"learning_rate": 3.4204976803036695e-06,
"loss": 0.4116,
"step": 44000
},
{
"epoch": 3.43,
"grad_norm": 1.4024629592895508,
"learning_rate": 3.4036271615352173e-06,
"loss": 0.403,
"step": 44100
},
{
"epoch": 3.44,
"grad_norm": 1.2337039709091187,
"learning_rate": 3.386756642766765e-06,
"loss": 0.4142,
"step": 44200
},
{
"epoch": 3.45,
"grad_norm": 1.3793139457702637,
"learning_rate": 3.3698861239983133e-06,
"loss": 0.4148,
"step": 44300
},
{
"epoch": 3.45,
"grad_norm": 1.348007321357727,
"learning_rate": 3.353015605229861e-06,
"loss": 0.4054,
"step": 44400
},
{
"epoch": 3.46,
"grad_norm": 1.566469430923462,
"learning_rate": 3.3361450864614093e-06,
"loss": 0.4125,
"step": 44500
},
{
"epoch": 3.47,
"grad_norm": 1.1387908458709717,
"learning_rate": 3.3192745676929567e-06,
"loss": 0.4085,
"step": 44600
},
{
"epoch": 3.48,
"grad_norm": 1.2111226320266724,
"learning_rate": 3.3024040489245045e-06,
"loss": 0.4151,
"step": 44700
},
{
"epoch": 3.48,
"grad_norm": 1.3462387323379517,
"learning_rate": 3.2855335301560527e-06,
"loss": 0.4094,
"step": 44800
},
{
"epoch": 3.49,
"grad_norm": 1.2581942081451416,
"learning_rate": 3.2686630113876005e-06,
"loss": 0.4051,
"step": 44900
},
{
"epoch": 3.5,
"grad_norm": 1.4975100755691528,
"learning_rate": 3.2517924926191487e-06,
"loss": 0.4108,
"step": 45000
},
{
"epoch": 3.51,
"grad_norm": 1.167520523071289,
"learning_rate": 3.234921973850696e-06,
"loss": 0.4039,
"step": 45100
},
{
"epoch": 3.52,
"grad_norm": 1.2680399417877197,
"learning_rate": 3.218051455082244e-06,
"loss": 0.4127,
"step": 45200
},
{
"epoch": 3.52,
"grad_norm": 1.3369581699371338,
"learning_rate": 3.201180936313792e-06,
"loss": 0.4081,
"step": 45300
},
{
"epoch": 3.53,
"grad_norm": 1.311107873916626,
"learning_rate": 3.18431041754534e-06,
"loss": 0.4083,
"step": 45400
},
{
"epoch": 3.54,
"grad_norm": 1.3422982692718506,
"learning_rate": 3.1674398987768877e-06,
"loss": 0.4086,
"step": 45500
},
{
"epoch": 3.55,
"grad_norm": 1.5844368934631348,
"learning_rate": 3.1505693800084354e-06,
"loss": 0.413,
"step": 45600
},
{
"epoch": 3.55,
"grad_norm": 1.2561074495315552,
"learning_rate": 3.1336988612399832e-06,
"loss": 0.414,
"step": 45700
},
{
"epoch": 3.56,
"grad_norm": 1.2554163932800293,
"learning_rate": 3.116828342471531e-06,
"loss": 0.4112,
"step": 45800
},
{
"epoch": 3.57,
"grad_norm": 1.5437623262405396,
"learning_rate": 3.0999578237030792e-06,
"loss": 0.4106,
"step": 45900
},
{
"epoch": 3.58,
"grad_norm": 1.1693291664123535,
"learning_rate": 3.083087304934627e-06,
"loss": 0.4119,
"step": 46000
},
{
"epoch": 3.59,
"grad_norm": 1.373396635055542,
"learning_rate": 3.0662167861661752e-06,
"loss": 0.4104,
"step": 46100
},
{
"epoch": 3.59,
"grad_norm": 1.2403075695037842,
"learning_rate": 3.0493462673977226e-06,
"loss": 0.4206,
"step": 46200
},
{
"epoch": 3.6,
"grad_norm": 1.3935080766677856,
"learning_rate": 3.0324757486292704e-06,
"loss": 0.4084,
"step": 46300
},
{
"epoch": 3.61,
"grad_norm": 1.1799649000167847,
"learning_rate": 3.0156052298608186e-06,
"loss": 0.4087,
"step": 46400
},
{
"epoch": 3.62,
"grad_norm": 1.169565200805664,
"learning_rate": 2.9987347110923664e-06,
"loss": 0.4049,
"step": 46500
},
{
"epoch": 3.62,
"grad_norm": 1.3765678405761719,
"learning_rate": 2.9818641923239146e-06,
"loss": 0.402,
"step": 46600
},
{
"epoch": 3.63,
"grad_norm": 1.5545567274093628,
"learning_rate": 2.964993673555462e-06,
"loss": 0.4111,
"step": 46700
},
{
"epoch": 3.64,
"grad_norm": 1.3903080224990845,
"learning_rate": 2.9481231547870098e-06,
"loss": 0.4054,
"step": 46800
},
{
"epoch": 3.65,
"grad_norm": 1.1009324789047241,
"learning_rate": 2.931252636018558e-06,
"loss": 0.4006,
"step": 46900
},
{
"epoch": 3.66,
"grad_norm": 1.1967973709106445,
"learning_rate": 2.9143821172501058e-06,
"loss": 0.4139,
"step": 47000
},
{
"epoch": 3.66,
"grad_norm": 1.1527669429779053,
"learning_rate": 2.8975115984816536e-06,
"loss": 0.4168,
"step": 47100
},
{
"epoch": 3.67,
"grad_norm": 1.3475886583328247,
"learning_rate": 2.8806410797132014e-06,
"loss": 0.4084,
"step": 47200
},
{
"epoch": 3.68,
"grad_norm": 1.0963339805603027,
"learning_rate": 2.863770560944749e-06,
"loss": 0.404,
"step": 47300
},
{
"epoch": 3.69,
"grad_norm": 1.3665622472763062,
"learning_rate": 2.846900042176297e-06,
"loss": 0.4049,
"step": 47400
},
{
"epoch": 3.69,
"grad_norm": 1.4860732555389404,
"learning_rate": 2.830029523407845e-06,
"loss": 0.4035,
"step": 47500
},
{
"epoch": 3.7,
"grad_norm": 1.3885513544082642,
"learning_rate": 2.813159004639393e-06,
"loss": 0.4033,
"step": 47600
},
{
"epoch": 3.71,
"grad_norm": 1.2163450717926025,
"learning_rate": 2.7962884858709403e-06,
"loss": 0.4085,
"step": 47700
},
{
"epoch": 3.72,
"grad_norm": 1.4728765487670898,
"learning_rate": 2.7794179671024885e-06,
"loss": 0.4049,
"step": 47800
},
{
"epoch": 3.73,
"grad_norm": 1.2868274450302124,
"learning_rate": 2.7625474483340363e-06,
"loss": 0.4119,
"step": 47900
},
{
"epoch": 3.73,
"grad_norm": 1.5102245807647705,
"learning_rate": 2.7456769295655845e-06,
"loss": 0.3999,
"step": 48000
},
{
"epoch": 3.74,
"grad_norm": 1.0794662237167358,
"learning_rate": 2.7288064107971323e-06,
"loss": 0.4031,
"step": 48100
},
{
"epoch": 3.75,
"grad_norm": 1.4579282999038696,
"learning_rate": 2.7119358920286805e-06,
"loss": 0.4084,
"step": 48200
},
{
"epoch": 3.76,
"grad_norm": 1.2417278289794922,
"learning_rate": 2.695065373260228e-06,
"loss": 0.4146,
"step": 48300
},
{
"epoch": 3.76,
"grad_norm": 1.4067513942718506,
"learning_rate": 2.6781948544917757e-06,
"loss": 0.4111,
"step": 48400
},
{
"epoch": 3.77,
"grad_norm": 1.2484619617462158,
"learning_rate": 2.661324335723324e-06,
"loss": 0.4125,
"step": 48500
},
{
"epoch": 3.78,
"grad_norm": 1.2016352415084839,
"learning_rate": 2.6444538169548717e-06,
"loss": 0.4049,
"step": 48600
},
{
"epoch": 3.79,
"grad_norm": 1.154489278793335,
"learning_rate": 2.6275832981864195e-06,
"loss": 0.4115,
"step": 48700
},
{
"epoch": 3.8,
"grad_norm": 1.4561036825180054,
"learning_rate": 2.6107127794179673e-06,
"loss": 0.4139,
"step": 48800
},
{
"epoch": 3.8,
"grad_norm": 1.373879075050354,
"learning_rate": 2.593842260649515e-06,
"loss": 0.4092,
"step": 48900
},
{
"epoch": 3.81,
"grad_norm": 1.198873519897461,
"learning_rate": 2.576971741881063e-06,
"loss": 0.4128,
"step": 49000
},
{
"epoch": 3.82,
"grad_norm": 1.1985969543457031,
"learning_rate": 2.560101223112611e-06,
"loss": 0.4058,
"step": 49100
},
{
"epoch": 3.83,
"grad_norm": 1.1502113342285156,
"learning_rate": 2.543230704344159e-06,
"loss": 0.4045,
"step": 49200
},
{
"epoch": 3.83,
"grad_norm": 1.4170736074447632,
"learning_rate": 2.5263601855757062e-06,
"loss": 0.4147,
"step": 49300
},
{
"epoch": 3.84,
"grad_norm": 1.1340407133102417,
"learning_rate": 2.5094896668072544e-06,
"loss": 0.4114,
"step": 49400
},
{
"epoch": 3.85,
"grad_norm": 1.1955766677856445,
"learning_rate": 2.4926191480388022e-06,
"loss": 0.4095,
"step": 49500
},
{
"epoch": 3.86,
"grad_norm": 1.2731263637542725,
"learning_rate": 2.4757486292703504e-06,
"loss": 0.4117,
"step": 49600
},
{
"epoch": 3.87,
"grad_norm": 1.2607747316360474,
"learning_rate": 2.458878110501898e-06,
"loss": 0.4093,
"step": 49700
},
{
"epoch": 3.87,
"grad_norm": 1.2586636543273926,
"learning_rate": 2.442007591733446e-06,
"loss": 0.4113,
"step": 49800
},
{
"epoch": 3.88,
"grad_norm": 1.1627777814865112,
"learning_rate": 2.425137072964994e-06,
"loss": 0.4103,
"step": 49900
},
{
"epoch": 3.89,
"grad_norm": 1.3252959251403809,
"learning_rate": 2.4082665541965416e-06,
"loss": 0.4123,
"step": 50000
},
{
"epoch": 3.9,
"grad_norm": 1.3316943645477295,
"learning_rate": 2.39139603542809e-06,
"loss": 0.4101,
"step": 50100
},
{
"epoch": 3.9,
"grad_norm": 1.1199568510055542,
"learning_rate": 2.3745255166596376e-06,
"loss": 0.3995,
"step": 50200
},
{
"epoch": 3.91,
"grad_norm": 1.3360415697097778,
"learning_rate": 2.3576549978911854e-06,
"loss": 0.4084,
"step": 50300
},
{
"epoch": 3.92,
"grad_norm": 1.3866636753082275,
"learning_rate": 2.340784479122733e-06,
"loss": 0.4103,
"step": 50400
},
{
"epoch": 3.93,
"grad_norm": 1.39589262008667,
"learning_rate": 2.323913960354281e-06,
"loss": 0.4114,
"step": 50500
},
{
"epoch": 3.94,
"grad_norm": 1.269492506980896,
"learning_rate": 2.3070434415858288e-06,
"loss": 0.4031,
"step": 50600
},
{
"epoch": 3.94,
"grad_norm": 1.2548081874847412,
"learning_rate": 2.290172922817377e-06,
"loss": 0.4088,
"step": 50700
},
{
"epoch": 3.95,
"grad_norm": 1.410291075706482,
"learning_rate": 2.2733024040489248e-06,
"loss": 0.4046,
"step": 50800
},
{
"epoch": 3.96,
"grad_norm": 1.4330939054489136,
"learning_rate": 2.2564318852804726e-06,
"loss": 0.4095,
"step": 50900
},
{
"epoch": 3.97,
"grad_norm": 1.1737109422683716,
"learning_rate": 2.2395613665120204e-06,
"loss": 0.4014,
"step": 51000
},
{
"epoch": 3.97,
"grad_norm": 1.186975121498108,
"learning_rate": 2.222690847743568e-06,
"loss": 0.398,
"step": 51100
},
{
"epoch": 3.98,
"grad_norm": 1.2583659887313843,
"learning_rate": 2.2058203289751164e-06,
"loss": 0.4068,
"step": 51200
},
{
"epoch": 3.99,
"grad_norm": 1.1873714923858643,
"learning_rate": 2.1889498102066637e-06,
"loss": 0.4131,
"step": 51300
},
{
"epoch": 4.0,
"grad_norm": 1.2815061807632446,
"learning_rate": 2.172079291438212e-06,
"loss": 0.3969,
"step": 51400
},
{
"epoch": 4.0,
"eval_loss": 0.5248594284057617,
"eval_runtime": 233.5072,
"eval_samples_per_second": 27.614,
"eval_steps_per_second": 27.614,
"step": 51422
}
],
"logging_steps": 100,
"max_steps": 64275,
"num_input_tokens_seen": 0,
"num_train_epochs": 5,
"save_steps": 500,
"total_flos": 2.2978476544119404e+18,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}
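
The JSON above appears to be a standard Hugging Face `transformers` Trainer state: `log_history` holds one entry per `logging_steps` (100) with `loss`, `learning_rate`, `grad_norm`, and `step`, plus an `eval_loss` entry at the end of each epoch. As a minimal sketch (not part of the repository), the snippet below shows how that structure can be parsed to plot the training/eval loss and the learning-rate schedule, which from the logged values looks like a linear warmup over the first 5,000 steps followed by a linear decay toward `max_steps`. It assumes the file has been downloaded locally as `trainer_state.json`; `matplotlib` is used only for plotting.

```python
import json

import matplotlib.pyplot as plt  # assumption: used only to visualize the curves

with open("trainer_state.json") as f:
    state = json.load(f)

# Training entries carry "loss"; epoch-end evaluation entries carry "eval_loss".
train_logs = [e for e in state["log_history"] if "loss" in e]
eval_logs = [e for e in state["log_history"] if "eval_loss" in e]

steps = [e["step"] for e in train_logs]
losses = [e["loss"] for e in train_logs]
lrs = [e["learning_rate"] for e in train_logs]

fig, (ax_loss, ax_lr) = plt.subplots(2, 1, sharex=True)

ax_loss.plot(steps, losses, label="train loss")
ax_loss.scatter(
    [e["step"] for e in eval_logs],
    [e["eval_loss"] for e in eval_logs],
    color="red",
    label="eval loss",
)
ax_loss.set_ylabel("loss")
ax_loss.legend()

ax_lr.plot(steps, lrs)
ax_lr.set_xlabel("global step")
ax_lr.set_ylabel("learning rate")

plt.tight_layout()
plt.show()
```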