diff --git "a/checkpoint-2136960/trainer_state.json" "b/checkpoint-2136960/trainer_state.json"
new file mode 100644
--- /dev/null
+++ "b/checkpoint-2136960/trainer_state.json"
@@ -0,0 +1,25287 @@
+{
+  "best_metric": 3.9916534423828125,
+  "best_model_checkpoint": "/mmfs1/gscratch/stf/abhinavp/corpus-filtering/outputs/rel-cl2/lstm/1/checkpoints/checkpoint-2136960",
+  "epoch": 1.0250006060157382,
+  "eval_steps": 10,
+  "global_step": 2136960,
+  "is_hyper_param_search": false,
+  "is_local_process_zero": true,
+  "is_world_process_zero": true,
+  "log_history": [
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.999998362119627e-05,
+      "loss": 10.8195,
+      "step": 1
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.999161405248948e-05,
+      "loss": 7.5561,
+      "step": 512
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.998322810497896e-05,
+      "loss": 7.0599,
+      "step": 1024
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.997484215746844e-05,
+      "loss": 6.9972,
+      "step": 1536
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.996645620995792e-05,
+      "loss": 6.9475,
+      "step": 2048
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.99580702624474e-05,
+      "loss": 6.9188,
+      "step": 2560
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.994968431493688e-05,
+      "loss": 6.7574,
+      "step": 3072
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.994131474623009e-05,
+      "loss": 6.6474,
+      "step": 3584
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.993292879871958e-05,
+      "loss": 6.5534,
+      "step": 4096
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.992454285120906e-05,
+      "loss": 6.4733,
+      "step": 4608
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.991615690369854e-05,
+      "loss": 6.4162,
+      "step": 5120
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.990777095618801e-05,
+      "loss": 6.3506,
+      "step": 5632
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.989938500867749e-05,
+      "loss": 6.2788,
+      "step": 6144
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.989099906116697e-05,
+      "loss": 6.2098,
+      "step": 6656
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.988261311365645e-05,
+      "loss": 6.154,
+      "step": 7168
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.987422716614593e-05,
+      "loss": 6.0923,
+      "step": 7680
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.986584121863541e-05,
+      "loss": 6.0503,
+      "step": 8192
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.985745527112489e-05,
+      "loss": 6.0096,
+      "step": 8704
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.984906932361437e-05,
+      "loss": 5.963,
+      "step": 9216
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.984068337610385e-05,
+      "loss": 5.9206,
+      "step": 9728
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.983229742859333e-05,
+      "loss": 5.8923,
+      "step": 10240
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.982391148108281e-05,
+      "loss": 5.8495,
+      "step": 10752
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.981552553357229e-05,
+      "loss": 5.8265,
+      "step": 11264
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.980713958606178e-05,
+      "loss": 5.788,
+      "step": 11776
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.979875363855125e-05,
+      "loss": 5.7724,
+      "step": 12288
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.979036769104073e-05,
+      "loss": 5.7327,
+      "step": 12800
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.978198174353021e-05,
+      "loss": 5.7102,
+      "step": 13312
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.9773612174823426e-05,
+      "loss": 5.6808,
+      "step": 13824
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.97652262273129e-05,
+      "loss": 5.6655,
+      "step": 14336
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.975684027980238e-05,
+      "loss": 5.6388,
+      "step": 14848
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.974845433229186e-05,
+      "loss": 5.6265,
+      "step": 15360
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.9740101142388804e-05,
+      "loss": 5.6087,
+      "step": 15872
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.9731715194878284e-05,
+      "loss": 5.5848,
+      "step": 16384
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.9723329247367764e-05,
+      "loss": 5.5705,
+      "step": 16896
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.9714943299857244e-05,
+      "loss": 5.5555,
+      "step": 17408
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.970657373115046e-05,
+      "loss": 5.5334,
+      "step": 17920
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.969818778363994e-05,
+      "loss": 5.5301,
+      "step": 18432
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.968980183612942e-05,
+      "loss": 5.4959,
+      "step": 18944
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.96814158886189e-05,
+      "loss": 5.4843,
+      "step": 19456
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.967302994110837e-05,
+      "loss": 5.4547,
+      "step": 19968
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.966467675120532e-05,
+      "loss": 5.4546,
+      "step": 20480
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.96562908036948e-05,
+      "loss": 5.4389,
+      "step": 20992
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.964790485618428e-05,
+      "loss": 5.4366,
+      "step": 21504
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.963951890867376e-05,
+      "loss": 5.409,
+      "step": 22016
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.963113296116324e-05,
+      "loss": 5.3987,
+      "step": 22528
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.962274701365272e-05,
+      "loss": 5.3976,
+      "step": 23040
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.96143610661422e-05,
+      "loss": 5.3891,
+      "step": 23552
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.9605991497435414e-05,
+      "loss": 5.3773,
+      "step": 24064
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.9597605549924894e-05,
+      "loss": 5.3578,
+      "step": 24576
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.9589219602414374e-05,
+      "loss": 5.3439,
+      "step": 25088
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.958083365490385e-05,
+      "loss": 5.3611,
+      "step": 25600
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.957244770739333e-05,
+      "loss": 5.3472,
+      "step": 26112
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.956406175988281e-05,
+      "loss": 5.3271,
+      "step": 26624
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.955567581237229e-05,
+      "loss": 5.3072,
+      "step": 27136
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.954728986486177e-05,
+      "loss": 5.3151,
+      "step": 27648
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.9538920296154976e-05,
+      "loss": 5.294,
+      "step": 28160
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.9530534348644456e-05,
+      "loss": 5.3014,
+      "step": 28672
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.9522148401133936e-05,
+      "loss": 5.27,
+      "step": 29184
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.951376245362342e-05,
+      "loss": 5.2815,
+      "step": 29696
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.950539288491663e-05,
+      "loss": 5.266,
+      "step": 30208
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.949702331620985e-05,
+      "loss": 5.2482,
+      "step": 30720
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.948863736869932e-05,
+      "loss": 5.235,
+      "step": 31232
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.94802514211888e-05,
+      "loss": 5.2471,
+      "step": 31744
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.947186547367828e-05,
+      "loss": 5.2046,
+      "step": 32256
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.946347952616776e-05,
+      "loss": 5.2193,
+      "step": 32768
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.945509357865724e-05,
+      "loss": 5.2267,
+      "step": 33280
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.944670763114672e-05,
+      "loss": 5.2094,
+      "step": 33792
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.94383216836362e-05,
+      "loss": 5.2021,
+      "step": 34304
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.942995211492941e-05,
+      "loss": 5.1768,
+      "step": 34816
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.942156616741889e-05,
+      "loss": 5.1731,
+      "step": 35328
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.941318021990837e-05,
+      "loss": 5.171,
+      "step": 35840
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.9404794272397856e-05,
+      "loss": 5.1818,
+      "step": 36352
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.9396408324887336e-05,
+      "loss": 5.1619,
+      "step": 36864
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.9388038756180545e-05,
+      "loss": 5.1702,
+      "step": 37376
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.9379652808670025e-05,
+      "loss": 5.1592,
+      "step": 37888
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.9371283239963234e-05,
+      "loss": 5.1568,
+      "step": 38400
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.9362897292452714e-05,
+      "loss": 5.1342,
+      "step": 38912
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.9354511344942194e-05,
+      "loss": 5.1321,
+      "step": 39424
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.9346125397431674e-05,
+      "loss": 5.1243,
+      "step": 39936
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.9337739449921154e-05,
+      "loss": 5.1194,
+      "step": 40448
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.9329353502410634e-05,
+      "loss": 5.1185,
+      "step": 40960
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.9320967554900114e-05,
+      "loss": 5.1013,
+      "step": 41472
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.9312581607389594e-05,
+      "loss": 5.1026,
+      "step": 41984
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.930421203868281e-05,
+      "loss": 5.1017,
+      "step": 42496
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.929582609117229e-05,
+      "loss": 5.0774,
+      "step": 43008
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.928744014366177e-05,
+      "loss": 5.0845,
+      "step": 43520
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.927905419615125e-05,
+      "loss": 5.0835,
+      "step": 44032
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.927066824864073e-05,
+      "loss": 5.0844,
+      "step": 44544
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.92622823011302e-05,
+      "loss": 5.0655,
+      "step": 45056
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.925389635361968e-05,
+      "loss": 5.062,
+      "step": 45568
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.924551040610916e-05,
+      "loss": 5.0485,
+      "step": 46080
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.923712445859864e-05,
+      "loss": 5.0456,
+      "step": 46592
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.922875488989185e-05,
+      "loss": 5.0451,
+      "step": 47104
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.922036894238133e-05,
+      "loss": 5.0429,
+      "step": 47616
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.921198299487081e-05,
+      "loss": 5.0349,
+      "step": 48128
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.920359704736029e-05,
+      "loss": 5.037,
+      "step": 48640
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.919522747865351e-05,
+      "loss": 5.0276,
+      "step": 49152
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.918684153114299e-05,
+      "loss": 5.0119,
+      "step": 49664
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.917845558363247e-05,
+      "loss": 5.0076,
+      "step": 50176
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.917006963612195e-05,
+      "loss": 5.0109,
+      "step": 50688
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.916168368861143e-05,
+      "loss": 5.0064,
+      "step": 51200
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.9153314119904636e-05,
+      "loss": 5.0103,
+      "step": 51712
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.9144928172394116e-05,
+      "loss": 4.9923,
+      "step": 52224
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.9136558603687325e-05,
+      "loss": 4.9852,
+      "step": 52736
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.9128172656176805e-05,
+      "loss": 4.9892,
+      "step": 53248
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.9119786708666285e-05,
+      "loss": 4.9695,
+      "step": 53760
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.9111400761155765e-05,
+      "loss": 4.975,
+      "step": 54272
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.9103014813645245e-05,
+      "loss": 4.9649,
+      "step": 54784
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.909462886613473e-05,
+      "loss": 4.9654,
+      "step": 55296
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.908624291862421e-05,
+      "loss": 4.9521,
+      "step": 55808
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.907785697111369e-05,
+      "loss": 4.9555,
+      "step": 56320
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.906947102360317e-05,
+      "loss": 4.9455,
+      "step": 56832
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.906110145489638e-05,
+      "loss": 4.9481,
+      "step": 57344
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.905271550738586e-05,
+      "loss": 4.9428,
+      "step": 57856
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.904432955987534e-05,
+      "loss": 4.9335,
+      "step": 58368
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.903594361236482e-05,
+      "loss": 4.9442,
+      "step": 58880
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.902757404365803e-05,
+      "loss": 4.9418,
+      "step": 59392
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.901918809614751e-05,
+      "loss": 4.9294,
+      "step": 59904
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.901080214863699e-05,
+      "loss": 4.9164,
+      "step": 60416
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.900241620112647e-05,
+      "loss": 4.9174,
+      "step": 60928
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.899403025361595e-05,
+      "loss": 4.9151,
+      "step": 61440
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.8985660684909166e-05,
+      "loss": 4.9168,
+      "step": 61952
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.8977274737398646e-05,
+      "loss": 4.9108,
+      "step": 62464
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.8968888789888125e-05,
+      "loss": 4.9012,
+      "step": 62976
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.8960502842377605e-05,
+      "loss": 4.9047,
+      "step": 63488
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.8952116894867085e-05,
+      "loss": 4.8905,
+      "step": 64000
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.8943730947356565e-05,
+      "loss": 4.8952,
+      "step": 64512
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.893534499984604e-05,
+      "loss": 4.8811,
+      "step": 65024
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.892695905233552e-05,
+      "loss": 4.8873,
+      "step": 65536
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.8918573104825e-05,
+      "loss": 4.876,
+      "step": 66048
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.8910203536118214e-05,
+      "loss": 4.8856,
+      "step": 66560
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.890181758860769e-05,
+      "loss": 4.8846,
+      "step": 67072
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.889343164109717e-05,
+      "loss": 4.882,
+      "step": 67584
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.888504569358665e-05,
+      "loss": 4.8584,
+      "step": 68096
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.8876659746076134e-05,
+      "loss": 4.8639,
+      "step": 68608
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.8868273798565614e-05,
+      "loss": 4.862,
+      "step": 69120
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.8859887851055094e-05,
+      "loss": 4.8639,
+      "step": 69632
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.88515182823483e-05,
+      "loss": 4.852,
+      "step": 70144
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.884313233483778e-05,
+      "loss": 4.8572,
+      "step": 70656
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.883474638732726e-05,
+      "loss": 4.8458,
+      "step": 71168
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.882636043981674e-05,
+      "loss": 4.861,
+      "step": 71680
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.881797449230622e-05,
+      "loss": 4.8289,
+      "step": 72192
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.880960492359943e-05,
+      "loss": 4.8484,
+      "step": 72704
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.880121897608891e-05,
+      "loss": 4.8358,
+      "step": 73216
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.879283302857839e-05,
+      "loss": 4.8287,
+      "step": 73728
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.878444708106787e-05,
+      "loss": 4.8282,
+      "step": 74240
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.877607751236109e-05,
+      "loss": 4.813,
+      "step": 74752
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.876769156485057e-05,
+      "loss": 4.8183,
+      "step": 75264
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.875932199614378e-05,
+      "loss": 4.8269,
+      "step": 75776
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.875093604863326e-05,
+      "loss": 4.8123,
+      "step": 76288
+    },
+    {
+      "epoch": 0.03,
+      "eval_loss": 4.779831886291504,
+      "eval_runtime": 288.1178,
+      "eval_samples_per_second": 1324.427,
+      "eval_steps_per_second": 41.389,
+      "step": 76320
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.874255010112274e-05,
+      "loss": 4.8111,
+      "step": 76800
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.873416415361222e-05,
+      "loss": 4.8048,
+      "step": 77312
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.87257782061017e-05,
+      "loss": 4.8175,
+      "step": 77824
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.871739225859118e-05,
+      "loss": 4.8068,
+      "step": 78336
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.8709006311080657e-05,
+      "loss": 4.8127,
+      "step": 78848
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.8700636742373866e-05,
+      "loss": 4.7888,
+      "step": 79360
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.8692250794863346e-05,
+      "loss": 4.796,
+      "step": 79872
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.8683864847352826e-05,
+      "loss": 4.7801,
+      "step": 80384
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.8675478899842306e-05,
+      "loss": 4.7839,
+      "step": 80896
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.8667092952331785e-05,
+      "loss": 4.788,
+      "step": 81408
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.865870700482127e-05,
+      "loss": 4.7842,
+      "step": 81920
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.865033743611448e-05,
+      "loss": 4.7876,
+      "step": 82432
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.864196786740769e-05,
+      "loss": 4.7664,
+      "step": 82944
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.863358191989717e-05,
+      "loss": 4.7803,
+      "step": 83456
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.862519597238665e-05,
+      "loss": 4.7618,
+      "step": 83968
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.861682640367986e-05,
+      "loss": 4.7667,
+      "step": 84480
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.860844045616934e-05,
+      "loss": 4.7686,
+      "step": 84992
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.860005450865882e-05,
+      "loss": 4.7593,
+      "step": 85504
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.85916685611483e-05,
+      "loss": 4.7627,
+      "step": 86016
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.858328261363778e-05,
+      "loss": 4.775,
+      "step": 86528
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.857489666612726e-05,
+      "loss": 4.7505,
+      "step": 87040
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.856651071861674e-05,
+      "loss": 4.7575,
+      "step": 87552
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.8558124771106226e-05,
+      "loss": 4.7503,
+      "step": 88064
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.85497388235957e-05,
+      "loss": 4.7591,
+      "step": 88576
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.854135287608518e-05,
+      "loss": 4.748,
+      "step": 89088
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.853296692857466e-05,
+      "loss": 4.7478,
+      "step": 89600
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.852458098106414e-05,
+      "loss": 4.7309,
+      "step": 90112
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.851619503355362e-05,
+      "loss": 4.7403,
+      "step": 90624
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.85078090860431e-05,
+      "loss": 4.7271,
+      "step": 91136
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.849942313853258e-05,
+      "loss": 4.738,
+      "step": 91648
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.849105356982579e-05,
+      "loss": 4.7333,
+      "step": 92160
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.848266762231527e-05,
+      "loss": 4.7318,
+      "step": 92672
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.847428167480475e-05,
+      "loss": 4.7342,
+      "step": 93184
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.846589572729423e-05,
+      "loss": 4.725,
+      "step": 93696
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.8457526158587444e-05,
+      "loss": 4.7303,
+      "step": 94208
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.8449140211076924e-05,
+      "loss": 4.7232,
+      "step": 94720
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.8440754263566404e-05,
+      "loss": 4.7129,
+      "step": 95232
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.8432368316055884e-05,
+      "loss": 4.7066,
+      "step": 95744
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.8423982368545363e-05,
+      "loss": 4.6953,
+      "step": 96256
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.8415596421034843e-05,
+      "loss": 4.7051,
+      "step": 96768
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.840721047352432e-05,
+      "loss": 4.7039,
+      "step": 97280
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.839884090481753e-05,
+      "loss": 4.7105,
+      "step": 97792
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.839045495730701e-05,
+      "loss": 4.6933,
+      "step": 98304
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.838206900979649e-05,
+      "loss": 4.6952,
+      "step": 98816
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.837368306228597e-05,
+      "loss": 4.6984,
+      "step": 99328
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.836529711477545e-05,
+      "loss": 4.6911,
+      "step": 99840
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.835691116726493e-05,
+      "loss": 4.6973,
+      "step": 100352
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.834852521975441e-05,
+      "loss": 4.6833,
+      "step": 100864
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.834013927224389e-05,
+      "loss": 4.675,
+      "step": 101376
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.833176970353711e-05,
+      "loss": 4.7029,
+      "step": 101888
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.832338375602659e-05,
+      "loss": 4.6967,
+      "step": 102400
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.831499780851606e-05,
+      "loss": 4.6819,
+      "step": 102912
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.830661186100554e-05,
+      "loss": 4.678,
+      "step": 103424
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.829822591349502e-05,
+      "loss": 4.6853,
+      "step": 103936
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.828985634478824e-05,
+      "loss": 4.6695,
+      "step": 104448
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.828147039727771e-05,
+      "loss": 4.687,
+      "step": 104960
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.827308444976719e-05,
+      "loss": 4.6578,
+      "step": 105472
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.826469850225667e-05,
+      "loss": 4.682,
+      "step": 105984
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.825631255474615e-05,
+      "loss": 4.6739,
+      "step": 106496
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.824792660723563e-05,
+      "loss": 4.6568,
+      "step": 107008
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.823954065972511e-05,
+      "loss": 4.6549,
+      "step": 107520
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.82311547122146e-05,
+      "loss": 4.6691,
+      "step": 108032
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.8222801522311535e-05,
+      "loss": 4.6315,
+      "step": 108544
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.8214415574801015e-05,
+      "loss": 4.654,
+      "step": 109056
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.8206029627290495e-05,
+      "loss": 4.6645,
+      "step": 109568
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.8197643679779975e-05,
+      "loss": 4.656,
+      "step": 110080
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.8189274111073184e-05,
+      "loss": 4.6474,
+      "step": 110592
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.8180888163562664e-05,
+      "loss": 4.6334,
+      "step": 111104
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.8172502216052144e-05,
+      "loss": 4.639,
+      "step": 111616
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.8164116268541624e-05,
+      "loss": 4.6418,
+      "step": 112128
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.815574669983483e-05,
+      "loss": 4.6485,
+      "step": 112640
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.814736075232432e-05,
+      "loss": 4.6363,
+      "step": 113152
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.81389748048138e-05,
+      "loss": 4.6456,
+      "step": 113664
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.813058885730328e-05,
+      "loss": 4.649,
+      "step": 114176
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.812220290979276e-05,
+      "loss": 4.6462,
+      "step": 114688
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.811381696228224e-05,
+      "loss": 4.6283,
+      "step": 115200
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.810543101477172e-05,
+      "loss": 4.6338,
+      "step": 115712
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.80970450672612e-05,
+      "loss": 4.6326,
+      "step": 116224
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.808867549855441e-05,
+      "loss": 4.625,
+      "step": 116736
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.808028955104389e-05,
+      "loss": 4.6308,
+      "step": 117248
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.807190360353337e-05,
+      "loss": 4.6205,
+      "step": 117760
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.806351765602285e-05,
+      "loss": 4.6291,
+      "step": 118272
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.805514808731606e-05,
+      "loss": 4.6264,
+      "step": 118784
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.804676213980554e-05,
+      "loss": 4.6066,
+      "step": 119296
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.803837619229502e-05,
+      "loss": 4.6191,
+      "step": 119808
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.8029990244784504e-05,
+      "loss": 4.6188,
+      "step": 120320
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.8021604297273984e-05,
+      "loss": 4.6249,
+      "step": 120832
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.801323472856719e-05,
+      "loss": 4.6154,
+      "step": 121344
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.800484878105667e-05,
+      "loss": 4.6108,
+      "step": 121856
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.799646283354615e-05,
+      "loss": 4.6036,
+      "step": 122368
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.798807688603563e-05,
+      "loss": 4.5998,
+      "step": 122880
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.797970731732884e-05,
+      "loss": 4.604,
+      "step": 123392
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.797132136981832e-05,
+      "loss": 4.6104,
+      "step": 123904
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.79629354223078e-05,
+      "loss": 4.6054,
+      "step": 124416
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.795454947479728e-05,
+      "loss": 4.6106,
+      "step": 124928
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.794616352728676e-05,
+      "loss": 4.6005,
+      "step": 125440
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.793779395857997e-05,
+      "loss": 4.595,
+      "step": 125952
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.792940801106946e-05,
+      "loss": 4.5925,
+      "step": 126464
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.792102206355894e-05,
+      "loss": 4.5972,
+      "step": 126976
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.791263611604842e-05,
+      "loss": 4.6017,
+      "step": 127488
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.79042501685379e-05,
+      "loss": 4.6028,
+      "step": 128000
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.7895880599831107e-05,
+      "loss": 4.5901,
+      "step": 128512
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.7887494652320587e-05,
+      "loss": 4.5824,
+      "step": 129024
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.7879108704810067e-05,
+      "loss": 4.5922,
+      "step": 129536
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.7870722757299546e-05,
+      "loss": 4.577,
+      "step": 130048
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.786233680978902e-05,
+      "loss": 4.584,
+      "step": 130560
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.7853967241082236e-05,
+      "loss": 4.5802,
+      "step": 131072
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.7845581293571715e-05,
+      "loss": 4.5804,
+      "step": 131584
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.7837195346061195e-05,
+      "loss": 4.5706,
+      "step": 132096
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.7828809398550675e-05,
+      "loss": 4.5814,
+      "step": 132608
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.7820423451040155e-05,
+      "loss": 4.5675,
+      "step": 133120
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.781205388233337e-05,
+      "loss": 4.5722,
+      "step": 133632
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.7803667934822844e-05,
+      "loss": 4.5762,
+      "step": 134144
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.7795281987312324e-05,
+      "loss": 4.5642,
+      "step": 134656
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.7786896039801804e-05,
+      "loss": 4.5851,
+      "step": 135168
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.777852647109502e-05,
+      "loss": 4.5813,
+      "step": 135680
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.777014052358449e-05,
+      "loss": 4.5737,
+      "step": 136192
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.776175457607397e-05,
+      "loss": 4.5632,
+      "step": 136704
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.775336862856345e-05,
+      "loss": 4.5608,
+      "step": 137216
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.774499905985666e-05,
+      "loss": 4.5673,
+      "step": 137728
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.773661311234615e-05,
+      "loss": 4.5617,
+      "step": 138240
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.772822716483563e-05,
+      "loss": 4.5727,
+      "step": 138752
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.771984121732511e-05,
+      "loss": 4.5525,
+      "step": 139264
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.771145526981459e-05,
+      "loss": 4.5608,
+      "step": 139776
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.770306932230407e-05,
+      "loss": 4.5584,
+      "step": 140288
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.769468337479355e-05,
+      "loss": 4.5635,
+      "step": 140800
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.768629742728303e-05,
+      "loss": 4.546,
+      "step": 141312
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.767792785857624e-05,
+      "loss": 4.5561,
+      "step": 141824
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.766954191106572e-05,
+      "loss": 4.5529,
+      "step": 142336
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.76611559635552e-05,
+      "loss": 4.555,
+      "step": 142848
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.765277001604468e-05,
+      "loss": 4.5589,
+      "step": 143360
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.764438406853416e-05,
+      "loss": 4.5623,
+      "step": 143872
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.763599812102364e-05,
+      "loss": 4.5398,
+      "step": 144384
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.762761217351312e-05,
+      "loss": 4.5484,
+      "step": 144896
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.76192262260026e-05,
+      "loss": 4.5526,
+      "step": 145408
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.7610856657295813e-05,
+      "loss": 4.5491,
+      "step": 145920
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.7602470709785293e-05,
+      "loss": 4.5443,
+      "step": 146432
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.75941011410785e-05,
+      "loss": 4.5479,
+      "step": 146944
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.758571519356798e-05,
+      "loss": 4.5424,
+      "step": 147456
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.757732924605746e-05,
+      "loss": 4.5595,
+      "step": 147968
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.756894329854694e-05,
+      "loss": 4.5306,
+      "step": 148480
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.756055735103642e-05,
+      "loss": 4.5488,
+      "step": 148992
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.75521714035259e-05,
+      "loss": 4.5361,
+      "step": 149504
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.7543785456015375e-05,
+      "loss": 4.5327,
+      "step": 150016
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.753541588730859e-05,
+      "loss": 4.5343,
+      "step": 150528
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.752702993979807e-05,
+      "loss": 4.5265,
+      "step": 151040
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.751864399228755e-05,
+      "loss": 4.5283,
+      "step": 151552
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.751025804477703e-05,
+      "loss": 4.534,
+      "step": 152064
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.750187209726651e-05,
+      "loss": 4.5274,
+      "step": 152576
+    },
+    {
+      "epoch": 1.03,
+      "eval_loss": 4.494524002075195,
+      "eval_runtime": 288.449,
+      "eval_samples_per_second": 1322.906,
+      "eval_steps_per_second": 41.342,
+      "step": 152640
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.749348614975599e-05,
+      "loss": 4.5262,
+      "step": 153088
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.748510020224547e-05,
+      "loss": 4.5186,
+      "step": 153600
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.747671425473495e-05,
+      "loss": 4.5382,
+      "step": 154112
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.746832830722443e-05,
+      "loss": 4.5264,
+      "step": 154624
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.745994235971391e-05,
+      "loss": 4.5361,
+      "step": 155136
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.745155641220339e-05,
+      "loss": 4.5141,
+      "step": 155648
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.744317046469287e-05,
+      "loss": 4.5224,
+      "step": 156160
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.743478451718235e-05,
+      "loss": 4.5078,
+      "step": 156672
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.742639856967183e-05,
+      "loss": 4.5174,
+      "step": 157184
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.741801262216131e-05,
+      "loss": 4.5193,
+      "step": 157696
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.740962667465079e-05,
+      "loss": 4.5152,
+      "step": 158208
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.7401240727140264e-05,
+      "loss": 4.5262,
+      "step": 158720
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.739287115843348e-05,
+      "loss": 4.5013,
+      "step": 159232
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.7384485210922966e-05,
+      "loss": 4.5182,
+      "step": 159744
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.7376099263412446e-05,
+      "loss": 4.5033,
+      "step": 160256
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.736771331590192e-05,
+      "loss": 4.5029,
+      "step": 160768
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.73593273683914e-05,
+      "loss": 4.5036,
+      "step": 161280
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.735094142088088e-05,
+      "loss": 4.5115,
+      "step": 161792
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.734255547337036e-05,
+      "loss": 4.5026,
+      "step": 162304
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.733416952585984e-05,
+      "loss": 4.5215,
+      "step": 162816
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.732578357834932e-05,
+      "loss": 4.5029,
+      "step": 163328
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.73173976308388e-05,
+      "loss": 4.5042,
+      "step": 163840
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.730901168332828e-05,
+      "loss": 4.503,
+      "step": 164352
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.730062573581776e-05,
+      "loss": 4.5126,
+      "step": 164864
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.729225616711097e-05,
+      "loss": 4.4995,
+      "step": 165376
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.728387021960045e-05,
+      "loss": 4.503,
+      "step": 165888
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.727548427208993e-05,
+      "loss": 4.4894,
+      "step": 166400
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.7267098324579415e-05,
+      "loss": 4.4941,
+      "step": 166912
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.725874513467635e-05,
+      "loss": 4.4833,
+      "step": 167424
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.725035918716583e-05,
+      "loss": 4.4986,
+      "step": 167936
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.724197323965531e-05,
+      "loss": 4.4974,
+      "step": 168448
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.723358729214479e-05,
+      "loss": 4.4935,
+      "step": 168960
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.722520134463427e-05,
+      "loss": 4.5015,
+      "step": 169472
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.721681539712375e-05,
+      "loss": 4.4897,
+      "step": 169984
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.720844582841696e-05,
+      "loss": 4.4901,
+      "step": 170496
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.720005988090644e-05,
+      "loss": 4.4915,
+      "step": 171008
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.719169031219965e-05,
+      "loss": 4.4862,
+      "step": 171520
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.718330436468914e-05,
+      "loss": 4.4764,
+      "step": 172032
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.717491841717862e-05,
+      "loss": 4.4676,
+      "step": 172544
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.71665324696681e-05,
+      "loss": 4.4776,
+      "step": 173056
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.715814652215758e-05,
+      "loss": 4.4798,
+      "step": 173568
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.714976057464706e-05,
+      "loss": 4.4838,
+      "step": 174080
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.714137462713654e-05,
+      "loss": 4.4753,
+      "step": 174592
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.713298867962602e-05,
+      "loss": 4.4687,
+      "step": 175104
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.71246027321155e-05,
+      "loss": 4.4825,
+      "step": 175616
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.711623316340871e-05,
+      "loss": 4.473,
+      "step": 176128
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.710784721589819e-05,
+      "loss": 4.4749,
+      "step": 176640
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.7099461268387667e-05,
+      "loss": 4.4669,
+      "step": 177152
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.7091075320877147e-05,
+      "loss": 4.4554,
+      "step": 177664
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.7082689373366627e-05,
+      "loss": 4.4871,
+      "step": 178176
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.7074319804659836e-05,
+      "loss": 4.4797,
+      "step": 178688
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.706593385714932e-05,
+      "loss": 4.4676,
+      "step": 179200
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.70575479096388e-05,
+      "loss": 4.4653,
+      "step": 179712
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.704916196212828e-05,
+      "loss": 4.4741,
+      "step": 180224
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.7040776014617755e-05,
+      "loss": 4.4556,
+      "step": 180736
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.703240644591097e-05,
+      "loss": 4.4764,
+      "step": 181248
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.702402049840045e-05,
+      "loss": 4.4508,
+      "step": 181760
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.7015634550889924e-05,
+      "loss": 4.469,
+      "step": 182272
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.7007248603379404e-05,
+      "loss": 4.4738,
+      "step": 182784
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.699887903467262e-05,
+      "loss": 4.4546,
+      "step": 183296
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.69904930871621e-05,
+      "loss": 4.4467,
+      "step": 183808
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.6982107139651573e-05,
+      "loss": 4.465,
+      "step": 184320
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.697372119214106e-05,
+      "loss": 4.4352,
+      "step": 184832
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.696533524463054e-05,
+      "loss": 4.4534,
+      "step": 185344
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.695694929712002e-05,
+      "loss": 4.4594,
+      "step": 185856
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.694857972841323e-05,
+      "loss": 4.4581,
+      "step": 186368
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.694019378090271e-05,
+      "loss": 4.4516,
+      "step": 186880
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.693180783339219e-05,
+      "loss": 4.4364,
+      "step": 187392
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.692342188588167e-05,
+      "loss": 4.4406,
+      "step": 187904
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.691503593837115e-05,
+      "loss": 4.4428,
+      "step": 188416
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.690666636966436e-05,
+      "loss": 4.4552,
+      "step": 188928
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.689828042215384e-05,
+      "loss": 4.4428,
+      "step": 189440
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.688989447464332e-05,
+      "loss": 4.4518,
+      "step": 189952
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.68815085271328e-05,
+      "loss": 4.458,
+      "step": 190464
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.687312257962228e-05,
+      "loss": 4.455,
+      "step": 190976
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.686473663211176e-05,
+      "loss": 4.4371,
+      "step": 191488
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.6856367063404974e-05,
+      "loss": 4.4433,
+      "step": 192000
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.6847981115894454e-05,
+      "loss": 4.4492,
+      "step": 192512
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.6839595168383934e-05,
+      "loss": 4.4361,
+      "step": 193024
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.683122559967714e-05,
+      "loss": 4.4411,
+      "step": 193536
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.682283965216662e-05,
+      "loss": 4.4373,
+      "step": 194048
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.68144537046561e-05,
+      "loss": 4.4382,
+      "step": 194560
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.680606775714558e-05,
+      "loss": 4.4438,
+      "step": 195072
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.679769818843879e-05,
+      "loss": 4.4227,
+      "step": 195584
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.678931224092827e-05,
+      "loss": 4.4412,
+      "step": 196096
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.678092629341775e-05,
+      "loss": 4.4312,
+      "step": 196608
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.677254034590723e-05,
+      "loss": 4.4441,
+      "step": 197120
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.676415439839671e-05,
+      "loss": 4.4348,
+      "step": 197632
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.675576845088619e-05,
+      "loss": 4.4311,
+      "step": 198144
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.674738250337568e-05,
+      "loss": 4.4247,
+      "step": 198656
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.673899655586516e-05,
+      "loss": 4.4232,
+      "step": 199168
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.673061060835464e-05,
+      "loss": 4.4229,
+      "step": 199680
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.672222466084412e-05,
+      "loss": 4.4311,
+      "step": 200192
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.671383871333359e-05,
+      "loss": 4.4378,
+      "step": 200704
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.670545276582307e-05,
+      "loss": 4.4318,
+      "step": 201216
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.669708319711629e-05,
+      "loss": 4.4237,
+      "step": 201728
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.6688713628409496e-05,
+      "loss": 4.4198,
+      "step": 202240
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.6680327680898976e-05,
+      "loss": 4.4215,
+      "step": 202752
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.6671941733388456e-05,
+      "loss": 4.4234,
+      "step": 203264
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.6663555785877936e-05,
+      "loss": 4.4318,
+      "step": 203776
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.6655169838367416e-05,
+      "loss": 4.4312,
+      "step": 204288
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.6646783890856896e-05,
+      "loss": 4.4199,
+      "step": 204800
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.663841432215011e-05,
+      "loss": 4.4116,
+      "step": 205312
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.663002837463959e-05,
+      "loss": 4.4286,
+      "step": 205824
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.6621642427129065e-05,
+      "loss": 4.4094,
+      "step": 206336
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.6613256479618545e-05,
+      "loss": 4.4189,
+      "step": 206848
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.6604870532108025e-05,
+      "loss": 4.411,
+      "step": 207360
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.6596500963401234e-05,
+      "loss": 4.4159,
+      "step": 207872
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.6588115015890714e-05,
+      "loss": 4.4065,
+      "step": 208384
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.6579729068380194e-05,
+      "loss": 4.417,
+      "step": 208896
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.6571343120869674e-05,
+      "loss": 4.4063,
+      "step": 209408
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.6562957173359154e-05,
+      "loss": 4.4024,
+      "step": 209920
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.6554571225848634e-05,
+      "loss": 4.4189,
+      "step": 210432
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.6546185278338114e-05,
+      "loss": 4.3986,
+      "step": 210944
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.65377993308276e-05,
+      "loss": 4.4243,
+      "step": 211456
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.652941338331708e-05,
+      "loss": 4.4206,
+      "step": 211968
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.652104381461029e-05,
+      "loss": 4.4142,
+      "step": 212480
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.651265786709977e-05,
+      "loss": 4.4049,
+      "step": 212992
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.650427191958925e-05,
+      "loss": 4.403,
+      "step": 213504
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.649588597207873e-05,
+      "loss": 4.4038,
+      "step": 214016
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.648751640337194e-05,
+      "loss": 4.4043,
+      "step": 214528
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.647914683466515e-05,
+      "loss": 4.4138,
+      "step": 215040
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.647076088715463e-05,
+      "loss": 4.3966,
+      "step": 215552
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.646237493964411e-05,
+      "loss": 4.4073,
+      "step": 216064
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.645398899213359e-05,
+      "loss": 4.4052,
+      "step": 216576
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.644560304462307e-05,
+      "loss": 4.4055,
+      "step": 217088
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.6437217097112554e-05,
+      "loss": 4.3951,
+      "step": 217600
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.6428831149602034e-05,
+      "loss": 4.4013,
+      "step": 218112
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.6420445202091514e-05,
+      "loss": 4.401,
+      "step": 218624
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.6412059254580994e-05,
+      "loss": 4.403,
+      "step": 219136
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.6403673307070474e-05,
+      "loss": 4.4031,
+      "step": 219648
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.639528735955995e-05,
+      "loss": 4.4114,
+      "step": 220160
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.638690141204943e-05,
+      "loss": 4.3866,
+      "step": 220672
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.637853184334264e-05,
+      "loss": 4.3996,
+      "step": 221184
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.637014589583212e-05,
+      "loss": 4.4052,
+      "step": 221696
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.6361759948321596e-05,
+      "loss": 4.3959,
+      "step": 222208
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.6353374000811076e-05,
+      "loss": 4.3934,
+      "step": 222720
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.634500443210429e-05,
+      "loss": 4.4041,
+      "step": 223232
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.633663486339751e-05,
+      "loss": 4.3935,
+      "step": 223744
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.632824891588699e-05,
+      "loss": 4.4104,
+      "step": 224256
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.63198793471802e-05,
+      "loss": 4.3835,
+      "step": 224768
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.631149339966968e-05,
+      "loss": 4.397,
+      "step": 225280
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.630310745215916e-05,
+      "loss": 4.3932,
+      "step": 225792
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.629472150464864e-05,
+      "loss": 4.3882,
+      "step": 226304
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.628633555713812e-05,
+      "loss": 4.3865,
+      "step": 226816
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.6277949609627597e-05,
+      "loss": 4.3833,
+      "step": 227328
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.626956366211707e-05,
+      "loss": 4.3806,
+      "step": 227840
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.626117771460655e-05,
+      "loss": 4.3906,
+      "step": 228352
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.625279176709603e-05,
+      "loss": 4.3857,
+      "step": 228864
+    },
+    {
+      "epoch": 0.03,
+      "eval_loss": 4.3582868576049805,
+      "eval_runtime": 293.5432,
+      "eval_samples_per_second": 1299.948,
+      "eval_steps_per_second": 40.624,
+      "step": 228960
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.6244422198389246e-05,
+      "loss": 4.3887,
+      "step": 229376
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.6236036250878726e-05,
+      "loss": 4.3754,
+      "step": 229888
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.6227650303368205e-05,
+      "loss": 4.3947,
+      "step": 230400
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.6219264355857685e-05,
+      "loss": 4.3826,
+      "step": 230912
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.6210878408347165e-05,
+      "loss": 4.3988,
+      "step": 231424
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.6202492460836645e-05,
+      "loss": 4.3769,
+      "step": 231936
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.6194106513326125e-05,
+      "loss": 4.3828,
+      "step": 232448
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.6185720565815605e-05,
+      "loss": 4.3684,
+      "step": 232960
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.6177350997108814e-05,
+      "loss": 4.3816,
+      "step": 233472
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.6168965049598294e-05,
+      "loss": 4.382,
+      "step": 233984
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.6160579102087774e-05,
+      "loss": 4.3795,
+      "step": 234496
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.6152193154577254e-05,
+      "loss": 4.3837,
+      "step": 235008
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.614382358587046e-05,
+      "loss": 4.3677,
+      "step": 235520
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.613545401716368e-05,
+      "loss": 4.3831,
+      "step": 236032
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.612706806965316e-05,
+      "loss": 4.3696,
+      "step": 236544
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.611868212214264e-05,
+      "loss": 4.3639,
+      "step": 237056
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.611029617463212e-05,
+      "loss": 4.3692,
+      "step": 237568
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.61019102271216e-05,
+      "loss": 4.3744,
+      "step": 238080
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.609354065841481e-05,
+      "loss": 4.3704,
+      "step": 238592
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.608515471090429e-05,
+      "loss": 4.3866,
+      "step": 239104
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.607676876339377e-05,
+      "loss": 4.3714,
+      "step": 239616
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.606838281588325e-05,
+      "loss": 4.3769,
+      "step": 240128
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.605999686837273e-05,
+      "loss": 4.3679,
+      "step": 240640
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.605161092086221e-05,
+      "loss": 4.3785,
+      "step": 241152
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.604322497335169e-05,
+      "loss": 4.368,
+      "step": 241664
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.603483902584117e-05,
+      "loss": 4.3728,
+      "step": 242176
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.602645307833065e-05,
+      "loss": 4.3629,
+      "step": 242688
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.601806713082013e-05,
+      "loss": 4.3584,
+      "step": 243200
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.6009697562113344e-05,
+      "loss": 4.3592,
+      "step": 243712
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.6001311614602824e-05,
+      "loss": 4.3679,
+      "step": 244224
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.5992925667092303e-05,
+      "loss": 4.37,
+      "step": 244736
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.5984539719581783e-05,
+      "loss": 4.3664,
+      "step": 245248
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.5976153772071257e-05,
+      "loss": 4.3754,
+      "step": 245760
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.59678005821682e-05,
+      "loss": 4.3645,
+      "step": 246272
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.595941463465768e-05,
+      "loss": 4.36,
+      "step": 246784
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.595102868714716e-05,
+      "loss": 4.3633,
+      "step": 247296
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.594264273963664e-05,
+      "loss": 4.3668,
+      "step": 247808
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.593425679212612e-05,
+      "loss": 4.3479,
+      "step": 248320
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.59258708446156e-05,
+      "loss": 4.3428,
+      "step": 248832
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.591748489710508e-05,
+      "loss": 4.3506,
+      "step": 249344
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.590909894959456e-05,
+      "loss": 4.3588,
+      "step": 249856
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.590071300208404e-05,
+      "loss": 4.3584,
+      "step": 250368
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.589232705457352e-05,
+      "loss": 4.3526,
+      "step": 250880
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.5883941107063e-05,
+      "loss": 4.3427,
+      "step": 251392
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.587557153835621e-05,
+      "loss": 4.3586,
+      "step": 251904
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.586718559084569e-05,
+      "loss": 4.3524,
+      "step": 252416
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.585879964333517e-05,
+      "loss": 4.3523,
+      "step": 252928
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.585041369582465e-05,
+      "loss": 4.3456,
+      "step": 253440
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.584202774831413e-05,
+      "loss": 4.3306,
+      "step": 253952
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.583364180080361e-05,
+      "loss": 4.3749,
+      "step": 254464
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.582527223209682e-05,
+      "loss": 4.3568,
+      "step": 254976
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.58168862845863e-05,
+      "loss": 4.3493,
+      "step": 255488
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.5808500337075786e-05,
+      "loss": 4.3493,
+      "step": 256000
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.5800114389565266e-05,
+      "loss": 4.3509,
+      "step": 256512
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.5791728442054746e-05,
+      "loss": 4.3403,
+      "step": 257024
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.5783358873347955e-05,
+      "loss": 4.3533,
+      "step": 257536
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.5774972925837435e-05,
+      "loss": 4.3367,
+      "step": 258048
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.5766586978326915e-05,
+      "loss": 4.3481,
+      "step": 258560
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.5758201030816395e-05,
+      "loss": 4.3593,
+      "step": 259072
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.5749815083305875e-05,
+      "loss": 4.338,
+      "step": 259584
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.5741429135795355e-05,
+      "loss": 4.3261,
+      "step": 260096
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.5733043188284835e-05,
+      "loss": 4.3532,
+      "step": 260608
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.5724657240774315e-05,
+      "loss": 4.3204,
+      "step": 261120
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.5716287672067524e-05,
+      "loss": 4.3331,
+      "step": 261632
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.570791810336074e-05,
+      "loss": 4.346,
+      "step": 262144
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.569953215585022e-05,
+      "loss": 4.3441,
+      "step": 262656
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.56911462083397e-05,
+      "loss": 4.3354,
+      "step": 263168
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.568276026082918e-05,
+      "loss": 4.3288,
+      "step": 263680
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.567437431331866e-05,
+      "loss": 4.3209,
+      "step": 264192
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.566598836580814e-05,
+      "loss": 4.3281,
+      "step": 264704
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.565760241829762e-05,
+      "loss": 4.3445,
+      "step": 265216
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.564921647078709e-05,
+      "loss": 4.3284,
+      "step": 265728
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.564084690208031e-05,
+      "loss": 4.3444,
+      "step": 266240
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.563246095456979e-05,
+      "loss": 4.3405,
+      "step": 266752
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.562407500705927e-05,
+      "loss": 4.3471,
+      "step": 267264
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.561568905954874e-05,
+      "loss": 4.3282,
+      "step": 267776
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.560731949084196e-05,
+      "loss": 4.3286,
+      "step": 268288
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.559893354333144e-05,
+      "loss": 4.3388,
+      "step": 268800
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.559054759582092e-05,
+      "loss": 4.324,
+      "step": 269312
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.558217802711413e-05,
+      "loss": 4.3378,
+      "step": 269824
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.557379207960361e-05,
+      "loss": 4.3254,
+      "step": 270336
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.556540613209309e-05,
+      "loss": 4.3274,
+      "step": 270848
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.5557020184582566e-05,
+      "loss": 4.3353,
+      "step": 271360
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.5548634237072046e-05,
+      "loss": 4.3179,
+      "step": 271872
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.5540248289561526e-05,
+      "loss": 4.3285,
+      "step": 272384
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.5531862342051006e-05,
+      "loss": 4.3276,
+      "step": 272896
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.5523476394540486e-05,
+      "loss": 4.3344,
+      "step": 273408
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.5515090447029966e-05,
+      "loss": 4.3233,
+      "step": 273920
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.5506720878323175e-05,
+      "loss": 4.327,
+      "step": 274432
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.5498334930812655e-05,
+      "loss": 4.3156,
+      "step": 274944
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.548994898330214e-05,
+      "loss": 4.3153,
+      "step": 275456
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.548156303579162e-05,
+      "loss": 4.3196,
+      "step": 275968
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.54731770882811e-05,
+      "loss": 4.324,
+      "step": 276480
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.546479114077058e-05,
+      "loss": 4.3319,
+      "step": 276992
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.545642157206379e-05,
+      "loss": 4.3303,
+      "step": 277504
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.544803562455327e-05,
+      "loss": 4.3201,
+      "step": 278016
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.543964967704275e-05,
+      "loss": 4.3138,
+      "step": 278528
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.543126372953223e-05,
+      "loss": 4.3171,
+      "step": 279040
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.542289416082544e-05,
+      "loss": 4.3218,
+      "step": 279552
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.541450821331492e-05,
+      "loss": 4.3253,
+      "step": 280064
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.54061222658044e-05,
+      "loss": 4.3271,
+      "step": 280576
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.539773631829388e-05,
+      "loss": 4.317,
+      "step": 281088
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.538935037078336e-05,
+      "loss": 4.3099,
+      "step": 281600
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.538096442327284e-05,
+      "loss": 4.328,
+      "step": 282112
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.5372578475762326e-05,
+      "loss": 4.3029,
+      "step": 282624
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.5364192528251806e-05,
+      "loss": 4.3195,
+      "step": 283136
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.5355822959545015e-05,
+      "loss": 4.3065,
+      "step": 283648
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.5347437012034495e-05,
+      "loss": 4.3141,
+      "step": 284160
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.5339051064523975e-05,
+      "loss": 4.3064,
+      "step": 284672
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.5330665117013455e-05,
+      "loss": 4.314,
+      "step": 285184
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.532227916950293e-05,
+      "loss": 4.3067,
+      "step": 285696
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.5313909600796144e-05,
+      "loss": 4.302,
+      "step": 286208
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.5305523653285624e-05,
+      "loss": 4.3139,
+      "step": 286720
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.5297137705775104e-05,
+      "loss": 4.3027,
+      "step": 287232
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.528875175826458e-05,
+      "loss": 4.3223,
+      "step": 287744
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.5280365810754064e-05,
+      "loss": 4.3231,
+      "step": 288256
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.527199624204728e-05,
+      "loss": 4.3175,
+      "step": 288768
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.526361029453675e-05,
+      "loss": 4.3067,
+      "step": 289280
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.525522434702623e-05,
+      "loss": 4.3067,
+      "step": 289792
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.524683839951571e-05,
+      "loss": 4.3046,
+      "step": 290304
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.523845245200519e-05,
+      "loss": 4.3072,
+      "step": 290816
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.523006650449467e-05,
+      "loss": 4.3175,
+      "step": 291328
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.522168055698415e-05,
+      "loss": 4.2948,
+      "step": 291840
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.521329460947363e-05,
+      "loss": 4.3092,
+      "step": 292352
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.520492504076684e-05,
+      "loss": 4.3055,
+      "step": 292864
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.519655547206005e-05,
+      "loss": 4.312,
+      "step": 293376
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.518816952454953e-05,
+      "loss": 4.2991,
+      "step": 293888
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.517978357703902e-05,
+      "loss": 4.3067,
+      "step": 294400
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.517141400833223e-05,
+      "loss": 4.2991,
+      "step": 294912
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.516302806082171e-05,
+      "loss": 4.3151,
+      "step": 295424
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.5154642113311187e-05,
+      "loss": 4.3065,
+      "step": 295936
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.5146256165800667e-05,
+      "loss": 4.3162,
+      "step": 296448
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.5137870218290147e-05,
+      "loss": 4.2916,
+      "step": 296960
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.5129484270779626e-05,
+      "loss": 4.3078,
+      "step": 297472
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.5121098323269106e-05,
+      "loss": 4.3099,
+      "step": 297984
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.5112712375758586e-05,
+      "loss": 4.296,
+      "step": 298496
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.5104326428248066e-05,
+      "loss": 4.3028,
+      "step": 299008
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.5095940480737546e-05,
+      "loss": 4.3106,
+      "step": 299520
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.5087570912030755e-05,
+      "loss": 4.3006,
+      "step": 300032
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.5079184964520235e-05,
+      "loss": 4.3186,
+      "step": 300544
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.5070799017009715e-05,
+      "loss": 4.2898,
+      "step": 301056
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.5062413069499195e-05,
+      "loss": 4.3069,
+      "step": 301568
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.505404350079241e-05,
+      "loss": 4.2947,
+      "step": 302080
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.504565755328189e-05,
+      "loss": 4.2977,
+      "step": 302592
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.503727160577137e-05,
+      "loss": 4.2981,
+      "step": 303104
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.502888565826085e-05,
+      "loss": 4.2905,
+      "step": 303616
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.502049971075033e-05,
+      "loss": 4.288,
+      "step": 304128
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.501211376323981e-05,
+      "loss": 4.2969,
+      "step": 304640
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.500372781572929e-05,
+      "loss": 4.2964,
+      "step": 305152
+    },
+    {
+      "epoch": 1.03,
+      "eval_loss": 4.2750773429870605,
+      "eval_runtime": 293.1439,
+      "eval_samples_per_second": 1301.719,
+      "eval_steps_per_second": 40.68,
+      "step": 305280
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.4995341868218764e-05,
+      "loss": 4.2958,
+      "step": 305664
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.4986955920708244e-05,
+      "loss": 4.2852,
+      "step": 306176
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.4978569973197724e-05,
+      "loss": 4.3043,
+      "step": 306688
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.4970184025687204e-05,
+      "loss": 4.293,
+      "step": 307200
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.4961798078176684e-05,
+      "loss": 4.3037,
+      "step": 307712
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.4953412130666164e-05,
+      "loss": 4.2926,
+      "step": 308224
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.494502618315565e-05,
+      "loss": 4.2921,
+      "step": 308736
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.493664023564513e-05,
+      "loss": 4.2802,
+      "step": 309248
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.492825428813461e-05,
+      "loss": 4.2896,
+      "step": 309760
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.491986834062409e-05,
+      "loss": 4.2923,
+      "step": 310272
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.491148239311357e-05,
+      "loss": 4.2916,
+      "step": 310784
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.490309644560305e-05,
+      "loss": 4.2929,
+      "step": 311296
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.489472687689626e-05,
+      "loss": 4.2806,
+      "step": 311808
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.488634092938574e-05,
+      "loss": 4.2963,
+      "step": 312320
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.487795498187522e-05,
+      "loss": 4.2803,
+      "step": 312832
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.486958541316843e-05,
+      "loss": 4.2775,
+      "step": 313344
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.486119946565791e-05,
+      "loss": 4.281,
+      "step": 313856
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.485281351814739e-05,
+      "loss": 4.2898,
+      "step": 314368
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.484442757063687e-05,
+      "loss": 4.2827,
+      "step": 314880
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.483604162312635e-05,
+      "loss": 4.2987,
+      "step": 315392
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.482765567561583e-05,
+      "loss": 4.2862,
+      "step": 315904
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.481926972810531e-05,
+      "loss": 4.2932,
+      "step": 316416
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.481088378059479e-05,
+      "loss": 4.2809,
+      "step": 316928
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.480249783308427e-05,
+      "loss": 4.2902,
+      "step": 317440
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.479411188557375e-05,
+      "loss": 4.2834,
+      "step": 317952
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.478572593806323e-05,
+      "loss": 4.288,
+      "step": 318464
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.477733999055271e-05,
+      "loss": 4.2782,
+      "step": 318976
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.476897042184592e-05,
+      "loss": 4.2767,
+      "step": 319488
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.47605844743354e-05,
+      "loss": 4.2726,
+      "step": 320000
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.475219852682488e-05,
+      "loss": 4.2809,
+      "step": 320512
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.474381257931436e-05,
+      "loss": 4.2861,
+      "step": 321024
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.473542663180384e-05,
+      "loss": 4.2828,
+      "step": 321536
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.472704068429332e-05,
+      "loss": 4.2886,
+      "step": 322048
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.471867111558653e-05,
+      "loss": 4.2777,
+      "step": 322560
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.471028516807601e-05,
+      "loss": 4.2777,
+      "step": 323072
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.470189922056549e-05,
+      "loss": 4.2856,
+      "step": 323584
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.469351327305497e-05,
+      "loss": 4.2844,
+      "step": 324096
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.468512732554445e-05,
+      "loss": 4.2607,
+      "step": 324608
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.467674137803393e-05,
+      "loss": 4.2671,
+      "step": 325120
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.466837180932714e-05,
+      "loss": 4.265,
+      "step": 325632
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.465998586181662e-05,
+      "loss": 4.2714,
+      "step": 326144
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.46515999143061e-05,
+      "loss": 4.2813,
+      "step": 326656
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.464321396679558e-05,
+      "loss": 4.2696,
+      "step": 327168
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.463484439808879e-05,
+      "loss": 4.2651,
+      "step": 327680
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.462645845057827e-05,
+      "loss": 4.2755,
+      "step": 328192
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.461807250306775e-05,
+      "loss": 4.2689,
+      "step": 328704
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.460968655555724e-05,
+      "loss": 4.2737,
+      "step": 329216
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.460130060804672e-05,
+      "loss": 4.268,
+      "step": 329728
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.4592931039339926e-05,
+      "loss": 4.2446,
+      "step": 330240
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.4584545091829406e-05,
+      "loss": 4.2936,
+      "step": 330752
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.4576159144318886e-05,
+      "loss": 4.2772,
+      "step": 331264
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.4567773196808366e-05,
+      "loss": 4.2751,
+      "step": 331776
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.455938724929784e-05,
+      "loss": 4.2675,
+      "step": 332288
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.455100130178732e-05,
+      "loss": 4.2674,
+      "step": 332800
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.4542631733080535e-05,
+      "loss": 4.2608,
+      "step": 333312
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.4534262164373744e-05,
+      "loss": 4.2699,
+      "step": 333824
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.4525876216863224e-05,
+      "loss": 4.2617,
+      "step": 334336
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.4517490269352704e-05,
+      "loss": 4.2662,
+      "step": 334848
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.450910432184219e-05,
+      "loss": 4.2816,
+      "step": 335360
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.4500718374331664e-05,
+      "loss": 4.2594,
+      "step": 335872
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.4492332426821144e-05,
+      "loss": 4.2508,
+      "step": 336384
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.4483946479310624e-05,
+      "loss": 4.2747,
+      "step": 336896
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.4475560531800104e-05,
+      "loss": 4.2426,
+      "step": 337408
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.446719096309331e-05,
+      "loss": 4.2581,
+      "step": 337920
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.445880501558279e-05,
+      "loss": 4.2659,
+      "step": 338432
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.445041906807227e-05,
+      "loss": 4.2655,
+      "step": 338944
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.444203312056175e-05,
+      "loss": 4.2577,
+      "step": 339456
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.443366355185496e-05,
+      "loss": 4.2551,
+      "step": 339968
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.442527760434444e-05,
+      "loss": 4.2392,
+      "step": 340480
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.441689165683393e-05,
+      "loss": 4.2539,
+      "step": 340992
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.440850570932341e-05,
+      "loss": 4.2646,
+      "step": 341504
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.440011976181289e-05,
+      "loss": 4.255,
+      "step": 342016
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.439173381430237e-05,
+      "loss": 4.2615,
+      "step": 342528
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.438334786679185e-05,
+      "loss": 4.2676,
+      "step": 343040
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.437497829808506e-05,
+      "loss": 4.2746,
+      "step": 343552
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.436660872937827e-05,
+      "loss": 4.2516,
+      "step": 344064
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.4358222781867747e-05,
+      "loss": 4.2493,
+      "step": 344576
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.4349836834357227e-05,
+      "loss": 4.2652,
+      "step": 345088
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.4341450886846707e-05,
+      "loss": 4.2448,
+      "step": 345600
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.4333064939336186e-05,
+      "loss": 4.2649,
+      "step": 346112
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.4324678991825666e-05,
+      "loss": 4.25,
+      "step": 346624
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.4316293044315146e-05,
+      "loss": 4.2513,
+      "step": 347136
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.4307907096804626e-05,
+      "loss": 4.2634,
+      "step": 347648
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.429952114929411e-05,
+      "loss": 4.2422,
+      "step": 348160
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.429113520178359e-05,
+      "loss": 4.2523,
+      "step": 348672
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.428274925427307e-05,
+      "loss": 4.2503,
+      "step": 349184
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.427436330676255e-05,
+      "loss": 4.2621,
+      "step": 349696
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.426599373805576e-05,
+      "loss": 4.2458,
+      "step": 350208
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.425760779054524e-05,
+      "loss": 4.2544,
+      "step": 350720
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.424922184303472e-05,
+      "loss": 4.2432,
+      "step": 351232
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.42408358955242e-05,
+      "loss": 4.2444,
+      "step": 351744
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.423248270562114e-05,
+      "loss": 4.2422,
+      "step": 352256
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.422409675811062e-05,
+      "loss": 4.2521,
+      "step": 352768
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.42157108106001e-05,
+      "loss": 4.2584,
+      "step": 353280
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.420732486308958e-05,
+      "loss": 4.2536,
+      "step": 353792
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.419893891557907e-05,
+      "loss": 4.2526,
+      "step": 354304
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.419055296806855e-05,
+      "loss": 4.2402,
+      "step": 354816
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.4182183399361756e-05,
+      "loss": 4.2437,
+      "step": 355328
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.4173797451851236e-05,
+      "loss": 4.2472,
+      "step": 355840
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.4165411504340716e-05,
+      "loss": 4.2536,
+      "step": 356352
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.4157025556830196e-05,
+      "loss": 4.254,
+      "step": 356864
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.4148672366927134e-05,
+      "loss": 4.2441,
+      "step": 357376
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.4140286419416614e-05,
+      "loss": 4.2397,
+      "step": 357888
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.4131900471906094e-05,
+      "loss": 4.2568,
+      "step": 358400
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.4123514524395574e-05,
+      "loss": 4.2326,
+      "step": 358912
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.4115128576885054e-05,
+      "loss": 4.247,
+      "step": 359424
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.4106742629374534e-05,
+      "loss": 4.238,
+      "step": 359936
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.409835668186402e-05,
+      "loss": 4.2452,
+      "step": 360448
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.40899707343535e-05,
+      "loss": 4.2346,
+      "step": 360960
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.4081584786842974e-05,
+      "loss": 4.2447,
+      "step": 361472
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.4073198839332453e-05,
+      "loss": 4.2333,
+      "step": 361984
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.4064812891821933e-05,
+      "loss": 4.2345,
+      "step": 362496
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.4056426944311413e-05,
+      "loss": 4.243,
+      "step": 363008
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.404804099680089e-05,
+      "loss": 4.2353,
+      "step": 363520
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.403965504929037e-05,
+      "loss": 4.2501,
+      "step": 364032
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.403126910177985e-05,
+      "loss": 4.254,
+      "step": 364544
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.402289953307306e-05,
+      "loss": 4.2439,
+      "step": 365056
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.401451358556254e-05,
+      "loss": 4.2417,
+      "step": 365568
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.400612763805202e-05,
+      "loss": 4.2321,
+      "step": 366080
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.39977416905415e-05,
+      "loss": 4.2376,
+      "step": 366592
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.398935574303098e-05,
+      "loss": 4.2368,
+      "step": 367104
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.39809861743242e-05,
+      "loss": 4.2531,
+      "step": 367616
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.397260022681368e-05,
+      "loss": 4.2227,
+      "step": 368128
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.396421427930316e-05,
+      "loss": 4.2415,
+      "step": 368640
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.395582833179264e-05,
+      "loss": 4.2369,
+      "step": 369152
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.394745876308585e-05,
+      "loss": 4.2417,
+      "step": 369664
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.393907281557533e-05,
+      "loss": 4.2327,
+      "step": 370176
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.3930703246868536e-05,
+      "loss": 4.2389,
+      "step": 370688
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.3922317299358016e-05,
+      "loss": 4.2284,
+      "step": 371200
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.3913931351847496e-05,
+      "loss": 4.2459,
+      "step": 371712
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.3905545404336976e-05,
+      "loss": 4.2412,
+      "step": 372224
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.3897159456826456e-05,
+      "loss": 4.2438,
+      "step": 372736
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.3888773509315936e-05,
+      "loss": 4.2273,
+      "step": 373248
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.388038756180542e-05,
+      "loss": 4.2393,
+      "step": 373760
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.38720016142949e-05,
+      "loss": 4.2423,
+      "step": 374272
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.386361566678438e-05,
+      "loss": 4.2302,
+      "step": 374784
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.385522971927386e-05,
+      "loss": 4.2351,
+      "step": 375296
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.3846843771763336e-05,
+      "loss": 4.2437,
+      "step": 375808
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.3838457824252816e-05,
+      "loss": 4.231,
+      "step": 376320
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.3830071876742296e-05,
+      "loss": 4.2556,
+      "step": 376832
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.3821685929231775e-05,
+      "loss": 4.2251,
+      "step": 377344
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.3813299981721255e-05,
+      "loss": 4.2382,
+      "step": 377856
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.3804914034210735e-05,
+      "loss": 4.2279,
+      "step": 378368
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.3796544465503944e-05,
+      "loss": 4.2325,
+      "step": 378880
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.378817489679716e-05,
+      "loss": 4.2333,
+      "step": 379392
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.377978894928664e-05,
+      "loss": 4.2245,
+      "step": 379904
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.3771419380579856e-05,
+      "loss": 4.2265,
+      "step": 380416
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.3763033433069336e-05,
+      "loss": 4.2206,
+      "step": 380928
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.375464748555881e-05,
+      "loss": 4.2348,
+      "step": 381440
+    },
+    {
+      "epoch": 0.03,
+      "eval_loss": 4.217353343963623,
+      "eval_runtime": 294.6691,
+      "eval_samples_per_second": 1294.981,
+      "eval_steps_per_second": 40.469,
+      "step": 381600
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.374626153804829e-05,
+      "loss": 4.2351,
+      "step": 381952
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.373787559053777e-05,
+      "loss": 4.2208,
+      "step": 382464
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.372948964302725e-05,
+      "loss": 4.2363,
+      "step": 382976
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.372110369551673e-05,
+      "loss": 4.2326,
+      "step": 383488
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.371271774800621e-05,
+      "loss": 4.2341,
+      "step": 384000
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.370433180049569e-05,
+      "loss": 4.226,
+      "step": 384512
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.369594585298517e-05,
+      "loss": 4.227,
+      "step": 385024
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.368755990547465e-05,
+      "loss": 4.2156,
+      "step": 385536
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.367917395796413e-05,
+      "loss": 4.2251,
+      "step": 386048
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.3670804389257345e-05,
+      "loss": 4.228,
+      "step": 386560
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.3662418441746825e-05,
+      "loss": 4.2298,
+      "step": 387072
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.3654032494236305e-05,
+      "loss": 4.2242,
+      "step": 387584
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.3645662925529514e-05,
+      "loss": 4.2222,
+      "step": 388096
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.3637276978018994e-05,
+      "loss": 4.2263,
+      "step": 388608
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.3628891030508474e-05,
+      "loss": 4.2174,
+      "step": 389120
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.3620505082997954e-05,
+      "loss": 4.2101,
+      "step": 389632
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.3612119135487434e-05,
+      "loss": 4.2188,
+      "step": 390144
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.360374956678064e-05,
+      "loss": 4.2245,
+      "step": 390656
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.359536361927012e-05,
+      "loss": 4.2206,
+      "step": 391168
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.35869776717596e-05,
+      "loss": 4.2341,
+      "step": 391680
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.357859172424908e-05,
+      "loss": 4.2259,
+      "step": 392192
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.357020577673856e-05,
+      "loss": 4.2302,
+      "step": 392704
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.356181982922804e-05,
+      "loss": 4.218,
+      "step": 393216
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.355343388171752e-05,
+      "loss": 4.2259,
+      "step": 393728
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.3545047934207e-05,
+      "loss": 4.2199,
+      "step": 394240
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.353667836550022e-05,
+      "loss": 4.227,
+      "step": 394752
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.352830879679343e-05,
+      "loss": 4.2175,
+      "step": 395264
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.351992284928291e-05,
+      "loss": 4.2122,
+      "step": 395776
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.351153690177239e-05,
+      "loss": 4.211,
+      "step": 396288
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.350315095426187e-05,
+      "loss": 4.222,
+      "step": 396800
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.349476500675134e-05,
+      "loss": 4.2209,
+      "step": 397312
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.3486395438044556e-05,
+      "loss": 4.2222,
+      "step": 397824
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.3478009490534036e-05,
+      "loss": 4.225,
+      "step": 398336
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.3469623543023516e-05,
+      "loss": 4.2197,
+      "step": 398848
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.3461237595512996e-05,
+      "loss": 4.2161,
+      "step": 399360
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.3452851648002476e-05,
+      "loss": 4.2242,
+      "step": 399872
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.3444465700491956e-05,
+      "loss": 4.2207,
+      "step": 400384
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.3436079752981436e-05,
+      "loss": 4.2044,
+      "step": 400896
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.3427710184274645e-05,
+      "loss": 4.2058,
+      "step": 401408
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.3419324236764125e-05,
+      "loss": 4.2041,
+      "step": 401920
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.3410938289253605e-05,
+      "loss": 4.2059,
+      "step": 402432
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.3402552341743085e-05,
+      "loss": 4.2213,
+      "step": 402944
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.3394182773036294e-05,
+      "loss": 4.2101,
+      "step": 403456
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.3385796825525774e-05,
+      "loss": 4.2036,
+      "step": 403968
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.3377410878015254e-05,
+      "loss": 4.2159,
+      "step": 404480
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.3369024930504734e-05,
+      "loss": 4.208,
+      "step": 404992
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.3360638982994214e-05,
+      "loss": 4.2149,
+      "step": 405504
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.33522530354837e-05,
+      "loss": 4.2049,
+      "step": 406016
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.334386708797318e-05,
+      "loss": 4.1849,
+      "step": 406528
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.333548114046266e-05,
+      "loss": 4.2359,
+      "step": 407040
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.332711157175587e-05,
+      "loss": 4.2134,
+      "step": 407552
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.331874200304908e-05,
+      "loss": 4.2177,
+      "step": 408064
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.331035605553856e-05,
+      "loss": 4.2031,
+      "step": 408576
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.330197010802804e-05,
+      "loss": 4.2129,
+      "step": 409088
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.329358416051752e-05,
+      "loss": 4.2021,
+      "step": 409600
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.3285198213007e-05,
+      "loss": 4.2069,
+      "step": 410112
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.327681226549648e-05,
+      "loss": 4.2063,
+      "step": 410624
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.326842631798596e-05,
+      "loss": 4.2058,
+      "step": 411136
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.326005674927917e-05,
+      "loss": 4.2249,
+      "step": 411648
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.3251670801768654e-05,
+      "loss": 4.1977,
+      "step": 412160
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.3243301233061863e-05,
+      "loss": 4.1894,
+      "step": 412672
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.323491528555134e-05,
+      "loss": 4.2168,
+      "step": 413184
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.322652933804082e-05,
+      "loss": 4.1854,
+      "step": 413696
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.32181433905303e-05,
+      "loss": 4.1956,
+      "step": 414208
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.320975744301978e-05,
+      "loss": 4.2117,
+      "step": 414720
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.320137149550926e-05,
+      "loss": 4.2112,
+      "step": 415232
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.319298554799874e-05,
+      "loss": 4.195,
+      "step": 415744
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.318459960048822e-05,
+      "loss": 4.2029,
+      "step": 416256
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.317623003178143e-05,
+      "loss": 4.1802,
+      "step": 416768
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.316784408427091e-05,
+      "loss": 4.1993,
+      "step": 417280
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.315945813676039e-05,
+      "loss": 4.2054,
+      "step": 417792
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.315107218924987e-05,
+      "loss": 4.1953,
+      "step": 418304
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.314270262054309e-05,
+      "loss": 4.2062,
+      "step": 418816
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.313431667303257e-05,
+      "loss": 4.2077,
+      "step": 419328
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.312593072552205e-05,
+      "loss": 4.2196,
+      "step": 419840
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.311754477801153e-05,
+      "loss": 4.1921,
+      "step": 420352
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.3109158830501e-05,
+      "loss": 4.197,
+      "step": 420864
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.310077288299048e-05,
+      "loss": 4.2104,
+      "step": 421376
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.309238693547996e-05,
+      "loss": 4.1893,
+      "step": 421888
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.308400098796944e-05,
+      "loss": 4.2046,
+      "step": 422400
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.307561504045892e-05,
+      "loss": 4.1967,
+      "step": 422912
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.30672290929484e-05,
+      "loss": 4.1952,
+      "step": 423424
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.305885952424161e-05,
+      "loss": 4.2095,
+      "step": 423936
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.305047357673109e-05,
+      "loss": 4.1889,
+      "step": 424448
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.3042087629220577e-05,
+      "loss": 4.1922,
+      "step": 424960
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.3033701681710057e-05,
+      "loss": 4.2001,
+      "step": 425472
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.3025315734199536e-05,
+      "loss": 4.2043,
+      "step": 425984
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.3016929786689016e-05,
+      "loss": 4.191,
+      "step": 426496
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.3008543839178496e-05,
+      "loss": 4.2,
+      "step": 427008
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.3000157891667976e-05,
+      "loss": 4.1827,
+      "step": 427520
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.2991788322961185e-05,
+      "loss": 4.1934,
+      "step": 428032
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.2983418754254395e-05,
+      "loss": 4.1892,
+      "step": 428544
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.2975032806743874e-05,
+      "loss": 4.1968,
+      "step": 429056
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.2966646859233354e-05,
+      "loss": 4.209,
+      "step": 429568
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.2958260911722834e-05,
+      "loss": 4.1961,
+      "step": 430080
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.2949874964212314e-05,
+      "loss": 4.1971,
+      "step": 430592
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.294150539550553e-05,
+      "loss": 4.187,
+      "step": 431104
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.293311944799501e-05,
+      "loss": 4.19,
+      "step": 431616
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.292473350048449e-05,
+      "loss": 4.1928,
+      "step": 432128
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.291634755297397e-05,
+      "loss": 4.2012,
+      "step": 432640
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.290796160546345e-05,
+      "loss": 4.1966,
+      "step": 433152
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.289957565795293e-05,
+      "loss": 4.1976,
+      "step": 433664
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.289118971044241e-05,
+      "loss": 4.1873,
+      "step": 434176
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.288280376293189e-05,
+      "loss": 4.2008,
+      "step": 434688
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.28744341942251e-05,
+      "loss": 4.1791,
+      "step": 435200
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.286604824671458e-05,
+      "loss": 4.1943,
+      "step": 435712
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.285767867800779e-05,
+      "loss": 4.1794,
+      "step": 436224
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.284929273049727e-05,
+      "loss": 4.1966,
+      "step": 436736
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.284090678298675e-05,
+      "loss": 4.1835,
+      "step": 437248
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.283252083547623e-05,
+      "loss": 4.1888,
+      "step": 437760
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.2824151266769444e-05,
+      "loss": 4.1834,
+      "step": 438272
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.2815765319258924e-05,
+      "loss": 4.1824,
+      "step": 438784
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.2807379371748404e-05,
+      "loss": 4.1866,
+      "step": 439296
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.2798993424237884e-05,
+      "loss": 4.1842,
+      "step": 439808
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.2790607476727364e-05,
+      "loss": 4.1954,
+      "step": 440320
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.278222152921684e-05,
+      "loss": 4.2001,
+      "step": 440832
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.277383558170632e-05,
+      "loss": 4.1935,
+      "step": 441344
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.27654496341958e-05,
+      "loss": 4.1945,
+      "step": 441856
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.275706368668528e-05,
+      "loss": 4.1763,
+      "step": 442368
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.274867773917476e-05,
+      "loss": 4.1841,
+      "step": 442880
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.2740291791664237e-05,
+      "loss": 4.1874,
+      "step": 443392
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.2731905844153717e-05,
+      "loss": 4.1977,
+      "step": 443904
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.272353627544693e-05,
+      "loss": 4.1705,
+      "step": 444416
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.271515032793641e-05,
+      "loss": 4.1887,
+      "step": 444928
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.270676438042589e-05,
+      "loss": 4.1893,
+      "step": 445440
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.26983948117191e-05,
+      "loss": 4.1849,
+      "step": 445952
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.269000886420858e-05,
+      "loss": 4.1823,
+      "step": 446464
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.268162291669806e-05,
+      "loss": 4.1897,
+      "step": 446976
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.267323696918754e-05,
+      "loss": 4.1784,
+      "step": 447488
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.266485102167702e-05,
+      "loss": 4.1931,
+      "step": 448000
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.26564650741665e-05,
+      "loss": 4.1902,
+      "step": 448512
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.264809550545971e-05,
+      "loss": 4.1922,
+      "step": 449024
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.263970955794919e-05,
+      "loss": 4.1765,
+      "step": 449536
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.263132361043867e-05,
+      "loss": 4.1892,
+      "step": 450048
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.262293766292815e-05,
+      "loss": 4.1914,
+      "step": 450560
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.261455171541763e-05,
+      "loss": 4.1797,
+      "step": 451072
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.260616576790712e-05,
+      "loss": 4.1846,
+      "step": 451584
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.2597796199200326e-05,
+      "loss": 4.1905,
+      "step": 452096
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.2589426630493535e-05,
+      "loss": 4.1858,
+      "step": 452608
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.2581040682983015e-05,
+      "loss": 4.2009,
+      "step": 453120
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.2572671114276224e-05,
+      "loss": 4.178,
+      "step": 453632
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.2564285166765704e-05,
+      "loss": 4.1862,
+      "step": 454144
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.2555899219255184e-05,
+      "loss": 4.1804,
+      "step": 454656
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.2547513271744664e-05,
+      "loss": 4.1835,
+      "step": 455168
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.2539127324234144e-05,
+      "loss": 4.1771,
+      "step": 455680
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.2530741376723624e-05,
+      "loss": 4.1728,
+      "step": 456192
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.2522355429213104e-05,
+      "loss": 4.1814,
+      "step": 456704
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.2513969481702584e-05,
+      "loss": 4.1694,
+      "step": 457216
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.250558353419207e-05,
+      "loss": 4.1869,
+      "step": 457728
+    },
+    {
+      "epoch": 1.03,
+      "eval_loss": 4.17576265335083,
+      "eval_runtime": 291.3672,
+      "eval_samples_per_second": 1309.657,
+      "eval_steps_per_second": 40.928,
+      "step": 457920
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.249719758668155e-05,
+      "loss": 4.1877,
+      "step": 458240
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.2488811639171024e-05,
+      "loss": 4.1711,
+      "step": 458752
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.2480425691660504e-05,
+      "loss": 4.1857,
+      "step": 459264
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.2472039744149984e-05,
+      "loss": 4.18,
+      "step": 459776
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.2463653796639464e-05,
+      "loss": 4.1882,
+      "step": 460288
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.2455267849128943e-05,
+      "loss": 4.1773,
+      "step": 460800
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.2446881901618423e-05,
+      "loss": 4.1795,
+      "step": 461312
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.24384959541079e-05,
+      "loss": 4.1674,
+      "step": 461824
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.243011000659738e-05,
+      "loss": 4.1771,
+      "step": 462336
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.242172405908686e-05,
+      "loss": 4.1802,
+      "step": 462848
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.241333811157634e-05,
+      "loss": 4.1802,
+      "step": 463360
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.240495216406582e-05,
+      "loss": 4.1757,
+      "step": 463872
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.239658259535903e-05,
+      "loss": 4.1744,
+      "step": 464384
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.238819664784852e-05,
+      "loss": 4.1768,
+      "step": 464896
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.2379810700338e-05,
+      "loss": 4.1746,
+      "step": 465408
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.237142475282748e-05,
+      "loss": 4.1608,
+      "step": 465920
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.236303880531696e-05,
+      "loss": 4.1717,
+      "step": 466432
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.235465285780644e-05,
+      "loss": 4.1765,
+      "step": 466944
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.234626691029591e-05,
+      "loss": 4.1743,
+      "step": 467456
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.233788096278539e-05,
+      "loss": 4.1847,
+      "step": 467968
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.232949501527487e-05,
+      "loss": 4.1816,
+      "step": 468480
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.232110906776435e-05,
+      "loss": 4.1841,
+      "step": 468992
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.231272312025383e-05,
+      "loss": 4.1666,
+      "step": 469504
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.230433717274331e-05,
+      "loss": 4.1787,
+      "step": 470016
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.229598398284026e-05,
+      "loss": 4.1754,
+      "step": 470528
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.228759803532974e-05,
+      "loss": 4.1786,
+      "step": 471040
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.227921208781922e-05,
+      "loss": 4.1684,
+      "step": 471552
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.22708261403087e-05,
+      "loss": 4.1692,
+      "step": 472064
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.226244019279818e-05,
+      "loss": 4.1661,
+      "step": 472576
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 4.2254054245287657e-05,
+      "loss": 4.1704,
+      "step": 473088
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.2245684676580866e-05,
+      "loss": 4.1714,
+      "step": 473600
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.2237298729070346e-05,
+      "loss": 4.1789,
+      "step": 474112
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.2228912781559826e-05,
+      "loss": 4.1795,
+      "step": 474624
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.2220526834049306e-05,
+      "loss": 4.1757,
+      "step": 475136
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.2212157265342515e-05,
+      "loss": 4.1699,
+      "step": 475648
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.2203771317831995e-05,
+      "loss": 4.1722,
+      "step": 476160
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.219540174912521e-05,
+      "loss": 4.1773,
+      "step": 476672
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.218701580161469e-05,
+      "loss": 4.1612,
+      "step": 477184
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.217862985410417e-05,
+      "loss": 4.1565,
+      "step": 477696
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.217024390659365e-05,
+      "loss": 4.1565,
+      "step": 478208
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.216185795908313e-05,
+      "loss": 4.1639,
+      "step": 478720
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.215347201157261e-05,
+      "loss": 4.1701,
+      "step": 479232
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.214508606406209e-05,
+      "loss": 4.1666,
+      "step": 479744
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.213670011655157e-05,
+      "loss": 4.1627,
+      "step": 480256
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.212831416904105e-05,
+      "loss": 4.1669,
+      "step": 480768
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.211992822153053e-05,
+      "loss": 4.165,
+      "step": 481280
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.211154227402001e-05,
+      "loss": 4.1681,
+      "step": 481792
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.210315632650949e-05,
+      "loss": 4.1597,
+      "step": 482304
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.209477037899897e-05,
+      "loss": 4.1424,
+      "step": 482816
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.208638443148845e-05,
+      "loss": 4.1858,
+      "step": 483328
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.207799848397792e-05,
+      "loss": 4.17,
+      "step": 483840
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.206961253646741e-05,
+      "loss": 4.1745,
+      "step": 484352
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.2061242967760626e-05,
+      "loss": 4.1618,
+      "step": 484864
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.20528570202501e-05,
+      "loss": 4.1659,
+      "step": 485376
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.204447107273958e-05,
+      "loss": 4.1618,
+      "step": 485888
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.203608512522906e-05,
+      "loss": 4.1619,
+      "step": 486400
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.202769917771854e-05,
+      "loss": 4.1652,
+      "step": 486912
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.201932960901175e-05,
+      "loss": 4.1595,
+      "step": 487424
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.201094366150123e-05,
+      "loss": 4.1808,
+      "step": 487936
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.200255771399071e-05,
+      "loss": 4.1569,
+      "step": 488448
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.1994188145283924e-05,
+      "loss": 4.1468,
+      "step": 488960
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.19858021977734e-05,
+      "loss": 4.1718,
+      "step": 489472
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.197741625026288e-05,
+      "loss": 4.1426,
+      "step": 489984
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.1969030302752363e-05,
+      "loss": 4.1453,
+      "step": 490496
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.1960644355241843e-05,
+      "loss": 4.1707,
+      "step": 491008
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.1952258407731323e-05,
+      "loss": 4.1662,
+      "step": 491520
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.19438724602208e-05,
+      "loss": 4.1493,
+      "step": 492032
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.193548651271028e-05,
+      "loss": 4.1625,
+      "step": 492544
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.192711694400349e-05,
+      "loss": 4.137,
+      "step": 493056
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.191873099649297e-05,
+      "loss": 4.1559,
+      "step": 493568
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.191034504898245e-05,
+      "loss": 4.1601,
+      "step": 494080
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.190195910147193e-05,
+      "loss": 4.155,
+      "step": 494592
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.189358953276514e-05,
+      "loss": 4.1625,
+      "step": 495104
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.188520358525462e-05,
+      "loss": 4.1623,
+      "step": 495616
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.18768176377441e-05,
+      "loss": 4.1737,
+      "step": 496128
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.186843169023358e-05,
+      "loss": 4.1514,
+      "step": 496640
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.18600621215268e-05,
+      "loss": 4.1538,
+      "step": 497152
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.185167617401628e-05,
+      "loss": 4.1641,
+      "step": 497664
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.184329022650576e-05,
+      "loss": 4.1487,
+      "step": 498176
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.183490427899524e-05,
+      "loss": 4.1605,
+      "step": 498688
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.1826551089092175e-05,
+      "loss": 4.1556,
+      "step": 499200
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.1818165141581655e-05,
+      "loss": 4.1518,
+      "step": 499712
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.1809779194071135e-05,
+      "loss": 4.1717,
+      "step": 500224
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.1801393246560615e-05,
+      "loss": 4.1442,
+      "step": 500736
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.1793007299050095e-05,
+      "loss": 4.1477,
+      "step": 501248
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.1784621351539575e-05,
+      "loss": 4.1588,
+      "step": 501760
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.1776235404029055e-05,
+      "loss": 4.1613,
+      "step": 502272
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.1767849456518535e-05,
+      "loss": 4.1514,
+      "step": 502784
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 4.175947988781175e-05,
+      "loss": 4.1579,
+      "step": 503296
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.175111031910496e-05,
+      "loss": 4.1396,
+      "step": 503808
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.174272437159444e-05,
+      "loss": 4.1488,
+      "step": 504320
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.173433842408392e-05,
+      "loss": 4.1485,
+      "step": 504832
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.17259524765734e-05,
+      "loss": 4.154,
+      "step": 505344
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.171756652906288e-05,
+      "loss": 4.1663,
+      "step": 505856
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.170918058155236e-05,
+      "loss": 4.1526,
+      "step": 506368
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.170079463404184e-05,
+      "loss": 4.1557,
+      "step": 506880
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.169240868653132e-05,
+      "loss": 4.1477,
+      "step": 507392
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.16840227390208e-05,
+      "loss": 4.1466,
+      "step": 507904
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.167563679151028e-05,
+      "loss": 4.1489,
+      "step": 508416
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.166725084399975e-05,
+      "loss": 4.1606,
+      "step": 508928
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.165886489648923e-05,
+      "loss": 4.156,
+      "step": 509440
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.165049532778245e-05,
+      "loss": 4.1564,
+      "step": 509952
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.1642125759075664e-05,
+      "loss": 4.143,
+      "step": 510464
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.1633739811565144e-05,
+      "loss": 4.1564,
+      "step": 510976
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.1625353864054624e-05,
+      "loss": 4.1427,
+      "step": 511488
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.1616967916544104e-05,
+      "loss": 4.1521,
+      "step": 512000
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.160859834783731e-05,
+      "loss": 4.1375,
+      "step": 512512
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.160021240032679e-05,
+      "loss": 4.1538,
+      "step": 513024
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.159182645281627e-05,
+      "loss": 4.1422,
+      "step": 513536
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.158344050530575e-05,
+      "loss": 4.1489,
+      "step": 514048
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.1575054557795226e-05,
+      "loss": 4.139,
+      "step": 514560
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.1566668610284706e-05,
+      "loss": 4.148,
+      "step": 515072
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.155829904157792e-05,
+      "loss": 4.1429,
+      "step": 515584
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.15499130940674e-05,
+      "loss": 4.1403,
+      "step": 516096
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.154152714655688e-05,
+      "loss": 4.1584,
+      "step": 516608
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.153314119904636e-05,
+      "loss": 4.1587,
+      "step": 517120
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.152475525153584e-05,
+      "loss": 4.1541,
+      "step": 517632
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.151636930402532e-05,
+      "loss": 4.1491,
+      "step": 518144
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.15079833565148e-05,
+      "loss": 4.1393,
+      "step": 518656
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.149959740900428e-05,
+      "loss": 4.1439,
+      "step": 519168
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.149122784029749e-05,
+      "loss": 4.1479,
+      "step": 519680
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.148284189278697e-05,
+      "loss": 4.1516,
+      "step": 520192
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.147445594527645e-05,
+      "loss": 4.1333,
+      "step": 520704
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.146606999776593e-05,
+      "loss": 4.149,
+      "step": 521216
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.145768405025541e-05,
+      "loss": 4.1486,
+      "step": 521728
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.144931448154863e-05,
+      "loss": 4.1454,
+      "step": 522240
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.144092853403811e-05,
+      "loss": 4.1418,
+      "step": 522752
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.1432542586527587e-05,
+      "loss": 4.1462,
+      "step": 523264
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.1424156639017067e-05,
+      "loss": 4.1381,
+      "step": 523776
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.1415787070310276e-05,
+      "loss": 4.1563,
+      "step": 524288
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.1407401122799756e-05,
+      "loss": 4.1516,
+      "step": 524800
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.1399015175289236e-05,
+      "loss": 4.1502,
+      "step": 525312
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.1390629227778716e-05,
+      "loss": 4.1346,
+      "step": 525824
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.1382259659071925e-05,
+      "loss": 4.1536,
+      "step": 526336
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.1373873711561405e-05,
+      "loss": 4.1497,
+      "step": 526848
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.1365487764050885e-05,
+      "loss": 4.1393,
+      "step": 527360
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.1357118195344094e-05,
+      "loss": 4.1481,
+      "step": 527872
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.134874862663731e-05,
+      "loss": 4.148,
+      "step": 528384
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.134036267912679e-05,
+      "loss": 4.1444,
+      "step": 528896
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.133197673161627e-05,
+      "loss": 4.1619,
+      "step": 529408
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.132359078410575e-05,
+      "loss": 4.1375,
+      "step": 529920
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.131520483659523e-05,
+      "loss": 4.1472,
+      "step": 530432
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.130681888908471e-05,
+      "loss": 4.1446,
+      "step": 530944
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.129843294157419e-05,
+      "loss": 4.1437,
+      "step": 531456
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.129004699406367e-05,
+      "loss": 4.136,
+      "step": 531968
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.128166104655315e-05,
+      "loss": 4.133,
+      "step": 532480
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.127327509904263e-05,
+      "loss": 4.1396,
+      "step": 532992
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.126488915153211e-05,
+      "loss": 4.1337,
+      "step": 533504
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 4.125650320402159e-05,
+      "loss": 4.1461,
+      "step": 534016
+    },
+    {
+      "epoch": 0.03,
+      "eval_loss": 4.14478063583374,
+      "eval_runtime": 294.6857,
+      "eval_samples_per_second": 1294.908,
+      "eval_steps_per_second": 40.467,
+      "step": 534240
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.1248150014118534e-05,
+      "loss": 4.1446,
+      "step": 534528
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.1239764066608014e-05,
+      "loss": 4.1332,
+      "step": 535040
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.1231378119097494e-05,
+      "loss": 4.1452,
+      "step": 535552
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.1222992171586974e-05,
+      "loss": 4.1421,
+      "step": 536064
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.1214606224076454e-05,
+      "loss": 4.1471,
+      "step": 536576
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.1206220276565934e-05,
+      "loss": 4.1392,
+      "step": 537088
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.1197834329055414e-05,
+      "loss": 4.1432,
+      "step": 537600
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.118944838154489e-05,
+      "loss": 4.1318,
+      "step": 538112
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.11810788128381e-05,
+      "loss": 4.1304,
+      "step": 538624
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.117270924413131e-05,
+      "loss": 4.1464,
+      "step": 539136
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.116432329662079e-05,
+      "loss": 4.1415,
+      "step": 539648
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.115593734911027e-05,
+      "loss": 4.1344,
+      "step": 540160
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.114756778040348e-05,
+      "loss": 4.1455,
+      "step": 540672
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.113918183289297e-05,
+      "loss": 4.1318,
+      "step": 541184
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.113079588538245e-05,
+      "loss": 4.1317,
+      "step": 541696
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.112240993787193e-05,
+      "loss": 4.1251,
+      "step": 542208
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.111402399036141e-05,
+      "loss": 4.1336,
+      "step": 542720
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.110563804285089e-05,
+      "loss": 4.1379,
+      "step": 543232
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.109725209534036e-05,
+      "loss": 4.1321,
+      "step": 543744
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.108886614782984e-05,
+      "loss": 4.1482,
+      "step": 544256
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.108048020031932e-05,
+      "loss": 4.1428,
+      "step": 544768
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.10720942528088e-05,
+      "loss": 4.1445,
+      "step": 545280
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.106370830529828e-05,
+      "loss": 4.132,
+      "step": 545792
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.105532235778776e-05,
+      "loss": 4.1363,
+      "step": 546304
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.104695278908097e-05,
+      "loss": 4.1452,
+      "step": 546816
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.103856684157045e-05,
+      "loss": 4.1374,
+      "step": 547328
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.1030197272863665e-05,
+      "loss": 4.1305,
+      "step": 547840
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.1021811325353145e-05,
+      "loss": 4.1346,
+      "step": 548352
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.1013425377842625e-05,
+      "loss": 4.1241,
+      "step": 548864
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 4.1005039430332105e-05,
+      "loss": 4.1335,
+      "step": 549376
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.0996653482821585e-05,
+      "loss": 4.1343,
+      "step": 549888
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.0988267535311065e-05,
+      "loss": 4.1413,
+      "step": 550400
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.0979881587800545e-05,
+      "loss": 4.1411,
+      "step": 550912
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.0971512019093754e-05,
+      "loss": 4.1401,
+      "step": 551424
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.0963126071583234e-05,
+      "loss": 4.1335,
+      "step": 551936
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.0954740124072714e-05,
+      "loss": 4.1329,
+      "step": 552448
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.0946354176562194e-05,
+      "loss": 4.1388,
+      "step": 552960
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.09379846078554e-05,
+      "loss": 4.1242,
+      "step": 553472
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.092959866034489e-05,
+      "loss": 4.1217,
+      "step": 553984
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.092121271283437e-05,
+      "loss": 4.1188,
+      "step": 554496
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.091282676532385e-05,
+      "loss": 4.1273,
+      "step": 555008
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.090444081781333e-05,
+      "loss": 4.1297,
+      "step": 555520
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.089605487030281e-05,
+      "loss": 4.1322,
+      "step": 556032
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.088766892279229e-05,
+      "loss": 4.1268,
+      "step": 556544
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.087928297528177e-05,
+      "loss": 4.1279,
+      "step": 557056
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.087089702777125e-05,
+      "loss": 4.1302,
+      "step": 557568
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.086251108026072e-05,
+      "loss": 4.1349,
+      "step": 558080
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.08541251327502e-05,
+      "loss": 4.1225,
+      "step": 558592
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.084575556404342e-05,
+      "loss": 4.1062,
+      "step": 559104
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.08373696165329e-05,
+      "loss": 4.1441,
+      "step": 559616
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.082898366902237e-05,
+      "loss": 4.1347,
+      "step": 560128
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.082059772151186e-05,
+      "loss": 4.1403,
+      "step": 560640
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.081221177400134e-05,
+      "loss": 4.1244,
+      "step": 561152
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.080382582649082e-05,
+      "loss": 4.1288,
+      "step": 561664
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.079545625778403e-05,
+      "loss": 4.1246,
+      "step": 562176
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.078707031027351e-05,
+      "loss": 4.1247,
+      "step": 562688
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.077868436276299e-05,
+      "loss": 4.1326,
+      "step": 563200
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.077029841525247e-05,
+      "loss": 4.117,
+      "step": 563712
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.076191246774195e-05,
+      "loss": 4.1466,
+      "step": 564224
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.075355927783889e-05,
+      "loss": 4.1258,
+      "step": 564736
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.074517333032837e-05,
+      "loss": 4.105,
+      "step": 565248
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.0736787382817845e-05,
+      "loss": 4.1362,
+      "step": 565760
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.0728401435307325e-05,
+      "loss": 4.112,
+      "step": 566272
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.072001548779681e-05,
+      "loss": 4.1093,
+      "step": 566784
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.071162954028629e-05,
+      "loss": 4.1355,
+      "step": 567296
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.070324359277577e-05,
+      "loss": 4.128,
+      "step": 567808
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.069485764526525e-05,
+      "loss": 4.115,
+      "step": 568320
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.068647169775473e-05,
+      "loss": 4.1249,
+      "step": 568832
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.067808575024421e-05,
+      "loss": 4.1047,
+      "step": 569344
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.066969980273369e-05,
+      "loss": 4.118,
+      "step": 569856
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.066131385522317e-05,
+      "loss": 4.1249,
+      "step": 570368
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.065294428651638e-05,
+      "loss": 4.1176,
+      "step": 570880
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.064455833900586e-05,
+      "loss": 4.1265,
+      "step": 571392
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.063617239149534e-05,
+      "loss": 4.1232,
+      "step": 571904
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.062778644398482e-05,
+      "loss": 4.138,
+      "step": 572416
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.06194004964743e-05,
+      "loss": 4.1163,
+      "step": 572928
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.061101454896378e-05,
+      "loss": 4.1201,
+      "step": 573440
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.060262860145326e-05,
+      "loss": 4.1279,
+      "step": 573952
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.059424265394274e-05,
+      "loss": 4.1145,
+      "step": 574464
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.0585873085235956e-05,
+      "loss": 4.1257,
+      "step": 574976
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.0577487137725436e-05,
+      "loss": 4.1205,
+      "step": 575488
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.056910119021491e-05,
+      "loss": 4.1129,
+      "step": 576000
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.056071524270439e-05,
+      "loss": 4.1341,
+      "step": 576512
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.055232929519387e-05,
+      "loss": 4.1084,
+      "step": 577024
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.0543959726487085e-05,
+      "loss": 4.1121,
+      "step": 577536
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.053557377897656e-05,
+      "loss": 4.124,
+      "step": 578048
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.052718783146604e-05,
+      "loss": 4.1229,
+      "step": 578560
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.051880188395552e-05,
+      "loss": 4.1149,
+      "step": 579072
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 4.0510448694052463e-05,
+      "loss": 4.1241,
+      "step": 579584
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.0502062746541943e-05,
+      "loss": 4.1037,
+      "step": 580096
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.049367679903143e-05,
+      "loss": 4.1108,
+      "step": 580608
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.048529085152091e-05,
+      "loss": 4.114,
+      "step": 581120
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.047690490401038e-05,
+      "loss": 4.1163,
+      "step": 581632
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.046851895649986e-05,
+      "loss": 4.1319,
+      "step": 582144
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.046013300898934e-05,
+      "loss": 4.119,
+      "step": 582656
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.045174706147882e-05,
+      "loss": 4.1228,
+      "step": 583168
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.044337749277203e-05,
+      "loss": 4.112,
+      "step": 583680
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.043500792406525e-05,
+      "loss": 4.1136,
+      "step": 584192
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.042662197655473e-05,
+      "loss": 4.1149,
+      "step": 584704
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.041823602904421e-05,
+      "loss": 4.1245,
+      "step": 585216
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.040985008153368e-05,
+      "loss": 4.1223,
+      "step": 585728
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.040146413402317e-05,
+      "loss": 4.1232,
+      "step": 586240
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.039307818651265e-05,
+      "loss": 4.113,
+      "step": 586752
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.038470861780586e-05,
+      "loss": 4.117,
+      "step": 587264
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.037633904909907e-05,
+      "loss": 4.1115,
+      "step": 587776
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.036795310158855e-05,
+      "loss": 4.1152,
+      "step": 588288
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.035956715407803e-05,
+      "loss": 4.1038,
+      "step": 588800
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.0351181206567506e-05,
+      "loss": 4.1212,
+      "step": 589312
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.0342795259056986e-05,
+      "loss": 4.1083,
+      "step": 589824
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.0334409311546466e-05,
+      "loss": 4.1142,
+      "step": 590336
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.0326023364035946e-05,
+      "loss": 4.1057,
+      "step": 590848
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.0317637416525426e-05,
+      "loss": 4.1124,
+      "step": 591360
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.0309251469014906e-05,
+      "loss": 4.1111,
+      "step": 591872
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.0300865521504386e-05,
+      "loss": 4.1085,
+      "step": 592384
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.0292479573993866e-05,
+      "loss": 4.1208,
+      "step": 592896
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.028409362648335e-05,
+      "loss": 4.1266,
+      "step": 593408
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.027572405777656e-05,
+      "loss": 4.1251,
+      "step": 593920
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.026733811026604e-05,
+      "loss": 4.1141,
+      "step": 594432
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.025895216275552e-05,
+      "loss": 4.105,
+      "step": 594944
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.025058259404873e-05,
+      "loss": 4.109,
+      "step": 595456
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.024219664653821e-05,
+      "loss": 4.1143,
+      "step": 595968
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.023381069902769e-05,
+      "loss": 4.117,
+      "step": 596480
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.022542475151717e-05,
+      "loss": 4.0999,
+      "step": 596992
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.021703880400665e-05,
+      "loss": 4.1167,
+      "step": 597504
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.020866923529986e-05,
+      "loss": 4.1165,
+      "step": 598016
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.020028328778934e-05,
+      "loss": 4.1088,
+      "step": 598528
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.019189734027882e-05,
+      "loss": 4.1084,
+      "step": 599040
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.0183527771572035e-05,
+      "loss": 4.1148,
+      "step": 599552
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.0175141824061515e-05,
+      "loss": 4.1057,
+      "step": 600064
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.0166755876550995e-05,
+      "loss": 4.1213,
+      "step": 600576
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.0158369929040475e-05,
+      "loss": 4.1133,
+      "step": 601088
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.0149983981529955e-05,
+      "loss": 4.12,
+      "step": 601600
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.0141598034019435e-05,
+      "loss": 4.102,
+      "step": 602112
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.0133212086508915e-05,
+      "loss": 4.1204,
+      "step": 602624
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.0124826138998395e-05,
+      "loss": 4.1174,
+      "step": 603136
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.011644019148787e-05,
+      "loss": 4.1076,
+      "step": 603648
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.010805424397735e-05,
+      "loss": 4.1149,
+      "step": 604160
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.0099684675270564e-05,
+      "loss": 4.1132,
+      "step": 604672
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.0091298727760044e-05,
+      "loss": 4.1143,
+      "step": 605184
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.0082912780249524e-05,
+      "loss": 4.1307,
+      "step": 605696
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.0074526832739004e-05,
+      "loss": 4.1025,
+      "step": 606208
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.0066140885228484e-05,
+      "loss": 4.1165,
+      "step": 606720
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.0057754937717964e-05,
+      "loss": 4.1122,
+      "step": 607232
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.0049368990207444e-05,
+      "loss": 4.1119,
+      "step": 607744
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.0040983042696924e-05,
+      "loss": 4.1041,
+      "step": 608256
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.003261347399013e-05,
+      "loss": 4.0998,
+      "step": 608768
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.002422752647961e-05,
+      "loss": 4.1098,
+      "step": 609280
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.001585795777282e-05,
+      "loss": 4.1006,
+      "step": 609792
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 4.00074720102623e-05,
+      "loss": 4.1147,
+      "step": 610304
+    },
+    {
+      "epoch": 1.03,
+      "eval_loss": 4.119741916656494,
+      "eval_runtime": 295.1931,
+      "eval_samples_per_second": 1292.683,
+      "eval_steps_per_second": 40.397,
+      "step": 610560
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.999908606275178e-05,
+      "loss": 4.1065,
+      "step": 610816
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.999070011524126e-05,
+      "loss": 4.1047,
+      "step": 611328
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.998231416773074e-05,
+      "loss": 4.112,
+      "step": 611840
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.997392822022022e-05,
+      "loss": 4.1107,
+      "step": 612352
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.996554227270971e-05,
+      "loss": 4.1156,
+      "step": 612864
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.995715632519919e-05,
+      "loss": 4.1081,
+      "step": 613376
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.994877037768867e-05,
+      "loss": 4.1144,
+      "step": 613888
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.994038443017815e-05,
+      "loss": 4.0992,
+      "step": 614400
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.993199848266763e-05,
+      "loss": 4.099,
+      "step": 614912
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.992361253515711e-05,
+      "loss": 4.116,
+      "step": 615424
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.991522658764658e-05,
+      "loss": 4.109,
+      "step": 615936
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.990684064013606e-05,
+      "loss": 4.1025,
+      "step": 616448
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.989847107142928e-05,
+      "loss": 4.115,
+      "step": 616960
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.989008512391876e-05,
+      "loss": 4.0985,
+      "step": 617472
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.988169917640823e-05,
+      "loss": 4.103,
+      "step": 617984
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.987331322889771e-05,
+      "loss": 4.0881,
+      "step": 618496
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.986492728138719e-05,
+      "loss": 4.1038,
+      "step": 619008
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.985654133387668e-05,
+      "loss": 4.1096,
+      "step": 619520
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.984815538636616e-05,
+      "loss": 4.0968,
+      "step": 620032
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.983976943885564e-05,
+      "loss": 4.116,
+      "step": 620544
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.983138349134512e-05,
+      "loss": 4.1141,
+      "step": 621056
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.9822997543834597e-05,
+      "loss": 4.1105,
+      "step": 621568
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.9814611596324077e-05,
+      "loss": 4.1042,
+      "step": 622080
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.9806225648813557e-05,
+      "loss": 4.1066,
+      "step": 622592
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.9797856080106766e-05,
+      "loss": 4.1101,
+      "step": 623104
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.9789470132596246e-05,
+      "loss": 4.1053,
+      "step": 623616
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.9781084185085726e-05,
+      "loss": 4.096,
+      "step": 624128
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.9772714616378935e-05,
+      "loss": 4.1058,
+      "step": 624640
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.9764328668868415e-05,
+      "loss": 4.0938,
+      "step": 625152
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.9755942721357895e-05,
+      "loss": 4.1017,
+      "step": 625664
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.9747556773847374e-05,
+      "loss": 4.1015,
+      "step": 626176
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.973917082633686e-05,
+      "loss": 4.1066,
+      "step": 626688
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.973080125763007e-05,
+      "loss": 4.1113,
+      "step": 627200
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.972241531011955e-05,
+      "loss": 4.1132,
+      "step": 627712
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.971402936260903e-05,
+      "loss": 4.0965,
+      "step": 628224
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.970565979390224e-05,
+      "loss": 4.1042,
+      "step": 628736
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.969727384639172e-05,
+      "loss": 4.1102,
+      "step": 629248
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.96888878988812e-05,
+      "loss": 4.0916,
+      "step": 629760
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.968050195137068e-05,
+      "loss": 4.093,
+      "step": 630272
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.967211600386016e-05,
+      "loss": 4.0922,
+      "step": 630784
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.966373005634964e-05,
+      "loss": 4.0909,
+      "step": 631296
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.965534410883912e-05,
+      "loss": 4.1006,
+      "step": 631808
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.96469581613286e-05,
+      "loss": 4.1019,
+      "step": 632320
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.963857221381808e-05,
+      "loss": 4.0933,
+      "step": 632832
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.963018626630756e-05,
+      "loss": 4.0953,
+      "step": 633344
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.962180031879704e-05,
+      "loss": 4.097,
+      "step": 633856
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.961341437128652e-05,
+      "loss": 4.1074,
+      "step": 634368
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.960504480257973e-05,
+      "loss": 4.0875,
+      "step": 634880
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.9596675233872944e-05,
+      "loss": 4.08,
+      "step": 635392
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.958828928636242e-05,
+      "loss": 4.1094,
+      "step": 635904
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.95799033388519e-05,
+      "loss": 4.1031,
+      "step": 636416
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.957151739134138e-05,
+      "loss": 4.108,
+      "step": 636928
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.956313144383086e-05,
+      "loss": 4.0943,
+      "step": 637440
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.9554761875124066e-05,
+      "loss": 4.0969,
+      "step": 637952
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.9546375927613546e-05,
+      "loss": 4.0959,
+      "step": 638464
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.953800635890676e-05,
+      "loss": 4.0944,
+      "step": 638976
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.952962041139624e-05,
+      "loss": 4.1015,
+      "step": 639488
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.952123446388572e-05,
+      "loss": 4.0845,
+      "step": 640000
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.95128485163752e-05,
+      "loss": 4.1204,
+      "step": 640512
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.950446256886468e-05,
+      "loss": 4.091,
+      "step": 641024
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.949607662135416e-05,
+      "loss": 4.0762,
+      "step": 641536
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.948769067384364e-05,
+      "loss": 4.1063,
+      "step": 642048
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.947930472633312e-05,
+      "loss": 4.087,
+      "step": 642560
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.94709187788226e-05,
+      "loss": 4.0771,
+      "step": 643072
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.946253283131208e-05,
+      "loss": 4.1038,
+      "step": 643584
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.945414688380156e-05,
+      "loss": 4.0997,
+      "step": 644096
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.944576093629104e-05,
+      "loss": 4.0903,
+      "step": 644608
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.943739136758425e-05,
+      "loss": 4.0919,
+      "step": 645120
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.942900542007373e-05,
+      "loss": 4.0741,
+      "step": 645632
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.942061947256322e-05,
+      "loss": 4.0861,
+      "step": 646144
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.94122335250527e-05,
+      "loss": 4.0976,
+      "step": 646656
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.9403863956345906e-05,
+      "loss": 4.0905,
+      "step": 647168
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.9395478008835386e-05,
+      "loss": 4.0943,
+      "step": 647680
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.9387108440128595e-05,
+      "loss": 4.0916,
+      "step": 648192
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.9378722492618075e-05,
+      "loss": 4.1084,
+      "step": 648704
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.9370336545107555e-05,
+      "loss": 4.0897,
+      "step": 649216
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.9361950597597035e-05,
+      "loss": 4.0918,
+      "step": 649728
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.9353564650086515e-05,
+      "loss": 4.0946,
+      "step": 650240
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.9345178702575995e-05,
+      "loss": 4.0886,
+      "step": 650752
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.9336792755065475e-05,
+      "loss": 4.0975,
+      "step": 651264
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.9328406807554955e-05,
+      "loss": 4.0896,
+      "step": 651776
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.932003723884817e-05,
+      "loss": 4.0875,
+      "step": 652288
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.931165129133765e-05,
+      "loss": 4.1027,
+      "step": 652800
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.930326534382713e-05,
+      "loss": 4.079,
+      "step": 653312
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.929489577512034e-05,
+      "loss": 4.0844,
+      "step": 653824
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.928650982760982e-05,
+      "loss": 4.0909,
+      "step": 654336
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.92781238800993e-05,
+      "loss": 4.0941,
+      "step": 654848
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.926973793258878e-05,
+      "loss": 4.0831,
+      "step": 655360
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.926135198507825e-05,
+      "loss": 4.0965,
+      "step": 655872
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.925296603756773e-05,
+      "loss": 4.0806,
+      "step": 656384
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.924458009005721e-05,
+      "loss": 4.0834,
+      "step": 656896
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.923619414254669e-05,
+      "loss": 4.0809,
+      "step": 657408
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.922784095264364e-05,
+      "loss": 4.0859,
+      "step": 657920
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.9219471383936854e-05,
+      "loss": 4.1019,
+      "step": 658432
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.9211085436426334e-05,
+      "loss": 4.0876,
+      "step": 658944
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.9202699488915813e-05,
+      "loss": 4.0921,
+      "step": 659456
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.9194313541405293e-05,
+      "loss": 4.0838,
+      "step": 659968
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.918592759389477e-05,
+      "loss": 4.0835,
+      "step": 660480
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.917754164638425e-05,
+      "loss": 4.0864,
+      "step": 660992
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.9169155698873727e-05,
+      "loss": 4.0957,
+      "step": 661504
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.9160769751363206e-05,
+      "loss": 4.0937,
+      "step": 662016
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.9152383803852686e-05,
+      "loss": 4.0913,
+      "step": 662528
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.9143997856342166e-05,
+      "loss": 4.0912,
+      "step": 663040
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.9135611908831646e-05,
+      "loss": 4.0873,
+      "step": 663552
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.9127225961321126e-05,
+      "loss": 4.0851,
+      "step": 664064
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.911885639261434e-05,
+      "loss": 4.0859,
+      "step": 664576
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.911047044510382e-05,
+      "loss": 4.0751,
+      "step": 665088
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.91020844975933e-05,
+      "loss": 4.0911,
+      "step": 665600
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.909369855008278e-05,
+      "loss": 4.0809,
+      "step": 666112
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.908531260257226e-05,
+      "loss": 4.0834,
+      "step": 666624
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.907694303386547e-05,
+      "loss": 4.0807,
+      "step": 667136
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.906855708635495e-05,
+      "loss": 4.0781,
+      "step": 667648
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.906017113884443e-05,
+      "loss": 4.0847,
+      "step": 668160
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.905178519133391e-05,
+      "loss": 4.0793,
+      "step": 668672
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.904341562262712e-05,
+      "loss": 4.0959,
+      "step": 669184
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.90350296751166e-05,
+      "loss": 4.0958,
+      "step": 669696
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.9026660106409816e-05,
+      "loss": 4.0943,
+      "step": 670208
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.9018274158899296e-05,
+      "loss": 4.0855,
+      "step": 670720
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.9009904590192505e-05,
+      "loss": 4.0805,
+      "step": 671232
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.9001518642681985e-05,
+      "loss": 4.079,
+      "step": 671744
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.8993132695171465e-05,
+      "loss": 4.0876,
+      "step": 672256
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.8984746747660945e-05,
+      "loss": 4.0915,
+      "step": 672768
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.8976360800150425e-05,
+      "loss": 4.0741,
+      "step": 673280
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.8967974852639905e-05,
+      "loss": 4.0845,
+      "step": 673792
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.8959588905129385e-05,
+      "loss": 4.0881,
+      "step": 674304
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.8951219336422594e-05,
+      "loss": 4.0844,
+      "step": 674816
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.8942833388912074e-05,
+      "loss": 4.0793,
+      "step": 675328
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.8934447441401554e-05,
+      "loss": 4.0856,
+      "step": 675840
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.8926061493891034e-05,
+      "loss": 4.0762,
+      "step": 676352
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.8917675546380514e-05,
+      "loss": 4.0941,
+      "step": 676864
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.8909289598869994e-05,
+      "loss": 4.0846,
+      "step": 677376
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.890090365135948e-05,
+      "loss": 4.0962,
+      "step": 677888
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.889251770384896e-05,
+      "loss": 4.0769,
+      "step": 678400
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.888413175633844e-05,
+      "loss": 4.0879,
+      "step": 678912
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.887574580882791e-05,
+      "loss": 4.0925,
+      "step": 679424
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.886737624012113e-05,
+      "loss": 4.0814,
+      "step": 679936
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.885899029261061e-05,
+      "loss": 4.0849,
+      "step": 680448
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.885060434510009e-05,
+      "loss": 4.0867,
+      "step": 680960
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.884221839758956e-05,
+      "loss": 4.0832,
+      "step": 681472
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.883383245007904e-05,
+      "loss": 4.1008,
+      "step": 681984
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.882544650256852e-05,
+      "loss": 4.0796,
+      "step": 682496
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.8817060555058e-05,
+      "loss": 4.0891,
+      "step": 683008
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.880867460754748e-05,
+      "loss": 4.0861,
+      "step": 683520
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.88003050388407e-05,
+      "loss": 4.0833,
+      "step": 684032
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.879191909133018e-05,
+      "loss": 4.0766,
+      "step": 684544
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.878353314381966e-05,
+      "loss": 4.0749,
+      "step": 685056
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.877516357511287e-05,
+      "loss": 4.08,
+      "step": 685568
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.876677762760235e-05,
+      "loss": 4.0777,
+      "step": 686080
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.875839168009183e-05,
+      "loss": 4.0844,
+      "step": 686592
+    },
+    {
+      "epoch": 0.03,
+      "eval_loss": 4.099898815155029,
+      "eval_runtime": 286.0052,
+      "eval_samples_per_second": 1334.21,
+      "eval_steps_per_second": 41.695,
+      "step": 686880
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.875000573258131e-05,
+      "loss": 4.0793,
+      "step": 687104
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.874161978507079e-05,
+      "loss": 4.0783,
+      "step": 687616
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.873323383756027e-05,
+      "loss": 4.0808,
+      "step": 688128
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.872484789004975e-05,
+      "loss": 4.0872,
+      "step": 688640
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.871646194253923e-05,
+      "loss": 4.092,
+      "step": 689152
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.870807599502871e-05,
+      "loss": 4.0819,
+      "step": 689664
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.8699706426321916e-05,
+      "loss": 4.083,
+      "step": 690176
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.86913204788114e-05,
+      "loss": 4.0733,
+      "step": 690688
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.868293453130088e-05,
+      "loss": 4.0744,
+      "step": 691200
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.867454858379036e-05,
+      "loss": 4.0843,
+      "step": 691712
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.866617901508357e-05,
+      "loss": 4.0827,
+      "step": 692224
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.865779306757305e-05,
+      "loss": 4.0779,
+      "step": 692736
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.864943987766999e-05,
+      "loss": 4.0875,
+      "step": 693248
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.864105393015947e-05,
+      "loss": 4.0729,
+      "step": 693760
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.863266798264895e-05,
+      "loss": 4.0749,
+      "step": 694272
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.862428203513843e-05,
+      "loss": 4.0593,
+      "step": 694784
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.861589608762791e-05,
+      "loss": 4.0812,
+      "step": 695296
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.860751014011739e-05,
+      "loss": 4.0814,
+      "step": 695808
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.859912419260687e-05,
+      "loss": 4.0708,
+      "step": 696320
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.8590738245096356e-05,
+      "loss": 4.0884,
+      "step": 696832
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.8582368676389565e-05,
+      "loss": 4.0857,
+      "step": 697344
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.8573982728879045e-05,
+      "loss": 4.0865,
+      "step": 697856
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.8565596781368525e-05,
+      "loss": 4.0796,
+      "step": 698368
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.8557210833858005e-05,
+      "loss": 4.0802,
+      "step": 698880
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.8548824886347485e-05,
+      "loss": 4.0814,
+      "step": 699392
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.8540438938836965e-05,
+      "loss": 4.0832,
+      "step": 699904
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.8532052991326445e-05,
+      "loss": 4.0658,
+      "step": 700416
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.852366704381592e-05,
+      "loss": 4.0804,
+      "step": 700928
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.85152810963054e-05,
+      "loss": 4.0673,
+      "step": 701440
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.8506911527598614e-05,
+      "loss": 4.0764,
+      "step": 701952
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.849854195889182e-05,
+      "loss": 4.0779,
+      "step": 702464
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.849015601138131e-05,
+      "loss": 4.0833,
+      "step": 702976
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.848177006387079e-05,
+      "loss": 4.0767,
+      "step": 703488
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.847338411636027e-05,
+      "loss": 4.0888,
+      "step": 704000
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.846499816884975e-05,
+      "loss": 4.0721,
+      "step": 704512
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.845662860014296e-05,
+      "loss": 4.0776,
+      "step": 705024
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.844824265263244e-05,
+      "loss": 4.0824,
+      "step": 705536
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.843985670512192e-05,
+      "loss": 4.0621,
+      "step": 706048
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.843147075761139e-05,
+      "loss": 4.0662,
+      "step": 706560
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.842308481010087e-05,
+      "loss": 4.0706,
+      "step": 707072
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.841469886259035e-05,
+      "loss": 4.0652,
+      "step": 707584
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.840631291507983e-05,
+      "loss": 4.0737,
+      "step": 708096
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.839794334637305e-05,
+      "loss": 4.0811,
+      "step": 708608
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.838955739886253e-05,
+      "loss": 4.0672,
+      "step": 709120
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.838117145135201e-05,
+      "loss": 4.0732,
+      "step": 709632
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.837278550384149e-05,
+      "loss": 4.0705,
+      "step": 710144
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.836439955633097e-05,
+      "loss": 4.0775,
+      "step": 710656
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.835601360882045e-05,
+      "loss": 4.0632,
+      "step": 711168
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.834762766130993e-05,
+      "loss": 4.0565,
+      "step": 711680
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.833924171379941e-05,
+      "loss": 4.0833,
+      "step": 712192
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.8330872145092616e-05,
+      "loss": 4.078,
+      "step": 712704
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.8322486197582096e-05,
+      "loss": 4.0885,
+      "step": 713216
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.8314100250071576e-05,
+      "loss": 4.0657,
+      "step": 713728
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.8305714302561056e-05,
+      "loss": 4.0702,
+      "step": 714240
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.8297344733854265e-05,
+      "loss": 4.0718,
+      "step": 714752
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.828897516514748e-05,
+      "loss": 4.0668,
+      "step": 715264
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.828058921763696e-05,
+      "loss": 4.0815,
+      "step": 715776
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.827220327012644e-05,
+      "loss": 4.0546,
+      "step": 716288
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.826381732261592e-05,
+      "loss": 4.0959,
+      "step": 716800
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.82554313751054e-05,
+      "loss": 4.07,
+      "step": 717312
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.824704542759488e-05,
+      "loss": 4.0487,
+      "step": 717824
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.823865948008436e-05,
+      "loss": 4.0773,
+      "step": 718336
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.823027353257384e-05,
+      "loss": 4.0637,
+      "step": 718848
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.822188758506332e-05,
+      "loss": 4.0512,
+      "step": 719360
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.82135016375528e-05,
+      "loss": 4.0754,
+      "step": 719872
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.820511569004228e-05,
+      "loss": 4.0782,
+      "step": 720384
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.8196729742531754e-05,
+      "loss": 4.0615,
+      "step": 720896
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.8188343795021234e-05,
+      "loss": 4.0701,
+      "step": 721408
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.817999060511818e-05,
+      "loss": 4.048,
+      "step": 721920
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.8171604657607666e-05,
+      "loss": 4.063,
+      "step": 722432
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.8163218710097146e-05,
+      "loss": 4.0685,
+      "step": 722944
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.8154832762586626e-05,
+      "loss": 4.064,
+      "step": 723456
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.8146463193879835e-05,
+      "loss": 4.0759,
+      "step": 723968
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.8138077246369315e-05,
+      "loss": 4.062,
+      "step": 724480
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.8129691298858795e-05,
+      "loss": 4.0861,
+      "step": 724992
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.8121305351348275e-05,
+      "loss": 4.0655,
+      "step": 725504
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.8112919403837755e-05,
+      "loss": 4.0691,
+      "step": 726016
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.810453345632723e-05,
+      "loss": 4.0687,
+      "step": 726528
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.809614750881671e-05,
+      "loss": 4.0621,
+      "step": 727040
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.808776156130619e-05,
+      "loss": 4.0697,
+      "step": 727552
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.807937561379567e-05,
+      "loss": 4.0626,
+      "step": 728064
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.807098966628515e-05,
+      "loss": 4.0696,
+      "step": 728576
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.8062603718774634e-05,
+      "loss": 4.0726,
+      "step": 729088
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.8054217771264114e-05,
+      "loss": 4.0626,
+      "step": 729600
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.804584820255732e-05,
+      "loss": 4.0558,
+      "step": 730112
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.803747863385053e-05,
+      "loss": 4.066,
+      "step": 730624
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.802909268634001e-05,
+      "loss": 4.0727,
+      "step": 731136
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.802070673882949e-05,
+      "loss": 4.0582,
+      "step": 731648
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.801232079131897e-05,
+      "loss": 4.0673,
+      "step": 732160
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.800393484380845e-05,
+      "loss": 4.0543,
+      "step": 732672
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.799554889629793e-05,
+      "loss": 4.0614,
+      "step": 733184
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.798717932759114e-05,
+      "loss": 4.0591,
+      "step": 733696
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.797879338008062e-05,
+      "loss": 4.0631,
+      "step": 734208
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.79704074325701e-05,
+      "loss": 4.0759,
+      "step": 734720
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.796202148505959e-05,
+      "loss": 4.0648,
+      "step": 735232
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.795363553754907e-05,
+      "loss": 4.069,
+      "step": 735744
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.794526596884228e-05,
+      "loss": 4.0595,
+      "step": 736256
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.793688002133176e-05,
+      "loss": 4.059,
+      "step": 736768
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.792849407382124e-05,
+      "loss": 4.0625,
+      "step": 737280
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.792010812631072e-05,
+      "loss": 4.0707,
+      "step": 737792
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.79117221788002e-05,
+      "loss": 4.0704,
+      "step": 738304
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.790333623128968e-05,
+      "loss": 4.067,
+      "step": 738816
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.789495028377916e-05,
+      "loss": 4.065,
+      "step": 739328
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.788656433626864e-05,
+      "loss": 4.0606,
+      "step": 739840
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.7878194767561846e-05,
+      "loss": 4.0643,
+      "step": 740352
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.7869808820051326e-05,
+      "loss": 4.0611,
+      "step": 740864
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.7861422872540806e-05,
+      "loss": 4.0562,
+      "step": 741376
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.7853036925030286e-05,
+      "loss": 4.0648,
+      "step": 741888
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.78446673563235e-05,
+      "loss": 4.0572,
+      "step": 742400
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.783628140881298e-05,
+      "loss": 4.0591,
+      "step": 742912
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.782789546130246e-05,
+      "loss": 4.0551,
+      "step": 743424
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.781950951379194e-05,
+      "loss": 4.0535,
+      "step": 743936
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.781113994508515e-05,
+      "loss": 4.0611,
+      "step": 744448
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.780275399757463e-05,
+      "loss": 4.0538,
+      "step": 744960
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.779436805006411e-05,
+      "loss": 4.0689,
+      "step": 745472
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.778598210255359e-05,
+      "loss": 4.069,
+      "step": 745984
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.77776125338468e-05,
+      "loss": 4.0754,
+      "step": 746496
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.776922658633628e-05,
+      "loss": 4.0627,
+      "step": 747008
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.776084063882576e-05,
+      "loss": 4.0582,
+      "step": 747520
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.7752471070118975e-05,
+      "loss": 4.0529,
+      "step": 748032
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.7744085122608455e-05,
+      "loss": 4.0687,
+      "step": 748544
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.7735699175097935e-05,
+      "loss": 4.0651,
+      "step": 749056
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.7727313227587415e-05,
+      "loss": 4.048,
+      "step": 749568
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.771892728007689e-05,
+      "loss": 4.0606,
+      "step": 750080
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.771054133256637e-05,
+      "loss": 4.0626,
+      "step": 750592
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.770215538505585e-05,
+      "loss": 4.0616,
+      "step": 751104
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.769376943754533e-05,
+      "loss": 4.0568,
+      "step": 751616
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.768539986883854e-05,
+      "loss": 4.0607,
+      "step": 752128
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.767701392132802e-05,
+      "loss": 4.0522,
+      "step": 752640
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.76686279738175e-05,
+      "loss": 4.0723,
+      "step": 753152
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.766024202630698e-05,
+      "loss": 4.0616,
+      "step": 753664
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.765187245760019e-05,
+      "loss": 4.0699,
+      "step": 754176
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.764348651008967e-05,
+      "loss": 4.0527,
+      "step": 754688
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.763510056257915e-05,
+      "loss": 4.0665,
+      "step": 755200
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.762671461506863e-05,
+      "loss": 4.0693,
+      "step": 755712
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.761834504636184e-05,
+      "loss": 4.0549,
+      "step": 756224
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.760995909885132e-05,
+      "loss": 4.0639,
+      "step": 756736
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.76015731513408e-05,
+      "loss": 4.0618,
+      "step": 757248
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.759320358263401e-05,
+      "loss": 4.0657,
+      "step": 757760
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.758481763512349e-05,
+      "loss": 4.0739,
+      "step": 758272
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.757643168761297e-05,
+      "loss": 4.0595,
+      "step": 758784
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.756804574010245e-05,
+      "loss": 4.0626,
+      "step": 759296
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.755965979259193e-05,
+      "loss": 4.063,
+      "step": 759808
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.755127384508141e-05,
+      "loss": 4.061,
+      "step": 760320
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.7542904276374627e-05,
+      "loss": 4.0539,
+      "step": 760832
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.7534518328864107e-05,
+      "loss": 4.0523,
+      "step": 761344
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.7526132381353586e-05,
+      "loss": 4.0547,
+      "step": 761856
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.7517746433843066e-05,
+      "loss": 4.055,
+      "step": 762368
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.7509360486332546e-05,
+      "loss": 4.0589,
+      "step": 762880
+    },
+    {
+      "epoch": 1.03,
+      "eval_loss": 4.083510875701904,
+      "eval_runtime": 283.4154,
+      "eval_samples_per_second": 1346.402,
+      "eval_steps_per_second": 42.076,
+      "step": 763200
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.7500974538822026e-05,
+      "loss": 4.059,
+      "step": 763392
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.7492588591311506e-05,
+      "loss": 4.0533,
+      "step": 763904
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.7484202643800986e-05,
+      "loss": 4.0576,
+      "step": 764416
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.7475816696290466e-05,
+      "loss": 4.0637,
+      "step": 764928
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.7467430748779946e-05,
+      "loss": 4.0705,
+      "step": 765440
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.7459044801269426e-05,
+      "loss": 4.0576,
+      "step": 765952
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.74506588537589e-05,
+      "loss": 4.0631,
+      "step": 766464
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.744227290624838e-05,
+      "loss": 4.0519,
+      "step": 766976
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.7433886958737866e-05,
+      "loss": 4.0487,
+      "step": 767488
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.7425501011227346e-05,
+      "loss": 4.0568,
+      "step": 768000
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.7417115063716826e-05,
+      "loss": 4.0647,
+      "step": 768512
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.7408729116206306e-05,
+      "loss": 4.0564,
+      "step": 769024
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.7400359547499515e-05,
+      "loss": 4.0634,
+      "step": 769536
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.7391973599988995e-05,
+      "loss": 4.0482,
+      "step": 770048
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.7383587652478475e-05,
+      "loss": 4.0552,
+      "step": 770560
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.7375201704967955e-05,
+      "loss": 4.0329,
+      "step": 771072
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.7366815757457435e-05,
+      "loss": 4.0593,
+      "step": 771584
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.7358429809946915e-05,
+      "loss": 4.0581,
+      "step": 772096
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.7350043862436395e-05,
+      "loss": 4.0472,
+      "step": 772608
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.7341657914925875e-05,
+      "loss": 4.0639,
+      "step": 773120
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.7333271967415355e-05,
+      "loss": 4.0652,
+      "step": 773632
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.7324886019904835e-05,
+      "loss": 4.0639,
+      "step": 774144
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.7316500072394315e-05,
+      "loss": 4.053,
+      "step": 774656
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.7308114124883794e-05,
+      "loss": 4.0578,
+      "step": 775168
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.729976093498074e-05,
+      "loss": 4.0586,
+      "step": 775680
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.729137498747022e-05,
+      "loss": 4.0619,
+      "step": 776192
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.72829890399597e-05,
+      "loss": 4.0421,
+      "step": 776704
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.727460309244918e-05,
+      "loss": 4.0555,
+      "step": 777216
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.726621714493866e-05,
+      "loss": 4.0482,
+      "step": 777728
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.725784757623187e-05,
+      "loss": 4.049,
+      "step": 778240
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.724946162872135e-05,
+      "loss": 4.0575,
+      "step": 778752
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.724107568121083e-05,
+      "loss": 4.0592,
+      "step": 779264
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.723268973370031e-05,
+      "loss": 4.0563,
+      "step": 779776
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.722430378618979e-05,
+      "loss": 4.0661,
+      "step": 780288
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.7215934217483e-05,
+      "loss": 4.0524,
+      "step": 780800
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.720756464877621e-05,
+      "loss": 4.0565,
+      "step": 781312
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.719917870126569e-05,
+      "loss": 4.0632,
+      "step": 781824
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.719079275375517e-05,
+      "loss": 4.0386,
+      "step": 782336
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.718240680624465e-05,
+      "loss": 4.0428,
+      "step": 782848
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.717402085873413e-05,
+      "loss": 4.0472,
+      "step": 783360
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.716565129002734e-05,
+      "loss": 4.0377,
+      "step": 783872
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.715726534251682e-05,
+      "loss": 4.0525,
+      "step": 784384
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.71488793950063e-05,
+      "loss": 4.0597,
+      "step": 784896
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.714049344749578e-05,
+      "loss": 4.0439,
+      "step": 785408
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.713212387878899e-05,
+      "loss": 4.0504,
+      "step": 785920
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.712373793127847e-05,
+      "loss": 4.0505,
+      "step": 786432
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.711535198376795e-05,
+      "loss": 4.0521,
+      "step": 786944
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.710696603625744e-05,
+      "loss": 4.0419,
+      "step": 787456
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.709858008874691e-05,
+      "loss": 4.0336,
+      "step": 787968
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.709019414123639e-05,
+      "loss": 4.0577,
+      "step": 788480
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.708180819372587e-05,
+      "loss": 4.0591,
+      "step": 788992
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.707342224621535e-05,
+      "loss": 4.0673,
+      "step": 789504
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.706503629870483e-05,
+      "loss": 4.0451,
+      "step": 790016
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.705665035119431e-05,
+      "loss": 4.046,
+      "step": 790528
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.704826440368379e-05,
+      "loss": 4.0506,
+      "step": 791040
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.703987845617327e-05,
+      "loss": 4.0455,
+      "step": 791552
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.703150888746648e-05,
+      "loss": 4.0605,
+      "step": 792064
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.702312293995596e-05,
+      "loss": 4.0312,
+      "step": 792576
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.7014753371249176e-05,
+      "loss": 4.0746,
+      "step": 793088
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.7006367423738655e-05,
+      "loss": 4.0509,
+      "step": 793600
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.6997981476228135e-05,
+      "loss": 4.0284,
+      "step": 794112
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.6989595528717615e-05,
+      "loss": 4.0537,
+      "step": 794624
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.6981209581207095e-05,
+      "loss": 4.0414,
+      "step": 795136
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.6972823633696575e-05,
+      "loss": 4.0296,
+      "step": 795648
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.6964437686186055e-05,
+      "loss": 4.0556,
+      "step": 796160
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.6956051738675535e-05,
+      "loss": 4.0552,
+      "step": 796672
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.6947665791165015e-05,
+      "loss": 4.04,
+      "step": 797184
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.6939296222458224e-05,
+      "loss": 4.0535,
+      "step": 797696
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.693092665375143e-05,
+      "loss": 4.0244,
+      "step": 798208
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.692254070624091e-05,
+      "loss": 4.0423,
+      "step": 798720
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.691415475873039e-05,
+      "loss": 4.0409,
+      "step": 799232
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.690576881121987e-05,
+      "loss": 4.0455,
+      "step": 799744
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.689738286370936e-05,
+      "loss": 4.0484,
+      "step": 800256
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.688899691619884e-05,
+      "loss": 4.042,
+      "step": 800768
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.688062734749205e-05,
+      "loss": 4.0664,
+      "step": 801280
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.687224139998153e-05,
+      "loss": 4.045,
+      "step": 801792
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.686385545247101e-05,
+      "loss": 4.0499,
+      "step": 802304
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.685546950496049e-05,
+      "loss": 4.0439,
+      "step": 802816
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.684708355744997e-05,
+      "loss": 4.0397,
+      "step": 803328
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.683869760993945e-05,
+      "loss": 4.0485,
+      "step": 803840
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.683031166242892e-05,
+      "loss": 4.0416,
+      "step": 804352
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.682194209372214e-05,
+      "loss": 4.0455,
+      "step": 804864
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.681355614621162e-05,
+      "loss": 4.0563,
+      "step": 805376
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.68051701987011e-05,
+      "loss": 4.0415,
+      "step": 805888
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.679678425119058e-05,
+      "loss": 4.0315,
+      "step": 806400
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.6788414682483794e-05,
+      "loss": 4.0426,
+      "step": 806912
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.6780028734973274e-05,
+      "loss": 4.0528,
+      "step": 807424
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.677164278746275e-05,
+      "loss": 4.0333,
+      "step": 807936
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.676325683995223e-05,
+      "loss": 4.0502,
+      "step": 808448
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.675488727124544e-05,
+      "loss": 4.0396,
+      "step": 808960
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.674650132373492e-05,
+      "loss": 4.0349,
+      "step": 809472
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.6738115376224396e-05,
+      "loss": 4.0381,
+      "step": 809984
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.6729729428713876e-05,
+      "loss": 4.038,
+      "step": 810496
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.6721343481203356e-05,
+      "loss": 4.0596,
+      "step": 811008
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.6712957533692836e-05,
+      "loss": 4.0416,
+      "step": 811520
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.6704587964986045e-05,
+      "loss": 4.0474,
+      "step": 812032
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.669620201747553e-05,
+      "loss": 4.0398,
+      "step": 812544
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.668781606996501e-05,
+      "loss": 4.0383,
+      "step": 813056
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.667943012245449e-05,
+      "loss": 4.0389,
+      "step": 813568
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.667104417494397e-05,
+      "loss": 4.0501,
+      "step": 814080
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.666265822743345e-05,
+      "loss": 4.0498,
+      "step": 814592
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.665428865872666e-05,
+      "loss": 4.046,
+      "step": 815104
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.664590271121614e-05,
+      "loss": 4.0458,
+      "step": 815616
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.663751676370562e-05,
+      "loss": 4.0356,
+      "step": 816128
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.66291308161951e-05,
+      "loss": 4.047,
+      "step": 816640
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.662074486868458e-05,
+      "loss": 4.0389,
+      "step": 817152
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.661239167878152e-05,
+      "loss": 4.0347,
+      "step": 817664
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.6604005731271e-05,
+      "loss": 4.0396,
+      "step": 818176
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.6595619783760485e-05,
+      "loss": 4.0391,
+      "step": 818688
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.6587233836249965e-05,
+      "loss": 4.0394,
+      "step": 819200
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.6578847888739445e-05,
+      "loss": 4.032,
+      "step": 819712
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.6570461941228925e-05,
+      "loss": 4.0314,
+      "step": 820224
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.6562075993718405e-05,
+      "loss": 4.0409,
+      "step": 820736
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.6553690046207885e-05,
+      "loss": 4.0423,
+      "step": 821248
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.6545304098697365e-05,
+      "loss": 4.0443,
+      "step": 821760
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.6536918151186845e-05,
+      "loss": 4.049,
+      "step": 822272
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.6528532203676325e-05,
+      "loss": 4.0564,
+      "step": 822784
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.6520162634969534e-05,
+      "loss": 4.0396,
+      "step": 823296
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.6511776687459014e-05,
+      "loss": 4.0397,
+      "step": 823808
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.6503390739948494e-05,
+      "loss": 4.0321,
+      "step": 824320
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.6495004792437974e-05,
+      "loss": 4.045,
+      "step": 824832
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.648663522373118e-05,
+      "loss": 4.0438,
+      "step": 825344
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.647824927622067e-05,
+      "loss": 4.0329,
+      "step": 825856
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.646987970751388e-05,
+      "loss": 4.0342,
+      "step": 826368
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.646149376000336e-05,
+      "loss": 4.0465,
+      "step": 826880
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.645310781249284e-05,
+      "loss": 4.0424,
+      "step": 827392
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.644472186498232e-05,
+      "loss": 4.0328,
+      "step": 827904
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.643635229627553e-05,
+      "loss": 4.0377,
+      "step": 828416
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.642796634876501e-05,
+      "loss": 4.0368,
+      "step": 828928
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.641958040125449e-05,
+      "loss": 4.0469,
+      "step": 829440
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.641119445374397e-05,
+      "loss": 4.0441,
+      "step": 829952
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.640280850623345e-05,
+      "loss": 4.0492,
+      "step": 830464
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.639442255872293e-05,
+      "loss": 4.0339,
+      "step": 830976
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.638603661121241e-05,
+      "loss": 4.0457,
+      "step": 831488
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.637765066370189e-05,
+      "loss": 4.0484,
+      "step": 832000
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.636926471619137e-05,
+      "loss": 4.031,
+      "step": 832512
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.636087876868085e-05,
+      "loss": 4.0408,
+      "step": 833024
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.635249282117033e-05,
+      "loss": 4.0431,
+      "step": 833536
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.6344123252463536e-05,
+      "loss": 4.0477,
+      "step": 834048
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.6335737304953016e-05,
+      "loss": 4.0499,
+      "step": 834560
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.6327351357442496e-05,
+      "loss": 4.038,
+      "step": 835072
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.6318965409931976e-05,
+      "loss": 4.0421,
+      "step": 835584
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.6310579462421456e-05,
+      "loss": 4.0439,
+      "step": 836096
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.6302193514910936e-05,
+      "loss": 4.0379,
+      "step": 836608
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.6293807567400416e-05,
+      "loss": 4.0327,
+      "step": 837120
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.6285421619889896e-05,
+      "loss": 4.0336,
+      "step": 837632
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.6277052051183105e-05,
+      "loss": 4.0306,
+      "step": 838144
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.626866610367259e-05,
+      "loss": 4.0391,
+      "step": 838656
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.626028015616207e-05,
+      "loss": 4.0397,
+      "step": 839168
+    },
+    {
+      "epoch": 0.03,
+      "eval_loss": 4.069751739501953,
+      "eval_runtime": 290.1959,
+      "eval_samples_per_second": 1314.943,
+      "eval_steps_per_second": 41.093,
+      "step": 839520
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.625189420865155e-05,
+      "loss": 4.0365,
+      "step": 839680
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.624350826114103e-05,
+      "loss": 4.0351,
+      "step": 840192
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.623513869243424e-05,
+      "loss": 4.0383,
+      "step": 840704
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.622675274492372e-05,
+      "loss": 4.0417,
+      "step": 841216
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.62183667974132e-05,
+      "loss": 4.047,
+      "step": 841728
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.620998084990268e-05,
+      "loss": 4.039,
+      "step": 842240
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.620161128119589e-05,
+      "loss": 4.0431,
+      "step": 842752
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.619322533368537e-05,
+      "loss": 4.0346,
+      "step": 843264
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.618483938617485e-05,
+      "loss": 4.0272,
+      "step": 843776
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.617646981746806e-05,
+      "loss": 4.0357,
+      "step": 844288
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.616808386995754e-05,
+      "loss": 4.049,
+      "step": 844800
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.6159697922447025e-05,
+      "loss": 4.0313,
+      "step": 845312
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.6151328353740234e-05,
+      "loss": 4.0465,
+      "step": 845824
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.6142942406229714e-05,
+      "loss": 4.0262,
+      "step": 846336
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.6134556458719194e-05,
+      "loss": 4.0338,
+      "step": 846848
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.6126170511208674e-05,
+      "loss": 4.0166,
+      "step": 847360
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.6117784563698154e-05,
+      "loss": 4.0405,
+      "step": 847872
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.6109398616187634e-05,
+      "loss": 4.0367,
+      "step": 848384
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.6101012668677114e-05,
+      "loss": 4.0312,
+      "step": 848896
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.609262672116659e-05,
+      "loss": 4.0364,
+      "step": 849408
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.608424077365607e-05,
+      "loss": 4.0504,
+      "step": 849920
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.607585482614555e-05,
+      "loss": 4.042,
+      "step": 850432
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.606746887863503e-05,
+      "loss": 4.0381,
+      "step": 850944
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.605908293112451e-05,
+      "loss": 4.0347,
+      "step": 851456
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.605072974122146e-05,
+      "loss": 4.0436,
+      "step": 851968
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.604234379371094e-05,
+      "loss": 4.0378,
+      "step": 852480
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.603395784620042e-05,
+      "loss": 4.0272,
+      "step": 852992
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.602557189868989e-05,
+      "loss": 4.0341,
+      "step": 853504
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.601718595117937e-05,
+      "loss": 4.0285,
+      "step": 854016
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.600881638247259e-05,
+      "loss": 4.0272,
+      "step": 854528
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.600043043496206e-05,
+      "loss": 4.04,
+      "step": 855040
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.599204448745154e-05,
+      "loss": 4.033,
+      "step": 855552
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.598365853994102e-05,
+      "loss": 4.0427,
+      "step": 856064
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.59752725924305e-05,
+      "loss": 4.0437,
+      "step": 856576
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.596688664491998e-05,
+      "loss": 4.0313,
+      "step": 857088
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.59585170762132e-05,
+      "loss": 4.0378,
+      "step": 857600
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.595013112870268e-05,
+      "loss": 4.0406,
+      "step": 858112
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.594174518119216e-05,
+      "loss": 4.0228,
+      "step": 858624
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.5933359233681637e-05,
+      "loss": 4.0273,
+      "step": 859136
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.5924973286171117e-05,
+      "loss": 4.0215,
+      "step": 859648
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.5916587338660597e-05,
+      "loss": 4.0217,
+      "step": 860160
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.5908201391150076e-05,
+      "loss": 4.032,
+      "step": 860672
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.5899831822443286e-05,
+      "loss": 4.0416,
+      "step": 861184
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.5891445874932766e-05,
+      "loss": 4.0251,
+      "step": 861696
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.5883059927422245e-05,
+      "loss": 4.0269,
+      "step": 862208
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.5874673979911725e-05,
+      "loss": 4.0346,
+      "step": 862720
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.5866288032401205e-05,
+      "loss": 4.0326,
+      "step": 863232
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.5857918463694414e-05,
+      "loss": 4.0244,
+      "step": 863744
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.58495325161839e-05,
+      "loss": 4.0151,
+      "step": 864256
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.584114656867338e-05,
+      "loss": 4.0362,
+      "step": 864768
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.583276062116286e-05,
+      "loss": 4.0419,
+      "step": 865280
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.582437467365234e-05,
+      "loss": 4.0423,
+      "step": 865792
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.581598872614182e-05,
+      "loss": 4.0263,
+      "step": 866304
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.58076027786313e-05,
+      "loss": 4.027,
+      "step": 866816
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.579923320992451e-05,
+      "loss": 4.0299,
+      "step": 867328
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.579084726241399e-05,
+      "loss": 4.0239,
+      "step": 867840
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.578246131490347e-05,
+      "loss": 4.0433,
+      "step": 868352
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.577407536739295e-05,
+      "loss": 4.0147,
+      "step": 868864
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.576568941988242e-05,
+      "loss": 4.0513,
+      "step": 869376
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.57573034723719e-05,
+      "loss": 4.0316,
+      "step": 869888
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.574891752486138e-05,
+      "loss": 4.0116,
+      "step": 870400
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.57405479561546e-05,
+      "loss": 4.0308,
+      "step": 870912
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.573216200864408e-05,
+      "loss": 4.0233,
+      "step": 871424
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.572377606113356e-05,
+      "loss": 4.0129,
+      "step": 871936
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.571539011362304e-05,
+      "loss": 4.0313,
+      "step": 872448
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.570700416611252e-05,
+      "loss": 4.0355,
+      "step": 872960
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.5698618218602e-05,
+      "loss": 4.0225,
+      "step": 873472
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.569024864989521e-05,
+      "loss": 4.0344,
+      "step": 873984
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.568186270238469e-05,
+      "loss": 4.0056,
+      "step": 874496
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.567347675487417e-05,
+      "loss": 4.0241,
+      "step": 875008
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.566509080736365e-05,
+      "loss": 4.023,
+      "step": 875520
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.565670485985313e-05,
+      "loss": 4.0283,
+      "step": 876032
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.564833529114634e-05,
+      "loss": 4.0263,
+      "step": 876544
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.5639949343635823e-05,
+      "loss": 4.024,
+      "step": 877056
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.5631563396125303e-05,
+      "loss": 4.0444,
+      "step": 877568
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.562317744861478e-05,
+      "loss": 4.0289,
+      "step": 878080
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.561479150110426e-05,
+      "loss": 4.0283,
+      "step": 878592
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.560640555359374e-05,
+      "loss": 4.0264,
+      "step": 879104
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.559803598488695e-05,
+      "loss": 4.0218,
+      "step": 879616
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.558965003737643e-05,
+      "loss": 4.0319,
+      "step": 880128
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.558126408986591e-05,
+      "loss": 4.0234,
+      "step": 880640
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.557287814235539e-05,
+      "loss": 4.0232,
+      "step": 881152
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.556449219484487e-05,
+      "loss": 4.0352,
+      "step": 881664
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.555610624733435e-05,
+      "loss": 4.0264,
+      "step": 882176
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.554773667862756e-05,
+      "loss": 4.0095,
+      "step": 882688
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.553935073111704e-05,
+      "loss": 4.0254,
+      "step": 883200
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.553096478360652e-05,
+      "loss": 4.0317,
+      "step": 883712
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.552259521489974e-05,
+      "loss": 4.0165,
+      "step": 884224
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.551420926738922e-05,
+      "loss": 4.0273,
+      "step": 884736
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.55058233198787e-05,
+      "loss": 4.024,
+      "step": 885248
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.549743737236818e-05,
+      "loss": 4.0143,
+      "step": 885760
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.548905142485766e-05,
+      "loss": 4.0231,
+      "step": 886272
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.548066547734714e-05,
+      "loss": 4.0177,
+      "step": 886784
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.547227952983661e-05,
+      "loss": 4.0382,
+      "step": 887296
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.546389358232609e-05,
+      "loss": 4.0256,
+      "step": 887808
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.5455524013619306e-05,
+      "loss": 4.0304,
+      "step": 888320
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.5447138066108786e-05,
+      "loss": 4.0171,
+      "step": 888832
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.543875211859826e-05,
+      "loss": 4.0181,
+      "step": 889344
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.543036617108774e-05,
+      "loss": 4.0256,
+      "step": 889856
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.5421980223577226e-05,
+      "loss": 4.0302,
+      "step": 890368
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.5413594276066706e-05,
+      "loss": 4.0296,
+      "step": 890880
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.5405224707359915e-05,
+      "loss": 4.0269,
+      "step": 891392
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.5396838759849395e-05,
+      "loss": 4.0244,
+      "step": 891904
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.5388452812338875e-05,
+      "loss": 4.0155,
+      "step": 892416
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.5380066864828355e-05,
+      "loss": 4.0293,
+      "step": 892928
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.5371680917317835e-05,
+      "loss": 4.0197,
+      "step": 893440
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.5363311348611044e-05,
+      "loss": 4.0221,
+      "step": 893952
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.5354925401100524e-05,
+      "loss": 4.0192,
+      "step": 894464
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.5346539453590004e-05,
+      "loss": 4.0198,
+      "step": 894976
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.5338153506079483e-05,
+      "loss": 4.0213,
+      "step": 895488
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.5329767558568963e-05,
+      "loss": 4.0114,
+      "step": 896000
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.532139798986218e-05,
+      "loss": 4.0121,
+      "step": 896512
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.531301204235166e-05,
+      "loss": 4.0244,
+      "step": 897024
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.530464247364487e-05,
+      "loss": 4.0222,
+      "step": 897536
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.529625652613435e-05,
+      "loss": 4.0236,
+      "step": 898048
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.528787057862383e-05,
+      "loss": 4.0294,
+      "step": 898560
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.527948463111331e-05,
+      "loss": 4.0381,
+      "step": 899072
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.527109868360279e-05,
+      "loss": 4.0208,
+      "step": 899584
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.526271273609227e-05,
+      "loss": 4.0171,
+      "step": 900096
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.525432678858175e-05,
+      "loss": 4.0183,
+      "step": 900608
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.524594084107123e-05,
+      "loss": 4.0243,
+      "step": 901120
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.523755489356071e-05,
+      "loss": 4.0244,
+      "step": 901632
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.522916894605019e-05,
+      "loss": 4.0176,
+      "step": 902144
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.522078299853967e-05,
+      "loss": 4.0169,
+      "step": 902656
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.521239705102915e-05,
+      "loss": 4.0268,
+      "step": 903168
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.5204027482322364e-05,
+      "loss": 4.0184,
+      "step": 903680
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.5195641534811844e-05,
+      "loss": 4.0191,
+      "step": 904192
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.5187255587301324e-05,
+      "loss": 4.0253,
+      "step": 904704
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.51788696397908e-05,
+      "loss": 4.0201,
+      "step": 905216
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.517050007108401e-05,
+      "loss": 4.0223,
+      "step": 905728
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.516211412357349e-05,
+      "loss": 4.029,
+      "step": 906240
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.515372817606297e-05,
+      "loss": 4.0295,
+      "step": 906752
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.5145342228552446e-05,
+      "loss": 4.0179,
+      "step": 907264
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.5136956281041926e-05,
+      "loss": 4.0279,
+      "step": 907776
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.5128570333531406e-05,
+      "loss": 4.031,
+      "step": 908288
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.5120184386020886e-05,
+      "loss": 4.0169,
+      "step": 908800
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.51118148173141e-05,
+      "loss": 4.0211,
+      "step": 909312
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.510344524860731e-05,
+      "loss": 4.0256,
+      "step": 909824
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.50950593010968e-05,
+      "loss": 4.031,
+      "step": 910336
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.508667335358627e-05,
+      "loss": 4.0335,
+      "step": 910848
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.507828740607575e-05,
+      "loss": 4.0251,
+      "step": 911360
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.506990145856523e-05,
+      "loss": 4.0212,
+      "step": 911872
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.506151551105471e-05,
+      "loss": 4.0256,
+      "step": 912384
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.505312956354419e-05,
+      "loss": 4.0244,
+      "step": 912896
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.504474361603367e-05,
+      "loss": 4.0127,
+      "step": 913408
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.503637404732688e-05,
+      "loss": 4.0211,
+      "step": 913920
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.502798809981636e-05,
+      "loss": 4.0084,
+      "step": 914432
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.501960215230584e-05,
+      "loss": 4.0197,
+      "step": 914944
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.501123258359905e-05,
+      "loss": 4.0201,
+      "step": 915456
+    },
+    {
+      "epoch": 1.03,
+      "eval_loss": 4.058034420013428,
+      "eval_runtime": 291.6524,
+      "eval_samples_per_second": 1308.376,
+      "eval_steps_per_second": 40.888,
+      "step": 915840
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.5002846636088535e-05,
+      "loss": 4.0278,
+      "step": 915968
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.4994460688578015e-05,
+      "loss": 4.0125,
+      "step": 916480
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.4986074741067495e-05,
+      "loss": 4.0176,
+      "step": 916992
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.4977688793556975e-05,
+      "loss": 4.0322,
+      "step": 917504
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.4969302846046455e-05,
+      "loss": 4.0252,
+      "step": 918016
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.4960916898535935e-05,
+      "loss": 4.0267,
+      "step": 918528
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.4952530951025415e-05,
+      "loss": 4.0241,
+      "step": 919040
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.4944145003514895e-05,
+      "loss": 4.0166,
+      "step": 919552
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.4935775434808104e-05,
+      "loss": 4.0086,
+      "step": 920064
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.4927389487297584e-05,
+      "loss": 4.0251,
+      "step": 920576
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.4919003539787064e-05,
+      "loss": 4.0219,
+      "step": 921088
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.4910617592276544e-05,
+      "loss": 4.0211,
+      "step": 921600
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.490224802356975e-05,
+      "loss": 4.0274,
+      "step": 922112
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.489386207605923e-05,
+      "loss": 4.0081,
+      "step": 922624
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.488547612854872e-05,
+      "loss": 4.0177,
+      "step": 923136
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.48770901810382e-05,
+      "loss": 3.9988,
+      "step": 923648
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.486870423352768e-05,
+      "loss": 4.0251,
+      "step": 924160
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.486031828601716e-05,
+      "loss": 4.016,
+      "step": 924672
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.485193233850663e-05,
+      "loss": 4.0185,
+      "step": 925184
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.484354639099611e-05,
+      "loss": 4.0223,
+      "step": 925696
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.483516044348559e-05,
+      "loss": 4.0296,
+      "step": 926208
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.482679087477881e-05,
+      "loss": 4.0246,
+      "step": 926720
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.481840492726828e-05,
+      "loss": 4.0178,
+      "step": 927232
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.481001897975776e-05,
+      "loss": 4.0169,
+      "step": 927744
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.480163303224724e-05,
+      "loss": 4.0303,
+      "step": 928256
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.479324708473672e-05,
+      "loss": 4.0203,
+      "step": 928768
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.47848611372262e-05,
+      "loss": 4.0093,
+      "step": 929280
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.477649156851942e-05,
+      "loss": 4.0201,
+      "step": 929792
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.47681056210089e-05,
+      "loss": 4.0074,
+      "step": 930304
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.475971967349838e-05,
+      "loss": 4.0155,
+      "step": 930816
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.475133372598786e-05,
+      "loss": 4.0234,
+      "step": 931328
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.474294777847734e-05,
+      "loss": 4.0154,
+      "step": 931840
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.473456183096682e-05,
+      "loss": 4.0228,
+      "step": 932352
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.47261758834563e-05,
+      "loss": 4.0277,
+      "step": 932864
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.471778993594578e-05,
+      "loss": 4.0162,
+      "step": 933376
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.4709420367238986e-05,
+      "loss": 4.0227,
+      "step": 933888
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.4701034419728466e-05,
+      "loss": 4.0245,
+      "step": 934400
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.4692648472217946e-05,
+      "loss": 4.0044,
+      "step": 934912
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.4684262524707426e-05,
+      "loss": 4.0126,
+      "step": 935424
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.4675876577196906e-05,
+      "loss": 4.0035,
+      "step": 935936
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.4667490629686386e-05,
+      "loss": 4.0071,
+      "step": 936448
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.46591210609796e-05,
+      "loss": 4.0166,
+      "step": 936960
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.465073511346908e-05,
+      "loss": 4.0221,
+      "step": 937472
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.464234916595856e-05,
+      "loss": 4.0071,
+      "step": 937984
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.463396321844804e-05,
+      "loss": 4.0129,
+      "step": 938496
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.462557727093752e-05,
+      "loss": 4.0142,
+      "step": 939008
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.4617191323426995e-05,
+      "loss": 4.0192,
+      "step": 939520
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.4608805375916475e-05,
+      "loss": 4.0083,
+      "step": 940032
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.4600419428405955e-05,
+      "loss": 3.9969,
+      "step": 940544
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.459204985969917e-05,
+      "loss": 4.0161,
+      "step": 941056
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.4583663912188644e-05,
+      "loss": 4.0305,
+      "step": 941568
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.4575277964678124e-05,
+      "loss": 4.0267,
+      "step": 942080
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.456690839597134e-05,
+      "loss": 4.0059,
+      "step": 942592
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.455852244846082e-05,
+      "loss": 4.0124,
+      "step": 943104
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.45501365009503e-05,
+      "loss": 4.0145,
+      "step": 943616
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.4541783311047244e-05,
+      "loss": 4.0012,
+      "step": 944128
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.4533397363536724e-05,
+      "loss": 4.0317,
+      "step": 944640
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.4525011416026204e-05,
+      "loss": 3.9953,
+      "step": 945152
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.4516625468515684e-05,
+      "loss": 4.0341,
+      "step": 945664
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.4508239521005164e-05,
+      "loss": 4.0135,
+      "step": 946176
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.4499853573494644e-05,
+      "loss": 3.9987,
+      "step": 946688
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.449146762598412e-05,
+      "loss": 4.0133,
+      "step": 947200
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.44830816784736e-05,
+      "loss": 4.0098,
+      "step": 947712
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.447469573096308e-05,
+      "loss": 3.9943,
+      "step": 948224
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.446630978345256e-05,
+      "loss": 4.0153,
+      "step": 948736
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.445794021474577e-05,
+      "loss": 4.02,
+      "step": 949248
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.444955426723525e-05,
+      "loss": 4.0069,
+      "step": 949760
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.444116831972473e-05,
+      "loss": 4.0151,
+      "step": 950272
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.443278237221421e-05,
+      "loss": 3.9902,
+      "step": 950784
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.442439642470369e-05,
+      "loss": 4.0081,
+      "step": 951296
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.441601047719317e-05,
+      "loss": 4.0046,
+      "step": 951808
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.440762452968265e-05,
+      "loss": 4.0143,
+      "step": 952320
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.439923858217213e-05,
+      "loss": 4.0069,
+      "step": 952832
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.439085263466161e-05,
+      "loss": 4.01,
+      "step": 953344
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.438246668715109e-05,
+      "loss": 4.0256,
+      "step": 953856
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.43740971184443e-05,
+      "loss": 4.0128,
+      "step": 954368
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.436571117093378e-05,
+      "loss": 4.0109,
+      "step": 954880
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.435732522342326e-05,
+      "loss": 4.0096,
+      "step": 955392
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.434893927591274e-05,
+      "loss": 4.0075,
+      "step": 955904
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.434055332840223e-05,
+      "loss": 4.0151,
+      "step": 956416
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.433218375969544e-05,
+      "loss": 4.0057,
+      "step": 956928
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.432379781218492e-05,
+      "loss": 4.0132,
+      "step": 957440
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.43154118646744e-05,
+      "loss": 4.0145,
+      "step": 957952
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.430702591716388e-05,
+      "loss": 4.0143,
+      "step": 958464
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.429863996965336e-05,
+      "loss": 3.9905,
+      "step": 958976
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.429025402214283e-05,
+      "loss": 4.0122,
+      "step": 959488
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.428186807463231e-05,
+      "loss": 4.0122,
+      "step": 960000
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.427348212712179e-05,
+      "loss": 4.0023,
+      "step": 960512
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.4265128937218735e-05,
+      "loss": 4.011,
+      "step": 961024
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.4256742989708215e-05,
+      "loss": 4.0061,
+      "step": 961536
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.4248357042197695e-05,
+      "loss": 4.0014,
+      "step": 962048
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.423998747349091e-05,
+      "loss": 4.0034,
+      "step": 962560
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.423160152598039e-05,
+      "loss": 4.0055,
+      "step": 963072
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.422321557846987e-05,
+      "loss": 4.0173,
+      "step": 963584
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.421482963095935e-05,
+      "loss": 4.0097,
+      "step": 964096
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.420644368344883e-05,
+      "loss": 4.0136,
+      "step": 964608
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.4198057735938304e-05,
+      "loss": 4.0036,
+      "step": 965120
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.4189671788427784e-05,
+      "loss": 4.0037,
+      "step": 965632
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.4181285840917264e-05,
+      "loss": 4.0074,
+      "step": 966144
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.4172899893406744e-05,
+      "loss": 4.0157,
+      "step": 966656
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.4164513945896224e-05,
+      "loss": 4.0123,
+      "step": 967168
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.4156127998385704e-05,
+      "loss": 4.0114,
+      "step": 967680
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.4147742050875184e-05,
+      "loss": 4.0105,
+      "step": 968192
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.41393724821684e-05,
+      "loss": 4.0007,
+      "step": 968704
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.413098653465788e-05,
+      "loss": 4.0126,
+      "step": 969216
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.412260058714736e-05,
+      "loss": 4.0061,
+      "step": 969728
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.411423101844057e-05,
+      "loss": 4.007,
+      "step": 970240
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.410584507093005e-05,
+      "loss": 4.0018,
+      "step": 970752
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.409745912341953e-05,
+      "loss": 4.0046,
+      "step": 971264
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.408907317590901e-05,
+      "loss": 4.0043,
+      "step": 971776
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.408068722839849e-05,
+      "loss": 4.0002,
+      "step": 972288
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.407230128088797e-05,
+      "loss": 3.9934,
+      "step": 972800
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.406391533337745e-05,
+      "loss": 4.0073,
+      "step": 973312
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.405552938586693e-05,
+      "loss": 4.0133,
+      "step": 973824
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.404715981716014e-05,
+      "loss": 4.0022,
+      "step": 974336
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.403877386964962e-05,
+      "loss": 4.0135,
+      "step": 974848
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.40303879221391e-05,
+      "loss": 4.0227,
+      "step": 975360
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.4022001974628584e-05,
+      "loss": 4.0048,
+      "step": 975872
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.4013616027118064e-05,
+      "loss": 4.0022,
+      "step": 976384
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.4005230079607544e-05,
+      "loss": 4.0035,
+      "step": 976896
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.399684413209702e-05,
+      "loss": 4.0068,
+      "step": 977408
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.398847456339023e-05,
+      "loss": 4.0066,
+      "step": 977920
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.398008861587971e-05,
+      "loss": 4.0025,
+      "step": 978432
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.397170266836919e-05,
+      "loss": 4.0039,
+      "step": 978944
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.3963316720858666e-05,
+      "loss": 4.011,
+      "step": 979456
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.3954930773348146e-05,
+      "loss": 4.0033,
+      "step": 979968
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.3946544825837626e-05,
+      "loss": 4.0088,
+      "step": 980480
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.3938158878327106e-05,
+      "loss": 4.0052,
+      "step": 980992
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.392978930962032e-05,
+      "loss": 4.004,
+      "step": 981504
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.39214033621098e-05,
+      "loss": 4.0008,
+      "step": 982016
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.391301741459928e-05,
+      "loss": 4.0169,
+      "step": 982528
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.390463146708876e-05,
+      "loss": 4.0112,
+      "step": 983040
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.389624551957824e-05,
+      "loss": 4.0023,
+      "step": 983552
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.388785957206772e-05,
+      "loss": 4.0079,
+      "step": 984064
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.38794736245572e-05,
+      "loss": 4.0171,
+      "step": 984576
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.387108767704668e-05,
+      "loss": 3.9982,
+      "step": 985088
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.386270172953616e-05,
+      "loss": 4.0085,
+      "step": 985600
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.385433216082937e-05,
+      "loss": 4.0096,
+      "step": 986112
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.384594621331885e-05,
+      "loss": 4.0162,
+      "step": 986624
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.383756026580833e-05,
+      "loss": 4.0144,
+      "step": 987136
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.382917431829781e-05,
+      "loss": 4.0116,
+      "step": 987648
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.382080474959102e-05,
+      "loss": 4.005,
+      "step": 988160
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.3812418802080507e-05,
+      "loss": 4.0175,
+      "step": 988672
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.3804032854569986e-05,
+      "loss": 4.0046,
+      "step": 989184
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.3795646907059466e-05,
+      "loss": 3.9957,
+      "step": 989696
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.3787260959548946e-05,
+      "loss": 4.0057,
+      "step": 990208
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.3778907769645885e-05,
+      "loss": 3.9948,
+      "step": 990720
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.3770521822135365e-05,
+      "loss": 4.0065,
+      "step": 991232
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.3762135874624845e-05,
+      "loss": 4.002,
+      "step": 991744
+    },
+    {
+      "epoch": 0.03,
+      "eval_loss": 4.048644065856934,
+      "eval_runtime": 294.6646,
+      "eval_samples_per_second": 1295.001,
+      "eval_steps_per_second": 40.47,
+      "step": 992160
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.3753749927114324e-05,
+      "loss": 4.0052,
+      "step": 992256
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.3745363979603804e-05,
+      "loss": 3.9988,
+      "step": 992768
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.3736978032093284e-05,
+      "loss": 4.0037,
+      "step": 993280
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.3728608463386493e-05,
+      "loss": 4.016,
+      "step": 993792
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.3720222515875973e-05,
+      "loss": 4.0084,
+      "step": 994304
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.371183656836546e-05,
+      "loss": 4.0148,
+      "step": 994816
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.370345062085494e-05,
+      "loss": 4.0044,
+      "step": 995328
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.369506467334442e-05,
+      "loss": 4.0023,
+      "step": 995840
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.36866787258339e-05,
+      "loss": 3.99,
+      "step": 996352
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.367830915712711e-05,
+      "loss": 4.0097,
+      "step": 996864
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.366992320961659e-05,
+      "loss": 4.0061,
+      "step": 997376
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.366153726210607e-05,
+      "loss": 4.0055,
+      "step": 997888
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.365316769339928e-05,
+      "loss": 4.017,
+      "step": 998400
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.364478174588876e-05,
+      "loss": 3.9886,
+      "step": 998912
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.363639579837824e-05,
+      "loss": 3.9987,
+      "step": 999424
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.362800985086772e-05,
+      "loss": 3.9907,
+      "step": 999936
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.36196239033572e-05,
+      "loss": 4.0036,
+      "step": 1000448
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.361123795584668e-05,
+      "loss": 4.0045,
+      "step": 1000960
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.360285200833616e-05,
+      "loss": 3.9987,
+      "step": 1001472
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.359446606082564e-05,
+      "loss": 4.0101,
+      "step": 1001984
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.358608011331512e-05,
+      "loss": 4.0119,
+      "step": 1002496
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.35776941658046e-05,
+      "loss": 4.0085,
+      "step": 1003008
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.356930821829408e-05,
+      "loss": 4.005,
+      "step": 1003520
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.356092227078356e-05,
+      "loss": 4.0019,
+      "step": 1004032
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.355255270207677e-05,
+      "loss": 4.0157,
+      "step": 1004544
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.3544183133369976e-05,
+      "loss": 4.0034,
+      "step": 1005056
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.3535797185859456e-05,
+      "loss": 3.9948,
+      "step": 1005568
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.352742761715267e-05,
+      "loss": 4.0048,
+      "step": 1006080
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.351904166964215e-05,
+      "loss": 3.9916,
+      "step": 1006592
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.351065572213163e-05,
+      "loss": 3.9998,
+      "step": 1007104
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.350226977462111e-05,
+      "loss": 4.0027,
+      "step": 1007616
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.349388382711059e-05,
+      "loss": 4.0012,
+      "step": 1008128
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.348549787960007e-05,
+      "loss": 4.0101,
+      "step": 1008640
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.347711193208955e-05,
+      "loss": 4.012,
+      "step": 1009152
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.346872598457903e-05,
+      "loss": 4.001,
+      "step": 1009664
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.346037279467597e-05,
+      "loss": 4.0082,
+      "step": 1010176
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.345198684716545e-05,
+      "loss": 4.0095,
+      "step": 1010688
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.344360089965493e-05,
+      "loss": 3.9911,
+      "step": 1011200
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.343521495214441e-05,
+      "loss": 3.9979,
+      "step": 1011712
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.342682900463389e-05,
+      "loss": 3.9864,
+      "step": 1012224
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.341844305712337e-05,
+      "loss": 3.9946,
+      "step": 1012736
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.341005710961285e-05,
+      "loss": 3.9967,
+      "step": 1013248
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.340167116210233e-05,
+      "loss": 4.0143,
+      "step": 1013760
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.3393285214591816e-05,
+      "loss": 3.99,
+      "step": 1014272
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.3384899267081296e-05,
+      "loss": 3.9963,
+      "step": 1014784
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.3376513319570776e-05,
+      "loss": 4.0005,
+      "step": 1015296
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.3368127372060256e-05,
+      "loss": 4.0016,
+      "step": 1015808
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.3359757803353465e-05,
+      "loss": 3.9957,
+      "step": 1016320
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.3351371855842945e-05,
+      "loss": 3.985,
+      "step": 1016832
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.3343002287136154e-05,
+      "loss": 4.0027,
+      "step": 1017344
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.3334616339625634e-05,
+      "loss": 4.0122,
+      "step": 1017856
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.3326230392115114e-05,
+      "loss": 4.0072,
+      "step": 1018368
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.3317844444604594e-05,
+      "loss": 3.9959,
+      "step": 1018880
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.33094748758978e-05,
+      "loss": 3.9925,
+      "step": 1019392
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.330108892838728e-05,
+      "loss": 4.0032,
+      "step": 1019904
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.329270298087677e-05,
+      "loss": 3.9877,
+      "step": 1020416
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.328431703336625e-05,
+      "loss": 4.0143,
+      "step": 1020928
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.327594746465946e-05,
+      "loss": 3.9845,
+      "step": 1021440
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.326756151714894e-05,
+      "loss": 4.018,
+      "step": 1021952
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.325917556963842e-05,
+      "loss": 4.0033,
+      "step": 1022464
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.32507896221279e-05,
+      "loss": 3.9816,
+      "step": 1022976
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.324240367461738e-05,
+      "loss": 3.997,
+      "step": 1023488
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.323401772710686e-05,
+      "loss": 3.9979,
+      "step": 1024000
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.322564815840007e-05,
+      "loss": 3.9817,
+      "step": 1024512
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.321726221088955e-05,
+      "loss": 3.9924,
+      "step": 1025024
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.320887626337903e-05,
+      "loss": 4.0098,
+      "step": 1025536
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.320049031586851e-05,
+      "loss": 3.9931,
+      "step": 1026048
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.319210436835799e-05,
+      "loss": 3.9961,
+      "step": 1026560
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.318371842084747e-05,
+      "loss": 3.9798,
+      "step": 1027072
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.317534885214068e-05,
+      "loss": 3.9943,
+      "step": 1027584
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.316696290463016e-05,
+      "loss": 3.9893,
+      "step": 1028096
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.3158576957119636e-05,
+      "loss": 3.9998,
+      "step": 1028608
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.3150191009609116e-05,
+      "loss": 3.9931,
+      "step": 1029120
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.3141805062098596e-05,
+      "loss": 3.9965,
+      "step": 1029632
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.3133435493391805e-05,
+      "loss": 4.0083,
+      "step": 1030144
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.3125049545881285e-05,
+      "loss": 3.9983,
+      "step": 1030656
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.3116663598370765e-05,
+      "loss": 3.997,
+      "step": 1031168
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.3108277650860245e-05,
+      "loss": 3.9986,
+      "step": 1031680
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.3099891703349725e-05,
+      "loss": 3.996,
+      "step": 1032192
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.3091505755839205e-05,
+      "loss": 3.9996,
+      "step": 1032704
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.308311980832869e-05,
+      "loss": 3.9899,
+      "step": 1033216
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.307473386081817e-05,
+      "loss": 3.9966,
+      "step": 1033728
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.306636429211138e-05,
+      "loss": 4.0041,
+      "step": 1034240
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.305799472340459e-05,
+      "loss": 3.998,
+      "step": 1034752
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.304960877589407e-05,
+      "loss": 3.9807,
+      "step": 1035264
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.304122282838355e-05,
+      "loss": 3.9961,
+      "step": 1035776
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.303283688087303e-05,
+      "loss": 3.9933,
+      "step": 1036288
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.302446731216624e-05,
+      "loss": 3.9942,
+      "step": 1036800
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.301608136465572e-05,
+      "loss": 3.9918,
+      "step": 1037312
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.30076954171452e-05,
+      "loss": 3.9989,
+      "step": 1037824
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.299930946963468e-05,
+      "loss": 3.9829,
+      "step": 1038336
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.2990939900927895e-05,
+      "loss": 3.9908,
+      "step": 1038848
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.2982553953417375e-05,
+      "loss": 3.9867,
+      "step": 1039360
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.2974168005906855e-05,
+      "loss": 4.0057,
+      "step": 1039872
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.2965782058396335e-05,
+      "loss": 3.9923,
+      "step": 1040384
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.2957396110885815e-05,
+      "loss": 4.0037,
+      "step": 1040896
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.2949010163375295e-05,
+      "loss": 3.9852,
+      "step": 1041408
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.2940624215864775e-05,
+      "loss": 3.9912,
+      "step": 1041920
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.2932254647157984e-05,
+      "loss": 3.9929,
+      "step": 1042432
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.2923868699647464e-05,
+      "loss": 4.0,
+      "step": 1042944
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.2915482752136944e-05,
+      "loss": 3.9999,
+      "step": 1043456
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.2907096804626423e-05,
+      "loss": 3.9971,
+      "step": 1043968
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.2898710857115903e-05,
+      "loss": 3.994,
+      "step": 1044480
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.2890324909605383e-05,
+      "loss": 3.9881,
+      "step": 1044992
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.288193896209486e-05,
+      "loss": 4.0009,
+      "step": 1045504
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.287356939338808e-05,
+      "loss": 3.9873,
+      "step": 1046016
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.286518344587756e-05,
+      "loss": 3.9907,
+      "step": 1046528
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.285681387717077e-05,
+      "loss": 3.9912,
+      "step": 1047040
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.284842792966025e-05,
+      "loss": 3.9889,
+      "step": 1047552
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.284004198214973e-05,
+      "loss": 3.9902,
+      "step": 1048064
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.283165603463921e-05,
+      "loss": 3.9914,
+      "step": 1048576
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.282327008712869e-05,
+      "loss": 3.9791,
+      "step": 1049088
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.281488413961817e-05,
+      "loss": 3.9933,
+      "step": 1049600
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.280651457091138e-05,
+      "loss": 3.9977,
+      "step": 1050112
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.279812862340086e-05,
+      "loss": 3.9883,
+      "step": 1050624
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.278974267589034e-05,
+      "loss": 4.0013,
+      "step": 1051136
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.278135672837982e-05,
+      "loss": 4.008,
+      "step": 1051648
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.277298715967303e-05,
+      "loss": 3.9926,
+      "step": 1052160
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.276460121216251e-05,
+      "loss": 3.9896,
+      "step": 1052672
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.275621526465199e-05,
+      "loss": 3.987,
+      "step": 1053184
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.2747829317141466e-05,
+      "loss": 3.9925,
+      "step": 1053696
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.2739443369630946e-05,
+      "loss": 3.9913,
+      "step": 1054208
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.2731057422120426e-05,
+      "loss": 3.9901,
+      "step": 1054720
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.2722671474609906e-05,
+      "loss": 3.9877,
+      "step": 1055232
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.2714285527099386e-05,
+      "loss": 3.9994,
+      "step": 1055744
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.2705915958392595e-05,
+      "loss": 3.9922,
+      "step": 1056256
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.2697530010882075e-05,
+      "loss": 3.9888,
+      "step": 1056768
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.2689144063371555e-05,
+      "loss": 3.9919,
+      "step": 1057280
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.2680758115861035e-05,
+      "loss": 3.9918,
+      "step": 1057792
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.2672372168350515e-05,
+      "loss": 3.9857,
+      "step": 1058304
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.266398622084e-05,
+      "loss": 4.004,
+      "step": 1058816
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.265561665213321e-05,
+      "loss": 4.0004,
+      "step": 1059328
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.264723070462269e-05,
+      "loss": 3.992,
+      "step": 1059840
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.263884475711217e-05,
+      "loss": 3.9935,
+      "step": 1060352
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.263045880960165e-05,
+      "loss": 3.9992,
+      "step": 1060864
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.262207286209113e-05,
+      "loss": 3.9904,
+      "step": 1061376
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.261368691458061e-05,
+      "loss": 3.994,
+      "step": 1061888
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.260530096707009e-05,
+      "loss": 3.9936,
+      "step": 1062400
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.25969313983633e-05,
+      "loss": 4.0076,
+      "step": 1062912
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.258854545085278e-05,
+      "loss": 3.9936,
+      "step": 1063424
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.258015950334226e-05,
+      "loss": 3.9999,
+      "step": 1063936
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.257177355583174e-05,
+      "loss": 3.9896,
+      "step": 1064448
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.256338760832122e-05,
+      "loss": 4.0025,
+      "step": 1064960
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.25550016608107e-05,
+      "loss": 3.9905,
+      "step": 1065472
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.2546615713300186e-05,
+      "loss": 3.9822,
+      "step": 1065984
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.253822976578966e-05,
+      "loss": 3.9928,
+      "step": 1066496
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.2529860197082875e-05,
+      "loss": 3.9805,
+      "step": 1067008
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.2521474249572355e-05,
+      "loss": 3.9938,
+      "step": 1067520
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.2513104680865564e-05,
+      "loss": 3.9871,
+      "step": 1068032
+    },
+    {
+      "epoch": 1.03,
+      "eval_loss": 4.0411176681518555,
+      "eval_runtime": 286.0545,
+      "eval_samples_per_second": 1333.98,
+      "eval_steps_per_second": 41.688,
+      "step": 1068480
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.2504718733355044e-05,
+      "loss": 3.9871,
+      "step": 1068544
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.2496332785844524e-05,
+      "loss": 3.988,
+      "step": 1069056
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.2487946838334004e-05,
+      "loss": 3.9874,
+      "step": 1069568
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.247956089082348e-05,
+      "loss": 4.0032,
+      "step": 1070080
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.247117494331296e-05,
+      "loss": 3.9943,
+      "step": 1070592
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.246278899580244e-05,
+      "loss": 4.0021,
+      "step": 1071104
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.2454403048291924e-05,
+      "loss": 3.9902,
+      "step": 1071616
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.2446017100781404e-05,
+      "loss": 3.9868,
+      "step": 1072128
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.2437631153270884e-05,
+      "loss": 3.9815,
+      "step": 1072640
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.2429245205760364e-05,
+      "loss": 3.9931,
+      "step": 1073152
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.2420859258249843e-05,
+      "loss": 3.9918,
+      "step": 1073664
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.2412473310739323e-05,
+      "loss": 3.9928,
+      "step": 1074176
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.240410374203253e-05,
+      "loss": 4.0051,
+      "step": 1074688
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.239571779452201e-05,
+      "loss": 3.9783,
+      "step": 1075200
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.238733184701149e-05,
+      "loss": 3.9809,
+      "step": 1075712
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.237894589950097e-05,
+      "loss": 3.9789,
+      "step": 1076224
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.237055995199045e-05,
+      "loss": 3.9862,
+      "step": 1076736
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.236217400447993e-05,
+      "loss": 3.9908,
+      "step": 1077248
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.235378805696941e-05,
+      "loss": 3.988,
+      "step": 1077760
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.234540210945889e-05,
+      "loss": 3.9914,
+      "step": 1078272
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.233701616194837e-05,
+      "loss": 4.0008,
+      "step": 1078784
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.232863021443785e-05,
+      "loss": 3.9908,
+      "step": 1079296
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.232024426692733e-05,
+      "loss": 3.9945,
+      "step": 1079808
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.231185831941681e-05,
+      "loss": 3.986,
+      "step": 1080320
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.230348875071002e-05,
+      "loss": 4.0029,
+      "step": 1080832
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.22951028031995e-05,
+      "loss": 3.9899,
+      "step": 1081344
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.228671685568898e-05,
+      "loss": 3.9858,
+      "step": 1081856
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.227833090817846e-05,
+      "loss": 3.9864,
+      "step": 1082368
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.226996133947167e-05,
+      "loss": 3.9801,
+      "step": 1082880
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 3.226157539196115e-05,
+      "loss": 3.988,
+      "step": 1083392
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.225318944445063e-05,
+      "loss": 3.9887,
+      "step": 1083904
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.224481987574384e-05,
+      "loss": 3.9893,
+      "step": 1084416
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.2236433928233326e-05,
+      "loss": 3.9959,
+      "step": 1084928
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.2228047980722806e-05,
+      "loss": 3.9985,
+      "step": 1085440
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.2219662033212286e-05,
+      "loss": 3.9874,
+      "step": 1085952
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.2211292464505495e-05,
+      "loss": 3.9957,
+      "step": 1086464
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.2202906516994975e-05,
+      "loss": 3.9925,
+      "step": 1086976
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.2194520569484455e-05,
+      "loss": 3.9798,
+      "step": 1087488
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.2186134621973935e-05,
+      "loss": 3.9851,
+      "step": 1088000
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.2177748674463415e-05,
+      "loss": 3.9713,
+      "step": 1088512
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.2169379105756624e-05,
+      "loss": 3.9867,
+      "step": 1089024
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.2160993158246104e-05,
+      "loss": 3.9838,
+      "step": 1089536
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.2152607210735584e-05,
+      "loss": 3.9972,
+      "step": 1090048
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.2144221263225064e-05,
+      "loss": 3.9765,
+      "step": 1090560
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.2135835315714544e-05,
+      "loss": 3.9832,
+      "step": 1091072
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.212746574700776e-05,
+      "loss": 3.9896,
+      "step": 1091584
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.211907979949724e-05,
+      "loss": 3.9875,
+      "step": 1092096
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.211069385198672e-05,
+      "loss": 3.9874,
+      "step": 1092608
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.21023079044762e-05,
+      "loss": 3.9665,
+      "step": 1093120
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.209392195696568e-05,
+      "loss": 3.9878,
+      "step": 1093632
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.208553600945516e-05,
+      "loss": 3.9995,
+      "step": 1094144
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.207715006194464e-05,
+      "loss": 3.9955,
+      "step": 1094656
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.206878049323785e-05,
+      "loss": 3.9814,
+      "step": 1095168
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.206039454572733e-05,
+      "loss": 3.9789,
+      "step": 1095680
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.205200859821681e-05,
+      "loss": 3.9904,
+      "step": 1096192
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.204362265070629e-05,
+      "loss": 3.9745,
+      "step": 1096704
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.203523670319577e-05,
+      "loss": 3.9997,
+      "step": 1097216
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.202685075568525e-05,
+      "loss": 3.9697,
+      "step": 1097728
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.201846480817473e-05,
+      "loss": 4.0072,
+      "step": 1098240
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.201007886066421e-05,
+      "loss": 3.9895,
+      "step": 1098752
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.2001709291957424e-05,
+      "loss": 3.9671,
+      "step": 1099264
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.1993323344446904e-05,
+      "loss": 3.9807,
+      "step": 1099776
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.198493739693638e-05,
+      "loss": 3.988,
+      "step": 1100288
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.197656782822959e-05,
+      "loss": 3.9668,
+      "step": 1100800
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.196818188071907e-05,
+      "loss": 3.9758,
+      "step": 1101312
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.195979593320855e-05,
+      "loss": 3.9988,
+      "step": 1101824
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.1951409985698026e-05,
+      "loss": 3.9841,
+      "step": 1102336
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.1943024038187506e-05,
+      "loss": 3.9815,
+      "step": 1102848
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.1934638090676986e-05,
+      "loss": 3.9637,
+      "step": 1103360
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.1926252143166466e-05,
+      "loss": 3.9811,
+      "step": 1103872
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.191788257445968e-05,
+      "loss": 3.9749,
+      "step": 1104384
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.190949662694916e-05,
+      "loss": 3.9921,
+      "step": 1104896
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.190111067943864e-05,
+      "loss": 3.9753,
+      "step": 1105408
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.189272473192812e-05,
+      "loss": 3.986,
+      "step": 1105920
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.18843387844176e-05,
+      "loss": 3.992,
+      "step": 1106432
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.187596921571081e-05,
+      "loss": 3.9882,
+      "step": 1106944
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.186758326820029e-05,
+      "loss": 3.9817,
+      "step": 1107456
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.185919732068977e-05,
+      "loss": 3.9824,
+      "step": 1107968
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.185081137317925e-05,
+      "loss": 3.986,
+      "step": 1108480
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.184242542566873e-05,
+      "loss": 3.986,
+      "step": 1108992
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.183403947815821e-05,
+      "loss": 3.9789,
+      "step": 1109504
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.182565353064769e-05,
+      "loss": 3.981,
+      "step": 1110016
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.18172839619409e-05,
+      "loss": 3.9878,
+      "step": 1110528
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.180889801443038e-05,
+      "loss": 3.9866,
+      "step": 1111040
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.1800512066919866e-05,
+      "loss": 3.9673,
+      "step": 1111552
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.1792126119409346e-05,
+      "loss": 3.9836,
+      "step": 1112064
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.1783740171898826e-05,
+      "loss": 3.9813,
+      "step": 1112576
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.1775354224388306e-05,
+      "loss": 3.9813,
+      "step": 1113088
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.1766968276877786e-05,
+      "loss": 3.9814,
+      "step": 1113600
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 3.1758598708170995e-05,
+      "loss": 3.9849,
+      "step": 1114112
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.1750212760660475e-05,
+      "loss": 3.973,
+      "step": 1114624
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.1741826813149955e-05,
+      "loss": 3.9734,
+      "step": 1115136
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.1733440865639435e-05,
+      "loss": 3.9754,
+      "step": 1115648
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.1725054918128915e-05,
+      "loss": 3.9908,
+      "step": 1116160
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.171666897061839e-05,
+      "loss": 3.9809,
+      "step": 1116672
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.1708299401911604e-05,
+      "loss": 3.9888,
+      "step": 1117184
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.1699913454401084e-05,
+      "loss": 3.9777,
+      "step": 1117696
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.1691527506890564e-05,
+      "loss": 3.9797,
+      "step": 1118208
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.1683141559380044e-05,
+      "loss": 3.9763,
+      "step": 1118720
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.1674755611869524e-05,
+      "loss": 3.99,
+      "step": 1119232
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.1666369664359004e-05,
+      "loss": 3.9822,
+      "step": 1119744
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.1657983716848484e-05,
+      "loss": 3.9851,
+      "step": 1120256
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.1649597769337964e-05,
+      "loss": 3.9812,
+      "step": 1120768
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.1641211821827444e-05,
+      "loss": 3.9731,
+      "step": 1121280
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.163284225312065e-05,
+      "loss": 3.9926,
+      "step": 1121792
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.162447268441386e-05,
+      "loss": 3.9694,
+      "step": 1122304
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.161608673690334e-05,
+      "loss": 3.9772,
+      "step": 1122816
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.160770078939282e-05,
+      "loss": 3.9825,
+      "step": 1123328
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.15993148418823e-05,
+      "loss": 3.9721,
+      "step": 1123840
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.159094527317552e-05,
+      "loss": 3.976,
+      "step": 1124352
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.1582559325665e-05,
+      "loss": 3.9792,
+      "step": 1124864
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.157417337815448e-05,
+      "loss": 3.9673,
+      "step": 1125376
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.156578743064396e-05,
+      "loss": 3.9829,
+      "step": 1125888
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.155740148313344e-05,
+      "loss": 3.9807,
+      "step": 1126400
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.154901553562292e-05,
+      "loss": 3.9804,
+      "step": 1126912
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.1540645966916126e-05,
+      "loss": 3.9894,
+      "step": 1127424
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.1532260019405606e-05,
+      "loss": 3.9956,
+      "step": 1127936
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.1523874071895086e-05,
+      "loss": 3.9827,
+      "step": 1128448
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.1515488124384566e-05,
+      "loss": 3.9771,
+      "step": 1128960
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.1507102176874046e-05,
+      "loss": 3.9746,
+      "step": 1129472
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.1498716229363526e-05,
+      "loss": 3.9793,
+      "step": 1129984
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.1490330281853006e-05,
+      "loss": 3.9766,
+      "step": 1130496
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.1481944334342486e-05,
+      "loss": 3.9802,
+      "step": 1131008
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.147355838683197e-05,
+      "loss": 3.9707,
+      "step": 1131520
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.146517243932145e-05,
+      "loss": 3.9871,
+      "step": 1132032
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.145680287061466e-05,
+      "loss": 3.977,
+      "step": 1132544
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.144841692310414e-05,
+      "loss": 3.9807,
+      "step": 1133056
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.144003097559362e-05,
+      "loss": 3.9785,
+      "step": 1133568
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.143166140688683e-05,
+      "loss": 3.9765,
+      "step": 1134080
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.142327545937631e-05,
+      "loss": 3.9731,
+      "step": 1134592
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.141488951186579e-05,
+      "loss": 3.9907,
+      "step": 1135104
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.140650356435527e-05,
+      "loss": 3.9864,
+      "step": 1135616
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.139811761684475e-05,
+      "loss": 3.9843,
+      "step": 1136128
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.138974804813796e-05,
+      "loss": 3.9746,
+      "step": 1136640
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.138136210062744e-05,
+      "loss": 3.9849,
+      "step": 1137152
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.137297615311692e-05,
+      "loss": 3.9797,
+      "step": 1137664
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.13645902056064e-05,
+      "loss": 3.9815,
+      "step": 1138176
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.1356220636899616e-05,
+      "loss": 3.9846,
+      "step": 1138688
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.1347834689389095e-05,
+      "loss": 3.9901,
+      "step": 1139200
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.1339448741878575e-05,
+      "loss": 3.9825,
+      "step": 1139712
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.133106279436805e-05,
+      "loss": 3.9892,
+      "step": 1140224
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.132267684685753e-05,
+      "loss": 3.9799,
+      "step": 1140736
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.131429089934701e-05,
+      "loss": 3.9866,
+      "step": 1141248
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.130590495183649e-05,
+      "loss": 3.9758,
+      "step": 1141760
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.129751900432597e-05,
+      "loss": 3.9743,
+      "step": 1142272
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.128914943561918e-05,
+      "loss": 3.9798,
+      "step": 1142784
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.128076348810866e-05,
+      "loss": 3.9656,
+      "step": 1143296
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.1272377540598144e-05,
+      "loss": 3.9791,
+      "step": 1143808
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 3.126400797189135e-05,
+      "loss": 3.9785,
+      "step": 1144320
+    },
+    {
+      "epoch": 0.03,
+      "eval_loss": 4.034204006195068,
+      "eval_runtime": 291.2505,
+      "eval_samples_per_second": 1310.181,
+      "eval_steps_per_second": 40.944,
+      "step": 1144800
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.125562202438083e-05,
+      "loss": 3.9767,
+      "step": 1144832
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.124723607687031e-05,
+      "loss": 3.976,
+      "step": 1145344
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.123885012935979e-05,
+      "loss": 3.9756,
+      "step": 1145856
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.123046418184927e-05,
+      "loss": 3.9892,
+      "step": 1146368
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.122207823433875e-05,
+      "loss": 3.9824,
+      "step": 1146880
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.121369228682823e-05,
+      "loss": 3.9915,
+      "step": 1147392
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.120530633931771e-05,
+      "loss": 3.9759,
+      "step": 1147904
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.119693677061092e-05,
+      "loss": 3.9769,
+      "step": 1148416
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.11885508231004e-05,
+      "loss": 3.9678,
+      "step": 1148928
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.118016487558988e-05,
+      "loss": 3.9844,
+      "step": 1149440
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.117177892807936e-05,
+      "loss": 3.9789,
+      "step": 1149952
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.116340935937258e-05,
+      "loss": 3.9809,
+      "step": 1150464
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.115503979066579e-05,
+      "loss": 3.9843,
+      "step": 1150976
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.1146670221958996e-05,
+      "loss": 3.9729,
+      "step": 1151488
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.1138284274448476e-05,
+      "loss": 3.9729,
+      "step": 1152000
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.1129898326937956e-05,
+      "loss": 3.9692,
+      "step": 1152512
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.1121512379427436e-05,
+      "loss": 3.9676,
+      "step": 1153024
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.1113126431916916e-05,
+      "loss": 3.9767,
+      "step": 1153536
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.1104740484406396e-05,
+      "loss": 3.9772,
+      "step": 1154048
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.1096354536895876e-05,
+      "loss": 3.9789,
+      "step": 1154560
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.1087968589385356e-05,
+      "loss": 3.99,
+      "step": 1155072
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.1079599020678565e-05,
+      "loss": 3.9785,
+      "step": 1155584
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.107121307316805e-05,
+      "loss": 3.9814,
+      "step": 1156096
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.106282712565753e-05,
+      "loss": 3.976,
+      "step": 1156608
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.105444117814701e-05,
+      "loss": 3.9882,
+      "step": 1157120
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.104605523063649e-05,
+      "loss": 3.9732,
+      "step": 1157632
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.103766928312597e-05,
+      "loss": 3.9748,
+      "step": 1158144
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.102929971441918e-05,
+      "loss": 3.9719,
+      "step": 1158656
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.102091376690866e-05,
+      "loss": 3.9723,
+      "step": 1159168
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 3.101252781939814e-05,
+      "loss": 3.9755,
+      "step": 1159680
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.100414187188762e-05,
+      "loss": 3.9743,
+      "step": 1160192
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.09957559243771e-05,
+      "loss": 3.9811,
+      "step": 1160704
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.098736997686658e-05,
+      "loss": 3.9845,
+      "step": 1161216
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.0978984029356053e-05,
+      "loss": 3.9802,
+      "step": 1161728
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.097061446064927e-05,
+      "loss": 3.9781,
+      "step": 1162240
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.0962244891942485e-05,
+      "loss": 3.9824,
+      "step": 1162752
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.0953858944431965e-05,
+      "loss": 3.9813,
+      "step": 1163264
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.0945472996921445e-05,
+      "loss": 3.9656,
+      "step": 1163776
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.0937087049410925e-05,
+      "loss": 3.9765,
+      "step": 1164288
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.0928701101900405e-05,
+      "loss": 3.959,
+      "step": 1164800
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.0920315154389885e-05,
+      "loss": 3.9719,
+      "step": 1165312
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.091192920687936e-05,
+      "loss": 3.968,
+      "step": 1165824
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.090354325936884e-05,
+      "loss": 3.9861,
+      "step": 1166336
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.089515731185832e-05,
+      "loss": 3.9695,
+      "step": 1166848
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.08867713643478e-05,
+      "loss": 3.9683,
+      "step": 1167360
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.087838541683728e-05,
+      "loss": 3.9745,
+      "step": 1167872
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.086999946932676e-05,
+      "loss": 3.9773,
+      "step": 1168384
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.08616462794237e-05,
+      "loss": 3.9729,
+      "step": 1168896
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.085326033191318e-05,
+      "loss": 3.956,
+      "step": 1169408
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.084487438440266e-05,
+      "loss": 3.9715,
+      "step": 1169920
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.083648843689214e-05,
+      "loss": 3.9883,
+      "step": 1170432
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.082810248938162e-05,
+      "loss": 3.9816,
+      "step": 1170944
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.08197165418711e-05,
+      "loss": 3.9725,
+      "step": 1171456
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.081133059436058e-05,
+      "loss": 3.9676,
+      "step": 1171968
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.080294464685006e-05,
+      "loss": 3.9809,
+      "step": 1172480
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.079455869933954e-05,
+      "loss": 3.9612,
+      "step": 1172992
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.078618913063275e-05,
+      "loss": 3.9877,
+      "step": 1173504
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.077780318312223e-05,
+      "loss": 3.9565,
+      "step": 1174016
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.076941723561171e-05,
+      "loss": 3.9876,
+      "step": 1174528
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.076103128810119e-05,
+      "loss": 3.9816,
+      "step": 1175040
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.075264534059067e-05,
+      "loss": 3.9576,
+      "step": 1175552
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.074425939308015e-05,
+      "loss": 3.97,
+      "step": 1176064
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.073587344556964e-05,
+      "loss": 3.9779,
+      "step": 1176576
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.072750387686285e-05,
+      "loss": 3.9505,
+      "step": 1177088
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.071911792935233e-05,
+      "loss": 3.9668,
+      "step": 1177600
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.071073198184181e-05,
+      "loss": 3.9837,
+      "step": 1178112
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.070234603433129e-05,
+      "loss": 3.9718,
+      "step": 1178624
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.069396008682077e-05,
+      "loss": 3.9691,
+      "step": 1179136
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.0685590518113976e-05,
+      "loss": 3.9563,
+      "step": 1179648
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.0677204570603456e-05,
+      "loss": 3.9684,
+      "step": 1180160
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.0668818623092936e-05,
+      "loss": 3.9605,
+      "step": 1180672
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.0660432675582416e-05,
+      "loss": 3.9795,
+      "step": 1181184
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.0652063106875625e-05,
+      "loss": 3.9636,
+      "step": 1181696
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.0643677159365105e-05,
+      "loss": 3.9758,
+      "step": 1182208
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.063529121185459e-05,
+      "loss": 3.979,
+      "step": 1182720
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.062690526434407e-05,
+      "loss": 3.9752,
+      "step": 1183232
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.0618519316833545e-05,
+      "loss": 3.9695,
+      "step": 1183744
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.061014974812676e-05,
+      "loss": 3.9713,
+      "step": 1184256
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.060176380061624e-05,
+      "loss": 3.9756,
+      "step": 1184768
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.059337785310572e-05,
+      "loss": 3.9747,
+      "step": 1185280
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.0584991905595194e-05,
+      "loss": 3.9695,
+      "step": 1185792
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.0576605958084674e-05,
+      "loss": 3.9689,
+      "step": 1186304
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.0568220010574154e-05,
+      "loss": 3.975,
+      "step": 1186816
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.0559834063063634e-05,
+      "loss": 3.9751,
+      "step": 1187328
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.0551448115553114e-05,
+      "loss": 3.9564,
+      "step": 1187840
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.0543062168042594e-05,
+      "loss": 3.9698,
+      "step": 1188352
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.053469259933581e-05,
+      "loss": 3.968,
+      "step": 1188864
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.052630665182529e-05,
+      "loss": 3.9694,
+      "step": 1189376
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.051792070431477e-05,
+      "loss": 3.9716,
+      "step": 1189888
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 3.050955113560798e-05,
+      "loss": 3.9708,
+      "step": 1190400
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.050116518809746e-05,
+      "loss": 3.964,
+      "step": 1190912
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.049277924058694e-05,
+      "loss": 3.9583,
+      "step": 1191424
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.048439329307642e-05,
+      "loss": 3.9639,
+      "step": 1191936
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.04760073455659e-05,
+      "loss": 3.9779,
+      "step": 1192448
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.046762139805538e-05,
+      "loss": 3.9691,
+      "step": 1192960
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.045925182934859e-05,
+      "loss": 3.9779,
+      "step": 1193472
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.045086588183807e-05,
+      "loss": 3.9649,
+      "step": 1193984
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.044247993432755e-05,
+      "loss": 3.9657,
+      "step": 1194496
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.043411036562076e-05,
+      "loss": 3.963,
+      "step": 1195008
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.042572441811024e-05,
+      "loss": 3.9804,
+      "step": 1195520
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.041733847059972e-05,
+      "loss": 3.9689,
+      "step": 1196032
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.04089525230892e-05,
+      "loss": 3.9768,
+      "step": 1196544
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.0400566575578683e-05,
+      "loss": 3.9668,
+      "step": 1197056
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.0392180628068163e-05,
+      "loss": 3.9615,
+      "step": 1197568
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.0383811059361372e-05,
+      "loss": 3.9795,
+      "step": 1198080
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.0375425111850852e-05,
+      "loss": 3.9616,
+      "step": 1198592
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.0367039164340332e-05,
+      "loss": 3.9639,
+      "step": 1199104
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.0358653216829812e-05,
+      "loss": 3.9696,
+      "step": 1199616
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.0350267269319292e-05,
+      "loss": 3.9673,
+      "step": 1200128
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.0341881321808775e-05,
+      "loss": 3.9609,
+      "step": 1200640
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.0333495374298255e-05,
+      "loss": 3.9658,
+      "step": 1201152
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.032510942678773e-05,
+      "loss": 3.9561,
+      "step": 1201664
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.031672347927721e-05,
+      "loss": 3.9693,
+      "step": 1202176
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.030833753176669e-05,
+      "loss": 3.9691,
+      "step": 1202688
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.029995158425617e-05,
+      "loss": 3.9676,
+      "step": 1203200
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.029156563674565e-05,
+      "loss": 3.974,
+      "step": 1203712
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.028319606803886e-05,
+      "loss": 3.9857,
+      "step": 1204224
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.027481012052834e-05,
+      "loss": 3.971,
+      "step": 1204736
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.026642417301782e-05,
+      "loss": 3.9653,
+      "step": 1205248
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.02580382255073e-05,
+      "loss": 3.9591,
+      "step": 1205760
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.0249668656800513e-05,
+      "loss": 3.967,
+      "step": 1206272
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.0241282709289993e-05,
+      "loss": 3.9648,
+      "step": 1206784
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.0232896761779473e-05,
+      "loss": 3.9672,
+      "step": 1207296
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.0224510814268953e-05,
+      "loss": 3.9621,
+      "step": 1207808
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.0216124866758433e-05,
+      "loss": 3.9761,
+      "step": 1208320
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.0207738919247913e-05,
+      "loss": 3.9675,
+      "step": 1208832
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.0199352971737393e-05,
+      "loss": 3.9701,
+      "step": 1209344
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.0190967024226873e-05,
+      "loss": 3.964,
+      "step": 1209856
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.0182597455520085e-05,
+      "loss": 3.9656,
+      "step": 1210368
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.0174227886813294e-05,
+      "loss": 3.96,
+      "step": 1210880
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.0165841939302774e-05,
+      "loss": 3.9774,
+      "step": 1211392
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.0157455991792254e-05,
+      "loss": 3.9796,
+      "step": 1211904
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.0149070044281734e-05,
+      "loss": 3.9738,
+      "step": 1212416
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.0140684096771214e-05,
+      "loss": 3.9626,
+      "step": 1212928
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.0132298149260698e-05,
+      "loss": 3.9688,
+      "step": 1213440
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.0123912201750177e-05,
+      "loss": 3.9712,
+      "step": 1213952
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.0115542633043387e-05,
+      "loss": 3.9689,
+      "step": 1214464
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.0107156685532867e-05,
+      "loss": 3.9725,
+      "step": 1214976
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.0098770738022347e-05,
+      "loss": 3.9785,
+      "step": 1215488
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.009040116931556e-05,
+      "loss": 3.9692,
+      "step": 1216000
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.008201522180504e-05,
+      "loss": 3.9805,
+      "step": 1216512
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.007362927429452e-05,
+      "loss": 3.9623,
+      "step": 1217024
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.0065243326784e-05,
+      "loss": 3.9747,
+      "step": 1217536
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.0056873758077208e-05,
+      "loss": 3.9686,
+      "step": 1218048
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.0048487810566688e-05,
+      "loss": 3.9586,
+      "step": 1218560
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.0040101863056168e-05,
+      "loss": 3.9718,
+      "step": 1219072
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.003171591554565e-05,
+      "loss": 3.9586,
+      "step": 1219584
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.002332996803513e-05,
+      "loss": 3.9634,
+      "step": 1220096
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 3.001494402052461e-05,
+      "loss": 3.969,
+      "step": 1220608
+    },
+    {
+      "epoch": 1.03,
+      "learning_rate": 3.000655807301409e-05,
+      "loss": 3.9657,
+      "step": 1221120
+    },
+    {
+      "epoch": 1.03,
+      "eval_loss": 4.028191089630127,
+      "eval_runtime": 292.8413,
+      "eval_samples_per_second": 1303.064,
+      "eval_steps_per_second": 40.722,
+      "step": 1221120
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.9998172125503564e-05,
+      "loss": 3.9641,
+      "step": 1221632
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.9989786177993044e-05,
+      "loss": 3.9657,
+      "step": 1222144
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.9981400230482528e-05,
+      "loss": 3.9755,
+      "step": 1222656
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.9973014282972008e-05,
+      "loss": 3.9712,
+      "step": 1223168
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.9964628335461487e-05,
+      "loss": 3.98,
+      "step": 1223680
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.9956242387950967e-05,
+      "loss": 3.9631,
+      "step": 1224192
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.9947856440440447e-05,
+      "loss": 3.9607,
+      "step": 1224704
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.9939470492929927e-05,
+      "loss": 3.9617,
+      "step": 1225216
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.9931084545419407e-05,
+      "loss": 3.9707,
+      "step": 1225728
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.9922698597908887e-05,
+      "loss": 3.9673,
+      "step": 1226240
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.9914312650398367e-05,
+      "loss": 3.9702,
+      "step": 1226752
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.9905926702887847e-05,
+      "loss": 3.9706,
+      "step": 1227264
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.989755713418106e-05,
+      "loss": 3.9599,
+      "step": 1227776
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.988917118667054e-05,
+      "loss": 3.9642,
+      "step": 1228288
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.988080161796375e-05,
+      "loss": 3.9564,
+      "step": 1228800
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.987241567045323e-05,
+      "loss": 3.9562,
+      "step": 1229312
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.986402972294271e-05,
+      "loss": 3.9661,
+      "step": 1229824
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.9855643775432192e-05,
+      "loss": 3.9661,
+      "step": 1230336
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.9847257827921672e-05,
+      "loss": 3.9672,
+      "step": 1230848
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.9838871880411152e-05,
+      "loss": 3.9824,
+      "step": 1231360
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.9830485932900625e-05,
+      "loss": 3.965,
+      "step": 1231872
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.9822099985390105e-05,
+      "loss": 3.9713,
+      "step": 1232384
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.9813714037879585e-05,
+      "loss": 3.9648,
+      "step": 1232896
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.9805328090369068e-05,
+      "loss": 3.9766,
+      "step": 1233408
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.9796942142858548e-05,
+      "loss": 3.966,
+      "step": 1233920
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.9788556195348028e-05,
+      "loss": 3.9655,
+      "step": 1234432
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.9780186626641237e-05,
+      "loss": 3.9584,
+      "step": 1234944
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.9771800679130717e-05,
+      "loss": 3.9646,
+      "step": 1235456
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.9763414731620197e-05,
+      "loss": 3.9598,
+      "step": 1235968
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.9755028784109677e-05,
+      "loss": 3.9657,
+      "step": 1236480
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.974664283659916e-05,
+      "loss": 3.9678,
+      "step": 1236992
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.973825688908864e-05,
+      "loss": 3.9686,
+      "step": 1237504
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.972987094157812e-05,
+      "loss": 3.9706,
+      "step": 1238016
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.972150137287133e-05,
+      "loss": 3.9686,
+      "step": 1238528
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.971313180416454e-05,
+      "loss": 3.9667,
+      "step": 1239040
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.9704745856654022e-05,
+      "loss": 3.9738,
+      "step": 1239552
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.9696359909143502e-05,
+      "loss": 3.9573,
+      "step": 1240064
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.9687973961632982e-05,
+      "loss": 3.9626,
+      "step": 1240576
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.9679588014122462e-05,
+      "loss": 3.9493,
+      "step": 1241088
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.9671202066611942e-05,
+      "loss": 3.9584,
+      "step": 1241600
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.9662816119101422e-05,
+      "loss": 3.9579,
+      "step": 1242112
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.96544301715909e-05,
+      "loss": 3.9702,
+      "step": 1242624
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.964604422408038e-05,
+      "loss": 3.9595,
+      "step": 1243136
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.963765827656986e-05,
+      "loss": 3.9575,
+      "step": 1243648
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.9629272329059345e-05,
+      "loss": 3.9667,
+      "step": 1244160
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.9620886381548818e-05,
+      "loss": 3.9671,
+      "step": 1244672
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.9612500434038298e-05,
+      "loss": 3.9591,
+      "step": 1245184
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.9604114486527778e-05,
+      "loss": 3.9521,
+      "step": 1245696
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.9595728539017258e-05,
+      "loss": 3.9559,
+      "step": 1246208
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.9587342591506738e-05,
+      "loss": 3.9769,
+      "step": 1246720
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.9578989401603686e-05,
+      "loss": 3.9734,
+      "step": 1247232
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.9570603454093166e-05,
+      "loss": 3.9573,
+      "step": 1247744
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.956221750658264e-05,
+      "loss": 3.9586,
+      "step": 1248256
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.955383155907212e-05,
+      "loss": 3.9683,
+      "step": 1248768
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.95454456115616e-05,
+      "loss": 3.9508,
+      "step": 1249280
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.9537076042854815e-05,
+      "loss": 3.972,
+      "step": 1249792
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.9528690095344292e-05,
+      "loss": 3.9488,
+      "step": 1250304
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.9520304147833772e-05,
+      "loss": 3.9793,
+      "step": 1250816
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.9511918200323252e-05,
+      "loss": 3.9671,
+      "step": 1251328
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.9503532252812732e-05,
+      "loss": 3.9523,
+      "step": 1251840
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.949514630530221e-05,
+      "loss": 3.9528,
+      "step": 1252352
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.948676035779169e-05,
+      "loss": 3.97,
+      "step": 1252864
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.947837441028117e-05,
+      "loss": 3.9384,
+      "step": 1253376
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.9469988462770655e-05,
+      "loss": 3.9557,
+      "step": 1253888
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.9461602515260135e-05,
+      "loss": 3.9693,
+      "step": 1254400
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.9453216567749615e-05,
+      "loss": 3.962,
+      "step": 1254912
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.9444830620239095e-05,
+      "loss": 3.9586,
+      "step": 1255424
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.9436477430336033e-05,
+      "loss": 3.9453,
+      "step": 1255936
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.9428091482825516e-05,
+      "loss": 3.9539,
+      "step": 1256448
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.9419705535314996e-05,
+      "loss": 3.9498,
+      "step": 1256960
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.9411319587804476e-05,
+      "loss": 3.9723,
+      "step": 1257472
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.9402933640293956e-05,
+      "loss": 3.9521,
+      "step": 1257984
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.9394547692783436e-05,
+      "loss": 3.9642,
+      "step": 1258496
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.9386161745272916e-05,
+      "loss": 3.9651,
+      "step": 1259008
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.9377792176566125e-05,
+      "loss": 3.97,
+      "step": 1259520
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.936940622905561e-05,
+      "loss": 3.9547,
+      "step": 1260032
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.936102028154509e-05,
+      "loss": 3.9606,
+      "step": 1260544
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.935263433403457e-05,
+      "loss": 3.9642,
+      "step": 1261056
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.934424838652405e-05,
+      "loss": 3.964,
+      "step": 1261568
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.933586243901353e-05,
+      "loss": 3.9573,
+      "step": 1262080
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.9327476491503e-05,
+      "loss": 3.9568,
+      "step": 1262592
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.9319090543992485e-05,
+      "loss": 3.9636,
+      "step": 1263104
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.93107209752857e-05,
+      "loss": 3.962,
+      "step": 1263616
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.9302335027775174e-05,
+      "loss": 3.9498,
+      "step": 1264128
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.929396545906839e-05,
+      "loss": 3.9544,
+      "step": 1264640
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.928557951155787e-05,
+      "loss": 3.959,
+      "step": 1265152
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.927719356404735e-05,
+      "loss": 3.9641,
+      "step": 1265664
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.9268807616536826e-05,
+      "loss": 3.9565,
+      "step": 1266176
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.9260421669026306e-05,
+      "loss": 3.9597,
+      "step": 1266688
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.9252035721515786e-05,
+      "loss": 3.9546,
+      "step": 1267200
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.9243649774005266e-05,
+      "loss": 3.9489,
+      "step": 1267712
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.9235263826494746e-05,
+      "loss": 3.9524,
+      "step": 1268224
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.9226894257787955e-05,
+      "loss": 3.9636,
+      "step": 1268736
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.921850831027744e-05,
+      "loss": 3.9573,
+      "step": 1269248
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.9210138741570648e-05,
+      "loss": 3.9688,
+      "step": 1269760
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.9201769172863864e-05,
+      "loss": 3.9629,
+      "step": 1270272
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.9193383225353344e-05,
+      "loss": 3.9509,
+      "step": 1270784
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.9184997277842823e-05,
+      "loss": 3.953,
+      "step": 1271296
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.91766113303323e-05,
+      "loss": 3.9657,
+      "step": 1271808
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.916822538282178e-05,
+      "loss": 3.9634,
+      "step": 1272320
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.915983943531126e-05,
+      "loss": 3.9647,
+      "step": 1272832
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.915145348780074e-05,
+      "loss": 3.9548,
+      "step": 1273344
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.914306754029022e-05,
+      "loss": 3.953,
+      "step": 1273856
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.91346815927797e-05,
+      "loss": 3.9703,
+      "step": 1274368
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.912631202407291e-05,
+      "loss": 3.9484,
+      "step": 1274880
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.9117926076562392e-05,
+      "loss": 3.9572,
+      "step": 1275392
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.9109540129051872e-05,
+      "loss": 3.9579,
+      "step": 1275904
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.9101154181541352e-05,
+      "loss": 3.9586,
+      "step": 1276416
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.9092768234030832e-05,
+      "loss": 3.9486,
+      "step": 1276928
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.9084382286520312e-05,
+      "loss": 3.9541,
+      "step": 1277440
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.9075996339009792e-05,
+      "loss": 3.948,
+      "step": 1277952
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.9067610391499272e-05,
+      "loss": 3.9578,
+      "step": 1278464
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.9059224443988752e-05,
+      "loss": 3.9555,
+      "step": 1278976
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.9050854875281964e-05,
+      "loss": 3.9551,
+      "step": 1279488
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.9042468927771444e-05,
+      "loss": 3.9651,
+      "step": 1280000
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.9034082980260924e-05,
+      "loss": 3.9753,
+      "step": 1280512
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.9025713411554133e-05,
+      "loss": 3.9613,
+      "step": 1281024
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.9017327464043613e-05,
+      "loss": 3.9532,
+      "step": 1281536
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.9008941516533093e-05,
+      "loss": 3.9516,
+      "step": 1282048
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.9000555569022573e-05,
+      "loss": 3.9527,
+      "step": 1282560
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.8992169621512057e-05,
+      "loss": 3.9583,
+      "step": 1283072
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.8983783674001537e-05,
+      "loss": 3.9577,
+      "step": 1283584
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.897539772649101e-05,
+      "loss": 3.9488,
+      "step": 1284096
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.896701177898049e-05,
+      "loss": 3.9617,
+      "step": 1284608
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.8958642210273706e-05,
+      "loss": 3.9583,
+      "step": 1285120
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.8950256262763186e-05,
+      "loss": 3.9572,
+      "step": 1285632
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.8941870315252662e-05,
+      "loss": 3.9535,
+      "step": 1286144
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.8933484367742142e-05,
+      "loss": 3.9559,
+      "step": 1286656
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.8925098420231622e-05,
+      "loss": 3.9507,
+      "step": 1287168
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.8916745230328567e-05,
+      "loss": 3.9673,
+      "step": 1287680
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.8908359282818047e-05,
+      "loss": 3.9675,
+      "step": 1288192
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.8899973335307527e-05,
+      "loss": 3.9648,
+      "step": 1288704
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.889158738779701e-05,
+      "loss": 3.9488,
+      "step": 1289216
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.8883201440286483e-05,
+      "loss": 3.963,
+      "step": 1289728
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.8874815492775963e-05,
+      "loss": 3.9588,
+      "step": 1290240
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.8866429545265443e-05,
+      "loss": 3.9597,
+      "step": 1290752
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.885805997655866e-05,
+      "loss": 3.9564,
+      "step": 1291264
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.8849674029048136e-05,
+      "loss": 3.9678,
+      "step": 1291776
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.8841304460341352e-05,
+      "loss": 3.9574,
+      "step": 1292288
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.883291851283083e-05,
+      "loss": 3.9755,
+      "step": 1292800
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.8824532565320305e-05,
+      "loss": 3.9502,
+      "step": 1293312
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.8816146617809785e-05,
+      "loss": 3.9695,
+      "step": 1293824
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.8807760670299265e-05,
+      "loss": 3.9552,
+      "step": 1294336
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.8799374722788748e-05,
+      "loss": 3.9517,
+      "step": 1294848
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.8790988775278228e-05,
+      "loss": 3.9585,
+      "step": 1295360
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.8782602827767708e-05,
+      "loss": 3.9493,
+      "step": 1295872
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.8774216880257188e-05,
+      "loss": 3.9541,
+      "step": 1296384
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.8765847311550397e-05,
+      "loss": 3.9585,
+      "step": 1296896
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.8757461364039877e-05,
+      "loss": 3.9505,
+      "step": 1297408
+    },
+    {
+      "epoch": 0.03,
+      "eval_loss": 4.022686004638672,
+      "eval_runtime": 294.2769,
+      "eval_samples_per_second": 1296.707,
+      "eval_steps_per_second": 40.523,
+      "step": 1297440
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.8749075416529357e-05,
+      "loss": 3.957,
+      "step": 1297920
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.874068946901884e-05,
+      "loss": 3.9538,
+      "step": 1298432
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.873230352150832e-05,
+      "loss": 3.9669,
+      "step": 1298944
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.87239175739978e-05,
+      "loss": 3.9606,
+      "step": 1299456
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.871553162648728e-05,
+      "loss": 3.9706,
+      "step": 1299968
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.870714567897676e-05,
+      "loss": 3.9521,
+      "step": 1300480
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.869877611026997e-05,
+      "loss": 3.9548,
+      "step": 1300992
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.869039016275945e-05,
+      "loss": 3.9484,
+      "step": 1301504
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.8682004215248933e-05,
+      "loss": 3.9608,
+      "step": 1302016
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.8673618267738412e-05,
+      "loss": 3.9556,
+      "step": 1302528
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.866524869903162e-05,
+      "loss": 3.9563,
+      "step": 1303040
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.86568627515211e-05,
+      "loss": 3.9631,
+      "step": 1303552
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.864849318281431e-05,
+      "loss": 3.946,
+      "step": 1304064
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.8640107235303794e-05,
+      "loss": 3.958,
+      "step": 1304576
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.8631721287793274e-05,
+      "loss": 3.9476,
+      "step": 1305088
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.8623335340282754e-05,
+      "loss": 3.9483,
+      "step": 1305600
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.8614965771575963e-05,
+      "loss": 3.9541,
+      "step": 1306112
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.8606579824065443e-05,
+      "loss": 3.9522,
+      "step": 1306624
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.8598193876554923e-05,
+      "loss": 3.9567,
+      "step": 1307136
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.8589807929044403e-05,
+      "loss": 3.9758,
+      "step": 1307648
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.8581421981533886e-05,
+      "loss": 3.9526,
+      "step": 1308160
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.8573036034023366e-05,
+      "loss": 3.9566,
+      "step": 1308672
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.8564666465316575e-05,
+      "loss": 3.9583,
+      "step": 1309184
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.8556280517806055e-05,
+      "loss": 3.9622,
+      "step": 1309696
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.8547894570295535e-05,
+      "loss": 3.959,
+      "step": 1310208
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.8539508622785015e-05,
+      "loss": 3.959,
+      "step": 1310720
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.8531122675274495e-05,
+      "loss": 3.9454,
+      "step": 1311232
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.852273672776397e-05,
+      "loss": 3.9551,
+      "step": 1311744
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.851435078025345e-05,
+      "loss": 3.9453,
+      "step": 1312256
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.850596483274293e-05,
+      "loss": 3.9579,
+      "step": 1312768
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.849759526403614e-05,
+      "loss": 3.9536,
+      "step": 1313280
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.8489225695329357e-05,
+      "loss": 3.959,
+      "step": 1313792
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.848083974781884e-05,
+      "loss": 3.9651,
+      "step": 1314304
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.847245380030832e-05,
+      "loss": 3.9552,
+      "step": 1314816
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.846408423160153e-05,
+      "loss": 3.9607,
+      "step": 1315328
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.845569828409101e-05,
+      "loss": 3.958,
+      "step": 1315840
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.844731233658049e-05,
+      "loss": 3.9509,
+      "step": 1316352
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.843892638906997e-05,
+      "loss": 3.9497,
+      "step": 1316864
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.8430540441559445e-05,
+      "loss": 3.9396,
+      "step": 1317376
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.8422154494048925e-05,
+      "loss": 3.948,
+      "step": 1317888
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.8413768546538405e-05,
+      "loss": 3.9505,
+      "step": 1318400
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.8405382599027885e-05,
+      "loss": 3.9619,
+      "step": 1318912
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.8396996651517365e-05,
+      "loss": 3.9486,
+      "step": 1319424
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.8388610704006845e-05,
+      "loss": 3.9486,
+      "step": 1319936
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.8380241135300058e-05,
+      "loss": 3.9554,
+      "step": 1320448
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.8371855187789538e-05,
+      "loss": 3.9503,
+      "step": 1320960
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.8363469240279018e-05,
+      "loss": 3.9538,
+      "step": 1321472
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.8355099671572227e-05,
+      "loss": 3.9418,
+      "step": 1321984
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.8346713724061707e-05,
+      "loss": 3.9414,
+      "step": 1322496
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.8338327776551187e-05,
+      "loss": 3.9671,
+      "step": 1323008
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.8329941829040666e-05,
+      "loss": 3.965,
+      "step": 1323520
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.832155588153015e-05,
+      "loss": 3.9459,
+      "step": 1324032
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.831316993401963e-05,
+      "loss": 3.952,
+      "step": 1324544
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.830478398650911e-05,
+      "loss": 3.9578,
+      "step": 1325056
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.829639803899859e-05,
+      "loss": 3.9415,
+      "step": 1325568
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.82880284702918e-05,
+      "loss": 3.9623,
+      "step": 1326080
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.827964252278128e-05,
+      "loss": 3.9374,
+      "step": 1326592
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.827125657527076e-05,
+      "loss": 3.9673,
+      "step": 1327104
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.8262870627760242e-05,
+      "loss": 3.9586,
+      "step": 1327616
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.8254484680249722e-05,
+      "loss": 3.9445,
+      "step": 1328128
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.824611511154293e-05,
+      "loss": 3.945,
+      "step": 1328640
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.823772916403241e-05,
+      "loss": 3.9585,
+      "step": 1329152
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.822934321652189e-05,
+      "loss": 3.9259,
+      "step": 1329664
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.822095726901137e-05,
+      "loss": 3.9465,
+      "step": 1330176
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.821257132150085e-05,
+      "loss": 3.9591,
+      "step": 1330688
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.8204185373990328e-05,
+      "loss": 3.9557,
+      "step": 1331200
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.8195799426479807e-05,
+      "loss": 3.9465,
+      "step": 1331712
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.8187413478969287e-05,
+      "loss": 3.9353,
+      "step": 1332224
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.8179027531458767e-05,
+      "loss": 3.9433,
+      "step": 1332736
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.817065796275198e-05,
+      "loss": 3.9426,
+      "step": 1333248
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.816227201524146e-05,
+      "loss": 3.9538,
+      "step": 1333760
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.815388606773094e-05,
+      "loss": 3.9446,
+      "step": 1334272
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.814550012022042e-05,
+      "loss": 3.9543,
+      "step": 1334784
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.813713055151363e-05,
+      "loss": 3.9557,
+      "step": 1335296
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.8128760982806845e-05,
+      "loss": 3.9604,
+      "step": 1335808
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.8120375035296325e-05,
+      "loss": 3.9425,
+      "step": 1336320
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.81119890877858e-05,
+      "loss": 3.9513,
+      "step": 1336832
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.810360314027528e-05,
+      "loss": 3.9547,
+      "step": 1337344
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.809521719276476e-05,
+      "loss": 3.9504,
+      "step": 1337856
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.808683124525424e-05,
+      "loss": 3.9496,
+      "step": 1338368
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.807844529774372e-05,
+      "loss": 3.9457,
+      "step": 1338880
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.80700593502332e-05,
+      "loss": 3.9554,
+      "step": 1339392
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.806167340272268e-05,
+      "loss": 3.9503,
+      "step": 1339904
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.8053303834015893e-05,
+      "loss": 3.9365,
+      "step": 1340416
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.8044917886505373e-05,
+      "loss": 3.9466,
+      "step": 1340928
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.8036548317798582e-05,
+      "loss": 3.9472,
+      "step": 1341440
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.8028162370288062e-05,
+      "loss": 3.9544,
+      "step": 1341952
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.8019776422777542e-05,
+      "loss": 3.9498,
+      "step": 1342464
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.8011390475267026e-05,
+      "loss": 3.9457,
+      "step": 1342976
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.8003004527756506e-05,
+      "loss": 3.9441,
+      "step": 1343488
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.7994634959049715e-05,
+      "loss": 3.9378,
+      "step": 1344000
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.7986249011539195e-05,
+      "loss": 3.9398,
+      "step": 1344512
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.7977863064028675e-05,
+      "loss": 3.9534,
+      "step": 1345024
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.7969477116518155e-05,
+      "loss": 3.9495,
+      "step": 1345536
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.7961107547811367e-05,
+      "loss": 3.9566,
+      "step": 1346048
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.7952721600300847e-05,
+      "loss": 3.9516,
+      "step": 1346560
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.7944335652790327e-05,
+      "loss": 3.9444,
+      "step": 1347072
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.7935949705279807e-05,
+      "loss": 3.9453,
+      "step": 1347584
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.7927563757769287e-05,
+      "loss": 3.9513,
+      "step": 1348096
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.7919177810258767e-05,
+      "loss": 3.9578,
+      "step": 1348608
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.7910791862748247e-05,
+      "loss": 3.9553,
+      "step": 1349120
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.7902405915237727e-05,
+      "loss": 3.9456,
+      "step": 1349632
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.789401996772721e-05,
+      "loss": 3.9415,
+      "step": 1350144
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.788563402021669e-05,
+      "loss": 3.9584,
+      "step": 1350656
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.7877248072706163e-05,
+      "loss": 3.9384,
+      "step": 1351168
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.786887850399938e-05,
+      "loss": 3.9478,
+      "step": 1351680
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.7860508935292588e-05,
+      "loss": 3.9474,
+      "step": 1352192
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.785212298778207e-05,
+      "loss": 3.948,
+      "step": 1352704
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.784373704027155e-05,
+      "loss": 3.9376,
+      "step": 1353216
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.783535109276103e-05,
+      "loss": 3.9483,
+      "step": 1353728
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.782696514525051e-05,
+      "loss": 3.9359,
+      "step": 1354240
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.781859557654372e-05,
+      "loss": 3.9449,
+      "step": 1354752
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.78102096290332e-05,
+      "loss": 3.9474,
+      "step": 1355264
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.780182368152268e-05,
+      "loss": 3.9428,
+      "step": 1355776
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.779343773401216e-05,
+      "loss": 3.9617,
+      "step": 1356288
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.7785051786501637e-05,
+      "loss": 3.9633,
+      "step": 1356800
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.7776698596598582e-05,
+      "loss": 3.9526,
+      "step": 1357312
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.7768329027891795e-05,
+      "loss": 3.9436,
+      "step": 1357824
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.7759943080381274e-05,
+      "loss": 3.94,
+      "step": 1358336
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.7751557132870754e-05,
+      "loss": 3.9472,
+      "step": 1358848
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.7743171185360234e-05,
+      "loss": 3.9435,
+      "step": 1359360
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.7734785237849714e-05,
+      "loss": 3.9548,
+      "step": 1359872
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.7726399290339194e-05,
+      "loss": 3.9361,
+      "step": 1360384
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.7718013342828674e-05,
+      "loss": 3.9478,
+      "step": 1360896
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.7709627395318154e-05,
+      "loss": 3.9495,
+      "step": 1361408
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.7701241447807634e-05,
+      "loss": 3.9515,
+      "step": 1361920
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.769285550029711e-05,
+      "loss": 3.9389,
+      "step": 1362432
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.768446955278659e-05,
+      "loss": 3.9459,
+      "step": 1362944
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.767608360527607e-05,
+      "loss": 3.9473,
+      "step": 1363456
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.766769765776555e-05,
+      "loss": 3.9518,
+      "step": 1363968
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.765931171025503e-05,
+      "loss": 3.9566,
+      "step": 1364480
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.765092576274451e-05,
+      "loss": 3.956,
+      "step": 1364992
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.764253981523399e-05,
+      "loss": 3.9382,
+      "step": 1365504
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.7634170246527203e-05,
+      "loss": 3.9525,
+      "step": 1366016
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.7625784299016683e-05,
+      "loss": 3.9515,
+      "step": 1366528
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.7617414730309892e-05,
+      "loss": 3.9472,
+      "step": 1367040
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.7609028782799372e-05,
+      "loss": 3.9504,
+      "step": 1367552
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.7600642835288852e-05,
+      "loss": 3.9559,
+      "step": 1368064
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.7592256887778335e-05,
+      "loss": 3.9482,
+      "step": 1368576
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.7583870940267815e-05,
+      "loss": 3.9649,
+      "step": 1369088
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.7575484992757295e-05,
+      "loss": 3.942,
+      "step": 1369600
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.7567099045246775e-05,
+      "loss": 3.9555,
+      "step": 1370112
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.7558713097736255e-05,
+      "loss": 3.9483,
+      "step": 1370624
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.7550343529029464e-05,
+      "loss": 3.9424,
+      "step": 1371136
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.7541957581518944e-05,
+      "loss": 3.9466,
+      "step": 1371648
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.7533588012812157e-05,
+      "loss": 3.9429,
+      "step": 1372160
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.7525202065301637e-05,
+      "loss": 3.945,
+      "step": 1372672
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.7516816117791117e-05,
+      "loss": 3.9448,
+      "step": 1373184
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.7508446549084326e-05,
+      "loss": 3.9411,
+      "step": 1373696
+    },
+    {
+      "epoch": 1.03,
+      "eval_loss": 4.0179524421691895,
+      "eval_runtime": 295.0017,
+      "eval_samples_per_second": 1293.521,
+      "eval_steps_per_second": 40.423,
+      "step": 1373760
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.7500060601573806e-05,
+      "loss": 3.9461,
+      "step": 1374208
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.749167465406329e-05,
+      "loss": 3.9418,
+      "step": 1374720
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.748328870655277e-05,
+      "loss": 3.9574,
+      "step": 1375232
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.747490275904225e-05,
+      "loss": 3.9492,
+      "step": 1375744
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.746651681153173e-05,
+      "loss": 3.9646,
+      "step": 1376256
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.745813086402121e-05,
+      "loss": 3.9409,
+      "step": 1376768
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.744974491651069e-05,
+      "loss": 3.9462,
+      "step": 1377280
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.744135896900017e-05,
+      "loss": 3.9351,
+      "step": 1377792
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.743297302148965e-05,
+      "loss": 3.9519,
+      "step": 1378304
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.7424587073979125e-05,
+      "loss": 3.9473,
+      "step": 1378816
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.7416201126468605e-05,
+      "loss": 3.9458,
+      "step": 1379328
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.7407815178958085e-05,
+      "loss": 3.956,
+      "step": 1379840
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.739946198905503e-05,
+      "loss": 3.9347,
+      "step": 1380352
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.739107604154451e-05,
+      "loss": 3.9489,
+      "step": 1380864
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.738269009403399e-05,
+      "loss": 3.9366,
+      "step": 1381376
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.7374304146523473e-05,
+      "loss": 3.9394,
+      "step": 1381888
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.7365918199012947e-05,
+      "loss": 3.9384,
+      "step": 1382400
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.7357532251502426e-05,
+      "loss": 3.9492,
+      "step": 1382912
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.7349146303991906e-05,
+      "loss": 3.9455,
+      "step": 1383424
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.7340760356481386e-05,
+      "loss": 3.9647,
+      "step": 1383936
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.7332374408970866e-05,
+      "loss": 3.9449,
+      "step": 1384448
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.732398846146035e-05,
+      "loss": 3.9475,
+      "step": 1384960
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.731560251394983e-05,
+      "loss": 3.9461,
+      "step": 1385472
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.730721656643931e-05,
+      "loss": 3.9536,
+      "step": 1385984
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.729883061892879e-05,
+      "loss": 3.9448,
+      "step": 1386496
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.729044467141827e-05,
+      "loss": 3.9499,
+      "step": 1387008
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.728205872390775e-05,
+      "loss": 3.9377,
+      "step": 1387520
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.727367277639723e-05,
+      "loss": 3.9427,
+      "step": 1388032
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.726530320769044e-05,
+      "loss": 3.9335,
+      "step": 1388544
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.7256917260179922e-05,
+      "loss": 3.9489,
+      "step": 1389056
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.7248531312669402e-05,
+      "loss": 3.9493,
+      "step": 1389568
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.7240145365158882e-05,
+      "loss": 3.9487,
+      "step": 1390080
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.723177579645209e-05,
+      "loss": 3.9554,
+      "step": 1390592
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.722338984894157e-05,
+      "loss": 3.9458,
+      "step": 1391104
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.7215020280234783e-05,
+      "loss": 3.947,
+      "step": 1391616
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.7206634332724263e-05,
+      "loss": 3.9508,
+      "step": 1392128
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.7198264764017472e-05,
+      "loss": 3.9423,
+      "step": 1392640
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.7189878816506952e-05,
+      "loss": 3.9389,
+      "step": 1393152
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.7181492868996432e-05,
+      "loss": 3.93,
+      "step": 1393664
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.7173106921485912e-05,
+      "loss": 3.9396,
+      "step": 1394176
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.7164720973975392e-05,
+      "loss": 3.9418,
+      "step": 1394688
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.7156335026464876e-05,
+      "loss": 3.9499,
+      "step": 1395200
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.7147949078954356e-05,
+      "loss": 3.9428,
+      "step": 1395712
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.7139563131443835e-05,
+      "loss": 3.9338,
+      "step": 1396224
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.713117718393331e-05,
+      "loss": 3.9508,
+      "step": 1396736
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.712279123642279e-05,
+      "loss": 3.9442,
+      "step": 1397248
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.7114421667716004e-05,
+      "loss": 3.9407,
+      "step": 1397760
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.7106035720205484e-05,
+      "loss": 3.9365,
+      "step": 1398272
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.709764977269496e-05,
+      "loss": 3.9295,
+      "step": 1398784
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.708926382518444e-05,
+      "loss": 3.9598,
+      "step": 1399296
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.708087787767392e-05,
+      "loss": 3.9545,
+      "step": 1399808
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.70724919301634e-05,
+      "loss": 3.9371,
+      "step": 1400320
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.706410598265288e-05,
+      "loss": 3.9404,
+      "step": 1400832
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.705572003514236e-05,
+      "loss": 3.9496,
+      "step": 1401344
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.7047350466435573e-05,
+      "loss": 3.9318,
+      "step": 1401856
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.7038980897728782e-05,
+      "loss": 3.9525,
+      "step": 1402368
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.7030594950218262e-05,
+      "loss": 3.9291,
+      "step": 1402880
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.7022209002707742e-05,
+      "loss": 3.9531,
+      "step": 1403392
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.7013823055197222e-05,
+      "loss": 3.9534,
+      "step": 1403904
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.7005437107686706e-05,
+      "loss": 3.9367,
+      "step": 1404416
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.6997051160176186e-05,
+      "loss": 3.9302,
+      "step": 1404928
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.6988665212665665e-05,
+      "loss": 3.9497,
+      "step": 1405440
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.6980279265155145e-05,
+      "loss": 3.9222,
+      "step": 1405952
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.6971893317644625e-05,
+      "loss": 3.9386,
+      "step": 1406464
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.6963507370134105e-05,
+      "loss": 3.9475,
+      "step": 1406976
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.6955137801427314e-05,
+      "loss": 3.9478,
+      "step": 1407488
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.6946751853916798e-05,
+      "loss": 3.9405,
+      "step": 1408000
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.6938365906406278e-05,
+      "loss": 3.9277,
+      "step": 1408512
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.6929979958895758e-05,
+      "loss": 3.9318,
+      "step": 1409024
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.6921594011385238e-05,
+      "loss": 3.9326,
+      "step": 1409536
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.6913208063874718e-05,
+      "loss": 3.9453,
+      "step": 1410048
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.6904822116364198e-05,
+      "loss": 3.9357,
+      "step": 1410560
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.6896436168853674e-05,
+      "loss": 3.9453,
+      "step": 1411072
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.688806660014689e-05,
+      "loss": 3.9492,
+      "step": 1411584
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.68796970314401e-05,
+      "loss": 3.9514,
+      "step": 1412096
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.687131108392958e-05,
+      "loss": 3.9328,
+      "step": 1412608
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.686292513641906e-05,
+      "loss": 3.9402,
+      "step": 1413120
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.685453918890854e-05,
+      "loss": 3.9485,
+      "step": 1413632
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.684615324139802e-05,
+      "loss": 3.9376,
+      "step": 1414144
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.6837767293887495e-05,
+      "loss": 3.9415,
+      "step": 1414656
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.6829381346376975e-05,
+      "loss": 3.9394,
+      "step": 1415168
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.6820995398866455e-05,
+      "loss": 3.9424,
+      "step": 1415680
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.6812609451355935e-05,
+      "loss": 3.943,
+      "step": 1416192
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.6804239882649144e-05,
+      "loss": 3.9274,
+      "step": 1416704
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.6795853935138628e-05,
+      "loss": 3.9408,
+      "step": 1417216
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.6787467987628108e-05,
+      "loss": 3.9355,
+      "step": 1417728
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.6779082040117588e-05,
+      "loss": 3.9452,
+      "step": 1418240
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.6770728850214533e-05,
+      "loss": 3.9389,
+      "step": 1418752
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.6762342902704013e-05,
+      "loss": 3.9378,
+      "step": 1419264
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.6753956955193493e-05,
+      "loss": 3.9354,
+      "step": 1419776
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.674557100768297e-05,
+      "loss": 3.9304,
+      "step": 1420288
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.6737201438976185e-05,
+      "loss": 3.9277,
+      "step": 1420800
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.6728815491465665e-05,
+      "loss": 3.9416,
+      "step": 1421312
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.6720429543955145e-05,
+      "loss": 3.9513,
+      "step": 1421824
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.6712043596444618e-05,
+      "loss": 3.9446,
+      "step": 1422336
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.6703657648934098e-05,
+      "loss": 3.9399,
+      "step": 1422848
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.669527170142358e-05,
+      "loss": 3.9344,
+      "step": 1423360
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.668688575391306e-05,
+      "loss": 3.9382,
+      "step": 1423872
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.667849980640254e-05,
+      "loss": 3.9398,
+      "step": 1424384
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.667011385889202e-05,
+      "loss": 3.947,
+      "step": 1424896
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.666174429018523e-05,
+      "loss": 3.9439,
+      "step": 1425408
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.665335834267471e-05,
+      "loss": 3.9374,
+      "step": 1425920
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.664497239516419e-05,
+      "loss": 3.9298,
+      "step": 1426432
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.663658644765367e-05,
+      "loss": 3.9498,
+      "step": 1426944
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.6628216878946883e-05,
+      "loss": 3.9286,
+      "step": 1427456
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.6619830931436363e-05,
+      "loss": 3.9409,
+      "step": 1427968
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.6611444983925843e-05,
+      "loss": 3.9326,
+      "step": 1428480
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.6603059036415323e-05,
+      "loss": 3.9406,
+      "step": 1428992
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.6594689467708532e-05,
+      "loss": 3.9304,
+      "step": 1429504
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.6586303520198015e-05,
+      "loss": 3.941,
+      "step": 1430016
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.6577917572687495e-05,
+      "loss": 3.93,
+      "step": 1430528
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.6569531625176975e-05,
+      "loss": 3.9316,
+      "step": 1431040
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.6561162056470184e-05,
+      "loss": 3.9435,
+      "step": 1431552
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.6552776108959664e-05,
+      "loss": 3.9309,
+      "step": 1432064
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.6544390161449144e-05,
+      "loss": 3.9522,
+      "step": 1432576
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.6536004213938624e-05,
+      "loss": 3.9525,
+      "step": 1433088
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.6527651024035566e-05,
+      "loss": 3.9448,
+      "step": 1433600
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.6519265076525046e-05,
+      "loss": 3.9343,
+      "step": 1434112
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.6510879129014525e-05,
+      "loss": 3.9323,
+      "step": 1434624
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.6502493181504005e-05,
+      "loss": 3.9354,
+      "step": 1435136
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.6494107233993485e-05,
+      "loss": 3.9357,
+      "step": 1435648
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.648572128648297e-05,
+      "loss": 3.9456,
+      "step": 1436160
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.647733533897245e-05,
+      "loss": 3.9281,
+      "step": 1436672
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.646894939146193e-05,
+      "loss": 3.9392,
+      "step": 1437184
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.646056344395141e-05,
+      "loss": 3.9433,
+      "step": 1437696
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.645217749644089e-05,
+      "loss": 3.941,
+      "step": 1438208
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.644379154893037e-05,
+      "loss": 3.9335,
+      "step": 1438720
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.643540560141985e-05,
+      "loss": 3.9345,
+      "step": 1439232
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.642703603271306e-05,
+      "loss": 3.9382,
+      "step": 1439744
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.641866646400627e-05,
+      "loss": 3.9419,
+      "step": 1440256
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.641028051649575e-05,
+      "loss": 3.9437,
+      "step": 1440768
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.640189456898523e-05,
+      "loss": 3.9512,
+      "step": 1441280
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.639350862147471e-05,
+      "loss": 3.9296,
+      "step": 1441792
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.638512267396419e-05,
+      "loss": 3.9449,
+      "step": 1442304
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.637673672645367e-05,
+      "loss": 3.9444,
+      "step": 1442816
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.6368350778943153e-05,
+      "loss": 3.9375,
+      "step": 1443328
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.6359964831432626e-05,
+      "loss": 3.939,
+      "step": 1443840
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.6351578883922106e-05,
+      "loss": 3.9488,
+      "step": 1444352
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.6343209315215322e-05,
+      "loss": 3.9397,
+      "step": 1444864
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.6334823367704802e-05,
+      "loss": 3.9562,
+      "step": 1445376
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.632643742019428e-05,
+      "loss": 3.9324,
+      "step": 1445888
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.631805147268376e-05,
+      "loss": 3.9426,
+      "step": 1446400
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.630966552517324e-05,
+      "loss": 3.9392,
+      "step": 1446912
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.6301295956466448e-05,
+      "loss": 3.9359,
+      "step": 1447424
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.6292910008955928e-05,
+      "loss": 3.9365,
+      "step": 1447936
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.6284524061445408e-05,
+      "loss": 3.9347,
+      "step": 1448448
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.627613811393489e-05,
+      "loss": 3.933,
+      "step": 1448960
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.6267784924031836e-05,
+      "loss": 3.9372,
+      "step": 1449472
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.6259398976521316e-05,
+      "loss": 3.9354,
+      "step": 1449984
+    },
+    {
+      "epoch": 0.03,
+      "eval_loss": 4.013299465179443,
+      "eval_runtime": 295.3219,
+      "eval_samples_per_second": 1292.119,
+      "eval_steps_per_second": 40.38,
+      "step": 1450080
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.6251013029010796e-05,
+      "loss": 3.9386,
+      "step": 1450496
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.6242643460304005e-05,
+      "loss": 3.9333,
+      "step": 1451008
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.6234257512793485e-05,
+      "loss": 3.9461,
+      "step": 1451520
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.6225871565282968e-05,
+      "loss": 3.9392,
+      "step": 1452032
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.6217485617772448e-05,
+      "loss": 3.9573,
+      "step": 1452544
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.620909967026192e-05,
+      "loss": 3.9345,
+      "step": 1453056
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.62007137227514e-05,
+      "loss": 3.9382,
+      "step": 1453568
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.619232777524088e-05,
+      "loss": 3.9267,
+      "step": 1454080
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.618394182773036e-05,
+      "loss": 3.9397,
+      "step": 1454592
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.6175555880219845e-05,
+      "loss": 3.9416,
+      "step": 1455104
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.6167169932709325e-05,
+      "loss": 3.9356,
+      "step": 1455616
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.6158783985198805e-05,
+      "loss": 3.9421,
+      "step": 1456128
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.6150414416492014e-05,
+      "loss": 3.931,
+      "step": 1456640
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.6142028468981494e-05,
+      "loss": 3.9404,
+      "step": 1457152
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.6133642521470974e-05,
+      "loss": 3.9299,
+      "step": 1457664
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.6125256573960454e-05,
+      "loss": 3.9285,
+      "step": 1458176
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.6116870626449937e-05,
+      "loss": 3.9298,
+      "step": 1458688
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.6108484678939417e-05,
+      "loss": 3.9358,
+      "step": 1459200
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.6100098731428897e-05,
+      "loss": 3.9379,
+      "step": 1459712
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.6091712783918377e-05,
+      "loss": 3.954,
+      "step": 1460224
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.6083343215211586e-05,
+      "loss": 3.9378,
+      "step": 1460736
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.6074973646504798e-05,
+      "loss": 3.9438,
+      "step": 1461248
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.6066587698994278e-05,
+      "loss": 3.9355,
+      "step": 1461760
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.6058201751483758e-05,
+      "loss": 3.9425,
+      "step": 1462272
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.6049815803973238e-05,
+      "loss": 3.9387,
+      "step": 1462784
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.6041429856462718e-05,
+      "loss": 3.9415,
+      "step": 1463296
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.6033060287755927e-05,
+      "loss": 3.9316,
+      "step": 1463808
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.6024674340245407e-05,
+      "loss": 3.9288,
+      "step": 1464320
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.6016288392734887e-05,
+      "loss": 3.93,
+      "step": 1464832
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.600790244522437e-05,
+      "loss": 3.939,
+      "step": 1465344
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.599951649771385e-05,
+      "loss": 3.9409,
+      "step": 1465856
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.599114692900706e-05,
+      "loss": 3.9396,
+      "step": 1466368
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.598276098149654e-05,
+      "loss": 3.9486,
+      "step": 1466880
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.597437503398602e-05,
+      "loss": 3.9394,
+      "step": 1467392
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.5966005465279232e-05,
+      "loss": 3.9348,
+      "step": 1467904
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.5957619517768712e-05,
+      "loss": 3.9398,
+      "step": 1468416
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.5949233570258192e-05,
+      "loss": 3.9398,
+      "step": 1468928
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.5940847622747672e-05,
+      "loss": 3.9279,
+      "step": 1469440
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.5932461675237152e-05,
+      "loss": 3.9215,
+      "step": 1469952
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.5924075727726632e-05,
+      "loss": 3.9274,
+      "step": 1470464
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.591568978021611e-05,
+      "loss": 3.9374,
+      "step": 1470976
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.5907303832705588e-05,
+      "loss": 3.9396,
+      "step": 1471488
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.5898917885195068e-05,
+      "loss": 3.9362,
+      "step": 1472000
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.5890531937684548e-05,
+      "loss": 3.9231,
+      "step": 1472512
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.5882162368977757e-05,
+      "loss": 3.9406,
+      "step": 1473024
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.5873776421467237e-05,
+      "loss": 3.9352,
+      "step": 1473536
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.5865390473956717e-05,
+      "loss": 3.9327,
+      "step": 1474048
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.58570045264462e-05,
+      "loss": 3.9271,
+      "step": 1474560
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.584861857893568e-05,
+      "loss": 3.9144,
+      "step": 1475072
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.584023263142516e-05,
+      "loss": 3.9565,
+      "step": 1475584
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.583184668391464e-05,
+      "loss": 3.9426,
+      "step": 1476096
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.582346073640412e-05,
+      "loss": 3.9319,
+      "step": 1476608
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.581509116769733e-05,
+      "loss": 3.9356,
+      "step": 1477120
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.5806721598990542e-05,
+      "loss": 3.9361,
+      "step": 1477632
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.5798335651480022e-05,
+      "loss": 3.926,
+      "step": 1478144
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.5789949703969502e-05,
+      "loss": 3.941,
+      "step": 1478656
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.5781563756458982e-05,
+      "loss": 3.9235,
+      "step": 1479168
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.5773177808948462e-05,
+      "loss": 3.94,
+      "step": 1479680
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.576479186143794e-05,
+      "loss": 3.9485,
+      "step": 1480192
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.575640591392742e-05,
+      "loss": 3.9294,
+      "step": 1480704
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.57480199664169e-05,
+      "loss": 3.9191,
+      "step": 1481216
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.5739634018906385e-05,
+      "loss": 3.9465,
+      "step": 1481728
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.5731264450199594e-05,
+      "loss": 3.9133,
+      "step": 1482240
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.5722878502689074e-05,
+      "loss": 3.9249,
+      "step": 1482752
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.5714492555178554e-05,
+      "loss": 3.9418,
+      "step": 1483264
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.5706106607668034e-05,
+      "loss": 3.9374,
+      "step": 1483776
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.5697737038961246e-05,
+      "loss": 3.9308,
+      "step": 1484288
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.5689351091450726e-05,
+      "loss": 3.9243,
+      "step": 1484800
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.5680965143940206e-05,
+      "loss": 3.9158,
+      "step": 1485312
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.5672579196429686e-05,
+      "loss": 3.9245,
+      "step": 1485824
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.5664209627722895e-05,
+      "loss": 3.9399,
+      "step": 1486336
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.5655823680212375e-05,
+      "loss": 3.9243,
+      "step": 1486848
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.5647437732701855e-05,
+      "loss": 3.9407,
+      "step": 1487360
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.563905178519134e-05,
+      "loss": 3.9357,
+      "step": 1487872
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.563066583768082e-05,
+      "loss": 3.9469,
+      "step": 1488384
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.5622296268974028e-05,
+      "loss": 3.9278,
+      "step": 1488896
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.5613926700267237e-05,
+      "loss": 3.9279,
+      "step": 1489408
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.5605540752756717e-05,
+      "loss": 3.9405,
+      "step": 1489920
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.55971548052462e-05,
+      "loss": 3.9272,
+      "step": 1490432
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.558876885773568e-05,
+      "loss": 3.9383,
+      "step": 1490944
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.558038291022516e-05,
+      "loss": 3.9299,
+      "step": 1491456
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.557199696271464e-05,
+      "loss": 3.9321,
+      "step": 1491968
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.556362739400785e-05,
+      "loss": 3.9362,
+      "step": 1492480
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.555524144649733e-05,
+      "loss": 3.9204,
+      "step": 1492992
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.554685549898681e-05,
+      "loss": 3.9306,
+      "step": 1493504
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.5538469551476292e-05,
+      "loss": 3.9304,
+      "step": 1494016
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.55300999827695e-05,
+      "loss": 3.9357,
+      "step": 1494528
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.552171403525898e-05,
+      "loss": 3.9298,
+      "step": 1495040
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.551332808774846e-05,
+      "loss": 3.9303,
+      "step": 1495552
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.550494214023794e-05,
+      "loss": 3.925,
+      "step": 1496064
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.5496572571531154e-05,
+      "loss": 3.9228,
+      "step": 1496576
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.5488186624020634e-05,
+      "loss": 3.9239,
+      "step": 1497088
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.5479800676510114e-05,
+      "loss": 3.9311,
+      "step": 1497600
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.5471414728999594e-05,
+      "loss": 3.9422,
+      "step": 1498112
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.5463028781489067e-05,
+      "loss": 3.9383,
+      "step": 1498624
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.5454642833978547e-05,
+      "loss": 3.9329,
+      "step": 1499136
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.5446273265271763e-05,
+      "loss": 3.9233,
+      "step": 1499648
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.5437887317761243e-05,
+      "loss": 3.9289,
+      "step": 1500160
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.542950137025072e-05,
+      "loss": 3.9336,
+      "step": 1500672
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.54211154227402e-05,
+      "loss": 3.9391,
+      "step": 1501184
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.541272947522968e-05,
+      "loss": 3.9365,
+      "step": 1501696
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.540434352771916e-05,
+      "loss": 3.9302,
+      "step": 1502208
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.539595758020864e-05,
+      "loss": 3.9228,
+      "step": 1502720
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.538757163269812e-05,
+      "loss": 3.9455,
+      "step": 1503232
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.537920206399133e-05,
+      "loss": 3.9141,
+      "step": 1503744
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.537083249528454e-05,
+      "loss": 3.9352,
+      "step": 1504256
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.536244654777402e-05,
+      "loss": 3.9251,
+      "step": 1504768
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.53540606002635e-05,
+      "loss": 3.9324,
+      "step": 1505280
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.5345674652752984e-05,
+      "loss": 3.9215,
+      "step": 1505792
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.5337288705242464e-05,
+      "loss": 3.9321,
+      "step": 1506304
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.5328902757731944e-05,
+      "loss": 3.9233,
+      "step": 1506816
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.5320533189025153e-05,
+      "loss": 3.9233,
+      "step": 1507328
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.5312147241514633e-05,
+      "loss": 3.9315,
+      "step": 1507840
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.5303761294004113e-05,
+      "loss": 3.9239,
+      "step": 1508352
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.5295375346493593e-05,
+      "loss": 3.9426,
+      "step": 1508864
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.5286989398983073e-05,
+      "loss": 3.9447,
+      "step": 1509376
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.5278603451472556e-05,
+      "loss": 3.9379,
+      "step": 1509888
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.5270217503962036e-05,
+      "loss": 3.9267,
+      "step": 1510400
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.5261831556451516e-05,
+      "loss": 3.9267,
+      "step": 1510912
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.5253445608940996e-05,
+      "loss": 3.9244,
+      "step": 1511424
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.5245076040234205e-05,
+      "loss": 3.9256,
+      "step": 1511936
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.5236706471527417e-05,
+      "loss": 3.9379,
+      "step": 1512448
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.5228320524016897e-05,
+      "loss": 3.9151,
+      "step": 1512960
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.5219934576506377e-05,
+      "loss": 3.934,
+      "step": 1513472
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.5211548628995857e-05,
+      "loss": 3.9313,
+      "step": 1513984
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.5203162681485337e-05,
+      "loss": 3.9351,
+      "step": 1514496
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.5194793112778546e-05,
+      "loss": 3.9239,
+      "step": 1515008
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.5186407165268026e-05,
+      "loss": 3.9287,
+      "step": 1515520
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.517802121775751e-05,
+      "loss": 3.9257,
+      "step": 1516032
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.516963527024699e-05,
+      "loss": 3.9408,
+      "step": 1516544
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.516124932273647e-05,
+      "loss": 3.9343,
+      "step": 1517056
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.515286337522595e-05,
+      "loss": 3.9428,
+      "step": 1517568
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.514449380651916e-05,
+      "loss": 3.9186,
+      "step": 1518080
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.513610785900864e-05,
+      "loss": 3.9375,
+      "step": 1518592
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.512772191149812e-05,
+      "loss": 3.9376,
+      "step": 1519104
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.511935234279133e-05,
+      "loss": 3.9229,
+      "step": 1519616
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.511096639528081e-05,
+      "loss": 3.932,
+      "step": 1520128
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.510258044777029e-05,
+      "loss": 3.9419,
+      "step": 1520640
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.509419450025977e-05,
+      "loss": 3.934,
+      "step": 1521152
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.508580855274925e-05,
+      "loss": 3.9488,
+      "step": 1521664
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.5077422605238727e-05,
+      "loss": 3.9245,
+      "step": 1522176
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.5069036657728207e-05,
+      "loss": 3.9389,
+      "step": 1522688
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.5060650710217687e-05,
+      "loss": 3.9279,
+      "step": 1523200
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.5052264762707167e-05,
+      "loss": 3.9299,
+      "step": 1523712
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.5043878815196647e-05,
+      "loss": 3.9284,
+      "step": 1524224
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.5035492867686127e-05,
+      "loss": 3.9283,
+      "step": 1524736
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.5027106920175607e-05,
+      "loss": 3.9237,
+      "step": 1525248
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.501875373027255e-05,
+      "loss": 3.9262,
+      "step": 1525760
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.501036778276203e-05,
+      "loss": 3.927,
+      "step": 1526272
+    },
+    {
+      "epoch": 1.03,
+      "eval_loss": 4.009834289550781,
+      "eval_runtime": 293.7762,
+      "eval_samples_per_second": 1298.917,
+      "eval_steps_per_second": 40.592,
+      "step": 1526400
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.500198183525151e-05,
+      "loss": 3.9292,
+      "step": 1526784
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.4993595887740992e-05,
+      "loss": 3.9251,
+      "step": 1527296
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.498520994023047e-05,
+      "loss": 3.9401,
+      "step": 1527808
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.497682399271995e-05,
+      "loss": 3.9309,
+      "step": 1528320
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.4968438045209432e-05,
+      "loss": 3.946,
+      "step": 1528832
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.4960052097698912e-05,
+      "loss": 3.9313,
+      "step": 1529344
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.4951666150188392e-05,
+      "loss": 3.9276,
+      "step": 1529856
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.494328020267787e-05,
+      "loss": 3.9205,
+      "step": 1530368
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.4934894255167348e-05,
+      "loss": 3.9304,
+      "step": 1530880
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.4926508307656828e-05,
+      "loss": 3.9329,
+      "step": 1531392
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.4918122360146308e-05,
+      "loss": 3.9297,
+      "step": 1531904
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.4909736412635788e-05,
+      "loss": 3.932,
+      "step": 1532416
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.4901366843929e-05,
+      "loss": 3.924,
+      "step": 1532928
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.489298089641848e-05,
+      "loss": 3.9347,
+      "step": 1533440
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.488459494890796e-05,
+      "loss": 3.9207,
+      "step": 1533952
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.487620900139744e-05,
+      "loss": 3.9195,
+      "step": 1534464
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.486782305388692e-05,
+      "loss": 3.9226,
+      "step": 1534976
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.48594371063764e-05,
+      "loss": 3.9316,
+      "step": 1535488
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.4851067537669613e-05,
+      "loss": 3.9289,
+      "step": 1536000
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.4842681590159093e-05,
+      "loss": 3.9447,
+      "step": 1536512
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.4834295642648573e-05,
+      "loss": 3.9314,
+      "step": 1537024
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.4825909695138053e-05,
+      "loss": 3.9395,
+      "step": 1537536
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.481752374762753e-05,
+      "loss": 3.925,
+      "step": 1538048
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.480913780011701e-05,
+      "loss": 3.9316,
+      "step": 1538560
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.480075185260649e-05,
+      "loss": 3.9301,
+      "step": 1539072
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.4792365905095973e-05,
+      "loss": 3.9354,
+      "step": 1539584
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.4783979957585452e-05,
+      "loss": 3.9252,
+      "step": 1540096
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.4775594010074932e-05,
+      "loss": 3.9257,
+      "step": 1540608
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.4767208062564412e-05,
+      "loss": 3.9213,
+      "step": 1541120
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.475883849385762e-05,
+      "loss": 3.9279,
+      "step": 1541632
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.4750468925150834e-05,
+      "loss": 3.9348,
+      "step": 1542144
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.4742082977640314e-05,
+      "loss": 3.9329,
+      "step": 1542656
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.4733697030129794e-05,
+      "loss": 3.9376,
+      "step": 1543168
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.4725311082619274e-05,
+      "loss": 3.9292,
+      "step": 1543680
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.4716925135108754e-05,
+      "loss": 3.9281,
+      "step": 1544192
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.4708555566401963e-05,
+      "loss": 3.9376,
+      "step": 1544704
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.4700169618891443e-05,
+      "loss": 3.9339,
+      "step": 1545216
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.4691783671380926e-05,
+      "loss": 3.9155,
+      "step": 1545728
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.4683397723870406e-05,
+      "loss": 3.9182,
+      "step": 1546240
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.4675028155163615e-05,
+      "loss": 3.9182,
+      "step": 1546752
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.4666642207653095e-05,
+      "loss": 3.9247,
+      "step": 1547264
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.4658272638946304e-05,
+      "loss": 3.9346,
+      "step": 1547776
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.4649886691435788e-05,
+      "loss": 3.9241,
+      "step": 1548288
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.4641500743925268e-05,
+      "loss": 3.9212,
+      "step": 1548800
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.4633114796414748e-05,
+      "loss": 3.9292,
+      "step": 1549312
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.4624728848904228e-05,
+      "loss": 3.9253,
+      "step": 1549824
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.4616342901393707e-05,
+      "loss": 3.9278,
+      "step": 1550336
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.4607956953883184e-05,
+      "loss": 3.922,
+      "step": 1550848
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.4599571006372664e-05,
+      "loss": 3.9019,
+      "step": 1551360
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.4591185058862144e-05,
+      "loss": 3.9503,
+      "step": 1551872
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.4582799111351627e-05,
+      "loss": 3.9333,
+      "step": 1552384
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.4574413163841107e-05,
+      "loss": 3.9314,
+      "step": 1552896
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.4566027216330587e-05,
+      "loss": 3.9266,
+      "step": 1553408
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.4557657647623796e-05,
+      "loss": 3.9256,
+      "step": 1553920
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.4549271700113276e-05,
+      "loss": 3.9193,
+      "step": 1554432
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.454090213140649e-05,
+      "loss": 3.9298,
+      "step": 1554944
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.453251618389597e-05,
+      "loss": 3.9199,
+      "step": 1555456
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.452413023638545e-05,
+      "loss": 3.9286,
+      "step": 1555968
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.451574428887493e-05,
+      "loss": 3.9402,
+      "step": 1556480
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.450735834136441e-05,
+      "loss": 3.9207,
+      "step": 1556992
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.449897239385389e-05,
+      "loss": 3.9137,
+      "step": 1557504
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.4490586446343365e-05,
+      "loss": 3.9356,
+      "step": 1558016
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.448220049883285e-05,
+      "loss": 3.9061,
+      "step": 1558528
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.447381455132233e-05,
+      "loss": 3.9193,
+      "step": 1559040
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.446542860381181e-05,
+      "loss": 3.9289,
+      "step": 1559552
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.445707541390875e-05,
+      "loss": 3.9293,
+      "step": 1560064
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.444868946639823e-05,
+      "loss": 3.9228,
+      "step": 1560576
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.444030351888771e-05,
+      "loss": 3.9195,
+      "step": 1561088
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.443191757137719e-05,
+      "loss": 3.9066,
+      "step": 1561600
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.442353162386667e-05,
+      "loss": 3.9184,
+      "step": 1562112
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.4415162055159882e-05,
+      "loss": 3.9305,
+      "step": 1562624
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.4406776107649362e-05,
+      "loss": 3.9186,
+      "step": 1563136
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.439840653894257e-05,
+      "loss": 3.9276,
+      "step": 1563648
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.439002059143205e-05,
+      "loss": 3.9296,
+      "step": 1564160
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.4381634643921535e-05,
+      "loss": 3.9411,
+      "step": 1564672
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.4373248696411015e-05,
+      "loss": 3.9203,
+      "step": 1565184
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.436486274890049e-05,
+      "loss": 3.918,
+      "step": 1565696
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.435647680138997e-05,
+      "loss": 3.9326,
+      "step": 1566208
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.434809085387945e-05,
+      "loss": 3.9174,
+      "step": 1566720
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.433970490636893e-05,
+      "loss": 3.9348,
+      "step": 1567232
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.433131895885841e-05,
+      "loss": 3.9222,
+      "step": 1567744
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.432293301134789e-05,
+      "loss": 3.9238,
+      "step": 1568256
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.431454706383737e-05,
+      "loss": 3.9303,
+      "step": 1568768
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.430616111632685e-05,
+      "loss": 3.9133,
+      "step": 1569280
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.429777516881633e-05,
+      "loss": 3.9225,
+      "step": 1569792
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.4289405600109543e-05,
+      "loss": 3.9204,
+      "step": 1570304
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.428101965259902e-05,
+      "loss": 3.9286,
+      "step": 1570816
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.4272633705088503e-05,
+      "loss": 3.9173,
+      "step": 1571328
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.4264247757577983e-05,
+      "loss": 3.9252,
+      "step": 1571840
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.4255878188871196e-05,
+      "loss": 3.917,
+      "step": 1572352
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.4247492241360672e-05,
+      "loss": 3.9177,
+      "step": 1572864
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.4239106293850152e-05,
+      "loss": 3.9117,
+      "step": 1573376
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.4230736725143365e-05,
+      "loss": 3.9261,
+      "step": 1573888
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.4222350777632845e-05,
+      "loss": 3.9357,
+      "step": 1574400
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.4213964830122325e-05,
+      "loss": 3.926,
+      "step": 1574912
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.4205578882611804e-05,
+      "loss": 3.9301,
+      "step": 1575424
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.4197192935101284e-05,
+      "loss": 3.9153,
+      "step": 1575936
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.4188806987590764e-05,
+      "loss": 3.9198,
+      "step": 1576448
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.4180437418883974e-05,
+      "loss": 3.9243,
+      "step": 1576960
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.4172051471373457e-05,
+      "loss": 3.9315,
+      "step": 1577472
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.4163665523862937e-05,
+      "loss": 3.9281,
+      "step": 1577984
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.4155295955156146e-05,
+      "loss": 3.9216,
+      "step": 1578496
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.4146910007645626e-05,
+      "loss": 3.9194,
+      "step": 1579008
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.4138524060135106e-05,
+      "loss": 3.9372,
+      "step": 1579520
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.4130138112624586e-05,
+      "loss": 3.9097,
+      "step": 1580032
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.4121752165114066e-05,
+      "loss": 3.9257,
+      "step": 1580544
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.411336621760355e-05,
+      "loss": 3.9163,
+      "step": 1581056
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.4104980270093026e-05,
+      "loss": 3.9262,
+      "step": 1581568
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.4096594322582506e-05,
+      "loss": 3.9147,
+      "step": 1582080
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.4088208375071986e-05,
+      "loss": 3.9239,
+      "step": 1582592
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.4079822427561466e-05,
+      "loss": 3.9151,
+      "step": 1583104
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.4071452858854675e-05,
+      "loss": 3.9181,
+      "step": 1583616
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.4063066911344158e-05,
+      "loss": 3.9233,
+      "step": 1584128
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.4054680963833638e-05,
+      "loss": 3.9182,
+      "step": 1584640
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.4046295016323118e-05,
+      "loss": 3.9313,
+      "step": 1585152
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.4037909068812598e-05,
+      "loss": 3.9374,
+      "step": 1585664
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.4029523121302078e-05,
+      "loss": 3.9266,
+      "step": 1586176
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.4021137173791554e-05,
+      "loss": 3.9245,
+      "step": 1586688
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.4012751226281034e-05,
+      "loss": 3.914,
+      "step": 1587200
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.400438165757425e-05,
+      "loss": 3.9191,
+      "step": 1587712
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.399599571006373e-05,
+      "loss": 3.92,
+      "step": 1588224
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.3987609762553207e-05,
+      "loss": 3.9348,
+      "step": 1588736
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.3979223815042687e-05,
+      "loss": 3.9055,
+      "step": 1589248
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.39708542463359e-05,
+      "loss": 3.9244,
+      "step": 1589760
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.396246829882538e-05,
+      "loss": 3.9213,
+      "step": 1590272
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.395408235131486e-05,
+      "loss": 3.9276,
+      "step": 1590784
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.394569640380434e-05,
+      "loss": 3.9195,
+      "step": 1591296
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.393732683509755e-05,
+      "loss": 3.9206,
+      "step": 1591808
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.3928940887587028e-05,
+      "loss": 3.9149,
+      "step": 1592320
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.3920554940076508e-05,
+      "loss": 3.9323,
+      "step": 1592832
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.3912168992565988e-05,
+      "loss": 3.9284,
+      "step": 1593344
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.3903799423859204e-05,
+      "loss": 3.9322,
+      "step": 1593856
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.389541347634868e-05,
+      "loss": 3.9139,
+      "step": 1594368
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.388702752883816e-05,
+      "loss": 3.9298,
+      "step": 1594880
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.387864158132764e-05,
+      "loss": 3.9297,
+      "step": 1595392
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.387025563381712e-05,
+      "loss": 3.9199,
+      "step": 1595904
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.38618696863066e-05,
+      "loss": 3.9253,
+      "step": 1596416
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.385348373879608e-05,
+      "loss": 3.9312,
+      "step": 1596928
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.3845114170089293e-05,
+      "loss": 3.9234,
+      "step": 1597440
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.3836728222578773e-05,
+      "loss": 3.9459,
+      "step": 1597952
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.3828342275068253e-05,
+      "loss": 3.9162,
+      "step": 1598464
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.3819956327557733e-05,
+      "loss": 3.9279,
+      "step": 1598976
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.381157038004721e-05,
+      "loss": 3.9193,
+      "step": 1599488
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.380318443253669e-05,
+      "loss": 3.9232,
+      "step": 1600000
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.3794798485026172e-05,
+      "loss": 3.9238,
+      "step": 1600512
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.3786428916319385e-05,
+      "loss": 3.9188,
+      "step": 1601024
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.377804296880886e-05,
+      "loss": 3.9206,
+      "step": 1601536
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.376965702129834e-05,
+      "loss": 3.9117,
+      "step": 1602048
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.376127107378782e-05,
+      "loss": 3.9227,
+      "step": 1602560
+    },
+    {
+      "epoch": 0.03,
+      "eval_loss": 4.006990909576416,
+      "eval_runtime": 296.3428,
+      "eval_samples_per_second": 1287.668,
+      "eval_steps_per_second": 40.241,
+      "step": 1602720
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.37528851262773e-05,
+      "loss": 3.9251,
+      "step": 1603072
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.374449917876678e-05,
+      "loss": 3.9168,
+      "step": 1603584
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.373611323125626e-05,
+      "loss": 3.9311,
+      "step": 1604096
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.3727727283745745e-05,
+      "loss": 3.9271,
+      "step": 1604608
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.371934133623522e-05,
+      "loss": 3.9335,
+      "step": 1605120
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.37109553887247e-05,
+      "loss": 3.922,
+      "step": 1605632
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.370256944121418e-05,
+      "loss": 3.9239,
+      "step": 1606144
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.369418349370366e-05,
+      "loss": 3.9117,
+      "step": 1606656
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.3685813924996873e-05,
+      "loss": 3.9222,
+      "step": 1607168
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.3677427977486353e-05,
+      "loss": 3.9251,
+      "step": 1607680
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.3669042029975833e-05,
+      "loss": 3.927,
+      "step": 1608192
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.3660656082465313e-05,
+      "loss": 3.921,
+      "step": 1608704
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.3652270134954793e-05,
+      "loss": 3.9219,
+      "step": 1609216
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.3643884187444273e-05,
+      "loss": 3.9211,
+      "step": 1609728
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.363549823993375e-05,
+      "loss": 3.9168,
+      "step": 1610240
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.362711229242323e-05,
+      "loss": 3.9091,
+      "step": 1610752
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.3618726344912713e-05,
+      "loss": 3.9164,
+      "step": 1611264
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.3610340397402193e-05,
+      "loss": 3.9234,
+      "step": 1611776
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.3601954449891673e-05,
+      "loss": 3.9209,
+      "step": 1612288
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.3593568502381153e-05,
+      "loss": 3.9361,
+      "step": 1612800
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.3585182554870633e-05,
+      "loss": 3.9258,
+      "step": 1613312
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.357679660736011e-05,
+      "loss": 3.9326,
+      "step": 1613824
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.356841065984959e-05,
+      "loss": 3.92,
+      "step": 1614336
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.356002471233907e-05,
+      "loss": 3.9232,
+      "step": 1614848
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.3551655143632282e-05,
+      "loss": 3.9216,
+      "step": 1615360
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.3543269196121762e-05,
+      "loss": 3.9318,
+      "step": 1615872
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.3534899627414974e-05,
+      "loss": 3.9179,
+      "step": 1616384
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.3526513679904454e-05,
+      "loss": 3.9164,
+      "step": 1616896
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.351812773239393e-05,
+      "loss": 3.9132,
+      "step": 1617408
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.3509741784883414e-05,
+      "loss": 3.9233,
+      "step": 1617920
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3501355837372894e-05,
+      "loss": 3.924,
+      "step": 1618432
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3492986268666103e-05,
+      "loss": 3.9272,
+      "step": 1618944
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3484600321155583e-05,
+      "loss": 3.9281,
+      "step": 1619456
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3476230752448796e-05,
+      "loss": 3.9227,
+      "step": 1619968
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3467844804938276e-05,
+      "loss": 3.9215,
+      "step": 1620480
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3459475236231488e-05,
+      "loss": 3.93,
+      "step": 1620992
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3451089288720968e-05,
+      "loss": 3.9254,
+      "step": 1621504
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3442703341210448e-05,
+      "loss": 3.9112,
+      "step": 1622016
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3434317393699928e-05,
+      "loss": 3.9109,
+      "step": 1622528
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3425931446189405e-05,
+      "loss": 3.9093,
+      "step": 1623040
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3417545498678885e-05,
+      "loss": 3.913,
+      "step": 1623552
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3409159551168368e-05,
+      "loss": 3.9301,
+      "step": 1624064
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3400773603657848e-05,
+      "loss": 3.9195,
+      "step": 1624576
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3392387656147328e-05,
+      "loss": 3.913,
+      "step": 1625088
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3384001708636808e-05,
+      "loss": 3.9228,
+      "step": 1625600
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3375615761126284e-05,
+      "loss": 3.917,
+      "step": 1626112
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3367229813615764e-05,
+      "loss": 3.9216,
+      "step": 1626624
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3358843866105244e-05,
+      "loss": 3.9123,
+      "step": 1627136
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.335047429739846e-05,
+      "loss": 3.8948,
+      "step": 1627648
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3342088349887937e-05,
+      "loss": 3.9435,
+      "step": 1628160
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3333702402377417e-05,
+      "loss": 3.9246,
+      "step": 1628672
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3325316454866897e-05,
+      "loss": 3.9262,
+      "step": 1629184
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3316930507356377e-05,
+      "loss": 3.9142,
+      "step": 1629696
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3308544559845856e-05,
+      "loss": 3.9226,
+      "step": 1630208
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3300158612335336e-05,
+      "loss": 3.9118,
+      "step": 1630720
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.329180542243228e-05,
+      "loss": 3.9194,
+      "step": 1631232
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3283419474921758e-05,
+      "loss": 3.9182,
+      "step": 1631744
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3275033527411238e-05,
+      "loss": 3.9209,
+      "step": 1632256
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3266647579900718e-05,
+      "loss": 3.936,
+      "step": 1632768
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3258261632390198e-05,
+      "loss": 3.9114,
+      "step": 1633280
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3249875684879678e-05,
+      "loss": 3.9041,
+      "step": 1633792
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.324148973736916e-05,
+      "loss": 3.9309,
+      "step": 1634304
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.323310378985864e-05,
+      "loss": 3.9001,
+      "step": 1634816
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3224717842348118e-05,
+      "loss": 3.9076,
+      "step": 1635328
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3216331894837598e-05,
+      "loss": 3.9258,
+      "step": 1635840
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3207945947327078e-05,
+      "loss": 3.9249,
+      "step": 1636352
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3199559999816558e-05,
+      "loss": 3.9115,
+      "step": 1636864
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.319119043110977e-05,
+      "loss": 3.9173,
+      "step": 1637376
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.318280448359925e-05,
+      "loss": 3.897,
+      "step": 1637888
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.317441853608873e-05,
+      "loss": 3.9138,
+      "step": 1638400
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.316603258857821e-05,
+      "loss": 3.9214,
+      "step": 1638912
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.315766301987142e-05,
+      "loss": 3.9096,
+      "step": 1639424
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3149309829968364e-05,
+      "loss": 3.9218,
+      "step": 1639936
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3140923882457844e-05,
+      "loss": 3.9207,
+      "step": 1640448
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3132537934947324e-05,
+      "loss": 3.9345,
+      "step": 1640960
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3124168366240533e-05,
+      "loss": 3.9105,
+      "step": 1641472
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3115782418730013e-05,
+      "loss": 3.9152,
+      "step": 1641984
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3107396471219493e-05,
+      "loss": 3.9252,
+      "step": 1642496
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3099010523708976e-05,
+      "loss": 3.9101,
+      "step": 1643008
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3090624576198456e-05,
+      "loss": 3.9227,
+      "step": 1643520
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3082238628687936e-05,
+      "loss": 3.9192,
+      "step": 1644032
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3073852681177413e-05,
+      "loss": 3.9148,
+      "step": 1644544
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3065466733666893e-05,
+      "loss": 3.9254,
+      "step": 1645056
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3057080786156373e-05,
+      "loss": 3.9071,
+      "step": 1645568
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3048694838645853e-05,
+      "loss": 3.9097,
+      "step": 1646080
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3040308891135336e-05,
+      "loss": 3.9164,
+      "step": 1646592
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3031922943624816e-05,
+      "loss": 3.9219,
+      "step": 1647104
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3023536996114296e-05,
+      "loss": 3.9128,
+      "step": 1647616
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.3015151048603772e-05,
+      "loss": 3.9183,
+      "step": 1648128
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.3006765101093252e-05,
+      "loss": 3.9063,
+      "step": 1648640
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.2998379153582732e-05,
+      "loss": 3.9156,
+      "step": 1649152
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.2990009584875945e-05,
+      "loss": 3.9071,
+      "step": 1649664
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.2981623637365425e-05,
+      "loss": 3.9161,
+      "step": 1650176
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.2973237689854905e-05,
+      "loss": 3.9311,
+      "step": 1650688
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.2964868121148117e-05,
+      "loss": 3.9163,
+      "step": 1651200
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.2956482173637594e-05,
+      "loss": 3.922,
+      "step": 1651712
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.2948096226127074e-05,
+      "loss": 3.9101,
+      "step": 1652224
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.2939710278616554e-05,
+      "loss": 3.9135,
+      "step": 1652736
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.2931324331106037e-05,
+      "loss": 3.9157,
+      "step": 1653248
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.2922938383595517e-05,
+      "loss": 3.9258,
+      "step": 1653760
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.2914552436084997e-05,
+      "loss": 3.9178,
+      "step": 1654272
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.2906166488574477e-05,
+      "loss": 3.9195,
+      "step": 1654784
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.2897780541063954e-05,
+      "loss": 3.9106,
+      "step": 1655296
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.2889410972357166e-05,
+      "loss": 3.9261,
+      "step": 1655808
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.2881025024846646e-05,
+      "loss": 3.905,
+      "step": 1656320
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.287265545613986e-05,
+      "loss": 3.9196,
+      "step": 1656832
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.286426950862934e-05,
+      "loss": 3.9061,
+      "step": 1657344
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.285588356111882e-05,
+      "loss": 3.9223,
+      "step": 1657856
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.28474976136083e-05,
+      "loss": 3.9101,
+      "step": 1658368
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.2839111666097775e-05,
+      "loss": 3.9149,
+      "step": 1658880
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.2830725718587255e-05,
+      "loss": 3.9109,
+      "step": 1659392
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.282235614988047e-05,
+      "loss": 3.9108,
+      "step": 1659904
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.281397020236995e-05,
+      "loss": 3.9136,
+      "step": 1660416
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.2805584254859427e-05,
+      "loss": 3.914,
+      "step": 1660928
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.2797198307348907e-05,
+      "loss": 3.9218,
+      "step": 1661440
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.2788812359838387e-05,
+      "loss": 3.9285,
+      "step": 1661952
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.2780426412327867e-05,
+      "loss": 3.9211,
+      "step": 1662464
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.2772040464817347e-05,
+      "loss": 3.9199,
+      "step": 1662976
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.276365451730683e-05,
+      "loss": 3.9048,
+      "step": 1663488
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.2755268569796307e-05,
+      "loss": 3.9105,
+      "step": 1664000
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.2746882622285787e-05,
+      "loss": 3.9143,
+      "step": 1664512
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.2738513053579e-05,
+      "loss": 3.9257,
+      "step": 1665024
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.273012710606848e-05,
+      "loss": 3.8991,
+      "step": 1665536
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.2721773916165424e-05,
+      "loss": 3.9203,
+      "step": 1666048
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.27133879686549e-05,
+      "loss": 3.9182,
+      "step": 1666560
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.270500202114438e-05,
+      "loss": 3.9165,
+      "step": 1667072
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.269661607363386e-05,
+      "loss": 3.9135,
+      "step": 1667584
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.268823012612334e-05,
+      "loss": 3.9182,
+      "step": 1668096
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.267984417861282e-05,
+      "loss": 3.9089,
+      "step": 1668608
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.26714582311023e-05,
+      "loss": 3.9253,
+      "step": 1669120
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.266307228359178e-05,
+      "loss": 3.9212,
+      "step": 1669632
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.265468633608126e-05,
+      "loss": 3.9253,
+      "step": 1670144
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.264630038857074e-05,
+      "loss": 3.9064,
+      "step": 1670656
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.2637930819863953e-05,
+      "loss": 3.9236,
+      "step": 1671168
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.262954487235343e-05,
+      "loss": 3.9237,
+      "step": 1671680
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.262115892484291e-05,
+      "loss": 3.9125,
+      "step": 1672192
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.2612772977332393e-05,
+      "loss": 3.9184,
+      "step": 1672704
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.2604387029821873e-05,
+      "loss": 3.9229,
+      "step": 1673216
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.2596001082311353e-05,
+      "loss": 3.9201,
+      "step": 1673728
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.2587631513604562e-05,
+      "loss": 3.9332,
+      "step": 1674240
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.2579245566094042e-05,
+      "loss": 3.9151,
+      "step": 1674752
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.2570859618583522e-05,
+      "loss": 3.9197,
+      "step": 1675264
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.2562473671073002e-05,
+      "loss": 3.9136,
+      "step": 1675776
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.2554087723562485e-05,
+      "loss": 3.9181,
+      "step": 1676288
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.2545701776051962e-05,
+      "loss": 3.9124,
+      "step": 1676800
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.253731582854144e-05,
+      "loss": 3.9118,
+      "step": 1677312
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.252892988103092e-05,
+      "loss": 3.9161,
+      "step": 1677824
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.2520576691127863e-05,
+      "loss": 3.9042,
+      "step": 1678336
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.2512190743617347e-05,
+      "loss": 3.9201,
+      "step": 1678848
+    },
+    {
+      "epoch": 0.03,
+      "eval_loss": 4.004070281982422,
+      "eval_runtime": 292.0611,
+      "eval_samples_per_second": 1306.545,
+      "eval_steps_per_second": 40.83,
+      "step": 1679040
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.2503804796106827e-05,
+      "loss": 3.9189,
+      "step": 1679360
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.2495418848596307e-05,
+      "loss": 3.9116,
+      "step": 1679872
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.2487032901085786e-05,
+      "loss": 3.9211,
+      "step": 1680384
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.2478646953575263e-05,
+      "loss": 3.9195,
+      "step": 1680896
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.2470261006064743e-05,
+      "loss": 3.9281,
+      "step": 1681408
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.2461875058554223e-05,
+      "loss": 3.9162,
+      "step": 1681920
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.2453505489847435e-05,
+      "loss": 3.9173,
+      "step": 1682432
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.2445119542336915e-05,
+      "loss": 3.9065,
+      "step": 1682944
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.2436733594826395e-05,
+      "loss": 3.9154,
+      "step": 1683456
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.2428364026119608e-05,
+      "loss": 3.9186,
+      "step": 1683968
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.2419978078609084e-05,
+      "loss": 3.9164,
+      "step": 1684480
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.2411592131098564e-05,
+      "loss": 3.9139,
+      "step": 1684992
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.240322256239178e-05,
+      "loss": 3.9133,
+      "step": 1685504
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.239483661488126e-05,
+      "loss": 3.9142,
+      "step": 1686016
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.2386450667370737e-05,
+      "loss": 3.9124,
+      "step": 1686528
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.2378064719860217e-05,
+      "loss": 3.9034,
+      "step": 1687040
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.2369678772349697e-05,
+      "loss": 3.9096,
+      "step": 1687552
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.2361292824839177e-05,
+      "loss": 3.9135,
+      "step": 1688064
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.2352906877328657e-05,
+      "loss": 3.9163,
+      "step": 1688576
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.234452092981814e-05,
+      "loss": 3.9262,
+      "step": 1689088
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.2336134982307616e-05,
+      "loss": 3.9228,
+      "step": 1689600
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.2327749034797096e-05,
+      "loss": 3.9264,
+      "step": 1690112
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.231937946609031e-05,
+      "loss": 3.9084,
+      "step": 1690624
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.231099351857979e-05,
+      "loss": 3.9179,
+      "step": 1691136
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.230260757106927e-05,
+      "loss": 3.9129,
+      "step": 1691648
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.229422162355875e-05,
+      "loss": 3.9224,
+      "step": 1692160
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.228583567604823e-05,
+      "loss": 3.9108,
+      "step": 1692672
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.227744972853771e-05,
+      "loss": 3.913,
+      "step": 1693184
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.226906378102719e-05,
+      "loss": 3.9101,
+      "step": 1693696
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 2.226067783351667e-05,
+      "loss": 3.9129,
+      "step": 1694208
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.2252308264809878e-05,
+      "loss": 3.9136,
+      "step": 1694720
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.224392231729936e-05,
+      "loss": 3.9249,
+      "step": 1695232
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.223555274859257e-05,
+      "loss": 3.9211,
+      "step": 1695744
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.2227183179885783e-05,
+      "loss": 3.9204,
+      "step": 1696256
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.2218797232375263e-05,
+      "loss": 3.9139,
+      "step": 1696768
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.221042766366847e-05,
+      "loss": 3.9194,
+      "step": 1697280
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.2202041716157955e-05,
+      "loss": 3.92,
+      "step": 1697792
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.2193655768647435e-05,
+      "loss": 3.9064,
+      "step": 1698304
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.218526982113691e-05,
+      "loss": 3.9015,
+      "step": 1698816
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.217688387362639e-05,
+      "loss": 3.9016,
+      "step": 1699328
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.216849792611587e-05,
+      "loss": 3.9097,
+      "step": 1699840
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.216011197860535e-05,
+      "loss": 3.9158,
+      "step": 1700352
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.215172603109483e-05,
+      "loss": 3.9138,
+      "step": 1700864
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.214334008358431e-05,
+      "loss": 3.908,
+      "step": 1701376
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.2134970514877524e-05,
+      "loss": 3.9156,
+      "step": 1701888
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.2126584567367004e-05,
+      "loss": 3.9135,
+      "step": 1702400
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.2118198619856484e-05,
+      "loss": 3.9149,
+      "step": 1702912
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.2109812672345964e-05,
+      "loss": 3.9072,
+      "step": 1703424
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.2101426724835444e-05,
+      "loss": 3.8901,
+      "step": 1703936
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.2093040777324924e-05,
+      "loss": 3.9328,
+      "step": 1704448
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.2084654829814404e-05,
+      "loss": 3.9158,
+      "step": 1704960
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.2076268882303884e-05,
+      "loss": 3.9223,
+      "step": 1705472
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.2067882934793363e-05,
+      "loss": 3.9105,
+      "step": 1705984
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.2059496987282843e-05,
+      "loss": 3.9134,
+      "step": 1706496
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.2051111039772323e-05,
+      "loss": 3.9065,
+      "step": 1707008
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.2042741471065532e-05,
+      "loss": 3.9107,
+      "step": 1707520
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.2034355523555016e-05,
+      "loss": 3.9125,
+      "step": 1708032
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.2025969576044496e-05,
+      "loss": 3.9094,
+      "step": 1708544
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.2017583628533976e-05,
+      "loss": 3.9295,
+      "step": 1709056
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.2009197681023452e-05,
+      "loss": 3.9084,
+      "step": 1709568
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.2000811733512932e-05,
+      "loss": 3.8986,
+      "step": 1710080
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.1992425786002412e-05,
+      "loss": 3.9221,
+      "step": 1710592
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.1984039838491892e-05,
+      "loss": 3.8944,
+      "step": 1711104
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.1975670269785105e-05,
+      "loss": 3.8963,
+      "step": 1711616
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.1967300701078317e-05,
+      "loss": 3.9222,
+      "step": 1712128
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.1958914753567797e-05,
+      "loss": 3.9169,
+      "step": 1712640
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.1950528806057274e-05,
+      "loss": 3.9044,
+      "step": 1713152
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.1942159237350486e-05,
+      "loss": 3.9131,
+      "step": 1713664
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.193377328983997e-05,
+      "loss": 3.8911,
+      "step": 1714176
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.192538734232945e-05,
+      "loss": 3.9077,
+      "step": 1714688
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.1917001394818926e-05,
+      "loss": 3.9097,
+      "step": 1715200
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.1908615447308406e-05,
+      "loss": 3.9064,
+      "step": 1715712
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.1900229499797886e-05,
+      "loss": 3.9131,
+      "step": 1716224
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.1891843552287366e-05,
+      "loss": 3.9142,
+      "step": 1716736
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.1883457604776846e-05,
+      "loss": 3.9267,
+      "step": 1717248
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.1875071657266326e-05,
+      "loss": 3.9063,
+      "step": 1717760
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.186668570975581e-05,
+      "loss": 3.9088,
+      "step": 1718272
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.1858299762245286e-05,
+      "loss": 3.9161,
+      "step": 1718784
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.1849913814734766e-05,
+      "loss": 3.9039,
+      "step": 1719296
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.1841544246027978e-05,
+      "loss": 3.9147,
+      "step": 1719808
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.1833158298517455e-05,
+      "loss": 3.912,
+      "step": 1720320
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.1824772351006935e-05,
+      "loss": 3.9073,
+      "step": 1720832
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.1816386403496418e-05,
+      "loss": 3.9239,
+      "step": 1721344
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.180801683478963e-05,
+      "loss": 3.8967,
+      "step": 1721856
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.1799630887279107e-05,
+      "loss": 3.9023,
+      "step": 1722368
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.1791244939768587e-05,
+      "loss": 3.913,
+      "step": 1722880
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.1782858992258067e-05,
+      "loss": 3.9146,
+      "step": 1723392
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.177448942355128e-05,
+      "loss": 3.9088,
+      "step": 1723904
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 2.176610347604076e-05,
+      "loss": 3.9107,
+      "step": 1724416
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.1757733907333972e-05,
+      "loss": 3.897,
+      "step": 1724928
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.1749347959823452e-05,
+      "loss": 3.9074,
+      "step": 1725440
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.174096201231293e-05,
+      "loss": 3.9021,
+      "step": 1725952
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.173257606480241e-05,
+      "loss": 3.9107,
+      "step": 1726464
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.172419011729189e-05,
+      "loss": 3.923,
+      "step": 1726976
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.1715820548585104e-05,
+      "loss": 3.9094,
+      "step": 1727488
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.170743460107458e-05,
+      "loss": 3.9147,
+      "step": 1728000
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.1699065032367793e-05,
+      "loss": 3.903,
+      "step": 1728512
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.1690679084857273e-05,
+      "loss": 3.9061,
+      "step": 1729024
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.168229313734675e-05,
+      "loss": 3.9062,
+      "step": 1729536
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.1673907189836233e-05,
+      "loss": 3.9207,
+      "step": 1730048
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.1665521242325713e-05,
+      "loss": 3.9125,
+      "step": 1730560
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.1657135294815193e-05,
+      "loss": 3.9148,
+      "step": 1731072
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.1648749347304673e-05,
+      "loss": 3.9022,
+      "step": 1731584
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.1640363399794153e-05,
+      "loss": 3.9179,
+      "step": 1732096
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.1631993831087362e-05,
+      "loss": 3.9016,
+      "step": 1732608
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.1623607883576842e-05,
+      "loss": 3.9115,
+      "step": 1733120
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.1615221936066325e-05,
+      "loss": 3.8972,
+      "step": 1733632
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.1606835988555805e-05,
+      "loss": 3.913,
+      "step": 1734144
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.1598450041045285e-05,
+      "loss": 3.9041,
+      "step": 1734656
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.1590064093534762e-05,
+      "loss": 3.9095,
+      "step": 1735168
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.1581678146024242e-05,
+      "loss": 3.8997,
+      "step": 1735680
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.1573292198513722e-05,
+      "loss": 3.9099,
+      "step": 1736192
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.1564922629806934e-05,
+      "loss": 3.9038,
+      "step": 1736704
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.1556536682296414e-05,
+      "loss": 3.9058,
+      "step": 1737216
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.1548150734785894e-05,
+      "loss": 3.9206,
+      "step": 1737728
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.1539764787275374e-05,
+      "loss": 3.9209,
+      "step": 1738240
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.1531395218568583e-05,
+      "loss": 3.9147,
+      "step": 1738752
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.1523009271058063e-05,
+      "loss": 3.9126,
+      "step": 1739264
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.1514623323547543e-05,
+      "loss": 3.9019,
+      "step": 1739776
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.1506237376037026e-05,
+      "loss": 3.9035,
+      "step": 1740288
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.1497851428526506e-05,
+      "loss": 3.9103,
+      "step": 1740800
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.1489481859819715e-05,
+      "loss": 3.9135,
+      "step": 1741312
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.1481095912309195e-05,
+      "loss": 3.8946,
+      "step": 1741824
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.1472709964798675e-05,
+      "loss": 3.9132,
+      "step": 1742336
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.1464324017288155e-05,
+      "loss": 3.9136,
+      "step": 1742848
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.1455938069777635e-05,
+      "loss": 3.9093,
+      "step": 1743360
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.1447552122267115e-05,
+      "loss": 3.9063,
+      "step": 1743872
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.1439166174756595e-05,
+      "loss": 3.9078,
+      "step": 1744384
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.1430780227246075e-05,
+      "loss": 3.9009,
+      "step": 1744896
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.1422394279735555e-05,
+      "loss": 3.9207,
+      "step": 1745408
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.1414024711028764e-05,
+      "loss": 3.9154,
+      "step": 1745920
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.140565514232198e-05,
+      "loss": 3.915,
+      "step": 1746432
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.139726919481146e-05,
+      "loss": 3.9001,
+      "step": 1746944
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.138888324730094e-05,
+      "loss": 3.9182,
+      "step": 1747456
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.1380497299790417e-05,
+      "loss": 3.9161,
+      "step": 1747968
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.1372111352279897e-05,
+      "loss": 3.9061,
+      "step": 1748480
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.1363725404769376e-05,
+      "loss": 3.9152,
+      "step": 1748992
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.1355339457258856e-05,
+      "loss": 3.9136,
+      "step": 1749504
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.1346953509748336e-05,
+      "loss": 3.9133,
+      "step": 1750016
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.133858394104155e-05,
+      "loss": 3.9277,
+      "step": 1750528
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.133019799353103e-05,
+      "loss": 3.9055,
+      "step": 1751040
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.132181204602051e-05,
+      "loss": 3.9136,
+      "step": 1751552
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.131342609850999e-05,
+      "loss": 3.9115,
+      "step": 1752064
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.130504015099947e-05,
+      "loss": 3.9119,
+      "step": 1752576
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.129665420348895e-05,
+      "loss": 3.9033,
+      "step": 1753088
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.128828463478216e-05,
+      "loss": 3.9034,
+      "step": 1753600
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.127991506607537e-05,
+      "loss": 3.9081,
+      "step": 1754112
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.127152911856485e-05,
+      "loss": 3.9008,
+      "step": 1754624
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 2.126314317105433e-05,
+      "loss": 3.9119,
+      "step": 1755136
+    },
+    {
+      "epoch": 1.03,
+      "eval_loss": 4.001281261444092,
+      "eval_runtime": 291.7896,
+      "eval_samples_per_second": 1307.761,
+      "eval_steps_per_second": 40.868,
+      "step": 1755360
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.125475722354381e-05,
+      "loss": 3.9151,
+      "step": 1755648
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.124637127603329e-05,
+      "loss": 3.9042,
+      "step": 1756160
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.123798532852277e-05,
+      "loss": 3.9137,
+      "step": 1756672
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.122959938101225e-05,
+      "loss": 3.9142,
+      "step": 1757184
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.122121343350173e-05,
+      "loss": 3.9213,
+      "step": 1757696
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.121282748599121e-05,
+      "loss": 3.9077,
+      "step": 1758208
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.120444153848069e-05,
+      "loss": 3.9144,
+      "step": 1758720
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.119605559097017e-05,
+      "loss": 3.9037,
+      "step": 1759232
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.118766964345965e-05,
+      "loss": 3.9011,
+      "step": 1759744
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.117928369594913e-05,
+      "loss": 3.917,
+      "step": 1760256
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.117089774843861e-05,
+      "loss": 3.9093,
+      "step": 1760768
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.116251180092809e-05,
+      "loss": 3.9052,
+      "step": 1761280
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.1154142232221302e-05,
+      "loss": 3.9156,
+      "step": 1761792
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.114575628471078e-05,
+      "loss": 3.9024,
+      "step": 1762304
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.113737033720026e-05,
+      "loss": 3.9025,
+      "step": 1762816
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.1128984389689742e-05,
+      "loss": 3.8963,
+      "step": 1763328
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.1120598442179222e-05,
+      "loss": 3.9028,
+      "step": 1763840
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.1112212494668702e-05,
+      "loss": 3.9107,
+      "step": 1764352
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.1103826547158182e-05,
+      "loss": 3.9033,
+      "step": 1764864
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.109544059964766e-05,
+      "loss": 3.923,
+      "step": 1765376
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.108705465213714e-05,
+      "loss": 3.9163,
+      "step": 1765888
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.107868508343035e-05,
+      "loss": 3.9174,
+      "step": 1766400
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.1070299135919834e-05,
+      "loss": 3.905,
+      "step": 1766912
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.106191318840931e-05,
+      "loss": 3.9057,
+      "step": 1767424
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.105352724089879e-05,
+      "loss": 3.9156,
+      "step": 1767936
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.104514129338827e-05,
+      "loss": 3.9143,
+      "step": 1768448
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.103677172468148e-05,
+      "loss": 3.9044,
+      "step": 1768960
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.1028402155974696e-05,
+      "loss": 3.9075,
+      "step": 1769472
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.1020016208464176e-05,
+      "loss": 3.899,
+      "step": 1769984
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.1011630260953656e-05,
+      "loss": 3.9077,
+      "step": 1770496
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.1003244313443132e-05,
+      "loss": 3.9084,
+      "step": 1771008
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.0994858365932612e-05,
+      "loss": 3.9172,
+      "step": 1771520
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.0986472418422092e-05,
+      "loss": 3.9134,
+      "step": 1772032
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.0978086470911572e-05,
+      "loss": 3.9135,
+      "step": 1772544
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.0969700523401052e-05,
+      "loss": 3.9084,
+      "step": 1773056
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.0961330954694264e-05,
+      "loss": 3.91,
+      "step": 1773568
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.0952961385987477e-05,
+      "loss": 3.9131,
+      "step": 1774080
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.0944575438476953e-05,
+      "loss": 3.9001,
+      "step": 1774592
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.0936189490966433e-05,
+      "loss": 3.898,
+      "step": 1775104
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.0927803543455913e-05,
+      "loss": 3.8942,
+      "step": 1775616
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.0919417595945397e-05,
+      "loss": 3.9025,
+      "step": 1776128
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.0911031648434877e-05,
+      "loss": 3.9048,
+      "step": 1776640
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.0902645700924357e-05,
+      "loss": 3.911,
+      "step": 1777152
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.0894259753413837e-05,
+      "loss": 3.9053,
+      "step": 1777664
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.0885873805903313e-05,
+      "loss": 3.9051,
+      "step": 1778176
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.0877487858392793e-05,
+      "loss": 3.9063,
+      "step": 1778688
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.0869101910882273e-05,
+      "loss": 3.9115,
+      "step": 1779200
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.0860715963371753e-05,
+      "loss": 3.8994,
+      "step": 1779712
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.0852346394664966e-05,
+      "loss": 3.8844,
+      "step": 1780224
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.0843960447154445e-05,
+      "loss": 3.9208,
+      "step": 1780736
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.0835590878447658e-05,
+      "loss": 3.912,
+      "step": 1781248
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.0827204930937135e-05,
+      "loss": 3.917,
+      "step": 1781760
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.0818818983426618e-05,
+      "loss": 3.9044,
+      "step": 1782272
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.0810433035916098e-05,
+      "loss": 3.9071,
+      "step": 1782784
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.0802047088405578e-05,
+      "loss": 3.9016,
+      "step": 1783296
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.0793661140895058e-05,
+      "loss": 3.9022,
+      "step": 1783808
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.0785275193384538e-05,
+      "loss": 3.9099,
+      "step": 1784320
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.0776889245874018e-05,
+      "loss": 3.8981,
+      "step": 1784832
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.0768519677167227e-05,
+      "loss": 3.9256,
+      "step": 1785344
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.0760133729656707e-05,
+      "loss": 3.9064,
+      "step": 1785856
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.075174778214619e-05,
+      "loss": 3.887,
+      "step": 1786368
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.074336183463567e-05,
+      "loss": 3.9163,
+      "step": 1786880
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.0734975887125147e-05,
+      "loss": 3.8893,
+      "step": 1787392
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.072660631841836e-05,
+      "loss": 3.8891,
+      "step": 1787904
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.0718236749711568e-05,
+      "loss": 3.916,
+      "step": 1788416
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.070985080220105e-05,
+      "loss": 3.9071,
+      "step": 1788928
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.070146485469053e-05,
+      "loss": 3.8965,
+      "step": 1789440
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.069307890718001e-05,
+      "loss": 3.9047,
+      "step": 1789952
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.068469295966949e-05,
+      "loss": 3.8875,
+      "step": 1790464
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.06763233909627e-05,
+      "loss": 3.8995,
+      "step": 1790976
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.066793744345218e-05,
+      "loss": 3.9057,
+      "step": 1791488
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.065955149594166e-05,
+      "loss": 3.8971,
+      "step": 1792000
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.0651165548431144e-05,
+      "loss": 3.9068,
+      "step": 1792512
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.064277960092062e-05,
+      "loss": 3.9049,
+      "step": 1793024
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.06343936534101e-05,
+      "loss": 3.9187,
+      "step": 1793536
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.062600770589958e-05,
+      "loss": 3.8996,
+      "step": 1794048
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.061762175838906e-05,
+      "loss": 3.9054,
+      "step": 1794560
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.060923581087854e-05,
+      "loss": 3.907,
+      "step": 1795072
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.0600882620975485e-05,
+      "loss": 3.8977,
+      "step": 1795584
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.0592496673464965e-05,
+      "loss": 3.9102,
+      "step": 1796096
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.058411072595444e-05,
+      "loss": 3.9064,
+      "step": 1796608
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.057572477844392e-05,
+      "loss": 3.8967,
+      "step": 1797120
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.05673388309334e-05,
+      "loss": 3.9182,
+      "step": 1797632
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.0558969262226614e-05,
+      "loss": 3.8909,
+      "step": 1798144
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.0550583314716094e-05,
+      "loss": 3.8952,
+      "step": 1798656
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.0542197367205574e-05,
+      "loss": 3.9081,
+      "step": 1799168
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.0533811419695054e-05,
+      "loss": 3.9057,
+      "step": 1799680
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.0525425472184534e-05,
+      "loss": 3.9004,
+      "step": 1800192
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 2.0517055903477743e-05,
+      "loss": 3.909,
+      "step": 1800704
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.0508669955967226e-05,
+      "loss": 3.8908,
+      "step": 1801216
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.0500284008456706e-05,
+      "loss": 3.8983,
+      "step": 1801728
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.0491898060946186e-05,
+      "loss": 3.8991,
+      "step": 1802240
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.0483512113435666e-05,
+      "loss": 3.8995,
+      "step": 1802752
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.0475126165925146e-05,
+      "loss": 3.9165,
+      "step": 1803264
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.0466756597218355e-05,
+      "loss": 3.9025,
+      "step": 1803776
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.0458370649707835e-05,
+      "loss": 3.9105,
+      "step": 1804288
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.0449984702197315e-05,
+      "loss": 3.8965,
+      "step": 1804800
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.0441598754686795e-05,
+      "loss": 3.9018,
+      "step": 1805312
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.0433212807176275e-05,
+      "loss": 3.9015,
+      "step": 1805824
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.0424826859665755e-05,
+      "loss": 3.9106,
+      "step": 1806336
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.0416457290958967e-05,
+      "loss": 3.906,
+      "step": 1806848
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.0408071343448444e-05,
+      "loss": 3.9098,
+      "step": 1807360
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.0399685395937927e-05,
+      "loss": 3.899,
+      "step": 1807872
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.0391299448427407e-05,
+      "loss": 3.9054,
+      "step": 1808384
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.0382913500916887e-05,
+      "loss": 3.9016,
+      "step": 1808896
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.0374527553406367e-05,
+      "loss": 3.9017,
+      "step": 1809408
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.0366157984699576e-05,
+      "loss": 3.8924,
+      "step": 1809920
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.0357772037189056e-05,
+      "loss": 3.9094,
+      "step": 1810432
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.0349386089678536e-05,
+      "loss": 3.8953,
+      "step": 1810944
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.034100014216802e-05,
+      "loss": 3.901,
+      "step": 1811456
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.03326141946575e-05,
+      "loss": 3.8926,
+      "step": 1811968
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.0324228247146976e-05,
+      "loss": 3.9019,
+      "step": 1812480
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.0315842299636456e-05,
+      "loss": 3.8981,
+      "step": 1812992
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.0307456352125936e-05,
+      "loss": 3.8989,
+      "step": 1813504
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.029910316222288e-05,
+      "loss": 3.9086,
+      "step": 1814016
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.029071721471236e-05,
+      "loss": 3.9163,
+      "step": 1814528
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.028233126720184e-05,
+      "loss": 3.9142,
+      "step": 1815040
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.027394531969132e-05,
+      "loss": 3.902,
+      "step": 1815552
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.02655593721808e-05,
+      "loss": 3.8944,
+      "step": 1816064
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.025718980347401e-05,
+      "loss": 3.897,
+      "step": 1816576
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.024880385596349e-05,
+      "loss": 3.9031,
+      "step": 1817088
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.0240417908452973e-05,
+      "loss": 3.9061,
+      "step": 1817600
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.023203196094245e-05,
+      "loss": 3.8896,
+      "step": 1818112
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.0223662392235662e-05,
+      "loss": 3.9061,
+      "step": 1818624
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.0215276444725142e-05,
+      "loss": 3.909,
+      "step": 1819136
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.0206890497214622e-05,
+      "loss": 3.8997,
+      "step": 1819648
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.01985045497041e-05,
+      "loss": 3.899,
+      "step": 1820160
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.0190118602193582e-05,
+      "loss": 3.904,
+      "step": 1820672
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.0181732654683062e-05,
+      "loss": 3.8969,
+      "step": 1821184
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.0173346707172542e-05,
+      "loss": 3.9103,
+      "step": 1821696
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.016497713846575e-05,
+      "loss": 3.906,
+      "step": 1822208
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.015659119095523e-05,
+      "loss": 3.9122,
+      "step": 1822720
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.014820524344471e-05,
+      "loss": 3.8939,
+      "step": 1823232
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.013981929593419e-05,
+      "loss": 3.911,
+      "step": 1823744
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.0131433348423674e-05,
+      "loss": 3.9098,
+      "step": 1824256
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.0123047400913154e-05,
+      "loss": 3.9008,
+      "step": 1824768
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.011466145340263e-05,
+      "loss": 3.9052,
+      "step": 1825280
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.010627550589211e-05,
+      "loss": 3.9045,
+      "step": 1825792
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.0097905937185323e-05,
+      "loss": 3.9078,
+      "step": 1826304
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.0089519989674803e-05,
+      "loss": 3.9225,
+      "step": 1826816
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.0081150420968016e-05,
+      "loss": 3.8963,
+      "step": 1827328
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.0072764473457496e-05,
+      "loss": 3.9103,
+      "step": 1827840
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.0064378525946976e-05,
+      "loss": 3.9058,
+      "step": 1828352
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.0055992578436456e-05,
+      "loss": 3.9044,
+      "step": 1828864
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.0047606630925932e-05,
+      "loss": 3.8987,
+      "step": 1829376
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.0039220683415412e-05,
+      "loss": 3.8944,
+      "step": 1829888
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.0030851114708628e-05,
+      "loss": 3.9036,
+      "step": 1830400
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.0022465167198105e-05,
+      "loss": 3.894,
+      "step": 1830912
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 2.0014079219687585e-05,
+      "loss": 3.9052,
+      "step": 1831424
+    },
+    {
+      "epoch": 0.03,
+      "eval_loss": 3.998615026473999,
+      "eval_runtime": 293.0898,
+      "eval_samples_per_second": 1301.959,
+      "eval_steps_per_second": 40.687,
+      "step": 1831680
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 2.0005693272177065e-05,
+      "loss": 3.9013,
+      "step": 1831936
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.9997307324666544e-05,
+      "loss": 3.8998,
+      "step": 1832448
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.9988921377156024e-05,
+      "loss": 3.9066,
+      "step": 1832960
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.9980535429645504e-05,
+      "loss": 3.9071,
+      "step": 1833472
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.9972149482134984e-05,
+      "loss": 3.9131,
+      "step": 1833984
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.9963763534624464e-05,
+      "loss": 3.9026,
+      "step": 1834496
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.9955377587113944e-05,
+      "loss": 3.9105,
+      "step": 1835008
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.9946991639603424e-05,
+      "loss": 3.8957,
+      "step": 1835520
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.9938605692092904e-05,
+      "loss": 3.8973,
+      "step": 1836032
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.9930219744582384e-05,
+      "loss": 3.9112,
+      "step": 1836544
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.9921833797071864e-05,
+      "loss": 3.9037,
+      "step": 1837056
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.9913447849561344e-05,
+      "loss": 3.8976,
+      "step": 1837568
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.9905078280854557e-05,
+      "loss": 3.911,
+      "step": 1838080
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.9896692333344036e-05,
+      "loss": 3.8946,
+      "step": 1838592
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.9888322764637246e-05,
+      "loss": 3.8981,
+      "step": 1839104
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.9879936817126726e-05,
+      "loss": 3.8854,
+      "step": 1839616
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.9871550869616205e-05,
+      "loss": 3.9008,
+      "step": 1840128
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.9863164922105685e-05,
+      "loss": 3.9068,
+      "step": 1840640
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.985477897459517e-05,
+      "loss": 3.895,
+      "step": 1841152
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.9846393027084645e-05,
+      "loss": 3.915,
+      "step": 1841664
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.9838007079574125e-05,
+      "loss": 3.91,
+      "step": 1842176
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.9829621132063605e-05,
+      "loss": 3.9107,
+      "step": 1842688
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.9821235184553085e-05,
+      "loss": 3.9029,
+      "step": 1843200
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.9812849237042565e-05,
+      "loss": 3.904,
+      "step": 1843712
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.9804463289532045e-05,
+      "loss": 3.9061,
+      "step": 1844224
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.9796077342021525e-05,
+      "loss": 3.9048,
+      "step": 1844736
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.9787691394511005e-05,
+      "loss": 3.8959,
+      "step": 1845248
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.9779305447000485e-05,
+      "loss": 3.9029,
+      "step": 1845760
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.9770919499489965e-05,
+      "loss": 3.8943,
+      "step": 1846272
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.9762549930783174e-05,
+      "loss": 3.9011,
+      "step": 1846784
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.9754163983272654e-05,
+      "loss": 3.9013,
+      "step": 1847296
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.9745778035762137e-05,
+      "loss": 3.9071,
+      "step": 1847808
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.9737392088251617e-05,
+      "loss": 3.9107,
+      "step": 1848320
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.9729022519544826e-05,
+      "loss": 3.9116,
+      "step": 1848832
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.9720636572034306e-05,
+      "loss": 3.8971,
+      "step": 1849344
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.971226700332752e-05,
+      "loss": 3.9067,
+      "step": 1849856
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.9703881055817e-05,
+      "loss": 3.9079,
+      "step": 1850368
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.969549510830648e-05,
+      "loss": 3.891,
+      "step": 1850880
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.968710916079596e-05,
+      "loss": 3.8956,
+      "step": 1851392
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.967872321328544e-05,
+      "loss": 3.8905,
+      "step": 1851904
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.967033726577492e-05,
+      "loss": 3.8925,
+      "step": 1852416
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.96619513182644e-05,
+      "loss": 3.9005,
+      "step": 1852928
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.965356537075388e-05,
+      "loss": 3.9047,
+      "step": 1853440
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.9645179423243355e-05,
+      "loss": 3.8972,
+      "step": 1853952
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.963679347573284e-05,
+      "loss": 3.896,
+      "step": 1854464
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.962840752822232e-05,
+      "loss": 3.8993,
+      "step": 1854976
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.96200215807118e-05,
+      "loss": 3.9097,
+      "step": 1855488
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.9611635633201278e-05,
+      "loss": 3.8916,
+      "step": 1856000
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.9603249685690758e-05,
+      "loss": 3.8808,
+      "step": 1856512
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.9594880116983967e-05,
+      "loss": 3.9123,
+      "step": 1857024
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.9586494169473447e-05,
+      "loss": 3.9045,
+      "step": 1857536
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.957810822196293e-05,
+      "loss": 3.9102,
+      "step": 1858048
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.956972227445241e-05,
+      "loss": 3.8981,
+      "step": 1858560
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.9561336326941887e-05,
+      "loss": 3.8969,
+      "step": 1859072
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.9552950379431367e-05,
+      "loss": 3.8978,
+      "step": 1859584
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.954458081072458e-05,
+      "loss": 3.8977,
+      "step": 1860096
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.953619486321406e-05,
+      "loss": 3.9063,
+      "step": 1860608
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.9527825294507272e-05,
+      "loss": 3.8894,
+      "step": 1861120
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.9519439346996752e-05,
+      "loss": 3.9221,
+      "step": 1861632
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.9511053399486232e-05,
+      "loss": 3.8955,
+      "step": 1862144
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.9502667451975712e-05,
+      "loss": 3.8804,
+      "step": 1862656
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.949428150446519e-05,
+      "loss": 3.9116,
+      "step": 1863168
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.948589555695467e-05,
+      "loss": 3.8913,
+      "step": 1863680
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.947750960944415e-05,
+      "loss": 3.8804,
+      "step": 1864192
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.9469123661933632e-05,
+      "loss": 3.9063,
+      "step": 1864704
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.946073771442311e-05,
+      "loss": 3.9041,
+      "step": 1865216
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.945235176691259e-05,
+      "loss": 3.8959,
+      "step": 1865728
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.9443965819402068e-05,
+      "loss": 3.8957,
+      "step": 1866240
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.9435579871891548e-05,
+      "loss": 3.8799,
+      "step": 1866752
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.942721030318476e-05,
+      "loss": 3.8925,
+      "step": 1867264
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.941882435567424e-05,
+      "loss": 3.9024,
+      "step": 1867776
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.9410454786967453e-05,
+      "loss": 3.8943,
+      "step": 1868288
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.9402068839456933e-05,
+      "loss": 3.899,
+      "step": 1868800
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.9393682891946413e-05,
+      "loss": 3.8973,
+      "step": 1869312
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.938529694443589e-05,
+      "loss": 3.9134,
+      "step": 1869824
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.937691099692537e-05,
+      "loss": 3.8972,
+      "step": 1870336
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.9368525049414853e-05,
+      "loss": 3.8992,
+      "step": 1870848
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.9360139101904333e-05,
+      "loss": 3.9009,
+      "step": 1871360
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.9351753154393813e-05,
+      "loss": 3.8943,
+      "step": 1871872
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.9343383585687022e-05,
+      "loss": 3.9045,
+      "step": 1872384
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.9334997638176502e-05,
+      "loss": 3.8984,
+      "step": 1872896
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.9326611690665982e-05,
+      "loss": 3.8967,
+      "step": 1873408
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.9318225743155462e-05,
+      "loss": 3.9083,
+      "step": 1873920
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.9309856174448674e-05,
+      "loss": 3.8862,
+      "step": 1874432
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.9301486605741887e-05,
+      "loss": 3.8922,
+      "step": 1874944
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.9293100658231363e-05,
+      "loss": 3.8997,
+      "step": 1875456
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.9284714710720843e-05,
+      "loss": 3.9006,
+      "step": 1875968
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.9276328763210323e-05,
+      "loss": 3.8914,
+      "step": 1876480
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.9267942815699807e-05,
+      "loss": 3.9034,
+      "step": 1876992
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.9259556868189286e-05,
+      "loss": 3.8893,
+      "step": 1877504
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.9251187299482496e-05,
+      "loss": 3.8915,
+      "step": 1878016
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.9242801351971976e-05,
+      "loss": 3.8891,
+      "step": 1878528
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.9234415404461455e-05,
+      "loss": 3.8941,
+      "step": 1879040
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.9226029456950935e-05,
+      "loss": 3.9128,
+      "step": 1879552
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.9217643509440415e-05,
+      "loss": 3.898,
+      "step": 1880064
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.9209257561929895e-05,
+      "loss": 3.902,
+      "step": 1880576
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.9200887993223108e-05,
+      "loss": 3.8937,
+      "step": 1881088
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.9192502045712588e-05,
+      "loss": 3.8928,
+      "step": 1881600
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.9184116098202068e-05,
+      "loss": 3.8956,
+      "step": 1882112
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.9175730150691544e-05,
+      "loss": 3.9057,
+      "step": 1882624
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.9167344203181024e-05,
+      "loss": 3.9006,
+      "step": 1883136
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.9158958255670508e-05,
+      "loss": 3.8983,
+      "step": 1883648
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.9150572308159988e-05,
+      "loss": 3.9004,
+      "step": 1884160
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.9142202739453197e-05,
+      "loss": 3.8984,
+      "step": 1884672
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.9133816791942677e-05,
+      "loss": 3.8956,
+      "step": 1885184
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.9125430844432157e-05,
+      "loss": 3.8951,
+      "step": 1885696
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.9117044896921637e-05,
+      "loss": 3.8875,
+      "step": 1886208
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.910867532821485e-05,
+      "loss": 3.9012,
+      "step": 1886720
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.910028938070433e-05,
+      "loss": 3.8915,
+      "step": 1887232
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.909190343319381e-05,
+      "loss": 3.8925,
+      "step": 1887744
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.908351748568329e-05,
+      "loss": 3.8914,
+      "step": 1888256
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.907513153817277e-05,
+      "loss": 3.8914,
+      "step": 1888768
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.9066761969465978e-05,
+      "loss": 3.8951,
+      "step": 1889280
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.905837602195546e-05,
+      "loss": 3.8923,
+      "step": 1889792
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.905000645324867e-05,
+      "loss": 3.907,
+      "step": 1890304
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.904162050573815e-05,
+      "loss": 3.9077,
+      "step": 1890816
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.903323455822763e-05,
+      "loss": 3.9066,
+      "step": 1891328
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.902484861071711e-05,
+      "loss": 3.897,
+      "step": 1891840
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.901646266320659e-05,
+      "loss": 3.8934,
+      "step": 1892352
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.900807671569607e-05,
+      "loss": 3.8914,
+      "step": 1892864
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.8999690768185554e-05,
+      "loss": 3.8994,
+      "step": 1893376
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.899130482067503e-05,
+      "loss": 3.9032,
+      "step": 1893888
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.8982935251968243e-05,
+      "loss": 3.8864,
+      "step": 1894400
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.8974549304457723e-05,
+      "loss": 3.8967,
+      "step": 1894912
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.89661633569472e-05,
+      "loss": 3.9022,
+      "step": 1895424
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.895777740943668e-05,
+      "loss": 3.8966,
+      "step": 1895936
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.8949391461926162e-05,
+      "loss": 3.8936,
+      "step": 1896448
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.8941005514415642e-05,
+      "loss": 3.8986,
+      "step": 1896960
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.8932619566905122e-05,
+      "loss": 3.8895,
+      "step": 1897472
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.892424999819833e-05,
+      "loss": 3.9077,
+      "step": 1897984
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.891586405068781e-05,
+      "loss": 3.898,
+      "step": 1898496
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.890747810317729e-05,
+      "loss": 3.9103,
+      "step": 1899008
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.889909215566677e-05,
+      "loss": 3.8887,
+      "step": 1899520
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.8890706208156255e-05,
+      "loss": 3.9007,
+      "step": 1900032
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.8882320260645735e-05,
+      "loss": 3.9075,
+      "step": 1900544
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.887393431313521e-05,
+      "loss": 3.8949,
+      "step": 1901056
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.886554836562469e-05,
+      "loss": 3.8973,
+      "step": 1901568
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.8857178796917904e-05,
+      "loss": 3.8991,
+      "step": 1902080
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.884879284940738e-05,
+      "loss": 3.8994,
+      "step": 1902592
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.8840406901896863e-05,
+      "loss": 3.9153,
+      "step": 1903104
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.8832020954386343e-05,
+      "loss": 3.8947,
+      "step": 1903616
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.8823651385679556e-05,
+      "loss": 3.9031,
+      "step": 1904128
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.8815265438169032e-05,
+      "loss": 3.9014,
+      "step": 1904640
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.8806895869462245e-05,
+      "loss": 3.8968,
+      "step": 1905152
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.8798509921951725e-05,
+      "loss": 3.8911,
+      "step": 1905664
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.8790123974441205e-05,
+      "loss": 3.8912,
+      "step": 1906176
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.8781738026930685e-05,
+      "loss": 3.8959,
+      "step": 1906688
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.8773368458223897e-05,
+      "loss": 3.8926,
+      "step": 1907200
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.8764982510713377e-05,
+      "loss": 3.898,
+      "step": 1907712
+    },
+    {
+      "epoch": 0.03,
+      "eval_loss": 3.9966838359832764,
+      "eval_runtime": 293.1458,
+      "eval_samples_per_second": 1301.711,
+      "eval_steps_per_second": 40.679,
+      "step": 1908000
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.8756596563202854e-05,
+      "loss": 3.8957,
+      "step": 1908224
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.8748210615692334e-05,
+      "loss": 3.8943,
+      "step": 1908736
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.8739824668181817e-05,
+      "loss": 3.8988,
+      "step": 1909248
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.8731438720671297e-05,
+      "loss": 3.9061,
+      "step": 1909760
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.8723052773160777e-05,
+      "loss": 3.9108,
+      "step": 1910272
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.8714666825650257e-05,
+      "loss": 3.8977,
+      "step": 1910784
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.8706297256943466e-05,
+      "loss": 3.9003,
+      "step": 1911296
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.8697911309432946e-05,
+      "loss": 3.8917,
+      "step": 1911808
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.8689525361922426e-05,
+      "loss": 3.8914,
+      "step": 1912320
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.868113941441191e-05,
+      "loss": 3.9027,
+      "step": 1912832
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.867276984570512e-05,
+      "loss": 3.9002,
+      "step": 1913344
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.86643838981946e-05,
+      "loss": 3.894,
+      "step": 1913856
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.8656014329487808e-05,
+      "loss": 3.9032,
+      "step": 1914368
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.8647628381977287e-05,
+      "loss": 3.8884,
+      "step": 1914880
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.863924243446677e-05,
+      "loss": 3.8915,
+      "step": 1915392
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.863085648695625e-05,
+      "loss": 3.8791,
+      "step": 1915904
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.862248691824946e-05,
+      "loss": 3.9002,
+      "step": 1916416
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.861410097073894e-05,
+      "loss": 3.9012,
+      "step": 1916928
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.860571502322842e-05,
+      "loss": 3.8892,
+      "step": 1917440
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.85973290757179e-05,
+      "loss": 3.9051,
+      "step": 1917952
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.858894312820738e-05,
+      "loss": 3.9042,
+      "step": 1918464
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.858055718069686e-05,
+      "loss": 3.9053,
+      "step": 1918976
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.857217123318634e-05,
+      "loss": 3.8986,
+      "step": 1919488
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.856378528567582e-05,
+      "loss": 3.8978,
+      "step": 1920000
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.85553993381653e-05,
+      "loss": 3.8982,
+      "step": 1920512
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.854702976945851e-05,
+      "loss": 3.9047,
+      "step": 1921024
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.853864382194799e-05,
+      "loss": 3.8861,
+      "step": 1921536
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.8530257874437472e-05,
+      "loss": 3.899,
+      "step": 1922048
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.8521888305730684e-05,
+      "loss": 3.8894,
+      "step": 1922560
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.851350235822016e-05,
+      "loss": 3.896,
+      "step": 1923072
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.850511641070964e-05,
+      "loss": 3.8982,
+      "step": 1923584
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.849673046319912e-05,
+      "loss": 3.9058,
+      "step": 1924096
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.84883445156886e-05,
+      "loss": 3.8962,
+      "step": 1924608
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.847995856817808e-05,
+      "loss": 3.9116,
+      "step": 1925120
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.8471572620667564e-05,
+      "loss": 3.8942,
+      "step": 1925632
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.8463203051960773e-05,
+      "loss": 3.9007,
+      "step": 1926144
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.8454833483253982e-05,
+      "loss": 3.9036,
+      "step": 1926656
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.8446447535743462e-05,
+      "loss": 3.8833,
+      "step": 1927168
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.8438061588232942e-05,
+      "loss": 3.8897,
+      "step": 1927680
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.8429675640722426e-05,
+      "loss": 3.8891,
+      "step": 1928192
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.8421289693211906e-05,
+      "loss": 3.8867,
+      "step": 1928704
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.8412903745701385e-05,
+      "loss": 3.8955,
+      "step": 1929216
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.8404517798190865e-05,
+      "loss": 3.9025,
+      "step": 1929728
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.8396131850680342e-05,
+      "loss": 3.8896,
+      "step": 1930240
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.8387745903169822e-05,
+      "loss": 3.8927,
+      "step": 1930752
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.8379359955659302e-05,
+      "loss": 3.8928,
+      "step": 1931264
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.8370974008148782e-05,
+      "loss": 3.899,
+      "step": 1931776
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.8362588060638265e-05,
+      "loss": 3.8848,
+      "step": 1932288
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.8354218491931474e-05,
+      "loss": 3.8785,
+      "step": 1932800
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.8345832544420954e-05,
+      "loss": 3.9055,
+      "step": 1933312
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.8337446596910434e-05,
+      "loss": 3.8992,
+      "step": 1933824
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.8329060649399914e-05,
+      "loss": 3.9094,
+      "step": 1934336
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.8320691080693127e-05,
+      "loss": 3.8884,
+      "step": 1934848
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.8312305133182607e-05,
+      "loss": 3.8918,
+      "step": 1935360
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.8303919185672087e-05,
+      "loss": 3.8927,
+      "step": 1935872
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.8295549616965296e-05,
+      "loss": 3.8912,
+      "step": 1936384
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.8287163669454776e-05,
+      "loss": 3.9051,
+      "step": 1936896
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.8278777721944256e-05,
+      "loss": 3.878,
+      "step": 1937408
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.8270391774433736e-05,
+      "loss": 3.9196,
+      "step": 1937920
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.826200582692322e-05,
+      "loss": 3.8948,
+      "step": 1938432
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.8253619879412695e-05,
+      "loss": 3.8735,
+      "step": 1938944
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.8245250310705908e-05,
+      "loss": 3.9017,
+      "step": 1939456
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.8236864363195388e-05,
+      "loss": 3.8895,
+      "step": 1939968
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.8228478415684868e-05,
+      "loss": 3.8753,
+      "step": 1940480
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.8220092468174348e-05,
+      "loss": 3.8999,
+      "step": 1940992
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.8211706520663828e-05,
+      "loss": 3.9005,
+      "step": 1941504
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.8203320573153308e-05,
+      "loss": 3.8869,
+      "step": 1942016
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.8194934625642788e-05,
+      "loss": 3.8949,
+      "step": 1942528
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.8186565056935997e-05,
+      "loss": 3.8739,
+      "step": 1943040
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.8178179109425477e-05,
+      "loss": 3.8872,
+      "step": 1943552
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.8169793161914957e-05,
+      "loss": 3.8958,
+      "step": 1944064
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.816142359320817e-05,
+      "loss": 3.8886,
+      "step": 1944576
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.815303764569765e-05,
+      "loss": 3.8996,
+      "step": 1945088
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.814465169818713e-05,
+      "loss": 3.8867,
+      "step": 1945600
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.813626575067661e-05,
+      "loss": 3.9095,
+      "step": 1946112
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.812787980316609e-05,
+      "loss": 3.8904,
+      "step": 1946624
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.811949385565557e-05,
+      "loss": 3.8956,
+      "step": 1947136
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.811110790814505e-05,
+      "loss": 3.8934,
+      "step": 1947648
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.810272196063453e-05,
+      "loss": 3.8894,
+      "step": 1948160
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.809433601312401e-05,
+      "loss": 3.897,
+      "step": 1948672
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.808596644441722e-05,
+      "loss": 3.8903,
+      "step": 1949184
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.8077580496906698e-05,
+      "loss": 3.8972,
+      "step": 1949696
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.8069194549396178e-05,
+      "loss": 3.899,
+      "step": 1950208
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.8060824980689394e-05,
+      "loss": 3.8886,
+      "step": 1950720
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.8052439033178874e-05,
+      "loss": 3.8808,
+      "step": 1951232
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.804405308566835e-05,
+      "loss": 3.8911,
+      "step": 1951744
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.803566713815783e-05,
+      "loss": 3.8981,
+      "step": 1952256
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.802728119064731e-05,
+      "loss": 3.886,
+      "step": 1952768
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.801889524313679e-05,
+      "loss": 3.8945,
+      "step": 1953280
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.801050929562627e-05,
+      "loss": 3.8843,
+      "step": 1953792
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.800212334811575e-05,
+      "loss": 3.888,
+      "step": 1954304
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.7993737400605233e-05,
+      "loss": 3.8846,
+      "step": 1954816
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.7985367831898442e-05,
+      "loss": 3.889,
+      "step": 1955328
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.7976981884387922e-05,
+      "loss": 3.9044,
+      "step": 1955840
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.7968595936877402e-05,
+      "loss": 3.8929,
+      "step": 1956352
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.796020998936688e-05,
+      "loss": 3.8978,
+      "step": 1956864
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.7951840420660095e-05,
+      "loss": 3.8892,
+      "step": 1957376
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.7943454473149575e-05,
+      "loss": 3.8885,
+      "step": 1957888
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.7935068525639055e-05,
+      "loss": 3.8904,
+      "step": 1958400
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.792668257812853e-05,
+      "loss": 3.8987,
+      "step": 1958912
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.791829663061801e-05,
+      "loss": 3.8974,
+      "step": 1959424
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.790991068310749e-05,
+      "loss": 3.8977,
+      "step": 1959936
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.790152473559697e-05,
+      "loss": 3.8943,
+      "step": 1960448
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.789313878808645e-05,
+      "loss": 3.8898,
+      "step": 1960960
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.7884752840575934e-05,
+      "loss": 3.8951,
+      "step": 1961472
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.7876399650672876e-05,
+      "loss": 3.8887,
+      "step": 1961984
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.7868013703162353e-05,
+      "loss": 3.8852,
+      "step": 1962496
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.7859627755651833e-05,
+      "loss": 3.8953,
+      "step": 1963008
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.7851241808141313e-05,
+      "loss": 3.8872,
+      "step": 1963520
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.7842855860630796e-05,
+      "loss": 3.8863,
+      "step": 1964032
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.7834469913120276e-05,
+      "loss": 3.8849,
+      "step": 1964544
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.7826100344413485e-05,
+      "loss": 3.8844,
+      "step": 1965056
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.7817714396902965e-05,
+      "loss": 3.893,
+      "step": 1965568
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.7809328449392445e-05,
+      "loss": 3.8868,
+      "step": 1966080
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.7800942501881925e-05,
+      "loss": 3.8991,
+      "step": 1966592
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.7792556554371405e-05,
+      "loss": 3.8998,
+      "step": 1967104
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.7784186985664617e-05,
+      "loss": 3.9051,
+      "step": 1967616
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.7775801038154097e-05,
+      "loss": 3.8915,
+      "step": 1968128
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.7767431469447306e-05,
+      "loss": 3.8908,
+      "step": 1968640
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.7759045521936786e-05,
+      "loss": 3.8826,
+      "step": 1969152
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.7750659574426266e-05,
+      "loss": 3.8987,
+      "step": 1969664
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.774227362691575e-05,
+      "loss": 3.896,
+      "step": 1970176
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.773388767940523e-05,
+      "loss": 3.8793,
+      "step": 1970688
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.772551811069844e-05,
+      "loss": 3.8898,
+      "step": 1971200
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.771713216318792e-05,
+      "loss": 3.894,
+      "step": 1971712
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.77087462156774e-05,
+      "loss": 3.896,
+      "step": 1972224
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.770037664697061e-05,
+      "loss": 3.8884,
+      "step": 1972736
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.769199069946009e-05,
+      "loss": 3.8893,
+      "step": 1973248
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.768360475194957e-05,
+      "loss": 3.8842,
+      "step": 1973760
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.767521880443905e-05,
+      "loss": 3.9025,
+      "step": 1974272
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.766683285692853e-05,
+      "loss": 3.8917,
+      "step": 1974784
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.7658446909418007e-05,
+      "loss": 3.9043,
+      "step": 1975296
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.7650060961907487e-05,
+      "loss": 3.884,
+      "step": 1975808
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.7641675014396967e-05,
+      "loss": 3.8988,
+      "step": 1976320
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.763328906688645e-05,
+      "loss": 3.9028,
+      "step": 1976832
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.762490311937593e-05,
+      "loss": 3.8873,
+      "step": 1977344
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.761651717186541e-05,
+      "loss": 3.8956,
+      "step": 1977856
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.760813122435489e-05,
+      "loss": 3.8942,
+      "step": 1978368
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.7599745276844367e-05,
+      "loss": 3.9003,
+      "step": 1978880
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.7591359329333847e-05,
+      "loss": 3.9079,
+      "step": 1979392
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.7582973381823327e-05,
+      "loss": 3.8952,
+      "step": 1979904
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.757458743431281e-05,
+      "loss": 3.8959,
+      "step": 1980416
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.756620148680229e-05,
+      "loss": 3.8976,
+      "step": 1980928
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.75578319180955e-05,
+      "loss": 3.8928,
+      "step": 1981440
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.754944597058498e-05,
+      "loss": 3.8878,
+      "step": 1981952
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.754106002307446e-05,
+      "loss": 3.8869,
+      "step": 1982464
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.753267407556394e-05,
+      "loss": 3.8889,
+      "step": 1982976
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.7524304506857152e-05,
+      "loss": 3.8891,
+      "step": 1983488
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.751591855934663e-05,
+      "loss": 3.8925,
+      "step": 1984000
+    },
+    {
+      "epoch": 1.03,
+      "eval_loss": 3.9946038722991943,
+      "eval_runtime": 301.4588,
+      "eval_samples_per_second": 1265.815,
+      "eval_steps_per_second": 39.558,
+      "step": 1984320
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.750753261183611e-05,
+      "loss": 3.8923,
+      "step": 1984512
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.749914666432559e-05,
+      "loss": 3.8873,
+      "step": 1985024
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.749076071681507e-05,
+      "loss": 3.8927,
+      "step": 1985536
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.7482374769304548e-05,
+      "loss": 3.8992,
+      "step": 1986048
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.7473988821794028e-05,
+      "loss": 3.9066,
+      "step": 1986560
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.746560287428351e-05,
+      "loss": 3.8932,
+      "step": 1987072
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.745721692677299e-05,
+      "loss": 3.8966,
+      "step": 1987584
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.744883097926247e-05,
+      "loss": 3.8874,
+      "step": 1988096
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.744044503175195e-05,
+      "loss": 3.8858,
+      "step": 1988608
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.7432059084241428e-05,
+      "loss": 3.8947,
+      "step": 1989120
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.7423673136730908e-05,
+      "loss": 3.8993,
+      "step": 1989632
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.7415287189220388e-05,
+      "loss": 3.8901,
+      "step": 1990144
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.7406917620513604e-05,
+      "loss": 3.8974,
+      "step": 1990656
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.739853167300308e-05,
+      "loss": 3.8833,
+      "step": 1991168
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.739014572549256e-05,
+      "loss": 3.8898,
+      "step": 1991680
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.738175977798204e-05,
+      "loss": 3.8712,
+      "step": 1992192
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.737337383047152e-05,
+      "loss": 3.8957,
+      "step": 1992704
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.736500426176473e-05,
+      "loss": 3.895,
+      "step": 1993216
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.7356618314254213e-05,
+      "loss": 3.8844,
+      "step": 1993728
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.7348232366743692e-05,
+      "loss": 3.8993,
+      "step": 1994240
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.7339846419233172e-05,
+      "loss": 3.903,
+      "step": 1994752
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.7331460471722652e-05,
+      "loss": 3.9025,
+      "step": 1995264
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.7323074524212132e-05,
+      "loss": 3.8911,
+      "step": 1995776
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.731468857670161e-05,
+      "loss": 3.8942,
+      "step": 1996288
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.730630262919109e-05,
+      "loss": 3.8956,
+      "step": 1996800
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.729791668168057e-05,
+      "loss": 3.8998,
+      "step": 1997312
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.7289530734170052e-05,
+      "loss": 3.8805,
+      "step": 1997824
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.7281144786659532e-05,
+      "loss": 3.8909,
+      "step": 1998336
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.727277521795274e-05,
+      "loss": 3.889,
+      "step": 1998848
+    },
+    {
+      "epoch": 0.0,
+      "learning_rate": 1.726438927044222e-05,
+      "loss": 3.8874,
+      "step": 1999360
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.72560033229317e-05,
+      "loss": 3.8935,
+      "step": 1999872
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.724761737542118e-05,
+      "loss": 3.8997,
+      "step": 2000384
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.7239264185518126e-05,
+      "loss": 3.8947,
+      "step": 2000896
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.7230878238007606e-05,
+      "loss": 3.9027,
+      "step": 2001408
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.7222492290497083e-05,
+      "loss": 3.8893,
+      "step": 2001920
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.7214122721790295e-05,
+      "loss": 3.8966,
+      "step": 2002432
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.7205736774279775e-05,
+      "loss": 3.9013,
+      "step": 2002944
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.719735082676926e-05,
+      "loss": 3.8778,
+      "step": 2003456
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.7188964879258735e-05,
+      "loss": 3.8826,
+      "step": 2003968
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.7180578931748215e-05,
+      "loss": 3.885,
+      "step": 2004480
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.7172192984237695e-05,
+      "loss": 3.8775,
+      "step": 2004992
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.7163807036727175e-05,
+      "loss": 3.8898,
+      "step": 2005504
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.7155421089216655e-05,
+      "loss": 3.8999,
+      "step": 2006016
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.7147035141706135e-05,
+      "loss": 3.8853,
+      "step": 2006528
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.7138649194195615e-05,
+      "loss": 3.8876,
+      "step": 2007040
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.7130263246685095e-05,
+      "loss": 3.8919,
+      "step": 2007552
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.7121877299174575e-05,
+      "loss": 3.8941,
+      "step": 2008064
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.7113507730467787e-05,
+      "loss": 3.8802,
+      "step": 2008576
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.7105121782957264e-05,
+      "loss": 3.872,
+      "step": 2009088
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.7096735835446744e-05,
+      "loss": 3.897,
+      "step": 2009600
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.7088349887936227e-05,
+      "loss": 3.8967,
+      "step": 2010112
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.7079963940425707e-05,
+      "loss": 3.9063,
+      "step": 2010624
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.7071577992915187e-05,
+      "loss": 3.8828,
+      "step": 2011136
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.7063192045404667e-05,
+      "loss": 3.885,
+      "step": 2011648
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.7054806097894147e-05,
+      "loss": 3.8886,
+      "step": 2012160
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.704646928679482e-05,
+      "loss": 3.8851,
+      "step": 2012672
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.70380833392843e-05,
+      "loss": 3.9,
+      "step": 2013184
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.702969739177378e-05,
+      "loss": 3.8709,
+      "step": 2013696
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.702131144426326e-05,
+      "loss": 3.9151,
+      "step": 2014208
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.7012925496752737e-05,
+      "loss": 3.8936,
+      "step": 2014720
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.7004539549242217e-05,
+      "loss": 3.8706,
+      "step": 2015232
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.6996153601731697e-05,
+      "loss": 3.8934,
+      "step": 2015744
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.6987767654221177e-05,
+      "loss": 3.8842,
+      "step": 2016256
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.697938170671066e-05,
+      "loss": 3.8714,
+      "step": 2016768
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.697099575920014e-05,
+      "loss": 3.8966,
+      "step": 2017280
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.696262619049335e-05,
+      "loss": 3.8959,
+      "step": 2017792
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.695424024298283e-05,
+      "loss": 3.8813,
+      "step": 2018304
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.694585429547231e-05,
+      "loss": 3.8938,
+      "step": 2018816
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.693746834796179e-05,
+      "loss": 3.8677,
+      "step": 2019328
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.6929098779255002e-05,
+      "loss": 3.883,
+      "step": 2019840
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.6920712831744482e-05,
+      "loss": 3.8839,
+      "step": 2020352
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.6912326884233962e-05,
+      "loss": 3.8869,
+      "step": 2020864
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.6903940936723442e-05,
+      "loss": 3.892,
+      "step": 2021376
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.689555498921292e-05,
+      "loss": 3.8851,
+      "step": 2021888
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.68871690417024e-05,
+      "loss": 3.9069,
+      "step": 2022400
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.6878783094191882e-05,
+      "loss": 3.8861,
+      "step": 2022912
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.687039714668136e-05,
+      "loss": 3.8935,
+      "step": 2023424
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.686202757797457e-05,
+      "loss": 3.8864,
+      "step": 2023936
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.685364163046405e-05,
+      "loss": 3.8838,
+      "step": 2024448
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.684525568295353e-05,
+      "loss": 3.8939,
+      "step": 2024960
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.6836886114246743e-05,
+      "loss": 3.8866,
+      "step": 2025472
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.6828500166736223e-05,
+      "loss": 3.8906,
+      "step": 2025984
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.6820114219225703e-05,
+      "loss": 3.8972,
+      "step": 2026496
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.6811728271715183e-05,
+      "loss": 3.8844,
+      "step": 2027008
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.6803342324204663e-05,
+      "loss": 3.8743,
+      "step": 2027520
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.6794956376694143e-05,
+      "loss": 3.8849,
+      "step": 2028032
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.6786586807987352e-05,
+      "loss": 3.8966,
+      "step": 2028544
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.6778200860476835e-05,
+      "loss": 3.877,
+      "step": 2029056
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.6769814912966315e-05,
+      "loss": 3.8941,
+      "step": 2029568
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 1.6761428965455795e-05,
+      "loss": 3.8849,
+      "step": 2030080
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.6753043017945275e-05,
+      "loss": 3.8787,
+      "step": 2030592
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.6744673449238484e-05,
+      "loss": 3.8805,
+      "step": 2031104
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.6736287501727964e-05,
+      "loss": 3.8839,
+      "step": 2031616
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.6727901554217444e-05,
+      "loss": 3.9027,
+      "step": 2032128
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.6719515606706924e-05,
+      "loss": 3.8855,
+      "step": 2032640
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.6711129659196404e-05,
+      "loss": 3.8923,
+      "step": 2033152
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.6702743711685884e-05,
+      "loss": 3.8838,
+      "step": 2033664
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.6694357764175364e-05,
+      "loss": 3.8811,
+      "step": 2034176
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.6685971816664844e-05,
+      "loss": 3.8837,
+      "step": 2034688
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.6677585869154324e-05,
+      "loss": 3.8954,
+      "step": 2035200
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.6669216300447536e-05,
+      "loss": 3.8949,
+      "step": 2035712
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.6660830352937016e-05,
+      "loss": 3.8909,
+      "step": 2036224
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.6652444405426496e-05,
+      "loss": 3.8907,
+      "step": 2036736
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.6644074836719705e-05,
+      "loss": 3.8818,
+      "step": 2037248
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.6635688889209185e-05,
+      "loss": 3.8915,
+      "step": 2037760
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.6627302941698665e-05,
+      "loss": 3.8858,
+      "step": 2038272
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.6618916994188145e-05,
+      "loss": 3.8815,
+      "step": 2038784
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.661053104667763e-05,
+      "loss": 3.887,
+      "step": 2039296
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.6602145099167105e-05,
+      "loss": 3.8841,
+      "step": 2039808
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.6593759151656585e-05,
+      "loss": 3.884,
+      "step": 2040320
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.6585373204146065e-05,
+      "loss": 3.8808,
+      "step": 2040832
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.6577020014243007e-05,
+      "loss": 3.8786,
+      "step": 2041344
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.656863406673249e-05,
+      "loss": 3.8862,
+      "step": 2041856
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.656024811922197e-05,
+      "loss": 3.8879,
+      "step": 2042368
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.655186217171145e-05,
+      "loss": 3.8896,
+      "step": 2042880
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.654347622420093e-05,
+      "loss": 3.8958,
+      "step": 2043392
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.6535090276690407e-05,
+      "loss": 3.9015,
+      "step": 2043904
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.6526704329179887e-05,
+      "loss": 3.8858,
+      "step": 2044416
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.6518318381669366e-05,
+      "loss": 3.8866,
+      "step": 2044928
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.650994881296258e-05,
+      "loss": 3.8785,
+      "step": 2045440
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.650156286545206e-05,
+      "loss": 3.8907,
+      "step": 2045952
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.649317691794154e-05,
+      "loss": 3.889,
+      "step": 2046464
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.648479097043102e-05,
+      "loss": 3.8783,
+      "step": 2046976
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.6476421401724228e-05,
+      "loss": 3.8819,
+      "step": 2047488
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.6468035454213708e-05,
+      "loss": 3.8927,
+      "step": 2048000
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.645964950670319e-05,
+      "loss": 3.8912,
+      "step": 2048512
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.645126355919267e-05,
+      "loss": 3.8815,
+      "step": 2049024
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.644287761168215e-05,
+      "loss": 3.8845,
+      "step": 2049536
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.643450804297536e-05,
+      "loss": 3.8836,
+      "step": 2050048
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.642612209546484e-05,
+      "loss": 3.8932,
+      "step": 2050560
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.6417752526758053e-05,
+      "loss": 3.8907,
+      "step": 2051072
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.6409366579247533e-05,
+      "loss": 3.8992,
+      "step": 2051584
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.6400980631737013e-05,
+      "loss": 3.8804,
+      "step": 2052096
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.6392611063030225e-05,
+      "loss": 3.8941,
+      "step": 2052608
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.63842251155197e-05,
+      "loss": 3.8959,
+      "step": 2053120
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.637583916800918e-05,
+      "loss": 3.88,
+      "step": 2053632
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.636745322049866e-05,
+      "loss": 3.8881,
+      "step": 2054144
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.6359067272988145e-05,
+      "loss": 3.8914,
+      "step": 2054656
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.6350681325477625e-05,
+      "loss": 3.8978,
+      "step": 2055168
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.6342295377967105e-05,
+      "loss": 3.8989,
+      "step": 2055680
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.633390943045658e-05,
+      "loss": 3.8872,
+      "step": 2056192
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.632552348294606e-05,
+      "loss": 3.8922,
+      "step": 2056704
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.631713753543554e-05,
+      "loss": 3.8933,
+      "step": 2057216
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.630875158792502e-05,
+      "loss": 3.8873,
+      "step": 2057728
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.63003656404145e-05,
+      "loss": 3.8838,
+      "step": 2058240
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.6292012450511446e-05,
+      "loss": 3.8814,
+      "step": 2058752
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.6283626503000926e-05,
+      "loss": 3.8804,
+      "step": 2059264
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.6275240555490406e-05,
+      "loss": 3.8872,
+      "step": 2059776
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 1.6266854607979883e-05,
+      "loss": 3.8861,
+      "step": 2060288
+    },
+    {
+      "epoch": 0.03,
+      "eval_loss": 3.9931588172912598,
+      "eval_runtime": 293.1849,
+      "eval_samples_per_second": 1301.537,
+      "eval_steps_per_second": 40.674,
+      "step": 2060640
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.6258468660469363e-05,
+      "loss": 3.8847,
+      "step": 2060800
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.6250082712958846e-05,
+      "loss": 3.8838,
+      "step": 2061312
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.6241696765448326e-05,
+      "loss": 3.8898,
+      "step": 2061824
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.6233310817937806e-05,
+      "loss": 3.8923,
+      "step": 2062336
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.6224924870427286e-05,
+      "loss": 3.9009,
+      "step": 2062848
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.6216538922916762e-05,
+      "loss": 3.8897,
+      "step": 2063360
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.6208169354209975e-05,
+      "loss": 3.8936,
+      "step": 2063872
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.6199783406699455e-05,
+      "loss": 3.8863,
+      "step": 2064384
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.6191397459188938e-05,
+      "loss": 3.8792,
+      "step": 2064896
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.6183011511678415e-05,
+      "loss": 3.8858,
+      "step": 2065408
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.6174625564167895e-05,
+      "loss": 3.8989,
+      "step": 2065920
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.6166239616657375e-05,
+      "loss": 3.8823,
+      "step": 2066432
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.6157870047950587e-05,
+      "loss": 3.8973,
+      "step": 2066944
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.6149484100440067e-05,
+      "loss": 3.8762,
+      "step": 2067456
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.6141098152929547e-05,
+      "loss": 3.8832,
+      "step": 2067968
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.6132712205419027e-05,
+      "loss": 3.8698,
+      "step": 2068480
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.6124342636712236e-05,
+      "loss": 3.89,
+      "step": 2068992
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.6115956689201716e-05,
+      "loss": 3.888,
+      "step": 2069504
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.6107570741691196e-05,
+      "loss": 3.8849,
+      "step": 2070016
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.6099184794180676e-05,
+      "loss": 3.8864,
+      "step": 2070528
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.6090798846670156e-05,
+      "loss": 3.9021,
+      "step": 2071040
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.608241289915964e-05,
+      "loss": 3.8925,
+      "step": 2071552
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.607402695164912e-05,
+      "loss": 3.8902,
+      "step": 2072064
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.6065641004138596e-05,
+      "loss": 3.8844,
+      "step": 2072576
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.6057255056628076e-05,
+      "loss": 3.8953,
+      "step": 2073088
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.6048885487921288e-05,
+      "loss": 3.8902,
+      "step": 2073600
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.6040499540410768e-05,
+      "loss": 3.8815,
+      "step": 2074112
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.603212997170398e-05,
+      "loss": 3.8862,
+      "step": 2074624
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.602374402419346e-05,
+      "loss": 3.8821,
+      "step": 2075136
+    },
+    {
+      "epoch": 1.0,
+      "learning_rate": 1.601535807668294e-05,
+      "loss": 3.8781,
+      "step": 2075648
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.6006972129172417e-05,
+      "loss": 3.8922,
+      "step": 2076160
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.599860256046563e-05,
+      "loss": 3.8872,
+      "step": 2076672
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.599021661295511e-05,
+      "loss": 3.8942,
+      "step": 2077184
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.5981830665444593e-05,
+      "loss": 3.897,
+      "step": 2077696
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.597344471793407e-05,
+      "loss": 3.8834,
+      "step": 2078208
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.5965075149227282e-05,
+      "loss": 3.8937,
+      "step": 2078720
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.5956689201716762e-05,
+      "loss": 3.8961,
+      "step": 2079232
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.5948303254206242e-05,
+      "loss": 3.8759,
+      "step": 2079744
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.5939917306695722e-05,
+      "loss": 3.8832,
+      "step": 2080256
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.5931531359185202e-05,
+      "loss": 3.8746,
+      "step": 2080768
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.5923145411674682e-05,
+      "loss": 3.8765,
+      "step": 2081280
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.5914759464164162e-05,
+      "loss": 3.8871,
+      "step": 2081792
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.5906373516653642e-05,
+      "loss": 3.894,
+      "step": 2082304
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.589798756914312e-05,
+      "loss": 3.8794,
+      "step": 2082816
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.5889601621632598e-05,
+      "loss": 3.8813,
+      "step": 2083328
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.588123205292581e-05,
+      "loss": 3.8874,
+      "step": 2083840
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.5872846105415294e-05,
+      "loss": 3.8876,
+      "step": 2084352
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.5864460157904774e-05,
+      "loss": 3.8785,
+      "step": 2084864
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.585607421039425e-05,
+      "loss": 3.8686,
+      "step": 2085376
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.584768826288373e-05,
+      "loss": 3.8921,
+      "step": 2085888
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.583930231537321e-05,
+      "loss": 3.8968,
+      "step": 2086400
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.583091636786269e-05,
+      "loss": 3.8982,
+      "step": 2086912
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.5822546799155903e-05,
+      "loss": 3.8796,
+      "step": 2087424
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.5814160851645383e-05,
+      "loss": 3.8822,
+      "step": 2087936
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.5805774904134863e-05,
+      "loss": 3.8831,
+      "step": 2088448
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.5797405335428072e-05,
+      "loss": 3.8786,
+      "step": 2088960
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.5789019387917552e-05,
+      "loss": 3.8962,
+      "step": 2089472
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.5780633440407032e-05,
+      "loss": 3.87,
+      "step": 2089984
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.5772247492896515e-05,
+      "loss": 3.9073,
+      "step": 2090496
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.5763861545385995e-05,
+      "loss": 3.8881,
+      "step": 2091008
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.5755475597875475e-05,
+      "loss": 3.8687,
+      "step": 2091520
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.5747089650364955e-05,
+      "loss": 3.8876,
+      "step": 2092032
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.573870370285443e-05,
+      "loss": 3.8818,
+      "step": 2092544
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.573031775534391e-05,
+      "loss": 3.8692,
+      "step": 2093056
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.5721948186637124e-05,
+      "loss": 3.8862,
+      "step": 2093568
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.5713562239126604e-05,
+      "loss": 3.8915,
+      "step": 2094080
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.5705176291616084e-05,
+      "loss": 3.878,
+      "step": 2094592
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.5696790344105564e-05,
+      "loss": 3.8897,
+      "step": 2095104
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.5688404396595044e-05,
+      "loss": 3.8644,
+      "step": 2095616
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.5680034827888253e-05,
+      "loss": 3.8795,
+      "step": 2096128
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.5671648880377733e-05,
+      "loss": 3.88,
+      "step": 2096640
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.5663262932867216e-05,
+      "loss": 3.8842,
+      "step": 2097152
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.5654876985356696e-05,
+      "loss": 3.8811,
+      "step": 2097664
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.5646491037846176e-05,
+      "loss": 3.8802,
+      "step": 2098176
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.5638105090335656e-05,
+      "loss": 3.8985,
+      "step": 2098688
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.5629719142825136e-05,
+      "loss": 3.8866,
+      "step": 2099200
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.5621333195314613e-05,
+      "loss": 3.8867,
+      "step": 2099712
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.5612963626607825e-05,
+      "loss": 3.8826,
+      "step": 2100224
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.560457767909731e-05,
+      "loss": 3.8781,
+      "step": 2100736
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.5596191731586785e-05,
+      "loss": 3.8909,
+      "step": 2101248
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.5587805784076265e-05,
+      "loss": 3.8821,
+      "step": 2101760
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.5579436215369478e-05,
+      "loss": 3.8822,
+      "step": 2102272
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.5571050267858957e-05,
+      "loss": 3.8914,
+      "step": 2102784
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.5562664320348434e-05,
+      "loss": 3.8853,
+      "step": 2103296
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.5554278372837917e-05,
+      "loss": 3.8665,
+      "step": 2103808
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.5545892425327397e-05,
+      "loss": 3.8828,
+      "step": 2104320
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.553752285662061e-05,
+      "loss": 3.8887,
+      "step": 2104832
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.552915328791382e-05,
+      "loss": 3.8749,
+      "step": 2105344
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.55207673404033e-05,
+      "loss": 3.8857,
+      "step": 2105856
+    },
+    {
+      "epoch": 1.01,
+      "learning_rate": 1.551238139289278e-05,
+      "loss": 3.8825,
+      "step": 2106368
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.550401182418599e-05,
+      "loss": 3.8717,
+      "step": 2106880
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.549562587667547e-05,
+      "loss": 3.8804,
+      "step": 2107392
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.548723992916495e-05,
+      "loss": 3.8745,
+      "step": 2107904
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.547885398165443e-05,
+      "loss": 3.8944,
+      "step": 2108416
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5470468034143908e-05,
+      "loss": 3.8837,
+      "step": 2108928
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5462082086633388e-05,
+      "loss": 3.8904,
+      "step": 2109440
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.545369613912287e-05,
+      "loss": 3.8768,
+      "step": 2109952
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.544531019161235e-05,
+      "loss": 3.8775,
+      "step": 2110464
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.543692424410183e-05,
+      "loss": 3.8829,
+      "step": 2110976
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.542853829659131e-05,
+      "loss": 3.89,
+      "step": 2111488
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.542015234908079e-05,
+      "loss": 3.8891,
+      "step": 2112000
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5411766401570267e-05,
+      "loss": 3.8852,
+      "step": 2112512
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.540339683286348e-05,
+      "loss": 3.8827,
+      "step": 2113024
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5395010885352963e-05,
+      "loss": 3.8767,
+      "step": 2113536
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.538662493784244e-05,
+      "loss": 3.8884,
+      "step": 2114048
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.537823899033192e-05,
+      "loss": 3.8787,
+      "step": 2114560
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.53698530428214e-05,
+      "loss": 3.8799,
+      "step": 2115072
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5361483474114612e-05,
+      "loss": 3.8812,
+      "step": 2115584
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5353113905407825e-05,
+      "loss": 3.881,
+      "step": 2116096
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5344727957897305e-05,
+      "loss": 3.8807,
+      "step": 2116608
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5336358389190514e-05,
+      "loss": 3.8742,
+      "step": 2117120
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5327972441679994e-05,
+      "loss": 3.8727,
+      "step": 2117632
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5319586494169474e-05,
+      "loss": 3.8845,
+      "step": 2118144
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5311200546658954e-05,
+      "loss": 3.8829,
+      "step": 2118656
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5302814599148434e-05,
+      "loss": 3.8832,
+      "step": 2119168
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5294428651637914e-05,
+      "loss": 3.8908,
+      "step": 2119680
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5286042704127394e-05,
+      "loss": 3.8987,
+      "step": 2120192
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5277656756616873e-05,
+      "loss": 3.8805,
+      "step": 2120704
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5269270809106353e-05,
+      "loss": 3.8774,
+      "step": 2121216
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5260884861595833e-05,
+      "loss": 3.877,
+      "step": 2121728
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5252498914085315e-05,
+      "loss": 3.8861,
+      "step": 2122240
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5244129345378524e-05,
+      "loss": 3.8834,
+      "step": 2122752
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5235743397868004e-05,
+      "loss": 3.8757,
+      "step": 2123264
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5227357450357486e-05,
+      "loss": 3.8781,
+      "step": 2123776
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5218971502846966e-05,
+      "loss": 3.8896,
+      "step": 2124288
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5210585555336442e-05,
+      "loss": 3.8811,
+      "step": 2124800
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5202199607825924e-05,
+      "loss": 3.8792,
+      "step": 2125312
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5193813660315404e-05,
+      "loss": 3.8859,
+      "step": 2125824
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5185444091608616e-05,
+      "loss": 3.8794,
+      "step": 2126336
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5177058144098095e-05,
+      "loss": 3.8821,
+      "step": 2126848
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5168672196587575e-05,
+      "loss": 3.889,
+      "step": 2127360
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5160286249077055e-05,
+      "loss": 3.8917,
+      "step": 2127872
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5151900301566534e-05,
+      "loss": 3.8771,
+      "step": 2128384
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5143514354056016e-05,
+      "loss": 3.8894,
+      "step": 2128896
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5135128406545496e-05,
+      "loss": 3.8908,
+      "step": 2129408
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5126758837838705e-05,
+      "loss": 3.8789,
+      "step": 2129920
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5118372890328187e-05,
+      "loss": 3.8818,
+      "step": 2130432
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5109986942817667e-05,
+      "loss": 3.8851,
+      "step": 2130944
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5101600995307147e-05,
+      "loss": 3.8942,
+      "step": 2131456
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5093215047796625e-05,
+      "loss": 3.8936,
+      "step": 2131968
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5084845479089837e-05,
+      "loss": 3.8862,
+      "step": 2132480
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5076459531579317e-05,
+      "loss": 3.8855,
+      "step": 2132992
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5068073584068797e-05,
+      "loss": 3.8893,
+      "step": 2133504
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5059687636558276e-05,
+      "loss": 3.8872,
+      "step": 2134016
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5051318067851488e-05,
+      "loss": 3.8746,
+      "step": 2134528
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.504293212034097e-05,
+      "loss": 3.8835,
+      "step": 2135040
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.503454617283045e-05,
+      "loss": 3.874,
+      "step": 2135552
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.5026160225319926e-05,
+      "loss": 3.8829,
+      "step": 2136064
+    },
+    {
+      "epoch": 1.02,
+      "learning_rate": 1.501779065661314e-05,
+      "loss": 3.8822,
+      "step": 2136576
+    },
+    {
+      "epoch": 1.03,
+      "eval_loss": 3.9916534423828125,
+      "eval_runtime": 294.0708,
+      "eval_samples_per_second": 1297.616,
+      "eval_steps_per_second": 40.551,
+      "step": 2136960
+    }
+  ],
+  "logging_steps": 512,
+  "max_steps": 3052726,
+  "num_train_epochs": 9223372036854775807,
+  "save_steps": 10,
+  "total_flos": 8.66254058415639e+17,
+  "trial_name": null,
+  "trial_params": null
+}