xlm-r-slobertic / trainer_state.json
{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 0.7564184767031974,
"global_step": 48000,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0,
"learning_rate": 1e-05,
"loss": 1.3064,
"step": 250
},
{
"epoch": 0.01,
"learning_rate": 2e-05,
"loss": 1.2686,
"step": 500
},
{
"epoch": 0.01,
"learning_rate": 3e-05,
"loss": 1.2536,
"step": 750
},
{
"epoch": 0.02,
"learning_rate": 4e-05,
"loss": 1.2402,
"step": 1000
},
{
"epoch": 0.02,
"learning_rate": 5e-05,
"loss": 1.2291,
"step": 1250
},
{
"epoch": 0.02,
"learning_rate": 6e-05,
"loss": 1.2202,
"step": 1500
},
{
"epoch": 0.03,
"learning_rate": 7e-05,
"loss": 1.2107,
"step": 1750
},
{
"epoch": 0.03,
"learning_rate": 8e-05,
"loss": 1.2039,
"step": 2000
},
{
"epoch": 0.04,
"learning_rate": 9e-05,
"loss": 1.1978,
"step": 2250
},
{
"epoch": 0.04,
"learning_rate": 0.0001,
"loss": 1.1937,
"step": 2500
},
{
"epoch": 0.04,
"learning_rate": 9.9873417721519e-05,
"loss": 1.1933,
"step": 2750
},
{
"epoch": 0.05,
"learning_rate": 9.974683544303798e-05,
"loss": 1.1742,
"step": 3000
},
{
"epoch": 0.05,
"learning_rate": 9.962025316455697e-05,
"loss": 1.1665,
"step": 3250
},
{
"epoch": 0.06,
"learning_rate": 9.949367088607595e-05,
"loss": 1.1584,
"step": 3500
},
{
"epoch": 0.06,
"learning_rate": 9.936708860759493e-05,
"loss": 1.1502,
"step": 3750
},
{
"epoch": 0.06,
"learning_rate": 9.924050632911392e-05,
"loss": 1.147,
"step": 4000
},
{
"epoch": 0.07,
"learning_rate": 9.911392405063291e-05,
"loss": 1.1423,
"step": 4250
},
{
"epoch": 0.07,
"learning_rate": 9.89873417721519e-05,
"loss": 1.1343,
"step": 4500
},
{
"epoch": 0.07,
"learning_rate": 9.88607594936709e-05,
"loss": 1.1306,
"step": 4750
},
{
"epoch": 0.08,
"learning_rate": 9.873417721518988e-05,
"loss": 1.1237,
"step": 5000
},
{
"epoch": 0.08,
"learning_rate": 9.860759493670887e-05,
"loss": 1.1181,
"step": 5250
},
{
"epoch": 0.09,
"learning_rate": 9.848101265822785e-05,
"loss": 1.1135,
"step": 5500
},
{
"epoch": 0.09,
"learning_rate": 9.835443037974684e-05,
"loss": 1.1087,
"step": 5750
},
{
"epoch": 0.09,
"learning_rate": 9.822784810126583e-05,
"loss": 1.1047,
"step": 6000
},
{
"epoch": 0.1,
"learning_rate": 9.810126582278482e-05,
"loss": 1.101,
"step": 6250
},
{
"epoch": 0.1,
"learning_rate": 9.797468354430381e-05,
"loss": 1.0963,
"step": 6500
},
{
"epoch": 0.11,
"learning_rate": 9.784810126582278e-05,
"loss": 1.0933,
"step": 6750
},
{
"epoch": 0.11,
"learning_rate": 9.772151898734177e-05,
"loss": 1.0895,
"step": 7000
},
{
"epoch": 0.11,
"learning_rate": 9.759493670886076e-05,
"loss": 1.0879,
"step": 7250
},
{
"epoch": 0.12,
"learning_rate": 9.746835443037975e-05,
"loss": 1.0845,
"step": 7500
},
{
"epoch": 0.12,
"learning_rate": 9.734177215189874e-05,
"loss": 1.0802,
"step": 7750
},
{
"epoch": 0.13,
"learning_rate": 9.721518987341773e-05,
"loss": 1.077,
"step": 8000
},
{
"epoch": 0.13,
"learning_rate": 9.708860759493671e-05,
"loss": 1.075,
"step": 8250
},
{
"epoch": 0.13,
"learning_rate": 9.696202531645569e-05,
"loss": 1.0714,
"step": 8500
},
{
"epoch": 0.14,
"learning_rate": 9.683544303797469e-05,
"loss": 1.0696,
"step": 8750
},
{
"epoch": 0.14,
"learning_rate": 9.670886075949368e-05,
"loss": 1.0674,
"step": 9000
},
{
"epoch": 0.15,
"learning_rate": 9.658227848101267e-05,
"loss": 1.0647,
"step": 9250
},
{
"epoch": 0.15,
"learning_rate": 9.645569620253166e-05,
"loss": 1.0652,
"step": 9500
},
{
"epoch": 0.15,
"learning_rate": 9.632911392405065e-05,
"loss": 1.0622,
"step": 9750
},
{
"epoch": 0.16,
"learning_rate": 9.620253164556962e-05,
"loss": 1.0599,
"step": 10000
},
{
"epoch": 0.16,
"learning_rate": 9.607594936708861e-05,
"loss": 1.0568,
"step": 10250
},
{
"epoch": 0.17,
"learning_rate": 9.59493670886076e-05,
"loss": 1.0537,
"step": 10500
},
{
"epoch": 0.17,
"learning_rate": 9.582278481012659e-05,
"loss": 1.0504,
"step": 10750
},
{
"epoch": 0.17,
"learning_rate": 9.569620253164557e-05,
"loss": 1.0494,
"step": 11000
},
{
"epoch": 0.18,
"learning_rate": 9.556962025316456e-05,
"loss": 1.0485,
"step": 11250
},
{
"epoch": 0.18,
"learning_rate": 9.544303797468355e-05,
"loss": 1.0445,
"step": 11500
},
{
"epoch": 0.19,
"learning_rate": 9.531645569620253e-05,
"loss": 1.0423,
"step": 11750
},
{
"epoch": 0.19,
"learning_rate": 9.518987341772152e-05,
"loss": 1.0412,
"step": 12000
},
{
"epoch": 0.19,
"learning_rate": 9.50632911392405e-05,
"loss": 1.0392,
"step": 12250
},
{
"epoch": 0.2,
"learning_rate": 9.493670886075949e-05,
"loss": 1.0368,
"step": 12500
},
{
"epoch": 0.2,
"learning_rate": 9.481012658227848e-05,
"loss": 1.035,
"step": 12750
},
{
"epoch": 0.2,
"learning_rate": 9.468354430379748e-05,
"loss": 1.035,
"step": 13000
},
{
"epoch": 0.21,
"learning_rate": 9.455696202531646e-05,
"loss": 1.0337,
"step": 13250
},
{
"epoch": 0.21,
"learning_rate": 9.443037974683545e-05,
"loss": 1.0318,
"step": 13500
},
{
"epoch": 0.22,
"learning_rate": 9.430379746835444e-05,
"loss": 1.0314,
"step": 13750
},
{
"epoch": 0.22,
"learning_rate": 9.417721518987342e-05,
"loss": 1.0288,
"step": 14000
},
{
"epoch": 0.22,
"learning_rate": 9.405063291139241e-05,
"loss": 1.03,
"step": 14250
},
{
"epoch": 0.23,
"learning_rate": 9.39240506329114e-05,
"loss": 1.0261,
"step": 14500
},
{
"epoch": 0.23,
"learning_rate": 9.379746835443039e-05,
"loss": 1.026,
"step": 14750
},
{
"epoch": 0.24,
"learning_rate": 9.367088607594936e-05,
"loss": 1.0238,
"step": 15000
},
{
"epoch": 0.24,
"learning_rate": 9.354430379746835e-05,
"loss": 1.0248,
"step": 15250
},
{
"epoch": 0.24,
"learning_rate": 9.341772151898734e-05,
"loss": 1.0213,
"step": 15500
},
{
"epoch": 0.25,
"learning_rate": 9.329113924050633e-05,
"loss": 1.0212,
"step": 15750
},
{
"epoch": 0.25,
"learning_rate": 9.316455696202532e-05,
"loss": 1.0213,
"step": 16000
},
{
"epoch": 0.26,
"learning_rate": 9.303797468354431e-05,
"loss": 1.0177,
"step": 16250
},
{
"epoch": 0.26,
"learning_rate": 9.29113924050633e-05,
"loss": 1.0192,
"step": 16500
},
{
"epoch": 0.26,
"learning_rate": 9.278481012658227e-05,
"loss": 1.0155,
"step": 16750
},
{
"epoch": 0.27,
"learning_rate": 9.265822784810127e-05,
"loss": 1.0148,
"step": 17000
},
{
"epoch": 0.27,
"learning_rate": 9.253164556962026e-05,
"loss": 1.0139,
"step": 17250
},
{
"epoch": 0.28,
"learning_rate": 9.240506329113925e-05,
"loss": 1.0126,
"step": 17500
},
{
"epoch": 0.28,
"learning_rate": 9.227848101265824e-05,
"loss": 1.012,
"step": 17750
},
{
"epoch": 0.28,
"learning_rate": 9.215189873417723e-05,
"loss": 1.0108,
"step": 18000
},
{
"epoch": 0.29,
"learning_rate": 9.20253164556962e-05,
"loss": 1.0114,
"step": 18250
},
{
"epoch": 0.29,
"learning_rate": 9.189873417721519e-05,
"loss": 1.0103,
"step": 18500
},
{
"epoch": 0.3,
"learning_rate": 9.177215189873418e-05,
"loss": 1.0086,
"step": 18750
},
{
"epoch": 0.3,
"learning_rate": 9.164556962025317e-05,
"loss": 1.0082,
"step": 19000
},
{
"epoch": 0.3,
"learning_rate": 9.151898734177216e-05,
"loss": 1.0058,
"step": 19250
},
{
"epoch": 0.31,
"learning_rate": 9.139240506329115e-05,
"loss": 1.0049,
"step": 19500
},
{
"epoch": 0.31,
"learning_rate": 9.126582278481013e-05,
"loss": 1.0055,
"step": 19750
},
{
"epoch": 0.32,
"learning_rate": 9.113924050632912e-05,
"loss": 1.0006,
"step": 20000
},
{
"epoch": 0.32,
"learning_rate": 9.10126582278481e-05,
"loss": 1.0028,
"step": 20250
},
{
"epoch": 0.32,
"learning_rate": 9.088607594936709e-05,
"loss": 1.0,
"step": 20500
},
{
"epoch": 0.33,
"learning_rate": 9.075949367088607e-05,
"loss": 1.0012,
"step": 20750
},
{
"epoch": 0.33,
"learning_rate": 9.063291139240506e-05,
"loss": 0.9976,
"step": 21000
},
{
"epoch": 0.33,
"learning_rate": 9.050632911392407e-05,
"loss": 0.9963,
"step": 21250
},
{
"epoch": 0.34,
"learning_rate": 9.037974683544304e-05,
"loss": 0.9969,
"step": 21500
},
{
"epoch": 0.34,
"learning_rate": 9.025316455696203e-05,
"loss": 0.9963,
"step": 21750
},
{
"epoch": 0.35,
"learning_rate": 9.012658227848102e-05,
"loss": 0.9959,
"step": 22000
},
{
"epoch": 0.35,
"learning_rate": 9e-05,
"loss": 0.9939,
"step": 22250
},
{
"epoch": 0.35,
"learning_rate": 8.9873417721519e-05,
"loss": 0.9925,
"step": 22500
},
{
"epoch": 0.36,
"learning_rate": 8.974683544303798e-05,
"loss": 0.9935,
"step": 22750
},
{
"epoch": 0.36,
"learning_rate": 8.962025316455697e-05,
"loss": 0.991,
"step": 23000
},
{
"epoch": 0.37,
"learning_rate": 8.949367088607596e-05,
"loss": 0.9894,
"step": 23250
},
{
"epoch": 0.37,
"learning_rate": 8.936708860759493e-05,
"loss": 0.9892,
"step": 23500
},
{
"epoch": 0.37,
"learning_rate": 8.924050632911392e-05,
"loss": 0.9892,
"step": 23750
},
{
"epoch": 0.38,
"learning_rate": 8.911392405063291e-05,
"loss": 0.9895,
"step": 24000
},
{
"epoch": 0.38,
"learning_rate": 8.89873417721519e-05,
"loss": 0.9871,
"step": 24250
},
{
"epoch": 0.39,
"learning_rate": 8.886075949367089e-05,
"loss": 0.9857,
"step": 24500
},
{
"epoch": 0.39,
"learning_rate": 8.873417721518988e-05,
"loss": 0.9862,
"step": 24750
},
{
"epoch": 0.39,
"learning_rate": 8.860759493670887e-05,
"loss": 0.9853,
"step": 25000
},
{
"epoch": 0.4,
"learning_rate": 8.848101265822785e-05,
"loss": 0.9836,
"step": 25250
},
{
"epoch": 0.4,
"learning_rate": 8.835443037974684e-05,
"loss": 0.981,
"step": 25500
},
{
"epoch": 0.41,
"learning_rate": 8.822784810126583e-05,
"loss": 0.9832,
"step": 25750
},
{
"epoch": 0.41,
"learning_rate": 8.810126582278482e-05,
"loss": 0.9806,
"step": 26000
},
{
"epoch": 0.41,
"learning_rate": 8.797468354430381e-05,
"loss": 0.9815,
"step": 26250
},
{
"epoch": 0.42,
"learning_rate": 8.78481012658228e-05,
"loss": 0.9785,
"step": 26500
},
{
"epoch": 0.42,
"learning_rate": 8.772151898734177e-05,
"loss": 0.9782,
"step": 26750
},
{
"epoch": 0.43,
"learning_rate": 8.759493670886076e-05,
"loss": 0.9779,
"step": 27000
},
{
"epoch": 0.43,
"learning_rate": 8.746835443037975e-05,
"loss": 0.9787,
"step": 27250
},
{
"epoch": 0.43,
"learning_rate": 8.734177215189874e-05,
"loss": 0.9772,
"step": 27500
},
{
"epoch": 0.44,
"learning_rate": 8.721518987341773e-05,
"loss": 0.9774,
"step": 27750
},
{
"epoch": 0.44,
"learning_rate": 8.708860759493672e-05,
"loss": 0.9751,
"step": 28000
},
{
"epoch": 0.45,
"learning_rate": 8.69620253164557e-05,
"loss": 0.9752,
"step": 28250
},
{
"epoch": 0.45,
"learning_rate": 8.683544303797468e-05,
"loss": 0.9753,
"step": 28500
},
{
"epoch": 0.45,
"learning_rate": 8.670886075949367e-05,
"loss": 0.9724,
"step": 28750
},
{
"epoch": 0.46,
"learning_rate": 8.658227848101266e-05,
"loss": 0.9739,
"step": 29000
},
{
"epoch": 0.46,
"learning_rate": 8.645569620253166e-05,
"loss": 0.9712,
"step": 29250
},
{
"epoch": 0.46,
"learning_rate": 8.632911392405065e-05,
"loss": 0.9707,
"step": 29500
},
{
"epoch": 0.47,
"learning_rate": 8.620253164556964e-05,
"loss": 0.9747,
"step": 29750
},
{
"epoch": 0.47,
"learning_rate": 8.607594936708861e-05,
"loss": 0.97,
"step": 30000
},
{
"epoch": 0.48,
"learning_rate": 8.59493670886076e-05,
"loss": 0.9707,
"step": 30250
},
{
"epoch": 0.48,
"learning_rate": 8.582278481012659e-05,
"loss": 0.9708,
"step": 30500
},
{
"epoch": 0.48,
"learning_rate": 8.569620253164558e-05,
"loss": 0.9676,
"step": 30750
},
{
"epoch": 0.49,
"learning_rate": 8.556962025316456e-05,
"loss": 0.9672,
"step": 31000
},
{
"epoch": 0.49,
"learning_rate": 8.544303797468355e-05,
"loss": 0.9691,
"step": 31250
},
{
"epoch": 0.5,
"learning_rate": 8.531645569620254e-05,
"loss": 0.965,
"step": 31500
},
{
"epoch": 0.5,
"learning_rate": 8.518987341772152e-05,
"loss": 0.9645,
"step": 31750
},
{
"epoch": 0.5,
"learning_rate": 8.50632911392405e-05,
"loss": 0.9643,
"step": 32000
},
{
"epoch": 0.51,
"learning_rate": 8.49367088607595e-05,
"loss": 0.9631,
"step": 32250
},
{
"epoch": 0.51,
"learning_rate": 8.481012658227848e-05,
"loss": 0.9644,
"step": 32500
},
{
"epoch": 0.52,
"learning_rate": 8.468354430379747e-05,
"loss": 0.964,
"step": 32750
},
{
"epoch": 0.52,
"learning_rate": 8.455696202531646e-05,
"loss": 0.9627,
"step": 33000
},
{
"epoch": 0.52,
"learning_rate": 8.443037974683545e-05,
"loss": 0.9638,
"step": 33250
},
{
"epoch": 0.53,
"learning_rate": 8.430379746835444e-05,
"loss": 0.9606,
"step": 33500
},
{
"epoch": 0.53,
"learning_rate": 8.417721518987342e-05,
"loss": 0.9605,
"step": 33750
},
{
"epoch": 0.54,
"learning_rate": 8.405063291139241e-05,
"loss": 0.9604,
"step": 34000
},
{
"epoch": 0.54,
"learning_rate": 8.39240506329114e-05,
"loss": 0.9583,
"step": 34250
},
{
"epoch": 0.54,
"learning_rate": 8.379746835443039e-05,
"loss": 0.9589,
"step": 34500
},
{
"epoch": 0.55,
"learning_rate": 8.367088607594938e-05,
"loss": 0.9575,
"step": 34750
},
{
"epoch": 0.55,
"learning_rate": 8.354430379746835e-05,
"loss": 0.956,
"step": 35000
},
{
"epoch": 0.56,
"learning_rate": 8.341772151898734e-05,
"loss": 0.9562,
"step": 35250
},
{
"epoch": 0.56,
"learning_rate": 8.329113924050633e-05,
"loss": 0.9551,
"step": 35500
},
{
"epoch": 0.56,
"learning_rate": 8.316455696202532e-05,
"loss": 0.9538,
"step": 35750
},
{
"epoch": 0.57,
"learning_rate": 8.303797468354431e-05,
"loss": 0.9555,
"step": 36000
},
{
"epoch": 0.57,
"learning_rate": 8.29113924050633e-05,
"loss": 0.9547,
"step": 36250
},
{
"epoch": 0.58,
"learning_rate": 8.278481012658229e-05,
"loss": 0.9566,
"step": 36500
},
{
"epoch": 0.58,
"learning_rate": 8.265822784810126e-05,
"loss": 0.954,
"step": 36750
},
{
"epoch": 0.58,
"learning_rate": 8.253164556962025e-05,
"loss": 0.9533,
"step": 37000
},
{
"epoch": 0.59,
"learning_rate": 8.240506329113924e-05,
"loss": 0.9544,
"step": 37250
},
{
"epoch": 0.59,
"learning_rate": 8.227848101265824e-05,
"loss": 0.9516,
"step": 37500
},
{
"epoch": 0.59,
"learning_rate": 8.215189873417723e-05,
"loss": 0.9531,
"step": 37750
},
{
"epoch": 0.6,
"learning_rate": 8.202531645569622e-05,
"loss": 0.9538,
"step": 38000
},
{
"epoch": 0.6,
"learning_rate": 8.189873417721519e-05,
"loss": 0.953,
"step": 38250
},
{
"epoch": 0.61,
"learning_rate": 8.177215189873418e-05,
"loss": 0.9507,
"step": 38500
},
{
"epoch": 0.61,
"learning_rate": 8.164556962025317e-05,
"loss": 0.9504,
"step": 38750
},
{
"epoch": 0.61,
"learning_rate": 8.151898734177216e-05,
"loss": 0.9519,
"step": 39000
},
{
"epoch": 0.62,
"learning_rate": 8.139240506329115e-05,
"loss": 0.9494,
"step": 39250
},
{
"epoch": 0.62,
"learning_rate": 8.126582278481013e-05,
"loss": 0.9504,
"step": 39500
},
{
"epoch": 0.63,
"learning_rate": 8.113924050632912e-05,
"loss": 0.9479,
"step": 39750
},
{
"epoch": 0.63,
"learning_rate": 8.10126582278481e-05,
"loss": 0.9497,
"step": 40000
},
{
"epoch": 0.63,
"learning_rate": 8.088607594936709e-05,
"loss": 0.9483,
"step": 40250
},
{
"epoch": 0.64,
"learning_rate": 8.075949367088608e-05,
"loss": 0.9464,
"step": 40500
},
{
"epoch": 0.64,
"learning_rate": 8.063291139240506e-05,
"loss": 0.9478,
"step": 40750
},
{
"epoch": 0.65,
"learning_rate": 8.050632911392405e-05,
"loss": 0.9476,
"step": 41000
},
{
"epoch": 0.65,
"learning_rate": 8.037974683544304e-05,
"loss": 0.9444,
"step": 41250
},
{
"epoch": 0.65,
"learning_rate": 8.025316455696203e-05,
"loss": 0.9461,
"step": 41500
},
{
"epoch": 0.66,
"learning_rate": 8.012658227848102e-05,
"loss": 0.946,
"step": 41750
},
{
"epoch": 0.66,
"learning_rate": 8e-05,
"loss": 0.9454,
"step": 42000
},
{
"epoch": 0.67,
"learning_rate": 7.9873417721519e-05,
"loss": 0.9441,
"step": 42250
},
{
"epoch": 0.67,
"learning_rate": 7.974683544303798e-05,
"loss": 0.9431,
"step": 42500
},
{
"epoch": 0.67,
"learning_rate": 7.962025316455697e-05,
"loss": 0.9419,
"step": 42750
},
{
"epoch": 0.68,
"learning_rate": 7.949367088607596e-05,
"loss": 0.944,
"step": 43000
},
{
"epoch": 0.68,
"learning_rate": 7.936708860759494e-05,
"loss": 0.9433,
"step": 43250
},
{
"epoch": 0.69,
"learning_rate": 7.924050632911392e-05,
"loss": 0.9428,
"step": 43500
},
{
"epoch": 0.69,
"learning_rate": 7.911392405063291e-05,
"loss": 0.9406,
"step": 43750
},
{
"epoch": 0.69,
"learning_rate": 7.89873417721519e-05,
"loss": 0.9397,
"step": 44000
},
{
"epoch": 0.7,
"learning_rate": 7.886075949367089e-05,
"loss": 0.9392,
"step": 44250
},
{
"epoch": 0.7,
"learning_rate": 7.873417721518988e-05,
"loss": 0.9404,
"step": 44500
},
{
"epoch": 0.71,
"learning_rate": 7.860759493670887e-05,
"loss": 0.9388,
"step": 44750
},
{
"epoch": 0.71,
"learning_rate": 7.848101265822784e-05,
"loss": 0.9365,
"step": 45000
},
{
"epoch": 0.71,
"learning_rate": 7.835443037974683e-05,
"loss": 0.9387,
"step": 45250
},
{
"epoch": 0.72,
"learning_rate": 7.822784810126582e-05,
"loss": 0.9366,
"step": 45500
},
{
"epoch": 0.72,
"learning_rate": 7.810126582278482e-05,
"loss": 0.9361,
"step": 45750
},
{
"epoch": 0.72,
"learning_rate": 7.797468354430381e-05,
"loss": 0.935,
"step": 46000
},
{
"epoch": 0.73,
"learning_rate": 7.78481012658228e-05,
"loss": 0.9355,
"step": 46250
},
{
"epoch": 0.73,
"learning_rate": 7.772151898734177e-05,
"loss": 0.9352,
"step": 46500
},
{
"epoch": 0.74,
"learning_rate": 7.759493670886076e-05,
"loss": 0.9347,
"step": 46750
},
{
"epoch": 0.74,
"learning_rate": 7.746835443037975e-05,
"loss": 0.9343,
"step": 47000
},
{
"epoch": 0.74,
"learning_rate": 7.734177215189874e-05,
"loss": 0.934,
"step": 47250
},
{
"epoch": 0.75,
"learning_rate": 7.721518987341773e-05,
"loss": 0.9336,
"step": 47500
},
{
"epoch": 0.75,
"learning_rate": 7.708860759493672e-05,
"loss": 0.9345,
"step": 47750
},
{
"epoch": 0.76,
"learning_rate": 7.69620253164557e-05,
"loss": 0.932,
"step": 48000
}
],
"max_steps": 200000,
"num_train_epochs": 4,
"total_flos": 5.730504888287232e+18,
"trial_name": null,
"trial_params": null
}
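
The trainer state above can be inspected programmatically. A minimal sketch, assuming the file has been downloaded locally as trainer_state.json and that matplotlib is installed; the field names ("log_history", "step", "loss") match the entries shown above:

import json
import matplotlib.pyplot as plt

# Load the state file written by the Hugging Face Trainer.
# Assumes it was saved locally as "trainer_state.json".
with open("trainer_state.json") as f:
    state = json.load(f)

# Keep only log entries that report a training loss.
entries = [e for e in state["log_history"] if "loss" in e]
steps = [e["step"] for e in entries]
losses = [e["loss"] for e in entries]

# Plot the masked-language-modeling loss against the global step.
plt.plot(steps, losses)
plt.xlabel("global step")
plt.ylabel("training loss")
plt.title("xlm-r-slobertic training loss (first 48,000 steps)")
plt.show()

Reading the log this way makes the overall trend easy to see: the loss falls from about 1.31 at step 250 to about 0.93 at step 48,000, roughly three quarters of the way through the first of the four configured epochs.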