hinglish-sentence-bert / log_history.json
[
{
"loss": 5.60179638671875,
"learning_rate": 4.9189600959512466e-05,
"epoch": 0.048623942429252165,
"total_flos": 127437846067872,
"step": 500
},
{
"loss": 4.56484326171875,
"learning_rate": 4.837920191902493e-05,
"epoch": 0.09724788485850433,
"total_flos": 258224201311392,
"step": 1000
},
{
"loss": 4.172212890625,
"learning_rate": 4.756880287853739e-05,
"epoch": 0.1458718272877565,
"total_flos": 389157743771424,
"step": 1500
},
{
"loss": 4.0773701171875,
"learning_rate": 4.6758403838049856e-05,
"epoch": 0.19449576971700866,
"total_flos": 515696696481312,
"step": 2000
},
{
"loss": 3.90251953125,
"learning_rate": 4.594800479756232e-05,
"epoch": 0.24311971214626082,
"total_flos": 643145055921792,
"step": 2500
},
{
"loss": 3.74609375,
"learning_rate": 4.513760575707479e-05,
"epoch": 0.291743654575513,
"total_flos": 771103313933760,
"step": 3000
},
{
"loss": 3.63271875,
"learning_rate": 4.432720671658725e-05,
"epoch": 0.34036759700476515,
"total_flos": 899392743182880,
"step": 3500
},
{
"loss": 3.617484375,
"learning_rate": 4.3516807676099715e-05,
"epoch": 0.3889915394340173,
"total_flos": 1031971628456064,
"step": 4000
},
{
"loss": 3.59037109375,
"learning_rate": 4.270640863561218e-05,
"epoch": 0.4376154818632695,
"total_flos": 1159745902447392,
"step": 4500
},
{
"loss": 3.5753984375,
"learning_rate": 4.189600959512464e-05,
"epoch": 0.48623942429252165,
"total_flos": 1288030075010208,
"step": 5000
},
{
"loss": 3.5130390625,
"learning_rate": 4.1085610554637105e-05,
"epoch": 0.5348633667217738,
"total_flos": 1414432353876192,
"step": 5500
},
{
"loss": 3.469171875,
"learning_rate": 4.027521151414957e-05,
"epoch": 0.583487309151026,
"total_flos": 1539068386144032,
"step": 6000
},
{
"loss": 3.4336640625,
"learning_rate": 3.946481247366203e-05,
"epoch": 0.6321112515802781,
"total_flos": 1667946564259200,
"step": 6500
},
{
"loss": 3.39712109375,
"learning_rate": 3.8654413433174495e-05,
"epoch": 0.6807351940095303,
"total_flos": 1794422436733440,
"step": 7000
},
{
"loss": 3.3770703125,
"learning_rate": 3.784401439268696e-05,
"epoch": 0.7293591364387825,
"total_flos": 1922738149414080,
"step": 7500
},
{
"loss": 3.3730703125,
"learning_rate": 3.703361535219942e-05,
"epoch": 0.7779830788680346,
"total_flos": 2052888445614816,
"step": 8000
},
{
"loss": 3.310546875,
"learning_rate": 3.622321631171189e-05,
"epoch": 0.8266070212972868,
"total_flos": 2186350454187072,
"step": 8500
},
{
"loss": 3.260265625,
"learning_rate": 3.5412817271224355e-05,
"epoch": 0.875230963726539,
"total_flos": 2314024851138624,
"step": 9000
},
{
"loss": 3.2306796875,
"learning_rate": 3.460241823073682e-05,
"epoch": 0.9238549061557911,
"total_flos": 2442719045233152,
"step": 9500
},
{
"loss": 3.2746796875,
"learning_rate": 3.379201919024928e-05,
"epoch": 0.9724788485850433,
"total_flos": 2569378901728032,
"step": 10000
},
{
"loss": 3.1525703125,
"learning_rate": 3.2981620149761745e-05,
"epoch": 1.0211027910142954,
"total_flos": 2698954904950056,
"step": 10500
},
{
"loss": 3.2410859375,
"learning_rate": 3.217122110927421e-05,
"epoch": 1.0697267334435476,
"total_flos": 2828821340090376,
"step": 11000
},
{
"loss": 3.0903984375,
"learning_rate": 3.136082206878667e-05,
"epoch": 1.1183506758727997,
"total_flos": 2958682518544392,
"step": 11500
},
{
"loss": 3.08865625,
"learning_rate": 3.0550423028299135e-05,
"epoch": 1.166974618302052,
"total_flos": 3088039055113224,
"step": 12000
},
{
"loss": 3.1113671875,
"learning_rate": 2.9740023987811598e-05,
"epoch": 1.215598560731304,
"total_flos": 3214535954332680,
"step": 12500
},
{
"loss": 3.088109375,
"learning_rate": 2.8929624947324065e-05,
"epoch": 1.2642225031605563,
"total_flos": 3340123446821544,
"step": 13000
},
{
"loss": 3.03825,
"learning_rate": 2.8119225906836528e-05,
"epoch": 1.3128464455898083,
"total_flos": 3469900518294696,
"step": 13500
},
{
"loss": 3.1165703125,
"learning_rate": 2.730882686634899e-05,
"epoch": 1.3614703880190606,
"total_flos": 3597664278913416,
"step": 14000
},
{
"loss": 3.0786875,
"learning_rate": 2.6498427825861454e-05,
"epoch": 1.4100943304483127,
"total_flos": 3726390013125768,
"step": 14500
},
{
"loss": 3.0769453125,
"learning_rate": 2.5688028785373918e-05,
"epoch": 1.458718272877565,
"total_flos": 3854211597293832,
"step": 15000
},
{
"loss": 2.9751015625,
"learning_rate": 2.487762974488638e-05,
"epoch": 1.5073422153068172,
"total_flos": 3979215597602952,
"step": 15500
},
{
"loss": 3.004953125,
"learning_rate": 2.4067230704398848e-05,
"epoch": 1.5559661577360693,
"total_flos": 4105360298840040,
"step": 16000
},
{
"loss": 3.0202265625,
"learning_rate": 2.325683166391131e-05,
"epoch": 1.6045901001653213,
"total_flos": 4232114775688392,
"step": 16500
},
{
"loss": 2.967203125,
"learning_rate": 2.2446432623423774e-05,
"epoch": 1.6532140425945736,
"total_flos": 4357849455393768,
"step": 17000
},
{
"loss": 2.95409375,
"learning_rate": 2.1636033582936237e-05,
"epoch": 1.7018379850238259,
"total_flos": 4487106114922824,
"step": 17500
},
{
"loss": 3.068265625,
"learning_rate": 2.08256345424487e-05,
"epoch": 1.750461927453078,
"total_flos": 4614885645600456,
"step": 18000
},
{
"loss": 2.992953125,
"learning_rate": 2.0015235501961167e-05,
"epoch": 1.79908586988233,
"total_flos": 4744000374599304,
"step": 18500
},
{
"loss": 2.9246953125,
"learning_rate": 1.920483646147363e-05,
"epoch": 1.8477098123115823,
"total_flos": 4872962659695336,
"step": 19000
},
{
"loss": 2.980765625,
"learning_rate": 1.8394437420986094e-05,
"epoch": 1.8963337547408345,
"total_flos": 5000447815939944,
"step": 19500
},
{
"loss": 2.95859375,
"learning_rate": 1.7584038380498557e-05,
"epoch": 1.9449576971700866,
"total_flos": 5130419384806344,
"step": 20000
},
{
"loss": 2.951875,
"learning_rate": 1.677363934001102e-05,
"epoch": 1.9935816395993387,
"total_flos": 5260301590005576,
"step": 20500
},
{
"loss": 2.90753125,
"learning_rate": 1.5963240299523484e-05,
"epoch": 2.0422055820285907,
"total_flos": 5386174914812220,
"step": 21000
},
{
"loss": 2.8725,
"learning_rate": 1.5152841259035949e-05,
"epoch": 2.090829524457843,
"total_flos": 5515216050202812,
"step": 21500
},
{
"loss": 2.813546875,
"learning_rate": 1.4342442218548414e-05,
"epoch": 2.1394534668870953,
"total_flos": 5644814394341628,
"step": 22000
},
{
"loss": 2.867796875,
"learning_rate": 1.3532043178060877e-05,
"epoch": 2.1880774093163473,
"total_flos": 5770811908362204,
"step": 22500
},
{
"loss": 2.797328125,
"learning_rate": 1.272164413757334e-05,
"epoch": 2.2367013517455994,
"total_flos": 5901487873193340,
"step": 23000
},
{
"loss": 2.8206875,
"learning_rate": 1.1911245097085805e-05,
"epoch": 2.285325294174852,
"total_flos": 6032090244416220,
"step": 23500
},
{
"loss": 2.835703125,
"learning_rate": 1.110084605659827e-05,
"epoch": 2.333949236604104,
"total_flos": 6158765870970012,
"step": 24000
},
{
"loss": 2.888296875,
"learning_rate": 1.0290447016110734e-05,
"epoch": 2.382573179033356,
"total_flos": 6289205284917468,
"step": 24500
},
{
"loss": 2.82090625,
"learning_rate": 9.480047975623198e-06,
"epoch": 2.431197121462608,
"total_flos": 6419592132001884,
"step": 25000
},
{
"loss": 2.832359375,
"learning_rate": 8.669648935135662e-06,
"epoch": 2.4798210638918605,
"total_flos": 6547245502208220,
"step": 25500
},
{
"loss": 2.81575,
"learning_rate": 7.859249894648125e-06,
"epoch": 2.5284450063211126,
"total_flos": 6676738712620956,
"step": 26000
},
{
"loss": 2.750359375,
"learning_rate": 7.048850854160589e-06,
"epoch": 2.5770689487503646,
"total_flos": 6806862725390172,
"step": 26500
},
{
"loss": 2.80775,
"learning_rate": 6.2384518136730525e-06,
"epoch": 2.6256928911796167,
"total_flos": 6936345422430300,
"step": 27000
},
{
"loss": 2.7769375,
"learning_rate": 5.4280527731855174e-06,
"epoch": 2.674316833608869,
"total_flos": 7062795011473020,
"step": 27500
},
{
"loss": 2.812875,
"learning_rate": 4.617653732697981e-06,
"epoch": 2.7229407760381212,
"total_flos": 7193202885302652,
"step": 28000
},
{
"loss": 2.82128125,
"learning_rate": 3.807254692210445e-06,
"epoch": 2.7715647184673733,
"total_flos": 7325014294375452,
"step": 28500
},
{
"loss": 2.7645625,
"learning_rate": 2.9968556517229085e-06,
"epoch": 2.8201886608966253,
"total_flos": 7450607043550620,
"step": 29000
},
{
"loss": 2.867234375,
"learning_rate": 2.1864566112353722e-06,
"epoch": 2.8688126033258774,
"total_flos": 7577203819809852,
"step": 29500
},
{
"loss": 2.772796875,
"learning_rate": 1.3760575707478364e-06,
"epoch": 2.91743654575513,
"total_flos": 7705477479000060,
"step": 30000
},
{
"loss": 2.80721875,
"learning_rate": 5.656585302603003e-07,
"epoch": 2.966060488184382,
"total_flos": 7829551045833372,
"step": 30500
}
]
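
The array above is the trainer's step-by-step log: each entry records the training loss, the (linearly decaying) learning rate, the fractional epoch, the cumulative FLOPs, and the global step, sampled every 500 steps. The following is a minimal sketch, not part of the repository, of one way to read and plot this log; it assumes the file has been saved locally as log_history.json and uses matplotlib purely for illustration.

import json

import matplotlib.pyplot as plt

# Load the log: a list of dicts with keys
# loss, learning_rate, epoch, total_flos, step.
with open("log_history.json") as f:
    history = json.load(f)

steps = [entry["step"] for entry in history]
losses = [entry["loss"] for entry in history]
lrs = [entry["learning_rate"] for entry in history]

fig, (ax_loss, ax_lr) = plt.subplots(1, 2, figsize=(10, 4))

# Training loss, logged every 500 steps.
ax_loss.plot(steps, losses)
ax_loss.set_xlabel("step")
ax_loss.set_ylabel("loss")

# Learning rate, which decays linearly over the ~3 epochs covered by the log.
ax_lr.plot(steps, lrs)
ax_lr.set_xlabel("step")
ax_lr.set_ylabel("learning rate")

fig.tight_layout()
plt.show()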