gpt-peter-2.7B / trainer_state.json
Commit 12115ff: update model with additional 1.8ish epochs training
{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 1.4515819023292278,
"global_step": 1793,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0,
"learning_rate": 8.228161798644422e-06,
"loss": 3.1603,
"step": 5
},
{
"epoch": 0.01,
"learning_rate": 1.1771838201355582e-05,
"loss": 2.7601,
"step": 10
},
{
"epoch": 0.01,
"learning_rate": 1.384475601163205e-05,
"loss": 2.6729,
"step": 15
},
{
"epoch": 0.02,
"learning_rate": 1.5315514604066738e-05,
"loss": 2.3958,
"step": 20
},
{
"epoch": 0.02,
"learning_rate": 1.6456323597288844e-05,
"loss": 2.3595,
"step": 25
},
{
"epoch": 0.02,
"learning_rate": 1.738843241434321e-05,
"loss": 2.4,
"step": 30
},
{
"epoch": 0.03,
"learning_rate": 1.817651918997498e-05,
"loss": 2.2831,
"step": 35
},
{
"epoch": 0.03,
"learning_rate": 1.8859191006777896e-05,
"loss": 2.2999,
"step": 40
},
{
"epoch": 0.04,
"learning_rate": 1.946135022461968e-05,
"loss": 2.1469,
"step": 45
},
{
"epoch": 0.04,
"learning_rate": 2e-05,
"loss": 2.2508,
"step": 50
},
{
"epoch": 0.04,
"learning_rate": 2e-05,
"loss": 2.285,
"step": 55
},
{
"epoch": 0.05,
"learning_rate": 2e-05,
"loss": 2.2009,
"step": 60
},
{
"epoch": 0.05,
"learning_rate": 2e-05,
"loss": 2.1514,
"step": 65
},
{
"epoch": 0.06,
"learning_rate": 2e-05,
"loss": 2.137,
"step": 70
},
{
"epoch": 0.06,
"learning_rate": 2e-05,
"loss": 2.2193,
"step": 75
},
{
"epoch": 0.06,
"learning_rate": 2e-05,
"loss": 2.2193,
"step": 80
},
{
"epoch": 0.07,
"learning_rate": 2e-05,
"loss": 2.1647,
"step": 85
},
{
"epoch": 0.07,
"learning_rate": 2e-05,
"loss": 2.2805,
"step": 90
},
{
"epoch": 0.08,
"learning_rate": 2e-05,
"loss": 2.1456,
"step": 95
},
{
"epoch": 0.08,
"learning_rate": 2e-05,
"loss": 2.1043,
"step": 100
},
{
"epoch": 0.08,
"learning_rate": 2e-05,
"loss": 2.1215,
"step": 105
},
{
"epoch": 0.09,
"learning_rate": 2e-05,
"loss": 2.1528,
"step": 110
},
{
"epoch": 0.09,
"learning_rate": 2e-05,
"loss": 2.134,
"step": 115
},
{
"epoch": 0.1,
"learning_rate": 2e-05,
"loss": 2.0645,
"step": 120
},
{
"epoch": 0.1,
"learning_rate": 2e-05,
"loss": 2.1347,
"step": 125
},
{
"epoch": 0.11,
"learning_rate": 2e-05,
"loss": 2.1372,
"step": 130
},
{
"epoch": 0.11,
"learning_rate": 2e-05,
"loss": 2.0921,
"step": 135
},
{
"epoch": 0.11,
"learning_rate": 2e-05,
"loss": 2.1157,
"step": 140
},
{
"epoch": 0.12,
"learning_rate": 2e-05,
"loss": 2.0916,
"step": 145
},
{
"epoch": 0.12,
"learning_rate": 2e-05,
"loss": 2.0519,
"step": 150
},
{
"epoch": 0.13,
"learning_rate": 2e-05,
"loss": 2.0987,
"step": 155
},
{
"epoch": 0.13,
"learning_rate": 2e-05,
"loss": 1.9597,
"step": 160
},
{
"epoch": 0.13,
"learning_rate": 2e-05,
"loss": 2.1301,
"step": 165
},
{
"epoch": 0.14,
"learning_rate": 2e-05,
"loss": 2.1803,
"step": 170
},
{
"epoch": 0.14,
"learning_rate": 2e-05,
"loss": 2.1488,
"step": 175
},
{
"epoch": 0.15,
"learning_rate": 2e-05,
"loss": 2.0421,
"step": 180
},
{
"epoch": 0.15,
"learning_rate": 2e-05,
"loss": 2.2094,
"step": 185
},
{
"epoch": 0.15,
"learning_rate": 2e-05,
"loss": 2.1173,
"step": 190
},
{
"epoch": 0.16,
"learning_rate": 2e-05,
"loss": 2.0779,
"step": 195
},
{
"epoch": 0.16,
"learning_rate": 2e-05,
"loss": 2.1213,
"step": 200
},
{
"epoch": 0.17,
"learning_rate": 2e-05,
"loss": 2.1425,
"step": 205
},
{
"epoch": 0.17,
"learning_rate": 2e-05,
"loss": 2.0807,
"step": 210
},
{
"epoch": 0.17,
"learning_rate": 2e-05,
"loss": 2.1772,
"step": 215
},
{
"epoch": 0.18,
"learning_rate": 2e-05,
"loss": 2.0721,
"step": 220
},
{
"epoch": 0.18,
"learning_rate": 2e-05,
"loss": 2.1006,
"step": 225
},
{
"epoch": 0.19,
"learning_rate": 2e-05,
"loss": 2.1579,
"step": 230
},
{
"epoch": 0.19,
"learning_rate": 2e-05,
"loss": 1.9796,
"step": 235
},
{
"epoch": 0.19,
"learning_rate": 2e-05,
"loss": 2.0889,
"step": 240
},
{
"epoch": 0.2,
"learning_rate": 2e-05,
"loss": 1.977,
"step": 245
},
{
"epoch": 0.2,
"learning_rate": 2e-05,
"loss": 2.1304,
"step": 250
},
{
"epoch": 0.21,
"learning_rate": 2e-05,
"loss": 2.1525,
"step": 255
},
{
"epoch": 0.21,
"learning_rate": 2e-05,
"loss": 2.0773,
"step": 260
},
{
"epoch": 0.21,
"learning_rate": 2e-05,
"loss": 2.145,
"step": 265
},
{
"epoch": 0.22,
"learning_rate": 2e-05,
"loss": 2.0542,
"step": 270
},
{
"epoch": 0.22,
"learning_rate": 2e-05,
"loss": 2.0588,
"step": 275
},
{
"epoch": 0.23,
"learning_rate": 2e-05,
"loss": 2.2034,
"step": 280
},
{
"epoch": 0.23,
"learning_rate": 2e-05,
"loss": 2.0698,
"step": 285
},
{
"epoch": 0.23,
"learning_rate": 2e-05,
"loss": 2.0572,
"step": 290
},
{
"epoch": 0.24,
"learning_rate": 2e-05,
"loss": 2.1129,
"step": 295
},
{
"epoch": 0.24,
"learning_rate": 2e-05,
"loss": 2.1429,
"step": 300
},
{
"epoch": 0.25,
"learning_rate": 2e-05,
"loss": 2.0936,
"step": 305
},
{
"epoch": 0.25,
"learning_rate": 2e-05,
"loss": 1.9964,
"step": 310
},
{
"epoch": 0.25,
"learning_rate": 2e-05,
"loss": 2.1298,
"step": 315
},
{
"epoch": 0.26,
"learning_rate": 2e-05,
"loss": 2.0689,
"step": 320
},
{
"epoch": 0.26,
"learning_rate": 2e-05,
"loss": 2.0804,
"step": 325
},
{
"epoch": 0.27,
"learning_rate": 2e-05,
"loss": 2.0465,
"step": 330
},
{
"epoch": 0.27,
"learning_rate": 2e-05,
"loss": 2.0871,
"step": 335
},
{
"epoch": 0.28,
"learning_rate": 2e-05,
"loss": 2.099,
"step": 340
},
{
"epoch": 0.28,
"learning_rate": 2e-05,
"loss": 2.0433,
"step": 345
},
{
"epoch": 0.28,
"learning_rate": 2e-05,
"loss": 2.0825,
"step": 350
},
{
"epoch": 0.29,
"learning_rate": 2e-05,
"loss": 2.0902,
"step": 355
},
{
"epoch": 0.29,
"learning_rate": 2e-05,
"loss": 2.063,
"step": 360
},
{
"epoch": 0.3,
"learning_rate": 2e-05,
"loss": 2.1341,
"step": 365
},
{
"epoch": 0.3,
"learning_rate": 2e-05,
"loss": 2.0562,
"step": 370
},
{
"epoch": 0.3,
"learning_rate": 2e-05,
"loss": 2.0492,
"step": 375
},
{
"epoch": 0.31,
"learning_rate": 2e-05,
"loss": 1.9866,
"step": 380
},
{
"epoch": 0.31,
"learning_rate": 2e-05,
"loss": 2.1307,
"step": 385
},
{
"epoch": 0.32,
"learning_rate": 2e-05,
"loss": 2.1265,
"step": 390
},
{
"epoch": 0.32,
"learning_rate": 2e-05,
"loss": 2.0729,
"step": 395
},
{
"epoch": 0.32,
"learning_rate": 2e-05,
"loss": 2.0072,
"step": 400
},
{
"epoch": 0.33,
"learning_rate": 2e-05,
"loss": 2.1163,
"step": 405
},
{
"epoch": 0.33,
"learning_rate": 2e-05,
"loss": 2.1249,
"step": 410
},
{
"epoch": 0.34,
"learning_rate": 2e-05,
"loss": 2.0166,
"step": 415
},
{
"epoch": 0.34,
"learning_rate": 2e-05,
"loss": 2.0386,
"step": 420
},
{
"epoch": 0.34,
"learning_rate": 2e-05,
"loss": 2.0629,
"step": 425
},
{
"epoch": 0.35,
"learning_rate": 2e-05,
"loss": 2.0647,
"step": 430
},
{
"epoch": 0.35,
"learning_rate": 2e-05,
"loss": 2.1011,
"step": 435
},
{
"epoch": 0.36,
"learning_rate": 2e-05,
"loss": 1.9827,
"step": 440
},
{
"epoch": 0.36,
"learning_rate": 2e-05,
"loss": 2.1154,
"step": 445
},
{
"epoch": 0.36,
"learning_rate": 2e-05,
"loss": 2.0073,
"step": 450
},
{
"epoch": 0.37,
"learning_rate": 2e-05,
"loss": 2.11,
"step": 455
},
{
"epoch": 0.37,
"learning_rate": 2e-05,
"loss": 2.0181,
"step": 460
},
{
"epoch": 0.38,
"learning_rate": 2e-05,
"loss": 2.0479,
"step": 465
},
{
"epoch": 0.38,
"learning_rate": 2e-05,
"loss": 2.0791,
"step": 470
},
{
"epoch": 0.38,
"learning_rate": 2e-05,
"loss": 2.1109,
"step": 475
},
{
"epoch": 0.39,
"learning_rate": 2e-05,
"loss": 2.0062,
"step": 480
},
{
"epoch": 0.39,
"learning_rate": 2e-05,
"loss": 2.0485,
"step": 485
},
{
"epoch": 0.4,
"learning_rate": 2e-05,
"loss": 1.9999,
"step": 490
},
{
"epoch": 0.4,
"learning_rate": 2e-05,
"loss": 1.9982,
"step": 495
},
{
"epoch": 0.4,
"learning_rate": 2e-05,
"loss": 2.0399,
"step": 500
},
{
"epoch": 0.41,
"learning_rate": 2e-05,
"loss": 2.0282,
"step": 505
},
{
"epoch": 0.41,
"learning_rate": 2e-05,
"loss": 2.064,
"step": 510
},
{
"epoch": 0.42,
"learning_rate": 2e-05,
"loss": 2.0239,
"step": 515
},
{
"epoch": 0.42,
"learning_rate": 2e-05,
"loss": 1.9875,
"step": 520
},
{
"epoch": 0.42,
"learning_rate": 2e-05,
"loss": 2.1219,
"step": 525
},
{
"epoch": 0.43,
"learning_rate": 2e-05,
"loss": 1.9841,
"step": 530
},
{
"epoch": 0.43,
"learning_rate": 2e-05,
"loss": 2.0484,
"step": 535
},
{
"epoch": 0.44,
"learning_rate": 2e-05,
"loss": 2.0757,
"step": 540
},
{
"epoch": 0.44,
"learning_rate": 2e-05,
"loss": 2.0275,
"step": 545
},
{
"epoch": 0.45,
"learning_rate": 2e-05,
"loss": 2.1235,
"step": 550
},
{
"epoch": 0.45,
"learning_rate": 2e-05,
"loss": 1.994,
"step": 555
},
{
"epoch": 0.45,
"learning_rate": 2e-05,
"loss": 2.0548,
"step": 560
},
{
"epoch": 0.46,
"learning_rate": 2e-05,
"loss": 2.0246,
"step": 565
},
{
"epoch": 0.46,
"learning_rate": 2e-05,
"loss": 2.0438,
"step": 570
},
{
"epoch": 0.47,
"learning_rate": 2e-05,
"loss": 1.9877,
"step": 575
},
{
"epoch": 0.47,
"learning_rate": 2e-05,
"loss": 2.0073,
"step": 580
},
{
"epoch": 0.47,
"learning_rate": 2e-05,
"loss": 2.0335,
"step": 585
},
{
"epoch": 0.48,
"learning_rate": 2e-05,
"loss": 1.9349,
"step": 590
},
{
"epoch": 0.48,
"learning_rate": 2e-05,
"loss": 2.0598,
"step": 595
},
{
"epoch": 0.49,
"learning_rate": 2e-05,
"loss": 2.1281,
"step": 600
},
{
"epoch": 0.49,
"learning_rate": 2e-05,
"loss": 2.1086,
"step": 605
},
{
"epoch": 0.49,
"learning_rate": 2e-05,
"loss": 2.0188,
"step": 610
},
{
"epoch": 0.5,
"learning_rate": 2e-05,
"loss": 2.0448,
"step": 615
},
{
"epoch": 0.5,
"learning_rate": 2e-05,
"loss": 2.1233,
"step": 620
},
{
"epoch": 0.51,
"learning_rate": 2e-05,
"loss": 1.9751,
"step": 625
},
{
"epoch": 0.51,
"learning_rate": 2e-05,
"loss": 2.0676,
"step": 630
},
{
"epoch": 0.51,
"learning_rate": 2e-05,
"loss": 2.0068,
"step": 635
},
{
"epoch": 0.52,
"learning_rate": 2e-05,
"loss": 2.0636,
"step": 640
},
{
"epoch": 0.52,
"learning_rate": 2e-05,
"loss": 2.012,
"step": 645
},
{
"epoch": 0.53,
"learning_rate": 2e-05,
"loss": 2.0323,
"step": 650
},
{
"epoch": 0.53,
"learning_rate": 2e-05,
"loss": 2.0697,
"step": 655
},
{
"epoch": 0.53,
"learning_rate": 2e-05,
"loss": 2.0221,
"step": 660
},
{
"epoch": 0.54,
"learning_rate": 2e-05,
"loss": 2.0391,
"step": 665
},
{
"epoch": 0.54,
"learning_rate": 2e-05,
"loss": 2.0517,
"step": 670
},
{
"epoch": 0.55,
"learning_rate": 2e-05,
"loss": 2.1092,
"step": 675
},
{
"epoch": 0.55,
"learning_rate": 2e-05,
"loss": 2.0735,
"step": 680
},
{
"epoch": 0.55,
"learning_rate": 2e-05,
"loss": 2.1768,
"step": 685
},
{
"epoch": 0.56,
"learning_rate": 2e-05,
"loss": 1.9814,
"step": 690
},
{
"epoch": 0.56,
"learning_rate": 2e-05,
"loss": 2.0082,
"step": 695
},
{
"epoch": 0.57,
"learning_rate": 2e-05,
"loss": 1.9932,
"step": 700
},
{
"epoch": 0.57,
"learning_rate": 2e-05,
"loss": 2.0701,
"step": 705
},
{
"epoch": 0.57,
"learning_rate": 2e-05,
"loss": 1.9849,
"step": 710
},
{
"epoch": 0.58,
"learning_rate": 2e-05,
"loss": 2.0469,
"step": 715
},
{
"epoch": 0.58,
"learning_rate": 2e-05,
"loss": 2.0328,
"step": 720
},
{
"epoch": 0.59,
"learning_rate": 2e-05,
"loss": 2.1638,
"step": 725
},
{
"epoch": 0.59,
"learning_rate": 2e-05,
"loss": 1.9913,
"step": 730
},
{
"epoch": 0.59,
"learning_rate": 2e-05,
"loss": 2.0312,
"step": 735
},
{
"epoch": 0.6,
"learning_rate": 2e-05,
"loss": 2.0468,
"step": 740
},
{
"epoch": 0.6,
"learning_rate": 2e-05,
"loss": 1.9856,
"step": 745
},
{
"epoch": 0.61,
"learning_rate": 2e-05,
"loss": 2.0258,
"step": 750
},
{
"epoch": 0.61,
"learning_rate": 2e-05,
"loss": 2.0983,
"step": 755
},
{
"epoch": 0.62,
"learning_rate": 2e-05,
"loss": 1.9871,
"step": 760
},
{
"epoch": 0.62,
"learning_rate": 2e-05,
"loss": 2.01,
"step": 765
},
{
"epoch": 0.62,
"learning_rate": 2e-05,
"loss": 2.0858,
"step": 770
},
{
"epoch": 0.63,
"learning_rate": 2e-05,
"loss": 2.056,
"step": 775
},
{
"epoch": 0.63,
"learning_rate": 2e-05,
"loss": 2.0671,
"step": 780
},
{
"epoch": 0.64,
"learning_rate": 2e-05,
"loss": 2.0151,
"step": 785
},
{
"epoch": 0.64,
"learning_rate": 2e-05,
"loss": 2.014,
"step": 790
},
{
"epoch": 0.64,
"learning_rate": 2e-05,
"loss": 1.9757,
"step": 795
},
{
"epoch": 0.65,
"learning_rate": 2e-05,
"loss": 2.0435,
"step": 800
},
{
"epoch": 0.65,
"learning_rate": 2e-05,
"loss": 1.9699,
"step": 805
},
{
"epoch": 0.66,
"learning_rate": 2e-05,
"loss": 2.1073,
"step": 810
},
{
"epoch": 0.66,
"learning_rate": 2e-05,
"loss": 2.0371,
"step": 815
},
{
"epoch": 0.66,
"learning_rate": 2e-05,
"loss": 1.9612,
"step": 820
},
{
"epoch": 0.67,
"learning_rate": 2e-05,
"loss": 2.0357,
"step": 825
},
{
"epoch": 0.67,
"learning_rate": 2e-05,
"loss": 1.9747,
"step": 830
},
{
"epoch": 0.68,
"learning_rate": 2e-05,
"loss": 2.0736,
"step": 835
},
{
"epoch": 0.68,
"learning_rate": 2e-05,
"loss": 2.0831,
"step": 840
},
{
"epoch": 0.68,
"learning_rate": 2e-05,
"loss": 2.1133,
"step": 845
},
{
"epoch": 0.69,
"learning_rate": 2e-05,
"loss": 1.9934,
"step": 850
},
{
"epoch": 0.69,
"learning_rate": 2e-05,
"loss": 1.8631,
"step": 855
},
{
"epoch": 0.7,
"learning_rate": 2e-05,
"loss": 2.0426,
"step": 860
},
{
"epoch": 0.7,
"learning_rate": 2e-05,
"loss": 2.0171,
"step": 865
},
{
"epoch": 0.7,
"learning_rate": 2e-05,
"loss": 1.9415,
"step": 870
},
{
"epoch": 0.71,
"learning_rate": 2e-05,
"loss": 1.914,
"step": 875
},
{
"epoch": 0.71,
"learning_rate": 2e-05,
"loss": 2.0661,
"step": 880
},
{
"epoch": 0.72,
"learning_rate": 2e-05,
"loss": 2.0792,
"step": 885
},
{
"epoch": 0.72,
"learning_rate": 2e-05,
"loss": 1.9541,
"step": 890
},
{
"epoch": 0.72,
"learning_rate": 2e-05,
"loss": 2.0045,
"step": 895
},
{
"epoch": 0.73,
"learning_rate": 2e-05,
"loss": 2.0326,
"step": 900
},
{
"epoch": 0.73,
"learning_rate": 2e-05,
"loss": 2.0406,
"step": 905
},
{
"epoch": 0.74,
"learning_rate": 2e-05,
"loss": 2.0131,
"step": 910
},
{
"epoch": 0.74,
"learning_rate": 2e-05,
"loss": 2.0479,
"step": 915
},
{
"epoch": 0.74,
"learning_rate": 2e-05,
"loss": 1.9801,
"step": 920
},
{
"epoch": 0.75,
"learning_rate": 2e-05,
"loss": 2.0129,
"step": 925
},
{
"epoch": 0.75,
"learning_rate": 2e-05,
"loss": 1.9463,
"step": 930
},
{
"epoch": 0.76,
"learning_rate": 2e-05,
"loss": 1.9324,
"step": 935
},
{
"epoch": 0.76,
"learning_rate": 2e-05,
"loss": 1.9729,
"step": 940
},
{
"epoch": 0.76,
"learning_rate": 2e-05,
"loss": 2.1066,
"step": 945
},
{
"epoch": 0.77,
"learning_rate": 2e-05,
"loss": 1.9615,
"step": 950
},
{
"epoch": 0.77,
"learning_rate": 2e-05,
"loss": 2.054,
"step": 955
},
{
"epoch": 0.78,
"learning_rate": 2e-05,
"loss": 1.9475,
"step": 960
},
{
"epoch": 0.78,
"learning_rate": 2e-05,
"loss": 1.9504,
"step": 965
},
{
"epoch": 0.79,
"learning_rate": 2e-05,
"loss": 1.9985,
"step": 970
},
{
"epoch": 0.79,
"learning_rate": 2e-05,
"loss": 1.9808,
"step": 975
},
{
"epoch": 0.79,
"learning_rate": 2e-05,
"loss": 1.9699,
"step": 980
},
{
"epoch": 0.8,
"learning_rate": 2e-05,
"loss": 1.9928,
"step": 985
},
{
"epoch": 0.8,
"learning_rate": 2e-05,
"loss": 1.9927,
"step": 990
},
{
"epoch": 0.81,
"learning_rate": 2e-05,
"loss": 2.0522,
"step": 995
},
{
"epoch": 0.81,
"learning_rate": 2e-05,
"loss": 2.0342,
"step": 1000
},
{
"epoch": 0.81,
"learning_rate": 2e-05,
"loss": 2.0291,
"step": 1005
},
{
"epoch": 0.82,
"learning_rate": 2e-05,
"loss": 2.0542,
"step": 1010
},
{
"epoch": 0.82,
"learning_rate": 2e-05,
"loss": 1.9575,
"step": 1015
},
{
"epoch": 0.83,
"learning_rate": 2e-05,
"loss": 2.0268,
"step": 1020
},
{
"epoch": 0.83,
"learning_rate": 2e-05,
"loss": 1.9652,
"step": 1025
},
{
"epoch": 0.83,
"learning_rate": 2e-05,
"loss": 2.0988,
"step": 1030
},
{
"epoch": 0.84,
"learning_rate": 2e-05,
"loss": 1.9705,
"step": 1035
},
{
"epoch": 0.84,
"learning_rate": 2e-05,
"loss": 1.9683,
"step": 1040
},
{
"epoch": 0.85,
"learning_rate": 2e-05,
"loss": 2.0333,
"step": 1045
},
{
"epoch": 0.85,
"learning_rate": 2e-05,
"loss": 2.0807,
"step": 1050
},
{
"epoch": 0.85,
"learning_rate": 2e-05,
"loss": 2.0138,
"step": 1055
},
{
"epoch": 0.86,
"learning_rate": 2e-05,
"loss": 1.9915,
"step": 1060
},
{
"epoch": 0.86,
"learning_rate": 2e-05,
"loss": 2.0396,
"step": 1065
},
{
"epoch": 0.87,
"learning_rate": 2e-05,
"loss": 2.0208,
"step": 1070
},
{
"epoch": 0.87,
"learning_rate": 2e-05,
"loss": 2.0008,
"step": 1075
},
{
"epoch": 0.87,
"learning_rate": 2e-05,
"loss": 1.9907,
"step": 1080
},
{
"epoch": 0.88,
"learning_rate": 2e-05,
"loss": 1.9137,
"step": 1085
},
{
"epoch": 0.88,
"learning_rate": 2e-05,
"loss": 2.0786,
"step": 1090
},
{
"epoch": 0.89,
"learning_rate": 2e-05,
"loss": 1.9445,
"step": 1095
},
{
"epoch": 0.89,
"learning_rate": 2e-05,
"loss": 2.0405,
"step": 1100
},
{
"epoch": 0.89,
"learning_rate": 2e-05,
"loss": 1.9961,
"step": 1105
},
{
"epoch": 0.9,
"learning_rate": 2e-05,
"loss": 1.9778,
"step": 1110
},
{
"epoch": 0.9,
"learning_rate": 2e-05,
"loss": 1.9785,
"step": 1115
},
{
"epoch": 0.91,
"learning_rate": 2e-05,
"loss": 2.0893,
"step": 1120
},
{
"epoch": 0.91,
"learning_rate": 2e-05,
"loss": 2.0206,
"step": 1125
},
{
"epoch": 0.91,
"learning_rate": 2e-05,
"loss": 2.0435,
"step": 1130
},
{
"epoch": 0.92,
"learning_rate": 2e-05,
"loss": 1.9512,
"step": 1135
},
{
"epoch": 0.92,
"learning_rate": 2e-05,
"loss": 2.0032,
"step": 1140
},
{
"epoch": 0.93,
"learning_rate": 2e-05,
"loss": 1.9493,
"step": 1145
},
{
"epoch": 0.93,
"learning_rate": 2e-05,
"loss": 1.9244,
"step": 1150
},
{
"epoch": 0.93,
"learning_rate": 2e-05,
"loss": 2.0021,
"step": 1155
},
{
"epoch": 0.94,
"learning_rate": 2e-05,
"loss": 1.9534,
"step": 1160
},
{
"epoch": 0.94,
"learning_rate": 2e-05,
"loss": 1.925,
"step": 1165
},
{
"epoch": 0.95,
"learning_rate": 2e-05,
"loss": 2.1044,
"step": 1170
},
{
"epoch": 0.95,
"learning_rate": 2e-05,
"loss": 2.0376,
"step": 1175
},
{
"epoch": 0.95,
"learning_rate": 2e-05,
"loss": 2.0542,
"step": 1180
},
{
"epoch": 0.96,
"learning_rate": 2e-05,
"loss": 1.9229,
"step": 1185
},
{
"epoch": 0.96,
"learning_rate": 2e-05,
"loss": 1.952,
"step": 1190
},
{
"epoch": 0.97,
"learning_rate": 2e-05,
"loss": 1.9914,
"step": 1195
},
{
"epoch": 0.97,
"learning_rate": 2e-05,
"loss": 1.9991,
"step": 1200
},
{
"epoch": 0.98,
"learning_rate": 2e-05,
"loss": 1.9702,
"step": 1205
},
{
"epoch": 0.98,
"learning_rate": 2e-05,
"loss": 1.8712,
"step": 1210
},
{
"epoch": 0.98,
"learning_rate": 2e-05,
"loss": 2.0369,
"step": 1215
},
{
"epoch": 0.99,
"learning_rate": 2e-05,
"loss": 2.1375,
"step": 1220
},
{
"epoch": 0.99,
"learning_rate": 2e-05,
"loss": 2.0707,
"step": 1225
},
{
"epoch": 1.0,
"learning_rate": 2e-05,
"loss": 1.9964,
"step": 1230
},
{
"epoch": 1.0,
"learning_rate": 2e-05,
"loss": 1.9424,
"step": 1235
},
{
"epoch": 1.0,
"learning_rate": 2e-05,
"loss": 1.9935,
"step": 1240
},
{
"epoch": 1.01,
"learning_rate": 2e-05,
"loss": 1.6404,
"step": 1245
},
{
"epoch": 1.01,
"learning_rate": 2e-05,
"loss": 1.5555,
"step": 1250
},
{
"epoch": 1.02,
"learning_rate": 2e-05,
"loss": 1.5406,
"step": 1255
},
{
"epoch": 1.02,
"learning_rate": 2e-05,
"loss": 1.6253,
"step": 1260
},
{
"epoch": 1.02,
"learning_rate": 2e-05,
"loss": 1.5516,
"step": 1265
},
{
"epoch": 1.03,
"learning_rate": 2e-05,
"loss": 1.4896,
"step": 1270
},
{
"epoch": 1.03,
"learning_rate": 2e-05,
"loss": 1.5545,
"step": 1275
},
{
"epoch": 1.04,
"learning_rate": 2e-05,
"loss": 1.5125,
"step": 1280
},
{
"epoch": 1.04,
"learning_rate": 2e-05,
"loss": 1.5455,
"step": 1285
},
{
"epoch": 1.04,
"learning_rate": 2e-05,
"loss": 1.4915,
"step": 1290
},
{
"epoch": 1.05,
"learning_rate": 2e-05,
"loss": 1.453,
"step": 1295
},
{
"epoch": 1.05,
"learning_rate": 2e-05,
"loss": 1.4764,
"step": 1300
},
{
"epoch": 1.06,
"learning_rate": 2e-05,
"loss": 1.5592,
"step": 1305
},
{
"epoch": 1.06,
"learning_rate": 2e-05,
"loss": 1.5472,
"step": 1310
},
{
"epoch": 1.06,
"learning_rate": 2e-05,
"loss": 1.4989,
"step": 1315
},
{
"epoch": 1.07,
"learning_rate": 2e-05,
"loss": 1.4987,
"step": 1320
},
{
"epoch": 1.07,
"learning_rate": 2e-05,
"loss": 1.451,
"step": 1325
},
{
"epoch": 1.08,
"learning_rate": 2e-05,
"loss": 1.5672,
"step": 1330
},
{
"epoch": 1.08,
"learning_rate": 2e-05,
"loss": 1.5399,
"step": 1335
},
{
"epoch": 1.08,
"learning_rate": 2e-05,
"loss": 1.5285,
"step": 1340
},
{
"epoch": 1.09,
"learning_rate": 2e-05,
"loss": 1.594,
"step": 1345
},
{
"epoch": 1.09,
"learning_rate": 2e-05,
"loss": 1.53,
"step": 1350
},
{
"epoch": 1.1,
"learning_rate": 2e-05,
"loss": 1.5641,
"step": 1355
},
{
"epoch": 1.1,
"learning_rate": 2e-05,
"loss": 1.5668,
"step": 1360
},
{
"epoch": 1.11,
"learning_rate": 2e-05,
"loss": 1.5265,
"step": 1365
},
{
"epoch": 1.11,
"learning_rate": 2e-05,
"loss": 1.4965,
"step": 1370
},
{
"epoch": 1.11,
"learning_rate": 2e-05,
"loss": 1.4974,
"step": 1375
},
{
"epoch": 1.12,
"learning_rate": 2e-05,
"loss": 1.5232,
"step": 1380
},
{
"epoch": 1.12,
"learning_rate": 2e-05,
"loss": 1.5223,
"step": 1385
},
{
"epoch": 1.13,
"learning_rate": 2e-05,
"loss": 1.5672,
"step": 1390
},
{
"epoch": 1.13,
"learning_rate": 2e-05,
"loss": 1.5194,
"step": 1395
},
{
"epoch": 1.13,
"learning_rate": 2e-05,
"loss": 1.4906,
"step": 1400
},
{
"epoch": 1.14,
"learning_rate": 2e-05,
"loss": 1.5467,
"step": 1405
},
{
"epoch": 1.14,
"learning_rate": 2e-05,
"loss": 1.428,
"step": 1410
},
{
"epoch": 1.15,
"learning_rate": 2e-05,
"loss": 1.5783,
"step": 1415
},
{
"epoch": 1.15,
"learning_rate": 2e-05,
"loss": 1.4938,
"step": 1420
},
{
"epoch": 1.15,
"learning_rate": 2e-05,
"loss": 1.485,
"step": 1425
},
{
"epoch": 1.16,
"learning_rate": 2e-05,
"loss": 1.5796,
"step": 1430
},
{
"epoch": 1.16,
"learning_rate": 2e-05,
"loss": 1.491,
"step": 1435
},
{
"epoch": 1.17,
"learning_rate": 2e-05,
"loss": 1.4907,
"step": 1440
},
{
"epoch": 1.17,
"learning_rate": 2e-05,
"loss": 1.4874,
"step": 1445
},
{
"epoch": 1.17,
"learning_rate": 2e-05,
"loss": 1.5549,
"step": 1450
},
{
"epoch": 1.18,
"learning_rate": 2e-05,
"loss": 1.5494,
"step": 1455
},
{
"epoch": 1.18,
"learning_rate": 2e-05,
"loss": 1.5035,
"step": 1460
},
{
"epoch": 1.19,
"learning_rate": 2e-05,
"loss": 1.545,
"step": 1465
},
{
"epoch": 1.19,
"learning_rate": 2e-05,
"loss": 1.498,
"step": 1470
},
{
"epoch": 1.19,
"learning_rate": 2e-05,
"loss": 1.5352,
"step": 1475
},
{
"epoch": 1.2,
"learning_rate": 2e-05,
"loss": 1.4939,
"step": 1480
},
{
"epoch": 1.2,
"learning_rate": 2e-05,
"loss": 1.5376,
"step": 1485
},
{
"epoch": 1.21,
"learning_rate": 2e-05,
"loss": 1.4745,
"step": 1490
},
{
"epoch": 1.21,
"learning_rate": 2e-05,
"loss": 1.5165,
"step": 1495
},
{
"epoch": 1.21,
"learning_rate": 2e-05,
"loss": 1.4921,
"step": 1500
},
{
"epoch": 1.22,
"learning_rate": 2e-05,
"loss": 1.5543,
"step": 1505
},
{
"epoch": 1.22,
"learning_rate": 2e-05,
"loss": 1.5212,
"step": 1510
},
{
"epoch": 1.23,
"learning_rate": 2e-05,
"loss": 1.5417,
"step": 1515
},
{
"epoch": 1.23,
"learning_rate": 2e-05,
"loss": 1.5707,
"step": 1520
},
{
"epoch": 1.23,
"learning_rate": 2e-05,
"loss": 1.5293,
"step": 1525
},
{
"epoch": 1.24,
"learning_rate": 2e-05,
"loss": 1.5243,
"step": 1530
},
{
"epoch": 1.24,
"learning_rate": 2e-05,
"loss": 1.5852,
"step": 1535
},
{
"epoch": 1.25,
"learning_rate": 2e-05,
"loss": 1.5043,
"step": 1540
},
{
"epoch": 1.25,
"learning_rate": 2e-05,
"loss": 1.4132,
"step": 1545
},
{
"epoch": 1.25,
"learning_rate": 2e-05,
"loss": 1.5171,
"step": 1550
},
{
"epoch": 1.26,
"learning_rate": 2e-05,
"loss": 1.4841,
"step": 1555
},
{
"epoch": 1.26,
"learning_rate": 2e-05,
"loss": 1.5396,
"step": 1560
},
{
"epoch": 1.27,
"learning_rate": 2e-05,
"loss": 1.5959,
"step": 1565
},
{
"epoch": 1.27,
"learning_rate": 2e-05,
"loss": 1.5522,
"step": 1570
},
{
"epoch": 1.28,
"learning_rate": 2e-05,
"loss": 1.5074,
"step": 1575
},
{
"epoch": 1.28,
"learning_rate": 2e-05,
"loss": 1.5717,
"step": 1580
},
{
"epoch": 1.28,
"learning_rate": 2e-05,
"loss": 1.6052,
"step": 1585
},
{
"epoch": 1.29,
"learning_rate": 2e-05,
"loss": 1.5021,
"step": 1590
},
{
"epoch": 1.29,
"learning_rate": 2e-05,
"loss": 1.5688,
"step": 1595
},
{
"epoch": 1.3,
"learning_rate": 2e-05,
"loss": 1.5272,
"step": 1600
},
{
"epoch": 1.3,
"learning_rate": 2e-05,
"loss": 1.6127,
"step": 1605
},
{
"epoch": 1.3,
"learning_rate": 2e-05,
"loss": 1.5163,
"step": 1610
},
{
"epoch": 1.31,
"learning_rate": 2e-05,
"loss": 1.4874,
"step": 1615
},
{
"epoch": 1.31,
"learning_rate": 2e-05,
"loss": 1.4935,
"step": 1620
},
{
"epoch": 1.32,
"learning_rate": 2e-05,
"loss": 1.5667,
"step": 1625
},
{
"epoch": 1.32,
"learning_rate": 2e-05,
"loss": 1.5686,
"step": 1630
},
{
"epoch": 1.32,
"learning_rate": 2e-05,
"loss": 1.5291,
"step": 1635
},
{
"epoch": 1.33,
"learning_rate": 2e-05,
"loss": 1.5299,
"step": 1640
},
{
"epoch": 1.33,
"learning_rate": 2e-05,
"loss": 1.5583,
"step": 1645
},
{
"epoch": 1.34,
"learning_rate": 2e-05,
"loss": 1.5004,
"step": 1650
},
{
"epoch": 1.34,
"learning_rate": 2e-05,
"loss": 1.552,
"step": 1655
},
{
"epoch": 1.34,
"learning_rate": 2e-05,
"loss": 1.5334,
"step": 1660
},
{
"epoch": 1.35,
"learning_rate": 2e-05,
"loss": 1.5249,
"step": 1665
},
{
"epoch": 1.35,
"learning_rate": 2e-05,
"loss": 1.5694,
"step": 1670
},
{
"epoch": 1.36,
"learning_rate": 2e-05,
"loss": 1.4854,
"step": 1675
},
{
"epoch": 1.36,
"learning_rate": 2e-05,
"loss": 1.4816,
"step": 1680
},
{
"epoch": 1.36,
"learning_rate": 2e-05,
"loss": 1.5268,
"step": 1685
},
{
"epoch": 1.37,
"learning_rate": 2e-05,
"loss": 1.4951,
"step": 1690
},
{
"epoch": 1.37,
"learning_rate": 2e-05,
"loss": 1.5026,
"step": 1695
},
{
"epoch": 1.38,
"learning_rate": 2e-05,
"loss": 1.5675,
"step": 1700
},
{
"epoch": 1.38,
"learning_rate": 2e-05,
"loss": 1.5503,
"step": 1705
},
{
"epoch": 1.38,
"learning_rate": 2e-05,
"loss": 1.6134,
"step": 1710
},
{
"epoch": 1.39,
"learning_rate": 2e-05,
"loss": 1.5292,
"step": 1715
},
{
"epoch": 1.39,
"learning_rate": 2e-05,
"loss": 1.4991,
"step": 1720
},
{
"epoch": 1.4,
"learning_rate": 2e-05,
"loss": 1.5183,
"step": 1725
},
{
"epoch": 1.4,
"learning_rate": 2e-05,
"loss": 1.5103,
"step": 1730
},
{
"epoch": 1.4,
"learning_rate": 2e-05,
"loss": 1.5129,
"step": 1735
},
{
"epoch": 1.41,
"learning_rate": 2e-05,
"loss": 1.6066,
"step": 1740
},
{
"epoch": 1.41,
"learning_rate": 2e-05,
"loss": 1.6065,
"step": 1745
},
{
"epoch": 1.42,
"learning_rate": 2e-05,
"loss": 1.5733,
"step": 1750
},
{
"epoch": 1.42,
"learning_rate": 2e-05,
"loss": 1.4913,
"step": 1755
},
{
"epoch": 1.42,
"learning_rate": 2e-05,
"loss": 1.5468,
"step": 1760
},
{
"epoch": 1.43,
"learning_rate": 2e-05,
"loss": 1.596,
"step": 1765
},
{
"epoch": 1.43,
"learning_rate": 2e-05,
"loss": 1.49,
"step": 1770
},
{
"epoch": 1.44,
"learning_rate": 2e-05,
"loss": 1.5589,
"step": 1775
},
{
"epoch": 1.44,
"learning_rate": 2e-05,
"loss": 1.5661,
"step": 1780
},
{
"epoch": 1.45,
"learning_rate": 2e-05,
"loss": 1.5703,
"step": 1785
},
{
"epoch": 1.45,
"learning_rate": 2e-05,
"loss": 1.6092,
"step": 1790
}
],
"max_steps": 2470,
"num_train_epochs": 2,
"total_flos": 8.862715475793019e+17,
"trial_name": null,
"trial_params": null
}
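
Note: the JSON above is the Trainer state saved at roughly epoch 1.45 of 2 (global step 1793 of a planned 2470); log_history records the training loss every 5 steps, with the learning rate warming up to a flat 2e-05 by step 50. The snippet below is a minimal illustrative sketch, not part of this repository, showing one way to load and summarize such a file with plain Python; the filename trainer_state.json in the working directory is an assumption.

    # Illustrative sketch: read a Hugging Face Trainer "trainer_state.json"
    # like the one above and summarize the logged training loss.
    # Assumes the file is in the current working directory.
    import json
    from collections import defaultdict

    with open("trainer_state.json", "r", encoding="utf-8") as f:
        state = json.load(f)

    # Each log_history entry holds "epoch", "learning_rate", "loss", "step".
    history = [e for e in state["log_history"] if "loss" in e]

    steps = [e["step"] for e in history]
    losses = [e["loss"] for e in history]

    print(f"logged points: {len(history)}")
    print(f"first loss: {losses[0]:.4f} at step {steps[0]}")
    print(f"last loss:  {losses[-1]:.4f} at step {steps[-1]}")

    # Rough per-epoch averages, grouping by the integer part of the
    # fractional epoch values logged by the Trainer.
    by_epoch = defaultdict(list)
    for e in history:
        by_epoch[int(e["epoch"])].append(e["loss"])
    for epoch, vals in sorted(by_epoch.items()):
        print(f"epoch {epoch}: mean loss {sum(vals) / len(vals):.4f}")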