Llama-2-7b-ultrachat200k-3e / trainer_state.json
{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 2.700431034482759,
"eval_steps": 500,
"global_step": 855,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0,
"learning_rate": 1.999996673596474e-05,
"loss": 1.5474,
"step": 1
},
{
"epoch": 0.01,
"learning_rate": 1.999916841018338e-05,
"loss": 1.1821,
"step": 5
},
{
"epoch": 0.02,
"learning_rate": 1.999667377904184e-05,
"loss": 1.0465,
"step": 10
},
{
"epoch": 0.04,
"learning_rate": 1.999251652147735e-05,
"loss": 1.0136,
"step": 15
},
{
"epoch": 0.05,
"learning_rate": 1.9986697328916525e-05,
"loss": 1.0102,
"step": 20
},
{
"epoch": 0.06,
"learning_rate": 1.997921716919562e-05,
"loss": 0.9887,
"step": 25
},
{
"epoch": 0.07,
"learning_rate": 1.997007728639956e-05,
"loss": 0.9841,
"step": 30
},
{
"epoch": 0.09,
"learning_rate": 1.9959279200655044e-05,
"loss": 0.9791,
"step": 35
},
{
"epoch": 0.1,
"learning_rate": 1.9946824707877695e-05,
"loss": 0.9779,
"step": 40
},
{
"epoch": 0.11,
"learning_rate": 1.9932715879473385e-05,
"loss": 0.9757,
"step": 45
},
{
"epoch": 0.12,
"learning_rate": 1.9916955061993724e-05,
"loss": 0.9683,
"step": 50
},
{
"epoch": 0.14,
"learning_rate": 1.9899544876745774e-05,
"loss": 0.967,
"step": 55
},
{
"epoch": 0.15,
"learning_rate": 1.9880488219356086e-05,
"loss": 0.9688,
"step": 60
},
{
"epoch": 0.16,
"learning_rate": 1.9859788259289104e-05,
"loss": 0.956,
"step": 65
},
{
"epoch": 0.17,
"learning_rate": 1.9837448439320027e-05,
"loss": 0.9524,
"step": 70
},
{
"epoch": 0.18,
"learning_rate": 1.981347247496222e-05,
"loss": 0.9517,
"step": 75
},
{
"epoch": 0.2,
"learning_rate": 1.9787864353849232e-05,
"loss": 0.9568,
"step": 80
},
{
"epoch": 0.21,
"learning_rate": 1.976062833507162e-05,
"loss": 0.9546,
"step": 85
},
{
"epoch": 0.22,
"learning_rate": 1.973176894846855e-05,
"loss": 0.9612,
"step": 90
},
{
"epoch": 0.23,
"learning_rate": 1.970129099387443e-05,
"loss": 0.9496,
"step": 95
},
{
"epoch": 0.25,
"learning_rate": 1.966919954032059e-05,
"loss": 0.9406,
"step": 100
},
{
"epoch": 0.26,
"learning_rate": 1.963549992519223e-05,
"loss": 0.9516,
"step": 105
},
{
"epoch": 0.27,
"learning_rate": 1.96001977533407e-05,
"loss": 0.9667,
"step": 110
},
{
"epoch": 0.28,
"learning_rate": 1.9563298896151325e-05,
"loss": 0.9505,
"step": 115
},
{
"epoch": 0.3,
"learning_rate": 1.9524809490566878e-05,
"loss": 0.9409,
"step": 120
},
{
"epoch": 0.31,
"learning_rate": 1.948473593806691e-05,
"loss": 0.9459,
"step": 125
},
{
"epoch": 0.32,
"learning_rate": 1.9443084903603052e-05,
"loss": 0.9455,
"step": 130
},
{
"epoch": 0.33,
"learning_rate": 1.939986331449053e-05,
"loss": 0.9489,
"step": 135
},
{
"epoch": 0.34,
"learning_rate": 1.935507835925601e-05,
"loss": 0.9582,
"step": 140
},
{
"epoch": 0.36,
"learning_rate": 1.9308737486442045e-05,
"loss": 0.9386,
"step": 145
},
{
"epoch": 0.37,
"learning_rate": 1.926084840336821e-05,
"loss": 0.9492,
"step": 150
},
{
"epoch": 0.38,
"learning_rate": 1.9211419074849275e-05,
"loss": 0.9403,
"step": 155
},
{
"epoch": 0.39,
"learning_rate": 1.9160457721870483e-05,
"loss": 0.9418,
"step": 160
},
{
"epoch": 0.41,
"learning_rate": 1.910797282022027e-05,
"loss": 0.9336,
"step": 165
},
{
"epoch": 0.42,
"learning_rate": 1.9053973099080585e-05,
"loss": 0.9359,
"step": 170
},
{
"epoch": 0.43,
"learning_rate": 1.899846753957507e-05,
"loss": 0.942,
"step": 175
},
{
"epoch": 0.44,
"learning_rate": 1.894146537327533e-05,
"loss": 0.9366,
"step": 180
},
{
"epoch": 0.46,
"learning_rate": 1.8882976080665573e-05,
"loss": 0.9332,
"step": 185
},
{
"epoch": 0.47,
"learning_rate": 1.8823009389565818e-05,
"loss": 0.9437,
"step": 190
},
{
"epoch": 0.48,
"learning_rate": 1.8761575273514005e-05,
"loss": 0.9452,
"step": 195
},
{
"epoch": 0.49,
"learning_rate": 1.8698683950107185e-05,
"loss": 0.9409,
"step": 200
},
{
"epoch": 0.5,
"learning_rate": 1.863434587930218e-05,
"loss": 0.9361,
"step": 205
},
{
"epoch": 0.52,
"learning_rate": 1.8568571761675893e-05,
"loss": 0.9345,
"step": 210
},
{
"epoch": 0.53,
"learning_rate": 1.8501372536645607e-05,
"loss": 0.9343,
"step": 215
},
{
"epoch": 0.54,
"learning_rate": 1.8432759380649565e-05,
"loss": 0.9368,
"step": 220
},
{
"epoch": 0.55,
"learning_rate": 1.8362743705288127e-05,
"loss": 0.9482,
"step": 225
},
{
"epoch": 0.57,
"learning_rate": 1.8291337155425823e-05,
"loss": 0.9349,
"step": 230
},
{
"epoch": 0.58,
"learning_rate": 1.8218551607254594e-05,
"loss": 0.9381,
"step": 235
},
{
"epoch": 0.59,
"learning_rate": 1.814439916631857e-05,
"loss": 0.9344,
"step": 240
},
{
"epoch": 0.6,
"learning_rate": 1.8068892165500704e-05,
"loss": 0.9367,
"step": 245
},
{
"epoch": 0.62,
"learning_rate": 1.799204316297159e-05,
"loss": 0.9392,
"step": 250
},
{
"epoch": 0.63,
"learning_rate": 1.791386494010081e-05,
"loss": 0.9404,
"step": 255
},
{
"epoch": 0.64,
"learning_rate": 1.7834370499331165e-05,
"loss": 0.9367,
"step": 260
},
{
"epoch": 0.65,
"learning_rate": 1.7753573062016146e-05,
"loss": 0.9228,
"step": 265
},
{
"epoch": 0.67,
"learning_rate": 1.7671486066220965e-05,
"loss": 0.934,
"step": 270
},
{
"epoch": 0.68,
"learning_rate": 1.7588123164487583e-05,
"loss": 0.932,
"step": 275
},
{
"epoch": 0.69,
"learning_rate": 1.7503498221564026e-05,
"loss": 0.9342,
"step": 280
},
{
"epoch": 0.7,
"learning_rate": 1.7417625312098453e-05,
"loss": 0.931,
"step": 285
},
{
"epoch": 0.7,
"eval_loss": 0.9349672198295593,
"eval_runtime": 647.743,
"eval_samples_per_second": 35.678,
"eval_steps_per_second": 0.559,
"step": 285
},
{
"epoch": 1.01,
"learning_rate": 1.7330518718298263e-05,
"loss": 0.9067,
"step": 290
},
{
"epoch": 1.02,
"learning_rate": 1.724219292755474e-05,
"loss": 0.9057,
"step": 295
},
{
"epoch": 1.04,
"learning_rate": 1.7152662630033506e-05,
"loss": 0.8938,
"step": 300
},
{
"epoch": 1.05,
"learning_rate": 1.7061942716231295e-05,
"loss": 0.9137,
"step": 305
},
{
"epoch": 1.06,
"learning_rate": 1.697004827449941e-05,
"loss": 0.9014,
"step": 310
},
{
"epoch": 1.07,
"learning_rate": 1.6876994588534234e-05,
"loss": 0.8978,
"step": 315
},
{
"epoch": 1.09,
"learning_rate": 1.6782797134835305e-05,
"loss": 0.8976,
"step": 320
},
{
"epoch": 1.1,
"learning_rate": 1.6687471580131266e-05,
"loss": 0.9012,
"step": 325
},
{
"epoch": 1.11,
"learning_rate": 1.659103377877423e-05,
"loss": 0.8996,
"step": 330
},
{
"epoch": 1.12,
"learning_rate": 1.6493499770102906e-05,
"loss": 0.8946,
"step": 335
},
{
"epoch": 1.13,
"learning_rate": 1.6394885775774976e-05,
"loss": 0.893,
"step": 340
},
{
"epoch": 1.15,
"learning_rate": 1.629520819706912e-05,
"loss": 0.8968,
"step": 345
},
{
"epoch": 1.16,
"learning_rate": 1.6194483612157232e-05,
"loss": 0.8834,
"step": 350
},
{
"epoch": 1.17,
"learning_rate": 1.6092728773347118e-05,
"loss": 0.8833,
"step": 355
},
{
"epoch": 1.18,
"learning_rate": 1.598996060429634e-05,
"loss": 0.8822,
"step": 360
},
{
"epoch": 1.2,
"learning_rate": 1.588619619719746e-05,
"loss": 0.8876,
"step": 365
},
{
"epoch": 1.21,
"learning_rate": 1.578145280993533e-05,
"loss": 0.8854,
"step": 370
},
{
"epoch": 1.22,
"learning_rate": 1.56757478632168e-05,
"loss": 0.8896,
"step": 375
},
{
"epoch": 1.23,
"learning_rate": 1.556909893767332e-05,
"loss": 0.8805,
"step": 380
},
{
"epoch": 1.25,
"learning_rate": 1.546152377093697e-05,
"loss": 0.8744,
"step": 385
},
{
"epoch": 1.26,
"learning_rate": 1.5353040254690396e-05,
"loss": 0.886,
"step": 390
},
{
"epoch": 1.27,
"learning_rate": 1.5243666431691061e-05,
"loss": 0.893,
"step": 395
},
{
"epoch": 1.28,
"learning_rate": 1.5133420492770463e-05,
"loss": 0.8829,
"step": 400
},
{
"epoch": 1.29,
"learning_rate": 1.5022320773808612e-05,
"loss": 0.8776,
"step": 405
},
{
"epoch": 1.31,
"learning_rate": 1.4910385752684506e-05,
"loss": 0.8792,
"step": 410
},
{
"epoch": 1.32,
"learning_rate": 1.4797634046202876e-05,
"loss": 0.8786,
"step": 415
},
{
"epoch": 1.33,
"learning_rate": 1.4684084406997903e-05,
"loss": 0.881,
"step": 420
},
{
"epoch": 1.34,
"learning_rate": 1.456975572041432e-05,
"loss": 0.8899,
"step": 425
},
{
"epoch": 1.36,
"learning_rate": 1.4454667001366429e-05,
"loss": 0.8753,
"step": 430
},
{
"epoch": 1.37,
"learning_rate": 1.4338837391175582e-05,
"loss": 0.8837,
"step": 435
},
{
"epoch": 1.38,
"learning_rate": 1.4222286154386641e-05,
"loss": 0.8724,
"step": 440
},
{
"epoch": 1.39,
"learning_rate": 1.4105032675563928e-05,
"loss": 0.8752,
"step": 445
},
{
"epoch": 1.41,
"learning_rate": 1.3987096456067236e-05,
"loss": 0.8671,
"step": 450
},
{
"epoch": 1.42,
"learning_rate": 1.3868497110808394e-05,
"loss": 0.8731,
"step": 455
},
{
"epoch": 1.43,
"learning_rate": 1.3749254364988955e-05,
"loss": 0.8749,
"step": 460
},
{
"epoch": 1.44,
"learning_rate": 1.3629388050819547e-05,
"loss": 0.8738,
"step": 465
},
{
"epoch": 1.45,
"learning_rate": 1.3508918104221414e-05,
"loss": 0.8704,
"step": 470
},
{
"epoch": 1.47,
"learning_rate": 1.3387864561510713e-05,
"loss": 0.8744,
"step": 475
},
{
"epoch": 1.48,
"learning_rate": 1.3266247556066122e-05,
"loss": 0.8796,
"step": 480
},
{
"epoch": 1.49,
"learning_rate": 1.3144087314980295e-05,
"loss": 0.8792,
"step": 485
},
{
"epoch": 1.5,
"learning_rate": 1.3021404155695728e-05,
"loss": 0.8711,
"step": 490
},
{
"epoch": 1.52,
"learning_rate": 1.2898218482625606e-05,
"loss": 0.8706,
"step": 495
},
{
"epoch": 1.53,
"learning_rate": 1.2774550783760182e-05,
"loss": 0.8745,
"step": 500
},
{
"epoch": 1.54,
"learning_rate": 1.2650421627259264e-05,
"loss": 0.8705,
"step": 505
},
{
"epoch": 1.55,
"learning_rate": 1.252585165803135e-05,
"loss": 0.8833,
"step": 510
},
{
"epoch": 1.57,
"learning_rate": 1.2400861594300015e-05,
"loss": 0.8743,
"step": 515
},
{
"epoch": 1.58,
"learning_rate": 1.2275472224158089e-05,
"loss": 0.8744,
"step": 520
},
{
"epoch": 1.59,
"learning_rate": 1.2149704402110243e-05,
"loss": 0.8711,
"step": 525
},
{
"epoch": 1.6,
"learning_rate": 1.2023579045604485e-05,
"loss": 0.8738,
"step": 530
},
{
"epoch": 1.61,
"learning_rate": 1.1897117131553239e-05,
"loss": 0.8754,
"step": 535
},
{
"epoch": 1.63,
"learning_rate": 1.1770339692844484e-05,
"loss": 0.8788,
"step": 540
},
{
"epoch": 1.64,
"learning_rate": 1.1643267814843624e-05,
"loss": 0.8776,
"step": 545
},
{
"epoch": 1.65,
"learning_rate": 1.1515922631886605e-05,
"loss": 0.8595,
"step": 550
},
{
"epoch": 1.66,
"learning_rate": 1.1388325323764889e-05,
"loss": 0.8736,
"step": 555
},
{
"epoch": 1.68,
"learning_rate": 1.1260497112202895e-05,
"loss": 0.869,
"step": 560
},
{
"epoch": 1.69,
"learning_rate": 1.1132459257328423e-05,
"loss": 0.8741,
"step": 565
},
{
"epoch": 1.7,
"learning_rate": 1.1004233054136726e-05,
"loss": 0.8672,
"step": 570
},
{
"epoch": 1.7,
"eval_loss": 0.9244597554206848,
"eval_runtime": 648.5324,
"eval_samples_per_second": 35.634,
"eval_steps_per_second": 0.558,
"step": 570
},
{
"epoch": 2.01,
"learning_rate": 1.0875839828948758e-05,
"loss": 0.8471,
"step": 575
},
{
"epoch": 2.02,
"learning_rate": 1.0747300935864245e-05,
"loss": 0.8476,
"step": 580
},
{
"epoch": 2.04,
"learning_rate": 1.0618637753210086e-05,
"loss": 0.8358,
"step": 585
},
{
"epoch": 2.05,
"learning_rate": 1.0489871679984777e-05,
"loss": 0.8481,
"step": 590
},
{
"epoch": 2.06,
"learning_rate": 1.0361024132299364e-05,
"loss": 0.8443,
"step": 595
},
{
"epoch": 2.07,
"learning_rate": 1.0232116539815558e-05,
"loss": 0.8424,
"step": 600
},
{
"epoch": 2.08,
"learning_rate": 1.0103170342181595e-05,
"loss": 0.8359,
"step": 605
},
{
"epoch": 2.1,
"learning_rate": 9.974206985466442e-06,
"loss": 0.842,
"step": 610
},
{
"epoch": 2.11,
"learning_rate": 9.845247918592937e-06,
"loss": 0.8403,
"step": 615
},
{
"epoch": 2.12,
"learning_rate": 9.71631458977043e-06,
"loss": 0.8332,
"step": 620
},
{
"epoch": 2.13,
"learning_rate": 9.587428442927581e-06,
"loss": 0.8351,
"step": 625
},
{
"epoch": 2.15,
"learning_rate": 9.458610914145826e-06,
"loss": 0.8371,
"step": 630
},
{
"epoch": 2.16,
"learning_rate": 9.3298834280942e-06,
"loss": 0.8245,
"step": 635
},
{
"epoch": 2.17,
"learning_rate": 9.201267394465997e-06,
"loss": 0.8248,
"step": 640
},
{
"epoch": 2.18,
"learning_rate": 9.072784204417995e-06,
"loss": 0.8198,
"step": 645
},
{
"epoch": 2.2,
"learning_rate": 8.944455227012667e-06,
"loss": 0.83,
"step": 650
},
{
"epoch": 2.21,
"learning_rate": 8.816301805664188e-06,
"loss": 0.8298,
"step": 655
},
{
"epoch": 2.22,
"learning_rate": 8.688345254588579e-06,
"loss": 0.8299,
"step": 660
},
{
"epoch": 2.23,
"learning_rate": 8.560606855258808e-06,
"loss": 0.8229,
"step": 665
},
{
"epoch": 2.24,
"learning_rate": 8.433107852865299e-06,
"loss": 0.8186,
"step": 670
},
{
"epoch": 2.26,
"learning_rate": 8.305869452782446e-06,
"loss": 0.8281,
"step": 675
},
{
"epoch": 2.27,
"learning_rate": 8.178912817041816e-06,
"loss": 0.8346,
"step": 680
},
{
"epoch": 2.28,
"learning_rate": 8.052259060812489e-06,
"loss": 0.8253,
"step": 685
},
{
"epoch": 2.29,
"learning_rate": 7.92592924888925e-06,
"loss": 0.8222,
"step": 690
},
{
"epoch": 2.31,
"learning_rate": 7.799944392189128e-06,
"loss": 0.82,
"step": 695
},
{
"epoch": 2.32,
"learning_rate": 7.674325444256899e-06,
"loss": 0.8186,
"step": 700
},
{
"epoch": 2.33,
"learning_rate": 7.549093297780133e-06,
"loss": 0.8291,
"step": 705
},
{
"epoch": 2.34,
"learning_rate": 7.4242687811143795e-06,
"loss": 0.8318,
"step": 710
},
{
"epoch": 2.36,
"learning_rate": 7.29987265481902e-06,
"loss": 0.8179,
"step": 715
},
{
"epoch": 2.37,
"learning_rate": 7.175925608204428e-06,
"loss": 0.8286,
"step": 720
},
{
"epoch": 2.38,
"learning_rate": 7.052448255890958e-06,
"loss": 0.8162,
"step": 725
},
{
"epoch": 2.39,
"learning_rate": 6.9294611343803594e-06,
"loss": 0.8194,
"step": 730
},
{
"epoch": 2.4,
"learning_rate": 6.806984698640202e-06,
"loss": 0.8151,
"step": 735
},
{
"epoch": 2.42,
"learning_rate": 6.685039318701827e-06,
"loss": 0.816,
"step": 740
},
{
"epoch": 2.43,
"learning_rate": 6.563645276272466e-06,
"loss": 0.8222,
"step": 745
},
{
"epoch": 2.44,
"learning_rate": 6.442822761362015e-06,
"loss": 0.8187,
"step": 750
},
{
"epoch": 2.45,
"learning_rate": 6.322591868925082e-06,
"loss": 0.8177,
"step": 755
},
{
"epoch": 2.47,
"learning_rate": 6.2029725955188165e-06,
"loss": 0.8203,
"step": 760
},
{
"epoch": 2.48,
"learning_rate": 6.083984835977154e-06,
"loss": 0.8251,
"step": 765
},
{
"epoch": 2.49,
"learning_rate": 5.965648380101916e-06,
"loss": 0.8249,
"step": 770
},
{
"epoch": 2.5,
"learning_rate": 5.847982909371438e-06,
"loss": 0.8193,
"step": 775
},
{
"epoch": 2.52,
"learning_rate": 5.731007993667155e-06,
"loss": 0.8212,
"step": 780
},
{
"epoch": 2.53,
"learning_rate": 5.6147430880188145e-06,
"loss": 0.8196,
"step": 785
},
{
"epoch": 2.54,
"learning_rate": 5.499207529368734e-06,
"loss": 0.8201,
"step": 790
},
{
"epoch": 2.55,
"learning_rate": 5.38442053335571e-06,
"loss": 0.8295,
"step": 795
},
{
"epoch": 2.56,
"learning_rate": 5.270401191119143e-06,
"loss": 0.8242,
"step": 800
},
{
"epoch": 2.58,
"learning_rate": 5.1571684661238075e-06,
"loss": 0.8222,
"step": 805
},
{
"epoch": 2.59,
"learning_rate": 5.044741191005908e-06,
"loss": 0.8251,
"step": 810
},
{
"epoch": 2.6,
"learning_rate": 4.933138064440866e-06,
"loss": 0.822,
"step": 815
},
{
"epoch": 2.61,
"learning_rate": 4.822377648033394e-06,
"loss": 0.828,
"step": 820
},
{
"epoch": 2.63,
"learning_rate": 4.712478363230361e-06,
"loss": 0.8295,
"step": 825
},
{
"epoch": 2.64,
"learning_rate": 4.603458488256992e-06,
"loss": 0.8257,
"step": 830
},
{
"epoch": 2.65,
"learning_rate": 4.495336155076848e-06,
"loss": 0.8129,
"step": 835
},
{
"epoch": 2.66,
"learning_rate": 4.388129346376177e-06,
"loss": 0.8254,
"step": 840
},
{
"epoch": 2.68,
"learning_rate": 4.281855892573056e-06,
"loss": 0.8204,
"step": 845
},
{
"epoch": 2.69,
"learning_rate": 4.176533468851877e-06,
"loss": 0.8223,
"step": 850
},
{
"epoch": 2.7,
"learning_rate": 4.07217959222365e-06,
"loss": 0.8189,
"step": 855
},
{
"epoch": 2.7,
"eval_loss": 0.9248305559158325,
"eval_runtime": 648.4552,
"eval_samples_per_second": 35.639,
"eval_steps_per_second": 0.558,
"step": 855
},
{
"epoch": 2.7,
"step": 855,
"total_flos": 1433386950328320.0,
"train_loss": 0.8895609569828413,
"train_runtime": 69721.223,
"train_samples_per_second": 8.944,
"train_steps_per_second": 0.017
}
],
"logging_steps": 5,
"max_steps": 1218,
"num_input_tokens_seen": 0,
"num_train_epochs": 3,
"save_steps": 500,
"total_flos": 1433386950328320.0,
"train_batch_size": 8,
"trial_name": null,
"trial_params": null
}
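
Below is a minimal sketch (not part of the original file) of how one might load this trainer_state.json and plot the loss curves recorded in log_history. The local file path and the use of matplotlib are assumptions for illustration; the keys (log_history, step, loss, eval_loss) come from the JSON above.

import json

import matplotlib.pyplot as plt

# Assumed local path to the file shown above.
with open("trainer_state.json") as f:
    state = json.load(f)

train_steps, train_loss = [], []
eval_steps, eval_loss = [], []
for entry in state["log_history"]:
    if "loss" in entry:        # per-step training entries (logged every logging_steps=5)
        train_steps.append(entry["step"])
        train_loss.append(entry["loss"])
    if "eval_loss" in entry:   # periodic evaluation entries (here at steps 285, 570, 855)
        eval_steps.append(entry["step"])
        eval_loss.append(entry["eval_loss"])

plt.plot(train_steps, train_loss, label="train loss")
plt.plot(eval_steps, eval_loss, "o", label="eval loss")
plt.xlabel("global step")
plt.ylabel("loss")
plt.legend()
plt.savefig("loss_curve.png")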