|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 1.0, |
|
"eval_steps": 500, |
|
"global_step": 2032, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0004921259842519685, |
|
"grad_norm": 23.289178620841536, |
|
"learning_rate": 4.901960784313726e-08, |
|
"loss": 1.3042, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0024606299212598425, |
|
"grad_norm": 22.578339014503392, |
|
"learning_rate": 2.4509803921568627e-07, |
|
"loss": 1.3271, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.004921259842519685, |
|
"grad_norm": 15.254145113240074, |
|
"learning_rate": 4.901960784313725e-07, |
|
"loss": 1.2787, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.0073818897637795275, |
|
"grad_norm": 11.623341812407018, |
|
"learning_rate": 7.352941176470589e-07, |
|
"loss": 1.1561, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.00984251968503937, |
|
"grad_norm": 9.64376871387551, |
|
"learning_rate": 9.80392156862745e-07, |
|
"loss": 1.034, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.012303149606299213, |
|
"grad_norm": 3.4638336177505513, |
|
"learning_rate": 1.2254901960784314e-06, |
|
"loss": 0.9177, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.014763779527559055, |
|
"grad_norm": 3.5862986630527347, |
|
"learning_rate": 1.4705882352941177e-06, |
|
"loss": 0.8909, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.0172244094488189, |
|
"grad_norm": 2.9675977602993884, |
|
"learning_rate": 1.715686274509804e-06, |
|
"loss": 0.8673, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.01968503937007874, |
|
"grad_norm": 3.0016020610328322, |
|
"learning_rate": 1.96078431372549e-06, |
|
"loss": 0.8523, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.02214566929133858, |
|
"grad_norm": 3.044795051743997, |
|
"learning_rate": 2.2058823529411767e-06, |
|
"loss": 0.836, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.024606299212598427, |
|
"grad_norm": 2.9272556512306047, |
|
"learning_rate": 2.450980392156863e-06, |
|
"loss": 0.8255, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.027066929133858268, |
|
"grad_norm": 2.9326611094737216, |
|
"learning_rate": 2.696078431372549e-06, |
|
"loss": 0.8163, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.02952755905511811, |
|
"grad_norm": 3.1751190158414553, |
|
"learning_rate": 2.9411764705882355e-06, |
|
"loss": 0.8098, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.03198818897637795, |
|
"grad_norm": 3.267001091193129, |
|
"learning_rate": 3.1862745098039216e-06, |
|
"loss": 0.7963, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.0344488188976378, |
|
"grad_norm": 3.2037792452122074, |
|
"learning_rate": 3.431372549019608e-06, |
|
"loss": 0.7904, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.036909448818897635, |
|
"grad_norm": 3.1335298386999817, |
|
"learning_rate": 3.6764705882352946e-06, |
|
"loss": 0.7767, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.03937007874015748, |
|
"grad_norm": 3.0748371474571066, |
|
"learning_rate": 3.92156862745098e-06, |
|
"loss": 0.7566, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.041830708661417325, |
|
"grad_norm": 3.168808058148803, |
|
"learning_rate": 4.166666666666667e-06, |
|
"loss": 0.7642, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.04429133858267716, |
|
"grad_norm": 3.099061017093863, |
|
"learning_rate": 4.411764705882353e-06, |
|
"loss": 0.7567, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.04675196850393701, |
|
"grad_norm": 3.1397025998475545, |
|
"learning_rate": 4.65686274509804e-06, |
|
"loss": 0.7489, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.04921259842519685, |
|
"grad_norm": 3.267009036532827, |
|
"learning_rate": 4.901960784313726e-06, |
|
"loss": 0.7428, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.05167322834645669, |
|
"grad_norm": 3.0814329808676137, |
|
"learning_rate": 5.147058823529411e-06, |
|
"loss": 0.739, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.054133858267716536, |
|
"grad_norm": 2.922079677455855, |
|
"learning_rate": 5.392156862745098e-06, |
|
"loss": 0.7285, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.056594488188976375, |
|
"grad_norm": 3.1140432595060723, |
|
"learning_rate": 5.637254901960784e-06, |
|
"loss": 0.7279, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.05905511811023622, |
|
"grad_norm": 2.890134615014165, |
|
"learning_rate": 5.882352941176471e-06, |
|
"loss": 0.7061, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.061515748031496065, |
|
"grad_norm": 3.003266954139478, |
|
"learning_rate": 6.1274509803921575e-06, |
|
"loss": 0.7124, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.0639763779527559, |
|
"grad_norm": 3.490944793138708, |
|
"learning_rate": 6.372549019607843e-06, |
|
"loss": 0.7, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.06643700787401575, |
|
"grad_norm": 2.957015579169823, |
|
"learning_rate": 6.61764705882353e-06, |
|
"loss": 0.7109, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.0688976377952756, |
|
"grad_norm": 2.9802255807893343, |
|
"learning_rate": 6.862745098039216e-06, |
|
"loss": 0.7307, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.07135826771653543, |
|
"grad_norm": 2.979885982433863, |
|
"learning_rate": 7.107843137254903e-06, |
|
"loss": 0.6999, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.07381889763779527, |
|
"grad_norm": 3.4724625938807483, |
|
"learning_rate": 7.352941176470589e-06, |
|
"loss": 0.7018, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.07627952755905512, |
|
"grad_norm": 2.8756072791555822, |
|
"learning_rate": 7.598039215686275e-06, |
|
"loss": 0.6946, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.07874015748031496, |
|
"grad_norm": 2.888280542800706, |
|
"learning_rate": 7.84313725490196e-06, |
|
"loss": 0.6953, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.0812007874015748, |
|
"grad_norm": 3.0409331177011176, |
|
"learning_rate": 8.088235294117648e-06, |
|
"loss": 0.693, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.08366141732283465, |
|
"grad_norm": 2.905841883072287, |
|
"learning_rate": 8.333333333333334e-06, |
|
"loss": 0.6995, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.08612204724409449, |
|
"grad_norm": 2.8022565504484476, |
|
"learning_rate": 8.57843137254902e-06, |
|
"loss": 0.6941, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.08858267716535433, |
|
"grad_norm": 2.7594498957096523, |
|
"learning_rate": 8.823529411764707e-06, |
|
"loss": 0.6852, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.09104330708661418, |
|
"grad_norm": 2.928739510451242, |
|
"learning_rate": 9.068627450980392e-06, |
|
"loss": 0.6849, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.09350393700787402, |
|
"grad_norm": 2.922241571597126, |
|
"learning_rate": 9.31372549019608e-06, |
|
"loss": 0.6961, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.09596456692913385, |
|
"grad_norm": 2.733962541795604, |
|
"learning_rate": 9.558823529411766e-06, |
|
"loss": 0.6964, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.0984251968503937, |
|
"grad_norm": 3.3436619097327585, |
|
"learning_rate": 9.803921568627451e-06, |
|
"loss": 0.6903, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.10088582677165354, |
|
"grad_norm": 2.819756078395206, |
|
"learning_rate": 9.999992616075212e-06, |
|
"loss": 0.6871, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.10334645669291338, |
|
"grad_norm": 2.8245769395745217, |
|
"learning_rate": 9.999734180997554e-06, |
|
"loss": 0.7001, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.10580708661417323, |
|
"grad_norm": 2.8711422702525207, |
|
"learning_rate": 9.999106571489132e-06, |
|
"loss": 0.6851, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.10826771653543307, |
|
"grad_norm": 3.0388643695545077, |
|
"learning_rate": 9.998109833891883e-06, |
|
"loss": 0.6927, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.11072834645669291, |
|
"grad_norm": 2.7895631269121393, |
|
"learning_rate": 9.996744041803731e-06, |
|
"loss": 0.6716, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.11318897637795275, |
|
"grad_norm": 2.686288810148222, |
|
"learning_rate": 9.995009296073138e-06, |
|
"loss": 0.6725, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.1156496062992126, |
|
"grad_norm": 2.800082841358338, |
|
"learning_rate": 9.992905724791669e-06, |
|
"loss": 0.6717, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.11811023622047244, |
|
"grad_norm": 2.566573753796988, |
|
"learning_rate": 9.990433483284527e-06, |
|
"loss": 0.6666, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.12057086614173228, |
|
"grad_norm": 2.6155377853742467, |
|
"learning_rate": 9.987592754099086e-06, |
|
"loss": 0.6724, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.12303149606299213, |
|
"grad_norm": 2.6609874435959795, |
|
"learning_rate": 9.984383746991416e-06, |
|
"loss": 0.6756, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.12549212598425197, |
|
"grad_norm": 2.651450027647874, |
|
"learning_rate": 9.980806698910787e-06, |
|
"loss": 0.6764, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.1279527559055118, |
|
"grad_norm": 2.738239456171724, |
|
"learning_rate": 9.976861873982177e-06, |
|
"loss": 0.6858, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.13041338582677164, |
|
"grad_norm": 2.5749543275223767, |
|
"learning_rate": 9.972549563486776e-06, |
|
"loss": 0.681, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.1328740157480315, |
|
"grad_norm": 2.54868336015626, |
|
"learning_rate": 9.967870085840463e-06, |
|
"loss": 0.6822, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.13533464566929135, |
|
"grad_norm": 2.348509038708982, |
|
"learning_rate": 9.962823786570306e-06, |
|
"loss": 0.6701, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.1377952755905512, |
|
"grad_norm": 2.545785789782642, |
|
"learning_rate": 9.95741103828905e-06, |
|
"loss": 0.6672, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.14025590551181102, |
|
"grad_norm": 2.724173815762578, |
|
"learning_rate": 9.951632240667592e-06, |
|
"loss": 0.6659, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.14271653543307086, |
|
"grad_norm": 2.6494992216672197, |
|
"learning_rate": 9.945487820405487e-06, |
|
"loss": 0.6642, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.1451771653543307, |
|
"grad_norm": 2.477475682520284, |
|
"learning_rate": 9.938978231199419e-06, |
|
"loss": 0.6794, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.14763779527559054, |
|
"grad_norm": 2.589636626095672, |
|
"learning_rate": 9.932103953709724e-06, |
|
"loss": 0.6583, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.1500984251968504, |
|
"grad_norm": 2.53566392464488, |
|
"learning_rate": 9.924865495524884e-06, |
|
"loss": 0.6737, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.15255905511811024, |
|
"grad_norm": 3.013667820537959, |
|
"learning_rate": 9.917263391124046e-06, |
|
"loss": 0.6475, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.15501968503937008, |
|
"grad_norm": 2.5986666259102837, |
|
"learning_rate": 9.90929820183757e-06, |
|
"loss": 0.6496, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.15748031496062992, |
|
"grad_norm": 3.1918308775019715, |
|
"learning_rate": 9.900970515805564e-06, |
|
"loss": 0.6527, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.15994094488188976, |
|
"grad_norm": 2.58199571141749, |
|
"learning_rate": 9.892280947934472e-06, |
|
"loss": 0.6507, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.1624015748031496, |
|
"grad_norm": 2.5096275929429828, |
|
"learning_rate": 9.883230139851656e-06, |
|
"loss": 0.6493, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.16486220472440946, |
|
"grad_norm": 2.543717236052826, |
|
"learning_rate": 9.873818759858034e-06, |
|
"loss": 0.6398, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.1673228346456693, |
|
"grad_norm": 2.3455715291925987, |
|
"learning_rate": 9.864047502878717e-06, |
|
"loss": 0.6509, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.16978346456692914, |
|
"grad_norm": 2.7443847653291606, |
|
"learning_rate": 9.853917090411713e-06, |
|
"loss": 0.6587, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.17224409448818898, |
|
"grad_norm": 2.574575364550669, |
|
"learning_rate": 9.84342827047464e-06, |
|
"loss": 0.6351, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.1747047244094488, |
|
"grad_norm": 2.555747928874768, |
|
"learning_rate": 9.832581817549497e-06, |
|
"loss": 0.6223, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.17716535433070865, |
|
"grad_norm": 2.3868178771545834, |
|
"learning_rate": 9.821378532525479e-06, |
|
"loss": 0.6423, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.1796259842519685, |
|
"grad_norm": 2.3906560211167434, |
|
"learning_rate": 9.809819242639841e-06, |
|
"loss": 0.6203, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.18208661417322836, |
|
"grad_norm": 2.613672178779019, |
|
"learning_rate": 9.79790480141681e-06, |
|
"loss": 0.6397, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.1845472440944882, |
|
"grad_norm": 2.704435062795677, |
|
"learning_rate": 9.785636088604571e-06, |
|
"loss": 0.6401, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.18700787401574803, |
|
"grad_norm": 2.4110784601265935, |
|
"learning_rate": 9.773014010110298e-06, |
|
"loss": 0.6506, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.18946850393700787, |
|
"grad_norm": 2.3808900476483434, |
|
"learning_rate": 9.760039497933266e-06, |
|
"loss": 0.6427, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.1919291338582677, |
|
"grad_norm": 2.443382230868993, |
|
"learning_rate": 9.74671351009604e-06, |
|
"loss": 0.6536, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.19438976377952755, |
|
"grad_norm": 2.53346201637983, |
|
"learning_rate": 9.733037030573725e-06, |
|
"loss": 0.629, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.1968503937007874, |
|
"grad_norm": 2.7306008040110776, |
|
"learning_rate": 9.719011069221316e-06, |
|
"loss": 0.6395, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.19931102362204725, |
|
"grad_norm": 2.5885107627509316, |
|
"learning_rate": 9.704636661699133e-06, |
|
"loss": 0.6142, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.2017716535433071, |
|
"grad_norm": 2.5758279906754993, |
|
"learning_rate": 9.68991486939635e-06, |
|
"loss": 0.6332, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.20423228346456693, |
|
"grad_norm": 2.527125806186406, |
|
"learning_rate": 9.674846779352613e-06, |
|
"loss": 0.6155, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.20669291338582677, |
|
"grad_norm": 2.352822836622023, |
|
"learning_rate": 9.659433504177786e-06, |
|
"loss": 0.6108, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.2091535433070866, |
|
"grad_norm": 2.428790076631395, |
|
"learning_rate": 9.643676181969792e-06, |
|
"loss": 0.6204, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.21161417322834647, |
|
"grad_norm": 2.5614397854394055, |
|
"learning_rate": 9.62757597623058e-06, |
|
"loss": 0.6088, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.2140748031496063, |
|
"grad_norm": 2.4898996021687227, |
|
"learning_rate": 9.611134075780209e-06, |
|
"loss": 0.6211, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.21653543307086615, |
|
"grad_norm": 2.4088704164520927, |
|
"learning_rate": 9.59435169466907e-06, |
|
"loss": 0.6242, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.21899606299212598, |
|
"grad_norm": 2.5289526680832113, |
|
"learning_rate": 9.577230072088246e-06, |
|
"loss": 0.613, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.22145669291338582, |
|
"grad_norm": 2.40778858369484, |
|
"learning_rate": 9.559770472277996e-06, |
|
"loss": 0.6106, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.22391732283464566, |
|
"grad_norm": 3.00357065325349, |
|
"learning_rate": 9.541974184434426e-06, |
|
"loss": 0.6086, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.2263779527559055, |
|
"grad_norm": 2.420620535949469, |
|
"learning_rate": 9.523842522614285e-06, |
|
"loss": 0.604, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.22883858267716536, |
|
"grad_norm": 2.8983682401439936, |
|
"learning_rate": 9.505376825637933e-06, |
|
"loss": 0.5962, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.2312992125984252, |
|
"grad_norm": 2.6058841172777334, |
|
"learning_rate": 9.486578456990494e-06, |
|
"loss": 0.6022, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.23375984251968504, |
|
"grad_norm": 2.420955967093701, |
|
"learning_rate": 9.467448804721171e-06, |
|
"loss": 0.5849, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.23622047244094488, |
|
"grad_norm": 2.557055641112582, |
|
"learning_rate": 9.447989281340753e-06, |
|
"loss": 0.6052, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.23868110236220472, |
|
"grad_norm": 2.6358527915325625, |
|
"learning_rate": 9.428201323717327e-06, |
|
"loss": 0.6066, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.24114173228346455, |
|
"grad_norm": 2.514164645393038, |
|
"learning_rate": 9.408086392970167e-06, |
|
"loss": 0.6057, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.24360236220472442, |
|
"grad_norm": 2.386864670354864, |
|
"learning_rate": 9.387645974361858e-06, |
|
"loss": 0.5921, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.24606299212598426, |
|
"grad_norm": 2.572659612274251, |
|
"learning_rate": 9.36688157718862e-06, |
|
"loss": 0.5766, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.2485236220472441, |
|
"grad_norm": 2.4584516295055194, |
|
"learning_rate": 9.345794734668866e-06, |
|
"loss": 0.5922, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.25098425196850394, |
|
"grad_norm": 2.27813169588504, |
|
"learning_rate": 9.324387003829993e-06, |
|
"loss": 0.6002, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.2534448818897638, |
|
"grad_norm": 2.513354918132864, |
|
"learning_rate": 9.302659965393404e-06, |
|
"loss": 0.5863, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.2559055118110236, |
|
"grad_norm": 2.57065216369518, |
|
"learning_rate": 9.280615223657801e-06, |
|
"loss": 0.5827, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.25836614173228345, |
|
"grad_norm": 2.508285807949247, |
|
"learning_rate": 9.258254406380718e-06, |
|
"loss": 0.6003, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.2608267716535433, |
|
"grad_norm": 2.393181758359751, |
|
"learning_rate": 9.23557916465833e-06, |
|
"loss": 0.5876, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.2632874015748031, |
|
"grad_norm": 2.45141675908852, |
|
"learning_rate": 9.212591172803541e-06, |
|
"loss": 0.5782, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.265748031496063, |
|
"grad_norm": 2.602808329250647, |
|
"learning_rate": 9.189292128222355e-06, |
|
"loss": 0.5817, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.26820866141732286, |
|
"grad_norm": 2.6264024238291332, |
|
"learning_rate": 9.165683751288537e-06, |
|
"loss": 0.5987, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.2706692913385827, |
|
"grad_norm": 2.5206627459795086, |
|
"learning_rate": 9.141767785216585e-06, |
|
"loss": 0.5773, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.27312992125984253, |
|
"grad_norm": 2.538664530429439, |
|
"learning_rate": 9.117545995933015e-06, |
|
"loss": 0.5725, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.2755905511811024, |
|
"grad_norm": 2.541257403182122, |
|
"learning_rate": 9.093020171945966e-06, |
|
"loss": 0.5667, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.2780511811023622, |
|
"grad_norm": 2.3883496422885937, |
|
"learning_rate": 9.068192124213135e-06, |
|
"loss": 0.5681, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.28051181102362205, |
|
"grad_norm": 2.4768611856821128, |
|
"learning_rate": 9.043063686008066e-06, |
|
"loss": 0.5693, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.2829724409448819, |
|
"grad_norm": 2.531509664449831, |
|
"learning_rate": 9.017636712784776e-06, |
|
"loss": 0.5712, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.2854330708661417, |
|
"grad_norm": 2.4497026467822134, |
|
"learning_rate": 8.991913082040752e-06, |
|
"loss": 0.5711, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.28789370078740156, |
|
"grad_norm": 2.3627002230344027, |
|
"learning_rate": 8.96589469317832e-06, |
|
"loss": 0.5746, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.2903543307086614, |
|
"grad_norm": 2.5848492576021207, |
|
"learning_rate": 8.9395834673644e-06, |
|
"loss": 0.5638, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.29281496062992124, |
|
"grad_norm": 2.7943897726154026, |
|
"learning_rate": 8.912981347388634e-06, |
|
"loss": 0.5716, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.2952755905511811, |
|
"grad_norm": 2.4233634727846547, |
|
"learning_rate": 8.886090297519956e-06, |
|
"loss": 0.5589, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.29773622047244097, |
|
"grad_norm": 2.406617785286132, |
|
"learning_rate": 8.85891230336153e-06, |
|
"loss": 0.5743, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.3001968503937008, |
|
"grad_norm": 2.375668869002511, |
|
"learning_rate": 8.83144937170415e-06, |
|
"loss": 0.5536, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.30265748031496065, |
|
"grad_norm": 2.3455722754718416, |
|
"learning_rate": 8.803703530378059e-06, |
|
"loss": 0.5516, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.3051181102362205, |
|
"grad_norm": 2.5237377829623107, |
|
"learning_rate": 8.775676828103205e-06, |
|
"loss": 0.5565, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.3075787401574803, |
|
"grad_norm": 2.287022202884204, |
|
"learning_rate": 8.747371334337983e-06, |
|
"loss": 0.5725, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.31003937007874016, |
|
"grad_norm": 2.291688895957761, |
|
"learning_rate": 8.718789139126417e-06, |
|
"loss": 0.545, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.3125, |
|
"grad_norm": 2.527855069790082, |
|
"learning_rate": 8.689932352943837e-06, |
|
"loss": 0.543, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.31496062992125984, |
|
"grad_norm": 2.3987364116312535, |
|
"learning_rate": 8.660803106541044e-06, |
|
"loss": 0.5475, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.3174212598425197, |
|
"grad_norm": 2.61276055348719, |
|
"learning_rate": 8.631403550786979e-06, |
|
"loss": 0.5488, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.3198818897637795, |
|
"grad_norm": 2.419050184531339, |
|
"learning_rate": 8.601735856509903e-06, |
|
"loss": 0.5476, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.32234251968503935, |
|
"grad_norm": 2.3768631155511537, |
|
"learning_rate": 8.571802214337107e-06, |
|
"loss": 0.5637, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.3248031496062992, |
|
"grad_norm": 2.4228616659745987, |
|
"learning_rate": 8.541604834533159e-06, |
|
"loss": 0.5305, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.32726377952755903, |
|
"grad_norm": 2.3754663831523875, |
|
"learning_rate": 8.511145946836704e-06, |
|
"loss": 0.5331, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 0.3297244094488189, |
|
"grad_norm": 2.3398125333991535, |
|
"learning_rate": 8.48042780029581e-06, |
|
"loss": 0.5357, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.33218503937007876, |
|
"grad_norm": 2.4983820414217663, |
|
"learning_rate": 8.449452663101918e-06, |
|
"loss": 0.5332, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.3346456692913386, |
|
"grad_norm": 2.3721219410260397, |
|
"learning_rate": 8.418222822422348e-06, |
|
"loss": 0.5417, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.33710629921259844, |
|
"grad_norm": 2.360196232033206, |
|
"learning_rate": 8.386740584231431e-06, |
|
"loss": 0.5461, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 0.3395669291338583, |
|
"grad_norm": 2.466049773095662, |
|
"learning_rate": 8.355008273140222e-06, |
|
"loss": 0.5253, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.3420275590551181, |
|
"grad_norm": 2.348269078433154, |
|
"learning_rate": 8.323028232224863e-06, |
|
"loss": 0.5304, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 0.34448818897637795, |
|
"grad_norm": 2.476352965234454, |
|
"learning_rate": 8.290802822853576e-06, |
|
"loss": 0.5349, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.3469488188976378, |
|
"grad_norm": 2.279222616967455, |
|
"learning_rate": 8.258334424512293e-06, |
|
"loss": 0.546, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 0.3494094488188976, |
|
"grad_norm": 2.3637014928251623, |
|
"learning_rate": 8.22562543462897e-06, |
|
"loss": 0.5371, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.35187007874015747, |
|
"grad_norm": 2.619852828253986, |
|
"learning_rate": 8.192678268396545e-06, |
|
"loss": 0.5237, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 0.3543307086614173, |
|
"grad_norm": 2.5571753512444912, |
|
"learning_rate": 8.159495358594627e-06, |
|
"loss": 0.5296, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.35679133858267714, |
|
"grad_norm": 2.312802552942875, |
|
"learning_rate": 8.126079155409845e-06, |
|
"loss": 0.5235, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.359251968503937, |
|
"grad_norm": 2.5951586860371374, |
|
"learning_rate": 8.092432126254933e-06, |
|
"loss": 0.5031, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.3617125984251969, |
|
"grad_norm": 2.325097293624387, |
|
"learning_rate": 8.058556755586537e-06, |
|
"loss": 0.5233, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 0.3641732283464567, |
|
"grad_norm": 2.2732745100546725, |
|
"learning_rate": 8.024455544721778e-06, |
|
"loss": 0.5247, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.36663385826771655, |
|
"grad_norm": 2.363542176910271, |
|
"learning_rate": 7.990131011653545e-06, |
|
"loss": 0.5218, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 0.3690944881889764, |
|
"grad_norm": 2.3003464358723127, |
|
"learning_rate": 7.955585690864567e-06, |
|
"loss": 0.5116, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.3715551181102362, |
|
"grad_norm": 2.3293766926582213, |
|
"learning_rate": 7.920822133140285e-06, |
|
"loss": 0.5263, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 0.37401574803149606, |
|
"grad_norm": 2.334188982780053, |
|
"learning_rate": 7.88584290538049e-06, |
|
"loss": 0.4941, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.3764763779527559, |
|
"grad_norm": 2.351749135985343, |
|
"learning_rate": 7.850650590409795e-06, |
|
"loss": 0.5116, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 0.37893700787401574, |
|
"grad_norm": 2.296567696173685, |
|
"learning_rate": 7.815247786786919e-06, |
|
"loss": 0.5225, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.3813976377952756, |
|
"grad_norm": 2.4483931002237975, |
|
"learning_rate": 7.779637108612813e-06, |
|
"loss": 0.5188, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.3838582677165354, |
|
"grad_norm": 2.372343104732536, |
|
"learning_rate": 7.743821185337634e-06, |
|
"loss": 0.5138, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.38631889763779526, |
|
"grad_norm": 2.561786660857743, |
|
"learning_rate": 7.7078026615666e-06, |
|
"loss": 0.5005, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 0.3887795275590551, |
|
"grad_norm": 2.2678689682364017, |
|
"learning_rate": 7.671584196864703e-06, |
|
"loss": 0.5144, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.391240157480315, |
|
"grad_norm": 2.4118009586518223, |
|
"learning_rate": 7.635168465560343e-06, |
|
"loss": 0.507, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 0.3937007874015748, |
|
"grad_norm": 2.3699190289261742, |
|
"learning_rate": 7.598558156547842e-06, |
|
"loss": 0.5058, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.39616141732283466, |
|
"grad_norm": 2.3834254512275894, |
|
"learning_rate": 7.561755973088917e-06, |
|
"loss": 0.5098, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 0.3986220472440945, |
|
"grad_norm": 2.3484210670312304, |
|
"learning_rate": 7.52476463261306e-06, |
|
"loss": 0.5101, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.40108267716535434, |
|
"grad_norm": 2.259743181885874, |
|
"learning_rate": 7.487586866516897e-06, |
|
"loss": 0.4957, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 0.4035433070866142, |
|
"grad_norm": 2.459196143941421, |
|
"learning_rate": 7.450225419962498e-06, |
|
"loss": 0.5018, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.406003937007874, |
|
"grad_norm": 2.3987573923118326, |
|
"learning_rate": 7.412683051674681e-06, |
|
"loss": 0.505, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 0.40846456692913385, |
|
"grad_norm": 2.3945193465931527, |
|
"learning_rate": 7.374962533737306e-06, |
|
"loss": 0.5021, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.4109251968503937, |
|
"grad_norm": 2.450543787256226, |
|
"learning_rate": 7.3370666513885965e-06, |
|
"loss": 0.5045, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 0.41338582677165353, |
|
"grad_norm": 2.3773634574014624, |
|
"learning_rate": 7.298998202815474e-06, |
|
"loss": 0.4843, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.41584645669291337, |
|
"grad_norm": 2.3218222930991312, |
|
"learning_rate": 7.260759998946945e-06, |
|
"loss": 0.4999, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 0.4183070866141732, |
|
"grad_norm": 2.2812480048868045, |
|
"learning_rate": 7.2223548632465424e-06, |
|
"loss": 0.4903, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.42076771653543305, |
|
"grad_norm": 2.3409310514361246, |
|
"learning_rate": 7.183785631503851e-06, |
|
"loss": 0.4816, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 0.42322834645669294, |
|
"grad_norm": 2.3088182610498897, |
|
"learning_rate": 7.145055151625113e-06, |
|
"loss": 0.4754, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.4256889763779528, |
|
"grad_norm": 2.3963782413431325, |
|
"learning_rate": 7.1061662834229375e-06, |
|
"loss": 0.4918, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 0.4281496062992126, |
|
"grad_norm": 2.375549548917402, |
|
"learning_rate": 7.0671218984051385e-06, |
|
"loss": 0.4797, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.43061023622047245, |
|
"grad_norm": 2.340240253487199, |
|
"learning_rate": 7.0279248795627156e-06, |
|
"loss": 0.4804, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 0.4330708661417323, |
|
"grad_norm": 2.5999252908460564, |
|
"learning_rate": 6.988578121156956e-06, |
|
"loss": 0.4975, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.43553149606299213, |
|
"grad_norm": 2.412860995827959, |
|
"learning_rate": 6.9490845285057505e-06, |
|
"loss": 0.4757, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 0.43799212598425197, |
|
"grad_norm": 2.3723415230376053, |
|
"learning_rate": 6.909447017769047e-06, |
|
"loss": 0.485, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.4404527559055118, |
|
"grad_norm": 2.2444505424132357, |
|
"learning_rate": 6.869668515733536e-06, |
|
"loss": 0.4791, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 0.44291338582677164, |
|
"grad_norm": 2.4018284029434627, |
|
"learning_rate": 6.829751959596544e-06, |
|
"loss": 0.4781, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.4453740157480315, |
|
"grad_norm": 2.2719247540147873, |
|
"learning_rate": 6.789700296749141e-06, |
|
"loss": 0.4664, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 0.4478346456692913, |
|
"grad_norm": 2.5647531601957296, |
|
"learning_rate": 6.749516484558518e-06, |
|
"loss": 0.4771, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.45029527559055116, |
|
"grad_norm": 2.209823332066905, |
|
"learning_rate": 6.709203490149615e-06, |
|
"loss": 0.4775, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 0.452755905511811, |
|
"grad_norm": 2.3889046698682734, |
|
"learning_rate": 6.668764290186039e-06, |
|
"loss": 0.4632, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.4552165354330709, |
|
"grad_norm": 2.3497617502204142, |
|
"learning_rate": 6.628201870650262e-06, |
|
"loss": 0.4609, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 0.45767716535433073, |
|
"grad_norm": 2.286479842578479, |
|
"learning_rate": 6.587519226623137e-06, |
|
"loss": 0.4584, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.46013779527559057, |
|
"grad_norm": 2.346679839886123, |
|
"learning_rate": 6.546719362062763e-06, |
|
"loss": 0.4627, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 0.4625984251968504, |
|
"grad_norm": 2.372517236495747, |
|
"learning_rate": 6.50580528958265e-06, |
|
"loss": 0.4627, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.46505905511811024, |
|
"grad_norm": 2.352894112351414, |
|
"learning_rate": 6.464780030229297e-06, |
|
"loss": 0.4613, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 0.4675196850393701, |
|
"grad_norm": 2.3155134840690073, |
|
"learning_rate": 6.423646613259103e-06, |
|
"loss": 0.4643, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.4699803149606299, |
|
"grad_norm": 2.2802677404420604, |
|
"learning_rate": 6.382408075914698e-06, |
|
"loss": 0.4675, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 0.47244094488188976, |
|
"grad_norm": 2.3261149198375577, |
|
"learning_rate": 6.341067463200678e-06, |
|
"loss": 0.4661, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.4749015748031496, |
|
"grad_norm": 2.3024057597107723, |
|
"learning_rate": 6.299627827658757e-06, |
|
"loss": 0.4695, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 0.47736220472440943, |
|
"grad_norm": 2.3579401662222805, |
|
"learning_rate": 6.258092229142383e-06, |
|
"loss": 0.4557, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.47982283464566927, |
|
"grad_norm": 2.4039007796045624, |
|
"learning_rate": 6.216463734590797e-06, |
|
"loss": 0.4733, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 0.4822834645669291, |
|
"grad_norm": 2.350383170709395, |
|
"learning_rate": 6.174745417802563e-06, |
|
"loss": 0.4542, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.484744094488189, |
|
"grad_norm": 2.3727437081826195, |
|
"learning_rate": 6.132940359208625e-06, |
|
"loss": 0.4599, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 0.48720472440944884, |
|
"grad_norm": 2.2638903752102877, |
|
"learning_rate": 6.09105164564483e-06, |
|
"loss": 0.4581, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.4896653543307087, |
|
"grad_norm": 2.391666707671801, |
|
"learning_rate": 6.049082370124011e-06, |
|
"loss": 0.4335, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 0.4921259842519685, |
|
"grad_norm": 2.285287290642185, |
|
"learning_rate": 6.007035631607605e-06, |
|
"loss": 0.4322, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.49458661417322836, |
|
"grad_norm": 2.3527234691645886, |
|
"learning_rate": 5.964914534776814e-06, |
|
"loss": 0.4452, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 0.4970472440944882, |
|
"grad_norm": 2.388047521619243, |
|
"learning_rate": 5.9227221898033785e-06, |
|
"loss": 0.4452, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.49950787401574803, |
|
"grad_norm": 2.330957204189204, |
|
"learning_rate": 5.880461712119913e-06, |
|
"loss": 0.4509, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 0.5019685039370079, |
|
"grad_norm": 2.3767662326352146, |
|
"learning_rate": 5.838136222189874e-06, |
|
"loss": 0.4417, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.5044291338582677, |
|
"grad_norm": 2.3296456305613553, |
|
"learning_rate": 5.795748845277143e-06, |
|
"loss": 0.4456, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 0.5068897637795275, |
|
"grad_norm": 2.3238395450456317, |
|
"learning_rate": 5.75330271121526e-06, |
|
"loss": 0.4483, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 0.5093503937007874, |
|
"grad_norm": 2.271731748541768, |
|
"learning_rate": 5.710800954176326e-06, |
|
"loss": 0.4439, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 0.5118110236220472, |
|
"grad_norm": 2.2730785805154716, |
|
"learning_rate": 5.668246712439579e-06, |
|
"loss": 0.438, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 0.5142716535433071, |
|
"grad_norm": 2.2984158447258856, |
|
"learning_rate": 5.625643128159658e-06, |
|
"loss": 0.4371, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 0.5167322834645669, |
|
"grad_norm": 2.138298543480362, |
|
"learning_rate": 5.582993347134604e-06, |
|
"loss": 0.4358, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.5191929133858267, |
|
"grad_norm": 2.2656933007145326, |
|
"learning_rate": 5.540300518573564e-06, |
|
"loss": 0.4456, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 0.5216535433070866, |
|
"grad_norm": 2.4687742042434437, |
|
"learning_rate": 5.4975677948642704e-06, |
|
"loss": 0.4292, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 0.5241141732283464, |
|
"grad_norm": 2.2023440824897538, |
|
"learning_rate": 5.454798331340261e-06, |
|
"loss": 0.4448, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 0.5265748031496063, |
|
"grad_norm": 2.3820756070238844, |
|
"learning_rate": 5.4119952860479e-06, |
|
"loss": 0.4322, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 0.5290354330708661, |
|
"grad_norm": 2.307966181110738, |
|
"learning_rate": 5.369161819513189e-06, |
|
"loss": 0.4345, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 0.531496062992126, |
|
"grad_norm": 2.4566374524002006, |
|
"learning_rate": 5.3263010945083994e-06, |
|
"loss": 0.4342, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.5339566929133859, |
|
"grad_norm": 2.304141656628352, |
|
"learning_rate": 5.283416275818531e-06, |
|
"loss": 0.4304, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 0.5364173228346457, |
|
"grad_norm": 2.3743587538000974, |
|
"learning_rate": 5.240510530007641e-06, |
|
"loss": 0.4248, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 0.5388779527559056, |
|
"grad_norm": 2.275764809511964, |
|
"learning_rate": 5.1975870251850105e-06, |
|
"loss": 0.4294, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 0.5413385826771654, |
|
"grad_norm": 2.4755101283685823, |
|
"learning_rate": 5.1546489307712345e-06, |
|
"loss": 0.4248, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.5437992125984252, |
|
"grad_norm": 2.1985695933129406, |
|
"learning_rate": 5.111699417264177e-06, |
|
"loss": 0.423, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 0.5462598425196851, |
|
"grad_norm": 2.2532688868144537, |
|
"learning_rate": 5.06874165600488e-06, |
|
"loss": 0.4247, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 0.5487204724409449, |
|
"grad_norm": 2.5064255810867553, |
|
"learning_rate": 5.025778818943391e-06, |
|
"loss": 0.4171, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 0.5511811023622047, |
|
"grad_norm": 2.223795201431944, |
|
"learning_rate": 4.982814078404543e-06, |
|
"loss": 0.4136, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 0.5536417322834646, |
|
"grad_norm": 2.321073710905454, |
|
"learning_rate": 4.939850606853724e-06, |
|
"loss": 0.4128, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 0.5561023622047244, |
|
"grad_norm": 2.352196407309558, |
|
"learning_rate": 4.89689157666262e-06, |
|
"loss": 0.4225, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 0.5585629921259843, |
|
"grad_norm": 2.3165609074323386, |
|
"learning_rate": 4.853940159874972e-06, |
|
"loss": 0.4137, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 0.5610236220472441, |
|
"grad_norm": 2.249863447456596, |
|
"learning_rate": 4.8109995279723556e-06, |
|
"loss": 0.4124, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 0.5634842519685039, |
|
"grad_norm": 2.3324932344927927, |
|
"learning_rate": 4.768072851640006e-06, |
|
"loss": 0.4092, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 0.5659448818897638, |
|
"grad_norm": 2.175622171626361, |
|
"learning_rate": 4.7251633005326935e-06, |
|
"loss": 0.4044, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.5684055118110236, |
|
"grad_norm": 2.244989030279403, |
|
"learning_rate": 4.682274043040682e-06, |
|
"loss": 0.4102, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 0.5708661417322834, |
|
"grad_norm": 2.3655854821968583, |
|
"learning_rate": 4.639408246055781e-06, |
|
"loss": 0.3998, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 0.5733267716535433, |
|
"grad_norm": 2.3662328414472147, |
|
"learning_rate": 4.596569074737501e-06, |
|
"loss": 0.4209, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 0.5757874015748031, |
|
"grad_norm": 2.2898685582377114, |
|
"learning_rate": 4.5537596922793475e-06, |
|
"loss": 0.3884, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 0.578248031496063, |
|
"grad_norm": 2.175371768003494, |
|
"learning_rate": 4.510983259675252e-06, |
|
"loss": 0.413, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 0.5807086614173228, |
|
"grad_norm": 2.174940506600783, |
|
"learning_rate": 4.468242935486164e-06, |
|
"loss": 0.3977, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 0.5831692913385826, |
|
"grad_norm": 2.2645627809171205, |
|
"learning_rate": 4.425541875606837e-06, |
|
"loss": 0.4, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 0.5856299212598425, |
|
"grad_norm": 2.3434052194119097, |
|
"learning_rate": 4.38288323303279e-06, |
|
"loss": 0.416, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 0.5880905511811023, |
|
"grad_norm": 2.1589090436361027, |
|
"learning_rate": 4.340270157627496e-06, |
|
"loss": 0.3975, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 0.5905511811023622, |
|
"grad_norm": 2.2770509922795226, |
|
"learning_rate": 4.29770579588981e-06, |
|
"loss": 0.4133, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.593011811023622, |
|
"grad_norm": 2.133861488496685, |
|
"learning_rate": 4.255193290721626e-06, |
|
"loss": 0.3965, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 0.5954724409448819, |
|
"grad_norm": 2.0592654419552474, |
|
"learning_rate": 4.2127357811958006e-06, |
|
"loss": 0.3944, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 0.5979330708661418, |
|
"grad_norm": 2.3061861279285756, |
|
"learning_rate": 4.170336402324393e-06, |
|
"loss": 0.3989, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 0.6003937007874016, |
|
"grad_norm": 2.1897218075138665, |
|
"learning_rate": 4.127998284827148e-06, |
|
"loss": 0.3922, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 0.6028543307086615, |
|
"grad_norm": 2.273569046487088, |
|
"learning_rate": 4.085724554900359e-06, |
|
"loss": 0.3919, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 0.6053149606299213, |
|
"grad_norm": 2.316647904273548, |
|
"learning_rate": 4.043518333986009e-06, |
|
"loss": 0.4004, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 0.6077755905511811, |
|
"grad_norm": 2.139410821479213, |
|
"learning_rate": 4.001382738541291e-06, |
|
"loss": 0.3868, |
|
"step": 1235 |
|
}, |
|
{ |
|
"epoch": 0.610236220472441, |
|
"grad_norm": 2.4158380958610266, |
|
"learning_rate": 3.9593208798085094e-06, |
|
"loss": 0.3968, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 0.6126968503937008, |
|
"grad_norm": 2.2664180097106934, |
|
"learning_rate": 3.9173358635853285e-06, |
|
"loss": 0.3977, |
|
"step": 1245 |
|
}, |
|
{ |
|
"epoch": 0.6151574803149606, |
|
"grad_norm": 2.1617748237173995, |
|
"learning_rate": 3.875430789995454e-06, |
|
"loss": 0.3951, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 0.6176181102362205, |
|
"grad_norm": 2.3139618808394258, |
|
"learning_rate": 3.833608753259729e-06, |
|
"loss": 0.3883, |
|
"step": 1255 |
|
}, |
|
{ |
|
"epoch": 0.6200787401574803, |
|
"grad_norm": 2.2557729958410646, |
|
"learning_rate": 3.791872841467643e-06, |
|
"loss": 0.3833, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 0.6225393700787402, |
|
"grad_norm": 2.288978630108445, |
|
"learning_rate": 3.7502261363493346e-06, |
|
"loss": 0.392, |
|
"step": 1265 |
|
}, |
|
{ |
|
"epoch": 0.625, |
|
"grad_norm": 2.400211890621822, |
|
"learning_rate": 3.708671713048017e-06, |
|
"loss": 0.382, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 0.6274606299212598, |
|
"grad_norm": 2.14291748228325, |
|
"learning_rate": 3.6672126398929273e-06, |
|
"loss": 0.3868, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 0.6299212598425197, |
|
"grad_norm": 2.189481811128098, |
|
"learning_rate": 3.625851978172765e-06, |
|
"loss": 0.3844, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 0.6323818897637795, |
|
"grad_norm": 2.17033479220634, |
|
"learning_rate": 3.5845927819096405e-06, |
|
"loss": 0.3767, |
|
"step": 1285 |
|
}, |
|
{ |
|
"epoch": 0.6348425196850394, |
|
"grad_norm": 2.1024337319523285, |
|
"learning_rate": 3.543438097633577e-06, |
|
"loss": 0.385, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 0.6373031496062992, |
|
"grad_norm": 2.1601959551273584, |
|
"learning_rate": 3.5023909641575604e-06, |
|
"loss": 0.3655, |
|
"step": 1295 |
|
}, |
|
{ |
|
"epoch": 0.639763779527559, |
|
"grad_norm": 2.2147245957921324, |
|
"learning_rate": 3.4614544123531476e-06, |
|
"loss": 0.3846, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.6422244094488189, |
|
"grad_norm": 2.2767016812027037, |
|
"learning_rate": 3.4206314649266813e-06, |
|
"loss": 0.3829, |
|
"step": 1305 |
|
}, |
|
{ |
|
"epoch": 0.6446850393700787, |
|
"grad_norm": 2.189301400471183, |
|
"learning_rate": 3.3799251361960883e-06, |
|
"loss": 0.3793, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 0.6471456692913385, |
|
"grad_norm": 2.2486478577990328, |
|
"learning_rate": 3.339338431868311e-06, |
|
"loss": 0.3651, |
|
"step": 1315 |
|
}, |
|
{ |
|
"epoch": 0.6496062992125984, |
|
"grad_norm": 2.1239701183150186, |
|
"learning_rate": 3.29887434881737e-06, |
|
"loss": 0.3962, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 0.6520669291338582, |
|
"grad_norm": 2.0943809822170194, |
|
"learning_rate": 3.2585358748630725e-06, |
|
"loss": 0.3781, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 0.6545275590551181, |
|
"grad_norm": 2.302998816495133, |
|
"learning_rate": 3.2183259885504003e-06, |
|
"loss": 0.3599, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 0.656988188976378, |
|
"grad_norm": 2.093137463735972, |
|
"learning_rate": 3.1782476589295803e-06, |
|
"loss": 0.3693, |
|
"step": 1335 |
|
}, |
|
{ |
|
"epoch": 0.6594488188976378, |
|
"grad_norm": 2.0537589165415038, |
|
"learning_rate": 3.138303845336844e-06, |
|
"loss": 0.3767, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 0.6619094488188977, |
|
"grad_norm": 2.1644873169815018, |
|
"learning_rate": 3.098497497175925e-06, |
|
"loss": 0.3758, |
|
"step": 1345 |
|
}, |
|
{ |
|
"epoch": 0.6643700787401575, |
|
"grad_norm": 2.1158460604452904, |
|
"learning_rate": 3.0588315537002682e-06, |
|
"loss": 0.3715, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 0.6668307086614174, |
|
"grad_norm": 2.312489752052895, |
|
"learning_rate": 3.0193089437960043e-06, |
|
"loss": 0.3764, |
|
"step": 1355 |
|
}, |
|
{ |
|
"epoch": 0.6692913385826772, |
|
"grad_norm": 2.4373363981556686, |
|
"learning_rate": 2.9799325857656856e-06, |
|
"loss": 0.3639, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 0.671751968503937, |
|
"grad_norm": 2.051650821654651, |
|
"learning_rate": 2.940705387112798e-06, |
|
"loss": 0.3456, |
|
"step": 1365 |
|
}, |
|
{ |
|
"epoch": 0.6742125984251969, |
|
"grad_norm": 2.162923091354617, |
|
"learning_rate": 2.901630244327075e-06, |
|
"loss": 0.3737, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 0.6766732283464567, |
|
"grad_norm": 2.1826289176609617, |
|
"learning_rate": 2.862710042670629e-06, |
|
"loss": 0.3777, |
|
"step": 1375 |
|
}, |
|
{ |
|
"epoch": 0.6791338582677166, |
|
"grad_norm": 2.271492512262752, |
|
"learning_rate": 2.8239476559649013e-06, |
|
"loss": 0.3672, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 0.6815944881889764, |
|
"grad_norm": 2.0841809045493442, |
|
"learning_rate": 2.7853459463784643e-06, |
|
"loss": 0.3658, |
|
"step": 1385 |
|
}, |
|
{ |
|
"epoch": 0.6840551181102362, |
|
"grad_norm": 2.220969106748428, |
|
"learning_rate": 2.7469077642156844e-06, |
|
"loss": 0.3602, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 0.6865157480314961, |
|
"grad_norm": 2.009625282982446, |
|
"learning_rate": 2.7086359477062542e-06, |
|
"loss": 0.353, |
|
"step": 1395 |
|
}, |
|
{ |
|
"epoch": 0.6889763779527559, |
|
"grad_norm": 2.267920231263409, |
|
"learning_rate": 2.6705333227956304e-06, |
|
"loss": 0.3672, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 0.6914370078740157, |
|
"grad_norm": 2.099753211120495, |
|
"learning_rate": 2.6326027029363575e-06, |
|
"loss": 0.3657, |
|
"step": 1405 |
|
}, |
|
{ |
|
"epoch": 0.6938976377952756, |
|
"grad_norm": 2.3887622578641046, |
|
"learning_rate": 2.5948468888803323e-06, |
|
"loss": 0.3641, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 0.6963582677165354, |
|
"grad_norm": 2.2574854591596063, |
|
"learning_rate": 2.557268668472002e-06, |
|
"loss": 0.3681, |
|
"step": 1415 |
|
}, |
|
{ |
|
"epoch": 0.6988188976377953, |
|
"grad_norm": 2.1541998550392285, |
|
"learning_rate": 2.5198708164425046e-06, |
|
"loss": 0.3666, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 0.7012795275590551, |
|
"grad_norm": 2.2114896772148924, |
|
"learning_rate": 2.482656094204794e-06, |
|
"loss": 0.3721, |
|
"step": 1425 |
|
}, |
|
{ |
|
"epoch": 0.7037401574803149, |
|
"grad_norm": 1.999095279733753, |
|
"learning_rate": 2.445627249649742e-06, |
|
"loss": 0.3533, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 0.7062007874015748, |
|
"grad_norm": 2.447157001336924, |
|
"learning_rate": 2.4087870169432263e-06, |
|
"loss": 0.3512, |
|
"step": 1435 |
|
}, |
|
{ |
|
"epoch": 0.7086614173228346, |
|
"grad_norm": 2.1383643267190875, |
|
"learning_rate": 2.372138116324254e-06, |
|
"loss": 0.3637, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 0.7111220472440944, |
|
"grad_norm": 2.2411777344710933, |
|
"learning_rate": 2.3356832539040976e-06, |
|
"loss": 0.3521, |
|
"step": 1445 |
|
}, |
|
{ |
|
"epoch": 0.7135826771653543, |
|
"grad_norm": 2.081326294904533, |
|
"learning_rate": 2.2994251214664754e-06, |
|
"loss": 0.3636, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 0.7160433070866141, |
|
"grad_norm": 2.0265851050443717, |
|
"learning_rate": 2.263366396268806e-06, |
|
"loss": 0.3631, |
|
"step": 1455 |
|
}, |
|
{ |
|
"epoch": 0.718503937007874, |
|
"grad_norm": 2.016445263024121, |
|
"learning_rate": 2.227509740844508e-06, |
|
"loss": 0.3506, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 0.7209645669291339, |
|
"grad_norm": 1.9859142840819781, |
|
"learning_rate": 2.191857802806409e-06, |
|
"loss": 0.3492, |
|
"step": 1465 |
|
}, |
|
{ |
|
"epoch": 0.7234251968503937, |
|
"grad_norm": 2.131894669259727, |
|
"learning_rate": 2.1564132146512494e-06, |
|
"loss": 0.361, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 0.7258858267716536, |
|
"grad_norm": 2.054521134481261, |
|
"learning_rate": 2.1211785935652974e-06, |
|
"loss": 0.353, |
|
"step": 1475 |
|
}, |
|
{ |
|
"epoch": 0.7283464566929134, |
|
"grad_norm": 2.195553612057405, |
|
"learning_rate": 2.086156541231109e-06, |
|
"loss": 0.3541, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 0.7308070866141733, |
|
"grad_norm": 2.078190944421504, |
|
"learning_rate": 2.05134964363541e-06, |
|
"loss": 0.3532, |
|
"step": 1485 |
|
}, |
|
{ |
|
"epoch": 0.7332677165354331, |
|
"grad_norm": 1.9784288837683923, |
|
"learning_rate": 2.016760470878158e-06, |
|
"loss": 0.3537, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 0.7357283464566929, |
|
"grad_norm": 2.161906206866742, |
|
"learning_rate": 1.9823915769827672e-06, |
|
"loss": 0.3635, |
|
"step": 1495 |
|
}, |
|
{ |
|
"epoch": 0.7381889763779528, |
|
"grad_norm": 2.0523238713671135, |
|
"learning_rate": 1.948245499707523e-06, |
|
"loss": 0.3486, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.7406496062992126, |
|
"grad_norm": 2.08663663246033, |
|
"learning_rate": 1.9143247603581925e-06, |
|
"loss": 0.3517, |
|
"step": 1505 |
|
}, |
|
{ |
|
"epoch": 0.7431102362204725, |
|
"grad_norm": 2.247439527318004, |
|
"learning_rate": 1.8806318636018666e-06, |
|
"loss": 0.3485, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 0.7455708661417323, |
|
"grad_norm": 2.2948321577508355, |
|
"learning_rate": 1.8471692972820027e-06, |
|
"loss": 0.3554, |
|
"step": 1515 |
|
}, |
|
{ |
|
"epoch": 0.7480314960629921, |
|
"grad_norm": 2.1604348165019065, |
|
"learning_rate": 1.8139395322347335e-06, |
|
"loss": 0.3489, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 0.750492125984252, |
|
"grad_norm": 2.125600663304225, |
|
"learning_rate": 1.780945022106424e-06, |
|
"loss": 0.3589, |
|
"step": 1525 |
|
}, |
|
{ |
|
"epoch": 0.7529527559055118, |
|
"grad_norm": 1.9910694715906239, |
|
"learning_rate": 1.7481882031724929e-06, |
|
"loss": 0.3483, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 0.7554133858267716, |
|
"grad_norm": 2.024386438154311, |
|
"learning_rate": 1.7156714941575292e-06, |
|
"loss": 0.3582, |
|
"step": 1535 |
|
}, |
|
{ |
|
"epoch": 0.7578740157480315, |
|
"grad_norm": 2.236178264159934, |
|
"learning_rate": 1.6833972960566868e-06, |
|
"loss": 0.3535, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 0.7603346456692913, |
|
"grad_norm": 2.0509148837648876, |
|
"learning_rate": 1.6513679919583975e-06, |
|
"loss": 0.3485, |
|
"step": 1545 |
|
}, |
|
{ |
|
"epoch": 0.7627952755905512, |
|
"grad_norm": 1.9020807079292121, |
|
"learning_rate": 1.6195859468684199e-06, |
|
"loss": 0.3496, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 0.765255905511811, |
|
"grad_norm": 2.1302050953871694, |
|
"learning_rate": 1.588053507535195e-06, |
|
"loss": 0.3385, |
|
"step": 1555 |
|
}, |
|
{ |
|
"epoch": 0.7677165354330708, |
|
"grad_norm": 2.184409457671906, |
|
"learning_rate": 1.5567730022765753e-06, |
|
"loss": 0.342, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 0.7701771653543307, |
|
"grad_norm": 2.1109842289764735, |
|
"learning_rate": 1.5257467408078996e-06, |
|
"loss": 0.3461, |
|
"step": 1565 |
|
}, |
|
{ |
|
"epoch": 0.7726377952755905, |
|
"grad_norm": 1.9447642578789646, |
|
"learning_rate": 1.494977014071441e-06, |
|
"loss": 0.3321, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 0.7750984251968503, |
|
"grad_norm": 2.160598206464441, |
|
"learning_rate": 1.4644660940672628e-06, |
|
"loss": 0.3463, |
|
"step": 1575 |
|
}, |
|
{ |
|
"epoch": 0.7775590551181102, |
|
"grad_norm": 1.9954045529577038, |
|
"learning_rate": 1.434216233685441e-06, |
|
"loss": 0.354, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 0.78001968503937, |
|
"grad_norm": 1.8705318043615158, |
|
"learning_rate": 1.4042296665397187e-06, |
|
"loss": 0.3354, |
|
"step": 1585 |
|
}, |
|
{ |
|
"epoch": 0.78248031496063, |
|
"grad_norm": 2.131526940157942, |
|
"learning_rate": 1.374508606802586e-06, |
|
"loss": 0.326, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 0.7849409448818898, |
|
"grad_norm": 1.9854582148960758, |
|
"learning_rate": 1.3450552490417712e-06, |
|
"loss": 0.3286, |
|
"step": 1595 |
|
}, |
|
{ |
|
"epoch": 0.7874015748031497, |
|
"grad_norm": 2.005367208929083, |
|
"learning_rate": 1.3158717680582128e-06, |
|
"loss": 0.3422, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 0.7898622047244095, |
|
"grad_norm": 2.0697719484600614, |
|
"learning_rate": 1.286960318725471e-06, |
|
"loss": 0.3405, |
|
"step": 1605 |
|
}, |
|
{ |
|
"epoch": 0.7923228346456693, |
|
"grad_norm": 2.1676514262257656, |
|
"learning_rate": 1.2583230358306053e-06, |
|
"loss": 0.3519, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 0.7947834645669292, |
|
"grad_norm": 2.030399197550805, |
|
"learning_rate": 1.2299620339165607e-06, |
|
"loss": 0.3371, |
|
"step": 1615 |
|
}, |
|
{ |
|
"epoch": 0.797244094488189, |
|
"grad_norm": 1.8800725084378718, |
|
"learning_rate": 1.201879407126012e-06, |
|
"loss": 0.3387, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 0.7997047244094488, |
|
"grad_norm": 2.1095666405799345, |
|
"learning_rate": 1.1740772290467518e-06, |
|
"loss": 0.343, |
|
"step": 1625 |
|
}, |
|
{ |
|
"epoch": 0.8021653543307087, |
|
"grad_norm": 2.036216759579623, |
|
"learning_rate": 1.1465575525585743e-06, |
|
"loss": 0.3254, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 0.8046259842519685, |
|
"grad_norm": 1.9725419805321742, |
|
"learning_rate": 1.119322409681689e-06, |
|
"loss": 0.3377, |
|
"step": 1635 |
|
}, |
|
{ |
|
"epoch": 0.8070866141732284, |
|
"grad_norm": 2.261947410750145, |
|
"learning_rate": 1.0923738114266824e-06, |
|
"loss": 0.3414, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 0.8095472440944882, |
|
"grad_norm": 1.9967677016675829, |
|
"learning_rate": 1.0657137476460272e-06, |
|
"loss": 0.3427, |
|
"step": 1645 |
|
}, |
|
{ |
|
"epoch": 0.812007874015748, |
|
"grad_norm": 1.9329332696972708, |
|
"learning_rate": 1.0393441868871507e-06, |
|
"loss": 0.3309, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 0.8144685039370079, |
|
"grad_norm": 2.080216309757756, |
|
"learning_rate": 1.0132670762470875e-06, |
|
"loss": 0.3403, |
|
"step": 1655 |
|
}, |
|
{ |
|
"epoch": 0.8169291338582677, |
|
"grad_norm": 1.9963077191898775, |
|
"learning_rate": 9.874843412286994e-07, |
|
"loss": 0.3395, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 0.8193897637795275, |
|
"grad_norm": 2.1468283823628584, |
|
"learning_rate": 9.619978855985017e-07, |
|
"loss": 0.3365, |
|
"step": 1665 |
|
}, |
|
{ |
|
"epoch": 0.8218503937007874, |
|
"grad_norm": 2.030201170967621, |
|
"learning_rate": 9.368095912460934e-07, |
|
"loss": 0.3343, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 0.8243110236220472, |
|
"grad_norm": 2.144207392793775, |
|
"learning_rate": 9.119213180451974e-07, |
|
"loss": 0.3365, |
|
"step": 1675 |
|
}, |
|
{ |
|
"epoch": 0.8267716535433071, |
|
"grad_norm": 2.139359174863488, |
|
"learning_rate": 8.87334903716332e-07, |
|
"loss": 0.3344, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 0.8292322834645669, |
|
"grad_norm": 2.0756053748869028, |
|
"learning_rate": 8.630521636911171e-07, |
|
"loss": 0.3396, |
|
"step": 1685 |
|
}, |
|
{ |
|
"epoch": 0.8316929133858267, |
|
"grad_norm": 1.9417094509672648, |
|
"learning_rate": 8.390748909782204e-07, |
|
"loss": 0.3319, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 0.8341535433070866, |
|
"grad_norm": 1.904699920697159, |
|
"learning_rate": 8.154048560309669e-07, |
|
"loss": 0.3383, |
|
"step": 1695 |
|
}, |
|
{ |
|
"epoch": 0.8366141732283464, |
|
"grad_norm": 1.9020653291530694, |
|
"learning_rate": 7.920438066166097e-07, |
|
"loss": 0.3349, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 0.8390748031496063, |
|
"grad_norm": 2.00777989664957, |
|
"learning_rate": 7.689934676872768e-07, |
|
"loss": 0.337, |
|
"step": 1705 |
|
}, |
|
{ |
|
"epoch": 0.8415354330708661, |
|
"grad_norm": 2.1247990851762677, |
|
"learning_rate": 7.462555412526062e-07, |
|
"loss": 0.3299, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 0.843996062992126, |
|
"grad_norm": 2.0897419693416004, |
|
"learning_rate": 7.238317062540661e-07, |
|
"loss": 0.3261, |
|
"step": 1715 |
|
}, |
|
{ |
|
"epoch": 0.8464566929133859, |
|
"grad_norm": 2.243164262785359, |
|
"learning_rate": 7.017236184409859e-07, |
|
"loss": 0.3378, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 0.8489173228346457, |
|
"grad_norm": 1.9439608610518924, |
|
"learning_rate": 6.799329102482988e-07, |
|
"loss": 0.3206, |
|
"step": 1725 |
|
}, |
|
{ |
|
"epoch": 0.8513779527559056, |
|
"grad_norm": 2.001456213276007, |
|
"learning_rate": 6.584611906760036e-07, |
|
"loss": 0.3224, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 0.8538385826771654, |
|
"grad_norm": 1.9357999946640365, |
|
"learning_rate": 6.373100451703601e-07, |
|
"loss": 0.3514, |
|
"step": 1735 |
|
}, |
|
{ |
|
"epoch": 0.8562992125984252, |
|
"grad_norm": 1.8706183982176696, |
|
"learning_rate": 6.164810355068179e-07, |
|
"loss": 0.3355, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 0.8587598425196851, |
|
"grad_norm": 1.9372386803072519, |
|
"learning_rate": 5.959756996746996e-07, |
|
"loss": 0.3383, |
|
"step": 1745 |
|
}, |
|
{ |
|
"epoch": 0.8612204724409449, |
|
"grad_norm": 1.9978519375008006, |
|
"learning_rate": 5.757955517636365e-07, |
|
"loss": 0.329, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 0.8636811023622047, |
|
"grad_norm": 2.260582620281468, |
|
"learning_rate": 5.559420818517702e-07, |
|
"loss": 0.3352, |
|
"step": 1755 |
|
}, |
|
{ |
|
"epoch": 0.8661417322834646, |
|
"grad_norm": 1.905722889331543, |
|
"learning_rate": 5.364167558957267e-07, |
|
"loss": 0.3241, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 0.8686023622047244, |
|
"grad_norm": 2.08290721357846, |
|
"learning_rate": 5.172210156223745e-07, |
|
"loss": 0.3336, |
|
"step": 1765 |
|
}, |
|
{ |
|
"epoch": 0.8710629921259843, |
|
"grad_norm": 2.0264311598023728, |
|
"learning_rate": 4.983562784223645e-07, |
|
"loss": 0.3191, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 0.8735236220472441, |
|
"grad_norm": 1.9306929816740388, |
|
"learning_rate": 4.798239372454738e-07, |
|
"loss": 0.3256, |
|
"step": 1775 |
|
}, |
|
{ |
|
"epoch": 0.8759842519685039, |
|
"grad_norm": 1.9166412725439652, |
|
"learning_rate": 4.6162536049775387e-07, |
|
"loss": 0.3284, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 0.8784448818897638, |
|
"grad_norm": 1.9640277798271495, |
|
"learning_rate": 4.437618919404851e-07, |
|
"loss": 0.3233, |
|
"step": 1785 |
|
}, |
|
{ |
|
"epoch": 0.8809055118110236, |
|
"grad_norm": 1.9101328543862819, |
|
"learning_rate": 4.262348505909608e-07, |
|
"loss": 0.326, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 0.8833661417322834, |
|
"grad_norm": 1.9078892149815414, |
|
"learning_rate": 4.0904553062508677e-07, |
|
"loss": 0.3273, |
|
"step": 1795 |
|
}, |
|
{ |
|
"epoch": 0.8858267716535433, |
|
"grad_norm": 1.92469620169423, |
|
"learning_rate": 3.9219520128182087e-07, |
|
"loss": 0.3222, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 0.8882874015748031, |
|
"grad_norm": 1.8872478826399433, |
|
"learning_rate": 3.756851067694606e-07, |
|
"loss": 0.3233, |
|
"step": 1805 |
|
}, |
|
{ |
|
"epoch": 0.890748031496063, |
|
"grad_norm": 2.1313117581123366, |
|
"learning_rate": 3.5951646617376603e-07, |
|
"loss": 0.3309, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 0.8932086614173228, |
|
"grad_norm": 1.857899228482062, |
|
"learning_rate": 3.436904733679436e-07, |
|
"loss": 0.3231, |
|
"step": 1815 |
|
}, |
|
{ |
|
"epoch": 0.8956692913385826, |
|
"grad_norm": 1.9630046166560744, |
|
"learning_rate": 3.2820829692449984e-07, |
|
"loss": 0.3314, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 0.8981299212598425, |
|
"grad_norm": 1.878464206381759, |
|
"learning_rate": 3.130710800289416e-07, |
|
"loss": 0.315, |
|
"step": 1825 |
|
}, |
|
{ |
|
"epoch": 0.9005905511811023, |
|
"grad_norm": 1.948299936140257, |
|
"learning_rate": 2.982799403953801e-07, |
|
"loss": 0.3245, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 0.9030511811023622, |
|
"grad_norm": 1.9803613486774017, |
|
"learning_rate": 2.8383597018398876e-07, |
|
"loss": 0.3106, |
|
"step": 1835 |
|
}, |
|
{ |
|
"epoch": 0.905511811023622, |
|
"grad_norm": 1.9228830866585032, |
|
"learning_rate": 2.697402359203638e-07, |
|
"loss": 0.3206, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 0.9079724409448819, |
|
"grad_norm": 1.8821637790876162, |
|
"learning_rate": 2.559937784167743e-07, |
|
"loss": 0.3158, |
|
"step": 1845 |
|
}, |
|
{ |
|
"epoch": 0.9104330708661418, |
|
"grad_norm": 2.0523446694418364, |
|
"learning_rate": 2.4259761269530667e-07, |
|
"loss": 0.3182, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 0.9128937007874016, |
|
"grad_norm": 1.9067478250870236, |
|
"learning_rate": 2.2955272791291894e-07, |
|
"loss": 0.33, |
|
"step": 1855 |
|
}, |
|
{ |
|
"epoch": 0.9153543307086615, |
|
"grad_norm": 1.8161748223299183, |
|
"learning_rate": 2.1686008728840301e-07, |
|
"loss": 0.3195, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 0.9178149606299213, |
|
"grad_norm": 1.9778227776433959, |
|
"learning_rate": 2.0452062803126005e-07, |
|
"loss": 0.3229, |
|
"step": 1865 |
|
}, |
|
{ |
|
"epoch": 0.9202755905511811, |
|
"grad_norm": 1.8169343313243027, |
|
"learning_rate": 1.9253526127249787e-07, |
|
"loss": 0.328, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 0.922736220472441, |
|
"grad_norm": 1.9674779916821903, |
|
"learning_rate": 1.8090487199735663e-07, |
|
"loss": 0.3235, |
|
"step": 1875 |
|
}, |
|
{ |
|
"epoch": 0.9251968503937008, |
|
"grad_norm": 2.066617055366464, |
|
"learning_rate": 1.6963031897995863e-07, |
|
"loss": 0.3216, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 0.9276574803149606, |
|
"grad_norm": 1.9662299527781584, |
|
"learning_rate": 1.5871243471990372e-07, |
|
"loss": 0.3279, |
|
"step": 1885 |
|
}, |
|
{ |
|
"epoch": 0.9301181102362205, |
|
"grad_norm": 2.0282199981854414, |
|
"learning_rate": 1.4815202538079e-07, |
|
"loss": 0.314, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 0.9325787401574803, |
|
"grad_norm": 1.9766928164218986, |
|
"learning_rate": 1.379498707306942e-07, |
|
"loss": 0.3152, |
|
"step": 1895 |
|
}, |
|
{ |
|
"epoch": 0.9350393700787402, |
|
"grad_norm": 1.8773781832541783, |
|
"learning_rate": 1.28106724084594e-07, |
|
"loss": 0.3286, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 0.9375, |
|
"grad_norm": 1.9756880092340237, |
|
"learning_rate": 1.1862331224873902e-07, |
|
"loss": 0.3211, |
|
"step": 1905 |
|
}, |
|
{ |
|
"epoch": 0.9399606299212598, |
|
"grad_norm": 1.946646764579541, |
|
"learning_rate": 1.0950033546699213e-07, |
|
"loss": 0.3243, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 0.9424212598425197, |
|
"grad_norm": 1.8898884090335948, |
|
"learning_rate": 1.0073846736911697e-07, |
|
"loss": 0.3274, |
|
"step": 1915 |
|
}, |
|
{ |
|
"epoch": 0.9448818897637795, |
|
"grad_norm": 1.816110689404615, |
|
"learning_rate": 9.233835492104326e-08, |
|
"loss": 0.3157, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 0.9473425196850394, |
|
"grad_norm": 1.903238646884656, |
|
"learning_rate": 8.430061837709058e-08, |
|
"loss": 0.3158, |
|
"step": 1925 |
|
}, |
|
{ |
|
"epoch": 0.9498031496062992, |
|
"grad_norm": 1.8739160354182305, |
|
"learning_rate": 7.662585123417609e-08, |
|
"loss": 0.3136, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 0.952263779527559, |
|
"grad_norm": 2.015422876061845, |
|
"learning_rate": 6.931462018798407e-08, |
|
"loss": 0.3274, |
|
"step": 1935 |
|
}, |
|
{ |
|
"epoch": 0.9547244094488189, |
|
"grad_norm": 1.7724513355973053, |
|
"learning_rate": 6.236746509112824e-08, |
|
"loss": 0.3288, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 0.9571850393700787, |
|
"grad_norm": 1.9490022383605574, |
|
"learning_rate": 5.5784898913284754e-08, |
|
"loss": 0.3268, |
|
"step": 1945 |
|
}, |
|
{ |
|
"epoch": 0.9596456692913385, |
|
"grad_norm": 2.00148527557363, |
|
"learning_rate": 4.9567407703319247e-08, |
|
"loss": 0.327, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 0.9621062992125984, |
|
"grad_norm": 1.9582200223538588, |
|
"learning_rate": 4.3715450553393765e-08, |
|
"loss": 0.3252, |
|
"step": 1955 |
|
}, |
|
{ |
|
"epoch": 0.9645669291338582, |
|
"grad_norm": 1.837922483605557, |
|
"learning_rate": 3.8229459565070074e-08, |
|
"loss": 0.31, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 0.9670275590551181, |
|
"grad_norm": 1.910793470177359, |
|
"learning_rate": 3.3109839817404564e-08, |
|
"loss": 0.318, |
|
"step": 1965 |
|
}, |
|
{ |
|
"epoch": 0.969488188976378, |
|
"grad_norm": 1.8816675788362491, |
|
"learning_rate": 2.8356969337035578e-08, |
|
"loss": 0.3262, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 0.9719488188976378, |
|
"grad_norm": 1.991419593684435, |
|
"learning_rate": 2.3971199070271234e-08, |
|
"loss": 0.3217, |
|
"step": 1975 |
|
}, |
|
{ |
|
"epoch": 0.9744094488188977, |
|
"grad_norm": 1.8312406446386904, |
|
"learning_rate": 1.99528528571763e-08, |
|
"loss": 0.3267, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 0.9768700787401575, |
|
"grad_norm": 1.9479093178370912, |
|
"learning_rate": 1.6302227407660744e-08, |
|
"loss": 0.3209, |
|
"step": 1985 |
|
}, |
|
{ |
|
"epoch": 0.9793307086614174, |
|
"grad_norm": 1.9735060238944024, |
|
"learning_rate": 1.3019592279569503e-08, |
|
"loss": 0.3243, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 0.9817913385826772, |
|
"grad_norm": 2.043603259800287, |
|
"learning_rate": 1.0105189858779507e-08, |
|
"loss": 0.323, |
|
"step": 1995 |
|
}, |
|
{ |
|
"epoch": 0.984251968503937, |
|
"grad_norm": 1.852960962616925, |
|
"learning_rate": 7.559235341302872e-09, |
|
"loss": 0.322, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.9867125984251969, |
|
"grad_norm": 1.8539802968962942, |
|
"learning_rate": 5.381916717395186e-09, |
|
"loss": 0.3222, |
|
"step": 2005 |
|
}, |
|
{ |
|
"epoch": 0.9891732283464567, |
|
"grad_norm": 1.9807602796212282, |
|
"learning_rate": 3.573394757676596e-09, |
|
"loss": 0.3203, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 0.9916338582677166, |
|
"grad_norm": 1.9586954229212994, |
|
"learning_rate": 2.1338030012596488e-09, |
|
"loss": 0.3272, |
|
"step": 2015 |
|
}, |
|
{ |
|
"epoch": 0.9940944881889764, |
|
"grad_norm": 1.966512685382069, |
|
"learning_rate": 1.0632477458888401e-09, |
|
"loss": 0.3285, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 0.9965551181102362, |
|
"grad_norm": 1.8305164111174446, |
|
"learning_rate": 3.618080400924484e-10, |
|
"loss": 0.3272, |
|
"step": 2025 |
|
}, |
|
{ |
|
"epoch": 0.9990157480314961, |
|
"grad_norm": 1.8345259182578462, |
|
"learning_rate": 2.9535677343872637e-11, |
|
"loss": 0.3234, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"eval_runtime": 3.3017, |
|
"eval_samples_per_second": 3.029, |
|
"eval_steps_per_second": 0.909, |
|
"step": 2032 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"step": 2032, |
|
"total_flos": 212729730170880.0, |
|
"train_loss": 0.4844009231218672, |
|
"train_runtime": 18284.3129, |
|
"train_samples_per_second": 1.778, |
|
"train_steps_per_second": 0.111 |
|
} |
|
], |
|
"logging_steps": 5, |
|
"max_steps": 2032, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 100, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 212729730170880.0, |
|
"train_batch_size": 4, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|