{
"best_metric": 0.27312836050987244,
"best_model_checkpoint": "vit_epochs1_batch32_lr5e-05_size224_tiles10_seed1_q3_dropout_v2_test11\\checkpoint-469",
"epoch": 1.0,
"eval_steps": 500,
"global_step": 469,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.010660980810234541,
"grad_norm": 2.002079963684082,
"learning_rate": 4.9466950959488276e-05,
"loss": 0.3909,
"step": 5
},
{
"epoch": 0.021321961620469083,
"grad_norm": 0.9621203541755676,
"learning_rate": 4.893390191897655e-05,
"loss": 0.1535,
"step": 10
},
{
"epoch": 0.031982942430703626,
"grad_norm": 0.5087786912918091,
"learning_rate": 4.840085287846482e-05,
"loss": 0.0848,
"step": 15
},
{
"epoch": 0.042643923240938165,
"grad_norm": 0.3503257632255554,
"learning_rate": 4.78678038379531e-05,
"loss": 0.0567,
"step": 20
},
{
"epoch": 0.053304904051172705,
"grad_norm": 0.2416423112154007,
"learning_rate": 4.7334754797441364e-05,
"loss": 0.0344,
"step": 25
},
{
"epoch": 0.06396588486140725,
"grad_norm": 0.3675519824028015,
"learning_rate": 4.6801705756929645e-05,
"loss": 0.0263,
"step": 30
},
{
"epoch": 0.07462686567164178,
"grad_norm": 0.17501111328601837,
"learning_rate": 4.626865671641791e-05,
"loss": 0.0262,
"step": 35
},
{
"epoch": 0.08528784648187633,
"grad_norm": 1.7560172080993652,
"learning_rate": 4.5735607675906185e-05,
"loss": 0.0274,
"step": 40
},
{
"epoch": 0.09594882729211088,
"grad_norm": 0.12559843063354492,
"learning_rate": 4.520255863539446e-05,
"loss": 0.0159,
"step": 45
},
{
"epoch": 0.10660980810234541,
"grad_norm": 9.90837574005127,
"learning_rate": 4.466950959488273e-05,
"loss": 0.0229,
"step": 50
},
{
"epoch": 0.11727078891257996,
"grad_norm": 19.459095001220703,
"learning_rate": 4.4136460554371006e-05,
"loss": 0.0845,
"step": 55
},
{
"epoch": 0.1279317697228145,
"grad_norm": 0.15913185477256775,
"learning_rate": 4.360341151385928e-05,
"loss": 0.0649,
"step": 60
},
{
"epoch": 0.13859275053304904,
"grad_norm": 0.10663477331399918,
"learning_rate": 4.307036247334755e-05,
"loss": 0.0129,
"step": 65
},
{
"epoch": 0.14925373134328357,
"grad_norm": 0.09200357645750046,
"learning_rate": 4.253731343283582e-05,
"loss": 0.0284,
"step": 70
},
{
"epoch": 0.15991471215351813,
"grad_norm": 8.210641860961914,
"learning_rate": 4.2004264392324094e-05,
"loss": 0.0336,
"step": 75
},
{
"epoch": 0.17057569296375266,
"grad_norm": 0.12569181621074677,
"learning_rate": 4.147121535181237e-05,
"loss": 0.0281,
"step": 80
},
{
"epoch": 0.1812366737739872,
"grad_norm": 0.44192206859588623,
"learning_rate": 4.093816631130064e-05,
"loss": 0.0292,
"step": 85
},
{
"epoch": 0.19189765458422176,
"grad_norm": 0.15402592718601227,
"learning_rate": 4.0405117270788915e-05,
"loss": 0.0495,
"step": 90
},
{
"epoch": 0.2025586353944563,
"grad_norm": 17.383081436157227,
"learning_rate": 3.987206823027719e-05,
"loss": 0.0904,
"step": 95
},
{
"epoch": 0.21321961620469082,
"grad_norm": 0.07923204451799393,
"learning_rate": 3.9339019189765456e-05,
"loss": 0.0503,
"step": 100
},
{
"epoch": 0.22388059701492538,
"grad_norm": 0.07286439090967178,
"learning_rate": 3.8805970149253736e-05,
"loss": 0.0317,
"step": 105
},
{
"epoch": 0.2345415778251599,
"grad_norm": 1.0418739318847656,
"learning_rate": 3.8272921108742e-05,
"loss": 0.0346,
"step": 110
},
{
"epoch": 0.24520255863539445,
"grad_norm": 3.832359552383423,
"learning_rate": 3.7739872068230284e-05,
"loss": 0.0988,
"step": 115
},
{
"epoch": 0.255863539445629,
"grad_norm": 0.06542658805847168,
"learning_rate": 3.720682302771855e-05,
"loss": 0.0075,
"step": 120
},
{
"epoch": 0.26652452025586354,
"grad_norm": 9.610258102416992,
"learning_rate": 3.6673773987206824e-05,
"loss": 0.0269,
"step": 125
},
{
"epoch": 0.2771855010660981,
"grad_norm": 0.17276552319526672,
"learning_rate": 3.61407249466951e-05,
"loss": 0.0231,
"step": 130
},
{
"epoch": 0.2878464818763326,
"grad_norm": 0.08185400068759918,
"learning_rate": 3.560767590618337e-05,
"loss": 0.0755,
"step": 135
},
{
"epoch": 0.29850746268656714,
"grad_norm": 0.07418997585773468,
"learning_rate": 3.5074626865671645e-05,
"loss": 0.0424,
"step": 140
},
{
"epoch": 0.3091684434968017,
"grad_norm": 2.326040267944336,
"learning_rate": 3.454157782515991e-05,
"loss": 0.1041,
"step": 145
},
{
"epoch": 0.31982942430703626,
"grad_norm": 1.4266926050186157,
"learning_rate": 3.400852878464819e-05,
"loss": 0.0466,
"step": 150
},
{
"epoch": 0.3304904051172708,
"grad_norm": 25.644779205322266,
"learning_rate": 3.347547974413646e-05,
"loss": 0.0492,
"step": 155
},
{
"epoch": 0.3411513859275053,
"grad_norm": 4.534906387329102,
"learning_rate": 3.294243070362473e-05,
"loss": 0.033,
"step": 160
},
{
"epoch": 0.35181236673773986,
"grad_norm": 10.8761625289917,
"learning_rate": 3.240938166311301e-05,
"loss": 0.0232,
"step": 165
},
{
"epoch": 0.3624733475479744,
"grad_norm": 0.3756234645843506,
"learning_rate": 3.187633262260128e-05,
"loss": 0.0225,
"step": 170
},
{
"epoch": 0.373134328358209,
"grad_norm": 11.933363914489746,
"learning_rate": 3.1343283582089554e-05,
"loss": 0.0702,
"step": 175
},
{
"epoch": 0.3837953091684435,
"grad_norm": 2.681648015975952,
"learning_rate": 3.081023454157783e-05,
"loss": 0.1197,
"step": 180
},
{
"epoch": 0.39445628997867804,
"grad_norm": 0.5134532451629639,
"learning_rate": 3.0277185501066102e-05,
"loss": 0.0339,
"step": 185
},
{
"epoch": 0.4051172707889126,
"grad_norm": 13.620699882507324,
"learning_rate": 2.9744136460554372e-05,
"loss": 0.0276,
"step": 190
},
{
"epoch": 0.4157782515991471,
"grad_norm": 0.09190113842487335,
"learning_rate": 2.9211087420042642e-05,
"loss": 0.0086,
"step": 195
},
{
"epoch": 0.42643923240938164,
"grad_norm": 0.10281543433666229,
"learning_rate": 2.867803837953092e-05,
"loss": 0.0129,
"step": 200
},
{
"epoch": 0.43710021321961623,
"grad_norm": 6.414993762969971,
"learning_rate": 2.814498933901919e-05,
"loss": 0.0555,
"step": 205
},
{
"epoch": 0.44776119402985076,
"grad_norm": 0.06216156855225563,
"learning_rate": 2.7611940298507467e-05,
"loss": 0.0076,
"step": 210
},
{
"epoch": 0.4584221748400853,
"grad_norm": 12.08731746673584,
"learning_rate": 2.7078891257995737e-05,
"loss": 0.0403,
"step": 215
},
{
"epoch": 0.4690831556503198,
"grad_norm": 0.10379679501056671,
"learning_rate": 2.6545842217484007e-05,
"loss": 0.0404,
"step": 220
},
{
"epoch": 0.47974413646055436,
"grad_norm": 0.06435238569974899,
"learning_rate": 2.6012793176972285e-05,
"loss": 0.0072,
"step": 225
},
{
"epoch": 0.4904051172707889,
"grad_norm": 0.07799974828958511,
"learning_rate": 2.5479744136460555e-05,
"loss": 0.0209,
"step": 230
},
{
"epoch": 0.5010660980810234,
"grad_norm": 0.05462189391255379,
"learning_rate": 2.494669509594883e-05,
"loss": 0.006,
"step": 235
},
{
"epoch": 0.511727078891258,
"grad_norm": 0.0564899705350399,
"learning_rate": 2.44136460554371e-05,
"loss": 0.0277,
"step": 240
},
{
"epoch": 0.5223880597014925,
"grad_norm": 0.6151730418205261,
"learning_rate": 2.3880597014925373e-05,
"loss": 0.0118,
"step": 245
},
{
"epoch": 0.5330490405117271,
"grad_norm": 3.0278522968292236,
"learning_rate": 2.3347547974413646e-05,
"loss": 0.0635,
"step": 250
},
{
"epoch": 0.5437100213219617,
"grad_norm": 23.011789321899414,
"learning_rate": 2.281449893390192e-05,
"loss": 0.0231,
"step": 255
},
{
"epoch": 0.5543710021321961,
"grad_norm": 2.13313889503479,
"learning_rate": 2.2281449893390194e-05,
"loss": 0.0391,
"step": 260
},
{
"epoch": 0.5650319829424307,
"grad_norm": 0.14648886024951935,
"learning_rate": 2.1748400852878467e-05,
"loss": 0.0054,
"step": 265
},
{
"epoch": 0.5756929637526652,
"grad_norm": 0.046188462525606155,
"learning_rate": 2.1215351812366738e-05,
"loss": 0.0069,
"step": 270
},
{
"epoch": 0.5863539445628998,
"grad_norm": 0.047539375722408295,
"learning_rate": 2.068230277185501e-05,
"loss": 0.0067,
"step": 275
},
{
"epoch": 0.5970149253731343,
"grad_norm": 0.051974523812532425,
"learning_rate": 2.0149253731343285e-05,
"loss": 0.0055,
"step": 280
},
{
"epoch": 0.6076759061833689,
"grad_norm": 0.049149394035339355,
"learning_rate": 1.961620469083156e-05,
"loss": 0.0053,
"step": 285
},
{
"epoch": 0.6183368869936035,
"grad_norm": 0.041319601237773895,
"learning_rate": 1.9083155650319832e-05,
"loss": 0.0221,
"step": 290
},
{
"epoch": 0.6289978678038379,
"grad_norm": 0.05550704523921013,
"learning_rate": 1.8550106609808106e-05,
"loss": 0.0212,
"step": 295
},
{
"epoch": 0.6396588486140725,
"grad_norm": 0.04463819041848183,
"learning_rate": 1.8017057569296376e-05,
"loss": 0.0348,
"step": 300
},
{
"epoch": 0.650319829424307,
"grad_norm": 2.7232251167297363,
"learning_rate": 1.7484008528784647e-05,
"loss": 0.0339,
"step": 305
},
{
"epoch": 0.6609808102345416,
"grad_norm": 0.04294276237487793,
"learning_rate": 1.695095948827292e-05,
"loss": 0.007,
"step": 310
},
{
"epoch": 0.6716417910447762,
"grad_norm": 0.04751422628760338,
"learning_rate": 1.6417910447761194e-05,
"loss": 0.0058,
"step": 315
},
{
"epoch": 0.6823027718550106,
"grad_norm": 0.06318726390600204,
"learning_rate": 1.5884861407249468e-05,
"loss": 0.0351,
"step": 320
},
{
"epoch": 0.6929637526652452,
"grad_norm": 0.04593655467033386,
"learning_rate": 1.535181236673774e-05,
"loss": 0.0084,
"step": 325
},
{
"epoch": 0.7036247334754797,
"grad_norm": 0.04337684437632561,
"learning_rate": 1.4818763326226012e-05,
"loss": 0.0048,
"step": 330
},
{
"epoch": 0.7142857142857143,
"grad_norm": 1.3650192022323608,
"learning_rate": 1.4285714285714285e-05,
"loss": 0.0054,
"step": 335
},
{
"epoch": 0.7249466950959488,
"grad_norm": 0.04110950231552124,
"learning_rate": 1.3752665245202559e-05,
"loss": 0.0048,
"step": 340
},
{
"epoch": 0.7356076759061834,
"grad_norm": 0.05691000819206238,
"learning_rate": 1.3219616204690833e-05,
"loss": 0.005,
"step": 345
},
{
"epoch": 0.746268656716418,
"grad_norm": 18.876996994018555,
"learning_rate": 1.2686567164179105e-05,
"loss": 0.031,
"step": 350
},
{
"epoch": 0.7569296375266524,
"grad_norm": 0.8067087531089783,
"learning_rate": 1.2153518123667377e-05,
"loss": 0.0403,
"step": 355
},
{
"epoch": 0.767590618336887,
"grad_norm": 0.04626353457570076,
"learning_rate": 1.162046908315565e-05,
"loss": 0.0049,
"step": 360
},
{
"epoch": 0.7782515991471215,
"grad_norm": 0.04179658740758896,
"learning_rate": 1.1087420042643924e-05,
"loss": 0.0047,
"step": 365
},
{
"epoch": 0.7889125799573561,
"grad_norm": 0.05049879848957062,
"learning_rate": 1.0554371002132196e-05,
"loss": 0.0049,
"step": 370
},
{
"epoch": 0.7995735607675906,
"grad_norm": 28.65748405456543,
"learning_rate": 1.002132196162047e-05,
"loss": 0.0207,
"step": 375
},
{
"epoch": 0.8102345415778252,
"grad_norm": 0.05166960135102272,
"learning_rate": 9.488272921108744e-06,
"loss": 0.0268,
"step": 380
},
{
"epoch": 0.8208955223880597,
"grad_norm": 0.03881421312689781,
"learning_rate": 8.955223880597016e-06,
"loss": 0.0206,
"step": 385
},
{
"epoch": 0.8315565031982942,
"grad_norm": 0.0383550263941288,
"learning_rate": 8.422174840085288e-06,
"loss": 0.0043,
"step": 390
},
{
"epoch": 0.8422174840085288,
"grad_norm": 0.04295238107442856,
"learning_rate": 7.889125799573561e-06,
"loss": 0.0042,
"step": 395
},
{
"epoch": 0.8528784648187633,
"grad_norm": 1.7127622365951538,
"learning_rate": 7.356076759061833e-06,
"loss": 0.0053,
"step": 400
},
{
"epoch": 0.8635394456289979,
"grad_norm": 0.049339987337589264,
"learning_rate": 6.823027718550107e-06,
"loss": 0.0044,
"step": 405
},
{
"epoch": 0.8742004264392325,
"grad_norm": 0.06354337185621262,
"learning_rate": 6.28997867803838e-06,
"loss": 0.0045,
"step": 410
},
{
"epoch": 0.8848614072494669,
"grad_norm": 0.1981351524591446,
"learning_rate": 5.756929637526653e-06,
"loss": 0.0054,
"step": 415
},
{
"epoch": 0.8955223880597015,
"grad_norm": 0.03931286185979843,
"learning_rate": 5.2238805970149255e-06,
"loss": 0.0208,
"step": 420
},
{
"epoch": 0.906183368869936,
"grad_norm": 0.0348835252225399,
"learning_rate": 4.690831556503199e-06,
"loss": 0.0157,
"step": 425
},
{
"epoch": 0.9168443496801706,
"grad_norm": 0.03531981259584427,
"learning_rate": 4.157782515991471e-06,
"loss": 0.0063,
"step": 430
},
{
"epoch": 0.9275053304904051,
"grad_norm": 4.256789684295654,
"learning_rate": 3.624733475479744e-06,
"loss": 0.0051,
"step": 435
},
{
"epoch": 0.9381663113006397,
"grad_norm": 0.03756505623459816,
"learning_rate": 3.0916844349680173e-06,
"loss": 0.0045,
"step": 440
},
{
"epoch": 0.9488272921108742,
"grad_norm": 0.16175320744514465,
"learning_rate": 2.55863539445629e-06,
"loss": 0.0075,
"step": 445
},
{
"epoch": 0.9594882729211087,
"grad_norm": 0.03726266324520111,
"learning_rate": 2.025586353944563e-06,
"loss": 0.0041,
"step": 450
},
{
"epoch": 0.9701492537313433,
"grad_norm": 19.503908157348633,
"learning_rate": 1.4925373134328358e-06,
"loss": 0.0224,
"step": 455
},
{
"epoch": 0.9808102345415778,
"grad_norm": 0.04280271381139755,
"learning_rate": 9.594882729211088e-07,
"loss": 0.0602,
"step": 460
},
{
"epoch": 0.9914712153518124,
"grad_norm": 0.03506707400083542,
"learning_rate": 4.264392324093817e-07,
"loss": 0.0233,
"step": 465
},
{
"epoch": 1.0,
"eval_accuracy": 0.9381333333333334,
"eval_loss": 0.27312836050987244,
"eval_runtime": 18.3733,
"eval_samples_per_second": 204.1,
"eval_steps_per_second": 6.422,
"step": 469
},
{
"epoch": 1.0,
"step": 469,
"total_flos": 1.16237984421888e+18,
"train_loss": 0.03354339535508964,
"train_runtime": 172.9262,
"train_samples_per_second": 86.742,
"train_steps_per_second": 2.712
}
],
"logging_steps": 5,
"max_steps": 469,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 500,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 1.16237984421888e+18,
"train_batch_size": 32,
"trial_name": null,
"trial_params": null
}