home-standard-results / 08-05-2023_norbert_batchsize_128_all_units_0.0001_1_200_freeze_True_earlystop_3.txt
Started at: 12:12:18
({'architectures': ['BertForMaskedLM'], 'attention_probs_dropout_prob': 0.1, 'hidden_act': 'gelu', 'hidden_dropout_prob': 0.1, 'hidden_size': 768, 'initializer_range': 0.02, 'intermediate_size': 3072, 'max_position_embeddings': 512, 'model_type': 'bert', 'num_attention_heads': 12, 'num_hidden_layers': 12, 'type_vocab_size': 2, 'vocab_size': 32922, '_commit_hash': '075d4e3705390691013e859faffc5696d071e33b'}, {})
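The tuple above is the model configuration printed at start-up. Its shape, a (config_dict, unused_kwargs) pair, matches what the transformers library's PretrainedConfig.get_config_dict returns, so a line like this could have been produced by something like the sketch below. This is a minimal sketch only: the checkpoint path is a placeholder, since the exact NorBERT checkpoint is not recorded in this log.

# A minimal sketch, assuming the Hugging Face transformers library; the
# checkpoint path is a placeholder, not the exact NorBERT checkpoint used here.
from transformers import BertConfig

CHECKPOINT = "path/to/norbert-checkpoint"  # hypothetical local path or hub id

# get_config_dict() returns a (config_dict, unused_kwargs) pair, which is the
# same shape as the tuple printed at the top of this log.
config_dict, unused_kwargs = BertConfig.get_config_dict(CHECKPOINT)
print((config_dict, unused_kwargs))

# The configuration itself can then be instantiated from the dictionary.
config = BertConfig.from_dict(config_dict)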
Epoch: 0
Training loss: 1.1006720215082169 - MAE: 0.9385178221693342
Validation loss : 0.3364674378843868 - MAE: 0.4550059916177428
Epoch: 1
Training loss: 0.2378660583496094 - MAE: 0.37276055966635346
Validation loss : 0.1963765437112135 - MAE: 0.34272739887256237
Epoch: 2
Training loss: 0.19169337004423143 - MAE: 0.3349637829761448
Validation loss : 0.1931756366701687 - MAE: 0.34055783097602843
Epoch: 3
Training loss: 0.18910380512475966 - MAE: 0.3318428617734502
Validation loss : 0.18860271222451153 - MAE: 0.3358887044598495
Epoch: 4
Training loss: 0.186059138327837 - MAE: 0.32876787579915384
Validation loss : 0.1856619251124999 - MAE: 0.33292138410324873
Epoch: 5
Training loss: 0.18313051164150237 - MAE: 0.32580828382716187
Validation loss : 0.1833767566610785 - MAE: 0.3306058463127525
Epoch: 6
Training loss: 0.18097147583961487 - MAE: 0.32398510773591493
Validation loss : 0.1812712576459436 - MAE: 0.3284241419338835
Epoch: 7
Training loss: 0.17751911029219627 - MAE: 0.32037782771308876
Validation loss : 0.17958090322859147 - MAE: 0.3267873033045649
Epoch: 8
Training loss: 0.1786111931502819 - MAE: 0.321583163490193
Validation loss : 0.17803318272618687 - MAE: 0.32520436183204793
Epoch: 9
Training loss: 0.17398411139845849 - MAE: 0.31673606421156436
Validation loss : 0.17673131735885844 - MAE: 0.3238730944125878
Epoch: 10
Training loss: 0.17270002499222756 - MAE: 0.31745671014834814
Validation loss : 0.17539521964157329 - MAE: 0.3224941170700243
Epoch: 11
Training loss: 0.17052328288555146 - MAE: 0.3148727480077667
Validation loss : 0.174127253539422 - MAE: 0.3211262794601334
Epoch: 12
Training loss: 0.16809561610221863 - MAE: 0.311770730424323
Validation loss : 0.1734507837716271 - MAE: 0.3205848137252918
Epoch: 13
Training loss: 0.16760414585471153 - MAE: 0.3136055973164791
Validation loss : 0.17252265530474045 - MAE: 0.3196495827839378
Epoch: 14
Training loss: 0.1682390233874321 - MAE: 0.3112908435346189
Validation loss : 0.1711487393168842 - MAE: 0.31795443358982844
Epoch: 15
Training loss: 0.16781458392739296 - MAE: 0.3113590658699254
Validation loss : 0.1707548902315252 - MAE: 0.3177303930498194
Epoch: 16
Training loss: 0.16693725898861886 - MAE: 0.31030240300129464
Validation loss : 0.16986496132962844 - MAE: 0.3166979421895762
Epoch: 17
Training loss: 0.16504903733730317 - MAE: 0.31016979911031045
Validation loss : 0.16925524350474863 - MAE: 0.31608280831472446
Epoch: 18
Training loss: 0.16303982958197594 - MAE: 0.30655030591901256
Validation loss : 0.1685786873978727 - MAE: 0.3153196465443914
Epoch: 19
Training loss: 0.1638409486413002 - MAE: 0.3080217884644864
Validation loss : 0.16801373134641087 - MAE: 0.314735410389949
Epoch: 20
Training loss: 0.1614493829011917 - MAE: 0.30555361713211904
Validation loss : 0.1675474293091718 - MAE: 0.3142234109718509
Epoch: 21
Training loss: 0.16097557842731475 - MAE: 0.30540769164567677
Validation loss : 0.1668663542060291 - MAE: 0.31328988497467153
Epoch: 22
Training loss: 0.15972515404224397 - MAE: 0.3035523077998348
Validation loss : 0.16639567999278798 - MAE: 0.31270315476408916
Epoch: 23
Training loss: 0.15968465387821199 - MAE: 0.3035132420493961
Validation loss : 0.16612719788270838 - MAE: 0.3124643849701637
Epoch: 24
Training loss: 0.16047569021582603 - MAE: 0.3044606072582237
Validation loss : 0.16531289544175654 - MAE: 0.3113139200118973
Epoch: 25
Training loss: 0.1594112578034401 - MAE: 0.30380153537656146
Validation loss : 0.16510769549538107 - MAE: 0.31113827420553686
Epoch: 26
Training loss: 0.15761824071407318 - MAE: 0.30175219485110094
Validation loss : 0.16493081169969895 - MAE: 0.3109766566528519
Epoch: 27
Training loss: 0.15642025277018548 - MAE: 0.2996838668675141
Validation loss : 0.16460545141907298 - MAE: 0.3105549194584687
Epoch: 28
Training loss: 0.1564825715124607 - MAE: 0.29999498344721554
Validation loss : 0.16410903238198338 - MAE: 0.3098771926366311
Epoch: 29
Training loss: 0.15597251012921334 - MAE: 0.3000410448533109
Validation loss : 0.16374634469256683 - MAE: 0.3094041646235353
Epoch: 30
Training loss: 0.15675333008170128 - MAE: 0.3009904709763715
Validation loss : 0.16385302824132583 - MAE: 0.30973321130028236
Epoch: 31
Training loss: 0.15694587096571921 - MAE: 0.2996392168091802
Validation loss : 0.16349118728848064 - MAE: 0.3092097406182407
Epoch: 32
Training loss: 0.15658894553780556 - MAE: 0.2999551267813354
Validation loss : 0.16314544002799428 - MAE: 0.308750879464887
Epoch: 33
Training loss: 0.15591454565525054 - MAE: 0.3000454994119826
Validation loss : 0.16286137261811426 - MAE: 0.30841066500734077
Epoch: 34
Training loss: 0.15509203642606736 - MAE: 0.29930140182204795
Validation loss : 0.16284336149692535 - MAE: 0.3085285183124655
Epoch: 35
Training loss: 0.15405697703361512 - MAE: 0.2988630300365712
Validation loss : 0.16246377238455942 - MAE: 0.3079503563975846
Epoch: 36
Training loss: 0.15352507963776588 - MAE: 0.29748678107055193
Validation loss : 0.16194848672432058 - MAE: 0.3071383866301113
Epoch: 37
Training loss: 0.15278249979019165 - MAE: 0.2962795231384297
Validation loss : 0.16206031862427206 - MAE: 0.30743531882524455
Epoch: 38
Training loss: 0.15470703780651093 - MAE: 0.2978209194206603
Validation loss : 0.16202253015602336 - MAE: 0.30753639777984476
Epoch: 39
Training loss: 0.1523985606431961 - MAE: 0.2964090176553577
Validation loss : 0.16172013650922215 - MAE: 0.30709932655101335
Epoch: 40
Training loss: 0.1530853621661663 - MAE: 0.297757612122841
Validation loss : 0.1613460998324787 - MAE: 0.30660857549224224
Epoch: 41
Training loss: 0.15282656386494636 - MAE: 0.297049206683197
Validation loss : 0.1608997815672089 - MAE: 0.30598334249346854
Epoch: 42
Training loss: 0.15108009725809096 - MAE: 0.29496736148642777
Validation loss : 0.16095499475212657 - MAE: 0.3062596970514697
Epoch: 43
Training loss: 0.15141472831368447 - MAE: 0.29662705431765973
Validation loss : 0.16070682117167642 - MAE: 0.30597615449664417
Epoch: 44
Training loss: 0.15191791325807572 - MAE: 0.2950182572471767
Validation loss : 0.1603742417167215 - MAE: 0.30558954112078246
Epoch: 45
Training loss: 0.15128972373902796 - MAE: 0.29699661967263946
Validation loss : 0.159849391264074 - MAE: 0.30474853877294245
Epoch: 46
Training loss: 0.15125623732805252 - MAE: 0.2951512536815187
Validation loss : 0.16034438154276678 - MAE: 0.3057817503470798
Epoch: 47
Training loss: 0.1518179516494274 - MAE: 0.2959772343677161
Validation loss : 0.16012860746944652 - MAE: 0.3054851252496958
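The log ends after epoch 47, with the lowest validation loss (about 0.1598) reached at epoch 45. The file name suggests early stopping on the validation loss with a patience of 3 ("earlystop_3") and an epoch cap of 200; below is a minimal sketch of such a loop, assuming that reading of the file name. The function name, the callable parameters, and the stopping criterion are illustrative assumptions, not taken from the actual training script.

import math
from typing import Callable, Tuple

def fit_with_early_stopping(
    train_one_epoch: Callable[[], Tuple[float, float]],  # returns (loss, MAE) on the training set
    evaluate: Callable[[], Tuple[float, float]],          # returns (loss, MAE) on the validation set
    max_epochs: int = 200,   # "200" in the file name is assumed to be the epoch cap
    patience: int = 3,       # "earlystop_3" is assumed to be the early-stopping patience
) -> float:
    """Train until the validation loss has not improved for `patience` epochs."""
    best_val_loss = math.inf
    epochs_without_improvement = 0

    for epoch in range(max_epochs):
        train_loss, train_mae = train_one_epoch()
        val_loss, val_mae = evaluate()

        # Mirrors the per-epoch lines in the log above.
        print(f"Epoch: {epoch}")
        print(f"Training loss: {train_loss} - MAE: {train_mae}")
        print(f"Validation loss : {val_loss} - MAE: {val_mae}")

        if val_loss < best_val_loss:
            best_val_loss = val_loss
            epochs_without_improvement = 0
        else:
            epochs_without_improvement += 1
            if epochs_without_improvement >= patience:
                break  # stop once validation loss has stalled for `patience` epochs

    return best_val_loss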