home-standard-results/old_res/04-04-2023-11:52:44_norbert2_house_0.006_1_50.txt
Started at: 11:52:44
({'architectures': ['BertForMaskedLM'], 'attention_probs_dropout_prob': 0.1, 'hidden_act': 'gelu', 'hidden_dropout_prob': 0.1, 'hidden_size': 768, 'initializer_range': 0.02, 'intermediate_size': 3072, 'max_position_embeddings': 512, 'model_type': 'bert', 'num_attention_heads': 12, 'num_hidden_layers': 12, 'type_vocab_size': 2, 'vocab_size': 50104, '_commit_hash': 'afb829e3d0b861bd5f8cda6522b32ca0b097d7eb'}, {})
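The tuple above is the Hugging Face model configuration the run started from: a BERT-style encoder with 12 layers, 12 attention heads, hidden size 768, and a 50104-token vocabulary, consistent with the "norbert2" tag in the file name. As a purely illustrative sketch (not part of the original run script), a config like this could be rebuilt with the transformers library; the checkpoint id ltg/norbert2 in the comment is an assumption:

    from transformers import BertConfig, BertForMaskedLM

    # Values copied from the config dump printed above.
    cfg = BertConfig(
        vocab_size=50104,
        hidden_size=768,
        num_hidden_layers=12,
        num_attention_heads=12,
        intermediate_size=3072,
        max_position_embeddings=512,
        type_vocab_size=2,
        hidden_act="gelu",
        hidden_dropout_prob=0.1,
        attention_probs_dropout_prob=0.1,
        initializer_range=0.02,
    )
    model = BertForMaskedLM(cfg)  # randomly initialised weights
    # In practice the pretrained checkpoint would be loaded instead, e.g.
    # model = BertForMaskedLM.from_pretrained("ltg/norbert2")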
Epoch: 0
Training loss: 0.19169998451283105 - MSE: 0.3224822171590252
Validation loss : 0.1576854281593114 - MSE: 0.3052585763866773
Epoch: 1
Training loss: 0.170721908933238 - MSE: 0.3106345883186025
Validation loss : 0.14974205638282 - MSE: 0.30164828824445067
Epoch: 2
Training loss: 0.16939535588026047 - MSE: 0.31008999561987205
Validation loss : 0.15034097363241017 - MSE: 0.30232355450380055
Epoch: 3
Training loss: 0.1694463299293267 - MSE: 0.3101087405316537
Validation loss : 0.15061823884025216 - MSE: 0.3026353720119914
Epoch: 4
Training loss: 0.16941545233130456 - MSE: 0.310085018434928
Validation loss : 0.15079936920665205 - MSE: 0.30283750300395695
Epoch: 5
Training loss: 0.16935620966710543 - MSE: 0.31004690146169944
Validation loss : 0.15084448363631964 - MSE: 0.3028875614759272
Epoch: 6
Training loss: 0.16949052591072886 - MSE: 0.3101781524663373
Validation loss : 0.15096625522710383 - MSE: 0.30302316902771054
Epoch: 7
Training loss: 0.16949893361643742 - MSE: 0.310188779241269
Validation loss : 0.15100383502431214 - MSE: 0.30306482790138034
Epoch: 8
Training loss: 0.1694979808440334 - MSE: 0.3101921144601653
Validation loss : 0.15103243081830442 - MSE: 0.30309663850448487
Epoch: 9
Training loss: 0.1694973421724219 - MSE: 0.3101948706230913
Validation loss : 0.1510547660291195 - MSE: 0.303121549986372
Epoch: 10
Training loss: 0.16949688792228698 - MSE: 0.31019701891325113
Validation loss : 0.15107234381139278 - MSE: 0.3031413168423569
Epoch: 11
Training loss: 0.16949651574617938 - MSE: 0.3101986945843064
Validation loss : 0.15108662215061486 - MSE: 0.3031575035779497
Epoch: 12
Training loss: 0.16949623556513535 - MSE: 0.3102000477910796
Validation loss : 0.1510984308551997 - MSE: 0.30317088319088725
Epoch: 13
Training loss: 0.16949602914483924 - MSE: 0.3102011731046972
Validation loss : 0.15110818133689463 - MSE: 0.30318192262302546
Epoch: 14
Training loss: 0.16949585684035953 - MSE: 0.31020210549164046
Validation loss : 0.15111643797717988 - MSE: 0.30319126495260207
Epoch: 15
Training loss: 0.1694957421798455 - MSE: 0.3102031148284955
Validation loss : 0.15112340752966702 - MSE: 0.30319914952951876
Epoch: 16
Training loss: 0.1694956130103061 - MSE: 0.3102041142706642
Validation loss : 0.15112938289530575 - MSE: 0.3032059101165032
Epoch: 17
Training loss: 0.16949550995701237 - MSE: 0.31020503356981505
Validation loss : 0.1511345561593771 - MSE: 0.30321175591188876
Epoch: 18
Training loss: 0.16949546227329657 - MSE: 0.31020586811368406
Validation loss : 0.15113910171203315 - MSE: 0.3032168940185329
Epoch: 19
Training loss: 0.1694954017275258 - MSE: 0.3102065734353139
Validation loss : 0.15114298881962895 - MSE: 0.30322128473881094
Epoch: 20
Training loss: 0.16949532439834195 - MSE: 0.3102071649483396
Validation loss : 0.15114647196605802 - MSE: 0.30322521829612015
Epoch: 21
Training loss: 0.16949528412599313 - MSE: 0.3102077097474076
Validation loss : 0.15114951902069151 - MSE: 0.3032286566226503
Epoch: 22
Training loss: 0.16949621936992595 - MSE: 0.310208494463055
Validation loss : 0.1511614411137998 - MSE: 0.30324212040386556
Epoch: 23
Training loss: 0.16949620082190162 - MSE: 0.3102095742453467
Validation loss : 0.15115444944240153 - MSE: 0.30323422767787633
Epoch: 24
Training loss: 0.1694950992339536 - MSE: 0.3102089707483183
Validation loss : 0.15115649672225118 - MSE: 0.30323653712503074
Epoch: 25
Training loss: 0.16949508558762702 - MSE: 0.3102093517922974
Validation loss : 0.15115846111439168 - MSE: 0.3032387536727583
Epoch: 26
Training loss: 0.16949506166734193 - MSE: 0.31020968383589725
Validation loss : 0.15116004645824432 - MSE: 0.3032405468179604
Epoch: 27
Training loss: 0.16949506578476806 - MSE: 0.3102100091384956
Validation loss : 0.1511616394855082 - MSE: 0.30324234275713025
Epoch: 28
Training loss: 0.1694950695884855 - MSE: 0.31021030116971815
Validation loss : 0.15116294636391103 - MSE: 0.30324382076605616
Epoch: 29
Training loss: 0.16949506362802105 - MSE: 0.31021054124398184
Validation loss : 0.15116422879509628 - MSE: 0.30324526722642986
Epoch: 30
Training loss: 0.16949505092282044 - MSE: 0.3102107802538768
Validation loss : 0.15116519923321903 - MSE: 0.3032463587364873
Epoch: 31
Training loss: 0.16949503472761104 - MSE: 0.3102109880028702
Validation loss : 0.15116616361774504 - MSE: 0.30324744815106897
Epoch: 32
Training loss: 0.1694950102975494 - MSE: 0.3102111595239082
Validation loss : 0.1511670839972794 - MSE: 0.3032484904174453
Epoch: 33
Training loss: 0.16949503817840628 - MSE: 0.3102113451182651
Validation loss : 0.15116780903190374 - MSE: 0.303249302414315
Epoch: 34
Training loss: 0.16949502108128447 - MSE: 0.3102115117116704
Validation loss : 0.15116860624402761 - MSE: 0.30325020067493824
Epoch: 35
Training loss: 0.16949496159428043 - MSE: 0.3102115773871586
Validation loss : 0.1511691091582179 - MSE: 0.30325077460247485
Epoch: 36
Training loss: 0.16949501370913103 - MSE: 0.31021177953836127
Validation loss : 0.15116953826509416 - MSE: 0.30325125679473786
Epoch: 37
Training loss: 0.16959698172776322 - MSE: 0.3102504026385966
Validation loss : 0.15178115805611014 - MSE: 0.30394958434726504
Epoch: 38
Training loss: 0.16956856172335774 - MSE: 0.3102638844444762
Validation loss : 0.15115563897415996 - MSE: 0.3032355727405047
Epoch: 39
Training loss: 0.1694874994064632 - MSE: 0.3102015640433627
Validation loss : 0.15116063226014376 - MSE: 0.30324120677641986
Epoch: 40
Training loss: 0.1694887787103653 - MSE: 0.31020338604539766
Validation loss : 0.1511625424027443 - MSE: 0.3032433639523333
Epoch: 41
Training loss: 0.16948962152788513 - MSE: 0.3102046410937282
Validation loss : 0.15116414008662105 - MSE: 0.30324516582868455
Epoch: 42
Training loss: 0.16949031372603618 - MSE: 0.3102056846480645
Validation loss : 0.15116540039889514 - MSE: 0.3032465870269334
Epoch: 43
Training loss: 0.1694909881996481 - MSE: 0.31020666497101834
Validation loss : 0.15116674755699933 - MSE: 0.3032481097393429
Epoch: 44
Training loss: 0.16949151315187153 - MSE: 0.31020746001498384
Validation loss : 0.15116780903190374 - MSE: 0.30324930253073035
Epoch: 45
Training loss: 0.16949198057777004 - MSE: 0.3102081938774621
Validation loss : 0.15116868331097066 - MSE: 0.3032502922937965
Epoch: 46
Training loss: 0.16949239694758467 - MSE: 0.3102088308429716
Validation loss : 0.1511694707442075 - MSE: 0.30325118054270206
Epoch: 47
Training loss: 0.16949274116440824 - MSE: 0.3102093728825653
Validation loss : 0.1511702830903232 - MSE: 0.30325209417014776
Epoch: 48
Training loss: 0.1694930295803045 - MSE: 0.310209832059088
Validation loss : 0.1511708211619407 - MSE: 0.30325270325511156
Epoch: 49
Training loss: 0.16949331807462792 - MSE: 0.3102102694751848
Validation loss : 0.15117131732404232 - MSE: 0.30325326158299504
Prediction MSE: 0.330
Finished at: 11:52:44
Time taken: 3135 s.
0 days 0 hours 52 minutes 15 seconds
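The human-readable breakdown on the last line follows from 3135 s = 52 × 60 + 15 s. A small helper like the one below (illustrative only, not taken from the original script) reproduces that formatting:

    def format_elapsed(seconds: int) -> str:
        # Split a raw second count into days / hours / minutes / seconds.
        days, rem = divmod(seconds, 86400)
        hours, rem = divmod(rem, 3600)
        minutes, secs = divmod(rem, 60)
        return f"{days} days {hours} hours {minutes} minutes {secs} seconds"

    print(format_elapsed(3135))  # -> "0 days 0 hours 52 minutes 15 seconds"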