Started at: 21:34:05
({'architectures': ['BertForMaskedLM'], 'attention_probs_dropout_prob': 0.1, 'hidden_act': 'gelu', 'hidden_dropout_prob': 0.1, 'hidden_size': 768, 'initializer_range': 0.02, 'intermediate_size': 3072, 'max_position_embeddings': 512, 'model_type': 'bert', 'num_attention_heads': 12, 'num_hidden_layers': 12, 'type_vocab_size': 2, 'vocab_size': 50104, '_commit_hash': 'f22bb47f536f62edfcd86ca9320ade990eafbe22'}, {})
Epoch: 0
Training loss: 0.32175946950912476 - MAE: 0.4437903503551826
Validation loss : 0.19621961481041378 - MAE: 0.3388283822410594
Epoch: 1
Training loss: 0.21072698533535003 - MAE: 0.35116530289867337
Validation loss : 0.1876767095592287 - MAE: 0.33628140968919695
Epoch: 2
Training loss: 0.1893942326307297 - MAE: 0.3312522516218666
Validation loss : 0.1748287214173211 - MAE: 0.3213797447248068
Epoch: 3
Training loss: 0.17969159841537474 - MAE: 0.3215382240664255
Validation loss : 0.1697687026527193 - MAE: 0.31446870224950263
Epoch: 4
Training loss: 0.17220519423484804 - MAE: 0.3129526796594085
Validation loss : 0.16811848680178323 - MAE: 0.3117262145282266
Epoch: 5
Training loss: 0.17296144247055054 - MAE: 0.31494873624825814
Validation loss : 0.16849000917540657 - MAE: 0.31104514179599985
Epoch: 6
Training loss: 0.16641035348176955 - MAE: 0.30768475494362546
Validation loss : 0.16421516074074638 - MAE: 0.30715220958263423
Epoch: 7
Training loss: 0.16057054400444032 - MAE: 0.30329494898002457
Validation loss : 0.161027698053254 - MAE: 0.304297376785587
Epoch: 8
Training loss: 0.1566170272231102 - MAE: 0.29957014870206655
Validation loss : 0.15982638630602095 - MAE: 0.30397747682822174
Epoch: 9
Training loss: 0.15673983812332154 - MAE: 0.3000540458404174
Validation loss : 0.159184659520785 - MAE: 0.3035235378542429
Epoch: 10
Training loss: 0.15630604147911073 - MAE: 0.29690365842430627
Validation loss : 0.15921704471111298 - MAE: 0.30240467396157955
Epoch: 11
Training loss: 0.15595020622015 - MAE: 0.29754135005638765
Validation loss : 0.15686986181471083 - MAE: 0.30119996351152656
Epoch: 12
Training loss: 0.15274528950452804 - MAE: 0.2944161972508913
Validation loss : 0.15652166141404045 - MAE: 0.3012713923501961
Epoch: 13
Training loss: 0.15205995708703995 - MAE: 0.29424324063629004
Validation loss : 0.1575678288936615 - MAE: 0.3004794443503115
Epoch: 14
Training loss: 0.15087180227041244 - MAE: 0.292504579327589
Validation loss : 0.15574459234873453 - MAE: 0.29932356111019714
Epoch: 15
Training loss: 0.1518820345401764 - MAE: 0.2940904971897683
Validation loss : 0.15581322544150883 - MAE: 0.30048684662514935
Epoch: 16
Training loss: 0.14916340589523316 - MAE: 0.2900369708605816
Validation loss : 0.15502946575482687 - MAE: 0.2986623058669649
Epoch: 17
Training loss: 0.14969733864068985 - MAE: 0.2916869417468108
Validation loss : 0.15631223718325296 - MAE: 0.2994015598567933
Epoch: 18
Training loss: 0.14621972769498826 - MAE: 0.2875086269286061
Validation loss : 0.1557358337773217 - MAE: 0.29931970436403404
Epoch: 19
Training loss: 0.14798570185899734 - MAE: 0.29024787778757843
Validation loss : 0.15424930387073094 - MAE: 0.2984811820958494
Epoch: 20
Training loss: 0.14769238144159316 - MAE: 0.2896109423923894
Validation loss : 0.1536422868569692 - MAE: 0.2987445844262027
Epoch: 21
Training loss: 0.14361368536949157 - MAE: 0.2842206176667335
Validation loss : 0.1533755792511834 - MAE: 0.29642898867488277
Epoch: 22
Training loss: 0.14456430345773696 - MAE: 0.2860957867005398
Validation loss : 0.1542035871081882 - MAE: 0.2971775973100257
Epoch: 23
Training loss: 0.1426533755660057 - MAE: 0.28512021628758083
Validation loss : 0.15245803362793392 - MAE: 0.2961228783995986
Epoch: 24
Training loss: 0.14542177826166153 - MAE: 0.2855342243309425
Validation loss : 0.15294104317824045 - MAE: 0.29643245601047447
Epoch: 25
Training loss: 0.14522234290838243 - MAE: 0.2851345876886818
Validation loss : 0.15342944694889915 - MAE: 0.2958033843008907
Epoch: 26
Training loss: 0.14386597275733948 - MAE: 0.2838243646140143
Validation loss : 0.1515038808186849 - MAE: 0.2936464496284943
Epoch: 27
Training loss: 0.1415349954366684 - MAE: 0.2810586437104536
Validation loss : 0.1507236527072059 - MAE: 0.29409404478735646
Epoch: 28
Training loss: 0.14115683376789093 - MAE: 0.2815221038197439
Validation loss : 0.15061336921321022 - MAE: 0.29280129784932213
Epoch: 29
Training loss: 0.14186541587114335 - MAE: 0.28239653007887255
Validation loss : 0.15151139597098032 - MAE: 0.2953867482779256
Epoch: 30
Training loss: 0.14276256740093232 - MAE: 0.283208699006944
Validation loss : 0.1504600097735723 - MAE: 0.29276670315056585
Epoch: 31
Training loss: 0.14241950929164887 - MAE: 0.28277882078383293
Validation loss : 0.1505332812666893 - MAE: 0.29293145394981956
Epoch: 32
Training loss: 0.14204519629478454 - MAE: 0.28192363607736504
Validation loss : 0.15044088496102226 - MAE: 0.2929076596067834
Epoch: 33
Training loss: 0.13920597046613692 - MAE: 0.2793298976614221
Validation loss : 0.15006180769867367 - MAE: 0.29292432343849906
Epoch: 34
Training loss: 0.1410813584923744 - MAE: 0.2815379053911258
Validation loss : 0.14930015967951882 - MAE: 0.2933547268525188
Epoch: 35
Training loss: 0.14150760889053346 - MAE: 0.2832031330097777
Validation loss : 0.15295125875208113 - MAE: 0.2936935655701593
Epoch: 36
Training loss: 0.1405733221769333 - MAE: 0.2821643593441243
Validation loss : 0.14867112785577774 - MAE: 0.2921335742814868
Epoch: 37
Training loss: 0.14206359952688216 - MAE: 0.28261241361934286
Validation loss : 0.14988169405195448 - MAE: 0.2927604772074144
Epoch: 38
Training loss: 0.14064243495464324 - MAE: 0.28185439579063915
Validation loss : 0.1515509072277281 - MAE: 0.29319069832718325
Epoch: 39
Training loss: 0.14297345608472825 - MAE: 0.282847817609895
Validation loss : 0.15042786465750801 - MAE: 0.29282722784595605
Epoch: 40
Training loss: 0.13990240722894667 - MAE: 0.27964537481988466
Validation loss : 0.14972211172183356 - MAE: 0.29260355981671565
Epoch: 41
Training loss: 0.13921964198350906 - MAE: 0.28078253547199195
Validation loss : 0.14965214083592096 - MAE: 0.2924700199853063
Epoch: 42
Training loss: 0.1393338233232498 - MAE: 0.28024463401390987
Validation loss : 0.1507701873779297 - MAE: 0.29355734742186085
Epoch: 43
Training loss: 0.1382669734954834 - MAE: 0.27992590956331337
Validation loss : 0.14967524343066746 - MAE: 0.29149855699855537
Epoch: 44
Training loss: 0.14029370576143266 - MAE: 0.28265328374425863
Validation loss : 0.1491319098406368 - MAE: 0.29219012845466735
Epoch: 45
Training loss: 0.14041882872581482 - MAE: 0.2809566443072132
Validation loss : 0.14969545520014232 - MAE: 0.2909081862153574
Epoch: 46
Training loss: 0.140560123026371 - MAE: 0.28191663739835304
Validation loss : 0.14977325167920855 - MAE: 0.291604275381065
Epoch: 47
Training loss: 0.1414617270231247 - MAE: 0.28282842532543745
Validation loss : 0.1506698860062493 - MAE: 0.2930552446484735
Epoch: 48
Training loss: 0.1373920688033104 - MAE: 0.27917131881687096
Validation loss : 0.15224417712953356 - MAE: 0.2934837599479946
Epoch: 49
Training loss: 0.14192317247390748 - MAE: 0.2842585649178634
Validation loss : 0.14885787086354363 - MAE: 0.29192342005218896
Epoch: 50
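A minimal sketch of how the per-epoch metrics above could be parsed back out of the log for plotting or model selection, assuming the line format shown above ("Epoch: N", "Training loss: X - MAE: Y", "Validation loss : Z - MAE: W"); the file name train.log and the helper parse_log are hypothetical, not part of the original run.

```python
import re

# Hypothetical path to a file containing the log lines shown above.
LOG_PATH = "train.log"

# One line per epoch header, then one line each for training and validation metrics.
EPOCH_RE = re.compile(r"^Epoch:\s*(\d+)")
METRIC_RE = re.compile(r"^(Training|Validation) loss\s*:\s*([\d.]+)\s*-\s*MAE:\s*([\d.]+)")

def parse_log(path):
    """Return a list of dicts with per-epoch training/validation loss and MAE."""
    records, current = [], None
    with open(path) as fh:
        for line in fh:
            m = EPOCH_RE.match(line)
            if m:
                current = {"epoch": int(m.group(1))}
                records.append(current)
                continue
            m = METRIC_RE.match(line)
            if m and current is not None:
                split = m.group(1).lower()  # "training" or "validation"
                current[f"{split}_loss"] = float(m.group(2))
                current[f"{split}_mae"] = float(m.group(3))
    # Drop incomplete entries, e.g. the trailing bare "Epoch: 50" line.
    return [r for r in records if len(r) == 5]

if __name__ == "__main__":
    rows = parse_log(LOG_PATH)
    best = min(rows, key=lambda r: r["validation_mae"])
    print(f"Best validation MAE {best['validation_mae']:.4f} at epoch {best['epoch']}")
```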