Started at: 15:24:47
({'architectures': ['BertForMaskedLM'], 'attention_probs_dropout_prob': 0.1, 'hidden_act': 'gelu', 'hidden_dropout_prob': 0.1, 'hidden_size': 768, 'initializer_range': 0.02, 'intermediate_size': 3072, 'max_position_embeddings': 512, 'model_type': 'bert', 'num_attention_heads': 12, 'num_hidden_layers': 12, 'type_vocab_size': 2, 'vocab_size': 50104, '_commit_hash': 'f22bb47f536f62edfcd86ca9320ade990eafbe22'}, {})

Epoch: 0
Training loss: 0.48961155652999877 - MAE: 0.5667749118208294
Validation loss: 0.2341225031349394 - MAE: 0.3800908488488268

Epoch: 1
Training loss: 0.23933333277702332 - MAE: 0.3816049737297095
Validation loss: 0.2157200359635883 - MAE: 0.36262968535385853

Epoch: 2
Training loss: 0.2227860403060913 - MAE: 0.362914211886953
Validation loss: 0.20576173894935185 - MAE: 0.351088469157324

Epoch: 3
Training loss: 0.21454387307167053 - MAE: 0.355829191485835
Validation loss: 0.20201933715078566 - MAE: 0.3476490434817412

Epoch: 4
Training loss: 0.20782955586910248 - MAE: 0.3492763092693279
Validation loss: 0.19840116136603886 - MAE: 0.34438782184298933

Epoch: 5
Training loss: 0.20974749684333802 - MAE: 0.3499676263302698
Validation loss: 0.19500345322820875 - MAE: 0.3412627339955124

Epoch: 6
Training loss: 0.20287642538547515 - MAE: 0.3440909850190504
Validation loss: 0.19372935758696663 - MAE: 0.3405082616642143

Epoch: 7
Training loss: 0.20112746179103852 - MAE: 0.3424945919935925
Validation loss: 0.19070245987839168 - MAE: 0.33745151530681705

Epoch: 8
Training loss: 0.19633454322814942 - MAE: 0.3393076079842827
Validation loss: 0.18811953398916456 - MAE: 0.33480433699694667

Epoch: 9
Training loss: 0.19513990223407746 - MAE: 0.33655391167095905
Validation loss: 0.18763755758603415 - MAE: 0.33493136829479836

Epoch: 10
Training loss: 0.1945616537332535 - MAE: 0.3339688628047326
Validation loss: 0.18520050081941816 - MAE: 0.3322033838398172

Epoch: 11
Training loss: 0.1919734501838684 - MAE: 0.3348057854308913
Validation loss: 0.18356686499383715 - MAE: 0.3305542036966663

Epoch: 12
Training loss: 0.18977773189544678 - MAE: 0.33093456128200255
Validation loss: 0.18238851262463462 - MAE: 0.3295210611243764

Epoch: 13
Training loss: 0.18959189653396608 - MAE: 0.33203631813041046
Validation loss: 0.18165012366241878 - MAE: 0.32894981523820216

Epoch: 14
Training loss: 0.188436096906662 - MAE: 0.3289166590553581
Validation loss: 0.18062874178091684 - MAE: 0.3279516450745761

Epoch: 15
Training loss: 0.1888059014081955 - MAE: 0.32938225827858314
Validation loss: 0.17884226640065512 - MAE: 0.32618892569948843

Epoch: 16
Training loss: 0.18457180321216582 - MAE: 0.32768304932122155
Validation loss: 0.1773804161283705 - MAE: 0.3244846629889762

Epoch: 17
Training loss: 0.1834845507144928 - MAE: 0.32513390306967505
Validation loss: 0.17784527275297377 - MAE: 0.325394248134103

Epoch: 18
Training loss: 0.17817132532596588 - MAE: 0.3205999365056764
Validation loss: 0.1772983885473675 - MAE: 0.3250120704872884

Epoch: 19
Training loss: 0.18241925418376922 - MAE: 0.32608043975119405
Validation loss: 0.1762605524725384 - MAE: 0.3239821380292207

Epoch: 20
Training loss: 0.1826972782611847 - MAE: 0.3236272201051217
Validation loss: 0.17492684887515175 - MAE: 0.32241326907282936

Epoch: 21
Training loss: 0.17828085482120515 - MAE: 0.32068803213274405
Validation loss: 0.17474046183956993 - MAE: 0.322295118924069

Epoch: 22
Training loss: 0.1751816302537918 - MAE: 0.3174733856779282
Validation loss: 0.17336992588308123 - MAE: 0.3207070293999617

Epoch: 23
Training loss: 0.17363301634788514 - MAE: 0.316815118397366
Validation loss: 0.17201685905456543 - MAE: 0.3189948372036725

Epoch: 24
Training loss: 0.1767018473148346 - MAE: 0.31913121630235014
Validation loss: 0.17214929395251805 - MAE: 0.31945983617544055

Epoch: 25
Training loss: 0.17439736306667328 - MAE: 0.31661741321206066
Validation loss: 0.17139916784233517 - MAE: 0.31865817509617683

Epoch: 26
Training loss: 0.1737448263168335 - MAE: 0.31659987435815284
Validation loss: 0.1706781718466017 - MAE: 0.318036288529697

Epoch: 27
Training loss: 0.17146194338798523 - MAE: 0.3118728405354431
Validation loss: 0.1697299944029914 - MAE: 0.31685192311272076

Epoch: 28
Training loss: 0.1718020862340927 - MAE: 0.3131538647314398
Validation loss: 0.16994635926352608 - MAE: 0.3175874889959908

Epoch: 29
Training loss: 0.1683420032262802 - MAE: 0.31101584504763863
Validation loss: 0.16925568216376835 - MAE: 0.3167970568735372

Epoch: 30
Training loss: 0.16941484928131104 - MAE: 0.3106103617384786
Validation loss: 0.1684651639726427 - MAE: 0.3159602735208132

Epoch: 31
Training loss: 0.1693895423412323 - MAE: 0.3118286356685513
Validation loss: 0.16787751846843296 - MAE: 0.3153364280899408

Epoch: 32
Training loss: 0.16764336854219436 - MAE: 0.3100005090011756
Validation loss: 0.1668115324444241 - MAE: 0.3138394321127746

Epoch: 33
Training loss: 0.16786101460456848 - MAE: 0.30754219991076703
Validation loss: 0.16722293363677132 - MAE: 0.3147444447232888

Epoch: 34
Training loss: 0.16658054411411286 - MAE: 0.3089389341184129
Validation loss: 0.1656692756546868 - MAE: 0.31278805971203405

Epoch: 35
Training loss: 0.16597358763217926 - MAE: 0.3086932329966142
Validation loss: 0.1659683949417538 - MAE: 0.31345250928240936

Epoch: 36
Training loss: 0.16379758477210998 - MAE: 0.30606558255167177
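For context, below is a minimal sketch of the kind of PyTorch loop that emits log lines in the three-line pattern above (epoch header, training loss/MAE, validation loss/MAE). It assumes a regression fine-tune of the BERT encoder described in the config, with MSE as the training objective and MAE as the reported metric; the BertRegressor head, optimizer settings, and dataloaders are illustrative assumptions, not details taken from this log.

```python
# Hypothetical sketch only: reproduces the log format above, not the exact run.
import torch
from torch import nn
from transformers import AutoModel


class BertRegressor(nn.Module):
    """BERT encoder with a single-output regression head (assumed architecture)."""

    def __init__(self, checkpoint: str):
        super().__init__()
        self.encoder = AutoModel.from_pretrained(checkpoint)
        self.head = nn.Linear(self.encoder.config.hidden_size, 1)

    def forward(self, input_ids, attention_mask):
        hidden = self.encoder(input_ids=input_ids, attention_mask=attention_mask)
        cls = hidden.last_hidden_state[:, 0]   # [CLS] representation
        return self.head(cls).squeeze(-1)      # one score per example


def run_epoch(model, loader, device, optimizer=None):
    """One pass over `loader`; returns (mean loss, mean MAE). Trains if an optimizer is given."""
    training = optimizer is not None
    model.train(training)
    mse, l1 = nn.MSELoss(), nn.L1Loss()
    total_loss, total_mae, batches = 0.0, 0.0, 0
    with torch.set_grad_enabled(training):
        for batch in loader:
            ids = batch["input_ids"].to(device)
            mask = batch["attention_mask"].to(device)
            target = batch["labels"].float().to(device)
            pred = model(ids, mask)
            loss = mse(pred, target)
            if training:
                optimizer.zero_grad()
                loss.backward()
                optimizer.step()
            total_loss += loss.item()
            total_mae += l1(pred, target).item()
            batches += 1
    return total_loss / batches, total_mae / batches


def train(model, train_loader, val_loader, device, epochs=37, lr=2e-5):
    # epochs=37 matches the 0..36 range in the log; the learning rate is an assumption.
    optimizer = torch.optim.AdamW(model.parameters(), lr=lr)
    for epoch in range(epochs):
        train_loss, train_mae = run_epoch(model, train_loader, device, optimizer)
        val_loss, val_mae = run_epoch(model, val_loader, device)
        # Same three-line pattern as the log above.
        print(f"Epoch: {epoch}")
        print(f"Training loss: {train_loss} - MAE: {train_mae}")
        print(f"Validation loss: {val_loss} - MAE: {val_mae}")
```

Under these assumptions, the steadily decreasing training and validation MAE in the log is simply the per-epoch average of `nn.L1Loss` over each dataloader, reported alongside the MSE objective.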