Started at: 10:20:56
norbert2, 0.001, 320
({'architectures': ['BertForMaskedLM'], 'attention_probs_dropout_prob': 0.1, 'hidden_act': 'gelu', 'hidden_dropout_prob': 0.1, 'hidden_size': 768, 'initializer_range': 0.02, 'intermediate_size': 3072, 'max_position_embeddings': 512, 'model_type': 'bert', 'num_attention_heads': 12, 'num_hidden_layers': 12, 'type_vocab_size': 2, 'vocab_size': 50104, '_commit_hash': 'f22bb47f536f62edfcd86ca9320ade990eafbe22'}, {})

Epoch: 0
Training loss: 0.3527084864675999 - MAE: 0.462953214037108
Validation loss: 0.26738621081624714 - MAE: 0.4117436401566978
Epoch: 1
Training loss: 0.21354530677199363 - MAE: 0.35412568088618607
Validation loss: 0.18583983182907104 - MAE: 0.3299781625243953
Epoch: 2
Training loss: 0.18886808156967164 - MAE: 0.330421134278654
Validation loss: 0.1822844914027623 - MAE: 0.33056134626981226
Epoch: 3
Training loss: 0.18264293819665908 - MAE: 0.3242538405037828
Validation loss: 0.1772120531116213 - MAE: 0.3251080552524876
Epoch: 4
Training loss: 0.17669695615768433 - MAE: 0.3190416206929785
Validation loss: 0.1738429559128625 - MAE: 0.322266401087556
Epoch: 5
Training loss: 0.17284812778234482 - MAE: 0.3136576419048662
Validation loss: 0.17294207853930338 - MAE: 0.32187514594462957
Epoch: 6
Training loss: 0.16915602013468742 - MAE: 0.3109971127550157
Validation loss: 0.16980814508029393 - MAE: 0.3186160670865416
Epoch: 7
Training loss: 0.16845666095614434 - MAE: 0.3089849145668241
Validation loss: 0.16714746824332646 - MAE: 0.3155506548087574
Epoch: 8
Training loss: 0.16868472024798392 - MAE: 0.30949657276040055
Validation loss: 0.16395465178149088 - MAE: 0.3112663647803296
Epoch: 9
Training loss: 0.16725652366876603 - MAE: 0.3066475873241304
Validation loss: 0.1607010385819844 - MAE: 0.3061224215087366
Epoch: 10
Training loss: 0.16232142224907875 - MAE: 0.30304105641340856
Validation loss: 0.1601495338337762 - MAE: 0.30464326769452593
Epoch: 11
Training loss: 0.1600735753774643 - MAE: 0.3024141941842313
Validation loss: 0.16005327233246394 - MAE: 0.30402928496694465
Epoch: 12
Training loss: 0.1590587668120861 - MAE: 0.30053309807554257
Validation loss: 0.16130509546824864 - MAE: 0.30322365490516023
Epoch: 13
Training loss: 0.1575142566114664 - MAE: 0.3000214052697603
Validation loss: 0.16007075778075627 - MAE: 0.3021056102178289
Epoch: 14
Training loss: 0.15866125449538232 - MAE: 0.2997664917473247
Validation loss: 0.15920681825705937 - MAE: 0.30173646847418345
Epoch: 15
Training loss: 0.15266035348176957 - MAE: 0.2944185943451775
Validation loss: 0.15711544241224015 - MAE: 0.3000104381366772
Epoch: 16
Training loss: 0.15278259553015233 - MAE: 0.29361855853063534
Validation loss: 0.1574518233537674 - MAE: 0.29962551415919003
Epoch: 17
Training loss: 0.15116168446838857 - MAE: 0.2937445197592836
Validation loss: 0.1565468375171934 - MAE: 0.29831129957449803
Epoch: 18
Training loss: 0.1503951381891966 - MAE: 0.2918888435388691
Validation loss: 0.15591060050896235 - MAE: 0.2980704319089749
Epoch: 19
Training loss: 0.14914484806358813 - MAE: 0.29063372038535307
Validation loss: 0.15442968904972076 - MAE: 0.2982528868297389
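
Below is a minimal sketch of the kind of fine-tuning loop that could produce a log like the one above. Everything in it is an assumption rather than something stated in the log: "norbert2" is read as the Hugging Face checkpoint "ltg/norbert2" (whose published config matches the dict printed above), 0.001 as the learning rate, 320 as the maximum sequence length, and the task as single-target regression trained with MSE loss while MAE is reported alongside it. The real dataset is not specified, so a tiny synthetic one stands in purely to make the sketch runnable.

# Sketch only; all names and hyperparameter interpretations below are assumptions.
import torch
from torch.utils.data import DataLoader, TensorDataset
from transformers import AutoModelForSequenceClassification, AutoTokenizer

MODEL_NAME = "ltg/norbert2"   # assumed checkpoint behind "norbert2"
LEARNING_RATE = 0.001         # assumed meaning of "0.001" in the header line
MAX_LENGTH = 320              # assumed meaning of "320" in the header line
EPOCHS = 20                   # the log shows epochs 0..19

tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
# The checkpoint ships a BertForMaskedLM head; loading it for sequence
# classification with num_labels=1 attaches a freshly initialised regression head.
model = AutoModelForSequenceClassification.from_pretrained(MODEL_NAME, num_labels=1)
optimizer = torch.optim.AdamW(model.parameters(), lr=LEARNING_RATE)
loss_fn = torch.nn.MSELoss()

# Placeholder data (hypothetical): a handful of sentences with random targets,
# standing in for the unspecified regression dataset.
texts = ["dette er et eksempel"] * 16
targets = torch.rand(16)
enc = tokenizer(texts, padding=True, truncation=True, max_length=MAX_LENGTH,
                return_tensors="pt")
dataset = TensorDataset(enc["input_ids"], enc["attention_mask"], targets)
train_loader = DataLoader(dataset, batch_size=8, shuffle=True)
val_loader = DataLoader(dataset, batch_size=8)

def run_epoch(loader, train):
    """One pass over `loader`; returns (mean MSE loss, mean MAE) per batch."""
    model.train(train)
    total_loss = total_mae = 0.0
    for input_ids, attention_mask, labels in loader:
        with torch.set_grad_enabled(train):
            preds = model(input_ids=input_ids,
                          attention_mask=attention_mask).logits.squeeze(-1)
            loss = loss_fn(preds, labels)
            if train:
                optimizer.zero_grad()
                loss.backward()
                optimizer.step()
        total_loss += loss.item()
        total_mae += (preds - labels).abs().mean().item()
    return total_loss / len(loader), total_mae / len(loader)

for epoch in range(EPOCHS):
    train_loss, train_mae = run_epoch(train_loader, train=True)
    val_loss, val_mae = run_epoch(val_loader, train=False)
    print(f"Epoch: {epoch}")
    print(f"Training loss: {train_loss} - MAE: {train_mae}")
    print(f"Validation loss: {val_loss} - MAE: {val_mae}")

The print statements mirror the per-epoch format of the log; the actual optimiser, batch size, loss function, and data pipeline used for the run above are not recorded here and may differ.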