Started at: 12:14:32
norbert2, 0.001, 64
({'architectures': ['BertForMaskedLM'], 'attention_probs_dropout_prob': 0.1, 'hidden_act': 'gelu', 'hidden_dropout_prob': 0.1, 'hidden_size': 768, 'initializer_range': 0.02, 'intermediate_size': 3072, 'max_position_embeddings': 512, 'model_type': 'bert', 'num_attention_heads': 12, 'num_hidden_layers': 12, 'type_vocab_size': 2, 'vocab_size': 50104, '_commit_hash': 'f22bb47f536f62edfcd86ca9320ade990eafbe22'}, {})
Epoch: 0
Training loss: 0.2346923259472606 - MAE: 0.3705308435463924
Validation loss: 0.19074462873466086 - MAE: 0.3313150927797273
Epoch: 1
Training loss: 0.17915461460749307 - MAE: 0.32204418629646675
Validation loss: 0.1686817996881225 - MAE: 0.3079816741843133
Epoch: 2
Training loss: 0.16708625108003616 - MAE: 0.3104278141877798
Validation loss: 0.15879175066947937 - MAE: 0.29951784275998267
Epoch: 3
Training loss: 0.1608594950250905 - MAE: 0.3030867682949858
Validation loss: 0.1521104287469026 - MAE: 0.29517622301587365
Epoch: 4
Training loss: 0.15881932037647326 - MAE: 0.3016115035604243
Validation loss: 0.151956861669367 - MAE: 0.2931966101881672
Epoch: 5
Training loss: 0.15447019379247318 - MAE: 0.29864073465130664
Validation loss: 0.14964995217142682 - MAE: 0.29184392887804894
Epoch: 6
Training loss: 0.1534611372785135 - MAE: 0.2953557769595899
Validation loss: 0.1529107755332282 - MAE: 0.2926261188857105
Epoch: 7
Training loss: 0.15290822758518083 - MAE: 0.29560101824705004
Validation loss: 0.1476953180901932 - MAE: 0.29014511293691564
Epoch: 8
Training loss: 0.1538246487728273 - MAE: 0.29723563080095333
Validation loss: 0.1460684903643348 - MAE: 0.287791014285968
Epoch: 9
Training loss: 0.15038134737147224 - MAE: 0.2936421786979355
Validation loss: 0.14928820909875812 - MAE: 0.2951354237269502
Epoch: 10
Training loss: 0.14904259724749458 - MAE: 0.2924159131298703
Validation loss: 0.14602639413241184 - MAE: 0.289262284213569
Epoch: 11
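
The log above does not include the script that produced it. Below is a minimal sketch (not the original code) of how per-epoch figures in this format are typically generated: a single-output regression head on top of a norbert2-style BERT encoder, MSE as the training loss, and MAE as the reported metric. The checkpoint name "ltg/norbert2", the AdamW optimizer, the dataloader fields, and the mapping of the logged values 0.001 and 64 to learning rate and batch size are assumptions, not facts from the log.

# Minimal sketch under the assumptions stated above; dataloaders are supplied by the caller.
import torch
from torch import nn
from transformers import AutoModel

class RegressionModel(nn.Module):
    def __init__(self, encoder_name="ltg/norbert2"):  # checkpoint name is an assumption
        super().__init__()
        self.encoder = AutoModel.from_pretrained(encoder_name)
        self.head = nn.Linear(self.encoder.config.hidden_size, 1)

    def forward(self, input_ids, attention_mask):
        out = self.encoder(input_ids=input_ids, attention_mask=attention_mask)
        # Regress from the [CLS] token representation.
        return self.head(out.last_hidden_state[:, 0]).squeeze(-1)

def run_epoch(model, loader, device, optimizer=None):
    """One pass over `loader`; trains if an optimizer is given. Returns (mean MSE, mean MAE)."""
    mse, mae = nn.MSELoss(), nn.L1Loss()
    model.train(optimizer is not None)
    total_loss = total_mae = 0.0
    with torch.set_grad_enabled(optimizer is not None):
        for batch in loader:
            ids = batch["input_ids"].to(device)
            mask = batch["attention_mask"].to(device)
            targets = batch["labels"].float().to(device)
            preds = model(ids, mask)
            loss = mse(preds, targets)
            if optimizer is not None:
                optimizer.zero_grad()
                loss.backward()
                optimizer.step()
            total_loss += loss.item()
            total_mae += mae(preds, targets).item()
    return total_loss / len(loader), total_mae / len(loader)

def train(train_loader, val_loader, lr=0.001, num_epochs=12):
    # lr=0.001 matches the logged value; batch size 64 is assumed to be set in the dataloaders.
    device = "cuda" if torch.cuda.is_available() else "cpu"
    model = RegressionModel().to(device)
    optimizer = torch.optim.AdamW(model.parameters(), lr=lr)
    for epoch in range(num_epochs):
        train_loss, train_mae = run_epoch(model, train_loader, device, optimizer)
        val_loss, val_mae = run_epoch(model, val_loader, device)
        print(f"Epoch: {epoch}")
        print(f"Training loss: {train_loss} - MAE: {train_mae}")
        print(f"Validation loss: {val_loss} - MAE: {val_mae}")
    return model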