Started at: 15:42:32
nb-bert-base, 5e-06, 128
({'_name_or_path': '/disk4/folder1/working/checkpoints/huggingface/native_pytorch/step4_8/', 'attention_probs_dropout_prob': 0.1, 'directionality': 'bidi', 'gradient_checkpointing': False, 'hidden_act': 'gelu', 'hidden_dropout_prob': 0.1, 'hidden_size': 768, 'initializer_range': 0.02, 'intermediate_size': 3072, 'layer_norm_eps': 1e-12, 'max_position_embeddings': 512, 'model_type': 'bert', 'num_attention_heads': 12, 'num_hidden_layers': 12, 'pad_token_id': 0, 'pooler_fc_size': 768, 'pooler_num_attention_heads': 12, 'pooler_num_fc_layers': 3, 'pooler_size_per_head': 128, 'pooler_type': 'first_token_transform', 'position_embedding_type': 'absolute', 'type_vocab_size': 2, 'vocab_size': 119547, '_commit_hash': '82b194c0b3ea1fcad65f1eceee04adb26f9f71ac'}, {})
Epoch: 0
Training loss: 0.33608432710170744 - MAE: 0.43243840742639095
Validation loss: 0.1794234970036675 - MAE: 0.32278704808536035
Epoch: 1
Training loss: 0.1674842369556427 - MAE: 0.31167954456795716
Validation loss: 0.1640064299983137 - MAE: 0.3063768331901366
Epoch: 2
Training loss: 0.1580326673388481 - MAE: 0.3004621400964959
Validation loss: 0.15081930116695516 - MAE: 0.2939987153720106
Epoch: 3
Training loss: 0.14433746680617332 - MAE: 0.2853955953382289
Validation loss: 0.13955745363936706 - MAE: 0.2805828900770382
Epoch: 4
Training loss: 0.12773224733769895 - MAE: 0.2682634582590704
Validation loss: 0.12777050628381617 - MAE: 0.26856869819032614
Epoch: 5
Training loss: 0.11507055133581162 - MAE: 0.2548698064814896
Validation loss: 0.11819195002317429 - MAE: 0.2559857516534626
Epoch: 6
Training loss: 0.10605520464479923 - MAE: 0.24685413996018704
Validation loss: 0.11385800119708567 - MAE: 0.2514239431805721
Epoch: 7
Training loss: 0.09987761311233044 - MAE: 0.23910935102620393
Validation loss: 0.10999320841887418 - MAE: 0.2490825267050983
Epoch: 8
Training loss: 0.09503072924911976 - MAE: 0.23424281710060202
Validation loss: 0.10815327263930265 - MAE: 0.24874807522133224
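
The sketch below shows the kind of fine-tuning loop that could emit a log in this format. It is only an illustration under several assumptions not stated in the log: the model is loaded from the public NbAiLab/nb-bert-base checkpoint (the log actually points at a local path), the 128 is the maximum sequence length, the target is a single regression value trained with MSE loss, and AdamW with lr=5e-06 is the optimizer. The helper make_loader and the example texts/targets are hypothetical placeholders.

import torch
from torch.optim import AdamW
from torch.utils.data import DataLoader, TensorDataset
from transformers import AutoModelForSequenceClassification, AutoTokenizer

device = "cuda" if torch.cuda.is_available() else "cpu"
# Assumption: public checkpoint name; the log uses a local checkpoint path.
model = AutoModelForSequenceClassification.from_pretrained(
    "NbAiLab/nb-bert-base", num_labels=1  # num_labels=1 -> regression head
).to(device)
tokenizer = AutoTokenizer.from_pretrained("NbAiLab/nb-bert-base")
optimizer = AdamW(model.parameters(), lr=5e-06)  # 5e-06 from the log header
loss_fn = torch.nn.MSELoss()

def make_loader(texts, targets, batch_size=16):
    # Assumption: 128 in the log header is the max sequence length.
    enc = tokenizer(texts, padding="max_length", truncation=True,
                    max_length=128, return_tensors="pt")
    ds = TensorDataset(enc["input_ids"], enc["attention_mask"],
                       torch.tensor(targets, dtype=torch.float))
    return DataLoader(ds, batch_size=batch_size, shuffle=True)

def run_epoch(loader, train=True):
    """Run one pass over the loader; return (mean loss, mean absolute error)."""
    model.train(train)
    total_loss, total_mae, n_batches = 0.0, 0.0, 0
    for input_ids, attention_mask, labels in loader:
        input_ids = input_ids.to(device)
        attention_mask = attention_mask.to(device)
        labels = labels.to(device)
        with torch.set_grad_enabled(train):
            preds = model(input_ids, attention_mask=attention_mask).logits.squeeze(-1)
            loss = loss_fn(preds, labels)
        if train:
            optimizer.zero_grad()
            loss.backward()
            optimizer.step()
        total_loss += loss.item()
        total_mae += (preds - labels).abs().mean().item()
        n_batches += 1
    return total_loss / n_batches, total_mae / n_batches

# Hypothetical placeholder data; replace with the real dataset.
train_loader = make_loader(["eksempeltekst en", "eksempeltekst to"], [0.3, 0.7])
val_loader = make_loader(["eksempeltekst tre", "eksempeltekst fire"], [0.4, 0.6])

for epoch in range(9):  # the log shows epochs 0-8
    train_loss, train_mae = run_epoch(train_loader, train=True)
    val_loss, val_mae = run_epoch(val_loader, train=False)
    print(f"Epoch: {epoch}")
    print(f"Training loss: {train_loss} - MAE: {train_mae}")
    print(f"Validation loss: {val_loss} - MAE: {val_mae}")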