Started at: 00:24:23
({'architectures': ['BertForMaskedLM'], 'attention_probs_dropout_prob': 0.1, 'hidden_act': 'gelu', 'hidden_dropout_prob': 0.1, 'hidden_size': 768, 'initializer_range': 0.02, 'intermediate_size': 3072, 'max_position_embeddings': 512, 'model_type': 'bert', 'num_attention_heads': 12, 'num_hidden_layers': 12, 'type_vocab_size': 2, 'vocab_size': 32922, '_commit_hash': '075d4e3705390691013e859faffc5696d071e33b'}, {})
Epoch: 0
Training loss: 0.6361208361387253 - MAE: 0.6347873231265873
Validation loss: 0.33865517377853394 - MAE: 0.4740741020180635
Epoch: 1
Training loss: 0.21345499098300935 - MAE: 0.3540912858998663
Validation loss: 0.18473448190424177 - MAE: 0.3282540270842396
Epoch: 2
Training loss: 0.18606460213661194 - MAE: 0.32889828686188877
Validation loss: 0.18045285178555381 - MAE: 0.32713884374037794
Epoch: 3
Training loss: 0.17846946954727172 - MAE: 0.31904566117043975
Validation loss: 0.17773988511827257 - MAE: 0.32482577310153565
Epoch: 4
Training loss: 0.17139309346675874 - MAE: 0.31441827165194275
Validation loss: 0.17418616182274288 - MAE: 0.32107063596093277
Epoch: 5
Training loss: 0.17030848145484925 - MAE: 0.31198532199217943
Validation loss: 0.17218486799134147 - MAE: 0.3190577970415032
Epoch: 6
Training loss: 0.16878579378128053 - MAE: 0.31078672095469284
Validation loss: 0.17055005000697243 - MAE: 0.3174862826881529
Epoch: 7
Training loss: 0.1679440525174141 - MAE: 0.31018192416787727
Validation loss: 0.16925331950187683 - MAE: 0.31617688409581507
Epoch: 8
Training loss: 0.16459781765937806 - MAE: 0.3076978685246546
Validation loss: 0.16737455626328787 - MAE: 0.313686903818875
Epoch: 9
Training loss: 0.1616947764158249 - MAE: 0.3042913169405816
Validation loss: 0.16636485523647732 - MAE: 0.3125500280647287

Started at: 01:47:29
({'architectures': ['BertForMaskedLM'], 'attention_probs_dropout_prob': 0.1, 'hidden_act': 'gelu', 'hidden_dropout_prob': 0.1, 'hidden_size': 768, 'initializer_range': 0.02, 'intermediate_size': 3072, 'max_position_embeddings': 512, 'model_type': 'bert', 'num_attention_heads': 12, 'num_hidden_layers': 12, 'type_vocab_size': 2, 'vocab_size': 32922, '_commit_hash': '075d4e3705390691013e859faffc5696d071e33b'}, {})
Epoch: 0
Training loss: 0.6361208361387253 - MAE: 0.6347873231265873
Validation loss: 0.33865517377853394 - MAE: 0.4740741020180635
Epoch: 1
Training loss: 0.21345499098300935 - MAE: 0.3540912858998663
Validation loss: 0.18473448190424177 - MAE: 0.3282540270842396
Epoch: 2
Training loss: 0.18606460213661194 - MAE: 0.32889828686188877
Validation loss: 0.18045285178555381 - MAE: 0.32713884374037794
Epoch: 3
Training loss: 0.17846946954727172 - MAE: 0.31904566117043975
Validation loss: 0.17773988511827257 - MAE: 0.32482577310153565
Epoch: 4
Training loss: 0.17139309346675874 - MAE: 0.31441827165194275
Validation loss: 0.17418616182274288 - MAE: 0.32107063596093277
Epoch: 5
Training loss: 0.17030848145484925 - MAE: 0.31198532199217943
Validation loss: 0.17218486799134147 - MAE: 0.3190577970415032
Epoch: 6
Training loss: 0.16878579378128053 - MAE: 0.31078672095469284
Validation loss: 0.17055005000697243 - MAE: 0.3174862826881529
Epoch: 7
Training loss: 0.1679440525174141 - MAE: 0.31018192416787727
Validation loss: 0.16925331950187683 - MAE: 0.31617688409581507
Epoch: 8
Training loss: 0.16459781765937806 - MAE: 0.3076978685246546
Validation loss: 0.16737455626328787 - MAE: 0.313686903818875
Epoch: 9
Training loss: 0.1616947764158249 - MAE: 0.3042913169405816
Validation loss: 0.16636485523647732 - MAE: 0.3125500280647287
Epoch: 10
Training loss: 0.1614231312274933 - MAE: 0.30281138726351875
Validation loss: 0.16527396109369066 - MAE: 0.310935884275779
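The log above appears to come from fine-tuning the BERT checkpoint described in the config on a single-output regression task: a loss value and an MAE are reported per epoch for both the training and validation splits. The original training script is not included, so the following is only a minimal sketch of a loop that would emit log lines in this format. The choice of MSE as the training loss, the use of AutoModelForSequenceClassification with num_labels=1, the learning rate, and the checkpoint name in the usage comment are all assumptions, not details taken from the log.

import torch
from transformers import AutoModelForSequenceClassification

def run_epochs(model, train_loader, val_loader, num_epochs=10, lr=2e-5, device="cuda"):
    # Assumed setup: MSE as the optimized loss, MAE reported as a metric.
    model.to(device)
    optimizer = torch.optim.AdamW(model.parameters(), lr=lr)
    mse = torch.nn.MSELoss()
    mae = torch.nn.L1Loss()

    for epoch in range(num_epochs):
        # Training pass: accumulate mean loss and MAE over batches.
        model.train()
        train_loss, train_mae = 0.0, 0.0
        for batch in train_loader:
            batch = {k: v.to(device) for k, v in batch.items()}
            labels = batch.pop("labels").float()   # pop so the model does not compute its own loss
            preds = model(**batch).logits.squeeze(-1)
            loss = mse(preds, labels)
            optimizer.zero_grad()
            loss.backward()
            optimizer.step()
            train_loss += loss.item()
            train_mae += mae(preds, labels).item()

        # Validation pass: no gradient updates.
        model.eval()
        val_loss, val_mae = 0.0, 0.0
        with torch.no_grad():
            for batch in val_loader:
                batch = {k: v.to(device) for k, v in batch.items()}
                labels = batch.pop("labels").float()
                preds = model(**batch).logits.squeeze(-1)
                val_loss += mse(preds, labels).item()
                val_mae += mae(preds, labels).item()

        # Per-epoch lines in the same shape as the log above.
        print(f"Epoch: {epoch}")
        print(f"Training loss: {train_loss / len(train_loader)} - MAE: {train_mae / len(train_loader)}")
        print(f"Validation loss: {val_loss / len(val_loader)} - MAE: {val_mae / len(val_loader)}")

# Example usage (checkpoint name and data loaders are placeholders, not from the log):
# model = AutoModelForSequenceClassification.from_pretrained("bert-base-cased", num_labels=1)
# run_epochs(model, train_loader, val_loader, num_epochs=10)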