Started at: 13:54:54

Model configuration (as logged):

- architectures: ['BertForMaskedLM']
- attention_probs_dropout_prob: 0.1
- hidden_act: gelu
- hidden_dropout_prob: 0.1
- hidden_size: 768
- initializer_range: 0.02
- intermediate_size: 3072
- max_position_embeddings: 512
- model_type: bert
- num_attention_heads: 12
- num_hidden_layers: 12
- type_vocab_size: 2
- vocab_size: 32922
- _commit_hash: 44815f7e109b53547cccdf3c6847f4c28b989816

Per-epoch training and validation results:

| Epoch | Training loss | Training MSE | Validation loss | Validation MSE |
|------:|--------------------:|--------------------:|--------------------:|--------------------:|
| 0 | 0.4353753740065976 | 0.4631018830797368 | 0.14201142988167703 | 0.292727476662094 |
| 1 | 0.16082680688092582 | 0.3039855683631176 | 0.14794749207794666 | 0.29952175843163786 |
| 2 | 0.15888278323568797 | 0.30232633269739506 | 0.12977031315676868 | 0.2779232412353849 |
| 3 | 0.1516539692878723 | 0.29570308847882737 | 0.1342183407396078 | 0.28315487688041685 |
| 4 | 0.14969512843771984 | 0.29376316316788137 | 0.13651572703383863 | 0.286356273434194 |
| 5 | 0.14825457373732015 | 0.29236288646470276 | 0.13325480837374926 | 0.28235259219172804 |
| 6 | 0.14587709978222846 | 0.29145181360233374 | 0.14294268772937357 | 0.29422283310077546 |
| 7 | 0.148439571535901 | 0.29440375310883926 | 0.14034398854710162 | 0.2907182476260459 |
| 8 | 0.14813685872052845 | 0.2931949322407332 | 0.12979571800678968 | 0.2782447249405777 |
| 9 | 0.14627560220266644 | 0.2917503264555033 | 0.1504771877080202 | 0.30260855387996344 |
| 10 | 0.14390636309981347 | 0.29031416543032734 | 0.13953579450026155 | 0.2890264796219526 |
| 11 | 0.14922026603629715 | 0.292561392520073 | 0.1444824270438403 | 0.2944073201865649 |
| 12 | 0.14153452319534202 | 0.285389527479883 | 0.1263227758463472 | 0.27377457326610966 |
| 13 | 0.14998281272618394 | 0.2927288895743673 | 0.12917391466908157 | 0.27674699036970196 |
| 14 | 0.1430773071160442 | 0.2892675879716682 | 0.14142304356209934 | 0.2941539923554046 |
| 15 | 0.15678064901577798 | 0.3018205089469717 | 0.12917302176356316 | 0.27513299298107086 |
| 16 | 0.1494230675854181 | 0.2951704079181553 | 0.1280019199475646 | 0.2731706016706994 |
| 17 | 0.14316309780666703 | 0.28891571660292187 | 0.12425365822855383 | 0.27112690135936646 |
| 18 | 0.1434906577593402 | 0.28606728956978134 | 0.15053234971128404 | 0.30457389003231583 |
| 19 | 0.14480229098545877 | 0.2907542990574569 | 0.1469436811748892 | 0.3000679240572026 |
| 20 | 0.1462563581372562 | 0.2903288957751347 | 0.16302492283284664 | 0.3174909429867512 |
| 21 | 0.14268505094867004 | 0.28808178734771134 | 0.12645738548599184 | 0.2734583813939935 |
| 22 | 0.1431882705735533 | 0.29013400485776225 | 0.14717783266678452 | 0.3002806941290146 |
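For reference, the sketch below shows one way per-epoch numbers like those above could be produced, assuming PyTorch and the Hugging Face transformers library. The logged architecture is BertForMaskedLM, but because the log reports MSE for both training and validation, the sketch assumes a regression setup built from a plain BertModel plus a linear head; BertRegressor, run_epoch, and the batch layout are hypothetical names for illustration, not the script that produced this log.

```python
import torch
from torch import nn
from transformers import BertConfig, BertModel

# Config equivalent to the one logged above (values copied from the log).
config = BertConfig(
    vocab_size=32922,
    hidden_size=768,
    num_hidden_layers=12,
    num_attention_heads=12,
    intermediate_size=3072,
    hidden_act="gelu",
    hidden_dropout_prob=0.1,
    attention_probs_dropout_prob=0.1,
    max_position_embeddings=512,
    type_vocab_size=2,
    initializer_range=0.02,
)

class BertRegressor(nn.Module):
    """Hypothetical regression head on top of BERT (the original script is not shown in the log)."""
    def __init__(self, config):
        super().__init__()
        self.bert = BertModel(config)
        self.head = nn.Linear(config.hidden_size, 1)

    def forward(self, input_ids, attention_mask=None):
        outputs = self.bert(input_ids=input_ids, attention_mask=attention_mask)
        return self.head(outputs.pooler_output).squeeze(-1)

def run_epoch(model, loader, criterion, optimizer=None, device="cpu"):
    """Return (mean loss, MSE) over one pass; updates the model only if an optimizer is given."""
    training = optimizer is not None
    model.train(training)
    total_loss, preds, targets = 0.0, [], []
    for batch in loader:  # assumed batch layout: dict with input_ids, attention_mask, labels
        input_ids = batch["input_ids"].to(device)
        attention_mask = batch["attention_mask"].to(device)
        labels = batch["labels"].float().to(device)
        with torch.set_grad_enabled(training):
            outputs = model(input_ids, attention_mask)
            loss = criterion(outputs, labels)
        if training:
            optimizer.zero_grad()
            loss.backward()
            optimizer.step()
        total_loss += loss.item()
        preds.append(outputs.detach().cpu())
        targets.append(labels.cpu())
    mse = torch.mean((torch.cat(preds) - torch.cat(targets)) ** 2).item()
    return total_loss / len(loader), mse
```

A driver loop would call run_epoch once per epoch with an optimizer (e.g. torch.optim.AdamW) for the training pass and once without it for the validation pass, printing both the mean loss and the MSE each time, which is the shape of the output recorded above. The exact definition of the logged "MSE" column is not shown in the log, so this sketch simply computes it over all predictions of the pass.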