Started at: 10:48:19
({'architectures': ['BertForMaskedLM'], 'attention_probs_dropout_prob': 0.1, 'hidden_act': 'gelu', 'hidden_dropout_prob': 0.1, 'hidden_size': 768, 'initializer_range': 0.02, 'intermediate_size': 3072, 'max_position_embeddings': 512, 'model_type': 'bert', 'num_attention_heads': 12, 'num_hidden_layers': 12, 'type_vocab_size': 2, 'vocab_size': 50104, '_commit_hash': 'afb829e3d0b861bd5f8cda6522b32ca0b097d7eb'}, {})
Epoch: 0  Training loss: 0.1924845103344639 - MSE: 0.3322236655963751  Validation loss: 0.17660720612515102 - MSE: 0.324802509627284
Epoch: 1  Training loss: 0.1750537462078678 - MSE: 0.3180729018571566  Validation loss: 0.15942594849250533 - MSE: 0.303487836083192
Epoch: 2  Training loss: 0.16873684886672774 - MSE: 0.31160564513902284  Validation loss: 0.15245808395698215 - MSE: 0.29108694559682735
Epoch: 3  Training loss: 0.1641668309528513 - MSE: 0.30635566679511805  Validation loss: 0.15943744812499394 - MSE: 0.2995468239484796
Epoch: 4  Training loss: 0.16207607389252804 - MSE: 0.30587689084159564  Validation loss: 0.15767109924645134 - MSE: 0.30170587154140893
Epoch: 5  Training loss: 0.1646450460713527 - MSE: 0.30737025815788666  Validation loss: 0.15219450781516958 - MSE: 0.29307598227245624
Epoch: 6  Training loss: 0.15751262988687167 - MSE: 0.2992461270486002  Validation loss: 0.14895058451502613 - MSE: 0.2874865253358561
Epoch: 7  Training loss: 0.16588557305447946 - MSE: 0.3104177206449217  Validation loss: 0.15715703639117154 - MSE: 0.2987129812343567
Epoch: 8  Training loss: 0.15816156373259985 - MSE: 0.301503453503837  Validation loss: 0.15006912420644905 - MSE: 0.288090082300874
Epoch: 9  Training loss: 0.16364081340090272 - MSE: 0.30751142892997274  Validation loss: 0.15224947774726333 - MSE: 0.29088555215174683
Epoch: 10  Training loss: 0.15624311541769711 - MSE: 0.29860838299589704  Validation loss: 0.15263732759789986 - MSE: 0.28903691439119805
Epoch: 11  Training loss: 0.15317542393346728 - MSE: 0.29561704899634816  Validation loss: 0.1465996318129879 - MSE: 0.2883039288153636
Epoch: 12  Training loss: 0.15394915797308012 - MSE: 0.29761309661938545  Validation loss: 0.14934668831075681 - MSE: 0.2889731477139799
Epoch: 13  Training loss: 0.1515518317405645 - MSE: 0.293403737656915  Validation loss: 0.14790588775367447 - MSE: 0.2870366854261706
Epoch: 14  Training loss: 0.15473641882541822 - MSE: 0.2978288996002309  Validation loss: 0.14924548374432506 - MSE: 0.2876623407733963
Epoch: 15  Training loss: 0.15178937489535602 - MSE: 0.29491923654080343  Validation loss: 0.1417709912991885 - MSE: 0.28157878501973976
Epoch: 16  Training loss: 0.1528880622635033 - MSE: 0.29616020437308754  Validation loss: 0.14535920316296996 - MSE: 0.2877532904456922
Epoch: 17  Training loss: 0.14908536092430202 - MSE: 0.29369922969737966  Validation loss: 0.14267539989316103 - MSE: 0.2845677065163917
Epoch: 18  Training loss: 0.1528856832401704 - MSE: 0.29543899790480355  Validation loss: 0.14451625288435907 - MSE: 0.2880325385683692
Epoch: 19  Training loss: 0.14715763338401838 - MSE: 0.2894570824084226  Validation loss: 0.1392400273206559 - MSE: 0.28102629808100493
Epoch: 20  Training loss: 0.1496212546935844 - MSE: 0.2936362344117262  Validation loss: 0.14045483167424347 - MSE: 0.2781410631520079
Epoch: 21  Training loss: 0.15020131709309398 - MSE: 0.29264906647405664  Validation loss: 0.14210618603410144 - MSE: 0.279907465990816
Epoch: 22  Training loss: 0.14703214569442769 - MSE: 0.2902175784407383  Validation loss: 0.15019087527285924 - MSE: 0.29736463912534333
Epoch: 23  Training loss: 0.14753472385218905 - MSE: 0.2909915520698562  Validation loss: 0.14351169821439366 - MSE: 0.28804395697824053
Epoch: 24  Training loss: 0.14944542596485408 - MSE: 0.29246118657753634  Validation loss: 0.1458486494692889 - MSE: 0.2909268395457848
Epoch: 25  Training loss: 0.1488783634669587 - MSE: 0.2935176709923817  Validation loss: 0.15622048407341493 - MSE: 0.3037462926966687
Epoch: 26
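As an aside, a minimal Python sketch of how per-epoch lines in the format above could be parsed to find the epoch with the lowest validation MSE; the regex and the inline three-epoch excerpt (copied from epochs 19-21 of the log) are illustrative assumptions, not part of the original training code.

import re

# Hypothetical excerpt in the same per-epoch format as the log above (epochs 19-21).
LOG = """
Epoch: 19  Training loss: 0.14715763338401838 - MSE: 0.2894570824084226  Validation loss: 0.1392400273206559 - MSE: 0.28102629808100493
Epoch: 20  Training loss: 0.1496212546935844 - MSE: 0.2936362344117262  Validation loss: 0.14045483167424347 - MSE: 0.2781410631520079
Epoch: 21  Training loss: 0.15020131709309398 - MSE: 0.29264906647405664  Validation loss: 0.14210618603410144 - MSE: 0.279907465990816
"""

# One tuple of capture groups per epoch line:
# (epoch, train_loss, train_mse, val_loss, val_mse).
PATTERN = re.compile(
    r"Epoch:\s*(\d+)\s+"
    r"Training loss:\s*([\d.]+)\s*-\s*MSE:\s*([\d.]+)\s+"
    r"Validation loss\s*:\s*([\d.]+)\s*-\s*MSE:\s*([\d.]+)"
)

rows = [
    (int(e), float(tl), float(tm), float(vl), float(vm))
    for e, tl, tm, vl, vm in PATTERN.findall(LOG)
]

# Pick the epoch with the lowest validation MSE (epoch 20 in this excerpt).
best = min(rows, key=lambda r: r[4])
print(f"Best epoch by validation MSE: {best[0]} (val MSE = {best[4]})")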