Started at: 10:44:21

Model config (architectures: BertForMaskedLM, model_type: bert, commit afb829e3d0b861bd5f8cda6522b32ca0b097d7eb):
    attention_probs_dropout_prob: 0.1
    hidden_act: gelu
    hidden_dropout_prob: 0.1
    hidden_size: 768
    initializer_range: 0.02
    intermediate_size: 3072
    max_position_embeddings: 512
    num_attention_heads: 12
    num_hidden_layers: 12
    type_vocab_size: 2
    vocab_size: 50104

Epoch | Training loss       | Training MSE        | Validation loss     | Validation MSE
    0 | 0.19173999391134502 | 0.3262300431900936  | 0.1707202664443425  | 0.3181949529015193
    1 | 0.1834320445517892  | 0.3230671429831198  | 0.17099061012268066 | 0.31859832233250407
    2 | 0.18393246207422423 | 0.3238126735351295  | 0.17154425446476254 | 0.31947564650493276
    3 | 0.18445654531705727 | 0.32449193418262723 | 0.17155078649520875 | 0.31949343827852444
    4 | 0.18468433119428967 | 0.3248966974226236  | 0.17185703590512275 | 0.31997947147548467
    5 | 0.18483373783167126 | 0.32512973743557905 | 0.1718519987804549  | 0.3199738939708498
    6 | 0.18488002355092936 | 0.32523622541441644 | 0.17180264528308595 | 0.3198964095291948
    7 | 0.18488125423493895 | 0.3252789020479855  | 0.17174329182931355 | 0.3198061523443487
    8 | 0.18489378878792512 | 0.3253228091660482  | 0.17357715804662024 | 0.3226380277707774
    9 | 0.18595946008719286 | 0.32635185643103526 | 0.17145181340830667 | 0.31935092495654577
   10 | 0.18484974442755134 | 0.3253414797140243  | 0.171514799871615   | 0.31944891148013993
   11 | 0.1848593037394644  | 0.3253549544164942  | 0.17148066344005722 | 0.3193935607975748
   12 | 0.18484833496577532 | 0.3253483776600709  | 0.17145045633826936 | 0.3193448287850645
   13 | 0.18483807320154985 | 0.32534201221261033 | 0.1714242550943579  | 0.3193027320467601
   14 | 0.184799948146621   | 0.32530459791156613 | 0.17139992096594403 | 0.3192637658295488
   15 | 0.18460696313566374 | 0.32509753975495237 | 0.1713372460433415  | 0.3191811611360338
   16 | 0.18478047471601985 | 0.32529885939908587 | 0.17135789075068064 | 0.31920771904988215
   17 | 0.18478687360738089 | 0.3253013729808102  | 0.17135313632232801 | 0.3191962242941372
   18 | 0.18478634794360227 | 0.3252988952896622  | 0.17134608296411377 | 0.31918270404483856
   19 | 0.18478439364907812 | 0.32529616494054064 | 0.1713385373353958  | 0.3191693609064844
   20 | 0.18478169060737185 | 0.3252932951001202  | 0.17133102927889143 | 0.31915661807205264
   21 | 0.18477885548061537 | 0.325290603982356   | 0.17132410705089568 | 0.31914513406643114
   22 | 0.18477606650405717 | 0.3252881301248911  | 0.17131790316530637 | 0.3191349769027771
   23 | 0.18475595528928979 | 0.3252490983746809  | 0.17116585086498942 | 0.3188991654947001
   24 | 0.18471557654223394 | 0.32523076226991837 | 0.17129607019679888 | 0.31910639966871324
   25 | 0.1847534055993395  | 0.32526731010380944 | 0.1712971685188157  | 0.31910609611077234
   26 | 0.18475418508920854 | 0.3252666713391783  | 0.17129676256861007 | 0.31910429168658866
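For context, below is a minimal sketch of the kind of PyTorch + Hugging Face transformers setup that could produce a per-epoch log like the one above. Only the BERT config values come from the log header; the BertRegressor head, run_epoch helper, loaders, loss choice, and hyperparameters are assumptions introduced for illustration, not the actual training code.

# Minimal sketch only: assumes PyTorch + Hugging Face transformers and batches of
# (input_ids, attention_mask, target). Config values are from the log header above;
# everything else (model head, loss, loaders, hyperparameters) is hypothetical.
import torch
from torch import nn
from transformers import BertConfig, BertModel

config = BertConfig(
    vocab_size=50104,
    hidden_size=768,
    num_hidden_layers=12,
    num_attention_heads=12,
    intermediate_size=3072,
    hidden_act="gelu",
    hidden_dropout_prob=0.1,
    attention_probs_dropout_prob=0.1,
    max_position_embeddings=512,
    type_vocab_size=2,
    initializer_range=0.02,
)

class BertRegressor(nn.Module):
    """BERT encoder with a single-output regression head (hypothetical)."""
    def __init__(self, config):
        super().__init__()
        self.bert = BertModel(config)
        self.head = nn.Linear(config.hidden_size, 1)

    def forward(self, input_ids, attention_mask=None):
        out = self.bert(input_ids=input_ids, attention_mask=attention_mask)
        return self.head(out.pooler_output).squeeze(-1)

def run_epoch(model, loader, criterion, optimizer=None, device="cpu"):
    """Return (mean loss, MSE) over one pass; trains only when an optimizer is given."""
    training = optimizer is not None
    model.train(training)
    total_loss, sq_err, n = 0.0, 0.0, 0
    for input_ids, attention_mask, target in loader:
        input_ids = input_ids.to(device)
        attention_mask = attention_mask.to(device)
        target = target.to(device)
        with torch.set_grad_enabled(training):
            pred = model(input_ids, attention_mask)
            loss = criterion(pred, target)
        if training:
            optimizer.zero_grad()
            loss.backward()
            optimizer.step()
        total_loss += loss.item() * target.size(0)
        sq_err += ((pred.detach() - target) ** 2).sum().item()
        n += target.size(0)
    return total_loss / n, sq_err / n

# Hypothetical usage; the reported loss differs from the MSE column, so an L1
# criterion is used here purely as a placeholder for the unknown training loss.
# model = BertRegressor(config).to(device)
# criterion = nn.L1Loss()
# optimizer = torch.optim.AdamW(model.parameters(), lr=2e-5)
# for epoch in range(27):
#     tr_loss, tr_mse = run_epoch(model, train_loader, criterion, optimizer, device)
#     va_loss, va_mse = run_epoch(model, val_loader, criterion, device=device)
#     print(f"Epoch: {epoch} Training loss: {tr_loss} - MSE: {tr_mse} "
#           f"Validation loss : {va_loss} - MSE: {va_mse}")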