Started at: 14:58:05
({'architectures': ['BertForMaskedLM'], 'attention_probs_dropout_prob': 0.1, 'hidden_act': 'gelu', 'hidden_dropout_prob': 0.1, 'hidden_size': 768, 'initializer_range': 0.02, 'intermediate_size': 3072, 'max_position_embeddings': 512, 'model_type': 'bert', 'num_attention_heads': 12, 'num_hidden_layers': 12, 'type_vocab_size': 2, 'vocab_size': 32922, '_commit_hash': '44815f7e109b53547cccdf3c6847f4c28b989816'}, {})

Epoch: 0  | Training loss: 0.25046255227726727 - MSE: 0.36727700175627953 | Validation loss: 0.17648151460470576 - MSE: 0.32341695900998885
Epoch: 1  | Training loss: 0.17091115193469875 - MSE: 0.31430697735027086 | Validation loss: 0.16562944582917474 - MSE: 0.31155962633828027
Epoch: 2  | Training loss: 0.16704481191517132 - MSE: 0.3114155539092336 | Validation loss: 0.1604571777085463 - MSE: 0.30340937581848676
Epoch: 3  | Training loss: 0.16466754909396777 - MSE: 0.30768393210761735 | Validation loss: 0.1632813240091006 - MSE: 0.3109833563942495
Epoch: 4  | Training loss: 0.16455070379421796 - MSE: 0.3093531171941453 | Validation loss: 0.16564318244204376 - MSE: 0.30289557314252813
Epoch: 5  | Training loss: 0.16568223224376058 - MSE: 0.30846559528561224 | Validation loss: 0.1605968801022479 - MSE: 0.2988926674776124
Epoch: 6  | Training loss: 0.15717696056223762 - MSE: 0.3022554811747332 | Validation loss: 0.16166257779255058 - MSE: 0.29849396869512723
Epoch: 7  | Training loss: 0.15822318865714338 - MSE: 0.30195677281494104 | Validation loss: 0.1757645166733048 - MSE: 0.3080704490543514
Epoch: 8  | Training loss: 0.15552518461939646 - MSE: 0.2998732285398632 | Validation loss: 0.1911594748045459 - MSE: 0.32289788546992626
Epoch: 9  | Training loss: 0.15594329739812063 - MSE: 0.3009645091966726 | Validation loss: 0.19870018710692725 - MSE: 0.33138930551919304
Epoch: 10 | Training loss: 0.1547534751521452 - MSE: 0.2992649989557918 | Validation loss: 0.19730406333551262 - MSE: 0.3297122567265123
Epoch: 11 | Training loss: 0.15529195780908395 - MSE: 0.29975488127620853 | Validation loss: 0.18655535093311107 - MSE: 0.3217206286243775
Epoch: 12 | Training loss: 0.15309068918001228 - MSE: 0.2969653755509521 | Validation loss: 0.17314820917266788 - MSE: 0.30849219803901623
Epoch: 13 | Training loss: 0.15322715030784534 - MSE: 0.29618723127423563 | Validation loss: 0.1593460923794544 - MSE: 0.29643875794480956
Epoch: 14 | Training loss: 0.1519091561209732 - MSE: 0.2967038231193114 | Validation loss: 0.15947996328274408 - MSE: 0.2962177626613215
Epoch: 15 | Training loss: 0.1513304633234963 - MSE: 0.2948141613710868 | Validation loss: 0.17381140318783847 - MSE: 0.30743163511355315
Epoch: 16 | Training loss: 0.15304585124559814 - MSE: 0.2953887584017137 | Validation loss: 0.15291882255537945 - MSE: 0.2937530252336034
Epoch: 17 | Training loss: 0.15207511008844762 - MSE: 0.29601101879974745 | Validation loss: 0.15711521870936407 - MSE: 0.2956258048557993
Epoch: 18 | Training loss: 0.1522078831032448 - MSE: 0.2947807228620845 | Validation loss: 0.1593479738768303 - MSE: 0.2975361759408073
Epoch: 19 | Training loss: 0.15030200151622597 - MSE: 0.29364889133341376 | Validation loss: 0.15571271227390476 - MSE: 0.2947020680712253
Epoch: 20 | Training loss: 0.14997340040990545 - MSE: 0.29321906843107093 | Validation loss: 0.16053046500592522 - MSE: 0.2972562162079979
Epoch: 21 | Training loss: 0.1503814266153096 - MSE: 0.29498264967501453 | Validation loss: 0.18625259749365575 - MSE: 0.3214374853139268
Epoch: 22 | Training loss: 0.15071523613130985 - MSE: 0.2931879129534037 | Validation loss: 0.15868466793361938 - MSE: 0.29779115185817956
Epoch: 23 | Training loss: 0.14925291788230088 - MSE: 0.29253552849679587 | Validation loss: 0.17538098990917206 - MSE: 0.30947426368924724
Epoch: 24 | Training loss: 0.15000015682526652 - MSE: 0.2927369972856095 | Validation loss: 0.1700212608909968 - MSE: 0.3071491417052287
Epoch: 25 | Training loss: 0.15024784067409291 - MSE: 0.29311721077485986 | Validation loss: 0.19164935623606047 - MSE: 0.32203448128239803
Epoch: 26 | Training loss: 0.15137434639225755 - MSE: 0.29379136046127924 | Validation loss: 0.1671447205272588 - MSE: 0.30391976326317943
Epoch: 27 | Training loss: 0.15017312853998943 - MSE: 0.29455603880812276 | Validation loss: 0.18036871166391807 - MSE: 0.3125465653289461
Epoch: 28 | Training loss: 0.14778636641762583 - MSE: 0.2905298917289279 | Validation loss: 0.16160754949757547 - MSE: 0.2979193112805207
Epoch: 29 | Training loss: 0.1477772275241196 - MSE: 0.2912173747535554 | Validation loss: 0.17026665750326533 - MSE: 0.30567947675889745
Epoch: 30 | Training loss: 0.14744956316757324 - MSE: 0.291312328070814 | Validation loss: 0.16202468379880441 - MSE: 0.29700271247754034
Epoch: 31 | Training loss: 0.1484179409778663 - MSE: 0.2921315750433443 | Validation loss: 0.16338278116150337 - MSE: 0.2987860072168981
Epoch: 32 | Training loss: 0.14738556740777142 - MSE: 0.29104356719237584 | Validation loss: 0.15635579254365328 - MSE: 0.29443009978003276
Epoch: 33 | Training loss: 0.148553161413839 - MSE: 0.29089201478673565 | Validation loss: 0.15951512471744508 - MSE: 0.2966586790896474
Epoch: 34 | Training loss: 0.15104458401650947 - MSE: 0.2934267254216827 | Validation loss: 0.17196247625080022 - MSE: 0.3055353329027785
Epoch: 35 | Training loss: 0.1503695050623211 - MSE: 0.29389694550707784 | Validation loss: 0.16170481603705522 - MSE: 0.29835679015904104
Epoch: 36 | Training loss: 0.14925730479928442 - MSE: 0.2924833646517312 | Validation loss: 0.17856141236243825 - MSE: 0.31225271412806316
Epoch: 37 | Training loss: 0.15118206122199895 - MSE: 0.29395904130187084 | Validation loss: 0.17895790042750764 - MSE: 0.3121755853923419
Epoch: 38 | Training loss: 0.1497652710505246 - MSE: 0.2912881449244226 | Validation loss: 0.15206407851567774 - MSE: 0.2909835362196566
Epoch: 39 | Training loss: 0.14738748415472544 - MSE: 0.29036707520169275 | Validation loss: 0.16739020203099106 - MSE: 0.30124313077212783
Epoch: 40 | Training loss: 0.14675953452886664 - MSE: 0.2900500768121404 | Validation loss: 0.17623373211333246 - MSE: 0.30992423744210557
Epoch: 41 | Training loss: 0.14824477195512825 - MSE: 0.29090776402279417 | Validation loss: 0.16109058317361455 - MSE: 0.2974189646320188
Epoch: 42 | Training loss: 0.14692961365967838 - MSE: 0.2904768427610488 | Validation loss: 0.1707474204401175 - MSE: 0.30493211675533977
Epoch: 43 | Training loss: 0.14761571415941122 - MSE: 0.2907837329648465 | Validation loss: 0.1536927172970591 - MSE: 0.2910231603894442
Epoch: 44 | Training loss: 0.15164254637917285 - MSE: 0.2942169462116838 | Validation loss: 0.15892887950846643 - MSE: 0.2960443273887821
Epoch: 45 | Training loss: 0.14786733518624065 - MSE: 0.2917048977763665 | Validation loss: 0.1669148841139042 - MSE: 0.30125252101874894
Epoch: 46
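
For reference, below is a minimal sketch of the kind of fine-tuning loop that could produce log lines in this format: a BERT encoder built from the printed config, a single-value regression head, MSE loss, and one training plus one validation pass per epoch. Everything beyond the printed config is an assumption: the regression head on the pooler output, the MSELoss criterion, the AdamW optimizer and its learning rate, the (input_ids, attention_mask, target) batch layout, and the exact definition of the logged "MSE" column (which is clearly not identical to the logged mean loss) are placeholders, and the pretrained checkpoint name is not in the log, so the sketch instantiates the architecture from the config instead of calling from_pretrained.

# Hypothetical reconstruction, NOT the original training script.
# Assumed stack: PyTorch + Hugging Face transformers; regression head, loss,
# optimizer, batch layout and metric definitions are placeholders.
import torch
from torch import nn
from transformers import BertConfig, BertModel

# Architecture hyperparameters taken verbatim from the config printed at the top of the log.
config = BertConfig(
    vocab_size=32922,
    hidden_size=768,
    num_hidden_layers=12,
    num_attention_heads=12,
    intermediate_size=3072,
    hidden_act="gelu",
    hidden_dropout_prob=0.1,
    attention_probs_dropout_prob=0.1,
    max_position_embeddings=512,
    type_vocab_size=2,
    initializer_range=0.02,
)


class BertRegressor(nn.Module):
    """BERT encoder with a single-value regression head (assumed architecture)."""

    def __init__(self, config: BertConfig):
        super().__init__()
        self.bert = BertModel(config)  # the real run presumably started from pretrained weights
        self.head = nn.Linear(config.hidden_size, 1)

    def forward(self, input_ids, attention_mask=None):
        out = self.bert(input_ids=input_ids, attention_mask=attention_mask)
        return self.head(out.pooler_output).squeeze(-1)


def run_epoch(model, loader, criterion, optimizer=None, device="cpu"):
    """One pass over `loader`; returns (mean batch loss, epoch-level MSE).

    The log's 'MSE' column may have been computed differently (e.g. on
    denormalized targets); this is only one plausible definition.
    """
    training = optimizer is not None
    model.train(training)
    total_loss, preds, targets = 0.0, [], []
    for input_ids, attention_mask, y in loader:  # assumed batch layout
        input_ids = input_ids.to(device)
        attention_mask = attention_mask.to(device)
        y = y.to(device)
        with torch.set_grad_enabled(training):
            y_hat = model(input_ids, attention_mask)
            loss = criterion(y_hat, y)
        if training:
            optimizer.zero_grad()
            loss.backward()
            optimizer.step()
        total_loss += loss.item()
        preds.append(y_hat.detach().cpu())
        targets.append(y.detach().cpu())
    mse = torch.mean((torch.cat(preds) - torch.cat(targets)) ** 2).item()
    return total_loss / len(loader), mse


if __name__ == "__main__":
    # Smoke test on random data (batch of 4, sequence length 16), purely illustrative;
    # the real run would iterate over proper train/validation DataLoaders for ~47 epochs
    # and print one Epoch / Training loss / Validation loss record per epoch, as above.
    model = BertRegressor(config)
    criterion = nn.MSELoss()
    optimizer = torch.optim.AdamW(model.parameters(), lr=2e-5)  # assumed optimizer and lr
    dummy_batch = (
        torch.randint(0, config.vocab_size, (4, 16)),  # input_ids
        torch.ones(4, 16, dtype=torch.long),           # attention_mask
        torch.randn(4),                                # regression targets
    )
    tr_loss, tr_mse = run_epoch(model, [dummy_batch], criterion, optimizer)
    va_loss, va_mse = run_epoch(model, [dummy_batch], criterion)
    print("Epoch: 0")
    print(f"Training loss: {tr_loss} - MSE: {tr_mse}")
    print(f"Validation loss: {va_loss} - MSE: {va_mse}")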