Started at: 14:55:50
({'architectures': ['BertForMaskedLM'], 'attention_probs_dropout_prob': 0.1, 'hidden_act': 'gelu', 'hidden_dropout_prob': 0.1, 'hidden_size': 768, 'initializer_range': 0.02, 'intermediate_size': 3072, 'max_position_embeddings': 512, 'model_type': 'bert', 'num_attention_heads': 12, 'num_hidden_layers': 12, 'type_vocab_size': 2, 'vocab_size': 50104, '_commit_hash': 'f22bb47f536f62edfcd86ca9320ade990eafbe22'}, {})

Epoch: 0 Training loss: 0.2707270306348801 - MAE: 0.4014548746281838 Validation loss: 0.18827014460283167 - MAE: 0.3350395914193632
Epoch: 1 Training loss: 0.18397963613271714 - MAE: 0.3274942520732981 Validation loss: 0.17896055035731373 - MAE: 0.32625804300542655
Epoch: 2 Training loss: 0.17076618894934653 - MAE: 0.3156013590381467 Validation loss: 0.17423390377970302 - MAE: 0.32323377271249604
Epoch: 3 Training loss: 0.1643083082139492 - MAE: 0.3095641840936019 Validation loss: 0.16728906508754282 - MAE: 0.3149824083534162
Epoch: 4 Training loss: 0.15957146167755126 - MAE: 0.3037565492992936 Validation loss: 0.16289721429347992 - MAE: 0.3089864069106985
Epoch: 5 Training loss: 0.15854436449706555 - MAE: 0.301500893432691 Validation loss: 0.16129758033682318 - MAE: 0.308202317708313
Epoch: 6 Training loss: 0.15570181787014006 - MAE: 0.29918709351469924 Validation loss: 0.16344413292758606 - MAE: 0.30938825983369334
Epoch: 7 Training loss: 0.15292704798281193 - MAE: 0.2961117975066867 Validation loss: 0.16081692103077383 - MAE: 0.3033625257590202
Epoch: 8 Training loss: 0.15216302141547203 - MAE: 0.2959306926556599 Validation loss: 0.15965510378865636 - MAE: 0.30640949969727455
Epoch: 9 Training loss: 0.1456899941712618 - MAE: 0.2888247514317626 Validation loss: 0.15742756864603827 - MAE: 0.3014585494849406
Epoch: 10 Training loss: 0.1467710939049721 - MAE: 0.2907957975079187 Validation loss: 0.15912020206451416 - MAE: 0.30151619716495687
Epoch: 11 Training loss: 0.14659826673567294 - MAE: 0.2893981534314371 Validation loss: 0.1602539621731814 - MAE: 0.3022486893374817
Epoch: 12 Training loss: 0.14781650044023992 - MAE: 0.29275159014985785 Validation loss: 0.15743818177896388 - MAE: 0.3011467784998341
Epoch: 13 Training loss: 0.14709322392940521 - MAE: 0.2905235164202929 Validation loss: 0.15774143706349766 - MAE: 0.30035432342912505
Epoch: 14 Training loss: 0.14297918483614921 - MAE: 0.2870641656789985 Validation loss: 0.16193263846285202 - MAE: 0.3045518551046045
Epoch: 15 Training loss: 0.14410636112093925 - MAE: 0.2889387564652954 Validation loss: 0.15722107492825566 - MAE: 0.299146052737687
Epoch: 16 Training loss: 0.14345928631722926 - MAE: 0.28585450120342 Validation loss: 0.15835655086180744 - MAE: 0.2999799426627903
Epoch: 17 Training loss: 0.14413826659321785 - MAE: 0.2881013153573912 Validation loss: 0.15604389984818065 - MAE: 0.29791508329064814
Epoch: 18 Training loss: 0.14309908427298068 - MAE: 0.2863054251735944 Validation loss: 0.15559008366921367 - MAE: 0.2975231734983981
Epoch: 19 Training loss: 0.14340508081018924 - MAE: 0.2865255988742361 Validation loss: 0.15817041651291006 - MAE: 0.30007397316689965
Epoch: 20 Training loss: 0.14153706684708595 - MAE: 0.28535710740880754 Validation loss: 0.15578039034324534 - MAE: 0.29770187771554596
Epoch: 21 Training loss: 0.1428557775169611 - MAE: 0.285682293954449 Validation loss: 0.16232867451275096 - MAE: 0.3028449341916031
Epoch: 22 Training loss: 0.1438133302330971 - MAE: 0.2875040690451074 Validation loss: 0.15943333638065002 - MAE: 0.30116253984475694
Epoch: 23 Training loss: 0.1429821538925171 - MAE: 0.2868833888762416 Validation loss: 0.16224380889359644 - MAE: 0.3035733609314583
Epoch: 24 Training loss: 0.1443456995487213 - MAE: 0.28863224872068594 Validation loss: 0.16213381728705237 - MAE: 0.30267648828913235
Epoch: 25 Training loss: 0.1441011916846037 - MAE: 0.2873510918998528 Validation loss: 0.1571306345217368 - MAE: 0.29800568097664254
Epoch: 26 Training loss: 0.1413027948886156 - MAE: 0.2845599852463436 Validation loss: 0.16157625484115937 - MAE: 0.302082414790674
Epoch: 27 Training loss: 0.1430261830240488 - MAE: 0.28583150224560333 Validation loss: 0.15557704515316906 - MAE: 0.2962375028009326
Epoch: 28 Training loss: 0.13997980147600175 - MAE: 0.2843562749380958 Validation loss: 0.15982679803581798 - MAE: 0.3009880212003318
Epoch: 29 Training loss: 0.14315382800996304 - MAE: 0.286772557351704 Validation loss: 0.1625496679369141 - MAE: 0.3033521796959797
Epoch: 30 Training loss: 0.14252791218459607 - MAE: 0.2867981758810579 Validation loss: 0.16115986687295578 - MAE: 0.3021782659616576
Epoch: 31 Training loss: 0.1429748132824898 - MAE: 0.2856617862509035 Validation loss: 0.157088626833523 - MAE: 0.2981007278628173
Epoch: 32 Training loss: 0.14145717337727548 - MAE: 0.28519633331447963 Validation loss: 0.15426941435126698 - MAE: 0.29495634573875945
Epoch: 33 Training loss: 0.1397652292251587 - MAE: 0.2824192674074606 Validation loss: 0.16185673939831116 - MAE: 0.30235075639458764
Epoch: 34 Training loss: 0.14383683420717716 - MAE: 0.28783451246641834
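
The log above is not accompanied by the script that produced it. The following is a minimal sketch, assuming the BERT encoder described by the logged config is fine-tuned with a single-output regression head, that MSE is the training/validation loss, and that MAE is the reported metric; the BertRegressor class, run_epoch helper, data loaders, learning rate, and epoch count are hypothetical placeholders, not the original setup.

```python
# Sketch of a loop that would emit log lines like the ones above (assumed setup).
import torch
from torch import nn
from transformers import BertConfig, BertModel

# Config values taken from the logged dict; the remaining fields
# (gelu activation, 0.1 dropouts, 0.02 initializer range) match BertConfig defaults.
config = BertConfig(
    vocab_size=50104,
    hidden_size=768,
    num_hidden_layers=12,
    num_attention_heads=12,
    intermediate_size=3072,
    max_position_embeddings=512,
    type_vocab_size=2,
)

class BertRegressor(nn.Module):
    """Hypothetical regression head on top of the pooled BERT output."""
    def __init__(self, config):
        super().__init__()
        self.bert = BertModel(config)
        self.head = nn.Linear(config.hidden_size, 1)

    def forward(self, input_ids, attention_mask):
        out = self.bert(input_ids=input_ids, attention_mask=attention_mask)
        return self.head(out.pooler_output).squeeze(-1)

def run_epoch(model, loader, optimizer=None, device="cpu"):
    """Return (mean MSE loss, mean MAE) over one pass of `loader`.

    Training mode when an optimizer is given, evaluation mode otherwise.
    `loader` is assumed to yield (input_ids, attention_mask, targets) batches.
    """
    training = optimizer is not None
    model.train(training)
    mse = nn.MSELoss()
    total_loss, total_mae, n = 0.0, 0.0, 0
    for input_ids, attention_mask, targets in loader:
        input_ids = input_ids.to(device)
        attention_mask = attention_mask.to(device)
        targets = targets.to(device)
        with torch.set_grad_enabled(training):
            preds = model(input_ids, attention_mask)
            loss = mse(preds, targets)
        if training:
            optimizer.zero_grad()
            loss.backward()
            optimizer.step()
        total_loss += loss.item() * len(targets)
        total_mae += (preds - targets).abs().sum().item()
        n += len(targets)
    return total_loss / n, total_mae / n

# Usage with hypothetical train_loader / val_loader DataLoaders:
# model = BertRegressor(config)
# optimizer = torch.optim.AdamW(model.parameters(), lr=2e-5)
# for epoch in range(35):
#     tr_loss, tr_mae = run_epoch(model, train_loader, optimizer)
#     va_loss, va_mae = run_epoch(model, val_loader)
#     print(f"Epoch: {epoch} Training loss: {tr_loss} - MAE: {tr_mae} "
#           f"Validation loss: {va_loss} - MAE: {va_mae}")
```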