
Started at: 12:07:49

norbert2, 1e-06, 256

({'architectures': ['BertForMaskedLM'], 'attention_probs_dropout_prob': 0.1, 'hidden_act': 'gelu', 'hidden_dropout_prob': 0.1, 'hidden_size': 768, 'initializer_range': 0.02, 'intermediate_size': 3072, 'max_position_embeddings': 512, 'model_type': 'bert', 'num_attention_heads': 12, 'num_hidden_layers': 12, 'type_vocab_size': 2, 'vocab_size': 50104, '_commit_hash': 'f22bb47f536f62edfcd86ca9320ade990eafbe22'}, {})
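
For reference, a minimal sketch of how a run with this header (model norbert2, learning rate 1e-06, third value 256) and the BERT config above might be set up. The Hugging Face hub id "ltg/norbert2", the reading of 256 as the max sequence length, and the single-output regression head (suggested by the MAE metric below) are assumptions, not taken from the log.

# Hedged sketch of the run setup implied by the header and config above.
# Assumptions not confirmed by the log: the hub id "ltg/norbert2", that 256
# is the max sequence length, and that the task is single-output regression.
import torch
from transformers import AutoTokenizer, AutoModelForSequenceClassification

MODEL_NAME = "ltg/norbert2"   # assumed hub id for "norbert2"
LEARNING_RATE = 1e-6          # from the run header
MAX_LENGTH = 256              # assumed meaning of the header's third value

tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
model = AutoModelForSequenceClassification.from_pretrained(
    MODEL_NAME,
    num_labels=1,              # single regression target (assumed)
    problem_type="regression",
)
optimizer = torch.optim.AdamW(model.parameters(), lr=LEARNING_RATE)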

Epoch: 0

Training loss: 0.30783953905105593 - MAE: 0.43719580843584505

Validation loss: 0.21560138960679373 - MAE: 0.3596361274665927
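
The per-epoch figures above and below could come from a loop of roughly the following shape. This is a hedged sketch only: the MSE training objective, the loader and device names, and the averaging over batches are assumptions; only the reported loss/MAE values come from the log.

# Hedged sketch of a per-epoch loop reporting loss and MAE, as in this log.
import torch
import torch.nn.functional as F

def run_epoch(model, loader, optimizer=None, device="cpu"):
    """Run one pass over `loader`; train if an optimizer is given, else evaluate."""
    training = optimizer is not None
    model.train(training)
    total_loss = total_mae = 0.0
    n_batches = 0
    with torch.set_grad_enabled(training):
        for batch in loader:
            batch = {k: v.to(device) for k, v in batch.items()}
            labels = batch.pop("labels").float()
            preds = model(**batch).logits.squeeze(-1)
            loss = F.mse_loss(preds, labels)   # assumed training objective
            if training:
                optimizer.zero_grad()
                loss.backward()
                optimizer.step()
            total_loss += loss.item()
            total_mae += (preds - labels).abs().mean().item()
            n_batches += 1
    return total_loss / n_batches, total_mae / n_batches

# Usage per epoch (train_loader / val_loader are assumed to exist):
# train_loss, train_mae = run_epoch(model, train_loader, optimizer)
# val_loss, val_mae = run_epoch(model, val_loader)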

Epoch: 1

Training loss: 0.2266489964723587 - MAE: 0.3668055945655404

Validation loss: 0.2019458363453547 - MAE: 0.34808209428471554

Epoch: 2

Training loss: 0.19879530966281891 - MAE: 0.3426665569097344

Validation loss: 0.18689965373939937 - MAE: 0.3323873764788491

Epoch: 3

Training loss: 0.19044960021972657 - MAE: 0.3341355262436057

Validation loss: 0.17861351039674547 - MAE: 0.32349605653410407

Epoch: 4
