Started at: 12:34:41
norbert2, 0.001, 128
({'architectures': ['BertForMaskedLM'], 'attention_probs_dropout_prob': 0.1, 'hidden_act': 'gelu', 'hidden_dropout_prob': 0.1, 'hidden_size': 768, 'initializer_range': 0.02, 'intermediate_size': 3072, 'max_position_embeddings': 512, 'model_type': 'bert', 'num_attention_heads': 12, 'num_hidden_layers': 12, 'type_vocab_size': 2, 'vocab_size': 50104, '_commit_hash': 'f22bb47f536f62edfcd86ca9320ade990eafbe22'}, {})
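The run header above records a checkpoint name ("norbert2"), a learning rate (0.001), and a third value (128, presumably the batch size or maximum sequence length), followed by a (config dict, unused kwargs) dump. A minimal sketch of how such a dump could be produced with Hugging Face transformers; the hub id "ltg/norbert2" and the reading of "0.001, 128" are assumptions taken from the log line, not confirmed by it:

# Minimal sketch (assumption): reproduce the config dump above with
# Hugging Face transformers. "ltg/norbert2" is a guessed hub id for the
# "norbert2" checkpoint; 0.001 and 128 are read as learning rate and
# batch size (or max sequence length) purely from the "norbert2, 0.001, 128" line.
from transformers import AutoConfig

model_name = "ltg/norbert2"
learning_rate = 0.001
batch_size = 128

# return_unused_kwargs=True makes from_pretrained return a (config, unused_kwargs)
# pair; printing the config as a dict gives a tuple shaped like the one logged.
config, unused = AutoConfig.from_pretrained(model_name, return_unused_kwargs=True)
print((config.to_dict(), unused))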
Epoch: 0
Training loss: 0.2549108934402466 - MAE: 0.3869476704225354
Validation loss : 0.17967465169289532 - MAE: 0.32511726772455946
Epoch: 1
Training loss: 0.18461654603481292 - MAE: 0.32631207394936323
Validation loss : 0.16705343916135676 - MAE: 0.3136336903292028
Epoch: 2
Training loss: 0.1698657414317131 - MAE: 0.312740198772678
Validation loss : 0.16012747936389027 - MAE: 0.3062873321185852
Epoch: 3
Training loss: 0.16400991275906562 - MAE: 0.30695357768639636