Started at: 22:43:41
norbert2, 0.0005, 256
({'architectures': ['BertForMaskedLM'], 'attention_probs_dropout_prob': 0.1, 'hidden_act': 'gelu', 'hidden_dropout_prob': 0.1, 'hidden_size': 768, 'initializer_range': 0.02, 'intermediate_size': 3072, 'max_position_embeddings': 512, 'model_type': 'bert', 'num_attention_heads': 12, 'num_hidden_layers': 12, 'type_vocab_size': 2, 'vocab_size': 50104, '_commit_hash': 'f22bb47f536f62edfcd86ca9320ade990eafbe22'}, {})
Epoch: 0
Training loss: 0.3353100383281708 - MAE: 0.4553341033264035
Validation loss : 0.20341295666164821 - MAE: 0.3450441513966228
Epoch: 1
Training loss: 0.21215539872646333 - MAE: 0.3530871885300038
Validation loss : 0.19502520395649803 - MAE: 0.3432616956552773
Epoch: 2
Training loss: 0.19663703739643096 - MAE: 0.3396870770030848
Validation loss : 0.18265276153882345 - MAE: 0.3286732326987826
Epoch: 3