Started at: 15:00:49
nb-bert-base, lr: 0.005, bs: 256, ml: 512, oversample: True, frozen: True
({'_name_or_path': '/disk4/folder1/working/checkpoints/huggingface/native_pytorch/step4_8/', 'attention_probs_dropout_prob': 0.1, 'directionality': 'bidi', 'gradient_checkpointing': False, 'hidden_act': 'gelu', 'hidden_dropout_prob': 0.1, 'hidden_size': 768, 'initializer_range': 0.02, 'intermediate_size': 3072, 'layer_norm_eps': 1e-12, 'max_position_embeddings': 512, 'model_type': 'bert', 'num_attention_heads': 12, 'num_hidden_layers': 12, 'pad_token_id': 0, 'pooler_fc_size': 768, 'pooler_num_attention_heads': 12, 'pooler_num_fc_layers': 3, 'pooler_size_per_head': 128, 'pooler_type': 'first_token_transform', 'position_embedding_type': 'absolute', 'type_vocab_size': 2, 'vocab_size': 119547, '_commit_hash': '82b194c0b3ea1fcad65f1eceee04adb26f9f71ac'}, {})
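
The two lines above record the run configuration: nb-bert-base with the encoder frozen, learning rate 0.005, batch size 256, max sequence length 512, and oversampling enabled, followed by the printed BERT config. Below is a minimal sketch of a setup consistent with that configuration, assuming the NbAiLab/nb-bert-base checkpoint on the Hugging Face Hub and a single-output regression head (the log reports MAE); the names and structure are illustrative, not the original script.

import torch
from transformers import AutoTokenizer, AutoModelForSequenceClassification

MODEL_NAME = "NbAiLab/nb-bert-base"        # assumed Hub id for "nb-bert-base"
LR, BATCH_SIZE, MAX_LEN = 0.005, 256, 512  # lr / bs / ml from the log line above

tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)

# num_labels=1 gives a single regression output, consistent with the MAE metric
# reported per epoch in this log.
model = AutoModelForSequenceClassification.from_pretrained(
    MODEL_NAME, num_labels=1, problem_type="regression"
)

# "frozen: True" -> freeze the BERT encoder and train only the regression head.
for param in model.bert.parameters():
    param.requires_grad = False

optimizer = torch.optim.AdamW(
    (p for p in model.parameters() if p.requires_grad), lr=LR
)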
Epoch: 0
Training loss: 0.48549762964248655 - MAE: 0.5464755460237409
Validation loss : 0.32136343184270355 - MAE: 0.44572845093237595
Epoch: 1
Training loss: 0.3226211439479481 - MAE: 0.44861805548692757
Validation loss : 0.29554246758159836 - MAE: 0.42440333521262635
Epoch: 2
Training loss: 0.31240331368012864 - MAE: 0.44097710053291156
Validation loss : 0.2837091582386117 - MAE: 0.41816877024433474
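
A sketch of the per-epoch loop that would produce the loss/MAE lines above, assuming MSE as the training objective with MAE tracked separately; model and optimizer come from the setup sketch earlier, and train_loader / val_loader are assumed DataLoaders yielding tokenized batches with a "labels" field.

import torch

loss_fn = torch.nn.MSELoss()  # assumed objective; MAE is computed separately below

def run_epoch(model, loader, optimizer=None, device="cuda"):
    """One pass over a DataLoader; returns (mean loss, mean MAE)."""
    training = optimizer is not None
    model.to(device)
    model.train(training)
    total_loss, total_abs_err, n = 0.0, 0.0, 0
    for batch in loader:
        batch = {k: v.to(device) for k, v in batch.items()}
        labels = batch.pop("labels").float()
        with torch.set_grad_enabled(training):
            preds = model(**batch).logits.squeeze(-1)
            loss = loss_fn(preds, labels)
        if training:
            optimizer.zero_grad()
            loss.backward()
            optimizer.step()
        total_loss += loss.item() * labels.size(0)
        total_abs_err += (preds - labels).abs().sum().item()
        n += labels.size(0)
    return total_loss / n, total_abs_err / n

for epoch in range(3):  # epochs 0-2 are the ones visible in this log
    print(f"Epoch: {epoch}")
    train_loss, train_mae = run_epoch(model, train_loader, optimizer)
    print(f"Training loss: {train_loss} - MAE: {train_mae}")
    val_loss, val_mae = run_epoch(model, val_loader)
    print(f"Validation loss : {val_loss} - MAE: {val_mae}")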