Started at: 10:02:30
nb-bert-base, 0.001, 128
({'_name_or_path': '/disk4/folder1/working/checkpoints/huggingface/native_pytorch/step4_8/', 'attention_probs_dropout_prob': 0.1, 'directionality': 'bidi', 'gradient_checkpointing': False, 'hidden_act': 'gelu', 'hidden_dropout_prob': 0.1, 'hidden_size': 768, 'initializer_range': 0.02, 'intermediate_size': 3072, 'layer_norm_eps': 1e-12, 'max_position_embeddings': 512, 'model_type': 'bert', 'num_attention_heads': 12, 'num_hidden_layers': 12, 'pad_token_id': 0, 'pooler_fc_size': 768, 'pooler_num_attention_heads': 12, 'pooler_num_fc_layers': 3, 'pooler_size_per_head': 128, 'pooler_type': 'first_token_transform', 'position_embedding_type': 'absolute', 'type_vocab_size': 2, 'vocab_size': 119547, '_commit_hash': '82b194c0b3ea1fcad65f1eceee04adb26f9f71ac'}, {})
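
The header above records the run settings (nb-bert-base, then 0.001 and 128, presumably the learning rate and the batch size or maximum sequence length) followed by the BERT configuration loaded from a local checkpoint. The sketch below is a hypothetical reconstruction of that setup with the transformers library, not the script that produced this log; the Hub id NbAiLab/nb-bert-base, the single-output regression head (suggested by the MAE metric reported below), and the meaning of the two numbers are assumptions.

# Hypothetical reconstruction of the setup logged above (assumptions noted per line).
import torch
from transformers import AutoTokenizer, AutoModelForSequenceClassification

MODEL_NAME = "NbAiLab/nb-bert-base"   # assumed Hub id; the log itself shows a local checkpoint path
LEARNING_RATE = 0.001                 # second value on the header line
MAX_LEN = 128                         # third value; assumed to be the max sequence length

tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
model = AutoModelForSequenceClassification.from_pretrained(
    MODEL_NAME,
    num_labels=1,                     # single regression output (MAE is reported below)
    problem_type="regression",        # the model then uses MSE as its loss
)
optimizer = torch.optim.AdamW(model.parameters(), lr=LEARNING_RATE)

# Example of tokenizing one input with the assumed max length:
encoded = tokenizer("Et eksempel.", truncation=True, max_length=MAX_LEN, return_tensors="pt")
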
Epoch: 0
Training loss: 0.33461407989263536 - MAE: 0.4395451926176116
Validation loss : 0.17470807801274693 - MAE: 0.316590567311014
Epoch: 1
Training loss: 0.167546104490757 - MAE: 0.3104947695666053
Validation loss : 0.1605402261895292 - MAE: 0.3041267300080164
Epoch: 2
Training loss: 0.15953998982906342 - MAE: 0.30230224392344485
Validation loss : 0.156581671360661 - MAE: 0.3003702843252824
Epoch: 3
Training loss: 0.15586254999041557 - MAE: 0.2976227208408245
Validation loss : 0.15435482824549956 - MAE: 0.29782302866508426
Epoch: 4
Training loss: 0.153594990670681 - MAE: 0.2951023367494838
Validation loss : 0.15327392255558686 - MAE: 0.29758807291884654
Epoch: 5
Training loss: 0.1521633943915367 - MAE: 0.293013468516263
Validation loss : 0.1528188367100323 - MAE: 0.29786525073805276
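
Each epoch above reports the mean training loss (MSE under the regression setup sketched earlier) together with MAE, then the same two numbers on the validation set. A rough sketch of an epoch loop that would emit lines in this format, assuming PyTorch DataLoaders named train_loader and val_loader and the model/optimizer from the sketch above:

# Hypothetical epoch loop matching the log format above; loaders, device, and epoch count are assumptions.
import torch

def run_epoch(model, loader, optimizer=None, device="cuda"):
    training = optimizer is not None
    model.train() if training else model.eval()
    total_loss = total_mae = 0.0
    for batch in loader:
        batch = {k: v.to(device) for k, v in batch.items()}
        with torch.set_grad_enabled(training):
            outputs = model(**batch)                      # loss is MSE for problem_type="regression"
            preds = outputs.logits.squeeze(-1)
            mae = torch.mean(torch.abs(preds - batch["labels"])).item()
        if training:
            optimizer.zero_grad()
            outputs.loss.backward()
            optimizer.step()
        total_loss += outputs.loss.item()
        total_mae += mae
    return total_loss / len(loader), total_mae / len(loader)

# for epoch in range(num_epochs):            # num_epochs assumed; this section shows epochs 0-5
#     print(f"Epoch: {epoch}")
#     tr_loss, tr_mae = run_epoch(model, train_loader, optimizer)
#     print(f"Training loss: {tr_loss} - MAE: {tr_mae}")
#     val_loss, val_mae = run_epoch(model, val_loader)
#     print(f"Validation loss : {val_loss} - MAE: {val_mae}")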