Started at: 11:40:26
({'_name_or_path': '/disk4/folder1/working/checkpoints/huggingface/native_pytorch/step4_8/', 'attention_probs_dropout_prob': 0.1, 'directionality': 'bidi', 'gradient_checkpointing': False, 'hidden_act': 'gelu', 'hidden_dropout_prob': 0.1, 'hidden_size': 768, 'initializer_range': 0.02, 'intermediate_size': 3072, 'layer_norm_eps': 1e-12, 'max_position_embeddings': 512, 'model_type': 'bert', 'num_attention_heads': 12, 'num_hidden_layers': 12, 'pad_token_id': 0, 'pooler_fc_size': 768, 'pooler_num_attention_heads': 12, 'pooler_num_fc_layers': 3, 'pooler_size_per_head': 128, 'pooler_type': 'first_token_transform', 'position_embedding_type': 'absolute', 'type_vocab_size': 2, 'vocab_size': 119547, '_commit_hash': '82b194c0b3ea1fcad65f1eceee04adb26f9f71ac'}, {})
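The dictionary above is the Hugging Face config of the multilingual BERT checkpoint loaded at the start of the run. As a minimal sketch (not taken from this log's code), such a checkpoint is typically loaded with the transformers library; the checkpoint path is the one printed above, while the regression head with a single output is only an assumption inferred from the MAE metric reported below:

# Assumed sketch: load the checkpoint referenced in the log.
# AutoModelForSequenceClassification with num_labels=1 and problem_type="regression"
# is an assumption based on the MAE metric; the actual model class is not shown in the log.
from transformers import AutoConfig, AutoTokenizer, AutoModelForSequenceClassification

checkpoint = "/disk4/folder1/working/checkpoints/huggingface/native_pytorch/step4_8/"
config = AutoConfig.from_pretrained(checkpoint)      # prints the dictionary shown above
tokenizer = AutoTokenizer.from_pretrained(checkpoint)
model = AutoModelForSequenceClassification.from_pretrained(
    checkpoint, num_labels=1, problem_type="regression"
)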
Epoch: 0
Training loss: 0.4581766623258591 - MAE: 0.5292641480208709
Validation loss : 0.22879805664221445 - MAE: 0.3594399771230401
Epoch: 1
Training loss: 0.19436429917812348 - MAE: 0.3385698724059062
Validation loss : 0.17431378530131447 - MAE: 0.3219149815721847
Epoch: 2
Training loss: 0.17444411993026734 - MAE: 0.31684918185002736
Validation loss : 0.16497086816363865 - MAE: 0.31185002325092886
Epoch: 3
Training loss: 0.1655295819044113 - MAE: 0.3086558034500663
Validation loss : 0.15977457662423453 - MAE: 0.3062515869775394
Epoch: 4
Training loss: 0.16229379653930665 - MAE: 0.3037281934486638
Validation loss : 0.15585461341672474 - MAE: 0.3013561711510056
Epoch: 5