Started at: 14:05:05
({'_name_or_path': '/disk4/folder1/working/checkpoints/huggingface/native_pytorch/step4_8/', 'attention_probs_dropout_prob': 0.1, 'directionality': 'bidi', 'gradient_checkpointing': False, 'hidden_act': 'gelu', 'hidden_dropout_prob': 0.1, 'hidden_size': 768, 'initializer_range': 0.02, 'intermediate_size': 3072, 'layer_norm_eps': 1e-12, 'max_position_embeddings': 512, 'model_type': 'bert', 'num_attention_heads': 12, 'num_hidden_layers': 12, 'pad_token_id': 0, 'pooler_fc_size': 768, 'pooler_num_attention_heads': 12, 'pooler_num_fc_layers': 3, 'pooler_size_per_head': 128, 'pooler_type': 'first_token_transform', 'position_embedding_type': 'absolute', 'type_vocab_size': 2, 'vocab_size': 119547, '_commit_hash': '82b194c0b3ea1fcad65f1eceee04adb26f9f71ac'}, {})
Epoch: 0
Training loss: 0.4581766623258591 - MAE: 0.5292641480208709
Validation loss : 0.22879805664221445 - MAE: 0.3594399771230401
Epoch: 1
Training loss: 0.19436429917812348 - MAE: 0.3385698724059062
Validation loss : 0.17431378530131447 - MAE: 0.3219149815721847
Epoch: 2
Training loss: 0.17444411993026734 - MAE: 0.31684918185002736
Validation loss : 0.16497086816363865 - MAE: 0.31185002325092886
Epoch: 3
Training loss: 0.1655295819044113 - MAE: 0.3086558034500663
Validation loss : 0.15977457662423453 - MAE: 0.3062515869775394
Epoch: 4
Training loss: 0.16229379653930665 - MAE: 0.3037281934486638
Validation loss : 0.15585461341672474 - MAE: 0.3013561711510056
Epoch: 5
Training loss: 0.15735343039035798 - MAE: 0.299567204911211
Validation loss : 0.1539518212278684 - MAE: 0.2993376751781608
Epoch: 6
Training loss: 0.15725925147533418 - MAE: 0.2996774680828566
Validation loss : 0.15224697606431115 - MAE: 0.2976658126019026
Epoch: 7
Training loss: 0.15550604462623596 - MAE: 0.2963955747939702
Validation loss : 0.15032305154535505 - MAE: 0.29504313159460227
Epoch: 8
Training loss: 0.1532408055663109 - MAE: 0.2955262865803256