Started at: 14:40:19
({'_name_or_path': '/disk4/folder1/working/checkpoints/huggingface/native_pytorch/step4_8/',
  'attention_probs_dropout_prob': 0.1,
  'directionality': 'bidi',
  'gradient_checkpointing': False,
  'hidden_act': 'gelu',
  'hidden_dropout_prob': 0.1,
  'hidden_size': 768,
  'initializer_range': 0.02,
  'intermediate_size': 3072,
  'layer_norm_eps': 1e-12,
  'max_position_embeddings': 512,
  'model_type': 'bert',
  'num_attention_heads': 12,
  'num_hidden_layers': 12,
  'pad_token_id': 0,
  'pooler_fc_size': 768,
  'pooler_num_attention_heads': 12,
  'pooler_num_fc_layers': 3,
  'pooler_size_per_head': 128,
  'pooler_type': 'first_token_transform',
  'position_embedding_type': 'absolute',
  'type_vocab_size': 2,
  'vocab_size': 119547,
  '_commit_hash': '82b194c0b3ea1fcad65f1eceee04adb26f9f71ac'},
 {})
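The dump above looks like the (config, unused kwargs) pair produced when the checkpoint's BERT configuration is loaded. As a point of reference only, here is a minimal sketch of how such a config and model could be loaded with Hugging Face transformers; the num_labels=1 regression head is an assumption inferred from the MAE metric reported in the epochs below, not something stated in the log.

```python
# Minimal sketch (assumption: a regression fine-tuning setup, inferred from the
# MAE metric in the log; not taken verbatim from the original training script).
from transformers import BertConfig, BertForSequenceClassification

checkpoint = "/disk4/folder1/working/checkpoints/huggingface/native_pytorch/step4_8/"

# With return_unused_kwargs=True this returns a (config, unused_kwargs) tuple,
# which matches the "({...}, {})" shape of the dump above.
config, unused = BertConfig.from_pretrained(
    checkpoint,
    num_labels=1,               # assumption: single regression target
    problem_type="regression",  # assumption: MSE loss, MAE reported separately
    return_unused_kwargs=True,
)

model = BertForSequenceClassification.from_pretrained(checkpoint, config=config)
```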
Epoch: 0
Training loss: 1.2811461687088013 - MAE: 1.0408094668466794
Validation loss : 1.3419398069381714 - MAE: 1.0652254769721856
Epoch: 1
Training loss: 0.7854926586151123 - MAE: 0.7721470455473632
Validation loss : 0.8864973783493042 - MAE: 0.8435096881137445
Epoch: 2
Training loss: 0.4843243956565857 - MAE: 0.5272605160349294
Validation loss : 0.5820499062538147 - MAE: 0.6337268329583682
Epoch: 3
Training loss: 0.41485145688056946 - MAE: 0.4802625859156251
Validation loss : 0.421680212020874 - MAE: 0.4904769934140719
Epoch: 4
Training loss: 0.48013365268707275 - MAE: 0.5623592561797092
Validation loss : 0.35784491896629333 - MAE: 0.4395319429727701
Epoch: 5
Training loss: 0.5700742602348328 - MAE: 0.62281050650697
Validation loss : 0.3381924331188202 - MAE: 0.4468093950014848
Epoch: 6
Training loss: 0.5534572601318359 - MAE: 0.6398394494072387
Validation loss : 0.3276384174823761 - MAE: 0.4481881180634865
Epoch: 7
Training loss: 0.5136637687683105 - MAE: 0.6030835897514695
Validation loss : 0.3143889605998993 - MAE: 0.4345858062689121
Epoch: 8
Training loss: 0.4339531362056732 - MAE: 0.5305713756304038
Validation loss : 0.30018025636672974 - MAE: 0.41813509395489323
Epoch: 9
Training loss: 0.3681242763996124 - MAE: 0.4937124432701814
Validation loss : 0.28909191489219666 - MAE: 0.4049994842364238
Epoch: 10
Training loss: 0.28562891483306885 - MAE: 0.4289360728703047
Validation loss : 0.2845577895641327 - MAE: 0.39556940358418685
Epoch: 11
Training loss: 0.22621430456638336 - MAE: 0.3758177969016527
Validation loss : 0.28580838441848755 - MAE: 0.3986670867754863
Epoch: 12
Training loss: 0.23422713577747345 - MAE: 0.38526535191034017
Validation loss : 0.28847622871398926 - MAE: 0.40267336712433743
Epoch: 13
Training loss: 0.22982938587665558 - MAE: 0.35741449578812245
Validation loss : 0.28796255588531494 - MAE: 0.4020649595902516
Epoch: 14
Training loss: 0.23996150493621826 - MAE: 0.39404939899319097
Validation loss : 0.2812049388885498 - MAE: 0.39587824505109054
Epoch: 15
Training loss: 0.24414275586605072 - MAE: 0.379991781554724
Validation loss : 0.2680702209472656 - MAE: 0.383818744466855
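The per-epoch lines above report training and validation loss together with MAE; the validation loss reaches its lowest value around epoch 15 while the training loss has already begun to oscillate. For orientation, a minimal sketch of the kind of loop that would emit lines in this exact format is given below; the MSE loss, AdamW optimizer, and DataLoader setup are assumptions and are not confirmed by the log itself.

```python
# Sketch of a loop that prints per-epoch lines in the format seen above.
# Assumptions: regression head returning out.loss (MSE), AdamW optimizer,
# and pre-built train/validation DataLoaders yielding dict batches with "labels".
import torch

def run_epoch(model, loader, device, optimizer=None):
    training = optimizer is not None
    model.train() if training else model.eval()
    total_loss, total_mae, n_batches = 0.0, 0.0, 0
    with torch.set_grad_enabled(training):
        for batch in loader:
            batch = {k: v.to(device) for k, v in batch.items()}
            out = model(**batch)                  # labels present -> out.loss is set
            preds = out.logits.squeeze(-1)
            mae = (preds - batch["labels"]).abs().mean()
            if training:
                optimizer.zero_grad()
                out.loss.backward()
                optimizer.step()
            total_loss += out.loss.item()
            total_mae += mae.item()
            n_batches += 1
    return total_loss / n_batches, total_mae / n_batches

def train(model, train_loader, val_loader, device, epochs=16, lr=2e-5):
    optimizer = torch.optim.AdamW(model.parameters(), lr=lr)
    for epoch in range(epochs):
        print(f"Epoch: {epoch}")
        tr_loss, tr_mae = run_epoch(model, train_loader, device, optimizer)
        print(f"Training loss: {tr_loss} - MAE: {tr_mae}")
        va_loss, va_mae = run_epoch(model, val_loader, device)
        print(f"Validation loss : {va_loss} - MAE: {va_mae}")
```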