Started at: 09:30:12
({'_name_or_path': '/disk4/folder1/working/checkpoints/huggingface/native_pytorch/step4_8/', 'attention_probs_dropout_prob': 0.1, 'directionality': 'bidi', 'gradient_checkpointing': False, 'hidden_act': 'gelu', 'hidden_dropout_prob': 0.1, 'hidden_size': 768, 'initializer_range': 0.02, 'intermediate_size': 3072, 'layer_norm_eps': 1e-12, 'max_position_embeddings': 512, 'model_type': 'bert', 'num_attention_heads': 12, 'num_hidden_layers': 12, 'pad_token_id': 0, 'pooler_fc_size': 768, 'pooler_num_attention_heads': 12, 'pooler_num_fc_layers': 3, 'pooler_size_per_head': 128, 'pooler_type': 'first_token_transform', 'position_embedding_type': 'absolute', 'type_vocab_size': 2, 'vocab_size': 119547, '_commit_hash': '82b194c0b3ea1fcad65f1eceee04adb26f9f71ac'}, {})
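The dictionary above is the Hugging Face BERT configuration loaded from the checkpoint directory. As a minimal sketch (assuming the transformers package is available), the same configuration can be rebuilt as a BertConfig object; only the fields that differ from the BERT defaults are repeated here, the rest keep the values shown in the printout:

    # Sketch: reconstruct the printed configuration as a transformers BertConfig.
    # Fields not passed explicitly keep their BERT defaults, which match the log.
    from transformers import BertConfig

    config = BertConfig(
        vocab_size=119547,
        hidden_size=768,
        num_hidden_layers=12,
        num_attention_heads=12,
        intermediate_size=3072,
        max_position_embeddings=512,
        type_vocab_size=2,
    )
    print(config.model_type, config.hidden_size, config.vocab_size)

In the original run the config was read directly from the checkpoint path shown in the log (e.g. with BertConfig.from_pretrained), which is why the printout also carries _name_or_path and _commit_hash entries.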
Epoch: 0
Training loss: 0.21533221261281715 - MSE: 0.34197339416593503
Validation loss : 0.14451597188599408 - MSE: 0.2960304229995927
Epoch: 1
Training loss: 0.1517588474640721 - MSE: 0.2956301456982305
Validation loss : 0.12380113103426993 - MSE: 0.26862038821082024
Epoch: 2
Training loss: 0.14443182972700971 - MSE: 0.28899192076676716
Validation loss : 0.12112187955062836 - MSE: 0.2663072116915828
Epoch: 3
Training loss: 0.14475514602504277 - MSE: 0.29038970057934027
Validation loss : 0.13478531874716282 - MSE: 0.2850182042234337
Epoch: 4
Training loss: 0.1408511215134671 - MSE: 0.2861137754569417
Validation loss : 0.13267319882288575 - MSE: 0.2831877799249014
Epoch: 5
Training loss: 0.136032929232246 - MSE: 0.279424393321389
Validation loss : 0.11587779759429395 - MSE: 0.2579734090027159
Epoch: 6
Training loss: 0.1351911084432351 - MSE: 0.2779842455851462
Validation loss : 0.11928518384229392 - MSE: 0.25956903308497203
Epoch: 7
Training loss: 0.14176342871628309 - MSE: 0.28619337123151156
Validation loss : 0.11641600204166025 - MSE: 0.2570362210167332
Epoch: 8
Training loss: 0.13039547287319836 - MSE: 0.2738442464898942
Validation loss : 0.11378515849355608 - MSE: 0.2556338147619499
Epoch: 9
Training loss: 0.13758943127958398 - MSE: 0.2815961984005678
Validation loss : 0.11927694408223033 - MSE: 0.2592867935668437
Epoch: 10
Training loss: 0.13600184517471414 - MSE: 0.27845210879496773
Validation loss : 0.12289933965075761 - MSE: 0.2641740314788876
Epoch: 11
Training loss: 0.12924143111235217 - MSE: 0.27266305881012975
Validation loss : 0.11782969336491078 - MSE: 0.258937672893353
Epoch: 12
Training loss: 0.1344190981827284 - MSE: 0.28026616499269114
Validation loss : 0.11472726217471063 - MSE: 0.25845796944440735
Epoch: 13
Training loss: 0.1375312733414926 - MSE: 0.28103011966626457
Validation loss : 0.11630514287389815 - MSE: 0.25740234173235876
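The per-epoch lines above follow a fixed pattern: the epoch index, then training loss and MSE, then validation loss and MSE. A minimal, self-contained sketch of a loop that emits this format is below. It is an assumption-laden stand-in, not the original training script: a small linear regressor and random tensors replace the BERT checkpoint and real data so the sketch runs on its own, and the way the logged MSE differs from the logged loss in the real run (likely a different target scale) is not reproduced here.

    # Sketch of an epoch loop that prints log lines in the format above.
    # Stand-ins: nn.Linear instead of the fine-tuned BERT regressor,
    # random tensors instead of the real dataset.
    import torch
    import torch.nn as nn

    torch.manual_seed(0)
    X_train, y_train = torch.randn(256, 16), torch.randn(256)
    X_val, y_val = torch.randn(64, 16), torch.randn(64)

    model = nn.Linear(16, 1)
    optimizer = torch.optim.AdamW(model.parameters(), lr=1e-3)
    criterion = nn.MSELoss()

    def run_epoch(X, y, train):
        # One pass over the data; gradients only when training.
        model.train(train)
        with torch.set_grad_enabled(train):
            preds = model(X).squeeze(-1)
            loss = criterion(preds, y)
            if train:
                optimizer.zero_grad()
                loss.backward()
                optimizer.step()
            mse = ((preds - y) ** 2).mean().item()
        return loss.item(), mse

    for epoch in range(14):  # 14 epochs are shown in the log excerpt above
        print(f"Epoch: {epoch}")
        train_loss, train_mse = run_epoch(X_train, y_train, train=True)
        print(f"Training loss: {train_loss} - MSE: {train_mse}")
        val_loss, val_mse = run_epoch(X_val, y_val, train=False)
        print(f"Validation loss : {val_loss} - MSE: {val_mse}")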