Started at: 13:54:30
({'architectures': ['BertForMaskedLM'], 'attention_probs_dropout_prob': 0.1, 'hidden_act': 'gelu', 'hidden_dropout_prob': 0.1, 'hidden_size': 768, 'initializer_range': 0.02, 'intermediate_size': 3072, 'max_position_embeddings': 512, 'model_type': 'bert', 'num_attention_heads': 12, 'num_hidden_layers': 12, 'type_vocab_size': 2, 'vocab_size': 50104, '_commit_hash': 'afb829e3d0b861bd5f8cda6522b32ca0b097d7eb'}, {})
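The tuple above has the shape returned by the transformers PretrainedConfig.get_config_dict() helper: a configuration dictionary plus a dict of unused kwargs. As a minimal sketch (not the original script), it could have been produced roughly like this; the model id is a placeholder, since the actual checkpoint name is not recorded in this log (only its commit hash and a custom vocab_size of 50104 are visible):

# Hedged sketch: reproduce a (config_dict, unused_kwargs) tuple like the one logged above.
# "your-org/your-bert-checkpoint" is a placeholder; the real model id is not in this log.
from transformers import BertConfig

config_dict, unused_kwargs = BertConfig.get_config_dict("your-org/your-bert-checkpoint")
print((config_dict, unused_kwargs))

# Build a usable config object from the dictionary (12 layers, hidden_size 768, vocab 50104).
config = BertConfig.from_dict(config_dict)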
Epoch: 0
Training loss: 0.19169998451283105 - MSE: 0.3224822171590252
Validation loss : 0.1545075431931764 - MSE: 0.30456596715475825
Epoch: 1
Training loss: 0.16974398517294934 - MSE: 0.3103326546437019
Validation loss : 0.15334105514921248 - MSE: 0.3032539253831601
Epoch: 2
Training loss: 0.16987535333947132 - MSE: 0.3104551468798216
Validation loss : 0.15374588128179312 - MSE: 0.30371004876315055
Epoch: 3
Training loss: 0.16981600977872546 - MSE: 0.31038815772414813
Validation loss : 0.15414767409674823 - MSE: 0.3041637827186605
Epoch: 4
Training loss: 0.16997354869779788 - MSE: 0.3106644945154869
Validation loss : 0.15224769292399287 - MSE: 0.30202376234910844
Epoch: 5
Training loss: 0.17002245697535967 - MSE: 0.3105952131827796
Validation loss : 0.15425994875840843 - MSE: 0.3042897454938611
Epoch: 6
Training loss: 0.1700476981307331 - MSE: 0.310650163816472
Validation loss : 0.15435221185907722 - MSE: 0.3043926291643402
Epoch: 7
Training loss: 0.16941748441834198 - MSE: 0.31001210870889107
Validation loss : 0.1539384766947478 - MSE: 0.30392832597817687
Epoch: 8
Training loss: 0.16997813641240722 - MSE: 0.3105949710586207
Validation loss : 0.15462648100219667 - MSE: 0.30469803253754435
Epoch: 9
Training loss: 0.17010739684889192 - MSE: 0.31050279334273895
Validation loss : 0.155730452388525 - MSE: 0.30595527761715857
Epoch: 10
Training loss: 0.17010818978673534 - MSE: 0.31067252789705496
Validation loss : 0.15445392183028162 - MSE: 0.3045059578503242
Epoch: 11
Training loss: 0.1698176296133744 - MSE: 0.3104176910766342
Validation loss : 0.1544747839216143 - MSE: 0.30452919958725033
Epoch: 12
Training loss: 0.16994338412033885 - MSE: 0.3105494737531266
Validation loss : 0.15396608458831906 - MSE: 0.30395947231545506
Epoch: 13
Training loss: 0.1698948621357742 - MSE: 0.3105059461998054
Validation loss : 0.1545115364715457 - MSE: 0.3045701227160862
Epoch: 14
Training loss: 0.1699589282666382 - MSE: 0.31056594253224645
Validation loss : 0.1545204329304397 - MSE: 0.304580016388627
Epoch: 15
Training loss: 0.16995860153907225 - MSE: 0.3105668987295108
Validation loss : 0.15452667814679444 - MSE: 0.3045869592820054
Epoch: 16
Training loss: 0.1698562132684808 - MSE: 0.31058698633003384
Validation loss : 0.15058960486203432 - MSE: 0.30260322473577617
Epoch: 17
Training loss: 0.16920853351291856 - MSE: 0.30992660221922314
Validation loss : 0.15110802161507308 - MSE: 0.303181744856829
Epoch: 18
Training loss: 0.16951832692874105 - MSE: 0.31024539454754047
Validation loss : 0.15118786157108843 - MSE: 0.3032720424012041
Epoch: 19
Training loss: 0.16952366397569055 - MSE: 0.31024719080762236
Validation loss : 0.15118418936617672 - MSE: 0.303267858201707
Epoch: 20
Training loss: 0.16951929793546075 - MSE: 0.3102413832574418
Validation loss : 0.1511813565157354 - MSE: 0.30326463116898594
Epoch: 21
Training loss: 0.16951563405363182 - MSE: 0.3102366537550263
Validation loss : 0.15117910131812096 - MSE: 0.30326207003190575
Epoch: 22
Training loss: 0.16951254367044097 - MSE: 0.31023282786217926
Validation loss : 0.15117732365615666 - MSE: 0.3032600467336124
Epoch: 23
Training loss: 0.1695099739651931 - MSE: 0.31022965084000215
Validation loss : 0.15117588196881115 - MSE: 0.30325841342664717
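The per-epoch lines above report a mean training/validation loss together with a separately computed MSE. The loop below is a hedged sketch of code that would emit log lines in this format; the actual loss criterion, optimizer, data pipeline, and model head are not recorded in the log and are assumed here (note that in the original run the reported loss differs from the MSE, so the training criterion was evidently not plain MSE).

# Hedged sketch only: a minimal epoch loop producing "loss - MSE" lines like those above.
# Model, criterion, optimizer, and loaders are assumptions, not taken from this log.
import torch

def run_epoch(model, loader, criterion, optimizer=None, device="cuda"):
    training = optimizer is not None
    model.train() if training else model.eval()
    total_loss, total_sq_err, n = 0.0, 0.0, 0
    with torch.set_grad_enabled(training):
        for batch in loader:
            inputs = {k: v.to(device) for k, v in batch.items() if k != "labels"}
            labels = batch["labels"].to(device)
            preds = model(**inputs).logits.squeeze(-1)
            loss = criterion(preds, labels)
            if training:
                optimizer.zero_grad()
                loss.backward()
                optimizer.step()
            total_loss += loss.item() * labels.size(0)
            total_sq_err += torch.sum((preds - labels) ** 2).item()
            n += labels.size(0)
    # Mean loss and MSE over the whole split, as printed in the log.
    return total_loss / n, total_sq_err / n

# Usage matching the log format (num_epochs, loaders, etc. are assumed):
# for epoch in range(num_epochs):
#     print(f"Epoch: {epoch}")
#     tr_loss, tr_mse = run_epoch(model, train_loader, criterion, optimizer)
#     print(f"Training loss: {tr_loss} - MSE: {tr_mse}")
#     va_loss, va_mse = run_epoch(model, val_loader, criterion)
#     print(f"Validation loss : {va_loss} - MSE: {va_mse}")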