Started at: 09:18:54
({'architectures': ['BertForMaskedLM'], 'attention_probs_dropout_prob': 0.1, 'hidden_act': 'gelu', 'hidden_dropout_prob': 0.1, 'hidden_size': 768, 'initializer_range': 0.02, 'intermediate_size': 3072, 'max_position_embeddings': 512, 'model_type': 'bert', 'num_attention_heads': 12, 'num_hidden_layers': 12, 'type_vocab_size': 2, 'vocab_size': 50104, '_commit_hash': 'afb829e3d0b861bd5f8cda6522b32ca0b097d7eb'}, {})
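The tuple above has the (config_dict, kwargs) shape that transformers' PretrainedConfig.get_config_dict returns, which suggests (but does not prove) how it was printed. Below is a minimal sketch, assuming the Hugging Face transformers library, that rebuilds an equivalent BertConfig from the printed dictionary; the '_commit_hash' entry is checkpoint metadata and is omitted here.

# Sketch only: rebuilds a config object equivalent to the dictionary logged above.
from transformers import BertConfig

config_dict = {
    "architectures": ["BertForMaskedLM"],
    "attention_probs_dropout_prob": 0.1,
    "hidden_act": "gelu",
    "hidden_dropout_prob": 0.1,
    "hidden_size": 768,
    "initializer_range": 0.02,
    "intermediate_size": 3072,
    "max_position_embeddings": 512,
    "model_type": "bert",
    "num_attention_heads": 12,
    "num_hidden_layers": 12,
    "type_vocab_size": 2,
    "vocab_size": 50104,
}
config = BertConfig.from_dict(config_dict)
print(config.hidden_size, config.num_hidden_layers, config.vocab_size)  # 768 12 50104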
Epoch: 0
Training loss: 0.19173999391134502 - MSE: 0.3262300431900936
Validation loss : 0.1707202664443425 - MSE: 0.3181949529015193
Epoch: 1
Training loss: 0.1834320445517892 - MSE: 0.3230671429831198
Validation loss : 0.17099061012268066 - MSE: 0.31859832233250407
Epoch: 2
Training loss: 0.18393246207422423 - MSE: 0.3238126735351295
Validation loss : 0.17154425446476254 - MSE: 0.31947564650493276
Epoch: 3
Training loss: 0.18445654531705727 - MSE: 0.32449193418262723
Validation loss : 0.17155078649520875 - MSE: 0.31949343827852444
Epoch: 4
Training loss: 0.18468433119428967 - MSE: 0.3248966974226236
Validation loss : 0.17185703590512275 - MSE: 0.31997947147548467
Epoch: 5
Training loss: 0.18483373783167126 - MSE: 0.32512973743557905
Validation loss : 0.1718519987804549 - MSE: 0.3199738939708498
Epoch: 6
Training loss: 0.18488002355092936 - MSE: 0.32523622541441644
Validation loss : 0.17180264528308595 - MSE: 0.3198964095291948
Epoch: 7
Training loss: 0.18488125423493895 - MSE: 0.3252789020479855
Validation loss : 0.17174329182931355 - MSE: 0.3198061523443487
Epoch: 8
Training loss: 0.18489378878792512 - MSE: 0.3253228091660482
Validation loss : 0.17357715804662024 - MSE: 0.3226380277707774
Epoch: 9
Training loss: 0.18595946008719286 - MSE: 0.32635185643103526
Validation loss : 0.17145181340830667 - MSE: 0.31935092495654577
Epoch: 10
Training loss: 0.18484974442755134 - MSE: 0.3253414797140243
Validation loss : 0.171514799871615 - MSE: 0.31944891148013993
Epoch: 11
Training loss: 0.1848593037394644 - MSE: 0.3253549544164942
Validation loss : 0.17148066344005722 - MSE: 0.3193935607975748
Epoch: 12
Training loss: 0.18484833496577532 - MSE: 0.3253483776600709
Validation loss : 0.17145045633826936 - MSE: 0.3193448287850645
Epoch: 13
Training loss: 0.18483807320154985 - MSE: 0.32534201221261033
Validation loss : 0.1714242550943579 - MSE: 0.3193027320467601
Epoch: 14
Training loss: 0.184799948146621 - MSE: 0.32530459791156613
Validation loss : 0.17139992096594403 - MSE: 0.3192637658295488
Epoch: 15
Training loss: 0.18460696313566374 - MSE: 0.32509753975495237
Validation loss : 0.1713372460433415 - MSE: 0.3191811611360338
Epoch: 16
Training loss: 0.18478047471601985 - MSE: 0.32529885939908587
Validation loss : 0.17135789075068064 - MSE: 0.31920771904988215
Epoch: 17
Training loss: 0.18478687360738089 - MSE: 0.3253013729808102
Validation loss : 0.17135313632232801 - MSE: 0.3191962242941372
Epoch: 18
Training loss: 0.18478634794360227 - MSE: 0.3252988952896622
Validation loss : 0.17134608296411377 - MSE: 0.31918270404483856
Epoch: 19
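Each epoch in the log reports an averaged training loss together with a separately computed MSE, followed by the same pair on the validation set; the two numbers differ at every epoch, so the training criterion and the reported MSE are evidently computed differently. Below is a minimal, self-contained sketch of a loop that emits lines in this format. The toy linear model, random data, optimizer, and the SmoothL1-loss / MSE-metric split are all assumptions for illustration, not taken from the source; only the log-line format mirrors the output above.

# Sketch with placeholder model/data (assumptions throughout).
import torch
import torch.nn as nn
from torch.utils.data import DataLoader, TensorDataset

torch.manual_seed(0)
model = nn.Linear(16, 1)  # stand-in for the actual BERT-based regressor
optimizer = torch.optim.AdamW(model.parameters(), lr=1e-3)
criterion, mse_metric = nn.SmoothL1Loss(), nn.MSELoss()  # assumed criterion/metric pair

def make_loader(n):
    # Random regression data as a placeholder dataset.
    x, y = torch.randn(n, 16), torch.randn(n)
    return DataLoader(TensorDataset(x, y), batch_size=32)

train_loader, val_loader = make_loader(256), make_loader(64)

def run_epoch(loader, train):
    # Returns (average criterion loss, average MSE) over one pass of `loader`.
    model.train(train)
    total_loss = total_mse = 0.0
    with torch.set_grad_enabled(train):
        for x, y in loader:
            preds = model(x).squeeze(-1)
            loss = criterion(preds, y)
            if train:
                optimizer.zero_grad()
                loss.backward()
                optimizer.step()
            total_loss += loss.item()
            total_mse += mse_metric(preds, y).item()
    return total_loss / len(loader), total_mse / len(loader)

for epoch in range(3):
    print(f"Epoch: {epoch}")
    tr_loss, tr_mse = run_epoch(train_loader, train=True)
    print(f"Training loss: {tr_loss} - MSE: {tr_mse}")
    va_loss, va_mse = run_epoch(val_loader, train=False)
    print(f"Validation loss : {va_loss} - MSE: {va_mse}")  # spacing mirrors the original log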