Started at: 10:44:21
({'architectures': ['BertForMaskedLM'], 'attention_probs_dropout_prob': 0.1, 'hidden_act': 'gelu', 'hidden_dropout_prob': 0.1, 'hidden_size': 768, 'initializer_range': 0.02, 'intermediate_size': 3072, 'max_position_embeddings': 512, 'model_type': 'bert', 'num_attention_heads': 12, 'num_hidden_layers': 12, 'type_vocab_size': 2, 'vocab_size': 50104, '_commit_hash': 'afb829e3d0b861bd5f8cda6522b32ca0b097d7eb'}, {})
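
The tuple above (config dict, leftover kwargs) has the same shape as what transformers' PretrainedConfig.get_config_dict returns. As a minimal sketch, assuming a Hugging Face setup, a config and single-output regression model of this kind could be loaded as below; the checkpoint name is a placeholder, since the log only identifies the checkpoint by its commit hash.

from transformers import BertConfig, AutoModelForSequenceClassification

checkpoint = "bert-base-uncased"  # placeholder; the real checkpoint is not named in this log

# get_config_dict returns a (config dict, remaining kwargs) tuple,
# the same shape as the dump printed above.
print(BertConfig.get_config_dict(checkpoint))

# num_labels=1 with problem_type="regression" gives a single-output head
# trained with an MSE loss (an assumption about how the MSE values below arise).
model = AutoModelForSequenceClassification.from_pretrained(
    checkpoint,
    num_labels=1,
    problem_type="regression",
)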
Epoch: 0
Training loss: 0.19173999391134502 - MSE: 0.3262300431900936
Validation loss : 0.1707202664443425 - MSE: 0.3181949529015193
Epoch: 1
Training loss: 0.1834320445517892 - MSE: 0.3230671429831198
Validation loss : 0.17099061012268066 - MSE: 0.31859832233250407
Epoch: 2
Training loss: 0.18393246207422423 - MSE: 0.3238126735351295
Validation loss : 0.17154425446476254 - MSE: 0.31947564650493276
Epoch: 3
Training loss: 0.18445654531705727 - MSE: 0.32449193418262723
Validation loss : 0.17155078649520875 - MSE: 0.31949343827852444
Epoch: 4
Training loss: 0.18468433119428967 - MSE: 0.3248966974226236
Validation loss : 0.17185703590512275 - MSE: 0.31997947147548467
Epoch: 5
Training loss: 0.18483373783167126 - MSE: 0.32512973743557905
Validation loss : 0.1718519987804549 - MSE: 0.3199738939708498
Epoch: 6
Training loss: 0.18488002355092936 - MSE: 0.32523622541441644
Validation loss : 0.17180264528308595 - MSE: 0.3198964095291948
Epoch: 7
Training loss: 0.18488125423493895 - MSE: 0.3252789020479855
Validation loss : 0.17174329182931355 - MSE: 0.3198061523443487
Epoch: 8
Training loss: 0.18489378878792512 - MSE: 0.3253228091660482
Validation loss : 0.17357715804662024 - MSE: 0.3226380277707774
Epoch: 9
Training loss: 0.18595946008719286 - MSE: 0.32635185643103526
Validation loss : 0.17145181340830667 - MSE: 0.31935092495654577
Epoch: 10
Training loss: 0.18484974442755134 - MSE: 0.3253414797140243
Validation loss : 0.171514799871615 - MSE: 0.31944891148013993
Epoch: 11
Training loss: 0.1848593037394644 - MSE: 0.3253549544164942
Validation loss : 0.17148066344005722 - MSE: 0.3193935607975748
Epoch: 12
Training loss: 0.18484833496577532 - MSE: 0.3253483776600709
Validation loss : 0.17145045633826936 - MSE: 0.3193448287850645
Epoch: 13
Training loss: 0.18483807320154985 - MSE: 0.32534201221261033
Validation loss : 0.1714242550943579 - MSE: 0.3193027320467601
Epoch: 14
Training loss: 0.184799948146621 - MSE: 0.32530459791156613
Validation loss : 0.17139992096594403 - MSE: 0.3192637658295488
Epoch: 15
Training loss: 0.18460696313566374 - MSE: 0.32509753975495237
Validation loss : 0.1713372460433415 - MSE: 0.3191811611360338
Epoch: 16
Training loss: 0.18478047471601985 - MSE: 0.32529885939908587
Validation loss : 0.17135789075068064 - MSE: 0.31920771904988215
Epoch: 17
Training loss: 0.18478687360738089 - MSE: 0.3253013729808102
Validation loss : 0.17135313632232801 - MSE: 0.3191962242941372
Epoch: 18
Training loss: 0.18478634794360227 - MSE: 0.3252988952896622
Validation loss : 0.17134608296411377 - MSE: 0.31918270404483856
Epoch: 19
Training loss: 0.18478439364907812 - MSE: 0.32529616494054064
Validation loss : 0.1713385373353958 - MSE: 0.3191693609064844
Epoch: 20
Training loss: 0.18478169060737185 - MSE: 0.3252932951001202
Validation loss : 0.17133102927889143 - MSE: 0.31915661807205264
Epoch: 21
Training loss: 0.18477885548061537 - MSE: 0.325290603982356
Validation loss : 0.17132410705089568 - MSE: 0.31914513406643114
Epoch: 22
Training loss: 0.18477606650405717 - MSE: 0.3252881301248911
Validation loss : 0.17131790316530637 - MSE: 0.3191349769027771
Epoch: 23
Training loss: 0.18475595528928979 - MSE: 0.3252490983746809
Validation loss : 0.17116585086498942 - MSE: 0.3188991654947001
Epoch: 24
Training loss: 0.18471557654223394 - MSE: 0.32523076226991837
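
For context, a loop of roughly the following shape would emit per-epoch lines like those above. This is a hypothetical reconstruction, not the code that produced this log: the model, the dataloaders, and the way the reported "MSE" is computed (it differs from the loss in the log, which is not explained here) are all assumptions.

import torch
from torch.nn.functional import mse_loss

def run_epoch(model, loader, optimizer=None, device="cpu"):
    """One pass over loader; returns (mean loss, mean MSE) over batches."""
    training = optimizer is not None
    model.train(training)
    total_loss, total_mse, n_batches = 0.0, 0.0, 0
    with torch.set_grad_enabled(training):
        for inputs, targets in loader:
            inputs, targets = inputs.to(device), targets.to(device)
            preds = model(inputs).squeeze(-1)
            loss = mse_loss(preds, targets)
            if training:
                optimizer.zero_grad()
                loss.backward()
                optimizer.step()
            total_loss += loss.item()
            # The log above reports an "MSE" value that differs from the loss;
            # how it is computed is not shown, so the same quantity is used
            # here as a placeholder.
            total_mse += loss.item()
            n_batches += 1
    return total_loss / n_batches, total_mse / n_batches

# Usage sketch (model, optimizer and loaders are assumed to exist):
# for epoch in range(num_epochs):
#     train_loss, train_mse = run_epoch(model, train_loader, optimizer)
#     val_loss, val_mse = run_epoch(model, val_loader)
#     print(f"Epoch: {epoch}")
#     print(f"Training loss: {train_loss} - MSE: {train_mse}")
#     print(f"Validation loss : {val_loss} - MSE: {val_mse}")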