Started at: 13:10:36
({'architectures': ['BertForMaskedLM'], 'attention_probs_dropout_prob': 0.1, 'hidden_act': 'gelu', 'hidden_dropout_prob': 0.1, 'hidden_size': 768, 'initializer_range': 0.02, 'intermediate_size': 3072, 'max_position_embeddings': 512, 'model_type': 'bert', 'num_attention_heads': 12, 'num_hidden_layers': 12, 'type_vocab_size': 2, 'vocab_size': 50104, '_commit_hash': 'afb829e3d0b861bd5f8cda6522b32ca0b097d7eb'}, {})
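The tuple above has the shape returned by the transformers PretrainedConfig.get_config_dict classmethod: a configuration dict plus a dict of unused kwargs. Below is a minimal sketch of how such a line could be produced, assuming the transformers library is in use; the checkpoint name is a hypothetical placeholder, since the actual model path is not recorded in this log (its vocab_size of 50104 does not match the stock bert-base checkpoints).

    from transformers import BertConfig

    # Hypothetical placeholder checkpoint -- the real one behind this log is unknown.
    MODEL_NAME = "bert-base-uncased"

    # get_config_dict returns (config_dict, unused_kwargs); printing that tuple
    # yields a line shaped like the one above. Recent transformers versions also
    # include a '_commit_hash' key for checkpoints resolved from the Hub.
    config_dict, unused_kwargs = BertConfig.get_config_dict(MODEL_NAME)
    print((config_dict, unused_kwargs))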
Epoch: 0
Training loss: 2.7300986201327464 - MSE: 1.5973443396223515
Validation loss : 2.7675703980705957 - MSE: 1.609589820176938
Epoch: 1
Training loss: 2.7547256934461255 - MSE: 1.6089351524297428
Validation loss : 2.7675703980705957 - MSE: 1.609589820176938
Epoch: 2
Training loss: 2.7547256934461255 - MSE: 1.6089351524297428
Validation loss : 2.7675703980705957 - MSE: 1.609589820176938
Epoch: 3
Training loss: 2.7547256934461255 - MSE: 1.6089351524297428
Validation loss : 2.7675703980705957 - MSE: 1.609589820176938
Epoch: 4
Training loss: 2.7547256934461255 - MSE: 1.6089351524297428
Validation loss : 2.7675703980705957 - MSE: 1.609589820176938
Epoch: 5
Training loss: 2.7547256934461255 - MSE: 1.6089351524297428
Validation loss : 2.7675703980705957 - MSE: 1.609589820176938
Epoch: 6
Training loss: 2.7547256934461255 - MSE: 1.6089351524297428
Validation loss : 2.7675703980705957 - MSE: 1.609589820176938
Epoch: 7
Training loss: 2.7547256934461255 - MSE: 1.6089351524297428
Validation loss : 2.7675703980705957 - MSE: 1.609589820176938
Epoch: 8
Training loss: 2.7547256934461255 - MSE: 1.6089351524297428
Validation loss : 2.7675703980705957 - MSE: 1.609589820176938
Epoch: 9
Training loss: 2.7547256934461255 - MSE: 1.6089351524297428
Validation loss : 2.7675703980705957 - MSE: 1.609589820176938
Epoch: 10
Training loss: 2.7547256934461255 - MSE: 1.6089351524297428
Validation loss : 2.7675703980705957 - MSE: 1.609589820176938
Epoch: 11
Training loss: 2.7547256934461255 - MSE: 1.6089351524297428
Validation loss : 2.7675703980705957 - MSE: 1.609589820176938
Epoch: 12
Training loss: 2.7547256934461255 - MSE: 1.6089351524297428
Validation loss : 2.7675703980705957 - MSE: 1.609589820176938
Epoch: 13
Training loss: 2.7547256934461255 - MSE: 1.6089351524297428
Validation loss : 2.7675703980705957 - MSE: 1.609589820176938
Epoch: 14
Training loss: 2.7547256934461255 - MSE: 1.6089351524297428
Validation loss : 2.7675703980705957 - MSE: 1.609589820176938
Epoch: 15
Training loss: 2.7547256934461255 - MSE: 1.6089351524297428
Validation loss : 2.7675703980705957 - MSE: 1.609589820176938
Epoch: 16
Training loss: 2.7547256934461255 - MSE: 1.6089351524297428
Validation loss : 2.7675703980705957 - MSE: 1.609589820176938
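For reference, a minimal sketch of a training/validation loop that would emit log lines in the format above ("Epoch: N", then training loss and MSE, then validation loss and MSE). The model, data, optimizer settings, and the exact objective are not recoverable from this log, so everything below is a hypothetical placeholder; note that in the real run the reported loss differs from the reported MSE, so the true objective is presumably not plain MSE.

    import torch
    import torch.nn as nn

    # Hypothetical stand-ins -- the real model appears to be BERT-based per the
    # config dump above, but none of its training details are recorded here.
    model = nn.Linear(8, 1)
    loss_fn = nn.MSELoss()
    mse_metric = nn.MSELoss()
    optimizer = torch.optim.AdamW(model.parameters(), lr=1e-5)

    x_train, y_train = torch.randn(32, 8), torch.randn(32, 1)  # dummy data
    x_val, y_val = torch.randn(16, 8), torch.randn(16, 1)

    for epoch in range(17):  # epochs 0..16, matching the log
        print(f"Epoch: {epoch}")

        # Training step
        model.train()
        optimizer.zero_grad()
        pred = model(x_train)
        train_loss = loss_fn(pred, y_train)
        train_loss.backward()
        optimizer.step()
        train_mse = mse_metric(pred.detach(), y_train)
        print(f"Training loss: {train_loss.item()} - MSE: {train_mse.item()}")

        # Validation step
        model.eval()
        with torch.no_grad():
            val_pred = model(x_val)
            val_loss = loss_fn(val_pred, y_val)
            val_mse = mse_metric(val_pred, y_val)
        # The extra space before the colon mirrors the exact spacing in the log.
        print(f"Validation loss : {val_loss.item()} - MSE: {val_mse.item()}")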