Started at: 16:49:23
({'architectures': ['BertForMaskedLM'], 'attention_probs_dropout_prob': 0.1, 'hidden_act': 'gelu', 'hidden_dropout_prob': 0.1, 'hidden_size': 768, 'initializer_range': 0.02, 'intermediate_size': 3072, 'max_position_embeddings': 512, 'model_type': 'bert', 'num_attention_heads': 12, 'num_hidden_layers': 12, 'type_vocab_size': 2, 'vocab_size': 32922, '_commit_hash': '075d4e3705390691013e859faffc5696d071e33b'}, {})
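The tuple above is the model configuration as the training script printed it. The epoch lines that follow report a training and validation loss together with a mean absolute error (MAE), which suggests a single-output regression fine-tuning task on top of a BERT masked-LM checkpoint. The sketch below is a minimal, hypothetical reconstruction of a loop that would emit output in this format; it is not the actual training script. The model class, regression head, AdamW optimizer, learning rate, batch size, and the toy random data are all assumptions, and only the hyperparameters copied from the configuration dict are taken from the log (the vocab_size of 32922 indicates a custom tokenizer).

```python
# Hypothetical reconstruction -- the actual training script is not part of this log.
import torch
from torch.utils.data import DataLoader, TensorDataset
from transformers import BertConfig, BertForSequenceClassification

# Configuration matching the dict printed above.
config = BertConfig(
    vocab_size=32922,
    hidden_size=768,
    num_hidden_layers=12,
    num_attention_heads=12,
    intermediate_size=3072,
    max_position_embeddings=512,
    type_vocab_size=2,
    hidden_act="gelu",
    hidden_dropout_prob=0.1,
    attention_probs_dropout_prob=0.1,
    initializer_range=0.02,
    num_labels=1,                  # single regression target (assumption, inferred from the MAE metric)
    problem_type="regression",     # makes the model use an MSE loss on the single logit
)
model = BertForSequenceClassification(config)  # randomly initialised here; the original run loaded a pretrained checkpoint

# Toy random data standing in for the real dataset (shapes only).
input_ids = torch.randint(0, config.vocab_size, (64, 32))
labels = torch.rand(64, 1)
train_loader = DataLoader(TensorDataset(input_ids[:48], labels[:48]), batch_size=8)
val_loader = DataLoader(TensorDataset(input_ids[48:], labels[48:]), batch_size=8)

optimizer = torch.optim.AdamW(model.parameters(), lr=2e-5)

def run_epoch(loader, train):
    """Run one pass over `loader`, return (mean loss, mean absolute error)."""
    model.train(train)
    total_loss, total_abs_err, n = 0.0, 0.0, 0
    for ids, y in loader:
        with torch.set_grad_enabled(train):
            out = model(input_ids=ids, labels=y)
            if train:
                optimizer.zero_grad()
                out.loss.backward()
                optimizer.step()
        total_loss += out.loss.item() * len(y)
        total_abs_err += (out.logits.detach() - y).abs().sum().item()
        n += len(y)
    return total_loss / n, total_abs_err / n

for epoch in range(17):
    print(f"Epoch: {epoch}")
    tr_loss, tr_mae = run_epoch(train_loader, train=True)
    print(f"Training loss: {tr_loss} - MAE: {tr_mae}")
    va_loss, va_mae = run_epoch(val_loader, train=False)
    print(f"Validation loss : {va_loss} - MAE: {va_mae}")
```

The original log resumes below.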
Epoch: 0
Training loss: 0.5667896963655948 - MAE: 0.6061317877768119
Validation loss : 0.203789513025965 - MAE: 0.35416855011653287
Epoch: 1
Training loss: 0.2108965314924717 - MAE: 0.34637541457062393
Validation loss : 0.2082464269229344 - MAE: 0.3600320203398089
Epoch: 2
Training loss: 0.18308445289731026 - MAE: 0.32601322404867966
Validation loss : 0.17594509039606368 - MAE: 0.3242118801233004
Epoch: 3
Training loss: 0.1728629194200039 - MAE: 0.31534360312572923
Validation loss : 0.17077444280896867 - MAE: 0.31773444856437383
Epoch: 4
Training loss: 0.16811294555664064 - MAE: 0.31006240678041075
Validation loss : 0.16851326610360826 - MAE: 0.3153601244520371
Epoch: 5
Training loss: 0.16502456665039061 - MAE: 0.30727297754888744
Validation loss : 0.16689058925424302 - MAE: 0.31343508612956145
Epoch: 6
Training loss: 0.1630000188946724 - MAE: 0.3057954604335588
Validation loss : 0.16549592358725412 - MAE: 0.3118342921838298
Epoch: 7
Training loss: 0.1617749534547329 - MAE: 0.3052927962671703
Validation loss : 0.16392718255519867 - MAE: 0.30942739591218243
Epoch: 8
Training loss: 0.16146896183490753 - MAE: 0.3032446478251277
Validation loss : 0.1630906824554716 - MAE: 0.30861916504093045
Epoch: 9
Training loss: 0.15880076959729195 - MAE: 0.3010108642757721
Validation loss : 0.1622341764824731 - MAE: 0.30763147521892625
Epoch: 10
Training loss: 0.15709432661533357 - MAE: 0.30005160435303907
Validation loss : 0.16156114850725448 - MAE: 0.3069836120415014
Epoch: 11
Training loss: 0.1562284104526043 - MAE: 0.2971917225261673
Validation loss : 0.16086377842085703 - MAE: 0.30616937318847653
Epoch: 12
Training loss: 0.15665148869156836 - MAE: 0.29886107484499885
Validation loss : 0.16063292537416732 - MAE: 0.30627242834325336
Epoch: 13
Training loss: 0.15311288759112357 - MAE: 0.2959721916005743
Validation loss : 0.15952973706381662 - MAE: 0.30466797674604745
Epoch: 14
Training loss: 0.15453336387872696 - MAE: 0.2957146544027995
Validation loss : 0.15920051293713705 - MAE: 0.30456549311198844
Epoch: 15
Training loss: 0.15352084413170813 - MAE: 0.296151794753851
Validation loss : 0.15867003904921667 - MAE: 0.30387889517935024
Epoch: 16
Training loss: 0.15266137197613716 - MAE: 0.29473264327626775