Started at: 13:07:42
norbert2, 0.001, 256
({'architectures': ['BertForMaskedLM'], 'attention_probs_dropout_prob': 0.1, 'hidden_act': 'gelu', 'hidden_dropout_prob': 0.1, 'hidden_size': 768, 'initializer_range': 0.02, 'intermediate_size': 3072, 'max_position_embeddings': 512, 'model_type': 'bert', 'num_attention_heads': 12, 'num_hidden_layers': 12, 'type_vocab_size': 2, 'vocab_size': 50104, '_commit_hash': 'f22bb47f536f62edfcd86ca9320ade990eafbe22'}, {})
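For orientation, the following is a minimal sketch of how a run matching this header and config dump could be set up. It is not the original training script: the exact Hugging Face checkpoint ID, and whether the "256" in the header line is a maximum sequence length or a batch size, are assumptions; the single-output regression head with MSE loss and an MAE metric is inferred from the per-epoch lines below.

import torch
from torch import nn
from transformers import AutoConfig, AutoModel, AutoTokenizer

MODEL_ID = "ltg/norbert2"  # assumption: "norbert2" in the log; the actual hub ID may differ
LEARNING_RATE = 0.001      # from the header line "norbert2, 0.001, 256"
MAX_LENGTH = 256           # assumption: the 256 could instead be the batch size

config = AutoConfig.from_pretrained(MODEL_ID)   # should match the config dict printed above
tokenizer = AutoTokenizer.from_pretrained(MODEL_ID)
encoder = AutoModel.from_pretrained(MODEL_ID)

# Single-output regression head; "Training loss" in the log is consistent with MSE,
# and "MAE" with mean absolute error (L1).
head = nn.Linear(config.hidden_size, 1)
loss_fn = nn.MSELoss()
mae_fn = nn.L1Loss()
optimizer = torch.optim.AdamW(
    list(encoder.parameters()) + list(head.parameters()), lr=LEARNING_RATE
)

def step(batch_texts, targets):
    # Tokenize, encode, and score one batch; returns (MSE loss, MAE) like the log lines.
    enc = tokenizer(batch_texts, truncation=True, max_length=MAX_LENGTH,
                    padding=True, return_tensors="pt")
    hidden = encoder(**enc).last_hidden_state[:, 0]   # [CLS] representation
    preds = head(hidden).squeeze(-1)
    return loss_fn(preds, targets), mae_fn(preds, targets)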
Epoch: 0
Training loss: 0.29203572511672976 - MAE: 0.4185605652063043
Validation loss : 0.20290079547299278 - MAE: 0.3455509528402281
Epoch: 1
Training loss: 0.20370910704135894 - MAE: 0.34512545205719725
Validation loss : 0.18058336940076616 - MAE: 0.3245456262237876
Epoch: 2
Training loss: 0.1859086936712265 - MAE: 0.32826601535007893
Validation loss : 0.17307381166352165 - MAE: 0.3166332618810329
Epoch: 3
Training loss: 0.1757565474510193 - MAE: 0.3194243809752533
Validation loss : 0.1650606112347709 - MAE: 0.3097177455860293
Epoch: 4
Training loss: 0.17038145661354065 - MAE: 0.3124012787965599
Validation loss : 0.1609694245788786 - MAE: 0.3054813623818689
Epoch: 5
Training loss: 0.16121495842933656 - MAE: 0.30260754653397715
Validation loss : 0.1592115279701021 - MAE: 0.30372550751938654
Epoch: 6
Training loss: 0.16413364052772522 - MAE: 0.30532635951090464
Validation loss : 0.15721883045302498 - MAE: 0.30045129273330407
Epoch: 7
Training loss: 0.15969595968723296 - MAE: 0.30192885077315224
Validation loss : 0.15661187138822344 - MAE: 0.2997491487422971
Epoch: 8
Training loss: 0.15666726291179656 - MAE: 0.29833324755755447
Validation loss : 0.15392393701606327 - MAE: 0.2974819475926045
Epoch: 9
Training loss: 0.15374731242656708 - MAE: 0.29566034619320813
Validation loss : 0.15155919889609018 - MAE: 0.2955871184841888
Epoch: 10
Training loss: 0.15196440517902374 - MAE: 0.29395062352241813
Validation loss : 0.1518704816699028 - MAE: 0.29535742031867257
Epoch: 11
Training loss: 0.14995998084545137 - MAE: 0.29091456288550377
Validation loss : 0.14924502538310158 - MAE: 0.29317513193029937
Epoch: 12
Training loss: 0.14995479434728623 - MAE: 0.2908446813915522
Validation loss : 0.14824609375662273 - MAE: 0.2918560397551131
Epoch: 13
Training loss: 0.14749383240938185 - MAE: 0.28855871636879044
Validation loss : 0.14788946178224352 - MAE: 0.291098009006565
Epoch: 14
Training loss: 0.14847832500934602 - MAE: 0.29029853398295835
Validation loss : 0.14755769239531624 - MAE: 0.2906674587492677
Epoch: 15