Started at: 07:12:36
norbert2, 0.001, 320
({'architectures': ['BertForMaskedLM'], 'attention_probs_dropout_prob': 0.1, 'hidden_act': 'gelu', 'hidden_dropout_prob': 0.1, 'hidden_size': 768, 'initializer_range': 0.02, 'intermediate_size': 3072, 'max_position_embeddings': 512, 'model_type': 'bert', 'num_attention_heads': 12, 'num_hidden_layers': 12, 'type_vocab_size': 2, 'vocab_size': 50104, '_commit_hash': 'f22bb47f536f62edfcd86ca9320ade990eafbe22'}, {})
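A minimal sketch (not the original training script), assuming "norbert2" refers to the ltg/norbert2 checkpoint on the Hugging Face Hub and that the per-epoch MAE is the plain mean absolute error; it only illustrates how the config logged above could be loaded and the metric computed:

import torch
from transformers import AutoConfig

# Load the BERT config corresponding to the dict printed above.
# "ltg/norbert2" is an assumed Hub id; the revision is the _commit_hash from the log.
config = AutoConfig.from_pretrained(
    "ltg/norbert2",
    revision="f22bb47f536f62edfcd86ca9320ade990eafbe22",
)
print(config.hidden_size, config.num_hidden_layers, config.vocab_size)

# Assumed definition of the MAE value reported after each epoch.
def mean_absolute_error(predictions: torch.Tensor, targets: torch.Tensor) -> float:
    return torch.mean(torch.abs(predictions - targets)).item()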
Epoch: 0
Training loss: 0.3532784849405289 - MAE: 0.4670037483599114
Validation loss : 0.2768782547542027 - MAE: 0.42022237556116127
Epoch: 1
Training loss: 0.2149739757180214 - MAE: 0.35659756905009127
Validation loss : 0.18670830769198282 - MAE: 0.332010069275546
Epoch: 2
Training loss: 0.19223860129714013 - MAE: 0.3341908845597538
Validation loss : 0.18065378282751357 - MAE: 0.3276773701287296
Epoch: 3
Training loss: 0.18267506137490272 - MAE: 0.32204697756425843
Validation loss : 0.17629351360457285 - MAE: 0.3232328892238157
Epoch: 4
Training loss: 0.17724562138319017 - MAE: 0.3184336083924317
Validation loss : 0.17597177624702454 - MAE: 0.3246827085074674
Epoch: 5
Training loss: 0.17323379665613176 - MAE: 0.315665849870586
Validation loss : 0.1731493834938322 - MAE: 0.3218653565928077
Epoch: 6
Training loss: 0.17015334516763686 - MAE: 0.3124225365765558
Validation loss : 0.16903618616717203 - MAE: 0.3173628836244732
Epoch: 7
Training loss: 0.16733192503452302 - MAE: 0.30821431460419746
Validation loss : 0.16630683200699942 - MAE: 0.31361782968184887
Epoch: 8
Training loss: 0.16473447754979134 - MAE: 0.3063740717882017
Validation loss : 0.1640524651323046 - MAE: 0.31127526838966124
Epoch: 9
Training loss: 0.16429233476519584 - MAE: 0.30495064896348884
Validation loss : 0.1619275254862649 - MAE: 0.308032721089819
Epoch: 10
Training loss: 0.16504003778100013 - MAE: 0.30767144570696153
Validation loss : 0.1616602156843458 - MAE: 0.30507819780145606
Epoch: 11
Training loss: 0.16262706443667413 - MAE: 0.3036855354971657
Validation loss : 0.15870441496372223 - MAE: 0.3020884268212755
Epoch: 12
Training loss: 0.15984584838151933 - MAE: 0.3020569311274707
Validation loss : 0.1584023024354662 - MAE: 0.3011171731134149
Epoch: 13
Training loss: 0.15794557370245457 - MAE: 0.2997596327966126
Validation loss : 0.16060944114412581 - MAE: 0.3023205950101459
Epoch: 14
Training loss: 0.1547440316528082 - MAE: 0.2970264285250956
Validation loss : 0.1580854526587895 - MAE: 0.3008519902551677
Epoch: 15
Training loss: 0.1542632382363081 - MAE: 0.2950711675008595
Validation loss : 0.1567665593964713 - MAE: 0.2992491086220254
Epoch: 16
Training loss: 0.15160554870963097 - MAE: 0.2943624127736374
Validation loss : 0.15607393213680812 - MAE: 0.2993329082850877
Epoch: 17
Training loss: 0.15076256059110166 - MAE: 0.29138762120969075
Validation loss : 0.15495712629386357 - MAE: 0.29848027247589065
Epoch: 18
Training loss: 0.14870206639170647 - MAE: 0.29066662712501057
Validation loss : 0.1542458619390215 - MAE: 0.2984311317874588
Epoch: 19
Training loss: 0.14955925270915033 - MAE: 0.29022475462577546
Validation loss : 0.15516912511416844 - MAE: 0.29738643332620596
Prediction MAE: 0.2804
Finished at: 07:12:36