Started at: 14:05:05
({'_name_or_path': '/disk4/folder1/working/checkpoints/huggingface/native_pytorch/step4_8/', 'attention_probs_dropout_prob': 0.1, 'directionality': 'bidi', 'gradient_checkpointing': False, 'hidden_act': 'gelu', 'hidden_dropout_prob': 0.1, 'hidden_size': 768, 'initializer_range': 0.02, 'intermediate_size': 3072, 'layer_norm_eps': 1e-12, 'max_position_embeddings': 512, 'model_type': 'bert', 'num_attention_heads': 12, 'num_hidden_layers': 12, 'pad_token_id': 0, 'pooler_fc_size': 768, 'pooler_num_attention_heads': 12, 'pooler_num_fc_layers': 3, 'pooler_size_per_head': 128, 'pooler_type': 'first_token_transform', 'position_embedding_type': 'absolute', 'type_vocab_size': 2, 'vocab_size': 119547, '_commit_hash': '82b194c0b3ea1fcad65f1eceee04adb26f9f71ac'}, {})
Epoch: 0
Training loss: 0.4581766623258591 - MAE: 0.5292641480208709
Validation loss : 0.22879805664221445 - MAE: 0.3594399771230401
Epoch: 1
Training loss: 0.19436429917812348 - MAE: 0.3385698724059062
Validation loss : 0.17431378530131447 - MAE: 0.3219149815721847
Epoch: 2
Training loss: 0.17444411993026734 - MAE: 0.31684918185002736
Validation loss : 0.16497086816363865 - MAE: 0.31185002325092886
Epoch: 3
Training loss: 0.1655295819044113 - MAE: 0.3086558034500663
Validation loss : 0.15977457662423453 - MAE: 0.3062515869775394
Epoch: 4
Training loss: 0.16229379653930665 - MAE: 0.3037281934486638
Validation loss : 0.15585461341672474 - MAE: 0.3013561711510056
Epoch: 5
Training loss: 0.15735343039035798 - MAE: 0.299567204911211
Validation loss : 0.1539518212278684 - MAE: 0.2993376751781608
Epoch: 6
Training loss: 0.15725925147533418 - MAE: 0.2996774680828566
Validation loss : 0.15224697606431115 - MAE: 0.2976658126019026
Epoch: 7
Training loss: 0.15550604462623596 - MAE: 0.2963955747939702
Validation loss : 0.15032305154535505 - MAE: 0.29504313159460227
Epoch: 8
Training loss: 0.1532408055663109 - MAE: 0.2955262865803256
Validation loss : 0.1494835408197509 - MAE: 0.2939865938164665
Epoch: 9
Training loss: 0.15284976929426194 - MAE: 0.2941793210474116
Validation loss : 0.1482765343454149 - MAE: 0.2924292223271092
Epoch: 10
Training loss: 0.15122360587120057 - MAE: 0.29247309200113303
Validation loss : 0.14710667315456602 - MAE: 0.2907982624220039
Epoch: 11
Training loss: 0.14901623457670213 - MAE: 0.290842407098567
Validation loss : 0.14528998070293003 - MAE: 0.28806596351171554
Epoch: 12
Training loss: 0.15042486935853958 - MAE: 0.29147855511346055
Validation loss : 0.14535025589995915 - MAE: 0.2886085535457345
Epoch: 13
Training loss: 0.1494656953215599 - MAE: 0.2891299573328347
Validation loss : 0.14426815013090769 - MAE: 0.2873694180544381
Epoch: 14
Training loss: 0.1479952174425125 - MAE: 0.28811694403834054
Validation loss : 0.14319962759812674 - MAE: 0.28503047862306335
Epoch: 15
Training loss: 0.14743517249822616 - MAE: 0.2894441080912908
Validation loss : 0.1425529478324784 - MAE: 0.28427138674014546
Epoch: 16
Training loss: 0.1472056606411934 - MAE: 0.28826150001116063
Validation loss : 0.14231768581602308 - MAE: 0.2836957414049199
Epoch: 17
Training loss: 0.14746369540691376 - MAE: 0.2876719212786702
Validation loss : 0.14165614379776847 - MAE: 0.2825364447060136
Epoch: 18
Training loss: 0.14586206912994384 - MAE: 0.28726508598945344
Validation loss : 0.14172622975375918 - MAE: 0.2823440762876256
Epoch: 19
Training loss: 0.14411092609167098 - MAE: 0.28331525149801606
Validation loss : 0.14173858612775803 - MAE: 0.28165875641190596
Epoch: 20
Training loss: 0.1458412790298462 - MAE: 0.28836162601454185
Validation loss : 0.1420300528407097 - MAE: 0.2818135184418199
Epoch: 21
Training loss: 0.14359384417533874 - MAE: 0.28367008833581675
Validation loss : 0.14278884480396906 - MAE: 0.282470371147959
Epoch: 22
Training loss: 0.14312834650278092 - MAE: 0.2853536132440727
Validation loss : 0.14090417987770504 - MAE: 0.28100331209885826
Epoch: 23
Training loss: 0.14215544492006302 - MAE: 0.2830347538878356
Validation loss : 0.14096137053436703 - MAE: 0.28121853405025504
Epoch: 24
Training loss: 0.13985051870346069 - MAE: 0.2811610997161699
Validation loss : 0.14087682300143772 - MAE: 0.2808628692269156
Epoch: 25
Training loss: 0.14080241113901137 - MAE: 0.28194490194175625
Validation loss : 0.14065353241231707 - MAE: 0.2808099722745957
Epoch: 26
Training loss: 0.1384972044825554 - MAE: 0.2791605790436576
Validation loss : 0.1407366825474633 - MAE: 0.2805860446655857
Epoch: 27
Training loss: 0.13983467131853103 - MAE: 0.28133399854582614
Validation loss : 0.14025809367497763 - MAE: 0.2799297949765587