Started at: 13:11:04
nb-bert-base, 0.001, 256
({'_name_or_path': '/disk4/folder1/working/checkpoints/huggingface/native_pytorch/step4_8/', 'attention_probs_dropout_prob': 0.1, 'directionality': 'bidi', 'gradient_checkpointing': False, 'hidden_act': 'gelu', 'hidden_dropout_prob': 0.1, 'hidden_size': 768, 'initializer_range': 0.02, 'intermediate_size': 3072, 'layer_norm_eps': 1e-12, 'max_position_embeddings': 512, 'model_type': 'bert', 'num_attention_heads': 12, 'num_hidden_layers': 12, 'pad_token_id': 0, 'pooler_fc_size': 768, 'pooler_num_attention_heads': 12, 'pooler_num_fc_layers': 3, 'pooler_size_per_head': 128, 'pooler_type': 'first_token_transform', 'position_embedding_type': 'absolute', 'type_vocab_size': 2, 'vocab_size': 119547, '_commit_hash': '82b194c0b3ea1fcad65f1eceee04adb26f9f71ac'}, {})
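Note (not part of the original log): the header line "nb-bert-base, 0.001, 256" and the BERT config dump above suggest a regression fine-tune tracked with an MSE-style loss and an MAE metric. The sketch below is only one plausible way such a run could be set up, assuming the public Hub id NbAiLab/nb-bert-base stands in for the local checkpoint path, that 0.001 is the learning rate, and that 256 is the batch size (the log itself does not say which value is which).

import torch
from transformers import AutoTokenizer, AutoModelForSequenceClassification

MODEL_NAME = "NbAiLab/nb-bert-base"  # assumed Hub id; the log actually loads a local checkpoint path
LEARNING_RATE = 0.001                # "0.001" from the log header, assumed to be the learning rate
BATCH_SIZE = 256                     # "256" from the log header, assumed to be the batch size

tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
# num_labels=1 gives a single-output regression head, consistent with the
# MSE-style loss values and the MAE metric printed for every epoch below.
model = AutoModelForSequenceClassification.from_pretrained(MODEL_NAME, num_labels=1)
optimizer = torch.optim.AdamW(model.parameters(), lr=LEARNING_RATE)

def mae(preds: torch.Tensor, targets: torch.Tensor) -> float:
    # Mean absolute error, the metric reported next to each loss in the log.
    return (preds - targets).abs().mean().item()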
Epoch: 0
Training loss: 0.47113645792007447 - MAE: 0.5372690642533006
Validation loss : 0.25360237227545845 - MAE: 0.38100557323888096
Epoch: 1
Training loss: 0.1951601755619049 - MAE: 0.3381085981919985
Validation loss : 0.16845231420463985 - MAE: 0.3146699028671109
Epoch: 2
Training loss: 0.1700725769996643 - MAE: 0.3132894074707096
Validation loss : 0.16252560251288944 - MAE: 0.3101315378266
Epoch: 3
Training loss: 0.16299866735935212 - MAE: 0.30571749229812295
Validation loss : 0.1575031735830837 - MAE: 0.3036086169337241
Epoch: 4
Training loss: 0.16002290964126586 - MAE: 0.3030143814677838
Validation loss : 0.1555153015587065 - MAE: 0.30161708462789844
Epoch: 5
Training loss: 0.15610405176877976 - MAE: 0.2988610182826646
Validation loss : 0.15254790584246317 - MAE: 0.2976352880446658
Epoch: 6
Training loss: 0.15590642750263214 - MAE: 0.2972598594587939
Validation loss : 0.15147964821921456 - MAE: 0.29633871076747587
Epoch: 7
Training loss: 0.15364289730787278 - MAE: 0.2946944064180254
Validation loss : 0.1510375224881702 - MAE: 0.29637298396301465
Epoch: 8
Training loss: 0.1531897461414337 - MAE: 0.29468431902613157
Validation loss : 0.1495788519581159 - MAE: 0.29459826933056066
Epoch: 9
Training loss: 0.15212808579206466 - MAE: 0.2927512017766453