Started at: 15:49:06
nb-bert-base, 0.001, 64
({'_name_or_path': '/disk4/folder1/working/checkpoints/huggingface/native_pytorch/step4_8/',
  'attention_probs_dropout_prob': 0.1,
  'directionality': 'bidi',
  'gradient_checkpointing': False,
  'hidden_act': 'gelu',
  'hidden_dropout_prob': 0.1,
  'hidden_size': 768,
  'initializer_range': 0.02,
  'intermediate_size': 3072,
  'layer_norm_eps': 1e-12,
  'max_position_embeddings': 512,
  'model_type': 'bert',
  'num_attention_heads': 12,
  'num_hidden_layers': 12,
  'pad_token_id': 0,
  'pooler_fc_size': 768,
  'pooler_num_attention_heads': 12,
  'pooler_num_fc_layers': 3,
  'pooler_size_per_head': 128,
  'pooler_type': 'first_token_transform',
  'position_embedding_type': 'absolute',
  'type_vocab_size': 2,
  'vocab_size': 119547,
  '_commit_hash': '82b194c0b3ea1fcad65f1eceee04adb26f9f71ac'},
 {})
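The header above records the start time, the model (nb-bert-base), and two run parameters that are presumably the learning rate (0.001) and the batch size (64), followed by the loaded BERT config. The training script itself is not part of this log; the snippet below is only a minimal sketch of the kind of fine-tuning loop that would print the per-epoch lines that follow, assuming a single-output regression head on nb-bert-base (hub id NbAiLab/nb-bert-base is an assumption), MSE as the training loss, MAE as the reported metric, and toy stand-in data in place of the real dataset.

# Hypothetical sketch only; names, hub id, and data are assumptions, not the original script.
import torch
from torch.nn.functional import l1_loss, mse_loss
from torch.utils.data import DataLoader, TensorDataset
from transformers import AutoModelForSequenceClassification, AutoTokenizer

device = "cuda" if torch.cuda.is_available() else "cpu"
tokenizer = AutoTokenizer.from_pretrained("NbAiLab/nb-bert-base")
model = AutoModelForSequenceClassification.from_pretrained(
    "NbAiLab/nb-bert-base", num_labels=1  # single output -> regression head
).to(device)
optimizer = torch.optim.AdamW(model.parameters(), lr=0.001)  # lr taken from the header line

# Toy stand-in data; the real texts and targets behind this log are not shown here.
texts, targets = ["eksempeltekst"] * 8, torch.rand(8)
enc = tokenizer(texts, padding=True, return_tensors="pt")
loader = DataLoader(TensorDataset(enc["input_ids"], enc["attention_mask"], targets),
                    batch_size=64)  # batch size taken from the header line

def run_epoch(data, train):
    """One pass over `data`; returns (mean MSE loss, mean MAE)."""
    model.train(train)
    loss_sum, mae_sum, batches = 0.0, 0.0, 0
    for input_ids, attention_mask, labels in data:
        input_ids, attention_mask, labels = (t.to(device) for t in (input_ids, attention_mask, labels))
        with torch.set_grad_enabled(train):
            preds = model(input_ids=input_ids, attention_mask=attention_mask).logits.squeeze(-1)
            loss = mse_loss(preds, labels)
        if train:
            optimizer.zero_grad()
            loss.backward()
            optimizer.step()
        loss_sum += loss.item()
        mae_sum += l1_loss(preds, labels).item()
        batches += 1
    return loss_sum / batches, mae_sum / batches

for epoch in range(9):  # this excerpt shows epochs 0-8
    print(f"Epoch: {epoch}")
    tr_loss, tr_mae = run_epoch(loader, train=True)
    print(f"Training loss: {tr_loss} - MAE: {tr_mae}")
    va_loss, va_mae = run_epoch(loader, train=False)  # a separate validation loader in practice
    print(f"Validation loss : {va_loss} - MAE: {va_mae}")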
Epoch: 0
Training loss: 0.25718529296643805 - MAE: 0.37999076735911946
Validation loss : 0.1592742403348287 - MAE: 0.3065637084501305
Epoch: 1
Training loss: 0.15984965507129226 - MAE: 0.3011462185386914
Validation loss : 0.14935108996701962 - MAE: 0.2937389535824439
Epoch: 2
Training loss: 0.15315821834585883 - MAE: 0.2948535177806519
Validation loss : 0.1472618676947825 - MAE: 0.2882854062636122
Epoch: 3
Training loss: 0.14876405646403631 - MAE: 0.2911724842484354
Validation loss : 0.14868425713344055 - MAE: 0.28793878924782046
Epoch: 4
Training loss: 0.14827223573670242 - MAE: 0.2906396281733135
Validation loss : 0.15348663451996716 - MAE: 0.2916254111596949
Epoch: 5
Training loss: 0.1450011846844596 - MAE: 0.28871313182558683
Validation loss : 0.148570165263884 - MAE: 0.28736430778508204
Epoch: 6
Training loss: 0.14508122566974524 - MAE: 0.2884730409613601
Validation loss : 0.14304539132298846 - MAE: 0.2826475758886136
Epoch: 7
Training loss: 0.14336689305726927 - MAE: 0.28601634651329577
Validation loss : 0.15827531480427945 - MAE: 0.2975032714579335
Epoch: 8
Training loss: 0.1435294568990216 - MAE: 0.2867421152532087