Started at: 16:43:45
Model config: ({'_name_or_path': '/disk4/folder1/working/checkpoints/huggingface/native_pytorch/step4_8/', 'attention_probs_dropout_prob': 0.1, 'directionality': 'bidi', 'gradient_checkpointing': False, 'hidden_act': 'gelu', 'hidden_dropout_prob': 0.1, 'hidden_size': 768, 'initializer_range': 0.02, 'intermediate_size': 3072, 'layer_norm_eps': 1e-12, 'max_position_embeddings': 512, 'model_type': 'bert', 'num_attention_heads': 12, 'num_hidden_layers': 12, 'pad_token_id': 0, 'pooler_fc_size': 768, 'pooler_num_attention_heads': 12, 'pooler_num_fc_layers': 3, 'pooler_size_per_head': 128, 'pooler_type': 'first_token_transform', 'position_embedding_type': 'absolute', 'type_vocab_size': 2, 'vocab_size': 119547, '_commit_hash': '82b194c0b3ea1fcad65f1eceee04adb26f9f71ac'}, {})

Epoch: 0 | Training loss: 0.20329919241860434 - MSE: 0.3402557295436599 | Validation loss: 0.18580390038815411 - MSE: 0.3241432956664492
Epoch: 1 | Training loss: 0.1732676090095854 - MSE: 0.31724595576337034 | Validation loss: 0.16568760144891162 - MSE: 0.31341614153228575
Epoch: 2 | Training loss: 0.16030060382663902 - MSE: 0.3035005222248579 | Validation loss: 0.1516422192481431 - MSE: 0.2875130449669631
Epoch: 3 | Training loss: 0.1554781354509937 - MSE: 0.2979727920176376 | Validation loss: 0.14750216573928343 - MSE: 0.28489314670482657
Epoch: 4 | Training loss: 0.15081479527094038 - MSE: 0.2927710830201153 | Validation loss: 0.14724563451653178 - MSE: 0.2861008250329437
Epoch: 5 | Training loss: 0.1466910548621628 - MSE: 0.28974043263491844 | Validation loss: 0.14556188587889526 - MSE: 0.28255917721777324
Epoch: 6 | Training loss: 0.1464147042048159 - MSE: 0.28871972411446317 | Validation loss: 0.14734464714472945 - MSE: 0.2857798778567098
Epoch: 7 | Training loss: 0.14595557395652467 - MSE: 0.28856819400962264 | Validation loss: 0.1446235611131697 - MSE: 0.28117785966411335
Epoch: 8 | Training loss: 0.1449509447601241 - MSE: 0.2885284100938226 | Validation loss: 0.1455823565748605 - MSE: 0.2817915015842891
Epoch: 9 | Training loss: 0.1447959460771931 - MSE: 0.2872239444923332 | Validation loss: 0.14529388951081218 - MSE: 0.2819837225561805
Epoch: 10 | Training loss: 0.14472211318724046 - MSE: 0.28719455320410664 | Validation loss: 0.14937494666964718 - MSE: 0.2843606734781727
Epoch: 11 | Training loss: 0.144088552139586 - MSE: 0.28760964775038966 | Validation loss: 0.14968568538174484 - MSE: 0.2861486726980596
Epoch: 12 | Training loss: 0.14606445747886213 - MSE: 0.28844804390686263 | Validation loss: 0.1448706823090712 - MSE: 0.28046784730610597
Epoch: 13 | Training loss: 0.14478144813612634 - MSE: 0.2881406565554036 | Validation loss: 0.15223626566655707 - MSE: 0.28788476966778725
Epoch: 14 | Training loss: 0.13903559124106682 - MSE: 0.28273852721941334 | Validation loss: 0.14466467048182632 - MSE: 0.2818046804860061
Epoch: 15 | Training loss: 0.14228089620846177 - MSE: 0.28594855431899785 | Validation loss: 0.14179454258445537 - MSE: 0.279193046159515
Epoch: 16 | Training loss: 0.13887919387236464 - MSE: 0.2823545795451341 | Validation loss: 0.14775521138852293 - MSE: 0.28455426443221177
Epoch: 17 | Training loss: 0.13864240927744637 - MSE: 0.28109458266700194 | Validation loss: 0.14809496587876117 - MSE: 0.28279495799750376
Epoch: 18 | Training loss: 0.1387332887365128 - MSE: 0.28150766164938196 | Validation loss: 0.14517276073721322 - MSE: 0.28345849166153503
Epoch: 19 | Training loss: 0.13895187230052683 - MSE: 0.283011639691845 | Validation loss: 0.13953923958946357 - MSE: 0.2801639866984212
Epoch: 20 | Training loss: 0.13918349640396646 - MSE: 0.2821651974106403 | Validation loss: 0.13979203293495107 - MSE: 0.27861376313959874
Epoch: 21 | Training loss: 0.14063993230613356 - MSE: 0.28420372002952893 | Validation loss: 0.13928090024626616 - MSE: 0.27858824725509246
Epoch: 22 | Training loss: 0.1396664222182356 - MSE: 0.2826851016305493 | Validation loss: 0.14619293684760729 - MSE: 0.28350754677889844
Epoch: 23 | Training loss: 0.1393597536734518 - MSE: 0.2831695874891608 | Validation loss: 0.15760497906894394 - MSE: 0.2929710551416292
Epoch: 24 | Training loss: 0.14330284036506857 - MSE: 0.2854830684384262 | Validation loss: 0.13994943711793784 - MSE: 0.27753756844449295
Epoch: 25 | Training loss: 0.13945241206264133 - MSE: 0.2826231307316076 | Validation loss: 0.14450635089341438 - MSE: 0.2799117028245645
Epoch: 26 | Training loss: 0.1400487285655767 - MSE: 0.2827613417904652 | Validation loss: 0.14119875927766165 - MSE: 0.2802039488934804
Epoch: 27 | Training loss: 0.13957865950344178 - MSE: 0.2835329892746324 | Validation loss: 0.13819768674897426 - MSE: 0.2794946100211438
Epoch: 28 | Training loss: 0.1374973415156004 - MSE: 0.28128534494898844 | Validation loss: 0.14083260241331477 - MSE: 0.2814639894770441
Epoch: 29 | Training loss: 0.13817684208681136 - MSE: 0.28115783518427634 | Validation loss: 0.14037107574668797 - MSE: 0.27838395685726863
Epoch: 30 | Training loss: 0.13721867764207918 - MSE: 0.2802049705394877 | Validation loss: 0.1422693511527596 - MSE: 0.2787260013750915
Epoch: 31 | Training loss: 0.13815009665822015 - MSE: 0.28156990658373454 | Validation loss: 0.1465979339272687 - MSE: 0.2843070852652152
Epoch: 32 | Training loss: 0.13885138959146393 - MSE: 0.2836372357890927 | Validation loss: 0.13849051680528757 - MSE: 0.27857965217533776
Epoch: 33 | Training loss: 0.1401642515952817 - MSE: 0.28342677132398825 | Validation loss: 0.13788489980453794 - MSE: 0.27930783007810955
Epoch: 34 | Training loss: 0.1361726027000979 - MSE: 0.27985046840323413 | Validation loss: 0.16987047333157423 - MSE: 0.30493016221803637
Epoch: 35 | Training loss: 0.1386311886463371 - MSE: 0.28124844572171204 | Validation loss: 0.14169367549545836 - MSE: 0.2802098872485924
Epoch: 36 | Training loss: 0.14259846011167251 - MSE: 0.2853321706129094 | Validation loss: 0.1517656207310431 - MSE: 0.2935094882384407
Epoch: 37 | Training loss: 0.13557088204900625 - MSE: 0.2800028284901953 | Validation loss: 0.14517026417183154 - MSE: 0.2817183299169993
Epoch: 38 | Training loss: 0.13652241110877336 - MSE: 0.2794668161174052 | Validation loss: 0.13902895558964123 - MSE: 0.27818083237902175
Epoch: 39 | Training loss: 0.1393957434193737 - MSE: 0.2823959565417317 | Validation loss: 0.1395419848461946 - MSE: 0.2798299307917249
Epoch: 40 | Training loss: 0.13666216206505213 - MSE: 0.28018458493590065 | Validation loss: 0.15288511528210205 - MSE: 0.2918520444865227
Epoch: 41 | Training loss: 0.13617279751197942 - MSE: 0.27786906350611423 | Validation loss: 0.13901080366111163 - MSE: 0.28098257594247894
Epoch: 42 | Training loss: 0.13920053845356564 - MSE: 0.28319340622171985 | Validation loss: 0.15065034484547196 - MSE: 0.28718627272826913
Epoch: 43 | Training loss: 0.13663471834308605 - MSE: 0.27944792830679593 | Validation loss: 0.1535997837781906 - MSE: 0.2978820976756964
Epoch: 44 | Training loss: 0.1377708750781674 - MSE: 0.28052896536186017 | Validation loss: 0.13884662611014914 - MSE: 0.28029353583997607
Epoch: 45 | Training loss: 0.13761436545909358 - MSE: 0.27968942549565456 | Validation loss: 0.14552195652416258 - MSE: 0.2821291957997396
Epoch: 46 | Training loss: 0.13772785032007295 - MSE: 0.28121185390996406 | Validation loss: 0.14855478478200507 - MSE: 0.2842055445676991
Epoch: 47 | Training loss: 0.13615723759843612 - MSE: 0.27797362166311385
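The lines above are the raw metric dump from the run. As a convenience, here is a minimal, hypothetical Python sketch for post-processing it: the file name train.log, the function names, and the regular expression (written against the reformatted per-epoch lines above) are assumptions, not part of the original training code. It parses each epoch line and reports the epoch with the lowest validation loss.

```python
import re

# Matches lines like:
# Epoch: 0 | Training loss: 0.2032... - MSE: 0.3402... | Validation loss: 0.1858... - MSE: 0.3241...
# The validation part is optional (the last epoch in the log has no validation entry).
LINE_RE = re.compile(
    r"Epoch: (\d+) \| Training loss: ([\d.]+) - MSE: ([\d.]+)"
    r"(?: \| Validation loss: ([\d.]+) - MSE: ([\d.]+))?"
)

def parse_log(text):
    """Return one dict per epoch with float metrics (None where missing)."""
    rows = []
    for m in LINE_RE.finditer(text):
        epoch, tr_loss, tr_mse, va_loss, va_mse = m.groups()
        rows.append({
            "epoch": int(epoch),
            "train_loss": float(tr_loss),
            "train_mse": float(tr_mse),
            "val_loss": float(va_loss) if va_loss else None,
            "val_mse": float(va_mse) if va_mse else None,
        })
    return rows

if __name__ == "__main__":
    with open("train.log") as f:  # assumed file name for the pasted log
        rows = parse_log(f.read())
    # Pick the checkpoint with the lowest validation loss.
    best = min((r for r in rows if r["val_loss"] is not None),
               key=lambda r: r["val_loss"])
    print(f"Best epoch by validation loss: {best['epoch']} "
          f"(val loss {best['val_loss']:.4f}, val MSE {best['val_mse']:.4f})")
```

Run against the log above, this should report epoch 33 as the minimum (validation loss ≈ 0.1379, validation MSE ≈ 0.2793).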