Started at: 15:31:13

Model configuration (BERT checkpoint):
  _name_or_path: /disk4/folder1/working/checkpoints/huggingface/native_pytorch/step4_8/
  attention_probs_dropout_prob: 0.1
  directionality: bidi
  gradient_checkpointing: False
  hidden_act: gelu
  hidden_dropout_prob: 0.1
  hidden_size: 768
  initializer_range: 0.02
  intermediate_size: 3072
  layer_norm_eps: 1e-12
  max_position_embeddings: 512
  model_type: bert
  num_attention_heads: 12
  num_hidden_layers: 12
  pad_token_id: 0
  pooler_fc_size: 768
  pooler_num_attention_heads: 12
  pooler_num_fc_layers: 3
  pooler_size_per_head: 128
  pooler_type: first_token_transform
  position_embedding_type: absolute
  type_vocab_size: 2
  vocab_size: 119547
  _commit_hash: 82b194c0b3ea1fcad65f1eceee04adb26f9f71ac
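The configuration above describes a standard 12-layer BERT-base encoder (the vocab_size of 119547 is consistent with a multilingual BERT vocabulary). As a minimal sketch, assuming the checkpoint is loaded with the Hugging Face transformers library and fine-tuned with a single-output regression head: the MAE metric reported below suggests a regression objective, but num_labels=1, problem_type="regression", and the tokenizer location are assumptions, not recorded in this log.

```python
# Sketch only: load the logged checkpoint for single-target regression.
# num_labels=1 and problem_type="regression" are assumptions inferred from
# the MAE metric in the log; they are not confirmed by the log itself.
from transformers import AutoConfig, AutoTokenizer, AutoModelForSequenceClassification

checkpoint = "/disk4/folder1/working/checkpoints/huggingface/native_pytorch/step4_8/"

config = AutoConfig.from_pretrained(
    checkpoint,
    num_labels=1,               # single regression target
    problem_type="regression",  # the model head then applies MSELoss internally
)
tokenizer = AutoTokenizer.from_pretrained(checkpoint)  # assumes tokenizer files sit alongside the weights
model = AutoModelForSequenceClassification.from_pretrained(checkpoint, config=config)

print(model.config.model_type, model.config.hidden_size)  # expected: bert 768
```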
Per-epoch training and validation metrics (loss and MAE):

Epoch 0: Training loss: 1.2954848909378052 - MAE: 1.050244734015666 | Validation loss: 0.8470989664395651 - MAE: 0.8281701794455048
Epoch 1: Training loss: 0.4445240414142609 - MAE: 0.5411769913687082 | Validation loss: 0.324721054898368 - MAE: 0.43869614013342645
Epoch 2: Training loss: 0.25153143346309664 - MAE: 0.3816801249414825 | Validation loss: 0.23877331614494324 - MAE: 0.3700370086786411
Epoch 3: Training loss: 0.23011713743209838 - MAE: 0.36750653866750815 | Validation loss: 0.22173692286014557 - MAE: 0.3575197016352192
Epoch 4: Training loss: 0.21643910229206084 - MAE: 0.35473120380428824 | Validation loss: 0.2111869735850228 - MAE: 0.3495476388776553
Epoch 5: Training loss: 0.20841679513454436 - MAE: 0.3475277955848527 | Validation loss: 0.2027071151468489 - MAE: 0.3434146884806285
Epoch 6: Training loss: 0.20075951278209686 - MAE: 0.3414739466792711 | Validation loss: 0.19626588291592068 - MAE: 0.3390224698180928
Epoch 7: Training loss: 0.1959322625398636 - MAE: 0.33718433600506764 | Validation loss: 0.19155074821578133 - MAE: 0.33601220203771287
Epoch 8: Training loss: 0.19064979493618012 - MAE: 0.33252034985072276 | Validation loss: 0.18807757563061184 - MAE: 0.33369018106396875
Epoch 9: Training loss: 0.18758945107460023 - MAE: 0.3310842562717609 | Validation loss: 0.18544620275497437 - MAE: 0.33179714328905824
Epoch 10: Training loss: 0.18699397385120392 - MAE: 0.3293496073915552 | Validation loss: 0.1833840227789349 - MAE: 0.3305601596359932
Epoch 11: Training loss: 0.1838909602165222 - MAE: 0.326863464187819 | Validation loss: 0.18136033746931288 - MAE: 0.32891395485147645
Epoch 12: Training loss: 0.18286386430263518 - MAE: 0.325863773872513 | Validation loss: 0.17971531218952602 - MAE: 0.32756646233438397
Epoch 13: Training loss: 0.1797239762544632 - MAE: 0.32309299652089063 | Validation loss: 0.17807276712523568 - MAE: 0.32608809156272295
Epoch 14: Training loss: 0.17857051372528077 - MAE: 0.3213261058182539 | Validation loss: 0.1767602562904358 - MAE: 0.3250377233246472
Epoch 15: Training loss: 0.17911633133888244 - MAE: 0.3217271718139446 | Validation loss: 0.1751896556880739 - MAE: 0.3234474552955444
Epoch 16: Training loss: 0.17600405514240264 - MAE: 0.3189804320611656 | Validation loss: 0.17363134854369694 - MAE: 0.3217130704298945
Epoch 17: Training loss: 0.17391868591308593 - MAE: 0.3165747054747144 | Validation loss: 0.17256545523802438 - MAE: 0.3208692052839463
Epoch 18: Training loss: 0.17313318729400634 - MAE: 0.3169905761419007 | Validation loss: 0.17127088208993277 - MAE: 0.3194913697705306
Epoch 19: Training loss: 0.17114326536655425 - MAE: 0.31336970082229104 | Validation loss: 0.17016845610406664 - MAE: 0.31830314372899243
Epoch 20: Training loss: 0.1727628666162491 - MAE: 0.31565258354376935 | Validation loss: 0.16911302506923676 - MAE: 0.31717762732632004
Epoch 21: Training loss: 0.17012356221675873 - MAE: 0.31316639736858143 | Validation loss: 0.1677489462825987 - MAE: 0.31546195651667186
Epoch 22: Training loss: 0.16859103083610535 - MAE: 0.3127057693660738 | Validation loss: 0.16697113381491768 - MAE: 0.31471562544725196
Epoch 23: Training loss: 0.1686873698234558 - MAE: 0.311427263715106 | Validation loss: 0.16612334880563948 - MAE: 0.3138178950174241
Epoch 24: Training loss: 0.16640773981809617 - MAE: 0.3095450918700528 | Validation loss: 0.16508907741970485 - MAE: 0.3125503103864031
Epoch 25: Training loss: 0.1658923715353012 - MAE: 0.3082243922152051 | Validation loss: 0.16451590259869894 - MAE: 0.3120238609455365
Epoch 26: Training loss: 0.16436728775501253 - MAE: 0.3071946399101465 | Validation loss: 0.16374372442563376 - MAE: 0.3111470200747217
Epoch 27: Training loss: 0.16651157915592193 - MAE: 0.30963943402408695 | Validation loss: 0.16270800100432503 - MAE: 0.3097923430317113
Epoch 28: Training loss: 0.16491072356700898 - MAE: 0.3070697425803366 | Validation loss: 0.1623369190427992 - MAE: 0.3095072696428577
Epoch 29: Training loss: 0.16389427542686463 - MAE: 0.30555235896137595 | Validation loss: 0.16124575005637276 - MAE: 0.308045409739515
Epoch 30: Training loss: 0.16372050642967223 - MAE: 0.3068781009000078 | Validation loss: 0.1606789148516125 - MAE: 0.3074341210225871
Epoch 31: Training loss: 0.16316969752311705 - MAE: 0.3060961831725536 | Validation loss: 0.16020016206635368 - MAE: 0.3069771875888861
Epoch 32: Training loss: 0.1627587217092514 - MAE: 0.30426592400661 | Validation loss: 0.15954788691467708 - MAE: 0.3061654847578481
Epoch 33: Training loss: 0.16183113038539887 - MAE: 0.30331386520448744 | Validation loss: 0.15876509911484188 - MAE: 0.30512034015868056
Epoch 34: Training loss: 0.16216464459896088 - MAE: 0.3041525328658513 | Validation loss: 0.1582637975613276 - MAE: 0.3045167264420522
Epoch 35: Training loss: 0.16202212631702423 - MAE: 0.30412966504962624 | Validation loss: 0.15774812797705332 - MAE: 0.30388464702304596
Epoch 36: Training loss: 0.1605959129333496 - MAE: 0.3021611783717064 | Validation loss: 0.15737126105361515 - MAE: 0.30351526731697764
Epoch 37: Training loss: 0.1595464125275612 - MAE: 0.3019984243682177 | Validation loss: 0.15694588753912184 - MAE: 0.30298915789121267
Epoch 38: Training loss: 0.15968152701854707 - MAE: 0.30079639420604637 | Validation loss: 0.1565687441163593 - MAE: 0.30254428690684504
Epoch 39: Training loss: 0.16053873747587205 - MAE: 0.30269713976447044 | Validation loss: 0.15606227185991076 - MAE: 0.30184525470267004
Epoch 40: Training loss: 0.15907079100608826 - MAE: 0.30137026553661755 | Validation loss: 0.15593233042293125 - MAE: 0.3018705285535124
Epoch 41: Training loss: 0.15838283449411392 - MAE: 0.30094563904283755 | Validation loss: 0.15537505514091915 - MAE: 0.30114440427331457
Epoch 42: Training loss: 0.1576764366030693 - MAE: 0.29919268728124215 | Validation loss: 0.15526685449812147 - MAE: 0.3011537437962524
Epoch 43: Training loss: 0.15699974298477173 - MAE: 0.2988365116562079 | Validation loss: 0.15481639156738916 - MAE: 0.30055854576382945
Epoch 44: Training loss: 0.15606340169906616 - MAE: 0.2979114649606952 | Validation loss: 0.15476271178987291 - MAE: 0.30063485786254074
Epoch 45: Training loss: 0.1577802050113678 - MAE: 0.2999039090758928 | Validation loss: 0.15419750743442112 - MAE: 0.2998442683771937
Epoch 46: Training loss: 0.15624683797359468 - MAE: 0.29888288205111374 | Validation loss: 0.15386036286751428 - MAE: 0.2993970499800218
Epoch 47: Training loss: 0.1557799881696701 - MAE: 0.29786385364428364 | Validation loss: 0.15358521540959677 - MAE: 0.2990845763730278
Epoch 48: Training loss: 0.15620744496583938 - MAE: 0.29782631350358724 | Validation loss: 0.1532444887691074 - MAE: 0.2986619118372658
Epoch 49: Training loss: 0.15512809425592422 - MAE: 0.29642200152591325 | Validation loss: 0.15320832282304764 - MAE: 0.29869317901549597
Epoch 50: Training loss: 0.15422082662582398 - MAE: 0.29702908144831747 | Validation loss: 0.15264475676748487 - MAE: 0.29781067221096785
Epoch 51: Training loss: 0.15521558344364167 - MAE: 0.2982858617512268 | Validation loss: 0.1524453726079729 - MAE: 0.2976454008963719
Epoch 52: Training loss: 0.1535657474398613 - MAE: 0.2953671117793639 | Validation loss: 0.15208186871475643 - MAE: 0.29721065066041774
Epoch 53: Training loss: 0.154012570977211 - MAE: 0.29551133424955356 | Validation loss: 0.15210432642036015 - MAE: 0.29740813232959074
Epoch 54: Training loss: 0.15406817376613616 - MAE: 0.29551959469132016 | Validation loss: 0.15143747876087824 - MAE: 0.29635976838639294
Epoch 55: Training loss: 0.15317835301160812 - MAE: 0.2942490870567716 | Validation loss: 0.15164858847856522 - MAE: 0.29676666791017275
Epoch 56: Training loss: 0.15314337223768235 - MAE: 0.29498007116045316 | Validation loss: 0.15093392216496998 - MAE: 0.2956451306333567
Epoch 57: Training loss: 0.15268929898738862 - MAE: 0.29407656720802344
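Both training and validation metrics are still improving slowly when the log ends at epoch 57. The per-epoch lines are consistent with a conventional PyTorch fine-tuning loop that averages a loss and an L1 (MAE) metric over batches; a minimal sketch is below. The MSE training objective, the optimizer, learning rate, and dataloader layout are assumptions for illustration and are not recorded in this log.

```python
# Sketch only: reproduces the shape of the logged output
# ("Training loss ... MAE ..." / "Validation loss ... MAE ..." per epoch).
# Loss function, optimizer, learning rate, and dataloaders are assumptions.
import torch
from torch.nn import L1Loss, MSELoss

def run_epoch(model, loader, device, optimizer=None):
    """One pass over `loader`; returns (mean loss, mean MAE) over batches."""
    training = optimizer is not None
    model.train(training)
    mse, mae = MSELoss(), L1Loss()
    total_loss, total_mae, n_batches = 0.0, 0.0, 0
    for batch in loader:
        input_ids = batch["input_ids"].to(device)
        attention_mask = batch["attention_mask"].to(device)
        targets = batch["labels"].float().to(device)
        with torch.set_grad_enabled(training):
            preds = model(input_ids=input_ids,
                          attention_mask=attention_mask).logits.squeeze(-1)
            loss = mse(preds, targets)  # assumed MSE training objective
        if training:
            optimizer.zero_grad()
            loss.backward()
            optimizer.step()
        total_loss += loss.item()
        total_mae += mae(preds.detach(), targets).item()
        n_batches += 1
    return total_loss / n_batches, total_mae / n_batches

# Assumed driver loop that would print lines in the logged format:
# optimizer = torch.optim.AdamW(model.parameters(), lr=2e-5)  # assumption
# for epoch in range(num_epochs):
#     train_loss, train_mae = run_epoch(model, train_loader, device, optimizer)
#     val_loss, val_mae = run_epoch(model, val_loader, device)
#     print(f"Epoch: {epoch}")
#     print(f"Training loss: {train_loss} - MAE: {train_mae}")
#     print(f"Validation loss: {val_loss} - MAE: {val_mae}")
```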