Started at: 15:20:16
Model config: ({'_name_or_path': '/disk4/folder1/working/checkpoints/huggingface/native_pytorch/step4_8/', 'attention_probs_dropout_prob': 0.1, 'directionality': 'bidi', 'gradient_checkpointing': False, 'hidden_act': 'gelu', 'hidden_dropout_prob': 0.1, 'hidden_size': 768, 'initializer_range': 0.02, 'intermediate_size': 3072, 'layer_norm_eps': 1e-12, 'max_position_embeddings': 512, 'model_type': 'bert', 'num_attention_heads': 12, 'num_hidden_layers': 12, 'pad_token_id': 0, 'pooler_fc_size': 768, 'pooler_num_attention_heads': 12, 'pooler_num_fc_layers': 3, 'pooler_size_per_head': 128, 'pooler_type': 'first_token_transform', 'position_embedding_type': 'absolute', 'type_vocab_size': 2, 'vocab_size': 119547, '_commit_hash': '82b194c0b3ea1fcad65f1eceee04adb26f9f71ac'}, {})
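For reference, a minimal sketch (not the original training script) of how the checkpoint described by the config above could be re-instantiated with the HuggingFace transformers library. The single-output regression head is an assumption inferred from the MAE metric reported below; the log itself does not state the head type.

```python
# Hedged sketch: load the checkpoint from the path in the config dump above.
# The regression head (num_labels=1, problem_type="regression") is assumed
# from the MAE metric in this log, not confirmed by it.
from transformers import BertConfig, BertForSequenceClassification

CKPT = "/disk4/folder1/working/checkpoints/huggingface/native_pytorch/step4_8/"

config = BertConfig.from_pretrained(CKPT)  # reproduces the dumped config
model = BertForSequenceClassification.from_pretrained(
    CKPT,
    num_labels=1,                # assumed: single regression target
    problem_type="regression",   # assumed: regression-style fine-tuning
)
```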
Epoch: 0  Training loss: 0.6578984168859628 - MAE: 0.6674722109137042  Validation loss: 0.32113080024719237 - MAE: 0.45643225009255417
Epoch: 1  Training loss: 0.25543879431027633 - MAE: 0.3910323237625205  Validation loss: 0.2070162296295166 - MAE: 0.3427800552704421
Epoch: 2  Training loss: 0.20022909687115595 - MAE: 0.34185446647043155  Validation loss: 0.1910509407520294 - MAE: 0.3408369084923425
Epoch: 3  Training loss: 0.18548667545502 - MAE: 0.324754784674793  Validation loss: 0.17563332319259645 - MAE: 0.32411278291801865
Epoch: 4  Training loss: 0.17486068262503698 - MAE: 0.3197417418736398  Validation loss: 0.16728453636169432 - MAE: 0.31319684318909913
Epoch: 5  Training loss: 0.16917219643409437 - MAE: 0.3116997644461795  Validation loss: 0.16494652032852172 - MAE: 0.31156720464741927
Epoch: 6  Training loss: 0.1663729869402372 - MAE: 0.3068122622576569  Validation loss: 0.16140909492969513 - MAE: 0.30731822951750715
Epoch: 7  Training loss: 0.16431669317759 - MAE: 0.3058750525941657  Validation loss: 0.15877128541469573 - MAE: 0.30407342135000287
Epoch: 8  Training loss: 0.16241646844607133 - MAE: 0.3041819685850804  Validation loss: 0.1569153904914856 - MAE: 0.3017167806049155
Epoch: 9  Training loss: 0.16105010303167197 - MAE: 0.3032572371335961  Validation loss: 0.15558656454086303 - MAE: 0.3001891695223057
Epoch: 10  Training loss: 0.15921874344348907 - MAE: 0.3000818604914875  Validation loss: 0.15467585921287536 - MAE: 0.29914423021883907
Epoch: 11  Training loss: 0.15774007026965803 - MAE: 0.2982069839270297  Validation loss: 0.1533532589673996 - MAE: 0.297580975135282
Epoch: 12  Training loss: 0.15732021515186018 - MAE: 0.2976011320081585  Validation loss: 0.15192450284957887 - MAE: 0.29573920735107395
Epoch: 13  Training loss: 0.15720971043293291 - MAE: 0.29652890957399775  Validation loss: 0.1509649246931076 - MAE: 0.29458318810555245
Epoch: 14  Training loss: 0.15484117773862985 - MAE: 0.29611318781416973  Validation loss: 0.1503614068031311 - MAE: 0.2939076775146755
Epoch: 15  Training loss: 0.155086215872031 - MAE: 0.29664500405408706  Validation loss: 0.15024077594280244 - MAE: 0.29392331689441004
Epoch: 16  Training loss: 0.15504858241631433 - MAE: 0.2957256243301222  Validation loss: 0.1485931918025017 - MAE: 0.2914224770774145
Epoch: 17  Training loss: 0.15379978257876176 - MAE: 0.29412706894690854  Validation loss: 0.14767061471939086 - MAE: 0.2902352961206649
Epoch: 18  Training loss: 0.1524271907714697 - MAE: 0.29522762759884463  Validation loss: 0.14687707424163818 - MAE: 0.2890248553978195
Epoch: 19  Training loss: 0.15298244586357704 - MAE: 0.2947185932796371  Validation loss: 0.14664213359355927 - MAE: 0.28895800873643585
Epoch: 20  Training loss: 0.15084516085111177 - MAE: 0.292000504315752  Validation loss: 0.14631550312042235 - MAE: 0.28849219420284944
Epoch: 21  Training loss: 0.15149428523503816 - MAE: 0.2936117120334934  Validation loss: 0.14516318440437317 - MAE: 0.28667382433218613
Epoch: 22  Training loss: 0.1504942155801333 - MAE: 0.2927925834077908  Validation loss: 0.1446590781211853 - MAE: 0.28609412944511414
Epoch: 23  Training loss: 0.1504766345024109 - MAE: 0.29175161025587215  Validation loss: 0.14430544078350066 - MAE: 0.28555207548422323
Epoch: 24  Training loss: 0.15013286700615516 - MAE: 0.2912126204325197  Validation loss: 0.1440100222826004 - MAE: 0.28499906257314495
Epoch: 25  Training loss: 0.14975401071401742 - MAE: 0.2924956689023364  Validation loss: 0.14365987926721574 - MAE: 0.28487079413975225
Epoch: 26  Training loss: 0.1486645724910956 - MAE: 0.2897513086297186  Validation loss: 0.14352002441883088 - MAE: 0.28460829742287724
Epoch: 27  Training loss: 0.14726154735455146 - MAE: 0.289499577762165  Validation loss: 0.14327441155910492 - MAE: 0.2841265074565353
Epoch: 28  Training loss: 0.1473093697657952 - MAE: 0.28847736514612604  Validation loss: 0.14346123337745667 - MAE: 0.28399052003823066
Epoch: 29  Training loss: 0.14938327096975768 - MAE: 0.29230637823618844  Validation loss: 0.14395955055952073 - MAE: 0.2843064640604636
Epoch: 30  Training loss: 0.14854242480718172 - MAE: 0.29098683805232695  Validation loss: 0.14319908618927002 - MAE: 0.2834741994556754
Epoch: 31  Training loss: 0.14840638179045457 - MAE: 0.29053861147428733  Validation loss: 0.1427803859114647 - MAE: 0.2833583845099959
Epoch: 32  Training loss: 0.1459128730572187 - MAE: 0.2879114311089722  Validation loss: 0.14306370466947554 - MAE: 0.2832036745205818
Epoch: 33  Training loss: 0.14653718242278466 - MAE: 0.2880150190532383  Validation loss: 0.1422912061214447 - MAE: 0.28235958335404876
Epoch: 34  Training loss: 0.14544093838104835 - MAE: 0.28814428108456663  Validation loss: 0.1417948916554451 - MAE: 0.28217832070705684
Epoch: 35  Training loss: 0.14233749818343383 - MAE: 0.2848128630365372  Validation loss: 0.14197540581226348 - MAE: 0.2819948387701627
Epoch: 36  Training loss: 0.14591436661206758 - MAE: 0.2867867222453149  Validation loss: 0.14212212413549424 - MAE: 0.28220088665256465
Epoch: 37  Training loss: 0.14779657813218924 - MAE: 0.2896514004121749  Validation loss: 0.14221723079681398 - MAE: 0.28241207694137505
Epoch: 38  Training loss: 0.14406724446094954 - MAE: 0.28502251664532363  Validation loss: 0.141465026140213 - MAE: 0.28188930577870985
Epoch: 39  Training loss: 0.14614520508509415 - MAE: 0.2860779968086246  Validation loss: 0.14187605679035187 - MAE: 0.2822775998655205
Epoch: 40  Training loss: 0.1447937310888217 - MAE: 0.2860658662966973  Validation loss: 0.1419243961572647 - MAE: 0.2820589519403943
Epoch: 41  Training loss: 0.1429994381391085 - MAE: 0.2839222755689062  Validation loss: 0.1411146491765976 - MAE: 0.2814599892237311
Epoch: 42  Training loss: 0.14441114434829125 - MAE: 0.2875613457441875  Validation loss: 0.14121354520320892 - MAE: 0.28157821050262266
Epoch: 43  Training loss: 0.14265902397724298 - MAE: 0.2836004137758473  Validation loss: 0.1405272126197815 - MAE: 0.28080432792609217
Epoch: 44  Training loss: 0.1434896015203916 - MAE: 0.28443744152757766  Validation loss: 0.14023690074682235 - MAE: 0.28072583348933833
Epoch: 45  Training loss: 0.1423586578323291 - MAE: 0.28374326575148284  Validation loss: 0.1404753804206848 - MAE: 0.2810644422827585
Epoch: 46  Training loss: 0.14247320707027727 - MAE: 0.28401886364945994  Validation loss: 0.14009937793016433 - MAE: 0.2806794477226281
Epoch: 47  Training loss: 0.14120651552310357 - MAE: 0.28229517384472164  Validation loss: 0.139960640668869 - MAE: 0.2806472530553984
Epoch: 48  Training loss: 0.14218630813635313 - MAE: 0.2832958298045566  Validation loss: 0.1400438129901886 - MAE: 0.2806376852174643
Epoch: 49  Training loss: 0.14072744032511345 - MAE: 0.2815358373971234  Validation loss: 0.13983349055051802 - MAE: 0.28035547979667097
Epoch: 50
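The run levels off around a validation MAE of 0.28 in the final epochs. For context, below is a generic sketch of the kind of per-epoch loop that reports a loss and an MAE for both splits in this format. It is an assumption, not the script that produced this log: the model, optimizer, and dataloader names are placeholders, and MSE is assumed as the training loss since the log only names the MAE metric.

```python
# Hedged sketch of a per-epoch train/validate loop that would emit lines like
# "Epoch: N Training loss: ... - MAE: ... Validation loss: ... - MAE: ...".
# Assumptions: the loaders yield HuggingFace-style batches with a "labels"
# key, the model has a single-output regression head, and MSE is the loss.
import torch
import torch.nn.functional as F

def run_epoch(model, loader, optimizer=None, device="cuda"):
    """Run one pass over `loader`; return (mean loss, mean MAE)."""
    training = optimizer is not None
    model.train(training)
    total_loss, total_mae, n_batches = 0.0, 0.0, 0
    for batch in loader:
        inputs = {k: v.to(device) for k, v in batch.items() if k != "labels"}
        labels = batch["labels"].float().to(device)
        with torch.set_grad_enabled(training):
            preds = model(**inputs).logits.squeeze(-1)   # shape: (batch,)
            loss = F.mse_loss(preds, labels)             # assumed loss
        if training:
            optimizer.zero_grad()
            loss.backward()
            optimizer.step()
        total_loss += loss.item()
        total_mae += (preds - labels).abs().mean().item()
        n_batches += 1
    return total_loss / n_batches, total_mae / n_batches

# Typical driver (placeholder names):
# for epoch in range(50):
#     tr_loss, tr_mae = run_epoch(model, train_loader, optimizer)
#     va_loss, va_mae = run_epoch(model, val_loader)
#     print(f"Epoch: {epoch} Training loss: {tr_loss} - MAE: {tr_mae} "
#           f"Validation loss: {va_loss} - MAE: {va_mae}")
```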