Started at: 11:52:44
({'architectures': ['BertForMaskedLM'], 'attention_probs_dropout_prob': 0.1, 'hidden_act': 'gelu', 'hidden_dropout_prob': 0.1, 'hidden_size': 768, 'initializer_range': 0.02, 'intermediate_size': 3072, 'max_position_embeddings': 512, 'model_type': 'bert', 'num_attention_heads': 12, 'num_hidden_layers': 12, 'type_vocab_size': 2, 'vocab_size': 50104, '_commit_hash': 'afb829e3d0b861bd5f8cda6522b32ca0b097d7eb'}, {})
Epoch: 0 Training loss: 0.19169998451283105 - MSE: 0.3224822171590252 Validation loss : 0.1576854281593114 - MSE: 0.3052585763866773
Epoch: 1 Training loss: 0.170721908933238 - MSE: 0.3106345883186025 Validation loss : 0.14974205638282 - MSE: 0.30164828824445067
Epoch: 2 Training loss: 0.16939535588026047 - MSE: 0.31008999561987205 Validation loss : 0.15034097363241017 - MSE: 0.30232355450380055
Epoch: 3 Training loss: 0.1694463299293267 - MSE: 0.3101087405316537 Validation loss : 0.15061823884025216 - MSE: 0.3026353720119914
Epoch: 4 Training loss: 0.16941545233130456 - MSE: 0.310085018434928 Validation loss : 0.15079936920665205 - MSE: 0.30283750300395695
Epoch: 5 Training loss: 0.16935620966710543 - MSE: 0.31004690146169944 Validation loss : 0.15084448363631964 - MSE: 0.3028875614759272
Epoch: 6 Training loss: 0.16949052591072886 - MSE: 0.3101781524663373 Validation loss : 0.15096625522710383 - MSE: 0.30302316902771054
Epoch: 7 Training loss: 0.16949893361643742 - MSE: 0.310188779241269 Validation loss : 0.15100383502431214 - MSE: 0.30306482790138034
Epoch: 8 Training loss: 0.1694979808440334 - MSE: 0.3101921144601653 Validation loss : 0.15103243081830442 - MSE: 0.30309663850448487
Epoch: 9 Training loss: 0.1694973421724219 - MSE: 0.3101948706230913 Validation loss : 0.1510547660291195 - MSE: 0.303121549986372
Epoch: 10 Training loss: 0.16949688792228698 - MSE: 0.31019701891325113 Validation loss : 0.15107234381139278 - MSE: 0.3031413168423569
Epoch: 11 Training loss: 0.16949651574617938 - MSE: 0.3101986945843064 Validation loss : 0.15108662215061486 - MSE: 0.3031575035779497
Epoch: 12 Training loss: 0.16949623556513535 - MSE: 0.3102000477910796 Validation loss : 0.1510984308551997 - MSE: 0.30317088319088725
Epoch: 13 Training loss: 0.16949602914483924 - MSE: 0.3102011731046972 Validation loss : 0.15110818133689463 - MSE: 0.30318192262302546
Epoch: 14 Training loss: 0.16949585684035953 - MSE: 0.31020210549164046 Validation loss : 0.15111643797717988 - MSE: 0.30319126495260207
Epoch: 15 Training loss: 0.1694957421798455 - MSE: 0.3102031148284955 Validation loss : 0.15112340752966702 - MSE: 0.30319914952951876
Epoch: 16 Training loss: 0.1694956130103061 - MSE: 0.3102041142706642 Validation loss : 0.15112938289530575 - MSE: 0.3032059101165032
Epoch: 17 Training loss: 0.16949550995701237 - MSE: 0.31020503356981505 Validation loss : 0.1511345561593771 - MSE: 0.30321175591188876
Epoch: 18 Training loss: 0.16949546227329657 - MSE: 0.31020586811368406 Validation loss : 0.15113910171203315 - MSE: 0.3032168940185329
Epoch: 19 Training loss: 0.1694954017275258 - MSE: 0.3102065734353139 Validation loss : 0.15114298881962895 - MSE: 0.30322128473881094
Epoch: 20 Training loss: 0.16949532439834195 - MSE: 0.3102071649483396 Validation loss : 0.15114647196605802 - MSE: 0.30322521829612015
Epoch: 21 Training loss: 0.16949528412599313 - MSE: 0.3102077097474076 Validation loss : 0.15114951902069151 - MSE: 0.3032286566226503
Epoch: 22 Training loss: 0.16949621936992595 - MSE: 0.310208494463055 Validation loss : 0.1511614411137998 - MSE: 0.30324212040386556
Epoch: 23 Training loss: 0.16949620082190162 - MSE: 0.3102095742453467 Validation loss : 0.15115444944240153 - MSE: 0.30323422767787633
Epoch: 24 Training loss: 0.1694950992339536 - MSE: 0.3102089707483183 Validation loss : 0.15115649672225118 - MSE: 0.30323653712503074
Epoch: 25 Training loss: 0.16949508558762702 - MSE: 0.3102093517922974 Validation loss : 0.15115846111439168 - MSE: 0.3032387536727583
Epoch: 26 Training loss: 0.16949506166734193 - MSE: 0.31020968383589725 Validation loss : 0.15116004645824432 - MSE: 0.3032405468179604
Epoch: 27 Training loss: 0.16949506578476806 - MSE: 0.3102100091384956 Validation loss : 0.1511616394855082 - MSE: 0.30324234275713025
Epoch: 28 Training loss: 0.1694950695884855 - MSE: 0.31021030116971815 Validation loss : 0.15116294636391103 - MSE: 0.30324382076605616
Epoch: 29 Training loss: 0.16949506362802105 - MSE: 0.31021054124398184 Validation loss : 0.15116422879509628 - MSE: 0.30324526722642986
Epoch: 30 Training loss: 0.16949505092282044 - MSE: 0.3102107802538768 Validation loss : 0.15116519923321903 - MSE: 0.3032463587364873
Epoch: 31 Training loss: 0.16949503472761104 - MSE: 0.3102109880028702 Validation loss : 0.15116616361774504 - MSE: 0.30324744815106897
Epoch: 32 Training loss: 0.1694950102975494 - MSE: 0.3102111595239082 Validation loss : 0.1511670839972794 - MSE: 0.3032484904174453
Epoch: 33 Training loss: 0.16949503817840628 - MSE: 0.3102113451182651 Validation loss : 0.15116780903190374 - MSE: 0.303249302414315
Epoch: 34 Training loss: 0.16949502108128447 - MSE: 0.3102115117116704 Validation loss : 0.15116860624402761 - MSE: 0.30325020067493824
Epoch: 35 Training loss: 0.16949496159428043 - MSE: 0.3102115773871586 Validation loss : 0.1511691091582179 - MSE: 0.30325077460247485
Epoch: 36 Training loss: 0.16949501370913103 - MSE: 0.31021177953836127 Validation loss : 0.15116953826509416 - MSE: 0.30325125679473786
Epoch: 37 Training loss: 0.16959698172776322 - MSE: 0.3102504026385966 Validation loss : 0.15178115805611014 - MSE: 0.30394958434726504
Epoch: 38 Training loss: 0.16956856172335774 - MSE: 0.3102638844444762 Validation loss : 0.15115563897415996 - MSE: 0.3032355727405047
Epoch: 39 Training loss: 0.1694874994064632 - MSE: 0.3102015640433627 Validation loss : 0.15116063226014376 - MSE: 0.30324120677641986
Epoch: 40 Training loss: 0.1694887787103653 - MSE: 0.31020338604539766 Validation loss : 0.1511625424027443 - MSE: 0.3032433639523333
Epoch: 41 Training loss: 0.16948962152788513 - MSE: 0.3102046410937282 Validation loss : 0.15116414008662105 - MSE: 0.30324516582868455
Epoch: 42 Training loss: 0.16949031372603618 - MSE: 0.3102056846480645 Validation loss : 0.15116540039889514 - MSE: 0.3032465870269334
Epoch: 43 Training loss: 0.1694909881996481 - MSE: 0.31020666497101834 Validation loss : 0.15116674755699933 - MSE: 0.3032481097393429
Epoch: 44 Training loss: 0.16949151315187153 - MSE: 0.31020746001498384 Validation loss : 0.15116780903190374 - MSE: 0.30324930253073035
Epoch: 45 Training loss: 0.16949198057777004 - MSE: 0.3102081938774621 Validation loss : 0.15116868331097066 - MSE: 0.3032502922937965
Epoch: 46 Training loss: 0.16949239694758467 - MSE: 0.3102088308429716 Validation loss : 0.1511694707442075 - MSE: 0.30325118054270206
Epoch: 47 Training loss: 0.16949274116440824 - MSE: 0.3102093728825653 Validation loss : 0.1511702830903232 - MSE: 0.30325209417014776
Epoch: 48 Training loss: 0.1694930295803045 - MSE: 0.310209832059088 Validation loss : 0.1511708211619407 - MSE: 0.30325270325511156
Epoch: 49 Training loss: 0.16949331807462792 - MSE: 0.3102102694751848 Validation loss : 0.15117131732404232 - MSE: 0.30325326158299504
Prediction MSE: 0.330
Finished at: 11:52:44
Time taken: 3135 s. 0 days 0 hours 52 minutes 15 seconds