Started at: 13:54:30
({'architectures': ['BertForMaskedLM'], 'attention_probs_dropout_prob': 0.1, 'hidden_act': 'gelu', 'hidden_dropout_prob': 0.1, 'hidden_size': 768, 'initializer_range': 0.02, 'intermediate_size': 3072, 'max_position_embeddings': 512, 'model_type': 'bert', 'num_attention_heads': 12, 'num_hidden_layers': 12, 'type_vocab_size': 2, 'vocab_size': 50104, '_commit_hash': 'afb829e3d0b861bd5f8cda6522b32ca0b097d7eb'}, {})
Epoch: 0  Training loss: 0.19169998451283105 - MSE: 0.3224822171590252  Validation loss: 0.1545075431931764 - MSE: 0.30456596715475825
Epoch: 1  Training loss: 0.16974398517294934 - MSE: 0.3103326546437019  Validation loss: 0.15334105514921248 - MSE: 0.3032539253831601
Epoch: 2  Training loss: 0.16987535333947132 - MSE: 0.3104551468798216  Validation loss: 0.15374588128179312 - MSE: 0.30371004876315055
Epoch: 3  Training loss: 0.16981600977872546 - MSE: 0.31038815772414813  Validation loss: 0.15414767409674823 - MSE: 0.3041637827186605
Epoch: 4  Training loss: 0.16997354869779788 - MSE: 0.3106644945154869  Validation loss: 0.15224769292399287 - MSE: 0.30202376234910844
Epoch: 5  Training loss: 0.17002245697535967 - MSE: 0.3105952131827796  Validation loss: 0.15425994875840843 - MSE: 0.3042897454938611
Epoch: 6  Training loss: 0.1700476981307331 - MSE: 0.310650163816472  Validation loss: 0.15435221185907722 - MSE: 0.3043926291643402
Epoch: 7  Training loss: 0.16941748441834198 - MSE: 0.31001210870889107  Validation loss: 0.1539384766947478 - MSE: 0.30392832597817687
Epoch: 8  Training loss: 0.16997813641240722 - MSE: 0.3105949710586207  Validation loss: 0.15462648100219667 - MSE: 0.30469803253754435
Epoch: 9  Training loss: 0.17010739684889192 - MSE: 0.31050279334273895  Validation loss: 0.155730452388525 - MSE: 0.30595527761715857
Epoch: 10  Training loss: 0.17010818978673534 - MSE: 0.31067252789705496  Validation loss: 0.15445392183028162 - MSE: 0.3045059578503242
Epoch: 11  Training loss: 0.1698176296133744 - MSE: 0.3104176910766342  Validation loss: 0.1544747839216143 - MSE: 0.30452919958725033
Epoch: 12  Training loss: 0.16994338412033885 - MSE: 0.3105494737531266  Validation loss: 0.15396608458831906 - MSE: 0.30395947231545506
Epoch: 13  Training loss: 0.1698948621357742 - MSE: 0.3105059461998054  Validation loss: 0.1545115364715457 - MSE: 0.3045701227160862
Epoch: 14  Training loss: 0.1699589282666382 - MSE: 0.31056594253224645  Validation loss: 0.1545204329304397 - MSE: 0.304580016388627
Epoch: 15  Training loss: 0.16995860153907225 - MSE: 0.3105668987295108  Validation loss: 0.15452667814679444 - MSE: 0.3045869592820054
Epoch: 16  Training loss: 0.1698562132684808 - MSE: 0.31058698633003384  Validation loss: 0.15058960486203432 - MSE: 0.30260322473577617
Epoch: 17  Training loss: 0.16920853351291856 - MSE: 0.30992660221922314  Validation loss: 0.15110802161507308 - MSE: 0.303181744856829
Epoch: 18  Training loss: 0.16951832692874105 - MSE: 0.31024539454754047  Validation loss: 0.15118786157108843 - MSE: 0.3032720424012041
Epoch: 19  Training loss: 0.16952366397569055 - MSE: 0.31024719080762236  Validation loss: 0.15118418936617672 - MSE: 0.303267858201707
Epoch: 20  Training loss: 0.16951929793546075 - MSE: 0.3102413832574418  Validation loss: 0.1511813565157354 - MSE: 0.30326463116898594
Epoch: 21  Training loss: 0.16951563405363182 - MSE: 0.3102366537550263  Validation loss: 0.15117910131812096 - MSE: 0.30326207003190575
Epoch: 22  Training loss: 0.16951254367044097 - MSE: 0.31023282786217926  Validation loss: 0.15117732365615666 - MSE: 0.3032600467336124
Epoch: 23  Training loss: 0.1695099739651931 - MSE: 0.31022965084000215  Validation loss: 0.15117588196881115 - MSE: 0.30325841342664717
Epoch: 24  Training loss: 0.169507770318734 - MSE: 0.31022694836856174  Validation loss: 0.15117496205493808 - MSE: 0.30325737325574664
Epoch: 25  Training loss: 0.16950593904445047 - MSE: 0.310224739764178  Validation loss: 0.15117412828840315 - MSE: 0.3032564339005148
Epoch: 26  Training loss: 0.16950435136493883 - MSE: 0.31022287116644853  Validation loss: 0.1511734768282622 - MSE: 0.3032556981556809
Epoch: 27  Training loss: 0.16950298979094153 - MSE: 0.3102212937720993  Validation loss: 0.15117311687208712 - MSE: 0.3032552918662077
Epoch: 28  Training loss: 0.1695018798897141 - MSE: 0.3102200012343394  Validation loss: 0.15117282583378255 - MSE: 0.30325496194518564
Epoch: 29  Training loss: 0.16950091852953558 - MSE: 0.31021890611725483  Validation loss: 0.15117264399304986 - MSE: 0.3032547589168644
Epoch: 30  Training loss: 0.16950009982836875 - MSE: 0.3102179851229983  Validation loss: 0.1511725322343409 - MSE: 0.3032546320241636
Epoch: 31  Training loss: 0.1694993701812468 - MSE: 0.3102171767547141  Validation loss: 0.1511724202428013 - MSE: 0.3032545051314628
Epoch: 32  Training loss: 0.1694987534692413 - MSE: 0.3102164999345116  Validation loss: 0.15117244189605117 - MSE: 0.30325453051000295
Epoch: 33  Training loss: 0.16949823667344294 - MSE: 0.31021594102270367  Validation loss: 0.15117246261797845 - MSE: 0.3032545558885431
Epoch: 34  Training loss: 0.16949779610884816 - MSE: 0.31021548358070766  Validation loss: 0.15117257554084063 - MSE: 0.3032546827812439
Epoch: 35  Training loss: 0.16949741824677117 - MSE: 0.31021508199835557  Validation loss: 0.1511726665776223 - MSE: 0.30325478429540453
Epoch: 36  Training loss: 0.1694970832059258 - MSE: 0.3102147305201725  Validation loss: 0.15117280068807304 - MSE: 0.3032549365666455
Epoch: 37  Training loss: 0.16949680725994862 - MSE: 0.31021446009169157  Validation loss: 0.15117288893088698 - MSE: 0.3032550380808061
Epoch: 38  Training loss: 0.16949655209717 - MSE: 0.31021420484031925  Validation loss: 0.15117298113182187 - MSE: 0.30325513959496675
Epoch: 39  Training loss: 0.16949634481417505 - MSE: 0.3102140082869073  Validation loss: 0.15117311687208712 - MSE: 0.3032552918662077
Epoch: 40  Training loss: 0.16949616552967775 - MSE: 0.3102138376725538  Validation loss: 0.15117327379994094 - MSE: 0.3032554695159888
Epoch: 41  Training loss: 0.1694960145573867 - MSE: 0.3102137004872602  Validation loss: 0.15117336553521454 - MSE: 0.30325557103014944
Epoch: 42  Training loss: 0.1694958654673476 - MSE: 0.3102135644057564  Validation loss: 0.1511735450476408 - MSE: 0.3032557740584707
Epoch: 43  Training loss: 0.16949574014073923 - MSE: 0.31021345406620565  Validation loss: 0.1511736111715436 - MSE: 0.3032558501940912
Epoch: 44  Training loss: 0.1694956334013688 - MSE: 0.31021337002050287  Validation loss: 0.1511737012770027 - MSE: 0.3032559517082518
Epoch: 45  Training loss: 0.1694955766593155 - MSE: 0.3102133261448616  Validation loss: 0.1511738603003323 - MSE: 0.3032561293580329
Epoch: 46  Training loss: 0.16949547846850596 - MSE: 0.31021324525284366  Validation loss: 0.15117392782121897 - MSE: 0.3032562054936534
Epoch: 47  Training loss: 0.16949542227544281 - MSE: 0.3102132010224128  Validation loss: 0.15117401676252484 - MSE: 0.30325630700781403
Epoch: 48  Training loss: 0.1694953542398779 - MSE: 0.31021315525456056  Validation loss: 0.1511741520371288 - MSE: 0.303256459279055
Epoch: 49  Training loss: 0.16949533977006612 - MSE: 0.3102131488683486  Validation loss: 0.15117421676404774 - MSE: 0.30325653541467545
Prediction MSE: 0.330
Finished at: 13:54:30
Time taken: 3274 s. 0 days 0 hours 54 minutes 34 seconds