{
 "cells": [
  {
   "cell_type": "code",
   "outputs": [],
   "source": [
    "import warnings  # provides a mechanism for handling warning messages\n",
    "import torch\n",
    "from train import Train\n",
    "from utils import plot_auc_curves, plot_prc_curves"
   ],
   "metadata": {
    "collapsed": false,
    "ExecuteTime": {
     "end_time": "2024-05-07T03:39:00.871301100Z",
     "start_time": "2024-05-07T03:38:57.900503900Z"
    }
   },
   "id": "b3112d9d5ef5a53f",
   "execution_count": 1
  },
  {
   "cell_type": "code",
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "sample_num 50\n",
      "dropout 0.2\n",
      "## vertices: 285\n",
      "## edges: 2760\n",
      "## disease nodes: 44\n",
      "## metabolite nodes:  241\n",
      "## microbe nodes:  308\n",
      "Training for Fold 1\n",
      "## Training edges: 2208\n",
      "## Testing edges: 552\n",
      "Epoch: 1 Train Loss: 1.0353 Val Loss: 1.3492 Acc: 0.6558 Pre: 0.6098 Recall: 0.9190 F1: 0.7331 Train AUC: 0.4427 Val AUC: 0.6674 Time: 12.77\n",
      "Epoch: 2 Train Loss: 1.4891 Val Loss: 1.1157 Acc: 0.5217 Pre: 1.0000 Recall: 0.0704 F1: 0.1316 Train AUC: 0.5602 Val AUC: 0.7277 Time: 11.42\n",
      "Epoch: 3 Train Loss: 1.5243 Val Loss: 0.5385 Acc: 0.7228 Pre: 0.6795 Recall: 0.8732 F1: 0.7643 Train AUC: 0.5705 Val AUC: 0.8364 Time: 11.38\n",
      "Epoch: 4 Train Loss: 0.7830 Val Loss: 0.6248 Acc: 0.6232 Pre: 0.5798 Recall: 0.9718 F1: 0.7263 Train AUC: 0.6649 Val AUC: 0.8985 Time: 11.50\n",
      "Epoch: 5 Train Loss: 0.7628 Val Loss: 0.4676 Acc: 0.7572 Pre: 0.7143 Recall: 0.8803 F1: 0.7886 Train AUC: 0.7923 Val AUC: 0.8864 Time: 11.38\n",
      "Epoch: 6 Train Loss: 0.6186 Val Loss: 0.4279 Acc: 0.7953 Pre: 0.8940 Recall: 0.6831 F1: 0.7745 Train AUC: 0.8014 Val AUC: 0.8938 Time: 11.34\n",
      "Epoch: 7 Train Loss: 0.6892 Val Loss: 0.4362 Acc: 0.8406 Pre: 0.9804 Recall: 0.7042 F1: 0.8197 Train AUC: 0.7307 Val AUC: 0.9198 Time: 11.52\n",
      "Epoch: 8 Train Loss: 0.6944 Val Loss: 0.3855 Acc: 0.8406 Pre: 0.9336 Recall: 0.7430 F1: 0.8275 Train AUC: 0.7799 Val AUC: 0.9117 Time: 11.41\n",
      "Epoch: 9 Train Loss: 0.5357 Val Loss: 0.3524 Acc: 0.8460 Pre: 0.8645 Recall: 0.8310 F1: 0.8474 Train AUC: 0.8310 Val AUC: 0.9154 Time: 11.39\n",
      "Epoch: 10 Train Loss: 0.4828 Val Loss: 0.3497 Acc: 0.8333 Pre: 0.8288 Recall: 0.8521 F1: 0.8403 Train AUC: 0.8606 Val AUC: 0.9218 Time: 11.41\n",
      "Epoch: 11 Train Loss: 0.5107 Val Loss: 0.3394 Acc: 0.8496 Pre: 0.8454 Recall: 0.8662 F1: 0.8557 Train AUC: 0.8617 Val AUC: 0.9277 Time: 11.36\n",
      "Epoch: 12 Train Loss: 0.4917 Val Loss: 0.3229 Acc: 0.8659 Pre: 0.8918 Recall: 0.8415 F1: 0.8659 Train AUC: 0.8748 Val AUC: 0.9306 Time: 11.36\n",
      "Epoch: 13 Train Loss: 0.4440 Val Loss: 0.3229 Acc: 0.8659 Pre: 0.9234 Recall: 0.8063 F1: 0.8609 Train AUC: 0.8898 Val AUC: 0.9301 Time: 15.82\n",
      "Epoch: 14 Train Loss: 0.4335 Val Loss: 0.3374 Acc: 0.8514 Pre: 0.9280 Recall: 0.7711 F1: 0.8423 Train AUC: 0.8861 Val AUC: 0.9294 Time: 14.86\n",
      "Epoch: 15 Train Loss: 0.4771 Val Loss: 0.3516 Acc: 0.8514 Pre: 0.9391 Recall: 0.7606 F1: 0.8405 Train AUC: 0.8729 Val AUC: 0.9292 Time: 13.08\n",
      "Epoch: 16 Train Loss: 0.5338 Val Loss: 0.3409 Acc: 0.8587 Pre: 0.9402 Recall: 0.7746 F1: 0.8494 Train AUC: 0.8542 Val AUC: 0.9303 Time: 12.38\n",
      "Epoch: 17 Train Loss: 0.4659 Val Loss: 0.3263 Acc: 0.8641 Pre: 0.9265 Recall: 0.7993 F1: 0.8582 Train AUC: 0.8818 Val AUC: 0.9311 Time: 11.40\n",
      "Epoch: 18 Train Loss: 0.4352 Val Loss: 0.3224 Acc: 0.8514 Pre: 0.9008 Recall: 0.7993 F1: 0.8470 Train AUC: 0.8871 Val AUC: 0.9311 Time: 11.33\n",
      "Epoch: 19 Train Loss: 0.4386 Val Loss: 0.3268 Acc: 0.8587 Pre: 0.8843 Recall: 0.8345 F1: 0.8587 Train AUC: 0.8838 Val AUC: 0.9311 Time: 11.38\n",
      "Epoch: 20 Train Loss: 0.4197 Val Loss: 0.3307 Acc: 0.8587 Pre: 0.8787 Recall: 0.8415 F1: 0.8597 Train AUC: 0.8909 Val AUC: 0.9300 Time: 11.36\n",
      "Epoch: 21 Train Loss: 0.4311 Val Loss: 0.3303 Acc: 0.8569 Pre: 0.8839 Recall: 0.8310 F1: 0.8566 Train AUC: 0.8843 Val AUC: 0.9290 Time: 11.47\n",
      "Epoch: 22 Train Loss: 0.4378 Val Loss: 0.3272 Acc: 0.8569 Pre: 0.8927 Recall: 0.8204 F1: 0.8550 Train AUC: 0.8889 Val AUC: 0.9291 Time: 11.67\n",
      "Epoch: 23 Train Loss: 0.4264 Val Loss: 0.3278 Acc: 0.8533 Pre: 0.9109 Recall: 0.7923 F1: 0.8475 Train AUC: 0.8849 Val AUC: 0.9295 Time: 11.82\n",
      "Epoch: 24 Train Loss: 0.3940 Val Loss: 0.3329 Acc: 0.8569 Pre: 0.9253 Recall: 0.7852 F1: 0.8495 Train AUC: 0.8984 Val AUC: 0.9297 Time: 12.73\n",
      "Epoch: 25 Train Loss: 0.3895 Val Loss: 0.3368 Acc: 0.8569 Pre: 0.9289 Recall: 0.7817 F1: 0.8489 Train AUC: 0.9079 Val AUC: 0.9305 Time: 13.04\n",
      "Epoch: 26 Train Loss: 0.4065 Val Loss: 0.3344 Acc: 0.8587 Pre: 0.9328 Recall: 0.7817 F1: 0.8506 Train AUC: 0.9028 Val AUC: 0.9310 Time: 13.44\n",
      "Epoch: 27 Train Loss: 0.3916 Val Loss: 0.3307 Acc: 0.8623 Pre: 0.9298 Recall: 0.7923 F1: 0.8555 Train AUC: 0.9063 Val AUC: 0.9310 Time: 13.93\n",
      "Epoch: 28 Train Loss: 0.3824 Val Loss: 0.3275 Acc: 0.8533 Pre: 0.9109 Recall: 0.7923 F1: 0.8475 Train AUC: 0.9086 Val AUC: 0.9307 Time: 13.65\n",
      "Epoch: 29 Train Loss: 0.3720 Val Loss: 0.3264 Acc: 0.8514 Pre: 0.9008 Recall: 0.7993 F1: 0.8470 Train AUC: 0.9102 Val AUC: 0.9300 Time: 12.58\n",
      "Epoch: 30 Train Loss: 0.3855 Val Loss: 0.3270 Acc: 0.8478 Pre: 0.8876 Recall: 0.8063 F1: 0.8450 Train AUC: 0.9030 Val AUC: 0.9294 Time: 11.66\n",
      "Epoch: 31 Train Loss: 0.3863 Val Loss: 0.3284 Acc: 0.8478 Pre: 0.8846 Recall: 0.8099 F1: 0.8456 Train AUC: 0.9022 Val AUC: 0.9294 Time: 11.43\n",
      "Epoch: 32 Train Loss: 0.3985 Val Loss: 0.3279 Acc: 0.8496 Pre: 0.8851 Recall: 0.8134 F1: 0.8477 Train AUC: 0.8978 Val AUC: 0.9294 Time: 11.62\n",
      "Epoch: 33 Train Loss: 0.3934 Val Loss: 0.3267 Acc: 0.8496 Pre: 0.8851 Recall: 0.8134 F1: 0.8477 Train AUC: 0.9040 Val AUC: 0.9298 Time: 12.21\n",
      "Epoch: 34 Train Loss: 0.4153 Val Loss: 0.3233 Acc: 0.8424 Pre: 0.8803 Recall: 0.8028 F1: 0.8398 Train AUC: 0.8914 Val AUC: 0.9310 Time: 12.80\n",
      "Epoch: 35 Train Loss: 0.3663 Val Loss: 0.3206 Acc: 0.8478 Pre: 0.8937 Recall: 0.7993 F1: 0.8439 Train AUC: 0.9118 Val AUC: 0.9326 Time: 13.06\n",
      "Epoch: 36 Train Loss: 0.3619 Val Loss: 0.3192 Acc: 0.8460 Pre: 0.8964 Recall: 0.7923 F1: 0.8411 Train AUC: 0.9156 Val AUC: 0.9341 Time: 13.01\n",
      "Epoch: 37 Train Loss: 0.3410 Val Loss: 0.3187 Acc: 0.8478 Pre: 0.9000 Recall: 0.7923 F1: 0.8427 Train AUC: 0.9252 Val AUC: 0.9346 Time: 13.20\n",
      "Epoch: 38 Train Loss: 0.3738 Val Loss: 0.3177 Acc: 0.8496 Pre: 0.9004 Recall: 0.7958 F1: 0.8449 Train AUC: 0.9129 Val AUC: 0.9349 Time: 12.62\n",
      "Epoch: 39 Train Loss: 0.3721 Val Loss: 0.3171 Acc: 0.8514 Pre: 0.9008 Recall: 0.7993 F1: 0.8470 Train AUC: 0.9108 Val AUC: 0.9353 Time: 11.74\n",
      "Epoch: 40 Train Loss: 0.3823 Val Loss: 0.3163 Acc: 0.8533 Pre: 0.9044 Recall: 0.7993 F1: 0.8486 Train AUC: 0.9069 Val AUC: 0.9359 Time: 11.74\n",
      "Epoch: 41 Train Loss: 0.3525 Val Loss: 0.3158 Acc: 0.8533 Pre: 0.9044 Recall: 0.7993 F1: 0.8486 Train AUC: 0.9211 Val AUC: 0.9359 Time: 12.05\n",
      "Epoch: 42 Train Loss: 0.3809 Val Loss: 0.3144 Acc: 0.8496 Pre: 0.8911 Recall: 0.8063 F1: 0.8466 Train AUC: 0.9117 Val AUC: 0.9358 Time: 12.51\n",
      "Epoch: 43 Train Loss: 0.3758 Val Loss: 0.3126 Acc: 0.8496 Pre: 0.8851 Recall: 0.8134 F1: 0.8477 Train AUC: 0.9098 Val AUC: 0.9364 Time: 13.12\n",
      "Epoch: 44 Train Loss: 0.3589 Val Loss: 0.3114 Acc: 0.8514 Pre: 0.8855 Recall: 0.8169 F1: 0.8498 Train AUC: 0.9176 Val AUC: 0.9368 Time: 13.36\n",
      "Epoch: 45 Train Loss: 0.3554 Val Loss: 0.3103 Acc: 0.8533 Pre: 0.8830 Recall: 0.8239 F1: 0.8525 Train AUC: 0.9200 Val AUC: 0.9374 Time: 12.63\n",
      "Epoch: 46 Train Loss: 0.3585 Val Loss: 0.3091 Acc: 0.8587 Pre: 0.8843 Recall: 0.8345 F1: 0.8587 Train AUC: 0.9190 Val AUC: 0.9377 Time: 12.50\n",
      "Epoch: 47 Train Loss: 0.3499 Val Loss: 0.3078 Acc: 0.8605 Pre: 0.8848 Recall: 0.8380 F1: 0.8608 Train AUC: 0.9235 Val AUC: 0.9381 Time: 12.10\n",
      "Epoch: 48 Train Loss: 0.3488 Val Loss: 0.3073 Acc: 0.8569 Pre: 0.8897 Recall: 0.8239 F1: 0.8556 Train AUC: 0.9215 Val AUC: 0.9385 Time: 12.61\n",
      "Epoch: 49 Train Loss: 0.3914 Val Loss: 0.3066 Acc: 0.8551 Pre: 0.8953 Recall: 0.8134 F1: 0.8524 Train AUC: 0.9046 Val AUC: 0.9392 Time: 13.14\n",
      "Epoch: 50 Train Loss: 0.3525 Val Loss: 0.3073 Acc: 0.8551 Pre: 0.9048 Recall: 0.8028 F1: 0.8507 Train AUC: 0.9210 Val AUC: 0.9398 Time: 13.36\n",
      "Epoch: 51 Train Loss: 0.3486 Val Loss: 0.3085 Acc: 0.8569 Pre: 0.9150 Recall: 0.7958 F1: 0.8512 Train AUC: 0.9230 Val AUC: 0.9403 Time: 12.83\n",
      "Epoch: 52 Train Loss: 0.3406 Val Loss: 0.3104 Acc: 0.8533 Pre: 0.9109 Recall: 0.7923 F1: 0.8475 Train AUC: 0.9242 Val AUC: 0.9405 Time: 12.25\n",
      "Epoch: 53 Train Loss: 0.3293 Val Loss: 0.3114 Acc: 0.8569 Pre: 0.9084 Recall: 0.8028 F1: 0.8523 Train AUC: 0.9321 Val AUC: 0.9405 Time: 12.11\n",
      "Epoch: 54 Train Loss: 0.3493 Val Loss: 0.3136 Acc: 0.8587 Pre: 0.8962 Recall: 0.8204 F1: 0.8566 Train AUC: 0.9247 Val AUC: 0.9402 Time: 12.52\n",
      "Epoch: 55 Train Loss: 0.3470 Val Loss: 0.3135 Acc: 0.8569 Pre: 0.8810 Recall: 0.8345 F1: 0.8571 Train AUC: 0.9278 Val AUC: 0.9398 Time: 13.08\n",
      "Epoch: 56 Train Loss: 0.3305 Val Loss: 0.3131 Acc: 0.8659 Pre: 0.8804 Recall: 0.8556 F1: 0.8679 Train AUC: 0.9328 Val AUC: 0.9393 Time: 13.11\n",
      "Epoch: 57 Train Loss: 0.3433 Val Loss: 0.3094 Acc: 0.8659 Pre: 0.8804 Recall: 0.8556 F1: 0.8679 Train AUC: 0.9270 Val AUC: 0.9399 Time: 12.88\n",
      "Epoch: 58 Train Loss: 0.3453 Val Loss: 0.3029 Acc: 0.8641 Pre: 0.8828 Recall: 0.8486 F1: 0.8654 Train AUC: 0.9273 Val AUC: 0.9408 Time: 12.15\n",
      "Epoch: 59 Train Loss: 0.3320 Val Loss: 0.2989 Acc: 0.8623 Pre: 0.8852 Recall: 0.8415 F1: 0.8628 Train AUC: 0.9314 Val AUC: 0.9413 Time: 11.84\n",
      "Epoch: 60 Train Loss: 0.3265 Val Loss: 0.2977 Acc: 0.8678 Pre: 0.8981 Recall: 0.8380 F1: 0.8670 Train AUC: 0.9313 Val AUC: 0.9415 Time: 12.16\n",
      "Epoch: 61 Train Loss: 0.3377 Val Loss: 0.2983 Acc: 0.8714 Pre: 0.9080 Recall: 0.8345 F1: 0.8697 Train AUC: 0.9285 Val AUC: 0.9419 Time: 12.56\n",
      "Epoch: 62 Train Loss: 0.3513 Val Loss: 0.2971 Acc: 0.8696 Pre: 0.9015 Recall: 0.8380 F1: 0.8686 Train AUC: 0.9267 Val AUC: 0.9419 Time: 13.05\n",
      "Epoch: 63 Train Loss: 0.3479 Val Loss: 0.2960 Acc: 0.8641 Pre: 0.8856 Recall: 0.8451 F1: 0.8649 Train AUC: 0.9259 Val AUC: 0.9421 Time: 13.27\n",
      "Epoch: 64 Train Loss: 0.3287 Val Loss: 0.2961 Acc: 0.8641 Pre: 0.8828 Recall: 0.8486 F1: 0.8654 Train AUC: 0.9311 Val AUC: 0.9423 Time: 13.34\n",
      "Epoch: 65 Train Loss: 0.3275 Val Loss: 0.2970 Acc: 0.8641 Pre: 0.8800 Recall: 0.8521 F1: 0.8658 Train AUC: 0.9318 Val AUC: 0.9427 Time: 12.90\n",
      "Epoch: 66 Train Loss: 0.3179 Val Loss: 0.2993 Acc: 0.8623 Pre: 0.8796 Recall: 0.8486 F1: 0.8638 Train AUC: 0.9358 Val AUC: 0.9430 Time: 12.05\n",
      "Epoch: 67 Train Loss: 0.3110 Val Loss: 0.3025 Acc: 0.8641 Pre: 0.8885 Recall: 0.8415 F1: 0.8644 Train AUC: 0.9395 Val AUC: 0.9428 Time: 11.58\n",
      "Epoch: 68 Train Loss: 0.3482 Val Loss: 0.3021 Acc: 0.8678 Pre: 0.8951 Recall: 0.8415 F1: 0.8675 Train AUC: 0.9259 Val AUC: 0.9435 Time: 11.79\n",
      "Epoch: 69 Train Loss: 0.3317 Val Loss: 0.2998 Acc: 0.8678 Pre: 0.8981 Recall: 0.8380 F1: 0.8670 Train AUC: 0.9336 Val AUC: 0.9442 Time: 12.29\n",
      "Epoch: 70 Train Loss: 0.3285 Val Loss: 0.2987 Acc: 0.8659 Pre: 0.8977 Recall: 0.8345 F1: 0.8650 Train AUC: 0.9313 Val AUC: 0.9448 Time: 12.81\n",
      "Epoch: 71 Train Loss: 0.3345 Val Loss: 0.2960 Acc: 0.8678 Pre: 0.9105 Recall: 0.8239 F1: 0.8651 Train AUC: 0.9314 Val AUC: 0.9449 Time: 13.28\n",
      "Epoch: 72 Train Loss: 0.3239 Val Loss: 0.2941 Acc: 0.8714 Pre: 0.9080 Recall: 0.8345 F1: 0.8697 Train AUC: 0.9331 Val AUC: 0.9448 Time: 13.94\n",
      "Epoch: 73 Train Loss: 0.3159 Val Loss: 0.2926 Acc: 0.8714 Pre: 0.8989 Recall: 0.8451 F1: 0.8711 Train AUC: 0.9385 Val AUC: 0.9446 Time: 14.01\n",
      "Epoch: 74 Train Loss: 0.3159 Val Loss: 0.2924 Acc: 0.8678 Pre: 0.8864 Recall: 0.8521 F1: 0.8689 Train AUC: 0.9374 Val AUC: 0.9441 Time: 12.89\n",
      "Epoch: 75 Train Loss: 0.3241 Val Loss: 0.2935 Acc: 0.8678 Pre: 0.8809 Recall: 0.8592 F1: 0.8699 Train AUC: 0.9345 Val AUC: 0.9440 Time: 11.84\n",
      "Epoch: 76 Train Loss: 0.3176 Val Loss: 0.2949 Acc: 0.8659 Pre: 0.8777 Recall: 0.8592 F1: 0.8683 Train AUC: 0.9360 Val AUC: 0.9437 Time: 11.65\n",
      "Epoch: 77 Train Loss: 0.3105 Val Loss: 0.2953 Acc: 0.8659 Pre: 0.8777 Recall: 0.8592 F1: 0.8683 Train AUC: 0.9399 Val AUC: 0.9434 Time: 11.62\n",
      "Epoch: 78 Train Loss: 0.3246 Val Loss: 0.2944 Acc: 0.8659 Pre: 0.8804 Recall: 0.8556 F1: 0.8679 Train AUC: 0.9348 Val AUC: 0.9437 Time: 12.09\n",
      "Epoch: 79 Train Loss: 0.3141 Val Loss: 0.2955 Acc: 0.8641 Pre: 0.8914 Recall: 0.8380 F1: 0.8639 Train AUC: 0.9379 Val AUC: 0.9438 Time: 12.60\n",
      "Epoch: 80 Train Loss: 0.3089 Val Loss: 0.2983 Acc: 0.8605 Pre: 0.8996 Recall: 0.8204 F1: 0.8582 Train AUC: 0.9396 Val AUC: 0.9439 Time: 13.05\n",
      "Epoch: 81 Train Loss: 0.3132 Val Loss: 0.2993 Acc: 0.8605 Pre: 0.8966 Recall: 0.8239 F1: 0.8587 Train AUC: 0.9418 Val AUC: 0.9437 Time: 13.50\n",
      "Epoch: 82 Train Loss: 0.3115 Val Loss: 0.2997 Acc: 0.8569 Pre: 0.8839 Recall: 0.8310 F1: 0.8566 Train AUC: 0.9396 Val AUC: 0.9436 Time: 12.83\n",
      "Epoch: 83 Train Loss: 0.3081 Val Loss: 0.3007 Acc: 0.8569 Pre: 0.8782 Recall: 0.8380 F1: 0.8577 Train AUC: 0.9407 Val AUC: 0.9433 Time: 12.36\n",
      "Epoch: 84 Train Loss: 0.3078 Val Loss: 0.3025 Acc: 0.8659 Pre: 0.8777 Recall: 0.8592 F1: 0.8683 Train AUC: 0.9413 Val AUC: 0.9429 Time: 11.61\n",
      "Epoch: 85 Train Loss: 0.3000 Val Loss: 0.3044 Acc: 0.8641 Pre: 0.8693 Recall: 0.8662 F1: 0.8677 Train AUC: 0.9434 Val AUC: 0.9428 Time: 11.43\n",
      "Epoch: 86 Train Loss: 0.3119 Val Loss: 0.3006 Acc: 0.8623 Pre: 0.8741 Recall: 0.8556 F1: 0.8648 Train AUC: 0.9411 Val AUC: 0.9438 Time: 11.99\n",
      "Epoch: 87 Train Loss: 0.2895 Val Loss: 0.2976 Acc: 0.8623 Pre: 0.8910 Recall: 0.8345 F1: 0.8618 Train AUC: 0.9479 Val AUC: 0.9446 Time: 12.36\n",
      "Epoch: 88 Train Loss: 0.3046 Val Loss: 0.2965 Acc: 0.8641 Pre: 0.8973 Recall: 0.8310 F1: 0.8629 Train AUC: 0.9432 Val AUC: 0.9451 Time: 12.97\n",
      "Epoch: 89 Train Loss: 0.3149 Val Loss: 0.2943 Acc: 0.8641 Pre: 0.9035 Recall: 0.8239 F1: 0.8619 Train AUC: 0.9398 Val AUC: 0.9456 Time: 13.33\n",
      "Epoch: 90 Train Loss: 0.3089 Val Loss: 0.2921 Acc: 0.8659 Pre: 0.9008 Recall: 0.8310 F1: 0.8645 Train AUC: 0.9411 Val AUC: 0.9458 Time: 13.91\n",
      "Epoch: 91 Train Loss: 0.2938 Val Loss: 0.2905 Acc: 0.8623 Pre: 0.8881 Recall: 0.8380 F1: 0.8623 Train AUC: 0.9463 Val AUC: 0.9459 Time: 14.12\n",
      "Epoch: 92 Train Loss: 0.3153 Val Loss: 0.2909 Acc: 0.8696 Pre: 0.8813 Recall: 0.8627 F1: 0.8719 Train AUC: 0.9400 Val AUC: 0.9458 Time: 12.73\n",
      "Epoch: 93 Train Loss: 0.2929 Val Loss: 0.2927 Acc: 0.8659 Pre: 0.8723 Recall: 0.8662 F1: 0.8693 Train AUC: 0.9452 Val AUC: 0.9455 Time: 12.20\n",
      "Epoch: 94 Train Loss: 0.3057 Val Loss: 0.2926 Acc: 0.8641 Pre: 0.8693 Recall: 0.8662 F1: 0.8677 Train AUC: 0.9431 Val AUC: 0.9456 Time: 11.83\n",
      "Epoch: 95 Train Loss: 0.3023 Val Loss: 0.2910 Acc: 0.8714 Pre: 0.8845 Recall: 0.8627 F1: 0.8734 Train AUC: 0.9432 Val AUC: 0.9459 Time: 11.83\n",
      "Epoch: 96 Train Loss: 0.3113 Val Loss: 0.2897 Acc: 0.8659 Pre: 0.8860 Recall: 0.8486 F1: 0.8669 Train AUC: 0.9401 Val AUC: 0.9462 Time: 12.17\n",
      "Epoch: 97 Train Loss: 0.2985 Val Loss: 0.2901 Acc: 0.8678 Pre: 0.8981 Recall: 0.8380 F1: 0.8670 Train AUC: 0.9435 Val AUC: 0.9464 Time: 12.70\n",
      "Epoch: 98 Train Loss: 0.3041 Val Loss: 0.2907 Acc: 0.8659 Pre: 0.9008 Recall: 0.8310 F1: 0.8645 Train AUC: 0.9435 Val AUC: 0.9465 Time: 13.23\n",
      "Epoch: 99 Train Loss: 0.2880 Val Loss: 0.2911 Acc: 0.8641 Pre: 0.8973 Recall: 0.8310 F1: 0.8629 Train AUC: 0.9486 Val AUC: 0.9464 Time: 13.61\n",
      "Epoch: 100 Train Loss: 0.2938 Val Loss: 0.2913 Acc: 0.8641 Pre: 0.8885 Recall: 0.8415 F1: 0.8644 Train AUC: 0.9468 Val AUC: 0.9458 Time: 13.94\n",
      "Epoch: 101 Train Loss: 0.2961 Val Loss: 0.2927 Acc: 0.8641 Pre: 0.8828 Recall: 0.8486 F1: 0.8654 Train AUC: 0.9455 Val AUC: 0.9452 Time: 13.10\n",
      "Epoch: 102 Train Loss: 0.2871 Val Loss: 0.2941 Acc: 0.8659 Pre: 0.8750 Recall: 0.8627 F1: 0.8688 Train AUC: 0.9503 Val AUC: 0.9450 Time: 12.18\n",
      "Epoch: 103 Train Loss: 0.2980 Val Loss: 0.2915 Acc: 0.8641 Pre: 0.8773 Recall: 0.8556 F1: 0.8663 Train AUC: 0.9460 Val AUC: 0.9456 Time: 12.20\n",
      "Epoch: 104 Train Loss: 0.2911 Val Loss: 0.2893 Acc: 0.8696 Pre: 0.8955 Recall: 0.8451 F1: 0.8696 Train AUC: 0.9478 Val AUC: 0.9462 Time: 12.27\n",
      "Epoch: 105 Train Loss: 0.2863 Val Loss: 0.2882 Acc: 0.8678 Pre: 0.8981 Recall: 0.8380 F1: 0.8670 Train AUC: 0.9500 Val AUC: 0.9466 Time: 12.53\n",
      "Epoch: 106 Train Loss: 0.2929 Val Loss: 0.2882 Acc: 0.8696 Pre: 0.9046 Recall: 0.8345 F1: 0.8681 Train AUC: 0.9475 Val AUC: 0.9470 Time: 13.06\n",
      "Epoch: 107 Train Loss: 0.2903 Val Loss: 0.2870 Acc: 0.8678 Pre: 0.8981 Recall: 0.8380 F1: 0.8670 Train AUC: 0.9493 Val AUC: 0.9470 Time: 13.42\n",
      "Epoch: 108 Train Loss: 0.2994 Val Loss: 0.2869 Acc: 0.8714 Pre: 0.8901 Recall: 0.8556 F1: 0.8725 Train AUC: 0.9447 Val AUC: 0.9469 Time: 13.21\n",
      "Epoch: 109 Train Loss: 0.3043 Val Loss: 0.2893 Acc: 0.8659 Pre: 0.8697 Recall: 0.8697 F1: 0.8697 Train AUC: 0.9430 Val AUC: 0.9463 Time: 12.55\n",
      "Epoch: 110 Train Loss: 0.2886 Val Loss: 0.2914 Acc: 0.8678 Pre: 0.8702 Recall: 0.8732 F1: 0.8717 Train AUC: 0.9488 Val AUC: 0.9460 Time: 11.89\n",
      "Epoch: 111 Train Loss: 0.2925 Val Loss: 0.2918 Acc: 0.8678 Pre: 0.8781 Recall: 0.8627 F1: 0.8703 Train AUC: 0.9491 Val AUC: 0.9458 Time: 12.01\n",
      "Epoch: 112 Train Loss: 0.2877 Val Loss: 0.2927 Acc: 0.8659 Pre: 0.8889 Recall: 0.8451 F1: 0.8664 Train AUC: 0.9493 Val AUC: 0.9459 Time: 12.51\n",
      "Epoch: 113 Train Loss: 0.2883 Val Loss: 0.2937 Acc: 0.8659 Pre: 0.9008 Recall: 0.8310 F1: 0.8645 Train AUC: 0.9504 Val AUC: 0.9460 Time: 12.92\n",
      "Epoch: 114 Train Loss: 0.2856 Val Loss: 0.2928 Acc: 0.8678 Pre: 0.9011 Recall: 0.8345 F1: 0.8665 Train AUC: 0.9514 Val AUC: 0.9459 Time: 13.45\n",
      "Epoch: 115 Train Loss: 0.2758 Val Loss: 0.2909 Acc: 0.8678 Pre: 0.8951 Recall: 0.8415 F1: 0.8675 Train AUC: 0.9544 Val AUC: 0.9458 Time: 13.54\n",
      "Epoch: 116 Train Loss: 0.2863 Val Loss: 0.2901 Acc: 0.8678 Pre: 0.8864 Recall: 0.8521 F1: 0.8689 Train AUC: 0.9499 Val AUC: 0.9458 Time: 13.16\n",
      "Epoch: 117 Train Loss: 0.2796 Val Loss: 0.2921 Acc: 0.8659 Pre: 0.8697 Recall: 0.8697 F1: 0.8697 Train AUC: 0.9513 Val AUC: 0.9456 Time: 12.51\n",
      "Epoch: 118 Train Loss: 0.2878 Val Loss: 0.2919 Acc: 0.8659 Pre: 0.8697 Recall: 0.8697 F1: 0.8697 Train AUC: 0.9509 Val AUC: 0.9455 Time: 11.79\n",
      "Epoch: 119 Train Loss: 0.2832 Val Loss: 0.2897 Acc: 0.8750 Pre: 0.8938 Recall: 0.8592 F1: 0.8761 Train AUC: 0.9513 Val AUC: 0.9457 Time: 12.00\n",
      "Epoch: 120 Train Loss: 0.2769 Val Loss: 0.2892 Acc: 0.8696 Pre: 0.9015 Recall: 0.8380 F1: 0.8686 Train AUC: 0.9526 Val AUC: 0.9462 Time: 11.89\n",
      "Epoch: 121 Train Loss: 0.2829 Val Loss: 0.2896 Acc: 0.8678 Pre: 0.9011 Recall: 0.8345 F1: 0.8665 Train AUC: 0.9505 Val AUC: 0.9462 Time: 12.38\n",
      "Epoch: 122 Train Loss: 0.2817 Val Loss: 0.2891 Acc: 0.8732 Pre: 0.8905 Recall: 0.8592 F1: 0.8746 Train AUC: 0.9538 Val AUC: 0.9459 Time: 12.72\n",
      "Epoch: 123 Train Loss: 0.2771 Val Loss: 0.2929 Acc: 0.8659 Pre: 0.8646 Recall: 0.8768 F1: 0.8706 Train AUC: 0.9520 Val AUC: 0.9456 Time: 13.34\n",
      "Epoch: 124 Train Loss: 0.2831 Val Loss: 0.2946 Acc: 0.8659 Pre: 0.8646 Recall: 0.8768 F1: 0.8706 Train AUC: 0.9512 Val AUC: 0.9456 Time: 13.85\n",
      "Epoch: 125 Train Loss: 0.2778 Val Loss: 0.2906 Acc: 0.8714 Pre: 0.8817 Recall: 0.8662 F1: 0.8739 Train AUC: 0.9544 Val AUC: 0.9464 Time: 14.08\n",
      "Epoch: 126 Train Loss: 0.2873 Val Loss: 0.2893 Acc: 0.8659 Pre: 0.8947 Recall: 0.8380 F1: 0.8655 Train AUC: 0.9486 Val AUC: 0.9471 Time: 13.00\n",
      "Epoch: 127 Train Loss: 0.2875 Val Loss: 0.2887 Acc: 0.8659 Pre: 0.9008 Recall: 0.8310 F1: 0.8645 Train AUC: 0.9508 Val AUC: 0.9469 Time: 12.04\n",
      "Epoch: 128 Train Loss: 0.2866 Val Loss: 0.2861 Acc: 0.8696 Pre: 0.9015 Recall: 0.8380 F1: 0.8686 Train AUC: 0.9513 Val AUC: 0.9473 Time: 11.77\n",
      "Epoch: 129 Train Loss: 0.2731 Val Loss: 0.2846 Acc: 0.8696 Pre: 0.8841 Recall: 0.8592 F1: 0.8714 Train AUC: 0.9539 Val AUC: 0.9473 Time: 12.11\n",
      "Epoch: 130 Train Loss: 0.2689 Val Loss: 0.2861 Acc: 0.8714 Pre: 0.8790 Recall: 0.8697 F1: 0.8743 Train AUC: 0.9556 Val AUC: 0.9471 Time: 12.75\n",
      "Epoch: 131 Train Loss: 0.2732 Val Loss: 0.2877 Acc: 0.8678 Pre: 0.8728 Recall: 0.8697 F1: 0.8713 Train AUC: 0.9553 Val AUC: 0.9473 Time: 13.10\n",
      "Epoch: 132 Train Loss: 0.2863 Val Loss: 0.2844 Acc: 0.8714 Pre: 0.8790 Recall: 0.8697 F1: 0.8743 Train AUC: 0.9520 Val AUC: 0.9477 Time: 13.51\n",
      "Epoch: 133 Train Loss: 0.2731 Val Loss: 0.2849 Acc: 0.8732 Pre: 0.8993 Recall: 0.8486 F1: 0.8732 Train AUC: 0.9535 Val AUC: 0.9478 Time: 13.62\n",
      "Epoch: 134 Train Loss: 0.2739 Val Loss: 0.2873 Acc: 0.8696 Pre: 0.9046 Recall: 0.8345 F1: 0.8681 Train AUC: 0.9546 Val AUC: 0.9477 Time: 12.63\n",
      "Epoch: 135 Train Loss: 0.2781 Val Loss: 0.2867 Acc: 0.8714 Pre: 0.8989 Recall: 0.8451 F1: 0.8711 Train AUC: 0.9562 Val AUC: 0.9480 Time: 12.28\n",
      "Epoch: 136 Train Loss: 0.2745 Val Loss: 0.2860 Acc: 0.8714 Pre: 0.8873 Recall: 0.8592 F1: 0.8730 Train AUC: 0.9562 Val AUC: 0.9480 Time: 12.57\n",
      "Epoch: 137 Train Loss: 0.2748 Val Loss: 0.2855 Acc: 0.8696 Pre: 0.8841 Recall: 0.8592 F1: 0.8714 Train AUC: 0.9541 Val AUC: 0.9482 Time: 12.67\n",
      "Epoch: 138 Train Loss: 0.2588 Val Loss: 0.2853 Acc: 0.8696 Pre: 0.8869 Recall: 0.8556 F1: 0.8710 Train AUC: 0.9597 Val AUC: 0.9479 Time: 13.21\n",
      "Epoch: 139 Train Loss: 0.2719 Val Loss: 0.2854 Acc: 0.8714 Pre: 0.8930 Recall: 0.8521 F1: 0.8721 Train AUC: 0.9557 Val AUC: 0.9479 Time: 13.58\n",
      "Epoch: 140 Train Loss: 0.2592 Val Loss: 0.2861 Acc: 0.8641 Pre: 0.8914 Recall: 0.8380 F1: 0.8639 Train AUC: 0.9593 Val AUC: 0.9478 Time: 13.18\n",
      "Epoch: 141 Train Loss: 0.2755 Val Loss: 0.2865 Acc: 0.8623 Pre: 0.8910 Recall: 0.8345 F1: 0.8618 Train AUC: 0.9544 Val AUC: 0.9474 Time: 12.34\n",
      "Epoch: 142 Train Loss: 0.2683 Val Loss: 0.2856 Acc: 0.8732 Pre: 0.8934 Recall: 0.8556 F1: 0.8741 Train AUC: 0.9574 Val AUC: 0.9471 Time: 12.94\n",
      "Epoch: 143 Train Loss: 0.2569 Val Loss: 0.2866 Acc: 0.8714 Pre: 0.8817 Recall: 0.8662 F1: 0.8739 Train AUC: 0.9600 Val AUC: 0.9474 Time: 12.89\n",
      "Epoch: 144 Train Loss: 0.2621 Val Loss: 0.2859 Acc: 0.8750 Pre: 0.8881 Recall: 0.8662 F1: 0.8770 Train AUC: 0.9584 Val AUC: 0.9480 Time: 12.21\n",
      "Epoch: 145 Train Loss: 0.2564 Val Loss: 0.2832 Acc: 0.8714 Pre: 0.8901 Recall: 0.8556 F1: 0.8725 Train AUC: 0.9604 Val AUC: 0.9486 Time: 12.59\n",
      "Epoch: 146 Train Loss: 0.2604 Val Loss: 0.2826 Acc: 0.8732 Pre: 0.9023 Recall: 0.8451 F1: 0.8727 Train AUC: 0.9590 Val AUC: 0.9494 Time: 12.43\n",
      "Epoch: 147 Train Loss: 0.2542 Val Loss: 0.2829 Acc: 0.8750 Pre: 0.9057 Recall: 0.8451 F1: 0.8743 Train AUC: 0.9610 Val AUC: 0.9495 Time: 12.98\n",
      "Epoch: 148 Train Loss: 0.2668 Val Loss: 0.2826 Acc: 0.8768 Pre: 0.9000 Recall: 0.8556 F1: 0.8773 Train AUC: 0.9563 Val AUC: 0.9495 Time: 13.54\n",
      "Epoch: 149 Train Loss: 0.2594 Val Loss: 0.2824 Acc: 0.8732 Pre: 0.8905 Recall: 0.8592 F1: 0.8746 Train AUC: 0.9594 Val AUC: 0.9494 Time: 12.89\n",
      "Epoch: 150 Train Loss: 0.2604 Val Loss: 0.2832 Acc: 0.8714 Pre: 0.8845 Recall: 0.8627 F1: 0.8734 Train AUC: 0.9584 Val AUC: 0.9493 Time: 12.81\n",
      "Epoch: 151 Train Loss: 0.2562 Val Loss: 0.2833 Acc: 0.8732 Pre: 0.8877 Recall: 0.8627 F1: 0.8750 Train AUC: 0.9607 Val AUC: 0.9489 Time: 13.04\n",
      "Epoch: 152 Train Loss: 0.2629 Val Loss: 0.2847 Acc: 0.8714 Pre: 0.8901 Recall: 0.8556 F1: 0.8725 Train AUC: 0.9579 Val AUC: 0.9480 Time: 13.21\n",
      "Epoch: 153 Train Loss: 0.2679 Val Loss: 0.2864 Acc: 0.8714 Pre: 0.8901 Recall: 0.8556 F1: 0.8725 Train AUC: 0.9554 Val AUC: 0.9475 Time: 13.02\n",
      "Epoch: 154 Train Loss: 0.2591 Val Loss: 0.2877 Acc: 0.8714 Pre: 0.8901 Recall: 0.8556 F1: 0.8725 Train AUC: 0.9588 Val AUC: 0.9468 Time: 13.22\n",
      "Epoch: 155 Train Loss: 0.2578 Val Loss: 0.2881 Acc: 0.8732 Pre: 0.8905 Recall: 0.8592 F1: 0.8746 Train AUC: 0.9588 Val AUC: 0.9462 Time: 12.62\n",
      "Epoch: 156 Train Loss: 0.2563 Val Loss: 0.2881 Acc: 0.8678 Pre: 0.8809 Recall: 0.8592 F1: 0.8699 Train AUC: 0.9595 Val AUC: 0.9465 Time: 12.45\n",
      "Epoch: 157 Train Loss: 0.2502 Val Loss: 0.2869 Acc: 0.8696 Pre: 0.8813 Recall: 0.8627 F1: 0.8719 Train AUC: 0.9620 Val AUC: 0.9476 Time: 13.17\n",
      "Epoch: 158 Train Loss: 0.2582 Val Loss: 0.2854 Acc: 0.8696 Pre: 0.8841 Recall: 0.8592 F1: 0.8714 Train AUC: 0.9609 Val AUC: 0.9482 Time: 12.86\n",
      "Epoch: 159 Train Loss: 0.2487 Val Loss: 0.2821 Acc: 0.8786 Pre: 0.9004 Recall: 0.8592 F1: 0.8793 Train AUC: 0.9640 Val AUC: 0.9488 Time: 13.07\n",
      "Epoch: 160 Train Loss: 0.2470 Val Loss: 0.2818 Acc: 0.8768 Pre: 0.9091 Recall: 0.8451 F1: 0.8759 Train AUC: 0.9635 Val AUC: 0.9494 Time: 13.18\n",
      "Epoch: 161 Train Loss: 0.2574 Val Loss: 0.2817 Acc: 0.8714 Pre: 0.8873 Recall: 0.8592 F1: 0.8730 Train AUC: 0.9610 Val AUC: 0.9495 Time: 12.66\n",
      "Epoch: 162 Train Loss: 0.2529 Val Loss: 0.2857 Acc: 0.8786 Pre: 0.8834 Recall: 0.8803 F1: 0.8818 Train AUC: 0.9604 Val AUC: 0.9487 Time: 12.28\n",
      "Epoch: 163 Train Loss: 0.2550 Val Loss: 0.2872 Acc: 0.8696 Pre: 0.8813 Recall: 0.8627 F1: 0.8719 Train AUC: 0.9614 Val AUC: 0.9474 Time: 13.01\n",
      "Epoch: 164 Train Loss: 0.2483 Val Loss: 0.2882 Acc: 0.8696 Pre: 0.8955 Recall: 0.8451 F1: 0.8696 Train AUC: 0.9632 Val AUC: 0.9465 Time: 13.01\n",
      "Epoch: 165 Train Loss: 0.2512 Val Loss: 0.2906 Acc: 0.8768 Pre: 0.9154 Recall: 0.8380 F1: 0.8750 Train AUC: 0.9621 Val AUC: 0.9470 Time: 13.36\n",
      "Epoch: 166 Train Loss: 0.2480 Val Loss: 0.2883 Acc: 0.8804 Pre: 0.9160 Recall: 0.8451 F1: 0.8791 Train AUC: 0.9632 Val AUC: 0.9471 Time: 13.89\n",
      "Epoch: 167 Train Loss: 0.2478 Val Loss: 0.2904 Acc: 0.8678 Pre: 0.8702 Recall: 0.8732 F1: 0.8717 Train AUC: 0.9635 Val AUC: 0.9469 Time: 13.33\n",
      "Epoch: 168 Train Loss: 0.2501 Val Loss: 0.2900 Acc: 0.8678 Pre: 0.8702 Recall: 0.8732 F1: 0.8717 Train AUC: 0.9648 Val AUC: 0.9469 Time: 12.34\n",
      "Epoch: 169 Train Loss: 0.2527 Val Loss: 0.2890 Acc: 0.8732 Pre: 0.8905 Recall: 0.8592 F1: 0.8746 Train AUC: 0.9624 Val AUC: 0.9460 Time: 11.68\n",
      "Epoch: 170 Train Loss: 0.2481 Val Loss: 0.2956 Acc: 0.8659 Pre: 0.9008 Recall: 0.8310 F1: 0.8645 Train AUC: 0.9628 Val AUC: 0.9451 Time: 11.60\n",
      "Epoch: 171 Train Loss: 0.2615 Val Loss: 0.2933 Acc: 0.8641 Pre: 0.8914 Recall: 0.8380 F1: 0.8639 Train AUC: 0.9611 Val AUC: 0.9450 Time: 12.19\n",
      "Epoch: 172 Train Loss: 0.2544 Val Loss: 0.2932 Acc: 0.8678 Pre: 0.8702 Recall: 0.8732 F1: 0.8717 Train AUC: 0.9617 Val AUC: 0.9463 Time: 12.77\n",
      "Epoch: 173 Train Loss: 0.2498 Val Loss: 0.2874 Acc: 0.8750 Pre: 0.8772 Recall: 0.8803 F1: 0.8787 Train AUC: 0.9640 Val AUC: 0.9482 Time: 13.12\n",
      "Epoch: 174 Train Loss: 0.2493 Val Loss: 0.2801 Acc: 0.8841 Pre: 0.9044 Recall: 0.8662 F1: 0.8849 Train AUC: 0.9635 Val AUC: 0.9500 Time: 13.57\n",
      "Epoch: 175 Train Loss: 0.2509 Val Loss: 0.2805 Acc: 0.8768 Pre: 0.9060 Recall: 0.8486 F1: 0.8764 Train AUC: 0.9622 Val AUC: 0.9506 Time: 13.32\n",
      "Epoch: 176 Train Loss: 0.2505 Val Loss: 0.2826 Acc: 0.8804 Pre: 0.9129 Recall: 0.8486 F1: 0.8796 Train AUC: 0.9619 Val AUC: 0.9500 Time: 12.62\n",
      "Epoch: 177 Train Loss: 0.2533 Val Loss: 0.2851 Acc: 0.8822 Pre: 0.9132 Recall: 0.8521 F1: 0.8816 Train AUC: 0.9611 Val AUC: 0.9482 Time: 12.75\n",
      "Epoch: 178 Train Loss: 0.2415 Val Loss: 0.2894 Acc: 0.8732 Pre: 0.8963 Recall: 0.8521 F1: 0.8736 Train AUC: 0.9660 Val AUC: 0.9460 Time: 12.74\n",
      "Epoch: 179 Train Loss: 0.2403 Val Loss: 0.2932 Acc: 0.8714 Pre: 0.8959 Recall: 0.8486 F1: 0.8716 Train AUC: 0.9656 Val AUC: 0.9448 Time: 13.05\n",
      "Epoch: 180 Train Loss: 0.2455 Val Loss: 0.2901 Acc: 0.8678 Pre: 0.8836 Recall: 0.8556 F1: 0.8694 Train AUC: 0.9648 Val AUC: 0.9461 Time: 12.83\n",
      "Epoch: 181 Train Loss: 0.2375 Val Loss: 0.2856 Acc: 0.8696 Pre: 0.8841 Recall: 0.8592 F1: 0.8714 Train AUC: 0.9663 Val AUC: 0.9480 Time: 12.48\n",
      "Epoch: 182 Train Loss: 0.2407 Val Loss: 0.2812 Acc: 0.8768 Pre: 0.9000 Recall: 0.8556 F1: 0.8773 Train AUC: 0.9656 Val AUC: 0.9499 Time: 13.16\n",
      "Epoch: 183 Train Loss: 0.2378 Val Loss: 0.2800 Acc: 0.8822 Pre: 0.9041 Recall: 0.8627 F1: 0.8829 Train AUC: 0.9665 Val AUC: 0.9506 Time: 13.45\n",
      "Epoch: 184 Train Loss: 0.2365 Val Loss: 0.2800 Acc: 0.8877 Pre: 0.8993 Recall: 0.8803 F1: 0.8897 Train AUC: 0.9669 Val AUC: 0.9508 Time: 13.80\n",
      "Epoch: 185 Train Loss: 0.2493 Val Loss: 0.2817 Acc: 0.8822 Pre: 0.9011 Recall: 0.8662 F1: 0.8833 Train AUC: 0.9625 Val AUC: 0.9500 Time: 12.43\n",
      "Epoch: 186 Train Loss: 0.2464 Val Loss: 0.2874 Acc: 0.8786 Pre: 0.8974 Recall: 0.8627 F1: 0.8797 Train AUC: 0.9637 Val AUC: 0.9476 Time: 11.97\n",
      "Epoch: 187 Train Loss: 0.2371 Val Loss: 0.2924 Acc: 0.8714 Pre: 0.8873 Recall: 0.8592 F1: 0.8730 Train AUC: 0.9665 Val AUC: 0.9457 Time: 12.08\n",
      "Epoch: 188 Train Loss: 0.2434 Val Loss: 0.2954 Acc: 0.8732 Pre: 0.8905 Recall: 0.8592 F1: 0.8746 Train AUC: 0.9647 Val AUC: 0.9448 Time: 12.64\n",
      "Epoch: 189 Train Loss: 0.2476 Val Loss: 0.2931 Acc: 0.8732 Pre: 0.8877 Recall: 0.8627 F1: 0.8750 Train AUC: 0.9638 Val AUC: 0.9455 Time: 13.03\n",
      "Epoch: 190 Train Loss: 0.2372 Val Loss: 0.2888 Acc: 0.8768 Pre: 0.8971 Recall: 0.8592 F1: 0.8777 Train AUC: 0.9669 Val AUC: 0.9469 Time: 13.51\n",
      "Epoch: 191 Train Loss: 0.2454 Val Loss: 0.2840 Acc: 0.8804 Pre: 0.9067 Recall: 0.8556 F1: 0.8804 Train AUC: 0.9639 Val AUC: 0.9490 Time: 13.64\n",
      "Epoch: 192 Train Loss: 0.2428 Val Loss: 0.2816 Acc: 0.8931 Pre: 0.9061 Recall: 0.8838 F1: 0.8948 Train AUC: 0.9644 Val AUC: 0.9499 Time: 12.97\n",
      "Epoch: 193 Train Loss: 0.2405 Val Loss: 0.2817 Acc: 0.8949 Pre: 0.9094 Recall: 0.8838 F1: 0.8964 Train AUC: 0.9659 Val AUC: 0.9497 Time: 12.23\n",
      "Epoch: 194 Train Loss: 0.2368 Val Loss: 0.2834 Acc: 0.8931 Pre: 0.9121 Recall: 0.8768 F1: 0.8941 Train AUC: 0.9667 Val AUC: 0.9488 Time: 12.00\n",
      "Epoch: 195 Train Loss: 0.2398 Val Loss: 0.2868 Acc: 0.8931 Pre: 0.9061 Recall: 0.8838 F1: 0.8948 Train AUC: 0.9671 Val AUC: 0.9477 Time: 12.43\n",
      "Epoch: 196 Train Loss: 0.2397 Val Loss: 0.2947 Acc: 0.8786 Pre: 0.9033 Recall: 0.8556 F1: 0.8788 Train AUC: 0.9654 Val AUC: 0.9444 Time: 13.08\n",
      "Epoch: 197 Train Loss: 0.2431 Val Loss: 0.2981 Acc: 0.8732 Pre: 0.8821 Recall: 0.8697 F1: 0.8759 Train AUC: 0.9653 Val AUC: 0.9433 Time: 13.42\n",
      "Epoch: 198 Train Loss: 0.2343 Val Loss: 0.2969 Acc: 0.8732 Pre: 0.8741 Recall: 0.8803 F1: 0.8772 Train AUC: 0.9670 Val AUC: 0.9444 Time: 13.99\n",
      "Epoch: 199 Train Loss: 0.2391 Val Loss: 0.2878 Acc: 0.8931 Pre: 0.9091 Recall: 0.8803 F1: 0.8945 Train AUC: 0.9671 Val AUC: 0.9472 Time: 13.27\n",
      "Epoch: 200 Train Loss: 0.2384 Val Loss: 0.2829 Acc: 0.8822 Pre: 0.9132 Recall: 0.8521 F1: 0.8816 Train AUC: 0.9670 Val AUC: 0.9491 Time: 12.19\n",
      "Epoch: 201 Train Loss: 0.2322 Val Loss: 0.2790 Acc: 0.8949 Pre: 0.9154 Recall: 0.8768 F1: 0.8957 Train AUC: 0.9681 Val AUC: 0.9505 Time: 12.57\n",
      "Epoch: 202 Train Loss: 0.2419 Val Loss: 0.2771 Acc: 0.8949 Pre: 0.9154 Recall: 0.8768 F1: 0.8957 Train AUC: 0.9640 Val AUC: 0.9514 Time: 12.56\n",
      "Epoch: 203 Train Loss: 0.2367 Val Loss: 0.2778 Acc: 0.9004 Pre: 0.9134 Recall: 0.8908 F1: 0.9020 Train AUC: 0.9660 Val AUC: 0.9510 Time: 12.97\n",
      "Epoch: 204 Train Loss: 0.2369 Val Loss: 0.2805 Acc: 0.8913 Pre: 0.9118 Recall: 0.8732 F1: 0.8921 Train AUC: 0.9668 Val AUC: 0.9501 Time: 13.68\n",
      "Epoch: 205 Train Loss: 0.2398 Val Loss: 0.2842 Acc: 0.8786 Pre: 0.9157 Recall: 0.8415 F1: 0.8771 Train AUC: 0.9660 Val AUC: 0.9485 Time: 13.63\n",
      "Epoch: 206 Train Loss: 0.2292 Val Loss: 0.2892 Acc: 0.8804 Pre: 0.9225 Recall: 0.8380 F1: 0.8782 Train AUC: 0.9692 Val AUC: 0.9470 Time: 13.17\n",
      "Epoch: 207 Train Loss: 0.2316 Val Loss: 0.2891 Acc: 0.8841 Pre: 0.9167 Recall: 0.8521 F1: 0.8832 Train AUC: 0.9686 Val AUC: 0.9468 Time: 12.54\n",
      "Epoch: 208 Train Loss: 0.2378 Val Loss: 0.2845 Acc: 0.8822 Pre: 0.8953 Recall: 0.8732 F1: 0.8841 Train AUC: 0.9672 Val AUC: 0.9489 Time: 12.19\n",
      "Epoch: 209 Train Loss: 0.2329 Val Loss: 0.2791 Acc: 0.8895 Pre: 0.9055 Recall: 0.8768 F1: 0.8909 Train AUC: 0.9690 Val AUC: 0.9508 Time: 12.47\n",
      "Epoch: 210 Train Loss: 0.2354 Val Loss: 0.2773 Acc: 0.8895 Pre: 0.9176 Recall: 0.8627 F1: 0.8893 Train AUC: 0.9682 Val AUC: 0.9515 Time: 13.14\n",
      "Epoch: 211 Train Loss: 0.2356 Val Loss: 0.2784 Acc: 0.8822 Pre: 0.9163 Recall: 0.8486 F1: 0.8812 Train AUC: 0.9666 Val AUC: 0.9512 Time: 12.96\n",
      "Epoch: 212 Train Loss: 0.2311 Val Loss: 0.2795 Acc: 0.8877 Pre: 0.9142 Recall: 0.8627 F1: 0.8877 Train AUC: 0.9682 Val AUC: 0.9502 Time: 13.52\n",
      "Epoch: 213 Train Loss: 0.2338 Val Loss: 0.2833 Acc: 0.8895 Pre: 0.9084 Recall: 0.8732 F1: 0.8905 Train AUC: 0.9680 Val AUC: 0.9486 Time: 13.64\n",
      "Epoch: 214 Train Loss: 0.2305 Val Loss: 0.2869 Acc: 0.8877 Pre: 0.9081 Recall: 0.8697 F1: 0.8885 Train AUC: 0.9682 Val AUC: 0.9472 Time: 12.69\n",
      "Epoch: 215 Train Loss: 0.2415 Val Loss: 0.2891 Acc: 0.8841 Pre: 0.9015 Recall: 0.8697 F1: 0.8853 Train AUC: 0.9673 Val AUC: 0.9465 Time: 12.21\n",
      "Epoch: 216 Train Loss: 0.2321 Val Loss: 0.2888 Acc: 0.8859 Pre: 0.9170 Recall: 0.8556 F1: 0.8852 Train AUC: 0.9684 Val AUC: 0.9469 Time: 11.89\n",
      "Epoch: 217 Train Loss: 0.2286 Val Loss: 0.2916 Acc: 0.8768 Pre: 0.9154 Recall: 0.8380 F1: 0.8750 Train AUC: 0.9693 Val AUC: 0.9467 Time: 12.52\n",
      "Epoch: 218 Train Loss: 0.2290 Val Loss: 0.2901 Acc: 0.8877 Pre: 0.9111 Recall: 0.8662 F1: 0.8881 Train AUC: 0.9694 Val AUC: 0.9465 Time: 12.83\n",
      "Epoch: 219 Train Loss: 0.2272 Val Loss: 0.2906 Acc: 0.8877 Pre: 0.9022 Recall: 0.8768 F1: 0.8893 Train AUC: 0.9697 Val AUC: 0.9464 Time: 13.51\n",
      "Epoch: 220 Train Loss: 0.2263 Val Loss: 0.2903 Acc: 0.8877 Pre: 0.9051 Recall: 0.8732 F1: 0.8889 Train AUC: 0.9702 Val AUC: 0.9465 Time: 13.77\n",
      "Epoch: 221 Train Loss: 0.2286 Val Loss: 0.2898 Acc: 0.8877 Pre: 0.9051 Recall: 0.8732 F1: 0.8889 Train AUC: 0.9692 Val AUC: 0.9469 Time: 13.20\n",
      "Epoch: 222 Train Loss: 0.2308 Val Loss: 0.2916 Acc: 0.8841 Pre: 0.8929 Recall: 0.8803 F1: 0.8865 Train AUC: 0.9682 Val AUC: 0.9469 Time: 12.77\n",
      "Epoch: 223 Train Loss: 0.2273 Val Loss: 0.2901 Acc: 0.8841 Pre: 0.9104 Recall: 0.8592 F1: 0.8841 Train AUC: 0.9699 Val AUC: 0.9468 Time: 12.48\n",
      "Epoch: 224 Train Loss: 0.2223 Val Loss: 0.2922 Acc: 0.8786 Pre: 0.9157 Recall: 0.8415 F1: 0.8771 Train AUC: 0.9706 Val AUC: 0.9466 Time: 12.54\n",
      "Epoch: 225 Train Loss: 0.2304 Val Loss: 0.2877 Acc: 0.8895 Pre: 0.9145 Recall: 0.8662 F1: 0.8897 Train AUC: 0.9695 Val AUC: 0.9478 Time: 12.31\n",
      "Epoch: 226 Train Loss: 0.2246 Val Loss: 0.2883 Acc: 0.8913 Pre: 0.9088 Recall: 0.8768 F1: 0.8925 Train AUC: 0.9705 Val AUC: 0.9479 Time: 12.97\n",
      "Epoch: 227 Train Loss: 0.2315 Val Loss: 0.2865 Acc: 0.8859 Pre: 0.9139 Recall: 0.8592 F1: 0.8857 Train AUC: 0.9683 Val AUC: 0.9479 Time: 13.52\n",
      "Epoch: 228 Train Loss: 0.2217 Val Loss: 0.2899 Acc: 0.8768 Pre: 0.9154 Recall: 0.8380 F1: 0.8750 Train AUC: 0.9711 Val AUC: 0.9477 Time: 13.68\n",
      "Epoch: 229 Train Loss: 0.2327 Val Loss: 0.2883 Acc: 0.8931 Pre: 0.9151 Recall: 0.8732 F1: 0.8937 Train AUC: 0.9701 Val AUC: 0.9473 Time: 13.04\n",
      "Epoch: 230 Train Loss: 0.2249 Val Loss: 0.2907 Acc: 0.8768 Pre: 0.8750 Recall: 0.8873 F1: 0.8811 Train AUC: 0.9704 Val AUC: 0.9469 Time: 12.05\n",
      "Epoch: 231 Train Loss: 0.2247 Val Loss: 0.2853 Acc: 0.8967 Pre: 0.9068 Recall: 0.8908 F1: 0.8988 Train AUC: 0.9709 Val AUC: 0.9485 Time: 12.59\n",
      "Epoch: 232 Train Loss: 0.2236 Val Loss: 0.2819 Acc: 0.8877 Pre: 0.9111 Recall: 0.8662 F1: 0.8881 Train AUC: 0.9712 Val AUC: 0.9495 Time: 12.78\n",
      "Epoch: 233 Train Loss: 0.2227 Val Loss: 0.2801 Acc: 0.8949 Pre: 0.9124 Recall: 0.8803 F1: 0.8961 Train AUC: 0.9710 Val AUC: 0.9499 Time: 12.80\n",
      "Epoch: 234 Train Loss: 0.2236 Val Loss: 0.2794 Acc: 0.8895 Pre: 0.9114 Recall: 0.8697 F1: 0.8901 Train AUC: 0.9707 Val AUC: 0.9502 Time: 13.70\n",
      "Epoch: 235 Train Loss: 0.2201 Val Loss: 0.2815 Acc: 0.8949 Pre: 0.9036 Recall: 0.8908 F1: 0.8972 Train AUC: 0.9713 Val AUC: 0.9496 Time: 12.83\n",
      "Epoch: 236 Train Loss: 0.2199 Val Loss: 0.2837 Acc: 0.8895 Pre: 0.9025 Recall: 0.8803 F1: 0.8913 Train AUC: 0.9721 Val AUC: 0.9486 Time: 12.61\n",
      "Epoch: 237 Train Loss: 0.2262 Val Loss: 0.2822 Acc: 0.8913 Pre: 0.9088 Recall: 0.8768 F1: 0.8925 Train AUC: 0.9699 Val AUC: 0.9493 Time: 13.03\n",
      "Epoch: 238 Train Loss: 0.2263 Val Loss: 0.2831 Acc: 0.8859 Pre: 0.9202 Recall: 0.8521 F1: 0.8848 Train AUC: 0.9694 Val AUC: 0.9493 Time: 12.81\n",
      "Epoch: 239 Train Loss: 0.2164 Val Loss: 0.2797 Acc: 0.8841 Pre: 0.9104 Recall: 0.8592 F1: 0.8841 Train AUC: 0.9734 Val AUC: 0.9504 Time: 12.88\n",
      "Epoch: 240 Train Loss: 0.2219 Val Loss: 0.2794 Acc: 0.8895 Pre: 0.9025 Recall: 0.8803 F1: 0.8913 Train AUC: 0.9709 Val AUC: 0.9509 Time: 13.33\n",
      "Epoch: 241 Train Loss: 0.2265 Val Loss: 0.2790 Acc: 0.8913 Pre: 0.9029 Recall: 0.8838 F1: 0.8932 Train AUC: 0.9707 Val AUC: 0.9507 Time: 13.36\n",
      "Epoch: 242 Train Loss: 0.2186 Val Loss: 0.2880 Acc: 0.8786 Pre: 0.9157 Recall: 0.8415 F1: 0.8771 Train AUC: 0.9724 Val AUC: 0.9481 Time: 12.52\n",
      "Epoch: 243 Train Loss: 0.2287 Val Loss: 0.2928 Acc: 0.8822 Pre: 0.9071 Recall: 0.8592 F1: 0.8825 Train AUC: 0.9694 Val AUC: 0.9466 Time: 12.02\n",
      "Epoch: 244 Train Loss: 0.2161 Val Loss: 0.2932 Acc: 0.8786 Pre: 0.8974 Recall: 0.8627 F1: 0.8797 Train AUC: 0.9728 Val AUC: 0.9462 Time: 12.56\n",
      "Epoch: 245 Train Loss: 0.2233 Val Loss: 0.2814 Acc: 0.8931 Pre: 0.9061 Recall: 0.8838 F1: 0.8948 Train AUC: 0.9699 Val AUC: 0.9497 Time: 13.12\n",
      "Epoch: 246 Train Loss: 0.2181 Val Loss: 0.2796 Acc: 0.8859 Pre: 0.8961 Recall: 0.8803 F1: 0.8881 Train AUC: 0.9714 Val AUC: 0.9513 Time: 13.42\n",
      "Epoch: 247 Train Loss: 0.2220 Val Loss: 0.2768 Acc: 0.8877 Pre: 0.9051 Recall: 0.8732 F1: 0.8889 Train AUC: 0.9727 Val AUC: 0.9520 Time: 12.93\n",
      "Epoch: 248 Train Loss: 0.2277 Val Loss: 0.2731 Acc: 0.8931 Pre: 0.9151 Recall: 0.8732 F1: 0.8937 Train AUC: 0.9720 Val AUC: 0.9528 Time: 12.72\n",
      "Epoch: 249 Train Loss: 0.2144 Val Loss: 0.2785 Acc: 0.8931 Pre: 0.9151 Recall: 0.8732 F1: 0.8937 Train AUC: 0.9729 Val AUC: 0.9507 Time: 13.34\n",
      "Epoch: 250 Train Loss: 0.2147 Val Loss: 0.2876 Acc: 0.8841 Pre: 0.9135 Recall: 0.8556 F1: 0.8836 Train AUC: 0.9729 Val AUC: 0.9487 Time: 12.96\n",
      "Epoch: 251 Train Loss: 0.2141 Val Loss: 0.2876 Acc: 0.8841 Pre: 0.9104 Recall: 0.8592 F1: 0.8841 Train AUC: 0.9727 Val AUC: 0.9479 Time: 12.78\n",
      "Epoch: 252 Train Loss: 0.2162 Val Loss: 0.2830 Acc: 0.8986 Pre: 0.9191 Recall: 0.8803 F1: 0.8993 Train AUC: 0.9718 Val AUC: 0.9491 Time: 12.68\n",
      "Epoch: 253 Train Loss: 0.2170 Val Loss: 0.2863 Acc: 0.8859 Pre: 0.8850 Recall: 0.8944 F1: 0.8897 Train AUC: 0.9729 Val AUC: 0.9496 Time: 13.18\n",
      "Epoch: 254 Train Loss: 0.2205 Val Loss: 0.2802 Acc: 0.8859 Pre: 0.8961 Recall: 0.8803 F1: 0.8881 Train AUC: 0.9725 Val AUC: 0.9510 Time: 12.77\n",
      "Epoch: 255 Train Loss: 0.2136 Val Loss: 0.2764 Acc: 0.8877 Pre: 0.9051 Recall: 0.8732 F1: 0.8889 Train AUC: 0.9736 Val AUC: 0.9521 Time: 12.46\n",
      "Epoch: 256 Train Loss: 0.2176 Val Loss: 0.2756 Acc: 0.8913 Pre: 0.9179 Recall: 0.8662 F1: 0.8913 Train AUC: 0.9720 Val AUC: 0.9518 Time: 12.73\n",
      "Epoch: 257 Train Loss: 0.2082 Val Loss: 0.2841 Acc: 0.8913 Pre: 0.8972 Recall: 0.8908 F1: 0.8940 Train AUC: 0.9748 Val AUC: 0.9495 Time: 13.20\n",
      "Epoch: 258 Train Loss: 0.2134 Val Loss: 0.2931 Acc: 0.8804 Pre: 0.8785 Recall: 0.8908 F1: 0.8846 Train AUC: 0.9734 Val AUC: 0.9480 Time: 13.47\n",
      "Epoch: 259 Train Loss: 0.2153 Val Loss: 0.2858 Acc: 0.8859 Pre: 0.8905 Recall: 0.8873 F1: 0.8889 Train AUC: 0.9730 Val AUC: 0.9487 Time: 12.67\n",
      "Epoch: 260 Train Loss: 0.2097 Val Loss: 0.2777 Acc: 0.8859 Pre: 0.9170 Recall: 0.8556 F1: 0.8852 Train AUC: 0.9743 Val AUC: 0.9516 Time: 12.29\n",
      "Epoch: 261 Train Loss: 0.2176 Val Loss: 0.2738 Acc: 0.8877 Pre: 0.9022 Recall: 0.8768 F1: 0.8893 Train AUC: 0.9738 Val AUC: 0.9526 Time: 12.71\n",
      "Epoch: 262 Train Loss: 0.2065 Val Loss: 0.2784 Acc: 0.8804 Pre: 0.8838 Recall: 0.8838 F1: 0.8838 Train AUC: 0.9758 Val AUC: 0.9525 Time: 12.81\n",
      "Epoch: 263 Train Loss: 0.2198 Val Loss: 0.2724 Acc: 0.8931 Pre: 0.9121 Recall: 0.8768 F1: 0.8941 Train AUC: 0.9728 Val AUC: 0.9530 Time: 13.25\n",
      "Epoch: 264 Train Loss: 0.2120 Val Loss: 0.2788 Acc: 0.8786 Pre: 0.9189 Recall: 0.8380 F1: 0.8766 Train AUC: 0.9754 Val AUC: 0.9532 Time: 13.39\n",
      "Epoch: 265 Train Loss: 0.2158 Val Loss: 0.2761 Acc: 0.8913 Pre: 0.9118 Recall: 0.8732 F1: 0.8921 Train AUC: 0.9745 Val AUC: 0.9518 Time: 12.47\n",
      "Epoch: 266 Train Loss: 0.2152 Val Loss: 0.2856 Acc: 0.8877 Pre: 0.8881 Recall: 0.8944 F1: 0.8912 Train AUC: 0.9727 Val AUC: 0.9505 Time: 11.81\n",
      "Epoch: 267 Train Loss: 0.2197 Val Loss: 0.2783 Acc: 0.8931 Pre: 0.9004 Recall: 0.8908 F1: 0.8956 Train AUC: 0.9726 Val AUC: 0.9513 Time: 12.33\n",
      "Epoch: 268 Train Loss: 0.2177 Val Loss: 0.2737 Acc: 0.8931 Pre: 0.9182 Recall: 0.8697 F1: 0.8933 Train AUC: 0.9719 Val AUC: 0.9525 Time: 13.15\n",
      "Epoch: 269 Train Loss: 0.2150 Val Loss: 0.2744 Acc: 0.8949 Pre: 0.9065 Recall: 0.8873 F1: 0.8968 Train AUC: 0.9733 Val AUC: 0.9523 Time: 13.12\n",
      "Epoch: 270 Train Loss: 0.2080 Val Loss: 0.2763 Acc: 0.8895 Pre: 0.8912 Recall: 0.8944 F1: 0.8928 Train AUC: 0.9749 Val AUC: 0.9528 Time: 13.22\n",
      "Epoch: 271 Train Loss: 0.2135 Val Loss: 0.2708 Acc: 0.8913 Pre: 0.9118 Recall: 0.8732 F1: 0.8921 Train AUC: 0.9755 Val AUC: 0.9535 Time: 13.46\n",
      "Epoch: 272 Train Loss: 0.2029 Val Loss: 0.2744 Acc: 0.8877 Pre: 0.9173 Recall: 0.8592 F1: 0.8873 Train AUC: 0.9774 Val AUC: 0.9533 Time: 12.98\n",
      "Epoch: 273 Train Loss: 0.2147 Val Loss: 0.2761 Acc: 0.8967 Pre: 0.9068 Recall: 0.8908 F1: 0.8988 Train AUC: 0.9743 Val AUC: 0.9522 Time: 12.39\n",
      "Epoch: 274 Train Loss: 0.2024 Val Loss: 0.2840 Acc: 0.8859 Pre: 0.8850 Recall: 0.8944 F1: 0.8897 Train AUC: 0.9761 Val AUC: 0.9507 Time: 12.37\n",
      "Epoch: 275 Train Loss: 0.2178 Val Loss: 0.2715 Acc: 0.8986 Pre: 0.9191 Recall: 0.8803 F1: 0.8993 Train AUC: 0.9746 Val AUC: 0.9532 Time: 12.72\n",
      "Epoch: 276 Train Loss: 0.2086 Val Loss: 0.2710 Acc: 0.8841 Pre: 0.9135 Recall: 0.8556 F1: 0.8836 Train AUC: 0.9750 Val AUC: 0.9544 Time: 13.28\n",
      "Epoch: 277 Train Loss: 0.2084 Val Loss: 0.2714 Acc: 0.8822 Pre: 0.8982 Recall: 0.8697 F1: 0.8837 Train AUC: 0.9747 Val AUC: 0.9540 Time: 14.06\n",
      "Epoch: 278 Train Loss: 0.2126 Val Loss: 0.2743 Acc: 0.8859 Pre: 0.8877 Recall: 0.8908 F1: 0.8893 Train AUC: 0.9734 Val AUC: 0.9537 Time: 13.36\n",
      "Epoch: 279 Train Loss: 0.2110 Val Loss: 0.2766 Acc: 0.8895 Pre: 0.9055 Recall: 0.8768 F1: 0.8909 Train AUC: 0.9748 Val AUC: 0.9513 Time: 12.27\n",
      "Epoch: 280 Train Loss: 0.2057 Val Loss: 0.2815 Acc: 0.8877 Pre: 0.9111 Recall: 0.8662 F1: 0.8881 Train AUC: 0.9768 Val AUC: 0.9496 Time: 11.68\n",
      "Epoch: 281 Train Loss: 0.2048 Val Loss: 0.2851 Acc: 0.8877 Pre: 0.9142 Recall: 0.8627 F1: 0.8877 Train AUC: 0.9754 Val AUC: 0.9491 Time: 11.72\n",
      "Epoch: 282 Train Loss: 0.2067 Val Loss: 0.2835 Acc: 0.8877 Pre: 0.9051 Recall: 0.8732 F1: 0.8889 Train AUC: 0.9754 Val AUC: 0.9489 Time: 12.44\n",
      "Epoch: 283 Train Loss: 0.2054 Val Loss: 0.2801 Acc: 0.8877 Pre: 0.8936 Recall: 0.8873 F1: 0.8905 Train AUC: 0.9752 Val AUC: 0.9508 Time: 12.79\n",
      "Epoch: 284 Train Loss: 0.2099 Val Loss: 0.2747 Acc: 0.8931 Pre: 0.9151 Recall: 0.8732 F1: 0.8937 Train AUC: 0.9748 Val AUC: 0.9517 Time: 13.15\n",
      "Epoch: 285 Train Loss: 0.2048 Val Loss: 0.2741 Acc: 0.8877 Pre: 0.9173 Recall: 0.8592 F1: 0.8873 Train AUC: 0.9755 Val AUC: 0.9518 Time: 13.72\n",
      "Epoch: 286 Train Loss: 0.2018 Val Loss: 0.2698 Acc: 0.8895 Pre: 0.9145 Recall: 0.8662 F1: 0.8897 Train AUC: 0.9777 Val AUC: 0.9534 Time: 14.35\n",
      "Epoch: 287 Train Loss: 0.1987 Val Loss: 0.2680 Acc: 0.8949 Pre: 0.9124 Recall: 0.8803 F1: 0.8961 Train AUC: 0.9776 Val AUC: 0.9540 Time: 13.15\n",
      "Epoch: 288 Train Loss: 0.1989 Val Loss: 0.2701 Acc: 0.8949 Pre: 0.8979 Recall: 0.8979 F1: 0.8979 Train AUC: 0.9775 Val AUC: 0.9538 Time: 12.11\n",
      "Epoch: 289 Train Loss: 0.1993 Val Loss: 0.2654 Acc: 0.8986 Pre: 0.9161 Recall: 0.8838 F1: 0.8996 Train AUC: 0.9778 Val AUC: 0.9547 Time: 11.56\n",
      "Epoch: 290 Train Loss: 0.2012 Val Loss: 0.2656 Acc: 0.8895 Pre: 0.9208 Recall: 0.8592 F1: 0.8889 Train AUC: 0.9762 Val AUC: 0.9555 Time: 11.50\n",
      "Epoch: 291 Train Loss: 0.2056 Val Loss: 0.2630 Acc: 0.8931 Pre: 0.9151 Recall: 0.8732 F1: 0.8937 Train AUC: 0.9767 Val AUC: 0.9556 Time: 12.15\n",
      "Epoch: 292 Train Loss: 0.2021 Val Loss: 0.2665 Acc: 0.8913 Pre: 0.8972 Recall: 0.8908 F1: 0.8940 Train AUC: 0.9758 Val AUC: 0.9551 Time: 12.69\n",
      "Epoch: 293 Train Loss: 0.2091 Val Loss: 0.2672 Acc: 0.8931 Pre: 0.9032 Recall: 0.8873 F1: 0.8952 Train AUC: 0.9757 Val AUC: 0.9543 Time: 12.98\n",
      "Epoch: 294 Train Loss: 0.1943 Val Loss: 0.2691 Acc: 0.8949 Pre: 0.9124 Recall: 0.8803 F1: 0.8961 Train AUC: 0.9788 Val AUC: 0.9534 Time: 13.41\n",
      "Epoch: 295 Train Loss: 0.1991 Val Loss: 0.2698 Acc: 0.8949 Pre: 0.9124 Recall: 0.8803 F1: 0.8961 Train AUC: 0.9773 Val AUC: 0.9532 Time: 14.24\n",
      "Epoch: 296 Train Loss: 0.1926 Val Loss: 0.2712 Acc: 0.9004 Pre: 0.9075 Recall: 0.8979 F1: 0.9027 Train AUC: 0.9786 Val AUC: 0.9530 Time: 14.36\n",
      "Epoch: 297 Train Loss: 0.1938 Val Loss: 0.2718 Acc: 0.8931 Pre: 0.8947 Recall: 0.8979 F1: 0.8963 Train AUC: 0.9784 Val AUC: 0.9532 Time: 13.11\n",
      "Epoch: 298 Train Loss: 0.1992 Val Loss: 0.2676 Acc: 0.8913 Pre: 0.9118 Recall: 0.8732 F1: 0.8921 Train AUC: 0.9777 Val AUC: 0.9540 Time: 12.18\n",
      "Epoch: 299 Train Loss: 0.1921 Val Loss: 0.2679 Acc: 0.8877 Pre: 0.9111 Recall: 0.8662 F1: 0.8881 Train AUC: 0.9789 Val AUC: 0.9544 Time: 11.99\n",
      "Epoch: 300 Train Loss: 0.2021 Val Loss: 0.2651 Acc: 0.8877 Pre: 0.8993 Recall: 0.8803 F1: 0.8897 Train AUC: 0.9772 Val AUC: 0.9549 Time: 12.44\n",
      "Epoch: 301 Train Loss: 0.2008 Val Loss: 0.2667 Acc: 0.8931 Pre: 0.8920 Recall: 0.9014 F1: 0.8967 Train AUC: 0.9775 Val AUC: 0.9555 Time: 13.04\n",
      "Epoch: 302 Train Loss: 0.1994 Val Loss: 0.2640 Acc: 0.8986 Pre: 0.9191 Recall: 0.8803 F1: 0.8993 Train AUC: 0.9779 Val AUC: 0.9557 Time: 13.63\n",
      "Epoch: 303 Train Loss: 0.1989 Val Loss: 0.2690 Acc: 0.8895 Pre: 0.9176 Recall: 0.8627 F1: 0.8893 Train AUC: 0.9768 Val AUC: 0.9544 Time: 14.05\n",
      "Epoch: 304 Train Loss: 0.1945 Val Loss: 0.2701 Acc: 0.8949 Pre: 0.9154 Recall: 0.8768 F1: 0.8957 Train AUC: 0.9779 Val AUC: 0.9532 Time: 14.12\n",
      "Epoch: 305 Train Loss: 0.1915 Val Loss: 0.2737 Acc: 0.8877 Pre: 0.8908 Recall: 0.8908 F1: 0.8908 Train AUC: 0.9789 Val AUC: 0.9521 Time: 12.66\n",
      "Epoch: 306 Train Loss: 0.1970 Val Loss: 0.2696 Acc: 0.8895 Pre: 0.9114 Recall: 0.8697 F1: 0.8901 Train AUC: 0.9780 Val AUC: 0.9535 Time: 11.68\n",
      "Epoch: 307 Train Loss: 0.1892 Val Loss: 0.2671 Acc: 0.8949 Pre: 0.9094 Recall: 0.8838 F1: 0.8964 Train AUC: 0.9799 Val AUC: 0.9548 Time: 11.47\n",
      "Epoch: 308 Train Loss: 0.1927 Val Loss: 0.2701 Acc: 0.8986 Pre: 0.9014 Recall: 0.9014 F1: 0.9014 Train AUC: 0.9781 Val AUC: 0.9542 Time: 11.78\n",
      "Epoch: 309 Train Loss: 0.1931 Val Loss: 0.2683 Acc: 0.8949 Pre: 0.9065 Recall: 0.8873 F1: 0.8968 Train AUC: 0.9792 Val AUC: 0.9543 Time: 12.50\n",
      "Epoch: 310 Train Loss: 0.1905 Val Loss: 0.2694 Acc: 0.9004 Pre: 0.9194 Recall: 0.8838 F1: 0.9013 Train AUC: 0.9794 Val AUC: 0.9539 Time: 12.96\n",
      "Epoch: 311 Train Loss: 0.1945 Val Loss: 0.2723 Acc: 0.8895 Pre: 0.8940 Recall: 0.8908 F1: 0.8924 Train AUC: 0.9784 Val AUC: 0.9534 Time: 13.23\n",
      "Epoch: 312 Train Loss: 0.1894 Val Loss: 0.2719 Acc: 0.8913 Pre: 0.8916 Recall: 0.8979 F1: 0.8947 Train AUC: 0.9787 Val AUC: 0.9539 Time: 13.87\n",
      "Epoch: 313 Train Loss: 0.1992 Val Loss: 0.2659 Acc: 0.9004 Pre: 0.9164 Recall: 0.8873 F1: 0.9016 Train AUC: 0.9772 Val AUC: 0.9548 Time: 14.24\n",
      "Epoch: 314 Train Loss: 0.1895 Val Loss: 0.2635 Acc: 0.8967 Pre: 0.9097 Recall: 0.8873 F1: 0.8984 Train AUC: 0.9793 Val AUC: 0.9560 Time: 13.19\n",
      "Epoch: 315 Train Loss: 0.1870 Val Loss: 0.2622 Acc: 0.8913 Pre: 0.9000 Recall: 0.8873 F1: 0.8936 Train AUC: 0.9799 Val AUC: 0.9563 Time: 12.04\n",
      "Epoch: 316 Train Loss: 0.1941 Val Loss: 0.2624 Acc: 0.8949 Pre: 0.8951 Recall: 0.9014 F1: 0.8982 Train AUC: 0.9784 Val AUC: 0.9568 Time: 11.71\n",
      "Epoch: 317 Train Loss: 0.1858 Val Loss: 0.2674 Acc: 0.8895 Pre: 0.8805 Recall: 0.9085 F1: 0.8943 Train AUC: 0.9804 Val AUC: 0.9559 Time: 11.60\n",
      "Epoch: 318 Train Loss: 0.1897 Val Loss: 0.2658 Acc: 0.8967 Pre: 0.9127 Recall: 0.8838 F1: 0.8980 Train AUC: 0.9809 Val AUC: 0.9548 Time: 12.30\n",
      "Epoch: 319 Train Loss: 0.1808 Val Loss: 0.2673 Acc: 0.8986 Pre: 0.9101 Recall: 0.8908 F1: 0.9004 Train AUC: 0.9812 Val AUC: 0.9541 Time: 12.69\n",
      "Epoch: 320 Train Loss: 0.1818 Val Loss: 0.2666 Acc: 0.8913 Pre: 0.9029 Recall: 0.8838 F1: 0.8932 Train AUC: 0.9811 Val AUC: 0.9544 Time: 13.09\n",
      "Epoch: 321 Train Loss: 0.1841 Val Loss: 0.2680 Acc: 0.8949 Pre: 0.8979 Recall: 0.8979 F1: 0.8979 Train AUC: 0.9809 Val AUC: 0.9541 Time: 13.59\n",
      "Epoch: 322 Train Loss: 0.1915 Val Loss: 0.2645 Acc: 0.8931 Pre: 0.9004 Recall: 0.8908 F1: 0.8956 Train AUC: 0.9803 Val AUC: 0.9551 Time: 13.98\n",
      "Epoch: 323 Train Loss: 0.1857 Val Loss: 0.2644 Acc: 0.8967 Pre: 0.9158 Recall: 0.8803 F1: 0.8977 Train AUC: 0.9809 Val AUC: 0.9563 Time: 13.98\n",
      "Epoch: 324 Train Loss: 0.1844 Val Loss: 0.2678 Acc: 0.8931 Pre: 0.8947 Recall: 0.8979 F1: 0.8963 Train AUC: 0.9816 Val AUC: 0.9555 Time: 13.03\n",
      "Epoch: 325 Train Loss: 0.1844 Val Loss: 0.2737 Acc: 0.8931 Pre: 0.8866 Recall: 0.9085 F1: 0.8974 Train AUC: 0.9807 Val AUC: 0.9549 Time: 12.07\n",
      "Epoch: 326 Train Loss: 0.1813 Val Loss: 0.2671 Acc: 0.8949 Pre: 0.9094 Recall: 0.8838 F1: 0.8964 Train AUC: 0.9817 Val AUC: 0.9548 Time: 12.07\n",
      "Epoch: 327 Train Loss: 0.1846 Val Loss: 0.2690 Acc: 0.8931 Pre: 0.9151 Recall: 0.8732 F1: 0.8937 Train AUC: 0.9801 Val AUC: 0.9550 Time: 12.51\n",
      "Epoch: 328 Train Loss: 0.1873 Val Loss: 0.2674 Acc: 0.8913 Pre: 0.9000 Recall: 0.8873 F1: 0.8936 Train AUC: 0.9802 Val AUC: 0.9546 Time: 13.14\n",
      "Epoch: 329 Train Loss: 0.1806 Val Loss: 0.2714 Acc: 0.8895 Pre: 0.8858 Recall: 0.9014 F1: 0.8935 Train AUC: 0.9813 Val AUC: 0.9546 Time: 13.39\n",
      "Epoch: 330 Train Loss: 0.1865 Val Loss: 0.2630 Acc: 0.8949 Pre: 0.9036 Recall: 0.8908 F1: 0.8972 Train AUC: 0.9811 Val AUC: 0.9563 Time: 12.74\n",
      "Epoch: 331 Train Loss: 0.1897 Val Loss: 0.2694 Acc: 0.8949 Pre: 0.9185 Recall: 0.8732 F1: 0.8953 Train AUC: 0.9785 Val AUC: 0.9558 Time: 12.79\n",
      "Epoch: 332 Train Loss: 0.1907 Val Loss: 0.2696 Acc: 0.8986 Pre: 0.8986 Recall: 0.9049 F1: 0.9018 Train AUC: 0.9821 Val AUC: 0.9548 Time: 12.67\n",
      "Epoch: 333 Train Loss: 0.1845 Val Loss: 0.2699 Acc: 0.8895 Pre: 0.8912 Recall: 0.8944 F1: 0.8928 Train AUC: 0.9815 Val AUC: 0.9542 Time: 13.28\n",
      "Epoch: 334 Train Loss: 0.1880 Val Loss: 0.2640 Acc: 0.8986 Pre: 0.9161 Recall: 0.8838 F1: 0.8996 Train AUC: 0.9794 Val AUC: 0.9557 Time: 13.61\n",
      "Epoch: 335 Train Loss: 0.1722 Val Loss: 0.2656 Acc: 0.8949 Pre: 0.9124 Recall: 0.8803 F1: 0.8961 Train AUC: 0.9834 Val AUC: 0.9564 Time: 13.70\n",
      "Epoch: 336 Train Loss: 0.1855 Val Loss: 0.2618 Acc: 0.8931 Pre: 0.8893 Recall: 0.9049 F1: 0.8970 Train AUC: 0.9809 Val AUC: 0.9564 Time: 12.53\n",
      "Epoch: 337 Train Loss: 0.1801 Val Loss: 0.2768 Acc: 0.8804 Pre: 0.8609 Recall: 0.9155 F1: 0.8874 Train AUC: 0.9815 Val AUC: 0.9566 Time: 12.25\n",
      "Epoch: 338 Train Loss: 0.1921 Val Loss: 0.2617 Acc: 0.8986 Pre: 0.9130 Recall: 0.8873 F1: 0.9000 Train AUC: 0.9821 Val AUC: 0.9561 Time: 12.54\n",
      "Epoch: 339 Train Loss: 0.1773 Val Loss: 0.2735 Acc: 0.8859 Pre: 0.9234 Recall: 0.8486 F1: 0.8844 Train AUC: 0.9815 Val AUC: 0.9544 Time: 12.74\n",
      "Epoch: 340 Train Loss: 0.1868 Val Loss: 0.2649 Acc: 0.9004 Pre: 0.9194 Recall: 0.8838 F1: 0.9013 Train AUC: 0.9808 Val AUC: 0.9553 Time: 13.24\n",
      "Epoch: 341 Train Loss: 0.1777 Val Loss: 0.2705 Acc: 0.8822 Pre: 0.8712 Recall: 0.9049 F1: 0.8877 Train AUC: 0.9813 Val AUC: 0.9556 Time: 13.71\n",
      "Epoch: 342 Train Loss: 0.1803 Val Loss: 0.2613 Acc: 0.8949 Pre: 0.8979 Recall: 0.8979 F1: 0.8979 Train AUC: 0.9829 Val AUC: 0.9569 Time: 13.13\n",
      "Epoch: 343 Train Loss: 0.1882 Val Loss: 0.2585 Acc: 0.8967 Pre: 0.9188 Recall: 0.8768 F1: 0.8973 Train AUC: 0.9796 Val AUC: 0.9584 Time: 12.44\n",
      "Epoch: 344 Train Loss: 0.1738 Val Loss: 0.2583 Acc: 0.9040 Pre: 0.9170 Recall: 0.8944 F1: 0.9055 Train AUC: 0.9842 Val AUC: 0.9582 Time: 12.60\n",
      "Epoch: 345 Train Loss: 0.1779 Val Loss: 0.2694 Acc: 0.8967 Pre: 0.8847 Recall: 0.9190 F1: 0.9016 Train AUC: 0.9813 Val AUC: 0.9558 Time: 12.82\n",
      "Epoch: 346 Train Loss: 0.1747 Val Loss: 0.2776 Acc: 0.8822 Pre: 0.8687 Recall: 0.9085 F1: 0.8881 Train AUC: 0.9825 Val AUC: 0.9541 Time: 13.45\n",
      "Epoch: 347 Train Loss: 0.1784 Val Loss: 0.2736 Acc: 0.8895 Pre: 0.9055 Recall: 0.8768 F1: 0.8909 Train AUC: 0.9837 Val AUC: 0.9525 Time: 13.74\n",
      "Epoch: 348 Train Loss: 0.1741 Val Loss: 0.2780 Acc: 0.8841 Pre: 0.9167 Recall: 0.8521 F1: 0.8832 Train AUC: 0.9830 Val AUC: 0.9521 Time: 13.37\n",
      "Epoch: 349 Train Loss: 0.1797 Val Loss: 0.2653 Acc: 0.8949 Pre: 0.9036 Recall: 0.8908 F1: 0.8972 Train AUC: 0.9831 Val AUC: 0.9556 Time: 12.41\n",
      "Epoch: 350 Train Loss: 0.1651 Val Loss: 0.2594 Acc: 0.9040 Pre: 0.9110 Recall: 0.9014 F1: 0.9062 Train AUC: 0.9844 Val AUC: 0.9585 Time: 11.94\n",
      "Epoch: 351 Train Loss: 0.1758 Val Loss: 0.2575 Acc: 0.9058 Pre: 0.9056 Recall: 0.9120 F1: 0.9088 Train AUC: 0.9815 Val AUC: 0.9596 Time: 12.42\n",
      "Epoch: 352 Train Loss: 0.1760 Val Loss: 0.2564 Acc: 0.9040 Pre: 0.8997 Recall: 0.9155 F1: 0.9075 Train AUC: 0.9816 Val AUC: 0.9590 Time: 13.05\n",
      "Epoch: 353 Train Loss: 0.1738 Val Loss: 0.2637 Acc: 0.8913 Pre: 0.8810 Recall: 0.9120 F1: 0.8962 Train AUC: 0.9819 Val AUC: 0.9579 Time: 13.32\n",
      "Epoch: 354 Train Loss: 0.1740 Val Loss: 0.2608 Acc: 0.8931 Pre: 0.9032 Recall: 0.8873 F1: 0.8952 Train AUC: 0.9833 Val AUC: 0.9566 Time: 13.75\n",
      "Epoch: 355 Train Loss: 0.1771 Val Loss: 0.2622 Acc: 0.8931 Pre: 0.8947 Recall: 0.8979 F1: 0.8963 Train AUC: 0.9822 Val AUC: 0.9564 Time: 13.44\n",
      "Epoch: 356 Train Loss: 0.1644 Val Loss: 0.2612 Acc: 0.8913 Pre: 0.8836 Recall: 0.9085 F1: 0.8958 Train AUC: 0.9845 Val AUC: 0.9570 Time: 13.00\n",
      "Epoch: 357 Train Loss: 0.1700 Val Loss: 0.2555 Acc: 0.9040 Pre: 0.9053 Recall: 0.9085 F1: 0.9069 Train AUC: 0.9837 Val AUC: 0.9585 Time: 12.37\n",
      "Epoch: 358 Train Loss: 0.1630 Val Loss: 0.2515 Acc: 0.9004 Pre: 0.9134 Recall: 0.8908 F1: 0.9020 Train AUC: 0.9845 Val AUC: 0.9594 Time: 12.43\n",
      "Epoch: 359 Train Loss: 0.1673 Val Loss: 0.2523 Acc: 0.8967 Pre: 0.8927 Recall: 0.9085 F1: 0.9005 Train AUC: 0.9840 Val AUC: 0.9594 Time: 12.82\n",
      "Epoch: 360 Train Loss: 0.1666 Val Loss: 0.2562 Acc: 0.8859 Pre: 0.8746 Recall: 0.9085 F1: 0.8912 Train AUC: 0.9842 Val AUC: 0.9584 Time: 13.26\n",
      "Epoch: 361 Train Loss: 0.1700 Val Loss: 0.2503 Acc: 0.8949 Pre: 0.8924 Recall: 0.9049 F1: 0.8986 Train AUC: 0.9844 Val AUC: 0.9597 Time: 12.80\n",
      "Epoch: 362 Train Loss: 0.1632 Val Loss: 0.2498 Acc: 0.8967 Pre: 0.9039 Recall: 0.8944 F1: 0.8991 Train AUC: 0.9855 Val AUC: 0.9598 Time: 12.50\n",
      "Epoch: 363 Train Loss: 0.1660 Val Loss: 0.2488 Acc: 0.9022 Pre: 0.9078 Recall: 0.9014 F1: 0.9046 Train AUC: 0.9839 Val AUC: 0.9605 Time: 12.12\n",
      "Epoch: 364 Train Loss: 0.1743 Val Loss: 0.2548 Acc: 0.8913 Pre: 0.8733 Recall: 0.9225 F1: 0.8973 Train AUC: 0.9823 Val AUC: 0.9603 Time: 12.56\n",
      "Epoch: 365 Train Loss: 0.1667 Val Loss: 0.2534 Acc: 0.8967 Pre: 0.8982 Recall: 0.9014 F1: 0.8998 Train AUC: 0.9851 Val AUC: 0.9595 Time: 12.91\n",
      "Epoch: 366 Train Loss: 0.1710 Val Loss: 0.2515 Acc: 0.8986 Pre: 0.9071 Recall: 0.8944 F1: 0.9007 Train AUC: 0.9843 Val AUC: 0.9599 Time: 13.38\n",
      "Epoch: 367 Train Loss: 0.1593 Val Loss: 0.2499 Acc: 0.8967 Pre: 0.8982 Recall: 0.9014 F1: 0.8998 Train AUC: 0.9863 Val AUC: 0.9605 Time: 13.40\n",
      "Epoch: 368 Train Loss: 0.1668 Val Loss: 0.2564 Acc: 0.8895 Pre: 0.8729 Recall: 0.9190 F1: 0.8954 Train AUC: 0.9843 Val AUC: 0.9604 Time: 13.04\n",
      "Epoch: 369 Train Loss: 0.1607 Val Loss: 0.2541 Acc: 0.8859 Pre: 0.8696 Recall: 0.9155 F1: 0.8919 Train AUC: 0.9860 Val AUC: 0.9600 Time: 12.18\n",
      "Epoch: 370 Train Loss: 0.1726 Val Loss: 0.2512 Acc: 0.8949 Pre: 0.9007 Recall: 0.8944 F1: 0.8975 Train AUC: 0.9829 Val AUC: 0.9602 Time: 12.02\n",
      "Epoch: 371 Train Loss: 0.1643 Val Loss: 0.2532 Acc: 0.8949 Pre: 0.9036 Recall: 0.8908 F1: 0.8972 Train AUC: 0.9843 Val AUC: 0.9597 Time: 12.50\n",
      "Epoch: 372 Train Loss: 0.1676 Val Loss: 0.2668 Acc: 0.8750 Pre: 0.8571 Recall: 0.9085 F1: 0.8821 Train AUC: 0.9848 Val AUC: 0.9595 Time: 12.85\n",
      "Epoch: 373 Train Loss: 0.1676 Val Loss: 0.2532 Acc: 0.8895 Pre: 0.8729 Recall: 0.9190 F1: 0.8954 Train AUC: 0.9858 Val AUC: 0.9621 Time: 13.46\n",
      "Epoch: 374 Train Loss: 0.1679 Val Loss: 0.2435 Acc: 0.9058 Pre: 0.9143 Recall: 0.9014 F1: 0.9078 Train AUC: 0.9846 Val AUC: 0.9629 Time: 14.27\n",
      "Epoch: 375 Train Loss: 0.1631 Val Loss: 0.2456 Acc: 0.9040 Pre: 0.9140 Recall: 0.8979 F1: 0.9059 Train AUC: 0.9842 Val AUC: 0.9623 Time: 15.70\n",
      "Epoch: 376 Train Loss: 0.1634 Val Loss: 0.2453 Acc: 0.9004 Pre: 0.9104 Recall: 0.8944 F1: 0.9023 Train AUC: 0.9849 Val AUC: 0.9622 Time: 18.32\n",
      "Epoch: 377 Train Loss: 0.1606 Val Loss: 0.2563 Acc: 0.8877 Pre: 0.8750 Recall: 0.9120 F1: 0.8931 Train AUC: 0.9851 Val AUC: 0.9607 Time: 18.23\n",
      "Epoch: 378 Train Loss: 0.1618 Val Loss: 0.2541 Acc: 0.8949 Pre: 0.8951 Recall: 0.9014 F1: 0.8982 Train AUC: 0.9863 Val AUC: 0.9598 Time: 16.59\n",
      "Epoch: 379 Train Loss: 0.1507 Val Loss: 0.2514 Acc: 0.9022 Pre: 0.9197 Recall: 0.8873 F1: 0.9032 Train AUC: 0.9882 Val AUC: 0.9605 Time: 15.29\n",
      "Epoch: 380 Train Loss: 0.1617 Val Loss: 0.2496 Acc: 0.9004 Pre: 0.9134 Recall: 0.8908 F1: 0.9020 Train AUC: 0.9850 Val AUC: 0.9605 Time: 15.16\n",
      "Epoch: 381 Train Loss: 0.1584 Val Loss: 0.2573 Acc: 0.8877 Pre: 0.8675 Recall: 0.9225 F1: 0.8942 Train AUC: 0.9852 Val AUC: 0.9607 Time: 13.41\n",
      "Epoch: 382 Train Loss: 0.1559 Val Loss: 0.2567 Acc: 0.8841 Pre: 0.8642 Recall: 0.9190 F1: 0.8908 Train AUC: 0.9867 Val AUC: 0.9615 Time: 13.15\n",
      "Epoch: 383 Train Loss: 0.1636 Val Loss: 0.2493 Acc: 0.8986 Pre: 0.8986 Recall: 0.9049 F1: 0.9018 Train AUC: 0.9850 Val AUC: 0.9615 Time: 12.66\n",
      "Epoch: 384 Train Loss: 0.1604 Val Loss: 0.2549 Acc: 0.8949 Pre: 0.9065 Recall: 0.8873 F1: 0.8968 Train AUC: 0.9850 Val AUC: 0.9611 Time: 11.76\n",
      "Epoch: 385 Train Loss: 0.1703 Val Loss: 0.2464 Acc: 0.8931 Pre: 0.8893 Recall: 0.9049 F1: 0.8970 Train AUC: 0.9852 Val AUC: 0.9620 Time: 11.91\n",
      "Epoch: 386 Train Loss: 0.1465 Val Loss: 0.2531 Acc: 0.8895 Pre: 0.8704 Recall: 0.9225 F1: 0.8957 Train AUC: 0.9881 Val AUC: 0.9624 Time: 11.94\n",
      "Epoch: 387 Train Loss: 0.1578 Val Loss: 0.2501 Acc: 0.8877 Pre: 0.8675 Recall: 0.9225 F1: 0.8942 Train AUC: 0.9864 Val AUC: 0.9623 Time: 12.51\n",
      "Epoch: 388 Train Loss: 0.1610 Val Loss: 0.2464 Acc: 0.9040 Pre: 0.9140 Recall: 0.8979 F1: 0.9059 Train AUC: 0.9859 Val AUC: 0.9620 Time: 13.09\n",
      "Epoch: 389 Train Loss: 0.1697 Val Loss: 0.2439 Acc: 0.8986 Pre: 0.8958 Recall: 0.9085 F1: 0.9021 Train AUC: 0.9835 Val AUC: 0.9630 Time: 13.47\n",
      "Epoch: 390 Train Loss: 0.1526 Val Loss: 0.2562 Acc: 0.8913 Pre: 0.8810 Recall: 0.9120 F1: 0.8962 Train AUC: 0.9864 Val AUC: 0.9612 Time: 13.98\n",
      "Epoch: 391 Train Loss: 0.1605 Val Loss: 0.2545 Acc: 0.8931 Pre: 0.8840 Recall: 0.9120 F1: 0.8977 Train AUC: 0.9853 Val AUC: 0.9617 Time: 13.84\n",
      "Epoch: 392 Train Loss: 0.1572 Val Loss: 0.2502 Acc: 0.8967 Pre: 0.9068 Recall: 0.8908 F1: 0.8988 Train AUC: 0.9856 Val AUC: 0.9610 Time: 12.55\n",
      "Epoch: 393 Train Loss: 0.1509 Val Loss: 0.2573 Acc: 0.8986 Pre: 0.9101 Recall: 0.8908 F1: 0.9004 Train AUC: 0.9867 Val AUC: 0.9587 Time: 12.13\n",
      "Epoch: 394 Train Loss: 0.1554 Val Loss: 0.2702 Acc: 0.8569 Pre: 0.8296 Recall: 0.9085 F1: 0.8672 Train AUC: 0.9864 Val AUC: 0.9585 Time: 11.96\n",
      "Epoch: 395 Train Loss: 0.1478 Val Loss: 0.2580 Acc: 0.8750 Pre: 0.8548 Recall: 0.9120 F1: 0.8825 Train AUC: 0.9887 Val AUC: 0.9614 Time: 12.33\n",
      "Epoch: 396 Train Loss: 0.1492 Val Loss: 0.2387 Acc: 0.9040 Pre: 0.9110 Recall: 0.9014 F1: 0.9062 Train AUC: 0.9886 Val AUC: 0.9638 Time: 12.83\n",
      "Epoch: 397 Train Loss: 0.1502 Val Loss: 0.2410 Acc: 0.9040 Pre: 0.9200 Recall: 0.8908 F1: 0.9052 Train AUC: 0.9869 Val AUC: 0.9647 Time: 13.47\n",
      "Epoch: 398 Train Loss: 0.1565 Val Loss: 0.2361 Acc: 0.8986 Pre: 0.8931 Recall: 0.9120 F1: 0.9024 Train AUC: 0.9859 Val AUC: 0.9656 Time: 13.78\n",
      "Epoch: 399 Train Loss: 0.1497 Val Loss: 0.2446 Acc: 0.8913 Pre: 0.8784 Recall: 0.9155 F1: 0.8966 Train AUC: 0.9872 Val AUC: 0.9647 Time: 13.55\n",
      "Epoch: 400 Train Loss: 0.1541 Val Loss: 0.2428 Acc: 0.8967 Pre: 0.8900 Recall: 0.9120 F1: 0.9009 Train AUC: 0.9870 Val AUC: 0.9634 Time: 12.75\n",
      "Epoch: 401 Train Loss: 0.1466 Val Loss: 0.2503 Acc: 0.9004 Pre: 0.9134 Recall: 0.8908 F1: 0.9020 Train AUC: 0.9885 Val AUC: 0.9609 Time: 11.73\n",
      "Epoch: 402 Train Loss: 0.1567 Val Loss: 0.2506 Acc: 0.8949 Pre: 0.9036 Recall: 0.8908 F1: 0.8972 Train AUC: 0.9861 Val AUC: 0.9608 Time: 11.76\n",
      "Epoch: 403 Train Loss: 0.1557 Val Loss: 0.2620 Acc: 0.8750 Pre: 0.8525 Recall: 0.9155 F1: 0.8829 Train AUC: 0.9863 Val AUC: 0.9617 Time: 12.40\n",
      "Epoch: 404 Train Loss: 0.1551 Val Loss: 0.2529 Acc: 0.8913 Pre: 0.8836 Recall: 0.9085 F1: 0.8958 Train AUC: 0.9878 Val AUC: 0.9627 Time: 12.76\n",
      "Epoch: 405 Train Loss: 0.1520 Val Loss: 0.2480 Acc: 0.8913 Pre: 0.8862 Recall: 0.9049 F1: 0.8955 Train AUC: 0.9865 Val AUC: 0.9626 Time: 13.20\n",
      "Epoch: 406 Train Loss: 0.1532 Val Loss: 0.2491 Acc: 0.8931 Pre: 0.9151 Recall: 0.8732 F1: 0.8937 Train AUC: 0.9865 Val AUC: 0.9632 Time: 13.92\n",
      "Epoch: 407 Train Loss: 0.1521 Val Loss: 0.2428 Acc: 0.8931 Pre: 0.9032 Recall: 0.8873 F1: 0.8952 Train AUC: 0.9879 Val AUC: 0.9634 Time: 13.99\n",
      "Epoch: 408 Train Loss: 0.1530 Val Loss: 0.2568 Acc: 0.8786 Pre: 0.8557 Recall: 0.9190 F1: 0.8862 Train AUC: 0.9875 Val AUC: 0.9636 Time: 12.92\n",
      "Epoch: 409 Train Loss: 0.1552 Val Loss: 0.2490 Acc: 0.8913 Pre: 0.8836 Recall: 0.9085 F1: 0.8958 Train AUC: 0.9878 Val AUC: 0.9627 Time: 11.95\n",
      "Epoch: 410 Train Loss: 0.1563 Val Loss: 0.2465 Acc: 0.9076 Pre: 0.9176 Recall: 0.9014 F1: 0.9094 Train AUC: 0.9856 Val AUC: 0.9624 Time: 11.65\n",
      "Epoch: 411 Train Loss: 0.1513 Val Loss: 0.2527 Acc: 0.8859 Pre: 0.8746 Recall: 0.9085 F1: 0.8912 Train AUC: 0.9872 Val AUC: 0.9614 Time: 12.17\n",
      "Epoch: 412 Train Loss: 0.1532 Val Loss: 0.2660 Acc: 0.8804 Pre: 0.8609 Recall: 0.9155 F1: 0.8874 Train AUC: 0.9863 Val AUC: 0.9610 Time: 12.65\n",
      "Epoch: 413 Train Loss: 0.1509 Val Loss: 0.2450 Acc: 0.8913 Pre: 0.8784 Recall: 0.9155 F1: 0.8966 Train AUC: 0.9877 Val AUC: 0.9639 Time: 13.04\n",
      "Epoch: 414 Train Loss: 0.1453 Val Loss: 0.2403 Acc: 0.9076 Pre: 0.9206 Recall: 0.8979 F1: 0.9091 Train AUC: 0.9884 Val AUC: 0.9639 Time: 13.58\n",
      "Epoch: 415 Train Loss: 0.1491 Val Loss: 0.2352 Acc: 0.9022 Pre: 0.9021 Recall: 0.9085 F1: 0.9053 Train AUC: 0.9883 Val AUC: 0.9653 Time: 14.26\n",
      "Epoch: 416 Train Loss: 0.1461 Val Loss: 0.2388 Acc: 0.8913 Pre: 0.8733 Recall: 0.9225 F1: 0.8973 Train AUC: 0.9880 Val AUC: 0.9665 Time: 13.21\n",
      "Epoch: 417 Train Loss: 0.1483 Val Loss: 0.2309 Acc: 0.9022 Pre: 0.8966 Recall: 0.9155 F1: 0.9059 Train AUC: 0.9891 Val AUC: 0.9674 Time: 12.11\n",
      "Epoch: 418 Train Loss: 0.1381 Val Loss: 0.2340 Acc: 0.8986 Pre: 0.8904 Recall: 0.9155 F1: 0.9028 Train AUC: 0.9892 Val AUC: 0.9668 Time: 11.88\n",
      "Epoch: 419 Train Loss: 0.1433 Val Loss: 0.2370 Acc: 0.8986 Pre: 0.8958 Recall: 0.9085 F1: 0.9021 Train AUC: 0.9883 Val AUC: 0.9656 Time: 11.40\n",
      "Epoch: 420 Train Loss: 0.1477 Val Loss: 0.2390 Acc: 0.8895 Pre: 0.8780 Recall: 0.9120 F1: 0.8946 Train AUC: 0.9878 Val AUC: 0.9649 Time: 11.95\n",
      "Epoch: 421 Train Loss: 0.1442 Val Loss: 0.2429 Acc: 0.8967 Pre: 0.8955 Recall: 0.9049 F1: 0.9002 Train AUC: 0.9880 Val AUC: 0.9635 Time: 12.55\n",
      "Epoch: 422 Train Loss: 0.1453 Val Loss: 0.2414 Acc: 0.9022 Pre: 0.9107 Recall: 0.8979 F1: 0.9043 Train AUC: 0.9877 Val AUC: 0.9632 Time: 12.76\n",
      "Epoch: 423 Train Loss: 0.1455 Val Loss: 0.2402 Acc: 0.8913 Pre: 0.8836 Recall: 0.9085 F1: 0.8958 Train AUC: 0.9883 Val AUC: 0.9650 Time: 13.27\n",
      "Epoch: 424 Train Loss: 0.1457 Val Loss: 0.2394 Acc: 0.8877 Pre: 0.8750 Recall: 0.9120 F1: 0.8931 Train AUC: 0.9882 Val AUC: 0.9662 Time: 13.88\n",
      "Epoch: 425 Train Loss: 0.1398 Val Loss: 0.2366 Acc: 0.8913 Pre: 0.8862 Recall: 0.9049 F1: 0.8955 Train AUC: 0.9894 Val AUC: 0.9662 Time: 13.11\n",
      "Epoch: 426 Train Loss: 0.1398 Val Loss: 0.2375 Acc: 0.8949 Pre: 0.8924 Recall: 0.9049 F1: 0.8986 Train AUC: 0.9886 Val AUC: 0.9655 Time: 12.67\n",
      "Epoch: 427 Train Loss: 0.1484 Val Loss: 0.2415 Acc: 0.8967 Pre: 0.8955 Recall: 0.9049 F1: 0.9002 Train AUC: 0.9878 Val AUC: 0.9645 Time: 12.82\n",
      "Epoch: 428 Train Loss: 0.1337 Val Loss: 0.2529 Acc: 0.8949 Pre: 0.8897 Recall: 0.9085 F1: 0.8990 Train AUC: 0.9900 Val AUC: 0.9617 Time: 12.96\n",
      "Epoch: 429 Train Loss: 0.1434 Val Loss: 0.2423 Acc: 0.9040 Pre: 0.9081 Recall: 0.9049 F1: 0.9065 Train AUC: 0.9896 Val AUC: 0.9635 Time: 13.60\n",
      "Epoch: 430 Train Loss: 0.1399 Val Loss: 0.2402 Acc: 0.9040 Pre: 0.9110 Recall: 0.9014 F1: 0.9062 Train AUC: 0.9897 Val AUC: 0.9637 Time: 12.92\n",
      "Epoch: 431 Train Loss: 0.1488 Val Loss: 0.2369 Acc: 0.9022 Pre: 0.9049 Recall: 0.9049 F1: 0.9049 Train AUC: 0.9869 Val AUC: 0.9655 Time: 12.29\n",
      "Epoch: 432 Train Loss: 0.1510 Val Loss: 0.2353 Acc: 0.8931 Pre: 0.8840 Recall: 0.9120 F1: 0.8977 Train AUC: 0.9864 Val AUC: 0.9670 Time: 12.02\n",
      "Epoch: 433 Train Loss: 0.1340 Val Loss: 0.2425 Acc: 0.8949 Pre: 0.8951 Recall: 0.9014 F1: 0.8982 Train AUC: 0.9907 Val AUC: 0.9635 Time: 12.58\n",
      "Epoch: 434 Train Loss: 0.1474 Val Loss: 0.2401 Acc: 0.8986 Pre: 0.9101 Recall: 0.8908 F1: 0.9004 Train AUC: 0.9882 Val AUC: 0.9652 Time: 13.07\n",
      "Epoch: 435 Train Loss: 0.1399 Val Loss: 0.2376 Acc: 0.8949 Pre: 0.8897 Recall: 0.9085 F1: 0.8990 Train AUC: 0.9894 Val AUC: 0.9658 Time: 13.64\n",
      "Epoch: 436 Train Loss: 0.1360 Val Loss: 0.2583 Acc: 0.8696 Pre: 0.8442 Recall: 0.9155 F1: 0.8784 Train AUC: 0.9894 Val AUC: 0.9650 Time: 13.87\n",
      "Epoch: 437 Train Loss: 0.1472 Val Loss: 0.2447 Acc: 0.8913 Pre: 0.8836 Recall: 0.9085 F1: 0.8958 Train AUC: 0.9888 Val AUC: 0.9645 Time: 12.63\n",
      "Epoch: 438 Train Loss: 0.1371 Val Loss: 0.2417 Acc: 0.9004 Pre: 0.9164 Recall: 0.8873 F1: 0.9016 Train AUC: 0.9893 Val AUC: 0.9649 Time: 11.72\n",
      "Epoch: 439 Train Loss: 0.1402 Val Loss: 0.2393 Acc: 0.8913 Pre: 0.9000 Recall: 0.8873 F1: 0.8936 Train AUC: 0.9892 Val AUC: 0.9647 Time: 11.51\n",
      "Epoch: 440 Train Loss: 0.1403 Val Loss: 0.2487 Acc: 0.8859 Pre: 0.8746 Recall: 0.9085 F1: 0.8912 Train AUC: 0.9900 Val AUC: 0.9645 Time: 11.86\n",
      "Epoch: 441 Train Loss: 0.1369 Val Loss: 0.2311 Acc: 0.8949 Pre: 0.8897 Recall: 0.9085 F1: 0.8990 Train AUC: 0.9913 Val AUC: 0.9677 Time: 12.30\n",
      "Epoch: 442 Train Loss: 0.1388 Val Loss: 0.2370 Acc: 0.9076 Pre: 0.9206 Recall: 0.8979 F1: 0.9091 Train AUC: 0.9898 Val AUC: 0.9667 Time: 12.88\n",
      "Epoch: 443 Train Loss: 0.1431 Val Loss: 0.2357 Acc: 0.9040 Pre: 0.9081 Recall: 0.9049 F1: 0.9065 Train AUC: 0.9876 Val AUC: 0.9667 Time: 13.75\n",
      "Epoch: 444 Train Loss: 0.1351 Val Loss: 0.2380 Acc: 0.8931 Pre: 0.8840 Recall: 0.9120 F1: 0.8977 Train AUC: 0.9892 Val AUC: 0.9668 Time: 13.88\n",
      "Epoch: 445 Train Loss: 0.1315 Val Loss: 0.2477 Acc: 0.8895 Pre: 0.8805 Recall: 0.9085 F1: 0.8943 Train AUC: 0.9905 Val AUC: 0.9634 Time: 13.59\n",
      "Epoch: 446 Train Loss: 0.1366 Val Loss: 0.2389 Acc: 0.8949 Pre: 0.8897 Recall: 0.9085 F1: 0.8990 Train AUC: 0.9897 Val AUC: 0.9657 Time: 12.48\n",
      "Epoch: 447 Train Loss: 0.1291 Val Loss: 0.2344 Acc: 0.9112 Pre: 0.9273 Recall: 0.8979 F1: 0.9123 Train AUC: 0.9906 Val AUC: 0.9662 Time: 12.06\n",
      "Epoch: 448 Train Loss: 0.1357 Val Loss: 0.2309 Acc: 0.9004 Pre: 0.9018 Recall: 0.9049 F1: 0.9033 Train AUC: 0.9896 Val AUC: 0.9676 Time: 12.33\n",
      "Epoch: 449 Train Loss: 0.1316 Val Loss: 0.2308 Acc: 0.8967 Pre: 0.8847 Recall: 0.9190 F1: 0.9016 Train AUC: 0.9902 Val AUC: 0.9690 Time: 12.77\n",
      "Epoch: 450 Train Loss: 0.1296 Val Loss: 0.2329 Acc: 0.8931 Pre: 0.8763 Recall: 0.9225 F1: 0.8988 Train AUC: 0.9917 Val AUC: 0.9695 Time: 12.69\n",
      "Epoch: 451 Train Loss: 0.1299 Val Loss: 0.2274 Acc: 0.8949 Pre: 0.8818 Recall: 0.9190 F1: 0.9000 Train AUC: 0.9911 Val AUC: 0.9699 Time: 13.14\n",
      "Epoch: 452 Train Loss: 0.1345 Val Loss: 0.2279 Acc: 0.8949 Pre: 0.8897 Recall: 0.9085 F1: 0.8990 Train AUC: 0.9898 Val AUC: 0.9693 Time: 12.69\n",
      "Epoch: 453 Train Loss: 0.1314 Val Loss: 0.2300 Acc: 0.8949 Pre: 0.8870 Recall: 0.9120 F1: 0.8993 Train AUC: 0.9912 Val AUC: 0.9693 Time: 13.06\n",
      "Epoch: 454 Train Loss: 0.1309 Val Loss: 0.2312 Acc: 0.8931 Pre: 0.8840 Recall: 0.9120 F1: 0.8977 Train AUC: 0.9909 Val AUC: 0.9687 Time: 13.08\n",
      "Epoch: 455 Train Loss: 0.1311 Val Loss: 0.2393 Acc: 0.8913 Pre: 0.8784 Recall: 0.9155 F1: 0.8966 Train AUC: 0.9913 Val AUC: 0.9679 Time: 12.85\n",
      "Epoch: 456 Train Loss: 0.1319 Val Loss: 0.2429 Acc: 0.8949 Pre: 0.8844 Recall: 0.9155 F1: 0.8997 Train AUC: 0.9900 Val AUC: 0.9660 Time: 12.71\n",
      "Epoch: 457 Train Loss: 0.1176 Val Loss: 0.2469 Acc: 0.8877 Pre: 0.8801 Recall: 0.9049 F1: 0.8924 Train AUC: 0.9926 Val AUC: 0.9636 Time: 12.86\n",
      "Epoch: 458 Train Loss: 0.1320 Val Loss: 0.2422 Acc: 0.8895 Pre: 0.9025 Recall: 0.8803 F1: 0.8913 Train AUC: 0.9905 Val AUC: 0.9651 Time: 12.87\n",
      "Epoch: 459 Train Loss: 0.1389 Val Loss: 0.2320 Acc: 0.8949 Pre: 0.8870 Recall: 0.9120 F1: 0.8993 Train AUC: 0.9894 Val AUC: 0.9687 Time: 12.67\n",
      "Epoch: 460 Train Loss: 0.1374 Val Loss: 0.2431 Acc: 0.8841 Pre: 0.8618 Recall: 0.9225 F1: 0.8912 Train AUC: 0.9886 Val AUC: 0.9701 Time: 15.76\n",
      "Epoch: 461 Train Loss: 0.1236 Val Loss: 0.2283 Acc: 0.9004 Pre: 0.8881 Recall: 0.9225 F1: 0.9050 Train AUC: 0.9930 Val AUC: 0.9703 Time: 13.48\n",
      "Epoch: 462 Train Loss: 0.1298 Val Loss: 0.2300 Acc: 0.9076 Pre: 0.9267 Recall: 0.8908 F1: 0.9084 Train AUC: 0.9910 Val AUC: 0.9697 Time: 12.29\n",
      "Epoch: 463 Train Loss: 0.1336 Val Loss: 0.2291 Acc: 0.8967 Pre: 0.9011 Recall: 0.8979 F1: 0.8995 Train AUC: 0.9902 Val AUC: 0.9687 Time: 12.47\n",
      "Epoch: 464 Train Loss: 0.1282 Val Loss: 0.2493 Acc: 0.8877 Pre: 0.8725 Recall: 0.9155 F1: 0.8935 Train AUC: 0.9912 Val AUC: 0.9654 Time: 12.83\n",
      "Epoch: 465 Train Loss: 0.1348 Val Loss: 0.2407 Acc: 0.8949 Pre: 0.8818 Recall: 0.9190 F1: 0.9000 Train AUC: 0.9909 Val AUC: 0.9668 Time: 13.21\n",
      "Epoch: 466 Train Loss: 0.1296 Val Loss: 0.2324 Acc: 0.9094 Pre: 0.9209 Recall: 0.9014 F1: 0.9110 Train AUC: 0.9909 Val AUC: 0.9680 Time: 13.26\n",
      "Epoch: 467 Train Loss: 0.1336 Val Loss: 0.2314 Acc: 0.9149 Pre: 0.9278 Recall: 0.9049 F1: 0.9162 Train AUC: 0.9896 Val AUC: 0.9691 Time: 13.04\n",
      "Epoch: 468 Train Loss: 0.1303 Val Loss: 0.2393 Acc: 0.8895 Pre: 0.8680 Recall: 0.9261 F1: 0.8961 Train AUC: 0.9901 Val AUC: 0.9708 Time: 12.51\n",
      "Epoch: 469 Train Loss: 0.1239 Val Loss: 0.2395 Acc: 0.8913 Pre: 0.8733 Recall: 0.9225 F1: 0.8973 Train AUC: 0.9921 Val AUC: 0.9692 Time: 12.55\n",
      "Epoch: 470 Train Loss: 0.1296 Val Loss: 0.2291 Acc: 0.9004 Pre: 0.9018 Recall: 0.9049 F1: 0.9033 Train AUC: 0.9914 Val AUC: 0.9687 Time: 12.75\n",
      "Epoch: 471 Train Loss: 0.1135 Val Loss: 0.2333 Acc: 0.9076 Pre: 0.9267 Recall: 0.8908 F1: 0.9084 Train AUC: 0.9939 Val AUC: 0.9680 Time: 12.71\n",
      "Epoch: 472 Train Loss: 0.1297 Val Loss: 0.2369 Acc: 0.8949 Pre: 0.8870 Recall: 0.9120 F1: 0.8993 Train AUC: 0.9911 Val AUC: 0.9673 Time: 12.88\n",
      "Epoch: 473 Train Loss: 0.1155 Val Loss: 0.2487 Acc: 0.8841 Pre: 0.8571 Recall: 0.9296 F1: 0.8919 Train AUC: 0.9932 Val AUC: 0.9672 Time: 13.08\n",
      "Epoch: 474 Train Loss: 0.1198 Val Loss: 0.2366 Acc: 0.8913 Pre: 0.8810 Recall: 0.9120 F1: 0.8962 Train AUC: 0.9929 Val AUC: 0.9676 Time: 13.01\n",
      "Epoch: 475 Train Loss: 0.1259 Val Loss: 0.2339 Acc: 0.9058 Pre: 0.9203 Recall: 0.8944 F1: 0.9071 Train AUC: 0.9907 Val AUC: 0.9685 Time: 12.68\n",
      "Epoch: 476 Train Loss: 0.1333 Val Loss: 0.2270 Acc: 0.8986 Pre: 0.8931 Recall: 0.9120 F1: 0.9024 Train AUC: 0.9904 Val AUC: 0.9695 Time: 12.28\n",
      "Epoch: 477 Train Loss: 0.1218 Val Loss: 0.2330 Acc: 0.8986 Pre: 0.8826 Recall: 0.9261 F1: 0.9038 Train AUC: 0.9926 Val AUC: 0.9712 Time: 12.51\n",
      "Epoch: 478 Train Loss: 0.1259 Val Loss: 0.2228 Acc: 0.9040 Pre: 0.8942 Recall: 0.9225 F1: 0.9081 Train AUC: 0.9926 Val AUC: 0.9718 Time: 12.94\n",
      "Epoch: 479 Train Loss: 0.1179 Val Loss: 0.2249 Acc: 0.9185 Pre: 0.9377 Recall: 0.9014 F1: 0.9192 Train AUC: 0.9927 Val AUC: 0.9707 Time: 13.43\n",
      "Epoch: 480 Train Loss: 0.1280 Val Loss: 0.2205 Acc: 0.9040 Pre: 0.8942 Recall: 0.9225 F1: 0.9081 Train AUC: 0.9911 Val AUC: 0.9712 Time: 13.46\n",
      "Epoch: 481 Train Loss: 0.1195 Val Loss: 0.2288 Acc: 0.8986 Pre: 0.8851 Recall: 0.9225 F1: 0.9034 Train AUC: 0.9927 Val AUC: 0.9700 Time: 13.07\n",
      "Epoch: 482 Train Loss: 0.1272 Val Loss: 0.2277 Acc: 0.9004 Pre: 0.8935 Recall: 0.9155 F1: 0.9043 Train AUC: 0.9915 Val AUC: 0.9693 Time: 12.13\n",
      "Epoch: 483 Train Loss: 0.1146 Val Loss: 0.2293 Acc: 0.8986 Pre: 0.8931 Recall: 0.9120 F1: 0.9024 Train AUC: 0.9924 Val AUC: 0.9689 Time: 12.23\n",
      "Epoch: 484 Train Loss: 0.1210 Val Loss: 0.2279 Acc: 0.9076 Pre: 0.9117 Recall: 0.9085 F1: 0.9101 Train AUC: 0.9915 Val AUC: 0.9688 Time: 12.35\n",
      "Epoch: 485 Train Loss: 0.1191 Val Loss: 0.2301 Acc: 0.9022 Pre: 0.8938 Recall: 0.9190 F1: 0.9062 Train AUC: 0.9921 Val AUC: 0.9692 Time: 13.00\n",
      "Epoch: 486 Train Loss: 0.1172 Val Loss: 0.2273 Acc: 0.9022 Pre: 0.8912 Recall: 0.9225 F1: 0.9066 Train AUC: 0.9929 Val AUC: 0.9705 Time: 13.33\n",
      "Epoch: 487 Train Loss: 0.1120 Val Loss: 0.2244 Acc: 0.9022 Pre: 0.8966 Recall: 0.9155 F1: 0.9059 Train AUC: 0.9936 Val AUC: 0.9713 Time: 13.86\n",
      "Epoch: 488 Train Loss: 0.1043 Val Loss: 0.2238 Acc: 0.9022 Pre: 0.8993 Recall: 0.9120 F1: 0.9056 Train AUC: 0.9944 Val AUC: 0.9711 Time: 13.02\n",
      "Epoch: 489 Train Loss: 0.1130 Val Loss: 0.2226 Acc: 0.9004 Pre: 0.8962 Recall: 0.9120 F1: 0.9040 Train AUC: 0.9931 Val AUC: 0.9711 Time: 12.02\n",
      "Epoch: 490 Train Loss: 0.1158 Val Loss: 0.2265 Acc: 0.8967 Pre: 0.8982 Recall: 0.9014 F1: 0.8998 Train AUC: 0.9928 Val AUC: 0.9698 Time: 11.80\n",
      "Epoch: 491 Train Loss: 0.1196 Val Loss: 0.2318 Acc: 0.8986 Pre: 0.8958 Recall: 0.9085 F1: 0.9021 Train AUC: 0.9925 Val AUC: 0.9690 Time: 11.74\n",
      "Epoch: 492 Train Loss: 0.1142 Val Loss: 0.2291 Acc: 0.8949 Pre: 0.8924 Recall: 0.9049 F1: 0.8986 Train AUC: 0.9939 Val AUC: 0.9698 Time: 12.20\n",
      "Epoch: 493 Train Loss: 0.1148 Val Loss: 0.2319 Acc: 0.8949 Pre: 0.8844 Recall: 0.9155 F1: 0.8997 Train AUC: 0.9927 Val AUC: 0.9695 Time: 12.73\n",
      "Epoch: 494 Train Loss: 0.1143 Val Loss: 0.2344 Acc: 0.8931 Pre: 0.8814 Recall: 0.9155 F1: 0.8981 Train AUC: 0.9931 Val AUC: 0.9694 Time: 13.33\n",
      "Epoch: 495 Train Loss: 0.1154 Val Loss: 0.2289 Acc: 0.8967 Pre: 0.8874 Recall: 0.9155 F1: 0.9012 Train AUC: 0.9927 Val AUC: 0.9691 Time: 13.90\n",
      "Epoch: 496 Train Loss: 0.1118 Val Loss: 0.2286 Acc: 0.8949 Pre: 0.8870 Recall: 0.9120 F1: 0.8993 Train AUC: 0.9937 Val AUC: 0.9696 Time: 14.12\n",
      "Epoch: 497 Train Loss: 0.1100 Val Loss: 0.2254 Acc: 0.8986 Pre: 0.8931 Recall: 0.9120 F1: 0.9024 Train AUC: 0.9936 Val AUC: 0.9702 Time: 12.99\n",
      "Epoch: 498 Train Loss: 0.1178 Val Loss: 0.2227 Acc: 0.9112 Pre: 0.9152 Recall: 0.9120 F1: 0.9136 Train AUC: 0.9930 Val AUC: 0.9709 Time: 12.44\n",
      "Epoch: 499 Train Loss: 0.1184 Val Loss: 0.2258 Acc: 0.9022 Pre: 0.8966 Recall: 0.9155 F1: 0.9059 Train AUC: 0.9929 Val AUC: 0.9717 Time: 12.35\n",
      "Epoch: 500 Train Loss: 0.1168 Val Loss: 0.2337 Acc: 0.8986 Pre: 0.8851 Recall: 0.9225 F1: 0.9034 Train AUC: 0.9923 Val AUC: 0.9709 Time: 12.81\n",
      "Epoch: 501 Train Loss: 0.1078 Val Loss: 0.2357 Acc: 0.8986 Pre: 0.8878 Recall: 0.9190 F1: 0.9031 Train AUC: 0.9943 Val AUC: 0.9697 Time: 13.02\n",
      "Epoch: 502 Train Loss: 0.1162 Val Loss: 0.2358 Acc: 0.8986 Pre: 0.9071 Recall: 0.8944 F1: 0.9007 Train AUC: 0.9925 Val AUC: 0.9685 Time: 13.26\n",
      "Epoch: 503 Train Loss: 0.1086 Val Loss: 0.2383 Acc: 0.8949 Pre: 0.8951 Recall: 0.9014 F1: 0.8982 Train AUC: 0.9940 Val AUC: 0.9683 Time: 13.68\n",
      "Epoch: 504 Train Loss: 0.1062 Val Loss: 0.2539 Acc: 0.8877 Pre: 0.8651 Recall: 0.9261 F1: 0.8946 Train AUC: 0.9940 Val AUC: 0.9681 Time: 12.90\n",
      "Epoch: 505 Train Loss: 0.1253 Val Loss: 0.2287 Acc: 0.9004 Pre: 0.9018 Recall: 0.9049 F1: 0.9033 Train AUC: 0.9918 Val AUC: 0.9703 Time: 12.34\n",
      "Epoch: 506 Train Loss: 0.1143 Val Loss: 0.2237 Acc: 0.9076 Pre: 0.9146 Recall: 0.9049 F1: 0.9097 Train AUC: 0.9927 Val AUC: 0.9714 Time: 12.31\n",
      "Epoch: 507 Train Loss: 0.1133 Val Loss: 0.2218 Acc: 0.9004 Pre: 0.8990 Recall: 0.9085 F1: 0.9037 Train AUC: 0.9934 Val AUC: 0.9723 Time: 12.74\n",
      "Epoch: 508 Train Loss: 0.1171 Val Loss: 0.2218 Acc: 0.9022 Pre: 0.8966 Recall: 0.9155 F1: 0.9059 Train AUC: 0.9925 Val AUC: 0.9726 Time: 13.31\n",
      "Epoch: 509 Train Loss: 0.1120 Val Loss: 0.2245 Acc: 0.8967 Pre: 0.8927 Recall: 0.9085 F1: 0.9005 Train AUC: 0.9936 Val AUC: 0.9713 Time: 13.28\n",
      "Epoch: 510 Train Loss: 0.1104 Val Loss: 0.2275 Acc: 0.8986 Pre: 0.9014 Recall: 0.9014 F1: 0.9014 Train AUC: 0.9935 Val AUC: 0.9694 Time: 12.49\n",
      "Epoch: 511 Train Loss: 0.1170 Val Loss: 0.2318 Acc: 0.8949 Pre: 0.8897 Recall: 0.9085 F1: 0.8990 Train AUC: 0.9925 Val AUC: 0.9693 Time: 12.49\n",
      "Epoch: 512 Train Loss: 0.1151 Val Loss: 0.2377 Acc: 0.8895 Pre: 0.8729 Recall: 0.9190 F1: 0.8954 Train AUC: 0.9928 Val AUC: 0.9698 Time: 12.60\n",
      "Epoch: 513 Train Loss: 0.1171 Val Loss: 0.2238 Acc: 0.8967 Pre: 0.8874 Recall: 0.9155 F1: 0.9012 Train AUC: 0.9924 Val AUC: 0.9717 Time: 13.40\n",
      "Epoch: 514 Train Loss: 0.1107 Val Loss: 0.2178 Acc: 0.9058 Pre: 0.9028 Recall: 0.9155 F1: 0.9091 Train AUC: 0.9935 Val AUC: 0.9728 Time: 13.44\n",
      "Epoch: 515 Train Loss: 0.1085 Val Loss: 0.2208 Acc: 0.9076 Pre: 0.8949 Recall: 0.9296 F1: 0.9119 Train AUC: 0.9940 Val AUC: 0.9723 Time: 12.94\n",
      "Epoch: 516 Train Loss: 0.1163 Val Loss: 0.2311 Acc: 0.8931 Pre: 0.8664 Recall: 0.9366 F1: 0.9002 Train AUC: 0.9933 Val AUC: 0.9712 Time: 12.38\n",
      "Epoch: 517 Train Loss: 0.1176 Val Loss: 0.2232 Acc: 0.9112 Pre: 0.9152 Recall: 0.9120 F1: 0.9136 Train AUC: 0.9934 Val AUC: 0.9723 Time: 12.53\n",
      "Epoch: 518 Train Loss: 0.1133 Val Loss: 0.2353 Acc: 0.9094 Pre: 0.9270 Recall: 0.8944 F1: 0.9104 Train AUC: 0.9930 Val AUC: 0.9712 Time: 13.02\n",
      "Epoch: 519 Train Loss: 0.1198 Val Loss: 0.2258 Acc: 0.8967 Pre: 0.8927 Recall: 0.9085 F1: 0.9005 Train AUC: 0.9914 Val AUC: 0.9716 Time: 13.15\n",
      "Epoch: 520 Train Loss: 0.1083 Val Loss: 0.2699 Acc: 0.8877 Pre: 0.8581 Recall: 0.9366 F1: 0.8956 Train AUC: 0.9938 Val AUC: 0.9655 Time: 12.87\n",
      "Epoch: 521 Train Loss: 0.1287 Val Loss: 0.2282 Acc: 0.8967 Pre: 0.8927 Recall: 0.9085 F1: 0.9005 Train AUC: 0.9929 Val AUC: 0.9703 Time: 12.87\n",
      "Epoch: 522 Train Loss: 0.1189 Val Loss: 0.2280 Acc: 0.9112 Pre: 0.9242 Recall: 0.9014 F1: 0.9127 Train AUC: 0.9919 Val AUC: 0.9701 Time: 12.72\n",
      "Epoch: 523 Train Loss: 0.0993 Val Loss: 0.2279 Acc: 0.9112 Pre: 0.9152 Recall: 0.9120 F1: 0.9136 Train AUC: 0.9945 Val AUC: 0.9708 Time: 13.41\n",
      "Epoch: 524 Train Loss: 0.1139 Val Loss: 0.2244 Acc: 0.8967 Pre: 0.8874 Recall: 0.9155 F1: 0.9012 Train AUC: 0.9923 Val AUC: 0.9717 Time: 13.35\n",
      "Epoch: 525 Train Loss: 0.1130 Val Loss: 0.2241 Acc: 0.8949 Pre: 0.8844 Recall: 0.9155 F1: 0.8997 Train AUC: 0.9929 Val AUC: 0.9715 Time: 12.81\n",
      "Epoch: 526 Train Loss: 0.1017 Val Loss: 0.2217 Acc: 0.9004 Pre: 0.9046 Recall: 0.9014 F1: 0.9030 Train AUC: 0.9943 Val AUC: 0.9712 Time: 12.89\n",
      "Epoch: 527 Train Loss: 0.1114 Val Loss: 0.2257 Acc: 0.8986 Pre: 0.8851 Recall: 0.9225 F1: 0.9034 Train AUC: 0.9946 Val AUC: 0.9718 Time: 12.79\n",
      "Epoch: 528 Train Loss: 0.1017 Val Loss: 0.2361 Acc: 0.9040 Pre: 0.8915 Recall: 0.9261 F1: 0.9085 Train AUC: 0.9950 Val AUC: 0.9714 Time: 13.36\n",
      "Epoch: 529 Train Loss: 0.1130 Val Loss: 0.2271 Acc: 0.9076 Pre: 0.9117 Recall: 0.9085 F1: 0.9101 Train AUC: 0.9928 Val AUC: 0.9714 Time: 12.43\n",
      "Epoch: 530 Train Loss: 0.1075 Val Loss: 0.2209 Acc: 0.9058 Pre: 0.9143 Recall: 0.9014 F1: 0.9078 Train AUC: 0.9935 Val AUC: 0.9720 Time: 11.89\n",
      "Epoch: 531 Train Loss: 0.1141 Val Loss: 0.2342 Acc: 0.8877 Pre: 0.8700 Recall: 0.9190 F1: 0.8938 Train AUC: 0.9933 Val AUC: 0.9715 Time: 12.15\n",
      "Epoch: 532 Train Loss: 0.1119 Val Loss: 0.2243 Acc: 0.8913 Pre: 0.8758 Recall: 0.9190 F1: 0.8969 Train AUC: 0.9949 Val AUC: 0.9727 Time: 12.48\n",
      "Epoch: 533 Train Loss: 0.1071 Val Loss: 0.2184 Acc: 0.9076 Pre: 0.9117 Recall: 0.9085 F1: 0.9101 Train AUC: 0.9946 Val AUC: 0.9734 Time: 12.94\n",
      "Epoch: 534 Train Loss: 0.1132 Val Loss: 0.2188 Acc: 0.9022 Pre: 0.9078 Recall: 0.9014 F1: 0.9046 Train AUC: 0.9929 Val AUC: 0.9733 Time: 13.41\n",
      "Epoch: 535 Train Loss: 0.1053 Val Loss: 0.2245 Acc: 0.8967 Pre: 0.8900 Recall: 0.9120 F1: 0.9009 Train AUC: 0.9939 Val AUC: 0.9718 Time: 13.61\n",
      "Epoch: 536 Train Loss: 0.1075 Val Loss: 0.2330 Acc: 0.8967 Pre: 0.8822 Recall: 0.9225 F1: 0.9019 Train AUC: 0.9944 Val AUC: 0.9712 Time: 12.98\n",
      "Epoch: 537 Train Loss: 0.1062 Val Loss: 0.2320 Acc: 0.9058 Pre: 0.9085 Recall: 0.9085 F1: 0.9085 Train AUC: 0.9945 Val AUC: 0.9707 Time: 13.23\n",
      "Epoch: 538 Train Loss: 0.1142 Val Loss: 0.2304 Acc: 0.9149 Pre: 0.9247 Recall: 0.9085 F1: 0.9165 Train AUC: 0.9923 Val AUC: 0.9701 Time: 13.69\n",
      "Epoch: 539 Train Loss: 0.1163 Val Loss: 0.2312 Acc: 0.9149 Pre: 0.9129 Recall: 0.9225 F1: 0.9177 Train AUC: 0.9921 Val AUC: 0.9703 Time: 13.03\n",
      "Epoch: 540 Train Loss: 0.0985 Val Loss: 0.2322 Acc: 0.9094 Pre: 0.9120 Recall: 0.9120 F1: 0.9120 Train AUC: 0.9952 Val AUC: 0.9682 Time: 12.73\n",
      "Epoch: 541 Train Loss: 0.1161 Val Loss: 0.2202 Acc: 0.9149 Pre: 0.9247 Recall: 0.9085 F1: 0.9165 Train AUC: 0.9935 Val AUC: 0.9719 Time: 11.77\n",
      "Epoch: 542 Train Loss: 0.0993 Val Loss: 0.2225 Acc: 0.9130 Pre: 0.9245 Recall: 0.9049 F1: 0.9146 Train AUC: 0.9950 Val AUC: 0.9726 Time: 12.29\n",
      "Epoch: 543 Train Loss: 0.1096 Val Loss: 0.2299 Acc: 0.8913 Pre: 0.8784 Recall: 0.9155 F1: 0.8966 Train AUC: 0.9928 Val AUC: 0.9732 Time: 12.61\n",
      "Epoch: 544 Train Loss: 0.1140 Val Loss: 0.2417 Acc: 0.8822 Pre: 0.8638 Recall: 0.9155 F1: 0.8889 Train AUC: 0.9931 Val AUC: 0.9717 Time: 13.23\n",
      "Epoch: 545 Train Loss: 0.1033 Val Loss: 0.2338 Acc: 0.8877 Pre: 0.8776 Recall: 0.9085 F1: 0.8927 Train AUC: 0.9947 Val AUC: 0.9692 Time: 13.94\n",
      "Epoch: 546 Train Loss: 0.1148 Val Loss: 0.2342 Acc: 0.9022 Pre: 0.9107 Recall: 0.8979 F1: 0.9043 Train AUC: 0.9928 Val AUC: 0.9693 Time: 14.22\n",
      "Epoch: 547 Train Loss: 0.1073 Val Loss: 0.2318 Acc: 0.8949 Pre: 0.8951 Recall: 0.9014 F1: 0.8982 Train AUC: 0.9937 Val AUC: 0.9713 Time: 13.09\n",
      "Epoch: 548 Train Loss: 0.1053 Val Loss: 0.2409 Acc: 0.8913 Pre: 0.8684 Recall: 0.9296 F1: 0.8980 Train AUC: 0.9940 Val AUC: 0.9719 Time: 12.47\n",
      "Epoch: 549 Train Loss: 0.1179 Val Loss: 0.2362 Acc: 0.8841 Pre: 0.8691 Recall: 0.9120 F1: 0.8900 Train AUC: 0.9934 Val AUC: 0.9713 Time: 12.27\n",
      "Epoch: 550 Train Loss: 0.0992 Val Loss: 0.2340 Acc: 0.8986 Pre: 0.9043 Recall: 0.8979 F1: 0.9011 Train AUC: 0.9946 Val AUC: 0.9707 Time: 12.80\n",
      "Epoch: 551 Train Loss: 0.1090 Val Loss: 0.2300 Acc: 0.9040 Pre: 0.9140 Recall: 0.8979 F1: 0.9059 Train AUC: 0.9932 Val AUC: 0.9710 Time: 13.20\n",
      "Epoch: 552 Train Loss: 0.1117 Val Loss: 0.2323 Acc: 0.8913 Pre: 0.8784 Recall: 0.9155 F1: 0.8966 Train AUC: 0.9936 Val AUC: 0.9705 Time: 13.24\n",
      "Epoch: 553 Train Loss: 0.0983 Val Loss: 0.2476 Acc: 0.8859 Pre: 0.8553 Recall: 0.9366 F1: 0.8941 Train AUC: 0.9956 Val AUC: 0.9692 Time: 12.57\n",
      "Epoch: 554 Train Loss: 0.1158 Val Loss: 0.2228 Acc: 0.8986 Pre: 0.8931 Recall: 0.9120 F1: 0.9024 Train AUC: 0.9946 Val AUC: 0.9712 Time: 11.98\n",
      "Epoch: 555 Train Loss: 0.0993 Val Loss: 0.2225 Acc: 0.9094 Pre: 0.9270 Recall: 0.8944 F1: 0.9104 Train AUC: 0.9950 Val AUC: 0.9729 Time: 12.37\n",
      "Epoch: 556 Train Loss: 0.1043 Val Loss: 0.2175 Acc: 0.9040 Pre: 0.9170 Recall: 0.8944 F1: 0.9055 Train AUC: 0.9943 Val AUC: 0.9735 Time: 12.84\n",
      "Epoch: 557 Train Loss: 0.1039 Val Loss: 0.2328 Acc: 0.8931 Pre: 0.8713 Recall: 0.9296 F1: 0.8995 Train AUC: 0.9944 Val AUC: 0.9728 Time: 13.27\n",
      "Epoch: 558 Train Loss: 0.1039 Val Loss: 0.2457 Acc: 0.8877 Pre: 0.8604 Recall: 0.9331 F1: 0.8953 Train AUC: 0.9951 Val AUC: 0.9728 Time: 13.63\n",
      "Epoch: 559 Train Loss: 0.1115 Val Loss: 0.2261 Acc: 0.9112 Pre: 0.9273 Recall: 0.8979 F1: 0.9123 Train AUC: 0.9949 Val AUC: 0.9721 Time: 13.36\n",
      "Epoch: 560 Train Loss: 0.1009 Val Loss: 0.2367 Acc: 0.9167 Pre: 0.9312 Recall: 0.9049 F1: 0.9179 Train AUC: 0.9943 Val AUC: 0.9704 Time: 12.48\n",
      "Epoch: 561 Train Loss: 0.1268 Val Loss: 0.2302 Acc: 0.8913 Pre: 0.8733 Recall: 0.9225 F1: 0.8973 Train AUC: 0.9914 Val AUC: 0.9704 Time: 12.28\n",
      "Epoch: 562 Train Loss: 0.1158 Val Loss: 0.2439 Acc: 0.8877 Pre: 0.8604 Recall: 0.9331 F1: 0.8953 Train AUC: 0.9936 Val AUC: 0.9730 Time: 12.40\n",
      "Epoch: 563 Train Loss: 0.1175 Val Loss: 0.2279 Acc: 0.9004 Pre: 0.8962 Recall: 0.9120 F1: 0.9040 Train AUC: 0.9946 Val AUC: 0.9724 Time: 12.76\n",
      "Epoch: 564 Train Loss: 0.1113 Val Loss: 0.2319 Acc: 0.9022 Pre: 0.9137 Recall: 0.8944 F1: 0.9039 Train AUC: 0.9927 Val AUC: 0.9713 Time: 13.24\n",
      "Epoch: 565 Train Loss: 0.1276 Val Loss: 0.2280 Acc: 0.9040 Pre: 0.8942 Recall: 0.9225 F1: 0.9081 Train AUC: 0.9914 Val AUC: 0.9716 Time: 13.92\n",
      "Epoch: 566 Train Loss: 0.1062 Val Loss: 0.2330 Acc: 0.9004 Pre: 0.8779 Recall: 0.9366 F1: 0.9063 Train AUC: 0.9939 Val AUC: 0.9717 Time: 13.95\n",
      "Epoch: 567 Train Loss: 0.1174 Val Loss: 0.2237 Acc: 0.9058 Pre: 0.8919 Recall: 0.9296 F1: 0.9103 Train AUC: 0.9932 Val AUC: 0.9719 Time: 12.74\n",
      "Epoch: 568 Train Loss: 0.1080 Val Loss: 0.2290 Acc: 0.9221 Pre: 0.9382 Recall: 0.9085 F1: 0.9231 Train AUC: 0.9944 Val AUC: 0.9707 Time: 12.05\n",
      "Epoch: 569 Train Loss: 0.1173 Val Loss: 0.2257 Acc: 0.9167 Pre: 0.9312 Recall: 0.9049 F1: 0.9179 Train AUC: 0.9921 Val AUC: 0.9725 Time: 11.64\n",
      "Epoch: 570 Train Loss: 0.1024 Val Loss: 0.2437 Acc: 0.8768 Pre: 0.8529 Recall: 0.9190 F1: 0.8847 Train AUC: 0.9939 Val AUC: 0.9702 Time: 12.26\n",
      "Epoch: 571 Train Loss: 0.1058 Val Loss: 0.2524 Acc: 0.8786 Pre: 0.8534 Recall: 0.9225 F1: 0.8866 Train AUC: 0.9941 Val AUC: 0.9687 Time: 12.67\n",
      "Epoch: 572 Train Loss: 0.1165 Val Loss: 0.2315 Acc: 0.8931 Pre: 0.8920 Recall: 0.9014 F1: 0.8967 Train AUC: 0.9919 Val AUC: 0.9717 Time: 13.13\n",
      "Epoch: 573 Train Loss: 0.1038 Val Loss: 0.2225 Acc: 0.8967 Pre: 0.9011 Recall: 0.8979 F1: 0.8995 Train AUC: 0.9937 Val AUC: 0.9730 Time: 13.66\n",
      "Epoch: 574 Train Loss: 0.0996 Val Loss: 0.2276 Acc: 0.8895 Pre: 0.8754 Recall: 0.9155 F1: 0.8950 Train AUC: 0.9946 Val AUC: 0.9726 Time: 14.16\n",
      "Epoch: 575 Train Loss: 0.1000 Val Loss: 0.2339 Acc: 0.8750 Pre: 0.8479 Recall: 0.9225 F1: 0.8836 Train AUC: 0.9950 Val AUC: 0.9710 Time: 12.93\n",
      "Epoch: 576 Train Loss: 0.1180 Val Loss: 0.2170 Acc: 0.8949 Pre: 0.8897 Recall: 0.9085 F1: 0.8990 Train AUC: 0.9941 Val AUC: 0.9740 Time: 12.03\n",
      "Epoch: 577 Train Loss: 0.1083 Val Loss: 0.2260 Acc: 0.9022 Pre: 0.9107 Recall: 0.8979 F1: 0.9043 Train AUC: 0.9944 Val AUC: 0.9728 Time: 11.48\n",
      "Epoch: 578 Train Loss: 0.1134 Val Loss: 0.2334 Acc: 0.8931 Pre: 0.8947 Recall: 0.8979 F1: 0.8963 Train AUC: 0.9924 Val AUC: 0.9716 Time: 11.42\n",
      "Epoch: 579 Train Loss: 0.1099 Val Loss: 0.2479 Acc: 0.8931 Pre: 0.8763 Recall: 0.9225 F1: 0.8988 Train AUC: 0.9929 Val AUC: 0.9692 Time: 12.08\n",
      "Epoch: 580 Train Loss: 0.1126 Val Loss: 0.2350 Acc: 0.8931 Pre: 0.8893 Recall: 0.9049 F1: 0.8970 Train AUC: 0.9931 Val AUC: 0.9688 Time: 12.65\n",
      "Epoch: 581 Train Loss: 0.0964 Val Loss: 0.2261 Acc: 0.9167 Pre: 0.9375 Recall: 0.8979 F1: 0.9173 Train AUC: 0.9957 Val AUC: 0.9713 Time: 13.10\n",
      "Epoch: 582 Train Loss: 0.1047 Val Loss: 0.2319 Acc: 0.9149 Pre: 0.9309 Recall: 0.9014 F1: 0.9159 Train AUC: 0.9944 Val AUC: 0.9706 Time: 13.30\n",
      "Epoch: 583 Train Loss: 0.1161 Val Loss: 0.2263 Acc: 0.9004 Pre: 0.8935 Recall: 0.9155 F1: 0.9043 Train AUC: 0.9923 Val AUC: 0.9725 Time: 14.61\n",
      "Epoch: 584 Train Loss: 0.1144 Val Loss: 0.2384 Acc: 0.8931 Pre: 0.8738 Recall: 0.9261 F1: 0.8991 Train AUC: 0.9928 Val AUC: 0.9700 Time: 14.84\n",
      "Epoch: 585 Train Loss: 0.1093 Val Loss: 0.2303 Acc: 0.8949 Pre: 0.8897 Recall: 0.9085 F1: 0.8990 Train AUC: 0.9946 Val AUC: 0.9697 Time: 14.16\n",
      "Epoch: 586 Train Loss: 0.1005 Val Loss: 0.2178 Acc: 0.8967 Pre: 0.8982 Recall: 0.9014 F1: 0.8998 Train AUC: 0.9953 Val AUC: 0.9736 Time: 12.99\n",
      "Epoch: 587 Train Loss: 0.1026 Val Loss: 0.2198 Acc: 0.9058 Pre: 0.9085 Recall: 0.9085 F1: 0.9085 Train AUC: 0.9941 Val AUC: 0.9745 Time: 11.92\n",
      "Epoch: 588 Train Loss: 0.1083 Val Loss: 0.2172 Acc: 0.9076 Pre: 0.9003 Recall: 0.9225 F1: 0.9113 Train AUC: 0.9928 Val AUC: 0.9753 Time: 11.46\n",
      "Epoch: 589 Train Loss: 0.0987 Val Loss: 0.2226 Acc: 0.8895 Pre: 0.8656 Recall: 0.9296 F1: 0.8964 Train AUC: 0.9945 Val AUC: 0.9750 Time: 12.08\n",
      "Epoch: 590 Train Loss: 0.0956 Val Loss: 0.2299 Acc: 0.9040 Pre: 0.8942 Recall: 0.9225 F1: 0.9081 Train AUC: 0.9960 Val AUC: 0.9708 Time: 12.26\n",
      "Epoch: 591 Train Loss: 0.0941 Val Loss: 0.2289 Acc: 0.8949 Pre: 0.9124 Recall: 0.8803 F1: 0.8961 Train AUC: 0.9962 Val AUC: 0.9696 Time: 12.64\n",
      "Epoch: 592 Train Loss: 0.1088 Val Loss: 0.2215 Acc: 0.9058 Pre: 0.9203 Recall: 0.8944 F1: 0.9071 Train AUC: 0.9938 Val AUC: 0.9717 Time: 13.29\n",
      "Epoch: 593 Train Loss: 0.1034 Val Loss: 0.2214 Acc: 0.9058 Pre: 0.8946 Recall: 0.9261 F1: 0.9100 Train AUC: 0.9948 Val AUC: 0.9736 Time: 13.88\n",
      "Epoch: 594 Train Loss: 0.0984 Val Loss: 0.2249 Acc: 0.9076 Pre: 0.8976 Recall: 0.9261 F1: 0.9116 Train AUC: 0.9948 Val AUC: 0.9744 Time: 14.40\n",
      "Epoch: 595 Train Loss: 0.1019 Val Loss: 0.2174 Acc: 0.9094 Pre: 0.9062 Recall: 0.9190 F1: 0.9126 Train AUC: 0.9942 Val AUC: 0.9749 Time: 13.48\n",
      "Epoch: 596 Train Loss: 0.0945 Val Loss: 0.2150 Acc: 0.9058 Pre: 0.9056 Recall: 0.9120 F1: 0.9088 Train AUC: 0.9954 Val AUC: 0.9738 Time: 12.35\n",
      "Epoch: 597 Train Loss: 0.0943 Val Loss: 0.2246 Acc: 0.9004 Pre: 0.8881 Recall: 0.9225 F1: 0.9050 Train AUC: 0.9949 Val AUC: 0.9714 Time: 11.57\n",
      "Epoch: 598 Train Loss: 0.0922 Val Loss: 0.2326 Acc: 0.9040 Pre: 0.8915 Recall: 0.9261 F1: 0.9085 Train AUC: 0.9958 Val AUC: 0.9692 Time: 11.78\n",
      "Epoch: 599 Train Loss: 0.1016 Val Loss: 0.2251 Acc: 0.8895 Pre: 0.8729 Recall: 0.9190 F1: 0.8954 Train AUC: 0.9954 Val AUC: 0.9732 Time: 12.07\n",
      "Epoch: 600 Train Loss: 0.0946 Val Loss: 0.2261 Acc: 0.8967 Pre: 0.8874 Recall: 0.9155 F1: 0.9012 Train AUC: 0.9955 Val AUC: 0.9730 Time: 12.56\n",
      "Epoch: 601 Train Loss: 0.0985 Val Loss: 0.2245 Acc: 0.9004 Pre: 0.9018 Recall: 0.9049 F1: 0.9033 Train AUC: 0.9944 Val AUC: 0.9736 Time: 13.12\n",
      "Epoch: 602 Train Loss: 0.0959 Val Loss: 0.2167 Acc: 0.9040 Pre: 0.9110 Recall: 0.9014 F1: 0.9062 Train AUC: 0.9950 Val AUC: 0.9743 Time: 13.53\n",
      "Epoch: 603 Train Loss: 0.0942 Val Loss: 0.2191 Acc: 0.8967 Pre: 0.8955 Recall: 0.9049 F1: 0.9002 Train AUC: 0.9948 Val AUC: 0.9722 Time: 14.14\n",
      "Epoch: 604 Train Loss: 0.1023 Val Loss: 0.2222 Acc: 0.8967 Pre: 0.8874 Recall: 0.9155 F1: 0.9012 Train AUC: 0.9948 Val AUC: 0.9718 Time: 13.30\n",
      "Epoch: 605 Train Loss: 0.0940 Val Loss: 0.2192 Acc: 0.8913 Pre: 0.8784 Recall: 0.9155 F1: 0.8966 Train AUC: 0.9957 Val AUC: 0.9745 Time: 12.25\n",
      "Epoch: 606 Train Loss: 0.0918 Val Loss: 0.2250 Acc: 0.9058 Pre: 0.9056 Recall: 0.9120 F1: 0.9088 Train AUC: 0.9959 Val AUC: 0.9747 Time: 11.96\n",
      "Epoch: 607 Train Loss: 0.1016 Val Loss: 0.2276 Acc: 0.8986 Pre: 0.8958 Recall: 0.9085 F1: 0.9021 Train AUC: 0.9939 Val AUC: 0.9744 Time: 12.69\n",
      "Epoch: 608 Train Loss: 0.1045 Val Loss: 0.2354 Acc: 0.8859 Pre: 0.8647 Recall: 0.9225 F1: 0.8927 Train AUC: 0.9939 Val AUC: 0.9727 Time: 13.03\n",
      "Epoch: 609 Train Loss: 0.1006 Val Loss: 0.2398 Acc: 0.9004 Pre: 0.8935 Recall: 0.9155 F1: 0.9043 Train AUC: 0.9947 Val AUC: 0.9690 Time: 13.51\n",
      "Epoch: 610 Train Loss: 0.1001 Val Loss: 0.2276 Acc: 0.9130 Pre: 0.9214 Recall: 0.9085 F1: 0.9149 Train AUC: 0.9949 Val AUC: 0.9712 Time: 14.11\n",
      "Epoch: 611 Train Loss: 0.0949 Val Loss: 0.2222 Acc: 0.9058 Pre: 0.9028 Recall: 0.9155 F1: 0.9091 Train AUC: 0.9957 Val AUC: 0.9731 Time: 13.18\n",
      "Epoch: 612 Train Loss: 0.0877 Val Loss: 0.2256 Acc: 0.9022 Pre: 0.8912 Recall: 0.9225 F1: 0.9066 Train AUC: 0.9958 Val AUC: 0.9733 Time: 12.20\n",
      "Epoch: 613 Train Loss: 0.1008 Val Loss: 0.2291 Acc: 0.9022 Pre: 0.8859 Recall: 0.9296 F1: 0.9072 Train AUC: 0.9944 Val AUC: 0.9739 Time: 12.09\n",
      "Epoch: 614 Train Loss: 0.0841 Val Loss: 0.2248 Acc: 0.9022 Pre: 0.8966 Recall: 0.9155 F1: 0.9059 Train AUC: 0.9967 Val AUC: 0.9739 Time: 11.98\n",
      "Epoch: 615 Train Loss: 0.0936 Val Loss: 0.2269 Acc: 0.8986 Pre: 0.8904 Recall: 0.9155 F1: 0.9028 Train AUC: 0.9950 Val AUC: 0.9728 Time: 12.36\n",
      "Epoch: 616 Train Loss: 0.0849 Val Loss: 0.2378 Acc: 0.8913 Pre: 0.8784 Recall: 0.9155 F1: 0.8966 Train AUC: 0.9964 Val AUC: 0.9709 Time: 13.05\n",
      "Epoch: 617 Train Loss: 0.0943 Val Loss: 0.2412 Acc: 0.8913 Pre: 0.8733 Recall: 0.9225 F1: 0.8973 Train AUC: 0.9957 Val AUC: 0.9713 Time: 13.42\n",
      "Epoch: 618 Train Loss: 0.0933 Val Loss: 0.2327 Acc: 0.8877 Pre: 0.8675 Recall: 0.9225 F1: 0.8942 Train AUC: 0.9952 Val AUC: 0.9727 Time: 14.06\n",
      "Epoch: 619 Train Loss: 0.0916 Val Loss: 0.2274 Acc: 0.8967 Pre: 0.8927 Recall: 0.9085 F1: 0.9005 Train AUC: 0.9960 Val AUC: 0.9731 Time: 13.52\n",
      "Epoch: 620 Train Loss: 0.0942 Val Loss: 0.2263 Acc: 0.9058 Pre: 0.9113 Recall: 0.9049 F1: 0.9081 Train AUC: 0.9952 Val AUC: 0.9731 Time: 12.41\n",
      "Epoch: 621 Train Loss: 0.0886 Val Loss: 0.2218 Acc: 0.8986 Pre: 0.8986 Recall: 0.9049 F1: 0.9018 Train AUC: 0.9956 Val AUC: 0.9732 Time: 11.99\n",
      "Epoch: 622 Train Loss: 0.0954 Val Loss: 0.2235 Acc: 0.8967 Pre: 0.8847 Recall: 0.9190 F1: 0.9016 Train AUC: 0.9952 Val AUC: 0.9724 Time: 11.78\n",
      "Epoch: 623 Train Loss: 0.0907 Val Loss: 0.2235 Acc: 0.8895 Pre: 0.8704 Recall: 0.9225 F1: 0.8957 Train AUC: 0.9965 Val AUC: 0.9744 Time: 11.78\n",
      "Epoch: 624 Train Loss: 0.0943 Val Loss: 0.2216 Acc: 0.8913 Pre: 0.8758 Recall: 0.9190 F1: 0.8969 Train AUC: 0.9956 Val AUC: 0.9752 Time: 12.20\n",
      "Epoch: 625 Train Loss: 0.1001 Val Loss: 0.2198 Acc: 0.9112 Pre: 0.9123 Recall: 0.9155 F1: 0.9139 Train AUC: 0.9951 Val AUC: 0.9743 Time: 12.83\n",
      "Epoch: 626 Train Loss: 0.0975 Val Loss: 0.2209 Acc: 0.9112 Pre: 0.9123 Recall: 0.9155 F1: 0.9139 Train AUC: 0.9946 Val AUC: 0.9733 Time: 13.19\n",
      "Epoch: 627 Train Loss: 0.0861 Val Loss: 0.2307 Acc: 0.8859 Pre: 0.8671 Recall: 0.9190 F1: 0.8923 Train AUC: 0.9963 Val AUC: 0.9730 Time: 13.67\n",
      "Epoch: 628 Train Loss: 0.1013 Val Loss: 0.2344 Acc: 0.8895 Pre: 0.8729 Recall: 0.9190 F1: 0.8954 Train AUC: 0.9942 Val AUC: 0.9737 Time: 14.29\n",
      "Epoch: 629 Train Loss: 0.0824 Val Loss: 0.2300 Acc: 0.8986 Pre: 0.8904 Recall: 0.9155 F1: 0.9028 Train AUC: 0.9967 Val AUC: 0.9742 Time: 13.35\n",
      "Epoch: 630 Train Loss: 0.0802 Val Loss: 0.2227 Acc: 0.9058 Pre: 0.9028 Recall: 0.9155 F1: 0.9091 Train AUC: 0.9967 Val AUC: 0.9749 Time: 12.45\n",
      "Epoch: 631 Train Loss: 0.0859 Val Loss: 0.2207 Acc: 0.9094 Pre: 0.9034 Recall: 0.9225 F1: 0.9129 Train AUC: 0.9963 Val AUC: 0.9732 Time: 12.06\n",
      "Epoch: 632 Train Loss: 0.0897 Val Loss: 0.2277 Acc: 0.9058 Pre: 0.8919 Recall: 0.9296 F1: 0.9103 Train AUC: 0.9962 Val AUC: 0.9708 Time: 12.09\n",
      "Epoch: 633 Train Loss: 0.1025 Val Loss: 0.2219 Acc: 0.9004 Pre: 0.8935 Recall: 0.9155 F1: 0.9043 Train AUC: 0.9956 Val AUC: 0.9748 Time: 12.58\n",
      "Epoch: 634 Train Loss: 0.0936 Val Loss: 0.2299 Acc: 0.9022 Pre: 0.8966 Recall: 0.9155 F1: 0.9059 Train AUC: 0.9955 Val AUC: 0.9745 Time: 13.16\n",
      "Epoch: 635 Train Loss: 0.0926 Val Loss: 0.2333 Acc: 0.8913 Pre: 0.8810 Recall: 0.9120 F1: 0.8962 Train AUC: 0.9951 Val AUC: 0.9728 Time: 13.56\n",
      "Epoch: 636 Train Loss: 0.0926 Val Loss: 0.2429 Acc: 0.8895 Pre: 0.8780 Recall: 0.9120 F1: 0.8946 Train AUC: 0.9952 Val AUC: 0.9695 Time: 13.32\n",
      "Epoch: 637 Train Loss: 0.0981 Val Loss: 0.2395 Acc: 0.8877 Pre: 0.8750 Recall: 0.9120 F1: 0.8931 Train AUC: 0.9951 Val AUC: 0.9709 Time: 12.61\n",
      "Epoch: 638 Train Loss: 0.0905 Val Loss: 0.2299 Acc: 0.8967 Pre: 0.8927 Recall: 0.9085 F1: 0.9005 Train AUC: 0.9955 Val AUC: 0.9727 Time: 12.25\n",
      "Epoch: 639 Train Loss: 0.0860 Val Loss: 0.2242 Acc: 0.9094 Pre: 0.9091 Recall: 0.9155 F1: 0.9123 Train AUC: 0.9958 Val AUC: 0.9733 Time: 12.70\n",
      "Epoch: 640 Train Loss: 0.0904 Val Loss: 0.2271 Acc: 0.9022 Pre: 0.8885 Recall: 0.9261 F1: 0.9069 Train AUC: 0.9955 Val AUC: 0.9728 Time: 13.32\n",
      "Epoch: 641 Train Loss: 0.0894 Val Loss: 0.2315 Acc: 0.8986 Pre: 0.8826 Recall: 0.9261 F1: 0.9038 Train AUC: 0.9961 Val AUC: 0.9726 Time: 13.60\n",
      "Epoch: 642 Train Loss: 0.0889 Val Loss: 0.2248 Acc: 0.8986 Pre: 0.8931 Recall: 0.9120 F1: 0.9024 Train AUC: 0.9967 Val AUC: 0.9729 Time: 12.66\n",
      "Epoch: 643 Train Loss: 0.0974 Val Loss: 0.2253 Acc: 0.9040 Pre: 0.9110 Recall: 0.9014 F1: 0.9062 Train AUC: 0.9950 Val AUC: 0.9735 Time: 11.89\n",
      "Epoch: 644 Train Loss: 0.0927 Val Loss: 0.2274 Acc: 0.9040 Pre: 0.8997 Recall: 0.9155 F1: 0.9075 Train AUC: 0.9949 Val AUC: 0.9734 Time: 11.67\n",
      "Epoch: 645 Train Loss: 0.0875 Val Loss: 0.2232 Acc: 0.8986 Pre: 0.8904 Recall: 0.9155 F1: 0.9028 Train AUC: 0.9955 Val AUC: 0.9742 Time: 12.19\n",
      "Epoch: 646 Train Loss: 0.0857 Val Loss: 0.2198 Acc: 0.8986 Pre: 0.8851 Recall: 0.9225 F1: 0.9034 Train AUC: 0.9962 Val AUC: 0.9746 Time: 12.67\n",
      "Epoch: 647 Train Loss: 0.0915 Val Loss: 0.2175 Acc: 0.9130 Pre: 0.9126 Recall: 0.9190 F1: 0.9158 Train AUC: 0.9960 Val AUC: 0.9732 Time: 13.11\n",
      "Epoch: 648 Train Loss: 0.0907 Val Loss: 0.2187 Acc: 0.9149 Pre: 0.9187 Recall: 0.9155 F1: 0.9171 Train AUC: 0.9962 Val AUC: 0.9729 Time: 13.60\n",
      "Epoch: 649 Train Loss: 0.0907 Val Loss: 0.2249 Acc: 0.9076 Pre: 0.8976 Recall: 0.9261 F1: 0.9116 Train AUC: 0.9962 Val AUC: 0.9735 Time: 14.15\n",
      "Epoch: 650 Train Loss: 0.0939 Val Loss: 0.2364 Acc: 0.9022 Pre: 0.8885 Recall: 0.9261 F1: 0.9069 Train AUC: 0.9952 Val AUC: 0.9738 Time: 12.92\n",
      "Epoch: 651 Train Loss: 0.0966 Val Loss: 0.2338 Acc: 0.8895 Pre: 0.8754 Recall: 0.9155 F1: 0.8950 Train AUC: 0.9946 Val AUC: 0.9734 Time: 11.96\n",
      "Epoch: 652 Train Loss: 0.0895 Val Loss: 0.2324 Acc: 0.8841 Pre: 0.8716 Recall: 0.9085 F1: 0.8897 Train AUC: 0.9958 Val AUC: 0.9729 Time: 12.26\n",
      "Epoch: 653 Train Loss: 0.0944 Val Loss: 0.2287 Acc: 0.8931 Pre: 0.8814 Recall: 0.9155 F1: 0.8981 Train AUC: 0.9952 Val AUC: 0.9723 Time: 12.62\n",
      "Epoch: 654 Train Loss: 0.0942 Val Loss: 0.2299 Acc: 0.9022 Pre: 0.8859 Recall: 0.9296 F1: 0.9072 Train AUC: 0.9953 Val AUC: 0.9719 Time: 13.71\n",
      "Epoch: 655 Train Loss: 0.0937 Val Loss: 0.2348 Acc: 0.8967 Pre: 0.8900 Recall: 0.9120 F1: 0.9009 Train AUC: 0.9963 Val AUC: 0.9731 Time: 13.67\n",
      "Epoch: 656 Train Loss: 0.0929 Val Loss: 0.2351 Acc: 0.8986 Pre: 0.8958 Recall: 0.9085 F1: 0.9021 Train AUC: 0.9959 Val AUC: 0.9732 Time: 12.82\n",
      "Epoch: 657 Train Loss: 0.0851 Val Loss: 0.2338 Acc: 0.8913 Pre: 0.8836 Recall: 0.9085 F1: 0.8958 Train AUC: 0.9960 Val AUC: 0.9733 Time: 12.70\n",
      "Epoch: 658 Train Loss: 0.0899 Val Loss: 0.2364 Acc: 0.8913 Pre: 0.8810 Recall: 0.9120 F1: 0.8962 Train AUC: 0.9957 Val AUC: 0.9710 Time: 12.78\n",
      "Epoch: 659 Train Loss: 0.0903 Val Loss: 0.2367 Acc: 0.8967 Pre: 0.8900 Recall: 0.9120 F1: 0.9009 Train AUC: 0.9960 Val AUC: 0.9698 Time: 12.72\n",
      "Epoch: 660 Train Loss: 0.0910 Val Loss: 0.2286 Acc: 0.8949 Pre: 0.8844 Recall: 0.9155 F1: 0.8997 Train AUC: 0.9962 Val AUC: 0.9720 Time: 13.10\n",
      "Epoch: 661 Train Loss: 0.0900 Val Loss: 0.2265 Acc: 0.9022 Pre: 0.8859 Recall: 0.9296 F1: 0.9072 Train AUC: 0.9961 Val AUC: 0.9736 Time: 12.95\n",
      "Epoch: 662 Train Loss: 0.0916 Val Loss: 0.2284 Acc: 0.9040 Pre: 0.8915 Recall: 0.9261 F1: 0.9085 Train AUC: 0.9954 Val AUC: 0.9735 Time: 12.49\n",
      "Epoch: 663 Train Loss: 0.0948 Val Loss: 0.2250 Acc: 0.9130 Pre: 0.9214 Recall: 0.9085 F1: 0.9149 Train AUC: 0.9957 Val AUC: 0.9733 Time: 13.24\n",
      "Epoch: 664 Train Loss: 0.0854 Val Loss: 0.2260 Acc: 0.9058 Pre: 0.9203 Recall: 0.8944 F1: 0.9071 Train AUC: 0.9964 Val AUC: 0.9732 Time: 12.82\n",
      "Epoch: 665 Train Loss: 0.0895 Val Loss: 0.2298 Acc: 0.8949 Pre: 0.8870 Recall: 0.9120 F1: 0.8993 Train AUC: 0.9963 Val AUC: 0.9723 Time: 12.80\n",
      "Epoch: 666 Train Loss: 0.0928 Val Loss: 0.2466 Acc: 0.8877 Pre: 0.8675 Recall: 0.9225 F1: 0.8942 Train AUC: 0.9955 Val AUC: 0.9729 Time: 12.73\n",
      "Epoch: 667 Train Loss: 0.0903 Val Loss: 0.2403 Acc: 0.8949 Pre: 0.8792 Recall: 0.9225 F1: 0.9003 Train AUC: 0.9960 Val AUC: 0.9734 Time: 13.39\n",
      "Epoch: 668 Train Loss: 0.0818 Val Loss: 0.2296 Acc: 0.8967 Pre: 0.8900 Recall: 0.9120 F1: 0.9009 Train AUC: 0.9964 Val AUC: 0.9739 Time: 13.62\n",
      "Epoch: 669 Train Loss: 0.0906 Val Loss: 0.2248 Acc: 0.8986 Pre: 0.8878 Recall: 0.9190 F1: 0.9031 Train AUC: 0.9956 Val AUC: 0.9737 Time: 12.57\n",
      "Epoch: 670 Train Loss: 0.0789 Val Loss: 0.2253 Acc: 0.8967 Pre: 0.8796 Recall: 0.9261 F1: 0.9022 Train AUC: 0.9973 Val AUC: 0.9733 Time: 12.36\n",
      "Epoch: 671 Train Loss: 0.0859 Val Loss: 0.2206 Acc: 0.8986 Pre: 0.8826 Recall: 0.9261 F1: 0.9038 Train AUC: 0.9964 Val AUC: 0.9741 Time: 11.70\n",
      "Epoch: 672 Train Loss: 0.0869 Val Loss: 0.2195 Acc: 0.9167 Pre: 0.9220 Recall: 0.9155 F1: 0.9187 Train AUC: 0.9963 Val AUC: 0.9749 Time: 12.29\n",
      "Epoch: 673 Train Loss: 0.0764 Val Loss: 0.2225 Acc: 0.9076 Pre: 0.9059 Recall: 0.9155 F1: 0.9107 Train AUC: 0.9973 Val AUC: 0.9741 Time: 12.80\n",
      "Epoch: 674 Train Loss: 0.0850 Val Loss: 0.2294 Acc: 0.8931 Pre: 0.8788 Recall: 0.9190 F1: 0.8985 Train AUC: 0.9957 Val AUC: 0.9729 Time: 13.22\n",
      "Epoch: 675 Train Loss: 0.0802 Val Loss: 0.2272 Acc: 0.9004 Pre: 0.8881 Recall: 0.9225 F1: 0.9050 Train AUC: 0.9972 Val AUC: 0.9731 Time: 13.63\n",
      "Epoch: 676 Train Loss: 0.0790 Val Loss: 0.2225 Acc: 0.8986 Pre: 0.8851 Recall: 0.9225 F1: 0.9034 Train AUC: 0.9967 Val AUC: 0.9743 Time: 13.75\n",
      "Epoch: 677 Train Loss: 0.0788 Val Loss: 0.2187 Acc: 0.9040 Pre: 0.9024 Recall: 0.9120 F1: 0.9072 Train AUC: 0.9973 Val AUC: 0.9751 Time: 12.85\n",
      "Epoch: 678 Train Loss: 0.0885 Val Loss: 0.2193 Acc: 0.9022 Pre: 0.8938 Recall: 0.9190 F1: 0.9062 Train AUC: 0.9959 Val AUC: 0.9750 Time: 11.95\n",
      "Epoch: 679 Train Loss: 0.0952 Val Loss: 0.2228 Acc: 0.8931 Pre: 0.8763 Recall: 0.9225 F1: 0.8988 Train AUC: 0.9948 Val AUC: 0.9748 Time: 12.28\n",
      "Epoch: 680 Train Loss: 0.0788 Val Loss: 0.2284 Acc: 0.8895 Pre: 0.8704 Recall: 0.9225 F1: 0.8957 Train AUC: 0.9972 Val AUC: 0.9744 Time: 12.64\n",
      "Epoch: 681 Train Loss: 0.0906 Val Loss: 0.2312 Acc: 0.9040 Pre: 0.9024 Recall: 0.9120 F1: 0.9072 Train AUC: 0.9961 Val AUC: 0.9735 Time: 12.56\n",
      "Epoch: 682 Train Loss: 0.0817 Val Loss: 0.2322 Acc: 0.9058 Pre: 0.9113 Recall: 0.9049 F1: 0.9081 Train AUC: 0.9966 Val AUC: 0.9729 Time: 13.17\n",
      "Epoch: 683 Train Loss: 0.0847 Val Loss: 0.2285 Acc: 0.9058 Pre: 0.9056 Recall: 0.9120 F1: 0.9088 Train AUC: 0.9961 Val AUC: 0.9723 Time: 13.56\n",
      "Epoch: 684 Train Loss: 0.0903 Val Loss: 0.2463 Acc: 0.8913 Pre: 0.8709 Recall: 0.9261 F1: 0.8976 Train AUC: 0.9963 Val AUC: 0.9707 Time: 12.97\n",
      "Epoch: 685 Train Loss: 0.0847 Val Loss: 0.2496 Acc: 0.8859 Pre: 0.8623 Recall: 0.9261 F1: 0.8930 Train AUC: 0.9972 Val AUC: 0.9718 Time: 11.91\n",
      "Epoch: 686 Train Loss: 0.0932 Val Loss: 0.2274 Acc: 0.9112 Pre: 0.9181 Recall: 0.9085 F1: 0.9133 Train AUC: 0.9965 Val AUC: 0.9731 Time: 11.65\n",
      "Epoch: 687 Train Loss: 0.0847 Val Loss: 0.2308 Acc: 0.9130 Pre: 0.9245 Recall: 0.9049 F1: 0.9146 Train AUC: 0.9965 Val AUC: 0.9735 Time: 12.19\n",
      "Epoch: 688 Train Loss: 0.0921 Val Loss: 0.2271 Acc: 0.9058 Pre: 0.9028 Recall: 0.9155 F1: 0.9091 Train AUC: 0.9953 Val AUC: 0.9742 Time: 12.64\n",
      "Epoch: 689 Train Loss: 0.0832 Val Loss: 0.2366 Acc: 0.8931 Pre: 0.8713 Recall: 0.9296 F1: 0.8995 Train AUC: 0.9961 Val AUC: 0.9729 Time: 13.03\n",
      "Epoch: 690 Train Loss: 0.0767 Val Loss: 0.2527 Acc: 0.8913 Pre: 0.8636 Recall: 0.9366 F1: 0.8986 Train AUC: 0.9972 Val AUC: 0.9690 Time: 13.60\n",
      "Epoch: 691 Train Loss: 0.0789 Val Loss: 0.2359 Acc: 0.8895 Pre: 0.8729 Recall: 0.9190 F1: 0.8954 Train AUC: 0.9975 Val AUC: 0.9700 Time: 14.22\n",
      "Epoch: 692 Train Loss: 0.0917 Val Loss: 0.2270 Acc: 0.8986 Pre: 0.8986 Recall: 0.9049 F1: 0.9018 Train AUC: 0.9953 Val AUC: 0.9741 Time: 13.03\n",
      "Epoch: 693 Train Loss: 0.0889 Val Loss: 0.2313 Acc: 0.9040 Pre: 0.8997 Recall: 0.9155 F1: 0.9075 Train AUC: 0.9957 Val AUC: 0.9749 Time: 12.57\n",
      "Epoch: 694 Train Loss: 0.0830 Val Loss: 0.2372 Acc: 0.8967 Pre: 0.8721 Recall: 0.9366 F1: 0.9032 Train AUC: 0.9958 Val AUC: 0.9747 Time: 12.15\n",
      "Epoch: 695 Train Loss: 0.0839 Val Loss: 0.2343 Acc: 0.8949 Pre: 0.8693 Recall: 0.9366 F1: 0.9017 Train AUC: 0.9966 Val AUC: 0.9741 Time: 12.53\n",
      "Epoch: 696 Train Loss: 0.0834 Val Loss: 0.2208 Acc: 0.9149 Pre: 0.9100 Recall: 0.9261 F1: 0.9180 Train AUC: 0.9974 Val AUC: 0.9733 Time: 13.04\n",
      "Epoch: 697 Train Loss: 0.0856 Val Loss: 0.2230 Acc: 0.9094 Pre: 0.9270 Recall: 0.8944 F1: 0.9104 Train AUC: 0.9965 Val AUC: 0.9737 Time: 13.03\n",
      "Epoch: 698 Train Loss: 0.0942 Val Loss: 0.2193 Acc: 0.9094 Pre: 0.9034 Recall: 0.9225 F1: 0.9129 Train AUC: 0.9963 Val AUC: 0.9750 Time: 12.93\n",
      "Epoch: 699 Train Loss: 0.0814 Val Loss: 0.2337 Acc: 0.8895 Pre: 0.8656 Recall: 0.9296 F1: 0.8964 Train AUC: 0.9968 Val AUC: 0.9738 Time: 12.52\n",
      "Epoch: 700 Train Loss: 0.0878 Val Loss: 0.2298 Acc: 0.8895 Pre: 0.8754 Recall: 0.9155 F1: 0.8950 Train AUC: 0.9963 Val AUC: 0.9733 Time: 12.77\n",
      "Epoch: 701 Train Loss: 0.0864 Val Loss: 0.2221 Acc: 0.9022 Pre: 0.8938 Recall: 0.9190 F1: 0.9062 Train AUC: 0.9960 Val AUC: 0.9731 Time: 13.31\n",
      "Epoch: 702 Train Loss: 0.0761 Val Loss: 0.2208 Acc: 0.9112 Pre: 0.9181 Recall: 0.9085 F1: 0.9133 Train AUC: 0.9974 Val AUC: 0.9737 Time: 13.78\n",
      "Epoch: 703 Train Loss: 0.0864 Val Loss: 0.2192 Acc: 0.8986 Pre: 0.8986 Recall: 0.9049 F1: 0.9018 Train AUC: 0.9967 Val AUC: 0.9754 Time: 13.22\n",
      "Epoch: 704 Train Loss: 0.0959 Val Loss: 0.2329 Acc: 0.8949 Pre: 0.8693 Recall: 0.9366 F1: 0.9017 Train AUC: 0.9950 Val AUC: 0.9759 Time: 12.21\n",
      "Epoch: 705 Train Loss: 0.0912 Val Loss: 0.2399 Acc: 0.8967 Pre: 0.8673 Recall: 0.9437 F1: 0.9039 Train AUC: 0.9966 Val AUC: 0.9753 Time: 11.43\n",
      "Epoch: 706 Train Loss: 0.0859 Val Loss: 0.2241 Acc: 0.9040 Pre: 0.8969 Recall: 0.9190 F1: 0.9078 Train AUC: 0.9971 Val AUC: 0.9748 Time: 11.76\n",
      "Epoch: 707 Train Loss: 0.0748 Val Loss: 0.2307 Acc: 0.9040 Pre: 0.9200 Recall: 0.8908 F1: 0.9052 Train AUC: 0.9975 Val AUC: 0.9744 Time: 12.11\n",
      "Epoch: 708 Train Loss: 0.0918 Val Loss: 0.2246 Acc: 0.9058 Pre: 0.8973 Recall: 0.9225 F1: 0.9097 Train AUC: 0.9970 Val AUC: 0.9736 Time: 12.61\n",
      "Epoch: 709 Train Loss: 0.0788 Val Loss: 0.2475 Acc: 0.8913 Pre: 0.8684 Recall: 0.9296 F1: 0.8980 Train AUC: 0.9969 Val AUC: 0.9733 Time: 13.50\n",
      "Epoch: 710 Train Loss: 0.0890 Val Loss: 0.2352 Acc: 0.8931 Pre: 0.8763 Recall: 0.9225 F1: 0.8988 Train AUC: 0.9973 Val AUC: 0.9737 Time: 13.81\n",
      "Epoch: 711 Train Loss: 0.0961 Val Loss: 0.2259 Acc: 0.9112 Pre: 0.9123 Recall: 0.9155 F1: 0.9139 Train AUC: 0.9951 Val AUC: 0.9736 Time: 14.13\n",
      "Epoch: 712 Train Loss: 0.0877 Val Loss: 0.2308 Acc: 0.8986 Pre: 0.8878 Recall: 0.9190 F1: 0.9031 Train AUC: 0.9960 Val AUC: 0.9719 Time: 13.73\n",
      "Epoch: 713 Train Loss: 0.0810 Val Loss: 0.2394 Acc: 0.8913 Pre: 0.8733 Recall: 0.9225 F1: 0.8973 Train AUC: 0.9966 Val AUC: 0.9706 Time: 12.51\n",
      "Epoch: 714 Train Loss: 0.0931 Val Loss: 0.2407 Acc: 0.9004 Pre: 0.8881 Recall: 0.9225 F1: 0.9050 Train AUC: 0.9960 Val AUC: 0.9724 Time: 12.00\n",
      "Epoch: 715 Train Loss: 0.0847 Val Loss: 0.2411 Acc: 0.9004 Pre: 0.8962 Recall: 0.9120 F1: 0.9040 Train AUC: 0.9966 Val AUC: 0.9737 Time: 12.25\n",
      "Epoch: 716 Train Loss: 0.0800 Val Loss: 0.2294 Acc: 0.9040 Pre: 0.9081 Recall: 0.9049 F1: 0.9065 Train AUC: 0.9966 Val AUC: 0.9745 Time: 12.80\n",
      "Epoch: 717 Train Loss: 0.0802 Val Loss: 0.2280 Acc: 0.9040 Pre: 0.8942 Recall: 0.9225 F1: 0.9081 Train AUC: 0.9968 Val AUC: 0.9728 Time: 13.03\n",
      "Epoch: 718 Train Loss: 0.0802 Val Loss: 0.2472 Acc: 0.9112 Pre: 0.9010 Recall: 0.9296 F1: 0.9151 Train AUC: 0.9970 Val AUC: 0.9678 Time: 13.60\n",
      "Epoch: 719 Train Loss: 0.0879 Val Loss: 0.2323 Acc: 0.9004 Pre: 0.8855 Recall: 0.9261 F1: 0.9053 Train AUC: 0.9961 Val AUC: 0.9731 Time: 13.08\n",
      "Epoch: 720 Train Loss: 0.0807 Val Loss: 0.2340 Acc: 0.9094 Pre: 0.9091 Recall: 0.9155 F1: 0.9123 Train AUC: 0.9968 Val AUC: 0.9738 Time: 12.62\n",
      "Epoch: 721 Train Loss: 0.0843 Val Loss: 0.2359 Acc: 0.9149 Pre: 0.9129 Recall: 0.9225 F1: 0.9177 Train AUC: 0.9958 Val AUC: 0.9732 Time: 12.79\n",
      "Epoch: 722 Train Loss: 0.0805 Val Loss: 0.2287 Acc: 0.9004 Pre: 0.8829 Recall: 0.9296 F1: 0.9057 Train AUC: 0.9965 Val AUC: 0.9740 Time: 13.13\n",
      "Epoch: 723 Train Loss: 0.0866 Val Loss: 0.2226 Acc: 0.9112 Pre: 0.9094 Recall: 0.9190 F1: 0.9142 Train AUC: 0.9959 Val AUC: 0.9717 Time: 13.04\n",
      "Epoch: 724 Train Loss: 0.0852 Val Loss: 0.2239 Acc: 0.9185 Pre: 0.9283 Recall: 0.9120 F1: 0.9201 Train AUC: 0.9964 Val AUC: 0.9718 Time: 13.86\n",
      "Epoch: 725 Train Loss: 0.0975 Val Loss: 0.2237 Acc: 0.8986 Pre: 0.8878 Recall: 0.9190 F1: 0.9031 Train AUC: 0.9955 Val AUC: 0.9747 Time: 12.72\n",
      "Epoch: 726 Train Loss: 0.0820 Val Loss: 0.2419 Acc: 0.8913 Pre: 0.8684 Recall: 0.9296 F1: 0.8980 Train AUC: 0.9963 Val AUC: 0.9745 Time: 12.03\n",
      "Epoch: 727 Train Loss: 0.0826 Val Loss: 0.2533 Acc: 0.8913 Pre: 0.8660 Recall: 0.9331 F1: 0.8983 Train AUC: 0.9967 Val AUC: 0.9742 Time: 12.51\n",
      "Epoch: 728 Train Loss: 0.0812 Val Loss: 0.2428 Acc: 0.8949 Pre: 0.8717 Recall: 0.9331 F1: 0.9014 Train AUC: 0.9970 Val AUC: 0.9740 Time: 12.19\n",
      "Epoch: 729 Train Loss: 0.0840 Val Loss: 0.2261 Acc: 0.8967 Pre: 0.8822 Recall: 0.9225 F1: 0.9019 Train AUC: 0.9962 Val AUC: 0.9745 Time: 12.70\n",
      "Epoch: 730 Train Loss: 0.0829 Val Loss: 0.2214 Acc: 0.9112 Pre: 0.9123 Recall: 0.9155 F1: 0.9139 Train AUC: 0.9963 Val AUC: 0.9737 Time: 13.23\n",
      "Epoch: 731 Train Loss: 0.0791 Val Loss: 0.2194 Acc: 0.9149 Pre: 0.9072 Recall: 0.9296 F1: 0.9183 Train AUC: 0.9974 Val AUC: 0.9739 Time: 13.60\n",
      "Epoch: 732 Train Loss: 0.0817 Val Loss: 0.2218 Acc: 0.9076 Pre: 0.8949 Recall: 0.9296 F1: 0.9119 Train AUC: 0.9974 Val AUC: 0.9743 Time: 13.34\n",
      "Epoch: 733 Train Loss: 0.0838 Val Loss: 0.2261 Acc: 0.9185 Pre: 0.9135 Recall: 0.9296 F1: 0.9215 Train AUC: 0.9969 Val AUC: 0.9748 Time: 12.88\n",
      "Epoch: 734 Train Loss: 0.0802 Val Loss: 0.2301 Acc: 0.9167 Pre: 0.9132 Recall: 0.9261 F1: 0.9196 Train AUC: 0.9965 Val AUC: 0.9737 Time: 12.53\n",
      "Epoch: 735 Train Loss: 0.0829 Val Loss: 0.2466 Acc: 0.8967 Pre: 0.8771 Recall: 0.9296 F1: 0.9026 Train AUC: 0.9963 Val AUC: 0.9719 Time: 12.11\n",
      "Epoch: 736 Train Loss: 0.0840 Val Loss: 0.2491 Acc: 0.9004 Pre: 0.8804 Recall: 0.9331 F1: 0.9060 Train AUC: 0.9965 Val AUC: 0.9694 Time: 12.76\n",
      "Epoch: 737 Train Loss: 0.0875 Val Loss: 0.2404 Acc: 0.9004 Pre: 0.8881 Recall: 0.9225 F1: 0.9050 Train AUC: 0.9958 Val AUC: 0.9704 Time: 13.11\n",
      "Epoch: 738 Train Loss: 0.0887 Val Loss: 0.2384 Acc: 0.9167 Pre: 0.9161 Recall: 0.9225 F1: 0.9193 Train AUC: 0.9962 Val AUC: 0.9725 Time: 13.71\n",
      "Epoch: 739 Train Loss: 0.0953 Val Loss: 0.2353 Acc: 0.9130 Pre: 0.9184 Recall: 0.9120 F1: 0.9152 Train AUC: 0.9942 Val AUC: 0.9747 Time: 13.18\n",
      "Epoch: 740 Train Loss: 0.0912 Val Loss: 0.2338 Acc: 0.9004 Pre: 0.8829 Recall: 0.9296 F1: 0.9057 Train AUC: 0.9954 Val AUC: 0.9747 Time: 13.03\n",
      "Epoch: 741 Train Loss: 0.0767 Val Loss: 0.2593 Acc: 0.8877 Pre: 0.8581 Recall: 0.9366 F1: 0.8956 Train AUC: 0.9972 Val AUC: 0.9701 Time: 12.09\n",
      "Epoch: 742 Train Loss: 0.0876 Val Loss: 0.2345 Acc: 0.9058 Pre: 0.8973 Recall: 0.9225 F1: 0.9097 Train AUC: 0.9971 Val AUC: 0.9720 Time: 12.29\n",
      "Epoch: 743 Train Loss: 0.0962 Val Loss: 0.2309 Acc: 0.9112 Pre: 0.9242 Recall: 0.9014 F1: 0.9127 Train AUC: 0.9945 Val AUC: 0.9742 Time: 12.70\n",
      "Epoch: 744 Train Loss: 0.0887 Val Loss: 0.2233 Acc: 0.8931 Pre: 0.8788 Recall: 0.9190 F1: 0.8985 Train AUC: 0.9960 Val AUC: 0.9737 Time: 13.09\n",
      "Epoch: 745 Train Loss: 0.0772 Val Loss: 0.2500 Acc: 0.8877 Pre: 0.8581 Recall: 0.9366 F1: 0.8956 Train AUC: 0.9973 Val AUC: 0.9700 Time: 13.84\n",
      "Epoch: 746 Train Loss: 0.0933 Val Loss: 0.2354 Acc: 0.8895 Pre: 0.8704 Recall: 0.9225 F1: 0.8957 Train AUC: 0.9959 Val AUC: 0.9731 Time: 13.34\n",
      "Epoch: 747 Train Loss: 0.0764 Val Loss: 0.2281 Acc: 0.9040 Pre: 0.9053 Recall: 0.9085 F1: 0.9069 Train AUC: 0.9977 Val AUC: 0.9741 Time: 12.50\n",
      "Epoch: 748 Train Loss: 0.0756 Val Loss: 0.2333 Acc: 0.9058 Pre: 0.9203 Recall: 0.8944 F1: 0.9071 Train AUC: 0.9970 Val AUC: 0.9740 Time: 12.44\n",
      "Epoch: 749 Train Loss: 0.0828 Val Loss: 0.2280 Acc: 0.8949 Pre: 0.8818 Recall: 0.9190 F1: 0.9000 Train AUC: 0.9967 Val AUC: 0.9731 Time: 12.69\n",
      "Epoch: 750 Train Loss: 0.0859 Val Loss: 0.2451 Acc: 0.9022 Pre: 0.8758 Recall: 0.9437 F1: 0.9085 Train AUC: 0.9965 Val AUC: 0.9715 Time: 13.14\n",
      "Epoch: 751 Train Loss: 0.0832 Val Loss: 0.2233 Acc: 0.9040 Pre: 0.8863 Recall: 0.9331 F1: 0.9091 Train AUC: 0.9977 Val AUC: 0.9740 Time: 13.20\n",
      "Epoch: 752 Train Loss: 0.0829 Val Loss: 0.2151 Acc: 0.9040 Pre: 0.9053 Recall: 0.9085 F1: 0.9069 Train AUC: 0.9970 Val AUC: 0.9754 Time: 12.75\n",
      "Epoch: 753 Train Loss: 0.0843 Val Loss: 0.2218 Acc: 0.9112 Pre: 0.9181 Recall: 0.9085 F1: 0.9133 Train AUC: 0.9964 Val AUC: 0.9750 Time: 12.43\n",
      "Epoch: 754 Train Loss: 0.0769 Val Loss: 0.2228 Acc: 0.9076 Pre: 0.9031 Recall: 0.9190 F1: 0.9110 Train AUC: 0.9971 Val AUC: 0.9744 Time: 13.02\n",
      "Epoch: 755 Train Loss: 0.0770 Val Loss: 0.2265 Acc: 0.8913 Pre: 0.8733 Recall: 0.9225 F1: 0.8973 Train AUC: 0.9970 Val AUC: 0.9735 Time: 13.45\n",
      "Epoch: 756 Train Loss: 0.0789 Val Loss: 0.2317 Acc: 0.8895 Pre: 0.8656 Recall: 0.9296 F1: 0.8964 Train AUC: 0.9970 Val AUC: 0.9740 Time: 13.40\n",
      "Epoch: 757 Train Loss: 0.0739 Val Loss: 0.2252 Acc: 0.8986 Pre: 0.8904 Recall: 0.9155 F1: 0.9028 Train AUC: 0.9973 Val AUC: 0.9736 Time: 12.97\n",
      "Epoch: 758 Train Loss: 0.0740 Val Loss: 0.2278 Acc: 0.9076 Pre: 0.9059 Recall: 0.9155 F1: 0.9107 Train AUC: 0.9977 Val AUC: 0.9731 Time: 12.98\n",
      "Epoch: 759 Train Loss: 0.0811 Val Loss: 0.2346 Acc: 0.9004 Pre: 0.8804 Recall: 0.9331 F1: 0.9060 Train AUC: 0.9965 Val AUC: 0.9731 Time: 13.30\n",
      "Epoch: 760 Train Loss: 0.0880 Val Loss: 0.2236 Acc: 0.8931 Pre: 0.8713 Recall: 0.9296 F1: 0.8995 Train AUC: 0.9961 Val AUC: 0.9752 Time: 12.78\n",
      "Epoch: 761 Train Loss: 0.0812 Val Loss: 0.2102 Acc: 0.9185 Pre: 0.9164 Recall: 0.9261 F1: 0.9212 Train AUC: 0.9970 Val AUC: 0.9759 Time: 12.36\n",
      "Epoch: 762 Train Loss: 0.0840 Val Loss: 0.2188 Acc: 0.9112 Pre: 0.9094 Recall: 0.9190 F1: 0.9142 Train AUC: 0.9965 Val AUC: 0.9752 Time: 12.89\n",
      "Epoch: 763 Train Loss: 0.0793 Val Loss: 0.2251 Acc: 0.9040 Pre: 0.9053 Recall: 0.9085 F1: 0.9069 Train AUC: 0.9967 Val AUC: 0.9742 Time: 13.28\n",
      "Epoch: 764 Train Loss: 0.0823 Val Loss: 0.2228 Acc: 0.9040 Pre: 0.8889 Recall: 0.9296 F1: 0.9088 Train AUC: 0.9962 Val AUC: 0.9738 Time: 13.18\n",
      "Epoch: 765 Train Loss: 0.0821 Val Loss: 0.2222 Acc: 0.9058 Pre: 0.8919 Recall: 0.9296 F1: 0.9103 Train AUC: 0.9970 Val AUC: 0.9736 Time: 13.20\n",
      "Epoch: 766 Train Loss: 0.0804 Val Loss: 0.2304 Acc: 0.9040 Pre: 0.8889 Recall: 0.9296 F1: 0.9088 Train AUC: 0.9968 Val AUC: 0.9725 Time: 12.21\n",
      "Epoch: 767 Train Loss: 0.0846 Val Loss: 0.2325 Acc: 0.8967 Pre: 0.8771 Recall: 0.9296 F1: 0.9026 Train AUC: 0.9964 Val AUC: 0.9734 Time: 11.91\n",
      "Epoch: 768 Train Loss: 0.0800 Val Loss: 0.2260 Acc: 0.8986 Pre: 0.8826 Recall: 0.9261 F1: 0.9038 Train AUC: 0.9968 Val AUC: 0.9746 Time: 12.39\n",
      "Epoch: 769 Train Loss: 0.0825 Val Loss: 0.2254 Acc: 0.9076 Pre: 0.8976 Recall: 0.9261 F1: 0.9116 Train AUC: 0.9965 Val AUC: 0.9744 Time: 13.13\n",
      "Epoch: 770 Train Loss: 0.0768 Val Loss: 0.2236 Acc: 0.9076 Pre: 0.8976 Recall: 0.9261 F1: 0.9116 Train AUC: 0.9967 Val AUC: 0.9737 Time: 13.42\n",
      "Epoch: 771 Train Loss: 0.0776 Val Loss: 0.2251 Acc: 0.9112 Pre: 0.9038 Recall: 0.9261 F1: 0.9148 Train AUC: 0.9971 Val AUC: 0.9719 Time: 13.97\n",
      "Epoch: 772 Train Loss: 0.0744 Val Loss: 0.2281 Acc: 0.9004 Pre: 0.8855 Recall: 0.9261 F1: 0.9053 Train AUC: 0.9979 Val AUC: 0.9720 Time: 13.75\n",
      "Epoch: 773 Train Loss: 0.0971 Val Loss: 0.2399 Acc: 0.9112 Pre: 0.9010 Recall: 0.9296 F1: 0.9151 Train AUC: 0.9962 Val AUC: 0.9749 Time: 12.51\n",
      "Epoch: 774 Train Loss: 0.0835 Val Loss: 0.2534 Acc: 0.9130 Pre: 0.9069 Recall: 0.9261 F1: 0.9164 Train AUC: 0.9964 Val AUC: 0.9737 Time: 12.18\n",
      "Epoch: 775 Train Loss: 0.0961 Val Loss: 0.2532 Acc: 0.9004 Pre: 0.8779 Recall: 0.9366 F1: 0.9063 Train AUC: 0.9945 Val AUC: 0.9723 Time: 11.91\n",
      "Epoch: 776 Train Loss: 0.0917 Val Loss: 0.2644 Acc: 0.9004 Pre: 0.8779 Recall: 0.9366 F1: 0.9063 Train AUC: 0.9953 Val AUC: 0.9682 Time: 12.34\n",
      "Epoch: 777 Train Loss: 0.0948 Val Loss: 0.2454 Acc: 0.9004 Pre: 0.9018 Recall: 0.9049 F1: 0.9033 Train AUC: 0.9956 Val AUC: 0.9685 Time: 12.76\n",
      "Epoch: 778 Train Loss: 0.0887 Val Loss: 0.2298 Acc: 0.9130 Pre: 0.9338 Recall: 0.8944 F1: 0.9137 Train AUC: 0.9962 Val AUC: 0.9725 Time: 13.43\n",
      "Epoch: 779 Train Loss: 0.0863 Val Loss: 0.2312 Acc: 0.9058 Pre: 0.9085 Recall: 0.9085 F1: 0.9085 Train AUC: 0.9967 Val AUC: 0.9735 Time: 13.69\n",
      "Epoch: 780 Train Loss: 0.1004 Val Loss: 0.2582 Acc: 0.8931 Pre: 0.8664 Recall: 0.9366 F1: 0.9002 Train AUC: 0.9940 Val AUC: 0.9735 Time: 13.29\n",
      "Epoch: 781 Train Loss: 0.1003 Val Loss: 0.2441 Acc: 0.8949 Pre: 0.8645 Recall: 0.9437 F1: 0.9024 Train AUC: 0.9953 Val AUC: 0.9744 Time: 12.58\n",
      "Epoch: 782 Train Loss: 0.0939 Val Loss: 0.2257 Acc: 0.8967 Pre: 0.8874 Recall: 0.9155 F1: 0.9012 Train AUC: 0.9956 Val AUC: 0.9736 Time: 12.80\n",
      "Epoch: 783 Train Loss: 0.0872 Val Loss: 0.2394 Acc: 0.9094 Pre: 0.9149 Recall: 0.9085 F1: 0.9117 Train AUC: 0.9963 Val AUC: 0.9688 Time: 12.53\n",
      "Epoch: 784 Train Loss: 0.1039 Val Loss: 0.2404 Acc: 0.8986 Pre: 0.8750 Recall: 0.9366 F1: 0.9048 Train AUC: 0.9951 Val AUC: 0.9710 Time: 12.31\n",
      "Epoch: 785 Train Loss: 0.0846 Val Loss: 0.2332 Acc: 0.8931 Pre: 0.8664 Recall: 0.9366 F1: 0.9002 Train AUC: 0.9966 Val AUC: 0.9751 Time: 12.70\n",
      "Epoch: 786 Train Loss: 0.0830 Val Loss: 0.2350 Acc: 0.9058 Pre: 0.8867 Recall: 0.9366 F1: 0.9110 Train AUC: 0.9965 Val AUC: 0.9755 Time: 13.17\n",
      "Epoch: 787 Train Loss: 0.0896 Val Loss: 0.2257 Acc: 0.9257 Pre: 0.9293 Recall: 0.9261 F1: 0.9277 Train AUC: 0.9966 Val AUC: 0.9752 Time: 13.79\n",
      "Epoch: 788 Train Loss: 0.0830 Val Loss: 0.2245 Acc: 0.9203 Pre: 0.9286 Recall: 0.9155 F1: 0.9220 Train AUC: 0.9965 Val AUC: 0.9741 Time: 13.24\n",
      "Epoch: 789 Train Loss: 0.0841 Val Loss: 0.2308 Acc: 0.9058 Pre: 0.8973 Recall: 0.9225 F1: 0.9097 Train AUC: 0.9970 Val AUC: 0.9718 Time: 12.83\n",
      "Epoch: 790 Train Loss: 0.0792 Val Loss: 0.2611 Acc: 0.8913 Pre: 0.8613 Recall: 0.9401 F1: 0.8990 Train AUC: 0.9969 Val AUC: 0.9689 Time: 13.02\n",
      "Epoch: 791 Train Loss: 0.0919 Val Loss: 0.2218 Acc: 0.9185 Pre: 0.9135 Recall: 0.9296 F1: 0.9215 Train AUC: 0.9961 Val AUC: 0.9742 Time: 12.91\n",
      "Epoch: 792 Train Loss: 0.0725 Val Loss: 0.2224 Acc: 0.9112 Pre: 0.9211 Recall: 0.9049 F1: 0.9130 Train AUC: 0.9978 Val AUC: 0.9750 Time: 13.28\n",
      "Epoch: 793 Train Loss: 0.0864 Val Loss: 0.2247 Acc: 0.9167 Pre: 0.9220 Recall: 0.9155 F1: 0.9187 Train AUC: 0.9964 Val AUC: 0.9752 Time: 12.74\n",
      "Epoch: 794 Train Loss: 0.0869 Val Loss: 0.2338 Acc: 0.8931 Pre: 0.8713 Recall: 0.9296 F1: 0.8995 Train AUC: 0.9958 Val AUC: 0.9754 Time: 13.14\n",
      "Epoch: 795 Train Loss: 0.0790 Val Loss: 0.2383 Acc: 0.8895 Pre: 0.8656 Recall: 0.9296 F1: 0.8964 Train AUC: 0.9978 Val AUC: 0.9744 Time: 12.75\n",
      "Epoch: 796 Train Loss: 0.0720 Val Loss: 0.2290 Acc: 0.8986 Pre: 0.8851 Recall: 0.9225 F1: 0.9034 Train AUC: 0.9975 Val AUC: 0.9736 Time: 13.27\n",
      "Epoch: 797 Train Loss: 0.0805 Val Loss: 0.2239 Acc: 0.9149 Pre: 0.9129 Recall: 0.9225 F1: 0.9177 Train AUC: 0.9968 Val AUC: 0.9740 Time: 13.41\n",
      "Epoch: 798 Train Loss: 0.0755 Val Loss: 0.2253 Acc: 0.9076 Pre: 0.8949 Recall: 0.9296 F1: 0.9119 Train AUC: 0.9975 Val AUC: 0.9741 Time: 13.14\n",
      "Epoch: 799 Train Loss: 0.0824 Val Loss: 0.2329 Acc: 0.9004 Pre: 0.8829 Recall: 0.9296 F1: 0.9057 Train AUC: 0.9963 Val AUC: 0.9732 Time: 12.75\n",
      "Epoch: 800 Train Loss: 0.0790 Val Loss: 0.2304 Acc: 0.9040 Pre: 0.8915 Recall: 0.9261 F1: 0.9085 Train AUC: 0.9970 Val AUC: 0.9729 Time: 11.82\n",
      "Epoch: 801 Train Loss: 0.0774 Val Loss: 0.2281 Acc: 0.9076 Pre: 0.8976 Recall: 0.9261 F1: 0.9116 Train AUC: 0.9974 Val AUC: 0.9749 Time: 12.09\n",
      "Epoch: 802 Train Loss: 0.0681 Val Loss: 0.2307 Acc: 0.9094 Pre: 0.9034 Recall: 0.9225 F1: 0.9129 Train AUC: 0.9979 Val AUC: 0.9749 Time: 12.64\n",
      "Epoch: 803 Train Loss: 0.0774 Val Loss: 0.2333 Acc: 0.9004 Pre: 0.8829 Recall: 0.9296 F1: 0.9057 Train AUC: 0.9967 Val AUC: 0.9741 Time: 13.01\n",
      "Epoch: 804 Train Loss: 0.0667 Val Loss: 0.2380 Acc: 0.8949 Pre: 0.8742 Recall: 0.9296 F1: 0.9010 Train AUC: 0.9981 Val AUC: 0.9728 Time: 13.69\n",
      "Epoch: 805 Train Loss: 0.0759 Val Loss: 0.2406 Acc: 0.8895 Pre: 0.8656 Recall: 0.9296 F1: 0.8964 Train AUC: 0.9977 Val AUC: 0.9723 Time: 14.42\n",
      "Epoch: 806 Train Loss: 0.0752 Val Loss: 0.2310 Acc: 0.8967 Pre: 0.8771 Recall: 0.9296 F1: 0.9026 Train AUC: 0.9976 Val AUC: 0.9731 Time: 13.28\n",
      "Epoch: 807 Train Loss: 0.0682 Val Loss: 0.2286 Acc: 0.9112 Pre: 0.9038 Recall: 0.9261 F1: 0.9148 Train AUC: 0.9985 Val AUC: 0.9738 Time: 12.23\n",
      "Epoch: 808 Train Loss: 0.0747 Val Loss: 0.2319 Acc: 0.9076 Pre: 0.9059 Recall: 0.9155 F1: 0.9107 Train AUC: 0.9972 Val AUC: 0.9741 Time: 11.96\n",
      "Epoch: 809 Train Loss: 0.0706 Val Loss: 0.2377 Acc: 0.9040 Pre: 0.8942 Recall: 0.9225 F1: 0.9081 Train AUC: 0.9978 Val AUC: 0.9736 Time: 12.27\n",
      "Epoch: 810 Train Loss: 0.0707 Val Loss: 0.2520 Acc: 0.8986 Pre: 0.8750 Recall: 0.9366 F1: 0.9048 Train AUC: 0.9977 Val AUC: 0.9726 Time: 12.58\n",
      "Epoch: 811 Train Loss: 0.0802 Val Loss: 0.2444 Acc: 0.8913 Pre: 0.8733 Recall: 0.9225 F1: 0.8973 Train AUC: 0.9971 Val AUC: 0.9728 Time: 13.20\n",
      "Epoch: 812 Train Loss: 0.0872 Val Loss: 0.2319 Acc: 0.9149 Pre: 0.9072 Recall: 0.9296 F1: 0.9183 Train AUC: 0.9956 Val AUC: 0.9738 Time: 13.52\n",
      "Epoch: 813 Train Loss: 0.0805 Val Loss: 0.2259 Acc: 0.9185 Pre: 0.9135 Recall: 0.9296 F1: 0.9215 Train AUC: 0.9966 Val AUC: 0.9730 Time: 14.00\n",
      "Epoch: 814 Train Loss: 0.0836 Val Loss: 0.2267 Acc: 0.9004 Pre: 0.8829 Recall: 0.9296 F1: 0.9057 Train AUC: 0.9972 Val AUC: 0.9741 Time: 13.55\n",
      "Epoch: 815 Train Loss: 0.0740 Val Loss: 0.2266 Acc: 0.8986 Pre: 0.8800 Recall: 0.9296 F1: 0.9041 Train AUC: 0.9976 Val AUC: 0.9751 Time: 12.83\n",
      "Epoch: 816 Train Loss: 0.0715 Val Loss: 0.2276 Acc: 0.9022 Pre: 0.8859 Recall: 0.9296 F1: 0.9072 Train AUC: 0.9976 Val AUC: 0.9751 Time: 12.42\n",
      "Epoch: 817 Train Loss: 0.0754 Val Loss: 0.2303 Acc: 0.9004 Pre: 0.8829 Recall: 0.9296 F1: 0.9057 Train AUC: 0.9972 Val AUC: 0.9733 Time: 12.62\n",
      "Epoch: 818 Train Loss: 0.0782 Val Loss: 0.2491 Acc: 0.9004 Pre: 0.8779 Recall: 0.9366 F1: 0.9063 Train AUC: 0.9970 Val AUC: 0.9692 Time: 12.01\n",
      "Epoch: 819 Train Loss: 0.0873 Val Loss: 0.2349 Acc: 0.9022 Pre: 0.8833 Recall: 0.9331 F1: 0.9075 Train AUC: 0.9966 Val AUC: 0.9729 Time: 12.55\n",
      "Epoch: 820 Train Loss: 0.0754 Val Loss: 0.2305 Acc: 0.9130 Pre: 0.9041 Recall: 0.9296 F1: 0.9167 Train AUC: 0.9969 Val AUC: 0.9748 Time: 13.02\n",
      "Epoch: 821 Train Loss: 0.0696 Val Loss: 0.2335 Acc: 0.9130 Pre: 0.9041 Recall: 0.9296 F1: 0.9167 Train AUC: 0.9978 Val AUC: 0.9754 Time: 13.28\n",
      "Epoch: 822 Train Loss: 0.0799 Val Loss: 0.2418 Acc: 0.9022 Pre: 0.8808 Recall: 0.9366 F1: 0.9078 Train AUC: 0.9965 Val AUC: 0.9747 Time: 13.94\n",
      "Epoch: 823 Train Loss: 0.0813 Val Loss: 0.2392 Acc: 0.9040 Pre: 0.8787 Recall: 0.9437 F1: 0.9100 Train AUC: 0.9968 Val AUC: 0.9736 Time: 14.37\n",
      "Epoch: 824 Train Loss: 0.0737 Val Loss: 0.2288 Acc: 0.8986 Pre: 0.8800 Recall: 0.9296 F1: 0.9041 Train AUC: 0.9977 Val AUC: 0.9727 Time: 13.16\n",
      "Epoch: 825 Train Loss: 0.0768 Val Loss: 0.2229 Acc: 0.9130 Pre: 0.9155 Recall: 0.9155 F1: 0.9155 Train AUC: 0.9977 Val AUC: 0.9733 Time: 12.20\n",
      "Epoch: 826 Train Loss: 0.0787 Val Loss: 0.2176 Acc: 0.9130 Pre: 0.9069 Recall: 0.9261 F1: 0.9164 Train AUC: 0.9975 Val AUC: 0.9752 Time: 11.79\n",
      "Epoch: 827 Train Loss: 0.0735 Val Loss: 0.2257 Acc: 0.9022 Pre: 0.8912 Recall: 0.9225 F1: 0.9066 Train AUC: 0.9981 Val AUC: 0.9752 Time: 11.68\n",
      "Epoch: 828 Train Loss: 0.0796 Val Loss: 0.2303 Acc: 0.9112 Pre: 0.9038 Recall: 0.9261 F1: 0.9148 Train AUC: 0.9969 Val AUC: 0.9756 Time: 12.28\n",
      "Epoch: 829 Train Loss: 0.0914 Val Loss: 0.2258 Acc: 0.9058 Pre: 0.8919 Recall: 0.9296 F1: 0.9103 Train AUC: 0.9955 Val AUC: 0.9756 Time: 12.68\n",
      "Epoch: 830 Train Loss: 0.0666 Val Loss: 0.2290 Acc: 0.9094 Pre: 0.8980 Recall: 0.9296 F1: 0.9135 Train AUC: 0.9981 Val AUC: 0.9733 Time: 13.14\n",
      "Epoch: 831 Train Loss: 0.0902 Val Loss: 0.2290 Acc: 0.9167 Pre: 0.9161 Recall: 0.9225 F1: 0.9193 Train AUC: 0.9958 Val AUC: 0.9723 Time: 13.74\n",
      "Epoch: 832 Train Loss: 0.0826 Val Loss: 0.2303 Acc: 0.9076 Pre: 0.9003 Recall: 0.9225 F1: 0.9113 Train AUC: 0.9975 Val AUC: 0.9732 Time: 14.18\n",
      "Epoch: 833 Train Loss: 0.0783 Val Loss: 0.2359 Acc: 0.8931 Pre: 0.8738 Recall: 0.9261 F1: 0.8991 Train AUC: 0.9970 Val AUC: 0.9732 Time: 13.65\n",
      "Epoch: 834 Train Loss: 0.0722 Val Loss: 0.2310 Acc: 0.8967 Pre: 0.8771 Recall: 0.9296 F1: 0.9026 Train AUC: 0.9975 Val AUC: 0.9740 Time: 12.57\n",
      "Epoch: 835 Train Loss: 0.0745 Val Loss: 0.2242 Acc: 0.9040 Pre: 0.8942 Recall: 0.9225 F1: 0.9081 Train AUC: 0.9975 Val AUC: 0.9743 Time: 11.73\n",
      "Epoch: 836 Train Loss: 0.0726 Val Loss: 0.2255 Acc: 0.9112 Pre: 0.9152 Recall: 0.9120 F1: 0.9136 Train AUC: 0.9977 Val AUC: 0.9731 Time: 11.53\n",
      "Epoch: 837 Train Loss: 0.0887 Val Loss: 0.2223 Acc: 0.9203 Pre: 0.9225 Recall: 0.9225 F1: 0.9225 Train AUC: 0.9960 Val AUC: 0.9751 Time: 11.95\n",
      "Epoch: 838 Train Loss: 0.0833 Val Loss: 0.2346 Acc: 0.9130 Pre: 0.8882 Recall: 0.9507 F1: 0.9184 Train AUC: 0.9962 Val AUC: 0.9754 Time: 12.68\n",
      "Epoch: 839 Train Loss: 0.0765 Val Loss: 0.2389 Acc: 0.9040 Pre: 0.8738 Recall: 0.9507 F1: 0.9106 Train AUC: 0.9973 Val AUC: 0.9752 Time: 12.97\n",
      "Epoch: 840 Train Loss: 0.0877 Val Loss: 0.2208 Acc: 0.9185 Pre: 0.9164 Recall: 0.9261 F1: 0.9212 Train AUC: 0.9960 Val AUC: 0.9760 Time: 13.52\n",
      "Epoch: 841 Train Loss: 0.0724 Val Loss: 0.2245 Acc: 0.9167 Pre: 0.9312 Recall: 0.9049 F1: 0.9179 Train AUC: 0.9975 Val AUC: 0.9752 Time: 14.29\n",
      "Epoch: 842 Train Loss: 0.0890 Val Loss: 0.2211 Acc: 0.9112 Pre: 0.9010 Recall: 0.9296 F1: 0.9151 Train AUC: 0.9965 Val AUC: 0.9729 Time: 14.16\n",
      "Epoch: 843 Train Loss: 0.0794 Val Loss: 0.2420 Acc: 0.8986 Pre: 0.8701 Recall: 0.9437 F1: 0.9054 Train AUC: 0.9973 Val AUC: 0.9745 Time: 12.86\n",
      "Epoch: 844 Train Loss: 0.0871 Val Loss: 0.2398 Acc: 0.9167 Pre: 0.9048 Recall: 0.9366 F1: 0.9204 Train AUC: 0.9969 Val AUC: 0.9742 Time: 12.00\n",
      "Epoch: 845 Train Loss: 0.0754 Val Loss: 0.2450 Acc: 0.9167 Pre: 0.9250 Recall: 0.9120 F1: 0.9184 Train AUC: 0.9969 Val AUC: 0.9743 Time: 12.04\n",
      "Epoch: 846 Train Loss: 0.0934 Val Loss: 0.2316 Acc: 0.9130 Pre: 0.9097 Recall: 0.9225 F1: 0.9161 Train AUC: 0.9948 Val AUC: 0.9748 Time: 12.38\n",
      "Epoch: 847 Train Loss: 0.0652 Val Loss: 0.2343 Acc: 0.9058 Pre: 0.8919 Recall: 0.9296 F1: 0.9103 Train AUC: 0.9980 Val AUC: 0.9710 Time: 13.16\n",
      "Epoch: 848 Train Loss: 0.0762 Val Loss: 0.2497 Acc: 0.8986 Pre: 0.8750 Recall: 0.9366 F1: 0.9048 Train AUC: 0.9971 Val AUC: 0.9682 Time: 13.91\n",
      "Epoch: 849 Train Loss: 0.0768 Val Loss: 0.2280 Acc: 0.9149 Pre: 0.9072 Recall: 0.9296 F1: 0.9183 Train AUC: 0.9981 Val AUC: 0.9730 Time: 14.51\n",
      "Epoch: 850 Train Loss: 0.0719 Val Loss: 0.2402 Acc: 0.9203 Pre: 0.9225 Recall: 0.9225 F1: 0.9225 Train AUC: 0.9981 Val AUC: 0.9745 Time: 13.54\n",
      "Epoch: 851 Train Loss: 0.0755 Val Loss: 0.2469 Acc: 0.9130 Pre: 0.9126 Recall: 0.9190 F1: 0.9158 Train AUC: 0.9968 Val AUC: 0.9740 Time: 12.47\n",
      "Epoch: 852 Train Loss: 0.0763 Val Loss: 0.2466 Acc: 0.8986 Pre: 0.8800 Recall: 0.9296 F1: 0.9041 Train AUC: 0.9969 Val AUC: 0.9736 Time: 12.01\n",
      "Epoch: 853 Train Loss: 0.0730 Val Loss: 0.2584 Acc: 0.8895 Pre: 0.8585 Recall: 0.9401 F1: 0.8975 Train AUC: 0.9978 Val AUC: 0.9706 Time: 12.59\n",
      "Epoch: 854 Train Loss: 0.0883 Val Loss: 0.2339 Acc: 0.9112 Pre: 0.9038 Recall: 0.9261 F1: 0.9148 Train AUC: 0.9963 Val AUC: 0.9717 Time: 13.24\n",
      "Epoch: 855 Train Loss: 0.0755 Val Loss: 0.2241 Acc: 0.9149 Pre: 0.9072 Recall: 0.9296 F1: 0.9183 Train AUC: 0.9975 Val AUC: 0.9745 Time: 15.30\n",
      "Epoch: 856 Train Loss: 0.0753 Val Loss: 0.2272 Acc: 0.9058 Pre: 0.8946 Recall: 0.9261 F1: 0.9100 Train AUC: 0.9973 Val AUC: 0.9749 Time: 15.98\n",
      "Epoch: 857 Train Loss: 0.0844 Val Loss: 0.2250 Acc: 0.9094 Pre: 0.8926 Recall: 0.9366 F1: 0.9141 Train AUC: 0.9966 Val AUC: 0.9753 Time: 13.01\n",
      "Epoch: 858 Train Loss: 0.0702 Val Loss: 0.2207 Acc: 0.9149 Pre: 0.9072 Recall: 0.9296 F1: 0.9183 Train AUC: 0.9979 Val AUC: 0.9750 Time: 12.62\n",
      "Epoch: 859 Train Loss: 0.0698 Val Loss: 0.2235 Acc: 0.9149 Pre: 0.9100 Recall: 0.9261 F1: 0.9180 Train AUC: 0.9980 Val AUC: 0.9737 Time: 13.79\n",
      "Epoch: 860 Train Loss: 0.0791 Val Loss: 0.2278 Acc: 0.9094 Pre: 0.9007 Recall: 0.9261 F1: 0.9132 Train AUC: 0.9972 Val AUC: 0.9739 Time: 12.86\n",
      "Epoch: 861 Train Loss: 0.0717 Val Loss: 0.2380 Acc: 0.9022 Pre: 0.8758 Recall: 0.9437 F1: 0.9085 Train AUC: 0.9976 Val AUC: 0.9744 Time: 13.82\n",
      "Epoch: 862 Train Loss: 0.0753 Val Loss: 0.2315 Acc: 0.9040 Pre: 0.8837 Recall: 0.9366 F1: 0.9094 Train AUC: 0.9976 Val AUC: 0.9745 Time: 15.08\n",
      "Epoch: 863 Train Loss: 0.0753 Val Loss: 0.2211 Acc: 0.9076 Pre: 0.9059 Recall: 0.9155 F1: 0.9107 Train AUC: 0.9975 Val AUC: 0.9733 Time: 14.37\n",
      "Epoch: 864 Train Loss: 0.0889 Val Loss: 0.2206 Acc: 0.9040 Pre: 0.8942 Recall: 0.9225 F1: 0.9081 Train AUC: 0.9963 Val AUC: 0.9731 Time: 13.82\n",
      "Epoch: 865 Train Loss: 0.0757 Val Loss: 0.2304 Acc: 0.8931 Pre: 0.8713 Recall: 0.9296 F1: 0.8995 Train AUC: 0.9976 Val AUC: 0.9747 Time: 13.69\n",
      "Epoch: 866 Train Loss: 0.0646 Val Loss: 0.2325 Acc: 0.8986 Pre: 0.8750 Recall: 0.9366 F1: 0.9048 Train AUC: 0.9985 Val AUC: 0.9757 Time: 13.12\n",
      "Epoch: 867 Train Loss: 0.0778 Val Loss: 0.2309 Acc: 0.9076 Pre: 0.8923 Recall: 0.9331 F1: 0.9122 Train AUC: 0.9969 Val AUC: 0.9756 Time: 13.58\n",
      "Epoch: 868 Train Loss: 0.0772 Val Loss: 0.2240 Acc: 0.9112 Pre: 0.9066 Recall: 0.9225 F1: 0.9145 Train AUC: 0.9968 Val AUC: 0.9762 Time: 14.01\n",
      "Epoch: 869 Train Loss: 0.0744 Val Loss: 0.2183 Acc: 0.9130 Pre: 0.9041 Recall: 0.9296 F1: 0.9167 Train AUC: 0.9970 Val AUC: 0.9748 Time: 15.21\n",
      "Epoch: 870 Train Loss: 0.0826 Val Loss: 0.2278 Acc: 0.9130 Pre: 0.9014 Recall: 0.9331 F1: 0.9170 Train AUC: 0.9969 Val AUC: 0.9712 Time: 16.99\n",
      "Epoch: 871 Train Loss: 0.0752 Val Loss: 0.2305 Acc: 0.8986 Pre: 0.8800 Recall: 0.9296 F1: 0.9041 Train AUC: 0.9979 Val AUC: 0.9730 Time: 15.46\n",
      "Epoch: 872 Train Loss: 0.0646 Val Loss: 0.2400 Acc: 0.9004 Pre: 0.8754 Recall: 0.9401 F1: 0.9066 Train AUC: 0.9987 Val AUC: 0.9739 Time: 13.41\n",
      "Epoch: 873 Train Loss: 0.0657 Val Loss: 0.2443 Acc: 0.9094 Pre: 0.8953 Recall: 0.9331 F1: 0.9138 Train AUC: 0.9983 Val AUC: 0.9738 Time: 13.03\n",
      "Epoch: 874 Train Loss: 0.0775 Val Loss: 0.2348 Acc: 0.9149 Pre: 0.9072 Recall: 0.9296 F1: 0.9183 Train AUC: 0.9967 Val AUC: 0.9745 Time: 13.92\n",
      "Epoch: 875 Train Loss: 0.0819 Val Loss: 0.2221 Acc: 0.9094 Pre: 0.8980 Recall: 0.9296 F1: 0.9135 Train AUC: 0.9964 Val AUC: 0.9750 Time: 13.54\n",
      "Epoch: 876 Train Loss: 0.0695 Val Loss: 0.2283 Acc: 0.9004 Pre: 0.8804 Recall: 0.9331 F1: 0.9060 Train AUC: 0.9980 Val AUC: 0.9728 Time: 14.53\n",
      "Epoch: 877 Train Loss: 0.0787 Val Loss: 0.2243 Acc: 0.9022 Pre: 0.8859 Recall: 0.9296 F1: 0.9072 Train AUC: 0.9973 Val AUC: 0.9745 Time: 15.48\n",
      "Epoch: 878 Train Loss: 0.0723 Val Loss: 0.2226 Acc: 0.9167 Pre: 0.9103 Recall: 0.9296 F1: 0.9199 Train AUC: 0.9980 Val AUC: 0.9754 Time: 16.53\n",
      "Epoch: 879 Train Loss: 0.0761 Val Loss: 0.2265 Acc: 0.9094 Pre: 0.8980 Recall: 0.9296 F1: 0.9135 Train AUC: 0.9972 Val AUC: 0.9751 Time: 15.02\n",
      "Epoch: 880 Train Loss: 0.0703 Val Loss: 0.2349 Acc: 0.9004 Pre: 0.8804 Recall: 0.9331 F1: 0.9060 Train AUC: 0.9980 Val AUC: 0.9746 Time: 14.06\n",
      "Epoch: 881 Train Loss: 0.0874 Val Loss: 0.2267 Acc: 0.9040 Pre: 0.8889 Recall: 0.9296 F1: 0.9088 Train AUC: 0.9956 Val AUC: 0.9752 Time: 15.30\n",
      "Epoch: 882 Train Loss: 0.0641 Val Loss: 0.2217 Acc: 0.9112 Pre: 0.9010 Recall: 0.9296 F1: 0.9151 Train AUC: 0.9981 Val AUC: 0.9747 Time: 12.34\n",
      "Epoch: 883 Train Loss: 0.0718 Val Loss: 0.2238 Acc: 0.9112 Pre: 0.9038 Recall: 0.9261 F1: 0.9148 Train AUC: 0.9980 Val AUC: 0.9745 Time: 12.74\n",
      "Epoch: 884 Train Loss: 0.0763 Val Loss: 0.2268 Acc: 0.9094 Pre: 0.9007 Recall: 0.9261 F1: 0.9132 Train AUC: 0.9972 Val AUC: 0.9742 Time: 13.25\n",
      "Epoch: 885 Train Loss: 0.0830 Val Loss: 0.2394 Acc: 0.8967 Pre: 0.8721 Recall: 0.9366 F1: 0.9032 Train AUC: 0.9960 Val AUC: 0.9723 Time: 13.76\n",
      "Epoch: 886 Train Loss: 0.0750 Val Loss: 0.2358 Acc: 0.9076 Pre: 0.8949 Recall: 0.9296 F1: 0.9119 Train AUC: 0.9976 Val AUC: 0.9715 Time: 14.12\n",
      "Epoch: 887 Train Loss: 0.0819 Val Loss: 0.2290 Acc: 0.9112 Pre: 0.9123 Recall: 0.9155 F1: 0.9139 Train AUC: 0.9965 Val AUC: 0.9725 Time: 12.85\n",
      "Epoch: 888 Train Loss: 0.0837 Val Loss: 0.2244 Acc: 0.9167 Pre: 0.9103 Recall: 0.9296 F1: 0.9199 Train AUC: 0.9967 Val AUC: 0.9731 Time: 12.00\n",
      "Epoch: 889 Train Loss: 0.0908 Val Loss: 0.2359 Acc: 0.9058 Pre: 0.8919 Recall: 0.9296 F1: 0.9103 Train AUC: 0.9967 Val AUC: 0.9749 Time: 12.47\n",
      "Epoch: 890 Train Loss: 0.0817 Val Loss: 0.2456 Acc: 0.9112 Pre: 0.8904 Recall: 0.9437 F1: 0.9162 Train AUC: 0.9970 Val AUC: 0.9750 Time: 14.32\n",
      "Epoch: 891 Train Loss: 0.0971 Val Loss: 0.2386 Acc: 0.9058 Pre: 0.8893 Recall: 0.9331 F1: 0.9107 Train AUC: 0.9955 Val AUC: 0.9742 Time: 15.64\n",
      "Epoch: 892 Train Loss: 0.0781 Val Loss: 0.2473 Acc: 0.8986 Pre: 0.8800 Recall: 0.9296 F1: 0.9041 Train AUC: 0.9969 Val AUC: 0.9701 Time: 14.63\n",
      "Epoch: 893 Train Loss: 0.0736 Val Loss: 0.2512 Acc: 0.9022 Pre: 0.8859 Recall: 0.9296 F1: 0.9072 Train AUC: 0.9977 Val AUC: 0.9670 Time: 13.79\n",
      "Epoch: 894 Train Loss: 0.0889 Val Loss: 0.2416 Acc: 0.9130 Pre: 0.9069 Recall: 0.9261 F1: 0.9164 Train AUC: 0.9963 Val AUC: 0.9699 Time: 12.74\n",
      "Epoch: 895 Train Loss: 0.0811 Val Loss: 0.2384 Acc: 0.9167 Pre: 0.9048 Recall: 0.9366 F1: 0.9204 Train AUC: 0.9971 Val AUC: 0.9719 Time: 13.05\n",
      "Epoch: 896 Train Loss: 0.0842 Val Loss: 0.2337 Acc: 0.9167 Pre: 0.9048 Recall: 0.9366 F1: 0.9204 Train AUC: 0.9961 Val AUC: 0.9742 Time: 13.11\n",
      "Epoch: 897 Train Loss: 0.0752 Val Loss: 0.2387 Acc: 0.9130 Pre: 0.8960 Recall: 0.9401 F1: 0.9175 Train AUC: 0.9978 Val AUC: 0.9751 Time: 13.75\n",
      "Epoch: 898 Train Loss: 0.0832 Val Loss: 0.2347 Acc: 0.9112 Pre: 0.8956 Recall: 0.9366 F1: 0.9157 Train AUC: 0.9966 Val AUC: 0.9753 Time: 15.95\n",
      "Epoch: 899 Train Loss: 0.0875 Val Loss: 0.2246 Acc: 0.9076 Pre: 0.9003 Recall: 0.9225 F1: 0.9113 Train AUC: 0.9965 Val AUC: 0.9749 Time: 14.48\n",
      "Epoch: 900 Train Loss: 0.0795 Val Loss: 0.2259 Acc: 0.9112 Pre: 0.9123 Recall: 0.9155 F1: 0.9139 Train AUC: 0.9963 Val AUC: 0.9731 Time: 13.26\n",
      "Epoch: 901 Train Loss: 0.0755 Val Loss: 0.2300 Acc: 0.9004 Pre: 0.8855 Recall: 0.9261 F1: 0.9053 Train AUC: 0.9974 Val AUC: 0.9728 Time: 13.01\n",
      "Epoch: 902 Train Loss: 0.0881 Val Loss: 0.2407 Acc: 0.9022 Pre: 0.8758 Recall: 0.9437 F1: 0.9085 Train AUC: 0.9959 Val AUC: 0.9721 Time: 12.70\n",
      "Epoch: 903 Train Loss: 0.0839 Val Loss: 0.2343 Acc: 0.9167 Pre: 0.9048 Recall: 0.9366 F1: 0.9204 Train AUC: 0.9970 Val AUC: 0.9727 Time: 13.10\n",
      "Epoch: 904 Train Loss: 0.0784 Val Loss: 0.2392 Acc: 0.9130 Pre: 0.9184 Recall: 0.9120 F1: 0.9152 Train AUC: 0.9971 Val AUC: 0.9732 Time: 13.93\n",
      "Epoch: 905 Train Loss: 0.0722 Val Loss: 0.2368 Acc: 0.9040 Pre: 0.8997 Recall: 0.9155 F1: 0.9075 Train AUC: 0.9976 Val AUC: 0.9736 Time: 16.30\n",
      "Epoch: 906 Train Loss: 0.0794 Val Loss: 0.2404 Acc: 0.9076 Pre: 0.8923 Recall: 0.9331 F1: 0.9122 Train AUC: 0.9968 Val AUC: 0.9729 Time: 15.50\n",
      "Epoch: 907 Train Loss: 0.0808 Val Loss: 0.2554 Acc: 0.8986 Pre: 0.8701 Recall: 0.9437 F1: 0.9054 Train AUC: 0.9969 Val AUC: 0.9735 Time: 12.49\n",
      "Epoch: 908 Train Loss: 0.0738 Val Loss: 0.2442 Acc: 0.8967 Pre: 0.8771 Recall: 0.9296 F1: 0.9026 Train AUC: 0.9979 Val AUC: 0.9730 Time: 12.45\n",
      "Epoch: 909 Train Loss: 0.0747 Val Loss: 0.2392 Acc: 0.9149 Pre: 0.9247 Recall: 0.9085 F1: 0.9165 Train AUC: 0.9974 Val AUC: 0.9741 Time: 15.07\n",
      "Epoch: 910 Train Loss: 0.0837 Val Loss: 0.2372 Acc: 0.9275 Pre: 0.9236 Recall: 0.9366 F1: 0.9301 Train AUC: 0.9965 Val AUC: 0.9741 Time: 13.41\n",
      "Epoch: 911 Train Loss: 0.0746 Val Loss: 0.2358 Acc: 0.9149 Pre: 0.9017 Recall: 0.9366 F1: 0.9188 Train AUC: 0.9972 Val AUC: 0.9733 Time: 15.57\n",
      "Epoch: 912 Train Loss: 0.0868 Val Loss: 0.2327 Acc: 0.9040 Pre: 0.8889 Recall: 0.9296 F1: 0.9088 Train AUC: 0.9962 Val AUC: 0.9711 Time: 13.76\n",
      "Epoch: 913 Train Loss: 0.0803 Val Loss: 0.2300 Acc: 0.9130 Pre: 0.9041 Recall: 0.9296 F1: 0.9167 Train AUC: 0.9969 Val AUC: 0.9708 Time: 14.08\n",
      "Epoch: 914 Train Loss: 0.0707 Val Loss: 0.2365 Acc: 0.9058 Pre: 0.8946 Recall: 0.9261 F1: 0.9100 Train AUC: 0.9979 Val AUC: 0.9713 Time: 13.98\n",
      "Epoch: 915 Train Loss: 0.0766 Val Loss: 0.2381 Acc: 0.8967 Pre: 0.8874 Recall: 0.9155 F1: 0.9012 Train AUC: 0.9972 Val AUC: 0.9729 Time: 13.62\n",
      "Epoch: 916 Train Loss: 0.0840 Val Loss: 0.2552 Acc: 0.8913 Pre: 0.8636 Recall: 0.9366 F1: 0.8986 Train AUC: 0.9958 Val AUC: 0.9723 Time: 13.09\n",
      "Epoch: 917 Train Loss: 0.0746 Val Loss: 0.2459 Acc: 0.9022 Pre: 0.8808 Recall: 0.9366 F1: 0.9078 Train AUC: 0.9975 Val AUC: 0.9742 Time: 13.54\n",
      "Epoch: 918 Train Loss: 0.0794 Val Loss: 0.2289 Acc: 0.9167 Pre: 0.9103 Recall: 0.9296 F1: 0.9199 Train AUC: 0.9965 Val AUC: 0.9746 Time: 14.17\n",
      "Epoch: 919 Train Loss: 0.0720 Val Loss: 0.2245 Acc: 0.9167 Pre: 0.9161 Recall: 0.9225 F1: 0.9193 Train AUC: 0.9974 Val AUC: 0.9738 Time: 13.53\n",
      "Epoch: 920 Train Loss: 0.0725 Val Loss: 0.2195 Acc: 0.9221 Pre: 0.9199 Recall: 0.9296 F1: 0.9247 Train AUC: 0.9977 Val AUC: 0.9736 Time: 12.70\n",
      "Epoch: 921 Train Loss: 0.0702 Val Loss: 0.2263 Acc: 0.9022 Pre: 0.8859 Recall: 0.9296 F1: 0.9072 Train AUC: 0.9982 Val AUC: 0.9758 Time: 13.53\n",
      "Epoch: 922 Train Loss: 0.0729 Val Loss: 0.2404 Acc: 0.9004 Pre: 0.8730 Recall: 0.9437 F1: 0.9069 Train AUC: 0.9978 Val AUC: 0.9753 Time: 13.36\n",
      "Epoch: 923 Train Loss: 0.0802 Val Loss: 0.2292 Acc: 0.9094 Pre: 0.9007 Recall: 0.9261 F1: 0.9132 Train AUC: 0.9969 Val AUC: 0.9754 Time: 13.23\n",
      "Epoch: 924 Train Loss: 0.0701 Val Loss: 0.2256 Acc: 0.9130 Pre: 0.9069 Recall: 0.9261 F1: 0.9164 Train AUC: 0.9976 Val AUC: 0.9740 Time: 13.70\n",
      "Epoch: 925 Train Loss: 0.0783 Val Loss: 0.2210 Acc: 0.9257 Pre: 0.9263 Recall: 0.9296 F1: 0.9279 Train AUC: 0.9969 Val AUC: 0.9737 Time: 13.37\n",
      "Epoch: 926 Train Loss: 0.0846 Val Loss: 0.2356 Acc: 0.8986 Pre: 0.8750 Recall: 0.9366 F1: 0.9048 Train AUC: 0.9967 Val AUC: 0.9739 Time: 12.90\n",
      "Epoch: 927 Train Loss: 0.0818 Val Loss: 0.2326 Acc: 0.9130 Pre: 0.8986 Recall: 0.9366 F1: 0.9172 Train AUC: 0.9969 Val AUC: 0.9731 Time: 13.62\n",
      "Epoch: 928 Train Loss: 0.0807 Val Loss: 0.2248 Acc: 0.9185 Pre: 0.9135 Recall: 0.9296 F1: 0.9215 Train AUC: 0.9964 Val AUC: 0.9748 Time: 14.22\n",
      "Epoch: 929 Train Loss: 0.0676 Val Loss: 0.2244 Acc: 0.9130 Pre: 0.9041 Recall: 0.9296 F1: 0.9167 Train AUC: 0.9983 Val AUC: 0.9748 Time: 13.26\n",
      "Epoch: 930 Train Loss: 0.0689 Val Loss: 0.2378 Acc: 0.9004 Pre: 0.8779 Recall: 0.9366 F1: 0.9063 Train AUC: 0.9975 Val AUC: 0.9727 Time: 12.97\n",
      "Epoch: 931 Train Loss: 0.0772 Val Loss: 0.2503 Acc: 0.8949 Pre: 0.8693 Recall: 0.9366 F1: 0.9017 Train AUC: 0.9971 Val AUC: 0.9714 Time: 13.68\n",
      "Epoch: 932 Train Loss: 0.0819 Val Loss: 0.2322 Acc: 0.9040 Pre: 0.8889 Recall: 0.9296 F1: 0.9088 Train AUC: 0.9967 Val AUC: 0.9742 Time: 13.21\n",
      "Epoch: 933 Train Loss: 0.0714 Val Loss: 0.2385 Acc: 0.9203 Pre: 0.9196 Recall: 0.9261 F1: 0.9228 Train AUC: 0.9979 Val AUC: 0.9754 Time: 13.69\n",
      "Epoch: 934 Train Loss: 0.0830 Val Loss: 0.2367 Acc: 0.9257 Pre: 0.9204 Recall: 0.9366 F1: 0.9284 Train AUC: 0.9963 Val AUC: 0.9761 Time: 14.71\n",
      "Epoch: 935 Train Loss: 0.0774 Val Loss: 0.2232 Acc: 0.9130 Pre: 0.9041 Recall: 0.9296 F1: 0.9167 Train AUC: 0.9969 Val AUC: 0.9751 Time: 14.40\n",
      "Epoch: 936 Train Loss: 0.0722 Val Loss: 0.2227 Acc: 0.9130 Pre: 0.9041 Recall: 0.9296 F1: 0.9167 Train AUC: 0.9975 Val AUC: 0.9722 Time: 13.30\n",
      "Epoch: 937 Train Loss: 0.0867 Val Loss: 0.2175 Acc: 0.9239 Pre: 0.9231 Recall: 0.9296 F1: 0.9263 Train AUC: 0.9975 Val AUC: 0.9744 Time: 12.87\n",
      "Epoch: 938 Train Loss: 0.0816 Val Loss: 0.2272 Acc: 0.9203 Pre: 0.9225 Recall: 0.9225 F1: 0.9225 Train AUC: 0.9975 Val AUC: 0.9754 Time: 13.25\n",
      "Epoch: 939 Train Loss: 0.0786 Val Loss: 0.2406 Acc: 0.9004 Pre: 0.8779 Recall: 0.9366 F1: 0.9063 Train AUC: 0.9968 Val AUC: 0.9742 Time: 13.33\n",
      "Epoch: 940 Train Loss: 0.0839 Val Loss: 0.2640 Acc: 0.9022 Pre: 0.8710 Recall: 0.9507 F1: 0.9091 Train AUC: 0.9960 Val AUC: 0.9717 Time: 14.30\n",
      "Epoch: 941 Train Loss: 0.0852 Val Loss: 0.2330 Acc: 0.9112 Pre: 0.9038 Recall: 0.9261 F1: 0.9148 Train AUC: 0.9965 Val AUC: 0.9731 Time: 13.64\n",
      "Epoch: 942 Train Loss: 0.0775 Val Loss: 0.2272 Acc: 0.9293 Pre: 0.9329 Recall: 0.9296 F1: 0.9312 Train AUC: 0.9971 Val AUC: 0.9735 Time: 12.80\n",
      "Epoch: 943 Train Loss: 0.0797 Val Loss: 0.2269 Acc: 0.9149 Pre: 0.9072 Recall: 0.9296 F1: 0.9183 Train AUC: 0.9970 Val AUC: 0.9732 Time: 12.89\n",
      "Epoch: 944 Train Loss: 0.0722 Val Loss: 0.2346 Acc: 0.8913 Pre: 0.8684 Recall: 0.9296 F1: 0.8980 Train AUC: 0.9976 Val AUC: 0.9722 Time: 13.52\n",
      "Epoch: 945 Train Loss: 0.0814 Val Loss: 0.2255 Acc: 0.9076 Pre: 0.8949 Recall: 0.9296 F1: 0.9119 Train AUC: 0.9972 Val AUC: 0.9744 Time: 13.69\n",
      "Epoch: 946 Train Loss: 0.0691 Val Loss: 0.2231 Acc: 0.9040 Pre: 0.9081 Recall: 0.9049 F1: 0.9065 Train AUC: 0.9984 Val AUC: 0.9748 Time: 14.42\n",
      "Epoch: 947 Train Loss: 0.0789 Val Loss: 0.2238 Acc: 0.9112 Pre: 0.9038 Recall: 0.9261 F1: 0.9148 Train AUC: 0.9976 Val AUC: 0.9750 Time: 14.38\n",
      "Epoch: 948 Train Loss: 0.0867 Val Loss: 0.2380 Acc: 0.9022 Pre: 0.8783 Recall: 0.9401 F1: 0.9082 Train AUC: 0.9960 Val AUC: 0.9746 Time: 13.00\n",
      "Epoch: 949 Train Loss: 0.0739 Val Loss: 0.2440 Acc: 0.8967 Pre: 0.8650 Recall: 0.9472 F1: 0.9042 Train AUC: 0.9980 Val AUC: 0.9749 Time: 12.25\n",
      "Epoch: 950 Train Loss: 0.0714 Val Loss: 0.2257 Acc: 0.9076 Pre: 0.8976 Recall: 0.9261 F1: 0.9116 Train AUC: 0.9982 Val AUC: 0.9759 Time: 11.67\n",
      "Epoch: 951 Train Loss: 0.0701 Val Loss: 0.2310 Acc: 0.9094 Pre: 0.9149 Recall: 0.9085 F1: 0.9117 Train AUC: 0.9976 Val AUC: 0.9753 Time: 11.97\n",
      "Epoch: 952 Train Loss: 0.0776 Val Loss: 0.2220 Acc: 0.9112 Pre: 0.9094 Recall: 0.9190 F1: 0.9142 Train AUC: 0.9971 Val AUC: 0.9750 Time: 12.59\n",
      "Epoch: 953 Train Loss: 0.0701 Val Loss: 0.2217 Acc: 0.9167 Pre: 0.9103 Recall: 0.9296 F1: 0.9199 Train AUC: 0.9975 Val AUC: 0.9724 Time: 13.08\n",
      "Epoch: 954 Train Loss: 0.0744 Val Loss: 0.2289 Acc: 0.9167 Pre: 0.9048 Recall: 0.9366 F1: 0.9204 Train AUC: 0.9979 Val AUC: 0.9729 Time: 13.47\n",
      "Epoch: 955 Train Loss: 0.0684 Val Loss: 0.2367 Acc: 0.9076 Pre: 0.8896 Recall: 0.9366 F1: 0.9125 Train AUC: 0.9980 Val AUC: 0.9735 Time: 14.23\n",
      "Epoch: 956 Train Loss: 0.0695 Val Loss: 0.2443 Acc: 0.9040 Pre: 0.8863 Recall: 0.9331 F1: 0.9091 Train AUC: 0.9981 Val AUC: 0.9731 Time: 14.89\n",
      "Epoch: 957 Train Loss: 0.0785 Val Loss: 0.2373 Acc: 0.9130 Pre: 0.9097 Recall: 0.9225 F1: 0.9161 Train AUC: 0.9971 Val AUC: 0.9738 Time: 14.82\n",
      "Epoch: 958 Train Loss: 0.0811 Val Loss: 0.2293 Acc: 0.9058 Pre: 0.8973 Recall: 0.9225 F1: 0.9097 Train AUC: 0.9964 Val AUC: 0.9750 Time: 13.29\n",
      "Epoch: 959 Train Loss: 0.0810 Val Loss: 0.2458 Acc: 0.8949 Pre: 0.8645 Recall: 0.9437 F1: 0.9024 Train AUC: 0.9965 Val AUC: 0.9735 Time: 12.27\n",
      "Epoch: 960 Train Loss: 0.0753 Val Loss: 0.2288 Acc: 0.9022 Pre: 0.8808 Recall: 0.9366 F1: 0.9078 Train AUC: 0.9982 Val AUC: 0.9748 Time: 12.32\n",
      "Epoch: 961 Train Loss: 0.0806 Val Loss: 0.2221 Acc: 0.9167 Pre: 0.9103 Recall: 0.9296 F1: 0.9199 Train AUC: 0.9970 Val AUC: 0.9759 Time: 12.90\n",
      "Epoch: 962 Train Loss: 0.0776 Val Loss: 0.2279 Acc: 0.9149 Pre: 0.9100 Recall: 0.9261 F1: 0.9180 Train AUC: 0.9972 Val AUC: 0.9761 Time: 12.99\n",
      "Epoch: 963 Train Loss: 0.0740 Val Loss: 0.2292 Acc: 0.9076 Pre: 0.8949 Recall: 0.9296 F1: 0.9119 Train AUC: 0.9972 Val AUC: 0.9755 Time: 13.56\n",
      "Epoch: 964 Train Loss: 0.0678 Val Loss: 0.2313 Acc: 0.9022 Pre: 0.8808 Recall: 0.9366 F1: 0.9078 Train AUC: 0.9978 Val AUC: 0.9748 Time: 14.15\n",
      "Epoch: 965 Train Loss: 0.0676 Val Loss: 0.2311 Acc: 0.9076 Pre: 0.8923 Recall: 0.9331 F1: 0.9122 Train AUC: 0.9984 Val AUC: 0.9719 Time: 14.51\n",
      "Epoch: 966 Train Loss: 0.0697 Val Loss: 0.2263 Acc: 0.9094 Pre: 0.8980 Recall: 0.9296 F1: 0.9135 Train AUC: 0.9979 Val AUC: 0.9745 Time: 16.08\n",
      "Epoch: 967 Train Loss: 0.0691 Val Loss: 0.2276 Acc: 0.9076 Pre: 0.8896 Recall: 0.9366 F1: 0.9125 Train AUC: 0.9977 Val AUC: 0.9758 Time: 13.39\n",
      "Epoch: 968 Train Loss: 0.0684 Val Loss: 0.2322 Acc: 0.9094 Pre: 0.8874 Recall: 0.9437 F1: 0.9147 Train AUC: 0.9975 Val AUC: 0.9764 Time: 12.22\n",
      "Epoch: 969 Train Loss: 0.0707 Val Loss: 0.2246 Acc: 0.9239 Pre: 0.9116 Recall: 0.9437 F1: 0.9273 Train AUC: 0.9978 Val AUC: 0.9791 Time: 12.99\n",
      "Epoch: 970 Train Loss: 0.0713 Val Loss: 0.2180 Acc: 0.9094 Pre: 0.8980 Recall: 0.9296 F1: 0.9135 Train AUC: 0.9977 Val AUC: 0.9762 Time: 13.27\n",
      "Epoch: 971 Train Loss: 0.0713 Val Loss: 0.2178 Acc: 0.9167 Pre: 0.9103 Recall: 0.9296 F1: 0.9199 Train AUC: 0.9977 Val AUC: 0.9755 Time: 14.07\n",
      "Epoch: 972 Train Loss: 0.0703 Val Loss: 0.2217 Acc: 0.9149 Pre: 0.9072 Recall: 0.9296 F1: 0.9183 Train AUC: 0.9980 Val AUC: 0.9746 Time: 13.78\n",
      "Epoch: 973 Train Loss: 0.0786 Val Loss: 0.2309 Acc: 0.9094 Pre: 0.8900 Recall: 0.9401 F1: 0.9144 Train AUC: 0.9971 Val AUC: 0.9746 Time: 13.75\n",
      "Epoch: 974 Train Loss: 0.0732 Val Loss: 0.2329 Acc: 0.9076 Pre: 0.8870 Recall: 0.9401 F1: 0.9128 Train AUC: 0.9977 Val AUC: 0.9749 Time: 13.09\n",
      "Epoch: 975 Train Loss: 0.0736 Val Loss: 0.2264 Acc: 0.9040 Pre: 0.8889 Recall: 0.9296 F1: 0.9088 Train AUC: 0.9973 Val AUC: 0.9753 Time: 13.50\n",
      "Epoch: 976 Train Loss: 0.0635 Val Loss: 0.2276 Acc: 0.9058 Pre: 0.8893 Recall: 0.9331 F1: 0.9107 Train AUC: 0.9984 Val AUC: 0.9745 Time: 13.23\n",
      "Epoch: 977 Train Loss: 0.0698 Val Loss: 0.2271 Acc: 0.9040 Pre: 0.8889 Recall: 0.9296 F1: 0.9088 Train AUC: 0.9982 Val AUC: 0.9745 Time: 13.36\n",
      "Epoch: 978 Train Loss: 0.0694 Val Loss: 0.2320 Acc: 0.9022 Pre: 0.8859 Recall: 0.9296 F1: 0.9072 Train AUC: 0.9983 Val AUC: 0.9743 Time: 14.16\n",
      "Epoch: 979 Train Loss: 0.0712 Val Loss: 0.2382 Acc: 0.9022 Pre: 0.8859 Recall: 0.9296 F1: 0.9072 Train AUC: 0.9979 Val AUC: 0.9746 Time: 13.82\n",
      "Epoch: 980 Train Loss: 0.0756 Val Loss: 0.2375 Acc: 0.9094 Pre: 0.8926 Recall: 0.9366 F1: 0.9141 Train AUC: 0.9973 Val AUC: 0.9743 Time: 12.84\n",
      "Epoch: 981 Train Loss: 0.0705 Val Loss: 0.2245 Acc: 0.9058 Pre: 0.8893 Recall: 0.9331 F1: 0.9107 Train AUC: 0.9973 Val AUC: 0.9746 Time: 13.51\n",
      "Epoch: 982 Train Loss: 0.0673 Val Loss: 0.2218 Acc: 0.9185 Pre: 0.9107 Recall: 0.9331 F1: 0.9217 Train AUC: 0.9981 Val AUC: 0.9723 Time: 13.81\n",
      "Epoch: 983 Train Loss: 0.0734 Val Loss: 0.2183 Acc: 0.9203 Pre: 0.9138 Recall: 0.9331 F1: 0.9233 Train AUC: 0.9977 Val AUC: 0.9733 Time: 14.30\n",
      "Epoch: 984 Train Loss: 0.0760 Val Loss: 0.2187 Acc: 0.9167 Pre: 0.9075 Recall: 0.9331 F1: 0.9201 Train AUC: 0.9974 Val AUC: 0.9763 Time: 14.95\n",
      "Epoch: 985 Train Loss: 0.0662 Val Loss: 0.2299 Acc: 0.9239 Pre: 0.9116 Recall: 0.9437 F1: 0.9273 Train AUC: 0.9983 Val AUC: 0.9765 Time: 14.11\n",
      "Epoch: 986 Train Loss: 0.0804 Val Loss: 0.2250 Acc: 0.9112 Pre: 0.8956 Recall: 0.9366 F1: 0.9157 Train AUC: 0.9966 Val AUC: 0.9761 Time: 12.52\n",
      "Epoch: 987 Train Loss: 0.0718 Val Loss: 0.2249 Acc: 0.9040 Pre: 0.8889 Recall: 0.9296 F1: 0.9088 Train AUC: 0.9972 Val AUC: 0.9737 Time: 12.77\n",
      "Epoch: 988 Train Loss: 0.0787 Val Loss: 0.2309 Acc: 0.9040 Pre: 0.8889 Recall: 0.9296 F1: 0.9088 Train AUC: 0.9971 Val AUC: 0.9716 Time: 13.49\n",
      "Epoch: 989 Train Loss: 0.0747 Val Loss: 0.2232 Acc: 0.9076 Pre: 0.8949 Recall: 0.9296 F1: 0.9119 Train AUC: 0.9978 Val AUC: 0.9747 Time: 13.60\n",
      "Epoch: 990 Train Loss: 0.0669 Val Loss: 0.2335 Acc: 0.9149 Pre: 0.9044 Recall: 0.9331 F1: 0.9185 Train AUC: 0.9980 Val AUC: 0.9749 Time: 14.01\n",
      "Epoch: 991 Train Loss: 0.0723 Val Loss: 0.2342 Acc: 0.9130 Pre: 0.8986 Recall: 0.9366 F1: 0.9172 Train AUC: 0.9973 Val AUC: 0.9752 Time: 16.02\n",
      "Epoch: 992 Train Loss: 0.0723 Val Loss: 0.2345 Acc: 0.9094 Pre: 0.8874 Recall: 0.9437 F1: 0.9147 Train AUC: 0.9973 Val AUC: 0.9747 Time: 16.13\n",
      "Epoch: 993 Train Loss: 0.0658 Val Loss: 0.2470 Acc: 0.9094 Pre: 0.8874 Recall: 0.9437 F1: 0.9147 Train AUC: 0.9981 Val AUC: 0.9702 Time: 13.17\n",
      "Epoch: 994 Train Loss: 0.0727 Val Loss: 0.2373 Acc: 0.9094 Pre: 0.8980 Recall: 0.9296 F1: 0.9135 Train AUC: 0.9984 Val AUC: 0.9718 Time: 12.42\n",
      "Epoch: 995 Train Loss: 0.0717 Val Loss: 0.2283 Acc: 0.9112 Pre: 0.9066 Recall: 0.9225 F1: 0.9145 Train AUC: 0.9979 Val AUC: 0.9739 Time: 13.95\n",
      "Epoch: 996 Train Loss: 0.0685 Val Loss: 0.2348 Acc: 0.9203 Pre: 0.9054 Recall: 0.9437 F1: 0.9241 Train AUC: 0.9980 Val AUC: 0.9753 Time: 13.47\n",
      "Epoch: 997 Train Loss: 0.0687 Val Loss: 0.2408 Acc: 0.9130 Pre: 0.8933 Recall: 0.9437 F1: 0.9178 Train AUC: 0.9978 Val AUC: 0.9745 Time: 17.52\n",
      "Epoch: 998 Train Loss: 0.0655 Val Loss: 0.2330 Acc: 0.9112 Pre: 0.8956 Recall: 0.9366 F1: 0.9157 Train AUC: 0.9986 Val AUC: 0.9740 Time: 16.03\n",
      "Epoch: 999 Train Loss: 0.0694 Val Loss: 0.2248 Acc: 0.9167 Pre: 0.9103 Recall: 0.9296 F1: 0.9199 Train AUC: 0.9979 Val AUC: 0.9738 Time: 14.86\n",
      "Epoch: 1000 Train Loss: 0.0716 Val Loss: 0.2293 Acc: 0.9004 Pre: 0.8935 Recall: 0.9155 F1: 0.9043 Train AUC: 0.9978 Val AUC: 0.9723 Time: 14.35\n",
      "Fold: 1 Best Epoch: 969 Test acc: 0.9239 Test Pre: 0.9116 Test Recall: 0.9437 Test F1: 0.9273 Test PRC: 0.9806 Test AUC: 0.9791\n",
      "Training for Fold 2\n",
      "## Training edges: 2208\n",
      "## Testing edges: 552\n",
      "Epoch: 1 Train Loss: 1.0727 Val Loss: 1.9289 Acc: 0.5290 Pre: 0.5290 Recall: 1.0000 F1: 0.6919 Train AUC: 0.4213 Val AUC: 0.7873 Time: 14.91\n",
      "Epoch: 2 Train Loss: 1.9406 Val Loss: 1.2028 Acc: 0.5054 Pre: 0.7879 Recall: 0.0890 F1: 0.1600 Train AUC: 0.6998 Val AUC: 0.6359 Time: 14.94\n",
      "Epoch: 3 Train Loss: 1.5643 Val Loss: 0.6973 Acc: 0.6159 Pre: 0.9082 Recall: 0.3048 F1: 0.4564 Train AUC: 0.5483 Val AUC: 0.7680 Time: 14.29\n",
      "Epoch: 4 Train Loss: 0.7007 Val Loss: 0.5575 Acc: 0.7083 Pre: 0.6866 Recall: 0.8253 F1: 0.7496 Train AUC: 0.7318 Val AUC: 0.7915 Time: 15.08\n",
      "Epoch: 5 Train Loss: 0.7778 Val Loss: 0.6016 Acc: 0.6612 Pre: 0.6207 Recall: 0.9247 F1: 0.7428 Train AUC: 0.6543 Val AUC: 0.8447 Time: 14.64\n",
      "Epoch: 6 Train Loss: 0.6803 Val Loss: 0.5506 Acc: 0.6938 Pre: 0.6549 Recall: 0.8904 F1: 0.7547 Train AUC: 0.8058 Val AUC: 0.8460 Time: 12.63\n",
      "Epoch: 7 Train Loss: 0.6620 Val Loss: 0.4751 Acc: 0.7663 Pre: 0.7690 Recall: 0.7979 F1: 0.7832 Train AUC: 0.7935 Val AUC: 0.8517 Time: 12.17\n",
      "Epoch: 8 Train Loss: 0.6033 Val Loss: 0.4766 Acc: 0.7989 Pre: 0.8987 Recall: 0.6986 F1: 0.7861 Train AUC: 0.7728 Val AUC: 0.8742 Time: 13.18\n",
      "Epoch: 9 Train Loss: 0.5842 Val Loss: 0.5169 Acc: 0.8188 Pre: 0.9615 Recall: 0.6849 F1: 0.8000 Train AUC: 0.8070 Val AUC: 0.8783 Time: 13.35\n",
      "Epoch: 10 Train Loss: 0.6377 Val Loss: 0.5018 Acc: 0.8025 Pre: 0.9140 Recall: 0.6918 F1: 0.7875 Train AUC: 0.8016 Val AUC: 0.8735 Time: 13.71\n",
      "Epoch: 11 Train Loss: 0.4711 Val Loss: 0.4635 Acc: 0.8098 Pre: 0.8912 Recall: 0.7295 F1: 0.8023 Train AUC: 0.8775 Val AUC: 0.8751 Time: 14.12\n",
      "Epoch: 12 Train Loss: 0.4939 Val Loss: 0.4356 Acc: 0.8062 Pre: 0.8656 Recall: 0.7500 F1: 0.8037 Train AUC: 0.8540 Val AUC: 0.8786 Time: 13.90\n",
      "Epoch: 13 Train Loss: 0.4850 Val Loss: 0.4256 Acc: 0.7935 Pre: 0.8248 Recall: 0.7740 F1: 0.7986 Train AUC: 0.8574 Val AUC: 0.8810 Time: 13.38\n",
      "Epoch: 14 Train Loss: 0.4727 Val Loss: 0.4270 Acc: 0.7862 Pre: 0.8063 Recall: 0.7842 F1: 0.7951 Train AUC: 0.8723 Val AUC: 0.8840 Time: 13.09\n",
      "Epoch: 15 Train Loss: 0.4311 Val Loss: 0.4301 Acc: 0.7971 Pre: 0.8147 Recall: 0.7979 F1: 0.8062 Train AUC: 0.8849 Val AUC: 0.8844 Time: 12.65\n",
      "Epoch: 16 Train Loss: 0.4690 Val Loss: 0.4278 Acc: 0.7917 Pre: 0.8172 Recall: 0.7808 F1: 0.7986 Train AUC: 0.8783 Val AUC: 0.8862 Time: 12.25\n",
      "Epoch: 17 Train Loss: 0.4751 Val Loss: 0.4185 Acc: 0.8043 Pre: 0.8651 Recall: 0.7466 F1: 0.8015 Train AUC: 0.8708 Val AUC: 0.8909 Time: 12.48\n",
      "Epoch: 18 Train Loss: 0.4333 Val Loss: 0.4120 Acc: 0.8062 Pre: 0.8807 Recall: 0.7329 F1: 0.8000 Train AUC: 0.8861 Val AUC: 0.8964 Time: 13.96\n",
      "Epoch: 19 Train Loss: 0.4375 Val Loss: 0.4113 Acc: 0.8062 Pre: 0.9039 Recall: 0.7089 F1: 0.7946 Train AUC: 0.8859 Val AUC: 0.9013 Time: 13.81\n",
      "Epoch: 20 Train Loss: 0.4336 Val Loss: 0.4107 Acc: 0.8043 Pre: 0.9107 Recall: 0.6986 F1: 0.7907 Train AUC: 0.8907 Val AUC: 0.9029 Time: 14.27\n",
      "Epoch: 21 Train Loss: 0.4132 Val Loss: 0.4048 Acc: 0.8062 Pre: 0.9004 Recall: 0.7123 F1: 0.7954 Train AUC: 0.8999 Val AUC: 0.9031 Time: 15.05\n",
      "Epoch: 22 Train Loss: 0.4106 Val Loss: 0.3977 Acc: 0.8080 Pre: 0.8843 Recall: 0.7329 F1: 0.8015 Train AUC: 0.9005 Val AUC: 0.9018 Time: 13.14\n",
      "Epoch: 23 Train Loss: 0.3934 Val Loss: 0.3919 Acc: 0.8098 Pre: 0.8725 Recall: 0.7500 F1: 0.8066 Train AUC: 0.8997 Val AUC: 0.9012 Time: 13.02\n",
      "Epoch: 24 Train Loss: 0.4087 Val Loss: 0.3871 Acc: 0.8098 Pre: 0.8638 Recall: 0.7603 F1: 0.8087 Train AUC: 0.8944 Val AUC: 0.9025 Time: 12.55\n",
      "Epoch: 25 Train Loss: 0.3959 Val Loss: 0.3818 Acc: 0.8098 Pre: 0.8528 Recall: 0.7740 F1: 0.8115 Train AUC: 0.8994 Val AUC: 0.9048 Time: 12.61\n",
      "Epoch: 26 Train Loss: 0.3861 Val Loss: 0.3776 Acc: 0.8116 Pre: 0.8534 Recall: 0.7774 F1: 0.8136 Train AUC: 0.9020 Val AUC: 0.9072 Time: 12.63\n",
      "Epoch: 27 Train Loss: 0.3828 Val Loss: 0.3755 Acc: 0.8152 Pre: 0.8545 Recall: 0.7842 F1: 0.8179 Train AUC: 0.9030 Val AUC: 0.9078 Time: 13.89\n",
      "Epoch: 28 Train Loss: 0.4044 Val Loss: 0.3769 Acc: 0.8170 Pre: 0.8604 Recall: 0.7808 F1: 0.8187 Train AUC: 0.8951 Val AUC: 0.9070 Time: 13.94\n",
      "Epoch: 29 Train Loss: 0.3884 Val Loss: 0.3807 Acc: 0.8170 Pre: 0.8631 Recall: 0.7774 F1: 0.8180 Train AUC: 0.9024 Val AUC: 0.9048 Time: 14.67\n",
      "Epoch: 30 Train Loss: 0.3894 Val Loss: 0.3846 Acc: 0.8116 Pre: 0.8643 Recall: 0.7637 F1: 0.8109 Train AUC: 0.9039 Val AUC: 0.9038 Time: 14.55\n",
      "Epoch: 31 Train Loss: 0.3976 Val Loss: 0.3873 Acc: 0.8116 Pre: 0.8760 Recall: 0.7500 F1: 0.8081 Train AUC: 0.8989 Val AUC: 0.9043 Time: 12.66\n",
      "Epoch: 32 Train Loss: 0.3702 Val Loss: 0.3864 Acc: 0.8116 Pre: 0.8790 Recall: 0.7466 F1: 0.8074 Train AUC: 0.9119 Val AUC: 0.9058 Time: 13.11\n",
      "Epoch: 33 Train Loss: 0.3753 Val Loss: 0.3823 Acc: 0.8098 Pre: 0.8755 Recall: 0.7466 F1: 0.8059 Train AUC: 0.9099 Val AUC: 0.9080 Time: 12.53\n",
      "Epoch: 34 Train Loss: 0.4036 Val Loss: 0.3770 Acc: 0.8170 Pre: 0.8775 Recall: 0.7603 F1: 0.8147 Train AUC: 0.8982 Val AUC: 0.9102 Time: 12.63\n",
      "Epoch: 35 Train Loss: 0.3700 Val Loss: 0.3722 Acc: 0.8170 Pre: 0.8745 Recall: 0.7637 F1: 0.8154 Train AUC: 0.9102 Val AUC: 0.9113 Time: 13.31\n",
      "Epoch: 36 Train Loss: 0.3743 Val Loss: 0.3703 Acc: 0.8188 Pre: 0.8664 Recall: 0.7774 F1: 0.8195 Train AUC: 0.9092 Val AUC: 0.9113 Time: 14.14\n",
      "Epoch: 37 Train Loss: 0.3583 Val Loss: 0.3697 Acc: 0.8207 Pre: 0.8669 Recall: 0.7808 F1: 0.8216 Train AUC: 0.9160 Val AUC: 0.9112 Time: 14.63\n",
      "Epoch: 38 Train Loss: 0.3701 Val Loss: 0.3692 Acc: 0.8207 Pre: 0.8587 Recall: 0.7911 F1: 0.8235 Train AUC: 0.9132 Val AUC: 0.9117 Time: 16.34\n",
      "Epoch: 39 Train Loss: 0.3600 Val Loss: 0.3677 Acc: 0.8207 Pre: 0.8561 Recall: 0.7945 F1: 0.8242 Train AUC: 0.9150 Val AUC: 0.9122 Time: 12.84\n",
      "Epoch: 40 Train Loss: 0.3671 Val Loss: 0.3657 Acc: 0.8225 Pre: 0.8647 Recall: 0.7877 F1: 0.8244 Train AUC: 0.9129 Val AUC: 0.9130 Time: 12.06\n",
      "Epoch: 41 Train Loss: 0.3411 Val Loss: 0.3652 Acc: 0.8170 Pre: 0.8631 Recall: 0.7774 F1: 0.8180 Train AUC: 0.9249 Val AUC: 0.9138 Time: 14.47\n",
      "Epoch: 42 Train Loss: 0.3586 Val Loss: 0.3658 Acc: 0.8116 Pre: 0.8588 Recall: 0.7705 F1: 0.8123 Train AUC: 0.9170 Val AUC: 0.9139 Time: 15.26\n",
      "Epoch: 43 Train Loss: 0.3533 Val Loss: 0.3666 Acc: 0.8152 Pre: 0.8626 Recall: 0.7740 F1: 0.8159 Train AUC: 0.9206 Val AUC: 0.9134 Time: 14.03\n",
      "Epoch: 44 Train Loss: 0.3538 Val Loss: 0.3671 Acc: 0.8188 Pre: 0.8636 Recall: 0.7808 F1: 0.8201 Train AUC: 0.9187 Val AUC: 0.9133 Time: 14.54\n",
      "Epoch: 45 Train Loss: 0.3438 Val Loss: 0.3654 Acc: 0.8188 Pre: 0.8582 Recall: 0.7877 F1: 0.8214 Train AUC: 0.9253 Val AUC: 0.9137 Time: 13.72\n",
      "Epoch: 46 Train Loss: 0.3445 Val Loss: 0.3627 Acc: 0.8207 Pre: 0.8587 Recall: 0.7911 F1: 0.8235 Train AUC: 0.9259 Val AUC: 0.9145 Time: 12.66\n",
      "Epoch: 47 Train Loss: 0.3379 Val Loss: 0.3605 Acc: 0.8207 Pre: 0.8535 Recall: 0.7979 F1: 0.8248 Train AUC: 0.9268 Val AUC: 0.9151 Time: 12.24\n",
      "Epoch: 48 Train Loss: 0.3578 Val Loss: 0.3570 Acc: 0.8279 Pre: 0.8556 Recall: 0.8116 F1: 0.8330 Train AUC: 0.9181 Val AUC: 0.9165 Time: 14.78\n",
      "Epoch: 49 Train Loss: 0.3576 Val Loss: 0.3544 Acc: 0.8333 Pre: 0.8571 Recall: 0.8219 F1: 0.8392 Train AUC: 0.9189 Val AUC: 0.9174 Time: 12.75\n",
      "Epoch: 50 Train Loss: 0.3353 Val Loss: 0.3526 Acc: 0.8333 Pre: 0.8597 Recall: 0.8185 F1: 0.8386 Train AUC: 0.9267 Val AUC: 0.9189 Time: 12.93\n",
      "Epoch: 51 Train Loss: 0.3357 Val Loss: 0.3524 Acc: 0.8315 Pre: 0.8699 Recall: 0.8014 F1: 0.8342 Train AUC: 0.9306 Val AUC: 0.9205 Time: 13.48\n",
      "Epoch: 52 Train Loss: 0.3465 Val Loss: 0.3518 Acc: 0.8388 Pre: 0.8859 Recall: 0.7979 F1: 0.8396 Train AUC: 0.9235 Val AUC: 0.9209 Time: 14.32\n",
      "Epoch: 53 Train Loss: 0.3362 Val Loss: 0.3522 Acc: 0.8388 Pre: 0.8919 Recall: 0.7911 F1: 0.8385 Train AUC: 0.9287 Val AUC: 0.9212 Time: 13.64\n",
      "Epoch: 54 Train Loss: 0.3398 Val Loss: 0.3519 Acc: 0.8370 Pre: 0.8885 Recall: 0.7911 F1: 0.8370 Train AUC: 0.9287 Val AUC: 0.9213 Time: 12.87\n",
      "Epoch: 55 Train Loss: 0.3334 Val Loss: 0.3507 Acc: 0.8351 Pre: 0.8821 Recall: 0.7945 F1: 0.8360 Train AUC: 0.9280 Val AUC: 0.9215 Time: 11.59\n",
      "Epoch: 56 Train Loss: 0.3295 Val Loss: 0.3486 Acc: 0.8370 Pre: 0.8797 Recall: 0.8014 F1: 0.8387 Train AUC: 0.9303 Val AUC: 0.9215 Time: 11.83\n",
      "Epoch: 57 Train Loss: 0.3401 Val Loss: 0.3474 Acc: 0.8351 Pre: 0.8681 Recall: 0.8116 F1: 0.8389 Train AUC: 0.9270 Val AUC: 0.9216 Time: 12.06\n",
      "Epoch: 58 Train Loss: 0.3387 Val Loss: 0.3471 Acc: 0.8388 Pre: 0.8638 Recall: 0.8253 F1: 0.8441 Train AUC: 0.9279 Val AUC: 0.9217 Time: 12.35\n",
      "Epoch: 59 Train Loss: 0.3371 Val Loss: 0.3468 Acc: 0.8424 Pre: 0.8596 Recall: 0.8390 F1: 0.8492 Train AUC: 0.9273 Val AUC: 0.9224 Time: 12.76\n",
      "Epoch: 60 Train Loss: 0.3341 Val Loss: 0.3459 Acc: 0.8424 Pre: 0.8674 Recall: 0.8288 F1: 0.8476 Train AUC: 0.9293 Val AUC: 0.9229 Time: 13.38\n",
      "Epoch: 61 Train Loss: 0.3314 Val Loss: 0.3443 Acc: 0.8388 Pre: 0.8691 Recall: 0.8185 F1: 0.8430 Train AUC: 0.9295 Val AUC: 0.9240 Time: 14.07\n",
      "Epoch: 62 Train Loss: 0.3272 Val Loss: 0.3439 Acc: 0.8460 Pre: 0.8819 Recall: 0.8185 F1: 0.8490 Train AUC: 0.9326 Val AUC: 0.9249 Time: 14.52\n",
      "Epoch: 63 Train Loss: 0.3236 Val Loss: 0.3455 Acc: 0.8460 Pre: 0.8966 Recall: 0.8014 F1: 0.8463 Train AUC: 0.9336 Val AUC: 0.9258 Time: 13.52\n",
      "Epoch: 64 Train Loss: 0.3341 Val Loss: 0.3456 Acc: 0.8496 Pre: 0.9035 Recall: 0.8014 F1: 0.8494 Train AUC: 0.9312 Val AUC: 0.9263 Time: 13.23\n",
      "Epoch: 65 Train Loss: 0.3186 Val Loss: 0.3440 Acc: 0.8496 Pre: 0.8973 Recall: 0.8082 F1: 0.8505 Train AUC: 0.9341 Val AUC: 0.9268 Time: 12.00\n",
      "Epoch: 66 Train Loss: 0.3285 Val Loss: 0.3416 Acc: 0.8533 Pre: 0.8951 Recall: 0.8185 F1: 0.8551 Train AUC: 0.9326 Val AUC: 0.9268 Time: 11.20\n",
      "Epoch: 67 Train Loss: 0.3262 Val Loss: 0.3398 Acc: 0.8533 Pre: 0.8922 Recall: 0.8219 F1: 0.8556 Train AUC: 0.9339 Val AUC: 0.9270 Time: 11.58\n",
      "Epoch: 68 Train Loss: 0.3234 Val Loss: 0.3381 Acc: 0.8514 Pre: 0.8832 Recall: 0.8288 F1: 0.8551 Train AUC: 0.9348 Val AUC: 0.9273 Time: 11.96\n",
      "Epoch: 69 Train Loss: 0.3308 Val Loss: 0.3373 Acc: 0.8514 Pre: 0.8832 Recall: 0.8288 F1: 0.8551 Train AUC: 0.9315 Val AUC: 0.9278 Time: 13.12\n",
      "Epoch: 70 Train Loss: 0.3141 Val Loss: 0.3373 Acc: 0.8496 Pre: 0.8828 Recall: 0.8253 F1: 0.8531 Train AUC: 0.9368 Val AUC: 0.9287 Time: 12.91\n",
      "Epoch: 71 Train Loss: 0.3065 Val Loss: 0.3370 Acc: 0.8478 Pre: 0.8796 Recall: 0.8253 F1: 0.8516 Train AUC: 0.9401 Val AUC: 0.9291 Time: 16.21\n",
      "Epoch: 72 Train Loss: 0.3105 Val Loss: 0.3359 Acc: 0.8533 Pre: 0.8809 Recall: 0.8356 F1: 0.8576 Train AUC: 0.9381 Val AUC: 0.9294 Time: 15.06\n",
      "Epoch: 73 Train Loss: 0.3175 Val Loss: 0.3352 Acc: 0.8587 Pre: 0.8905 Recall: 0.8356 F1: 0.8622 Train AUC: 0.9379 Val AUC: 0.9296 Time: 14.89\n",
      "Epoch: 74 Train Loss: 0.3091 Val Loss: 0.3347 Acc: 0.8605 Pre: 0.8938 Recall: 0.8356 F1: 0.8637 Train AUC: 0.9390 Val AUC: 0.9300 Time: 13.88\n",
      "Epoch: 75 Train Loss: 0.3097 Val Loss: 0.3344 Acc: 0.8587 Pre: 0.8993 Recall: 0.8253 F1: 0.8607 Train AUC: 0.9387 Val AUC: 0.9304 Time: 12.59\n",
      "Epoch: 76 Train Loss: 0.3070 Val Loss: 0.3355 Acc: 0.8605 Pre: 0.9057 Recall: 0.8219 F1: 0.8618 Train AUC: 0.9402 Val AUC: 0.9309 Time: 11.76\n",
      "Epoch: 77 Train Loss: 0.3046 Val Loss: 0.3352 Acc: 0.8605 Pre: 0.9057 Recall: 0.8219 F1: 0.8618 Train AUC: 0.9421 Val AUC: 0.9312 Time: 12.68\n",
      "Epoch: 78 Train Loss: 0.3103 Val Loss: 0.3335 Acc: 0.8587 Pre: 0.9023 Recall: 0.8219 F1: 0.8602 Train AUC: 0.9391 Val AUC: 0.9315 Time: 12.85\n",
      "Epoch: 79 Train Loss: 0.3149 Val Loss: 0.3318 Acc: 0.8587 Pre: 0.8993 Recall: 0.8253 F1: 0.8607 Train AUC: 0.9377 Val AUC: 0.9315 Time: 13.14\n",
      "Epoch: 80 Train Loss: 0.3086 Val Loss: 0.3299 Acc: 0.8623 Pre: 0.8971 Recall: 0.8356 F1: 0.8652 Train AUC: 0.9395 Val AUC: 0.9317 Time: 13.46\n",
      "Epoch: 81 Train Loss: 0.3171 Val Loss: 0.3287 Acc: 0.8659 Pre: 0.8949 Recall: 0.8459 F1: 0.8697 Train AUC: 0.9376 Val AUC: 0.9321 Time: 15.94\n",
      "Epoch: 82 Train Loss: 0.3004 Val Loss: 0.3281 Acc: 0.8659 Pre: 0.8949 Recall: 0.8459 F1: 0.8697 Train AUC: 0.9432 Val AUC: 0.9327 Time: 12.90\n",
      "Epoch: 83 Train Loss: 0.3227 Val Loss: 0.3284 Acc: 0.8605 Pre: 0.8938 Recall: 0.8356 F1: 0.8637 Train AUC: 0.9353 Val AUC: 0.9327 Time: 12.09\n",
      "Epoch: 84 Train Loss: 0.3063 Val Loss: 0.3302 Acc: 0.8551 Pre: 0.8926 Recall: 0.8253 F1: 0.8577 Train AUC: 0.9414 Val AUC: 0.9323 Time: 12.10\n",
      "Epoch: 85 Train Loss: 0.2989 Val Loss: 0.3317 Acc: 0.8569 Pre: 0.8959 Recall: 0.8253 F1: 0.8592 Train AUC: 0.9438 Val AUC: 0.9322 Time: 12.54\n",
      "Epoch: 86 Train Loss: 0.3007 Val Loss: 0.3319 Acc: 0.8569 Pre: 0.8959 Recall: 0.8253 F1: 0.8592 Train AUC: 0.9435 Val AUC: 0.9321 Time: 12.37\n",
      "Epoch: 87 Train Loss: 0.3054 Val Loss: 0.3306 Acc: 0.8569 Pre: 0.8959 Recall: 0.8253 F1: 0.8592 Train AUC: 0.9415 Val AUC: 0.9323 Time: 13.10\n",
      "Epoch: 88 Train Loss: 0.3030 Val Loss: 0.3286 Acc: 0.8678 Pre: 0.8982 Recall: 0.8459 F1: 0.8713 Train AUC: 0.9421 Val AUC: 0.9329 Time: 14.08\n",
      "Epoch: 89 Train Loss: 0.2978 Val Loss: 0.3252 Acc: 0.8696 Pre: 0.9015 Recall: 0.8459 F1: 0.8728 Train AUC: 0.9440 Val AUC: 0.9339 Time: 14.46\n",
      "Epoch: 90 Train Loss: 0.2922 Val Loss: 0.3233 Acc: 0.8678 Pre: 0.8953 Recall: 0.8493 F1: 0.8717 Train AUC: 0.9460 Val AUC: 0.9350 Time: 13.59\n",
      "Epoch: 91 Train Loss: 0.3029 Val Loss: 0.3234 Acc: 0.8623 Pre: 0.8885 Recall: 0.8459 F1: 0.8667 Train AUC: 0.9428 Val AUC: 0.9353 Time: 13.31\n",
      "Epoch: 92 Train Loss: 0.3033 Val Loss: 0.3243 Acc: 0.8623 Pre: 0.8942 Recall: 0.8390 F1: 0.8657 Train AUC: 0.9430 Val AUC: 0.9349 Time: 13.04\n",
      "Epoch: 93 Train Loss: 0.3041 Val Loss: 0.3236 Acc: 0.8714 Pre: 0.9108 Recall: 0.8390 F1: 0.8734 Train AUC: 0.9437 Val AUC: 0.9349 Time: 14.04\n",
      "Epoch: 94 Train Loss: 0.3031 Val Loss: 0.3230 Acc: 0.8714 Pre: 0.9108 Recall: 0.8390 F1: 0.8734 Train AUC: 0.9433 Val AUC: 0.9350 Time: 13.16\n",
      "Epoch: 95 Train Loss: 0.2947 Val Loss: 0.3230 Acc: 0.8714 Pre: 0.9139 Recall: 0.8356 F1: 0.8730 Train AUC: 0.9446 Val AUC: 0.9350 Time: 13.51\n",
      "Epoch: 96 Train Loss: 0.3044 Val Loss: 0.3229 Acc: 0.8732 Pre: 0.9142 Recall: 0.8390 F1: 0.8750 Train AUC: 0.9430 Val AUC: 0.9351 Time: 16.12\n",
      "Epoch: 97 Train Loss: 0.2958 Val Loss: 0.3219 Acc: 0.8768 Pre: 0.9148 Recall: 0.8459 F1: 0.8790 Train AUC: 0.9433 Val AUC: 0.9355 Time: 14.95\n",
      "Epoch: 98 Train Loss: 0.2923 Val Loss: 0.3216 Acc: 0.8750 Pre: 0.9145 Recall: 0.8425 F1: 0.8770 Train AUC: 0.9465 Val AUC: 0.9355 Time: 14.53\n",
      "Epoch: 99 Train Loss: 0.2969 Val Loss: 0.3202 Acc: 0.8732 Pre: 0.9111 Recall: 0.8425 F1: 0.8754 Train AUC: 0.9438 Val AUC: 0.9360 Time: 12.77\n",
      "Epoch: 100 Train Loss: 0.2922 Val Loss: 0.3193 Acc: 0.8641 Pre: 0.9094 Recall: 0.8253 F1: 0.8654 Train AUC: 0.9458 Val AUC: 0.9365 Time: 11.55\n",
      "Epoch: 101 Train Loss: 0.2865 Val Loss: 0.3185 Acc: 0.8641 Pre: 0.9064 Recall: 0.8288 F1: 0.8658 Train AUC: 0.9481 Val AUC: 0.9371 Time: 11.99\n",
      "Epoch: 102 Train Loss: 0.2847 Val Loss: 0.3181 Acc: 0.8696 Pre: 0.9044 Recall: 0.8425 F1: 0.8723 Train AUC: 0.9502 Val AUC: 0.9373 Time: 12.64\n",
      "Epoch: 103 Train Loss: 0.2874 Val Loss: 0.3174 Acc: 0.8678 Pre: 0.9041 Recall: 0.8390 F1: 0.8703 Train AUC: 0.9489 Val AUC: 0.9373 Time: 12.89\n",
      "Epoch: 104 Train Loss: 0.2926 Val Loss: 0.3164 Acc: 0.8714 Pre: 0.9077 Recall: 0.8425 F1: 0.8739 Train AUC: 0.9483 Val AUC: 0.9372 Time: 13.56\n",
      "Epoch: 105 Train Loss: 0.2805 Val Loss: 0.3163 Acc: 0.8714 Pre: 0.9077 Recall: 0.8425 F1: 0.8739 Train AUC: 0.9511 Val AUC: 0.9373 Time: 14.49\n",
      "Epoch: 106 Train Loss: 0.2860 Val Loss: 0.3158 Acc: 0.8732 Pre: 0.9081 Recall: 0.8459 F1: 0.8759 Train AUC: 0.9475 Val AUC: 0.9373 Time: 14.88\n",
      "Epoch: 107 Train Loss: 0.2895 Val Loss: 0.3156 Acc: 0.8768 Pre: 0.9118 Recall: 0.8493 F1: 0.8794 Train AUC: 0.9466 Val AUC: 0.9376 Time: 15.86\n",
      "Epoch: 108 Train Loss: 0.2812 Val Loss: 0.3152 Acc: 0.8750 Pre: 0.9084 Recall: 0.8493 F1: 0.8779 Train AUC: 0.9504 Val AUC: 0.9379 Time: 13.70\n",
      "Epoch: 109 Train Loss: 0.2897 Val Loss: 0.3162 Acc: 0.8732 Pre: 0.9111 Recall: 0.8425 F1: 0.8754 Train AUC: 0.9471 Val AUC: 0.9378 Time: 12.39\n",
      "Epoch: 110 Train Loss: 0.2847 Val Loss: 0.3170 Acc: 0.8696 Pre: 0.9135 Recall: 0.8322 F1: 0.8710 Train AUC: 0.9497 Val AUC: 0.9377 Time: 11.50\n",
      "Epoch: 111 Train Loss: 0.2811 Val Loss: 0.3175 Acc: 0.8659 Pre: 0.9129 Recall: 0.8253 F1: 0.8669 Train AUC: 0.9495 Val AUC: 0.9384 Time: 11.93\n",
      "Epoch: 112 Train Loss: 0.2915 Val Loss: 0.3156 Acc: 0.8714 Pre: 0.9108 Recall: 0.8390 F1: 0.8734 Train AUC: 0.9472 Val AUC: 0.9387 Time: 12.04\n",
      "Epoch: 113 Train Loss: 0.2859 Val Loss: 0.3129 Acc: 0.8714 Pre: 0.9048 Recall: 0.8459 F1: 0.8743 Train AUC: 0.9489 Val AUC: 0.9388 Time: 12.31\n",
      "Epoch: 114 Train Loss: 0.2802 Val Loss: 0.3119 Acc: 0.8696 Pre: 0.8986 Recall: 0.8493 F1: 0.8732 Train AUC: 0.9524 Val AUC: 0.9387 Time: 13.05\n",
      "Epoch: 115 Train Loss: 0.2872 Val Loss: 0.3111 Acc: 0.8678 Pre: 0.8953 Recall: 0.8493 F1: 0.8717 Train AUC: 0.9484 Val AUC: 0.9393 Time: 13.46\n",
      "Epoch: 116 Train Loss: 0.2781 Val Loss: 0.3102 Acc: 0.8696 Pre: 0.8986 Recall: 0.8493 F1: 0.8732 Train AUC: 0.9520 Val AUC: 0.9396 Time: 13.80\n",
      "Epoch: 117 Train Loss: 0.2981 Val Loss: 0.3124 Acc: 0.8732 Pre: 0.9142 Recall: 0.8390 F1: 0.8750 Train AUC: 0.9451 Val AUC: 0.9394 Time: 14.90\n",
      "Epoch: 118 Train Loss: 0.2845 Val Loss: 0.3153 Acc: 0.8750 Pre: 0.9176 Recall: 0.8390 F1: 0.8766 Train AUC: 0.9513 Val AUC: 0.9383 Time: 14.95\n",
      "Epoch: 119 Train Loss: 0.2808 Val Loss: 0.3149 Acc: 0.8750 Pre: 0.9145 Recall: 0.8425 F1: 0.8770 Train AUC: 0.9508 Val AUC: 0.9377 Time: 13.34\n",
      "Epoch: 120 Train Loss: 0.2823 Val Loss: 0.3140 Acc: 0.8641 Pre: 0.8917 Recall: 0.8459 F1: 0.8682 Train AUC: 0.9510 Val AUC: 0.9376 Time: 12.35\n",
      "Epoch: 121 Train Loss: 0.2855 Val Loss: 0.3127 Acc: 0.8641 Pre: 0.8917 Recall: 0.8459 F1: 0.8682 Train AUC: 0.9501 Val AUC: 0.9381 Time: 12.40\n",
      "Epoch: 122 Train Loss: 0.2897 Val Loss: 0.3104 Acc: 0.8696 Pre: 0.9015 Recall: 0.8459 F1: 0.8728 Train AUC: 0.9475 Val AUC: 0.9391 Time: 12.42\n",
      "Epoch: 123 Train Loss: 0.2787 Val Loss: 0.3080 Acc: 0.8641 Pre: 0.8889 Recall: 0.8493 F1: 0.8687 Train AUC: 0.9515 Val AUC: 0.9403 Time: 12.67\n",
      "Epoch: 124 Train Loss: 0.2741 Val Loss: 0.3074 Acc: 0.8659 Pre: 0.8921 Recall: 0.8493 F1: 0.8702 Train AUC: 0.9550 Val AUC: 0.9405 Time: 13.15\n",
      "Epoch: 125 Train Loss: 0.2898 Val Loss: 0.3079 Acc: 0.8696 Pre: 0.8986 Recall: 0.8493 F1: 0.8732 Train AUC: 0.9491 Val AUC: 0.9403 Time: 13.72\n",
      "Epoch: 126 Train Loss: 0.2798 Val Loss: 0.3104 Acc: 0.8732 Pre: 0.9051 Recall: 0.8493 F1: 0.8763 Train AUC: 0.9513 Val AUC: 0.9398 Time: 13.96\n",
      "Epoch: 127 Train Loss: 0.2767 Val Loss: 0.3118 Acc: 0.8750 Pre: 0.9084 Recall: 0.8493 F1: 0.8779 Train AUC: 0.9523 Val AUC: 0.9395 Time: 13.22\n",
      "Epoch: 128 Train Loss: 0.2775 Val Loss: 0.3116 Acc: 0.8732 Pre: 0.9051 Recall: 0.8493 F1: 0.8763 Train AUC: 0.9525 Val AUC: 0.9393 Time: 12.71\n",
      "Epoch: 129 Train Loss: 0.2712 Val Loss: 0.3093 Acc: 0.8659 Pre: 0.8921 Recall: 0.8493 F1: 0.8702 Train AUC: 0.9538 Val AUC: 0.9395 Time: 12.86\n",
      "Epoch: 130 Train Loss: 0.2729 Val Loss: 0.3071 Acc: 0.8696 Pre: 0.8986 Recall: 0.8493 F1: 0.8732 Train AUC: 0.9531 Val AUC: 0.9403 Time: 13.93\n",
      "Epoch: 131 Train Loss: 0.2722 Val Loss: 0.3069 Acc: 0.8696 Pre: 0.8986 Recall: 0.8493 F1: 0.8732 Train AUC: 0.9540 Val AUC: 0.9407 Time: 13.79\n",
      "Epoch: 132 Train Loss: 0.2632 Val Loss: 0.3074 Acc: 0.8678 Pre: 0.9041 Recall: 0.8390 F1: 0.8703 Train AUC: 0.9583 Val AUC: 0.9411 Time: 14.40\n",
      "Epoch: 133 Train Loss: 0.2725 Val Loss: 0.3054 Acc: 0.8696 Pre: 0.9015 Recall: 0.8459 F1: 0.8728 Train AUC: 0.9550 Val AUC: 0.9416 Time: 15.06\n",
      "Epoch: 134 Train Loss: 0.2766 Val Loss: 0.3029 Acc: 0.8678 Pre: 0.8982 Recall: 0.8459 F1: 0.8713 Train AUC: 0.9533 Val AUC: 0.9422 Time: 13.22\n",
      "Epoch: 135 Train Loss: 0.2638 Val Loss: 0.3019 Acc: 0.8696 Pre: 0.8986 Recall: 0.8493 F1: 0.8732 Train AUC: 0.9573 Val AUC: 0.9426 Time: 12.43\n",
      "Epoch: 136 Train Loss: 0.2573 Val Loss: 0.3015 Acc: 0.8678 Pre: 0.8982 Recall: 0.8459 F1: 0.8713 Train AUC: 0.9586 Val AUC: 0.9429 Time: 12.89\n",
      "Epoch: 137 Train Loss: 0.2655 Val Loss: 0.3017 Acc: 0.8678 Pre: 0.8982 Recall: 0.8459 F1: 0.8713 Train AUC: 0.9555 Val AUC: 0.9429 Time: 12.91\n",
      "Epoch: 138 Train Loss: 0.2568 Val Loss: 0.3017 Acc: 0.8678 Pre: 0.8982 Recall: 0.8459 F1: 0.8713 Train AUC: 0.9580 Val AUC: 0.9431 Time: 14.50\n",
      "Epoch: 139 Train Loss: 0.2642 Val Loss: 0.3009 Acc: 0.8696 Pre: 0.9015 Recall: 0.8459 F1: 0.8728 Train AUC: 0.9563 Val AUC: 0.9435 Time: 13.74\n",
      "Epoch: 140 Train Loss: 0.2647 Val Loss: 0.3001 Acc: 0.8714 Pre: 0.9018 Recall: 0.8493 F1: 0.8748 Train AUC: 0.9564 Val AUC: 0.9439 Time: 13.11\n",
      "Epoch: 141 Train Loss: 0.2731 Val Loss: 0.2989 Acc: 0.8696 Pre: 0.9015 Recall: 0.8459 F1: 0.8728 Train AUC: 0.9523 Val AUC: 0.9440 Time: 12.75\n",
      "Epoch: 142 Train Loss: 0.2920 Val Loss: 0.3005 Acc: 0.8714 Pre: 0.9077 Recall: 0.8425 F1: 0.8739 Train AUC: 0.9476 Val AUC: 0.9433 Time: 13.36\n",
      "Epoch: 143 Train Loss: 0.2653 Val Loss: 0.3050 Acc: 0.8678 Pre: 0.9071 Recall: 0.8356 F1: 0.8699 Train AUC: 0.9547 Val AUC: 0.9422 Time: 13.35\n",
      "Epoch: 144 Train Loss: 0.2754 Val Loss: 0.3048 Acc: 0.8678 Pre: 0.9101 Recall: 0.8322 F1: 0.8694 Train AUC: 0.9530 Val AUC: 0.9416 Time: 14.14\n",
      "Epoch: 145 Train Loss: 0.2685 Val Loss: 0.3009 Acc: 0.8732 Pre: 0.9142 Recall: 0.8390 F1: 0.8750 Train AUC: 0.9550 Val AUC: 0.9426 Time: 13.43\n",
      "Epoch: 146 Train Loss: 0.2675 Val Loss: 0.2968 Acc: 0.8714 Pre: 0.9048 Recall: 0.8459 F1: 0.8743 Train AUC: 0.9557 Val AUC: 0.9437 Time: 13.12\n",
      "Epoch: 147 Train Loss: 0.2649 Val Loss: 0.2967 Acc: 0.8659 Pre: 0.8978 Recall: 0.8425 F1: 0.8693 Train AUC: 0.9568 Val AUC: 0.9443 Time: 12.33\n",
      "Epoch: 148 Train Loss: 0.2592 Val Loss: 0.2996 Acc: 0.8678 Pre: 0.9041 Recall: 0.8390 F1: 0.8703 Train AUC: 0.9602 Val AUC: 0.9443 Time: 12.30\n",
      "Epoch: 149 Train Loss: 0.2639 Val Loss: 0.3024 Acc: 0.8623 Pre: 0.9060 Recall: 0.8253 F1: 0.8638 Train AUC: 0.9583 Val AUC: 0.9434 Time: 13.14\n",
      "Epoch: 150 Train Loss: 0.2728 Val Loss: 0.3039 Acc: 0.8623 Pre: 0.9060 Recall: 0.8253 F1: 0.8638 Train AUC: 0.9568 Val AUC: 0.9433 Time: 14.32\n",
      "Epoch: 151 Train Loss: 0.2634 Val Loss: 0.3020 Acc: 0.8623 Pre: 0.9030 Recall: 0.8288 F1: 0.8643 Train AUC: 0.9569 Val AUC: 0.9437 Time: 13.39\n",
      "Epoch: 152 Train Loss: 0.2632 Val Loss: 0.2988 Acc: 0.8678 Pre: 0.9041 Recall: 0.8390 F1: 0.8703 Train AUC: 0.9566 Val AUC: 0.9442 Time: 13.41\n",
      "Epoch: 153 Train Loss: 0.2650 Val Loss: 0.2961 Acc: 0.8714 Pre: 0.8989 Recall: 0.8527 F1: 0.8752 Train AUC: 0.9551 Val AUC: 0.9445 Time: 14.82\n",
      "Epoch: 154 Train Loss: 0.2576 Val Loss: 0.2954 Acc: 0.8623 Pre: 0.8803 Recall: 0.8562 F1: 0.8681 Train AUC: 0.9589 Val AUC: 0.9450 Time: 12.91\n",
      "Epoch: 155 Train Loss: 0.2633 Val Loss: 0.2933 Acc: 0.8696 Pre: 0.8929 Recall: 0.8562 F1: 0.8741 Train AUC: 0.9593 Val AUC: 0.9451 Time: 12.49\n",
      "Epoch: 156 Train Loss: 0.2525 Val Loss: 0.2958 Acc: 0.8750 Pre: 0.9176 Recall: 0.8390 F1: 0.8766 Train AUC: 0.9609 Val AUC: 0.9453 Time: 11.62\n",
      "Epoch: 157 Train Loss: 0.2529 Val Loss: 0.2977 Acc: 0.8714 Pre: 0.9202 Recall: 0.8288 F1: 0.8721 Train AUC: 0.9622 Val AUC: 0.9456 Time: 11.39\n",
      "Epoch: 158 Train Loss: 0.2577 Val Loss: 0.2961 Acc: 0.8659 Pre: 0.9067 Recall: 0.8322 F1: 0.8679 Train AUC: 0.9613 Val AUC: 0.9459 Time: 11.92\n",
      "Epoch: 159 Train Loss: 0.2650 Val Loss: 0.2952 Acc: 0.8659 Pre: 0.9007 Recall: 0.8390 F1: 0.8688 Train AUC: 0.9569 Val AUC: 0.9463 Time: 12.32\n",
      "Epoch: 160 Train Loss: 0.2485 Val Loss: 0.2947 Acc: 0.8696 Pre: 0.9015 Recall: 0.8459 F1: 0.8728 Train AUC: 0.9623 Val AUC: 0.9461 Time: 12.73\n",
      "Epoch: 161 Train Loss: 0.2492 Val Loss: 0.2938 Acc: 0.8768 Pre: 0.9058 Recall: 0.8562 F1: 0.8803 Train AUC: 0.9618 Val AUC: 0.9460 Time: 13.39\n",
      "Epoch: 162 Train Loss: 0.2536 Val Loss: 0.2942 Acc: 0.8768 Pre: 0.9058 Recall: 0.8562 F1: 0.8803 Train AUC: 0.9588 Val AUC: 0.9456 Time: 14.07\n",
      "Epoch: 163 Train Loss: 0.2373 Val Loss: 0.2947 Acc: 0.8768 Pre: 0.9058 Recall: 0.8562 F1: 0.8803 Train AUC: 0.9652 Val AUC: 0.9453 Time: 14.41\n",
      "Epoch: 164 Train Loss: 0.2563 Val Loss: 0.2936 Acc: 0.8750 Pre: 0.9025 Recall: 0.8562 F1: 0.8787 Train AUC: 0.9592 Val AUC: 0.9457 Time: 12.91\n",
      "Epoch: 165 Train Loss: 0.2538 Val Loss: 0.2924 Acc: 0.8714 Pre: 0.8961 Recall: 0.8562 F1: 0.8757 Train AUC: 0.9605 Val AUC: 0.9461 Time: 12.04\n",
      "Epoch: 166 Train Loss: 0.2497 Val Loss: 0.2924 Acc: 0.8678 Pre: 0.8953 Recall: 0.8493 F1: 0.8717 Train AUC: 0.9611 Val AUC: 0.9465 Time: 11.76\n",
      "Epoch: 167 Train Loss: 0.2427 Val Loss: 0.2927 Acc: 0.8659 Pre: 0.8921 Recall: 0.8493 F1: 0.8702 Train AUC: 0.9643 Val AUC: 0.9469 Time: 12.28\n",
      "Epoch: 168 Train Loss: 0.2536 Val Loss: 0.2925 Acc: 0.8732 Pre: 0.9051 Recall: 0.8493 F1: 0.8763 Train AUC: 0.9610 Val AUC: 0.9473 Time: 12.25\n",
      "Epoch: 169 Train Loss: 0.2443 Val Loss: 0.2928 Acc: 0.8641 Pre: 0.9064 Recall: 0.8288 F1: 0.8658 Train AUC: 0.9627 Val AUC: 0.9477 Time: 12.56\n",
      "Epoch: 170 Train Loss: 0.2504 Val Loss: 0.2925 Acc: 0.8659 Pre: 0.9098 Recall: 0.8288 F1: 0.8674 Train AUC: 0.9614 Val AUC: 0.9478 Time: 13.08\n",
      "Epoch: 171 Train Loss: 0.2521 Val Loss: 0.2901 Acc: 0.8768 Pre: 0.9118 Recall: 0.8493 F1: 0.8794 Train AUC: 0.9597 Val AUC: 0.9479 Time: 13.63\n",
      "Epoch: 172 Train Loss: 0.2431 Val Loss: 0.2888 Acc: 0.8750 Pre: 0.9025 Recall: 0.8562 F1: 0.8787 Train AUC: 0.9637 Val AUC: 0.9476 Time: 14.35\n",
      "Epoch: 173 Train Loss: 0.2451 Val Loss: 0.2898 Acc: 0.8696 Pre: 0.8929 Recall: 0.8562 F1: 0.8741 Train AUC: 0.9637 Val AUC: 0.9473 Time: 13.58\n",
      "Epoch: 174 Train Loss: 0.2469 Val Loss: 0.2884 Acc: 0.8714 Pre: 0.8961 Recall: 0.8562 F1: 0.8757 Train AUC: 0.9632 Val AUC: 0.9478 Time: 12.92\n",
      "Epoch: 175 Train Loss: 0.2439 Val Loss: 0.2877 Acc: 0.8696 Pre: 0.9015 Recall: 0.8459 F1: 0.8728 Train AUC: 0.9639 Val AUC: 0.9481 Time: 11.90\n",
      "Epoch: 176 Train Loss: 0.2458 Val Loss: 0.2911 Acc: 0.8732 Pre: 0.9173 Recall: 0.8356 F1: 0.8746 Train AUC: 0.9630 Val AUC: 0.9479 Time: 11.49\n",
      "Epoch: 177 Train Loss: 0.2470 Val Loss: 0.2912 Acc: 0.8696 Pre: 0.9104 Recall: 0.8356 F1: 0.8714 Train AUC: 0.9632 Val AUC: 0.9476 Time: 12.06\n",
      "Epoch: 178 Train Loss: 0.2448 Val Loss: 0.2897 Acc: 0.8696 Pre: 0.9044 Recall: 0.8425 F1: 0.8723 Train AUC: 0.9638 Val AUC: 0.9478 Time: 12.47\n",
      "Epoch: 179 Train Loss: 0.2373 Val Loss: 0.2877 Acc: 0.8678 Pre: 0.8953 Recall: 0.8493 F1: 0.8717 Train AUC: 0.9662 Val AUC: 0.9482 Time: 12.88\n",
      "Epoch: 180 Train Loss: 0.2360 Val Loss: 0.2873 Acc: 0.8714 Pre: 0.8989 Recall: 0.8527 F1: 0.8752 Train AUC: 0.9666 Val AUC: 0.9477 Time: 14.11\n",
      "Epoch: 181 Train Loss: 0.2435 Val Loss: 0.2890 Acc: 0.8714 Pre: 0.9018 Recall: 0.8493 F1: 0.8748 Train AUC: 0.9651 Val AUC: 0.9475 Time: 14.40\n",
      "Epoch: 182 Train Loss: 0.2395 Val Loss: 0.2892 Acc: 0.8678 Pre: 0.8925 Recall: 0.8527 F1: 0.8722 Train AUC: 0.9651 Val AUC: 0.9480 Time: 14.80\n",
      "Epoch: 183 Train Loss: 0.2460 Val Loss: 0.2889 Acc: 0.8623 Pre: 0.8803 Recall: 0.8562 F1: 0.8681 Train AUC: 0.9628 Val AUC: 0.9485 Time: 13.66\n",
      "Epoch: 184 Train Loss: 0.2358 Val Loss: 0.2877 Acc: 0.8623 Pre: 0.8857 Recall: 0.8493 F1: 0.8671 Train AUC: 0.9667 Val AUC: 0.9492 Time: 13.31\n",
      "Epoch: 185 Train Loss: 0.2362 Val Loss: 0.2883 Acc: 0.8678 Pre: 0.9011 Recall: 0.8425 F1: 0.8708 Train AUC: 0.9667 Val AUC: 0.9491 Time: 13.40\n",
      "Epoch: 186 Train Loss: 0.2273 Val Loss: 0.2897 Acc: 0.8732 Pre: 0.9111 Recall: 0.8425 F1: 0.8754 Train AUC: 0.9679 Val AUC: 0.9481 Time: 11.40\n",
      "Epoch: 187 Train Loss: 0.2328 Val Loss: 0.2881 Acc: 0.8714 Pre: 0.9077 Recall: 0.8425 F1: 0.8739 Train AUC: 0.9675 Val AUC: 0.9481 Time: 11.80\n",
      "Epoch: 188 Train Loss: 0.2360 Val Loss: 0.2853 Acc: 0.8678 Pre: 0.9011 Recall: 0.8425 F1: 0.8708 Train AUC: 0.9663 Val AUC: 0.9486 Time: 13.58\n",
      "Epoch: 189 Train Loss: 0.2325 Val Loss: 0.2862 Acc: 0.8678 Pre: 0.8897 Recall: 0.8562 F1: 0.8726 Train AUC: 0.9674 Val AUC: 0.9485 Time: 12.54\n",
      "Epoch: 190 Train Loss: 0.2375 Val Loss: 0.2889 Acc: 0.8732 Pre: 0.9081 Recall: 0.8459 F1: 0.8759 Train AUC: 0.9662 Val AUC: 0.9473 Time: 13.44\n",
      "Epoch: 191 Train Loss: 0.2295 Val Loss: 0.2894 Acc: 0.8732 Pre: 0.9111 Recall: 0.8425 F1: 0.8754 Train AUC: 0.9673 Val AUC: 0.9478 Time: 13.85\n",
      "Epoch: 192 Train Loss: 0.2401 Val Loss: 0.2852 Acc: 0.8714 Pre: 0.9018 Recall: 0.8493 F1: 0.8748 Train AUC: 0.9640 Val AUC: 0.9492 Time: 14.41\n",
      "Epoch: 193 Train Loss: 0.2365 Val Loss: 0.2852 Acc: 0.8605 Pre: 0.8881 Recall: 0.8425 F1: 0.8647 Train AUC: 0.9662 Val AUC: 0.9502 Time: 15.81\n",
      "Epoch: 194 Train Loss: 0.2410 Val Loss: 0.2865 Acc: 0.8696 Pre: 0.9044 Recall: 0.8425 F1: 0.8723 Train AUC: 0.9658 Val AUC: 0.9492 Time: 16.10\n",
      "Epoch: 195 Train Loss: 0.2349 Val Loss: 0.2877 Acc: 0.8714 Pre: 0.9077 Recall: 0.8425 F1: 0.8739 Train AUC: 0.9666 Val AUC: 0.9489 Time: 14.24\n",
      "Epoch: 196 Train Loss: 0.2343 Val Loss: 0.2846 Acc: 0.8696 Pre: 0.9044 Recall: 0.8425 F1: 0.8723 Train AUC: 0.9667 Val AUC: 0.9494 Time: 12.18\n",
      "Epoch: 197 Train Loss: 0.2371 Val Loss: 0.2823 Acc: 0.8678 Pre: 0.9011 Recall: 0.8425 F1: 0.8708 Train AUC: 0.9664 Val AUC: 0.9498 Time: 12.42\n",
      "Epoch: 198 Train Loss: 0.2222 Val Loss: 0.2832 Acc: 0.8659 Pre: 0.8921 Recall: 0.8493 F1: 0.8702 Train AUC: 0.9700 Val AUC: 0.9500 Time: 13.62\n",
      "Epoch: 199 Train Loss: 0.2342 Val Loss: 0.2838 Acc: 0.8659 Pre: 0.8949 Recall: 0.8459 F1: 0.8697 Train AUC: 0.9686 Val AUC: 0.9497 Time: 13.00\n",
      "Epoch: 200 Train Loss: 0.2301 Val Loss: 0.2864 Acc: 0.8732 Pre: 0.9051 Recall: 0.8493 F1: 0.8763 Train AUC: 0.9680 Val AUC: 0.9500 Time: 13.31\n",
      "Epoch: 201 Train Loss: 0.2259 Val Loss: 0.2910 Acc: 0.8750 Pre: 0.9084 Recall: 0.8493 F1: 0.8779 Train AUC: 0.9688 Val AUC: 0.9501 Time: 13.79\n",
      "Epoch: 202 Train Loss: 0.2277 Val Loss: 0.2893 Acc: 0.8768 Pre: 0.9148 Recall: 0.8459 F1: 0.8790 Train AUC: 0.9678 Val AUC: 0.9505 Time: 14.56\n",
      "Epoch: 203 Train Loss: 0.2314 Val Loss: 0.2846 Acc: 0.8714 Pre: 0.9077 Recall: 0.8425 F1: 0.8739 Train AUC: 0.9667 Val AUC: 0.9498 Time: 15.48\n",
      "Epoch: 204 Train Loss: 0.2335 Val Loss: 0.2855 Acc: 0.8587 Pre: 0.8821 Recall: 0.8459 F1: 0.8636 Train AUC: 0.9667 Val AUC: 0.9491 Time: 16.17\n",
      "Epoch: 205 Train Loss: 0.2240 Val Loss: 0.2854 Acc: 0.8623 Pre: 0.8803 Recall: 0.8562 F1: 0.8681 Train AUC: 0.9712 Val AUC: 0.9487 Time: 19.71\n",
      "Epoch: 206 Train Loss: 0.2223 Val Loss: 0.2842 Acc: 0.8659 Pre: 0.8949 Recall: 0.8459 F1: 0.8697 Train AUC: 0.9725 Val AUC: 0.9493 Time: 12.78\n",
      "Epoch: 207 Train Loss: 0.2278 Val Loss: 0.2909 Acc: 0.8786 Pre: 0.9213 Recall: 0.8425 F1: 0.8801 Train AUC: 0.9682 Val AUC: 0.9493 Time: 13.33\n",
      "Epoch: 208 Train Loss: 0.2271 Val Loss: 0.2861 Acc: 0.8714 Pre: 0.9018 Recall: 0.8493 F1: 0.8748 Train AUC: 0.9687 Val AUC: 0.9504 Time: 13.89\n",
      "Epoch: 209 Train Loss: 0.2308 Val Loss: 0.2860 Acc: 0.8623 Pre: 0.8699 Recall: 0.8699 F1: 0.8699 Train AUC: 0.9677 Val AUC: 0.9515 Time: 14.95\n",
      "Epoch: 210 Train Loss: 0.2310 Val Loss: 0.2833 Acc: 0.8641 Pre: 0.8754 Recall: 0.8664 F1: 0.8709 Train AUC: 0.9687 Val AUC: 0.9514 Time: 14.06\n",
      "Epoch: 211 Train Loss: 0.2235 Val Loss: 0.2826 Acc: 0.8659 Pre: 0.9007 Recall: 0.8390 F1: 0.8688 Train AUC: 0.9719 Val AUC: 0.9498 Time: 12.86\n",
      "Epoch: 212 Train Loss: 0.2235 Val Loss: 0.2944 Acc: 0.8696 Pre: 0.9198 Recall: 0.8253 F1: 0.8700 Train AUC: 0.9702 Val AUC: 0.9488 Time: 12.18\n",
      "Epoch: 213 Train Loss: 0.2361 Val Loss: 0.2865 Acc: 0.8623 Pre: 0.9000 Recall: 0.8322 F1: 0.8648 Train AUC: 0.9688 Val AUC: 0.9497 Time: 12.87\n",
      "Epoch: 214 Train Loss: 0.2302 Val Loss: 0.2810 Acc: 0.8623 Pre: 0.8885 Recall: 0.8459 F1: 0.8667 Train AUC: 0.9699 Val AUC: 0.9510 Time: 12.43\n",
      "Epoch: 215 Train Loss: 0.2218 Val Loss: 0.2801 Acc: 0.8605 Pre: 0.8853 Recall: 0.8459 F1: 0.8651 Train AUC: 0.9721 Val AUC: 0.9530 Time: 13.01\n",
      "Epoch: 216 Train Loss: 0.2278 Val Loss: 0.2776 Acc: 0.8696 Pre: 0.8986 Recall: 0.8493 F1: 0.8732 Train AUC: 0.9708 Val AUC: 0.9528 Time: 13.83\n",
      "Epoch: 217 Train Loss: 0.2273 Val Loss: 0.2870 Acc: 0.8804 Pre: 0.9248 Recall: 0.8425 F1: 0.8817 Train AUC: 0.9694 Val AUC: 0.9512 Time: 14.97\n",
      "Epoch: 218 Train Loss: 0.2247 Val Loss: 0.2866 Acc: 0.8786 Pre: 0.9213 Recall: 0.8425 F1: 0.8801 Train AUC: 0.9696 Val AUC: 0.9504 Time: 16.82\n",
      "Epoch: 219 Train Loss: 0.2244 Val Loss: 0.2791 Acc: 0.8714 Pre: 0.8961 Recall: 0.8562 F1: 0.8757 Train AUC: 0.9702 Val AUC: 0.9516 Time: 14.79\n",
      "Epoch: 220 Train Loss: 0.2150 Val Loss: 0.2793 Acc: 0.8714 Pre: 0.8877 Recall: 0.8664 F1: 0.8769 Train AUC: 0.9718 Val AUC: 0.9527 Time: 13.25\n",
      "Epoch: 221 Train Loss: 0.2276 Val Loss: 0.2770 Acc: 0.8623 Pre: 0.8913 Recall: 0.8425 F1: 0.8662 Train AUC: 0.9694 Val AUC: 0.9536 Time: 12.56\n",
      "Epoch: 222 Train Loss: 0.2216 Val Loss: 0.2818 Acc: 0.8659 Pre: 0.8978 Recall: 0.8425 F1: 0.8693 Train AUC: 0.9702 Val AUC: 0.9524 Time: 12.01\n",
      "Epoch: 223 Train Loss: 0.2123 Val Loss: 0.2853 Acc: 0.8641 Pre: 0.9004 Recall: 0.8356 F1: 0.8668 Train AUC: 0.9738 Val AUC: 0.9517 Time: 12.56\n",
      "Epoch: 224 Train Loss: 0.2215 Val Loss: 0.2824 Acc: 0.8659 Pre: 0.8978 Recall: 0.8425 F1: 0.8693 Train AUC: 0.9709 Val AUC: 0.9511 Time: 13.83\n",
      "Epoch: 225 Train Loss: 0.2155 Val Loss: 0.2789 Acc: 0.8696 Pre: 0.8957 Recall: 0.8527 F1: 0.8737 Train AUC: 0.9731 Val AUC: 0.9509 Time: 13.59\n",
      "Epoch: 226 Train Loss: 0.2286 Val Loss: 0.2797 Acc: 0.8714 Pre: 0.8824 Recall: 0.8733 F1: 0.8778 Train AUC: 0.9689 Val AUC: 0.9506 Time: 14.15\n",
      "Epoch: 227 Train Loss: 0.2235 Val Loss: 0.2805 Acc: 0.8714 Pre: 0.8877 Recall: 0.8664 F1: 0.8769 Train AUC: 0.9703 Val AUC: 0.9505 Time: 14.86\n",
      "Epoch: 228 Train Loss: 0.2222 Val Loss: 0.2822 Acc: 0.8750 Pre: 0.8968 Recall: 0.8630 F1: 0.8796 Train AUC: 0.9698 Val AUC: 0.9507 Time: 15.50\n",
      "Epoch: 229 Train Loss: 0.2144 Val Loss: 0.2832 Acc: 0.8768 Pre: 0.9088 Recall: 0.8527 F1: 0.8799 Train AUC: 0.9720 Val AUC: 0.9519 Time: 13.58\n",
      "Epoch: 230 Train Loss: 0.2154 Val Loss: 0.2805 Acc: 0.8750 Pre: 0.8996 Recall: 0.8596 F1: 0.8792 Train AUC: 0.9714 Val AUC: 0.9527 Time: 13.45\n",
      "Epoch: 231 Train Loss: 0.2222 Val Loss: 0.2785 Acc: 0.8678 Pre: 0.8789 Recall: 0.8699 F1: 0.8744 Train AUC: 0.9709 Val AUC: 0.9527 Time: 13.74\n",
      "Epoch: 232 Train Loss: 0.2124 Val Loss: 0.2785 Acc: 0.8696 Pre: 0.8819 Recall: 0.8699 F1: 0.8759 Train AUC: 0.9743 Val AUC: 0.9521 Time: 13.19\n",
      "Epoch: 233 Train Loss: 0.2069 Val Loss: 0.2798 Acc: 0.8569 Pre: 0.8845 Recall: 0.8390 F1: 0.8612 Train AUC: 0.9755 Val AUC: 0.9511 Time: 13.57\n",
      "Epoch: 234 Train Loss: 0.2164 Val Loss: 0.2832 Acc: 0.8659 Pre: 0.9037 Recall: 0.8356 F1: 0.8683 Train AUC: 0.9722 Val AUC: 0.9513 Time: 17.16\n",
      "Epoch: 235 Train Loss: 0.2226 Val Loss: 0.2788 Acc: 0.8750 Pre: 0.8968 Recall: 0.8630 F1: 0.8796 Train AUC: 0.9726 Val AUC: 0.9524 Time: 14.51\n",
      "Epoch: 236 Train Loss: 0.2134 Val Loss: 0.2785 Acc: 0.8696 Pre: 0.8819 Recall: 0.8699 F1: 0.8759 Train AUC: 0.9721 Val AUC: 0.9522 Time: 13.09\n",
      "Epoch: 237 Train Loss: 0.2185 Val Loss: 0.2783 Acc: 0.8714 Pre: 0.8824 Recall: 0.8733 F1: 0.8778 Train AUC: 0.9717 Val AUC: 0.9524 Time: 12.74\n",
      "Epoch: 238 Train Loss: 0.2163 Val Loss: 0.2766 Acc: 0.8732 Pre: 0.8936 Recall: 0.8630 F1: 0.8780 Train AUC: 0.9724 Val AUC: 0.9529 Time: 14.07\n",
      "Epoch: 239 Train Loss: 0.2075 Val Loss: 0.2771 Acc: 0.8714 Pre: 0.8989 Recall: 0.8527 F1: 0.8752 Train AUC: 0.9747 Val AUC: 0.9530 Time: 13.41\n",
      "Epoch: 240 Train Loss: 0.2095 Val Loss: 0.2760 Acc: 0.8623 Pre: 0.8913 Recall: 0.8425 F1: 0.8662 Train AUC: 0.9734 Val AUC: 0.9532 Time: 13.94\n",
      "Epoch: 241 Train Loss: 0.2132 Val Loss: 0.2754 Acc: 0.8659 Pre: 0.8921 Recall: 0.8493 F1: 0.8702 Train AUC: 0.9731 Val AUC: 0.9536 Time: 14.56\n",
      "Epoch: 242 Train Loss: 0.1999 Val Loss: 0.2756 Acc: 0.8714 Pre: 0.8905 Recall: 0.8630 F1: 0.8765 Train AUC: 0.9763 Val AUC: 0.9535 Time: 12.94\n",
      "Epoch: 243 Train Loss: 0.2076 Val Loss: 0.2758 Acc: 0.8750 Pre: 0.8968 Recall: 0.8630 F1: 0.8796 Train AUC: 0.9738 Val AUC: 0.9537 Time: 12.60\n",
      "Epoch: 244 Train Loss: 0.2038 Val Loss: 0.2768 Acc: 0.8714 Pre: 0.9018 Recall: 0.8493 F1: 0.8748 Train AUC: 0.9750 Val AUC: 0.9533 Time: 13.91\n",
      "Epoch: 245 Train Loss: 0.2113 Val Loss: 0.2759 Acc: 0.8678 Pre: 0.9011 Recall: 0.8425 F1: 0.8708 Train AUC: 0.9729 Val AUC: 0.9534 Time: 13.97\n",
      "Epoch: 246 Train Loss: 0.2071 Val Loss: 0.2726 Acc: 0.8696 Pre: 0.8901 Recall: 0.8596 F1: 0.8746 Train AUC: 0.9747 Val AUC: 0.9538 Time: 14.67\n",
      "Epoch: 247 Train Loss: 0.2105 Val Loss: 0.2713 Acc: 0.8732 Pre: 0.8908 Recall: 0.8664 F1: 0.8785 Train AUC: 0.9738 Val AUC: 0.9548 Time: 13.54\n",
      "Epoch: 248 Train Loss: 0.2075 Val Loss: 0.2727 Acc: 0.8714 Pre: 0.8905 Recall: 0.8630 F1: 0.8765 Train AUC: 0.9745 Val AUC: 0.9549 Time: 12.40\n",
      "Epoch: 249 Train Loss: 0.2057 Val Loss: 0.2748 Acc: 0.8750 Pre: 0.8968 Recall: 0.8630 F1: 0.8796 Train AUC: 0.9745 Val AUC: 0.9549 Time: 12.32\n",
      "Epoch: 250 Train Loss: 0.2035 Val Loss: 0.2748 Acc: 0.8750 Pre: 0.8968 Recall: 0.8630 F1: 0.8796 Train AUC: 0.9755 Val AUC: 0.9545 Time: 13.33\n",
      "Epoch: 251 Train Loss: 0.2021 Val Loss: 0.2749 Acc: 0.8750 Pre: 0.9025 Recall: 0.8562 F1: 0.8787 Train AUC: 0.9752 Val AUC: 0.9542 Time: 13.32\n",
      "Epoch: 252 Train Loss: 0.2024 Val Loss: 0.2758 Acc: 0.8750 Pre: 0.9025 Recall: 0.8562 F1: 0.8787 Train AUC: 0.9755 Val AUC: 0.9534 Time: 13.77\n",
      "Epoch: 253 Train Loss: 0.2104 Val Loss: 0.2769 Acc: 0.8786 Pre: 0.9032 Recall: 0.8630 F1: 0.8827 Train AUC: 0.9735 Val AUC: 0.9531 Time: 15.40\n",
      "Epoch: 254 Train Loss: 0.1966 Val Loss: 0.2766 Acc: 0.8714 Pre: 0.8824 Recall: 0.8733 F1: 0.8778 Train AUC: 0.9771 Val AUC: 0.9535 Time: 16.05\n",
      "Epoch: 255 Train Loss: 0.2033 Val Loss: 0.2749 Acc: 0.8732 Pre: 0.8854 Recall: 0.8733 F1: 0.8793 Train AUC: 0.9750 Val AUC: 0.9541 Time: 12.49\n",
      "Epoch: 256 Train Loss: 0.2039 Val Loss: 0.2731 Acc: 0.8750 Pre: 0.8912 Recall: 0.8699 F1: 0.8804 Train AUC: 0.9749 Val AUC: 0.9544 Time: 12.24\n",
      "Epoch: 257 Train Loss: 0.1911 Val Loss: 0.2739 Acc: 0.8696 Pre: 0.8901 Recall: 0.8596 F1: 0.8746 Train AUC: 0.9779 Val AUC: 0.9543 Time: 12.32\n",
      "Epoch: 258 Train Loss: 0.2012 Val Loss: 0.2757 Acc: 0.8641 Pre: 0.8889 Recall: 0.8493 F1: 0.8687 Train AUC: 0.9755 Val AUC: 0.9539 Time: 13.20\n",
      "Epoch: 259 Train Loss: 0.2001 Val Loss: 0.2722 Acc: 0.8678 Pre: 0.8869 Recall: 0.8596 F1: 0.8730 Train AUC: 0.9775 Val AUC: 0.9547 Time: 14.12\n",
      "Epoch: 260 Train Loss: 0.2019 Val Loss: 0.2707 Acc: 0.8659 Pre: 0.8838 Recall: 0.8596 F1: 0.8715 Train AUC: 0.9761 Val AUC: 0.9548 Time: 14.22\n",
      "Epoch: 261 Train Loss: 0.2017 Val Loss: 0.2715 Acc: 0.8750 Pre: 0.8940 Recall: 0.8664 F1: 0.8800 Train AUC: 0.9758 Val AUC: 0.9545 Time: 14.17\n",
      "Epoch: 262 Train Loss: 0.2016 Val Loss: 0.2731 Acc: 0.8768 Pre: 0.9000 Recall: 0.8630 F1: 0.8811 Train AUC: 0.9756 Val AUC: 0.9544 Time: 13.29\n",
      "Epoch: 263 Train Loss: 0.2052 Val Loss: 0.2711 Acc: 0.8732 Pre: 0.8881 Recall: 0.8699 F1: 0.8789 Train AUC: 0.9743 Val AUC: 0.9549 Time: 12.98\n",
      "Epoch: 264 Train Loss: 0.2065 Val Loss: 0.2701 Acc: 0.8678 Pre: 0.8815 Recall: 0.8664 F1: 0.8739 Train AUC: 0.9739 Val AUC: 0.9557 Time: 12.89\n",
      "Epoch: 265 Train Loss: 0.1993 Val Loss: 0.2720 Acc: 0.8696 Pre: 0.8846 Recall: 0.8664 F1: 0.8754 Train AUC: 0.9768 Val AUC: 0.9554 Time: 17.15\n",
      "Epoch: 266 Train Loss: 0.2011 Val Loss: 0.2739 Acc: 0.8696 Pre: 0.8901 Recall: 0.8596 F1: 0.8746 Train AUC: 0.9762 Val AUC: 0.9553 Time: 15.33\n",
      "Epoch: 267 Train Loss: 0.2005 Val Loss: 0.2723 Acc: 0.8678 Pre: 0.8869 Recall: 0.8596 F1: 0.8730 Train AUC: 0.9761 Val AUC: 0.9557 Time: 14.28\n",
      "Epoch: 268 Train Loss: 0.1939 Val Loss: 0.2717 Acc: 0.8714 Pre: 0.8850 Recall: 0.8699 F1: 0.8774 Train AUC: 0.9773 Val AUC: 0.9545 Time: 15.29\n",
      "Epoch: 269 Train Loss: 0.1991 Val Loss: 0.2712 Acc: 0.8768 Pre: 0.8889 Recall: 0.8767 F1: 0.8828 Train AUC: 0.9765 Val AUC: 0.9547 Time: 14.17\n",
      "Epoch: 270 Train Loss: 0.1970 Val Loss: 0.2705 Acc: 0.8786 Pre: 0.8975 Recall: 0.8699 F1: 0.8835 Train AUC: 0.9764 Val AUC: 0.9557 Time: 14.79\n",
      "Epoch: 271 Train Loss: 0.1930 Val Loss: 0.2718 Acc: 0.8768 Pre: 0.8972 Recall: 0.8664 F1: 0.8815 Train AUC: 0.9781 Val AUC: 0.9558 Time: 14.80\n",
      "Epoch: 272 Train Loss: 0.1941 Val Loss: 0.2722 Acc: 0.8786 Pre: 0.8975 Recall: 0.8699 F1: 0.8835 Train AUC: 0.9771 Val AUC: 0.9557 Time: 13.55\n",
      "Epoch: 273 Train Loss: 0.2014 Val Loss: 0.2709 Acc: 0.8786 Pre: 0.8975 Recall: 0.8699 F1: 0.8835 Train AUC: 0.9759 Val AUC: 0.9555 Time: 14.10\n",
      "Epoch: 274 Train Loss: 0.1955 Val Loss: 0.2697 Acc: 0.8786 Pre: 0.8947 Recall: 0.8733 F1: 0.8839 Train AUC: 0.9769 Val AUC: 0.9560 Time: 13.97\n",
      "Epoch: 275 Train Loss: 0.1936 Val Loss: 0.2696 Acc: 0.8750 Pre: 0.8912 Recall: 0.8699 F1: 0.8804 Train AUC: 0.9779 Val AUC: 0.9555 Time: 14.63\n",
      "Epoch: 276 Train Loss: 0.1960 Val Loss: 0.2683 Acc: 0.8750 Pre: 0.8885 Recall: 0.8733 F1: 0.8808 Train AUC: 0.9778 Val AUC: 0.9558 Time: 14.41\n",
      "Epoch: 277 Train Loss: 0.1962 Val Loss: 0.2677 Acc: 0.8714 Pre: 0.8877 Recall: 0.8664 F1: 0.8769 Train AUC: 0.9772 Val AUC: 0.9565 Time: 13.40\n",
      "Epoch: 278 Train Loss: 0.1956 Val Loss: 0.2714 Acc: 0.8804 Pre: 0.9007 Recall: 0.8699 F1: 0.8850 Train AUC: 0.9781 Val AUC: 0.9565 Time: 12.98\n",
      "Epoch: 279 Train Loss: 0.1894 Val Loss: 0.2752 Acc: 0.8804 Pre: 0.9007 Recall: 0.8699 F1: 0.8850 Train AUC: 0.9783 Val AUC: 0.9567 Time: 13.04\n",
      "Epoch: 280 Train Loss: 0.1963 Val Loss: 0.2700 Acc: 0.8786 Pre: 0.8975 Recall: 0.8699 F1: 0.8835 Train AUC: 0.9762 Val AUC: 0.9564 Time: 13.70\n",
      "Epoch: 281 Train Loss: 0.1960 Val Loss: 0.2681 Acc: 0.8859 Pre: 0.8855 Recall: 0.9007 F1: 0.8930 Train AUC: 0.9768 Val AUC: 0.9563 Time: 13.46\n",
      "Epoch: 282 Train Loss: 0.1956 Val Loss: 0.2707 Acc: 0.8750 Pre: 0.8858 Recall: 0.8767 F1: 0.8812 Train AUC: 0.9778 Val AUC: 0.9545 Time: 13.53\n",
      "Epoch: 283 Train Loss: 0.1875 Val Loss: 0.2769 Acc: 0.8569 Pre: 0.8845 Recall: 0.8390 F1: 0.8612 Train AUC: 0.9794 Val AUC: 0.9547 Time: 14.54\n",
      "Epoch: 284 Train Loss: 0.1973 Val Loss: 0.2721 Acc: 0.8659 Pre: 0.8811 Recall: 0.8630 F1: 0.8720 Train AUC: 0.9797 Val AUC: 0.9561 Time: 14.60\n",
      "Epoch: 285 Train Loss: 0.1864 Val Loss: 0.2720 Acc: 0.8659 Pre: 0.8759 Recall: 0.8699 F1: 0.8729 Train AUC: 0.9792 Val AUC: 0.9569 Time: 14.31\n",
      "Epoch: 286 Train Loss: 0.1906 Val Loss: 0.2725 Acc: 0.8641 Pre: 0.8678 Recall: 0.8767 F1: 0.8722 Train AUC: 0.9787 Val AUC: 0.9571 Time: 13.52\n",
      "Epoch: 287 Train Loss: 0.1890 Val Loss: 0.2704 Acc: 0.8696 Pre: 0.8819 Recall: 0.8699 F1: 0.8759 Train AUC: 0.9791 Val AUC: 0.9556 Time: 13.15\n",
      "Epoch: 288 Train Loss: 0.1820 Val Loss: 0.2700 Acc: 0.8750 Pre: 0.8940 Recall: 0.8664 F1: 0.8800 Train AUC: 0.9792 Val AUC: 0.9559 Time: 13.03\n",
      "Epoch: 289 Train Loss: 0.1918 Val Loss: 0.2676 Acc: 0.8714 Pre: 0.8850 Recall: 0.8699 F1: 0.8774 Train AUC: 0.9776 Val AUC: 0.9562 Time: 13.04\n",
      "Epoch: 290 Train Loss: 0.1887 Val Loss: 0.2667 Acc: 0.8750 Pre: 0.8858 Recall: 0.8767 F1: 0.8812 Train AUC: 0.9799 Val AUC: 0.9575 Time: 13.62\n",
      "Epoch: 291 Train Loss: 0.1963 Val Loss: 0.2704 Acc: 0.8732 Pre: 0.8881 Recall: 0.8699 F1: 0.8789 Train AUC: 0.9773 Val AUC: 0.9574 Time: 14.19\n",
      "Epoch: 292 Train Loss: 0.1897 Val Loss: 0.2718 Acc: 0.8750 Pre: 0.8912 Recall: 0.8699 F1: 0.8804 Train AUC: 0.9784 Val AUC: 0.9573 Time: 13.69\n",
      "Epoch: 293 Train Loss: 0.1926 Val Loss: 0.2693 Acc: 0.8732 Pre: 0.8854 Recall: 0.8733 F1: 0.8793 Train AUC: 0.9772 Val AUC: 0.9566 Time: 13.58\n",
      "Epoch: 294 Train Loss: 0.1894 Val Loss: 0.2716 Acc: 0.8750 Pre: 0.8805 Recall: 0.8836 F1: 0.8821 Train AUC: 0.9785 Val AUC: 0.9552 Time: 13.86\n",
      "Epoch: 295 Train Loss: 0.1790 Val Loss: 0.2723 Acc: 0.8804 Pre: 0.8818 Recall: 0.8938 F1: 0.8878 Train AUC: 0.9809 Val AUC: 0.9552 Time: 13.70\n",
      "Epoch: 296 Train Loss: 0.1802 Val Loss: 0.2688 Acc: 0.8822 Pre: 0.8822 Recall: 0.8973 F1: 0.8896 Train AUC: 0.9811 Val AUC: 0.9563 Time: 13.02\n",
      "Epoch: 297 Train Loss: 0.1901 Val Loss: 0.2665 Acc: 0.8696 Pre: 0.8819 Recall: 0.8699 F1: 0.8759 Train AUC: 0.9792 Val AUC: 0.9580 Time: 12.44\n",
      "Epoch: 298 Train Loss: 0.1866 Val Loss: 0.2691 Acc: 0.8732 Pre: 0.8908 Recall: 0.8664 F1: 0.8785 Train AUC: 0.9787 Val AUC: 0.9579 Time: 12.58\n",
      "Epoch: 299 Train Loss: 0.1910 Val Loss: 0.2701 Acc: 0.8732 Pre: 0.8964 Recall: 0.8596 F1: 0.8776 Train AUC: 0.9776 Val AUC: 0.9579 Time: 13.71\n",
      "Epoch: 300 Train Loss: 0.1861 Val Loss: 0.2650 Acc: 0.8768 Pre: 0.8836 Recall: 0.8836 F1: 0.8836 Train AUC: 0.9798 Val AUC: 0.9586 Time: 13.65\n",
      "Epoch: 301 Train Loss: 0.1867 Val Loss: 0.2665 Acc: 0.8895 Pre: 0.8889 Recall: 0.9041 F1: 0.8964 Train AUC: 0.9796 Val AUC: 0.9579 Time: 14.25\n",
      "Epoch: 302 Train Loss: 0.1889 Val Loss: 0.2697 Acc: 0.8732 Pre: 0.8881 Recall: 0.8699 F1: 0.8789 Train AUC: 0.9786 Val AUC: 0.9558 Time: 14.46\n",
      "Epoch: 303 Train Loss: 0.1922 Val Loss: 0.2716 Acc: 0.8750 Pre: 0.8968 Recall: 0.8630 F1: 0.8796 Train AUC: 0.9779 Val AUC: 0.9556 Time: 13.57\n",
      "Epoch: 304 Train Loss: 0.1811 Val Loss: 0.2676 Acc: 0.8678 Pre: 0.8815 Recall: 0.8664 F1: 0.8739 Train AUC: 0.9801 Val AUC: 0.9565 Time: 13.29\n",
      "Epoch: 305 Train Loss: 0.1787 Val Loss: 0.2673 Acc: 0.8714 Pre: 0.8824 Recall: 0.8733 F1: 0.8778 Train AUC: 0.9814 Val AUC: 0.9570 Time: 13.52\n",
      "Epoch: 306 Train Loss: 0.1800 Val Loss: 0.2669 Acc: 0.8714 Pre: 0.8850 Recall: 0.8699 F1: 0.8774 Train AUC: 0.9825 Val AUC: 0.9577 Time: 13.84\n",
      "Epoch: 307 Train Loss: 0.1832 Val Loss: 0.2673 Acc: 0.8768 Pre: 0.8944 Recall: 0.8699 F1: 0.8819 Train AUC: 0.9805 Val AUC: 0.9584 Time: 13.14\n",
      "Epoch: 308 Train Loss: 0.1815 Val Loss: 0.2660 Acc: 0.8877 Pre: 0.9049 Recall: 0.8801 F1: 0.8924 Train AUC: 0.9802 Val AUC: 0.9587 Time: 13.97\n",
      "Epoch: 309 Train Loss: 0.1750 Val Loss: 0.2651 Acc: 0.8913 Pre: 0.9028 Recall: 0.8904 F1: 0.8966 Train AUC: 0.9818 Val AUC: 0.9584 Time: 15.77\n",
      "Epoch: 310 Train Loss: 0.1784 Val Loss: 0.2646 Acc: 0.8895 Pre: 0.8969 Recall: 0.8938 F1: 0.8954 Train AUC: 0.9806 Val AUC: 0.9582 Time: 13.63\n",
      "Epoch: 311 Train Loss: 0.1805 Val Loss: 0.2647 Acc: 0.8714 Pre: 0.8932 Recall: 0.8596 F1: 0.8761 Train AUC: 0.9803 Val AUC: 0.9575 Time: 12.74\n",
      "Epoch: 312 Train Loss: 0.1823 Val Loss: 0.2665 Acc: 0.8732 Pre: 0.8854 Recall: 0.8733 F1: 0.8793 Train AUC: 0.9806 Val AUC: 0.9578 Time: 11.75\n",
      "Epoch: 313 Train Loss: 0.1783 Val Loss: 0.2699 Acc: 0.8696 Pre: 0.8793 Recall: 0.8733 F1: 0.8763 Train AUC: 0.9812 Val AUC: 0.9574 Time: 12.08\n",
      "Epoch: 314 Train Loss: 0.1759 Val Loss: 0.2678 Acc: 0.8732 Pre: 0.8828 Recall: 0.8767 F1: 0.8797 Train AUC: 0.9825 Val AUC: 0.9575 Time: 12.63\n",
      "Epoch: 315 Train Loss: 0.1761 Val Loss: 0.2667 Acc: 0.8732 Pre: 0.8854 Recall: 0.8733 F1: 0.8793 Train AUC: 0.9810 Val AUC: 0.9571 Time: 13.00\n",
      "Epoch: 316 Train Loss: 0.1740 Val Loss: 0.2658 Acc: 0.8895 Pre: 0.8863 Recall: 0.9075 F1: 0.8968 Train AUC: 0.9826 Val AUC: 0.9572 Time: 13.54\n",
      "Epoch: 317 Train Loss: 0.1730 Val Loss: 0.2633 Acc: 0.8913 Pre: 0.8919 Recall: 0.9041 F1: 0.8980 Train AUC: 0.9824 Val AUC: 0.9587 Time: 14.28\n",
      "Epoch: 318 Train Loss: 0.1800 Val Loss: 0.2657 Acc: 0.8768 Pre: 0.9029 Recall: 0.8596 F1: 0.8807 Train AUC: 0.9810 Val AUC: 0.9596 Time: 14.19\n",
      "Epoch: 319 Train Loss: 0.1848 Val Loss: 0.2650 Acc: 0.8895 Pre: 0.9053 Recall: 0.8836 F1: 0.8943 Train AUC: 0.9794 Val AUC: 0.9598 Time: 13.20\n",
      "Epoch: 320 Train Loss: 0.1796 Val Loss: 0.2636 Acc: 0.8804 Pre: 0.8844 Recall: 0.8904 F1: 0.8874 Train AUC: 0.9793 Val AUC: 0.9595 Time: 12.31\n",
      "Epoch: 321 Train Loss: 0.1767 Val Loss: 0.2619 Acc: 0.8804 Pre: 0.8870 Recall: 0.8870 F1: 0.8870 Train AUC: 0.9814 Val AUC: 0.9589 Time: 12.06\n",
      "Epoch: 322 Train Loss: 0.1803 Val Loss: 0.2637 Acc: 0.8678 Pre: 0.8842 Recall: 0.8630 F1: 0.8735 Train AUC: 0.9803 Val AUC: 0.9579 Time: 13.19\n",
      "Epoch: 323 Train Loss: 0.1771 Val Loss: 0.2647 Acc: 0.8732 Pre: 0.8828 Recall: 0.8767 F1: 0.8797 Train AUC: 0.9818 Val AUC: 0.9573 Time: 14.64\n",
      "Epoch: 324 Train Loss: 0.1757 Val Loss: 0.2646 Acc: 0.8732 Pre: 0.8776 Recall: 0.8836 F1: 0.8805 Train AUC: 0.9818 Val AUC: 0.9580 Time: 13.90\n",
      "Epoch: 325 Train Loss: 0.1675 Val Loss: 0.2651 Acc: 0.8714 Pre: 0.8746 Recall: 0.8836 F1: 0.8790 Train AUC: 0.9840 Val AUC: 0.9587 Time: 14.63\n",
      "Epoch: 326 Train Loss: 0.1711 Val Loss: 0.2647 Acc: 0.8732 Pre: 0.8828 Recall: 0.8767 F1: 0.8797 Train AUC: 0.9827 Val AUC: 0.9589 Time: 17.64\n",
      "Epoch: 327 Train Loss: 0.1727 Val Loss: 0.2607 Acc: 0.8804 Pre: 0.8924 Recall: 0.8801 F1: 0.8862 Train AUC: 0.9817 Val AUC: 0.9599 Time: 17.33\n",
      "Epoch: 328 Train Loss: 0.1665 Val Loss: 0.2592 Acc: 0.8768 Pre: 0.8944 Recall: 0.8699 F1: 0.8819 Train AUC: 0.9835 Val AUC: 0.9596 Time: 12.79\n",
      "Epoch: 329 Train Loss: 0.1675 Val Loss: 0.2588 Acc: 0.8895 Pre: 0.8969 Recall: 0.8938 F1: 0.8954 Train AUC: 0.9836 Val AUC: 0.9602 Time: 14.44\n",
      "Epoch: 330 Train Loss: 0.1611 Val Loss: 0.2595 Acc: 0.8877 Pre: 0.8938 Recall: 0.8938 F1: 0.8938 Train AUC: 0.9851 Val AUC: 0.9599 Time: 14.85\n",
      "Epoch: 331 Train Loss: 0.1677 Val Loss: 0.2625 Acc: 0.8804 Pre: 0.8818 Recall: 0.8938 F1: 0.8878 Train AUC: 0.9846 Val AUC: 0.9599 Time: 16.25\n",
      "Epoch: 332 Train Loss: 0.1655 Val Loss: 0.2648 Acc: 0.8768 Pre: 0.8810 Recall: 0.8870 F1: 0.8840 Train AUC: 0.9841 Val AUC: 0.9594 Time: 14.85\n",
      "Epoch: 333 Train Loss: 0.1678 Val Loss: 0.2679 Acc: 0.8714 Pre: 0.8850 Recall: 0.8699 F1: 0.8774 Train AUC: 0.9833 Val AUC: 0.9594 Time: 17.44\n",
      "Epoch: 334 Train Loss: 0.1657 Val Loss: 0.2658 Acc: 0.8714 Pre: 0.8797 Recall: 0.8767 F1: 0.8782 Train AUC: 0.9834 Val AUC: 0.9586 Time: 16.65\n",
      "Epoch: 335 Train Loss: 0.1636 Val Loss: 0.2687 Acc: 0.8877 Pre: 0.8758 Recall: 0.9178 F1: 0.8963 Train AUC: 0.9837 Val AUC: 0.9581 Time: 13.27\n",
      "Epoch: 336 Train Loss: 0.1642 Val Loss: 0.2660 Acc: 0.8822 Pre: 0.8847 Recall: 0.8938 F1: 0.8893 Train AUC: 0.9847 Val AUC: 0.9579 Time: 13.19\n",
      "Epoch: 337 Train Loss: 0.1732 Val Loss: 0.2673 Acc: 0.8732 Pre: 0.8936 Recall: 0.8630 F1: 0.8780 Train AUC: 0.9824 Val AUC: 0.9587 Time: 13.67\n",
      "Epoch: 338 Train Loss: 0.1711 Val Loss: 0.2634 Acc: 0.8859 Pre: 0.8935 Recall: 0.8904 F1: 0.8919 Train AUC: 0.9840 Val AUC: 0.9601 Time: 13.83\n",
      "Epoch: 339 Train Loss: 0.1635 Val Loss: 0.2707 Acc: 0.8804 Pre: 0.8767 Recall: 0.9007 F1: 0.8885 Train AUC: 0.9842 Val AUC: 0.9602 Time: 14.40\n",
      "Epoch: 340 Train Loss: 0.1613 Val Loss: 0.2679 Acc: 0.8804 Pre: 0.8767 Recall: 0.9007 F1: 0.8885 Train AUC: 0.9847 Val AUC: 0.9601 Time: 16.35\n",
      "Epoch: 341 Train Loss: 0.1751 Val Loss: 0.2640 Acc: 0.8841 Pre: 0.8958 Recall: 0.8836 F1: 0.8897 Train AUC: 0.9814 Val AUC: 0.9593 Time: 17.29\n",
      "Epoch: 342 Train Loss: 0.1675 Val Loss: 0.2662 Acc: 0.8804 Pre: 0.8951 Recall: 0.8767 F1: 0.8858 Train AUC: 0.9851 Val AUC: 0.9582 Time: 16.10\n",
      "Epoch: 343 Train Loss: 0.1646 Val Loss: 0.2643 Acc: 0.8822 Pre: 0.8796 Recall: 0.9007 F1: 0.8900 Train AUC: 0.9847 Val AUC: 0.9580 Time: 18.54\n",
      "Epoch: 344 Train Loss: 0.1685 Val Loss: 0.2623 Acc: 0.8768 Pre: 0.8784 Recall: 0.8904 F1: 0.8844 Train AUC: 0.9841 Val AUC: 0.9599 Time: 15.24\n",
      "Epoch: 345 Train Loss: 0.1735 Val Loss: 0.2659 Acc: 0.9130 Pre: 0.9067 Recall: 0.9315 F1: 0.9189 Train AUC: 0.9836 Val AUC: 0.9707 Time: 13.85\n",
      "Epoch: 346 Train Loss: 0.1688 Val Loss: 0.2700 Acc: 0.8895 Pre: 0.8997 Recall: 0.8904 F1: 0.8950 Train AUC: 0.9828 Val AUC: 0.9602 Time: 13.32\n",
      "Epoch: 347 Train Loss: 0.1780 Val Loss: 0.2699 Acc: 0.8895 Pre: 0.8997 Recall: 0.8904 F1: 0.8950 Train AUC: 0.9804 Val AUC: 0.9599 Time: 16.95\n",
      "Epoch: 348 Train Loss: 0.1629 Val Loss: 0.2614 Acc: 0.8895 Pre: 0.8969 Recall: 0.8938 F1: 0.8954 Train AUC: 0.9839 Val AUC: 0.9608 Time: 13.56\n",
      "Epoch: 349 Train Loss: 0.1609 Val Loss: 0.2657 Acc: 0.8949 Pre: 0.8750 Recall: 0.9349 F1: 0.9040 Train AUC: 0.9848 Val AUC: 0.9597 Time: 13.48\n",
      "Epoch: 350 Train Loss: 0.1538 Val Loss: 0.2627 Acc: 0.8841 Pre: 0.8800 Recall: 0.9041 F1: 0.8919 Train AUC: 0.9883 Val AUC: 0.9582 Time: 14.74\n",
      "Epoch: 351 Train Loss: 0.1559 Val Loss: 0.2698 Acc: 0.8732 Pre: 0.8881 Recall: 0.8699 F1: 0.8789 Train AUC: 0.9866 Val AUC: 0.9573 Time: 14.79\n",
      "Epoch: 352 Train Loss: 0.1637 Val Loss: 0.2733 Acc: 0.8750 Pre: 0.8912 Recall: 0.8699 F1: 0.8804 Train AUC: 0.9851 Val AUC: 0.9576 Time: 14.82\n",
      "Epoch: 353 Train Loss: 0.1665 Val Loss: 0.2681 Acc: 0.8841 Pre: 0.8775 Recall: 0.9075 F1: 0.8923 Train AUC: 0.9836 Val AUC: 0.9582 Time: 19.19\n",
      "Epoch: 354 Train Loss: 0.1637 Val Loss: 0.2830 Acc: 0.8768 Pre: 0.8500 Recall: 0.9315 F1: 0.8889 Train AUC: 0.9842 Val AUC: 0.9594 Time: 15.42\n",
      "Epoch: 355 Train Loss: 0.1757 Val Loss: 0.2613 Acc: 0.9022 Pre: 0.8940 Recall: 0.9247 F1: 0.9091 Train AUC: 0.9848 Val AUC: 0.9609 Time: 14.12\n",
      "Epoch: 356 Train Loss: 0.1584 Val Loss: 0.2657 Acc: 0.8786 Pre: 0.9032 Recall: 0.8630 F1: 0.8827 Train AUC: 0.9853 Val AUC: 0.9609 Time: 12.14\n",
      "Epoch: 357 Train Loss: 0.1577 Val Loss: 0.2627 Acc: 0.8895 Pre: 0.9024 Recall: 0.8870 F1: 0.8946 Train AUC: 0.9863 Val AUC: 0.9604 Time: 13.79\n",
      "Epoch: 358 Train Loss: 0.1659 Val Loss: 0.2695 Acc: 0.8931 Pre: 0.8795 Recall: 0.9247 F1: 0.9015 Train AUC: 0.9838 Val AUC: 0.9591 Time: 13.01\n",
      "Epoch: 359 Train Loss: 0.1631 Val Loss: 0.2696 Acc: 0.8913 Pre: 0.8766 Recall: 0.9247 F1: 0.9000 Train AUC: 0.9857 Val AUC: 0.9597 Time: 16.18\n",
      "Epoch: 360 Train Loss: 0.1566 Val Loss: 0.2683 Acc: 0.8877 Pre: 0.9021 Recall: 0.8836 F1: 0.8927 Train AUC: 0.9868 Val AUC: 0.9607 Time: 15.69\n",
      "Epoch: 361 Train Loss: 0.1715 Val Loss: 0.2719 Acc: 0.8895 Pre: 0.9081 Recall: 0.8801 F1: 0.8939 Train AUC: 0.9833 Val AUC: 0.9591 Time: 16.14\n",
      "Epoch: 362 Train Loss: 0.1635 Val Loss: 0.2713 Acc: 0.8949 Pre: 0.8774 Recall: 0.9315 F1: 0.9037 Train AUC: 0.9841 Val AUC: 0.9585 Time: 16.66\n",
      "Epoch: 363 Train Loss: 0.1584 Val Loss: 0.2790 Acc: 0.8822 Pre: 0.8580 Recall: 0.9315 F1: 0.8933 Train AUC: 0.9864 Val AUC: 0.9587 Time: 16.49\n",
      "Epoch: 364 Train Loss: 0.1561 Val Loss: 0.2641 Acc: 0.8967 Pre: 0.8930 Recall: 0.9144 F1: 0.9036 Train AUC: 0.9868 Val AUC: 0.9604 Time: 14.31\n",
      "Epoch: 365 Train Loss: 0.1561 Val Loss: 0.2663 Acc: 0.8913 Pre: 0.9085 Recall: 0.8836 F1: 0.8958 Train AUC: 0.9852 Val AUC: 0.9607 Time: 13.97\n",
      "Epoch: 366 Train Loss: 0.1554 Val Loss: 0.2617 Acc: 0.8913 Pre: 0.9000 Recall: 0.8938 F1: 0.8969 Train AUC: 0.9869 Val AUC: 0.9606 Time: 13.40\n",
      "Epoch: 367 Train Loss: 0.1560 Val Loss: 0.2662 Acc: 0.8931 Pre: 0.8820 Recall: 0.9212 F1: 0.9012 Train AUC: 0.9856 Val AUC: 0.9594 Time: 13.96\n",
      "Epoch: 368 Train Loss: 0.1547 Val Loss: 0.2670 Acc: 0.8931 Pre: 0.8820 Recall: 0.9212 F1: 0.9012 Train AUC: 0.9869 Val AUC: 0.9592 Time: 17.07\n",
      "Epoch: 369 Train Loss: 0.1584 Val Loss: 0.2635 Acc: 0.8877 Pre: 0.8912 Recall: 0.8973 F1: 0.8942 Train AUC: 0.9865 Val AUC: 0.9598 Time: 17.98\n",
      "Epoch: 370 Train Loss: 0.1589 Val Loss: 0.2692 Acc: 0.8822 Pre: 0.8927 Recall: 0.8836 F1: 0.8881 Train AUC: 0.9858 Val AUC: 0.9594 Time: 13.30\n",
      "Epoch: 371 Train Loss: 0.1635 Val Loss: 0.2696 Acc: 0.8841 Pre: 0.8775 Recall: 0.9075 F1: 0.8923 Train AUC: 0.9849 Val AUC: 0.9593 Time: 13.77\n",
      "Epoch: 372 Train Loss: 0.1509 Val Loss: 0.2792 Acc: 0.8895 Pre: 0.8690 Recall: 0.9315 F1: 0.8992 Train AUC: 0.9866 Val AUC: 0.9586 Time: 13.23\n",
      "Epoch: 373 Train Loss: 0.1608 Val Loss: 0.2677 Acc: 0.8877 Pre: 0.8783 Recall: 0.9144 F1: 0.8960 Train AUC: 0.9858 Val AUC: 0.9593 Time: 17.74\n",
      "Epoch: 374 Train Loss: 0.1467 Val Loss: 0.2668 Acc: 0.8859 Pre: 0.8935 Recall: 0.8904 F1: 0.8919 Train AUC: 0.9871 Val AUC: 0.9594 Time: 14.32\n",
      "Epoch: 375 Train Loss: 0.1495 Val Loss: 0.2712 Acc: 0.8822 Pre: 0.8955 Recall: 0.8801 F1: 0.8877 Train AUC: 0.9870 Val AUC: 0.9597 Time: 16.84\n",
      "Epoch: 376 Train Loss: 0.1641 Val Loss: 0.2694 Acc: 0.8913 Pre: 0.8893 Recall: 0.9075 F1: 0.8983 Train AUC: 0.9855 Val AUC: 0.9595 Time: 13.57\n",
      "Epoch: 377 Train Loss: 0.1395 Val Loss: 0.2776 Acc: 0.8931 Pre: 0.8746 Recall: 0.9315 F1: 0.9022 Train AUC: 0.9886 Val AUC: 0.9589 Time: 13.94\n",
      "Epoch: 378 Train Loss: 0.1533 Val Loss: 0.2689 Acc: 0.8877 Pre: 0.8859 Recall: 0.9041 F1: 0.8949 Train AUC: 0.9866 Val AUC: 0.9599 Time: 18.04\n",
      "Epoch: 379 Train Loss: 0.1473 Val Loss: 0.2721 Acc: 0.8804 Pre: 0.8897 Recall: 0.8836 F1: 0.8866 Train AUC: 0.9874 Val AUC: 0.9599 Time: 16.13\n",
      "Epoch: 380 Train Loss: 0.1489 Val Loss: 0.2704 Acc: 0.8768 Pre: 0.8836 Recall: 0.8836 F1: 0.8836 Train AUC: 0.9880 Val AUC: 0.9587 Time: 12.96\n",
      "Epoch: 381 Train Loss: 0.1554 Val Loss: 0.2736 Acc: 0.8877 Pre: 0.8734 Recall: 0.9212 F1: 0.8967 Train AUC: 0.9866 Val AUC: 0.9575 Time: 12.72\n",
      "Epoch: 382 Train Loss: 0.1462 Val Loss: 0.2811 Acc: 0.8859 Pre: 0.8635 Recall: 0.9315 F1: 0.8962 Train AUC: 0.9885 Val AUC: 0.9576 Time: 12.80\n",
      "Epoch: 383 Train Loss: 0.1557 Val Loss: 0.2679 Acc: 0.9004 Pre: 0.8963 Recall: 0.9178 F1: 0.9069 Train AUC: 0.9877 Val AUC: 0.9602 Time: 16.71\n",
      "Epoch: 384 Train Loss: 0.1404 Val Loss: 0.2758 Acc: 0.8841 Pre: 0.8986 Recall: 0.8801 F1: 0.8893 Train AUC: 0.9883 Val AUC: 0.9605 Time: 15.35\n",
      "Epoch: 385 Train Loss: 0.1549 Val Loss: 0.2711 Acc: 0.8931 Pre: 0.9003 Recall: 0.8973 F1: 0.8988 Train AUC: 0.9860 Val AUC: 0.9607 Time: 15.63\n",
      "Epoch: 386 Train Loss: 0.1422 Val Loss: 0.2726 Acc: 0.8967 Pre: 0.8803 Recall: 0.9315 F1: 0.9052 Train AUC: 0.9877 Val AUC: 0.9602 Time: 15.94\n",
      "Epoch: 387 Train Loss: 0.1377 Val Loss: 0.2730 Acc: 0.8859 Pre: 0.8635 Recall: 0.9315 F1: 0.8962 Train AUC: 0.9894 Val AUC: 0.9599 Time: 14.14\n",
      "Epoch: 388 Train Loss: 0.1567 Val Loss: 0.2648 Acc: 0.8913 Pre: 0.8919 Recall: 0.9041 F1: 0.8980 Train AUC: 0.9868 Val AUC: 0.9598 Time: 11.66\n",
      "Epoch: 389 Train Loss: 0.1505 Val Loss: 0.2705 Acc: 0.8877 Pre: 0.8912 Recall: 0.8973 F1: 0.8942 Train AUC: 0.9873 Val AUC: 0.9606 Time: 13.62\n",
      "Epoch: 390 Train Loss: 0.1534 Val Loss: 0.2709 Acc: 0.8822 Pre: 0.8822 Recall: 0.8973 F1: 0.8896 Train AUC: 0.9857 Val AUC: 0.9602 Time: 14.79\n",
      "Epoch: 391 Train Loss: 0.1484 Val Loss: 0.2746 Acc: 0.8895 Pre: 0.8738 Recall: 0.9247 F1: 0.8985 Train AUC: 0.9867 Val AUC: 0.9601 Time: 13.66\n",
      "Epoch: 392 Train Loss: 0.1458 Val Loss: 0.2747 Acc: 0.8895 Pre: 0.8690 Recall: 0.9315 F1: 0.8992 Train AUC: 0.9883 Val AUC: 0.9597 Time: 14.68\n",
      "Epoch: 393 Train Loss: 0.1507 Val Loss: 0.2649 Acc: 0.8967 Pre: 0.8878 Recall: 0.9212 F1: 0.9042 Train AUC: 0.9870 Val AUC: 0.9602 Time: 19.41\n",
      "Epoch: 394 Train Loss: 0.1413 Val Loss: 0.2639 Acc: 0.8913 Pre: 0.8946 Recall: 0.9007 F1: 0.8976 Train AUC: 0.9889 Val AUC: 0.9601 Time: 18.38\n",
      "Epoch: 395 Train Loss: 0.1437 Val Loss: 0.2661 Acc: 0.8877 Pre: 0.8859 Recall: 0.9041 F1: 0.8949 Train AUC: 0.9879 Val AUC: 0.9604 Time: 14.61\n",
      "Epoch: 396 Train Loss: 0.1370 Val Loss: 0.2703 Acc: 0.8913 Pre: 0.8841 Recall: 0.9144 F1: 0.8990 Train AUC: 0.9894 Val AUC: 0.9597 Time: 15.85\n",
      "Epoch: 397 Train Loss: 0.1471 Val Loss: 0.2703 Acc: 0.8841 Pre: 0.8878 Recall: 0.8938 F1: 0.8908 Train AUC: 0.9876 Val AUC: 0.9590 Time: 12.95\n",
      "Epoch: 398 Train Loss: 0.1365 Val Loss: 0.2728 Acc: 0.8841 Pre: 0.8851 Recall: 0.8973 F1: 0.8912 Train AUC: 0.9892 Val AUC: 0.9586 Time: 13.79\n",
      "Epoch: 399 Train Loss: 0.1485 Val Loss: 0.2772 Acc: 0.8877 Pre: 0.8885 Recall: 0.9007 F1: 0.8946 Train AUC: 0.9869 Val AUC: 0.9591 Time: 13.13\n",
      "Epoch: 400 Train Loss: 0.1476 Val Loss: 0.2864 Acc: 0.8895 Pre: 0.8690 Recall: 0.9315 F1: 0.8992 Train AUC: 0.9873 Val AUC: 0.9575 Time: 13.40\n",
      "Epoch: 401 Train Loss: 0.1390 Val Loss: 0.2833 Acc: 0.8895 Pre: 0.8690 Recall: 0.9315 F1: 0.8992 Train AUC: 0.9897 Val AUC: 0.9581 Time: 14.16\n",
      "Epoch: 402 Train Loss: 0.1398 Val Loss: 0.2695 Acc: 0.8967 Pre: 0.8930 Recall: 0.9144 F1: 0.9036 Train AUC: 0.9893 Val AUC: 0.9594 Time: 17.74\n",
      "Epoch: 403 Train Loss: 0.1430 Val Loss: 0.2712 Acc: 0.8841 Pre: 0.8931 Recall: 0.8870 F1: 0.8900 Train AUC: 0.9881 Val AUC: 0.9582 Time: 19.38\n",
      "Epoch: 404 Train Loss: 0.1530 Val Loss: 0.2697 Acc: 0.8804 Pre: 0.8870 Recall: 0.8870 F1: 0.8870 Train AUC: 0.9877 Val AUC: 0.9583 Time: 14.22\n",
      "Epoch: 405 Train Loss: 0.1348 Val Loss: 0.2734 Acc: 0.8877 Pre: 0.8833 Recall: 0.9075 F1: 0.8953 Train AUC: 0.9894 Val AUC: 0.9589 Time: 13.02\n",
      "Epoch: 406 Train Loss: 0.1397 Val Loss: 0.2737 Acc: 0.8895 Pre: 0.8889 Recall: 0.9041 F1: 0.8964 Train AUC: 0.9887 Val AUC: 0.9591 Time: 12.51\n",
      "Epoch: 407 Train Loss: 0.1323 Val Loss: 0.2733 Acc: 0.8931 Pre: 0.8949 Recall: 0.9041 F1: 0.8995 Train AUC: 0.9899 Val AUC: 0.9595 Time: 12.59\n",
      "Epoch: 408 Train Loss: 0.1445 Val Loss: 0.2781 Acc: 0.9022 Pre: 0.8940 Recall: 0.9247 F1: 0.9091 Train AUC: 0.9878 Val AUC: 0.9595 Time: 14.52\n",
      "Epoch: 409 Train Loss: 0.1439 Val Loss: 0.2752 Acc: 0.8986 Pre: 0.8933 Recall: 0.9178 F1: 0.9054 Train AUC: 0.9874 Val AUC: 0.9595 Time: 15.43\n",
      "Epoch: 410 Train Loss: 0.1390 Val Loss: 0.2733 Acc: 0.8931 Pre: 0.8896 Recall: 0.9110 F1: 0.9002 Train AUC: 0.9882 Val AUC: 0.9595 Time: 14.22\n",
      "Epoch: 411 Train Loss: 0.1338 Val Loss: 0.2716 Acc: 0.8804 Pre: 0.8818 Recall: 0.8938 F1: 0.8878 Train AUC: 0.9896 Val AUC: 0.9582 Time: 15.78\n",
      "Epoch: 412 Train Loss: 0.1366 Val Loss: 0.2726 Acc: 0.8822 Pre: 0.8822 Recall: 0.8973 F1: 0.8896 Train AUC: 0.9890 Val AUC: 0.9572 Time: 14.75\n",
      "Epoch: 413 Train Loss: 0.1393 Val Loss: 0.2742 Acc: 0.8877 Pre: 0.8833 Recall: 0.9075 F1: 0.8953 Train AUC: 0.9892 Val AUC: 0.9580 Time: 13.54\n",
      "Epoch: 414 Train Loss: 0.1349 Val Loss: 0.2704 Acc: 0.8859 Pre: 0.8855 Recall: 0.9007 F1: 0.8930 Train AUC: 0.9900 Val AUC: 0.9593 Time: 12.20\n",
      "Epoch: 415 Train Loss: 0.1337 Val Loss: 0.2702 Acc: 0.8913 Pre: 0.8946 Recall: 0.9007 F1: 0.8976 Train AUC: 0.9894 Val AUC: 0.9600 Time: 11.93\n",
      "Epoch: 416 Train Loss: 0.1332 Val Loss: 0.2718 Acc: 0.8949 Pre: 0.8824 Recall: 0.9247 F1: 0.9030 Train AUC: 0.9899 Val AUC: 0.9594 Time: 12.39\n",
      "Epoch: 417 Train Loss: 0.1304 Val Loss: 0.2758 Acc: 0.8877 Pre: 0.8710 Recall: 0.9247 F1: 0.8970 Train AUC: 0.9903 Val AUC: 0.9584 Time: 12.80\n",
      "Epoch: 418 Train Loss: 0.1345 Val Loss: 0.2765 Acc: 0.8895 Pre: 0.8738 Recall: 0.9247 F1: 0.8985 Train AUC: 0.9904 Val AUC: 0.9594 Time: 13.47\n",
      "Epoch: 419 Train Loss: 0.1354 Val Loss: 0.2727 Acc: 0.8986 Pre: 0.8933 Recall: 0.9178 F1: 0.9054 Train AUC: 0.9895 Val AUC: 0.9599 Time: 14.24\n",
      "Epoch: 420 Train Loss: 0.1331 Val Loss: 0.2704 Acc: 0.8986 Pre: 0.8960 Recall: 0.9144 F1: 0.9051 Train AUC: 0.9894 Val AUC: 0.9599 Time: 14.37\n",
      "Epoch: 421 Train Loss: 0.1298 Val Loss: 0.2694 Acc: 0.8913 Pre: 0.8841 Recall: 0.9144 F1: 0.8990 Train AUC: 0.9901 Val AUC: 0.9592 Time: 13.29\n",
      "Epoch: 422 Train Loss: 0.1357 Val Loss: 0.2715 Acc: 0.8913 Pre: 0.8841 Recall: 0.9144 F1: 0.8990 Train AUC: 0.9898 Val AUC: 0.9590 Time: 12.74\n",
      "Epoch: 423 Train Loss: 0.1384 Val Loss: 0.2698 Acc: 0.8822 Pre: 0.8874 Recall: 0.8904 F1: 0.8889 Train AUC: 0.9891 Val AUC: 0.9593 Time: 13.66\n",
      "Epoch: 424 Train Loss: 0.1292 Val Loss: 0.2715 Acc: 0.8841 Pre: 0.8878 Recall: 0.8938 F1: 0.8908 Train AUC: 0.9909 Val AUC: 0.9592 Time: 16.87\n",
      "Epoch: 425 Train Loss: 0.1288 Val Loss: 0.2748 Acc: 0.8877 Pre: 0.8783 Recall: 0.9144 F1: 0.8960 Train AUC: 0.9910 Val AUC: 0.9589 Time: 16.22\n",
      "Epoch: 426 Train Loss: 0.1229 Val Loss: 0.2862 Acc: 0.8877 Pre: 0.8710 Recall: 0.9247 F1: 0.8970 Train AUC: 0.9912 Val AUC: 0.9581 Time: 17.00\n",
      "Epoch: 427 Train Loss: 0.1330 Val Loss: 0.2774 Acc: 0.8859 Pre: 0.8730 Recall: 0.9178 F1: 0.8948 Train AUC: 0.9913 Val AUC: 0.9580 Time: 13.95\n",
      "Epoch: 428 Train Loss: 0.1333 Val Loss: 0.2739 Acc: 0.8841 Pre: 0.8800 Recall: 0.9041 F1: 0.8919 Train AUC: 0.9903 Val AUC: 0.9585 Time: 14.58\n",
      "Epoch: 429 Train Loss: 0.1277 Val Loss: 0.2768 Acc: 0.8913 Pre: 0.8791 Recall: 0.9212 F1: 0.8997 Train AUC: 0.9916 Val AUC: 0.9600 Time: 16.70\n",
      "Epoch: 430 Train Loss: 0.1311 Val Loss: 0.2802 Acc: 0.8949 Pre: 0.8849 Recall: 0.9212 F1: 0.9027 Train AUC: 0.9903 Val AUC: 0.9604 Time: 15.35\n",
      "Epoch: 431 Train Loss: 0.1291 Val Loss: 0.2766 Acc: 0.8913 Pre: 0.8893 Recall: 0.9075 F1: 0.8983 Train AUC: 0.9898 Val AUC: 0.9601 Time: 14.28\n",
      "Epoch: 432 Train Loss: 0.1259 Val Loss: 0.2757 Acc: 0.8859 Pre: 0.8935 Recall: 0.8904 F1: 0.8919 Train AUC: 0.9907 Val AUC: 0.9599 Time: 13.58\n",
      "Epoch: 433 Train Loss: 0.1308 Val Loss: 0.2791 Acc: 0.8804 Pre: 0.8818 Recall: 0.8938 F1: 0.8878 Train AUC: 0.9903 Val AUC: 0.9584 Time: 15.54\n",
      "Epoch: 434 Train Loss: 0.1334 Val Loss: 0.2836 Acc: 0.8822 Pre: 0.8822 Recall: 0.8973 F1: 0.8896 Train AUC: 0.9913 Val AUC: 0.9582 Time: 13.86\n",
      "Epoch: 435 Train Loss: 0.1309 Val Loss: 0.2835 Acc: 0.8877 Pre: 0.8783 Recall: 0.9144 F1: 0.8960 Train AUC: 0.9898 Val AUC: 0.9584 Time: 15.36\n",
      "Epoch: 436 Train Loss: 0.1300 Val Loss: 0.2785 Acc: 0.8967 Pre: 0.8930 Recall: 0.9144 F1: 0.9036 Train AUC: 0.9909 Val AUC: 0.9591 Time: 14.26\n",
      "Epoch: 437 Train Loss: 0.1262 Val Loss: 0.2823 Acc: 0.8949 Pre: 0.8953 Recall: 0.9075 F1: 0.9014 Train AUC: 0.9906 Val AUC: 0.9574 Time: 13.57\n",
      "Epoch: 438 Train Loss: 0.1376 Val Loss: 0.2804 Acc: 0.8931 Pre: 0.8845 Recall: 0.9178 F1: 0.9008 Train AUC: 0.9893 Val AUC: 0.9584 Time: 13.14\n",
      "Epoch: 439 Train Loss: 0.1244 Val Loss: 0.2901 Acc: 0.8841 Pre: 0.8775 Recall: 0.9075 F1: 0.8923 Train AUC: 0.9909 Val AUC: 0.9580 Time: 13.10\n",
      "Epoch: 440 Train Loss: 0.1236 Val Loss: 0.2920 Acc: 0.8841 Pre: 0.8750 Recall: 0.9110 F1: 0.8926 Train AUC: 0.9904 Val AUC: 0.9572 Time: 13.32\n",
      "Epoch: 441 Train Loss: 0.1298 Val Loss: 0.2807 Acc: 0.8786 Pre: 0.8763 Recall: 0.8973 F1: 0.8866 Train AUC: 0.9906 Val AUC: 0.9561 Time: 13.85\n",
      "Epoch: 442 Train Loss: 0.1329 Val Loss: 0.2818 Acc: 0.8841 Pre: 0.8701 Recall: 0.9178 F1: 0.8933 Train AUC: 0.9904 Val AUC: 0.9557 Time: 13.71\n",
      "Epoch: 443 Train Loss: 0.1201 Val Loss: 0.2896 Acc: 0.8841 Pre: 0.8654 Recall: 0.9247 F1: 0.8940 Train AUC: 0.9922 Val AUC: 0.9566 Time: 13.33\n",
      "Epoch: 444 Train Loss: 0.1313 Val Loss: 0.2819 Acc: 0.8949 Pre: 0.8900 Recall: 0.9144 F1: 0.9020 Train AUC: 0.9903 Val AUC: 0.9598 Time: 13.14\n",
      "Epoch: 445 Train Loss: 0.1217 Val Loss: 0.2831 Acc: 0.8913 Pre: 0.9000 Recall: 0.8938 F1: 0.8969 Train AUC: 0.9916 Val AUC: 0.9606 Time: 13.71\n",
      "Epoch: 446 Train Loss: 0.1295 Val Loss: 0.2824 Acc: 0.8931 Pre: 0.9003 Recall: 0.8973 F1: 0.8988 Train AUC: 0.9904 Val AUC: 0.9599 Time: 13.74\n",
      "Epoch: 447 Train Loss: 0.1247 Val Loss: 0.2782 Acc: 0.8859 Pre: 0.8804 Recall: 0.9075 F1: 0.8938 Train AUC: 0.9905 Val AUC: 0.9589 Time: 13.64\n",
      "Epoch: 448 Train Loss: 0.1239 Val Loss: 0.2828 Acc: 0.8768 Pre: 0.8636 Recall: 0.9110 F1: 0.8867 Train AUC: 0.9910 Val AUC: 0.9555 Time: 13.53\n",
      "Epoch: 449 Train Loss: 0.1279 Val Loss: 0.2787 Acc: 0.8841 Pre: 0.8701 Recall: 0.9178 F1: 0.8933 Train AUC: 0.9920 Val AUC: 0.9572 Time: 13.27\n",
      "Epoch: 450 Train Loss: 0.1282 Val Loss: 0.2805 Acc: 0.8931 Pre: 0.8820 Recall: 0.9212 F1: 0.9012 Train AUC: 0.9913 Val AUC: 0.9581 Time: 12.34\n",
      "Epoch: 451 Train Loss: 0.1141 Val Loss: 0.2887 Acc: 0.8913 Pre: 0.8791 Recall: 0.9212 F1: 0.8997 Train AUC: 0.9926 Val AUC: 0.9582 Time: 13.25\n",
      "Epoch: 452 Train Loss: 0.1294 Val Loss: 0.2798 Acc: 0.8931 Pre: 0.8820 Recall: 0.9212 F1: 0.9012 Train AUC: 0.9903 Val AUC: 0.9593 Time: 13.46\n",
      "Epoch: 453 Train Loss: 0.1244 Val Loss: 0.2752 Acc: 0.8841 Pre: 0.8800 Recall: 0.9041 F1: 0.8919 Train AUC: 0.9908 Val AUC: 0.9583 Time: 13.88\n",
      "Epoch: 454 Train Loss: 0.1161 Val Loss: 0.2814 Acc: 0.8804 Pre: 0.8818 Recall: 0.8938 F1: 0.8878 Train AUC: 0.9933 Val AUC: 0.9556 Time: 14.43\n",
      "Epoch: 455 Train Loss: 0.1257 Val Loss: 0.2823 Acc: 0.8750 Pre: 0.8754 Recall: 0.8904 F1: 0.8829 Train AUC: 0.9923 Val AUC: 0.9578 Time: 14.36\n",
      "Epoch: 456 Train Loss: 0.1255 Val Loss: 0.2891 Acc: 0.8804 Pre: 0.8792 Recall: 0.8973 F1: 0.8881 Train AUC: 0.9912 Val AUC: 0.9602 Time: 14.24\n",
      "Epoch: 457 Train Loss: 0.1236 Val Loss: 0.2879 Acc: 0.8895 Pre: 0.8863 Recall: 0.9075 F1: 0.8968 Train AUC: 0.9908 Val AUC: 0.9608 Time: 13.02\n",
      "Epoch: 458 Train Loss: 0.1338 Val Loss: 0.2880 Acc: 0.8895 Pre: 0.8762 Recall: 0.9212 F1: 0.8982 Train AUC: 0.9896 Val AUC: 0.9595 Time: 12.96\n",
      "Epoch: 459 Train Loss: 0.1301 Val Loss: 0.2913 Acc: 0.8841 Pre: 0.8654 Recall: 0.9247 F1: 0.8940 Train AUC: 0.9896 Val AUC: 0.9567 Time: 13.07\n",
      "Epoch: 460 Train Loss: 0.1194 Val Loss: 0.2921 Acc: 0.8804 Pre: 0.8645 Recall: 0.9178 F1: 0.8904 Train AUC: 0.9919 Val AUC: 0.9557 Time: 13.28\n",
      "Epoch: 461 Train Loss: 0.1257 Val Loss: 0.2898 Acc: 0.8786 Pre: 0.8763 Recall: 0.8973 F1: 0.8866 Train AUC: 0.9907 Val AUC: 0.9570 Time: 13.87\n",
      "Epoch: 462 Train Loss: 0.1152 Val Loss: 0.2890 Acc: 0.8732 Pre: 0.8776 Recall: 0.8836 F1: 0.8805 Train AUC: 0.9927 Val AUC: 0.9575 Time: 14.13\n",
      "Epoch: 463 Train Loss: 0.1355 Val Loss: 0.2883 Acc: 0.8877 Pre: 0.8808 Recall: 0.9110 F1: 0.8956 Train AUC: 0.9899 Val AUC: 0.9575 Time: 13.89\n",
      "Epoch: 464 Train Loss: 0.1119 Val Loss: 0.2973 Acc: 0.8877 Pre: 0.8710 Recall: 0.9247 F1: 0.8970 Train AUC: 0.9932 Val AUC: 0.9588 Time: 12.94\n",
      "Epoch: 465 Train Loss: 0.1244 Val Loss: 0.2848 Acc: 0.8949 Pre: 0.8874 Recall: 0.9178 F1: 0.9024 Train AUC: 0.9913 Val AUC: 0.9595 Time: 12.71\n",
      "Epoch: 466 Train Loss: 0.1186 Val Loss: 0.2810 Acc: 0.8877 Pre: 0.8885 Recall: 0.9007 F1: 0.8946 Train AUC: 0.9914 Val AUC: 0.9592 Time: 13.35\n",
      "Epoch: 467 Train Loss: 0.1213 Val Loss: 0.2824 Acc: 0.8804 Pre: 0.8767 Recall: 0.9007 F1: 0.8885 Train AUC: 0.9912 Val AUC: 0.9567 Time: 13.52\n",
      "Epoch: 468 Train Loss: 0.1174 Val Loss: 0.2926 Acc: 0.8822 Pre: 0.8650 Recall: 0.9212 F1: 0.8922 Train AUC: 0.9926 Val AUC: 0.9566 Time: 14.06\n",
      "Epoch: 469 Train Loss: 0.1295 Val Loss: 0.2832 Acc: 0.8786 Pre: 0.8713 Recall: 0.9041 F1: 0.8874 Train AUC: 0.9920 Val AUC: 0.9579 Time: 14.87\n",
      "Epoch: 470 Train Loss: 0.1182 Val Loss: 0.2805 Acc: 0.8822 Pre: 0.8822 Recall: 0.8973 F1: 0.8896 Train AUC: 0.9922 Val AUC: 0.9599 Time: 13.64\n",
      "Epoch: 471 Train Loss: 0.1190 Val Loss: 0.2868 Acc: 0.8877 Pre: 0.8833 Recall: 0.9075 F1: 0.8953 Train AUC: 0.9919 Val AUC: 0.9585 Time: 12.53\n",
      "Epoch: 472 Train Loss: 0.1224 Val Loss: 0.2927 Acc: 0.8895 Pre: 0.8738 Recall: 0.9247 F1: 0.8985 Train AUC: 0.9909 Val AUC: 0.9580 Time: 12.02\n",
      "Epoch: 473 Train Loss: 0.1224 Val Loss: 0.2854 Acc: 0.8877 Pre: 0.8734 Recall: 0.9212 F1: 0.8967 Train AUC: 0.9914 Val AUC: 0.9585 Time: 12.51\n",
      "Epoch: 474 Train Loss: 0.1220 Val Loss: 0.2812 Acc: 0.8822 Pre: 0.8796 Recall: 0.9007 F1: 0.8900 Train AUC: 0.9915 Val AUC: 0.9577 Time: 13.03\n",
      "Epoch: 475 Train Loss: 0.1199 Val Loss: 0.2827 Acc: 0.8786 Pre: 0.8763 Recall: 0.8973 F1: 0.8866 Train AUC: 0.9921 Val AUC: 0.9560 Time: 13.54\n",
      "Epoch: 476 Train Loss: 0.1209 Val Loss: 0.2835 Acc: 0.8768 Pre: 0.8709 Recall: 0.9007 F1: 0.8855 Train AUC: 0.9921 Val AUC: 0.9572 Time: 14.44\n",
      "Epoch: 477 Train Loss: 0.1145 Val Loss: 0.2868 Acc: 0.8877 Pre: 0.8783 Recall: 0.9144 F1: 0.8960 Train AUC: 0.9937 Val AUC: 0.9590 Time: 15.24\n",
      "Epoch: 478 Train Loss: 0.1195 Val Loss: 0.2843 Acc: 0.8913 Pre: 0.8893 Recall: 0.9075 F1: 0.8983 Train AUC: 0.9922 Val AUC: 0.9589 Time: 14.04\n",
      "Epoch: 479 Train Loss: 0.1169 Val Loss: 0.2872 Acc: 0.8949 Pre: 0.8980 Recall: 0.9041 F1: 0.9010 Train AUC: 0.9920 Val AUC: 0.9594 Time: 14.87\n",
      "Epoch: 480 Train Loss: 0.1261 Val Loss: 0.2867 Acc: 0.8931 Pre: 0.8896 Recall: 0.9110 F1: 0.9002 Train AUC: 0.9914 Val AUC: 0.9594 Time: 15.43\n",
      "Epoch: 481 Train Loss: 0.1094 Val Loss: 0.2936 Acc: 0.8913 Pre: 0.8766 Recall: 0.9247 F1: 0.9000 Train AUC: 0.9929 Val AUC: 0.9589 Time: 15.45\n",
      "Epoch: 482 Train Loss: 0.1035 Val Loss: 0.3043 Acc: 0.8804 Pre: 0.8599 Recall: 0.9247 F1: 0.8911 Train AUC: 0.9940 Val AUC: 0.9567 Time: 15.54\n",
      "Epoch: 483 Train Loss: 0.1196 Val Loss: 0.2921 Acc: 0.8768 Pre: 0.8684 Recall: 0.9041 F1: 0.8859 Train AUC: 0.9926 Val AUC: 0.9573 Time: 16.12\n",
      "Epoch: 484 Train Loss: 0.1129 Val Loss: 0.2869 Acc: 0.8841 Pre: 0.8851 Recall: 0.8973 F1: 0.8912 Train AUC: 0.9931 Val AUC: 0.9583 Time: 16.66\n",
      "Epoch: 485 Train Loss: 0.1141 Val Loss: 0.2868 Acc: 0.8913 Pre: 0.8867 Recall: 0.9110 F1: 0.8986 Train AUC: 0.9928 Val AUC: 0.9599 Time: 14.73\n",
      "Epoch: 486 Train Loss: 0.1165 Val Loss: 0.2956 Acc: 0.8949 Pre: 0.8824 Recall: 0.9247 F1: 0.9030 Train AUC: 0.9921 Val AUC: 0.9600 Time: 13.21\n",
      "Epoch: 487 Train Loss: 0.1209 Val Loss: 0.2927 Acc: 0.8913 Pre: 0.8816 Recall: 0.9178 F1: 0.8993 Train AUC: 0.9912 Val AUC: 0.9596 Time: 12.27\n",
      "Epoch: 488 Train Loss: 0.1207 Val Loss: 0.2826 Acc: 0.8931 Pre: 0.8896 Recall: 0.9110 F1: 0.9002 Train AUC: 0.9910 Val AUC: 0.9579 Time: 13.57\n",
      "Epoch: 489 Train Loss: 0.1314 Val Loss: 0.2845 Acc: 0.8822 Pre: 0.8721 Recall: 0.9110 F1: 0.8911 Train AUC: 0.9908 Val AUC: 0.9573 Time: 13.91\n",
      "Epoch: 490 Train Loss: 0.1242 Val Loss: 0.2842 Acc: 0.8841 Pre: 0.8750 Recall: 0.9110 F1: 0.8926 Train AUC: 0.9918 Val AUC: 0.9572 Time: 14.39\n",
      "Epoch: 491 Train Loss: 0.1144 Val Loss: 0.2831 Acc: 0.8768 Pre: 0.8758 Recall: 0.8938 F1: 0.8847 Train AUC: 0.9934 Val AUC: 0.9580 Time: 15.39\n",
      "Epoch: 492 Train Loss: 0.1146 Val Loss: 0.2838 Acc: 0.8859 Pre: 0.8829 Recall: 0.9041 F1: 0.8934 Train AUC: 0.9929 Val AUC: 0.9596 Time: 15.86\n",
      "Epoch: 493 Train Loss: 0.1153 Val Loss: 0.3005 Acc: 0.8967 Pre: 0.8852 Recall: 0.9247 F1: 0.9045 Train AUC: 0.9922 Val AUC: 0.9606 Time: 16.82\n",
      "Epoch: 494 Train Loss: 0.1147 Val Loss: 0.2961 Acc: 0.8949 Pre: 0.8874 Recall: 0.9178 F1: 0.9024 Train AUC: 0.9925 Val AUC: 0.9605 Time: 14.90\n",
      "Epoch: 495 Train Loss: 0.1192 Val Loss: 0.2857 Acc: 0.8841 Pre: 0.8851 Recall: 0.8973 F1: 0.8912 Train AUC: 0.9914 Val AUC: 0.9602 Time: 14.05\n",
      "Epoch: 496 Train Loss: 0.1215 Val Loss: 0.2878 Acc: 0.8786 Pre: 0.8763 Recall: 0.8973 F1: 0.8866 Train AUC: 0.9921 Val AUC: 0.9580 Time: 13.24\n",
      "Epoch: 497 Train Loss: 0.1130 Val Loss: 0.2923 Acc: 0.8768 Pre: 0.8684 Recall: 0.9041 F1: 0.8859 Train AUC: 0.9934 Val AUC: 0.9577 Time: 13.51\n",
      "Epoch: 498 Train Loss: 0.1142 Val Loss: 0.2898 Acc: 0.8859 Pre: 0.8779 Recall: 0.9110 F1: 0.8941 Train AUC: 0.9931 Val AUC: 0.9575 Time: 14.37\n",
      "Epoch: 499 Train Loss: 0.1120 Val Loss: 0.2876 Acc: 0.8877 Pre: 0.8885 Recall: 0.9007 F1: 0.8946 Train AUC: 0.9934 Val AUC: 0.9572 Time: 16.05\n",
      "Epoch: 500 Train Loss: 0.1154 Val Loss: 0.2931 Acc: 0.8841 Pre: 0.8878 Recall: 0.8938 F1: 0.8908 Train AUC: 0.9926 Val AUC: 0.9572 Time: 16.84\n",
      "Epoch: 501 Train Loss: 0.1177 Val Loss: 0.3043 Acc: 0.8859 Pre: 0.8682 Recall: 0.9247 F1: 0.8955 Train AUC: 0.9926 Val AUC: 0.9562 Time: 15.09\n",
      "Epoch: 502 Train Loss: 0.1199 Val Loss: 0.3003 Acc: 0.8913 Pre: 0.8766 Recall: 0.9247 F1: 0.9000 Train AUC: 0.9918 Val AUC: 0.9574 Time: 15.30\n",
      "Epoch: 503 Train Loss: 0.1046 Val Loss: 0.2968 Acc: 0.8786 Pre: 0.8763 Recall: 0.8973 F1: 0.8866 Train AUC: 0.9940 Val AUC: 0.9566 Time: 14.09\n",
      "Epoch: 504 Train Loss: 0.1163 Val Loss: 0.2980 Acc: 0.8732 Pre: 0.8700 Recall: 0.8938 F1: 0.8818 Train AUC: 0.9926 Val AUC: 0.9574 Time: 12.98\n",
      "Epoch: 505 Train Loss: 0.1152 Val Loss: 0.3107 Acc: 0.8822 Pre: 0.8580 Recall: 0.9315 F1: 0.8933 Train AUC: 0.9925 Val AUC: 0.9575 Time: 13.77\n",
      "Epoch: 506 Train Loss: 0.1145 Val Loss: 0.3026 Acc: 0.8931 Pre: 0.8845 Recall: 0.9178 F1: 0.9008 Train AUC: 0.9939 Val AUC: 0.9591 Time: 14.27\n",
      "Epoch: 507 Train Loss: 0.1170 Val Loss: 0.2976 Acc: 0.8949 Pre: 0.9062 Recall: 0.8938 F1: 0.9000 Train AUC: 0.9920 Val AUC: 0.9593 Time: 14.03\n",
      "Epoch: 508 Train Loss: 0.1194 Val Loss: 0.2925 Acc: 0.8967 Pre: 0.9038 Recall: 0.9007 F1: 0.9022 Train AUC: 0.9916 Val AUC: 0.9587 Time: 15.21\n",
      "Epoch: 509 Train Loss: 0.1144 Val Loss: 0.2999 Acc: 0.8804 Pre: 0.8645 Recall: 0.9178 F1: 0.8904 Train AUC: 0.9937 Val AUC: 0.9566 Time: 16.15\n",
      "Epoch: 510 Train Loss: 0.1059 Val Loss: 0.3066 Acc: 0.8768 Pre: 0.8590 Recall: 0.9178 F1: 0.8874 Train AUC: 0.9945 Val AUC: 0.9551 Time: 13.98\n",
      "Epoch: 511 Train Loss: 0.1074 Val Loss: 0.2976 Acc: 0.8768 Pre: 0.8709 Recall: 0.9007 F1: 0.8855 Train AUC: 0.9946 Val AUC: 0.9548 Time: 12.60\n",
      "Epoch: 512 Train Loss: 0.1132 Val Loss: 0.2953 Acc: 0.8786 Pre: 0.8814 Recall: 0.8904 F1: 0.8859 Train AUC: 0.9932 Val AUC: 0.9564 Time: 12.20\n",
      "Epoch: 513 Train Loss: 0.1096 Val Loss: 0.3037 Acc: 0.8931 Pre: 0.8845 Recall: 0.9178 F1: 0.9008 Train AUC: 0.9935 Val AUC: 0.9586 Time: 12.69\n",
      "Epoch: 514 Train Loss: 0.1048 Val Loss: 0.3117 Acc: 0.8913 Pre: 0.8816 Recall: 0.9178 F1: 0.8993 Train AUC: 0.9940 Val AUC: 0.9584 Time: 13.30\n",
      "Epoch: 515 Train Loss: 0.1120 Val Loss: 0.3012 Acc: 0.8949 Pre: 0.8900 Recall: 0.9144 F1: 0.9020 Train AUC: 0.9932 Val AUC: 0.9584 Time: 13.60\n",
      "Epoch: 516 Train Loss: 0.1093 Val Loss: 0.2956 Acc: 0.8949 Pre: 0.8926 Recall: 0.9110 F1: 0.9017 Train AUC: 0.9934 Val AUC: 0.9570 Time: 14.30\n",
      "Epoch: 517 Train Loss: 0.1181 Val Loss: 0.3025 Acc: 0.8804 Pre: 0.8742 Recall: 0.9041 F1: 0.8889 Train AUC: 0.9927 Val AUC: 0.9581 Time: 15.19\n",
      "Epoch: 518 Train Loss: 0.1088 Val Loss: 0.3134 Acc: 0.8859 Pre: 0.8754 Recall: 0.9144 F1: 0.8945 Train AUC: 0.9934 Val AUC: 0.9554 Time: 17.05\n",
      "Epoch: 519 Train Loss: 0.1221 Val Loss: 0.2975 Acc: 0.8804 Pre: 0.8792 Recall: 0.8973 F1: 0.8881 Train AUC: 0.9917 Val AUC: 0.9561 Time: 15.09\n",
      "Epoch: 520 Train Loss: 0.1122 Val Loss: 0.2911 Acc: 0.8931 Pre: 0.8949 Recall: 0.9041 F1: 0.8995 Train AUC: 0.9934 Val AUC: 0.9581 Time: 13.95\n",
      "Epoch: 521 Train Loss: 0.1100 Val Loss: 0.3035 Acc: 0.8949 Pre: 0.8926 Recall: 0.9110 F1: 0.9017 Train AUC: 0.9936 Val AUC: 0.9584 Time: 13.40\n",
      "Epoch: 522 Train Loss: 0.1211 Val Loss: 0.3142 Acc: 0.8967 Pre: 0.8852 Recall: 0.9247 F1: 0.9045 Train AUC: 0.9913 Val AUC: 0.9597 Time: 13.74\n",
      "Epoch: 523 Train Loss: 0.1174 Val Loss: 0.2995 Acc: 0.8859 Pre: 0.8829 Recall: 0.9041 F1: 0.8934 Train AUC: 0.9920 Val AUC: 0.9599 Time: 13.79\n",
      "Epoch: 524 Train Loss: 0.1075 Val Loss: 0.2922 Acc: 0.8804 Pre: 0.8844 Recall: 0.8904 F1: 0.8874 Train AUC: 0.9931 Val AUC: 0.9585 Time: 14.43\n",
      "Epoch: 525 Train Loss: 0.1128 Val Loss: 0.2964 Acc: 0.8678 Pre: 0.8567 Recall: 0.9007 F1: 0.8781 Train AUC: 0.9930 Val AUC: 0.9548 Time: 14.85\n",
      "Epoch: 526 Train Loss: 0.1102 Val Loss: 0.3037 Acc: 0.8768 Pre: 0.8544 Recall: 0.9247 F1: 0.8882 Train AUC: 0.9945 Val AUC: 0.9552 Time: 14.43\n",
      "Epoch: 527 Train Loss: 0.1115 Val Loss: 0.2997 Acc: 0.8895 Pre: 0.8787 Recall: 0.9178 F1: 0.8978 Train AUC: 0.9943 Val AUC: 0.9596 Time: 13.19\n",
      "Epoch: 528 Train Loss: 0.1115 Val Loss: 0.3004 Acc: 0.8931 Pre: 0.8976 Recall: 0.9007 F1: 0.8991 Train AUC: 0.9929 Val AUC: 0.9602 Time: 13.32\n",
      "Epoch: 529 Train Loss: 0.1116 Val Loss: 0.3004 Acc: 0.8895 Pre: 0.8997 Recall: 0.8904 F1: 0.8950 Train AUC: 0.9926 Val AUC: 0.9607 Time: 14.53\n",
      "Epoch: 530 Train Loss: 0.1138 Val Loss: 0.2978 Acc: 0.8931 Pre: 0.8845 Recall: 0.9178 F1: 0.9008 Train AUC: 0.9933 Val AUC: 0.9604 Time: 14.52\n",
      "Epoch: 531 Train Loss: 0.1073 Val Loss: 0.3070 Acc: 0.8841 Pre: 0.8631 Recall: 0.9281 F1: 0.8944 Train AUC: 0.9935 Val AUC: 0.9583 Time: 15.37\n",
      "Epoch: 532 Train Loss: 0.1085 Val Loss: 0.3008 Acc: 0.8804 Pre: 0.8742 Recall: 0.9041 F1: 0.8889 Train AUC: 0.9942 Val AUC: 0.9557 Time: 14.23\n",
      "Epoch: 533 Train Loss: 0.1079 Val Loss: 0.3021 Acc: 0.8822 Pre: 0.8771 Recall: 0.9041 F1: 0.8904 Train AUC: 0.9937 Val AUC: 0.9541 Time: 13.09\n",
      "Epoch: 534 Train Loss: 0.1173 Val Loss: 0.2966 Acc: 0.8804 Pre: 0.8669 Recall: 0.9144 F1: 0.8900 Train AUC: 0.9929 Val AUC: 0.9562 Time: 12.50\n",
      "Epoch: 535 Train Loss: 0.1011 Val Loss: 0.2977 Acc: 0.8841 Pre: 0.8725 Recall: 0.9144 F1: 0.8930 Train AUC: 0.9945 Val AUC: 0.9577 Time: 12.90\n",
      "Epoch: 536 Train Loss: 0.1056 Val Loss: 0.3017 Acc: 0.8931 Pre: 0.8845 Recall: 0.9178 F1: 0.9008 Train AUC: 0.9936 Val AUC: 0.9576 Time: 13.55\n",
      "Epoch: 537 Train Loss: 0.1072 Val Loss: 0.2967 Acc: 0.8931 Pre: 0.8896 Recall: 0.9110 F1: 0.9002 Train AUC: 0.9937 Val AUC: 0.9584 Time: 14.10\n",
      "Epoch: 538 Train Loss: 0.1052 Val Loss: 0.2947 Acc: 0.8841 Pre: 0.8826 Recall: 0.9007 F1: 0.8915 Train AUC: 0.9937 Val AUC: 0.9582 Time: 14.77\n",
      "Epoch: 539 Train Loss: 0.1074 Val Loss: 0.3018 Acc: 0.8786 Pre: 0.8713 Recall: 0.9041 F1: 0.8874 Train AUC: 0.9938 Val AUC: 0.9564 Time: 14.78\n",
      "Epoch: 540 Train Loss: 0.1015 Val Loss: 0.3076 Acc: 0.8768 Pre: 0.8684 Recall: 0.9041 F1: 0.8859 Train AUC: 0.9948 Val AUC: 0.9560 Time: 14.26\n",
      "Epoch: 541 Train Loss: 0.1031 Val Loss: 0.3029 Acc: 0.8804 Pre: 0.8717 Recall: 0.9075 F1: 0.8893 Train AUC: 0.9942 Val AUC: 0.9579 Time: 13.15\n",
      "Epoch: 542 Train Loss: 0.1051 Val Loss: 0.3008 Acc: 0.8931 Pre: 0.8896 Recall: 0.9110 F1: 0.9002 Train AUC: 0.9939 Val AUC: 0.9581 Time: 13.20\n",
      "Epoch: 543 Train Loss: 0.0991 Val Loss: 0.3015 Acc: 0.8931 Pre: 0.8923 Recall: 0.9075 F1: 0.8998 Train AUC: 0.9946 Val AUC: 0.9577 Time: 13.68\n",
      "Epoch: 544 Train Loss: 0.1151 Val Loss: 0.3045 Acc: 0.8895 Pre: 0.8787 Recall: 0.9178 F1: 0.8978 Train AUC: 0.9924 Val AUC: 0.9579 Time: 13.87\n",
      "Epoch: 545 Train Loss: 0.1125 Val Loss: 0.3116 Acc: 0.8804 Pre: 0.8645 Recall: 0.9178 F1: 0.8904 Train AUC: 0.9929 Val AUC: 0.9568 Time: 14.36\n",
      "Epoch: 546 Train Loss: 0.1097 Val Loss: 0.3049 Acc: 0.8859 Pre: 0.8804 Recall: 0.9075 F1: 0.8938 Train AUC: 0.9937 Val AUC: 0.9556 Time: 15.10\n",
      "Epoch: 547 Train Loss: 0.1069 Val Loss: 0.2995 Acc: 0.8822 Pre: 0.8874 Recall: 0.8904 F1: 0.8889 Train AUC: 0.9939 Val AUC: 0.9560 Time: 13.78\n",
      "Epoch: 548 Train Loss: 0.1036 Val Loss: 0.3030 Acc: 0.8895 Pre: 0.8837 Recall: 0.9110 F1: 0.8971 Train AUC: 0.9949 Val AUC: 0.9586 Time: 12.70\n",
      "Epoch: 549 Train Loss: 0.1040 Val Loss: 0.3091 Acc: 0.8877 Pre: 0.8710 Recall: 0.9247 F1: 0.8970 Train AUC: 0.9938 Val AUC: 0.9589 Time: 12.01\n",
      "Epoch: 550 Train Loss: 0.1004 Val Loss: 0.3030 Acc: 0.8895 Pre: 0.8787 Recall: 0.9178 F1: 0.8978 Train AUC: 0.9952 Val AUC: 0.9580 Time: 12.51\n",
      "Epoch: 551 Train Loss: 0.1021 Val Loss: 0.2996 Acc: 0.8877 Pre: 0.8859 Recall: 0.9041 F1: 0.8949 Train AUC: 0.9945 Val AUC: 0.9580 Time: 12.99\n",
      "Epoch: 552 Train Loss: 0.1053 Val Loss: 0.2990 Acc: 0.8949 Pre: 0.8900 Recall: 0.9144 F1: 0.9020 Train AUC: 0.9944 Val AUC: 0.9589 Time: 13.21\n",
      "Epoch: 553 Train Loss: 0.1045 Val Loss: 0.3123 Acc: 0.8877 Pre: 0.8662 Recall: 0.9315 F1: 0.8977 Train AUC: 0.9941 Val AUC: 0.9583 Time: 13.82\n",
      "Epoch: 554 Train Loss: 0.1057 Val Loss: 0.3092 Acc: 0.8895 Pre: 0.8738 Recall: 0.9247 F1: 0.8985 Train AUC: 0.9945 Val AUC: 0.9580 Time: 14.40\n",
      "Epoch: 555 Train Loss: 0.0948 Val Loss: 0.3034 Acc: 0.8859 Pre: 0.8935 Recall: 0.8904 F1: 0.8919 Train AUC: 0.9958 Val AUC: 0.9569 Time: 15.61\n",
      "Epoch: 556 Train Loss: 0.1034 Val Loss: 0.3042 Acc: 0.8804 Pre: 0.8924 Recall: 0.8801 F1: 0.8862 Train AUC: 0.9942 Val AUC: 0.9565 Time: 15.47\n",
      "Epoch: 557 Train Loss: 0.1083 Val Loss: 0.3194 Acc: 0.8786 Pre: 0.8571 Recall: 0.9247 F1: 0.8896 Train AUC: 0.9948 Val AUC: 0.9564 Time: 14.09\n",
      "Epoch: 558 Train Loss: 0.1040 Val Loss: 0.3343 Acc: 0.8732 Pre: 0.8405 Recall: 0.9384 F1: 0.8867 Train AUC: 0.9941 Val AUC: 0.9570 Time: 13.40\n",
      "Epoch: 559 Train Loss: 0.1076 Val Loss: 0.3043 Acc: 0.8841 Pre: 0.8750 Recall: 0.9110 F1: 0.8926 Train AUC: 0.9952 Val AUC: 0.9578 Time: 13.39\n",
      "Epoch: 560 Train Loss: 0.0992 Val Loss: 0.3105 Acc: 0.8786 Pre: 0.8947 Recall: 0.8733 F1: 0.8839 Train AUC: 0.9950 Val AUC: 0.9586 Time: 12.99\n",
      "Epoch: 561 Train Loss: 0.1109 Val Loss: 0.3073 Acc: 0.8841 Pre: 0.8851 Recall: 0.8973 F1: 0.8912 Train AUC: 0.9939 Val AUC: 0.9585 Time: 12.87\n",
      "Epoch: 562 Train Loss: 0.1041 Val Loss: 0.3360 Acc: 0.8768 Pre: 0.8500 Recall: 0.9315 F1: 0.8889 Train AUC: 0.9945 Val AUC: 0.9582 Time: 13.36\n",
      "Epoch: 563 Train Loss: 0.1172 Val Loss: 0.3160 Acc: 0.8822 Pre: 0.8650 Recall: 0.9212 F1: 0.8922 Train AUC: 0.9941 Val AUC: 0.9576 Time: 14.08\n",
      "Epoch: 564 Train Loss: 0.0974 Val Loss: 0.3054 Acc: 0.8841 Pre: 0.8878 Recall: 0.8938 F1: 0.8908 Train AUC: 0.9950 Val AUC: 0.9574 Time: 14.95\n",
      "Epoch: 565 Train Loss: 0.1009 Val Loss: 0.3101 Acc: 0.8804 Pre: 0.8924 Recall: 0.8801 F1: 0.8862 Train AUC: 0.9946 Val AUC: 0.9569 Time: 14.91\n",
      "Epoch: 566 Train Loss: 0.1069 Val Loss: 0.3174 Acc: 0.8877 Pre: 0.8710 Recall: 0.9247 F1: 0.8970 Train AUC: 0.9944 Val AUC: 0.9577 Time: 14.51\n",
      "Epoch: 567 Train Loss: 0.1111 Val Loss: 0.3205 Acc: 0.8768 Pre: 0.8567 Recall: 0.9212 F1: 0.8878 Train AUC: 0.9924 Val AUC: 0.9565 Time: 13.37\n",
      "Epoch: 568 Train Loss: 0.1091 Val Loss: 0.3027 Acc: 0.8841 Pre: 0.8701 Recall: 0.9178 F1: 0.8933 Train AUC: 0.9940 Val AUC: 0.9556 Time: 13.28\n",
      "Epoch: 569 Train Loss: 0.1009 Val Loss: 0.2962 Acc: 0.8895 Pre: 0.8889 Recall: 0.9041 F1: 0.8964 Train AUC: 0.9950 Val AUC: 0.9572 Time: 12.68\n",
      "Epoch: 570 Train Loss: 0.1156 Val Loss: 0.3051 Acc: 0.8986 Pre: 0.8986 Recall: 0.9110 F1: 0.9048 Train AUC: 0.9943 Val AUC: 0.9601 Time: 13.04\n",
      "Epoch: 571 Train Loss: 0.1074 Val Loss: 0.3221 Acc: 0.8877 Pre: 0.8758 Recall: 0.9178 F1: 0.8963 Train AUC: 0.9934 Val AUC: 0.9583 Time: 13.54\n",
      "Epoch: 572 Train Loss: 0.1087 Val Loss: 0.3106 Acc: 0.8913 Pre: 0.8816 Recall: 0.9178 F1: 0.8993 Train AUC: 0.9939 Val AUC: 0.9590 Time: 15.23\n",
      "Epoch: 573 Train Loss: 0.1048 Val Loss: 0.2999 Acc: 0.8895 Pre: 0.8863 Recall: 0.9075 F1: 0.8968 Train AUC: 0.9943 Val AUC: 0.9569 Time: 17.89\n",
      "Epoch: 574 Train Loss: 0.1034 Val Loss: 0.3108 Acc: 0.8750 Pre: 0.8968 Recall: 0.8630 F1: 0.8796 Train AUC: 0.9939 Val AUC: 0.9527 Time: 15.16\n",
      "Epoch: 575 Train Loss: 0.1256 Val Loss: 0.3124 Acc: 0.8822 Pre: 0.8673 Recall: 0.9178 F1: 0.8918 Train AUC: 0.9930 Val AUC: 0.9548 Time: 14.20\n",
      "Epoch: 576 Train Loss: 0.1024 Val Loss: 0.3280 Acc: 0.8841 Pre: 0.8631 Recall: 0.9281 F1: 0.8944 Train AUC: 0.9941 Val AUC: 0.9566 Time: 14.17\n",
      "Epoch: 577 Train Loss: 0.1030 Val Loss: 0.3136 Acc: 0.8877 Pre: 0.8710 Recall: 0.9247 F1: 0.8970 Train AUC: 0.9944 Val AUC: 0.9581 Time: 14.25\n",
      "Epoch: 578 Train Loss: 0.1029 Val Loss: 0.3053 Acc: 0.8949 Pre: 0.8900 Recall: 0.9144 F1: 0.9020 Train AUC: 0.9944 Val AUC: 0.9595 Time: 14.87\n",
      "Epoch: 579 Train Loss: 0.0928 Val Loss: 0.3096 Acc: 0.8931 Pre: 0.8896 Recall: 0.9110 F1: 0.9002 Train AUC: 0.9957 Val AUC: 0.9598 Time: 16.14\n",
      "Epoch: 580 Train Loss: 0.0986 Val Loss: 0.3224 Acc: 0.8949 Pre: 0.8774 Recall: 0.9315 F1: 0.9037 Train AUC: 0.9952 Val AUC: 0.9589 Time: 14.81\n",
      "Epoch: 581 Train Loss: 0.1054 Val Loss: 0.3240 Acc: 0.8913 Pre: 0.8766 Recall: 0.9247 F1: 0.9000 Train AUC: 0.9942 Val AUC: 0.9584 Time: 15.15\n",
      "Epoch: 582 Train Loss: 0.1013 Val Loss: 0.3147 Acc: 0.8822 Pre: 0.8771 Recall: 0.9041 F1: 0.8904 Train AUC: 0.9943 Val AUC: 0.9579 Time: 13.81\n",
      "Epoch: 583 Train Loss: 0.1004 Val Loss: 0.3109 Acc: 0.8822 Pre: 0.8822 Recall: 0.8973 F1: 0.8896 Train AUC: 0.9943 Val AUC: 0.9572 Time: 12.61\n",
      "Epoch: 584 Train Loss: 0.1030 Val Loss: 0.3122 Acc: 0.8877 Pre: 0.8710 Recall: 0.9247 F1: 0.8970 Train AUC: 0.9944 Val AUC: 0.9561 Time: 13.05\n",
      "Epoch: 585 Train Loss: 0.1010 Val Loss: 0.3212 Acc: 0.8768 Pre: 0.8567 Recall: 0.9212 F1: 0.8878 Train AUC: 0.9941 Val AUC: 0.9539 Time: 13.48\n",
      "Epoch: 586 Train Loss: 0.0974 Val Loss: 0.3190 Acc: 0.8750 Pre: 0.8562 Recall: 0.9178 F1: 0.8860 Train AUC: 0.9954 Val AUC: 0.9549 Time: 14.11\n",
      "Epoch: 587 Train Loss: 0.0972 Val Loss: 0.3146 Acc: 0.8804 Pre: 0.8767 Recall: 0.9007 F1: 0.8885 Train AUC: 0.9950 Val AUC: 0.9565 Time: 14.81\n",
      "Epoch: 588 Train Loss: 0.0948 Val Loss: 0.3205 Acc: 0.8786 Pre: 0.8763 Recall: 0.8973 F1: 0.8866 Train AUC: 0.9957 Val AUC: 0.9579 Time: 15.25\n",
      "Epoch: 589 Train Loss: 0.1044 Val Loss: 0.3278 Acc: 0.8804 Pre: 0.8693 Recall: 0.9110 F1: 0.8896 Train AUC: 0.9939 Val AUC: 0.9579 Time: 15.94\n",
      "Epoch: 590 Train Loss: 0.0988 Val Loss: 0.3311 Acc: 0.8877 Pre: 0.8662 Recall: 0.9315 F1: 0.8977 Train AUC: 0.9946 Val AUC: 0.9572 Time: 14.66\n",
      "Epoch: 591 Train Loss: 0.1011 Val Loss: 0.3242 Acc: 0.8804 Pre: 0.8645 Recall: 0.9178 F1: 0.8904 Train AUC: 0.9951 Val AUC: 0.9549 Time: 12.92\n",
      "Epoch: 592 Train Loss: 0.0982 Val Loss: 0.3184 Acc: 0.8859 Pre: 0.8804 Recall: 0.9075 F1: 0.8938 Train AUC: 0.9953 Val AUC: 0.9552 Time: 12.46\n",
      "Epoch: 593 Train Loss: 0.0946 Val Loss: 0.3207 Acc: 0.8931 Pre: 0.8870 Recall: 0.9144 F1: 0.9005 Train AUC: 0.9954 Val AUC: 0.9560 Time: 12.66\n",
      "Epoch: 594 Train Loss: 0.1014 Val Loss: 0.3409 Acc: 0.8859 Pre: 0.8635 Recall: 0.9315 F1: 0.8962 Train AUC: 0.9940 Val AUC: 0.9567 Time: 13.53\n",
      "Epoch: 595 Train Loss: 0.1043 Val Loss: 0.3253 Acc: 0.8841 Pre: 0.8701 Recall: 0.9178 F1: 0.8933 Train AUC: 0.9954 Val AUC: 0.9566 Time: 13.96\n",
      "Epoch: 596 Train Loss: 0.0933 Val Loss: 0.3137 Acc: 0.8895 Pre: 0.8889 Recall: 0.9041 F1: 0.8964 Train AUC: 0.9955 Val AUC: 0.9551 Time: 15.19\n",
      "Epoch: 597 Train Loss: 0.1037 Val Loss: 0.3208 Acc: 0.8768 Pre: 0.8613 Recall: 0.9144 F1: 0.8870 Train AUC: 0.9952 Val AUC: 0.9550 Time: 19.27\n",
      "Epoch: 598 Train Loss: 0.0948 Val Loss: 0.3194 Acc: 0.8841 Pre: 0.8677 Recall: 0.9212 F1: 0.8937 Train AUC: 0.9957 Val AUC: 0.9559 Time: 15.76\n",
      "Epoch: 599 Train Loss: 0.0991 Val Loss: 0.3156 Acc: 0.8895 Pre: 0.8812 Recall: 0.9144 F1: 0.8975 Train AUC: 0.9952 Val AUC: 0.9572 Time: 13.81\n",
      "Epoch: 600 Train Loss: 0.1002 Val Loss: 0.3200 Acc: 0.8804 Pre: 0.8792 Recall: 0.8973 F1: 0.8881 Train AUC: 0.9943 Val AUC: 0.9570 Time: 13.15\n",
      "Epoch: 601 Train Loss: 0.1020 Val Loss: 0.3221 Acc: 0.8786 Pre: 0.8713 Recall: 0.9041 F1: 0.8874 Train AUC: 0.9942 Val AUC: 0.9568 Time: 12.90\n",
      "Epoch: 602 Train Loss: 0.0992 Val Loss: 0.3246 Acc: 0.8696 Pre: 0.8503 Recall: 0.9144 F1: 0.8812 Train AUC: 0.9944 Val AUC: 0.9544 Time: 13.53\n",
      "Epoch: 603 Train Loss: 0.0900 Val Loss: 0.3305 Acc: 0.8804 Pre: 0.8531 Recall: 0.9349 F1: 0.8922 Train AUC: 0.9963 Val AUC: 0.9543 Time: 14.17\n",
      "Epoch: 604 Train Loss: 0.1004 Val Loss: 0.3159 Acc: 0.8877 Pre: 0.8783 Recall: 0.9144 F1: 0.8960 Train AUC: 0.9947 Val AUC: 0.9572 Time: 14.40\n",
      "Epoch: 605 Train Loss: 0.0970 Val Loss: 0.3179 Acc: 0.8841 Pre: 0.8826 Recall: 0.9007 F1: 0.8915 Train AUC: 0.9950 Val AUC: 0.9576 Time: 13.45\n",
      "Epoch: 606 Train Loss: 0.1003 Val Loss: 0.3274 Acc: 0.8841 Pre: 0.8725 Recall: 0.9144 F1: 0.8930 Train AUC: 0.9943 Val AUC: 0.9573 Time: 13.42\n",
      "Epoch: 607 Train Loss: 0.0966 Val Loss: 0.3262 Acc: 0.8768 Pre: 0.8590 Recall: 0.9178 F1: 0.8874 Train AUC: 0.9950 Val AUC: 0.9551 Time: 13.84\n",
      "Epoch: 608 Train Loss: 0.0858 Val Loss: 0.3356 Acc: 0.8678 Pre: 0.8476 Recall: 0.9144 F1: 0.8797 Train AUC: 0.9964 Val AUC: 0.9513 Time: 13.66\n",
      "Epoch: 609 Train Loss: 0.1001 Val Loss: 0.3188 Acc: 0.8750 Pre: 0.8680 Recall: 0.9007 F1: 0.8840 Train AUC: 0.9947 Val AUC: 0.9549 Time: 14.22\n",
      "Epoch: 610 Train Loss: 0.0990 Val Loss: 0.3155 Acc: 0.8895 Pre: 0.8969 Recall: 0.8938 F1: 0.8954 Train AUC: 0.9946 Val AUC: 0.9585 Time: 14.37\n",
      "Epoch: 611 Train Loss: 0.1086 Val Loss: 0.3282 Acc: 0.8877 Pre: 0.8808 Recall: 0.9110 F1: 0.8956 Train AUC: 0.9941 Val AUC: 0.9593 Time: 14.26\n",
      "Epoch: 612 Train Loss: 0.1011 Val Loss: 0.3357 Acc: 0.8877 Pre: 0.8686 Recall: 0.9281 F1: 0.8974 Train AUC: 0.9937 Val AUC: 0.9590 Time: 13.86\n",
      "Epoch: 613 Train Loss: 0.1048 Val Loss: 0.3163 Acc: 0.8768 Pre: 0.8544 Recall: 0.9247 F1: 0.8882 Train AUC: 0.9946 Val AUC: 0.9576 Time: 13.41\n",
      "Epoch: 614 Train Loss: 0.0961 Val Loss: 0.3101 Acc: 0.8841 Pre: 0.8800 Recall: 0.9041 F1: 0.8919 Train AUC: 0.9951 Val AUC: 0.9552 Time: 13.74\n",
      "Epoch: 615 Train Loss: 0.0931 Val Loss: 0.3134 Acc: 0.8786 Pre: 0.8893 Recall: 0.8801 F1: 0.8847 Train AUC: 0.9958 Val AUC: 0.9546 Time: 14.42\n",
      "Epoch: 616 Train Loss: 0.1025 Val Loss: 0.3358 Acc: 0.8877 Pre: 0.8710 Recall: 0.9247 F1: 0.8970 Train AUC: 0.9957 Val AUC: 0.9555 Time: 15.12\n",
      "Epoch: 617 Train Loss: 0.1089 Val Loss: 0.3440 Acc: 0.8841 Pre: 0.8608 Recall: 0.9315 F1: 0.8947 Train AUC: 0.9939 Val AUC: 0.9565 Time: 14.91\n",
      "Epoch: 618 Train Loss: 0.1057 Val Loss: 0.3116 Acc: 0.8841 Pre: 0.8958 Recall: 0.8836 F1: 0.8897 Train AUC: 0.9949 Val AUC: 0.9563 Time: 14.46\n",
      "Epoch: 619 Train Loss: 0.0963 Val Loss: 0.3140 Acc: 0.8714 Pre: 0.8850 Recall: 0.8699 F1: 0.8774 Train AUC: 0.9953 Val AUC: 0.9555 Time: 13.26\n",
      "Epoch: 620 Train Loss: 0.1192 Val Loss: 0.3205 Acc: 0.8841 Pre: 0.8631 Recall: 0.9281 F1: 0.8944 Train AUC: 0.9932 Val AUC: 0.9566 Time: 12.80\n",
      "Epoch: 621 Train Loss: 0.1054 Val Loss: 0.3523 Acc: 0.8786 Pre: 0.8483 Recall: 0.9384 F1: 0.8911 Train AUC: 0.9940 Val AUC: 0.9580 Time: 13.31\n",
      "Epoch: 622 Train Loss: 0.1209 Val Loss: 0.3242 Acc: 0.8841 Pre: 0.8725 Recall: 0.9144 F1: 0.8930 Train AUC: 0.9947 Val AUC: 0.9585 Time: 13.89\n",
      "Epoch: 623 Train Loss: 0.0993 Val Loss: 0.3133 Acc: 0.8822 Pre: 0.8874 Recall: 0.8904 F1: 0.8889 Train AUC: 0.9947 Val AUC: 0.9577 Time: 14.43\n",
      "Epoch: 624 Train Loss: 0.1008 Val Loss: 0.3103 Acc: 0.8895 Pre: 0.8942 Recall: 0.8973 F1: 0.8957 Train AUC: 0.9951 Val AUC: 0.9563 Time: 14.49\n",
      "Epoch: 625 Train Loss: 0.1007 Val Loss: 0.3354 Acc: 0.8841 Pre: 0.8585 Recall: 0.9349 F1: 0.8951 Train AUC: 0.9957 Val AUC: 0.9563 Time: 14.20\n",
      "Epoch: 626 Train Loss: 0.1047 Val Loss: 0.3576 Acc: 0.8732 Pre: 0.8405 Recall: 0.9384 F1: 0.8867 Train AUC: 0.9951 Val AUC: 0.9573 Time: 14.09\n",
      "Epoch: 627 Train Loss: 0.1208 Val Loss: 0.3194 Acc: 0.8877 Pre: 0.8808 Recall: 0.9110 F1: 0.8956 Train AUC: 0.9937 Val AUC: 0.9572 Time: 12.65\n",
      "Epoch: 628 Train Loss: 0.0928 Val Loss: 0.3110 Acc: 0.8768 Pre: 0.8784 Recall: 0.8904 F1: 0.8844 Train AUC: 0.9949 Val AUC: 0.9565 Time: 13.02\n",
      "Epoch: 629 Train Loss: 0.1093 Val Loss: 0.3096 Acc: 0.8696 Pre: 0.8667 Recall: 0.8904 F1: 0.8784 Train AUC: 0.9944 Val AUC: 0.9555 Time: 13.83\n",
      "Epoch: 630 Train Loss: 0.1051 Val Loss: 0.3288 Acc: 0.8804 Pre: 0.8553 Recall: 0.9315 F1: 0.8918 Train AUC: 0.9945 Val AUC: 0.9548 Time: 14.75\n",
      "Epoch: 631 Train Loss: 0.0947 Val Loss: 0.3395 Acc: 0.8877 Pre: 0.8662 Recall: 0.9315 F1: 0.8977 Train AUC: 0.9958 Val AUC: 0.9551 Time: 14.93\n",
      "Epoch: 632 Train Loss: 0.0988 Val Loss: 0.3191 Acc: 0.8822 Pre: 0.8822 Recall: 0.8973 F1: 0.8896 Train AUC: 0.9956 Val AUC: 0.9570 Time: 15.91\n",
      "Epoch: 633 Train Loss: 0.0932 Val Loss: 0.3149 Acc: 0.8895 Pre: 0.8942 Recall: 0.8973 F1: 0.8957 Train AUC: 0.9954 Val AUC: 0.9563 Time: 16.41\n",
      "Epoch: 634 Train Loss: 0.0996 Val Loss: 0.3179 Acc: 0.8841 Pre: 0.8701 Recall: 0.9178 F1: 0.8933 Train AUC: 0.9948 Val AUC: 0.9546 Time: 15.58\n",
      "Epoch: 635 Train Loss: 0.0912 Val Loss: 0.3382 Acc: 0.8804 Pre: 0.8553 Recall: 0.9315 F1: 0.8918 Train AUC: 0.9958 Val AUC: 0.9505 Time: 14.27\n",
      "Epoch: 636 Train Loss: 0.0998 Val Loss: 0.3293 Acc: 0.8786 Pre: 0.8549 Recall: 0.9281 F1: 0.8900 Train AUC: 0.9956 Val AUC: 0.9548 Time: 15.51\n",
      "Epoch: 637 Train Loss: 0.1029 Val Loss: 0.3250 Acc: 0.8768 Pre: 0.8684 Recall: 0.9041 F1: 0.8859 Train AUC: 0.9942 Val AUC: 0.9574 Time: 14.92\n",
      "Epoch: 638 Train Loss: 0.0956 Val Loss: 0.3295 Acc: 0.8804 Pre: 0.8792 Recall: 0.8973 F1: 0.8881 Train AUC: 0.9948 Val AUC: 0.9583 Time: 15.07\n",
      "Epoch: 639 Train Loss: 0.0956 Val Loss: 0.3198 Acc: 0.8895 Pre: 0.8889 Recall: 0.9041 F1: 0.8964 Train AUC: 0.9945 Val AUC: 0.9578 Time: 14.68\n",
      "Epoch: 640 Train Loss: 0.0901 Val Loss: 0.3263 Acc: 0.8696 Pre: 0.8595 Recall: 0.9007 F1: 0.8796 Train AUC: 0.9959 Val AUC: 0.9534 Time: 14.92\n",
      "Epoch: 641 Train Loss: 0.1040 Val Loss: 0.3213 Acc: 0.8822 Pre: 0.8746 Recall: 0.9075 F1: 0.8908 Train AUC: 0.9945 Val AUC: 0.9562 Time: 13.57\n",
      "Epoch: 642 Train Loss: 0.0953 Val Loss: 0.3193 Acc: 0.8895 Pre: 0.8863 Recall: 0.9075 F1: 0.8968 Train AUC: 0.9954 Val AUC: 0.9571 Time: 12.70\n",
      "Epoch: 643 Train Loss: 0.0927 Val Loss: 0.3200 Acc: 0.8859 Pre: 0.8829 Recall: 0.9041 F1: 0.8934 Train AUC: 0.9950 Val AUC: 0.9566 Time: 13.11\n",
      "Epoch: 644 Train Loss: 0.0933 Val Loss: 0.3198 Acc: 0.8768 Pre: 0.8684 Recall: 0.9041 F1: 0.8859 Train AUC: 0.9947 Val AUC: 0.9535 Time: 16.19\n",
      "Epoch: 645 Train Loss: 0.0914 Val Loss: 0.3292 Acc: 0.8696 Pre: 0.8526 Recall: 0.9110 F1: 0.8808 Train AUC: 0.9961 Val AUC: 0.9507 Time: 16.17\n",
      "Epoch: 646 Train Loss: 0.0935 Val Loss: 0.3228 Acc: 0.8913 Pre: 0.8816 Recall: 0.9178 F1: 0.8993 Train AUC: 0.9963 Val AUC: 0.9557 Time: 17.64\n",
      "Epoch: 647 Train Loss: 0.0889 Val Loss: 0.3292 Acc: 0.8931 Pre: 0.8845 Recall: 0.9178 F1: 0.9008 Train AUC: 0.9968 Val AUC: 0.9583 Time: 16.98\n",
      "Epoch: 648 Train Loss: 0.1012 Val Loss: 0.3341 Acc: 0.8859 Pre: 0.8881 Recall: 0.8973 F1: 0.8927 Train AUC: 0.9939 Val AUC: 0.9577 Time: 17.72\n",
      "Epoch: 649 Train Loss: 0.1000 Val Loss: 0.3275 Acc: 0.8822 Pre: 0.8822 Recall: 0.8973 F1: 0.8896 Train AUC: 0.9944 Val AUC: 0.9569 Time: 16.16\n",
      "Epoch: 650 Train Loss: 0.0972 Val Loss: 0.3307 Acc: 0.8786 Pre: 0.8617 Recall: 0.9178 F1: 0.8889 Train AUC: 0.9948 Val AUC: 0.9549 Time: 13.96\n",
      "Epoch: 651 Train Loss: 0.1024 Val Loss: 0.3452 Acc: 0.8678 Pre: 0.8454 Recall: 0.9178 F1: 0.8801 Train AUC: 0.9944 Val AUC: 0.9508 Time: 13.42\n",
      "Epoch: 652 Train Loss: 0.1007 Val Loss: 0.3282 Acc: 0.8822 Pre: 0.8796 Recall: 0.9007 F1: 0.8900 Train AUC: 0.9951 Val AUC: 0.9521 Time: 14.64\n",
      "Epoch: 653 Train Loss: 0.0907 Val Loss: 0.3347 Acc: 0.8714 Pre: 0.8797 Recall: 0.8767 F1: 0.8782 Train AUC: 0.9959 Val AUC: 0.9546 Time: 15.31\n",
      "Epoch: 654 Train Loss: 0.0999 Val Loss: 0.3396 Acc: 0.8768 Pre: 0.8733 Recall: 0.8973 F1: 0.8851 Train AUC: 0.9945 Val AUC: 0.9552 Time: 14.97\n",
      "Epoch: 655 Train Loss: 0.0994 Val Loss: 0.3319 Acc: 0.8895 Pre: 0.8812 Recall: 0.9144 F1: 0.8975 Train AUC: 0.9941 Val AUC: 0.9567 Time: 14.74\n",
      "Epoch: 656 Train Loss: 0.1033 Val Loss: 0.3272 Acc: 0.8822 Pre: 0.8721 Recall: 0.9110 F1: 0.8911 Train AUC: 0.9938 Val AUC: 0.9553 Time: 15.57\n",
      "Epoch: 657 Train Loss: 0.0888 Val Loss: 0.3385 Acc: 0.8750 Pre: 0.8562 Recall: 0.9178 F1: 0.8860 Train AUC: 0.9964 Val AUC: 0.9534 Time: 16.17\n",
      "Epoch: 658 Train Loss: 0.1008 Val Loss: 0.3262 Acc: 0.8877 Pre: 0.8783 Recall: 0.9144 F1: 0.8960 Train AUC: 0.9951 Val AUC: 0.9566 Time: 13.23\n",
      "Epoch: 659 Train Loss: 0.0979 Val Loss: 0.3314 Acc: 0.8804 Pre: 0.8717 Recall: 0.9075 F1: 0.8893 Train AUC: 0.9949 Val AUC: 0.9576 Time: 12.91\n",
      "Epoch: 660 Train Loss: 0.1015 Val Loss: 0.3314 Acc: 0.8804 Pre: 0.8792 Recall: 0.8973 F1: 0.8881 Train AUC: 0.9937 Val AUC: 0.9578 Time: 13.32\n",
      "Epoch: 661 Train Loss: 0.0943 Val Loss: 0.3284 Acc: 0.8877 Pre: 0.8808 Recall: 0.9110 F1: 0.8956 Train AUC: 0.9945 Val AUC: 0.9565 Time: 13.56\n",
      "Epoch: 662 Train Loss: 0.0915 Val Loss: 0.3430 Acc: 0.8768 Pre: 0.8613 Recall: 0.9144 F1: 0.8870 Train AUC: 0.9951 Val AUC: 0.9527 Time: 14.04\n",
      "Epoch: 663 Train Loss: 0.0928 Val Loss: 0.3571 Acc: 0.8714 Pre: 0.8553 Recall: 0.9110 F1: 0.8823 Train AUC: 0.9956 Val AUC: 0.9470 Time: 14.83\n",
      "Epoch: 664 Train Loss: 0.0947 Val Loss: 0.3384 Acc: 0.8786 Pre: 0.8664 Recall: 0.9110 F1: 0.8881 Train AUC: 0.9952 Val AUC: 0.9529 Time: 15.85\n",
      "Epoch: 665 Train Loss: 0.0910 Val Loss: 0.3267 Acc: 0.8768 Pre: 0.8784 Recall: 0.8904 F1: 0.8844 Train AUC: 0.9962 Val AUC: 0.9558 Time: 14.36\n",
      "Epoch: 666 Train Loss: 0.0929 Val Loss: 0.3292 Acc: 0.8841 Pre: 0.8750 Recall: 0.9110 F1: 0.8926 Train AUC: 0.9955 Val AUC: 0.9565 Time: 12.79\n",
      "Epoch: 667 Train Loss: 0.0951 Val Loss: 0.3339 Acc: 0.8841 Pre: 0.8608 Recall: 0.9315 F1: 0.8947 Train AUC: 0.9946 Val AUC: 0.9569 Time: 12.65\n",
      "Epoch: 668 Train Loss: 0.1025 Val Loss: 0.3361 Acc: 0.8804 Pre: 0.8553 Recall: 0.9315 F1: 0.8918 Train AUC: 0.9945 Val AUC: 0.9563 Time: 13.72\n",
      "Epoch: 669 Train Loss: 0.0965 Val Loss: 0.3193 Acc: 0.8877 Pre: 0.8808 Recall: 0.9110 F1: 0.8956 Train AUC: 0.9955 Val AUC: 0.9562 Time: 13.89\n",
      "Epoch: 670 Train Loss: 0.0861 Val Loss: 0.3169 Acc: 0.8822 Pre: 0.8847 Recall: 0.8938 F1: 0.8893 Train AUC: 0.9963 Val AUC: 0.9561 Time: 14.41\n",
      "Epoch: 671 Train Loss: 0.0960 Val Loss: 0.3269 Acc: 0.8750 Pre: 0.8680 Recall: 0.9007 F1: 0.8840 Train AUC: 0.9953 Val AUC: 0.9562 Time: 15.34\n",
      "Epoch: 672 Train Loss: 0.0925 Val Loss: 0.3365 Acc: 0.8913 Pre: 0.8791 Recall: 0.9212 F1: 0.8997 Train AUC: 0.9953 Val AUC: 0.9553 Time: 14.91\n",
      "Epoch: 673 Train Loss: 0.0933 Val Loss: 0.3336 Acc: 0.8895 Pre: 0.8738 Recall: 0.9247 F1: 0.8985 Train AUC: 0.9955 Val AUC: 0.9563 Time: 13.40\n",
      "Epoch: 674 Train Loss: 0.0879 Val Loss: 0.3218 Acc: 0.8913 Pre: 0.8867 Recall: 0.9110 F1: 0.8986 Train AUC: 0.9960 Val AUC: 0.9563 Time: 12.75\n",
      "Epoch: 675 Train Loss: 0.0913 Val Loss: 0.3170 Acc: 0.8877 Pre: 0.8859 Recall: 0.9041 F1: 0.8949 Train AUC: 0.9959 Val AUC: 0.9576 Time: 12.88\n",
      "Epoch: 676 Train Loss: 0.0929 Val Loss: 0.3262 Acc: 0.8895 Pre: 0.8762 Recall: 0.9212 F1: 0.8982 Train AUC: 0.9959 Val AUC: 0.9582 Time: 13.48\n",
      "Epoch: 677 Train Loss: 0.0911 Val Loss: 0.3296 Acc: 0.8895 Pre: 0.8690 Recall: 0.9315 F1: 0.8992 Train AUC: 0.9956 Val AUC: 0.9578 Time: 14.12\n",
      "Epoch: 678 Train Loss: 0.0881 Val Loss: 0.3203 Acc: 0.8913 Pre: 0.8791 Recall: 0.9212 F1: 0.8997 Train AUC: 0.9962 Val AUC: 0.9563 Time: 14.19\n",
      "Epoch: 679 Train Loss: 0.0891 Val Loss: 0.3154 Acc: 0.8768 Pre: 0.8660 Recall: 0.9075 F1: 0.8863 Train AUC: 0.9963 Val AUC: 0.9545 Time: 14.73\n",
      "Epoch: 680 Train Loss: 0.0959 Val Loss: 0.3154 Acc: 0.8822 Pre: 0.8721 Recall: 0.9110 F1: 0.8911 Train AUC: 0.9956 Val AUC: 0.9554 Time: 14.99\n",
      "Epoch: 681 Train Loss: 0.0907 Val Loss: 0.3233 Acc: 0.8967 Pre: 0.8852 Recall: 0.9247 F1: 0.9045 Train AUC: 0.9964 Val AUC: 0.9581 Time: 13.69\n",
      "Epoch: 682 Train Loss: 0.0892 Val Loss: 0.3306 Acc: 0.8877 Pre: 0.8758 Recall: 0.9178 F1: 0.8963 Train AUC: 0.9957 Val AUC: 0.9586 Time: 13.47\n",
      "Epoch: 683 Train Loss: 0.0960 Val Loss: 0.3251 Acc: 0.8895 Pre: 0.8812 Recall: 0.9144 F1: 0.8975 Train AUC: 0.9946 Val AUC: 0.9578 Time: 13.30\n",
      "Epoch: 684 Train Loss: 0.0941 Val Loss: 0.3209 Acc: 0.8786 Pre: 0.8738 Recall: 0.9007 F1: 0.8870 Train AUC: 0.9954 Val AUC: 0.9545 Time: 13.08\n",
      "Epoch: 685 Train Loss: 0.0937 Val Loss: 0.3256 Acc: 0.8804 Pre: 0.8669 Recall: 0.9144 F1: 0.8900 Train AUC: 0.9962 Val AUC: 0.9534 Time: 14.12\n",
      "Epoch: 686 Train Loss: 0.0955 Val Loss: 0.3212 Acc: 0.8877 Pre: 0.8783 Recall: 0.9144 F1: 0.8960 Train AUC: 0.9950 Val AUC: 0.9562 Time: 14.37\n",
      "Epoch: 687 Train Loss: 0.0911 Val Loss: 0.3241 Acc: 0.8859 Pre: 0.8829 Recall: 0.9041 F1: 0.8934 Train AUC: 0.9956 Val AUC: 0.9582 Time: 14.97\n",
      "Epoch: 688 Train Loss: 0.0912 Val Loss: 0.3258 Acc: 0.8859 Pre: 0.8779 Recall: 0.9110 F1: 0.8941 Train AUC: 0.9952 Val AUC: 0.9581 Time: 13.66\n",
      "Epoch: 689 Train Loss: 0.0923 Val Loss: 0.3240 Acc: 0.8877 Pre: 0.8758 Recall: 0.9178 F1: 0.8963 Train AUC: 0.9953 Val AUC: 0.9572 Time: 13.50\n",
      "Epoch: 690 Train Loss: 0.0855 Val Loss: 0.3265 Acc: 0.8786 Pre: 0.8617 Recall: 0.9178 F1: 0.8889 Train AUC: 0.9961 Val AUC: 0.9556 Time: 13.39\n",
      "Epoch: 691 Train Loss: 0.0904 Val Loss: 0.3163 Acc: 0.8822 Pre: 0.8721 Recall: 0.9110 F1: 0.8911 Train AUC: 0.9961 Val AUC: 0.9556 Time: 13.78\n",
      "Epoch: 692 Train Loss: 0.0849 Val Loss: 0.3155 Acc: 0.8877 Pre: 0.8859 Recall: 0.9041 F1: 0.8949 Train AUC: 0.9969 Val AUC: 0.9561 Time: 13.99\n",
      "Epoch: 693 Train Loss: 0.0819 Val Loss: 0.3187 Acc: 0.8859 Pre: 0.8829 Recall: 0.9041 F1: 0.8934 Train AUC: 0.9972 Val AUC: 0.9561 Time: 14.67\n",
      "Epoch: 694 Train Loss: 0.0857 Val Loss: 0.3227 Acc: 0.8841 Pre: 0.8775 Recall: 0.9075 F1: 0.8923 Train AUC: 0.9968 Val AUC: 0.9570 Time: 14.18\n",
      "Epoch: 695 Train Loss: 0.0907 Val Loss: 0.3397 Acc: 0.8804 Pre: 0.8553 Recall: 0.9315 F1: 0.8918 Train AUC: 0.9956 Val AUC: 0.9569 Time: 13.49\n",
      "Epoch: 696 Train Loss: 0.0970 Val Loss: 0.3355 Acc: 0.8786 Pre: 0.8527 Recall: 0.9315 F1: 0.8903 Train AUC: 0.9952 Val AUC: 0.9565 Time: 13.14\n",
      "Epoch: 697 Train Loss: 0.0937 Val Loss: 0.3217 Acc: 0.8859 Pre: 0.8829 Recall: 0.9041 F1: 0.8934 Train AUC: 0.9953 Val AUC: 0.9553 Time: 13.39\n",
      "Epoch: 698 Train Loss: 0.0871 Val Loss: 0.3222 Acc: 0.8804 Pre: 0.8818 Recall: 0.8938 F1: 0.8878 Train AUC: 0.9962 Val AUC: 0.9552 Time: 13.66\n",
      "Epoch: 699 Train Loss: 0.0913 Val Loss: 0.3415 Acc: 0.8768 Pre: 0.8500 Recall: 0.9315 F1: 0.8889 Train AUC: 0.9962 Val AUC: 0.9556 Time: 14.50\n",
      "Epoch: 700 Train Loss: 0.1003 Val Loss: 0.3412 Acc: 0.8804 Pre: 0.8622 Recall: 0.9212 F1: 0.8907 Train AUC: 0.9951 Val AUC: 0.9554 Time: 14.85\n",
      "Epoch: 701 Train Loss: 0.0955 Val Loss: 0.3282 Acc: 0.8822 Pre: 0.8796 Recall: 0.9007 F1: 0.8900 Train AUC: 0.9952 Val AUC: 0.9559 Time: 14.04\n",
      "Epoch: 702 Train Loss: 0.0910 Val Loss: 0.3239 Acc: 0.8786 Pre: 0.8893 Recall: 0.8801 F1: 0.8847 Train AUC: 0.9956 Val AUC: 0.9557 Time: 13.04\n",
      "Epoch: 703 Train Loss: 0.0949 Val Loss: 0.3215 Acc: 0.8822 Pre: 0.8955 Recall: 0.8801 F1: 0.8877 Train AUC: 0.9954 Val AUC: 0.9570 Time: 12.93\n",
      "Epoch: 704 Train Loss: 0.0903 Val Loss: 0.3510 Acc: 0.8822 Pre: 0.8580 Recall: 0.9315 F1: 0.8933 Train AUC: 0.9966 Val AUC: 0.9555 Time: 13.23\n",
      "Epoch: 705 Train Loss: 0.1042 Val Loss: 0.3385 Acc: 0.8822 Pre: 0.8650 Recall: 0.9212 F1: 0.8922 Train AUC: 0.9952 Val AUC: 0.9558 Time: 13.98\n",
      "Epoch: 706 Train Loss: 0.0931 Val Loss: 0.3212 Acc: 0.8768 Pre: 0.8810 Recall: 0.8870 F1: 0.8840 Train AUC: 0.9963 Val AUC: 0.9562 Time: 14.76\n",
      "Epoch: 707 Train Loss: 0.0944 Val Loss: 0.3236 Acc: 0.8877 Pre: 0.8885 Recall: 0.9007 F1: 0.8946 Train AUC: 0.9954 Val AUC: 0.9566 Time: 15.49\n",
      "Epoch: 708 Train Loss: 0.1068 Val Loss: 0.3353 Acc: 0.8750 Pre: 0.8585 Recall: 0.9144 F1: 0.8856 Train AUC: 0.9933 Val AUC: 0.9550 Time: 14.28\n",
      "Epoch: 709 Train Loss: 0.0909 Val Loss: 0.3393 Acc: 0.8768 Pre: 0.8567 Recall: 0.9212 F1: 0.8878 Train AUC: 0.9962 Val AUC: 0.9519 Time: 13.06\n",
      "Epoch: 710 Train Loss: 0.0931 Val Loss: 0.3308 Acc: 0.8859 Pre: 0.8754 Recall: 0.9144 F1: 0.8945 Train AUC: 0.9957 Val AUC: 0.9529 Time: 12.15\n",
      "Epoch: 711 Train Loss: 0.0910 Val Loss: 0.3397 Acc: 0.8768 Pre: 0.8709 Recall: 0.9007 F1: 0.8855 Train AUC: 0.9962 Val AUC: 0.9548 Time: 12.48\n",
      "Epoch: 712 Train Loss: 0.0887 Val Loss: 0.3297 Acc: 0.8768 Pre: 0.8733 Recall: 0.8973 F1: 0.8851 Train AUC: 0.9958 Val AUC: 0.9566 Time: 12.97\n",
      "Epoch: 713 Train Loss: 0.0904 Val Loss: 0.3302 Acc: 0.8804 Pre: 0.8622 Recall: 0.9212 F1: 0.8907 Train AUC: 0.9956 Val AUC: 0.9562 Time: 13.55\n",
      "Epoch: 714 Train Loss: 0.0911 Val Loss: 0.3401 Acc: 0.8659 Pre: 0.8562 Recall: 0.8973 F1: 0.8763 Train AUC: 0.9951 Val AUC: 0.9543 Time: 13.97\n",
      "Epoch: 715 Train Loss: 0.0886 Val Loss: 0.3371 Acc: 0.8641 Pre: 0.8581 Recall: 0.8904 F1: 0.8739 Train AUC: 0.9958 Val AUC: 0.9536 Time: 14.56\n",
      "Epoch: 716 Train Loss: 0.0930 Val Loss: 0.3357 Acc: 0.8895 Pre: 0.8738 Recall: 0.9247 F1: 0.8985 Train AUC: 0.9954 Val AUC: 0.9553 Time: 14.81\n",
      "Epoch: 717 Train Loss: 0.0842 Val Loss: 0.3391 Acc: 0.8895 Pre: 0.8762 Recall: 0.9212 F1: 0.8982 Train AUC: 0.9961 Val AUC: 0.9550 Time: 14.89\n",
      "Epoch: 718 Train Loss: 0.0911 Val Loss: 0.3271 Acc: 0.8859 Pre: 0.8829 Recall: 0.9041 F1: 0.8934 Train AUC: 0.9955 Val AUC: 0.9538 Time: 14.07\n",
      "Epoch: 719 Train Loss: 0.0862 Val Loss: 0.3187 Acc: 0.8877 Pre: 0.8833 Recall: 0.9075 F1: 0.8953 Train AUC: 0.9961 Val AUC: 0.9560 Time: 13.10\n",
      "Epoch: 720 Train Loss: 0.0891 Val Loss: 0.3213 Acc: 0.8841 Pre: 0.8775 Recall: 0.9075 F1: 0.8923 Train AUC: 0.9957 Val AUC: 0.9568 Time: 13.16\n",
      "Epoch: 721 Train Loss: 0.0938 Val Loss: 0.3256 Acc: 0.8931 Pre: 0.8845 Recall: 0.9178 F1: 0.9008 Train AUC: 0.9956 Val AUC: 0.9579 Time: 13.66\n",
      "Epoch: 722 Train Loss: 0.0918 Val Loss: 0.3289 Acc: 0.8931 Pre: 0.8795 Recall: 0.9247 F1: 0.9015 Train AUC: 0.9956 Val AUC: 0.9582 Time: 14.12\n",
      "Epoch: 723 Train Loss: 0.0910 Val Loss: 0.3271 Acc: 0.8895 Pre: 0.8714 Recall: 0.9281 F1: 0.8988 Train AUC: 0.9956 Val AUC: 0.9570 Time: 14.47\n",
      "Epoch: 724 Train Loss: 0.0902 Val Loss: 0.3249 Acc: 0.8859 Pre: 0.8682 Recall: 0.9247 F1: 0.8955 Train AUC: 0.9965 Val AUC: 0.9542 Time: 14.28\n",
      "Epoch: 725 Train Loss: 0.0933 Val Loss: 0.3259 Acc: 0.8841 Pre: 0.8654 Recall: 0.9247 F1: 0.8940 Train AUC: 0.9954 Val AUC: 0.9546 Time: 13.97\n",
      "Epoch: 726 Train Loss: 0.0895 Val Loss: 0.3274 Acc: 0.8804 Pre: 0.8599 Recall: 0.9247 F1: 0.8911 Train AUC: 0.9958 Val AUC: 0.9565 Time: 13.53\n",
      "Epoch: 727 Train Loss: 0.0843 Val Loss: 0.3198 Acc: 0.8931 Pre: 0.8845 Recall: 0.9178 F1: 0.9008 Train AUC: 0.9966 Val AUC: 0.9579 Time: 13.43\n",
      "Epoch: 728 Train Loss: 0.0869 Val Loss: 0.3225 Acc: 0.8931 Pre: 0.8795 Recall: 0.9247 F1: 0.9015 Train AUC: 0.9962 Val AUC: 0.9581 Time: 14.06\n",
      "Epoch: 729 Train Loss: 0.0819 Val Loss: 0.3221 Acc: 0.8931 Pre: 0.8795 Recall: 0.9247 F1: 0.9015 Train AUC: 0.9966 Val AUC: 0.9575 Time: 13.89\n",
      "Epoch: 730 Train Loss: 0.0789 Val Loss: 0.3159 Acc: 0.8931 Pre: 0.8870 Recall: 0.9144 F1: 0.9005 Train AUC: 0.9973 Val AUC: 0.9572 Time: 13.87\n",
      "Epoch: 731 Train Loss: 0.0802 Val Loss: 0.3149 Acc: 0.8859 Pre: 0.8779 Recall: 0.9110 F1: 0.8941 Train AUC: 0.9972 Val AUC: 0.9558 Time: 14.50\n",
      "Epoch: 732 Train Loss: 0.0836 Val Loss: 0.3210 Acc: 0.8768 Pre: 0.8590 Recall: 0.9178 F1: 0.8874 Train AUC: 0.9969 Val AUC: 0.9570 Time: 13.66\n",
      "Epoch: 733 Train Loss: 0.0833 Val Loss: 0.3292 Acc: 0.8841 Pre: 0.8654 Recall: 0.9247 F1: 0.8940 Train AUC: 0.9970 Val AUC: 0.9582 Time: 13.31\n",
      "Epoch: 734 Train Loss: 0.0820 Val Loss: 0.3326 Acc: 0.8895 Pre: 0.8690 Recall: 0.9315 F1: 0.8992 Train AUC: 0.9965 Val AUC: 0.9583 Time: 13.76\n",
      "Epoch: 735 Train Loss: 0.0879 Val Loss: 0.3335 Acc: 0.8822 Pre: 0.8626 Recall: 0.9247 F1: 0.8926 Train AUC: 0.9955 Val AUC: 0.9577 Time: 14.27\n",
      "Epoch: 736 Train Loss: 0.0810 Val Loss: 0.3306 Acc: 0.8859 Pre: 0.8682 Recall: 0.9247 F1: 0.8955 Train AUC: 0.9967 Val AUC: 0.9554 Time: 14.01\n",
      "Epoch: 737 Train Loss: 0.0798 Val Loss: 0.3292 Acc: 0.8822 Pre: 0.8697 Recall: 0.9144 F1: 0.8915 Train AUC: 0.9970 Val AUC: 0.9536 Time: 13.41\n",
      "Epoch: 738 Train Loss: 0.0966 Val Loss: 0.3447 Acc: 0.8786 Pre: 0.8571 Recall: 0.9247 F1: 0.8896 Train AUC: 0.9963 Val AUC: 0.9562 Time: 13.56\n",
      "Epoch: 739 Train Loss: 0.0944 Val Loss: 0.3516 Acc: 0.8768 Pre: 0.8500 Recall: 0.9315 F1: 0.8889 Train AUC: 0.9950 Val AUC: 0.9581 Time: 14.18\n",
      "Epoch: 740 Train Loss: 0.0948 Val Loss: 0.3294 Acc: 0.8804 Pre: 0.8576 Recall: 0.9281 F1: 0.8914 Train AUC: 0.9959 Val AUC: 0.9578 Time: 14.36\n",
      "Epoch: 741 Train Loss: 0.0794 Val Loss: 0.3270 Acc: 0.8714 Pre: 0.8623 Recall: 0.9007 F1: 0.8811 Train AUC: 0.9971 Val AUC: 0.9562 Time: 13.72\n",
      "Epoch: 742 Train Loss: 0.1070 Val Loss: 0.3217 Acc: 0.8841 Pre: 0.8851 Recall: 0.8973 F1: 0.8912 Train AUC: 0.9938 Val AUC: 0.9570 Time: 13.31\n",
      "Epoch: 743 Train Loss: 0.0870 Val Loss: 0.3452 Acc: 0.8859 Pre: 0.8682 Recall: 0.9247 F1: 0.8955 Train AUC: 0.9967 Val AUC: 0.9572 Time: 13.43\n",
      "Epoch: 744 Train Loss: 0.0942 Val Loss: 0.3409 Acc: 0.8822 Pre: 0.8697 Recall: 0.9144 F1: 0.8915 Train AUC: 0.9954 Val AUC: 0.9550 Time: 13.88\n",
      "Epoch: 745 Train Loss: 0.0892 Val Loss: 0.3306 Acc: 0.8786 Pre: 0.8866 Recall: 0.8836 F1: 0.8851 Train AUC: 0.9960 Val AUC: 0.9519 Time: 14.58\n",
      "Epoch: 746 Train Loss: 0.0890 Val Loss: 0.3262 Acc: 0.8750 Pre: 0.8780 Recall: 0.8870 F1: 0.8825 Train AUC: 0.9962 Val AUC: 0.9535 Time: 15.67\n",
      "Epoch: 747 Train Loss: 0.0826 Val Loss: 0.3377 Acc: 0.8841 Pre: 0.8608 Recall: 0.9315 F1: 0.8947 Train AUC: 0.9970 Val AUC: 0.9553 Time: 14.52\n",
      "Epoch: 748 Train Loss: 0.0858 Val Loss: 0.3463 Acc: 0.8804 Pre: 0.8553 Recall: 0.9315 F1: 0.8918 Train AUC: 0.9963 Val AUC: 0.9576 Time: 13.04\n",
      "Epoch: 749 Train Loss: 0.0999 Val Loss: 0.3370 Acc: 0.8822 Pre: 0.8650 Recall: 0.9212 F1: 0.8922 Train AUC: 0.9948 Val AUC: 0.9566 Time: 12.88\n",
      "Epoch: 750 Train Loss: 0.0866 Val Loss: 0.3341 Acc: 0.8768 Pre: 0.8684 Recall: 0.9041 F1: 0.8859 Train AUC: 0.9959 Val AUC: 0.9550 Time: 12.86\n",
      "Epoch: 751 Train Loss: 0.0867 Val Loss: 0.3405 Acc: 0.8750 Pre: 0.8632 Recall: 0.9075 F1: 0.8848 Train AUC: 0.9962 Val AUC: 0.9527 Time: 13.08\n",
      "Epoch: 752 Train Loss: 0.0855 Val Loss: 0.3477 Acc: 0.8786 Pre: 0.8571 Recall: 0.9247 F1: 0.8896 Train AUC: 0.9962 Val AUC: 0.9540 Time: 13.42\n",
      "Epoch: 753 Train Loss: 0.0842 Val Loss: 0.3624 Acc: 0.8804 Pre: 0.8645 Recall: 0.9178 F1: 0.8904 Train AUC: 0.9964 Val AUC: 0.9536 Time: 14.36\n",
      "Epoch: 754 Train Loss: 0.0899 Val Loss: 0.3446 Acc: 0.8841 Pre: 0.8750 Recall: 0.9110 F1: 0.8926 Train AUC: 0.9960 Val AUC: 0.9559 Time: 14.87\n",
      "Epoch: 755 Train Loss: 0.0846 Val Loss: 0.3294 Acc: 0.8768 Pre: 0.8889 Recall: 0.8767 F1: 0.8828 Train AUC: 0.9962 Val AUC: 0.9568 Time: 15.71\n",
      "Epoch: 756 Train Loss: 0.0854 Val Loss: 0.3284 Acc: 0.8732 Pre: 0.8675 Recall: 0.8973 F1: 0.8822 Train AUC: 0.9967 Val AUC: 0.9562 Time: 14.85\n",
      "Epoch: 757 Train Loss: 0.0969 Val Loss: 0.3505 Acc: 0.8822 Pre: 0.8558 Recall: 0.9349 F1: 0.8936 Train AUC: 0.9955 Val AUC: 0.9540 Time: 13.52\n",
      "Epoch: 758 Train Loss: 0.0850 Val Loss: 0.3523 Acc: 0.8750 Pre: 0.8474 Recall: 0.9315 F1: 0.8874 Train AUC: 0.9970 Val AUC: 0.9556 Time: 13.43\n",
      "Epoch: 759 Train Loss: 0.0907 Val Loss: 0.3399 Acc: 0.8859 Pre: 0.8804 Recall: 0.9075 F1: 0.8938 Train AUC: 0.9969 Val AUC: 0.9562 Time: 13.28\n",
      "Epoch: 760 Train Loss: 0.0965 Val Loss: 0.3340 Acc: 0.8678 Pre: 0.8842 Recall: 0.8630 F1: 0.8735 Train AUC: 0.9953 Val AUC: 0.9545 Time: 13.27\n",
      "Epoch: 761 Train Loss: 0.1014 Val Loss: 0.3294 Acc: 0.8732 Pre: 0.8750 Recall: 0.8870 F1: 0.8810 Train AUC: 0.9954 Val AUC: 0.9517 Time: 13.74\n",
      "Epoch: 762 Train Loss: 0.0984 Val Loss: 0.3656 Acc: 0.8659 Pre: 0.8364 Recall: 0.9281 F1: 0.8799 Train AUC: 0.9958 Val AUC: 0.9481 Time: 14.33\n",
      "Epoch: 763 Train Loss: 0.0954 Val Loss: 0.3606 Acc: 0.8877 Pre: 0.8662 Recall: 0.9315 F1: 0.8977 Train AUC: 0.9962 Val AUC: 0.9574 Time: 14.41\n",
      "Epoch: 764 Train Loss: 0.0984 Val Loss: 0.3427 Acc: 0.8967 Pre: 0.8930 Recall: 0.9144 F1: 0.9036 Train AUC: 0.9955 Val AUC: 0.9586 Time: 13.99\n",
      "Epoch: 765 Train Loss: 0.0897 Val Loss: 0.3409 Acc: 0.8678 Pre: 0.8815 Recall: 0.8664 F1: 0.8739 Train AUC: 0.9953 Val AUC: 0.9580 Time: 13.35\n",
      "Epoch: 766 Train Loss: 0.1091 Val Loss: 0.3340 Acc: 0.8859 Pre: 0.8804 Recall: 0.9075 F1: 0.8938 Train AUC: 0.9945 Val AUC: 0.9558 Time: 13.55\n",
      "Epoch: 767 Train Loss: 0.0929 Val Loss: 0.3878 Acc: 0.8750 Pre: 0.8431 Recall: 0.9384 F1: 0.8882 Train AUC: 0.9952 Val AUC: 0.9479 Time: 14.22\n",
      "Epoch: 768 Train Loss: 0.1063 Val Loss: 0.3557 Acc: 0.8768 Pre: 0.8500 Recall: 0.9315 F1: 0.8889 Train AUC: 0.9953 Val AUC: 0.9538 Time: 14.47\n",
      "Epoch: 769 Train Loss: 0.0837 Val Loss: 0.3301 Acc: 0.8859 Pre: 0.8881 Recall: 0.8973 F1: 0.8927 Train AUC: 0.9971 Val AUC: 0.9570 Time: 13.41\n",
      "Epoch: 770 Train Loss: 0.0901 Val Loss: 0.3335 Acc: 0.8768 Pre: 0.8916 Recall: 0.8733 F1: 0.8824 Train AUC: 0.9954 Val AUC: 0.9568 Time: 13.27\n",
      "Epoch: 771 Train Loss: 0.0979 Val Loss: 0.3275 Acc: 0.8913 Pre: 0.8893 Recall: 0.9075 F1: 0.8983 Train AUC: 0.9948 Val AUC: 0.9555 Time: 13.86\n",
      "Epoch: 772 Train Loss: 0.0840 Val Loss: 0.3511 Acc: 0.8841 Pre: 0.8608 Recall: 0.9315 F1: 0.8947 Train AUC: 0.9964 Val AUC: 0.9519 Time: 14.62\n",
      "Epoch: 773 Train Loss: 0.0934 Val Loss: 0.3529 Acc: 0.8822 Pre: 0.8558 Recall: 0.9349 F1: 0.8936 Train AUC: 0.9963 Val AUC: 0.9506 Time: 13.85\n",
      "Epoch: 774 Train Loss: 0.0899 Val Loss: 0.3319 Acc: 0.8895 Pre: 0.8762 Recall: 0.9212 F1: 0.8982 Train AUC: 0.9972 Val AUC: 0.9550 Time: 13.72\n",
      "Epoch: 775 Train Loss: 0.0847 Val Loss: 0.3318 Acc: 0.8822 Pre: 0.8927 Recall: 0.8836 F1: 0.8881 Train AUC: 0.9964 Val AUC: 0.9567 Time: 14.29\n",
      "Epoch: 776 Train Loss: 0.0935 Val Loss: 0.3362 Acc: 0.8986 Pre: 0.8933 Recall: 0.9178 F1: 0.9054 Train AUC: 0.9957 Val AUC: 0.9573 Time: 13.50\n",
      "Epoch: 777 Train Loss: 0.0831 Val Loss: 0.3482 Acc: 0.8931 Pre: 0.8746 Recall: 0.9315 F1: 0.9022 Train AUC: 0.9962 Val AUC: 0.9564 Time: 13.06\n",
      "Epoch: 778 Train Loss: 0.0865 Val Loss: 0.3504 Acc: 0.8822 Pre: 0.8580 Recall: 0.9315 F1: 0.8933 Train AUC: 0.9962 Val AUC: 0.9540 Time: 13.50\n",
      "Epoch: 779 Train Loss: 0.0879 Val Loss: 0.3367 Acc: 0.8822 Pre: 0.8746 Recall: 0.9075 F1: 0.8908 Train AUC: 0.9964 Val AUC: 0.9515 Time: 14.25\n",
      "Epoch: 780 Train Loss: 0.0850 Val Loss: 0.3367 Acc: 0.8732 Pre: 0.8750 Recall: 0.8870 F1: 0.8810 Train AUC: 0.9964 Val AUC: 0.9498 Time: 14.80\n",
      "Epoch: 781 Train Loss: 0.0843 Val Loss: 0.3345 Acc: 0.8696 Pre: 0.8642 Recall: 0.8938 F1: 0.8788 Train AUC: 0.9972 Val AUC: 0.9515 Time: 14.47\n",
      "Epoch: 782 Train Loss: 0.0868 Val Loss: 0.3418 Acc: 0.8841 Pre: 0.8701 Recall: 0.9178 F1: 0.8933 Train AUC: 0.9963 Val AUC: 0.9544 Time: 13.72\n",
      "Epoch: 783 Train Loss: 0.0910 Val Loss: 0.3552 Acc: 0.8877 Pre: 0.8662 Recall: 0.9315 F1: 0.8977 Train AUC: 0.9950 Val AUC: 0.9562 Time: 13.36\n",
      "Epoch: 784 Train Loss: 0.0930 Val Loss: 0.3381 Acc: 0.8859 Pre: 0.8730 Recall: 0.9178 F1: 0.8948 Train AUC: 0.9958 Val AUC: 0.9567 Time: 13.19\n",
      "Epoch: 785 Train Loss: 0.0772 Val Loss: 0.3325 Acc: 0.8768 Pre: 0.8862 Recall: 0.8801 F1: 0.8832 Train AUC: 0.9971 Val AUC: 0.9560 Time: 13.23\n",
      "Epoch: 786 Train Loss: 0.0992 Val Loss: 0.3339 Acc: 0.8786 Pre: 0.8814 Recall: 0.8904 F1: 0.8859 Train AUC: 0.9949 Val AUC: 0.9559 Time: 13.83\n",
      "Epoch: 787 Train Loss: 0.0753 Val Loss: 0.3433 Acc: 0.8750 Pre: 0.8517 Recall: 0.9247 F1: 0.8867 Train AUC: 0.9974 Val AUC: 0.9551 Time: 14.34\n",
      "Epoch: 788 Train Loss: 0.0853 Val Loss: 0.3511 Acc: 0.8804 Pre: 0.8553 Recall: 0.9315 F1: 0.8918 Train AUC: 0.9965 Val AUC: 0.9548 Time: 14.34\n",
      "Epoch: 789 Train Loss: 0.0870 Val Loss: 0.3457 Acc: 0.8913 Pre: 0.8718 Recall: 0.9315 F1: 0.9007 Train AUC: 0.9963 Val AUC: 0.9555 Time: 13.80\n",
      "Epoch: 790 Train Loss: 0.0775 Val Loss: 0.3443 Acc: 0.8841 Pre: 0.8775 Recall: 0.9075 F1: 0.8923 Train AUC: 0.9970 Val AUC: 0.9553 Time: 13.59\n",
      "Epoch: 791 Train Loss: 0.0914 Val Loss: 0.3384 Acc: 0.8841 Pre: 0.8701 Recall: 0.9178 F1: 0.8933 Train AUC: 0.9953 Val AUC: 0.9546 Time: 13.39\n",
      "Epoch: 792 Train Loss: 0.0851 Val Loss: 0.3524 Acc: 0.8750 Pre: 0.8585 Recall: 0.9144 F1: 0.8856 Train AUC: 0.9964 Val AUC: 0.9498 Time: 13.37\n",
      "Epoch: 793 Train Loss: 0.0908 Val Loss: 0.3436 Acc: 0.8804 Pre: 0.8553 Recall: 0.9315 F1: 0.8918 Train AUC: 0.9962 Val AUC: 0.9554 Time: 13.44\n",
      "Epoch: 794 Train Loss: 0.0866 Val Loss: 0.3397 Acc: 0.8913 Pre: 0.8791 Recall: 0.9212 F1: 0.8997 Train AUC: 0.9960 Val AUC: 0.9585 Time: 14.03\n",
      "Epoch: 795 Train Loss: 0.0920 Val Loss: 0.3409 Acc: 0.8804 Pre: 0.8742 Recall: 0.9041 F1: 0.8889 Train AUC: 0.9953 Val AUC: 0.9589 Time: 14.69\n",
      "Epoch: 796 Train Loss: 0.0906 Val Loss: 0.3289 Acc: 0.8841 Pre: 0.8800 Recall: 0.9041 F1: 0.8919 Train AUC: 0.9955 Val AUC: 0.9575 Time: 15.11\n",
      "Epoch: 797 Train Loss: 0.0837 Val Loss: 0.3316 Acc: 0.8859 Pre: 0.8658 Recall: 0.9281 F1: 0.8959 Train AUC: 0.9965 Val AUC: 0.9549 Time: 13.49\n",
      "Epoch: 798 Train Loss: 0.0839 Val Loss: 0.3350 Acc: 0.8750 Pre: 0.8562 Recall: 0.9178 F1: 0.8860 Train AUC: 0.9971 Val AUC: 0.9530 Time: 12.81\n",
      "Epoch: 799 Train Loss: 0.0883 Val Loss: 0.3305 Acc: 0.8822 Pre: 0.8822 Recall: 0.8973 F1: 0.8896 Train AUC: 0.9966 Val AUC: 0.9536 Time: 13.21\n",
      "Epoch: 800 Train Loss: 0.0824 Val Loss: 0.3355 Acc: 0.8877 Pre: 0.8833 Recall: 0.9075 F1: 0.8953 Train AUC: 0.9966 Val AUC: 0.9545 Time: 13.67\n",
      "Epoch: 801 Train Loss: 0.0917 Val Loss: 0.3420 Acc: 0.8895 Pre: 0.8787 Recall: 0.9178 F1: 0.8978 Train AUC: 0.9955 Val AUC: 0.9549 Time: 14.28\n",
      "Epoch: 802 Train Loss: 0.0884 Val Loss: 0.3369 Acc: 0.8931 Pre: 0.8845 Recall: 0.9178 F1: 0.9008 Train AUC: 0.9961 Val AUC: 0.9565 Time: 14.76\n",
      "Epoch: 803 Train Loss: 0.0840 Val Loss: 0.3332 Acc: 0.8804 Pre: 0.8645 Recall: 0.9178 F1: 0.8904 Train AUC: 0.9964 Val AUC: 0.9558 Time: 13.73\n",
      "Epoch: 804 Train Loss: 0.0787 Val Loss: 0.3338 Acc: 0.8768 Pre: 0.8613 Recall: 0.9144 F1: 0.8870 Train AUC: 0.9970 Val AUC: 0.9552 Time: 13.01\n",
      "Epoch: 805 Train Loss: 0.0878 Val Loss: 0.3402 Acc: 0.8822 Pre: 0.8603 Recall: 0.9281 F1: 0.8929 Train AUC: 0.9966 Val AUC: 0.9560 Time: 12.86\n",
      "Epoch: 806 Train Loss: 0.0861 Val Loss: 0.3424 Acc: 0.8895 Pre: 0.8762 Recall: 0.9212 F1: 0.8982 Train AUC: 0.9962 Val AUC: 0.9570 Time: 13.40\n",
      "Epoch: 807 Train Loss: 0.0814 Val Loss: 0.3353 Acc: 0.8895 Pre: 0.8863 Recall: 0.9075 F1: 0.8968 Train AUC: 0.9966 Val AUC: 0.9566 Time: 14.01\n",
      "Epoch: 808 Train Loss: 0.0793 Val Loss: 0.3334 Acc: 0.8714 Pre: 0.8671 Recall: 0.8938 F1: 0.8803 Train AUC: 0.9967 Val AUC: 0.9544 Time: 14.56\n",
      "Epoch: 809 Train Loss: 0.0814 Val Loss: 0.3457 Acc: 0.8714 Pre: 0.8553 Recall: 0.9110 F1: 0.8823 Train AUC: 0.9970 Val AUC: 0.9508 Time: 15.56\n",
      "Epoch: 810 Train Loss: 0.0802 Val Loss: 0.3478 Acc: 0.8841 Pre: 0.8631 Recall: 0.9281 F1: 0.8944 Train AUC: 0.9971 Val AUC: 0.9507 Time: 14.32\n",
      "Epoch: 811 Train Loss: 0.0845 Val Loss: 0.3436 Acc: 0.8768 Pre: 0.8636 Recall: 0.9110 F1: 0.8867 Train AUC: 0.9963 Val AUC: 0.9527 Time: 13.08\n",
      "Epoch: 812 Train Loss: 0.0838 Val Loss: 0.3459 Acc: 0.8804 Pre: 0.8742 Recall: 0.9041 F1: 0.8889 Train AUC: 0.9963 Val AUC: 0.9541 Time: 12.46\n",
      "Epoch: 813 Train Loss: 0.0799 Val Loss: 0.3452 Acc: 0.8895 Pre: 0.8787 Recall: 0.9178 F1: 0.8978 Train AUC: 0.9968 Val AUC: 0.9562 Time: 13.17\n",
      "Epoch: 814 Train Loss: 0.0893 Val Loss: 0.3463 Acc: 0.8804 Pre: 0.8576 Recall: 0.9281 F1: 0.8914 Train AUC: 0.9957 Val AUC: 0.9571 Time: 13.70\n",
      "Epoch: 815 Train Loss: 0.0843 Val Loss: 0.3415 Acc: 0.8804 Pre: 0.8693 Recall: 0.9110 F1: 0.8896 Train AUC: 0.9964 Val AUC: 0.9581 Time: 14.29\n",
      "Epoch: 816 Train Loss: 0.0891 Val Loss: 0.3328 Acc: 0.8913 Pre: 0.8946 Recall: 0.9007 F1: 0.8976 Train AUC: 0.9959 Val AUC: 0.9571 Time: 15.01\n",
      "Epoch: 817 Train Loss: 0.0884 Val Loss: 0.3371 Acc: 0.8841 Pre: 0.8851 Recall: 0.8973 F1: 0.8912 Train AUC: 0.9962 Val AUC: 0.9549 Time: 14.78\n",
      "Epoch: 818 Train Loss: 0.0820 Val Loss: 0.3519 Acc: 0.8714 Pre: 0.8599 Recall: 0.9041 F1: 0.8815 Train AUC: 0.9964 Val AUC: 0.9479 Time: 14.35\n",
      "Epoch: 819 Train Loss: 0.0826 Val Loss: 0.3545 Acc: 0.8732 Pre: 0.8581 Recall: 0.9110 F1: 0.8837 Train AUC: 0.9970 Val AUC: 0.9475 Time: 13.29\n",
      "Epoch: 820 Train Loss: 0.0805 Val Loss: 0.3516 Acc: 0.8822 Pre: 0.8650 Recall: 0.9212 F1: 0.8922 Train AUC: 0.9977 Val AUC: 0.9538 Time: 12.62\n",
      "Epoch: 821 Train Loss: 0.0864 Val Loss: 0.3399 Acc: 0.8877 Pre: 0.8783 Recall: 0.9144 F1: 0.8960 Train AUC: 0.9964 Val AUC: 0.9580 Time: 12.70\n",
      "Epoch: 822 Train Loss: 0.0864 Val Loss: 0.3289 Acc: 0.8895 Pre: 0.8915 Recall: 0.9007 F1: 0.8961 Train AUC: 0.9957 Val AUC: 0.9582 Time: 13.52\n",
      "Epoch: 823 Train Loss: 0.0844 Val Loss: 0.3334 Acc: 0.8804 Pre: 0.8792 Recall: 0.8973 F1: 0.8881 Train AUC: 0.9967 Val AUC: 0.9579 Time: 13.86\n",
      "Epoch: 824 Train Loss: 0.0879 Val Loss: 0.3463 Acc: 0.8877 Pre: 0.8734 Recall: 0.9212 F1: 0.8967 Train AUC: 0.9957 Val AUC: 0.9572 Time: 14.40\n",
      "Epoch: 825 Train Loss: 0.0895 Val Loss: 0.3414 Acc: 0.8822 Pre: 0.8603 Recall: 0.9281 F1: 0.8929 Train AUC: 0.9962 Val AUC: 0.9569 Time: 15.33\n",
      "Epoch: 826 Train Loss: 0.0806 Val Loss: 0.3348 Acc: 0.8768 Pre: 0.8660 Recall: 0.9075 F1: 0.8863 Train AUC: 0.9973 Val AUC: 0.9538 Time: 16.01\n",
      "Epoch: 827 Train Loss: 0.0787 Val Loss: 0.3427 Acc: 0.8804 Pre: 0.8742 Recall: 0.9041 F1: 0.8889 Train AUC: 0.9970 Val AUC: 0.9513 Time: 13.84\n",
      "Epoch: 828 Train Loss: 0.0820 Val Loss: 0.3488 Acc: 0.8841 Pre: 0.8725 Recall: 0.9144 F1: 0.8930 Train AUC: 0.9965 Val AUC: 0.9497 Time: 12.91\n",
      "Epoch: 829 Train Loss: 0.0910 Val Loss: 0.3478 Acc: 0.8804 Pre: 0.8645 Recall: 0.9178 F1: 0.8904 Train AUC: 0.9955 Val AUC: 0.9518 Time: 11.96\n",
      "Epoch: 830 Train Loss: 0.0837 Val Loss: 0.3481 Acc: 0.8786 Pre: 0.8571 Recall: 0.9247 F1: 0.8896 Train AUC: 0.9963 Val AUC: 0.9551 Time: 11.78\n",
      "Epoch: 831 Train Loss: 0.0876 Val Loss: 0.3378 Acc: 0.8859 Pre: 0.8706 Recall: 0.9212 F1: 0.8952 Train AUC: 0.9961 Val AUC: 0.9573 Time: 12.28\n",
      "Epoch: 832 Train Loss: 0.0845 Val Loss: 0.3374 Acc: 0.8859 Pre: 0.8682 Recall: 0.9247 F1: 0.8955 Train AUC: 0.9961 Val AUC: 0.9573 Time: 12.69\n",
      "Epoch: 833 Train Loss: 0.0849 Val Loss: 0.3414 Acc: 0.8841 Pre: 0.8608 Recall: 0.9315 F1: 0.8947 Train AUC: 0.9959 Val AUC: 0.9570 Time: 13.43\n",
      "Epoch: 834 Train Loss: 0.0869 Val Loss: 0.3350 Acc: 0.8931 Pre: 0.8845 Recall: 0.9178 F1: 0.9008 Train AUC: 0.9961 Val AUC: 0.9575 Time: 13.73\n",
      "Epoch: 835 Train Loss: 0.0882 Val Loss: 0.3371 Acc: 0.8913 Pre: 0.8893 Recall: 0.9075 F1: 0.8983 Train AUC: 0.9958 Val AUC: 0.9563 Time: 14.33\n",
      "Epoch: 836 Train Loss: 0.0867 Val Loss: 0.3555 Acc: 0.8877 Pre: 0.8734 Recall: 0.9212 F1: 0.8967 Train AUC: 0.9962 Val AUC: 0.9528 Time: 15.27\n",
      "Epoch: 837 Train Loss: 0.0839 Val Loss: 0.3636 Acc: 0.8732 Pre: 0.8558 Recall: 0.9144 F1: 0.8841 Train AUC: 0.9963 Val AUC: 0.9503 Time: 16.07\n",
      "Epoch: 838 Train Loss: 0.0915 Val Loss: 0.3447 Acc: 0.8678 Pre: 0.8567 Recall: 0.9007 F1: 0.8781 Train AUC: 0.9961 Val AUC: 0.9534 Time: 14.75\n",
      "Epoch: 839 Train Loss: 0.0882 Val Loss: 0.3322 Acc: 0.8714 Pre: 0.8647 Recall: 0.8973 F1: 0.8807 Train AUC: 0.9958 Val AUC: 0.9549 Time: 13.45\n",
      "Epoch: 840 Train Loss: 0.0836 Val Loss: 0.3391 Acc: 0.8804 Pre: 0.8669 Recall: 0.9144 F1: 0.8900 Train AUC: 0.9971 Val AUC: 0.9550 Time: 12.90\n",
      "Epoch: 841 Train Loss: 0.0793 Val Loss: 0.3473 Acc: 0.8786 Pre: 0.8571 Recall: 0.9247 F1: 0.8896 Train AUC: 0.9975 Val AUC: 0.9553 Time: 13.49\n",
      "Epoch: 842 Train Loss: 0.0852 Val Loss: 0.3349 Acc: 0.8841 Pre: 0.8701 Recall: 0.9178 F1: 0.8933 Train AUC: 0.9968 Val AUC: 0.9582 Time: 14.23\n",
      "Epoch: 843 Train Loss: 0.0747 Val Loss: 0.3394 Acc: 0.8786 Pre: 0.8814 Recall: 0.8904 F1: 0.8859 Train AUC: 0.9977 Val AUC: 0.9585 Time: 14.13\n",
      "Epoch: 844 Train Loss: 0.0877 Val Loss: 0.3380 Acc: 0.8786 Pre: 0.8713 Recall: 0.9041 F1: 0.8874 Train AUC: 0.9960 Val AUC: 0.9574 Time: 14.54\n",
      "Epoch: 845 Train Loss: 0.0750 Val Loss: 0.3455 Acc: 0.8768 Pre: 0.8544 Recall: 0.9247 F1: 0.8882 Train AUC: 0.9968 Val AUC: 0.9533 Time: 14.55\n",
      "Epoch: 846 Train Loss: 0.0803 Val Loss: 0.3583 Acc: 0.8804 Pre: 0.8599 Recall: 0.9247 F1: 0.8911 Train AUC: 0.9969 Val AUC: 0.9487 Time: 13.59\n",
      "Epoch: 847 Train Loss: 0.0810 Val Loss: 0.3498 Acc: 0.8859 Pre: 0.8635 Recall: 0.9315 F1: 0.8962 Train AUC: 0.9974 Val AUC: 0.9527 Time: 13.04\n",
      "Epoch: 848 Train Loss: 0.0801 Val Loss: 0.3417 Acc: 0.8841 Pre: 0.8800 Recall: 0.9041 F1: 0.8919 Train AUC: 0.9972 Val AUC: 0.9570 Time: 12.60\n",
      "Epoch: 849 Train Loss: 0.0830 Val Loss: 0.3483 Acc: 0.8931 Pre: 0.8845 Recall: 0.9178 F1: 0.9008 Train AUC: 0.9961 Val AUC: 0.9580 Time: 13.34\n",
      "Epoch: 850 Train Loss: 0.0819 Val Loss: 0.3444 Acc: 0.8913 Pre: 0.8718 Recall: 0.9315 F1: 0.9007 Train AUC: 0.9961 Val AUC: 0.9579 Time: 13.69\n",
      "Epoch: 851 Train Loss: 0.0827 Val Loss: 0.3370 Acc: 0.8768 Pre: 0.8613 Recall: 0.9144 F1: 0.8870 Train AUC: 0.9965 Val AUC: 0.9526 Time: 14.51\n",
      "Epoch: 852 Train Loss: 0.0789 Val Loss: 0.3379 Acc: 0.8750 Pre: 0.8680 Recall: 0.9007 F1: 0.8840 Train AUC: 0.9975 Val AUC: 0.9500 Time: 14.94\n",
      "Epoch: 853 Train Loss: 0.0873 Val Loss: 0.3381 Acc: 0.8895 Pre: 0.8812 Recall: 0.9144 F1: 0.8975 Train AUC: 0.9966 Val AUC: 0.9547 Time: 14.57\n",
      "Epoch: 854 Train Loss: 0.0802 Val Loss: 0.3448 Acc: 0.8841 Pre: 0.8800 Recall: 0.9041 F1: 0.8919 Train AUC: 0.9968 Val AUC: 0.9553 Time: 13.53\n",
      "Epoch: 855 Train Loss: 0.0861 Val Loss: 0.3359 Acc: 0.8804 Pre: 0.8767 Recall: 0.9007 F1: 0.8885 Train AUC: 0.9960 Val AUC: 0.9550 Time: 12.87\n",
      "Epoch: 856 Train Loss: 0.0863 Val Loss: 0.3444 Acc: 0.8696 Pre: 0.8642 Recall: 0.8938 F1: 0.8788 Train AUC: 0.9955 Val AUC: 0.9495 Time: 12.88\n",
      "Epoch: 857 Train Loss: 0.0810 Val Loss: 0.3540 Acc: 0.8623 Pre: 0.8600 Recall: 0.8836 F1: 0.8716 Train AUC: 0.9970 Val AUC: 0.9456 Time: 13.35\n",
      "Epoch: 858 Train Loss: 0.0965 Val Loss: 0.3347 Acc: 0.8841 Pre: 0.8654 Recall: 0.9247 F1: 0.8940 Train AUC: 0.9960 Val AUC: 0.9568 Time: 13.80\n",
      "Epoch: 859 Train Loss: 0.0739 Val Loss: 0.3629 Acc: 0.8877 Pre: 0.8758 Recall: 0.9178 F1: 0.8963 Train AUC: 0.9977 Val AUC: 0.9573 Time: 14.70\n",
      "Epoch: 860 Train Loss: 0.0944 Val Loss: 0.3627 Acc: 0.8913 Pre: 0.8766 Recall: 0.9247 F1: 0.9000 Train AUC: 0.9952 Val AUC: 0.9576 Time: 15.61\n",
      "Epoch: 861 Train Loss: 0.0960 Val Loss: 0.3381 Acc: 0.8859 Pre: 0.8804 Recall: 0.9075 F1: 0.8938 Train AUC: 0.9954 Val AUC: 0.9570 Time: 14.54\n",
      "Epoch: 862 Train Loss: 0.0870 Val Loss: 0.3536 Acc: 0.8678 Pre: 0.8567 Recall: 0.9007 F1: 0.8781 Train AUC: 0.9959 Val AUC: 0.9523 Time: 13.32\n",
      "Epoch: 863 Train Loss: 0.0897 Val Loss: 0.3526 Acc: 0.8696 Pre: 0.8571 Recall: 0.9041 F1: 0.8800 Train AUC: 0.9959 Val AUC: 0.9523 Time: 12.92\n",
      "Epoch: 864 Train Loss: 0.0876 Val Loss: 0.3479 Acc: 0.8877 Pre: 0.8783 Recall: 0.9144 F1: 0.8960 Train AUC: 0.9962 Val AUC: 0.9539 Time: 13.08\n",
      "Epoch: 865 Train Loss: 0.0798 Val Loss: 0.3545 Acc: 0.8786 Pre: 0.8788 Recall: 0.8938 F1: 0.8862 Train AUC: 0.9965 Val AUC: 0.9536 Time: 13.66\n",
      "Epoch: 866 Train Loss: 0.0851 Val Loss: 0.3449 Acc: 0.8859 Pre: 0.8829 Recall: 0.9041 F1: 0.8934 Train AUC: 0.9959 Val AUC: 0.9535 Time: 14.29\n",
      "Epoch: 867 Train Loss: 0.0827 Val Loss: 0.3422 Acc: 0.8732 Pre: 0.8627 Recall: 0.9041 F1: 0.8829 Train AUC: 0.9963 Val AUC: 0.9518 Time: 14.88\n",
      "Epoch: 868 Train Loss: 0.0801 Val Loss: 0.3482 Acc: 0.8696 Pre: 0.8642 Recall: 0.8938 F1: 0.8788 Train AUC: 0.9972 Val AUC: 0.9501 Time: 14.70\n",
      "Epoch: 869 Train Loss: 0.0891 Val Loss: 0.3332 Acc: 0.8678 Pre: 0.8614 Recall: 0.8938 F1: 0.8773 Train AUC: 0.9963 Val AUC: 0.9542 Time: 13.33\n",
      "Epoch: 870 Train Loss: 0.0855 Val Loss: 0.3390 Acc: 0.8913 Pre: 0.8816 Recall: 0.9178 F1: 0.8993 Train AUC: 0.9969 Val AUC: 0.9567 Time: 12.68\n",
      "Epoch: 871 Train Loss: 0.0801 Val Loss: 0.3512 Acc: 0.8859 Pre: 0.8730 Recall: 0.9178 F1: 0.8948 Train AUC: 0.9966 Val AUC: 0.9569 Time: 13.10\n",
      "Epoch: 872 Train Loss: 0.0890 Val Loss: 0.3435 Acc: 0.8931 Pre: 0.8746 Recall: 0.9315 F1: 0.9022 Train AUC: 0.9956 Val AUC: 0.9567 Time: 13.20\n",
      "Epoch: 873 Train Loss: 0.0849 Val Loss: 0.3418 Acc: 0.8641 Pre: 0.8581 Recall: 0.8904 F1: 0.8739 Train AUC: 0.9960 Val AUC: 0.9532 Time: 13.85\n",
      "Epoch: 874 Train Loss: 0.0905 Val Loss: 0.3510 Acc: 0.8714 Pre: 0.8553 Recall: 0.9110 F1: 0.8823 Train AUC: 0.9966 Val AUC: 0.9515 Time: 14.43\n",
      "Epoch: 875 Train Loss: 0.0842 Val Loss: 0.3520 Acc: 0.8822 Pre: 0.8536 Recall: 0.9384 F1: 0.8940 Train AUC: 0.9966 Val AUC: 0.9530 Time: 15.04\n",
      "Epoch: 876 Train Loss: 0.0800 Val Loss: 0.3523 Acc: 0.8913 Pre: 0.8718 Recall: 0.9315 F1: 0.9007 Train AUC: 0.9976 Val AUC: 0.9552 Time: 14.95\n",
      "Epoch: 877 Train Loss: 0.0803 Val Loss: 0.3457 Acc: 0.8931 Pre: 0.8870 Recall: 0.9144 F1: 0.9005 Train AUC: 0.9969 Val AUC: 0.9565 Time: 13.37\n",
      "Epoch: 878 Train Loss: 0.0876 Val Loss: 0.3312 Acc: 0.8804 Pre: 0.8844 Recall: 0.8904 F1: 0.8874 Train AUC: 0.9958 Val AUC: 0.9552 Time: 12.46\n",
      "Epoch: 879 Train Loss: 0.0838 Val Loss: 0.3397 Acc: 0.8696 Pre: 0.8618 Recall: 0.8973 F1: 0.8792 Train AUC: 0.9966 Val AUC: 0.9518 Time: 13.06\n",
      "Epoch: 880 Train Loss: 0.0833 Val Loss: 0.3502 Acc: 0.8732 Pre: 0.8581 Recall: 0.9110 F1: 0.8837 Train AUC: 0.9970 Val AUC: 0.9519 Time: 13.35\n",
      "Epoch: 881 Train Loss: 0.0776 Val Loss: 0.3463 Acc: 0.8804 Pre: 0.8599 Recall: 0.9247 F1: 0.8911 Train AUC: 0.9974 Val AUC: 0.9551 Time: 14.44\n",
      "Epoch: 882 Train Loss: 0.0764 Val Loss: 0.3460 Acc: 0.8768 Pre: 0.8544 Recall: 0.9247 F1: 0.8882 Train AUC: 0.9972 Val AUC: 0.9548 Time: 14.58\n",
      "Epoch: 883 Train Loss: 0.0807 Val Loss: 0.3490 Acc: 0.8732 Pre: 0.8491 Recall: 0.9247 F1: 0.8852 Train AUC: 0.9963 Val AUC: 0.9523 Time: 14.72\n",
      "Epoch: 884 Train Loss: 0.0807 Val Loss: 0.3480 Acc: 0.8732 Pre: 0.8651 Recall: 0.9007 F1: 0.8826 Train AUC: 0.9970 Val AUC: 0.9499 Time: 14.12\n",
      "Epoch: 885 Train Loss: 0.0878 Val Loss: 0.3388 Acc: 0.8822 Pre: 0.8746 Recall: 0.9075 F1: 0.8908 Train AUC: 0.9958 Val AUC: 0.9535 Time: 13.57\n",
      "Epoch: 886 Train Loss: 0.0810 Val Loss: 0.3541 Acc: 0.8786 Pre: 0.8549 Recall: 0.9281 F1: 0.8900 Train AUC: 0.9969 Val AUC: 0.9565 Time: 13.11\n",
      "Epoch: 887 Train Loss: 0.0828 Val Loss: 0.3685 Acc: 0.8822 Pre: 0.8626 Recall: 0.9247 F1: 0.8926 Train AUC: 0.9968 Val AUC: 0.9560 Time: 13.17\n",
      "Epoch: 888 Train Loss: 0.0998 Val Loss: 0.3393 Acc: 0.8931 Pre: 0.9031 Recall: 0.8938 F1: 0.8985 Train AUC: 0.9946 Val AUC: 0.9581 Time: 13.67\n",
      "Epoch: 889 Train Loss: 0.0974 Val Loss: 0.3343 Acc: 0.8714 Pre: 0.8850 Recall: 0.8699 F1: 0.8774 Train AUC: 0.9954 Val AUC: 0.9569 Time: 14.21\n",
      "Epoch: 890 Train Loss: 0.0968 Val Loss: 0.3533 Acc: 0.8822 Pre: 0.8580 Recall: 0.9315 F1: 0.8933 Train AUC: 0.9960 Val AUC: 0.9526 Time: 14.21\n",
      "Epoch: 891 Train Loss: 0.0923 Val Loss: 0.3957 Acc: 0.8678 Pre: 0.8328 Recall: 0.9384 F1: 0.8824 Train AUC: 0.9956 Val AUC: 0.9473 Time: 14.77\n",
      "Epoch: 892 Train Loss: 0.1044 Val Loss: 0.3485 Acc: 0.8786 Pre: 0.8549 Recall: 0.9281 F1: 0.8900 Train AUC: 0.9970 Val AUC: 0.9514 Time: 13.81\n",
      "Epoch: 893 Train Loss: 0.0855 Val Loss: 0.3293 Acc: 0.8786 Pre: 0.8947 Recall: 0.8733 F1: 0.8839 Train AUC: 0.9969 Val AUC: 0.9567 Time: 12.79\n",
      "Epoch: 894 Train Loss: 0.0966 Val Loss: 0.3318 Acc: 0.8931 Pre: 0.9059 Recall: 0.8904 F1: 0.8981 Train AUC: 0.9963 Val AUC: 0.9595 Time: 12.85\n",
      "Epoch: 895 Train Loss: 0.0916 Val Loss: 0.3400 Acc: 0.8931 Pre: 0.8820 Recall: 0.9212 F1: 0.9012 Train AUC: 0.9961 Val AUC: 0.9596 Time: 13.25\n",
      "Epoch: 896 Train Loss: 0.0918 Val Loss: 0.3641 Acc: 0.8841 Pre: 0.8608 Recall: 0.9315 F1: 0.8947 Train AUC: 0.9957 Val AUC: 0.9564 Time: 13.90\n",
      "Epoch: 897 Train Loss: 0.0947 Val Loss: 0.3521 Acc: 0.8822 Pre: 0.8558 Recall: 0.9349 F1: 0.8936 Train AUC: 0.9964 Val AUC: 0.9537 Time: 14.36\n",
      "Epoch: 898 Train Loss: 0.0960 Val Loss: 0.3385 Acc: 0.8714 Pre: 0.8721 Recall: 0.8870 F1: 0.8795 Train AUC: 0.9963 Val AUC: 0.9480 Time: 14.57\n",
      "Epoch: 899 Train Loss: 0.0873 Val Loss: 0.3524 Acc: 0.8659 Pre: 0.8682 Recall: 0.8801 F1: 0.8741 Train AUC: 0.9969 Val AUC: 0.9435 Time: 15.16\n",
      "Epoch: 900 Train Loss: 0.1042 Val Loss: 0.3570 Acc: 0.8786 Pre: 0.8641 Recall: 0.9144 F1: 0.8885 Train AUC: 0.9951 Val AUC: 0.9520 Time: 13.86\n",
      "Epoch: 901 Train Loss: 0.0921 Val Loss: 0.3592 Acc: 0.8841 Pre: 0.8654 Recall: 0.9247 F1: 0.8940 Train AUC: 0.9956 Val AUC: 0.9555 Time: 12.67\n",
      "Epoch: 902 Train Loss: 0.0966 Val Loss: 0.3382 Acc: 0.8913 Pre: 0.8841 Recall: 0.9144 F1: 0.8990 Train AUC: 0.9949 Val AUC: 0.9580 Time: 12.30\n",
      "Epoch: 903 Train Loss: 0.0893 Val Loss: 0.3401 Acc: 0.8623 Pre: 0.8576 Recall: 0.8870 F1: 0.8721 Train AUC: 0.9953 Val AUC: 0.9537 Time: 12.47\n",
      "Epoch: 904 Train Loss: 0.0986 Val Loss: 0.3552 Acc: 0.8641 Pre: 0.8557 Recall: 0.8938 F1: 0.8744 Train AUC: 0.9951 Val AUC: 0.9490 Time: 12.77\n",
      "Epoch: 905 Train Loss: 0.0917 Val Loss: 0.3523 Acc: 0.8768 Pre: 0.8590 Recall: 0.9178 F1: 0.8874 Train AUC: 0.9954 Val AUC: 0.9536 Time: 13.37\n",
      "Epoch: 906 Train Loss: 0.0853 Val Loss: 0.3531 Acc: 0.8913 Pre: 0.8841 Recall: 0.9144 F1: 0.8990 Train AUC: 0.9963 Val AUC: 0.9564 Time: 13.99\n",
      "Epoch: 907 Train Loss: 0.0862 Val Loss: 0.3514 Acc: 0.8877 Pre: 0.8912 Recall: 0.8973 F1: 0.8942 Train AUC: 0.9953 Val AUC: 0.9561 Time: 14.62\n",
      "Epoch: 908 Train Loss: 0.0861 Val Loss: 0.3444 Acc: 0.8877 Pre: 0.8734 Recall: 0.9212 F1: 0.8967 Train AUC: 0.9952 Val AUC: 0.9557 Time: 15.67\n",
      "Epoch: 909 Train Loss: 0.0818 Val Loss: 0.3399 Acc: 0.8786 Pre: 0.8594 Recall: 0.9212 F1: 0.8893 Train AUC: 0.9959 Val AUC: 0.9518 Time: 14.81\n",
      "Epoch: 910 Train Loss: 0.0857 Val Loss: 0.3407 Acc: 0.8786 Pre: 0.8641 Recall: 0.9144 F1: 0.8885 Train AUC: 0.9965 Val AUC: 0.9484 Time: 13.18\n",
      "Epoch: 911 Train Loss: 0.0815 Val Loss: 0.3411 Acc: 0.8750 Pre: 0.8608 Recall: 0.9110 F1: 0.8852 Train AUC: 0.9975 Val AUC: 0.9505 Time: 13.12\n",
      "Epoch: 912 Train Loss: 0.0853 Val Loss: 0.3336 Acc: 0.8967 Pre: 0.8878 Recall: 0.9212 F1: 0.9042 Train AUC: 0.9963 Val AUC: 0.9556 Time: 13.59\n",
      "Epoch: 913 Train Loss: 0.0820 Val Loss: 0.3350 Acc: 0.8931 Pre: 0.8820 Recall: 0.9212 F1: 0.9012 Train AUC: 0.9967 Val AUC: 0.9561 Time: 14.32\n",
      "Epoch: 914 Train Loss: 0.0854 Val Loss: 0.3287 Acc: 0.8913 Pre: 0.8867 Recall: 0.9110 F1: 0.8986 Train AUC: 0.9963 Val AUC: 0.9568 Time: 14.81\n",
      "Epoch: 915 Train Loss: 0.0823 Val Loss: 0.3260 Acc: 0.8895 Pre: 0.8812 Recall: 0.9144 F1: 0.8975 Train AUC: 0.9968 Val AUC: 0.9568 Time: 13.69\n",
      "Epoch: 916 Train Loss: 0.0782 Val Loss: 0.3286 Acc: 0.8895 Pre: 0.8738 Recall: 0.9247 F1: 0.8985 Train AUC: 0.9971 Val AUC: 0.9558 Time: 13.46\n",
      "Epoch: 917 Train Loss: 0.0748 Val Loss: 0.3370 Acc: 0.8804 Pre: 0.8669 Recall: 0.9144 F1: 0.8900 Train AUC: 0.9973 Val AUC: 0.9542 Time: 13.30\n",
      "Epoch: 918 Train Loss: 0.0824 Val Loss: 0.3472 Acc: 0.8768 Pre: 0.8613 Recall: 0.9144 F1: 0.8870 Train AUC: 0.9965 Val AUC: 0.9521 Time: 14.15\n",
      "Epoch: 919 Train Loss: 0.0793 Val Loss: 0.3493 Acc: 0.8786 Pre: 0.8594 Recall: 0.9212 F1: 0.8893 Train AUC: 0.9970 Val AUC: 0.9532 Time: 14.68\n",
      "Epoch: 920 Train Loss: 0.0839 Val Loss: 0.3341 Acc: 0.8895 Pre: 0.8762 Recall: 0.9212 F1: 0.8982 Train AUC: 0.9966 Val AUC: 0.9555 Time: 14.61\n",
      "Epoch: 921 Train Loss: 0.0777 Val Loss: 0.3338 Acc: 0.8750 Pre: 0.8704 Recall: 0.8973 F1: 0.8836 Train AUC: 0.9968 Val AUC: 0.9552 Time: 13.49\n",
      "Epoch: 922 Train Loss: 0.0765 Val Loss: 0.3353 Acc: 0.8714 Pre: 0.8623 Recall: 0.9007 F1: 0.8811 Train AUC: 0.9970 Val AUC: 0.9549 Time: 12.60\n",
      "Epoch: 923 Train Loss: 0.0804 Val Loss: 0.3315 Acc: 0.8841 Pre: 0.8800 Recall: 0.9041 F1: 0.8919 Train AUC: 0.9967 Val AUC: 0.9562 Time: 12.73\n",
      "Epoch: 924 Train Loss: 0.0845 Val Loss: 0.3464 Acc: 0.8895 Pre: 0.8714 Recall: 0.9281 F1: 0.8988 Train AUC: 0.9966 Val AUC: 0.9550 Time: 13.10\n",
      "Epoch: 925 Train Loss: 0.0868 Val Loss: 0.3438 Acc: 0.8804 Pre: 0.8553 Recall: 0.9315 F1: 0.8918 Train AUC: 0.9963 Val AUC: 0.9534 Time: 13.65\n",
      "Epoch: 926 Train Loss: 0.0803 Val Loss: 0.3360 Acc: 0.8732 Pre: 0.8558 Recall: 0.9144 F1: 0.8841 Train AUC: 0.9971 Val AUC: 0.9513 Time: 14.37\n",
      "Epoch: 927 Train Loss: 0.0890 Val Loss: 0.3319 Acc: 0.8822 Pre: 0.8697 Recall: 0.9144 F1: 0.8915 Train AUC: 0.9965 Val AUC: 0.9533 Time: 14.84\n",
      "Epoch: 928 Train Loss: 0.0795 Val Loss: 0.3379 Acc: 0.8841 Pre: 0.8725 Recall: 0.9144 F1: 0.8930 Train AUC: 0.9968 Val AUC: 0.9560 Time: 14.55\n",
      "Epoch: 929 Train Loss: 0.0793 Val Loss: 0.3378 Acc: 0.8895 Pre: 0.8714 Recall: 0.9281 F1: 0.8988 Train AUC: 0.9968 Val AUC: 0.9568 Time: 13.82\n",
      "Epoch: 930 Train Loss: 0.0891 Val Loss: 0.3268 Acc: 0.8804 Pre: 0.8844 Recall: 0.8904 F1: 0.8874 Train AUC: 0.9965 Val AUC: 0.9579 Time: 12.68\n",
      "Epoch: 931 Train Loss: 0.0785 Val Loss: 0.3271 Acc: 0.8768 Pre: 0.8784 Recall: 0.8904 F1: 0.8844 Train AUC: 0.9968 Val AUC: 0.9576 Time: 12.73\n",
      "Epoch: 932 Train Loss: 0.0744 Val Loss: 0.3307 Acc: 0.8859 Pre: 0.8706 Recall: 0.9212 F1: 0.8952 Train AUC: 0.9977 Val AUC: 0.9562 Time: 13.32\n",
      "Epoch: 933 Train Loss: 0.0816 Val Loss: 0.3492 Acc: 0.8804 Pre: 0.8553 Recall: 0.9315 F1: 0.8918 Train AUC: 0.9969 Val AUC: 0.9527 Time: 13.97\n",
      "Epoch: 934 Train Loss: 0.0821 Val Loss: 0.3401 Acc: 0.8895 Pre: 0.8690 Recall: 0.9315 F1: 0.8992 Train AUC: 0.9974 Val AUC: 0.9555 Time: 14.54\n",
      "Epoch: 935 Train Loss: 0.0796 Val Loss: 0.3350 Acc: 0.8786 Pre: 0.8840 Recall: 0.8870 F1: 0.8855 Train AUC: 0.9973 Val AUC: 0.9567 Time: 15.00\n",
      "Epoch: 936 Train Loss: 0.0810 Val Loss: 0.3363 Acc: 0.8768 Pre: 0.8836 Recall: 0.8836 F1: 0.8836 Train AUC: 0.9959 Val AUC: 0.9546 Time: 14.82\n",
      "Epoch: 937 Train Loss: 0.0861 Val Loss: 0.3437 Acc: 0.8786 Pre: 0.8594 Recall: 0.9212 F1: 0.8893 Train AUC: 0.9962 Val AUC: 0.9534 Time: 13.33\n",
      "Epoch: 938 Train Loss: 0.0790 Val Loss: 0.3452 Acc: 0.8804 Pre: 0.8599 Recall: 0.9247 F1: 0.8911 Train AUC: 0.9975 Val AUC: 0.9539 Time: 12.89\n",
      "Epoch: 939 Train Loss: 0.0723 Val Loss: 0.3373 Acc: 0.8714 Pre: 0.8553 Recall: 0.9110 F1: 0.8823 Train AUC: 0.9979 Val AUC: 0.9533 Time: 13.48\n",
      "Epoch: 940 Train Loss: 0.0748 Val Loss: 0.3311 Acc: 0.8678 Pre: 0.8614 Recall: 0.8938 F1: 0.8773 Train AUC: 0.9974 Val AUC: 0.9539 Time: 13.44\n",
      "Epoch: 941 Train Loss: 0.0832 Val Loss: 0.3323 Acc: 0.8786 Pre: 0.8664 Recall: 0.9110 F1: 0.8881 Train AUC: 0.9968 Val AUC: 0.9561 Time: 14.05\n",
      "Epoch: 942 Train Loss: 0.0750 Val Loss: 0.3410 Acc: 0.8822 Pre: 0.8626 Recall: 0.9247 F1: 0.8926 Train AUC: 0.9974 Val AUC: 0.9564 Time: 14.78\n",
      "Epoch: 943 Train Loss: 0.0794 Val Loss: 0.3458 Acc: 0.8841 Pre: 0.8631 Recall: 0.9281 F1: 0.8944 Train AUC: 0.9970 Val AUC: 0.9552 Time: 14.92\n",
      "Epoch: 944 Train Loss: 0.0732 Val Loss: 0.3401 Acc: 0.8786 Pre: 0.8641 Recall: 0.9144 F1: 0.8885 Train AUC: 0.9978 Val AUC: 0.9536 Time: 13.31\n",
      "Epoch: 945 Train Loss: 0.0827 Val Loss: 0.3384 Acc: 0.8714 Pre: 0.8553 Recall: 0.9110 F1: 0.8823 Train AUC: 0.9968 Val AUC: 0.9531 Time: 12.45\n",
      "Epoch: 946 Train Loss: 0.0796 Val Loss: 0.3399 Acc: 0.8732 Pre: 0.8581 Recall: 0.9110 F1: 0.8837 Train AUC: 0.9971 Val AUC: 0.9535 Time: 13.00\n",
      "Epoch: 947 Train Loss: 0.0804 Val Loss: 0.3357 Acc: 0.8750 Pre: 0.8680 Recall: 0.9007 F1: 0.8840 Train AUC: 0.9972 Val AUC: 0.9557 Time: 13.14\n",
      "Epoch: 948 Train Loss: 0.0819 Val Loss: 0.3348 Acc: 0.8822 Pre: 0.8796 Recall: 0.9007 F1: 0.8900 Train AUC: 0.9972 Val AUC: 0.9574 Time: 13.73\n",
      "Epoch: 949 Train Loss: 0.0719 Val Loss: 0.3378 Acc: 0.8877 Pre: 0.8859 Recall: 0.9041 F1: 0.8949 Train AUC: 0.9976 Val AUC: 0.9576 Time: 14.56\n",
      "Epoch: 950 Train Loss: 0.0857 Val Loss: 0.3410 Acc: 0.8967 Pre: 0.8803 Recall: 0.9315 F1: 0.9052 Train AUC: 0.9961 Val AUC: 0.9570 Time: 15.30\n",
      "Epoch: 951 Train Loss: 0.0713 Val Loss: 0.3521 Acc: 0.8750 Pre: 0.8585 Recall: 0.9144 F1: 0.8856 Train AUC: 0.9977 Val AUC: 0.9518 Time: 15.62\n",
      "Epoch: 952 Train Loss: 0.0812 Val Loss: 0.3468 Acc: 0.8714 Pre: 0.8576 Recall: 0.9075 F1: 0.8819 Train AUC: 0.9975 Val AUC: 0.9515 Time: 13.47\n",
      "Epoch: 953 Train Loss: 0.0767 Val Loss: 0.3373 Acc: 0.8822 Pre: 0.8796 Recall: 0.9007 F1: 0.8900 Train AUC: 0.9975 Val AUC: 0.9538 Time: 12.82\n",
      "Epoch: 954 Train Loss: 0.0760 Val Loss: 0.3363 Acc: 0.8804 Pre: 0.8792 Recall: 0.8973 F1: 0.8881 Train AUC: 0.9973 Val AUC: 0.9559 Time: 13.08\n",
      "Epoch: 955 Train Loss: 0.0839 Val Loss: 0.3340 Acc: 0.8877 Pre: 0.8808 Recall: 0.9110 F1: 0.8956 Train AUC: 0.9963 Val AUC: 0.9567 Time: 13.35\n",
      "Epoch: 956 Train Loss: 0.0776 Val Loss: 0.3397 Acc: 0.8841 Pre: 0.8608 Recall: 0.9315 F1: 0.8947 Train AUC: 0.9968 Val AUC: 0.9546 Time: 13.95\n",
      "Epoch: 957 Train Loss: 0.0754 Val Loss: 0.3481 Acc: 0.8822 Pre: 0.8603 Recall: 0.9281 F1: 0.8929 Train AUC: 0.9973 Val AUC: 0.9524 Time: 13.80\n",
      "Epoch: 958 Train Loss: 0.0897 Val Loss: 0.3344 Acc: 0.8786 Pre: 0.8594 Recall: 0.9212 F1: 0.8893 Train AUC: 0.9965 Val AUC: 0.9548 Time: 14.69\n",
      "Epoch: 959 Train Loss: 0.0729 Val Loss: 0.3334 Acc: 0.8822 Pre: 0.8771 Recall: 0.9041 F1: 0.8904 Train AUC: 0.9977 Val AUC: 0.9565 Time: 14.05\n",
      "Epoch: 960 Train Loss: 0.0790 Val Loss: 0.3388 Acc: 0.8859 Pre: 0.8754 Recall: 0.9144 F1: 0.8945 Train AUC: 0.9965 Val AUC: 0.9558 Time: 13.18\n",
      "Epoch: 961 Train Loss: 0.0757 Val Loss: 0.3511 Acc: 0.8768 Pre: 0.8522 Recall: 0.9281 F1: 0.8885 Train AUC: 0.9970 Val AUC: 0.9551 Time: 13.07\n",
      "Epoch: 962 Train Loss: 0.0801 Val Loss: 0.3526 Acc: 0.8786 Pre: 0.8571 Recall: 0.9247 F1: 0.8896 Train AUC: 0.9971 Val AUC: 0.9532 Time: 13.83\n",
      "Epoch: 963 Train Loss: 0.0800 Val Loss: 0.3371 Acc: 0.8732 Pre: 0.8627 Recall: 0.9041 F1: 0.8829 Train AUC: 0.9973 Val AUC: 0.9544 Time: 14.60\n",
      "Epoch: 964 Train Loss: 0.0801 Val Loss: 0.3294 Acc: 0.8768 Pre: 0.8810 Recall: 0.8870 F1: 0.8840 Train AUC: 0.9969 Val AUC: 0.9564 Time: 14.98\n",
      "Epoch: 965 Train Loss: 0.0788 Val Loss: 0.3313 Acc: 0.8804 Pre: 0.8844 Recall: 0.8904 F1: 0.8874 Train AUC: 0.9970 Val AUC: 0.9557 Time: 15.48\n",
      "Epoch: 966 Train Loss: 0.0789 Val Loss: 0.3504 Acc: 0.8822 Pre: 0.8580 Recall: 0.9315 F1: 0.8933 Train AUC: 0.9972 Val AUC: 0.9540 Time: 13.72\n",
      "Epoch: 967 Train Loss: 0.0752 Val Loss: 0.3627 Acc: 0.8750 Pre: 0.8495 Recall: 0.9281 F1: 0.8871 Train AUC: 0.9976 Val AUC: 0.9507 Time: 12.51\n",
      "Epoch: 968 Train Loss: 0.0833 Val Loss: 0.3351 Acc: 0.8786 Pre: 0.8617 Recall: 0.9178 F1: 0.8889 Train AUC: 0.9980 Val AUC: 0.9537 Time: 12.24\n",
      "Epoch: 969 Train Loss: 0.0775 Val Loss: 0.3294 Acc: 0.8768 Pre: 0.8836 Recall: 0.8836 F1: 0.8836 Train AUC: 0.9975 Val AUC: 0.9571 Time: 12.83\n",
      "Epoch: 970 Train Loss: 0.0844 Val Loss: 0.3378 Acc: 0.8931 Pre: 0.8820 Recall: 0.9212 F1: 0.9012 Train AUC: 0.9972 Val AUC: 0.9575 Time: 13.35\n",
      "Epoch: 971 Train Loss: 0.0719 Val Loss: 0.3610 Acc: 0.8804 Pre: 0.8553 Recall: 0.9315 F1: 0.8918 Train AUC: 0.9978 Val AUC: 0.9574 Time: 13.85\n",
      "Epoch: 972 Train Loss: 0.0767 Val Loss: 0.3641 Acc: 0.8804 Pre: 0.8509 Recall: 0.9384 F1: 0.8925 Train AUC: 0.9975 Val AUC: 0.9542 Time: 14.46\n",
      "Epoch: 973 Train Loss: 0.0841 Val Loss: 0.3403 Acc: 0.8750 Pre: 0.8608 Recall: 0.9110 F1: 0.8852 Train AUC: 0.9971 Val AUC: 0.9539 Time: 15.11\n",
      "Epoch: 974 Train Loss: 0.0767 Val Loss: 0.3344 Acc: 0.8659 Pre: 0.8707 Recall: 0.8767 F1: 0.8737 Train AUC: 0.9973 Val AUC: 0.9554 Time: 14.78\n",
      "Epoch: 975 Train Loss: 0.0843 Val Loss: 0.3440 Acc: 0.8913 Pre: 0.8718 Recall: 0.9315 F1: 0.9007 Train AUC: 0.9976 Val AUC: 0.9566 Time: 13.46\n",
      "Epoch: 976 Train Loss: 0.0823 Val Loss: 0.3617 Acc: 0.8768 Pre: 0.8500 Recall: 0.9315 F1: 0.8889 Train AUC: 0.9960 Val AUC: 0.9541 Time: 12.73\n",
      "Epoch: 977 Train Loss: 0.0846 Val Loss: 0.3415 Acc: 0.8786 Pre: 0.8617 Recall: 0.9178 F1: 0.8889 Train AUC: 0.9967 Val AUC: 0.9541 Time: 13.04\n",
      "Epoch: 978 Train Loss: 0.0758 Val Loss: 0.3328 Acc: 0.8750 Pre: 0.8832 Recall: 0.8801 F1: 0.8816 Train AUC: 0.9973 Val AUC: 0.9553 Time: 13.42\n",
      "Epoch: 979 Train Loss: 0.0821 Val Loss: 0.3341 Acc: 0.8768 Pre: 0.8836 Recall: 0.8836 F1: 0.8836 Train AUC: 0.9970 Val AUC: 0.9548 Time: 14.05\n",
      "Epoch: 980 Train Loss: 0.0721 Val Loss: 0.3416 Acc: 0.8750 Pre: 0.8656 Recall: 0.9041 F1: 0.8844 Train AUC: 0.9980 Val AUC: 0.9536 Time: 14.73\n",
      "Epoch: 981 Train Loss: 0.0784 Val Loss: 0.3439 Acc: 0.8804 Pre: 0.8622 Recall: 0.9212 F1: 0.8907 Train AUC: 0.9968 Val AUC: 0.9536 Time: 15.14\n",
      "Epoch: 982 Train Loss: 0.0821 Val Loss: 0.3410 Acc: 0.8768 Pre: 0.8613 Recall: 0.9144 F1: 0.8870 Train AUC: 0.9969 Val AUC: 0.9534 Time: 14.92\n",
      "Epoch: 983 Train Loss: 0.0768 Val Loss: 0.3422 Acc: 0.8895 Pre: 0.8714 Recall: 0.9281 F1: 0.8988 Train AUC: 0.9978 Val AUC: 0.9560 Time: 13.53\n",
      "Epoch: 984 Train Loss: 0.0752 Val Loss: 0.3443 Acc: 0.8913 Pre: 0.8742 Recall: 0.9281 F1: 0.9003 Train AUC: 0.9973 Val AUC: 0.9572 Time: 13.19\n",
      "Epoch: 985 Train Loss: 0.0724 Val Loss: 0.3520 Acc: 0.8841 Pre: 0.8654 Recall: 0.9247 F1: 0.8940 Train AUC: 0.9978 Val AUC: 0.9560 Time: 13.96\n",
      "Epoch: 986 Train Loss: 0.0824 Val Loss: 0.3476 Acc: 0.8841 Pre: 0.8677 Recall: 0.9212 F1: 0.8937 Train AUC: 0.9966 Val AUC: 0.9552 Time: 14.20\n",
      "Epoch: 987 Train Loss: 0.0862 Val Loss: 0.3413 Acc: 0.8714 Pre: 0.8671 Recall: 0.8938 F1: 0.8803 Train AUC: 0.9963 Val AUC: 0.9513 Time: 12.76\n",
      "Epoch: 988 Train Loss: 0.0798 Val Loss: 0.3443 Acc: 0.8714 Pre: 0.8746 Recall: 0.8836 F1: 0.8790 Train AUC: 0.9968 Val AUC: 0.9492 Time: 12.45\n",
      "Epoch: 989 Train Loss: 0.0781 Val Loss: 0.3477 Acc: 0.8786 Pre: 0.8617 Recall: 0.9178 F1: 0.8889 Train AUC: 0.9977 Val AUC: 0.9514 Time: 13.02\n",
      "Epoch: 990 Train Loss: 0.0755 Val Loss: 0.3549 Acc: 0.8841 Pre: 0.8608 Recall: 0.9315 F1: 0.8947 Train AUC: 0.9974 Val AUC: 0.9550 Time: 13.39\n",
      "Epoch: 991 Train Loss: 0.0851 Val Loss: 0.3361 Acc: 0.8841 Pre: 0.8775 Recall: 0.9075 F1: 0.8923 Train AUC: 0.9973 Val AUC: 0.9571 Time: 14.05\n",
      "Epoch: 992 Train Loss: 0.0790 Val Loss: 0.3319 Acc: 0.8804 Pre: 0.8818 Recall: 0.8938 F1: 0.8878 Train AUC: 0.9970 Val AUC: 0.9564 Time: 14.79\n",
      "Epoch: 993 Train Loss: 0.0898 Val Loss: 0.3326 Acc: 0.8768 Pre: 0.8684 Recall: 0.9041 F1: 0.8859 Train AUC: 0.9956 Val AUC: 0.9551 Time: 15.90\n",
      "Epoch: 994 Train Loss: 0.0720 Val Loss: 0.3495 Acc: 0.8768 Pre: 0.8567 Recall: 0.9212 F1: 0.8878 Train AUC: 0.9977 Val AUC: 0.9523 Time: 14.13\n",
      "Epoch: 995 Train Loss: 0.0772 Val Loss: 0.3620 Acc: 0.8822 Pre: 0.8580 Recall: 0.9315 F1: 0.8933 Train AUC: 0.9974 Val AUC: 0.9524 Time: 12.88\n",
      "Epoch: 996 Train Loss: 0.0808 Val Loss: 0.3441 Acc: 0.8895 Pre: 0.8837 Recall: 0.9110 F1: 0.8971 Train AUC: 0.9970 Val AUC: 0.9548 Time: 12.05\n",
      "Epoch: 997 Train Loss: 0.0703 Val Loss: 0.3368 Acc: 0.8768 Pre: 0.8836 Recall: 0.8836 F1: 0.8836 Train AUC: 0.9978 Val AUC: 0.9560 Time: 12.05\n",
      "Epoch: 998 Train Loss: 0.0884 Val Loss: 0.3475 Acc: 0.8804 Pre: 0.8576 Recall: 0.9281 F1: 0.8914 Train AUC: 0.9965 Val AUC: 0.9542 Time: 12.58\n",
      "Epoch: 999 Train Loss: 0.0787 Val Loss: 0.3651 Acc: 0.8804 Pre: 0.8509 Recall: 0.9384 F1: 0.8925 Train AUC: 0.9969 Val AUC: 0.9551 Time: 13.05\n",
      "Epoch: 1000 Train Loss: 0.0796 Val Loss: 0.3539 Acc: 0.8804 Pre: 0.8553 Recall: 0.9315 F1: 0.8918 Train AUC: 0.9972 Val AUC: 0.9567 Time: 13.42\n",
      "Fold: 2 Best Epoch: 345 Test acc: 0.9130 Test Pre: 0.9067 Test Recall: 0.9315 Test F1: 0.9189 Test PRC: 0.9651 Test AUC: 0.9707\n",
      "Training for Fold 3\n",
      "## Training edges: 2208\n",
      "## Testing edges: 552\n",
      "Epoch: 1 Train Loss: 0.8958 Val Loss: 0.8164 Acc: 0.6413 Pre: 0.6150 Recall: 0.8483 F1: 0.7130 Train AUC: 0.5057 Val AUC: 0.7078 Time: 14.43\n",
      "Epoch: 2 Train Loss: 1.0223 Val Loss: 1.8294 Acc: 0.6087 Pre: 0.6814 Recall: 0.4793 F1: 0.5628 Train AUC: 0.7007 Val AUC: 0.6700 Time: 15.25\n",
      "Epoch: 3 Train Loss: 1.8375 Val Loss: 0.5815 Acc: 0.6703 Pre: 0.8553 Recall: 0.4483 F1: 0.5882 Train AUC: 0.6077 Val AUC: 0.8372 Time: 16.49\n",
      "Epoch: 4 Train Loss: 0.5678 Val Loss: 0.4943 Acc: 0.7518 Pre: 0.7297 Recall: 0.8379 F1: 0.7801 Train AUC: 0.8226 Val AUC: 0.8487 Time: 14.75\n",
      "Epoch: 5 Train Loss: 0.6024 Val Loss: 0.5085 Acc: 0.7464 Pre: 0.7219 Recall: 0.8414 F1: 0.7771 Train AUC: 0.7988 Val AUC: 0.8443 Time: 13.40\n",
      "Epoch: 6 Train Loss: 0.5828 Val Loss: 0.4294 Acc: 0.7862 Pre: 0.8282 Recall: 0.7483 F1: 0.7862 Train AUC: 0.8204 Val AUC: 0.8771 Time: 13.07\n",
      "Epoch: 7 Train Loss: 0.5267 Val Loss: 0.5026 Acc: 0.7808 Pre: 0.8824 Recall: 0.6724 F1: 0.7632 Train AUC: 0.8327 Val AUC: 0.8677 Time: 13.52\n",
      "Epoch: 8 Train Loss: 0.5205 Val Loss: 0.5569 Acc: 0.7699 Pre: 0.8655 Recall: 0.6655 F1: 0.7524 Train AUC: 0.8575 Val AUC: 0.8531 Time: 13.95\n",
      "Epoch: 9 Train Loss: 0.5979 Val Loss: 0.5209 Acc: 0.7754 Pre: 0.8578 Recall: 0.6862 F1: 0.7625 Train AUC: 0.8528 Val AUC: 0.8642 Time: 14.02\n",
      "Epoch: 10 Train Loss: 0.5757 Val Loss: 0.4752 Acc: 0.7971 Pre: 0.8589 Recall: 0.7345 F1: 0.7918 Train AUC: 0.8533 Val AUC: 0.8762 Time: 14.82\n",
      "Epoch: 11 Train Loss: 0.4973 Val Loss: 0.4758 Acc: 0.7899 Pre: 0.8398 Recall: 0.7414 F1: 0.7875 Train AUC: 0.8674 Val AUC: 0.8757 Time: 14.93\n",
      "Epoch: 12 Train Loss: 0.5318 Val Loss: 0.4765 Acc: 0.7862 Pre: 0.8209 Recall: 0.7586 F1: 0.7885 Train AUC: 0.8575 Val AUC: 0.8747 Time: 14.58\n",
      "Epoch: 13 Train Loss: 0.4949 Val Loss: 0.4618 Acc: 0.7808 Pre: 0.8189 Recall: 0.7483 F1: 0.7820 Train AUC: 0.8748 Val AUC: 0.8783 Time: 13.48\n",
      "Epoch: 14 Train Loss: 0.4662 Val Loss: 0.4284 Acc: 0.7953 Pre: 0.8444 Recall: 0.7483 F1: 0.7934 Train AUC: 0.8767 Val AUC: 0.8848 Time: 13.14\n",
      "Epoch: 15 Train Loss: 0.4508 Val Loss: 0.4005 Acc: 0.8062 Pre: 0.8617 Recall: 0.7517 F1: 0.8029 Train AUC: 0.8769 Val AUC: 0.8958 Time: 13.25\n",
      "Epoch: 16 Train Loss: 0.4116 Val Loss: 0.3965 Acc: 0.8152 Pre: 0.8917 Recall: 0.7379 F1: 0.8075 Train AUC: 0.8938 Val AUC: 0.9039 Time: 13.71\n",
      "Epoch: 17 Train Loss: 0.4243 Val Loss: 0.4093 Acc: 0.8188 Pre: 0.9167 Recall: 0.7207 F1: 0.8069 Train AUC: 0.8828 Val AUC: 0.9035 Time: 14.34\n",
      "Epoch: 18 Train Loss: 0.4150 Val Loss: 0.4167 Acc: 0.8279 Pre: 0.9535 Recall: 0.7069 F1: 0.8119 Train AUC: 0.8894 Val AUC: 0.9040 Time: 14.13\n",
      "Epoch: 19 Train Loss: 0.4132 Val Loss: 0.4064 Acc: 0.8388 Pre: 0.9507 Recall: 0.7310 F1: 0.8265 Train AUC: 0.8899 Val AUC: 0.9050 Time: 13.99\n",
      "Epoch: 20 Train Loss: 0.4047 Val Loss: 0.3997 Acc: 0.8170 Pre: 0.8954 Recall: 0.7379 F1: 0.8091 Train AUC: 0.8965 Val AUC: 0.9029 Time: 13.46\n",
      "Epoch: 21 Train Loss: 0.4436 Val Loss: 0.4032 Acc: 0.8043 Pre: 0.8611 Recall: 0.7483 F1: 0.8007 Train AUC: 0.8854 Val AUC: 0.8990 Time: 13.68\n",
      "Epoch: 22 Train Loss: 0.3910 Val Loss: 0.4142 Acc: 0.7971 Pre: 0.8346 Recall: 0.7655 F1: 0.7986 Train AUC: 0.9093 Val AUC: 0.8966 Time: 14.00\n",
      "Epoch: 23 Train Loss: 0.3985 Val Loss: 0.4198 Acc: 0.7971 Pre: 0.8321 Recall: 0.7690 F1: 0.7993 Train AUC: 0.9076 Val AUC: 0.8963 Time: 14.86\n",
      "Epoch: 24 Train Loss: 0.4033 Val Loss: 0.4190 Acc: 0.8025 Pre: 0.8467 Recall: 0.7621 F1: 0.8022 Train AUC: 0.9108 Val AUC: 0.8966 Time: 14.74\n",
      "Epoch: 25 Train Loss: 0.3845 Val Loss: 0.4119 Acc: 0.8116 Pre: 0.8750 Recall: 0.7483 F1: 0.8067 Train AUC: 0.9128 Val AUC: 0.8982 Time: 13.22\n",
      "Epoch: 26 Train Loss: 0.4062 Val Loss: 0.4030 Acc: 0.8188 Pre: 0.8926 Recall: 0.7448 F1: 0.8120 Train AUC: 0.9038 Val AUC: 0.9009 Time: 13.05\n",
      "Epoch: 27 Train Loss: 0.4068 Val Loss: 0.3946 Acc: 0.8225 Pre: 0.9000 Recall: 0.7448 F1: 0.8151 Train AUC: 0.8971 Val AUC: 0.9043 Time: 13.12\n",
      "Epoch: 28 Train Loss: 0.3869 Val Loss: 0.3939 Acc: 0.8333 Pre: 0.9231 Recall: 0.7448 F1: 0.8244 Train AUC: 0.9032 Val AUC: 0.9050 Time: 13.85\n",
      "Epoch: 29 Train Loss: 0.3872 Val Loss: 0.3901 Acc: 0.8351 Pre: 0.9270 Recall: 0.7448 F1: 0.8260 Train AUC: 0.9023 Val AUC: 0.9055 Time: 14.50\n",
      "Epoch: 30 Train Loss: 0.3834 Val Loss: 0.3857 Acc: 0.8279 Pre: 0.9114 Recall: 0.7448 F1: 0.8197 Train AUC: 0.9046 Val AUC: 0.9054 Time: 14.96\n",
      "Epoch: 31 Train Loss: 0.3756 Val Loss: 0.3837 Acc: 0.8134 Pre: 0.8816 Recall: 0.7448 F1: 0.8075 Train AUC: 0.9109 Val AUC: 0.9060 Time: 14.62\n",
      "Epoch: 32 Train Loss: 0.3557 Val Loss: 0.3844 Acc: 0.8134 Pre: 0.8785 Recall: 0.7483 F1: 0.8082 Train AUC: 0.9184 Val AUC: 0.9063 Time: 13.94\n",
      "Epoch: 33 Train Loss: 0.3611 Val Loss: 0.3860 Acc: 0.8098 Pre: 0.8656 Recall: 0.7552 F1: 0.8066 Train AUC: 0.9196 Val AUC: 0.9065 Time: 12.88\n",
      "Epoch: 34 Train Loss: 0.3485 Val Loss: 0.3855 Acc: 0.8098 Pre: 0.8571 Recall: 0.7655 F1: 0.8087 Train AUC: 0.9223 Val AUC: 0.9070 Time: 13.10\n",
      "Epoch: 35 Train Loss: 0.3536 Val Loss: 0.3816 Acc: 0.8116 Pre: 0.8550 Recall: 0.7724 F1: 0.8116 Train AUC: 0.9226 Val AUC: 0.9087 Time: 13.52\n",
      "Epoch: 36 Train Loss: 0.3671 Val Loss: 0.3669 Acc: 0.8134 Pre: 0.8555 Recall: 0.7759 F1: 0.8137 Train AUC: 0.9187 Val AUC: 0.9123 Time: 13.97\n",
      "Epoch: 37 Train Loss: 0.3514 Val Loss: 0.3550 Acc: 0.8315 Pre: 0.8662 Recall: 0.8034 F1: 0.8336 Train AUC: 0.9226 Val AUC: 0.9179 Time: 14.73\n",
      "Epoch: 38 Train Loss: 0.3555 Val Loss: 0.3501 Acc: 0.8351 Pre: 0.8592 Recall: 0.8207 F1: 0.8395 Train AUC: 0.9205 Val AUC: 0.9215 Time: 14.92\n",
      "Epoch: 39 Train Loss: 0.3586 Val Loss: 0.3494 Acc: 0.8406 Pre: 0.8713 Recall: 0.8172 F1: 0.8434 Train AUC: 0.9210 Val AUC: 0.9228 Time: 13.31\n",
      "Epoch: 40 Train Loss: 0.3441 Val Loss: 0.3551 Acc: 0.8514 Pre: 0.9094 Recall: 0.7966 F1: 0.8493 Train AUC: 0.9255 Val AUC: 0.9227 Time: 12.99\n",
      "Epoch: 41 Train Loss: 0.3216 Val Loss: 0.3672 Acc: 0.8496 Pre: 0.9295 Recall: 0.7724 F1: 0.8437 Train AUC: 0.9335 Val AUC: 0.9219 Time: 13.25\n",
      "Epoch: 42 Train Loss: 0.3386 Val Loss: 0.3730 Acc: 0.8406 Pre: 0.9316 Recall: 0.7517 F1: 0.8321 Train AUC: 0.9292 Val AUC: 0.9212 Time: 13.67\n",
      "Epoch: 43 Train Loss: 0.3397 Val Loss: 0.3647 Acc: 0.8496 Pre: 0.9259 Recall: 0.7759 F1: 0.8443 Train AUC: 0.9316 Val AUC: 0.9217 Time: 14.41\n",
      "Epoch: 44 Train Loss: 0.3252 Val Loss: 0.3550 Acc: 0.8514 Pre: 0.9000 Recall: 0.8069 F1: 0.8509 Train AUC: 0.9324 Val AUC: 0.9221 Time: 15.06\n",
      "Epoch: 45 Train Loss: 0.3246 Val Loss: 0.3533 Acc: 0.8351 Pre: 0.8672 Recall: 0.8103 F1: 0.8378 Train AUC: 0.9336 Val AUC: 0.9210 Time: 15.86\n",
      "Epoch: 46 Train Loss: 0.3362 Val Loss: 0.3574 Acc: 0.8279 Pre: 0.8545 Recall: 0.8103 F1: 0.8319 Train AUC: 0.9296 Val AUC: 0.9184 Time: 13.79\n",
      "Epoch: 47 Train Loss: 0.3257 Val Loss: 0.3627 Acc: 0.8261 Pre: 0.8540 Recall: 0.8069 F1: 0.8298 Train AUC: 0.9338 Val AUC: 0.9168 Time: 12.82\n",
      "Epoch: 48 Train Loss: 0.3190 Val Loss: 0.3651 Acc: 0.8279 Pre: 0.8571 Recall: 0.8069 F1: 0.8313 Train AUC: 0.9350 Val AUC: 0.9163 Time: 12.61\n",
      "Epoch: 49 Train Loss: 0.3326 Val Loss: 0.3637 Acc: 0.8333 Pre: 0.8640 Recall: 0.8103 F1: 0.8363 Train AUC: 0.9314 Val AUC: 0.9167 Time: 12.93\n",
      "Epoch: 50 Train Loss: 0.3264 Val Loss: 0.3614 Acc: 0.8370 Pre: 0.8676 Recall: 0.8138 F1: 0.8399 Train AUC: 0.9325 Val AUC: 0.9180 Time: 13.44\n",
      "Epoch: 51 Train Loss: 0.3455 Val Loss: 0.3604 Acc: 0.8351 Pre: 0.8645 Recall: 0.8138 F1: 0.8384 Train AUC: 0.9271 Val AUC: 0.9179 Time: 14.10\n",
      "Epoch: 52 Train Loss: 0.3245 Val Loss: 0.3583 Acc: 0.8333 Pre: 0.8561 Recall: 0.8207 F1: 0.8380 Train AUC: 0.9342 Val AUC: 0.9186 Time: 14.57\n",
      "Epoch: 53 Train Loss: 0.3150 Val Loss: 0.3563 Acc: 0.8315 Pre: 0.8505 Recall: 0.8241 F1: 0.8371 Train AUC: 0.9366 Val AUC: 0.9201 Time: 15.55\n",
      "Epoch: 54 Train Loss: 0.3171 Val Loss: 0.3612 Acc: 0.8406 Pre: 0.8826 Recall: 0.8034 F1: 0.8412 Train AUC: 0.9385 Val AUC: 0.9204 Time: 15.28\n",
      "Epoch: 55 Train Loss: 0.3042 Val Loss: 0.3679 Acc: 0.8496 Pre: 0.9091 Recall: 0.7931 F1: 0.8471 Train AUC: 0.9410 Val AUC: 0.9209 Time: 13.41\n",
      "Epoch: 56 Train Loss: 0.3331 Val Loss: 0.3642 Acc: 0.8478 Pre: 0.9023 Recall: 0.7966 F1: 0.8462 Train AUC: 0.9361 Val AUC: 0.9208 Time: 12.91\n",
      "Epoch: 57 Train Loss: 0.3066 Val Loss: 0.3611 Acc: 0.8442 Pre: 0.8893 Recall: 0.8034 F1: 0.8442 Train AUC: 0.9403 Val AUC: 0.9212 Time: 13.03\n",
      "Epoch: 58 Train Loss: 0.3054 Val Loss: 0.3572 Acc: 0.8514 Pre: 0.8910 Recall: 0.8172 F1: 0.8525 Train AUC: 0.9414 Val AUC: 0.9217 Time: 13.00\n",
      "Epoch: 59 Train Loss: 0.3119 Val Loss: 0.3548 Acc: 0.8478 Pre: 0.8843 Recall: 0.8172 F1: 0.8495 Train AUC: 0.9381 Val AUC: 0.9226 Time: 13.57\n",
      "Epoch: 60 Train Loss: 0.3038 Val Loss: 0.3496 Acc: 0.8442 Pre: 0.8723 Recall: 0.8241 F1: 0.8475 Train AUC: 0.9432 Val AUC: 0.9243 Time: 14.32\n",
      "Epoch: 61 Train Loss: 0.3170 Val Loss: 0.3478 Acc: 0.8442 Pre: 0.8723 Recall: 0.8241 F1: 0.8475 Train AUC: 0.9369 Val AUC: 0.9248 Time: 14.93\n",
      "Epoch: 62 Train Loss: 0.3098 Val Loss: 0.3514 Acc: 0.8478 Pre: 0.8843 Recall: 0.8172 F1: 0.8495 Train AUC: 0.9408 Val AUC: 0.9243 Time: 15.87\n",
      "Epoch: 63 Train Loss: 0.3079 Val Loss: 0.3588 Acc: 0.8496 Pre: 0.8935 Recall: 0.8103 F1: 0.8499 Train AUC: 0.9409 Val AUC: 0.9228 Time: 15.12\n",
      "Epoch: 64 Train Loss: 0.3224 Val Loss: 0.3611 Acc: 0.8514 Pre: 0.8969 Recall: 0.8103 F1: 0.8514 Train AUC: 0.9351 Val AUC: 0.9226 Time: 13.53\n",
      "Epoch: 65 Train Loss: 0.3062 Val Loss: 0.3530 Acc: 0.8478 Pre: 0.8872 Recall: 0.8138 F1: 0.8489 Train AUC: 0.9410 Val AUC: 0.9242 Time: 12.71\n",
      "Epoch: 66 Train Loss: 0.3000 Val Loss: 0.3442 Acc: 0.8406 Pre: 0.8633 Recall: 0.8276 F1: 0.8451 Train AUC: 0.9433 Val AUC: 0.9267 Time: 12.70\n",
      "Epoch: 67 Train Loss: 0.3004 Val Loss: 0.3451 Acc: 0.8496 Pre: 0.8791 Recall: 0.8276 F1: 0.8526 Train AUC: 0.9444 Val AUC: 0.9271 Time: 13.29\n",
      "Epoch: 68 Train Loss: 0.3119 Val Loss: 0.3491 Acc: 0.8460 Pre: 0.8782 Recall: 0.8207 F1: 0.8485 Train AUC: 0.9409 Val AUC: 0.9256 Time: 13.84\n",
      "Epoch: 69 Train Loss: 0.3112 Val Loss: 0.3533 Acc: 0.8460 Pre: 0.8810 Recall: 0.8172 F1: 0.8479 Train AUC: 0.9426 Val AUC: 0.9235 Time: 13.69\n",
      "Epoch: 70 Train Loss: 0.2972 Val Loss: 0.3590 Acc: 0.8442 Pre: 0.8778 Recall: 0.8172 F1: 0.8464 Train AUC: 0.9441 Val AUC: 0.9216 Time: 14.35\n",
      "Epoch: 71 Train Loss: 0.3115 Val Loss: 0.3601 Acc: 0.8333 Pre: 0.8561 Recall: 0.8207 F1: 0.8380 Train AUC: 0.9397 Val AUC: 0.9207 Time: 14.74\n",
      "Epoch: 72 Train Loss: 0.3043 Val Loss: 0.3577 Acc: 0.8297 Pre: 0.8500 Recall: 0.8207 F1: 0.8351 Train AUC: 0.9423 Val AUC: 0.9213 Time: 14.32\n",
      "Epoch: 73 Train Loss: 0.2903 Val Loss: 0.3551 Acc: 0.8279 Pre: 0.8421 Recall: 0.8276 F1: 0.8348 Train AUC: 0.9474 Val AUC: 0.9222 Time: 13.58\n",
      "Epoch: 74 Train Loss: 0.2985 Val Loss: 0.3561 Acc: 0.8333 Pre: 0.8561 Recall: 0.8207 F1: 0.8380 Train AUC: 0.9447 Val AUC: 0.9221 Time: 13.36\n",
      "Epoch: 75 Train Loss: 0.2880 Val Loss: 0.3599 Acc: 0.8424 Pre: 0.8801 Recall: 0.8103 F1: 0.8438 Train AUC: 0.9468 Val AUC: 0.9226 Time: 13.73\n",
      "Epoch: 76 Train Loss: 0.2901 Val Loss: 0.3582 Acc: 0.8406 Pre: 0.8769 Recall: 0.8103 F1: 0.8423 Train AUC: 0.9475 Val AUC: 0.9240 Time: 14.33\n",
      "Epoch: 77 Train Loss: 0.2843 Val Loss: 0.3534 Acc: 0.8424 Pre: 0.8773 Recall: 0.8138 F1: 0.8444 Train AUC: 0.9507 Val AUC: 0.9258 Time: 13.97\n",
      "Epoch: 78 Train Loss: 0.3074 Val Loss: 0.3495 Acc: 0.8388 Pre: 0.8628 Recall: 0.8241 F1: 0.8430 Train AUC: 0.9431 Val AUC: 0.9255 Time: 14.74\n",
      "Epoch: 79 Train Loss: 0.3038 Val Loss: 0.3496 Acc: 0.8333 Pre: 0.8462 Recall: 0.8345 F1: 0.8403 Train AUC: 0.9431 Val AUC: 0.9245 Time: 13.86\n",
      "Epoch: 80 Train Loss: 0.2970 Val Loss: 0.3584 Acc: 0.8460 Pre: 0.8782 Recall: 0.8207 F1: 0.8485 Train AUC: 0.9466 Val AUC: 0.9225 Time: 13.24\n",
      "Epoch: 81 Train Loss: 0.2917 Val Loss: 0.3699 Acc: 0.8478 Pre: 0.9087 Recall: 0.7897 F1: 0.8450 Train AUC: 0.9467 Val AUC: 0.9219 Time: 13.34\n",
      "Epoch: 82 Train Loss: 0.2958 Val Loss: 0.3587 Acc: 0.8569 Pre: 0.9042 Recall: 0.8138 F1: 0.8566 Train AUC: 0.9466 Val AUC: 0.9242 Time: 13.71\n",
      "Epoch: 83 Train Loss: 0.2953 Val Loss: 0.3443 Acc: 0.8424 Pre: 0.8691 Recall: 0.8241 F1: 0.8460 Train AUC: 0.9474 Val AUC: 0.9292 Time: 14.40\n",
      "Epoch: 84 Train Loss: 0.2864 Val Loss: 0.3473 Acc: 0.8478 Pre: 0.8705 Recall: 0.8345 F1: 0.8521 Train AUC: 0.9496 Val AUC: 0.9292 Time: 15.05\n",
      "Epoch: 85 Train Loss: 0.2910 Val Loss: 0.3547 Acc: 0.8478 Pre: 0.8872 Recall: 0.8138 F1: 0.8489 Train AUC: 0.9498 Val AUC: 0.9295 Time: 13.81\n",
      "Epoch: 86 Train Loss: 0.2981 Val Loss: 0.3472 Acc: 0.8569 Pre: 0.8951 Recall: 0.8241 F1: 0.8582 Train AUC: 0.9487 Val AUC: 0.9290 Time: 13.65\n",
      "Epoch: 87 Train Loss: 0.2955 Val Loss: 0.3544 Acc: 0.8442 Pre: 0.8669 Recall: 0.8310 F1: 0.8486 Train AUC: 0.9475 Val AUC: 0.9233 Time: 12.92\n",
      "Epoch: 88 Train Loss: 0.2948 Val Loss: 0.3657 Acc: 0.8315 Pre: 0.8505 Recall: 0.8241 F1: 0.8371 Train AUC: 0.9469 Val AUC: 0.9203 Time: 13.49\n",
      "Epoch: 89 Train Loss: 0.2958 Val Loss: 0.3722 Acc: 0.8370 Pre: 0.8704 Recall: 0.8103 F1: 0.8393 Train AUC: 0.9456 Val AUC: 0.9192 Time: 14.17\n",
      "Epoch: 90 Train Loss: 0.2863 Val Loss: 0.3652 Acc: 0.8424 Pre: 0.8801 Recall: 0.8103 F1: 0.8438 Train AUC: 0.9496 Val AUC: 0.9206 Time: 14.75\n",
      "Epoch: 91 Train Loss: 0.2952 Val Loss: 0.3582 Acc: 0.8388 Pre: 0.8736 Recall: 0.8103 F1: 0.8408 Train AUC: 0.9450 Val AUC: 0.9225 Time: 14.31\n",
      "Epoch: 92 Train Loss: 0.2875 Val Loss: 0.3538 Acc: 0.8315 Pre: 0.8505 Recall: 0.8241 F1: 0.8371 Train AUC: 0.9501 Val AUC: 0.9243 Time: 13.40\n",
      "Epoch: 93 Train Loss: 0.2768 Val Loss: 0.3554 Acc: 0.8315 Pre: 0.8635 Recall: 0.8069 F1: 0.8342 Train AUC: 0.9538 Val AUC: 0.9251 Time: 13.30\n",
      "Epoch: 94 Train Loss: 0.2804 Val Loss: 0.3632 Acc: 0.8478 Pre: 0.8962 Recall: 0.8034 F1: 0.8473 Train AUC: 0.9529 Val AUC: 0.9247 Time: 14.01\n",
      "Epoch: 95 Train Loss: 0.2874 Val Loss: 0.3614 Acc: 0.8478 Pre: 0.8962 Recall: 0.8034 F1: 0.8473 Train AUC: 0.9506 Val AUC: 0.9233 Time: 14.28\n",
      "Epoch: 96 Train Loss: 0.2855 Val Loss: 0.3587 Acc: 0.8388 Pre: 0.8526 Recall: 0.8379 F1: 0.8452 Train AUC: 0.9521 Val AUC: 0.9221 Time: 13.68\n",
      "Epoch: 97 Train Loss: 0.2941 Val Loss: 0.3597 Acc: 0.8351 Pre: 0.8491 Recall: 0.8345 F1: 0.8417 Train AUC: 0.9475 Val AUC: 0.9220 Time: 13.58\n",
      "Epoch: 98 Train Loss: 0.2919 Val Loss: 0.3605 Acc: 0.8460 Pre: 0.8897 Recall: 0.8069 F1: 0.8463 Train AUC: 0.9485 Val AUC: 0.9236 Time: 13.71\n",
      "Epoch: 99 Train Loss: 0.2761 Val Loss: 0.3559 Acc: 0.8496 Pre: 0.8966 Recall: 0.8069 F1: 0.8494 Train AUC: 0.9527 Val AUC: 0.9263 Time: 14.13\n",
      "Epoch: 100 Train Loss: 0.2732 Val Loss: 0.3462 Acc: 0.8388 Pre: 0.8681 Recall: 0.8172 F1: 0.8419 Train AUC: 0.9570 Val AUC: 0.9294 Time: 13.96\n",
      "Epoch: 101 Train Loss: 0.2756 Val Loss: 0.3416 Acc: 0.8388 Pre: 0.8577 Recall: 0.8310 F1: 0.8441 Train AUC: 0.9562 Val AUC: 0.9303 Time: 14.39\n",
      "Epoch: 102 Train Loss: 0.2688 Val Loss: 0.3430 Acc: 0.8424 Pre: 0.8691 Recall: 0.8241 F1: 0.8460 Train AUC: 0.9559 Val AUC: 0.9299 Time: 14.14\n",
      "Epoch: 103 Train Loss: 0.2717 Val Loss: 0.3549 Acc: 0.8533 Pre: 0.9130 Recall: 0.7966 F1: 0.8508 Train AUC: 0.9545 Val AUC: 0.9288 Time: 13.84\n",
      "Epoch: 104 Train Loss: 0.2797 Val Loss: 0.3491 Acc: 0.8533 Pre: 0.8943 Recall: 0.8172 F1: 0.8541 Train AUC: 0.9537 Val AUC: 0.9283 Time: 13.17\n",
      "Epoch: 105 Train Loss: 0.2775 Val Loss: 0.3432 Acc: 0.8514 Pre: 0.8796 Recall: 0.8310 F1: 0.8546 Train AUC: 0.9521 Val AUC: 0.9288 Time: 13.26\n",
      "Epoch: 106 Train Loss: 0.2763 Val Loss: 0.3464 Acc: 0.8496 Pre: 0.8848 Recall: 0.8207 F1: 0.8515 Train AUC: 0.9534 Val AUC: 0.9280 Time: 13.93\n",
      "Epoch: 107 Train Loss: 0.2664 Val Loss: 0.3468 Acc: 0.8533 Pre: 0.8943 Recall: 0.8172 F1: 0.8541 Train AUC: 0.9559 Val AUC: 0.9282 Time: 13.84\n",
      "Epoch: 108 Train Loss: 0.2705 Val Loss: 0.3508 Acc: 0.8587 Pre: 0.9046 Recall: 0.8172 F1: 0.8587 Train AUC: 0.9543 Val AUC: 0.9278 Time: 14.22\n",
      "Epoch: 109 Train Loss: 0.2722 Val Loss: 0.3464 Acc: 0.8569 Pre: 0.8981 Recall: 0.8207 F1: 0.8577 Train AUC: 0.9531 Val AUC: 0.9286 Time: 13.79\n",
      "Epoch: 110 Train Loss: 0.2749 Val Loss: 0.3444 Acc: 0.8442 Pre: 0.8696 Recall: 0.8276 F1: 0.8481 Train AUC: 0.9530 Val AUC: 0.9285 Time: 14.22\n",
      "Epoch: 111 Train Loss: 0.2737 Val Loss: 0.3477 Acc: 0.8388 Pre: 0.8577 Recall: 0.8310 F1: 0.8441 Train AUC: 0.9563 Val AUC: 0.9260 Time: 13.94\n",
      "Epoch: 112 Train Loss: 0.2695 Val Loss: 0.3639 Acc: 0.8424 Pre: 0.8919 Recall: 0.7966 F1: 0.8415 Train AUC: 0.9557 Val AUC: 0.9218 Time: 13.37\n",
      "Epoch: 113 Train Loss: 0.2716 Val Loss: 0.3664 Acc: 0.8388 Pre: 0.8851 Recall: 0.7966 F1: 0.8385 Train AUC: 0.9540 Val AUC: 0.9208 Time: 13.73\n",
      "Epoch: 114 Train Loss: 0.2703 Val Loss: 0.3559 Acc: 0.8406 Pre: 0.8659 Recall: 0.8241 F1: 0.8445 Train AUC: 0.9554 Val AUC: 0.9222 Time: 14.31\n",
      "Epoch: 115 Train Loss: 0.2688 Val Loss: 0.3504 Acc: 0.8388 Pre: 0.8551 Recall: 0.8345 F1: 0.8447 Train AUC: 0.9567 Val AUC: 0.9239 Time: 14.77\n",
      "Epoch: 116 Train Loss: 0.2636 Val Loss: 0.3547 Acc: 0.8514 Pre: 0.9000 Recall: 0.8069 F1: 0.8509 Train AUC: 0.9585 Val AUC: 0.9251 Time: 14.47\n",
      "Epoch: 117 Train Loss: 0.2607 Val Loss: 0.3602 Acc: 0.8533 Pre: 0.9163 Recall: 0.7931 F1: 0.8503 Train AUC: 0.9594 Val AUC: 0.9268 Time: 13.34\n",
      "Epoch: 118 Train Loss: 0.2650 Val Loss: 0.3511 Acc: 0.8569 Pre: 0.9137 Recall: 0.8034 F1: 0.8550 Train AUC: 0.9568 Val AUC: 0.9292 Time: 12.78\n",
      "Epoch: 119 Train Loss: 0.2688 Val Loss: 0.3344 Acc: 0.8424 Pre: 0.8664 Recall: 0.8276 F1: 0.8466 Train AUC: 0.9579 Val AUC: 0.9314 Time: 13.27\n",
      "Epoch: 120 Train Loss: 0.2744 Val Loss: 0.3339 Acc: 0.8424 Pre: 0.8561 Recall: 0.8414 F1: 0.8487 Train AUC: 0.9532 Val AUC: 0.9314 Time: 13.86\n",
      "Epoch: 121 Train Loss: 0.2671 Val Loss: 0.3420 Acc: 0.8442 Pre: 0.8723 Recall: 0.8241 F1: 0.8475 Train AUC: 0.9576 Val AUC: 0.9292 Time: 14.49\n",
      "Epoch: 122 Train Loss: 0.2620 Val Loss: 0.3613 Acc: 0.8533 Pre: 0.9163 Recall: 0.7931 F1: 0.8503 Train AUC: 0.9576 Val AUC: 0.9274 Time: 15.26\n",
      "Epoch: 123 Train Loss: 0.2652 Val Loss: 0.3637 Acc: 0.8533 Pre: 0.9163 Recall: 0.7931 F1: 0.8503 Train AUC: 0.9573 Val AUC: 0.9274 Time: 14.44\n",
      "Epoch: 124 Train Loss: 0.2615 Val Loss: 0.3427 Acc: 0.8424 Pre: 0.8801 Recall: 0.8103 F1: 0.8438 Train AUC: 0.9585 Val AUC: 0.9300 Time: 13.08\n",
      "Epoch: 125 Train Loss: 0.2675 Val Loss: 0.3362 Acc: 0.8478 Pre: 0.8552 Recall: 0.8552 F1: 0.8552 Train AUC: 0.9571 Val AUC: 0.9317 Time: 12.49\n",
      "Epoch: 126 Train Loss: 0.2643 Val Loss: 0.3382 Acc: 0.8496 Pre: 0.8581 Recall: 0.8552 F1: 0.8566 Train AUC: 0.9583 Val AUC: 0.9309 Time: 12.39\n",
      "Epoch: 127 Train Loss: 0.2635 Val Loss: 0.3580 Acc: 0.8424 Pre: 0.8773 Recall: 0.8138 F1: 0.8444 Train AUC: 0.9593 Val AUC: 0.9258 Time: 13.00\n",
      "Epoch: 128 Train Loss: 0.2510 Val Loss: 0.3823 Acc: 0.8496 Pre: 0.9124 Recall: 0.7897 F1: 0.8466 Train AUC: 0.9610 Val AUC: 0.9236 Time: 13.68\n",
      "Epoch: 129 Train Loss: 0.2524 Val Loss: 0.3629 Acc: 0.8460 Pre: 0.8782 Recall: 0.8207 F1: 0.8485 Train AUC: 0.9621 Val AUC: 0.9259 Time: 14.01\n",
      "Epoch: 130 Train Loss: 0.2606 Val Loss: 0.3416 Acc: 0.8496 Pre: 0.8632 Recall: 0.8483 F1: 0.8557 Train AUC: 0.9586 Val AUC: 0.9298 Time: 14.63\n",
      "Epoch: 131 Train Loss: 0.2674 Val Loss: 0.3351 Acc: 0.8496 Pre: 0.8683 Recall: 0.8414 F1: 0.8546 Train AUC: 0.9570 Val AUC: 0.9326 Time: 15.31\n",
      "Epoch: 132 Train Loss: 0.2586 Val Loss: 0.3412 Acc: 0.8514 Pre: 0.8939 Recall: 0.8138 F1: 0.8520 Train AUC: 0.9610 Val AUC: 0.9324 Time: 14.88\n",
      "Epoch: 133 Train Loss: 0.2789 Val Loss: 0.3445 Acc: 0.8514 Pre: 0.8881 Recall: 0.8207 F1: 0.8530 Train AUC: 0.9554 Val AUC: 0.9300 Time: 13.36\n",
      "Epoch: 134 Train Loss: 0.2514 Val Loss: 0.3538 Acc: 0.8406 Pre: 0.8659 Recall: 0.8241 F1: 0.8445 Train AUC: 0.9616 Val AUC: 0.9268 Time: 12.81\n",
      "Epoch: 135 Train Loss: 0.2565 Val Loss: 0.3625 Acc: 0.8424 Pre: 0.8718 Recall: 0.8207 F1: 0.8455 Train AUC: 0.9603 Val AUC: 0.9248 Time: 13.03\n",
      "Epoch: 136 Train Loss: 0.2509 Val Loss: 0.3566 Acc: 0.8424 Pre: 0.8691 Recall: 0.8241 F1: 0.8460 Train AUC: 0.9615 Val AUC: 0.9263 Time: 13.60\n",
      "Epoch: 137 Train Loss: 0.2458 Val Loss: 0.3477 Acc: 0.8514 Pre: 0.8852 Recall: 0.8241 F1: 0.8536 Train AUC: 0.9630 Val AUC: 0.9292 Time: 13.74\n",
      "Epoch: 138 Train Loss: 0.2460 Val Loss: 0.3435 Acc: 0.8514 Pre: 0.8969 Recall: 0.8103 F1: 0.8514 Train AUC: 0.9634 Val AUC: 0.9311 Time: 14.17\n",
      "Epoch: 139 Train Loss: 0.2572 Val Loss: 0.3429 Acc: 0.8569 Pre: 0.8951 Recall: 0.8241 F1: 0.8582 Train AUC: 0.9609 Val AUC: 0.9309 Time: 14.91\n",
      "Epoch: 140 Train Loss: 0.2450 Val Loss: 0.3427 Acc: 0.8514 Pre: 0.8824 Recall: 0.8276 F1: 0.8541 Train AUC: 0.9633 Val AUC: 0.9307 Time: 14.44\n",
      "Epoch: 141 Train Loss: 0.2430 Val Loss: 0.3458 Acc: 0.8514 Pre: 0.8852 Recall: 0.8241 F1: 0.8536 Train AUC: 0.9639 Val AUC: 0.9303 Time: 13.41\n",
      "Epoch: 142 Train Loss: 0.2453 Val Loss: 0.3510 Acc: 0.8514 Pre: 0.8881 Recall: 0.8207 F1: 0.8530 Train AUC: 0.9637 Val AUC: 0.9289 Time: 12.91\n",
      "Epoch: 143 Train Loss: 0.2475 Val Loss: 0.3456 Acc: 0.8514 Pre: 0.8796 Recall: 0.8310 F1: 0.8546 Train AUC: 0.9629 Val AUC: 0.9299 Time: 13.46\n",
      "Epoch: 144 Train Loss: 0.2475 Val Loss: 0.3385 Acc: 0.8569 Pre: 0.8809 Recall: 0.8414 F1: 0.8607 Train AUC: 0.9622 Val AUC: 0.9321 Time: 13.81\n",
      "Epoch: 145 Train Loss: 0.2426 Val Loss: 0.3410 Acc: 0.8514 Pre: 0.8824 Recall: 0.8276 F1: 0.8541 Train AUC: 0.9643 Val AUC: 0.9321 Time: 14.61\n",
      "Epoch: 146 Train Loss: 0.2462 Val Loss: 0.3425 Acc: 0.8478 Pre: 0.8843 Recall: 0.8172 F1: 0.8495 Train AUC: 0.9634 Val AUC: 0.9323 Time: 15.40\n",
      "Epoch: 147 Train Loss: 0.2507 Val Loss: 0.3373 Acc: 0.8478 Pre: 0.8652 Recall: 0.8414 F1: 0.8531 Train AUC: 0.9620 Val AUC: 0.9326 Time: 14.85\n",
      "Epoch: 148 Train Loss: 0.2428 Val Loss: 0.3403 Acc: 0.8460 Pre: 0.8727 Recall: 0.8276 F1: 0.8496 Train AUC: 0.9643 Val AUC: 0.9318 Time: 13.58\n",
      "Epoch: 149 Train Loss: 0.2479 Val Loss: 0.3576 Acc: 0.8460 Pre: 0.8988 Recall: 0.7966 F1: 0.8446 Train AUC: 0.9638 Val AUC: 0.9290 Time: 12.70\n",
      "Epoch: 150 Train Loss: 0.2477 Val Loss: 0.3430 Acc: 0.8496 Pre: 0.8819 Recall: 0.8241 F1: 0.8520 Train AUC: 0.9639 Val AUC: 0.9308 Time: 12.55\n",
      "Epoch: 151 Train Loss: 0.2393 Val Loss: 0.3290 Acc: 0.8478 Pre: 0.8527 Recall: 0.8586 F1: 0.8557 Train AUC: 0.9653 Val AUC: 0.9338 Time: 12.99\n",
      "Epoch: 152 Train Loss: 0.2401 Val Loss: 0.3319 Acc: 0.8587 Pre: 0.8985 Recall: 0.8241 F1: 0.8597 Train AUC: 0.9672 Val AUC: 0.9345 Time: 13.60\n",
      "Epoch: 153 Train Loss: 0.2397 Val Loss: 0.3425 Acc: 0.8551 Pre: 0.9167 Recall: 0.7966 F1: 0.8524 Train AUC: 0.9648 Val AUC: 0.9345 Time: 14.36\n",
      "Epoch: 154 Train Loss: 0.2461 Val Loss: 0.3232 Acc: 0.8569 Pre: 0.8836 Recall: 0.8379 F1: 0.8602 Train AUC: 0.9662 Val AUC: 0.9365 Time: 14.91\n",
      "Epoch: 155 Train Loss: 0.2396 Val Loss: 0.3258 Acc: 0.8514 Pre: 0.8662 Recall: 0.8483 F1: 0.8571 Train AUC: 0.9657 Val AUC: 0.9362 Time: 15.39\n",
      "Epoch: 156 Train Loss: 0.2451 Val Loss: 0.3469 Acc: 0.8587 Pre: 0.9173 Recall: 0.8034 F1: 0.8566 Train AUC: 0.9661 Val AUC: 0.9324 Time: 13.78\n",
      "Epoch: 157 Train Loss: 0.2416 Val Loss: 0.3724 Acc: 0.8514 Pre: 0.9333 Recall: 0.7724 F1: 0.8453 Train AUC: 0.9635 Val AUC: 0.9298 Time: 12.92\n",
      "Epoch: 158 Train Loss: 0.2398 Val Loss: 0.3462 Acc: 0.8460 Pre: 0.8897 Recall: 0.8069 F1: 0.8463 Train AUC: 0.9661 Val AUC: 0.9296 Time: 12.76\n",
      "Epoch: 159 Train Loss: 0.2391 Val Loss: 0.3323 Acc: 0.8442 Pre: 0.8517 Recall: 0.8517 F1: 0.8517 Train AUC: 0.9652 Val AUC: 0.9313 Time: 13.38\n",
      "Epoch: 160 Train Loss: 0.2345 Val Loss: 0.3302 Acc: 0.8514 Pre: 0.8741 Recall: 0.8379 F1: 0.8556 Train AUC: 0.9689 Val AUC: 0.9331 Time: 13.69\n",
      "Epoch: 161 Train Loss: 0.2481 Val Loss: 0.3489 Acc: 0.8587 Pre: 0.9344 Recall: 0.7862 F1: 0.8539 Train AUC: 0.9640 Val AUC: 0.9340 Time: 14.33\n",
      "Epoch: 162 Train Loss: 0.2384 Val Loss: 0.3453 Acc: 0.8605 Pre: 0.9209 Recall: 0.8034 F1: 0.8582 Train AUC: 0.9675 Val AUC: 0.9354 Time: 15.17\n",
      "Epoch: 163 Train Loss: 0.2469 Val Loss: 0.3345 Acc: 0.8551 Pre: 0.8832 Recall: 0.8345 F1: 0.8582 Train AUC: 0.9630 Val AUC: 0.9351 Time: 13.86\n",
      "Epoch: 164 Train Loss: 0.2274 Val Loss: 0.3424 Acc: 0.8533 Pre: 0.8719 Recall: 0.8448 F1: 0.8581 Train AUC: 0.9692 Val AUC: 0.9323 Time: 13.34\n",
      "Epoch: 165 Train Loss: 0.2371 Val Loss: 0.3617 Acc: 0.8442 Pre: 0.8984 Recall: 0.7931 F1: 0.8425 Train AUC: 0.9665 Val AUC: 0.9281 Time: 13.02\n",
      "Epoch: 166 Train Loss: 0.2361 Val Loss: 0.3683 Acc: 0.8496 Pre: 0.9124 Recall: 0.7897 F1: 0.8466 Train AUC: 0.9661 Val AUC: 0.9285 Time: 13.45\n",
      "Epoch: 167 Train Loss: 0.2350 Val Loss: 0.3450 Acc: 0.8605 Pre: 0.9080 Recall: 0.8172 F1: 0.8603 Train AUC: 0.9679 Val AUC: 0.9331 Time: 14.08\n",
      "Epoch: 168 Train Loss: 0.2357 Val Loss: 0.3283 Acc: 0.8496 Pre: 0.8485 Recall: 0.8690 F1: 0.8586 Train AUC: 0.9680 Val AUC: 0.9373 Time: 14.79\n",
      "Epoch: 169 Train Loss: 0.2383 Val Loss: 0.3327 Acc: 0.8659 Pre: 0.8971 Recall: 0.8414 F1: 0.8683 Train AUC: 0.9685 Val AUC: 0.9372 Time: 15.35\n",
      "Epoch: 170 Train Loss: 0.2422 Val Loss: 0.3535 Acc: 0.8551 Pre: 0.9268 Recall: 0.7862 F1: 0.8507 Train AUC: 0.9645 Val AUC: 0.9379 Time: 13.99\n",
      "Epoch: 171 Train Loss: 0.2420 Val Loss: 0.3318 Acc: 0.8641 Pre: 0.9151 Recall: 0.8172 F1: 0.8634 Train AUC: 0.9678 Val AUC: 0.9386 Time: 12.86\n",
      "Epoch: 172 Train Loss: 0.2212 Val Loss: 0.3217 Acc: 0.8478 Pre: 0.8503 Recall: 0.8621 F1: 0.8562 Train AUC: 0.9700 Val AUC: 0.9377 Time: 12.45\n",
      "Epoch: 173 Train Loss: 0.2283 Val Loss: 0.3239 Acc: 0.8533 Pre: 0.8591 Recall: 0.8621 F1: 0.8606 Train AUC: 0.9711 Val AUC: 0.9369 Time: 12.84\n",
      "Epoch: 174 Train Loss: 0.2297 Val Loss: 0.3474 Acc: 0.8623 Pre: 0.9280 Recall: 0.8000 F1: 0.8593 Train AUC: 0.9705 Val AUC: 0.9343 Time: 13.10\n",
      "Epoch: 175 Train Loss: 0.2319 Val Loss: 0.3654 Acc: 0.8551 Pre: 0.9339 Recall: 0.7793 F1: 0.8496 Train AUC: 0.9676 Val AUC: 0.9342 Time: 13.59\n",
      "Epoch: 176 Train Loss: 0.2293 Val Loss: 0.3433 Acc: 0.8659 Pre: 0.9219 Recall: 0.8138 F1: 0.8645 Train AUC: 0.9682 Val AUC: 0.9355 Time: 14.16\n",
      "Epoch: 177 Train Loss: 0.2267 Val Loss: 0.3214 Acc: 0.8623 Pre: 0.8821 Recall: 0.8517 F1: 0.8667 Train AUC: 0.9688 Val AUC: 0.9370 Time: 14.87\n",
      "Epoch: 178 Train Loss: 0.2199 Val Loss: 0.3155 Acc: 0.8641 Pre: 0.8826 Recall: 0.8552 F1: 0.8687 Train AUC: 0.9733 Val AUC: 0.9387 Time: 14.82\n",
      "Epoch: 179 Train Loss: 0.2366 Val Loss: 0.3349 Acc: 0.8605 Pre: 0.9243 Recall: 0.8000 F1: 0.8577 Train AUC: 0.9693 Val AUC: 0.9384 Time: 15.37\n",
      "Epoch: 180 Train Loss: 0.2304 Val Loss: 0.3501 Acc: 0.8587 Pre: 0.9344 Recall: 0.7862 F1: 0.8539 Train AUC: 0.9678 Val AUC: 0.9369 Time: 13.70\n",
      "Epoch: 181 Train Loss: 0.2345 Val Loss: 0.3412 Acc: 0.8696 Pre: 0.9192 Recall: 0.8241 F1: 0.8691 Train AUC: 0.9671 Val AUC: 0.9349 Time: 12.85\n",
      "Epoch: 182 Train Loss: 0.2270 Val Loss: 0.3357 Acc: 0.8478 Pre: 0.8552 Recall: 0.8552 F1: 0.8552 Train AUC: 0.9686 Val AUC: 0.9335 Time: 13.02\n",
      "Epoch: 183 Train Loss: 0.2279 Val Loss: 0.3389 Acc: 0.8496 Pre: 0.8632 Recall: 0.8483 F1: 0.8557 Train AUC: 0.9703 Val AUC: 0.9335 Time: 13.28\n",
      "Epoch: 184 Train Loss: 0.2201 Val Loss: 0.3445 Acc: 0.8605 Pre: 0.9019 Recall: 0.8241 F1: 0.8613 Train AUC: 0.9708 Val AUC: 0.9347 Time: 13.82\n",
      "Epoch: 185 Train Loss: 0.2185 Val Loss: 0.3476 Acc: 0.8605 Pre: 0.9144 Recall: 0.8103 F1: 0.8592 Train AUC: 0.9709 Val AUC: 0.9368 Time: 14.53\n",
      "Epoch: 186 Train Loss: 0.2151 Val Loss: 0.3312 Acc: 0.8605 Pre: 0.9080 Recall: 0.8172 F1: 0.8603 Train AUC: 0.9723 Val AUC: 0.9397 Time: 15.15\n",
      "Epoch: 187 Train Loss: 0.2153 Val Loss: 0.3225 Acc: 0.8623 Pre: 0.8905 Recall: 0.8414 F1: 0.8652 Train AUC: 0.9721 Val AUC: 0.9402 Time: 14.62\n",
      "Epoch: 188 Train Loss: 0.2257 Val Loss: 0.3255 Acc: 0.8478 Pre: 0.8552 Recall: 0.8552 F1: 0.8552 Train AUC: 0.9700 Val AUC: 0.9377 Time: 13.33\n",
      "Epoch: 189 Train Loss: 0.2181 Val Loss: 0.3419 Acc: 0.8587 Pre: 0.9046 Recall: 0.8172 F1: 0.8587 Train AUC: 0.9729 Val AUC: 0.9362 Time: 12.76\n",
      "Epoch: 190 Train Loss: 0.2198 Val Loss: 0.3400 Acc: 0.8551 Pre: 0.8860 Recall: 0.8310 F1: 0.8577 Train AUC: 0.9722 Val AUC: 0.9356 Time: 13.00\n",
      "Epoch: 191 Train Loss: 0.2077 Val Loss: 0.3376 Acc: 0.8623 Pre: 0.8905 Recall: 0.8414 F1: 0.8652 Train AUC: 0.9741 Val AUC: 0.9371 Time: 13.48\n",
      "Epoch: 192 Train Loss: 0.2131 Val Loss: 0.3252 Acc: 0.8605 Pre: 0.8763 Recall: 0.8552 F1: 0.8656 Train AUC: 0.9724 Val AUC: 0.9393 Time: 14.10\n",
      "Epoch: 193 Train Loss: 0.2082 Val Loss: 0.3230 Acc: 0.8678 Pre: 0.8917 Recall: 0.8517 F1: 0.8713 Train AUC: 0.9745 Val AUC: 0.9402 Time: 14.78\n",
      "Epoch: 194 Train Loss: 0.2176 Val Loss: 0.3301 Acc: 0.8623 Pre: 0.9180 Recall: 0.8103 F1: 0.8608 Train AUC: 0.9719 Val AUC: 0.9407 Time: 14.49\n",
      "Epoch: 195 Train Loss: 0.2231 Val Loss: 0.3229 Acc: 0.8605 Pre: 0.9049 Recall: 0.8207 F1: 0.8608 Train AUC: 0.9704 Val AUC: 0.9404 Time: 13.48\n",
      "Epoch: 196 Train Loss: 0.2075 Val Loss: 0.3193 Acc: 0.8587 Pre: 0.8841 Recall: 0.8414 F1: 0.8622 Train AUC: 0.9747 Val AUC: 0.9397 Time: 13.39\n",
      "Epoch: 197 Train Loss: 0.2138 Val Loss: 0.3312 Acc: 0.8641 Pre: 0.9057 Recall: 0.8276 F1: 0.8649 Train AUC: 0.9732 Val AUC: 0.9384 Time: 13.49\n",
      "Epoch: 198 Train Loss: 0.2158 Val Loss: 0.3480 Acc: 0.8587 Pre: 0.9141 Recall: 0.8069 F1: 0.8571 Train AUC: 0.9720 Val AUC: 0.9369 Time: 13.71\n",
      "Epoch: 199 Train Loss: 0.2164 Val Loss: 0.3527 Acc: 0.8569 Pre: 0.9170 Recall: 0.8000 F1: 0.8545 Train AUC: 0.9715 Val AUC: 0.9353 Time: 14.22\n",
      "Epoch: 200 Train Loss: 0.2146 Val Loss: 0.3310 Acc: 0.8659 Pre: 0.8971 Recall: 0.8414 F1: 0.8683 Train AUC: 0.9719 Val AUC: 0.9361 Time: 14.14\n",
      "Epoch: 201 Train Loss: 0.2122 Val Loss: 0.3189 Acc: 0.8569 Pre: 0.8625 Recall: 0.8655 F1: 0.8640 Train AUC: 0.9729 Val AUC: 0.9378 Time: 14.02\n",
      "Epoch: 202 Train Loss: 0.2103 Val Loss: 0.3147 Acc: 0.8569 Pre: 0.8981 Recall: 0.8207 F1: 0.8577 Train AUC: 0.9753 Val AUC: 0.9410 Time: 13.56\n",
      "Epoch: 203 Train Loss: 0.2102 Val Loss: 0.3268 Acc: 0.8587 Pre: 0.9141 Recall: 0.8069 F1: 0.8571 Train AUC: 0.9757 Val AUC: 0.9421 Time: 13.61\n",
      "Epoch: 204 Train Loss: 0.2102 Val Loss: 0.3306 Acc: 0.8569 Pre: 0.9105 Recall: 0.8069 F1: 0.8556 Train AUC: 0.9735 Val AUC: 0.9412 Time: 13.41\n",
      "Epoch: 205 Train Loss: 0.2087 Val Loss: 0.3293 Acc: 0.8732 Pre: 0.9015 Recall: 0.8517 F1: 0.8759 Train AUC: 0.9738 Val AUC: 0.9387 Time: 13.96\n",
      "Epoch: 206 Train Loss: 0.2011 Val Loss: 0.3386 Acc: 0.8605 Pre: 0.8737 Recall: 0.8586 F1: 0.8661 Train AUC: 0.9771 Val AUC: 0.9358 Time: 13.95\n",
      "Epoch: 207 Train Loss: 0.2110 Val Loss: 0.3435 Acc: 0.8696 Pre: 0.9007 Recall: 0.8448 F1: 0.8719 Train AUC: 0.9744 Val AUC: 0.9370 Time: 13.98\n",
      "Epoch: 208 Train Loss: 0.2032 Val Loss: 0.3461 Acc: 0.8659 Pre: 0.9219 Recall: 0.8138 F1: 0.8645 Train AUC: 0.9748 Val AUC: 0.9385 Time: 14.51\n",
      "Epoch: 209 Train Loss: 0.2022 Val Loss: 0.3212 Acc: 0.8768 Pre: 0.9081 Recall: 0.8517 F1: 0.8790 Train AUC: 0.9762 Val AUC: 0.9418 Time: 14.04\n",
      "Epoch: 210 Train Loss: 0.2073 Val Loss: 0.3081 Acc: 0.8804 Pre: 0.9118 Recall: 0.8552 F1: 0.8826 Train AUC: 0.9753 Val AUC: 0.9454 Time: 13.36\n",
      "Epoch: 211 Train Loss: 0.2032 Val Loss: 0.3160 Acc: 0.8750 Pre: 0.9139 Recall: 0.8414 F1: 0.8761 Train AUC: 0.9763 Val AUC: 0.9462 Time: 12.89\n",
      "Epoch: 212 Train Loss: 0.2099 Val Loss: 0.3243 Acc: 0.8732 Pre: 0.9135 Recall: 0.8379 F1: 0.8741 Train AUC: 0.9741 Val AUC: 0.9426 Time: 13.16\n",
      "Epoch: 213 Train Loss: 0.2040 Val Loss: 0.3352 Acc: 0.8659 Pre: 0.8857 Recall: 0.8552 F1: 0.8702 Train AUC: 0.9745 Val AUC: 0.9375 Time: 13.58\n",
      "Epoch: 214 Train Loss: 0.1980 Val Loss: 0.3489 Acc: 0.8659 Pre: 0.9060 Recall: 0.8310 F1: 0.8669 Train AUC: 0.9767 Val AUC: 0.9353 Time: 14.25\n",
      "Epoch: 215 Train Loss: 0.2053 Val Loss: 0.3484 Acc: 0.8641 Pre: 0.9183 Recall: 0.8138 F1: 0.8629 Train AUC: 0.9740 Val AUC: 0.9376 Time: 15.14\n",
      "Epoch: 216 Train Loss: 0.2084 Val Loss: 0.3210 Acc: 0.8659 Pre: 0.9122 Recall: 0.8241 F1: 0.8659 Train AUC: 0.9735 Val AUC: 0.9430 Time: 14.33\n",
      "Epoch: 217 Train Loss: 0.2038 Val Loss: 0.3091 Acc: 0.8732 Pre: 0.8986 Recall: 0.8552 F1: 0.8763 Train AUC: 0.9759 Val AUC: 0.9452 Time: 13.76\n",
      "Epoch: 218 Train Loss: 0.2019 Val Loss: 0.3109 Acc: 0.8732 Pre: 0.8986 Recall: 0.8552 F1: 0.8763 Train AUC: 0.9764 Val AUC: 0.9450 Time: 13.54\n",
      "Epoch: 219 Train Loss: 0.2022 Val Loss: 0.3346 Acc: 0.8641 Pre: 0.8996 Recall: 0.8345 F1: 0.8658 Train AUC: 0.9766 Val AUC: 0.9410 Time: 13.98\n",
      "Epoch: 220 Train Loss: 0.2004 Val Loss: 0.3544 Acc: 0.8569 Pre: 0.9073 Recall: 0.8103 F1: 0.8561 Train AUC: 0.9758 Val AUC: 0.9375 Time: 14.43\n",
      "Epoch: 221 Train Loss: 0.1982 Val Loss: 0.3518 Acc: 0.8605 Pre: 0.9049 Recall: 0.8207 F1: 0.8608 Train AUC: 0.9763 Val AUC: 0.9347 Time: 14.54\n",
      "Epoch: 222 Train Loss: 0.1980 Val Loss: 0.3308 Acc: 0.8514 Pre: 0.8586 Recall: 0.8586 F1: 0.8586 Train AUC: 0.9760 Val AUC: 0.9362 Time: 13.46\n",
      "Epoch: 223 Train Loss: 0.2013 Val Loss: 0.3140 Acc: 0.8623 Pre: 0.8905 Recall: 0.8414 F1: 0.8652 Train AUC: 0.9767 Val AUC: 0.9434 Time: 13.02\n",
      "Epoch: 224 Train Loss: 0.2018 Val Loss: 0.3238 Acc: 0.8641 Pre: 0.8996 Recall: 0.8345 F1: 0.8658 Train AUC: 0.9778 Val AUC: 0.9440 Time: 12.63\n",
      "Epoch: 225 Train Loss: 0.2060 Val Loss: 0.3333 Acc: 0.8678 Pre: 0.9004 Recall: 0.8414 F1: 0.8699 Train AUC: 0.9743 Val AUC: 0.9416 Time: 13.25\n",
      "Epoch: 226 Train Loss: 0.2044 Val Loss: 0.3368 Acc: 0.8768 Pre: 0.9051 Recall: 0.8552 F1: 0.8794 Train AUC: 0.9744 Val AUC: 0.9384 Time: 13.55\n",
      "Epoch: 227 Train Loss: 0.2013 Val Loss: 0.3439 Acc: 0.8641 Pre: 0.8996 Recall: 0.8345 F1: 0.8658 Train AUC: 0.9757 Val AUC: 0.9347 Time: 14.30\n",
      "Epoch: 228 Train Loss: 0.1941 Val Loss: 0.3504 Acc: 0.8696 Pre: 0.9291 Recall: 0.8138 F1: 0.8676 Train AUC: 0.9779 Val AUC: 0.9360 Time: 14.93\n",
      "Epoch: 229 Train Loss: 0.2047 Val Loss: 0.3175 Acc: 0.8732 Pre: 0.9167 Recall: 0.8345 F1: 0.8736 Train AUC: 0.9769 Val AUC: 0.9424 Time: 15.20\n",
      "Epoch: 230 Train Loss: 0.1933 Val Loss: 0.3025 Acc: 0.8750 Pre: 0.9018 Recall: 0.8552 F1: 0.8779 Train AUC: 0.9790 Val AUC: 0.9466 Time: 13.68\n",
      "Epoch: 231 Train Loss: 0.2002 Val Loss: 0.3178 Acc: 0.8659 Pre: 0.8971 Recall: 0.8414 F1: 0.8683 Train AUC: 0.9770 Val AUC: 0.9452 Time: 13.17\n",
      "Epoch: 232 Train Loss: 0.2020 Val Loss: 0.3428 Acc: 0.8659 Pre: 0.9091 Recall: 0.8276 F1: 0.8664 Train AUC: 0.9750 Val AUC: 0.9409 Time: 12.80\n",
      "Epoch: 233 Train Loss: 0.2038 Val Loss: 0.3432 Acc: 0.8804 Pre: 0.9148 Recall: 0.8517 F1: 0.8821 Train AUC: 0.9745 Val AUC: 0.9374 Time: 13.54\n",
      "Epoch: 234 Train Loss: 0.1909 Val Loss: 0.3405 Acc: 0.8623 Pre: 0.8821 Recall: 0.8517 F1: 0.8667 Train AUC: 0.9783 Val AUC: 0.9352 Time: 13.85\n",
      "Epoch: 235 Train Loss: 0.1918 Val Loss: 0.3418 Acc: 0.8696 Pre: 0.9129 Recall: 0.8310 F1: 0.8700 Train AUC: 0.9791 Val AUC: 0.9385 Time: 14.69\n",
      "Epoch: 236 Train Loss: 0.1939 Val Loss: 0.3411 Acc: 0.8659 Pre: 0.9252 Recall: 0.8103 F1: 0.8640 Train AUC: 0.9782 Val AUC: 0.9428 Time: 15.15\n",
      "Epoch: 237 Train Loss: 0.1956 Val Loss: 0.3078 Acc: 0.8750 Pre: 0.8961 Recall: 0.8621 F1: 0.8787 Train AUC: 0.9791 Val AUC: 0.9466 Time: 13.65\n",
      "Epoch: 238 Train Loss: 0.1906 Val Loss: 0.3064 Acc: 0.8696 Pre: 0.8865 Recall: 0.8621 F1: 0.8741 Train AUC: 0.9789 Val AUC: 0.9469 Time: 12.75\n",
      "Epoch: 239 Train Loss: 0.2037 Val Loss: 0.3290 Acc: 0.8659 Pre: 0.9219 Recall: 0.8138 F1: 0.8645 Train AUC: 0.9768 Val AUC: 0.9447 Time: 12.62\n",
      "Epoch: 240 Train Loss: 0.1958 Val Loss: 0.3322 Acc: 0.8659 Pre: 0.9091 Recall: 0.8276 F1: 0.8664 Train AUC: 0.9775 Val AUC: 0.9389 Time: 12.96\n",
      "Epoch: 241 Train Loss: 0.1908 Val Loss: 0.3343 Acc: 0.8605 Pre: 0.8763 Recall: 0.8552 F1: 0.8656 Train AUC: 0.9782 Val AUC: 0.9347 Time: 13.53\n",
      "Epoch: 242 Train Loss: 0.1989 Val Loss: 0.3405 Acc: 0.8659 Pre: 0.9154 Recall: 0.8207 F1: 0.8655 Train AUC: 0.9786 Val AUC: 0.9398 Time: 14.10\n",
      "Epoch: 243 Train Loss: 0.1898 Val Loss: 0.3413 Acc: 0.8696 Pre: 0.9258 Recall: 0.8172 F1: 0.8681 Train AUC: 0.9791 Val AUC: 0.9434 Time: 14.78\n",
      "Epoch: 244 Train Loss: 0.1992 Val Loss: 0.3109 Acc: 0.8696 Pre: 0.8978 Recall: 0.8483 F1: 0.8723 Train AUC: 0.9762 Val AUC: 0.9471 Time: 14.78\n",
      "Epoch: 245 Train Loss: 0.1886 Val Loss: 0.2994 Acc: 0.8750 Pre: 0.8850 Recall: 0.8759 F1: 0.8804 Train AUC: 0.9789 Val AUC: 0.9486 Time: 13.57\n",
      "Epoch: 246 Train Loss: 0.2013 Val Loss: 0.3141 Acc: 0.8786 Pre: 0.9240 Recall: 0.8379 F1: 0.8788 Train AUC: 0.9783 Val AUC: 0.9473 Time: 13.39\n",
      "Epoch: 247 Train Loss: 0.1963 Val Loss: 0.3343 Acc: 0.8732 Pre: 0.9167 Recall: 0.8345 F1: 0.8736 Train AUC: 0.9783 Val AUC: 0.9421 Time: 13.17\n",
      "Epoch: 248 Train Loss: 0.1846 Val Loss: 0.3546 Acc: 0.8714 Pre: 0.8925 Recall: 0.8586 F1: 0.8752 Train AUC: 0.9807 Val AUC: 0.9347 Time: 13.89\n",
      "Epoch: 249 Train Loss: 0.1922 Val Loss: 0.3588 Acc: 0.8696 Pre: 0.8893 Recall: 0.8586 F1: 0.8737 Train AUC: 0.9777 Val AUC: 0.9325 Time: 14.37\n",
      "Epoch: 250 Train Loss: 0.2028 Val Loss: 0.3452 Acc: 0.8750 Pre: 0.9234 Recall: 0.8310 F1: 0.8748 Train AUC: 0.9754 Val AUC: 0.9365 Time: 14.27\n",
      "Epoch: 251 Train Loss: 0.1943 Val Loss: 0.3209 Acc: 0.8768 Pre: 0.9336 Recall: 0.8241 F1: 0.8755 Train AUC: 0.9768 Val AUC: 0.9436 Time: 14.24\n",
      "Epoch: 252 Train Loss: 0.1824 Val Loss: 0.3014 Acc: 0.8623 Pre: 0.8993 Recall: 0.8310 F1: 0.8638 Train AUC: 0.9821 Val AUC: 0.9463 Time: 13.98\n",
      "Epoch: 253 Train Loss: 0.2097 Val Loss: 0.3052 Acc: 0.8732 Pre: 0.9044 Recall: 0.8483 F1: 0.8754 Train AUC: 0.9779 Val AUC: 0.9477 Time: 13.00\n",
      "Epoch: 254 Train Loss: 0.1901 Val Loss: 0.3370 Acc: 0.8678 Pre: 0.8974 Recall: 0.8448 F1: 0.8703 Train AUC: 0.9794 Val AUC: 0.9427 Time: 12.92\n",
      "Epoch: 255 Train Loss: 0.1928 Val Loss: 0.3585 Acc: 0.8678 Pre: 0.8917 Recall: 0.8517 F1: 0.8713 Train AUC: 0.9772 Val AUC: 0.9390 Time: 13.46\n",
      "Epoch: 256 Train Loss: 0.1881 Val Loss: 0.3668 Acc: 0.8786 Pre: 0.9145 Recall: 0.8483 F1: 0.8801 Train AUC: 0.9788 Val AUC: 0.9382 Time: 14.14\n",
      "Epoch: 257 Train Loss: 0.1929 Val Loss: 0.3507 Acc: 0.8822 Pre: 0.9245 Recall: 0.8448 F1: 0.8829 Train AUC: 0.9769 Val AUC: 0.9390 Time: 14.66\n",
      "Epoch: 258 Train Loss: 0.1840 Val Loss: 0.3256 Acc: 0.8714 Pre: 0.8925 Recall: 0.8586 F1: 0.8752 Train AUC: 0.9792 Val AUC: 0.9399 Time: 14.99\n",
      "Epoch: 259 Train Loss: 0.1999 Val Loss: 0.3110 Acc: 0.8696 Pre: 0.8978 Recall: 0.8483 F1: 0.8723 Train AUC: 0.9779 Val AUC: 0.9457 Time: 13.47\n",
      "Epoch: 260 Train Loss: 0.1845 Val Loss: 0.3105 Acc: 0.8732 Pre: 0.9104 Recall: 0.8414 F1: 0.8746 Train AUC: 0.9807 Val AUC: 0.9482 Time: 12.89\n",
      "Epoch: 261 Train Loss: 0.1841 Val Loss: 0.3084 Acc: 0.8714 Pre: 0.9041 Recall: 0.8448 F1: 0.8734 Train AUC: 0.9797 Val AUC: 0.9472 Time: 13.15\n",
      "Epoch: 262 Train Loss: 0.1823 Val Loss: 0.3224 Acc: 0.8678 Pre: 0.9033 Recall: 0.8379 F1: 0.8694 Train AUC: 0.9812 Val AUC: 0.9442 Time: 13.71\n",
      "Epoch: 263 Train Loss: 0.1846 Val Loss: 0.3265 Acc: 0.8714 Pre: 0.8925 Recall: 0.8586 F1: 0.8752 Train AUC: 0.9798 Val AUC: 0.9413 Time: 14.32\n",
      "Epoch: 264 Train Loss: 0.1900 Val Loss: 0.3480 Acc: 0.8605 Pre: 0.9112 Recall: 0.8138 F1: 0.8597 Train AUC: 0.9797 Val AUC: 0.9381 Time: 15.02\n",
      "Epoch: 265 Train Loss: 0.1803 Val Loss: 0.3503 Acc: 0.8659 Pre: 0.9154 Recall: 0.8207 F1: 0.8655 Train AUC: 0.9811 Val AUC: 0.9375 Time: 14.21\n",
      "Epoch: 266 Train Loss: 0.1775 Val Loss: 0.3273 Acc: 0.8732 Pre: 0.8929 Recall: 0.8621 F1: 0.8772 Train AUC: 0.9814 Val AUC: 0.9412 Time: 13.01\n",
      "Epoch: 267 Train Loss: 0.1878 Val Loss: 0.3227 Acc: 0.8822 Pre: 0.9121 Recall: 0.8586 F1: 0.8845 Train AUC: 0.9799 Val AUC: 0.9467 Time: 12.91\n",
      "Epoch: 268 Train Loss: 0.1770 Val Loss: 0.3235 Acc: 0.8732 Pre: 0.9135 Recall: 0.8379 F1: 0.8741 Train AUC: 0.9815 Val AUC: 0.9491 Time: 13.36\n",
      "Epoch: 269 Train Loss: 0.1806 Val Loss: 0.3169 Acc: 0.8732 Pre: 0.8986 Recall: 0.8552 F1: 0.8763 Train AUC: 0.9801 Val AUC: 0.9490 Time: 14.02\n",
      "Epoch: 270 Train Loss: 0.1788 Val Loss: 0.3141 Acc: 0.8659 Pre: 0.8803 Recall: 0.8621 F1: 0.8711 Train AUC: 0.9808 Val AUC: 0.9456 Time: 14.56\n",
      "Epoch: 271 Train Loss: 0.1790 Val Loss: 0.3233 Acc: 0.8786 Pre: 0.9176 Recall: 0.8448 F1: 0.8797 Train AUC: 0.9816 Val AUC: 0.9431 Time: 14.97\n",
      "Epoch: 272 Train Loss: 0.1757 Val Loss: 0.3429 Acc: 0.8732 Pre: 0.9297 Recall: 0.8207 F1: 0.8718 Train AUC: 0.9818 Val AUC: 0.9411 Time: 13.83\n",
      "Epoch: 273 Train Loss: 0.1727 Val Loss: 0.3343 Acc: 0.8750 Pre: 0.9266 Recall: 0.8276 F1: 0.8743 Train AUC: 0.9838 Val AUC: 0.9413 Time: 13.33\n",
      "Epoch: 274 Train Loss: 0.1840 Val Loss: 0.3113 Acc: 0.8804 Pre: 0.8916 Recall: 0.8793 F1: 0.8854 Train AUC: 0.9821 Val AUC: 0.9426 Time: 14.04\n",
      "Epoch: 275 Train Loss: 0.1801 Val Loss: 0.3127 Acc: 0.8786 Pre: 0.8885 Recall: 0.8793 F1: 0.8839 Train AUC: 0.9830 Val AUC: 0.9444 Time: 14.58\n",
      "Epoch: 276 Train Loss: 0.1876 Val Loss: 0.3297 Acc: 0.8641 Pre: 0.9087 Recall: 0.8241 F1: 0.8644 Train AUC: 0.9829 Val AUC: 0.9467 Time: 14.66\n",
      "Epoch: 277 Train Loss: 0.1790 Val Loss: 0.3349 Acc: 0.8732 Pre: 0.9400 Recall: 0.8103 F1: 0.8704 Train AUC: 0.9809 Val AUC: 0.9492 Time: 13.26\n",
      "Epoch: 278 Train Loss: 0.1901 Val Loss: 0.3003 Acc: 0.8768 Pre: 0.8936 Recall: 0.8690 F1: 0.8811 Train AUC: 0.9808 Val AUC: 0.9474 Time: 12.61\n",
      "Epoch: 279 Train Loss: 0.1737 Val Loss: 0.3099 Acc: 0.8804 Pre: 0.8889 Recall: 0.8828 F1: 0.8858 Train AUC: 0.9838 Val AUC: 0.9435 Time: 12.92\n",
      "Epoch: 280 Train Loss: 0.1823 Val Loss: 0.3450 Acc: 0.8822 Pre: 0.9245 Recall: 0.8448 F1: 0.8829 Train AUC: 0.9825 Val AUC: 0.9414 Time: 13.41\n",
      "Epoch: 281 Train Loss: 0.1800 Val Loss: 0.3627 Acc: 0.8786 Pre: 0.9272 Recall: 0.8345 F1: 0.8784 Train AUC: 0.9804 Val AUC: 0.9392 Time: 14.22\n",
      "Epoch: 282 Train Loss: 0.1721 Val Loss: 0.3365 Acc: 0.8841 Pre: 0.9185 Recall: 0.8552 F1: 0.8857 Train AUC: 0.9819 Val AUC: 0.9413 Time: 14.90\n",
      "Epoch: 283 Train Loss: 0.1665 Val Loss: 0.3043 Acc: 0.8822 Pre: 0.8947 Recall: 0.8793 F1: 0.8870 Train AUC: 0.9838 Val AUC: 0.9456 Time: 15.36\n",
      "Epoch: 284 Train Loss: 0.1771 Val Loss: 0.2958 Acc: 0.8768 Pre: 0.9081 Recall: 0.8517 F1: 0.8790 Train AUC: 0.9841 Val AUC: 0.9502 Time: 13.99\n",
      "Epoch: 285 Train Loss: 0.1758 Val Loss: 0.3045 Acc: 0.8804 Pre: 0.9242 Recall: 0.8414 F1: 0.8809 Train AUC: 0.9831 Val AUC: 0.9510 Time: 12.87\n",
      "Epoch: 286 Train Loss: 0.1716 Val Loss: 0.3083 Acc: 0.8822 Pre: 0.9121 Recall: 0.8586 F1: 0.8845 Train AUC: 0.9830 Val AUC: 0.9501 Time: 12.12\n",
      "Epoch: 287 Train Loss: 0.1756 Val Loss: 0.3137 Acc: 0.8786 Pre: 0.8996 Recall: 0.8655 F1: 0.8822 Train AUC: 0.9815 Val AUC: 0.9468 Time: 12.67\n",
      "Epoch: 288 Train Loss: 0.1604 Val Loss: 0.3284 Acc: 0.8786 Pre: 0.9055 Recall: 0.8586 F1: 0.8814 Train AUC: 0.9855 Val AUC: 0.9429 Time: 13.08\n",
      "Epoch: 289 Train Loss: 0.1659 Val Loss: 0.3436 Acc: 0.8659 Pre: 0.9060 Recall: 0.8310 F1: 0.8669 Train AUC: 0.9838 Val AUC: 0.9401 Time: 13.53\n",
      "Epoch: 290 Train Loss: 0.1748 Val Loss: 0.3458 Acc: 0.8659 Pre: 0.9091 Recall: 0.8276 F1: 0.8664 Train AUC: 0.9822 Val AUC: 0.9408 Time: 14.22\n",
      "Epoch: 291 Train Loss: 0.1778 Val Loss: 0.3228 Acc: 0.8804 Pre: 0.9088 Recall: 0.8586 F1: 0.8830 Train AUC: 0.9809 Val AUC: 0.9439 Time: 14.81\n",
      "Epoch: 292 Train Loss: 0.1667 Val Loss: 0.3179 Acc: 0.8841 Pre: 0.9248 Recall: 0.8483 F1: 0.8849 Train AUC: 0.9840 Val AUC: 0.9475 Time: 15.84\n",
      "Epoch: 293 Train Loss: 0.1643 Val Loss: 0.3192 Acc: 0.8841 Pre: 0.9346 Recall: 0.8379 F1: 0.8836 Train AUC: 0.9844 Val AUC: 0.9490 Time: 14.57\n",
      "Epoch: 294 Train Loss: 0.1689 Val Loss: 0.3011 Acc: 0.8786 Pre: 0.8968 Recall: 0.8690 F1: 0.8827 Train AUC: 0.9845 Val AUC: 0.9492 Time: 13.18\n",
      "Epoch: 295 Train Loss: 0.1718 Val Loss: 0.3150 Acc: 0.8841 Pre: 0.9124 Recall: 0.8621 F1: 0.8865 Train AUC: 0.9839 Val AUC: 0.9464 Time: 12.32\n",
      "Epoch: 296 Train Loss: 0.1597 Val Loss: 0.3374 Acc: 0.8750 Pre: 0.9234 Recall: 0.8310 F1: 0.8748 Train AUC: 0.9851 Val AUC: 0.9441 Time: 12.78\n",
      "Epoch: 297 Train Loss: 0.1682 Val Loss: 0.3387 Acc: 0.8750 Pre: 0.9234 Recall: 0.8310 F1: 0.8748 Train AUC: 0.9836 Val AUC: 0.9426 Time: 13.28\n",
      "Epoch: 298 Train Loss: 0.1617 Val Loss: 0.3267 Acc: 0.8841 Pre: 0.9094 Recall: 0.8655 F1: 0.8869 Train AUC: 0.9851 Val AUC: 0.9430 Time: 13.64\n",
      "Epoch: 299 Train Loss: 0.1705 Val Loss: 0.3152 Acc: 0.8841 Pre: 0.9065 Recall: 0.8690 F1: 0.8873 Train AUC: 0.9831 Val AUC: 0.9461 Time: 14.39\n",
      "Epoch: 300 Train Loss: 0.1683 Val Loss: 0.3139 Acc: 0.8841 Pre: 0.9346 Recall: 0.8379 F1: 0.8836 Train AUC: 0.9842 Val AUC: 0.9507 Time: 14.74\n",
      "Epoch: 301 Train Loss: 0.1660 Val Loss: 0.2999 Acc: 0.8877 Pre: 0.9385 Recall: 0.8414 F1: 0.8873 Train AUC: 0.9837 Val AUC: 0.9522 Time: 14.66\n",
      "Epoch: 302 Train Loss: 0.1681 Val Loss: 0.2923 Acc: 0.8841 Pre: 0.9124 Recall: 0.8621 F1: 0.8865 Train AUC: 0.9846 Val AUC: 0.9507 Time: 13.93\n",
      "Epoch: 303 Train Loss: 0.1735 Val Loss: 0.3042 Acc: 0.8768 Pre: 0.9081 Recall: 0.8517 F1: 0.8790 Train AUC: 0.9833 Val AUC: 0.9476 Time: 12.96\n",
      "Epoch: 304 Train Loss: 0.1676 Val Loss: 0.3343 Acc: 0.8841 Pre: 0.9313 Recall: 0.8414 F1: 0.8841 Train AUC: 0.9847 Val AUC: 0.9446 Time: 12.81\n",
      "Epoch: 305 Train Loss: 0.1615 Val Loss: 0.3349 Acc: 0.8750 Pre: 0.9077 Recall: 0.8483 F1: 0.8770 Train AUC: 0.9851 Val AUC: 0.9439 Time: 13.39\n",
      "Epoch: 306 Train Loss: 0.1573 Val Loss: 0.3244 Acc: 0.8859 Pre: 0.9127 Recall: 0.8655 F1: 0.8885 Train AUC: 0.9854 Val AUC: 0.9454 Time: 14.04\n",
      "Epoch: 307 Train Loss: 0.1623 Val Loss: 0.3281 Acc: 0.8895 Pre: 0.9387 Recall: 0.8448 F1: 0.8893 Train AUC: 0.9847 Val AUC: 0.9481 Time: 14.61\n",
      "Epoch: 308 Train Loss: 0.1620 Val Loss: 0.3170 Acc: 0.8931 Pre: 0.9392 Recall: 0.8517 F1: 0.8933 Train AUC: 0.9849 Val AUC: 0.9495 Time: 15.20\n",
      "Epoch: 309 Train Loss: 0.1513 Val Loss: 0.2948 Acc: 0.8822 Pre: 0.9032 Recall: 0.8690 F1: 0.8858 Train AUC: 0.9872 Val AUC: 0.9502 Time: 14.33\n",
      "Epoch: 310 Train Loss: 0.1674 Val Loss: 0.3039 Acc: 0.8859 Pre: 0.9158 Recall: 0.8621 F1: 0.8881 Train AUC: 0.9853 Val AUC: 0.9509 Time: 13.38\n",
      "Epoch: 311 Train Loss: 0.1620 Val Loss: 0.3332 Acc: 0.8732 Pre: 0.9231 Recall: 0.8276 F1: 0.8727 Train AUC: 0.9850 Val AUC: 0.9480 Time: 12.71\n",
      "Epoch: 312 Train Loss: 0.1718 Val Loss: 0.3245 Acc: 0.8822 Pre: 0.9121 Recall: 0.8586 F1: 0.8845 Train AUC: 0.9824 Val AUC: 0.9460 Time: 12.83\n",
      "Epoch: 313 Train Loss: 0.1588 Val Loss: 0.3156 Acc: 0.8859 Pre: 0.9251 Recall: 0.8517 F1: 0.8869 Train AUC: 0.9855 Val AUC: 0.9467 Time: 13.01\n",
      "Epoch: 314 Train Loss: 0.1585 Val Loss: 0.3027 Acc: 0.8895 Pre: 0.9164 Recall: 0.8690 F1: 0.8920 Train AUC: 0.9851 Val AUC: 0.9480 Time: 13.59\n",
      "Epoch: 315 Train Loss: 0.1517 Val Loss: 0.3046 Acc: 0.8913 Pre: 0.9291 Recall: 0.8586 F1: 0.8925 Train AUC: 0.9876 Val AUC: 0.9497 Time: 14.17\n",
      "Epoch: 316 Train Loss: 0.1673 Val Loss: 0.3201 Acc: 0.8877 Pre: 0.9286 Recall: 0.8517 F1: 0.8885 Train AUC: 0.9845 Val AUC: 0.9494 Time: 14.78\n",
      "Epoch: 317 Train Loss: 0.1594 Val Loss: 0.3197 Acc: 0.8859 Pre: 0.9158 Recall: 0.8621 F1: 0.8881 Train AUC: 0.9852 Val AUC: 0.9482 Time: 15.76\n",
      "Epoch: 318 Train Loss: 0.1588 Val Loss: 0.3182 Acc: 0.8841 Pre: 0.9154 Recall: 0.8586 F1: 0.8861 Train AUC: 0.9851 Val AUC: 0.9470 Time: 14.61\n",
      "Epoch: 319 Train Loss: 0.1694 Val Loss: 0.3169 Acc: 0.8768 Pre: 0.9081 Recall: 0.8517 F1: 0.8790 Train AUC: 0.9828 Val AUC: 0.9456 Time: 13.14\n",
      "Epoch: 320 Train Loss: 0.1536 Val Loss: 0.3241 Acc: 0.8750 Pre: 0.9139 Recall: 0.8414 F1: 0.8761 Train AUC: 0.9864 Val AUC: 0.9460 Time: 12.26\n",
      "Epoch: 321 Train Loss: 0.1513 Val Loss: 0.3359 Acc: 0.8768 Pre: 0.9405 Recall: 0.8172 F1: 0.8745 Train AUC: 0.9867 Val AUC: 0.9473 Time: 12.13\n",
      "Epoch: 322 Train Loss: 0.1666 Val Loss: 0.3057 Acc: 0.8913 Pre: 0.9228 Recall: 0.8655 F1: 0.8932 Train AUC: 0.9854 Val AUC: 0.9494 Time: 12.58\n",
      "Epoch: 323 Train Loss: 0.1536 Val Loss: 0.3056 Acc: 0.8786 Pre: 0.8885 Recall: 0.8793 F1: 0.8839 Train AUC: 0.9868 Val AUC: 0.9494 Time: 13.13\n",
      "Epoch: 324 Train Loss: 0.1569 Val Loss: 0.3287 Acc: 0.8877 Pre: 0.9286 Recall: 0.8517 F1: 0.8885 Train AUC: 0.9872 Val AUC: 0.9499 Time: 13.63\n",
      "Epoch: 325 Train Loss: 0.1571 Val Loss: 0.3215 Acc: 0.8859 Pre: 0.9349 Recall: 0.8414 F1: 0.8857 Train AUC: 0.9857 Val AUC: 0.9505 Time: 14.18\n",
      "Epoch: 326 Train Loss: 0.1530 Val Loss: 0.2939 Acc: 0.8859 Pre: 0.9097 Recall: 0.8690 F1: 0.8889 Train AUC: 0.9873 Val AUC: 0.9503 Time: 14.86\n",
      "Epoch: 327 Train Loss: 0.1505 Val Loss: 0.2895 Acc: 0.8841 Pre: 0.9036 Recall: 0.8724 F1: 0.8877 Train AUC: 0.9876 Val AUC: 0.9508 Time: 15.88\n",
      "Epoch: 328 Train Loss: 0.1551 Val Loss: 0.3175 Acc: 0.8877 Pre: 0.9385 Recall: 0.8414 F1: 0.8873 Train AUC: 0.9882 Val AUC: 0.9521 Time: 15.10\n",
      "Epoch: 329 Train Loss: 0.1619 Val Loss: 0.3113 Acc: 0.8877 Pre: 0.9130 Recall: 0.8690 F1: 0.8905 Train AUC: 0.9861 Val AUC: 0.9506 Time: 13.56\n",
      "Epoch: 330 Train Loss: 0.1547 Val Loss: 0.3105 Acc: 0.8768 Pre: 0.8828 Recall: 0.8828 F1: 0.8828 Train AUC: 0.9856 Val AUC: 0.9477 Time: 12.72\n",
      "Epoch: 331 Train Loss: 0.1581 Val Loss: 0.3223 Acc: 0.8877 Pre: 0.9254 Recall: 0.8552 F1: 0.8889 Train AUC: 0.9869 Val AUC: 0.9484 Time: 12.32\n",
      "Epoch: 332 Train Loss: 0.1407 Val Loss: 0.3495 Acc: 0.8822 Pre: 0.9482 Recall: 0.8207 F1: 0.8799 Train AUC: 0.9886 Val AUC: 0.9498 Time: 12.68\n",
      "Epoch: 333 Train Loss: 0.1639 Val Loss: 0.3006 Acc: 0.8877 Pre: 0.9385 Recall: 0.8414 F1: 0.8873 Train AUC: 0.9871 Val AUC: 0.9529 Time: 13.25\n",
      "Epoch: 334 Train Loss: 0.1466 Val Loss: 0.2834 Acc: 0.8859 Pre: 0.8822 Recall: 0.9034 F1: 0.8927 Train AUC: 0.9880 Val AUC: 0.9534 Time: 13.71\n",
      "Epoch: 335 Train Loss: 0.1720 Val Loss: 0.3146 Acc: 0.8841 Pre: 0.9124 Recall: 0.8621 F1: 0.8865 Train AUC: 0.9871 Val AUC: 0.9500 Time: 14.38\n",
      "Epoch: 336 Train Loss: 0.1467 Val Loss: 0.3611 Acc: 0.8714 Pre: 0.9195 Recall: 0.8276 F1: 0.8711 Train AUC: 0.9874 Val AUC: 0.9466 Time: 15.02\n",
      "Epoch: 337 Train Loss: 0.1559 Val Loss: 0.3467 Acc: 0.8822 Pre: 0.9213 Recall: 0.8483 F1: 0.8833 Train AUC: 0.9857 Val AUC: 0.9449 Time: 16.84\n",
      "Epoch: 338 Train Loss: 0.1512 Val Loss: 0.3269 Acc: 0.8786 Pre: 0.8968 Recall: 0.8690 F1: 0.8827 Train AUC: 0.9869 Val AUC: 0.9437 Time: 14.83\n",
      "Epoch: 339 Train Loss: 0.1540 Val Loss: 0.3231 Acc: 0.8895 Pre: 0.9354 Recall: 0.8483 F1: 0.8897 Train AUC: 0.9873 Val AUC: 0.9488 Time: 13.26\n",
      "Epoch: 340 Train Loss: 0.1503 Val Loss: 0.3072 Acc: 0.8877 Pre: 0.9254 Recall: 0.8552 F1: 0.8889 Train AUC: 0.9876 Val AUC: 0.9514 Time: 12.39\n",
      "Epoch: 341 Train Loss: 0.1540 Val Loss: 0.3057 Acc: 0.8877 Pre: 0.9318 Recall: 0.8483 F1: 0.8881 Train AUC: 0.9865 Val AUC: 0.9513 Time: 12.42\n",
      "Epoch: 342 Train Loss: 0.1571 Val Loss: 0.3109 Acc: 0.8877 Pre: 0.9254 Recall: 0.8552 F1: 0.8889 Train AUC: 0.9857 Val AUC: 0.9498 Time: 12.76\n",
      "Epoch: 343 Train Loss: 0.1457 Val Loss: 0.3191 Acc: 0.8822 Pre: 0.9121 Recall: 0.8586 F1: 0.8845 Train AUC: 0.9882 Val AUC: 0.9479 Time: 13.23\n",
      "Epoch: 344 Train Loss: 0.1462 Val Loss: 0.3320 Acc: 0.8841 Pre: 0.9280 Recall: 0.8448 F1: 0.8845 Train AUC: 0.9885 Val AUC: 0.9483 Time: 13.96\n",
      "Epoch: 345 Train Loss: 0.1494 Val Loss: 0.3064 Acc: 0.8841 Pre: 0.9094 Recall: 0.8655 F1: 0.8869 Train AUC: 0.9876 Val AUC: 0.9502 Time: 14.47\n",
      "Epoch: 346 Train Loss: 0.1392 Val Loss: 0.2931 Acc: 0.8949 Pre: 0.9085 Recall: 0.8897 F1: 0.8990 Train AUC: 0.9896 Val AUC: 0.9525 Time: 15.28\n",
      "Epoch: 347 Train Loss: 0.1459 Val Loss: 0.3079 Acc: 0.8895 Pre: 0.9257 Recall: 0.8586 F1: 0.8909 Train AUC: 0.9892 Val AUC: 0.9532 Time: 14.63\n",
      "Epoch: 348 Train Loss: 0.1532 Val Loss: 0.3053 Acc: 0.8913 Pre: 0.9457 Recall: 0.8414 F1: 0.8905 Train AUC: 0.9860 Val AUC: 0.9551 Time: 13.69\n",
      "Epoch: 349 Train Loss: 0.1436 Val Loss: 0.2865 Acc: 0.8895 Pre: 0.9194 Recall: 0.8655 F1: 0.8917 Train AUC: 0.9884 Val AUC: 0.9547 Time: 13.41\n",
      "Epoch: 350 Train Loss: 0.1437 Val Loss: 0.2877 Acc: 0.8949 Pre: 0.9085 Recall: 0.8897 F1: 0.8990 Train AUC: 0.9890 Val AUC: 0.9531 Time: 13.01\n",
      "Epoch: 351 Train Loss: 0.1464 Val Loss: 0.3121 Acc: 0.8841 Pre: 0.9124 Recall: 0.8621 F1: 0.8865 Train AUC: 0.9891 Val AUC: 0.9504 Time: 13.64\n",
      "Epoch: 352 Train Loss: 0.1409 Val Loss: 0.3308 Acc: 0.8877 Pre: 0.9161 Recall: 0.8655 F1: 0.8901 Train AUC: 0.9886 Val AUC: 0.9482 Time: 14.17\n",
      "Epoch: 353 Train Loss: 0.1488 Val Loss: 0.3346 Acc: 0.8877 Pre: 0.9191 Recall: 0.8621 F1: 0.8897 Train AUC: 0.9867 Val AUC: 0.9474 Time: 14.90\n",
      "Epoch: 354 Train Loss: 0.1498 Val Loss: 0.3091 Acc: 0.8895 Pre: 0.9164 Recall: 0.8690 F1: 0.8920 Train AUC: 0.9867 Val AUC: 0.9498 Time: 14.47\n",
      "Epoch: 355 Train Loss: 0.1407 Val Loss: 0.2901 Acc: 0.8895 Pre: 0.9288 Recall: 0.8552 F1: 0.8905 Train AUC: 0.9889 Val AUC: 0.9535 Time: 13.66\n",
      "Epoch: 356 Train Loss: 0.1389 Val Loss: 0.2874 Acc: 0.8931 Pre: 0.9358 Recall: 0.8552 F1: 0.8937 Train AUC: 0.9899 Val AUC: 0.9562 Time: 13.11\n",
      "Epoch: 357 Train Loss: 0.1386 Val Loss: 0.2960 Acc: 0.8913 Pre: 0.9389 Recall: 0.8483 F1: 0.8913 Train AUC: 0.9901 Val AUC: 0.9568 Time: 13.27\n",
      "Epoch: 358 Train Loss: 0.1431 Val Loss: 0.2912 Acc: 0.8895 Pre: 0.9134 Recall: 0.8724 F1: 0.8924 Train AUC: 0.9901 Val AUC: 0.9549 Time: 13.55\n",
      "Epoch: 359 Train Loss: 0.1440 Val Loss: 0.3053 Acc: 0.8859 Pre: 0.8982 Recall: 0.8828 F1: 0.8904 Train AUC: 0.9883 Val AUC: 0.9514 Time: 13.80\n",
      "Epoch: 360 Train Loss: 0.1448 Val Loss: 0.3301 Acc: 0.8895 Pre: 0.9225 Recall: 0.8621 F1: 0.8913 Train AUC: 0.9888 Val AUC: 0.9481 Time: 14.50\n",
      "Epoch: 361 Train Loss: 0.1409 Val Loss: 0.3594 Acc: 0.8841 Pre: 0.9313 Recall: 0.8414 F1: 0.8841 Train AUC: 0.9883 Val AUC: 0.9463 Time: 14.99\n",
      "Epoch: 362 Train Loss: 0.1501 Val Loss: 0.3216 Acc: 0.8841 Pre: 0.9185 Recall: 0.8552 F1: 0.8857 Train AUC: 0.9869 Val AUC: 0.9480 Time: 14.65\n",
      "Epoch: 363 Train Loss: 0.1456 Val Loss: 0.2879 Acc: 0.8841 Pre: 0.8818 Recall: 0.9000 F1: 0.8908 Train AUC: 0.9887 Val AUC: 0.9521 Time: 13.75\n",
      "Epoch: 364 Train Loss: 0.1649 Val Loss: 0.3018 Acc: 0.8949 Pre: 0.9394 Recall: 0.8552 F1: 0.8953 Train AUC: 0.9878 Val AUC: 0.9553 Time: 12.96\n",
      "Epoch: 365 Train Loss: 0.1390 Val Loss: 0.3447 Acc: 0.8822 Pre: 0.9412 Recall: 0.8276 F1: 0.8807 Train AUC: 0.9891 Val AUC: 0.9546 Time: 12.46\n",
      "Epoch: 366 Train Loss: 0.1544 Val Loss: 0.3024 Acc: 0.8895 Pre: 0.9104 Recall: 0.8759 F1: 0.8928 Train AUC: 0.9874 Val AUC: 0.9540 Time: 12.91\n",
      "Epoch: 367 Train Loss: 0.1487 Val Loss: 0.2876 Acc: 0.8877 Pre: 0.8851 Recall: 0.9034 F1: 0.8942 Train AUC: 0.9870 Val AUC: 0.9534 Time: 13.47\n",
      "Epoch: 368 Train Loss: 0.1598 Val Loss: 0.3245 Acc: 0.8841 Pre: 0.9593 Recall: 0.8138 F1: 0.8806 Train AUC: 0.9900 Val AUC: 0.9553 Time: 14.00\n",
      "Epoch: 369 Train Loss: 0.1531 Val Loss: 0.3122 Acc: 0.8859 Pre: 0.9558 Recall: 0.8207 F1: 0.8831 Train AUC: 0.9896 Val AUC: 0.9552 Time: 14.64\n",
      "Epoch: 370 Train Loss: 0.1442 Val Loss: 0.2714 Acc: 0.8949 Pre: 0.8919 Recall: 0.9103 F1: 0.9010 Train AUC: 0.9905 Val AUC: 0.9558 Time: 15.30\n",
      "Epoch: 371 Train Loss: 0.1439 Val Loss: 0.2851 Acc: 0.8877 Pre: 0.8931 Recall: 0.8931 F1: 0.8931 Train AUC: 0.9922 Val AUC: 0.9531 Time: 14.27\n",
      "Epoch: 372 Train Loss: 0.1509 Val Loss: 0.3186 Acc: 0.8786 Pre: 0.9145 Recall: 0.8483 F1: 0.8801 Train AUC: 0.9889 Val AUC: 0.9510 Time: 12.96\n",
      "Epoch: 373 Train Loss: 0.1448 Val Loss: 0.3205 Acc: 0.8877 Pre: 0.9419 Recall: 0.8379 F1: 0.8869 Train AUC: 0.9877 Val AUC: 0.9541 Time: 13.11\n",
      "Epoch: 374 Train Loss: 0.1449 Val Loss: 0.2826 Acc: 0.8986 Pre: 0.9301 Recall: 0.8724 F1: 0.9004 Train AUC: 0.9883 Val AUC: 0.9571 Time: 13.80\n",
      "Epoch: 375 Train Loss: 0.1310 Val Loss: 0.2809 Acc: 0.9058 Pre: 0.9312 Recall: 0.8862 F1: 0.9081 Train AUC: 0.9910 Val AUC: 0.9567 Time: 14.30\n",
      "Epoch: 376 Train Loss: 0.1519 Val Loss: 0.3032 Acc: 0.8967 Pre: 0.9363 Recall: 0.8621 F1: 0.8977 Train AUC: 0.9878 Val AUC: 0.9554 Time: 14.19\n",
      "Epoch: 377 Train Loss: 0.1287 Val Loss: 0.3376 Acc: 0.8949 Pre: 0.9394 Recall: 0.8552 F1: 0.8953 Train AUC: 0.9911 Val AUC: 0.9513 Time: 13.60\n",
      "Epoch: 378 Train Loss: 0.1342 Val Loss: 0.3276 Acc: 0.8895 Pre: 0.9194 Recall: 0.8655 F1: 0.8917 Train AUC: 0.9895 Val AUC: 0.9495 Time: 13.41\n",
      "Epoch: 379 Train Loss: 0.1423 Val Loss: 0.3069 Acc: 0.8859 Pre: 0.9068 Recall: 0.8724 F1: 0.8893 Train AUC: 0.9881 Val AUC: 0.9504 Time: 14.03\n",
      "Epoch: 380 Train Loss: 0.1382 Val Loss: 0.2991 Acc: 0.8986 Pre: 0.9301 Recall: 0.8724 F1: 0.9004 Train AUC: 0.9898 Val AUC: 0.9516 Time: 13.95\n",
      "Epoch: 381 Train Loss: 0.1333 Val Loss: 0.3089 Acc: 0.8841 Pre: 0.9313 Recall: 0.8414 F1: 0.8841 Train AUC: 0.9907 Val AUC: 0.9542 Time: 14.32\n",
      "Epoch: 382 Train Loss: 0.1422 Val Loss: 0.2974 Acc: 0.8859 Pre: 0.9316 Recall: 0.8448 F1: 0.8861 Train AUC: 0.9893 Val AUC: 0.9566 Time: 13.29\n",
      "Epoch: 383 Train Loss: 0.1326 Val Loss: 0.2931 Acc: 0.9022 Pre: 0.9338 Recall: 0.8759 F1: 0.9039 Train AUC: 0.9907 Val AUC: 0.9573 Time: 12.99\n",
      "Epoch: 384 Train Loss: 0.1291 Val Loss: 0.3145 Acc: 0.8931 Pre: 0.9231 Recall: 0.8690 F1: 0.8952 Train AUC: 0.9913 Val AUC: 0.9542 Time: 12.98\n",
      "Epoch: 385 Train Loss: 0.1407 Val Loss: 0.3313 Acc: 0.8913 Pre: 0.9259 Recall: 0.8621 F1: 0.8929 Train AUC: 0.9880 Val AUC: 0.9513 Time: 13.19\n",
      "Epoch: 386 Train Loss: 0.1446 Val Loss: 0.3120 Acc: 0.8895 Pre: 0.9134 Recall: 0.8724 F1: 0.8924 Train AUC: 0.9871 Val AUC: 0.9527 Time: 13.70\n",
      "Epoch: 387 Train Loss: 0.1343 Val Loss: 0.3019 Acc: 0.8931 Pre: 0.9200 Recall: 0.8724 F1: 0.8956 Train AUC: 0.9893 Val AUC: 0.9546 Time: 14.24\n",
      "Epoch: 388 Train Loss: 0.1265 Val Loss: 0.3145 Acc: 0.8895 Pre: 0.9490 Recall: 0.8345 F1: 0.8881 Train AUC: 0.9908 Val AUC: 0.9547 Time: 15.03\n",
      "Epoch: 389 Train Loss: 0.1434 Val Loss: 0.2975 Acc: 0.8877 Pre: 0.9385 Recall: 0.8414 F1: 0.8873 Train AUC: 0.9895 Val AUC: 0.9564 Time: 15.40\n",
      "Epoch: 390 Train Loss: 0.1405 Val Loss: 0.2753 Acc: 0.8949 Pre: 0.9113 Recall: 0.8862 F1: 0.8986 Train AUC: 0.9896 Val AUC: 0.9588 Time: 13.43\n",
      "Epoch: 391 Train Loss: 0.1420 Val Loss: 0.2934 Acc: 0.8804 Pre: 0.8916 Recall: 0.8793 F1: 0.8854 Train AUC: 0.9887 Val AUC: 0.9566 Time: 12.51\n",
      "Epoch: 392 Train Loss: 0.1387 Val Loss: 0.3254 Acc: 0.8913 Pre: 0.9259 Recall: 0.8621 F1: 0.8929 Train AUC: 0.9896 Val AUC: 0.9533 Time: 12.50\n",
      "Epoch: 393 Train Loss: 0.1278 Val Loss: 0.3507 Acc: 0.8859 Pre: 0.9316 Recall: 0.8448 F1: 0.8861 Train AUC: 0.9904 Val AUC: 0.9503 Time: 12.76\n",
      "Epoch: 394 Train Loss: 0.1361 Val Loss: 0.3303 Acc: 0.8859 Pre: 0.9251 Recall: 0.8517 F1: 0.8869 Train AUC: 0.9896 Val AUC: 0.9498 Time: 13.23\n",
      "Epoch: 395 Train Loss: 0.1351 Val Loss: 0.2978 Acc: 0.8986 Pre: 0.9270 Recall: 0.8759 F1: 0.9007 Train AUC: 0.9897 Val AUC: 0.9525 Time: 13.71\n",
      "Epoch: 396 Train Loss: 0.1313 Val Loss: 0.2828 Acc: 0.8949 Pre: 0.9203 Recall: 0.8759 F1: 0.8975 Train AUC: 0.9909 Val AUC: 0.9552 Time: 14.47\n",
      "Epoch: 397 Train Loss: 0.1357 Val Loss: 0.3031 Acc: 0.8859 Pre: 0.9283 Recall: 0.8483 F1: 0.8865 Train AUC: 0.9907 Val AUC: 0.9557 Time: 15.19\n",
      "Epoch: 398 Train Loss: 0.1338 Val Loss: 0.3167 Acc: 0.8841 Pre: 0.9313 Recall: 0.8414 F1: 0.8841 Train AUC: 0.9894 Val AUC: 0.9562 Time: 16.42\n",
      "Epoch: 399 Train Loss: 0.1388 Val Loss: 0.2938 Acc: 0.8986 Pre: 0.9398 Recall: 0.8621 F1: 0.8993 Train AUC: 0.9889 Val AUC: 0.9563 Time: 14.59\n",
      "Epoch: 400 Train Loss: 0.1247 Val Loss: 0.2884 Acc: 0.8931 Pre: 0.9053 Recall: 0.8897 F1: 0.8974 Train AUC: 0.9915 Val AUC: 0.9551 Time: 13.26\n",
      "Epoch: 401 Train Loss: 0.1404 Val Loss: 0.3035 Acc: 0.8986 Pre: 0.9432 Recall: 0.8586 F1: 0.8989 Train AUC: 0.9896 Val AUC: 0.9557 Time: 12.33\n",
      "Epoch: 402 Train Loss: 0.1242 Val Loss: 0.3120 Acc: 0.9004 Pre: 0.9537 Recall: 0.8517 F1: 0.8998 Train AUC: 0.9915 Val AUC: 0.9560 Time: 12.56\n",
      "Epoch: 403 Train Loss: 0.1320 Val Loss: 0.2904 Acc: 0.8967 Pre: 0.9396 Recall: 0.8586 F1: 0.8973 Train AUC: 0.9906 Val AUC: 0.9574 Time: 12.92\n",
      "Epoch: 404 Train Loss: 0.1230 Val Loss: 0.2795 Acc: 0.8967 Pre: 0.9236 Recall: 0.8759 F1: 0.8991 Train AUC: 0.9913 Val AUC: 0.9578 Time: 13.75\n",
      "Epoch: 405 Train Loss: 0.1337 Val Loss: 0.2977 Acc: 0.8986 Pre: 0.9432 Recall: 0.8586 F1: 0.8989 Train AUC: 0.9903 Val AUC: 0.9562 Time: 14.32\n",
      "Epoch: 406 Train Loss: 0.1349 Val Loss: 0.2960 Acc: 0.8986 Pre: 0.9432 Recall: 0.8586 F1: 0.8989 Train AUC: 0.9897 Val AUC: 0.9560 Time: 14.47\n",
      "Epoch: 407 Train Loss: 0.1341 Val Loss: 0.2969 Acc: 0.9022 Pre: 0.9436 Recall: 0.8655 F1: 0.9029 Train AUC: 0.9893 Val AUC: 0.9547 Time: 14.55\n",
      "Epoch: 408 Train Loss: 0.1303 Val Loss: 0.2978 Acc: 0.8967 Pre: 0.9299 Recall: 0.8690 F1: 0.8984 Train AUC: 0.9911 Val AUC: 0.9553 Time: 15.03\n",
      "Epoch: 409 Train Loss: 0.1183 Val Loss: 0.3011 Acc: 0.8931 Pre: 0.9294 Recall: 0.8621 F1: 0.8945 Train AUC: 0.9928 Val AUC: 0.9559 Time: 14.09\n",
      "Epoch: 410 Train Loss: 0.1241 Val Loss: 0.2989 Acc: 0.9022 Pre: 0.9436 Recall: 0.8655 F1: 0.9029 Train AUC: 0.9911 Val AUC: 0.9573 Time: 13.10\n",
      "Epoch: 411 Train Loss: 0.1310 Val Loss: 0.2833 Acc: 0.9004 Pre: 0.9401 Recall: 0.8655 F1: 0.9013 Train AUC: 0.9901 Val AUC: 0.9593 Time: 12.53\n",
      "Epoch: 412 Train Loss: 0.1140 Val Loss: 0.2753 Acc: 0.9004 Pre: 0.9434 Recall: 0.8621 F1: 0.9009 Train AUC: 0.9931 Val AUC: 0.9606 Time: 12.48\n",
      "Epoch: 413 Train Loss: 0.1327 Val Loss: 0.2675 Acc: 0.9004 Pre: 0.9304 Recall: 0.8759 F1: 0.9023 Train AUC: 0.9904 Val AUC: 0.9604 Time: 13.04\n",
      "Epoch: 414 Train Loss: 0.1252 Val Loss: 0.2681 Acc: 0.9004 Pre: 0.9242 Recall: 0.8828 F1: 0.9030 Train AUC: 0.9911 Val AUC: 0.9590 Time: 13.58\n",
      "Epoch: 415 Train Loss: 0.1257 Val Loss: 0.2925 Acc: 0.9004 Pre: 0.9401 Recall: 0.8655 F1: 0.9013 Train AUC: 0.9929 Val AUC: 0.9557 Time: 13.99\n",
      "Epoch: 416 Train Loss: 0.1194 Val Loss: 0.3185 Acc: 0.8949 Pre: 0.9531 Recall: 0.8414 F1: 0.8938 Train AUC: 0.9922 Val AUC: 0.9540 Time: 14.91\n",
      "Epoch: 417 Train Loss: 0.1303 Val Loss: 0.2838 Acc: 0.9022 Pre: 0.9338 Recall: 0.8759 F1: 0.9039 Train AUC: 0.9907 Val AUC: 0.9570 Time: 15.42\n",
      "Epoch: 418 Train Loss: 0.1152 Val Loss: 0.2639 Acc: 0.9040 Pre: 0.9247 Recall: 0.8897 F1: 0.9069 Train AUC: 0.9933 Val AUC: 0.9598 Time: 14.02\n",
      "Epoch: 419 Train Loss: 0.1228 Val Loss: 0.2793 Acc: 0.9022 Pre: 0.9436 Recall: 0.8655 F1: 0.9029 Train AUC: 0.9937 Val AUC: 0.9609 Time: 13.24\n",
      "Epoch: 420 Train Loss: 0.1205 Val Loss: 0.3039 Acc: 0.8949 Pre: 0.9462 Recall: 0.8483 F1: 0.8945 Train AUC: 0.9922 Val AUC: 0.9589 Time: 13.17\n",
      "Epoch: 421 Train Loss: 0.1282 Val Loss: 0.2911 Acc: 0.8913 Pre: 0.9228 Recall: 0.8655 F1: 0.8932 Train AUC: 0.9907 Val AUC: 0.9573 Time: 13.75\n",
      "Epoch: 422 Train Loss: 0.1310 Val Loss: 0.2728 Acc: 0.9076 Pre: 0.9164 Recall: 0.9069 F1: 0.9116 Train AUC: 0.9899 Val AUC: 0.9580 Time: 14.44\n",
      "Epoch: 423 Train Loss: 0.1272 Val Loss: 0.2891 Acc: 0.9022 Pre: 0.9403 Recall: 0.8690 F1: 0.9032 Train AUC: 0.9935 Val AUC: 0.9569 Time: 15.15\n",
      "Epoch: 424 Train Loss: 0.1257 Val Loss: 0.3205 Acc: 0.9058 Pre: 0.9648 Recall: 0.8517 F1: 0.9048 Train AUC: 0.9919 Val AUC: 0.9553 Time: 13.99\n",
      "Epoch: 425 Train Loss: 0.1187 Val Loss: 0.3102 Acc: 0.9094 Pre: 0.9580 Recall: 0.8655 F1: 0.9094 Train AUC: 0.9929 Val AUC: 0.9550 Time: 13.26\n",
      "Epoch: 426 Train Loss: 0.1375 Val Loss: 0.2870 Acc: 0.9004 Pre: 0.9242 Recall: 0.8828 F1: 0.9030 Train AUC: 0.9917 Val AUC: 0.9546 Time: 13.00\n",
      "Epoch: 427 Train Loss: 0.1280 Val Loss: 0.2879 Acc: 0.9004 Pre: 0.9181 Recall: 0.8897 F1: 0.9037 Train AUC: 0.9918 Val AUC: 0.9557 Time: 13.55\n",
      "Epoch: 428 Train Loss: 0.1261 Val Loss: 0.3117 Acc: 0.8967 Pre: 0.9396 Recall: 0.8586 F1: 0.8973 Train AUC: 0.9923 Val AUC: 0.9554 Time: 14.05\n",
      "Epoch: 429 Train Loss: 0.1246 Val Loss: 0.3155 Acc: 0.8931 Pre: 0.9425 Recall: 0.8483 F1: 0.8929 Train AUC: 0.9908 Val AUC: 0.9579 Time: 14.50\n",
      "Epoch: 430 Train Loss: 0.1191 Val Loss: 0.2743 Acc: 0.9022 Pre: 0.9370 Recall: 0.8724 F1: 0.9036 Train AUC: 0.9929 Val AUC: 0.9601 Time: 13.26\n",
      "Epoch: 431 Train Loss: 0.1168 Val Loss: 0.2715 Acc: 0.8986 Pre: 0.9239 Recall: 0.8793 F1: 0.9011 Train AUC: 0.9940 Val AUC: 0.9591 Time: 13.39\n",
      "Epoch: 432 Train Loss: 0.1136 Val Loss: 0.3054 Acc: 0.9022 Pre: 0.9436 Recall: 0.8655 F1: 0.9029 Train AUC: 0.9944 Val AUC: 0.9558 Time: 13.89\n",
      "Epoch: 433 Train Loss: 0.1201 Val Loss: 0.3260 Acc: 0.9004 Pre: 0.9537 Recall: 0.8517 F1: 0.8998 Train AUC: 0.9919 Val AUC: 0.9543 Time: 14.58\n",
      "Epoch: 434 Train Loss: 0.1193 Val Loss: 0.2992 Acc: 0.9058 Pre: 0.9407 Recall: 0.8759 F1: 0.9071 Train AUC: 0.9925 Val AUC: 0.9561 Time: 14.25\n",
      "Epoch: 435 Train Loss: 0.1235 Val Loss: 0.2776 Acc: 0.9094 Pre: 0.9348 Recall: 0.8897 F1: 0.9117 Train AUC: 0.9911 Val AUC: 0.9587 Time: 13.37\n",
      "Epoch: 436 Train Loss: 0.1171 Val Loss: 0.2752 Acc: 0.9004 Pre: 0.9368 Recall: 0.8690 F1: 0.9016 Train AUC: 0.9925 Val AUC: 0.9612 Time: 13.71\n",
      "Epoch: 437 Train Loss: 0.1103 Val Loss: 0.2893 Acc: 0.8986 Pre: 0.9466 Recall: 0.8552 F1: 0.8986 Train AUC: 0.9935 Val AUC: 0.9618 Time: 13.54\n",
      "Epoch: 438 Train Loss: 0.1243 Val Loss: 0.2857 Acc: 0.9076 Pre: 0.9410 Recall: 0.8793 F1: 0.9091 Train AUC: 0.9919 Val AUC: 0.9585 Time: 13.48\n",
      "Epoch: 439 Train Loss: 0.1173 Val Loss: 0.2985 Acc: 0.9040 Pre: 0.9187 Recall: 0.8966 F1: 0.9075 Train AUC: 0.9928 Val AUC: 0.9541 Time: 14.16\n",
      "Epoch: 440 Train Loss: 0.1248 Val Loss: 0.3087 Acc: 0.8967 Pre: 0.9146 Recall: 0.8862 F1: 0.9002 Train AUC: 0.9913 Val AUC: 0.9519 Time: 14.92\n",
      "Epoch: 441 Train Loss: 0.1230 Val Loss: 0.3160 Acc: 0.9076 Pre: 0.9509 Recall: 0.8690 F1: 0.9081 Train AUC: 0.9920 Val AUC: 0.9540 Time: 13.68\n",
      "Epoch: 442 Train Loss: 0.1172 Val Loss: 0.3134 Acc: 0.9004 Pre: 0.9537 Recall: 0.8517 F1: 0.8998 Train AUC: 0.9924 Val AUC: 0.9574 Time: 13.14\n",
      "Epoch: 443 Train Loss: 0.1229 Val Loss: 0.2750 Acc: 0.9004 Pre: 0.9336 Recall: 0.8724 F1: 0.9020 Train AUC: 0.9924 Val AUC: 0.9606 Time: 13.15\n",
      "Epoch: 444 Train Loss: 0.1142 Val Loss: 0.2776 Acc: 0.9058 Pre: 0.9407 Recall: 0.8759 F1: 0.9071 Train AUC: 0.9936 Val AUC: 0.9608 Time: 13.39\n",
      "Epoch: 445 Train Loss: 0.1118 Val Loss: 0.3056 Acc: 0.9004 Pre: 0.9434 Recall: 0.8621 F1: 0.9009 Train AUC: 0.9937 Val AUC: 0.9589 Time: 13.84\n",
      "Epoch: 446 Train Loss: 0.1115 Val Loss: 0.3136 Acc: 0.8986 Pre: 0.9432 Recall: 0.8586 F1: 0.8989 Train AUC: 0.9927 Val AUC: 0.9579 Time: 14.51\n",
      "Epoch: 447 Train Loss: 0.1184 Val Loss: 0.2979 Acc: 0.9058 Pre: 0.9343 Recall: 0.8828 F1: 0.9078 Train AUC: 0.9923 Val AUC: 0.9560 Time: 15.06\n",
      "Epoch: 448 Train Loss: 0.1179 Val Loss: 0.3029 Acc: 0.8986 Pre: 0.9366 Recall: 0.8655 F1: 0.8996 Train AUC: 0.9930 Val AUC: 0.9555 Time: 13.77\n",
      "Epoch: 449 Train Loss: 0.1114 Val Loss: 0.3112 Acc: 0.9094 Pre: 0.9580 Recall: 0.8655 F1: 0.9094 Train AUC: 0.9935 Val AUC: 0.9561 Time: 12.79\n",
      "Epoch: 450 Train Loss: 0.1084 Val Loss: 0.3130 Acc: 0.9112 Pre: 0.9725 Recall: 0.8552 F1: 0.9101 Train AUC: 0.9939 Val AUC: 0.9584 Time: 12.46\n",
      "Epoch: 451 Train Loss: 0.1139 Val Loss: 0.2786 Acc: 0.9022 Pre: 0.9338 Recall: 0.8759 F1: 0.9039 Train AUC: 0.9933 Val AUC: 0.9605 Time: 12.91\n",
      "Epoch: 452 Train Loss: 0.1070 Val Loss: 0.2639 Acc: 0.9058 Pre: 0.9220 Recall: 0.8966 F1: 0.9091 Train AUC: 0.9946 Val AUC: 0.9609 Time: 13.50\n",
      "Epoch: 453 Train Loss: 0.1202 Val Loss: 0.2980 Acc: 0.9022 Pre: 0.9403 Recall: 0.8690 F1: 0.9032 Train AUC: 0.9936 Val AUC: 0.9588 Time: 14.17\n",
      "Epoch: 454 Train Loss: 0.1134 Val Loss: 0.3257 Acc: 0.8986 Pre: 0.9432 Recall: 0.8586 F1: 0.8989 Train AUC: 0.9925 Val AUC: 0.9565 Time: 14.65\n",
      "Epoch: 455 Train Loss: 0.1183 Val Loss: 0.3052 Acc: 0.9040 Pre: 0.9405 Recall: 0.8724 F1: 0.9052 Train AUC: 0.9919 Val AUC: 0.9563 Time: 15.42\n",
      "Epoch: 456 Train Loss: 0.1171 Val Loss: 0.2811 Acc: 0.9040 Pre: 0.9341 Recall: 0.8793 F1: 0.9059 Train AUC: 0.9921 Val AUC: 0.9588 Time: 14.06\n",
      "Epoch: 457 Train Loss: 0.1201 Val Loss: 0.2923 Acc: 0.9058 Pre: 0.9474 Recall: 0.8690 F1: 0.9065 Train AUC: 0.9931 Val AUC: 0.9592 Time: 13.04\n",
      "Epoch: 458 Train Loss: 0.1177 Val Loss: 0.3144 Acc: 0.8986 Pre: 0.9398 Recall: 0.8621 F1: 0.8993 Train AUC: 0.9932 Val AUC: 0.9552 Time: 12.62\n",
      "Epoch: 459 Train Loss: 0.1285 Val Loss: 0.3245 Acc: 0.9004 Pre: 0.9273 Recall: 0.8793 F1: 0.9027 Train AUC: 0.9904 Val AUC: 0.9516 Time: 13.17\n",
      "Epoch: 460 Train Loss: 0.1274 Val Loss: 0.3139 Acc: 0.8986 Pre: 0.9239 Recall: 0.8793 F1: 0.9011 Train AUC: 0.9907 Val AUC: 0.9510 Time: 13.63\n",
      "Epoch: 461 Train Loss: 0.1193 Val Loss: 0.3192 Acc: 0.9112 Pre: 0.9582 Recall: 0.8690 F1: 0.9114 Train AUC: 0.9927 Val AUC: 0.9542 Time: 14.30\n",
      "Epoch: 462 Train Loss: 0.1138 Val Loss: 0.3151 Acc: 0.9094 Pre: 0.9651 Recall: 0.8586 F1: 0.9088 Train AUC: 0.9939 Val AUC: 0.9596 Time: 15.13\n",
      "Epoch: 463 Train Loss: 0.1216 Val Loss: 0.2677 Acc: 0.9058 Pre: 0.9281 Recall: 0.8897 F1: 0.9085 Train AUC: 0.9925 Val AUC: 0.9630 Time: 14.46\n",
      "Epoch: 464 Train Loss: 0.1211 Val Loss: 0.2700 Acc: 0.9040 Pre: 0.9278 Recall: 0.8862 F1: 0.9065 Train AUC: 0.9922 Val AUC: 0.9634 Time: 13.21\n",
      "Epoch: 465 Train Loss: 0.1241 Val Loss: 0.2875 Acc: 0.8986 Pre: 0.9432 Recall: 0.8586 F1: 0.8989 Train AUC: 0.9913 Val AUC: 0.9606 Time: 12.85\n",
      "Epoch: 466 Train Loss: 0.1232 Val Loss: 0.2976 Acc: 0.9004 Pre: 0.9368 Recall: 0.8690 F1: 0.9016 Train AUC: 0.9917 Val AUC: 0.9561 Time: 13.41\n",
      "Epoch: 467 Train Loss: 0.1089 Val Loss: 0.3141 Acc: 0.8949 Pre: 0.9361 Recall: 0.8586 F1: 0.8957 Train AUC: 0.9940 Val AUC: 0.9512 Time: 14.02\n",
      "Epoch: 468 Train Loss: 0.1142 Val Loss: 0.3186 Acc: 0.9022 Pre: 0.9504 Recall: 0.8586 F1: 0.9022 Train AUC: 0.9935 Val AUC: 0.9521 Time: 15.07\n",
      "Epoch: 469 Train Loss: 0.1161 Val Loss: 0.2961 Acc: 0.9058 Pre: 0.9440 Recall: 0.8724 F1: 0.9068 Train AUC: 0.9927 Val AUC: 0.9571 Time: 14.90\n",
      "Epoch: 470 Train Loss: 0.1044 Val Loss: 0.2798 Acc: 0.9040 Pre: 0.9405 Recall: 0.8724 F1: 0.9052 Train AUC: 0.9947 Val AUC: 0.9615 Time: 13.54\n",
      "Epoch: 471 Train Loss: 0.1086 Val Loss: 0.2753 Acc: 0.9040 Pre: 0.9405 Recall: 0.8724 F1: 0.9052 Train AUC: 0.9935 Val AUC: 0.9633 Time: 13.02\n",
      "Epoch: 472 Train Loss: 0.1138 Val Loss: 0.2719 Acc: 0.9076 Pre: 0.9377 Recall: 0.8828 F1: 0.9094 Train AUC: 0.9926 Val AUC: 0.9630 Time: 13.14\n",
      "Epoch: 473 Train Loss: 0.1140 Val Loss: 0.2886 Acc: 0.9094 Pre: 0.9511 Recall: 0.8724 F1: 0.9101 Train AUC: 0.9935 Val AUC: 0.9587 Time: 13.64\n",
      "Epoch: 474 Train Loss: 0.1081 Val Loss: 0.3382 Acc: 0.9040 Pre: 0.9647 Recall: 0.8483 F1: 0.9028 Train AUC: 0.9939 Val AUC: 0.9534 Time: 14.24\n",
      "Epoch: 475 Train Loss: 0.1109 Val Loss: 0.3352 Acc: 0.8986 Pre: 0.9466 Recall: 0.8552 F1: 0.8986 Train AUC: 0.9937 Val AUC: 0.9501 Time: 14.26\n",
      "Epoch: 476 Train Loss: 0.1080 Val Loss: 0.3062 Acc: 0.9004 Pre: 0.9242 Recall: 0.8828 F1: 0.9030 Train AUC: 0.9936 Val AUC: 0.9531 Time: 14.78\n",
      "Epoch: 477 Train Loss: 0.1104 Val Loss: 0.3108 Acc: 0.9094 Pre: 0.9545 Recall: 0.8690 F1: 0.9097 Train AUC: 0.9946 Val AUC: 0.9561 Time: 13.76\n",
      "Epoch: 478 Train Loss: 0.1053 Val Loss: 0.3010 Acc: 0.9004 Pre: 0.9468 Recall: 0.8586 F1: 0.9005 Train AUC: 0.9940 Val AUC: 0.9587 Time: 12.65\n",
      "Epoch: 479 Train Loss: 0.1048 Val Loss: 0.2749 Acc: 0.9022 Pre: 0.9155 Recall: 0.8966 F1: 0.9059 Train AUC: 0.9941 Val AUC: 0.9607 Time: 13.11\n",
      "Epoch: 480 Train Loss: 0.1133 Val Loss: 0.2779 Acc: 0.9076 Pre: 0.9345 Recall: 0.8862 F1: 0.9097 Train AUC: 0.9932 Val AUC: 0.9613 Time: 13.41\n",
      "Epoch: 481 Train Loss: 0.1041 Val Loss: 0.3058 Acc: 0.9058 Pre: 0.9542 Recall: 0.8621 F1: 0.9058 Train AUC: 0.9942 Val AUC: 0.9605 Time: 13.87\n",
      "Epoch: 482 Train Loss: 0.1156 Val Loss: 0.2939 Acc: 0.9094 Pre: 0.9478 Recall: 0.8759 F1: 0.9104 Train AUC: 0.9939 Val AUC: 0.9586 Time: 14.49\n",
      "Epoch: 483 Train Loss: 0.1036 Val Loss: 0.2947 Acc: 0.9040 Pre: 0.9247 Recall: 0.8897 F1: 0.9069 Train AUC: 0.9941 Val AUC: 0.9563 Time: 15.22\n",
      "Epoch: 484 Train Loss: 0.1123 Val Loss: 0.2974 Acc: 0.9112 Pre: 0.9382 Recall: 0.8897 F1: 0.9133 Train AUC: 0.9933 Val AUC: 0.9559 Time: 14.94\n",
      "Epoch: 485 Train Loss: 0.1150 Val Loss: 0.3044 Acc: 0.9185 Pre: 0.9658 Recall: 0.8759 F1: 0.9186 Train AUC: 0.9932 Val AUC: 0.9592 Time: 13.68\n",
      "Epoch: 486 Train Loss: 0.1129 Val Loss: 0.2856 Acc: 0.9112 Pre: 0.9547 Recall: 0.8724 F1: 0.9117 Train AUC: 0.9928 Val AUC: 0.9626 Time: 12.95\n",
      "Epoch: 487 Train Loss: 0.1088 Val Loss: 0.2517 Acc: 0.9076 Pre: 0.9193 Recall: 0.9034 F1: 0.9113 Train AUC: 0.9942 Val AUC: 0.9654 Time: 12.50\n",
      "Epoch: 488 Train Loss: 0.1291 Val Loss: 0.2843 Acc: 0.9040 Pre: 0.9405 Recall: 0.8724 F1: 0.9052 Train AUC: 0.9918 Val AUC: 0.9627 Time: 13.11\n",
      "Epoch: 489 Train Loss: 0.1056 Val Loss: 0.3209 Acc: 0.9022 Pre: 0.9470 Recall: 0.8621 F1: 0.9025 Train AUC: 0.9940 Val AUC: 0.9575 Time: 13.47\n",
      "Epoch: 490 Train Loss: 0.1098 Val Loss: 0.3220 Acc: 0.9076 Pre: 0.9476 Recall: 0.8724 F1: 0.9084 Train AUC: 0.9930 Val AUC: 0.9551 Time: 14.05\n",
      "Epoch: 491 Train Loss: 0.0963 Val Loss: 0.3091 Acc: 0.9022 Pre: 0.9338 Recall: 0.8759 F1: 0.9039 Train AUC: 0.9949 Val AUC: 0.9537 Time: 14.75\n",
      "Epoch: 492 Train Loss: 0.1082 Val Loss: 0.3152 Acc: 0.9058 Pre: 0.9508 Recall: 0.8655 F1: 0.9061 Train AUC: 0.9944 Val AUC: 0.9560 Time: 15.34\n",
      "Epoch: 493 Train Loss: 0.1065 Val Loss: 0.3076 Acc: 0.9058 Pre: 0.9474 Recall: 0.8690 F1: 0.9065 Train AUC: 0.9945 Val AUC: 0.9585 Time: 13.92\n",
      "Epoch: 494 Train Loss: 0.1074 Val Loss: 0.2675 Acc: 0.9076 Pre: 0.9253 Recall: 0.8966 F1: 0.9107 Train AUC: 0.9941 Val AUC: 0.9626 Time: 12.96\n",
      "Epoch: 495 Train Loss: 0.1012 Val Loss: 0.2621 Acc: 0.9094 Pre: 0.9225 Recall: 0.9034 F1: 0.9129 Train AUC: 0.9953 Val AUC: 0.9635 Time: 12.65\n",
      "Epoch: 496 Train Loss: 0.1061 Val Loss: 0.3009 Acc: 0.9076 Pre: 0.9544 Recall: 0.8655 F1: 0.9078 Train AUC: 0.9950 Val AUC: 0.9620 Time: 13.43\n",
      "Epoch: 497 Train Loss: 0.1040 Val Loss: 0.3131 Acc: 0.9058 Pre: 0.9577 Recall: 0.8586 F1: 0.9055 Train AUC: 0.9949 Val AUC: 0.9606 Time: 13.93\n",
      "Epoch: 498 Train Loss: 0.1115 Val Loss: 0.2739 Acc: 0.9004 Pre: 0.9123 Recall: 0.8966 F1: 0.9043 Train AUC: 0.9943 Val AUC: 0.9600 Time: 14.28\n",
      "Epoch: 499 Train Loss: 0.1063 Val Loss: 0.2711 Acc: 0.9040 Pre: 0.9217 Recall: 0.8931 F1: 0.9072 Train AUC: 0.9947 Val AUC: 0.9600 Time: 14.97\n",
      "Epoch: 500 Train Loss: 0.1085 Val Loss: 0.3064 Acc: 0.9094 Pre: 0.9545 Recall: 0.8690 F1: 0.9097 Train AUC: 0.9947 Val AUC: 0.9591 Time: 14.70\n",
      "Epoch: 501 Train Loss: 0.0995 Val Loss: 0.3245 Acc: 0.9058 Pre: 0.9612 Recall: 0.8552 F1: 0.9051 Train AUC: 0.9946 Val AUC: 0.9579 Time: 13.53\n",
      "Epoch: 502 Train Loss: 0.0958 Val Loss: 0.2934 Acc: 0.9058 Pre: 0.9407 Recall: 0.8759 F1: 0.9071 Train AUC: 0.9959 Val AUC: 0.9589 Time: 12.80\n",
      "Epoch: 503 Train Loss: 0.0983 Val Loss: 0.2751 Acc: 0.9022 Pre: 0.9184 Recall: 0.8931 F1: 0.9056 Train AUC: 0.9952 Val AUC: 0.9593 Time: 12.94\n",
      "Epoch: 504 Train Loss: 0.1110 Val Loss: 0.2999 Acc: 0.9112 Pre: 0.9513 Recall: 0.8759 F1: 0.9120 Train AUC: 0.9940 Val AUC: 0.9598 Time: 13.38\n",
      "Epoch: 505 Train Loss: 0.0897 Val Loss: 0.3234 Acc: 0.9022 Pre: 0.9538 Recall: 0.8552 F1: 0.9018 Train AUC: 0.9959 Val AUC: 0.9588 Time: 14.00\n",
      "Epoch: 506 Train Loss: 0.1040 Val Loss: 0.2897 Acc: 0.9094 Pre: 0.9444 Recall: 0.8793 F1: 0.9107 Train AUC: 0.9938 Val AUC: 0.9597 Time: 14.54\n",
      "Epoch: 507 Train Loss: 0.1019 Val Loss: 0.2760 Acc: 0.9040 Pre: 0.9309 Recall: 0.8828 F1: 0.9062 Train AUC: 0.9942 Val AUC: 0.9595 Time: 14.49\n",
      "Epoch: 508 Train Loss: 0.1019 Val Loss: 0.2954 Acc: 0.9040 Pre: 0.9405 Recall: 0.8724 F1: 0.9052 Train AUC: 0.9955 Val AUC: 0.9591 Time: 14.11\n",
      "Epoch: 509 Train Loss: 0.1029 Val Loss: 0.3254 Acc: 0.9004 Pre: 0.9608 Recall: 0.8448 F1: 0.8991 Train AUC: 0.9948 Val AUC: 0.9578 Time: 13.18\n",
      "Epoch: 510 Train Loss: 0.0986 Val Loss: 0.3051 Acc: 0.9058 Pre: 0.9508 Recall: 0.8655 F1: 0.9061 Train AUC: 0.9953 Val AUC: 0.9578 Time: 13.05\n",
      "Epoch: 511 Train Loss: 0.1030 Val Loss: 0.2756 Acc: 0.9094 Pre: 0.9225 Recall: 0.9034 F1: 0.9129 Train AUC: 0.9943 Val AUC: 0.9601 Time: 13.57\n",
      "Epoch: 512 Train Loss: 0.1006 Val Loss: 0.2762 Acc: 0.9149 Pre: 0.9451 Recall: 0.8897 F1: 0.9165 Train AUC: 0.9953 Val AUC: 0.9616 Time: 14.26\n",
      "Epoch: 513 Train Loss: 0.0946 Val Loss: 0.2985 Acc: 0.9076 Pre: 0.9650 Recall: 0.8552 F1: 0.9068 Train AUC: 0.9956 Val AUC: 0.9611 Time: 14.41\n",
      "Epoch: 514 Train Loss: 0.0937 Val Loss: 0.3013 Acc: 0.9094 Pre: 0.9545 Recall: 0.8690 F1: 0.9097 Train AUC: 0.9961 Val AUC: 0.9608 Time: 13.69\n",
      "Epoch: 515 Train Loss: 0.1017 Val Loss: 0.2786 Acc: 0.9058 Pre: 0.9375 Recall: 0.8793 F1: 0.9075 Train AUC: 0.9949 Val AUC: 0.9603 Time: 13.51\n",
      "Epoch: 516 Train Loss: 0.0982 Val Loss: 0.2751 Acc: 0.9112 Pre: 0.9382 Recall: 0.8897 F1: 0.9133 Train AUC: 0.9957 Val AUC: 0.9598 Time: 13.96\n",
      "Epoch: 517 Train Loss: 0.0953 Val Loss: 0.3124 Acc: 0.9058 Pre: 0.9508 Recall: 0.8655 F1: 0.9061 Train AUC: 0.9966 Val AUC: 0.9577 Time: 13.79\n",
      "Epoch: 518 Train Loss: 0.1018 Val Loss: 0.3323 Acc: 0.8967 Pre: 0.9533 Recall: 0.8448 F1: 0.8958 Train AUC: 0.9941 Val AUC: 0.9576 Time: 13.99\n",
      "Epoch: 519 Train Loss: 0.1115 Val Loss: 0.2841 Acc: 0.9112 Pre: 0.9414 Recall: 0.8862 F1: 0.9130 Train AUC: 0.9933 Val AUC: 0.9598 Time: 13.89\n",
      "Epoch: 520 Train Loss: 0.1001 Val Loss: 0.2827 Acc: 0.9040 Pre: 0.9341 Recall: 0.8793 F1: 0.9059 Train AUC: 0.9952 Val AUC: 0.9602 Time: 13.74\n",
      "Epoch: 521 Train Loss: 0.1047 Val Loss: 0.2894 Acc: 0.9040 Pre: 0.9405 Recall: 0.8724 F1: 0.9052 Train AUC: 0.9943 Val AUC: 0.9606 Time: 14.30\n",
      "Epoch: 522 Train Loss: 0.1105 Val Loss: 0.3103 Acc: 0.9040 Pre: 0.9540 Recall: 0.8586 F1: 0.9038 Train AUC: 0.9936 Val AUC: 0.9599 Time: 13.84\n",
      "Epoch: 523 Train Loss: 0.1020 Val Loss: 0.3088 Acc: 0.9058 Pre: 0.9542 Recall: 0.8621 F1: 0.9058 Train AUC: 0.9939 Val AUC: 0.9593 Time: 13.26\n",
      "Epoch: 524 Train Loss: 0.1081 Val Loss: 0.2945 Acc: 0.9112 Pre: 0.9513 Recall: 0.8759 F1: 0.9120 Train AUC: 0.9933 Val AUC: 0.9605 Time: 13.03\n",
      "Epoch: 525 Train Loss: 0.0940 Val Loss: 0.2794 Acc: 0.9076 Pre: 0.9345 Recall: 0.8862 F1: 0.9097 Train AUC: 0.9953 Val AUC: 0.9616 Time: 13.45\n",
      "Epoch: 526 Train Loss: 0.0960 Val Loss: 0.2714 Acc: 0.9076 Pre: 0.9314 Recall: 0.8897 F1: 0.9101 Train AUC: 0.9952 Val AUC: 0.9626 Time: 14.15\n",
      "Epoch: 527 Train Loss: 0.0935 Val Loss: 0.2684 Acc: 0.9076 Pre: 0.9345 Recall: 0.8862 F1: 0.9097 Train AUC: 0.9965 Val AUC: 0.9630 Time: 14.17\n",
      "Epoch: 528 Train Loss: 0.0939 Val Loss: 0.2863 Acc: 0.9058 Pre: 0.9474 Recall: 0.8690 F1: 0.9065 Train AUC: 0.9963 Val AUC: 0.9622 Time: 13.80\n",
      "Epoch: 529 Train Loss: 0.0933 Val Loss: 0.2980 Acc: 0.9076 Pre: 0.9509 Recall: 0.8690 F1: 0.9081 Train AUC: 0.9956 Val AUC: 0.9612 Time: 13.86\n",
      "Epoch: 530 Train Loss: 0.1047 Val Loss: 0.2996 Acc: 0.9112 Pre: 0.9480 Recall: 0.8793 F1: 0.9123 Train AUC: 0.9936 Val AUC: 0.9600 Time: 14.57\n",
      "Epoch: 531 Train Loss: 0.0917 Val Loss: 0.2946 Acc: 0.9076 Pre: 0.9442 Recall: 0.8759 F1: 0.9088 Train AUC: 0.9957 Val AUC: 0.9597 Time: 14.15\n",
      "Epoch: 532 Train Loss: 0.0947 Val Loss: 0.2907 Acc: 0.9040 Pre: 0.9341 Recall: 0.8793 F1: 0.9059 Train AUC: 0.9952 Val AUC: 0.9597 Time: 13.41\n",
      "Epoch: 533 Train Loss: 0.0964 Val Loss: 0.2972 Acc: 0.9076 Pre: 0.9442 Recall: 0.8759 F1: 0.9088 Train AUC: 0.9954 Val AUC: 0.9622 Time: 13.34\n",
      "Epoch: 534 Train Loss: 0.0873 Val Loss: 0.3041 Acc: 0.9076 Pre: 0.9509 Recall: 0.8690 F1: 0.9081 Train AUC: 0.9964 Val AUC: 0.9621 Time: 13.23\n",
      "Epoch: 535 Train Loss: 0.0904 Val Loss: 0.2856 Acc: 0.9076 Pre: 0.9314 Recall: 0.8897 F1: 0.9101 Train AUC: 0.9959 Val AUC: 0.9617 Time: 13.74\n",
      "Epoch: 536 Train Loss: 0.0965 Val Loss: 0.2836 Acc: 0.9004 Pre: 0.9181 Recall: 0.8897 F1: 0.9037 Train AUC: 0.9948 Val AUC: 0.9596 Time: 14.46\n",
      "Epoch: 537 Train Loss: 0.0988 Val Loss: 0.3157 Acc: 0.9004 Pre: 0.9434 Recall: 0.8621 F1: 0.9009 Train AUC: 0.9953 Val AUC: 0.9573 Time: 14.13\n",
      "Epoch: 538 Train Loss: 0.0918 Val Loss: 0.3178 Acc: 0.9022 Pre: 0.9470 Recall: 0.8621 F1: 0.9025 Train AUC: 0.9961 Val AUC: 0.9575 Time: 13.36\n",
      "Epoch: 539 Train Loss: 0.0986 Val Loss: 0.2715 Acc: 0.9112 Pre: 0.9319 Recall: 0.8966 F1: 0.9139 Train AUC: 0.9958 Val AUC: 0.9626 Time: 13.52\n",
      "Epoch: 540 Train Loss: 0.0832 Val Loss: 0.2576 Acc: 0.9112 Pre: 0.9199 Recall: 0.9103 F1: 0.9151 Train AUC: 0.9969 Val AUC: 0.9658 Time: 14.04\n",
      "Epoch: 541 Train Loss: 0.0984 Val Loss: 0.2707 Acc: 0.9058 Pre: 0.9281 Recall: 0.8897 F1: 0.9085 Train AUC: 0.9955 Val AUC: 0.9654 Time: 14.73\n",
      "Epoch: 542 Train Loss: 0.0956 Val Loss: 0.2925 Acc: 0.9076 Pre: 0.9476 Recall: 0.8724 F1: 0.9084 Train AUC: 0.9950 Val AUC: 0.9645 Time: 14.28\n",
      "Epoch: 543 Train Loss: 0.1003 Val Loss: 0.2870 Acc: 0.9167 Pre: 0.9485 Recall: 0.8897 F1: 0.9181 Train AUC: 0.9947 Val AUC: 0.9597 Time: 13.23\n",
      "Epoch: 544 Train Loss: 0.1052 Val Loss: 0.2975 Acc: 0.9040 Pre: 0.9405 Recall: 0.8724 F1: 0.9052 Train AUC: 0.9945 Val AUC: 0.9577 Time: 13.24\n",
      "Epoch: 545 Train Loss: 0.1004 Val Loss: 0.3182 Acc: 0.9094 Pre: 0.9545 Recall: 0.8690 F1: 0.9097 Train AUC: 0.9949 Val AUC: 0.9582 Time: 13.73\n",
      "Epoch: 546 Train Loss: 0.0917 Val Loss: 0.3102 Acc: 0.9022 Pre: 0.9436 Recall: 0.8655 F1: 0.9029 Train AUC: 0.9957 Val AUC: 0.9592 Time: 14.02\n",
      "Epoch: 547 Train Loss: 0.0989 Val Loss: 0.2754 Acc: 0.9022 Pre: 0.9155 Recall: 0.8966 F1: 0.9059 Train AUC: 0.9948 Val AUC: 0.9616 Time: 13.77\n",
      "Epoch: 548 Train Loss: 0.1015 Val Loss: 0.2711 Acc: 0.9076 Pre: 0.9223 Recall: 0.9000 F1: 0.9110 Train AUC: 0.9952 Val AUC: 0.9622 Time: 13.55\n",
      "Epoch: 549 Train Loss: 0.0991 Val Loss: 0.3174 Acc: 0.8986 Pre: 0.9432 Recall: 0.8586 F1: 0.8989 Train AUC: 0.9959 Val AUC: 0.9588 Time: 13.49\n",
      "Epoch: 550 Train Loss: 0.0925 Val Loss: 0.3510 Acc: 0.8949 Pre: 0.9496 Recall: 0.8448 F1: 0.8942 Train AUC: 0.9954 Val AUC: 0.9552 Time: 13.46\n",
      "Epoch: 551 Train Loss: 0.1016 Val Loss: 0.3303 Acc: 0.8986 Pre: 0.9500 Recall: 0.8517 F1: 0.8982 Train AUC: 0.9938 Val AUC: 0.9543 Time: 14.09\n",
      "Epoch: 552 Train Loss: 0.1002 Val Loss: 0.2959 Acc: 0.9040 Pre: 0.9247 Recall: 0.8897 F1: 0.9069 Train AUC: 0.9954 Val AUC: 0.9560 Time: 14.85\n",
      "Epoch: 553 Train Loss: 0.1056 Val Loss: 0.2964 Acc: 0.9004 Pre: 0.9368 Recall: 0.8690 F1: 0.9016 Train AUC: 0.9951 Val AUC: 0.9593 Time: 14.64\n",
      "Epoch: 554 Train Loss: 0.0862 Val Loss: 0.3266 Acc: 0.9004 Pre: 0.9572 Recall: 0.8483 F1: 0.8995 Train AUC: 0.9966 Val AUC: 0.9591 Time: 13.48\n",
      "Epoch: 555 Train Loss: 0.1060 Val Loss: 0.2768 Acc: 0.9040 Pre: 0.9247 Recall: 0.8897 F1: 0.9069 Train AUC: 0.9947 Val AUC: 0.9621 Time: 13.01\n",
      "Epoch: 556 Train Loss: 0.0967 Val Loss: 0.2691 Acc: 0.9040 Pre: 0.9217 Recall: 0.8931 F1: 0.9072 Train AUC: 0.9956 Val AUC: 0.9622 Time: 13.16\n",
      "Epoch: 557 Train Loss: 0.0944 Val Loss: 0.2960 Acc: 0.9058 Pre: 0.9407 Recall: 0.8759 F1: 0.9071 Train AUC: 0.9965 Val AUC: 0.9610 Time: 13.30\n",
      "Epoch: 558 Train Loss: 0.0890 Val Loss: 0.3012 Acc: 0.8986 Pre: 0.9366 Recall: 0.8655 F1: 0.8996 Train AUC: 0.9963 Val AUC: 0.9614 Time: 13.75\n",
      "Epoch: 559 Train Loss: 0.0949 Val Loss: 0.2931 Acc: 0.9130 Pre: 0.9353 Recall: 0.8966 F1: 0.9155 Train AUC: 0.9953 Val AUC: 0.9607 Time: 14.27\n",
      "Epoch: 560 Train Loss: 0.1010 Val Loss: 0.2911 Acc: 0.9076 Pre: 0.9377 Recall: 0.8828 F1: 0.9094 Train AUC: 0.9940 Val AUC: 0.9603 Time: 13.91\n",
      "Epoch: 561 Train Loss: 0.0883 Val Loss: 0.2983 Acc: 0.8986 Pre: 0.9333 Recall: 0.8690 F1: 0.9000 Train AUC: 0.9966 Val AUC: 0.9601 Time: 13.63\n",
      "Epoch: 562 Train Loss: 0.0874 Val Loss: 0.2962 Acc: 0.8986 Pre: 0.9270 Recall: 0.8759 F1: 0.9007 Train AUC: 0.9966 Val AUC: 0.9610 Time: 14.06\n",
      "Epoch: 563 Train Loss: 0.0949 Val Loss: 0.2998 Acc: 0.8967 Pre: 0.9236 Recall: 0.8759 F1: 0.8991 Train AUC: 0.9951 Val AUC: 0.9595 Time: 13.62\n",
      "Epoch: 564 Train Loss: 0.0926 Val Loss: 0.2972 Acc: 0.9004 Pre: 0.9211 Recall: 0.8862 F1: 0.9033 Train AUC: 0.9955 Val AUC: 0.9590 Time: 13.44\n",
      "Epoch: 565 Train Loss: 0.0948 Val Loss: 0.2986 Acc: 0.9094 Pre: 0.9348 Recall: 0.8897 F1: 0.9117 Train AUC: 0.9954 Val AUC: 0.9583 Time: 13.94\n",
      "Epoch: 566 Train Loss: 0.0926 Val Loss: 0.3016 Acc: 0.9040 Pre: 0.9405 Recall: 0.8724 F1: 0.9052 Train AUC: 0.9955 Val AUC: 0.9580 Time: 14.14\n",
      "Epoch: 567 Train Loss: 0.0948 Val Loss: 0.3329 Acc: 0.9022 Pre: 0.9504 Recall: 0.8586 F1: 0.9022 Train AUC: 0.9962 Val AUC: 0.9592 Time: 13.56\n",
      "Epoch: 568 Train Loss: 0.0995 Val Loss: 0.3100 Acc: 0.9112 Pre: 0.9617 Recall: 0.8655 F1: 0.9111 Train AUC: 0.9959 Val AUC: 0.9610 Time: 13.10\n",
      "Epoch: 569 Train Loss: 0.0971 Val Loss: 0.2664 Acc: 0.9022 Pre: 0.9041 Recall: 0.9103 F1: 0.9072 Train AUC: 0.9954 Val AUC: 0.9628 Time: 13.01\n",
      "Epoch: 570 Train Loss: 0.1035 Val Loss: 0.2726 Acc: 0.9004 Pre: 0.9066 Recall: 0.9034 F1: 0.9050 Train AUC: 0.9957 Val AUC: 0.9617 Time: 13.22\n",
      "Epoch: 571 Train Loss: 0.1053 Val Loss: 0.3336 Acc: 0.8949 Pre: 0.9427 Recall: 0.8517 F1: 0.8949 Train AUC: 0.9957 Val AUC: 0.9567 Time: 13.64\n",
      "Epoch: 572 Train Loss: 0.0973 Val Loss: 0.3559 Acc: 0.8895 Pre: 0.9421 Recall: 0.8414 F1: 0.8889 Train AUC: 0.9956 Val AUC: 0.9529 Time: 14.13\n",
      "Epoch: 573 Train Loss: 0.1068 Val Loss: 0.3023 Acc: 0.9022 Pre: 0.9184 Recall: 0.8931 F1: 0.9056 Train AUC: 0.9950 Val AUC: 0.9549 Time: 14.88\n",
      "Epoch: 574 Train Loss: 0.0956 Val Loss: 0.2841 Acc: 0.9076 Pre: 0.9283 Recall: 0.8931 F1: 0.9104 Train AUC: 0.9961 Val AUC: 0.9594 Time: 14.31\n",
      "Epoch: 575 Train Loss: 0.0904 Val Loss: 0.3069 Acc: 0.8895 Pre: 0.9354 Recall: 0.8483 F1: 0.8897 Train AUC: 0.9970 Val AUC: 0.9628 Time: 13.17\n",
      "Epoch: 576 Train Loss: 0.0930 Val Loss: 0.3217 Acc: 0.8859 Pre: 0.9349 Recall: 0.8414 F1: 0.8857 Train AUC: 0.9955 Val AUC: 0.9633 Time: 12.93\n",
      "Epoch: 577 Train Loss: 0.1085 Val Loss: 0.2706 Acc: 0.8986 Pre: 0.9209 Recall: 0.8828 F1: 0.9014 Train AUC: 0.9941 Val AUC: 0.9652 Time: 13.53\n",
      "Epoch: 578 Train Loss: 0.1038 Val Loss: 0.2747 Acc: 0.9149 Pre: 0.9263 Recall: 0.9103 F1: 0.9183 Train AUC: 0.9940 Val AUC: 0.9598 Time: 13.85\n",
      "Epoch: 579 Train Loss: 0.1093 Val Loss: 0.3174 Acc: 0.9040 Pre: 0.9405 Recall: 0.8724 F1: 0.9052 Train AUC: 0.9956 Val AUC: 0.9565 Time: 14.71\n",
      "Epoch: 580 Train Loss: 0.0937 Val Loss: 0.3820 Acc: 0.8822 Pre: 0.9592 Recall: 0.8103 F1: 0.8785 Train AUC: 0.9955 Val AUC: 0.9535 Time: 14.94\n",
      "Epoch: 581 Train Loss: 0.1174 Val Loss: 0.3099 Acc: 0.8986 Pre: 0.9398 Recall: 0.8621 F1: 0.8993 Train AUC: 0.9937 Val AUC: 0.9582 Time: 13.60\n",
      "Epoch: 582 Train Loss: 0.0813 Val Loss: 0.2624 Acc: 0.9112 Pre: 0.9258 Recall: 0.9034 F1: 0.9145 Train AUC: 0.9970 Val AUC: 0.9627 Time: 12.83\n",
      "Epoch: 583 Train Loss: 0.1000 Val Loss: 0.2634 Acc: 0.9149 Pre: 0.9355 Recall: 0.9000 F1: 0.9174 Train AUC: 0.9962 Val AUC: 0.9647 Time: 13.10\n",
      "Epoch: 584 Train Loss: 0.0894 Val Loss: 0.2920 Acc: 0.8986 Pre: 0.9466 Recall: 0.8552 F1: 0.8986 Train AUC: 0.9963 Val AUC: 0.9643 Time: 13.56\n",
      "Epoch: 585 Train Loss: 0.0981 Val Loss: 0.2949 Acc: 0.8931 Pre: 0.9425 Recall: 0.8483 F1: 0.8929 Train AUC: 0.9945 Val AUC: 0.9646 Time: 14.23\n",
      "Epoch: 586 Train Loss: 0.0921 Val Loss: 0.2662 Acc: 0.9094 Pre: 0.9286 Recall: 0.8966 F1: 0.9123 Train AUC: 0.9962 Val AUC: 0.9638 Time: 14.71\n",
      "Epoch: 587 Train Loss: 0.0911 Val Loss: 0.2680 Acc: 0.9094 Pre: 0.9167 Recall: 0.9103 F1: 0.9135 Train AUC: 0.9963 Val AUC: 0.9605 Time: 14.32\n",
      "Epoch: 588 Train Loss: 0.0947 Val Loss: 0.2937 Acc: 0.9130 Pre: 0.9416 Recall: 0.8897 F1: 0.9149 Train AUC: 0.9968 Val AUC: 0.9578 Time: 13.82\n",
      "Epoch: 589 Train Loss: 0.0919 Val Loss: 0.3538 Acc: 0.9004 Pre: 0.9608 Recall: 0.8448 F1: 0.8991 Train AUC: 0.9965 Val AUC: 0.9538 Time: 12.84\n",
      "Epoch: 590 Train Loss: 0.0895 Val Loss: 0.3562 Acc: 0.8986 Pre: 0.9606 Recall: 0.8414 F1: 0.8971 Train AUC: 0.9965 Val AUC: 0.9531 Time: 12.97\n",
      "Epoch: 591 Train Loss: 0.0974 Val Loss: 0.3032 Acc: 0.9112 Pre: 0.9513 Recall: 0.8759 F1: 0.9120 Train AUC: 0.9946 Val AUC: 0.9576 Time: 13.31\n",
      "Epoch: 592 Train Loss: 0.0850 Val Loss: 0.2703 Acc: 0.9076 Pre: 0.9253 Recall: 0.8966 F1: 0.9107 Train AUC: 0.9969 Val AUC: 0.9621 Time: 14.01\n",
      "Epoch: 593 Train Loss: 0.0895 Val Loss: 0.2695 Acc: 0.9112 Pre: 0.9350 Recall: 0.8931 F1: 0.9136 Train AUC: 0.9975 Val AUC: 0.9650 Time: 14.44\n",
      "Epoch: 594 Train Loss: 0.0901 Val Loss: 0.2871 Acc: 0.8949 Pre: 0.9394 Recall: 0.8552 F1: 0.8953 Train AUC: 0.9964 Val AUC: 0.9644 Time: 14.74\n",
      "Epoch: 595 Train Loss: 0.0953 Val Loss: 0.2923 Acc: 0.9076 Pre: 0.9509 Recall: 0.8690 F1: 0.9081 Train AUC: 0.9950 Val AUC: 0.9635 Time: 15.14\n",
      "Epoch: 596 Train Loss: 0.1031 Val Loss: 0.2787 Acc: 0.9130 Pre: 0.9321 Recall: 0.9000 F1: 0.9158 Train AUC: 0.9946 Val AUC: 0.9623 Time: 14.59\n",
      "Epoch: 597 Train Loss: 0.0897 Val Loss: 0.2847 Acc: 0.9130 Pre: 0.9353 Recall: 0.8966 F1: 0.9155 Train AUC: 0.9961 Val AUC: 0.9592 Time: 13.32\n",
      "Epoch: 598 Train Loss: 0.0945 Val Loss: 0.3150 Acc: 0.9094 Pre: 0.9478 Recall: 0.8759 F1: 0.9104 Train AUC: 0.9960 Val AUC: 0.9568 Time: 13.06\n",
      "Epoch: 599 Train Loss: 0.0932 Val Loss: 0.3262 Acc: 0.8949 Pre: 0.9462 Recall: 0.8483 F1: 0.8945 Train AUC: 0.9964 Val AUC: 0.9576 Time: 13.10\n",
      "Epoch: 600 Train Loss: 0.0947 Val Loss: 0.3100 Acc: 0.9058 Pre: 0.9312 Recall: 0.8862 F1: 0.9081 Train AUC: 0.9963 Val AUC: 0.9578 Time: 13.53\n",
      "Epoch: 601 Train Loss: 0.0873 Val Loss: 0.2987 Acc: 0.8967 Pre: 0.9003 Recall: 0.9034 F1: 0.9019 Train AUC: 0.9963 Val AUC: 0.9590 Time: 14.03\n",
      "Epoch: 602 Train Loss: 0.0929 Val Loss: 0.2846 Acc: 0.9094 Pre: 0.9167 Recall: 0.9103 F1: 0.9135 Train AUC: 0.9959 Val AUC: 0.9613 Time: 14.45\n",
      "Epoch: 603 Train Loss: 0.0848 Val Loss: 0.2966 Acc: 0.9149 Pre: 0.9483 Recall: 0.8862 F1: 0.9162 Train AUC: 0.9970 Val AUC: 0.9623 Time: 14.02\n",
      "Epoch: 604 Train Loss: 0.0815 Val Loss: 0.3193 Acc: 0.9076 Pre: 0.9509 Recall: 0.8690 F1: 0.9081 Train AUC: 0.9970 Val AUC: 0.9613 Time: 13.37\n",
      "Epoch: 605 Train Loss: 0.0850 Val Loss: 0.2942 Acc: 0.9130 Pre: 0.9416 Recall: 0.8897 F1: 0.9149 Train AUC: 0.9975 Val AUC: 0.9607 Time: 13.18\n",
      "Epoch: 606 Train Loss: 0.0868 Val Loss: 0.2825 Acc: 0.9130 Pre: 0.9353 Recall: 0.8966 F1: 0.9155 Train AUC: 0.9965 Val AUC: 0.9619 Time: 13.22\n",
      "Epoch: 607 Train Loss: 0.0888 Val Loss: 0.2880 Acc: 0.9112 Pre: 0.9350 Recall: 0.8931 F1: 0.9136 Train AUC: 0.9962 Val AUC: 0.9630 Time: 13.58\n",
      "Epoch: 608 Train Loss: 0.0839 Val Loss: 0.3116 Acc: 0.8967 Pre: 0.9430 Recall: 0.8552 F1: 0.8969 Train AUC: 0.9962 Val AUC: 0.9617 Time: 14.42\n",
      "Epoch: 609 Train Loss: 0.0851 Val Loss: 0.3050 Acc: 0.9040 Pre: 0.9506 Recall: 0.8621 F1: 0.9042 Train AUC: 0.9961 Val AUC: 0.9621 Time: 14.83\n",
      "Epoch: 610 Train Loss: 0.0835 Val Loss: 0.2788 Acc: 0.9058 Pre: 0.9281 Recall: 0.8897 F1: 0.9085 Train AUC: 0.9969 Val AUC: 0.9617 Time: 13.43\n",
      "Epoch: 611 Train Loss: 0.0918 Val Loss: 0.2792 Acc: 0.9058 Pre: 0.9220 Recall: 0.8966 F1: 0.9091 Train AUC: 0.9963 Val AUC: 0.9613 Time: 12.90\n",
      "Epoch: 612 Train Loss: 0.0893 Val Loss: 0.3031 Acc: 0.9076 Pre: 0.9410 Recall: 0.8793 F1: 0.9091 Train AUC: 0.9965 Val AUC: 0.9612 Time: 13.49\n",
      "Epoch: 613 Train Loss: 0.0866 Val Loss: 0.3229 Acc: 0.9022 Pre: 0.9504 Recall: 0.8586 F1: 0.9022 Train AUC: 0.9957 Val AUC: 0.9597 Time: 14.14\n",
      "Epoch: 614 Train Loss: 0.0811 Val Loss: 0.3121 Acc: 0.9130 Pre: 0.9481 Recall: 0.8828 F1: 0.9143 Train AUC: 0.9966 Val AUC: 0.9615 Time: 14.53\n",
      "Epoch: 615 Train Loss: 0.0814 Val Loss: 0.2806 Acc: 0.9094 Pre: 0.9286 Recall: 0.8966 F1: 0.9123 Train AUC: 0.9966 Val AUC: 0.9635 Time: 14.07\n",
      "Epoch: 616 Train Loss: 0.0787 Val Loss: 0.2723 Acc: 0.9203 Pre: 0.9362 Recall: 0.9103 F1: 0.9231 Train AUC: 0.9969 Val AUC: 0.9637 Time: 13.39\n",
      "Epoch: 617 Train Loss: 0.0837 Val Loss: 0.2808 Acc: 0.9167 Pre: 0.9357 Recall: 0.9034 F1: 0.9193 Train AUC: 0.9973 Val AUC: 0.9635 Time: 12.68\n",
      "Epoch: 618 Train Loss: 0.0976 Val Loss: 0.3104 Acc: 0.9149 Pre: 0.9483 Recall: 0.8862 F1: 0.9162 Train AUC: 0.9952 Val AUC: 0.9600 Time: 13.33\n",
      "Epoch: 619 Train Loss: 0.0809 Val Loss: 0.3417 Acc: 0.9094 Pre: 0.9511 Recall: 0.8724 F1: 0.9101 Train AUC: 0.9966 Val AUC: 0.9549 Time: 13.66\n",
      "Epoch: 620 Train Loss: 0.0925 Val Loss: 0.3244 Acc: 0.9058 Pre: 0.9375 Recall: 0.8793 F1: 0.9075 Train AUC: 0.9952 Val AUC: 0.9556 Time: 14.23\n",
      "Epoch: 621 Train Loss: 0.0816 Val Loss: 0.2928 Acc: 0.9076 Pre: 0.9223 Recall: 0.9000 F1: 0.9110 Train AUC: 0.9968 Val AUC: 0.9594 Time: 15.04\n",
      "Epoch: 622 Train Loss: 0.0836 Val Loss: 0.2784 Acc: 0.9040 Pre: 0.9187 Recall: 0.8966 F1: 0.9075 Train AUC: 0.9971 Val AUC: 0.9627 Time: 14.11\n",
      "Epoch: 623 Train Loss: 0.0779 Val Loss: 0.2915 Acc: 0.8949 Pre: 0.9328 Recall: 0.8621 F1: 0.8961 Train AUC: 0.9975 Val AUC: 0.9643 Time: 13.01\n",
      "Epoch: 624 Train Loss: 0.0897 Val Loss: 0.2805 Acc: 0.9022 Pre: 0.9275 Recall: 0.8828 F1: 0.9046 Train AUC: 0.9964 Val AUC: 0.9654 Time: 12.72\n",
      "Epoch: 625 Train Loss: 0.0816 Val Loss: 0.2718 Acc: 0.9130 Pre: 0.9291 Recall: 0.9034 F1: 0.9161 Train AUC: 0.9969 Val AUC: 0.9644 Time: 12.47\n",
      "Epoch: 626 Train Loss: 0.0737 Val Loss: 0.2870 Acc: 0.9130 Pre: 0.9291 Recall: 0.9034 F1: 0.9161 Train AUC: 0.9980 Val AUC: 0.9613 Time: 12.75\n",
      "Epoch: 627 Train Loss: 0.0765 Val Loss: 0.3250 Acc: 0.9022 Pre: 0.9470 Recall: 0.8621 F1: 0.9025 Train AUC: 0.9980 Val AUC: 0.9570 Time: 13.52\n",
      "Epoch: 628 Train Loss: 0.0858 Val Loss: 0.3508 Acc: 0.9022 Pre: 0.9609 Recall: 0.8483 F1: 0.9011 Train AUC: 0.9963 Val AUC: 0.9563 Time: 13.85\n",
      "Epoch: 629 Train Loss: 0.0799 Val Loss: 0.3330 Acc: 0.9112 Pre: 0.9617 Recall: 0.8655 F1: 0.9111 Train AUC: 0.9969 Val AUC: 0.9575 Time: 14.72\n",
      "Epoch: 630 Train Loss: 0.0826 Val Loss: 0.2936 Acc: 0.9058 Pre: 0.9281 Recall: 0.8897 F1: 0.9085 Train AUC: 0.9970 Val AUC: 0.9611 Time: 15.21\n",
      "Epoch: 631 Train Loss: 0.0770 Val Loss: 0.2914 Acc: 0.9040 Pre: 0.9187 Recall: 0.8966 F1: 0.9075 Train AUC: 0.9978 Val AUC: 0.9626 Time: 15.69\n",
      "Epoch: 632 Train Loss: 0.0819 Val Loss: 0.2934 Acc: 0.9076 Pre: 0.9314 Recall: 0.8897 F1: 0.9101 Train AUC: 0.9965 Val AUC: 0.9633 Time: 13.86\n",
      "Epoch: 633 Train Loss: 0.0883 Val Loss: 0.2916 Acc: 0.9094 Pre: 0.9286 Recall: 0.8966 F1: 0.9123 Train AUC: 0.9956 Val AUC: 0.9626 Time: 12.91\n",
      "Epoch: 634 Train Loss: 0.0862 Val Loss: 0.2991 Acc: 0.9040 Pre: 0.9247 Recall: 0.8897 F1: 0.9069 Train AUC: 0.9957 Val AUC: 0.9594 Time: 12.36\n",
      "Epoch: 635 Train Loss: 0.0900 Val Loss: 0.3123 Acc: 0.9094 Pre: 0.9511 Recall: 0.8724 F1: 0.9101 Train AUC: 0.9960 Val AUC: 0.9600 Time: 12.91\n",
      "Epoch: 636 Train Loss: 0.0782 Val Loss: 0.3166 Acc: 0.9094 Pre: 0.9511 Recall: 0.8724 F1: 0.9101 Train AUC: 0.9981 Val AUC: 0.9606 Time: 13.51\n",
      "Epoch: 637 Train Loss: 0.0796 Val Loss: 0.2984 Acc: 0.9112 Pre: 0.9350 Recall: 0.8931 F1: 0.9136 Train AUC: 0.9971 Val AUC: 0.9620 Time: 13.93\n",
      "Epoch: 638 Train Loss: 0.0766 Val Loss: 0.2928 Acc: 0.9112 Pre: 0.9350 Recall: 0.8931 F1: 0.9136 Train AUC: 0.9970 Val AUC: 0.9621 Time: 14.55\n",
      "Epoch: 639 Train Loss: 0.0835 Val Loss: 0.3079 Acc: 0.9058 Pre: 0.9407 Recall: 0.8759 F1: 0.9071 Train AUC: 0.9968 Val AUC: 0.9616 Time: 14.95\n",
      "Epoch: 640 Train Loss: 0.0777 Val Loss: 0.3124 Acc: 0.9149 Pre: 0.9517 Recall: 0.8828 F1: 0.9159 Train AUC: 0.9969 Val AUC: 0.9613 Time: 14.12\n",
      "Epoch: 641 Train Loss: 0.0740 Val Loss: 0.2973 Acc: 0.9058 Pre: 0.9312 Recall: 0.8862 F1: 0.9081 Train AUC: 0.9975 Val AUC: 0.9621 Time: 13.08\n",
      "Epoch: 642 Train Loss: 0.0682 Val Loss: 0.2871 Acc: 0.9058 Pre: 0.9220 Recall: 0.8966 F1: 0.9091 Train AUC: 0.9982 Val AUC: 0.9625 Time: 12.70\n",
      "Epoch: 643 Train Loss: 0.0753 Val Loss: 0.2920 Acc: 0.9040 Pre: 0.9247 Recall: 0.8897 F1: 0.9069 Train AUC: 0.9975 Val AUC: 0.9621 Time: 12.47\n",
      "Epoch: 644 Train Loss: 0.0783 Val Loss: 0.3099 Acc: 0.9076 Pre: 0.9410 Recall: 0.8793 F1: 0.9091 Train AUC: 0.9971 Val AUC: 0.9610 Time: 12.95\n",
      "Epoch: 645 Train Loss: 0.0705 Val Loss: 0.3089 Acc: 0.9112 Pre: 0.9414 Recall: 0.8862 F1: 0.9130 Train AUC: 0.9978 Val AUC: 0.9607 Time: 13.56\n",
      "Epoch: 646 Train Loss: 0.0727 Val Loss: 0.2929 Acc: 0.9130 Pre: 0.9321 Recall: 0.9000 F1: 0.9158 Train AUC: 0.9978 Val AUC: 0.9609 Time: 14.05\n",
      "Epoch: 647 Train Loss: 0.0804 Val Loss: 0.2980 Acc: 0.9167 Pre: 0.9420 Recall: 0.8966 F1: 0.9187 Train AUC: 0.9972 Val AUC: 0.9617 Time: 14.86\n",
      "Epoch: 648 Train Loss: 0.0754 Val Loss: 0.2988 Acc: 0.9149 Pre: 0.9418 Recall: 0.8931 F1: 0.9168 Train AUC: 0.9973 Val AUC: 0.9628 Time: 15.38\n",
      "Epoch: 649 Train Loss: 0.0708 Val Loss: 0.2967 Acc: 0.9130 Pre: 0.9353 Recall: 0.8966 F1: 0.9155 Train AUC: 0.9979 Val AUC: 0.9634 Time: 14.05\n",
      "Epoch: 650 Train Loss: 0.0776 Val Loss: 0.2887 Acc: 0.9167 Pre: 0.9357 Recall: 0.9034 F1: 0.9193 Train AUC: 0.9970 Val AUC: 0.9640 Time: 13.42\n",
      "Epoch: 651 Train Loss: 0.0693 Val Loss: 0.2793 Acc: 0.9130 Pre: 0.9291 Recall: 0.9034 F1: 0.9161 Train AUC: 0.9981 Val AUC: 0.9643 Time: 13.26\n",
      "Epoch: 652 Train Loss: 0.0760 Val Loss: 0.2829 Acc: 0.9149 Pre: 0.9324 Recall: 0.9034 F1: 0.9177 Train AUC: 0.9970 Val AUC: 0.9632 Time: 13.15\n",
      "Epoch: 653 Train Loss: 0.0743 Val Loss: 0.3013 Acc: 0.9167 Pre: 0.9519 Recall: 0.8862 F1: 0.9179 Train AUC: 0.9974 Val AUC: 0.9617 Time: 13.77\n",
      "Epoch: 654 Train Loss: 0.0792 Val Loss: 0.3146 Acc: 0.9130 Pre: 0.9549 Recall: 0.8759 F1: 0.9137 Train AUC: 0.9967 Val AUC: 0.9608 Time: 14.39\n",
      "Epoch: 655 Train Loss: 0.0852 Val Loss: 0.3000 Acc: 0.9112 Pre: 0.9382 Recall: 0.8897 F1: 0.9133 Train AUC: 0.9965 Val AUC: 0.9610 Time: 14.21\n",
      "Epoch: 656 Train Loss: 0.0725 Val Loss: 0.2996 Acc: 0.9167 Pre: 0.9420 Recall: 0.8966 F1: 0.9187 Train AUC: 0.9979 Val AUC: 0.9619 Time: 14.09\n",
      "Epoch: 657 Train Loss: 0.0736 Val Loss: 0.2960 Acc: 0.9130 Pre: 0.9321 Recall: 0.9000 F1: 0.9158 Train AUC: 0.9976 Val AUC: 0.9628 Time: 13.13\n",
      "Epoch: 658 Train Loss: 0.0841 Val Loss: 0.2994 Acc: 0.9094 Pre: 0.9348 Recall: 0.8897 F1: 0.9117 Train AUC: 0.9964 Val AUC: 0.9629 Time: 12.68\n",
      "Epoch: 659 Train Loss: 0.0794 Val Loss: 0.2952 Acc: 0.9076 Pre: 0.9314 Recall: 0.8897 F1: 0.9101 Train AUC: 0.9972 Val AUC: 0.9619 Time: 12.75\n",
      "Epoch: 660 Train Loss: 0.0793 Val Loss: 0.3005 Acc: 0.9076 Pre: 0.9410 Recall: 0.8793 F1: 0.9091 Train AUC: 0.9971 Val AUC: 0.9615 Time: 13.37\n",
      "Epoch: 661 Train Loss: 0.0660 Val Loss: 0.3018 Acc: 0.9094 Pre: 0.9444 Recall: 0.8793 F1: 0.9107 Train AUC: 0.9984 Val AUC: 0.9614 Time: 13.85\n",
      "Epoch: 662 Train Loss: 0.0742 Val Loss: 0.3029 Acc: 0.9058 Pre: 0.9343 Recall: 0.8828 F1: 0.9078 Train AUC: 0.9978 Val AUC: 0.9620 Time: 14.60\n",
      "Epoch: 663 Train Loss: 0.0706 Val Loss: 0.2963 Acc: 0.9112 Pre: 0.9350 Recall: 0.8931 F1: 0.9136 Train AUC: 0.9978 Val AUC: 0.9632 Time: 15.01\n",
      "Epoch: 664 Train Loss: 0.0774 Val Loss: 0.2731 Acc: 0.9167 Pre: 0.9236 Recall: 0.9172 F1: 0.9204 Train AUC: 0.9978 Val AUC: 0.9652 Time: 14.06\n",
      "Epoch: 665 Train Loss: 0.0805 Val Loss: 0.2725 Acc: 0.9167 Pre: 0.9266 Recall: 0.9138 F1: 0.9201 Train AUC: 0.9972 Val AUC: 0.9651 Time: 13.09\n",
      "Epoch: 666 Train Loss: 0.0833 Val Loss: 0.3064 Acc: 0.9094 Pre: 0.9412 Recall: 0.8828 F1: 0.9110 Train AUC: 0.9973 Val AUC: 0.9633 Time: 13.25\n",
      "Epoch: 667 Train Loss: 0.0766 Val Loss: 0.3529 Acc: 0.9004 Pre: 0.9608 Recall: 0.8448 F1: 0.8991 Train AUC: 0.9972 Val AUC: 0.9610 Time: 13.53\n",
      "Epoch: 668 Train Loss: 0.1020 Val Loss: 0.3080 Acc: 0.9076 Pre: 0.9345 Recall: 0.8862 F1: 0.9097 Train AUC: 0.9966 Val AUC: 0.9592 Time: 14.13\n",
      "Epoch: 669 Train Loss: 0.0703 Val Loss: 0.3077 Acc: 0.9040 Pre: 0.9044 Recall: 0.9138 F1: 0.9091 Train AUC: 0.9979 Val AUC: 0.9579 Time: 14.96\n",
      "Epoch: 670 Train Loss: 0.0834 Val Loss: 0.3111 Acc: 0.9076 Pre: 0.9253 Recall: 0.8966 F1: 0.9107 Train AUC: 0.9975 Val AUC: 0.9593 Time: 13.68\n",
      "Epoch: 671 Train Loss: 0.0779 Val Loss: 0.3383 Acc: 0.9004 Pre: 0.9502 Recall: 0.8552 F1: 0.9002 Train AUC: 0.9976 Val AUC: 0.9596 Time: 12.72\n",
      "Epoch: 672 Train Loss: 0.0901 Val Loss: 0.2978 Acc: 0.9022 Pre: 0.9307 Recall: 0.8793 F1: 0.9043 Train AUC: 0.9960 Val AUC: 0.9628 Time: 12.79\n",
      "Epoch: 673 Train Loss: 0.0812 Val Loss: 0.2783 Acc: 0.9094 Pre: 0.9196 Recall: 0.9069 F1: 0.9132 Train AUC: 0.9968 Val AUC: 0.9618 Time: 13.25\n",
      "Epoch: 674 Train Loss: 0.0804 Val Loss: 0.2887 Acc: 0.9076 Pre: 0.9223 Recall: 0.9000 F1: 0.9110 Train AUC: 0.9972 Val AUC: 0.9606 Time: 14.14\n",
      "Epoch: 675 Train Loss: 0.0821 Val Loss: 0.3518 Acc: 0.8986 Pre: 0.9570 Recall: 0.8448 F1: 0.8974 Train AUC: 0.9972 Val AUC: 0.9566 Time: 14.57\n",
      "Epoch: 676 Train Loss: 0.0758 Val Loss: 0.3844 Acc: 0.8895 Pre: 0.9455 Recall: 0.8379 F1: 0.8885 Train AUC: 0.9975 Val AUC: 0.9533 Time: 15.16\n",
      "Epoch: 677 Train Loss: 0.0916 Val Loss: 0.3221 Acc: 0.9149 Pre: 0.9451 Recall: 0.8897 F1: 0.9165 Train AUC: 0.9957 Val AUC: 0.9571 Time: 14.62\n",
      "Epoch: 678 Train Loss: 0.0737 Val Loss: 0.2818 Acc: 0.9040 Pre: 0.9129 Recall: 0.9034 F1: 0.9081 Train AUC: 0.9975 Val AUC: 0.9617 Time: 13.34\n",
      "Epoch: 679 Train Loss: 0.0859 Val Loss: 0.2830 Acc: 0.9149 Pre: 0.9324 Recall: 0.9034 F1: 0.9177 Train AUC: 0.9979 Val AUC: 0.9640 Time: 13.03\n",
      "Epoch: 680 Train Loss: 0.0831 Val Loss: 0.3282 Acc: 0.9004 Pre: 0.9502 Recall: 0.8552 F1: 0.9002 Train AUC: 0.9967 Val AUC: 0.9639 Time: 13.33\n",
      "Epoch: 681 Train Loss: 0.0872 Val Loss: 0.3162 Acc: 0.8913 Pre: 0.9259 Recall: 0.8621 F1: 0.8929 Train AUC: 0.9974 Val AUC: 0.9627 Time: 13.45\n",
      "Epoch: 682 Train Loss: 0.0798 Val Loss: 0.2938 Acc: 0.9130 Pre: 0.9231 Recall: 0.9103 F1: 0.9167 Train AUC: 0.9968 Val AUC: 0.9604 Time: 13.42\n",
      "Epoch: 683 Train Loss: 0.0823 Val Loss: 0.2877 Acc: 0.9022 Pre: 0.9155 Recall: 0.8966 F1: 0.9059 Train AUC: 0.9967 Val AUC: 0.9606 Time: 13.94\n",
      "Epoch: 684 Train Loss: 0.0764 Val Loss: 0.3188 Acc: 0.9076 Pre: 0.9476 Recall: 0.8724 F1: 0.9084 Train AUC: 0.9981 Val AUC: 0.9595 Time: 14.17\n",
      "Epoch: 685 Train Loss: 0.0766 Val Loss: 0.3262 Acc: 0.9040 Pre: 0.9506 Recall: 0.8621 F1: 0.9042 Train AUC: 0.9974 Val AUC: 0.9587 Time: 13.45\n",
      "Epoch: 686 Train Loss: 0.0771 Val Loss: 0.3017 Acc: 0.9094 Pre: 0.9412 Recall: 0.8828 F1: 0.9110 Train AUC: 0.9975 Val AUC: 0.9607 Time: 12.72\n",
      "Epoch: 687 Train Loss: 0.0744 Val Loss: 0.2896 Acc: 0.9130 Pre: 0.9353 Recall: 0.8966 F1: 0.9155 Train AUC: 0.9978 Val AUC: 0.9618 Time: 13.22\n",
      "Epoch: 688 Train Loss: 0.0673 Val Loss: 0.2992 Acc: 0.9076 Pre: 0.9442 Recall: 0.8759 F1: 0.9088 Train AUC: 0.9987 Val AUC: 0.9622 Time: 13.80\n",
      "Epoch: 689 Train Loss: 0.0779 Val Loss: 0.3123 Acc: 0.9022 Pre: 0.9470 Recall: 0.8621 F1: 0.9025 Train AUC: 0.9971 Val AUC: 0.9621 Time: 14.08\n",
      "Epoch: 690 Train Loss: 0.0653 Val Loss: 0.3119 Acc: 0.9076 Pre: 0.9509 Recall: 0.8690 F1: 0.9081 Train AUC: 0.9979 Val AUC: 0.9618 Time: 14.29\n",
      "Epoch: 691 Train Loss: 0.0763 Val Loss: 0.2918 Acc: 0.9149 Pre: 0.9386 Recall: 0.8966 F1: 0.9171 Train AUC: 0.9977 Val AUC: 0.9615 Time: 14.84\n",
      "Epoch: 692 Train Loss: 0.0730 Val Loss: 0.2898 Acc: 0.9149 Pre: 0.9324 Recall: 0.9034 F1: 0.9177 Train AUC: 0.9980 Val AUC: 0.9611 Time: 13.57\n",
      "Epoch: 693 Train Loss: 0.0742 Val Loss: 0.3017 Acc: 0.9130 Pre: 0.9321 Recall: 0.9000 F1: 0.9158 Train AUC: 0.9979 Val AUC: 0.9605 Time: 12.96\n",
      "Epoch: 694 Train Loss: 0.0798 Val Loss: 0.3177 Acc: 0.9076 Pre: 0.9314 Recall: 0.8897 F1: 0.9101 Train AUC: 0.9969 Val AUC: 0.9591 Time: 12.78\n",
      "Epoch: 695 Train Loss: 0.0714 Val Loss: 0.3183 Acc: 0.9094 Pre: 0.9348 Recall: 0.8897 F1: 0.9117 Train AUC: 0.9974 Val AUC: 0.9586 Time: 13.56\n",
      "Epoch: 696 Train Loss: 0.0800 Val Loss: 0.3144 Acc: 0.9112 Pre: 0.9382 Recall: 0.8897 F1: 0.9133 Train AUC: 0.9966 Val AUC: 0.9589 Time: 13.98\n",
      "Epoch: 697 Train Loss: 0.0707 Val Loss: 0.3284 Acc: 0.9112 Pre: 0.9547 Recall: 0.8724 F1: 0.9117 Train AUC: 0.9978 Val AUC: 0.9589 Time: 14.47\n",
      "Epoch: 698 Train Loss: 0.0782 Val Loss: 0.3077 Acc: 0.9112 Pre: 0.9414 Recall: 0.8862 F1: 0.9130 Train AUC: 0.9968 Val AUC: 0.9605 Time: 14.45\n",
      "Epoch: 699 Train Loss: 0.0714 Val Loss: 0.3059 Acc: 0.9112 Pre: 0.9446 Recall: 0.8828 F1: 0.9127 Train AUC: 0.9982 Val AUC: 0.9611 Time: 13.83\n",
      "Epoch: 700 Train Loss: 0.0772 Val Loss: 0.2962 Acc: 0.9185 Pre: 0.9422 Recall: 0.9000 F1: 0.9206 Train AUC: 0.9969 Val AUC: 0.9626 Time: 12.57\n",
      "Epoch: 701 Train Loss: 0.0770 Val Loss: 0.2981 Acc: 0.9149 Pre: 0.9418 Recall: 0.8931 F1: 0.9168 Train AUC: 0.9970 Val AUC: 0.9623 Time: 12.52\n",
      "Epoch: 702 Train Loss: 0.0770 Val Loss: 0.3083 Acc: 0.9112 Pre: 0.9513 Recall: 0.8759 F1: 0.9120 Train AUC: 0.9975 Val AUC: 0.9614 Time: 13.04\n",
      "Epoch: 703 Train Loss: 0.0691 Val Loss: 0.3130 Acc: 0.9076 Pre: 0.9509 Recall: 0.8690 F1: 0.9081 Train AUC: 0.9979 Val AUC: 0.9604 Time: 13.47\n",
      "Epoch: 704 Train Loss: 0.0759 Val Loss: 0.3075 Acc: 0.9022 Pre: 0.9338 Recall: 0.8759 F1: 0.9039 Train AUC: 0.9972 Val AUC: 0.9601 Time: 14.00\n",
      "Epoch: 705 Train Loss: 0.0764 Val Loss: 0.3044 Acc: 0.9094 Pre: 0.9317 Recall: 0.8931 F1: 0.9120 Train AUC: 0.9973 Val AUC: 0.9599 Time: 14.76\n",
      "Epoch: 706 Train Loss: 0.0701 Val Loss: 0.3095 Acc: 0.9112 Pre: 0.9414 Recall: 0.8862 F1: 0.9130 Train AUC: 0.9982 Val AUC: 0.9604 Time: 15.35\n",
      "Epoch: 707 Train Loss: 0.0672 Val Loss: 0.3146 Acc: 0.9094 Pre: 0.9444 Recall: 0.8793 F1: 0.9107 Train AUC: 0.9979 Val AUC: 0.9608 Time: 14.45\n",
      "Epoch: 708 Train Loss: 0.0806 Val Loss: 0.3185 Acc: 0.9076 Pre: 0.9476 Recall: 0.8724 F1: 0.9084 Train AUC: 0.9972 Val AUC: 0.9593 Time: 13.41\n",
      "Epoch: 709 Train Loss: 0.0687 Val Loss: 0.3093 Acc: 0.9130 Pre: 0.9416 Recall: 0.8897 F1: 0.9149 Train AUC: 0.9979 Val AUC: 0.9592 Time: 12.45\n",
      "Epoch: 710 Train Loss: 0.0701 Val Loss: 0.3018 Acc: 0.9094 Pre: 0.9286 Recall: 0.8966 F1: 0.9123 Train AUC: 0.9979 Val AUC: 0.9600 Time: 12.06\n",
      "Epoch: 711 Train Loss: 0.0679 Val Loss: 0.3030 Acc: 0.9149 Pre: 0.9386 Recall: 0.8966 F1: 0.9171 Train AUC: 0.9984 Val AUC: 0.9610 Time: 12.54\n",
      "Epoch: 712 Train Loss: 0.0681 Val Loss: 0.3109 Acc: 0.9112 Pre: 0.9382 Recall: 0.8897 F1: 0.9133 Train AUC: 0.9983 Val AUC: 0.9610 Time: 12.92\n",
      "Epoch: 713 Train Loss: 0.0760 Val Loss: 0.3159 Acc: 0.9004 Pre: 0.9368 Recall: 0.8690 F1: 0.9016 Train AUC: 0.9969 Val AUC: 0.9600 Time: 13.45\n",
      "Epoch: 714 Train Loss: 0.0723 Val Loss: 0.3015 Acc: 0.9112 Pre: 0.9350 Recall: 0.8931 F1: 0.9136 Train AUC: 0.9976 Val AUC: 0.9601 Time: 14.30\n",
      "Epoch: 715 Train Loss: 0.0763 Val Loss: 0.3146 Acc: 0.9094 Pre: 0.9478 Recall: 0.8759 F1: 0.9104 Train AUC: 0.9973 Val AUC: 0.9584 Time: 14.80\n",
      "Epoch: 716 Train Loss: 0.0775 Val Loss: 0.3234 Acc: 0.9112 Pre: 0.9582 Recall: 0.8690 F1: 0.9114 Train AUC: 0.9972 Val AUC: 0.9578 Time: 15.27\n",
      "Epoch: 717 Train Loss: 0.0780 Val Loss: 0.3269 Acc: 0.9149 Pre: 0.9620 Recall: 0.8724 F1: 0.9150 Train AUC: 0.9974 Val AUC: 0.9590 Time: 13.98\n",
      "Epoch: 718 Train Loss: 0.0694 Val Loss: 0.3132 Acc: 0.9130 Pre: 0.9449 Recall: 0.8862 F1: 0.9146 Train AUC: 0.9976 Val AUC: 0.9596 Time: 12.81\n",
      "Epoch: 719 Train Loss: 0.0754 Val Loss: 0.2996 Acc: 0.9185 Pre: 0.9359 Recall: 0.9069 F1: 0.9212 Train AUC: 0.9972 Val AUC: 0.9611 Time: 12.69\n",
      "Epoch: 720 Train Loss: 0.0701 Val Loss: 0.3040 Acc: 0.9149 Pre: 0.9386 Recall: 0.8966 F1: 0.9171 Train AUC: 0.9979 Val AUC: 0.9611 Time: 13.32\n",
      "Epoch: 721 Train Loss: 0.0667 Val Loss: 0.3245 Acc: 0.9130 Pre: 0.9549 Recall: 0.8759 F1: 0.9137 Train AUC: 0.9983 Val AUC: 0.9593 Time: 13.69\n",
      "Epoch: 722 Train Loss: 0.0711 Val Loss: 0.3222 Acc: 0.9130 Pre: 0.9481 Recall: 0.8828 F1: 0.9143 Train AUC: 0.9975 Val AUC: 0.9585 Time: 14.32\n",
      "Epoch: 723 Train Loss: 0.0653 Val Loss: 0.3212 Acc: 0.9112 Pre: 0.9446 Recall: 0.8828 F1: 0.9127 Train AUC: 0.9987 Val AUC: 0.9590 Time: 15.13\n",
      "Epoch: 724 Train Loss: 0.0666 Val Loss: 0.3176 Acc: 0.9112 Pre: 0.9446 Recall: 0.8828 F1: 0.9127 Train AUC: 0.9980 Val AUC: 0.9599 Time: 14.12\n",
      "Epoch: 725 Train Loss: 0.0790 Val Loss: 0.3174 Acc: 0.9130 Pre: 0.9481 Recall: 0.8828 F1: 0.9143 Train AUC: 0.9965 Val AUC: 0.9605 Time: 13.00\n",
      "Epoch: 726 Train Loss: 0.0708 Val Loss: 0.3204 Acc: 0.9149 Pre: 0.9620 Recall: 0.8724 F1: 0.9150 Train AUC: 0.9975 Val AUC: 0.9615 Time: 12.45\n",
      "Epoch: 727 Train Loss: 0.0700 Val Loss: 0.2948 Acc: 0.9040 Pre: 0.9187 Recall: 0.8966 F1: 0.9075 Train AUC: 0.9987 Val AUC: 0.9620 Time: 12.86\n",
      "Epoch: 728 Train Loss: 0.0697 Val Loss: 0.2940 Acc: 0.9058 Pre: 0.9103 Recall: 0.9103 F1: 0.9103 Train AUC: 0.9978 Val AUC: 0.9615 Time: 13.42\n",
      "Epoch: 729 Train Loss: 0.0760 Val Loss: 0.3339 Acc: 0.8986 Pre: 0.9239 Recall: 0.8793 F1: 0.9011 Train AUC: 0.9976 Val AUC: 0.9576 Time: 13.94\n",
      "Epoch: 730 Train Loss: 0.0723 Val Loss: 0.3522 Acc: 0.9022 Pre: 0.9470 Recall: 0.8621 F1: 0.9025 Train AUC: 0.9972 Val AUC: 0.9561 Time: 14.83\n",
      "Epoch: 731 Train Loss: 0.0762 Val Loss: 0.3248 Acc: 0.9112 Pre: 0.9480 Recall: 0.8793 F1: 0.9123 Train AUC: 0.9972 Val AUC: 0.9586 Time: 15.00\n",
      "Epoch: 732 Train Loss: 0.0692 Val Loss: 0.2945 Acc: 0.9094 Pre: 0.9286 Recall: 0.8966 F1: 0.9123 Train AUC: 0.9982 Val AUC: 0.9607 Time: 13.82\n",
      "Epoch: 733 Train Loss: 0.0705 Val Loss: 0.3022 Acc: 0.9094 Pre: 0.9412 Recall: 0.8828 F1: 0.9110 Train AUC: 0.9981 Val AUC: 0.9625 Time: 12.97\n",
      "Epoch: 734 Train Loss: 0.0711 Val Loss: 0.3364 Acc: 0.9058 Pre: 0.9508 Recall: 0.8655 F1: 0.9061 Train AUC: 0.9979 Val AUC: 0.9609 Time: 12.54\n",
      "Epoch: 735 Train Loss: 0.0780 Val Loss: 0.3179 Acc: 0.9112 Pre: 0.9414 Recall: 0.8862 F1: 0.9130 Train AUC: 0.9975 Val AUC: 0.9602 Time: 13.00\n",
      "Epoch: 736 Train Loss: 0.0699 Val Loss: 0.3135 Acc: 0.9094 Pre: 0.9225 Recall: 0.9034 F1: 0.9129 Train AUC: 0.9978 Val AUC: 0.9595 Time: 13.44\n",
      "Epoch: 737 Train Loss: 0.0820 Val Loss: 0.3279 Acc: 0.9076 Pre: 0.9345 Recall: 0.8862 F1: 0.9097 Train AUC: 0.9970 Val AUC: 0.9561 Time: 14.08\n",
      "Epoch: 738 Train Loss: 0.0673 Val Loss: 0.3646 Acc: 0.9058 Pre: 0.9612 Recall: 0.8552 F1: 0.9051 Train AUC: 0.9982 Val AUC: 0.9552 Time: 14.83\n",
      "Epoch: 739 Train Loss: 0.0652 Val Loss: 0.3685 Acc: 0.9058 Pre: 0.9612 Recall: 0.8552 F1: 0.9051 Train AUC: 0.9980 Val AUC: 0.9557 Time: 14.03\n",
      "Epoch: 740 Train Loss: 0.0717 Val Loss: 0.3145 Acc: 0.9149 Pre: 0.9451 Recall: 0.8897 F1: 0.9165 Train AUC: 0.9980 Val AUC: 0.9589 Time: 13.17\n",
      "Epoch: 741 Train Loss: 0.0680 Val Loss: 0.2899 Acc: 0.9094 Pre: 0.9255 Recall: 0.9000 F1: 0.9126 Train AUC: 0.9978 Val AUC: 0.9616 Time: 13.11\n",
      "Epoch: 742 Train Loss: 0.0804 Val Loss: 0.3039 Acc: 0.9130 Pre: 0.9384 Recall: 0.8931 F1: 0.9152 Train AUC: 0.9981 Val AUC: 0.9629 Time: 13.09\n",
      "Epoch: 743 Train Loss: 0.0718 Val Loss: 0.3297 Acc: 0.9004 Pre: 0.9401 Recall: 0.8655 F1: 0.9013 Train AUC: 0.9973 Val AUC: 0.9613 Time: 13.66\n",
      "Epoch: 744 Train Loss: 0.0720 Val Loss: 0.3354 Acc: 0.9076 Pre: 0.9476 Recall: 0.8724 F1: 0.9084 Train AUC: 0.9977 Val AUC: 0.9576 Time: 14.24\n",
      "Epoch: 745 Train Loss: 0.0734 Val Loss: 0.3248 Acc: 0.9022 Pre: 0.9275 Recall: 0.8828 F1: 0.9046 Train AUC: 0.9978 Val AUC: 0.9567 Time: 14.29\n",
      "Epoch: 746 Train Loss: 0.0749 Val Loss: 0.3459 Acc: 0.8967 Pre: 0.9206 Recall: 0.8793 F1: 0.8995 Train AUC: 0.9977 Val AUC: 0.9547 Time: 13.49\n",
      "Epoch: 747 Train Loss: 0.0669 Val Loss: 0.3485 Acc: 0.8986 Pre: 0.9270 Recall: 0.8759 F1: 0.9007 Train AUC: 0.9978 Val AUC: 0.9539 Time: 13.48\n",
      "Epoch: 748 Train Loss: 0.0767 Val Loss: 0.3330 Acc: 0.9130 Pre: 0.9449 Recall: 0.8862 F1: 0.9146 Train AUC: 0.9969 Val AUC: 0.9568 Time: 13.83\n",
      "Epoch: 749 Train Loss: 0.0719 Val Loss: 0.3172 Acc: 0.9094 Pre: 0.9444 Recall: 0.8793 F1: 0.9107 Train AUC: 0.9979 Val AUC: 0.9606 Time: 13.97\n",
      "Epoch: 750 Train Loss: 0.0862 Val Loss: 0.2896 Acc: 0.9112 Pre: 0.9319 Recall: 0.8966 F1: 0.9139 Train AUC: 0.9975 Val AUC: 0.9648 Time: 14.64\n",
      "Epoch: 751 Train Loss: 0.0705 Val Loss: 0.2941 Acc: 0.9149 Pre: 0.9263 Recall: 0.9103 F1: 0.9183 Train AUC: 0.9978 Val AUC: 0.9636 Time: 13.82\n",
      "Epoch: 752 Train Loss: 0.0813 Val Loss: 0.3169 Acc: 0.9076 Pre: 0.9345 Recall: 0.8862 F1: 0.9097 Train AUC: 0.9967 Val AUC: 0.9589 Time: 12.89\n",
      "Epoch: 753 Train Loss: 0.0640 Val Loss: 0.3669 Acc: 0.9040 Pre: 0.9575 Recall: 0.8552 F1: 0.9035 Train AUC: 0.9983 Val AUC: 0.9536 Time: 12.73\n",
      "Epoch: 754 Train Loss: 0.0695 Val Loss: 0.4155 Acc: 0.8949 Pre: 0.9640 Recall: 0.8310 F1: 0.8926 Train AUC: 0.9975 Val AUC: 0.9494 Time: 13.18\n",
      "Epoch: 755 Train Loss: 0.0822 Val Loss: 0.3751 Acc: 0.9004 Pre: 0.9572 Recall: 0.8483 F1: 0.8995 Train AUC: 0.9973 Val AUC: 0.9517 Time: 13.67\n",
      "Epoch: 756 Train Loss: 0.0700 Val Loss: 0.3122 Acc: 0.9094 Pre: 0.9380 Recall: 0.8862 F1: 0.9113 Train AUC: 0.9980 Val AUC: 0.9586 Time: 14.34\n",
      "Epoch: 757 Train Loss: 0.0765 Val Loss: 0.2859 Acc: 0.9167 Pre: 0.9296 Recall: 0.9103 F1: 0.9199 Train AUC: 0.9975 Val AUC: 0.9646 Time: 14.78\n",
      "Epoch: 758 Train Loss: 0.0749 Val Loss: 0.2955 Acc: 0.9167 Pre: 0.9326 Recall: 0.9069 F1: 0.9196 Train AUC: 0.9973 Val AUC: 0.9638 Time: 13.54\n",
      "Epoch: 759 Train Loss: 0.0764 Val Loss: 0.3221 Acc: 0.9022 Pre: 0.9403 Recall: 0.8690 F1: 0.9032 Train AUC: 0.9971 Val AUC: 0.9614 Time: 13.64\n",
      "Epoch: 760 Train Loss: 0.0786 Val Loss: 0.3227 Acc: 0.9130 Pre: 0.9549 Recall: 0.8759 F1: 0.9137 Train AUC: 0.9966 Val AUC: 0.9585 Time: 13.90\n",
      "Epoch: 761 Train Loss: 0.0647 Val Loss: 0.3343 Acc: 0.9094 Pre: 0.9412 Recall: 0.8828 F1: 0.9110 Train AUC: 0.9980 Val AUC: 0.9536 Time: 13.72\n",
      "Epoch: 762 Train Loss: 0.0752 Val Loss: 0.3541 Acc: 0.9040 Pre: 0.9506 Recall: 0.8621 F1: 0.9042 Train AUC: 0.9979 Val AUC: 0.9529 Time: 13.44\n",
      "Epoch: 763 Train Loss: 0.0686 Val Loss: 0.3485 Acc: 0.9058 Pre: 0.9542 Recall: 0.8621 F1: 0.9058 Train AUC: 0.9980 Val AUC: 0.9557 Time: 13.25\n",
      "Epoch: 764 Train Loss: 0.0696 Val Loss: 0.3095 Acc: 0.9130 Pre: 0.9353 Recall: 0.8966 F1: 0.9155 Train AUC: 0.9973 Val AUC: 0.9600 Time: 13.80\n",
      "Epoch: 765 Train Loss: 0.0653 Val Loss: 0.2824 Acc: 0.9112 Pre: 0.9258 Recall: 0.9034 F1: 0.9145 Train AUC: 0.9980 Val AUC: 0.9639 Time: 14.27\n",
      "Epoch: 766 Train Loss: 0.0767 Val Loss: 0.2881 Acc: 0.9167 Pre: 0.9357 Recall: 0.9034 F1: 0.9193 Train AUC: 0.9973 Val AUC: 0.9648 Time: 13.75\n",
      "Epoch: 767 Train Loss: 0.0731 Val Loss: 0.3008 Acc: 0.9112 Pre: 0.9446 Recall: 0.8828 F1: 0.9127 Train AUC: 0.9975 Val AUC: 0.9629 Time: 12.90\n",
      "Epoch: 768 Train Loss: 0.0668 Val Loss: 0.3060 Acc: 0.9076 Pre: 0.9314 Recall: 0.8897 F1: 0.9101 Train AUC: 0.9981 Val AUC: 0.9612 Time: 12.47\n",
      "Epoch: 769 Train Loss: 0.0730 Val Loss: 0.3108 Acc: 0.9058 Pre: 0.9190 Recall: 0.9000 F1: 0.9094 Train AUC: 0.9974 Val AUC: 0.9582 Time: 12.91\n",
      "Epoch: 770 Train Loss: 0.0692 Val Loss: 0.3304 Acc: 0.9076 Pre: 0.9377 Recall: 0.8828 F1: 0.9094 Train AUC: 0.9987 Val AUC: 0.9560 Time: 13.36\n",
      "Epoch: 771 Train Loss: 0.0735 Val Loss: 0.3353 Acc: 0.9094 Pre: 0.9444 Recall: 0.8793 F1: 0.9107 Train AUC: 0.9977 Val AUC: 0.9568 Time: 14.04\n",
      "Epoch: 772 Train Loss: 0.0659 Val Loss: 0.3281 Acc: 0.9130 Pre: 0.9549 Recall: 0.8759 F1: 0.9137 Train AUC: 0.9982 Val AUC: 0.9588 Time: 14.69\n",
      "Epoch: 773 Train Loss: 0.0727 Val Loss: 0.3085 Acc: 0.9112 Pre: 0.9350 Recall: 0.8931 F1: 0.9136 Train AUC: 0.9975 Val AUC: 0.9594 Time: 14.87\n",
      "Epoch: 774 Train Loss: 0.0590 Val Loss: 0.3055 Acc: 0.9112 Pre: 0.9319 Recall: 0.8966 F1: 0.9139 Train AUC: 0.9987 Val AUC: 0.9593 Time: 13.49\n",
      "Epoch: 775 Train Loss: 0.0711 Val Loss: 0.3180 Acc: 0.9076 Pre: 0.9377 Recall: 0.8828 F1: 0.9094 Train AUC: 0.9979 Val AUC: 0.9581 Time: 12.49\n",
      "Epoch: 776 Train Loss: 0.0717 Val Loss: 0.3322 Acc: 0.9022 Pre: 0.9403 Recall: 0.8690 F1: 0.9032 Train AUC: 0.9974 Val AUC: 0.9588 Time: 12.64\n",
      "Epoch: 777 Train Loss: 0.0681 Val Loss: 0.3219 Acc: 0.9094 Pre: 0.9412 Recall: 0.8828 F1: 0.9110 Train AUC: 0.9981 Val AUC: 0.9604 Time: 13.27\n",
      "Epoch: 778 Train Loss: 0.0700 Val Loss: 0.3052 Acc: 0.9076 Pre: 0.9283 Recall: 0.8931 F1: 0.9104 Train AUC: 0.9976 Val AUC: 0.9602 Time: 13.56\n",
      "Epoch: 779 Train Loss: 0.0692 Val Loss: 0.2933 Acc: 0.9094 Pre: 0.9196 Recall: 0.9069 F1: 0.9132 Train AUC: 0.9977 Val AUC: 0.9602 Time: 14.10\n",
      "Epoch: 780 Train Loss: 0.0786 Val Loss: 0.3119 Acc: 0.9022 Pre: 0.9338 Recall: 0.8759 F1: 0.9039 Train AUC: 0.9970 Val AUC: 0.9601 Time: 15.00\n",
      "Epoch: 781 Train Loss: 0.0605 Val Loss: 0.3464 Acc: 0.9004 Pre: 0.9537 Recall: 0.8517 F1: 0.8998 Train AUC: 0.9986 Val AUC: 0.9593 Time: 15.22\n",
      "Epoch: 782 Train Loss: 0.0790 Val Loss: 0.3224 Acc: 0.9058 Pre: 0.9440 Recall: 0.8724 F1: 0.9068 Train AUC: 0.9972 Val AUC: 0.9595 Time: 13.69\n",
      "Epoch: 783 Train Loss: 0.0661 Val Loss: 0.3021 Acc: 0.9094 Pre: 0.9225 Recall: 0.9034 F1: 0.9129 Train AUC: 0.9977 Val AUC: 0.9600 Time: 12.77\n",
      "Epoch: 784 Train Loss: 0.0688 Val Loss: 0.3022 Acc: 0.9076 Pre: 0.9193 Recall: 0.9034 F1: 0.9113 Train AUC: 0.9981 Val AUC: 0.9603 Time: 12.74\n",
      "Epoch: 785 Train Loss: 0.0745 Val Loss: 0.3412 Acc: 0.9076 Pre: 0.9544 Recall: 0.8655 F1: 0.9078 Train AUC: 0.9976 Val AUC: 0.9592 Time: 13.28\n",
      "Epoch: 786 Train Loss: 0.0686 Val Loss: 0.3627 Acc: 0.9004 Pre: 0.9572 Recall: 0.8483 F1: 0.8995 Train AUC: 0.9978 Val AUC: 0.9579 Time: 13.87\n",
      "Epoch: 787 Train Loss: 0.0753 Val Loss: 0.3204 Acc: 0.9022 Pre: 0.9338 Recall: 0.8759 F1: 0.9039 Train AUC: 0.9976 Val AUC: 0.9580 Time: 14.33\n",
      "Epoch: 788 Train Loss: 0.0648 Val Loss: 0.3125 Acc: 0.9094 Pre: 0.9286 Recall: 0.8966 F1: 0.9123 Train AUC: 0.9983 Val AUC: 0.9592 Time: 14.61\n",
      "Epoch: 789 Train Loss: 0.0682 Val Loss: 0.3155 Acc: 0.9058 Pre: 0.9281 Recall: 0.8897 F1: 0.9085 Train AUC: 0.9985 Val AUC: 0.9597 Time: 15.80\n",
      "Epoch: 790 Train Loss: 0.0718 Val Loss: 0.3461 Acc: 0.9022 Pre: 0.9470 Recall: 0.8621 F1: 0.9025 Train AUC: 0.9975 Val AUC: 0.9585 Time: 13.05\n",
      "Epoch: 791 Train Loss: 0.0786 Val Loss: 0.3158 Acc: 0.9076 Pre: 0.9410 Recall: 0.8793 F1: 0.9091 Train AUC: 0.9973 Val AUC: 0.9604 Time: 13.51\n",
      "Epoch: 792 Train Loss: 0.0612 Val Loss: 0.2957 Acc: 0.9040 Pre: 0.9100 Recall: 0.9069 F1: 0.9085 Train AUC: 0.9987 Val AUC: 0.9594 Time: 14.61\n",
      "Epoch: 793 Train Loss: 0.0777 Val Loss: 0.3168 Acc: 0.9058 Pre: 0.9281 Recall: 0.8897 F1: 0.9085 Train AUC: 0.9984 Val AUC: 0.9584 Time: 13.91\n",
      "Epoch: 794 Train Loss: 0.0598 Val Loss: 0.3486 Acc: 0.9022 Pre: 0.9470 Recall: 0.8621 F1: 0.9025 Train AUC: 0.9984 Val AUC: 0.9573 Time: 14.17\n",
      "Epoch: 795 Train Loss: 0.0772 Val Loss: 0.3408 Acc: 0.9130 Pre: 0.9549 Recall: 0.8759 F1: 0.9137 Train AUC: 0.9966 Val AUC: 0.9580 Time: 14.49\n",
      "Epoch: 796 Train Loss: 0.0723 Val Loss: 0.3143 Acc: 0.9149 Pre: 0.9451 Recall: 0.8897 F1: 0.9165 Train AUC: 0.9973 Val AUC: 0.9592 Time: 14.60\n",
      "Epoch: 797 Train Loss: 0.0680 Val Loss: 0.2877 Acc: 0.9130 Pre: 0.9291 Recall: 0.9034 F1: 0.9161 Train AUC: 0.9981 Val AUC: 0.9621 Time: 12.60\n",
      "Epoch: 798 Train Loss: 0.0712 Val Loss: 0.2948 Acc: 0.9167 Pre: 0.9453 Recall: 0.8931 F1: 0.9184 Train AUC: 0.9987 Val AUC: 0.9646 Time: 13.06\n",
      "Epoch: 799 Train Loss: 0.0660 Val Loss: 0.3135 Acc: 0.9004 Pre: 0.9401 Recall: 0.8655 F1: 0.9013 Train AUC: 0.9984 Val AUC: 0.9640 Time: 13.52\n",
      "Epoch: 800 Train Loss: 0.0726 Val Loss: 0.3121 Acc: 0.9022 Pre: 0.9338 Recall: 0.8759 F1: 0.9039 Train AUC: 0.9974 Val AUC: 0.9625 Time: 13.52\n",
      "Epoch: 801 Train Loss: 0.0715 Val Loss: 0.3178 Acc: 0.9022 Pre: 0.9338 Recall: 0.8759 F1: 0.9039 Train AUC: 0.9972 Val AUC: 0.9591 Time: 14.60\n",
      "Epoch: 802 Train Loss: 0.0663 Val Loss: 0.3366 Acc: 0.9094 Pre: 0.9444 Recall: 0.8793 F1: 0.9107 Train AUC: 0.9984 Val AUC: 0.9544 Time: 14.62\n",
      "Epoch: 803 Train Loss: 0.0692 Val Loss: 0.3375 Acc: 0.9058 Pre: 0.9343 Recall: 0.8828 F1: 0.9078 Train AUC: 0.9982 Val AUC: 0.9551 Time: 14.46\n",
      "Epoch: 804 Train Loss: 0.0692 Val Loss: 0.3340 Acc: 0.8986 Pre: 0.9398 Recall: 0.8621 F1: 0.8993 Train AUC: 0.9979 Val AUC: 0.9571 Time: 12.86\n",
      "Epoch: 805 Train Loss: 0.0669 Val Loss: 0.3104 Acc: 0.9076 Pre: 0.9345 Recall: 0.8862 F1: 0.9097 Train AUC: 0.9986 Val AUC: 0.9614 Time: 12.86\n",
      "Epoch: 806 Train Loss: 0.0683 Val Loss: 0.2913 Acc: 0.9167 Pre: 0.9357 Recall: 0.9034 F1: 0.9193 Train AUC: 0.9982 Val AUC: 0.9634 Time: 13.53\n",
      "Epoch: 807 Train Loss: 0.0732 Val Loss: 0.2933 Acc: 0.9221 Pre: 0.9458 Recall: 0.9034 F1: 0.9242 Train AUC: 0.9976 Val AUC: 0.9636 Time: 14.31\n",
      "Epoch: 808 Train Loss: 0.0626 Val Loss: 0.3133 Acc: 0.9112 Pre: 0.9480 Recall: 0.8793 F1: 0.9123 Train AUC: 0.9985 Val AUC: 0.9621 Time: 14.83\n",
      "Epoch: 809 Train Loss: 0.0739 Val Loss: 0.3403 Acc: 0.9022 Pre: 0.9470 Recall: 0.8621 F1: 0.9025 Train AUC: 0.9973 Val AUC: 0.9588 Time: 13.90\n",
      "Epoch: 810 Train Loss: 0.0665 Val Loss: 0.3471 Acc: 0.9058 Pre: 0.9542 Recall: 0.8621 F1: 0.9058 Train AUC: 0.9981 Val AUC: 0.9576 Time: 14.62\n",
      "Epoch: 811 Train Loss: 0.0637 Val Loss: 0.3247 Acc: 0.9058 Pre: 0.9281 Recall: 0.8897 F1: 0.9085 Train AUC: 0.9982 Val AUC: 0.9572 Time: 12.64\n",
      "Epoch: 812 Train Loss: 0.0646 Val Loss: 0.3101 Acc: 0.8913 Pre: 0.8966 Recall: 0.8966 F1: 0.8966 Train AUC: 0.9982 Val AUC: 0.9580 Time: 13.37\n",
      "Epoch: 813 Train Loss: 0.0764 Val Loss: 0.3251 Acc: 0.9022 Pre: 0.9275 Recall: 0.8828 F1: 0.9046 Train AUC: 0.9979 Val AUC: 0.9576 Time: 13.84\n",
      "Epoch: 814 Train Loss: 0.0653 Val Loss: 0.3625 Acc: 0.9022 Pre: 0.9538 Recall: 0.8552 F1: 0.9018 Train AUC: 0.9982 Val AUC: 0.9580 Time: 14.45\n",
      "Epoch: 815 Train Loss: 0.0750 Val Loss: 0.3333 Acc: 0.9004 Pre: 0.9401 Recall: 0.8655 F1: 0.9013 Train AUC: 0.9983 Val AUC: 0.9596 Time: 14.82\n",
      "Epoch: 816 Train Loss: 0.0634 Val Loss: 0.3088 Acc: 0.9167 Pre: 0.9296 Recall: 0.9103 F1: 0.9199 Train AUC: 0.9984 Val AUC: 0.9596 Time: 13.50\n",
      "Epoch: 817 Train Loss: 0.0913 Val Loss: 0.3033 Acc: 0.9185 Pre: 0.9391 Recall: 0.9034 F1: 0.9209 Train AUC: 0.9965 Val AUC: 0.9622 Time: 14.38\n",
      "Epoch: 818 Train Loss: 0.0686 Val Loss: 0.3394 Acc: 0.9112 Pre: 0.9582 Recall: 0.8690 F1: 0.9114 Train AUC: 0.9979 Val AUC: 0.9611 Time: 13.45\n",
      "Epoch: 819 Train Loss: 0.0743 Val Loss: 0.3342 Acc: 0.9058 Pre: 0.9508 Recall: 0.8655 F1: 0.9061 Train AUC: 0.9974 Val AUC: 0.9601 Time: 12.95\n",
      "Epoch: 820 Train Loss: 0.0686 Val Loss: 0.2993 Acc: 0.8986 Pre: 0.9179 Recall: 0.8862 F1: 0.9018 Train AUC: 0.9980 Val AUC: 0.9603 Time: 12.84\n",
      "Epoch: 821 Train Loss: 0.0701 Val Loss: 0.3015 Acc: 0.9076 Pre: 0.9164 Recall: 0.9069 F1: 0.9116 Train AUC: 0.9980 Val AUC: 0.9585 Time: 13.04\n",
      "Epoch: 822 Train Loss: 0.0722 Val Loss: 0.3236 Acc: 0.9004 Pre: 0.9273 Recall: 0.8793 F1: 0.9027 Train AUC: 0.9983 Val AUC: 0.9581 Time: 14.33\n",
      "Epoch: 823 Train Loss: 0.0663 Val Loss: 0.3511 Acc: 0.9112 Pre: 0.9513 Recall: 0.8759 F1: 0.9120 Train AUC: 0.9981 Val AUC: 0.9584 Time: 14.56\n",
      "Epoch: 824 Train Loss: 0.0730 Val Loss: 0.3216 Acc: 0.9112 Pre: 0.9350 Recall: 0.8931 F1: 0.9136 Train AUC: 0.9974 Val AUC: 0.9603 Time: 14.82\n",
      "Epoch: 825 Train Loss: 0.0708 Val Loss: 0.3050 Acc: 0.9058 Pre: 0.9161 Recall: 0.9034 F1: 0.9097 Train AUC: 0.9976 Val AUC: 0.9608 Time: 13.76\n",
      "Epoch: 826 Train Loss: 0.0639 Val Loss: 0.3114 Acc: 0.9112 Pre: 0.9258 Recall: 0.9034 F1: 0.9145 Train AUC: 0.9986 Val AUC: 0.9593 Time: 13.15\n",
      "Epoch: 827 Train Loss: 0.0686 Val Loss: 0.3394 Acc: 0.9076 Pre: 0.9345 Recall: 0.8862 F1: 0.9097 Train AUC: 0.9979 Val AUC: 0.9576 Time: 14.23\n",
      "Epoch: 828 Train Loss: 0.0599 Val Loss: 0.3534 Acc: 0.8986 Pre: 0.9333 Recall: 0.8690 F1: 0.9000 Train AUC: 0.9986 Val AUC: 0.9572 Time: 14.51\n",
      "Epoch: 829 Train Loss: 0.0664 Val Loss: 0.3355 Acc: 0.9040 Pre: 0.9341 Recall: 0.8793 F1: 0.9059 Train AUC: 0.9977 Val AUC: 0.9579 Time: 14.43\n",
      "Epoch: 830 Train Loss: 0.0657 Val Loss: 0.3176 Acc: 0.9094 Pre: 0.9348 Recall: 0.8897 F1: 0.9117 Train AUC: 0.9984 Val AUC: 0.9596 Time: 13.23\n",
      "Epoch: 831 Train Loss: 0.0598 Val Loss: 0.3034 Acc: 0.9076 Pre: 0.9193 Recall: 0.9034 F1: 0.9113 Train AUC: 0.9985 Val AUC: 0.9609 Time: 12.52\n",
      "Epoch: 832 Train Loss: 0.0702 Val Loss: 0.2976 Acc: 0.9094 Pre: 0.9255 Recall: 0.9000 F1: 0.9126 Train AUC: 0.9977 Val AUC: 0.9616 Time: 12.35\n",
      "Epoch: 833 Train Loss: 0.0642 Val Loss: 0.2966 Acc: 0.9076 Pre: 0.9253 Recall: 0.8966 F1: 0.9107 Train AUC: 0.9984 Val AUC: 0.9615 Time: 12.90\n",
      "Epoch: 834 Train Loss: 0.0660 Val Loss: 0.3222 Acc: 0.9094 Pre: 0.9478 Recall: 0.8759 F1: 0.9104 Train AUC: 0.9983 Val AUC: 0.9616 Time: 13.26\n",
      "Epoch: 835 Train Loss: 0.0632 Val Loss: 0.3368 Acc: 0.8986 Pre: 0.9535 Recall: 0.8483 F1: 0.8978 Train AUC: 0.9985 Val AUC: 0.9615 Time: 13.77\n",
      "Epoch: 836 Train Loss: 0.0806 Val Loss: 0.3093 Acc: 0.9112 Pre: 0.9319 Recall: 0.8966 F1: 0.9139 Train AUC: 0.9973 Val AUC: 0.9618 Time: 14.72\n",
      "Epoch: 837 Train Loss: 0.0702 Val Loss: 0.3065 Acc: 0.9167 Pre: 0.9296 Recall: 0.9103 F1: 0.9199 Train AUC: 0.9976 Val AUC: 0.9605 Time: 15.16\n",
      "Epoch: 838 Train Loss: 0.0685 Val Loss: 0.3270 Acc: 0.9040 Pre: 0.9405 Recall: 0.8724 F1: 0.9052 Train AUC: 0.9982 Val AUC: 0.9581 Time: 15.19\n",
      "Epoch: 839 Train Loss: 0.0705 Val Loss: 0.3531 Acc: 0.8967 Pre: 0.9498 Recall: 0.8483 F1: 0.8962 Train AUC: 0.9977 Val AUC: 0.9572 Time: 12.40\n",
      "Epoch: 840 Train Loss: 0.0719 Val Loss: 0.3296 Acc: 0.8967 Pre: 0.9363 Recall: 0.8621 F1: 0.8977 Train AUC: 0.9975 Val AUC: 0.9581 Time: 12.65\n",
      "Epoch: 841 Train Loss: 0.0645 Val Loss: 0.3122 Acc: 0.9022 Pre: 0.9245 Recall: 0.8862 F1: 0.9049 Train AUC: 0.9985 Val AUC: 0.9591 Time: 13.20\n",
      "Epoch: 842 Train Loss: 0.0704 Val Loss: 0.3029 Acc: 0.9076 Pre: 0.9283 Recall: 0.8931 F1: 0.9104 Train AUC: 0.9979 Val AUC: 0.9601 Time: 13.07\n",
      "Epoch: 843 Train Loss: 0.0637 Val Loss: 0.3129 Acc: 0.9076 Pre: 0.9345 Recall: 0.8862 F1: 0.9097 Train AUC: 0.9984 Val AUC: 0.9602 Time: 14.35\n",
      "Epoch: 844 Train Loss: 0.0668 Val Loss: 0.3178 Acc: 0.9040 Pre: 0.9438 Recall: 0.8690 F1: 0.9048 Train AUC: 0.9980 Val AUC: 0.9613 Time: 14.45\n",
      "Epoch: 845 Train Loss: 0.0694 Val Loss: 0.3068 Acc: 0.9094 Pre: 0.9412 Recall: 0.8828 F1: 0.9110 Train AUC: 0.9979 Val AUC: 0.9606 Time: 14.88\n",
      "Epoch: 846 Train Loss: 0.0663 Val Loss: 0.3010 Acc: 0.9130 Pre: 0.9353 Recall: 0.8966 F1: 0.9155 Train AUC: 0.9982 Val AUC: 0.9605 Time: 13.30\n",
      "Epoch: 847 Train Loss: 0.0708 Val Loss: 0.3075 Acc: 0.9040 Pre: 0.9247 Recall: 0.8897 F1: 0.9069 Train AUC: 0.9974 Val AUC: 0.9597 Time: 13.47\n",
      "Epoch: 848 Train Loss: 0.0624 Val Loss: 0.3188 Acc: 0.9094 Pre: 0.9380 Recall: 0.8862 F1: 0.9113 Train AUC: 0.9982 Val AUC: 0.9582 Time: 13.87\n",
      "Epoch: 849 Train Loss: 0.0710 Val Loss: 0.3163 Acc: 0.9149 Pre: 0.9418 Recall: 0.8931 F1: 0.9168 Train AUC: 0.9977 Val AUC: 0.9589 Time: 13.02\n",
      "Epoch: 850 Train Loss: 0.0621 Val Loss: 0.3138 Acc: 0.9221 Pre: 0.9591 Recall: 0.8897 F1: 0.9231 Train AUC: 0.9985 Val AUC: 0.9599 Time: 13.88\n",
      "Epoch: 851 Train Loss: 0.0660 Val Loss: 0.3251 Acc: 0.9076 Pre: 0.9442 Recall: 0.8759 F1: 0.9088 Train AUC: 0.9980 Val AUC: 0.9586 Time: 14.80\n",
      "Epoch: 852 Train Loss: 0.0689 Val Loss: 0.3028 Acc: 0.9130 Pre: 0.9353 Recall: 0.8966 F1: 0.9155 Train AUC: 0.9985 Val AUC: 0.9607 Time: 14.36\n",
      "Epoch: 853 Train Loss: 0.0680 Val Loss: 0.3057 Acc: 0.9203 Pre: 0.9424 Recall: 0.9034 F1: 0.9225 Train AUC: 0.9984 Val AUC: 0.9625 Time: 13.38\n",
      "Epoch: 854 Train Loss: 0.0612 Val Loss: 0.3234 Acc: 0.9149 Pre: 0.9386 Recall: 0.8966 F1: 0.9171 Train AUC: 0.9983 Val AUC: 0.9613 Time: 13.87\n",
      "Epoch: 855 Train Loss: 0.0720 Val Loss: 0.3192 Acc: 0.9167 Pre: 0.9388 Recall: 0.9000 F1: 0.9190 Train AUC: 0.9972 Val AUC: 0.9604 Time: 12.49\n",
      "Epoch: 856 Train Loss: 0.0704 Val Loss: 0.3109 Acc: 0.9094 Pre: 0.9286 Recall: 0.8966 F1: 0.9123 Train AUC: 0.9976 Val AUC: 0.9583 Time: 13.20\n",
      "Epoch: 857 Train Loss: 0.0715 Val Loss: 0.3381 Acc: 0.9094 Pre: 0.9511 Recall: 0.8724 F1: 0.9101 Train AUC: 0.9982 Val AUC: 0.9554 Time: 13.66\n",
      "Epoch: 858 Train Loss: 0.0700 Val Loss: 0.3660 Acc: 0.9040 Pre: 0.9540 Recall: 0.8586 F1: 0.9038 Train AUC: 0.9978 Val AUC: 0.9540 Time: 14.66\n",
      "Epoch: 859 Train Loss: 0.0647 Val Loss: 0.3531 Acc: 0.8967 Pre: 0.9396 Recall: 0.8586 F1: 0.8973 Train AUC: 0.9983 Val AUC: 0.9554 Time: 15.18\n",
      "Epoch: 860 Train Loss: 0.0707 Val Loss: 0.3155 Acc: 0.9094 Pre: 0.9317 Recall: 0.8931 F1: 0.9120 Train AUC: 0.9976 Val AUC: 0.9597 Time: 15.76\n",
      "Epoch: 861 Train Loss: 0.0576 Val Loss: 0.2898 Acc: 0.9094 Pre: 0.9167 Recall: 0.9103 F1: 0.9135 Train AUC: 0.9988 Val AUC: 0.9630 Time: 14.58\n",
      "Epoch: 862 Train Loss: 0.0638 Val Loss: 0.2925 Acc: 0.9167 Pre: 0.9296 Recall: 0.9103 F1: 0.9199 Train AUC: 0.9987 Val AUC: 0.9636 Time: 13.21\n",
      "Epoch: 863 Train Loss: 0.0816 Val Loss: 0.3248 Acc: 0.9112 Pre: 0.9480 Recall: 0.8793 F1: 0.9123 Train AUC: 0.9967 Val AUC: 0.9613 Time: 12.86\n",
      "Epoch: 864 Train Loss: 0.0630 Val Loss: 0.3271 Acc: 0.9022 Pre: 0.9338 Recall: 0.8759 F1: 0.9039 Train AUC: 0.9986 Val AUC: 0.9587 Time: 14.00\n",
      "Epoch: 865 Train Loss: 0.0614 Val Loss: 0.3360 Acc: 0.9040 Pre: 0.9373 Recall: 0.8759 F1: 0.9055 Train AUC: 0.9985 Val AUC: 0.9558 Time: 14.35\n",
      "Epoch: 866 Train Loss: 0.0670 Val Loss: 0.3460 Acc: 0.9022 Pre: 0.9403 Recall: 0.8690 F1: 0.9032 Train AUC: 0.9982 Val AUC: 0.9558 Time: 14.92\n",
      "Epoch: 867 Train Loss: 0.0671 Val Loss: 0.3423 Acc: 0.9130 Pre: 0.9549 Recall: 0.8759 F1: 0.9137 Train AUC: 0.9981 Val AUC: 0.9576 Time: 15.79\n",
      "Epoch: 868 Train Loss: 0.0732 Val Loss: 0.3106 Acc: 0.9130 Pre: 0.9321 Recall: 0.9000 F1: 0.9158 Train AUC: 0.9973 Val AUC: 0.9596 Time: 14.71\n",
      "Epoch: 869 Train Loss: 0.0733 Val Loss: 0.2995 Acc: 0.9130 Pre: 0.9291 Recall: 0.9034 F1: 0.9161 Train AUC: 0.9975 Val AUC: 0.9610 Time: 14.30\n",
      "Epoch: 870 Train Loss: 0.0625 Val Loss: 0.3166 Acc: 0.9130 Pre: 0.9321 Recall: 0.9000 F1: 0.9158 Train AUC: 0.9989 Val AUC: 0.9603 Time: 12.91\n",
      "Epoch: 871 Train Loss: 0.0660 Val Loss: 0.3357 Acc: 0.9076 Pre: 0.9410 Recall: 0.8793 F1: 0.9091 Train AUC: 0.9981 Val AUC: 0.9594 Time: 13.26\n",
      "Epoch: 872 Train Loss: 0.0710 Val Loss: 0.3395 Acc: 0.9076 Pre: 0.9410 Recall: 0.8793 F1: 0.9091 Train AUC: 0.9973 Val AUC: 0.9581 Time: 13.28\n",
      "Epoch: 873 Train Loss: 0.0634 Val Loss: 0.3277 Acc: 0.9130 Pre: 0.9416 Recall: 0.8897 F1: 0.9149 Train AUC: 0.9983 Val AUC: 0.9570 Time: 14.22\n",
      "Epoch: 874 Train Loss: 0.0614 Val Loss: 0.3261 Acc: 0.9058 Pre: 0.9407 Recall: 0.8759 F1: 0.9071 Train AUC: 0.9985 Val AUC: 0.9568 Time: 14.83\n",
      "Epoch: 875 Train Loss: 0.0715 Val Loss: 0.3304 Acc: 0.9167 Pre: 0.9586 Recall: 0.8793 F1: 0.9173 Train AUC: 0.9978 Val AUC: 0.9593 Time: 15.16\n",
      "Epoch: 876 Train Loss: 0.0601 Val Loss: 0.3174 Acc: 0.9149 Pre: 0.9451 Recall: 0.8897 F1: 0.9165 Train AUC: 0.9984 Val AUC: 0.9606 Time: 13.36\n",
      "Epoch: 877 Train Loss: 0.0636 Val Loss: 0.2997 Acc: 0.9112 Pre: 0.9258 Recall: 0.9034 F1: 0.9145 Train AUC: 0.9986 Val AUC: 0.9617 Time: 13.01\n",
      "Epoch: 878 Train Loss: 0.0598 Val Loss: 0.3084 Acc: 0.9130 Pre: 0.9291 Recall: 0.9034 F1: 0.9161 Train AUC: 0.9989 Val AUC: 0.9595 Time: 13.83\n",
      "Epoch: 879 Train Loss: 0.0688 Val Loss: 0.3177 Acc: 0.9112 Pre: 0.9288 Recall: 0.9000 F1: 0.9142 Train AUC: 0.9981 Val AUC: 0.9588 Time: 14.38\n",
      "Epoch: 880 Train Loss: 0.0623 Val Loss: 0.3138 Acc: 0.9130 Pre: 0.9353 Recall: 0.8966 F1: 0.9155 Train AUC: 0.9986 Val AUC: 0.9600 Time: 14.24\n",
      "Epoch: 881 Train Loss: 0.0639 Val Loss: 0.3324 Acc: 0.9112 Pre: 0.9547 Recall: 0.8724 F1: 0.9117 Train AUC: 0.9985 Val AUC: 0.9615 Time: 12.51\n",
      "Epoch: 882 Train Loss: 0.0714 Val Loss: 0.3125 Acc: 0.9149 Pre: 0.9418 Recall: 0.8931 F1: 0.9168 Train AUC: 0.9980 Val AUC: 0.9618 Time: 12.60\n",
      "Epoch: 883 Train Loss: 0.0689 Val Loss: 0.3086 Acc: 0.9112 Pre: 0.9350 Recall: 0.8931 F1: 0.9136 Train AUC: 0.9980 Val AUC: 0.9604 Time: 12.96\n",
      "Epoch: 884 Train Loss: 0.0606 Val Loss: 0.3167 Acc: 0.9040 Pre: 0.9247 Recall: 0.8897 F1: 0.9069 Train AUC: 0.9988 Val AUC: 0.9575 Time: 13.80\n",
      "Epoch: 885 Train Loss: 0.0785 Val Loss: 0.3279 Acc: 0.9112 Pre: 0.9414 Recall: 0.8862 F1: 0.9130 Train AUC: 0.9973 Val AUC: 0.9583 Time: 13.88\n",
      "Epoch: 886 Train Loss: 0.0668 Val Loss: 0.3322 Acc: 0.9094 Pre: 0.9511 Recall: 0.8724 F1: 0.9101 Train AUC: 0.9980 Val AUC: 0.9593 Time: 14.68\n",
      "Epoch: 887 Train Loss: 0.0664 Val Loss: 0.3217 Acc: 0.9149 Pre: 0.9551 Recall: 0.8793 F1: 0.9156 Train AUC: 0.9987 Val AUC: 0.9589 Time: 15.30\n",
      "Epoch: 888 Train Loss: 0.0688 Val Loss: 0.3169 Acc: 0.9185 Pre: 0.9455 Recall: 0.8966 F1: 0.9204 Train AUC: 0.9982 Val AUC: 0.9589 Time: 14.11\n",
      "Epoch: 889 Train Loss: 0.0687 Val Loss: 0.3202 Acc: 0.9203 Pre: 0.9424 Recall: 0.9034 F1: 0.9225 Train AUC: 0.9978 Val AUC: 0.9600 Time: 12.66\n",
      "Epoch: 890 Train Loss: 0.0695 Val Loss: 0.3306 Acc: 0.9094 Pre: 0.9348 Recall: 0.8897 F1: 0.9117 Train AUC: 0.9975 Val AUC: 0.9591 Time: 12.84\n",
      "Epoch: 891 Train Loss: 0.0793 Val Loss: 0.3222 Acc: 0.9094 Pre: 0.9348 Recall: 0.8897 F1: 0.9117 Train AUC: 0.9963 Val AUC: 0.9591 Time: 13.47\n",
      "Epoch: 892 Train Loss: 0.0677 Val Loss: 0.3354 Acc: 0.9022 Pre: 0.9470 Recall: 0.8621 F1: 0.9025 Train AUC: 0.9979 Val AUC: 0.9593 Time: 13.77\n",
      "Epoch: 893 Train Loss: 0.0689 Val Loss: 0.3300 Acc: 0.9040 Pre: 0.9472 Recall: 0.8655 F1: 0.9045 Train AUC: 0.9978 Val AUC: 0.9574 Time: 14.55\n",
      "Epoch: 894 Train Loss: 0.0657 Val Loss: 0.3205 Acc: 0.9076 Pre: 0.9253 Recall: 0.8966 F1: 0.9107 Train AUC: 0.9984 Val AUC: 0.9565 Time: 14.72\n",
      "Epoch: 895 Train Loss: 0.0663 Val Loss: 0.3192 Acc: 0.9076 Pre: 0.9253 Recall: 0.8966 F1: 0.9107 Train AUC: 0.9984 Val AUC: 0.9570 Time: 14.86\n",
      "Epoch: 896 Train Loss: 0.0663 Val Loss: 0.3213 Acc: 0.9040 Pre: 0.9309 Recall: 0.8828 F1: 0.9062 Train AUC: 0.9981 Val AUC: 0.9572 Time: 12.22\n",
      "Epoch: 897 Train Loss: 0.0622 Val Loss: 0.3304 Acc: 0.9022 Pre: 0.9403 Recall: 0.8690 F1: 0.9032 Train AUC: 0.9983 Val AUC: 0.9572 Time: 12.56\n",
      "Epoch: 898 Train Loss: 0.0681 Val Loss: 0.3221 Acc: 0.9004 Pre: 0.9304 Recall: 0.8759 F1: 0.9023 Train AUC: 0.9980 Val AUC: 0.9592 Time: 12.60\n",
      "Epoch: 899 Train Loss: 0.0688 Val Loss: 0.2982 Acc: 0.9130 Pre: 0.9353 Recall: 0.8966 F1: 0.9155 Train AUC: 0.9979 Val AUC: 0.9618 Time: 13.54\n",
      "Epoch: 900 Train Loss: 0.0693 Val Loss: 0.3010 Acc: 0.9130 Pre: 0.9353 Recall: 0.8966 F1: 0.9155 Train AUC: 0.9981 Val AUC: 0.9633 Time: 14.15\n",
      "Epoch: 901 Train Loss: 0.0623 Val Loss: 0.3147 Acc: 0.9076 Pre: 0.9377 Recall: 0.8828 F1: 0.9094 Train AUC: 0.9984 Val AUC: 0.9617 Time: 14.52\n",
      "Epoch: 902 Train Loss: 0.0717 Val Loss: 0.3190 Acc: 0.9094 Pre: 0.9412 Recall: 0.8828 F1: 0.9110 Train AUC: 0.9972 Val AUC: 0.9606 Time: 15.04\n",
      "Epoch: 903 Train Loss: 0.0574 Val Loss: 0.3232 Acc: 0.9076 Pre: 0.9410 Recall: 0.8793 F1: 0.9091 Train AUC: 0.9987 Val AUC: 0.9583 Time: 14.76\n",
      "Epoch: 904 Train Loss: 0.0677 Val Loss: 0.3306 Acc: 0.9004 Pre: 0.9273 Recall: 0.8793 F1: 0.9027 Train AUC: 0.9981 Val AUC: 0.9534 Time: 14.09\n",
      "Epoch: 905 Train Loss: 0.0801 Val Loss: 0.3294 Acc: 0.9058 Pre: 0.9440 Recall: 0.8724 F1: 0.9068 Train AUC: 0.9971 Val AUC: 0.9560 Time: 13.43\n",
      "Epoch: 906 Train Loss: 0.0653 Val Loss: 0.3353 Acc: 0.9149 Pre: 0.9483 Recall: 0.8862 F1: 0.9162 Train AUC: 0.9983 Val AUC: 0.9586 Time: 13.18\n",
      "Epoch: 907 Train Loss: 0.0715 Val Loss: 0.3185 Acc: 0.9257 Pre: 0.9495 Recall: 0.9069 F1: 0.9277 Train AUC: 0.9971 Val AUC: 0.9617 Time: 12.48\n",
      "Epoch: 908 Train Loss: 0.0817 Val Loss: 0.2945 Acc: 0.9203 Pre: 0.9362 Recall: 0.9103 F1: 0.9231 Train AUC: 0.9961 Val AUC: 0.9631 Time: 13.27\n",
      "Epoch: 909 Train Loss: 0.0788 Val Loss: 0.2993 Acc: 0.9167 Pre: 0.9357 Recall: 0.9034 F1: 0.9193 Train AUC: 0.9970 Val AUC: 0.9619 Time: 13.24\n",
      "Epoch: 910 Train Loss: 0.0693 Val Loss: 0.3333 Acc: 0.9022 Pre: 0.9403 Recall: 0.8690 F1: 0.9032 Train AUC: 0.9980 Val AUC: 0.9597 Time: 14.34\n",
      "Epoch: 911 Train Loss: 0.0813 Val Loss: 0.3330 Acc: 0.9004 Pre: 0.9336 Recall: 0.8724 F1: 0.9020 Train AUC: 0.9967 Val AUC: 0.9573 Time: 14.51\n",
      "Epoch: 912 Train Loss: 0.0720 Val Loss: 0.3080 Acc: 0.9094 Pre: 0.9225 Recall: 0.9034 F1: 0.9129 Train AUC: 0.9971 Val AUC: 0.9595 Time: 14.50\n",
      "Epoch: 913 Train Loss: 0.0638 Val Loss: 0.3023 Acc: 0.9094 Pre: 0.9255 Recall: 0.9000 F1: 0.9126 Train AUC: 0.9987 Val AUC: 0.9601 Time: 15.08\n",
      "Epoch: 914 Train Loss: 0.0607 Val Loss: 0.3015 Acc: 0.9058 Pre: 0.9281 Recall: 0.8897 F1: 0.9085 Train AUC: 0.9988 Val AUC: 0.9614 Time: 12.37\n",
      "Epoch: 915 Train Loss: 0.0665 Val Loss: 0.3075 Acc: 0.9149 Pre: 0.9451 Recall: 0.8897 F1: 0.9165 Train AUC: 0.9986 Val AUC: 0.9618 Time: 12.69\n",
      "Epoch: 916 Train Loss: 0.0679 Val Loss: 0.3112 Acc: 0.9112 Pre: 0.9414 Recall: 0.8862 F1: 0.9130 Train AUC: 0.9981 Val AUC: 0.9607 Time: 12.53\n",
      "Epoch: 917 Train Loss: 0.0704 Val Loss: 0.3105 Acc: 0.9112 Pre: 0.9414 Recall: 0.8862 F1: 0.9130 Train AUC: 0.9978 Val AUC: 0.9602 Time: 12.66\n",
      "Epoch: 918 Train Loss: 0.0569 Val Loss: 0.3180 Acc: 0.9040 Pre: 0.9405 Recall: 0.8724 F1: 0.9052 Train AUC: 0.9987 Val AUC: 0.9605 Time: 13.66\n",
      "Epoch: 919 Train Loss: 0.0673 Val Loss: 0.3289 Acc: 0.9076 Pre: 0.9476 Recall: 0.8724 F1: 0.9084 Train AUC: 0.9980 Val AUC: 0.9594 Time: 14.22\n",
      "Epoch: 920 Train Loss: 0.0666 Val Loss: 0.3166 Acc: 0.9058 Pre: 0.9250 Recall: 0.8931 F1: 0.9088 Train AUC: 0.9984 Val AUC: 0.9580 Time: 14.74\n",
      "Epoch: 921 Train Loss: 0.0700 Val Loss: 0.3207 Acc: 0.9130 Pre: 0.9321 Recall: 0.9000 F1: 0.9158 Train AUC: 0.9978 Val AUC: 0.9570 Time: 15.34\n",
      "Epoch: 922 Train Loss: 0.0679 Val Loss: 0.3388 Acc: 0.9094 Pre: 0.9444 Recall: 0.8793 F1: 0.9107 Train AUC: 0.9982 Val AUC: 0.9581 Time: 13.88\n",
      "Epoch: 923 Train Loss: 0.0674 Val Loss: 0.3370 Acc: 0.9130 Pre: 0.9583 Recall: 0.8724 F1: 0.9134 Train AUC: 0.9978 Val AUC: 0.9593 Time: 14.95\n",
      "Epoch: 924 Train Loss: 0.0702 Val Loss: 0.3228 Acc: 0.9094 Pre: 0.9412 Recall: 0.8828 F1: 0.9110 Train AUC: 0.9975 Val AUC: 0.9592 Time: 12.48\n",
      "Epoch: 925 Train Loss: 0.0674 Val Loss: 0.3214 Acc: 0.9130 Pre: 0.9416 Recall: 0.8897 F1: 0.9149 Train AUC: 0.9983 Val AUC: 0.9583 Time: 12.74\n",
      "Epoch: 926 Train Loss: 0.0693 Val Loss: 0.3177 Acc: 0.9130 Pre: 0.9416 Recall: 0.8897 F1: 0.9149 Train AUC: 0.9982 Val AUC: 0.9600 Time: 12.61\n",
      "Epoch: 927 Train Loss: 0.0660 Val Loss: 0.3256 Acc: 0.9058 Pre: 0.9440 Recall: 0.8724 F1: 0.9068 Train AUC: 0.9980 Val AUC: 0.9623 Time: 12.64\n",
      "Epoch: 928 Train Loss: 0.0704 Val Loss: 0.3104 Acc: 0.9112 Pre: 0.9480 Recall: 0.8793 F1: 0.9123 Train AUC: 0.9972 Val AUC: 0.9643 Time: 12.49\n",
      "Epoch: 929 Train Loss: 0.0647 Val Loss: 0.2892 Acc: 0.9167 Pre: 0.9296 Recall: 0.9103 F1: 0.9199 Train AUC: 0.9983 Val AUC: 0.9626 Time: 13.77\n",
      "Epoch: 930 Train Loss: 0.0695 Val Loss: 0.3109 Acc: 0.8986 Pre: 0.9179 Recall: 0.8862 F1: 0.9018 Train AUC: 0.9982 Val AUC: 0.9581 Time: 13.97\n",
      "Epoch: 931 Train Loss: 0.0706 Val Loss: 0.3365 Acc: 0.9022 Pre: 0.9338 Recall: 0.8759 F1: 0.9039 Train AUC: 0.9980 Val AUC: 0.9535 Time: 14.73\n",
      "Epoch: 932 Train Loss: 0.0674 Val Loss: 0.3698 Acc: 0.9040 Pre: 0.9540 Recall: 0.8586 F1: 0.9038 Train AUC: 0.9983 Val AUC: 0.9534 Time: 15.72\n",
      "Epoch: 933 Train Loss: 0.0672 Val Loss: 0.3588 Acc: 0.8967 Pre: 0.9430 Recall: 0.8552 F1: 0.8969 Train AUC: 0.9977 Val AUC: 0.9566 Time: 14.77\n",
      "Epoch: 934 Train Loss: 0.0618 Val Loss: 0.3276 Acc: 0.9094 Pre: 0.9412 Recall: 0.8828 F1: 0.9110 Train AUC: 0.9986 Val AUC: 0.9600 Time: 14.81\n",
      "Epoch: 935 Train Loss: 0.0617 Val Loss: 0.3009 Acc: 0.9221 Pre: 0.9458 Recall: 0.9034 F1: 0.9242 Train AUC: 0.9984 Val AUC: 0.9635 Time: 12.30\n",
      "Epoch: 936 Train Loss: 0.0633 Val Loss: 0.2787 Acc: 0.9167 Pre: 0.9357 Recall: 0.9034 F1: 0.9193 Train AUC: 0.9985 Val AUC: 0.9664 Time: 12.49\n",
      "Epoch: 937 Train Loss: 0.0687 Val Loss: 0.2930 Acc: 0.9167 Pre: 0.9388 Recall: 0.9000 F1: 0.9190 Train AUC: 0.9983 Val AUC: 0.9649 Time: 12.53\n",
      "Epoch: 938 Train Loss: 0.0707 Val Loss: 0.3063 Acc: 0.9185 Pre: 0.9487 Recall: 0.8931 F1: 0.9201 Train AUC: 0.9973 Val AUC: 0.9626 Time: 13.37\n",
      "Epoch: 939 Train Loss: 0.0675 Val Loss: 0.3112 Acc: 0.9094 Pre: 0.9380 Recall: 0.8862 F1: 0.9113 Train AUC: 0.9982 Val AUC: 0.9593 Time: 13.46\n",
      "Epoch: 940 Train Loss: 0.0547 Val Loss: 0.3219 Acc: 0.9094 Pre: 0.9348 Recall: 0.8897 F1: 0.9117 Train AUC: 0.9991 Val AUC: 0.9574 Time: 14.40\n",
      "Epoch: 941 Train Loss: 0.0591 Val Loss: 0.3495 Acc: 0.9058 Pre: 0.9440 Recall: 0.8724 F1: 0.9068 Train AUC: 0.9991 Val AUC: 0.9562 Time: 15.18\n",
      "Epoch: 942 Train Loss: 0.0728 Val Loss: 0.3271 Acc: 0.9112 Pre: 0.9414 Recall: 0.8862 F1: 0.9130 Train AUC: 0.9973 Val AUC: 0.9588 Time: 14.41\n",
      "Epoch: 943 Train Loss: 0.0652 Val Loss: 0.2942 Acc: 0.9167 Pre: 0.9296 Recall: 0.9103 F1: 0.9199 Train AUC: 0.9979 Val AUC: 0.9627 Time: 15.06\n",
      "Epoch: 944 Train Loss: 0.0626 Val Loss: 0.3003 Acc: 0.9167 Pre: 0.9420 Recall: 0.8966 F1: 0.9187 Train AUC: 0.9988 Val AUC: 0.9635 Time: 12.52\n",
      "Epoch: 945 Train Loss: 0.0701 Val Loss: 0.3274 Acc: 0.9040 Pre: 0.9405 Recall: 0.8724 F1: 0.9052 Train AUC: 0.9978 Val AUC: 0.9618 Time: 13.01\n",
      "Epoch: 946 Train Loss: 0.0646 Val Loss: 0.3299 Acc: 0.9112 Pre: 0.9513 Recall: 0.8759 F1: 0.9120 Train AUC: 0.9980 Val AUC: 0.9606 Time: 13.14\n",
      "Epoch: 947 Train Loss: 0.0680 Val Loss: 0.3015 Acc: 0.9058 Pre: 0.9132 Recall: 0.9069 F1: 0.9100 Train AUC: 0.9979 Val AUC: 0.9593 Time: 13.89\n",
      "Epoch: 948 Train Loss: 0.0718 Val Loss: 0.3033 Acc: 0.9076 Pre: 0.9193 Recall: 0.9034 F1: 0.9113 Train AUC: 0.9977 Val AUC: 0.9586 Time: 14.64\n",
      "Epoch: 949 Train Loss: 0.0758 Val Loss: 0.3368 Acc: 0.9185 Pre: 0.9520 Recall: 0.8897 F1: 0.9198 Train AUC: 0.9976 Val AUC: 0.9576 Time: 14.43\n",
      "Epoch: 950 Train Loss: 0.0638 Val Loss: 0.3728 Acc: 0.8949 Pre: 0.9462 Recall: 0.8483 F1: 0.8945 Train AUC: 0.9980 Val AUC: 0.9566 Time: 14.49\n",
      "Epoch: 951 Train Loss: 0.0626 Val Loss: 0.3466 Acc: 0.9112 Pre: 0.9414 Recall: 0.8862 F1: 0.9130 Train AUC: 0.9983 Val AUC: 0.9581 Time: 12.44\n",
      "Epoch: 952 Train Loss: 0.0656 Val Loss: 0.3028 Acc: 0.9203 Pre: 0.9362 Recall: 0.9103 F1: 0.9231 Train AUC: 0.9979 Val AUC: 0.9616 Time: 12.47\n",
      "Epoch: 953 Train Loss: 0.0682 Val Loss: 0.2943 Acc: 0.9167 Pre: 0.9357 Recall: 0.9034 F1: 0.9193 Train AUC: 0.9985 Val AUC: 0.9627 Time: 13.07\n",
      "Epoch: 954 Train Loss: 0.0560 Val Loss: 0.3079 Acc: 0.9185 Pre: 0.9391 Recall: 0.9034 F1: 0.9209 Train AUC: 0.9992 Val AUC: 0.9625 Time: 13.39\n",
      "Epoch: 955 Train Loss: 0.0631 Val Loss: 0.3266 Acc: 0.9004 Pre: 0.9434 Recall: 0.8621 F1: 0.9009 Train AUC: 0.9981 Val AUC: 0.9611 Time: 14.13\n",
      "Epoch: 956 Train Loss: 0.0761 Val Loss: 0.3261 Acc: 0.9130 Pre: 0.9549 Recall: 0.8759 F1: 0.9137 Train AUC: 0.9971 Val AUC: 0.9599 Time: 14.77\n",
      "Epoch: 957 Train Loss: 0.0678 Val Loss: 0.3071 Acc: 0.9167 Pre: 0.9485 Recall: 0.8897 F1: 0.9181 Train AUC: 0.9981 Val AUC: 0.9589 Time: 15.08\n",
      "Epoch: 958 Train Loss: 0.0634 Val Loss: 0.3058 Acc: 0.9167 Pre: 0.9485 Recall: 0.8897 F1: 0.9181 Train AUC: 0.9979 Val AUC: 0.9589 Time: 15.06\n",
      "Epoch: 959 Train Loss: 0.0567 Val Loss: 0.3080 Acc: 0.9094 Pre: 0.9412 Recall: 0.8828 F1: 0.9110 Train AUC: 0.9990 Val AUC: 0.9609 Time: 12.45\n",
      "Epoch: 960 Train Loss: 0.0616 Val Loss: 0.2936 Acc: 0.9094 Pre: 0.9348 Recall: 0.8897 F1: 0.9117 Train AUC: 0.9984 Val AUC: 0.9637 Time: 12.33\n",
      "Epoch: 961 Train Loss: 0.0741 Val Loss: 0.2916 Acc: 0.9112 Pre: 0.9319 Recall: 0.8966 F1: 0.9139 Train AUC: 0.9973 Val AUC: 0.9638 Time: 12.42\n",
      "Epoch: 962 Train Loss: 0.0568 Val Loss: 0.3077 Acc: 0.9149 Pre: 0.9386 Recall: 0.8966 F1: 0.9171 Train AUC: 0.9990 Val AUC: 0.9617 Time: 13.18\n",
      "Epoch: 963 Train Loss: 0.0633 Val Loss: 0.3099 Acc: 0.9130 Pre: 0.9416 Recall: 0.8897 F1: 0.9149 Train AUC: 0.9982 Val AUC: 0.9606 Time: 13.46\n",
      "Epoch: 964 Train Loss: 0.0582 Val Loss: 0.3109 Acc: 0.9167 Pre: 0.9420 Recall: 0.8966 F1: 0.9187 Train AUC: 0.9990 Val AUC: 0.9588 Time: 14.24\n",
      "Epoch: 965 Train Loss: 0.0653 Val Loss: 0.3132 Acc: 0.9112 Pre: 0.9414 Recall: 0.8862 F1: 0.9130 Train AUC: 0.9981 Val AUC: 0.9584 Time: 14.88\n",
      "Epoch: 966 Train Loss: 0.0674 Val Loss: 0.3432 Acc: 0.9094 Pre: 0.9412 Recall: 0.8828 F1: 0.9110 Train AUC: 0.9984 Val AUC: 0.9581 Time: 14.71\n",
      "Epoch: 967 Train Loss: 0.0698 Val Loss: 0.3353 Acc: 0.9022 Pre: 0.9275 Recall: 0.8828 F1: 0.9046 Train AUC: 0.9982 Val AUC: 0.9586 Time: 15.48\n",
      "Epoch: 968 Train Loss: 0.0657 Val Loss: 0.3077 Acc: 0.9130 Pre: 0.9291 Recall: 0.9034 F1: 0.9161 Train AUC: 0.9981 Val AUC: 0.9603 Time: 12.32\n",
      "Epoch: 969 Train Loss: 0.0566 Val Loss: 0.2945 Acc: 0.9112 Pre: 0.9258 Recall: 0.9034 F1: 0.9145 Train AUC: 0.9988 Val AUC: 0.9618 Time: 13.00\n",
      "Epoch: 970 Train Loss: 0.0657 Val Loss: 0.3115 Acc: 0.9130 Pre: 0.9416 Recall: 0.8897 F1: 0.9149 Train AUC: 0.9986 Val AUC: 0.9608 Time: 13.46\n",
      "Epoch: 971 Train Loss: 0.0660 Val Loss: 0.3617 Acc: 0.9004 Pre: 0.9681 Recall: 0.8379 F1: 0.8983 Train AUC: 0.9980 Val AUC: 0.9603 Time: 13.73\n",
      "Epoch: 972 Train Loss: 0.0720 Val Loss: 0.3203 Acc: 0.9130 Pre: 0.9416 Recall: 0.8897 F1: 0.9149 Train AUC: 0.9984 Val AUC: 0.9608 Time: 14.07\n",
      "Epoch: 973 Train Loss: 0.0599 Val Loss: 0.2972 Acc: 0.9130 Pre: 0.9353 Recall: 0.8966 F1: 0.9155 Train AUC: 0.9986 Val AUC: 0.9616 Time: 13.95\n",
      "Epoch: 974 Train Loss: 0.0629 Val Loss: 0.2884 Acc: 0.9040 Pre: 0.9187 Recall: 0.8966 F1: 0.9075 Train AUC: 0.9984 Val AUC: 0.9619 Time: 14.29\n",
      "Epoch: 975 Train Loss: 0.0584 Val Loss: 0.3052 Acc: 0.9022 Pre: 0.9307 Recall: 0.8793 F1: 0.9043 Train AUC: 0.9989 Val AUC: 0.9600 Time: 13.52\n",
      "Epoch: 976 Train Loss: 0.0613 Val Loss: 0.3338 Acc: 0.9112 Pre: 0.9582 Recall: 0.8690 F1: 0.9114 Train AUC: 0.9989 Val AUC: 0.9582 Time: 13.85\n",
      "Epoch: 977 Train Loss: 0.0610 Val Loss: 0.3364 Acc: 0.9130 Pre: 0.9515 Recall: 0.8793 F1: 0.9140 Train AUC: 0.9988 Val AUC: 0.9583 Time: 13.40\n",
      "Epoch: 978 Train Loss: 0.0650 Val Loss: 0.3107 Acc: 0.9203 Pre: 0.9393 Recall: 0.9069 F1: 0.9228 Train AUC: 0.9979 Val AUC: 0.9590 Time: 14.19\n",
      "Epoch: 979 Train Loss: 0.0688 Val Loss: 0.3129 Acc: 0.9149 Pre: 0.9386 Recall: 0.8966 F1: 0.9171 Train AUC: 0.9980 Val AUC: 0.9604 Time: 14.51\n",
      "Epoch: 980 Train Loss: 0.0618 Val Loss: 0.3319 Acc: 0.9022 Pre: 0.9403 Recall: 0.8690 F1: 0.9032 Train AUC: 0.9983 Val AUC: 0.9598 Time: 14.78\n",
      "Epoch: 981 Train Loss: 0.0619 Val Loss: 0.3281 Acc: 0.9058 Pre: 0.9407 Recall: 0.8759 F1: 0.9071 Train AUC: 0.9982 Val AUC: 0.9599 Time: 13.80\n",
      "Epoch: 982 Train Loss: 0.0655 Val Loss: 0.3154 Acc: 0.9130 Pre: 0.9416 Recall: 0.8897 F1: 0.9149 Train AUC: 0.9980 Val AUC: 0.9592 Time: 12.54\n",
      "Epoch: 983 Train Loss: 0.0605 Val Loss: 0.3016 Acc: 0.9094 Pre: 0.9225 Recall: 0.9034 F1: 0.9129 Train AUC: 0.9986 Val AUC: 0.9602 Time: 13.45\n",
      "Epoch: 984 Train Loss: 0.0603 Val Loss: 0.3151 Acc: 0.9058 Pre: 0.9281 Recall: 0.8897 F1: 0.9085 Train AUC: 0.9988 Val AUC: 0.9594 Time: 13.69\n",
      "Epoch: 985 Train Loss: 0.0610 Val Loss: 0.3358 Acc: 0.9076 Pre: 0.9442 Recall: 0.8759 F1: 0.9088 Train AUC: 0.9987 Val AUC: 0.9593 Time: 13.86\n",
      "Epoch: 986 Train Loss: 0.0705 Val Loss: 0.2999 Acc: 0.9058 Pre: 0.9281 Recall: 0.8897 F1: 0.9085 Train AUC: 0.9976 Val AUC: 0.9618 Time: 14.32\n",
      "Epoch: 987 Train Loss: 0.0585 Val Loss: 0.2889 Acc: 0.9094 Pre: 0.9348 Recall: 0.8897 F1: 0.9117 Train AUC: 0.9984 Val AUC: 0.9624 Time: 15.11\n",
      "Epoch: 988 Train Loss: 0.0575 Val Loss: 0.3066 Acc: 0.9094 Pre: 0.9412 Recall: 0.8828 F1: 0.9110 Train AUC: 0.9992 Val AUC: 0.9625 Time: 12.96\n",
      "Epoch: 989 Train Loss: 0.0621 Val Loss: 0.3443 Acc: 0.9022 Pre: 0.9470 Recall: 0.8621 F1: 0.9025 Train AUC: 0.9987 Val AUC: 0.9603 Time: 12.77\n",
      "Epoch: 990 Train Loss: 0.0789 Val Loss: 0.3092 Acc: 0.9167 Pre: 0.9420 Recall: 0.8966 F1: 0.9187 Train AUC: 0.9972 Val AUC: 0.9616 Time: 12.75\n",
      "Epoch: 991 Train Loss: 0.0681 Val Loss: 0.2960 Acc: 0.9130 Pre: 0.9353 Recall: 0.8966 F1: 0.9155 Train AUC: 0.9980 Val AUC: 0.9623 Time: 13.27\n",
      "Epoch: 992 Train Loss: 0.0564 Val Loss: 0.3083 Acc: 0.9094 Pre: 0.9348 Recall: 0.8897 F1: 0.9117 Train AUC: 0.9990 Val AUC: 0.9607 Time: 14.46\n",
      "Epoch: 993 Train Loss: 0.0654 Val Loss: 0.3571 Acc: 0.9094 Pre: 0.9545 Recall: 0.8690 F1: 0.9097 Train AUC: 0.9982 Val AUC: 0.9577 Time: 14.63\n",
      "Epoch: 994 Train Loss: 0.0697 Val Loss: 0.3510 Acc: 0.9040 Pre: 0.9540 Recall: 0.8586 F1: 0.9038 Train AUC: 0.9983 Val AUC: 0.9568 Time: 14.63\n",
      "Epoch: 995 Train Loss: 0.0605 Val Loss: 0.3257 Acc: 0.9130 Pre: 0.9353 Recall: 0.8966 F1: 0.9155 Train AUC: 0.9987 Val AUC: 0.9573 Time: 13.89\n",
      "Epoch: 996 Train Loss: 0.0647 Val Loss: 0.3065 Acc: 0.9130 Pre: 0.9353 Recall: 0.8966 F1: 0.9155 Train AUC: 0.9987 Val AUC: 0.9610 Time: 12.30\n",
      "Epoch: 997 Train Loss: 0.0614 Val Loss: 0.3033 Acc: 0.9130 Pre: 0.9449 Recall: 0.8862 F1: 0.9146 Train AUC: 0.9987 Val AUC: 0.9644 Time: 12.46\n",
      "Epoch: 998 Train Loss: 0.0582 Val Loss: 0.3118 Acc: 0.9076 Pre: 0.9476 Recall: 0.8724 F1: 0.9084 Train AUC: 0.9990 Val AUC: 0.9643 Time: 13.35\n",
      "Epoch: 999 Train Loss: 0.0603 Val Loss: 0.2947 Acc: 0.9130 Pre: 0.9416 Recall: 0.8897 F1: 0.9149 Train AUC: 0.9988 Val AUC: 0.9633 Time: 13.55\n",
      "Epoch: 1000 Train Loss: 0.0577 Val Loss: 0.2951 Acc: 0.9022 Pre: 0.9184 Recall: 0.8931 F1: 0.9056 Train AUC: 0.9990 Val AUC: 0.9611 Time: 14.20\n",
      "Fold: 3 Best Epoch: 936 Test acc: 0.9167 Test Pre: 0.9357 Test Recall: 0.9034 Test F1: 0.9193 Test PRC: 0.9756 Test AUC: 0.9664\n",
      "Training for Fold 4\n",
      "## Training edges: 2208\n",
      "## Testing edges: 552\n",
      "Epoch: 1 Train Loss: 0.9143 Val Loss: 1.8084 Acc: 0.5000 Pre: 0.4493 Recall: 0.5000 F1: 0.4733 Train AUC: 0.5554 Val AUC: 0.5878 Time: 14.83\n",
      "Epoch: 2 Train Loss: 1.8876 Val Loss: 0.6842 Acc: 0.5435 Pre: 0.4931 Recall: 0.5726 F1: 0.5299 Train AUC: 0.5716 Val AUC: 0.6841 Time: 14.56\n",
      "Epoch: 3 Train Loss: 0.8402 Val Loss: 0.5946 Acc: 0.6830 Pre: 0.5995 Recall: 0.8871 F1: 0.7154 Train AUC: 0.6154 Val AUC: 0.8104 Time: 13.28\n",
      "Epoch: 4 Train Loss: 0.8607 Val Loss: 0.4737 Acc: 0.7301 Pre: 0.6562 Recall: 0.8387 F1: 0.7363 Train AUC: 0.6256 Val AUC: 0.8791 Time: 13.62\n",
      "Epoch: 5 Train Loss: 0.6300 Val Loss: 0.4260 Acc: 0.7971 Pre: 0.7833 Recall: 0.7581 F1: 0.7705 Train AUC: 0.7893 Val AUC: 0.8808 Time: 13.12\n",
      "Epoch: 6 Train Loss: 0.5908 Val Loss: 0.3799 Acc: 0.8351 Pre: 0.8685 Recall: 0.7460 F1: 0.8026 Train AUC: 0.7994 Val AUC: 0.9040 Time: 13.77\n",
      "Epoch: 7 Train Loss: 0.5625 Val Loss: 0.3889 Acc: 0.8152 Pre: 0.8093 Recall: 0.7702 F1: 0.7893 Train AUC: 0.8449 Val AUC: 0.9067 Time: 14.09\n",
      "Epoch: 8 Train Loss: 0.4953 Val Loss: 0.4428 Acc: 0.8116 Pre: 0.7857 Recall: 0.7984 F1: 0.7920 Train AUC: 0.8668 Val AUC: 0.8992 Time: 14.35\n",
      "Epoch: 9 Train Loss: 0.5646 Val Loss: 0.4620 Acc: 0.8152 Pre: 0.7829 Recall: 0.8145 F1: 0.7984 Train AUC: 0.8619 Val AUC: 0.8976 Time: 14.03\n",
      "Epoch: 10 Train Loss: 0.6073 Val Loss: 0.4330 Acc: 0.8188 Pre: 0.8109 Recall: 0.7782 F1: 0.7942 Train AUC: 0.8603 Val AUC: 0.9043 Time: 13.28\n",
      "Epoch: 11 Train Loss: 0.5578 Val Loss: 0.4042 Acc: 0.8333 Pre: 0.8482 Recall: 0.7661 F1: 0.8051 Train AUC: 0.8600 Val AUC: 0.9114 Time: 16.00\n",
      "Epoch: 12 Train Loss: 0.5269 Val Loss: 0.3876 Acc: 0.8370 Pre: 0.8591 Recall: 0.7621 F1: 0.8077 Train AUC: 0.8838 Val AUC: 0.9154 Time: 15.21\n",
      "Epoch: 13 Train Loss: 0.4979 Val Loss: 0.3785 Acc: 0.8351 Pre: 0.8552 Recall: 0.7621 F1: 0.8060 Train AUC: 0.8796 Val AUC: 0.9143 Time: 16.03\n",
      "Epoch: 14 Train Loss: 0.4904 Val Loss: 0.3738 Acc: 0.8333 Pre: 0.8333 Recall: 0.7863 F1: 0.8091 Train AUC: 0.8801 Val AUC: 0.9126 Time: 14.03\n",
      "Epoch: 15 Train Loss: 0.4734 Val Loss: 0.3718 Acc: 0.8261 Pre: 0.8140 Recall: 0.7944 F1: 0.8041 Train AUC: 0.8720 Val AUC: 0.9118 Time: 14.62\n",
      "Epoch: 16 Train Loss: 0.4580 Val Loss: 0.3628 Acc: 0.8279 Pre: 0.8174 Recall: 0.7944 F1: 0.8057 Train AUC: 0.8827 Val AUC: 0.9136 Time: 14.26\n",
      "Epoch: 17 Train Loss: 0.4298 Val Loss: 0.3514 Acc: 0.8370 Pre: 0.8319 Recall: 0.7984 F1: 0.8148 Train AUC: 0.8880 Val AUC: 0.9180 Time: 12.62\n",
      "Epoch: 18 Train Loss: 0.4386 Val Loss: 0.3477 Acc: 0.8315 Pre: 0.8270 Recall: 0.7903 F1: 0.8082 Train AUC: 0.8772 Val AUC: 0.9216 Time: 12.00\n",
      "Epoch: 19 Train Loss: 0.4469 Val Loss: 0.3452 Acc: 0.8406 Pre: 0.8361 Recall: 0.8024 F1: 0.8189 Train AUC: 0.8725 Val AUC: 0.9235 Time: 12.81\n",
      "Epoch: 20 Train Loss: 0.4337 Val Loss: 0.3436 Acc: 0.8424 Pre: 0.8340 Recall: 0.8105 F1: 0.8221 Train AUC: 0.8783 Val AUC: 0.9233 Time: 12.94\n",
      "Epoch: 21 Train Loss: 0.4725 Val Loss: 0.3402 Acc: 0.8388 Pre: 0.8299 Recall: 0.8065 F1: 0.8180 Train AUC: 0.8534 Val AUC: 0.9229 Time: 13.47\n",
      "Epoch: 22 Train Loss: 0.4172 Val Loss: 0.3370 Acc: 0.8388 Pre: 0.8354 Recall: 0.7984 F1: 0.8165 Train AUC: 0.8876 Val AUC: 0.9237 Time: 14.03\n",
      "Epoch: 23 Train Loss: 0.3948 Val Loss: 0.3342 Acc: 0.8351 Pre: 0.8340 Recall: 0.7903 F1: 0.8116 Train AUC: 0.8986 Val AUC: 0.9243 Time: 14.20\n",
      "Epoch: 24 Train Loss: 0.3907 Val Loss: 0.3327 Acc: 0.8351 Pre: 0.8458 Recall: 0.7742 F1: 0.8084 Train AUC: 0.9040 Val AUC: 0.9250 Time: 14.02\n",
      "Epoch: 25 Train Loss: 0.3970 Val Loss: 0.3323 Acc: 0.8388 Pre: 0.8533 Recall: 0.7742 F1: 0.8118 Train AUC: 0.8997 Val AUC: 0.9253 Time: 13.33\n",
      "Epoch: 26 Train Loss: 0.3804 Val Loss: 0.3322 Acc: 0.8406 Pre: 0.8540 Recall: 0.7782 F1: 0.8143 Train AUC: 0.9071 Val AUC: 0.9255 Time: 12.63\n",
      "Epoch: 27 Train Loss: 0.3838 Val Loss: 0.3353 Acc: 0.8370 Pre: 0.8435 Recall: 0.7823 F1: 0.8117 Train AUC: 0.9085 Val AUC: 0.9251 Time: 13.79\n",
      "Epoch: 28 Train Loss: 0.3938 Val Loss: 0.3380 Acc: 0.8315 Pre: 0.8189 Recall: 0.8024 F1: 0.8106 Train AUC: 0.9039 Val AUC: 0.9248 Time: 14.33\n",
      "Epoch: 29 Train Loss: 0.3858 Val Loss: 0.3389 Acc: 0.8315 Pre: 0.8112 Recall: 0.8145 F1: 0.8129 Train AUC: 0.9058 Val AUC: 0.9252 Time: 13.83\n",
      "Epoch: 30 Train Loss: 0.3827 Val Loss: 0.3357 Acc: 0.8351 Pre: 0.8153 Recall: 0.8185 F1: 0.8169 Train AUC: 0.9046 Val AUC: 0.9258 Time: 15.18\n",
      "Epoch: 31 Train Loss: 0.3894 Val Loss: 0.3328 Acc: 0.8351 Pre: 0.8127 Recall: 0.8226 F1: 0.8176 Train AUC: 0.9026 Val AUC: 0.9267 Time: 16.99\n",
      "Epoch: 32 Train Loss: 0.3609 Val Loss: 0.3261 Acc: 0.8388 Pre: 0.8219 Recall: 0.8185 F1: 0.8202 Train AUC: 0.9155 Val AUC: 0.9284 Time: 13.17\n",
      "Epoch: 33 Train Loss: 0.3595 Val Loss: 0.3193 Acc: 0.8460 Pre: 0.8410 Recall: 0.8105 F1: 0.8255 Train AUC: 0.9144 Val AUC: 0.9306 Time: 13.58\n",
      "Epoch: 34 Train Loss: 0.3710 Val Loss: 0.3148 Acc: 0.8460 Pre: 0.8528 Recall: 0.7944 F1: 0.8225 Train AUC: 0.9117 Val AUC: 0.9320 Time: 13.79\n",
      "Epoch: 35 Train Loss: 0.3716 Val Loss: 0.3130 Acc: 0.8551 Pre: 0.8684 Recall: 0.7984 F1: 0.8319 Train AUC: 0.9113 Val AUC: 0.9333 Time: 14.56\n",
      "Epoch: 36 Train Loss: 0.3646 Val Loss: 0.3128 Acc: 0.8587 Pre: 0.8664 Recall: 0.8105 F1: 0.8375 Train AUC: 0.9154 Val AUC: 0.9335 Time: 13.00\n",
      "Epoch: 37 Train Loss: 0.3708 Val Loss: 0.3162 Acc: 0.8533 Pre: 0.8408 Recall: 0.8306 F1: 0.8357 Train AUC: 0.9148 Val AUC: 0.9331 Time: 12.33\n",
      "Epoch: 38 Train Loss: 0.3572 Val Loss: 0.3193 Acc: 0.8442 Pre: 0.8214 Recall: 0.8347 F1: 0.8280 Train AUC: 0.9178 Val AUC: 0.9329 Time: 12.58\n",
      "Epoch: 39 Train Loss: 0.3585 Val Loss: 0.3192 Acc: 0.8442 Pre: 0.8214 Recall: 0.8347 F1: 0.8280 Train AUC: 0.9164 Val AUC: 0.9333 Time: 12.81\n",
      "Epoch: 40 Train Loss: 0.3488 Val Loss: 0.3165 Acc: 0.8478 Pre: 0.8280 Recall: 0.8347 F1: 0.8313 Train AUC: 0.9219 Val AUC: 0.9338 Time: 13.75\n",
      "Epoch: 41 Train Loss: 0.3702 Val Loss: 0.3144 Acc: 0.8496 Pre: 0.8367 Recall: 0.8266 F1: 0.8316 Train AUC: 0.9140 Val AUC: 0.9341 Time: 14.07\n",
      "Epoch: 42 Train Loss: 0.3534 Val Loss: 0.3118 Acc: 0.8460 Pre: 0.8410 Recall: 0.8105 F1: 0.8255 Train AUC: 0.9187 Val AUC: 0.9346 Time: 14.71\n",
      "Epoch: 43 Train Loss: 0.3451 Val Loss: 0.3082 Acc: 0.8605 Pre: 0.8734 Recall: 0.8065 F1: 0.8386 Train AUC: 0.9227 Val AUC: 0.9354 Time: 14.60\n",
      "Epoch: 44 Train Loss: 0.3473 Val Loss: 0.3066 Acc: 0.8659 Pre: 0.8884 Recall: 0.8024 F1: 0.8432 Train AUC: 0.9231 Val AUC: 0.9360 Time: 13.52\n",
      "Epoch: 45 Train Loss: 0.3425 Val Loss: 0.3052 Acc: 0.8678 Pre: 0.9032 Recall: 0.7903 F1: 0.8430 Train AUC: 0.9241 Val AUC: 0.9366 Time: 12.91\n",
      "Epoch: 46 Train Loss: 0.3499 Val Loss: 0.3038 Acc: 0.8714 Pre: 0.9078 Recall: 0.7944 F1: 0.8473 Train AUC: 0.9195 Val AUC: 0.9374 Time: 13.07\n",
      "Epoch: 47 Train Loss: 0.3330 Val Loss: 0.3029 Acc: 0.8696 Pre: 0.8964 Recall: 0.8024 F1: 0.8468 Train AUC: 0.9294 Val AUC: 0.9380 Time: 13.74\n",
      "Epoch: 48 Train Loss: 0.3380 Val Loss: 0.3033 Acc: 0.8696 Pre: 0.8929 Recall: 0.8065 F1: 0.8475 Train AUC: 0.9270 Val AUC: 0.9383 Time: 13.72\n",
      "Epoch: 49 Train Loss: 0.3355 Val Loss: 0.3053 Acc: 0.8750 Pre: 0.8841 Recall: 0.8306 F1: 0.8565 Train AUC: 0.9296 Val AUC: 0.9380 Time: 13.61\n",
      "Epoch: 50 Train Loss: 0.3295 Val Loss: 0.3061 Acc: 0.8750 Pre: 0.8776 Recall: 0.8387 F1: 0.8577 Train AUC: 0.9314 Val AUC: 0.9378 Time: 14.12\n",
      "Epoch: 51 Train Loss: 0.3424 Val Loss: 0.3043 Acc: 0.8732 Pre: 0.8771 Recall: 0.8347 F1: 0.8554 Train AUC: 0.9256 Val AUC: 0.9382 Time: 14.08\n",
      "Epoch: 52 Train Loss: 0.3307 Val Loss: 0.3042 Acc: 0.8714 Pre: 0.8734 Recall: 0.8347 F1: 0.8536 Train AUC: 0.9308 Val AUC: 0.9380 Time: 14.70\n",
      "Epoch: 53 Train Loss: 0.3232 Val Loss: 0.3036 Acc: 0.8732 Pre: 0.8771 Recall: 0.8347 F1: 0.8554 Train AUC: 0.9330 Val AUC: 0.9385 Time: 12.27\n",
      "Epoch: 54 Train Loss: 0.3351 Val Loss: 0.3036 Acc: 0.8714 Pre: 0.8766 Recall: 0.8306 F1: 0.8530 Train AUC: 0.9284 Val AUC: 0.9386 Time: 12.23\n",
      "Epoch: 55 Train Loss: 0.3493 Val Loss: 0.3043 Acc: 0.8678 Pre: 0.8661 Recall: 0.8347 F1: 0.8501 Train AUC: 0.9222 Val AUC: 0.9384 Time: 12.75\n",
      "Epoch: 56 Train Loss: 0.3200 Val Loss: 0.3054 Acc: 0.8678 Pre: 0.8631 Recall: 0.8387 F1: 0.8507 Train AUC: 0.9345 Val AUC: 0.9382 Time: 13.35\n",
      "Epoch: 57 Train Loss: 0.3315 Val Loss: 0.3038 Acc: 0.8714 Pre: 0.8703 Recall: 0.8387 F1: 0.8542 Train AUC: 0.9304 Val AUC: 0.9386 Time: 14.11\n",
      "Epoch: 58 Train Loss: 0.3171 Val Loss: 0.3013 Acc: 0.8768 Pre: 0.8814 Recall: 0.8387 F1: 0.8595 Train AUC: 0.9363 Val AUC: 0.9393 Time: 14.58\n",
      "Epoch: 59 Train Loss: 0.3166 Val Loss: 0.2976 Acc: 0.8859 Pre: 0.9075 Recall: 0.8306 F1: 0.8674 Train AUC: 0.9353 Val AUC: 0.9398 Time: 14.93\n",
      "Epoch: 60 Train Loss: 0.3217 Val Loss: 0.2972 Acc: 0.8877 Pre: 0.9043 Recall: 0.8387 F1: 0.8703 Train AUC: 0.9347 Val AUC: 0.9399 Time: 14.36\n",
      "Epoch: 61 Train Loss: 0.3156 Val Loss: 0.2998 Acc: 0.8822 Pre: 0.8927 Recall: 0.8387 F1: 0.8649 Train AUC: 0.9367 Val AUC: 0.9394 Time: 14.14\n",
      "Epoch: 62 Train Loss: 0.3231 Val Loss: 0.3022 Acc: 0.8786 Pre: 0.8851 Recall: 0.8387 F1: 0.8613 Train AUC: 0.9336 Val AUC: 0.9392 Time: 12.77\n",
      "Epoch: 63 Train Loss: 0.3256 Val Loss: 0.3036 Acc: 0.8768 Pre: 0.8782 Recall: 0.8427 F1: 0.8601 Train AUC: 0.9348 Val AUC: 0.9390 Time: 12.49\n",
      "Epoch: 64 Train Loss: 0.3136 Val Loss: 0.3011 Acc: 0.8786 Pre: 0.8851 Recall: 0.8387 F1: 0.8613 Train AUC: 0.9388 Val AUC: 0.9393 Time: 12.53\n",
      "Epoch: 65 Train Loss: 0.3079 Val Loss: 0.2982 Acc: 0.8822 Pre: 0.8927 Recall: 0.8387 F1: 0.8649 Train AUC: 0.9394 Val AUC: 0.9397 Time: 12.33\n",
      "Epoch: 66 Train Loss: 0.3078 Val Loss: 0.2959 Acc: 0.8877 Pre: 0.9043 Recall: 0.8387 F1: 0.8703 Train AUC: 0.9389 Val AUC: 0.9406 Time: 13.55\n",
      "Epoch: 67 Train Loss: 0.3066 Val Loss: 0.2954 Acc: 0.8913 Pre: 0.9123 Recall: 0.8387 F1: 0.8739 Train AUC: 0.9403 Val AUC: 0.9412 Time: 14.10\n",
      "Epoch: 68 Train Loss: 0.3088 Val Loss: 0.2953 Acc: 0.8895 Pre: 0.9083 Recall: 0.8387 F1: 0.8721 Train AUC: 0.9396 Val AUC: 0.9415 Time: 14.42\n",
      "Epoch: 69 Train Loss: 0.3118 Val Loss: 0.2942 Acc: 0.8895 Pre: 0.9083 Recall: 0.8387 F1: 0.8721 Train AUC: 0.9374 Val AUC: 0.9418 Time: 14.91\n",
      "Epoch: 70 Train Loss: 0.3037 Val Loss: 0.2930 Acc: 0.8877 Pre: 0.9043 Recall: 0.8387 F1: 0.8703 Train AUC: 0.9412 Val AUC: 0.9421 Time: 14.46\n",
      "Epoch: 71 Train Loss: 0.3137 Val Loss: 0.2935 Acc: 0.8859 Pre: 0.8970 Recall: 0.8427 F1: 0.8690 Train AUC: 0.9372 Val AUC: 0.9418 Time: 13.34\n",
      "Epoch: 72 Train Loss: 0.3045 Val Loss: 0.2963 Acc: 0.8822 Pre: 0.8894 Recall: 0.8427 F1: 0.8654 Train AUC: 0.9416 Val AUC: 0.9412 Time: 12.35\n",
      "Epoch: 73 Train Loss: 0.3013 Val Loss: 0.2945 Acc: 0.8822 Pre: 0.8894 Recall: 0.8427 F1: 0.8654 Train AUC: 0.9437 Val AUC: 0.9417 Time: 12.80\n",
      "Epoch: 74 Train Loss: 0.3038 Val Loss: 0.2913 Acc: 0.8841 Pre: 0.8932 Recall: 0.8427 F1: 0.8672 Train AUC: 0.9435 Val AUC: 0.9429 Time: 13.19\n",
      "Epoch: 75 Train Loss: 0.3015 Val Loss: 0.2899 Acc: 0.8895 Pre: 0.9083 Recall: 0.8387 F1: 0.8721 Train AUC: 0.9429 Val AUC: 0.9430 Time: 13.96\n",
      "Epoch: 76 Train Loss: 0.3097 Val Loss: 0.2899 Acc: 0.8822 Pre: 0.9031 Recall: 0.8266 F1: 0.8632 Train AUC: 0.9415 Val AUC: 0.9422 Time: 14.75\n",
      "Epoch: 77 Train Loss: 0.2910 Val Loss: 0.2915 Acc: 0.8822 Pre: 0.8996 Recall: 0.8306 F1: 0.8637 Train AUC: 0.9462 Val AUC: 0.9418 Time: 14.80\n",
      "Epoch: 78 Train Loss: 0.3087 Val Loss: 0.2962 Acc: 0.8841 Pre: 0.8966 Recall: 0.8387 F1: 0.8667 Train AUC: 0.9403 Val AUC: 0.9417 Time: 14.79\n",
      "Epoch: 79 Train Loss: 0.3030 Val Loss: 0.2959 Acc: 0.8859 Pre: 0.9111 Recall: 0.8266 F1: 0.8668 Train AUC: 0.9428 Val AUC: 0.9417 Time: 12.25\n",
      "Epoch: 80 Train Loss: 0.2871 Val Loss: 0.2961 Acc: 0.8877 Pre: 0.9152 Recall: 0.8266 F1: 0.8686 Train AUC: 0.9481 Val AUC: 0.9420 Time: 12.61\n",
      "Epoch: 81 Train Loss: 0.2952 Val Loss: 0.2954 Acc: 0.8877 Pre: 0.9152 Recall: 0.8266 F1: 0.8686 Train AUC: 0.9447 Val AUC: 0.9420 Time: 13.20\n",
      "Epoch: 82 Train Loss: 0.2984 Val Loss: 0.2947 Acc: 0.8877 Pre: 0.9043 Recall: 0.8387 F1: 0.8703 Train AUC: 0.9423 Val AUC: 0.9420 Time: 13.58\n",
      "Epoch: 83 Train Loss: 0.3207 Val Loss: 0.2940 Acc: 0.8895 Pre: 0.9013 Recall: 0.8468 F1: 0.8732 Train AUC: 0.9353 Val AUC: 0.9414 Time: 14.83\n",
      "Epoch: 84 Train Loss: 0.2920 Val Loss: 0.2933 Acc: 0.8841 Pre: 0.8898 Recall: 0.8468 F1: 0.8678 Train AUC: 0.9462 Val AUC: 0.9413 Time: 14.13\n",
      "Epoch: 85 Train Loss: 0.2990 Val Loss: 0.2957 Acc: 0.8841 Pre: 0.8898 Recall: 0.8468 F1: 0.8678 Train AUC: 0.9442 Val AUC: 0.9410 Time: 13.86\n",
      "Epoch: 86 Train Loss: 0.3007 Val Loss: 0.2959 Acc: 0.8841 Pre: 0.8898 Recall: 0.8468 F1: 0.8678 Train AUC: 0.9447 Val AUC: 0.9410 Time: 14.13\n",
      "Epoch: 87 Train Loss: 0.2876 Val Loss: 0.2934 Acc: 0.8859 Pre: 0.8936 Recall: 0.8468 F1: 0.8696 Train AUC: 0.9481 Val AUC: 0.9417 Time: 12.88\n",
      "Epoch: 88 Train Loss: 0.2846 Val Loss: 0.2904 Acc: 0.8877 Pre: 0.9009 Recall: 0.8427 F1: 0.8708 Train AUC: 0.9490 Val AUC: 0.9426 Time: 12.30\n",
      "Epoch: 89 Train Loss: 0.2908 Val Loss: 0.2894 Acc: 0.8913 Pre: 0.9087 Recall: 0.8427 F1: 0.8745 Train AUC: 0.9470 Val AUC: 0.9433 Time: 13.42\n",
      "Epoch: 90 Train Loss: 0.2877 Val Loss: 0.2878 Acc: 0.8931 Pre: 0.9315 Recall: 0.8226 F1: 0.8737 Train AUC: 0.9475 Val AUC: 0.9441 Time: 14.06\n",
      "Epoch: 91 Train Loss: 0.2965 Val Loss: 0.2882 Acc: 0.8949 Pre: 0.9279 Recall: 0.8306 F1: 0.8766 Train AUC: 0.9462 Val AUC: 0.9440 Time: 14.32\n",
      "Epoch: 92 Train Loss: 0.3001 Val Loss: 0.2930 Acc: 0.8877 Pre: 0.8974 Recall: 0.8468 F1: 0.8714 Train AUC: 0.9439 Val AUC: 0.9428 Time: 13.95\n",
      "Epoch: 93 Train Loss: 0.2889 Val Loss: 0.2967 Acc: 0.8877 Pre: 0.8908 Recall: 0.8548 F1: 0.8724 Train AUC: 0.9480 Val AUC: 0.9424 Time: 13.14\n",
      "Epoch: 94 Train Loss: 0.2871 Val Loss: 0.2938 Acc: 0.8877 Pre: 0.8908 Recall: 0.8548 F1: 0.8724 Train AUC: 0.9485 Val AUC: 0.9430 Time: 12.76\n",
      "Epoch: 95 Train Loss: 0.2939 Val Loss: 0.2866 Acc: 0.8913 Pre: 0.9159 Recall: 0.8347 F1: 0.8734 Train AUC: 0.9467 Val AUC: 0.9447 Time: 13.70\n",
      "Epoch: 96 Train Loss: 0.2830 Val Loss: 0.2849 Acc: 0.8913 Pre: 0.9352 Recall: 0.8145 F1: 0.8707 Train AUC: 0.9511 Val AUC: 0.9457 Time: 13.91\n",
      "Epoch: 97 Train Loss: 0.2905 Val Loss: 0.2851 Acc: 0.8895 Pre: 0.9269 Recall: 0.8185 F1: 0.8694 Train AUC: 0.9495 Val AUC: 0.9458 Time: 14.03\n",
      "Epoch: 98 Train Loss: 0.2885 Val Loss: 0.2867 Acc: 0.8895 Pre: 0.9119 Recall: 0.8347 F1: 0.8716 Train AUC: 0.9483 Val AUC: 0.9450 Time: 13.40\n",
      "Epoch: 99 Train Loss: 0.2716 Val Loss: 0.2896 Acc: 0.8895 Pre: 0.8945 Recall: 0.8548 F1: 0.8742 Train AUC: 0.9531 Val AUC: 0.9438 Time: 13.78\n",
      "Epoch: 100 Train Loss: 0.2837 Val Loss: 0.2914 Acc: 0.8895 Pre: 0.8945 Recall: 0.8548 F1: 0.8742 Train AUC: 0.9489 Val AUC: 0.9431 Time: 13.60\n",
      "Epoch: 101 Train Loss: 0.2734 Val Loss: 0.2887 Acc: 0.8877 Pre: 0.8941 Recall: 0.8508 F1: 0.8719 Train AUC: 0.9545 Val AUC: 0.9432 Time: 12.23\n",
      "Epoch: 102 Train Loss: 0.2731 Val Loss: 0.2849 Acc: 0.8931 Pre: 0.9238 Recall: 0.8306 F1: 0.8747 Train AUC: 0.9537 Val AUC: 0.9440 Time: 12.95\n",
      "Epoch: 103 Train Loss: 0.2763 Val Loss: 0.2845 Acc: 0.8895 Pre: 0.9269 Recall: 0.8185 F1: 0.8694 Train AUC: 0.9526 Val AUC: 0.9443 Time: 14.07\n",
      "Epoch: 104 Train Loss: 0.2909 Val Loss: 0.2859 Acc: 0.8895 Pre: 0.9119 Recall: 0.8347 F1: 0.8716 Train AUC: 0.9507 Val AUC: 0.9438 Time: 14.17\n",
      "Epoch: 105 Train Loss: 0.2711 Val Loss: 0.2879 Acc: 0.8913 Pre: 0.9052 Recall: 0.8468 F1: 0.8750 Train AUC: 0.9549 Val AUC: 0.9434 Time: 14.59\n",
      "Epoch: 106 Train Loss: 0.2719 Val Loss: 0.2904 Acc: 0.8859 Pre: 0.8903 Recall: 0.8508 F1: 0.8701 Train AUC: 0.9537 Val AUC: 0.9432 Time: 14.76\n",
      "Epoch: 107 Train Loss: 0.2736 Val Loss: 0.2897 Acc: 0.8895 Pre: 0.9013 Recall: 0.8468 F1: 0.8732 Train AUC: 0.9531 Val AUC: 0.9435 Time: 12.32\n",
      "Epoch: 108 Train Loss: 0.2685 Val Loss: 0.2867 Acc: 0.8967 Pre: 0.9283 Recall: 0.8347 F1: 0.8790 Train AUC: 0.9548 Val AUC: 0.9438 Time: 12.32\n",
      "Epoch: 109 Train Loss: 0.2791 Val Loss: 0.2863 Acc: 0.8967 Pre: 0.9283 Recall: 0.8347 F1: 0.8790 Train AUC: 0.9516 Val AUC: 0.9439 Time: 12.50\n",
      "Epoch: 110 Train Loss: 0.2807 Val Loss: 0.2871 Acc: 0.8967 Pre: 0.9207 Recall: 0.8427 F1: 0.8800 Train AUC: 0.9526 Val AUC: 0.9439 Time: 13.55\n",
      "Epoch: 111 Train Loss: 0.2775 Val Loss: 0.2942 Acc: 0.8859 Pre: 0.8870 Recall: 0.8548 F1: 0.8706 Train AUC: 0.9537 Val AUC: 0.9436 Time: 13.50\n",
      "Epoch: 112 Train Loss: 0.2720 Val Loss: 0.3013 Acc: 0.8750 Pre: 0.8623 Recall: 0.8589 F1: 0.8606 Train AUC: 0.9543 Val AUC: 0.9431 Time: 14.25\n",
      "Epoch: 113 Train Loss: 0.2688 Val Loss: 0.2949 Acc: 0.8931 Pre: 0.9091 Recall: 0.8468 F1: 0.8768 Train AUC: 0.9581 Val AUC: 0.9438 Time: 15.01\n",
      "Epoch: 114 Train Loss: 0.2632 Val Loss: 0.2898 Acc: 0.8967 Pre: 0.9207 Recall: 0.8427 F1: 0.8800 Train AUC: 0.9582 Val AUC: 0.9443 Time: 14.66\n",
      "Epoch: 115 Train Loss: 0.2619 Val Loss: 0.2874 Acc: 0.8931 Pre: 0.9315 Recall: 0.8226 F1: 0.8737 Train AUC: 0.9569 Val AUC: 0.9449 Time: 13.41\n",
      "Epoch: 116 Train Loss: 0.2710 Val Loss: 0.2869 Acc: 0.8931 Pre: 0.9200 Recall: 0.8347 F1: 0.8753 Train AUC: 0.9560 Val AUC: 0.9443 Time: 12.57\n",
      "Epoch: 117 Train Loss: 0.2802 Val Loss: 0.2930 Acc: 0.8859 Pre: 0.8870 Recall: 0.8548 F1: 0.8706 Train AUC: 0.9521 Val AUC: 0.9422 Time: 12.60\n",
      "Epoch: 118 Train Loss: 0.2622 Val Loss: 0.3015 Acc: 0.8696 Pre: 0.8548 Recall: 0.8548 F1: 0.8548 Train AUC: 0.9573 Val AUC: 0.9414 Time: 13.21\n",
      "Epoch: 119 Train Loss: 0.2691 Val Loss: 0.2947 Acc: 0.8750 Pre: 0.8653 Recall: 0.8548 F1: 0.8600 Train AUC: 0.9579 Val AUC: 0.9420 Time: 14.11\n",
      "Epoch: 120 Train Loss: 0.2601 Val Loss: 0.2867 Acc: 0.8986 Pre: 0.9248 Recall: 0.8427 F1: 0.8819 Train AUC: 0.9588 Val AUC: 0.9435 Time: 13.60\n",
      "Epoch: 121 Train Loss: 0.2566 Val Loss: 0.2890 Acc: 0.8913 Pre: 0.9393 Recall: 0.8105 F1: 0.8701 Train AUC: 0.9596 Val AUC: 0.9455 Time: 13.91\n",
      "Epoch: 122 Train Loss: 0.2644 Val Loss: 0.2877 Acc: 0.8913 Pre: 0.9273 Recall: 0.8226 F1: 0.8718 Train AUC: 0.9598 Val AUC: 0.9447 Time: 14.18\n",
      "Epoch: 123 Train Loss: 0.2669 Val Loss: 0.2918 Acc: 0.8913 Pre: 0.9052 Recall: 0.8468 F1: 0.8750 Train AUC: 0.9566 Val AUC: 0.9432 Time: 14.32\n",
      "Epoch: 124 Train Loss: 0.2617 Val Loss: 0.2990 Acc: 0.8732 Pre: 0.8618 Recall: 0.8548 F1: 0.8583 Train AUC: 0.9575 Val AUC: 0.9428 Time: 13.70\n",
      "Epoch: 125 Train Loss: 0.2664 Val Loss: 0.2921 Acc: 0.8804 Pre: 0.8824 Recall: 0.8468 F1: 0.8642 Train AUC: 0.9573 Val AUC: 0.9444 Time: 12.48\n",
      "Epoch: 126 Train Loss: 0.2570 Val Loss: 0.2855 Acc: 0.8931 Pre: 0.9238 Recall: 0.8306 F1: 0.8747 Train AUC: 0.9605 Val AUC: 0.9461 Time: 12.68\n",
      "Epoch: 127 Train Loss: 0.2557 Val Loss: 0.2869 Acc: 0.8913 Pre: 0.9352 Recall: 0.8145 F1: 0.8707 Train AUC: 0.9599 Val AUC: 0.9469 Time: 13.55\n",
      "Epoch: 128 Train Loss: 0.2661 Val Loss: 0.2860 Acc: 0.8804 Pre: 0.9027 Recall: 0.8226 F1: 0.8608 Train AUC: 0.9593 Val AUC: 0.9467 Time: 13.87\n",
      "Epoch: 129 Train Loss: 0.2670 Val Loss: 0.2903 Acc: 0.8768 Pre: 0.8689 Recall: 0.8548 F1: 0.8618 Train AUC: 0.9573 Val AUC: 0.9456 Time: 13.84\n",
      "Epoch: 130 Train Loss: 0.2577 Val Loss: 0.3041 Acc: 0.8605 Pre: 0.8327 Recall: 0.8629 F1: 0.8475 Train AUC: 0.9597 Val AUC: 0.9426 Time: 14.32\n",
      "Epoch: 131 Train Loss: 0.2541 Val Loss: 0.3088 Acc: 0.8587 Pre: 0.8320 Recall: 0.8589 F1: 0.8452 Train AUC: 0.9618 Val AUC: 0.9417 Time: 13.38\n",
      "Epoch: 132 Train Loss: 0.2589 Val Loss: 0.3013 Acc: 0.8877 Pre: 0.8974 Recall: 0.8468 F1: 0.8714 Train AUC: 0.9608 Val AUC: 0.9421 Time: 13.23\n",
      "Epoch: 133 Train Loss: 0.2532 Val Loss: 0.2956 Acc: 0.8931 Pre: 0.9200 Recall: 0.8347 F1: 0.8753 Train AUC: 0.9607 Val AUC: 0.9439 Time: 13.95\n",
      "Epoch: 134 Train Loss: 0.2574 Val Loss: 0.2946 Acc: 0.8895 Pre: 0.9349 Recall: 0.8105 F1: 0.8683 Train AUC: 0.9604 Val AUC: 0.9454 Time: 13.12\n",
      "Epoch: 135 Train Loss: 0.2634 Val Loss: 0.2894 Acc: 0.8967 Pre: 0.9283 Recall: 0.8347 F1: 0.8790 Train AUC: 0.9597 Val AUC: 0.9459 Time: 13.77\n",
      "Epoch: 136 Train Loss: 0.2589 Val Loss: 0.2978 Acc: 0.8732 Pre: 0.8618 Recall: 0.8548 F1: 0.8583 Train AUC: 0.9615 Val AUC: 0.9444 Time: 14.16\n",
      "Epoch: 137 Train Loss: 0.2556 Val Loss: 0.3070 Acc: 0.8750 Pre: 0.8456 Recall: 0.8831 F1: 0.8639 Train AUC: 0.9612 Val AUC: 0.9437 Time: 14.16\n",
      "Epoch: 138 Train Loss: 0.2527 Val Loss: 0.2979 Acc: 0.8768 Pre: 0.8629 Recall: 0.8629 F1: 0.8629 Train AUC: 0.9637 Val AUC: 0.9445 Time: 13.01\n",
      "Epoch: 139 Train Loss: 0.2444 Val Loss: 0.2891 Acc: 0.8877 Pre: 0.9009 Recall: 0.8427 F1: 0.8708 Train AUC: 0.9650 Val AUC: 0.9456 Time: 12.69\n",
      "Epoch: 140 Train Loss: 0.2450 Val Loss: 0.2899 Acc: 0.8949 Pre: 0.9279 Recall: 0.8306 F1: 0.8766 Train AUC: 0.9636 Val AUC: 0.9458 Time: 12.38\n",
      "Epoch: 141 Train Loss: 0.2518 Val Loss: 0.2904 Acc: 0.8986 Pre: 0.9286 Recall: 0.8387 F1: 0.8814 Train AUC: 0.9636 Val AUC: 0.9450 Time: 12.51\n",
      "Epoch: 142 Train Loss: 0.2612 Val Loss: 0.2909 Acc: 0.8931 Pre: 0.9127 Recall: 0.8427 F1: 0.8763 Train AUC: 0.9582 Val AUC: 0.9443 Time: 13.09\n",
      "Epoch: 143 Train Loss: 0.2496 Val Loss: 0.2934 Acc: 0.8841 Pre: 0.8898 Recall: 0.8468 F1: 0.8678 Train AUC: 0.9613 Val AUC: 0.9443 Time: 13.94\n",
      "Epoch: 144 Train Loss: 0.2397 Val Loss: 0.2933 Acc: 0.8822 Pre: 0.8765 Recall: 0.8589 F1: 0.8676 Train AUC: 0.9654 Val AUC: 0.9455 Time: 14.11\n",
      "Epoch: 145 Train Loss: 0.2418 Val Loss: 0.2899 Acc: 0.8841 Pre: 0.8802 Recall: 0.8589 F1: 0.8694 Train AUC: 0.9653 Val AUC: 0.9466 Time: 14.80\n",
      "Epoch: 146 Train Loss: 0.2394 Val Loss: 0.2875 Acc: 0.8804 Pre: 0.8792 Recall: 0.8508 F1: 0.8648 Train AUC: 0.9648 Val AUC: 0.9478 Time: 13.97\n",
      "Epoch: 147 Train Loss: 0.2456 Val Loss: 0.2876 Acc: 0.8841 Pre: 0.9000 Recall: 0.8347 F1: 0.8661 Train AUC: 0.9647 Val AUC: 0.9480 Time: 14.69\n",
      "Epoch: 148 Train Loss: 0.2403 Val Loss: 0.2893 Acc: 0.8931 Pre: 0.9200 Recall: 0.8347 F1: 0.8753 Train AUC: 0.9652 Val AUC: 0.9481 Time: 12.72\n",
      "Epoch: 149 Train Loss: 0.2391 Val Loss: 0.2930 Acc: 0.8841 Pre: 0.8898 Recall: 0.8468 F1: 0.8678 Train AUC: 0.9665 Val AUC: 0.9463 Time: 13.24\n",
      "Epoch: 150 Train Loss: 0.2316 Val Loss: 0.2989 Acc: 0.8768 Pre: 0.8719 Recall: 0.8508 F1: 0.8612 Train AUC: 0.9679 Val AUC: 0.9450 Time: 14.05\n",
      "Epoch: 151 Train Loss: 0.2288 Val Loss: 0.2990 Acc: 0.8804 Pre: 0.8824 Recall: 0.8468 F1: 0.8642 Train AUC: 0.9681 Val AUC: 0.9447 Time: 13.71\n",
      "Epoch: 152 Train Loss: 0.2373 Val Loss: 0.2951 Acc: 0.8967 Pre: 0.9170 Recall: 0.8468 F1: 0.8805 Train AUC: 0.9660 Val AUC: 0.9454 Time: 13.05\n",
      "Epoch: 153 Train Loss: 0.2368 Val Loss: 0.2956 Acc: 0.8913 Pre: 0.9052 Recall: 0.8468 F1: 0.8750 Train AUC: 0.9660 Val AUC: 0.9459 Time: 12.73\n",
      "Epoch: 154 Train Loss: 0.2294 Val Loss: 0.2976 Acc: 0.8841 Pre: 0.8740 Recall: 0.8669 F1: 0.8704 Train AUC: 0.9681 Val AUC: 0.9459 Time: 14.27\n",
      "Epoch: 155 Train Loss: 0.2336 Val Loss: 0.2951 Acc: 0.8859 Pre: 0.8745 Recall: 0.8710 F1: 0.8727 Train AUC: 0.9672 Val AUC: 0.9468 Time: 14.09\n",
      "Epoch: 156 Train Loss: 0.2332 Val Loss: 0.2898 Acc: 0.8877 Pre: 0.8908 Recall: 0.8548 F1: 0.8724 Train AUC: 0.9676 Val AUC: 0.9478 Time: 14.09\n",
      "Epoch: 157 Train Loss: 0.2314 Val Loss: 0.2901 Acc: 0.8931 Pre: 0.9056 Recall: 0.8508 F1: 0.8773 Train AUC: 0.9681 Val AUC: 0.9479 Time: 13.64\n",
      "Epoch: 158 Train Loss: 0.2267 Val Loss: 0.2918 Acc: 0.8967 Pre: 0.9134 Recall: 0.8508 F1: 0.8810 Train AUC: 0.9687 Val AUC: 0.9477 Time: 12.19\n",
      "Epoch: 159 Train Loss: 0.2307 Val Loss: 0.2902 Acc: 0.8986 Pre: 0.9211 Recall: 0.8468 F1: 0.8824 Train AUC: 0.9679 Val AUC: 0.9481 Time: 12.74\n",
      "Epoch: 160 Train Loss: 0.2290 Val Loss: 0.2913 Acc: 0.9004 Pre: 0.9177 Recall: 0.8548 F1: 0.8852 Train AUC: 0.9678 Val AUC: 0.9479 Time: 13.79\n",
      "Epoch: 161 Train Loss: 0.2324 Val Loss: 0.2976 Acc: 0.8913 Pre: 0.8917 Recall: 0.8629 F1: 0.8770 Train AUC: 0.9675 Val AUC: 0.9466 Time: 13.27\n",
      "Epoch: 162 Train Loss: 0.2319 Val Loss: 0.2971 Acc: 0.8931 Pre: 0.8889 Recall: 0.8710 F1: 0.8798 Train AUC: 0.9671 Val AUC: 0.9470 Time: 14.59\n",
      "Epoch: 163 Train Loss: 0.2211 Val Loss: 0.2976 Acc: 0.8949 Pre: 0.8926 Recall: 0.8710 F1: 0.8816 Train AUC: 0.9709 Val AUC: 0.9472 Time: 14.36\n",
      "Epoch: 164 Train Loss: 0.2327 Val Loss: 0.2944 Acc: 0.8967 Pre: 0.9134 Recall: 0.8508 F1: 0.8810 Train AUC: 0.9673 Val AUC: 0.9474 Time: 13.06\n",
      "Epoch: 165 Train Loss: 0.2291 Val Loss: 0.2946 Acc: 0.8931 Pre: 0.9091 Recall: 0.8468 F1: 0.8768 Train AUC: 0.9678 Val AUC: 0.9475 Time: 13.02\n",
      "Epoch: 166 Train Loss: 0.2207 Val Loss: 0.2961 Acc: 0.8931 Pre: 0.9056 Recall: 0.8508 F1: 0.8773 Train AUC: 0.9704 Val AUC: 0.9473 Time: 12.85\n",
      "Epoch: 167 Train Loss: 0.2255 Val Loss: 0.2954 Acc: 0.8949 Pre: 0.8992 Recall: 0.8629 F1: 0.8807 Train AUC: 0.9692 Val AUC: 0.9477 Time: 13.71\n",
      "Epoch: 168 Train Loss: 0.2280 Val Loss: 0.2920 Acc: 0.8986 Pre: 0.9000 Recall: 0.8710 F1: 0.8852 Train AUC: 0.9690 Val AUC: 0.9485 Time: 13.80\n",
      "Epoch: 169 Train Loss: 0.2208 Val Loss: 0.2920 Acc: 0.8967 Pre: 0.8996 Recall: 0.8669 F1: 0.8830 Train AUC: 0.9715 Val AUC: 0.9489 Time: 14.54\n",
      "Epoch: 170 Train Loss: 0.2196 Val Loss: 0.2904 Acc: 0.9040 Pre: 0.9149 Recall: 0.8669 F1: 0.8903 Train AUC: 0.9710 Val AUC: 0.9489 Time: 13.79\n",
      "Epoch: 171 Train Loss: 0.2245 Val Loss: 0.2910 Acc: 0.9058 Pre: 0.9261 Recall: 0.8589 F1: 0.8912 Train AUC: 0.9695 Val AUC: 0.9490 Time: 13.73\n",
      "Epoch: 172 Train Loss: 0.2242 Val Loss: 0.2917 Acc: 0.8986 Pre: 0.9248 Recall: 0.8427 F1: 0.8819 Train AUC: 0.9694 Val AUC: 0.9493 Time: 12.27\n",
      "Epoch: 173 Train Loss: 0.2146 Val Loss: 0.2945 Acc: 0.8986 Pre: 0.9174 Recall: 0.8508 F1: 0.8828 Train AUC: 0.9721 Val AUC: 0.9489 Time: 13.40\n",
      "Epoch: 174 Train Loss: 0.2138 Val Loss: 0.2977 Acc: 0.8895 Pre: 0.8945 Recall: 0.8548 F1: 0.8742 Train AUC: 0.9724 Val AUC: 0.9479 Time: 13.39\n",
      "Epoch: 175 Train Loss: 0.2228 Val Loss: 0.2994 Acc: 0.8877 Pre: 0.8780 Recall: 0.8710 F1: 0.8745 Train AUC: 0.9703 Val AUC: 0.9481 Time: 14.29\n",
      "Epoch: 176 Train Loss: 0.2265 Val Loss: 0.2948 Acc: 0.9022 Pre: 0.9254 Recall: 0.8508 F1: 0.8866 Train AUC: 0.9698 Val AUC: 0.9492 Time: 14.54\n",
      "Epoch: 177 Train Loss: 0.2216 Val Loss: 0.2938 Acc: 0.8986 Pre: 0.9286 Recall: 0.8387 F1: 0.8814 Train AUC: 0.9697 Val AUC: 0.9495 Time: 13.07\n",
      "Epoch: 178 Train Loss: 0.2276 Val Loss: 0.2947 Acc: 0.9040 Pre: 0.9185 Recall: 0.8629 F1: 0.8898 Train AUC: 0.9697 Val AUC: 0.9492 Time: 12.99\n",
      "Epoch: 179 Train Loss: 0.2148 Val Loss: 0.3018 Acc: 0.8696 Pre: 0.8385 Recall: 0.8790 F1: 0.8583 Train AUC: 0.9717 Val AUC: 0.9482 Time: 12.56\n",
      "Epoch: 180 Train Loss: 0.2201 Val Loss: 0.2943 Acc: 0.9094 Pre: 0.9195 Recall: 0.8750 F1: 0.8967 Train AUC: 0.9730 Val AUC: 0.9486 Time: 13.43\n",
      "Epoch: 181 Train Loss: 0.2145 Val Loss: 0.2905 Acc: 0.9022 Pre: 0.9292 Recall: 0.8468 F1: 0.8861 Train AUC: 0.9727 Val AUC: 0.9494 Time: 14.10\n",
      "Epoch: 182 Train Loss: 0.2179 Val Loss: 0.2902 Acc: 0.9022 Pre: 0.9330 Recall: 0.8427 F1: 0.8856 Train AUC: 0.9722 Val AUC: 0.9494 Time: 13.59\n",
      "Epoch: 183 Train Loss: 0.2041 Val Loss: 0.2914 Acc: 0.9058 Pre: 0.9188 Recall: 0.8669 F1: 0.8921 Train AUC: 0.9759 Val AUC: 0.9491 Time: 14.11\n",
      "Epoch: 184 Train Loss: 0.2169 Val Loss: 0.2980 Acc: 0.8714 Pre: 0.8444 Recall: 0.8750 F1: 0.8594 Train AUC: 0.9720 Val AUC: 0.9485 Time: 13.94\n",
      "Epoch: 185 Train Loss: 0.2170 Val Loss: 0.3001 Acc: 0.8659 Pre: 0.8346 Recall: 0.8750 F1: 0.8543 Train AUC: 0.9718 Val AUC: 0.9482 Time: 13.99\n",
      "Epoch: 186 Train Loss: 0.2081 Val Loss: 0.2980 Acc: 0.8895 Pre: 0.8816 Recall: 0.8710 F1: 0.8763 Train AUC: 0.9750 Val AUC: 0.9487 Time: 13.46\n",
      "Epoch: 187 Train Loss: 0.2110 Val Loss: 0.2951 Acc: 0.8913 Pre: 0.9123 Recall: 0.8387 F1: 0.8739 Train AUC: 0.9737 Val AUC: 0.9496 Time: 13.00\n",
      "Epoch: 188 Train Loss: 0.2088 Val Loss: 0.2964 Acc: 0.8913 Pre: 0.9017 Recall: 0.8508 F1: 0.8755 Train AUC: 0.9753 Val AUC: 0.9487 Time: 13.59\n",
      "Epoch: 189 Train Loss: 0.2119 Val Loss: 0.2968 Acc: 0.8859 Pre: 0.8715 Recall: 0.8750 F1: 0.8732 Train AUC: 0.9723 Val AUC: 0.9488 Time: 13.87\n",
      "Epoch: 190 Train Loss: 0.2129 Val Loss: 0.2928 Acc: 0.8877 Pre: 0.8750 Recall: 0.8750 F1: 0.8750 Train AUC: 0.9730 Val AUC: 0.9489 Time: 13.75\n",
      "Epoch: 191 Train Loss: 0.2077 Val Loss: 0.2899 Acc: 0.8967 Pre: 0.8963 Recall: 0.8710 F1: 0.8834 Train AUC: 0.9766 Val AUC: 0.9495 Time: 13.68\n",
      "Epoch: 192 Train Loss: 0.2044 Val Loss: 0.2894 Acc: 0.9022 Pre: 0.9145 Recall: 0.8629 F1: 0.8880 Train AUC: 0.9765 Val AUC: 0.9507 Time: 12.65\n",
      "Epoch: 193 Train Loss: 0.2062 Val Loss: 0.2932 Acc: 0.9004 Pre: 0.9072 Recall: 0.8669 F1: 0.8866 Train AUC: 0.9753 Val AUC: 0.9504 Time: 13.12\n",
      "Epoch: 194 Train Loss: 0.2083 Val Loss: 0.3016 Acc: 0.8895 Pre: 0.8785 Recall: 0.8750 F1: 0.8768 Train AUC: 0.9744 Val AUC: 0.9501 Time: 13.77\n",
      "Epoch: 195 Train Loss: 0.2028 Val Loss: 0.3033 Acc: 0.8895 Pre: 0.8755 Recall: 0.8790 F1: 0.8773 Train AUC: 0.9747 Val AUC: 0.9503 Time: 13.53\n",
      "Epoch: 196 Train Loss: 0.2070 Val Loss: 0.3007 Acc: 0.8913 Pre: 0.8821 Recall: 0.8750 F1: 0.8785 Train AUC: 0.9745 Val AUC: 0.9504 Time: 14.08\n",
      "Epoch: 197 Train Loss: 0.2060 Val Loss: 0.2950 Acc: 0.8949 Pre: 0.8862 Recall: 0.8790 F1: 0.8826 Train AUC: 0.9736 Val AUC: 0.9513 Time: 14.10\n",
      "Epoch: 198 Train Loss: 0.2070 Val Loss: 0.2937 Acc: 0.9004 Pre: 0.9038 Recall: 0.8710 F1: 0.8871 Train AUC: 0.9750 Val AUC: 0.9515 Time: 13.57\n",
      "Epoch: 199 Train Loss: 0.2045 Val Loss: 0.2982 Acc: 0.8895 Pre: 0.8638 Recall: 0.8952 F1: 0.8792 Train AUC: 0.9760 Val AUC: 0.9515 Time: 14.23\n",
      "Epoch: 200 Train Loss: 0.2043 Val Loss: 0.3106 Acc: 0.8895 Pre: 0.8610 Recall: 0.8992 F1: 0.8797 Train AUC: 0.9751 Val AUC: 0.9505 Time: 13.30\n",
      "Epoch: 201 Train Loss: 0.2124 Val Loss: 0.3054 Acc: 0.8877 Pre: 0.8750 Recall: 0.8750 F1: 0.8750 Train AUC: 0.9737 Val AUC: 0.9504 Time: 13.20\n",
      "Epoch: 202 Train Loss: 0.2112 Val Loss: 0.2951 Acc: 0.9004 Pre: 0.9327 Recall: 0.8387 F1: 0.8832 Train AUC: 0.9728 Val AUC: 0.9525 Time: 14.10\n",
      "Epoch: 203 Train Loss: 0.2004 Val Loss: 0.2858 Acc: 0.9112 Pre: 0.9345 Recall: 0.8629 F1: 0.8973 Train AUC: 0.9765 Val AUC: 0.9535 Time: 12.51\n",
      "Epoch: 204 Train Loss: 0.2058 Val Loss: 0.2921 Acc: 0.8877 Pre: 0.8605 Recall: 0.8952 F1: 0.8775 Train AUC: 0.9749 Val AUC: 0.9508 Time: 13.17\n",
      "Epoch: 205 Train Loss: 0.2048 Val Loss: 0.2957 Acc: 0.8822 Pre: 0.8560 Recall: 0.8871 F1: 0.8713 Train AUC: 0.9768 Val AUC: 0.9510 Time: 14.03\n",
      "Epoch: 206 Train Loss: 0.2010 Val Loss: 0.2930 Acc: 0.8967 Pre: 0.8963 Recall: 0.8710 F1: 0.8834 Train AUC: 0.9772 Val AUC: 0.9512 Time: 14.20\n",
      "Epoch: 207 Train Loss: 0.2004 Val Loss: 0.2978 Acc: 0.8931 Pre: 0.9127 Recall: 0.8427 F1: 0.8763 Train AUC: 0.9761 Val AUC: 0.9510 Time: 14.13\n",
      "Epoch: 208 Train Loss: 0.2086 Val Loss: 0.2935 Acc: 0.8986 Pre: 0.9034 Recall: 0.8669 F1: 0.8848 Train AUC: 0.9747 Val AUC: 0.9518 Time: 12.76\n",
      "Epoch: 209 Train Loss: 0.2020 Val Loss: 0.3071 Acc: 0.8913 Pre: 0.8534 Recall: 0.9153 F1: 0.8833 Train AUC: 0.9766 Val AUC: 0.9504 Time: 13.47\n",
      "Epoch: 210 Train Loss: 0.2102 Val Loss: 0.3019 Acc: 0.8967 Pre: 0.8805 Recall: 0.8911 F1: 0.8858 Train AUC: 0.9750 Val AUC: 0.9505 Time: 13.91\n",
      "Epoch: 211 Train Loss: 0.1979 Val Loss: 0.2954 Acc: 0.8877 Pre: 0.8941 Recall: 0.8508 F1: 0.8719 Train AUC: 0.9774 Val AUC: 0.9509 Time: 14.14\n",
      "Epoch: 212 Train Loss: 0.2016 Val Loss: 0.2934 Acc: 0.8931 Pre: 0.9056 Recall: 0.8508 F1: 0.8773 Train AUC: 0.9761 Val AUC: 0.9513 Time: 12.49\n",
      "Epoch: 213 Train Loss: 0.2029 Val Loss: 0.2938 Acc: 0.8949 Pre: 0.8862 Recall: 0.8790 F1: 0.8826 Train AUC: 0.9767 Val AUC: 0.9512 Time: 13.52\n",
      "Epoch: 214 Train Loss: 0.1991 Val Loss: 0.2951 Acc: 0.8913 Pre: 0.8760 Recall: 0.8831 F1: 0.8795 Train AUC: 0.9762 Val AUC: 0.9513 Time: 13.61\n",
      "Epoch: 215 Train Loss: 0.1967 Val Loss: 0.2891 Acc: 0.8986 Pre: 0.8902 Recall: 0.8831 F1: 0.8866 Train AUC: 0.9777 Val AUC: 0.9522 Time: 14.15\n",
      "Epoch: 216 Train Loss: 0.2016 Val Loss: 0.2870 Acc: 0.9022 Pre: 0.9181 Recall: 0.8589 F1: 0.8875 Train AUC: 0.9769 Val AUC: 0.9527 Time: 14.18\n",
      "Epoch: 217 Train Loss: 0.1986 Val Loss: 0.2882 Acc: 0.8986 Pre: 0.9138 Recall: 0.8548 F1: 0.8833 Train AUC: 0.9769 Val AUC: 0.9528 Time: 12.15\n",
      "Epoch: 218 Train Loss: 0.1984 Val Loss: 0.2907 Acc: 0.8913 Pre: 0.8821 Recall: 0.8750 F1: 0.8785 Train AUC: 0.9774 Val AUC: 0.9528 Time: 12.92\n",
      "Epoch: 219 Train Loss: 0.1899 Val Loss: 0.3024 Acc: 0.8913 Pre: 0.8643 Recall: 0.8992 F1: 0.8814 Train AUC: 0.9790 Val AUC: 0.9521 Time: 12.72\n",
      "Epoch: 220 Train Loss: 0.2005 Val Loss: 0.2950 Acc: 0.8931 Pre: 0.8735 Recall: 0.8911 F1: 0.8822 Train AUC: 0.9772 Val AUC: 0.9526 Time: 13.08\n",
      "Epoch: 221 Train Loss: 0.1962 Val Loss: 0.2882 Acc: 0.8967 Pre: 0.9030 Recall: 0.8629 F1: 0.8825 Train AUC: 0.9777 Val AUC: 0.9534 Time: 13.86\n",
      "Epoch: 222 Train Loss: 0.1859 Val Loss: 0.2886 Acc: 0.8967 Pre: 0.9099 Recall: 0.8548 F1: 0.8815 Train AUC: 0.9801 Val AUC: 0.9535 Time: 14.08\n",
      "Epoch: 223 Train Loss: 0.1897 Val Loss: 0.2883 Acc: 0.8895 Pre: 0.8816 Recall: 0.8710 F1: 0.8763 Train AUC: 0.9792 Val AUC: 0.9533 Time: 14.42\n",
      "Epoch: 224 Train Loss: 0.1944 Val Loss: 0.2987 Acc: 0.8967 Pre: 0.8745 Recall: 0.8992 F1: 0.8867 Train AUC: 0.9775 Val AUC: 0.9529 Time: 14.18\n",
      "Epoch: 225 Train Loss: 0.1940 Val Loss: 0.2949 Acc: 0.8949 Pre: 0.8800 Recall: 0.8871 F1: 0.8835 Train AUC: 0.9791 Val AUC: 0.9530 Time: 12.37\n",
      "Epoch: 226 Train Loss: 0.1933 Val Loss: 0.2860 Acc: 0.8949 Pre: 0.8992 Recall: 0.8629 F1: 0.8807 Train AUC: 0.9773 Val AUC: 0.9538 Time: 12.53\n",
      "Epoch: 227 Train Loss: 0.1944 Val Loss: 0.2834 Acc: 0.8949 Pre: 0.8958 Recall: 0.8669 F1: 0.8811 Train AUC: 0.9776 Val AUC: 0.9534 Time: 12.80\n",
      "Epoch: 228 Train Loss: 0.1841 Val Loss: 0.2896 Acc: 0.8931 Pre: 0.8826 Recall: 0.8790 F1: 0.8808 Train AUC: 0.9803 Val AUC: 0.9518 Time: 13.67\n",
      "Epoch: 229 Train Loss: 0.1866 Val Loss: 0.2931 Acc: 0.8859 Pre: 0.8571 Recall: 0.8952 F1: 0.8757 Train AUC: 0.9799 Val AUC: 0.9519 Time: 14.14\n",
      "Epoch: 230 Train Loss: 0.1896 Val Loss: 0.2873 Acc: 0.8986 Pre: 0.8840 Recall: 0.8911 F1: 0.8876 Train AUC: 0.9793 Val AUC: 0.9535 Time: 14.39\n",
      "Epoch: 231 Train Loss: 0.1878 Val Loss: 0.2850 Acc: 0.9022 Pre: 0.8975 Recall: 0.8831 F1: 0.8902 Train AUC: 0.9790 Val AUC: 0.9540 Time: 14.58\n",
      "Epoch: 232 Train Loss: 0.1875 Val Loss: 0.2831 Acc: 0.9112 Pre: 0.9198 Recall: 0.8790 F1: 0.8990 Train AUC: 0.9792 Val AUC: 0.9545 Time: 13.16\n",
      "Epoch: 233 Train Loss: 0.1986 Val Loss: 0.2815 Acc: 0.9058 Pre: 0.8984 Recall: 0.8911 F1: 0.8947 Train AUC: 0.9770 Val AUC: 0.9548 Time: 12.40\n",
      "Epoch: 234 Train Loss: 0.1929 Val Loss: 0.2887 Acc: 0.9004 Pre: 0.8784 Recall: 0.9032 F1: 0.8907 Train AUC: 0.9779 Val AUC: 0.9537 Time: 12.75\n",
      "Epoch: 235 Train Loss: 0.1816 Val Loss: 0.2951 Acc: 0.8841 Pre: 0.8622 Recall: 0.8831 F1: 0.8725 Train AUC: 0.9813 Val AUC: 0.9521 Time: 14.23\n",
      "Epoch: 236 Train Loss: 0.1836 Val Loss: 0.2904 Acc: 0.8895 Pre: 0.8755 Recall: 0.8790 F1: 0.8773 Train AUC: 0.9800 Val AUC: 0.9522 Time: 14.37\n",
      "Epoch: 237 Train Loss: 0.1943 Val Loss: 0.2831 Acc: 0.8931 Pre: 0.9021 Recall: 0.8548 F1: 0.8778 Train AUC: 0.9779 Val AUC: 0.9544 Time: 14.49\n",
      "Epoch: 238 Train Loss: 0.1932 Val Loss: 0.2794 Acc: 0.9094 Pre: 0.9091 Recall: 0.8871 F1: 0.8980 Train AUC: 0.9783 Val AUC: 0.9547 Time: 14.54\n",
      "Epoch: 239 Train Loss: 0.1881 Val Loss: 0.2916 Acc: 0.8841 Pre: 0.8433 Recall: 0.9113 F1: 0.8760 Train AUC: 0.9798 Val AUC: 0.9540 Time: 12.73\n",
      "Epoch: 240 Train Loss: 0.1877 Val Loss: 0.2954 Acc: 0.9076 Pre: 0.8833 Recall: 0.9153 F1: 0.8990 Train AUC: 0.9799 Val AUC: 0.9537 Time: 12.45\n",
      "Epoch: 241 Train Loss: 0.1921 Val Loss: 0.2900 Acc: 0.8967 Pre: 0.8963 Recall: 0.8710 F1: 0.8834 Train AUC: 0.9790 Val AUC: 0.9537 Time: 12.30\n",
      "Epoch: 242 Train Loss: 0.1838 Val Loss: 0.2938 Acc: 0.8895 Pre: 0.8979 Recall: 0.8508 F1: 0.8737 Train AUC: 0.9802 Val AUC: 0.9527 Time: 13.06\n",
      "Epoch: 243 Train Loss: 0.1791 Val Loss: 0.2902 Acc: 0.8949 Pre: 0.8992 Recall: 0.8629 F1: 0.8807 Train AUC: 0.9814 Val AUC: 0.9531 Time: 14.12\n",
      "Epoch: 244 Train Loss: 0.1790 Val Loss: 0.2862 Acc: 0.9004 Pre: 0.8907 Recall: 0.8871 F1: 0.8889 Train AUC: 0.9808 Val AUC: 0.9535 Time: 14.42\n",
      "Epoch: 245 Train Loss: 0.1755 Val Loss: 0.2835 Acc: 0.9040 Pre: 0.8824 Recall: 0.9073 F1: 0.8946 Train AUC: 0.9821 Val AUC: 0.9540 Time: 14.86\n",
      "Epoch: 246 Train Loss: 0.1770 Val Loss: 0.2834 Acc: 0.8986 Pre: 0.8810 Recall: 0.8952 F1: 0.8880 Train AUC: 0.9827 Val AUC: 0.9541 Time: 14.44\n",
      "Epoch: 247 Train Loss: 0.1773 Val Loss: 0.2854 Acc: 0.9040 Pre: 0.8980 Recall: 0.8871 F1: 0.8925 Train AUC: 0.9820 Val AUC: 0.9536 Time: 12.92\n",
      "Epoch: 248 Train Loss: 0.1726 Val Loss: 0.2915 Acc: 0.8986 Pre: 0.9000 Recall: 0.8710 F1: 0.8852 Train AUC: 0.9827 Val AUC: 0.9529 Time: 12.37\n",
      "Epoch: 249 Train Loss: 0.1790 Val Loss: 0.2934 Acc: 0.9040 Pre: 0.8916 Recall: 0.8952 F1: 0.8934 Train AUC: 0.9812 Val AUC: 0.9536 Time: 12.43\n",
      "Epoch: 250 Train Loss: 0.1670 Val Loss: 0.2902 Acc: 0.9040 Pre: 0.8916 Recall: 0.8952 F1: 0.8934 Train AUC: 0.9834 Val AUC: 0.9540 Time: 12.39\n",
      "Epoch: 251 Train Loss: 0.1712 Val Loss: 0.2861 Acc: 0.9004 Pre: 0.8876 Recall: 0.8911 F1: 0.8893 Train AUC: 0.9829 Val AUC: 0.9542 Time: 13.55\n",
      "Epoch: 252 Train Loss: 0.1739 Val Loss: 0.2844 Acc: 0.9022 Pre: 0.8943 Recall: 0.8871 F1: 0.8907 Train AUC: 0.9824 Val AUC: 0.9536 Time: 13.77\n",
      "Epoch: 253 Train Loss: 0.1749 Val Loss: 0.2893 Acc: 0.8967 Pre: 0.8898 Recall: 0.8790 F1: 0.8844 Train AUC: 0.9844 Val AUC: 0.9543 Time: 14.31\n",
      "Epoch: 254 Train Loss: 0.1722 Val Loss: 0.2937 Acc: 0.9076 Pre: 0.8893 Recall: 0.9073 F1: 0.8982 Train AUC: 0.9829 Val AUC: 0.9540 Time: 15.02\n",
      "Epoch: 255 Train Loss: 0.1765 Val Loss: 0.2869 Acc: 0.9058 Pre: 0.8889 Recall: 0.9032 F1: 0.8960 Train AUC: 0.9815 Val AUC: 0.9547 Time: 13.64\n",
      "Epoch: 256 Train Loss: 0.1775 Val Loss: 0.2842 Acc: 0.9040 Pre: 0.8884 Recall: 0.8992 F1: 0.8938 Train AUC: 0.9818 Val AUC: 0.9554 Time: 12.70\n",
      "Epoch: 257 Train Loss: 0.1679 Val Loss: 0.2818 Acc: 0.9022 Pre: 0.8880 Recall: 0.8952 F1: 0.8916 Train AUC: 0.9838 Val AUC: 0.9558 Time: 13.49\n",
      "Epoch: 258 Train Loss: 0.1727 Val Loss: 0.2825 Acc: 0.9112 Pre: 0.9061 Recall: 0.8952 F1: 0.9006 Train AUC: 0.9831 Val AUC: 0.9552 Time: 13.06\n",
      "Epoch: 259 Train Loss: 0.1673 Val Loss: 0.2879 Acc: 0.9058 Pre: 0.8984 Recall: 0.8911 F1: 0.8947 Train AUC: 0.9841 Val AUC: 0.9547 Time: 13.47\n",
      "Epoch: 260 Train Loss: 0.1763 Val Loss: 0.2995 Acc: 0.9022 Pre: 0.8819 Recall: 0.9032 F1: 0.8924 Train AUC: 0.9823 Val AUC: 0.9538 Time: 13.92\n",
      "Epoch: 261 Train Loss: 0.1679 Val Loss: 0.2971 Acc: 0.9076 Pre: 0.8988 Recall: 0.8952 F1: 0.8970 Train AUC: 0.9841 Val AUC: 0.9540 Time: 14.52\n",
      "Epoch: 262 Train Loss: 0.1703 Val Loss: 0.2923 Acc: 0.9130 Pre: 0.9065 Recall: 0.8992 F1: 0.9028 Train AUC: 0.9830 Val AUC: 0.9539 Time: 13.26\n",
      "Epoch: 263 Train Loss: 0.1694 Val Loss: 0.2896 Acc: 0.9076 Pre: 0.9053 Recall: 0.8871 F1: 0.8961 Train AUC: 0.9827 Val AUC: 0.9538 Time: 13.34\n",
      "Epoch: 264 Train Loss: 0.1694 Val Loss: 0.2866 Acc: 0.9058 Pre: 0.8952 Recall: 0.8952 F1: 0.8952 Train AUC: 0.9831 Val AUC: 0.9541 Time: 13.56\n",
      "Epoch: 265 Train Loss: 0.1638 Val Loss: 0.2868 Acc: 0.9004 Pre: 0.8845 Recall: 0.8952 F1: 0.8898 Train AUC: 0.9842 Val AUC: 0.9545 Time: 13.84\n",
      "Epoch: 266 Train Loss: 0.1717 Val Loss: 0.2942 Acc: 0.9022 Pre: 0.8911 Recall: 0.8911 F1: 0.8911 Train AUC: 0.9835 Val AUC: 0.9545 Time: 12.85\n",
      "Epoch: 267 Train Loss: 0.1612 Val Loss: 0.3045 Acc: 0.8913 Pre: 0.8821 Recall: 0.8750 F1: 0.8785 Train AUC: 0.9849 Val AUC: 0.9533 Time: 12.95\n",
      "Epoch: 268 Train Loss: 0.1670 Val Loss: 0.2993 Acc: 0.8967 Pre: 0.8930 Recall: 0.8750 F1: 0.8839 Train AUC: 0.9837 Val AUC: 0.9542 Time: 13.15\n",
      "Epoch: 269 Train Loss: 0.1667 Val Loss: 0.2925 Acc: 0.9112 Pre: 0.8964 Recall: 0.9073 F1: 0.9018 Train AUC: 0.9837 Val AUC: 0.9551 Time: 13.46\n",
      "Epoch: 270 Train Loss: 0.1644 Val Loss: 0.2901 Acc: 0.8967 Pre: 0.8659 Recall: 0.9113 F1: 0.8880 Train AUC: 0.9842 Val AUC: 0.9549 Time: 13.89\n",
      "Epoch: 271 Train Loss: 0.1715 Val Loss: 0.2899 Acc: 0.9112 Pre: 0.9061 Recall: 0.8952 F1: 0.9006 Train AUC: 0.9836 Val AUC: 0.9556 Time: 14.52\n",
      "Epoch: 272 Train Loss: 0.1727 Val Loss: 0.2976 Acc: 0.9094 Pre: 0.9024 Recall: 0.8952 F1: 0.8988 Train AUC: 0.9819 Val AUC: 0.9543 Time: 13.81\n",
      "Epoch: 273 Train Loss: 0.1700 Val Loss: 0.3077 Acc: 0.8986 Pre: 0.8780 Recall: 0.8992 F1: 0.8884 Train AUC: 0.9826 Val AUC: 0.9528 Time: 13.54\n",
      "Epoch: 274 Train Loss: 0.1710 Val Loss: 0.2957 Acc: 0.9022 Pre: 0.8760 Recall: 0.9113 F1: 0.8933 Train AUC: 0.9829 Val AUC: 0.9545 Time: 14.01\n",
      "Epoch: 275 Train Loss: 0.1720 Val Loss: 0.2758 Acc: 0.9076 Pre: 0.8988 Recall: 0.8952 F1: 0.8970 Train AUC: 0.9830 Val AUC: 0.9567 Time: 12.37\n",
      "Epoch: 276 Train Loss: 0.1614 Val Loss: 0.2749 Acc: 0.9022 Pre: 0.8911 Recall: 0.8911 F1: 0.8911 Train AUC: 0.9853 Val AUC: 0.9565 Time: 12.49\n",
      "Epoch: 277 Train Loss: 0.1637 Val Loss: 0.2830 Acc: 0.9022 Pre: 0.8880 Recall: 0.8952 F1: 0.8916 Train AUC: 0.9850 Val AUC: 0.9566 Time: 12.32\n",
      "Epoch: 278 Train Loss: 0.1608 Val Loss: 0.2885 Acc: 0.9094 Pre: 0.8992 Recall: 0.8992 F1: 0.8992 Train AUC: 0.9845 Val AUC: 0.9564 Time: 12.49\n",
      "Epoch: 279 Train Loss: 0.1615 Val Loss: 0.3018 Acc: 0.9112 Pre: 0.8964 Recall: 0.9073 F1: 0.9018 Train AUC: 0.9856 Val AUC: 0.9548 Time: 13.58\n",
      "Epoch: 280 Train Loss: 0.1543 Val Loss: 0.2973 Acc: 0.9040 Pre: 0.8980 Recall: 0.8871 F1: 0.8925 Train AUC: 0.9860 Val AUC: 0.9547 Time: 14.10\n",
      "Epoch: 281 Train Loss: 0.1609 Val Loss: 0.2826 Acc: 0.9130 Pre: 0.9098 Recall: 0.8952 F1: 0.9024 Train AUC: 0.9845 Val AUC: 0.9566 Time: 14.57\n",
      "Epoch: 282 Train Loss: 0.1549 Val Loss: 0.2750 Acc: 0.9076 Pre: 0.8988 Recall: 0.8952 F1: 0.8970 Train AUC: 0.9864 Val AUC: 0.9568 Time: 14.84\n",
      "Epoch: 283 Train Loss: 0.1530 Val Loss: 0.2802 Acc: 0.8986 Pre: 0.8721 Recall: 0.9073 F1: 0.8893 Train AUC: 0.9875 Val AUC: 0.9573 Time: 14.55\n",
      "Epoch: 284 Train Loss: 0.1581 Val Loss: 0.2896 Acc: 0.9076 Pre: 0.8863 Recall: 0.9113 F1: 0.8986 Train AUC: 0.9869 Val AUC: 0.9563 Time: 12.55\n",
      "Epoch: 285 Train Loss: 0.1578 Val Loss: 0.2958 Acc: 0.9094 Pre: 0.9057 Recall: 0.8911 F1: 0.8984 Train AUC: 0.9857 Val AUC: 0.9551 Time: 12.30\n",
      "Epoch: 286 Train Loss: 0.1685 Val Loss: 0.2892 Acc: 0.9076 Pre: 0.8956 Recall: 0.8992 F1: 0.8974 Train AUC: 0.9831 Val AUC: 0.9560 Time: 12.22\n",
      "Epoch: 287 Train Loss: 0.1532 Val Loss: 0.2866 Acc: 0.9076 Pre: 0.8893 Recall: 0.9073 F1: 0.8982 Train AUC: 0.9866 Val AUC: 0.9567 Time: 12.46\n",
      "Epoch: 288 Train Loss: 0.1561 Val Loss: 0.2810 Acc: 0.9058 Pre: 0.8889 Recall: 0.9032 F1: 0.8960 Train AUC: 0.9864 Val AUC: 0.9564 Time: 13.75\n",
      "Epoch: 289 Train Loss: 0.1484 Val Loss: 0.2819 Acc: 0.9076 Pre: 0.8988 Recall: 0.8952 F1: 0.8970 Train AUC: 0.9878 Val AUC: 0.9560 Time: 13.87\n",
      "Epoch: 290 Train Loss: 0.1521 Val Loss: 0.2932 Acc: 0.8986 Pre: 0.9000 Recall: 0.8710 F1: 0.8852 Train AUC: 0.9864 Val AUC: 0.9549 Time: 14.46\n",
      "Epoch: 291 Train Loss: 0.1595 Val Loss: 0.2944 Acc: 0.9022 Pre: 0.8819 Recall: 0.9032 F1: 0.8924 Train AUC: 0.9855 Val AUC: 0.9554 Time: 14.77\n",
      "Epoch: 292 Train Loss: 0.1469 Val Loss: 0.2941 Acc: 0.8949 Pre: 0.8654 Recall: 0.9073 F1: 0.8858 Train AUC: 0.9881 Val AUC: 0.9563 Time: 14.69\n",
      "Epoch: 293 Train Loss: 0.1550 Val Loss: 0.2845 Acc: 0.9040 Pre: 0.8824 Recall: 0.9073 F1: 0.8946 Train AUC: 0.9871 Val AUC: 0.9568 Time: 12.40\n",
      "Epoch: 294 Train Loss: 0.1568 Val Loss: 0.2834 Acc: 0.9022 Pre: 0.8943 Recall: 0.8871 F1: 0.8907 Train AUC: 0.9854 Val AUC: 0.9560 Time: 12.51\n",
      "Epoch: 295 Train Loss: 0.1663 Val Loss: 0.2803 Acc: 0.9094 Pre: 0.8992 Recall: 0.8992 F1: 0.8992 Train AUC: 0.9838 Val AUC: 0.9571 Time: 13.00\n",
      "Epoch: 296 Train Loss: 0.1499 Val Loss: 0.2918 Acc: 0.9112 Pre: 0.8902 Recall: 0.9153 F1: 0.9026 Train AUC: 0.9882 Val AUC: 0.9567 Time: 13.38\n",
      "Epoch: 297 Train Loss: 0.1503 Val Loss: 0.3215 Acc: 0.8986 Pre: 0.8636 Recall: 0.9194 F1: 0.8906 Train AUC: 0.9872 Val AUC: 0.9534 Time: 14.09\n",
      "Epoch: 298 Train Loss: 0.1547 Val Loss: 0.3108 Acc: 0.9022 Pre: 0.8789 Recall: 0.9073 F1: 0.8929 Train AUC: 0.9857 Val AUC: 0.9541 Time: 14.92\n",
      "Epoch: 299 Train Loss: 0.1472 Val Loss: 0.2806 Acc: 0.9112 Pre: 0.9061 Recall: 0.8952 F1: 0.9006 Train AUC: 0.9872 Val AUC: 0.9575 Time: 14.67\n",
      "Epoch: 300 Train Loss: 0.1491 Val Loss: 0.2734 Acc: 0.9076 Pre: 0.9020 Recall: 0.8911 F1: 0.8966 Train AUC: 0.9873 Val AUC: 0.9578 Time: 12.68\n",
      "Epoch: 301 Train Loss: 0.1624 Val Loss: 0.2817 Acc: 0.9076 Pre: 0.8833 Recall: 0.9153 F1: 0.8990 Train AUC: 0.9861 Val AUC: 0.9588 Time: 12.48\n",
      "Epoch: 302 Train Loss: 0.1519 Val Loss: 0.3088 Acc: 0.8931 Pre: 0.8593 Recall: 0.9113 F1: 0.8845 Train AUC: 0.9874 Val AUC: 0.9563 Time: 12.65\n",
      "Epoch: 303 Train Loss: 0.1572 Val Loss: 0.3055 Acc: 0.9022 Pre: 0.8849 Recall: 0.8992 F1: 0.8920 Train AUC: 0.9868 Val AUC: 0.9550 Time: 13.08\n",
      "Epoch: 304 Train Loss: 0.1552 Val Loss: 0.2820 Acc: 0.9130 Pre: 0.9098 Recall: 0.8952 F1: 0.9024 Train AUC: 0.9855 Val AUC: 0.9583 Time: 14.19\n",
      "Epoch: 305 Train Loss: 0.1460 Val Loss: 0.2694 Acc: 0.9022 Pre: 0.8911 Recall: 0.8911 F1: 0.8911 Train AUC: 0.9876 Val AUC: 0.9576 Time: 14.11\n",
      "Epoch: 306 Train Loss: 0.1792 Val Loss: 0.2797 Acc: 0.9112 Pre: 0.9061 Recall: 0.8952 F1: 0.9006 Train AUC: 0.9817 Val AUC: 0.9584 Time: 14.06\n",
      "Epoch: 307 Train Loss: 0.1432 Val Loss: 0.3131 Acc: 0.9076 Pre: 0.8956 Recall: 0.8992 F1: 0.8974 Train AUC: 0.9896 Val AUC: 0.9552 Time: 14.84\n",
      "Epoch: 308 Train Loss: 0.1576 Val Loss: 0.3281 Acc: 0.9022 Pre: 0.8819 Recall: 0.9032 F1: 0.8924 Train AUC: 0.9856 Val AUC: 0.9543 Time: 13.32\n",
      "Epoch: 309 Train Loss: 0.1653 Val Loss: 0.3043 Acc: 0.9094 Pre: 0.9024 Recall: 0.8952 F1: 0.8988 Train AUC: 0.9834 Val AUC: 0.9564 Time: 12.35\n",
      "Epoch: 310 Train Loss: 0.1495 Val Loss: 0.2818 Acc: 0.9022 Pre: 0.8702 Recall: 0.9194 F1: 0.8941 Train AUC: 0.9864 Val AUC: 0.9588 Time: 12.41\n",
      "Epoch: 311 Train Loss: 0.1498 Val Loss: 0.2757 Acc: 0.9004 Pre: 0.8726 Recall: 0.9113 F1: 0.8915 Train AUC: 0.9870 Val AUC: 0.9578 Time: 12.52\n",
      "Epoch: 312 Train Loss: 0.1542 Val Loss: 0.2781 Acc: 0.9040 Pre: 0.8794 Recall: 0.9113 F1: 0.8950 Train AUC: 0.9866 Val AUC: 0.9574 Time: 12.76\n",
      "Epoch: 313 Train Loss: 0.1586 Val Loss: 0.2956 Acc: 0.9058 Pre: 0.8984 Recall: 0.8911 F1: 0.8947 Train AUC: 0.9858 Val AUC: 0.9560 Time: 13.75\n",
      "Epoch: 314 Train Loss: 0.1419 Val Loss: 0.3136 Acc: 0.8986 Pre: 0.8840 Recall: 0.8911 F1: 0.8876 Train AUC: 0.9883 Val AUC: 0.9529 Time: 14.25\n",
      "Epoch: 315 Train Loss: 0.1480 Val Loss: 0.3122 Acc: 0.8967 Pre: 0.8745 Recall: 0.8992 F1: 0.8867 Train AUC: 0.9872 Val AUC: 0.9537 Time: 14.86\n",
      "Epoch: 316 Train Loss: 0.1476 Val Loss: 0.2984 Acc: 0.9022 Pre: 0.8819 Recall: 0.9032 F1: 0.8924 Train AUC: 0.9870 Val AUC: 0.9554 Time: 15.30\n",
      "Epoch: 317 Train Loss: 0.1447 Val Loss: 0.2855 Acc: 0.8986 Pre: 0.8692 Recall: 0.9113 F1: 0.8898 Train AUC: 0.9878 Val AUC: 0.9570 Time: 13.14\n",
      "Epoch: 318 Train Loss: 0.1489 Val Loss: 0.2786 Acc: 0.9112 Pre: 0.9028 Recall: 0.8992 F1: 0.9010 Train AUC: 0.9884 Val AUC: 0.9578 Time: 12.68\n",
      "Epoch: 319 Train Loss: 0.1480 Val Loss: 0.2845 Acc: 0.9022 Pre: 0.8975 Recall: 0.8831 F1: 0.8902 Train AUC: 0.9886 Val AUC: 0.9564 Time: 13.24\n",
      "Epoch: 320 Train Loss: 0.1566 Val Loss: 0.2952 Acc: 0.9022 Pre: 0.8819 Recall: 0.9032 F1: 0.8924 Train AUC: 0.9872 Val AUC: 0.9563 Time: 14.03\n",
      "Epoch: 321 Train Loss: 0.1370 Val Loss: 0.2999 Acc: 0.9004 Pre: 0.8784 Recall: 0.9032 F1: 0.8907 Train AUC: 0.9901 Val AUC: 0.9565 Time: 14.20\n",
      "Epoch: 322 Train Loss: 0.1298 Val Loss: 0.2967 Acc: 0.9058 Pre: 0.8952 Recall: 0.8952 F1: 0.8952 Train AUC: 0.9910 Val AUC: 0.9564 Time: 13.05\n",
      "Epoch: 323 Train Loss: 0.1417 Val Loss: 0.2872 Acc: 0.9130 Pre: 0.9032 Recall: 0.9032 F1: 0.9032 Train AUC: 0.9884 Val AUC: 0.9572 Time: 13.61\n",
      "Epoch: 324 Train Loss: 0.1405 Val Loss: 0.2808 Acc: 0.9058 Pre: 0.8828 Recall: 0.9113 F1: 0.8968 Train AUC: 0.9883 Val AUC: 0.9579 Time: 14.38\n",
      "Epoch: 325 Train Loss: 0.1348 Val Loss: 0.2811 Acc: 0.9040 Pre: 0.8824 Recall: 0.9073 F1: 0.8946 Train AUC: 0.9906 Val AUC: 0.9576 Time: 14.08\n",
      "Epoch: 326 Train Loss: 0.1362 Val Loss: 0.2874 Acc: 0.9094 Pre: 0.8898 Recall: 0.9113 F1: 0.9004 Train AUC: 0.9906 Val AUC: 0.9565 Time: 14.06\n",
      "Epoch: 327 Train Loss: 0.1344 Val Loss: 0.2972 Acc: 0.9040 Pre: 0.8854 Recall: 0.9032 F1: 0.8942 Train AUC: 0.9904 Val AUC: 0.9557 Time: 13.38\n",
      "Epoch: 328 Train Loss: 0.1395 Val Loss: 0.3069 Acc: 0.8986 Pre: 0.8692 Recall: 0.9113 F1: 0.8898 Train AUC: 0.9898 Val AUC: 0.9557 Time: 12.89\n",
      "Epoch: 329 Train Loss: 0.1420 Val Loss: 0.2990 Acc: 0.9094 Pre: 0.8898 Recall: 0.9113 F1: 0.9004 Train AUC: 0.9882 Val AUC: 0.9570 Time: 13.25\n",
      "Epoch: 330 Train Loss: 0.1386 Val Loss: 0.2842 Acc: 0.9112 Pre: 0.8996 Recall: 0.9032 F1: 0.9014 Train AUC: 0.9897 Val AUC: 0.9582 Time: 12.71\n",
      "Epoch: 331 Train Loss: 0.1260 Val Loss: 0.2798 Acc: 0.9076 Pre: 0.8956 Recall: 0.8992 F1: 0.8974 Train AUC: 0.9913 Val AUC: 0.9584 Time: 13.38\n",
      "Epoch: 332 Train Loss: 0.1454 Val Loss: 0.2916 Acc: 0.9058 Pre: 0.8740 Recall: 0.9234 F1: 0.8980 Train AUC: 0.9891 Val AUC: 0.9591 Time: 13.72\n",
      "Epoch: 333 Train Loss: 0.1420 Val Loss: 0.3131 Acc: 0.8895 Pre: 0.8528 Recall: 0.9113 F1: 0.8811 Train AUC: 0.9897 Val AUC: 0.9574 Time: 14.51\n",
      "Epoch: 334 Train Loss: 0.1375 Val Loss: 0.3170 Acc: 0.8967 Pre: 0.8835 Recall: 0.8871 F1: 0.8853 Train AUC: 0.9904 Val AUC: 0.9550 Time: 15.02\n",
      "Epoch: 335 Train Loss: 0.1469 Val Loss: 0.3072 Acc: 0.9022 Pre: 0.8975 Recall: 0.8831 F1: 0.8902 Train AUC: 0.9870 Val AUC: 0.9560 Time: 13.34\n",
      "Epoch: 336 Train Loss: 0.1406 Val Loss: 0.2782 Acc: 0.9022 Pre: 0.8880 Recall: 0.8952 F1: 0.8916 Train AUC: 0.9889 Val AUC: 0.9581 Time: 12.33\n",
      "Epoch: 337 Train Loss: 0.1328 Val Loss: 0.2791 Acc: 0.8967 Pre: 0.8604 Recall: 0.9194 F1: 0.8889 Train AUC: 0.9908 Val AUC: 0.9595 Time: 12.36\n",
      "Epoch: 338 Train Loss: 0.1446 Val Loss: 0.2829 Acc: 0.9076 Pre: 0.8774 Recall: 0.9234 F1: 0.8998 Train AUC: 0.9891 Val AUC: 0.9593 Time: 12.79\n",
      "Epoch: 339 Train Loss: 0.1387 Val Loss: 0.3134 Acc: 0.9040 Pre: 0.8884 Recall: 0.8992 F1: 0.8938 Train AUC: 0.9899 Val AUC: 0.9561 Time: 13.87\n",
      "Epoch: 340 Train Loss: 0.1420 Val Loss: 0.3354 Acc: 0.8949 Pre: 0.8682 Recall: 0.9032 F1: 0.8854 Train AUC: 0.9879 Val AUC: 0.9544 Time: 14.14\n",
      "Epoch: 341 Train Loss: 0.1431 Val Loss: 0.3146 Acc: 0.9004 Pre: 0.8755 Recall: 0.9073 F1: 0.8911 Train AUC: 0.9880 Val AUC: 0.9558 Time: 13.78\n",
      "Epoch: 342 Train Loss: 0.1315 Val Loss: 0.2862 Acc: 0.9040 Pre: 0.8794 Recall: 0.9113 F1: 0.8950 Train AUC: 0.9898 Val AUC: 0.9585 Time: 14.54\n",
      "Epoch: 343 Train Loss: 0.1328 Val Loss: 0.2795 Acc: 0.8895 Pre: 0.8528 Recall: 0.9113 F1: 0.8811 Train AUC: 0.9902 Val AUC: 0.9582 Time: 13.30\n",
      "Epoch: 344 Train Loss: 0.1453 Val Loss: 0.2787 Acc: 0.9040 Pre: 0.8884 Recall: 0.8992 F1: 0.8938 Train AUC: 0.9890 Val AUC: 0.9586 Time: 12.27\n",
      "Epoch: 345 Train Loss: 0.1318 Val Loss: 0.3030 Acc: 0.9058 Pre: 0.8984 Recall: 0.8911 F1: 0.8947 Train AUC: 0.9909 Val AUC: 0.9565 Time: 12.39\n",
      "Epoch: 346 Train Loss: 0.1344 Val Loss: 0.3314 Acc: 0.8949 Pre: 0.8740 Recall: 0.8952 F1: 0.8845 Train AUC: 0.9898 Val AUC: 0.9534 Time: 13.05\n",
      "Epoch: 347 Train Loss: 0.1335 Val Loss: 0.3327 Acc: 0.8949 Pre: 0.8654 Recall: 0.9073 F1: 0.8858 Train AUC: 0.9897 Val AUC: 0.9551 Time: 13.42\n",
      "Epoch: 348 Train Loss: 0.1432 Val Loss: 0.2942 Acc: 0.9076 Pre: 0.8863 Recall: 0.9113 F1: 0.8986 Train AUC: 0.9893 Val AUC: 0.9591 Time: 14.00\n",
      "Epoch: 349 Train Loss: 0.1235 Val Loss: 0.2845 Acc: 0.9004 Pre: 0.8755 Recall: 0.9073 F1: 0.8911 Train AUC: 0.9920 Val AUC: 0.9587 Time: 14.66\n",
      "Epoch: 350 Train Loss: 0.1372 Val Loss: 0.2884 Acc: 0.8986 Pre: 0.8721 Recall: 0.9073 F1: 0.8893 Train AUC: 0.9895 Val AUC: 0.9579 Time: 14.80\n",
      "Epoch: 351 Train Loss: 0.1452 Val Loss: 0.2939 Acc: 0.8931 Pre: 0.8593 Recall: 0.9113 F1: 0.8845 Train AUC: 0.9880 Val AUC: 0.9587 Time: 14.04\n",
      "Epoch: 352 Train Loss: 0.1308 Val Loss: 0.3090 Acc: 0.9004 Pre: 0.8814 Recall: 0.8992 F1: 0.8902 Train AUC: 0.9904 Val AUC: 0.9574 Time: 12.42\n",
      "Epoch: 353 Train Loss: 0.1315 Val Loss: 0.3297 Acc: 0.8931 Pre: 0.8765 Recall: 0.8871 F1: 0.8818 Train AUC: 0.9895 Val AUC: 0.9544 Time: 12.76\n",
      "Epoch: 354 Train Loss: 0.1444 Val Loss: 0.3220 Acc: 0.8931 Pre: 0.8765 Recall: 0.8871 F1: 0.8818 Train AUC: 0.9874 Val AUC: 0.9545 Time: 12.29\n",
      "Epoch: 355 Train Loss: 0.1308 Val Loss: 0.2955 Acc: 0.8949 Pre: 0.8682 Recall: 0.9032 F1: 0.8854 Train AUC: 0.9902 Val AUC: 0.9570 Time: 12.56\n",
      "Epoch: 356 Train Loss: 0.1249 Val Loss: 0.2783 Acc: 0.9004 Pre: 0.8755 Recall: 0.9073 F1: 0.8911 Train AUC: 0.9928 Val AUC: 0.9599 Time: 14.09\n",
      "Epoch: 357 Train Loss: 0.1293 Val Loss: 0.2888 Acc: 0.9004 Pre: 0.8814 Recall: 0.8992 F1: 0.8902 Train AUC: 0.9914 Val AUC: 0.9591 Time: 13.97\n",
      "Epoch: 358 Train Loss: 0.1274 Val Loss: 0.3065 Acc: 0.8949 Pre: 0.8598 Recall: 0.9153 F1: 0.8867 Train AUC: 0.9917 Val AUC: 0.9584 Time: 14.66\n",
      "Epoch: 359 Train Loss: 0.1372 Val Loss: 0.3148 Acc: 0.9058 Pre: 0.8769 Recall: 0.9194 F1: 0.8976 Train AUC: 0.9889 Val AUC: 0.9581 Time: 14.61\n",
      "Epoch: 360 Train Loss: 0.1313 Val Loss: 0.3255 Acc: 0.9004 Pre: 0.8669 Recall: 0.9194 F1: 0.8924 Train AUC: 0.9899 Val AUC: 0.9572 Time: 12.67\n",
      "Epoch: 361 Train Loss: 0.1362 Val Loss: 0.3287 Acc: 0.8913 Pre: 0.8534 Recall: 0.9153 F1: 0.8833 Train AUC: 0.9889 Val AUC: 0.9550 Time: 12.43\n",
      "Epoch: 362 Train Loss: 0.1221 Val Loss: 0.3146 Acc: 0.8877 Pre: 0.8577 Recall: 0.8992 F1: 0.8780 Train AUC: 0.9915 Val AUC: 0.9543 Time: 12.70\n",
      "Epoch: 363 Train Loss: 0.1322 Val Loss: 0.2975 Acc: 0.9022 Pre: 0.8760 Recall: 0.9113 F1: 0.8933 Train AUC: 0.9900 Val AUC: 0.9560 Time: 13.10\n",
      "Epoch: 364 Train Loss: 0.1300 Val Loss: 0.2961 Acc: 0.9022 Pre: 0.8702 Recall: 0.9194 F1: 0.8941 Train AUC: 0.9908 Val AUC: 0.9565 Time: 13.48\n",
      "Epoch: 365 Train Loss: 0.1285 Val Loss: 0.2999 Acc: 0.9076 Pre: 0.8803 Recall: 0.9194 F1: 0.8994 Train AUC: 0.9911 Val AUC: 0.9569 Time: 14.16\n",
      "Epoch: 366 Train Loss: 0.1239 Val Loss: 0.3087 Acc: 0.9040 Pre: 0.8884 Recall: 0.8992 F1: 0.8938 Train AUC: 0.9915 Val AUC: 0.9571 Time: 14.06\n",
      "Epoch: 367 Train Loss: 0.1225 Val Loss: 0.3180 Acc: 0.9058 Pre: 0.8952 Recall: 0.8952 F1: 0.8952 Train AUC: 0.9911 Val AUC: 0.9561 Time: 14.00\n",
      "Epoch: 368 Train Loss: 0.1273 Val Loss: 0.3081 Acc: 0.8967 Pre: 0.8716 Recall: 0.9032 F1: 0.8871 Train AUC: 0.9903 Val AUC: 0.9574 Time: 12.58\n",
      "Epoch: 369 Train Loss: 0.1266 Val Loss: 0.2892 Acc: 0.8967 Pre: 0.8716 Recall: 0.9032 F1: 0.8871 Train AUC: 0.9910 Val AUC: 0.9588 Time: 12.82\n",
      "Epoch: 370 Train Loss: 0.1266 Val Loss: 0.2799 Acc: 0.9130 Pre: 0.9032 Recall: 0.9032 F1: 0.9032 Train AUC: 0.9913 Val AUC: 0.9597 Time: 13.21\n",
      "Epoch: 371 Train Loss: 0.1318 Val Loss: 0.2880 Acc: 0.9094 Pre: 0.8898 Recall: 0.9113 F1: 0.9004 Train AUC: 0.9913 Val AUC: 0.9600 Time: 13.70\n",
      "Epoch: 372 Train Loss: 0.1166 Val Loss: 0.3045 Acc: 0.8949 Pre: 0.8654 Recall: 0.9073 F1: 0.8858 Train AUC: 0.9930 Val AUC: 0.9583 Time: 14.45\n",
      "Epoch: 373 Train Loss: 0.1260 Val Loss: 0.3087 Acc: 0.9022 Pre: 0.8789 Recall: 0.9073 F1: 0.8929 Train AUC: 0.9911 Val AUC: 0.9573 Time: 15.29\n",
      "Epoch: 374 Train Loss: 0.1210 Val Loss: 0.3060 Acc: 0.9004 Pre: 0.8814 Recall: 0.8992 F1: 0.8902 Train AUC: 0.9916 Val AUC: 0.9569 Time: 13.33\n",
      "Epoch: 375 Train Loss: 0.1200 Val Loss: 0.2973 Acc: 0.9004 Pre: 0.8697 Recall: 0.9153 F1: 0.8919 Train AUC: 0.9924 Val AUC: 0.9573 Time: 12.30\n",
      "Epoch: 376 Train Loss: 0.1182 Val Loss: 0.2938 Acc: 0.8949 Pre: 0.8598 Recall: 0.9153 F1: 0.8867 Train AUC: 0.9928 Val AUC: 0.9579 Time: 12.25\n",
      "Epoch: 377 Train Loss: 0.1129 Val Loss: 0.2931 Acc: 0.9112 Pre: 0.8902 Recall: 0.9153 F1: 0.9026 Train AUC: 0.9942 Val AUC: 0.9584 Time: 12.47\n",
      "Epoch: 378 Train Loss: 0.1122 Val Loss: 0.3006 Acc: 0.9058 Pre: 0.8828 Recall: 0.9113 F1: 0.8968 Train AUC: 0.9941 Val AUC: 0.9585 Time: 12.26\n",
      "Epoch: 379 Train Loss: 0.1137 Val Loss: 0.3075 Acc: 0.9004 Pre: 0.8726 Recall: 0.9113 F1: 0.8915 Train AUC: 0.9929 Val AUC: 0.9585 Time: 13.53\n",
      "Epoch: 380 Train Loss: 0.1149 Val Loss: 0.3036 Acc: 0.9076 Pre: 0.8956 Recall: 0.8992 F1: 0.8974 Train AUC: 0.9928 Val AUC: 0.9585 Time: 13.83\n",
      "Epoch: 381 Train Loss: 0.1135 Val Loss: 0.2936 Acc: 0.9094 Pre: 0.8898 Recall: 0.9113 F1: 0.9004 Train AUC: 0.9926 Val AUC: 0.9599 Time: 14.39\n",
      "Epoch: 382 Train Loss: 0.1176 Val Loss: 0.2954 Acc: 0.8931 Pre: 0.8487 Recall: 0.9274 F1: 0.8863 Train AUC: 0.9925 Val AUC: 0.9605 Time: 15.16\n",
      "Epoch: 383 Train Loss: 0.1232 Val Loss: 0.3037 Acc: 0.8931 Pre: 0.8513 Recall: 0.9234 F1: 0.8859 Train AUC: 0.9923 Val AUC: 0.9596 Time: 15.12\n",
      "Epoch: 384 Train Loss: 0.1201 Val Loss: 0.3055 Acc: 0.8986 Pre: 0.8750 Recall: 0.9032 F1: 0.8889 Train AUC: 0.9925 Val AUC: 0.9586 Time: 13.82\n",
      "Epoch: 385 Train Loss: 0.1140 Val Loss: 0.3138 Acc: 0.9076 Pre: 0.8988 Recall: 0.8952 F1: 0.8970 Train AUC: 0.9930 Val AUC: 0.9580 Time: 12.09\n",
      "Epoch: 386 Train Loss: 0.1213 Val Loss: 0.3045 Acc: 0.9022 Pre: 0.8760 Recall: 0.9113 F1: 0.8933 Train AUC: 0.9916 Val AUC: 0.9590 Time: 12.13\n",
      "Epoch: 387 Train Loss: 0.1140 Val Loss: 0.2948 Acc: 0.8986 Pre: 0.8582 Recall: 0.9274 F1: 0.8915 Train AUC: 0.9926 Val AUC: 0.9600 Time: 12.36\n",
      "Epoch: 388 Train Loss: 0.1175 Val Loss: 0.2917 Acc: 0.8986 Pre: 0.8582 Recall: 0.9274 F1: 0.8915 Train AUC: 0.9933 Val AUC: 0.9597 Time: 12.72\n",
      "Epoch: 389 Train Loss: 0.1112 Val Loss: 0.2968 Acc: 0.9076 Pre: 0.8863 Recall: 0.9113 F1: 0.8986 Train AUC: 0.9940 Val AUC: 0.9592 Time: 13.53\n",
      "Epoch: 390 Train Loss: 0.1208 Val Loss: 0.3213 Acc: 0.8967 Pre: 0.8716 Recall: 0.9032 F1: 0.8871 Train AUC: 0.9926 Val AUC: 0.9569 Time: 13.86\n",
      "Epoch: 391 Train Loss: 0.1191 Val Loss: 0.3345 Acc: 0.8949 Pre: 0.8654 Recall: 0.9073 F1: 0.8858 Train AUC: 0.9920 Val AUC: 0.9563 Time: 14.88\n",
      "Epoch: 392 Train Loss: 0.1201 Val Loss: 0.3209 Acc: 0.8967 Pre: 0.8687 Recall: 0.9073 F1: 0.8876 Train AUC: 0.9916 Val AUC: 0.9577 Time: 14.80\n",
      "Epoch: 393 Train Loss: 0.1155 Val Loss: 0.3036 Acc: 0.9040 Pre: 0.8736 Recall: 0.9194 F1: 0.8959 Train AUC: 0.9923 Val AUC: 0.9596 Time: 13.53\n",
      "Epoch: 394 Train Loss: 0.1073 Val Loss: 0.2937 Acc: 0.9058 Pre: 0.8769 Recall: 0.9194 F1: 0.8976 Train AUC: 0.9941 Val AUC: 0.9605 Time: 14.47\n",
      "Epoch: 395 Train Loss: 0.1130 Val Loss: 0.2946 Acc: 0.9040 Pre: 0.8736 Recall: 0.9194 F1: 0.8959 Train AUC: 0.9939 Val AUC: 0.9600 Time: 12.15\n",
      "Epoch: 396 Train Loss: 0.1124 Val Loss: 0.2999 Acc: 0.9040 Pre: 0.8854 Recall: 0.9032 F1: 0.8942 Train AUC: 0.9930 Val AUC: 0.9591 Time: 12.36\n",
      "Epoch: 397 Train Loss: 0.1113 Val Loss: 0.3004 Acc: 0.9004 Pre: 0.8726 Recall: 0.9113 F1: 0.8915 Train AUC: 0.9930 Val AUC: 0.9596 Time: 12.59\n",
      "Epoch: 398 Train Loss: 0.1112 Val Loss: 0.3059 Acc: 0.9058 Pre: 0.8889 Recall: 0.9032 F1: 0.8960 Train AUC: 0.9938 Val AUC: 0.9584 Time: 13.65\n",
      "Epoch: 399 Train Loss: 0.1107 Val Loss: 0.3178 Acc: 0.9004 Pre: 0.8755 Recall: 0.9073 F1: 0.8911 Train AUC: 0.9941 Val AUC: 0.9574 Time: 13.97\n",
      "Epoch: 400 Train Loss: 0.1175 Val Loss: 0.3258 Acc: 0.9040 Pre: 0.8625 Recall: 0.9355 F1: 0.8975 Train AUC: 0.9924 Val AUC: 0.9582 Time: 14.39\n",
      "Epoch: 401 Train Loss: 0.1050 Val Loss: 0.3113 Acc: 0.9004 Pre: 0.8697 Recall: 0.9153 F1: 0.8919 Train AUC: 0.9946 Val AUC: 0.9587 Time: 14.19\n",
      "Epoch: 402 Train Loss: 0.1079 Val Loss: 0.2960 Acc: 0.9058 Pre: 0.8920 Recall: 0.8992 F1: 0.8956 Train AUC: 0.9942 Val AUC: 0.9590 Time: 14.23\n",
      "Epoch: 403 Train Loss: 0.1193 Val Loss: 0.2919 Acc: 0.9076 Pre: 0.8833 Recall: 0.9153 F1: 0.8990 Train AUC: 0.9927 Val AUC: 0.9594 Time: 12.73\n",
      "Epoch: 404 Train Loss: 0.1159 Val Loss: 0.3054 Acc: 0.8931 Pre: 0.8513 Recall: 0.9234 F1: 0.8859 Train AUC: 0.9935 Val AUC: 0.9596 Time: 13.11\n",
      "Epoch: 405 Train Loss: 0.1059 Val Loss: 0.3130 Acc: 0.9022 Pre: 0.8702 Recall: 0.9194 F1: 0.8941 Train AUC: 0.9952 Val AUC: 0.9599 Time: 13.44\n",
      "Epoch: 406 Train Loss: 0.1129 Val Loss: 0.3173 Acc: 0.9040 Pre: 0.8854 Recall: 0.9032 F1: 0.8942 Train AUC: 0.9942 Val AUC: 0.9587 Time: 13.74\n",
      "Epoch: 407 Train Loss: 0.1134 Val Loss: 0.3185 Acc: 0.9112 Pre: 0.9095 Recall: 0.8911 F1: 0.9002 Train AUC: 0.9925 Val AUC: 0.9587 Time: 14.33\n",
      "Epoch: 408 Train Loss: 0.1204 Val Loss: 0.2979 Acc: 0.9112 Pre: 0.8902 Recall: 0.9153 F1: 0.9026 Train AUC: 0.9927 Val AUC: 0.9597 Time: 13.21\n",
      "Epoch: 409 Train Loss: 0.1127 Val Loss: 0.3069 Acc: 0.8913 Pre: 0.8431 Recall: 0.9315 F1: 0.8851 Train AUC: 0.9931 Val AUC: 0.9599 Time: 13.71\n",
      "Epoch: 410 Train Loss: 0.1264 Val Loss: 0.3023 Acc: 0.8949 Pre: 0.8571 Recall: 0.9194 F1: 0.8872 Train AUC: 0.9934 Val AUC: 0.9596 Time: 12.91\n",
      "Epoch: 411 Train Loss: 0.0998 Val Loss: 0.3170 Acc: 0.9058 Pre: 0.8952 Recall: 0.8952 F1: 0.8952 Train AUC: 0.9956 Val AUC: 0.9587 Time: 13.47\n",
      "Epoch: 412 Train Loss: 0.1158 Val Loss: 0.3230 Acc: 0.9058 Pre: 0.8920 Recall: 0.8992 F1: 0.8956 Train AUC: 0.9927 Val AUC: 0.9582 Time: 13.94\n",
      "Epoch: 413 Train Loss: 0.1125 Val Loss: 0.3265 Acc: 0.8967 Pre: 0.8604 Recall: 0.9194 F1: 0.8889 Train AUC: 0.9932 Val AUC: 0.9594 Time: 14.40\n",
      "Epoch: 414 Train Loss: 0.1171 Val Loss: 0.3205 Acc: 0.8949 Pre: 0.8571 Recall: 0.9194 F1: 0.8872 Train AUC: 0.9932 Val AUC: 0.9595 Time: 14.89\n",
      "Epoch: 415 Train Loss: 0.1130 Val Loss: 0.3201 Acc: 0.9022 Pre: 0.8760 Recall: 0.9113 F1: 0.8933 Train AUC: 0.9934 Val AUC: 0.9569 Time: 13.52\n",
      "Epoch: 416 Train Loss: 0.1011 Val Loss: 0.3249 Acc: 0.8986 Pre: 0.8780 Recall: 0.8992 F1: 0.8884 Train AUC: 0.9947 Val AUC: 0.9564 Time: 12.30\n",
      "Epoch: 417 Train Loss: 0.1124 Val Loss: 0.3247 Acc: 0.8986 Pre: 0.8750 Recall: 0.9032 F1: 0.8889 Train AUC: 0.9935 Val AUC: 0.9572 Time: 12.29\n",
      "Epoch: 418 Train Loss: 0.1070 Val Loss: 0.3221 Acc: 0.8986 Pre: 0.8582 Recall: 0.9274 F1: 0.8915 Train AUC: 0.9937 Val AUC: 0.9582 Time: 12.53\n",
      "Epoch: 419 Train Loss: 0.1142 Val Loss: 0.3154 Acc: 0.8949 Pre: 0.8519 Recall: 0.9274 F1: 0.8880 Train AUC: 0.9928 Val AUC: 0.9596 Time: 12.13\n",
      "Epoch: 420 Train Loss: 0.1083 Val Loss: 0.3033 Acc: 0.9094 Pre: 0.8898 Recall: 0.9113 F1: 0.9004 Train AUC: 0.9951 Val AUC: 0.9602 Time: 12.24\n",
      "Epoch: 421 Train Loss: 0.0987 Val Loss: 0.2996 Acc: 0.9040 Pre: 0.8916 Recall: 0.8952 F1: 0.8934 Train AUC: 0.9949 Val AUC: 0.9604 Time: 12.47\n",
      "Epoch: 422 Train Loss: 0.1066 Val Loss: 0.2921 Acc: 0.9058 Pre: 0.8828 Recall: 0.9113 F1: 0.8968 Train AUC: 0.9945 Val AUC: 0.9615 Time: 12.35\n",
      "Epoch: 423 Train Loss: 0.1111 Val Loss: 0.3041 Acc: 0.8967 Pre: 0.8577 Recall: 0.9234 F1: 0.8893 Train AUC: 0.9933 Val AUC: 0.9612 Time: 13.88\n",
      "Epoch: 424 Train Loss: 0.1161 Val Loss: 0.3175 Acc: 0.8931 Pre: 0.8539 Recall: 0.9194 F1: 0.8854 Train AUC: 0.9932 Val AUC: 0.9595 Time: 14.15\n",
      "Epoch: 425 Train Loss: 0.1081 Val Loss: 0.3300 Acc: 0.8986 Pre: 0.8750 Recall: 0.9032 F1: 0.8889 Train AUC: 0.9944 Val AUC: 0.9576 Time: 14.67\n",
      "Epoch: 426 Train Loss: 0.1122 Val Loss: 0.3265 Acc: 0.9004 Pre: 0.8784 Recall: 0.9032 F1: 0.8907 Train AUC: 0.9932 Val AUC: 0.9577 Time: 15.62\n",
      "Epoch: 427 Train Loss: 0.1068 Val Loss: 0.3067 Acc: 0.9058 Pre: 0.8712 Recall: 0.9274 F1: 0.8984 Train AUC: 0.9935 Val AUC: 0.9606 Time: 14.38\n",
      "Epoch: 428 Train Loss: 0.1013 Val Loss: 0.3077 Acc: 0.8949 Pre: 0.8519 Recall: 0.9274 F1: 0.8880 Train AUC: 0.9953 Val AUC: 0.9603 Time: 14.84\n",
      "Epoch: 429 Train Loss: 0.1139 Val Loss: 0.3077 Acc: 0.9004 Pre: 0.8614 Recall: 0.9274 F1: 0.8932 Train AUC: 0.9939 Val AUC: 0.9586 Time: 12.04\n",
      "Epoch: 430 Train Loss: 0.1052 Val Loss: 0.3371 Acc: 0.8949 Pre: 0.8740 Recall: 0.8952 F1: 0.8845 Train AUC: 0.9944 Val AUC: 0.9553 Time: 12.29\n",
      "Epoch: 431 Train Loss: 0.1176 Val Loss: 0.3462 Acc: 0.8986 Pre: 0.8840 Recall: 0.8911 F1: 0.8876 Train AUC: 0.9923 Val AUC: 0.9550 Time: 12.41\n",
      "Epoch: 432 Train Loss: 0.1210 Val Loss: 0.3256 Acc: 0.8967 Pre: 0.8604 Recall: 0.9194 F1: 0.8889 Train AUC: 0.9917 Val AUC: 0.9589 Time: 12.35\n",
      "Epoch: 433 Train Loss: 0.1060 Val Loss: 0.3151 Acc: 0.8949 Pre: 0.8493 Recall: 0.9315 F1: 0.8885 Train AUC: 0.9938 Val AUC: 0.9627 Time: 12.61\n",
      "Epoch: 434 Train Loss: 0.1150 Val Loss: 0.2894 Acc: 0.9076 Pre: 0.8774 Recall: 0.9234 F1: 0.8998 Train AUC: 0.9946 Val AUC: 0.9625 Time: 13.76\n",
      "Epoch: 435 Train Loss: 0.1097 Val Loss: 0.2950 Acc: 0.9022 Pre: 0.8880 Recall: 0.8952 F1: 0.8916 Train AUC: 0.9940 Val AUC: 0.9618 Time: 13.94\n",
      "Epoch: 436 Train Loss: 0.1071 Val Loss: 0.3076 Acc: 0.9040 Pre: 0.8854 Recall: 0.9032 F1: 0.8942 Train AUC: 0.9943 Val AUC: 0.9609 Time: 14.60\n",
      "Epoch: 437 Train Loss: 0.1094 Val Loss: 0.3393 Acc: 0.8967 Pre: 0.8577 Recall: 0.9234 F1: 0.8893 Train AUC: 0.9935 Val AUC: 0.9595 Time: 14.09\n",
      "Epoch: 438 Train Loss: 0.1263 Val Loss: 0.3168 Acc: 0.8967 Pre: 0.8604 Recall: 0.9194 F1: 0.8889 Train AUC: 0.9923 Val AUC: 0.9600 Time: 14.14\n",
      "Epoch: 439 Train Loss: 0.1081 Val Loss: 0.2902 Acc: 0.9040 Pre: 0.8916 Recall: 0.8952 F1: 0.8934 Train AUC: 0.9940 Val AUC: 0.9597 Time: 13.31\n",
      "Epoch: 440 Train Loss: 0.1108 Val Loss: 0.2926 Acc: 0.9040 Pre: 0.8947 Recall: 0.8911 F1: 0.8929 Train AUC: 0.9945 Val AUC: 0.9604 Time: 13.15\n",
      "Epoch: 441 Train Loss: 0.1022 Val Loss: 0.2949 Acc: 0.9094 Pre: 0.8898 Recall: 0.9113 F1: 0.9004 Train AUC: 0.9948 Val AUC: 0.9608 Time: 13.36\n",
      "Epoch: 442 Train Loss: 0.1119 Val Loss: 0.3308 Acc: 0.8913 Pre: 0.8561 Recall: 0.9113 F1: 0.8828 Train AUC: 0.9935 Val AUC: 0.9592 Time: 13.21\n",
      "Epoch: 443 Train Loss: 0.1127 Val Loss: 0.3303 Acc: 0.8931 Pre: 0.8649 Recall: 0.9032 F1: 0.8836 Train AUC: 0.9942 Val AUC: 0.9581 Time: 13.00\n",
      "Epoch: 444 Train Loss: 0.1118 Val Loss: 0.3196 Acc: 0.9040 Pre: 0.8854 Recall: 0.9032 F1: 0.8942 Train AUC: 0.9933 Val AUC: 0.9586 Time: 14.26\n",
      "Epoch: 445 Train Loss: 0.1052 Val Loss: 0.2976 Acc: 0.9112 Pre: 0.8933 Recall: 0.9113 F1: 0.9022 Train AUC: 0.9944 Val AUC: 0.9604 Time: 14.98\n",
      "Epoch: 446 Train Loss: 0.1000 Val Loss: 0.2932 Acc: 0.9004 Pre: 0.8614 Recall: 0.9274 F1: 0.8932 Train AUC: 0.9947 Val AUC: 0.9605 Time: 13.57\n",
      "Epoch: 447 Train Loss: 0.1059 Val Loss: 0.2971 Acc: 0.9094 Pre: 0.8837 Recall: 0.9194 F1: 0.9012 Train AUC: 0.9944 Val AUC: 0.9602 Time: 13.60\n",
      "Epoch: 448 Train Loss: 0.1005 Val Loss: 0.3215 Acc: 0.9022 Pre: 0.8880 Recall: 0.8952 F1: 0.8916 Train AUC: 0.9947 Val AUC: 0.9577 Time: 12.43\n",
      "Epoch: 449 Train Loss: 0.1048 Val Loss: 0.3429 Acc: 0.8949 Pre: 0.8740 Recall: 0.8952 F1: 0.8845 Train AUC: 0.9941 Val AUC: 0.9555 Time: 13.31\n",
      "Epoch: 450 Train Loss: 0.0999 Val Loss: 0.3359 Acc: 0.8949 Pre: 0.8626 Recall: 0.9113 F1: 0.8863 Train AUC: 0.9946 Val AUC: 0.9563 Time: 13.74\n",
      "Epoch: 451 Train Loss: 0.1046 Val Loss: 0.3135 Acc: 0.8967 Pre: 0.8604 Recall: 0.9194 F1: 0.8889 Train AUC: 0.9946 Val AUC: 0.9601 Time: 14.34\n",
      "Epoch: 452 Train Loss: 0.0928 Val Loss: 0.3048 Acc: 0.9004 Pre: 0.8669 Recall: 0.9194 F1: 0.8924 Train AUC: 0.9963 Val AUC: 0.9605 Time: 14.13\n",
      "Epoch: 453 Train Loss: 0.1121 Val Loss: 0.3045 Acc: 0.8967 Pre: 0.8659 Recall: 0.9113 F1: 0.8880 Train AUC: 0.9937 Val AUC: 0.9602 Time: 13.61\n",
      "Epoch: 454 Train Loss: 0.1003 Val Loss: 0.3187 Acc: 0.8967 Pre: 0.8687 Recall: 0.9073 F1: 0.8876 Train AUC: 0.9950 Val AUC: 0.9593 Time: 12.25\n",
      "Epoch: 455 Train Loss: 0.1048 Val Loss: 0.3253 Acc: 0.8913 Pre: 0.8588 Recall: 0.9073 F1: 0.8824 Train AUC: 0.9943 Val AUC: 0.9600 Time: 12.55\n",
      "Epoch: 456 Train Loss: 0.0972 Val Loss: 0.3146 Acc: 0.8967 Pre: 0.8631 Recall: 0.9153 F1: 0.8885 Train AUC: 0.9948 Val AUC: 0.9610 Time: 14.08\n",
      "Epoch: 457 Train Loss: 0.0952 Val Loss: 0.2953 Acc: 0.9094 Pre: 0.8837 Recall: 0.9194 F1: 0.9012 Train AUC: 0.9954 Val AUC: 0.9619 Time: 13.84\n",
      "Epoch: 458 Train Loss: 0.0955 Val Loss: 0.2866 Acc: 0.9094 Pre: 0.8837 Recall: 0.9194 F1: 0.9012 Train AUC: 0.9953 Val AUC: 0.9623 Time: 14.94\n",
      "Epoch: 459 Train Loss: 0.1172 Val Loss: 0.2928 Acc: 0.9094 Pre: 0.8779 Recall: 0.9274 F1: 0.9020 Train AUC: 0.9929 Val AUC: 0.9626 Time: 13.58\n",
      "Epoch: 460 Train Loss: 0.0946 Val Loss: 0.3089 Acc: 0.8986 Pre: 0.8664 Recall: 0.9153 F1: 0.8902 Train AUC: 0.9955 Val AUC: 0.9611 Time: 12.69\n",
      "Epoch: 461 Train Loss: 0.1014 Val Loss: 0.3252 Acc: 0.8931 Pre: 0.8566 Recall: 0.9153 F1: 0.8850 Train AUC: 0.9946 Val AUC: 0.9590 Time: 13.22\n",
      "Epoch: 462 Train Loss: 0.0947 Val Loss: 0.3289 Acc: 0.8949 Pre: 0.8598 Recall: 0.9153 F1: 0.8867 Train AUC: 0.9955 Val AUC: 0.9586 Time: 13.80\n",
      "Epoch: 463 Train Loss: 0.0956 Val Loss: 0.3324 Acc: 0.8913 Pre: 0.8534 Recall: 0.9153 F1: 0.8833 Train AUC: 0.9955 Val AUC: 0.9582 Time: 13.71\n",
      "Epoch: 464 Train Loss: 0.1020 Val Loss: 0.3205 Acc: 0.9040 Pre: 0.8736 Recall: 0.9194 F1: 0.8959 Train AUC: 0.9948 Val AUC: 0.9593 Time: 14.09\n",
      "Epoch: 465 Train Loss: 0.0930 Val Loss: 0.3133 Acc: 0.9076 Pre: 0.8863 Recall: 0.9113 F1: 0.8986 Train AUC: 0.9959 Val AUC: 0.9597 Time: 13.24\n",
      "Epoch: 466 Train Loss: 0.1027 Val Loss: 0.3138 Acc: 0.9094 Pre: 0.8960 Recall: 0.9032 F1: 0.8996 Train AUC: 0.9944 Val AUC: 0.9595 Time: 13.17\n",
      "Epoch: 467 Train Loss: 0.0999 Val Loss: 0.3297 Acc: 0.9022 Pre: 0.8819 Recall: 0.9032 F1: 0.8924 Train AUC: 0.9948 Val AUC: 0.9579 Time: 12.81\n",
      "Epoch: 468 Train Loss: 0.0964 Val Loss: 0.3322 Acc: 0.8931 Pre: 0.8649 Recall: 0.9032 F1: 0.8836 Train AUC: 0.9951 Val AUC: 0.9577 Time: 13.43\n",
      "Epoch: 469 Train Loss: 0.1025 Val Loss: 0.3316 Acc: 0.8931 Pre: 0.8566 Recall: 0.9153 F1: 0.8850 Train AUC: 0.9945 Val AUC: 0.9571 Time: 13.72\n",
      "Epoch: 470 Train Loss: 0.0987 Val Loss: 0.3134 Acc: 0.8949 Pre: 0.8626 Recall: 0.9113 F1: 0.8863 Train AUC: 0.9954 Val AUC: 0.9586 Time: 14.54\n",
      "Epoch: 471 Train Loss: 0.0911 Val Loss: 0.3144 Acc: 0.8986 Pre: 0.8750 Recall: 0.9032 F1: 0.8889 Train AUC: 0.9964 Val AUC: 0.9603 Time: 13.15\n",
      "Epoch: 472 Train Loss: 0.0905 Val Loss: 0.3190 Acc: 0.9022 Pre: 0.8819 Recall: 0.9032 F1: 0.8924 Train AUC: 0.9959 Val AUC: 0.9618 Time: 12.97\n",
      "Epoch: 473 Train Loss: 0.0934 Val Loss: 0.3263 Acc: 0.9076 Pre: 0.8833 Recall: 0.9153 F1: 0.8990 Train AUC: 0.9956 Val AUC: 0.9616 Time: 13.10\n",
      "Epoch: 474 Train Loss: 0.0995 Val Loss: 0.3141 Acc: 0.9058 Pre: 0.8828 Recall: 0.9113 F1: 0.8968 Train AUC: 0.9948 Val AUC: 0.9614 Time: 13.72\n",
      "Epoch: 475 Train Loss: 0.0932 Val Loss: 0.3039 Acc: 0.9112 Pre: 0.8842 Recall: 0.9234 F1: 0.9034 Train AUC: 0.9954 Val AUC: 0.9619 Time: 13.31\n",
      "Epoch: 476 Train Loss: 0.0992 Val Loss: 0.3152 Acc: 0.9094 Pre: 0.8929 Recall: 0.9073 F1: 0.9000 Train AUC: 0.9948 Val AUC: 0.9594 Time: 13.86\n",
      "Epoch: 477 Train Loss: 0.1038 Val Loss: 0.3369 Acc: 0.8913 Pre: 0.8561 Recall: 0.9113 F1: 0.8828 Train AUC: 0.9945 Val AUC: 0.9572 Time: 13.63\n",
      "Epoch: 478 Train Loss: 0.0928 Val Loss: 0.3422 Acc: 0.8931 Pre: 0.8566 Recall: 0.9153 F1: 0.8850 Train AUC: 0.9958 Val AUC: 0.9565 Time: 13.74\n",
      "Epoch: 479 Train Loss: 0.0991 Val Loss: 0.3268 Acc: 0.9076 Pre: 0.8863 Recall: 0.9113 F1: 0.8986 Train AUC: 0.9945 Val AUC: 0.9589 Time: 12.51\n",
      "Epoch: 480 Train Loss: 0.0883 Val Loss: 0.3155 Acc: 0.9094 Pre: 0.8867 Recall: 0.9153 F1: 0.9008 Train AUC: 0.9960 Val AUC: 0.9602 Time: 12.78\n",
      "Epoch: 481 Train Loss: 0.0994 Val Loss: 0.3154 Acc: 0.9058 Pre: 0.8798 Recall: 0.9153 F1: 0.8972 Train AUC: 0.9949 Val AUC: 0.9614 Time: 14.04\n",
      "Epoch: 482 Train Loss: 0.0971 Val Loss: 0.3107 Acc: 0.9022 Pre: 0.8760 Recall: 0.9113 F1: 0.8933 Train AUC: 0.9950 Val AUC: 0.9608 Time: 14.18\n",
      "Epoch: 483 Train Loss: 0.1002 Val Loss: 0.3058 Acc: 0.8986 Pre: 0.8721 Recall: 0.9073 F1: 0.8893 Train AUC: 0.9948 Val AUC: 0.9606 Time: 13.75\n",
      "Epoch: 484 Train Loss: 0.0972 Val Loss: 0.3097 Acc: 0.9004 Pre: 0.8669 Recall: 0.9194 F1: 0.8924 Train AUC: 0.9952 Val AUC: 0.9609 Time: 14.09\n",
      "Epoch: 485 Train Loss: 0.1041 Val Loss: 0.3230 Acc: 0.8967 Pre: 0.8550 Recall: 0.9274 F1: 0.8897 Train AUC: 0.9948 Val AUC: 0.9610 Time: 12.28\n",
      "Epoch: 486 Train Loss: 0.0900 Val Loss: 0.3219 Acc: 0.9004 Pre: 0.8669 Recall: 0.9194 F1: 0.8924 Train AUC: 0.9967 Val AUC: 0.9601 Time: 12.20\n",
      "Epoch: 487 Train Loss: 0.0856 Val Loss: 0.3349 Acc: 0.9058 Pre: 0.8828 Recall: 0.9113 F1: 0.8968 Train AUC: 0.9971 Val AUC: 0.9583 Time: 12.24\n",
      "Epoch: 488 Train Loss: 0.1002 Val Loss: 0.3394 Acc: 0.9022 Pre: 0.8819 Recall: 0.9032 F1: 0.8924 Train AUC: 0.9948 Val AUC: 0.9577 Time: 13.73\n",
      "Epoch: 489 Train Loss: 0.1046 Val Loss: 0.3501 Acc: 0.8967 Pre: 0.8604 Recall: 0.9194 F1: 0.8889 Train AUC: 0.9944 Val AUC: 0.9562 Time: 13.44\n",
      "Epoch: 490 Train Loss: 0.0931 Val Loss: 0.3544 Acc: 0.8877 Pre: 0.8444 Recall: 0.9194 F1: 0.8803 Train AUC: 0.9956 Val AUC: 0.9570 Time: 14.24\n",
      "Epoch: 491 Train Loss: 0.1102 Val Loss: 0.3140 Acc: 0.9022 Pre: 0.8702 Recall: 0.9194 F1: 0.8941 Train AUC: 0.9947 Val AUC: 0.9611 Time: 15.00\n",
      "Epoch: 492 Train Loss: 0.0925 Val Loss: 0.3030 Acc: 0.9076 Pre: 0.8833 Recall: 0.9153 F1: 0.8990 Train AUC: 0.9959 Val AUC: 0.9623 Time: 14.95\n",
      "Epoch: 493 Train Loss: 0.0860 Val Loss: 0.3062 Acc: 0.9112 Pre: 0.8872 Recall: 0.9194 F1: 0.9030 Train AUC: 0.9963 Val AUC: 0.9627 Time: 13.07\n",
      "Epoch: 494 Train Loss: 0.1036 Val Loss: 0.3106 Acc: 0.9040 Pre: 0.8736 Recall: 0.9194 F1: 0.8959 Train AUC: 0.9937 Val AUC: 0.9621 Time: 12.78\n",
      "Epoch: 495 Train Loss: 0.0889 Val Loss: 0.3220 Acc: 0.8986 Pre: 0.8636 Recall: 0.9194 F1: 0.8906 Train AUC: 0.9961 Val AUC: 0.9606 Time: 12.40\n",
      "Epoch: 496 Train Loss: 0.0970 Val Loss: 0.3312 Acc: 0.8986 Pre: 0.8664 Recall: 0.9153 F1: 0.8902 Train AUC: 0.9951 Val AUC: 0.9581 Time: 12.51\n",
      "Epoch: 497 Train Loss: 0.0951 Val Loss: 0.3220 Acc: 0.9004 Pre: 0.8697 Recall: 0.9153 F1: 0.8919 Train AUC: 0.9953 Val AUC: 0.9582 Time: 12.98\n",
      "Epoch: 498 Train Loss: 0.0876 Val Loss: 0.3149 Acc: 0.9094 Pre: 0.8837 Recall: 0.9194 F1: 0.9012 Train AUC: 0.9964 Val AUC: 0.9594 Time: 13.80\n",
      "Epoch: 499 Train Loss: 0.0979 Val Loss: 0.3161 Acc: 0.9040 Pre: 0.8736 Recall: 0.9194 F1: 0.8959 Train AUC: 0.9950 Val AUC: 0.9601 Time: 13.63\n",
      "Epoch: 500 Train Loss: 0.0945 Val Loss: 0.3251 Acc: 0.9022 Pre: 0.8702 Recall: 0.9194 F1: 0.8941 Train AUC: 0.9955 Val AUC: 0.9607 Time: 13.75\n",
      "Epoch: 501 Train Loss: 0.0983 Val Loss: 0.3163 Acc: 0.9130 Pre: 0.8906 Recall: 0.9194 F1: 0.9048 Train AUC: 0.9951 Val AUC: 0.9608 Time: 14.35\n",
      "Epoch: 502 Train Loss: 0.0926 Val Loss: 0.3154 Acc: 0.9004 Pre: 0.8814 Recall: 0.8992 F1: 0.8902 Train AUC: 0.9952 Val AUC: 0.9607 Time: 13.15\n",
      "Epoch: 503 Train Loss: 0.0981 Val Loss: 0.3191 Acc: 0.8913 Pre: 0.8615 Recall: 0.9032 F1: 0.8819 Train AUC: 0.9957 Val AUC: 0.9607 Time: 12.55\n",
      "Epoch: 504 Train Loss: 0.0918 Val Loss: 0.3285 Acc: 0.8949 Pre: 0.8493 Recall: 0.9315 F1: 0.8885 Train AUC: 0.9957 Val AUC: 0.9625 Time: 12.40\n",
      "Epoch: 505 Train Loss: 0.1055 Val Loss: 0.3209 Acc: 0.9058 Pre: 0.8769 Recall: 0.9194 F1: 0.8976 Train AUC: 0.9954 Val AUC: 0.9621 Time: 12.45\n",
      "Epoch: 506 Train Loss: 0.0896 Val Loss: 0.3212 Acc: 0.9058 Pre: 0.8889 Recall: 0.9032 F1: 0.8960 Train AUC: 0.9967 Val AUC: 0.9602 Time: 12.52\n",
      "Epoch: 507 Train Loss: 0.0993 Val Loss: 0.3227 Acc: 0.9076 Pre: 0.8833 Recall: 0.9153 F1: 0.8990 Train AUC: 0.9959 Val AUC: 0.9594 Time: 12.95\n",
      "Epoch: 508 Train Loss: 0.0947 Val Loss: 0.3308 Acc: 0.8931 Pre: 0.8539 Recall: 0.9194 F1: 0.8854 Train AUC: 0.9954 Val AUC: 0.9576 Time: 13.69\n",
      "Epoch: 509 Train Loss: 0.0940 Val Loss: 0.3253 Acc: 0.8967 Pre: 0.8604 Recall: 0.9194 F1: 0.8889 Train AUC: 0.9959 Val AUC: 0.9587 Time: 14.15\n",
      "Epoch: 510 Train Loss: 0.0850 Val Loss: 0.3152 Acc: 0.8986 Pre: 0.8692 Recall: 0.9113 F1: 0.8898 Train AUC: 0.9965 Val AUC: 0.9604 Time: 14.90\n",
      "Epoch: 511 Train Loss: 0.0920 Val Loss: 0.3077 Acc: 0.9076 Pre: 0.8863 Recall: 0.9113 F1: 0.8986 Train AUC: 0.9956 Val AUC: 0.9615 Time: 14.59\n",
      "Epoch: 512 Train Loss: 0.0891 Val Loss: 0.3053 Acc: 0.9076 Pre: 0.8833 Recall: 0.9153 F1: 0.8990 Train AUC: 0.9961 Val AUC: 0.9622 Time: 14.53\n",
      "Epoch: 513 Train Loss: 0.0911 Val Loss: 0.3237 Acc: 0.8986 Pre: 0.8664 Recall: 0.9153 F1: 0.8902 Train AUC: 0.9958 Val AUC: 0.9613 Time: 12.30\n",
      "Epoch: 514 Train Loss: 0.0929 Val Loss: 0.3365 Acc: 0.8931 Pre: 0.8593 Recall: 0.9113 F1: 0.8845 Train AUC: 0.9954 Val AUC: 0.9603 Time: 12.57\n",
      "Epoch: 515 Train Loss: 0.0873 Val Loss: 0.3267 Acc: 0.9022 Pre: 0.8702 Recall: 0.9194 F1: 0.8941 Train AUC: 0.9960 Val AUC: 0.9602 Time: 12.41\n",
      "Epoch: 516 Train Loss: 0.0915 Val Loss: 0.3080 Acc: 0.9004 Pre: 0.8697 Recall: 0.9153 F1: 0.8919 Train AUC: 0.9958 Val AUC: 0.9603 Time: 13.38\n",
      "Epoch: 517 Train Loss: 0.0909 Val Loss: 0.3122 Acc: 0.9022 Pre: 0.8760 Recall: 0.9113 F1: 0.8933 Train AUC: 0.9959 Val AUC: 0.9599 Time: 13.94\n",
      "Epoch: 518 Train Loss: 0.0873 Val Loss: 0.3290 Acc: 0.8986 Pre: 0.8692 Recall: 0.9113 F1: 0.8898 Train AUC: 0.9967 Val AUC: 0.9589 Time: 14.26\n",
      "Epoch: 519 Train Loss: 0.0850 Val Loss: 0.3508 Acc: 0.8967 Pre: 0.8659 Recall: 0.9113 F1: 0.8880 Train AUC: 0.9968 Val AUC: 0.9581 Time: 14.99\n",
      "Epoch: 520 Train Loss: 0.0886 Val Loss: 0.3722 Acc: 0.8949 Pre: 0.8626 Recall: 0.9113 F1: 0.8863 Train AUC: 0.9963 Val AUC: 0.9566 Time: 13.65\n",
      "Epoch: 521 Train Loss: 0.1047 Val Loss: 0.3450 Acc: 0.9076 Pre: 0.8863 Recall: 0.9113 F1: 0.8986 Train AUC: 0.9939 Val AUC: 0.9590 Time: 12.24\n",
      "Epoch: 522 Train Loss: 0.0919 Val Loss: 0.3230 Acc: 0.9094 Pre: 0.8837 Recall: 0.9194 F1: 0.9012 Train AUC: 0.9953 Val AUC: 0.9600 Time: 12.17\n",
      "Epoch: 523 Train Loss: 0.0902 Val Loss: 0.3149 Acc: 0.9004 Pre: 0.8669 Recall: 0.9194 F1: 0.8924 Train AUC: 0.9958 Val AUC: 0.9597 Time: 12.82\n",
      "Epoch: 524 Train Loss: 0.0863 Val Loss: 0.3227 Acc: 0.9004 Pre: 0.8642 Recall: 0.9234 F1: 0.8928 Train AUC: 0.9968 Val AUC: 0.9598 Time: 13.48\n",
      "Epoch: 525 Train Loss: 0.0879 Val Loss: 0.3409 Acc: 0.8931 Pre: 0.8566 Recall: 0.9153 F1: 0.8850 Train AUC: 0.9966 Val AUC: 0.9593 Time: 14.18\n",
      "Epoch: 526 Train Loss: 0.0898 Val Loss: 0.3513 Acc: 0.8931 Pre: 0.8593 Recall: 0.9113 F1: 0.8845 Train AUC: 0.9957 Val AUC: 0.9595 Time: 14.44\n",
      "Epoch: 527 Train Loss: 0.1016 Val Loss: 0.3327 Acc: 0.8967 Pre: 0.8716 Recall: 0.9032 F1: 0.8871 Train AUC: 0.9941 Val AUC: 0.9607 Time: 14.00\n",
      "Epoch: 528 Train Loss: 0.0965 Val Loss: 0.3092 Acc: 0.9076 Pre: 0.8833 Recall: 0.9153 F1: 0.8990 Train AUC: 0.9945 Val AUC: 0.9613 Time: 14.23\n",
      "Epoch: 529 Train Loss: 0.0884 Val Loss: 0.3067 Acc: 0.9022 Pre: 0.8702 Recall: 0.9194 F1: 0.8941 Train AUC: 0.9966 Val AUC: 0.9615 Time: 12.16\n",
      "Epoch: 530 Train Loss: 0.0944 Val Loss: 0.3345 Acc: 0.8949 Pre: 0.8571 Recall: 0.9194 F1: 0.8872 Train AUC: 0.9953 Val AUC: 0.9597 Time: 12.45\n",
      "Epoch: 531 Train Loss: 0.0944 Val Loss: 0.3527 Acc: 0.8949 Pre: 0.8571 Recall: 0.9194 F1: 0.8872 Train AUC: 0.9958 Val AUC: 0.9587 Time: 12.34\n",
      "Epoch: 532 Train Loss: 0.0829 Val Loss: 0.3429 Acc: 0.8986 Pre: 0.8692 Recall: 0.9113 F1: 0.8898 Train AUC: 0.9965 Val AUC: 0.9598 Time: 12.30\n",
      "Epoch: 533 Train Loss: 0.0887 Val Loss: 0.3312 Acc: 0.9058 Pre: 0.8769 Recall: 0.9194 F1: 0.8976 Train AUC: 0.9960 Val AUC: 0.9602 Time: 13.55\n",
      "Epoch: 534 Train Loss: 0.0888 Val Loss: 0.3207 Acc: 0.8986 Pre: 0.8636 Recall: 0.9194 F1: 0.8906 Train AUC: 0.9961 Val AUC: 0.9604 Time: 13.94\n",
      "Epoch: 535 Train Loss: 0.0896 Val Loss: 0.3216 Acc: 0.8986 Pre: 0.8636 Recall: 0.9194 F1: 0.8906 Train AUC: 0.9958 Val AUC: 0.9604 Time: 14.35\n",
      "Epoch: 536 Train Loss: 0.0888 Val Loss: 0.3412 Acc: 0.8967 Pre: 0.8604 Recall: 0.9194 F1: 0.8889 Train AUC: 0.9963 Val AUC: 0.9584 Time: 14.59\n",
      "Epoch: 537 Train Loss: 0.0855 Val Loss: 0.3512 Acc: 0.8931 Pre: 0.8593 Recall: 0.9113 F1: 0.8845 Train AUC: 0.9967 Val AUC: 0.9574 Time: 14.45\n",
      "Epoch: 538 Train Loss: 0.0919 Val Loss: 0.3427 Acc: 0.9004 Pre: 0.8755 Recall: 0.9073 F1: 0.8911 Train AUC: 0.9957 Val AUC: 0.9582 Time: 12.37\n",
      "Epoch: 539 Train Loss: 0.0856 Val Loss: 0.3191 Acc: 0.9040 Pre: 0.8736 Recall: 0.9194 F1: 0.8959 Train AUC: 0.9966 Val AUC: 0.9613 Time: 12.25\n",
      "Epoch: 540 Train Loss: 0.0832 Val Loss: 0.3132 Acc: 0.8986 Pre: 0.8636 Recall: 0.9194 F1: 0.8906 Train AUC: 0.9970 Val AUC: 0.9626 Time: 12.49\n",
      "Epoch: 541 Train Loss: 0.0952 Val Loss: 0.3169 Acc: 0.9076 Pre: 0.8803 Recall: 0.9194 F1: 0.8994 Train AUC: 0.9957 Val AUC: 0.9614 Time: 12.49\n",
      "Epoch: 542 Train Loss: 0.0907 Val Loss: 0.3291 Acc: 0.9022 Pre: 0.8760 Recall: 0.9113 F1: 0.8933 Train AUC: 0.9960 Val AUC: 0.9601 Time: 13.21\n",
      "Epoch: 543 Train Loss: 0.0897 Val Loss: 0.3340 Acc: 0.8967 Pre: 0.8687 Recall: 0.9073 F1: 0.8876 Train AUC: 0.9959 Val AUC: 0.9591 Time: 14.53\n",
      "Epoch: 544 Train Loss: 0.0907 Val Loss: 0.3233 Acc: 0.9004 Pre: 0.8697 Recall: 0.9153 F1: 0.8919 Train AUC: 0.9960 Val AUC: 0.9598 Time: 14.41\n",
      "Epoch: 545 Train Loss: 0.0884 Val Loss: 0.3083 Acc: 0.9058 Pre: 0.8769 Recall: 0.9194 F1: 0.8976 Train AUC: 0.9960 Val AUC: 0.9601 Time: 14.80\n",
      "Epoch: 546 Train Loss: 0.0864 Val Loss: 0.3176 Acc: 0.8967 Pre: 0.8604 Recall: 0.9194 F1: 0.8889 Train AUC: 0.9970 Val AUC: 0.9596 Time: 13.82\n",
      "Epoch: 547 Train Loss: 0.0871 Val Loss: 0.3392 Acc: 0.8949 Pre: 0.8598 Recall: 0.9153 F1: 0.8867 Train AUC: 0.9968 Val AUC: 0.9585 Time: 13.75\n",
      "Epoch: 548 Train Loss: 0.0862 Val Loss: 0.3367 Acc: 0.9022 Pre: 0.8789 Recall: 0.9073 F1: 0.8929 Train AUC: 0.9963 Val AUC: 0.9589 Time: 12.20\n",
      "Epoch: 549 Train Loss: 0.0815 Val Loss: 0.3318 Acc: 0.9022 Pre: 0.8731 Recall: 0.9153 F1: 0.8937 Train AUC: 0.9969 Val AUC: 0.9597 Time: 12.54\n",
      "Epoch: 550 Train Loss: 0.0827 Val Loss: 0.3282 Acc: 0.9022 Pre: 0.8702 Recall: 0.9194 F1: 0.8941 Train AUC: 0.9970 Val AUC: 0.9607 Time: 13.94\n",
      "Epoch: 551 Train Loss: 0.0824 Val Loss: 0.3220 Acc: 0.9004 Pre: 0.8669 Recall: 0.9194 F1: 0.8924 Train AUC: 0.9967 Val AUC: 0.9610 Time: 14.23\n",
      "Epoch: 552 Train Loss: 0.0846 Val Loss: 0.3242 Acc: 0.8986 Pre: 0.8692 Recall: 0.9113 F1: 0.8898 Train AUC: 0.9968 Val AUC: 0.9604 Time: 14.43\n",
      "Epoch: 553 Train Loss: 0.0873 Val Loss: 0.3214 Acc: 0.8986 Pre: 0.8750 Recall: 0.9032 F1: 0.8889 Train AUC: 0.9960 Val AUC: 0.9598 Time: 14.24\n",
      "Epoch: 554 Train Loss: 0.0901 Val Loss: 0.3089 Acc: 0.9058 Pre: 0.8769 Recall: 0.9194 F1: 0.8976 Train AUC: 0.9958 Val AUC: 0.9608 Time: 12.17\n",
      "Epoch: 555 Train Loss: 0.0957 Val Loss: 0.3192 Acc: 0.8986 Pre: 0.8692 Recall: 0.9113 F1: 0.8898 Train AUC: 0.9956 Val AUC: 0.9619 Time: 12.51\n",
      "Epoch: 556 Train Loss: 0.0802 Val Loss: 0.3321 Acc: 0.9058 Pre: 0.8828 Recall: 0.9113 F1: 0.8968 Train AUC: 0.9972 Val AUC: 0.9609 Time: 13.40\n",
      "Epoch: 557 Train Loss: 0.0780 Val Loss: 0.3347 Acc: 0.9058 Pre: 0.8828 Recall: 0.9113 F1: 0.8968 Train AUC: 0.9966 Val AUC: 0.9605 Time: 13.56\n",
      "Epoch: 558 Train Loss: 0.0924 Val Loss: 0.3251 Acc: 0.9040 Pre: 0.8736 Recall: 0.9194 F1: 0.8959 Train AUC: 0.9951 Val AUC: 0.9613 Time: 14.32\n",
      "Epoch: 559 Train Loss: 0.0863 Val Loss: 0.3268 Acc: 0.8949 Pre: 0.8571 Recall: 0.9194 F1: 0.8872 Train AUC: 0.9962 Val AUC: 0.9601 Time: 14.20\n",
      "Epoch: 560 Train Loss: 0.0913 Val Loss: 0.3295 Acc: 0.9004 Pre: 0.8726 Recall: 0.9113 F1: 0.8915 Train AUC: 0.9960 Val AUC: 0.9590 Time: 14.31\n",
      "Epoch: 561 Train Loss: 0.0851 Val Loss: 0.3359 Acc: 0.9004 Pre: 0.8784 Recall: 0.9032 F1: 0.8907 Train AUC: 0.9965 Val AUC: 0.9598 Time: 13.44\n",
      "Epoch: 562 Train Loss: 0.0820 Val Loss: 0.3309 Acc: 0.9022 Pre: 0.8760 Recall: 0.9113 F1: 0.8933 Train AUC: 0.9969 Val AUC: 0.9608 Time: 12.66\n",
      "Epoch: 563 Train Loss: 0.0956 Val Loss: 0.3070 Acc: 0.9040 Pre: 0.8736 Recall: 0.9194 F1: 0.8959 Train AUC: 0.9947 Val AUC: 0.9627 Time: 12.83\n",
      "Epoch: 564 Train Loss: 0.0968 Val Loss: 0.2970 Acc: 0.9149 Pre: 0.8972 Recall: 0.9153 F1: 0.9062 Train AUC: 0.9956 Val AUC: 0.9617 Time: 13.32\n",
      "Epoch: 565 Train Loss: 0.0846 Val Loss: 0.3129 Acc: 0.9058 Pre: 0.8920 Recall: 0.8992 F1: 0.8956 Train AUC: 0.9968 Val AUC: 0.9607 Time: 13.74\n",
      "Epoch: 566 Train Loss: 0.0897 Val Loss: 0.3234 Acc: 0.9040 Pre: 0.8794 Recall: 0.9113 F1: 0.8950 Train AUC: 0.9964 Val AUC: 0.9606 Time: 14.40\n",
      "Epoch: 567 Train Loss: 0.0894 Val Loss: 0.3477 Acc: 0.8913 Pre: 0.8507 Recall: 0.9194 F1: 0.8837 Train AUC: 0.9959 Val AUC: 0.9599 Time: 14.20\n",
      "Epoch: 568 Train Loss: 0.0861 Val Loss: 0.3440 Acc: 0.8931 Pre: 0.8513 Recall: 0.9234 F1: 0.8859 Train AUC: 0.9969 Val AUC: 0.9598 Time: 13.35\n",
      "Epoch: 569 Train Loss: 0.0846 Val Loss: 0.3318 Acc: 0.9040 Pre: 0.8764 Recall: 0.9153 F1: 0.8955 Train AUC: 0.9968 Val AUC: 0.9606 Time: 12.45\n",
      "Epoch: 570 Train Loss: 0.0893 Val Loss: 0.3242 Acc: 0.9130 Pre: 0.8968 Recall: 0.9113 F1: 0.9040 Train AUC: 0.9957 Val AUC: 0.9604 Time: 12.58\n",
      "Epoch: 571 Train Loss: 0.0886 Val Loss: 0.3231 Acc: 0.9004 Pre: 0.8726 Recall: 0.9113 F1: 0.8915 Train AUC: 0.9962 Val AUC: 0.9604 Time: 12.51\n",
      "Epoch: 572 Train Loss: 0.0815 Val Loss: 0.3434 Acc: 0.8877 Pre: 0.8444 Recall: 0.9194 F1: 0.8803 Train AUC: 0.9974 Val AUC: 0.9592 Time: 13.08\n",
      "Epoch: 573 Train Loss: 0.1014 Val Loss: 0.3343 Acc: 0.8967 Pre: 0.8659 Recall: 0.9113 F1: 0.8880 Train AUC: 0.9962 Val AUC: 0.9605 Time: 14.02\n",
      "Epoch: 574 Train Loss: 0.0829 Val Loss: 0.3297 Acc: 0.9076 Pre: 0.8924 Recall: 0.9032 F1: 0.8978 Train AUC: 0.9968 Val AUC: 0.9613 Time: 14.48\n",
      "Epoch: 575 Train Loss: 0.0811 Val Loss: 0.3219 Acc: 0.9076 Pre: 0.8833 Recall: 0.9153 F1: 0.8990 Train AUC: 0.9971 Val AUC: 0.9623 Time: 14.08\n",
      "Epoch: 576 Train Loss: 0.0907 Val Loss: 0.3114 Acc: 0.9022 Pre: 0.8702 Recall: 0.9194 F1: 0.8941 Train AUC: 0.9956 Val AUC: 0.9634 Time: 14.59\n",
      "Epoch: 577 Train Loss: 0.0892 Val Loss: 0.3296 Acc: 0.8986 Pre: 0.8609 Recall: 0.9234 F1: 0.8911 Train AUC: 0.9960 Val AUC: 0.9615 Time: 12.31\n",
      "Epoch: 578 Train Loss: 0.0869 Val Loss: 0.3429 Acc: 0.8895 Pre: 0.8555 Recall: 0.9073 F1: 0.8806 Train AUC: 0.9965 Val AUC: 0.9591 Time: 12.39\n",
      "Epoch: 579 Train Loss: 0.0896 Val Loss: 0.3353 Acc: 0.8931 Pre: 0.8706 Recall: 0.8952 F1: 0.8827 Train AUC: 0.9960 Val AUC: 0.9601 Time: 12.30\n",
      "Epoch: 580 Train Loss: 0.0905 Val Loss: 0.3390 Acc: 0.8986 Pre: 0.8692 Recall: 0.9113 F1: 0.8898 Train AUC: 0.9964 Val AUC: 0.9606 Time: 12.47\n",
      "Epoch: 581 Train Loss: 0.0851 Val Loss: 0.3189 Acc: 0.9040 Pre: 0.8764 Recall: 0.9153 F1: 0.8955 Train AUC: 0.9961 Val AUC: 0.9623 Time: 13.14\n",
      "Epoch: 582 Train Loss: 0.0910 Val Loss: 0.2992 Acc: 0.9130 Pre: 0.8906 Recall: 0.9194 F1: 0.9048 Train AUC: 0.9953 Val AUC: 0.9628 Time: 14.26\n",
      "Epoch: 583 Train Loss: 0.0846 Val Loss: 0.2881 Acc: 0.9130 Pre: 0.8876 Recall: 0.9234 F1: 0.9051 Train AUC: 0.9963 Val AUC: 0.9627 Time: 14.60\n",
      "Epoch: 584 Train Loss: 0.0846 Val Loss: 0.3016 Acc: 0.9058 Pre: 0.8769 Recall: 0.9194 F1: 0.8976 Train AUC: 0.9971 Val AUC: 0.9626 Time: 14.94\n",
      "Epoch: 585 Train Loss: 0.0817 Val Loss: 0.3291 Acc: 0.8967 Pre: 0.8659 Recall: 0.9113 F1: 0.8880 Train AUC: 0.9969 Val AUC: 0.9617 Time: 13.22\n",
      "Epoch: 586 Train Loss: 0.0719 Val Loss: 0.3477 Acc: 0.8986 Pre: 0.8750 Recall: 0.9032 F1: 0.8889 Train AUC: 0.9976 Val AUC: 0.9602 Time: 12.47\n",
      "Epoch: 587 Train Loss: 0.0844 Val Loss: 0.3348 Acc: 0.8967 Pre: 0.8631 Recall: 0.9153 F1: 0.8885 Train AUC: 0.9963 Val AUC: 0.9614 Time: 13.30\n",
      "Epoch: 588 Train Loss: 0.0808 Val Loss: 0.3169 Acc: 0.8986 Pre: 0.8636 Recall: 0.9194 F1: 0.8906 Train AUC: 0.9965 Val AUC: 0.9624 Time: 12.79\n",
      "Epoch: 589 Train Loss: 0.0768 Val Loss: 0.3200 Acc: 0.8986 Pre: 0.8636 Recall: 0.9194 F1: 0.8906 Train AUC: 0.9975 Val AUC: 0.9623 Time: 14.60\n",
      "Epoch: 590 Train Loss: 0.0826 Val Loss: 0.3414 Acc: 0.8949 Pre: 0.8626 Recall: 0.9113 F1: 0.8863 Train AUC: 0.9970 Val AUC: 0.9609 Time: 13.95\n",
      "Epoch: 591 Train Loss: 0.0825 Val Loss: 0.3524 Acc: 0.9022 Pre: 0.8760 Recall: 0.9113 F1: 0.8933 Train AUC: 0.9965 Val AUC: 0.9589 Time: 13.29\n",
      "Epoch: 592 Train Loss: 0.0919 Val Loss: 0.3291 Acc: 0.9058 Pre: 0.8828 Recall: 0.9113 F1: 0.8968 Train AUC: 0.9952 Val AUC: 0.9599 Time: 13.68\n",
      "Epoch: 593 Train Loss: 0.0855 Val Loss: 0.3198 Acc: 0.8967 Pre: 0.8604 Recall: 0.9194 F1: 0.8889 Train AUC: 0.9966 Val AUC: 0.9605 Time: 13.79\n",
      "Epoch: 594 Train Loss: 0.0801 Val Loss: 0.3218 Acc: 0.8895 Pre: 0.8476 Recall: 0.9194 F1: 0.8820 Train AUC: 0.9973 Val AUC: 0.9604 Time: 13.32\n",
      "Epoch: 595 Train Loss: 0.0893 Val Loss: 0.3340 Acc: 0.9004 Pre: 0.8697 Recall: 0.9153 F1: 0.8919 Train AUC: 0.9970 Val AUC: 0.9606 Time: 14.25\n",
      "Epoch: 596 Train Loss: 0.0885 Val Loss: 0.3589 Acc: 0.9022 Pre: 0.8731 Recall: 0.9153 F1: 0.8937 Train AUC: 0.9962 Val AUC: 0.9597 Time: 14.46\n",
      "Epoch: 597 Train Loss: 0.1026 Val Loss: 0.3452 Acc: 0.9040 Pre: 0.8707 Recall: 0.9234 F1: 0.8963 Train AUC: 0.9940 Val AUC: 0.9605 Time: 12.94\n",
      "Epoch: 598 Train Loss: 0.0849 Val Loss: 0.3195 Acc: 0.9040 Pre: 0.8736 Recall: 0.9194 F1: 0.8959 Train AUC: 0.9961 Val AUC: 0.9611 Time: 12.27\n",
      "Epoch: 599 Train Loss: 0.0782 Val Loss: 0.2997 Acc: 0.9004 Pre: 0.8669 Recall: 0.9194 F1: 0.8924 Train AUC: 0.9973 Val AUC: 0.9617 Time: 12.91\n",
      "Epoch: 600 Train Loss: 0.0864 Val Loss: 0.3071 Acc: 0.9040 Pre: 0.8736 Recall: 0.9194 F1: 0.8959 Train AUC: 0.9969 Val AUC: 0.9623 Time: 12.88\n",
      "Epoch: 601 Train Loss: 0.0806 Val Loss: 0.3343 Acc: 0.9004 Pre: 0.8726 Recall: 0.9113 F1: 0.8915 Train AUC: 0.9970 Val AUC: 0.9611 Time: 13.26\n",
      "Epoch: 602 Train Loss: 0.0808 Val Loss: 0.3440 Acc: 0.8967 Pre: 0.8659 Recall: 0.9113 F1: 0.8880 Train AUC: 0.9966 Val AUC: 0.9609 Time: 14.24\n",
      "Epoch: 603 Train Loss: 0.0846 Val Loss: 0.3270 Acc: 0.8986 Pre: 0.8636 Recall: 0.9194 F1: 0.8906 Train AUC: 0.9963 Val AUC: 0.9625 Time: 14.38\n",
      "Epoch: 604 Train Loss: 0.0787 Val Loss: 0.3037 Acc: 0.9040 Pre: 0.8736 Recall: 0.9194 F1: 0.8959 Train AUC: 0.9971 Val AUC: 0.9624 Time: 13.84\n",
      "Epoch: 605 Train Loss: 0.0934 Val Loss: 0.3219 Acc: 0.9004 Pre: 0.8669 Recall: 0.9194 F1: 0.8924 Train AUC: 0.9954 Val AUC: 0.9608 Time: 13.36\n",
      "Epoch: 606 Train Loss: 0.0759 Val Loss: 0.3406 Acc: 0.9022 Pre: 0.8731 Recall: 0.9153 F1: 0.8937 Train AUC: 0.9976 Val AUC: 0.9591 Time: 12.78\n",
      "Epoch: 607 Train Loss: 0.0766 Val Loss: 0.3654 Acc: 0.8986 Pre: 0.8692 Recall: 0.9113 F1: 0.8898 Train AUC: 0.9975 Val AUC: 0.9577 Time: 12.75\n",
      "Epoch: 608 Train Loss: 0.0836 Val Loss: 0.3596 Acc: 0.8949 Pre: 0.8626 Recall: 0.9113 F1: 0.8863 Train AUC: 0.9962 Val AUC: 0.9583 Time: 12.76\n",
      "Epoch: 609 Train Loss: 0.0772 Val Loss: 0.3290 Acc: 0.9040 Pre: 0.8764 Recall: 0.9153 F1: 0.8955 Train AUC: 0.9971 Val AUC: 0.9600 Time: 13.48\n",
      "Epoch: 610 Train Loss: 0.0801 Val Loss: 0.3102 Acc: 0.9076 Pre: 0.8803 Recall: 0.9194 F1: 0.8994 Train AUC: 0.9969 Val AUC: 0.9614 Time: 13.98\n",
      "Epoch: 611 Train Loss: 0.0833 Val Loss: 0.3175 Acc: 0.9076 Pre: 0.8803 Recall: 0.9194 F1: 0.8994 Train AUC: 0.9968 Val AUC: 0.9600 Time: 14.55\n",
      "Epoch: 612 Train Loss: 0.0816 Val Loss: 0.3491 Acc: 0.8986 Pre: 0.8636 Recall: 0.9194 F1: 0.8906 Train AUC: 0.9970 Val AUC: 0.9582 Time: 14.95\n",
      "Epoch: 613 Train Loss: 0.0821 Val Loss: 0.3756 Acc: 0.8931 Pre: 0.8566 Recall: 0.9153 F1: 0.8850 Train AUC: 0.9968 Val AUC: 0.9563 Time: 13.16\n",
      "Epoch: 614 Train Loss: 0.0876 Val Loss: 0.3538 Acc: 0.8931 Pre: 0.8593 Recall: 0.9113 F1: 0.8845 Train AUC: 0.9961 Val AUC: 0.9596 Time: 12.50\n",
      "Epoch: 615 Train Loss: 0.0815 Val Loss: 0.3196 Acc: 0.9094 Pre: 0.8898 Recall: 0.9113 F1: 0.9004 Train AUC: 0.9968 Val AUC: 0.9616 Time: 13.00\n",
      "Epoch: 616 Train Loss: 0.0846 Val Loss: 0.3028 Acc: 0.9076 Pre: 0.8988 Recall: 0.8952 F1: 0.8970 Train AUC: 0.9966 Val AUC: 0.9628 Time: 14.04\n",
      "Epoch: 617 Train Loss: 0.0845 Val Loss: 0.2994 Acc: 0.9058 Pre: 0.8769 Recall: 0.9194 F1: 0.8976 Train AUC: 0.9966 Val AUC: 0.9636 Time: 14.12\n",
      "Epoch: 618 Train Loss: 0.0822 Val Loss: 0.3258 Acc: 0.8967 Pre: 0.8604 Recall: 0.9194 F1: 0.8889 Train AUC: 0.9968 Val AUC: 0.9609 Time: 13.96\n",
      "Epoch: 619 Train Loss: 0.0909 Val Loss: 0.3406 Acc: 0.9040 Pre: 0.8794 Recall: 0.9113 F1: 0.8950 Train AUC: 0.9963 Val AUC: 0.9588 Time: 13.02\n",
      "Epoch: 620 Train Loss: 0.0780 Val Loss: 0.3464 Acc: 0.9004 Pre: 0.8845 Recall: 0.8952 F1: 0.8898 Train AUC: 0.9970 Val AUC: 0.9591 Time: 12.44\n",
      "Epoch: 621 Train Loss: 0.0880 Val Loss: 0.3251 Acc: 0.9040 Pre: 0.8707 Recall: 0.9234 F1: 0.8963 Train AUC: 0.9960 Val AUC: 0.9614 Time: 12.83\n",
      "Epoch: 622 Train Loss: 0.0784 Val Loss: 0.3212 Acc: 0.9076 Pre: 0.8774 Recall: 0.9234 F1: 0.8998 Train AUC: 0.9972 Val AUC: 0.9629 Time: 13.29\n",
      "Epoch: 623 Train Loss: 0.0885 Val Loss: 0.3239 Acc: 0.9094 Pre: 0.8837 Recall: 0.9194 F1: 0.9012 Train AUC: 0.9966 Val AUC: 0.9610 Time: 13.78\n",
      "Epoch: 624 Train Loss: 0.0767 Val Loss: 0.3449 Acc: 0.9022 Pre: 0.8702 Recall: 0.9194 F1: 0.8941 Train AUC: 0.9975 Val AUC: 0.9589 Time: 14.56\n",
      "Epoch: 625 Train Loss: 0.0790 Val Loss: 0.3668 Acc: 0.8986 Pre: 0.8636 Recall: 0.9194 F1: 0.8906 Train AUC: 0.9971 Val AUC: 0.9578 Time: 14.06\n",
      "Epoch: 626 Train Loss: 0.0790 Val Loss: 0.3579 Acc: 0.9004 Pre: 0.8669 Recall: 0.9194 F1: 0.8924 Train AUC: 0.9971 Val AUC: 0.9582 Time: 13.58\n",
      "Epoch: 627 Train Loss: 0.0839 Val Loss: 0.3445 Acc: 0.9040 Pre: 0.8736 Recall: 0.9194 F1: 0.8959 Train AUC: 0.9965 Val AUC: 0.9594 Time: 12.52\n",
      "Epoch: 628 Train Loss: 0.0818 Val Loss: 0.3344 Acc: 0.9040 Pre: 0.8736 Recall: 0.9194 F1: 0.8959 Train AUC: 0.9966 Val AUC: 0.9605 Time: 12.42\n",
      "Epoch: 629 Train Loss: 0.0795 Val Loss: 0.3370 Acc: 0.8949 Pre: 0.8571 Recall: 0.9194 F1: 0.8872 Train AUC: 0.9966 Val AUC: 0.9614 Time: 12.27\n",
      "Epoch: 630 Train Loss: 0.0895 Val Loss: 0.3286 Acc: 0.8949 Pre: 0.8571 Recall: 0.9194 F1: 0.8872 Train AUC: 0.9957 Val AUC: 0.9620 Time: 13.60\n",
      "Epoch: 631 Train Loss: 0.0683 Val Loss: 0.3330 Acc: 0.9040 Pre: 0.8794 Recall: 0.9113 F1: 0.8950 Train AUC: 0.9984 Val AUC: 0.9601 Time: 13.82\n",
      "Epoch: 632 Train Loss: 0.0809 Val Loss: 0.3257 Acc: 0.9040 Pre: 0.8794 Recall: 0.9113 F1: 0.8950 Train AUC: 0.9964 Val AUC: 0.9603 Time: 14.47\n",
      "Epoch: 633 Train Loss: 0.0776 Val Loss: 0.3123 Acc: 0.8931 Pre: 0.8539 Recall: 0.9194 F1: 0.8854 Train AUC: 0.9973 Val AUC: 0.9617 Time: 14.18\n",
      "Epoch: 634 Train Loss: 0.0897 Val Loss: 0.3240 Acc: 0.9040 Pre: 0.8736 Recall: 0.9194 F1: 0.8959 Train AUC: 0.9967 Val AUC: 0.9626 Time: 14.30\n",
      "Epoch: 635 Train Loss: 0.0827 Val Loss: 0.3432 Acc: 0.9112 Pre: 0.8933 Recall: 0.9113 F1: 0.9022 Train AUC: 0.9966 Val AUC: 0.9596 Time: 12.14\n",
      "Epoch: 636 Train Loss: 0.0925 Val Loss: 0.3434 Acc: 0.9004 Pre: 0.8726 Recall: 0.9113 F1: 0.8915 Train AUC: 0.9952 Val AUC: 0.9592 Time: 12.49\n",
      "Epoch: 637 Train Loss: 0.0802 Val Loss: 0.3395 Acc: 0.8877 Pre: 0.8470 Recall: 0.9153 F1: 0.8798 Train AUC: 0.9967 Val AUC: 0.9596 Time: 12.48\n",
      "Epoch: 638 Train Loss: 0.0904 Val Loss: 0.3331 Acc: 0.8949 Pre: 0.8626 Recall: 0.9113 F1: 0.8863 Train AUC: 0.9964 Val AUC: 0.9598 Time: 13.50\n",
      "Epoch: 639 Train Loss: 0.0800 Val Loss: 0.3434 Acc: 0.8986 Pre: 0.8750 Recall: 0.9032 F1: 0.8889 Train AUC: 0.9976 Val AUC: 0.9590 Time: 14.36\n",
      "Epoch: 640 Train Loss: 0.0819 Val Loss: 0.3381 Acc: 0.9094 Pre: 0.8898 Recall: 0.9113 F1: 0.9004 Train AUC: 0.9969 Val AUC: 0.9596 Time: 14.61\n",
      "Epoch: 641 Train Loss: 0.0885 Val Loss: 0.3278 Acc: 0.9040 Pre: 0.8707 Recall: 0.9234 F1: 0.8963 Train AUC: 0.9958 Val AUC: 0.9610 Time: 14.01\n",
      "Epoch: 642 Train Loss: 0.0917 Val Loss: 0.3129 Acc: 0.8949 Pre: 0.8571 Recall: 0.9194 F1: 0.8872 Train AUC: 0.9953 Val AUC: 0.9622 Time: 13.14\n",
      "Epoch: 643 Train Loss: 0.0799 Val Loss: 0.3063 Acc: 0.9022 Pre: 0.8702 Recall: 0.9194 F1: 0.8941 Train AUC: 0.9975 Val AUC: 0.9606 Time: 12.91\n",
      "Epoch: 644 Train Loss: 0.0802 Val Loss: 0.3065 Acc: 0.9058 Pre: 0.8858 Recall: 0.9073 F1: 0.8964 Train AUC: 0.9972 Val AUC: 0.9600 Time: 14.02\n",
      "Epoch: 645 Train Loss: 0.0941 Val Loss: 0.3311 Acc: 0.9040 Pre: 0.8854 Recall: 0.9032 F1: 0.8942 Train AUC: 0.9962 Val AUC: 0.9597 Time: 13.18\n",
      "Epoch: 646 Train Loss: 0.0758 Val Loss: 0.3441 Acc: 0.9004 Pre: 0.8697 Recall: 0.9153 F1: 0.8919 Train AUC: 0.9974 Val AUC: 0.9596 Time: 13.15\n",
      "Epoch: 647 Train Loss: 0.0876 Val Loss: 0.3267 Acc: 0.9112 Pre: 0.8902 Recall: 0.9153 F1: 0.9026 Train AUC: 0.9960 Val AUC: 0.9613 Time: 13.23\n",
      "Epoch: 648 Train Loss: 0.0848 Val Loss: 0.3180 Acc: 0.9022 Pre: 0.8702 Recall: 0.9194 F1: 0.8941 Train AUC: 0.9960 Val AUC: 0.9616 Time: 12.83\n",
      "Epoch: 649 Train Loss: 0.0831 Val Loss: 0.3403 Acc: 0.8931 Pre: 0.8539 Recall: 0.9194 F1: 0.8854 Train AUC: 0.9965 Val AUC: 0.9584 Time: 12.93\n",
      "Epoch: 650 Train Loss: 0.0881 Val Loss: 0.3455 Acc: 0.9004 Pre: 0.8669 Recall: 0.9194 F1: 0.8924 Train AUC: 0.9963 Val AUC: 0.9584 Time: 14.62\n",
      "Epoch: 651 Train Loss: 0.0846 Val Loss: 0.3413 Acc: 0.9076 Pre: 0.8803 Recall: 0.9194 F1: 0.8994 Train AUC: 0.9965 Val AUC: 0.9597 Time: 14.32\n",
      "Epoch: 652 Train Loss: 0.0746 Val Loss: 0.3274 Acc: 0.8986 Pre: 0.8692 Recall: 0.9113 F1: 0.8898 Train AUC: 0.9975 Val AUC: 0.9616 Time: 14.11\n",
      "Epoch: 653 Train Loss: 0.0815 Val Loss: 0.3158 Acc: 0.8986 Pre: 0.8664 Recall: 0.9153 F1: 0.8902 Train AUC: 0.9964 Val AUC: 0.9635 Time: 12.78\n",
      "Epoch: 654 Train Loss: 0.0770 Val Loss: 0.3052 Acc: 0.9076 Pre: 0.8803 Recall: 0.9194 F1: 0.8994 Train AUC: 0.9972 Val AUC: 0.9632 Time: 13.05\n",
      "Epoch: 655 Train Loss: 0.0890 Val Loss: 0.3318 Acc: 0.9112 Pre: 0.8872 Recall: 0.9194 F1: 0.9030 Train AUC: 0.9960 Val AUC: 0.9602 Time: 14.02\n",
      "Epoch: 656 Train Loss: 0.0855 Val Loss: 0.3693 Acc: 0.9022 Pre: 0.8731 Recall: 0.9153 F1: 0.8937 Train AUC: 0.9969 Val AUC: 0.9579 Time: 14.33\n",
      "Epoch: 657 Train Loss: 0.0840 Val Loss: 0.3632 Acc: 0.9004 Pre: 0.8669 Recall: 0.9194 F1: 0.8924 Train AUC: 0.9964 Val AUC: 0.9597 Time: 13.21\n",
      "Epoch: 658 Train Loss: 0.0751 Val Loss: 0.3365 Acc: 0.9004 Pre: 0.8669 Recall: 0.9194 F1: 0.8924 Train AUC: 0.9970 Val AUC: 0.9615 Time: 13.19\n",
      "Epoch: 659 Train Loss: 0.0781 Val Loss: 0.3144 Acc: 0.9112 Pre: 0.8842 Recall: 0.9234 F1: 0.9034 Train AUC: 0.9968 Val AUC: 0.9629 Time: 13.54\n",
      "Epoch: 660 Train Loss: 0.0836 Val Loss: 0.3120 Acc: 0.9094 Pre: 0.8808 Recall: 0.9234 F1: 0.9016 Train AUC: 0.9963 Val AUC: 0.9623 Time: 14.25\n",
      "Epoch: 661 Train Loss: 0.0733 Val Loss: 0.3269 Acc: 0.8986 Pre: 0.8582 Recall: 0.9274 F1: 0.8915 Train AUC: 0.9981 Val AUC: 0.9615 Time: 14.00\n",
      "Epoch: 662 Train Loss: 0.0847 Val Loss: 0.3626 Acc: 0.8931 Pre: 0.8539 Recall: 0.9194 F1: 0.8854 Train AUC: 0.9966 Val AUC: 0.9589 Time: 13.54\n",
      "Epoch: 663 Train Loss: 0.0824 Val Loss: 0.3613 Acc: 0.9022 Pre: 0.8760 Recall: 0.9113 F1: 0.8933 Train AUC: 0.9968 Val AUC: 0.9596 Time: 12.38\n",
      "Epoch: 664 Train Loss: 0.0757 Val Loss: 0.3319 Acc: 0.9076 Pre: 0.8803 Recall: 0.9194 F1: 0.8994 Train AUC: 0.9971 Val AUC: 0.9619 Time: 12.37\n",
      "Epoch: 665 Train Loss: 0.0739 Val Loss: 0.3098 Acc: 0.9022 Pre: 0.8674 Recall: 0.9234 F1: 0.8945 Train AUC: 0.9976 Val AUC: 0.9639 Time: 12.65\n",
      "Epoch: 666 Train Loss: 0.0804 Val Loss: 0.3055 Acc: 0.9004 Pre: 0.8614 Recall: 0.9274 F1: 0.8932 Train AUC: 0.9966 Val AUC: 0.9647 Time: 13.58\n",
      "Epoch: 667 Train Loss: 0.0879 Val Loss: 0.3152 Acc: 0.8967 Pre: 0.8577 Recall: 0.9234 F1: 0.8893 Train AUC: 0.9962 Val AUC: 0.9634 Time: 13.98\n",
      "Epoch: 668 Train Loss: 0.0718 Val Loss: 0.3508 Acc: 0.8949 Pre: 0.8740 Recall: 0.8952 F1: 0.8845 Train AUC: 0.9981 Val AUC: 0.9595 Time: 14.68\n",
      "Epoch: 669 Train Loss: 0.0833 Val Loss: 0.3818 Acc: 0.8967 Pre: 0.8745 Recall: 0.8992 F1: 0.8867 Train AUC: 0.9969 Val AUC: 0.9567 Time: 12.79\n",
      "Epoch: 670 Train Loss: 0.0960 Val Loss: 0.3461 Acc: 0.8967 Pre: 0.8687 Recall: 0.9073 F1: 0.8876 Train AUC: 0.9949 Val AUC: 0.9594 Time: 12.68\n",
      "Epoch: 671 Train Loss: 0.0728 Val Loss: 0.3263 Acc: 0.9040 Pre: 0.8707 Recall: 0.9234 F1: 0.8963 Train AUC: 0.9976 Val AUC: 0.9607 Time: 13.27\n",
      "Epoch: 672 Train Loss: 0.0730 Val Loss: 0.3123 Acc: 0.8986 Pre: 0.8636 Recall: 0.9194 F1: 0.8906 Train AUC: 0.9978 Val AUC: 0.9605 Time: 13.50\n",
      "Epoch: 673 Train Loss: 0.0868 Val Loss: 0.3263 Acc: 0.9094 Pre: 0.8837 Recall: 0.9194 F1: 0.9012 Train AUC: 0.9968 Val AUC: 0.9602 Time: 14.28\n",
      "Epoch: 674 Train Loss: 0.0790 Val Loss: 0.3636 Acc: 0.9004 Pre: 0.8755 Recall: 0.9073 F1: 0.8911 Train AUC: 0.9976 Val AUC: 0.9578 Time: 14.44\n",
      "Epoch: 675 Train Loss: 0.0839 Val Loss: 0.3840 Acc: 0.8931 Pre: 0.8621 Recall: 0.9073 F1: 0.8841 Train AUC: 0.9966 Val AUC: 0.9577 Time: 14.56\n",
      "Epoch: 676 Train Loss: 0.0969 Val Loss: 0.3459 Acc: 0.8931 Pre: 0.8593 Recall: 0.9113 F1: 0.8845 Train AUC: 0.9950 Val AUC: 0.9609 Time: 12.35\n",
      "Epoch: 677 Train Loss: 0.0797 Val Loss: 0.3012 Acc: 0.9058 Pre: 0.8769 Recall: 0.9194 F1: 0.8976 Train AUC: 0.9970 Val AUC: 0.9643 Time: 12.17\n",
      "Epoch: 678 Train Loss: 0.0807 Val Loss: 0.2988 Acc: 0.9112 Pre: 0.8842 Recall: 0.9234 F1: 0.9034 Train AUC: 0.9969 Val AUC: 0.9627 Time: 12.46\n",
      "Epoch: 679 Train Loss: 0.0916 Val Loss: 0.3249 Acc: 0.9094 Pre: 0.8808 Recall: 0.9234 F1: 0.9016 Train AUC: 0.9956 Val AUC: 0.9614 Time: 13.38\n",
      "Epoch: 680 Train Loss: 0.0847 Val Loss: 0.3561 Acc: 0.8949 Pre: 0.8740 Recall: 0.8952 F1: 0.8845 Train AUC: 0.9959 Val AUC: 0.9589 Time: 13.87\n",
      "Epoch: 681 Train Loss: 0.0815 Val Loss: 0.3617 Acc: 0.9004 Pre: 0.8784 Recall: 0.9032 F1: 0.8907 Train AUC: 0.9966 Val AUC: 0.9579 Time: 14.21\n",
      "Epoch: 682 Train Loss: 0.0806 Val Loss: 0.3338 Acc: 0.9022 Pre: 0.8760 Recall: 0.9113 F1: 0.8933 Train AUC: 0.9968 Val AUC: 0.9592 Time: 14.89\n",
      "Epoch: 683 Train Loss: 0.0841 Val Loss: 0.3068 Acc: 0.8913 Pre: 0.8507 Recall: 0.9194 F1: 0.8837 Train AUC: 0.9966 Val AUC: 0.9621 Time: 12.67\n",
      "Epoch: 684 Train Loss: 0.0880 Val Loss: 0.2987 Acc: 0.9112 Pre: 0.8872 Recall: 0.9194 F1: 0.9030 Train AUC: 0.9966 Val AUC: 0.9621 Time: 12.53\n",
      "Epoch: 685 Train Loss: 0.0814 Val Loss: 0.3305 Acc: 0.9094 Pre: 0.8867 Recall: 0.9153 F1: 0.9008 Train AUC: 0.9968 Val AUC: 0.9597 Time: 12.41\n",
      "Epoch: 686 Train Loss: 0.0755 Val Loss: 0.3605 Acc: 0.8931 Pre: 0.8566 Recall: 0.9153 F1: 0.8850 Train AUC: 0.9975 Val AUC: 0.9579 Time: 13.42\n",
      "Epoch: 687 Train Loss: 0.0842 Val Loss: 0.3637 Acc: 0.8931 Pre: 0.8513 Recall: 0.9234 F1: 0.8859 Train AUC: 0.9964 Val AUC: 0.9582 Time: 13.80\n",
      "Epoch: 688 Train Loss: 0.0851 Val Loss: 0.3267 Acc: 0.9022 Pre: 0.8731 Recall: 0.9153 F1: 0.8937 Train AUC: 0.9964 Val AUC: 0.9609 Time: 13.89\n",
      "Epoch: 689 Train Loss: 0.0694 Val Loss: 0.3084 Acc: 0.9130 Pre: 0.8968 Recall: 0.9113 F1: 0.9040 Train AUC: 0.9982 Val AUC: 0.9614 Time: 13.78\n",
      "Epoch: 690 Train Loss: 0.0799 Val Loss: 0.3066 Acc: 0.9130 Pre: 0.8968 Recall: 0.9113 F1: 0.9040 Train AUC: 0.9974 Val AUC: 0.9614 Time: 14.68\n",
      "Epoch: 691 Train Loss: 0.0918 Val Loss: 0.3177 Acc: 0.9040 Pre: 0.8764 Recall: 0.9153 F1: 0.8955 Train AUC: 0.9955 Val AUC: 0.9610 Time: 12.32\n",
      "Epoch: 692 Train Loss: 0.0715 Val Loss: 0.3365 Acc: 0.8967 Pre: 0.8577 Recall: 0.9234 F1: 0.8893 Train AUC: 0.9980 Val AUC: 0.9597 Time: 12.43\n",
      "Epoch: 693 Train Loss: 0.0783 Val Loss: 0.3453 Acc: 0.8967 Pre: 0.8550 Recall: 0.9274 F1: 0.8897 Train AUC: 0.9974 Val AUC: 0.9589 Time: 12.38\n",
      "Epoch: 694 Train Loss: 0.0893 Val Loss: 0.3255 Acc: 0.9058 Pre: 0.8740 Recall: 0.9234 F1: 0.8980 Train AUC: 0.9958 Val AUC: 0.9605 Time: 12.68\n",
      "Epoch: 695 Train Loss: 0.0733 Val Loss: 0.3190 Acc: 0.9058 Pre: 0.8828 Recall: 0.9113 F1: 0.8968 Train AUC: 0.9982 Val AUC: 0.9619 Time: 13.73\n",
      "Epoch: 696 Train Loss: 0.0750 Val Loss: 0.3225 Acc: 0.9076 Pre: 0.8863 Recall: 0.9113 F1: 0.8986 Train AUC: 0.9976 Val AUC: 0.9612 Time: 14.28\n",
      "Epoch: 697 Train Loss: 0.0710 Val Loss: 0.3327 Acc: 0.8949 Pre: 0.8626 Recall: 0.9113 F1: 0.8863 Train AUC: 0.9980 Val AUC: 0.9614 Time: 14.41\n",
      "Epoch: 698 Train Loss: 0.0776 Val Loss: 0.3359 Acc: 0.8949 Pre: 0.8598 Recall: 0.9153 F1: 0.8867 Train AUC: 0.9973 Val AUC: 0.9606 Time: 14.82\n",
      "Epoch: 699 Train Loss: 0.0756 Val Loss: 0.3359 Acc: 0.9022 Pre: 0.8731 Recall: 0.9153 F1: 0.8937 Train AUC: 0.9974 Val AUC: 0.9595 Time: 14.13\n",
      "Epoch: 700 Train Loss: 0.0718 Val Loss: 0.3273 Acc: 0.9130 Pre: 0.8937 Recall: 0.9153 F1: 0.9044 Train AUC: 0.9978 Val AUC: 0.9601 Time: 12.91\n",
      "Epoch: 701 Train Loss: 0.0765 Val Loss: 0.3219 Acc: 0.9094 Pre: 0.8837 Recall: 0.9194 F1: 0.9012 Train AUC: 0.9971 Val AUC: 0.9602 Time: 12.32\n",
      "Epoch: 702 Train Loss: 0.0771 Val Loss: 0.3294 Acc: 0.8986 Pre: 0.8609 Recall: 0.9234 F1: 0.8911 Train AUC: 0.9970 Val AUC: 0.9603 Time: 12.22\n",
      "Epoch: 703 Train Loss: 0.0776 Val Loss: 0.3410 Acc: 0.9004 Pre: 0.8669 Recall: 0.9194 F1: 0.8924 Train AUC: 0.9972 Val AUC: 0.9608 Time: 12.80\n",
      "Epoch: 704 Train Loss: 0.0744 Val Loss: 0.3360 Acc: 0.8986 Pre: 0.8692 Recall: 0.9113 F1: 0.8898 Train AUC: 0.9977 Val AUC: 0.9613 Time: 13.23\n",
      "Epoch: 705 Train Loss: 0.0732 Val Loss: 0.3308 Acc: 0.9112 Pre: 0.8964 Recall: 0.9073 F1: 0.9018 Train AUC: 0.9975 Val AUC: 0.9614 Time: 14.47\n",
      "Epoch: 706 Train Loss: 0.0805 Val Loss: 0.3269 Acc: 0.9004 Pre: 0.8669 Recall: 0.9194 F1: 0.8924 Train AUC: 0.9969 Val AUC: 0.9613 Time: 14.80\n",
      "Epoch: 707 Train Loss: 0.0745 Val Loss: 0.3226 Acc: 0.8913 Pre: 0.8481 Recall: 0.9234 F1: 0.8842 Train AUC: 0.9973 Val AUC: 0.9619 Time: 14.60\n",
      "Epoch: 708 Train Loss: 0.0848 Val Loss: 0.3082 Acc: 0.9094 Pre: 0.8808 Recall: 0.9234 F1: 0.9016 Train AUC: 0.9966 Val AUC: 0.9634 Time: 12.97\n",
      "Epoch: 709 Train Loss: 0.0697 Val Loss: 0.3113 Acc: 0.9130 Pre: 0.8906 Recall: 0.9194 F1: 0.9048 Train AUC: 0.9981 Val AUC: 0.9628 Time: 12.52\n",
      "Epoch: 710 Train Loss: 0.0733 Val Loss: 0.3282 Acc: 0.9022 Pre: 0.8702 Recall: 0.9194 F1: 0.8941 Train AUC: 0.9975 Val AUC: 0.9615 Time: 13.02\n",
      "Epoch: 711 Train Loss: 0.0768 Val Loss: 0.3279 Acc: 0.9058 Pre: 0.8769 Recall: 0.9194 F1: 0.8976 Train AUC: 0.9971 Val AUC: 0.9610 Time: 13.70\n",
      "Epoch: 712 Train Loss: 0.0748 Val Loss: 0.3242 Acc: 0.9058 Pre: 0.8769 Recall: 0.9194 F1: 0.8976 Train AUC: 0.9974 Val AUC: 0.9600 Time: 14.12\n",
      "Epoch: 713 Train Loss: 0.0690 Val Loss: 0.3259 Acc: 0.9112 Pre: 0.8933 Recall: 0.9113 F1: 0.9022 Train AUC: 0.9985 Val AUC: 0.9604 Time: 13.98\n",
      "Epoch: 714 Train Loss: 0.0668 Val Loss: 0.3144 Acc: 0.9203 Pre: 0.9080 Recall: 0.9153 F1: 0.9116 Train AUC: 0.9984 Val AUC: 0.9630 Time: 12.62\n",
      "Epoch: 715 Train Loss: 0.0742 Val Loss: 0.3090 Acc: 0.9094 Pre: 0.8808 Recall: 0.9234 F1: 0.9016 Train AUC: 0.9978 Val AUC: 0.9642 Time: 12.39\n",
      "Epoch: 716 Train Loss: 0.0748 Val Loss: 0.3155 Acc: 0.9130 Pre: 0.8846 Recall: 0.9274 F1: 0.9055 Train AUC: 0.9974 Val AUC: 0.9641 Time: 12.40\n",
      "Epoch: 717 Train Loss: 0.0745 Val Loss: 0.3257 Acc: 0.9076 Pre: 0.8774 Recall: 0.9234 F1: 0.8998 Train AUC: 0.9972 Val AUC: 0.9626 Time: 13.46\n",
      "Epoch: 718 Train Loss: 0.0718 Val Loss: 0.3332 Acc: 0.9040 Pre: 0.8764 Recall: 0.9153 F1: 0.8955 Train AUC: 0.9976 Val AUC: 0.9607 Time: 13.89\n",
      "Epoch: 719 Train Loss: 0.0712 Val Loss: 0.3405 Acc: 0.9094 Pre: 0.8898 Recall: 0.9113 F1: 0.9004 Train AUC: 0.9979 Val AUC: 0.9587 Time: 14.24\n",
      "Epoch: 720 Train Loss: 0.0753 Val Loss: 0.3530 Acc: 0.9040 Pre: 0.8764 Recall: 0.9153 F1: 0.8955 Train AUC: 0.9975 Val AUC: 0.9581 Time: 14.43\n",
      "Epoch: 721 Train Loss: 0.0794 Val Loss: 0.3536 Acc: 0.9112 Pre: 0.8842 Recall: 0.9234 F1: 0.9034 Train AUC: 0.9968 Val AUC: 0.9591 Time: 14.18\n",
      "Epoch: 722 Train Loss: 0.0737 Val Loss: 0.3445 Acc: 0.9130 Pre: 0.8906 Recall: 0.9194 F1: 0.9048 Train AUC: 0.9971 Val AUC: 0.9606 Time: 12.12\n",
      "Epoch: 723 Train Loss: 0.0800 Val Loss: 0.3181 Acc: 0.9130 Pre: 0.8968 Recall: 0.9113 F1: 0.9040 Train AUC: 0.9965 Val AUC: 0.9629 Time: 12.34\n",
      "Epoch: 724 Train Loss: 0.0736 Val Loss: 0.3075 Acc: 0.9130 Pre: 0.8968 Recall: 0.9113 F1: 0.9040 Train AUC: 0.9973 Val AUC: 0.9637 Time: 12.51\n",
      "Epoch: 725 Train Loss: 0.0694 Val Loss: 0.3157 Acc: 0.8949 Pre: 0.8571 Recall: 0.9194 F1: 0.8872 Train AUC: 0.9981 Val AUC: 0.9633 Time: 12.67\n",
      "Epoch: 726 Train Loss: 0.0798 Val Loss: 0.3300 Acc: 0.8949 Pre: 0.8571 Recall: 0.9194 F1: 0.8872 Train AUC: 0.9974 Val AUC: 0.9620 Time: 13.52\n",
      "Epoch: 727 Train Loss: 0.0702 Val Loss: 0.3501 Acc: 0.9058 Pre: 0.8828 Recall: 0.9113 F1: 0.8968 Train AUC: 0.9978 Val AUC: 0.9609 Time: 13.65\n",
      "Epoch: 728 Train Loss: 0.0715 Val Loss: 0.3633 Acc: 0.9076 Pre: 0.8863 Recall: 0.9113 F1: 0.8986 Train AUC: 0.9976 Val AUC: 0.9598 Time: 14.39\n",
      "Epoch: 729 Train Loss: 0.0789 Val Loss: 0.3447 Acc: 0.9076 Pre: 0.8774 Recall: 0.9234 F1: 0.8998 Train AUC: 0.9967 Val AUC: 0.9606 Time: 14.86\n",
      "Epoch: 730 Train Loss: 0.0732 Val Loss: 0.3431 Acc: 0.8967 Pre: 0.8550 Recall: 0.9274 F1: 0.8897 Train AUC: 0.9975 Val AUC: 0.9605 Time: 14.22\n",
      "Epoch: 731 Train Loss: 0.0760 Val Loss: 0.3354 Acc: 0.8931 Pre: 0.8513 Recall: 0.9234 F1: 0.8859 Train AUC: 0.9973 Val AUC: 0.9611 Time: 13.97\n",
      "Epoch: 732 Train Loss: 0.0702 Val Loss: 0.3429 Acc: 0.9112 Pre: 0.8933 Recall: 0.9113 F1: 0.9022 Train AUC: 0.9980 Val AUC: 0.9590 Time: 12.66\n",
      "Epoch: 733 Train Loss: 0.0752 Val Loss: 0.3570 Acc: 0.9203 Pre: 0.9113 Recall: 0.9113 F1: 0.9113 Train AUC: 0.9975 Val AUC: 0.9586 Time: 12.37\n",
      "Epoch: 734 Train Loss: 0.0895 Val Loss: 0.3486 Acc: 0.8986 Pre: 0.8664 Recall: 0.9153 F1: 0.8902 Train AUC: 0.9965 Val AUC: 0.9606 Time: 12.47\n",
      "Epoch: 735 Train Loss: 0.0763 Val Loss: 0.3443 Acc: 0.9004 Pre: 0.8642 Recall: 0.9234 F1: 0.8928 Train AUC: 0.9978 Val AUC: 0.9612 Time: 12.42\n",
      "Epoch: 736 Train Loss: 0.0717 Val Loss: 0.3221 Acc: 0.9094 Pre: 0.8808 Recall: 0.9234 F1: 0.9016 Train AUC: 0.9980 Val AUC: 0.9619 Time: 12.37\n",
      "Epoch: 737 Train Loss: 0.0703 Val Loss: 0.3189 Acc: 0.9149 Pre: 0.9102 Recall: 0.8992 F1: 0.9047 Train AUC: 0.9975 Val AUC: 0.9617 Time: 12.62\n",
      "Epoch: 738 Train Loss: 0.0768 Val Loss: 0.3226 Acc: 0.9112 Pre: 0.8902 Recall: 0.9153 F1: 0.9026 Train AUC: 0.9978 Val AUC: 0.9616 Time: 13.89\n",
      "Epoch: 739 Train Loss: 0.0642 Val Loss: 0.3312 Acc: 0.8931 Pre: 0.8513 Recall: 0.9234 F1: 0.8859 Train AUC: 0.9983 Val AUC: 0.9632 Time: 14.09\n",
      "Epoch: 740 Train Loss: 0.0705 Val Loss: 0.3397 Acc: 0.8931 Pre: 0.8513 Recall: 0.9234 F1: 0.8859 Train AUC: 0.9983 Val AUC: 0.9623 Time: 14.03\n",
      "Epoch: 741 Train Loss: 0.0844 Val Loss: 0.3425 Acc: 0.9058 Pre: 0.8828 Recall: 0.9113 F1: 0.8968 Train AUC: 0.9973 Val AUC: 0.9597 Time: 14.71\n",
      "Epoch: 742 Train Loss: 0.0715 Val Loss: 0.3483 Acc: 0.9094 Pre: 0.8929 Recall: 0.9073 F1: 0.9000 Train AUC: 0.9975 Val AUC: 0.9585 Time: 14.02\n",
      "Epoch: 743 Train Loss: 0.0772 Val Loss: 0.3360 Acc: 0.9004 Pre: 0.8642 Recall: 0.9234 F1: 0.8928 Train AUC: 0.9974 Val AUC: 0.9601 Time: 13.62\n",
      "Epoch: 744 Train Loss: 0.0680 Val Loss: 0.3299 Acc: 0.8967 Pre: 0.8550 Recall: 0.9274 F1: 0.8897 Train AUC: 0.9982 Val AUC: 0.9607 Time: 12.35\n",
      "Epoch: 745 Train Loss: 0.0779 Val Loss: 0.3186 Acc: 0.9040 Pre: 0.8679 Recall: 0.9274 F1: 0.8967 Train AUC: 0.9972 Val AUC: 0.9613 Time: 12.55\n",
      "Epoch: 746 Train Loss: 0.0745 Val Loss: 0.3197 Acc: 0.9112 Pre: 0.8964 Recall: 0.9073 F1: 0.9018 Train AUC: 0.9974 Val AUC: 0.9612 Time: 12.26\n",
      "Epoch: 747 Train Loss: 0.0744 Val Loss: 0.3238 Acc: 0.9040 Pre: 0.8824 Recall: 0.9073 F1: 0.8946 Train AUC: 0.9978 Val AUC: 0.9612 Time: 12.33\n",
      "Epoch: 748 Train Loss: 0.0674 Val Loss: 0.3247 Acc: 0.9022 Pre: 0.8674 Recall: 0.9234 F1: 0.8945 Train AUC: 0.9981 Val AUC: 0.9626 Time: 12.46\n",
      "Epoch: 749 Train Loss: 0.0775 Val Loss: 0.3286 Acc: 0.8895 Pre: 0.8450 Recall: 0.9234 F1: 0.8825 Train AUC: 0.9975 Val AUC: 0.9631 Time: 13.42\n",
      "Epoch: 750 Train Loss: 0.0816 Val Loss: 0.3122 Acc: 0.9112 Pre: 0.8872 Recall: 0.9194 F1: 0.9030 Train AUC: 0.9972 Val AUC: 0.9622 Time: 14.00\n",
      "Epoch: 751 Train Loss: 0.0738 Val Loss: 0.3167 Acc: 0.9149 Pre: 0.8972 Recall: 0.9153 F1: 0.9062 Train AUC: 0.9975 Val AUC: 0.9611 Time: 14.61\n",
      "Epoch: 752 Train Loss: 0.0771 Val Loss: 0.3231 Acc: 0.9149 Pre: 0.9004 Recall: 0.9113 F1: 0.9058 Train AUC: 0.9974 Val AUC: 0.9603 Time: 15.17\n",
      "Epoch: 753 Train Loss: 0.0682 Val Loss: 0.3418 Acc: 0.8967 Pre: 0.8631 Recall: 0.9153 F1: 0.8885 Train AUC: 0.9983 Val AUC: 0.9593 Time: 13.25\n",
      "Epoch: 754 Train Loss: 0.0703 Val Loss: 0.3488 Acc: 0.8967 Pre: 0.8604 Recall: 0.9194 F1: 0.8889 Train AUC: 0.9982 Val AUC: 0.9601 Time: 13.18\n",
      "Epoch: 755 Train Loss: 0.0660 Val Loss: 0.3466 Acc: 0.8967 Pre: 0.8604 Recall: 0.9194 F1: 0.8889 Train AUC: 0.9983 Val AUC: 0.9612 Time: 13.47\n",
      "Epoch: 756 Train Loss: 0.0702 Val Loss: 0.3376 Acc: 0.9058 Pre: 0.8798 Recall: 0.9153 F1: 0.8972 Train AUC: 0.9980 Val AUC: 0.9606 Time: 13.60\n",
      "Epoch: 757 Train Loss: 0.0651 Val Loss: 0.3283 Acc: 0.9112 Pre: 0.8996 Recall: 0.9032 F1: 0.9014 Train AUC: 0.9983 Val AUC: 0.9608 Time: 13.25\n",
      "Epoch: 758 Train Loss: 0.0757 Val Loss: 0.3086 Acc: 0.9112 Pre: 0.8933 Recall: 0.9113 F1: 0.9022 Train AUC: 0.9972 Val AUC: 0.9620 Time: 13.84\n",
      "Epoch: 759 Train Loss: 0.0703 Val Loss: 0.3016 Acc: 0.8949 Pre: 0.8545 Recall: 0.9234 F1: 0.8876 Train AUC: 0.9978 Val AUC: 0.9634 Time: 13.80\n",
      "Epoch: 760 Train Loss: 0.0786 Val Loss: 0.3104 Acc: 0.9040 Pre: 0.8764 Recall: 0.9153 F1: 0.8955 Train AUC: 0.9975 Val AUC: 0.9630 Time: 13.89\n",
      "Epoch: 761 Train Loss: 0.0753 Val Loss: 0.3380 Acc: 0.9058 Pre: 0.8828 Recall: 0.9113 F1: 0.8968 Train AUC: 0.9980 Val AUC: 0.9612 Time: 13.53\n",
      "Epoch: 762 Train Loss: 0.0847 Val Loss: 0.3503 Acc: 0.9130 Pre: 0.8968 Recall: 0.9113 F1: 0.9040 Train AUC: 0.9959 Val AUC: 0.9597 Time: 12.35\n",
      "Epoch: 763 Train Loss: 0.0770 Val Loss: 0.3354 Acc: 0.9130 Pre: 0.8906 Recall: 0.9194 F1: 0.9048 Train AUC: 0.9968 Val AUC: 0.9595 Time: 13.19\n",
      "Epoch: 764 Train Loss: 0.0753 Val Loss: 0.3329 Acc: 0.8895 Pre: 0.8425 Recall: 0.9274 F1: 0.8829 Train AUC: 0.9971 Val AUC: 0.9593 Time: 13.23\n",
      "Epoch: 765 Train Loss: 0.0736 Val Loss: 0.3383 Acc: 0.8949 Pre: 0.8571 Recall: 0.9194 F1: 0.8872 Train AUC: 0.9983 Val AUC: 0.9584 Time: 13.39\n",
      "Epoch: 766 Train Loss: 0.0788 Val Loss: 0.3458 Acc: 0.9094 Pre: 0.8898 Recall: 0.9113 F1: 0.9004 Train AUC: 0.9975 Val AUC: 0.9594 Time: 14.37\n",
      "Epoch: 767 Train Loss: 0.0717 Val Loss: 0.3642 Acc: 0.9076 Pre: 0.8863 Recall: 0.9113 F1: 0.8986 Train AUC: 0.9979 Val AUC: 0.9585 Time: 13.97\n",
      "Epoch: 768 Train Loss: 0.0769 Val Loss: 0.3539 Acc: 0.9076 Pre: 0.8803 Recall: 0.9194 F1: 0.8994 Train AUC: 0.9968 Val AUC: 0.9596 Time: 13.75\n",
      "Epoch: 769 Train Loss: 0.0825 Val Loss: 0.3355 Acc: 0.9022 Pre: 0.8731 Recall: 0.9153 F1: 0.8937 Train AUC: 0.9963 Val AUC: 0.9608 Time: 12.26\n",
      "Epoch: 770 Train Loss: 0.0727 Val Loss: 0.3345 Acc: 0.8877 Pre: 0.8444 Recall: 0.9194 F1: 0.8803 Train AUC: 0.9972 Val AUC: 0.9592 Time: 12.11\n",
      "Epoch: 771 Train Loss: 0.0899 Val Loss: 0.3131 Acc: 0.8931 Pre: 0.8539 Recall: 0.9194 F1: 0.8854 Train AUC: 0.9957 Val AUC: 0.9614 Time: 12.51\n",
      "Epoch: 772 Train Loss: 0.0884 Val Loss: 0.3158 Acc: 0.9167 Pre: 0.9008 Recall: 0.9153 F1: 0.9080 Train AUC: 0.9963 Val AUC: 0.9621 Time: 13.40\n",
      "Epoch: 773 Train Loss: 0.0652 Val Loss: 0.3565 Acc: 0.9130 Pre: 0.8937 Recall: 0.9153 F1: 0.9044 Train AUC: 0.9986 Val AUC: 0.9606 Time: 13.67\n",
      "Epoch: 774 Train Loss: 0.0885 Val Loss: 0.3759 Acc: 0.9040 Pre: 0.8707 Recall: 0.9234 F1: 0.8963 Train AUC: 0.9956 Val AUC: 0.9604 Time: 14.19\n",
      "Epoch: 775 Train Loss: 0.0927 Val Loss: 0.3489 Acc: 0.8986 Pre: 0.8664 Recall: 0.9153 F1: 0.8902 Train AUC: 0.9953 Val AUC: 0.9604 Time: 14.66\n",
      "Epoch: 776 Train Loss: 0.0727 Val Loss: 0.3341 Acc: 0.9004 Pre: 0.8697 Recall: 0.9153 F1: 0.8919 Train AUC: 0.9977 Val AUC: 0.9605 Time: 14.35\n",
      "Epoch: 777 Train Loss: 0.0704 Val Loss: 0.3321 Acc: 0.8913 Pre: 0.8507 Recall: 0.9194 F1: 0.8837 Train AUC: 0.9979 Val AUC: 0.9588 Time: 12.19\n",
      "Epoch: 778 Train Loss: 0.0812 Val Loss: 0.3270 Acc: 0.8986 Pre: 0.8636 Recall: 0.9194 F1: 0.8906 Train AUC: 0.9970 Val AUC: 0.9606 Time: 13.12\n",
      "Epoch: 779 Train Loss: 0.0784 Val Loss: 0.3482 Acc: 0.9022 Pre: 0.8647 Recall: 0.9274 F1: 0.8949 Train AUC: 0.9971 Val AUC: 0.9612 Time: 13.26\n",
      "Epoch: 780 Train Loss: 0.0695 Val Loss: 0.3687 Acc: 0.9076 Pre: 0.8745 Recall: 0.9274 F1: 0.9002 Train AUC: 0.9976 Val AUC: 0.9607 Time: 14.19\n",
      "Epoch: 781 Train Loss: 0.0775 Val Loss: 0.3568 Acc: 0.9058 Pre: 0.8740 Recall: 0.9234 F1: 0.8980 Train AUC: 0.9966 Val AUC: 0.9615 Time: 13.69\n",
      "Epoch: 782 Train Loss: 0.0758 Val Loss: 0.3422 Acc: 0.9022 Pre: 0.8702 Recall: 0.9194 F1: 0.8941 Train AUC: 0.9972 Val AUC: 0.9612 Time: 13.87\n",
      "Epoch: 783 Train Loss: 0.0682 Val Loss: 0.3324 Acc: 0.8986 Pre: 0.8636 Recall: 0.9194 F1: 0.8906 Train AUC: 0.9983 Val AUC: 0.9616 Time: 13.46\n",
      "Epoch: 784 Train Loss: 0.0762 Val Loss: 0.3347 Acc: 0.9022 Pre: 0.8702 Recall: 0.9194 F1: 0.8941 Train AUC: 0.9976 Val AUC: 0.9610 Time: 12.48\n",
      "Epoch: 785 Train Loss: 0.0733 Val Loss: 0.3468 Acc: 0.9022 Pre: 0.8674 Recall: 0.9234 F1: 0.8945 Train AUC: 0.9978 Val AUC: 0.9610 Time: 13.02\n",
      "Epoch: 786 Train Loss: 0.0731 Val Loss: 0.3330 Acc: 0.9058 Pre: 0.8769 Recall: 0.9194 F1: 0.8976 Train AUC: 0.9975 Val AUC: 0.9623 Time: 13.54\n",
      "Epoch: 787 Train Loss: 0.0736 Val Loss: 0.3333 Acc: 0.9040 Pre: 0.8764 Recall: 0.9153 F1: 0.8955 Train AUC: 0.9973 Val AUC: 0.9623 Time: 14.23\n",
      "Epoch: 788 Train Loss: 0.0892 Val Loss: 0.3464 Acc: 0.9094 Pre: 0.8837 Recall: 0.9194 F1: 0.9012 Train AUC: 0.9953 Val AUC: 0.9602 Time: 13.98\n",
      "Epoch: 789 Train Loss: 0.0684 Val Loss: 0.3628 Acc: 0.8949 Pre: 0.8626 Recall: 0.9113 F1: 0.8863 Train AUC: 0.9979 Val AUC: 0.9585 Time: 13.36\n",
      "Epoch: 790 Train Loss: 0.0745 Val Loss: 0.3523 Acc: 0.8967 Pre: 0.8659 Recall: 0.9113 F1: 0.8880 Train AUC: 0.9976 Val AUC: 0.9584 Time: 12.48\n",
      "Epoch: 791 Train Loss: 0.0757 Val Loss: 0.3274 Acc: 0.9022 Pre: 0.8760 Recall: 0.9113 F1: 0.8933 Train AUC: 0.9974 Val AUC: 0.9600 Time: 13.32\n",
      "Epoch: 792 Train Loss: 0.0660 Val Loss: 0.3283 Acc: 0.9058 Pre: 0.8740 Recall: 0.9234 F1: 0.8980 Train AUC: 0.9984 Val AUC: 0.9611 Time: 13.70\n",
      "Epoch: 793 Train Loss: 0.0711 Val Loss: 0.3346 Acc: 0.9076 Pre: 0.8803 Recall: 0.9194 F1: 0.8994 Train AUC: 0.9977 Val AUC: 0.9621 Time: 14.47\n",
      "Epoch: 794 Train Loss: 0.0721 Val Loss: 0.3321 Acc: 0.9058 Pre: 0.8828 Recall: 0.9113 F1: 0.8968 Train AUC: 0.9975 Val AUC: 0.9622 Time: 14.19\n",
      "Epoch: 795 Train Loss: 0.0789 Val Loss: 0.3358 Acc: 0.9149 Pre: 0.9004 Recall: 0.9113 F1: 0.9058 Train AUC: 0.9968 Val AUC: 0.9606 Time: 12.44\n",
      "Epoch: 796 Train Loss: 0.0826 Val Loss: 0.3261 Acc: 0.9058 Pre: 0.8769 Recall: 0.9194 F1: 0.8976 Train AUC: 0.9968 Val AUC: 0.9619 Time: 12.22\n",
      "Epoch: 797 Train Loss: 0.0742 Val Loss: 0.3317 Acc: 0.8841 Pre: 0.8382 Recall: 0.9194 F1: 0.8769 Train AUC: 0.9972 Val AUC: 0.9633 Time: 13.02\n",
      "Epoch: 798 Train Loss: 0.0870 Val Loss: 0.3396 Acc: 0.8986 Pre: 0.8636 Recall: 0.9194 F1: 0.8906 Train AUC: 0.9974 Val AUC: 0.9609 Time: 13.79\n",
      "Epoch: 799 Train Loss: 0.0611 Val Loss: 0.3617 Acc: 0.9094 Pre: 0.8929 Recall: 0.9073 F1: 0.9000 Train AUC: 0.9986 Val AUC: 0.9586 Time: 13.97\n",
      "Epoch: 800 Train Loss: 0.0770 Val Loss: 0.3883 Acc: 0.8967 Pre: 0.8604 Recall: 0.9194 F1: 0.8889 Train AUC: 0.9980 Val AUC: 0.9578 Time: 14.50\n",
      "Epoch: 801 Train Loss: 0.0706 Val Loss: 0.3691 Acc: 0.8931 Pre: 0.8539 Recall: 0.9194 F1: 0.8854 Train AUC: 0.9977 Val AUC: 0.9597 Time: 14.13\n",
      "Epoch: 802 Train Loss: 0.0757 Val Loss: 0.3218 Acc: 0.9022 Pre: 0.8674 Recall: 0.9234 F1: 0.8945 Train AUC: 0.9976 Val AUC: 0.9624 Time: 13.79\n",
      "Epoch: 803 Train Loss: 0.0725 Val Loss: 0.2994 Acc: 0.9112 Pre: 0.8872 Recall: 0.9194 F1: 0.9030 Train AUC: 0.9978 Val AUC: 0.9637 Time: 12.19\n",
      "Epoch: 804 Train Loss: 0.0752 Val Loss: 0.3077 Acc: 0.9221 Pre: 0.9084 Recall: 0.9194 F1: 0.9138 Train AUC: 0.9973 Val AUC: 0.9627 Time: 12.26\n",
      "Epoch: 805 Train Loss: 0.0666 Val Loss: 0.3322 Acc: 0.9058 Pre: 0.8828 Recall: 0.9113 F1: 0.8968 Train AUC: 0.9983 Val AUC: 0.9613 Time: 13.20\n",
      "Epoch: 806 Train Loss: 0.0750 Val Loss: 0.3541 Acc: 0.9004 Pre: 0.8697 Recall: 0.9153 F1: 0.8919 Train AUC: 0.9975 Val AUC: 0.9611 Time: 13.44\n",
      "Epoch: 807 Train Loss: 0.0836 Val Loss: 0.3290 Acc: 0.9022 Pre: 0.8674 Recall: 0.9234 F1: 0.8945 Train AUC: 0.9962 Val AUC: 0.9621 Time: 13.97\n",
      "Epoch: 808 Train Loss: 0.0604 Val Loss: 0.3061 Acc: 0.9094 Pre: 0.8867 Recall: 0.9153 F1: 0.9008 Train AUC: 0.9985 Val AUC: 0.9631 Time: 14.11\n",
      "Epoch: 809 Train Loss: 0.0723 Val Loss: 0.3091 Acc: 0.9076 Pre: 0.8833 Recall: 0.9153 F1: 0.8990 Train AUC: 0.9979 Val AUC: 0.9634 Time: 13.71\n",
      "Epoch: 810 Train Loss: 0.0681 Val Loss: 0.3205 Acc: 0.9040 Pre: 0.8824 Recall: 0.9073 F1: 0.8946 Train AUC: 0.9980 Val AUC: 0.9626 Time: 13.83\n",
      "Epoch: 811 Train Loss: 0.0770 Val Loss: 0.3407 Acc: 0.8967 Pre: 0.8577 Recall: 0.9234 F1: 0.8893 Train AUC: 0.9975 Val AUC: 0.9621 Time: 12.34\n",
      "Epoch: 812 Train Loss: 0.0698 Val Loss: 0.3412 Acc: 0.8967 Pre: 0.8550 Recall: 0.9274 F1: 0.8897 Train AUC: 0.9977 Val AUC: 0.9614 Time: 12.29\n",
      "Epoch: 813 Train Loss: 0.0641 Val Loss: 0.3528 Acc: 0.9040 Pre: 0.8707 Recall: 0.9234 F1: 0.8963 Train AUC: 0.9985 Val AUC: 0.9593 Time: 13.07\n",
      "Epoch: 814 Train Loss: 0.0684 Val Loss: 0.3466 Acc: 0.9004 Pre: 0.8726 Recall: 0.9113 F1: 0.8915 Train AUC: 0.9979 Val AUC: 0.9584 Time: 14.09\n",
      "Epoch: 815 Train Loss: 0.0743 Val Loss: 0.3261 Acc: 0.9076 Pre: 0.8803 Recall: 0.9194 F1: 0.8994 Train AUC: 0.9973 Val AUC: 0.9615 Time: 14.38\n",
      "Epoch: 816 Train Loss: 0.0729 Val Loss: 0.3199 Acc: 0.9076 Pre: 0.8774 Recall: 0.9234 F1: 0.8998 Train AUC: 0.9974 Val AUC: 0.9624 Time: 14.81\n",
      "Epoch: 817 Train Loss: 0.0690 Val Loss: 0.3278 Acc: 0.9058 Pre: 0.8769 Recall: 0.9194 F1: 0.8976 Train AUC: 0.9978 Val AUC: 0.9617 Time: 14.03\n",
      "Epoch: 818 Train Loss: 0.0773 Val Loss: 0.3326 Acc: 0.9058 Pre: 0.8828 Recall: 0.9113 F1: 0.8968 Train AUC: 0.9970 Val AUC: 0.9608 Time: 13.23\n",
      "Epoch: 819 Train Loss: 0.0752 Val Loss: 0.3407 Acc: 0.9040 Pre: 0.8794 Recall: 0.9113 F1: 0.8950 Train AUC: 0.9976 Val AUC: 0.9600 Time: 12.44\n",
      "Epoch: 820 Train Loss: 0.0689 Val Loss: 0.3393 Acc: 0.8967 Pre: 0.8604 Recall: 0.9194 F1: 0.8889 Train AUC: 0.9979 Val AUC: 0.9608 Time: 12.58\n",
      "Epoch: 821 Train Loss: 0.0693 Val Loss: 0.3373 Acc: 0.8967 Pre: 0.8577 Recall: 0.9234 F1: 0.8893 Train AUC: 0.9982 Val AUC: 0.9610 Time: 12.57\n",
      "Epoch: 822 Train Loss: 0.0707 Val Loss: 0.3327 Acc: 0.8931 Pre: 0.8539 Recall: 0.9194 F1: 0.8854 Train AUC: 0.9976 Val AUC: 0.9610 Time: 12.99\n",
      "Epoch: 823 Train Loss: 0.0693 Val Loss: 0.3264 Acc: 0.8967 Pre: 0.8577 Recall: 0.9234 F1: 0.8893 Train AUC: 0.9983 Val AUC: 0.9613 Time: 13.74\n",
      "Epoch: 824 Train Loss: 0.0663 Val Loss: 0.3242 Acc: 0.8949 Pre: 0.8571 Recall: 0.9194 F1: 0.8872 Train AUC: 0.9988 Val AUC: 0.9626 Time: 14.33\n",
      "Epoch: 825 Train Loss: 0.0779 Val Loss: 0.3229 Acc: 0.9076 Pre: 0.8833 Recall: 0.9153 F1: 0.8990 Train AUC: 0.9977 Val AUC: 0.9624 Time: 14.88\n",
      "Epoch: 826 Train Loss: 0.0720 Val Loss: 0.3219 Acc: 0.9094 Pre: 0.8898 Recall: 0.9113 F1: 0.9004 Train AUC: 0.9976 Val AUC: 0.9623 Time: 14.51\n",
      "Epoch: 827 Train Loss: 0.0751 Val Loss: 0.3370 Acc: 0.9058 Pre: 0.8740 Recall: 0.9234 F1: 0.8980 Train AUC: 0.9975 Val AUC: 0.9608 Time: 13.30\n",
      "Epoch: 828 Train Loss: 0.0641 Val Loss: 0.3445 Acc: 0.8931 Pre: 0.8513 Recall: 0.9234 F1: 0.8859 Train AUC: 0.9983 Val AUC: 0.9607 Time: 12.78\n",
      "Epoch: 829 Train Loss: 0.0682 Val Loss: 0.3350 Acc: 0.8877 Pre: 0.8394 Recall: 0.9274 F1: 0.8812 Train AUC: 0.9981 Val AUC: 0.9616 Time: 13.71\n",
      "Epoch: 830 Train Loss: 0.0690 Val Loss: 0.3105 Acc: 0.9094 Pre: 0.8808 Recall: 0.9234 F1: 0.9016 Train AUC: 0.9983 Val AUC: 0.9634 Time: 13.96\n",
      "Epoch: 831 Train Loss: 0.0695 Val Loss: 0.3109 Acc: 0.9185 Pre: 0.9044 Recall: 0.9153 F1: 0.9098 Train AUC: 0.9978 Val AUC: 0.9631 Time: 14.36\n",
      "Epoch: 832 Train Loss: 0.0684 Val Loss: 0.3254 Acc: 0.9167 Pre: 0.9040 Recall: 0.9113 F1: 0.9076 Train AUC: 0.9981 Val AUC: 0.9622 Time: 14.49\n",
      "Epoch: 833 Train Loss: 0.0704 Val Loss: 0.3360 Acc: 0.9040 Pre: 0.8794 Recall: 0.9113 F1: 0.8950 Train AUC: 0.9974 Val AUC: 0.9613 Time: 13.22\n",
      "Epoch: 834 Train Loss: 0.0704 Val Loss: 0.3437 Acc: 0.8949 Pre: 0.8545 Recall: 0.9234 F1: 0.8876 Train AUC: 0.9977 Val AUC: 0.9621 Time: 13.51\n",
      "Epoch: 835 Train Loss: 0.0745 Val Loss: 0.3265 Acc: 0.8931 Pre: 0.8513 Recall: 0.9234 F1: 0.8859 Train AUC: 0.9975 Val AUC: 0.9627 Time: 13.25\n",
      "Epoch: 836 Train Loss: 0.0684 Val Loss: 0.3081 Acc: 0.9076 Pre: 0.8774 Recall: 0.9234 F1: 0.8998 Train AUC: 0.9985 Val AUC: 0.9629 Time: 13.03\n",
      "Epoch: 837 Train Loss: 0.0768 Val Loss: 0.3213 Acc: 0.9130 Pre: 0.8937 Recall: 0.9153 F1: 0.9044 Train AUC: 0.9973 Val AUC: 0.9614 Time: 13.66\n",
      "Epoch: 838 Train Loss: 0.0703 Val Loss: 0.3454 Acc: 0.9076 Pre: 0.8863 Recall: 0.9113 F1: 0.8986 Train AUC: 0.9982 Val AUC: 0.9604 Time: 14.28\n",
      "Epoch: 839 Train Loss: 0.0610 Val Loss: 0.3599 Acc: 0.8967 Pre: 0.8604 Recall: 0.9194 F1: 0.8889 Train AUC: 0.9984 Val AUC: 0.9613 Time: 13.65\n",
      "Epoch: 840 Train Loss: 0.0822 Val Loss: 0.3204 Acc: 0.9076 Pre: 0.8803 Recall: 0.9194 F1: 0.8994 Train AUC: 0.9974 Val AUC: 0.9630 Time: 13.87\n",
      "Epoch: 841 Train Loss: 0.0660 Val Loss: 0.3013 Acc: 0.9167 Pre: 0.9040 Recall: 0.9113 F1: 0.9076 Train AUC: 0.9983 Val AUC: 0.9632 Time: 14.49\n",
      "Epoch: 842 Train Loss: 0.0711 Val Loss: 0.3028 Acc: 0.9112 Pre: 0.8933 Recall: 0.9113 F1: 0.9022 Train AUC: 0.9977 Val AUC: 0.9628 Time: 12.28\n",
      "Epoch: 843 Train Loss: 0.0783 Val Loss: 0.3309 Acc: 0.9022 Pre: 0.8702 Recall: 0.9194 F1: 0.8941 Train AUC: 0.9975 Val AUC: 0.9610 Time: 12.59\n",
      "Epoch: 844 Train Loss: 0.0724 Val Loss: 0.3577 Acc: 0.9004 Pre: 0.8726 Recall: 0.9113 F1: 0.8915 Train AUC: 0.9979 Val AUC: 0.9596 Time: 12.63\n",
      "Epoch: 845 Train Loss: 0.0652 Val Loss: 0.3612 Acc: 0.9022 Pre: 0.8760 Recall: 0.9113 F1: 0.8933 Train AUC: 0.9984 Val AUC: 0.9594 Time: 13.20\n",
      "Epoch: 846 Train Loss: 0.0717 Val Loss: 0.3405 Acc: 0.9130 Pre: 0.8968 Recall: 0.9113 F1: 0.9040 Train AUC: 0.9977 Val AUC: 0.9602 Time: 13.44\n",
      "Epoch: 847 Train Loss: 0.0597 Val Loss: 0.3241 Acc: 0.9149 Pre: 0.9004 Recall: 0.9113 F1: 0.9058 Train AUC: 0.9987 Val AUC: 0.9606 Time: 14.03\n",
      "Epoch: 848 Train Loss: 0.0781 Val Loss: 0.3244 Acc: 0.9167 Pre: 0.9008 Recall: 0.9153 F1: 0.9080 Train AUC: 0.9971 Val AUC: 0.9599 Time: 14.74\n",
      "Epoch: 849 Train Loss: 0.0846 Val Loss: 0.3416 Acc: 0.8841 Pre: 0.8382 Recall: 0.9194 F1: 0.8769 Train AUC: 0.9967 Val AUC: 0.9609 Time: 13.69\n",
      "Epoch: 850 Train Loss: 0.0720 Val Loss: 0.3598 Acc: 0.8859 Pre: 0.8364 Recall: 0.9274 F1: 0.8795 Train AUC: 0.9979 Val AUC: 0.9614 Time: 14.11\n",
      "Epoch: 851 Train Loss: 0.0785 Val Loss: 0.3465 Acc: 0.8931 Pre: 0.8566 Recall: 0.9153 F1: 0.8850 Train AUC: 0.9978 Val AUC: 0.9615 Time: 12.21\n",
      "Epoch: 852 Train Loss: 0.0722 Val Loss: 0.3344 Acc: 0.9094 Pre: 0.8929 Recall: 0.9073 F1: 0.9000 Train AUC: 0.9978 Val AUC: 0.9619 Time: 12.32\n",
      "Epoch: 853 Train Loss: 0.0831 Val Loss: 0.3124 Acc: 0.9130 Pre: 0.8876 Recall: 0.9234 F1: 0.9051 Train AUC: 0.9967 Val AUC: 0.9637 Time: 12.43\n",
      "Epoch: 854 Train Loss: 0.0704 Val Loss: 0.3173 Acc: 0.8931 Pre: 0.8487 Recall: 0.9274 F1: 0.8863 Train AUC: 0.9976 Val AUC: 0.9633 Time: 12.41\n",
      "Epoch: 855 Train Loss: 0.0748 Val Loss: 0.3385 Acc: 0.9040 Pre: 0.8736 Recall: 0.9194 F1: 0.8959 Train AUC: 0.9979 Val AUC: 0.9605 Time: 13.71\n",
      "Epoch: 856 Train Loss: 0.0711 Val Loss: 0.3533 Acc: 0.9076 Pre: 0.8863 Recall: 0.9113 F1: 0.8986 Train AUC: 0.9983 Val AUC: 0.9602 Time: 14.03\n",
      "Epoch: 857 Train Loss: 0.0791 Val Loss: 0.3526 Acc: 0.9094 Pre: 0.8898 Recall: 0.9113 F1: 0.9004 Train AUC: 0.9969 Val AUC: 0.9603 Time: 14.70\n",
      "Epoch: 858 Train Loss: 0.0665 Val Loss: 0.3370 Acc: 0.9004 Pre: 0.8614 Recall: 0.9274 F1: 0.8932 Train AUC: 0.9982 Val AUC: 0.9621 Time: 14.52\n",
      "Epoch: 859 Train Loss: 0.0687 Val Loss: 0.3293 Acc: 0.8986 Pre: 0.8582 Recall: 0.9274 F1: 0.8915 Train AUC: 0.9979 Val AUC: 0.9637 Time: 14.80\n",
      "Epoch: 860 Train Loss: 0.0718 Val Loss: 0.3276 Acc: 0.9022 Pre: 0.8702 Recall: 0.9194 F1: 0.8941 Train AUC: 0.9981 Val AUC: 0.9627 Time: 12.33\n",
      "Epoch: 861 Train Loss: 0.0670 Val Loss: 0.3483 Acc: 0.9076 Pre: 0.8893 Recall: 0.9073 F1: 0.8982 Train AUC: 0.9982 Val AUC: 0.9600 Time: 12.61\n",
      "Epoch: 862 Train Loss: 0.0680 Val Loss: 0.3538 Acc: 0.9040 Pre: 0.8824 Recall: 0.9073 F1: 0.8946 Train AUC: 0.9982 Val AUC: 0.9600 Time: 12.33\n",
      "Epoch: 863 Train Loss: 0.0603 Val Loss: 0.3540 Acc: 0.9022 Pre: 0.8702 Recall: 0.9194 F1: 0.8941 Train AUC: 0.9989 Val AUC: 0.9614 Time: 12.73\n",
      "Epoch: 864 Train Loss: 0.0684 Val Loss: 0.3383 Acc: 0.9094 Pre: 0.8722 Recall: 0.9355 F1: 0.9027 Train AUC: 0.9980 Val AUC: 0.9637 Time: 13.67\n",
      "Epoch: 865 Train Loss: 0.0764 Val Loss: 0.3143 Acc: 0.9112 Pre: 0.8902 Recall: 0.9153 F1: 0.9026 Train AUC: 0.9972 Val AUC: 0.9634 Time: 14.13\n",
      "Epoch: 866 Train Loss: 0.0685 Val Loss: 0.3191 Acc: 0.9130 Pre: 0.9098 Recall: 0.8952 F1: 0.9024 Train AUC: 0.9978 Val AUC: 0.9615 Time: 14.72\n",
      "Epoch: 867 Train Loss: 0.0749 Val Loss: 0.3298 Acc: 0.8967 Pre: 0.8716 Recall: 0.9032 F1: 0.8871 Train AUC: 0.9981 Val AUC: 0.9604 Time: 14.47\n",
      "Epoch: 868 Train Loss: 0.0699 Val Loss: 0.3514 Acc: 0.8822 Pre: 0.8376 Recall: 0.9153 F1: 0.8748 Train AUC: 0.9981 Val AUC: 0.9611 Time: 13.44\n",
      "Epoch: 869 Train Loss: 0.0620 Val Loss: 0.3614 Acc: 0.8913 Pre: 0.8456 Recall: 0.9274 F1: 0.8846 Train AUC: 0.9990 Val AUC: 0.9619 Time: 12.27\n",
      "Epoch: 870 Train Loss: 0.0720 Val Loss: 0.3524 Acc: 0.9022 Pre: 0.8702 Recall: 0.9194 F1: 0.8941 Train AUC: 0.9982 Val AUC: 0.9616 Time: 12.21\n",
      "Epoch: 871 Train Loss: 0.0692 Val Loss: 0.3475 Acc: 0.9112 Pre: 0.8933 Recall: 0.9113 F1: 0.9022 Train AUC: 0.9978 Val AUC: 0.9611 Time: 12.61\n",
      "Epoch: 872 Train Loss: 0.0716 Val Loss: 0.3314 Acc: 0.9058 Pre: 0.8769 Recall: 0.9194 F1: 0.8976 Train AUC: 0.9974 Val AUC: 0.9611 Time: 12.78\n",
      "Epoch: 873 Train Loss: 0.0690 Val Loss: 0.3309 Acc: 0.8841 Pre: 0.8382 Recall: 0.9194 F1: 0.8769 Train AUC: 0.9983 Val AUC: 0.9602 Time: 12.71\n",
      "Epoch: 874 Train Loss: 0.0733 Val Loss: 0.3263 Acc: 0.8768 Pre: 0.8261 Recall: 0.9194 F1: 0.8702 Train AUC: 0.9978 Val AUC: 0.9621 Time: 13.00\n",
      "Epoch: 875 Train Loss: 0.0688 Val Loss: 0.3153 Acc: 0.9112 Pre: 0.8842 Recall: 0.9234 F1: 0.9034 Train AUC: 0.9985 Val AUC: 0.9638 Time: 14.14\n",
      "Epoch: 876 Train Loss: 0.0653 Val Loss: 0.3230 Acc: 0.9112 Pre: 0.8902 Recall: 0.9153 F1: 0.9026 Train AUC: 0.9985 Val AUC: 0.9631 Time: 14.65\n",
      "Epoch: 877 Train Loss: 0.0659 Val Loss: 0.3260 Acc: 0.9076 Pre: 0.8833 Recall: 0.9153 F1: 0.8990 Train AUC: 0.9979 Val AUC: 0.9631 Time: 14.43\n",
      "Epoch: 878 Train Loss: 0.0737 Val Loss: 0.3296 Acc: 0.8967 Pre: 0.8577 Recall: 0.9234 F1: 0.8893 Train AUC: 0.9971 Val AUC: 0.9633 Time: 14.36\n",
      "Epoch: 879 Train Loss: 0.0576 Val Loss: 0.3405 Acc: 0.8949 Pre: 0.8545 Recall: 0.9234 F1: 0.8876 Train AUC: 0.9987 Val AUC: 0.9633 Time: 13.01\n",
      "Epoch: 880 Train Loss: 0.0699 Val Loss: 0.3294 Acc: 0.8895 Pre: 0.8476 Recall: 0.9194 F1: 0.8820 Train AUC: 0.9983 Val AUC: 0.9638 Time: 12.37\n",
      "Epoch: 881 Train Loss: 0.0705 Val Loss: 0.3137 Acc: 0.9058 Pre: 0.8769 Recall: 0.9194 F1: 0.8976 Train AUC: 0.9979 Val AUC: 0.9641 Time: 12.54\n",
      "Epoch: 882 Train Loss: 0.0580 Val Loss: 0.3193 Acc: 0.9094 Pre: 0.8867 Recall: 0.9153 F1: 0.9008 Train AUC: 0.9990 Val AUC: 0.9637 Time: 12.93\n",
      "Epoch: 883 Train Loss: 0.0667 Val Loss: 0.3355 Acc: 0.9004 Pre: 0.8669 Recall: 0.9194 F1: 0.8924 Train AUC: 0.9980 Val AUC: 0.9635 Time: 13.92\n",
      "Epoch: 884 Train Loss: 0.0569 Val Loss: 0.3605 Acc: 0.8895 Pre: 0.8450 Recall: 0.9234 F1: 0.8825 Train AUC: 0.9988 Val AUC: 0.9632 Time: 13.93\n",
      "Epoch: 885 Train Loss: 0.0762 Val Loss: 0.3446 Acc: 0.8949 Pre: 0.8571 Recall: 0.9194 F1: 0.8872 Train AUC: 0.9973 Val AUC: 0.9620 Time: 14.61\n",
      "Epoch: 886 Train Loss: 0.0628 Val Loss: 0.3412 Acc: 0.8949 Pre: 0.8626 Recall: 0.9113 F1: 0.8863 Train AUC: 0.9985 Val AUC: 0.9614 Time: 13.82\n",
      "Epoch: 887 Train Loss: 0.0607 Val Loss: 0.3223 Acc: 0.8986 Pre: 0.8750 Recall: 0.9032 F1: 0.8889 Train AUC: 0.9989 Val AUC: 0.9615 Time: 12.82\n",
      "Epoch: 888 Train Loss: 0.0633 Val Loss: 0.3221 Acc: 0.9058 Pre: 0.8889 Recall: 0.9032 F1: 0.8960 Train AUC: 0.9987 Val AUC: 0.9616 Time: 13.40\n",
      "Epoch: 889 Train Loss: 0.0781 Val Loss: 0.3274 Acc: 0.9112 Pre: 0.8872 Recall: 0.9194 F1: 0.9030 Train AUC: 0.9976 Val AUC: 0.9625 Time: 13.34\n",
      "Epoch: 890 Train Loss: 0.0692 Val Loss: 0.3411 Acc: 0.9058 Pre: 0.8712 Recall: 0.9274 F1: 0.8984 Train AUC: 0.9978 Val AUC: 0.9630 Time: 13.67\n",
      "Epoch: 891 Train Loss: 0.0781 Val Loss: 0.3352 Acc: 0.8986 Pre: 0.8609 Recall: 0.9234 F1: 0.8911 Train AUC: 0.9969 Val AUC: 0.9619 Time: 14.48\n",
      "Epoch: 892 Train Loss: 0.0632 Val Loss: 0.3237 Acc: 0.9022 Pre: 0.8760 Recall: 0.9113 F1: 0.8933 Train AUC: 0.9983 Val AUC: 0.9613 Time: 12.81\n",
      "Epoch: 893 Train Loss: 0.0710 Val Loss: 0.3160 Acc: 0.9040 Pre: 0.8794 Recall: 0.9113 F1: 0.8950 Train AUC: 0.9977 Val AUC: 0.9627 Time: 12.43\n",
      "Epoch: 894 Train Loss: 0.0641 Val Loss: 0.3177 Acc: 0.9058 Pre: 0.8889 Recall: 0.9032 F1: 0.8960 Train AUC: 0.9983 Val AUC: 0.9630 Time: 12.78\n",
      "Epoch: 895 Train Loss: 0.0710 Val Loss: 0.3453 Acc: 0.8931 Pre: 0.8593 Recall: 0.9113 F1: 0.8845 Train AUC: 0.9974 Val AUC: 0.9625 Time: 13.69\n",
      "Epoch: 896 Train Loss: 0.0651 Val Loss: 0.3655 Acc: 0.8931 Pre: 0.8593 Recall: 0.9113 F1: 0.8845 Train AUC: 0.9983 Val AUC: 0.9616 Time: 13.72\n",
      "Epoch: 897 Train Loss: 0.0719 Val Loss: 0.3507 Acc: 0.9040 Pre: 0.8824 Recall: 0.9073 F1: 0.8946 Train AUC: 0.9975 Val AUC: 0.9610 Time: 14.41\n",
      "Epoch: 898 Train Loss: 0.0661 Val Loss: 0.3331 Acc: 0.9058 Pre: 0.8828 Recall: 0.9113 F1: 0.8968 Train AUC: 0.9978 Val AUC: 0.9616 Time: 15.06\n",
      "Epoch: 899 Train Loss: 0.0731 Val Loss: 0.3317 Acc: 0.8859 Pre: 0.8464 Recall: 0.9113 F1: 0.8777 Train AUC: 0.9978 Val AUC: 0.9600 Time: 12.93\n",
      "Epoch: 900 Train Loss: 0.0736 Val Loss: 0.3391 Acc: 0.8750 Pre: 0.8208 Recall: 0.9234 F1: 0.8691 Train AUC: 0.9976 Val AUC: 0.9631 Time: 12.22\n",
      "Epoch: 901 Train Loss: 0.0720 Val Loss: 0.3421 Acc: 0.8967 Pre: 0.8550 Recall: 0.9274 F1: 0.8897 Train AUC: 0.9985 Val AUC: 0.9621 Time: 12.53\n",
      "Epoch: 902 Train Loss: 0.0779 Val Loss: 0.3458 Acc: 0.9094 Pre: 0.8898 Recall: 0.9113 F1: 0.9004 Train AUC: 0.9972 Val AUC: 0.9613 Time: 12.36\n",
      "Epoch: 903 Train Loss: 0.0780 Val Loss: 0.3480 Acc: 0.9094 Pre: 0.8898 Recall: 0.9113 F1: 0.9004 Train AUC: 0.9969 Val AUC: 0.9606 Time: 12.88\n",
      "Epoch: 904 Train Loss: 0.0742 Val Loss: 0.3442 Acc: 0.9058 Pre: 0.8769 Recall: 0.9194 F1: 0.8976 Train AUC: 0.9971 Val AUC: 0.9605 Time: 14.26\n",
      "Epoch: 905 Train Loss: 0.0677 Val Loss: 0.3365 Acc: 0.8913 Pre: 0.8507 Recall: 0.9194 F1: 0.8837 Train AUC: 0.9981 Val AUC: 0.9605 Time: 13.89\n",
      "Epoch: 906 Train Loss: 0.0667 Val Loss: 0.3218 Acc: 0.8967 Pre: 0.8577 Recall: 0.9234 F1: 0.8893 Train AUC: 0.9982 Val AUC: 0.9624 Time: 14.83\n",
      "Epoch: 907 Train Loss: 0.0758 Val Loss: 0.3246 Acc: 0.9112 Pre: 0.8812 Recall: 0.9274 F1: 0.9037 Train AUC: 0.9976 Val AUC: 0.9628 Time: 14.99\n",
      "Epoch: 908 Train Loss: 0.0627 Val Loss: 0.3459 Acc: 0.9022 Pre: 0.8760 Recall: 0.9113 F1: 0.8933 Train AUC: 0.9988 Val AUC: 0.9611 Time: 15.05\n",
      "Epoch: 909 Train Loss: 0.0625 Val Loss: 0.3509 Acc: 0.8967 Pre: 0.8687 Recall: 0.9073 F1: 0.8876 Train AUC: 0.9983 Val AUC: 0.9607 Time: 12.57\n",
      "Epoch: 910 Train Loss: 0.0725 Val Loss: 0.3304 Acc: 0.8967 Pre: 0.8631 Recall: 0.9153 F1: 0.8885 Train AUC: 0.9970 Val AUC: 0.9629 Time: 12.46\n",
      "Epoch: 911 Train Loss: 0.0712 Val Loss: 0.3192 Acc: 0.8931 Pre: 0.8513 Recall: 0.9234 F1: 0.8859 Train AUC: 0.9975 Val AUC: 0.9644 Time: 12.54\n",
      "Epoch: 912 Train Loss: 0.0705 Val Loss: 0.3235 Acc: 0.8913 Pre: 0.8507 Recall: 0.9194 F1: 0.8837 Train AUC: 0.9981 Val AUC: 0.9627 Time: 12.49\n",
      "Epoch: 913 Train Loss: 0.0682 Val Loss: 0.3414 Acc: 0.9040 Pre: 0.8736 Recall: 0.9194 F1: 0.8959 Train AUC: 0.9979 Val AUC: 0.9625 Time: 12.54\n",
      "Epoch: 914 Train Loss: 0.0688 Val Loss: 0.3638 Acc: 0.9022 Pre: 0.8760 Recall: 0.9113 F1: 0.8933 Train AUC: 0.9981 Val AUC: 0.9608 Time: 13.81\n",
      "Epoch: 915 Train Loss: 0.0690 Val Loss: 0.3777 Acc: 0.8931 Pre: 0.8513 Recall: 0.9234 F1: 0.8859 Train AUC: 0.9980 Val AUC: 0.9605 Time: 14.03\n",
      "Epoch: 916 Train Loss: 0.0635 Val Loss: 0.3572 Acc: 0.9004 Pre: 0.8561 Recall: 0.9355 F1: 0.8940 Train AUC: 0.9981 Val AUC: 0.9617 Time: 14.54\n",
      "Epoch: 917 Train Loss: 0.0806 Val Loss: 0.3274 Acc: 0.9040 Pre: 0.8736 Recall: 0.9194 F1: 0.8959 Train AUC: 0.9970 Val AUC: 0.9616 Time: 14.79\n",
      "Epoch: 918 Train Loss: 0.0598 Val Loss: 0.3352 Acc: 0.8913 Pre: 0.8615 Recall: 0.9032 F1: 0.8819 Train AUC: 0.9985 Val AUC: 0.9581 Time: 14.75\n",
      "Epoch: 919 Train Loss: 0.0944 Val Loss: 0.3502 Acc: 0.8967 Pre: 0.8631 Recall: 0.9153 F1: 0.8885 Train AUC: 0.9949 Val AUC: 0.9587 Time: 12.35\n",
      "Epoch: 920 Train Loss: 0.0724 Val Loss: 0.3711 Acc: 0.8895 Pre: 0.8476 Recall: 0.9194 F1: 0.8820 Train AUC: 0.9979 Val AUC: 0.9591 Time: 12.34\n",
      "Epoch: 921 Train Loss: 0.0752 Val Loss: 0.3734 Acc: 0.8967 Pre: 0.8659 Recall: 0.9113 F1: 0.8880 Train AUC: 0.9977 Val AUC: 0.9600 Time: 12.46\n",
      "Epoch: 922 Train Loss: 0.0817 Val Loss: 0.3488 Acc: 0.9058 Pre: 0.8828 Recall: 0.9113 F1: 0.8968 Train AUC: 0.9966 Val AUC: 0.9616 Time: 12.33\n",
      "Epoch: 923 Train Loss: 0.0612 Val Loss: 0.3267 Acc: 0.9149 Pre: 0.9004 Recall: 0.9113 F1: 0.9058 Train AUC: 0.9982 Val AUC: 0.9620 Time: 12.30\n",
      "Epoch: 924 Train Loss: 0.0615 Val Loss: 0.3137 Acc: 0.9058 Pre: 0.8889 Recall: 0.9032 F1: 0.8960 Train AUC: 0.9985 Val AUC: 0.9627 Time: 12.30\n",
      "Epoch: 925 Train Loss: 0.0715 Val Loss: 0.3231 Acc: 0.8895 Pre: 0.8476 Recall: 0.9194 F1: 0.8820 Train AUC: 0.9974 Val AUC: 0.9631 Time: 12.94\n",
      "Epoch: 926 Train Loss: 0.0732 Val Loss: 0.3406 Acc: 0.8859 Pre: 0.8413 Recall: 0.9194 F1: 0.8786 Train AUC: 0.9979 Val AUC: 0.9625 Time: 13.81\n",
      "Epoch: 927 Train Loss: 0.0696 Val Loss: 0.3412 Acc: 0.8986 Pre: 0.8664 Recall: 0.9153 F1: 0.8902 Train AUC: 0.9978 Val AUC: 0.9624 Time: 14.61\n",
      "Epoch: 928 Train Loss: 0.0631 Val Loss: 0.3325 Acc: 0.9094 Pre: 0.8898 Recall: 0.9113 F1: 0.9004 Train AUC: 0.9982 Val AUC: 0.9618 Time: 15.08\n",
      "Epoch: 929 Train Loss: 0.0715 Val Loss: 0.3234 Acc: 0.9076 Pre: 0.8833 Recall: 0.9153 F1: 0.8990 Train AUC: 0.9977 Val AUC: 0.9629 Time: 15.59\n",
      "Epoch: 930 Train Loss: 0.0600 Val Loss: 0.3081 Acc: 0.9076 Pre: 0.8803 Recall: 0.9194 F1: 0.8994 Train AUC: 0.9987 Val AUC: 0.9639 Time: 13.48\n",
      "Epoch: 931 Train Loss: 0.0621 Val Loss: 0.3080 Acc: 0.8949 Pre: 0.8626 Recall: 0.9113 F1: 0.8863 Train AUC: 0.9983 Val AUC: 0.9644 Time: 12.40\n",
      "Epoch: 932 Train Loss: 0.0645 Val Loss: 0.3115 Acc: 0.9040 Pre: 0.8794 Recall: 0.9113 F1: 0.8950 Train AUC: 0.9984 Val AUC: 0.9636 Time: 12.30\n",
      "Epoch: 933 Train Loss: 0.0658 Val Loss: 0.3143 Acc: 0.9022 Pre: 0.8789 Recall: 0.9073 F1: 0.8929 Train AUC: 0.9982 Val AUC: 0.9640 Time: 12.27\n",
      "Epoch: 934 Train Loss: 0.0606 Val Loss: 0.3098 Acc: 0.9058 Pre: 0.8769 Recall: 0.9194 F1: 0.8976 Train AUC: 0.9986 Val AUC: 0.9643 Time: 12.33\n",
      "Epoch: 935 Train Loss: 0.0619 Val Loss: 0.3075 Acc: 0.8967 Pre: 0.8550 Recall: 0.9274 F1: 0.8897 Train AUC: 0.9983 Val AUC: 0.9687 Time: 12.36\n",
      "Epoch: 936 Train Loss: 0.0646 Val Loss: 0.3186 Acc: 0.8949 Pre: 0.8519 Recall: 0.9274 F1: 0.8880 Train AUC: 0.9986 Val AUC: 0.9648 Time: 13.10\n",
      "Epoch: 937 Train Loss: 0.0657 Val Loss: 0.3276 Acc: 0.9004 Pre: 0.8642 Recall: 0.9234 F1: 0.8928 Train AUC: 0.9983 Val AUC: 0.9621 Time: 13.26\n",
      "Epoch: 938 Train Loss: 0.0587 Val Loss: 0.3390 Acc: 0.9040 Pre: 0.8794 Recall: 0.9113 F1: 0.8950 Train AUC: 0.9987 Val AUC: 0.9612 Time: 14.32\n",
      "Epoch: 939 Train Loss: 0.0604 Val Loss: 0.3636 Acc: 0.9022 Pre: 0.8760 Recall: 0.9113 F1: 0.8933 Train AUC: 0.9986 Val AUC: 0.9598 Time: 14.71\n",
      "Epoch: 940 Train Loss: 0.0690 Val Loss: 0.3524 Acc: 0.8949 Pre: 0.8571 Recall: 0.9194 F1: 0.8872 Train AUC: 0.9979 Val AUC: 0.9612 Time: 14.00\n",
      "Epoch: 941 Train Loss: 0.0684 Val Loss: 0.3251 Acc: 0.8931 Pre: 0.8487 Recall: 0.9274 F1: 0.8863 Train AUC: 0.9978 Val AUC: 0.9641 Time: 14.70\n",
      "Epoch: 942 Train Loss: 0.0650 Val Loss: 0.3049 Acc: 0.9040 Pre: 0.8679 Recall: 0.9274 F1: 0.8967 Train AUC: 0.9983 Val AUC: 0.9643 Time: 12.17\n",
      "Epoch: 943 Train Loss: 0.0696 Val Loss: 0.3091 Acc: 0.9167 Pre: 0.8945 Recall: 0.9234 F1: 0.9087 Train AUC: 0.9982 Val AUC: 0.9628 Time: 12.48\n",
      "Epoch: 944 Train Loss: 0.0652 Val Loss: 0.3303 Acc: 0.9112 Pre: 0.8872 Recall: 0.9194 F1: 0.9030 Train AUC: 0.9985 Val AUC: 0.9620 Time: 12.50\n",
      "Epoch: 945 Train Loss: 0.0706 Val Loss: 0.3475 Acc: 0.8949 Pre: 0.8519 Recall: 0.9274 F1: 0.8880 Train AUC: 0.9980 Val AUC: 0.9626 Time: 13.46\n",
      "Epoch: 946 Train Loss: 0.0668 Val Loss: 0.3450 Acc: 0.8877 Pre: 0.8394 Recall: 0.9274 F1: 0.8812 Train AUC: 0.9980 Val AUC: 0.9637 Time: 13.98\n",
      "Epoch: 947 Train Loss: 0.0688 Val Loss: 0.3226 Acc: 0.8986 Pre: 0.8582 Recall: 0.9274 F1: 0.8915 Train AUC: 0.9986 Val AUC: 0.9630 Time: 14.37\n",
      "Epoch: 948 Train Loss: 0.0832 Val Loss: 0.3241 Acc: 0.9185 Pre: 0.9044 Recall: 0.9153 F1: 0.9098 Train AUC: 0.9962 Val AUC: 0.9619 Time: 15.13\n",
      "Epoch: 949 Train Loss: 0.0652 Val Loss: 0.3307 Acc: 0.9167 Pre: 0.9040 Recall: 0.9113 F1: 0.9076 Train AUC: 0.9983 Val AUC: 0.9616 Time: 13.34\n",
      "Epoch: 950 Train Loss: 0.0730 Val Loss: 0.3397 Acc: 0.8986 Pre: 0.8636 Recall: 0.9194 F1: 0.8906 Train AUC: 0.9977 Val AUC: 0.9625 Time: 12.58\n",
      "Epoch: 951 Train Loss: 0.0723 Val Loss: 0.3480 Acc: 0.8804 Pre: 0.8321 Recall: 0.9194 F1: 0.8736 Train AUC: 0.9978 Val AUC: 0.9612 Time: 12.70\n",
      "Epoch: 952 Train Loss: 0.0663 Val Loss: 0.3331 Acc: 0.8877 Pre: 0.8444 Recall: 0.9194 F1: 0.8803 Train AUC: 0.9988 Val AUC: 0.9607 Time: 12.79\n",
      "Epoch: 953 Train Loss: 0.0623 Val Loss: 0.3216 Acc: 0.9076 Pre: 0.8803 Recall: 0.9194 F1: 0.8994 Train AUC: 0.9987 Val AUC: 0.9617 Time: 13.88\n",
      "Epoch: 954 Train Loss: 0.0638 Val Loss: 0.3195 Acc: 0.9112 Pre: 0.8933 Recall: 0.9113 F1: 0.9022 Train AUC: 0.9985 Val AUC: 0.9626 Time: 14.20\n",
      "Epoch: 955 Train Loss: 0.0619 Val Loss: 0.3241 Acc: 0.9076 Pre: 0.8863 Recall: 0.9113 F1: 0.8986 Train AUC: 0.9982 Val AUC: 0.9636 Time: 14.55\n",
      "Epoch: 956 Train Loss: 0.0688 Val Loss: 0.3183 Acc: 0.9058 Pre: 0.8798 Recall: 0.9153 F1: 0.8972 Train AUC: 0.9977 Val AUC: 0.9635 Time: 13.94\n",
      "Epoch: 957 Train Loss: 0.0679 Val Loss: 0.3188 Acc: 0.8949 Pre: 0.8545 Recall: 0.9234 F1: 0.8876 Train AUC: 0.9978 Val AUC: 0.9639 Time: 13.31\n",
      "Epoch: 958 Train Loss: 0.0709 Val Loss: 0.3145 Acc: 0.8949 Pre: 0.8545 Recall: 0.9234 F1: 0.8876 Train AUC: 0.9982 Val AUC: 0.9641 Time: 12.32\n",
      "Epoch: 959 Train Loss: 0.0587 Val Loss: 0.3159 Acc: 0.9094 Pre: 0.8837 Recall: 0.9194 F1: 0.9012 Train AUC: 0.9990 Val AUC: 0.9625 Time: 12.31\n",
      "Epoch: 960 Train Loss: 0.0711 Val Loss: 0.3346 Acc: 0.9022 Pre: 0.8731 Recall: 0.9153 F1: 0.8937 Train AUC: 0.9984 Val AUC: 0.9620 Time: 12.48\n",
      "Epoch: 961 Train Loss: 0.0665 Val Loss: 0.3384 Acc: 0.9040 Pre: 0.8794 Recall: 0.9113 F1: 0.8950 Train AUC: 0.9979 Val AUC: 0.9617 Time: 13.91\n",
      "Epoch: 962 Train Loss: 0.0731 Val Loss: 0.3497 Acc: 0.9004 Pre: 0.8726 Recall: 0.9113 F1: 0.8915 Train AUC: 0.9974 Val AUC: 0.9601 Time: 13.76\n",
      "Epoch: 963 Train Loss: 0.0696 Val Loss: 0.3390 Acc: 0.8949 Pre: 0.8571 Recall: 0.9194 F1: 0.8872 Train AUC: 0.9977 Val AUC: 0.9604 Time: 14.38\n",
      "Epoch: 964 Train Loss: 0.0576 Val Loss: 0.3223 Acc: 0.9022 Pre: 0.8702 Recall: 0.9194 F1: 0.8941 Train AUC: 0.9989 Val AUC: 0.9622 Time: 14.71\n",
      "Epoch: 965 Train Loss: 0.0693 Val Loss: 0.3114 Acc: 0.9094 Pre: 0.8779 Recall: 0.9274 F1: 0.9020 Train AUC: 0.9980 Val AUC: 0.9630 Time: 14.84\n",
      "Epoch: 966 Train Loss: 0.0713 Val Loss: 0.3201 Acc: 0.9058 Pre: 0.8712 Recall: 0.9274 F1: 0.8984 Train AUC: 0.9980 Val AUC: 0.9629 Time: 12.14\n",
      "Epoch: 967 Train Loss: 0.0620 Val Loss: 0.3353 Acc: 0.8949 Pre: 0.8519 Recall: 0.9274 F1: 0.8880 Train AUC: 0.9986 Val AUC: 0.9627 Time: 12.32\n",
      "Epoch: 968 Train Loss: 0.0608 Val Loss: 0.3491 Acc: 0.9022 Pre: 0.8674 Recall: 0.9234 F1: 0.8945 Train AUC: 0.9986 Val AUC: 0.9618 Time: 12.51\n",
      "Epoch: 969 Train Loss: 0.0657 Val Loss: 0.3384 Acc: 0.9094 Pre: 0.8898 Recall: 0.9113 F1: 0.9004 Train AUC: 0.9980 Val AUC: 0.9615 Time: 12.66\n",
      "Epoch: 970 Train Loss: 0.0656 Val Loss: 0.3146 Acc: 0.9112 Pre: 0.8933 Recall: 0.9113 F1: 0.9022 Train AUC: 0.9982 Val AUC: 0.9629 Time: 13.60\n",
      "Epoch: 971 Train Loss: 0.0739 Val Loss: 0.3168 Acc: 0.8986 Pre: 0.8636 Recall: 0.9194 F1: 0.8906 Train AUC: 0.9975 Val AUC: 0.9631 Time: 14.52\n",
      "Epoch: 972 Train Loss: 0.0646 Val Loss: 0.3387 Acc: 0.8931 Pre: 0.8539 Recall: 0.9194 F1: 0.8854 Train AUC: 0.9984 Val AUC: 0.9627 Time: 14.24\n",
      "Epoch: 973 Train Loss: 0.0688 Val Loss: 0.3435 Acc: 0.8949 Pre: 0.8545 Recall: 0.9234 F1: 0.8876 Train AUC: 0.9981 Val AUC: 0.9624 Time: 13.84\n",
      "Epoch: 974 Train Loss: 0.0764 Val Loss: 0.3401 Acc: 0.9130 Pre: 0.8906 Recall: 0.9194 F1: 0.9048 Train AUC: 0.9975 Val AUC: 0.9615 Time: 12.59\n",
      "Epoch: 975 Train Loss: 0.0665 Val Loss: 0.3356 Acc: 0.9167 Pre: 0.9073 Recall: 0.9073 F1: 0.9073 Train AUC: 0.9981 Val AUC: 0.9611 Time: 12.53\n",
      "Epoch: 976 Train Loss: 0.0687 Val Loss: 0.3171 Acc: 0.9094 Pre: 0.8808 Recall: 0.9234 F1: 0.9016 Train AUC: 0.9985 Val AUC: 0.9636 Time: 12.49\n",
      "Epoch: 977 Train Loss: 0.0615 Val Loss: 0.3386 Acc: 0.8877 Pre: 0.8394 Recall: 0.9274 F1: 0.8812 Train AUC: 0.9989 Val AUC: 0.9632 Time: 13.95\n",
      "Epoch: 978 Train Loss: 0.0728 Val Loss: 0.3582 Acc: 0.8841 Pre: 0.8382 Recall: 0.9194 F1: 0.8769 Train AUC: 0.9979 Val AUC: 0.9611 Time: 13.72\n",
      "Epoch: 979 Train Loss: 0.0708 Val Loss: 0.3436 Acc: 0.9058 Pre: 0.8828 Recall: 0.9113 F1: 0.8968 Train AUC: 0.9983 Val AUC: 0.9606 Time: 13.83\n",
      "Epoch: 980 Train Loss: 0.0635 Val Loss: 0.3322 Acc: 0.9167 Pre: 0.9040 Recall: 0.9113 F1: 0.9076 Train AUC: 0.9981 Val AUC: 0.9613 Time: 13.88\n",
      "Epoch: 981 Train Loss: 0.0739 Val Loss: 0.3117 Acc: 0.9130 Pre: 0.8906 Recall: 0.9194 F1: 0.9048 Train AUC: 0.9982 Val AUC: 0.9627 Time: 14.25\n",
      "Epoch: 982 Train Loss: 0.0654 Val Loss: 0.3147 Acc: 0.9058 Pre: 0.8712 Recall: 0.9274 F1: 0.8984 Train AUC: 0.9983 Val AUC: 0.9637 Time: 12.32\n",
      "Epoch: 983 Train Loss: 0.0651 Val Loss: 0.3329 Acc: 0.8949 Pre: 0.8571 Recall: 0.9194 F1: 0.8872 Train AUC: 0.9985 Val AUC: 0.9621 Time: 12.63\n",
      "Epoch: 984 Train Loss: 0.0656 Val Loss: 0.3549 Acc: 0.8949 Pre: 0.8626 Recall: 0.9113 F1: 0.8863 Train AUC: 0.9986 Val AUC: 0.9602 Time: 13.35\n",
      "Epoch: 985 Train Loss: 0.0616 Val Loss: 0.3443 Acc: 0.9058 Pre: 0.8828 Recall: 0.9113 F1: 0.8968 Train AUC: 0.9986 Val AUC: 0.9609 Time: 13.67\n",
      "Epoch: 986 Train Loss: 0.0704 Val Loss: 0.3186 Acc: 0.9094 Pre: 0.8867 Recall: 0.9153 F1: 0.9008 Train AUC: 0.9973 Val AUC: 0.9630 Time: 13.64\n",
      "Epoch: 987 Train Loss: 0.0696 Val Loss: 0.3085 Acc: 0.9058 Pre: 0.8712 Recall: 0.9274 F1: 0.8984 Train AUC: 0.9975 Val AUC: 0.9643 Time: 14.16\n",
      "Epoch: 988 Train Loss: 0.0658 Val Loss: 0.3171 Acc: 0.9058 Pre: 0.8769 Recall: 0.9194 F1: 0.8976 Train AUC: 0.9981 Val AUC: 0.9617 Time: 14.32\n",
      "Epoch: 989 Train Loss: 0.0696 Val Loss: 0.3494 Acc: 0.8804 Pre: 0.8321 Recall: 0.9194 F1: 0.8736 Train AUC: 0.9982 Val AUC: 0.9590 Time: 12.69\n",
      "Epoch: 990 Train Loss: 0.0626 Val Loss: 0.3749 Acc: 0.8804 Pre: 0.8321 Recall: 0.9194 F1: 0.8736 Train AUC: 0.9984 Val AUC: 0.9585 Time: 12.35\n",
      "Epoch: 991 Train Loss: 0.0664 Val Loss: 0.3541 Acc: 0.8931 Pre: 0.8566 Recall: 0.9153 F1: 0.8850 Train AUC: 0.9982 Val AUC: 0.9611 Time: 13.07\n",
      "Epoch: 992 Train Loss: 0.0635 Val Loss: 0.3211 Acc: 0.9058 Pre: 0.8798 Recall: 0.9153 F1: 0.8972 Train AUC: 0.9981 Val AUC: 0.9631 Time: 13.59\n",
      "Epoch: 993 Train Loss: 0.0708 Val Loss: 0.3062 Acc: 0.9112 Pre: 0.8872 Recall: 0.9194 F1: 0.9030 Train AUC: 0.9978 Val AUC: 0.9641 Time: 13.71\n",
      "Epoch: 994 Train Loss: 0.0674 Val Loss: 0.3086 Acc: 0.9076 Pre: 0.8745 Recall: 0.9274 F1: 0.9002 Train AUC: 0.9983 Val AUC: 0.9640 Time: 14.67\n",
      "Epoch: 995 Train Loss: 0.0661 Val Loss: 0.3242 Acc: 0.8986 Pre: 0.8636 Recall: 0.9194 F1: 0.8906 Train AUC: 0.9981 Val AUC: 0.9628 Time: 15.00\n",
      "Epoch: 996 Train Loss: 0.0706 Val Loss: 0.3425 Acc: 0.8986 Pre: 0.8636 Recall: 0.9194 F1: 0.8906 Train AUC: 0.9982 Val AUC: 0.9620 Time: 13.13\n",
      "Epoch: 997 Train Loss: 0.0652 Val Loss: 0.3539 Acc: 0.9022 Pre: 0.8674 Recall: 0.9234 F1: 0.8945 Train AUC: 0.9980 Val AUC: 0.9624 Time: 12.39\n",
      "Epoch: 998 Train Loss: 0.0630 Val Loss: 0.3544 Acc: 0.9022 Pre: 0.8674 Recall: 0.9234 F1: 0.8945 Train AUC: 0.9981 Val AUC: 0.9617 Time: 13.47\n",
      "Epoch: 999 Train Loss: 0.0759 Val Loss: 0.3398 Acc: 0.9040 Pre: 0.8794 Recall: 0.9113 F1: 0.8950 Train AUC: 0.9967 Val AUC: 0.9611 Time: 13.41\n",
      "Epoch: 1000 Train Loss: 0.0665 Val Loss: 0.3356 Acc: 0.9058 Pre: 0.8828 Recall: 0.9113 F1: 0.8968 Train AUC: 0.9981 Val AUC: 0.9609 Time: 13.77\n",
      "Fold: 4 Best Epoch: 935 Test acc: 0.8967 Test Pre: 0.8550 Test Recall: 0.9274 Test F1: 0.8897 Test PRC: 0.9689 Test AUC: 0.9687\n",
      "Training for Fold 5\n",
      "## Training edges: 2208\n",
      "## Testing edges: 552\n",
      "Epoch: 1 Train Loss: 1.0512 Val Loss: 1.5141 Acc: 0.5036 Pre: 0.4854 Recall: 0.5000 F1: 0.4926 Train AUC: 0.4425 Val AUC: 0.5530 Time: 14.57\n",
      "Epoch: 2 Train Loss: 1.6622 Val Loss: 1.0291 Acc: 0.5634 Pre: 0.5399 Recall: 0.6353 F1: 0.5838 Train AUC: 0.5382 Val AUC: 0.6338 Time: 13.31\n",
      "Epoch: 3 Train Loss: 1.0011 Val Loss: 0.5020 Acc: 0.7264 Pre: 0.7154 Recall: 0.7180 F1: 0.7167 Train AUC: 0.6533 Val AUC: 0.8294 Time: 12.55\n",
      "Epoch: 4 Train Loss: 0.6525 Val Loss: 0.5166 Acc: 0.7409 Pre: 0.7639 Recall: 0.6692 F1: 0.7134 Train AUC: 0.7487 Val AUC: 0.8233 Time: 13.36\n",
      "Epoch: 5 Train Loss: 0.5558 Val Loss: 0.4967 Acc: 0.7772 Pre: 0.7563 Recall: 0.7932 F1: 0.7743 Train AUC: 0.8124 Val AUC: 0.8458 Time: 14.04\n",
      "Epoch: 6 Train Loss: 0.7667 Val Loss: 0.5806 Acc: 0.7083 Pre: 0.6431 Recall: 0.8872 F1: 0.7457 Train AUC: 0.7135 Val AUC: 0.8593 Time: 14.86\n",
      "Epoch: 7 Train Loss: 0.6270 Val Loss: 0.6123 Acc: 0.7228 Pre: 0.6592 Recall: 0.8797 F1: 0.7536 Train AUC: 0.7962 Val AUC: 0.8594 Time: 13.43\n",
      "Epoch: 8 Train Loss: 0.5935 Val Loss: 0.4796 Acc: 0.7862 Pre: 0.7569 Recall: 0.8195 F1: 0.7870 Train AUC: 0.8525 Val AUC: 0.8732 Time: 13.32\n",
      "Epoch: 9 Train Loss: 0.5516 Val Loss: 0.4477 Acc: 0.8007 Pre: 0.8750 Recall: 0.6842 F1: 0.7679 Train AUC: 0.8469 Val AUC: 0.8908 Time: 14.05\n",
      "Epoch: 10 Train Loss: 0.5381 Val Loss: 0.4679 Acc: 0.8025 Pre: 0.8945 Recall: 0.6692 F1: 0.7656 Train AUC: 0.8718 Val AUC: 0.8987 Time: 14.15\n",
      "Epoch: 11 Train Loss: 0.5312 Val Loss: 0.4528 Acc: 0.8225 Pre: 0.8925 Recall: 0.7180 F1: 0.7958 Train AUC: 0.8948 Val AUC: 0.8969 Time: 14.21\n",
      "Epoch: 12 Train Loss: 0.5258 Val Loss: 0.4421 Acc: 0.8134 Pre: 0.8382 Recall: 0.7594 F1: 0.7968 Train AUC: 0.8798 Val AUC: 0.8926 Time: 13.01\n",
      "Epoch: 13 Train Loss: 0.4683 Val Loss: 0.4456 Acc: 0.8062 Pre: 0.8046 Recall: 0.7895 F1: 0.7970 Train AUC: 0.8837 Val AUC: 0.8903 Time: 12.37\n",
      "Epoch: 14 Train Loss: 0.4669 Val Loss: 0.4487 Acc: 0.8025 Pre: 0.7814 Recall: 0.8195 F1: 0.8000 Train AUC: 0.8806 Val AUC: 0.8911 Time: 12.43\n",
      "Epoch: 15 Train Loss: 0.4446 Val Loss: 0.4412 Acc: 0.8080 Pre: 0.7797 Recall: 0.8383 F1: 0.8080 Train AUC: 0.8869 Val AUC: 0.8928 Time: 13.69\n",
      "Epoch: 16 Train Loss: 0.4438 Val Loss: 0.4137 Acc: 0.8134 Pre: 0.7942 Recall: 0.8271 F1: 0.8103 Train AUC: 0.8898 Val AUC: 0.8976 Time: 13.64\n",
      "Epoch: 17 Train Loss: 0.4153 Val Loss: 0.3935 Acc: 0.8243 Pre: 0.8263 Recall: 0.8045 F1: 0.8152 Train AUC: 0.8881 Val AUC: 0.9009 Time: 14.58\n",
      "Epoch: 18 Train Loss: 0.4528 Val Loss: 0.3837 Acc: 0.8261 Pre: 0.8427 Recall: 0.7857 F1: 0.8132 Train AUC: 0.8670 Val AUC: 0.9050 Time: 14.77\n",
      "Epoch: 19 Train Loss: 0.4028 Val Loss: 0.3762 Acc: 0.8388 Pre: 0.8798 Recall: 0.7707 F1: 0.8216 Train AUC: 0.8950 Val AUC: 0.9086 Time: 13.23\n",
      "Epoch: 20 Train Loss: 0.4333 Val Loss: 0.3691 Acc: 0.8225 Pre: 0.8590 Recall: 0.7556 F1: 0.8040 Train AUC: 0.8782 Val AUC: 0.9107 Time: 12.70\n",
      "Epoch: 21 Train Loss: 0.4406 Val Loss: 0.3686 Acc: 0.8261 Pre: 0.8602 Recall: 0.7632 F1: 0.8088 Train AUC: 0.8700 Val AUC: 0.9102 Time: 12.39\n",
      "Epoch: 22 Train Loss: 0.4149 Val Loss: 0.3770 Acc: 0.8225 Pre: 0.8471 Recall: 0.7707 F1: 0.8071 Train AUC: 0.8902 Val AUC: 0.9078 Time: 12.51\n",
      "Epoch: 23 Train Loss: 0.3946 Val Loss: 0.3869 Acc: 0.8243 Pre: 0.8449 Recall: 0.7782 F1: 0.8102 Train AUC: 0.8996 Val AUC: 0.9066 Time: 13.82\n",
      "Epoch: 24 Train Loss: 0.3876 Val Loss: 0.3951 Acc: 0.8333 Pre: 0.8480 Recall: 0.7970 F1: 0.8217 Train AUC: 0.9049 Val AUC: 0.9058 Time: 13.90\n",
      "Epoch: 25 Train Loss: 0.4004 Val Loss: 0.3989 Acc: 0.8297 Pre: 0.8413 Recall: 0.7970 F1: 0.8185 Train AUC: 0.9035 Val AUC: 0.9056 Time: 14.46\n",
      "Epoch: 26 Train Loss: 0.4030 Val Loss: 0.3983 Acc: 0.8315 Pre: 0.8392 Recall: 0.8045 F1: 0.8215 Train AUC: 0.9006 Val AUC: 0.9058 Time: 14.88\n",
      "Epoch: 27 Train Loss: 0.3699 Val Loss: 0.3931 Acc: 0.8315 Pre: 0.8366 Recall: 0.8083 F1: 0.8222 Train AUC: 0.9152 Val AUC: 0.9070 Time: 13.58\n",
      "Epoch: 28 Train Loss: 0.4072 Val Loss: 0.3830 Acc: 0.8279 Pre: 0.8379 Recall: 0.7970 F1: 0.8170 Train AUC: 0.9027 Val AUC: 0.9085 Time: 13.61\n",
      "Epoch: 29 Train Loss: 0.3913 Val Loss: 0.3742 Acc: 0.8279 Pre: 0.8406 Recall: 0.7932 F1: 0.8162 Train AUC: 0.9055 Val AUC: 0.9104 Time: 13.38\n",
      "Epoch: 30 Train Loss: 0.3816 Val Loss: 0.3668 Acc: 0.8333 Pre: 0.8508 Recall: 0.7932 F1: 0.8210 Train AUC: 0.9075 Val AUC: 0.9122 Time: 13.99\n",
      "Epoch: 31 Train Loss: 0.3918 Val Loss: 0.3628 Acc: 0.8460 Pre: 0.8694 Recall: 0.8008 F1: 0.8337 Train AUC: 0.9024 Val AUC: 0.9122 Time: 14.33\n",
      "Epoch: 32 Train Loss: 0.3934 Val Loss: 0.3622 Acc: 0.8478 Pre: 0.8669 Recall: 0.8083 F1: 0.8366 Train AUC: 0.9038 Val AUC: 0.9123 Time: 14.41\n",
      "Epoch: 33 Train Loss: 0.3797 Val Loss: 0.3625 Acc: 0.8496 Pre: 0.8645 Recall: 0.8158 F1: 0.8395 Train AUC: 0.9069 Val AUC: 0.9128 Time: 13.41\n",
      "Epoch: 34 Train Loss: 0.3560 Val Loss: 0.3608 Acc: 0.8478 Pre: 0.8611 Recall: 0.8158 F1: 0.8378 Train AUC: 0.9188 Val AUC: 0.9133 Time: 12.77\n",
      "Epoch: 35 Train Loss: 0.3970 Val Loss: 0.3573 Acc: 0.8424 Pre: 0.8594 Recall: 0.8045 F1: 0.8311 Train AUC: 0.9005 Val AUC: 0.9145 Time: 12.83\n",
      "Epoch: 36 Train Loss: 0.3571 Val Loss: 0.3549 Acc: 0.8460 Pre: 0.8694 Recall: 0.8008 F1: 0.8337 Train AUC: 0.9184 Val AUC: 0.9150 Time: 13.75\n",
      "Epoch: 37 Train Loss: 0.3601 Val Loss: 0.3554 Acc: 0.8351 Pre: 0.8543 Recall: 0.7932 F1: 0.8226 Train AUC: 0.9178 Val AUC: 0.9149 Time: 14.20\n",
      "Epoch: 38 Train Loss: 0.3640 Val Loss: 0.3570 Acc: 0.8370 Pre: 0.8577 Recall: 0.7932 F1: 0.8242 Train AUC: 0.9147 Val AUC: 0.9151 Time: 14.86\n",
      "Epoch: 39 Train Loss: 0.3696 Val Loss: 0.3560 Acc: 0.8351 Pre: 0.8601 Recall: 0.7857 F1: 0.8212 Train AUC: 0.9132 Val AUC: 0.9166 Time: 13.91\n",
      "Epoch: 40 Train Loss: 0.3588 Val Loss: 0.3528 Acc: 0.8351 Pre: 0.8601 Recall: 0.7857 F1: 0.8212 Train AUC: 0.9162 Val AUC: 0.9175 Time: 12.98\n",
      "Epoch: 41 Train Loss: 0.3429 Val Loss: 0.3490 Acc: 0.8315 Pre: 0.8560 Recall: 0.7820 F1: 0.8173 Train AUC: 0.9223 Val AUC: 0.9185 Time: 12.57\n",
      "Epoch: 42 Train Loss: 0.3524 Val Loss: 0.3458 Acc: 0.8333 Pre: 0.8537 Recall: 0.7895 F1: 0.8203 Train AUC: 0.9192 Val AUC: 0.9193 Time: 12.89\n",
      "Epoch: 43 Train Loss: 0.3444 Val Loss: 0.3451 Acc: 0.8406 Pre: 0.8589 Recall: 0.8008 F1: 0.8288 Train AUC: 0.9242 Val AUC: 0.9197 Time: 12.72\n",
      "Epoch: 44 Train Loss: 0.3447 Val Loss: 0.3467 Acc: 0.8424 Pre: 0.8538 Recall: 0.8120 F1: 0.8324 Train AUC: 0.9242 Val AUC: 0.9196 Time: 13.77\n",
      "Epoch: 45 Train Loss: 0.3457 Val Loss: 0.3498 Acc: 0.8424 Pre: 0.8429 Recall: 0.8271 F1: 0.8349 Train AUC: 0.9249 Val AUC: 0.9195 Time: 14.38\n",
      "Epoch: 46 Train Loss: 0.3468 Val Loss: 0.3494 Acc: 0.8424 Pre: 0.8429 Recall: 0.8271 F1: 0.8349 Train AUC: 0.9221 Val AUC: 0.9194 Time: 14.68\n",
      "Epoch: 47 Train Loss: 0.3456 Val Loss: 0.3453 Acc: 0.8406 Pre: 0.8450 Recall: 0.8195 F1: 0.8321 Train AUC: 0.9226 Val AUC: 0.9204 Time: 14.81\n",
      "Epoch: 48 Train Loss: 0.3475 Val Loss: 0.3415 Acc: 0.8406 Pre: 0.8504 Recall: 0.8120 F1: 0.8308 Train AUC: 0.9223 Val AUC: 0.9216 Time: 13.57\n",
      "Epoch: 49 Train Loss: 0.3666 Val Loss: 0.3427 Acc: 0.8424 Pre: 0.8538 Recall: 0.8120 F1: 0.8324 Train AUC: 0.9159 Val AUC: 0.9215 Time: 12.59\n",
      "Epoch: 50 Train Loss: 0.3404 Val Loss: 0.3430 Acc: 0.8388 Pre: 0.8554 Recall: 0.8008 F1: 0.8272 Train AUC: 0.9281 Val AUC: 0.9219 Time: 12.64\n",
      "Epoch: 51 Train Loss: 0.3354 Val Loss: 0.3415 Acc: 0.8388 Pre: 0.8583 Recall: 0.7970 F1: 0.8265 Train AUC: 0.9268 Val AUC: 0.9233 Time: 13.46\n",
      "Epoch: 52 Train Loss: 0.3418 Val Loss: 0.3407 Acc: 0.8388 Pre: 0.8583 Recall: 0.7970 F1: 0.8265 Train AUC: 0.9253 Val AUC: 0.9241 Time: 13.28\n",
      "Epoch: 53 Train Loss: 0.3465 Val Loss: 0.3406 Acc: 0.8424 Pre: 0.8566 Recall: 0.8083 F1: 0.8317 Train AUC: 0.9232 Val AUC: 0.9244 Time: 14.22\n",
      "Epoch: 54 Train Loss: 0.3364 Val Loss: 0.3407 Acc: 0.8442 Pre: 0.8543 Recall: 0.8158 F1: 0.8346 Train AUC: 0.9275 Val AUC: 0.9247 Time: 14.04\n",
      "Epoch: 55 Train Loss: 0.3406 Val Loss: 0.3369 Acc: 0.8460 Pre: 0.8549 Recall: 0.8195 F1: 0.8369 Train AUC: 0.9262 Val AUC: 0.9258 Time: 13.89\n",
      "Epoch: 56 Train Loss: 0.3378 Val Loss: 0.3326 Acc: 0.8460 Pre: 0.8577 Recall: 0.8158 F1: 0.8362 Train AUC: 0.9275 Val AUC: 0.9275 Time: 14.25\n",
      "Epoch: 57 Train Loss: 0.3294 Val Loss: 0.3269 Acc: 0.8496 Pre: 0.8704 Recall: 0.8083 F1: 0.8382 Train AUC: 0.9309 Val AUC: 0.9301 Time: 12.51\n",
      "Epoch: 58 Train Loss: 0.3279 Val Loss: 0.3236 Acc: 0.8496 Pre: 0.8765 Recall: 0.8008 F1: 0.8369 Train AUC: 0.9332 Val AUC: 0.9318 Time: 13.28\n",
      "Epoch: 59 Train Loss: 0.3363 Val Loss: 0.3217 Acc: 0.8496 Pre: 0.8765 Recall: 0.8008 F1: 0.8369 Train AUC: 0.9300 Val AUC: 0.9327 Time: 14.03\n",
      "Epoch: 60 Train Loss: 0.3337 Val Loss: 0.3216 Acc: 0.8496 Pre: 0.8735 Recall: 0.8045 F1: 0.8376 Train AUC: 0.9325 Val AUC: 0.9331 Time: 14.49\n",
      "Epoch: 61 Train Loss: 0.3206 Val Loss: 0.3229 Acc: 0.8478 Pre: 0.8699 Recall: 0.8045 F1: 0.8359 Train AUC: 0.9333 Val AUC: 0.9325 Time: 14.60\n",
      "Epoch: 62 Train Loss: 0.3332 Val Loss: 0.3263 Acc: 0.8478 Pre: 0.8611 Recall: 0.8158 F1: 0.8378 Train AUC: 0.9301 Val AUC: 0.9316 Time: 12.53\n",
      "Epoch: 63 Train Loss: 0.3212 Val Loss: 0.3282 Acc: 0.8478 Pre: 0.8583 Recall: 0.8195 F1: 0.8385 Train AUC: 0.9352 Val AUC: 0.9314 Time: 12.41\n",
      "Epoch: 64 Train Loss: 0.3300 Val Loss: 0.3255 Acc: 0.8460 Pre: 0.8606 Recall: 0.8120 F1: 0.8356 Train AUC: 0.9301 Val AUC: 0.9320 Time: 12.70\n",
      "Epoch: 65 Train Loss: 0.3272 Val Loss: 0.3214 Acc: 0.8496 Pre: 0.8704 Recall: 0.8083 F1: 0.8382 Train AUC: 0.9305 Val AUC: 0.9331 Time: 12.67\n",
      "Epoch: 66 Train Loss: 0.3129 Val Loss: 0.3180 Acc: 0.8514 Pre: 0.8770 Recall: 0.8045 F1: 0.8392 Train AUC: 0.9365 Val AUC: 0.9344 Time: 13.24\n",
      "Epoch: 67 Train Loss: 0.3193 Val Loss: 0.3148 Acc: 0.8514 Pre: 0.8866 Recall: 0.7932 F1: 0.8373 Train AUC: 0.9344 Val AUC: 0.9359 Time: 13.94\n",
      "Epoch: 68 Train Loss: 0.3144 Val Loss: 0.3129 Acc: 0.8551 Pre: 0.8908 Recall: 0.7970 F1: 0.8413 Train AUC: 0.9363 Val AUC: 0.9370 Time: 14.71\n",
      "Epoch: 69 Train Loss: 0.3255 Val Loss: 0.3124 Acc: 0.8551 Pre: 0.8908 Recall: 0.7970 F1: 0.8413 Train AUC: 0.9339 Val AUC: 0.9375 Time: 14.73\n",
      "Epoch: 70 Train Loss: 0.3281 Val Loss: 0.3135 Acc: 0.8587 Pre: 0.8917 Recall: 0.8045 F1: 0.8458 Train AUC: 0.9336 Val AUC: 0.9368 Time: 14.78\n",
      "Epoch: 71 Train Loss: 0.3267 Val Loss: 0.3224 Acc: 0.8533 Pre: 0.8571 Recall: 0.8346 F1: 0.8457 Train AUC: 0.9353 Val AUC: 0.9337 Time: 12.56\n",
      "Epoch: 72 Train Loss: 0.3078 Val Loss: 0.3308 Acc: 0.8460 Pre: 0.8415 Recall: 0.8383 F1: 0.8399 Train AUC: 0.9397 Val AUC: 0.9324 Time: 12.53\n",
      "Epoch: 73 Train Loss: 0.3294 Val Loss: 0.3280 Acc: 0.8514 Pre: 0.8538 Recall: 0.8346 F1: 0.8441 Train AUC: 0.9330 Val AUC: 0.9329 Time: 12.78\n",
      "Epoch: 74 Train Loss: 0.3223 Val Loss: 0.3210 Acc: 0.8424 Pre: 0.8623 Recall: 0.8008 F1: 0.8304 Train AUC: 0.9350 Val AUC: 0.9340 Time: 12.79\n",
      "Epoch: 75 Train Loss: 0.3071 Val Loss: 0.3196 Acc: 0.8424 Pre: 0.8683 Recall: 0.7932 F1: 0.8291 Train AUC: 0.9400 Val AUC: 0.9348 Time: 14.00\n",
      "Epoch: 76 Train Loss: 0.3248 Val Loss: 0.3174 Acc: 0.8460 Pre: 0.8755 Recall: 0.7932 F1: 0.8323 Train AUC: 0.9354 Val AUC: 0.9357 Time: 14.31\n",
      "Epoch: 77 Train Loss: 0.3234 Val Loss: 0.3136 Acc: 0.8478 Pre: 0.8760 Recall: 0.7970 F1: 0.8346 Train AUC: 0.9363 Val AUC: 0.9362 Time: 14.93\n",
      "Epoch: 78 Train Loss: 0.3156 Val Loss: 0.3142 Acc: 0.8551 Pre: 0.8750 Recall: 0.8158 F1: 0.8444 Train AUC: 0.9378 Val AUC: 0.9368 Time: 14.80\n",
      "Epoch: 79 Train Loss: 0.3173 Val Loss: 0.3196 Acc: 0.8587 Pre: 0.8643 Recall: 0.8383 F1: 0.8511 Train AUC: 0.9374 Val AUC: 0.9362 Time: 14.62\n",
      "Epoch: 80 Train Loss: 0.3104 Val Loss: 0.3205 Acc: 0.8605 Pre: 0.8621 Recall: 0.8459 F1: 0.8539 Train AUC: 0.9411 Val AUC: 0.9364 Time: 13.73\n",
      "Epoch: 81 Train Loss: 0.3124 Val Loss: 0.3139 Acc: 0.8605 Pre: 0.8765 Recall: 0.8271 F1: 0.8511 Train AUC: 0.9398 Val AUC: 0.9372 Time: 12.67\n",
      "Epoch: 82 Train Loss: 0.3255 Val Loss: 0.3097 Acc: 0.8514 Pre: 0.8770 Recall: 0.8045 F1: 0.8392 Train AUC: 0.9328 Val AUC: 0.9382 Time: 12.74\n",
      "Epoch: 83 Train Loss: 0.3013 Val Loss: 0.3086 Acc: 0.8478 Pre: 0.8760 Recall: 0.7970 F1: 0.8346 Train AUC: 0.9423 Val AUC: 0.9388 Time: 13.29\n",
      "Epoch: 84 Train Loss: 0.3108 Val Loss: 0.3078 Acc: 0.8478 Pre: 0.8760 Recall: 0.7970 F1: 0.8346 Train AUC: 0.9398 Val AUC: 0.9390 Time: 13.96\n",
      "Epoch: 85 Train Loss: 0.3099 Val Loss: 0.3083 Acc: 0.8569 Pre: 0.8785 Recall: 0.8158 F1: 0.8460 Train AUC: 0.9418 Val AUC: 0.9388 Time: 14.42\n",
      "Epoch: 86 Train Loss: 0.3041 Val Loss: 0.3144 Acc: 0.8587 Pre: 0.8643 Recall: 0.8383 F1: 0.8511 Train AUC: 0.9408 Val AUC: 0.9382 Time: 14.96\n",
      "Epoch: 87 Train Loss: 0.3184 Val Loss: 0.3204 Acc: 0.8605 Pre: 0.8593 Recall: 0.8496 F1: 0.8544 Train AUC: 0.9379 Val AUC: 0.9375 Time: 14.67\n",
      "Epoch: 88 Train Loss: 0.3091 Val Loss: 0.3116 Acc: 0.8605 Pre: 0.8706 Recall: 0.8346 F1: 0.8522 Train AUC: 0.9425 Val AUC: 0.9387 Time: 12.51\n",
      "Epoch: 89 Train Loss: 0.3148 Val Loss: 0.3065 Acc: 0.8551 Pre: 0.8811 Recall: 0.8083 F1: 0.8431 Train AUC: 0.9386 Val AUC: 0.9396 Time: 13.22\n",
      "Epoch: 90 Train Loss: 0.3068 Val Loss: 0.3079 Acc: 0.8496 Pre: 0.8765 Recall: 0.8008 F1: 0.8369 Train AUC: 0.9423 Val AUC: 0.9391 Time: 13.66\n",
      "Epoch: 91 Train Loss: 0.3088 Val Loss: 0.3092 Acc: 0.8496 Pre: 0.8735 Recall: 0.8045 F1: 0.8376 Train AUC: 0.9412 Val AUC: 0.9387 Time: 14.73\n",
      "Epoch: 92 Train Loss: 0.3094 Val Loss: 0.3108 Acc: 0.8551 Pre: 0.8750 Recall: 0.8158 F1: 0.8444 Train AUC: 0.9402 Val AUC: 0.9385 Time: 14.88\n",
      "Epoch: 93 Train Loss: 0.2972 Val Loss: 0.3104 Acc: 0.8659 Pre: 0.8840 Recall: 0.8308 F1: 0.8566 Train AUC: 0.9434 Val AUC: 0.9389 Time: 13.61\n",
      "Epoch: 94 Train Loss: 0.3091 Val Loss: 0.3100 Acc: 0.8678 Pre: 0.8814 Recall: 0.8383 F1: 0.8593 Train AUC: 0.9386 Val AUC: 0.9390 Time: 13.40\n",
      "Epoch: 95 Train Loss: 0.3026 Val Loss: 0.3038 Acc: 0.8678 Pre: 0.8939 Recall: 0.8233 F1: 0.8571 Train AUC: 0.9419 Val AUC: 0.9408 Time: 12.60\n",
      "Epoch: 96 Train Loss: 0.3093 Val Loss: 0.3016 Acc: 0.8641 Pre: 0.8930 Recall: 0.8158 F1: 0.8527 Train AUC: 0.9413 Val AUC: 0.9414 Time: 12.54\n",
      "Epoch: 97 Train Loss: 0.2957 Val Loss: 0.3003 Acc: 0.8641 Pre: 0.8963 Recall: 0.8120 F1: 0.8521 Train AUC: 0.9446 Val AUC: 0.9419 Time: 12.48\n",
      "Epoch: 98 Train Loss: 0.3001 Val Loss: 0.3022 Acc: 0.8641 Pre: 0.8963 Recall: 0.8120 F1: 0.8521 Train AUC: 0.9458 Val AUC: 0.9410 Time: 12.82\n",
      "Epoch: 99 Train Loss: 0.3017 Val Loss: 0.3055 Acc: 0.8659 Pre: 0.8902 Recall: 0.8233 F1: 0.8555 Train AUC: 0.9424 Val AUC: 0.9402 Time: 13.18\n",
      "Epoch: 100 Train Loss: 0.2932 Val Loss: 0.3063 Acc: 0.8714 Pre: 0.8884 Recall: 0.8383 F1: 0.8627 Train AUC: 0.9444 Val AUC: 0.9403 Time: 14.10\n",
      "Epoch: 101 Train Loss: 0.3127 Val Loss: 0.3064 Acc: 0.8696 Pre: 0.8880 Recall: 0.8346 F1: 0.8605 Train AUC: 0.9397 Val AUC: 0.9405 Time: 14.64\n",
      "Epoch: 102 Train Loss: 0.2966 Val Loss: 0.3011 Acc: 0.8641 Pre: 0.8963 Recall: 0.8120 F1: 0.8521 Train AUC: 0.9451 Val AUC: 0.9419 Time: 15.30\n",
      "Epoch: 103 Train Loss: 0.2998 Val Loss: 0.2977 Acc: 0.8641 Pre: 0.8963 Recall: 0.8120 F1: 0.8521 Train AUC: 0.9441 Val AUC: 0.9430 Time: 13.43\n",
      "Epoch: 104 Train Loss: 0.2989 Val Loss: 0.2963 Acc: 0.8605 Pre: 0.8954 Recall: 0.8045 F1: 0.8475 Train AUC: 0.9447 Val AUC: 0.9439 Time: 13.12\n",
      "Epoch: 105 Train Loss: 0.2893 Val Loss: 0.2962 Acc: 0.8641 Pre: 0.8963 Recall: 0.8120 F1: 0.8521 Train AUC: 0.9492 Val AUC: 0.9442 Time: 12.50\n",
      "Epoch: 106 Train Loss: 0.2908 Val Loss: 0.2971 Acc: 0.8641 Pre: 0.8963 Recall: 0.8120 F1: 0.8521 Train AUC: 0.9474 Val AUC: 0.9441 Time: 12.76\n",
      "Epoch: 107 Train Loss: 0.2948 Val Loss: 0.2984 Acc: 0.8659 Pre: 0.8967 Recall: 0.8158 F1: 0.8543 Train AUC: 0.9459 Val AUC: 0.9437 Time: 12.67\n",
      "Epoch: 108 Train Loss: 0.2913 Val Loss: 0.2972 Acc: 0.8641 Pre: 0.8963 Recall: 0.8120 F1: 0.8521 Train AUC: 0.9461 Val AUC: 0.9438 Time: 13.32\n",
      "Epoch: 109 Train Loss: 0.2907 Val Loss: 0.2960 Acc: 0.8641 Pre: 0.8963 Recall: 0.8120 F1: 0.8521 Train AUC: 0.9456 Val AUC: 0.9444 Time: 14.45\n",
      "Epoch: 110 Train Loss: 0.2924 Val Loss: 0.2953 Acc: 0.8659 Pre: 0.8934 Recall: 0.8195 F1: 0.8549 Train AUC: 0.9473 Val AUC: 0.9448 Time: 14.70\n",
      "Epoch: 111 Train Loss: 0.2851 Val Loss: 0.2937 Acc: 0.8659 Pre: 0.8967 Recall: 0.8158 F1: 0.8543 Train AUC: 0.9495 Val AUC: 0.9453 Time: 14.14\n",
      "Epoch: 112 Train Loss: 0.2814 Val Loss: 0.2925 Acc: 0.8678 Pre: 0.9004 Recall: 0.8158 F1: 0.8560 Train AUC: 0.9499 Val AUC: 0.9454 Time: 14.05\n",
      "Epoch: 113 Train Loss: 0.2868 Val Loss: 0.2931 Acc: 0.8678 Pre: 0.8971 Recall: 0.8195 F1: 0.8566 Train AUC: 0.9475 Val AUC: 0.9450 Time: 12.37\n",
      "Epoch: 114 Train Loss: 0.2827 Val Loss: 0.2940 Acc: 0.8678 Pre: 0.8939 Recall: 0.8233 F1: 0.8571 Train AUC: 0.9509 Val AUC: 0.9445 Time: 12.61\n",
      "Epoch: 115 Train Loss: 0.2858 Val Loss: 0.2919 Acc: 0.8659 Pre: 0.8934 Recall: 0.8195 F1: 0.8549 Train AUC: 0.9480 Val AUC: 0.9451 Time: 12.46\n",
      "Epoch: 116 Train Loss: 0.2824 Val Loss: 0.2903 Acc: 0.8696 Pre: 0.8975 Recall: 0.8233 F1: 0.8588 Train AUC: 0.9490 Val AUC: 0.9463 Time: 12.86\n",
      "Epoch: 117 Train Loss: 0.2828 Val Loss: 0.2890 Acc: 0.8714 Pre: 0.8980 Recall: 0.8271 F1: 0.8611 Train AUC: 0.9492 Val AUC: 0.9471 Time: 13.86\n",
      "Epoch: 118 Train Loss: 0.2837 Val Loss: 0.2863 Acc: 0.8678 Pre: 0.8971 Recall: 0.8195 F1: 0.8566 Train AUC: 0.9509 Val AUC: 0.9482 Time: 14.09\n",
      "Epoch: 119 Train Loss: 0.2895 Val Loss: 0.2853 Acc: 0.8678 Pre: 0.8971 Recall: 0.8195 F1: 0.8566 Train AUC: 0.9486 Val AUC: 0.9485 Time: 14.86\n",
      "Epoch: 120 Train Loss: 0.2793 Val Loss: 0.2845 Acc: 0.8659 Pre: 0.9000 Recall: 0.8120 F1: 0.8538 Train AUC: 0.9519 Val AUC: 0.9488 Time: 15.25\n",
      "Epoch: 121 Train Loss: 0.2781 Val Loss: 0.2839 Acc: 0.8678 Pre: 0.9004 Recall: 0.8158 F1: 0.8560 Train AUC: 0.9514 Val AUC: 0.9493 Time: 14.47\n",
      "Epoch: 122 Train Loss: 0.2853 Val Loss: 0.2849 Acc: 0.8732 Pre: 0.8984 Recall: 0.8308 F1: 0.8633 Train AUC: 0.9507 Val AUC: 0.9495 Time: 13.35\n",
      "Epoch: 123 Train Loss: 0.2794 Val Loss: 0.2821 Acc: 0.8732 Pre: 0.9016 Recall: 0.8271 F1: 0.8627 Train AUC: 0.9511 Val AUC: 0.9507 Time: 12.74\n",
      "Epoch: 124 Train Loss: 0.2817 Val Loss: 0.2804 Acc: 0.8732 Pre: 0.9050 Recall: 0.8233 F1: 0.8622 Train AUC: 0.9502 Val AUC: 0.9514 Time: 12.71\n",
      "Epoch: 125 Train Loss: 0.2896 Val Loss: 0.2795 Acc: 0.8696 Pre: 0.9042 Recall: 0.8158 F1: 0.8577 Train AUC: 0.9471 Val AUC: 0.9518 Time: 12.74\n",
      "Epoch: 126 Train Loss: 0.2708 Val Loss: 0.2798 Acc: 0.8659 Pre: 0.9034 Recall: 0.8083 F1: 0.8532 Train AUC: 0.9552 Val AUC: 0.9513 Time: 13.06\n",
      "Epoch: 127 Train Loss: 0.2844 Val Loss: 0.2835 Acc: 0.8678 Pre: 0.8939 Recall: 0.8233 F1: 0.8571 Train AUC: 0.9515 Val AUC: 0.9493 Time: 14.19\n",
      "Epoch: 128 Train Loss: 0.2664 Val Loss: 0.2909 Acc: 0.8678 Pre: 0.8845 Recall: 0.8346 F1: 0.8588 Train AUC: 0.9559 Val AUC: 0.9468 Time: 14.44\n",
      "Epoch: 129 Train Loss: 0.2656 Val Loss: 0.2955 Acc: 0.8750 Pre: 0.8833 Recall: 0.8534 F1: 0.8681 Train AUC: 0.9555 Val AUC: 0.9458 Time: 15.12\n",
      "Epoch: 130 Train Loss: 0.2720 Val Loss: 0.2884 Acc: 0.8659 Pre: 0.8902 Recall: 0.8233 F1: 0.8555 Train AUC: 0.9545 Val AUC: 0.9469 Time: 14.40\n",
      "Epoch: 131 Train Loss: 0.2701 Val Loss: 0.2859 Acc: 0.8696 Pre: 0.9042 Recall: 0.8158 F1: 0.8577 Train AUC: 0.9540 Val AUC: 0.9481 Time: 14.13\n",
      "Epoch: 132 Train Loss: 0.2726 Val Loss: 0.2832 Acc: 0.8678 Pre: 0.9072 Recall: 0.8083 F1: 0.8549 Train AUC: 0.9540 Val AUC: 0.9495 Time: 12.60\n",
      "Epoch: 133 Train Loss: 0.2814 Val Loss: 0.2837 Acc: 0.8678 Pre: 0.8876 Recall: 0.8308 F1: 0.8583 Train AUC: 0.9511 Val AUC: 0.9492 Time: 12.74\n",
      "Epoch: 134 Train Loss: 0.2734 Val Loss: 0.2891 Acc: 0.8786 Pre: 0.8842 Recall: 0.8609 F1: 0.8724 Train AUC: 0.9540 Val AUC: 0.9492 Time: 13.68\n",
      "Epoch: 135 Train Loss: 0.2787 Val Loss: 0.2817 Acc: 0.8732 Pre: 0.8920 Recall: 0.8383 F1: 0.8643 Train AUC: 0.9525 Val AUC: 0.9511 Time: 14.13\n",
      "Epoch: 136 Train Loss: 0.2699 Val Loss: 0.2776 Acc: 0.8659 Pre: 0.9068 Recall: 0.8045 F1: 0.8526 Train AUC: 0.9546 Val AUC: 0.9524 Time: 14.61\n",
      "Epoch: 137 Train Loss: 0.2691 Val Loss: 0.2765 Acc: 0.8678 Pre: 0.9072 Recall: 0.8083 F1: 0.8549 Train AUC: 0.9564 Val AUC: 0.9528 Time: 15.74\n",
      "Epoch: 138 Train Loss: 0.2697 Val Loss: 0.2793 Acc: 0.8804 Pre: 0.8968 Recall: 0.8496 F1: 0.8726 Train AUC: 0.9539 Val AUC: 0.9526 Time: 14.35\n",
      "Epoch: 139 Train Loss: 0.2686 Val Loss: 0.2809 Acc: 0.8786 Pre: 0.8872 Recall: 0.8571 F1: 0.8719 Train AUC: 0.9554 Val AUC: 0.9523 Time: 12.48\n",
      "Epoch: 140 Train Loss: 0.2712 Val Loss: 0.2806 Acc: 0.8696 Pre: 0.8880 Recall: 0.8346 F1: 0.8605 Train AUC: 0.9556 Val AUC: 0.9514 Time: 12.44\n",
      "Epoch: 141 Train Loss: 0.2656 Val Loss: 0.2810 Acc: 0.8641 Pre: 0.8898 Recall: 0.8195 F1: 0.8532 Train AUC: 0.9551 Val AUC: 0.9506 Time: 12.42\n",
      "Epoch: 142 Train Loss: 0.2624 Val Loss: 0.2813 Acc: 0.8641 Pre: 0.8996 Recall: 0.8083 F1: 0.8515 Train AUC: 0.9566 Val AUC: 0.9505 Time: 12.53\n",
      "Epoch: 143 Train Loss: 0.2702 Val Loss: 0.2815 Acc: 0.8659 Pre: 0.8902 Recall: 0.8233 F1: 0.8555 Train AUC: 0.9549 Val AUC: 0.9499 Time: 13.78\n",
      "Epoch: 144 Train Loss: 0.2559 Val Loss: 0.2883 Acc: 0.8750 Pre: 0.8863 Recall: 0.8496 F1: 0.8676 Train AUC: 0.9602 Val AUC: 0.9484 Time: 14.12\n",
      "Epoch: 145 Train Loss: 0.2671 Val Loss: 0.2860 Acc: 0.8822 Pre: 0.8880 Recall: 0.8647 F1: 0.8762 Train AUC: 0.9563 Val AUC: 0.9501 Time: 13.73\n",
      "Epoch: 146 Train Loss: 0.2747 Val Loss: 0.2731 Acc: 0.8678 Pre: 0.9038 Recall: 0.8120 F1: 0.8554 Train AUC: 0.9541 Val AUC: 0.9539 Time: 14.35\n",
      "Epoch: 147 Train Loss: 0.2722 Val Loss: 0.2730 Acc: 0.8605 Pre: 0.9056 Recall: 0.7932 F1: 0.8457 Train AUC: 0.9551 Val AUC: 0.9548 Time: 14.71\n",
      "Epoch: 148 Train Loss: 0.2791 Val Loss: 0.2748 Acc: 0.8678 Pre: 0.9004 Recall: 0.8158 F1: 0.8560 Train AUC: 0.9519 Val AUC: 0.9542 Time: 12.56\n",
      "Epoch: 149 Train Loss: 0.2706 Val Loss: 0.2781 Acc: 0.8804 Pre: 0.8937 Recall: 0.8534 F1: 0.8731 Train AUC: 0.9549 Val AUC: 0.9538 Time: 12.70\n",
      "Epoch: 150 Train Loss: 0.2727 Val Loss: 0.2769 Acc: 0.8768 Pre: 0.8867 Recall: 0.8534 F1: 0.8697 Train AUC: 0.9539 Val AUC: 0.9542 Time: 12.79\n",
      "Epoch: 151 Train Loss: 0.2562 Val Loss: 0.2718 Acc: 0.8696 Pre: 0.8975 Recall: 0.8233 F1: 0.8588 Train AUC: 0.9592 Val AUC: 0.9542 Time: 13.90\n",
      "Epoch: 152 Train Loss: 0.2688 Val Loss: 0.2743 Acc: 0.8659 Pre: 0.8967 Recall: 0.8158 F1: 0.8543 Train AUC: 0.9547 Val AUC: 0.9519 Time: 14.37\n",
      "Epoch: 153 Train Loss: 0.2686 Val Loss: 0.2804 Acc: 0.8714 Pre: 0.8884 Recall: 0.8383 F1: 0.8627 Train AUC: 0.9558 Val AUC: 0.9489 Time: 13.99\n",
      "Epoch: 154 Train Loss: 0.2605 Val Loss: 0.2873 Acc: 0.8750 Pre: 0.8774 Recall: 0.8609 F1: 0.8691 Train AUC: 0.9579 Val AUC: 0.9477 Time: 14.34\n",
      "Epoch: 155 Train Loss: 0.2615 Val Loss: 0.2793 Acc: 0.8714 Pre: 0.8824 Recall: 0.8459 F1: 0.8637 Train AUC: 0.9585 Val AUC: 0.9502 Time: 14.25\n",
      "Epoch: 156 Train Loss: 0.2664 Val Loss: 0.2732 Acc: 0.8659 Pre: 0.9000 Recall: 0.8120 F1: 0.8538 Train AUC: 0.9571 Val AUC: 0.9537 Time: 12.49\n",
      "Epoch: 157 Train Loss: 0.2481 Val Loss: 0.2745 Acc: 0.8605 Pre: 0.9056 Recall: 0.7932 F1: 0.8457 Train AUC: 0.9625 Val AUC: 0.9547 Time: 12.39\n",
      "Epoch: 158 Train Loss: 0.2695 Val Loss: 0.2763 Acc: 0.8750 Pre: 0.8988 Recall: 0.8346 F1: 0.8655 Train AUC: 0.9573 Val AUC: 0.9539 Time: 12.66\n",
      "Epoch: 159 Train Loss: 0.2569 Val Loss: 0.2849 Acc: 0.8786 Pre: 0.8755 Recall: 0.8722 F1: 0.8738 Train AUC: 0.9586 Val AUC: 0.9533 Time: 12.91\n",
      "Epoch: 160 Train Loss: 0.2668 Val Loss: 0.2776 Acc: 0.8768 Pre: 0.8779 Recall: 0.8647 F1: 0.8712 Train AUC: 0.9568 Val AUC: 0.9541 Time: 13.70\n",
      "Epoch: 161 Train Loss: 0.2573 Val Loss: 0.2705 Acc: 0.8696 Pre: 0.9008 Recall: 0.8195 F1: 0.8583 Train AUC: 0.9594 Val AUC: 0.9549 Time: 14.88\n",
      "Epoch: 162 Train Loss: 0.2594 Val Loss: 0.2709 Acc: 0.8641 Pre: 0.8963 Recall: 0.8120 F1: 0.8521 Train AUC: 0.9576 Val AUC: 0.9541 Time: 14.66\n",
      "Epoch: 163 Train Loss: 0.2521 Val Loss: 0.2737 Acc: 0.8659 Pre: 0.8871 Recall: 0.8271 F1: 0.8560 Train AUC: 0.9624 Val AUC: 0.9521 Time: 14.13\n",
      "Epoch: 164 Train Loss: 0.2604 Val Loss: 0.2754 Acc: 0.8732 Pre: 0.8828 Recall: 0.8496 F1: 0.8659 Train AUC: 0.9587 Val AUC: 0.9515 Time: 12.53\n",
      "Epoch: 165 Train Loss: 0.2551 Val Loss: 0.2760 Acc: 0.8732 Pre: 0.8798 Recall: 0.8534 F1: 0.8664 Train AUC: 0.9605 Val AUC: 0.9520 Time: 12.42\n",
      "Epoch: 166 Train Loss: 0.2588 Val Loss: 0.2702 Acc: 0.8678 Pre: 0.8814 Recall: 0.8383 F1: 0.8593 Train AUC: 0.9602 Val AUC: 0.9545 Time: 12.61\n",
      "Epoch: 167 Train Loss: 0.2536 Val Loss: 0.2685 Acc: 0.8696 Pre: 0.8911 Recall: 0.8308 F1: 0.8599 Train AUC: 0.9626 Val AUC: 0.9557 Time: 13.25\n",
      "Epoch: 168 Train Loss: 0.2545 Val Loss: 0.2692 Acc: 0.8714 Pre: 0.8884 Recall: 0.8383 F1: 0.8627 Train AUC: 0.9603 Val AUC: 0.9561 Time: 13.93\n",
      "Epoch: 169 Train Loss: 0.2478 Val Loss: 0.2706 Acc: 0.8750 Pre: 0.8863 Recall: 0.8496 F1: 0.8676 Train AUC: 0.9621 Val AUC: 0.9558 Time: 14.40\n",
      "Epoch: 170 Train Loss: 0.2524 Val Loss: 0.2746 Acc: 0.8750 Pre: 0.8774 Recall: 0.8609 F1: 0.8691 Train AUC: 0.9607 Val AUC: 0.9555 Time: 15.04\n",
      "Epoch: 171 Train Loss: 0.2557 Val Loss: 0.2759 Acc: 0.8786 Pre: 0.8842 Recall: 0.8609 F1: 0.8724 Train AUC: 0.9603 Val AUC: 0.9547 Time: 14.96\n",
      "Epoch: 172 Train Loss: 0.2460 Val Loss: 0.2726 Acc: 0.8750 Pre: 0.8863 Recall: 0.8496 F1: 0.8676 Train AUC: 0.9633 Val AUC: 0.9536 Time: 13.01\n",
      "Epoch: 173 Train Loss: 0.2472 Val Loss: 0.2723 Acc: 0.8641 Pre: 0.8866 Recall: 0.8233 F1: 0.8538 Train AUC: 0.9627 Val AUC: 0.9534 Time: 12.88\n",
      "Epoch: 174 Train Loss: 0.2542 Val Loss: 0.2708 Acc: 0.8659 Pre: 0.8840 Recall: 0.8308 F1: 0.8566 Train AUC: 0.9612 Val AUC: 0.9534 Time: 12.68\n",
      "Epoch: 175 Train Loss: 0.2458 Val Loss: 0.2680 Acc: 0.8696 Pre: 0.8760 Recall: 0.8496 F1: 0.8626 Train AUC: 0.9632 Val AUC: 0.9543 Time: 13.34\n",
      "Epoch: 176 Train Loss: 0.2548 Val Loss: 0.2670 Acc: 0.8696 Pre: 0.8760 Recall: 0.8496 F1: 0.8626 Train AUC: 0.9616 Val AUC: 0.9563 Time: 13.99\n",
      "Epoch: 177 Train Loss: 0.2534 Val Loss: 0.2641 Acc: 0.8750 Pre: 0.8774 Recall: 0.8609 F1: 0.8691 Train AUC: 0.9622 Val AUC: 0.9576 Time: 14.33\n",
      "Epoch: 178 Train Loss: 0.2384 Val Loss: 0.2637 Acc: 0.8714 Pre: 0.8980 Recall: 0.8271 F1: 0.8611 Train AUC: 0.9657 Val AUC: 0.9575 Time: 14.88\n",
      "Epoch: 179 Train Loss: 0.2447 Val Loss: 0.2670 Acc: 0.8714 Pre: 0.9046 Recall: 0.8195 F1: 0.8600 Train AUC: 0.9635 Val AUC: 0.9569 Time: 13.27\n",
      "Epoch: 180 Train Loss: 0.2578 Val Loss: 0.2690 Acc: 0.8768 Pre: 0.8837 Recall: 0.8571 F1: 0.8702 Train AUC: 0.9594 Val AUC: 0.9570 Time: 12.55\n",
      "Epoch: 181 Train Loss: 0.2453 Val Loss: 0.2744 Acc: 0.8768 Pre: 0.8722 Recall: 0.8722 F1: 0.8722 Train AUC: 0.9633 Val AUC: 0.9562 Time: 12.48\n",
      "Epoch: 182 Train Loss: 0.2537 Val Loss: 0.2689 Acc: 0.8768 Pre: 0.8837 Recall: 0.8571 F1: 0.8702 Train AUC: 0.9619 Val AUC: 0.9557 Time: 12.54\n",
      "Epoch: 183 Train Loss: 0.2458 Val Loss: 0.2659 Acc: 0.8696 Pre: 0.8911 Recall: 0.8308 F1: 0.8599 Train AUC: 0.9643 Val AUC: 0.9556 Time: 12.71\n",
      "Epoch: 184 Train Loss: 0.2447 Val Loss: 0.2666 Acc: 0.8659 Pre: 0.8967 Recall: 0.8158 F1: 0.8543 Train AUC: 0.9637 Val AUC: 0.9558 Time: 12.48\n",
      "Epoch: 185 Train Loss: 0.2547 Val Loss: 0.2689 Acc: 0.8714 Pre: 0.8764 Recall: 0.8534 F1: 0.8648 Train AUC: 0.9643 Val AUC: 0.9553 Time: 12.87\n",
      "Epoch: 186 Train Loss: 0.2401 Val Loss: 0.2836 Acc: 0.8714 Pre: 0.8571 Recall: 0.8797 F1: 0.8683 Train AUC: 0.9656 Val AUC: 0.9540 Time: 14.30\n",
      "Epoch: 187 Train Loss: 0.2534 Val Loss: 0.2685 Acc: 0.8750 Pre: 0.8833 Recall: 0.8534 F1: 0.8681 Train AUC: 0.9635 Val AUC: 0.9569 Time: 14.46\n",
      "Epoch: 188 Train Loss: 0.2381 Val Loss: 0.2684 Acc: 0.8587 Pre: 0.9017 Recall: 0.7932 F1: 0.8440 Train AUC: 0.9659 Val AUC: 0.9576 Time: 15.08\n",
      "Epoch: 189 Train Loss: 0.2476 Val Loss: 0.2649 Acc: 0.8714 Pre: 0.8980 Recall: 0.8271 F1: 0.8611 Train AUC: 0.9636 Val AUC: 0.9581 Time: 15.09\n",
      "Epoch: 190 Train Loss: 0.2490 Val Loss: 0.2707 Acc: 0.8786 Pre: 0.8783 Recall: 0.8684 F1: 0.8733 Train AUC: 0.9621 Val AUC: 0.9566 Time: 13.08\n",
      "Epoch: 191 Train Loss: 0.2469 Val Loss: 0.2807 Acc: 0.8732 Pre: 0.8630 Recall: 0.8759 F1: 0.8694 Train AUC: 0.9625 Val AUC: 0.9535 Time: 12.55\n",
      "Epoch: 192 Train Loss: 0.2454 Val Loss: 0.2724 Acc: 0.8750 Pre: 0.8745 Recall: 0.8647 F1: 0.8696 Train AUC: 0.9637 Val AUC: 0.9540 Time: 12.41\n",
      "Epoch: 193 Train Loss: 0.2440 Val Loss: 0.2639 Acc: 0.8714 Pre: 0.9046 Recall: 0.8195 F1: 0.8600 Train AUC: 0.9655 Val AUC: 0.9575 Time: 12.74\n",
      "Epoch: 194 Train Loss: 0.2452 Val Loss: 0.2615 Acc: 0.8750 Pre: 0.9053 Recall: 0.8271 F1: 0.8644 Train AUC: 0.9654 Val AUC: 0.9579 Time: 12.69\n",
      "Epoch: 195 Train Loss: 0.2359 Val Loss: 0.2641 Acc: 0.8750 Pre: 0.8803 Recall: 0.8571 F1: 0.8686 Train AUC: 0.9675 Val AUC: 0.9567 Time: 13.89\n",
      "Epoch: 196 Train Loss: 0.2421 Val Loss: 0.2757 Acc: 0.8822 Pre: 0.8736 Recall: 0.8835 F1: 0.8785 Train AUC: 0.9657 Val AUC: 0.9550 Time: 14.00\n",
      "Epoch: 197 Train Loss: 0.2390 Val Loss: 0.2685 Acc: 0.8768 Pre: 0.8779 Recall: 0.8647 F1: 0.8712 Train AUC: 0.9662 Val AUC: 0.9567 Time: 14.74\n",
      "Epoch: 198 Train Loss: 0.2347 Val Loss: 0.2610 Acc: 0.8768 Pre: 0.8898 Recall: 0.8496 F1: 0.8692 Train AUC: 0.9678 Val AUC: 0.9581 Time: 14.92\n",
      "Epoch: 199 Train Loss: 0.2331 Val Loss: 0.2582 Acc: 0.8750 Pre: 0.8988 Recall: 0.8346 F1: 0.8655 Train AUC: 0.9670 Val AUC: 0.9589 Time: 14.72\n",
      "Epoch: 200 Train Loss: 0.2356 Val Loss: 0.2578 Acc: 0.8750 Pre: 0.8988 Recall: 0.8346 F1: 0.8655 Train AUC: 0.9662 Val AUC: 0.9591 Time: 13.03\n",
      "Epoch: 201 Train Loss: 0.2242 Val Loss: 0.2624 Acc: 0.8750 Pre: 0.8803 Recall: 0.8571 F1: 0.8686 Train AUC: 0.9703 Val AUC: 0.9580 Time: 13.60\n",
      "Epoch: 202 Train Loss: 0.2414 Val Loss: 0.2634 Acc: 0.8696 Pre: 0.8702 Recall: 0.8571 F1: 0.8636 Train AUC: 0.9652 Val AUC: 0.9578 Time: 13.95\n",
      "Epoch: 203 Train Loss: 0.2335 Val Loss: 0.2623 Acc: 0.8714 Pre: 0.8794 Recall: 0.8496 F1: 0.8642 Train AUC: 0.9680 Val AUC: 0.9578 Time: 14.06\n",
      "Epoch: 204 Train Loss: 0.2309 Val Loss: 0.2577 Acc: 0.8696 Pre: 0.8975 Recall: 0.8233 F1: 0.8588 Train AUC: 0.9674 Val AUC: 0.9596 Time: 14.24\n",
      "Epoch: 205 Train Loss: 0.2373 Val Loss: 0.2583 Acc: 0.8732 Pre: 0.9050 Recall: 0.8233 F1: 0.8622 Train AUC: 0.9662 Val AUC: 0.9595 Time: 13.13\n",
      "Epoch: 206 Train Loss: 0.2330 Val Loss: 0.2581 Acc: 0.8768 Pre: 0.8898 Recall: 0.8496 F1: 0.8692 Train AUC: 0.9678 Val AUC: 0.9589 Time: 12.90\n",
      "Epoch: 207 Train Loss: 0.2288 Val Loss: 0.2672 Acc: 0.8768 Pre: 0.8750 Recall: 0.8684 F1: 0.8717 Train AUC: 0.9697 Val AUC: 0.9576 Time: 13.29\n",
      "Epoch: 208 Train Loss: 0.2397 Val Loss: 0.2613 Acc: 0.8786 Pre: 0.8812 Recall: 0.8647 F1: 0.8729 Train AUC: 0.9659 Val AUC: 0.9581 Time: 14.10\n",
      "Epoch: 209 Train Loss: 0.2302 Val Loss: 0.2603 Acc: 0.8786 Pre: 0.8933 Recall: 0.8496 F1: 0.8709 Train AUC: 0.9684 Val AUC: 0.9582 Time: 14.82\n",
      "Epoch: 210 Train Loss: 0.2269 Val Loss: 0.2604 Acc: 0.8786 Pre: 0.8933 Recall: 0.8496 F1: 0.8709 Train AUC: 0.9686 Val AUC: 0.9584 Time: 14.28\n",
      "Epoch: 211 Train Loss: 0.2338 Val Loss: 0.2648 Acc: 0.8768 Pre: 0.8779 Recall: 0.8647 F1: 0.8712 Train AUC: 0.9672 Val AUC: 0.9573 Time: 14.53\n",
      "Epoch: 212 Train Loss: 0.2367 Val Loss: 0.2619 Acc: 0.8732 Pre: 0.8712 Recall: 0.8647 F1: 0.8679 Train AUC: 0.9660 Val AUC: 0.9580 Time: 12.78\n",
      "Epoch: 213 Train Loss: 0.2386 Val Loss: 0.2565 Acc: 0.8696 Pre: 0.8760 Recall: 0.8496 F1: 0.8626 Train AUC: 0.9663 Val AUC: 0.9591 Time: 12.70\n",
      "Epoch: 214 Train Loss: 0.2280 Val Loss: 0.2575 Acc: 0.8750 Pre: 0.8924 Recall: 0.8421 F1: 0.8665 Train AUC: 0.9694 Val AUC: 0.9591 Time: 12.68\n",
      "Epoch: 215 Train Loss: 0.2222 Val Loss: 0.2615 Acc: 0.8732 Pre: 0.8798 Recall: 0.8534 F1: 0.8664 Train AUC: 0.9703 Val AUC: 0.9582 Time: 14.05\n",
      "Epoch: 216 Train Loss: 0.2252 Val Loss: 0.2671 Acc: 0.8750 Pre: 0.8745 Recall: 0.8647 F1: 0.8696 Train AUC: 0.9696 Val AUC: 0.9572 Time: 14.37\n",
      "Epoch: 217 Train Loss: 0.2176 Val Loss: 0.2632 Acc: 0.8768 Pre: 0.8779 Recall: 0.8647 F1: 0.8712 Train AUC: 0.9719 Val AUC: 0.9579 Time: 14.79\n",
      "Epoch: 218 Train Loss: 0.2271 Val Loss: 0.2591 Acc: 0.8750 Pre: 0.8833 Recall: 0.8534 F1: 0.8681 Train AUC: 0.9693 Val AUC: 0.9590 Time: 14.43\n",
      "Epoch: 219 Train Loss: 0.2249 Val Loss: 0.2565 Acc: 0.8714 Pre: 0.8854 Recall: 0.8421 F1: 0.8632 Train AUC: 0.9692 Val AUC: 0.9598 Time: 14.55\n",
      "Epoch: 220 Train Loss: 0.2263 Val Loss: 0.2567 Acc: 0.8714 Pre: 0.8794 Recall: 0.8496 F1: 0.8642 Train AUC: 0.9690 Val AUC: 0.9596 Time: 12.68\n",
      "Epoch: 221 Train Loss: 0.2185 Val Loss: 0.2616 Acc: 0.8696 Pre: 0.8702 Recall: 0.8571 F1: 0.8636 Train AUC: 0.9715 Val AUC: 0.9580 Time: 12.64\n",
      "Epoch: 222 Train Loss: 0.2293 Val Loss: 0.2604 Acc: 0.8750 Pre: 0.8863 Recall: 0.8496 F1: 0.8676 Train AUC: 0.9697 Val AUC: 0.9579 Time: 12.65\n",
      "Epoch: 223 Train Loss: 0.2339 Val Loss: 0.2622 Acc: 0.8786 Pre: 0.8872 Recall: 0.8571 F1: 0.8719 Train AUC: 0.9668 Val AUC: 0.9578 Time: 12.52\n",
      "Epoch: 224 Train Loss: 0.2224 Val Loss: 0.2610 Acc: 0.8804 Pre: 0.8906 Recall: 0.8571 F1: 0.8736 Train AUC: 0.9700 Val AUC: 0.9583 Time: 12.64\n",
      "Epoch: 225 Train Loss: 0.2226 Val Loss: 0.2560 Acc: 0.8768 Pre: 0.8898 Recall: 0.8496 F1: 0.8692 Train AUC: 0.9697 Val AUC: 0.9594 Time: 13.41\n",
      "Epoch: 226 Train Loss: 0.2146 Val Loss: 0.2508 Acc: 0.8786 Pre: 0.8933 Recall: 0.8496 F1: 0.8709 Train AUC: 0.9722 Val AUC: 0.9606 Time: 14.20\n",
      "Epoch: 227 Train Loss: 0.2203 Val Loss: 0.2510 Acc: 0.8804 Pre: 0.8817 Recall: 0.8684 F1: 0.8750 Train AUC: 0.9711 Val AUC: 0.9610 Time: 14.90\n",
      "Epoch: 228 Train Loss: 0.2266 Val Loss: 0.2515 Acc: 0.8714 Pre: 0.8794 Recall: 0.8496 F1: 0.8642 Train AUC: 0.9696 Val AUC: 0.9599 Time: 15.27\n",
      "Epoch: 229 Train Loss: 0.2161 Val Loss: 0.2547 Acc: 0.8678 Pre: 0.8784 Recall: 0.8421 F1: 0.8599 Train AUC: 0.9725 Val AUC: 0.9588 Time: 15.16\n",
      "Epoch: 230 Train Loss: 0.2240 Val Loss: 0.2569 Acc: 0.8678 Pre: 0.8784 Recall: 0.8421 F1: 0.8599 Train AUC: 0.9702 Val AUC: 0.9584 Time: 13.58\n",
      "Epoch: 231 Train Loss: 0.2201 Val Loss: 0.2614 Acc: 0.8822 Pre: 0.8764 Recall: 0.8797 F1: 0.8780 Train AUC: 0.9716 Val AUC: 0.9579 Time: 12.46\n",
      "Epoch: 232 Train Loss: 0.2247 Val Loss: 0.2618 Acc: 0.8859 Pre: 0.8830 Recall: 0.8797 F1: 0.8814 Train AUC: 0.9685 Val AUC: 0.9582 Time: 12.47\n",
      "Epoch: 233 Train Loss: 0.2119 Val Loss: 0.2604 Acc: 0.8841 Pre: 0.8826 Recall: 0.8759 F1: 0.8792 Train AUC: 0.9726 Val AUC: 0.9586 Time: 12.72\n",
      "Epoch: 234 Train Loss: 0.2255 Val Loss: 0.2553 Acc: 0.8786 Pre: 0.8902 Recall: 0.8534 F1: 0.8714 Train AUC: 0.9695 Val AUC: 0.9592 Time: 12.55\n",
      "Epoch: 235 Train Loss: 0.2263 Val Loss: 0.2511 Acc: 0.8768 Pre: 0.8867 Recall: 0.8534 F1: 0.8697 Train AUC: 0.9705 Val AUC: 0.9603 Time: 12.94\n",
      "Epoch: 236 Train Loss: 0.2177 Val Loss: 0.2557 Acc: 0.8859 Pre: 0.8745 Recall: 0.8910 F1: 0.8827 Train AUC: 0.9715 Val AUC: 0.9598 Time: 13.73\n",
      "Epoch: 237 Train Loss: 0.2163 Val Loss: 0.2646 Acc: 0.8859 Pre: 0.8718 Recall: 0.8947 F1: 0.8831 Train AUC: 0.9733 Val AUC: 0.9578 Time: 13.77\n",
      "Epoch: 238 Train Loss: 0.2167 Val Loss: 0.2624 Acc: 0.8696 Pre: 0.8849 Recall: 0.8383 F1: 0.8610 Train AUC: 0.9730 Val AUC: 0.9571 Time: 14.86\n",
      "Epoch: 239 Train Loss: 0.2183 Val Loss: 0.2630 Acc: 0.8696 Pre: 0.8819 Recall: 0.8421 F1: 0.8615 Train AUC: 0.9734 Val AUC: 0.9571 Time: 15.11\n",
      "Epoch: 240 Train Loss: 0.2158 Val Loss: 0.2708 Acc: 0.8877 Pre: 0.8696 Recall: 0.9023 F1: 0.8856 Train AUC: 0.9729 Val AUC: 0.9574 Time: 14.99\n",
      "Epoch: 241 Train Loss: 0.2192 Val Loss: 0.2571 Acc: 0.8877 Pre: 0.8750 Recall: 0.8947 F1: 0.8848 Train AUC: 0.9730 Val AUC: 0.9602 Time: 12.57\n",
      "Epoch: 242 Train Loss: 0.2138 Val Loss: 0.2476 Acc: 0.8641 Pre: 0.8930 Recall: 0.8158 F1: 0.8527 Train AUC: 0.9739 Val AUC: 0.9616 Time: 12.54\n",
      "Epoch: 243 Train Loss: 0.2189 Val Loss: 0.2500 Acc: 0.8623 Pre: 0.8926 Recall: 0.8120 F1: 0.8504 Train AUC: 0.9716 Val AUC: 0.9614 Time: 12.46\n",
      "Epoch: 244 Train Loss: 0.2149 Val Loss: 0.2535 Acc: 0.8768 Pre: 0.8750 Recall: 0.8684 F1: 0.8717 Train AUC: 0.9735 Val AUC: 0.9598 Time: 12.58\n",
      "Epoch: 245 Train Loss: 0.2129 Val Loss: 0.2711 Acc: 0.8804 Pre: 0.8597 Recall: 0.8985 F1: 0.8787 Train AUC: 0.9729 Val AUC: 0.9569 Time: 13.06\n",
      "Epoch: 246 Train Loss: 0.2122 Val Loss: 0.2655 Acc: 0.8859 Pre: 0.8773 Recall: 0.8872 F1: 0.8822 Train AUC: 0.9742 Val AUC: 0.9575 Time: 13.77\n",
      "Epoch: 247 Train Loss: 0.2189 Val Loss: 0.2584 Acc: 0.8786 Pre: 0.8842 Recall: 0.8609 F1: 0.8724 Train AUC: 0.9707 Val AUC: 0.9590 Time: 13.99\n",
      "Epoch: 248 Train Loss: 0.2042 Val Loss: 0.2524 Acc: 0.8804 Pre: 0.8846 Recall: 0.8647 F1: 0.8745 Train AUC: 0.9750 Val AUC: 0.9607 Time: 14.70\n",
      "Epoch: 249 Train Loss: 0.2107 Val Loss: 0.2603 Acc: 0.8913 Pre: 0.8705 Recall: 0.9098 F1: 0.8897 Train AUC: 0.9740 Val AUC: 0.9614 Time: 14.57\n",
      "Epoch: 250 Train Loss: 0.2178 Val Loss: 0.2502 Acc: 0.8877 Pre: 0.8750 Recall: 0.8947 F1: 0.8848 Train AUC: 0.9723 Val AUC: 0.9619 Time: 12.62\n",
      "Epoch: 251 Train Loss: 0.2123 Val Loss: 0.2523 Acc: 0.8659 Pre: 0.9000 Recall: 0.8120 F1: 0.8538 Train AUC: 0.9734 Val AUC: 0.9616 Time: 13.01\n",
      "Epoch: 252 Train Loss: 0.2232 Val Loss: 0.2520 Acc: 0.8678 Pre: 0.8784 Recall: 0.8421 F1: 0.8599 Train AUC: 0.9723 Val AUC: 0.9598 Time: 12.61\n",
      "Epoch: 253 Train Loss: 0.2176 Val Loss: 0.2585 Acc: 0.8786 Pre: 0.8672 Recall: 0.8835 F1: 0.8752 Train AUC: 0.9727 Val AUC: 0.9594 Time: 13.26\n",
      "Epoch: 254 Train Loss: 0.2188 Val Loss: 0.2530 Acc: 0.8877 Pre: 0.8750 Recall: 0.8947 F1: 0.8848 Train AUC: 0.9719 Val AUC: 0.9612 Time: 13.95\n",
      "Epoch: 255 Train Loss: 0.2056 Val Loss: 0.2467 Acc: 0.8804 Pre: 0.8968 Recall: 0.8496 F1: 0.8726 Train AUC: 0.9754 Val AUC: 0.9627 Time: 14.26\n",
      "Epoch: 256 Train Loss: 0.2090 Val Loss: 0.2497 Acc: 0.8804 Pre: 0.8968 Recall: 0.8496 F1: 0.8726 Train AUC: 0.9732 Val AUC: 0.9622 Time: 14.11\n",
      "Epoch: 257 Train Loss: 0.2147 Val Loss: 0.2574 Acc: 0.8913 Pre: 0.8787 Recall: 0.8985 F1: 0.8885 Train AUC: 0.9724 Val AUC: 0.9610 Time: 14.18\n",
      "Epoch: 258 Train Loss: 0.2115 Val Loss: 0.2546 Acc: 0.8895 Pre: 0.8782 Recall: 0.8947 F1: 0.8864 Train AUC: 0.9741 Val AUC: 0.9598 Time: 12.47\n",
      "Epoch: 259 Train Loss: 0.2137 Val Loss: 0.2497 Acc: 0.8732 Pre: 0.8889 Recall: 0.8421 F1: 0.8649 Train AUC: 0.9730 Val AUC: 0.9601 Time: 12.97\n",
      "Epoch: 260 Train Loss: 0.2092 Val Loss: 0.2506 Acc: 0.8750 Pre: 0.8924 Recall: 0.8421 F1: 0.8665 Train AUC: 0.9747 Val AUC: 0.9603 Time: 13.39\n",
      "Epoch: 261 Train Loss: 0.2107 Val Loss: 0.2588 Acc: 0.8877 Pre: 0.8835 Recall: 0.8835 F1: 0.8835 Train AUC: 0.9741 Val AUC: 0.9593 Time: 13.81\n",
      "Epoch: 262 Train Loss: 0.2059 Val Loss: 0.2648 Acc: 0.8913 Pre: 0.8759 Recall: 0.9023 F1: 0.8889 Train AUC: 0.9752 Val AUC: 0.9594 Time: 14.41\n",
      "Epoch: 263 Train Loss: 0.2101 Val Loss: 0.2568 Acc: 0.8859 Pre: 0.8859 Recall: 0.8759 F1: 0.8809 Train AUC: 0.9751 Val AUC: 0.9607 Time: 14.36\n",
      "Epoch: 264 Train Loss: 0.2122 Val Loss: 0.2559 Acc: 0.8750 Pre: 0.9020 Recall: 0.8308 F1: 0.8650 Train AUC: 0.9730 Val AUC: 0.9618 Time: 14.00\n",
      "Epoch: 265 Train Loss: 0.2110 Val Loss: 0.2504 Acc: 0.8841 Pre: 0.8885 Recall: 0.8684 F1: 0.8783 Train AUC: 0.9749 Val AUC: 0.9619 Time: 12.40\n",
      "Epoch: 266 Train Loss: 0.2067 Val Loss: 0.2718 Acc: 0.8750 Pre: 0.8385 Recall: 0.9173 F1: 0.8761 Train AUC: 0.9741 Val AUC: 0.9593 Time: 13.17\n",
      "Epoch: 267 Train Loss: 0.2157 Val Loss: 0.2596 Acc: 0.8841 Pre: 0.8633 Recall: 0.9023 F1: 0.8824 Train AUC: 0.9763 Val AUC: 0.9598 Time: 13.17\n",
      "Epoch: 268 Train Loss: 0.2060 Val Loss: 0.2517 Acc: 0.8714 Pre: 0.8854 Recall: 0.8421 F1: 0.8632 Train AUC: 0.9762 Val AUC: 0.9604 Time: 14.45\n",
      "Epoch: 269 Train Loss: 0.2082 Val Loss: 0.2542 Acc: 0.8768 Pre: 0.8929 Recall: 0.8459 F1: 0.8687 Train AUC: 0.9760 Val AUC: 0.9606 Time: 13.97\n",
      "Epoch: 270 Train Loss: 0.2055 Val Loss: 0.2520 Acc: 0.8877 Pre: 0.8893 Recall: 0.8759 F1: 0.8826 Train AUC: 0.9746 Val AUC: 0.9619 Time: 14.24\n",
      "Epoch: 271 Train Loss: 0.2032 Val Loss: 0.2627 Acc: 0.9004 Pre: 0.8781 Recall: 0.9211 F1: 0.8991 Train AUC: 0.9748 Val AUC: 0.9618 Time: 12.55\n",
      "Epoch: 272 Train Loss: 0.2016 Val Loss: 0.2609 Acc: 0.8931 Pre: 0.8764 Recall: 0.9060 F1: 0.8909 Train AUC: 0.9765 Val AUC: 0.9617 Time: 12.64\n",
      "Epoch: 273 Train Loss: 0.2031 Val Loss: 0.2523 Acc: 0.8859 Pre: 0.8949 Recall: 0.8647 F1: 0.8795 Train AUC: 0.9765 Val AUC: 0.9614 Time: 13.58\n",
      "Epoch: 274 Train Loss: 0.1970 Val Loss: 0.2595 Acc: 0.8732 Pre: 0.8952 Recall: 0.8346 F1: 0.8638 Train AUC: 0.9788 Val AUC: 0.9594 Time: 14.05\n",
      "Epoch: 275 Train Loss: 0.2104 Val Loss: 0.2643 Acc: 0.8895 Pre: 0.8839 Recall: 0.8872 F1: 0.8856 Train AUC: 0.9740 Val AUC: 0.9581 Time: 14.52\n",
      "Epoch: 276 Train Loss: 0.2017 Val Loss: 0.2689 Acc: 0.8841 Pre: 0.8633 Recall: 0.9023 F1: 0.8824 Train AUC: 0.9752 Val AUC: 0.9592 Time: 14.62\n",
      "Epoch: 277 Train Loss: 0.2023 Val Loss: 0.2529 Acc: 0.8967 Pre: 0.8746 Recall: 0.9173 F1: 0.8954 Train AUC: 0.9763 Val AUC: 0.9626 Time: 13.64\n",
      "Epoch: 278 Train Loss: 0.1986 Val Loss: 0.2383 Acc: 0.8822 Pre: 0.8941 Recall: 0.8571 F1: 0.8752 Train AUC: 0.9789 Val AUC: 0.9646 Time: 12.62\n",
      "Epoch: 279 Train Loss: 0.2096 Val Loss: 0.2369 Acc: 0.8804 Pre: 0.8937 Recall: 0.8534 F1: 0.8731 Train AUC: 0.9740 Val AUC: 0.9649 Time: 12.61\n",
      "Epoch: 280 Train Loss: 0.2019 Val Loss: 0.2406 Acc: 0.8949 Pre: 0.8881 Recall: 0.8947 F1: 0.8914 Train AUC: 0.9753 Val AUC: 0.9637 Time: 12.78\n",
      "Epoch: 281 Train Loss: 0.2066 Val Loss: 0.2475 Acc: 0.8913 Pre: 0.8759 Recall: 0.9023 F1: 0.8889 Train AUC: 0.9740 Val AUC: 0.9626 Time: 14.21\n",
      "Epoch: 282 Train Loss: 0.1917 Val Loss: 0.2504 Acc: 0.8859 Pre: 0.8718 Recall: 0.8947 F1: 0.8831 Train AUC: 0.9783 Val AUC: 0.9613 Time: 14.08\n",
      "Epoch: 283 Train Loss: 0.1951 Val Loss: 0.2477 Acc: 0.8877 Pre: 0.8778 Recall: 0.8910 F1: 0.8843 Train AUC: 0.9777 Val AUC: 0.9620 Time: 14.68\n",
      "Epoch: 284 Train Loss: 0.1965 Val Loss: 0.2438 Acc: 0.8859 Pre: 0.8980 Recall: 0.8609 F1: 0.8791 Train AUC: 0.9777 Val AUC: 0.9630 Time: 15.70\n",
      "Epoch: 285 Train Loss: 0.2039 Val Loss: 0.2450 Acc: 0.8822 Pre: 0.8880 Recall: 0.8647 F1: 0.8762 Train AUC: 0.9757 Val AUC: 0.9632 Time: 13.86\n",
      "Epoch: 286 Train Loss: 0.2010 Val Loss: 0.2556 Acc: 0.8931 Pre: 0.8791 Recall: 0.9023 F1: 0.8905 Train AUC: 0.9767 Val AUC: 0.9623 Time: 12.74\n",
      "Epoch: 287 Train Loss: 0.1938 Val Loss: 0.2570 Acc: 0.8986 Pre: 0.8860 Recall: 0.9060 F1: 0.8959 Train AUC: 0.9788 Val AUC: 0.9619 Time: 12.69\n",
      "Epoch: 288 Train Loss: 0.1988 Val Loss: 0.2533 Acc: 0.8877 Pre: 0.8864 Recall: 0.8797 F1: 0.8830 Train AUC: 0.9775 Val AUC: 0.9614 Time: 12.48\n",
      "Epoch: 289 Train Loss: 0.1972 Val Loss: 0.2512 Acc: 0.8895 Pre: 0.8868 Recall: 0.8835 F1: 0.8851 Train AUC: 0.9763 Val AUC: 0.9616 Time: 12.56\n",
      "Epoch: 290 Train Loss: 0.1991 Val Loss: 0.2508 Acc: 0.8949 Pre: 0.8881 Recall: 0.8947 F1: 0.8914 Train AUC: 0.9765 Val AUC: 0.9612 Time: 13.58\n",
      "Epoch: 291 Train Loss: 0.1955 Val Loss: 0.2539 Acc: 0.8859 Pre: 0.8638 Recall: 0.9060 F1: 0.8844 Train AUC: 0.9774 Val AUC: 0.9617 Time: 14.02\n",
      "Epoch: 292 Train Loss: 0.1990 Val Loss: 0.2383 Acc: 0.8913 Pre: 0.8843 Recall: 0.8910 F1: 0.8876 Train AUC: 0.9778 Val AUC: 0.9637 Time: 14.46\n",
      "Epoch: 293 Train Loss: 0.1996 Val Loss: 0.2375 Acc: 0.8859 Pre: 0.8949 Recall: 0.8647 F1: 0.8795 Train AUC: 0.9768 Val AUC: 0.9639 Time: 13.55\n",
      "Epoch: 294 Train Loss: 0.2009 Val Loss: 0.2494 Acc: 0.9022 Pre: 0.8897 Recall: 0.9098 F1: 0.8996 Train AUC: 0.9767 Val AUC: 0.9629 Time: 14.27\n",
      "Epoch: 295 Train Loss: 0.1881 Val Loss: 0.2611 Acc: 0.9022 Pre: 0.8897 Recall: 0.9098 F1: 0.8996 Train AUC: 0.9787 Val AUC: 0.9617 Time: 13.55\n",
      "Epoch: 296 Train Loss: 0.1905 Val Loss: 0.2594 Acc: 0.8967 Pre: 0.8885 Recall: 0.8985 F1: 0.8935 Train AUC: 0.9783 Val AUC: 0.9621 Time: 12.42\n",
      "Epoch: 297 Train Loss: 0.1911 Val Loss: 0.2558 Acc: 0.8913 Pre: 0.8872 Recall: 0.8872 F1: 0.8872 Train AUC: 0.9778 Val AUC: 0.9617 Time: 13.43\n",
      "Epoch: 298 Train Loss: 0.1920 Val Loss: 0.2473 Acc: 0.8822 Pre: 0.8764 Recall: 0.8797 F1: 0.8780 Train AUC: 0.9776 Val AUC: 0.9627 Time: 13.26\n",
      "Epoch: 299 Train Loss: 0.1933 Val Loss: 0.2451 Acc: 0.8895 Pre: 0.8755 Recall: 0.8985 F1: 0.8868 Train AUC: 0.9792 Val AUC: 0.9626 Time: 14.25\n",
      "Epoch: 300 Train Loss: 0.1904 Val Loss: 0.2430 Acc: 0.8913 Pre: 0.8759 Recall: 0.9023 F1: 0.8889 Train AUC: 0.9792 Val AUC: 0.9629 Time: 14.58\n",
      "Epoch: 301 Train Loss: 0.1903 Val Loss: 0.2475 Acc: 0.8931 Pre: 0.8683 Recall: 0.9173 F1: 0.8921 Train AUC: 0.9791 Val AUC: 0.9630 Time: 15.25\n",
      "Epoch: 302 Train Loss: 0.1926 Val Loss: 0.2434 Acc: 0.9004 Pre: 0.8836 Recall: 0.9135 F1: 0.8983 Train AUC: 0.9789 Val AUC: 0.9642 Time: 13.12\n",
      "Epoch: 303 Train Loss: 0.1923 Val Loss: 0.2458 Acc: 0.8986 Pre: 0.8918 Recall: 0.8985 F1: 0.8951 Train AUC: 0.9784 Val AUC: 0.9637 Time: 12.36\n",
      "Epoch: 304 Train Loss: 0.1961 Val Loss: 0.2497 Acc: 0.8913 Pre: 0.8872 Recall: 0.8872 F1: 0.8872 Train AUC: 0.9764 Val AUC: 0.9625 Time: 12.87\n",
      "Epoch: 305 Train Loss: 0.1888 Val Loss: 0.2635 Acc: 0.8877 Pre: 0.8723 Recall: 0.8985 F1: 0.8852 Train AUC: 0.9796 Val AUC: 0.9606 Time: 12.95\n",
      "Epoch: 306 Train Loss: 0.1912 Val Loss: 0.2617 Acc: 0.8895 Pre: 0.8727 Recall: 0.9023 F1: 0.8872 Train AUC: 0.9780 Val AUC: 0.9612 Time: 13.87\n",
      "Epoch: 307 Train Loss: 0.1930 Val Loss: 0.2438 Acc: 0.8877 Pre: 0.8835 Recall: 0.8835 F1: 0.8835 Train AUC: 0.9786 Val AUC: 0.9640 Time: 14.26\n",
      "Epoch: 308 Train Loss: 0.1857 Val Loss: 0.2378 Acc: 0.8967 Pre: 0.8856 Recall: 0.9023 F1: 0.8939 Train AUC: 0.9794 Val AUC: 0.9654 Time: 14.56\n",
      "Epoch: 309 Train Loss: 0.1853 Val Loss: 0.2369 Acc: 0.9004 Pre: 0.8864 Recall: 0.9098 F1: 0.8980 Train AUC: 0.9797 Val AUC: 0.9654 Time: 14.37\n",
      "Epoch: 310 Train Loss: 0.1961 Val Loss: 0.2453 Acc: 0.8967 Pre: 0.8746 Recall: 0.9173 F1: 0.8954 Train AUC: 0.9765 Val AUC: 0.9641 Time: 12.30\n",
      "Epoch: 311 Train Loss: 0.1845 Val Loss: 0.2530 Acc: 0.8913 Pre: 0.8679 Recall: 0.9135 F1: 0.8901 Train AUC: 0.9797 Val AUC: 0.9622 Time: 12.46\n",
      "Epoch: 312 Train Loss: 0.1935 Val Loss: 0.2459 Acc: 0.8877 Pre: 0.8806 Recall: 0.8872 F1: 0.8839 Train AUC: 0.9784 Val AUC: 0.9621 Time: 12.76\n",
      "Epoch: 313 Train Loss: 0.1950 Val Loss: 0.2426 Acc: 0.8841 Pre: 0.8945 Recall: 0.8609 F1: 0.8774 Train AUC: 0.9769 Val AUC: 0.9629 Time: 13.59\n",
      "Epoch: 314 Train Loss: 0.1970 Val Loss: 0.2458 Acc: 0.8967 Pre: 0.8773 Recall: 0.9135 F1: 0.8950 Train AUC: 0.9794 Val AUC: 0.9631 Time: 14.04\n",
      "Epoch: 315 Train Loss: 0.1844 Val Loss: 0.2609 Acc: 0.9040 Pre: 0.8817 Recall: 0.9248 F1: 0.9028 Train AUC: 0.9803 Val AUC: 0.9625 Time: 14.56\n",
      "Epoch: 316 Train Loss: 0.1842 Val Loss: 0.2528 Acc: 0.8895 Pre: 0.8810 Recall: 0.8910 F1: 0.8860 Train AUC: 0.9812 Val AUC: 0.9630 Time: 14.70\n",
      "Epoch: 317 Train Loss: 0.1846 Val Loss: 0.2458 Acc: 0.8877 Pre: 0.8893 Recall: 0.8759 F1: 0.8826 Train AUC: 0.9805 Val AUC: 0.9637 Time: 14.63\n",
      "Epoch: 318 Train Loss: 0.1876 Val Loss: 0.2422 Acc: 0.8859 Pre: 0.8801 Recall: 0.8835 F1: 0.8818 Train AUC: 0.9804 Val AUC: 0.9643 Time: 12.60\n",
      "Epoch: 319 Train Loss: 0.1862 Val Loss: 0.2518 Acc: 0.8986 Pre: 0.8750 Recall: 0.9211 F1: 0.8974 Train AUC: 0.9801 Val AUC: 0.9647 Time: 12.53\n",
      "Epoch: 320 Train Loss: 0.1854 Val Loss: 0.2427 Acc: 0.9076 Pre: 0.8909 Recall: 0.9211 F1: 0.9057 Train AUC: 0.9807 Val AUC: 0.9650 Time: 12.41\n",
      "Epoch: 321 Train Loss: 0.1942 Val Loss: 0.2336 Acc: 0.8931 Pre: 0.8996 Recall: 0.8759 F1: 0.8876 Train AUC: 0.9769 Val AUC: 0.9660 Time: 12.54\n",
      "Epoch: 322 Train Loss: 0.1841 Val Loss: 0.2340 Acc: 0.8986 Pre: 0.8947 Recall: 0.8947 F1: 0.8947 Train AUC: 0.9804 Val AUC: 0.9660 Time: 13.50\n",
      "Epoch: 323 Train Loss: 0.1893 Val Loss: 0.2403 Acc: 0.8931 Pre: 0.8606 Recall: 0.9286 F1: 0.8933 Train AUC: 0.9784 Val AUC: 0.9656 Time: 13.97\n",
      "Epoch: 324 Train Loss: 0.1804 Val Loss: 0.2395 Acc: 0.8949 Pre: 0.8662 Recall: 0.9248 F1: 0.8945 Train AUC: 0.9813 Val AUC: 0.9658 Time: 14.64\n",
      "Epoch: 325 Train Loss: 0.1904 Val Loss: 0.2386 Acc: 0.8967 Pre: 0.8856 Recall: 0.9023 F1: 0.8939 Train AUC: 0.9790 Val AUC: 0.9654 Time: 14.91\n",
      "Epoch: 326 Train Loss: 0.1804 Val Loss: 0.2473 Acc: 0.8949 Pre: 0.8939 Recall: 0.8872 F1: 0.8906 Train AUC: 0.9813 Val AUC: 0.9634 Time: 13.69\n",
      "Epoch: 327 Train Loss: 0.1819 Val Loss: 0.2560 Acc: 0.9004 Pre: 0.8781 Recall: 0.9211 F1: 0.8991 Train AUC: 0.9807 Val AUC: 0.9633 Time: 12.45\n",
      "Epoch: 328 Train Loss: 0.1857 Val Loss: 0.2478 Acc: 0.9058 Pre: 0.8794 Recall: 0.9323 F1: 0.9051 Train AUC: 0.9799 Val AUC: 0.9651 Time: 12.60\n",
      "Epoch: 329 Train Loss: 0.1841 Val Loss: 0.2307 Acc: 0.9022 Pre: 0.8869 Recall: 0.9135 F1: 0.9000 Train AUC: 0.9813 Val AUC: 0.9672 Time: 12.80\n",
      "Epoch: 330 Train Loss: 0.1872 Val Loss: 0.2257 Acc: 0.9058 Pre: 0.9053 Recall: 0.8985 F1: 0.9019 Train AUC: 0.9793 Val AUC: 0.9680 Time: 13.43\n",
      "Epoch: 331 Train Loss: 0.1795 Val Loss: 0.2336 Acc: 0.8913 Pre: 0.8843 Recall: 0.8910 F1: 0.8876 Train AUC: 0.9822 Val AUC: 0.9660 Time: 14.10\n",
      "Epoch: 332 Train Loss: 0.1833 Val Loss: 0.2555 Acc: 0.8931 Pre: 0.8710 Recall: 0.9135 F1: 0.8917 Train AUC: 0.9804 Val AUC: 0.9633 Time: 14.56\n",
      "Epoch: 333 Train Loss: 0.1822 Val Loss: 0.2455 Acc: 0.8967 Pre: 0.8856 Recall: 0.9023 F1: 0.8939 Train AUC: 0.9809 Val AUC: 0.9643 Time: 14.64\n",
      "Epoch: 334 Train Loss: 0.1726 Val Loss: 0.2404 Acc: 0.8931 Pre: 0.8935 Recall: 0.8835 F1: 0.8885 Train AUC: 0.9823 Val AUC: 0.9658 Time: 13.02\n",
      "Epoch: 335 Train Loss: 0.1769 Val Loss: 0.2358 Acc: 0.8913 Pre: 0.8872 Recall: 0.8872 F1: 0.8872 Train AUC: 0.9807 Val AUC: 0.9663 Time: 12.78\n",
      "Epoch: 336 Train Loss: 0.1757 Val Loss: 0.2346 Acc: 0.9040 Pre: 0.8817 Recall: 0.9248 F1: 0.9028 Train AUC: 0.9816 Val AUC: 0.9670 Time: 13.25\n",
      "Epoch: 337 Train Loss: 0.1860 Val Loss: 0.2492 Acc: 0.9094 Pre: 0.8803 Recall: 0.9398 F1: 0.9091 Train AUC: 0.9817 Val AUC: 0.9658 Time: 13.70\n",
      "Epoch: 338 Train Loss: 0.1775 Val Loss: 0.2450 Acc: 0.8913 Pre: 0.8843 Recall: 0.8910 F1: 0.8876 Train AUC: 0.9833 Val AUC: 0.9647 Time: 14.15\n",
      "Epoch: 339 Train Loss: 0.1745 Val Loss: 0.2537 Acc: 0.8913 Pre: 0.8843 Recall: 0.8910 F1: 0.8876 Train AUC: 0.9821 Val AUC: 0.9629 Time: 13.85\n",
      "Epoch: 340 Train Loss: 0.1803 Val Loss: 0.2522 Acc: 0.8949 Pre: 0.8796 Recall: 0.9060 F1: 0.8926 Train AUC: 0.9808 Val AUC: 0.9642 Time: 13.59\n",
      "Epoch: 341 Train Loss: 0.1770 Val Loss: 0.2457 Acc: 0.8967 Pre: 0.8746 Recall: 0.9173 F1: 0.8954 Train AUC: 0.9817 Val AUC: 0.9651 Time: 12.69\n",
      "Epoch: 342 Train Loss: 0.1739 Val Loss: 0.2337 Acc: 0.8986 Pre: 0.8889 Recall: 0.9023 F1: 0.8955 Train AUC: 0.9830 Val AUC: 0.9667 Time: 12.84\n",
      "Epoch: 343 Train Loss: 0.1749 Val Loss: 0.2385 Acc: 0.8967 Pre: 0.8943 Recall: 0.8910 F1: 0.8927 Train AUC: 0.9825 Val AUC: 0.9661 Time: 14.46\n",
      "Epoch: 344 Train Loss: 0.1689 Val Loss: 0.2442 Acc: 0.8895 Pre: 0.8810 Recall: 0.8910 F1: 0.8860 Train AUC: 0.9839 Val AUC: 0.9657 Time: 13.99\n",
      "Epoch: 345 Train Loss: 0.1683 Val Loss: 0.2596 Acc: 0.9022 Pre: 0.8813 Recall: 0.9211 F1: 0.9007 Train AUC: 0.9843 Val AUC: 0.9643 Time: 14.53\n",
      "Epoch: 346 Train Loss: 0.1872 Val Loss: 0.2422 Acc: 0.8986 Pre: 0.8860 Recall: 0.9060 F1: 0.8959 Train AUC: 0.9809 Val AUC: 0.9661 Time: 14.65\n",
      "Epoch: 347 Train Loss: 0.1694 Val Loss: 0.2301 Acc: 0.9004 Pre: 0.8981 Recall: 0.8947 F1: 0.8964 Train AUC: 0.9831 Val AUC: 0.9682 Time: 14.65\n",
      "Epoch: 348 Train Loss: 0.1804 Val Loss: 0.2310 Acc: 0.9022 Pre: 0.8813 Recall: 0.9211 F1: 0.9007 Train AUC: 0.9802 Val AUC: 0.9678 Time: 14.91\n",
      "Epoch: 349 Train Loss: 0.1716 Val Loss: 0.2322 Acc: 0.8986 Pre: 0.8832 Recall: 0.9098 F1: 0.8963 Train AUC: 0.9825 Val AUC: 0.9675 Time: 12.58\n",
      "Epoch: 350 Train Loss: 0.1666 Val Loss: 0.2404 Acc: 0.8895 Pre: 0.8755 Recall: 0.8985 F1: 0.8868 Train AUC: 0.9848 Val AUC: 0.9652 Time: 12.59\n",
      "Epoch: 351 Train Loss: 0.1653 Val Loss: 0.2413 Acc: 0.8895 Pre: 0.8727 Recall: 0.9023 F1: 0.8872 Train AUC: 0.9838 Val AUC: 0.9652 Time: 12.48\n",
      "Epoch: 352 Train Loss: 0.1722 Val Loss: 0.2355 Acc: 0.9004 Pre: 0.8951 Recall: 0.8985 F1: 0.8968 Train AUC: 0.9833 Val AUC: 0.9666 Time: 12.65\n",
      "Epoch: 353 Train Loss: 0.1745 Val Loss: 0.2444 Acc: 0.9094 Pre: 0.8857 Recall: 0.9323 F1: 0.9084 Train AUC: 0.9838 Val AUC: 0.9661 Time: 13.74\n",
      "Epoch: 354 Train Loss: 0.1715 Val Loss: 0.2545 Acc: 0.9076 Pre: 0.8772 Recall: 0.9398 F1: 0.9074 Train AUC: 0.9828 Val AUC: 0.9650 Time: 13.91\n",
      "Epoch: 355 Train Loss: 0.1729 Val Loss: 0.2355 Acc: 0.9112 Pre: 0.8945 Recall: 0.9248 F1: 0.9094 Train AUC: 0.9830 Val AUC: 0.9667 Time: 14.55\n",
      "Epoch: 356 Train Loss: 0.1730 Val Loss: 0.2306 Acc: 0.9058 Pre: 0.8963 Recall: 0.9098 F1: 0.9030 Train AUC: 0.9822 Val AUC: 0.9674 Time: 13.82\n",
      "Epoch: 357 Train Loss: 0.1677 Val Loss: 0.2403 Acc: 0.8967 Pre: 0.8800 Recall: 0.9098 F1: 0.8946 Train AUC: 0.9843 Val AUC: 0.9658 Time: 14.08\n",
      "Epoch: 358 Train Loss: 0.1670 Val Loss: 0.2439 Acc: 0.8949 Pre: 0.8741 Recall: 0.9135 F1: 0.8934 Train AUC: 0.9843 Val AUC: 0.9648 Time: 12.42\n",
      "Epoch: 359 Train Loss: 0.1676 Val Loss: 0.2400 Acc: 0.8931 Pre: 0.8819 Recall: 0.8985 F1: 0.8901 Train AUC: 0.9847 Val AUC: 0.9649 Time: 12.61\n",
      "Epoch: 360 Train Loss: 0.1677 Val Loss: 0.2421 Acc: 0.8931 Pre: 0.8819 Recall: 0.8985 F1: 0.8901 Train AUC: 0.9843 Val AUC: 0.9651 Time: 12.64\n",
      "Epoch: 361 Train Loss: 0.1652 Val Loss: 0.2480 Acc: 0.9112 Pre: 0.8834 Recall: 0.9398 F1: 0.9107 Train AUC: 0.9844 Val AUC: 0.9656 Time: 13.80\n",
      "Epoch: 362 Train Loss: 0.1659 Val Loss: 0.2524 Acc: 0.9112 Pre: 0.8780 Recall: 0.9474 F1: 0.9114 Train AUC: 0.9844 Val AUC: 0.9660 Time: 14.14\n",
      "Epoch: 363 Train Loss: 0.1698 Val Loss: 0.2282 Acc: 0.9094 Pre: 0.8942 Recall: 0.9211 F1: 0.9074 Train AUC: 0.9846 Val AUC: 0.9684 Time: 14.66\n",
      "Epoch: 364 Train Loss: 0.1588 Val Loss: 0.2289 Acc: 0.9076 Pre: 0.8996 Recall: 0.9098 F1: 0.9047 Train AUC: 0.9857 Val AUC: 0.9678 Time: 14.25\n",
      "Epoch: 365 Train Loss: 0.1631 Val Loss: 0.2365 Acc: 0.8967 Pre: 0.8828 Recall: 0.9060 F1: 0.8942 Train AUC: 0.9847 Val AUC: 0.9661 Time: 13.22\n",
      "Epoch: 366 Train Loss: 0.1604 Val Loss: 0.2357 Acc: 0.8986 Pre: 0.8804 Recall: 0.9135 F1: 0.8967 Train AUC: 0.9854 Val AUC: 0.9669 Time: 12.33\n",
      "Epoch: 367 Train Loss: 0.1622 Val Loss: 0.2361 Acc: 0.9004 Pre: 0.8836 Recall: 0.9135 F1: 0.8983 Train AUC: 0.9859 Val AUC: 0.9673 Time: 12.60\n",
      "Epoch: 368 Train Loss: 0.1635 Val Loss: 0.2324 Acc: 0.9004 Pre: 0.8836 Recall: 0.9135 F1: 0.8983 Train AUC: 0.9850 Val AUC: 0.9679 Time: 12.79\n",
      "Epoch: 369 Train Loss: 0.1666 Val Loss: 0.2279 Acc: 0.9058 Pre: 0.8963 Recall: 0.9098 F1: 0.9030 Train AUC: 0.9851 Val AUC: 0.9685 Time: 12.65\n",
      "Epoch: 370 Train Loss: 0.1618 Val Loss: 0.2271 Acc: 0.9022 Pre: 0.8813 Recall: 0.9211 F1: 0.9007 Train AUC: 0.9847 Val AUC: 0.9686 Time: 13.21\n",
      "Epoch: 371 Train Loss: 0.1587 Val Loss: 0.2355 Acc: 0.8986 Pre: 0.8804 Recall: 0.9135 F1: 0.8967 Train AUC: 0.9852 Val AUC: 0.9669 Time: 14.31\n",
      "Epoch: 372 Train Loss: 0.1603 Val Loss: 0.2420 Acc: 0.9004 Pre: 0.8893 Recall: 0.9060 F1: 0.8976 Train AUC: 0.9857 Val AUC: 0.9654 Time: 14.67\n",
      "Epoch: 373 Train Loss: 0.1709 Val Loss: 0.2439 Acc: 0.8949 Pre: 0.8741 Recall: 0.9135 F1: 0.8934 Train AUC: 0.9842 Val AUC: 0.9661 Time: 14.93\n",
      "Epoch: 374 Train Loss: 0.1620 Val Loss: 0.2366 Acc: 0.9094 Pre: 0.8830 Recall: 0.9361 F1: 0.9088 Train AUC: 0.9856 Val AUC: 0.9684 Time: 12.39\n",
      "Epoch: 375 Train Loss: 0.1660 Val Loss: 0.2350 Acc: 0.9167 Pre: 0.9044 Recall: 0.9248 F1: 0.9145 Train AUC: 0.9832 Val AUC: 0.9684 Time: 12.42\n",
      "Epoch: 376 Train Loss: 0.1691 Val Loss: 0.2252 Acc: 0.9185 Pre: 0.8989 Recall: 0.9361 F1: 0.9171 Train AUC: 0.9826 Val AUC: 0.9706 Time: 12.43\n",
      "Epoch: 377 Train Loss: 0.1659 Val Loss: 0.2355 Acc: 0.9058 Pre: 0.8741 Recall: 0.9398 F1: 0.9058 Train AUC: 0.9838 Val AUC: 0.9689 Time: 13.17\n",
      "Epoch: 378 Train Loss: 0.1677 Val Loss: 0.2317 Acc: 0.9076 Pre: 0.8881 Recall: 0.9248 F1: 0.9061 Train AUC: 0.9850 Val AUC: 0.9681 Time: 13.66\n",
      "Epoch: 379 Train Loss: 0.1550 Val Loss: 0.2311 Acc: 0.9058 Pre: 0.8993 Recall: 0.9060 F1: 0.9026 Train AUC: 0.9865 Val AUC: 0.9680 Time: 14.65\n",
      "Epoch: 380 Train Loss: 0.1604 Val Loss: 0.2366 Acc: 0.8986 Pre: 0.8777 Recall: 0.9173 F1: 0.8971 Train AUC: 0.9852 Val AUC: 0.9674 Time: 14.10\n",
      "Epoch: 381 Train Loss: 0.1479 Val Loss: 0.2404 Acc: 0.9058 Pre: 0.8821 Recall: 0.9286 F1: 0.9048 Train AUC: 0.9877 Val AUC: 0.9676 Time: 14.37\n",
      "Epoch: 382 Train Loss: 0.1595 Val Loss: 0.2271 Acc: 0.9076 Pre: 0.8938 Recall: 0.9173 F1: 0.9054 Train AUC: 0.9852 Val AUC: 0.9691 Time: 13.18\n",
      "Epoch: 383 Train Loss: 0.1557 Val Loss: 0.2207 Acc: 0.9221 Pre: 0.9055 Recall: 0.9361 F1: 0.9205 Train AUC: 0.9860 Val AUC: 0.9706 Time: 12.58\n",
      "Epoch: 384 Train Loss: 0.1605 Val Loss: 0.2240 Acc: 0.9130 Pre: 0.8893 Recall: 0.9361 F1: 0.9121 Train AUC: 0.9861 Val AUC: 0.9706 Time: 12.72\n",
      "Epoch: 385 Train Loss: 0.1512 Val Loss: 0.2338 Acc: 0.9094 Pre: 0.8776 Recall: 0.9436 F1: 0.9094 Train AUC: 0.9871 Val AUC: 0.9680 Time: 13.26\n",
      "Epoch: 386 Train Loss: 0.1512 Val Loss: 0.2369 Acc: 0.9022 Pre: 0.8869 Recall: 0.9135 F1: 0.9000 Train AUC: 0.9873 Val AUC: 0.9671 Time: 13.73\n",
      "Epoch: 387 Train Loss: 0.1518 Val Loss: 0.2340 Acc: 0.9094 Pre: 0.9186 Recall: 0.8910 F1: 0.9046 Train AUC: 0.9864 Val AUC: 0.9680 Time: 14.05\n",
      "Epoch: 388 Train Loss: 0.1623 Val Loss: 0.2292 Acc: 0.9076 Pre: 0.8909 Recall: 0.9211 F1: 0.9057 Train AUC: 0.9854 Val AUC: 0.9682 Time: 14.49\n",
      "Epoch: 389 Train Loss: 0.1568 Val Loss: 0.2225 Acc: 0.9130 Pre: 0.8921 Recall: 0.9323 F1: 0.9118 Train AUC: 0.9859 Val AUC: 0.9705 Time: 13.21\n",
      "Epoch: 390 Train Loss: 0.1503 Val Loss: 0.2204 Acc: 0.9149 Pre: 0.8925 Recall: 0.9361 F1: 0.9138 Train AUC: 0.9877 Val AUC: 0.9714 Time: 12.71\n",
      "Epoch: 391 Train Loss: 0.1556 Val Loss: 0.2211 Acc: 0.9130 Pre: 0.9098 Recall: 0.9098 F1: 0.9098 Train AUC: 0.9873 Val AUC: 0.9709 Time: 12.81\n",
      "Epoch: 392 Train Loss: 0.1490 Val Loss: 0.2334 Acc: 0.8967 Pre: 0.8800 Recall: 0.9098 F1: 0.8946 Train AUC: 0.9883 Val AUC: 0.9690 Time: 13.51\n",
      "Epoch: 393 Train Loss: 0.1568 Val Loss: 0.2452 Acc: 0.9022 Pre: 0.8732 Recall: 0.9323 F1: 0.9018 Train AUC: 0.9855 Val AUC: 0.9681 Time: 14.72\n",
      "Epoch: 394 Train Loss: 0.1567 Val Loss: 0.2352 Acc: 0.9040 Pre: 0.8711 Recall: 0.9398 F1: 0.9042 Train AUC: 0.9860 Val AUC: 0.9706 Time: 14.81\n",
      "Epoch: 395 Train Loss: 0.1656 Val Loss: 0.2125 Acc: 0.9185 Pre: 0.9170 Recall: 0.9135 F1: 0.9153 Train AUC: 0.9855 Val AUC: 0.9719 Time: 14.87\n",
      "Epoch: 396 Train Loss: 0.1610 Val Loss: 0.2259 Acc: 0.9185 Pre: 0.9048 Recall: 0.9286 F1: 0.9165 Train AUC: 0.9865 Val AUC: 0.9698 Time: 13.04\n",
      "Epoch: 397 Train Loss: 0.1454 Val Loss: 0.2491 Acc: 0.9040 Pre: 0.8737 Recall: 0.9361 F1: 0.9038 Train AUC: 0.9876 Val AUC: 0.9668 Time: 12.56\n",
      "Epoch: 398 Train Loss: 0.1538 Val Loss: 0.2297 Acc: 0.9094 Pre: 0.8857 Recall: 0.9323 F1: 0.9084 Train AUC: 0.9869 Val AUC: 0.9695 Time: 12.95\n",
      "Epoch: 399 Train Loss: 0.1467 Val Loss: 0.2138 Acc: 0.9185 Pre: 0.9234 Recall: 0.9060 F1: 0.9146 Train AUC: 0.9879 Val AUC: 0.9722 Time: 13.43\n",
      "Epoch: 400 Train Loss: 0.1567 Val Loss: 0.2119 Acc: 0.9149 Pre: 0.9071 Recall: 0.9173 F1: 0.9121 Train AUC: 0.9862 Val AUC: 0.9727 Time: 13.63\n",
      "Epoch: 401 Train Loss: 0.1523 Val Loss: 0.2212 Acc: 0.9112 Pre: 0.8945 Recall: 0.9248 F1: 0.9094 Train AUC: 0.9867 Val AUC: 0.9715 Time: 14.48\n",
      "Epoch: 402 Train Loss: 0.1418 Val Loss: 0.2386 Acc: 0.8967 Pre: 0.8693 Recall: 0.9248 F1: 0.8962 Train AUC: 0.9894 Val AUC: 0.9687 Time: 14.79\n",
      "Epoch: 403 Train Loss: 0.1532 Val Loss: 0.2407 Acc: 0.8949 Pre: 0.8714 Recall: 0.9173 F1: 0.8938 Train AUC: 0.9862 Val AUC: 0.9675 Time: 15.26\n",
      "Epoch: 404 Train Loss: 0.1498 Val Loss: 0.2323 Acc: 0.9076 Pre: 0.8909 Recall: 0.9211 F1: 0.9057 Train AUC: 0.9869 Val AUC: 0.9688 Time: 13.16\n",
      "Epoch: 405 Train Loss: 0.1508 Val Loss: 0.2213 Acc: 0.9221 Pre: 0.9084 Recall: 0.9323 F1: 0.9202 Train AUC: 0.9863 Val AUC: 0.9709 Time: 12.68\n",
      "Epoch: 406 Train Loss: 0.1520 Val Loss: 0.2270 Acc: 0.9130 Pre: 0.8865 Recall: 0.9398 F1: 0.9124 Train AUC: 0.9872 Val AUC: 0.9707 Time: 12.51\n",
      "Epoch: 407 Train Loss: 0.1516 Val Loss: 0.2318 Acc: 0.9058 Pre: 0.8849 Recall: 0.9248 F1: 0.9044 Train AUC: 0.9864 Val AUC: 0.9690 Time: 12.69\n",
      "Epoch: 408 Train Loss: 0.1505 Val Loss: 0.2337 Acc: 0.9004 Pre: 0.8951 Recall: 0.8985 F1: 0.8968 Train AUC: 0.9863 Val AUC: 0.9685 Time: 13.38\n",
      "Epoch: 409 Train Loss: 0.1545 Val Loss: 0.2274 Acc: 0.9130 Pre: 0.8949 Recall: 0.9286 F1: 0.9114 Train AUC: 0.9859 Val AUC: 0.9699 Time: 14.00\n",
      "Epoch: 410 Train Loss: 0.1438 Val Loss: 0.2233 Acc: 0.9130 Pre: 0.8921 Recall: 0.9323 F1: 0.9118 Train AUC: 0.9893 Val AUC: 0.9713 Time: 14.61\n",
      "Epoch: 411 Train Loss: 0.1552 Val Loss: 0.2225 Acc: 0.9221 Pre: 0.9084 Recall: 0.9323 F1: 0.9202 Train AUC: 0.9863 Val AUC: 0.9712 Time: 15.07\n",
      "Epoch: 412 Train Loss: 0.1414 Val Loss: 0.2331 Acc: 0.9094 Pre: 0.8830 Recall: 0.9361 F1: 0.9088 Train AUC: 0.9886 Val AUC: 0.9686 Time: 15.00\n",
      "Epoch: 413 Train Loss: 0.1454 Val Loss: 0.2281 Acc: 0.9112 Pre: 0.8917 Recall: 0.9286 F1: 0.9098 Train AUC: 0.9876 Val AUC: 0.9696 Time: 12.75\n",
      "Epoch: 414 Train Loss: 0.1472 Val Loss: 0.2264 Acc: 0.9076 Pre: 0.8772 Recall: 0.9398 F1: 0.9074 Train AUC: 0.9874 Val AUC: 0.9699 Time: 12.82\n",
      "Epoch: 415 Train Loss: 0.1433 Val Loss: 0.2171 Acc: 0.9130 Pre: 0.8978 Recall: 0.9248 F1: 0.9111 Train AUC: 0.9892 Val AUC: 0.9712 Time: 12.56\n",
      "Epoch: 416 Train Loss: 0.1496 Val Loss: 0.2261 Acc: 0.9094 Pre: 0.8942 Recall: 0.9211 F1: 0.9074 Train AUC: 0.9880 Val AUC: 0.9700 Time: 12.66\n",
      "Epoch: 417 Train Loss: 0.1434 Val Loss: 0.2430 Acc: 0.9004 Pre: 0.8728 Recall: 0.9286 F1: 0.8998 Train AUC: 0.9884 Val AUC: 0.9672 Time: 12.52\n",
      "Epoch: 418 Train Loss: 0.1470 Val Loss: 0.2373 Acc: 0.9022 Pre: 0.8732 Recall: 0.9323 F1: 0.9018 Train AUC: 0.9873 Val AUC: 0.9687 Time: 12.64\n",
      "Epoch: 419 Train Loss: 0.1408 Val Loss: 0.2141 Acc: 0.9167 Pre: 0.9104 Recall: 0.9173 F1: 0.9139 Train AUC: 0.9895 Val AUC: 0.9728 Time: 13.82\n",
      "Epoch: 420 Train Loss: 0.1367 Val Loss: 0.2091 Acc: 0.9185 Pre: 0.9108 Recall: 0.9211 F1: 0.9159 Train AUC: 0.9906 Val AUC: 0.9732 Time: 14.40\n",
      "Epoch: 421 Train Loss: 0.1569 Val Loss: 0.2244 Acc: 0.9076 Pre: 0.8853 Recall: 0.9286 F1: 0.9064 Train AUC: 0.9872 Val AUC: 0.9721 Time: 15.25\n",
      "Epoch: 422 Train Loss: 0.1413 Val Loss: 0.2294 Acc: 0.9076 Pre: 0.8853 Recall: 0.9286 F1: 0.9064 Train AUC: 0.9893 Val AUC: 0.9710 Time: 13.37\n",
      "Epoch: 423 Train Loss: 0.1515 Val Loss: 0.2257 Acc: 0.9112 Pre: 0.8945 Recall: 0.9248 F1: 0.9094 Train AUC: 0.9861 Val AUC: 0.9699 Time: 13.03\n",
      "Epoch: 424 Train Loss: 0.1537 Val Loss: 0.2264 Acc: 0.9167 Pre: 0.8957 Recall: 0.9361 F1: 0.9154 Train AUC: 0.9863 Val AUC: 0.9702 Time: 13.62\n",
      "Epoch: 425 Train Loss: 0.1437 Val Loss: 0.2355 Acc: 0.9094 Pre: 0.8857 Recall: 0.9323 F1: 0.9084 Train AUC: 0.9890 Val AUC: 0.9686 Time: 14.13\n",
      "Epoch: 426 Train Loss: 0.1489 Val Loss: 0.2262 Acc: 0.9149 Pre: 0.8925 Recall: 0.9361 F1: 0.9138 Train AUC: 0.9888 Val AUC: 0.9709 Time: 14.28\n",
      "Epoch: 427 Train Loss: 0.1514 Val Loss: 0.2164 Acc: 0.9149 Pre: 0.9132 Recall: 0.9098 F1: 0.9115 Train AUC: 0.9877 Val AUC: 0.9728 Time: 12.65\n",
      "Epoch: 428 Train Loss: 0.1538 Val Loss: 0.2288 Acc: 0.9058 Pre: 0.8849 Recall: 0.9248 F1: 0.9044 Train AUC: 0.9873 Val AUC: 0.9718 Time: 12.62\n",
      "Epoch: 429 Train Loss: 0.1457 Val Loss: 0.2471 Acc: 0.9058 Pre: 0.8715 Recall: 0.9436 F1: 0.9061 Train AUC: 0.9874 Val AUC: 0.9705 Time: 13.04\n",
      "Epoch: 430 Train Loss: 0.1456 Val Loss: 0.2206 Acc: 0.9076 Pre: 0.8853 Recall: 0.9286 F1: 0.9064 Train AUC: 0.9882 Val AUC: 0.9718 Time: 13.73\n",
      "Epoch: 431 Train Loss: 0.1400 Val Loss: 0.2236 Acc: 0.9130 Pre: 0.9291 Recall: 0.8872 F1: 0.9077 Train AUC: 0.9889 Val AUC: 0.9718 Time: 14.18\n",
      "Epoch: 432 Train Loss: 0.1654 Val Loss: 0.2261 Acc: 0.9058 Pre: 0.8963 Recall: 0.9098 F1: 0.9030 Train AUC: 0.9860 Val AUC: 0.9696 Time: 14.75\n",
      "Epoch: 433 Train Loss: 0.1507 Val Loss: 0.2336 Acc: 0.9022 Pre: 0.8681 Recall: 0.9398 F1: 0.9025 Train AUC: 0.9878 Val AUC: 0.9705 Time: 12.81\n",
      "Epoch: 434 Train Loss: 0.1322 Val Loss: 0.2341 Acc: 0.9040 Pre: 0.8790 Recall: 0.9286 F1: 0.9031 Train AUC: 0.9909 Val AUC: 0.9699 Time: 12.92\n",
      "Epoch: 435 Train Loss: 0.1406 Val Loss: 0.2255 Acc: 0.9022 Pre: 0.8841 Recall: 0.9173 F1: 0.9004 Train AUC: 0.9883 Val AUC: 0.9710 Time: 12.70\n",
      "Epoch: 436 Train Loss: 0.1399 Val Loss: 0.2137 Acc: 0.9130 Pre: 0.9037 Recall: 0.9173 F1: 0.9104 Train AUC: 0.9883 Val AUC: 0.9731 Time: 13.21\n",
      "Epoch: 437 Train Loss: 0.1428 Val Loss: 0.2166 Acc: 0.9167 Pre: 0.8873 Recall: 0.9474 F1: 0.9164 Train AUC: 0.9888 Val AUC: 0.9737 Time: 14.03\n",
      "Epoch: 438 Train Loss: 0.1363 Val Loss: 0.2273 Acc: 0.9149 Pre: 0.8925 Recall: 0.9361 F1: 0.9138 Train AUC: 0.9899 Val AUC: 0.9721 Time: 14.85\n",
      "Epoch: 439 Train Loss: 0.1328 Val Loss: 0.2247 Acc: 0.9185 Pre: 0.9018 Recall: 0.9323 F1: 0.9168 Train AUC: 0.9913 Val AUC: 0.9719 Time: 13.90\n",
      "Epoch: 440 Train Loss: 0.1401 Val Loss: 0.2192 Acc: 0.9221 Pre: 0.9176 Recall: 0.9211 F1: 0.9193 Train AUC: 0.9889 Val AUC: 0.9731 Time: 13.16\n",
      "Epoch: 441 Train Loss: 0.1392 Val Loss: 0.2188 Acc: 0.9203 Pre: 0.8936 Recall: 0.9474 F1: 0.9197 Train AUC: 0.9902 Val AUC: 0.9739 Time: 12.37\n",
      "Epoch: 442 Train Loss: 0.1388 Val Loss: 0.2234 Acc: 0.9076 Pre: 0.8799 Recall: 0.9361 F1: 0.9071 Train AUC: 0.9891 Val AUC: 0.9739 Time: 12.93\n",
      "Epoch: 443 Train Loss: 0.1419 Val Loss: 0.2108 Acc: 0.9112 Pre: 0.8974 Recall: 0.9211 F1: 0.9091 Train AUC: 0.9891 Val AUC: 0.9741 Time: 13.89\n",
      "Epoch: 444 Train Loss: 0.1477 Val Loss: 0.2145 Acc: 0.9112 Pre: 0.9033 Recall: 0.9135 F1: 0.9084 Train AUC: 0.9868 Val AUC: 0.9735 Time: 14.16\n",
      "Epoch: 445 Train Loss: 0.1457 Val Loss: 0.2579 Acc: 0.8949 Pre: 0.8636 Recall: 0.9286 F1: 0.8949 Train AUC: 0.9882 Val AUC: 0.9681 Time: 14.59\n",
      "Epoch: 446 Train Loss: 0.1440 Val Loss: 0.2460 Acc: 0.9022 Pre: 0.8786 Recall: 0.9248 F1: 0.9011 Train AUC: 0.9893 Val AUC: 0.9684 Time: 14.34\n",
      "Epoch: 447 Train Loss: 0.1430 Val Loss: 0.2143 Acc: 0.9149 Pre: 0.9195 Recall: 0.9023 F1: 0.9108 Train AUC: 0.9882 Val AUC: 0.9740 Time: 13.18\n",
      "Epoch: 448 Train Loss: 0.1369 Val Loss: 0.1998 Acc: 0.9293 Pre: 0.9251 Recall: 0.9286 F1: 0.9268 Train AUC: 0.9897 Val AUC: 0.9768 Time: 12.60\n",
      "Epoch: 449 Train Loss: 0.1378 Val Loss: 0.2026 Acc: 0.9221 Pre: 0.9084 Recall: 0.9323 F1: 0.9202 Train AUC: 0.9896 Val AUC: 0.9758 Time: 12.72\n",
      "Epoch: 450 Train Loss: 0.1482 Val Loss: 0.2126 Acc: 0.9149 Pre: 0.9011 Recall: 0.9248 F1: 0.9128 Train AUC: 0.9880 Val AUC: 0.9739 Time: 12.70\n",
      "Epoch: 451 Train Loss: 0.1370 Val Loss: 0.2236 Acc: 0.9076 Pre: 0.8967 Recall: 0.9135 F1: 0.9050 Train AUC: 0.9890 Val AUC: 0.9717 Time: 12.69\n",
      "Epoch: 452 Train Loss: 0.1449 Val Loss: 0.2286 Acc: 0.9094 Pre: 0.8857 Recall: 0.9323 F1: 0.9084 Train AUC: 0.9877 Val AUC: 0.9717 Time: 12.65\n",
      "Epoch: 453 Train Loss: 0.1339 Val Loss: 0.2179 Acc: 0.9167 Pre: 0.8986 Recall: 0.9323 F1: 0.9151 Train AUC: 0.9894 Val AUC: 0.9734 Time: 12.87\n",
      "Epoch: 454 Train Loss: 0.1396 Val Loss: 0.2125 Acc: 0.9221 Pre: 0.9055 Recall: 0.9361 F1: 0.9205 Train AUC: 0.9890 Val AUC: 0.9739 Time: 14.19\n",
      "Epoch: 455 Train Loss: 0.1372 Val Loss: 0.2136 Acc: 0.9239 Pre: 0.9088 Recall: 0.9361 F1: 0.9222 Train AUC: 0.9904 Val AUC: 0.9742 Time: 14.66\n",
      "Epoch: 456 Train Loss: 0.1246 Val Loss: 0.2183 Acc: 0.9130 Pre: 0.8949 Recall: 0.9286 F1: 0.9114 Train AUC: 0.9914 Val AUC: 0.9734 Time: 15.37\n",
      "Epoch: 457 Train Loss: 0.1288 Val Loss: 0.2300 Acc: 0.9076 Pre: 0.8799 Recall: 0.9361 F1: 0.9071 Train AUC: 0.9905 Val AUC: 0.9720 Time: 13.63\n",
      "Epoch: 458 Train Loss: 0.1350 Val Loss: 0.2197 Acc: 0.9130 Pre: 0.8921 Recall: 0.9323 F1: 0.9118 Train AUC: 0.9901 Val AUC: 0.9732 Time: 12.64\n",
      "Epoch: 459 Train Loss: 0.1382 Val Loss: 0.2071 Acc: 0.9221 Pre: 0.9176 Recall: 0.9211 F1: 0.9193 Train AUC: 0.9899 Val AUC: 0.9750 Time: 12.98\n",
      "Epoch: 460 Train Loss: 0.1378 Val Loss: 0.2106 Acc: 0.9203 Pre: 0.9111 Recall: 0.9248 F1: 0.9179 Train AUC: 0.9896 Val AUC: 0.9746 Time: 12.66\n",
      "Epoch: 461 Train Loss: 0.1335 Val Loss: 0.2297 Acc: 0.9130 Pre: 0.8811 Recall: 0.9474 F1: 0.9130 Train AUC: 0.9911 Val AUC: 0.9729 Time: 12.78\n",
      "Epoch: 462 Train Loss: 0.1340 Val Loss: 0.2235 Acc: 0.9130 Pre: 0.8811 Recall: 0.9474 F1: 0.9130 Train AUC: 0.9901 Val AUC: 0.9737 Time: 13.63\n",
      "Epoch: 463 Train Loss: 0.1381 Val Loss: 0.2055 Acc: 0.9203 Pre: 0.9173 Recall: 0.9173 F1: 0.9173 Train AUC: 0.9897 Val AUC: 0.9761 Time: 14.03\n",
      "Epoch: 464 Train Loss: 0.1391 Val Loss: 0.2037 Acc: 0.9203 Pre: 0.9173 Recall: 0.9173 F1: 0.9173 Train AUC: 0.9898 Val AUC: 0.9759 Time: 14.81\n",
      "Epoch: 465 Train Loss: 0.1294 Val Loss: 0.2308 Acc: 0.9022 Pre: 0.8681 Recall: 0.9398 F1: 0.9025 Train AUC: 0.9919 Val AUC: 0.9736 Time: 14.67\n",
      "Epoch: 466 Train Loss: 0.1355 Val Loss: 0.2340 Acc: 0.9004 Pre: 0.8728 Recall: 0.9286 F1: 0.8998 Train AUC: 0.9913 Val AUC: 0.9717 Time: 12.75\n",
      "Epoch: 467 Train Loss: 0.1286 Val Loss: 0.2398 Acc: 0.9040 Pre: 0.9019 Recall: 0.8985 F1: 0.9002 Train AUC: 0.9913 Val AUC: 0.9700 Time: 12.73\n",
      "Epoch: 468 Train Loss: 0.1421 Val Loss: 0.2274 Acc: 0.9094 Pre: 0.8913 Recall: 0.9248 F1: 0.9077 Train AUC: 0.9889 Val AUC: 0.9718 Time: 13.07\n",
      "Epoch: 469 Train Loss: 0.1300 Val Loss: 0.2234 Acc: 0.9149 Pre: 0.8925 Recall: 0.9361 F1: 0.9138 Train AUC: 0.9902 Val AUC: 0.9730 Time: 13.95\n",
      "Epoch: 470 Train Loss: 0.1306 Val Loss: 0.2064 Acc: 0.9293 Pre: 0.9188 Recall: 0.9361 F1: 0.9274 Train AUC: 0.9920 Val AUC: 0.9745 Time: 13.75\n",
      "Epoch: 471 Train Loss: 0.1305 Val Loss: 0.2046 Acc: 0.9239 Pre: 0.9275 Recall: 0.9135 F1: 0.9205 Train AUC: 0.9907 Val AUC: 0.9762 Time: 14.15\n",
      "Epoch: 472 Train Loss: 0.1449 Val Loss: 0.2305 Acc: 0.9094 Pre: 0.8750 Recall: 0.9474 F1: 0.9097 Train AUC: 0.9897 Val AUC: 0.9728 Time: 14.09\n",
      "Epoch: 473 Train Loss: 0.1320 Val Loss: 0.2466 Acc: 0.9058 Pre: 0.8690 Recall: 0.9474 F1: 0.9065 Train AUC: 0.9902 Val AUC: 0.9716 Time: 13.42\n",
      "Epoch: 474 Train Loss: 0.1427 Val Loss: 0.2127 Acc: 0.9167 Pre: 0.9015 Recall: 0.9286 F1: 0.9148 Train AUC: 0.9886 Val AUC: 0.9749 Time: 12.36\n",
      "Epoch: 475 Train Loss: 0.1285 Val Loss: 0.2060 Acc: 0.9221 Pre: 0.9176 Recall: 0.9211 F1: 0.9193 Train AUC: 0.9907 Val AUC: 0.9752 Time: 12.81\n",
      "Epoch: 476 Train Loss: 0.1354 Val Loss: 0.2126 Acc: 0.9149 Pre: 0.9041 Recall: 0.9211 F1: 0.9125 Train AUC: 0.9908 Val AUC: 0.9734 Time: 13.47\n",
      "Epoch: 477 Train Loss: 0.1295 Val Loss: 0.2167 Acc: 0.9094 Pre: 0.8913 Recall: 0.9248 F1: 0.9077 Train AUC: 0.9915 Val AUC: 0.9730 Time: 14.04\n",
      "Epoch: 478 Train Loss: 0.1258 Val Loss: 0.2303 Acc: 0.9040 Pre: 0.8737 Recall: 0.9361 F1: 0.9038 Train AUC: 0.9917 Val AUC: 0.9717 Time: 13.98\n",
      "Epoch: 479 Train Loss: 0.1273 Val Loss: 0.2340 Acc: 0.9076 Pre: 0.8772 Recall: 0.9398 F1: 0.9074 Train AUC: 0.9911 Val AUC: 0.9713 Time: 13.57\n",
      "Epoch: 480 Train Loss: 0.1240 Val Loss: 0.2204 Acc: 0.9112 Pre: 0.8889 Recall: 0.9323 F1: 0.9101 Train AUC: 0.9912 Val AUC: 0.9731 Time: 13.19\n",
      "Epoch: 481 Train Loss: 0.1230 Val Loss: 0.2150 Acc: 0.9130 Pre: 0.8978 Recall: 0.9248 F1: 0.9111 Train AUC: 0.9916 Val AUC: 0.9740 Time: 13.34\n",
      "Epoch: 482 Train Loss: 0.1239 Val Loss: 0.2163 Acc: 0.9149 Pre: 0.9041 Recall: 0.9211 F1: 0.9125 Train AUC: 0.9920 Val AUC: 0.9735 Time: 14.13\n",
      "Epoch: 483 Train Loss: 0.1220 Val Loss: 0.2196 Acc: 0.9167 Pre: 0.9044 Recall: 0.9248 F1: 0.9145 Train AUC: 0.9921 Val AUC: 0.9729 Time: 13.82\n",
      "Epoch: 484 Train Loss: 0.1246 Val Loss: 0.2225 Acc: 0.9149 Pre: 0.8925 Recall: 0.9361 F1: 0.9138 Train AUC: 0.9916 Val AUC: 0.9739 Time: 12.44\n",
      "Epoch: 485 Train Loss: 0.1303 Val Loss: 0.2197 Acc: 0.9167 Pre: 0.8873 Recall: 0.9474 F1: 0.9164 Train AUC: 0.9904 Val AUC: 0.9750 Time: 12.93\n",
      "Epoch: 486 Train Loss: 0.1320 Val Loss: 0.2176 Acc: 0.9094 Pre: 0.8857 Recall: 0.9323 F1: 0.9084 Train AUC: 0.9898 Val AUC: 0.9758 Time: 13.10\n",
      "Epoch: 487 Train Loss: 0.1357 Val Loss: 0.2028 Acc: 0.9239 Pre: 0.9179 Recall: 0.9248 F1: 0.9213 Train AUC: 0.9898 Val AUC: 0.9761 Time: 14.15\n",
      "Epoch: 488 Train Loss: 0.1239 Val Loss: 0.2092 Acc: 0.9149 Pre: 0.9163 Recall: 0.9060 F1: 0.9112 Train AUC: 0.9914 Val AUC: 0.9749 Time: 14.52\n",
      "Epoch: 489 Train Loss: 0.1309 Val Loss: 0.2313 Acc: 0.9130 Pre: 0.8978 Recall: 0.9248 F1: 0.9111 Train AUC: 0.9924 Val AUC: 0.9707 Time: 14.29\n",
      "Epoch: 490 Train Loss: 0.1342 Val Loss: 0.2398 Acc: 0.9058 Pre: 0.8821 Recall: 0.9286 F1: 0.9048 Train AUC: 0.9899 Val AUC: 0.9715 Time: 12.72\n",
      "Epoch: 491 Train Loss: 0.1290 Val Loss: 0.2268 Acc: 0.8986 Pre: 0.8804 Recall: 0.9135 F1: 0.8967 Train AUC: 0.9913 Val AUC: 0.9734 Time: 12.68\n",
      "Epoch: 492 Train Loss: 0.1307 Val Loss: 0.2194 Acc: 0.9185 Pre: 0.9139 Recall: 0.9173 F1: 0.9156 Train AUC: 0.9898 Val AUC: 0.9744 Time: 12.56\n",
      "Epoch: 493 Train Loss: 0.1523 Val Loss: 0.2082 Acc: 0.9112 Pre: 0.9004 Recall: 0.9173 F1: 0.9088 Train AUC: 0.9868 Val AUC: 0.9760 Time: 13.67\n",
      "Epoch: 494 Train Loss: 0.1271 Val Loss: 0.2134 Acc: 0.9167 Pre: 0.8986 Recall: 0.9323 F1: 0.9151 Train AUC: 0.9908 Val AUC: 0.9751 Time: 14.21\n",
      "Epoch: 495 Train Loss: 0.1281 Val Loss: 0.2347 Acc: 0.9094 Pre: 0.9000 Recall: 0.9135 F1: 0.9067 Train AUC: 0.9915 Val AUC: 0.9701 Time: 14.26\n",
      "Epoch: 496 Train Loss: 0.1327 Val Loss: 0.2398 Acc: 0.9094 Pre: 0.8971 Recall: 0.9173 F1: 0.9071 Train AUC: 0.9907 Val AUC: 0.9699 Time: 14.19\n",
      "Epoch: 497 Train Loss: 0.1345 Val Loss: 0.2267 Acc: 0.9112 Pre: 0.8834 Recall: 0.9398 F1: 0.9107 Train AUC: 0.9893 Val AUC: 0.9745 Time: 12.53\n",
      "Epoch: 498 Train Loss: 0.1256 Val Loss: 0.2285 Acc: 0.9112 Pre: 0.8917 Recall: 0.9286 F1: 0.9098 Train AUC: 0.9909 Val AUC: 0.9744 Time: 12.49\n",
      "Epoch: 499 Train Loss: 0.1380 Val Loss: 0.2156 Acc: 0.9094 Pre: 0.8913 Recall: 0.9248 F1: 0.9077 Train AUC: 0.9892 Val AUC: 0.9749 Time: 12.47\n",
      "Epoch: 500 Train Loss: 0.1276 Val Loss: 0.2158 Acc: 0.9112 Pre: 0.8945 Recall: 0.9248 F1: 0.9094 Train AUC: 0.9905 Val AUC: 0.9749 Time: 12.77\n",
      "Epoch: 501 Train Loss: 0.1166 Val Loss: 0.2313 Acc: 0.9076 Pre: 0.8826 Recall: 0.9323 F1: 0.9068 Train AUC: 0.9927 Val AUC: 0.9718 Time: 13.31\n",
      "Epoch: 502 Train Loss: 0.1247 Val Loss: 0.2373 Acc: 0.9058 Pre: 0.8849 Recall: 0.9248 F1: 0.9044 Train AUC: 0.9923 Val AUC: 0.9705 Time: 14.10\n",
      "Epoch: 503 Train Loss: 0.1274 Val Loss: 0.2103 Acc: 0.9203 Pre: 0.9142 Recall: 0.9211 F1: 0.9176 Train AUC: 0.9911 Val AUC: 0.9761 Time: 14.62\n",
      "Epoch: 504 Train Loss: 0.1200 Val Loss: 0.2061 Acc: 0.9221 Pre: 0.9025 Recall: 0.9398 F1: 0.9208 Train AUC: 0.9933 Val AUC: 0.9769 Time: 14.81\n",
      "Epoch: 505 Train Loss: 0.1211 Val Loss: 0.2216 Acc: 0.9112 Pre: 0.8834 Recall: 0.9398 F1: 0.9107 Train AUC: 0.9919 Val AUC: 0.9752 Time: 12.94\n",
      "Epoch: 506 Train Loss: 0.1340 Val Loss: 0.2170 Acc: 0.9040 Pre: 0.8901 Recall: 0.9135 F1: 0.9017 Train AUC: 0.9910 Val AUC: 0.9751 Time: 12.64\n",
      "Epoch: 507 Train Loss: 0.1310 Val Loss: 0.2228 Acc: 0.9058 Pre: 0.8934 Recall: 0.9135 F1: 0.9033 Train AUC: 0.9897 Val AUC: 0.9739 Time: 12.35\n",
      "Epoch: 508 Train Loss: 0.1246 Val Loss: 0.2243 Acc: 0.9167 Pre: 0.8986 Recall: 0.9323 F1: 0.9151 Train AUC: 0.9913 Val AUC: 0.9724 Time: 12.84\n",
      "Epoch: 509 Train Loss: 0.1374 Val Loss: 0.2185 Acc: 0.9203 Pre: 0.8993 Recall: 0.9398 F1: 0.9191 Train AUC: 0.9896 Val AUC: 0.9743 Time: 14.02\n",
      "Epoch: 510 Train Loss: 0.1217 Val Loss: 0.2102 Acc: 0.9130 Pre: 0.8949 Recall: 0.9286 F1: 0.9114 Train AUC: 0.9928 Val AUC: 0.9761 Time: 14.20\n",
      "Epoch: 511 Train Loss: 0.1080 Val Loss: 0.2040 Acc: 0.9130 Pre: 0.9037 Recall: 0.9173 F1: 0.9104 Train AUC: 0.9948 Val AUC: 0.9773 Time: 14.47\n",
      "Epoch: 512 Train Loss: 0.1124 Val Loss: 0.2101 Acc: 0.9022 Pre: 0.8841 Recall: 0.9173 F1: 0.9004 Train AUC: 0.9932 Val AUC: 0.9766 Time: 14.53\n",
      "Epoch: 513 Train Loss: 0.1207 Val Loss: 0.2145 Acc: 0.9076 Pre: 0.8799 Recall: 0.9361 F1: 0.9071 Train AUC: 0.9916 Val AUC: 0.9765 Time: 12.35\n",
      "Epoch: 514 Train Loss: 0.1210 Val Loss: 0.2101 Acc: 0.9167 Pre: 0.8986 Recall: 0.9323 F1: 0.9151 Train AUC: 0.9920 Val AUC: 0.9759 Time: 12.42\n",
      "Epoch: 515 Train Loss: 0.1300 Val Loss: 0.2040 Acc: 0.9239 Pre: 0.9211 Recall: 0.9211 F1: 0.9211 Train AUC: 0.9908 Val AUC: 0.9767 Time: 12.69\n",
      "Epoch: 516 Train Loss: 0.1204 Val Loss: 0.2063 Acc: 0.9185 Pre: 0.9108 Recall: 0.9211 F1: 0.9159 Train AUC: 0.9926 Val AUC: 0.9759 Time: 12.82\n",
      "Epoch: 517 Train Loss: 0.1187 Val Loss: 0.2293 Acc: 0.9149 Pre: 0.8869 Recall: 0.9436 F1: 0.9144 Train AUC: 0.9924 Val AUC: 0.9739 Time: 13.75\n",
      "Epoch: 518 Train Loss: 0.1255 Val Loss: 0.2353 Acc: 0.9094 Pre: 0.8750 Recall: 0.9474 F1: 0.9097 Train AUC: 0.9913 Val AUC: 0.9734 Time: 14.35\n",
      "Epoch: 519 Train Loss: 0.1235 Val Loss: 0.2149 Acc: 0.9076 Pre: 0.8909 Recall: 0.9211 F1: 0.9057 Train AUC: 0.9924 Val AUC: 0.9752 Time: 14.58\n",
      "Epoch: 520 Train Loss: 0.1199 Val Loss: 0.2105 Acc: 0.9112 Pre: 0.9094 Recall: 0.9060 F1: 0.9077 Train AUC: 0.9921 Val AUC: 0.9766 Time: 14.70\n",
      "Epoch: 521 Train Loss: 0.1310 Val Loss: 0.2146 Acc: 0.9076 Pre: 0.8881 Recall: 0.9248 F1: 0.9061 Train AUC: 0.9910 Val AUC: 0.9756 Time: 12.51\n",
      "Epoch: 522 Train Loss: 0.1173 Val Loss: 0.2168 Acc: 0.9094 Pre: 0.8857 Recall: 0.9323 F1: 0.9084 Train AUC: 0.9927 Val AUC: 0.9746 Time: 12.68\n",
      "Epoch: 523 Train Loss: 0.1296 Val Loss: 0.2094 Acc: 0.9130 Pre: 0.8978 Recall: 0.9248 F1: 0.9111 Train AUC: 0.9922 Val AUC: 0.9768 Time: 12.69\n",
      "Epoch: 524 Train Loss: 0.1179 Val Loss: 0.2089 Acc: 0.9167 Pre: 0.9044 Recall: 0.9248 F1: 0.9145 Train AUC: 0.9921 Val AUC: 0.9763 Time: 13.05\n",
      "Epoch: 525 Train Loss: 0.1143 Val Loss: 0.2165 Acc: 0.9076 Pre: 0.8826 Recall: 0.9323 F1: 0.9068 Train AUC: 0.9929 Val AUC: 0.9753 Time: 14.18\n",
      "Epoch: 526 Train Loss: 0.1185 Val Loss: 0.2220 Acc: 0.9076 Pre: 0.8799 Recall: 0.9361 F1: 0.9071 Train AUC: 0.9923 Val AUC: 0.9746 Time: 14.97\n",
      "Epoch: 527 Train Loss: 0.1183 Val Loss: 0.2101 Acc: 0.9185 Pre: 0.9048 Recall: 0.9286 F1: 0.9165 Train AUC: 0.9924 Val AUC: 0.9754 Time: 14.34\n",
      "Epoch: 528 Train Loss: 0.1124 Val Loss: 0.2070 Acc: 0.9167 Pre: 0.9044 Recall: 0.9248 F1: 0.9145 Train AUC: 0.9934 Val AUC: 0.9760 Time: 13.41\n",
      "Epoch: 529 Train Loss: 0.1164 Val Loss: 0.2110 Acc: 0.9167 Pre: 0.8986 Recall: 0.9323 F1: 0.9151 Train AUC: 0.9926 Val AUC: 0.9763 Time: 12.68\n",
      "Epoch: 530 Train Loss: 0.1143 Val Loss: 0.2203 Acc: 0.9094 Pre: 0.8803 Recall: 0.9398 F1: 0.9091 Train AUC: 0.9928 Val AUC: 0.9761 Time: 12.58\n",
      "Epoch: 531 Train Loss: 0.1166 Val Loss: 0.2133 Acc: 0.9076 Pre: 0.8853 Recall: 0.9286 F1: 0.9064 Train AUC: 0.9935 Val AUC: 0.9766 Time: 12.31\n",
      "Epoch: 532 Train Loss: 0.1140 Val Loss: 0.2088 Acc: 0.9130 Pre: 0.9007 Recall: 0.9211 F1: 0.9108 Train AUC: 0.9929 Val AUC: 0.9770 Time: 12.48\n",
      "Epoch: 533 Train Loss: 0.1206 Val Loss: 0.2091 Acc: 0.9040 Pre: 0.8873 Recall: 0.9173 F1: 0.9020 Train AUC: 0.9917 Val AUC: 0.9766 Time: 13.50\n",
      "Epoch: 534 Train Loss: 0.1204 Val Loss: 0.2189 Acc: 0.9149 Pre: 0.8982 Recall: 0.9286 F1: 0.9131 Train AUC: 0.9920 Val AUC: 0.9744 Time: 13.91\n",
      "Epoch: 535 Train Loss: 0.1151 Val Loss: 0.2146 Acc: 0.9130 Pre: 0.8978 Recall: 0.9248 F1: 0.9111 Train AUC: 0.9931 Val AUC: 0.9749 Time: 14.45\n",
      "Epoch: 536 Train Loss: 0.1175 Val Loss: 0.2053 Acc: 0.9167 Pre: 0.9104 Recall: 0.9173 F1: 0.9139 Train AUC: 0.9927 Val AUC: 0.9774 Time: 14.92\n",
      "Epoch: 537 Train Loss: 0.1196 Val Loss: 0.2089 Acc: 0.9094 Pre: 0.8913 Recall: 0.9248 F1: 0.9077 Train AUC: 0.9927 Val AUC: 0.9767 Time: 14.42\n",
      "Epoch: 538 Train Loss: 0.1178 Val Loss: 0.2181 Acc: 0.9076 Pre: 0.8799 Recall: 0.9361 F1: 0.9071 Train AUC: 0.9924 Val AUC: 0.9759 Time: 12.95\n",
      "Epoch: 539 Train Loss: 0.1118 Val Loss: 0.2072 Acc: 0.9094 Pre: 0.8857 Recall: 0.9323 F1: 0.9084 Train AUC: 0.9938 Val AUC: 0.9768 Time: 12.54\n",
      "Epoch: 540 Train Loss: 0.1113 Val Loss: 0.2037 Acc: 0.9185 Pre: 0.9139 Recall: 0.9173 F1: 0.9156 Train AUC: 0.9937 Val AUC: 0.9774 Time: 12.47\n",
      "Epoch: 541 Train Loss: 0.1142 Val Loss: 0.2183 Acc: 0.9167 Pre: 0.8986 Recall: 0.9323 F1: 0.9151 Train AUC: 0.9935 Val AUC: 0.9748 Time: 12.71\n",
      "Epoch: 542 Train Loss: 0.1106 Val Loss: 0.2137 Acc: 0.9167 Pre: 0.8986 Recall: 0.9323 F1: 0.9151 Train AUC: 0.9934 Val AUC: 0.9757 Time: 13.38\n",
      "Epoch: 543 Train Loss: 0.1099 Val Loss: 0.2002 Acc: 0.9203 Pre: 0.9111 Recall: 0.9248 F1: 0.9179 Train AUC: 0.9939 Val AUC: 0.9772 Time: 13.71\n",
      "Epoch: 544 Train Loss: 0.1170 Val Loss: 0.2061 Acc: 0.9221 Pre: 0.9084 Recall: 0.9323 F1: 0.9202 Train AUC: 0.9932 Val AUC: 0.9767 Time: 14.52\n",
      "Epoch: 545 Train Loss: 0.1134 Val Loss: 0.2270 Acc: 0.9058 Pre: 0.8741 Recall: 0.9398 F1: 0.9058 Train AUC: 0.9928 Val AUC: 0.9744 Time: 15.04\n",
      "Epoch: 546 Train Loss: 0.1119 Val Loss: 0.2261 Acc: 0.9094 Pre: 0.8857 Recall: 0.9323 F1: 0.9084 Train AUC: 0.9935 Val AUC: 0.9738 Time: 14.85\n",
      "Epoch: 547 Train Loss: 0.1116 Val Loss: 0.2118 Acc: 0.9149 Pre: 0.9011 Recall: 0.9248 F1: 0.9128 Train AUC: 0.9935 Val AUC: 0.9758 Time: 12.58\n",
      "Epoch: 548 Train Loss: 0.1091 Val Loss: 0.2022 Acc: 0.9239 Pre: 0.9308 Recall: 0.9098 F1: 0.9202 Train AUC: 0.9933 Val AUC: 0.9774 Time: 12.58\n",
      "Epoch: 549 Train Loss: 0.1114 Val Loss: 0.2043 Acc: 0.9239 Pre: 0.9242 Recall: 0.9173 F1: 0.9208 Train AUC: 0.9936 Val AUC: 0.9759 Time: 12.60\n",
      "Epoch: 550 Train Loss: 0.1038 Val Loss: 0.2155 Acc: 0.9203 Pre: 0.9173 Recall: 0.9173 F1: 0.9173 Train AUC: 0.9949 Val AUC: 0.9738 Time: 12.50\n",
      "Epoch: 551 Train Loss: 0.1130 Val Loss: 0.2158 Acc: 0.9076 Pre: 0.8967 Recall: 0.9135 F1: 0.9050 Train AUC: 0.9939 Val AUC: 0.9756 Time: 12.52\n",
      "Epoch: 552 Train Loss: 0.1085 Val Loss: 0.2166 Acc: 0.9112 Pre: 0.8945 Recall: 0.9248 F1: 0.9094 Train AUC: 0.9936 Val AUC: 0.9758 Time: 13.28\n",
      "Epoch: 553 Train Loss: 0.1117 Val Loss: 0.2250 Acc: 0.9094 Pre: 0.8885 Recall: 0.9286 F1: 0.9081 Train AUC: 0.9933 Val AUC: 0.9749 Time: 14.07\n",
      "Epoch: 554 Train Loss: 0.1168 Val Loss: 0.2169 Acc: 0.9149 Pre: 0.9071 Recall: 0.9173 F1: 0.9121 Train AUC: 0.9928 Val AUC: 0.9755 Time: 14.56\n",
      "Epoch: 555 Train Loss: 0.1131 Val Loss: 0.2106 Acc: 0.9185 Pre: 0.9139 Recall: 0.9173 F1: 0.9156 Train AUC: 0.9936 Val AUC: 0.9762 Time: 15.05\n",
      "Epoch: 556 Train Loss: 0.1207 Val Loss: 0.2119 Acc: 0.9094 Pre: 0.8971 Recall: 0.9173 F1: 0.9071 Train AUC: 0.9923 Val AUC: 0.9761 Time: 14.14\n",
      "Epoch: 557 Train Loss: 0.1057 Val Loss: 0.2161 Acc: 0.9040 Pre: 0.8873 Recall: 0.9173 F1: 0.9020 Train AUC: 0.9943 Val AUC: 0.9760 Time: 12.65\n",
      "Epoch: 558 Train Loss: 0.1204 Val Loss: 0.2133 Acc: 0.9239 Pre: 0.9118 Recall: 0.9323 F1: 0.9219 Train AUC: 0.9929 Val AUC: 0.9767 Time: 12.42\n",
      "Epoch: 559 Train Loss: 0.1231 Val Loss: 0.2190 Acc: 0.9167 Pre: 0.8986 Recall: 0.9323 F1: 0.9151 Train AUC: 0.9912 Val AUC: 0.9757 Time: 12.86\n",
      "Epoch: 560 Train Loss: 0.1186 Val Loss: 0.2203 Acc: 0.9167 Pre: 0.8957 Recall: 0.9361 F1: 0.9154 Train AUC: 0.9917 Val AUC: 0.9749 Time: 13.55\n",
      "Epoch: 561 Train Loss: 0.1308 Val Loss: 0.2150 Acc: 0.9130 Pre: 0.9067 Recall: 0.9135 F1: 0.9101 Train AUC: 0.9919 Val AUC: 0.9752 Time: 14.00\n",
      "Epoch: 562 Train Loss: 0.1151 Val Loss: 0.2315 Acc: 0.9004 Pre: 0.9105 Recall: 0.8797 F1: 0.8948 Train AUC: 0.9927 Val AUC: 0.9747 Time: 14.23\n",
      "Epoch: 563 Train Loss: 0.1470 Val Loss: 0.2569 Acc: 0.9149 Pre: 0.8842 Recall: 0.9474 F1: 0.9147 Train AUC: 0.9899 Val AUC: 0.9745 Time: 14.56\n",
      "Epoch: 564 Train Loss: 0.1262 Val Loss: 0.2305 Acc: 0.9040 Pre: 0.8763 Recall: 0.9323 F1: 0.9035 Train AUC: 0.9925 Val AUC: 0.9746 Time: 12.96\n",
      "Epoch: 565 Train Loss: 0.1294 Val Loss: 0.2133 Acc: 0.9330 Pre: 0.9490 Recall: 0.9098 F1: 0.9290 Train AUC: 0.9915 Val AUC: 0.9745 Time: 12.52\n",
      "Epoch: 566 Train Loss: 0.1505 Val Loss: 0.2229 Acc: 0.9130 Pre: 0.9098 Recall: 0.9098 F1: 0.9098 Train AUC: 0.9878 Val AUC: 0.9726 Time: 12.77\n",
      "Epoch: 567 Train Loss: 0.1262 Val Loss: 0.2520 Acc: 0.9058 Pre: 0.8821 Recall: 0.9286 F1: 0.9048 Train AUC: 0.9913 Val AUC: 0.9662 Time: 12.81\n",
      "Epoch: 568 Train Loss: 0.1718 Val Loss: 0.2345 Acc: 0.9022 Pre: 0.8813 Recall: 0.9211 F1: 0.9007 Train AUC: 0.9839 Val AUC: 0.9740 Time: 13.66\n",
      "Epoch: 569 Train Loss: 0.1192 Val Loss: 0.2529 Acc: 0.9022 Pre: 0.8841 Recall: 0.9173 F1: 0.9004 Train AUC: 0.9918 Val AUC: 0.9710 Time: 14.16\n",
      "Epoch: 570 Train Loss: 0.1459 Val Loss: 0.2428 Acc: 0.9076 Pre: 0.8909 Recall: 0.9211 F1: 0.9057 Train AUC: 0.9869 Val AUC: 0.9725 Time: 15.07\n",
      "Epoch: 571 Train Loss: 0.1346 Val Loss: 0.2138 Acc: 0.9257 Pre: 0.9213 Recall: 0.9248 F1: 0.9231 Train AUC: 0.9888 Val AUC: 0.9753 Time: 13.19\n",
      "Epoch: 572 Train Loss: 0.1273 Val Loss: 0.2258 Acc: 0.9130 Pre: 0.9291 Recall: 0.8872 F1: 0.9077 Train AUC: 0.9906 Val AUC: 0.9735 Time: 12.76\n",
      "Epoch: 573 Train Loss: 0.1259 Val Loss: 0.2369 Acc: 0.9058 Pre: 0.8934 Recall: 0.9135 F1: 0.9033 Train AUC: 0.9914 Val AUC: 0.9703 Time: 13.46\n",
      "Epoch: 574 Train Loss: 0.1362 Val Loss: 0.2290 Acc: 0.9112 Pre: 0.8889 Recall: 0.9323 F1: 0.9101 Train AUC: 0.9895 Val AUC: 0.9727 Time: 13.83\n",
      "Epoch: 575 Train Loss: 0.1210 Val Loss: 0.2166 Acc: 0.9040 Pre: 0.8817 Recall: 0.9248 F1: 0.9028 Train AUC: 0.9922 Val AUC: 0.9752 Time: 14.41\n",
      "Epoch: 576 Train Loss: 0.1205 Val Loss: 0.2210 Acc: 0.9040 Pre: 0.8817 Recall: 0.9248 F1: 0.9028 Train AUC: 0.9914 Val AUC: 0.9747 Time: 14.41\n",
      "Epoch: 577 Train Loss: 0.1314 Val Loss: 0.2315 Acc: 0.9058 Pre: 0.8741 Recall: 0.9398 F1: 0.9058 Train AUC: 0.9896 Val AUC: 0.9750 Time: 12.68\n",
      "Epoch: 578 Train Loss: 0.1264 Val Loss: 0.2332 Acc: 0.9130 Pre: 0.8759 Recall: 0.9549 F1: 0.9137 Train AUC: 0.9904 Val AUC: 0.9741 Time: 12.58\n",
      "Epoch: 579 Train Loss: 0.1116 Val Loss: 0.2470 Acc: 0.9112 Pre: 0.8834 Recall: 0.9398 F1: 0.9107 Train AUC: 0.9937 Val AUC: 0.9703 Time: 12.56\n",
      "Epoch: 580 Train Loss: 0.1361 Val Loss: 0.2293 Acc: 0.9022 Pre: 0.9109 Recall: 0.8835 F1: 0.8969 Train AUC: 0.9904 Val AUC: 0.9732 Time: 12.57\n",
      "Epoch: 581 Train Loss: 0.1398 Val Loss: 0.2175 Acc: 0.9167 Pre: 0.8986 Recall: 0.9323 F1: 0.9151 Train AUC: 0.9906 Val AUC: 0.9757 Time: 13.51\n",
      "Epoch: 582 Train Loss: 0.1179 Val Loss: 0.2485 Acc: 0.9130 Pre: 0.8759 Recall: 0.9549 F1: 0.9137 Train AUC: 0.9920 Val AUC: 0.9751 Time: 13.84\n",
      "Epoch: 583 Train Loss: 0.1278 Val Loss: 0.2293 Acc: 0.9094 Pre: 0.8830 Recall: 0.9361 F1: 0.9088 Train AUC: 0.9926 Val AUC: 0.9763 Time: 14.41\n",
      "Epoch: 584 Train Loss: 0.1197 Val Loss: 0.2095 Acc: 0.9185 Pre: 0.9234 Recall: 0.9060 F1: 0.9146 Train AUC: 0.9912 Val AUC: 0.9774 Time: 14.03\n",
      "Epoch: 585 Train Loss: 0.1260 Val Loss: 0.1983 Acc: 0.9185 Pre: 0.9139 Recall: 0.9173 F1: 0.9156 Train AUC: 0.9914 Val AUC: 0.9757 Time: 14.24\n",
      "Epoch: 586 Train Loss: 0.1154 Val Loss: 0.2150 Acc: 0.9149 Pre: 0.8953 Recall: 0.9323 F1: 0.9134 Train AUC: 0.9927 Val AUC: 0.9730 Time: 12.82\n",
      "Epoch: 587 Train Loss: 0.1360 Val Loss: 0.2203 Acc: 0.9094 Pre: 0.8857 Recall: 0.9323 F1: 0.9084 Train AUC: 0.9907 Val AUC: 0.9751 Time: 12.61\n",
      "Epoch: 588 Train Loss: 0.1055 Val Loss: 0.2303 Acc: 0.9076 Pre: 0.8772 Recall: 0.9398 F1: 0.9074 Train AUC: 0.9942 Val AUC: 0.9742 Time: 12.66\n",
      "Epoch: 589 Train Loss: 0.1102 Val Loss: 0.2287 Acc: 0.9112 Pre: 0.8889 Recall: 0.9323 F1: 0.9101 Train AUC: 0.9928 Val AUC: 0.9739 Time: 13.95\n",
      "Epoch: 590 Train Loss: 0.1138 Val Loss: 0.2204 Acc: 0.9221 Pre: 0.9025 Recall: 0.9398 F1: 0.9208 Train AUC: 0.9923 Val AUC: 0.9749 Time: 14.14\n",
      "Epoch: 591 Train Loss: 0.1229 Val Loss: 0.2023 Acc: 0.9257 Pre: 0.9182 Recall: 0.9286 F1: 0.9234 Train AUC: 0.9914 Val AUC: 0.9772 Time: 14.05\n",
      "Epoch: 592 Train Loss: 0.1278 Val Loss: 0.1992 Acc: 0.9257 Pre: 0.9278 Recall: 0.9173 F1: 0.9225 Train AUC: 0.9906 Val AUC: 0.9776 Time: 12.41\n",
      "Epoch: 593 Train Loss: 0.1192 Val Loss: 0.2182 Acc: 0.9185 Pre: 0.9048 Recall: 0.9286 F1: 0.9165 Train AUC: 0.9925 Val AUC: 0.9761 Time: 12.68\n",
      "Epoch: 594 Train Loss: 0.1175 Val Loss: 0.2157 Acc: 0.9167 Pre: 0.9044 Recall: 0.9248 F1: 0.9145 Train AUC: 0.9929 Val AUC: 0.9766 Time: 13.41\n",
      "Epoch: 595 Train Loss: 0.1150 Val Loss: 0.2033 Acc: 0.9130 Pre: 0.9037 Recall: 0.9173 F1: 0.9104 Train AUC: 0.9926 Val AUC: 0.9772 Time: 13.73\n",
      "Epoch: 596 Train Loss: 0.1216 Val Loss: 0.1999 Acc: 0.9203 Pre: 0.9173 Recall: 0.9173 F1: 0.9173 Train AUC: 0.9916 Val AUC: 0.9779 Time: 14.35\n",
      "Epoch: 597 Train Loss: 0.1242 Val Loss: 0.2154 Acc: 0.9094 Pre: 0.8913 Recall: 0.9248 F1: 0.9077 Train AUC: 0.9918 Val AUC: 0.9768 Time: 15.35\n",
      "Epoch: 598 Train Loss: 0.1074 Val Loss: 0.2527 Acc: 0.9004 Pre: 0.8601 Recall: 0.9474 F1: 0.9016 Train AUC: 0.9942 Val AUC: 0.9724 Time: 13.59\n",
      "Epoch: 599 Train Loss: 0.1255 Val Loss: 0.2245 Acc: 0.9076 Pre: 0.8826 Recall: 0.9323 F1: 0.9068 Train AUC: 0.9926 Val AUC: 0.9749 Time: 12.64\n",
      "Epoch: 600 Train Loss: 0.1110 Val Loss: 0.2032 Acc: 0.9312 Pre: 0.9351 Recall: 0.9211 F1: 0.9280 Train AUC: 0.9934 Val AUC: 0.9780 Time: 12.48\n",
      "Epoch: 601 Train Loss: 0.1165 Val Loss: 0.2021 Acc: 0.9330 Pre: 0.9288 Recall: 0.9323 F1: 0.9306 Train AUC: 0.9928 Val AUC: 0.9771 Time: 12.62\n",
      "Epoch: 602 Train Loss: 0.1125 Val Loss: 0.2174 Acc: 0.9221 Pre: 0.9025 Recall: 0.9398 F1: 0.9208 Train AUC: 0.9930 Val AUC: 0.9759 Time: 12.90\n",
      "Epoch: 603 Train Loss: 0.1175 Val Loss: 0.2352 Acc: 0.9076 Pre: 0.8826 Recall: 0.9323 F1: 0.9068 Train AUC: 0.9934 Val AUC: 0.9737 Time: 12.53\n",
      "Epoch: 604 Train Loss: 0.1095 Val Loss: 0.2476 Acc: 0.9076 Pre: 0.8938 Recall: 0.9173 F1: 0.9054 Train AUC: 0.9938 Val AUC: 0.9713 Time: 12.51\n",
      "Epoch: 605 Train Loss: 0.1222 Val Loss: 0.2262 Acc: 0.9149 Pre: 0.9041 Recall: 0.9211 F1: 0.9125 Train AUC: 0.9914 Val AUC: 0.9742 Time: 14.05\n",
      "Epoch: 606 Train Loss: 0.1055 Val Loss: 0.2096 Acc: 0.9130 Pre: 0.9037 Recall: 0.9173 F1: 0.9104 Train AUC: 0.9943 Val AUC: 0.9757 Time: 14.68\n",
      "Epoch: 607 Train Loss: 0.1040 Val Loss: 0.2161 Acc: 0.9167 Pre: 0.9015 Recall: 0.9286 F1: 0.9148 Train AUC: 0.9946 Val AUC: 0.9754 Time: 14.74\n",
      "Epoch: 608 Train Loss: 0.1169 Val Loss: 0.2135 Acc: 0.9185 Pre: 0.9018 Recall: 0.9323 F1: 0.9168 Train AUC: 0.9930 Val AUC: 0.9764 Time: 14.16\n",
      "Epoch: 609 Train Loss: 0.1055 Val Loss: 0.2159 Acc: 0.9094 Pre: 0.9030 Recall: 0.9098 F1: 0.9064 Train AUC: 0.9939 Val AUC: 0.9763 Time: 13.89\n",
      "Epoch: 610 Train Loss: 0.1094 Val Loss: 0.2386 Acc: 0.9040 Pre: 0.8817 Recall: 0.9248 F1: 0.9028 Train AUC: 0.9935 Val AUC: 0.9731 Time: 12.40\n",
      "Epoch: 611 Train Loss: 0.1219 Val Loss: 0.2582 Acc: 0.8986 Pre: 0.8671 Recall: 0.9323 F1: 0.8986 Train AUC: 0.9915 Val AUC: 0.9714 Time: 12.63\n",
      "Epoch: 612 Train Loss: 0.1258 Val Loss: 0.2156 Acc: 0.9149 Pre: 0.8953 Recall: 0.9323 F1: 0.9134 Train AUC: 0.9915 Val AUC: 0.9758 Time: 12.73\n",
      "Epoch: 613 Train Loss: 0.1058 Val Loss: 0.1972 Acc: 0.9348 Pre: 0.9323 Recall: 0.9323 F1: 0.9323 Train AUC: 0.9946 Val AUC: 0.9775 Time: 12.59\n",
      "Epoch: 614 Train Loss: 0.1133 Val Loss: 0.1986 Acc: 0.9293 Pre: 0.9251 Recall: 0.9286 F1: 0.9268 Train AUC: 0.9934 Val AUC: 0.9769 Time: 12.85\n",
      "Epoch: 615 Train Loss: 0.1205 Val Loss: 0.2006 Acc: 0.9293 Pre: 0.9219 Recall: 0.9323 F1: 0.9271 Train AUC: 0.9924 Val AUC: 0.9776 Time: 13.30\n",
      "Epoch: 616 Train Loss: 0.1076 Val Loss: 0.2163 Acc: 0.9149 Pre: 0.8953 Recall: 0.9323 F1: 0.9134 Train AUC: 0.9935 Val AUC: 0.9763 Time: 14.28\n",
      "Epoch: 617 Train Loss: 0.1004 Val Loss: 0.2379 Acc: 0.9004 Pre: 0.8754 Recall: 0.9248 F1: 0.8995 Train AUC: 0.9950 Val AUC: 0.9741 Time: 14.70\n",
      "Epoch: 618 Train Loss: 0.1040 Val Loss: 0.2199 Acc: 0.9149 Pre: 0.9071 Recall: 0.9173 F1: 0.9121 Train AUC: 0.9947 Val AUC: 0.9755 Time: 14.21\n",
      "Epoch: 619 Train Loss: 0.1139 Val Loss: 0.2083 Acc: 0.9221 Pre: 0.9208 Recall: 0.9173 F1: 0.9190 Train AUC: 0.9932 Val AUC: 0.9769 Time: 14.23\n",
      "Epoch: 620 Train Loss: 0.1137 Val Loss: 0.2101 Acc: 0.9221 Pre: 0.9055 Recall: 0.9361 F1: 0.9205 Train AUC: 0.9941 Val AUC: 0.9763 Time: 12.54\n",
      "Epoch: 621 Train Loss: 0.1021 Val Loss: 0.2229 Acc: 0.9167 Pre: 0.8929 Recall: 0.9398 F1: 0.9158 Train AUC: 0.9953 Val AUC: 0.9753 Time: 12.56\n",
      "Epoch: 622 Train Loss: 0.1099 Val Loss: 0.2171 Acc: 0.9058 Pre: 0.8821 Recall: 0.9286 F1: 0.9048 Train AUC: 0.9952 Val AUC: 0.9763 Time: 12.49\n",
      "Epoch: 623 Train Loss: 0.1032 Val Loss: 0.2148 Acc: 0.9130 Pre: 0.8978 Recall: 0.9248 F1: 0.9111 Train AUC: 0.9945 Val AUC: 0.9765 Time: 12.52\n",
      "Epoch: 624 Train Loss: 0.1098 Val Loss: 0.2100 Acc: 0.9221 Pre: 0.9145 Recall: 0.9248 F1: 0.9196 Train AUC: 0.9936 Val AUC: 0.9766 Time: 14.20\n",
      "Epoch: 625 Train Loss: 0.1069 Val Loss: 0.2103 Acc: 0.9167 Pre: 0.9015 Recall: 0.9286 F1: 0.9148 Train AUC: 0.9936 Val AUC: 0.9764 Time: 14.21\n",
      "Epoch: 626 Train Loss: 0.0960 Val Loss: 0.2094 Acc: 0.9203 Pre: 0.9111 Recall: 0.9248 F1: 0.9179 Train AUC: 0.9960 Val AUC: 0.9764 Time: 14.74\n",
      "Epoch: 627 Train Loss: 0.0943 Val Loss: 0.2127 Acc: 0.9221 Pre: 0.9114 Recall: 0.9286 F1: 0.9199 Train AUC: 0.9962 Val AUC: 0.9763 Time: 14.35\n",
      "Epoch: 628 Train Loss: 0.0967 Val Loss: 0.2135 Acc: 0.9221 Pre: 0.9114 Recall: 0.9286 F1: 0.9199 Train AUC: 0.9953 Val AUC: 0.9764 Time: 12.33\n",
      "Epoch: 629 Train Loss: 0.1069 Val Loss: 0.2174 Acc: 0.9149 Pre: 0.8982 Recall: 0.9286 F1: 0.9131 Train AUC: 0.9941 Val AUC: 0.9759 Time: 12.53\n",
      "Epoch: 630 Train Loss: 0.1090 Val Loss: 0.2095 Acc: 0.9185 Pre: 0.9077 Recall: 0.9248 F1: 0.9162 Train AUC: 0.9936 Val AUC: 0.9766 Time: 12.48\n",
      "Epoch: 631 Train Loss: 0.1029 Val Loss: 0.2051 Acc: 0.9239 Pre: 0.9242 Recall: 0.9173 F1: 0.9208 Train AUC: 0.9942 Val AUC: 0.9770 Time: 13.08\n",
      "Epoch: 632 Train Loss: 0.1039 Val Loss: 0.2092 Acc: 0.9239 Pre: 0.9242 Recall: 0.9173 F1: 0.9208 Train AUC: 0.9949 Val AUC: 0.9767 Time: 14.16\n",
      "Epoch: 633 Train Loss: 0.0966 Val Loss: 0.2241 Acc: 0.9130 Pre: 0.8921 Recall: 0.9323 F1: 0.9118 Train AUC: 0.9954 Val AUC: 0.9753 Time: 14.11\n",
      "Epoch: 634 Train Loss: 0.1045 Val Loss: 0.2300 Acc: 0.9076 Pre: 0.8881 Recall: 0.9248 F1: 0.9061 Train AUC: 0.9943 Val AUC: 0.9743 Time: 14.18\n",
      "Epoch: 635 Train Loss: 0.0988 Val Loss: 0.2172 Acc: 0.9112 Pre: 0.9033 Recall: 0.9135 F1: 0.9084 Train AUC: 0.9953 Val AUC: 0.9756 Time: 14.25\n",
      "Epoch: 636 Train Loss: 0.1075 Val Loss: 0.2112 Acc: 0.9203 Pre: 0.9173 Recall: 0.9173 F1: 0.9173 Train AUC: 0.9940 Val AUC: 0.9754 Time: 12.86\n",
      "Epoch: 637 Train Loss: 0.0995 Val Loss: 0.2134 Acc: 0.9239 Pre: 0.9179 Recall: 0.9248 F1: 0.9213 Train AUC: 0.9958 Val AUC: 0.9760 Time: 12.55\n",
      "Epoch: 638 Train Loss: 0.1094 Val Loss: 0.2142 Acc: 0.9221 Pre: 0.9114 Recall: 0.9286 F1: 0.9199 Train AUC: 0.9944 Val AUC: 0.9766 Time: 13.00\n",
      "Epoch: 639 Train Loss: 0.1002 Val Loss: 0.2170 Acc: 0.9221 Pre: 0.9208 Recall: 0.9173 F1: 0.9190 Train AUC: 0.9952 Val AUC: 0.9759 Time: 14.77\n",
      "Epoch: 640 Train Loss: 0.1043 Val Loss: 0.2165 Acc: 0.9257 Pre: 0.9278 Recall: 0.9173 F1: 0.9225 Train AUC: 0.9942 Val AUC: 0.9757 Time: 16.31\n",
      "Epoch: 641 Train Loss: 0.0983 Val Loss: 0.2179 Acc: 0.9239 Pre: 0.9179 Recall: 0.9248 F1: 0.9213 Train AUC: 0.9955 Val AUC: 0.9753 Time: 14.30\n",
      "Epoch: 642 Train Loss: 0.1016 Val Loss: 0.2245 Acc: 0.9185 Pre: 0.9048 Recall: 0.9286 F1: 0.9165 Train AUC: 0.9945 Val AUC: 0.9749 Time: 12.88\n",
      "Epoch: 643 Train Loss: 0.1071 Val Loss: 0.2137 Acc: 0.9221 Pre: 0.9176 Recall: 0.9211 F1: 0.9193 Train AUC: 0.9947 Val AUC: 0.9766 Time: 12.55\n",
      "Epoch: 644 Train Loss: 0.1005 Val Loss: 0.2153 Acc: 0.9112 Pre: 0.8945 Recall: 0.9248 F1: 0.9094 Train AUC: 0.9951 Val AUC: 0.9768 Time: 12.89\n",
      "Epoch: 645 Train Loss: 0.1031 Val Loss: 0.2190 Acc: 0.9040 Pre: 0.8817 Recall: 0.9248 F1: 0.9028 Train AUC: 0.9943 Val AUC: 0.9770 Time: 12.98\n",
      "Epoch: 646 Train Loss: 0.1037 Val Loss: 0.2139 Acc: 0.9167 Pre: 0.8986 Recall: 0.9323 F1: 0.9151 Train AUC: 0.9944 Val AUC: 0.9769 Time: 14.06\n",
      "Epoch: 647 Train Loss: 0.1011 Val Loss: 0.2147 Acc: 0.9257 Pre: 0.9121 Recall: 0.9361 F1: 0.9239 Train AUC: 0.9944 Val AUC: 0.9763 Time: 12.94\n",
      "Epoch: 648 Train Loss: 0.1047 Val Loss: 0.2131 Acc: 0.9239 Pre: 0.9242 Recall: 0.9173 F1: 0.9208 Train AUC: 0.9942 Val AUC: 0.9767 Time: 12.43\n",
      "Epoch: 649 Train Loss: 0.0952 Val Loss: 0.2172 Acc: 0.9221 Pre: 0.9208 Recall: 0.9173 F1: 0.9190 Train AUC: 0.9953 Val AUC: 0.9763 Time: 12.86\n",
      "Epoch: 650 Train Loss: 0.1012 Val Loss: 0.2201 Acc: 0.9239 Pre: 0.9242 Recall: 0.9173 F1: 0.9208 Train AUC: 0.9949 Val AUC: 0.9757 Time: 13.50\n",
      "Epoch: 651 Train Loss: 0.0997 Val Loss: 0.2213 Acc: 0.9257 Pre: 0.9182 Recall: 0.9286 F1: 0.9234 Train AUC: 0.9945 Val AUC: 0.9753 Time: 14.39\n",
      "Epoch: 652 Train Loss: 0.1087 Val Loss: 0.2159 Acc: 0.9239 Pre: 0.9179 Recall: 0.9248 F1: 0.9213 Train AUC: 0.9936 Val AUC: 0.9762 Time: 14.43\n",
      "Epoch: 653 Train Loss: 0.0984 Val Loss: 0.2145 Acc: 0.9257 Pre: 0.9278 Recall: 0.9173 F1: 0.9225 Train AUC: 0.9951 Val AUC: 0.9768 Time: 12.61\n",
      "Epoch: 654 Train Loss: 0.1003 Val Loss: 0.2160 Acc: 0.9257 Pre: 0.9245 Recall: 0.9211 F1: 0.9228 Train AUC: 0.9944 Val AUC: 0.9767 Time: 13.01\n",
      "Epoch: 655 Train Loss: 0.1003 Val Loss: 0.2184 Acc: 0.9130 Pre: 0.8978 Recall: 0.9248 F1: 0.9111 Train AUC: 0.9945 Val AUC: 0.9769 Time: 13.77\n",
      "Epoch: 656 Train Loss: 0.0965 Val Loss: 0.2136 Acc: 0.9221 Pre: 0.9145 Recall: 0.9248 F1: 0.9196 Train AUC: 0.9954 Val AUC: 0.9770 Time: 14.70\n",
      "Epoch: 657 Train Loss: 0.0964 Val Loss: 0.2099 Acc: 0.9275 Pre: 0.9248 Recall: 0.9248 F1: 0.9248 Train AUC: 0.9953 Val AUC: 0.9771 Time: 13.24\n",
      "Epoch: 658 Train Loss: 0.1007 Val Loss: 0.2111 Acc: 0.9257 Pre: 0.9278 Recall: 0.9173 F1: 0.9225 Train AUC: 0.9947 Val AUC: 0.9768 Time: 12.97\n",
      "Epoch: 659 Train Loss: 0.0979 Val Loss: 0.2188 Acc: 0.9203 Pre: 0.9205 Recall: 0.9135 F1: 0.9170 Train AUC: 0.9950 Val AUC: 0.9760 Time: 12.81\n",
      "Epoch: 660 Train Loss: 0.1067 Val Loss: 0.2213 Acc: 0.9221 Pre: 0.9145 Recall: 0.9248 F1: 0.9196 Train AUC: 0.9943 Val AUC: 0.9753 Time: 12.69\n",
      "Epoch: 661 Train Loss: 0.0902 Val Loss: 0.2289 Acc: 0.9293 Pre: 0.9097 Recall: 0.9474 F1: 0.9282 Train AUC: 0.9966 Val AUC: 0.9756 Time: 12.90\n",
      "Epoch: 662 Train Loss: 0.1072 Val Loss: 0.2080 Acc: 0.9312 Pre: 0.9254 Recall: 0.9323 F1: 0.9288 Train AUC: 0.9955 Val AUC: 0.9776 Time: 13.86\n",
      "Epoch: 663 Train Loss: 0.0941 Val Loss: 0.2098 Acc: 0.9239 Pre: 0.9375 Recall: 0.9023 F1: 0.9195 Train AUC: 0.9957 Val AUC: 0.9780 Time: 14.23\n",
      "Epoch: 664 Train Loss: 0.1220 Val Loss: 0.2094 Acc: 0.9293 Pre: 0.9283 Recall: 0.9248 F1: 0.9266 Train AUC: 0.9923 Val AUC: 0.9774 Time: 14.54\n",
      "Epoch: 665 Train Loss: 0.0997 Val Loss: 0.2594 Acc: 0.9076 Pre: 0.8746 Recall: 0.9436 F1: 0.9078 Train AUC: 0.9950 Val AUC: 0.9712 Time: 14.81\n",
      "Epoch: 666 Train Loss: 0.1218 Val Loss: 0.2287 Acc: 0.9167 Pre: 0.8986 Recall: 0.9323 F1: 0.9151 Train AUC: 0.9934 Val AUC: 0.9743 Time: 12.48\n",
      "Epoch: 667 Train Loss: 0.1070 Val Loss: 0.2033 Acc: 0.9312 Pre: 0.9318 Recall: 0.9248 F1: 0.9283 Train AUC: 0.9944 Val AUC: 0.9778 Time: 12.46\n",
      "Epoch: 668 Train Loss: 0.0994 Val Loss: 0.2042 Acc: 0.9293 Pre: 0.9316 Recall: 0.9211 F1: 0.9263 Train AUC: 0.9957 Val AUC: 0.9781 Time: 12.65\n",
      "Epoch: 669 Train Loss: 0.1061 Val Loss: 0.2189 Acc: 0.9239 Pre: 0.9058 Recall: 0.9398 F1: 0.9225 Train AUC: 0.9946 Val AUC: 0.9770 Time: 12.56\n",
      "Epoch: 670 Train Loss: 0.0979 Val Loss: 0.2619 Acc: 0.9130 Pre: 0.8759 Recall: 0.9549 F1: 0.9137 Train AUC: 0.9949 Val AUC: 0.9741 Time: 12.69\n",
      "Epoch: 671 Train Loss: 0.1163 Val Loss: 0.2331 Acc: 0.9185 Pre: 0.8905 Recall: 0.9474 F1: 0.9180 Train AUC: 0.9946 Val AUC: 0.9756 Time: 13.91\n",
      "Epoch: 672 Train Loss: 0.0955 Val Loss: 0.2047 Acc: 0.9275 Pre: 0.9313 Recall: 0.9173 F1: 0.9242 Train AUC: 0.9954 Val AUC: 0.9784 Time: 14.25\n",
      "Epoch: 673 Train Loss: 0.1009 Val Loss: 0.2005 Acc: 0.9275 Pre: 0.9313 Recall: 0.9173 F1: 0.9242 Train AUC: 0.9945 Val AUC: 0.9780 Time: 15.43\n",
      "Epoch: 674 Train Loss: 0.1011 Val Loss: 0.2053 Acc: 0.9257 Pre: 0.9121 Recall: 0.9361 F1: 0.9239 Train AUC: 0.9951 Val AUC: 0.9757 Time: 13.81\n",
      "Epoch: 675 Train Loss: 0.1104 Val Loss: 0.2266 Acc: 0.9203 Pre: 0.8993 Recall: 0.9398 F1: 0.9191 Train AUC: 0.9940 Val AUC: 0.9758 Time: 12.67\n",
      "Epoch: 676 Train Loss: 0.0914 Val Loss: 0.2296 Acc: 0.9221 Pre: 0.8968 Recall: 0.9474 F1: 0.9214 Train AUC: 0.9962 Val AUC: 0.9756 Time: 12.49\n",
      "Epoch: 677 Train Loss: 0.0974 Val Loss: 0.2178 Acc: 0.9293 Pre: 0.9283 Recall: 0.9248 F1: 0.9266 Train AUC: 0.9950 Val AUC: 0.9760 Time: 12.58\n",
      "Epoch: 678 Train Loss: 0.1102 Val Loss: 0.2175 Acc: 0.9239 Pre: 0.9179 Recall: 0.9248 F1: 0.9213 Train AUC: 0.9932 Val AUC: 0.9759 Time: 13.43\n",
      "Epoch: 679 Train Loss: 0.1016 Val Loss: 0.2272 Acc: 0.9239 Pre: 0.9058 Recall: 0.9398 F1: 0.9225 Train AUC: 0.9946 Val AUC: 0.9743 Time: 13.73\n",
      "Epoch: 680 Train Loss: 0.1073 Val Loss: 0.2154 Acc: 0.9221 Pre: 0.9145 Recall: 0.9248 F1: 0.9196 Train AUC: 0.9941 Val AUC: 0.9764 Time: 14.44\n",
      "Epoch: 681 Train Loss: 0.0989 Val Loss: 0.2193 Acc: 0.9293 Pre: 0.9188 Recall: 0.9361 F1: 0.9274 Train AUC: 0.9947 Val AUC: 0.9768 Time: 14.00\n",
      "Epoch: 682 Train Loss: 0.0957 Val Loss: 0.2370 Acc: 0.9149 Pre: 0.8897 Recall: 0.9398 F1: 0.9141 Train AUC: 0.9952 Val AUC: 0.9755 Time: 14.29\n",
      "Epoch: 683 Train Loss: 0.1040 Val Loss: 0.2314 Acc: 0.9221 Pre: 0.8968 Recall: 0.9474 F1: 0.9214 Train AUC: 0.9939 Val AUC: 0.9756 Time: 12.37\n",
      "Epoch: 684 Train Loss: 0.1079 Val Loss: 0.2093 Acc: 0.9239 Pre: 0.9211 Recall: 0.9211 F1: 0.9211 Train AUC: 0.9942 Val AUC: 0.9768 Time: 12.72\n",
      "Epoch: 685 Train Loss: 0.1039 Val Loss: 0.2020 Acc: 0.9275 Pre: 0.9313 Recall: 0.9173 F1: 0.9242 Train AUC: 0.9950 Val AUC: 0.9776 Time: 12.61\n",
      "Epoch: 686 Train Loss: 0.0988 Val Loss: 0.2065 Acc: 0.9312 Pre: 0.9222 Recall: 0.9361 F1: 0.9291 Train AUC: 0.9958 Val AUC: 0.9776 Time: 12.57\n",
      "Epoch: 687 Train Loss: 0.1009 Val Loss: 0.2287 Acc: 0.9293 Pre: 0.9127 Recall: 0.9436 F1: 0.9279 Train AUC: 0.9948 Val AUC: 0.9757 Time: 12.88\n",
      "Epoch: 688 Train Loss: 0.0992 Val Loss: 0.2457 Acc: 0.9130 Pre: 0.8811 Recall: 0.9474 F1: 0.9130 Train AUC: 0.9947 Val AUC: 0.9737 Time: 13.86\n",
      "Epoch: 689 Train Loss: 0.1020 Val Loss: 0.2259 Acc: 0.9221 Pre: 0.9145 Recall: 0.9248 F1: 0.9196 Train AUC: 0.9952 Val AUC: 0.9750 Time: 14.56\n",
      "Epoch: 690 Train Loss: 0.0985 Val Loss: 0.2104 Acc: 0.9257 Pre: 0.9278 Recall: 0.9173 F1: 0.9225 Train AUC: 0.9955 Val AUC: 0.9774 Time: 14.37\n",
      "Epoch: 691 Train Loss: 0.0966 Val Loss: 0.2080 Acc: 0.9293 Pre: 0.9251 Recall: 0.9286 F1: 0.9268 Train AUC: 0.9951 Val AUC: 0.9771 Time: 13.35\n",
      "Epoch: 692 Train Loss: 0.1005 Val Loss: 0.2245 Acc: 0.9203 Pre: 0.9051 Recall: 0.9323 F1: 0.9185 Train AUC: 0.9954 Val AUC: 0.9761 Time: 13.54\n",
      "Epoch: 693 Train Loss: 0.0988 Val Loss: 0.2218 Acc: 0.9239 Pre: 0.9058 Recall: 0.9398 F1: 0.9225 Train AUC: 0.9959 Val AUC: 0.9768 Time: 13.56\n",
      "Epoch: 694 Train Loss: 0.0931 Val Loss: 0.2216 Acc: 0.9239 Pre: 0.9179 Recall: 0.9248 F1: 0.9213 Train AUC: 0.9955 Val AUC: 0.9767 Time: 13.65\n",
      "Epoch: 695 Train Loss: 0.0924 Val Loss: 0.2278 Acc: 0.9167 Pre: 0.9135 Recall: 0.9135 F1: 0.9135 Train AUC: 0.9960 Val AUC: 0.9761 Time: 13.00\n",
      "Epoch: 696 Train Loss: 0.0904 Val Loss: 0.2284 Acc: 0.9257 Pre: 0.9151 Recall: 0.9323 F1: 0.9236 Train AUC: 0.9958 Val AUC: 0.9756 Time: 13.44\n",
      "Epoch: 697 Train Loss: 0.0901 Val Loss: 0.2252 Acc: 0.9275 Pre: 0.9154 Recall: 0.9361 F1: 0.9257 Train AUC: 0.9960 Val AUC: 0.9749 Time: 13.84\n",
      "Epoch: 698 Train Loss: 0.0995 Val Loss: 0.2137 Acc: 0.9293 Pre: 0.9188 Recall: 0.9361 F1: 0.9274 Train AUC: 0.9957 Val AUC: 0.9759 Time: 14.07\n",
      "Epoch: 699 Train Loss: 0.1122 Val Loss: 0.2103 Acc: 0.9275 Pre: 0.9280 Recall: 0.9211 F1: 0.9245 Train AUC: 0.9929 Val AUC: 0.9775 Time: 12.35\n",
      "Epoch: 700 Train Loss: 0.0984 Val Loss: 0.2193 Acc: 0.9312 Pre: 0.9254 Recall: 0.9323 F1: 0.9288 Train AUC: 0.9955 Val AUC: 0.9770 Time: 12.38\n",
      "Epoch: 701 Train Loss: 0.0901 Val Loss: 0.2326 Acc: 0.9239 Pre: 0.9000 Recall: 0.9474 F1: 0.9231 Train AUC: 0.9954 Val AUC: 0.9763 Time: 12.57\n",
      "Epoch: 702 Train Loss: 0.1007 Val Loss: 0.2264 Acc: 0.9239 Pre: 0.9058 Recall: 0.9398 F1: 0.9225 Train AUC: 0.9946 Val AUC: 0.9761 Time: 12.76\n",
      "Epoch: 703 Train Loss: 0.1037 Val Loss: 0.2119 Acc: 0.9275 Pre: 0.9248 Recall: 0.9248 F1: 0.9248 Train AUC: 0.9946 Val AUC: 0.9777 Time: 13.76\n",
      "Epoch: 704 Train Loss: 0.1040 Val Loss: 0.2053 Acc: 0.9293 Pre: 0.9316 Recall: 0.9211 F1: 0.9263 Train AUC: 0.9938 Val AUC: 0.9776 Time: 14.08\n",
      "Epoch: 705 Train Loss: 0.1041 Val Loss: 0.2101 Acc: 0.9239 Pre: 0.9118 Recall: 0.9323 F1: 0.9219 Train AUC: 0.9943 Val AUC: 0.9760 Time: 14.59\n",
      "Epoch: 706 Train Loss: 0.0974 Val Loss: 0.2182 Acc: 0.9221 Pre: 0.9025 Recall: 0.9398 F1: 0.9208 Train AUC: 0.9958 Val AUC: 0.9772 Time: 14.61\n",
      "Epoch: 707 Train Loss: 0.0924 Val Loss: 0.2241 Acc: 0.9149 Pre: 0.8897 Recall: 0.9398 F1: 0.9141 Train AUC: 0.9961 Val AUC: 0.9775 Time: 12.99\n",
      "Epoch: 708 Train Loss: 0.0942 Val Loss: 0.2184 Acc: 0.9293 Pre: 0.9158 Recall: 0.9398 F1: 0.9276 Train AUC: 0.9960 Val AUC: 0.9779 Time: 12.39\n",
      "Epoch: 709 Train Loss: 0.1059 Val Loss: 0.2061 Acc: 0.9275 Pre: 0.9248 Recall: 0.9248 F1: 0.9248 Train AUC: 0.9935 Val AUC: 0.9783 Time: 12.49\n",
      "Epoch: 710 Train Loss: 0.1044 Val Loss: 0.2211 Acc: 0.9257 Pre: 0.9091 Recall: 0.9398 F1: 0.9242 Train AUC: 0.9945 Val AUC: 0.9756 Time: 12.94\n",
      "Epoch: 711 Train Loss: 0.1179 Val Loss: 0.2236 Acc: 0.9221 Pre: 0.9025 Recall: 0.9398 F1: 0.9208 Train AUC: 0.9925 Val AUC: 0.9776 Time: 13.84\n",
      "Epoch: 712 Train Loss: 0.1004 Val Loss: 0.2298 Acc: 0.9149 Pre: 0.9101 Recall: 0.9135 F1: 0.9118 Train AUC: 0.9952 Val AUC: 0.9761 Time: 13.95\n",
      "Epoch: 713 Train Loss: 0.1125 Val Loss: 0.2287 Acc: 0.9167 Pre: 0.9167 Recall: 0.9098 F1: 0.9132 Train AUC: 0.9929 Val AUC: 0.9766 Time: 14.67\n",
      "Epoch: 714 Train Loss: 0.1088 Val Loss: 0.2247 Acc: 0.9112 Pre: 0.8917 Recall: 0.9286 F1: 0.9098 Train AUC: 0.9933 Val AUC: 0.9753 Time: 14.53\n",
      "Epoch: 715 Train Loss: 0.0973 Val Loss: 0.2295 Acc: 0.9167 Pre: 0.8986 Recall: 0.9323 F1: 0.9151 Train AUC: 0.9952 Val AUC: 0.9740 Time: 12.52\n",
      "Epoch: 716 Train Loss: 0.1203 Val Loss: 0.2062 Acc: 0.9348 Pre: 0.9323 Recall: 0.9323 F1: 0.9323 Train AUC: 0.9932 Val AUC: 0.9774 Time: 12.55\n",
      "Epoch: 717 Train Loss: 0.0927 Val Loss: 0.2113 Acc: 0.9275 Pre: 0.9313 Recall: 0.9173 F1: 0.9242 Train AUC: 0.9958 Val AUC: 0.9771 Time: 12.36\n",
      "Epoch: 718 Train Loss: 0.1039 Val Loss: 0.2284 Acc: 0.9239 Pre: 0.9148 Recall: 0.9286 F1: 0.9216 Train AUC: 0.9955 Val AUC: 0.9752 Time: 12.45\n",
      "Epoch: 719 Train Loss: 0.0944 Val Loss: 0.2490 Acc: 0.9185 Pre: 0.8905 Recall: 0.9474 F1: 0.9180 Train AUC: 0.9957 Val AUC: 0.9735 Time: 12.63\n",
      "Epoch: 720 Train Loss: 0.1057 Val Loss: 0.2258 Acc: 0.9239 Pre: 0.9088 Recall: 0.9361 F1: 0.9222 Train AUC: 0.9941 Val AUC: 0.9765 Time: 13.99\n",
      "Epoch: 721 Train Loss: 0.0949 Val Loss: 0.2054 Acc: 0.9239 Pre: 0.9242 Recall: 0.9173 F1: 0.9208 Train AUC: 0.9963 Val AUC: 0.9783 Time: 14.32\n",
      "Epoch: 722 Train Loss: 0.1003 Val Loss: 0.2029 Acc: 0.9275 Pre: 0.9313 Recall: 0.9173 F1: 0.9242 Train AUC: 0.9947 Val AUC: 0.9791 Time: 14.79\n",
      "Epoch: 723 Train Loss: 0.0973 Val Loss: 0.2081 Acc: 0.9275 Pre: 0.9185 Recall: 0.9323 F1: 0.9254 Train AUC: 0.9950 Val AUC: 0.9785 Time: 14.70\n",
      "Epoch: 724 Train Loss: 0.0995 Val Loss: 0.2124 Acc: 0.9167 Pre: 0.9044 Recall: 0.9248 F1: 0.9145 Train AUC: 0.9950 Val AUC: 0.9758 Time: 13.40\n",
      "Epoch: 725 Train Loss: 0.0906 Val Loss: 0.2118 Acc: 0.9112 Pre: 0.9033 Recall: 0.9135 F1: 0.9084 Train AUC: 0.9965 Val AUC: 0.9744 Time: 12.65\n",
      "Epoch: 726 Train Loss: 0.1031 Val Loss: 0.2102 Acc: 0.9203 Pre: 0.9205 Recall: 0.9135 F1: 0.9170 Train AUC: 0.9952 Val AUC: 0.9774 Time: 12.51\n",
      "Epoch: 727 Train Loss: 0.0884 Val Loss: 0.2183 Acc: 0.9239 Pre: 0.9242 Recall: 0.9173 F1: 0.9208 Train AUC: 0.9966 Val AUC: 0.9768 Time: 12.88\n",
      "Epoch: 728 Train Loss: 0.0989 Val Loss: 0.2354 Acc: 0.9149 Pre: 0.8897 Recall: 0.9398 F1: 0.9141 Train AUC: 0.9954 Val AUC: 0.9754 Time: 12.85\n",
      "Epoch: 729 Train Loss: 0.1000 Val Loss: 0.2425 Acc: 0.9149 Pre: 0.8897 Recall: 0.9398 F1: 0.9141 Train AUC: 0.9950 Val AUC: 0.9741 Time: 13.83\n",
      "Epoch: 730 Train Loss: 0.1042 Val Loss: 0.2222 Acc: 0.9185 Pre: 0.8961 Recall: 0.9398 F1: 0.9174 Train AUC: 0.9941 Val AUC: 0.9762 Time: 14.06\n",
      "Epoch: 731 Train Loss: 0.0951 Val Loss: 0.2129 Acc: 0.9275 Pre: 0.9248 Recall: 0.9248 F1: 0.9248 Train AUC: 0.9958 Val AUC: 0.9765 Time: 14.72\n",
      "Epoch: 732 Train Loss: 0.0924 Val Loss: 0.2160 Acc: 0.9257 Pre: 0.9278 Recall: 0.9173 F1: 0.9225 Train AUC: 0.9965 Val AUC: 0.9762 Time: 13.36\n",
      "Epoch: 733 Train Loss: 0.1054 Val Loss: 0.2233 Acc: 0.9130 Pre: 0.8893 Recall: 0.9361 F1: 0.9121 Train AUC: 0.9949 Val AUC: 0.9750 Time: 12.73\n",
      "Epoch: 734 Train Loss: 0.0934 Val Loss: 0.2363 Acc: 0.9076 Pre: 0.8720 Recall: 0.9474 F1: 0.9081 Train AUC: 0.9956 Val AUC: 0.9754 Time: 12.66\n",
      "Epoch: 735 Train Loss: 0.1001 Val Loss: 0.2201 Acc: 0.9149 Pre: 0.8925 Recall: 0.9361 F1: 0.9138 Train AUC: 0.9950 Val AUC: 0.9777 Time: 12.57\n",
      "Epoch: 736 Train Loss: 0.0962 Val Loss: 0.2144 Acc: 0.9257 Pre: 0.9151 Recall: 0.9323 F1: 0.9236 Train AUC: 0.9948 Val AUC: 0.9775 Time: 12.99\n",
      "Epoch: 737 Train Loss: 0.0928 Val Loss: 0.2223 Acc: 0.9293 Pre: 0.9219 Recall: 0.9323 F1: 0.9271 Train AUC: 0.9954 Val AUC: 0.9759 Time: 13.68\n",
      "Epoch: 738 Train Loss: 0.0955 Val Loss: 0.2204 Acc: 0.9275 Pre: 0.9248 Recall: 0.9248 F1: 0.9248 Train AUC: 0.9960 Val AUC: 0.9759 Time: 14.36\n",
      "Epoch: 739 Train Loss: 0.1050 Val Loss: 0.2196 Acc: 0.9257 Pre: 0.9213 Recall: 0.9248 F1: 0.9231 Train AUC: 0.9939 Val AUC: 0.9764 Time: 14.78\n",
      "Epoch: 740 Train Loss: 0.0900 Val Loss: 0.2373 Acc: 0.9094 Pre: 0.8885 Recall: 0.9286 F1: 0.9081 Train AUC: 0.9958 Val AUC: 0.9753 Time: 14.11\n",
      "Epoch: 741 Train Loss: 0.0999 Val Loss: 0.2298 Acc: 0.9185 Pre: 0.9018 Recall: 0.9323 F1: 0.9168 Train AUC: 0.9942 Val AUC: 0.9756 Time: 14.24\n",
      "Epoch: 742 Train Loss: 0.0880 Val Loss: 0.2209 Acc: 0.9275 Pre: 0.9185 Recall: 0.9323 F1: 0.9254 Train AUC: 0.9959 Val AUC: 0.9754 Time: 12.37\n",
      "Epoch: 743 Train Loss: 0.0925 Val Loss: 0.2188 Acc: 0.9312 Pre: 0.9222 Recall: 0.9361 F1: 0.9291 Train AUC: 0.9960 Val AUC: 0.9756 Time: 12.39\n",
      "Epoch: 744 Train Loss: 0.0964 Val Loss: 0.2103 Acc: 0.9312 Pre: 0.9318 Recall: 0.9248 F1: 0.9283 Train AUC: 0.9953 Val AUC: 0.9776 Time: 12.45\n",
      "Epoch: 745 Train Loss: 0.1013 Val Loss: 0.2248 Acc: 0.9076 Pre: 0.8799 Recall: 0.9361 F1: 0.9071 Train AUC: 0.9946 Val AUC: 0.9769 Time: 12.45\n",
      "Epoch: 746 Train Loss: 0.0971 Val Loss: 0.2579 Acc: 0.9040 Pre: 0.8660 Recall: 0.9474 F1: 0.9048 Train AUC: 0.9954 Val AUC: 0.9743 Time: 12.75\n",
      "Epoch: 747 Train Loss: 0.1012 Val Loss: 0.2388 Acc: 0.9130 Pre: 0.8865 Recall: 0.9398 F1: 0.9124 Train AUC: 0.9953 Val AUC: 0.9755 Time: 13.99\n",
      "Epoch: 748 Train Loss: 0.0945 Val Loss: 0.2196 Acc: 0.9275 Pre: 0.9248 Recall: 0.9248 F1: 0.9248 Train AUC: 0.9954 Val AUC: 0.9761 Time: 14.21\n",
      "Epoch: 749 Train Loss: 0.0989 Val Loss: 0.2252 Acc: 0.9239 Pre: 0.9242 Recall: 0.9173 F1: 0.9208 Train AUC: 0.9952 Val AUC: 0.9742 Time: 14.86\n",
      "Epoch: 750 Train Loss: 0.0965 Val Loss: 0.2236 Acc: 0.9257 Pre: 0.9213 Recall: 0.9248 F1: 0.9231 Train AUC: 0.9954 Val AUC: 0.9755 Time: 14.18\n",
      "Epoch: 751 Train Loss: 0.0905 Val Loss: 0.2376 Acc: 0.9221 Pre: 0.9025 Recall: 0.9398 F1: 0.9208 Train AUC: 0.9959 Val AUC: 0.9752 Time: 14.13\n",
      "Epoch: 752 Train Loss: 0.0873 Val Loss: 0.2421 Acc: 0.9094 Pre: 0.8857 Recall: 0.9323 F1: 0.9084 Train AUC: 0.9965 Val AUC: 0.9755 Time: 12.49\n",
      "Epoch: 753 Train Loss: 0.0919 Val Loss: 0.2244 Acc: 0.9185 Pre: 0.9048 Recall: 0.9286 F1: 0.9165 Train AUC: 0.9960 Val AUC: 0.9771 Time: 12.74\n",
      "Epoch: 754 Train Loss: 0.0901 Val Loss: 0.2115 Acc: 0.9275 Pre: 0.9248 Recall: 0.9248 F1: 0.9248 Train AUC: 0.9955 Val AUC: 0.9769 Time: 12.62\n",
      "Epoch: 755 Train Loss: 0.0890 Val Loss: 0.2104 Acc: 0.9275 Pre: 0.9313 Recall: 0.9173 F1: 0.9242 Train AUC: 0.9961 Val AUC: 0.9747 Time: 12.85\n",
      "Epoch: 756 Train Loss: 0.1012 Val Loss: 0.2135 Acc: 0.9257 Pre: 0.9245 Recall: 0.9211 F1: 0.9228 Train AUC: 0.9948 Val AUC: 0.9751 Time: 13.85\n",
      "Epoch: 757 Train Loss: 0.0918 Val Loss: 0.2258 Acc: 0.9239 Pre: 0.9088 Recall: 0.9361 F1: 0.9222 Train AUC: 0.9964 Val AUC: 0.9760 Time: 14.35\n",
      "Epoch: 758 Train Loss: 0.0928 Val Loss: 0.2386 Acc: 0.9149 Pre: 0.8953 Recall: 0.9323 F1: 0.9134 Train AUC: 0.9960 Val AUC: 0.9757 Time: 14.69\n",
      "Epoch: 759 Train Loss: 0.0955 Val Loss: 0.2407 Acc: 0.9203 Pre: 0.8993 Recall: 0.9398 F1: 0.9191 Train AUC: 0.9951 Val AUC: 0.9755 Time: 13.68\n",
      "Epoch: 760 Train Loss: 0.0908 Val Loss: 0.2307 Acc: 0.9239 Pre: 0.9058 Recall: 0.9398 F1: 0.9225 Train AUC: 0.9953 Val AUC: 0.9758 Time: 13.28\n",
      "Epoch: 761 Train Loss: 0.0941 Val Loss: 0.2170 Acc: 0.9275 Pre: 0.9248 Recall: 0.9248 F1: 0.9248 Train AUC: 0.9958 Val AUC: 0.9756 Time: 12.67\n",
      "Epoch: 762 Train Loss: 0.0840 Val Loss: 0.2130 Acc: 0.9275 Pre: 0.9313 Recall: 0.9173 F1: 0.9242 Train AUC: 0.9963 Val AUC: 0.9774 Time: 12.44\n",
      "Epoch: 763 Train Loss: 0.0990 Val Loss: 0.2179 Acc: 0.9221 Pre: 0.9084 Recall: 0.9323 F1: 0.9202 Train AUC: 0.9955 Val AUC: 0.9766 Time: 12.64\n",
      "Epoch: 764 Train Loss: 0.0857 Val Loss: 0.2338 Acc: 0.9112 Pre: 0.8780 Recall: 0.9474 F1: 0.9114 Train AUC: 0.9966 Val AUC: 0.9752 Time: 12.85\n",
      "Epoch: 765 Train Loss: 0.0980 Val Loss: 0.2178 Acc: 0.9203 Pre: 0.9111 Recall: 0.9248 F1: 0.9179 Train AUC: 0.9956 Val AUC: 0.9766 Time: 13.73\n",
      "Epoch: 766 Train Loss: 0.0995 Val Loss: 0.2203 Acc: 0.9275 Pre: 0.9248 Recall: 0.9248 F1: 0.9248 Train AUC: 0.9951 Val AUC: 0.9766 Time: 14.20\n",
      "Epoch: 767 Train Loss: 0.0862 Val Loss: 0.2400 Acc: 0.9293 Pre: 0.9158 Recall: 0.9398 F1: 0.9276 Train AUC: 0.9969 Val AUC: 0.9735 Time: 15.48\n",
      "Epoch: 768 Train Loss: 0.0953 Val Loss: 0.2379 Acc: 0.9239 Pre: 0.9179 Recall: 0.9248 F1: 0.9213 Train AUC: 0.9953 Val AUC: 0.9733 Time: 13.60\n",
      "Epoch: 769 Train Loss: 0.0961 Val Loss: 0.2321 Acc: 0.9203 Pre: 0.9111 Recall: 0.9248 F1: 0.9179 Train AUC: 0.9955 Val AUC: 0.9748 Time: 12.88\n",
      "Epoch: 770 Train Loss: 0.0968 Val Loss: 0.2272 Acc: 0.9185 Pre: 0.9077 Recall: 0.9248 F1: 0.9162 Train AUC: 0.9945 Val AUC: 0.9762 Time: 12.62\n",
      "Epoch: 771 Train Loss: 0.0993 Val Loss: 0.2210 Acc: 0.9239 Pre: 0.9118 Recall: 0.9323 F1: 0.9219 Train AUC: 0.9946 Val AUC: 0.9776 Time: 12.63\n",
      "Epoch: 772 Train Loss: 0.0918 Val Loss: 0.2228 Acc: 0.9203 Pre: 0.8993 Recall: 0.9398 F1: 0.9191 Train AUC: 0.9958 Val AUC: 0.9761 Time: 13.20\n",
      "Epoch: 773 Train Loss: 0.0899 Val Loss: 0.2168 Acc: 0.9239 Pre: 0.9118 Recall: 0.9323 F1: 0.9219 Train AUC: 0.9971 Val AUC: 0.9773 Time: 13.98\n",
      "Epoch: 774 Train Loss: 0.0966 Val Loss: 0.2061 Acc: 0.9275 Pre: 0.9313 Recall: 0.9173 F1: 0.9242 Train AUC: 0.9952 Val AUC: 0.9788 Time: 14.54\n",
      "Epoch: 775 Train Loss: 0.0979 Val Loss: 0.2066 Acc: 0.9239 Pre: 0.9211 Recall: 0.9211 F1: 0.9211 Train AUC: 0.9946 Val AUC: 0.9789 Time: 14.07\n",
      "Epoch: 776 Train Loss: 0.0961 Val Loss: 0.2240 Acc: 0.9239 Pre: 0.9058 Recall: 0.9398 F1: 0.9225 Train AUC: 0.9951 Val AUC: 0.9769 Time: 13.03\n",
      "Epoch: 777 Train Loss: 0.0888 Val Loss: 0.2422 Acc: 0.9203 Pre: 0.8993 Recall: 0.9398 F1: 0.9191 Train AUC: 0.9962 Val AUC: 0.9747 Time: 12.43\n",
      "Epoch: 778 Train Loss: 0.0981 Val Loss: 0.2400 Acc: 0.9149 Pre: 0.8982 Recall: 0.9286 F1: 0.9131 Train AUC: 0.9955 Val AUC: 0.9750 Time: 12.90\n",
      "Epoch: 779 Train Loss: 0.0944 Val Loss: 0.2200 Acc: 0.9239 Pre: 0.9242 Recall: 0.9173 F1: 0.9208 Train AUC: 0.9954 Val AUC: 0.9770 Time: 13.53\n",
      "Epoch: 780 Train Loss: 0.1015 Val Loss: 0.2097 Acc: 0.9275 Pre: 0.9185 Recall: 0.9323 F1: 0.9254 Train AUC: 0.9952 Val AUC: 0.9780 Time: 13.83\n",
      "Epoch: 781 Train Loss: 0.0944 Val Loss: 0.2217 Acc: 0.9185 Pre: 0.8961 Recall: 0.9398 F1: 0.9174 Train AUC: 0.9954 Val AUC: 0.9761 Time: 14.81\n",
      "Epoch: 782 Train Loss: 0.0919 Val Loss: 0.2178 Acc: 0.9257 Pre: 0.9032 Recall: 0.9474 F1: 0.9248 Train AUC: 0.9963 Val AUC: 0.9763 Time: 13.76\n",
      "Epoch: 783 Train Loss: 0.0889 Val Loss: 0.2150 Acc: 0.9239 Pre: 0.9118 Recall: 0.9323 F1: 0.9219 Train AUC: 0.9964 Val AUC: 0.9774 Time: 12.43\n",
      "Epoch: 784 Train Loss: 0.0949 Val Loss: 0.2200 Acc: 0.9203 Pre: 0.9051 Recall: 0.9323 F1: 0.9185 Train AUC: 0.9958 Val AUC: 0.9776 Time: 12.34\n",
      "Epoch: 785 Train Loss: 0.0863 Val Loss: 0.2196 Acc: 0.9257 Pre: 0.9091 Recall: 0.9398 F1: 0.9242 Train AUC: 0.9967 Val AUC: 0.9777 Time: 12.48\n",
      "Epoch: 786 Train Loss: 0.0951 Val Loss: 0.2238 Acc: 0.9275 Pre: 0.9094 Recall: 0.9436 F1: 0.9262 Train AUC: 0.9952 Val AUC: 0.9768 Time: 12.36\n",
      "Epoch: 787 Train Loss: 0.0969 Val Loss: 0.2175 Acc: 0.9275 Pre: 0.9124 Recall: 0.9398 F1: 0.9259 Train AUC: 0.9949 Val AUC: 0.9765 Time: 14.14\n",
      "Epoch: 788 Train Loss: 0.0887 Val Loss: 0.2101 Acc: 0.9275 Pre: 0.9248 Recall: 0.9248 F1: 0.9248 Train AUC: 0.9957 Val AUC: 0.9770 Time: 14.41\n",
      "Epoch: 789 Train Loss: 0.0938 Val Loss: 0.2183 Acc: 0.9221 Pre: 0.9145 Recall: 0.9248 F1: 0.9196 Train AUC: 0.9950 Val AUC: 0.9763 Time: 14.54\n",
      "Epoch: 790 Train Loss: 0.0887 Val Loss: 0.2203 Acc: 0.9239 Pre: 0.9211 Recall: 0.9211 F1: 0.9211 Train AUC: 0.9958 Val AUC: 0.9758 Time: 14.44\n",
      "Epoch: 791 Train Loss: 0.0993 Val Loss: 0.2299 Acc: 0.9167 Pre: 0.8957 Recall: 0.9361 F1: 0.9154 Train AUC: 0.9952 Val AUC: 0.9752 Time: 12.93\n",
      "Epoch: 792 Train Loss: 0.0930 Val Loss: 0.2354 Acc: 0.9112 Pre: 0.8834 Recall: 0.9398 F1: 0.9107 Train AUC: 0.9955 Val AUC: 0.9749 Time: 12.45\n",
      "Epoch: 793 Train Loss: 0.0914 Val Loss: 0.2366 Acc: 0.9221 Pre: 0.8996 Recall: 0.9436 F1: 0.9211 Train AUC: 0.9954 Val AUC: 0.9748 Time: 12.39\n",
      "Epoch: 794 Train Loss: 0.0933 Val Loss: 0.2120 Acc: 0.9221 Pre: 0.9114 Recall: 0.9286 F1: 0.9199 Train AUC: 0.9954 Val AUC: 0.9768 Time: 12.73\n",
      "Epoch: 795 Train Loss: 0.0901 Val Loss: 0.2105 Acc: 0.9275 Pre: 0.9248 Recall: 0.9248 F1: 0.9248 Train AUC: 0.9959 Val AUC: 0.9773 Time: 12.80\n",
      "Epoch: 796 Train Loss: 0.0838 Val Loss: 0.2131 Acc: 0.9312 Pre: 0.9254 Recall: 0.9323 F1: 0.9288 Train AUC: 0.9967 Val AUC: 0.9775 Time: 13.73\n",
      "Epoch: 797 Train Loss: 0.0884 Val Loss: 0.2118 Acc: 0.9257 Pre: 0.9213 Recall: 0.9248 F1: 0.9231 Train AUC: 0.9968 Val AUC: 0.9784 Time: 14.12\n",
      "Epoch: 798 Train Loss: 0.0846 Val Loss: 0.2195 Acc: 0.9167 Pre: 0.8929 Recall: 0.9398 F1: 0.9158 Train AUC: 0.9969 Val AUC: 0.9780 Time: 14.56\n",
      "Epoch: 799 Train Loss: 0.1027 Val Loss: 0.2210 Acc: 0.9239 Pre: 0.9000 Recall: 0.9474 F1: 0.9231 Train AUC: 0.9940 Val AUC: 0.9775 Time: 14.27\n",
      "Epoch: 800 Train Loss: 0.0924 Val Loss: 0.2060 Acc: 0.9293 Pre: 0.9219 Recall: 0.9323 F1: 0.9271 Train AUC: 0.9954 Val AUC: 0.9777 Time: 14.73\n",
      "Epoch: 801 Train Loss: 0.0867 Val Loss: 0.2090 Acc: 0.9312 Pre: 0.9318 Recall: 0.9248 F1: 0.9283 Train AUC: 0.9969 Val AUC: 0.9760 Time: 12.42\n",
      "Epoch: 802 Train Loss: 0.0941 Val Loss: 0.2161 Acc: 0.9275 Pre: 0.9185 Recall: 0.9323 F1: 0.9254 Train AUC: 0.9958 Val AUC: 0.9770 Time: 12.37\n",
      "Epoch: 803 Train Loss: 0.0989 Val Loss: 0.2187 Acc: 0.9203 Pre: 0.9051 Recall: 0.9323 F1: 0.9185 Train AUC: 0.9955 Val AUC: 0.9778 Time: 12.35\n",
      "Epoch: 804 Train Loss: 0.0949 Val Loss: 0.2301 Acc: 0.9130 Pre: 0.8978 Recall: 0.9248 F1: 0.9111 Train AUC: 0.9951 Val AUC: 0.9769 Time: 12.64\n",
      "Epoch: 805 Train Loss: 0.0937 Val Loss: 0.2318 Acc: 0.9239 Pre: 0.9058 Recall: 0.9398 F1: 0.9225 Train AUC: 0.9954 Val AUC: 0.9768 Time: 13.99\n",
      "Epoch: 806 Train Loss: 0.0912 Val Loss: 0.2258 Acc: 0.9257 Pre: 0.9151 Recall: 0.9323 F1: 0.9236 Train AUC: 0.9959 Val AUC: 0.9759 Time: 14.47\n",
      "Epoch: 807 Train Loss: 0.0992 Val Loss: 0.2235 Acc: 0.9275 Pre: 0.9185 Recall: 0.9323 F1: 0.9254 Train AUC: 0.9956 Val AUC: 0.9763 Time: 14.62\n",
      "Epoch: 808 Train Loss: 0.0885 Val Loss: 0.2146 Acc: 0.9167 Pre: 0.9074 Recall: 0.9211 F1: 0.9142 Train AUC: 0.9965 Val AUC: 0.9775 Time: 14.44\n",
      "Epoch: 809 Train Loss: 0.0828 Val Loss: 0.2128 Acc: 0.9257 Pre: 0.9151 Recall: 0.9323 F1: 0.9236 Train AUC: 0.9968 Val AUC: 0.9772 Time: 13.90\n",
      "Epoch: 810 Train Loss: 0.0967 Val Loss: 0.2176 Acc: 0.9239 Pre: 0.9088 Recall: 0.9361 F1: 0.9222 Train AUC: 0.9945 Val AUC: 0.9757 Time: 12.44\n",
      "Epoch: 811 Train Loss: 0.0958 Val Loss: 0.2410 Acc: 0.9112 Pre: 0.8780 Recall: 0.9474 F1: 0.9114 Train AUC: 0.9957 Val AUC: 0.9742 Time: 12.41\n",
      "Epoch: 812 Train Loss: 0.0944 Val Loss: 0.2399 Acc: 0.9076 Pre: 0.8826 Recall: 0.9323 F1: 0.9068 Train AUC: 0.9967 Val AUC: 0.9756 Time: 12.66\n",
      "Epoch: 813 Train Loss: 0.0937 Val Loss: 0.2355 Acc: 0.9058 Pre: 0.8963 Recall: 0.9098 F1: 0.9030 Train AUC: 0.9951 Val AUC: 0.9755 Time: 12.81\n",
      "Epoch: 814 Train Loss: 0.0855 Val Loss: 0.2341 Acc: 0.9203 Pre: 0.9111 Recall: 0.9248 F1: 0.9179 Train AUC: 0.9963 Val AUC: 0.9750 Time: 12.48\n",
      "Epoch: 815 Train Loss: 0.0854 Val Loss: 0.2492 Acc: 0.9167 Pre: 0.9015 Recall: 0.9286 F1: 0.9148 Train AUC: 0.9963 Val AUC: 0.9710 Time: 12.53\n",
      "Epoch: 816 Train Loss: 0.0926 Val Loss: 0.2368 Acc: 0.9203 Pre: 0.9111 Recall: 0.9248 F1: 0.9179 Train AUC: 0.9964 Val AUC: 0.9724 Time: 12.92\n",
      "Epoch: 817 Train Loss: 0.1018 Val Loss: 0.2191 Acc: 0.9203 Pre: 0.9237 Recall: 0.9098 F1: 0.9167 Train AUC: 0.9950 Val AUC: 0.9773 Time: 14.38\n",
      "Epoch: 818 Train Loss: 0.1012 Val Loss: 0.2178 Acc: 0.9167 Pre: 0.9015 Recall: 0.9286 F1: 0.9148 Train AUC: 0.9957 Val AUC: 0.9777 Time: 14.69\n",
      "Epoch: 819 Train Loss: 0.0874 Val Loss: 0.2333 Acc: 0.9312 Pre: 0.9014 Recall: 0.9624 F1: 0.9309 Train AUC: 0.9960 Val AUC: 0.9770 Time: 13.92\n",
      "Epoch: 820 Train Loss: 0.0927 Val Loss: 0.2243 Acc: 0.9239 Pre: 0.9058 Recall: 0.9398 F1: 0.9225 Train AUC: 0.9962 Val AUC: 0.9769 Time: 14.03\n",
      "Epoch: 821 Train Loss: 0.0933 Val Loss: 0.2150 Acc: 0.9348 Pre: 0.9323 Recall: 0.9323 F1: 0.9323 Train AUC: 0.9968 Val AUC: 0.9769 Time: 12.45\n",
      "Epoch: 822 Train Loss: 0.0909 Val Loss: 0.2171 Acc: 0.9293 Pre: 0.9283 Recall: 0.9248 F1: 0.9266 Train AUC: 0.9961 Val AUC: 0.9772 Time: 13.06\n",
      "Epoch: 823 Train Loss: 0.0938 Val Loss: 0.2307 Acc: 0.9203 Pre: 0.9051 Recall: 0.9323 F1: 0.9185 Train AUC: 0.9954 Val AUC: 0.9757 Time: 13.22\n",
      "Epoch: 824 Train Loss: 0.0860 Val Loss: 0.2495 Acc: 0.9203 Pre: 0.9051 Recall: 0.9323 F1: 0.9185 Train AUC: 0.9965 Val AUC: 0.9725 Time: 13.58\n",
      "Epoch: 825 Train Loss: 0.0908 Val Loss: 0.2340 Acc: 0.9203 Pre: 0.9051 Recall: 0.9323 F1: 0.9185 Train AUC: 0.9966 Val AUC: 0.9739 Time: 13.93\n",
      "Epoch: 826 Train Loss: 0.0944 Val Loss: 0.2284 Acc: 0.9185 Pre: 0.9077 Recall: 0.9248 F1: 0.9162 Train AUC: 0.9954 Val AUC: 0.9762 Time: 14.44\n",
      "Epoch: 827 Train Loss: 0.0841 Val Loss: 0.2207 Acc: 0.9239 Pre: 0.9179 Recall: 0.9248 F1: 0.9213 Train AUC: 0.9968 Val AUC: 0.9771 Time: 12.80\n",
      "Epoch: 828 Train Loss: 0.0948 Val Loss: 0.2197 Acc: 0.9221 Pre: 0.9084 Recall: 0.9323 F1: 0.9202 Train AUC: 0.9955 Val AUC: 0.9779 Time: 12.68\n",
      "Epoch: 829 Train Loss: 0.0828 Val Loss: 0.2203 Acc: 0.9221 Pre: 0.9025 Recall: 0.9398 F1: 0.9208 Train AUC: 0.9972 Val AUC: 0.9777 Time: 13.34\n",
      "Epoch: 830 Train Loss: 0.0860 Val Loss: 0.2189 Acc: 0.9257 Pre: 0.9091 Recall: 0.9398 F1: 0.9242 Train AUC: 0.9968 Val AUC: 0.9772 Time: 13.73\n",
      "Epoch: 831 Train Loss: 0.0881 Val Loss: 0.2170 Acc: 0.9312 Pre: 0.9222 Recall: 0.9361 F1: 0.9291 Train AUC: 0.9964 Val AUC: 0.9769 Time: 14.13\n",
      "Epoch: 832 Train Loss: 0.0853 Val Loss: 0.2181 Acc: 0.9221 Pre: 0.9208 Recall: 0.9173 F1: 0.9190 Train AUC: 0.9966 Val AUC: 0.9777 Time: 14.45\n",
      "Epoch: 833 Train Loss: 0.0942 Val Loss: 0.2268 Acc: 0.9203 Pre: 0.9051 Recall: 0.9323 F1: 0.9185 Train AUC: 0.9953 Val AUC: 0.9768 Time: 13.21\n",
      "Epoch: 834 Train Loss: 0.0882 Val Loss: 0.2455 Acc: 0.9203 Pre: 0.8993 Recall: 0.9398 F1: 0.9191 Train AUC: 0.9959 Val AUC: 0.9744 Time: 12.70\n",
      "Epoch: 835 Train Loss: 0.0823 Val Loss: 0.2381 Acc: 0.9239 Pre: 0.9058 Recall: 0.9398 F1: 0.9225 Train AUC: 0.9968 Val AUC: 0.9743 Time: 12.59\n",
      "Epoch: 836 Train Loss: 0.0871 Val Loss: 0.2222 Acc: 0.9275 Pre: 0.9124 Recall: 0.9398 F1: 0.9259 Train AUC: 0.9963 Val AUC: 0.9756 Time: 12.62\n",
      "Epoch: 837 Train Loss: 0.0887 Val Loss: 0.2138 Acc: 0.9275 Pre: 0.9248 Recall: 0.9248 F1: 0.9248 Train AUC: 0.9961 Val AUC: 0.9764 Time: 13.03\n",
      "Epoch: 838 Train Loss: 0.0867 Val Loss: 0.2156 Acc: 0.9275 Pre: 0.9248 Recall: 0.9248 F1: 0.9248 Train AUC: 0.9966 Val AUC: 0.9771 Time: 14.04\n",
      "Epoch: 839 Train Loss: 0.0907 Val Loss: 0.2283 Acc: 0.9167 Pre: 0.8929 Recall: 0.9398 F1: 0.9158 Train AUC: 0.9956 Val AUC: 0.9772 Time: 14.39\n",
      "Epoch: 840 Train Loss: 0.0773 Val Loss: 0.2417 Acc: 0.9149 Pre: 0.8869 Recall: 0.9436 F1: 0.9144 Train AUC: 0.9975 Val AUC: 0.9757 Time: 13.99\n",
      "Epoch: 841 Train Loss: 0.0921 Val Loss: 0.2134 Acc: 0.9203 Pre: 0.9111 Recall: 0.9248 F1: 0.9179 Train AUC: 0.9964 Val AUC: 0.9783 Time: 14.22\n",
      "Epoch: 842 Train Loss: 0.0822 Val Loss: 0.2075 Acc: 0.9275 Pre: 0.9313 Recall: 0.9173 F1: 0.9242 Train AUC: 0.9970 Val AUC: 0.9783 Time: 12.69\n",
      "Epoch: 843 Train Loss: 0.0859 Val Loss: 0.2092 Acc: 0.9330 Pre: 0.9257 Recall: 0.9361 F1: 0.9308 Train AUC: 0.9966 Val AUC: 0.9780 Time: 12.49\n",
      "Epoch: 844 Train Loss: 0.0854 Val Loss: 0.2261 Acc: 0.9257 Pre: 0.8975 Recall: 0.9549 F1: 0.9253 Train AUC: 0.9965 Val AUC: 0.9763 Time: 12.28\n",
      "Epoch: 845 Train Loss: 0.0877 Val Loss: 0.2252 Acc: 0.9185 Pre: 0.8905 Recall: 0.9474 F1: 0.9180 Train AUC: 0.9967 Val AUC: 0.9773 Time: 12.59\n",
      "Epoch: 846 Train Loss: 0.0823 Val Loss: 0.2204 Acc: 0.9203 Pre: 0.8993 Recall: 0.9398 F1: 0.9191 Train AUC: 0.9968 Val AUC: 0.9782 Time: 13.14\n",
      "Epoch: 847 Train Loss: 0.0865 Val Loss: 0.2157 Acc: 0.9239 Pre: 0.9148 Recall: 0.9286 F1: 0.9216 Train AUC: 0.9960 Val AUC: 0.9782 Time: 14.20\n",
      "Epoch: 848 Train Loss: 0.0920 Val Loss: 0.2224 Acc: 0.9257 Pre: 0.9061 Recall: 0.9436 F1: 0.9245 Train AUC: 0.9962 Val AUC: 0.9771 Time: 14.43\n",
      "Epoch: 849 Train Loss: 0.0795 Val Loss: 0.2547 Acc: 0.9257 Pre: 0.8947 Recall: 0.9586 F1: 0.9256 Train AUC: 0.9975 Val AUC: 0.9752 Time: 14.26\n",
      "Epoch: 850 Train Loss: 0.0999 Val Loss: 0.2393 Acc: 0.9221 Pre: 0.8968 Recall: 0.9474 F1: 0.9214 Train AUC: 0.9959 Val AUC: 0.9757 Time: 12.75\n",
      "Epoch: 851 Train Loss: 0.1001 Val Loss: 0.2139 Acc: 0.9312 Pre: 0.9318 Recall: 0.9248 F1: 0.9283 Train AUC: 0.9956 Val AUC: 0.9776 Time: 12.51\n",
      "Epoch: 852 Train Loss: 0.0888 Val Loss: 0.2066 Acc: 0.9312 Pre: 0.9318 Recall: 0.9248 F1: 0.9283 Train AUC: 0.9962 Val AUC: 0.9787 Time: 12.57\n",
      "Epoch: 853 Train Loss: 0.0945 Val Loss: 0.2133 Acc: 0.9257 Pre: 0.9032 Recall: 0.9474 F1: 0.9248 Train AUC: 0.9960 Val AUC: 0.9760 Time: 13.74\n",
      "Epoch: 854 Train Loss: 0.0877 Val Loss: 0.2310 Acc: 0.9130 Pre: 0.8759 Recall: 0.9549 F1: 0.9137 Train AUC: 0.9965 Val AUC: 0.9753 Time: 14.14\n",
      "Epoch: 855 Train Loss: 0.0903 Val Loss: 0.2315 Acc: 0.9167 Pre: 0.8873 Recall: 0.9474 F1: 0.9164 Train AUC: 0.9971 Val AUC: 0.9764 Time: 14.44\n",
      "Epoch: 856 Train Loss: 0.0875 Val Loss: 0.2253 Acc: 0.9185 Pre: 0.9048 Recall: 0.9286 F1: 0.9165 Train AUC: 0.9963 Val AUC: 0.9771 Time: 12.70\n",
      "Epoch: 857 Train Loss: 0.0896 Val Loss: 0.2239 Acc: 0.9185 Pre: 0.9077 Recall: 0.9248 F1: 0.9162 Train AUC: 0.9957 Val AUC: 0.9767 Time: 13.43\n",
      "Epoch: 858 Train Loss: 0.0917 Val Loss: 0.2325 Acc: 0.9203 Pre: 0.9051 Recall: 0.9323 F1: 0.9185 Train AUC: 0.9959 Val AUC: 0.9747 Time: 12.96\n",
      "Epoch: 859 Train Loss: 0.0969 Val Loss: 0.2409 Acc: 0.9221 Pre: 0.9055 Recall: 0.9361 F1: 0.9205 Train AUC: 0.9947 Val AUC: 0.9737 Time: 13.62\n",
      "Epoch: 860 Train Loss: 0.0885 Val Loss: 0.2330 Acc: 0.9239 Pre: 0.9058 Recall: 0.9398 F1: 0.9225 Train AUC: 0.9964 Val AUC: 0.9746 Time: 14.16\n",
      "Epoch: 861 Train Loss: 0.0898 Val Loss: 0.2181 Acc: 0.9185 Pre: 0.9077 Recall: 0.9248 F1: 0.9162 Train AUC: 0.9960 Val AUC: 0.9775 Time: 14.92\n",
      "Epoch: 862 Train Loss: 0.0895 Val Loss: 0.2276 Acc: 0.9185 Pre: 0.8989 Recall: 0.9361 F1: 0.9171 Train AUC: 0.9967 Val AUC: 0.9762 Time: 13.07\n",
      "Epoch: 863 Train Loss: 0.0945 Val Loss: 0.2434 Acc: 0.9167 Pre: 0.8873 Recall: 0.9474 F1: 0.9164 Train AUC: 0.9951 Val AUC: 0.9742 Time: 12.64\n",
      "Epoch: 864 Train Loss: 0.0968 Val Loss: 0.2257 Acc: 0.9257 Pre: 0.9032 Recall: 0.9474 F1: 0.9248 Train AUC: 0.9953 Val AUC: 0.9755 Time: 12.61\n",
      "Epoch: 865 Train Loss: 0.0826 Val Loss: 0.2214 Acc: 0.9330 Pre: 0.9288 Recall: 0.9323 F1: 0.9306 Train AUC: 0.9971 Val AUC: 0.9758 Time: 12.36\n",
      "Epoch: 866 Train Loss: 0.0822 Val Loss: 0.2323 Acc: 0.9257 Pre: 0.9245 Recall: 0.9211 F1: 0.9228 Train AUC: 0.9969 Val AUC: 0.9741 Time: 12.51\n",
      "Epoch: 867 Train Loss: 0.0940 Val Loss: 0.2313 Acc: 0.9221 Pre: 0.9145 Recall: 0.9248 F1: 0.9196 Train AUC: 0.9950 Val AUC: 0.9753 Time: 12.73\n",
      "Epoch: 868 Train Loss: 0.0980 Val Loss: 0.2430 Acc: 0.9239 Pre: 0.9058 Recall: 0.9398 F1: 0.9225 Train AUC: 0.9946 Val AUC: 0.9746 Time: 13.73\n",
      "Epoch: 869 Train Loss: 0.0870 Val Loss: 0.2515 Acc: 0.9185 Pre: 0.8932 Recall: 0.9436 F1: 0.9177 Train AUC: 0.9961 Val AUC: 0.9726 Time: 14.35\n",
      "Epoch: 870 Train Loss: 0.0972 Val Loss: 0.2198 Acc: 0.9167 Pre: 0.9104 Recall: 0.9173 F1: 0.9139 Train AUC: 0.9952 Val AUC: 0.9741 Time: 14.62\n",
      "Epoch: 871 Train Loss: 0.1019 Val Loss: 0.2049 Acc: 0.9293 Pre: 0.9316 Recall: 0.9211 F1: 0.9263 Train AUC: 0.9951 Val AUC: 0.9779 Time: 14.36\n",
      "Epoch: 872 Train Loss: 0.0916 Val Loss: 0.2185 Acc: 0.9402 Pre: 0.9299 Recall: 0.9474 F1: 0.9385 Train AUC: 0.9964 Val AUC: 0.9770 Time: 12.57\n",
      "Epoch: 873 Train Loss: 0.0850 Val Loss: 0.2401 Acc: 0.9293 Pre: 0.8982 Recall: 0.9624 F1: 0.9292 Train AUC: 0.9967 Val AUC: 0.9763 Time: 12.35\n",
      "Epoch: 874 Train Loss: 0.1039 Val Loss: 0.2329 Acc: 0.9312 Pre: 0.8986 Recall: 0.9662 F1: 0.9312 Train AUC: 0.9952 Val AUC: 0.9766 Time: 12.67\n",
      "Epoch: 875 Train Loss: 0.1018 Val Loss: 0.2263 Acc: 0.9221 Pre: 0.8996 Recall: 0.9436 F1: 0.9211 Train AUC: 0.9950 Val AUC: 0.9779 Time: 13.53\n",
      "Epoch: 876 Train Loss: 0.0953 Val Loss: 0.2242 Acc: 0.9167 Pre: 0.9104 Recall: 0.9173 F1: 0.9139 Train AUC: 0.9950 Val AUC: 0.9772 Time: 13.48\n",
      "Epoch: 877 Train Loss: 0.0937 Val Loss: 0.2306 Acc: 0.9239 Pre: 0.9058 Recall: 0.9398 F1: 0.9225 Train AUC: 0.9960 Val AUC: 0.9731 Time: 14.50\n",
      "Epoch: 878 Train Loss: 0.0892 Val Loss: 0.2327 Acc: 0.9167 Pre: 0.8957 Recall: 0.9361 F1: 0.9154 Train AUC: 0.9961 Val AUC: 0.9718 Time: 14.95\n",
      "Epoch: 879 Train Loss: 0.0912 Val Loss: 0.2190 Acc: 0.9221 Pre: 0.9114 Recall: 0.9286 F1: 0.9199 Train AUC: 0.9964 Val AUC: 0.9759 Time: 12.73\n",
      "Epoch: 880 Train Loss: 0.0828 Val Loss: 0.2132 Acc: 0.9257 Pre: 0.9213 Recall: 0.9248 F1: 0.9231 Train AUC: 0.9971 Val AUC: 0.9777 Time: 12.96\n",
      "Epoch: 881 Train Loss: 0.0901 Val Loss: 0.2180 Acc: 0.9257 Pre: 0.9213 Recall: 0.9248 F1: 0.9231 Train AUC: 0.9957 Val AUC: 0.9770 Time: 13.45\n",
      "Epoch: 882 Train Loss: 0.0920 Val Loss: 0.2355 Acc: 0.9167 Pre: 0.8986 Recall: 0.9323 F1: 0.9151 Train AUC: 0.9955 Val AUC: 0.9743 Time: 13.49\n",
      "Epoch: 883 Train Loss: 0.0830 Val Loss: 0.2468 Acc: 0.9167 Pre: 0.8986 Recall: 0.9323 F1: 0.9151 Train AUC: 0.9967 Val AUC: 0.9736 Time: 13.87\n",
      "Epoch: 884 Train Loss: 0.0900 Val Loss: 0.2258 Acc: 0.9167 Pre: 0.8986 Recall: 0.9323 F1: 0.9151 Train AUC: 0.9966 Val AUC: 0.9764 Time: 14.09\n",
      "Epoch: 885 Train Loss: 0.0927 Val Loss: 0.2197 Acc: 0.9167 Pre: 0.9044 Recall: 0.9248 F1: 0.9145 Train AUC: 0.9958 Val AUC: 0.9757 Time: 12.80\n",
      "Epoch: 886 Train Loss: 0.0953 Val Loss: 0.2203 Acc: 0.9149 Pre: 0.8925 Recall: 0.9361 F1: 0.9138 Train AUC: 0.9958 Val AUC: 0.9766 Time: 12.52\n",
      "Epoch: 887 Train Loss: 0.0903 Val Loss: 0.2256 Acc: 0.9203 Pre: 0.8993 Recall: 0.9398 F1: 0.9191 Train AUC: 0.9957 Val AUC: 0.9761 Time: 12.54\n",
      "Epoch: 888 Train Loss: 0.0813 Val Loss: 0.2207 Acc: 0.9293 Pre: 0.9158 Recall: 0.9398 F1: 0.9276 Train AUC: 0.9972 Val AUC: 0.9764 Time: 12.83\n",
      "Epoch: 889 Train Loss: 0.0929 Val Loss: 0.2169 Acc: 0.9293 Pre: 0.9219 Recall: 0.9323 F1: 0.9271 Train AUC: 0.9956 Val AUC: 0.9773 Time: 13.71\n",
      "Epoch: 890 Train Loss: 0.0875 Val Loss: 0.2198 Acc: 0.9185 Pre: 0.9018 Recall: 0.9323 F1: 0.9168 Train AUC: 0.9962 Val AUC: 0.9776 Time: 14.23\n",
      "Epoch: 891 Train Loss: 0.0855 Val Loss: 0.2222 Acc: 0.9203 Pre: 0.8993 Recall: 0.9398 F1: 0.9191 Train AUC: 0.9967 Val AUC: 0.9766 Time: 14.19\n",
      "Epoch: 892 Train Loss: 0.0915 Val Loss: 0.2207 Acc: 0.9167 Pre: 0.8986 Recall: 0.9323 F1: 0.9151 Train AUC: 0.9954 Val AUC: 0.9775 Time: 12.97\n",
      "Epoch: 893 Train Loss: 0.0885 Val Loss: 0.2224 Acc: 0.9275 Pre: 0.9124 Recall: 0.9398 F1: 0.9259 Train AUC: 0.9959 Val AUC: 0.9762 Time: 13.08\n",
      "Epoch: 894 Train Loss: 0.0840 Val Loss: 0.2294 Acc: 0.9239 Pre: 0.9058 Recall: 0.9398 F1: 0.9225 Train AUC: 0.9968 Val AUC: 0.9730 Time: 14.32\n",
      "Epoch: 895 Train Loss: 0.0941 Val Loss: 0.2229 Acc: 0.9257 Pre: 0.9061 Recall: 0.9436 F1: 0.9245 Train AUC: 0.9955 Val AUC: 0.9769 Time: 13.89\n",
      "Epoch: 896 Train Loss: 0.0716 Val Loss: 0.2249 Acc: 0.9239 Pre: 0.9000 Recall: 0.9474 F1: 0.9231 Train AUC: 0.9981 Val AUC: 0.9777 Time: 14.14\n",
      "Epoch: 897 Train Loss: 0.0847 Val Loss: 0.2167 Acc: 0.9185 Pre: 0.8989 Recall: 0.9361 F1: 0.9171 Train AUC: 0.9966 Val AUC: 0.9785 Time: 12.94\n",
      "Epoch: 898 Train Loss: 0.0819 Val Loss: 0.2127 Acc: 0.9293 Pre: 0.9219 Recall: 0.9323 F1: 0.9271 Train AUC: 0.9967 Val AUC: 0.9783 Time: 12.44\n",
      "Epoch: 899 Train Loss: 0.0937 Val Loss: 0.2233 Acc: 0.9293 Pre: 0.9158 Recall: 0.9398 F1: 0.9276 Train AUC: 0.9954 Val AUC: 0.9763 Time: 12.82\n",
      "Epoch: 900 Train Loss: 0.0897 Val Loss: 0.2194 Acc: 0.9293 Pre: 0.9219 Recall: 0.9323 F1: 0.9271 Train AUC: 0.9964 Val AUC: 0.9764 Time: 13.43\n",
      "Epoch: 901 Train Loss: 0.0892 Val Loss: 0.2149 Acc: 0.9293 Pre: 0.9219 Recall: 0.9323 F1: 0.9271 Train AUC: 0.9961 Val AUC: 0.9774 Time: 13.90\n",
      "Epoch: 902 Train Loss: 0.0865 Val Loss: 0.2128 Acc: 0.9221 Pre: 0.9084 Recall: 0.9323 F1: 0.9202 Train AUC: 0.9964 Val AUC: 0.9785 Time: 14.39\n",
      "Epoch: 903 Train Loss: 0.0925 Val Loss: 0.2181 Acc: 0.9167 Pre: 0.8986 Recall: 0.9323 F1: 0.9151 Train AUC: 0.9955 Val AUC: 0.9761 Time: 14.23\n",
      "Epoch: 904 Train Loss: 0.0869 Val Loss: 0.2325 Acc: 0.9112 Pre: 0.8754 Recall: 0.9511 F1: 0.9117 Train AUC: 0.9962 Val AUC: 0.9735 Time: 14.06\n",
      "Epoch: 905 Train Loss: 0.0887 Val Loss: 0.2280 Acc: 0.9167 Pre: 0.8929 Recall: 0.9398 F1: 0.9158 Train AUC: 0.9967 Val AUC: 0.9735 Time: 12.50\n",
      "Epoch: 906 Train Loss: 0.0897 Val Loss: 0.2091 Acc: 0.9330 Pre: 0.9321 Recall: 0.9286 F1: 0.9303 Train AUC: 0.9971 Val AUC: 0.9779 Time: 12.45\n",
      "Epoch: 907 Train Loss: 0.0873 Val Loss: 0.2208 Acc: 0.9221 Pre: 0.9084 Recall: 0.9323 F1: 0.9202 Train AUC: 0.9970 Val AUC: 0.9769 Time: 12.56\n",
      "Epoch: 908 Train Loss: 0.0925 Val Loss: 0.2603 Acc: 0.9130 Pre: 0.8865 Recall: 0.9398 F1: 0.9124 Train AUC: 0.9957 Val AUC: 0.9749 Time: 12.80\n",
      "Epoch: 909 Train Loss: 0.0962 Val Loss: 0.2453 Acc: 0.9094 Pre: 0.8803 Recall: 0.9398 F1: 0.9091 Train AUC: 0.9950 Val AUC: 0.9750 Time: 12.46\n",
      "Epoch: 910 Train Loss: 0.0897 Val Loss: 0.2251 Acc: 0.9293 Pre: 0.9219 Recall: 0.9323 F1: 0.9271 Train AUC: 0.9965 Val AUC: 0.9754 Time: 13.01\n",
      "Epoch: 911 Train Loss: 0.0933 Val Loss: 0.2155 Acc: 0.9348 Pre: 0.9323 Recall: 0.9323 F1: 0.9323 Train AUC: 0.9955 Val AUC: 0.9750 Time: 14.02\n",
      "Epoch: 912 Train Loss: 0.0921 Val Loss: 0.2180 Acc: 0.9167 Pre: 0.8986 Recall: 0.9323 F1: 0.9151 Train AUC: 0.9962 Val AUC: 0.9747 Time: 14.35\n",
      "Epoch: 913 Train Loss: 0.0828 Val Loss: 0.2364 Acc: 0.9239 Pre: 0.8944 Recall: 0.9549 F1: 0.9236 Train AUC: 0.9973 Val AUC: 0.9753 Time: 14.47\n",
      "Epoch: 914 Train Loss: 0.0841 Val Loss: 0.2349 Acc: 0.9112 Pre: 0.8834 Recall: 0.9398 F1: 0.9107 Train AUC: 0.9966 Val AUC: 0.9771 Time: 12.72\n",
      "Epoch: 915 Train Loss: 0.0953 Val Loss: 0.2202 Acc: 0.9275 Pre: 0.9154 Recall: 0.9361 F1: 0.9257 Train AUC: 0.9950 Val AUC: 0.9775 Time: 12.44\n",
      "Epoch: 916 Train Loss: 0.0856 Val Loss: 0.2208 Acc: 0.9312 Pre: 0.9191 Recall: 0.9398 F1: 0.9294 Train AUC: 0.9960 Val AUC: 0.9768 Time: 12.73\n",
      "Epoch: 917 Train Loss: 0.0936 Val Loss: 0.2281 Acc: 0.9330 Pre: 0.9134 Recall: 0.9511 F1: 0.9319 Train AUC: 0.9956 Val AUC: 0.9766 Time: 13.84\n",
      "Epoch: 918 Train Loss: 0.0860 Val Loss: 0.2176 Acc: 0.9239 Pre: 0.9088 Recall: 0.9361 F1: 0.9222 Train AUC: 0.9972 Val AUC: 0.9779 Time: 13.93\n",
      "Epoch: 919 Train Loss: 0.0784 Val Loss: 0.2192 Acc: 0.9221 Pre: 0.9025 Recall: 0.9398 F1: 0.9208 Train AUC: 0.9973 Val AUC: 0.9780 Time: 14.33\n",
      "Epoch: 920 Train Loss: 0.0812 Val Loss: 0.2267 Acc: 0.9185 Pre: 0.8989 Recall: 0.9361 F1: 0.9171 Train AUC: 0.9968 Val AUC: 0.9770 Time: 14.36\n",
      "Epoch: 921 Train Loss: 0.0909 Val Loss: 0.2216 Acc: 0.9257 Pre: 0.9091 Recall: 0.9398 F1: 0.9242 Train AUC: 0.9956 Val AUC: 0.9751 Time: 12.60\n",
      "Epoch: 922 Train Loss: 0.0864 Val Loss: 0.2229 Acc: 0.9257 Pre: 0.9091 Recall: 0.9398 F1: 0.9242 Train AUC: 0.9966 Val AUC: 0.9740 Time: 12.65\n",
      "Epoch: 923 Train Loss: 0.0836 Val Loss: 0.2229 Acc: 0.9257 Pre: 0.9091 Recall: 0.9398 F1: 0.9242 Train AUC: 0.9969 Val AUC: 0.9768 Time: 12.72\n",
      "Epoch: 924 Train Loss: 0.0797 Val Loss: 0.2269 Acc: 0.9221 Pre: 0.8996 Recall: 0.9436 F1: 0.9211 Train AUC: 0.9974 Val AUC: 0.9775 Time: 12.76\n",
      "Epoch: 925 Train Loss: 0.0792 Val Loss: 0.2307 Acc: 0.9076 Pre: 0.8881 Recall: 0.9248 F1: 0.9061 Train AUC: 0.9970 Val AUC: 0.9774 Time: 13.51\n",
      "Epoch: 926 Train Loss: 0.0931 Val Loss: 0.2199 Acc: 0.9221 Pre: 0.9055 Recall: 0.9361 F1: 0.9205 Train AUC: 0.9951 Val AUC: 0.9777 Time: 14.20\n",
      "Epoch: 927 Train Loss: 0.0784 Val Loss: 0.2254 Acc: 0.9293 Pre: 0.9097 Recall: 0.9474 F1: 0.9282 Train AUC: 0.9972 Val AUC: 0.9760 Time: 13.89\n",
      "Epoch: 928 Train Loss: 0.0851 Val Loss: 0.2242 Acc: 0.9293 Pre: 0.9011 Recall: 0.9586 F1: 0.9290 Train AUC: 0.9970 Val AUC: 0.9751 Time: 14.29\n",
      "Epoch: 929 Train Loss: 0.0941 Val Loss: 0.2266 Acc: 0.9275 Pre: 0.8979 Recall: 0.9586 F1: 0.9273 Train AUC: 0.9958 Val AUC: 0.9766 Time: 14.21\n",
      "Epoch: 930 Train Loss: 0.0880 Val Loss: 0.2176 Acc: 0.9167 Pre: 0.9015 Recall: 0.9286 F1: 0.9148 Train AUC: 0.9961 Val AUC: 0.9783 Time: 12.62\n",
      "Epoch: 931 Train Loss: 0.0855 Val Loss: 0.2239 Acc: 0.9149 Pre: 0.8953 Recall: 0.9323 F1: 0.9134 Train AUC: 0.9968 Val AUC: 0.9777 Time: 12.42\n",
      "Epoch: 932 Train Loss: 0.0844 Val Loss: 0.2302 Acc: 0.9185 Pre: 0.8989 Recall: 0.9361 F1: 0.9171 Train AUC: 0.9965 Val AUC: 0.9763 Time: 12.63\n",
      "Epoch: 933 Train Loss: 0.0884 Val Loss: 0.2295 Acc: 0.9275 Pre: 0.9124 Recall: 0.9398 F1: 0.9259 Train AUC: 0.9960 Val AUC: 0.9744 Time: 12.69\n",
      "Epoch: 934 Train Loss: 0.0840 Val Loss: 0.2197 Acc: 0.9167 Pre: 0.8986 Recall: 0.9323 F1: 0.9151 Train AUC: 0.9973 Val AUC: 0.9761 Time: 12.62\n",
      "Epoch: 935 Train Loss: 0.0828 Val Loss: 0.2142 Acc: 0.9185 Pre: 0.9108 Recall: 0.9211 F1: 0.9159 Train AUC: 0.9966 Val AUC: 0.9781 Time: 13.12\n",
      "Epoch: 936 Train Loss: 0.1005 Val Loss: 0.2242 Acc: 0.9167 Pre: 0.8957 Recall: 0.9361 F1: 0.9154 Train AUC: 0.9946 Val AUC: 0.9779 Time: 14.08\n",
      "Epoch: 937 Train Loss: 0.0872 Val Loss: 0.2417 Acc: 0.9094 Pre: 0.8750 Recall: 0.9474 F1: 0.9097 Train AUC: 0.9964 Val AUC: 0.9760 Time: 15.04\n",
      "Epoch: 938 Train Loss: 0.0907 Val Loss: 0.2189 Acc: 0.9257 Pre: 0.9004 Recall: 0.9511 F1: 0.9250 Train AUC: 0.9964 Val AUC: 0.9776 Time: 13.83\n",
      "Epoch: 939 Train Loss: 0.0806 Val Loss: 0.2094 Acc: 0.9221 Pre: 0.9025 Recall: 0.9398 F1: 0.9208 Train AUC: 0.9971 Val AUC: 0.9768 Time: 12.71\n",
      "Epoch: 940 Train Loss: 0.0857 Val Loss: 0.2094 Acc: 0.9257 Pre: 0.9091 Recall: 0.9398 F1: 0.9242 Train AUC: 0.9965 Val AUC: 0.9769 Time: 12.71\n",
      "Epoch: 941 Train Loss: 0.0850 Val Loss: 0.2201 Acc: 0.9275 Pre: 0.9124 Recall: 0.9398 F1: 0.9259 Train AUC: 0.9966 Val AUC: 0.9758 Time: 12.64\n",
      "Epoch: 942 Train Loss: 0.0809 Val Loss: 0.2284 Acc: 0.9239 Pre: 0.9058 Recall: 0.9398 F1: 0.9225 Train AUC: 0.9973 Val AUC: 0.9763 Time: 12.39\n",
      "Epoch: 943 Train Loss: 0.0883 Val Loss: 0.2337 Acc: 0.9167 Pre: 0.8929 Recall: 0.9398 F1: 0.9158 Train AUC: 0.9962 Val AUC: 0.9771 Time: 13.69\n",
      "Epoch: 944 Train Loss: 0.0823 Val Loss: 0.2353 Acc: 0.9221 Pre: 0.8996 Recall: 0.9436 F1: 0.9211 Train AUC: 0.9973 Val AUC: 0.9772 Time: 14.19\n",
      "Epoch: 945 Train Loss: 0.0937 Val Loss: 0.2170 Acc: 0.9149 Pre: 0.9011 Recall: 0.9248 F1: 0.9128 Train AUC: 0.9948 Val AUC: 0.9784 Time: 14.65\n",
      "Epoch: 946 Train Loss: 0.0885 Val Loss: 0.2175 Acc: 0.9257 Pre: 0.9091 Recall: 0.9398 F1: 0.9242 Train AUC: 0.9964 Val AUC: 0.9774 Time: 14.65\n",
      "Epoch: 947 Train Loss: 0.0765 Val Loss: 0.2182 Acc: 0.9293 Pre: 0.9158 Recall: 0.9398 F1: 0.9276 Train AUC: 0.9976 Val AUC: 0.9761 Time: 12.82\n",
      "Epoch: 948 Train Loss: 0.0780 Val Loss: 0.2120 Acc: 0.9221 Pre: 0.9084 Recall: 0.9323 F1: 0.9202 Train AUC: 0.9976 Val AUC: 0.9780 Time: 12.62\n",
      "Epoch: 949 Train Loss: 0.0812 Val Loss: 0.2125 Acc: 0.9167 Pre: 0.8986 Recall: 0.9323 F1: 0.9151 Train AUC: 0.9972 Val AUC: 0.9789 Time: 12.60\n",
      "Epoch: 950 Train Loss: 0.0846 Val Loss: 0.2210 Acc: 0.9275 Pre: 0.9007 Recall: 0.9549 F1: 0.9270 Train AUC: 0.9965 Val AUC: 0.9784 Time: 12.74\n",
      "Epoch: 951 Train Loss: 0.0753 Val Loss: 0.2225 Acc: 0.9239 Pre: 0.9058 Recall: 0.9398 F1: 0.9225 Train AUC: 0.9977 Val AUC: 0.9776 Time: 13.69\n",
      "Epoch: 952 Train Loss: 0.0787 Val Loss: 0.2183 Acc: 0.9312 Pre: 0.9191 Recall: 0.9398 F1: 0.9294 Train AUC: 0.9974 Val AUC: 0.9774 Time: 14.30\n",
      "Epoch: 953 Train Loss: 0.0858 Val Loss: 0.2183 Acc: 0.9293 Pre: 0.9219 Recall: 0.9323 F1: 0.9271 Train AUC: 0.9969 Val AUC: 0.9776 Time: 14.75\n",
      "Epoch: 954 Train Loss: 0.0986 Val Loss: 0.2118 Acc: 0.9330 Pre: 0.9225 Recall: 0.9398 F1: 0.9311 Train AUC: 0.9951 Val AUC: 0.9781 Time: 14.34\n",
      "Epoch: 955 Train Loss: 0.0780 Val Loss: 0.2092 Acc: 0.9257 Pre: 0.9091 Recall: 0.9398 F1: 0.9242 Train AUC: 0.9971 Val AUC: 0.9785 Time: 12.36\n",
      "Epoch: 956 Train Loss: 0.0783 Val Loss: 0.2106 Acc: 0.9257 Pre: 0.9091 Recall: 0.9398 F1: 0.9242 Train AUC: 0.9973 Val AUC: 0.9787 Time: 12.53\n",
      "Epoch: 957 Train Loss: 0.0805 Val Loss: 0.2227 Acc: 0.9257 Pre: 0.8975 Recall: 0.9549 F1: 0.9253 Train AUC: 0.9971 Val AUC: 0.9776 Time: 12.58\n",
      "Epoch: 958 Train Loss: 0.0849 Val Loss: 0.2274 Acc: 0.9221 Pre: 0.8968 Recall: 0.9474 F1: 0.9214 Train AUC: 0.9969 Val AUC: 0.9775 Time: 13.09\n",
      "Epoch: 959 Train Loss: 0.0875 Val Loss: 0.2140 Acc: 0.9293 Pre: 0.9158 Recall: 0.9398 F1: 0.9276 Train AUC: 0.9963 Val AUC: 0.9785 Time: 14.03\n",
      "Epoch: 960 Train Loss: 0.0848 Val Loss: 0.2145 Acc: 0.9312 Pre: 0.9222 Recall: 0.9361 F1: 0.9291 Train AUC: 0.9967 Val AUC: 0.9789 Time: 14.11\n",
      "Epoch: 961 Train Loss: 0.0839 Val Loss: 0.2128 Acc: 0.9221 Pre: 0.8968 Recall: 0.9474 F1: 0.9214 Train AUC: 0.9969 Val AUC: 0.9792 Time: 14.21\n",
      "Epoch: 962 Train Loss: 0.0862 Val Loss: 0.2150 Acc: 0.9312 Pre: 0.9014 Recall: 0.9624 F1: 0.9309 Train AUC: 0.9962 Val AUC: 0.9778 Time: 12.56\n",
      "Epoch: 963 Train Loss: 0.0830 Val Loss: 0.2156 Acc: 0.9312 Pre: 0.9014 Recall: 0.9624 F1: 0.9309 Train AUC: 0.9968 Val AUC: 0.9782 Time: 12.58\n",
      "Epoch: 964 Train Loss: 0.0844 Val Loss: 0.2152 Acc: 0.9221 Pre: 0.9025 Recall: 0.9398 F1: 0.9208 Train AUC: 0.9970 Val AUC: 0.9786 Time: 13.88\n",
      "Epoch: 965 Train Loss: 0.0740 Val Loss: 0.2288 Acc: 0.9203 Pre: 0.8993 Recall: 0.9398 F1: 0.9191 Train AUC: 0.9978 Val AUC: 0.9775 Time: 13.87\n",
      "Epoch: 966 Train Loss: 0.0828 Val Loss: 0.2329 Acc: 0.9130 Pre: 0.8978 Recall: 0.9248 F1: 0.9111 Train AUC: 0.9966 Val AUC: 0.9774 Time: 13.70\n",
      "Epoch: 967 Train Loss: 0.0850 Val Loss: 0.2203 Acc: 0.9167 Pre: 0.9104 Recall: 0.9173 F1: 0.9139 Train AUC: 0.9963 Val AUC: 0.9777 Time: 13.01\n",
      "Epoch: 968 Train Loss: 0.0808 Val Loss: 0.2185 Acc: 0.9257 Pre: 0.9151 Recall: 0.9323 F1: 0.9236 Train AUC: 0.9967 Val AUC: 0.9767 Time: 13.21\n",
      "Epoch: 969 Train Loss: 0.0847 Val Loss: 0.2264 Acc: 0.9239 Pre: 0.9000 Recall: 0.9474 F1: 0.9231 Train AUC: 0.9969 Val AUC: 0.9767 Time: 12.53\n",
      "Epoch: 970 Train Loss: 0.0795 Val Loss: 0.2402 Acc: 0.9112 Pre: 0.8754 Recall: 0.9511 F1: 0.9117 Train AUC: 0.9970 Val AUC: 0.9755 Time: 13.23\n",
      "Epoch: 971 Train Loss: 0.0827 Val Loss: 0.2209 Acc: 0.9221 Pre: 0.8968 Recall: 0.9474 F1: 0.9214 Train AUC: 0.9974 Val AUC: 0.9784 Time: 13.25\n",
      "Epoch: 972 Train Loss: 0.0784 Val Loss: 0.2025 Acc: 0.9312 Pre: 0.9254 Recall: 0.9323 F1: 0.9288 Train AUC: 0.9976 Val AUC: 0.9798 Time: 14.57\n",
      "Epoch: 973 Train Loss: 0.0863 Val Loss: 0.2078 Acc: 0.9275 Pre: 0.9124 Recall: 0.9398 F1: 0.9259 Train AUC: 0.9964 Val AUC: 0.9782 Time: 14.21\n",
      "Epoch: 974 Train Loss: 0.0842 Val Loss: 0.2275 Acc: 0.9275 Pre: 0.8951 Recall: 0.9624 F1: 0.9275 Train AUC: 0.9968 Val AUC: 0.9765 Time: 13.91\n",
      "Epoch: 975 Train Loss: 0.0800 Val Loss: 0.2307 Acc: 0.9239 Pre: 0.8944 Recall: 0.9549 F1: 0.9236 Train AUC: 0.9973 Val AUC: 0.9770 Time: 12.51\n",
      "Epoch: 976 Train Loss: 0.0798 Val Loss: 0.2244 Acc: 0.9167 Pre: 0.8929 Recall: 0.9398 F1: 0.9158 Train AUC: 0.9973 Val AUC: 0.9777 Time: 12.44\n",
      "Epoch: 977 Train Loss: 0.0801 Val Loss: 0.2186 Acc: 0.9167 Pre: 0.8957 Recall: 0.9361 F1: 0.9154 Train AUC: 0.9969 Val AUC: 0.9781 Time: 12.69\n",
      "Epoch: 978 Train Loss: 0.0889 Val Loss: 0.2084 Acc: 0.9293 Pre: 0.9039 Recall: 0.9549 F1: 0.9287 Train AUC: 0.9962 Val AUC: 0.9787 Time: 12.62\n",
      "Epoch: 979 Train Loss: 0.0772 Val Loss: 0.2107 Acc: 0.9275 Pre: 0.8951 Recall: 0.9624 F1: 0.9275 Train AUC: 0.9972 Val AUC: 0.9776 Time: 12.38\n",
      "Epoch: 980 Train Loss: 0.0890 Val Loss: 0.2071 Acc: 0.9203 Pre: 0.8993 Recall: 0.9398 F1: 0.9191 Train AUC: 0.9960 Val AUC: 0.9782 Time: 13.69\n",
      "Epoch: 981 Train Loss: 0.0857 Val Loss: 0.2181 Acc: 0.9221 Pre: 0.9025 Recall: 0.9398 F1: 0.9208 Train AUC: 0.9965 Val AUC: 0.9783 Time: 14.46\n",
      "Epoch: 982 Train Loss: 0.0849 Val Loss: 0.2239 Acc: 0.9149 Pre: 0.8953 Recall: 0.9323 F1: 0.9134 Train AUC: 0.9970 Val AUC: 0.9780 Time: 14.61\n",
      "Epoch: 983 Train Loss: 0.0806 Val Loss: 0.2156 Acc: 0.9203 Pre: 0.9051 Recall: 0.9323 F1: 0.9185 Train AUC: 0.9969 Val AUC: 0.9785 Time: 14.34\n",
      "Epoch: 984 Train Loss: 0.0801 Val Loss: 0.2139 Acc: 0.9130 Pre: 0.8978 Recall: 0.9248 F1: 0.9111 Train AUC: 0.9973 Val AUC: 0.9777 Time: 13.02\n",
      "Epoch: 985 Train Loss: 0.0900 Val Loss: 0.2124 Acc: 0.9312 Pre: 0.9130 Recall: 0.9474 F1: 0.9299 Train AUC: 0.9960 Val AUC: 0.9792 Time: 12.47\n",
      "Epoch: 986 Train Loss: 0.0775 Val Loss: 0.2190 Acc: 0.9257 Pre: 0.9091 Recall: 0.9398 F1: 0.9242 Train AUC: 0.9972 Val AUC: 0.9784 Time: 12.41\n",
      "Epoch: 987 Train Loss: 0.0803 Val Loss: 0.2205 Acc: 0.9257 Pre: 0.9091 Recall: 0.9398 F1: 0.9242 Train AUC: 0.9971 Val AUC: 0.9781 Time: 12.90\n",
      "Epoch: 988 Train Loss: 0.0912 Val Loss: 0.2063 Acc: 0.9257 Pre: 0.9061 Recall: 0.9436 F1: 0.9245 Train AUC: 0.9963 Val AUC: 0.9792 Time: 13.61\n",
      "Epoch: 989 Train Loss: 0.0834 Val Loss: 0.2106 Acc: 0.9239 Pre: 0.9000 Recall: 0.9474 F1: 0.9231 Train AUC: 0.9966 Val AUC: 0.9775 Time: 14.45\n",
      "Epoch: 990 Train Loss: 0.0872 Val Loss: 0.2201 Acc: 0.9239 Pre: 0.9000 Recall: 0.9474 F1: 0.9231 Train AUC: 0.9959 Val AUC: 0.9766 Time: 14.74\n",
      "Epoch: 991 Train Loss: 0.0776 Val Loss: 0.2314 Acc: 0.9203 Pre: 0.8993 Recall: 0.9398 F1: 0.9191 Train AUC: 0.9977 Val AUC: 0.9754 Time: 13.37\n",
      "Epoch: 992 Train Loss: 0.0777 Val Loss: 0.2318 Acc: 0.9112 Pre: 0.8889 Recall: 0.9323 F1: 0.9101 Train AUC: 0.9970 Val AUC: 0.9751 Time: 12.50\n",
      "Epoch: 993 Train Loss: 0.0802 Val Loss: 0.2187 Acc: 0.9275 Pre: 0.9154 Recall: 0.9361 F1: 0.9257 Train AUC: 0.9969 Val AUC: 0.9775 Time: 12.53\n",
      "Epoch: 994 Train Loss: 0.0821 Val Loss: 0.2133 Acc: 0.9221 Pre: 0.9084 Recall: 0.9323 F1: 0.9202 Train AUC: 0.9973 Val AUC: 0.9783 Time: 12.42\n",
      "Epoch: 995 Train Loss: 0.0929 Val Loss: 0.2340 Acc: 0.9348 Pre: 0.9021 Recall: 0.9699 F1: 0.9348 Train AUC: 0.9960 Val AUC: 0.9777 Time: 12.86\n",
      "Epoch: 996 Train Loss: 0.0892 Val Loss: 0.2395 Acc: 0.9312 Pre: 0.8958 Recall: 0.9699 F1: 0.9314 Train AUC: 0.9961 Val AUC: 0.9786 Time: 13.84\n",
      "Epoch: 997 Train Loss: 0.0889 Val Loss: 0.2247 Acc: 0.9239 Pre: 0.9000 Recall: 0.9474 F1: 0.9231 Train AUC: 0.9964 Val AUC: 0.9789 Time: 13.92\n",
      "Epoch: 998 Train Loss: 0.0853 Val Loss: 0.2000 Acc: 0.9420 Pre: 0.9149 Recall: 0.9699 F1: 0.9416 Train AUC: 0.9967 Val AUC: 0.9845 Time: 13.59\n",
      "Epoch: 999 Train Loss: 0.0888 Val Loss: 0.1964 Acc: 0.9330 Pre: 0.9225 Recall: 0.9398 F1: 0.9311 Train AUC: 0.9964 Val AUC: 0.9783 Time: 13.92\n",
      "Epoch: 1000 Train Loss: 0.0803 Val Loss: 0.2150 Acc: 0.9275 Pre: 0.8951 Recall: 0.9624 F1: 0.9275 Train AUC: 0.9971 Val AUC: 0.9733 Time: 14.72\n",
      "Fold: 5 Best Epoch: 998 Test acc: 0.9420 Test Pre: 0.9149 Test Recall: 0.9699 Test F1: 0.9416 Test PRC: 0.9833 Test AUC: 0.9845\n",
      "## Training Finished !\n",
      "-----------------------------------------------------------------------------------------------\n",
      "Auc [0.9791360100903931, 0.9706599051633298, 0.9663595683074492, 0.9686969439728353, 0.9845023397654976]\n",
      "Acc [0.9239130434782609, 0.9130434782608695, 0.9166666666666666, 0.8967391304347826, 0.9420289855072463]\n",
      "Pre [0.9115646258503401, 0.9066666666666666, 0.9357142857142857, 0.8550185873605948, 0.9148936170212766]\n",
      "Recall [0.9436619718309859, 0.9315068493150684, 0.903448275862069, 0.9274193548387096, 0.9699248120300752]\n",
      "F1 [0.9273356401384083, 0.918918918918919, 0.9192982456140351, 0.8897485493230173, 0.9416058394160584]\n",
      "Prc [0.9806285569512897, 0.96506145195687, 0.9755785912845337, 0.9689372242573016, 0.9832742359576913]\n",
      " AUC mean: 0.9739, variance: 0.0068  \n",
      " Accuracy mean: 0.9185, variance: 0.0148  \n",
      " Precision mean: 0.9048, variance: 0.0268  \n",
      " Recall mean: 0.9352, variance: 0.0217 \n",
      " F1-score mean: 0.9194, variance: 0.0169  \n",
      " PRC mean: 0.9747, variance: 0.0069 \n",
      "\n",
      "fprs [array([0.        , 0.        , 0.        , 0.        , 0.        ,\n",
      "       0.        , 0.        , 0.00373134, 0.00373134, 0.00746269,\n",
      "       0.00746269, 0.01119403, 0.01119403, 0.01492537, 0.01492537,\n",
      "       0.02238806, 0.02238806, 0.0261194 , 0.0261194 , 0.02985075,\n",
      "       0.02985075, 0.03731343, 0.03731343, 0.04104478, 0.04104478,\n",
      "       0.04477612, 0.04477612, 0.05970149, 0.05970149, 0.06343284,\n",
      "       0.06343284, 0.06716418, 0.06716418, 0.08208955, 0.08208955,\n",
      "       0.09328358, 0.09328358, 0.12313433, 0.12313433, 0.13432836,\n",
      "       0.13432836, 0.15671642, 0.15671642, 0.1641791 , 0.1641791 ,\n",
      "       0.16791045, 0.16791045, 0.20149254, 0.20149254, 0.21268657,\n",
      "       0.21268657, 0.22014925, 0.22014925, 0.2238806 , 0.2238806 ,\n",
      "       0.24253731, 0.24253731, 0.25373134, 0.25373134, 0.27985075,\n",
      "       0.27985075, 0.29477612, 0.29477612, 0.50746269, 0.50746269,\n",
      "       0.51119403, 0.51119403, 1.        ]), array([0.        , 0.        , 0.        , 0.        , 0.        ,\n",
      "       0.        , 0.        , 0.00384615, 0.00384615, 0.00769231,\n",
      "       0.00769231, 0.01153846, 0.01153846, 0.01538462, 0.01538462,\n",
      "       0.02692308, 0.02692308, 0.03076923, 0.03076923, 0.03461538,\n",
      "       0.03461538, 0.03846154, 0.03846154, 0.04615385, 0.04615385,\n",
      "       0.05384615, 0.05384615, 0.06153846, 0.06153846, 0.06538462,\n",
      "       0.06538462, 0.06923077, 0.06923077, 0.07307692, 0.07307692,\n",
      "       0.07692308, 0.07692308, 0.09615385, 0.09615385, 0.10769231,\n",
      "       0.10769231, 0.11153846, 0.11153846, 0.12307692, 0.12307692,\n",
      "       0.13461538, 0.13461538, 0.13846154, 0.13846154, 0.14230769,\n",
      "       0.14230769, 0.15      , 0.15      , 0.16538462, 0.16538462,\n",
      "       0.16923077, 0.16923077, 0.17307692, 0.17307692, 0.17692308,\n",
      "       0.17692308, 0.18076923, 0.18076923, 0.18461538, 0.18461538,\n",
      "       0.2       , 0.2       , 0.20384615, 0.20384615, 0.20769231,\n",
      "       0.20769231, 0.21153846, 0.21153846, 0.23076923, 0.23076923,\n",
      "       0.33076923, 0.33076923, 0.35769231, 0.35769231, 0.36538462,\n",
      "       0.36538462, 0.39230769, 0.39230769, 0.40769231, 0.40769231,\n",
      "       0.41923077, 0.41923077, 0.50384615, 0.50384615, 0.51538462,\n",
      "       0.51538462, 0.54230769, 0.54230769, 0.55769231, 0.55769231,\n",
      "       1.        ]), array([0.        , 0.        , 0.        , 0.00381679, 0.00381679,\n",
      "       0.00763359, 0.00763359, 0.01145038, 0.01145038, 0.01526718,\n",
      "       0.01526718, 0.01908397, 0.01908397, 0.02290076, 0.02290076,\n",
      "       0.02671756, 0.02671756, 0.03435115, 0.03435115, 0.04198473,\n",
      "       0.04198473, 0.04580153, 0.04580153, 0.04961832, 0.04961832,\n",
      "       0.05343511, 0.05343511, 0.05725191, 0.05725191, 0.10305344,\n",
      "       0.10305344, 0.11450382, 0.11450382, 0.11832061, 0.11832061,\n",
      "       0.13358779, 0.13358779, 0.16793893, 0.16793893, 0.17557252,\n",
      "       0.17557252, 0.1870229 , 0.1870229 , 0.21374046, 0.21374046,\n",
      "       0.23282443, 0.23282443, 0.25954198, 0.25954198, 0.27480916,\n",
      "       0.27480916, 0.28244275, 0.28244275, 0.28625954, 0.28625954,\n",
      "       0.33206107, 0.33206107, 0.35114504, 0.35114504, 0.6259542 ,\n",
      "       0.6259542 , 0.82824427, 0.82824427, 0.83969466, 0.83969466,\n",
      "       1.        ]), array([0.        , 0.        , 0.        , 0.        , 0.        ,\n",
      "       0.00328947, 0.00328947, 0.00657895, 0.00657895, 0.00986842,\n",
      "       0.00986842, 0.01315789, 0.01315789, 0.02631579, 0.02631579,\n",
      "       0.02960526, 0.02960526, 0.03289474, 0.03289474, 0.03947368,\n",
      "       0.03947368, 0.04605263, 0.04605263, 0.04934211, 0.04934211,\n",
      "       0.05921053, 0.05921053, 0.06578947, 0.06578947, 0.07236842,\n",
      "       0.07236842, 0.09210526, 0.09210526, 0.09868421, 0.09868421,\n",
      "       0.14144737, 0.14144737, 0.16447368, 0.16447368, 0.21381579,\n",
      "       0.21381579, 0.22368421, 0.22368421, 0.24342105, 0.24342105,\n",
      "       0.26315789, 0.26315789, 0.29276316, 0.29276316, 0.30592105,\n",
      "       0.30592105, 0.44736842, 0.44736842, 0.46381579, 0.46381579,\n",
      "       0.50657895, 0.50657895, 0.54605263, 0.54605263, 0.86513158,\n",
      "       0.86513158, 0.88157895, 0.88157895, 1.        ]), array([0.        , 0.        , 0.        , 0.        , 0.        ,\n",
      "       0.        , 0.        , 0.        , 0.0034965 , 0.0034965 ,\n",
      "       0.00699301, 0.00699301, 0.01048951, 0.01048951, 0.01748252,\n",
      "       0.01748252, 0.02097902, 0.02097902, 0.02447552, 0.02447552,\n",
      "       0.02797203, 0.02797203, 0.03496503, 0.03496503, 0.03846154,\n",
      "       0.03846154, 0.04195804, 0.04195804, 0.04545455, 0.04545455,\n",
      "       0.04895105, 0.04895105, 0.05244755, 0.05244755, 0.05594406,\n",
      "       0.05594406, 0.06293706, 0.06293706, 0.06643357, 0.06643357,\n",
      "       0.08741259, 0.08741259, 0.09440559, 0.09440559, 0.11538462,\n",
      "       0.11538462, 0.11888112, 0.11888112, 0.12237762, 0.12237762,\n",
      "       0.12937063, 0.12937063, 0.13286713, 0.13286713, 0.15734266,\n",
      "       0.15734266, 0.16083916, 0.16083916, 0.30769231, 0.30769231,\n",
      "       0.31118881, 0.31118881, 1.        ])]\n",
      "tprs [array([0.        , 0.00352113, 0.04929577, 0.05633803, 0.33450704,\n",
      "       0.3415493 , 0.58450704, 0.58450704, 0.58802817, 0.58802817,\n",
      "       0.73239437, 0.73239437, 0.74647887, 0.74647887, 0.78873239,\n",
      "       0.78873239, 0.81338028, 0.81338028, 0.82042254, 0.82042254,\n",
      "       0.83802817, 0.83802817, 0.86619718, 0.86619718, 0.88028169,\n",
      "       0.88028169, 0.8943662 , 0.8943662 , 0.90140845, 0.90140845,\n",
      "       0.9084507 , 0.9084507 , 0.92253521, 0.92253521, 0.92605634,\n",
      "       0.92605634, 0.92957746, 0.92957746, 0.93661972, 0.93661972,\n",
      "       0.95070423, 0.95070423, 0.95422535, 0.95422535, 0.95774648,\n",
      "       0.95774648, 0.96478873, 0.96478873, 0.96830986, 0.96830986,\n",
      "       0.97183099, 0.97183099, 0.97535211, 0.97535211, 0.97887324,\n",
      "       0.97887324, 0.98239437, 0.98239437, 0.98591549, 0.98591549,\n",
      "       0.98943662, 0.98943662, 0.99295775, 0.99295775, 0.99647887,\n",
      "       0.99647887, 1.        , 1.        ]), array([0.        , 0.00342466, 0.17123288, 0.17808219, 0.35616438,\n",
      "       0.3630137 , 0.53082192, 0.53082192, 0.55136986, 0.55136986,\n",
      "       0.63013699, 0.63013699, 0.65753425, 0.65753425, 0.71917808,\n",
      "       0.71917808, 0.7260274 , 0.7260274 , 0.75      , 0.75      ,\n",
      "       0.76369863, 0.76369863, 0.80479452, 0.80479452, 0.81164384,\n",
      "       0.81164384, 0.82534247, 0.82534247, 0.83219178, 0.83219178,\n",
      "       0.84246575, 0.84246575, 0.84589041, 0.84589041, 0.86643836,\n",
      "       0.86643836, 0.86986301, 0.86986301, 0.88356164, 0.88356164,\n",
      "       0.89041096, 0.89041096, 0.89383562, 0.89383562, 0.89726027,\n",
      "       0.89726027, 0.90068493, 0.90068493, 0.9109589 , 0.9109589 ,\n",
      "       0.91438356, 0.91438356, 0.91780822, 0.91780822, 0.92123288,\n",
      "       0.92123288, 0.92465753, 0.92465753, 0.92808219, 0.92808219,\n",
      "       0.93493151, 0.93493151, 0.93835616, 0.93835616, 0.94520548,\n",
      "       0.94520548, 0.94863014, 0.94863014, 0.95205479, 0.95205479,\n",
      "       0.95890411, 0.95890411, 0.96232877, 0.96232877, 0.96575342,\n",
      "       0.96575342, 0.96917808, 0.96917808, 0.97260274, 0.97260274,\n",
      "       0.9760274 , 0.9760274 , 0.97945205, 0.97945205, 0.98287671,\n",
      "       0.98287671, 0.98630137, 0.98630137, 0.98972603, 0.98972603,\n",
      "       0.99315068, 0.99315068, 0.99657534, 0.99657534, 1.        ,\n",
      "       1.        ]), array([0.        , 0.00344828, 0.44137931, 0.44137931, 0.55517241,\n",
      "       0.55517241, 0.70689655, 0.70689655, 0.74137931, 0.74137931,\n",
      "       0.75517241, 0.75517241, 0.7862069 , 0.7862069 , 0.78965517,\n",
      "       0.78965517, 0.79310345, 0.79310345, 0.82758621, 0.82758621,\n",
      "       0.84137931, 0.84137931, 0.85517241, 0.85517241, 0.8862069 ,\n",
      "       0.8862069 , 0.90689655, 0.90689655, 0.91724138, 0.91724138,\n",
      "       0.92068966, 0.92068966, 0.92413793, 0.92413793, 0.92758621,\n",
      "       0.92758621, 0.93103448, 0.93103448, 0.93448276, 0.93448276,\n",
      "       0.94137931, 0.94137931, 0.94482759, 0.94482759, 0.94827586,\n",
      "       0.94827586, 0.95862069, 0.95862069, 0.96206897, 0.96206897,\n",
      "       0.96551724, 0.96551724, 0.97586207, 0.97586207, 0.97931034,\n",
      "       0.97931034, 0.98275862, 0.98275862, 0.9862069 , 0.9862069 ,\n",
      "       0.99310345, 0.99310345, 0.99655172, 0.99655172, 1.        ,\n",
      "       1.        ]), array([0.        , 0.00403226, 0.35080645, 0.35887097, 0.44758065,\n",
      "       0.44758065, 0.46370968, 0.46370968, 0.76612903, 0.76612903,\n",
      "       0.77016129, 0.77016129, 0.7983871 , 0.7983871 , 0.81854839,\n",
      "       0.81854839, 0.83064516, 0.83064516, 0.83467742, 0.83467742,\n",
      "       0.83870968, 0.83870968, 0.87096774, 0.87096774, 0.875     ,\n",
      "       0.875     , 0.87903226, 0.87903226, 0.90322581, 0.90322581,\n",
      "       0.90725806, 0.90725806, 0.91532258, 0.91532258, 0.92741935,\n",
      "       0.92741935, 0.93548387, 0.93548387, 0.94354839, 0.94354839,\n",
      "       0.94758065, 0.94758065, 0.9516129 , 0.9516129 , 0.96370968,\n",
      "       0.96370968, 0.96774194, 0.96774194, 0.97177419, 0.97177419,\n",
      "       0.97580645, 0.97580645, 0.97983871, 0.97983871, 0.98387097,\n",
      "       0.98387097, 0.98790323, 0.98790323, 0.99193548, 0.99193548,\n",
      "       0.99596774, 0.99596774, 1.        , 1.        ]), array([0.        , 0.0037594 , 0.01503759, 0.03007519, 0.04135338,\n",
      "       0.04887218, 0.05639098, 0.62030075, 0.62030075, 0.63533835,\n",
      "       0.63533835, 0.63909774, 0.63909774, 0.64285714, 0.64285714,\n",
      "       0.65037594, 0.65037594, 0.65413534, 0.65413534, 0.67669173,\n",
      "       0.67669173, 0.79699248, 0.79699248, 0.80075188, 0.80075188,\n",
      "       0.82330827, 0.82330827, 0.83082707, 0.83082707, 0.83834586,\n",
      "       0.83834586, 0.89849624, 0.89849624, 0.90225564, 0.90225564,\n",
      "       0.93233083, 0.93233083, 0.93609023, 0.93609023, 0.93984962,\n",
      "       0.93984962, 0.94360902, 0.94360902, 0.94736842, 0.94736842,\n",
      "       0.95488722, 0.95488722, 0.96240602, 0.96240602, 0.97744361,\n",
      "       0.97744361, 0.98120301, 0.98120301, 0.98496241, 0.98496241,\n",
      "       0.9887218 , 0.9887218 , 0.9924812 , 0.9924812 , 0.9962406 ,\n",
      "       0.9962406 , 1.        , 1.        ])]\n",
      "precisions [array([0.51449275, 0.5154265 , 0.51636364, 0.51730419, 0.51824818,\n",
      "       0.51919561, 0.52014652, 0.52110092, 0.52205882, 0.52302026,\n",
      "       0.52398524, 0.52495379, 0.52592593, 0.52690167, 0.52788104,\n",
      "       0.52886406, 0.52985075, 0.53084112, 0.53183521, 0.53283302,\n",
      "       0.53383459, 0.53483992, 0.53584906, 0.536862  , 0.53787879,\n",
      "       0.53889943, 0.53992395, 0.54095238, 0.54198473, 0.54302103,\n",
      "       0.5440613 , 0.54510557, 0.54615385, 0.54720617, 0.54826255,\n",
      "       0.54932302, 0.5503876 , 0.55145631, 0.55252918, 0.55360624,\n",
      "       0.5546875 , 0.55577299, 0.55686275, 0.55795678, 0.55905512,\n",
      "       0.56015779, 0.56126482, 0.56237624, 0.56349206, 0.56461233,\n",
      "       0.56573705, 0.56686627, 0.568     , 0.56913828, 0.57028112,\n",
      "       0.57142857, 0.57258065, 0.57373737, 0.57489879, 0.57606491,\n",
      "       0.57723577, 0.57841141, 0.57959184, 0.5807771 , 0.58196721,\n",
      "       0.58316222, 0.58436214, 0.58556701, 0.58677686, 0.58799172,\n",
      "       0.58921162, 0.59043659, 0.59166667, 0.59290188, 0.59414226,\n",
      "       0.59538784, 0.59663866, 0.59789474, 0.59915612, 0.60042283,\n",
      "       0.60169492, 0.6029724 , 0.60425532, 0.60554371, 0.60683761,\n",
      "       0.60813704, 0.60944206, 0.61075269, 0.61206897, 0.61339093,\n",
      "       0.61471861, 0.61605206, 0.6173913 , 0.61873638, 0.62008734,\n",
      "       0.6214442 , 0.62280702, 0.62417582, 0.62555066, 0.62693157,\n",
      "       0.62831858, 0.62971175, 0.63111111, 0.6325167 , 0.63392857,\n",
      "       0.63534676, 0.6367713 , 0.63820225, 0.63963964, 0.64108352,\n",
      "       0.64253394, 0.64399093, 0.64545455, 0.64692483, 0.64840183,\n",
      "       0.64988558, 0.65137615, 0.65287356, 0.65437788, 0.65588915,\n",
      "       0.65740741, 0.65893271, 0.66046512, 0.66200466, 0.6635514 ,\n",
      "       0.66510539, 0.66666667, 0.66823529, 0.66981132, 0.6713948 ,\n",
      "       0.67298578, 0.67458432, 0.67380952, 0.67541766, 0.67464115,\n",
      "       0.67625899, 0.67788462, 0.67951807, 0.68115942, 0.68280872,\n",
      "       0.68446602, 0.68613139, 0.68780488, 0.68948655, 0.69117647,\n",
      "       0.69287469, 0.69458128, 0.6962963 , 0.6980198 , 0.69975186,\n",
      "       0.70149254, 0.7032419 , 0.705     , 0.70676692, 0.70854271,\n",
      "       0.71032746, 0.71212121, 0.71392405, 0.71573604, 0.71755725,\n",
      "       0.71938776, 0.72122762, 0.72307692, 0.72493573, 0.72680412,\n",
      "       0.72868217, 0.73056995, 0.73246753, 0.734375  , 0.73629243,\n",
      "       0.7382199 , 0.74015748, 0.74210526, 0.74406332, 0.74603175,\n",
      "       0.74801061, 0.75      , 0.752     , 0.7540107 , 0.75603217,\n",
      "       0.75806452, 0.76010782, 0.76216216, 0.76422764, 0.76630435,\n",
      "       0.76839237, 0.7704918 , 0.77260274, 0.77472527, 0.7768595 ,\n",
      "       0.77900552, 0.78116343, 0.78055556, 0.78272981, 0.7849162 ,\n",
      "       0.78711485, 0.78932584, 0.78873239, 0.79096045, 0.79320113,\n",
      "       0.79545455, 0.7977208 , 0.8       , 0.80229226, 0.8045977 ,\n",
      "       0.80403458, 0.80635838, 0.80869565, 0.81104651, 0.81049563,\n",
      "       0.8128655 , 0.81524927, 0.81764706, 0.820059  , 0.82248521,\n",
      "       0.82195846, 0.82440476, 0.8238806 , 0.82634731, 0.82882883,\n",
      "       0.82831325, 0.83081571, 0.83333333, 0.83586626, 0.83536585,\n",
      "       0.83792049, 0.8404908 , 0.84307692, 0.84567901, 0.84829721,\n",
      "       0.85093168, 0.85358255, 0.85625   , 0.85893417, 0.85849057,\n",
      "       0.85804416, 0.86075949, 0.86031746, 0.86305732, 0.8658147 ,\n",
      "       0.86538462, 0.8681672 , 0.87096774, 0.87378641, 0.87662338,\n",
      "       0.87947883, 0.88235294, 0.88196721, 0.88157895, 0.88118812,\n",
      "       0.8807947 , 0.88372093, 0.88666667, 0.88963211, 0.88926174,\n",
      "       0.88888889, 0.89189189, 0.89491525, 0.89795918, 0.90102389,\n",
      "       0.90410959, 0.90721649, 0.91034483, 0.91349481, 0.91319444,\n",
      "       0.91637631, 0.91958042, 0.92280702, 0.92253521, 0.92579505,\n",
      "       0.92907801, 0.93238434, 0.93571429, 0.93548387, 0.9352518 ,\n",
      "       0.93501805, 0.93478261, 0.93818182, 0.9379562 , 0.93772894,\n",
      "       0.94117647, 0.94095941, 0.94074074, 0.94423792, 0.94776119,\n",
      "       0.95131086, 0.95488722, 0.95471698, 0.95454545, 0.95437262,\n",
      "       0.95419847, 0.95785441, 0.95769231, 0.95752896, 0.95736434,\n",
      "       0.95719844, 0.9609375 , 0.96078431, 0.96062992, 0.96047431,\n",
      "       0.96031746, 0.96015936, 0.96      , 0.95983936, 0.95967742,\n",
      "       0.96356275, 0.96747967, 0.96734694, 0.96721311, 0.96707819,\n",
      "       0.96694215, 0.96680498, 0.97083333, 0.9707113 , 0.97058824,\n",
      "       0.97468354, 0.97457627, 0.97446809, 0.97435897, 0.97424893,\n",
      "       0.97413793, 0.97402597, 0.97391304, 0.97816594, 0.98245614,\n",
      "       0.98237885, 0.98230088, 0.98222222, 0.98214286, 0.98206278,\n",
      "       0.98198198, 0.98190045, 0.98181818, 0.98173516, 0.98165138,\n",
      "       0.98156682, 0.98148148, 0.98604651, 0.98598131, 0.98591549,\n",
      "       0.98584906, 0.98578199, 0.99047619, 0.99043062, 0.99038462,\n",
      "       0.99033816, 0.99029126, 0.9902439 , 0.99019608, 0.99014778,\n",
      "       0.99009901, 0.99004975, 0.99      , 0.98994975, 0.98989899,\n",
      "       0.98984772, 0.98979592, 0.98974359, 0.98969072, 0.98963731,\n",
      "       0.98958333, 0.9895288 , 0.98947368, 0.98941799, 0.9893617 ,\n",
      "       0.98930481, 0.98924731, 0.98918919, 0.98913043, 0.98907104,\n",
      "       0.98901099, 0.98895028, 0.98888889, 0.98882682, 0.98876404,\n",
      "       0.98870056, 0.98863636, 0.98857143, 0.98850575, 0.98843931,\n",
      "       0.98837209, 0.98830409, 0.98823529, 0.98816568, 0.99404762,\n",
      "       0.99401198, 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        ]), array([0.52898551, 0.52994555, 0.53090909, 0.53187614, 0.53284672,\n",
      "       0.53382084, 0.53479853, 0.53577982, 0.53676471, 0.53775322,\n",
      "       0.53874539, 0.53974122, 0.54074074, 0.54174397, 0.54275093,\n",
      "       0.54376164, 0.54477612, 0.54579439, 0.54681648, 0.5478424 ,\n",
      "       0.54887218, 0.54990584, 0.5509434 , 0.55198488, 0.5530303 ,\n",
      "       0.5540797 , 0.55513308, 0.55619048, 0.55725191, 0.5583174 ,\n",
      "       0.55938697, 0.56046065, 0.56153846, 0.56262042, 0.56370656,\n",
      "       0.56479691, 0.56589147, 0.56699029, 0.56809339, 0.56920078,\n",
      "       0.5703125 , 0.57142857, 0.57254902, 0.57367387, 0.57480315,\n",
      "       0.57593688, 0.5770751 , 0.57821782, 0.57936508, 0.5805169 ,\n",
      "       0.58167331, 0.58283433, 0.584     , 0.58517034, 0.58634538,\n",
      "       0.58752515, 0.58870968, 0.58989899, 0.59109312, 0.59229209,\n",
      "       0.59349593, 0.59470468, 0.59591837, 0.59713701, 0.59836066,\n",
      "       0.59958932, 0.60082305, 0.60206186, 0.60330579, 0.60455487,\n",
      "       0.60580913, 0.60706861, 0.60833333, 0.60960334, 0.61087866,\n",
      "       0.61215933, 0.61344538, 0.61473684, 0.61603376, 0.61733615,\n",
      "       0.61864407, 0.61995754, 0.6212766 , 0.62260128, 0.62393162,\n",
      "       0.62526767, 0.62660944, 0.62795699, 0.62931034, 0.63066955,\n",
      "       0.63203463, 0.63340564, 0.63478261, 0.63616558, 0.63755459,\n",
      "       0.63894967, 0.64035088, 0.64175824, 0.64317181, 0.64459161,\n",
      "       0.6460177 , 0.64745011, 0.64888889, 0.65033408, 0.65178571,\n",
      "       0.65324385, 0.65470852, 0.65617978, 0.65765766, 0.65914221,\n",
      "       0.66063348, 0.66213152, 0.66363636, 0.66514806, 0.66666667,\n",
      "       0.66819222, 0.66743119, 0.66896552, 0.67050691, 0.67205543,\n",
      "       0.67361111, 0.67285383, 0.6744186 , 0.67599068, 0.67757009,\n",
      "       0.67915691, 0.68075117, 0.68235294, 0.68396226, 0.68321513,\n",
      "       0.68483412, 0.68646081, 0.68809524, 0.68735084, 0.68899522,\n",
      "       0.69064748, 0.69230769, 0.6939759 , 0.69565217, 0.69733656,\n",
      "       0.69902913, 0.70072993, 0.70243902, 0.70415648, 0.70588235,\n",
      "       0.70761671, 0.70935961, 0.71111111, 0.71287129, 0.7146402 ,\n",
      "       0.71641791, 0.71820449, 0.72      , 0.72180451, 0.72361809,\n",
      "       0.72544081, 0.72474747, 0.72658228, 0.7284264 , 0.7302799 ,\n",
      "       0.72959184, 0.7314578 , 0.73333333, 0.73521851, 0.7371134 ,\n",
      "       0.73643411, 0.73834197, 0.74025974, 0.7421875 , 0.74412533,\n",
      "       0.7460733 , 0.7480315 , 0.75      , 0.74934037, 0.75132275,\n",
      "       0.75331565, 0.75265957, 0.75466667, 0.75668449, 0.75871314,\n",
      "       0.76075269, 0.76280323, 0.76486486, 0.76693767, 0.76630435,\n",
      "       0.76839237, 0.7704918 , 0.77260274, 0.77472527, 0.7768595 ,\n",
      "       0.77900552, 0.78116343, 0.78333333, 0.78551532, 0.7877095 ,\n",
      "       0.78991597, 0.79213483, 0.7943662 , 0.79661017, 0.79886686,\n",
      "       0.80113636, 0.8034188 , 0.80571429, 0.80802292, 0.81034483,\n",
      "       0.81268012, 0.8150289 , 0.8173913 , 0.81976744, 0.82215743,\n",
      "       0.8245614 , 0.82404692, 0.82647059, 0.82890855, 0.83136095,\n",
      "       0.83382789, 0.83630952, 0.8358209 , 0.83832335, 0.83783784,\n",
      "       0.8373494 , 0.83987915, 0.83939394, 0.84194529, 0.84146341,\n",
      "       0.8440367 , 0.84662577, 0.84923077, 0.85185185, 0.85139319,\n",
      "       0.85093168, 0.85358255, 0.853125  , 0.85579937, 0.85534591,\n",
      "       0.85488959, 0.85759494, 0.85714286, 0.85987261, 0.85942492,\n",
      "       0.86217949, 0.86173633, 0.86451613, 0.86731392, 0.87012987,\n",
      "       0.87296417, 0.87254902, 0.87540984, 0.87828947, 0.87788779,\n",
      "       0.8807947 , 0.88039867, 0.88      , 0.87959866, 0.88255034,\n",
      "       0.88215488, 0.88513514, 0.88813559, 0.89115646, 0.89078498,\n",
      "       0.89383562, 0.89690722, 0.9       , 0.89965398, 0.90277778,\n",
      "       0.90243902, 0.9020979 , 0.90526316, 0.9084507 , 0.91166078,\n",
      "       0.91134752, 0.91103203, 0.91071429, 0.91039427, 0.91366906,\n",
      "       0.91696751, 0.92028986, 0.92363636, 0.9270073 , 0.92673993,\n",
      "       0.93014706, 0.9298893 , 0.92962963, 0.92936803, 0.92910448,\n",
      "       0.92883895, 0.92857143, 0.93207547, 0.93181818, 0.93536122,\n",
      "       0.9351145 , 0.9348659 , 0.93461538, 0.93822394, 0.9379845 ,\n",
      "       0.93774319, 0.94140625, 0.94509804, 0.94488189, 0.94466403,\n",
      "       0.94444444, 0.94422311, 0.948     , 0.95180723, 0.9516129 ,\n",
      "       0.951417  , 0.95528455, 0.95918367, 0.95901639, 0.95884774,\n",
      "       0.95867769, 0.95850622, 0.95833333, 0.958159  , 0.95798319,\n",
      "       0.95780591, 0.95762712, 0.95744681, 0.95726496, 0.95708155,\n",
      "       0.9612069 , 0.96103896, 0.96086957, 0.96069869, 0.96052632,\n",
      "       0.96475771, 0.96460177, 0.96444444, 0.96428571, 0.96412556,\n",
      "       0.96396396, 0.9638009 , 0.96363636, 0.96803653, 0.96788991,\n",
      "       0.96774194, 0.97222222, 0.97674419, 0.98130841, 0.98122066,\n",
      "       0.98113208, 0.98104265, 0.98095238, 0.98086124, 0.98076923,\n",
      "       0.98067633, 0.98058252, 0.9804878 , 0.98039216, 0.98029557,\n",
      "       0.98019802, 0.9800995 , 0.98      , 0.9798995 , 0.97979798,\n",
      "       0.97969543, 0.97959184, 0.98461538, 0.98453608, 0.98445596,\n",
      "       0.984375  , 0.98429319, 0.98421053, 0.98412698, 0.98404255,\n",
      "       0.98395722, 0.98924731, 0.98918919, 0.98913043, 0.98907104,\n",
      "       0.98901099, 0.98895028, 0.98888889, 0.98882682, 0.98876404,\n",
      "       0.98870056, 0.98863636, 0.98857143, 0.98850575, 0.98843931,\n",
      "       0.98837209, 0.98830409, 0.98823529, 0.98816568, 0.98809524,\n",
      "       0.98802395, 0.98795181, 0.98787879, 0.98780488, 0.98773006,\n",
      "       0.99382716, 0.99378882, 0.99375   , 0.99371069, 0.99367089,\n",
      "       0.99363057, 0.99358974, 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        ]), array([0.52536232, 0.52631579, 0.52727273, 0.52823315, 0.52919708,\n",
      "       0.53016453, 0.53113553, 0.53211009, 0.53308824, 0.53406998,\n",
      "       0.53505535, 0.53604436, 0.53703704, 0.5380334 , 0.53903346,\n",
      "       0.54003724, 0.54104478, 0.54205607, 0.54307116, 0.54409006,\n",
      "       0.54511278, 0.54613936, 0.54716981, 0.54820416, 0.54924242,\n",
      "       0.55028463, 0.5513308 , 0.55238095, 0.55343511, 0.55449331,\n",
      "       0.55555556, 0.55662188, 0.55769231, 0.55876686, 0.55984556,\n",
      "       0.56092843, 0.5620155 , 0.5631068 , 0.56420233, 0.56530214,\n",
      "       0.56640625, 0.56751468, 0.56862745, 0.56777996, 0.56889764,\n",
      "       0.57001972, 0.57114625, 0.57029703, 0.57142857, 0.57256461,\n",
      "       0.57370518, 0.5748503 , 0.576     , 0.57715431, 0.57831325,\n",
      "       0.57947686, 0.58064516, 0.58181818, 0.58299595, 0.5841785 ,\n",
      "       0.58536585, 0.58655804, 0.5877551 , 0.58895706, 0.59016393,\n",
      "       0.59137577, 0.59259259, 0.59381443, 0.59504132, 0.59627329,\n",
      "       0.59751037, 0.5987526 , 0.6       , 0.60125261, 0.60251046,\n",
      "       0.60377358, 0.60504202, 0.60631579, 0.60759494, 0.60887949,\n",
      "       0.61016949, 0.61146497, 0.61276596, 0.61407249, 0.61538462,\n",
      "       0.61670236, 0.61802575, 0.61935484, 0.62068966, 0.62203024,\n",
      "       0.62337662, 0.62472885, 0.62608696, 0.62745098, 0.62882096,\n",
      "       0.63019694, 0.63157895, 0.63296703, 0.63436123, 0.63576159,\n",
      "       0.63716814, 0.63636364, 0.63555556, 0.63697105, 0.63839286,\n",
      "       0.63982103, 0.64125561, 0.64269663, 0.64414414, 0.64559819,\n",
      "       0.64705882, 0.64852608, 0.65      , 0.65148064, 0.65296804,\n",
      "       0.65446224, 0.6559633 , 0.65747126, 0.65898618, 0.66050808,\n",
      "       0.66203704, 0.66357309, 0.66511628, 0.66666667, 0.6682243 ,\n",
      "       0.66978923, 0.6713615 , 0.67294118, 0.6745283 , 0.67612293,\n",
      "       0.67772512, 0.67933492, 0.68095238, 0.68257757, 0.68421053,\n",
      "       0.68585132, 0.6875    , 0.68915663, 0.69082126, 0.69249395,\n",
      "       0.69417476, 0.69586375, 0.69756098, 0.6992665 , 0.70098039,\n",
      "       0.7027027 , 0.7044335 , 0.70617284, 0.70792079, 0.70967742,\n",
      "       0.71144279, 0.71321696, 0.715     , 0.71679198, 0.71859296,\n",
      "       0.72040302, 0.72222222, 0.72405063, 0.72588832, 0.72773537,\n",
      "       0.72959184, 0.7314578 , 0.73333333, 0.73521851, 0.7371134 ,\n",
      "       0.73901809, 0.74093264, 0.74285714, 0.74479167, 0.74673629,\n",
      "       0.7486911 , 0.75065617, 0.75263158, 0.75461741, 0.75661376,\n",
      "       0.75596817, 0.75797872, 0.76      , 0.76203209, 0.76407507,\n",
      "       0.76612903, 0.76549865, 0.76756757, 0.7696477 , 0.77173913,\n",
      "       0.77384196, 0.77595628, 0.77808219, 0.78021978, 0.78236915,\n",
      "       0.78453039, 0.7867036 , 0.78888889, 0.79108635, 0.79050279,\n",
      "       0.79271709, 0.79213483, 0.7915493 , 0.79096045, 0.79320113,\n",
      "       0.79545455, 0.79487179, 0.79714286, 0.79942693, 0.80172414,\n",
      "       0.80403458, 0.80346821, 0.8057971 , 0.80813953, 0.81049563,\n",
      "       0.8128655 , 0.81524927, 0.81764706, 0.820059  , 0.81952663,\n",
      "       0.8189911 , 0.81845238, 0.82089552, 0.82335329, 0.82582583,\n",
      "       0.82831325, 0.83081571, 0.83030303, 0.83282675, 0.83536585,\n",
      "       0.83792049, 0.8404908 , 0.84307692, 0.84567901, 0.84829721,\n",
      "       0.84782609, 0.85046729, 0.853125  , 0.85579937, 0.85534591,\n",
      "       0.85488959, 0.85759494, 0.86031746, 0.85987261, 0.86261981,\n",
      "       0.86538462, 0.8681672 , 0.87096774, 0.87378641, 0.87662338,\n",
      "       0.87947883, 0.88235294, 0.8852459 , 0.88486842, 0.88778878,\n",
      "       0.89072848, 0.89368771, 0.89666667, 0.89632107, 0.89932886,\n",
      "       0.8989899 , 0.90202703, 0.90508475, 0.90816327, 0.90784983,\n",
      "       0.9109589 , 0.91408935, 0.91724138, 0.92041522, 0.92361111,\n",
      "       0.92682927, 0.93006993, 0.93333333, 0.93661972, 0.93992933,\n",
      "       0.94326241, 0.94661922, 0.94642857, 0.94623656, 0.94604317,\n",
      "       0.94945848, 0.94927536, 0.94909091, 0.94890511, 0.94871795,\n",
      "       0.94852941, 0.94833948, 0.95185185, 0.95167286, 0.95149254,\n",
      "       0.95131086, 0.95112782, 0.9509434 , 0.95075758, 0.95057034,\n",
      "       0.95038168, 0.95019157, 0.95384615, 0.95366795, 0.95348837,\n",
      "       0.95330739, 0.953125  , 0.95686275, 0.95669291, 0.95652174,\n",
      "       0.95634921, 0.9561753 , 0.96      , 0.96385542, 0.96370968,\n",
      "       0.96356275, 0.96341463, 0.96326531, 0.96311475, 0.96296296,\n",
      "       0.96280992, 0.9626556 , 0.9625    , 0.9623431 , 0.96638655,\n",
      "       0.97046414, 0.97033898, 0.97446809, 0.97435897, 0.97854077,\n",
      "       0.97844828, 0.97835498, 0.97826087, 0.97816594, 0.97807018,\n",
      "       0.97797357, 0.97787611, 0.97777778, 0.97767857, 0.98206278,\n",
      "       0.98198198, 0.98190045, 0.98181818, 0.98173516, 0.98623853,\n",
      "       0.98617512, 0.98611111, 0.98604651, 0.98598131, 0.98591549,\n",
      "       0.98584906, 0.98578199, 0.98571429, 0.98564593, 0.98557692,\n",
      "       0.99033816, 0.99029126, 0.9902439 , 0.99019608, 0.99014778,\n",
      "       0.99009901, 0.99004975, 0.99      , 0.98994975, 0.98989899,\n",
      "       0.98984772, 0.98979592, 0.98974359, 0.98969072, 0.98963731,\n",
      "       0.98958333, 0.9895288 , 0.98947368, 0.98941799, 0.9893617 ,\n",
      "       0.98930481, 0.98924731, 0.98918919, 0.98913043, 0.98907104,\n",
      "       0.98901099, 0.98895028, 0.98888889, 0.98882682, 0.98876404,\n",
      "       0.98870056, 0.98863636, 0.98857143, 0.98850575, 0.98843931,\n",
      "       0.98837209, 0.98830409, 0.98823529, 0.98816568, 0.98809524,\n",
      "       0.98802395, 0.98795181, 0.98787879, 0.98780488, 0.98773006,\n",
      "       0.99382716, 0.99378882, 0.99375   , 0.99371069, 0.99367089,\n",
      "       0.99363057, 0.99358974, 0.99354839, 0.99350649, 0.99346405,\n",
      "       0.99342105, 0.99337748, 0.99333333, 0.99328859, 0.99324324,\n",
      "       0.99319728, 0.99315068, 0.99310345, 0.99305556, 0.99300699,\n",
      "       0.99295775, 0.9929078 , 0.99285714, 0.99280576, 0.99275362,\n",
      "       0.99270073, 0.99264706, 0.99259259, 0.99253731, 0.9924812 ,\n",
      "       0.99242424, 0.99236641, 0.99230769, 0.99224806, 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        ]), array([0.44927536, 0.45009074, 0.45090909, 0.45173042, 0.45255474,\n",
      "       0.45338208, 0.45421245, 0.45504587, 0.45588235, 0.45672192,\n",
      "       0.45756458, 0.45841035, 0.45925926, 0.46011132, 0.46096654,\n",
      "       0.46182495, 0.46268657, 0.4635514 , 0.46441948, 0.46529081,\n",
      "       0.46616541, 0.46704331, 0.46792453, 0.46880907, 0.46969697,\n",
      "       0.47058824, 0.47148289, 0.47238095, 0.47328244, 0.47418738,\n",
      "       0.47509579, 0.47600768, 0.47692308, 0.477842  , 0.47876448,\n",
      "       0.47969052, 0.48062016, 0.47961165, 0.48054475, 0.48148148,\n",
      "       0.48242188, 0.48336595, 0.48431373, 0.48330059, 0.48425197,\n",
      "       0.4852071 , 0.48616601, 0.48712871, 0.48809524, 0.48906561,\n",
      "       0.49003984, 0.49101796, 0.492     , 0.49298597, 0.4939759 ,\n",
      "       0.49496982, 0.49596774, 0.4969697 , 0.49797571, 0.4989858 ,\n",
      "       0.5       , 0.50101833, 0.50204082, 0.50306748, 0.50409836,\n",
      "       0.50513347, 0.50617284, 0.50721649, 0.50826446, 0.50931677,\n",
      "       0.51037344, 0.51143451, 0.5125    , 0.51356994, 0.51464435,\n",
      "       0.51572327, 0.51680672, 0.51789474, 0.51898734, 0.52008457,\n",
      "       0.52118644, 0.52229299, 0.52340426, 0.52452026, 0.52564103,\n",
      "       0.5267666 , 0.527897  , 0.52903226, 0.53017241, 0.53131749,\n",
      "       0.53246753, 0.53362256, 0.53478261, 0.53594771, 0.5371179 ,\n",
      "       0.53829322, 0.53947368, 0.54065934, 0.54185022, 0.54304636,\n",
      "       0.54424779, 0.54545455, 0.54666667, 0.54788419, 0.54910714,\n",
      "       0.55033557, 0.55156951, 0.55280899, 0.55405405, 0.55530474,\n",
      "       0.55656109, 0.55782313, 0.55909091, 0.56036446, 0.56164384,\n",
      "       0.56292906, 0.56422018, 0.56551724, 0.56682028, 0.56812933,\n",
      "       0.56944444, 0.57076566, 0.57209302, 0.57342657, 0.57476636,\n",
      "       0.57611241, 0.57746479, 0.57882353, 0.58018868, 0.58156028,\n",
      "       0.58293839, 0.58432304, 0.58571429, 0.58711217, 0.58851675,\n",
      "       0.58992806, 0.59134615, 0.59277108, 0.5942029 , 0.59564165,\n",
      "       0.59708738, 0.59610706, 0.59756098, 0.599022  , 0.6004902 ,\n",
      "       0.6019656 , 0.60344828, 0.60493827, 0.60643564, 0.60794045,\n",
      "       0.60945274, 0.61097257, 0.6125    , 0.61403509, 0.61306533,\n",
      "       0.61460957, 0.61616162, 0.61772152, 0.61928934, 0.62086514,\n",
      "       0.62244898, 0.62404092, 0.62564103, 0.62724936, 0.62886598,\n",
      "       0.63049096, 0.63212435, 0.63376623, 0.6328125 , 0.63446475,\n",
      "       0.63612565, 0.63779528, 0.63947368, 0.64116095, 0.64021164,\n",
      "       0.64190981, 0.64361702, 0.64533333, 0.64705882, 0.64879357,\n",
      "       0.65053763, 0.65229111, 0.65405405, 0.65582656, 0.6576087 ,\n",
      "       0.65940054, 0.66120219, 0.6630137 , 0.66483516, 0.66666667,\n",
      "       0.66850829, 0.67036011, 0.67222222, 0.67409471, 0.67597765,\n",
      "       0.67787115, 0.67977528, 0.68169014, 0.68361582, 0.68555241,\n",
      "       0.6875    , 0.68945869, 0.69142857, 0.69340974, 0.6954023 ,\n",
      "       0.69740634, 0.69942197, 0.70144928, 0.70348837, 0.70553936,\n",
      "       0.70760234, 0.70967742, 0.71176471, 0.71386431, 0.71597633,\n",
      "       0.71810089, 0.7202381 , 0.72238806, 0.72155689, 0.72372372,\n",
      "       0.72590361, 0.72809668, 0.73030303, 0.72948328, 0.73170732,\n",
      "       0.73394495, 0.73619632, 0.73846154, 0.74074074, 0.74303406,\n",
      "       0.74534161, 0.74766355, 0.75      , 0.7492163 , 0.75157233,\n",
      "       0.75394322, 0.75632911, 0.75873016, 0.7611465 , 0.76357827,\n",
      "       0.76282051, 0.76205788, 0.76129032, 0.76375405, 0.76623377,\n",
      "       0.76872964, 0.77124183, 0.77377049, 0.77631579, 0.77557756,\n",
      "       0.7781457 , 0.7807309 , 0.78333333, 0.7826087 , 0.7852349 ,\n",
      "       0.78787879, 0.79054054, 0.79322034, 0.79591837, 0.79863481,\n",
      "       0.80136986, 0.80412371, 0.80689655, 0.80968858, 0.8125    ,\n",
      "       0.81533101, 0.81818182, 0.82105263, 0.82394366, 0.82332155,\n",
      "       0.82269504, 0.82562278, 0.82857143, 0.83154122, 0.83453237,\n",
      "       0.83754513, 0.84057971, 0.84363636, 0.84306569, 0.84249084,\n",
      "       0.84558824, 0.84870849, 0.85185185, 0.85501859, 0.85820896,\n",
      "       0.86142322, 0.86466165, 0.86792453, 0.87121212, 0.87452471,\n",
      "       0.8778626 , 0.88122605, 0.88461538, 0.88416988, 0.88372093,\n",
      "       0.88326848, 0.88671875, 0.89019608, 0.88976378, 0.88932806,\n",
      "       0.89285714, 0.89641434, 0.9       , 0.90361446, 0.90725806,\n",
      "       0.91093117, 0.91056911, 0.91428571, 0.91803279, 0.91769547,\n",
      "       0.91735537, 0.91701245, 0.91666667, 0.91631799, 0.91596639,\n",
      "       0.91983122, 0.92372881, 0.92340426, 0.92735043, 0.93133047,\n",
      "       0.93534483, 0.93506494, 0.93913043, 0.93886463, 0.93859649,\n",
      "       0.93832599, 0.9380531 , 0.93777778, 0.9375    , 0.93721973,\n",
      "       0.93693694, 0.94117647, 0.94545455, 0.94520548, 0.94954128,\n",
      "       0.95391705, 0.9537037 , 0.95813953, 0.95794393, 0.95774648,\n",
      "       0.95754717, 0.96208531, 0.96190476, 0.96172249, 0.96153846,\n",
      "       0.96135266, 0.96116505, 0.96585366, 0.97058824, 0.97536946,\n",
      "       0.98019802, 0.9800995 , 0.98      , 0.9798995 , 0.97979798,\n",
      "       0.97969543, 0.97959184, 0.97948718, 0.98453608, 0.98445596,\n",
      "       0.98958333, 0.9895288 , 0.98947368, 0.98941799, 0.9893617 ,\n",
      "       0.98930481, 0.98924731, 0.98918919, 0.98913043, 0.98907104,\n",
      "       0.98901099, 0.98895028, 0.98888889, 0.98882682, 0.98876404,\n",
      "       0.98870056, 0.98863636, 0.98857143, 0.98850575, 0.98843931,\n",
      "       0.98837209, 0.98830409, 0.98823529, 0.98816568, 0.98809524,\n",
      "       0.98802395, 0.98795181, 0.98787879, 0.98780488, 0.98773006,\n",
      "       0.98765432, 0.98757764, 0.9875    , 0.98742138, 0.98734177,\n",
      "       0.98726115, 0.98717949, 0.98709677, 0.98701299, 0.9869281 ,\n",
      "       0.98684211, 0.98675497, 0.98666667, 0.98657718, 0.98648649,\n",
      "       0.98639456, 0.98630137, 0.9862069 , 0.98611111, 0.98601399,\n",
      "       0.98591549, 0.9858156 , 0.98571429, 0.98561151, 0.98550725,\n",
      "       0.98540146, 0.98529412, 0.98518519, 0.98507463, 0.98496241,\n",
      "       0.98484848, 0.98473282, 0.98461538, 0.98449612, 0.984375  ,\n",
      "       0.98425197, 0.98412698, 0.984     , 0.98387097, 0.98373984,\n",
      "       0.98360656, 0.98347107, 0.98333333, 0.98319328, 0.98305085,\n",
      "       0.98290598, 0.99137931, 0.99130435, 0.99122807, 0.99115044,\n",
      "       0.99107143, 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        ]), array([0.48188406, 0.48275862, 0.48363636, 0.4845173 , 0.48540146,\n",
      "       0.48628885, 0.48717949, 0.48807339, 0.48897059, 0.48987109,\n",
      "       0.49077491, 0.49168207, 0.49259259, 0.49350649, 0.49442379,\n",
      "       0.49534451, 0.49626866, 0.49719626, 0.49812734, 0.49906191,\n",
      "       0.5       , 0.50094162, 0.50188679, 0.50283554, 0.50378788,\n",
      "       0.50474383, 0.50570342, 0.50666667, 0.50763359, 0.50860421,\n",
      "       0.50957854, 0.51055662, 0.51153846, 0.51252408, 0.51351351,\n",
      "       0.51450677, 0.51550388, 0.51650485, 0.51750973, 0.51851852,\n",
      "       0.51953125, 0.52054795, 0.52156863, 0.52259332, 0.52362205,\n",
      "       0.52465483, 0.5256917 , 0.52673267, 0.52777778, 0.52882704,\n",
      "       0.52988048, 0.53093812, 0.532     , 0.53306613, 0.53413655,\n",
      "       0.53521127, 0.53629032, 0.53737374, 0.53846154, 0.53955375,\n",
      "       0.54065041, 0.54175153, 0.54285714, 0.54396728, 0.54508197,\n",
      "       0.54620123, 0.5473251 , 0.54845361, 0.54958678, 0.55072464,\n",
      "       0.55186722, 0.55301455, 0.55416667, 0.55532359, 0.55648536,\n",
      "       0.55765199, 0.55882353, 0.56      , 0.56118143, 0.56236786,\n",
      "       0.56355932, 0.56475584, 0.56595745, 0.56716418, 0.56837607,\n",
      "       0.56959315, 0.57081545, 0.57204301, 0.57327586, 0.57451404,\n",
      "       0.57575758, 0.57700651, 0.57826087, 0.5795207 , 0.58078603,\n",
      "       0.58205689, 0.58333333, 0.58461538, 0.58590308, 0.58719647,\n",
      "       0.58849558, 0.58980044, 0.59111111, 0.59242762, 0.59375   ,\n",
      "       0.5950783 , 0.59641256, 0.59775281, 0.5990991 , 0.60045147,\n",
      "       0.60180995, 0.6031746 , 0.60454545, 0.60592255, 0.60730594,\n",
      "       0.60869565, 0.61009174, 0.61149425, 0.61290323, 0.61431871,\n",
      "       0.61574074, 0.61716937, 0.61860465, 0.62004662, 0.62149533,\n",
      "       0.62295082, 0.62441315, 0.62588235, 0.62735849, 0.62884161,\n",
      "       0.63033175, 0.63182898, 0.63333333, 0.63484487, 0.63636364,\n",
      "       0.63788969, 0.63942308, 0.64096386, 0.64251208, 0.6440678 ,\n",
      "       0.64563107, 0.64720195, 0.64878049, 0.65036675, 0.65196078,\n",
      "       0.65356265, 0.65517241, 0.65679012, 0.65841584, 0.66004963,\n",
      "       0.66169154, 0.66334165, 0.665     , 0.66666667, 0.66834171,\n",
      "       0.67002519, 0.67171717, 0.67341772, 0.6751269 , 0.67684478,\n",
      "       0.67857143, 0.68030691, 0.68205128, 0.68380463, 0.68556701,\n",
      "       0.6873385 , 0.68911917, 0.69090909, 0.69270833, 0.69451697,\n",
      "       0.69633508, 0.69816273, 0.7       , 0.70184697, 0.7037037 ,\n",
      "       0.70557029, 0.70744681, 0.70933333, 0.71122995, 0.71313673,\n",
      "       0.71505376, 0.71698113, 0.71891892, 0.72086721, 0.72282609,\n",
      "       0.72479564, 0.72677596, 0.72876712, 0.73076923, 0.73278237,\n",
      "       0.73480663, 0.73684211, 0.73888889, 0.74094708, 0.74301676,\n",
      "       0.74509804, 0.74719101, 0.74929577, 0.74858757, 0.75070822,\n",
      "       0.75      , 0.75213675, 0.75428571, 0.75644699, 0.75862069,\n",
      "       0.76080692, 0.76300578, 0.76521739, 0.76744186, 0.7696793 ,\n",
      "       0.77192982, 0.77419355, 0.77647059, 0.77876106, 0.78106509,\n",
      "       0.78338279, 0.78571429, 0.7880597 , 0.79041916, 0.79279279,\n",
      "       0.79518072, 0.79758308, 0.8       , 0.80243161, 0.80487805,\n",
      "       0.80733945, 0.80981595, 0.81230769, 0.81481481, 0.81733746,\n",
      "       0.81987578, 0.82242991, 0.825     , 0.82758621, 0.83018868,\n",
      "       0.83280757, 0.83544304, 0.83809524, 0.84076433, 0.84345048,\n",
      "       0.84615385, 0.8488746 , 0.8516129 , 0.85113269, 0.8538961 ,\n",
      "       0.8534202 , 0.85620915, 0.85901639, 0.86184211, 0.86468647,\n",
      "       0.86754967, 0.87043189, 0.87333333, 0.8729097 , 0.87583893,\n",
      "       0.87542088, 0.87837838, 0.88135593, 0.88095238, 0.88054608,\n",
      "       0.88013699, 0.87972509, 0.88275862, 0.88235294, 0.88194444,\n",
      "       0.88501742, 0.88461538, 0.88421053, 0.88732394, 0.89045936,\n",
      "       0.89361702, 0.89679715, 0.9       , 0.90322581, 0.9028777 ,\n",
      "       0.90613718, 0.90942029, 0.90909091, 0.91240876, 0.91575092,\n",
      "       0.91911765, 0.92250923, 0.92592593, 0.92936803, 0.92910448,\n",
      "       0.93258427, 0.93233083, 0.93584906, 0.93939394, 0.9391635 ,\n",
      "       0.9389313 , 0.93869732, 0.93846154, 0.93822394, 0.9379845 ,\n",
      "       0.93774319, 0.9375    , 0.94117647, 0.94094488, 0.94466403,\n",
      "       0.94444444, 0.94422311, 0.944     , 0.9437751 , 0.94354839,\n",
      "       0.94331984, 0.94308943, 0.94285714, 0.94262295, 0.94238683,\n",
      "       0.94214876, 0.94190871, 0.94166667, 0.94142259, 0.94117647,\n",
      "       0.94092827, 0.94491525, 0.94468085, 0.94444444, 0.94849785,\n",
      "       0.94827586, 0.94805195, 0.95217391, 0.95196507, 0.95175439,\n",
      "       0.95154185, 0.95132743, 0.95111111, 0.95089286, 0.95515695,\n",
      "       0.95495495, 0.95927602, 0.96363636, 0.96347032, 0.96330275,\n",
      "       0.96313364, 0.96296296, 0.9627907 , 0.96261682, 0.96244131,\n",
      "       0.96226415, 0.96208531, 0.96190476, 0.96172249, 0.96153846,\n",
      "       0.96135266, 0.96116505, 0.96097561, 0.96078431, 0.96059113,\n",
      "       0.96039604, 0.960199  , 0.96      , 0.95979899, 0.95959596,\n",
      "       0.95939086, 0.95918367, 0.95897436, 0.95876289, 0.95854922,\n",
      "       0.95833333, 0.95811518, 0.95789474, 0.95767196, 0.95744681,\n",
      "       0.96256684, 0.96236559, 0.96216216, 0.96195652, 0.96174863,\n",
      "       0.96153846, 0.96132597, 0.96666667, 0.96648045, 0.97191011,\n",
      "       0.97175141, 0.97159091, 0.97714286, 0.98275862, 0.98265896,\n",
      "       0.98837209, 0.98830409, 0.99411765, 0.99408284, 0.99404762,\n",
      "       0.99401198, 0.9939759 , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        ])]\n",
      "recalls [array([1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 0.99647887, 0.99647887, 0.99295775,\n",
      "       0.99295775, 0.99295775, 0.99295775, 0.99295775, 0.99295775,\n",
      "       0.99295775, 0.99295775, 0.99295775, 0.99295775, 0.99295775,\n",
      "       0.99295775, 0.99295775, 0.99295775, 0.99295775, 0.99295775,\n",
      "       0.99295775, 0.99295775, 0.99295775, 0.99295775, 0.99295775,\n",
      "       0.99295775, 0.99295775, 0.99295775, 0.99295775, 0.99295775,\n",
      "       0.99295775, 0.99295775, 0.99295775, 0.99295775, 0.99295775,\n",
      "       0.99295775, 0.99295775, 0.99295775, 0.99295775, 0.99295775,\n",
      "       0.99295775, 0.99295775, 0.99295775, 0.99295775, 0.99295775,\n",
      "       0.99295775, 0.99295775, 0.99295775, 0.99295775, 0.99295775,\n",
      "       0.99295775, 0.99295775, 0.99295775, 0.99295775, 0.99295775,\n",
      "       0.99295775, 0.99295775, 0.99295775, 0.99295775, 0.99295775,\n",
      "       0.99295775, 0.99295775, 0.98943662, 0.98943662, 0.98943662,\n",
      "       0.98943662, 0.98943662, 0.98591549, 0.98591549, 0.98591549,\n",
      "       0.98591549, 0.98591549, 0.98591549, 0.98591549, 0.98591549,\n",
      "       0.98239437, 0.98239437, 0.98239437, 0.98239437, 0.97887324,\n",
      "       0.97887324, 0.97887324, 0.97887324, 0.97887324, 0.97887324,\n",
      "       0.97535211, 0.97535211, 0.97183099, 0.97183099, 0.97183099,\n",
      "       0.96830986, 0.96830986, 0.96830986, 0.96830986, 0.96478873,\n",
      "       0.96478873, 0.96478873, 0.96478873, 0.96478873, 0.96478873,\n",
      "       0.96478873, 0.96478873, 0.96478873, 0.96478873, 0.96126761,\n",
      "       0.95774648, 0.95774648, 0.95422535, 0.95422535, 0.95422535,\n",
      "       0.95070423, 0.95070423, 0.95070423, 0.95070423, 0.95070423,\n",
      "       0.95070423, 0.95070423, 0.9471831 , 0.94366197, 0.94014085,\n",
      "       0.93661972, 0.93661972, 0.93661972, 0.93661972, 0.93309859,\n",
      "       0.92957746, 0.92957746, 0.92957746, 0.92957746, 0.92957746,\n",
      "       0.92957746, 0.92957746, 0.92957746, 0.92957746, 0.92605634,\n",
      "       0.92605634, 0.92605634, 0.92605634, 0.92253521, 0.92253521,\n",
      "       0.92253521, 0.92253521, 0.92253521, 0.91901408, 0.91549296,\n",
      "       0.91197183, 0.9084507 , 0.9084507 , 0.90492958, 0.90140845,\n",
      "       0.90140845, 0.89788732, 0.8943662 , 0.8943662 , 0.8943662 ,\n",
      "       0.8943662 , 0.8943662 , 0.89084507, 0.88732394, 0.88380282,\n",
      "       0.88028169, 0.88028169, 0.87676056, 0.87323944, 0.86971831,\n",
      "       0.86619718, 0.86619718, 0.86267606, 0.85915493, 0.8556338 ,\n",
      "       0.85211268, 0.84859155, 0.84507042, 0.8415493 , 0.83802817,\n",
      "       0.83802817, 0.83802817, 0.83450704, 0.83098592, 0.82746479,\n",
      "       0.82394366, 0.82042254, 0.82042254, 0.81690141, 0.81338028,\n",
      "       0.81338028, 0.80985915, 0.80633803, 0.8028169 , 0.79929577,\n",
      "       0.79577465, 0.79225352, 0.78873239, 0.78873239, 0.78873239,\n",
      "       0.78521127, 0.78169014, 0.77816901, 0.77464789, 0.77112676,\n",
      "       0.76760563, 0.76408451, 0.76056338, 0.75704225, 0.75352113,\n",
      "       0.75      , 0.74647887, 0.74647887, 0.74295775, 0.73943662,\n",
      "       0.73591549, 0.73239437, 0.73239437, 0.72887324, 0.72535211,\n",
      "       0.72183099, 0.71830986, 0.71478873, 0.71126761, 0.70774648,\n",
      "       0.70422535, 0.70070423, 0.6971831 , 0.69366197, 0.69014085,\n",
      "       0.68661972, 0.68309859, 0.67957746, 0.67605634, 0.67253521,\n",
      "       0.66901408, 0.66549296, 0.66197183, 0.6584507 , 0.65492958,\n",
      "       0.65140845, 0.64788732, 0.6443662 , 0.64084507, 0.63732394,\n",
      "       0.63380282, 0.63028169, 0.62676056, 0.62323944, 0.61971831,\n",
      "       0.61619718, 0.61267606, 0.60915493, 0.6056338 , 0.60211268,\n",
      "       0.59859155, 0.59507042, 0.5915493 , 0.58802817, 0.58802817,\n",
      "       0.58450704, 0.58450704, 0.58098592, 0.57746479, 0.57394366,\n",
      "       0.57042254, 0.56690141, 0.56338028, 0.55985915, 0.55633803,\n",
      "       0.5528169 , 0.54929577, 0.54577465, 0.54225352, 0.53873239,\n",
      "       0.53521127, 0.53169014, 0.52816901, 0.52464789, 0.52112676,\n",
      "       0.51760563, 0.51408451, 0.51056338, 0.50704225, 0.50352113,\n",
      "       0.5       , 0.49647887, 0.49295775, 0.48943662, 0.48591549,\n",
      "       0.48239437, 0.47887324, 0.47535211, 0.47183099, 0.46830986,\n",
      "       0.46478873, 0.46126761, 0.45774648, 0.45422535, 0.45070423,\n",
      "       0.4471831 , 0.44366197, 0.44014085, 0.43661972, 0.43309859,\n",
      "       0.42957746, 0.42605634, 0.42253521, 0.41901408, 0.41549296,\n",
      "       0.41197183, 0.4084507 , 0.40492958, 0.40140845, 0.39788732,\n",
      "       0.3943662 , 0.39084507, 0.38732394, 0.38380282, 0.38028169,\n",
      "       0.37676056, 0.37323944, 0.36971831, 0.36619718, 0.36267606,\n",
      "       0.35915493, 0.3556338 , 0.35211268, 0.34859155, 0.34507042,\n",
      "       0.3415493 , 0.33450704, 0.33098592, 0.32746479, 0.32394366,\n",
      "       0.32042254, 0.31690141, 0.31338028, 0.30985915, 0.30633803,\n",
      "       0.3028169 , 0.29929577, 0.29577465, 0.29225352, 0.28873239,\n",
      "       0.28521127, 0.28169014, 0.27816901, 0.27464789, 0.27112676,\n",
      "       0.26760563, 0.26408451, 0.26056338, 0.25704225, 0.25352113,\n",
      "       0.25      , 0.24647887, 0.24295775, 0.23943662, 0.23591549,\n",
      "       0.23239437, 0.22887324, 0.22535211, 0.22183099, 0.21830986,\n",
      "       0.21478873, 0.21126761, 0.20774648, 0.20422535, 0.20070423,\n",
      "       0.1971831 , 0.19366197, 0.19014085, 0.18661972, 0.18309859,\n",
      "       0.17957746, 0.17605634, 0.17253521, 0.16901408, 0.16549296,\n",
      "       0.16197183, 0.1584507 , 0.15492958, 0.15140845, 0.14788732,\n",
      "       0.1443662 , 0.14084507, 0.13732394, 0.13380282, 0.13028169,\n",
      "       0.12676056, 0.12323944, 0.11971831, 0.11619718, 0.11267606,\n",
      "       0.10915493, 0.1056338 , 0.10211268, 0.09859155, 0.09507042,\n",
      "       0.0915493 , 0.08802817, 0.08450704, 0.08098592, 0.07746479,\n",
      "       0.07394366, 0.07042254, 0.06690141, 0.06338028, 0.05985915,\n",
      "       0.05633803, 0.04929577, 0.04577465, 0.04225352, 0.03873239,\n",
      "       0.03521127, 0.03169014, 0.02816901, 0.02464789, 0.02112676,\n",
      "       0.01760563, 0.01408451, 0.01056338, 0.00704225, 0.00352113,\n",
      "       0.        ]), array([1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 0.99657534, 0.99657534, 0.99657534, 0.99657534,\n",
      "       0.99657534, 0.99315068, 0.99315068, 0.99315068, 0.99315068,\n",
      "       0.99315068, 0.99315068, 0.99315068, 0.99315068, 0.98972603,\n",
      "       0.98972603, 0.98972603, 0.98972603, 0.98630137, 0.98630137,\n",
      "       0.98630137, 0.98630137, 0.98630137, 0.98630137, 0.98630137,\n",
      "       0.98630137, 0.98630137, 0.98630137, 0.98630137, 0.98630137,\n",
      "       0.98630137, 0.98630137, 0.98630137, 0.98630137, 0.98630137,\n",
      "       0.98630137, 0.98630137, 0.98630137, 0.98630137, 0.98630137,\n",
      "       0.98630137, 0.98287671, 0.98287671, 0.98287671, 0.98287671,\n",
      "       0.97945205, 0.97945205, 0.97945205, 0.97945205, 0.97945205,\n",
      "       0.9760274 , 0.9760274 , 0.9760274 , 0.9760274 , 0.9760274 ,\n",
      "       0.9760274 , 0.9760274 , 0.9760274 , 0.97260274, 0.97260274,\n",
      "       0.97260274, 0.96917808, 0.96917808, 0.96917808, 0.96917808,\n",
      "       0.96917808, 0.96917808, 0.96917808, 0.96917808, 0.96575342,\n",
      "       0.96575342, 0.96575342, 0.96575342, 0.96575342, 0.96575342,\n",
      "       0.96575342, 0.96575342, 0.96575342, 0.96575342, 0.96575342,\n",
      "       0.96575342, 0.96575342, 0.96575342, 0.96575342, 0.96575342,\n",
      "       0.96575342, 0.96575342, 0.96575342, 0.96575342, 0.96575342,\n",
      "       0.96575342, 0.96575342, 0.96575342, 0.96575342, 0.96575342,\n",
      "       0.96575342, 0.96232877, 0.96232877, 0.96232877, 0.96232877,\n",
      "       0.96232877, 0.96232877, 0.95890411, 0.95890411, 0.95547945,\n",
      "       0.95205479, 0.95205479, 0.94863014, 0.94863014, 0.94520548,\n",
      "       0.94520548, 0.94520548, 0.94520548, 0.94520548, 0.94178082,\n",
      "       0.93835616, 0.93835616, 0.93493151, 0.93493151, 0.93150685,\n",
      "       0.92808219, 0.92808219, 0.92465753, 0.92465753, 0.92123288,\n",
      "       0.92123288, 0.91780822, 0.91780822, 0.91780822, 0.91780822,\n",
      "       0.91780822, 0.91438356, 0.91438356, 0.91438356, 0.9109589 ,\n",
      "       0.9109589 , 0.90753425, 0.90410959, 0.90068493, 0.90068493,\n",
      "       0.89726027, 0.89726027, 0.89726027, 0.89726027, 0.89383562,\n",
      "       0.89383562, 0.89383562, 0.89383562, 0.89041096, 0.89041096,\n",
      "       0.8869863 , 0.88356164, 0.88356164, 0.88356164, 0.88356164,\n",
      "       0.88013699, 0.87671233, 0.87328767, 0.86986301, 0.86986301,\n",
      "       0.86986301, 0.86986301, 0.86986301, 0.86986301, 0.86643836,\n",
      "       0.86643836, 0.8630137 , 0.85958904, 0.85616438, 0.85273973,\n",
      "       0.84931507, 0.84589041, 0.84589041, 0.84246575, 0.84246575,\n",
      "       0.8390411 , 0.83561644, 0.83219178, 0.83219178, 0.82876712,\n",
      "       0.82534247, 0.82534247, 0.82534247, 0.82191781, 0.81849315,\n",
      "       0.81506849, 0.81164384, 0.81164384, 0.81164384, 0.80821918,\n",
      "       0.80479452, 0.80479452, 0.80479452, 0.80136986, 0.79794521,\n",
      "       0.79452055, 0.79109589, 0.78767123, 0.78424658, 0.78082192,\n",
      "       0.77739726, 0.7739726 , 0.77054795, 0.76712329, 0.76369863,\n",
      "       0.76369863, 0.76027397, 0.75684932, 0.75342466, 0.75      ,\n",
      "       0.75      , 0.74657534, 0.74315068, 0.73972603, 0.73630137,\n",
      "       0.73287671, 0.72945205, 0.7260274 , 0.7260274 , 0.72260274,\n",
      "       0.71917808, 0.71917808, 0.71917808, 0.71917808, 0.71575342,\n",
      "       0.71232877, 0.70890411, 0.70547945, 0.70205479, 0.69863014,\n",
      "       0.69520548, 0.69178082, 0.68835616, 0.68493151, 0.68150685,\n",
      "       0.67808219, 0.67465753, 0.67123288, 0.66780822, 0.66438356,\n",
      "       0.6609589 , 0.65753425, 0.65753425, 0.65410959, 0.65068493,\n",
      "       0.64726027, 0.64383562, 0.64041096, 0.6369863 , 0.63356164,\n",
      "       0.63013699, 0.63013699, 0.62671233, 0.62328767, 0.61986301,\n",
      "       0.61643836, 0.6130137 , 0.60958904, 0.60616438, 0.60273973,\n",
      "       0.59931507, 0.59589041, 0.59246575, 0.5890411 , 0.58561644,\n",
      "       0.58219178, 0.57876712, 0.57534247, 0.57191781, 0.56849315,\n",
      "       0.56506849, 0.56164384, 0.55821918, 0.55479452, 0.55136986,\n",
      "       0.55136986, 0.54794521, 0.54452055, 0.54109589, 0.53767123,\n",
      "       0.53424658, 0.53082192, 0.53082192, 0.52739726, 0.5239726 ,\n",
      "       0.52054795, 0.51712329, 0.51369863, 0.51027397, 0.50684932,\n",
      "       0.50342466, 0.5       , 0.49657534, 0.49315068, 0.48972603,\n",
      "       0.48630137, 0.48287671, 0.47945205, 0.4760274 , 0.47260274,\n",
      "       0.46917808, 0.46575342, 0.46232877, 0.45890411, 0.45547945,\n",
      "       0.45205479, 0.44863014, 0.44520548, 0.44178082, 0.43835616,\n",
      "       0.43493151, 0.43150685, 0.42808219, 0.42465753, 0.42123288,\n",
      "       0.41780822, 0.41438356, 0.4109589 , 0.40753425, 0.40410959,\n",
      "       0.40068493, 0.39726027, 0.39383562, 0.39041096, 0.3869863 ,\n",
      "       0.38356164, 0.38013699, 0.37671233, 0.37328767, 0.36986301,\n",
      "       0.36643836, 0.3630137 , 0.35616438, 0.35273973, 0.34931507,\n",
      "       0.34589041, 0.34246575, 0.3390411 , 0.33561644, 0.33219178,\n",
      "       0.32876712, 0.32534247, 0.32191781, 0.31849315, 0.31506849,\n",
      "       0.31164384, 0.30821918, 0.30479452, 0.30136986, 0.29794521,\n",
      "       0.29452055, 0.29109589, 0.28767123, 0.28424658, 0.28082192,\n",
      "       0.27739726, 0.2739726 , 0.27054795, 0.26712329, 0.26369863,\n",
      "       0.26027397, 0.25684932, 0.25342466, 0.25      , 0.24657534,\n",
      "       0.24315068, 0.23972603, 0.23630137, 0.23287671, 0.22945205,\n",
      "       0.2260274 , 0.22260274, 0.21917808, 0.21575342, 0.21232877,\n",
      "       0.20890411, 0.20547945, 0.20205479, 0.19863014, 0.19520548,\n",
      "       0.19178082, 0.18835616, 0.18493151, 0.18150685, 0.17808219,\n",
      "       0.17123288, 0.16780822, 0.16438356, 0.1609589 , 0.15753425,\n",
      "       0.15410959, 0.15068493, 0.14726027, 0.14383562, 0.14041096,\n",
      "       0.1369863 , 0.13356164, 0.13013699, 0.12671233, 0.12328767,\n",
      "       0.11986301, 0.11643836, 0.1130137 , 0.10958904, 0.10616438,\n",
      "       0.10273973, 0.09931507, 0.09589041, 0.09246575, 0.0890411 ,\n",
      "       0.08561644, 0.08219178, 0.07876712, 0.07534247, 0.07191781,\n",
      "       0.06849315, 0.06506849, 0.06164384, 0.05821918, 0.05479452,\n",
      "       0.05136986, 0.04794521, 0.04452055, 0.04109589, 0.03767123,\n",
      "       0.03424658, 0.03082192, 0.02739726, 0.0239726 , 0.02054795,\n",
      "       0.01712329, 0.01369863, 0.01027397, 0.00684932, 0.00342466,\n",
      "       0.        ]), array([1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 0.99655172, 0.99655172,\n",
      "       0.99655172, 0.99655172, 0.99310345, 0.99310345, 0.99310345,\n",
      "       0.99310345, 0.99310345, 0.99310345, 0.99310345, 0.99310345,\n",
      "       0.99310345, 0.99310345, 0.99310345, 0.99310345, 0.99310345,\n",
      "       0.99310345, 0.99310345, 0.99310345, 0.99310345, 0.99310345,\n",
      "       0.99310345, 0.99310345, 0.99310345, 0.99310345, 0.99310345,\n",
      "       0.99310345, 0.99310345, 0.99310345, 0.99310345, 0.99310345,\n",
      "       0.99310345, 0.99310345, 0.99310345, 0.99310345, 0.99310345,\n",
      "       0.99310345, 0.99310345, 0.99310345, 0.99310345, 0.99310345,\n",
      "       0.99310345, 0.99310345, 0.99310345, 0.99310345, 0.99310345,\n",
      "       0.99310345, 0.99310345, 0.99310345, 0.99310345, 0.99310345,\n",
      "       0.99310345, 0.99310345, 0.99310345, 0.99310345, 0.99310345,\n",
      "       0.99310345, 0.98965517, 0.9862069 , 0.9862069 , 0.9862069 ,\n",
      "       0.9862069 , 0.9862069 , 0.9862069 , 0.9862069 , 0.9862069 ,\n",
      "       0.9862069 , 0.9862069 , 0.9862069 , 0.9862069 , 0.9862069 ,\n",
      "       0.9862069 , 0.9862069 , 0.9862069 , 0.9862069 , 0.9862069 ,\n",
      "       0.9862069 , 0.9862069 , 0.9862069 , 0.9862069 , 0.9862069 ,\n",
      "       0.9862069 , 0.9862069 , 0.9862069 , 0.9862069 , 0.9862069 ,\n",
      "       0.9862069 , 0.9862069 , 0.9862069 , 0.9862069 , 0.9862069 ,\n",
      "       0.9862069 , 0.9862069 , 0.9862069 , 0.9862069 , 0.9862069 ,\n",
      "       0.9862069 , 0.9862069 , 0.9862069 , 0.9862069 , 0.9862069 ,\n",
      "       0.9862069 , 0.9862069 , 0.9862069 , 0.9862069 , 0.9862069 ,\n",
      "       0.9862069 , 0.9862069 , 0.9862069 , 0.9862069 , 0.9862069 ,\n",
      "       0.9862069 , 0.9862069 , 0.9862069 , 0.9862069 , 0.9862069 ,\n",
      "       0.9862069 , 0.9862069 , 0.9862069 , 0.9862069 , 0.9862069 ,\n",
      "       0.9862069 , 0.9862069 , 0.9862069 , 0.9862069 , 0.9862069 ,\n",
      "       0.9862069 , 0.9862069 , 0.9862069 , 0.9862069 , 0.9862069 ,\n",
      "       0.98275862, 0.98275862, 0.98275862, 0.98275862, 0.98275862,\n",
      "       0.98275862, 0.97931034, 0.97931034, 0.97931034, 0.97931034,\n",
      "       0.97931034, 0.97931034, 0.97931034, 0.97931034, 0.97931034,\n",
      "       0.97931034, 0.97931034, 0.97931034, 0.97931034, 0.97586207,\n",
      "       0.97586207, 0.97241379, 0.96896552, 0.96551724, 0.96551724,\n",
      "       0.96551724, 0.96206897, 0.96206897, 0.96206897, 0.96206897,\n",
      "       0.96206897, 0.95862069, 0.95862069, 0.95862069, 0.95862069,\n",
      "       0.95862069, 0.95862069, 0.95862069, 0.95862069, 0.95517241,\n",
      "       0.95172414, 0.94827586, 0.94827586, 0.94827586, 0.94827586,\n",
      "       0.94827586, 0.94827586, 0.94482759, 0.94482759, 0.94482759,\n",
      "       0.94482759, 0.94482759, 0.94482759, 0.94482759, 0.94482759,\n",
      "       0.94137931, 0.94137931, 0.94137931, 0.94137931, 0.93793103,\n",
      "       0.93448276, 0.93448276, 0.93448276, 0.93103448, 0.93103448,\n",
      "       0.93103448, 0.93103448, 0.93103448, 0.93103448, 0.93103448,\n",
      "       0.93103448, 0.93103448, 0.93103448, 0.92758621, 0.92758621,\n",
      "       0.92758621, 0.92758621, 0.92758621, 0.92413793, 0.92413793,\n",
      "       0.92068966, 0.92068966, 0.92068966, 0.92068966, 0.91724138,\n",
      "       0.91724138, 0.91724138, 0.91724138, 0.91724138, 0.91724138,\n",
      "       0.91724138, 0.91724138, 0.91724138, 0.91724138, 0.91724138,\n",
      "       0.91724138, 0.91724138, 0.9137931 , 0.91034483, 0.90689655,\n",
      "       0.90689655, 0.90344828, 0.9       , 0.89655172, 0.89310345,\n",
      "       0.88965517, 0.8862069 , 0.8862069 , 0.88275862, 0.87931034,\n",
      "       0.87586207, 0.87241379, 0.86896552, 0.86551724, 0.86206897,\n",
      "       0.85862069, 0.85517241, 0.85517241, 0.85172414, 0.84827586,\n",
      "       0.84482759, 0.84137931, 0.84137931, 0.83793103, 0.83448276,\n",
      "       0.83103448, 0.82758621, 0.82758621, 0.82758621, 0.82413793,\n",
      "       0.82068966, 0.81724138, 0.8137931 , 0.81034483, 0.80689655,\n",
      "       0.80344828, 0.8       , 0.79655172, 0.79310345, 0.79310345,\n",
      "       0.79310345, 0.78965517, 0.78965517, 0.7862069 , 0.7862069 ,\n",
      "       0.78275862, 0.77931034, 0.77586207, 0.77241379, 0.76896552,\n",
      "       0.76551724, 0.76206897, 0.75862069, 0.75517241, 0.75517241,\n",
      "       0.75172414, 0.74827586, 0.74482759, 0.74137931, 0.74137931,\n",
      "       0.73793103, 0.73448276, 0.73103448, 0.72758621, 0.72413793,\n",
      "       0.72068966, 0.71724138, 0.7137931 , 0.71034483, 0.70689655,\n",
      "       0.70689655, 0.70344828, 0.7       , 0.69655172, 0.69310345,\n",
      "       0.68965517, 0.6862069 , 0.68275862, 0.67931034, 0.67586207,\n",
      "       0.67241379, 0.66896552, 0.66551724, 0.66206897, 0.65862069,\n",
      "       0.65517241, 0.65172414, 0.64827586, 0.64482759, 0.64137931,\n",
      "       0.63793103, 0.63448276, 0.63103448, 0.62758621, 0.62413793,\n",
      "       0.62068966, 0.61724138, 0.6137931 , 0.61034483, 0.60689655,\n",
      "       0.60344828, 0.6       , 0.59655172, 0.59310345, 0.58965517,\n",
      "       0.5862069 , 0.58275862, 0.57931034, 0.57586207, 0.57241379,\n",
      "       0.56896552, 0.56551724, 0.56206897, 0.55862069, 0.55517241,\n",
      "       0.55517241, 0.55172414, 0.54827586, 0.54482759, 0.54137931,\n",
      "       0.53793103, 0.53448276, 0.53103448, 0.52758621, 0.52413793,\n",
      "       0.52068966, 0.51724138, 0.5137931 , 0.51034483, 0.50689655,\n",
      "       0.50344828, 0.5       , 0.49655172, 0.49310345, 0.48965517,\n",
      "       0.4862069 , 0.48275862, 0.47931034, 0.47586207, 0.47241379,\n",
      "       0.46896552, 0.46551724, 0.46206897, 0.45862069, 0.45517241,\n",
      "       0.45172414, 0.44827586, 0.44482759, 0.44137931, 0.44137931,\n",
      "       0.43793103, 0.43448276, 0.43103448, 0.42758621, 0.42413793,\n",
      "       0.42068966, 0.41724138, 0.4137931 , 0.41034483, 0.40689655,\n",
      "       0.40344828, 0.4       , 0.39655172, 0.39310345, 0.38965517,\n",
      "       0.3862069 , 0.38275862, 0.37931034, 0.37586207, 0.37241379,\n",
      "       0.36896552, 0.36551724, 0.36206897, 0.35862069, 0.35517241,\n",
      "       0.35172414, 0.34827586, 0.34482759, 0.34137931, 0.33793103,\n",
      "       0.33448276, 0.33103448, 0.32758621, 0.32413793, 0.32068966,\n",
      "       0.31724138, 0.3137931 , 0.31034483, 0.30689655, 0.30344828,\n",
      "       0.3       , 0.29655172, 0.29310345, 0.28965517, 0.2862069 ,\n",
      "       0.28275862, 0.27931034, 0.27586207, 0.27241379, 0.26896552,\n",
      "       0.26551724, 0.26206897, 0.25862069, 0.25517241, 0.25172414,\n",
      "       0.24827586, 0.24482759, 0.24137931, 0.23793103, 0.23448276,\n",
      "       0.23103448, 0.22758621, 0.22413793, 0.22068966, 0.21724138,\n",
      "       0.2137931 , 0.21034483, 0.20689655, 0.20344828, 0.2       ,\n",
      "       0.19655172, 0.19310345, 0.18965517, 0.1862069 , 0.18275862,\n",
      "       0.17931034, 0.17586207, 0.17241379, 0.16896552, 0.16551724,\n",
      "       0.16206897, 0.15862069, 0.15517241, 0.15172414, 0.14827586,\n",
      "       0.14482759, 0.14137931, 0.13793103, 0.13448276, 0.13103448,\n",
      "       0.12758621, 0.12413793, 0.12068966, 0.11724138, 0.1137931 ,\n",
      "       0.11034483, 0.10689655, 0.10344828, 0.1       , 0.09655172,\n",
      "       0.09310345, 0.08965517, 0.0862069 , 0.08275862, 0.07931034,\n",
      "       0.07586207, 0.07241379, 0.06896552, 0.06551724, 0.06206897,\n",
      "       0.05862069, 0.05517241, 0.05172414, 0.04827586, 0.04482759,\n",
      "       0.04137931, 0.03793103, 0.03448276, 0.03103448, 0.02758621,\n",
      "       0.02413793, 0.02068966, 0.01724138, 0.0137931 , 0.01034483,\n",
      "       0.00689655, 0.00344828, 0.        ]), array([1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 0.99596774, 0.99596774, 0.99596774,\n",
      "       0.99596774, 0.99596774, 0.99596774, 0.99193548, 0.99193548,\n",
      "       0.99193548, 0.99193548, 0.99193548, 0.99193548, 0.99193548,\n",
      "       0.99193548, 0.99193548, 0.99193548, 0.99193548, 0.99193548,\n",
      "       0.99193548, 0.99193548, 0.99193548, 0.99193548, 0.99193548,\n",
      "       0.99193548, 0.99193548, 0.99193548, 0.99193548, 0.99193548,\n",
      "       0.99193548, 0.99193548, 0.99193548, 0.99193548, 0.99193548,\n",
      "       0.99193548, 0.99193548, 0.99193548, 0.99193548, 0.99193548,\n",
      "       0.99193548, 0.99193548, 0.99193548, 0.99193548, 0.99193548,\n",
      "       0.99193548, 0.99193548, 0.99193548, 0.99193548, 0.99193548,\n",
      "       0.99193548, 0.99193548, 0.99193548, 0.99193548, 0.99193548,\n",
      "       0.99193548, 0.99193548, 0.99193548, 0.99193548, 0.99193548,\n",
      "       0.99193548, 0.99193548, 0.99193548, 0.99193548, 0.99193548,\n",
      "       0.99193548, 0.99193548, 0.99193548, 0.99193548, 0.99193548,\n",
      "       0.99193548, 0.99193548, 0.99193548, 0.99193548, 0.99193548,\n",
      "       0.99193548, 0.99193548, 0.99193548, 0.99193548, 0.99193548,\n",
      "       0.99193548, 0.99193548, 0.99193548, 0.99193548, 0.99193548,\n",
      "       0.99193548, 0.99193548, 0.99193548, 0.99193548, 0.99193548,\n",
      "       0.99193548, 0.99193548, 0.99193548, 0.99193548, 0.99193548,\n",
      "       0.99193548, 0.99193548, 0.99193548, 0.99193548, 0.99193548,\n",
      "       0.99193548, 0.99193548, 0.99193548, 0.99193548, 0.99193548,\n",
      "       0.99193548, 0.98790323, 0.98790323, 0.98790323, 0.98790323,\n",
      "       0.98790323, 0.98790323, 0.98790323, 0.98790323, 0.98790323,\n",
      "       0.98790323, 0.98790323, 0.98790323, 0.98790323, 0.98387097,\n",
      "       0.98387097, 0.98387097, 0.98387097, 0.98387097, 0.98387097,\n",
      "       0.98387097, 0.98387097, 0.98387097, 0.98387097, 0.98387097,\n",
      "       0.98387097, 0.98387097, 0.98387097, 0.97983871, 0.97983871,\n",
      "       0.97983871, 0.97983871, 0.97983871, 0.97983871, 0.97580645,\n",
      "       0.97580645, 0.97580645, 0.97580645, 0.97580645, 0.97580645,\n",
      "       0.97580645, 0.97580645, 0.97580645, 0.97580645, 0.97580645,\n",
      "       0.97580645, 0.97580645, 0.97580645, 0.97580645, 0.97580645,\n",
      "       0.97580645, 0.97580645, 0.97580645, 0.97580645, 0.97580645,\n",
      "       0.97580645, 0.97580645, 0.97580645, 0.97580645, 0.97580645,\n",
      "       0.97580645, 0.97580645, 0.97580645, 0.97580645, 0.97580645,\n",
      "       0.97580645, 0.97580645, 0.97580645, 0.97580645, 0.97580645,\n",
      "       0.97580645, 0.97580645, 0.97580645, 0.97580645, 0.97580645,\n",
      "       0.97580645, 0.97580645, 0.97580645, 0.97177419, 0.97177419,\n",
      "       0.97177419, 0.97177419, 0.97177419, 0.96774194, 0.96774194,\n",
      "       0.96774194, 0.96774194, 0.96774194, 0.96774194, 0.96774194,\n",
      "       0.96774194, 0.96774194, 0.96774194, 0.96370968, 0.96370968,\n",
      "       0.96370968, 0.96370968, 0.96370968, 0.96370968, 0.96370968,\n",
      "       0.95967742, 0.95564516, 0.9516129 , 0.9516129 , 0.9516129 ,\n",
      "       0.9516129 , 0.9516129 , 0.9516129 , 0.9516129 , 0.94758065,\n",
      "       0.94758065, 0.94758065, 0.94758065, 0.94354839, 0.94354839,\n",
      "       0.94354839, 0.94354839, 0.94354839, 0.94354839, 0.94354839,\n",
      "       0.94354839, 0.94354839, 0.94354839, 0.94354839, 0.94354839,\n",
      "       0.94354839, 0.94354839, 0.94354839, 0.94354839, 0.93951613,\n",
      "       0.93548387, 0.93548387, 0.93548387, 0.93548387, 0.93548387,\n",
      "       0.93548387, 0.93548387, 0.93548387, 0.93145161, 0.92741935,\n",
      "       0.92741935, 0.92741935, 0.92741935, 0.92741935, 0.92741935,\n",
      "       0.92741935, 0.92741935, 0.92741935, 0.92741935, 0.92741935,\n",
      "       0.92741935, 0.92741935, 0.92741935, 0.9233871 , 0.91935484,\n",
      "       0.91532258, 0.91532258, 0.91532258, 0.91129032, 0.90725806,\n",
      "       0.90725806, 0.90725806, 0.90725806, 0.90725806, 0.90725806,\n",
      "       0.90725806, 0.90322581, 0.90322581, 0.90322581, 0.89919355,\n",
      "       0.89516129, 0.89112903, 0.88709677, 0.88306452, 0.87903226,\n",
      "       0.87903226, 0.87903226, 0.875     , 0.875     , 0.875     ,\n",
      "       0.875     , 0.87096774, 0.87096774, 0.86693548, 0.86290323,\n",
      "       0.85887097, 0.85483871, 0.85080645, 0.84677419, 0.84274194,\n",
      "       0.83870968, 0.83870968, 0.83870968, 0.83467742, 0.83467742,\n",
      "       0.83467742, 0.83064516, 0.83064516, 0.8266129 , 0.82258065,\n",
      "       0.81854839, 0.81854839, 0.81451613, 0.81048387, 0.80645161,\n",
      "       0.80241935, 0.7983871 , 0.7983871 , 0.7983871 , 0.7983871 ,\n",
      "       0.7983871 , 0.79435484, 0.79032258, 0.78629032, 0.78225806,\n",
      "       0.77822581, 0.77419355, 0.77016129, 0.77016129, 0.76612903,\n",
      "       0.76612903, 0.76209677, 0.75806452, 0.75403226, 0.75      ,\n",
      "       0.74596774, 0.74193548, 0.73790323, 0.73387097, 0.72983871,\n",
      "       0.72580645, 0.72177419, 0.71774194, 0.71370968, 0.70967742,\n",
      "       0.70564516, 0.7016129 , 0.69758065, 0.69354839, 0.68951613,\n",
      "       0.68548387, 0.68145161, 0.67741935, 0.6733871 , 0.66935484,\n",
      "       0.66532258, 0.66129032, 0.65725806, 0.65322581, 0.64919355,\n",
      "       0.64516129, 0.64112903, 0.63709677, 0.63306452, 0.62903226,\n",
      "       0.625     , 0.62096774, 0.61693548, 0.61290323, 0.60887097,\n",
      "       0.60483871, 0.60080645, 0.59677419, 0.59274194, 0.58870968,\n",
      "       0.58467742, 0.58064516, 0.5766129 , 0.57258065, 0.56854839,\n",
      "       0.56451613, 0.56048387, 0.55645161, 0.55241935, 0.5483871 ,\n",
      "       0.54435484, 0.54032258, 0.53629032, 0.53225806, 0.52822581,\n",
      "       0.52419355, 0.52016129, 0.51612903, 0.51209677, 0.50806452,\n",
      "       0.50403226, 0.5       , 0.49596774, 0.49193548, 0.48790323,\n",
      "       0.48387097, 0.47983871, 0.47580645, 0.47177419, 0.46774194,\n",
      "       0.46370968, 0.46370968, 0.45967742, 0.45564516, 0.4516129 ,\n",
      "       0.44758065, 0.44758065, 0.44354839, 0.43951613, 0.43548387,\n",
      "       0.43145161, 0.42741935, 0.4233871 , 0.41935484, 0.41532258,\n",
      "       0.41129032, 0.40725806, 0.40322581, 0.39919355, 0.39516129,\n",
      "       0.39112903, 0.38709677, 0.38306452, 0.37903226, 0.375     ,\n",
      "       0.37096774, 0.36693548, 0.36290323, 0.35887097, 0.35080645,\n",
      "       0.34677419, 0.34274194, 0.33870968, 0.33467742, 0.33064516,\n",
      "       0.3266129 , 0.32258065, 0.31854839, 0.31451613, 0.31048387,\n",
      "       0.30645161, 0.30241935, 0.2983871 , 0.29435484, 0.29032258,\n",
      "       0.28629032, 0.28225806, 0.27822581, 0.27419355, 0.27016129,\n",
      "       0.26612903, 0.26209677, 0.25806452, 0.25403226, 0.25      ,\n",
      "       0.24596774, 0.24193548, 0.23790323, 0.23387097, 0.22983871,\n",
      "       0.22580645, 0.22177419, 0.21774194, 0.21370968, 0.20967742,\n",
      "       0.20564516, 0.2016129 , 0.19758065, 0.19354839, 0.18951613,\n",
      "       0.18548387, 0.18145161, 0.17741935, 0.1733871 , 0.16935484,\n",
      "       0.16532258, 0.16129032, 0.15725806, 0.15322581, 0.14919355,\n",
      "       0.14516129, 0.14112903, 0.13709677, 0.13306452, 0.12903226,\n",
      "       0.125     , 0.12096774, 0.11693548, 0.11290323, 0.10887097,\n",
      "       0.10483871, 0.10080645, 0.09677419, 0.09274194, 0.08870968,\n",
      "       0.08467742, 0.08064516, 0.0766129 , 0.07258065, 0.06854839,\n",
      "       0.06451613, 0.06048387, 0.05645161, 0.05241935, 0.0483871 ,\n",
      "       0.04435484, 0.04032258, 0.03629032, 0.03225806, 0.02822581,\n",
      "       0.02419355, 0.02016129, 0.01612903, 0.01209677, 0.00806452,\n",
      "       0.00403226, 0.        ]), array([1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 1.        , 1.        ,\n",
      "       1.        , 1.        , 1.        , 0.9962406 , 0.9962406 ,\n",
      "       0.9924812 , 0.9924812 , 0.9924812 , 0.9924812 , 0.9924812 ,\n",
      "       0.9924812 , 0.9924812 , 0.9924812 , 0.9924812 , 0.9924812 ,\n",
      "       0.9924812 , 0.9924812 , 0.9924812 , 0.9924812 , 0.9924812 ,\n",
      "       0.9924812 , 0.9924812 , 0.9924812 , 0.9924812 , 0.9924812 ,\n",
      "       0.9924812 , 0.9924812 , 0.9924812 , 0.9924812 , 0.9924812 ,\n",
      "       0.9924812 , 0.9924812 , 0.9924812 , 0.9924812 , 0.9924812 ,\n",
      "       0.9924812 , 0.9924812 , 0.9924812 , 0.9924812 , 0.9924812 ,\n",
      "       0.9924812 , 0.9924812 , 0.9924812 , 0.9924812 , 0.9924812 ,\n",
      "       0.9924812 , 0.9924812 , 0.9924812 , 0.9887218 , 0.9887218 ,\n",
      "       0.98496241, 0.98496241, 0.98496241, 0.98496241, 0.98496241,\n",
      "       0.98496241, 0.98496241, 0.98496241, 0.98120301, 0.98120301,\n",
      "       0.97744361, 0.97744361, 0.97744361, 0.97368421, 0.96992481,\n",
      "       0.96616541, 0.96240602, 0.96240602, 0.95864662, 0.95488722,\n",
      "       0.95488722, 0.95112782, 0.94736842, 0.94736842, 0.94736842,\n",
      "       0.94736842, 0.94736842, 0.94736842, 0.94736842, 0.94360902,\n",
      "       0.94360902, 0.94360902, 0.93984962, 0.93984962, 0.93984962,\n",
      "       0.93984962, 0.93984962, 0.93984962, 0.93984962, 0.93609023,\n",
      "       0.93609023, 0.93233083, 0.93233083, 0.93233083, 0.92857143,\n",
      "       0.92481203, 0.92105263, 0.91729323, 0.91353383, 0.90977444,\n",
      "       0.90601504, 0.90225564, 0.90225564, 0.89849624, 0.89849624,\n",
      "       0.89473684, 0.89097744, 0.88721805, 0.88345865, 0.87969925,\n",
      "       0.87593985, 0.87218045, 0.86842105, 0.86466165, 0.86090226,\n",
      "       0.85714286, 0.85338346, 0.84962406, 0.84586466, 0.84210526,\n",
      "       0.83834586, 0.83834586, 0.83458647, 0.83082707, 0.83082707,\n",
      "       0.82706767, 0.82330827, 0.82330827, 0.81954887, 0.81578947,\n",
      "       0.81203008, 0.80827068, 0.80451128, 0.80075188, 0.80075188,\n",
      "       0.79699248, 0.79699248, 0.79699248, 0.79323308, 0.78947368,\n",
      "       0.78571429, 0.78195489, 0.77819549, 0.77443609, 0.77067669,\n",
      "       0.76691729, 0.76315789, 0.7593985 , 0.7556391 , 0.7518797 ,\n",
      "       0.7481203 , 0.7443609 , 0.7406015 , 0.73684211, 0.73308271,\n",
      "       0.72932331, 0.72556391, 0.72180451, 0.71804511, 0.71428571,\n",
      "       0.71052632, 0.70676692, 0.70300752, 0.69924812, 0.69548872,\n",
      "       0.69172932, 0.68796992, 0.68421053, 0.68045113, 0.67669173,\n",
      "       0.67669173, 0.67293233, 0.66917293, 0.66541353, 0.66165414,\n",
      "       0.65789474, 0.65413534, 0.65413534, 0.65037594, 0.65037594,\n",
      "       0.64661654, 0.64285714, 0.64285714, 0.64285714, 0.63909774,\n",
      "       0.63909774, 0.63533835, 0.63533835, 0.63157895, 0.62781955,\n",
      "       0.62406015, 0.62030075, 0.62030075, 0.61654135, 0.61278195,\n",
      "       0.60902256, 0.60526316, 0.60150376, 0.59774436, 0.59398496,\n",
      "       0.59022556, 0.58646617, 0.58270677, 0.57894737, 0.57518797,\n",
      "       0.57142857, 0.56766917, 0.56390977, 0.56015038, 0.55639098,\n",
      "       0.55263158, 0.54887218, 0.54511278, 0.54135338, 0.53759398,\n",
      "       0.53383459, 0.53007519, 0.52631579, 0.52255639, 0.51879699,\n",
      "       0.51503759, 0.5112782 , 0.5075188 , 0.5037594 , 0.5       ,\n",
      "       0.4962406 , 0.4924812 , 0.4887218 , 0.48496241, 0.48120301,\n",
      "       0.47744361, 0.47368421, 0.46992481, 0.46616541, 0.46240602,\n",
      "       0.45864662, 0.45488722, 0.45112782, 0.44736842, 0.44360902,\n",
      "       0.43984962, 0.43609023, 0.43233083, 0.42857143, 0.42481203,\n",
      "       0.42105263, 0.41729323, 0.41353383, 0.40977444, 0.40601504,\n",
      "       0.40225564, 0.39849624, 0.39473684, 0.39097744, 0.38721805,\n",
      "       0.38345865, 0.37969925, 0.37593985, 0.37218045, 0.36842105,\n",
      "       0.36466165, 0.36090226, 0.35714286, 0.35338346, 0.34962406,\n",
      "       0.34586466, 0.34210526, 0.33834586, 0.33458647, 0.33082707,\n",
      "       0.32706767, 0.32330827, 0.31954887, 0.31578947, 0.31203008,\n",
      "       0.30827068, 0.30451128, 0.30075188, 0.29699248, 0.29323308,\n",
      "       0.28947368, 0.28571429, 0.28195489, 0.27819549, 0.27443609,\n",
      "       0.27067669, 0.26691729, 0.26315789, 0.2593985 , 0.2556391 ,\n",
      "       0.2518797 , 0.2481203 , 0.2443609 , 0.2406015 , 0.23684211,\n",
      "       0.23308271, 0.22932331, 0.22556391, 0.22180451, 0.21804511,\n",
      "       0.21428571, 0.21052632, 0.20676692, 0.20300752, 0.19924812,\n",
      "       0.19548872, 0.19172932, 0.18796992, 0.18421053, 0.18045113,\n",
      "       0.17669173, 0.17293233, 0.16917293, 0.16541353, 0.16165414,\n",
      "       0.15789474, 0.15413534, 0.15037594, 0.14661654, 0.14285714,\n",
      "       0.13909774, 0.13533835, 0.13157895, 0.12781955, 0.12406015,\n",
      "       0.12030075, 0.11654135, 0.11278195, 0.10902256, 0.10526316,\n",
      "       0.10150376, 0.09774436, 0.09398496, 0.09022556, 0.08646617,\n",
      "       0.08270677, 0.07894737, 0.07518797, 0.07142857, 0.06766917,\n",
      "       0.06390977, 0.06015038, 0.05639098, 0.04887218, 0.04511278,\n",
      "       0.04135338, 0.03007519, 0.02255639, 0.01503759, 0.0112782 ,\n",
      "       0.0075188 , 0.0037594 , 0.        ])]\n"
     ]
    },
    {
     "data": {
      "text/plain": "<Figure size 640x480 with 1 Axes>",
      "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAjgAAAHJCAYAAACIU0PXAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMywgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/NK7nSAAAACXBIWXMAAA9hAAAPYQGoP6dpAADAcElEQVR4nOzdd3xV9f348dc5d2YnkEUggTDD3ntDAMEqgrWIRdGiP7W11fJtFdFWRb/a1rqqhWqL26pfBXGjEGSvMJU9w0pCSMhed5zP748LF0IGCSTcjPfz8bgPcs95n3PfN8A97/s5n6EppRRCCCGEEI2I7usEhBBCCCFqmxQ4QgghhGh0pMARQgghRKMjBY4QQgghGh0pcIQQQgjR6EiBI4QQQohGRwocIYQQQjQ6UuAIIYQQotGRAkcIIYQQjY4UOEIIIYRodKTAEUJ4paSkoGlamYfFYqFly5b84he/YMuWLVUen5uby9NPP03//v0JDQ3FbrcTHx/PzJkz2bZt22VfPykpidtuu402bdrg5+dHQEAAnTt35t5772XTpk219TaFEE2AJmtRCSHOS0lJIT4+nnbt2jFjxgwACgsL2bp1Kz/88AMWi4Xly5czYsSIcscmJydz4403kp6eTrdu3Rg9ejT+/v7s3buXpUuX4nQ6eeKJJ3jiiSfKHVtcXMyvfvUrPvroI/z9/UlMTKRjx44AHDhwgKSkJAoLC3n33Xe5/fbb6/aXIIRoHJQQQpxz9OhRBagJEyaU2/fcc88pQI0YMaLcvuPHj6tmzZopXdfVggULyu3ft2+fateunQLUP//5z3L7p0+frgA1btw4lZ6eXm5/dna2evjhh9U//vGPK3xnQoimRgocIYRXVQVORkaGAlRAQEC5fTNmzFCAeuyxxyo9965du5TFYlHBwcEqJyfHu33FihUKUB07dlSFhYVV5ldSUlKt91FaWqpefvll1b9/fxUYGKgCAgJU586d1e9//3t19uxZbxygRo4cWeE5WrdurVq3bl1m28yZMxWgDh8+rF588UXVpUsXZbVa1cyZM9VTTz2lAPXuu+9WeL73339fAWrevHllth85ckTNmjVLxcbGKqvVqqKjo9XMmTNVSkpKtd6rEKJi0gdHCFEjZrO5zPPCwkI+/vhj7HY7f/jDHyo9rmvXrkydOpW8vDw++eQT7/aFCxcC8Ic//AF/f/8qX9tms102v5KSEsaNG8dDDz1ETk4Od911F/fffz8dO3bkX//6F8eOHbvsOS7nt7/9Lc888wx9+/bloYceokePHt5beu+//36Fx7z//vtomuaNA9i0aRO9e/fmnXfeoV+/fjz44IMMHz6cDz74gAEDBnDkyJGrzlWIpsp8+RAhhIDXX38dgGHDhpXZvmXLFpxOJwMGDCA0NLTKc4wdO5aPP/6YDRs2cPfddwOwbt06AMaMGVMref75z39m9erV3H777bz11luYTCbvvtzc3DLPr9SPP/7I9u3biYuLK7N96NChJCUlkZ6eTnR0tHd7RkYGy5cvZ+jQocTHxwPgdDq59dZbMQyDLVu20LNnT2/82rVrGTVqFA8++CBffvnlVecrRFMkBY4QopxDhw7x5JNPAp4WmuTkZFatWkVkZCTPP/98mdj09HQAYmNjL3ve8zFpaWnljm/VqtVV5+12u3n99dcJCQnhlVdeKVfMhISEXPVrAPzxj38sV9wAzJgxg3Xr1vHhhx/y+9//3rv9ww8/xOVylWm9+eqrr0hJSeHpp58uU9yAp4icPHkyS5YsIS8vj+Dg4FrJW4imRAocIUQ5hw8f5qmnniqzLTIykjVr1nhHN10JdW7QpqZpV5VfZfbt20deXh6JiYmEhYXVyWsADBgwoMLt06ZN48EHH+T9998vU+C89957WK1WfvGLX3i3bdy40Zvz+WLyYunp6RiGwYEDB+jXr1/tvgEhmgApcIQQ5UyYMIGlS5cCcObMGd555x0eeeQRbrrpJjZv3kxgYKA39vy
tmBMnTlz2vCdPnixzzPmfU1JSOHXqFG3btr2qvHNycgBo2bLlVZ3ncqKioircHhYWxvXXX89nn33Gvn37SEhIYP/+/WzdupWpU6eWKbrOnj0LwAcffFDlaxUWFtZe4kI0IdLJWAhRpYiICP7whz8wd+5c9u7dy+OPP15mf79+/bBYLGzdupXc3Nwqz5WUlATA4MGDvduGDh1aZt/VON8H6NSpU9WK1zQNl8tV4b6q3ktVLVDn5+k539n4vffeK7P9vPO3nb788kuUZ0RrhY+RI0dW670IIcqSAkcIUS1z584lJiaG+fPnk5KS4t0eEBDALbfcQklJCS+88EKlx+/du5fPPvuMoKAgfv7zn3u3z5o1C4AXXniB4uLiKnMoLS2tcn+nTp0IDg4mOTmZ7Ozsy76nsLCwCouhlJQUb2tQTV1//fWEhYXxwQcfYBgG//3vf2nWrBmTJk0qEzdw4EAANmzYcEWvI4SomhQ4Qohq8fPz45FHHsHpdPL000+X2ffss88SFhbGs88+y3/+859yxx48eJDJkyfjcDj4y1/+Uma01ejRo5k+fTr79+9n6tSpZGRklDs+Ly+PuXPn8sYbb1SZo9ls5t577yU3N5cHH3wQt9tdZn9ubi4FBQXe5/369SMlJYWVK1d6tzkcDmbPnl3l61TlfF+blJQU/vrXv3L06FF+8YtfYLVay8RNnjyZuLg4XnzxRVavXl3uPE6nk7Vr115xHkI0dbJUgxDC6/xSDRf3wblYSUkJ7dq1IyMjg3379tGuXTvvvo0bNzJ58mQyMjLo0aMHo0aN8i7V8O2339ZoqYbx48fTsWNHlFIcPHiQpKQk8vPzee+998qMRKpISUkJ48ePZ82aNXTo0IGJEydis9k4cuQIS5cuZe3atfTq1QuApUuXMnHiRPz9/Zk+fTr+/v4sW7aM0NBQjh07htVqLdNadeedd/LOO+9w9OhR2rRpU2kO69atY9iwYVgsFpxOJ+vWrWPIkCHl4pKTk5k4cSJZWVmMHTuWbt26AXD8+HHWrFlD8+bN2bdvX5XvVwhRCR9NMCiEqIeqmsn4vFdffVUB6vbbby+37+zZs+rJJ59Uffr0UcHBwcpqtaq4uDh1xx13qC1btlz29ZctW6amT5+uWrdurex2u7Lb7apDhw5q1qxZatOmTdV+HyUlJervf/+76tWrl/Lz81OBgYGqS5cu6n/+539UdnZ2mdiPP/5Yde/e3TuL8G9/+1uVn59f5UzGR48evWwObdu2VYBq27ZtlXEnT55UDz74oOrQoYOy2WwqODhYde7cWd19990qKSmp2u9ZCFGWtOAIIYQQotGRPjhCCCGEaHSkwBFCCCFEoyMFjhBCCCEaHSlwhBBCCNHoSIEjhBBCiEZHChwhhBBCNDpNcrFNwzBITU0lKCiozlY1FkIIIUTtUkqRn59PTEwMul51G02TLHBSU1OJjY31dRpCCCGEuAInTpygVatWVcY0yQInKCgI8PyCzq/oK4QQQoj6LS8vj9jYWO91vCpNssA5f1sqODhYChwhhBCigalO9xLpZCyEEEKIRkcKHCGEEEI0OlLgCCGEEKLRkQJHCCGEEI2OFDhCCCGEaHSkwBFCCCFEoyMFjhBCCCEaHSlwhBBCCNHoSIEjhBBCiEZHChwhhBBCNDo+L3BWr17NDTfcQExMDJqmsWTJksses2rVKvr27Yvdbqdt27b861//qvtEhRBCCNFg+LzAKSwspGfPnrz22mvVij969CiTJk1i+PDhbN++nblz5/K73/2ORYsW1XGmQgghhGgofL7Y5sSJE5k4cWK14//1r38RFxfHyy+/DEDnzp3ZsmULf//737n55pvrKEsh6o7hNnC5jXNPDHAVA2DSNUz2ANBNGIaBq9QB7tIKz2HSNUw2fzCZLxur6RqWc7EAjpJScJVUEesHJssVxDq876XCWKsdzNbLxgJY7X4XYkud4CyqItYOZlv1Ym02sNjPxbrAWVj7sQ4XOKq
KtYLFDwCny0CV5Fcea7WA1f+KYo2CHEBVGGvz9/PGOkqdqJICUEaFsWabBZOfZ5Fil8OFuzi/ilgzJr+QK4wtAOWuXmxJIYbDSV6mq3ysxYzVPwQ0DYfDiaukAIxKzlvTWL8Q0M/HFoJR/vUvjXW53TiKCsDtrCI2GHT9CmILwe2oIjYIdNNlY3WTCXtAMOgmAIoK8sFV8eeJJzYIdPMlsRoBzcMJjDBVeNy14PMCp6Y2bNjA+PHjy2ybMGECCxcuxOl0YrFYyh1TWlpKaemFv5y8vLw6z1OI6jiy4wxb92dSUOoCZdA8dxe64fkwa9PSTbcJkyC4BZlnS0havJGgouMVnqdVtJte142H0Fhy8x0s/b+NBBWmVBgbE+Gmz8Qx0CyegiInn/93EyEFRyqMjWxuMGDiCAhvj8Ph4pP3kwkpOFRhbHiYwaCJgyGyMwD/98EWQvMPVBgbFmIwdGJ/iO4OwKcfbSc4Z2+FsSGBBsMn9YaY3gAs+mQnQZm7KowN9FeMmtgNYvsD8NlnuwlI31FhrJ9dMXZiArQeDMDnX+7D7+SWCmNtVsW4ie0hfjgAX357EFvKpgpjLWbFhIltoN1oAL75PgXzobUVxuo6TJrYEjqMA2Bp0lHYuw6NiouAn02Mgk6eL4TLVx7DtWstWiUX1fFjQrH2muKJfeE7ig+dRFMVx/a5Lgb/np7zJn+xmaJ9ezFVEttrTCSB/W8AYOs3WyjYtQeTqvgC3G1YM0KHenLY8f1Ocnf8iCM3qEydpdAoyg0nspUFv9ZdAMg4lkFx2mlwKYrzmqPpZYsMv0Ada2gkaFCUW4QjvwBnibXCHMxWDYuf5/PfUeLAVepAq6TQM1vOxWrgKHXgKqk81mTRsPqVgKbhdDhxlpSiqUpizRpW/+rF6maw+ReDpuNyunAUl1Qr1u1yU1pUXHms6VysruN2G5QWFlUaq5nA7l987suVoqSgsPJYHewBnlilFMX5F2LD2ypmfhpd4XHXQoMrcNLT04mKiiqzLSoqCpfLRWZmJi1atCh3zHPPPcdTTz11rVIU9ZDhvnDB0DQNTdcAcLsM3EbFFxMAk65jMus1jjUMA5er6lhNA92i4cjPIrzgSJmLmkIHDdC0Cwdpmmd7RS6NpfJYVS6Wys9L9c97IZFqxGplY5UbSrIDKww1lyoyU8xQ5LmIOs+YKcmpOFYvUWQes0CpJ9ZxxoTpovO6Syw48j2tJUUW+MkcAM0LAMjfbqEkO6bC81pM8JMlCMLPxW4zU5RVcaxJhxVp4bhsZwHIOGZGy02oMFbTFN8fD4RgT2z6CRPkdIJKLibfn/CHEE/s4c0m3Fl90SppYXhnsYGpWRol+QaOok4oV7tKz5u614xmPQOAo6glyhVZRazpQmxxC5QzvNLYU3tM6O+cj41AOYdXGluar6Mf9bTiOUvsGI4WoBQGCij7xdVhKGxOJ7qm4yzVcZfa8FZNlxRDuknHbNFA87RkaU43lbVk6Sbd01Coabjc6lxhVVmshtmqeWINhaa5QausYNC853Ur0Eorj9VNmidf3fOJoJVUcV5dw2wBdA2lcZkczjWC6hq4QNMM0Cr+jNL1C7Fut0LTjUpb3jQTnhxMGk6Xi1KVj133/B8zWbUKj7lWNKUq+dfmA5qm8dlnn3HTTTdVGtOxY0fuuusuHn30Ue+2devWMWzYMNLS0oiOLl8tVtSCExsbS25uLsHBwbX6HkQ9YbjhyErcxXkcPWKmuPjCf7RWrVyEdOmF3qwNew+eZeua/QQWVtwy0q2DTvv+faB5Ow6m5LBxxf5KW0YS2mkkDOgN4R1IOZHH6uX7Ca6kZaR9G51uA7uRb41nfdKPBLvW0CcuFF3XICgG1T6xxgVTZbFKKYqzDe91RdN1LKYLhUdJsYv0Hx0Vf37pGmb9QmzK5mIq/cjSNE/+51xcVFYWW5pvkPajA2dxFbFQtm663CdWQ469XPwlscrhQFXye9YAzc9e5rz+zSsuOLWL/t6
UUXXCmqZdyEOpyuqVKmMtARDa+vytC0VaYTouUynWtkV0adYZu+aHYRikF6aRWpiGuXkpeuiF2ym6yUT/mH40szfD7XSTknOMAzn7aRZjZ1SHoWgX/aIsFovnNiCeVhmns+LWpmsV63K5KC2p+HZPfYnVTSb8Lvq3U1hQ+S3W87Fnz55l+fLllJaWcuMNN2I2mwkIDKj0uCuVl5dHSEhIta7fDa4FJzo6mvT09DLbMjIyMJvNNG/evMJjbDYbNpvtWqQnfKngjKe/RVhrKMxk995d5BW6yUpvViYs25FP+6h4Ys5t1gwDs7vi/h+6w13m3rPGZWKdF/ZpqvJYk8MAZzGBETa6D+zM+i8jCDIFk7bbjWYyg1Z534qaOrC88j4o9ca565E9SIei3Mov8mYzpqCLWmRyc6GyC7LZhCko6KLYPDAM3G6N5lFFWGwXvulrVguW2Fjvc+eJEyhHJRcssxlr67gLsSdPokrL92VwuzSi44vx793Lu61k337cBRX/3Wq6jn+fPhdiDx70vL9KBPTrB5qiZS9LpUWnZvYUEbpZIzjG5Ck46oHdWbvZk7XH+zzkon0d43rQzO65cO0/e5rCzCxCbaGMik0sU7iYtAvvJ1J1pJ9qj1mv+pJmtVm9F/nLqatYs9mMObB6l976EAtctlDZt28f69atw+124+/vj6GMOiluaqrBFTiDBw/myy+/LLPt+++/p1+/fhX2vxEXXPyNWr/oG7xhqEqbjcHz7e78B4kyFFU1+l3z2KwjUJoDgDv1R1R4R0zmIJQlkF05PdELiygN8nx8+rexgA6ZGsQHtQSgY3wo7Vr2gqK4S18S8HTeJSAUgLatgml9a08oalVFrOe14loGMm16Dyis7DaGRsbJADb8PYtTO0oBKxmc77xb+TfBq6WbKr7AGW5FSIyZkFaX/0jQTYqWfS5qGaiypQZPe3elsYrivftp3hrift4PW5BOzqL1KFclHTsjIggaM9r7PGfJxgqLCwBTszCCx43zPs/9cgtGUcXFnikkmODrenqf5y3d7CmIKqD7+xNyQ68Lscu24j6bXS7OUG40m43gyZ7flVk3k78iD9eZMxgVdZ41mQmZeiG2YE0BztQMDGVQUcV3cex5bsONqqQ6dCs3Zq16sTU5L5QtNgxlnMu58tiKeIqYUWVy7BDWgXah7S5buOiajq75fFBwk+N0OlmzZg2HDnn65cXGxjJ69Gjsdvtljrw2fH6LqqCgwPvL6d27Ny+++CKjR4+mWbNmxMXF8eijj3Lq1CneffddwDNMvFu3btx7773cc889bNiwgfvuu48PP/yw2qOoatLE1Vgc2XGG4vwLF4JOg6IxWzwfNKmHcshOq7wJskP/KKx2zwdM+pFcsk4VVBrbrk8k9gBPoZlxLI8zxytviWjbKwK/IM+3nsyT+Zw+Wnnn7zbdwwkI9bTCZaUWkL4/AwoyIPeE5z3kFlPqNACNLmP60rpLF/YczCI7tYgQk4mAECtteoR7z6dfcjulruSfdrH/+2KyU5w4iz3/1ZQBxzaVH4nUrI2FsDgzmg6Rnav3bbC6QluZaTPk6j908pOS0AODCBg4AADlcpGzaHGl8dbYVgQMGeJ9nv3x/1We481T0cxmlKvizq3naeYLF7srjXVV0DH34ljTRdfnmsYqFKtOriLnXOGNyYTNZOPGdjd6c1h1chVnis+UT9hkwqyZmdJhCsrtBqVYl7qOtMK0CmMBbul4i3fThtQNnCw4WT72nCntp3iLheT0ZFLyUiqNvbHdjdhMnv9z205v43Du4UpjJ8VPIsDi+ca+88xODmRX3LkcYHzr8QRZg8oVQZcrYkT9kpWVxfLly8nNzUXTNPr370/Pnj3rvJWwQd2i2rJlC6NHX/hGNnv2bABmzpzJ22+/TVpaGsePX+gfER8fzzfffMPvf/97/vnPfxITE8M//vEPGSJeBcNtlCluGqzSfHA5ABvkpUJxDgBuQ5FjCgcTGJoZd2A0JrN
O984RGB09H6IXt1hVxOVQ5Bx3VdmKdJ7hhlPbSknd6eDkllIq+1xWCtzOy5+vVR8bYx4JIzCy7odTKqVwV9B6cP7Cq2k6ZsuF4srpKCkT4ziTAWcysPbugW62lOlCXFGrhNtw4zJcaGiYdFOlsaawMNw6YLjQ9LKx5QqMi5/rZS+M1Y39/sh3FLkqbs0JtgYzoc0E7/MVKUnkOSouvv3N/lzf9nrv89XHVnG29Oy5N1X+79NbGJlMFe4vE3t+fzViGxppcWn4Nm3aRG5uLgEBAYwdO7bC/q++5vMWHF9oai04httg73rPN8BOA6PRTVr9v0XldpeZA0MrOo12eAVu3YbW81bPxszDbNu5jVS9BfkBrQGY0rslNvOFDrcARWfd5KV5zpX6YymGE45vKuH0Puf5KVtwldbtf4NWfWz4hZmI6Wk9P7UEIS00ortZyn7j0XW0c7d0lFKe30NlriC20FnIN0e+9sy3A6AgePM+THmevkLhnXvRa9x0AIqL8lj3n/+t8HTZ43rTJqwd/aP7o1wuXIaLJYeXVBCpgUmnVWArBscM9hZSnx78tGzYRRfwFgEtGNZymPf5Zwc/w1XJkOUIvwhGxY7yPv/i8BeUVjL/TzNbM8a2HgvA10e+rnaB813Kd9UucJKOJXkLnPO3XM73G6myELtETW4P1YfYmt6iqi99gcSVKywsZPPmzQwePPia3pJqUC044tq6tLgBzt2qqd4HjqZrZTr61Uls1mFIWVOm6FLArlO5FDrctGnvICrYjiu8Haci/SksdaMBEUE2/M/dSivIdLP08bNkHXFW2Ypy6dxVfqEm9Gp8WVYK/JvphLe30HmSf5WTWfmFKEw204VixDDIX7Yc94EcsveXLUrsPXrg17kzuqbjzs4mf9mySi8Wtq5d8O/WHV3TMfLyyFu6tFysUoq0ojRUu9Z0GzkVAL3YQcjqny7/JivhCgssU5BoZrNntGk1WhnKtGD40MUFzOWMjRtb7diRsSO9P1d1y6Umt2NM1fkHWY9ipXWmccrMzOTkyZP06tULgICAgDJ3X+ojKXBE/RMaB50mwaHl3gpk96lcCp0GmWG9aHVu1IzZpNO5RTCHMwroH9ycn/6viK/eycJRaJC+u/wtueAYzz93k1nztKSYNTpP8sca4Cms/MJ0LPba/WDOT0oiPzML/z69sbRvh67puNLTceWcZd/ZfRQ5y46yKj6RScdoM52adQKg0FnEvrP7Kjx3cXAGbWPMdG3e1fPcVVJmZMrFSvI0uigDf7M/N7S7gfx9ZS9YemgogaNHoV10IfPzD2bkfeXnjzpfpFxcvJp1M1PaT6n093BpoVuT2Bva3VBp7KUmxU+qVlxNCoy6ihWiodi1axcbN27EMAzCwsJo3bq1r1OqFvnf2Igd2XGGNt2bg6YRGumZhv3SCd58yXXpiBq3E+34evTIBLSgaOhxK4ZSONwGP7pOARBosxBhs1GY5Sb7mIudrxdz5oDBUSrosAnE9LAxZk4ogZGmSkcQXQ3lcpW9VWQY3ts/yuXClZkFwKaTG4gJ12gf1h5MZvJK88n3g7yRvS+ZG++iYbBhYfjddD3ZJ/wqfvGLYvXgYPxv+hnZxyueDiHcL8J78bUEBtPs57eU2X9xp9mLWazVb3quD0WDFBhC1J7S0lJWrVpFSkoKAG3atKmXfW0qI31wGmkfnPP9biLigghvFXjZTrbX2tJd6ZwtvNDKElSYQnjOj+juUhJ69Cek0wgA9qTmseNEDq7TkLcY9GNVN5XHDbDTepANi12nZR8rQVF1c8FTSpH//TLcOTn4D+iPLT4eAGdqKgVryk7L7zScrO6h4+fn6dth0kxk5KexOn1duX4aULaJv7JOwTWNlQu/EKImMjIySEpKIj8/H13XGTRoEN26dfN1WtIHR1xw5ng+4a0qnta+rpVrocFzW8nlNsgrvjDXi2a4iDy7xdvnpjQ9m/U/5HJ8UymFDhcFBeDK9PQVsl3yLzastYW
QliZG/j4Ue4iOyXL5VhplGBW2uFSootYZpchftgx3vme4vMtRislwlel3oDAwlOJA9n7yAk0oS4K3U6umaUQGtWBK4JTLFh6appWZF6S2YoUQojJ79uxh/fr1GIZBcHAwiYmJhIeHX/7AekY+DUWd+H53OpkFZfvBxIcH0KVFMMF+Zjq3COZkdhGJncLQ8lLJyYwh9VAIx1M6sHWnHbQLc+1YMHF+5HKbwXaG3B+Cf3Mds02r8W2n/KQk7J07Y4nxTMDnOHaMos3JQMXDnP0HD8IW19qz5s2pUxSsX+cd7aWAfaXHSVMKDv1E/6j+tI6OI/TmqaQVpLMubS2Q4O1QG24P9xY0UowIIeorPz/PUhlt27ZlxIgRWK21Oy/XtSKfsOKKVNQ6Yzo37NvlNjiT78DsKCizgOTR7QVkbsumT1wYKsUg0hHIqvcPk3FQkZPRzxNk8fP2SWnRzUpsfzvR3axoGkR0smD1r/6ttktbZ873iSnZtx9zZGSZfif7zu6l0Fl+2HDBsTz6NptEbJBnGv+ckhyO5B4FwB3sR97gLmX6wmi67mn1MV+Yu+T8bSiLLjNtCyHqJ5fLhfncZ2J8fDw33nhjg+pvUxEpcBqpKqahuCpuQ/H97nSyi8ovJzCxYzQFhw12LiogfTWYDAtWnTIFQA5wgmxAA4sDCPUUIZoTNJ02Q+zYg3R6Tw8krHXFBUF159lwHD5M4bat5ebicWSkeyafM8DaujV6yxbknggg31HBrMsX5W5p2RLzjdeRnb7x3AuVLWAunoI+KiDKO1JI+r8IIeorpRQ7d+5k165dTJ06FX9/z4CUhl7cgBQ4jcaRHWdo2yvC+zz1UE6dvM7mo2c5c9JJ9htlt7uz4L/6GW89YDbpoDQqmgYnLKoEdB3DGkhMLxuGU9Gih5WO4/wx2zwHuAxXucnQzhcuB7MP8mPmj5XmOLLVSCL9IwHILMrkeP6JMvtdYYHkp3xJhF8EncI6ERUQRUJ4Vw7mHCzX4Rfw9q3RdJ1WoXHEhJRdi6qiAkbmAhFC1HclJSX88MMPnDjh+Yw8cOCAd56bxkAKnAbq4oUzDbeiON9B5sl8mrUIKDNiyj/YWmsjqHJOutj2m2JK8j1rOdksF85r0dzgLvF0FNY0ek0OpOuUKALDL7Rq6GawBlw+lxXHV5BVklVu+9i4sTSzNyt/wCUz+CqXC+VyYW3XDi10HNmZ28vGn2t5OVN8BqUULQJbEBccR5uQNpfNTQoXIURjkJaWRlJSEkVFRZhMJoYOHUpCQoKv06pVUuA0QJcunHne6aN5NGvhWfCuZacwWnYMrXFx475oyQRlKDb/J5/8024OJhdhKtTRNA0/q6dASBjtJqHvuXXCCk5jtbtp1qIQzWRG6zkNzDXvc+IyXBUWN3ChcGkf2p52oe08L7s8CffZs2Xi9B/XUNiiBbZOCbSNaE9883aVvt751hcpWoQQTYFSiu3bt7N1q+f2fWhoKImJiTRrVsGXxwZOCpwGxjAUJkvFF+OLW2uqu/zC97vTGZMQSfYRF1vfLSAjv4TMbS40KxhlJ9nForswa9DjRjND/ycG7cw+OH7QszPqXFBUV4juDuaa9bp3Ga4yfVgAbmh7g2elZqUo+GElbP+BHDSCxiVibtbMM8leUTG6Vn5uHGdaOsrlImjMmBrlIYQQjdlPP/3Eli1bAOjYsSPDhg3zdi5ubBrnu2rEdF2jddfmZW5ReffVsLWmxOnm6CIH/1qdiu72FENOl0IZoC4pbgJ/ZtDSL4VB7VMJ6dkJTWsJQS2gzXDcyu2ZwsZsg+AY0PUyqzhXtRCfQrHyxEpySnMY33o8QdYgujTvAkDpyrUYWRe3zpQt2DSzGXtCAo5jxwkaM7rcLM2Vzc4rhBBNVZcuXTh8+DBdu3alY8eOvk6nTskVoIG62n41S3eksf9JJ64zAIb3tlOvqYGExpkIjDIRGmtm0+lV5FtTACjOT+OH4mxIS+fGmG7Y/ELBL5Sdp7d
x+OxBQMHp8q81sc1EAuzBaJrGT5k/cSBrX4WrlzsdJWimALo06wJuNzlZu8vsN4WGegqZiwoXW8eO2BvZfWMhhKgtSikOHjxIhw4dPPNvmc3cdNNNTWJFdylwmhjDrdj+f/nsec3Th0fD02H4xr8EENEFTBYNs8kMFj9cLgeFh5ahuy8ZEq5pFK74AcuI0eg2z9pH/vtPYDte8XpQheFO/G6ciinQ0z/I78Ap7CllKyF/ix+mZho5aARfNwE9KAh7V88ikraOHdB0vcIWmfOzDAshhCirqKiIpKQk0tLSKC4upmfPngBNorgBKXAaHLfbYN/6NDRdI2FQdLVbcpShSN/tYM0/csk85EQ3HGhKEdcpj5t+8xM/FBxiwwbPRHdTWg7H3HWq5zaT2Q5uJzdED8Ksm8Bkw4jsQ+HydZTu3Yu9Wzd6RvSkQ0sXjpLDFb72xR14u4d3p22sC0fRgUtiyvaj0XQdv25da/KrEUIIcc7JkydZsWIFJSUlWCwWAgICfJ3SNScFTgOTsjMT8BQsNbHlxe0kfx4KJqu3r0rX6esZPsSgFIMsd/GFPiznChKz1Z82nW6gwFGAPW404FnqwPXjOgBK9h/A3q0bJt1EYJ++0LtP5QmcG5qtazqBPXtD956XjRVCCFEzhmGwZcsWduzYAUDz5s1JTEwkJCTEt4n5gBQ4DYjhNigp9NwusgdYatR6k7wkGM4tm9BmsB3/X1jZXhhMdsi5OWRCBgOekUtms90zQgnoE9kHk+4pOAyHA1fmhSHc5ohw720jrQZFyfnlDIQQQtSewsJCkpKSSE9PBzwdigcPHoypiX5plAKngWrT8/IruzpLDE5tzGLZ/15YguAX8/0J79IMh8uFLaMFJwtOeveFW8KwKRN5332POycHgNCbp8K5WqR4+w5vbMjkG9Ht9lp5L0IIIa5eUVERGRkZWK1WRowYQdu2bX2dkk9JgdNAVdRFzOU2PEskALmnXPzfPWdw5pd4F6YKiShieY7C2FTA1D7RDI4Z7F0OoWDZclTOWXI2f3bZ1zZHhEtxI4QQ9UxERARjxowhPDyc4OBgX6fjc3KfoAGoaM6bS32/O53F2055n5856KQw34XDZeB0GYSNPIX9XieGycaB/A18fngJ4JnJ12SAyskrcz5TaCihU6eUGbnk368voTdPlcnzhBCiHsjPz+fLL78kMzPTu61t27ZS3JwjLTj13JEdZ3CUuEgY1AIA/xBbuRiX2yCzwOH9uTRbsX+pZ0RUUPscutywgoyw3uQHNcNtlKKbC9D1YFyGC7NuRjObsca2wp1f4J0wr8Ih2U30Pq4QQtQ3KSkprFy5EofDwZo1a5gyZYqvU6p3pMCpxwy34V1zynAb6Cad+B5l+9643AaucyOqst+BNx5J896/spp1YtvG4OjZkRJXOhbTWSxAZ/1Cda9cLjSzGf+BA6WAEUKIes4wDDZu3MiuXbsAiIyMZOzYsT7Oqn6SAqcB+353urflpnAVFOxwYjFdmJQvrLmNPtMD2BfYHL3w3OKcbje4IdwWTtHS73HnFwDnOhMLIYSot/Ly8khKSuLMGc+kqj169GDAgAHoMiq1QlLgNFAut4F+bt6aguWQ/w1oGIBOi7/uIiowjzEOF1haM7jFSBSKgqQVuLM8w7x17SznBohjCg2VdZuEEKIey8nJYcmSJTgcDmw2G6NHjyYuLs7XadVrclVroMwmncQuUWz9MJ9Ny/Pws0CJWyf6md1MGTAB+9kUOL4BAJNuQrndmO1+qEtmDDaFhhI0fpwP3oEQQojqCgkJITIyEpfLxZgxYwgMDPR1SvWeFDgN2E+fFbDpjXOjnzRo8dddmMMdmPWKOwgHDh/uncDPu11aboQQol7Ky8vD398fs9mMpmkkJiZiNpvlllQ1yW+pgXIWG6z5R673+bR3mmMOd1z2OM1sLvMQQghR/xw6dIhFixaxfv167zar1SrFTQ3IFa4e0006foFWNJ0LyzK4HFBwmsL
TBqUOz+ip21/XCAkvJCIr4kJMcTbKbXg6FV/UaiNFjRBC1F8ul4v169ezb98+AHJzc3G5XJjls7vG5DdWz7Xp0bzsmlNHV0PuCfZ+2Rrd3Qqz3UFY8WZMOX0YFTvKE1N0lrxvvsCdXwT+qRCR7T1cM5sImTxZCh0hhKhncnJyWL58OWfPngWgT58+9OnTR1ptrpBc5eq5cgtqOgvZ/E1rtie1RmkKzd/gy/xj6KkOJoYnYLHaUQa4HSawBUNQdJnDTWFhUtwIIUQ9c+DAAdauXYvL5cLPz48xY8bQsmVLX6fVoMmVrp4y3AaHtmYA0L5vpLfQyU6zseX7ODBZcStFeP+9BPxUjKYdJf/ENzSbMhUtsBnm7mNRTqd3ZuLzpLgRQoj6pbS0lI0bN+JyuWjZsiWjR4/G39/f12k1eHK1q8ecpe5y2zZ83xPMbkAj/EEHwUdOVHhs4IjhUswIIUQDYLPZGDVqFJmZmfTu3RtNq2g5ZVFTcgVsQNxORcpmAzSN+OF2sls5MA67MQE5o3sS1GmSN1aKGyGEqL/27duH3W6nTZs2AMTFxcnEfbVMei41IIeSCrw/970jkIP5G0gt2Q+AMutS1AghRD3ndDpZsWIFq1evZuXKlRQVFfk6pUZLrogNxIkPV7P8lZaAgWa1EhxdQMBJE9ZzfXPC7eEVTvAnhBCifsjKymL58uXk5uaiaRq9evXCz8/P12k1WnJFbAAMh4tl/4k898RgyI37MDu78IdR08hxhIKuExqf6NMchRBCVG7Pnj1s2LABt9tNQEAAY8eOJTo6+vIHiismBU4953YqPpp1luJCC6AI7XyWktjdJO0LZ8LgWELGjJVbU0IIUU8ZhsGKFSs4cuQI4OlrM2rUKOx2u48za/zkyliP2fwt5J00yEt1gzKwWfNoO349hmawLW8DriPpTGgzQf4ShRCintJ1Hbvdjq7rDBgwgO7du8soqWtEro31lG7Sad83knS7A+U4idVWyuSJ73DEFEupOYDWLUMocknnNCGEqI+cTicWiwWAwYMHk5CQQHh4uI+zalpkFFU9plwuCjPPzYVjLiLFbSYnrDNHW4zDZLb4NjkhhBDllJaW8v333/Pdd9+hlGe9QJPJJMWND0gLTj2knE7yV/wAyuDg+gFoVhulIdEU9ejFDsdBSktP01kP9nWaQgghLpKRkUFSUhL5+fnous6ZM2eIjIz0dVpNlhQ49YhSipyl32Hk5GAoWPZZBzJTCzFbdDr2CmTgqOtIP/WDd9kGGRouhBD1w48//sjmzZsxDIPg4GDGjh1LRESEr9Nq0uTqWI+sWfh/FGd6Vv52GW5Sj/bF0EvR3U76jNiPPXgMoWdD0TWdUbGjsOhym0oIIXyptLSUlStXcuzYMQDatm3LiBEjsFqtPs5MSIFTTzhLHd7i5qR2igMdYmmma2hA4O/eIDxqNABj48ZKq40QQtQTSUlJnDx5EpPJxODBg+nSpYuvUxLnyJXSh1xuAwCzScds0rEE+qMrNwFjutAj1076uQLnV+26eY+R4kYIIeqPQYMG8cMPPzBq1CiaN2/u63TEReRq6SPf704ns8ABQLMAK4mdIxlx/wzQDD479BmGW8esWzBbDMyaDHYTQoj6oKSkhLS0NOLj4wFo1qwZU6dOlblt6iEpcHzA5Ta8xQ2ASdcwm84XMTp2dwA578ajaRqayTc5CiGEKCstLY2kpCRKSkq48cYbvSOkpLipn6TA8bGpfVpiNimSjiUBMDJ2JMHfDeLEoWLAwBqgfJugEEI0cUoptm/fztatW1FKERoailmWyKn35G/Ix8y6BijOFp0haPN+Clq4KMrs693f/04HutyhEkIInyguLmbFihWcOnUKgI4dOzJ06FDvLMWi/pICpx4x5xbhNGdzdF0pAOMeb0bn4TY43hzsIT7OTgghmpbU1FSSkpIoLi7GbDYzbNgwOnbs6Ou0RDVJgeMjNnP5Zhm308zHr/ZDmT2jq6wBGgRGQpc
br3V6QgjR5J09e5bi4mLCwsJITEwkLCzM1ymJGpACxwfMJp2b+7byPncZnoKmJNszxFAZ4BcOsf1sPslPCCEEdOvWDV3X6dixo/S5aYCkd0c9obkNDJfnP5A9DIb8Ucdsk575QghxrZw8eZIvvvgCp9Pp3dalSxcpbhooKXDqiZC1u3HkB+E2NKxBF+3IT4efPoVDy32WmxBCNGaGYbB582a++eYb0tPT2bFjh69TErVAylIfcLkNVu4/A8CoThGgeWYozjveFjSd0jywB1g8i2oaLijNB5OsayKEELWtsLCQpKQk0tPTAU+LTZ8+fXyclagN9aIFZ/78+cTHx2O32+nbty9r1qypMv6DDz6gZ8+e+Pv706JFC+666y6ysrKuUba1IyO/lIx8z2gps25mwE1zcea3RzObiewKbXqG+zhDIYRo3I4fP86iRYtIT0/HYrGQmJjIsGHDMJlkhtXGwOcFzscff8xDDz3EY489xvbt2xk+fDgTJ07k+PHjFcavXbuWO+64g1mzZrF7924++eQTkpOTufvuu69x5rUr/4wG57rcxI81eX7MPQWZB32ZlhBCNEr79+9n6dKllJSUEB4ezs0330zbtm19nZaoRT4vcF588UVmzZrF3XffTefOnXn55ZeJjY1lwYIFFcZv3LiRNm3a8Lvf/Y74+HiGDRvGvffey5YtW65x5rVDuVxkfbecRfd4JpGK6mSj13Ux6LoGh1dAdoon0CSTSgkhRG2Ji4vD39+fbt26MXnyZIKDg32dkqhlPi1wHA4HW7duZfz48WW2jx8/nvXr11d4zJAhQzh58iTffPMNSilOnz7Np59+yvXXX1/p65SWlpKXl1fmUV+sPLmSdWt34HCV4nA7iB95rq+NUp7+NwCRnSF2oO+SFEKIRuDirgx+fn7ccsstDBkyRG5JNVI+LXAyMzNxu91ERUWV2R4VFeXt8HWpIUOG8MEHHzBt2jSsVivR0dGEhoby6quvVvo6zz33HCEhId5HbGxsrb6PK+J2Y7gcZBec8Q4PNwe7GTAz1LNf16H7LZ5H7EDwb+a7XIUQogEzDIP169ezaNEiDh065N1us8lcY42Zz29RQfmVWJVSla7OumfPHn73u9/x5z//ma1bt7J06VKOHj3KfffdV+n5H330UXJzc72PEydO1Gr+V6LZuuWEr/mesJU7Kc6KwKKbiW4VzNEfMzn6YyaG2wBboOchK9UKIcQVycvL4/PPP2fXrl0A5OTk+DYhcc34dJh4eHg4JpOpXGtNRkZGuVad85577jmGDh3KH//4RwB69OhBQEAAw4cP55lnnqFFixbljrHZbPWuUjdpoDRwldg5vbM3FpOO4YKi3FLIPw3u5RDSAtqO9HWqQgjRIB05coTVq1fjcDiw2WyMGjWK1q1b+zotcY34tMCxWq307duXZcuWMWXKFO/2ZcuWMXny5AqPKSoqKjer5Pn7p0qpukv2KimXi4LVa9AsZgKGDCHxobtwGS4W3HoYw2pDM9toP8YOOKHwDDiLIfekr9MWQogGx+12s2HDBvbs2QN4uj2MHTuWwMBAH2cmriWfT/Q3e/Zsbr/9dvr168fgwYN54403OH78uPeW06OPPsqpU6d49913Abjhhhu45557WLBgARMmTCAtLY2HHnqIAQMGEBMT48u3UinldJK/bBnu/AIM5cbmdpKXrrH2lXxcZ+wARHa20HmSP0d+yr9wYEwv3yQshBAN2OnTp73FTa9evejXrx+6Xi96ZIhryOcFzrRp08jKymLevHmkpaXRrVs3vvnmG28zYlpaWpk5ce68807y8/N57bXX+J//+R9CQ0MZM2YMf/3rX331FqqUs+x73FlZKOBA9n7y/SAv5UucnyaQs60ZdrOnwLn5nxEo45IWKIv/tU9YCCEauJiYGPr37094eHj9GFQifEJT9fm+Th3Jy8sjJCSE3NzcOp37QLlcJM1/zPvcHexH3uAuGEoj9Y22aMmRhEVbuHl+BIE
RJgy3wd71aZC+i86tU9Hbj4Jm8XWWnxBCNAYul4vk5GS6d+8ut6EauZpcv33egtMYuQwXGlqZIWo5o3uibBZCbaEMixnBW0Y6xYai62R/AiNkDgYhhLgSOTk5LF++nLNnz3LmzBluvPFGX6ck6gkpcGrZiuMryCrJolVgKwbHDGbUA/9bZr9ZN+NyG54hVIB2yW1hTdfqyeB9IYSo3w4cOMDatWtxuVz4+fnRt29fX6ck6hEpcGqRy3CRVVJ20U+zXvGvWBnenzxz3gC6SafL0BjIdoIzDvyb12G2QgjRMLlcLtauXcuBAwcAT5+bMWPG4O8v/RbFBVLg1JE+4b0oPLfchP/AgWgXTQWesjMTI9sBLgunU/LYu74AgMBmdmI7N0MPk3kahBCiIgUFBXz77bdkZ2ejaRp9+/ald+/elU4OK5ouKXDqiI6G44RnHhv/AQO82w23QVG+A8517b74FpVx5gh6yVcQ3hFaD76W6QohRIPg5+eHruv4+/szZsyYejs9iPA9KXB8RBmAWaNFhxA6D/H0+tdPn4FUA2/1I4QQAqfTidlsRtM0TCYT48aNw2Kx4Ofn5+vURD0m3VmvMd2k02NkK3S7FV3XsNpM6CYd3SR/FUIIcamsrCwWL17Mtm3bvNuCg4OluBGXJVdVHwnMs2Iz655RU0IIIcrZu3cvS5YsITc3l/379+NyuXydkmhA5BZVLTLrZqa096ypZTLK73e6DJavPAaA2+mZwTg45lznY7cL3M5rkqcQQtRnDoeDNWvWcPjwYQDi4uIYNWpUuXUIhaiK/GupZeeHhSuj/DcNl9NNeqpnxJTh9qxuHhhhAmcJ7F4MrtJrl6gQQtRDmZmZLF++nLy8PHRdZ8CAAXTv3l1GSYkakwLnGlBKcWRbBlsOZHq3Gecaa3QTYLFDj1vh6CrIT4OQVr5JVAghfMjhcPDVV1/hcDgIDAwkMTGRyMhIX6clGigpcGqR23CzLcPTEa5PZB9Cb54KwJEfz1KY76DI4QYgKNRGhtMzUko3n/tWouvQbvS1T1oIIeoJq9XKoEGDOH78OCNHjsRms/k6JdGASYFTixSKlLwUAHpH9kYzmzHcBiWFnuYazaKDJZCiD2yAp9jR81Mg9ziEtZGFNYUQTU5GRgaaphEREQFAQkICCQkJPs5KNAZS4NQx3aTTeUgLHA43P+5wc/oPGn5WNyiFf7AT25nVnrFszmIozoaWfXydshBCXBM//vgjmzdvxt/fn5tvvllabEStkgKnjii3m8Jkz+0q/359MVtNFP1woZNc99HZDJ24Bf38QP3gFtC8vQ8yFUKIa6u0tJSVK1dy7JhnVGlERIR0Iha1TgqcuqIUjpQUDAPsvXqj6WZcy0z4WT27h92WgZYLBJ0rbMKluBFCNH6nT58mKSmJgoICdF1nyJAhdOnSxddpiUZICpw6dPKMlRKnht+GdIzCC7NuTn8nEs2xz/OkWVspboQQjZ5SyntLSilFcHAwiYmJhIeH+zo10UhJgVNHDLeixHmhybUo58K8OGFxFjgTC9ZA8Av1QXZCCHFtaZpGeno6SinatWvH8OHDsVqtvk5LNGJS4FwDnQZEkrLZjdN1lsCOOm5DYYro6Ou0hBDimho1ahTHjh2jY0f5/BN1TwqcWmTWzdzY7kbPz64Ly3w5CmHZU9m4DINiw0ApBUiHOiFE46WUYseOHeTm5jJq1CgAbDabFDfimpECp5bZTJ5hjm7XhXWlPv99tvdnPejcD65SMNxgsoJJ/hqEEI1HcXExP/zwAydPngSgY8eOxMTE+Dgr0dTIlbWOKQU5pzyT+pmCIeSWcztS1kLOcWg9FOR2lRCikUhNTWXFihUUFRVhNpsZOnSoFDfCJ/TLh5S3b98+pk+fTosWLbBarWzb5pnv5amnnuKHH36o1QQbErfhZtvpbWw7vQ0sOj3vGU+HmxO9+8MfBU361AkhGiGlFFu3buWrr76iqKiIsLAwpkyZQqdOnXydmmi
ialzg7Nixg/79+7Nq1SpGjRqF2+327isoKOBf//pXrSbYkCgUh3MPczj3MIbLicXPyp6vLoyekuJGCNFY/fDDD2zduhWATp06MWXKFMLCwnyclWjKanyLas6cOfTo0YNly5ZhtVr5+OOPvfsGDBjAokWLajXBhiho417ytrnIKOrGzk+bAdC8nQW35r7MkUII0TB16tSJ48ePM3ToUDp06ODrdISoeQvOunXrePjhh/H39y83tXZUVBTp6em1llyD5HZjzinEUHB0l47h8qwaPvz3wT5OTAghao9hGGRlZXmft2zZkunTp0txI+qNGrfgKKUqnZwpOztbFks7T4HDEoRhKHr/PJCYrnamOlsCYDZdUdcnIYSoFwoLC0lKSiIrK4ubb76Z4GDPFzj5/Bf1SY2vtD169OCzzz6rcN/SpUvp27fvVSfVeGjeP1xuA4tJx24x+TYlIYS4CsePH2fRokXe1vrc3FwfZyRExWrcgvPggw9y2223ERAQwO233w54/sGvWLGCN998k08//bTWk2yo1Lk/D58p5NiWPDpEBdK/jadPDsExYLaDXW5dCSHqP8MwSE5OZufOnQCEh4eTmJjobb0Ror6pcYEzbdo0Dh8+zJNPPsk//vEPAG6++WbMZjNPPfUUN9xwQ60n2WCdq3CKHC6CL525OLLztc9HCCGuQEFBAUlJSZw+fRqArl27MmjQIEwmaZEW9dcVTfQ3d+5c7rjjDr777jtOnz5NeHg4EyZMoHXr1rWdX4Ni0kxMbDORwnAnqVn2C00452qbHq1CfJabEEJcqb1793L69GmsVisjR44kPj7e1ykJcVk1LnBWr15Nnz59aNWqFbNmzSqzr6CggG3btjFixIhaS7Ah0TSNAHswtuunkJF8Bk4AGmi6p8IxXTzqzO0CZYBuBl06HQsh6q++fftSUlJCr169CAoKuvwBQtQDNb6yjh49mj179lS4b//+/YwePfqqk2rINE3DHBxAp2GxhEUHYLZU8is+ugp2fABZh65tgkIIcRn5+fmsXbsWwzAA0HWd4cOHS3EjGpQrGiZeGafTid6EWyMMZfBT5k8AdA/vjvn8iClZOFwI0UAcPXqUVatW4XA4sNvt9OvXz9cpCXFFqlXg5OXlkZOT432enp7O8ePHy8QUFxfzzjvvEB0dXasJNiSGMjiQtQ+/A6doG+tCGe18nZIQQlSL2+1m48aN7N69G/BM3JqQkODjrIS4ctUqcF566SXmzZsHeG7BTJkypcI4pRRz586tvewaIqWwHskgNe0oBWc9E/t1iQmmd69ATLo05Qgh6p+8vDyWL19OZmYmAD179qR///5NukVeNHzVKnDGjx9PYGAgSikefvhhfvvb3xIXF1cmxmaz0b17d0aOHFkniTYUpYet5OW2JMcwU5zvBHSsFp1A2xUNWBNCiDp1/PhxkpKScDqd2O12Ro0aVe7zXYiGqFpX3cGDBzN48GDAM0X3PffcQ0xMTJ0m1hAZboVRdOEbj9nm+VnabYQQ9VVwcDBKKaKjoxk7diwBAQG+TkmIWlHjZoUnnniiLvJodNpEFVOKHycp5lRuMduOu+nVKhRdblMJIXzM4XB41xQMDQ3lxhtvpFmzZnJLSjQqV3TfxO128+2337J3716Ki4vL7NM0jT/96U+1klxDpmvn5vlTkJFfSnFaKT1ahqCfb88JjPLMgWOTYZdCiGvn4MGDrFu3jgkTJtCiRQvAs+yCEI1NjQucrKwshg8fzr59+9A0zTtsXLtoEjspcDzFzZGMQoqdisCKAqK7XeOMhBBNmcvlYt26dezfvx/wzE58vsARojGqcXvkY489ht1u59ixYyil2LRpEwcPHmT27Nl07Nix3PDxpsRsMjFh9GD6tQ5hX1o+JU7Duy8iyIbZJM2/QohrLzs7m88++8xb3PTt27fJT8oqGr8at+AkJSXxxBNPeDsZ67pOu3bteP755ykpKeEPf/gDH374Ya0n2hDouk5keCSOG29
g045USNXQNY1escEM7HLJOlRKeR6a5nkIIUQd2L9/P+vWrcPlcuHv78+YMWNkkIhoEmrcpHDy5EnatGmDyWRC13UKCwu9+2644QaWLVtWqwk2NJqmYQ0L5bpBHWkTHoDNomOqqOXm8ArY9g5kHrz2SQohmoTU1FRWrVqFy+WiVatW3HzzzVLciCajxi044eHh5ObmAhATE8OuXbu8i2uePXsWl8tVuxk2IG63m+TdnqUa+nftjt0sSzUIIXwnJiaG9u3bExYWRq9evcr0lRSisatxgdO3b192797N9ddfz6RJk5g3bx7BwcFYrVbmzp3LoEGD6iLPBsFtGBzYfwzLmVy6KFCqNSD1jRDi2jlw4ACtW7fGZrMBMGbMGB9nJIRv1PgW1QMPPEBIiKc/ydNPP010dDR33HEHt956KyaTiVdeeaXWk2xQlMKckcPRddvIzCvxbJMKRwhRxxwOB0lJSaxcuZJVq1b5Oh0hfK7GLTiJiYkkJiYCEBERwfbt29m1axeappGQkIDZ3LSXJEjLLSG00EFqbgmZBQ7ALH2IhRB1KjMzk+XLl5OXl4emaURFRaGUkltSokm76mpE0zS6d+8OeBbbfP/995kxY8ZVJ9YQudwGpRcNDfe36BSBtOAIIerM7t272bBhA4ZhEBgYyNixY4mKivJ1WkL4XK1NzPLxxx/TtWtXZs6cWVunbKA8Ex/2jA0hrtm5NV2kwBFC1DKHw8GyZctYt24dhmHQunVrbr75ZiluhDin2gXOX/7yF+Lj4/H396d3794sXboUgPXr19OrVy9uu+02srOzee211+os2Yag2alMAPSLZnmuUEA4hMaCVRa2E0LUnGEYnDlzBl3XGTx4MBMmTPB2LBZCVPMW1T//+U/mzp1LSEgI3bt358SJE9x00028+uqr/OY3v8FisfDnP/+ZP/zhD016JVrlcmFxOAEwhYSCdm418YpacFr0vHaJCSEaHbvdTmJiIpqmERER4et0hKh3qlXgvPnmmwwbNoyvv/6aoKAg3G43999/P/fddx9t2rThu+++o3379nWda71ns9sYfvtEcLkJ69EFfsz3dUpCiEaitLSUlStX0qZNGzp16gRAZGSkj7MSov6qVoGzf/9+3n//fYKCPCtfm0wmHn/8cf7zn//w9NNPS3FzjtlspkuPrt7nF25QSSccIcSVO336NElJSRQUFJCWlkZ8fDxWq9XXaQlRr1WrwCkqKio3vXfLli0B6NChQ+1n1Vicq3AqvEV1eAXknIC4wRDR8ZqmJYRoGJRS/PjjjyQnJ2MYBsHBwSQmJkpxI0Q1VHuYeGXzKTT1eW8u5ih18P2SbwAYf9MkvH2MK/rVKQXKqGCHEEJASUkJK1eu5Pjx4wC0a9eO4cOHS3EjRDVVuzr5n//5H0JDQ73Pz48Qeuihh7wzG4OnEPr8889rlMT8+fN5/vnnSUtLo2vXrrz88ssMHz680vjS0lLmzZvH+++/T3p6Oq1ateKxxx7jV7/6VY1et7Y5nU5ObTnk+fl6J2ePeDocy1xbQoiacDqdLF68mIKCAkwmE0OGDKFz586+TkuIBqVaBU5cXBwnTpzgxIkTZba3bt3a++3ivJrOnPnxxx/z0EMPMX/+fIYOHcrrr7/OxIkT2bNnD3FxcRUe84tf/ILTp0+zcOFC2rdvT0ZGRr1c5NPl8BSBujRyCSFqwGKx0LFjR44cOUJiYiLNmjXzdUpCNDjVuvSmpKTUWQIvvvgis2bN4u677wbg5Zdf5rvvvmPBggU899xz5eKXLl3KqlWrOHLkiPc/fZs2beosv6vhKvEUOFFdpElZCFG14uJiXC6XdzBHnz596NmzJxaLxceZCdEw1dpMxlfC4XCwdetWxo8fX2b7+PHjWb9+fYXHfPHFF/Tr14+//e1vtGzZko4dO/KHP/yB4uLiSl+ntLSUvLy8Mo9roTDLDYDVv4Jfc87x8tuEEE1SamoqixYtYtmyZbjdns8NXdeluBHiKvj
05klmZiZut7vc1OJRUVGkp6dXeMyRI0dYu3Ytdrudzz77jMzMTH79619z9uxZ3nzzzQqPee6553jqqadqPf+quEovDBK3BVdw207TPZ2MddM1zEoIUZ8opdi2bRvbtm1DKYXNZqO4uJjAwEBfpyZEg1cveodc2m+nqlVwDcNA0zQ++OADb+fmF198kZ///Of885//xM/Pr9wxjz76KLNnz/Y+z8vLIzY2thbfQXlu54UCx+qvg6sUslNAM0F4e89MxsqA0Ir7GQkhGreioiJWrFhBamoqAJ06dWLo0KEyMlWIWuLT/0nh4eGYTKZyrTUZGRmVLhjXokULWrZsWWbkVufOnVFKcfLkyQrn5bHZbNd8jRbDeeFnXXNBaQEcWw9mm6fAiel1TfMRQtQfJ0+e5IcffqC4uBiz2czw4cNlTjEhaplP++BYrVb69u3LsmXLymxftmwZQ4YMqfCYoUOHkpqaSkFBgXfbgQMH0HWdVq1a1Wm+l2Oz2+j/8zH0mzqGA9+6wXBjUoVoO96HvV/4NDchRP2xZcsWiouLadasGVOnTpXiRog64NMCB2D27Nn85z//4c0332Tv3r38/ve/5/jx49x3332A5/bSHXfc4Y2/7bbbaN68OXfddRd79uxh9erV/PGPf+RXv/pVhbenriWz2UyfgX2IsnVlx4eFoNzY/C4Zvh4cU/HBQogmY+zYsXTr1o2bbrqpzPxiQojac8W3qHJzc9m4cSOZmZlMmjSJsLCwKzrPtGnTyMrKYt68eaSlpdGtWze++eYbWrduDUBaWlqZuXYCAwNZtmwZv/3tb+nXrx/NmzfnF7/4Bc8888yVvpVad2pb6bmfNMb+6qRnOYbm7TybTDIqQoim5sSJE2RlZdGrVy8AgoKCKm2lFkLUDk0ppS4fVtbTTz/NX/7yF4qLi9E0jeTkZPr06cPYsWMZN24cc+bMqYtca01eXh4hISHk5uYSHBxca+d1lDpY8fUyzqyNIH9bS6K6WLl5fkStnV8I0bAYhkFycjI7d+4E4IYbbqBFixY+zkqIhqsm1+8a36KaP38+Tz31FLNmzeLrr7/m4vroZz/7GV9//XXNM24knE4nxzbsJTvlDAolE/wJ0YQVFBTw5Zdfeoubrl27EhkZ6eOshGg6anyL6rXXXmP27Nn87W9/805IdV6HDh04ePBgrSXXYBmeulHuRgnRNB07doyVK1dSWlqK1Wpl5MiRxMfH+zotIZqUGhc4R44cYcKECRXuCwoKIicn52pzavCU8szhoxemwu4VENUNwmWUhBBNQXJyMtu3bwcgIiKCsWPH1uqtcCFE9dS4wAkJCeH06dMV7ktJSZEmWPC24Og4oDgHXCW+zUcIcc2cHxXVvXt3Bg4ciK77fLCqEE1SjQucsWPH8re//Y3Jkydjt9sBz0zELpeLBQsWVNq601S4c8NwprbBHAS6qcb9t4UQDVBpaal3MtEOHToQFhZGeHi4j7MSommrcYEzb948+vfvT5cuXZgyZQqapvHaa6+xfft2jh8/zv/93//VRZ4NRvGPA70/+wUbPsxECFHX3G43GzduJCUlhZtvvtn7pU+KGyF8r8Ztp+3bt2fdunV07tyZ+fPno5Ti3XffJTw8nDVr1hAX17TXVlIuT8/i3r/0o9PQQh9nI4SoK3l5eXz++efs3r2bwsLCMvN1CSF874om+uvSpQtLly6ltLSUrKwswsLCfD6LcH1gsViw2K2gNDpf74+p9PLHCCEaniNHjrBq1SqcTid2u51Ro0Y1+S93QtQ3NS5wvvrqKyZNmoSu69hsNmJiZOmB86w2KzabDcNQWK1WkAJHiEbF5XKxYcMG9u7dC0B0dDRjx44lICDAx5kJIS5V41tUN954Iy1btuSRRx7x/icXlTDbwRoIJpnwT4jGYNu2bd7Pvd69e/Ozn/1Mihsh6qkaFzhff/01I0aM4B//+AfdunVj8ODB/Pvf/yY/P78u8mtQHKUOSkpKcJQ4cDgcENs
fetwCEZ18nZoQohb06tWLqKgoJk2aRP/+/WUIuBD1WI3/d06cOJGPP/6YtLQ0Xn31VQzD4N577yU6Oprbb7+dFStW1EWeDYLT6cRV6sTlcOJyui5/gBCiXnO5XOzZs8f73Gq1MnnyZFq1auXDrIQQ1XHFXz9CQ0P59a9/zaZNm9i9eze/+c1v+P777xk/fnxt5tfwyNQ3QjQK2dnZfPbZZ6xdu5bdu3f7Oh0hRA1d0SiqiymlOHHiBCdOnCAvL48rWJy8cdKA1O2QexIiu0Dzdr7OSAhRTQcOHGDt2rW4XC78/f0JCwvzdUpCiBq64gLn0KFDvP3227z77rucOnWKmJgYZs+ezV133VWb+TVspQVQmAnOIl9nIoSoBqfTybp16zhw4AAALVu2ZMyYMTINhhANUI0LnLfeeou33nqLdevWYbVaufHGG7nrrrsYP368dLgTQjRYZ8+eZfny5eTk5KBpGv369aNXr15omubr1IQQV6DGBc6sWbPo3bs3r7zyCr/85S+l6VYI0Sg4HA5yc3Px9/dn7NixtGjRwtcpCSGuQo0LnB07dtCjR4+6yEUIIXzm/KR9MTEx3jWlhBANV43vKUlxUzmLxYLZbsVss2CxWHydjhCiCpmZmSxatIjs7GzvtrZt20pxI0QjUa0WnHnz5nH33XcTExPDvHnzqozVNI0//elPtZJcQ3N+qQalFFabBUp8nZEQoiJ79uxh/fr1GIbBxo0bmThxoq9TEkLUMk1VY1y3ruts3LiRAQMGXLYjsaZpuN3uWkuwLuTl5RESEkJubi7BwcG1eu4FY1JRSnHnomj8C5Lh7FGI6QWRnWv1dYQQNedwOFi9ejVHjhwBoHXr1owcOVJabYRoIGpy/a5WC45hGBX+LMpylDooLS0FBQ6HE/+4QRA3yNdpCSGAM2fOsHz5cvLz89F1nYEDB9K9e3dfpyWEqCNXPdGfuMDpdOIqcQAaLqcTkG+FQtQHp0+f5ssvv8QwDIKCghg7diyRkZG+TksIUYdq3MnYZDKxefPmCvdt3boVk8l01UkJIURtioiIIDIykvj4eG6++WYpboRoAmrcglNVlx3DMGRSrIul/Qh5qRDREZq19XU2QjQpmZmZhIWFYTKZ0HWdiRMnyuhGIZqQK5p6uLIiZuvWrYSEhFxVQg3fRb+bklzITwNHoe/SEaKJUUrx448/smTJEjZt2uTdLsWNEE1LtVpwXnnlFV555RXAU9zcdNNN2Gy2MjHFxcVkZGTw85//vPazbIikIUuIa66kpISVK1dy/PhxwPO5pJSSlmUhmqBqFTiRkZF07doVgJSUFNq2bUtoaGiZGJvNRvfu3XnwwQdrPUkhhLic9PR0kpKSKCwsxGQyMWTIEDp3lukZhGiqqlXgTJ8+nenTpwMwevRoFixYQEJCQp0mJoQQ1aGUYufOnSQnJ6OUIiQkhMTERJo3b+7r1IQQPlTjTsY//PBDXeTRKFgsFsxWz31+i9kCTh8nJEQTUFRUxI4dO1BK0b59e4YPHy79bYQQ1Stwjh8/TosWLbBYLN5721WJi4u76sQaIqvNitVuBcBis0CxjxMSogkICAhg1KhRlJaW0qlTJ1+nI4SoJ6pV4MTHx7NhwwYGDBhAmzZtLtthr74v1XDNaDroZs+fQohaoZRi+/btREZG0qpVKwDatGnj26SEEPVOtQqcN998k3bt2nl/lhEJFXOUOnA6PPelnA4ntBnqeQghakVRURErVqwgNTUVu93OtGnTyo3oFEIIqGaBM3PmTO/Pd955Z13l0uA5nU6cJQ4AXC5ZqkGI2nTq1ClWrFhBcXExZrOZwYMHS3EjhKhUraxFVVJSQkpKCh06dJClGoQQtcowDLZt28a2bdsAaNasGYmJieWmqhBCiIvVuHPIq6++ytNPP+19vnXrVmJjY+natSsdO3bkxIkTtZpgg3Z6NxxcDtkpvs5EiAbJ5XLx9ddfe4ubzp07c9NNN0lxI4S4rBoXOP/5z3/
KfLg88sgjNGvWjJdeegmlFM8880xt5tewFZ2F3BNQmu/rTIRokMxmM0FBQVgsFsaOHcvw4cMxm2ul4VkI0cjV+JPi+PHj3kn+8vPzWb16NR999BFTp04lLCyMP//5z7WepBCi6TAMA5fLhdXqmXJh2LBh9OnTh+DgYB9nJoRoSGrcglNaWuqdRGvDhg0YhkFiYiLgGaqZnp5euxk2IFWttC6EuLyCggK+/PJLkpKSvP+fzGazFDdCiBqrcYETFxfHmjVrAPj888/p1auX98PnzJkz8kEkhLgix44dY9GiRZw+fZrTp0+Tm5vr65SEEA1YjW9RzZgxg6eeeoolS5awc+dO/v73v3v3bdmyhY4dO9Zqgg2JxWLBZDV7f8bh44SEaAAMw2Dz5s38+OOPAERERDB27Fj5siSEuCo1LnAee+wxzGYz69evZ8qUKfz2t7/17tu1axc333xzrSbYkFhtVmx2z7wcFpsFCn2ckBD1XH5+PklJSWRkZADQvXt3BgwYINNNCCGuWo0LHE3TmDNnToX7vvjii6tOSAjRdCxbtozMzEysViujRo2SJReEELXmisdb5ufns2HDBrKysggPD2fQoEEEBQXVZm4Njsvl8i7V4Ha5IX645yGEqNDw4cPZuHEjo0ePJjAw0NfpCCEakSsqcP7+97/z1FNPUVRUhFIKTdPw9/fnqaeeYvbs2bWdY4NRWlzqXaqhtKSUQKw+zkiI+iUvL4/MzEzatm0LePrb3HDDDT7OSgjRGNW4wHn33Xd5+OGHmThxInfeeScxMTGkpqbyzjvv8Mc//pGIiAhuv/32usi1YZH1SIUo48iRI6xevRqXy0VwcDDh4eG+TkkI0YjVuMB56aWXuO2223j//ffLbL/llluYMWMGL730khQ452Xshfx0aN4OQuN8nY0QPuF2u9mwYQN79uwBIDo6Gj8/Px9nJYRo7Go8D86+ffuYMWNGhftmzJjB3r17rzqpRqMw07MOVYnM5yGaptzcXJYsWeItbnr37s3PfvYzAgICfJyZEKKxq3ELjp+fH2fPnq1w39mzZ+WbmRACgEOHDnlvSdntdsaMGUOrVq18nZYQoomocQvO8OHDefLJJ0lNTS2zPT09nXnz5jFixIhaS04I0XDl5+fjcrmIiYnh5z//uRQ3QohrqsYtOM8++yyDBw+mffv2jB07lhYtWpCWlsaKFSuwWCwsXry4LvIUQjQA50dVAvTq1YuAgAA6dOjg3SaEENdKjVtwunbtSnJyMpMnTyY5OZm33nqL5ORkbrrpJjZv3kyXLl3qIs8GwWy2YLKYMFlM3gVJhWgqDhw4wOeff47L5QI8k4J27NhRihshhE/UqAXH7XZz5swZ2rRpw4cfflhXOTVYVpsVm5/d+zMFPk5IiGvA5XKxdu1aDhw4AMDevXvp3r27j7MSQjR11WrBUUrx6KOPEhoaSsuWLQkODmb69Onk5+fXdX5CiHrs7NmzLF68mAMHDqBpGv3796dbt26+TksIIarXgvOPf/yDv/71r7Rt25a+ffty6NAhPv74Y6xWK++8805d59hguFwuXE6X92daD4G4QaDJwoGi8dm3bx/r1q3D7Xbj7+/v7ZMnhBD1QbUKnLfeeotJkyaxZMkSzGbPIY8++igvvfQSr7/+Ona7vU6TbChKS0pxFJcC4CgtBT0IkOJGND47duxg8+bNAMTGxjJ69Gj5HBBC1CvVukV14MAB7rvvPm9xA/C73/0Oh8PB0aNHrzqJ+fPnEx8fj91up2/fvqxZs6Zax61btw6z2UyvXr2uOgchRPV16NABf39/Bg4cyHXXXSfFjRCi3qlWgVNSUkJkZGSZbeefl5SUXFUCH3/8MQ899BCPPfYY27dvZ/jw4UycOJHjx49XeVxubi533HEHY8eOvarXr1NnDsDRNZB70teZCHHV0tPTvT8HBAQwbdo0evbsKaOkhBD1UrWHidfVh9iLL77IrFmzuPvuu+ncuTMvv/wysbGxLFi
woMrj7r33Xm677TYGDx5cJ3ldEXXJ84LTkHUIirN9ko4QtcHhcLB8+XK++OILUlJSvNtlKgQhRH1W7WHit912W4XLMEybNq1M87SmaezcubNa53Q4HGzdupU5c+aU2T5+/HjWr19f6XFvvfUWhw8f5v333+eZZ56p5ju4xuRLrWgEzpw5Q1JSEnl5eei6TlFRka9TEkKIaqlWgTNixIgKW3BGjhx5VS+emZmJ2+0mKiqqzPaoqKgyzeEXO3jwIHPmzGHNmjVl+gRVpbS0lNLSUu/zvLy8K09aiCZi165dbNy4EcMwCAoKYuzYseVuVQshRH1VrQph5cqVdZrEpcXTxdO9X8ztdnPbbbfx1FNP0bFjx2qf/7nnnuOpp5666jyFaApKS0tZtWqV93ZUfHw8I0eOxGq1+jYxIYSogRqvRVWbwsPDMZlM5VprMjIyyrXqgGfxvi1btrB9+3YeeOABAAzDQCmF2Wzm+++/Z8yYMeWOe/TRR5k9e7b3eV5eHrGxsbX8bjx9EnSLZ1i4xSz9E0TDlJaWRkpKCrquM3jwYLp27errlIQQosZ8WuBYrVb69u3LsmXLmDJlinf7smXLmDx5crn44OBgfvrppzLb5s+fz4oVK/j000+Jj4+v8HVsNhs2m612k6+A1WbFfvFSDTLRs2iA2rRpQ//+/YmNjSU8PNzX6QghxBXxaYEDMHv2bG6//Xb69evH4MGDeeONNzh+/Dj33Xcf4Gl9OXXqFO+++y66rpebBj4yMhK73V4vpodXl46iEqIBKCkpYePGjQwYMAB/f38Aevfu7eOshBDi6vi8wJk2bRpZWVnMmzePtLQ0unXrxjfffEPr1q0BT3P55ebEqS9cLhdul9vzs9uFLXYgtOoHutyuEvVTeno6SUlJFBYWUlxczMSJE32dkhBC1ApNqabX7pCXl0dISAi5ubkEBwfX3nlzCnhjbCoAs76JISwqsNbOLURtUkqxc+dOkpOTUUoREhJCYmIizZs393VqQghRqZpcv33egiOEuLZKSkr44YcfOHHiBADt27dn+PDhMnGfEKJRueICZ9++faxatYrMzExmzZpFdHQ0qamphIWFVTghYJOUdRgKz0BoHATH+DobITh79izffPMNRUVFmM1mhg4dSqdOnXydlhBC1LoaFzhut5v/9//+H2+//bZ3vpqJEycSHR3NvffeS+/evZk3b15d5Nrw5KV6lmqwBkiBI+qFoKAgrFYrVquVxMREmjVr5uuUhBCiTlR7Larz/vd//5f//ve/PP/88+zatYuLu/BMnDiRpUuX1mqCQoirU1JS4v1/arFYmDhxIlOnTpXiRgjRqNW4Beftt9/mT3/6E7Nnz8btdpfZFx8fz9GjR2stOSHE1Tl16hQrVqygZ8+e9OjRA/C04gghRGNX4xacU6dOVbqCt91uJz9fZrcTwteUUmzZsoWvv/6a4uJiDh06hGEYvk5LCCGumRoXOJGRkRw5cqTCffv376dVq1ZXnVRDZTFb0M06ulmv9kKgQtS2oqIivvrqK7Zt2wZAQkICN954I7pe4//uQgjRYNX4Kjxp0iT+93//l+uuu47o6GjAs1hmbm4u//jHP7jhhhtqPcmGwmqzYvf38/4sxLV28uRJVqxYQUlJCRaLheHDh9O+fXtfpyXqmNvtxul0+joNIWqF1WqtlS9kNS5w5s2bx7fffkuXLl0YPXo0mqYxd+5cdu3ahcVi4U9/+tNVJyWEqLmioiK+++473G43zZs3JzExkZCQEF+nJeqQUor09HRycnJ8nYoQtUbXdeLj47Far66hoMYFTlRUFMnJyTzxxBN8/fXXmEwmdu7cyc9+9jPmzZvXpEdmuFwu3G7D+7OtVX+I6QWmul/oUwh/f38GDhxITk4OgwcPxmQy+TolUcfOFzeRkZH4+/ujaZqvUxLiqhiGQWpqKmlpacTFxV3Vv+kr6igSFRXFv/71ryt+0caqtKSU0sJiABylDgJCAwG7b5MSjdr
x48cJCAjwLrFQHxadFdeG2+32FjeyxIZoTCIiIkhNTcXlcl3VDOvS61CIBsgwDDZu3MjSpUtZvny59L9ogs7/nZ9fAV6IxuL8ralLp6KpqRq34PzqV7+qcr+maSxcuPCKE2pUzh6BorMQ3BKCW/g6G9FI5Ofnk5SUREZGBgBxcXEyQqoJk9tSorGprX/TNS5wVqxYUe7Fs7KyKCgoIDQ0lNDQ0FpJrFHIPeVZqsFskwJH1IqUlBRWrlyJw+HAarUyatQo2rRp4+u0hBCi3qnx176UlBSOHj1a5pGXl8fy5cuJjIzk888/r4s8hWjSDMNg/fr1fP/99zgcDiIjI/n5z38uxY1ockaNGsVDDz1UZUybNm14+eWXr0k+ov6qtXbtMWPG8MADD/Dggw/W1imFEOdomkZ2djYAPXr04MYbbyQwMNDHWQlRc3feeSeappV7HDp06JrlsHv3bm6++WbatGmDpmk1LoY6deqE1Wrl1KlT5fZVVly9/PLL5b6Q5OXl8dhjj5GQkIDdbic6OprExEQWL15cZp3Hy1FK8eSTTxITE4Ofnx+jRo1i9+7dVR7jdDqZN28e7dq1w26307Nnz3JrSZ7//Vz6+M1vfuONOX36NHfeeScxMTH4+/tz3XXXcfDgwTLneeONNxg1ahTBwcFomnbNpjWo1Rv3Xbp0YfPmzbV5SiGatPMfcpqmMWbMGK677joGDRokfW5Eg3bdddeRlpZW5hEfH3/NXr+oqIi2bdvyl7/8xTthbXWtXbuWkpISbrnlFt5+++0rziEnJ4chQ4bw7rvv8uijj7Jt2zZWr17NtGnTePjhh8nNza32uf72t7/x4osv8tprr5GcnEx0dDTjxo2rcumkxx9/nNdff51XX32VPXv2cN999zFlyhS2b9/ujUlOTi7zd7Rs2TIAbrnlFsDz+XTTTTdx5MgRPv/8c7Zv307r1q1JTEyksLDQe56ioiKuu+465s6dW9Nf01Wp1U/JVatWER4eXpunbFB03eRdqkE3yQVIXDm3283atWtZu3atd5ufnx9xcXE+zEqI2mGz2YiOji7zOD9v06pVqxgwYAA2m40WLVowZ84cXC5XpefKyMjghhtuwM/Pj/j4eD744IPLvn7//v15/vnnufXWW7HZajZP2cKFC7ntttu4/fbbefPNN2vU0nKxuXPnkpKSwqZNm5g5cyZdunShY8eO3HPPPezYsaPaLbRKKV5++WUee+wxpk6dSrdu3XjnnXcoKiriv//9b6XHvffee8ydO5dJkybRtm1b7r//fiZMmMALL7zgjYmIiCjzd/TVV1/Rrl07Ro4cCcDBgwfZuHEjCxYsoH///nTq1In58+dTUFDAhx9+6D3PQw89xJw5cxg0aNAV/a6u1BXNZHyp0tJSfvzxR7799lv++Mc/1kpiDZHdz+5dqsHP3w6y7qi4Arm5uSxfvpysrCwAunbt2qQn0BQ143JXvqiqpmmYdK1asQDmi76oVRZrrsUvc6dOnWLSpEnceeedvPvuu+zbt4977rkHu93Ok08+WeExd955JydOnGDFihVYrVZ+97vfeUcY1rb8/Hw++eQTNm3aREJCAoWFhaxcuZLRo0fX6DyGYfDRRx/xy1/+kpiYmHL7Ly5unnzySd5++21SUlIqPNfRo0dJT09n/Pjx3m02m42RI0eyfv167r333gqPKy0txW4vO0+bn59fmS9VF3M4HLz//vvMnj3bO9CotLQUoMx5TCYTVquVtWvXcvfdd1d4rmulxgVORf/IbDYbbdq0Yd68eU26wBHiah06dIg1a9bgdDqx2+2MGTNGihtRI/+35WSl+2JC7YzqFOl9vnjbKVxGxS0QkUE2ErtEeZ9/viOVUlf5Iue2gTVvVfzqq6/KXMQnTpzIJ598wvz584mNjeW1115D0zQSEhJITU3lkUce4c9//nO5W7MHDhzg22+/ZePGjQwcOBDwtLB07ty5xjlVx0cffUSHDh3o2rUrALfeeisLFy6
scYGTmZlJdnY2CQkJl40NDw+nXbt2le5PT08HPBPwXiwqKopjx45VetyECRN48cUXGTFiBO3atSMpKYnPP/+80rlnlixZQk5ODnfeead3W0JCAq1bt+bRRx/l9ddfJyAggBdffJH09HTS0tIu+97qWo0LHMOouuJv6oyLPyxa9oGormCRibhE1VwuF+vXr2ffvn0AxMTEMGbMGJnETTRKo0ePZsGCBd7nAQEBAOzdu5fBgweXmYpk6NChFBQUcPLkyXK3aPfu3YvZbKZfv37ebQkJCXU2XcnChQuZMWOG9/mMGTMYMWIEOTk5NXrNi/vWXc4DDzzAAw88cNm4S8+llKry/K+88gr33HMPCQkJaJpGu3btuOuuu3jrrbcqjF+4cCETJ04s0+JksVhYtGgRs2bNolmzZphMJhITE5k4ceJl870WalTgFBcXM2vWLH79618zbNiwusqpwSoqKKSkoOjcz0VYIwPBGuDjrERDsHTpUlJTUwHo06cPffv2lQncxBX5Rb9Wle679N/U1D4tq33eyb3K30q5UgEBARWucl/RRbmqYqAmhcLV2rNnD5s2bSI5OZlHHnnEu93tdvPhhx9y//33AxAcHFxhB+GcnBzv4rcRERGEhYWxd+/eq87rfCfp9PR0WrS4MN9aRkZGuVadi0VERLBkyRJKSkrIysoiJiaGOXPmVNjZ+9ixYyxfvpzFixeX29e3b1927NhBbm4uDoeDiIgIBg4cWKbo9JUa3Tz18/Pj888/l1YcIWpZjx498Pf352c/+xn9+vWT4kZcMbNJr/Rxcf+by8Ve2remOjFXq0uXLqxfv75Mx93169cTFBREy5bli7HOnTvjcrnYsmWLd9v+/fvrZBjywoULGTFiBDt37mTHjh3ex8MPP1xm9v6EhASSk5PLHZ+cnEynTp0Az2rZ06ZN44MPPvB+sblYYWFhlR2rLxYfH090dLR3hBN4+susWrWKIUOGXPZ4u91Oy5YtcblcLFq0iMmTJ5eLeeutt4iMjOT666+v9DwhISFERERw8OBBtmzZUuF5rrUa/+vs1asXu3btqotcGrxynemzj0Hqdsg/7ZN8RP3lcrk4c+aM93lcXBy33nprhR0OhWgqfv3rX3PixAl++9vfsm/fPj7//HOeeOIJZs+eXeHUCJ06deK6667jnnvuYdOmTWzdupW7774bPz+/Kl/H4XB4CxSHw8GpU6fYsWNHpXPxOJ1O3nvvPaZPn063bt3KPO6++262bt3Kzp07AZg9ezbffvst8+bNY8+ePezZs4enn36apUuX8j//8z/ecz777LPExsYycOBA3n33Xfbs2cPBgwd588036dWrFwUFBQC89tprjB07ttL3omkaDz30EM8++yyfffYZu3bt4s4778Tf35/bbrvNG3fHHXfw6KOPep9v2rSJxYsXc+TIEdasWcN1112HYRg8/PDDZc5vGAZvvfUWM2fOxGwuf9Pnk08+YeXKld6h4uPGjeOmm24q0+k5PT29zO/3p59+YseOHZw9e7bS91UrVA2tXr1atW7dWq1cubKmh9Ybubm5ClC5ubm1et6czHz1fJ/96vk++1V2Rr5SR1YrlfymUmk/1urriIYtKytLffzxx+rtt99W+fn5vk5HNFDFxcVqz549qri42Nep1MjMmTPV5MmTK92/cuVK1b9/f2W1WlV0dLR65JFHlNPp9O4fOXKkevDBB73P09LS1PXXX69sNpuKi4tT7777rmrdurV66aWXKn2No0ePKqDcY+TIkRXGf/rpp0rXdZWenl7h/u7du6vf/va33ufLli1Tw4cPV2FhYSosLEwNGzZMLVu2rNxxOTk5as6cOapDhw7KarWqqKgolZiYqD777DNlGIZSSqknnnhCtW7dutL3opRShmGoJ554QkVHRyubzaZGjBihfvrppzIxI0eOVDNnzvQ+X7lypercubOy2WyqefPm6vbbb1enTp0qd+7vvvtOAWr//v0VvvYrr7yiWrV
qpSwWi4qLi1OPP/64Ki0tLRPzxBNPVPj7fuuttyo8Z1X/tmty/daUuvwg/tWrV9OnTx8CAwPp3r07aWlpZGdnExYWRosWLco0p2ua5q1k66u8vDxCQkLIzc0lODi41s6bm1XAv8d7mhvvXhpDaMF2z1pUrfpBdPdaex3RcO3bt4/169fjcrnw9/dn/PjxREZGXv5AIS5RUlLC0aNHiY+PLzfcV4iGrKp/2zW5flerk/Ho0aPZsGEDAwYMoHnz5k16Mj8hroTT6WTNmjXeJtrY2FhGjx4tFyYhhKgj1SpwLm7kWblyZV3lIkSjlJWVxfLly8nNzUXTNPr370/Pnj2lI7EQQtShGs+DIyqnm0xo50YV6OemHRdi37595ObmEhAQwNixY2u89o0QQoiaq3aBI982L8/Pz45fgKf3vt1ug+qvlSYasfOLY/bu3VtuSQkhxDVS7QJn9OjR1VrBWNO0Gq2C2phUo7+2aAIyMzPZvXs3I0aM8Kz9YzIxePBgX6clhBBNSrULnFGjRhEREVGXuTQKyjuzJtCiJ0QkyGzGTciuXbvYuHEjhmEQFhZGjx49fJ2SEEI0SdUucP785z8zYMCAusylwSsqKKI437NUQ2FhEaHhtTcEXdRvpaWlrFq1yrvib5s2bbyzlgohhLj2pJOxEFcpIyODpKQk8vPz0XWdQYMG0a1bN1+nJYQQTZoUOHUp9yQU50BgFATK7b3G6MCBA6xevRrDMAgODiYxMVHmiRJCiHqgdldKE2WdPQonk6Eg3deZiDoSHh6Opmm0bduWqVOnSnEjRB0bNWoUDz30UJUxbdq04eWXX74m+Yj6q1oFjmEY0v9GiHOKi4u9Pzdr1oybb76ZxMRErFarD7MSomG488470TSt3KOyhS7rwr///W+GDx9OWFgYYWFhJCYmsnnz5mof36lTJ6xWK6dOnSq3r7Li6uWXX6ZNmzZltuXl5fHYY4+RkJCA3W4nOjqaxMREFi9eXKNRuUopnnzySWJiYvDz82PUqFHs3r27ymOcTifz5s2jXbt22O12evbsydKlS8u9l4r+rn7zm99U+7VTUlIqPIemaXzyySfVfo9XQlpwapEMEm/clFLs2LGDDz/8kIyMDO/20NBQ3yUlRAN03XXXkZaWVuYRHx9/zV5/5cqVTJ8+nR9++IENGzYQFxfH+PHjKyxYLrV27VpKSkq45ZZbePvtt684h5ycHIYMGcK7777Lo48+yrZt21i9ejXTpk3j4YcfrtF0K3/729948cUXee2110hOTiY6Oppx48aRn59f6TGPP/44r7/+Oq+++ip79uzhvvvuY8qUKWzfvt0bk5ycXObvaNmyZQDccsst1X7t2NjYcn/XTz31FAEBAUycOLGmv7YakQKnjsi0iI1LSUkJS5cuZfPmzbhcLo4cOeLrlIRosGw2G9HR0WUepnOzv69atYoBAwZgs9lo0aIFc+bMweVyVXqujIwMbrjhBvz8/IiPj+eDDz647Ot/8MEH/PrXv6ZXr14kJCTw73//G8MwSEpKuuyxCxcu5LbbbuP222/nzTffvOL5z+bOnUtKSgqbNm1i5syZdOnShY4dO3LPPfewY8cOAgMDq3UepRQvv/wyjz32GFOnTqVbt2688847FBUV8d///rfS49577z3mzp3LpEmTaNu2Lffffz8TJkzghRde8MZERESU+Tv66quvaNeuHSNHjqz2a5tMpnJ/15999hnTpk2r9nu8UlLg1CLPUg0amkmTpRoakbS0ND799FNOnDiByWRixIgRDBo0yNdpCVExt6vyh+GufqzbVb3YWnTq1CkmTZpE//792blzJwsWLGDhwoU888wzlR5z5513kpKSwooVK/j000+ZP39+mRbW6igqKsLpdNKsWbMq4/Lz8/nkk0+YMWMG48aNo7Cw8IrWZzQMg48++ohf/vKXxMTElNsfGBiI2ewZA/Tkk0+Wu7V1saNHj5Kens748eO922w2GyNHjmT9+vWVHld
aWlpuZnU/Pz/Wrl1bYbzD4eD999/nV7/6lXdlgyt57a1bt7Jjxw5mzZpVaW61RUZR1SI/uw2/AH8A7H42yPFtPuLqKKXYvn07W7duRSlFaGgoiYmJl/0QFMKntr9X+b6QWOiQeOH5zg/BqKRICYqGThfdQvjpE3CVlI/rd1eNU/zqq6/KfHufOHEin3zyCfPnzyc2NpbXXnsNTdNISEggNTWVRx55hD//+c/lZtM/cOAA3377LRs3bmTgwIGAp4Wlc+fONcpnzpw5tGzZksTExCrjPvroIzp06EDXrl0BuPXWW1m4cCGjR4+u0etlZmaSnZ1NQkLCZWPDw8Np165dpfvT0z2DWKKiospsj4qK4tixY5UeN2HCBF588UVGjBhBu3btSEpK4vPPP8ftdlcYv2TJEnJycrjzzjuv6rXP//0MGTKk0txqixQ4QlTi6NGjbNmyBYCOHTsybNgw77cqIcSVGz16NAsWLPA+DwjwzPa+d+9eBg8eXGbtw6FDh1JQUMDJkyeJi4src569e/diNpvp16+fd1tCQkKN+sX97W9/48MPP2TlypWXXStu4cKFzJgxw/t8xowZjBgxgpycnBq95oUZ7y/fmeGBBx7ggQceuGzcpedSSlV5/ldeeYV77rmHhIQENE2jXbt23HXXXbz11lsVxi9cuJCJEydW2OJU3dcuLi7mv//9L3/6058u+35qg3xa16Xo7tC8PdiCfJ2JuAJt27alffv2tGrVio4dO/o6HSGqp/ftle+79KLTc3r1z9v9lsvHVFNAQADt27cvt72iC2NVxUBNCoWK/P3vf+fZZ59l+fLll11WZc+ePWzatInk5GQeeeQR73a3282HH37I/fffD0BwcHCFHYRzcnIICQkBPH1bwsLC2Lt37xXlfbHo6GjA05rSokUL7/aMjIxyLSsXi4iIYMmSJZSUlJCVlUVMTAxz5sypsLP3sWPHWL58OYsXL76q1/70008pKirijjvuqNmbvELSB6cWFRYUUZRX6HkUFIFfKAS3AFvddqQStUMpxU8//YTT6fRuGzNmjBQ3omExmSt/6Kbqx5rM1YutRV26dGH9+vVlOu6uX7+eoKAgWrZsWS6+c+fOuFwub0srwP79+8nJybnsaz3//PM8/fTTLF26tEwLUGUWLlzIiBEj2LlzJzt27PA+Hn74YRYuXOiNS0hIIDk5udzxycnJ3uVbdF1n2rRpfPDBB6SmppaLLSwsrLJj9cXi4+OJjo72jnACT3+ZVatWVes2kN1up2XLlrhcLhYtWsTkyZPLxbz11ltERkZy/fXXX9VrL1y4kBtvvPHarWupmqDc3FwFqNzc3Fo9b3ZGvnq+z371fJ/9Kiczv1bPLepWYWGh+uKLL9Trr7+ukpKSfJ2OEJdVXFys9uzZo4qLi32dSo3MnDlTTZ48ucJ9J0+eVP7+/uo3v/mN2rt3r1qyZIkKDw9XTzzxhDdm5MiR6sEHH/Q+v+6661SPHj3Uxo0b1ZYtW9SwYcOUn5+feumllyrN4a9//auyWq3q008/VWlpad5Hfn7Fn9sOh0NFRESoBQsWlNt34MABBagdO3YopZTasGGD0nVdPfXUU2r37t1q9+7dat68eUrXdbVx40bvcWfPnlUJCQmqVatW6p133lG7d+9WBw4cUAsXLlTt27dX2dnZSimlXn31VTVmzJhK34tSSv3lL39RISEhavHixeqnn35S06dPVy1atFB5eXnemNtvv13NmTPH+3zjxo1q0aJF6vDhw2r16tVqzJgxKj4+3vu657ndbhUXF6ceeeSRK35tpZQ6ePCg0jRNffvtt1W+F6Wq/rddk+u33KKqS3mpUJIHAREQ0NzX2YhKnDx5khUrVlBSUoLFYil3n18IcW20bNmSb775hj/+8Y/07NmTZs2aMWvWLB5//PFKj3nrrbe4++67GTlyJFFRUTzzzDOX7eMxf/58HA4HP//5z8tsf+KJJ3jyySfLxX/xxRdkZWUxZcqUcvs
6dOhA9+7dWbhwIf/4xz8YNGgQ3333HfPmzfNO+Ne1a1e+++47b0dogLCwMDZu3Mhf/vIXnnnmGY4dO0ZYWBjdu3fn+eef997OyszM5PDhw1W+n4cffpji4mJ+/etfk52dzcCBA/n+++8JCrrQPeL48eNlOmmXlJTw+OOPc+TIEQIDA5k0aRLvvfdeub5Ey5cv5/jx4/zqV7+64tcGePPNN2nZsmWZEVd1TVPqCgfxN2B5eXmEhISQm5tLcHDtrfidc6aA/1znaW685/sYQvK2Q9YhaNXP0x9H1CuGYbBlyxZ27NgBQPPmzUlMTPR+sAhRn5WUlHD06FHi4+Mv2zlWiIakqn/bNbl+SwuOaJIKCwtJSkryDnPs0qULgwcP9k42JoQQomGTAkc0SZqmkZubi9VqZcSIEbRt29bXKQkhhKhFUuCIJkNdNATV39+f8ePH4+fnV6u3KYUQQtQPMky8Fmm6CU3X0HRZqqG+yc/P5/PPPy/TWS8qKkqKGyGEaKSkBacW+fnZ8Av0LNXg52+HbB8nJABISUlh5cqVOBwONm3aRHx8fLkp34UQQjQuUuCIRsswDDZu3MiuXbsAiIyMZOzYsVLcCCFEEyAFTl2K6gphbcAuw46vtby8PJKSkjhz5gwAPXr0YMCAAVLcCCFEEyEFTi0qOrdUg+fnQoKbNQN/WXn6WisuLmbx4sU4HA5sNhujRo2idevWvk5LCCHENSQFjmh0/Pz8SEhI4PTp04wdO5bAQFkLTAghmpp60V4/f/5874yFffv2Zc2aNZXGLl68mHHjxhEREUFwcDCDBw/mu+++u4bZVq7cnND5pyHzEBSd9Uk+TUlubi4FBQXe5wMGDOCGG26Q4kaIRmbUqFE89NBDVca0adPGu0yCaLp8XuB8/PHHPPTQQzz22GNs376d4cOHM3HiRI4fP15h/OrVqxk3bhzffPMNW7duZfTo0dxwww1s3779Gmd+GRqQeQBS1kDeKV9n06gdOnSIxYsXk5SUhGEYgGe1XulvI0T9c+edd6JpWrnHoUOHrlkOixcvpl+/foSGhhIQEECvXr147733qn18p06dsFqtnDpV/rO9suLq5Zdfpk2bNmW25eXl8dhjj5GQkIDdbic6OprExEQWL15MTVZRUkrx5JNPEhMTg5+fH6NGjWL37t1VHuN0Opk3bx7t2rXDbrfTs2dPli5dWi7u1KlTzJgxg+bNm+Pv70+vXr3YunVrmZi9e/dy4403EhISQlBQEIMGDarwGq6UYuLEiWiaxpIlS6r9/q6Uz68AL774IrNmzeLuu++mc+fOvPzyy8TGxrJgwYIK419++WUefvhh+vfvT4cOHXj22Wfp0KEDX3755TXOXPiay+Vi9erVrFixAqfTia7rOJ1OX6clhLiM6667jrS0tDKP+Pj4a/b6zZo147HHHmPDhg38+OOP3HXXXdx1113Vuhuwdu1aSkpKuOWWW3j77bevOIecnByGDBnCu+++y6OPPsq2bdtYvXo106ZN4+GHHyY3N7fa5/rb3/7Giy++yGuvvUZycjLR0dGMGzeO/Pz8So95/PHHef3113n11VfZs2cP9913H1OmTCnTWJCdnc3QoUOxWCx8++237NmzhxdeeKHMgpyHDx9m2LBhJCQksHLlSnbu3Mmf/vSnCtdHe/nll72TrV4LPu2D43A42Lp1K3PmzCmzffz48axfv75a5zAMg/z8fJo1k868TUlOTg7Lly/n7FnP7b8+ffrQp08fabURogGw2WxER0dXuG/VqlX88Y9/ZOfOnTRr1oyZM2fyzDPPYDZXfLnKyMhg1qxZLF++nOjoaJ555pnLvv6oUaPKPH/wwQd55513WLt2LRMmTKjy2IULF3LbbbcxcuRIfvOb3zB37twrumjPnTuXlJQUDhw4QExMjHd7x44dmT59erUXUFVK8fLLL/PYY48xdepUAN555x2ioqL473//y7333lvhce+99x6PPfYYkyZNAuD+++/nu++
+44UXXuD9998H4K9//SuxsbG89dZb3uMubYU6f46//e1v3m0VLX2zc+dOXnzxRZKTk2nRokW13tvV8unVIDMzE7fbTVRUVJntUVFR3kUQL+eFF16gsLCQX/ziF5XGlJaWkpeXV+YhGq4DBw6wePFizp49i5+fH9dffz39+vWT4kYIwGW4Kn24DXe1Y12Gq1qxtenUqVNMmjSJ/v37s3PnThYsWMDChQurLFruvPNOUlJSWLFiBZ9++inz588nIyOj2q+plCIpKYn9+/czYsSIKmPz8/P55JNPmDFjBuPGjaOwsJCVK1dW+7XOMwyDjz76iF/+8pdlipvzAgMDvQXdk08+Wa6ouNjRo0dJT09n/Pjx3m02m42RI0dW2VBQWlparojy8/Nj7dq13udffPEF/fr145ZbbiEyMpLevXvz73//u8z7+Prrr+nYsSMTJkwgMjKSgQMHlrv9VFRUxPTp03nttdcqLWzrQr0YRXVp9XvxmkFV+fDDD3nyySf5/PPPiYyMrDTuueee46mnnrrqPKtD069d81tTZBgGP/74Iy6Xi5YtWzJ69Gj8/f19nZYQ9cZnhz6rdF+LgBYMaznM+/zLw1/iUhUXKRF+EYyKHeV9/s3Rbyh1l5aLu6XjLTXO8auvviozAGDixIl88sknzJ8/n9jYWF577TU0TSMhIYHU1FQeeeQR/vznP5f7EnPgwAG+/fZbNm7cyMCBAwFPC0vnzp0vm0Nubi4tW7aktLQUk8nE/PnzGTduXJXHfPTRR3To0IGuXbsCcOutt7Jw4UJGjx5do/efmZlJdnY2CQkJl40NDw+nXbt2le4/3xhQUUPBsWPHKj1uwoQJvPjii4wYMYJ27dqRlJTE559/jtt9oQg+cuQICxYsYPbs2cydO5fNmzfzu9/9DpvNxh133EFGRgYFBQX85S9/4ZlnnuGvf/0rS5cuZerUqfzwww+MHDkSgN///vcMGTKEyZMnX/b91iafFjjh4eGYTKZyrTUZGRnl/rIu9fHHHzNr1iw++eQTEhMTq4x99NFHmT17tvd5Xl4esbGxV554JQIC/b1LNQQEBkBOrb9Ek6frOomJiRw9epRevXpd0/u5QojaMXr06DL9LAMCAgBPZ9XBgweX+X89dOhQCgoKOHnyJHFxcWXOs3fvXsxmM/369fNuS0hIKNNHpDJBQUHs2LGDgoICkpKSmD17Nm3bti13++piCxcuZMaMGd7nM2bMYMSIEeTk5FTrNc8734G4Op9fDzzwAA888MBl42raUPDKK69wzz33kJCQgKZptGvXjrvuuqvM7SjDMOjXrx/PPvssAL1792b37t0sWLCAO+64wzuoY/Lkyfz+978HoFevXqxfv55//etfjBw5ki+++IIVK1b4ZCCQTwscq9VK3759WbZsGVOmTPFuX7ZsWZWV3ocffsivfvUrPvzwQ66//vrLvo7NZsNms9VKzlWpQad3UQP79u2jtLSUnj17AhAaGkrv3r19nJUQ9dOU9lMq3adR9oJ3Q7sbqn3eSfGTrjinSwUEBNC+ffty2yu6KFdVDNSkULiUruveHHr16sXevXt57rnnKi1w9uzZw6ZNm0hOTuaRRx7xbne73Xz44Yfcf//9AAQHB1fYQTgnJ4eQEM+s9hEREYSFhbF3794a532p87d80tPTy/RtuVxDQUREBEuWLKGkpISsrCxiYmKYM2dOmc7eLVq0oEuXLmWO69y5M4sWLQI8jRRms7nCmPO3ulasWMHhw4fLFYA333wzw4cPv6JbfNXl804Ls2fP5j//+Q9vvvkme/fu5fe//z3Hjx/nvvvuAzytL3fccYc3/sMPP+SOO+7ghRdeYNCgQaSnp5Oenl6jHufXTEQCtB0FIbXfWtQUOJ1OVqxYwerVq9m8eTOZmZm+TkmIes+smyt9mHRTtWPNurlasbWpS5curF+/vswQ6fXr1xMUFETLli3LxXfu3BmXy8WWLVu82/bv309OTk6NX1spRWlp+Vt
w5y1cuJARI0awc+dOduzY4X08/PDDLFy40BuXkJBAcnJyueOTk5Pp1KkT4Cmupk2bxgcffEBqamq52MLCQlyu6vVvio+PJzo6mmXLlnm3ORwOVq1axZAhQy57vN1up2XLlrhcLhYtWlSmcWHo0KHs37+/TPyBAwe8M8NbrVb69+9fZcycOXP48ccfy/zOAF566aUyrUV1QtUD//znP1Xr1q2V1WpVffr0UatWrfLumzlzpho5cqT3+ciRIxVQ7jFz5sxqv15ubq4CVG5ubi2+C6XOpuer5/vsV8/32a9ys/Nr9dxNTWZmpvroo4/U66+/rt544w21fft2ZRiGr9MSot4oLi5We/bsUcXFxb5OpUZmzpypJk+eXOG+kydPKn9/f/Wb3/xG7d27Vy1ZskSFh4erJ554whszcuRI9eCDD3qfX3fddapHjx5q48aNasuWLWrYsGHKz89PvfTSS5Xm8Oyzz6rvv/9eHT58WO3du1e98MILymw2q3//+98VxjscDhUREaEWLFhQbt+BAwcUoHbs2KGUUmrDhg1K13X11FNPqd27d6vdu3erefPmKV3X1caNG73HnT17ViUkJKhWrVqpd955R+3evVsdOHBALVy4ULVv315lZ2crpZR69dVX1ZgxYyp9L0op9Ze//EWFhISoxYsXq59++klNnz5dtWjRQuXl5Xljbr/9djVnzhzv840bN6pFixapw4cPq9WrV6sxY8ao+Ph47+sqpdTmzZuV2WxW//u//6sOHjyoPvjgA+Xv76/ef/99b8zixYuVxWJRb7zxhjp48KB69dVXlclkUmvWrKk0X0B99tlnle6v6t92Ta7f9aLAudakwKnf9uzZo/7zn/+o119/Xb3//vsqLS3N1ykJUe80xgJHKaVWrlyp+vfvr6xWq4qOjlaPPPKIcjqd3v2XFjhpaWnq+uuvVzabTcXFxal3331XtW7dusoC57HHHlPt27dXdrtdhYWFqcGDB6uPPvqo0vhPP/1U6bqu0tPTK9zfvXt39dvf/tb7fNmyZWr48OEqLCxMhYWFqWHDhqlly5aVOy4nJ0fNmTNHdejQQVmtVhUVFaUSExPVZ5995v1C98QTT6jWrVtXmptSShmGoZ544gkVHR2tbDabGjFihPrpp5/KxIwcObJMQ8DKlStV586dlc1mU82bN1e33367OnXqVLlzf/nll6pbt27KZrOphIQE9cYbb5SLOV+U2e121bNnT7VkyZIq871WBY527sWalLy8PEJCQsjNzSU4OLjWzpt9uoCFkzzNjf8vKYZgSwmU5oNfGPiF1trrNGYrV67kwIEDAMTFxTFq1KhqzwchRFNSUlLC0aNHvcvcCNFYVPVvuybXb5/3wWnUMvbCkZWQ+//bu++wKK71D+DfXcrSkSZdBEUWaxS7RkARFGKMKRpb0BhLvBpLLFgSxZgYNRrUCHoNzZLY2zU2ghFRELGg/oAriqJXiljoRWD3/f3hZa7rAgKhCL6f59kn7pkzc945Eub1zJk5/2nsSJoMU1NTiMVi9O7dG0OGDOFf3IwxxmrljXgPDnu7FRYWCu+ycXR0hLm5eY0euWSMMcZexSM4rNE8f/4cp0+fxpEjR1BSUiKUc3LDGGPs7+IRHNYoMjMzER4ejry8PIjFYmRkZCi9xIsxxhirLU5w6hq/Wfe1bty4gUuXLkEul0NPTw+DBg2CiYlJY4fFGGOsGeEEpw5paWtBS/e/SzVoawNZjRzQG6a4uBhnz57FgwcPALxYcXbAgAFQV1dv5MgYY4w1N5zgsAZz6dIlPHjwACoqKujTp4/S670ZY4yxusIJTn0ybgfomgFaRo0dyRuhZ8+eyMvLQ+/evWFkxH3CGGOs/nCCU4cK8gtQmFfw4s8FBdBrYQroVr0qenNWXFyM27dvo1OnTgBerHlSncVRGWOMsb+LHxOva/9dHettn2qcnp6O/fv3Izo6WmkhNsYYqy0XFxfMnj27yjqtW7eGn59fg8TD3ly
c4NSnwmdA9n+A4tzGjqTBEBGuXr2KY8eOobCwEC1atOAnpBhjggkTJkAkEil97ty50yjx7N69GyKRCB988EG193FwcIC6ujpSU1OVtlWWXPn5+aF169YKZbm5uViyZAmkUik0NDRgZmYGNzc3HDx4EDVZRYmIsHz5clhYWEBTUxMuLi6Ij4+vcp/S0lKsWLECbdq0gYaGBrp06YKTJ08q1UtNTcW4ceNgZGQELS0tvPPOO7hy5YqwPT8/HzNmzICVlRU0NTXh6OiIgIAAYXtKSkqFf98ikQj79u2r9jnWBic49elRPHDnTyD7fmNH0iCKiopw/PhxXL58GUSEdu3aYcSIETA0NGzs0Bhjb5AhQ4YgPT1d4WNra9vgcdy/fx/z5s3Du+++W+19zp8/j+LiYnzyyScICQmpddvZ2dno27cvtm/fjkWLFuHq1as4d+4cRo0ahQULFiAnJ6fax1qzZg3Wr1+PX375BbGxsTAzM8PgwYORl5dX6T5Lly7F1q1bsWnTJiQkJGDatGkYMWIErl27JtTJyspCv379oKamhhMnTiAhIQHr1q1TeBnrnDlzcPLkSezcuROJiYmYM2cOZs6ciSNHjgAArK2tlf6ufX19oa2tjaFDh9a842qAExxWJ9LS0rB//36kpqZCVVUVLi4ucHFxgZqaWmOHxthbhcrKKv/IZNWvW1ZWrbq1IZFIYGZmpvBRUVEBAERERKBnz56QSCQwNzeHj48PyqpoJzMzE8OGDYOmpiZsbW2xa9euasUgk8kwduxY+Pr6ws7OrtqxBwYGYsyYMRg/fjyCgoJqNNLyssWLFyMlJQUxMTHw9vZG+/bt0a5dO0yePBlxcXHQ0dGp1nGICH5+fliyZAk+/PBDdOzYEaGhoSgsLMRvv/1W6X47duzA4sWL4enpCTs7O3z55Zfw8PDAunXrhDqrV6+GtbU1goOD0bNnT7Ru3RqDBg1CmzZthDrR0dHw9vaGi4sLWrdujSlTpqBLly64fPkyAEBFRUXp7/rQoUMYNWpUtc+xtniSMasTRISioiIYGBjAzc0NBgYGjR0SY2+l7AMHK92mZmEOnZdGK3KOHAGVySqsq2piAt2Brv+re+wY6HmJUj2DUSP/RrSKUlNT4enpiQkTJmD79u3497//jcmTJ0NDQwPLly+vcJ8JEybgP//5D86cOQN1dXV89dVXyMzMfG1bK1asgImJCSZNmoTIyMhqxZeXl4d9+/YhJiYGUqkUBQUFOHv2LFxdXV+/80vkcjl2796NsWPHwsLCQmn7yxf+5cuXIyQkBCkpKRUe6969e8jIyIC7u7tQJpFI4OzsjKioKEydOrXC/Z4/f660mLGmpibOnz8vfD969Cg8PDzwySefICIiApaWlpg+fTomT54s1Onfvz+OHj2Kzz//HBYWFjh79iySkpKwYcOGCtu9cuUK4uLisHnz5gq31yVOcFityeVyiMUvBgEtLS3h7u4OKysrqKryjxVjrHLHjh1TuIgPHToU+/btg7+/P6ytrfHLL79AJBJBKpUiLS0NCxcuxLfffiv8vimXlJSEEydO4OLFi+jVqxeAFyMsjo6OVbZ/4cIFBAYGIi4urkZx7969G/b29ujQoQMA4NNPP0VgYGCNE5wnT54gKysLUqn0tXWNjY0VRkxelZGRAQAwNVV8YtfU1BT371c+PcLDwwPr16/HgAED0KZNG4SHh+PIkSOQvTTKd/fuXQQEBGDu3LlYvHgxLl26hK+++goSiQSfffYZAGDjxo2YPHmy8LtfLBbj119/Rf/+/Stst/zvp2/fvq8997+Lr0R1ifDWPD718OFDnD9/Hp6entDT0wMApQl0jLGG1+KjDyvf+MpSMvrDh1f7uPrvvVfbkJS4uroqTETV1tYGACQmJqJPnz4QvRRnv379kJ+fj4cPHyqtV5eYmAhVVVV0795dKJNKpVUu2JuXl4dx48Zh27ZtMDY2rlHcgYGBGDdunPB93LhxGDBgALKzs2u
0SHD5bS1RNZb2mTFjBmbMmPHaeq8ei4iqPP6GDRswefJkSKVSiEQitGnTBhMnTkRwcLBQRy6Xo3v37vjhhx8AAF27dkV8fDwCAgIUEpyLFy/i6NGjsLGxwblz5zB9+nSYm5vDzc1Noc2ioiL89ttv+Oabb157PnWBE5w6pKWtDS3dF/+jaus2z6Ua5HI5Ll++LPzL5+rVq3BxcWnUmBhj/yOqwQhqfdV9HW1tbbRt21apvKKLclXJQE0ShXLJyclISUnBsGHDhDK5XA4AUFVVxa1btyocMUlISEBMTAxiY2OxcOFCoVwmk+H333/Hl19+CQDQ09OrcIJwdnY29PX1AQAmJiYwMDBAYmJiteOujJmZGYAXIznm5uZCeWZmptKozstMTExw+PBhFBcX4+nTp7CwsICPj4/CZG9zc3OlN847OjriwIEDAF4kLIsXL8ahQ4eEd5x17twZcXFx+Omnn5QSnP3796OwsFBIjuobTzJm1VZQUIBjx44JyU379u0rHYZkjLGaat++PaKiohQm7kZFRUFXVxeWlpZK9R0dHVFWViZMaAWAW7duITs7u9I2pFIpbt68ibi4OOHz/vvvw9XVFXFxcbC2tq5wv8DAQAwYMADXr19X2HfBggUIDAxUOH5sbKzS/rGxsXBwcAAAiMVijBo1Crt27UJaWppS3YKCgionVr/M1tYWZmZmCAsLE8pKSkoQERFRrdtAGhoasLS0RFlZGQ4cOIDhL43q9evXT+k9ZklJSbCxsQHw4lHz0tJSpVuHKioqQtL4ssDAQLz//vsN9+oQegvl5OQQAMrJyanT4xblyGizy0Pa7PKQZGVyopxUokcJRPlP6rSdxnD//n0KDQ2lrVu3UlBQECUnJzd2SIy91YqKiighIYGKiooaO5Qa8fb2puHDh1e47eHDh6SlpUX/+Mc/KDExkQ4fPkzGxsa0bNkyoY6zszPNmjVL+D5kyBDq3LkzXbx4kS5fvkz9+/cnTU1N+vnnn+skJiKikpISMjExoYCAAKVtSUlJBIDi4uKIiCg6OprEYjH5+vpSfHw8xcfH04oVK0gsFtPFixeF/Z49e0ZSqZSsrKwoNDSU4uPjKSkpiQIDA6lt27aUlZVFRESbNm2igQMHVhn/jz/+SPr6+nTw4EG6efMmjR49mszNzSk3N1eoM378ePLx8RG+X7x4kQ4cOEDJycl07tw5GjhwINna2grtEhFdunSJVFVV6fvvv6fbt2/Trl27SEtLi3bu3CnUcXZ2pg4dOtBff/1Fd+/epeDgYNLQ0CB/f3+FGG/fvk0ikYhOnDhR5bkQVf2zXZPrNyc4dehpWi6tdbpFa51uUW52Xp0euzHdv3+ftm7dSlu3bqUDBw7Ueb8xxmquOSY4RERnz56lHj16kLq6OpmZmdHChQuptLRU2P5qgpOenk5eXl4kkUioVatWtH37drKxsanTBGf//v0kFospIyOjwu2dOnWimTNnCt/DwsLo3XffJQMDAzIwMKD+/ftTWFiY0n7Z2dnk4+ND9vb2pK6uTqampuTm5kaHDh0iuVxORETLli0jGxubKuOXy+W0bNkyMjMzI4lEQgMGDKCbN28q1HF2diZvb2/h+9mzZ8nR0ZEkEgkZGRnR+PHjKTU1VenY//rXv6hjx44kkUhIKpXSP//5T4Xt6enpNGHCBLKwsCANDQ1ycHCgdevWCfGXW7RoEVlZWZFMJqvyXIjqLsEREdXyIf4mLDc3F/r6+sjJyREmyNaFZ+l5CHovHQAwNcwEujoSQEUdUGnaU53kcjmOHj2Kli1bolevXsL7Khhjjae4uBj37t2Dra2t0uO+jDVlVf1s1+T63bSvvG+Y/6WKBFHCAUBHC2jdH9A0BCT1+0KjupaWlgYzMzOIxWKIxWIMGzaMExvGGGNNBk8yrktEEJEcov/+F2XPgZQLQFlxY0dWbXK5HFFRUTh27JjCxD1ObhhjjDUlPIJTl2T/S2TIZgDQ6vUvcXqT5ObmIjw
8HI8fPwaACmfBM8YYY00BJzj1pYVVY0dQI3fv3sW5c+dQUlICiUQCFxcX4VFAxhhjrKnhBOctJ5PJEB0djYSEBAAvXu89aNCgel8EjTHGGKtPnODUKXGTW6ohPz8fSUlJAIB33nkH3bt3V3ppE2OMMdbUcIJTh7T1DaGp9/zFn3W0Gzma6tHX14eLiwvU1NQqfYMnY4wx1tTwP9XrEOHNf6VQWVkZIiMjkZ6eLpTZ2dlxcsMYY6xZ4QSnvryBt6qys7Nx+PBhJCYm4syZM5DJZI0dEmOM1YiLiwtmz55dZZ3WrVvDz8+vQeJhby5OcOpQYU4WinLzUJSbh4L8gsYOR0FSUhIOHjyIZ8+eQVNTE87OzvxuG8ZYg5swYQJEIpHS586dOw0WQ0hISIUxFBdX751lDg4OUFdXR2pqqtK2ypIrPz8/tG7dWqEsNzcXS5YsgVQqhYaGBszMzODm5oaDBw+iJosMEBGWL18OCwsLaGpqwsXFBfHx8VXuU1paihUrVqBNmzbQ0NBAly5dcPLkSYU6ZWVlWLp0KWxtbaGpqQk7OzusWLGi0leITJ06FSKRSOn8XVxclPr6008/rfb51RbPwalLJMebdpeqrKwM58+fFyYSW1hYYODAgdDS0mrkyBhjb6shQ4YgODhYoazBVpj+Lz09PaWVsquz5MX58+dRXFyMTz75BCEhIViyZEmt2s/Ozkb//v2Rk5ODlStXokePHlBVVUVERAQWLFiAgQMHokWLFtU61po1a7B+/XqEhISgXbt2WLlyJQYPHoxbt25BV1e3wn2WLl2KnTt3Ytu2bZBKpTh16hRGjBiBqKgodO3aFQCwevVqbNmyBaGhoejQoQMuX76MiRMnQl9fH7NmzVI43uHDhxETEwMLC4sK25s8eTJWrFghfNfU1KzWuf0dPILTjD1//hyHDh1CUlISRCIRunfvDi8vL05uGGONSiKRwMzMTOFTPqIcERGBnj17QiKRwNzcHD4+PigrK6v0WJmZmRg2bBg0NTVha2uLXbt2VSsGkUikFEN1BAYGYsyYMRg/fjyCgoJqNNLyssWLFyMlJQUxMTHw9vZG+/bt0a5dO0yePBlxcXHVflUHEcHPzw9LlizBhx9+iI4dOyI0NBSFhYX47bffKt1vx44dWLx4MTw9PWFnZ4cvv/wSHh4eWLdunVAnOjoaw4cPh5eXF1q3bo2PP/4Y7u7uCm+5B4DU1FTMmDEDu3btgpqaWoXtaWlpKfS1vr5+tc7v7+AEpxmTSCQwMDCAlpYWvLy80K1bN4hEb+DkIMZYnZHL5JV/5FT9ujJ5terWpdTUVHh6eqJHjx64fv06AgICEBgYiJUrV1a6z4QJE5CSkoIzZ85g//798Pf3R2Zm5mvbys/Ph42NDaysrPDee+/h2rVrr90nLy8P+/btw7hx4zB48GAUFBTg7NmzNTlFAC/eEr97926MHTu2whEPHR0dqKq+uMGyfPlypVtbL7t37x4yMjLg7u4ulEkkEjg7OyMqKqrS/Z4/f640YqWpqYnz588L3/v374/w8HDhDsD169dx/vx5eHp6KpzL+PHjMX/+fHTo0KHS9nbt2gVjY2N06NAB8+bNQ15eXqV16wrfoqpLb8DtqdLSUhAR1NXVAQADBgyATCZrkOFAxljjS4xKr3SbjqEGbDoYCd//fTEDJK/4F5eWvgS2nY2F70mxjyArVU5oOrxrWeMYjx07pjBCMXToUOzbtw/+/v6wtrbGL7/8ApFIBKlUirS0NCxcuBDffvut0ju6kpKScOLECVy8eBG9evUC8GKExdHRscr2pVIpQkJC0KlTJ+Tm5mLDhg3o168frl+/Dnt7+0r32717N+zt7YUL+aefforAwEC4urrW6PyfPHmCrKwsSKWvX87H2NgYbdq0qXR7RkYGgBcvaX2Zqakp7t+/X+l+Hh4eWL9+PQYMGIA2bdogPDwcR44cUXj4ZOHChcjJyYFUKoWKigpkMhm+//57jB49Wqi
zevVqqKqq4quvvqq0rbFjx8LW1hZmZmb4v//7PyxatAjXr19HWFjYa8//7+AEp540xkjJ06dP8eeff8LQ0BCDBw8GACHRYYyxN4WrqysCAgKE79raL94blpiYiD59+ij8/uzXrx/y8/Px8OFDtGrVSuE4iYmJUFVVRffu3YUyqVT62rkrvXv3Ru/evRXa6NatGzZt2oSNGzdWul9gYCDGjRsnfB83bhwGDBiA7Ozsas+XASDc1qrOdWLGjBmYMWPGa+u9eiwiqvL4GzZswOTJkyGVSiESidCmTRtMnDhRYW7Unj17sHPnTvz222/o0KED4uLiMHv2bFhYWMDb2xtXrlzBhg0bcPXq1Srbmjx5svDnjh07wt7eHt27d8fVq1fRrVu3155bbXGC00wkJiYiKioKMpkMZWVlKCws5Lk2jL2FHPuaV77xlYuQtHf15p0AQLsepq+vVE3a2tpo27atUnlFF+WqkoGaJApVEYvF6NGjB27fvl1pnYSEBMTExCA2NhYLFy4UymUyGX7//Xd8+eWXAF5MXs7JyVHaPzs7W5h3YmJiAgMDAyQmJv6tuAEIc4cyMjJgbv6/v/vMzEylUZ2XmZiY4PDhwyguLsbTp09hYWEBHx8f2NraCnXmz58PHx8f4YmnTp064f79+1i1ahW8vb0RGRmJzMxMhcRTJpPh66+/hp+fH1JSUipsu1u3blBTU8Pt27frNcHhOTh1SfTfpRoacPCmpKQE4eHhiIyMhEwmQ6tWrfDRRx9xcsPYW0qsIq78IxZVv66KuFp161L79u0RFRWlMHE3KioKurq6sLRUvhXm6OiIsrIyhUmvt27dQnZ2do3aJSLExcUpJAivCgwMxIABA3D9+nXExcUJnwULFiAwMFCoJ5VKERsbq7R/bGwsHBwcALxIqEaNGoVdu3YhLS1NqW5BQUGVE6tfVn7r5+XbPSUlJYiIiEDfvn1fu7+GhgYsLS1RVlaGAwcOYPjw4cK2wsJCpduCKioqwmPi48ePx40bNxT6w8LCAvPnz8epU6cqbTM+Ph6lpaVV9nedoLdQTk4OAaCcnJw6PW5hVhltdnlIm10e1ulxK/P48WP6/fffaevWrbRt2za6fv06yeXyBmmbMda4ioqKKCEhgYqKiho7lBrx9vam4cOHV7jt4cOHpKWlRf/4xz8oMTGRDh8+TMbGxrRs2TKhjrOzM82aNUv4PmTIEOrcuTNdvHiRLl++TP379ydNTU36+eefK41h+fLldPLkSUpOTqZr167RxIkTSVVVlWJiYiqsX1JSQiYmJhQQEKC0LSkpiQBQXFwcERFFR0eTWCwmX19fio+Pp/j4eFqxYgWJxWK6ePGisN+zZ89IKpWSlZUVhYaGUnx8PCUlJVFgYCC1bduWsrKyiIho06ZNNHDgwErPhYjoxx9/JH19fTp48CDdvHmTRo8eTebm5pSbmyvUGT9+PPn4+AjfL168SAcOHKDk5GQ6d+4cDRw4kGxtbYV2iV78XVlaWtKxY8fo3r17dPDgQTI2NqYFCxZUGouNjY1C39+5c4d8fX0pNjaW7t27R3/88QdJpVLq2rUrlZWVVXiMqn62a3L95ltUTZRcLseff/6J3Nxc6OjowM3NDS1btmzssBhjrNYsLS1x/PhxzJ8/H126dIGhoSEmTZqEpUuXVrpPcHAwvvjiCzg7O8PU1BQrV67EN998U2U72dnZmDJlCjIyMqCvr4+uXbvi3Llz6NmzZ4X1jx49iqdPn2LEiBFK2+zt7dGpUycEBgZi48aN6N27N06dOoUVK1YIL7zr0KEDTp06JUyEBgADAwNcvHgRP/74I1auXIn79+/DwMAAnTp1wtq1a4XbWU+ePEFycnKV57NgwQIUFRVh+vTpyMrKQq9evXD69GmFd+A8ePBAYTSmuLgYS5cuxd27d6GjowNPT0/s2LFDYS7Rpk2b8M0332D69OnIzMyEhYUFpk6dim+//bbKeF6mrq6O8PBwbNiwAfn5+bC
2toaXlxeWLVtW7y+bFRHV8iH+Jiw3Nxf6+vrIycmBnp5enR23KFuG4BEvZrRP/6vmTxbUVEZGBm7evIkBAwZAIpHUe3uMsTdHcXEx7t27B1tb22q9oI6xpqKqn+2aXL95BKcOFWRnoei/z/YX5BfU+YrimZmZyM/Ph52dHQDU6OVUjDHG2NuEE5y6RHKgbt97Jbh58yZiYmIgFothYGAAAwOD+mmIMcYYawY4wXnDPX/+HGfPnhVe2GRjYyO8M4IxxhhjFeME5w326NEjhIeHIz8/H2KxGH379kX79u0bOyzGGGPsjccJzhvqxo0biImJARFBT08Pbm5uMDY2fv2OjDHGGOME5031/PlzEBHatGmDd999l5dcYIwxxmqAE5y69DcfuJfL5cJ7CpycnGBiYlLlKrKMMcYYqxgv1VCnRAr/qS4iwrVr13D06FFhJVexWMzJDWOMMVZLPIJTh7RaGEJTvwQikaja78ApKirCX3/9hYcPHwIA7t69C3t7+/oMkzHGGGv2OMFpRGlpaThz5gwKCwuhqqqKfv36cXLDGGOM1QG+RdUIiAhXrlzBsWPHUFhYCAMDA4wYMUJYaZYxxpqrCRMmQCQSYdq0aUrbpk+fDpFIhAkTJjR8YJUoKiqCgYEBDA0NUVRUpLRdJBLh8OHDSuWzZ8+Gi4uLQllGRgZmzpwJOzs7SCQSWFtbY9iwYQgPD69RTM+fP8fMmTNhbGwMbW1tvP/++8JdgMrk5eVh9uzZsLGxgaamJvr27au06rlIJKrws3btWqHO1KlT0aZNG2hqasLExATDhw/Hv//9b4XjXL16FYMHD0aLFi1gZGSEKVOmID8/v0bnWBc4walDxfk5KM7LQ1FeLoqKiiutFx0djStXrgAAHBwcMGLECH4zMWPsrWFtbY3du3crJAzFxcX4/fff0apVq0aMTNmBAwfQsWNHtG/fHgcPHqz1cVJSUuDk5IQzZ85gzZo1uHnzJk6ePAlXV1f84x//qNGxZs+ejUOHDmH37t04f/488vPz8d577wlzOCvyxRdfICwsDDt27MDNmzfh7u4ONzc3pKamCnXS09MVPkFBQRCJRPjoo4+EOk5OTggODkZiYiJOnToFIoK7u7vQdlpaGtzc3NC2bVvExMTg5MmTiI+Pb5yk9bXrjTdDNVluvSYeJz+in7om0k/dEig/L7/K9nfu3ElJSUl12j5j7O1RVFRECQkJVFRU1Nih1Ii3tzcNHz6cOnXqRDt37hTKd+3aRZ06daLhw4eTt7e3UC6Xy2n16tVka2tLGhoa1LlzZ9q3b5+wvaysjD7//HNq3bo1aWhoULt27cjPz6/CNteuXUtmZmZkaGhI06dPp5KSktfG6+LiQlu2bKGAgABydXVV2g6ADh06pFQ+a9YscnZ2Fr4PHTqULC0tKT9f+dqQlZX12jjKZWdnk5qaGu3evVsoS01NJbFYTCdPnqxwn8LCQlJRUaFjx44plHfp0oWWLFlSaVvDhw+ngQMHVhnP9evXCQDduXOHiIi2bt1KLVu2JJlMJtS5du0aAaDbt2+/9vyIqv7Zrsn1m+fg1KHKnhKXy+VIS0uDlZUVAEBPTw+ffvppvS8Vzxh7exARyor/5rsqaklV48WtjJqYOHEigoODMXbsWABAUFAQPv/8c5w9e1ah3tKlS3Hw4EEEBATA3t4e586dw7hx42BiYgJnZ2fI5XJYWVlh7969MDY2RlRUFKZMmQJzc3OMHDlSOM5ff/0Fc3Nz/PXXX7hz5w5GjRqFd955B5MnT640xuTkZERHR+PgwYMgIsyePRt3794VFjyurmfPnuHkyZP4/vvvK1xqp0WLFsKfJ0yYgJSUFKV+KHflyhWUlpbC3d1dKLOwsEDHjh0RFRUFDw8PpX3Kysogk8mUVubW1NTE+fPnK2zn0aNH+OOPPxAaGlrpeRUUFCA4OBi2trawtrYG8OL2mbq6uvDKk/J2AOD8+fNo27Ztpcera5zg1LOCggKEh4c
jIyMDnp6eQpLDyQ1jrC6VFRO2eaY3StuTj5tDTbNmCc748eOxaNEipKSkQCQS4cKFC9i9e7fChb2goADr16/HmTNn0KdPHwCAnZ0dzp8/j61bt8LZ2Rlqamrw9fUV9rG1tUVUVBT27t2rkOAYGBjgl19+gYqKCqRSKby8vBAeHl5lghMUFIShQ4cKUwiGDBmCoKAgrFy5skbneufOHRARpFLpa+uam5tDLq981eaMjAyoq6srTWswNTVFRkZGhfvo6uqiT58++O677+Do6AhTU1P8/vvviImJqfTBltDQUOjq6uLDDz9U2ubv748FCxagoKAAUqkUYWFhwstoBw4ciLlz52Lt2rWYNWsWCgoKsHjxYgAvboE1pDdiDo6/vz9sbW2hoaEBJycnREZGVlk/IiICTk5O0NDQgJ2dHbZs2dJAkdbMgwcPcODAAWRkZEBNTQ1lZWWNHRJjjL0RjI2N4eXlhdDQUAQHB8PLy0tpOZqEhAQUFxdj8ODB0NHRET7bt29HcnKyUG/Lli3o3r07TExMoKOjg23btuHBgwcKx+rQoYPCPyzNzc2RmZlZaXwymQyhoaEYN26cUDZu3DiEhoZWOdelIkQvRtaqM8q1atUqbN++vUbHL2+jquPv2LEDRARLS0tIJBJs3LgRY8aMqfQf20FBQRg7dqzSqA8AjB07FteuXUNERATs7e0xcuRIFBe/mHfaoUMHhIaGYt26ddDS0oKZmRns7Oxgamra4P+wb/QRnD179mD27Nnw9/dHv379sHXrVgwdOhQJCQkVTja7d+8ePD09MXnyZOzcuRMXLlzA9OnTYWJiojARqjHJSY7Y2Fgk3U4C8OJ/ZDc3N+jp6TVyZIyx5kpVQ4TJx80bre3a+PzzzzFjxgwAwObNm5W2l49k/PHHH7C0tFTYJpFIAAB79+7FnDlzsG7dOvTp0we6urpYu3YtYmJiFOqrqakpfBeJRFWOlJw6dQqpqakYNWqUQrlMJsPp06cxdOhQAC9GR3JycpT2z87Ohr6+PgDA3t4eIpEIiYmJ+OCDDyptszrMzMxQUlKCrKwshVGczMxM9O3bt9L92rRpg4iICBQUFCA3Nxfm5uYYNWoUbG1tlepGRkbi1q1b2LNnT4XH0tfXh76+Puzt7dG7d28YGBjg0KFDGD16NABgzJgxGDNmDB49egRtbW2IRCKsX7++wrbqU6OP4Kxfvx6TJk3CF198AUdHR/j5+cHa2hoBAQEV1t+yZQtatWoFPz8/ODo64osvvsDnn3+On376qYEjr9hzWQEScs7g5s2bAICOHTti+PDhnNwwxuqVSCSCmqa4UT41nX9TbsiQISgpKUFJSUmFc0fat28PiUSCBw8eoG3btgqf8jkfkZGR6Nu3L6ZPn46uXbuibdu2CqM7tRUYGIhPP/0UcXFxCp+xY8ciMDBQqCeVSpUet6b/vgqk/NUfhoaG8PDwwObNm1FQUKDUVnZ2drXjcnJygpqaGsLCwoSy9PR0/N///V+VCU45bW1tmJubIysrC6dOncLw4cOV6gQGBsLJyQldunSpVkxEhOfPnyuVm5qaQkdHB3v27IGGhgYGDx5crePVlUYdwSkpKcGVK1fg4+OjUO7u7o6oqKgK94mOjlaYXAUAHh4eCAwMRGlpqVKWDryY9PRy5+fm5tZB9BURIbf0MfLLnkFd3QyDBw9u8IyVMcaaChUVFSQmJgp/fpWuri7mzZuHOXPmQC6Xo3///sjNzUVUVBR0dHTg7e2Ntm3bYvv27Th16hRsbW2xY8cOxMbG/q3fvY8fP8a//vUvHD16FB07dlTY5u3tDS8vLzx+/BgmJiaYN28evL29IZVK4e7ujqKiIvzzn/9EcnKywuPf/v7+6Nu3L3r27IkVK1agc+fOKCsrQ1hYGAICAoR+WLRoEVJTUyu9TaWvr49Jkybh66+/hpGREQwNDTFv3jx06tQJbm5uQr1BgwZhxIgRwghZ+SPdDg4OuHPnDubPnw8HBwd
MnDhR4fi5ubnYt28f1q1bp9T23bt3sWfPHri7u8PExASpqalYvXo1NDU14enpKdT75Zdf0LdvX+jo6CAsLAzz58/Hjz/+qDCZuiE0aoLz5MkTyGQymJqaKpRXNVkqIyOjwvplZWV48uQJzM2Vh2hXrVqlMAmtvmjpG8LOujPU8sQYM/bFUCljjLHKvW50+7vvvkPLli2xatUq3L17Fy1atEC3bt2EiavTpk1DXFwcRo0aBZFIhNGjR2P69Ok4ceJErWPavn07tLW1MWjQIKVtrq6u0NXVxY4dOzB37lyMHDkSRISffvoJS5YsgYaGBrp27YrIyEjY2NgI+9na2uLq1av4/vvv8fXXXyM9PR0mJiZwcnJSuGORnp6uNH/oVT///DNUVVUxcuRIFBUVYdCgQQgJCVFIEpOTk/HkyRPhe05ODhYtWoSHDx/C0NAQH330Eb7//nulQYHdu3eDiITbTS/T0NBAZGQk/Pz8kJWVBVNTUwwYMABRUVFo2bKlUO/SpUtYtmwZ8vPzIZVKsXXrVowfP77Kc6oPIiqf/dQI0tLSYGlpiaioKGGGPAB8//332LFjh9LbEQGgXbt2mDhxIhYtWiSUXbhwAf3790d6ejrMzMyU9qloBMfa2ho5OTl864gx1iQVFxfj3r17wgMajDUXVf1s5+bmQl9fv1rX70YdwTE2NoaKiorSaE1mZqbSKE05MzOzCuurqqrCyMiown0kEokwIY0xxhhjzV+jTjJWV1eHk5OTwmQpAAgLC6t0slSfPn2U6p8+fRrdu3evcP4NY4wxxt4+jf4U1dy5c/Hrr78iKCgIiYmJmDNnDh48eCAsxLZo0SJ89tlnQv1p06bh/v37mDt3LhITExEUFITAwEDMmzevsU6BMcYYY2+YRn8PzqhRo/D06VOsWLEC6enp6NixI44fPy5Mznp1wpWtrS2OHz+OOXPmYPPmzbCwsMDGjRvfmHfgMMYYY6zxNeok48ZSk0lKjDH2JuJJxqy5qqtJxo1+i4oxxljtvYX/RmXNXF39THOCwxhjTVD5QxWFhYWNHAljdaukpATA31+UutHn4DDGGKs5FRUVtGjRQlgwUktLq9ZLJjD2ppDL5Xj8+DG0tLSgqvr3UhROcBhjrIkqf7FpVatiM9bUiMVitGrV6m8n7JzgMMZYEyUSiWBubo6WLVuitLS0scNhrE6oq6tDLP77M2g4wWGMsSZORUXlb89XYKy54UnGjDHGGGt2OMFhjDHGWLPDCQ5jjDHGmp23cg5O+UuEcnNzGzkSxhhjjFVX+XW7Oi8DfCsTnLy8PACAtbV1I0fCGGOMsZrKy8uDvr5+lXXeyrWo5HI50tLSoKurW+cvxsrNzYW1tTX+85//8DpX9Yj7uWFwPzcM7ueGw33dMOqrn4kIeXl5sLCweO2j5G/lCI5YLIaVlVW9tqGnp8f/8zQA7ueGwf3cMLifGw73dcOoj35+3chNOZ5kzBhjjLFmhxMcxhhjjDU7nODUMYlEgmXLlkEikTR2KM0a93PD4H5uGNzPDYf7umG8Cf38Vk4yZowxxljzxiM4jDHGGGt2OMFhjDHGWLPDCQ5jjDHGmh1OcBhjjDHW7HCCUwv+/v6wtbWFhoYGnJycEBkZWWX9iIgIODk5QUNDA3Z2dtiyZUsDRdq01aSfDx48iMGDB8PExAR6enro06cPTp061YDRNl01/Xkud+HCBaiqquKdd96p3wCbiZr28/Pnz7FkyRLY2NhAIpGgTZs2CAoKaqBom66a9vOuXbvQpUsXaGlpwdzcHBMnTsTTp08bKNqm6dy5cxg2bBgsLCwgEolw+PDh1+7TKNdBYjWye/duUlNTo23btlFCQgLNmjWLtLW16f79+xXWv3v3LmlpadGsWbMoISGBtm3bRmpqarR///4GjrxpqWk/z5o1i1avXk2XLl2ipKQkWrRoEampqdHVq1cbOPKmpab9XC47O5vs7OzI3d2dunTp0jDBNmG16ef333+fevX
qRWFhYXTv3j2KiYmhCxcuNGDUTU9N+zkyMpLEYjFt2LCB7t69S5GRkdShQwf64IMPGjjypuX48eO0ZMkSOnDgAAGgQ4cOVVm/sa6DnODUUM+ePWnatGkKZVKplHx8fCqsv2DBApJKpQplU6dOpd69e9dbjM1BTfu5Iu3btydfX9+6Dq1ZqW0/jxo1ipYuXUrLli3jBKcaatrPJ06cIH19fXr69GlDhNds1LSf165dS3Z2dgplGzduJCsrq3qLsbmpToLTWNdBvkVVAyUlJbhy5Qrc3d0Vyt3d3REVFVXhPtHR0Ur1PTw8cPnyZZSWltZbrE1Zbfr5VXK5HHl5eTA0NKyPEJuF2vZzcHAwkpOTsWzZsvoOsVmoTT8fPXoU3bt3x5o1a2BpaYl27dph3rx5KCoqaoiQm6Ta9HPfvn3x8OFDHD9+HESER48eYf/+/fDy8mqIkN8ajXUdfCsX26ytJ0+eQCaTwdTUVKHc1NQUGRkZFe6TkZFRYf2ysjI8efIE5ubm9RZvU1Wbfn7VunXrUFBQgJEjR9ZHiM1Cbfr59u3b8PHxQWRkJFRV+ddHddSmn+/evYvz589DQ0MDhw4dwpMnTzB9+nQ8e/aM5+FUojb93LdvX+zatQujRo1CcXExysrK8P7772PTpk0NEfJbo7GugzyCUwsikUjhOxEplb2ufkXlTFFN+7nc77//juXLl2PPnj1o2bJlfYXXbFS3n2UyGcaMGQNfX1+0a9euocJrNmry8yyXyyESibBr1y707NkTnp6eWL9+PUJCQngU5zVq0s8JCQn46quv8O233+LKlSs4efIk7t27h2nTpjVEqG+VxrgO8j/BasDY2BgqKipK/xrIzMxUyk7LmZmZVVhfVVUVRkZG9RZrU1abfi63Z88eTJo0Cfv27YObm1t9htnk1bSf8/LycPnyZVy7dg0zZswA8OJCTERQVVXF6dOnMXDgwAaJvSmpzc+zubk5LC0toa+vL5Q5OjqCiPDw4UPY29vXa8xNUW36edWqVejXrx/mz58PAOjcuTO0tbXx7rvvYuXKlTzCXkca6zrIIzg1oK6uDicnJ4SFhSmUh4WFoW/fvhXu06dPH6X6p0+fRvfu3aGmplZvsTZlteln4MXIzYQJE/Dbb7/xPfRqqGk/6+np4ebNm4iLixM+06ZNg4ODA+Li4tCrV6+GCr1Jqc3Pc79+/ZCWlob8/HyhLCkpCWKxGFZWVvUab1NVm34uLCyEWKx4GVRRUQHwvxEG9vc12nWwXqcwN0PljyEGBgZSQkICzZ49m7S1tSklJYWIiHx8fGj8+PFC/fLH4+bMmUMJCQkUGBjIj4lXQ037+bfffiNVVVXavHkzpaenC5/s7OzGOoUmoab9/Cp+iqp6atrPeXl5ZGVlRR9//DHFx8dTREQE2dvb0xdffNFYp9Ak1LSfg4ODSVVVlfz9/Sk5OZnOnz9P3bt3p549ezbWKTQJeXl5dO3aNbp27RoBoPXr19O1a9eEx/HflOsgJzi1sHnzZrKxsSF1dXXq1q0bRURECNu8vb3J2dlZof7Zs2epa9eupK6uTq1bt6aAgIAGjrhpqkk/Ozs7EwClj7e3d8MH3sTU9Of5ZZzgVF9N+zkxMZHc3NxIU1OTrKysaO7cuVRYWNjAUTc9Ne3njRs3Uvv27UlTU5PMzc1p7Nix9PDhwwaOumn566+/qvx9+6ZcB0VEPA7HGGOMseaF5+AwxhhjrNnhBIcxxhhjzQ4nOIwxxhhrdjjBYYwxxlizwwkOY4wxxpodTnAYY4wx1uxwgsMYY4yxZocTHMaagJCQEIhEogo/8+bNq/ZxUlJSIBKJEBISUn/BVtJm+UcsFsPIyAienp6Ijo6ulzZdXFzg4uIifC8sLMTy5ctx9uxZpbrlfZuSklIvsVTm7NmzCv2ioqICExMTDBs2DJcvX671cf39/Rv075exNxUvtslYExIcHAypVKpQZmFh0UjR1MzMmTMxZswYyGQyxMfHw9f
XF66uroiOjkbXrl3rtC1/f3+F74WFhfD19QUAhcQHALy8vBAdHd1oCyv+8MMPcHV1RWlpKa5duwZfX184OzsjLi6uVotq+vv7w9jYGBMmTKj7YBlrQjjBYawJ6dixI7p3797YYdRKq1at0Lt3bwAvFpNs27YtBg0aBH9/f2zbtq1O22rfvn2165qYmMDExKRO268Je3t7oV/effddtGjRAt7e3ti5c6eQlDHGao5vUTHWDNy5cwcTJ06Evb09tLS0YGlpiWHDhuHmzZuv3ffx48eYMmUKrK2tIZFIYGJign79+uHPP/9UqPfnn39i0KBB0NPTg5aWFvr164fw8PBax1x+Ub9//75QFhQUhC5dukBDQwOGhoYYMWIEEhMTFfa7e/cuPv30U1hYWEAikcDU1BSDBg1CXFycUOflW1QpKSlCAuPr6yvcEiof4Xj1FtXs2bOhra2N3NxcpZhHjRoFU1NTlJaWCmV79uxBnz59oK2tDR0dHXh4eODatWu17pfyBPbRo0cK5b6+vujVqxcMDQ2hp6eHbt26ITAwUGHV69atWyM+Ph4RERHCebZu3VrYnpubi3nz5sHW1hbq6uqwtLTE7NmzUVBQUOt4GXtTcYLDWBMik8lQVlam8AGAtLQ0GBkZ4ccff8TJkyexefNmqKqqolevXrh161aVxxw/fjwOHz6Mb7/9FqdPn8avv/4KNzc3PH36VKizc+dOuLu7Q09PD6Ghodi7dy8MDQ3h4eFR6yTnzp07ACAkH6tWrcKkSZPQoUMHHDx4EBs2bMCNGzfQp08f3L59W9jP09MTV65cwZo1axAWFoaAgAB07doV2dnZFbZjbm6OkydPAgAmTZqE6OhoREdH45tvvqmw/ueff47CwkLs3btXoTw7OxtHjhzBuHHjoKamBuDF7aXRo0ejffv22Lt3L3bs2IG8vDy8++67SEhIqFW/3Lt3DwDQrl07hfKUlBRMnToVe/fuxcGDB/Hhhx9i5syZ+O6774Q6hw4dgp2dHbp27Sqc56FDhwC8uE3n7OyM0NBQfPXVVzhx4gQWLlyIkJAQvP/+++BlCVmzU+/LeTLG/rbg4OAKV+8FQKWlpUr1y8rKqKSkhOzt7WnOnDlC+b179wgABQcHC2U6Ojo0e/bsStsuKCggQ0NDGjZsmEK5TCajLl26UM+ePauMvbzN1atXU2lpKRUXF9OVK1eoR48eBID++OMPysrKIk1NTfL09FTY98GDBySRSGjMmDFERPTkyRMCQH5+flW26ezsrLCa8ePHjwkALVu2TKlued/eu3dPKOvWrRv17dtXoZ6/vz8BoJs3bwqxqaqq0syZMxXq5eXlkZmZGY0cObLKGMtXZN6zZw+VlpZSYWEhXbhwgRwcHKh9+/aUlZVV6b4ymYxKS0tpxYoVZGRkRHK5XNjWoUOHCleAX7VqFYnFYoqNjVUo379/PwGg48ePVxkvY00Nz8FhrAnZvn07HB0dFcpUVVVRVlaGNWvWYOfOnbhz547CLZRXb/G8qmfPnggJCYGRkRHc3Nzg5OQkjFAAQFRUFJ49ewZvb29hxKjckCFDsGbNGhQUFEBbW7vKdhYuXIiFCxcK301NTbF161Z4enrixIkTKCoqUpoYa21tjYEDBwqjRIaGhmjTpg3Wrl0LmUwGV1dXdOnSBWJx3Q5GT5w4ETNnzsStW7fg4OAA4MUE7x49eqBjx44AgFOnTqGsrAyfffaZQr9oaGjA2dkZf/31V7XaGjVqlMJ3c3NzREVFoUWLFgrlZ86cwQ8//IDY2Fil22eZmZkwNTWtsp1jx46hY8eOeOeddxTi9fDwgEgkwtmzZzF06NBqxcxYU8C3qBhrQhwdHdG9e3eFDwDMnTsX33zzDT744AP861//QkxMDGJjY9GlSxcUFRVVecw9e/bA29sbv/76K/r06QNDQ0N89tlnyMjIAPC/uSAff/wx1NTUFD6rV68GEeHZs2evjX3WrFmIjY3FlStXkJycjPT0dEy
ZMgUAhNthFT3JZGFhIWwXiUQIDw+Hh4cH1qxZg27dusHExARfffUV8vLyqtmLrzd27FhIJBLhceuEhATExsZi4sSJQp3yfunRo4dSv+zZswdPnjypVlurV69GbGwsIiIisGTJEjx69AgffPABnj9/LtS5dOkS3N3dAQDbtm3DhQsXEBsbiyVLlgDAa/+Oy+O9ceOGUqy6urogomrHy1hTwSM4jDUDO3fuxGeffYYffvhBofzJkydKIwGvMjY2hp+fH/z8/PDgwQMcPXoUPj4+yMzMxMmTJ2FsbAwA2LRpkzAx+FWvGz0AACsrq0qfADMyMgIApKenK21LS0sTYgAAGxsbBAYGAgCSkpKwd+9eLF++HCUlJdiyZctr46gOAwMDDB8+HNu3b8fKlSsRHBwMDQ0NjB49WqhTHtP+/fthY2NT67bs7OyEfhkwYAA0NTWxdOlSbNq0SXjH0e7du6GmpoZjx45BQ0ND2Pfw4cPVbsfY2BiampoICgqqdDtjzQknOIw1AyKRCBKJRKHsjz/+QGpqKtq2bVvt47Rq1QozZsxAeHg4Lly4AODFI90tWrRAQkICZsyYUadxl+vTpw80NTWxc+dOfPLJJ0L5w4cPcebMGXz88ccV7teuXTssXboUBw4cwNWrVys9fnnfVGeko9zEiROxd+9eHD9+HDt37sSIESMUkkUPDw+oqqoiOTkZH330UbWP+zoLFixASEgIfvzxR0ydOhW6uroQiURQVVWFioqKUK+oqAg7duxQ2l8ikVR4nu+99x5++OEHGBkZwdbWts7iZexNxQkOY83Ae++9h5CQEEilUnTu3BlXrlzB2rVrYWVlVeV+OTk5cHV1xZgxYyCVSqGrq4vY2FicPHkSH374IQBAR0cHmzZtgre3N549e4aPP/4YLVu2xOPHj3H9+nU8fvwYAQEBfyv+Fi1a4JtvvsHixYvx2WefYfTo0Xj69Cl8fX2hoaGBZcuWAQBu3LiBGTNm4JNPPoG9vT3U1dVx5swZ3LhxAz4+PpUeX1dXFzY2Njhy5AgGDRoEQ0NDGBsbKzxC/Sp3d3dYWVlh+vTpyMjIULg9Bbx4JHvFihVYsmQJ7t69iyFDhsDAwACPHj3CpUuXoK2tXav32KipqeGHH37AyJEjsWHDBixduhReXl5Yv349xowZgylTpuDp06f46aeflJJaAOjUqRN2796NPXv2wM7ODhoaGujUqRNmz56NAwcOYMCAAZgzZw46d+4MuVyOBw8e4PTp0/j666/Rq1evGsfL2BursWc5M8Zer/xJn1efgCmXlZVFkyZNopYtW5KWlhb179+fIiMjlZ4mevUpquLiYpo2bRp17tyZ9PT0SFNTkxwcHGjZsmVUUFCg0EZERAR5eXmRoaEhqampkaWlJXl5edG+ffuqjL28zbVr1772PH/99Vfq3Lkzqaurk76+Pg0fPpzi4+OF7Y8ePaIJEyaQVColbW1t0tHRoc6dO9PPP/9MZWVlQr1Xz5uI6M8//6SuXbuSRCIhAOTt7U1EFT9FVW7x4sUEgKytrUkmk1UY8+HDh8nV1ZX09PRIIpGQjY0Nffzxx/Tnn39Wea7lT1FV1n+9evUiAwMDys7OJiKioKAgcnBwIIlEQnZ2drRq1SoKDAxUij0lJYXc3d1JV1eXAJCNjY2wLT8/n5YuXUoODg5CH3fq1InmzJlDGRkZVcbLWFMjIuKXHzDGGGOseeGnqBhjjDHW7HCCwxhjjLFmhxMcxhhjjDU7nOAwxhhjrNnhBIcxxhhjzQ4nOIwxxhhrdjjBYYwxxlizwwkOY4wxxpodTnAYY4wx1uxwgsMYY4yxZocTHMYYY4w1O5zgMMYYY6zZ+X8Je4Ypg/d9iAAAAABJRU5ErkJggg=="     
},
     "metadata": {},
     "output_type": "display_data"
    },
    {
     "data": {
      "text/plain": "<Figure size 640x480 with 1 Axes>",
      "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAjgAAAHJCAYAAACIU0PXAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMywgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/NK7nSAAAACXBIWXMAAA9hAAAPYQGoP6dpAAC8KElEQVR4nOzdeXxU9b34/9c5Z9bs+0qAQNj3fV9FEEURogWtWr1yb1ttrdreVmtr1dtbq9349ipUW9Sf1qoVEDeUVRDZRUD2HULIRgLZM+s5vz8mmTAkgSQEJiTv5+MxZOaczznnPUMy857PqhiGYSCEEEII0YaowQ5ACCGEEKKlSYIjhBBCiDZHEhwhhBBCtDmS4AghhBCizZEERwghhBBtjiQ4QgghhGhzJMERQgghRJsjCY4QQggh2hxJcIQQQgjR5kiCI4QQQog2RxIcIUSjnTx5EkVRAm4Wi4W0tDTuvvtuvv3224DyzzzzTJ3yISEh9O3bl6eeeorS0tIgPRMhRFtnCnYAQojrT9euXbnnnnsAKC8vZ8uWLbzzzjssXbqUtWvXMnr06IDymZmZ9O3bF4C8vDw+++wzfve73/HJJ5+wbds2rFbrNX8OQoi2TRIcIUSTZWRk8MwzzwRs+9WvfsX//u//8tRTT/HFF18E7LvjjjuYO3eu/7HD4WDkyJHs3r2bf/3rXzzwwAPXImwhRDsiTVRCiBbx4x//GIDt27dftqzNZuO73/0uADt27GjSdU6cOMEPfvAD0tPTsVqtJCQkMHHiRN544w1/mTfeeANFUQK21Vi3bh2KotRJ0BRFYeLEiZw5c4b777+fpKQkVFVl3bp1dO3alfDwcCorK+uN6cYbb0RVVbKysgK2f/jhh9xwww1ER0djs9no27cvf/zjH/F6vU16zkKIppMERwjRIhRFaVJ5wzAAMJkaX5G8efNmBg0axKuvvkrPnj15/PHHmT17NlVVVfy///f/mnT9+hQVFTFq1Ch27drFnDlz+P73v09ERAT33HMP5eXlfPjhh3WOyc3NZe3atYwfP56OHTv6t//yl7/k9ttv5/Dhw2RmZvLQQw9hs9n47//+74DaLCHE1SFNVEKIFvHXv/4VgGHDhl22bFVVFf/85z8BGDt2bKPO73Q6mTNnDmVlZSxfvpybbropYH92dnYTI65r7969PPDAA/z9739H0zT/9oiICJ577jn++c9/ctdddwUc869//Qtd17n33nv921atWsXzzz/P9OnTWbx4MSEhIYAvqXvooYf429/+xpIlS8jMzLzimIUQ9ZMERwjRZEePHvU38dR0Mt64cSM2m43f/e53dcovXryYgwcPApCfn88nn3xCdnY2M2fOZPbs2Y265kcffcTp06e577776iQ3AB06dGj+E6pmsVh48cUXA5Ib8PU5GjFiBCtXruTs2bPEx8f79/3zn//EZrNxxx13+Le99NJLALzyyiv+5AZ8tVy///3veeWVV3jnnXckwRHiKpIERwjRZMeOHePZZ58FwGw2k5iYyN13380TTzxBv3796pRfsmQJS5YsCdg2e/ZsFi9e3OimrW3btgEwderUK4y+Yenp6cTFxdW7795772Xr1q28++67/v5G+/btY9euXdx5551ERkb6y27ZsoXQ0FAWLVpU77nsdrs/4RNCXB2S4AghmmzatGl8/vnnjS7/zjvvMHfuXDweD4cOHeJnP/sZS5cu5emnn+Z//ud/GnWO4uJiAFJTU5sTcqMkJiY2uG/u3Lk89thjvP322/4E56233gIIaJ4COHfuHB6Px58E1qeioqIFIhZCNEQ6GQshrhmTyUSfPn344IMPyMjI4H//93/55ptvGnVsVFQUAGfOnLlsWVX1vbV5PJ46+0pKSho87lK1SbGxsUyfPp2tW7dy9OhRDMPgX//6F3FxcXWazCIiIoiNjcUwjAZvJ06cuOzzEEI0nyQ4Qohrzmaz8cc//hHDMHjiiScadczw4cMBWLly5WXLRkd
HA/UnQzt37mxCpIFqJjf85z//yfr16zl9+jRz5szBbDYHlBsxYgRFRUUcOXKk2dcSQlwZSXCEEEExc+ZMBg8ezKpVq9iwYcNly99222106NCBf/7zn6xYsaLO/guTmcGDB6MoCu+++y4Oh8O//ciRI1c0nPzWW28lMjKSt99+u8HmKYBHHnkEgP/4j/+gqKiozv68vDwOHDjQ7DiEEJcnCY4QImhqRmI9/fTTly1rtVr597//TVhYGNOnT+fmm2/mySef5OGHH2bMmDHMmDHDXzY1NZU5c+Zw8OBBhgwZwk9/+lPuvfdeBg0axA033NDseG02G3feeSdHjx7lrbfeolu3bowYMaJOuZtuuolf//rXfPXVV2RkZHDXXXfxxBNP8J//+Z9MmjSJDh061DunjhCi5UiCI4QImltvvZWhQ4eybt061q5de9nyo0aN4ptvvuE//uM/2LNnD3/6059YvHgxFouFxx9/PKDsokWL+PGPf0xRUREvv/wy3377La+++io/+tGPrijmmhobt9vtb7Kqz3PPPceqVasYN24ca9as4c9//jOffPIJTqeTZ555xj+TsxDi6lCMmulEhRBCCCHaCKnBEUIIIUSbIwmOEEIIIdocSXCEEEII0eZIgiOEEEKINkcSHCGEEEK0OZLgCCGEEKLNaZeLbeq6Tk5ODuHh4Y1eyVgIIYQQwWUYBmVlZaSkpPjXnGtIu0xwcnJySEtLC3YYQgghhGiG06dP06FDh0uWaZcJTnh4OOB7gSIiIoIcjRBCCCEao7S0lLS0NP/n+KW0ywSnplkqIiJCEhwhhBDiOtOY7iXSyVgIIYQQbY4kOEIIIYRocyTBEUIIIUSbIwmOEEIIIdocSXCEEEII0eZIgiOEEEKINkcSHCGEEEK0OZLgCCGEEKLNkQRHCCGEEG2OJDhCCCGEaHOCnuB8+eWX3HrrraSkpKAoCsuWLbvsMevXr2fIkCHYbDa6dOnC3/72t6sfqBBCCCGuG0FPcCoqKhgwYAAvvfRSo8qfOHGCm2++mXHjxrFz505++ctf8sgjj7BkyZKrHKkQQgghrhdBX2xz+vTpTJ8+vdHl//a3v9GxY0fmz58PQK9evfj666/54x//SGZm5lWKsnF0r0Fpnouy/AJUDTQTKKqBYbLh0Sw4PV7wVILTCYaBSTVjUjQUDUwmQDWguqzDo/vLKoaOppoxqxqKCiYLKIqOYbbjUa04PF7wVKG4nKB7UVUTZsWEqoLJ6iuLOQS3asbh1sFbBQ4nGF401YRZNaGoYLEogAdsoXhUM07D8D0xlws8XlRVw6JZUFQFs0UBxQ2ARzFT6dbrL6uA2arWX9btBrcHRVWxalZfWYsGqstXFhOVHqPesihgtZowFCcAXsVX1jAAjwdcblAUbCZb3bJoVHkVdMOoWxaw2DRQXNVlTVR5uags2Ex2AMxWDaU6Xq+hUaVffN4GyqLh0BW8ugFeLzhdAWVNFhXN5MEwjMuXNauoJt/rqyu+5+bVDdB13/8zYDPXltVMHgwMdFSqdLXBsppZRdM8oBjohoIDDY/XAMOAKkedsiaTFwMd3VBwYsLt1QPKWk02FEVBM6loZi+gY6DiMDRfWYDKKt//gcmKqqhoJhWTWcfAG3jeessqaCYdFB1dB5dqxuVpuKzJZGAoXnQd3KoZZ03ZqiowwKxZ0FQNVVMwmQ1QvOgGuJULyjocoBuYNDMm1eQrazEAL4YObs2Cw+1tuKwZUDwYuoFbs9ZT1oRJNdcp69GsVNWUrWbRzNjNFqwWDTQDl9eFYRh4XDoXMytmTJovBkUDp9dZp4y/rGrGrJl9v1+GjsPjaLCsSTVh0SwtXlZTNd/fPWAYBlWeqtr4NDNm1dzgsaL9CnqC01SbN29m6tSpAdumTZvGokWLcLvdmM11f9GdTidOZ+0fcGlp6VWJrapY51/fPYuztBJFMVAULzqAWolXNePVDVRDR9F1FAMUdBRouCw6irduWVXx4gVQqy5
R1lVdVseLDmoVumrG06iyDgy7SsXI4+jp5wkvLCG0pBybFoZdi8SkeUiKLaNSOQVAXuJwjp33fXCHF5UQWlyOVQ0lxBSFpnpIiq2gSj0BQH7iUI4Xe9ENg7DzpYSdK8OihhBqikZTvSTGVODQjvvKJgzmRImB1zAIKS4noqgEi2on1BSDqugkxlTgNB0DoCBhICfKFDxeg5DSCiLOFmNWbYSZYlEVnYSYStymYxgYnE3oz8kyEy6vjr2sksiC85hUK+GmOBQM4qPLMGyn8OpeCuP7crLCgtOjYyuvIir/HJpiIcIcD0BsVCkmexZu3UNRXG9OVtpxeLxYKxxE5xWhKWYizAkARIWXEBKeg8Pr5FxcT046QqlyebFUuYjJOYuqmIg0JwIQEVZKRFQule4qzsd255QzggqXB7PTRWz2WVQ0Ii1Jvtc8tJTIqDwqPZUUR3fllCeacqcHk8tN3OkCQCHakgJAWEgZ0dH5VHjKKYnqzCk9jjKHB83jJf5UXj1lz1LhKaU0qiNZRiIlVW5Ur07CyVwMA6ItyWCohNoriI4upMJdSnlEKlmkUFLpRtENEk7m+Z6TKQkFlRBbJVGR56j0llARkkiOlkpxlRsMxReDAeGmBBRFxW6tIjqymApvCY6QWHJMHSiudIMBsdkFKIbh+z/GjM3iICqyhCpvMQ5bFHmWNM5Xl43JKUTVDUJN0aiYsZqdREWWUuUtxmUJJ9/W0VcWiMo7h+rRfb+/mLGYXURFlFGlF+M2hVBg78z5ShcYCpFnz6O5vdi0CEyKBbPZTVR4BVX6ebyalYLQdIorfOcNLyxBc3uwqeGYTSYiUs6SnHCOcj0Xj2bldMxgckp8H9wxOYVYqpyEmqKxqCFYzE4SY0upMM7gVUycjh/GmeLaD3mAGEsKEeYYRg5IIrKrlw1nNmB4wbHPWue9qkN4BxJDEomIt2PvpPPF6S8afF/rG9uXXrG9ACh1lrIqa1WDZXtG96RffD8AKtwVfH7y8wbLdovqxsCEgYAvwfr0xKcNlk2PSGdo0lAAPLonoKxJMTGt8zRCzCENHi/ap+suwcnLyyMxMTFgW2JiIh6Ph8LCQpKTk+sc8/zzz/Pss89ek/g0CyiqjqroVHmLsZrCQFNRVANFMcCo/laLgYKKb8F3A0XBd7/6H0UBjItOrtTeURQDw3fvMmV9ZWp2K4py2bIAulPDsqEr7uj9qGoJqAqoCooKiqqgqBqKqgGgqiqq6jtO8ZerLqspKJrqL6uoKoqioyqgKGpgWVVB0TQUrfq8moqq6Rg6qCoXlSWwrKqioqCig65g6GYMw4xumDAAt8PApURhGODVQlHKNTSPgVJmwlOigmLFqUaAoVDqtKOrCl4dqs4loFSaMXl1lEoPrpIoVMNEmRrue53ssaimaDxeg8qQRFSXGbPHQHF6cJYnoRgauhoKhkKVxY3J3BGPV8dhi0ZxW7B4DFSPl6qKTiiGikcJwTCgxOzBbOqJR/fiMkdgeMxYvQaKx6DS4QJDxaFYwVAo1LyYTC68uo5Hs6PrJmw6oBtUON2AgkOxgAGqqqNpHnRDx6ta8RombF4DDCh3+WoEKhUTGAqqYqCqvmRUV0x4DA2771eXsuoajEpFrf69MlBVHcMw0FUNL2p1WYNy7yXKKiq6omGr/p2s8PQFoMpfFk6rOoahYygq3gvKVnm8/rJKddkzio5B3bKOesrm+ssqeBUT1uqyzuqyjpqyQL5af1mX1wsGOFD9f8NnLyprqamE9HpxG+CsLlu0qxtVPbNw6xnoioYzKRK1exb+i174N1fzd2RoqIqGqqqYPR7MDjdes4bbbgVFQTegzOkmUlfQFI1zVS68bh1033+cpoDNrKF4dV9toNsNDi+qrqFXVf8NO3yJKfjeY5yecrxm35dCw1mOpmj+9wu1woHi8YJhoIf4ascufC+5sOzFFEUJeHypshee98KyXsOLx/Dwdf7X9IzpSUJIQoPnEO2PYhjGxR93QaM
oCh988AG33357g2W6d+/OAw88wJNPPunftnHjRsaOHUtubi5JSUl1jqmvBictLY2SkhIiIiJa9DlceI2lS5eSkJDApEmTsNvtzT6X/7+o5kfN/9jFj+scd3G5Bs7DReW8XlY9c5asb7zE97Bx5yvxTYrV46y+OXw3t+Oixxfc9zgMPC4Dr9tAd4PX7bvvdRl4ax5X39c91fc9tdt1d3U5l4HX02p+lcUlKNWZgFJ9Q/ElrFCd5NZsuzDxvnBb9Y6abXBB2QuOqynrL6dw4XeEC85Zu0O5aJ+/LIFlFeXi7VxwgvrjrPl5ZmfDzUEA9igNDMP/Xcio+V6k+xJRo7rFSXe50Z0eQiNcpHQuoajChdej0KtfCWHRoYSOGs3qffkYOoQc2INWVYXJ5mRoX4VQS+132yrsOHqMpKrMVwvrOHgIvbzMv99iMuiY4NunmExEZc727ytfvx53Xn71Po2IW29FtVgu+fxa0vrT6ymoKgAg2hrNlE5Trtm1RXCUlpYSGRnZqM/v664GJykpiby8vIBtBQUFmEwmYmNj6z3GarVitdatpr2azp8/j67rZGdns2TJEiZPnkxKSkqzzuX/phP4ozlnalyx4jOMmbyRrO3DOHtY5eBnlTgrdFzlOs5yA1eFjqui9qez3PfTXaXjcba+JENVFVST71uwqoFqqv6pgWJSUFXfNkUDVVN8fac03zGqesH96uMVTaneDii+8ytadS2T4vvGrWrVtUyKb5/vfu2xVO/zl/PXjPnuqxfWVF2wT9UuuoZSXWtW87tRcx21ng/jC5IJRVUCPpyVmtgDko0Lzll9Df8xNXGhVB8beB1/+QvjqompLfK6a7KQC24XZCP2qNqipUUc+MyNwxni+/1QYcs/apvNq4q9NI7vP6ii1MKRb+PRDV8ftC2nUnz/l4sLcXp8r3eJqyeG7ovl1Bdg0qBzj2J0AwzFhGuDG3u8QmSagjM7FL1CwzAUMBQ01cBdBIbZQ0SSi/L9JZjtCgYGVSVmvI4wdKeD2AgPruPHUe12yk3RVJV7MQBvcTHe8gpfLY/ue3+IjNYID/clctauXVGqkyJ3Xh6eggJ/ZmcYRvUXL99jW69eqCG+pihX9hn6Fdo5mtiRU+VZhFvCm/VfJ9qu6y7BGTVqFB9//HHAtpUrVzJ06NB6+98ES6dOnZg1axarV6/m/PnzfPrppwwePJjBgwe3/jd5WyRmq9dfxbP2xfPNOo1mVjBZFcx238+A+zYFk0313beAZlHQzAqqWUGz+I6tuanmCx9X37cE3ldNvvuqqTYZUVR8iU1rf73bI10Hw+tLAHSvL/sx22r3VxT6tl9YpiZxMNkgMrW2bP4+8Lqqy1WXqanqsIRC8oDasic3gruqdn9t9QhYw6DLxNqyh1eAoySwTM3NEgr97qgte+gzqCyq/7ma7TBgrv+hdmYzfbuehdQhkNwfgIFzwzh/0uMroAQmhBcmkP7kUQV0OLi8DK/Li6Iq5O7zUJbvrU24NbU6kQZFtXJmvxOvbqCoCl5F4dSpC/usXFjd26Hep+H16BheYFEpKBDZEQwjDfQ0PCVlmAzo2rcARQFHgomqSl8tlOdcBd6yiuqECUwqhIa76NCxHN0LqXcmE5bma+Z25OTjOXKo/tcRsHTp4k9w3FmnUE5nM7jDJIZ3H9HgMaL9CnqCU15eztGjR/2PT5w4wa5du4iJiaFjx448+eSTnDlzhjfffBOAH/zgB7z00ks8/vjj/Od//iebN29m0aJFvPPOO8F6Cg2Kjo5m1qxZbNy4kUOHDrFjxw5yc3OZPHkyISGtuEOcNZzQaA99x+ZQWNoNS7gFS6iCNUzFEqpiDVOwhKpYQqt/hilYQ1XMIRckL1YFVZPEotXRvdU3jy950L2gmX0f2DX7S7Kr9+u1SUbNMfZoiEn3lfV64OSG6v167TlrjotIhY4XfPB881ZtgnCxyDTodkHzwqHPfOerT3hSYIKT+y00NAInNC4wwSnNAVd
5/WW9rsDH7kpwVdRf9uL23Qv7iNRmJ76f2kVfvCrO+n5eEIeqKcR2bfoXtKH3RzW6bHGFi8NHy/EcU7C7TJwtd5JX5kB3Q9lWA9UOKGCogGKQGGnDbvWN3Dy2uQp3zQg6QFNUys5c8PftDUUxdL7dkuZ7bDJqu/p5wzH00AuaB301hUd2gtdroHxSCkoZljCwRUQQGT2A0Xc4iUg0arO66gxPtdUmwa7T2QCUf/klAOcqiyhxFmPr1Zv0EVP8oyJF+xX0BOfrr79m0qRJ/sePP/44AN/73vd44403yM3NJSsry78/PT2d5cuX89hjj/Hyyy+TkpLCX//616APEW+IyWRiwoQJpKSksGHDBnJycti7dy/Dhw8PdmgNUxQUeyTj7zgG3bpAZP1Nf+Iq8Hqo7ozk+4CvSSx0N1jDfQkGgNsBBfur99eU9VQf74HozpDY21fWWQ77lvrK1ie+B3QaXX19Nxxb23B8MV1qExxFhfMnGy5riwx83FBy49sZ+NAa7kuWlAva8Gra8+wxgWVjM3zPOaBNrzq5uHhkTerg2hqj2ra86kTkor4j6RN8H+i1VSf4q1fUizrE9rj5gg/jy0gZBDk7L1+uhUWFWhg+IAaq8739OQaFpysBiJ1Qt/z4XhEkRviShCP5ZWw7ch5QcGeDuVIhJcZGmN1EfISNvL0uSvM8FFU5fe2Y1c2wvvsahgqhNo2EcCuHV1dhDVMpKnPizdZ9fYswcJdARYlK0WkbZw6HoSrQc1qIL+ny+qbhUDUH3W9UiM0wY+7YEXdWFkZ1p/BzlUUUO4txFB8lK9vMZPsgTHGx/oEIov1pVZ2Mr5WmdFJqScXFxezatYtx48ahtfY/umNf+D680oZDYp9gR9P6ed3gcfpqAUw2sFR/sLoq4dzx2qTF1yO6OoFxQ2w3iO/uK1tRBAc+avgaSf2hwxDffUcp7L3E5JbxPaHTKN99dxXsfrduGV9bHsR0ra1p8XrgyApfYlHbCam2bEgcxGXUnqPgQPX+mk5NWm0CYLIF9D3BWVadUFyctLSzmr6cXb4EJyQWIlJ8f1/m5g9CaK5Kl4cKp7d6RKNS56fdrKFVj47UdQPdMMgrdfDl4UL/OTrHhTC6axwAXt3gve2nG7xeh2g747vXDlj419ZTvry8EtzHwXveQ+UGBW+RhsmiYdZ8o8Jq+u0AqEp1HzYgsbeFGx63EJ7o6wxX4ChgX9F+znvLMCsmJp8MxdKlK7Ye3VvwVRPB1qY7GV/PoqKimDhxov+xruts27aNfv36ERoaGrzA6lPz7dtREtw4rjWPCzxVvmSlJmHxOCEsEUKra7Iqz0HW5sAyF9ZMXNC3AncVZG9v+HphF4z6u7ApQzVR3bGoOnkwBX4ImmyQ0Ku2XMBNC6w9Mdmg3521CUhN4lIfzQQ9b2ncawW+GBrLKp1AAd/rD74+O5VFvlqxICQ4IRYTIZbGfQSoqm9Si+RIO72Sw6lye1EVhbiw2sEbqgJ9UiJQq5ugapIRtTppCrUGXmtCj4Tq0fAK6gjfKMkzU0vQvQZkh1J+ykVuXhnFDjcoBu5TJpw5Jswm3wSQ+ftd/GueixEPRpA2zYTZnkLXuHDWZq8k9OhJDuarRJ0rxXb4OFE2DS0ikpDhwzhaUI7To/umjDCM6s7ZBl4dbGaV/h2i/DFuPV5EhcuDVyegnIFBRkIYPZOu3Rdk0XSS4ATRrl27+Pbbbzl8+DCTJk0iLS0t2CHVaisJjq77+lJ4nL7Exe2o/RmV5uvPAVCaC0dWNtyE0mFobYJjGFBeULdMTTPHhTUSZrvvA0yzVN9Mvp++ntOBiYg1HAbd6ytzOSYLdBzZuNdAUXwdaEXrEJvh+33U3b6E02SF8rO+dhh7jO//9lpzVVb3naqn75XJ5v/d11SFQbY8sFTvd3sh21deMbwMsEUG1vgeXln9BaD6XFm1504NiYPugZO2JsVUJ3rD4OzpMiz7ywmvAJdHp3SQrznN8IB3YzTu476
/s62LSln/N9+8SlqiztkxdkLDXZTn5xBVVEliaAJaghk9/zRlnWPZklWB4g1HrWfenQi7KSDBKSx3UVLlrvclO5xfTs+kCBxuL/mlDjrFtrIvqUISnGDKyMjg5MmTFBYW8tlnnzFgwACGDRuG2tC362spLMH3oX5xf4fWxOup7Qha8zM8GcKqq8FLc3wjYRpistYmOCZrbXKjmX1v6prFt12zgPWCb2rWcOg6uXafyQqatf7ExBICXerp4FAfRWlcciOub2ZbbVNjjUOf+Zrwut8EITG1/a9UU21zp9cDJadrExF//6zq+6GxvmQaqvtSfVG7/8KO4roXojtB57G+sroXvn2v4XijOkLGDbWPT29r+ItATZNbjYqzdTtv16jZXn4WSrIgoXdATVZ0UghD4+woCujAyn15FB0oRg1T6fCoxpCQON6b5/uioVTPX2AUaEQvHYTBAOy3nyOyP5gjNPbveh/F7aU0W+OsF+KtHekWPaB6LkUFVVEAL6rmocxV5psI0/CSFu8hyevFwCDaFoXdZMerGxRVlhAbZkPXDc5Xuth8rIgO0SH+Jj3ROsi7aRBFREQwc+ZMtmzZwr59+9i9ezd5eXnccMMNhIUF+Ru3NRyS+gU3BsPwjTS5sHmmohBObfJt99QzYVrqkNoEp2YUhaL67pvt1T9tYLJD6AUTGNqifM04ZnvdDqQXM1l8HxBCtLTDFy1tkNCrtqbO64Lj6xo+NjajNsFBgdIzDZf1XlArUTuhUm1/Kv9PFSwXvRf5O5lfWLb62IubIWuSKNWEf96GmvPWNMmWZPlGwjnLIbFvdTLmwWQNx2SvPd+tg1MpTotB9+jEpvhieuiLVNwOneLTHg6vqmLP0gp0r+8LYsnyZEqWQ1iiRpnjLqKHnMViyyct3AQUkuX6grGpY0kM9c2Mf6z4GN8UfMPBBgbZJUeNJiUsBpfXhVdzcrz0MMnh49hwpNDXV8gw0K5gljLR8iTBCTJN0xgzZgwpKSmsX7+e/Px8lixZwpQpU0hNTb38CdoCrwcqC6Gq2Nck5igBZ6mvRsbQA/u0KGrgfCOqyTfE2RLqGzFzYadWW5Rv/hGT7fIdWVVVmnFE8IQl+mpwaihKbVJQQzP7ytUkCTV9s2r6VYXG1ZZVNUgff0FSYQo8Trto4tMh9zc+1vTxjS/bmC8CFdWdls8d991qKCr0n+OfH0kxDKJjTHidLioKilAVHXuEHbMtjPhuFuK7WRjzUCTb3yhlxz/LfX15gPJ8LwoxFK+NISO0N9nqEbQ4J2q4hwqrF4dXx+MycBRqUGXBHG6gKRqqovoWZq2+b1J8H5fnHec5cO6Af7kIT/V1luzIZkSXWNLjpKmqtZBRVNdwFNXllJaWsmbNGs6fP8+sWbOIjo4OXjBeD1T4Fmokou76Xs0+Z9U5XyddW2TteSvPwf4P6z9GUSGpry/Jgdp5Wqzhvm+WweivIMTV4HZckJC0gmbqa6U0F06s932ZqakVcpb5Rg0m9vbdP7TcPwdPWblGVrYdm9VL1/Qq6HYjRF4wOaGuY3jdnD3kxF3pIW+fh61vBvajqfnYq28SULNdpdMIq28JGafhG3fgrLlvUFHswtv5LBFTzxLZz8Wu0+ewGol0COlNelwoo7rKtBpXk4yiuk7VNFkVFhYGJDcejweT6Rr/V7krff1XNDMMuqfpx+u6r/29srB2tEjNrLDgm3ulJsGxRvgSFlukr9bFFul7bA331cxc+CakatI8JNomczudmC4iOWCm5zpclfVOrOhwwsEjoaRFlxAa2QFOb/fNDWXoKEACgAqp/SD07u4c+ToW3TBRcMKExa5TXqRgVDcpqVazf5ood6XO0XWBK7VfyDAUnHsiceyJ5KzNS6hbRY+rRH/kil4FcRVIgtPKqKpKQkLtirh5eXmsXLmScePGkZ6eHsTILsPt8M0mW9NE5HX5vnVdzBzim/8j5IJvOZopcNp7IYSoYY+G/t/xN7NZnQbKjgLfkG3
gZA50iKgiUlHA0PF64fQZu29ybUPB0KHq/BGSexViGNCj4wESLbnouoLbmkyW2o/QwQPQvQZZ61yYFZ24RBOa6kRT3WiqG0V3osWE49LtnD8Buz82YXi8GOUaHt2NKctM4X876fDgCaC6Q3d4UvD7MbZzkuC0cnv37sXhcLBq1Sr69OnDyJEjW8ckgV43lOVCyRnfT0eJbwRF92m+/Wabb0STZvJNDleT1FhCLn1eIYS4kKrWLiUCWOzQY0QSOUeKKS3y1bRUFDuITO/n65Stq1Q4C33NfY5SOHcMrUcfjJISFFXFFHYWW4SOoqkYqkJI0Xks+dsoL1RIjLdhuJx4y90kubZhN/kGMhS7wimt6oopOozoZJjzdASlB11oJp1tXySQdywScPPtokRcB88w9OZcTFouelUkhj3W3x8oNrW2n9/Z02U4yty+SRR1A92ro3sNDN23aGqPERfMkVUPo3pxVVVGbjVIEpxWbvLkyYSHh7N792727dtHfn4+U6ZMCV7foby9vj4w5fl1h4pePBy0x03XLi4hRLuhmVUSu0RgtmtggD28eroGrKiGQYdesSiqgqrGoXqTUW2hvgV4NQXVGIRmuPwzgfft6AJclMfHcOiQCcViQcGL5o3GEupFMZmxu0OpIhzDEgqqRl5FBHpaCJrZxIwnnGz+2wH27eyDx6vw7Zdp7P2qI3FdzzHwOwVocRo4SlFcJcQaJv8yLJXH3JSXVC8QG9PFl7mBr09SaQ5Gn1EoYfGc/LYQ7wXJj+71JUSGbhAaZaVzv7gGX6f2ThKcVk5VVUaMGEFycjLr1q2jsLCQJUuWMH78eLp27Xr1A/BeNMlVaY6vxgZ8fWQiUn01N2GJ7bcPgRDimrPYTCSlR9bZrigKkfEX1hRfNGIMDbBB39m+/j3Vo8vCzHYGjrbi8eiomoLJNMW3cjsQAiQBOUfOcz6vEqX6LKqmYBuQRI97iynrspujy4b4Zj3GoOB4LKtfMGHu7MKEl5ikIhLDsokN9Q2MiNY0wiNUVNVA6doTNSLWtxL8sW9Qo877vkiGxVNV7kb31j/vUE3NkKifjKJqRaOoLqeiooI1a9aQl5cHwIwZM0hJSbk6F7twraP+36mtIi7J9o1qiEgF2/Xz2gkhxJUyDANnpae6dsh308y1I96+OvMVJzYcJO9PY1AMA1QTitmGio5ZNVAUiEnz0H92Oakj3IRbI9A0i685v2aur+p1AJ0JAyiL6UNpoQOPrqMrvsTJAyiqQnp8GCFWjcpSN+fzKqqbuQwMr4G3OiFK7BxBRFz9y4DoXh1FVeodSdaaySiqNio0NJQZM2awY8cOSkpKrl5yA9WT4ll9NTiVRbUJzoXDMYUQoh1RFAVbqLnB/WNTx9Kpk5N9d79Gxfko9LxeWCOGcWq7isfw4tG9nD6pcPrPYeiRTlQXhHU0oTsK8ZYZDP6ejbhYJ3bLeU7nn+TEmQhMav1TYbiAIZ2icTs9lBbWP+rrfH6lP8EpOFXK+bxKf18fALNNI2NwAqrWNqclkBqc66gG50KGYfgzb5fLRXZ2Nl26dLnMUU3kcVUvH9DwH7QQQohahttN5ZnTOHbvhion4WPGsNvI4eDO8xR+Y8e5KvCLqUkxo+BLMEyagdfrQAsvxXrPN+SkWOga3gdD8ZIc0gG7yYbLo5MabSclyo7Lo1NS4iTEC3arhqopqJpKRbGTojPldBkUjz3MlyDlHS+h6EzdaZq7Dk64ZNLW2kgNTjtwYbXil19+yfHjx+nRowdjxoxpuTlzZBI9IYRoEsVsJrRzF7wHj+B1uKnYtJn+w4bSf8ZASsZWkjW2HK9bZc/p47g9DroldaV8WyjOcwZnjzgxdBVvaRj6l73p97M84BQAwzr2IMYWg27oHD5/mDOVOqfzIigq9yU8EzrVLj0TFmUlJMKCxVb7WRCTEkpUQoi/s/WxHQV4PQ2sKdZGSIJznTMMwz8p4KFDhygoKGDKlCnBnQVZCCHaOVNCAt6
SEgAMjwebZiWyNIeu57ZjTkpk+H+Nx8BAVVTI9B3jqtD54L+OUXTShGt/MnmPpBA9pAJNcbDZW0h4dD7FY9bhUMtR0HHqA1BNvalyeQOurahKnb43FyY7vkJX7am3GtJEdZ02UV0sJyeHtWvXUllZiclkYuzYsXTv3j3YYQkhRLtleDygaf4ad+eJE1Ru8yU4YRMm1HtM/trtLHkmvnrZDs03HccFCwt7DR2PoRN712f06GdwzH4HMaEWbupoQME+6DQmYFX2tkaaqNqhlJQUMjMz+eKLL8jOzmbdunXk5OQwZswYzObrp31VCCHaCqUZ3QUSByfyvVcr2b8lhbI8L1a7C4srj+0fxYECGiZ0r4vCd24iNjQL+lQfWHwKik9D8bu+5W68bt/cZJEdoOukeq/ldnkpKahCMylEJ7W9RUIlwWlD7HY706dPZ9euXXz99ddkZ2fj8XgkwRFCiFbEMAx0p9M3qeDFw7SjOhIaBcN6XrgxkaE/M6g6r3Pw80o2/K0IdBdHXu+OV4fQZw2wVdQWd5TU3j9/0jeZYD0LuLodXvJPlGAyq6iagu418Hp8sypHJYbUbda6zlzf0Ys6FEVh0KBBJCf7FrK029tuVaUQQlyPPPkFlCz7kMjbZ6JYL56IsH6KohASozH47nAKQk6w/0+hvmYsIP/fXlgwDspyQFHRFTMuQ8FSchLVFhG4YHF98bh1sg+eD9hW00m57JwDr1snJNJy3SU811e0otGSkgLXMTly5AhZWVmMGzcOi0VGRwkhxLWmRUWBqoDu6/pquN1gtVLyyacYTgfhU6agRdadnflicSNd9P17EXkvp1O4U6Viv85LU/IJ6WZQPqaAiO4RWDQbqpLCtD5J6BUuLCaVcFtgbb49zEx4jA2304tqUtE0BdXkq80xWXxrHhZll1NR4iQ00opmVvF69OqaHp0uA+PRTK13Dh1JcNoBl8vFxo0bcblcnD17lilTphAXJ+uXCCHEtWSKjiZq5kzfPGZmM0pNs5HXg+HxUrFxI+a0NOz9Lr0KeXJYMl7Dy6Q/dGRRZjZVZR5cXgPHAQPPvkgqH8ymY9+u6AZ8tjcPq0nFbFKZ2CMew4CSSjep0XY0VaFjn9hLXkvRfLU/FSXOOvu8Hr1VJzgyiqqNjKK6nPz8fNasWUN5eTmqqjJ69Gh69+4d7LCEEKLdK12xEm9xse+BqhB9552NPvZ8ZTHrVxwi/8NIKo9b0A0DtXq1LEuam+R7y4nsnMy4bvFYTCr7c0rZdbqYYZ2j6ZYYftnzOyrclBRUVU8iqKBV1/BoJhV7uPmaz4LclM9vSXDaSYID4HQ6WbduHadO+SaO6tKlC+PHj5cmKyGECCK9ogJ3Xh6VX+8AIHrOd5p1nuV/OcXBpR4ANEXDrPvuP/RlF38n48/35nKuwk2/1Ej6dbh8c9ilFJwqxdAhLi3smtXkNOXzu/XWLYkWZ7VamTZtGqNGjUJVVY4fP87SpUvxeDzBDk0IIdotNTQUc4fadf4MvXkzDE95JJmh/1dCr1+WoF7QsbjgsBujut9PbFjjOjU3RmF2OYXZZa12RmTpg9MO9evXj8TERNasWUOXLl1abmkHIYQQzaJardh69cRz7hx4PNCMmnWLZmH8wMEwEIqHnuPN28/iNQzeefAU4f0cTH4hAmg/rRbyydZOJSQkkJmZGZDcVFRUYDKZsDZy2KIQQoiWY+/fv97thtuN7nShWMyojUx83HYX5t7H8OxLx2t4qTjvYG9RFonqaDRPJYrHClxZE1VrJwlOO3Zh3xtd11m5ciVVVVVMmTKFhISEIEYmhBDtk6ewkIqtW9HLK1BD7BhOJ4a3uglIVYi8+WbU0MvPOhwXmsgN/5NE7upD7F7YCwyDmi630WWHCT+aA6n3g9l2FZ9NcEkfHAH4am+cTifl5eV89NFH7N69m3bY/1wIIYJKMZvRy32zEuuVVbXJDYBu4C0vb9x5FIXuSYNJT+yHhoo7JxbPORMKEFl
2BItigKuCgjIHFc622Q9TanAEAOHh4cyePZsNGzZw7Ngxtm7dSm5uLhMnTsRma7sZvhBCtCZaZCThN07BcLlQrFZUiwXFam3WulYA5qQOYCoA3UXOXzox/tVQcsMiiLArlLs8ZJVUcji/nN4pEQzoEFl36YjrmNTgCD+LxcINN9zAuHHj0DSNrKwslixZQl5eXrBDE0KIdsMUE4M5KQlTdDRqaGhAclO+cSNVe/Y2+lxJfSwk9LBhM9kIc8UQG2alb2okoRaN0jOHyDl7FoD9OaUUlrta/LkEkyQ4oo5evXoxa9YsIiMjqaioYMuWLcEOSQghBODOPoNj/37KVq+mdOVKSj79FFf13Gb1UVSFyb+IAqCiyEv2N06M6o/+xKqjDHbuwFo9h43exG4JXQbG03VwAubqZR1aG0lwRL1iYmKYPXs2vXr1YvLkycEORwghxAU8Refwni9GL6+gYsvWS5Y1h9Q2O33000IW/ng4RTkhaIpCh/Qe2JuZoNhCzdhCzShq62zWkgRHNMhsNjNu3LiA2SL37NlDTk5OEKMSQoj2K3zyJOwDBxAyfBi2Ht0BsHbrdsljTHEuom8oxq27fRtUjVNl06HDMIhMw+IoIi13JZaT665y9NeWdDIWjZabm8vmzZtRFIXBgwczePDgNtUhTQghWjtTfDym+Hj/Y3NqKorFgisrC+fx4yiaiZARwwPmy3Hrbkzf2U/6XBvGglGc2elkyz/KSBnYnaQkC4qRQ6RSjuIqa1IsZ0+XgQExKaGtctHN1heRaLXi4+Pp0aMHhmGwY8cOPv30UyorK4MdlhBCtFum+Hi0yEj0Kgee/ALcOTl4GhgYoijQe0btHDpLf3QWV6VOqMVEXJiFCJuZ/TmlrDtUwOlzl39vP5tVRsGp0la7VIMkOKLRTCYTEyZMYNKkSZhMJnJycliyZAnZ2dnBDk0IIdo1c3ISWkT16uCX6CzcbbKd8T+J8j8+srqK4em+0VWqAsWVLnKKHZworKCw3HmVo766JMERTdatWzdmz55NTEwMVVVVLF++nG+++SbYYQkhRLulRUSg2O2NKtv39lBCon0di9f/pZiNr+pYNRU8Duylx8DQyT5fxRcHC67rCV8lwRHNEhUVxe23306vXr0ACG3E1OFCCCFah4k/i/Lf3/OpgcelgruKjKpv6Ww+D0B40R427dpLUVlVkKK8MpLgiGYzmUyMGzeOmTNn0qNHD/92t9sdxKiEEELUx+Wtnciv82gb33s/yfdAUTm4fwCExhHedzoj+vUkquo0YVXZpBasR9n5JhQchDPfwPmTwQm+GWQUlbhiiYmJ/vsOh4OlS5eSkZHB0KFDUVXJoYUQ4lpQFAUtMgI1PBzD48Gxdy+6w4Gzohiz+Tw9+473lzUMA1sMWMNUnOU6X74dT1jvnnTuZUMDRnaOxH3CS7jVRohFg6zNNReBAXeDqXGrmgeTJDiiRZ08eZLy8nJ27dpFbm4uN9xwA2FhYcEOSwgh2jxr9+64s7MxxcRg6DqOQ4cBMAM9zGbi7fEUVhWyNXcrDo8DgG73DGfv33xfRJc/VcRDX6QCENO5L6T1hDM7oLIQTDYozoKQOOD66JcjCY5oUT179sRqtbJ+/Xry8/NZsmQJEydOpFOnTsEOTQgh2jRzcjLm5GQAFFXF1rs3hqMK5/ETpIamEmGPpdhRTKWndgh4+KRC+uams/dD3wrmeXtdJPX11c44DQVXwmDMmorNXHe248794gAwyVINor1IT09n9uzZxMfH43Q6WbFiBZs3b0bXW+dcCUII0RbZ+/XFnJYWsC3MEsbktMlkRGX4t/W6OcR/f+mPz1JyxgPA0YJyPt6dy+7TxbUnKDkDh1fA7ncJcZ4gJMKCKks1iPYkIiKCmTNn0q9fP8C3xMPXX38d5KiEEKJ9M6kmYu2xWFRfLc3R4qPEdjHTdULtEPMdbwXOaJxX6uCbLN/IKpylUJoD7io4tQl2/H9QdOyaxd8UkuCIq0Z
VVUaNGsXUqVOJi4tjwIABwQ5JCCEEvkSnhmpSmPZMDBEpvm0lOV4AzJovRahwejmYW8a/tmaxryoGEnoDUHTOTGGhhvf0zmscfeNIHxxx1XXu3JlOnToFrFt1+PBhunbtiqa1zrZbIYRoC0wxMYTfOAXlovfaLpFdAFAVFd3QURWVYfeFs+b358nd42T34nJ63R6KVzfYmVXsPy673KBPnxGQ1I/85VsxyguJiPXSGt/JJcER18SFyc2BAwfYsGEDe/fuZcqUKQGrlQshhGg5isWCKSamznazZqZHTI+AbYl9aod+b3y5hM0LS/nOonjmDAunuNKF2aQSUtPZ2BKCHpWO7nJBrK+zMUdWQ0QyJPa5as+nKaSJSlxzoaGh2Gw2CgsLWbJkCcePHw92SEII0S4dOneIg+cO4vK6iOpg4rY/xfn36brBuw8UkPeti9gwKxE2M2fLnXx5+Cz/2prF9tPl7HCkcNraDcrPQkUBFB0N4rMJJAmOuOY6duxIZmYmSUlJuN1uVq9ezYYNG/B4PMEOTQgh2hRveQWOgwdxnjhR7/79RfvZU7jHP8txh8FW7n03kciU2gae3f8u99/PPl9F9vnApRvOVbrA6wJP61qcUxIcERShoaHMmDGDgQMHAr5mq2XLllFcXBzUuIQQoi3Ry8uo2v0tzsNHGn1MeKKJ776dSKcRNgBObnZQluf7AjooLYopvRK4uV8SiRFWAA7nlTd4rmCSBEcEjaqqDB8+nJtvvhmbzcb58+epqro+F3UTQoi2ZtBdtbPQv3VXPmX5HkyaSkKEjagQCxZT604hWnd0ol3o0KEDd9xxB5MmTSK5ehZOIYQQwZUywErGxNr5cY6sCfwCmhhhIz0ulJFd6nZibg0kwRGtQkhICBkZtTNrlpSUsGzZMs6fPx/EqIQQon3Ykb+D846677dTfxPj74+z5e+l5B+sXZG8S784ho5KJT0pnEP5ZRzML+N8pfuaxXw5kuCIVmnTpk0UFBTwwQcfcPjw4WCHI4QQbVLNhH9hljCqPPV3Eeg43Oq/v+SHZ/33Q6OshEVb2Z1dzLkKJyWVbs4UV9Z3iqCQBEe0ShMnTiQ1NRWPx8O6detYt24dbnfr+WYghBBtwbCkYXSP7k5aeBopYSn1lul/RxhpQ23+x4YRuJp4WkwIhi2WvLjRlMQOuqrxNkWrSHAWLFhAeno6NpuNIUOGsGHDhkuWf/vttxkwYAAhISEkJyfzwAMPUFRUdI2iFdeC3W7n5ptvZtiwYSiKwuHDh/nggw84d+5csEMTQog2Iyk0iQHxA0gISWBN1ho+PPohhVWFAWUiU01Meyba/9hb/V3zfF4FhdnlhKGSFBdFRUgqLnvitQz/koKe4Lz33ns8+uijPPXUU+zcuZNx48Yxffp0srKy6i3/1Vdfcd999/Hggw+yb98+3n//fbZv3868efOuceTialMUhUGDBjFjxgxCQkIoLi7mgw8+oKCgINihCSHEdUGLiSFs4kRChg29bFm37salu+rU0ABoltrZ6LctKsVZrpN7rIT8EyUc21nA6a/P4ilyonv0Fo3/SgQ9wfnzn//Mgw8+yLx58+jVqxfz588nLS2NhQsX1lt+y5YtdO7cmUceeYT09HTGjh3L97//fVmpug1LTk7mjjvuIC0tjbi4OOLi4i5/kBBCCFSLBXNiAhgGld/spPKbb3Dn5GDoTUtEVBNoJl+Ss+vf5ez7sIKQiNqlHRSvC9OZHIycMy0a/5UIaoLjcrnYsWMHU6dODdg+depUNm3aVO8xo0ePJjs7m+XLl2MYBvn5+SxevJhbbrmlwes4nU5KS0sDbuL6YrPZuOmmm7jppptQVd+vra7rMspKCCEawXP2LM4jR3AeOUr5hq9wnz7dpOMVRWHMjyL9j8vPeuncL46eI5OJTg7FrFYRUXGC0OLWMygkqAlOYWEhXq+XxMTANrvExETy8vLqPWb06NG8/fbbzJkzB4vFQlJSElFRUfzf//1
fg9d5/vnniYyM9N/S0tJa9HmIa0NRFKzW2t7827ZtY+nSpezfvz+IUQkhROtnTkpCiwhHMfkWy9QdjgbLlrnL6m2m6jszlGH3+xZH1r2+bZpZJSUjiozu4fTqDOnJljrHBUvQm6ggcKVp8PXQvnhbjf379/PII4/w9NNPs2PHDj7//HNOnDjBD37wgwbP/+STT1JSUuK/nW5i5ipaH8MwKCkpwev18tVXX7F69WpcLtflDxRCiHZIi4oiYvp0zKmply27I38H2/O213+e6iWqdE9gAhQfb6NrB4OkWFM9RwVHUCOJi4tD07Q6tTUFBQV1anVqPP/884wZM4b//u//BqB///6EhoYybtw4fvvb39Y7E67Vag345i+uf4qiMG3aNL799lu2bdvG8ePHKSws5IYbbiA+Pj7Y4QkhRKum19NVI94eT5mrjMEJg0mPTK/3OEXzVT5UFrWezsQNCWoNjsViYciQIaxatSpg+6pVqxg9enS9x1RWVvr7YNTQNF+VW31VaqJt69+/P7fddhvh4eGUlpby4Ycfsnfv3mCHJYQQrZIaEgKAqZ4vgkMShzA7YzZdo7qiKvWnB4bu+5zN2u5g1wWrjBeXucgucnO2tPXMVxb0JqrHH3+cf/zjH7z22mscOHCAxx57jKysLH+T05NPPsl9993nL3/rrbeydOlSFi5cyPHjx9m4cSOPPPIIw4cPJyWl/kmKRNuWkJBAZmYmnTt3Rtd1tm/fTkVFRbDDEkKIVsfWuzdhE8ZjaqCVRFM1nF4nu8/u5tuz39aZ3TgqrbbhZ9PCEsoLfZ1xTp6sYNcBC0ezWk+CE/TGsjlz5lBUVMRzzz1Hbm4uffv2Zfny5XTq1AmA3NzcgDlx7r//fsrKynjppZf46U9/SlRUFJMnT+aFF14I1lMQrYDFYmHq1Kns27cPm81GaGhosEMSQohWRzGZMCcl4ThwAMehQ2hhYaCq6BWVWDp3wt2jEweKDpBV5vvcPXT+EKmhqYxO9bWqpI+xMfbhSL56uQSAg59VMvTecAC8qgWvZqv/wkGgGO2wXae0tJTIyEhKSkqIiIgIdjjiKsrJyaGwsJB+/fo12HFdCCHam7LVq/EUBc4Mr9pthN86gwNFB9h/rnZ0qkkxMTNjZkCz1YJJvvluotJM3P1mIju3HCVr2zYs/UYwfVgE5O6G6M4Ql0FLasrnd9CbqIS4WpxOJ2vXrmXLli2sWLECxyWGRQohRHti698fa0ZX7P36Yu/fz79dVVT6xPXhzu53MjZlLAAew0Oxszjg+DEP++bEKT7twVmuo5vDKAnLwFBN4CyFktOQs/OaPZ/6SIIj2iyr1crgwYPRNI2srCyWLFnS4PxKQgjRnpgTEggZMgRb796YkpJAAS6q5Y6zx2FRffPaVLgD+zVmTLT777sqfCOqvCY7To8XzL6OzBjBHWkV9D44QlxNvXv3JjExkdWrV1NSUsLHH3/MsGHDGDBggDRZCSEEYIqOJvo736mz3ayZmdFlBl7Di0ULnMAvNE5DMyt43YG9XMocHjC1jmlZpAZHtHmxsbHMnj2bjIwMDMNg27ZtfPbZZ3g8nmCHJoQQrYY7N5eytWup2rXLv01TtTrJTR0GRNhMqKqCqoDb2zrmyJEaHNEumM1mJk+eTEpKCps2bcJms2Eyya+/EELU0B0OPGcLUarnlqvxTf43nC47Te/Y3nSL7ubfXlMJbhiQmBjK1FEdCIu2Uukq4cip86gWJ4MHXMtnEEje4UW70rNnTxITEwkLC/Nvc7vdmEwmabISQoh6eA0vLt2FfnGfmgveMiPi7ETE+frlFBeW4NUNX+YTRJLgiHYnOjo64PHatWvxeDxMmjSJkOpZPoUQQjROnTxGUfFqdgjynDjSB0e0a8XFxZw5c4YzZ86wZMkSzpw5E+yQhBDiurLzX+V43F5cVR68bh3DFsXJ1BnkpNwY1LgkwRHtWlRUFLNnzyYmJoaqqio+/fRTvv76a3S9dXSSE0K
I1srj9FXdlOZ6OHuqjCNf55NztBhHeetYrkESHNHuRUVFcfvtt9OzZ08AvvnmGz799FNZz0oIIS5h9A99k/2ZbLWdcUoLq8jeU4TzSCnuwuBOrioJjhCAyWRi/PjxTJ48GbPZTG5uLitXrgx2WEIIcc0oqopiMoHWuO65tnBfCmF4ITzWhslSPfrKU0VU8QHsJ/dcrVAbRToZC3GBjIwM4uPjWbt2LWPGjAl2OEIIcc1YOnXCUr3QtaewEMehQyiKQkiXEFRUukR14WzlWQqrCom2RaOYfGtBndrqIG9XCD0mJeGs8pBz1E3ZiUrsIcGtQ5EER4iLREZGMmvWrIBtp06dIjY2NmB4uRBCtEXOo0dxHj2Kt6QUgK6pw0lMm4CmaBRUFrD/3H66RXUjKjTKf8zK585hCY2l43Ab6V0jSHfqYJZRVEK0aufOnWP16tUsWbKEU6dOBTscIYS4qswdOmBOS0MNCwVARSHOHhewmjhA2jArI+fVruj9yS+KKDnTemaIlwRHiMswmUxER0fjdDpZsWIFW7ZskVFWQog2S7XZsPfpgxoaeslymllh8HfDGf9olH9beYH3KkfXeJLgCHEZERERzJw5k759+wLw7bff8tFHH1FWVhbkyIQQ4uozLrNuX9+ZocR0MvvKGnC2qIrPNqms3BLcZEcSHCEaQdM0Ro8ezdSpU7FYLBQUFLBkyRJOnjwZ7NCEEOKqqFm+xnC5GlG49q5uGHh1A10P7lINkuAI0QSdO3cmMzOThIQEXC4XRUVFwQ5JCCGuCkuXLmjRUWgXLW9Tn5ruOYZuAAqGoqEr2iWPudpkFJUQTRQeHs5tt93GwYMH6dWrV7DDEUKIq8KSloYlLQ3DMKjatw9Pbi7YKiChblm9ujXq9NdOusyOojB6IBarJDhCXHdUVaV3797+x16vl+XLl9OnTx+6dOkSxMiEEKJllX32Gd6ycgDiDDeThkzDdtFCmlXnfRmO+YJZjYNNmqiEaAH79u0jNzeX1atX89VXX+H1tp6RBEIIcSVCRo7E1ttXW21RzcTZ4wizBM4JljHJDtSzsngQSYIjRAvo27cvAwcOBGD//v0sW7aMkpKS4AYlhBAtwBQTgzUj45JlqvsjU3TMDa5yIsuOEFF65BpE1zBJcIRoAaqqMnz4cG6++WZsNhtFRUUsXbqUo0ePBjs0IYRoMeXuco6cP0JBZUHAdkeZr+qmvEDHrHmJtZ4nxhbcqTQkwRGiBXXo0IE77riD5ORk3G43a9euZffu3cEOSwghWkSps5RdZ3eRU54TsD2pj28eHGu4QmykjQkDVcb0NwcjRD9JcIRoYSEhIdxyyy0MHjwYm81G165dgx2SEEJcEUXTsGZ0hc5p9e63R/tGTOmtqPuhjKIS4ipQVZWhQ4fSr18/rFarf3thYSFxcXFBjEwIIZpOsVgIGTIECm1wbn+d/Wr1iHDD23p6GUsNjhBX0YXJTVZWFkuXLmXdunV4LjP1uRBCXE9UzdfLWPfC2XNVrNyisnpbcN/nJMER4hopKSlBURQOHz7M0qVLOXfuXLBDEkKIRjEMA93pBGf9yzbUTFqse0HXDVweg2B/j5MER4hrpF+/fsyYMYOQkBCKi4v54IMPOHjwYLDDEkKIyzIcDkqWfQgr1te7v6aJ6uxhF4c+BVAIdmOVJDhCXEPJycnccccdpKWl4fV6+fLLL1m7di1utzvYoQkhRLNZwmrTiW/fhRO7RlNgDA5iRJLgCHHN2Ww2brrpJoYPH46iKBw9epTs7OxghyWEEJcVa4tlXOo4ukQFLkmT0MPMuEci/Y9dByyUfG67+PBrSkZRCREEiqIwcOBAkpKSOH36NOnp6cEOSQghLstqshIdmlRnu6Io9JsVRmicxvJfFQGgO4K7LpXU4AgRRElJSQwbNsz/uKqqiq+++gqXq/6OfEII0Rp4dA+6odfZ3mWcncnPmVAMD5rXEYTIakkNjhCtyPr168nKyiI7O5sbbriB+Pj
4YIckhBB+Fe5KjuV9zYnSE4SYQrip801oNT2Mq2mqF1Ux0JTgzvonNThCtCKDBw8mLCyM0tJSPvzwQ/bu3RvskIQQws+p6ZwoPQFApacSRz21NFHhFixmCA+RJiohRLWEhAQyMzPp3Lkzuq6zadMmVq5cidPpDHZoQoh2TDGZQFXoOPFmpneeHuxwGkUSHCFaGavVytSpUxk9ejSqqnLy5EmWLFlCcXFxsEMTQrRTitlM2IQJqDYbtgo3lorWP7WFJDhCtFJ9+/Zl5syZREREoKoqISEhwQ5JCNGOmRMS0B0OylauInT3sQbLnS914XRBaUVwp/qTTsZCtGLx8fHMnj2bqqoqLBaLf7vL5Qp4LIQQ15Ja4SDiq7147EMgLXBOHMPrG11lBHkqY6nBEaKVs1gsREbWTqC1d+9eFi9eTF5eXhCjEkK0R0r1F6sUeyIdiYHs1vs+JAmOENcRXdc5ePAg5eXlfPzxx+zatQsj2F+ThBDthikmhtCRI+jQfQiJoYnkVuRQ6a4MKGNYI9AVMx5TaJCi9JEER4jriKqqzJw5k4yMDAzDYNu2bXz++ec4HMGdUEsI0X5YOnXCFBeL2+vmVGkW+4r2BTukekmCI8R1xmw2M3nyZMaPH4+maZw+fZrFixeTm5sb7NCEEO2EOTmZsr6dqUyNwu1tnSOqJMER4jrVs2dPZs2aRVRUFJWVlSxfvpzKysrLHyiEEFdIi4xE7ZiKJzq87k53FYrhRfUGd/4uGUUlxHUsJiaG2bNn89VXXxEdHS1DyYUQQacpXhR0NDxBjUMSHCGucyaTiYkTJwZsO3fuHFVVVaSmpgYnKCFEm+YtL8fILUCrqICQwIEOUREWrJYqwkODu1SDJDhCtDEej4fVq1dTXFzM4MGDGTJkCIoS3DcaIUTb4s7ORt+6g4jSLCzWXIoTz2JOSCBkxIhgh+YnfXCEaIOSkpIA+Oabb/jkk0+kb44QokWpdnvAY8PpwnU6G09hUZAiqksSHCHaGJPJxPjx45k8eTJms5nc3FwWL15MdnZ2sEMTQrQR5rQ0QidOIGzUaEJ79Ua1Wav3GJSUuXC5oEyWahBCXA0ZGRnEx8ezevVqioqKWL58OQMHDmTo0KGoqny3EUI0n6KqpHTuS0rnvgDoLhcYBorZjHf/GQxAl6UahBBXS2RkJLfffju9e/cGoKCgQPrjCCFanGqxoFqtKK3oy5PU4AjRxmmaxtixY0lNTSUxMVESHCHEVWVYwtEVHY9JC2ockuAI0U6kp6cHPN64cSOapjF8+HBpshJCNNnx4uN8U/ANKWEpDC6OQq+qwtqtG/i/RMkwcSHENVZUVMS+fb71Y/Ly8rjhhhsID69nRlIhhLgEAwMMcGWdxltSgrlDB8AW7LAA6YMjRLsUGxvL1KlTsVgsFBQUsGTJEk6ePBnssIQQbUErWapBEhwh2qnOnTuTmZlJQkICLpeLlStXsmnTJrxeb7BDE0Jcx1Q8rWKphlaR4CxYsID09HRsNhtDhgxhw4YNlyzvdDp56qmn6NSpE1arla5du/Laa69do2iFaDvCw8O57bbb6N+/PwB79+7l888/D3JUQojrWXSkFasFItr7Ug3vvfcejz76KAsWLGDMmDG88sorTJ8+nf3799OxY8d6j/nOd75Dfn4+ixYtIiMjg4KCAjye4GaKQlyvVFVl5MiRJCcns379evr06RPskIQQ4ooFPcH585//zIMPPsi8efMAmD9/PitWrGDhwoU8//zzdcp//vnnrF+/nuPHjxMTEwP4qtqFEFemU6dOzJ07F4vF4t927tw5IiMj0bTgDvcUQoimCmoTlcvlYseOHUydOjVg+9SpU9m0aVO9x3z00UcMHTqUF198kdTUVLp3787PfvYzqqqqGryO0+mktLQ04CaEqOvC5KaiooJPPvmEZcuWUVJSEsSohBCtkd1kJzEkkShbVMD2kjIXLjeUBXkJvCuuwSkoKODUqVP1Jhjjx4+/5LGFhYV
4vV4SExMDticmJpKXl1fvMcePH+err77CZrPxwQcfUFhYyEMPPcS5c+ca7Ifz/PPP8+yzzzbyGQkhAMrLywHfkPKlS5cybtw4MjIyghyVEKK1SA5LJjksGYAD/VxklzjpYirF67FhGKAHea2GZic4ubm53HvvvXzxxRd19hmGgaIojR6NcfHMqjXH10fXdRRF4e233yYyMhLwNXPdcccdvPzyy9gvWuEU4Mknn+Txxx/3Py4tLSUtLa1RsQnRXiUmJpKZmcnatWvJzc1l7dq15OTkMHr0aEymoLduCyFaie152zlZcRJM8E3RLuKqugCRwQ6r+QnOj370I3bu3MkLL7xA//79sVqtlz/oInFxcWiaVqe2pqCgoE6tTo3k5GRSU1P9yQ1Ar169MAyD7OxsunXrVucYq9XarPiEaO9CQ0O55ZZb+Oabb/jmm284ePAgBQUFTJkyhaioqGCHJ4RoBXrE9MDpdZJbkQvAscozeJUYFC0kqHE1O8FZv349f/zjH3nggQeafXGLxcKQIUNYtWoVs2bN8m9ftWoVM2fOrPeYMWPG8P7771NeXk5YWBgAhw8fRlVVOnTo0OxYhBD1U1WVoUOHkpyczNq1azl37hx79+5l7NixwQ5NCNEKRFgiGFgajXb6GDnxKijVMxkHed27ZncyVhSlRZp5Hn/8cf7xj3/w2muvceDAAR577DGysrL4wQ9+APial+677z5/+bvvvpvY2FgeeOAB9u/fz5dffsl///d/8x//8R/1Nk8JIVpGamoqmZmZ9OrVi5EjRwY7HCFEK6KfyqbneTsJhBNliSbY61DBFSQ4d955J5988skVBzBnzhzmz5/Pc889x8CBA/nyyy9Zvnw5nTp1Anx9fbKysvzlw8LCWLVqFcXFxQwdOpTvfve73Hrrrfz1r3+94liEEJcWEhLCuHHj/H1wDMNg48aNnDt3LsiRCSFag8SQJGxoqIaOqruCGotiGEazujmvW7eO//zP/2T69OnceuutxMbG1ikzePDgKw7waigtLSUyMpKSkhIiIiKCHY4Q1609e/awefNmTCYTo0ePpmfPnsEOSQgRBKWfr8BbUkLYxAkc2lfJyic8aGEOfrK2X8tepwmf383ugzN58mQAXnrpJV5++eWAfU0dRSWEuD5lZGRw+vRpsrOz+fLLL8nJyWHcuHGYzeZghyaECBJ7GGDyoNqCW4PT7ATn9ddfb8k4hBDXIbvdzvTp09m9ezfbt2/n6NGjnD17lilTptRbqyuEENdKsxOc733vey0ZhxDiOqUoCgMHDiQpKYk1a9ZQUlLCsmXLmDBhgkwMKIQImhZZquHw4cNs3ryZI0eOtMTphBDXoaSkJDIzM+nYsSOGYcg8OUK0U2UVbrweFbczuGvYXdF0pO+//z4/+9nPyM7O9m/r0KEDf/rTn7jjjjuuODghxPXFZrMxbdo0ioqKiIuL8293Op0y2aYQbVjo2DHg9YJh4MorBCMWXb9O58FZvnw5c+fOJTIykt///ve8+eabPP/880RGRjJ37lw+++yzloxTCHGdUBQlILkpLCzkX//6F3v37g1iVEKIq0kLC0OLjER3ODCOHA92OMAVDBMfM2YMERERfPrpp6hqbZ5kGAbTp0+nrKyMjRs3tligLUmGiQtx7WzatMmf3KSnpzNhwoSAVcuFEG1H8ZIlHDzg4cuPhkC4i5+v6d2i52/K53eza3B27drFQw89FJDcgO/b20MPPcTu3bube2ohRBsyevRoRo8ejaqqnDhxgiVLllBQUBDssIQQV4Hh8WLSLGg6mL0t0s232Zp9dU3TcLnqH+PudrvrJD5CiParb9++zJw5k4iICMrKyvjoo4/49ttvgx2WEKKFKRYLmqqhGKB5m9VA1GKanYUMGzaMF198kaqqqoDtTqeTP/7xj4wYMeKKgxNCtB3x8fHMnj2bLl26oOs6W7Zs4fjx1tFWL4RoGWETxqOlJAMGiuEJaizNHkX17LPPcsMNN9C
lSxfuvPNOkpKSyM3NZenSpRQVFbF27dqWjFMI0QZYLBamTJnC/v37yc7OJj09PdghCSFakCkmBmLyAR0I7moGzU5wxo4dy8qVK3niiSd4+eWXMQwDVVUZMWIE77zzDqNHj27JOIUQbUjv3r3p3bu286HH4+HIkSP07NkTRQn+KsRCiOYLTbCB1QER12mCAzBhwgQ2b95MZWUl58+fJzo6mpCQkJaKTQjRTmzcuJFDhw5x8uRJJk2ahM1mC3ZIQojrXIv0BA4JCSE1NVWSGyFEsyQmJqJpGqdPn2bx4sXk5uYGOyQhxHWuSTU4b775JrfccguxsbG8+eably1/3333NTswIUT70bNnTxISEli9ejXFxcV88sknDBkyhEGDBkmTlRDXmbKCUgynBW9ZcEdRNWmiP1VV2bJlC8OHD7/sMHBFUfB6g9v+1hCZ6E+I1sntdrNx40YOHz4MQGpqKpMnT8Zutwc5MiFEY21f9g1fPheCaqvgsU1DWvTcTfn8blINzokTJ0hOTvbfF0KIlmQ2m5k4cSIpKSl89dVXFBUVoet6sMMSQlyHmpTgdOrUqd77QgjRkrp37058fDwOh4PQ0NBghyOEaALDHIKhKBjqFY1jumItOt3w1q1b+dvf/saBAwda8rRCiHYoOjraX2MMvlrjTz75hMrKyiBGJYS4HLNmQ0FFVbSgxtHsBGfevHncf//9/sfvvvsuo0eP5qGHHmLQoEFs3ry5JeITQgh0XWfTpk3k5OSwePFisrOzgx2SEKIBFtWMgoLKdZrgfPHFF0yePNn/+H//93+ZNm0au3btYvTo0fzud79rkQCFEEJVVWbMmEFsbCwOh4Ply5ezbds26Z8jhGhQsxOcvLw8fz+cnJwc9u3bx5NPPkn//v35yU9+wtdff91iQQohRGRkJLfffrt/BuRdu3bxySefUFFREeTIhBABNBUUxXcLomYnOGazGYfDAfhmIbXZbIwcORLwtZ0XFxe3SIBCCFFD0zTGjh3LlClTMJvN5OXlsWTJkjqL/gohgicuNQZbuI2o5KigxtHsBKdnz5689dZblJWVsWjRIsaMGYPZbAYgOzub+Pj4FgtSCCEu1KVLFzIzM4mLi6Nr164yT44QrYiCUv0zuJo9huunP/0pc+fO5Z133gFg2bJl/n1r1qyhf//+VxycEEI0JCIigpkzZwZsq6ysRNd1wsLCghSVEKK1aHaCc+edd5KWlsamTZsYNmwY48aN8+/r0KEDmZmZLRKgEEI0RNNqR2nous6aNWs4d+4cEyZMoHPnzsELTIh2zHm+DE+VC8d5D5AUtDiuaBaekSNH+vvdXOjZZ5+9ktMKIUSTuVwuPB4PTqeTlStX0rdvX0aOHHnZZWWEEC3L6/Zg6Dq6J7ijHOUvXwjRJthsNmbOnOlvHt+7dy8ffvghpaWlQY5MCBEMTarB6dKlCx988AEDBgwgPT39kqv8KorCsWPHrjhAIYRoLFVVGTlyJMnJyaxbt46zZ8+ydOlSxo8fT5cuXYIdnhDiGmpSgjNhwgT/6p0TJky4ZIIjhBDB0qlTJzIzM1mzZg35+fns2LGDzp07S3OVEO1IkxKc119/3X//jTfeaOlYhBCixYSFhXHrrbeyY8cOMjIyJLkRop0J7lKfQghxFamqyrBhwwK27d27F7vdTteuXYMUlRDiWmj2V5rXX3+dZ555pt59zzzzDG+++WZzTy2EEFdFYWEhmzdvZs2aNWzYsAGPxxPskIRoc3wT/dXcgqfZCc5f//pXoqOj690XFxfHX//612YHJYQQV0NMTAyDBg0C4MCBAyxbtkyWlRGihdkTojCHWrHHRwQ1jmYnOEePHqVv37717uvduzdHjhxpdlBCCHE1qKrK0KFDueWWW7Db7Zw7d46lS5fK+5UQV0GwhyFdUa+7kpKSBrdL1a8QorVKTU0lMzOTlJQUPB4PX3zxBRs2bAh2WEKIFtTsBKdfv368++679e5755136NevX7ODEkK
Iqy0kJIRbbrmFoUOHoihKg03uQoim0asqMZwuvOUVQY2j2aOofvSjH3HPPffwve99j4ceeogOHTqQnZ3NwoULWbJkiXQyFkK0eoqiMHjwYDp16kRsbKx/u8PhwGazBTEyIa5fhscDug4ed1DjaHaCc/fdd3Pw4EGef/55/vnPf/q3q6rKr371K7773e+2SIBCCHG1XZjcuFwuli1bRmJiImPHjsVsNgcxMiFEc13RPDjPPfcc//Ef/8HKlSspLCwkPj6eqVOn0qlTp5aKTwghrqmcnBzKysooLS2loKCAG2+8kZiYmGCHJYRoIsUwDCPYQVxrpaWlREZGUlJS4l96QgghauTl5bFmzRoqKirQNI3Ro0fTq1evYIclxHUhd3shS39cQmikm/tX9GzRczfl8/uKRlE5nU5eeeUV7rrrLqZOneofavnhhx9y/PjxKzm1EEIETVJSEpmZmXTs2BGv18uGDRtYs2YNLpcr2KEJcd0wgjxQvNlNVIWFhUyaNIl9+/aRlJREfn4+ZWVlACxbtowVK1awYMGCFgtUCCGuJZvNxrRp09izZw/btm3j2LFjmM1mxo8fH+zQhBCN0OwanJ///OcUFxfz9ddfk5WVxYUtXZMmTWL9+vUtEqAQQgSLoij079+f2267jcTExDrrWgkh6lKUYE/x59PsBOeTTz7hueeeY/DgwXWeTM2QcSGEaAsSEhKYOXMmdrvdv+3AgQPSZCVEPdTwcBS7DS0yMrhxNPfA0tLSBkdLud1umclYCNFmHTlyhA0bNrBkyRIKCgqCHY4Qoh7NTnDS09PZvHlzvfu2bdtGjx49mh2UEEK0ZlFRUYSHh1NWVsZHH33Enj17gh2SEOIizU5wvvvd7/LCCy/w4Ycf+vvfKIrC9u3b+X//7/9x7733tliQQgjRmsTHx5OZmUmXLl3QdZ3NmzezYsUKnE5nsEMTQlRrdoLzi1/8gjFjxjBr1iwSExMBmDZtGiNHjmTEiBH85Cc/abEghRCitbFYLEyZMoWxY8eiqiqnTp1iyZIl5OfnBzs0IQRXMEzcbDazfPly3nvvPT799FPy8/OJi4tjxowZzJ07F1W9oil2hBDiutC7d28SEhJYvXo1paWl0vFYiFaiWQlOVVUVU6ZM4dlnn2Xu3LnMnTu3peMSQojrRlxcHLNnz+b06dOkpaX5txuG0WqGzArR3jSrmsVut7Nnzx5MpitaykoIIdoMi8VC165d/Y/LyspYsmQJeXl5QYxKiPar2e1Io0aNYtu2bS0ZixBCtBnbt2/n3LlzfPzxx+zcuZN2uOyfEEHV7ATnT3/6E6+88gpvvvkm5eXlLRmTEEJc98aNG0e3bt0wDIPt27fz2WefUVVVFeywhGg3rqgGJzs7mwceeIDIyEjCw8OJiIjw3yKDPIOhEEIEk9lsZtKkSUycOBGTyUR2djZLliwhJycn2KEJ0S40uxPNHXfc0ZJxCCFEm9S9e3fi4+NZvXo158+f59NPP+Wmm24K6IwshGh5TU5wqqqqWLZsGT169CA+Pp7bbruN+Pj4KwpiwYIF/OEPfyA3N5c+ffowf/58xo0bd9njNm7cyIQJE+jbty+7du26ohiEEOJqiY6OZtasWWzatInCwkJSUlKCHZIQbV6TEpycnBzGjx/PiRMn/MMff/azn/HZZ58xcuTIZgXw3nvv8eijj7JgwQLGjBnDK6+8wvTp09m/fz8dO3Zs8LiSkhLuu+8+brjhBplYSwjR6plMJsaPH4/b7UbTNMA3jPzs2bMkJCQEOToh2p4m9cH51a9+xZkzZ/jVr37Fp59+yl/+8hcsFgs//OEPmx3An//8Zx588EHmzZtHr169mD9/PmlpaSxcuPCSx33/+9/n7rvvZtSoUc2+thBCXGtms9l/f8eOHSxbtozt27ej63oQoxKi7WlSgrNq1Sp++ctf8uyzzzJ9+nQeeeQRXnvtNb799ttm1aK4XC527NjB1KlTA7ZPnTqVTZs2NXjc66+
/zrFjx/jNb37T5GsKIURrUbN21c6dO/nkk0+oqKgIckRCtB1NSnDy8vIYP358wLaJEydiGEazEpzCwkK8Xq9/LasaiYmJDU6OdeTIEZ544gnefvvtRk806HQ6KS0tDbgJIUSwjRkzhilTpmA2m8nLy2PJkiVkZWUFOywh2oQmJTherxe73R6wzWazAeDxeJodxMVTmTc0vbnX6+Xuu+/m2WefpXv37o0+//PPP09kZKT/JqMXhBCtRZcuXcjMzCQuLg6Hw8Hnn3/O1q1bpclKiCvU5FFUhw4dCqg58Xq9ABw8eLBO2cGDB1/yXHFxcWiaVqe2pqCgoE6tDvimPv/666/ZuXMnP/rRjwDQdR3DMDCZTKxcuZLJkyfXOe7JJ5/k8ccf9z8uLS2VJEcI0WpEREQwc+ZMtm7dyt69e9m3bx89evQgKioq2KEJcd1qcoJz//3317v93nvv9d+vqYGpSX4aYrFYGDJkCKtWrWLWrFn+7atWrWLmzJl1ykdERLBnz56AbQsWLGDt2rUsXryY9PT0eq9jtVqxWq2XjEUIIYJJ0zRGjx5NcnIyHo9HkhshrlCTEpzXX3+9xQN4/PHHuffeexk6dCijRo3i1VdfJSsrix/84AeAr/blzJkzvPnmm6iqSt++fQOOT0hIwGaz1dkuhBDXo4u/qOXn53PixAmGDx+OqjZ78nkh2p0mJTjf+973WjyAOXPmUFRUxHPPPUdubi59+/Zl+fLldOrUCYDc3FzpdCeEaJe8Xi9r1qyhvLyc3NxcpkyZQnh4eLDDEuK6oBjtcInb0tJSIiMjKSkpISIiItjhCCFEg06dOsW6detwOp1YLBYmTJjQYHO8EK1BwSEXi39wlrB4jfv+ndSi527K57fUdwohRCvWqVMnMjMzSUxMxOVysWrVKjZu3HjZPo5CtHeS4AghRCsXFhbGrbfeysCBAwHYt28fH374oX+iQCFEXZLgCCHEdUBVVYYPH8706dOx2WyEhYXJ6FAhLqHJw8SFEEIET1paGpmZmQHzkbndbhRFafTs7kK0B/LXIIQQ15nQ0NCAx19++SXnz59nypQpMn+OENWkiUoIIa5jFRUV5OTkcO7cOZYuXcqRI0eCHZIQrYIkOEIIcR0LDQ0lMzOTlJQUPB4PX3zxBevXr7+i9QGFaAukieoSvF4vbrc72GEIcU2YzWY0TQt2GKIZQkJCuOWWW9i5cyc7duzg0KFDFBQUMGXKFKKjo4MdnhBBIQlOPQzDIC8vj+Li4mCHIsQ1FRUVRVJSEoqiBDsU0USKojB48GCSkpJYu3Yt58+fZ8WKFXznO9+RJR5EuyQJTj1qkpuEhARCQkLkzV60eYZhUFlZSUFBAQDJyclBjkg0V0pKCpmZmaxfv57+/ftLciPaLUlwLuL1ev3JTWxsbLDDEeKasdvtABQUFJCQkCDNVdcxu93OTTfdFLDt1KlThIeHExMTE6SohLi2JMG5SE2fm5CQkCBHIsS1V/N773a7JcFpQ0pKSli7di26rjN69Gh69eoV7JCEuOqk7rIB0iwl2iP5vW+brFYrycnJeL1eNmzYwJo1a3C5XMEOS4irShIcIYRo42w2G9OmTWPkyJGoqsqxY8dYunQphYWFwQ5NtEGt5XuSJDjCb+LEiTz66KOXLNO5c2fmz59/TeIRQrQcRVHo378/t956K2FhYZSWlrJs2TL2798f7NCEuCokwWlD7r//fhRFqXM7evToNYth3759ZGZm0rlzZxRFaXIy1KNHDywWC2fOnKmzr6Hkav78+XTu3Nn/+JlnnvE/d03TSEtLY968eZw9e9Zf5sLXJywsjAEDBvDGG280KdYaCxYsID09HZvNxpAhQ9iwYcNlj3n55Zfp1asXdrudHj168Oabb9b7vHr06IHdbictLY3HHnsMh8PR5GsfOHCA2267jcjISMLDwxk5ciRZWVnNeq7i+peYmEhmZiadOnVC13XKysqCHZIQV4UkOG3MTTfdRG5ubsA
tPT39ml2/srKSLl268Pvf/56kpKQmHfvVV1/hcDi48847m51s1OjTpw+5ublkZWWxcOFCPv74Y+67776AMq+//jq5ubns3r2bOXPm8MADD7BixYomXee9997j0Ucf5amnnmLnzp2MGzeO6dOnXzKBWLhwIU8++STPPPMM+/bt49lnn+Xhhx/m448/9pd5++23eeKJJ/jNb37DgQMHWLRoEe+99x5PPvlkk6597Ngxxo4dS8+ePVm3bh27d+/m17/+NTabrUnPU7QtVquVadOmMXnyZIYNG+bfbhhGEKMSomVJgtPGWK1WkpKSAm41o2HWr1/P8OHD/R0On3jiiUtO515QUMCtt96K3W4nPT2dt99++7LXHzZsGH/4wx+YO3cuVqu1SbEvWrSIu+++m3vvvZfXXnvtit5sTSYTSUlJpKamMmPGDB555BFWrlxJVVWVv0zNpHZdu3bll7/8JTExMaxcubJJ1/nzn//Mgw8+yLx58+jVqxfz588nLS2NhQsXNnjMW2+9xfe//33mzJlDly5dmDt3Lg8++CAvvPCCv8zmzZsZM2YMd999N507d2bq1KncddddfP3110269lNPPcXNN9/Miy++yKBBg+jSpQu33HILCQkJTXqeom3KyMjwz5Oj6zqffPIJe/bsCXJUQrQMSXCawOPVG7x5daPRZT1evVFlW9KZM2e4+eabGTZsGLt372bhwoUsWrSI3/72tw0ec//993Py5EnWrl3L4sWLWbBggX8iuJZWVlbG+++/zz333MONN95IRUUF69ata7Hz2+12dF2vN6Hzer38+9//5ty5c5jNZv/2N95445KjilwuFzt27GDq1KkB26dOncqmTZsaPM7pdNapQbHb7Wzbts0/TcHYsWPZsWMH27ZtA+D48eMsX76cW265pdHX1nWdTz/9lO7duzNt2jQSEhIYMWIEy5YtazA20X4dPXqU3NxcNm/ezMqVK3E6ncEOSYgrIvPgNMG/v85ucF9KlI2JPWq/FS/95gwevf4aiIRwK1N6J/off7grB6enbkJz94iOTY7xk08+ISwszP94+vTpvP/++yxYsIC0tDReeuklFEWhZ8+e5OTk8Itf/IKnn366zmynhw8f5rPPPmPLli2MGDEC8NWwXK35M9599126detGnz59AJg7dy6LFi1i0qRJV3zugwcPsnDhQoYPH054eLh/+1133YWmaTgcDrxeLzExMcybN8+/PzIykh49ejR43sLCQrxeL4mJiQHbExMTycvLa/C4adOm8Y9//IPbb7+dwYMHs2PHDl577TXcbjeFhYUkJyczd+5czp49y9ixYzEMA4/Hww9/+EOeeOKJRl+7oKCA8vJyfv/73/Pb3/6WF154gc8//5zZs2fzxRdfMGHChMa/iKLN6969O263m82bN3Py5EkKCwuZMmWK1PaJ65YkOG3MpEmTApooQkNDAV9H01GjRgXUSIwZM4by8nKys7Pp2DEwmTpw4AAmk4mhQ4f6t/Xs2ZOoqKirEveiRYu45557/I/vuecexo8fT3FxcbOuuWfPHsLCwvB6vTidTiZOnMirr74aUOYvf/kLU6ZM4fTp0zz++OM89thjZGRk+PfPmjWLWbNmXfZaF9fyGIZxyZqfX//61+Tl5TFy5EgMwyAxMZH777+fF1980d+cuG7dOv73f/+XBQsWMGLECI4ePcpPfvITkpOT+fWvf92oa+u6L2meOXMmjz32GAADBw5k06ZN/O1vf5MER9TRp08fEhMTWb16NaWlpXz00UcMGzaM/v37yxxJ4rojCU4TfGdohwb3XfzHP3twaqPPO3NgSrNjulhoaGjAh3SN+j50a/q41PfGdal9LW3//v1s3bqV7du384tf/MK/3ev18s477/DDH/4QgIiICEpKSuocX1xcTGRkZMC2Hj168NFHH6FpGikpKfX2B0pKSiIjI4OMjAzef/99Bg0axNChQ+ndu3ej4o6Li0PTtDq1NQUFBXVqVi5kt9t
57bXXeOWVV8jPzyc5OZlXX32V8PBw4uLiAF8SdO+99/prlPr160dFRQX/9V//xVNPPdWoa8fFxWEymeo8n169evHVV1816jmK9icuLo7Zs2ezYcMGjh07xtatW6mqqmLkyJHBDk2IJpE+OE1g0tQGb5qqNLqsSVMbVbYl9e7dm02bNgV03N20aRPh4eGkptZNxnr16oXH4wno1Hro0KGrssL6okWLGD9+PLt372bXrl3+289//nMWLVrkL9ezZ0+2b99e5/jt27fXaUqyWCxkZGSQnp7eqM7OGRkZZGZmBoxSuhyLxcKQIUNYtWpVwPZVq1YxevToyx5vNpvp0KEDmqbx7rvvMmPGDH9TYWVlZZ1mQ03TMAwDwzAadW2LxcKwYcM4dOhQQJnDhw/TqVOnRj9P0f5YLBZuuOEGxo8fj81mk6UdxPXJaIdKSkoMwCgpKamzr6qqyti/f79RVVUVhMiuzPe+9z1j5syZ9e7Lzs42QkJCjIcfftg4cOCAsWzZMiMuLs74zW9+4y8zYcIE4yc/+Yn/8U033WT079/f2LJli/H1118bY8eONex2u/GXv/ylwRicTqexc+dOY+fOnUZycrLxs5/9zNi5c6dx5MiResu7XC4jPj7eWLhwYZ19hw8fNgBj165dhmEYxubNmw1VVY1nn33W2Ldvn7Fv3z7jueeeM1RVNbZs2eI/7je/+Y0xYMCABmM0DMMAjA8++CBg27fffmsoimJs377dMAzDWLp0qdGjR49Lnufdd981zGazsWjRImP//v3Go48+aoSGhhonT570l3niiSeMe++91//40KFDxltvvWUcPnzY2Lp1qzFnzhwjJibGOHHiRMBzCA8PN9555x3j+PHjxsqVK42uXbsa3/nOd5p07aVLlxpms9l49dVXjSNHjhj/93//Z2iaZmzYsKHe53M9//6Lq8Ptdgc8zs3NNXRdD1I04npQcMhpvDwx2/j/7sxt8XNf6vP7YpLgXOR6foO/VIJjGIaxbt06Y9iwYYbFYjGSkpKMX/ziFwFvXhcnOLm5ucYtt9xiWK1Wo2PHjsabb75pdOrU6ZIJzokTJwygzm3ChAn1ll+8eLGhqqqRl5dX7/5+/foZP/7xj/2PV61aZYwbN86Ijo42oqOjjbFjxxqrVq0KOKa5CY5hGMaNN95oTJ8+3TAMw3j99deNxnwHePnll41OnToZFovFGDx4sLF+/fqA/d/73vcCnv/+/fuNgQMHGna73YiIiDBmzpxpHDx4MOAYt9ttPPPMM0bXrl0Nm81mpKWlGQ899JBx/vz5Jl3bMAxj0aJFRkZGhmGz2YwBAwYYy5Yta/C5XM+//+LqO3PmjPHKK68Yn376qVFZWRnscEQr1VoSHMUw2t/MTqWlpURGRlJSUkJERETAPofDwYkTJ/yzwwrRnsjvv7iUY8eOsX79ejweDyEhIUyePJmUlJbrQyjahrOHXbz//bOExWvc9++mTfh6OZf6/L6Y9MERQgjRKF27dmXWrFlER0dTWVnJp59+yo4dO2QGZNEqSYIjhBCi0aKjo5k1axY9evTAMAx27NjBp59+SmVlZbBDEyKAJDhCCCGaxGQyMWHCBCZNmoTJZCInJ4fs7IYnQhUiGGQeHCGEEM3SrVs34uPjOXr0KN27dw92OEIEkBocIYQQzRYVFRUw47nT6WTt2rVUVFQEMSohJMERQgjRgjZu3MjRo0dZsmQJp0+fDnY4oh2TBEcIIUSLGTp0KHFxcTgcDj777DO2bt3qXxdNiGtJEhwhhBAtJiIigpkzZ9KnTx8Adu/ezccff0x5eXmQIxPtjSQ4QgghWpSmaYwZM4Ybb7wRi8VCfn4+S5YsIT8/P9ihiXZEEhzhN3HiRB599NFLluncuTPz58+/JvEIIa5v6enpZGZmEh8fj9lsJioqKtghiXZEEpw25P7770dRlDq3o0ePXrMY/v73vzNu3Diio6OJjo5mypQpbNu2rdHH9+jRA4vFwpkzZ+r
sayi5mj9/Pp07d/Y/fuaZZ/zPXdM00tLSmDdvHmfPnvWXufD1CQsLY8CAAbzxxhtNeap+CxYs8C9tMGTIEDZs2HDZY15++WV69eqF3W6nR48evPnmm3XKFBcX8/DDD5OcnOxf0Xn58uX1Ps+aW1JS4LToS5cuZdq0acTFxaEoCrt27WrWcxSiucLDw5k5cyYzZszAarX6t1dVVQUxKtEeSILTxtx0003k5uYG3NLT06/Z9detW8ddd93FF198webNm+nYsSNTp06tN2G52FdffYXD4eDOO+9sdrJRo0+fPuTm5pKVlcXChQv5+OOPue+++wLKvP766+Tm5rJ7927mzJnDAw88wIoVK5p0nffee49HH32Up556ip07dzJu3DimT59OVlZWg8csXLiQJ598kmeeeYZ9+/bx7LPP8vDDD/Pxxx/7y7hcLm688UZOnjzJ4sWLOXToEH//+99JTU2t93nW3Pbs2ROwv6KigjFjxvD73/++Sc9LiJakqmrAukEHDx7kvffe48SJE0GMSrR1kuC0MVarlaSkpICbpmkArF+/nuHDh2O1WklOTuaJJ57A4/E0eK6CggJuvfVW7HY76enpvP3225e9/ttvv81DDz3EwIED6dmzJ3//+9/RdZ01a9Zc9thFixZx9913c++99/Laa69d0fo2JpOJpKQkUlNTmTFjBo888ggrV64M+NYYFRVFUlISXbt25Ze//CUxMTGsXLmySdf585//zIMPPsi8efPo1asX8+fPJy0tjYULFzZ4zFtvvcX3v/995syZQ5cuXZg7dy4PPvggL7zwgr/Ma6+9xrlz51i2bBljxoyhU6dOjB07lgEDBtT7PGtu8fHxAfvvvfdenn76aaZMmdKk5yXE1XTs2DFcLherVq1i48aNeL3eYIck2iBJcJrC62n4pnsbX9braVzZFnTmzBluvvlmhg0bxu7du1m4cCGLFi3it7/9bYPH3H///Zw8eZK1a9eyePFiFixYQEFBQZOuW1lZidvtJiYm5pLlysrKeP/997nnnnu48cYbqaioYN26dU261qXY7XZ0Xa83ofN6vfz73//m3LlzmM1m//Y33ngDRVEaPKfL5WLHjh1MnTo1YPvUqVPZtGlTg8c5nc46K3Xb7Xa2bduG2+0G4KOPPmLUqFE8/PDDJCYm0rdvX373u9/V+SA4cuQIKSkppKenM3fuXI4fP97wiyBEKzF9+nR/sr5v3z4+/PBDSktLgxyVaGtkqYam2PlWw/si06DbBd+Sd78DegNJSngS9Jhe+3jP++Bx1C039IEmh/jJJ58QFhbmfzx9+nTef/99FixYQFpaGi+99BKKotCzZ09ycnL4xS9+wdNPP42qBua6hw8f5rPPPmPLli2MGDEC8NWw9OrVq0nxPPHEE6Smpl62BuHdd9+lW7du/qGlc+fOZdGiRUyaNKlJ16vPwYMHWbhwIcOHDyc8PNy//a677kLTNBwOB16vl5iYGObNm+ffHxkZSY8ePRo8b2FhIV6vl8TExIDtiYmJ5OXlNXjctGnT+Mc//sHtt9/O4MGD2bFjB6+99hput5vCwkKSk5M5fvw4a9eu5bvf/S7Lly/nyJEjPPzww3g8Hp5++mkARowYwZtvvkn37t3Jz8/nt7/9LaNHj2bfvn3ExsY29+US4qpTVZURI0aQnJzMunXrKCwsZMmSJUyYMIEuXboEOzzRRkiC08ZMmjQpoHkkNDQUgAMHDjBq1KiAGokxY8ZQXl5OdnY2HTt2DDjPgQMHMJlMAVOw9+zZs0mjIF588UXeeecd1q1bV6fG4mKLFi3innvu8T++5557GD9+PMXFxc0aebFnzx7CwsLwer04nU4mTpzIq6++GlDmL3/5C1OmTOH06dM8/vjjPPbYY2RkZPj3z5o1i1mzZl32WhfX8hiGccman1//+tfk5eUxcuRIDMMgMTGR+++/nxdffNHfnKjrOgkJCbz66qtomsaQIUPIycnhD3/4gz/BmT69Nkn
u168fo0aNomvXrvx//9//x+OPP375F0mIIOvYsSOZmZmsWbOGvLw81qxZQ0xMjIy2Ei1CEpymGHRvw/su/kAbcFfjz9vvzubFU4/Q0NCAD+ka9X3o1vRxqe/D+FL7GuOPf/wjv/vd71i9ejX9+/e/ZNn9+/ezdetWtm/fzi9+8Qv/dq/XyzvvvMMPf/hDwDeBWElJSZ3ji4uLiYyMDNjWo0cPPvroIzRNIyUlJWD0Ro2kpCQyMjLIyMjg/fffZ9CgQQwdOpTevXs36jnGxcWhaVqd2pqCgoI6tToXstvtvPbaa7zyyivk5+eTnJzMq6++Snh4OHFxcQAkJydjNpv9CQ9Ar169yMvLw+VyYbFY6pw3NDSUfv36ceTIkUbFL0RrEBoayowZM/jmm29QVVWSG9FipA9OU2imhm+q1viymqlxZVtQ79692bRpU0DH3U2bNhEeHl5nZA74Pkw9Hg9ff/21f9uhQ4coLi6+7LX+8Ic/8D//8z98/vnnATVADVm0aBHjx49n9+7d7Nq1y3/7+c9/zqJFi/zlevbsyfbt2+scv3379jpNSRaLhYyMDNLT0+tNbi6WkZFBZmYmTz755GXLXniNIUOGsGrVqoDtq1atYvTo0Zc93mw206FDBzRN491332XGjBn+psIxY8Zw9OjRgCnuDx8+THJycr3JDfj69hw4cIDk5ORGPwchWgNVVRk6dCiDBw/2byspKbmmU1yItkcSnHbioYce4vTp0/z4xz/m4MGDfPjhh/zmN7/h8ccfr9P/Bnw1IDfddBP/+Z//ydatW9mxYwfz5s3Dbrdf8jovvvgiv/rVr3jttdfo3LkzeXl55OXlNThNu9vt5q233uKuu+6ib9++Abd58+axY8cOdu/eDcDjjz/OZ599xnPPPcf+/fvZv3+/P5H66U9/esWv0U9/+lM+/vhjf1L3wQcf0LNnz0se8/jjj/OPf/yD1157jQMHDvDYY4+RlZXFD37wA3+ZJ598MmCI+uHDh/nnP//JkSNH2LZtG3PnzmXv3r387ne/85f54Q9/SFFRET/5yU84fPgwn376Kb/73e94+OGH/WV+9rOfsX79ek6cOMHWrVu54447KC0t5Xvf+56/zLlz59i1axf79+8HfEnqrl27LtlHSIhg83q9rF69mrVr17J+/fpLjvYUoiGS4LQTqampLF++nG3btjFgwAB+8IMf8OCDD/KrX/2qwWNef/110tLSmDBhArNnz+a//uu/SEhIuOR1FixYgMvl4o477iA5Odl/++Mf/1hv+Y8++oiioqJ6+7p069aNfv36+WtxRo4cyYoVK1i9ejVjx45l7NixrFy5khUrVvg7Ql+Jfv36MWXKFH8fl5KSEg4dOnTJY+bMmcP8+fN57rnnGDhwIF9++SXLly+nU6dO/jI18/HU8Hq9/OlPf2LAgAHceOONOBwONm3aFDBZYVpaGitXrmT79u3079+fRx55hJ/85Cc88cQT/jLZ2dncdddd9OjRg9mzZ2OxWNiyZUvAtT/66CMGDRrELbfcAvg6bw8aNIi//e1vV/RaCXE1qarq/3s4dOgQH3zwAefPnw9uUOK6oxhXMtnIdaq0tJTIyEhKSkoCJp8CcDgcnDhxwj8zrRDtifz+i9YkJyeHtWvXUllZiclkYuzYsXTv3j3YYYnLOHvYxfvfP0tYvMZ9/066/AFNcKnP74tJDY4QQohWKSUlhczMTDp06IDH42HdunWsW7dOmqxEo0iCI4QQotWy2+1Mnz6dYcOGoSgKxcXF9fYbFOJiMkxcCCFEq6YoCoMGDSI5OZmwsDB/gnO5OadE+yYJjhBCiOtCUlJgf45t27ZRXl7OuHHjGpw+QbRfkuAIIYS47pSXl7Nnzx50Xefs2bNMmTLFP1GmaB2CPYRJGjKFEEJcd8LCwrj11lsJCwujtLSUZcuW+ed7EkHWSloNJcERQghxXUpMTCQzM5NOnTqh6zpfffUVq1evxuVyBTs00QpIgiOEEOK
6ZbVamTZtGqNGjUJVVY4fP85HH31EO5ziTVxEEhwhhBDXvX79+jFz5kzCw8MZOHCgjK4SrSPBWbBggX/m1CFDhrBhw4YGyy5dupQbb7yR+Ph4IiIiGDVqFCtWrLiG0bZdEydO5NFHH71kmc6dOzN//vxrEo8QQjRFfHw8d955JxkZGf5thYWFOJ3OIEYlgiXoCc57773Ho48+ylNPPcXOnTsZN24c06dPD1i750JffvklN954I8uXL2fHjh1MmjSJW2+9lZ07d17jyFuf+++/H0VR6tyu5Yq8S5cuZejQoURFRREaGsrAgQN56623Gn18jx49sFgsnDlzps6+hpKr+fPnB6zj9Mwzz/ifu6ZppKWlMW/ePM6ePesvc+HrExYWxoABA3jjjTea8lT9mpKg13j55Zfp1asXdrudHj168OabbwbsnzhxYr3/lzVrSl38PGtuFw+jBThw4AC33XYbkZGRhIeHM3LkyAb/voS43plMtYODq6qq+Pzzz1myZAn5+flBjEoEQ9ATnD//+c88+OCDzJs3j169ejF//nzS0tJYuHBhveXnz5/Pz3/+c4YNG0a3bt343e9+R7du3fj444+vceSt00033URubm7ALT09/ZpdPyYmhqeeeorNmzfz7bff8sADD/DAAw80qpbtq6++wuFwcOeddzY72ajRp08f/yKXCxcu5OOPPw5Y0Rt8i4nm5uaye/du5syZ0+g4L9TUBB1g4cKFPPnkkzzzzDPs27ePZ599locffjjgd3jp0qUB/4d79+5F0zTuvPPOep9nzW3Pnj0B+48dO8bYsWPp2bMn69atY/fu3fz617+WdaZEu+BwODCZTJSXl/Pxxx+ze/du6ZvTjgQ1wXG5XOzYsYOpU6cGbJ86dSqbNm1q1Dl0XaesrIyYmJirEeJ1x2q1kpSUFHDTNA2A9evXM3z4cKxWK8nJyTzxxBOXXNOloKCAW2+9FbvdTnp6Om+//fZlrz9x4kRmzZpFr1696Nq1Kz/5yU/o378/X3311WWPXbRoEXfffTf33nsvr7322hW9EZlMJpKSkkhNTWXGjBk88sgjrFy5kqqqKn+ZqKgokpKS6Nq1K7/85S+JiYlh5cqVTbpOUxN0gLfeeovvf//7zJkzhy5dujB37lwefPBBXnjhBX+ZmJiYgP/DVatWERISUifBqXmeNbf4+PiA/U899RQ333wzL774IoMGDaJLly7ccsstl10VXoi2IDo6mtmzZ9O1a1d0XWfr1q2sWLECh8MR7NDENRDUBKewsBCv10tiYmLA9sTERPLy8hp1jj/96U9UVFTwne98p8EyTqeT0tLSgFtzeHRPgzev7m10WY/uaVTZlnTmzBluvvlmhg0bxu7du1m4cCGLFi3it7/9bYPH3H///Zw8eZK1a9eyePFiFixYQEFBQaOvaRgGa9as4dChQ4wfP/6SZcvKynj//fe55557uPHGG6moqGDdunWNvtbl2O12dF2vN6Hzer38+9//5ty5c5jNZv/2N95445IdFZuboDudzjo1KHa7nW3btuF2u+s9ZtGiRcydO5fQ0NCA7UeOHCElJYX09HTmzp3L8ePH/ft0XefTTz+le/fuTJs2jYSEBEaMGMGyZcsajE2ItsZisXDDDTcwbtw4NE0jKyuLJUuWNPozRly/WsVMxhd/iDR2fZF33nmHZ555hg8//PCS30iff/55nn322SuO84OjHzS4Lzk0mbGpY/2PPz72MR6j/iQl3h7PxLSJ/sfLTyzH6a3bCe7O7nfW2XY5n3zyCWFhYf7H06dP5/3332fBggWkpaXx0ksvoSgKPXv2JCcnh1/84hc8/fTTdRavO3z4MJ999hlbtmxhxIgRgO9DtlevXpeNoaSkhNTUVJxOJ5qmsWDBAm688cZLHvPuu+/SrVs3+vTpA8DcuXNZtGgRkyZNaupLUMfBgwdZuHAhw4cPJzw83L/9rrvuQtM0HA4HXq+XmJgY5s2b598fGRlJjx49Gjx
vcxP0adOm8Y9//IPbb7+dwYMHs2PHDl577TXcbjeFhYUkJycHlN+2bRt79+5l0aJFAdtHjBjBm2++Sffu3cnPz+e3v/0to0ePZt++fcTGxlJQUEB5eTm///3v+e1vf8sLL7zA559/zuzZs/niiy+YMGFCo14/IdqCXr16kZiYyKpVqygpKWH//v319lkTbUdQE5y4uDg0TavzYVBQUFDnQ+Ni7733Hg8++CDvv/8+U6ZMuWTZJ598kscff9z/uLS0lLS0/7+9O4+rKf//AP663W51tZFSt0WLaCGhLBExQ8q+jSxRpmbGMF+DMWQZS8P4jfkyDEq4ZRlkKVkmUxnLREqiGKWMwmRKYiq03z6/Pzw6X9dtH3Vb3s/H4zwe7ud8Pue8z8et8+5zPucco4YH3owNGzZM6vJI5V/8KSkpcHBwkEocBw0ahFevXiEzMxOdO3eW2k5KSgoUFRVhb2/PlVlaWqJ9+/a1xqCuro7ExES8evUKv/32GxYvXgwzMzMMHTq02jZisRju7u7cZ3d3dwwZMgR5eXl12ue77ty5AzU1NUgkEpSUlGDo0KHYvXu3VJ0ff/wRw4cPx19//YXFixdj0aJFUndfTJw4ERMnTqx1X/VN0L/55htkZ2djwIABYIxBV1cXnp6e2LRpE3c58W1isRg9evRAv379pMpdXV25f9vY2MDBwQFdunTB/v37sXjxYlRUVAAAxo8fj0WLFgEAevXqhZiYGOzatYsSHNLmaGlpYdKkSbh58yZ69+4t73BII5NrgqOkpAQ7OztERUVJnUiioqIwfvz4atsdOXIEH3/8MY4cOSJ1V0l1lJWVoays/K/jnWhe/cmO986zqcd2GVvn7Y4yHdXgmN6lqqoqdZKuVNVJt3KOS1Un45rW1UZBQYGLoVevXkhJScHGjRurTXCSk5MRFxeH+Ph4LFu2jCuXSCQ4cuQIPv/8cwCAhoYG8vPzZdrn5eVBU1NTqszCwgKnT58Gn8+Hvr5+lf//enp6MDc3h7m5OY4fP47evXvD3t4e1tbWdTrOhiboQqEQgYGBCAgIwNOnTyESibB7926oq6vLvEunsLAQwcHB8PX1rTUeVVVV2NjY4P79+1x8ioqKMsdjZWVVpzlRhLRGAoGAG5WuFB0djS5dukBfX19OUZHGIPe7qBYvXoy9e/ciMDAQKSkpWLRoER4/foy5c+cCeDP68vbdL0eOHMHs2bOxefNmDBgwANnZ2cjOzq7yxPe+KSooVrvwFfh1rquooFinuu+TtbU1YmJipCbuxsTEQF1dHQYGBjL1raysUF5ejhs3bnBlqampyMvLq/e+GWM1PodCLBZjyJAhSEpKQmJiIrcsXbpU6rKMpaUl4uPjZdrHx8fLXEpSUlKCubk5TE1N65TcmpubY/LkyVi+fHmdj+vtBP1tUVFRGDhwYK3tBQIBDA0NwefzERwcjDFjxshcKjx27BhKSkqkRreqU1JSgpSUFO4Sl5KSEvr27YvU1FSpemlpaTA2Nq51e4S0BWlpaUhJScEvv/yCmzdv0l1WrQlrBnbu3MmMjY2ZkpIS69OnD7t8+TK3zsPDgzk5OXGfnZycGACZxcPDo877y8/PZwBYfn6+zLqioiKWnJzMioqK/s0hyYWHhwcbP358lesyMzNZu3bt2Pz581lKSgoLCwtj2trabM2aNVwdJycn9uWXX3KfXVxcWM+ePVlsbCy7ceMGc3R0ZEKhkP3444/VxvDdd9+xyMhI9uDBA5aSksI2b97MFBUV2Z49e6qsX1paynR0dJi/v7/MurS0NAaAJSYmMsYYu3btGlNQUGDr1q1jd+/eZXfv3mW+vr5MQUGBxcbGcu3WrFnDbG1tq42RMcYAsJMnT0qV3b59m/F4PBYfH88YYyw0NJRZWFjUuJ3g4GAmEAiYWCxmycnJbOHChUxVVZU9fPiQq+Pj48NmzZrFfU5NTWUHDx5kaWlpLC4ujrm5uTEtLS2WkZEhs31HR0fm5uZW5b6
/+uordunSJZaens5iY2PZmDFjmLq6utS+Q0NDmUAgYLt372b3799n27dvZ3w+n0VHR1e5zZb8/SekIcrKytilS5dYQEAACwgIYGfOnGGvX7+Wd1gtWk5aCds5NJPtm5L13rdd0/n7Xc0iwWlqbTHBYYyxS5cusb59+zIlJSWmp6fHli1bxsrKyrj17yY4WVlZbPTo0UxZWZl17tyZHThwgBkbG9eY4KxcuZKZm5szFRUV1qFDB+bg4MCCg4OrrX/ixAmmoKDAsrOzq1xvY2PD/vOf/3Cfo6Ki2ODBg1mHDh1Yhw4dmKOjI4uKipJq09AEhzHGRowYwVxdXRljjAUFBbG6/A1QU4LOmGySnpyczHr16sWEQiHT0NBg48ePZ/fu3ZPZbmpqKgPAIiMjq9yvm5sbE4lETCAQMH19fTZp0iR29+5dmXpisZj7P7G1tWVhYWHVHktL/v4T8m+kpaUxsVjMAgIC2IEDB1hmZqa8Q2qxmkuCw2Os7Y3HFRQUQFNTE/n5+dDQ0JBaV1xcjIyMDO7JtIS0JfT9J21ZXl4ezp8/jxcvXgAAHBwcYGNjI+eoWp5n90tx/NNnUNXmw+P4+71Trabz97vkPgeHEEIIaQ7at2+PCRMmwMrKCgoKCvRAzBauWTwHhxBCCGkOFBUVMXjwYNjY2Eg9oqKwsBDt2rWTX2Ck3mgEhxBCCHnH28nNixcvEBwcjLi4OO75UqT5owSHEEIIqcHjx49RXl6OpKQknDlzBq9evZJ3SKQOKMEhhBBCatCrVy8MHz4cSkpKePr0KUJCQvDo0SN5h0VqQQkOIYQQUgszMzNMmjQJOjo6KCkpQUREBGJjY+mSVTNGCQ4hhBBSBxoaGhg/fjx36/jt27dx7949OUdFqkN3URFCCCF1pKCgAAcHB4hEIty/fx9WVlbyDolUg0ZwCCGEkHoyMTHBiBEjuBcSSyQSJCYmQiKRyDkyUokSHMIZOnQoFi5cWGMdExMTbN26tUniIYSQliI2NhbXr1/HqVOnUFBQIO9wCCjBaVU8PT3B4/Fklj///FMu8QQHB4PH42HChAl1bmNhYQElJSU8efJEZl11ydXWrVthYmLCfV67di137Hw+H0ZGRvD29sazZ8+4Om/3j5qaGmxtbbFv3756HN3/+Pn5ca82sLOzQ3R0dK1tdu7cCSsrKwiFQlhYWODAgQMydfLy8jB//nyIRCKoqKjAysoK4eHh3Pry8nKsWrUKpqamEAqFMDMzg6+vr9Skx6dPn8LT0xP6+vpo164dXFxccP/+/QYdJyGkekZGRlBRUUFubi5CQkKQnp4u75DaPEpwWhkXFxdkZWVJLaampk0ex6NHj7BkyRIMHjy4zm2uXLmC4uJifPTRRw1ONip1794dWVlZePz4Mfz9/XHmzBnMnj1bqk5QUBCysrKQlJQENzc3zJkzBxEREfXaz9GjR7Fw4UKsXLkSt27dwuDBg+Hq6orHjx9X28bf3x/Lly/H2rVrcffuXaxbtw7z58/HmTNnuDqlpaUYMWIEHj58iBMnTiA1NRV79uyBgYEBV+f777/Hrl27sGPHDqSkpGDTpk344YcfsH37dgAAYwwTJkxAeno6Tp06hVu3bsHY2BjDhw/H69ev63WchJCade7cGZMnT4aenh7Kyspw/vx5REdHo7y8XN6htVmU4NQDKy+vfnnnumuNdd/5wtelTl0pKytDT09PauHz+QCAy5cvo1+/flBWVoZIJIKPj0+NP3w5OTkYO3YshEIhTE1NcejQoTrFIJFIMHPmTKxbtw5mZmZ1jl0sFmPGjBmYNWsWAgMD8W/eA6uoqAg9PT0YGBhgzJgxWLBgASIjI1FUVMTVad++PfT09NClSxesWLECWlpaiIyMrNd+tmzZAi8vL3h7e8PKygpbt26FkZER/P39q21z8OBBfPbZZ3Bzc4OZmRmmTZsGLy8vfP/991ydwMBAvHjxAmFhYRg0aBCMjY3h6OgIW1tbrs61a9cwfvx
4jB49GiYmJpgyZQqcnZ1x48YNAMD9+/cRGxsLf39/9O3bFxYWFvDz88OrV69w5MiReh0nIaR2qqqqGDNmDHr37g0ASElJQVhYGF2ykhO6i6oe8kJCq10n0BdB7a3RivxTp8DKq55spqijA/UPhv2v7tmzYCWlMvU6uE39F9FKe/LkCUaNGgVPT08cOHAA9+7dwyeffAIVFRWsXbu2yjaenp7466+/cOHCBSgpKWHBggXIycmpdV++vr7Q0dGBl5dXnS7XAMDLly9x/PhxxMXFwdLSEq9fv8alS5cwbNiw2hvXgVAoREVFRZUJnUQiQUhICF68eAGBQMCV79u3D3PmzKk20SotLUVCQgJ8fHykyp2dnRETE1NtLCUlJTJv6hYKhbh+/TrKysogEAhw+vRpODg4YP78+Th16hR0dHQwY8YMLFu2jEtYHR0dsWvXLqSlpaFbt25ISkrClStXuMt4JSUlACC1Lz6fDyUlJVy5cgXe3t419BghpCEUFBTQt29fiEQiXLhwAa9eveImIpOmRQlOK3P27Fmoqalxn11dXXH8+HH4+fnByMgIO3bsAI/Hg6WlJf7++28sW7YMq1evhoKC9GBeWloazp07h9jYWPTv3x/AmxGW2m6JvHr1KsRiMRITE+sVd3BwMLp27Yru3bsDAKZNmwaxWPxeEpx79+7B398f/fr1g7q6Olc+ffp08Pl8FBcXQyKRQEtLS+qkr6mpCQsLi2q3m5ubC4lEAl1dXalyXV1dZGdnV9tu5MiR2Lt3LyZMmIA+ffogISEBgYGBKCsrQ25uLkQiEdLT03HhwgXMnDkT4eHhuH//PubPn4/y8nKsXr0aALBs2TLk5+fD0tISfD4fEokEGzZswPTp0wEAlpaWMDY2xvLlyxEQEABVVVVs2bIF2dnZyMrKalBfEkLqxtDQEFOmTEF+fr7U752KigqZ37ekcVCCUw/tJ0+qfuU7Gbrm+PF13q7mmDENDUnGsGHDpC6PqKqqAngzVOrg4CD1l8SgQYPw6tUrZGZmonPnzlLbSUlJgaKiIuzt7bkyS0tLqRfQvevly5dwd3fHnj17oK2tXa+4xWIx3N3duc/u7u4YMmQI8vLyatxnde7cuQM1NTVIJBKUlJRg6NCh2L17t1SdH3/8EcOHD8dff/2FxYsXY9GiRTA3N+fWT5w4ERMnTqx1X+/+dcYYq/Evtm+++QbZ2dkYMGAAGGPQ1dWFp6cnNm3axI3OVFRUoFOnTti9ezf4fD7s7Ozw999/44cffuASnKNHj+Lnn3/G4cOH0b17dyQmJmLhwoXQ19eHh4cHBAIBQkJC4OXlBS0tLfD5fAwfPhyurq517kdCSMO1a9dO6g3kf/31F+Li4vDhhx+iQ4cOcoysbaAEpx54inXvrsaqWxtVVVWpk3Slqk66lZdeqjoZ17SuOg8ePMDDhw8xduxYrqzyjh5FRUWkpqaiS5cuMu2Sk5MRFxeH+Ph4LFu2jCuXSCQ4cuQIPv/8cwBvniKan58v0z4vLw+amppSZRYWFjh9+jT4fD709fWhrKws005PTw/m5uYwNzfH8ePH0bt3b9jb28Pa2rpOx6utrQ0+ny8zWpOTkyMzqvM2oVCIwMBABAQE4OnTpxCJRNi9ezfU1dW5xFAkEkEgEHAJDwBYWVkhOzsbpaWlUFJSwtdffw0fHx9MmzYNAGBjY4NHjx5h48aN8PDwAADY2dkhMTER+fn5KC0thY6ODvr37y+VuBJCGh9jDHFxcXjx4gVOnjwJR0dHdOvWTd5htWo0TtZGWFtbIyYmRmo+SUxMDNTV1aXuzKlkZWWF8vJybsIqAKSmpiIvL6/afVhaWuLOnTtITEzklnHjxmHYsGFITEyEkZFRle3EYjGGDBmCpKQkqbZLly6FWCyW2n58fLxM+/j4eJlLSUpKSjA3N4epqWmVyc27zM3NMXnyZCxfvrzWum/vw87ODlFRUVLlUVFRGDhwYK3tBQIBDA0NwefzERwcjDF
jxnBD14MGDcKff/4pdct3WloaRCIRlJSUAACFhYUyQ918Pr/Kd+NoampCR0cH9+/fx40bNzC+HiOMhJB/j8fjYfTo0TAwMEB5eTkuXbqES5cuoaysTN6htV6sDcrPz2cAWH5+vsy6oqIilpyczIqKiuQQ2b/j4eHBxo8fX+W6zMxM1q5dOzZ//nyWkpLCwsLCmLa2NluzZg1Xx8nJiX355ZfcZxcXF9azZ08WGxvLbty4wRwdHZlQKGQ//vjje4mJMcZKS0uZjo4O8/f3l1mXlpbGALDExETGGGPXrl1jCgoKbN26dezu3bvs7t27zNfXlykoKLDY2Fiu3Zo1a5itrW2NcQFgJ0+elCq7ffs24/F4LD4+njHGWGhoKLOwsKhxO8HBwUwgEDCxWMySk5PZwoULmaqqKnv48CFXx8fHh82aNYv7nJqayg4ePMjS0tJYXFwcc3NzY1paWiwjI4Or8/jxY6ampsa++OILlpqays6ePcs6derE1q9fz9Xx8PBgBgYG7OzZsywjI4OFhoYybW1ttnTpUq7OsWPH2MWLF9mDBw9YWFgYMzY2ZpMmTar2eFry95+QlqCiooLdvHmT7d69mwUEBLCjR4+y58+fyzus9yonrYTtHJrJ9k3Jeu/brun8/S4awWkjDAwMEB4ejuvXr8PW1hZz586Fl5cXVq1aVW2boKAgGBkZwcnJCZMmTcKnn36KTp06vde4Tp8+jefPn1c516Vr166wsbHhRnEGDBiAiIgInD9/Ho6OjnB0dERkZCQiIiK4idD/ho2NDYYPH87NccnPz0dqamqNbdzc3LB161b4+vqiV69e+P333xEeHg5jY2OuTuXzeCpJJBJs3rwZtra2GDFiBIqLixETEyP1sEIjIyNERkYiPj4ePXv2xIIFC/Dll19K3bG1fft2TJkyBfPmzYOVlRWWLFmCzz77DN9++63UvmfNmgVLS0ssWLAAs2bNolvECZEjHo+H3r17Y8yYMWjXrh3y8vJw8uRJupW8EfAY+xcPG2mhCgoKoKmpifz8fGhoaEitKy4uRkZGBvdkWkLaEvr+E9J0iouLcfHiRaiqqmLIkCHyDue9eXa/FMc/fQZVbT48juu9123XdP5+F00yJoQQQuRARUUFLi4uUvPmiouL8erVq3rfiUpk0SUqQgghRE4q35kHvLnT6sKFCwgLC0NycrKcI3sP5Hx9iBIcQgghpBmQSCTcnZBXrlzB+fPnUVoq+5T75q65PLmZEhxCCCGkGVBUVMTIkSPh4OAABQUFpKenIyQkBM+ePZN3aC0SJTiEEEJIM2JjY4Nx48ZBXV0dL1++xKlTp/DHH3/IO6wWhxIcQgghpJnp1KkTJk+eDBMTE1RUVOCPP/6ghwLWE91FRQghhDRDSkpKcHZ2xh9//AE9PT0IBAJ5h9SiUIJDCCGENGM9evSQ+pycnIzy8nLY2Ng0mwm9zRFdoiKcoUOHYuHChTXWMTExwdatW5skHkIIIdIKCgoQExOD2NhYREREoLi4WN4hNVuU4LQinp6e4PF4Msuff/7ZZDHs27evyhjq+kNoYWEBJSUlPHnyRGZddcnV1q1bpV5zsHbtWm6/fD4fRkZG8Pb2lroT4e3Y1NTUYGtri3379tX3cAEAfn5+3JN/7ezsEB0dXWubnTt3wsrKCkKhEBYWFjhw4ECVx2VhYQGhUAgjIyMsWrRIqh/9/f3Rs2dPaGhoQENDAw4ODjh37pzUNtauXQtLS0uoqqqiQ4cOGD58OOLi4hp0nIQQ+dPQ0MDAgQPB5/Px+PFjhISEIDs7W95hNUuU4LQyLi4uyMrKklpMTU2bNAYNDQ2ZGOry2P8rV66guLgYH330UYOTjUrdu3fn3gHl7++PM2fOYPbs2VJ1goKCkJWVhaSkJLi5uWHOnDmIiIio136OHj2KhQsXYuXKlbh16xYGDx4MV1dXqXdPvcvf3x/Lly/H2rVrcffuXaxbtw7z58/HmTNnuDqHDh2Cj48P1qx
Zg5SUFIjFYhw9elTqbeeGhob4v//7P9y4cQM3btzABx98gPHjx+Pu3btcnW7dumHHjh24c+cOrly5AhMTEzg7O9Ntp4S0YNbW1pgwYQI0NTXx+vVrnDlzBomJiWiDb16qESU4rYyysjL09PSklsqnZF6+fBn9+vWDsrIyRCIRfHx8UF5eXu22cnJyMHbsWAiFQpiamuLQoUN1ioHH48nEUBdisRgzZszArFmzEBgY+K9+WBUVFaGnpwcDAwOMGTMGCxYsQGRkJIqKirg67du3h56eHrp06YIVK1ZAS0sLkZGR9drPli1b4OXlBW9vb1hZWWHr1q0wMjKCv79/tW0OHjyIzz77DG5ubjAzM8O0adPg5eWF77//nqtz7do1DBo0CDNmzOCSkunTp+PGjRtcnbFjx2LUqFHo1q0bunXrhg0bNkBNTQ2xsbFcnRkzZmD48OEwMzND9+7dsWXLFhQUFOD27dv1Ok5CSPPSsWNHTJo0Cebm5mCM4fr164iIiKAk5y2U4NRDhaSi+qWC1b2upKJOdd+nJ0+eYNSoUejbty+SkpLg7+8PsViM9evXV9vG09MTDx8+xIULF3DixAn4+fkhJyen1n29evUKxsbGMDQ0xJgxY3Dr1q1a27x8+RLHjx+Hu7s7RowYgdevX+PSpUv1OcQaCYVCVFRUVJnQSSQSHDt2DC9evJC6S6Hyclt1SktLkZCQAGdnZ6lyZ2dnxMTEVNuupKREZkRLKBTi+vXr3G2gjo6OSEhIwPXr1wEA6enpCA8Px+jRo6vcpkQiQXBwMF6/fg0HB4dq4929ezc0NTVha2tbbXyEkJZBIBDggw8+gJOTE/dHHU06/h+6i6oeUmKyql2npqUC4+4duc/3YrPBKqrOpNtpKsO05/9epJYW/xSSMtmEpvtgg3rHePbsWaipqXGfXV1dcfz4cfj5+cHIyAg7duwAj8eDpaUl/v77byxbtgyrV6+GgoJ0rpuWloZz584hNjYW/fv3B/BmhMXKyqrG/VtaWmLfvn2wsbFBQUEBtm3bhkGDBiEpKQldu3attl1wcDC6du2K7t27AwCmTZsGsViMYcOG1bsP3nXv3j34+/ujX79+UFdX58qnT58OPp+P4uJiSCQSaGlpwdvbm1uvqakJCwuLarebm5sLiUQCXV1dqXJdXd0ar4mPHDkSe/fuxYQJE9CnTx8kJCQgMDAQZWVlyM3NhUgkwrRp0/Ds2TM4OjqCMYby8nJ8/vnn8PHxkdrWnTt34ODggOLiYqipqeHkyZOwtraWqnP27FlMmzYNhYWFEIlEiIqKohf5EdKKWFhYQCQSSf1+KywshFAobNMJD43gtDLDhg1DYmIit/z0008AgJSUFDg4OEh92QcNGoRXr14hMzNTZjspKSlQVFSEvb09V2ZpaYn27dvXuP8BAwbA3d0dtra2GDx4MI4dO4Zu3bph+/btNbYTi8Vwd3fnPru7uyM0NBR5eXl1OGpZd+7cgZqaGoRCIaytrWFkZCRzie3HH39EYmIioqKi0KtXL/z4448wNzfn1k+cOBH37t2rdV/v/gJhjNX4S+Wbb76Bq6srBgwYAIFAgPHjx8PT0xMAuMuJly5dwoYNG+Dn54ebN28iNDQUZ8+exbfffiu1LQsLCyQmJiI2Nhaff/45PDw8ZF7SV/mdiImJgYuLC6ZOnVqnkThCSMuhoaHB/d4pLy/HL7/8gl9++QWFhYVyjkx+aASnHqwGiqpf+c4JzXJA3eadAEC3vrq1V6ojVVVVqZN0papOupXXaqs6Gde0rj4UFBTQt29f3L9/v9o6ycnJiIuLQ3x8PJYtW8aVSyQSHDlyBJ9//jmANz/A+fn5Mu3z8vKgqakpVWZhYYHTp0+Dz+dDX18fysrKMu309PRgbm4Oc3NzHD9+HL1794a9vb3MCEh1tLW1wefzZUZrcnJyZEZ13iYUChEYGIiAgAA8ffoUIpEIu3fvhrq6Ojey8s0332DWrFnciJKNjQ1ev36
NTz/9FCtXruRG3JSUlLj/b3t7e8THx2Pbtm0ICAjg9lf5nTA3N8eAAQPQtWtXiMViqQnLhJDWIzc3Fy9fvsQ///yDkJAQfPDBBzAwqP8VgZaORnDqQYGvUP2iwKt7Xb5Cneq+T9bW1oiJiZGagBYTEwN1dfUqv/hWVlYoLy+XmtSamppa7xEVxhgSExMhElWfHIrFYgwZMgRJSUlSo09Lly6FWCzm6llaWiI+Pl6mfXx8vMylpMoTv6mpaZXJzbvMzc0xefLkep30lZSUYGdnh6ioKKnyqKgoDBw4sNb2AoEAhoaG4PP5CA4OxpgxY7jEpbCwUOayIZ/PB2OsxkmEjDGUlJTUuN+61CGEtFx6enqYNGkStLS0UFRUhF9++QU3btxARcX7ndvZ3FGC00bMmzcPf/31F/7zn//g3r17OHXqFNasWYPFixfLnEiBNyMgLi4u+OSTTxAXF4eEhAR4e3tDKBTWuJ9169YhIiIC6enpSExMhJeXFxITEzF37twq65eVleHgwYOYPn06evToIbV4e3sjISEBSUlJAIDFixfj3Llz8PX1RXJyMpKTk/Htt9/i119/xVdfffWv++irr77CmTNnuKTu5MmTsLS0rLHN4sWLsXfvXgQGBiIlJQWLFi3C48ePpY53+fLlUreop6Wl4eeff8b9+/dx/fp1TJs2DX/88Qe+++47rs7YsWPh7++P4OBgZGRkICoqCt988w3GjRvHXcZasWIFoqOj8fDhQ9y5cwcrV67EpUuXMHPmTADA69evsWLFCsTGxuLRo0e4efMmvL29kZmZiY8++uhf9xchpPlq3749JkyYwP0Ou3nzJn755Re8fv1azpE1HbpE1UYYGBggPDwcX3/9NWxtbaGlpQUvLy+sWrWq2jZBQUHw9vaGk5MTdHV1sX79enzzzTc17icvLw+ffvopsrOzoampid69e+P3339Hv379qqx/+vRpPH/+HBMnTpRZ17VrV9jY2EAsFuOnn37CgAEDEBERAV9fX+6Bf927d0dERAQ3EfrfsLGxwfDhw7F69WqEh4cjPz8fqampNbZxc3PD8+fP4evri6ysLPTo0QPh4eEwNjbm6lQ+j6eSRCLB5s2bkZqaCoFAgGHDhiEmJkbqYYWrVq0Cj8fDqlWr8OTJE+jo6GDs2LHYsGEDV+fp06eYNWsWsrKyoKmpiZ49e+LXX3/FiBEjALwZ8bl37x7279+P3NxcdOzYEX379kV0dDQ3mZsQ0nopKipiyJAh0NfXR3R0NLKyshATE8P9jmjteKwN3jRfUFAATU1N5OfnQ0NDQ2pdcXExMjIyuCfTEtKW0PefkNYpPz8fV69ehZOTE1RVVRt1X7l/luHYJzlQ7ciHx4m6z0eti5rO3++iS1SEEEJIK6epqYlRo0ZJJTd3797Fq1ev5BhV46IEhxBCCGljHj58iKtXryIkJKTGV8u0ZJTgEEIIIW2MlpYWdHR0UFJSgl9//RWxsbGt7i4rSnAIIYSQNkZDQwPjxo1Djx49AAC3b9/G6dOn8fLlSzlH9v5QgkMIIYS0QXw+HwMHDoSzszOUlJSQk5ODkJAQPHr0SN6hvReU4BBCCCFtmImJCaZMmYJOnTqhtLS01VyqoufgEEIIIW2cmpoaxo0bh8ePH0s9k6uioqLKh8G2BC0zakIIIYS8VwoKClLJzevXr3Hs2DGkp6fLL6h/gRIcQgghhMi4ffs2CgoKcP78eVy5cgUSiUTeIdULJTiEEEIIkdG/f3/06tULAJCcnIywsDDk5+fLN6h6oASnFfH09ASPx6vyxZbz5s0Dj8eDp6dn0wdWjaKiInTo0IF74+27eDwewsLCZMoXLlyIoUOHcp8rj5vH40EgEMDMzAxLlizhXir38OFDbj2Px4OmpiYGDBiAM2fO1DtmxhjWrl0LfX19CIVCDB06FHfv3q2xTVlZGXx9fdGlSxeoqKjA1tYWv/76q1QdExMTqRgrl/nz51e5zc8++ww8Ho97J1d
Vx/n2cvz48XofKyGkbVNQUEC/fv0watQoqKio4Pnz5wgNDcWff/4p79DqhBKcVsbIyAjBwcFSCUNxcTGOHDmCzp07yzEyWSEhIejRowesra0RGhr6r7bl4uKCrKwspKenY/369fDz88OSJUuk6pw/fx5ZWVmIi4tDv379MHnyZPzxxx/12s+mTZuwZcsW7NixA/Hx8dDT08OIESNqfHbEqlWrEBAQgO3btyM5ORlz587FxIkTcevWLa5OfHw8srKyuCUqKgoAqnzrd1hYGOLi4qCvry9VbmRkJLWNrKwsrFu3DqqqqnB1da3XcRJCSCVDQ0NMmTIF+vr6KCsrw4ULF5CWlibvsGpFCU4r06dPH3Tu3FkqYQgNDYWRkRF69+4tVZcxhk2bNsHMzAxCoRC2trY4ceIEt14ikcDLywumpqYQCoWwsLDAtm3bpLbh6emJCRMm4L///S9EIhE6duyI+fPno6ysrNZYxWIx3N3d4e7uDrFY/K+OW1lZGXp6ejAyMsKMGTMwc+ZMmdGfjh07Qk9PD5aWltiwYQPKyspw8eLFOu+DMYatW7di5cqVmDRpEnr06IH9+/ejsLAQhw8frrbdwYMHsWLFCowaNQpmZmb4/PPPMXLkSGzevJmro6OjAz09PW45e/YsunTpAicnJ6ltPXnyBF988QUOHToEgUAgtY7P50ttQ09PDydPnoSbmxvU1NTqfJyEEPKudu3aYfTo0ejTpw+0tLRgZmYm75BqRQlOHTDGUFZUIZelIS97nzNnDoKCgrjPgYGB+Pjjj2XqrVq1CkFBQfD398fdu3exaNEiuLu74/LlywDe3B5oaGiIY8eOITk5GatXr8aKFStw7Ngxqe1cvHgRDx48wMWLF7F//37s27cP+/btqzHGBw8e4Nq1a5g6dSqmTp2KmJiY9zpTXygUVptklZWVYc+ePQAglSSsXbtW6g6Cd2VkZCA7OxvOzs5cmbKyMpycnBATE1Ntu5KSEpk3cwuFQly5cqXK+qWlpfj555/x8ccfg8fjceUVFRWYNWsWvv76a3Tv3r3a/VVKSEhAYmIivLy8aq1LCCG14fF4sLe3x8SJE6Go+OYpM4wxZGZmyjmyqtFzcOqgvJhhz6gsuez7k3ARBEJe7RXfMmvWLCxfvpybk3H16lUEBwfj0qVLXJ3Xr19jy5YtuHDhAhwcHAAAZmZmuHLlCgICAuDk5ASBQIB169ZxbUxNTRETE4Njx45h6tSpXHmHDh2wY8cO8Pl8WFpaYvTo0fjtt9/wySefVBtjYGAgXF1d0aFDBwBvLjEFBgZi/fr19TrWqly/fh2HDx/Ghx9+KFU+cOBAKCgooKioCBUVFTAxMZE6Dm1tbXTp0qXa7WZnZwMAdHV1pcp1dXVrfPLnyJEjsWXLFgwZMgRdunTBb7/9hlOnTlV7R0JYWBjy8vJk5kt9//33UFRUxIIFC6rd19vEYjGsrKwwcODAOtUnhJC64PP53L+TkpJw/fp1dOvWDY6Ojlzi0xw0ixEcPz8/mJqaQkVFBXZ2doiOjq6x/uXLl2FnZwcVFRWYmZlh165dTRRpy6CtrY3Ro0dj//79CAoKwujRo6GtrS1VJzk5GcXFxRgxYgTU1NS45cCBA3jw4AFXb9euXbC3t4eOjg7U1NSwZ88emTfPdu/eXeoLLxKJkJOTU218EokE+/fvh7u7O1fm7u6O/fv3N/g2xLNnz0JNTQ0qKipwcHDAkCFDsH37dqk6R48exa1bt3D69GmYm5tj79690NLS4tZ/8cUX+O2332rd19ujKsCbv2DeLXvbtm3b0LVrV1haWkJJSQlffPEF5syZI9VnbxOLxXB1dZWaY5OQkIBt27Zh3759Ne6rUlFREQ4fPkyjN4SQRsfj8ZCWlobQ0FC8ePFC3uFw5J5qHT16FAsXLoSfnx8GDRqEgIAAuLq6Ijk5ucpJsRkZGRg1ahQ++eQT/Pzzz7h69SrmzZsHHR0dTJ48uVFiVFTh4ZNwUaN
suy77boiPP/4YX3zxBQBg586dMusrH8X9yy+/wMDAQGqdsrIyAODYsWNYtGgRNm/eDAcHB6irq+OHH35AXFycVP1354LweLwaH/UdERGBJ0+ewM3NTapcIpEgMjKSmxCrrq5e5S2JeXl50NTUlCobNmwY/P39IRAIoK+vLxMT8GYSbteuXdG1a1eoqalh8uTJSE5ORqdOnaqN9W16enoA3ozkiET/+z7k5OTIjOq8TUdHB2FhYSguLsbz58+hr68PHx8fmJqaytR99OgRzp8/LzPpOjo6Gjk5OVI/ExKJBF999RW2bt2Khw8fStU/ceIECgsLMXv27DodGyGENESvXr2gq6uL3377DXl5eTh58iSsDfsB0Kq1bWOT+wjOli1b4OXlBW9vb1hZWWHr1q0wMjKCv79/lfV37dqFzp07Y+vWrbCysoK3tzc+/vhj/Pe//220GHk8HgRCBbksdflrvSouLi4oLS1FaWkpRo4cKbPe2toaysrKePz4MczNzaUWIyMjAG9OqgMHDsS8efPQu3dvmJubS43uNJRYLMa0adOQmJgotcycOVNqsrGlpSXi4+Ol2jLGkJCQAAsLC6lyVVVVmJubw9jYuMrk5l1OTk7o0aMHNmzYUOe4TU1Noaenx93hBLyZL3P58uU6XQZSUVGBgYEBysvLERISgvHjx8vUCQoKQqdOnTB69Gip8lmzZuH27dtS/aWvr4+vv/4aERERMtsRi8UYN24cdHR06nx8hBDSECKRCFOmTIGRkREkEgmuxV9B6j9XUV5R+80mjUmuIzilpaVISEiAj4+PVLmzs3O1kzavXbsmNckTeDPHQSwWo6ysrMqTW0lJCUpKSrjPBQUF7yH65o3P5yMlJYX797vU1dWxZMkSLFq0CBUVFXB0dERBQQFiYmKgpqYGDw8PmJub48CBA4iIiICpqSkOHjyI+Pj4Kkce6urZs2c4c+YMTp8+jR49ekit8/DwwOjRo/Hs2TPo6OhgyZIl8PDwgKWlJZydnVFUVITdu3fjwYMH1T4fpj6++uorfPTRR1i6dCkMDAywY8cOnDx5strLVDweDwsXLsR3333HjQR99913aNeuHWbMmMHVmz17NgwMDLBx40YAQFxcHJ48eYJevXrhyZMnWLt2LSoqKrB06VKp7VdUVCAoKAgeHh4y17E7duyIjh07SpUJBALo6enJJHt//vknfv/9d4SHhze4bwghpD5UVFTg4uKCpKQkXPwlFrlFj1BUVvvNEI1JrglObm4uJBJJlZM2Kyd0vis7O7vK+uXl5cjNzZW6dFBp48aNUpNl2woNDY0a13/77bfo1KkTNm7ciPT0dLRv3x59+vTBihUrAABz585FYmIi3NzcwOPxMH36dMybNw/nzp1rcEwHDhyAqqqqzARg4M1lJnV1dRw8eBCLFy/G1KlTwRjDf//7X6xcuRIqKiro3bs3oqOjYWxs3OAYKo0ZMwYmJibYsGED/Pz8kJubW+sI1dKlS1FUVIR58+bhn3/+Qf/+/REZGQl1dXWuzuPHj6VeTldcXIxVq1YhPT0dampqGDVqFA4ePIj27dtLbfv8+fN4/PhxlXe81UdgYCAMDAxk/hAghJDGxOPx0KtXLygVd8Srcw/RXl279kaNGQ9ryH3I78nff/8NAwMDxMTEcHfyAMCGDRtw8OBB3Lt3T6ZNt27dMGfOHCxfvpwru3r1KhwdHZGVlcXNk3hbVSM4RkZGyM/Pl0kCiouLkZGRwU16JqQtoe8/IaQ5KygogKamZpXn73fJdQRHW1sbfD5fZrSmpkmbenp6VdZXVFSUGcKvpKyszE2cJYQQQkjrJ9dJxkpKSrCzs5OatAkAUVFR1U7adHBwkKkfGRkJe3v7Ok0uJYQQQkjrJ/e7qBYvXoy9e/ciMDAQKSkpWLRoER4/fsy9MHL58uVSt7rOnTsXjx49wuLFi5GSkoLAwECIxWKZ9w4RQgghpO2S+3Nw3Nzc8Pz5c/j6+iIrKws9evR
AeHg4N4k0KytL6sFypqamCA8Px6JFi7Bz507o6+vjp59+arRn4BBCCCGk5ZHrJGN5qWmSEk2yJG0Zff8JIc1ZfSYZy/0SVXPVBvM+Quh7TwhpNSjBeUflROXCwkI5R0JI06v83tOEfUJISyf3OTjNDZ/PR/v27bmXRbZr167Br0sgpKVgjKGwsBA5OTlo3759tS8CJYSQloISnCpUPiywpjdiE9IatW/fvsqHZRJCSEtDCU4VeDweRCIROnXqhLIy+b4sjJCmIhAIaOSGENJqUIJTAz6fT7/wCSGEkBaIJhkTQgghpNWhBIcQQgghrQ4lOIQQQghpddrkHJzKh5kVFBTIORJCCCGE1FXlebsuDyVtkwnOy5cvAQBGRkZyjoQQQggh9fXy5UtoamrWWKdNvouqoqICf//9N9TV1d/7Q/wKCgpgZGSEv/76q9b3ZJCGo35uGtTPTYP6uelQXzeNxupnxhhevnwJfX19KCjUPMumTY7gKCgowNDQsFH3oaGhQT88TYD6uWlQPzcN6uemQ33dNBqjn2sbualEk4wJIYQQ0upQgkMIIYSQVocSnPdMWVkZa9asgbKysrxDadWon5sG9XPToH5uOtTXTaM59HObnGRMCCGEkNaNRnAIIYQQ0upQgkMIIYSQVocSHEIIIYS0OpTgEEIIIaTVoQSnAfz8/GBqagoVFRXY2dkhOjq6xvqXL1+GnZ0dVFRUYGZmhl27djVRpC1bffo5NDQUI0aMgI6ODjQ0NODg4ICIiIgmjLblqu/3udLVq1ehqKiIXr16NW6ArUR9+7mkpAQrV66EsbExlJWV0aVLFwQGBjZRtC1Xffv50KFDsLW1Rbt27SASiTBnzhw8f/68iaJtmX7//XeMHTsW+vr64PF4CAsLq7WNXM6DjNRLcHAwEwgEbM+ePSw5OZl9+eWXTFVVlT169KjK+unp6axdu3bsyy+/ZMnJyWzPnj1MIBCwEydONHHkLUt9+/nLL79k33//Pbt+/TpLS0tjy5cvZwKBgN28ebOJI29Z6tvPlfLy8piZmRlzdnZmtra2TRNsC9aQfh43bhzr378/i4qKYhkZGSwuLo5dvXq1CaNueerbz9HR0UxBQYFt27aNpaens+joaNa9e3c2YcKEJo68ZQkPD2crV65kISEhDAA7efJkjfXldR6kBKee+vXrx+bOnStVZmlpyXx8fKqsv3TpUmZpaSlV9tlnn7EBAwY0WoytQX37uSrW1tZs3bp17zu0VqWh/ezm5sZWrVrF1qxZQwlOHdS3n8+dO8c0NTXZ8+fPmyK8VqO+/fzDDz8wMzMzqbKffvqJGRoaNlqMrU1dEhx5nQfpElU9lJaWIiEhAc7OzlLlzs7OiImJqbLNtWvXZOqPHDkSN27cQFlZWaPF2pI1pJ/fVVFRgZcvX0JLS6sxQmwVGtrPQUFBePDgAdasWdPYIbYKDenn06dPw97eHps2bYKBgQG6deuGJUuWoKioqClCbpEa0s8DBw5EZmYmwsPDwRjD06dPceLECYwePbopQm4z5HUebJMv22yo3NxcSCQS6OrqSpXr6uoiOzu7yjbZ2dlV1i8vL0dubi5EIlGjxdtSNaSf37V582a8fv0aU6dObYwQW4WG9PP9+/fh4+OD6OhoKCrSr4+6aEg/p6en48qVK1BRUcHJkyeRm5uLefPm4cWLFzQPpxoN6eeBAwfi0KFDcHNzQ3FxMcrLyzFu3Dhs3769KUJuM+R1HqQRnAbg8XhSnxljMmW11a+qnEirbz9XOnLkCNauXYujR4+iU6dOjRVeq1HXfpZIJJgxYwbWrVuHbt26NVV4rUZ9vs8VFRXg8Xg4dOgQ+vXrh1GjRmHLli3Yt28fjeLUoj79nJycjAULFmD16tVISEjAr7/+ioyMDMydO7cpQm1T5HEepD/B6kFbWxt8Pl/mr4GcnByZ7LSSnp5elfUVFRXRsWPHRou1JWtIP1c6evQovLy8cPz4cQwfPrwxw2zx6tv
PL1++xI0bN3Dr1i188cUXAN6ciBljUFRURGRkJD744IMmib0lacj3WSQSwcDAAJqamlyZlZUVGGPIzMxE165dGzXmlqgh/bxx40YMGjQIX3/9NQCgZ8+eUFVVxeDBg7F+/XoaYX9P5HUepBGcelBSUoKdnR2ioqKkyqOiojBw4MAq2zg4OMjUj4yMhL29PQQCQaPF2pI1pJ+BNyM3np6eOHz4MF1Dr4P69rOGhgbu3LmDxMREbpk7dy4sLCyQmJiI/v37N1XoLUpDvs+DBg3C33//jVevXnFlaWlpUFBQgKGhYaPG21I1pJ8LCwuhoCB9GuTz+QD+N8JA/j25nQcbdQpzK1R5G6JYLGbJycls4cKFTFVVlT18+JAxxpiPjw+bNWsWV7/y9rhFixax5ORkJhaL6TbxOqhvPx8+fJgpKiqynTt3sqysLG7Jy8uT1yG0CPXt53fRXVR1U99+fvnyJTM0NGRTpkxhd+/eZZcvX2Zdu3Zl3t7e8jqEFqG+/RwUFMQUFRWZn58fe/DgAbty5Qqzt7dn/fr1k9chtAgvX75kt27dYrdu3WIA2JYtW9itW7e42/Gby3mQEpwG2LlzJzM2NmZKSkqsT58+7PLly9w6Dw8P5uTkJFX/0qVLrHfv3kxJSYmZmJgwf3//Jo64ZapPPzs5OTEAMouHh0fTB97C1Pf7/DZKcOquvv2ckpLChg8fzoRCITM0NGSLFy9mhYWFTRx1y1Pffv7pp5+YtbU1EwqFTCQSsZkzZ7LMzMwmjrpluXjxYo2/b5vLeZDHGI3DEUIIIaR1oTk4hBBCCGl1KMEhhBBCSKtDCQ4hhBBCWh1KcAghhBDS6lCCQwghhJBWhxIcQgghhLQ6lOAQQgghpNWhBIcQIhf79u0Dj8fjFkVFRYhEIkybNg3379+Xd3gwMTGBp6cn9/nhw4fg8XjYt2+f3GIihNQdvWyTECJXQUFBsLS0RHFxMa5evYoNGzbg4sWLuHfvHjp06CDv8AghLRQlOIQQuerRowfs7e0BAEOHDoVEIsGaNWsQFhaGOXPmyDk6QkhLRZeoCCHNSmWy8/TpU67sxo0bGDduHLS0tKCiooLevXvj2LFjMm2fPHmCTz/9FEZGRlBSUoK+vj6mTJnCbau4uBhfffUVevXqBU1NTWhpacHBwQGnTp1qmoMjhDQZGsEhhDQrGRkZAIBu3boBAC5evAgXFxf0798fu3btgqamJoKDg+Hm5obCwkJunsyTJ0/Qt29flJWVYcWKFejZsyeeP3+OiIgI/PPPP9DV1UVJSQlevHiBJUuWwMDAAKWlpTh//jwmTZqEoKAgzJ49W16HTQh5zyjBIYTIlUQiQXl5OTcHZ/369RgyZAjGjRsHAJg3bx66d++OCxcuQFHxza+skSNHIjc3FytWrMDs2bOhoKCA1atXIzc3F0lJSbCysuK2P3XqVO7fmpqaCAoKktr3hx9+iH/++Qdbt26lBIeQVoQuURFC5GrAgAEQCARQV1eHi4sLOnTogFOnTkFRURF//vkn7t27h5kzZwIAysvLuWXUqFHIyspCamoqAODcuXMYNmyYVHJTlePHj2PQoEFQU1ODoqIiBAIBxGIxUlJSGv1YCSFNhxIcQohcHThwAPHx8bhw4QI+++wzpKSkYPr06QD+Nw9nyZIlEAgEUsu8efMAALm5uQCAZ8+ewdDQsMZ9hYaGYurUqTAwMMDPP/+Ma9euIT4+Hh9//DGKi4sb8SgJIU2NLlERQuTKysqKm1g8bNgwSCQS7N27FydOnICNjQ0AYPny5Zg0aVKV7S0sLAAAOjo6yMzMrHFfP//8M0xNTXH06FHweDyuvKSk5H0cCiGkGaEEhxDSrGzatAkhISFYvXo1/vjjD3Tt2hVJSUn47rvvamzn6uqKgwcPIjU1lUt63sXj8aCkpCSV3GRnZ9NdVIS0QnSJihDSrHTo0AHLly9HSkoKDh8+jICAAPz2228YOXIkjhw5gt9//x1hYWHYuHE
jPvroI66dr68vtLW1MWTIEGzbtg0XLlxAaGgoPv30U9y7dw8AMGbMGKSmpmLevHm4cOEC9u/fD0dHR4hEInkdLiGkkdAIDiGk2fnPf/6DHTt2wNfXFykpKbh+/To2bNiAhQsX4p9//kHHjh1hbW0tdYeUgYEBrl+/jjVr1uD//u//8Pz5c+jo6MDR0RFaWloAgDlz5iAnJwe7du1CYGAgzMzM4OPjg8zMTKxbt05eh0sIaQQ8xhiTdxCEEEIIIe8TXaIihBBCSKtDCQ4hhBBCWh1KcAghhBDS6lCCQwghhJBWhxIcQgghhLQ6lOAQQgghpNWhBIcQQgghrQ4lOIQQQghpdSjBIYQQQkirQwkOIYQQQlodSnAIIYQQ0upQgkMIIYSQVuf/Ad79hv/iC2QtAAAAAElFTkSuQmCC"
     },
     "metadata": {},
     "output_type": "display_data"
    }
   ],
   "source": [
    "if __name__ == '__main__':\n",
    "    warnings.filterwarnings(\"ignore\")  # suppress warning messages in the console\n",
    "    device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')\n",
    "    # Use a forward-slash relative path: portable across platforms and consistent\n",
    "    # with the '../result' paths below. The original '..\\data' backslash form only\n",
    "    # resolves on Windows and contains the invalid string escape '\\d'.\n",
    "    fprs, tprs, auc, precisions, recalls, prc = Train(device, directory='../data',\n",
    "                                                      epochs=1000,\n",
    "                                                      attn_size=64,\n",
    "                                                      attn_heads=6,\n",
    "                                                      out_dim=64,\n",
    "                                                      dropout=0.2,\n",
    "                                                      slope=0.2,\n",
    "                                                      lr=0.001,\n",
    "                                                      wd=5e-3,\n",
    "                                                      random_seed=1234,\n",
    "                                                      sample_num=50,\n",
    "                                                      model_type='MAHN')\n",
    "\n",
    "    plot_auc_curves(fprs, tprs, auc, directory='../result', name='test_auc_1')\n",
    "    plot_prc_curves(precisions, recalls, prc, directory='../result', name='test_prc_1')"
   ],
   "metadata": {
    "collapsed": false,
    "ExecuteTime": {
     "end_time": "2024-05-07T22:27:49.423970800Z",
     "start_time": "2024-05-07T03:39:01.382884300Z"
    }
   },
   "id": "8eb6b250556d4d18",
   "execution_count": 2
  },
  {
   "cell_type": "code",
   "outputs": [],
   "source": [],
   "metadata": {
    "collapsed": false,
    "ExecuteTime": {
     "end_time": "2024-05-07T22:27:49.423970800Z",
     "start_time": "2024-05-07T22:27:49.404970Z"
    }
   },
   "id": "cc838e26b01b9da6",
   "execution_count": 2
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3 (ipykernel)",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.9.16"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
