{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "F:\\anaconda\\lib\\site-packages\\h5py\\__init__.py:36: FutureWarning: Conversion of the second argument of issubdtype from `float` to `np.floating` is deprecated. In future, it will be treated as `np.float64 == np.dtype(float).type`.\n",
      "  from ._conv import register_converters as _register_converters\n"
     ]
    }
   ],
   "source": [
    "import os\n",
    "import numpy as np\n",
    "import pandas as pd\n",
    "import tensorflow as tf\n",
    "from sklearn.metrics import make_scorer\n",
    "from sklearn.model_selection import StratifiedKFold, train_test_split\n",
    "from DataReader import FeatureDictionary, DataParser\n",
    "from matplotlib import pyplot as plt\n",
    "import config\n",
    "import pickle\n",
    "from NFM import NFM\n",
    "from metrics import Logloss\n",
    "from sklearn.preprocessing import MinMaxScaler\n",
    "\n",
    "def load_data():\n",
    "    ## 读取特征矩阵\n",
    "    with open('../data/temp.pkl', 'rb') as file:\n",
    "        data = pickle.load(file)\n",
    "    \n",
    "    cols = [c for c in data.columns if c not in ['instance_id','click']]#提取特征集合\n",
    "    cols = [c for c in cols if (not c in config.IGNORE_COLS)]\n",
    "    \n",
    "    ### 为了测量结果，使用Logloss衡量，且将训练集划分为：训练集+测试集+验证集(按照时间划分)\n",
    "    ### period小于33的为训练集，period=33随即划分为验证集+测试集\n",
    "    total_train = data[data.click != -1]\n",
    "    \n",
    "    #将数值特征归一化\n",
    "    if config.NUMERIC_COLS != []:\n",
    "        mms = MinMaxScaler()\n",
    "        total_train[config.NUMERIC_COLS] = mms.fit_transform(total_train[config.NUMERIC_COLS])\n",
    "    \n",
    "    train = total_train[total_train.period <= 32][cols+['instance_id']]\n",
    "    train_y = total_train[total_train.period <= 32]['click'] ##标签\n",
    "    val_and_test = total_train[total_train.period == 33][cols+['instance_id']]\n",
    "    val_and_test_y = total_train[total_train.period == 33]['click']\n",
    "    val, test, val_y, test_y = train_test_split(val_and_test, val_and_test_y, test_size=0.5, random_state=1024)\n",
    "\n",
    "    dfTrain_fea = pd.concat((train, val), axis = 0)\n",
    "    dfTrain_y = pd.concat((train_y, val_y), axis = 0)\n",
    "    dfTrain = pd.concat((dfTrain_fea, dfTrain_y), axis = 1)\n",
    "    dfTest = pd.concat((test, test_y), axis = 1)\n",
    "\n",
    "\n",
    "\n",
    "    X_train = dfTrain[cols].values\n",
    "    y_train = dfTrain['click'].values\n",
    "\n",
    "    X_test = dfTest[cols].values\n",
    "    ids_test = dfTest['instance_id'].values\n",
    "\n",
    "    cat_features_indices = [i for i,c in enumerate(cols) if c in config.CATEGORICAL_COLS]\n",
    "\n",
    "    return dfTrain,dfTest,X_train,y_train,val, X_test,ids_test,cat_features_indices\n",
    "\n",
    "# load data\n",
    "dfTrain, dfTest, X_train, y_train, val, X_test, ids_test, cat_features_indices = load_data()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [],
   "source": [
    "nfm_params = {\n",
    "    \"embedding_size\":64,\n",
    "    \"deep_layers\":[128],  #原文中单层NN最好\n",
    "    \"dropout_deep\":[0.5, 0.5],\n",
    "    \"deep_layer_activation\":tf.nn.relu,\n",
    "    \"epoch\":30,\n",
    "    \"batch_size\":256,\n",
    "    \"learning_rate\":0.001,\n",
    "    \"optimizer\":\"adam\",\n",
    "    \"batch_norm\":1,\n",
    "    \"batch_norm_decay\":0.995,\n",
    "    \"verbose\":True,\n",
    "    \"greater_is_better\":False,\n",
    "    \"eval_metric\":Logloss,\n",
    "    \"random_seed\":config.RANDOM_SEED\n",
    "}"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [],
   "source": [
    "def run_base_model_nfm(dfTrain,dfTest,nfm_params):\n",
    "    fd = FeatureDictionary(dfTrain=dfTrain,\n",
    "                           dfTest=dfTest,\n",
    "                           numeric_cols=config.NUMERIC_COLS,\n",
    "                           ignore_cols = config.IGNORE_COLS)\n",
    "    data_parser = DataParser(feat_dict= fd)\n",
    "    # Xi_train ：列的序号\n",
    "    # Xv_train ：列的对应的值\n",
    "    Xi_train,Xv_train,y_train = data_parser.parse(df=dfTrain,has_label=True)\n",
    "    Xi_test,Xv_test,ids_test = data_parser.parse(df=dfTest)\n",
    "\n",
    "    print(dfTrain.dtypes)\n",
    "    #将Xi_train分为训练集+验证集\n",
    "    Xi_train_, Xv_train_, y_train_ =  Xi_train[:-val.shape[0]], Xv_train[:-val.shape[0]], y_train[:-val.shape[0]]\n",
    "    Xi_valid_, Xv_valid_, y_valid_ =  Xi_train[-val.shape[0]:], Xv_train[-val.shape[0]:], y_train[-val.shape[0]:]\n",
    "\n",
    "    nfm_params['feature_size'] = fd.feat_dim\n",
    "    nfm_params['field_size'] = len(Xi_train[0])\n",
    "    \n",
    "    y_val_meta = np.zeros((val.shape[0],1),dtype=float)\n",
    "    y_test_meta = np.zeros((dfTest.shape[0],1),dtype=float)\n",
    "    #开始训练\n",
    "    nfm = NFM(**nfm_params)\n",
    "    nfm.fit(Xi_train_, Xv_train_, y_train_, Xi_valid_, Xv_valid_, y_valid_, early_stopping=True)\n",
    "    y_val_meta[:,0] += nfm.predict(Xi_valid_, Xv_valid_)  #预测验证集\n",
    "    losses = Logloss(y_valid_, y_val_meta[:,0])##验证集loss\n",
    "    print('验证集loss为: %.4f' %losses)\n",
    "    \n",
    "    y_test_meta[:,0] += nfm.predict(Xi_test, Xv_test)  #预测测试集\n",
    "\n",
    "    # save result\n",
    "    filename = \"%s_loss%.4f.csv\"%('nfm', losses)\n",
    "    _make_submission(ids_test, y_test_meta, filename)\n",
    "    \n",
    "    return y_test_meta\n",
    "\n",
    "def _make_submission(ids, y_pred, filename=\"submission.csv\"):\n",
    "    pd.DataFrame({\"instance_id\": ids, \"click\": y_pred.flatten()}).to_csv(\n",
    "        os.path.join(config.SUB_DIR, filename), index=False, float_format=\"%.5f\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "训练NFM模型\n",
      "[ 553   98  190 ... 2065 2053 1907]\n",
      "[17 11 16 21  6  0 18 22 19  5 12 14  8 13 10  3 20  4  1  7  9 24 23 15]\n",
      "[25 32  0 20  6 14 11 12  8 10 19 30 13 18 33 27  9 26 16 17 23 28  3  7\n",
      " 15  4 22 29  5 34 31 24  2 21]\n",
      "[ 8  9  1  4 12  6 20 10 16  3 22 11 14  2 21  5 18  0 19  7 13 17 15]\n",
      "[233  17 255 157   5 162 138 117  67 131   7 240  28  46 427 122  14 197\n",
      " 158 305 103 165 151 208  29  45   4  68 262 166 153 234 124 121  30 227\n",
      " 426 421  91 407  12 155 111 133 125  95  47 132  66  76 126 436 437 107\n",
      "  65 242  42  73  23  96 120  22 221 239 429   8   6  88 139  31 236  52\n",
      "   3 163 209  54  74 241   0 251   2 285 146  59 154 167 250 164  79 195\n",
      " 318 142 152  86 246 266 137 264 196 245  75  15 235 418 150 269 410 311\n",
      " 223 237  18 413  84 114 271 123 253 189 263  93 175 433 260   9  56 261\n",
      " 259 160 247 193  38 109 243 102 287  19  77 187 130  11  63 423 301 135\n",
      "  39  21  80 159  25 129 118 113 330 293 136  44 238  72  13  55  41 145\n",
      "  82 415 416 291  24  43 289 156 304 254 336  99 203  26 439 419 302  81\n",
      " 220 307  87 380 290 350 194 300  27 270 108  62  60 213 403 168  90  97\n",
      " 268 148 229 314 267 115 315  78 322 323 412 317  16 232 144 297 288 182\n",
      " 112 252 179 428 104 332 244 277  69 347 349 381  58   1 265 217 299  85\n",
      " 424 425 312 303 212 116  98 258 357  51 328 420 438  71 214 230 143 172\n",
      " 225 248  49 392 368 119 391 298 191  61 364 316 161 409 348 331 325  53\n",
      " 256  89  35 353 180 377 140  92 226 206 249 373 127 401  37 327  64 308\n",
      " 411 386 374 339 275 351 370  20 176  57 273 192 384 417 326 398 292  70\n",
      " 274 334 396 224 294 363 422 110 105 346 406 362 211 393 198 367 360 204\n",
      " 397 319 333 184  36 101 404  40 435 335 382 313  83 320 355 100 372 222\n",
      " 394 344 171 207  48 338 178 174 219 390 106 329 341 177  34 359  94 310\n",
      " 366 272 365 342 295 405 190 356 228 400 430 278 399 216 376 309 378 183\n",
      " 354 276 257 352 284 205 286 395 337 343 128 379 173 388 383 385 170 324\n",
      " 306 387 202 231 201  10 181 375 186 210 282 408 147 321 185 279 280 296\n",
      "  50 402 188 432 369 169 141 340 345 281 283 371 215 358 414 218 200 361\n",
      " 199 134 149 431  32 434 389]\n",
      "[ 1  0 38 15 31 48 12 40 20 52 24 51 41 23 13 33 49 39 32 11  3 18 17 19\n",
      " 29  7 47 16  2 30  5 22  9 46 42 34 56 55 44 37 21 10 36 58 43 50 45  4\n",
      " 26 59 57 53 25  8 28  6 27 35 14 60 62 63 64 54]\n",
      "[1 3 0 2]\n",
      "[ 78 234 108  86  82 200  73   2 276 164 243 198  43 193  11 281 169 213\n",
      " 182 143 206 199 210 123 233  80 129 320   8  75 298 119  74 216   1 256\n",
      "   3 148  38 197 241 178 104 259 188  77   7  83 122 308 165  60   4 136\n",
      "  48  65 254  25 313 139 238  29 105 286  96  84 244  49  22 235  20  88\n",
      " 242 142 293  14 147 220 268 115  99 297   0 102 159 208  57 161  15  90\n",
      " 150 204  12 247  21 146 183 285  31 251  76 209  18 222  23 207  19 179\n",
      "   9 118 130  37  87 239  62 226 312 249 152 228 215 205  92  51  24 295\n",
      " 202  55  97 184 154 138 248 168 180  85  98 194 329  67 149 124  16 106\n",
      " 151 245 153 132 214 170 167 319 144 227   6 317  44 109  91 273 172 232\n",
      " 224 156 158 263  17 173  53 275 289  63 291 218 117 294 127 265 137  89\n",
      "  71  39 141 231 212 174 163 166 135 116 299 201 236 155  28  95 114  79\n",
      " 300  61 145 330  50 133 223 177 113  13  41 284 111 296  10 252 140 112\n",
      " 211 255 181 264 219 128 267 203 175 160 189 195 283 246 230 287 258  56\n",
      " 305 191 171  93 260 217 126 120 157 321 266 225 282 326 162 323  52 192\n",
      "  81  64 221 274 278  47  42 253 190  59 311 240  46 314 131 110   5  66\n",
      "  35 290  30 107 288 125 103 229 101 185 176  33 271 196 301 250 328 187\n",
      " 304  45 186  40  32 121 279  94 277 324 262 303  26 272 315 322 309 257\n",
      " 302  27 100 237 327  70 261  54  58 292 269 318 316  68  69 270 331 325\n",
      "  36 306  72  34 134 332 310 307 280]\n",
      "[0 1]\n",
      "[ 720  640  480  300  320  600 1200  360  150  728  960  800 1080]\n",
      "[209  20 174 570   5 274 604 154 426 675 144 203 657 673  12 420 566  72\n",
      " 357 400 582 349 208 422 656 411 412 688 506 185 266 645 251 268  25 685\n",
      "  94 674 503 338 430 339   1 191 226 598 408 462 421 332 315 172 432 701\n",
      " 573 188 498 336  13 392 571 587 107 584 451 416 394 450 399 542 329 305\n",
      " 333 108 140 525 344 487 248  30 446 265  93 276 390 447   0 440 109 162\n",
      " 655 404 145 216 100 478 273 441 187 130 433 165 569 468 427 536 520 499\n",
      " 111 486 580 398 470 531 356 246 585 625 113 395   8 243 382 215 280 502\n",
      "   3 591 518 417 484 622 307 355 163 351 166  85 463 158 504 471 114 384\n",
      " 454 287 335 281 479  19 510 730 770 269 388 732 725 763 225 762 418 448\n",
      " 406  32 227 458 608 419 480 397  48 438 402 460 494 492 530 396 393 436\n",
      " 105 428 112 639 442 561 401 424 614 449 169 555 391 443 403 410 415  55\n",
      " 489 516 431 409  71 407 482 509 547 528 592 493 554 626 785 715 181 511\n",
      " 572 288 206 106 319 638 337 634 297 170 340 773  18 383 717 381 224 232\n",
      " 698 702  83 155 255 267 712 127 147 218 389 272 289 676 687 317 631 644\n",
      " 664 684 160 290 679  97 250   9 686 653 202 291 186 514 746 764 435 754\n",
      " 588 758 752 423 736 233 742 354 760 726 718 596 720 497 740 524 750 508\n",
      " 414 745 757 469 744 738  28 244 586 481 734 755 437 731 748 575 425 490\n",
      " 464 439 568 728 546 474 590 593 759 607 595 488 505 538 765 766 527 519\n",
      " 465 534 405 529 526 727 461 434 756 578 500 452 286 550 771 635 722 562\n",
      " 633 275  81  89 346 229 314 308 119 296  23 615 278 610 791 149 789 252\n",
      "  86 270 298 316 264 141 665 681 682 636 646 660 159 648 641 136 703 782\n",
      " 121 234 126 223 630 175 161 104 683 197 367 271 157 312 201 659 211 221\n",
      " 700 695  99 228 103 309 377 558 556 654 219 128 116 376 375 235 220 249\n",
      "  29 620 279 652 196 661  10 672 135 306 621 171 385 148 552 739 713 768\n",
      " 551 600 577 176 380 311 204 365 680  24 820 827 212 110 629 167 245 331\n",
      " 780 819 194 733 299 366  70  84 707  15 602 146  14 706 102 788 775 616\n",
      "  65  77 350 347 284 348 455 472 413 576 557 515 457 544 532 721 603 548\n",
      " 475 729 485 699 594 560 459 453 606 379 690 689 371 239 205 372 327 574\n",
      " 537 691 476 295 217 809  11 813 810   4 540 258 173  78  82  96 302 254\n",
      " 387 139 256 668 697 623 120 781 323 328  26 374 512 456 565 618 261 326\n",
      " 784 324 777 666 628 151   7 303 667 650 658 670 776 599 617 285 466 495\n",
      " 522 533 553 543 696 214 772  47 124 322 236 796  43 662 496 564 609 501\n",
      " 345 118 294 293 793 711 632 647 663 521 473 753 429 769 597 767 716 761\n",
      " 794 222 651 277 467  73 241   6 611 184 129 825 137 117 477 352  35 539\n",
      " 483 751 179 649 724 737 313 830 792  67 774 831 790 822 800 125 210 373\n",
      " 694 292 513 153 709 583 541 491 198 640 242  64 370 123 444 579 719 743\n",
      " 741 747 358 183 362 710 642 812 723 132 300 134 138 164  75 678 304 507\n",
      " 559 301  39 192  42 318 260 823  49 535 200 445 199 101 178 195  60  87\n",
      " 605  74 517  33 341 230 613 364 330 705 677 360 811 240  98 786  53 795\n",
      "  88 612  56 523 133 816  16 368 150 259 142  40  51  76 386 601  36 826\n",
      " 797 821 828 693  68 637 749 213 321  17 567 671 180 177  66 643  63  21\n",
      " 189 369  61 238 320 263 708 798 325 735 359 669 779  37 231 563 589 310\n",
      "   2 143  59 549 361 545 342 282  45 817 353 115 343 152 182 818 802 581\n",
      " 193 122 704 190 168  62 692  38 778  58 247  91  80  95 624  22  50  44\n",
      "  27 627 262 257  90 787 156  41  46 824  52  92 801 283  34 619 378  79\n",
      " 253 237 363 131 851 856 845 859 858 852 841 857  54 835 808 837 862 854\n",
      "  31 850 836 815 832 803 847 853 861 207 799 840 855 843 848 860 783 838\n",
      " 714  69 846 834 842 866  57 864 863 865 839 849 806 833 844 805]\n",
      "[0 1]\n",
      "[1 0]\n",
      "[13  1 31  6 12 15 25 36 29 33 18  5 11 23 20 38 27 26 10 17 32 40 24  0\n",
      " 21  4 39  3 14 22  8 28 35 37 16 30 19  9 34  2]\n",
      "[ 8  3  5  2 10]\n",
      "[1280  960  320  300  160  100   50  240  500  800  270  150   90  480\n",
      "  568  640  200  360 1200 1920]\n",
      "[2 3 1 0]\n",
      "[ 0 22 72 14  2 39 55 42 24 65 48  3  6  1 23 67 26 62 31 47 44 41 19 64\n",
      "  9 46 17 28 45 34 30 58 27 56 77 73 50 11 33  8 21 32 12 53 20 10  7 75\n",
      " 51 52 66 13 38 54  5 71 57  4 29 16 61 60 59 68 49 15 76 36 37 43 35 25\n",
      " 63 74]\n",
      "[ 623 1091  851 ...  287  186  862]\n",
      "[1 3 4 0 5 2]\n",
      "[302  77 214 340  14 120 628 631 448 701 254  67 690 699 335 252 441 590\n",
      "  38 354 421 606 369 223  76 443 688 430 433 717 530 673 176 685 250 140\n",
      " 373 712  98 527 356 453 364   2 227 622 301 734 429 257 431 487 442 345\n",
      " 308 328 342 455 469 642 597 186  56 524 269 192 303  99 411 493 610  93\n",
      "  36 608 474 438 417 471 420 569 341 454  51 327 219 161 352 286  54 134\n",
      " 167 343 178 551 224 218 514 278  62 473  44 268  88 132 412 357  92 424\n",
      " 273 403 464  74  68  55 594 183 522 347 671 425 272 184 247 679 503  43\n",
      "   0 113 206 230 593 491 598 449  48 561 546 675 736 511 605 497 554 397\n",
      " 277 639 387 106 457   8 138 645 385 437 292 562 528 202 614 146 543 510\n",
      " 426 682 332 390 162 346  50 731  85 371 392 733 276 531 409 187 413 329\n",
      " 216 477 242 305 643 366 389 299 508  17 537 194 773 813 535 419 148 405\n",
      " 775 492 116 767 805 462 407 439 145 470 428  20 432 244 174 483 632 119\n",
      " 114 365 490 361 504 416  26 463 440 423 406 484  16 523 544 518 418 459\n",
      " 128 472  19 318 209 451 558  72 566 325 655 465 585 446 509 410 496 378\n",
      " 579 532 351 467 436  28 422 513 447 460 542 284  75 108 117 427 656 507\n",
      " 533 112 130 571 555 617 857  15 393 748  81 750 157 724 307 135 196 821\n",
      " 372 394 677 820 596 208 306 414  66 336 185 653 395 703 658 320  70 629\n",
      " 711 391 818 359  71 718 728 368 251 644 753 444 198 756 275 258 175 226\n",
      " 735 115  87  41 179 199 197 281 263 355 751 150 177 670 744 103 317 246\n",
      " 131  89 188 674 124 109 141 221 715  91  37 338 684 683 692 358 641 710\n",
      " 667 349 666 374 595 155 287  90 168 291 714 241 205 159 680 353 350 517\n",
      " 445 539 853 789 807 456 798 613 801 795 780 259 785 797 362 803 770 761\n",
      " 621 481 764 774 769 520 783 790 478 804 794 534 787 814 800 806 623 234\n",
      " 482 782  97 839 200 311 611 778 793 791 600 802 784 516 485 592 233 772\n",
      " 505 500 615 591 695 458 618 855 578 396 776 565 450 810 552 779 486 560\n",
      " 603 526 475 549 304 434 575 762 399 660 788 765 766 781 587 376  63 163\n",
      " 648 137 125  40  53 398 139 638 180 243 709 104 415 331 203 843 757 849\n",
      " 282   1 668 129 858 190 845 758 847 154 834 142  69 164 165 700 634 321\n",
      " 337 506 169 696 706 707 681 172  13 686 222 160 859 676 156 310 838  11\n",
      " 808 388 375 210 236 136 363 201 151 296 348 295 229  83 171 293 708  79\n",
      " 650 689 153 253 381  82 143  39 693 334 408 324 166 239 360 647  18 730\n",
      " 248 725 121 126 105 739  65 721 173 582 654 123 580 249 760 742 752 170\n",
      " 754  73 207 874 881 261 868 722 875 133 217 255 279 840  78 877  64 127\n",
      " 212 181 309  84 195 716 191 698 330 799 576 771 763 809 574 461 777 746\n",
      " 811  86 624 678 640 702 100 669 601 290 149 122 182 452 705 904 895 905\n",
      " 894 908 238 883 101 878 892 910 903 909 911 280 232 842 831 745 891 912\n",
      " 880 659 570 713 152 649 322 380  47  52 740 743 627 189   9 110 844 315\n",
      " 829 755 830   7 824  34 144 118 723 300 495 529 581 563 540 498 479 568\n",
      " 557 599 626 572 604 607 625 609 435 619 584 553 812 630 841 914 211 525\n",
      " 288 379 867 256 220 344 339 720 719 502 664 476 314 665  94 657  59 267\n",
      " 869 872 870  49 564 147  32 215 494 651 559 326 404 386  45 323 652 367\n",
      " 635 876 837 620 521 577 538 480 589 573 846 889 825 377 370   6 663 274\n",
      " 586 691 550 823 193 661 662 567 519 489 547 556 732 245 747 111 848 851\n",
      " 888 262 879 819 866  25 588 612 729 861 727  46 796 786 816 316 862 792\n",
      " 738  95 737 602 817 741  57  80  96 501 541 488 633 204 907 897 102 515\n",
      "  21 616 583 228 697 768 833 913  35 890 856 900 865 860 240 726 902 285\n",
      " 536 545 548 231  58 312 270 158 384 107 466 400 759 646 319  60 827 512\n",
      " 704 832   4 298 836  23  24  29 901  27 313 822  42   5 294  61 235 850\n",
      " 271 835 826 884 382 402 871  22 899 896 289 854  33  12 383 266 815 672\n",
      " 264 694 333   3 468 637 297 499 885 401 886 225 213 852  31 687 283 636\n",
      " 882 893 828  10 898 237 260 265 938 935 929 921 936 928 919 926 916 934\n",
      " 917 931 887 924 932 863 927 915 925 922 930 933 920 923 937 918 749  30]\n",
      "[2 1 0]\n",
      "[10 22 12 19  9  2 25 16 23  6 18  3 27 17 15 14 21 31 28 13  1 24 29  8\n",
      "  5 30 11  4  0  7 20 32 26 33 34]\n",
      "[6 8 4 0 5 7 1 3 2]\n",
      "[40 75 57 25 72 58 63 39 47  5 52 79 44 46 36 65 21 51 48  0 38 74 27  6\n",
      " 64 59 77 78 49 53  1 15 60  9 23 55 31 56 22 17 10  3 67 32  7 37 11 76\n",
      " 45  2 43 30 26 41 73 16 18 66 61 70 42 35 24  4 20 33 19 69 29  8 50 28\n",
      " 14 68 34 13 62 12 71 54]\n",
      "[ 88 210 184 179 123 114 126 103 277 256 285 265 273 274 119 187 112 180\n",
      " 127  13 185 181 231 171   0 143 124 110 219   1  82 115 250 162 101 230\n",
      " 166 249 156 216  75 117 131 190  31  57 297 178 111 236  83 247 215  84\n",
      "   8 211 196 197 222 158 267 268  72 142 153 175 276 266 206 259 188  92\n",
      " 261 151 137 212 209  95 248  78  15 125  14  86   7 118  47 243 257  70\n",
      " 132  60 130  59 144 220 138  45 116  49  37 150 264 161 100 235 278  97\n",
      " 186  22 198 192 213 270 275 208 168   9  74  87  26 263 228 253 104 191\n",
      "  35 148 149 223  27 255 269 107 252  24  19  80  42 224  18  20 288 282\n",
      " 204 225 155  43 183  58 159  50 234  51   4 214 134 207  94 194 133  71\n",
      " 229   5 200 128 173  69 218  29  44 177 174  23  68 254  66  16 163 120\n",
      "  93 245 182 102 294 201  96  25 280  46 281 121 251  64 172 246 260 160\n",
      " 164 293  65  21 189 295 157 296 239 199  41 258  53  73  36  76  56 226\n",
      " 154  63 167  85 146  81 203 122  48 241 140  67 292  11 217 129 193  33\n",
      " 105 233 147  32  17  99  77  55 289 287 291 139  61 141 271 176 106   3\n",
      "  91 279 145 286 227 284  28   2  10  90  12 237 195  34 205  30 283 290\n",
      " 113 135  89 242 108 202 244  52 272  40 262 232 165 169  62  79  54 136\n",
      " 221  38   6 152 240  98  39 238 109 170]\n",
      "[ 89  81  91  83 126  95 142  67  92  77 186 140 117 134 187  75 113 166\n",
      " 174  82 111  97  62  88  73 190 132  76 136 125  65 129  57 185  56 130\n",
      " 115  72   0 143 189 102 182  51 180  71  20 179 109 121 124 133 184  32\n",
      "  26 165  90 120 123 127  23  94 116 163 106 170  35 139 168 167 105 122\n",
      " 131  70 108 107  55  46   1 154 172  50  21 176  47 188  29  58 114  96\n",
      " 104  53 101 159 158  66 152  85  16 175   7  84  39 161 157 191  19 156\n",
      "  24 178 162 177  86 155 103 160 151  28  60  48  18  33  78  68  45  11\n",
      " 153 138  10  25 128  61 148  12 146  13 118  42  64  98 149  15  37   9\n",
      "  31   2  17 150  34  30 112  52 119  22  74  63  14 100 135   6   3  44\n",
      "  27 183  59 173  99 137  54 110  69  41 181   5  36   4 141  43  49  87\n",
      " 169 164 171  93  40   8 144 147  38  79 145]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "adid                       int64\n",
      "advert_industry_inner      int64\n",
      "advert_name                int64\n",
      "app_cate_id                int64\n",
      "app_id                     int64\n",
      "campaign_id                int64\n",
      "carrier                    int64\n",
      "city                       int64\n",
      "creative_has_deeplink      int64\n",
      "creative_height            int64\n",
      "creative_id                int64\n",
      "creative_is_download       int64\n",
      "creative_is_jump           int64\n",
      "creative_tp_dnf            int64\n",
      "creative_type              int64\n",
      "creative_width             int64\n",
      "devtype                    int64\n",
      "f_channel                  int64\n",
      "inner_slot_id              int64\n",
      "nnt                        int64\n",
      "orderid                    int64\n",
      "os                         int64\n",
      "province                   int64\n",
      "sim_ip                     int64\n",
      "clear_make                 int64\n",
      "clear_model                int64\n",
      "clear_osv                  int64\n",
      "instance_id                int64\n",
      "click                    float64\n",
      "dtype: object\n",
      "WARNING:tensorflow:From F:\\jupyter notebook\\推荐系统项目\\NFM\\NFM.py:103: calling reduce_sum (from tensorflow.python.ops.math_ops) with keep_dims is deprecated and will be removed in a future version.\n",
      "Instructions for updating:\n",
      "keep_dims is deprecated, use keepdims instead\n",
      "#params: 447265\n",
      "[1] train-result=0.4172, valid-result=0.4299 [52.0 s]\n",
      "[2] train-result=0.4162, valid-result=0.4293 [51.8 s]\n",
      "[3] train-result=0.4157, valid-result=0.4298 [50.9 s]\n",
      "[4] train-result=0.4154, valid-result=0.4298 [50.2 s]\n",
      "[5] train-result=0.4152, valid-result=0.4301 [50.8 s]\n",
      "[6] train-result=0.4150, valid-result=0.4302 [50.5 s]\n",
      "验证集loss为: 0.4302\n"
     ]
    }
   ],
   "source": [
     "print(\"训练NFM模型\")\n",
     "# ------------------ NFM Model ------------------\n",
     "# Trains the NFM with early stopping, writes a submission CSV, and\n",
     "# returns an (n_test, 1) array of predicted click probabilities.\n",
     "y_test_nfm = run_base_model_nfm(dfTrain, dfTest, nfm_params)\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " 测试集loss为： 0.4265\n"
     ]
    }
   ],
   "source": [
     "### Evaluate logloss on the held-out test set (the random half of period 33).\n",
     "print(' 测试集loss为： %.4f' %Logloss(dfTest['click'].values, y_test_nfm[:,0]))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "scrolled": true
   },
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.5"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
