{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "import numpy as np\n",
    "import pandas as pd\n",
    "import matplotlib.pyplot as plt\n",
    "%matplotlib inline\n",
    "import time\n",
    "import random"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {
    "collapsed": false
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(15000, 160)"
      ]
     },
     "execution_count": 2,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "train = pd.read_csv('../data/train_xy.csv')\n",
    "train.shape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {
    "collapsed": false
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(10000, 159)"
      ]
     },
     "execution_count": 3,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "test = pd.read_csv('../data/train_x.csv')\n",
    "test.shape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {
    "collapsed": false
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(15000, 157)"
      ]
     },
     "execution_count": 4,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "x_train = train.drop(['cust_id','cust_group','y'],axis=1)\n",
    "x_train.shape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {
    "collapsed": false
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(10000, 157)"
      ]
     },
     "execution_count": 5,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "x_test = test.drop(['cust_id','cust_group'],axis=1)\n",
    "x_test.shape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {
    "collapsed": false
   },
   "outputs": [],
   "source": [
    "# iv_feature = ['x_3', 'x_4', 'x_5', 'x_6', 'x_7', 'x_8', 'x_9', 'x_10', 'x_11', 'x_12', 'x_13', 'x_14', 'x_15', 'x_16', 'x_17', 'x_18', 'x_19', 'x_20', 'x_21', 'x_22', 'x_23', 'x_24', 'x_25', 'x_26', 'x_27', 'x_28', 'x_29', 'x_30', 'x_31', 'x_32', 'x_33', 'x_34', 'x_35', 'x_36', 'x_37', 'x_38', 'x_39', 'x_40', 'x_41', 'x_42', 'x_43', 'x_44', 'x_45', 'x_46', 'x_47', 'x_48', 'x_50', 'x_51', 'x_52', 'x_53', 'x_54', 'x_55', 'x_56', 'x_57', 'x_58', 'x_59', 'x_60', 'x_61', 'x_62', 'x_63', 'x_64', 'x_65', 'x_66', 'x_67', 'x_68', 'x_69', 'x_70', 'x_71', 'x_72', 'x_73', 'x_74', 'x_75', 'x_76', 'x_77', 'x_78', 'x_79', 'x_80', 'x_81', 'x_82', 'x_83', 'x_84', 'x_85', 'x_86', 'x_87', 'x_88', 'x_90', 'x_97', 'x_98', 'x_99', 'x_100', 'x_101', 'x_139', 'x_140', 'x_141', 'x_142', 'x_143', 'x_144', 'x_149', 'x_150', 'x_153', 'x_154', 'x_155', 'x_157']\n",
    "# x_train = train[iv_feature]\n",
    "# x_train.shape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {
    "collapsed": false
   },
   "outputs": [],
   "source": [
    "# iv_feature = ['x_3', 'x_4', 'x_5', 'x_6', 'x_7', 'x_8', 'x_9', 'x_10', 'x_11', 'x_12', 'x_13', 'x_14', 'x_15', 'x_16', 'x_17', 'x_18', 'x_19', 'x_20', 'x_21', 'x_22', 'x_23', 'x_24', 'x_25', 'x_26', 'x_27', 'x_28', 'x_29', 'x_30', 'x_31', 'x_32', 'x_33', 'x_34', 'x_35', 'x_36', 'x_37', 'x_38', 'x_39', 'x_40', 'x_41', 'x_42', 'x_43', 'x_44', 'x_45', 'x_46', 'x_47', 'x_48', 'x_50', 'x_51', 'x_52', 'x_53', 'x_54', 'x_55', 'x_56', 'x_57', 'x_58', 'x_59', 'x_60', 'x_61', 'x_62', 'x_63', 'x_64', 'x_65', 'x_66', 'x_67', 'x_68', 'x_69', 'x_70', 'x_71', 'x_72', 'x_73', 'x_74', 'x_75', 'x_76', 'x_77', 'x_78', 'x_79', 'x_80', 'x_81', 'x_82', 'x_83', 'x_84', 'x_85', 'x_86', 'x_87', 'x_88', 'x_90', 'x_97', 'x_98', 'x_99', 'x_100', 'x_101', 'x_139', 'x_140', 'x_141', 'x_142', 'x_143', 'x_144', 'x_149', 'x_150', 'x_153', 'x_154', 'x_155', 'x_157']\n",
    "# x_test = test[iv_feature]\n",
    "# x_test.shape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {
    "collapsed": false
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "<matplotlib.axes._subplots.AxesSubplot at 0x21034f3f5c0>"
      ]
     },
     "execution_count": 9,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAPkAAADuCAYAAAD7nKGzAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4wLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvqOYd8AAAGE1JREFUeJzt3XmYFNW9xvHv6Z5hZlhlXwQtFAUV\nRRRjFBGuihgKlxi9KipkMYlGk3hjTOpeE4PJVSsxV2NyTW6iYlRc467lIxrAHRWXqGDYgqUi6rDO\nMDPM0t11/6hGWQamZ6a7T9Xp3+d55hkemO5+h2feOaeqT51SQRAghDBXQncAIURhScmFMJyUXAjD\nScmFMJyUXAjDScmFMJyUXAjDScmFMJyUXAjDScmFMJyUXAjDScmFMJyUXAjDScmFMJyUXAjDScmF\nMJyUXAjDScmFMJyUXAjDScmFMJyUXAjDScmFMJyUXAjDScmFMJyUXAjDScmFMJyUXAjDScmFMJyU\nXAjDScmFMFyZ7gCiOCzH6wkMA/oA5UCXHT5v/XMaqM1+1AAbgWrftWs1xBZ5oOT+5GawHK8LcAhw\nAGGZ98p+bP1zz06+RCNQDXwE/BNYsvXDd+01nXxuUUBS8hiyHE8B+wFfAo7Mfh4DVGiKtImw8O8B\nbwLzfdderimL2IGUPCYsxzsEsIFJwBFAb62B2vYhMA/4O/B337WrNecpWVLyiLIcrxw4HjiVsNzD\n9CbqlABYTFj4R4HnfdeWH7wikZJHiOV4SWAKcCZhuaM+WnfUB8Ac4HbftVfoDmM6KXkEWI43EPg2\n8F1gqOY4xfYKcAdwr+/aG3WHMZGUXCPL8cYDFwNfI3z7qpQ1AY8Dv/dd+wXdYUwiJS8yy/G6AucS\nlnuM5jhR9QpwHfCI79oZ3WHiTkpeJNn3sS8ErgAGaI4TF8uAq4G7fddO6w4TV1LyArMcLwGcB1wF\nWHrTxNZy4L+RsneIlLyALMebBlwDHKw7iyHeAS72XftF3UHiREpeAJbjHQX8BjhGdxZD3Qlc7rv2\nZ7qDxIGUPI8sx+sOuMD3AKU5julqgCuBm2QKv3tS8jyxHO944BbkuLvY3iacwr+kO0hUSck7KXsJ\n53XAd3RnKWEB8EfgMt+1m3SHiRopeSdYjjcFuJl4rys3ydvAWb5rL9MdJEqk5B1gOV4FcCPhMlQR\nLfWE0/fbdQeJCil5O1mOtzfwADBOdxaxW3cC3/Ndu053EN2k5O1gOd6JwN1AX91ZRE5WEE7f39Id\nRCfZyDFHluP9CHgSKXic7Ae8bDneabqD6CQjeRuya87/BHxTdxbRYRngEt+1/6Q7iA5S8t3Ivj32\nGDBRdxaRF9f6rv1fukMUm5R8FyzH2wOYS7hJojDH7cAFvmundAcpFil5KyzH6ws8A4zVnUUUxFzg\njFI58y4l34HleAMINxyUK8fM9jow2XftTbqDFJqcXd+G5XhDgOeQgpeCccBTluP10B2k0KTkWZbj\nDSMs+CjdWUTRHAk8YTlele4ghSTTdcByvD7AQmB/3VmEFnOBk33XbtEdpBBKfiTPvg/+EFLwUjYF\nuC17+ynjlHzJgb8g74OLcAfd63SHKISSLrnleFcAM3XnEJFxmeV4l+gOkW8le0xuOd5ZwD3INk1i\ney3ARN+1F+oOki8lWfLsRovzgUrdWUQkrQYO8117re4g+VBy0/XsYpeHkYKLXRsK3JXdMz/2jPgm\n2mk2MFB3CBF5k4Ff6A6RDyU1Xbcc7yLCDf+EyEUATPVd+yndQTqjZEpuOd5I4E2gq+4sIlbWEx6f\nf6g7SEeVxHTdcrxy4C6k4KL9+hKupYitkig54c0GD9cdQsTWFMvxztEdoqOMn65bjncM4YUnpfIL\nTRRGNTDKd+2NuoO0l9E/+Nlp+s0Y/n
2KohhATJe9mv7Dfyly6ajIn29ajnes7hDtZex0PbsBxFLA\n+E0BRFEtBcb4rt2sO0iuTB7JXaTgIv9GAY7uEO1h5EhuOd5hhHt4ycUnohDqgOG+a6/THSQXpo7k\n1yEFF4XTHbhcd4hcGTeSW443FfB05xDGqwf28V27WneQtpg4kl+lO4AoCd2An+gOkQujRnLL8SYC\nz+rOIUrGFsLR/FPdQXbHtJH8x7oDiJJSRQzOtBszkluONwp4DznhJoqrEdjXd+01uoPsikkj+WVI\nwUXxVQKR3vzRiJHccryBwAdAhe4soiRVA8OiugrOlJH8EqTgQp8BwOm6Q+xK7EuevQPKRbpziJIX\n2Z/B2Jec8BY3fXWHECXvWMvxRugO0RoTSn627gBCZH1dd4DWxPrEW/aWs9WEa4mF0O0jwPJdO6M7\nyLbiPpJPQwouomMYMEl3iB3FveSx3VxPGOsU3QF2FNuSW47XE/iK7hxC7GCq7gA7im3JgdOQ+5mJ\n6NnPcrx9dYfYVpxLfrLuAELsQqRG8ziXPHa7ZoqSEamSx/IttOwVZ//UnUOIXWgE+viuvUV3EIjv\nSD5RdwAhdqMSOE53iK3iWnKZqouom6w7wFZSciEKIzI32IzdMbnleMOBVbpzCNGGzUAv37W1F0zr\nSK6UOkkptUwptVIpleteWXI8LuKgBxCJ98u1lVwplQRuIly1diBwjlLqwBweGplpkBBtOFR3ANA7\nkn8JWBkEwaogCJqBe4FTc3hcLr8IhIiCsboDgN6S70l4ad5Wq7N/1xYpuYiLkh/JW9tZdbcnKSzH\n2wMYVJg4QuRdyY/kqwmvv91qKNDW3tX7FS6OEHk32HK8PrpDlGl87UXAfkqp4cDHhNs4TW/jMcPz\n8cK1rz9K3dtzIYDuY6bQ84hT2fTiXdS9PZdE114A9D52BlX7HrHd44JUM5/e/VOCVAtkMnQdOZ49\nJpwLwJYP3mbTgtkE6Ra6DBpB36/8EJVIUr/sJWpeuItEVXf6n/4zklU9adn4CZuev4P+p/40H9+O\niLYhwAadAbSVPAiClFLqEmAukARmB0GwpI2H7dPZ121e61P39lwGzbgelSyn+v4rqdp3HAA9xp1G\nryN3s7NuspyBZ19DoksVQTrFp3f9hKp9DqfLkP1Z793AwLOvprzPnmx6YQ51786jx5gT2fzawww6\n/7fU//N56t97jp6Hn8ymF+5kjwnndfZbEfEwCFisM4DW98mDIHgyCIL9gyDYNwiCq3N4SKdH8pb1\nq6kYMopEeSUqkaRi2GgaVizM6bFKKRJdqgAIMinIpEEpMls2o5LllPcJzxtWWofSsPyl7IMSBOkW\nglQTKpGk8aPFJLv1/vxrhfG0n0OK27LWTv+Hdem3N40fLSa9pZZMSyNbVr1OunYdAJvffII1sy9h\n3ZO/I91Y1+rjg0yaNbd9n9V/OI9K61AqhowkUdWTIJOi6ZMVADQse+nz5+w1/hyq77+SRv8fdDtw\nIjUv30ev8bJrVQnRXnKdx+Qd0aOzT1Debxg9jzyD6vt+jiqvpMuA4ZBI0mPsVHodfTYoxaYX5rBx\n/i30m3rpTo9XiSRDvvEHMo11VD98Nc1rfbr0t+h/yk/YOP9mgnQLldZhkEgCUDV8LFXDw5Osde/O\no2rfcaTWr2bDaw+RqOxO7xO+Q6JcNrgxmPaSx20k73TJAXqMOZHBX7+RQef+mkRlD8p7DyHZrTcq\nkUSpBD3GTKH5k+W7fY5EZXcqhx3MllVvAlCx5wEMOvc3DJ5xA5XDDqK895Dtvj7T0kjd4nn0GGuz\n8fnb6Tv1UroMGkH9kmfz8S2J6JKSt1PPfDxJun4TAKnaahqWL6TrgRNJ1X1xArRh+ULK++298+Ma\nashkp/GZliYaP/gH5X2HbvecQaqF2lcfoPvY7feYrH31QXqOOwWVLCNoyd4XTyUIUk35+JZEdGkv\nec
lN1wHWPnINmS2bIZGkz+QLSVZ2Z90T/0PzZ6tAKcp6DaDPlPButKnN61n/1O8ZeOZVpOs2sM67\nAYIMBBm6jppA1xFfAqD2tYdoWPkaENDj0KlU7T3m89dLbV5P86cr2eOY8O22nl/6Kp/e+WMSld3o\nf/rP8vEtiegaoDtArC41tRxvM3IzBREvy3zXHqUzQGym65bjKaCb7hxCtJP22XKbJVdKXaKU6l2M\nMG3oTuvr3YWIMu0lzyXAIGCRUupNYDYwN9Azxy/X8Jol4yD1/sppyVfWlJPSHcUoKZKbwNaaIadj\ncqWUAk4EvgGMA+4Hbg2C4F+FjfcFy/EqgUhscWuqntTVnJl8/r2zkgsyI9THByYUUZjBxd2HzKrZ\n+a2aIsr5xJtSagxhyU8CFgBfBp4JguAnhYu3Pcvx0sToPEKcJcikJybeXjIj+fTGoxLvDa1ULZHY\nyiiGfGbV5OXCqo5qc7qulPoBMBNYB9wCXB4EQYtSKgGsAIpWcqABObteFBkSyQWZsYcsyISr9Yar\nNR/OSD7jT0u+0q0fNQcrRRfNEeNC+/FPLsfk/YDTgyD4YNu/DIIgo5SaVphYu1SPlFyL94Mhe12V\nmrnXVamZdGPL5tOTL755dnJ+6gD14ciECvoX6nXTmYBxN9ezZ48ET0zvutO/37+khVnPNqEUjBmY\n4O6vhV9z0px6Xlmd5pi9yrZ73LkPNfDuZxmm7V/GNceHy4l/9VwThwxMcOqogpz2qS3Ek7ZHmyUP\nguDK3fxbsW9VVF/k1xOtqKeqx53pyV++Mz0ZCIKjE0uWzEzOXTch8e7grqp5/3y+1o2vNnNAvwS1\nrSwMXLE+zbUvNvHSN7vRu0pRXZ/5/N8uP7qChpaAP7/R8vnfvfNZOvx8UXcm3FZPTWNAQ0vAa2vS\n/HxiRT5jb2t9oZ44V9pP77eTlDxylHo5M/qglzOjARiq1q45L/nMylOTL1UNYuPBSnX89tKrazN4\nK1JcMaGC6xc27/TvN7/ZwsVHdKF3VfjO6oBuX5yuOX6fMp71t58plydgSwtkgoDmdEAyAVcuaOKX\nkwpWcJCSt5uUPOJWB/2HuKnpQ9zUdKpoapiWXLjo3OS8xoPVqv2SKmjXOu5Ln2rkNydUsrm59ZPD\ny9eHI/f42fWkMzBrUgUnjdj1j/QB/ZPs1SvBYX+u5/xDylm5IUMAjB2cbE+s9tK6KwzEr+SbdAcQ\nudtCRde/pScd8bf0JAAOV8uWzix7+rPjEm/170bjAUrtenHTE8tbGNBNcfiQ5E4j8lapDKzYkOHZ\nmV1ZXRsw4bZ6Fn+vO3tU7nrN1O9O+mJicfI9Dfx5WiVXP9/E25+lmbxPGd8+PO/nE2Ukbye5PVKM\nvRGMHPVGy8hRAAPZUD29bN7y0xMvlg9Va0crtf2S5Zc+TPPYshRPrthMYwpqmwLOe2gLc06v+vxr\nhvZUfHlokvKkYnhvxch+CVasz3DEnm2PzI8ubWHc4CT1zQGL16a5/8yuHHtbPeceUk7X8rwurNQ+\nksftPeeiLb4RhfUZfQbckDrzmAnNNx45sun2sh80X/zGoszI51NBYjXAtSdUsvpHPfAv7cG9Z1Rx\n3PCy7QoOcNqochb44cm0dQ0Zlq/PsE/vtgvakg648dVmLh/fhYaWL9ZKZwJoTuf3+6TtHYgBUErN\nVkpVK6Xyvh9c3EZyKbmBmimveCwz/vDHmscDcLBateLrZXPXnJB4o09PGg5im8HoygWNjBuS5JSR\n5UzZN8nT/0px4E11JBNw3eRK+nYNv3TCbfUsXZehrjlg6PWbufWUKqZkj9dvWtTMzDHhiH3IwAQB\ncPCf6pg6omy3U/0OynX2+Vfgf4E78h0gbpeajgbe1Z1DFE8fatafnXx26ZnJ5xKW+vQgpfKzcUgR\n9WVWTU5TdqWUBTwRBMHofAaIW8m7ImfYS1YZqZbjE28tmZF8uuaI
xFKri0prXROegw3Mqumb6xdL\nybMsx1sDDNadQ+g3Un34/ozk0x9+JbmoV282j1YqcoefLzOrZnyuX1yokkftPyUX/0JKLoBlwV7D\nr0hdMPyK1AXbXkEXjFAfHxCRK+iKvSK0VXEs+TLgGN0hRLTU0r3XrempR92anrr1Crp3InAFXSTO\nH8Wx5IuAb+kOIaJrN1fQde9HzegiXkH3Sq5fqJS6B5gE9FNKrQZ+EQTBrfkIEcdj8jHAP3TnEPHU\njS11X02+uPicwl9B1wT0ZFbNzovuiyyOJU8CNcimjqLTguCoxHvvfT05d+2ExDtD8nwF3UJm1Ryd\nx+frsNiVHMByvPnAv+nOIcyyJ2s/Ob/smRX5uIIOuJ5ZNZflLVwnxPGYHOA5pOQizz6m/2A3NX1w\nPq6gox3H44UW15I/qzuAMFtnrqDLeqngIXMU1+l6BeFlp3I7UFF0bV1BB7zDrJoxrT5Yg1iWHMBy\nvLmE20QLoU0XWpqmJBa9O6PsmYaxasU+ZSozFPg1s2oc3dm2iut0HeABpORCs2bKKx7PHD3u8ebw\nRPpotWrlyclXHv2u5lzbitv15Nt6mAhsdyvEthYH++xxbWr6a7pzbCu2Jfddex3hTR6EiJLHfNfO\n/9YTnRDbkmfdrzuAEDt4WHeAHcW95A8hU3YRHRuBv+sOsaNYl9x37Q3APN05hMi6w3ftRt0hdhTr\nkmfJlF1ExZ91B2iNCSV/ANisO4QoeS/4rh2JTSJ2FPuS+65dC+TlulshOiGSozgYUPKsG4FIvW0h\nSsp6whllJBlRct+1feAR3TlEyfqr79qt3Hc1Gowoedb1ugOIkhQAf9EdYneMKbnv2i8Dr+rOIUrO\nQ75rL9cdYneMKXnWDboDiJKSAa7UHaItppX8QeB93SFEybjbd+33dIdoi1El9107Bfyn7hyiJKSA\nWbpD5MKokgP4rn0fEdpfSxjrNt+1Y3GXXeNKnvUj3QGE0ZqAX+kOkSsjS+679kJkTbsonL/4rv2R\n7hC5MrLkWT8l/I0rRD6tA36pO0R7GFvy7Cq43+vOIYzzH9ldiWLD2JJnXQ18pjuEMMZTvmvP0R2i\nvYwuue/aNcD3dOcQRqgHLtQdoiOMLjmA79oPAffqziFi7wrftT/QHaIjjC951iVAte4QIrZeBf6g\nO0RHlUTJfddeD3xLdw4RSy3ABb5rZ3QH6aiSKDmA79pPAH/UnUPEjuO79mLdITqjZEqe9WMg8hcU\niMh4xHft2O9TUFIl9117C3AWUKc7i4i8VcA3dIfIh5IqOUB26jWd8FpgIVqzBTjDd+1NuoPkQ8mV\nHMB37ccJl70K0Zpv+a79lu4Q+VKSJQfwXfu3wGzdOUTkXOe79j26Q+RTyZY860LgOd0hRGQ8Bji6\nQ+SbCoJAdwatLMfrS7jYYV/dWYRW8wA7ylsrd1Spj+RbF8pMI7yEUJSmhcCpJhYcpOQA+K69FDgO\nWKs7iyi6t4CpvmvX6w5SKCU/Xd+W5XgHAfOBAbqziKJYChzru7bRv9xlJN+G79pLgEnAp5qjiMLz\ngRNMLzhIyXeSvf3sJOATzVFE4SwHjvNd+2PdQYpBSt4K37WXERa9JH4ISszLwNG+a5fMTTik5LuQ\nvb/VsUAkbywvOuRB4PjsOyolQ0q+G75rrwK+DHi6s4hOuwH4d9+1G3UHKTY5u54Dy/ESwDXIevc4\nyhDusFqyO/dKydvBcrzpwC1Ale4sIiebgZm+az+sO4hOUvJ2shxvHPAIsKfuLGK3XgOmx+V+ZYUk\nx+Tt5Lv268A4YIHuLKJVAfBr4BgpeEhG8g6yHE8B/0F4rF6hOY4IfQKc77v2PN1BokRK3knZpbB3\nAmN1ZylxTwDfiNstjIpBSp4HluOVEZ55/zkyqhdbDeGOqv+nO0hUScnzyHK8UYRn38frzlIi7gIu\n811b7ne3G1LyPMseq59DeLNF
S28aYy0BfuC79nzdQeJASl4gluN1AS4Gfgb00RzHFGuBK4GbfddO\n6w4TF1LyArMcrxfhvmE/RBbRdNRmwrvfXJu9U61oByl5kViONxT4JTADSGqOExfVwO+Bm0zZA10H\nKXmRWY63F3AR8G2gr+Y4UeUDvwVmZ+96IzpBSq6J5XiVwLnA94ExmuNExbuEq9Xu8107pTuMKaTk\nEWA53rGEZf8qpTeVXwfcD8zxXXuh7jAmkpJHiOV4gwmLfjowESjTm6hgtgCPA3OAp3zXbtGcx2hS\n8ojK3vThFMLCTyb+K+kagReAe4AHfdeu1ZynZEjJY8ByvB6Anf2YAOytN1FOWoBFhFtczwcWluKu\nLFEgJY8hy/GGEZb9KOAIwhN3lVpDQQPhibPnCUv9ou/ach/4CJCSG8ByvHJgNHAw4VJai3C0t4Bh\nQHkeX24tsAJYmf28mLDc7/uuLfd8jyApueGy+9MNISz8YMJVdxWEI3/lNn+uIDzRVwNsyn5s3OHz\nBhmd40dKLoThZPsnIQwnJRfCcFJyIQwnJRfCcFJyIQwnJRfCcFJyIQwnJRfCcFJyIQwnJRfCcFJy\nIQwnJRfCcFJyIQwnJRfCcFJyIQwnJRfCcFJyIQwnJRfCcFJyIQwnJRfCcFJyIQwnJRfCcFJyIQwn\nJRfCcFJyIQwnJRfCcFJyIQwnJRfCcFJyIQwnJRfCcFJyIQz3/zbtcAuj3nkLAAAAAElFTkSuQmCC\n",
      "text/plain": [
       "<Figure size 432x288 with 1 Axes>"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    }
   ],
   "source": [
    "train['y'].value_counts().plot.pie(autopct = '%1.2f%%')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "metadata": {
    "collapsed": false
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(25000, 157)"
      ]
     },
     "execution_count": 10,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "x = pd.concat([x_train,x_test])\n",
    "x.shape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "Y_train = train['y']"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "for i in range(96,158):\n",
    "    col = 'x'+'_'+str(i)\n",
    "    if col in x.columns.values:\n",
    "        dummies_df = pd.get_dummies(x[col]).rename(columns=lambda x: col + str(x))\n",
    "        x = pd.concat([x, dummies_df], axis=1)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "metadata": {
    "collapsed": false
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "(15000, 361)\n",
      "(10000, 361)\n"
     ]
    }
   ],
   "source": [
    "train_X = x[0:15000]\n",
    "test_X = x[15000:25000]\n",
    "print(train_X.shape)\n",
    "print(test_X.shape)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "metadata": {
    "collapsed": false
   },
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "Using TensorFlow backend.\n",
      "C:\\Program Files\\Anaconda3\\lib\\site-packages\\sklearn\\ensemble\\weight_boosting.py:29: DeprecationWarning: numpy.core.umath_tests is an internal NumPy module and should not be imported. It will be removed in a future NumPy release.\n",
      "  from numpy.core.umath_tests import inner1d\n"
     ]
    }
   ],
   "source": [
    "from keras.models import Model #泛型模型\n",
    "from keras.layers import Dense, Input\n",
    "from sklearn.ensemble import IsolationForest\n",
    "from sklearn.model_selection import train_test_split"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "def split_data(data_):\n",
    "    idx_1 = data_[data_['label']==0].index\n",
    "    idx_2 = data_[data_['label']==1].index\n",
    "    nb_1 = len(data_.loc[idx_1])\n",
    "    nb_2 = len(data_.loc[idx_2])\n",
    "#     print(nb_1)\n",
    "#     print(nb_2)\n",
    "    idx_list_1 = list(idx_1)\n",
    "    idx_list_2 = list(idx_2)\n",
    "    train_x1 = data_.loc[idx_list_1]\n",
    "    train_x2 = data_.loc[idx_list_2]\n",
    "#     print(train_x1.shape)\n",
    "#     print(train_x2.shape)\n",
    "    return train_x1,train_x2"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 16,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "def resample_data(data, number):\n",
    "    idx_1 = data.index\n",
    "    nb_1 = len(idx_1)\n",
    "#     print(nb_1)\n",
    "#     number = int(nb_1 * rate)\n",
    "    idx_1_sub = np.random.choice(idx_1, number)\n",
    "#     print(idx_1_sub)\n",
    "    nb_2 = len(data.loc[idx_1_sub])\n",
    "#     print(nb_2)\n",
    "    idx_list_1 = list(idx_1_sub)\n",
    "    train_1 = data.loc[idx_1_sub]\n",
    "#     print(train_1.shape)\n",
    "    return train_1"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 48,
   "metadata": {
    "collapsed": false
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "(12000, 361)\n"
     ]
    }
   ],
   "source": [
     "# Hold out 20% for validation; random_state fixed for reproducibility.\n",
     "# NOTE(review): the target is heavily imbalanced (see the pie chart\n",
     "# above) -- consider stratify=Y_train so the split preserves the class\n",
     "# ratio; confirm before comparing validation metrics across runs.\n",
     "X_train,X_val,y_train,y_val= train_test_split(train_X,Y_train,test_size=0.2,random_state=2)\n",
     "print(X_train.shape)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 18,
   "metadata": {
    "collapsed": false
   },
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "C:\\Program Files\\Anaconda3\\lib\\site-packages\\ipykernel\\__main__.py:2: SettingWithCopyWarning: \n",
      "A value is trying to be set on a copy of a slice from a DataFrame.\n",
      "Try using .loc[row_indexer,col_indexer] = value instead\n",
      "\n",
      "See the caveats in the documentation: http://pandas.pydata.org/pandas-docs/stable/indexing.html#indexing-view-versus-copy\n",
      "  from ipykernel import kernelapp as app\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "(12000, 362)"
      ]
     },
     "execution_count": 18,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "xx = X_train\n",
    "xx['label'] = y_train\n",
    "\n",
    "xx.shape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 19,
   "metadata": {
    "collapsed": false
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "(11442, 362)\n",
      "(558, 362)\n"
     ]
    }
   ],
   "source": [
    "train_x1, train_x2 = split_data(xx)\n",
    "print(train_x1.shape)\n",
    "print(train_x2.shape)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 20,
   "metadata": {
    "collapsed": false
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "(10000, 362)\n"
     ]
    }
   ],
   "source": [
    "train_data = resample_data(train_x1, 10000)\n",
    "print(train_data.shape)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 21,
   "metadata": {
    "collapsed": false
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(11442, 361)"
      ]
     },
     "execution_count": 21,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
     "# NOTE(review): this overwrites train_data from the previous cell, so the\n",
     "# resample_data(train_x1, 10000) result above is effectively unused --\n",
     "# delete that cell or use its output, whichever was intended.\n",
     "train_data = train_x1.drop(['label'],axis=1)\n",
     "train_data.shape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 22,
   "metadata": {
    "collapsed": false
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(558, 361)"
      ]
     },
     "execution_count": 22,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "train_data1 = train_x2.drop(['label'],axis=1)\n",
    "train_data1.shape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 23,
   "metadata": {
    "collapsed": false
   },
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style scoped>\n",
       "    .dataframe tbody tr th:only-of-type {\n",
       "        vertical-align: middle;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: right;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>x_1</th>\n",
       "      <th>x_2</th>\n",
       "      <th>x_3</th>\n",
       "      <th>x_4</th>\n",
       "      <th>x_5</th>\n",
       "      <th>x_6</th>\n",
       "      <th>x_7</th>\n",
       "      <th>x_8</th>\n",
       "      <th>x_9</th>\n",
       "      <th>x_10</th>\n",
       "      <th>...</th>\n",
       "      <th>x_1561</th>\n",
       "      <th>x_1562</th>\n",
       "      <th>x_1563</th>\n",
       "      <th>x_157-99</th>\n",
       "      <th>x_1571</th>\n",
       "      <th>x_1572</th>\n",
       "      <th>x_1573</th>\n",
       "      <th>x_1574</th>\n",
       "      <th>x_15710</th>\n",
       "      <th>x_15711</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>1154</th>\n",
       "      <td>0.291667</td>\n",
       "      <td>0.389913</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>3</td>\n",
       "      <td>2</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>...</td>\n",
       "      <td>1</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>1</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>7919</th>\n",
       "      <td>0.083333</td>\n",
       "      <td>0.491195</td>\n",
       "      <td>-99</td>\n",
       "      <td>-99</td>\n",
       "      <td>-99</td>\n",
       "      <td>-99</td>\n",
       "      <td>-99</td>\n",
       "      <td>-99</td>\n",
       "      <td>-99</td>\n",
       "      <td>-99</td>\n",
       "      <td>...</td>\n",
       "      <td>0</td>\n",
       "      <td>1</td>\n",
       "      <td>0</td>\n",
       "      <td>1</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>4854</th>\n",
       "      <td>0.291667</td>\n",
       "      <td>0.361088</td>\n",
       "      <td>-99</td>\n",
       "      <td>-99</td>\n",
       "      <td>-99</td>\n",
       "      <td>-99</td>\n",
       "      <td>-99</td>\n",
       "      <td>-99</td>\n",
       "      <td>-99</td>\n",
       "      <td>-99</td>\n",
       "      <td>...</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>1</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>5947</th>\n",
       "      <td>0.250000</td>\n",
       "      <td>0.272055</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>2</td>\n",
       "      <td>2</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>...</td>\n",
       "      <td>0</td>\n",
       "      <td>1</td>\n",
       "      <td>0</td>\n",
       "      <td>1</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>10536</th>\n",
       "      <td>0.458333</td>\n",
       "      <td>0.519782</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>1</td>\n",
       "      <td>1</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>...</td>\n",
       "      <td>0</td>\n",
       "      <td>1</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>1</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "<p>5 rows × 361 columns</p>\n",
       "</div>"
      ],
      "text/plain": [
       "            x_1       x_2  x_3  x_4  x_5  x_6  x_7  x_8  x_9  x_10   ...     \\\n",
       "1154   0.291667  0.389913    0    0    0    0    3    2    0     0   ...      \n",
       "7919   0.083333  0.491195  -99  -99  -99  -99  -99  -99  -99   -99   ...      \n",
       "4854   0.291667  0.361088  -99  -99  -99  -99  -99  -99  -99   -99   ...      \n",
       "5947   0.250000  0.272055    0    0    0    0    2    2    0     0   ...      \n",
       "10536  0.458333  0.519782    0    0    0    0    1    1    0     0   ...      \n",
       "\n",
       "       x_1561  x_1562  x_1563  x_157-99  x_1571  x_1572  x_1573  x_1574  \\\n",
       "1154        1       0       0         0       0       0       0       1   \n",
       "7919        0       1       0         1       0       0       0       0   \n",
       "4854        0       0       0         1       0       0       0       0   \n",
       "5947        0       1       0         1       0       0       0       0   \n",
       "10536       0       1       0         0       0       1       0       0   \n",
       "\n",
       "       x_15710  x_15711  \n",
       "1154         0        0  \n",
       "7919         0        0  \n",
       "4854         0        0  \n",
       "5947         0        0  \n",
       "10536        0        0  \n",
       "\n",
       "[5 rows x 361 columns]"
      ]
     },
     "execution_count": 23,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "train_data.head()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 35,
   "metadata": {
    "collapsed": false,
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Epoch 1/200\n",
      "558/558 [==============================] - 1s 920us/step - loss: 1499.6892\n",
      "Epoch 2/200\n",
      "558/558 [==============================] - 0s 235us/step - loss: 1490.3059\n",
      "Epoch 3/200\n",
      "558/558 [==============================] - 0s 267us/step - loss: 1487.4792\n",
      "Epoch 4/200\n",
      "558/558 [==============================] - 0s 247us/step - loss: 1486.0535\n",
      "Epoch 5/200\n",
      "558/558 [==============================] - 0s 267us/step - loss: 1486.0507\n",
      "Epoch 6/200\n",
      "558/558 [==============================] - 0s 269us/step - loss: 1485.9172\n",
      "Epoch 7/200\n",
      "558/558 [==============================] - 0s 260us/step - loss: 1485.6946\n",
      "Epoch 8/200\n",
      "558/558 [==============================] - 0s 265us/step - loss: 1485.6937\n",
      "Epoch 9/200\n",
      "558/558 [==============================] - 0s 265us/step - loss: 1485.6878\n",
      "Epoch 10/200\n",
      "558/558 [==============================] - 0s 262us/step - loss: 1485.6806\n",
      "Epoch 11/200\n",
      "558/558 [==============================] - 0s 269us/step - loss: 1485.6798\n",
      "Epoch 12/200\n",
      "558/558 [==============================] - 0s 274us/step - loss: 1485.6797\n",
      "Epoch 13/200\n",
      "558/558 [==============================] - 0s 244us/step - loss: 1485.6796\n",
      "Epoch 14/200\n",
      "558/558 [==============================] - 0s 264us/step - loss: 1485.6760\n",
      "Epoch 15/200\n",
      "558/558 [==============================] - 0s 267us/step - loss: 1485.6751\n",
      "Epoch 16/200\n",
      "558/558 [==============================] - 0s 264us/step - loss: 1485.6740\n",
      "Epoch 17/200\n",
      "558/558 [==============================] - 0s 242us/step - loss: 1485.6842\n",
      "Epoch 18/200\n",
      "558/558 [==============================] - 0s 244us/step - loss: 1485.6801\n",
      "Epoch 19/200\n",
      "558/558 [==============================] - 0s 285us/step - loss: 1485.7036\n",
      "Epoch 20/200\n",
      "558/558 [==============================] - 0s 278us/step - loss: 1485.5249\n",
      "Epoch 21/200\n",
      "558/558 [==============================] - 0s 310us/step - loss: 1485.2774\n",
      "Epoch 22/200\n",
      "558/558 [==============================] - 0s 301us/step - loss: 1482.9138\n",
      "Epoch 23/200\n",
      "558/558 [==============================] - 0s 283us/step - loss: 1482.8695\n",
      "Epoch 24/200\n",
      "558/558 [==============================] - 0s 264us/step - loss: 1483.1817\n",
      "Epoch 25/200\n",
      "558/558 [==============================] - 0s 260us/step - loss: 1483.0264\n",
      "Epoch 26/200\n",
      "558/558 [==============================] - 0s 280us/step - loss: 1482.8660\n",
      "Epoch 27/200\n",
      "558/558 [==============================] - 0s 326us/step - loss: 1482.8742\n",
      "Epoch 28/200\n",
      "558/558 [==============================] - 0s 312us/step - loss: 1482.8377\n",
      "Epoch 29/200\n",
      "558/558 [==============================] - 0s 276us/step - loss: 1482.8437\n",
      "Epoch 30/200\n",
      "558/558 [==============================] - 0s 265us/step - loss: 1482.8259\n",
      "Epoch 31/200\n",
      "558/558 [==============================] - 0s 271us/step - loss: 1482.8147\n",
      "Epoch 32/200\n",
      "558/558 [==============================] - 0s 319us/step - loss: 1482.8019\n",
      "Epoch 33/200\n",
      "558/558 [==============================] - 0s 330us/step - loss: 1482.8012\n",
      "Epoch 34/200\n",
      "558/558 [==============================] - 0s 332us/step - loss: 1482.8012\n",
      "Epoch 35/200\n",
      "558/558 [==============================] - 0s 283us/step - loss: 1482.8012\n",
      "Epoch 36/200\n",
      "558/558 [==============================] - 0s 298us/step - loss: 1482.8012\n",
      "Epoch 37/200\n",
      "558/558 [==============================] - 0s 267us/step - loss: 1482.8012\n",
      "Epoch 38/200\n",
      "558/558 [==============================] - 0s 289us/step - loss: 1482.8012\n",
      "Epoch 39/200\n",
      "558/558 [==============================] - 0s 307us/step - loss: 1482.8012\n",
      "Epoch 40/200\n",
      "558/558 [==============================] - 0s 330us/step - loss: 1482.8012\n",
      "Epoch 41/200\n",
      "558/558 [==============================] - 0s 262us/step - loss: 1482.8012\n",
      "Epoch 42/200\n",
      "558/558 [==============================] - 0s 260us/step - loss: 1482.6097\n",
      "Epoch 43/200\n",
      "558/558 [==============================] - 0s 280us/step - loss: 1482.4442\n",
      "Epoch 44/200\n",
      "558/558 [==============================] - 0s 328us/step - loss: 1482.4409\n",
      "Epoch 45/200\n",
      "558/558 [==============================] - 0s 285us/step - loss: 1482.4383\n",
      "Epoch 46/200\n",
      "558/558 [==============================] - 0s 246us/step - loss: 1482.4364\n",
      "Epoch 47/200\n",
      "558/558 [==============================] - 0s 278us/step - loss: 1482.4363\n",
      "Epoch 48/200\n",
      "558/558 [==============================] - 0s 251us/step - loss: 1482.4363\n",
      "Epoch 49/200\n",
      "558/558 [==============================] - 0s 325us/step - loss: 1482.4363\n",
      "Epoch 50/200\n",
      "558/558 [==============================] - 0s 298us/step - loss: 1482.4363\n",
      "Epoch 51/200\n",
      "558/558 [==============================] - 0s 278us/step - loss: 1482.4363\n",
      "Epoch 52/200\n",
      "558/558 [==============================] - 0s 321us/step - loss: 1482.4363\n",
      "Epoch 53/200\n",
      "558/558 [==============================] - 0s 287us/step - loss: 1482.4363\n",
      "Epoch 54/200\n",
      "558/558 [==============================] - 0s 289us/step - loss: 1482.4363\n",
      "Epoch 55/200\n",
      "558/558 [==============================] - 0s 256us/step - loss: 1482.4363\n",
      "Epoch 56/200\n",
      "558/558 [==============================] - 0s 262us/step - loss: 1482.4363\n",
      "Epoch 57/200\n",
      "558/558 [==============================] - 0s 256us/step - loss: 1482.4364\n",
      "Epoch 58/200\n",
      "558/558 [==============================] - 0s 253us/step - loss: 1482.4363\n",
      "Epoch 59/200\n",
      "558/558 [==============================] - 0s 289us/step - loss: 1482.4364\n",
      "Epoch 60/200\n",
      "558/558 [==============================] - 0s 231us/step - loss: 1482.4363\n",
      "Epoch 61/200\n",
      "558/558 [==============================] - 0s 233us/step - loss: 1482.4363\n",
      "Epoch 62/200\n",
      "558/558 [==============================] - 0s 237us/step - loss: 1482.4364\n",
      "Epoch 63/200\n",
      "558/558 [==============================] - 0s 246us/step - loss: 1482.4363\n",
      "Epoch 64/200\n",
      "558/558 [==============================] - 0s 224us/step - loss: 1482.4363\n",
      "Epoch 65/200\n",
      "558/558 [==============================] - 0s 226us/step - loss: 1482.4363\n",
      "Epoch 66/200\n",
      "558/558 [==============================] - 0s 256us/step - loss: 1482.4364\n",
      "Epoch 67/200\n",
      "558/558 [==============================] - 0s 230us/step - loss: 1482.4364\n",
      "Epoch 68/200\n",
      "558/558 [==============================] - 0s 247us/step - loss: 1482.4363\n",
      "Epoch 69/200\n",
      "558/558 [==============================] - 0s 215us/step - loss: 1482.4364\n",
      "Epoch 70/200\n",
      "558/558 [==============================] - 0s 247us/step - loss: 1482.4363\n",
      "Epoch 71/200\n",
      "558/558 [==============================] - 0s 217us/step - loss: 1482.4363\n",
      "Epoch 72/200\n",
      "558/558 [==============================] - 0s 233us/step - loss: 1482.4363\n",
      "Epoch 73/200\n",
      "558/558 [==============================] - 0s 274us/step - loss: 1482.4363\n",
      "Epoch 74/200\n",
      "558/558 [==============================] - 0s 244us/step - loss: 1482.4363\n",
      "Epoch 75/200\n",
      "558/558 [==============================] - 0s 230us/step - loss: 1482.4363\n",
      "Epoch 76/200\n",
      "558/558 [==============================] - 0s 224us/step - loss: 1482.4363\n",
      "Epoch 77/200\n",
      "558/558 [==============================] - 0s 251us/step - loss: 1482.4363\n",
      "Epoch 78/200\n",
      "558/558 [==============================] - 0s 219us/step - loss: 1482.4364\n",
      "Epoch 79/200\n",
      "558/558 [==============================] - 0s 256us/step - loss: 1482.4363\n",
      "Epoch 80/200\n",
      "558/558 [==============================] - 0s 204us/step - loss: 1482.4363\n",
      "Epoch 81/200\n",
      "558/558 [==============================] - 0s 210us/step - loss: 1482.4363\n",
      "Epoch 82/200\n",
      "558/558 [==============================] - 0s 213us/step - loss: 1482.4363\n",
      "Epoch 83/200\n",
      "558/558 [==============================] - 0s 215us/step - loss: 1482.4364\n",
      "Epoch 84/200\n",
      "558/558 [==============================] - 0s 210us/step - loss: 1482.4363\n",
      "Epoch 85/200\n",
      "558/558 [==============================] - 0s 212us/step - loss: 1482.4363\n",
      "Epoch 86/200\n",
      "558/558 [==============================] - 0s 213us/step - loss: 1482.4364\n",
      "Epoch 87/200\n",
      "558/558 [==============================] - 0s 213us/step - loss: 1482.4364\n",
      "Epoch 88/200\n",
      "558/558 [==============================] - 0s 228us/step - loss: 1482.4363\n",
      "Epoch 89/200\n",
      "558/558 [==============================] - 0s 239us/step - loss: 1482.4363\n",
      "Epoch 90/200\n",
      "558/558 [==============================] - 0s 226us/step - loss: 1482.4364\n",
      "Epoch 91/200\n",
      "558/558 [==============================] - 0s 208us/step - loss: 1482.4364\n",
      "Epoch 92/200\n",
      "558/558 [==============================] - 0s 210us/step - loss: 1482.4364\n",
      "Epoch 93/200\n",
      "558/558 [==============================] - 0s 215us/step - loss: 1482.4363\n",
      "Epoch 94/200\n",
      "558/558 [==============================] - 0s 206us/step - loss: 1482.4363\n",
      "Epoch 95/200\n",
      "558/558 [==============================] - 0s 208us/step - loss: 1482.4363\n",
      "Epoch 96/200\n",
      "558/558 [==============================] - 0s 208us/step - loss: 1482.4363\n",
      "Epoch 97/200\n",
      "558/558 [==============================] - 0s 210us/step - loss: 1482.4364\n",
      "Epoch 98/200\n",
      "558/558 [==============================] - 0s 206us/step - loss: 1482.4363\n",
      "Epoch 99/200\n",
      "558/558 [==============================] - 0s 208us/step - loss: 1482.4363\n",
      "Epoch 100/200\n",
      "558/558 [==============================] - 0s 208us/step - loss: 1482.4363\n",
      "Epoch 101/200\n",
      "558/558 [==============================] - 0s 210us/step - loss: 1482.4363\n",
      "Epoch 102/200\n",
      "558/558 [==============================] - 0s 206us/step - loss: 1482.4363\n",
      "Epoch 103/200\n",
      "558/558 [==============================] - 0s 210us/step - loss: 1482.4364\n",
      "Epoch 104/200\n",
      "558/558 [==============================] - 0s 212us/step - loss: 1482.4363\n",
      "Epoch 105/200\n",
      "558/558 [==============================] - 0s 213us/step - loss: 1482.4363\n",
      "Epoch 106/200\n",
      "558/558 [==============================] - 0s 213us/step - loss: 1482.4363\n",
      "Epoch 107/200\n",
      "558/558 [==============================] - 0s 206us/step - loss: 1482.4363\n",
      "Epoch 108/200\n",
      "558/558 [==============================] - 0s 206us/step - loss: 1482.4363\n",
      "Epoch 109/200\n",
      "558/558 [==============================] - 0s 208us/step - loss: 1482.4364\n",
      "Epoch 110/200\n",
      "558/558 [==============================] - 0s 204us/step - loss: 1482.4364\n",
      "Epoch 111/200\n",
      "558/558 [==============================] - 0s 210us/step - loss: 1482.4363\n",
      "Epoch 112/200\n",
      "558/558 [==============================] - 0s 210us/step - loss: 1482.4364\n",
      "Epoch 113/200\n",
      "558/558 [==============================] - 0s 213us/step - loss: 1482.4363\n",
      "Epoch 114/200\n",
      "558/558 [==============================] - 0s 212us/step - loss: 1482.4363\n",
      "Epoch 115/200\n",
      "558/558 [==============================] - 0s 206us/step - loss: 1482.4363\n",
      "Epoch 116/200\n",
      "558/558 [==============================] - 0s 206us/step - loss: 1482.4363\n",
      "Epoch 117/200\n",
      "558/558 [==============================] - 0s 210us/step - loss: 1482.4364\n",
      "Epoch 118/200\n",
      "558/558 [==============================] - 0s 208us/step - loss: 1482.4363\n",
      "Epoch 119/200\n",
      "558/558 [==============================] - 0s 208us/step - loss: 1482.4364\n",
      "Epoch 120/200\n",
      "558/558 [==============================] - 0s 208us/step - loss: 1482.4363\n",
      "Epoch 121/200\n",
      "558/558 [==============================] - 0s 215us/step - loss: 1482.4364\n",
      "Epoch 122/200\n",
      "558/558 [==============================] - 0s 217us/step - loss: 1482.4364\n",
      "Epoch 123/200\n",
      "558/558 [==============================] - 0s 208us/step - loss: 1482.4363\n",
      "Epoch 124/200\n",
      "558/558 [==============================] - 0s 215us/step - loss: 1482.4363\n",
      "Epoch 125/200\n",
      "558/558 [==============================] - 0s 208us/step - loss: 1482.4363\n",
      "Epoch 126/200\n",
      "558/558 [==============================] - 0s 210us/step - loss: 1482.4363\n",
      "Epoch 127/200\n",
      "558/558 [==============================] - 0s 208us/step - loss: 1482.4364\n",
      "Epoch 128/200\n",
      "558/558 [==============================] - 0s 206us/step - loss: 1482.4363\n",
      "Epoch 129/200\n",
      "558/558 [==============================] - 0s 212us/step - loss: 1482.4363\n",
      "Epoch 130/200\n",
      "558/558 [==============================] - 0s 208us/step - loss: 1482.4364\n",
      "Epoch 131/200\n",
      "558/558 [==============================] - 0s 208us/step - loss: 1482.4363\n",
      "Epoch 132/200\n",
      "558/558 [==============================] - 0s 208us/step - loss: 1482.4363\n",
      "Epoch 133/200\n",
      "558/558 [==============================] - 0s 206us/step - loss: 1482.4363\n",
      "Epoch 134/200\n",
      "558/558 [==============================] - 0s 210us/step - loss: 1482.4363\n",
      "Epoch 135/200\n",
      "558/558 [==============================] - 0s 212us/step - loss: 1482.4363\n",
      "Epoch 136/200\n",
      "558/558 [==============================] - 0s 212us/step - loss: 1482.4363\n",
      "Epoch 137/200\n",
      "558/558 [==============================] - 0s 208us/step - loss: 1482.4363\n",
      "Epoch 138/200\n",
      "558/558 [==============================] - 0s 208us/step - loss: 1482.4363\n",
      "Epoch 139/200\n",
      "558/558 [==============================] - 0s 212us/step - loss: 1482.4363\n",
      "Epoch 140/200\n",
      "558/558 [==============================] - 0s 212us/step - loss: 1482.4364\n",
      "Epoch 141/200\n",
      "558/558 [==============================] - 0s 208us/step - loss: 1482.4363\n",
      "Epoch 142/200\n",
      "558/558 [==============================] - 0s 208us/step - loss: 1482.4363\n",
      "Epoch 143/200\n",
      "558/558 [==============================] - 0s 206us/step - loss: 1482.4364\n",
      "Epoch 144/200\n",
      "558/558 [==============================] - 0s 215us/step - loss: 1482.4363\n",
      "Epoch 145/200\n",
      "558/558 [==============================] - 0s 210us/step - loss: 1482.4363\n",
      "Epoch 146/200\n",
      "558/558 [==============================] - 0s 204us/step - loss: 1482.4364\n",
      "Epoch 147/200\n",
      "558/558 [==============================] - 0s 208us/step - loss: 1482.4364\n",
      "Epoch 148/200\n",
      "558/558 [==============================] - 0s 208us/step - loss: 1482.4364\n",
      "Epoch 149/200\n",
      "558/558 [==============================] - 0s 206us/step - loss: 1482.4363\n",
      "Epoch 150/200\n",
      "558/558 [==============================] - 0s 206us/step - loss: 1482.4363\n",
      "Epoch 151/200\n",
      "558/558 [==============================] - 0s 208us/step - loss: 1482.4363\n",
      "Epoch 152/200\n",
      "558/558 [==============================] - 0s 210us/step - loss: 1482.4363\n",
      "Epoch 153/200\n",
      "558/558 [==============================] - 0s 210us/step - loss: 1482.4363\n",
      "Epoch 154/200\n",
      "558/558 [==============================] - 0s 203us/step - loss: 1482.4363\n",
      "Epoch 155/200\n",
      "558/558 [==============================] - 0s 210us/step - loss: 1482.4363\n",
      "Epoch 156/200\n",
      "558/558 [==============================] - 0s 208us/step - loss: 1482.4364\n",
      "Epoch 157/200\n",
      "558/558 [==============================] - 0s 213us/step - loss: 1482.4363\n",
      "Epoch 158/200\n",
      "558/558 [==============================] - 0s 210us/step - loss: 1482.4363\n",
      "Epoch 159/200\n",
      "558/558 [==============================] - 0s 210us/step - loss: 1482.4363\n",
      "Epoch 160/200\n",
      "558/558 [==============================] - 0s 213us/step - loss: 1482.4363\n",
      "Epoch 161/200\n",
      "558/558 [==============================] - 0s 212us/step - loss: 1482.4363\n",
      "Epoch 162/200\n",
      "558/558 [==============================] - 0s 210us/step - loss: 1482.4363\n",
      "Epoch 163/200\n",
      "558/558 [==============================] - 0s 204us/step - loss: 1482.4363\n",
      "Epoch 164/200\n",
      "558/558 [==============================] - 0s 208us/step - loss: 1482.4363\n",
      "Epoch 165/200\n",
      "558/558 [==============================] - 0s 215us/step - loss: 1482.4363\n",
      "Epoch 166/200\n",
      "558/558 [==============================] - 0s 206us/step - loss: 1482.4364\n",
      "Epoch 167/200\n",
      "558/558 [==============================] - 0s 208us/step - loss: 1482.4364\n",
      "Epoch 168/200\n",
      "558/558 [==============================] - 0s 208us/step - loss: 1482.4363\n",
      "Epoch 169/200\n",
      "558/558 [==============================] - 0s 203us/step - loss: 1482.4363\n",
      "Epoch 170/200\n",
      "558/558 [==============================] - 0s 210us/step - loss: 1482.4363\n",
      "Epoch 171/200\n",
      "558/558 [==============================] - 0s 212us/step - loss: 1482.4363\n",
      "Epoch 172/200\n",
      "558/558 [==============================] - 0s 210us/step - loss: 1482.4363\n",
      "Epoch 173/200\n",
      "558/558 [==============================] - 0s 212us/step - loss: 1482.4364\n",
      "Epoch 174/200\n",
      "558/558 [==============================] - 0s 210us/step - loss: 1482.4363\n",
      "Epoch 175/200\n",
      "558/558 [==============================] - 0s 213us/step - loss: 1482.4363\n",
      "Epoch 176/200\n",
      "558/558 [==============================] - 0s 204us/step - loss: 1482.4363\n",
      "Epoch 177/200\n",
      "558/558 [==============================] - 0s 206us/step - loss: 1482.4363\n",
      "Epoch 178/200\n",
      "558/558 [==============================] - 0s 206us/step - loss: 1482.4364\n",
      "Epoch 179/200\n",
      "558/558 [==============================] - 0s 206us/step - loss: 1482.4363\n",
      "Epoch 180/200\n",
      "558/558 [==============================] - 0s 208us/step - loss: 1482.4363\n",
      "Epoch 181/200\n",
      "558/558 [==============================] - 0s 212us/step - loss: 1482.4363\n",
      "Epoch 182/200\n",
      "558/558 [==============================] - 0s 212us/step - loss: 1482.4363\n",
      "Epoch 183/200\n",
      "558/558 [==============================] - 0s 206us/step - loss: 1482.4364\n",
      "Epoch 184/200\n",
      "558/558 [==============================] - 0s 208us/step - loss: 1482.4363\n",
      "Epoch 185/200\n",
      "558/558 [==============================] - 0s 212us/step - loss: 1482.4363\n",
      "Epoch 186/200\n",
      "558/558 [==============================] - 0s 212us/step - loss: 1482.4364\n",
      "Epoch 187/200\n",
      "558/558 [==============================] - 0s 203us/step - loss: 1482.4363\n",
      "Epoch 188/200\n",
      "558/558 [==============================] - 0s 208us/step - loss: 1482.4364\n",
      "Epoch 189/200\n",
      "558/558 [==============================] - 0s 204us/step - loss: 1482.4363\n",
      "Epoch 190/200\n",
      "558/558 [==============================] - 0s 206us/step - loss: 1482.4363\n",
      "Epoch 191/200\n",
      "558/558 [==============================] - 0s 210us/step - loss: 1482.4363\n",
      "Epoch 192/200\n",
      "558/558 [==============================] - 0s 210us/step - loss: 1482.4363\n",
      "Epoch 193/200\n",
      "558/558 [==============================] - 0s 206us/step - loss: 1482.4363\n",
      "Epoch 194/200\n",
      "558/558 [==============================] - 0s 210us/step - loss: 1482.4363\n",
      "Epoch 195/200\n",
      "558/558 [==============================] - 0s 210us/step - loss: 1482.4363\n",
      "Epoch 196/200\n",
      "558/558 [==============================] - 0s 213us/step - loss: 1482.4363\n",
      "Epoch 197/200\n",
      "558/558 [==============================] - 0s 212us/step - loss: 1482.4363\n",
      "Epoch 198/200\n",
      "558/558 [==============================] - 0s 210us/step - loss: 1482.4363\n",
      "Epoch 199/200\n",
      "558/558 [==============================] - 0s 212us/step - loss: 1482.4363\n",
      "Epoch 200/200\n",
      "558/558 [==============================] - 0s 208us/step - loss: 1482.4363\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "<keras.callbacks.History at 0x210445732b0>"
      ]
     },
     "execution_count": 35,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Autoencoder: compress the 361 input features down to `encoding_dim`\n",
    "# dimensions (361 -> 260), then reconstruct the original 361 features.\n",
    "# (The old comment claimed compression to 2 dims; the code uses 260.)\n",
    "encoding_dim = 260\n",
    "n_features = 361  # must match train_data1.shape[1]\n",
    "\n",
    "# Input placeholder for one 361-dimensional sample.\n",
    "input_img = Input(shape=(n_features,))\n",
    "\n",
    "# Encoder: one hidden ReLU layer, then a linear bottleneck.\n",
    "encoded = Dense(n_features, activation='relu')(input_img)\n",
    "encoder_output = Dense(encoding_dim)(encoded)\n",
    "\n",
    "# Decoder: reconstruct the input from the bottleneck code.\n",
    "decoded = Dense(300, activation='relu')(encoder_output)\n",
    "# BUG FIX: the output layer previously used activation='tanh', which bounds\n",
    "# every reconstruction to [-1, 1].  The inputs are not scaled to that range,\n",
    "# so the MSE loss froze almost immediately (stuck at ~1482 in the training\n",
    "# log above).  A linear output layer can match unscaled targets.\n",
    "decoded = Dense(n_features, activation='linear')(decoded)\n",
    "\n",
    "# Full autoencoder (input -> reconstruction), used for training.\n",
    "autoencoder = Model(inputs=input_img, outputs=decoded)\n",
    "\n",
    "# Encoder-only model (input -> 260-dim code); shares the trained weights.\n",
    "encoder = Model(inputs=input_img, outputs=encoder_output)\n",
    "\n",
    "autoencoder.compile(optimizer='adam', loss='mse')\n",
    "\n",
    "# Train the model to reproduce its own (unscaled) input.\n",
    "autoencoder.fit(train_data1.values, train_data1.values, epochs=200, batch_size=64, shuffle=True)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 36,
   "metadata": {
    "collapsed": false
   },
   "outputs": [],
   "source": [
    "# First 100 rows of train_data for a quick reconstruction spot-check.\n",
    "# NOTE(review): the autoencoder above was trained on train_data1, not\n",
    "# train_data -- confirm the two frames share the same 361 columns/scaling.\n",
    "aa = train_data.iloc[0:100,:]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 37,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "# First 100 rows of train_data1 -- the frame the autoencoder was trained on.\n",
    "aaa = train_data1.iloc[0:100,:]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 45,
   "metadata": {
    "collapsed": false
   },
   "outputs": [],
   "source": [
    "# Encode the 100 sample rows into their 260-dim bottleneck representation.\n",
    "bb = encoder.predict(aa.values)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 47,
   "metadata": {
    "collapsed": false
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(100, 260)"
      ]
     },
     "execution_count": 47,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# 100 samples x encoding_dim (260) codes.\n",
    "bb.shape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 39,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "# Full 361-dim reconstruction of the 100 train_data1 rows.\n",
    "bbb = autoencoder.predict(aaa.values)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 40,
   "metadata": {
    "collapsed": false
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(100, 361)"
      ]
     },
     "execution_count": 40,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Shape of the reconstruction.  BUG FIX: this cell previously showed\n",
    "# `bb.shape` -- a stale duplicate of the cell above that contradicted its\n",
    "# own recorded output: (100, 361) is the shape of the autoencoder output\n",
    "# `bbb`, while `bb` (encoder codes) is (100, 260) on a fresh run.\n",
    "bbb.shape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 41,
   "metadata": {
    "collapsed": false
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(361,)"
      ]
     },
     "execution_count": 41,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# One reconstructed sample: 361 features, matching aa.values[0].shape in\n",
    "# the next cell.  BUG FIX: was `bb[0].shape`, which is (260,) on a fresh\n",
    "# run and contradicts the recorded (361,) output -- that value belongs to\n",
    "# the reconstruction `bbb`.\n",
    "bbb[0].shape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 42,
   "metadata": {
    "collapsed": false
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(361,)"
      ]
     },
     "execution_count": 42,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# One original sample: 361 raw features.\n",
    "aa.values[0].shape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 43,
   "metadata": {
    "collapsed": false,
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "1069.034182509594\n",
      "2054.2862094703655\n",
      "2162.0781682997713\n",
      "1199.3218567466263\n",
      "1146.4164646823294\n",
      "1070.7083496728608\n",
      "1576.729007162186\n",
      "1172.9181144647812\n",
      "1552.0607394594304\n",
      "1629.1869839558112\n",
      "1146.2891124813134\n",
      "1147.4630085070835\n",
      "1146.4278456249792\n",
      "1042.3355124994591\n",
      "2108.12884113931\n",
      "1147.2681690446348\n",
      "2029.3422053377176\n",
      "2081.3851971730346\n",
      "1629.833804541598\n",
      "2107.1884192519806\n",
      "2002.9040749764831\n",
      "1147.586923475177\n",
      "1147.3034930531148\n",
      "1069.2094798931018\n",
      "1125.6720281707464\n",
      "1044.0733749352507\n",
      "1149.3195599228995\n",
      "2160.2856295186652\n",
      "1068.2616787009306\n",
      "2107.4678520267485\n",
      "2109.197127059287\n",
      "1628.5588355152206\n",
      "2160.027000335839\n",
      "1147.7319646925619\n",
      "1550.1689840646686\n",
      "1097.4933170790378\n",
      "2160.1642041145037\n",
      "1122.488146003726\n",
      "2133.737084374556\n",
      "2030.0258308765985\n",
      "1121.3075427817278\n",
      "2188.220499536656\n",
      "1175.0532832122904\n",
      "1632.4773556437817\n",
      "2081.8980302324117\n",
      "2027.8887416207067\n",
      "1147.231828751506\n",
      "2107.672251324085\n",
      "2107.063320006637\n",
      "2108.4772783829058\n",
      "2107.191412469513\n",
      "1252.874251046101\n",
      "2002.765398822956\n",
      "2133.6652739573865\n",
      "1068.2328254374077\n",
      "1146.624274870286\n",
      "1704.811656689519\n",
      "2161.6418454431882\n",
      "1148.8212620044983\n",
      "1281.484901583928\n",
      "1124.4201741125912\n",
      "2081.910063732285\n",
      "1041.8117293400499\n",
      "1147.5956114068856\n",
      "2055.9558691014067\n",
      "2029.5502760677432\n",
      "1146.1357625817311\n",
      "2109.4549912800753\n",
      "1148.4938197904805\n",
      "1069.9803226718705\n",
      "2057.593227656026\n",
      "1120.182330271914\n",
      "1174.4607216438226\n",
      "2107.541474753518\n",
      "2028.274858150982\n",
      "1148.846369185912\n",
      "1172.8149502096037\n",
      "1151.246627488865\n",
      "2108.102298982592\n",
      "1603.5415695242614\n",
      "2029.158528428377\n",
      "2186.933333473911\n",
      "1203.7946080407962\n",
      "2107.301991404026\n",
      "2187.7834669072463\n",
      "2028.561792654159\n",
      "1041.2387294028174\n",
      "1147.31875826617\n",
      "2161.849982923157\n",
      "1122.9203675925583\n",
      "2054.7465016684964\n",
      "2081.347521693089\n",
      "1069.1259198910925\n",
      "1174.6289812473547\n",
      "2082.128687383571\n",
      "1150.4338012683709\n",
      "2030.573724208374\n",
      "1149.0438120469425\n",
      "1253.3697851788133\n",
      "1096.274732104914\n"
     ]
    }
   ],
   "source": [
    "from sklearn.metrics import mean_squared_error\n",
    "# Per-sample reconstruction error for the train_data rows.\n",
    "# BUG FIX: this loop previously compared aa.values[i] (361 features) with\n",
    "# bb[i], but `bb` holds the 260-dim *encoder* codes, so on a fresh\n",
    "# Restart-&-Run-All mean_squared_error raised a length-mismatch error (the\n",
    "# recorded output came from a since-overwritten `bb`).  Reconstruct with\n",
    "# the full autoencoder instead.\n",
    "recon_aa = autoencoder.predict(aa.values)\n",
    "for i in range(100):\n",
    "    print(mean_squared_error(aa.values[i], recon_aa[i]))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 44,
   "metadata": {
    "collapsed": false,
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "1173.017262410767\n",
      "1200.5509244814689\n",
      "2133.9077799397287\n",
      "1146.4980670982798\n",
      "2187.0571106492175\n",
      "1226.9692953738358\n",
      "1147.1788504828962\n",
      "1122.923050200922\n",
      "1147.6516246144436\n",
      "1146.5705592517995\n",
      "1631.1805968152544\n",
      "1199.4304022961167\n",
      "2108.286571309914\n",
      "2160.2413809136\n",
      "1546.4366418922245\n",
      "1069.7359271394707\n",
      "2030.2591719873224\n",
      "1146.2158570865022\n",
      "2107.302532767192\n",
      "2160.225005922525\n",
      "1199.7010973468762\n",
      "1044.1568076376823\n",
      "1252.3874796358682\n",
      "1280.1152451547473\n",
      "1147.1115793165268\n",
      "1629.7169543084233\n",
      "2133.9423257955877\n",
      "1252.4896796095616\n",
      "1147.8477053268855\n",
      "1149.6006534848918\n",
      "1226.6202009263184\n",
      "1175.9745336460912\n",
      "1148.1025241692835\n",
      "1122.22440041552\n",
      "1226.0619836077062\n",
      "1226.7494728256906\n",
      "1227.2644958881663\n",
      "1173.3408505243674\n",
      "2107.3143713912823\n",
      "1227.0367365977065\n",
      "1627.5147629162886\n",
      "1149.2449899033945\n",
      "1147.6866202009119\n",
      "1172.912896536208\n",
      "1120.267342989637\n",
      "1120.4517422451604\n",
      "3286.9159535355484\n",
      "1200.2074206956947\n",
      "1200.1910001599506\n",
      "2186.9532407851148\n",
      "1173.053490043315\n",
      "1147.702068304333\n",
      "1631.8352358781383\n",
      "2108.2958457009568\n",
      "1226.1462443229468\n",
      "1199.9329734546027\n",
      "1199.2629603925836\n",
      "2135.4420442239234\n",
      "1145.942236649701\n",
      "1199.896877969834\n",
      "1200.5405164751016\n",
      "1173.0874509708124\n",
      "2109.1038225514603\n",
      "1172.8276360627654\n",
      "1120.1207923115799\n",
      "1202.456548125378\n",
      "1226.6239717774615\n",
      "1176.6269048324932\n",
      "2160.435034146298\n",
      "1630.6408902866979\n",
      "1172.7713072537335\n",
      "1122.7073378220591\n",
      "1199.2974492030908\n",
      "1173.4479852295688\n",
      "1201.4064989538733\n",
      "2213.6175372723724\n",
      "2133.8784945823686\n",
      "1147.2585974310207\n",
      "1147.2097627598166\n",
      "1199.355942274463\n",
      "1199.293334614415\n",
      "2160.288158094295\n",
      "1151.042081090588\n",
      "1094.3835088322828\n",
      "1199.3143977525303\n",
      "1200.0937034208985\n",
      "1173.879543480489\n",
      "2133.3468525628505\n",
      "1199.3547232041649\n",
      "1174.2833679589578\n",
      "1199.4329908558796\n",
      "1678.3486631383075\n",
      "1149.5394244433903\n",
      "1147.9967744943544\n",
      "1199.8259820223207\n",
      "2081.759697244161\n",
      "1146.5480340079982\n",
      "2107.245605804101\n",
      "2134.0331541558908\n",
      "1200.0765361018464\n"
     ]
    }
   ],
   "source": [
    "# Per-sample reconstruction MSE on train_data1 rows (both vectors are\n",
    "# 361-dim).  NOTE(review): errors of ~1000-2000 are the same magnitude as\n",
    "# the frozen training loss (~1482) -- the model barely reconstructs the\n",
    "# input; see the tanh/scaling note on the training cell.\n",
    "for i in range(100):\n",
    "    print(mean_squared_error(aaa.values[i], bbb[i]))"
   ]
   }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python [default]",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.5.2"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 1
}
