{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 14,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "import h2o4gpu as h2o4gpu\n",
    "from h2o4gpu.types import *\n",
    "\n",
    "'''\n",
    "Elastic Net\n",
    "\n",
    "   minimize    (1/2) ||Ax - b||_2^2 + \\alpha * \\lambda ||x||_1 + 0.5 * (1-\\alpha) * \\lambda ||x||_2\n",
    "\n",
    "   for 100 values of \\lambda, and alpha in [0,1]\n",
    "   See <h2o4gpu>/matlab/examples/lasso_path.m for detailed description.\n",
    "'''\n",
    "\n",
    "\n",
    "def elastic_net(X, y, nGPUs=0, nlambda=100, nfolds=5, nalpha=5, validFraction=0.2, family=\"elasticnet\", verbose=0):\n",
    "    # choose solver\n",
    "    Solver = h2o4gpu.GLM\n",
    "\n",
    "    sharedA = 0\n",
    "    nThreads = None  # let internal method figure this out\n",
    "    intercept = 0\n",
    "    standardize = 0\n",
    "    lambda_min_ratio = 1e-9\n",
    "    nFolds = nfolds\n",
    "    nLambdas = nlambda\n",
    "    nAlphas = nalpha\n",
    "\n",
    "    if standardize:\n",
    "        print(\"implement standardization transformer\")\n",
    "        exit()\n",
    "\n",
    "    # Setup Train/validation Set Split\n",
    "    morig = X.shape[0]\n",
    "    norig = X.shape[1]\n",
    "    print(\"Original m=%d n=%d\" % (morig, norig))\n",
    "    fortran = X.flags.f_contiguous\n",
    "    print(\"fortran=%d\" % fortran)\n",
    "\n",
    "    # Do train/valid split\n",
    "    HO = int(validFraction * morig)\n",
    "    H = morig - HO\n",
    "    print(\"Size of Train rows=%d valid rows=%d\" % (H, HO))\n",
    "    trainX = np.copy(X[0:H, :])\n",
    "    trainY = np.copy(y[0:H])\n",
    "    validX = np.copy(X[H:-1, :])\n",
    "\n",
    "    mTrain = trainX.shape[0]\n",
    "    mvalid = validX.shape[0]\n",
    "    print(\"mTrain=%d mvalid=%d\" % (mTrain, mvalid))\n",
    "\n",
    "    if intercept == 1:\n",
    "        trainX = np.hstack([trainX, np.ones((trainX.shape[0], 1), dtype=trainX.dtype)])\n",
    "        validX = np.hstack([validX, np.ones((validX.shape[0], 1), dtype=validX.dtype)])\n",
    "        n = trainX.shape[1]\n",
    "        print(\"New n=%d\" % n)\n",
    "\n",
    "    ## Constructor\n",
    "    print(\"Setting up solver\")\n",
    "    enet = Solver(sharedA, nThreads, nGPUs, 'c' if fortran else 'r', intercept, standardize, lambda_min_ratio, nLambdas, nFolds, nAlphas, verbose=verbose,family=family)\n",
    "\n",
    "    print(\"trainX\")\n",
    "    print(trainX)\n",
    "    print(\"trainY\")\n",
    "    print(trainY)\n",
    "\n",
    "    ## Solve\n",
    "    print(\"Solving\")\n",
    "    Xvsalpha = enet.fit(trainX, trainY)\n",
    "    # Xvsalphalambda, Xvsalpha = enet.fit(trainX, trainY, validX, validY)\n",
    "    # Xvsalphalambda, Xvsalpha = enet.fit(trainX, trainY, validX, validY, trainW)\n",
    "    # Xvsalphalambda, Xvsalpha = enet.fit(trainX, trainY, validX, validY, trainW, 0)\n",
    "    # givefullpath=1\n",
    "    #  Xvsalphalambda, Xvsalpha = enet.fit(trainX, trainY, validX, validY, trainW, givefullpath)\n",
    "    print(\"Done Solving\")\n",
    "\n",
    "    # show something about Xvsalphalambda or Xvsalpha\n",
    "    print(\"Xvsalpha\")\n",
    "    print(Xvsalpha)\n",
    "    print(\"np.shape(Xvsalpha)\")\n",
    "    print(np.shape(Xvsalpha))\n",
    "\n",
    "    rmse = enet.getrmse()\n",
    "    if family == \"elasticnet\":\n",
    "        print(\"rmse\")\n",
    "        print(rmse)\n",
    "    else:\n",
    "        print(\"logloss\")\n",
    "        print(rmse)\n",
    "\n",
    "    print(\"lambdas\")\n",
    "    lambdas = enet.getlambdas()\n",
    "    print(lambdas)\n",
    "\n",
    "    print(\"alphas\")\n",
    "    alphas = enet.getalphas()\n",
    "    print(alphas)\n",
    "\n",
    "    print(\"tols\")\n",
    "    tols = enet.gettols()\n",
    "    print(tols)\n",
    "\n",
    "    testvalidY = np.dot(trainX, Xvsalpha.T)\n",
    "    print(\"testvalidY (newvalidY should be this)\")\n",
    "    print(testvalidY)\n",
    "\n",
    "    print(\"Predicting, assuming unity weights\")\n",
    "    if validX == None or mvalid == 0:\n",
    "        print(\"Using trainX for validX\")\n",
    "        newvalidY = enet.predict(trainX)  # for testing\n",
    "    else:\n",
    "        print(\"Using validX for validX\")\n",
    "        newvalidY = enet.predict(validX)\n",
    "    print(\"newvalidY\")\n",
    "    print(newvalidY)\n",
    "\n",
    "    print(\"Done Reporting\")\n",
    "    return trainX,trainY\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 19,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "(23999, 25)\n",
      "Original m=23999 n=24\n",
      "fortran=0\n",
      "Size of Train rows=23999 valid rows=0\n",
      "mTrain=23999 mvalid=0\n",
      "Setting up solver\n",
      "\n",
      "Using GPU GLM solver with 1 GPUs\n",
      "\n",
      "trainX\n",
      "[[  1.00000000e+00   2.00000000e+04   2.00000000e+00 ...,   0.00000000e+00\n",
      "    0.00000000e+00   0.00000000e+00]\n",
      " [  2.00000000e+00   1.20000000e+05   2.00000000e+00 ...,   1.00000000e+03\n",
      "    0.00000000e+00   2.00000000e+03]\n",
      " [  3.00000000e+00   9.00000000e+04   2.00000000e+00 ...,   1.00000000e+03\n",
      "    1.00000000e+03   5.00000000e+03]\n",
      " ..., \n",
      " [  2.39970000e+04   2.00000000e+04   1.00000000e+00 ...,   6.06000000e+02\n",
      "    5.00000000e+02   1.00000000e+03]\n",
      " [  2.39980000e+04   1.00000000e+04   1.00000000e+00 ...,   3.20000000e+02\n",
      "    1.82000000e+03   1.00000000e+03]\n",
      " [  2.39990000e+04   2.00000000e+04   1.00000000e+00 ...,   1.00000000e+03\n",
      "    0.00000000e+00   0.00000000e+00]]\n",
      "trainY\n",
      "[ 1.  1.  0. ...,  0.  0.  0.]\n",
      "Solving\n",
      "Done Solving\n",
      "Xvsalpha\n",
      "[[  6.37776679e-07   3.02116332e-09   2.79653966e-02   7.12255016e-04\n",
      "    3.50422710e-02   4.33784025e-03   9.78749692e-02   2.02176422e-02\n",
      "    1.44230574e-02  -5.28440624e-03   1.43812299e-02  -8.57718289e-04\n",
      "   -7.72769909e-07   1.89067904e-07  -1.23286990e-08   1.56967374e-07\n",
      "   -1.03427780e-07   1.12823564e-07  -9.45487159e-07  -3.24843739e-07\n",
      "   -5.45738715e-07  -2.15467068e-07  -3.72314361e-07  -2.37775438e-07]]\n",
      "np.shape(Xvsalpha)\n",
      "(1, 24)\n",
      "logloss\n",
      "[[ 8.91615295 -1.         -1.        ]]\n",
      "lambdas\n",
      "[[  6.96527744e+08]]\n",
      "alphas\n",
      "[[ 0.5]]\n",
      "tols\n",
      "[[ 0.01]]\n",
      "testvalidY (newvalidY should be this)\n",
      "[[ 0.39389668]\n",
      " [ 0.17861546]\n",
      " [ 0.25330295]\n",
      " ..., \n",
      " [ 0.17903049]\n",
      " [ 0.22091484]\n",
      " [ 0.22127387]]\n",
      "Predicting, assuming unity weights\n",
      "Using trainX for validX\n",
      "newvalidY\n",
      "[[ 0.3938967   0.17861545  0.25330293 ...,  0.17903049  0.22091483\n",
      "   0.22127387]]\n",
      "Done Reporting\n"
     ]
    }
   ],
   "source": [
    "import numpy as np\n",
    "#from numpy.random import randn\n",
    "#  m=1000\n",
    "#  n=100\n",
    "#  A=randn(m,n)\n",
    "#  x_true=(randn(n)/n)*float64(randn(n)<0.8)\n",
    "#  b=A.dot(x_true)+0.5*randn(m)\n",
    "import pandas as pd\n",
    "import feather\n",
    "\n",
    "# NOTE: cd ~/h2oai-prototypes/glm-bench/ ; gunzip ipums.csv.gz ; Rscript h2oai-prototypes/glm-bench/ipums.R to produce ipums.feather\n",
    "#df = feather.read_dataframe(\"../../../h2oai-prototypes/glm-bench/ipums.feather\")\n",
    "df = feather.read_dataframe(\"../../../h2oai-prototypes/glm-bench/credit.feather\")\n",
    "# df = pd.read_csv(\"../cpp/train.txt\", sep=\" \", header=None)\n",
    "#df = pd.read_csv(\"../cpp/simple.txt\", sep=\" \", header=None)\n",
    "#df = pd.read_csv(\"Hyatt_Subset.csv\")\n",
    "#df = pd.read_csv(\"Hyatt_Subset.nohead.csv\")\n",
    "print(df.shape)\n",
    "X = np.array(df.iloc[:, :df.shape[1] - 1], dtype='float32', order='C')\n",
    "y = np.array(df.iloc[:, df.shape[1] - 1], dtype='float32', order='C')\n",
    "# elastic_net(X, y, nGPUs=2, nlambda=100, nfolds=5, nalpha=5, validFraction=0.2)\n",
    "x_y = elastic_net(X, y, nGPUs=1, nlambda=100, nfolds=1, nalpha=1, validFraction=0, family=\"logistic\",verbose=0)\n",
    "# elastic_net(X, y, nGPUs=0, nlambda=100, nfolds=1, nalpha=1, validFraction=0)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 21,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Checking whether there is an H2O instance running at http://localhost:54321. connected.\n"
     ]
    },
    {
     "data": {
      "text/html": [
       "<div style=\"overflow:auto\"><table style=\"width:50%\"><tr><td>H2O cluster uptime:</td>\n",
       "<td>14 mins 42 secs</td></tr>\n",
       "<tr><td>H2O cluster version:</td>\n",
       "<td>3.10.4.8</td></tr>\n",
       "<tr><td>H2O cluster version age:</td>\n",
       "<td>2 months and 11 days </td></tr>\n",
       "<tr><td>H2O cluster name:</td>\n",
       "<td>H2O_from_python_navdeep_ma4ybp</td></tr>\n",
       "<tr><td>H2O cluster total nodes:</td>\n",
       "<td>1</td></tr>\n",
       "<tr><td>H2O cluster free memory:</td>\n",
       "<td>21.25 Gb</td></tr>\n",
       "<tr><td>H2O cluster total cores:</td>\n",
       "<td>40</td></tr>\n",
       "<tr><td>H2O cluster allowed cores:</td>\n",
       "<td>40</td></tr>\n",
       "<tr><td>H2O cluster status:</td>\n",
       "<td>locked, healthy</td></tr>\n",
       "<tr><td>H2O connection url:</td>\n",
       "<td>http://localhost:54321</td></tr>\n",
       "<tr><td>H2O connection proxy:</td>\n",
       "<td>None</td></tr>\n",
       "<tr><td>H2O internal security:</td>\n",
       "<td>False</td></tr>\n",
       "<tr><td>Python version:</td>\n",
       "<td>2.7.12 final</td></tr></table></div>"
      ],
      "text/plain": [
       "--------------------------  ------------------------------\n",
       "H2O cluster uptime:         14 mins 42 secs\n",
       "H2O cluster version:        3.10.4.8\n",
       "H2O cluster version age:    2 months and 11 days\n",
       "H2O cluster name:           H2O_from_python_navdeep_ma4ybp\n",
       "H2O cluster total nodes:    1\n",
       "H2O cluster free memory:    21.25 Gb\n",
       "H2O cluster total cores:    40\n",
       "H2O cluster allowed cores:  40\n",
       "H2O cluster status:         locked, healthy\n",
       "H2O connection url:         http://localhost:54321\n",
       "H2O connection proxy:\n",
       "H2O internal security:      False\n",
       "Python version:             2.7.12 final\n",
       "--------------------------  ------------------------------"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Parse progress: |█████████████████████████████████████████████████████████| 100%\n",
      "Parse progress: |█████████████████████████████████████████████████████████| 100%\n"
     ]
    }
   ],
   "source": [
    "import h2o\n",
    "h2o.init()\n",
    "\n",
    "#Make Ax\n",
    "trainX = x_y[0]\n",
    "trainY = x_y[1]\n",
    "h2o_df = h2o.H2OFrame(trainX)\n",
    "y = h2o.H2OFrame(trainY)\n",
    "h2o_df = h2o_df.cbind(y)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/html": [
       "<table>\n",
       "<thead>\n",
       "<tr><th style=\"text-align: right;\">         C1</th><th style=\"text-align: right;\">         C2</th><th style=\"text-align: right;\">       C3</th><th style=\"text-align: right;\">         C4</th><th style=\"text-align: right;\">       C5</th><th style=\"text-align: right;\">       C6</th><th style=\"text-align: right;\">       C7</th><th style=\"text-align: right;\">        C8</th><th style=\"text-align: right;\">        C9</th><th style=\"text-align: right;\">       C10</th><th style=\"text-align: right;\">       C11</th><th style=\"text-align: right;\">         C12</th><th style=\"text-align: right;\">        C13</th><th style=\"text-align: right;\">        C14</th><th style=\"text-align: right;\">         C15</th><th style=\"text-align: right;\">        C16</th><th style=\"text-align: right;\">         C17</th><th style=\"text-align: right;\">        C18</th><th style=\"text-align: right;\">         C19</th><th style=\"text-align: right;\">         C20</th><th style=\"text-align: right;\">         C21</th><th style=\"text-align: right;\">         C22</th><th style=\"text-align: right;\">         C23</th><th style=\"text-align: right;\">         C24</th><th style=\"text-align: right;\">  C110</th></tr>\n",
       "</thead>\n",
       "<tbody>\n",
       "<tr><td style=\"text-align: right;\">6.37777e-07</td><td style=\"text-align: right;\">6.04233e-05</td><td style=\"text-align: right;\">0.0559308</td><td style=\"text-align: right;\">0.00142451 </td><td style=\"text-align: right;\">0.0350423</td><td style=\"text-align: right;\">0.104108 </td><td style=\"text-align: right;\"> 0.19575 </td><td style=\"text-align: right;\"> 0.0404353</td><td style=\"text-align: right;\">-0.0144231</td><td style=\"text-align: right;\">0.00528441</td><td style=\"text-align: right;\">-0.0287625</td><td style=\"text-align: right;\"> 0.00171544 </td><td style=\"text-align: right;\">-0.00302385</td><td style=\"text-align: right;\">0.000586489</td><td style=\"text-align: right;\">-8.49447e-06</td><td style=\"text-align: right;\">0          </td><td style=\"text-align: right;\"> 0          </td><td style=\"text-align: right;\">0          </td><td style=\"text-align: right;\"> 0          </td><td style=\"text-align: right;\">-0.000223817</td><td style=\"text-align: right;\"> 0          </td><td style=\"text-align: right;\"> 0          </td><td style=\"text-align: right;\"> 0          </td><td style=\"text-align: right;\"> 0          </td><td style=\"text-align: right;\">     1</td></tr>\n",
       "<tr><td style=\"text-align: right;\">1.27555e-06</td><td style=\"text-align: right;\">0.00036254 </td><td style=\"text-align: right;\">0.0559308</td><td style=\"text-align: right;\">0.00142451 </td><td style=\"text-align: right;\">0.0700845</td><td style=\"text-align: right;\">0.112784 </td><td style=\"text-align: right;\">-0.097875</td><td style=\"text-align: right;\"> 0.0404353</td><td style=\"text-align: right;\"> 0        </td><td style=\"text-align: right;\">0         </td><td style=\"text-align: right;\"> 0        </td><td style=\"text-align: right;\">-0.00171544 </td><td style=\"text-align: right;\">-0.00207257</td><td style=\"text-align: right;\">0.000326142</td><td style=\"text-align: right;\">-3.30656e-05</td><td style=\"text-align: right;\">0.000513597</td><td style=\"text-align: right;\">-0.000357343</td><td style=\"text-align: right;\">0.000367918</td><td style=\"text-align: right;\"> 0          </td><td style=\"text-align: right;\">-0.000324844</td><td style=\"text-align: right;\">-0.000545739</td><td style=\"text-align: right;\">-0.000215467</td><td style=\"text-align: right;\"> 0          </td><td style=\"text-align: right;\">-0.000475551</td><td style=\"text-align: right;\">     1</td></tr>\n",
       "<tr><td style=\"text-align: right;\">1.91333e-06</td><td style=\"text-align: right;\">0.000271905</td><td style=\"text-align: right;\">0.0559308</td><td style=\"text-align: right;\">0.00142451 </td><td style=\"text-align: right;\">0.0700845</td><td style=\"text-align: right;\">0.147487 </td><td style=\"text-align: right;\"> 0       </td><td style=\"text-align: right;\"> 0        </td><td style=\"text-align: right;\"> 0        </td><td style=\"text-align: right;\">0         </td><td style=\"text-align: right;\"> 0        </td><td style=\"text-align: right;\"> 0          </td><td style=\"text-align: right;\">-0.022595  </td><td style=\"text-align: right;\">0.00265206 </td><td style=\"text-align: right;\">-0.000167165</td><td style=\"text-align: right;\">0.0022495  </td><td style=\"text-align: right;\">-0.00154604 </td><td style=\"text-align: right;\">0.00175429 </td><td style=\"text-align: right;\">-0.00143525 </td><td style=\"text-align: right;\">-0.000487266</td><td style=\"text-align: right;\">-0.000545739</td><td style=\"text-align: right;\">-0.000215467</td><td style=\"text-align: right;\">-0.000372314</td><td style=\"text-align: right;\">-0.00118888 </td><td style=\"text-align: right;\">     0</td></tr>\n",
       "<tr><td style=\"text-align: right;\">2.55111e-06</td><td style=\"text-align: right;\">0.000151058</td><td style=\"text-align: right;\">0.0559308</td><td style=\"text-align: right;\">0.00142451 </td><td style=\"text-align: right;\">0.0350423</td><td style=\"text-align: right;\">0.1605   </td><td style=\"text-align: right;\"> 0       </td><td style=\"text-align: right;\"> 0        </td><td style=\"text-align: right;\"> 0        </td><td style=\"text-align: right;\">0         </td><td style=\"text-align: right;\"> 0        </td><td style=\"text-align: right;\"> 0          </td><td style=\"text-align: right;\">-0.0363125 </td><td style=\"text-align: right;\">0.00911931 </td><td style=\"text-align: right;\">-0.000607694</td><td style=\"text-align: right;\">0.00444437 </td><td style=\"text-align: right;\">-0.00299517 </td><td style=\"text-align: right;\">0.0033336  </td><td style=\"text-align: right;\">-0.00189097 </td><td style=\"text-align: right;\">-0.00065586 </td><td style=\"text-align: right;\">-0.000654886</td><td style=\"text-align: right;\">-0.000237014</td><td style=\"text-align: right;\">-0.000398004</td><td style=\"text-align: right;\">-0.000237775</td><td style=\"text-align: right;\">     0</td></tr>\n",
       "<tr><td style=\"text-align: right;\">3.18888e-06</td><td style=\"text-align: right;\">0.000151058</td><td style=\"text-align: right;\">0.0279654</td><td style=\"text-align: right;\">0.00142451 </td><td style=\"text-align: right;\">0.0350423</td><td style=\"text-align: right;\">0.247257 </td><td style=\"text-align: right;\">-0.097875</td><td style=\"text-align: right;\"> 0        </td><td style=\"text-align: right;\">-0.0144231</td><td style=\"text-align: right;\">0         </td><td style=\"text-align: right;\"> 0        </td><td style=\"text-align: right;\"> 0          </td><td style=\"text-align: right;\">-0.00665896</td><td style=\"text-align: right;\">0.00107202 </td><td style=\"text-align: right;\">-0.000441799</td><td style=\"text-align: right;\">0.0032869  </td><td style=\"text-align: right;\">-0.00198023 </td><td style=\"text-align: right;\">0.00215843 </td><td style=\"text-align: right;\">-0.00189097 </td><td style=\"text-align: right;\">-0.0119156  </td><td style=\"text-align: right;\">-0.00545739 </td><td style=\"text-align: right;\">-0.0019392  </td><td style=\"text-align: right;\">-0.000256525</td><td style=\"text-align: right;\">-0.00016145 </td><td style=\"text-align: right;\">     0</td></tr>\n",
       "<tr><td style=\"text-align: right;\">3.82666e-06</td><td style=\"text-align: right;\">0.000151058</td><td style=\"text-align: right;\">0.0279654</td><td style=\"text-align: right;\">0.000712255</td><td style=\"text-align: right;\">0.0700845</td><td style=\"text-align: right;\">0.1605   </td><td style=\"text-align: right;\"> 0       </td><td style=\"text-align: right;\"> 0        </td><td style=\"text-align: right;\"> 0        </td><td style=\"text-align: right;\">0         </td><td style=\"text-align: right;\"> 0        </td><td style=\"text-align: right;\"> 0          </td><td style=\"text-align: right;\">-0.0497664 </td><td style=\"text-align: right;\">0.0107899  </td><td style=\"text-align: right;\">-0.000710232</td><td style=\"text-align: right;\">0.00304423 </td><td style=\"text-align: right;\">-0.00202915 </td><td style=\"text-align: right;\">0.00225918 </td><td style=\"text-align: right;\">-0.00236372 </td><td style=\"text-align: right;\">-0.000589591</td><td style=\"text-align: right;\">-0.00035855 </td><td style=\"text-align: right;\">-0.000215467</td><td style=\"text-align: right;\">-0.000372314</td><td style=\"text-align: right;\">-0.00019022 </td><td style=\"text-align: right;\">     0</td></tr>\n",
       "<tr><td style=\"text-align: right;\">4.46444e-06</td><td style=\"text-align: right;\">0.00151058 </td><td style=\"text-align: right;\">0.0279654</td><td style=\"text-align: right;\">0.000712255</td><td style=\"text-align: right;\">0.0700845</td><td style=\"text-align: right;\">0.125797 </td><td style=\"text-align: right;\"> 0       </td><td style=\"text-align: right;\"> 0        </td><td style=\"text-align: right;\"> 0        </td><td style=\"text-align: right;\">0         </td><td style=\"text-align: right;\"> 0        </td><td style=\"text-align: right;\"> 0          </td><td style=\"text-align: right;\">-0.284352  </td><td style=\"text-align: right;\">0.0779003  </td><td style=\"text-align: right;\">-0.00548636 </td><td style=\"text-align: right;\">0.0851788  </td><td style=\"text-align: right;\">-0.0499559  </td><td style=\"text-align: right;\">0.0534721  </td><td style=\"text-align: right;\">-0.0520018  </td><td style=\"text-align: right;\">-0.0129937  </td><td style=\"text-align: right;\">-0.0207381  </td><td style=\"text-align: right;\">-0.00436084 </td><td style=\"text-align: right;\">-0.00511932 </td><td style=\"text-align: right;\">-0.00327417 </td><td style=\"text-align: right;\">     0</td></tr>\n",
       "<tr><td style=\"text-align: right;\">5.10221e-06</td><td style=\"text-align: right;\">0.000302116</td><td style=\"text-align: right;\">0.0559308</td><td style=\"text-align: right;\">0.00142451 </td><td style=\"text-align: right;\">0.0700845</td><td style=\"text-align: right;\">0.0997703</td><td style=\"text-align: right;\"> 0       </td><td style=\"text-align: right;\">-0.0202176</td><td style=\"text-align: right;\">-0.0144231</td><td style=\"text-align: right;\">0         </td><td style=\"text-align: right;\"> 0        </td><td style=\"text-align: right;\"> 0.000857718</td><td style=\"text-align: right;\">-0.00917742</td><td style=\"text-align: right;\">7.18458e-05</td><td style=\"text-align: right;\">-7.40955e-06</td><td style=\"text-align: right;\">3.46898e-05</td><td style=\"text-align: right;\"> 1.6445e-05 </td><td style=\"text-align: right;\">6.3971e-05 </td><td style=\"text-align: right;\">-0.000359285</td><td style=\"text-align: right;\">-0.000195231</td><td style=\"text-align: right;\"> 0          </td><td style=\"text-align: right;\">-0.000125186</td><td style=\"text-align: right;\">-0.000628094</td><td style=\"text-align: right;\">-0.00036665 </td><td style=\"text-align: right;\">     0</td></tr>\n",
       "<tr><td style=\"text-align: right;\">5.73999e-06</td><td style=\"text-align: right;\">0.000422963</td><td style=\"text-align: right;\">0.0559308</td><td style=\"text-align: right;\">0.00213677 </td><td style=\"text-align: right;\">0.0350423</td><td style=\"text-align: right;\">0.12146  </td><td style=\"text-align: right;\"> 0       </td><td style=\"text-align: right;\"> 0        </td><td style=\"text-align: right;\"> 0.0288461</td><td style=\"text-align: right;\">0         </td><td style=\"text-align: right;\"> 0        </td><td style=\"text-align: right;\"> 0          </td><td style=\"text-align: right;\">-0.00872071</td><td style=\"text-align: right;\">0.0026651  </td><td style=\"text-align: right;\">-0.000149276</td><td style=\"text-align: right;\">0.00191673 </td><td style=\"text-align: right;\">-0.00121972 </td><td style=\"text-align: right;\">0.000419591</td><td style=\"text-align: right;\">-0.00314753 </td><td style=\"text-align: right;\"> 0          </td><td style=\"text-align: right;\">-0.000235759</td><td style=\"text-align: right;\">-0.000215467</td><td style=\"text-align: right;\">-0.000372314</td><td style=\"text-align: right;\">-0.000237775</td><td style=\"text-align: right;\">     0</td></tr>\n",
       "<tr><td style=\"text-align: right;\">6.37777e-06</td><td style=\"text-align: right;\">6.04233e-05</td><td style=\"text-align: right;\">0.0279654</td><td style=\"text-align: right;\">0.00213677 </td><td style=\"text-align: right;\">0.0700845</td><td style=\"text-align: right;\">0.151824 </td><td style=\"text-align: right;\">-0.19575 </td><td style=\"text-align: right;\">-0.0404353</td><td style=\"text-align: right;\">-0.0288461</td><td style=\"text-align: right;\">0.0105688 </td><td style=\"text-align: right;\">-0.0143812</td><td style=\"text-align: right;\"> 0.000857718</td><td style=\"text-align: right;\"> 0         </td><td style=\"text-align: right;\">0          </td><td style=\"text-align: right;\"> 0          </td><td style=\"text-align: right;\">0          </td><td style=\"text-align: right;\">-0.00134529 </td><td style=\"text-align: right;\">0.0015696  </td><td style=\"text-align: right;\"> 0          </td><td style=\"text-align: right;\"> 0          </td><td style=\"text-align: right;\"> 0          </td><td style=\"text-align: right;\">-0.00280258 </td><td style=\"text-align: right;\">-0.000417737</td><td style=\"text-align: right;\"> 0          </td><td style=\"text-align: right;\">     0</td></tr>\n",
       "</tbody>\n",
       "</table>"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n"
     ]
    }
   ],
   "source": [
    "print(h2o_df)\n",
    "h2o_df[\"C110\"] = h2o_df[\"C110\"].asfactor()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "glm Model Build progress: |███████████████████████████████████████████████| 100%\n"
     ]
    }
   ],
   "source": [
    "from h2o.estimators.glm import H2OGeneralizedLinearEstimator\n",
    "h2o_model = H2OGeneralizedLinearEstimator(family=\"binomial\")\n",
    "h2o_model.train(x=h2o_df.columns[0:h2o_df.ncol-1], y=\"C110\", training_frame=h2o_df)\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Model Details\n",
      "=============\n",
      "H2OGeneralizedLinearEstimator :  Generalized Linear Modeling\n",
      "Model Key:  GLM_model_python_1501618727152_1\n",
      "\n",
      "\n",
      "ModelMetricsBinomialGLM: glm\n",
      "** Reported on train data. **\n",
      "\n",
      "MSE: 0.146489020603\n",
      "RMSE: 0.382738841251\n",
      "LogLoss: 0.468923868548\n",
      "Null degrees of freedom: 23998\n",
      "Residual degrees of freedom: 23975\n",
      "Null deviance: 25514.4486641\n",
      "Residual deviance: 22507.4078426\n",
      "AIC: 22555.4078426\n",
      "AUC: 0.721087149974\n",
      "Gini: 0.442174299949\n",
      "Confusion Matrix (Act/Pred) for max f1 @ threshold = 0.276534117996: \n"
     ]
    },
    {
     "data": {
      "text/html": [
       "<div style=\"overflow:auto\"><table style=\"width:50%\"><tr><td><b></b></td>\n",
       "<td><b>0</b></td>\n",
       "<td><b>1</b></td>\n",
       "<td><b>Error</b></td>\n",
       "<td><b>Rate</b></td></tr>\n",
       "<tr><td>0</td>\n",
       "<td>16123.0</td>\n",
       "<td>2507.0</td>\n",
       "<td>0.1346</td>\n",
       "<td> (2507.0/18630.0)</td></tr>\n",
       "<tr><td>1</td>\n",
       "<td>2646.0</td>\n",
       "<td>2723.0</td>\n",
       "<td>0.4928</td>\n",
       "<td> (2646.0/5369.0)</td></tr>\n",
       "<tr><td>Total</td>\n",
       "<td>18769.0</td>\n",
       "<td>5230.0</td>\n",
       "<td>0.2147</td>\n",
       "<td> (5153.0/23999.0)</td></tr></table></div>"
      ],
      "text/plain": [
       "       0      1     Error    Rate\n",
       "-----  -----  ----  -------  ----------------\n",
       "0      16123  2507  0.1346   (2507.0/18630.0)\n",
       "1      2646   2723  0.4928   (2646.0/5369.0)\n",
       "Total  18769  5230  0.2147   (5153.0/23999.0)"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Maximum Metrics: Maximum metrics at their respective thresholds\n",
      "\n"
     ]
    },
    {
     "data": {
      "text/html": [
       "<div style=\"overflow:auto\"><table style=\"width:50%\"><tr><td><b>metric</b></td>\n",
       "<td><b>threshold</b></td>\n",
       "<td><b>value</b></td>\n",
       "<td><b>idx</b></td></tr>\n",
       "<tr><td>max f1</td>\n",
       "<td>0.2765341</td>\n",
       "<td>0.5138221</td>\n",
       "<td>199.0</td></tr>\n",
       "<tr><td>max f2</td>\n",
       "<td>0.0971304</td>\n",
       "<td>0.5943267</td>\n",
       "<td>343.0</td></tr>\n",
       "<tr><td>max f0point5</td>\n",
       "<td>0.3900551</td>\n",
       "<td>0.5652477</td>\n",
       "<td>150.0</td></tr>\n",
       "<tr><td>max accuracy</td>\n",
       "<td>0.4139961</td>\n",
       "<td>0.8146173</td>\n",
       "<td>140.0</td></tr>\n",
       "<tr><td>max precision</td>\n",
       "<td>0.7131259</td>\n",
       "<td>0.7453184</td>\n",
       "<td>39.0</td></tr>\n",
       "<tr><td>max recall</td>\n",
       "<td>0.0008697</td>\n",
       "<td>1.0</td>\n",
       "<td>399.0</td></tr>\n",
       "<tr><td>max specificity</td>\n",
       "<td>0.9908562</td>\n",
       "<td>0.9995706</td>\n",
       "<td>0.0</td></tr>\n",
       "<tr><td>max absolute_mcc</td>\n",
       "<td>0.3900551</td>\n",
       "<td>0.3953803</td>\n",
       "<td>150.0</td></tr>\n",
       "<tr><td>max min_per_class_accuracy</td>\n",
       "<td>0.2157707</td>\n",
       "<td>0.6596350</td>\n",
       "<td>247.0</td></tr>\n",
       "<tr><td>max mean_per_class_accuracy</td>\n",
       "<td>0.2629462</td>\n",
       "<td>0.6870612</td>\n",
       "<td>208.0</td></tr></table></div>"
      ],
      "text/plain": [
       "metric                       threshold    value     idx\n",
       "---------------------------  -----------  --------  -----\n",
       "max f1                       0.276534     0.513822  199\n",
       "max f2                       0.0971304    0.594327  343\n",
       "max f0point5                 0.390055     0.565248  150\n",
       "max accuracy                 0.413996     0.814617  140\n",
       "max precision                0.713126     0.745318  39\n",
       "max recall                   0.000869682  1         399\n",
       "max specificity              0.990856     0.999571  0\n",
       "max absolute_mcc             0.390055     0.39538   150\n",
       "max min_per_class_accuracy   0.215771     0.659635  247\n",
       "max mean_per_class_accuracy  0.262946     0.687061  208"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Gains/Lift Table: Avg response rate: 22.37 %\n",
      "\n"
     ]
    },
    {
     "data": {
      "text/html": [
       "<div style=\"overflow:auto\"><table style=\"width:50%\"><tr><td><b></b></td>\n",
       "<td><b>group</b></td>\n",
       "<td><b>cumulative_data_fraction</b></td>\n",
       "<td><b>lower_threshold</b></td>\n",
       "<td><b>lift</b></td>\n",
       "<td><b>cumulative_lift</b></td>\n",
       "<td><b>response_rate</b></td>\n",
       "<td><b>cumulative_response_rate</b></td>\n",
       "<td><b>capture_rate</b></td>\n",
       "<td><b>cumulative_capture_rate</b></td>\n",
       "<td><b>gain</b></td>\n",
       "<td><b>cumulative_gain</b></td></tr>\n",
       "<tr><td></td>\n",
       "<td>1</td>\n",
       "<td>0.0100004</td>\n",
       "<td>0.7211343</td>\n",
       "<td>3.3338153</td>\n",
       "<td>3.3338153</td>\n",
       "<td>0.7458333</td>\n",
       "<td>0.7458333</td>\n",
       "<td>0.0333395</td>\n",
       "<td>0.0333395</td>\n",
       "<td>233.3815267</td>\n",
       "<td>233.3815267</td></tr>\n",
       "<tr><td></td>\n",
       "<td>2</td>\n",
       "<td>0.0200008</td>\n",
       "<td>0.6384574</td>\n",
       "<td>3.2406919</td>\n",
       "<td>3.2872536</td>\n",
       "<td>0.725</td>\n",
       "<td>0.7354167</td>\n",
       "<td>0.0324083</td>\n",
       "<td>0.0657478</td>\n",
       "<td>224.0691935</td>\n",
       "<td>228.7253601</td></tr>\n",
       "<tr><td></td>\n",
       "<td>3</td>\n",
       "<td>0.0300013</td>\n",
       "<td>0.6061510</td>\n",
       "<td>3.2965659</td>\n",
       "<td>3.2903577</td>\n",
       "<td>0.7375</td>\n",
       "<td>0.7361111</td>\n",
       "<td>0.0329670</td>\n",
       "<td>0.0987148</td>\n",
       "<td>229.6565934</td>\n",
       "<td>229.0357712</td></tr>\n",
       "<tr><td></td>\n",
       "<td>4</td>\n",
       "<td>0.0400017</td>\n",
       "<td>0.5772870</td>\n",
       "<td>3.3151906</td>\n",
       "<td>3.2965659</td>\n",
       "<td>0.7416667</td>\n",
       "<td>0.7375</td>\n",
       "<td>0.0331533</td>\n",
       "<td>0.1318681</td>\n",
       "<td>231.5190600</td>\n",
       "<td>229.6565934</td></tr>\n",
       "<tr><td></td>\n",
       "<td>5</td>\n",
       "<td>0.0500021</td>\n",
       "<td>0.5526400</td>\n",
       "<td>3.2220673</td>\n",
       "<td>3.2816662</td>\n",
       "<td>0.7208333</td>\n",
       "<td>0.7341667</td>\n",
       "<td>0.0322220</td>\n",
       "<td>0.1640901</td>\n",
       "<td>222.2067269</td>\n",
       "<td>228.1666201</td></tr>\n",
       "<tr><td></td>\n",
       "<td>6</td>\n",
       "<td>0.1000042</td>\n",
       "<td>0.4543369</td>\n",
       "<td>2.8532989</td>\n",
       "<td>3.0674825</td>\n",
       "<td>0.6383333</td>\n",
       "<td>0.68625</td>\n",
       "<td>0.1426709</td>\n",
       "<td>0.3067610</td>\n",
       "<td>185.3298876</td>\n",
       "<td>206.7482539</td></tr>\n",
       "<tr><td></td>\n",
       "<td>7</td>\n",
       "<td>0.1500063</td>\n",
       "<td>0.3665800</td>\n",
       "<td>2.1045873</td>\n",
       "<td>2.7465175</td>\n",
       "<td>0.4708333</td>\n",
       "<td>0.6144444</td>\n",
       "<td>0.1052337</td>\n",
       "<td>0.4119948</td>\n",
       "<td>110.4587291</td>\n",
       "<td>174.6517456</td></tr>\n",
       "<tr><td></td>\n",
       "<td>8</td>\n",
       "<td>0.2000083</td>\n",
       "<td>0.2872431</td>\n",
       "<td>1.4825234</td>\n",
       "<td>2.4305190</td>\n",
       "<td>0.3316667</td>\n",
       "<td>0.54375</td>\n",
       "<td>0.0741293</td>\n",
       "<td>0.4861240</td>\n",
       "<td>48.2523437</td>\n",
       "<td>143.0518951</td></tr>\n",
       "<tr><td></td>\n",
       "<td>9</td>\n",
       "<td>0.3000125</td>\n",
       "<td>0.2424670</td>\n",
       "<td>0.9777950</td>\n",
       "<td>1.9462776</td>\n",
       "<td>0.21875</td>\n",
       "<td>0.4354167</td>\n",
       "<td>0.0977836</td>\n",
       "<td>0.5839076</td>\n",
       "<td>-2.2205020</td>\n",
       "<td>94.6277628</td></tr>\n",
       "<tr><td></td>\n",
       "<td>10</td>\n",
       "<td>0.4000167</td>\n",
       "<td>0.2179438</td>\n",
       "<td>0.7114623</td>\n",
       "<td>1.6375738</td>\n",
       "<td>0.1591667</td>\n",
       "<td>0.3663542</td>\n",
       "<td>0.0711492</td>\n",
       "<td>0.6550568</td>\n",
       "<td>-28.8537748</td>\n",
       "<td>63.7573784</td></tr>\n",
       "<tr><td></td>\n",
       "<td>11</td>\n",
       "<td>0.5000208</td>\n",
       "<td>0.1956065</td>\n",
       "<td>0.5699148</td>\n",
       "<td>1.4240420</td>\n",
       "<td>0.1275</td>\n",
       "<td>0.3185833</td>\n",
       "<td>0.0569939</td>\n",
       "<td>0.7120507</td>\n",
       "<td>-43.0085211</td>\n",
       "<td>42.4041985</td></tr>\n",
       "<tr><td></td>\n",
       "<td>12</td>\n",
       "<td>0.5999833</td>\n",
       "<td>0.1687140</td>\n",
       "<td>0.5347507</td>\n",
       "<td>1.2758782</td>\n",
       "<td>0.1196332</td>\n",
       "<td>0.2854365</td>\n",
       "<td>0.0534550</td>\n",
       "<td>0.7655057</td>\n",
       "<td>-46.5249265</td>\n",
       "<td>27.5878244</td></tr>\n",
       "<tr><td></td>\n",
       "<td>13</td>\n",
       "<td>0.6999875</td>\n",
       "<td>0.1365907</td>\n",
       "<td>0.6593132</td>\n",
       "<td>1.1877923</td>\n",
       "<td>0.1475</td>\n",
       "<td>0.2657301</td>\n",
       "<td>0.0659341</td>\n",
       "<td>0.8314397</td>\n",
       "<td>-34.0686813</td>\n",
       "<td>18.7792278</td></tr>\n",
       "<tr><td></td>\n",
       "<td>14</td>\n",
       "<td>0.7999917</td>\n",
       "<td>0.1094134</td>\n",
       "<td>0.6742129</td>\n",
       "<td>1.1235915</td>\n",
       "<td>0.1508333</td>\n",
       "<td>0.2513673</td>\n",
       "<td>0.0674241</td>\n",
       "<td>0.8988638</td>\n",
       "<td>-32.5787080</td>\n",
       "<td>12.3591515</td></tr>\n",
       "<tr><td></td>\n",
       "<td>15</td>\n",
       "<td>0.8999958</td>\n",
       "<td>0.0661022</td>\n",
       "<td>0.5196282</td>\n",
       "<td>1.0564814</td>\n",
       "<td>0.11625</td>\n",
       "<td>0.2363535</td>\n",
       "<td>0.0519650</td>\n",
       "<td>0.9508288</td>\n",
       "<td>-48.0371810</td>\n",
       "<td>5.6481372</td></tr>\n",
       "<tr><td></td>\n",
       "<td>16</td>\n",
       "<td>1.0</td>\n",
       "<td>0.0000001</td>\n",
       "<td>0.4916912</td>\n",
       "<td>1.0</td>\n",
       "<td>0.11</td>\n",
       "<td>0.2237177</td>\n",
       "<td>0.0491712</td>\n",
       "<td>1.0</td>\n",
       "<td>-50.8308810</td>\n",
       "<td>0.0</td></tr></table></div>"
      ],
      "text/plain": [
       "    group    cumulative_data_fraction    lower_threshold    lift      cumulative_lift    response_rate    cumulative_response_rate    capture_rate    cumulative_capture_rate    gain      cumulative_gain\n",
       "--  -------  --------------------------  -----------------  --------  -----------------  ---------------  --------------------------  --------------  -------------------------  --------  -----------------\n",
       "    1        0.0100004                   0.721134           3.33382   3.33382            0.745833         0.745833                    0.0333395       0.0333395                  233.382   233.382\n",
       "    2        0.0200008                   0.638457           3.24069   3.28725            0.725            0.735417                    0.0324083       0.0657478                  224.069   228.725\n",
       "    3        0.0300013                   0.606151           3.29657   3.29036            0.7375           0.736111                    0.032967        0.0987148                  229.657   229.036\n",
       "    4        0.0400017                   0.577287           3.31519   3.29657            0.741667         0.7375                      0.0331533       0.131868                   231.519   229.657\n",
       "    5        0.0500021                   0.55264            3.22207   3.28167            0.720833         0.734167                    0.032222        0.16409                    222.207   228.167\n",
       "    6        0.100004                    0.454337           2.8533    3.06748            0.638333         0.68625                     0.142671        0.306761                   185.33    206.748\n",
       "    7        0.150006                    0.36658            2.10459   2.74652            0.470833         0.614444                    0.105234        0.411995                   110.459   174.652\n",
       "    8        0.200008                    0.287243           1.48252   2.43052            0.331667         0.54375                     0.0741293       0.486124                   48.2523   143.052\n",
       "    9        0.300013                    0.242467           0.977795  1.94628            0.21875          0.435417                    0.0977836       0.583908                   -2.2205   94.6278\n",
       "    10       0.400017                    0.217944           0.711462  1.63757            0.159167         0.366354                    0.0711492       0.655057                   -28.8538  63.7574\n",
       "    11       0.500021                    0.195606           0.569915  1.42404            0.1275           0.318583                    0.0569939       0.712051                   -43.0085  42.4042\n",
       "    12       0.599983                    0.168714           0.534751  1.27588            0.119633         0.285436                    0.053455        0.765506                   -46.5249  27.5878\n",
       "    13       0.699987                    0.136591           0.659313  1.18779            0.1475           0.26573                     0.0659341       0.83144                    -34.0687  18.7792\n",
       "    14       0.799992                    0.109413           0.674213  1.12359            0.150833         0.251367                    0.0674241       0.898864                   -32.5787  12.3592\n",
       "    15       0.899996                    0.0661022          0.519628  1.05648            0.11625          0.236354                    0.051965        0.950829                   -48.0372  5.64814\n",
       "    16       1                           1.16817e-07        0.491691  1                  0.11             0.223718                    0.0491712       1                          -50.8309  0"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Scoring History: \n"
     ]
    },
    {
     "data": {
      "text/html": [
       "<div style=\"overflow:auto\"><table style=\"width:50%\"><tr><td><b></b></td>\n",
       "<td><b>timestamp</b></td>\n",
       "<td><b>duration</b></td>\n",
       "<td><b>iteration</b></td>\n",
       "<td><b>negative_log_likelihood</b></td>\n",
       "<td><b>objective</b></td></tr>\n",
       "<tr><td></td>\n",
       "<td>2017-08-01 13:19:31</td>\n",
       "<td> 0.000 sec</td>\n",
       "<td>0</td>\n",
       "<td>12757.2243320</td>\n",
       "<td>0.5315732</td></tr>\n",
       "<tr><td></td>\n",
       "<td>2017-08-01 13:19:31</td>\n",
       "<td> 0.084 sec</td>\n",
       "<td>1</td>\n",
       "<td>11332.2686168</td>\n",
       "<td>0.4724727</td></tr>\n",
       "<tr><td></td>\n",
       "<td>2017-08-01 13:19:31</td>\n",
       "<td> 0.091 sec</td>\n",
       "<td>2</td>\n",
       "<td>11257.7179512</td>\n",
       "<td>0.4694486</td></tr>\n",
       "<tr><td></td>\n",
       "<td>2017-08-01 13:19:31</td>\n",
       "<td> 0.101 sec</td>\n",
       "<td>3</td>\n",
       "<td>11254.0082798</td>\n",
       "<td>0.4693228</td></tr>\n",
       "<tr><td></td>\n",
       "<td>2017-08-01 13:19:31</td>\n",
       "<td> 0.107 sec</td>\n",
       "<td>4</td>\n",
       "<td>11253.7039213</td>\n",
       "<td>0.4693187</td></tr></table></div>"
      ],
      "text/plain": [
       "    timestamp            duration    iteration    negative_log_likelihood    objective\n",
       "--  -------------------  ----------  -----------  -------------------------  -----------\n",
       "    2017-08-01 13:19:31  0.000 sec   0            12757.2                    0.531573\n",
       "    2017-08-01 13:19:31  0.084 sec   1            11332.3                    0.472473\n",
       "    2017-08-01 13:19:31  0.091 sec   2            11257.7                    0.469449\n",
       "    2017-08-01 13:19:31  0.101 sec   3            11254                      0.469323\n",
       "    2017-08-01 13:19:31  0.107 sec   4            11253.7                    0.469319"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    },
    {
     "data": {
      "text/plain": []
     },
     "execution_count": 9,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "h2o_model"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 2",
   "language": "python",
   "name": "python2"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 2
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython2",
   "version": "2.7.12"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
