{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {
    "collapsed": false
   },
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "Using TensorFlow backend.\n"
     ]
    }
   ],
   "source": [
    "import pandas as pd\n",
    "import numpy as np\n",
    "from scipy import sparse as ssp\n",
    "from sklearn.model_selection import KFold\n",
    "from keras.preprocessing.text import one_hot\n",
    "from keras.preprocessing.sequence import pad_sequences\n",
    "from keras.layers import Input, Embedding, LSTM, Dense,Flatten, Dropout, merge,Convolution1D,MaxPooling1D,Lambda\n",
    "from keras.layers.advanced_activations import PReLU,LeakyReLU,ELU\n",
    "from keras.models import Model\n",
    "import h5py\n",
    "import os\n",
    "from sklearn.preprocessing import StandardScaler"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "\n",
    "# Pin this job to GPU 2 and fix the NumPy RNG seed for reproducibility.\n",
    "os.environ[\"CUDA_VISIBLE_DEVICES\"] = \"2\"\n",
    "seed = 1024\n",
    "np.random.seed(seed)\n",
    "path = '../data/'"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {
    "collapsed": false
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "load context data...\n"
     ]
    }
   ],
   "source": [
    "# Load the cleaned 'context' column for each split; cast to str so the\n",
    "# downstream one_hot/pad text pipeline always receives strings.\n",
    "ft = ['context']\n",
    "print('load context data...')\n",
    "\n",
    "train = pd.read_pickle(path + \"train_clean.pkl\")[ft].astype(str)\n",
    "valid = pd.read_pickle(path + 'valid_clean.pkl')[ft].astype(str)\n",
    "dev = pd.read_pickle(path + 'dev_clean.pkl')[ft].astype(str)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {
    "collapsed": false
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "load other data...\n"
     ]
    }
   ],
   "source": [
    "# Load the dense (non-text) feature matrices for each split.\n",
    "# Fix: announce progress BEFORE the loads run, not after they finish.\n",
    "print('load other data...')\n",
    "train_ot = pd.read_pickle(path + 'train_X.pkl')\n",
    "valid_ot = pd.read_pickle(path + 'valid_X.pkl')\n",
    "dev_ot = pd.read_pickle(path + 'dev_X.pkl')\n",
    "\n",
    "# Stack context text from all splits so tokenization/padding is done once.\n",
    "data_all = pd.concat([train, valid, dev], ignore_index=True)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "# Standardize the dense features; statistics come from the train split only\n",
    "# to avoid leaking valid/dev distributions into the scaling.\n",
    "scaler = StandardScaler().fit(train_ot)\n",
    "train_ot = scaler.transform(train_ot)\n",
    "valid_ot = scaler.transform(valid_ot)\n",
    "dev_ot = scaler.transform(dev_ot)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {
    "collapsed": false
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "transform question data..\n"
     ]
    }
   ],
   "source": [
    "maxlen = 30  # tokens kept per context after padding/truncation\n",
    "n = 100000  # hashing vocabulary size for one_hot\n",
    "print('transform question data..')\n",
    "# one_hot hashes each whitespace-split token into [1, n); sequences are then\n",
    "# post-padded/truncated to a fixed length of maxlen.\n",
    "q1 = data_all['context'].apply(lambda x: one_hot(x, n=n, lower=True, split=\" \")).values.tolist()\n",
    "q1 = pad_sequences(q1, padding='post', maxlen=maxlen)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "# Split the padded sequences back into train/valid/dev partitions,\n",
    "# matching the concat order of data_all (train, valid, dev).\n",
    "# Fix: the original misplaced a colon, making X_va an empty slice\n",
    "# (q1[a:a:(a+b)] uses a+b as the STEP) and giving X_de the valid rows.\n",
    "X_tr = q1[:train.shape[0]]\n",
    "X_va = q1[train.shape[0]:(train.shape[0] + valid.shape[0])]\n",
    "X_de = q1[(train.shape[0] + valid.shape[0]):]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "def MLP():\n",
    "    \"\"\"Build the binary-classification MLP: 512-512-128 hidden units,\n",
    "    each block Dense -> PReLU -> Dropout, sigmoid output, adam + BCE.\"\"\"\n",
    "    x = Input(shape=(136,), name='input')\n",
    "\n",
    "    h = x\n",
    "    # (units, dropout rate) per hidden block, applied in order.\n",
    "    for units, rate in [(512, 0.3), (512, 0.3), (128, 0.2)]:\n",
    "        h = Dense(units)(h)\n",
    "        h = PReLU()(h)\n",
    "        h = Dropout(rate)(h)\n",
    "\n",
    "    output_1 = Dense(1, activation='sigmoid')(h)\n",
    "    model = Model(input=[x], output=[output_1])\n",
    "    model.compile(\n",
    "        optimizer='adam',\n",
    "        loss='binary_crossentropy',\n",
    "    )\n",
    "    return model"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "# Labels for the train / valid / dev splits.\n",
    "# (Removed dead commented-out scratch lines left over from iteration.)\n",
    "path = '../data/'\n",
    "y = pd.read_pickle(path + 'train.pkl')['label'].values\n",
    "y_va = pd.read_pickle(path + 'valid.pkl')['label'].values\n",
    "y_te = pd.read_pickle(path + 'dev.pkl')['label'].values\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 19,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "# Accumulator for per-fold predictions on the validation split (one column).\n",
    "res = np.zeros((y_va.shape[0], 1))\n",
    "best_it = 23  # NOTE(review): unused in the visible cells — presumably a tuned iteration count; confirm\n",
    "fold = 1\n",
    "# .split(y) returns a one-shot generator — consumed by the training loop below.\n",
    "skf = KFold(n_splits=5, shuffle=True, random_state=seed).split(y)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": false,
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Train on 19788 samples, validate on 4948 samples\n",
      "Epoch 1/8\n",
      "1s - loss: 0.5053 - val_loss: 0.4488\n",
      "Epoch 2/8\n",
      "1s - loss: 0.4305 - val_loss: 0.4249\n",
      "Epoch 3/8\n",
      "1s - loss: 0.4109 - val_loss: 0.4189\n",
      "Epoch 4/8\n",
      "1s - loss: 0.4013 - val_loss: 0.4227\n",
      "Epoch 5/8\n",
      "1s - loss: 0.3908 - val_loss: 0.4234\n",
      "Epoch 6/8\n",
      "1s - loss: 0.3818 - val_loss: 0.4257\n",
      "Epoch 7/8\n",
      "1s - loss: 0.3773 - val_loss: 0.4242\n",
      "Epoch 8/8\n",
      "1s - loss: 0.3692 - val_loss: 0.4217\n",
      "(3092, 1) (3092, 1)\n",
      "end fold:1\n",
      "Train on 19789 samples, validate on 4947 samples\n",
      "Epoch 1/8\n",
      "2s - loss: 0.5031 - val_loss: 0.4443\n",
      "Epoch 2/8\n",
      "2s - loss: 0.4372 - val_loss: 0.4106\n",
      "Epoch 3/8\n",
      "1s - loss: 0.4158 - val_loss: 0.3948\n",
      "Epoch 4/8\n",
      "1s - loss: 0.4094 - val_loss: 0.4000\n",
      "Epoch 5/8\n",
      "1s - loss: 0.3959 - val_loss: 0.3944\n",
      "Epoch 6/8\n",
      "2s - loss: 0.3898 - val_loss: 0.4086\n",
      "Epoch 7/8\n",
      "1s - loss: 0.3833 - val_loss: 0.3945\n",
      "Epoch 8/8\n",
      "1s - loss: 0.3734 - val_loss: 0.3936\n",
      "(3092, 1) (3092, 1)\n",
      "end fold:2\n",
      "Train on 19789 samples, validate on 4947 samples\n",
      "Epoch 1/8\n",
      "2s - loss: 0.5137 - val_loss: 0.4285\n",
      "Epoch 2/8\n",
      "2s - loss: 0.4367 - val_loss: 0.4117\n",
      "Epoch 3/8\n",
      "2s - loss: 0.4194 - val_loss: 0.4089\n",
      "Epoch 4/8\n",
      "2s - loss: 0.4088 - val_loss: 0.4104\n",
      "Epoch 5/8\n"
     ]
    }
   ],
   "source": [
    "# 5-fold CV over the training features: fit a fresh MLP per fold, then add its\n",
    "# predictions on the fixed validation split into `res`.\n",
    "# NOTE(review): `res` holds the SUM over folds — divide by n_splits downstream\n",
    "# if a fold-average is intended; that step is not visible here.\n",
    "for ind_tr, ind_te in skf:\n",
    "    X_ot_train = train_ot[ind_tr]\n",
    "    X_ot_test = train_ot[ind_te]\n",
    "\n",
    "    y_train = y[ind_tr]\n",
    "    y_test = y[ind_te]\n",
    "    # break\n",
    "\n",
    "    model_mlp = MLP()\n",
    "    # model_name = 'mlp.hdf5'\n",
    "    # model_checkpoint = ModelCheckpoint(path+model_name, monitor='val_loss', save_best_only=True,mode='min')\n",
    "    model_mlp.fit(X_ot_train, y_train, batch_size=128, nb_epoch=8, verbose=2,\n",
    "                  validation_data=[X_ot_test, y_test], shuffle=True)\n",
    "    tmp_res = model_mlp.predict(valid_ot)\n",
    "    print(tmp_res.shape, res.shape)\n",
    "    res += tmp_res\n",
    "    print('end fold:{}'.format(fold))\n",
    "    fold += 1"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.5.3"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
