{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### 1. 数据预处理"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "<class 'pandas.core.frame.DataFrame'>\n",
      "RangeIndex: 891 entries, 0 to 890\n",
      "Data columns (total 12 columns):\n",
      "PassengerId    891 non-null int64\n",
      "Survived       891 non-null int64\n",
      "Pclass         891 non-null int64\n",
      "Name           891 non-null object\n",
      "Sex            891 non-null object\n",
      "Age            714 non-null float64\n",
      "SibSp          891 non-null int64\n",
      "Parch          891 non-null int64\n",
      "Ticket         891 non-null object\n",
      "Fare           891 non-null float64\n",
      "Cabin          204 non-null object\n",
      "Embarked       889 non-null object\n",
      "dtypes: float64(2), int64(5), object(5)\n",
      "memory usage: 83.6+ KB\n"
     ]
    },
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style scoped>\n",
       "    .dataframe tbody tr th:only-of-type {\n",
       "        vertical-align: middle;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: right;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>PassengerId</th>\n",
       "      <th>Survived</th>\n",
       "      <th>Pclass</th>\n",
       "      <th>Name</th>\n",
       "      <th>Sex</th>\n",
       "      <th>Age</th>\n",
       "      <th>SibSp</th>\n",
       "      <th>Parch</th>\n",
       "      <th>Ticket</th>\n",
       "      <th>Fare</th>\n",
       "      <th>Cabin</th>\n",
       "      <th>Embarked</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>0</th>\n",
       "      <td>1</td>\n",
       "      <td>0</td>\n",
       "      <td>3</td>\n",
       "      <td>Braund, Mr. Owen Harris</td>\n",
       "      <td>male</td>\n",
       "      <td>22.0</td>\n",
       "      <td>1</td>\n",
       "      <td>0</td>\n",
       "      <td>A/5 21171</td>\n",
       "      <td>7.2500</td>\n",
       "      <td>NaN</td>\n",
       "      <td>S</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>2</td>\n",
       "      <td>1</td>\n",
       "      <td>1</td>\n",
       "      <td>Cumings, Mrs. John Bradley (Florence Briggs Th...</td>\n",
       "      <td>female</td>\n",
       "      <td>38.0</td>\n",
       "      <td>1</td>\n",
       "      <td>0</td>\n",
       "      <td>PC 17599</td>\n",
       "      <td>71.2833</td>\n",
       "      <td>C85</td>\n",
       "      <td>C</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2</th>\n",
       "      <td>3</td>\n",
       "      <td>1</td>\n",
       "      <td>3</td>\n",
       "      <td>Heikkinen, Miss. Laina</td>\n",
       "      <td>female</td>\n",
       "      <td>26.0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>STON/O2. 3101282</td>\n",
       "      <td>7.9250</td>\n",
       "      <td>NaN</td>\n",
       "      <td>S</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>4</td>\n",
       "      <td>1</td>\n",
       "      <td>1</td>\n",
       "      <td>Futrelle, Mrs. Jacques Heath (Lily May Peel)</td>\n",
       "      <td>female</td>\n",
       "      <td>35.0</td>\n",
       "      <td>1</td>\n",
       "      <td>0</td>\n",
       "      <td>113803</td>\n",
       "      <td>53.1000</td>\n",
       "      <td>C123</td>\n",
       "      <td>S</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>4</th>\n",
       "      <td>5</td>\n",
       "      <td>0</td>\n",
       "      <td>3</td>\n",
       "      <td>Allen, Mr. William Henry</td>\n",
       "      <td>male</td>\n",
       "      <td>35.0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>373450</td>\n",
       "      <td>8.0500</td>\n",
       "      <td>NaN</td>\n",
       "      <td>S</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "   PassengerId  Survived  Pclass  \\\n",
       "0            1         0       3   \n",
       "1            2         1       1   \n",
       "2            3         1       3   \n",
       "3            4         1       1   \n",
       "4            5         0       3   \n",
       "\n",
       "                                                Name     Sex   Age  SibSp  \\\n",
       "0                            Braund, Mr. Owen Harris    male  22.0      1   \n",
       "1  Cumings, Mrs. John Bradley (Florence Briggs Th...  female  38.0      1   \n",
       "2                             Heikkinen, Miss. Laina  female  26.0      0   \n",
       "3       Futrelle, Mrs. Jacques Heath (Lily May Peel)  female  35.0      1   \n",
       "4                           Allen, Mr. William Henry    male  35.0      0   \n",
       "\n",
       "   Parch            Ticket     Fare Cabin Embarked  \n",
       "0      0         A/5 21171   7.2500   NaN        S  \n",
       "1      0          PC 17599  71.2833   C85        C  \n",
       "2      0  STON/O2. 3101282   7.9250   NaN        S  \n",
       "3      0            113803  53.1000  C123        S  \n",
       "4      0            373450   8.0500   NaN        S  "
      ]
     },
     "execution_count": 1,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "import pandas as pd\n",
    "import numpy as np\n",
    "import tensorflow as tf\n",
    "\n",
    "## Read the training data and inspect its structure (dtypes, missing values).\n",
    "# NOTE(review): hardcoded absolute local path -- consider a configurable DATA_DIR\n",
    "# so the notebook runs on other machines.\n",
    "data = pd.read_csv(\"D:/cao/kaggle/Titanic/data/train.csv\")\n",
    "data.info()\n",
    "data.head(5)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style scoped>\n",
       "    .dataframe tbody tr th:only-of-type {\n",
       "        vertical-align: middle;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: right;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>PassengerId</th>\n",
       "      <th>Survived</th>\n",
       "      <th>Pclass</th>\n",
       "      <th>Name</th>\n",
       "      <th>Sex</th>\n",
       "      <th>Age</th>\n",
       "      <th>SibSp</th>\n",
       "      <th>Parch</th>\n",
       "      <th>Ticket</th>\n",
       "      <th>Fare</th>\n",
       "      <th>Cabin</th>\n",
       "      <th>Embarked</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>0</th>\n",
       "      <td>1</td>\n",
       "      <td>0</td>\n",
       "      <td>3</td>\n",
       "      <td>Braund, Mr. Owen Harris</td>\n",
       "      <td>1</td>\n",
       "      <td>22.0</td>\n",
       "      <td>1</td>\n",
       "      <td>0</td>\n",
       "      <td>A/5 21171</td>\n",
       "      <td>7.2500</td>\n",
       "      <td>NaN</td>\n",
       "      <td>S</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>2</td>\n",
       "      <td>1</td>\n",
       "      <td>1</td>\n",
       "      <td>Cumings, Mrs. John Bradley (Florence Briggs Th...</td>\n",
       "      <td>0</td>\n",
       "      <td>38.0</td>\n",
       "      <td>1</td>\n",
       "      <td>0</td>\n",
       "      <td>PC 17599</td>\n",
       "      <td>71.2833</td>\n",
       "      <td>C85</td>\n",
       "      <td>C</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2</th>\n",
       "      <td>3</td>\n",
       "      <td>1</td>\n",
       "      <td>3</td>\n",
       "      <td>Heikkinen, Miss. Laina</td>\n",
       "      <td>0</td>\n",
       "      <td>26.0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>STON/O2. 3101282</td>\n",
       "      <td>7.9250</td>\n",
       "      <td>NaN</td>\n",
       "      <td>S</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>4</td>\n",
       "      <td>1</td>\n",
       "      <td>1</td>\n",
       "      <td>Futrelle, Mrs. Jacques Heath (Lily May Peel)</td>\n",
       "      <td>0</td>\n",
       "      <td>35.0</td>\n",
       "      <td>1</td>\n",
       "      <td>0</td>\n",
       "      <td>113803</td>\n",
       "      <td>53.1000</td>\n",
       "      <td>C123</td>\n",
       "      <td>S</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>4</th>\n",
       "      <td>5</td>\n",
       "      <td>0</td>\n",
       "      <td>3</td>\n",
       "      <td>Allen, Mr. William Henry</td>\n",
       "      <td>1</td>\n",
       "      <td>35.0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>373450</td>\n",
       "      <td>8.0500</td>\n",
       "      <td>NaN</td>\n",
       "      <td>S</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "   PassengerId  Survived  Pclass  \\\n",
       "0            1         0       3   \n",
       "1            2         1       1   \n",
       "2            3         1       3   \n",
       "3            4         1       1   \n",
       "4            5         0       3   \n",
       "\n",
       "                                                Name  Sex   Age  SibSp  Parch  \\\n",
       "0                            Braund, Mr. Owen Harris    1  22.0      1      0   \n",
       "1  Cumings, Mrs. John Bradley (Florence Briggs Th...    0  38.0      1      0   \n",
       "2                             Heikkinen, Miss. Laina    0  26.0      0      0   \n",
       "3       Futrelle, Mrs. Jacques Heath (Lily May Peel)    0  35.0      1      0   \n",
       "4                           Allen, Mr. William Henry    1  35.0      0      0   \n",
       "\n",
       "             Ticket     Fare Cabin Embarked  \n",
       "0         A/5 21171   7.2500   NaN        S  \n",
       "1          PC 17599  71.2833   C85        C  \n",
       "2  STON/O2. 3101282   7.9250   NaN        S  \n",
       "3            113803  53.1000  C123        S  \n",
       "4            373450   8.0500   NaN        S  "
      ]
     },
     "execution_count": 2,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Encode the 'Sex' feature as an integer category: 'male' -> 1, everything else -> 0.\n",
    "# NOTE(review): any missing value would also map to 0, i.e. same code as 'female'.\n",
    "data['Sex'] = data['Sex'].apply(lambda s:1 if s == 'male' else 0)\n",
    "data.head(5)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "array([[ 1.    , 22.    ,  3.    ,  1.    ,  0.    ,  7.25  ],\n",
       "       [ 0.    , 38.    ,  1.    ,  1.    ,  0.    , 71.2833],\n",
       "       [ 0.    , 26.    ,  3.    ,  0.    ,  0.    ,  7.925 ],\n",
       "       ...,\n",
       "       [ 0.    ,  0.    ,  3.    ,  1.    ,  2.    , 23.45  ],\n",
       "       [ 1.    , 26.    ,  1.    ,  0.    ,  0.    , 30.    ],\n",
       "       [ 1.    , 32.    ,  3.    ,  0.    ,  0.    ,  7.75  ]])"
      ]
     },
     "execution_count": 3,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Fill every missing field with 0 and keep a subset of numeric features for training.\n",
    "# NOTE(review): filling 'Age' with 0 treats the 177 missing ages as newborns --\n",
    "# consider imputing the median age instead.\n",
    "data = data.fillna(0)\n",
    "dataset_X = data[['Sex', 'Age', 'Pclass', 'SibSp', 'Parch', 'Fare']]\n",
    "dataset_X = dataset_X.values\n",
    "dataset_X"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "0      1\n",
       "1      0\n",
       "2      0\n",
       "3      0\n",
       "4      1\n",
       "5      1\n",
       "6      1\n",
       "7      1\n",
       "8      0\n",
       "9      0\n",
       "10     0\n",
       "11     0\n",
       "12     1\n",
       "13     1\n",
       "14     1\n",
       "15     0\n",
       "16     1\n",
       "17     0\n",
       "18     1\n",
       "19     0\n",
       "20     1\n",
       "21     0\n",
       "22     0\n",
       "23     0\n",
       "24     1\n",
       "25     0\n",
       "26     1\n",
       "27     1\n",
       "28     0\n",
       "29     1\n",
       "      ..\n",
       "861    1\n",
       "862    0\n",
       "863    1\n",
       "864    1\n",
       "865    0\n",
       "866    0\n",
       "867    1\n",
       "868    1\n",
       "869    0\n",
       "870    1\n",
       "871    0\n",
       "872    1\n",
       "873    1\n",
       "874    0\n",
       "875    0\n",
       "876    1\n",
       "877    1\n",
       "878    1\n",
       "879    0\n",
       "880    0\n",
       "881    1\n",
       "882    1\n",
       "883    1\n",
       "884    1\n",
       "885    1\n",
       "886    1\n",
       "887    0\n",
       "888    1\n",
       "889    0\n",
       "890    1\n",
       "Name: Deceased, Length: 891, dtype: int64"
      ]
     },
     "execution_count": 4,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# The two classes are 'survived' and 'deceased'; the 'Survived' column is one label.\n",
    "# Add a 'Deceased' column as the second label: the logical negation of 'Survived'.\n",
    "data['Deceased'] = data['Survived'].apply(lambda s:int(not s))\n",
    "data['Deceased']"
   ]
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "array([[1, 0],\n",
       "       [0, 1],\n",
       "       [0, 1],\n",
       "       ...,\n",
       "       [1, 0],\n",
       "       [0, 1],\n",
       "       [1, 0]], dtype=int64)"
      ]
     },
     "execution_count": 5,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Stack the two label columns into a one-hot target array:\n",
    "# [1, 0] = deceased, [0, 1] = survived.\n",
    "dataset_Y = data[['Deceased', 'Survived']]\n",
    "dataset_Y = dataset_Y.values\n",
    "dataset_Y"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {},
   "outputs": [],
   "source": [
    "from sklearn.model_selection import train_test_split\n",
    "\n",
    "# Hold out 20% of the data as a validation set (fixed seed for reproducibility).\n",
    "X_train, X_val, Y_train, Y_val = train_test_split(dataset_X, dataset_Y, test_size=0.2, random_state=42)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 22,
   "metadata": {},
   "outputs": [],
   "source": [
    "## Build the computation graph (TensorFlow 1.x style).\n",
    "# (tensorflow is already imported as tf in the first cell; the duplicate\n",
    "# import that used to live here has been removed.)\n",
    "\n",
    "# Placeholders for a batch of samples: 6 features in, 2 one-hot classes out.\n",
    "X = tf.placeholder(tf.float32, shape=[None, 6])\n",
    "Y = tf.placeholder(tf.float32, shape=[None, 2])\n",
    "\n",
    "# Trainable parameters of a 6 -> 4 -> 2 network.\n",
    "W1 = tf.Variable(tf.truncated_normal([6,4]), name='weights_1')\n",
    "b1 = tf.Variable(tf.zeros([1,4]), name='bias_1')\n",
    "W2 = tf.Variable(tf.truncated_normal([4,2]),name='weights_2')\n",
    "b2 = tf.Variable(tf.zeros([2]), name='bias_2')\n",
    "\n",
    "# Forward pass. The ReLU between the layers makes the network nonlinear;\n",
    "# without it the two matmuls collapse into a single linear map, so the\n",
    "# hidden layer adds nothing over plain logistic regression.\n",
    "z1 = tf.nn.relu(tf.matmul(X, W1) + b1)\n",
    "y_pred = tf.nn.softmax(tf.matmul(z1, W2) + b2)\n",
    "\n",
    "# Per-sample cross-entropy; the 1e-10 epsilon guards against log(0).\n",
    "cross_entropy = -tf.reduce_sum(Y * tf.log(y_pred + 1e-10),reduction_indices=1)\n",
    "\n",
    "# Batch cost is the mean cross-entropy over the batch.\n",
    "cost = tf.reduce_mean(cross_entropy)\n",
    "\n",
    "# Adam optimizer (lr=0.001) minimizes the cost.\n",
    "train_op = tf.train.AdamOptimizer(0.001).minimize(cost)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 23,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Epoch: 0001, total loss=5546.903262723\n",
      "Epoch: 0002, total loss=4498.115071389\n",
      "Epoch: 0003, total loss=2167.997049432\n",
      "Epoch: 0004, total loss=648.877790437\n",
      "Epoch: 0005, total loss=542.466192416\n",
      "Epoch: 0006, total loss=496.461688609\n",
      "Epoch: 0007, total loss=468.163735916\n",
      "Epoch: 0008, total loss=450.270260985\n",
      "Epoch: 0009, total loss=439.205251625\n",
      "Epoch: 0010, total loss=432.418005270\n",
      "Epoch: 0011, total loss=428.141939776\n",
      "Epoch: 0012, total loss=425.230706785\n",
      "Epoch: 0013, total loss=423.057553034\n",
      "Epoch: 0014, total loss=421.316044566\n",
      "Epoch: 0015, total loss=419.861635665\n",
      "Epoch: 0016, total loss=418.612131733\n",
      "Epoch: 0017, total loss=417.518599184\n",
      "Epoch: 0018, total loss=416.556961536\n",
      "Epoch: 0019, total loss=415.690809250\n",
      "Epoch: 0020, total loss=414.912641232\n",
      "Epoch: 0021, total loss=414.199637913\n",
      "Epoch: 0022, total loss=413.547336152\n",
      "Epoch: 0023, total loss=412.937217863\n",
      "Epoch: 0024, total loss=412.365919689\n",
      "Epoch: 0025, total loss=411.821853700\n",
      "Epoch: 0026, total loss=411.302656802\n",
      "Epoch: 0027, total loss=410.794651907\n",
      "Epoch: 0028, total loss=410.301768401\n",
      "Epoch: 0029, total loss=409.815418038\n",
      "Epoch: 0030, total loss=409.332622861\n",
      "Epoch: 0031, total loss=408.851486562\n",
      "Epoch: 0032, total loss=408.373887052\n",
      "Epoch: 0033, total loss=407.896248057\n",
      "Epoch: 0034, total loss=407.421941843\n",
      "Epoch: 0035, total loss=406.948991131\n",
      "Epoch: 0036, total loss=406.480788455\n",
      "Epoch: 0037, total loss=406.015780572\n",
      "Epoch: 0038, total loss=405.554819261\n",
      "Epoch: 0039, total loss=405.101392811\n",
      "Epoch: 0040, total loss=404.652853914\n",
      "Epoch: 0041, total loss=404.211748915\n",
      "Epoch: 0042, total loss=403.777166620\n",
      "Epoch: 0043, total loss=403.349747801\n",
      "Epoch: 0044, total loss=402.929714012\n",
      "Epoch: 0045, total loss=402.516333379\n",
      "Epoch: 0046, total loss=402.110929219\n",
      "Epoch: 0047, total loss=401.712333005\n",
      "Epoch: 0048, total loss=401.320863353\n",
      "Epoch: 0049, total loss=400.937137309\n",
      "Epoch: 0050, total loss=400.559490611\n",
      "Epoch: 0051, total loss=400.187718175\n",
      "Epoch: 0052, total loss=399.822843877\n",
      "Epoch: 0053, total loss=399.463133953\n",
      "Epoch: 0054, total loss=399.109657415\n",
      "Epoch: 0055, total loss=398.761185614\n",
      "Epoch: 0056, total loss=398.418535975\n",
      "Epoch: 0057, total loss=398.079644985\n",
      "Epoch: 0058, total loss=397.746250041\n",
      "Epoch: 0059, total loss=397.416348441\n",
      "Epoch: 0060, total loss=397.090273881\n",
      "Epoch: 0061, total loss=396.768823608\n",
      "Epoch: 0062, total loss=396.449884367\n",
      "Epoch: 0063, total loss=396.134915251\n",
      "Epoch: 0064, total loss=395.823204038\n",
      "Epoch: 0065, total loss=395.514212221\n",
      "Epoch: 0066, total loss=395.206815935\n",
      "Epoch: 0067, total loss=394.903104224\n",
      "Epoch: 0068, total loss=394.601073571\n",
      "Epoch: 0069, total loss=394.301075441\n",
      "Epoch: 0070, total loss=394.003385400\n",
      "Epoch: 0071, total loss=393.706697924\n",
      "Epoch: 0072, total loss=393.411598534\n",
      "Epoch: 0073, total loss=393.119292561\n",
      "Epoch: 0074, total loss=392.827036360\n",
      "Epoch: 0075, total loss=392.536329227\n",
      "Epoch: 0076, total loss=392.245963147\n",
      "Epoch: 0077, total loss=391.957562561\n",
      "Epoch: 0078, total loss=391.668681841\n",
      "Epoch: 0079, total loss=391.381184908\n",
      "Epoch: 0080, total loss=391.094519285\n",
      "Epoch: 0081, total loss=390.808096110\n",
      "Epoch: 0082, total loss=390.522102447\n",
      "Epoch: 0083, total loss=390.235920464\n",
      "Epoch: 0084, total loss=389.950487948\n",
      "Epoch: 0085, total loss=389.664601205\n",
      "Epoch: 0086, total loss=389.379114336\n",
      "Epoch: 0087, total loss=389.093548166\n",
      "Epoch: 0088, total loss=388.807935422\n",
      "Epoch: 0089, total loss=388.521828262\n",
      "Epoch: 0090, total loss=388.235011192\n",
      "Epoch: 0091, total loss=387.948970872\n",
      "Epoch: 0092, total loss=387.661800554\n",
      "Epoch: 0093, total loss=387.373935691\n",
      "Epoch: 0094, total loss=387.085994660\n",
      "Epoch: 0095, total loss=386.797635656\n",
      "Epoch: 0096, total loss=386.508625418\n",
      "Epoch: 0097, total loss=386.218323083\n",
      "Epoch: 0098, total loss=385.927307817\n",
      "Epoch: 0099, total loss=385.636078173\n",
      "Epoch: 0100, total loss=385.343685674\n",
      "Epoch: 0101, total loss=385.050546039\n",
      "Epoch: 0102, total loss=384.755808595\n",
      "Epoch: 0103, total loss=384.460374160\n",
      "Epoch: 0104, total loss=384.164622613\n",
      "Epoch: 0105, total loss=383.866240200\n",
      "Epoch: 0106, total loss=383.567479207\n",
      "Epoch: 0107, total loss=383.267753455\n",
      "Epoch: 0108, total loss=382.966487453\n",
      "Epoch: 0109, total loss=382.663134544\n",
      "Epoch: 0110, total loss=382.358604854\n",
      "Epoch: 0111, total loss=382.052530174\n",
      "Epoch: 0112, total loss=381.745252390\n",
      "Epoch: 0113, total loss=381.435733895\n",
      "Epoch: 0114, total loss=381.125286802\n",
      "Epoch: 0115, total loss=380.811766430\n",
      "Epoch: 0116, total loss=380.497296189\n",
      "Epoch: 0117, total loss=380.180337625\n",
      "Epoch: 0118, total loss=379.860663864\n",
      "Epoch: 0119, total loss=379.538835698\n",
      "Epoch: 0120, total loss=379.215042411\n",
      "Epoch: 0121, total loss=378.888445516\n",
      "Epoch: 0122, total loss=378.559677133\n",
      "Epoch: 0123, total loss=378.227358527\n",
      "Epoch: 0124, total loss=377.892517354\n",
      "Epoch: 0125, total loss=377.555280283\n",
      "Epoch: 0126, total loss=377.214823255\n",
      "Epoch: 0127, total loss=376.871052271\n",
      "Epoch: 0128, total loss=376.525199732\n",
      "Epoch: 0129, total loss=376.176450776\n",
      "Epoch: 0130, total loss=375.825961843\n",
      "Epoch: 0131, total loss=375.474419634\n",
      "Epoch: 0132, total loss=375.124312393\n",
      "Epoch: 0133, total loss=374.777296774\n",
      "Epoch: 0134, total loss=374.436860193\n",
      "Epoch: 0135, total loss=374.105832937\n",
      "Epoch: 0136, total loss=373.789382715\n",
      "Epoch: 0137, total loss=373.490063862\n",
      "Epoch: 0138, total loss=373.205910670\n",
      "Epoch: 0139, total loss=372.936704732\n",
      "Epoch: 0140, total loss=372.681798343\n",
      "Epoch: 0141, total loss=372.443068140\n",
      "Epoch: 0142, total loss=372.220860851\n",
      "Epoch: 0143, total loss=372.014633938\n",
      "Epoch: 0144, total loss=371.820331338\n",
      "Epoch: 0145, total loss=371.633020021\n",
      "Epoch: 0146, total loss=371.447591427\n",
      "Epoch: 0147, total loss=371.264918195\n",
      "Epoch: 0148, total loss=371.083471145\n",
      "Epoch: 0149, total loss=370.903750850\n",
      "Epoch: 0150, total loss=370.724840632\n",
      "Epoch: 0151, total loss=370.547569014\n",
      "Epoch: 0152, total loss=370.370506888\n",
      "Epoch: 0153, total loss=370.194249940\n",
      "Epoch: 0154, total loss=370.018454331\n",
      "Epoch: 0155, total loss=369.842814855\n",
      "Epoch: 0156, total loss=369.667931910\n",
      "Epoch: 0157, total loss=369.493890248\n",
      "Epoch: 0158, total loss=369.320370793\n",
      "Epoch: 0159, total loss=369.148872332\n",
      "Epoch: 0160, total loss=368.978873359\n",
      "Epoch: 0161, total loss=368.809947605\n",
      "Epoch: 0162, total loss=368.643770052\n",
      "Epoch: 0163, total loss=368.478771176\n",
      "Epoch: 0164, total loss=368.315858282\n",
      "Epoch: 0165, total loss=368.153616100\n",
      "Epoch: 0166, total loss=367.991812588\n",
      "Epoch: 0167, total loss=367.829989378\n",
      "Epoch: 0168, total loss=367.668297045\n",
      "Epoch: 0169, total loss=367.505153244\n",
      "Epoch: 0170, total loss=367.342426655\n",
      "Epoch: 0171, total loss=367.179194668\n",
      "Epoch: 0172, total loss=367.015314169\n",
      "Epoch: 0173, total loss=366.851196881\n",
      "Epoch: 0174, total loss=366.686207962\n",
      "Epoch: 0175, total loss=366.521282414\n",
      "Epoch: 0176, total loss=366.355847480\n",
      "Epoch: 0177, total loss=366.190182691\n",
      "Epoch: 0178, total loss=366.024272819\n",
      "Epoch: 0179, total loss=365.858101674\n",
      "Epoch: 0180, total loss=365.691497829\n",
      "Epoch: 0181, total loss=365.524779308\n",
      "Epoch: 0182, total loss=365.358254391\n",
      "Epoch: 0183, total loss=365.191313495\n",
      "Epoch: 0184, total loss=365.023858015\n",
      "Epoch: 0185, total loss=364.856421635\n",
      "Epoch: 0186, total loss=364.688989270\n",
      "Epoch: 0187, total loss=364.520910152\n",
      "Epoch: 0188, total loss=364.353422807\n",
      "Epoch: 0189, total loss=364.185250225\n",
      "Epoch: 0190, total loss=364.017007176\n",
      "Epoch: 0191, total loss=363.848444998\n",
      "Epoch: 0192, total loss=363.679627755\n",
      "Epoch: 0193, total loss=363.511137839\n",
      "Epoch: 0194, total loss=363.342140043\n",
      "Epoch: 0195, total loss=363.172670582\n",
      "Epoch: 0196, total loss=363.003224191\n",
      "Epoch: 0197, total loss=362.833750309\n",
      "Epoch: 0198, total loss=362.663755878\n",
      "Epoch: 0199, total loss=362.493366721\n",
      "Epoch: 0200, total loss=362.323379968\n",
      "Epoch: 0201, total loss=362.152499349\n",
      "Epoch: 0202, total loss=361.981485379\n",
      "Epoch: 0203, total loss=361.809841087\n",
      "Epoch: 0204, total loss=361.638377503\n",
      "Epoch: 0205, total loss=361.466184970\n",
      "Epoch: 0206, total loss=361.293648178\n",
      "Epoch: 0207, total loss=361.120139277\n",
      "Epoch: 0208, total loss=360.947024748\n",
      "Epoch: 0209, total loss=360.772287414\n",
      "Epoch: 0210, total loss=360.598132098\n",
      "Epoch: 0211, total loss=360.422036698\n",
      "Epoch: 0212, total loss=360.247474783\n",
      "Epoch: 0213, total loss=360.068212603\n",
      "Epoch: 0214, total loss=359.894715986\n",
      "Epoch: 0215, total loss=359.710336291\n",
      "Epoch: 0216, total loss=359.540142591\n",
      "Epoch: 0217, total loss=359.346988295\n",
      "Epoch: 0218, total loss=359.187783059\n",
      "Epoch: 0219, total loss=358.978690416\n",
      "Epoch: 0220, total loss=358.843111867\n",
      "Epoch: 0221, total loss=358.622708658\n",
      "Epoch: 0222, total loss=358.491869355\n",
      "Epoch: 0223, total loss=358.258885975\n",
      "Epoch: 0224, total loss=358.116589454\n",
      "Epoch: 0225, total loss=357.844209112\n",
      "Epoch: 0226, total loss=357.752926148\n",
      "Epoch: 0227, total loss=357.449862301\n",
      "Epoch: 0228, total loss=357.368718809\n",
      "Epoch: 0229, total loss=357.024898091\n",
      "Epoch: 0230, total loss=356.981944432\n",
      "Epoch: 0231, total loss=356.595030189\n",
      "Epoch: 0232, total loss=356.576138336\n",
      "Epoch: 0233, total loss=356.140822362\n",
      "Epoch: 0234, total loss=356.152751715\n",
      "Epoch: 0235, total loss=355.668372888\n",
      "Epoch: 0236, total loss=355.700597658\n",
      "Epoch: 0237, total loss=355.174182266\n",
      "Epoch: 0238, total loss=355.213446252\n",
      "Epoch: 0239, total loss=354.668677684\n",
      "Epoch: 0240, total loss=354.700635469\n",
      "Epoch: 0241, total loss=354.187907971\n",
      "Epoch: 0242, total loss=354.202812701\n",
      "Epoch: 0243, total loss=353.789167454\n",
      "Epoch: 0244, total loss=353.791735496\n",
      "Epoch: 0245, total loss=353.522924148\n",
      "Epoch: 0246, total loss=353.531375716\n",
      "Epoch: 0247, total loss=353.373641424\n",
      "Epoch: 0248, total loss=353.356121650\n",
      "Epoch: 0249, total loss=353.207095097\n",
      "Epoch: 0250, total loss=353.144077020\n",
      "Epoch: 0251, total loss=352.981667090\n",
      "Epoch: 0252, total loss=352.908389969\n",
      "Epoch: 0253, total loss=352.740826658\n",
      "Epoch: 0254, total loss=352.672856229\n",
      "Epoch: 0255, total loss=352.498735216\n",
      "Epoch: 0256, total loss=352.440307877\n",
      "Epoch: 0257, total loss=352.257027563\n",
      "Epoch: 0258, total loss=352.209138873\n",
      "Epoch: 0259, total loss=352.016900813\n",
      "Epoch: 0260, total loss=351.978202089\n",
      "Epoch: 0261, total loss=351.779076398\n",
      "Epoch: 0262, total loss=351.745708319\n",
      "Epoch: 0263, total loss=351.543787516\n",
      "Epoch: 0264, total loss=351.511491098\n",
      "Epoch: 0265, total loss=351.311166832\n",
      "Epoch: 0266, total loss=351.275332244\n",
      "Epoch: 0267, total loss=351.080322011\n",
      "Epoch: 0268, total loss=351.037960225\n",
      "Epoch: 0269, total loss=350.851202494\n",
      "Epoch: 0270, total loss=350.800453951\n",
      "Epoch: 0271, total loss=350.623721451\n",
      "Epoch: 0272, total loss=350.563395104\n",
      "Epoch: 0273, total loss=350.397392048\n",
      "Epoch: 0274, total loss=350.327888791\n",
      "Epoch: 0275, total loss=350.172502131\n",
      "Epoch: 0276, total loss=350.094212824\n",
      "Epoch: 0277, total loss=349.949141084\n",
      "Epoch: 0278, total loss=349.862982194\n",
      "Epoch: 0279, total loss=349.727488951\n",
      "Epoch: 0280, total loss=349.635004404\n",
      "Epoch: 0281, total loss=349.507785133\n",
      "Epoch: 0282, total loss=349.411251604\n",
      "Epoch: 0283, total loss=349.291448155\n",
      "Epoch: 0284, total loss=349.192689487\n",
      "Epoch: 0285, total loss=349.080518075\n",
      "Epoch: 0286, total loss=348.983602337\n",
      "Epoch: 0287, total loss=348.882401831\n",
      "Epoch: 0288, total loss=348.793725567\n",
      "Epoch: 0289, total loss=348.695438997\n",
      "Epoch: 0290, total loss=348.599503216\n",
      "Epoch: 0291, total loss=348.510801795\n",
      "Epoch: 0292, total loss=348.413754049\n",
      "Epoch: 0293, total loss=348.311769631\n",
      "Epoch: 0294, total loss=348.206909539\n",
      "Epoch: 0295, total loss=348.099472946\n",
      "Epoch: 0296, total loss=347.993022169\n",
      "Epoch: 0297, total loss=347.884899214\n",
      "Epoch: 0298, total loss=347.777531717\n",
      "Epoch: 0299, total loss=347.669101703\n",
      "Epoch: 0300, total loss=347.560651033\n",
      "Epoch: 0301, total loss=347.451465228\n",
      "Epoch: 0302, total loss=347.342039986\n",
      "Epoch: 0303, total loss=347.231682622\n",
      "Epoch: 0304, total loss=347.121263158\n",
      "Epoch: 0305, total loss=347.010153427\n",
      "Epoch: 0306, total loss=346.898473396\n",
      "Epoch: 0307, total loss=346.786227117\n",
      "Epoch: 0308, total loss=346.673720124\n",
      "Epoch: 0309, total loss=346.560412285\n",
      "Epoch: 0310, total loss=346.446898442\n",
      "Epoch: 0311, total loss=346.333024790\n",
      "Epoch: 0312, total loss=346.218949904\n",
      "Epoch: 0313, total loss=346.104515705\n",
      "Epoch: 0314, total loss=345.990126052\n",
      "Epoch: 0315, total loss=345.875803755\n",
      "Epoch: 0316, total loss=345.761932670\n",
      "Epoch: 0317, total loss=345.648298257\n",
      "Epoch: 0318, total loss=345.535367934\n",
      "Epoch: 0319, total loss=345.422673791\n",
      "Epoch: 0320, total loss=345.310899716\n",
      "Epoch: 0321, total loss=345.199214466\n",
      "Epoch: 0322, total loss=345.088487339\n",
      "Epoch: 0323, total loss=344.977905398\n",
      "Epoch: 0324, total loss=344.867818554\n",
      "Epoch: 0325, total loss=344.758128400\n",
      "Epoch: 0326, total loss=344.648781545\n",
      "Epoch: 0327, total loss=344.539702184\n",
      "Epoch: 0328, total loss=344.430874721\n",
      "Epoch: 0329, total loss=344.322240734\n",
      "Epoch: 0330, total loss=344.214092928\n",
      "Epoch: 0331, total loss=344.105950193\n",
      "Epoch: 0332, total loss=343.998073669\n",
      "Epoch: 0333, total loss=343.890406744\n",
      "Epoch: 0334, total loss=343.782921288\n",
      "Epoch: 0335, total loss=343.675529427\n",
      "Epoch: 0336, total loss=343.568636115\n",
      "Epoch: 0337, total loss=343.461614728\n",
      "Epoch: 0338, total loss=343.354979305\n",
      "Epoch: 0339, total loss=343.248656279\n",
      "Epoch: 0340, total loss=343.142332005\n",
      "Epoch: 0341, total loss=343.036346673\n",
      "Epoch: 0342, total loss=342.930374805\n",
      "Epoch: 0343, total loss=342.824642777\n",
      "Epoch: 0344, total loss=342.719223420\n",
      "Epoch: 0345, total loss=342.613905004\n",
      "Epoch: 0346, total loss=342.508444248\n",
      "Epoch: 0347, total loss=342.403698578\n",
      "Epoch: 0348, total loss=342.298791526\n",
      "Epoch: 0349, total loss=342.194087167\n",
      "Epoch: 0350, total loss=342.089638426\n",
      "Epoch: 0351, total loss=341.984966386\n",
      "Epoch: 0352, total loss=341.880458610\n",
      "Epoch: 0353, total loss=341.776126879\n",
      "Epoch: 0354, total loss=341.671694247\n",
      "Epoch: 0355, total loss=341.567378907\n",
      "Epoch: 0356, total loss=341.462937236\n",
      "Epoch: 0357, total loss=341.358311640\n",
      "Epoch: 0358, total loss=341.253783122\n",
      "Epoch: 0359, total loss=341.148966830\n",
      "Epoch: 0360, total loss=341.044028195\n",
      "Epoch: 0361, total loss=340.938685186\n",
      "Epoch: 0362, total loss=340.833424354\n",
      "Epoch: 0363, total loss=340.727697570\n",
      "Epoch: 0364, total loss=340.621802724\n",
      "Epoch: 0365, total loss=340.515665399\n",
      "Epoch: 0366, total loss=340.409007407\n",
      "Epoch: 0367, total loss=340.302491695\n",
      "Epoch: 0368, total loss=340.195484485\n",
      "Epoch: 0369, total loss=340.088456458\n",
      "Epoch: 0370, total loss=339.981206476\n",
      "Epoch: 0371, total loss=339.873631105\n",
      "Epoch: 0372, total loss=339.766290892\n",
      "Epoch: 0373, total loss=339.658644371\n",
      "Epoch: 0374, total loss=339.551295659\n",
      "Epoch: 0375, total loss=339.444156371\n",
      "Epoch: 0376, total loss=339.337078364\n",
      "Epoch: 0377, total loss=339.230351551\n",
      "Epoch: 0378, total loss=339.124092156\n",
      "Epoch: 0379, total loss=339.018499034\n",
      "Epoch: 0380, total loss=338.913360621\n",
      "Epoch: 0381, total loss=338.809002531\n",
      "Epoch: 0382, total loss=338.705608610\n",
      "Epoch: 0383, total loss=338.603209781\n",
      "Epoch: 0384, total loss=338.501751589\n",
      "Epoch: 0385, total loss=338.401328699\n",
      "Epoch: 0386, total loss=338.302062425\n",
      "Epoch: 0387, total loss=338.203436021\n",
      "Epoch: 0388, total loss=338.106257563\n",
      "Epoch: 0389, total loss=338.010055559\n",
      "Epoch: 0390, total loss=337.914642816\n",
      "Epoch: 0391, total loss=337.820416448\n",
      "Epoch: 0392, total loss=337.727222052\n",
      "Epoch: 0393, total loss=337.634953405\n",
      "Epoch: 0394, total loss=337.543806958\n",
      "Epoch: 0395, total loss=337.453538012\n",
      "Epoch: 0396, total loss=337.364345658\n",
      "Epoch: 0397, total loss=337.276306824\n",
      "Epoch: 0398, total loss=337.189192326\n",
      "Epoch: 0399, total loss=337.103204078\n",
      "Epoch: 0400, total loss=337.018269061\n",
      "Epoch: 0401, total loss=336.934376427\n",
      "Epoch: 0402, total loss=336.851581221\n",
      "Epoch: 0403, total loss=336.769987318\n",
      "Epoch: 0404, total loss=336.689476565\n",
      "Epoch: 0405, total loss=336.610157148\n",
      "Epoch: 0406, total loss=336.532054300\n",
      "Epoch: 0407, total loss=336.454979293\n",
      "Epoch: 0408, total loss=336.379197864\n",
      "Epoch: 0409, total loss=336.304677672\n",
      "Epoch: 0410, total loss=336.231332191\n",
      "Epoch: 0411, total loss=336.159388394\n",
      "Epoch: 0412, total loss=336.088525541\n",
      "Epoch: 0413, total loss=336.019221955\n",
      "Epoch: 0414, total loss=335.951106006\n",
      "Epoch: 0415, total loss=335.884551509\n",
      "Epoch: 0416, total loss=335.819278919\n",
      "Epoch: 0417, total loss=335.755658400\n",
      "Epoch: 0418, total loss=335.693474431\n",
      "Epoch: 0419, total loss=335.633020462\n",
      "Epoch: 0420, total loss=335.574262504\n",
      "Epoch: 0421, total loss=335.517044437\n",
      "Epoch: 0422, total loss=335.461933559\n",
      "Epoch: 0423, total loss=335.408534687\n",
      "Epoch: 0424, total loss=335.357234548\n",
      "Epoch: 0425, total loss=335.307909255\n",
      "Epoch: 0426, total loss=335.260829463\n",
      "Epoch: 0427, total loss=335.215916282\n",
      "Epoch: 0428, total loss=335.173335195\n",
      "Epoch: 0429, total loss=335.132923217\n",
      "Epoch: 0430, total loss=335.094850783\n",
      "Epoch: 0431, total loss=335.059170756\n",
      "Epoch: 0432, total loss=335.025545658\n",
      "Epoch: 0433, total loss=334.994070766\n",
      "Epoch: 0434, total loss=334.964671814\n",
      "Epoch: 0435, total loss=334.937171064\n",
      "Epoch: 0436, total loss=334.911287271\n",
      "Epoch: 0437, total loss=334.887145068\n",
      "Epoch: 0438, total loss=334.864316599\n",
      "Epoch: 0439, total loss=334.842888648\n",
      "Epoch: 0440, total loss=334.822533472\n",
      "Epoch: 0441, total loss=334.803308424\n",
      "Epoch: 0442, total loss=334.784917815\n",
      "Epoch: 0443, total loss=334.767480998\n",
      "Epoch: 0444, total loss=334.750748180\n",
      "Epoch: 0445, total loss=334.734621306\n",
      "Epoch: 0446, total loss=334.719099889\n",
      "Epoch: 0447, total loss=334.704164062\n",
      "Epoch: 0448, total loss=334.689658907\n",
      "Epoch: 0449, total loss=334.675662967\n",
      "Epoch: 0450, total loss=334.661908024\n",
      "Epoch: 0451, total loss=334.648503146\n",
      "Epoch: 0452, total loss=334.635417579\n",
      "Epoch: 0453, total loss=334.622572389\n",
      "Epoch: 0454, total loss=334.609927460\n",
      "Epoch: 0455, total loss=334.597557420\n",
      "Epoch: 0456, total loss=334.585296916\n",
      "Epoch: 0457, total loss=334.573258441\n",
      "Epoch: 0458, total loss=334.561172275\n",
      "Epoch: 0459, total loss=334.549339303\n",
      "Epoch: 0460, total loss=334.537603240\n",
      "Epoch: 0461, total loss=334.525952112\n",
      "Epoch: 0462, total loss=334.514398650\n",
      "Epoch: 0463, total loss=334.502880465\n",
      "Epoch: 0464, total loss=334.491470397\n",
      "Epoch: 0465, total loss=334.480101773\n",
      "Epoch: 0466, total loss=334.468754102\n",
      "Epoch: 0467, total loss=334.457527651\n",
      "Epoch: 0468, total loss=334.446290371\n",
      "Epoch: 0469, total loss=334.435097454\n",
      "Epoch: 0470, total loss=334.424039899\n",
      "Epoch: 0471, total loss=334.412990278\n",
      "Epoch: 0472, total loss=334.401961770\n",
      "Epoch: 0473, total loss=334.391015727\n",
      "Epoch: 0474, total loss=334.380082562\n",
      "Epoch: 0475, total loss=334.369176805\n",
      "Epoch: 0476, total loss=334.358389081\n",
      "Epoch: 0477, total loss=334.347575558\n",
      "Epoch: 0478, total loss=334.336856198\n",
      "Epoch: 0479, total loss=334.326173602\n",
      "Epoch: 0480, total loss=334.315512426\n",
      "Epoch: 0481, total loss=334.304927558\n",
      "Epoch: 0482, total loss=334.294458602\n",
      "Epoch: 0483, total loss=334.283935745\n",
      "Epoch: 0484, total loss=334.273566068\n",
      "Epoch: 0485, total loss=334.263174373\n",
      "Epoch: 0486, total loss=334.252799353\n",
      "Epoch: 0487, total loss=334.242542551\n",
      "Epoch: 0488, total loss=334.232336943\n",
      "Epoch: 0489, total loss=334.222208496\n",
      "Epoch: 0490, total loss=334.212113876\n",
      "Epoch: 0491, total loss=334.202093909\n",
      "Epoch: 0492, total loss=334.192079308\n",
      "Epoch: 0493, total loss=334.182143797\n",
      "Epoch: 0494, total loss=334.172326170\n",
      "Epoch: 0495, total loss=334.162488964\n",
      "Epoch: 0496, total loss=334.152688808\n",
      "Epoch: 0497, total loss=334.142982276\n",
      "Epoch: 0498, total loss=334.133314954\n",
      "Epoch: 0499, total loss=334.123676914\n",
      "Epoch: 0500, total loss=334.114117325\n",
      "Training complete!\n",
      "Accuracy on test set: 0.793296099\n"
     ]
    }
   ],
   "source": [
    "## Training loop\n",
    "\n",
    "# Runs 500 epochs of per-sample training on the graph built in earlier cells\n",
    "# (X, Y, train_op, cost, y_pred are assumed defined above), then evaluates\n",
    "# accuracy on the validation split and writes test-set predictions to CSV.\n",
    "with tf.Session() as sess:\n",
    "    # Initialize all graph variables\n",
    "    tf.global_variables_initializer().run()\n",
    "    \n",
    "    # Iterative training\n",
    "    for epoch in range(500):\n",
    "        total_loss = 0\n",
    "        # NOTE(review): batch size 1 — one optimizer step per training row,\n",
    "        # so total_loss is the sum of per-sample losses for the epoch.\n",
    "        for i in range(len(X_train)):\n",
    "            feed = {X:[X_train[i]], Y:[Y_train[i]]}\n",
    "            _,loss = sess.run([train_op, cost], feed_dict=feed)\n",
    "            total_loss += loss\n",
    "        print('Epoch: %04d, total loss=%.9f' % (epoch + 1,total_loss))\n",
    "    print('Training complete!')\n",
    "    \n",
    "    ## Evaluate accuracy on the validation set\n",
    "\n",
    "    # argmax over axis 1 assumes y_pred and Y_val are one-hot / 2-class scores\n",
    "    pred = sess.run(y_pred, feed_dict={X:X_val})\n",
    "    correct = np.equal(np.argmax(pred, 1), np.argmax(Y_val, 1))\n",
    "    accuracy = np.mean(correct.astype(np.float32))\n",
    "    print(\"Accuracy on test set: %.9f\" % accuracy)\n",
    "    \n",
    "    ## Predict on the Kaggle test data\n",
    "    # NOTE(review): hardcoded absolute local path — not portable; consider a\n",
    "    # configurable DATA_DIR.\n",
    "    testdata = pd.read_csv(\"D:/cao/kaggle/Titanic/data/test.csv\")\n",
    "    # NOTE(review): fillna(0) also zero-fills missing Age/Fare — confirm this\n",
    "    # matches the imputation used for the training data in earlier cells.\n",
    "    testdata = testdata.fillna(0)\n",
    "    # convert ['male', 'female'] values of Sex to [1, 0]\n",
    "    testdata['Sex'] = testdata['Sex'].apply(lambda s: 1 if s == 'male' else 0)\n",
    "    # Keep the same feature columns (and order) used to train the model\n",
    "    X_test = testdata[['Sex', 'Age', 'Pclass', 'SibSp', 'Parch', 'Fare']]\n",
    "    predictions = np.argmax(sess.run(y_pred, feed_dict={X: X_test}), 1)\n",
    "    \n",
    "    ## Export the results as a CSV file in Kaggle submission format\n",
    "    submission = pd.DataFrame({\n",
    "        \"PassengerId\": testdata[\"PassengerId\"],\n",
    "        \"Survived\": predictions\n",
    "    })\n",
    "    submission.to_csv(\"titanic-submission.csv\", index=False)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.5"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
