{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
     "# Flag: has the dataset already been reorganized on disk?\n",
     "# Set org = False to (re-)run the one-time folder reorganization below.\n",
     "org = True\n",
     "if not org:\n",
     "    import os\n",
     "    import shutil\n",
     "\n",
     "    def reorg_cifar10_data(data_dir, label_file, train_dir, test_dir, input_dir, valid_ratio):\n",
     "        \"\"\"Reorganize the Kaggle CIFAR-10 layout into\n",
     "        input_dir/{train,valid,train_valid,test}/<label>/ so that\n",
     "        gluon's ImageFolderDataset can read it.\n",
     "\n",
     "        valid_ratio is the fraction of training images held out per\n",
     "        label for validation; files are copied, never moved.\n",
     "        \"\"\"\n",
     "        # Parse the label CSV (skip the header row); build {image index -> label}.\n",
     "        with open(os.path.join(data_dir, label_file), 'r') as f:\n",
     "            lines = f.readlines()[1:]\n",
     "            tokens = [l.rstrip().split(',') for l in lines]\n",
     "            idx_label = dict(((int(idx), label) for idx, label in tokens))\n",
     "        labels = set(idx_label.values())\n",
     "        num_train = len(os.listdir(os.path.join(data_dir, train_dir)))\n",
     "        # Number of images kept for actual training; the rest go to valid/.\n",
     "        num_train_tuning = int(num_train * (1 - valid_ratio))\n",
     "        assert 0 < num_train_tuning < num_train\n",
     "        num_train_tuning_per_label = num_train_tuning // len(labels)\n",
     "        label_count = dict()\n",
     "        def mkdir_if_not_exist(path):\n",
     "            # path is a list of components joined via os.path.join.\n",
     "            if not os.path.exists(os.path.join(*path)):\n",
     "                os.makedirs(os.path.join(*path))\n",
     "        for train_file in os.listdir(os.path.join(data_dir, train_dir)):\n",
     "            # Filenames look like '<idx>.png'; idx keys into idx_label.\n",
     "            idx = int(train_file.split('.')[0])\n",
     "            label = idx_label[idx]\n",
     "            # Every training image also goes into train_valid (train + valid combined).\n",
     "            mkdir_if_not_exist([data_dir, input_dir, 'train_valid', label])\n",
     "            shutil.copy(os.path.join(data_dir, train_dir, train_file),os.path.join(data_dir, input_dir, 'train_valid', label))\n",
     "            # First num_train_tuning_per_label images of each label -> train/, rest -> valid/.\n",
     "            if label not in label_count or label_count[label] < num_train_tuning_per_label:\n",
     "                mkdir_if_not_exist([data_dir, input_dir, 'train', label])\n",
     "                shutil.copy(os.path.join(data_dir, train_dir, train_file),os.path.join(data_dir, input_dir, 'train', label))\n",
     "                label_count[label] = label_count.get(label, 0) + 1\n",
     "            else:\n",
     "                mkdir_if_not_exist([data_dir, input_dir, 'valid', label])\n",
     "                shutil.copy(os.path.join(data_dir, train_dir, train_file),os.path.join(data_dir, input_dir, 'valid', label))\n",
     "        # Test images are unlabeled; park them all under a dummy 'unknown' class.\n",
     "        mkdir_if_not_exist([data_dir, input_dir, 'test', 'unknown'])\n",
     "        for test_file in os.listdir(os.path.join(data_dir, test_dir)):\n",
     "            shutil.copy(os.path.join(data_dir, test_dir, test_file),os.path.join(data_dir, input_dir, 'test', 'unknown'))\n",
     "\n",
     "    train_dir = 'train'\n",
     "    test_dir = 'test'\n",
     "\n",
     "    data_dir = '/home/sinyer/python/data/kaggle_cifar10'\n",
     "    label_file = 'trainLabels.csv'\n",
     "    input_dir = 'train_valid_test'\n",
     "    valid_ratio = 0.1\n",
     "    reorg_cifar10_data(data_dir, label_file, train_dir, test_dir, input_dir, valid_ratio)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "import mxnet as mx\n",
    "from mxnet import init, gluon, nd, autograd, image\n",
    "from mxnet.gluon import nn\n",
    "from mxnet.gluon.data import vision\n",
    "import numpy as np\n",
    "import pandas as pd\n",
    "import matplotlib.pyplot as plt\n",
    "from time import time\n",
    "ctx = mx.gpu()\n",
    "\n",
    "data_dir = '/home/sinyer/python/data/kaggle_cifar10'\n",
    "label_file = 'trainLabels.csv'\n",
    "input_dir = 'train_valid_test'\n",
    "\n",
    "input_str = data_dir + '/' + input_dir + '/'\n",
    "\n",
    "train_valid_ds = vision.ImageFolderDataset(input_str + 'train_valid', flag=1)\n",
    "\n",
    "train_pic = nd.stack(*[train_valid_ds.__getitem__(i)[0] for i in range(50000)])\n",
    "train_label = []\n",
    "for i in range(50000):\n",
    "    train_label.append(train_valid_ds.__getitem__(i)[1]) \n",
    "train_label = nd.array(train_label)\n",
    "\n",
    "train_pic = train_pic.astype('float32')/255\n",
    "train_label = train_label.astype('float32')\n",
    "\n",
    "batch_size = 128\n",
    "mean = [0.4914, 0.4822, 0.4465]\n",
    "std = [0.2470, 0.2435, 0.2616]\n",
    "for i in range(3):\n",
    "    train_pic[:,:,:,i] = (train_pic[:,:,:,i] - mean[i])/std[i]\n",
    "train_pic = np.transpose(train_pic, (0,3,1,2))\n",
    "\n",
    "train_data = gluon.data.DataLoader(gluon.data.ArrayDataset(train_pic, train_label), batch_size, shuffle=True)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "def accuracy(output, label):\n",
    "    return nd.mean(output.argmax(axis=1)==label).asscalar()\n",
    "\n",
    "def apply(img):\n",
    "    aug_train = image.CreateAugmenter(data_shape=(3, 32, 32), rand_crop=True, rand_mirror=True)\n",
    "    for aug in aug_train:\n",
    "        img = aug(img)\n",
    "    return img\n",
    "\n",
    "def transform(data): \n",
    "    data = nd.pad(data, pad_width=(0,0,0,0,2,2,2,2),mode='constant',constant_value=0)\n",
    "    data = nd.transpose(data, (0,2,3,1))\n",
    "    data = nd.stack(*[apply(d) for d in data])\n",
    "    data = nd.transpose(data, (0,3,1,2))\n",
    "    return data\n",
    "\n",
    "class Residual(nn.Block):\n",
    "    def __init__(self, channels, same_shape=True, equal=True, **kwargs):\n",
    "        super(Residual, self).__init__(**kwargs)\n",
    "        self.same_shape = same_shape\n",
    "        self.equal = equal\n",
    "        with self.name_scope():\n",
    "            strides = 1 if same_shape else 2\n",
    "            self.bn1 = nn.BatchNorm()\n",
    "            self.conv1 = nn.Conv2D(channels, kernel_size=3, padding=1, strides=strides)\n",
    "            self.bn2 = nn.BatchNorm()\n",
    "            self.conv2 = nn.Conv2D(channels, kernel_size=3, padding=1)\n",
    "            if (not same_shape) or (not equal):\n",
    "                self.conv3 = nn.Conv2D(channels, kernel_size=1, strides=strides)\n",
    "    def forward(self, x):\n",
    "        out = self.conv1(nd.relu(self.bn1(x)))\n",
    "        out = self.conv2(nd.relu(self.bn2(out)))\n",
    "        if (not self.same_shape) or (not self.equal):\n",
    "            x = self.conv3(x)\n",
    "        return out + x\n",
    "\n",
    "class ResNet(nn.Block):\n",
    "    def __init__(self, num_classes, **kwargs):\n",
    "        super(ResNet, self).__init__(**kwargs)\n",
    "        with self.name_scope(): \n",
    "            net = self.net = nn.Sequential()\n",
    "            net.add(nn.Conv2D(channels=16, kernel_size=3, strides=1, padding=1))\n",
    "            net.add(Residual(channels=16*8, equal=False))\n",
    "            net.add(Residual(channels=16*8), Residual(channels=16*8))            \n",
    "            net.add(Residual(channels=32*8, same_shape=False))\n",
    "            net.add(Residual(channels=32*8), Residual(channels=32*8))\n",
    "            net.add(Residual(channels=64*8, same_shape=False))\n",
    "            net.add(Residual(channels=64*8), Residual(channels=64*8))\n",
    "            net.add(nn.BatchNorm())\n",
    "            net.add(nn.Activation(activation='relu'))\n",
    "            net.add(nn.AvgPool2D(pool_size=8))\n",
    "            net.add(nn.Flatten())\n",
    "            net.add(nn.Dense(num_classes))\n",
    "    def forward(self, x):\n",
    "        out = x\n",
    "        for i, b in enumerate(self.net):\n",
    "            out = b(out)\n",
    "        return out"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "net = ResNet(10)\n",
    "net.initialize(ctx=ctx, init=init.Xavier())\n",
    "loss = gluon.loss.SoftmaxCrossEntropyLoss()\n",
    "trainer = gluon.Trainer(net.collect_params(), 'nag', {'learning_rate': 0.1, 'momentum': 0.9, 'wd': 5e-4})"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "E 0; L 1.524124; Tr_acc 0.441448; T 122.589915\n",
      "E 1; L 0.965602; Tr_acc 0.655790; T 119.494456\n",
      "E 2; L 0.744971; Tr_acc 0.740181; T 119.443863\n",
      "E 3; L 0.621808; Tr_acc 0.784871; T 118.745010\n",
      "E 4; L 0.544807; Tr_acc 0.812080; T 119.453280\n",
      "E 5; L 0.505423; Tr_acc 0.825464; T 118.908199\n",
      "E 6; L 0.468818; Tr_acc 0.839726; T 118.610433\n",
      "E 7; L 0.447672; Tr_acc 0.845033; T 118.821814\n",
      "E 8; L 0.433558; Tr_acc 0.851578; T 119.065236\n",
      "E 9; L 0.417138; Tr_acc 0.856981; T 118.695992\n",
      "E 10; L 0.401701; Tr_acc 0.862112; T 118.618255\n",
      "E 11; L 0.385744; Tr_acc 0.867479; T 118.668333\n",
      "E 12; L 0.380615; Tr_acc 0.868962; T 118.877657\n",
      "E 13; L 0.371043; Tr_acc 0.873537; T 118.883992\n",
      "E 14; L 0.361567; Tr_acc 0.876586; T 118.941867\n",
      "E 15; L 0.355193; Tr_acc 0.879168; T 118.572304\n",
      "E 16; L 0.343887; Tr_acc 0.881989; T 118.750083\n",
      "E 17; L 0.334025; Tr_acc 0.884195; T 118.573804\n",
      "E 18; L 0.332493; Tr_acc 0.885546; T 118.413831\n",
      "E 19; L 0.331624; Tr_acc 0.886881; T 118.142120\n",
      "E 20; L 0.326648; Tr_acc 0.887960; T 118.424217\n",
      "E 21; L 0.319768; Tr_acc 0.890417; T 118.686454\n",
      "E 22; L 0.319569; Tr_acc 0.890633; T 118.523335\n",
      "E 23; L 0.313818; Tr_acc 0.893119; T 118.461386\n",
      "E 24; L 0.311460; Tr_acc 0.892639; T 118.931372\n",
      "E 25; L 0.308209; Tr_acc 0.894565; T 118.370965\n",
      "E 26; L 0.301482; Tr_acc 0.896312; T 117.998451\n",
      "E 27; L 0.306885; Tr_acc 0.894222; T 118.463008\n",
      "E 28; L 0.300125; Tr_acc 0.896116; T 118.144355\n",
      "E 29; L 0.297204; Tr_acc 0.897818; T 118.874061\n",
      "E 30; L 0.299358; Tr_acc 0.897522; T 118.705589\n",
      "E 31; L 0.298296; Tr_acc 0.896539; T 119.189677\n",
      "E 32; L 0.293242; Tr_acc 0.898777; T 118.240513\n",
      "E 33; L 0.288653; Tr_acc 0.901746; T 118.522226\n",
      "E 34; L 0.293966; Tr_acc 0.899944; T 118.800220\n",
      "E 35; L 0.290144; Tr_acc 0.900763; T 118.711364\n",
      "E 36; L 0.288791; Tr_acc 0.902374; T 118.272174\n",
      "E 37; L 0.288306; Tr_acc 0.901067; T 118.312146\n",
      "E 38; L 0.286840; Tr_acc 0.902542; T 118.206285\n",
      "E 39; L 0.283109; Tr_acc 0.903013; T 118.503061\n",
      "E 40; L 0.278070; Tr_acc 0.905143; T 118.631738\n",
      "E 41; L 0.276718; Tr_acc 0.906030; T 118.450220\n",
      "E 42; L 0.283913; Tr_acc 0.903049; T 118.657429\n",
      "E 43; L 0.277520; Tr_acc 0.905591; T 118.136101\n",
      "E 44; L 0.273574; Tr_acc 0.908004; T 118.585268\n",
      "E 45; L 0.283502; Tr_acc 0.903273; T 118.398295\n",
      "E 46; L 0.282849; Tr_acc 0.902929; T 118.714911\n",
      "E 47; L 0.278006; Tr_acc 0.902522; T 118.895274\n",
      "E 48; L 0.272765; Tr_acc 0.905854; T 118.436372\n",
      "E 49; L 0.277078; Tr_acc 0.904863; T 118.288268\n",
      "E 50; L 0.273633; Tr_acc 0.906118; T 118.310684\n",
      "E 51; L 0.267792; Tr_acc 0.907017; T 118.343076\n",
      "E 52; L 0.270911; Tr_acc 0.907297; T 118.279466\n",
      "E 53; L 0.273289; Tr_acc 0.905275; T 118.313208\n",
      "E 54; L 0.276442; Tr_acc 0.905099; T 117.808043\n",
      "E 55; L 0.263911; Tr_acc 0.910322; T 118.325332\n",
      "E 56; L 0.270885; Tr_acc 0.907001; T 118.457931\n",
      "E 57; L 0.270750; Tr_acc 0.907065; T 118.476570\n",
      "E 58; L 0.274626; Tr_acc 0.905687; T 118.108655\n",
      "E 59; L 0.268356; Tr_acc 0.907809; T 118.566611\n",
      "E 60; L 0.129789; Tr_acc 0.957325; T 118.485275\n",
      "E 61; L 0.077240; Tr_acc 0.974984; T 118.115618\n",
      "E 62; L 0.059436; Tr_acc 0.981917; T 118.125327\n",
      "E 63; L 0.050728; Tr_acc 0.983879; T 118.238343\n",
      "E 64; L 0.043635; Tr_acc 0.986953; T 118.305018\n",
      "E 65; L 0.038821; Tr_acc 0.988719; T 118.505392\n",
      "E 66; L 0.035873; Tr_acc 0.989758; T 118.349368\n",
      "E 67; L 0.035899; Tr_acc 0.989158; T 118.419903\n",
      "E 68; L 0.037917; Tr_acc 0.988431; T 118.386572\n",
      "E 69; L 0.038532; Tr_acc 0.988499; T 118.737600\n",
      "E 70; L 0.041576; Tr_acc 0.987048; T 118.356747\n",
      "E 71; L 0.044215; Tr_acc 0.986021; T 118.156632\n",
      "E 72; L 0.045803; Tr_acc 0.985322; T 118.147096\n",
      "E 73; L 0.049353; Tr_acc 0.984051; T 118.200092\n",
      "E 74; L 0.048147; Tr_acc 0.984703; T 118.407892\n",
      "E 75; L 0.058983; Tr_acc 0.980567; T 118.417646\n",
      "E 76; L 0.056954; Tr_acc 0.981294; T 118.577273\n",
      "E 77; L 0.058232; Tr_acc 0.980834; T 118.372649\n",
      "E 78; L 0.060115; Tr_acc 0.980467; T 118.154080\n",
      "E 79; L 0.054164; Tr_acc 0.982217; T 118.324147\n",
      "E 80; L 0.055510; Tr_acc 0.982545; T 118.305998\n",
      "E 81; L 0.060180; Tr_acc 0.979871; T 118.577763\n",
      "E 82; L 0.056693; Tr_acc 0.981246; T 118.491946\n",
      "E 83; L 0.059143; Tr_acc 0.980874; T 118.172045\n",
      "E 84; L 0.062630; Tr_acc 0.979444; T 118.237720\n",
      "E 85; L 0.054819; Tr_acc 0.982177; T 118.144625\n",
      "E 86; L 0.057312; Tr_acc 0.981889; T 118.750360\n",
      "E 87; L 0.061212; Tr_acc 0.980639; T 117.984393\n",
      "E 88; L 0.059644; Tr_acc 0.980675; T 118.479949\n",
      "E 89; L 0.060273; Tr_acc 0.980954; T 117.661995\n",
      "E 90; L 0.059329; Tr_acc 0.980375; T 118.301161\n",
      "E 91; L 0.058494; Tr_acc 0.981214; T 118.433719\n",
      "E 92; L 0.059035; Tr_acc 0.980695; T 117.980463\n",
      "E 93; L 0.053774; Tr_acc 0.982952; T 118.374372\n",
      "E 94; L 0.058183; Tr_acc 0.980411; T 118.109194\n",
      "E 95; L 0.058780; Tr_acc 0.980942; T 117.933496\n",
      "E 96; L 0.057315; Tr_acc 0.981382; T 118.147017\n",
      "E 97; L 0.055825; Tr_acc 0.981945; T 117.891407\n",
      "E 98; L 0.052415; Tr_acc 0.983072; T 118.025350\n",
      "E 99; L 0.055227; Tr_acc 0.981678; T 118.132168\n",
      "E 100; L 0.055999; Tr_acc 0.982005; T 118.494092\n",
      "E 101; L 0.058227; Tr_acc 0.980719; T 118.315405\n",
      "E 102; L 0.058408; Tr_acc 0.980894; T 117.954309\n",
      "E 103; L 0.049717; Tr_acc 0.984235; T 118.115260\n",
      "E 104; L 0.053001; Tr_acc 0.982721; T 118.296828\n",
      "E 105; L 0.056612; Tr_acc 0.981985; T 118.323616\n",
      "E 106; L 0.052596; Tr_acc 0.983056; T 118.099779\n",
      "E 107; L 0.057694; Tr_acc 0.981362; T 118.265442\n",
      "E 108; L 0.053886; Tr_acc 0.982864; T 118.074456\n",
      "E 109; L 0.059195; Tr_acc 0.980535; T 118.155352\n",
      "E 110; L 0.050055; Tr_acc 0.983720; T 118.497947\n",
      "E 111; L 0.053092; Tr_acc 0.982737; T 118.348689\n",
      "E 112; L 0.051787; Tr_acc 0.983504; T 118.255641\n",
      "E 113; L 0.049467; Tr_acc 0.984779; T 118.193083\n",
      "E 114; L 0.049720; Tr_acc 0.983911; T 118.423008\n",
      "E 115; L 0.057917; Tr_acc 0.981454; T 118.416979\n",
      "E 116; L 0.052520; Tr_acc 0.983312; T 118.126164\n",
      "E 117; L 0.055628; Tr_acc 0.982193; T 118.174071\n",
      "E 118; L 0.045557; Tr_acc 0.985686; T 118.230558\n",
      "E 119; L 0.050394; Tr_acc 0.983923; T 118.317493\n",
      "E 120; L 0.020539; Tr_acc 0.994525; T 118.094622\n",
      "E 121; L 0.007005; Tr_acc 0.999001; T 118.324357\n",
      "E 122; L 0.004759; Tr_acc 0.999480; T 118.016759\n",
      "E 123; L 0.003912; Tr_acc 0.999660; T 118.132428\n",
      "E 124; L 0.003206; Tr_acc 0.999840; T 118.506840\n",
      "E 125; L 0.003091; Tr_acc 0.999840; T 117.911470\n",
      "E 126; L 0.002774; Tr_acc 0.999880; T 118.310965\n",
      "E 127; L 0.002464; Tr_acc 0.999920; T 118.331167\n",
      "E 128; L 0.002504; Tr_acc 0.999980; T 118.302877\n",
      "E 129; L 0.002262; Tr_acc 0.999960; T 118.198788\n",
      "E 130; L 0.002176; Tr_acc 0.999960; T 118.152872\n",
      "E 131; L 0.002154; Tr_acc 0.999980; T 118.069241\n",
      "E 132; L 0.002037; Tr_acc 0.999960; T 117.988134\n",
      "E 133; L 0.002045; Tr_acc 0.999980; T 117.952077\n",
      "E 134; L 0.002112; Tr_acc 0.999980; T 118.488060\n",
      "E 135; L 0.001965; Tr_acc 1.000000; T 118.009149\n",
      "E 136; L 0.001946; Tr_acc 0.999980; T 118.564690\n",
      "E 137; L 0.001952; Tr_acc 1.000000; T 117.834380\n",
      "E 138; L 0.001990; Tr_acc 0.999980; T 118.201485\n",
      "E 139; L 0.001979; Tr_acc 0.999980; T 118.249155\n",
      "E 140; L 0.001920; Tr_acc 1.000000; T 118.351114\n",
      "E 141; L 0.001918; Tr_acc 0.999960; T 118.164370\n",
      "E 142; L 0.001782; Tr_acc 1.000000; T 118.381078\n",
      "E 143; L 0.001837; Tr_acc 0.999960; T 118.405782\n",
      "E 144; L 0.001788; Tr_acc 1.000000; T 118.380884\n",
      "E 145; L 0.001837; Tr_acc 1.000000; T 118.000181\n",
      "E 146; L 0.001850; Tr_acc 1.000000; T 118.093230\n",
      "E 147; L 0.001762; Tr_acc 0.999980; T 118.159649\n",
      "E 148; L 0.001849; Tr_acc 1.000000; T 118.250973\n",
      "E 149; L 0.001784; Tr_acc 1.000000; T 118.089723\n",
      "E 150; L 0.001753; Tr_acc 1.000000; T 122.751971\n",
      "E 151; L 0.001792; Tr_acc 1.000000; T 118.243021\n",
      "E 152; L 0.001787; Tr_acc 1.000000; T 118.223067\n",
      "E 153; L 0.001724; Tr_acc 1.000000; T 118.140151\n",
      "E 154; L 0.001801; Tr_acc 1.000000; T 118.293688\n",
      "E 155; L 0.001835; Tr_acc 0.999980; T 118.163334\n",
      "E 156; L 0.001868; Tr_acc 0.999980; T 118.234736\n",
      "E 157; L 0.001732; Tr_acc 1.000000; T 118.690409\n",
      "E 158; L 0.001771; Tr_acc 0.999980; T 118.651125\n",
      "E 159; L 0.001811; Tr_acc 1.000000; T 118.337742\n",
      "Tr_acc 1.000000\n"
     ]
    }
   ],
   "source": [
     "# Train for 160 epochs with a step learning-rate schedule:\n",
     "# 0.1 -> 0.02 (epoch 60) -> 0.004 (epoch 120) -> 0.0008 (epoch 140).\n",
     "epochs = 160\n",
     "\n",
     "for epoch in range(epochs):\n",
     "    if epoch == 60:\n",
     "        trainer.set_learning_rate(0.02)\n",
     "    if epoch == 120:\n",
     "        trainer.set_learning_rate(0.004)\n",
     "    if epoch == 140:\n",
     "        trainer.set_learning_rate(0.0008)\n",
     "    train_loss = 0.\n",
     "    train_acc = 0.\n",
     "    batch = 0\n",
     "    start = time()\n",
     "    for data, label in train_data:\n",
     "        # Augment the batch (pad + random crop/flip) on CPU, then move to GPU.\n",
     "        data = transform(data).as_in_context(ctx)\n",
     "        label = label.as_in_context(ctx)\n",
     "        with autograd.record():\n",
     "            output = net(data)\n",
     "            l = loss(output, label)\n",
     "        l.backward()\n",
     "        # step(batch_size) normalizes the gradient by the batch size.\n",
     "        trainer.step(data.shape[0])\n",
     "        train_loss += nd.mean(l).asscalar()\n",
     "        train_acc += accuracy(output, label)\n",
     "        batch += 1\n",
     "    # NOTE(review): this prints every 5th epoch, but the saved cell output\n",
     "    # shows a line for every epoch -- the cell was likely edited after the\n",
     "    # recorded run; confirm before relying on the logged numbers.\n",
     "    if epoch%5 == 0:\n",
     "        print(\"E %d; L %f; Tr_acc %f; T %f\" % (epoch, train_loss / batch, train_acc / batch, time() - start))\n",
     "print(\"Tr_acc %f\" % (train_acc / batch))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "test_ds = vision.ImageFolderDataset(input_str + 'test', flag=1)\n",
    "\n",
    "test_pic = nd.stack(*[test_ds.__getitem__(i)[0] for i in range(300000)])\n",
    "test_label = []\n",
    "for i in range(300000):\n",
    "    test_label.append(test_ds.__getitem__(i)[1]) \n",
    "test_label = nd.array(test_label)\n",
    "\n",
    "test_pic = test_pic.astype('float32')/255\n",
    "test_label = test_label.astype('float32')\n",
    "\n",
    "for i in range(3):\n",
    "    test_pic[:,:,:,i] = (test_pic[:,:,:,i] - mean[i])/std[i]    \n",
    "test_pic = np.transpose(test_pic, (0,3,1,2))\n",
    "\n",
    "test_data = gluon.data.DataLoader(gluon.data.ArrayDataset(test_pic, test_label), batch_size, shuffle=False)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
     "preds = []\n",
     "for data, label in test_data:\n",
     "    # Inference only: no autograd.record() and no augmentation.\n",
     "    output = net(data.as_in_context(ctx))\n",
     "    preds.extend(output.argmax(axis=1).astype(int).asnumpy())\n",
     "\n",
     "# Submission ids must line up with the order in which the test images were\n",
     "# read; sorting the integer ids as strings reproduces lexicographic\n",
     "# filename order (1, 10, 100, ..., 2, 20, ...).\n",
     "# NOTE(review): this assumes ImageFolderDataset enumerates files in\n",
     "# lexicographic order -- verify against the installed mxnet version.\n",
     "sorted_ids = list(range(1, len(test_ds) + 1))\n",
     "sorted_ids.sort(key = lambda x:str(x))\n",
     "\n",
     "df = pd.DataFrame({'id': sorted_ids, 'label': preds})\n",
     "# Map numeric class indices back to class-name strings for the submission.\n",
     "df['label'] = df['label'].apply(lambda x: train_valid_ds.synsets[x])\n",
     "df.to_csv('submission.csv', index=False)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.3"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
