{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2018-11-13T07:12:23.968756Z",
     "start_time": "2018-11-13T07:12:23.011044Z"
    }
   },
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/usr/lib64/python2.7/site-packages/sklearn/cross_validation.py:41: DeprecationWarning: This module was deprecated in version 0.18 in favor of the model_selection module into which all the refactored classes and functions are moved. Also note that the interface of the new CV iterators are different from that of this module. This module will be removed in 0.20.\n",
      "  \"This module will be removed in 0.20.\", DeprecationWarning)\n"
     ]
    }
   ],
   "source": [
    "%matplotlib inline\n",
    "from IPython.core.interactiveshell import InteractiveShell\n",
    "InteractiveShell.ast_node_interactivity = \"all\"\n",
    "\n",
    "import os\n",
    "import ast\n",
    "import datetime as dt\n",
    "import matplotlib.pyplot as plt\n",
    "\n",
    "import seaborn as sns\n",
    "import cv2\n",
    "import pandas as pd\n",
    "import numpy as np\n",
    "\n",
    "import glob\n",
    "\n",
    "import numpy as np\n",
    "from PIL import Image, ImageDraw\n",
    "\n",
    "from sklearn.preprocessing import LabelEncoder\n",
    "from sklearn.cross_validation import train_test_split\n",
    "\n",
    "import logging\n",
    "logging.basicConfig(filename='example.log',level=logging.DEBUG)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2018-11-13T07:12:28.071865Z",
     "start_time": "2018-11-13T07:12:26.555909Z"
    },
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "('Reading...', '../input/train_simplified/eye.csv')\n",
      "('Reading...', '../input/train_simplified/castle.csv')\n",
      "('Reading...', '../input/train_simplified/pizza.csv')\n",
      "('Reading...', '../input/train_simplified/umbrella.csv')\n",
      "('Reading...', '../input/train_simplified/bat.csv')\n",
      "('Reading...', '../input/train_simplified/hot tub.csv')\n",
      "('Reading...', '../input/train_simplified/diving board.csv')\n",
      "('Reading...', '../input/train_simplified/wine bottle.csv')\n",
      "('Reading...', '../input/train_simplified/butterfly.csv')\n",
      "('Reading...', '../input/train_simplified/bread.csv')\n",
      "('Reading...', '../input/train_simplified/television.csv')\n",
      "('Reading...', '../input/train_simplified/peas.csv')\n",
      "('Reading...', '../input/train_simplified/binoculars.csv')\n",
      "('Reading...', '../input/train_simplified/basket.csv')\n",
      "('Reading...', '../input/train_simplified/steak.csv')\n",
      "('Reading...', '../input/train_simplified/eyeglasses.csv')\n",
      "('Reading...', '../input/train_simplified/backpack.csv')\n",
      "('Reading...', '../input/train_simplified/teddy-bear.csv')\n",
      "('Reading...', '../input/train_simplified/grass.csv')\n",
      "('Reading...', '../input/train_simplified/canoe.csv')\n",
      "('Reading...', '../input/train_simplified/power outlet.csv')\n",
      "('Reading...', '../input/train_simplified/car.csv')\n",
      "('Reading...', '../input/train_simplified/keyboard.csv')\n",
      "('Reading...', '../input/train_simplified/parachute.csv')\n",
      "('Reading...', '../input/train_simplified/spreadsheet.csv')\n",
      "('Reading...', '../input/train_simplified/snowflake.csv')\n",
      "('Reading...', '../input/train_simplified/pond.csv')\n",
      "('Reading...', '../input/train_simplified/airplane.csv')\n",
      "('Reading...', '../input/train_simplified/postcard.csv')\n",
      "('Reading...', '../input/train_simplified/lobster.csv')\n",
      "('Reading...', '../input/train_simplified/peanut.csv')\n",
      "('Reading...', '../input/train_simplified/broom.csv')\n",
      "('Reading...', '../input/train_simplified/mouse.csv')\n",
      "('Reading...', '../input/train_simplified/bathtub.csv')\n",
      "('Reading...', '../input/train_simplified/hot dog.csv')\n",
      "('Reading...', '../input/train_simplified/chandelier.csv')\n",
      "('Reading...', '../input/train_simplified/cat.csv')\n",
      "('Reading...', '../input/train_simplified/cello.csv')\n",
      "('Reading...', '../input/train_simplified/scissors.csv')\n",
      "('Reading...', '../input/train_simplified/horse.csv')\n",
      "('Reading...', '../input/train_simplified/bed.csv')\n",
      "('Reading...', '../input/train_simplified/tractor.csv')\n",
      "('Reading...', '../input/train_simplified/pliers.csv')\n",
      "('Reading...', '../input/train_simplified/chair.csv')\n",
      "('Reading...', '../input/train_simplified/baseball.csv')\n",
      "('Reading...', '../input/train_simplified/paintbrush.csv')\n",
      "('Reading...', '../input/train_simplified/stereo.csv')\n",
      "('Reading...', '../input/train_simplified/bulldozer.csv')\n",
      "('Reading...', '../input/train_simplified/rhinoceros.csv')\n",
      "('Reading...', '../input/train_simplified/sandwich.csv')\n",
      "('Reading...', '../input/train_simplified/vase.csv')\n",
      "('Reading...', '../input/train_simplified/campfire.csv')\n",
      "('Reading...', '../input/train_simplified/basketball.csv')\n",
      "('Reading...', '../input/train_simplified/coffee cup.csv')\n",
      "('Reading...', '../input/train_simplified/sleeping bag.csv')\n",
      "('Reading...', '../input/train_simplified/streetlight.csv')\n",
      "('Reading...', '../input/train_simplified/zebra.csv')\n",
      "('Reading...', '../input/train_simplified/see saw.csv')\n",
      "('Reading...', '../input/train_simplified/axe.csv')\n",
      "('Reading...', '../input/train_simplified/hedgehog.csv')\n",
      "('Reading...', '../input/train_simplified/radio.csv')\n",
      "('Reading...', '../input/train_simplified/grapes.csv')\n",
      "('Reading...', '../input/train_simplified/soccer ball.csv')\n",
      "('Reading...', '../input/train_simplified/ceiling fan.csv')\n",
      "('Reading...', '../input/train_simplified/skyscraper.csv')\n",
      "('Reading...', '../input/train_simplified/frog.csv')\n",
      "('Reading...', '../input/train_simplified/snail.csv')\n",
      "('Reading...', '../input/train_simplified/dresser.csv')\n",
      "('Reading...', '../input/train_simplified/goatee.csv')\n",
      "('Reading...', '../input/train_simplified/sink.csv')\n",
      "('Reading...', '../input/train_simplified/apple.csv')\n",
      "('Reading...', '../input/train_simplified/The Mona Lisa.csv')\n",
      "('Reading...', '../input/train_simplified/house.csv')\n",
      "('Reading...', '../input/train_simplified/hexagon.csv')\n",
      "('Reading...', '../input/train_simplified/circle.csv')\n",
      "('Reading...', '../input/train_simplified/rain.csv')\n",
      "('Reading...', '../input/train_simplified/boomerang.csv')\n",
      "('Reading...', '../input/train_simplified/train.csv')\n",
      "('Reading...', '../input/train_simplified/kangaroo.csv')\n",
      "('Reading...', '../input/train_simplified/rake.csv')\n",
      "('Reading...', '../input/train_simplified/arm.csv')\n",
      "('Reading...', '../input/train_simplified/headphones.csv')\n",
      "('Reading...', '../input/train_simplified/cooler.csv')\n",
      "('Reading...', '../input/train_simplified/sock.csv')\n",
      "('Reading...', '../input/train_simplified/rainbow.csv')\n",
      "('Reading...', '../input/train_simplified/cloud.csv')\n",
      "('Reading...', '../input/train_simplified/hockey puck.csv')\n",
      "('Reading...', '../input/train_simplified/bird.csv')\n",
      "('Reading...', '../input/train_simplified/bottlecap.csv')\n",
      "('Reading...', '../input/train_simplified/stove.csv')\n",
      "('Reading...', '../input/train_simplified/bowtie.csv')\n",
      "('Reading...', '../input/train_simplified/duck.csv')\n",
      "('Reading...', '../input/train_simplified/carrot.csv')\n",
      "('Reading...', '../input/train_simplified/skateboard.csv')\n",
      "('Reading...', '../input/train_simplified/hot air balloon.csv')\n",
      "('Reading...', '../input/train_simplified/onion.csv')\n",
      "('Reading...', '../input/train_simplified/hockey stick.csv')\n",
      "('Reading...', '../input/train_simplified/house plant.csv')\n",
      "('Reading...', '../input/train_simplified/underwear.csv')\n",
      "('Reading...', '../input/train_simplified/shovel.csv')\n",
      "('Reading...', '../input/train_simplified/picture frame.csv')\n",
      "('Reading...', '../input/train_simplified/pool.csv')\n",
      "('Reading...', '../input/train_simplified/camouflage.csv')\n",
      "('Reading...', '../input/train_simplified/elephant.csv')\n",
      "('Reading...', '../input/train_simplified/stitches.csv')\n",
      "('Reading...', '../input/train_simplified/whale.csv')\n",
      "('Reading...', '../input/train_simplified/sun.csv')\n",
      "('Reading...', '../input/train_simplified/moon.csv')\n",
      "('Reading...', '../input/train_simplified/broccoli.csv')\n",
      "('Reading...', '../input/train_simplified/rabbit.csv')\n",
      "('Reading...', '../input/train_simplified/rollerskates.csv')\n",
      "('Reading...', '../input/train_simplified/fence.csv')\n",
      "('Reading...', '../input/train_simplified/squirrel.csv')\n",
      "('Reading...', '../input/train_simplified/feather.csv')\n",
      "('Reading...', '../input/train_simplified/roller coaster.csv')\n",
      "('Reading...', '../input/train_simplified/door.csv')\n",
      "('Reading...', '../input/train_simplified/flamingo.csv')\n",
      "('Reading...', '../input/train_simplified/popsicle.csv')\n",
      "('Reading...', '../input/train_simplified/waterslide.csv')\n",
      "('Reading...', '../input/train_simplified/fireplace.csv')\n",
      "('Reading...', '../input/train_simplified/hamburger.csv')\n",
      "('Reading...', '../input/train_simplified/windmill.csv')\n",
      "('Reading...', '../input/train_simplified/bus.csv')\n",
      "('Reading...', '../input/train_simplified/sweater.csv')\n",
      "('Reading...', '../input/train_simplified/snake.csv')\n",
      "('Reading...', '../input/train_simplified/river.csv')\n",
      "('Reading...', '../input/train_simplified/drums.csv')\n",
      "('Reading...', '../input/train_simplified/garden.csv')\n",
      "('Reading...', '../input/train_simplified/clock.csv')\n",
      "('Reading...', '../input/train_simplified/bear.csv')\n",
      "('Reading...', '../input/train_simplified/baseball bat.csv')\n",
      "('Reading...', '../input/train_simplified/bicycle.csv')\n",
      "('Reading...', '../input/train_simplified/fire hydrant.csv')\n",
      "('Reading...', '../input/train_simplified/crocodile.csv')\n",
      "('Reading...', '../input/train_simplified/parrot.csv')\n",
      "('Reading...', '../input/train_simplified/angel.csv')\n",
      "('Reading...', '../input/train_simplified/octagon.csv')\n",
      "('Reading...', '../input/train_simplified/giraffe.csv')\n",
      "('Reading...', '../input/train_simplified/calculator.csv')\n",
      "('Reading...', '../input/train_simplified/bush.csv')\n",
      "('Reading...', '../input/train_simplified/pineapple.csv')\n",
      "('Reading...', '../input/train_simplified/dragon.csv')\n",
      "('Reading...', '../input/train_simplified/tent.csv')\n",
      "('Reading...', '../input/train_simplified/paper clip.csv')\n",
      "('Reading...', '../input/train_simplified/snorkel.csv')\n",
      "('Reading...', '../input/train_simplified/shorts.csv')\n",
      "('Reading...', '../input/train_simplified/tornado.csv')\n",
      "('Reading...', '../input/train_simplified/beard.csv')\n",
      "('Reading...', '../input/train_simplified/hand.csv')\n",
      "('Reading...', '../input/train_simplified/moustache.csv')\n",
      "('Reading...', '../input/train_simplified/screwdriver.csv')\n",
      "('Reading...', '../input/train_simplified/nose.csv')\n",
      "('Reading...', '../input/train_simplified/eraser.csv')\n",
      "('Reading...', '../input/train_simplified/truck.csv')\n",
      "('Reading...', '../input/train_simplified/sea turtle.csv')\n",
      "('Reading...', '../input/train_simplified/penguin.csv')\n",
      "('Reading...', '../input/train_simplified/ant.csv')\n",
      "('Reading...', '../input/train_simplified/motorbike.csv')\n",
      "('Reading...', '../input/train_simplified/tree.csv')\n",
      "('Reading...', '../input/train_simplified/camel.csv')\n",
      "('Reading...', '../input/train_simplified/toe.csv')\n",
      "('Reading...', '../input/train_simplified/pillow.csv')\n",
      "('Reading...', '../input/train_simplified/yoga.csv')\n",
      "('Reading...', '../input/train_simplified/lantern.csv')\n",
      "('Reading...', '../input/train_simplified/potato.csv')\n",
      "('Reading...', '../input/train_simplified/cruise ship.csv')\n",
      "('Reading...', '../input/train_simplified/pickup truck.csv')\n",
      "('Reading...', '../input/train_simplified/pig.csv')\n",
      "('Reading...', '../input/train_simplified/foot.csv')\n",
      "('Reading...', '../input/train_simplified/mosquito.csv')\n",
      "('Reading...', '../input/train_simplified/tiger.csv')\n",
      "('Reading...', '../input/train_simplified/octopus.csv')\n",
      "('Reading...', '../input/train_simplified/diamond.csv')\n",
      "('Reading...', '../input/train_simplified/wine glass.csv')\n",
      "('Reading...', '../input/train_simplified/crab.csv')\n",
      "('Reading...', '../input/train_simplified/blackberry.csv')\n",
      "('Reading...', '../input/train_simplified/submarine.csv')\n",
      "('Reading...', '../input/train_simplified/bee.csv')\n",
      "('Reading...', '../input/train_simplified/The Eiffel Tower.csv')\n",
      "('Reading...', '../input/train_simplified/finger.csv')\n",
      "('Reading...', '../input/train_simplified/belt.csv')\n",
      "('Reading...', '../input/train_simplified/jail.csv')\n",
      "('Reading...', '../input/train_simplified/bracelet.csv')\n",
      "('Reading...', '../input/train_simplified/megaphone.csv')\n",
      "('Reading...', '../input/train_simplified/saw.csv')\n",
      "('Reading...', '../input/train_simplified/microwave.csv')\n",
      "('Reading...', '../input/train_simplified/pants.csv')\n",
      "('Reading...', '../input/train_simplified/The Great Wall of China.csv')\n",
      "('Reading...', '../input/train_simplified/mermaid.csv')\n",
      "('Reading...', '../input/train_simplified/ear.csv')\n",
      "('Reading...', '../input/train_simplified/firetruck.csv')\n",
      "('Reading...', '../input/train_simplified/ocean.csv')\n",
      "('Reading...', '../input/train_simplified/birthday cake.csv')\n",
      "('Reading...', '../input/train_simplified/table.csv')\n",
      "('Reading...', '../input/train_simplified/star.csv')\n",
      "('Reading...', '../input/train_simplified/cannon.csv')\n",
      "('Reading...', '../input/train_simplified/envelope.csv')\n",
      "('Reading...', '../input/train_simplified/skull.csv')\n",
      "('Reading...', '../input/train_simplified/beach.csv')\n",
      "('Reading...', '../input/train_simplified/cactus.csv')\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "('Reading...', '../input/train_simplified/alarm clock.csv')\n",
      "('Reading...', '../input/train_simplified/shoe.csv')\n",
      "('Reading...', '../input/train_simplified/map.csv')\n",
      "('Reading...', '../input/train_simplified/nail.csv')\n",
      "('Reading...', '../input/train_simplified/stairs.csv')\n",
      "('Reading...', '../input/train_simplified/blueberry.csv')\n",
      "('Reading...', '../input/train_simplified/mailbox.csv')\n",
      "('Reading...', '../input/train_simplified/remote control.csv')\n",
      "('Reading...', '../input/train_simplified/hourglass.csv')\n",
      "('Reading...', '../input/train_simplified/trombone.csv')\n",
      "('Reading...', '../input/train_simplified/watermelon.csv')\n",
      "('Reading...', '../input/train_simplified/suitcase.csv')\n",
      "('Reading...', '../input/train_simplified/telephone.csv')\n",
      "('Reading...', '../input/train_simplified/golf club.csv')\n",
      "('Reading...', '../input/train_simplified/sword.csv')\n",
      "('Reading...', '../input/train_simplified/bridge.csv')\n",
      "('Reading...', '../input/train_simplified/brain.csv')\n",
      "('Reading...', '../input/train_simplified/helmet.csv')\n",
      "('Reading...', '../input/train_simplified/spoon.csv')\n",
      "('Reading...', '../input/train_simplified/dishwasher.csv')\n",
      "('Reading...', '../input/train_simplified/hospital.csv')\n",
      "('Reading...', '../input/train_simplified/couch.csv')\n",
      "('Reading...', '../input/train_simplified/donut.csv')\n",
      "('Reading...', '../input/train_simplified/traffic light.csv')\n",
      "('Reading...', '../input/train_simplified/barn.csv')\n",
      "('Reading...', '../input/train_simplified/banana.csv')\n",
      "('Reading...', '../input/train_simplified/squiggle.csv')\n",
      "('Reading...', '../input/train_simplified/wheel.csv')\n",
      "('Reading...', '../input/train_simplified/necklace.csv')\n",
      "('Reading...', '../input/train_simplified/violin.csv')\n",
      "('Reading...', '../input/train_simplified/hurricane.csv')\n",
      "('Reading...', '../input/train_simplified/matches.csv')\n",
      "('Reading...', '../input/train_simplified/crown.csv')\n",
      "('Reading...', '../input/train_simplified/candle.csv')\n",
      "('Reading...', '../input/train_simplified/bucket.csv')\n",
      "('Reading...', '../input/train_simplified/anvil.csv')\n",
      "('Reading...', '../input/train_simplified/dumbbell.csv')\n",
      "('Reading...', '../input/train_simplified/fork.csv')\n",
      "('Reading...', '../input/train_simplified/toothpaste.csv')\n",
      "('Reading...', '../input/train_simplified/compass.csv')\n",
      "('Reading...', '../input/train_simplified/key.csv')\n",
      "('Reading...', '../input/train_simplified/strawberry.csv')\n",
      "('Reading...', '../input/train_simplified/teapot.csv')\n",
      "('Reading...', '../input/train_simplified/pencil.csv')\n",
      "('Reading...', '../input/train_simplified/light bulb.csv')\n",
      "('Reading...', '../input/train_simplified/sheep.csv')\n",
      "('Reading...', '../input/train_simplified/helicopter.csv')\n",
      "('Reading...', '../input/train_simplified/smiley face.csv')\n",
      "('Reading...', '../input/train_simplified/van.csv')\n",
      "('Reading...', '../input/train_simplified/dolphin.csv')\n",
      "('Reading...', '../input/train_simplified/spider.csv')\n",
      "('Reading...', '../input/train_simplified/fish.csv')\n",
      "('Reading...', '../input/train_simplified/toothbrush.csv')\n",
      "('Reading...', '../input/train_simplified/marker.csv')\n",
      "('Reading...', '../input/train_simplified/flying saucer.csv')\n",
      "('Reading...', '../input/train_simplified/cookie.csv')\n",
      "('Reading...', '../input/train_simplified/scorpion.csv')\n",
      "('Reading...', '../input/train_simplified/palm tree.csv')\n",
      "('Reading...', '../input/train_simplified/toaster.csv')\n",
      "('Reading...', '../input/train_simplified/sailboat.csv')\n",
      "('Reading...', '../input/train_simplified/asparagus.csv')\n",
      "('Reading...', '../input/train_simplified/pear.csv')\n",
      "('Reading...', '../input/train_simplified/elbow.csv')\n",
      "('Reading...', '../input/train_simplified/cup.csv')\n",
      "('Reading...', '../input/train_simplified/dog.csv')\n",
      "('Reading...', '../input/train_simplified/piano.csv')\n",
      "('Reading...', '../input/train_simplified/mushroom.csv')\n",
      "('Reading...', '../input/train_simplified/snowman.csv')\n",
      "('Reading...', '../input/train_simplified/line.csv')\n",
      "('Reading...', '../input/train_simplified/mouth.csv')\n",
      "('Reading...', '../input/train_simplified/calendar.csv')\n",
      "('Reading...', '../input/train_simplified/saxophone.csv')\n",
      "('Reading...', '../input/train_simplified/paint can.csv')\n",
      "('Reading...', '../input/train_simplified/panda.csv')\n",
      "('Reading...', '../input/train_simplified/hat.csv')\n",
      "('Reading...', '../input/train_simplified/drill.csv')\n",
      "('Reading...', '../input/train_simplified/leg.csv')\n",
      "('Reading...', '../input/train_simplified/knee.csv')\n",
      "('Reading...', '../input/train_simplified/lightning.csv')\n",
      "('Reading...', '../input/train_simplified/ice cream.csv')\n",
      "('Reading...', '../input/train_simplified/hammer.csv')\n",
      "('Reading...', '../input/train_simplified/frying pan.csv')\n",
      "('Reading...', '../input/train_simplified/camera.csv')\n",
      "('Reading...', '../input/train_simplified/mountain.csv')\n",
      "('Reading...', '../input/train_simplified/flip flops.csv')\n",
      "('Reading...', '../input/train_simplified/shark.csv')\n",
      "('Reading...', '../input/train_simplified/swing set.csv')\n",
      "('Reading...', '../input/train_simplified/bench.csv')\n",
      "('Reading...', '../input/train_simplified/string bean.csv')\n",
      "('Reading...', '../input/train_simplified/ladder.csv')\n",
      "('Reading...', '../input/train_simplified/cake.csv')\n",
      "('Reading...', '../input/train_simplified/jacket.csv')\n",
      "('Reading...', '../input/train_simplified/flower.csv')\n",
      "('Reading...', '../input/train_simplified/fan.csv')\n",
      "('Reading...', '../input/train_simplified/laptop.csv')\n",
      "('Reading...', '../input/train_simplified/cell phone.csv')\n",
      "('Reading...', '../input/train_simplified/mug.csv')\n",
      "('Reading...', '../input/train_simplified/stethoscope.csv')\n",
      "('Reading...', '../input/train_simplified/guitar.csv')\n",
      "('Reading...', '../input/train_simplified/tooth.csv')\n",
      "('Reading...', '../input/train_simplified/square.csv')\n",
      "('Reading...', '../input/train_simplified/lipstick.csv')\n",
      "('Reading...', '../input/train_simplified/purse.csv')\n",
      "('Reading...', '../input/train_simplified/raccoon.csv')\n",
      "('Reading...', '../input/train_simplified/passport.csv')\n",
      "('Reading...', '../input/train_simplified/oven.csv')\n",
      "('Reading...', '../input/train_simplified/book.csv')\n",
      "('Reading...', '../input/train_simplified/clarinet.csv')\n",
      "('Reading...', '../input/train_simplified/crayon.csv')\n",
      "('Reading...', '../input/train_simplified/face.csv')\n",
      "('Reading...', '../input/train_simplified/church.csv')\n",
      "('Reading...', '../input/train_simplified/stop sign.csv')\n",
      "('Reading...', '../input/train_simplified/microphone.csv')\n",
      "('Reading...', '../input/train_simplified/lollipop.csv')\n",
      "('Reading...', '../input/train_simplified/swan.csv')\n",
      "('Reading...', '../input/train_simplified/leaf.csv')\n",
      "('Reading...', '../input/train_simplified/computer.csv')\n",
      "('Reading...', '../input/train_simplified/lion.csv')\n",
      "('Reading...', '../input/train_simplified/lighthouse.csv')\n",
      "('Reading...', '../input/train_simplified/t-shirt.csv')\n",
      "('Reading...', '../input/train_simplified/ambulance.csv')\n",
      "('Reading...', '../input/train_simplified/washing machine.csv')\n",
      "('Reading...', '../input/train_simplified/police car.csv')\n",
      "('Reading...', '../input/train_simplified/garden hose.csv')\n",
      "('Reading...', '../input/train_simplified/owl.csv')\n",
      "('Reading...', '../input/train_simplified/school bus.csv')\n",
      "('Reading...', '../input/train_simplified/cow.csv')\n",
      "('Reading...', '../input/train_simplified/triangle.csv')\n",
      "('Reading...', '../input/train_simplified/toilet.csv')\n",
      "('Reading...', '../input/train_simplified/monkey.csv')\n",
      "('Reading...', '../input/train_simplified/wristwatch.csv')\n",
      "('Reading...', '../input/train_simplified/floor lamp.csv')\n",
      "('Reading...', '../input/train_simplified/bandage.csv')\n",
      "('Reading...', '../input/train_simplified/harp.csv')\n",
      "('Reading...', '../input/train_simplified/flashlight.csv')\n",
      "('Reading...', '../input/train_simplified/tennis racquet.csv')\n",
      "('Reading...', '../input/train_simplified/trumpet.csv')\n",
      "('Reading...', '../input/train_simplified/zigzag.csv')\n",
      "('Reading...', '../input/train_simplified/animal migration.csv')\n",
      "('Reading...', '../input/train_simplified/speedboat.csv')\n"
     ]
    }
   ],
   "source": [
    "CLASSES_CSV = glob.glob('../input/train_simplified/*.csv')\n",
    "CLASSES = [x.split('/')[-1][:-4] for x in CLASSES_CSV]\n",
    "\n",
    "# 读取单个csv文件\n",
    "def read_df(path, nrows):\n",
    "    print('Reading...', path)\n",
    "    if isinstance(nrows, int):\n",
    "        return pd.read_csv(path, nrows=nrows, parse_dates=['timestamp'])\n",
    "    else:\n",
    "        return pd.read_csv(path, parse_dates=['timestamp'])\n",
    "\n",
    "# 读取多个csv文件\n",
    "def contcat_df(paths, nrows):\n",
    "    dfs = []\n",
    "    for path in paths:\n",
    "        dfs.append(read_df(path, nrows))\n",
    "    return pd.concat(dfs, axis=0, ignore_index=True)\n",
    "\n",
    "df = contcat_df(CLASSES_CSV, 50)\n",
    "df = df.reindex(np.random.permutation(df.index))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2018-11-05T09:37:35.482122Z",
     "start_time": "2018-11-05T09:36:20.376384Z"
    },
    "scrolled": true
   },
   "outputs": [],
   "source": [
    "lbl = LabelEncoder().fit(df['word'])\n",
    "df['word'] = lbl.transform(df['word'])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2018-11-05T09:37:35.498467Z",
     "start_time": "2018-11-05T09:37:35.486891Z"
    }
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(34000000, 6)"
      ]
     },
     "execution_count": 4,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
     "# Sanity check: (rows, columns) of the combined, shuffled frame\n",
     "df.shape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2018-11-05T09:37:40.159971Z",
     "start_time": "2018-11-05T09:37:39.795221Z"
    },
    "scrolled": true
   },
   "outputs": [],
   "source": [
    "import torch\n",
    "import torchvision.models as models\n",
    "import torchvision.transforms as transforms\n",
    "import torchvision.datasets as datasets\n",
    "import torch.nn as nn\n",
    "import torch.optim as optim\n",
    "from torch.autograd import Variable\n",
    "from torch.utils.data.dataset import Dataset\n",
    "\n",
    "def draw_cv2(raw_strokes, size=256, lw=6, time_color=True):\n",
    "    BASE_SIZE = 299\n",
    "    img = np.zeros((BASE_SIZE, BASE_SIZE), np.uint8)\n",
    "    for t, stroke in enumerate(eval(raw_strokes)):\n",
    "        for i in range(len(stroke[0]) - 1):\n",
    "            color = 255 - min(t, 10) * 13 if time_color else 255\n",
    "            _ = cv2.line(img, (stroke[0][i] + 22, stroke[1][i]  + 22),\n",
    "                         (stroke[0][i + 1] + 22, stroke[1][i + 1] + 22), color, lw)\n",
    "    if size != BASE_SIZE:\n",
    "        return cv2.resize(img, (size, size))\n",
    "    else:\n",
    "        return img\n",
    "\n",
    "class QRDataset(Dataset):\n",
    "    def __init__(self, img_drawing, img_label, img_size, transform=None):\n",
    "        self.img_drawing = img_drawing\n",
    "        self.img_label = img_label\n",
    "        self.img_size = img_size\n",
    "        self.transform = transform\n",
    "\n",
    "    def __getitem__(self, index):\n",
    "        img = np.zeros((self.img_size, self.img_size, 3))\n",
    "        img[:, :, 0] = draw_cv2(self.img_drawing[index], self.img_size)\n",
    "        img[:, :, 1] = img[:, :, 0]\n",
    "        img[:, :, 2] = img[:, :, 0]\n",
    "        img = Image.fromarray(np.uint8(img))\n",
    "        \n",
    "        if self.transform is not None:\n",
    "            img = self.transform(img)\n",
    "        \n",
    "        label = torch.from_numpy(np.array([self.img_label[index]]))\n",
    "        return img, label\n",
    "\n",
    "    def __len__(self):\n",
    "        return len(self.img_drawing)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 27,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2018-11-05T11:56:46.141290Z",
     "start_time": "2018-11-05T11:56:42.822717Z"
    },
    "scrolled": true
   },
   "outputs": [],
   "source": [
    "import torch\n",
    "import torchvision.models as models\n",
    "import torchvision.transforms as transforms\n",
    "import torchvision.datasets as datasets\n",
    "import torch.nn as nn\n",
    "import torch.optim as optim\n",
    "from torch.autograd import Variable\n",
    "from torch.utils.data.dataset import Dataset\n",
    "\n",
    "model = models.resnet18(True)\n",
    "model.avgpool = nn.AdaptiveAvgPool2d(1)\n",
    "model.fc = nn.Linear(512, 340)\n",
    "\n",
    "model = model.cuda(0)\n",
    "optimizer = optim.Adam(model.parameters(), lr=0.005)\n",
    "scheduler = optim.lr_scheduler.MultiStepLR(optimizer, milestones=[2, 3, 5, 7, 8], gamma=0.1)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2018-11-05T09:37:57.053561Z",
     "start_time": "2018-11-05T09:37:56.465941Z"
    },
    "scrolled": false
   },
   "outputs": [],
   "source": [
    "train_df = df.iloc[:-70000]\n",
    "val_df = df.iloc[-70000:]\n",
    "\n",
    "train_loader = torch.utils.data.DataLoader(\n",
    "    QRDataset(train_df['drawing'].values, train_df['word'].values, 128,\n",
    "                     transforms.Compose([\n",
    "                        transforms.RandomHorizontalFlip(),\n",
    "                        transforms.RandomVerticalFlip(),\n",
    "                        transforms.RandomAffine(5, scale=[0.85, 1.05]),\n",
    "                        transforms.ToTensor(),\n",
    "                        transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])\n",
    "        ])\n",
    "    ),\n",
    "    batch_size=2000, shuffle=True, num_workers=10,\n",
    ")\n",
    "\n",
    "val_loader = torch.utils.data.DataLoader(\n",
    "    QRDataset(val_df['drawing'].values, val_df['word'].values, 128,\n",
    "                     transforms.Compose([\n",
    "                        transforms.RandomHorizontalFlip(),\n",
    "                        transforms.ToTensor(),\n",
    "                        transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])\n",
    "        ])\n",
    "    ),\n",
    "    batch_size=2000, shuffle=True, num_workers=10,\n",
    ")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2018-11-05T10:11:34.257276Z",
     "start_time": "2018-11-05T09:37:58.787235Z"
    },
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "Process Process-4:\n",
      "Process Process-9:\n",
      "Process Process-5:\n",
      "Process Process-10:\n",
      "Process Process-6:\n",
      "Process Process-1:\n",
      "Process Process-3:\n",
      "Traceback (most recent call last):\n",
      "Process Process-8:\n",
      "Process Process-7:\n",
      "Process Process-2:\n",
      "Traceback (most recent call last):\n",
      "Traceback (most recent call last):\n",
      "Traceback (most recent call last):\n",
      "Traceback (most recent call last):\n",
      "Traceback (most recent call last):\n",
      "Traceback (most recent call last):\n",
      "  File \"/usr/lib64/python2.7/multiprocessing/process.py\", line 258, in _bootstrap\n",
      "  File \"/usr/lib64/python2.7/multiprocessing/process.py\", line 258, in _bootstrap\n",
      "  File \"/usr/lib64/python2.7/multiprocessing/process.py\", line 258, in _bootstrap\n",
      "  File \"/usr/lib64/python2.7/multiprocessing/process.py\", line 258, in _bootstrap\n",
      "Traceback (most recent call last):\n",
      "Traceback (most recent call last):\n",
      "Traceback (most recent call last):\n",
      "  File \"/usr/lib64/python2.7/multiprocessing/process.py\", line 258, in _bootstrap\n",
      "  File \"/usr/lib64/python2.7/multiprocessing/process.py\", line 258, in _bootstrap\n",
      "    self.run()\n",
      "    self.run()\n",
      "  File \"/usr/lib64/python2.7/multiprocessing/process.py\", line 258, in _bootstrap\n",
      "    self.run()\n",
      "    self.run()\n",
      "  File \"/usr/lib64/python2.7/multiprocessing/process.py\", line 258, in _bootstrap\n",
      "  File \"/usr/lib64/python2.7/multiprocessing/process.py\", line 258, in _bootstrap\n",
      "  File \"/usr/lib64/python2.7/multiprocessing/process.py\", line 258, in _bootstrap\n",
      "  File \"/usr/lib64/python2.7/multiprocessing/process.py\", line 114, in run\n",
      "  File \"/usr/lib64/python2.7/multiprocessing/process.py\", line 114, in run\n",
      "    self.run()\n",
      "    self.run()\n",
      "  File \"/usr/lib64/python2.7/multiprocessing/process.py\", line 114, in run\n",
      "    self.run()\n",
      "  File \"/usr/lib64/python2.7/multiprocessing/process.py\", line 114, in run\n",
      "    self.run()\n",
      "    self._target(*self._args, **self._kwargs)\n",
      "    self.run()\n",
      "    self._target(*self._args, **self._kwargs)\n",
      "    self.run()\n",
      "  File \"/usr/lib64/python2.7/multiprocessing/process.py\", line 114, in run\n",
      "  File \"/usr/lib64/python2.7/multiprocessing/process.py\", line 114, in run\n",
      "    self._target(*self._args, **self._kwargs)\n",
      "  File \"/usr/lib64/python2.7/site-packages/torch/utils/data/dataloader.py\", line 106, in _worker_loop\n",
      "  File \"/usr/lib64/python2.7/multiprocessing/process.py\", line 114, in run\n",
      "  File \"/usr/lib64/python2.7/multiprocessing/process.py\", line 114, in run\n",
      "    self._target(*self._args, **self._kwargs)\n",
      "  File \"/usr/lib64/python2.7/site-packages/torch/utils/data/dataloader.py\", line 96, in _worker_loop\n",
      "  File \"/usr/lib64/python2.7/multiprocessing/process.py\", line 114, in run\n",
      "  File \"/usr/lib64/python2.7/multiprocessing/process.py\", line 114, in run\n",
      "    self._target(*self._args, **self._kwargs)\n",
      "    self._target(*self._args, **self._kwargs)\n",
      "    samples = collate_fn([dataset[i] for i in batch_indices])\n",
      "  File \"/usr/lib64/python2.7/site-packages/torch/utils/data/dataloader.py\", line 96, in _worker_loop\n",
      "    self._target(*self._args, **self._kwargs)\n",
      "    r = index_queue.get(timeout=MANAGER_STATUS_CHECK_INTERVAL)\n",
      "    self._target(*self._args, **self._kwargs)\n",
      "  File \"/usr/lib64/python2.7/site-packages/torch/utils/data/dataloader.py\", line 96, in _worker_loop\n",
      "    self._target(*self._args, **self._kwargs)\n",
      "    self._target(*self._args, **self._kwargs)\n",
      "  File \"/usr/lib64/python2.7/site-packages/torch/utils/data/dataloader.py\", line 96, in _worker_loop\n",
      "  File \"<ipython-input-5-d76989fd34e1>\", line 38, in __getitem__\n",
      "  File \"/usr/lib64/python2.7/site-packages/torch/utils/data/dataloader.py\", line 96, in _worker_loop\n",
      "  File \"/usr/lib64/python2.7/multiprocessing/queues.py\", line 131, in get\n",
      "  File \"/usr/lib64/python2.7/site-packages/torch/utils/data/dataloader.py\", line 96, in _worker_loop\n",
      "    r = index_queue.get(timeout=MANAGER_STATUS_CHECK_INTERVAL)\n",
      "  File \"/usr/lib64/python2.7/site-packages/torch/utils/data/dataloader.py\", line 96, in _worker_loop\n",
      "  File \"/usr/lib64/python2.7/site-packages/torch/utils/data/dataloader.py\", line 96, in _worker_loop\n",
      "    r = index_queue.get(timeout=MANAGER_STATUS_CHECK_INTERVAL)\n",
      "    img = self.transform(img)\n",
      "  File \"/usr/lib64/python2.7/site-packages/torch/utils/data/dataloader.py\", line 96, in _worker_loop\n",
      "    if timeout < 0 or not self._poll(timeout):\n",
      "    r = index_queue.get(timeout=MANAGER_STATUS_CHECK_INTERVAL)\n",
      "    r = index_queue.get(timeout=MANAGER_STATUS_CHECK_INTERVAL)\n",
      "    r = index_queue.get(timeout=MANAGER_STATUS_CHECK_INTERVAL)\n",
      "  File \"/usr/lib64/python2.7/multiprocessing/queues.py\", line 131, in get\n",
      "    r = index_queue.get(timeout=MANAGER_STATUS_CHECK_INTERVAL)\n",
      "  File \"/usr/lib/python2.7/site-packages/torchvision/transforms/transforms.py\", line 49, in __call__\n",
      "  File \"/usr/lib64/python2.7/multiprocessing/queues.py\", line 131, in get\n",
      "    r = index_queue.get(timeout=MANAGER_STATUS_CHECK_INTERVAL)\n",
      "KeyboardInterrupt\n",
      "  File \"/usr/lib64/python2.7/multiprocessing/queues.py\", line 131, in get\n",
      "  File \"/usr/lib64/python2.7/multiprocessing/queues.py\", line 131, in get\n",
      "    r = index_queue.get(timeout=MANAGER_STATUS_CHECK_INTERVAL)\n",
      "  File \"/usr/lib64/python2.7/multiprocessing/queues.py\", line 131, in get\n",
      "    img = t(img)\n",
      "    if timeout < 0 or not self._poll(timeout):\n",
      "  File \"/usr/lib64/python2.7/multiprocessing/queues.py\", line 131, in get\n",
      "  File \"/usr/lib64/python2.7/multiprocessing/queues.py\", line 131, in get\n",
      "    if timeout < 0 or not self._poll(timeout):\n",
      "    if timeout < 0 or not self._poll(timeout):\n",
      "    if timeout < 0 or not self._poll(timeout):\n",
      "    if timeout < 0 or not self._poll(timeout):\n",
      "  File \"/usr/lib64/python2.7/multiprocessing/queues.py\", line 131, in get\n",
      "  File \"/usr/lib/python2.7/site-packages/torchvision/transforms/transforms.py\", line 143, in __call__\n",
      "KeyboardInterrupt\n",
      "    if timeout < 0 or not self._poll(timeout):\n",
      "KeyboardInterrupt\n",
      "    if timeout < 0 or not self._poll(timeout):\n",
      "KeyboardInterrupt\n",
      "KeyboardInterrupt\n",
      "KeyboardInterrupt\n",
      "    return F.normalize(tensor, self.mean, self.std)\n",
      "    if timeout < 0 or not self._poll(timeout):\n",
      "KeyboardInterrupt\n",
      "KeyboardInterrupt\n",
      "  File \"/usr/lib/python2.7/site-packages/torchvision/transforms/functional.py\", line 168, in normalize\n",
      "KeyboardInterrupt\n",
      "    t.sub_(m).div_(s)\n",
      "KeyboardInterrupt\n"
     ]
    },
    {
     "ename": "KeyboardInterrupt",
     "evalue": "",
     "output_type": "error",
     "traceback": [
      "\u001b[0;31m\u001b[0m",
      "\u001b[0;31mKeyboardInterrupt\u001b[0mTraceback (most recent call last)",
      "\u001b[0;32m<ipython-input-8-0607848efac5>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m()\u001b[0m\n\u001b[1;32m     22\u001b[0m     \u001b[0;32mfor\u001b[0m \u001b[0mi\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mdata\u001b[0m \u001b[0;32min\u001b[0m \u001b[0menumerate\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mtrain_loader\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     23\u001b[0m         \u001b[0mx\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0my\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mdata\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 24\u001b[0;31m         \u001b[0mx\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mVariable\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mx\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mcuda\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m     25\u001b[0m         \u001b[0my\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mVariable\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0my\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mview\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m-\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mcuda\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     26\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;31mKeyboardInterrupt\u001b[0m: "
     ]
    }
   ],
   "source": [
    "import torch.nn.functional as F\n",
    "loss_fn = nn.CrossEntropyLoss()\n",
    "\n",
    "def accuracy(output, target, topk=(1,)):\n",
    "    \"\"\"Computes the accuracy over the k top predictions for the specified values of k\"\"\"\n",
    "    with torch.no_grad():\n",
    "        maxk = max(topk)\n",
    "        batch_size = target.size(0)\n",
    "\n",
    "        _, pred = output.topk(maxk, 1, True, True)\n",
    "        pred = pred.t()\n",
    "        correct = pred.eq(target.view(1, -1).expand_as(pred))\n",
    "\n",
    "        res = []\n",
    "        for k in topk:\n",
    "            correct_k = correct[:k].view(-1).float().sum(0, keepdim=True)\n",
    "            res.append(correct_k.mul_(100.0 / batch_size))\n",
    "        return res\n",
    "\n",
    "for epoch in range(10):\n",
    "    scheduler.step()\n",
    "    for i, data in enumerate(train_loader):\n",
    "        x, y = data\n",
    "        x = Variable(x).cuda(0)\n",
    "        y = Variable(y.view(-1)).cuda(0)\n",
    "\n",
    "        optimizer.zero_grad()\n",
    "        output = model(x)\n",
    "        loss = loss_fn(output, y)\n",
    "        loss.backward()\n",
    "        optimizer.step()\n",
    "\n",
    "        if i % 100 == 0:\n",
    "            acc1, acc3 = accuracy(output, y, topk=(1, 3))\n",
    "            logstr = 'Epoch {0}/{1}: \\tloss {2}, ACC {3:.4f}/{4:.4f}'.format(epoch, i, loss.item(), \n",
    "                                                                  acc1.item(), acc3.item())\n",
    "            logging.info(logstr)\n",
    "        if i % 1000 == 0:\n",
    "            torch.save(model.state_dict(), 'resnet18_{0}.pt'.format(epoch))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2018-11-05T09:30:10.425948Z",
     "start_time": "2018-11-05T09:29:43.393285Z"
    },
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "80.9000015259 93.1500015259\n",
      "80.5 93.4000015259\n",
      "79.3000030518 92.5999984741\n",
      "79.5500030518 92.0\n",
      "80.1500015259 92.1500015259\n",
      "80.7000045776 92.4500045776\n",
      "81.5 93.5\n",
      "81.0 92.8000030518\n",
      "80.5500030518 93.0999984741\n",
      "81.0 93.1500015259\n",
      "81.4500045776 92.3000030518\n",
      "81.3499984741 92.7000045776\n",
      "79.9000015259 92.8000030518\n",
      "79.8000030518 92.75\n",
      "81.0 93.25\n",
      "81.0 92.75\n",
      "80.9000015259 92.5500030518\n",
      "81.9000015259 93.75\n",
      "80.9000015259 93.25\n",
      "79.2000045776 92.8000030518\n",
      "81.5 93.9000015259\n",
      "80.7000045776 92.6500015259\n",
      "79.7000045776 92.9000015259\n",
      "80.7000045776 92.6500015259\n",
      "81.8000030518 92.8499984741\n",
      "82.4500045776 92.8000030518\n",
      "80.4000015259 93.8499984741\n",
      "79.9500045776 92.8499984741\n",
      "80.4000015259 92.3499984741\n",
      "80.0999984741 93.5\n",
      "80.1500015259 92.75\n",
      "81.5500030518 93.8000030518\n",
      "80.5500030518 93.1500015259\n",
      "80.0500030518 92.4500045776\n",
      "79.4000015259 92.5\n"
     ]
    }
   ],
   "source": [
    "model = model.eval()\n",
    "with torch.no_grad():\n",
    "    for data in val_loader:\n",
    "        images, labels = data\n",
    "        images = Variable(images).cuda(0)\n",
    "        labels = Variable(labels.view(-1)).cuda(0)\n",
    "        \n",
    "        outputs = model(images)\n",
    "        acc1, acc5 = accuracy(outputs, labels, topk=(1, 3))\n",
    "        print acc1.item(), acc5.item()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 22,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2018-11-05T10:29:43.632759Z",
     "start_time": "2018-11-05T10:29:42.969794Z"
    },
    "scrolled": true
   },
   "outputs": [],
   "source": [
    "submit = pd.read_csv('../input/sample_submission.csv')\n",
    "submit_df = pd.read_csv('../input/test_simplified.csv')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2018-11-01T12:47:54.846895Z",
     "start_time": "2018-11-01T12:47:54.835114Z"
    },
    "scrolled": true
   },
   "outputs": [],
   "source": [
    "test_loader = torch.utils.data.DataLoader(\n",
    "    QRDataset(submit_df['drawing'].values, np.zeros(submit_df.shape[0]), 64,\n",
    "                     transforms.Compose([\n",
    "#                         transforms.RandomHorizontalFlip(),\n",
    "#                         transforms.RandomVerticalFlip(),\n",
    "                        transforms.ToTensor(),\n",
    "        ])\n",
    "    ),\n",
    "    batch_size=2000, shuffle=False, num_workers=10,\n",
    ")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2018-11-01T12:48:09.751382Z",
     "start_time": "2018-11-01T12:47:56.113716Z"
    }
   },
   "outputs": [],
   "source": [
    "pred = []\n",
    "for t, (x, y) in enumerate(test_loader):\n",
    "    x_var = Variable(x.cuda(0))\n",
    "    y_var = Variable(y.cuda(0))\n",
    "    scores = model(x_var)\n",
    "    pred.append(scores.data.cpu().numpy())\n",
    "pred = np.concatenate(pred, 0)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2018-11-01T12:48:19.224835Z",
     "start_time": "2018-11-01T12:48:11.799654Z"
    },
    "scrolled": true
   },
   "outputs": [],
   "source": [
    "import warnings\n",
    "warnings.filterwarnings(\"ignore\", category=DeprecationWarning) \n",
    "\n",
    "pred_label = [lbl.inverse_transform(x.argsort()[-3:][::-1]) for x in pred]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2018-11-01T12:48:20.303981Z",
     "start_time": "2018-11-01T12:48:20.057900Z"
    }
   },
   "outputs": [],
   "source": [
    "pred_label = np.vstack(pred_label)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 16,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2018-11-01T12:48:20.942309Z",
     "start_time": "2018-11-01T12:48:20.927245Z"
    }
   },
   "outputs": [],
   "source": [
    "submit['top1'] = pred_label[:, 0]\n",
    "submit['top2'] = pred_label[:, 1]\n",
    "submit['top3'] = pred_label[:, 2]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 17,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2018-11-01T12:48:21.628275Z",
     "start_time": "2018-11-01T12:48:21.466253Z"
    },
    "scrolled": true
   },
   "outputs": [],
   "source": [
    "submit['top1'] = submit['top1'].apply(lambda x: x.replace(' ', '_'))\n",
    "submit['top2'] = submit['top2'].apply(lambda x: x.replace(' ', '_'))\n",
    "submit['top3'] = submit['top3'].apply(lambda x: x.replace(' ', '_'))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 18,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2018-11-01T12:48:22.551058Z",
     "start_time": "2018-11-01T12:48:22.489193Z"
    },
    "scrolled": true
   },
   "outputs": [],
   "source": [
    "submit['word'] = submit['top1'] + ' ' + submit['top2'] + ' ' + submit['top3']"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 19,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2018-11-01T12:48:29.480449Z",
     "start_time": "2018-11-01T12:48:29.177267Z"
    }
   },
   "outputs": [],
   "source": [
    "submit[['key_id', 'word']].to_csv('./tmp_91.csv', index=None)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2018-11-01T09:10:06.926519Z",
     "start_time": "2018-11-01T09:10:06.917598Z"
    }
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "'2.3000'"
      ]
     },
     "execution_count": 5,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "'{0:.4f}'.format(2.3)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2018-11-01T12:47:28.988327Z",
     "start_time": "2018-11-01T12:47:28.981297Z"
    }
   },
   "outputs": [],
   "source": [
    "model = model.eval()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2018-11-05T10:24:05.372759Z",
     "start_time": "2018-11-05T10:24:05.310022Z"
    }
   },
   "outputs": [
    {
     "ename": "NameError",
     "evalue": "name 'df' is not defined",
     "output_type": "error",
     "traceback": [
      "\u001b[0;31m\u001b[0m",
      "\u001b[0;31mNameError\u001b[0mTraceback (most recent call last)",
      "\u001b[0;32m<ipython-input-1-00cf07b74dcd>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m()\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0mdf\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m",
      "\u001b[0;31mNameError\u001b[0m: name 'df' is not defined"
     ]
    }
   ],
   "source": [
    "df"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 24,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2018-11-05T10:30:20.037869Z",
     "start_time": "2018-11-05T10:30:19.213760Z"
    },
    "scrolled": true
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(34000, 6)"
      ]
     },
     "execution_count": 24,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "train_test_split(df, test_size=0.01)[1].shape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 25,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2018-11-05T10:36:05.864621Z",
     "start_time": "2018-11-05T10:36:05.834634Z"
    },
    "scrolled": true
   },
   "outputs": [],
   "source": [
    "submit.to_pickle?"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 29,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2018-11-05T11:57:40.165107Z",
     "start_time": "2018-11-05T11:57:40.151734Z"
    }
   },
   "outputs": [],
   "source": [
    "model.train?"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 31,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2018-11-05T12:57:46.010253Z",
     "start_time": "2018-11-05T12:57:46.002655Z"
    }
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "'ss      '"
      ]
     },
     "execution_count": 31,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "'{0:8s}'.format('ss')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2018-11-09T10:31:52.634178Z",
     "start_time": "2018-11-09T10:31:52.620729Z"
    }
   },
   "outputs": [],
   "source": [
    "import time, datetime"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2018-11-10T11:58:48.847541Z",
     "start_time": "2018-11-10T11:58:48.834894Z"
    }
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "'20181110195848839'"
      ]
     },
     "execution_count": 15,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "import codecs, time, os\n",
    "from datetime import datetime\n",
    "\n",
    "datetime.now().strftime('%Y%m%d%H%M%S%f')[:-3]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2018-11-09T10:41:47.906230Z",
     "start_time": "2018-11-09T10:41:47.882607Z"
    }
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "datetime.datetime(2018, 11, 9, 18, 41, 47, 884700)"
      ]
     },
     "execution_count": 2,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "datetime.now()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2018-11-09T10:49:40.733220Z",
     "start_time": "2018-11-09T10:49:40.723135Z"
    }
   },
   "outputs": [
    {
     "ename": "SyntaxError",
     "evalue": "invalid syntax (<ipython-input-11-4e3b1b4592ff>, line 1)",
     "output_type": "error",
     "traceback": [
      "\u001b[0;36m  File \u001b[0;32m\"<ipython-input-11-4e3b1b4592ff>\"\u001b[0;36m, line \u001b[0;32m1\u001b[0m\n\u001b[0;31m    {\"time\":\"20181109184925552\",\"key\":\"ump.psoriasis3.logo.tptime\",\"hostname\":\"10.177.62.4\",\"processState\":\"0\",\"elapsedTime\":\"36\"}{\"time\":\"20130415135617820\",\"key\":\"JCSS.ObjectCheck.checkfile\",\"hostname\":\" YPT-Wangyuan \",\"processState\":\"0\",\"elapsedTime\":\"36\"}\u001b[0m\n\u001b[0m                                                                                                                                  ^\u001b[0m\n\u001b[0;31mSyntaxError\u001b[0m\u001b[0;31m:\u001b[0m invalid syntax\n"
     ]
    }
   ],
   "source": [
    "{\"time\":\"20181109105423524\",\"key\":\"ump.psoriasis3.logo.tptime\",\"hostname\":\"10.177.62.4\",\"processState\":\"0\",\"elapsedTime\":\"36\"}\n",
    "{\"time\":\"20130415135617820\",\"key\":\"JCSS.ObjectCheck.checkfile\",\"hostname\":\" YPT-Wangyuan \",\"processState\":\"0\",\"elapsedTime\":\"36\"}"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2018-11-10T11:52:01.257540Z",
     "start_time": "2018-11-10T11:52:01.247831Z"
    }
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "True"
      ]
     },
     "execution_count": 13,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "os.path.exists('./')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 16,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2018-11-10T12:05:51.557388Z",
     "start_time": "2018-11-10T12:05:51.549194Z"
    }
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "172.28.220.19\n"
     ]
    }
   ],
   "source": [
    "import netifaces as ni\n",
    "ni.ifaddresses('eth0')\n",
    "ip = ni.ifaddresses('eth0')[ni.AF_INET][0]['addr']\n",
    "print ip  # prints this host's eth0 IPv4 address"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 18,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2018-11-10T12:06:06.028620Z",
     "start_time": "2018-11-10T12:06:06.020171Z"
    }
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "'172.28.220.19'"
      ]
     },
     "execution_count": 18,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "str(ip)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 41,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2018-11-10T12:20:57.268482Z",
     "start_time": "2018-11-10T12:20:57.226459Z"
    }
   },
   "outputs": [],
   "source": [
    "# -*- coding: utf-8 -*-\n",
    "import os, sys, json, codecs\n",
    "from datetime import datetime\n",
    "from collections import OrderedDict\n",
    "\n",
    "import netifaces as ni\n",
    "def getip():\n",
    "    ni.ifaddresses('eth0')\n",
    "    ip = ni.ifaddresses('eth0')[ni.AF_INET][0]['addr']\n",
    "    return str(ip)\n",
    "\n",
    "def gettime():\n",
    "    return datetime.now().strftime('%Y%m%d%H%M%S%f')[:-3]\n",
    "\n",
    "# https://cf.jd.com/pages/viewpage.action?pageId=73256284\n",
    "# https://cf.jd.com/pages/viewpage.action?pageId=73251808\n",
    "class UMP(object):\n",
    "    def __init__(self):\n",
    "        if not os.path.exists('/export/home/tomcat/UMP-Monitor/logs/'):\n",
    "            os.makedirs('/export/home/tomcat/UMP-Monitor/logs/')\n",
    "        self.ip = getip()\n",
    "\n",
    "    def ump_tp(self, path, key, state, elapesd, host=None):\n",
    "        '''方法性能监控\n",
    "\n",
    "        写入格式，\n",
    "        {\"time\":\"20130415135617820\",\"key\":\"JCSS.ObjectCheck.checkfile\",\"hostname\":\" YPT-Wangyuan \",\"processState\":\"0\",\"elapsedTime\":\"36\"}\n",
    "\n",
    "        '''\n",
    "\n",
    "        timestr = datetime.now().strftime('%Y%m%d%H%M%S%f')[:-3]\n",
    "        if host == None:\n",
    "            host = self.ip\n",
    "        state = str(state)\n",
    "        elapesd = str(elapesd)\n",
    "\n",
    "        logjson = OrderedDict([\n",
    "                    (\"time\", gettime()), \n",
    "                    (\"key\", key), \n",
    "                    (\"hostname\", host), \n",
    "                    (\"processState\", state),\n",
    "                    (\"elapsedTime\", elapesd), \n",
    "        ])\n",
    "        with open('/export/home/tomcat/UMP-Monitor/logs/' + path, 'a') as up:\n",
    "            up.write(json.dumps(logjson) + '\\n')\n",
    "\n",
    "    def ump_alive(self):\n",
    "        pass"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 42,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2018-11-10T12:20:57.508469Z",
     "start_time": "2018-11-10T12:20:57.503187Z"
    }
   },
   "outputs": [],
   "source": [
    "ump = UMP()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 43,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2018-11-10T12:20:57.748854Z",
     "start_time": "2018-11-10T12:20:57.742921Z"
    }
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "{\"time\": \"20181110202057745\", \"key\": \"897\", \"hostname\": \"172.28.220.19\", \"processState\": \"1\", \"elapsedTime\": \"23\"}\n"
     ]
    }
   ],
   "source": [
    "ump.ump_tp('./', '897', '1', '23')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 44,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2018-11-10T12:44:02.246309Z",
     "start_time": "2018-11-10T12:44:02.240295Z"
    }
   },
   "outputs": [],
   "source": [
    "type = 2"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 88,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2018-11-10T13:01:15.493732Z",
     "start_time": "2018-11-10T13:01:15.488036Z"
    }
   },
   "outputs": [],
   "source": [
    "a = time.time()\n",
    "# time.sleep(1)\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 89,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2018-11-10T13:01:15.887622Z",
     "start_time": "2018-11-10T13:01:15.881831Z"
    }
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "393\n"
     ]
    }
   ],
   "source": [
    "print str(int((time.time() - a) * 1000))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2018-11-13T07:13:16.401047Z",
     "start_time": "2018-11-13T07:13:01.476150Z"
    }
   },
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "Downloading: \"https://download.pytorch.org/models/resnet101-5d3b4d8f.pth\" to /root/.torch/models/resnet101-5d3b4d8f.pth\n",
      "100%|██████████| 178728960/178728960 [00:11<00:00, 14909357.07it/s]\n"
     ]
    }
   ],
   "source": [
    "import torch\n",
    "import torchvision.models as models\n",
    "import torchvision.transforms as transforms\n",
    "import torchvision.datasets as datasets\n",
    "import torch.nn as nn\n",
    "import torch.optim as optim\n",
    "from torch.autograd import Variable\n",
    "from torch.utils.data.dataset import Dataset\n",
    "\n",
    "model = models.resnet101(True)\n",
    "model.avgpool = nn.AdaptiveAvgPool2d(1)\n",
    "model.fc = nn.Linear(512, 340)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2018-11-13T07:13:19.781936Z",
     "start_time": "2018-11-13T07:13:19.750059Z"
    }
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "ResNet(\n",
       "  (conv1): Conv2d(3, 64, kernel_size=(7, 7), stride=(2, 2), padding=(3, 3), bias=False)\n",
       "  (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "  (relu): ReLU(inplace)\n",
       "  (maxpool): MaxPool2d(kernel_size=3, stride=2, padding=1, dilation=1, ceil_mode=False)\n",
       "  (layer1): Sequential(\n",
       "    (0): Bottleneck(\n",
       "      (conv1): Conv2d(64, 64, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv3): Conv2d(64, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn3): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace)\n",
       "      (downsample): Sequential(\n",
       "        (0): Conv2d(64, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "        (1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      )\n",
       "    )\n",
       "    (1): Bottleneck(\n",
       "      (conv1): Conv2d(256, 64, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv3): Conv2d(64, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn3): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace)\n",
       "    )\n",
       "    (2): Bottleneck(\n",
       "      (conv1): Conv2d(256, 64, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv3): Conv2d(64, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn3): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace)\n",
       "    )\n",
       "  )\n",
       "  (layer2): Sequential(\n",
       "    (0): Bottleneck(\n",
       "      (conv1): Conv2d(256, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv3): Conv2d(128, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn3): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace)\n",
       "      (downsample): Sequential(\n",
       "        (0): Conv2d(256, 512, kernel_size=(1, 1), stride=(2, 2), bias=False)\n",
       "        (1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      )\n",
       "    )\n",
       "    (1): Bottleneck(\n",
       "      (conv1): Conv2d(512, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv3): Conv2d(128, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn3): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace)\n",
       "    )\n",
       "    (2): Bottleneck(\n",
       "      (conv1): Conv2d(512, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv3): Conv2d(128, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn3): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace)\n",
       "    )\n",
       "    (3): Bottleneck(\n",
       "      (conv1): Conv2d(512, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv3): Conv2d(128, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn3): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace)\n",
       "    )\n",
       "  )\n",
       "  (layer3): Sequential(\n",
       "    (0): Bottleneck(\n",
       "      (conv1): Conv2d(512, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace)\n",
       "      (downsample): Sequential(\n",
       "        (0): Conv2d(512, 1024, kernel_size=(1, 1), stride=(2, 2), bias=False)\n",
       "        (1): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      )\n",
       "    )\n",
       "    (1): Bottleneck(\n",
       "      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace)\n",
       "    )\n",
       "    (2): Bottleneck(\n",
       "      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace)\n",
       "    )\n",
       "    (3): Bottleneck(\n",
       "      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace)\n",
       "    )\n",
       "    (4): Bottleneck(\n",
       "      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace)\n",
       "    )\n",
       "    (5): Bottleneck(\n",
       "      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace)\n",
       "    )\n",
       "    (6): Bottleneck(\n",
       "      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace)\n",
       "    )\n",
       "    (7): Bottleneck(\n",
       "      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace)\n",
       "    )\n",
       "    (8): Bottleneck(\n",
       "      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace)\n",
       "    )\n",
       "    (9): Bottleneck(\n",
       "      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace)\n",
       "    )\n",
       "    (10): Bottleneck(\n",
       "      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace)\n",
       "    )\n",
       "    (11): Bottleneck(\n",
       "      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace)\n",
       "    )\n",
       "    (12): Bottleneck(\n",
       "      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace)\n",
       "    )\n",
       "    (13): Bottleneck(\n",
       "      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace)\n",
       "    )\n",
       "    (14): Bottleneck(\n",
       "      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace)\n",
       "    )\n",
       "    (15): Bottleneck(\n",
       "      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace)\n",
       "    )\n",
       "    (16): Bottleneck(\n",
       "      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace)\n",
       "    )\n",
       "    (17): Bottleneck(\n",
       "      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace)\n",
       "    )\n",
       "    (18): Bottleneck(\n",
       "      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace)\n",
       "    )\n",
       "    (19): Bottleneck(\n",
       "      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace)\n",
       "    )\n",
       "    (20): Bottleneck(\n",
       "      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace)\n",
       "    )\n",
       "    (21): Bottleneck(\n",
       "      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace)\n",
       "    )\n",
       "    (22): Bottleneck(\n",
       "      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace)\n",
       "    )\n",
       "  )\n",
       "  (layer4): Sequential(\n",
       "    (0): Bottleneck(\n",
       "      (conv1): Conv2d(1024, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv3): Conv2d(512, 2048, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn3): BatchNorm2d(2048, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace)\n",
       "      (downsample): Sequential(\n",
       "        (0): Conv2d(1024, 2048, kernel_size=(1, 1), stride=(2, 2), bias=False)\n",
       "        (1): BatchNorm2d(2048, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      )\n",
       "    )\n",
       "    (1): Bottleneck(\n",
       "      (conv1): Conv2d(2048, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv3): Conv2d(512, 2048, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn3): BatchNorm2d(2048, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace)\n",
       "    )\n",
       "    (2): Bottleneck(\n",
       "      (conv1): Conv2d(2048, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv3): Conv2d(512, 2048, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn3): BatchNorm2d(2048, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace)\n",
       "    )\n",
       "  )\n",
       "  (avgpool): AdaptiveAvgPool2d(output_size=1)\n",
       "  (fc): Linear(in_features=2048, out_features=1000, bias=True)\n",
       ")"
      ]
     },
     "execution_count": 5,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "model"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 2",
   "language": "python",
   "name": "python2"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 2
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython2",
   "version": "2.7.5"
  },
  "toc": {
   "base_numbering": 1,
   "nav_menu": {},
   "number_sections": true,
   "sideBar": true,
   "skip_h1_title": false,
   "title_cell": "Table of Contents",
   "title_sidebar": "Contents",
   "toc_cell": false,
   "toc_position": {},
   "toc_section_display": true,
   "toc_window_display": false
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
