{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## Convert static computational graph to Caffe"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "#### Prepare environment"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Use %pip (not !pip) so packages install into the current kernel's\n",
    "# environment, and enforce the minimum versions on the command line.\n",
    "%pip install \"paddlepaddle>=1.8.0\" \"paddle2onnx>=0.3.2\"  # required\n",
    "%pip install \"onnx>=1.7.0\" \"onnxruntime>=1.5.2\"  # optional: check and run the ONNX model\n",
    "# Caffe with Python support must be installed manually (not available via pip)."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Standard library first, then third-party (PaddlePaddle).\n",
    "import os\n",
    "\n",
    "import paddle\n",
    "import paddle.fluid as fluid"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### 1. ResNet network"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {},
   "outputs": [],
   "source": [
    "def conv_bn_layer(input,\n",
    "                  ch_out,\n",
    "                  filter_size,\n",
    "                  stride,\n",
    "                  padding,\n",
    "                  act='relu',\n",
    "                  bias_attr=False):\n",
    "    \"\"\"Conv2D (linear) followed by BatchNorm with `act` applied after BN.\"\"\"\n",
    "    tmp = fluid.layers.conv2d(\n",
    "        input=input,\n",
    "        filter_size=filter_size,\n",
    "        num_filters=ch_out,\n",
    "        stride=stride,\n",
    "        padding=padding,\n",
    "        act=None,  # activation is applied by the batch_norm below\n",
    "        bias_attr=bias_attr)\n",
    "    return fluid.layers.batch_norm(input=tmp, act=act)\n",
    "\n",
    "def shortcut(input, ch_in, ch_out, stride):\n",
    "    \"\"\"Identity shortcut, or a 1x1 conv-bn projection when channels change.\"\"\"\n",
    "    if ch_in != ch_out:\n",
    "        return conv_bn_layer(input, ch_out, 1, stride, 0, None)\n",
    "    else:\n",
    "        return input\n",
    "\n",
    "def basicblock(input, ch_in, ch_out, stride):\n",
    "    \"\"\"Two 3x3 conv-bn layers plus a residual shortcut; ReLU on the sum.\"\"\"\n",
    "    tmp = conv_bn_layer(input, ch_out, 3, stride, 1)\n",
    "    tmp = conv_bn_layer(tmp, ch_out, 3, 1, 1, act=None, bias_attr=True)\n",
    "    short = shortcut(input, ch_in, ch_out, stride)\n",
    "    return fluid.layers.elementwise_add(x=tmp, y=short, act='relu')\n",
    "\n",
    "def layer_warp(block_func, input, ch_in, ch_out, count, stride):\n",
    "    \"\"\"Stack `count` blocks; only the first may change channels/stride.\"\"\"\n",
    "    tmp = block_func(input, ch_in, ch_out, stride)\n",
    "    for _ in range(1, count):\n",
    "        tmp = block_func(tmp, ch_out, ch_out, 1)\n",
    "    return tmp\n",
    "\n",
    "def resnet_cifar10(ipt, depth=32):\n",
    "    \"\"\"Build a (truncated) CIFAR-10 ResNet; returns the softmax prediction.\"\"\"\n",
    "    # depth should be one of 20, 32, 44, 56, 110, 1202\n",
    "    assert (depth - 2) % 6 == 0, 'depth must satisfy (depth - 2) % 6 == 0'\n",
    "    n = (depth - 2) // 6\n",
    "    conv1 = conv_bn_layer(ipt, ch_out=16, filter_size=3, stride=1, padding=1)\n",
    "    res1 = layer_warp(basicblock, conv1, 16, 16, n, 1)\n",
    "    # res2/res3 stages are intentionally disabled to keep the demo model small:\n",
    "    #res2 = layer_warp(basicblock, res1, 16, 32, n, 2)\n",
    "    #res3 = layer_warp(basicblock, res2, 32, 64, n, 2)\n",
    "    pool = fluid.layers.pool2d(\n",
    "        input=res1, pool_size=8, pool_type='avg', pool_stride=1)\n",
    "    predict = fluid.layers.fc(input=pool, size=10, act='softmax')\n",
    "    return predict\n",
    "\n",
    "paddle.enable_static()\n",
    "data_shape = [None, 3, 32, 32]  # NCHW; None = variable batch size\n",
    "images = fluid.data(name='pixel', shape=data_shape, dtype='float32')\n",
    "predict = resnet_cifar10(images, 32)\n",
    "exe = fluid.Executor(fluid.CPUPlace())\n",
    "_ = exe.run(fluid.default_startup_program())  # initialize parameters"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### 2. Convert an uncombined PaddlePaddle model (parameters saved in separate files)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "['save_infer_model/scale_0.tmp_0']"
      ]
     },
     "execution_count": 7,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Save the inference program in 'uncombined' form: each parameter is\n",
    "# written to its own file under model_dir, plus a __model__ program file.\n",
    "model_dir = './resnet_not_combined/'\n",
    "fluid.io.save_inference_model(model_dir, [\"pixel\"], [predict], exe)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "__model__         batch_norm_2.w_2  batch_norm_6.w_2  conv2d_10.w_0\r\n",
      "batch_norm_0.b_0  batch_norm_3.b_0  batch_norm_7.b_0  conv2d_2.b_0\r\n",
      "batch_norm_0.w_0  batch_norm_3.w_0  batch_norm_7.w_0  conv2d_2.w_0\r\n",
      "batch_norm_0.w_1  batch_norm_3.w_1  batch_norm_7.w_1  conv2d_3.w_0\r\n",
      "batch_norm_0.w_2  batch_norm_3.w_2  batch_norm_7.w_2  conv2d_4.b_0\r\n",
      "batch_norm_1.b_0  batch_norm_4.b_0  batch_norm_8.b_0  conv2d_4.w_0\r\n",
      "batch_norm_1.w_0  batch_norm_4.w_0  batch_norm_8.w_0  conv2d_5.w_0\r\n",
      "batch_norm_1.w_1  batch_norm_4.w_1  batch_norm_8.w_1  conv2d_6.b_0\r\n",
      "batch_norm_1.w_2  batch_norm_4.w_2  batch_norm_8.w_2  conv2d_6.w_0\r\n",
      "batch_norm_10.b_0 batch_norm_5.b_0  batch_norm_9.b_0  conv2d_7.w_0\r\n",
      "batch_norm_10.w_0 batch_norm_5.w_0  batch_norm_9.w_0  conv2d_8.b_0\r\n",
      "batch_norm_10.w_1 batch_norm_5.w_1  batch_norm_9.w_1  conv2d_8.w_0\r\n",
      "batch_norm_10.w_2 batch_norm_5.w_2  batch_norm_9.w_2  conv2d_9.w_0\r\n",
      "batch_norm_2.b_0  batch_norm_6.b_0  conv2d_0.w_0      fc_0.b_0\r\n",
      "batch_norm_2.w_0  batch_norm_6.w_0  conv2d_1.w_0      fc_0.w_0\r\n",
      "batch_norm_2.w_1  batch_norm_6.w_1  conv2d_10.b_0\r\n"
     ]
    }
   ],
   "source": [
    "# Inspect the saved directory: __model__ plus one file per parameter\n",
    "# (this is what 'uncombined' means).\n",
    "!ls ./resnet_not_combined/"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "objc[23784]: Class CaptureDelegate is implemented in both /usr/local/lib/python3.7/site-packages/cv2/cv2.cpython-37m-darwin.so (0x112543c18) and /usr/local/opt/opencv@3/lib/libopencv_videoio.3.4.dylib (0x1226c7070). One of the two will be used. Which one is undefined.\n",
      "objc[23784]: Class CVWindow is implemented in both /usr/local/lib/python3.7/site-packages/cv2/cv2.cpython-37m-darwin.so (0x112543c68) and /usr/local/opt/opencv@3/lib/libopencv_highgui.3.4.dylib (0x1226870a8). One of the two will be used. Which one is undefined.\n",
      "objc[23784]: Class CVView is implemented in both /usr/local/lib/python3.7/site-packages/cv2/cv2.cpython-37m-darwin.so (0x112543c90) and /usr/local/opt/opencv@3/lib/libopencv_highgui.3.4.dylib (0x1226870d0). One of the two will be used. Which one is undefined.\n",
      "objc[23784]: Class CVSlider is implemented in both /usr/local/lib/python3.7/site-packages/cv2/cv2.cpython-37m-darwin.so (0x112543cb8) and /usr/local/opt/opencv@3/lib/libopencv_highgui.3.4.dylib (0x1226870f8). One of the two will be used. Which one is undefined.\n",
      "2022-07-06 20:30:07 [INFO]\tpaddle graph info: \n",
      "2022-07-06 20:30:07 [INFO]\tinput: ['pixel_idx0'], output: ['save_infer_model_scale_0_tmp_0_idx48'], info: {'conv2d': 11, 'batch_norm': 11, 'relu': 11, 'elementwise_add': 11, 'pool2d': 1, 'mul': 1, 'softmax': 1, 'scale': 1}\n",
      "\u001B[1;31;40m2022-07-06 20:30:07 [WARNING]\tcaffe do not support exclusive param, will get ignored\u001B[0m\n",
      "2022-07-06 20:30:07 [INFO]\tun-identical mapping with node name list (['mul_idx44_extra_x_flatten', 'mul_idx44'])\n",
      "2022-07-06 20:30:07 [INFO]\tun-identical mapping with node name list ([])\n",
      "2022-07-06 20:30:07 [INFO]\tcaffe graph info: \n",
      "2022-07-06 20:30:07 [INFO]\tinput: ['pixel_idx0'], output: ['fc_0_tmp_2_idx46'], info: {'Convolution': 11, 'BatchNorm': 11, 'Scale': 17, 'ReLU': 11, 'Eltwise': 5, 'Pooling': 1, 'Flatten': 1, 'InnerProduct': 1, 'Softmax': 1}\n",
      "2022-07-06 20:30:07 [INFO]\tpass opt_caffe_cutout_dropout is skipped\n",
      "WARNING: Logging before InitGoogleLogging() is written to STDERR\n",
      "I0706 20:30:07.147895 371264960 net.cpp:58] Initializing net from parameters: \n",
      "state {\n",
      "  phase: TRAIN\n",
      "  level: 0\n",
      "}\n",
      "layer {\n",
      "  name: \"pixel_idx0\"\n",
      "  type: \"Input\"\n",
      "  top: \"pixel_idx0\"\n",
      "  input_param {\n",
      "    shape {\n",
      "      dim: 1\n",
      "      dim: 3\n",
      "      dim: 32\n",
      "      dim: 32\n",
      "    }\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"conv2d_0_tmp_0_idx0\"\n",
      "  type: \"Convolution\"\n",
      "  bottom: \"pixel_idx0\"\n",
      "  top: \"conv2d_0_tmp_0_idx0\"\n",
      "  convolution_param {\n",
      "    num_output: 16\n",
      "    bias_term: false\n",
      "    group: 1\n",
      "    pad_h: 1\n",
      "    pad_w: 1\n",
      "    kernel_h: 3\n",
      "    kernel_w: 3\n",
      "    stride_h: 1\n",
      "    stride_w: 1\n",
      "    dilation: 1\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"batch_norm_0_tmp_3_idx1_extra\"\n",
      "  type: \"BatchNorm\"\n",
      "  bottom: \"conv2d_0_tmp_0_idx0\"\n",
      "  top: \"batch_norm_0_tmp_3_idx1_extra\"\n",
      "  batch_norm_param {\n",
      "    use_global_stats: true\n",
      "    eps: 1e-05\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"batch_norm_0_tmp_3_idx1\"\n",
      "  type: \"Scale\"\n",
      "  bottom: \"batch_norm_0_tmp_3_idx1_extra\"\n",
      "  top: \"batch_norm_0_tmp_3_idx1\"\n",
      "  scale_param {\n",
      "    axis: 1\n",
      "    num_axes: 1\n",
      "    bias_term: true\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"batch_norm_0_tmp_4_idx2\"\n",
      "  type: \"ReLU\"\n",
      "  bottom: \"batch_norm_0_tmp_3_idx1\"\n",
      "  top: \"batch_norm_0_tmp_4_idx2\"\n",
      "}\n",
      "layer {\n",
      "  name: \"conv2d_1_tmp_0_idx3\"\n",
      "  type: \"Convolution\"\n",
      "  bottom: \"batch_norm_0_tmp_4_idx2\"\n",
      "  top: \"conv2d_1_tmp_0_idx3\"\n",
      "  convolution_param {\n",
      "    num_output: 16\n",
      "    bias_term: false\n",
      "    group: 1\n",
      "    pad_h: 1\n",
      "    pad_w: 1\n",
      "    kernel_h: 3\n",
      "    kernel_w: 3\n",
      "    stride_h: 1\n",
      "    stride_w: 1\n",
      "    dilation: 1\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"batch_norm_1_tmp_3_idx4_extra\"\n",
      "  type: \"BatchNorm\"\n",
      "  bottom: \"conv2d_1_tmp_0_idx3\"\n",
      "  top: \"batch_norm_1_tmp_3_idx4_extra\"\n",
      "  batch_norm_param {\n",
      "    use_global_stats: true\n",
      "    eps: 1e-05\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"batch_norm_1_tmp_3_idx4\"\n",
      "  type: \"Scale\"\n",
      "  bottom: \"batch_norm_1_tmp_3_idx4_extra\"\n",
      "  top: \"batch_norm_1_tmp_3_idx4\"\n",
      "  scale_param {\n",
      "    axis: 1\n",
      "    num_axes: 1\n",
      "    bias_term: true\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"batch_norm_1_tmp_4_idx5\"\n",
      "  type: \"ReLU\"\n",
      "  bottom: \"batch_norm_1_tmp_3_idx4\"\n",
      "  top: \"batch_norm_1_tmp_4_idx5\"\n",
      "}\n",
      "layer {\n",
      "  name: \"conv2d_2_tmp_0_idx6\"\n",
      "  type: \"Convolution\"\n",
      "  bottom: \"batch_norm_1_tmp_4_idx5\"\n",
      "  top: \"conv2d_2_tmp_0_idx6\"\n",
      "  convolution_param {\n",
      "    num_output: 16\n",
      "    bias_term: false\n",
      "    group: 1\n",
      "    pad_h: 1\n",
      "    pad_w: 1\n",
      "    kernel_h: 3\n",
      "    kernel_w: 3\n",
      "    stride_h: 1\n",
      "    stride_w: 1\n",
      "    dilation: 1\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"conv2d_2_tmp_1_idx7\"\n",
      "  type: \"Scale\"\n",
      "  bottom: \"conv2d_2_tmp_0_idx6\"\n",
      "  top: \"conv2d_2_tmp_1_idx7\"\n",
      "  scale_param {\n",
      "    axis: 1\n",
      "    num_axes: 1\n",
      "    bias_term: true\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"batch_norm_2_tmp_3_idx8_extra\"\n",
      "  type: \"BatchNorm\"\n",
      "  bottom: \"conv2d_2_tmp_1_idx7\"\n",
      "  top: \"batch_norm_2_tmp_3_idx8_extra\"\n",
      "  batch_norm_param {\n",
      "    use_global_stats: true\n",
      "    eps: 1e-05\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"batch_norm_2_tmp_3_idx8\"\n",
      "  type: \"Scale\"\n",
      "  bottom: \"batch_norm_2_tmp_3_idx8_extra\"\n",
      "  top: \"batch_norm_2_tmp_3_idx8\"\n",
      "  scale_param {\n",
      "    axis: 1\n",
      "    num_axes: 1\n",
      "    bias_term: true\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"elementwise_add_0_tmp_0_idx9\"\n",
      "  type: \"Eltwise\"\n",
      "  bottom: \"batch_norm_2_tmp_3_idx8\"\n",
      "  bottom: \"batch_norm_0_tmp_4_idx2\"\n",
      "  top: \"elementwise_add_0_tmp_0_idx9\"\n",
      "  eltwise_param {\n",
      "    operation: SUM\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"elementwise_add_0_tmp_1_idx10\"\n",
      "  type: \"ReLU\"\n",
      "  bottom: \"elementwise_add_0_tmp_0_idx9\"\n",
      "  top: \"elementwise_add_0_tmp_1_idx10\"\n",
      "}\n",
      "layer {\n",
      "  name: \"conv2d_3_tmp_0_idx11\"\n",
      "  type: \"Convolution\"\n",
      "  bottom: \"elementwise_add_0_tmp_1_idx10\"\n",
      "  top: \"conv2d_3_tmp_0_idx11\"\n",
      "  convolution_param {\n",
      "    num_output: 16\n",
      "    bias_term: false\n",
      "    group: 1\n",
      "    pad_h: 1\n",
      "    pad_w: 1\n",
      "    kernel_h: 3\n",
      "    kernel_w: 3\n",
      "    stride_h: 1\n",
      "    stride_w: 1\n",
      "    dilation: 1\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"batch_norm_3_tmp_3_idx12_extra\"\n",
      "  type: \"BatchNorm\"\n",
      "  bottom: \"conv2d_3_tmp_0_idx11\"\n",
      "  top: \"batch_norm_3_tmp_3_idx12_extra\"\n",
      "  batch_norm_param {\n",
      "    use_global_stats: true\n",
      "    eps: 1e-05\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"batch_norm_3_tmp_3_idx12\"\n",
      "  type: \"Scale\"\n",
      "  bottom: \"batch_norm_3_tmp_3_idx12_extra\"\n",
      "  top: \"batch_norm_3_tmp_3_idx12\"\n",
      "  scale_param {\n",
      "    axis: 1\n",
      "    num_axes: 1\n",
      "    bias_term: true\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"batch_norm_3_tmp_4_idx13\"\n",
      "  type: \"ReLU\"\n",
      "  bottom: \"batch_norm_3_tmp_3_idx12\"\n",
      "  top: \"batch_norm_3_tmp_4_idx13\"\n",
      "}\n",
      "layer {\n",
      "  name: \"conv2d_4_tmp_0_idx14\"\n",
      "  type: \"Convolution\"\n",
      "  bottom: \"batch_norm_3_tmp_4_idx13\"\n",
      "  top: \"conv2d_4_tmp_0_idx14\"\n",
      "  convolution_param {\n",
      "    num_output: 16\n",
      "    bias_term: false\n",
      "    group: 1\n",
      "    pad_h: 1\n",
      "    pad_w: 1\n",
      "    kernel_h: 3\n",
      "    kernel_w: 3\n",
      "    stride_h: 1\n",
      "    stride_w: 1\n",
      "    dilation: 1\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"conv2d_4_tmp_1_idx15\"\n",
      "  type: \"Scale\"\n",
      "  bottom: \"conv2d_4_tmp_0_idx14\"\n",
      "  top: \"conv2d_4_tmp_1_idx15\"\n",
      "  scale_param {\n",
      "    axis: 1\n",
      "    num_axes: 1\n",
      "    bias_term: true\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"batch_norm_4_tmp_3_idx16_extra\"\n",
      "  type: \"BatchNorm\"\n",
      "  bottom: \"conv2d_4_tmp_1_idx15\"\n",
      "  top: \"batch_norm_4_tmp_3_idx16_extra\"\n",
      "  batch_norm_param {\n",
      "    use_global_stats: true\n",
      "    eps: 1e-05\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"batch_norm_4_tmp_3_idx16\"\n",
      "  type: \"Scale\"\n",
      "  bottom: \"batch_norm_4_tmp_3_idx16_extra\"\n",
      "  top: \"batch_norm_4_tmp_3_idx16\"\n",
      "  scale_param {\n",
      "    axis: 1\n",
      "    num_axes: 1\n",
      "    bias_term: true\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"elementwise_add_1_tmp_0_idx17\"\n",
      "  type: \"Eltwise\"\n",
      "  bottom: \"batch_norm_4_tmp_3_idx16\"\n",
      "  bottom: \"elementwise_add_0_tmp_1_idx10\"\n",
      "  top: \"elementwise_add_1_tmp_0_idx17\"\n",
      "  eltwise_param {\n",
      "    operation: SUM\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"elementwise_add_1_tmp_1_idx18\"\n",
      "  type: \"ReLU\"\n",
      "  bottom: \"elementwise_add_1_tmp_0_idx17\"\n",
      "  top: \"elementwise_add_1_tmp_1_idx18\"\n",
      "}\n",
      "layer {\n",
      "  name: \"conv2d_5_tmp_0_idx19\"\n",
      "  type: \"Convolution\"\n",
      "  bottom: \"elementwise_add_1_tmp_1_idx18\"\n",
      "  top: \"conv2d_5_tmp_0_idx19\"\n",
      "  convolution_param {\n",
      "    num_output: 16\n",
      "    bias_term: false\n",
      "    group: 1\n",
      "    pad_h: 1\n",
      "    pad_w: 1\n",
      "    kernel_h: 3\n",
      "    kernel_w: 3\n",
      "    stride_h: 1\n",
      "    stride_w: 1\n",
      "    dilation: 1\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"batch_norm_5_tmp_3_idx20_extra\"\n",
      "  type: \"BatchNorm\"\n",
      "  bottom: \"conv2d_5_tmp_0_idx19\"\n",
      "  top: \"batch_norm_5_tmp_3_idx20_extra\"\n",
      "  batch_norm_param {\n",
      "    use_global_stats: true\n",
      "    eps: 1e-05\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"batch_norm_5_tmp_3_idx20\"\n",
      "  type: \"Scale\"\n",
      "  bottom: \"batch_norm_5_tmp_3_idx20_extra\"\n",
      "  top: \"batch_norm_5_tmp_3_idx20\"\n",
      "  scale_param {\n",
      "    axis: 1\n",
      "    num_axes: 1\n",
      "    bias_term: true\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"batch_norm_5_tmp_4_idx21\"\n",
      "  type: \"ReLU\"\n",
      "  bottom: \"batch_norm_5_tmp_3_idx20\"\n",
      "  top: \"batch_norm_5_tmp_4_idx21\"\n",
      "}\n",
      "layer {\n",
      "  name: \"conv2d_6_tmp_0_idx22\"\n",
      "  type: \"Convolution\"\n",
      "  bottom: \"batch_norm_5_tmp_4_idx21\"\n",
      "  top: \"conv2d_6_tmp_0_idx22\"\n",
      "  convolution_param {\n",
      "    num_output: 16\n",
      "    bias_term: false\n",
      "    group: 1\n",
      "    pad_h: 1\n",
      "    pad_w: 1\n",
      "    kernel_h: 3\n",
      "    kernel_w: 3\n",
      "    stride_h: 1\n",
      "    stride_w: 1\n",
      "    dilation: 1\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"conv2d_6_tmp_1_idx23\"\n",
      "  type: \"Scale\"\n",
      "  bottom: \"conv2d_6_tmp_0_idx22\"\n",
      "  top: \"conv2d_6_tmp_1_idx23\"\n",
      "  scale_param {\n",
      "    axis: 1\n",
      "    num_axes: 1\n",
      "    bias_term: true\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"batch_norm_6_tmp_3_idx24_extra\"\n",
      "  type: \"BatchNorm\"\n",
      "  bottom: \"conv2d_6_tmp_1_idx23\"\n",
      "  top: \"batch_norm_6_tmp_3_idx24_extra\"\n",
      "  batch_norm_param {\n",
      "    use_global_stats: true\n",
      "    eps: 1e-05\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"batch_norm_6_tmp_3_idx24\"\n",
      "  type: \"Scale\"\n",
      "  bottom: \"batch_norm_6_tmp_3_idx24_extra\"\n",
      "  top: \"batch_norm_6_tmp_3_idx24\"\n",
      "  scale_param {\n",
      "    axis: 1\n",
      "    num_axes: 1\n",
      "    bias_term: true\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"elementwise_add_2_tmp_0_idx25\"\n",
      "  type: \"Eltwise\"\n",
      "  bottom: \"batch_norm_6_tmp_3_idx24\"\n",
      "  bottom: \"elementwise_add_1_tmp_1_idx18\"\n",
      "  top: \"elementwise_add_2_tmp_0_idx25\"\n",
      "  eltwise_param {\n",
      "    operation: SUM\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"elementwise_add_2_tmp_1_idx26\"\n",
      "  type: \"ReLU\"\n",
      "  bottom: \"elementwise_add_2_tmp_0_idx25\"\n",
      "  top: \"elementwise_add_2_tmp_1_idx26\"\n",
      "}\n",
      "layer {\n",
      "  name: \"conv2d_7_tmp_0_idx27\"\n",
      "  type: \"Convolution\"\n",
      "  bottom: \"elementwise_add_2_tmp_1_idx26\"\n",
      "  top: \"conv2d_7_tmp_0_idx27\"\n",
      "  convolution_param {\n",
      "    num_output: 16\n",
      "    bias_term: false\n",
      "    group: 1\n",
      "    pad_h: 1\n",
      "    pad_w: 1\n",
      "    kernel_h: 3\n",
      "    kernel_w: 3\n",
      "    stride_h: 1\n",
      "    stride_w: 1\n",
      "    dilation: 1\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"batch_norm_7_tmp_3_idx28_extra\"\n",
      "  type: \"BatchNorm\"\n",
      "  bottom: \"conv2d_7_tmp_0_idx27\"\n",
      "  top: \"batch_norm_7_tmp_3_idx28_extra\"\n",
      "  batch_norm_param {\n",
      "    use_global_stats: true\n",
      "    eps: 1e-05\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"batch_norm_7_tmp_3_idx28\"\n",
      "  type: \"Scale\"\n",
      "  bottom: \"batch_norm_7_tmp_3_idx28_extra\"\n",
      "  top: \"batch_norm_7_tmp_3_idx28\"\n",
      "  scale_param {\n",
      "    axis: 1\n",
      "    num_axes: 1\n",
      "    bias_term: true\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"batch_norm_7_tmp_4_idx29\"\n",
      "  type: \"ReLU\"\n",
      "  bottom: \"batch_norm_7_tmp_3_idx28\"\n",
      "  top: \"batch_norm_7_tmp_4_idx29\"\n",
      "}\n",
      "layer {\n",
      "  name: \"conv2d_8_tmp_0_idx30\"\n",
      "  type: \"Convolution\"\n",
      "  bottom: \"batch_norm_7_tmp_4_idx29\"\n",
      "  top: \"conv2d_8_tmp_0_idx30\"\n",
      "  convolution_param {\n",
      "    num_output: 16\n",
      "    bias_term: false\n",
      "    group: 1\n",
      "    pad_h: 1\n",
      "    pad_w: 1\n",
      "    kernel_h: 3\n",
      "    kernel_w: 3\n",
      "    stride_h: 1\n",
      "    stride_w: 1\n",
      "    dilation: 1\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"conv2d_8_tmp_1_idx31\"\n",
      "  type: \"Scale\"\n",
      "  bottom: \"conv2d_8_tmp_0_idx30\"\n",
      "  top: \"conv2d_8_tmp_1_idx31\"\n",
      "  scale_param {\n",
      "    axis: 1\n",
      "    num_axes: 1\n",
      "    bias_term: true\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"batch_norm_8_tmp_3_idx32_extra\"\n",
      "  type: \"BatchNorm\"\n",
      "  bottom: \"conv2d_8_tmp_1_idx31\"\n",
      "  top: \"batch_norm_8_tmp_3_idx32_extra\"\n",
      "  batch_norm_param {\n",
      "    use_global_stats: true\n",
      "    eps: 1e-05\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"batch_norm_8_tmp_3_idx32\"\n",
      "  type: \"Scale\"\n",
      "  bottom: \"batch_norm_8_tmp_3_idx32_extra\"\n",
      "  top: \"batch_norm_8_tmp_3_idx32\"\n",
      "  scale_param {\n",
      "    axis: 1\n",
      "    num_axes: 1\n",
      "    bias_term: true\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"elementwise_add_3_tmp_0_idx33\"\n",
      "  type: \"Eltwise\"\n",
      "  bottom: \"batch_norm_8_tmp_3_idx32\"\n",
      "  bottom: \"elementwise_add_2_tmp_1_idx26\"\n",
      "  top: \"elementwise_add_3_tmp_0_idx33\"\n",
      "  eltwise_param {\n",
      "    operation: SUM\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"elementwise_add_3_tmp_1_idx34\"\n",
      "  type: \"ReLU\"\n",
      "  bottom: \"elementwise_add_3_tmp_0_idx33\"\n",
      "  top: \"elementwise_add_3_tmp_1_idx34\"\n",
      "}\n",
      "layer {\n",
      "  name: \"conv2d_9_tmp_0_idx35\"\n",
      "  type: \"Convolution\"\n",
      "  bottom: \"elementwise_add_3_tmp_1_idx34\"\n",
      "  top: \"conv2d_9_tmp_0_idx35\"\n",
      "  convolution_param {\n",
      "    num_output: 16\n",
      "    bias_term: false\n",
      "    group: 1\n",
      "    pad_h: 1\n",
      "    pad_w: 1\n",
      "    kernel_h: 3\n",
      "    kernel_w: 3\n",
      "    stride_h: 1\n",
      "    stride_w: 1\n",
      "    dilation: 1\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"batch_norm_9_tmp_3_idx36_extra\"\n",
      "  type: \"BatchNorm\"\n",
      "  bottom: \"conv2d_9_tmp_0_idx35\"\n",
      "  top: \"batch_norm_9_tmp_3_idx36_extra\"\n",
      "  batch_norm_param {\n",
      "    use_global_stats: true\n",
      "    eps: 1e-05\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"batch_norm_9_tmp_3_idx36\"\n",
      "  type: \"Scale\"\n",
      "  bottom: \"batch_norm_9_tmp_3_idx36_extra\"\n",
      "  top: \"batch_norm_9_tmp_3_idx36\"\n",
      "  scale_param {\n",
      "    axis: 1\n",
      "    num_axes: 1\n",
      "    bias_term: true\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"batch_norm_9_tmp_4_idx37\"\n",
      "  type: \"ReLU\"\n",
      "  bottom: \"batch_norm_9_tmp_3_idx36\"\n",
      "  top: \"batch_norm_9_tmp_4_idx37\"\n",
      "}\n",
      "layer {\n",
      "  name: \"conv2d_10_tmp_0_idx38\"\n",
      "  type: \"Convolution\"\n",
      "  bottom: \"batch_norm_9_tmp_4_idx37\"\n",
      "  top: \"conv2d_10_tmp_0_idx38\"\n",
      "  convolution_param {\n",
      "    num_output: 16\n",
      "    bias_term: false\n",
      "    group: 1\n",
      "    pad_h: 1\n",
      "    pad_w: 1\n",
      "    kernel_h: 3\n",
      "    kernel_w: 3\n",
      "    stride_h: 1\n",
      "    stride_w: 1\n",
      "    dilation: 1\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"conv2d_10_tmp_1_idx39\"\n",
      "  type: \"Scale\"\n",
      "  bottom: \"conv2d_10_tmp_0_idx38\"\n",
      "  top: \"conv2d_10_tmp_1_idx39\"\n",
      "  scale_param {\n",
      "    axis: 1\n",
      "    num_axes: 1\n",
      "    bias_term: true\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"batch_norm_10_tmp_3_idx40_extra\"\n",
      "  type: \"BatchNorm\"\n",
      "  bottom: \"conv2d_10_tmp_1_idx39\"\n",
      "  top: \"batch_norm_10_tmp_3_idx40_extra\"\n",
      "  batch_norm_param {\n",
      "    use_global_stats: true\n",
      "    eps: 1e-05\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"batch_norm_10_tmp_3_idx40\"\n",
      "  type: \"Scale\"\n",
      "  bottom: \"batch_norm_10_tmp_3_idx40_extra\"\n",
      "  top: \"batch_norm_10_tmp_3_idx40\"\n",
      "  scale_param {\n",
      "    axis: 1\n",
      "    num_axes: 1\n",
      "    bias_term: true\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"elementwise_add_4_tmp_0_idx41\"\n",
      "  type: \"Eltwise\"\n",
      "  bottom: \"batch_norm_10_tmp_3_idx40\"\n",
      "  bottom: \"elementwise_add_3_tmp_1_idx34\"\n",
      "  top: \"elementwise_add_4_tmp_0_idx41\"\n",
      "  eltwise_param {\n",
      "    operation: SUM\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"elementwise_add_4_tmp_1_idx42\"\n",
      "  type: \"ReLU\"\n",
      "  bottom: \"elementwise_add_4_tmp_0_idx41\"\n",
      "  top: \"elementwise_add_4_tmp_1_idx42\"\n",
      "}\n",
      "layer {\n",
      "  name: \"pool2d_0_tmp_0_idx43\"\n",
      "  type: \"Pooling\"\n",
      "  bottom: \"elementwise_add_4_tmp_1_idx42\"\n",
      "  top: \"pool2d_0_tmp_0_idx43\"\n",
      "  pooling_param {\n",
      "    pool: AVE\n",
      "    kernel_h: 8\n",
      "    kernel_w: 8\n",
      "    stride_h: 1\n",
      "    stride_w: 1\n",
      "    pad_h: 0\n",
      "    pad_w: 0\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"fc_0_tmp_0_idx44_extra_x\"\n",
      "  type: \"Flatten\"\n",
      "  bottom: \"pool2d_0_tmp_0_idx43\"\n",
      "  top: \"fc_0_tmp_0_idx44_extra_x\"\n",
      "  flatten_param {\n",
      "    axis: 1\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"fc_0_tmp_0_idx44\"\n",
      "  type: \"InnerProduct\"\n",
      "  bottom: \"fc_0_tmp_0_idx44_extra_x\"\n",
      "  top: \"fc_0_tmp_0_idx44\"\n",
      "  inner_product_param {\n",
      "    num_output: 10\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"fc_0_tmp_1_idx45\"\n",
      "  type: \"Scale\"\n",
      "  bottom: \"fc_0_tmp_0_idx44\"\n",
      "  top: \"fc_0_tmp_1_idx45\"\n",
      "  scale_param {\n",
      "    axis: 1\n",
      "    num_axes: 1\n",
      "    bias_term: true\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"fc_0_tmp_2_idx46\"\n",
      "  type: \"Softmax\"\n",
      "  bottom: \"fc_0_tmp_1_idx45\"\n",
      "  top: \"fc_0_tmp_2_idx46\"\n",
      "  softmax_param {\n",
      "    axis: 1\n",
      "  }\n",
      "}\n",
      "I0706 20:30:07.152778 371264960 layer_factory.hpp:77] Creating layer pixel_idx0\n",
      "I0706 20:30:07.153473 371264960 net.cpp:100] Creating Layer pixel_idx0\n",
      "I0706 20:30:07.153529 371264960 net.cpp:408] pixel_idx0 -> pixel_idx0\n",
      "I0706 20:30:07.154820 371264960 net.cpp:150] Setting up pixel_idx0\n",
      "I0706 20:30:07.155090 371264960 net.cpp:157] Top shape: 1 3 32 32 (3072)\n",
      "I0706 20:30:07.155292 371264960 net.cpp:165] Memory required for data: 12288\n",
      "I0706 20:30:07.155304 371264960 layer_factory.hpp:77] Creating layer conv2d_0_tmp_0_idx0\n",
      "I0706 20:30:07.155985 371264960 net.cpp:100] Creating Layer conv2d_0_tmp_0_idx0\n",
      "I0706 20:30:07.155999 371264960 net.cpp:434] conv2d_0_tmp_0_idx0 <- pixel_idx0\n",
      "I0706 20:30:07.156258 371264960 net.cpp:408] conv2d_0_tmp_0_idx0 -> conv2d_0_tmp_0_idx0\n",
      "I0706 20:30:07.157727 371264960 net.cpp:150] Setting up conv2d_0_tmp_0_idx0\n",
      "I0706 20:30:07.157740 371264960 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:30:07.157748 371264960 net.cpp:165] Memory required for data: 77824\n",
      "I0706 20:30:07.158332 371264960 layer_factory.hpp:77] Creating layer batch_norm_0_tmp_3_idx1_extra\n",
      "I0706 20:30:07.158449 371264960 net.cpp:100] Creating Layer batch_norm_0_tmp_3_idx1_extra\n",
      "I0706 20:30:07.158460 371264960 net.cpp:434] batch_norm_0_tmp_3_idx1_extra <- conv2d_0_tmp_0_idx0\n",
      "I0706 20:30:07.158483 371264960 net.cpp:408] batch_norm_0_tmp_3_idx1_extra -> batch_norm_0_tmp_3_idx1_extra\n",
      "I0706 20:30:07.158629 371264960 net.cpp:150] Setting up batch_norm_0_tmp_3_idx1_extra\n",
      "I0706 20:30:07.158639 371264960 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:30:07.158648 371264960 net.cpp:165] Memory required for data: 143360\n",
      "I0706 20:30:07.158936 371264960 layer_factory.hpp:77] Creating layer batch_norm_0_tmp_3_idx1\n",
      "I0706 20:30:07.159202 371264960 net.cpp:100] Creating Layer batch_norm_0_tmp_3_idx1\n",
      "I0706 20:30:07.159214 371264960 net.cpp:434] batch_norm_0_tmp_3_idx1 <- batch_norm_0_tmp_3_idx1_extra\n",
      "I0706 20:30:07.159232 371264960 net.cpp:408] batch_norm_0_tmp_3_idx1 -> batch_norm_0_tmp_3_idx1\n",
      "I0706 20:30:07.159787 371264960 layer_factory.hpp:77] Creating layer batch_norm_0_tmp_3_idx1\n",
      "I0706 20:30:07.160635 371264960 scale_layer.cpp:92] axis_ is 1\n",
      "I0706 20:30:07.160648 371264960 scale_layer.cpp:93] num_axes is 1\n",
      "I0706 20:30:07.160653 371264960 scale_layer.cpp:94] bottom[0]->shape is 1 16 32 32 (16384)\n",
      "I0706 20:30:07.160660 371264960 scale_layer.cpp:95] scale->shape is 16 (16)\n",
      "I0706 20:30:07.160904 371264960 net.cpp:150] Setting up batch_norm_0_tmp_3_idx1\n",
      "I0706 20:30:07.160912 371264960 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:30:07.160919 371264960 net.cpp:165] Memory required for data: 208896\n",
      "I0706 20:30:07.160948 371264960 layer_factory.hpp:77] Creating layer batch_norm_0_tmp_4_idx2\n",
      "I0706 20:30:07.160971 371264960 net.cpp:100] Creating Layer batch_norm_0_tmp_4_idx2\n",
      "I0706 20:30:07.160980 371264960 net.cpp:434] batch_norm_0_tmp_4_idx2 <- batch_norm_0_tmp_3_idx1\n",
      "I0706 20:30:07.160995 371264960 net.cpp:408] batch_norm_0_tmp_4_idx2 -> batch_norm_0_tmp_4_idx2\n",
      "I0706 20:30:07.161018 371264960 net.cpp:150] Setting up batch_norm_0_tmp_4_idx2\n",
      "I0706 20:30:07.161026 371264960 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:30:07.161032 371264960 net.cpp:165] Memory required for data: 274432\n",
      "I0706 20:30:07.161038 371264960 layer_factory.hpp:77] Creating layer batch_norm_0_tmp_4_idx2_batch_norm_0_tmp_4_idx2_0_split\n",
      "I0706 20:30:07.161303 371264960 net.cpp:100] Creating Layer batch_norm_0_tmp_4_idx2_batch_norm_0_tmp_4_idx2_0_split\n",
      "I0706 20:30:07.161316 371264960 net.cpp:434] batch_norm_0_tmp_4_idx2_batch_norm_0_tmp_4_idx2_0_split <- batch_norm_0_tmp_4_idx2\n",
      "I0706 20:30:07.161334 371264960 net.cpp:408] batch_norm_0_tmp_4_idx2_batch_norm_0_tmp_4_idx2_0_split -> batch_norm_0_tmp_4_idx2_batch_norm_0_tmp_4_idx2_0_split_0\n",
      "I0706 20:30:07.161357 371264960 net.cpp:408] batch_norm_0_tmp_4_idx2_batch_norm_0_tmp_4_idx2_0_split -> batch_norm_0_tmp_4_idx2_batch_norm_0_tmp_4_idx2_0_split_1\n",
      "I0706 20:30:07.161381 371264960 net.cpp:150] Setting up batch_norm_0_tmp_4_idx2_batch_norm_0_tmp_4_idx2_0_split\n",
      "I0706 20:30:07.161386 371264960 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:30:07.161393 371264960 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:30:07.161399 371264960 net.cpp:165] Memory required for data: 405504\n",
      "I0706 20:30:07.161406 371264960 layer_factory.hpp:77] Creating layer conv2d_1_tmp_0_idx3\n",
      "I0706 20:30:07.161422 371264960 net.cpp:100] Creating Layer conv2d_1_tmp_0_idx3\n",
      "I0706 20:30:07.161429 371264960 net.cpp:434] conv2d_1_tmp_0_idx3 <- batch_norm_0_tmp_4_idx2_batch_norm_0_tmp_4_idx2_0_split_0\n",
      "I0706 20:30:07.161445 371264960 net.cpp:408] conv2d_1_tmp_0_idx3 -> conv2d_1_tmp_0_idx3\n",
      "I0706 20:30:07.161535 371264960 net.cpp:150] Setting up conv2d_1_tmp_0_idx3\n",
      "I0706 20:30:07.161545 371264960 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:30:07.161552 371264960 net.cpp:165] Memory required for data: 471040\n",
      "I0706 20:30:07.161563 371264960 layer_factory.hpp:77] Creating layer batch_norm_1_tmp_3_idx4_extra\n",
      "I0706 20:30:07.161579 371264960 net.cpp:100] Creating Layer batch_norm_1_tmp_3_idx4_extra\n",
      "I0706 20:30:07.161587 371264960 net.cpp:434] batch_norm_1_tmp_3_idx4_extra <- conv2d_1_tmp_0_idx3\n",
      "I0706 20:30:07.161602 371264960 net.cpp:408] batch_norm_1_tmp_3_idx4_extra -> batch_norm_1_tmp_3_idx4_extra\n",
      "I0706 20:30:07.161644 371264960 net.cpp:150] Setting up batch_norm_1_tmp_3_idx4_extra\n",
      "I0706 20:30:07.161653 371264960 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:30:07.161660 371264960 net.cpp:165] Memory required for data: 536576\n",
      "I0706 20:30:07.161687 371264960 layer_factory.hpp:77] Creating layer batch_norm_1_tmp_3_idx4\n",
      "I0706 20:30:07.161710 371264960 net.cpp:100] Creating Layer batch_norm_1_tmp_3_idx4\n",
      "I0706 20:30:07.161718 371264960 net.cpp:434] batch_norm_1_tmp_3_idx4 <- batch_norm_1_tmp_3_idx4_extra\n",
      "I0706 20:30:07.161733 371264960 net.cpp:408] batch_norm_1_tmp_3_idx4 -> batch_norm_1_tmp_3_idx4\n",
      "I0706 20:30:07.161764 371264960 layer_factory.hpp:77] Creating layer batch_norm_1_tmp_3_idx4\n",
      "I0706 20:30:07.161808 371264960 scale_layer.cpp:92] axis_ is 1\n",
      "I0706 20:30:07.161814 371264960 scale_layer.cpp:93] num_axes is 1\n",
      "I0706 20:30:07.161818 371264960 scale_layer.cpp:94] bottom[0]->shape is 1 16 32 32 (16384)\n",
      "I0706 20:30:07.161825 371264960 scale_layer.cpp:95] scale->shape is 16 (16)\n",
      "I0706 20:30:07.161839 371264960 net.cpp:150] Setting up batch_norm_1_tmp_3_idx4\n",
      "I0706 20:30:07.161844 371264960 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:30:07.161849 371264960 net.cpp:165] Memory required for data: 602112\n",
      "I0706 20:30:07.161862 371264960 layer_factory.hpp:77] Creating layer batch_norm_1_tmp_4_idx5\n",
      "I0706 20:30:07.161875 371264960 net.cpp:100] Creating Layer batch_norm_1_tmp_4_idx5\n",
      "I0706 20:30:07.161883 371264960 net.cpp:434] batch_norm_1_tmp_4_idx5 <- batch_norm_1_tmp_3_idx4\n",
      "I0706 20:30:07.161897 371264960 net.cpp:408] batch_norm_1_tmp_4_idx5 -> batch_norm_1_tmp_4_idx5\n",
      "I0706 20:30:07.161914 371264960 net.cpp:150] Setting up batch_norm_1_tmp_4_idx5\n",
      "I0706 20:30:07.161921 371264960 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:30:07.161927 371264960 net.cpp:165] Memory required for data: 667648\n",
      "I0706 20:30:07.161932 371264960 layer_factory.hpp:77] Creating layer conv2d_2_tmp_0_idx6\n",
      "I0706 20:30:07.161947 371264960 net.cpp:100] Creating Layer conv2d_2_tmp_0_idx6\n",
      "I0706 20:30:07.161955 371264960 net.cpp:434] conv2d_2_tmp_0_idx6 <- batch_norm_1_tmp_4_idx5\n",
      "I0706 20:30:07.161970 371264960 net.cpp:408] conv2d_2_tmp_0_idx6 -> conv2d_2_tmp_0_idx6\n",
      "I0706 20:30:07.162017 371264960 net.cpp:150] Setting up conv2d_2_tmp_0_idx6\n",
      "I0706 20:30:07.162025 371264960 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:30:07.162031 371264960 net.cpp:165] Memory required for data: 733184\n",
      "I0706 20:30:07.162042 371264960 layer_factory.hpp:77] Creating layer conv2d_2_tmp_1_idx7\n",
      "I0706 20:30:07.162057 371264960 net.cpp:100] Creating Layer conv2d_2_tmp_1_idx7\n",
      "I0706 20:30:07.162065 371264960 net.cpp:434] conv2d_2_tmp_1_idx7 <- conv2d_2_tmp_0_idx6\n",
      "I0706 20:30:07.162078 371264960 net.cpp:408] conv2d_2_tmp_1_idx7 -> conv2d_2_tmp_1_idx7\n",
      "I0706 20:30:07.162106 371264960 layer_factory.hpp:77] Creating layer conv2d_2_tmp_1_idx7\n",
      "I0706 20:30:07.162142 371264960 scale_layer.cpp:92] axis_ is 1\n",
      "I0706 20:30:07.162150 371264960 scale_layer.cpp:93] num_axes is 1\n",
      "I0706 20:30:07.162154 371264960 scale_layer.cpp:94] bottom[0]->shape is 1 16 32 32 (16384)\n",
      "I0706 20:30:07.162160 371264960 scale_layer.cpp:95] scale->shape is 16 (16)\n",
      "I0706 20:30:07.162173 371264960 net.cpp:150] Setting up conv2d_2_tmp_1_idx7\n",
      "I0706 20:30:07.162178 371264960 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:30:07.162184 371264960 net.cpp:165] Memory required for data: 798720\n",
      "I0706 20:30:07.162196 371264960 layer_factory.hpp:77] Creating layer batch_norm_2_tmp_3_idx8_extra\n",
      "I0706 20:30:07.162212 371264960 net.cpp:100] Creating Layer batch_norm_2_tmp_3_idx8_extra\n",
      "I0706 20:30:07.162221 371264960 net.cpp:434] batch_norm_2_tmp_3_idx8_extra <- conv2d_2_tmp_1_idx7\n",
      "I0706 20:30:07.162235 371264960 net.cpp:408] batch_norm_2_tmp_3_idx8_extra -> batch_norm_2_tmp_3_idx8_extra\n",
      "I0706 20:30:07.162271 371264960 net.cpp:150] Setting up batch_norm_2_tmp_3_idx8_extra\n",
      "I0706 20:30:07.162278 371264960 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:30:07.162286 371264960 net.cpp:165] Memory required for data: 864256\n",
      "I0706 20:30:07.162314 371264960 layer_factory.hpp:77] Creating layer batch_norm_2_tmp_3_idx8\n",
      "I0706 20:30:07.162333 371264960 net.cpp:100] Creating Layer batch_norm_2_tmp_3_idx8\n",
      "I0706 20:30:07.162339 371264960 net.cpp:434] batch_norm_2_tmp_3_idx8 <- batch_norm_2_tmp_3_idx8_extra\n",
      "I0706 20:30:07.162353 371264960 net.cpp:408] batch_norm_2_tmp_3_idx8 -> batch_norm_2_tmp_3_idx8\n",
      "I0706 20:30:07.162382 371264960 layer_factory.hpp:77] Creating layer batch_norm_2_tmp_3_idx8\n",
      "I0706 20:30:07.162422 371264960 scale_layer.cpp:92] axis_ is 1\n",
      "I0706 20:30:07.162429 371264960 scale_layer.cpp:93] num_axes is 1\n",
      "I0706 20:30:07.162433 371264960 scale_layer.cpp:94] bottom[0]->shape is 1 16 32 32 (16384)\n",
      "I0706 20:30:07.162441 371264960 scale_layer.cpp:95] scale->shape is 16 (16)\n",
      "I0706 20:30:07.162454 371264960 net.cpp:150] Setting up batch_norm_2_tmp_3_idx8\n",
      "I0706 20:30:07.162459 371264960 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:30:07.162465 371264960 net.cpp:165] Memory required for data: 929792\n",
      "I0706 20:30:07.162477 371264960 layer_factory.hpp:77] Creating layer elementwise_add_0_tmp_0_idx9\n",
      "I0706 20:30:07.163035 371264960 net.cpp:100] Creating Layer elementwise_add_0_tmp_0_idx9\n",
      "I0706 20:30:07.163060 371264960 net.cpp:434] elementwise_add_0_tmp_0_idx9 <- batch_norm_2_tmp_3_idx8\n",
      "I0706 20:30:07.163101 371264960 net.cpp:434] elementwise_add_0_tmp_0_idx9 <- batch_norm_0_tmp_4_idx2_batch_norm_0_tmp_4_idx2_0_split_1\n",
      "I0706 20:30:07.163132 371264960 net.cpp:408] elementwise_add_0_tmp_0_idx9 -> elementwise_add_0_tmp_0_idx9\n",
      "I0706 20:30:07.163303 371264960 net.cpp:150] Setting up elementwise_add_0_tmp_0_idx9\n",
      "I0706 20:30:07.163314 371264960 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:30:07.163322 371264960 net.cpp:165] Memory required for data: 995328\n",
      "I0706 20:30:07.163331 371264960 layer_factory.hpp:77] Creating layer elementwise_add_0_tmp_1_idx10\n",
      "I0706 20:30:07.163349 371264960 net.cpp:100] Creating Layer elementwise_add_0_tmp_1_idx10\n",
      "I0706 20:30:07.163358 371264960 net.cpp:434] elementwise_add_0_tmp_1_idx10 <- elementwise_add_0_tmp_0_idx9\n",
      "I0706 20:30:07.163374 371264960 net.cpp:408] elementwise_add_0_tmp_1_idx10 -> elementwise_add_0_tmp_1_idx10\n",
      "I0706 20:30:07.163408 371264960 net.cpp:150] Setting up elementwise_add_0_tmp_1_idx10\n",
      "I0706 20:30:07.163422 371264960 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:30:07.163434 371264960 net.cpp:165] Memory required for data: 1060864\n",
      "I0706 20:30:07.163444 371264960 layer_factory.hpp:77] Creating layer elementwise_add_0_tmp_1_idx10_elementwise_add_0_tmp_1_idx10_0_split\n",
      "I0706 20:30:07.163496 371264960 net.cpp:100] Creating Layer elementwise_add_0_tmp_1_idx10_elementwise_add_0_tmp_1_idx10_0_split\n",
      "I0706 20:30:07.163513 371264960 net.cpp:434] elementwise_add_0_tmp_1_idx10_elementwise_add_0_tmp_1_idx10_0_split <- elementwise_add_0_tmp_1_idx10\n",
      "I0706 20:30:07.163532 371264960 net.cpp:408] elementwise_add_0_tmp_1_idx10_elementwise_add_0_tmp_1_idx10_0_split -> elementwise_add_0_tmp_1_idx10_elementwise_add_0_tmp_1_idx10_0_split_0\n",
      "I0706 20:30:07.163559 371264960 net.cpp:408] elementwise_add_0_tmp_1_idx10_elementwise_add_0_tmp_1_idx10_0_split -> elementwise_add_0_tmp_1_idx10_elementwise_add_0_tmp_1_idx10_0_split_1\n",
      "I0706 20:30:07.163583 371264960 net.cpp:150] Setting up elementwise_add_0_tmp_1_idx10_elementwise_add_0_tmp_1_idx10_0_split\n",
      "I0706 20:30:07.163589 371264960 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:30:07.163596 371264960 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:30:07.163601 371264960 net.cpp:165] Memory required for data: 1191936\n",
      "I0706 20:30:07.163609 371264960 layer_factory.hpp:77] Creating layer conv2d_3_tmp_0_idx11\n",
      "I0706 20:30:07.163626 371264960 net.cpp:100] Creating Layer conv2d_3_tmp_0_idx11\n",
      "I0706 20:30:07.163633 371264960 net.cpp:434] conv2d_3_tmp_0_idx11 <- elementwise_add_0_tmp_1_idx10_elementwise_add_0_tmp_1_idx10_0_split_0\n",
      "I0706 20:30:07.163648 371264960 net.cpp:408] conv2d_3_tmp_0_idx11 -> conv2d_3_tmp_0_idx11\n",
      "I0706 20:30:07.163699 371264960 net.cpp:150] Setting up conv2d_3_tmp_0_idx11\n",
      "I0706 20:30:07.163718 371264960 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:30:07.163726 371264960 net.cpp:165] Memory required for data: 1257472\n",
      "I0706 20:30:07.163738 371264960 layer_factory.hpp:77] Creating layer batch_norm_3_tmp_3_idx12_extra\n",
      "I0706 20:30:07.163753 371264960 net.cpp:100] Creating Layer batch_norm_3_tmp_3_idx12_extra\n",
      "I0706 20:30:07.163761 371264960 net.cpp:434] batch_norm_3_tmp_3_idx12_extra <- conv2d_3_tmp_0_idx11\n",
      "I0706 20:30:07.163779 371264960 net.cpp:408] batch_norm_3_tmp_3_idx12_extra -> batch_norm_3_tmp_3_idx12_extra\n",
      "I0706 20:30:07.163816 371264960 net.cpp:150] Setting up batch_norm_3_tmp_3_idx12_extra\n",
      "I0706 20:30:07.163823 371264960 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:30:07.163830 371264960 net.cpp:165] Memory required for data: 1323008\n",
      "I0706 20:30:07.163846 371264960 layer_factory.hpp:77] Creating layer batch_norm_3_tmp_3_idx12\n",
      "I0706 20:30:07.163863 371264960 net.cpp:100] Creating Layer batch_norm_3_tmp_3_idx12\n",
      "I0706 20:30:07.163872 371264960 net.cpp:434] batch_norm_3_tmp_3_idx12 <- batch_norm_3_tmp_3_idx12_extra\n",
      "I0706 20:30:07.163889 371264960 net.cpp:408] batch_norm_3_tmp_3_idx12 -> batch_norm_3_tmp_3_idx12\n",
      "I0706 20:30:07.163921 371264960 layer_factory.hpp:77] Creating layer batch_norm_3_tmp_3_idx12\n",
      "I0706 20:30:07.163964 371264960 scale_layer.cpp:92] axis_ is 1\n",
      "I0706 20:30:07.163972 371264960 scale_layer.cpp:93] num_axes is 1\n",
      "I0706 20:30:07.163976 371264960 scale_layer.cpp:94] bottom[0]->shape is 1 16 32 32 (16384)\n",
      "I0706 20:30:07.163983 371264960 scale_layer.cpp:95] scale->shape is 16 (16)\n",
      "I0706 20:30:07.163997 371264960 net.cpp:150] Setting up batch_norm_3_tmp_3_idx12\n",
      "I0706 20:30:07.164002 371264960 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:30:07.164008 371264960 net.cpp:165] Memory required for data: 1388544\n",
      "I0706 20:30:07.164023 371264960 layer_factory.hpp:77] Creating layer batch_norm_3_tmp_4_idx13\n",
      "I0706 20:30:07.164041 371264960 net.cpp:100] Creating Layer batch_norm_3_tmp_4_idx13\n",
      "I0706 20:30:07.164049 371264960 net.cpp:434] batch_norm_3_tmp_4_idx13 <- batch_norm_3_tmp_3_idx12\n",
      "I0706 20:30:07.164069 371264960 net.cpp:408] batch_norm_3_tmp_4_idx13 -> batch_norm_3_tmp_4_idx13\n",
      "I0706 20:30:07.164089 371264960 net.cpp:150] Setting up batch_norm_3_tmp_4_idx13\n",
      "I0706 20:30:07.164096 371264960 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:30:07.164103 371264960 net.cpp:165] Memory required for data: 1454080\n",
      "I0706 20:30:07.164108 371264960 layer_factory.hpp:77] Creating layer conv2d_4_tmp_0_idx14\n",
      "I0706 20:30:07.164124 371264960 net.cpp:100] Creating Layer conv2d_4_tmp_0_idx14\n",
      "I0706 20:30:07.164132 371264960 net.cpp:434] conv2d_4_tmp_0_idx14 <- batch_norm_3_tmp_4_idx13\n",
      "I0706 20:30:07.164147 371264960 net.cpp:408] conv2d_4_tmp_0_idx14 -> conv2d_4_tmp_0_idx14\n",
      "I0706 20:30:07.164196 371264960 net.cpp:150] Setting up conv2d_4_tmp_0_idx14\n",
      "I0706 20:30:07.164203 371264960 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:30:07.164209 371264960 net.cpp:165] Memory required for data: 1519616\n",
      "I0706 20:30:07.164219 371264960 layer_factory.hpp:77] Creating layer conv2d_4_tmp_1_idx15\n",
      "I0706 20:30:07.164237 371264960 net.cpp:100] Creating Layer conv2d_4_tmp_1_idx15\n",
      "I0706 20:30:07.164244 371264960 net.cpp:434] conv2d_4_tmp_1_idx15 <- conv2d_4_tmp_0_idx14\n",
      "I0706 20:30:07.164258 371264960 net.cpp:408] conv2d_4_tmp_1_idx15 -> conv2d_4_tmp_1_idx15\n",
      "I0706 20:30:07.164292 371264960 layer_factory.hpp:77] Creating layer conv2d_4_tmp_1_idx15\n",
      "I0706 20:30:07.164328 371264960 scale_layer.cpp:92] axis_ is 1\n",
      "I0706 20:30:07.164335 371264960 scale_layer.cpp:93] num_axes is 1\n",
      "I0706 20:30:07.164340 371264960 scale_layer.cpp:94] bottom[0]->shape is 1 16 32 32 (16384)\n",
      "I0706 20:30:07.164345 371264960 scale_layer.cpp:95] scale->shape is 16 (16)\n",
      "I0706 20:30:07.164358 371264960 net.cpp:150] Setting up conv2d_4_tmp_1_idx15\n",
      "I0706 20:30:07.164363 371264960 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:30:07.164371 371264960 net.cpp:165] Memory required for data: 1585152\n",
      "I0706 20:30:07.164382 371264960 layer_factory.hpp:77] Creating layer batch_norm_4_tmp_3_idx16_extra\n",
      "I0706 20:30:07.164397 371264960 net.cpp:100] Creating Layer batch_norm_4_tmp_3_idx16_extra\n",
      "I0706 20:30:07.164407 371264960 net.cpp:434] batch_norm_4_tmp_3_idx16_extra <- conv2d_4_tmp_1_idx15\n",
      "I0706 20:30:07.164422 371264960 net.cpp:408] batch_norm_4_tmp_3_idx16_extra -> batch_norm_4_tmp_3_idx16_extra\n",
      "I0706 20:30:07.164463 371264960 net.cpp:150] Setting up batch_norm_4_tmp_3_idx16_extra\n",
      "I0706 20:30:07.164471 371264960 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:30:07.164479 371264960 net.cpp:165] Memory required for data: 1650688\n",
      "I0706 20:30:07.164497 371264960 layer_factory.hpp:77] Creating layer batch_norm_4_tmp_3_idx16\n",
      "I0706 20:30:07.164515 371264960 net.cpp:100] Creating Layer batch_norm_4_tmp_3_idx16\n",
      "I0706 20:30:07.164522 371264960 net.cpp:434] batch_norm_4_tmp_3_idx16 <- batch_norm_4_tmp_3_idx16_extra\n",
      "I0706 20:30:07.164537 371264960 net.cpp:408] batch_norm_4_tmp_3_idx16 -> batch_norm_4_tmp_3_idx16\n",
      "I0706 20:30:07.164570 371264960 layer_factory.hpp:77] Creating layer batch_norm_4_tmp_3_idx16\n",
      "I0706 20:30:07.164615 371264960 scale_layer.cpp:92] axis_ is 1\n",
      "I0706 20:30:07.164623 371264960 scale_layer.cpp:93] num_axes is 1\n",
      "I0706 20:30:07.164628 371264960 scale_layer.cpp:94] bottom[0]->shape is 1 16 32 32 (16384)\n",
      "I0706 20:30:07.164633 371264960 scale_layer.cpp:95] scale->shape is 16 (16)\n",
      "I0706 20:30:07.164649 371264960 net.cpp:150] Setting up batch_norm_4_tmp_3_idx16\n",
      "I0706 20:30:07.164654 371264960 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:30:07.164660 371264960 net.cpp:165] Memory required for data: 1716224\n",
      "I0706 20:30:07.164690 371264960 layer_factory.hpp:77] Creating layer elementwise_add_1_tmp_0_idx17\n",
      "I0706 20:30:07.164706 371264960 net.cpp:100] Creating Layer elementwise_add_1_tmp_0_idx17\n",
      "I0706 20:30:07.164714 371264960 net.cpp:434] elementwise_add_1_tmp_0_idx17 <- batch_norm_4_tmp_3_idx16\n",
      "I0706 20:30:07.164724 371264960 net.cpp:434] elementwise_add_1_tmp_0_idx17 <- elementwise_add_0_tmp_1_idx10_elementwise_add_0_tmp_1_idx10_0_split_1\n",
      "I0706 20:30:07.164738 371264960 net.cpp:408] elementwise_add_1_tmp_0_idx17 -> elementwise_add_1_tmp_0_idx17\n",
      "I0706 20:30:07.164769 371264960 net.cpp:150] Setting up elementwise_add_1_tmp_0_idx17\n",
      "I0706 20:30:07.164777 371264960 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:30:07.164783 371264960 net.cpp:165] Memory required for data: 1781760\n",
      "I0706 20:30:07.164788 371264960 layer_factory.hpp:77] Creating layer elementwise_add_1_tmp_1_idx18\n",
      "I0706 20:30:07.164803 371264960 net.cpp:100] Creating Layer elementwise_add_1_tmp_1_idx18\n",
      "I0706 20:30:07.164810 371264960 net.cpp:434] elementwise_add_1_tmp_1_idx18 <- elementwise_add_1_tmp_0_idx17\n",
      "I0706 20:30:07.164824 371264960 net.cpp:408] elementwise_add_1_tmp_1_idx18 -> elementwise_add_1_tmp_1_idx18\n",
      "I0706 20:30:07.164845 371264960 net.cpp:150] Setting up elementwise_add_1_tmp_1_idx18\n",
      "I0706 20:30:07.164852 371264960 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:30:07.164860 371264960 net.cpp:165] Memory required for data: 1847296\n",
      "I0706 20:30:07.164865 371264960 layer_factory.hpp:77] Creating layer elementwise_add_1_tmp_1_idx18_elementwise_add_1_tmp_1_idx18_0_split\n",
      "I0706 20:30:07.164924 371264960 net.cpp:100] Creating Layer elementwise_add_1_tmp_1_idx18_elementwise_add_1_tmp_1_idx18_0_split\n",
      "I0706 20:30:07.164934 371264960 net.cpp:434] elementwise_add_1_tmp_1_idx18_elementwise_add_1_tmp_1_idx18_0_split <- elementwise_add_1_tmp_1_idx18\n",
      "I0706 20:30:07.164950 371264960 net.cpp:408] elementwise_add_1_tmp_1_idx18_elementwise_add_1_tmp_1_idx18_0_split -> elementwise_add_1_tmp_1_idx18_elementwise_add_1_tmp_1_idx18_0_split_0\n",
      "I0706 20:30:07.164970 371264960 net.cpp:408] elementwise_add_1_tmp_1_idx18_elementwise_add_1_tmp_1_idx18_0_split -> elementwise_add_1_tmp_1_idx18_elementwise_add_1_tmp_1_idx18_0_split_1\n",
      "I0706 20:30:07.164991 371264960 net.cpp:150] Setting up elementwise_add_1_tmp_1_idx18_elementwise_add_1_tmp_1_idx18_0_split\n",
      "I0706 20:30:07.164999 371264960 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:30:07.165004 371264960 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:30:07.165010 371264960 net.cpp:165] Memory required for data: 1978368\n",
      "I0706 20:30:07.165016 371264960 layer_factory.hpp:77] Creating layer conv2d_5_tmp_0_idx19\n",
      "I0706 20:30:07.165084 371264960 net.cpp:100] Creating Layer conv2d_5_tmp_0_idx19\n",
      "I0706 20:30:07.165093 371264960 net.cpp:434] conv2d_5_tmp_0_idx19 <- elementwise_add_1_tmp_1_idx18_elementwise_add_1_tmp_1_idx18_0_split_0\n",
      "I0706 20:30:07.165117 371264960 net.cpp:408] conv2d_5_tmp_0_idx19 -> conv2d_5_tmp_0_idx19\n",
      "I0706 20:30:07.165174 371264960 net.cpp:150] Setting up conv2d_5_tmp_0_idx19\n",
      "I0706 20:30:07.165182 371264960 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:30:07.165190 371264960 net.cpp:165] Memory required for data: 2043904\n",
      "I0706 20:30:07.165202 371264960 layer_factory.hpp:77] Creating layer batch_norm_5_tmp_3_idx20_extra\n",
      "I0706 20:30:07.165220 371264960 net.cpp:100] Creating Layer batch_norm_5_tmp_3_idx20_extra\n",
      "I0706 20:30:07.165228 371264960 net.cpp:434] batch_norm_5_tmp_3_idx20_extra <- conv2d_5_tmp_0_idx19\n",
      "I0706 20:30:07.165244 371264960 net.cpp:408] batch_norm_5_tmp_3_idx20_extra -> batch_norm_5_tmp_3_idx20_extra\n",
      "I0706 20:30:07.165297 371264960 net.cpp:150] Setting up batch_norm_5_tmp_3_idx20_extra\n",
      "I0706 20:30:07.165308 371264960 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:30:07.165316 371264960 net.cpp:165] Memory required for data: 2109440\n",
      "I0706 20:30:07.165334 371264960 layer_factory.hpp:77] Creating layer batch_norm_5_tmp_3_idx20\n",
      "I0706 20:30:07.165354 371264960 net.cpp:100] Creating Layer batch_norm_5_tmp_3_idx20\n",
      "I0706 20:30:07.165364 371264960 net.cpp:434] batch_norm_5_tmp_3_idx20 <- batch_norm_5_tmp_3_idx20_extra\n",
      "I0706 20:30:07.165381 371264960 net.cpp:408] batch_norm_5_tmp_3_idx20 -> batch_norm_5_tmp_3_idx20\n",
      "I0706 20:30:07.165414 371264960 layer_factory.hpp:77] Creating layer batch_norm_5_tmp_3_idx20\n",
      "I0706 20:30:07.165475 371264960 scale_layer.cpp:92] axis_ is 1\n",
      "I0706 20:30:07.165485 371264960 scale_layer.cpp:93] num_axes is 1\n",
      "I0706 20:30:07.165504 371264960 scale_layer.cpp:94] bottom[0]->shape is 1 16 32 32 (16384)\n",
      "I0706 20:30:07.165513 371264960 scale_layer.cpp:95] scale->shape is 16 (16)\n",
      "I0706 20:30:07.165532 371264960 net.cpp:150] Setting up batch_norm_5_tmp_3_idx20\n",
      "I0706 20:30:07.165539 371264960 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:30:07.165545 371264960 net.cpp:165] Memory required for data: 2174976\n",
      "I0706 20:30:07.165561 371264960 layer_factory.hpp:77] Creating layer batch_norm_5_tmp_4_idx21\n",
      "I0706 20:30:07.165580 371264960 net.cpp:100] Creating Layer batch_norm_5_tmp_4_idx21\n",
      "I0706 20:30:07.165591 371264960 net.cpp:434] batch_norm_5_tmp_4_idx21 <- batch_norm_5_tmp_3_idx20\n",
      "I0706 20:30:07.165609 371264960 net.cpp:408] batch_norm_5_tmp_4_idx21 -> batch_norm_5_tmp_4_idx21\n",
      "I0706 20:30:07.165633 371264960 net.cpp:150] Setting up batch_norm_5_tmp_4_idx21\n",
      "I0706 20:30:07.165642 371264960 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:30:07.165688 371264960 net.cpp:165] Memory required for data: 2240512\n",
      "I0706 20:30:07.165705 371264960 layer_factory.hpp:77] Creating layer conv2d_6_tmp_0_idx22\n",
      "I0706 20:30:07.165740 371264960 net.cpp:100] Creating Layer conv2d_6_tmp_0_idx22\n",
      "I0706 20:30:07.165750 371264960 net.cpp:434] conv2d_6_tmp_0_idx22 <- batch_norm_5_tmp_4_idx21\n",
      "I0706 20:30:07.165767 371264960 net.cpp:408] conv2d_6_tmp_0_idx22 -> conv2d_6_tmp_0_idx22\n",
      "I0706 20:30:07.165832 371264960 net.cpp:150] Setting up conv2d_6_tmp_0_idx22\n",
      "I0706 20:30:07.165841 371264960 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:30:07.165849 371264960 net.cpp:165] Memory required for data: 2306048\n",
      "I0706 20:30:07.165860 371264960 layer_factory.hpp:77] Creating layer conv2d_6_tmp_1_idx23\n",
      "I0706 20:30:07.165879 371264960 net.cpp:100] Creating Layer conv2d_6_tmp_1_idx23\n",
      "I0706 20:30:07.165890 371264960 net.cpp:434] conv2d_6_tmp_1_idx23 <- conv2d_6_tmp_0_idx22\n",
      "I0706 20:30:07.165911 371264960 net.cpp:408] conv2d_6_tmp_1_idx23 -> conv2d_6_tmp_1_idx23\n",
      "I0706 20:30:07.166580 371264960 layer_factory.hpp:77] Creating layer conv2d_6_tmp_1_idx23\n",
      "I0706 20:30:07.166640 371264960 scale_layer.cpp:92] axis_ is 1\n",
      "I0706 20:30:07.166649 371264960 scale_layer.cpp:93] num_axes is 1\n",
      "I0706 20:30:07.166653 371264960 scale_layer.cpp:94] bottom[0]->shape is 1 16 32 32 (16384)\n",
      "I0706 20:30:07.166661 371264960 scale_layer.cpp:95] scale->shape is 16 (16)\n",
      "I0706 20:30:07.166674 371264960 net.cpp:150] Setting up conv2d_6_tmp_1_idx23\n",
      "I0706 20:30:07.166680 371264960 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:30:07.166687 371264960 net.cpp:165] Memory required for data: 2371584\n",
      "I0706 20:30:07.166703 371264960 layer_factory.hpp:77] Creating layer batch_norm_6_tmp_3_idx24_extra\n",
      "I0706 20:30:07.166719 371264960 net.cpp:100] Creating Layer batch_norm_6_tmp_3_idx24_extra\n",
      "I0706 20:30:07.166733 371264960 net.cpp:434] batch_norm_6_tmp_3_idx24_extra <- conv2d_6_tmp_1_idx23\n",
      "I0706 20:30:07.166750 371264960 net.cpp:408] batch_norm_6_tmp_3_idx24_extra -> batch_norm_6_tmp_3_idx24_extra\n",
      "I0706 20:30:07.166792 371264960 net.cpp:150] Setting up batch_norm_6_tmp_3_idx24_extra\n",
      "I0706 20:30:07.166810 371264960 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:30:07.166827 371264960 net.cpp:165] Memory required for data: 2437120\n",
      "I0706 20:30:07.166846 371264960 layer_factory.hpp:77] Creating layer batch_norm_6_tmp_3_idx24\n",
      "I0706 20:30:07.166862 371264960 net.cpp:100] Creating Layer batch_norm_6_tmp_3_idx24\n",
      "I0706 20:30:07.166872 371264960 net.cpp:434] batch_norm_6_tmp_3_idx24 <- batch_norm_6_tmp_3_idx24_extra\n",
      "I0706 20:30:07.166886 371264960 net.cpp:408] batch_norm_6_tmp_3_idx24 -> batch_norm_6_tmp_3_idx24\n",
      "I0706 20:30:07.166922 371264960 layer_factory.hpp:77] Creating layer batch_norm_6_tmp_3_idx24\n",
      "I0706 20:30:07.166985 371264960 scale_layer.cpp:92] axis_ is 1\n",
      "I0706 20:30:07.166996 371264960 scale_layer.cpp:93] num_axes is 1\n",
      "I0706 20:30:07.167003 371264960 scale_layer.cpp:94] bottom[0]->shape is 1 16 32 32 (16384)\n",
      "I0706 20:30:07.167013 371264960 scale_layer.cpp:95] scale->shape is 16 (16)\n",
      "I0706 20:30:07.167045 371264960 net.cpp:150] Setting up batch_norm_6_tmp_3_idx24\n",
      "I0706 20:30:07.167057 371264960 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:30:07.167065 371264960 net.cpp:165] Memory required for data: 2502656\n",
      "I0706 20:30:07.167081 371264960 layer_factory.hpp:77] Creating layer elementwise_add_2_tmp_0_idx25\n",
      "I0706 20:30:07.167104 371264960 net.cpp:100] Creating Layer elementwise_add_2_tmp_0_idx25\n",
      "I0706 20:30:07.167124 371264960 net.cpp:434] elementwise_add_2_tmp_0_idx25 <- batch_norm_6_tmp_3_idx24\n",
      "I0706 20:30:07.167146 371264960 net.cpp:434] elementwise_add_2_tmp_0_idx25 <- elementwise_add_1_tmp_1_idx18_elementwise_add_1_tmp_1_idx18_0_split_1\n",
      "I0706 20:30:07.167165 371264960 net.cpp:408] elementwise_add_2_tmp_0_idx25 -> elementwise_add_2_tmp_0_idx25\n",
      "I0706 20:30:07.167196 371264960 net.cpp:150] Setting up elementwise_add_2_tmp_0_idx25\n",
      "I0706 20:30:07.167205 371264960 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:30:07.167217 371264960 net.cpp:165] Memory required for data: 2568192\n",
      "I0706 20:30:07.167227 371264960 layer_factory.hpp:77] Creating layer elementwise_add_2_tmp_1_idx26\n",
      "I0706 20:30:07.167248 371264960 net.cpp:100] Creating Layer elementwise_add_2_tmp_1_idx26\n",
      "I0706 20:30:07.167258 371264960 net.cpp:434] elementwise_add_2_tmp_1_idx26 <- elementwise_add_2_tmp_0_idx25\n",
      "I0706 20:30:07.167275 371264960 net.cpp:408] elementwise_add_2_tmp_1_idx26 -> elementwise_add_2_tmp_1_idx26\n",
      "I0706 20:30:07.167295 371264960 net.cpp:150] Setting up elementwise_add_2_tmp_1_idx26\n",
      "I0706 20:30:07.167301 371264960 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:30:07.167308 371264960 net.cpp:165] Memory required for data: 2633728\n",
      "I0706 20:30:07.167315 371264960 layer_factory.hpp:77] Creating layer elementwise_add_2_tmp_1_idx26_elementwise_add_2_tmp_1_idx26_0_split\n",
      "I0706 20:30:07.167342 371264960 net.cpp:100] Creating Layer elementwise_add_2_tmp_1_idx26_elementwise_add_2_tmp_1_idx26_0_split\n",
      "I0706 20:30:07.167353 371264960 net.cpp:434] elementwise_add_2_tmp_1_idx26_elementwise_add_2_tmp_1_idx26_0_split <- elementwise_add_2_tmp_1_idx26\n",
      "I0706 20:30:07.167554 371264960 net.cpp:408] elementwise_add_2_tmp_1_idx26_elementwise_add_2_tmp_1_idx26_0_split -> elementwise_add_2_tmp_1_idx26_elementwise_add_2_tmp_1_idx26_0_split_0\n",
      "I0706 20:30:07.167609 371264960 net.cpp:408] elementwise_add_2_tmp_1_idx26_elementwise_add_2_tmp_1_idx26_0_split -> elementwise_add_2_tmp_1_idx26_elementwise_add_2_tmp_1_idx26_0_split_1\n",
      "I0706 20:30:07.167641 371264960 net.cpp:150] Setting up elementwise_add_2_tmp_1_idx26_elementwise_add_2_tmp_1_idx26_0_split\n",
      "I0706 20:30:07.167649 371264960 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:30:07.167657 371264960 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:30:07.167663 371264960 net.cpp:165] Memory required for data: 2764800\n",
      "I0706 20:30:07.167670 371264960 layer_factory.hpp:77] Creating layer conv2d_7_tmp_0_idx27\n",
      "I0706 20:30:07.167693 371264960 net.cpp:100] Creating Layer conv2d_7_tmp_0_idx27\n",
      "I0706 20:30:07.167706 371264960 net.cpp:434] conv2d_7_tmp_0_idx27 <- elementwise_add_2_tmp_1_idx26_elementwise_add_2_tmp_1_idx26_0_split_0\n",
      "I0706 20:30:07.167737 371264960 net.cpp:408] conv2d_7_tmp_0_idx27 -> conv2d_7_tmp_0_idx27\n",
      "I0706 20:30:07.167804 371264960 net.cpp:150] Setting up conv2d_7_tmp_0_idx27\n",
      "I0706 20:30:07.167814 371264960 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:30:07.167820 371264960 net.cpp:165] Memory required for data: 2830336\n",
      "I0706 20:30:07.167834 371264960 layer_factory.hpp:77] Creating layer batch_norm_7_tmp_3_idx28_extra\n",
      "I0706 20:30:07.167852 371264960 net.cpp:100] Creating Layer batch_norm_7_tmp_3_idx28_extra\n",
      "I0706 20:30:07.167861 371264960 net.cpp:434] batch_norm_7_tmp_3_idx28_extra <- conv2d_7_tmp_0_idx27\n",
      "I0706 20:30:07.167886 371264960 net.cpp:408] batch_norm_7_tmp_3_idx28_extra -> batch_norm_7_tmp_3_idx28_extra\n",
      "I0706 20:30:07.167943 371264960 net.cpp:150] Setting up batch_norm_7_tmp_3_idx28_extra\n",
      "I0706 20:30:07.167953 371264960 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:30:07.167959 371264960 net.cpp:165] Memory required for data: 2895872\n",
      "I0706 20:30:07.167977 371264960 layer_factory.hpp:77] Creating layer batch_norm_7_tmp_3_idx28\n",
      "I0706 20:30:07.167995 371264960 net.cpp:100] Creating Layer batch_norm_7_tmp_3_idx28\n",
      "I0706 20:30:07.168004 371264960 net.cpp:434] batch_norm_7_tmp_3_idx28 <- batch_norm_7_tmp_3_idx28_extra\n",
      "I0706 20:30:07.168023 371264960 net.cpp:408] batch_norm_7_tmp_3_idx28 -> batch_norm_7_tmp_3_idx28\n",
      "I0706 20:30:07.168057 371264960 layer_factory.hpp:77] Creating layer batch_norm_7_tmp_3_idx28\n",
      "I0706 20:30:07.168107 371264960 scale_layer.cpp:92] axis_ is 1\n",
      "I0706 20:30:07.168116 371264960 scale_layer.cpp:93] num_axes is 1\n",
      "I0706 20:30:07.168120 371264960 scale_layer.cpp:94] bottom[0]->shape is 1 16 32 32 (16384)\n",
      "I0706 20:30:07.168128 371264960 scale_layer.cpp:95] scale->shape is 16 (16)\n",
      "I0706 20:30:07.168143 371264960 net.cpp:150] Setting up batch_norm_7_tmp_3_idx28\n",
      "I0706 20:30:07.168148 371264960 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:30:07.168154 371264960 net.cpp:165] Memory required for data: 2961408\n",
      "I0706 20:30:07.168190 371264960 layer_factory.hpp:77] Creating layer batch_norm_7_tmp_4_idx29\n",
      "I0706 20:30:07.168207 371264960 net.cpp:100] Creating Layer batch_norm_7_tmp_4_idx29\n",
      "I0706 20:30:07.168216 371264960 net.cpp:434] batch_norm_7_tmp_4_idx29 <- batch_norm_7_tmp_3_idx28\n",
      "I0706 20:30:07.168231 371264960 net.cpp:408] batch_norm_7_tmp_4_idx29 -> batch_norm_7_tmp_4_idx29\n",
      "I0706 20:30:07.168257 371264960 net.cpp:150] Setting up batch_norm_7_tmp_4_idx29\n",
      "I0706 20:30:07.168267 371264960 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:30:07.168285 371264960 net.cpp:165] Memory required for data: 3026944\n",
      "I0706 20:30:07.168295 371264960 layer_factory.hpp:77] Creating layer conv2d_8_tmp_0_idx30\n",
      "I0706 20:30:07.168320 371264960 net.cpp:100] Creating Layer conv2d_8_tmp_0_idx30\n",
      "I0706 20:30:07.168331 371264960 net.cpp:434] conv2d_8_tmp_0_idx30 <- batch_norm_7_tmp_4_idx29\n",
      "I0706 20:30:07.168351 371264960 net.cpp:408] conv2d_8_tmp_0_idx30 -> conv2d_8_tmp_0_idx30\n",
      "I0706 20:30:07.168408 371264960 net.cpp:150] Setting up conv2d_8_tmp_0_idx30\n",
      "I0706 20:30:07.168416 371264960 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:30:07.168423 371264960 net.cpp:165] Memory required for data: 3092480\n",
      "I0706 20:30:07.168434 371264960 layer_factory.hpp:77] Creating layer conv2d_8_tmp_1_idx31\n",
      "I0706 20:30:07.168452 371264960 net.cpp:100] Creating Layer conv2d_8_tmp_1_idx31\n",
      "I0706 20:30:07.168464 371264960 net.cpp:434] conv2d_8_tmp_1_idx31 <- conv2d_8_tmp_0_idx30\n",
      "I0706 20:30:07.168486 371264960 net.cpp:408] conv2d_8_tmp_1_idx31 -> conv2d_8_tmp_1_idx31\n",
      "I0706 20:30:07.168520 371264960 layer_factory.hpp:77] Creating layer conv2d_8_tmp_1_idx31\n",
      "I0706 20:30:07.168561 371264960 scale_layer.cpp:92] axis_ is 1\n",
      "I0706 20:30:07.168568 371264960 scale_layer.cpp:93] num_axes is 1\n",
      "I0706 20:30:07.168572 371264960 scale_layer.cpp:94] bottom[0]->shape is 1 16 32 32 (16384)\n",
      "I0706 20:30:07.168578 371264960 scale_layer.cpp:95] scale->shape is 16 (16)\n",
      "I0706 20:30:07.168596 371264960 net.cpp:150] Setting up conv2d_8_tmp_1_idx31\n",
      "I0706 20:30:07.168602 371264960 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:30:07.168609 371264960 net.cpp:165] Memory required for data: 3158016\n",
      "I0706 20:30:07.168628 371264960 layer_factory.hpp:77] Creating layer batch_norm_8_tmp_3_idx32_extra\n",
      "I0706 20:30:07.168648 371264960 net.cpp:100] Creating Layer batch_norm_8_tmp_3_idx32_extra\n",
      "I0706 20:30:07.168660 371264960 net.cpp:434] batch_norm_8_tmp_3_idx32_extra <- conv2d_8_tmp_1_idx31\n",
      "I0706 20:30:07.168684 371264960 net.cpp:408] batch_norm_8_tmp_3_idx32_extra -> batch_norm_8_tmp_3_idx32_extra\n",
      "I0706 20:30:07.168732 371264960 net.cpp:150] Setting up batch_norm_8_tmp_3_idx32_extra\n",
      "I0706 20:30:07.168740 371264960 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:30:07.168746 371264960 net.cpp:165] Memory required for data: 3223552\n",
      "I0706 20:30:07.168762 371264960 layer_factory.hpp:77] Creating layer batch_norm_8_tmp_3_idx32\n",
      "I0706 20:30:07.168778 371264960 net.cpp:100] Creating Layer batch_norm_8_tmp_3_idx32\n",
      "I0706 20:30:07.168787 371264960 net.cpp:434] batch_norm_8_tmp_3_idx32 <- batch_norm_8_tmp_3_idx32_extra\n",
      "I0706 20:30:07.168802 371264960 net.cpp:408] batch_norm_8_tmp_3_idx32 -> batch_norm_8_tmp_3_idx32\n",
      "I0706 20:30:07.168838 371264960 layer_factory.hpp:77] Creating layer batch_norm_8_tmp_3_idx32\n",
      "I0706 20:30:07.168888 371264960 scale_layer.cpp:92] axis_ is 1\n",
      "I0706 20:30:07.168896 371264960 scale_layer.cpp:93] num_axes is 1\n",
      "I0706 20:30:07.168900 371264960 scale_layer.cpp:94] bottom[0]->shape is 1 16 32 32 (16384)\n",
      "I0706 20:30:07.168906 371264960 scale_layer.cpp:95] scale->shape is 16 (16)\n",
      "I0706 20:30:07.168921 371264960 net.cpp:150] Setting up batch_norm_8_tmp_3_idx32\n",
      "I0706 20:30:07.168927 371264960 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:30:07.168933 371264960 net.cpp:165] Memory required for data: 3289088\n",
      "I0706 20:30:07.168946 371264960 layer_factory.hpp:77] Creating layer elementwise_add_3_tmp_0_idx33\n",
      "I0706 20:30:07.168962 371264960 net.cpp:100] Creating Layer elementwise_add_3_tmp_0_idx33\n",
      "I0706 20:30:07.168972 371264960 net.cpp:434] elementwise_add_3_tmp_0_idx33 <- batch_norm_8_tmp_3_idx32\n",
      "I0706 20:30:07.168982 371264960 net.cpp:434] elementwise_add_3_tmp_0_idx33 <- elementwise_add_2_tmp_1_idx26_elementwise_add_2_tmp_1_idx26_0_split_1\n",
      "I0706 20:30:07.168999 371264960 net.cpp:408] elementwise_add_3_tmp_0_idx33 -> elementwise_add_3_tmp_0_idx33\n",
      "I0706 20:30:07.169049 371264960 net.cpp:150] Setting up elementwise_add_3_tmp_0_idx33\n",
      "I0706 20:30:07.169057 371264960 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:30:07.169063 371264960 net.cpp:165] Memory required for data: 3354624\n",
      "I0706 20:30:07.169070 371264960 layer_factory.hpp:77] Creating layer elementwise_add_3_tmp_1_idx34\n",
      "I0706 20:30:07.169082 371264960 net.cpp:100] Creating Layer elementwise_add_3_tmp_1_idx34\n",
      "I0706 20:30:07.169090 371264960 net.cpp:434] elementwise_add_3_tmp_1_idx34 <- elementwise_add_3_tmp_0_idx33\n",
      "I0706 20:30:07.169108 371264960 net.cpp:408] elementwise_add_3_tmp_1_idx34 -> elementwise_add_3_tmp_1_idx34\n",
      "I0706 20:30:07.169132 371264960 net.cpp:150] Setting up elementwise_add_3_tmp_1_idx34\n",
      "I0706 20:30:07.169139 371264960 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:30:07.169148 371264960 net.cpp:165] Memory required for data: 3420160\n",
      "I0706 20:30:07.169157 371264960 layer_factory.hpp:77] Creating layer elementwise_add_3_tmp_1_idx34_elementwise_add_3_tmp_1_idx34_0_split\n",
      "I0706 20:30:07.169239 371264960 net.cpp:100] Creating Layer elementwise_add_3_tmp_1_idx34_elementwise_add_3_tmp_1_idx34_0_split\n",
      "I0706 20:30:07.169260 371264960 net.cpp:434] elementwise_add_3_tmp_1_idx34_elementwise_add_3_tmp_1_idx34_0_split <- elementwise_add_3_tmp_1_idx34\n",
      "I0706 20:30:07.169286 371264960 net.cpp:408] elementwise_add_3_tmp_1_idx34_elementwise_add_3_tmp_1_idx34_0_split -> elementwise_add_3_tmp_1_idx34_elementwise_add_3_tmp_1_idx34_0_split_0\n",
      "I0706 20:30:07.169318 371264960 net.cpp:408] elementwise_add_3_tmp_1_idx34_elementwise_add_3_tmp_1_idx34_0_split -> elementwise_add_3_tmp_1_idx34_elementwise_add_3_tmp_1_idx34_0_split_1\n",
      "I0706 20:30:07.169370 371264960 net.cpp:150] Setting up elementwise_add_3_tmp_1_idx34_elementwise_add_3_tmp_1_idx34_0_split\n",
      "I0706 20:30:07.169384 371264960 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:30:07.169394 371264960 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:30:07.169404 371264960 net.cpp:165] Memory required for data: 3551232\n",
      "I0706 20:30:07.169414 371264960 layer_factory.hpp:77] Creating layer conv2d_9_tmp_0_idx35\n",
      "I0706 20:30:07.169451 371264960 net.cpp:100] Creating Layer conv2d_9_tmp_0_idx35\n",
      "I0706 20:30:07.169472 371264960 net.cpp:434] conv2d_9_tmp_0_idx35 <- elementwise_add_3_tmp_1_idx34_elementwise_add_3_tmp_1_idx34_0_split_0\n",
      "I0706 20:30:07.169502 371264960 net.cpp:408] conv2d_9_tmp_0_idx35 -> conv2d_9_tmp_0_idx35\n",
      "I0706 20:30:07.169595 371264960 net.cpp:150] Setting up conv2d_9_tmp_0_idx35\n",
      "I0706 20:30:07.169610 371264960 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:30:07.169620 371264960 net.cpp:165] Memory required for data: 3616768\n",
      "I0706 20:30:07.169637 371264960 layer_factory.hpp:77] Creating layer batch_norm_9_tmp_3_idx36_extra\n",
      "I0706 20:30:07.169674 371264960 net.cpp:100] Creating Layer batch_norm_9_tmp_3_idx36_extra\n",
      "I0706 20:30:07.169696 371264960 net.cpp:434] batch_norm_9_tmp_3_idx36_extra <- conv2d_9_tmp_0_idx35\n",
      "I0706 20:30:07.169715 371264960 net.cpp:408] batch_norm_9_tmp_3_idx36_extra -> batch_norm_9_tmp_3_idx36_extra\n",
      "I0706 20:30:07.169767 371264960 net.cpp:150] Setting up batch_norm_9_tmp_3_idx36_extra\n",
      "I0706 20:30:07.169775 371264960 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:30:07.169782 371264960 net.cpp:165] Memory required for data: 3682304\n",
      "I0706 20:30:07.169824 371264960 layer_factory.hpp:77] Creating layer batch_norm_9_tmp_3_idx36\n",
      "I0706 20:30:07.169855 371264960 net.cpp:100] Creating Layer batch_norm_9_tmp_3_idx36\n",
      "I0706 20:30:07.169873 371264960 net.cpp:434] batch_norm_9_tmp_3_idx36 <- batch_norm_9_tmp_3_idx36_extra\n",
      "I0706 20:30:07.169905 371264960 net.cpp:408] batch_norm_9_tmp_3_idx36 -> batch_norm_9_tmp_3_idx36\n",
      "I0706 20:30:07.169945 371264960 layer_factory.hpp:77] Creating layer batch_norm_9_tmp_3_idx36\n",
      "I0706 20:30:07.169986 371264960 scale_layer.cpp:92] axis_ is 1\n",
      "I0706 20:30:07.169992 371264960 scale_layer.cpp:93] num_axes is 1\n",
      "I0706 20:30:07.169996 371264960 scale_layer.cpp:94] bottom[0]->shape is 1 16 32 32 (16384)\n",
      "I0706 20:30:07.170003 371264960 scale_layer.cpp:95] scale->shape is 16 (16)\n",
      "I0706 20:30:07.170017 371264960 net.cpp:150] Setting up batch_norm_9_tmp_3_idx36\n",
      "I0706 20:30:07.170023 371264960 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:30:07.170030 371264960 net.cpp:165] Memory required for data: 3747840\n",
      "I0706 20:30:07.170045 371264960 layer_factory.hpp:77] Creating layer batch_norm_9_tmp_4_idx37\n",
      "I0706 20:30:07.170063 371264960 net.cpp:100] Creating Layer batch_norm_9_tmp_4_idx37\n",
      "I0706 20:30:07.170073 371264960 net.cpp:434] batch_norm_9_tmp_4_idx37 <- batch_norm_9_tmp_3_idx36\n",
      "I0706 20:30:07.170087 371264960 net.cpp:408] batch_norm_9_tmp_4_idx37 -> batch_norm_9_tmp_4_idx37\n",
      "I0706 20:30:07.170106 371264960 net.cpp:150] Setting up batch_norm_9_tmp_4_idx37\n",
      "I0706 20:30:07.170112 371264960 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:30:07.170118 371264960 net.cpp:165] Memory required for data: 3813376\n",
      "I0706 20:30:07.170125 371264960 layer_factory.hpp:77] Creating layer conv2d_10_tmp_0_idx38\n",
      "I0706 20:30:07.170140 371264960 net.cpp:100] Creating Layer conv2d_10_tmp_0_idx38\n",
      "I0706 20:30:07.170150 371264960 net.cpp:434] conv2d_10_tmp_0_idx38 <- batch_norm_9_tmp_4_idx37\n",
      "I0706 20:30:07.170163 371264960 net.cpp:408] conv2d_10_tmp_0_idx38 -> conv2d_10_tmp_0_idx38\n",
      "I0706 20:30:07.170212 371264960 net.cpp:150] Setting up conv2d_10_tmp_0_idx38\n",
      "I0706 20:30:07.170220 371264960 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:30:07.170226 371264960 net.cpp:165] Memory required for data: 3878912\n",
      "I0706 20:30:07.170236 371264960 layer_factory.hpp:77] Creating layer conv2d_10_tmp_1_idx39\n",
      "I0706 20:30:07.170285 371264960 net.cpp:100] Creating Layer conv2d_10_tmp_1_idx39\n",
      "I0706 20:30:07.170295 371264960 net.cpp:434] conv2d_10_tmp_1_idx39 <- conv2d_10_tmp_0_idx38\n",
      "I0706 20:30:07.170308 371264960 net.cpp:408] conv2d_10_tmp_1_idx39 -> conv2d_10_tmp_1_idx39\n",
      "I0706 20:30:07.170339 371264960 layer_factory.hpp:77] Creating layer conv2d_10_tmp_1_idx39\n",
      "I0706 20:30:07.170375 371264960 scale_layer.cpp:92] axis_ is 1\n",
      "I0706 20:30:07.170383 371264960 scale_layer.cpp:93] num_axes is 1\n",
      "I0706 20:30:07.170387 371264960 scale_layer.cpp:94] bottom[0]->shape is 1 16 32 32 (16384)\n",
      "I0706 20:30:07.170394 371264960 scale_layer.cpp:95] scale->shape is 16 (16)\n",
      "I0706 20:30:07.170408 371264960 net.cpp:150] Setting up conv2d_10_tmp_1_idx39\n",
      "I0706 20:30:07.170413 371264960 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:30:07.170418 371264960 net.cpp:165] Memory required for data: 3944448\n",
      "I0706 20:30:07.170432 371264960 layer_factory.hpp:77] Creating layer batch_norm_10_tmp_3_idx40_extra\n",
      "I0706 20:30:07.170449 371264960 net.cpp:100] Creating Layer batch_norm_10_tmp_3_idx40_extra\n",
      "I0706 20:30:07.170456 371264960 net.cpp:434] batch_norm_10_tmp_3_idx40_extra <- conv2d_10_tmp_1_idx39\n",
      "I0706 20:30:07.170475 371264960 net.cpp:408] batch_norm_10_tmp_3_idx40_extra -> batch_norm_10_tmp_3_idx40_extra\n",
      "I0706 20:30:07.170513 371264960 net.cpp:150] Setting up batch_norm_10_tmp_3_idx40_extra\n",
      "I0706 20:30:07.170521 371264960 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:30:07.170527 371264960 net.cpp:165] Memory required for data: 4009984\n",
      "I0706 20:30:07.170543 371264960 layer_factory.hpp:77] Creating layer batch_norm_10_tmp_3_idx40\n",
      "I0706 20:30:07.170558 371264960 net.cpp:100] Creating Layer batch_norm_10_tmp_3_idx40\n",
      "I0706 20:30:07.170567 371264960 net.cpp:434] batch_norm_10_tmp_3_idx40 <- batch_norm_10_tmp_3_idx40_extra\n",
      "I0706 20:30:07.170583 371264960 net.cpp:408] batch_norm_10_tmp_3_idx40 -> batch_norm_10_tmp_3_idx40\n",
      "I0706 20:30:07.170614 371264960 layer_factory.hpp:77] Creating layer batch_norm_10_tmp_3_idx40\n",
      "I0706 20:30:07.170653 371264960 scale_layer.cpp:92] axis_ is 1\n",
      "I0706 20:30:07.170661 371264960 scale_layer.cpp:93] num_axes is 1\n",
      "I0706 20:30:07.170665 371264960 scale_layer.cpp:94] bottom[0]->shape is 1 16 32 32 (16384)\n",
      "I0706 20:30:07.170671 371264960 scale_layer.cpp:95] scale->shape is 16 (16)\n",
      "I0706 20:30:07.170711 371264960 net.cpp:150] Setting up batch_norm_10_tmp_3_idx40\n",
      "I0706 20:30:07.170722 371264960 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:30:07.170729 371264960 net.cpp:165] Memory required for data: 4075520\n",
      "I0706 20:30:07.170758 371264960 layer_factory.hpp:77] Creating layer elementwise_add_4_tmp_0_idx41\n",
      "I0706 20:30:07.170778 371264960 net.cpp:100] Creating Layer elementwise_add_4_tmp_0_idx41\n",
      "I0706 20:30:07.170785 371264960 net.cpp:434] elementwise_add_4_tmp_0_idx41 <- batch_norm_10_tmp_3_idx40\n",
      "I0706 20:30:07.170800 371264960 net.cpp:434] elementwise_add_4_tmp_0_idx41 <- elementwise_add_3_tmp_1_idx34_elementwise_add_3_tmp_1_idx34_0_split_1\n",
      "I0706 20:30:07.170815 371264960 net.cpp:408] elementwise_add_4_tmp_0_idx41 -> elementwise_add_4_tmp_0_idx41\n",
      "I0706 20:30:07.170867 371264960 net.cpp:150] Setting up elementwise_add_4_tmp_0_idx41\n",
      "I0706 20:30:07.170878 371264960 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:30:07.170886 371264960 net.cpp:165] Memory required for data: 4141056\n",
      "I0706 20:30:07.170893 371264960 layer_factory.hpp:77] Creating layer elementwise_add_4_tmp_1_idx42\n",
      "I0706 20:30:07.170908 371264960 net.cpp:100] Creating Layer elementwise_add_4_tmp_1_idx42\n",
      "I0706 20:30:07.170917 371264960 net.cpp:434] elementwise_add_4_tmp_1_idx42 <- elementwise_add_4_tmp_0_idx41\n",
      "I0706 20:30:07.170933 371264960 net.cpp:408] elementwise_add_4_tmp_1_idx42 -> elementwise_add_4_tmp_1_idx42\n",
      "I0706 20:30:07.170965 371264960 net.cpp:150] Setting up elementwise_add_4_tmp_1_idx42\n",
      "I0706 20:30:07.170975 371264960 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:30:07.170982 371264960 net.cpp:165] Memory required for data: 4206592\n",
      "I0706 20:30:07.170989 371264960 layer_factory.hpp:77] Creating layer pool2d_0_tmp_0_idx43\n",
      "I0706 20:30:07.171293 371264960 net.cpp:100] Creating Layer pool2d_0_tmp_0_idx43\n",
      "I0706 20:30:07.171314 371264960 net.cpp:434] pool2d_0_tmp_0_idx43 <- elementwise_add_4_tmp_1_idx42\n",
      "I0706 20:30:07.171345 371264960 net.cpp:408] pool2d_0_tmp_0_idx43 -> pool2d_0_tmp_0_idx43\n",
      "I0706 20:30:07.171967 371264960 net.cpp:150] Setting up pool2d_0_tmp_0_idx43\n",
      "I0706 20:30:07.171981 371264960 net.cpp:157] Top shape: 1 16 25 25 (10000)\n",
      "I0706 20:30:07.171990 371264960 net.cpp:165] Memory required for data: 4246592\n",
      "I0706 20:30:07.171998 371264960 layer_factory.hpp:77] Creating layer fc_0_tmp_0_idx44_extra_x\n",
      "I0706 20:30:07.172287 371264960 net.cpp:100] Creating Layer fc_0_tmp_0_idx44_extra_x\n",
      "I0706 20:30:07.172299 371264960 net.cpp:434] fc_0_tmp_0_idx44_extra_x <- pool2d_0_tmp_0_idx43\n",
      "I0706 20:30:07.172315 371264960 net.cpp:408] fc_0_tmp_0_idx44_extra_x -> fc_0_tmp_0_idx44_extra_x\n",
      "I0706 20:30:07.172498 371264960 net.cpp:150] Setting up fc_0_tmp_0_idx44_extra_x\n",
      "I0706 20:30:07.172508 371264960 net.cpp:157] Top shape: 1 10000 (10000)\n",
      "I0706 20:30:07.172513 371264960 net.cpp:165] Memory required for data: 4286592\n",
      "I0706 20:30:07.172519 371264960 layer_factory.hpp:77] Creating layer fc_0_tmp_0_idx44\n",
      "I0706 20:30:07.172945 371264960 net.cpp:100] Creating Layer fc_0_tmp_0_idx44\n",
      "I0706 20:30:07.172963 371264960 net.cpp:434] fc_0_tmp_0_idx44 <- fc_0_tmp_0_idx44_extra_x\n",
      "I0706 20:30:07.172986 371264960 net.cpp:408] fc_0_tmp_0_idx44 -> fc_0_tmp_0_idx44\n",
      "I0706 20:30:07.173749 371264960 net.cpp:150] Setting up fc_0_tmp_0_idx44\n",
      "I0706 20:30:07.173760 371264960 net.cpp:157] Top shape: 1 10 (10)\n",
      "I0706 20:30:07.173768 371264960 net.cpp:165] Memory required for data: 4286632\n",
      "I0706 20:30:07.173782 371264960 layer_factory.hpp:77] Creating layer fc_0_tmp_1_idx45\n",
      "I0706 20:30:07.173800 371264960 net.cpp:100] Creating Layer fc_0_tmp_1_idx45\n",
      "I0706 20:30:07.173808 371264960 net.cpp:434] fc_0_tmp_1_idx45 <- fc_0_tmp_0_idx44\n",
      "I0706 20:30:07.173828 371264960 net.cpp:408] fc_0_tmp_1_idx45 -> fc_0_tmp_1_idx45\n",
      "I0706 20:30:07.173866 371264960 layer_factory.hpp:77] Creating layer fc_0_tmp_1_idx45\n",
      "I0706 20:30:07.173908 371264960 scale_layer.cpp:92] axis_ is 1\n",
      "I0706 20:30:07.173915 371264960 scale_layer.cpp:93] num_axes is 1\n",
      "I0706 20:30:07.173919 371264960 scale_layer.cpp:94] bottom[0]->shape is 1 10 (10)\n",
      "I0706 20:30:07.173925 371264960 scale_layer.cpp:95] scale->shape is 10 (10)\n",
      "I0706 20:30:07.173939 371264960 net.cpp:150] Setting up fc_0_tmp_1_idx45\n",
      "I0706 20:30:07.173944 371264960 net.cpp:157] Top shape: 1 10 (10)\n",
      "I0706 20:30:07.173949 371264960 net.cpp:165] Memory required for data: 4286672\n",
      "I0706 20:30:07.173962 371264960 layer_factory.hpp:77] Creating layer fc_0_tmp_2_idx46\n",
      "I0706 20:30:07.173991 371264960 net.cpp:100] Creating Layer fc_0_tmp_2_idx46\n",
      "I0706 20:30:07.174001 371264960 net.cpp:434] fc_0_tmp_2_idx46 <- fc_0_tmp_1_idx45\n",
      "I0706 20:30:07.174017 371264960 net.cpp:408] fc_0_tmp_2_idx46 -> fc_0_tmp_2_idx46\n",
      "I0706 20:30:07.174438 371264960 net.cpp:150] Setting up fc_0_tmp_2_idx46\n",
      "I0706 20:30:07.174448 371264960 net.cpp:157] Top shape: 1 10 (10)\n",
      "I0706 20:30:07.174454 371264960 net.cpp:165] Memory required for data: 4286712\n",
      "I0706 20:30:07.174465 371264960 net.cpp:228] fc_0_tmp_2_idx46 does not need backward computation.\n",
      "I0706 20:30:07.174472 371264960 net.cpp:228] fc_0_tmp_1_idx45 does not need backward computation.\n",
      "I0706 20:30:07.174477 371264960 net.cpp:228] fc_0_tmp_0_idx44 does not need backward computation.\n",
      "I0706 20:30:07.174484 371264960 net.cpp:228] fc_0_tmp_0_idx44_extra_x does not need backward computation.\n",
      "I0706 20:30:07.174490 371264960 net.cpp:228] pool2d_0_tmp_0_idx43 does not need backward computation.\n",
      "I0706 20:30:07.174496 371264960 net.cpp:228] elementwise_add_4_tmp_1_idx42 does not need backward computation.\n",
      "I0706 20:30:07.174504 371264960 net.cpp:228] elementwise_add_4_tmp_0_idx41 does not need backward computation.\n",
      "I0706 20:30:07.174513 371264960 net.cpp:228] batch_norm_10_tmp_3_idx40 does not need backward computation.\n",
      "I0706 20:30:07.174520 371264960 net.cpp:228] batch_norm_10_tmp_3_idx40_extra does not need backward computation.\n",
      "I0706 20:30:07.174608 371264960 net.cpp:228] conv2d_10_tmp_1_idx39 does not need backward computation.\n",
      "I0706 20:30:07.174621 371264960 net.cpp:228] conv2d_10_tmp_0_idx38 does not need backward computation.\n",
      "I0706 20:30:07.174629 371264960 net.cpp:228] batch_norm_9_tmp_4_idx37 does not need backward computation.\n",
      "I0706 20:30:07.174636 371264960 net.cpp:228] batch_norm_9_tmp_3_idx36 does not need backward computation.\n",
      "I0706 20:30:07.174644 371264960 net.cpp:228] batch_norm_9_tmp_3_idx36_extra does not need backward computation.\n",
      "I0706 20:30:07.174650 371264960 net.cpp:228] conv2d_9_tmp_0_idx35 does not need backward computation.\n",
      "I0706 20:30:07.174659 371264960 net.cpp:228] elementwise_add_3_tmp_1_idx34_elementwise_add_3_tmp_1_idx34_0_split does not need backward computation.\n",
      "I0706 20:30:07.174665 371264960 net.cpp:228] elementwise_add_3_tmp_1_idx34 does not need backward computation.\n",
      "I0706 20:30:07.174672 371264960 net.cpp:228] elementwise_add_3_tmp_0_idx33 does not need backward computation.\n",
      "I0706 20:30:07.174680 371264960 net.cpp:228] batch_norm_8_tmp_3_idx32 does not need backward computation.\n",
      "I0706 20:30:07.174773 371264960 net.cpp:228] batch_norm_8_tmp_3_idx32_extra does not need backward computation.\n",
      "I0706 20:30:07.174782 371264960 net.cpp:228] conv2d_8_tmp_1_idx31 does not need backward computation.\n",
      "I0706 20:30:07.174788 371264960 net.cpp:228] conv2d_8_tmp_0_idx30 does not need backward computation.\n",
      "I0706 20:30:07.174795 371264960 net.cpp:228] batch_norm_7_tmp_4_idx29 does not need backward computation.\n",
      "I0706 20:30:07.174801 371264960 net.cpp:228] batch_norm_7_tmp_3_idx28 does not need backward computation.\n",
      "I0706 20:30:07.174809 371264960 net.cpp:228] batch_norm_7_tmp_3_idx28_extra does not need backward computation.\n",
      "I0706 20:30:07.174816 371264960 net.cpp:228] conv2d_7_tmp_0_idx27 does not need backward computation.\n",
      "I0706 20:30:07.174824 371264960 net.cpp:228] elementwise_add_2_tmp_1_idx26_elementwise_add_2_tmp_1_idx26_0_split does not need backward computation.\n",
      "I0706 20:30:07.174831 371264960 net.cpp:228] elementwise_add_2_tmp_1_idx26 does not need backward computation.\n",
      "I0706 20:30:07.174839 371264960 net.cpp:228] elementwise_add_2_tmp_0_idx25 does not need backward computation.\n",
      "I0706 20:30:07.174846 371264960 net.cpp:228] batch_norm_6_tmp_3_idx24 does not need backward computation.\n",
      "I0706 20:30:07.174854 371264960 net.cpp:228] batch_norm_6_tmp_3_idx24_extra does not need backward computation.\n",
      "I0706 20:30:07.174860 371264960 net.cpp:228] conv2d_6_tmp_1_idx23 does not need backward computation.\n",
      "I0706 20:30:07.174867 371264960 net.cpp:228] conv2d_6_tmp_0_idx22 does not need backward computation.\n",
      "I0706 20:30:07.174960 371264960 net.cpp:228] batch_norm_5_tmp_4_idx21 does not need backward computation.\n",
      "I0706 20:30:07.174968 371264960 net.cpp:228] batch_norm_5_tmp_3_idx20 does not need backward computation.\n",
      "I0706 20:30:07.174976 371264960 net.cpp:228] batch_norm_5_tmp_3_idx20_extra does not need backward computation.\n",
      "I0706 20:30:07.174983 371264960 net.cpp:228] conv2d_5_tmp_0_idx19 does not need backward computation.\n",
      "I0706 20:30:07.174991 371264960 net.cpp:228] elementwise_add_1_tmp_1_idx18_elementwise_add_1_tmp_1_idx18_0_split does not need backward computation.\n",
      "I0706 20:30:07.174998 371264960 net.cpp:228] elementwise_add_1_tmp_1_idx18 does not need backward computation.\n",
      "I0706 20:30:07.175006 371264960 net.cpp:228] elementwise_add_1_tmp_0_idx17 does not need backward computation.\n",
      "I0706 20:30:07.175014 371264960 net.cpp:228] batch_norm_4_tmp_3_idx16 does not need backward computation.\n",
      "I0706 20:30:07.175021 371264960 net.cpp:228] batch_norm_4_tmp_3_idx16_extra does not need backward computation.\n",
      "I0706 20:30:07.175029 371264960 net.cpp:228] conv2d_4_tmp_1_idx15 does not need backward computation.\n",
      "I0706 20:30:07.175115 371264960 net.cpp:228] conv2d_4_tmp_0_idx14 does not need backward computation.\n",
      "I0706 20:30:07.175123 371264960 net.cpp:228] batch_norm_3_tmp_4_idx13 does not need backward computation.\n",
      "I0706 20:30:07.175130 371264960 net.cpp:228] batch_norm_3_tmp_3_idx12 does not need backward computation.\n",
      "I0706 20:30:07.175138 371264960 net.cpp:228] batch_norm_3_tmp_3_idx12_extra does not need backward computation.\n",
      "I0706 20:30:07.175145 371264960 net.cpp:228] conv2d_3_tmp_0_idx11 does not need backward computation.\n",
      "I0706 20:30:07.175153 371264960 net.cpp:228] elementwise_add_0_tmp_1_idx10_elementwise_add_0_tmp_1_idx10_0_split does not need backward computation.\n",
      "I0706 20:30:07.175163 371264960 net.cpp:228] elementwise_add_0_tmp_1_idx10 does not need backward computation.\n",
      "I0706 20:30:07.175170 371264960 net.cpp:228] elementwise_add_0_tmp_0_idx9 does not need backward computation.\n",
      "I0706 20:30:07.175179 371264960 net.cpp:228] batch_norm_2_tmp_3_idx8 does not need backward computation.\n",
      "I0706 20:30:07.175185 371264960 net.cpp:228] batch_norm_2_tmp_3_idx8_extra does not need backward computation.\n",
      "I0706 20:30:07.175264 371264960 net.cpp:228] conv2d_2_tmp_1_idx7 does not need backward computation.\n",
      "I0706 20:30:07.175272 371264960 net.cpp:228] conv2d_2_tmp_0_idx6 does not need backward computation.\n",
      "I0706 20:30:07.175280 371264960 net.cpp:228] batch_norm_1_tmp_4_idx5 does not need backward computation.\n",
      "I0706 20:30:07.175287 371264960 net.cpp:228] batch_norm_1_tmp_3_idx4 does not need backward computation.\n",
      "I0706 20:30:07.175294 371264960 net.cpp:228] batch_norm_1_tmp_3_idx4_extra does not need backward computation.\n",
      "I0706 20:30:07.175302 371264960 net.cpp:228] conv2d_1_tmp_0_idx3 does not need backward computation.\n",
      "I0706 20:30:07.175310 371264960 net.cpp:228] batch_norm_0_tmp_4_idx2_batch_norm_0_tmp_4_idx2_0_split does not need backward computation.\n",
      "I0706 20:30:07.175318 371264960 net.cpp:228] batch_norm_0_tmp_4_idx2 does not need backward computation.\n",
      "I0706 20:30:07.175325 371264960 net.cpp:228] batch_norm_0_tmp_3_idx1 does not need backward computation.\n",
      "I0706 20:30:07.175333 371264960 net.cpp:228] batch_norm_0_tmp_3_idx1_extra does not need backward computation.\n",
      "I0706 20:30:07.175411 371264960 net.cpp:228] conv2d_0_tmp_0_idx0 does not need backward computation.\n",
      "I0706 20:30:07.175419 371264960 net.cpp:228] pixel_idx0 does not need backward computation.\n",
      "I0706 20:30:07.175424 371264960 net.cpp:270] This network produces output fc_0_tmp_2_idx46\n",
      "I0706 20:30:07.175643 371264960 net.cpp:283] Network initialization done.\n",
      "I0706 20:30:07.179179 371264960 net.cpp:860] Serializing 65 layers\n",
      "2022-07-06 20:30:07 [INFO]\tCaffe model files are saved as [caffe-model/paddle2caffe.prototxt] and [caffe-model/paddle2caffe.caffemodel].\n"
     ]
    }
   ],
   "source": [
    "# Run the converter from the shell. If Jupyter cannot find the paddle2caffe\n",
    "# command, run it from a terminal instead.\n",
    "\n",
    "# -p makes mkdir idempotent, so the cell survives Restart & Run All\n",
    "!mkdir -p ./caffe-model\n",
    "!paddle2caffe --model_dir ./resnet_not_combined/  --save_file caffe-model/paddle2caffe"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
     "## 3. Convert a combined PaddlePaddle model (parameters saved in one binary file)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "['save_infer_model/scale_0.tmp_1']"
      ]
     },
     "execution_count": 11,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Save the network in 'combined' format: the graph goes to __model__ and all\n",
    "# parameters are merged into a single __params__ binary file.\n",
    "# NOTE(review): relies on `predict` and `exe` defined in earlier cells;\n",
    "# this cell cannot run on a fresh kernel by itself.\n",
    "model_dir = './resnet_combined/'\n",
    "fluid.io.save_inference_model(model_dir, [\"pixel\"], [predict], exe, model_filename='__model__', params_filename='__params__')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "__model__  __params__\r\n"
     ]
    }
   ],
   "source": [
    "# Confirm that both __model__ and __params__ files were written\n",
    "!ls ./resnet_combined/"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "objc[23908]: Class CaptureDelegate is implemented in both /usr/local/lib/python3.7/site-packages/cv2/cv2.cpython-37m-darwin.so (0x1141dfc18) and /usr/local/opt/opencv@3/lib/libopencv_videoio.3.4.dylib (0x1181af070). One of the two will be used. Which one is undefined.\n",
      "objc[23908]: Class CVWindow is implemented in both /usr/local/lib/python3.7/site-packages/cv2/cv2.cpython-37m-darwin.so (0x1141dfc68) and /usr/local/opt/opencv@3/lib/libopencv_highgui.3.4.dylib (0x11816f0a8). One of the two will be used. Which one is undefined.\n",
      "objc[23908]: Class CVView is implemented in both /usr/local/lib/python3.7/site-packages/cv2/cv2.cpython-37m-darwin.so (0x1141dfc90) and /usr/local/opt/opencv@3/lib/libopencv_highgui.3.4.dylib (0x11816f0d0). One of the two will be used. Which one is undefined.\n",
      "objc[23908]: Class CVSlider is implemented in both /usr/local/lib/python3.7/site-packages/cv2/cv2.cpython-37m-darwin.so (0x1141dfcb8) and /usr/local/opt/opencv@3/lib/libopencv_highgui.3.4.dylib (0x11816f0f8). One of the two will be used. Which one is undefined.\n",
      "2022-07-06 20:31:00 [INFO]\tpaddle graph info: \n",
      "2022-07-06 20:31:00 [INFO]\tinput: ['pixel_idx0'], output: ['save_infer_model_scale_0_tmp_1_idx48'], info: {'conv2d': 11, 'batch_norm': 11, 'relu': 11, 'elementwise_add': 11, 'pool2d': 1, 'mul': 1, 'softmax': 1, 'scale': 1}\n",
      "\u001B[1;31;40m2022-07-06 20:31:00 [WARNING]\tcaffe do not support exclusive param, will get ignored\u001B[0m\n",
      "2022-07-06 20:31:00 [INFO]\tun-identical mapping with node name list (['mul_idx44_extra_x_flatten', 'mul_idx44'])\n",
      "2022-07-06 20:31:00 [INFO]\tun-identical mapping with node name list ([])\n",
      "2022-07-06 20:31:00 [INFO]\tcaffe graph info: \n",
      "2022-07-06 20:31:00 [INFO]\tinput: ['pixel_idx0'], output: ['fc_0_tmp_2_idx46'], info: {'Convolution': 11, 'BatchNorm': 11, 'Scale': 17, 'ReLU': 11, 'Eltwise': 5, 'Pooling': 1, 'Flatten': 1, 'InnerProduct': 1, 'Softmax': 1}\n",
      "2022-07-06 20:31:00 [INFO]\tpass opt_caffe_cutout_dropout is skipped\n",
      "\u001B[1;31;40m2022-07-06 20:31:00 [WARNING]\twill remove exist file\u001B[0m\n",
      "\u001B[1;31;40m2022-07-06 20:31:00 [WARNING]\twill remove exist file\u001B[0m\n",
      "WARNING: Logging before InitGoogleLogging() is written to STDERR\n",
      "I0706 20:31:00.938488 424177088 net.cpp:58] Initializing net from parameters: \n",
      "state {\n",
      "  phase: TRAIN\n",
      "  level: 0\n",
      "}\n",
      "layer {\n",
      "  name: \"pixel_idx0\"\n",
      "  type: \"Input\"\n",
      "  top: \"pixel_idx0\"\n",
      "  input_param {\n",
      "    shape {\n",
      "      dim: 1\n",
      "      dim: 3\n",
      "      dim: 32\n",
      "      dim: 32\n",
      "    }\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"conv2d_0_tmp_0_idx0\"\n",
      "  type: \"Convolution\"\n",
      "  bottom: \"pixel_idx0\"\n",
      "  top: \"conv2d_0_tmp_0_idx0\"\n",
      "  convolution_param {\n",
      "    num_output: 16\n",
      "    bias_term: false\n",
      "    group: 1\n",
      "    pad_h: 1\n",
      "    pad_w: 1\n",
      "    kernel_h: 3\n",
      "    kernel_w: 3\n",
      "    stride_h: 1\n",
      "    stride_w: 1\n",
      "    dilation: 1\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"batch_norm_0_tmp_3_idx1_extra\"\n",
      "  type: \"BatchNorm\"\n",
      "  bottom: \"conv2d_0_tmp_0_idx0\"\n",
      "  top: \"batch_norm_0_tmp_3_idx1_extra\"\n",
      "  batch_norm_param {\n",
      "    use_global_stats: true\n",
      "    eps: 1e-05\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"batch_norm_0_tmp_3_idx1\"\n",
      "  type: \"Scale\"\n",
      "  bottom: \"batch_norm_0_tmp_3_idx1_extra\"\n",
      "  top: \"batch_norm_0_tmp_3_idx1\"\n",
      "  scale_param {\n",
      "    axis: 1\n",
      "    num_axes: 1\n",
      "    bias_term: true\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"batch_norm_0_tmp_4_idx2\"\n",
      "  type: \"ReLU\"\n",
      "  bottom: \"batch_norm_0_tmp_3_idx1\"\n",
      "  top: \"batch_norm_0_tmp_4_idx2\"\n",
      "}\n",
      "layer {\n",
      "  name: \"conv2d_1_tmp_0_idx3\"\n",
      "  type: \"Convolution\"\n",
      "  bottom: \"batch_norm_0_tmp_4_idx2\"\n",
      "  top: \"conv2d_1_tmp_0_idx3\"\n",
      "  convolution_param {\n",
      "    num_output: 16\n",
      "    bias_term: false\n",
      "    group: 1\n",
      "    pad_h: 1\n",
      "    pad_w: 1\n",
      "    kernel_h: 3\n",
      "    kernel_w: 3\n",
      "    stride_h: 1\n",
      "    stride_w: 1\n",
      "    dilation: 1\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"batch_norm_1_tmp_3_idx4_extra\"\n",
      "  type: \"BatchNorm\"\n",
      "  bottom: \"conv2d_1_tmp_0_idx3\"\n",
      "  top: \"batch_norm_1_tmp_3_idx4_extra\"\n",
      "  batch_norm_param {\n",
      "    use_global_stats: true\n",
      "    eps: 1e-05\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"batch_norm_1_tmp_3_idx4\"\n",
      "  type: \"Scale\"\n",
      "  bottom: \"batch_norm_1_tmp_3_idx4_extra\"\n",
      "  top: \"batch_norm_1_tmp_3_idx4\"\n",
      "  scale_param {\n",
      "    axis: 1\n",
      "    num_axes: 1\n",
      "    bias_term: true\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"batch_norm_1_tmp_4_idx5\"\n",
      "  type: \"ReLU\"\n",
      "  bottom: \"batch_norm_1_tmp_3_idx4\"\n",
      "  top: \"batch_norm_1_tmp_4_idx5\"\n",
      "}\n",
      "layer {\n",
      "  name: \"conv2d_2_tmp_0_idx6\"\n",
      "  type: \"Convolution\"\n",
      "  bottom: \"batch_norm_1_tmp_4_idx5\"\n",
      "  top: \"conv2d_2_tmp_0_idx6\"\n",
      "  convolution_param {\n",
      "    num_output: 16\n",
      "    bias_term: false\n",
      "    group: 1\n",
      "    pad_h: 1\n",
      "    pad_w: 1\n",
      "    kernel_h: 3\n",
      "    kernel_w: 3\n",
      "    stride_h: 1\n",
      "    stride_w: 1\n",
      "    dilation: 1\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"conv2d_2_tmp_1_idx7\"\n",
      "  type: \"Scale\"\n",
      "  bottom: \"conv2d_2_tmp_0_idx6\"\n",
      "  top: \"conv2d_2_tmp_1_idx7\"\n",
      "  scale_param {\n",
      "    axis: 1\n",
      "    num_axes: 1\n",
      "    bias_term: true\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"batch_norm_2_tmp_3_idx8_extra\"\n",
      "  type: \"BatchNorm\"\n",
      "  bottom: \"conv2d_2_tmp_1_idx7\"\n",
      "  top: \"batch_norm_2_tmp_3_idx8_extra\"\n",
      "  batch_norm_param {\n",
      "    use_global_stats: true\n",
      "    eps: 1e-05\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"batch_norm_2_tmp_3_idx8\"\n",
      "  type: \"Scale\"\n",
      "  bottom: \"batch_norm_2_tmp_3_idx8_extra\"\n",
      "  top: \"batch_norm_2_tmp_3_idx8\"\n",
      "  scale_param {\n",
      "    axis: 1\n",
      "    num_axes: 1\n",
      "    bias_term: true\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"elementwise_add_0_tmp_0_idx9\"\n",
      "  type: \"Eltwise\"\n",
      "  bottom: \"batch_norm_2_tmp_3_idx8\"\n",
      "  bottom: \"batch_norm_0_tmp_4_idx2\"\n",
      "  top: \"elementwise_add_0_tmp_0_idx9\"\n",
      "  eltwise_param {\n",
      "    operation: SUM\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"elementwise_add_0_tmp_1_idx10\"\n",
      "  type: \"ReLU\"\n",
      "  bottom: \"elementwise_add_0_tmp_0_idx9\"\n",
      "  top: \"elementwise_add_0_tmp_1_idx10\"\n",
      "}\n",
      "layer {\n",
      "  name: \"conv2d_3_tmp_0_idx11\"\n",
      "  type: \"Convolution\"\n",
      "  bottom: \"elementwise_add_0_tmp_1_idx10\"\n",
      "  top: \"conv2d_3_tmp_0_idx11\"\n",
      "  convolution_param {\n",
      "    num_output: 16\n",
      "    bias_term: false\n",
      "    group: 1\n",
      "    pad_h: 1\n",
      "    pad_w: 1\n",
      "    kernel_h: 3\n",
      "    kernel_w: 3\n",
      "    stride_h: 1\n",
      "    stride_w: 1\n",
      "    dilation: 1\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"batch_norm_3_tmp_3_idx12_extra\"\n",
      "  type: \"BatchNorm\"\n",
      "  bottom: \"conv2d_3_tmp_0_idx11\"\n",
      "  top: \"batch_norm_3_tmp_3_idx12_extra\"\n",
      "  batch_norm_param {\n",
      "    use_global_stats: true\n",
      "    eps: 1e-05\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"batch_norm_3_tmp_3_idx12\"\n",
      "  type: \"Scale\"\n",
      "  bottom: \"batch_norm_3_tmp_3_idx12_extra\"\n",
      "  top: \"batch_norm_3_tmp_3_idx12\"\n",
      "  scale_param {\n",
      "    axis: 1\n",
      "    num_axes: 1\n",
      "    bias_term: true\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"batch_norm_3_tmp_4_idx13\"\n",
      "  type: \"ReLU\"\n",
      "  bottom: \"batch_norm_3_tmp_3_idx12\"\n",
      "  top: \"batch_norm_3_tmp_4_idx13\"\n",
      "}\n",
      "layer {\n",
      "  name: \"conv2d_4_tmp_0_idx14\"\n",
      "  type: \"Convolution\"\n",
      "  bottom: \"batch_norm_3_tmp_4_idx13\"\n",
      "  top: \"conv2d_4_tmp_0_idx14\"\n",
      "  convolution_param {\n",
      "    num_output: 16\n",
      "    bias_term: false\n",
      "    group: 1\n",
      "    pad_h: 1\n",
      "    pad_w: 1\n",
      "    kernel_h: 3\n",
      "    kernel_w: 3\n",
      "    stride_h: 1\n",
      "    stride_w: 1\n",
      "    dilation: 1\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"conv2d_4_tmp_1_idx15\"\n",
      "  type: \"Scale\"\n",
      "  bottom: \"conv2d_4_tmp_0_idx14\"\n",
      "  top: \"conv2d_4_tmp_1_idx15\"\n",
      "  scale_param {\n",
      "    axis: 1\n",
      "    num_axes: 1\n",
      "    bias_term: true\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"batch_norm_4_tmp_3_idx16_extra\"\n",
      "  type: \"BatchNorm\"\n",
      "  bottom: \"conv2d_4_tmp_1_idx15\"\n",
      "  top: \"batch_norm_4_tmp_3_idx16_extra\"\n",
      "  batch_norm_param {\n",
      "    use_global_stats: true\n",
      "    eps: 1e-05\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"batch_norm_4_tmp_3_idx16\"\n",
      "  type: \"Scale\"\n",
      "  bottom: \"batch_norm_4_tmp_3_idx16_extra\"\n",
      "  top: \"batch_norm_4_tmp_3_idx16\"\n",
      "  scale_param {\n",
      "    axis: 1\n",
      "    num_axes: 1\n",
      "    bias_term: true\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"elementwise_add_1_tmp_0_idx17\"\n",
      "  type: \"Eltwise\"\n",
      "  bottom: \"batch_norm_4_tmp_3_idx16\"\n",
      "  bottom: \"elementwise_add_0_tmp_1_idx10\"\n",
      "  top: \"elementwise_add_1_tmp_0_idx17\"\n",
      "  eltwise_param {\n",
      "    operation: SUM\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"elementwise_add_1_tmp_1_idx18\"\n",
      "  type: \"ReLU\"\n",
      "  bottom: \"elementwise_add_1_tmp_0_idx17\"\n",
      "  top: \"elementwise_add_1_tmp_1_idx18\"\n",
      "}\n",
      "layer {\n",
      "  name: \"conv2d_5_tmp_0_idx19\"\n",
      "  type: \"Convolution\"\n",
      "  bottom: \"elementwise_add_1_tmp_1_idx18\"\n",
      "  top: \"conv2d_5_tmp_0_idx19\"\n",
      "  convolution_param {\n",
      "    num_output: 16\n",
      "    bias_term: false\n",
      "    group: 1\n",
      "    pad_h: 1\n",
      "    pad_w: 1\n",
      "    kernel_h: 3\n",
      "    kernel_w: 3\n",
      "    stride_h: 1\n",
      "    stride_w: 1\n",
      "    dilation: 1\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"batch_norm_5_tmp_3_idx20_extra\"\n",
      "  type: \"BatchNorm\"\n",
      "  bottom: \"conv2d_5_tmp_0_idx19\"\n",
      "  top: \"batch_norm_5_tmp_3_idx20_extra\"\n",
      "  batch_norm_param {\n",
      "    use_global_stats: true\n",
      "    eps: 1e-05\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"batch_norm_5_tmp_3_idx20\"\n",
      "  type: \"Scale\"\n",
      "  bottom: \"batch_norm_5_tmp_3_idx20_extra\"\n",
      "  top: \"batch_norm_5_tmp_3_idx20\"\n",
      "  scale_param {\n",
      "    axis: 1\n",
      "    num_axes: 1\n",
      "    bias_term: true\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"batch_norm_5_tmp_4_idx21\"\n",
      "  type: \"ReLU\"\n",
      "  bottom: \"batch_norm_5_tmp_3_idx20\"\n",
      "  top: \"batch_norm_5_tmp_4_idx21\"\n",
      "}\n",
      "layer {\n",
      "  name: \"conv2d_6_tmp_0_idx22\"\n",
      "  type: \"Convolution\"\n",
      "  bottom: \"batch_norm_5_tmp_4_idx21\"\n",
      "  top: \"conv2d_6_tmp_0_idx22\"\n",
      "  convolution_param {\n",
      "    num_output: 16\n",
      "    bias_term: false\n",
      "    group: 1\n",
      "    pad_h: 1\n",
      "    pad_w: 1\n",
      "    kernel_h: 3\n",
      "    kernel_w: 3\n",
      "    stride_h: 1\n",
      "    stride_w: 1\n",
      "    dilation: 1\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"conv2d_6_tmp_1_idx23\"\n",
      "  type: \"Scale\"\n",
      "  bottom: \"conv2d_6_tmp_0_idx22\"\n",
      "  top: \"conv2d_6_tmp_1_idx23\"\n",
      "  scale_param {\n",
      "    axis: 1\n",
      "    num_axes: 1\n",
      "    bias_term: true\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"batch_norm_6_tmp_3_idx24_extra\"\n",
      "  type: \"BatchNorm\"\n",
      "  bottom: \"conv2d_6_tmp_1_idx23\"\n",
      "  top: \"batch_norm_6_tmp_3_idx24_extra\"\n",
      "  batch_norm_param {\n",
      "    use_global_stats: true\n",
      "    eps: 1e-05\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"batch_norm_6_tmp_3_idx24\"\n",
      "  type: \"Scale\"\n",
      "  bottom: \"batch_norm_6_tmp_3_idx24_extra\"\n",
      "  top: \"batch_norm_6_tmp_3_idx24\"\n",
      "  scale_param {\n",
      "    axis: 1\n",
      "    num_axes: 1\n",
      "    bias_term: true\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"elementwise_add_2_tmp_0_idx25\"\n",
      "  type: \"Eltwise\"\n",
      "  bottom: \"batch_norm_6_tmp_3_idx24\"\n",
      "  bottom: \"elementwise_add_1_tmp_1_idx18\"\n",
      "  top: \"elementwise_add_2_tmp_0_idx25\"\n",
      "  eltwise_param {\n",
      "    operation: SUM\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"elementwise_add_2_tmp_1_idx26\"\n",
      "  type: \"ReLU\"\n",
      "  bottom: \"elementwise_add_2_tmp_0_idx25\"\n",
      "  top: \"elementwise_add_2_tmp_1_idx26\"\n",
      "}\n",
      "layer {\n",
      "  name: \"conv2d_7_tmp_0_idx27\"\n",
      "  type: \"Convolution\"\n",
      "  bottom: \"elementwise_add_2_tmp_1_idx26\"\n",
      "  top: \"conv2d_7_tmp_0_idx27\"\n",
      "  convolution_param {\n",
      "    num_output: 16\n",
      "    bias_term: false\n",
      "    group: 1\n",
      "    pad_h: 1\n",
      "    pad_w: 1\n",
      "    kernel_h: 3\n",
      "    kernel_w: 3\n",
      "    stride_h: 1\n",
      "    stride_w: 1\n",
      "    dilation: 1\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"batch_norm_7_tmp_3_idx28_extra\"\n",
      "  type: \"BatchNorm\"\n",
      "  bottom: \"conv2d_7_tmp_0_idx27\"\n",
      "  top: \"batch_norm_7_tmp_3_idx28_extra\"\n",
      "  batch_norm_param {\n",
      "    use_global_stats: true\n",
      "    eps: 1e-05\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"batch_norm_7_tmp_3_idx28\"\n",
      "  type: \"Scale\"\n",
      "  bottom: \"batch_norm_7_tmp_3_idx28_extra\"\n",
      "  top: \"batch_norm_7_tmp_3_idx28\"\n",
      "  scale_param {\n",
      "    axis: 1\n",
      "    num_axes: 1\n",
      "    bias_term: true\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"batch_norm_7_tmp_4_idx29\"\n",
      "  type: \"ReLU\"\n",
      "  bottom: \"batch_norm_7_tmp_3_idx28\"\n",
      "  top: \"batch_norm_7_tmp_4_idx29\"\n",
      "}\n",
      "layer {\n",
      "  name: \"conv2d_8_tmp_0_idx30\"\n",
      "  type: \"Convolution\"\n",
      "  bottom: \"batch_norm_7_tmp_4_idx29\"\n",
      "  top: \"conv2d_8_tmp_0_idx30\"\n",
      "  convolution_param {\n",
      "    num_output: 16\n",
      "    bias_term: false\n",
      "    group: 1\n",
      "    pad_h: 1\n",
      "    pad_w: 1\n",
      "    kernel_h: 3\n",
      "    kernel_w: 3\n",
      "    stride_h: 1\n",
      "    stride_w: 1\n",
      "    dilation: 1\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"conv2d_8_tmp_1_idx31\"\n",
      "  type: \"Scale\"\n",
      "  bottom: \"conv2d_8_tmp_0_idx30\"\n",
      "  top: \"conv2d_8_tmp_1_idx31\"\n",
      "  scale_param {\n",
      "    axis: 1\n",
      "    num_axes: 1\n",
      "    bias_term: true\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"batch_norm_8_tmp_3_idx32_extra\"\n",
      "  type: \"BatchNorm\"\n",
      "  bottom: \"conv2d_8_tmp_1_idx31\"\n",
      "  top: \"batch_norm_8_tmp_3_idx32_extra\"\n",
      "  batch_norm_param {\n",
      "    use_global_stats: true\n",
      "    eps: 1e-05\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"batch_norm_8_tmp_3_idx32\"\n",
      "  type: \"Scale\"\n",
      "  bottom: \"batch_norm_8_tmp_3_idx32_extra\"\n",
      "  top: \"batch_norm_8_tmp_3_idx32\"\n",
      "  scale_param {\n",
      "    axis: 1\n",
      "    num_axes: 1\n",
      "    bias_term: true\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"elementwise_add_3_tmp_0_idx33\"\n",
      "  type: \"Eltwise\"\n",
      "  bottom: \"batch_norm_8_tmp_3_idx32\"\n",
      "  bottom: \"elementwise_add_2_tmp_1_idx26\"\n",
      "  top: \"elementwise_add_3_tmp_0_idx33\"\n",
      "  eltwise_param {\n",
      "    operation: SUM\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"elementwise_add_3_tmp_1_idx34\"\n",
      "  type: \"ReLU\"\n",
      "  bottom: \"elementwise_add_3_tmp_0_idx33\"\n",
      "  top: \"elementwise_add_3_tmp_1_idx34\"\n",
      "}\n",
      "layer {\n",
      "  name: \"conv2d_9_tmp_0_idx35\"\n",
      "  type: \"Convolution\"\n",
      "  bottom: \"elementwise_add_3_tmp_1_idx34\"\n",
      "  top: \"conv2d_9_tmp_0_idx35\"\n",
      "  convolution_param {\n",
      "    num_output: 16\n",
      "    bias_term: false\n",
      "    group: 1\n",
      "    pad_h: 1\n",
      "    pad_w: 1\n",
      "    kernel_h: 3\n",
      "    kernel_w: 3\n",
      "    stride_h: 1\n",
      "    stride_w: 1\n",
      "    dilation: 1\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"batch_norm_9_tmp_3_idx36_extra\"\n",
      "  type: \"BatchNorm\"\n",
      "  bottom: \"conv2d_9_tmp_0_idx35\"\n",
      "  top: \"batch_norm_9_tmp_3_idx36_extra\"\n",
      "  batch_norm_param {\n",
      "    use_global_stats: true\n",
      "    eps: 1e-05\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"batch_norm_9_tmp_3_idx36\"\n",
      "  type: \"Scale\"\n",
      "  bottom: \"batch_norm_9_tmp_3_idx36_extra\"\n",
      "  top: \"batch_norm_9_tmp_3_idx36\"\n",
      "  scale_param {\n",
      "    axis: 1\n",
      "    num_axes: 1\n",
      "    bias_term: true\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"batch_norm_9_tmp_4_idx37\"\n",
      "  type: \"ReLU\"\n",
      "  bottom: \"batch_norm_9_tmp_3_idx36\"\n",
      "  top: \"batch_norm_9_tmp_4_idx37\"\n",
      "}\n",
      "layer {\n",
      "  name: \"conv2d_10_tmp_0_idx38\"\n",
      "  type: \"Convolution\"\n",
      "  bottom: \"batch_norm_9_tmp_4_idx37\"\n",
      "  top: \"conv2d_10_tmp_0_idx38\"\n",
      "  convolution_param {\n",
      "    num_output: 16\n",
      "    bias_term: false\n",
      "    group: 1\n",
      "    pad_h: 1\n",
      "    pad_w: 1\n",
      "    kernel_h: 3\n",
      "    kernel_w: 3\n",
      "    stride_h: 1\n",
      "    stride_w: 1\n",
      "    dilation: 1\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"conv2d_10_tmp_1_idx39\"\n",
      "  type: \"Scale\"\n",
      "  bottom: \"conv2d_10_tmp_0_idx38\"\n",
      "  top: \"conv2d_10_tmp_1_idx39\"\n",
      "  scale_param {\n",
      "    axis: 1\n",
      "    num_axes: 1\n",
      "    bias_term: true\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"batch_norm_10_tmp_3_idx40_extra\"\n",
      "  type: \"BatchNorm\"\n",
      "  bottom: \"conv2d_10_tmp_1_idx39\"\n",
      "  top: \"batch_norm_10_tmp_3_idx40_extra\"\n",
      "  batch_norm_param {\n",
      "    use_global_stats: true\n",
      "    eps: 1e-05\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"batch_norm_10_tmp_3_idx40\"\n",
      "  type: \"Scale\"\n",
      "  bottom: \"batch_norm_10_tmp_3_idx40_extra\"\n",
      "  top: \"batch_norm_10_tmp_3_idx40\"\n",
      "  scale_param {\n",
      "    axis: 1\n",
      "    num_axes: 1\n",
      "    bias_term: true\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"elementwise_add_4_tmp_0_idx41\"\n",
      "  type: \"Eltwise\"\n",
      "  bottom: \"batch_norm_10_tmp_3_idx40\"\n",
      "  bottom: \"elementwise_add_3_tmp_1_idx34\"\n",
      "  top: \"elementwise_add_4_tmp_0_idx41\"\n",
      "  eltwise_param {\n",
      "    operation: SUM\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"elementwise_add_4_tmp_1_idx42\"\n",
      "  type: \"ReLU\"\n",
      "  bottom: \"elementwise_add_4_tmp_0_idx41\"\n",
      "  top: \"elementwise_add_4_tmp_1_idx42\"\n",
      "}\n",
      "layer {\n",
      "  name: \"pool2d_0_tmp_0_idx43\"\n",
      "  type: \"Pooling\"\n",
      "  bottom: \"elementwise_add_4_tmp_1_idx42\"\n",
      "  top: \"pool2d_0_tmp_0_idx43\"\n",
      "  pooling_param {\n",
      "    pool: AVE\n",
      "    kernel_h: 8\n",
      "    kernel_w: 8\n",
      "    stride_h: 1\n",
      "    stride_w: 1\n",
      "    pad_h: 0\n",
      "    pad_w: 0\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"fc_0_tmp_0_idx44_extra_x\"\n",
      "  type: \"Flatten\"\n",
      "  bottom: \"pool2d_0_tmp_0_idx43\"\n",
      "  top: \"fc_0_tmp_0_idx44_extra_x\"\n",
      "  flatten_param {\n",
      "    axis: 1\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"fc_0_tmp_0_idx44\"\n",
      "  type: \"InnerProduct\"\n",
      "  bottom: \"fc_0_tmp_0_idx44_extra_x\"\n",
      "  top: \"fc_0_tmp_0_idx44\"\n",
      "  inner_product_param {\n",
      "    num_output: 10\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"fc_0_tmp_1_idx45\"\n",
      "  type: \"Scale\"\n",
      "  bottom: \"fc_0_tmp_0_idx44\"\n",
      "  top: \"fc_0_tmp_1_idx45\"\n",
      "  scale_param {\n",
      "    axis: 1\n",
      "    num_axes: 1\n",
      "    bias_term: true\n",
      "  }\n",
      "}\n",
      "layer {\n",
      "  name: \"fc_0_tmp_2_idx46\"\n",
      "  type: \"Softmax\"\n",
      "  bottom: \"fc_0_tmp_1_idx45\"\n",
      "  top: \"fc_0_tmp_2_idx46\"\n",
      "  softmax_param {\n",
      "    axis: 1\n",
      "  }\n",
      "}\n",
      "I0706 20:31:00.941340 424177088 layer_factory.hpp:77] Creating layer pixel_idx0\n",
      "I0706 20:31:00.941416 424177088 net.cpp:100] Creating Layer pixel_idx0\n",
      "I0706 20:31:00.941431 424177088 net.cpp:408] pixel_idx0 -> pixel_idx0\n",
      "I0706 20:31:00.941509 424177088 net.cpp:150] Setting up pixel_idx0\n",
      "I0706 20:31:00.941529 424177088 net.cpp:157] Top shape: 1 3 32 32 (3072)\n",
      "I0706 20:31:00.941540 424177088 net.cpp:165] Memory required for data: 12288\n",
      "I0706 20:31:00.941547 424177088 layer_factory.hpp:77] Creating layer conv2d_0_tmp_0_idx0\n",
      "I0706 20:31:00.941576 424177088 net.cpp:100] Creating Layer conv2d_0_tmp_0_idx0\n",
      "I0706 20:31:00.941584 424177088 net.cpp:434] conv2d_0_tmp_0_idx0 <- pixel_idx0\n",
      "I0706 20:31:00.941606 424177088 net.cpp:408] conv2d_0_tmp_0_idx0 -> conv2d_0_tmp_0_idx0\n",
      "I0706 20:31:00.941702 424177088 net.cpp:150] Setting up conv2d_0_tmp_0_idx0\n",
      "I0706 20:31:00.941712 424177088 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:31:00.941720 424177088 net.cpp:165] Memory required for data: 77824\n",
      "I0706 20:31:00.941751 424177088 layer_factory.hpp:77] Creating layer batch_norm_0_tmp_3_idx1_extra\n",
      "I0706 20:31:00.941772 424177088 net.cpp:100] Creating Layer batch_norm_0_tmp_3_idx1_extra\n",
      "I0706 20:31:00.941781 424177088 net.cpp:434] batch_norm_0_tmp_3_idx1_extra <- conv2d_0_tmp_0_idx0\n",
      "I0706 20:31:00.941794 424177088 net.cpp:408] batch_norm_0_tmp_3_idx1_extra -> batch_norm_0_tmp_3_idx1_extra\n",
      "I0706 20:31:00.941846 424177088 net.cpp:150] Setting up batch_norm_0_tmp_3_idx1_extra\n",
      "I0706 20:31:00.941854 424177088 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:31:00.941862 424177088 net.cpp:165] Memory required for data: 143360\n",
      "I0706 20:31:00.941895 424177088 layer_factory.hpp:77] Creating layer batch_norm_0_tmp_3_idx1\n",
      "I0706 20:31:00.941918 424177088 net.cpp:100] Creating Layer batch_norm_0_tmp_3_idx1\n",
      "I0706 20:31:00.941926 424177088 net.cpp:434] batch_norm_0_tmp_3_idx1 <- batch_norm_0_tmp_3_idx1_extra\n",
      "I0706 20:31:00.941941 424177088 net.cpp:408] batch_norm_0_tmp_3_idx1 -> batch_norm_0_tmp_3_idx1\n",
      "I0706 20:31:00.941982 424177088 layer_factory.hpp:77] Creating layer batch_norm_0_tmp_3_idx1\n",
      "I0706 20:31:00.942040 424177088 scale_layer.cpp:92] axis_ is 1\n",
      "I0706 20:31:00.942047 424177088 scale_layer.cpp:93] num_axes is 1\n",
      "I0706 20:31:00.942051 424177088 scale_layer.cpp:94] bottom[0]->shape is 1 16 32 32 (16384)\n",
      "I0706 20:31:00.942059 424177088 scale_layer.cpp:95] scale->shape is 16 (16)\n",
      "I0706 20:31:00.942075 424177088 net.cpp:150] Setting up batch_norm_0_tmp_3_idx1\n",
      "I0706 20:31:00.942080 424177088 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:31:00.942088 424177088 net.cpp:165] Memory required for data: 208896\n",
      "I0706 20:31:00.942112 424177088 layer_factory.hpp:77] Creating layer batch_norm_0_tmp_4_idx2\n",
      "I0706 20:31:00.942132 424177088 net.cpp:100] Creating Layer batch_norm_0_tmp_4_idx2\n",
      "I0706 20:31:00.942140 424177088 net.cpp:434] batch_norm_0_tmp_4_idx2 <- batch_norm_0_tmp_3_idx1\n",
      "I0706 20:31:00.942154 424177088 net.cpp:408] batch_norm_0_tmp_4_idx2 -> batch_norm_0_tmp_4_idx2\n",
      "I0706 20:31:00.942174 424177088 net.cpp:150] Setting up batch_norm_0_tmp_4_idx2\n",
      "I0706 20:31:00.942183 424177088 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:31:00.942189 424177088 net.cpp:165] Memory required for data: 274432\n",
      "I0706 20:31:00.942194 424177088 layer_factory.hpp:77] Creating layer batch_norm_0_tmp_4_idx2_batch_norm_0_tmp_4_idx2_0_split\n",
      "I0706 20:31:00.942214 424177088 net.cpp:100] Creating Layer batch_norm_0_tmp_4_idx2_batch_norm_0_tmp_4_idx2_0_split\n",
      "I0706 20:31:00.942222 424177088 net.cpp:434] batch_norm_0_tmp_4_idx2_batch_norm_0_tmp_4_idx2_0_split <- batch_norm_0_tmp_4_idx2\n",
      "I0706 20:31:00.942236 424177088 net.cpp:408] batch_norm_0_tmp_4_idx2_batch_norm_0_tmp_4_idx2_0_split -> batch_norm_0_tmp_4_idx2_batch_norm_0_tmp_4_idx2_0_split_0\n",
      "I0706 20:31:00.942260 424177088 net.cpp:408] batch_norm_0_tmp_4_idx2_batch_norm_0_tmp_4_idx2_0_split -> batch_norm_0_tmp_4_idx2_batch_norm_0_tmp_4_idx2_0_split_1\n",
      "I0706 20:31:00.942281 424177088 net.cpp:150] Setting up batch_norm_0_tmp_4_idx2_batch_norm_0_tmp_4_idx2_0_split\n",
      "I0706 20:31:00.942392 424177088 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:31:00.942405 424177088 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:31:00.942412 424177088 net.cpp:165] Memory required for data: 405504\n",
      "I0706 20:31:00.942420 424177088 layer_factory.hpp:77] Creating layer conv2d_1_tmp_0_idx3\n",
      "I0706 20:31:00.942440 424177088 net.cpp:100] Creating Layer conv2d_1_tmp_0_idx3\n",
      "I0706 20:31:00.942447 424177088 net.cpp:434] conv2d_1_tmp_0_idx3 <- batch_norm_0_tmp_4_idx2_batch_norm_0_tmp_4_idx2_0_split_0\n",
      "I0706 20:31:00.942464 424177088 net.cpp:408] conv2d_1_tmp_0_idx3 -> conv2d_1_tmp_0_idx3\n",
      "I0706 20:31:00.942517 424177088 net.cpp:150] Setting up conv2d_1_tmp_0_idx3\n",
      "I0706 20:31:00.942524 424177088 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:31:00.942531 424177088 net.cpp:165] Memory required for data: 471040\n",
      "I0706 20:31:00.942543 424177088 layer_factory.hpp:77] Creating layer batch_norm_1_tmp_3_idx4_extra\n",
      "I0706 20:31:00.942556 424177088 net.cpp:100] Creating Layer batch_norm_1_tmp_3_idx4_extra\n",
      "I0706 20:31:00.942595 424177088 net.cpp:434] batch_norm_1_tmp_3_idx4_extra <- conv2d_1_tmp_0_idx3\n",
      "I0706 20:31:00.942618 424177088 net.cpp:408] batch_norm_1_tmp_3_idx4_extra -> batch_norm_1_tmp_3_idx4_extra\n",
      "I0706 20:31:00.942669 424177088 net.cpp:150] Setting up batch_norm_1_tmp_3_idx4_extra\n",
      "I0706 20:31:00.942679 424177088 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:31:00.942687 424177088 net.cpp:165] Memory required for data: 536576\n",
      "I0706 20:31:00.942715 424177088 layer_factory.hpp:77] Creating layer batch_norm_1_tmp_3_idx4\n",
      "I0706 20:31:00.942736 424177088 net.cpp:100] Creating Layer batch_norm_1_tmp_3_idx4\n",
      "I0706 20:31:00.942745 424177088 net.cpp:434] batch_norm_1_tmp_3_idx4 <- batch_norm_1_tmp_3_idx4_extra\n",
      "I0706 20:31:00.942764 424177088 net.cpp:408] batch_norm_1_tmp_3_idx4 -> batch_norm_1_tmp_3_idx4\n",
      "I0706 20:31:00.942795 424177088 layer_factory.hpp:77] Creating layer batch_norm_1_tmp_3_idx4\n",
      "I0706 20:31:00.942831 424177088 scale_layer.cpp:92] axis_ is 1\n",
      "I0706 20:31:00.942837 424177088 scale_layer.cpp:93] num_axes is 1\n",
      "I0706 20:31:00.942842 424177088 scale_layer.cpp:94] bottom[0]->shape is 1 16 32 32 (16384)\n",
      "I0706 20:31:00.942848 424177088 scale_layer.cpp:95] scale->shape is 16 (16)\n",
      "I0706 20:31:00.942862 424177088 net.cpp:150] Setting up batch_norm_1_tmp_3_idx4\n",
      "I0706 20:31:00.942867 424177088 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:31:00.942875 424177088 net.cpp:165] Memory required for data: 602112\n",
      "I0706 20:31:00.942888 424177088 layer_factory.hpp:77] Creating layer batch_norm_1_tmp_4_idx5\n",
      "I0706 20:31:00.942901 424177088 net.cpp:100] Creating Layer batch_norm_1_tmp_4_idx5\n",
      "I0706 20:31:00.942909 424177088 net.cpp:434] batch_norm_1_tmp_4_idx5 <- batch_norm_1_tmp_3_idx4\n",
      "I0706 20:31:00.942926 424177088 net.cpp:408] batch_norm_1_tmp_4_idx5 -> batch_norm_1_tmp_4_idx5\n",
      "I0706 20:31:00.942945 424177088 net.cpp:150] Setting up batch_norm_1_tmp_4_idx5\n",
      "I0706 20:31:00.942950 424177088 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:31:00.942957 424177088 net.cpp:165] Memory required for data: 667648\n",
      "I0706 20:31:00.942962 424177088 layer_factory.hpp:77] Creating layer conv2d_2_tmp_0_idx6\n",
      "I0706 20:31:00.942977 424177088 net.cpp:100] Creating Layer conv2d_2_tmp_0_idx6\n",
      "I0706 20:31:00.942986 424177088 net.cpp:434] conv2d_2_tmp_0_idx6 <- batch_norm_1_tmp_4_idx5\n",
      "I0706 20:31:00.942999 424177088 net.cpp:408] conv2d_2_tmp_0_idx6 -> conv2d_2_tmp_0_idx6\n",
      "I0706 20:31:00.943046 424177088 net.cpp:150] Setting up conv2d_2_tmp_0_idx6\n",
      "I0706 20:31:00.943054 424177088 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:31:00.943061 424177088 net.cpp:165] Memory required for data: 733184\n",
      "I0706 20:31:00.943071 424177088 layer_factory.hpp:77] Creating layer conv2d_2_tmp_1_idx7\n",
      "I0706 20:31:00.943086 424177088 net.cpp:100] Creating Layer conv2d_2_tmp_1_idx7\n",
      "I0706 20:31:00.943094 424177088 net.cpp:434] conv2d_2_tmp_1_idx7 <- conv2d_2_tmp_0_idx6\n",
      "I0706 20:31:00.943112 424177088 net.cpp:408] conv2d_2_tmp_1_idx7 -> conv2d_2_tmp_1_idx7\n",
      "I0706 20:31:00.943141 424177088 layer_factory.hpp:77] Creating layer conv2d_2_tmp_1_idx7\n",
      "I0706 20:31:00.943176 424177088 scale_layer.cpp:92] axis_ is 1\n",
      "I0706 20:31:00.943182 424177088 scale_layer.cpp:93] num_axes is 1\n",
      "I0706 20:31:00.943186 424177088 scale_layer.cpp:94] bottom[0]->shape is 1 16 32 32 (16384)\n",
      "I0706 20:31:00.943193 424177088 scale_layer.cpp:95] scale->shape is 16 (16)\n",
      "I0706 20:31:00.943207 424177088 net.cpp:150] Setting up conv2d_2_tmp_1_idx7\n",
      "I0706 20:31:00.943212 424177088 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:31:00.943217 424177088 net.cpp:165] Memory required for data: 798720\n",
      "I0706 20:31:00.943230 424177088 layer_factory.hpp:77] Creating layer batch_norm_2_tmp_3_idx8_extra\n",
      "I0706 20:31:00.943246 424177088 net.cpp:100] Creating Layer batch_norm_2_tmp_3_idx8_extra\n",
      "I0706 20:31:00.943254 424177088 net.cpp:434] batch_norm_2_tmp_3_idx8_extra <- conv2d_2_tmp_1_idx7\n",
      "I0706 20:31:00.943271 424177088 net.cpp:408] batch_norm_2_tmp_3_idx8_extra -> batch_norm_2_tmp_3_idx8_extra\n",
      "I0706 20:31:00.943310 424177088 net.cpp:150] Setting up batch_norm_2_tmp_3_idx8_extra\n",
      "I0706 20:31:00.943317 424177088 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:31:00.943323 424177088 net.cpp:165] Memory required for data: 864256\n",
      "I0706 20:31:00.943353 424177088 layer_factory.hpp:77] Creating layer batch_norm_2_tmp_3_idx8\n",
      "I0706 20:31:00.943370 424177088 net.cpp:100] Creating Layer batch_norm_2_tmp_3_idx8\n",
      "I0706 20:31:00.943378 424177088 net.cpp:434] batch_norm_2_tmp_3_idx8 <- batch_norm_2_tmp_3_idx8_extra\n",
      "I0706 20:31:00.943392 424177088 net.cpp:408] batch_norm_2_tmp_3_idx8 -> batch_norm_2_tmp_3_idx8\n",
      "I0706 20:31:00.943421 424177088 layer_factory.hpp:77] Creating layer batch_norm_2_tmp_3_idx8\n",
      "I0706 20:31:00.943459 424177088 scale_layer.cpp:92] axis_ is 1\n",
      "I0706 20:31:00.943466 424177088 scale_layer.cpp:93] num_axes is 1\n",
      "I0706 20:31:00.943470 424177088 scale_layer.cpp:94] bottom[0]->shape is 1 16 32 32 (16384)\n",
      "I0706 20:31:00.943480 424177088 scale_layer.cpp:95] scale->shape is 16 (16)\n",
      "I0706 20:31:00.943493 424177088 net.cpp:150] Setting up batch_norm_2_tmp_3_idx8\n",
      "I0706 20:31:00.943498 424177088 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:31:00.943504 424177088 net.cpp:165] Memory required for data: 929792\n",
      "I0706 20:31:00.943516 424177088 layer_factory.hpp:77] Creating layer elementwise_add_0_tmp_0_idx9\n",
      "I0706 20:31:00.943537 424177088 net.cpp:100] Creating Layer elementwise_add_0_tmp_0_idx9\n",
      "I0706 20:31:00.943545 424177088 net.cpp:434] elementwise_add_0_tmp_0_idx9 <- batch_norm_2_tmp_3_idx8\n",
      "I0706 20:31:00.943555 424177088 net.cpp:434] elementwise_add_0_tmp_0_idx9 <- batch_norm_0_tmp_4_idx2_batch_norm_0_tmp_4_idx2_0_split_1\n",
      "I0706 20:31:00.943569 424177088 net.cpp:408] elementwise_add_0_tmp_0_idx9 -> elementwise_add_0_tmp_0_idx9\n",
      "I0706 20:31:00.943593 424177088 net.cpp:150] Setting up elementwise_add_0_tmp_0_idx9\n",
      "I0706 20:31:00.943599 424177088 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:31:00.943606 424177088 net.cpp:165] Memory required for data: 995328\n",
      "I0706 20:31:00.943611 424177088 layer_factory.hpp:77] Creating layer elementwise_add_0_tmp_1_idx10\n",
      "I0706 20:31:00.943624 424177088 net.cpp:100] Creating Layer elementwise_add_0_tmp_1_idx10\n",
      "I0706 20:31:00.943630 424177088 net.cpp:434] elementwise_add_0_tmp_1_idx10 <- elementwise_add_0_tmp_0_idx9\n",
      "I0706 20:31:00.943686 424177088 net.cpp:408] elementwise_add_0_tmp_1_idx10 -> elementwise_add_0_tmp_1_idx10\n",
      "I0706 20:31:00.943717 424177088 net.cpp:150] Setting up elementwise_add_0_tmp_1_idx10\n",
      "I0706 20:31:00.943725 424177088 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:31:00.943733 424177088 net.cpp:165] Memory required for data: 1060864\n",
      "I0706 20:31:00.943739 424177088 layer_factory.hpp:77] Creating layer elementwise_add_0_tmp_1_idx10_elementwise_add_0_tmp_1_idx10_0_split\n",
      "I0706 20:31:00.943760 424177088 net.cpp:100] Creating Layer elementwise_add_0_tmp_1_idx10_elementwise_add_0_tmp_1_idx10_0_split\n",
      "I0706 20:31:00.943769 424177088 net.cpp:434] elementwise_add_0_tmp_1_idx10_elementwise_add_0_tmp_1_idx10_0_split <- elementwise_add_0_tmp_1_idx10\n",
      "I0706 20:31:00.943785 424177088 net.cpp:408] elementwise_add_0_tmp_1_idx10_elementwise_add_0_tmp_1_idx10_0_split -> elementwise_add_0_tmp_1_idx10_elementwise_add_0_tmp_1_idx10_0_split_0\n",
      "I0706 20:31:00.943809 424177088 net.cpp:408] elementwise_add_0_tmp_1_idx10_elementwise_add_0_tmp_1_idx10_0_split -> elementwise_add_0_tmp_1_idx10_elementwise_add_0_tmp_1_idx10_0_split_1\n",
      "I0706 20:31:00.943866 424177088 net.cpp:150] Setting up elementwise_add_0_tmp_1_idx10_elementwise_add_0_tmp_1_idx10_0_split\n",
      "I0706 20:31:00.943874 424177088 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:31:00.943881 424177088 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:31:00.943886 424177088 net.cpp:165] Memory required for data: 1191936\n",
      "I0706 20:31:00.943892 424177088 layer_factory.hpp:77] Creating layer conv2d_3_tmp_0_idx11\n",
      "I0706 20:31:00.943908 424177088 net.cpp:100] Creating Layer conv2d_3_tmp_0_idx11\n",
      "I0706 20:31:00.943917 424177088 net.cpp:434] conv2d_3_tmp_0_idx11 <- elementwise_add_0_tmp_1_idx10_elementwise_add_0_tmp_1_idx10_0_split_0\n",
      "I0706 20:31:00.943931 424177088 net.cpp:408] conv2d_3_tmp_0_idx11 -> conv2d_3_tmp_0_idx11\n",
      "I0706 20:31:00.943990 424177088 net.cpp:150] Setting up conv2d_3_tmp_0_idx11\n",
      "I0706 20:31:00.943998 424177088 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:31:00.944049 424177088 net.cpp:165] Memory required for data: 1257472\n",
      "I0706 20:31:00.944068 424177088 layer_factory.hpp:77] Creating layer batch_norm_3_tmp_3_idx12_extra\n",
      "I0706 20:31:00.944087 424177088 net.cpp:100] Creating Layer batch_norm_3_tmp_3_idx12_extra\n",
      "I0706 20:31:00.944095 424177088 net.cpp:434] batch_norm_3_tmp_3_idx12_extra <- conv2d_3_tmp_0_idx11\n",
      "I0706 20:31:00.944110 424177088 net.cpp:408] batch_norm_3_tmp_3_idx12_extra -> batch_norm_3_tmp_3_idx12_extra\n",
      "I0706 20:31:00.944154 424177088 net.cpp:150] Setting up batch_norm_3_tmp_3_idx12_extra\n",
      "I0706 20:31:00.944161 424177088 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:31:00.944171 424177088 net.cpp:165] Memory required for data: 1323008\n",
      "I0706 20:31:00.944187 424177088 layer_factory.hpp:77] Creating layer batch_norm_3_tmp_3_idx12\n",
      "I0706 20:31:00.944206 424177088 net.cpp:100] Creating Layer batch_norm_3_tmp_3_idx12\n",
      "I0706 20:31:00.944216 424177088 net.cpp:434] batch_norm_3_tmp_3_idx12 <- batch_norm_3_tmp_3_idx12_extra\n",
      "I0706 20:31:00.944283 424177088 net.cpp:408] batch_norm_3_tmp_3_idx12 -> batch_norm_3_tmp_3_idx12\n",
      "I0706 20:31:00.944322 424177088 layer_factory.hpp:77] Creating layer batch_norm_3_tmp_3_idx12\n",
      "I0706 20:31:00.944366 424177088 scale_layer.cpp:92] axis_ is 1\n",
      "I0706 20:31:00.944375 424177088 scale_layer.cpp:93] num_axes is 1\n",
      "I0706 20:31:00.944378 424177088 scale_layer.cpp:94] bottom[0]->shape is 1 16 32 32 (16384)\n",
      "I0706 20:31:00.944387 424177088 scale_layer.cpp:95] scale->shape is 16 (16)\n",
      "I0706 20:31:00.944403 424177088 net.cpp:150] Setting up batch_norm_3_tmp_3_idx12\n",
      "I0706 20:31:00.944408 424177088 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:31:00.944414 424177088 net.cpp:165] Memory required for data: 1388544\n",
      "I0706 20:31:00.944428 424177088 layer_factory.hpp:77] Creating layer batch_norm_3_tmp_4_idx13\n",
      "I0706 20:31:00.944442 424177088 net.cpp:100] Creating Layer batch_norm_3_tmp_4_idx13\n",
      "I0706 20:31:00.944449 424177088 net.cpp:434] batch_norm_3_tmp_4_idx13 <- batch_norm_3_tmp_3_idx12\n",
      "I0706 20:31:00.944465 424177088 net.cpp:408] batch_norm_3_tmp_4_idx13 -> batch_norm_3_tmp_4_idx13\n",
      "I0706 20:31:00.944485 424177088 net.cpp:150] Setting up batch_norm_3_tmp_4_idx13\n",
      "I0706 20:31:00.944491 424177088 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:31:00.944499 424177088 net.cpp:165] Memory required for data: 1454080\n",
      "I0706 20:31:00.944504 424177088 layer_factory.hpp:77] Creating layer conv2d_4_tmp_0_idx14\n",
      "I0706 20:31:00.944519 424177088 net.cpp:100] Creating Layer conv2d_4_tmp_0_idx14\n",
      "I0706 20:31:00.944526 424177088 net.cpp:434] conv2d_4_tmp_0_idx14 <- batch_norm_3_tmp_4_idx13\n",
      "I0706 20:31:00.944545 424177088 net.cpp:408] conv2d_4_tmp_0_idx14 -> conv2d_4_tmp_0_idx14\n",
      "I0706 20:31:00.944602 424177088 net.cpp:150] Setting up conv2d_4_tmp_0_idx14\n",
      "I0706 20:31:00.944609 424177088 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:31:00.944617 424177088 net.cpp:165] Memory required for data: 1519616\n",
      "I0706 20:31:00.944626 424177088 layer_factory.hpp:77] Creating layer conv2d_4_tmp_1_idx15\n",
      "I0706 20:31:00.944641 424177088 net.cpp:100] Creating Layer conv2d_4_tmp_1_idx15\n",
      "I0706 20:31:00.944650 424177088 net.cpp:434] conv2d_4_tmp_1_idx15 <- conv2d_4_tmp_0_idx14\n",
      "I0706 20:31:00.944666 424177088 net.cpp:408] conv2d_4_tmp_1_idx15 -> conv2d_4_tmp_1_idx15\n",
      "I0706 20:31:00.944698 424177088 layer_factory.hpp:77] Creating layer conv2d_4_tmp_1_idx15\n",
      "I0706 20:31:00.944739 424177088 scale_layer.cpp:92] axis_ is 1\n",
      "I0706 20:31:00.944746 424177088 scale_layer.cpp:93] num_axes is 1\n",
      "I0706 20:31:00.944751 424177088 scale_layer.cpp:94] bottom[0]->shape is 1 16 32 32 (16384)\n",
      "I0706 20:31:00.944756 424177088 scale_layer.cpp:95] scale->shape is 16 (16)\n",
      "I0706 20:31:00.944774 424177088 net.cpp:150] Setting up conv2d_4_tmp_1_idx15\n",
      "I0706 20:31:00.944779 424177088 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:31:00.944785 424177088 net.cpp:165] Memory required for data: 1585152\n",
      "I0706 20:31:00.944798 424177088 layer_factory.hpp:77] Creating layer batch_norm_4_tmp_3_idx16_extra\n",
      "I0706 20:31:00.944813 424177088 net.cpp:100] Creating Layer batch_norm_4_tmp_3_idx16_extra\n",
      "I0706 20:31:00.944821 424177088 net.cpp:434] batch_norm_4_tmp_3_idx16_extra <- conv2d_4_tmp_1_idx15\n",
      "I0706 20:31:00.944834 424177088 net.cpp:408] batch_norm_4_tmp_3_idx16_extra -> batch_norm_4_tmp_3_idx16_extra\n",
      "I0706 20:31:00.944885 424177088 net.cpp:150] Setting up batch_norm_4_tmp_3_idx16_extra\n",
      "I0706 20:31:00.944891 424177088 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:31:00.944897 424177088 net.cpp:165] Memory required for data: 1650688\n",
      "I0706 20:31:00.944914 424177088 layer_factory.hpp:77] Creating layer batch_norm_4_tmp_3_idx16\n",
      "I0706 20:31:00.944931 424177088 net.cpp:100] Creating Layer batch_norm_4_tmp_3_idx16\n",
      "I0706 20:31:00.944938 424177088 net.cpp:434] batch_norm_4_tmp_3_idx16 <- batch_norm_4_tmp_3_idx16_extra\n",
      "I0706 20:31:00.944953 424177088 net.cpp:408] batch_norm_4_tmp_3_idx16 -> batch_norm_4_tmp_3_idx16\n",
      "I0706 20:31:00.944985 424177088 layer_factory.hpp:77] Creating layer batch_norm_4_tmp_3_idx16\n",
      "I0706 20:31:00.945024 424177088 scale_layer.cpp:92] axis_ is 1\n",
      "I0706 20:31:00.945031 424177088 scale_layer.cpp:93] num_axes is 1\n",
      "I0706 20:31:00.945035 424177088 scale_layer.cpp:94] bottom[0]->shape is 1 16 32 32 (16384)\n",
      "I0706 20:31:00.945041 424177088 scale_layer.cpp:95] scale->shape is 16 (16)\n",
      "I0706 20:31:00.945056 424177088 net.cpp:150] Setting up batch_norm_4_tmp_3_idx16\n",
      "I0706 20:31:00.945062 424177088 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:31:00.945068 424177088 net.cpp:165] Memory required for data: 1716224\n",
      "I0706 20:31:00.945096 424177088 layer_factory.hpp:77] Creating layer elementwise_add_1_tmp_0_idx17\n",
      "I0706 20:31:00.945112 424177088 net.cpp:100] Creating Layer elementwise_add_1_tmp_0_idx17\n",
      "I0706 20:31:00.945120 424177088 net.cpp:434] elementwise_add_1_tmp_0_idx17 <- batch_norm_4_tmp_3_idx16\n",
      "I0706 20:31:00.945132 424177088 net.cpp:434] elementwise_add_1_tmp_0_idx17 <- elementwise_add_0_tmp_1_idx10_elementwise_add_0_tmp_1_idx10_0_split_1\n",
      "I0706 20:31:00.945147 424177088 net.cpp:408] elementwise_add_1_tmp_0_idx17 -> elementwise_add_1_tmp_0_idx17\n",
      "I0706 20:31:00.945168 424177088 net.cpp:150] Setting up elementwise_add_1_tmp_0_idx17\n",
      "I0706 20:31:00.945175 424177088 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:31:00.945181 424177088 net.cpp:165] Memory required for data: 1781760\n",
      "I0706 20:31:00.945187 424177088 layer_factory.hpp:77] Creating layer elementwise_add_1_tmp_1_idx18\n",
      "I0706 20:31:00.945199 424177088 net.cpp:100] Creating Layer elementwise_add_1_tmp_1_idx18\n",
      "I0706 20:31:00.945206 424177088 net.cpp:434] elementwise_add_1_tmp_1_idx18 <- elementwise_add_1_tmp_0_idx17\n",
      "I0706 20:31:00.945322 424177088 net.cpp:408] elementwise_add_1_tmp_1_idx18 -> elementwise_add_1_tmp_1_idx18\n",
      "I0706 20:31:00.945348 424177088 net.cpp:150] Setting up elementwise_add_1_tmp_1_idx18\n",
      "I0706 20:31:00.945355 424177088 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:31:00.945363 424177088 net.cpp:165] Memory required for data: 1847296\n",
      "I0706 20:31:00.945369 424177088 layer_factory.hpp:77] Creating layer elementwise_add_1_tmp_1_idx18_elementwise_add_1_tmp_1_idx18_0_split\n",
      "I0706 20:31:00.945385 424177088 net.cpp:100] Creating Layer elementwise_add_1_tmp_1_idx18_elementwise_add_1_tmp_1_idx18_0_split\n",
      "I0706 20:31:00.945394 424177088 net.cpp:434] elementwise_add_1_tmp_1_idx18_elementwise_add_1_tmp_1_idx18_0_split <- elementwise_add_1_tmp_1_idx18\n",
      "I0706 20:31:00.945408 424177088 net.cpp:408] elementwise_add_1_tmp_1_idx18_elementwise_add_1_tmp_1_idx18_0_split -> elementwise_add_1_tmp_1_idx18_elementwise_add_1_tmp_1_idx18_0_split_0\n",
      "I0706 20:31:00.945477 424177088 net.cpp:408] elementwise_add_1_tmp_1_idx18_elementwise_add_1_tmp_1_idx18_0_split -> elementwise_add_1_tmp_1_idx18_elementwise_add_1_tmp_1_idx18_0_split_1\n",
      "I0706 20:31:00.945502 424177088 net.cpp:150] Setting up elementwise_add_1_tmp_1_idx18_elementwise_add_1_tmp_1_idx18_0_split\n",
      "I0706 20:31:00.945508 424177088 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:31:00.945514 424177088 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:31:00.945520 424177088 net.cpp:165] Memory required for data: 1978368\n",
      "I0706 20:31:00.945526 424177088 layer_factory.hpp:77] Creating layer conv2d_5_tmp_0_idx19\n",
      "I0706 20:31:00.945542 424177088 net.cpp:100] Creating Layer conv2d_5_tmp_0_idx19\n",
      "I0706 20:31:00.945550 424177088 net.cpp:434] conv2d_5_tmp_0_idx19 <- elementwise_add_1_tmp_1_idx18_elementwise_add_1_tmp_1_idx18_0_split_0\n",
      "I0706 20:31:00.945565 424177088 net.cpp:408] conv2d_5_tmp_0_idx19 -> conv2d_5_tmp_0_idx19\n",
      "I0706 20:31:00.945616 424177088 net.cpp:150] Setting up conv2d_5_tmp_0_idx19\n",
      "I0706 20:31:00.945689 424177088 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:31:00.945703 424177088 net.cpp:165] Memory required for data: 2043904\n",
      "I0706 20:31:00.945717 424177088 layer_factory.hpp:77] Creating layer batch_norm_5_tmp_3_idx20_extra\n",
      "I0706 20:31:00.945734 424177088 net.cpp:100] Creating Layer batch_norm_5_tmp_3_idx20_extra\n",
      "I0706 20:31:00.945744 424177088 net.cpp:434] batch_norm_5_tmp_3_idx20_extra <- conv2d_5_tmp_0_idx19\n",
      "I0706 20:31:00.945760 424177088 net.cpp:408] batch_norm_5_tmp_3_idx20_extra -> batch_norm_5_tmp_3_idx20_extra\n",
      "I0706 20:31:00.945820 424177088 net.cpp:150] Setting up batch_norm_5_tmp_3_idx20_extra\n",
      "I0706 20:31:00.945830 424177088 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:31:00.945837 424177088 net.cpp:165] Memory required for data: 2109440\n",
      "I0706 20:31:00.945855 424177088 layer_factory.hpp:77] Creating layer batch_norm_5_tmp_3_idx20\n",
      "I0706 20:31:00.945873 424177088 net.cpp:100] Creating Layer batch_norm_5_tmp_3_idx20\n",
      "I0706 20:31:00.945881 424177088 net.cpp:434] batch_norm_5_tmp_3_idx20 <- batch_norm_5_tmp_3_idx20_extra\n",
      "I0706 20:31:00.945897 424177088 net.cpp:408] batch_norm_5_tmp_3_idx20 -> batch_norm_5_tmp_3_idx20\n",
      "I0706 20:31:00.945933 424177088 layer_factory.hpp:77] Creating layer batch_norm_5_tmp_3_idx20\n",
      "I0706 20:31:00.945974 424177088 scale_layer.cpp:92] axis_ is 1\n",
      "I0706 20:31:00.945981 424177088 scale_layer.cpp:93] num_axes is 1\n",
      "I0706 20:31:00.945986 424177088 scale_layer.cpp:94] bottom[0]->shape is 1 16 32 32 (16384)\n",
      "I0706 20:31:00.945992 424177088 scale_layer.cpp:95] scale->shape is 16 (16)\n",
      "I0706 20:31:00.946007 424177088 net.cpp:150] Setting up batch_norm_5_tmp_3_idx20\n",
      "I0706 20:31:00.946012 424177088 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:31:00.946018 424177088 net.cpp:165] Memory required for data: 2174976\n",
      "I0706 20:31:00.946033 424177088 layer_factory.hpp:77] Creating layer batch_norm_5_tmp_4_idx21\n",
      "I0706 20:31:00.946045 424177088 net.cpp:100] Creating Layer batch_norm_5_tmp_4_idx21\n",
      "I0706 20:31:00.946053 424177088 net.cpp:434] batch_norm_5_tmp_4_idx21 <- batch_norm_5_tmp_3_idx20\n",
      "I0706 20:31:00.946067 424177088 net.cpp:408] batch_norm_5_tmp_4_idx21 -> batch_norm_5_tmp_4_idx21\n",
      "I0706 20:31:00.946089 424177088 net.cpp:150] Setting up batch_norm_5_tmp_4_idx21\n",
      "I0706 20:31:00.946094 424177088 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:31:00.946101 424177088 net.cpp:165] Memory required for data: 2240512\n",
      "I0706 20:31:00.946106 424177088 layer_factory.hpp:77] Creating layer conv2d_6_tmp_0_idx22\n",
      "I0706 20:31:00.946127 424177088 net.cpp:100] Creating Layer conv2d_6_tmp_0_idx22\n",
      "I0706 20:31:00.946136 424177088 net.cpp:434] conv2d_6_tmp_0_idx22 <- batch_norm_5_tmp_4_idx21\n",
      "I0706 20:31:00.946153 424177088 net.cpp:408] conv2d_6_tmp_0_idx22 -> conv2d_6_tmp_0_idx22\n",
      "I0706 20:31:00.946213 424177088 net.cpp:150] Setting up conv2d_6_tmp_0_idx22\n",
      "I0706 20:31:00.946221 424177088 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:31:00.946228 424177088 net.cpp:165] Memory required for data: 2306048\n",
      "I0706 20:31:00.946238 424177088 layer_factory.hpp:77] Creating layer conv2d_6_tmp_1_idx23\n",
      "I0706 20:31:00.946254 424177088 net.cpp:100] Creating Layer conv2d_6_tmp_1_idx23\n",
      "I0706 20:31:00.946262 424177088 net.cpp:434] conv2d_6_tmp_1_idx23 <- conv2d_6_tmp_0_idx22\n",
      "I0706 20:31:00.946275 424177088 net.cpp:408] conv2d_6_tmp_1_idx23 -> conv2d_6_tmp_1_idx23\n",
      "I0706 20:31:00.946306 424177088 layer_factory.hpp:77] Creating layer conv2d_6_tmp_1_idx23\n",
      "I0706 20:31:00.946350 424177088 scale_layer.cpp:92] axis_ is 1\n",
      "I0706 20:31:00.946357 424177088 scale_layer.cpp:93] num_axes is 1\n",
      "I0706 20:31:00.946362 424177088 scale_layer.cpp:94] bottom[0]->shape is 1 16 32 32 (16384)\n",
      "I0706 20:31:00.946367 424177088 scale_layer.cpp:95] scale->shape is 16 (16)\n",
      "I0706 20:31:00.946384 424177088 net.cpp:150] Setting up conv2d_6_tmp_1_idx23\n",
      "I0706 20:31:00.946389 424177088 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:31:00.946395 424177088 net.cpp:165] Memory required for data: 2371584\n",
      "I0706 20:31:00.946408 424177088 layer_factory.hpp:77] Creating layer batch_norm_6_tmp_3_idx24_extra\n",
      "I0706 20:31:00.946421 424177088 net.cpp:100] Creating Layer batch_norm_6_tmp_3_idx24_extra\n",
      "I0706 20:31:00.946429 424177088 net.cpp:434] batch_norm_6_tmp_3_idx24_extra <- conv2d_6_tmp_1_idx23\n",
      "I0706 20:31:00.946445 424177088 net.cpp:408] batch_norm_6_tmp_3_idx24_extra -> batch_norm_6_tmp_3_idx24_extra\n",
      "I0706 20:31:00.946491 424177088 net.cpp:150] Setting up batch_norm_6_tmp_3_idx24_extra\n",
      "I0706 20:31:00.946498 424177088 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:31:00.946504 424177088 net.cpp:165] Memory required for data: 2437120\n",
      "I0706 20:31:00.946521 424177088 layer_factory.hpp:77] Creating layer batch_norm_6_tmp_3_idx24\n",
      "I0706 20:31:00.946535 424177088 net.cpp:100] Creating Layer batch_norm_6_tmp_3_idx24\n",
      "I0706 20:31:00.946544 424177088 net.cpp:434] batch_norm_6_tmp_3_idx24 <- batch_norm_6_tmp_3_idx24_extra\n",
      "I0706 20:31:00.946560 424177088 net.cpp:408] batch_norm_6_tmp_3_idx24 -> batch_norm_6_tmp_3_idx24\n",
      "I0706 20:31:00.946593 424177088 layer_factory.hpp:77] Creating layer batch_norm_6_tmp_3_idx24\n",
      "I0706 20:31:00.946632 424177088 scale_layer.cpp:92] axis_ is 1\n",
      "I0706 20:31:00.946640 424177088 scale_layer.cpp:93] num_axes is 1\n",
      "I0706 20:31:00.946645 424177088 scale_layer.cpp:94] bottom[0]->shape is 1 16 32 32 (16384)\n",
      "I0706 20:31:00.946650 424177088 scale_layer.cpp:95] scale->shape is 16 (16)\n",
      "I0706 20:31:00.946666 424177088 net.cpp:150] Setting up batch_norm_6_tmp_3_idx24\n",
      "I0706 20:31:00.946671 424177088 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:31:00.946676 424177088 net.cpp:165] Memory required for data: 2502656\n",
      "I0706 20:31:00.946691 424177088 layer_factory.hpp:77] Creating layer elementwise_add_2_tmp_0_idx25\n",
      "I0706 20:31:00.946704 424177088 net.cpp:100] Creating Layer elementwise_add_2_tmp_0_idx25\n",
      "I0706 20:31:00.946713 424177088 net.cpp:434] elementwise_add_2_tmp_0_idx25 <- batch_norm_6_tmp_3_idx24\n",
      "I0706 20:31:00.946725 424177088 net.cpp:434] elementwise_add_2_tmp_0_idx25 <- elementwise_add_1_tmp_1_idx18_elementwise_add_1_tmp_1_idx18_0_split_1\n",
      "I0706 20:31:00.946739 424177088 net.cpp:408] elementwise_add_2_tmp_0_idx25 -> elementwise_add_2_tmp_0_idx25\n",
      "I0706 20:31:00.946761 424177088 net.cpp:150] Setting up elementwise_add_2_tmp_0_idx25\n",
      "I0706 20:31:00.946767 424177088 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:31:00.946774 424177088 net.cpp:165] Memory required for data: 2568192\n",
      "I0706 20:31:00.946821 424177088 layer_factory.hpp:77] Creating layer elementwise_add_2_tmp_1_idx26\n",
      "I0706 20:31:00.946846 424177088 net.cpp:100] Creating Layer elementwise_add_2_tmp_1_idx26\n",
      "I0706 20:31:00.946856 424177088 net.cpp:434] elementwise_add_2_tmp_1_idx26 <- elementwise_add_2_tmp_0_idx25\n",
      "I0706 20:31:00.946872 424177088 net.cpp:408] elementwise_add_2_tmp_1_idx26 -> elementwise_add_2_tmp_1_idx26\n",
      "I0706 20:31:00.946892 424177088 net.cpp:150] Setting up elementwise_add_2_tmp_1_idx26\n",
      "I0706 20:31:00.946899 424177088 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:31:00.946907 424177088 net.cpp:165] Memory required for data: 2633728\n",
      "I0706 20:31:00.946913 424177088 layer_factory.hpp:77] Creating layer elementwise_add_2_tmp_1_idx26_elementwise_add_2_tmp_1_idx26_0_split\n",
      "I0706 20:31:00.946926 424177088 net.cpp:100] Creating Layer elementwise_add_2_tmp_1_idx26_elementwise_add_2_tmp_1_idx26_0_split\n",
      "I0706 20:31:00.946935 424177088 net.cpp:434] elementwise_add_2_tmp_1_idx26_elementwise_add_2_tmp_1_idx26_0_split <- elementwise_add_2_tmp_1_idx26\n",
      "I0706 20:31:00.947000 424177088 net.cpp:408] elementwise_add_2_tmp_1_idx26_elementwise_add_2_tmp_1_idx26_0_split -> elementwise_add_2_tmp_1_idx26_elementwise_add_2_tmp_1_idx26_0_split_0\n",
      "I0706 20:31:00.947031 424177088 net.cpp:408] elementwise_add_2_tmp_1_idx26_elementwise_add_2_tmp_1_idx26_0_split -> elementwise_add_2_tmp_1_idx26_elementwise_add_2_tmp_1_idx26_0_split_1\n",
      "I0706 20:31:00.947054 424177088 net.cpp:150] Setting up elementwise_add_2_tmp_1_idx26_elementwise_add_2_tmp_1_idx26_0_split\n",
      "I0706 20:31:00.947062 424177088 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:31:00.947068 424177088 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:31:00.947074 424177088 net.cpp:165] Memory required for data: 2764800\n",
      "I0706 20:31:00.947080 424177088 layer_factory.hpp:77] Creating layer conv2d_7_tmp_0_idx27\n",
      "I0706 20:31:00.947098 424177088 net.cpp:100] Creating Layer conv2d_7_tmp_0_idx27\n",
      "I0706 20:31:00.947105 424177088 net.cpp:434] conv2d_7_tmp_0_idx27 <- elementwise_add_2_tmp_1_idx26_elementwise_add_2_tmp_1_idx26_0_split_0\n",
      "I0706 20:31:00.947153 424177088 net.cpp:408] conv2d_7_tmp_0_idx27 -> conv2d_7_tmp_0_idx27\n",
      "I0706 20:31:00.947208 424177088 net.cpp:150] Setting up conv2d_7_tmp_0_idx27\n",
      "I0706 20:31:00.947216 424177088 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:31:00.947223 424177088 net.cpp:165] Memory required for data: 2830336\n",
      "I0706 20:31:00.947234 424177088 layer_factory.hpp:77] Creating layer batch_norm_7_tmp_3_idx28_extra\n",
      "I0706 20:31:00.947248 424177088 net.cpp:100] Creating Layer batch_norm_7_tmp_3_idx28_extra\n",
      "I0706 20:31:00.947259 424177088 net.cpp:434] batch_norm_7_tmp_3_idx28_extra <- conv2d_7_tmp_0_idx27\n",
      "I0706 20:31:00.947273 424177088 net.cpp:408] batch_norm_7_tmp_3_idx28_extra -> batch_norm_7_tmp_3_idx28_extra\n",
      "I0706 20:31:00.947314 424177088 net.cpp:150] Setting up batch_norm_7_tmp_3_idx28_extra\n",
      "I0706 20:31:00.947320 424177088 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:31:00.947327 424177088 net.cpp:165] Memory required for data: 2895872\n",
      "I0706 20:31:00.947343 424177088 layer_factory.hpp:77] Creating layer batch_norm_7_tmp_3_idx28\n",
      "I0706 20:31:00.947360 424177088 net.cpp:100] Creating Layer batch_norm_7_tmp_3_idx28\n",
      "I0706 20:31:00.947369 424177088 net.cpp:434] batch_norm_7_tmp_3_idx28 <- batch_norm_7_tmp_3_idx28_extra\n",
      "I0706 20:31:00.947384 424177088 net.cpp:408] batch_norm_7_tmp_3_idx28 -> batch_norm_7_tmp_3_idx28\n",
      "I0706 20:31:00.947414 424177088 layer_factory.hpp:77] Creating layer batch_norm_7_tmp_3_idx28\n",
      "I0706 20:31:00.947464 424177088 scale_layer.cpp:92] axis_ is 1\n",
      "I0706 20:31:00.947472 424177088 scale_layer.cpp:93] num_axes is 1\n",
      "I0706 20:31:00.947476 424177088 scale_layer.cpp:94] bottom[0]->shape is 1 16 32 32 (16384)\n",
      "I0706 20:31:00.947484 424177088 scale_layer.cpp:95] scale->shape is 16 (16)\n",
      "I0706 20:31:00.947499 424177088 net.cpp:150] Setting up batch_norm_7_tmp_3_idx28\n",
      "I0706 20:31:00.947506 424177088 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:31:00.947515 424177088 net.cpp:165] Memory required for data: 2961408\n",
      "I0706 20:31:00.947528 424177088 layer_factory.hpp:77] Creating layer batch_norm_7_tmp_4_idx29\n",
      "I0706 20:31:00.947541 424177088 net.cpp:100] Creating Layer batch_norm_7_tmp_4_idx29\n",
      "I0706 20:31:00.947551 424177088 net.cpp:434] batch_norm_7_tmp_4_idx29 <- batch_norm_7_tmp_3_idx28\n",
      "I0706 20:31:00.947564 424177088 net.cpp:408] batch_norm_7_tmp_4_idx29 -> batch_norm_7_tmp_4_idx29\n",
      "I0706 20:31:00.947585 424177088 net.cpp:150] Setting up batch_norm_7_tmp_4_idx29\n",
      "I0706 20:31:00.947592 424177088 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:31:00.947598 424177088 net.cpp:165] Memory required for data: 3026944\n",
      "I0706 20:31:00.947604 424177088 layer_factory.hpp:77] Creating layer conv2d_8_tmp_0_idx30\n",
      "I0706 20:31:00.947619 424177088 net.cpp:100] Creating Layer conv2d_8_tmp_0_idx30\n",
      "I0706 20:31:00.947628 424177088 net.cpp:434] conv2d_8_tmp_0_idx30 <- batch_norm_7_tmp_4_idx29\n",
      "I0706 20:31:00.947643 424177088 net.cpp:408] conv2d_8_tmp_0_idx30 -> conv2d_8_tmp_0_idx30\n",
      "I0706 20:31:00.947688 424177088 net.cpp:150] Setting up conv2d_8_tmp_0_idx30\n",
      "I0706 20:31:00.947696 424177088 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:31:00.947703 424177088 net.cpp:165] Memory required for data: 3092480\n",
      "I0706 20:31:00.947713 424177088 layer_factory.hpp:77] Creating layer conv2d_8_tmp_1_idx31\n",
      "I0706 20:31:00.947727 424177088 net.cpp:100] Creating Layer conv2d_8_tmp_1_idx31\n",
      "I0706 20:31:00.947736 424177088 net.cpp:434] conv2d_8_tmp_1_idx31 <- conv2d_8_tmp_0_idx30\n",
      "I0706 20:31:00.947750 424177088 net.cpp:408] conv2d_8_tmp_1_idx31 -> conv2d_8_tmp_1_idx31\n",
      "I0706 20:31:00.947782 424177088 layer_factory.hpp:77] Creating layer conv2d_8_tmp_1_idx31\n",
      "I0706 20:31:00.947818 424177088 scale_layer.cpp:92] axis_ is 1\n",
      "I0706 20:31:00.947824 424177088 scale_layer.cpp:93] num_axes is 1\n",
      "I0706 20:31:00.947829 424177088 scale_layer.cpp:94] bottom[0]->shape is 1 16 32 32 (16384)\n",
      "I0706 20:31:00.947836 424177088 scale_layer.cpp:95] scale->shape is 16 (16)\n",
      "I0706 20:31:00.947849 424177088 net.cpp:150] Setting up conv2d_8_tmp_1_idx31\n",
      "I0706 20:31:00.947854 424177088 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:31:00.947860 424177088 net.cpp:165] Memory required for data: 3158016\n",
      "I0706 20:31:00.947872 424177088 layer_factory.hpp:77] Creating layer batch_norm_8_tmp_3_idx32_extra\n",
      "I0706 20:31:00.947886 424177088 net.cpp:100] Creating Layer batch_norm_8_tmp_3_idx32_extra\n",
      "I0706 20:31:00.947894 424177088 net.cpp:434] batch_norm_8_tmp_3_idx32_extra <- conv2d_8_tmp_1_idx31\n",
      "I0706 20:31:00.947908 424177088 net.cpp:408] batch_norm_8_tmp_3_idx32_extra -> batch_norm_8_tmp_3_idx32_extra\n",
      "I0706 20:31:00.947960 424177088 net.cpp:150] Setting up batch_norm_8_tmp_3_idx32_extra\n",
      "I0706 20:31:00.947970 424177088 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:31:00.947979 424177088 net.cpp:165] Memory required for data: 3223552\n",
      "I0706 20:31:00.948002 424177088 layer_factory.hpp:77] Creating layer batch_norm_8_tmp_3_idx32\n",
      "I0706 20:31:00.948024 424177088 net.cpp:100] Creating Layer batch_norm_8_tmp_3_idx32\n",
      "I0706 20:31:00.948036 424177088 net.cpp:434] batch_norm_8_tmp_3_idx32 <- batch_norm_8_tmp_3_idx32_extra\n",
      "I0706 20:31:00.948058 424177088 net.cpp:408] batch_norm_8_tmp_3_idx32 -> batch_norm_8_tmp_3_idx32\n",
      "I0706 20:31:00.948094 424177088 layer_factory.hpp:77] Creating layer batch_norm_8_tmp_3_idx32\n",
      "I0706 20:31:00.948173 424177088 scale_layer.cpp:92] axis_ is 1\n",
      "I0706 20:31:00.948206 424177088 scale_layer.cpp:93] num_axes is 1\n",
      "I0706 20:31:00.948218 424177088 scale_layer.cpp:94] bottom[0]->shape is 1 16 32 32 (16384)\n",
      "I0706 20:31:00.948227 424177088 scale_layer.cpp:95] scale->shape is 16 (16)\n",
      "I0706 20:31:00.948246 424177088 net.cpp:150] Setting up batch_norm_8_tmp_3_idx32\n",
      "I0706 20:31:00.948253 424177088 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:31:00.948259 424177088 net.cpp:165] Memory required for data: 3289088\n",
      "I0706 20:31:00.948273 424177088 layer_factory.hpp:77] Creating layer elementwise_add_3_tmp_0_idx33\n",
      "I0706 20:31:00.948288 424177088 net.cpp:100] Creating Layer elementwise_add_3_tmp_0_idx33\n",
      "I0706 20:31:00.948297 424177088 net.cpp:434] elementwise_add_3_tmp_0_idx33 <- batch_norm_8_tmp_3_idx32\n",
      "I0706 20:31:00.948309 424177088 net.cpp:434] elementwise_add_3_tmp_0_idx33 <- elementwise_add_2_tmp_1_idx26_elementwise_add_2_tmp_1_idx26_0_split_1\n",
      "I0706 20:31:00.948323 424177088 net.cpp:408] elementwise_add_3_tmp_0_idx33 -> elementwise_add_3_tmp_0_idx33\n",
      "I0706 20:31:00.948374 424177088 net.cpp:150] Setting up elementwise_add_3_tmp_0_idx33\n",
      "I0706 20:31:00.948385 424177088 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:31:00.948393 424177088 net.cpp:165] Memory required for data: 3354624\n",
      "I0706 20:31:00.948400 424177088 layer_factory.hpp:77] Creating layer elementwise_add_3_tmp_1_idx34\n",
      "I0706 20:31:00.948415 424177088 net.cpp:100] Creating Layer elementwise_add_3_tmp_1_idx34\n",
      "I0706 20:31:00.948423 424177088 net.cpp:434] elementwise_add_3_tmp_1_idx34 <- elementwise_add_3_tmp_0_idx33\n",
      "I0706 20:31:00.948439 424177088 net.cpp:408] elementwise_add_3_tmp_1_idx34 -> elementwise_add_3_tmp_1_idx34\n",
      "I0706 20:31:00.948458 424177088 net.cpp:150] Setting up elementwise_add_3_tmp_1_idx34\n",
      "I0706 20:31:00.948463 424177088 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:31:00.948472 424177088 net.cpp:165] Memory required for data: 3420160\n",
      "I0706 20:31:00.948478 424177088 layer_factory.hpp:77] Creating layer elementwise_add_3_tmp_1_idx34_elementwise_add_3_tmp_1_idx34_0_split\n",
      "I0706 20:31:00.948559 424177088 net.cpp:100] Creating Layer elementwise_add_3_tmp_1_idx34_elementwise_add_3_tmp_1_idx34_0_split\n",
      "I0706 20:31:00.948570 424177088 net.cpp:434] elementwise_add_3_tmp_1_idx34_elementwise_add_3_tmp_1_idx34_0_split <- elementwise_add_3_tmp_1_idx34\n",
      "I0706 20:31:00.948585 424177088 net.cpp:408] elementwise_add_3_tmp_1_idx34_elementwise_add_3_tmp_1_idx34_0_split -> elementwise_add_3_tmp_1_idx34_elementwise_add_3_tmp_1_idx34_0_split_0\n",
      "I0706 20:31:00.948607 424177088 net.cpp:408] elementwise_add_3_tmp_1_idx34_elementwise_add_3_tmp_1_idx34_0_split -> elementwise_add_3_tmp_1_idx34_elementwise_add_3_tmp_1_idx34_0_split_1\n",
      "I0706 20:31:00.948628 424177088 net.cpp:150] Setting up elementwise_add_3_tmp_1_idx34_elementwise_add_3_tmp_1_idx34_0_split\n",
      "I0706 20:31:00.948635 424177088 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:31:00.948642 424177088 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:31:00.948647 424177088 net.cpp:165] Memory required for data: 3551232\n",
      "I0706 20:31:00.948693 424177088 layer_factory.hpp:77] Creating layer conv2d_9_tmp_0_idx35\n",
      "I0706 20:31:00.948715 424177088 net.cpp:100] Creating Layer conv2d_9_tmp_0_idx35\n",
      "I0706 20:31:00.948725 424177088 net.cpp:434] conv2d_9_tmp_0_idx35 <- elementwise_add_3_tmp_1_idx34_elementwise_add_3_tmp_1_idx34_0_split_0\n",
      "I0706 20:31:00.948745 424177088 net.cpp:408] conv2d_9_tmp_0_idx35 -> conv2d_9_tmp_0_idx35\n",
      "I0706 20:31:00.948801 424177088 net.cpp:150] Setting up conv2d_9_tmp_0_idx35\n",
      "I0706 20:31:00.948812 424177088 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:31:00.948818 424177088 net.cpp:165] Memory required for data: 3616768\n",
      "I0706 20:31:00.948829 424177088 layer_factory.hpp:77] Creating layer batch_norm_9_tmp_3_idx36_extra\n",
      "I0706 20:31:00.948844 424177088 net.cpp:100] Creating Layer batch_norm_9_tmp_3_idx36_extra\n",
      "I0706 20:31:00.948853 424177088 net.cpp:434] batch_norm_9_tmp_3_idx36_extra <- conv2d_9_tmp_0_idx35\n",
      "I0706 20:31:00.948868 424177088 net.cpp:408] batch_norm_9_tmp_3_idx36_extra -> batch_norm_9_tmp_3_idx36_extra\n",
      "I0706 20:31:00.948921 424177088 net.cpp:150] Setting up batch_norm_9_tmp_3_idx36_extra\n",
      "I0706 20:31:00.948930 424177088 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:31:00.948936 424177088 net.cpp:165] Memory required for data: 3682304\n",
      "I0706 20:31:00.948980 424177088 layer_factory.hpp:77] Creating layer batch_norm_9_tmp_3_idx36\n",
      "I0706 20:31:00.949002 424177088 net.cpp:100] Creating Layer batch_norm_9_tmp_3_idx36\n",
      "I0706 20:31:00.949010 424177088 net.cpp:434] batch_norm_9_tmp_3_idx36 <- batch_norm_9_tmp_3_idx36_extra\n",
      "I0706 20:31:00.949025 424177088 net.cpp:408] batch_norm_9_tmp_3_idx36 -> batch_norm_9_tmp_3_idx36\n",
      "I0706 20:31:00.949057 424177088 layer_factory.hpp:77] Creating layer batch_norm_9_tmp_3_idx36\n",
      "I0706 20:31:00.949100 424177088 scale_layer.cpp:92] axis_ is 1\n",
      "I0706 20:31:00.949108 424177088 scale_layer.cpp:93] num_axes is 1\n",
      "I0706 20:31:00.949112 424177088 scale_layer.cpp:94] bottom[0]->shape is 1 16 32 32 (16384)\n",
      "I0706 20:31:00.949118 424177088 scale_layer.cpp:95] scale->shape is 16 (16)\n",
      "I0706 20:31:00.949136 424177088 net.cpp:150] Setting up batch_norm_9_tmp_3_idx36\n",
      "I0706 20:31:00.949141 424177088 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:31:00.949146 424177088 net.cpp:165] Memory required for data: 3747840\n",
      "I0706 20:31:00.949158 424177088 layer_factory.hpp:77] Creating layer batch_norm_9_tmp_4_idx37\n",
      "I0706 20:31:00.949174 424177088 net.cpp:100] Creating Layer batch_norm_9_tmp_4_idx37\n",
      "I0706 20:31:00.949183 424177088 net.cpp:434] batch_norm_9_tmp_4_idx37 <- batch_norm_9_tmp_3_idx36\n",
      "I0706 20:31:00.949198 424177088 net.cpp:408] batch_norm_9_tmp_4_idx37 -> batch_norm_9_tmp_4_idx37\n",
      "I0706 20:31:00.949216 424177088 net.cpp:150] Setting up batch_norm_9_tmp_4_idx37\n",
      "I0706 20:31:00.949223 424177088 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:31:00.949229 424177088 net.cpp:165] Memory required for data: 3813376\n",
      "I0706 20:31:00.949234 424177088 layer_factory.hpp:77] Creating layer conv2d_10_tmp_0_idx38\n",
      "I0706 20:31:00.949249 424177088 net.cpp:100] Creating Layer conv2d_10_tmp_0_idx38\n",
      "I0706 20:31:00.949257 424177088 net.cpp:434] conv2d_10_tmp_0_idx38 <- batch_norm_9_tmp_4_idx37\n",
      "I0706 20:31:00.949272 424177088 net.cpp:408] conv2d_10_tmp_0_idx38 -> conv2d_10_tmp_0_idx38\n",
      "I0706 20:31:00.949322 424177088 net.cpp:150] Setting up conv2d_10_tmp_0_idx38\n",
      "I0706 20:31:00.949330 424177088 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:31:00.949337 424177088 net.cpp:165] Memory required for data: 3878912\n",
      "I0706 20:31:00.949347 424177088 layer_factory.hpp:77] Creating layer conv2d_10_tmp_1_idx39\n",
      "I0706 20:31:00.949362 424177088 net.cpp:100] Creating Layer conv2d_10_tmp_1_idx39\n",
      "I0706 20:31:00.949370 424177088 net.cpp:434] conv2d_10_tmp_1_idx39 <- conv2d_10_tmp_0_idx38\n",
      "I0706 20:31:00.949386 424177088 net.cpp:408] conv2d_10_tmp_1_idx39 -> conv2d_10_tmp_1_idx39\n",
      "I0706 20:31:00.949415 424177088 layer_factory.hpp:77] Creating layer conv2d_10_tmp_1_idx39\n",
      "I0706 20:31:00.949452 424177088 scale_layer.cpp:92] axis_ is 1\n",
      "I0706 20:31:00.949460 424177088 scale_layer.cpp:93] num_axes is 1\n",
      "I0706 20:31:00.949465 424177088 scale_layer.cpp:94] bottom[0]->shape is 1 16 32 32 (16384)\n",
      "I0706 20:31:00.949470 424177088 scale_layer.cpp:95] scale->shape is 16 (16)\n",
      "I0706 20:31:00.949486 424177088 net.cpp:150] Setting up conv2d_10_tmp_1_idx39\n",
      "I0706 20:31:00.949491 424177088 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:31:00.949496 424177088 net.cpp:165] Memory required for data: 3944448\n",
      "I0706 20:31:00.949509 424177088 layer_factory.hpp:77] Creating layer batch_norm_10_tmp_3_idx40_extra\n",
      "I0706 20:31:00.949525 424177088 net.cpp:100] Creating Layer batch_norm_10_tmp_3_idx40_extra\n",
      "I0706 20:31:00.949533 424177088 net.cpp:434] batch_norm_10_tmp_3_idx40_extra <- conv2d_10_tmp_1_idx39\n",
      "I0706 20:31:00.949549 424177088 net.cpp:408] batch_norm_10_tmp_3_idx40_extra -> batch_norm_10_tmp_3_idx40_extra\n",
      "I0706 20:31:00.949589 424177088 net.cpp:150] Setting up batch_norm_10_tmp_3_idx40_extra\n",
      "I0706 20:31:00.949595 424177088 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:31:00.949601 424177088 net.cpp:165] Memory required for data: 4009984\n",
      "I0706 20:31:00.949617 424177088 layer_factory.hpp:77] Creating layer batch_norm_10_tmp_3_idx40\n",
      "I0706 20:31:00.949632 424177088 net.cpp:100] Creating Layer batch_norm_10_tmp_3_idx40\n",
      "I0706 20:31:00.949640 424177088 net.cpp:434] batch_norm_10_tmp_3_idx40 <- batch_norm_10_tmp_3_idx40_extra\n",
      "I0706 20:31:00.949657 424177088 net.cpp:408] batch_norm_10_tmp_3_idx40 -> batch_norm_10_tmp_3_idx40\n",
      "I0706 20:31:00.949687 424177088 layer_factory.hpp:77] Creating layer batch_norm_10_tmp_3_idx40\n",
      "I0706 20:31:00.949726 424177088 scale_layer.cpp:92] axis_ is 1\n",
      "I0706 20:31:00.949733 424177088 scale_layer.cpp:93] num_axes is 1\n",
      "I0706 20:31:00.949738 424177088 scale_layer.cpp:94] bottom[0]->shape is 1 16 32 32 (16384)\n",
      "I0706 20:31:00.949743 424177088 scale_layer.cpp:95] scale->shape is 16 (16)\n",
      "I0706 20:31:00.949759 424177088 net.cpp:150] Setting up batch_norm_10_tmp_3_idx40\n",
      "I0706 20:31:00.949764 424177088 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:31:00.949770 424177088 net.cpp:165] Memory required for data: 4075520\n",
      "I0706 20:31:00.949784 424177088 layer_factory.hpp:77] Creating layer elementwise_add_4_tmp_0_idx41\n",
      "I0706 20:31:00.949796 424177088 net.cpp:100] Creating Layer elementwise_add_4_tmp_0_idx41\n",
      "I0706 20:31:00.949805 424177088 net.cpp:434] elementwise_add_4_tmp_0_idx41 <- batch_norm_10_tmp_3_idx40\n",
      "I0706 20:31:00.949816 424177088 net.cpp:434] elementwise_add_4_tmp_0_idx41 <- elementwise_add_3_tmp_1_idx34_elementwise_add_3_tmp_1_idx34_0_split_1\n",
      "I0706 20:31:00.949831 424177088 net.cpp:408] elementwise_add_4_tmp_0_idx41 -> elementwise_add_4_tmp_0_idx41\n",
      "I0706 20:31:00.949851 424177088 net.cpp:150] Setting up elementwise_add_4_tmp_0_idx41\n",
      "I0706 20:31:00.949857 424177088 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:31:00.949863 424177088 net.cpp:165] Memory required for data: 4141056\n",
      "I0706 20:31:00.949869 424177088 layer_factory.hpp:77] Creating layer elementwise_add_4_tmp_1_idx42\n",
      "I0706 20:31:00.949884 424177088 net.cpp:100] Creating Layer elementwise_add_4_tmp_1_idx42\n",
      "I0706 20:31:00.949892 424177088 net.cpp:434] elementwise_add_4_tmp_1_idx42 <- elementwise_add_4_tmp_0_idx41\n",
      "I0706 20:31:00.949906 424177088 net.cpp:408] elementwise_add_4_tmp_1_idx42 -> elementwise_add_4_tmp_1_idx42\n",
      "I0706 20:31:00.949971 424177088 net.cpp:150] Setting up elementwise_add_4_tmp_1_idx42\n",
      "I0706 20:31:00.949981 424177088 net.cpp:157] Top shape: 1 16 32 32 (16384)\n",
      "I0706 20:31:00.949988 424177088 net.cpp:165] Memory required for data: 4206592\n",
      "I0706 20:31:00.949993 424177088 layer_factory.hpp:77] Creating layer pool2d_0_tmp_0_idx43\n",
      "I0706 20:31:00.950013 424177088 net.cpp:100] Creating Layer pool2d_0_tmp_0_idx43\n",
      "I0706 20:31:00.950022 424177088 net.cpp:434] pool2d_0_tmp_0_idx43 <- elementwise_add_4_tmp_1_idx42\n",
      "I0706 20:31:00.950037 424177088 net.cpp:408] pool2d_0_tmp_0_idx43 -> pool2d_0_tmp_0_idx43\n",
      "I0706 20:31:00.950063 424177088 net.cpp:150] Setting up pool2d_0_tmp_0_idx43\n",
      "I0706 20:31:00.950070 424177088 net.cpp:157] Top shape: 1 16 25 25 (10000)\n",
      "I0706 20:31:00.950076 424177088 net.cpp:165] Memory required for data: 4246592\n",
      "I0706 20:31:00.950083 424177088 layer_factory.hpp:77] Creating layer fc_0_tmp_0_idx44_extra_x\n",
      "I0706 20:31:00.950098 424177088 net.cpp:100] Creating Layer fc_0_tmp_0_idx44_extra_x\n",
      "I0706 20:31:00.950106 424177088 net.cpp:434] fc_0_tmp_0_idx44_extra_x <- pool2d_0_tmp_0_idx43\n",
      "I0706 20:31:00.950172 424177088 net.cpp:408] fc_0_tmp_0_idx44_extra_x -> fc_0_tmp_0_idx44_extra_x\n",
      "I0706 20:31:00.950202 424177088 net.cpp:150] Setting up fc_0_tmp_0_idx44_extra_x\n",
      "I0706 20:31:00.950208 424177088 net.cpp:157] Top shape: 1 10000 (10000)\n",
      "I0706 20:31:00.950215 424177088 net.cpp:165] Memory required for data: 4286592\n",
      "I0706 20:31:00.950222 424177088 layer_factory.hpp:77] Creating layer fc_0_tmp_0_idx44\n",
      "I0706 20:31:00.950242 424177088 net.cpp:100] Creating Layer fc_0_tmp_0_idx44\n",
      "I0706 20:31:00.950249 424177088 net.cpp:434] fc_0_tmp_0_idx44 <- fc_0_tmp_0_idx44_extra_x\n",
      "I0706 20:31:00.950264 424177088 net.cpp:408] fc_0_tmp_0_idx44 -> fc_0_tmp_0_idx44\n",
      "I0706 20:31:00.950456 424177088 net.cpp:150] Setting up fc_0_tmp_0_idx44\n",
      "I0706 20:31:00.950464 424177088 net.cpp:157] Top shape: 1 10 (10)\n",
      "I0706 20:31:00.950471 424177088 net.cpp:165] Memory required for data: 4286632\n",
      "I0706 20:31:00.950484 424177088 layer_factory.hpp:77] Creating layer fc_0_tmp_1_idx45\n",
      "I0706 20:31:00.950500 424177088 net.cpp:100] Creating Layer fc_0_tmp_1_idx45\n",
      "I0706 20:31:00.950508 424177088 net.cpp:434] fc_0_tmp_1_idx45 <- fc_0_tmp_0_idx44\n",
      "I0706 20:31:00.950522 424177088 net.cpp:408] fc_0_tmp_1_idx45 -> fc_0_tmp_1_idx45\n",
      "I0706 20:31:00.950553 424177088 layer_factory.hpp:77] Creating layer fc_0_tmp_1_idx45\n",
      "I0706 20:31:00.950592 424177088 scale_layer.cpp:92] axis_ is 1\n",
      "I0706 20:31:00.950599 424177088 scale_layer.cpp:93] num_axes is 1\n",
      "I0706 20:31:00.950603 424177088 scale_layer.cpp:94] bottom[0]->shape is 1 10 (10)\n",
      "I0706 20:31:00.950609 424177088 scale_layer.cpp:95] scale->shape is 10 (10)\n",
      "I0706 20:31:00.950620 424177088 net.cpp:150] Setting up fc_0_tmp_1_idx45\n",
      "I0706 20:31:00.950625 424177088 net.cpp:157] Top shape: 1 10 (10)\n",
      "I0706 20:31:00.950631 424177088 net.cpp:165] Memory required for data: 4286672\n",
      "I0706 20:31:00.950644 424177088 layer_factory.hpp:77] Creating layer fc_0_tmp_2_idx46\n",
      "I0706 20:31:00.950671 424177088 net.cpp:100] Creating Layer fc_0_tmp_2_idx46\n",
      "I0706 20:31:00.950681 424177088 net.cpp:434] fc_0_tmp_2_idx46 <- fc_0_tmp_1_idx45\n",
      "I0706 20:31:00.950696 424177088 net.cpp:408] fc_0_tmp_2_idx46 -> fc_0_tmp_2_idx46\n",
      "I0706 20:31:00.950724 424177088 net.cpp:150] Setting up fc_0_tmp_2_idx46\n",
      "I0706 20:31:00.950731 424177088 net.cpp:157] Top shape: 1 10 (10)\n",
      "I0706 20:31:00.950737 424177088 net.cpp:165] Memory required for data: 4286712\n",
      "I0706 20:31:00.950747 424177088 net.cpp:228] fc_0_tmp_2_idx46 does not need backward computation.\n",
      "I0706 20:31:00.950753 424177088 net.cpp:228] fc_0_tmp_1_idx45 does not need backward computation.\n",
      "I0706 20:31:00.950759 424177088 net.cpp:228] fc_0_tmp_0_idx44 does not need backward computation.\n",
      "I0706 20:31:00.950765 424177088 net.cpp:228] fc_0_tmp_0_idx44_extra_x does not need backward computation.\n",
      "I0706 20:31:00.950771 424177088 net.cpp:228] pool2d_0_tmp_0_idx43 does not need backward computation.\n",
      "I0706 20:31:00.950778 424177088 net.cpp:228] elementwise_add_4_tmp_1_idx42 does not need backward computation.\n",
      "I0706 20:31:00.950855 424177088 net.cpp:228] elementwise_add_4_tmp_0_idx41 does not need backward computation.\n",
      "I0706 20:31:00.950863 424177088 net.cpp:228] batch_norm_10_tmp_3_idx40 does not need backward computation.\n",
      "I0706 20:31:00.950870 424177088 net.cpp:228] batch_norm_10_tmp_3_idx40_extra does not need backward computation.\n",
      "I0706 20:31:00.950877 424177088 net.cpp:228] conv2d_10_tmp_1_idx39 does not need backward computation.\n",
      "I0706 20:31:00.950883 424177088 net.cpp:228] conv2d_10_tmp_0_idx38 does not need backward computation.\n",
      "I0706 20:31:00.950891 424177088 net.cpp:228] batch_norm_9_tmp_4_idx37 does not need backward computation.\n",
      "I0706 20:31:00.950897 424177088 net.cpp:228] batch_norm_9_tmp_3_idx36 does not need backward computation.\n",
      "I0706 20:31:00.950904 424177088 net.cpp:228] batch_norm_9_tmp_3_idx36_extra does not need backward computation.\n",
      "I0706 20:31:00.950910 424177088 net.cpp:228] conv2d_9_tmp_0_idx35 does not need backward computation.\n",
      "I0706 20:31:00.950918 424177088 net.cpp:228] elementwise_add_3_tmp_1_idx34_elementwise_add_3_tmp_1_idx34_0_split does not need backward computation.\n",
      "I0706 20:31:00.950925 424177088 net.cpp:228] elementwise_add_3_tmp_1_idx34 does not need backward computation.\n",
      "I0706 20:31:00.950932 424177088 net.cpp:228] elementwise_add_3_tmp_0_idx33 does not need backward computation.\n",
      "I0706 20:31:00.950940 424177088 net.cpp:228] batch_norm_8_tmp_3_idx32 does not need backward computation.\n",
      "I0706 20:31:00.950947 424177088 net.cpp:228] batch_norm_8_tmp_3_idx32_extra does not need backward computation.\n",
      "I0706 20:31:00.950954 424177088 net.cpp:228] conv2d_8_tmp_1_idx31 does not need backward computation.\n",
      "I0706 20:31:00.950963 424177088 net.cpp:228] conv2d_8_tmp_0_idx30 does not need backward computation.\n",
      "I0706 20:31:00.950970 424177088 net.cpp:228] batch_norm_7_tmp_4_idx29 does not need backward computation.\n",
      "I0706 20:31:00.950978 424177088 net.cpp:228] batch_norm_7_tmp_3_idx28 does not need backward computation.\n",
      "I0706 20:31:00.951117 424177088 net.cpp:228] batch_norm_7_tmp_3_idx28_extra does not need backward computation.\n",
      "I0706 20:31:00.951124 424177088 net.cpp:228] conv2d_7_tmp_0_idx27 does not need backward computation.\n",
      "I0706 20:31:00.951133 424177088 net.cpp:228] elementwise_add_2_tmp_1_idx26_elementwise_add_2_tmp_1_idx26_0_split does not need backward computation.\n",
      "I0706 20:31:00.951139 424177088 net.cpp:228] elementwise_add_2_tmp_1_idx26 does not need backward computation.\n",
      "I0706 20:31:00.951148 424177088 net.cpp:228] elementwise_add_2_tmp_0_idx25 does not need backward computation.\n",
      "I0706 20:31:00.951156 424177088 net.cpp:228] batch_norm_6_tmp_3_idx24 does not need backward computation.\n",
      "I0706 20:31:00.951164 424177088 net.cpp:228] batch_norm_6_tmp_3_idx24_extra does not need backward computation.\n",
      "I0706 20:31:00.951170 424177088 net.cpp:228] conv2d_6_tmp_1_idx23 does not need backward computation.\n",
      "I0706 20:31:00.951176 424177088 net.cpp:228] conv2d_6_tmp_0_idx22 does not need backward computation.\n",
      "I0706 20:31:00.951184 424177088 net.cpp:228] batch_norm_5_tmp_4_idx21 does not need backward computation.\n",
      "I0706 20:31:00.951191 424177088 net.cpp:228] batch_norm_5_tmp_3_idx20 does not need backward computation.\n",
      "I0706 20:31:00.951198 424177088 net.cpp:228] batch_norm_5_tmp_3_idx20_extra does not need backward computation.\n",
      "I0706 20:31:00.951285 424177088 net.cpp:228] conv2d_5_tmp_0_idx19 does not need backward computation.\n",
      "I0706 20:31:00.951294 424177088 net.cpp:228] elementwise_add_1_tmp_1_idx18_elementwise_add_1_tmp_1_idx18_0_split does not need backward computation.\n",
      "I0706 20:31:00.951303 424177088 net.cpp:228] elementwise_add_1_tmp_1_idx18 does not need backward computation.\n",
      "I0706 20:31:00.951309 424177088 net.cpp:228] elementwise_add_1_tmp_0_idx17 does not need backward computation.\n",
      "I0706 20:31:00.951319 424177088 net.cpp:228] batch_norm_4_tmp_3_idx16 does not need backward computation.\n",
      "I0706 20:31:00.951328 424177088 net.cpp:228] batch_norm_4_tmp_3_idx16_extra does not need backward computation.\n",
      "I0706 20:31:00.951335 424177088 net.cpp:228] conv2d_4_tmp_1_idx15 does not need backward computation.\n",
      "I0706 20:31:00.951342 424177088 net.cpp:228] conv2d_4_tmp_0_idx14 does not need backward computation.\n",
      "I0706 20:31:00.951350 424177088 net.cpp:228] batch_norm_3_tmp_4_idx13 does not need backward computation.\n",
      "I0706 20:31:00.951357 424177088 net.cpp:228] batch_norm_3_tmp_3_idx12 does not need backward computation.\n",
      "I0706 20:31:00.951474 424177088 net.cpp:228] batch_norm_3_tmp_3_idx12_extra does not need backward computation.\n",
      "I0706 20:31:00.951488 424177088 net.cpp:228] conv2d_3_tmp_0_idx11 does not need backward computation.\n",
      "I0706 20:31:00.951498 424177088 net.cpp:228] elementwise_add_0_tmp_1_idx10_elementwise_add_0_tmp_1_idx10_0_split does not need backward computation.\n",
      "I0706 20:31:00.951506 424177088 net.cpp:228] elementwise_add_0_tmp_1_idx10 does not need backward computation.\n",
      "I0706 20:31:00.951514 424177088 net.cpp:228] elementwise_add_0_tmp_0_idx9 does not need backward computation.\n",
      "I0706 20:31:00.951522 424177088 net.cpp:228] batch_norm_2_tmp_3_idx8 does not need backward computation.\n",
      "I0706 20:31:00.951529 424177088 net.cpp:228] batch_norm_2_tmp_3_idx8_extra does not need backward computation.\n",
      "I0706 20:31:00.951537 424177088 net.cpp:228] conv2d_2_tmp_1_idx7 does not need backward computation.\n",
      "I0706 20:31:00.951545 424177088 net.cpp:228] conv2d_2_tmp_0_idx6 does not need backward computation.\n",
      "I0706 20:31:00.951617 424177088 net.cpp:228] batch_norm_1_tmp_4_idx5 does not need backward computation.\n",
      "I0706 20:31:00.951627 424177088 net.cpp:228] batch_norm_1_tmp_3_idx4 does not need backward computation.\n",
      "I0706 20:31:00.951634 424177088 net.cpp:228] batch_norm_1_tmp_3_idx4_extra does not need backward computation.\n",
      "I0706 20:31:00.951642 424177088 net.cpp:228] conv2d_1_tmp_0_idx3 does not need backward computation.\n",
      "I0706 20:31:00.951651 424177088 net.cpp:228] batch_norm_0_tmp_4_idx2_batch_norm_0_tmp_4_idx2_0_split does not need backward computation.\n",
      "I0706 20:31:00.951658 424177088 net.cpp:228] batch_norm_0_tmp_4_idx2 does not need backward computation.\n",
      "I0706 20:31:00.951665 424177088 net.cpp:228] batch_norm_0_tmp_3_idx1 does not need backward computation.\n",
      "I0706 20:31:00.951673 424177088 net.cpp:228] batch_norm_0_tmp_3_idx1_extra does not need backward computation.\n",
      "I0706 20:31:00.951680 424177088 net.cpp:228] conv2d_0_tmp_0_idx0 does not need backward computation.\n",
      "I0706 20:31:00.951776 424177088 net.cpp:228] pixel_idx0 does not need backward computation.\n",
      "I0706 20:31:00.951787 424177088 net.cpp:270] This network produces output fc_0_tmp_2_idx46\n",
      "I0706 20:31:00.952020 424177088 net.cpp:283] Network initialization done.\n",
      "I0706 20:31:00.953838 424177088 net.cpp:860] Serializing 65 layers\n",
      "2022-07-06 20:31:00 [INFO]\tCaffe model files are saved as [caffe-model/paddle2caffe.prototxt] and [caffe-model/paddle2caffe.caffemodel].\n"
     ]
    }
   ],
   "source": [
    "# Execute the shell command. If Jupyter cannot find the paddle2caffe command,\n",
    "# please run it from the command line instead.\n",
    "\n",
    "!paddle2caffe --model_dir ./resnet_combined/ --model_filename '__model__' --params_filename '__params__' --save_file caffe-model/paddle2caffe"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.7.3"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 4
}