{
  "nbformat": 4,
  "nbformat_minor": 0,
  "metadata": {
    "colab": {
      "name": "GKC Unet torch.ipynb",
      "version": "0.3.2",
      "provenance": [],
      "collapsed_sections": []
    },
    "kernelspec": {
      "name": "python3",
      "display_name": "Python 3"
    },
    "accelerator": "GPU"
  },
  "cells": [
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "LJU6exrh0KmK",
        "colab_type": "text"
      },
      "source": [
        "# initialize"
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "2MVIR7Wkrj8y",
        "colab_type": "code",
        "colab": {}
      },
      "source": [
        "# connect Google drive\n",
        "\n",
        "import os, imp, logging\n",
        "from google.colab import drive\n",
        "os.chdir(\"/content\")\n",
        "\n",
        "# Mount Google Drive once per session; the '/content/drive' symlink\n",
        "# created below doubles as the \"already mounted\" marker.\n",
        "# NOTE(review): after a VM restart the symlink can survive while the\n",
        "# mount is gone -- remove '/content/drive' manually if reads fail.\n",
        "if not os.path.exists(\"/content/drive\"):\n",
        "    drive.mount('/gdrive', force_remount=True)\n",
        "    # expose the Drive project folder 'GKC' as /content/drive\n",
        "    !ln -s '/gdrive/My Drive/GKC' drive\n",
        "\n",
        "\n",
        "\n",
        "# download code (from my github, for colab running)\n",
        "# clone on first run, otherwise just pull the latest revision\n",
        "if not os.path.exists(\"Unet-pytorch\"):\n",
        "    ! git clone https://github.com/silicx/Unet-pytorch.git\n",
        "    os.chdir(\"Unet-pytorch\")\n",
        "else:\n",
        "    os.chdir(\"Unet-pytorch\")\n",
        "    ! git pull\n",
        "\n",
        "\n",
        "# setup logger\n",
        "logging.basicConfig(format='%(asctime)s %(message)s', level=logging.INFO)\n",
        "logging.info(\"done.\")\n",
        "\n",
        "\n",
        "# show which GPU Colab allocated for this session\n",
        "! nvidia-smi"
      ],
      "execution_count": 0,
      "outputs": []
    },
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "wYrxHJfLFamD",
        "colab_type": "text"
      },
      "source": [
        "## configuration"
      ]
    },
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "E1EC0-ZEFVbS",
        "colab_type": "text"
      },
      "source": [
        "### axis0 (transverse)"
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "TIi51q8cJwOu",
        "colab_type": "code",
        "outputId": "022bfaed-a76e-469d-8612-27dcc16b3390",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 34
        }
      },
      "source": [
        "from config import Configuration\n",
        "\n",
        "# Transverse (axis0) model: binary segmentation (2 output channels)\n",
        "# on single-channel raw slices.\n",
        "# Fix: this cell sits under the \"axis0\" heading but previously built\n",
        "# Configuration('axis2'), which would read/write the axis2 data and\n",
        "# model directories -- use 'axis0' to match the heading and the other\n",
        "# per-axis configuration cells below.\n",
        "config = Configuration('axis0')\n",
        "config.output_ch = 2\n",
        "config.img_ch    = 1\n",
        "config.data_mode = 'raw'\n",
        "\n",
        "logging.info(\"done.\")"
      ],
      "execution_count": 0,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "2019-06-08 02:42:11,389 done.\n"
          ],
          "name": "stderr"
        }
      ]
    },
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "_y5Eh06jFd8n",
        "colab_type": "text"
      },
      "source": [
        "### axis1 (coronal)"
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "oX1b5K3NFihN",
        "colab_type": "code",
        "colab": {}
      },
      "source": [
        "from config import Configuration\n",
        "\n",
        "# Coronal (axis1) model: 20-way location labels on 3-channel slices.\n",
        "config = Configuration('axis1')\n",
        "for attr, value in [('output_ch', 20), ('img_ch', 3), ('data_mode', 'location')]:\n",
        "    setattr(config, attr, value)\n",
        "\n",
        "logging.info(\"done.\")"
      ],
      "execution_count": 0,
      "outputs": []
    },
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "gd_9Kj8FFgnO",
        "colab_type": "text"
      },
      "source": [
        "### axis2 (sagittal)"
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "wQWiVAyMFjIe",
        "colab_type": "code",
        "colab": {}
      },
      "source": [
        "from config import Configuration\n",
        "\n",
        "# Sagittal (axis2) model: 20-way location labels on 3-channel slices.\n",
        "config = Configuration('axis2')\n",
        "for attr, value in [('output_ch', 20), ('img_ch', 3), ('data_mode', 'location')]:\n",
        "    setattr(config, attr, value)\n",
        "\n",
        "logging.info(\"done.\")"
      ],
      "execution_count": 0,
      "outputs": []
    },
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "4KEv55UZsal_",
        "colab_type": "text"
      },
      "source": [
        "## data preparation (split into slices)"
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "I77LRrlZS4t9",
        "colab_type": "code",
        "outputId": "d17ca97b-abb3-4209-b22a-ba429b9aa56a",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 255
        }
      },
      "source": [
        "os.chdir(\"/content/Unet-pytorch\")\n",
        "\n",
        "# Split the 3D volumes into per-slice training files for the axis\n",
        "# selected in `config`; override=True regenerates slices even if a\n",
        "# previous split already exists on disk.\n",
        "import dataprocess\n",
        "imp.reload(dataprocess)  # pick up local edits without restarting the kernel\n",
        "dataprocess.split_data(config, override=True)"
      ],
      "execution_count": 0,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "2019-06-05 12:21:50,283 /content/data/axis2/\n",
            "2019-06-05 12:21:50,292 File 1\n",
            "2019-06-05 12:22:04,336 File 2\n",
            "2019-06-05 12:22:11,032 File 3\n",
            "2019-06-05 12:22:19,543 File 4\n",
            "2019-06-05 12:22:28,078 File 5\n",
            "2019-06-05 12:22:36,597 File 6\n",
            "2019-06-05 12:22:44,693 File 7\n",
            "2019-06-05 12:22:51,493 File 8\n",
            "2019-06-05 12:22:59,060 File 9\n",
            "2019-06-05 12:23:06,954 File 10\n",
            "2019-06-05 12:23:14,480 train: 4148 slices\n",
            "2019-06-05 12:23:14,482 test: 512 slices\n",
            "2019-06-05 12:23:14,485 valid: 460 slices\n"
          ],
          "name": "stderr"
        }
      ]
    },
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "ycLdKhs6Hhky",
        "colab_type": "text"
      },
      "source": [
        "# 2D U-Net (PyTorch)\n"
      ]
    },
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "l9RMU6Y9ShJP",
        "colab_type": "text"
      },
      "source": [
        "## train"
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "tJKuMhXyHhPa",
        "colab_type": "code",
        "colab": {}
      },
      "source": [
        "import imp, os\n",
        "os.chdir(\"/content/Unet-pytorch\")\n",
        "import main\n",
        "\n",
        "# Train the 2D U-Net with the per-axis settings chosen above.\n",
        "main.train(config)"
      ],
      "execution_count": 0,
      "outputs": []
    },
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "bnzsWp1npMwE",
        "colab_type": "text"
      },
      "source": [
        "## inference"
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "MxNyCQVaHtpk",
        "colab_type": "code",
        "outputId": "c39083b3-021c-4c9b-b363-7d9e5caf938a",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 3094
        }
      },
      "source": [
        "import main, imp\n",
        "\n",
        "# Run slice-by-slice inference over every case in the input folder and\n",
        "# assemble the per-slice predictions into 3D probability volumes.\n",
        "main.inference_3D(\n",
        "    config,\n",
        "    \"/content/drive/h5_8b/\",  # input case h5 files\n",
        "    \"/content/result/\"        # destination for per-case prediction h5 files\n",
        ")"
      ],
      "execution_count": 0,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "2019-06-08 02:42:15,947 Weight loaded: /content/drive/models/axis2/best_model.pkl\n",
            "2019-06-08 02:42:15,951 Sample case1.h5\n",
            "2019-06-08 02:42:22,168 [32/512]\n",
            "2019-06-08 02:42:27,637 [64/512]\n",
            "2019-06-08 02:42:33,164 [96/512]\n",
            "2019-06-08 02:42:38,682 [128/512]\n",
            "2019-06-08 02:42:44,126 [160/512]\n",
            "2019-06-08 02:42:49,558 [192/512]\n",
            "2019-06-08 02:42:54,910 [224/512]\n",
            "2019-06-08 02:43:00,313 [256/512]\n",
            "2019-06-08 02:43:05,829 [288/512]\n",
            "2019-06-08 02:43:11,189 [320/512]\n",
            "2019-06-08 02:43:16,533 [352/512]\n",
            "2019-06-08 02:43:21,828 [384/512]\n",
            "2019-06-08 02:43:27,048 [416/512]\n",
            "2019-06-08 02:43:32,431 [448/512]\n",
            "2019-06-08 02:43:37,784 [480/512]\n",
            "2019-06-08 02:43:42,958 [512/512]\n",
            "2019-06-08 02:43:43,156 (512, 544, 512, 20)\n",
            "2019-06-08 02:43:43,193 Sample case2.h5\n",
            "2019-06-08 02:43:48,685 [32/512]\n",
            "2019-06-08 02:43:53,420 [64/512]\n",
            "2019-06-08 02:43:58,216 [96/512]\n",
            "2019-06-08 02:44:03,037 [128/512]\n",
            "2019-06-08 02:44:07,932 [160/512]\n",
            "2019-06-08 02:44:12,878 [192/512]\n",
            "2019-06-08 02:44:17,685 [224/512]\n",
            "2019-06-08 02:44:22,481 [256/512]\n",
            "2019-06-08 02:44:27,415 [288/512]\n",
            "2019-06-08 02:44:32,188 [320/512]\n",
            "2019-06-08 02:44:37,107 [352/512]\n",
            "2019-06-08 02:44:41,987 [384/512]\n",
            "2019-06-08 02:44:46,868 [416/512]\n",
            "2019-06-08 02:44:51,683 [448/512]\n",
            "2019-06-08 02:44:56,414 [480/512]\n",
            "2019-06-08 02:45:01,208 [512/512]\n",
            "2019-06-08 02:45:01,376 (512, 496, 512, 20)\n",
            "2019-06-08 02:45:02,057 Sample case3.h5\n",
            "2019-06-08 02:45:08,346 [32/512]\n",
            "2019-06-08 02:45:13,814 [64/512]\n",
            "2019-06-08 02:45:19,377 [96/512]\n",
            "2019-06-08 02:45:24,894 [128/512]\n",
            "2019-06-08 02:45:30,343 [160/512]\n",
            "2019-06-08 02:45:35,801 [192/512]\n",
            "2019-06-08 02:45:41,349 [224/512]\n",
            "2019-06-08 02:45:46,993 [256/512]\n",
            "2019-06-08 02:45:52,591 [288/512]\n",
            "2019-06-08 02:45:58,054 [320/512]\n",
            "2019-06-08 02:46:03,544 [352/512]\n",
            "2019-06-08 02:46:09,011 [384/512]\n",
            "2019-06-08 02:46:14,457 [416/512]\n",
            "2019-06-08 02:46:20,040 [448/512]\n",
            "2019-06-08 02:46:25,555 [480/512]\n",
            "2019-06-08 02:46:31,020 [512/512]\n",
            "2019-06-08 02:46:31,186 (512, 560, 512, 20)\n",
            "2019-06-08 02:46:31,213 Sample case4.h5\n",
            "2019-06-08 02:46:38,281 [32/512]\n",
            "2019-06-08 02:46:45,085 [64/512]\n",
            "2019-06-08 02:46:51,205 [96/512]\n",
            "2019-06-08 02:46:57,552 [128/512]\n",
            "2019-06-08 02:47:03,658 [160/512]\n",
            "2019-06-08 02:47:09,850 [192/512]\n",
            "2019-06-08 02:47:16,606 [224/512]\n",
            "2019-06-08 02:47:22,767 [256/512]\n",
            "2019-06-08 02:47:29,099 [288/512]\n",
            "2019-06-08 02:47:35,254 [320/512]\n",
            "2019-06-08 02:47:41,527 [352/512]\n",
            "2019-06-08 02:47:47,613 [384/512]\n",
            "2019-06-08 02:47:53,752 [416/512]\n",
            "2019-06-08 02:47:59,955 [448/512]\n",
            "2019-06-08 02:48:06,149 [480/512]\n",
            "2019-06-08 02:48:12,254 [512/512]\n",
            "2019-06-08 02:48:12,436 (512, 624, 512, 20)\n",
            "2019-06-08 02:48:12,489 Sample case5.h5\n",
            "2019-06-08 02:48:19,400 [32/512]\n",
            "2019-06-08 02:48:25,258 [64/512]\n",
            "2019-06-08 02:48:31,286 [96/512]\n",
            "2019-06-08 02:48:37,204 [128/512]\n",
            "2019-06-08 02:48:43,097 [160/512]\n",
            "2019-06-08 02:48:48,969 [192/512]\n",
            "2019-06-08 02:48:54,788 [224/512]\n",
            "2019-06-08 02:49:00,672 [256/512]\n",
            "2019-06-08 02:49:06,692 [288/512]\n",
            "2019-06-08 02:49:12,570 [320/512]\n",
            "2019-06-08 02:49:18,322 [352/512]\n",
            "2019-06-08 02:49:24,188 [384/512]\n",
            "2019-06-08 02:49:30,086 [416/512]\n",
            "2019-06-08 02:49:36,084 [448/512]\n",
            "2019-06-08 02:49:42,064 [480/512]\n",
            "2019-06-08 02:49:48,147 [512/512]\n",
            "2019-06-08 02:49:48,322 (512, 592, 512, 20)\n",
            "2019-06-08 02:49:48,400 Sample case6.h5\n",
            "2019-06-08 02:49:54,681 [32/512]\n",
            "2019-06-08 02:50:00,218 [64/512]\n",
            "2019-06-08 02:50:05,770 [96/512]\n",
            "2019-06-08 02:50:11,352 [128/512]\n",
            "2019-06-08 02:50:16,897 [160/512]\n",
            "2019-06-08 02:50:22,490 [192/512]\n",
            "2019-06-08 02:50:27,947 [224/512]\n",
            "2019-06-08 02:50:33,506 [256/512]\n",
            "2019-06-08 02:50:39,087 [288/512]\n",
            "2019-06-08 02:50:44,658 [320/512]\n",
            "2019-06-08 02:50:50,260 [352/512]\n",
            "2019-06-08 02:50:55,741 [384/512]\n",
            "2019-06-08 02:51:01,334 [416/512]\n",
            "2019-06-08 02:51:06,972 [448/512]\n",
            "2019-06-08 02:51:12,539 [480/512]\n",
            "2019-06-08 02:51:18,215 [512/512]\n",
            "2019-06-08 02:51:18,386 (512, 560, 512, 20)\n",
            "2019-06-08 02:51:18,515 Sample case7.h5\n",
            "2019-06-08 02:51:24,119 [32/512]\n",
            "2019-06-08 02:51:29,135 [64/512]\n",
            "2019-06-08 02:51:34,067 [96/512]\n",
            "2019-06-08 02:51:39,065 [128/512]\n",
            "2019-06-08 02:51:43,973 [160/512]\n",
            "2019-06-08 02:51:48,878 [192/512]\n",
            "2019-06-08 02:51:53,757 [224/512]\n",
            "2019-06-08 02:51:58,622 [256/512]\n",
            "2019-06-08 02:52:03,458 [288/512]\n",
            "2019-06-08 02:52:08,353 [320/512]\n",
            "2019-06-08 02:52:13,104 [352/512]\n",
            "2019-06-08 02:52:18,057 [384/512]\n",
            "2019-06-08 02:52:23,053 [416/512]\n",
            "2019-06-08 02:52:27,859 [448/512]\n",
            "2019-06-08 02:52:32,791 [480/512]\n",
            "2019-06-08 02:52:37,623 [512/512]\n",
            "2019-06-08 02:52:37,765 (512, 496, 512, 20)\n",
            "2019-06-08 02:52:38,087 Sample case8.h5\n",
            "2019-06-08 02:52:44,250 [32/512]\n",
            "2019-06-08 02:52:49,708 [64/512]\n",
            "2019-06-08 02:52:55,301 [96/512]\n",
            "2019-06-08 02:53:00,745 [128/512]\n",
            "2019-06-08 02:53:06,226 [160/512]\n",
            "2019-06-08 02:53:11,741 [192/512]\n",
            "2019-06-08 02:53:17,147 [224/512]\n",
            "2019-06-08 02:53:22,549 [256/512]\n",
            "2019-06-08 02:53:28,000 [288/512]\n",
            "2019-06-08 02:53:33,460 [320/512]\n",
            "2019-06-08 02:53:38,862 [352/512]\n",
            "2019-06-08 02:53:44,150 [384/512]\n",
            "2019-06-08 02:53:49,524 [416/512]\n",
            "2019-06-08 02:53:54,985 [448/512]\n",
            "2019-06-08 02:54:00,502 [480/512]\n",
            "2019-06-08 02:54:05,964 [512/512]\n",
            "2019-06-08 02:54:06,128 (512, 544, 512, 20)\n",
            "2019-06-08 02:54:06,254 Sample case9.h5\n",
            "2019-06-08 02:54:12,944 [32/512]\n",
            "2019-06-08 02:54:18,464 [64/512]\n",
            "2019-06-08 02:54:24,129 [96/512]\n",
            "2019-06-08 02:54:29,805 [128/512]\n",
            "2019-06-08 02:54:35,485 [160/512]\n",
            "2019-06-08 02:54:41,816 [192/512]\n",
            "2019-06-08 02:54:47,404 [224/512]\n",
            "2019-06-08 02:54:53,023 [256/512]\n",
            "2019-06-08 02:54:58,543 [288/512]\n",
            "2019-06-08 02:55:04,165 [320/512]\n",
            "2019-06-08 02:55:10,103 [352/512]\n",
            "2019-06-08 02:55:15,796 [384/512]\n",
            "2019-06-08 02:55:21,415 [416/512]\n",
            "2019-06-08 02:55:27,109 [448/512]\n",
            "2019-06-08 02:55:32,622 [480/512]\n",
            "2019-06-08 02:55:38,226 [512/512]\n",
            "2019-06-08 02:55:38,428 (512, 560, 512, 20)\n",
            "2019-06-08 02:55:38,459 Sample case10.h5\n",
            "2019-06-08 02:55:44,623 [32/512]\n",
            "2019-06-08 02:55:49,971 [64/512]\n",
            "2019-06-08 02:55:55,323 [96/512]\n",
            "2019-06-08 02:56:00,690 [128/512]\n",
            "2019-06-08 02:56:06,019 [160/512]\n",
            "2019-06-08 02:56:11,447 [192/512]\n",
            "2019-06-08 02:56:16,790 [224/512]\n",
            "2019-06-08 02:56:22,109 [256/512]\n",
            "2019-06-08 02:56:27,457 [288/512]\n",
            "2019-06-08 02:56:32,862 [320/512]\n",
            "2019-06-08 02:56:38,148 [352/512]\n",
            "2019-06-08 02:56:43,541 [384/512]\n",
            "2019-06-08 02:56:48,891 [416/512]\n",
            "2019-06-08 02:56:54,249 [448/512]\n",
            "2019-06-08 02:56:59,605 [480/512]\n",
            "2019-06-08 02:57:05,028 [512/512]\n",
            "2019-06-08 02:57:05,186 (512, 544, 512, 20)\n"
          ],
          "name": "stderr"
        }
      ]
    },
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "ZlKDWnkuTn8s",
        "colab_type": "text"
      },
      "source": [
        "## evaluation of the 2D U-Net"
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "HqfTFDzl72D6",
        "colab_type": "code",
        "colab": {}
      },
      "source": [
        "import numpy as np\n",
        "from main import evaluate_3D_path\n",
        "\n",
        "# Score the current axis' 3D predictions against ground truth and\n",
        "# report each metric averaged over the cases of a split.\n",
        "for lo, hi, split_name in [(1, 10, 'train'), (10, 11, 'test')]:\n",
        "    logging.info(\"\")\n",
        "    split_results = []\n",
        "    for case_index in range(lo, hi):\n",
        "        pred_path = \"/content/drive/result/{}/case{}.h5\".format(config.name, case_index)\n",
        "        gt_path = \"/content/drive/h5_8b/case{}.h5\".format(case_index)\n",
        "        split_results.append(evaluate_3D_path(pred_path, gt_path))\n",
        "\n",
        "    print(split_name, ':')\n",
        "    for group in split_results[0].keys():\n",
        "        print(group)\n",
        "        for metric in split_results[0][group].keys():\n",
        "            values = np.array([case_res[group][metric] for case_res in split_results])\n",
        "            print('\\t', metric, ':', np.mean(values))"
      ],
      "execution_count": 0,
      "outputs": []
    },
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "uQrBB1oZOhK_",
        "colab_type": "text"
      },
      "source": [
        "# fusion evaluation"
      ]
    },
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "egWNhhOnT6XN",
        "colab_type": "text"
      },
      "source": [
        "## simple seg fusion"
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "vyWfMa4OJu69",
        "colab_type": "code",
        "colab": {}
      },
      "source": [
        "import h5py, logging, gc\n",
        "import numpy as np\n",
        "\n",
        "from main import evaluate_3D_image\n",
        "\n",
        "\n",
        "def load_for_seg_fusion(sample_index, part, pred_dir = '/content/result', gt_dir='/content/drive/h5_8b'):\n",
        "    \"\"\"\n",
        "    Load the three per-axis prediction volumes plus the binarized ground\n",
        "    truth for one 128-slice chunk of a case, re-oriented so that all four\n",
        "    arrays share the same spatial layout.\n",
        "\n",
        "    sample_index: case number, 1~10\n",
        "    part: chunk index, 0-3 (chunk spans slices part*128 .. (part+1)*128)\n",
        "    pred_dir: folder which contains all inference results (one sub-folder\n",
        "        per axis, written by the inference cell above)\n",
        "    gt_dir: training data folder with the original case h5 files\n",
        "    \"\"\"\n",
        "    \n",
        "    # half-open slice bounds for this chunk; clamp the upper bound so the\n",
        "    # last chunk reads through to the end of the axis\n",
        "    part = [part*128, (part+1)*128]\n",
        "    if part[1]>512:\n",
        "        part[1] = None\n",
        "    \n",
        "    \n",
        "    # axis0 predictions are loaded as-is (reference orientation; 2 output\n",
        "    # channels per the axis0 configuration cell above)\n",
        "    logging.info(\"loading axis0\")\n",
        "    with h5py.File(\"{}/axis0/case{}.h5\".format(pred_dir, sample_index), 'r') as fp:\n",
        "        axis0 = np.array(fp['data'][:, :, part[0]:part[1], :])\n",
        "        logging.info(axis0.shape)\n",
        "         \n",
        "    # axis1 predictions carry 20 class channels: transpose into the axis0\n",
        "    # layout, then collapse to [background, max over the foreground classes]\n",
        "    logging.info(\"loading axis1\")\n",
        "    with h5py.File(\"{}/axis1/case{}.h5\".format(pred_dir, sample_index), 'r') as fp:\n",
        "        axis1_tmp = np.array(fp['data'][:, :, part[0]:part[1], :]).transpose((1,0,2,3))\n",
        "        axis1 = np.stack([\n",
        "            axis1_tmp[...,0].copy(),\n",
        "            np.max(axis1_tmp[...,1:],axis=3)\n",
        "        ],axis=3)\n",
        "        logging.info(axis1.shape)\n",
        "        del axis1_tmp  # these volumes are large; free eagerly\n",
        "        gc.collect()\n",
        "        \n",
        "    # same collapse for axis2, with its own transpose into the axis0 layout;\n",
        "    # note the chunk is sliced on the first stored dimension here\n",
        "    logging.info(\"loading axis2\")\n",
        "    with h5py.File(\"{}/axis2/case{}.h5\".format(pred_dir, sample_index), 'r') as fp:\n",
        "        axis2_tmp = np.array(fp['data'][part[0]:part[1], :, :, :]).transpose((1,2,0,3))\n",
        "        axis2 = np.stack([\n",
        "            axis2_tmp[...,0].copy(),\n",
        "            np.max(axis2_tmp[...,1:],axis=3)\n",
        "        ],axis=3)\n",
        "        logging.info(axis2.shape)\n",
        "        del axis2_tmp\n",
        "        gc.collect()\n",
        "        \n",
        "    # ground truth: drop the leading rows so the first dimension becomes a\n",
        "    # multiple of 16 (presumably mirroring the crop applied when slicing the\n",
        "    # training data -- TODO confirm against dataprocess), take the same\n",
        "    # chunk, and binarize (any nonzero label counts as foreground)\n",
        "    logging.info(\"loading gt\")\n",
        "    with h5py.File(\"{}/case{}.h5\".format(gt_dir, sample_index), 'r') as fp:\n",
        "        gt = np.array(fp['annot'])\n",
        "        gt = gt[gt.shape[0]%16:, :, part[0]:part[1]]\n",
        "        gt = gt>0\n",
        "    \n",
        "    return axis0, axis1, axis2, gt\n",
        "\n",
        "\n",
        "\n",
        "\n",
        "\n",
        "def evaluate_seg_fusion(sample_index, fuse_method):\n",
        "    \"\"\"Fuse the three per-axis predictions of one case (processed in four\n",
        "    128-slice chunks to bound memory) and print each segmentation metric\n",
        "    as its accumulated numerator/denominator ratio.\"\"\"\n",
        "    metric_names = ('accuracy', 'precision', 'recall', 'iou', 'dice')\n",
        "    totals = {name: [0, 0] for name in metric_names}\n",
        "\n",
        "    for chunk in range(4):\n",
        "        logging.info('Part {}'.format(chunk))\n",
        "        axis0, axis1, axis2, gt = load_for_seg_fusion(sample_index, chunk)\n",
        "        fused = fuse_method(axis0, axis1, axis2)\n",
        "        chunk_res = evaluate_3D_image(fused, gt)['segmentation']\n",
        "        for name in totals.keys():\n",
        "            totals[name][0] += chunk_res[name][0]\n",
        "            totals[name][1] += chunk_res[name][1]\n",
        "\n",
        "    for name, (numer, denom) in totals.items():\n",
        "        print(name)\n",
        "        print(numer / denom)\n",
        "    \n",
        "    \n",
        "def vote2_fusion_seg(axis0, axis1, axis2):\n",
        "    \"\"\"Majority vote: a voxel is foreground when at least two of the\n",
        "    three per-axis argmax labels mark it as foreground.\"\"\"\n",
        "    votes = sum(np.argmax(view, axis=3) for view in (axis0, axis1, axis2))\n",
        "    return votes >= 2\n",
        "\n",
        "def vote1_fusion_seg(axis0, axis1, axis2):\n",
        "    \"\"\"Permissive vote: a voxel is foreground when any one of the three\n",
        "    per-axis argmax labels marks it as foreground.\"\"\"\n",
        "    votes = sum(np.argmax(view, axis=3) for view in (axis0, axis1, axis2))\n",
        "    return votes >= 1\n",
        "\n",
        "def mean_fusion_seg(axis0, axis1, axis2):\n",
        "    \"\"\"Average the three per-axis channel scores, then take the\n",
        "    per-voxel argmax label.\"\"\"\n",
        "    averaged = (axis0 + axis1 + axis2) / 3\n",
        "    return np.argmax(averaged, axis=3)\n",
        "\n",
        "\n",
        "# Evaluate seg fusion on the held-out case using 1-of-3 voting.\n",
        "evaluate_seg_fusion(10, vote1_fusion_seg)"
      ],
      "execution_count": 0,
      "outputs": []
    },
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "JoCjpTXWy_od",
        "colab_type": "text"
      },
      "source": [
        "## simple cls fusion"
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "feWcilrtSc7Y",
        "colab_type": "code",
        "outputId": "991151b0-38fb-4ef5-a0ef-bc7f748db138",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 1173
        }
      },
      "source": [
        "import h5py, logging, gc\n",
        "import numpy as np\n",
        "\n",
        "from main import evaluate_3D_image\n",
        "\n",
        "def load_for_cls_fusion(sample_index, part, pred_dir = '/content/result', gt_dir='/content/drive/h5_8b'):\n",
        "    \"\"\"\n",
        "    Load the axis1 and axis2 prediction volumes (all 20 class channels\n",
        "    kept, unlike the seg-fusion loader) plus the raw class-labelled ground\n",
        "    truth for one 64-slice chunk of a case, re-oriented to a shared layout.\n",
        "\n",
        "    sample_index: case number, 1~10\n",
        "    part: chunk index, 0-7 (chunk spans slices part*64 .. (part+1)*64)\n",
        "    pred_dir: folder which contains all inference results (one sub-folder\n",
        "        per axis, written by the inference cell above)\n",
        "    gt_dir: training data folder with the original case h5 files\n",
        "    \"\"\"\n",
        "    \n",
        "    # half-open slice bounds; clamp the upper bound for the last chunk\n",
        "    part = [part*64, (part+1)*64]\n",
        "    if part[1]>512:\n",
        "        part[1] = None\n",
        "         \n",
        "    # transpose each axis' stored layout into the common orientation;\n",
        "    # axis2 is sliced on its first stored dimension\n",
        "    logging.info(\"loading axis1\")\n",
        "    with h5py.File(\"{}/axis1/case{}.h5\".format(pred_dir, sample_index), 'r') as fp:\n",
        "        axis1 = np.array(fp['data'][:, :, part[0]:part[1], :]).transpose((1,0,2,3))\n",
        "        logging.info(axis1.shape)\n",
        "        \n",
        "    logging.info(\"loading axis2\")\n",
        "    with h5py.File(\"{}/axis2/case{}.h5\".format(pred_dir, sample_index), 'r') as fp:\n",
        "        axis2 = np.array(fp['data'][part[0]:part[1], :, :, :]).transpose((1,2,0,3))\n",
        "        logging.info(axis2.shape)\n",
        "        \n",
        "    # ground truth keeps its integer class labels here (no binarization);\n",
        "    # the %16 crop presumably mirrors the training-time crop -- TODO confirm\n",
        "    logging.info(\"loading gt\")\n",
        "    with h5py.File(\"{}/case{}.h5\".format(gt_dir, sample_index), 'r') as fp:\n",
        "        gt = np.array(fp['annot'])\n",
        "        gt = gt[gt.shape[0]%16:, :, part[0]:part[1]]\n",
        "    \n",
        "    return axis1, axis2, gt\n",
        "\n",
        "\n",
        "\n",
        "\n",
        "def evaluate_cls_fusion(sample_index, fuse_method):\n",
        "    \"\"\"Fuse axis1/axis2 class predictions for one case (eight 64-slice\n",
        "    chunks) and print segmentation and classification metrics, each\n",
        "    accumulated as [numerator, denominator] pairs over the chunks.\"\"\"\n",
        "    # accumulators: [numerator, denominator] per metric; mean_accuracy\n",
        "    # keeps one pair per each of the 20 classes\n",
        "    all_res = {\n",
        "        'segmentation':{\n",
        "            'accuracy' : [0,0],\n",
        "            'precision': [0,0],\n",
        "            'recall'   : [0,0],\n",
        "            'iou'      : [0,0],\n",
        "            'dice'     : [0,0],\n",
        "        },\n",
        "        'classification': {\n",
        "            'accuracy': [0,0],\n",
        "            'mean_accuracy': [[0]*20, [0]*20],\n",
        "        }\n",
        "    }\n",
        "    # convert the nested lists to numpy arrays so += accumulates elementwise\n",
        "    # NOTE(review): these start as integer arrays, so fractional increments\n",
        "    # would be truncated on assignment -- presumably evaluate_3D_image\n",
        "    # returns integer counts here; confirm in main.py\n",
        "    for k1 in all_res.keys():\n",
        "        for k2 in all_res[k1].keys():\n",
        "            all_res[k1][k2] = np.array(all_res[k1][k2])\n",
        "    \n",
        "    for part in range(8):\n",
        "        logging.info('Part {}'.format(part))\n",
        "        axis1, axis2, gt = load_for_cls_fusion(sample_index, part)\n",
        "        pred = fuse_method(axis1, axis2)\n",
        "        res = evaluate_3D_image(pred, gt)\n",
        "        for k1 in all_res.keys():\n",
        "            for k2 in all_res[k1].keys():\n",
        "                # accumulate numerator (row 0) and denominator (row 1)\n",
        "                t0, t1 = np.array(res[k1][k2][0]), np.array(res[k1][k2][1])\n",
        "                all_res[k1][k2][0] += t0\n",
        "                all_res[k1][k2][1] += t1\n",
        "    \n",
        "    # report ratios; classes never seen in this case have denominator 0,\n",
        "    # yielding nan entries that nanmean skips (matches the RuntimeWarning\n",
        "    # and nan values in the recorded output)\n",
        "    for k1 in all_res.keys():\n",
        "        print(k1)\n",
        "        for k2 in all_res[k1].keys():\n",
        "            print('\\t', k2)\n",
        "            t = all_res[k1][k2][0,...]/all_res[k1][k2][1,...]\n",
        "            print('\\t', t, np.nanmean(t))\n",
        "\n",
        "        \n",
        "\n",
        "def mean_fusion_cls(axis1, axis2):\n",
        "    \"\"\"Average the two per-axis class-score volumes and take the\n",
        "    per-voxel argmax class label.\"\"\"\n",
        "    averaged = (axis1 + axis2) / 2\n",
        "    return np.argmax(averaged, axis=3)\n",
        "\n",
        "\n",
        "# Evaluate classification fusion on the held-out case.\n",
        "evaluate_cls_fusion(10, mean_fusion_cls)\n"
      ],
      "execution_count": 0,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "2019-06-08 04:24:47,576 Part 0\n",
            "2019-06-08 04:24:47,578 loading axis1\n",
            "2019-06-08 04:25:56,418 (544, 512, 64, 20)\n",
            "2019-06-08 04:25:56,420 loading axis2\n",
            "2019-06-08 04:26:05,854 (544, 512, 64, 20)\n",
            "2019-06-08 04:26:05,855 loading gt\n",
            "2019-06-08 04:26:13,067 Part 1\n",
            "2019-06-08 04:26:13,068 loading axis1\n",
            "2019-06-08 04:27:21,441 (544, 512, 64, 20)\n",
            "2019-06-08 04:27:21,443 loading axis2\n",
            "2019-06-08 04:27:30,666 (544, 512, 64, 20)\n",
            "2019-06-08 04:27:30,667 loading gt\n",
            "2019-06-08 04:27:38,132 Part 2\n",
            "2019-06-08 04:27:38,133 loading axis1\n",
            "2019-06-08 04:28:34,135 (544, 512, 64, 20)\n",
            "2019-06-08 04:28:34,137 loading axis2\n",
            "2019-06-08 04:28:43,646 (544, 512, 64, 20)\n",
            "2019-06-08 04:28:43,647 loading gt\n",
            "2019-06-08 04:28:51,194 Part 3\n",
            "2019-06-08 04:28:51,195 loading axis1\n",
            "2019-06-08 04:29:49,673 (544, 512, 64, 20)\n",
            "2019-06-08 04:29:49,675 loading axis2\n",
            "2019-06-08 04:29:58,897 (544, 512, 64, 20)\n",
            "2019-06-08 04:29:58,899 loading gt\n",
            "2019-06-08 04:30:06,524 Part 4\n",
            "2019-06-08 04:30:06,525 loading axis1\n",
            "2019-06-08 04:31:05,183 (544, 512, 64, 20)\n",
            "2019-06-08 04:31:05,185 loading axis2\n",
            "2019-06-08 04:31:14,706 (544, 512, 64, 20)\n",
            "2019-06-08 04:31:14,707 loading gt\n",
            "2019-06-08 04:31:22,335 Part 5\n",
            "2019-06-08 04:31:22,337 loading axis1\n",
            "2019-06-08 04:32:20,820 (544, 512, 64, 20)\n",
            "2019-06-08 04:32:20,822 loading axis2\n",
            "2019-06-08 04:32:30,044 (544, 512, 64, 20)\n",
            "2019-06-08 04:32:30,045 loading gt\n",
            "2019-06-08 04:32:37,576 Part 6\n",
            "2019-06-08 04:32:37,577 loading axis1\n",
            "2019-06-08 04:33:33,624 (544, 512, 64, 20)\n",
            "2019-06-08 04:33:33,626 loading axis2\n",
            "2019-06-08 04:33:43,449 (544, 512, 64, 20)\n",
            "2019-06-08 04:33:43,450 loading gt\n",
            "2019-06-08 04:33:51,135 Part 7\n",
            "2019-06-08 04:33:51,136 loading axis1\n",
            "2019-06-08 04:34:49,464 (544, 512, 64, 20)\n",
            "2019-06-08 04:34:49,466 loading axis2\n",
            "2019-06-08 04:34:58,665 (544, 512, 64, 20)\n",
            "2019-06-08 04:34:58,666 loading gt\n"
          ],
          "name": "stderr"
        },
        {
          "output_type": "stream",
          "text": [
            "segmentation\n",
            "\t accuracy\n",
            "\t 0.9948612311307121 0.9948612311307121\n",
            "\t precision\n",
            "\t 0.8821936091766901 0.8821936091766901\n",
            "\t recall\n",
            "\t 0.9948375383954786 0.9948375383954786\n",
            "\t iou\n",
            "\t 0.8781733981083837 0.8781733981083837\n",
            "\t dice\n",
            "\t 0.9351355939689515 0.9351355939689515\n",
            "classification\n",
            "\t accuracy\n",
            "\t 0.6947378757744619 0.6947378757744619\n",
            "\t mean_accuracy\n",
            "\t [0.99979935 0.9357967  0.94116247 0.95680653 0.92665204 0.91650611\n",
            " 0.37705774 0.03711128 0.07066372 0.22641252 0.8203672  0.83591628\n",
            " 0.73332226 0.6166392  0.51340731 0.60763894 0.81480991 0.80366624\n",
            "        nan        nan] 0.6740964323478694\n"
          ],
          "name": "stdout"
        },
        {
          "output_type": "stream",
          "text": [
            "/usr/local/lib/python3.6/dist-packages/ipykernel_launcher.py:71: RuntimeWarning: invalid value encountered in true_divide\n"
          ],
          "name": "stderr"
        }
      ]
    }
  ]
}