import os
import numpy as np
import argparse
import h5py
import pickle
# Seeded module-level RNG so the train/val split and random slice sampling
# are reproducible across runs.
rng = np.random.RandomState(0)

import skimage.transform as transfrom
import tensorflow as tf
import keras.backend as K
from keras.callbacks import ModelCheckpoint
from keras.applications.inception_v3 import InceptionV3
from keras.models import *
from keras.layers import *
from keras.regularizers import *
from keras.optimizers import SGD, RMSprop, Adam
from keras.models import Sequential

# L2 weight-decay coefficient applied to every conv/dense layer below.
wd = 1e-4

class Generator():
    """Batch generators over an HDF5 file of CT volumes for flip detection.

    Each HDF5 entry is a 3-D volume indexed as [y, x, z]. A training sample is
    a single axial slice taken either near the top of the volume (label
    ``[1, 0]``) or near the bottom (label ``[0, 1]``); intensities are clipped
    to [-1024, 800] (presumably Hounsfield units — TODO confirm) and rescaled
    to [0, 1].

    Public attributes: ``dataset``, ``scan_list``, ``train_list``,
    ``val_list``, ``test_list``.
    """

    def __init__(self, hdf5_file):
        """Open the dataset and build train/val/test scan-id splits.

        Reads ``./train_list.npy`` and ``./test_list.npy``; 10% of each is
        held out into ``val_list``, the rest stays in train/test.
        """
        self.dataset = h5py.File(hdf5_file, 'r')
        self.scan_list = list(self.dataset.keys())
        self.scan_list.sort()
        test_list = np.load('./test_list.npy')
        train_list = np.load('./train_list.npy')
        print(len(train_list), len(test_list))

        # np.random.choice requires an integer size; the original code passed
        # the float 0.1*len(...), which raises TypeError on current numpy.
        self.val_list = np.append(
            rng.choice(train_list, size=int(0.1 * len(train_list)), replace=False),
            rng.choice(test_list, size=int(0.1 * len(test_list)), replace=False))
        self.train_list = [s for s in train_list if s not in self.val_list]
        self.test_list = [s for s in test_list if s not in self.val_list]

        print(len(self.train_list), len(self.val_list), len(self.test_list))

    @staticmethod
    def _preprocess(image):
        """Clip intensities to [-1024, 800] in place and rescale to [0, 1]."""
        image[image < -1024] = -1024
        image[image > 800] = 800
        return (image + 1024) / (1024 + 800)

    def _random_slice(self, scan_id, offset=15):
        """Return (slice, label): slice `offset` from the top -> [1, 0],
        or slice `offset` from the bottom -> [0, 1], chosen with p=0.5."""
        if rng.rand() < 0.5:
            image = self.dataset[scan_id][:, :, offset:offset + 1]
            label = [1, 0]
        else:
            depth = self.dataset[scan_id].shape[-1]
            image = self.dataset[scan_id][:, :, depth - offset - 1: depth - offset]
            label = [0, 1]
        return image, label

    def _generate(self, scan_ids, batch_size, drain, z_start=None):
        """Infinite batch generator shared by all public generators.

        When ``z_start`` is None, each sample is a random top/bottom slice
        with the matching label; otherwise the fixed slice at ``z_start``
        (negative values index from the end) is used with label [1, 0].
        ``drain`` controls whether a partial batch is flushed after each full
        pass over ``scan_ids`` (needed for validation/testing/prediction).
        """
        images, labels = [], []
        while True:
            for scan_id in scan_ids:
                if z_start is None:
                    image, label = self._random_slice(scan_id)
                else:
                    image = self.dataset[scan_id][:, :, z_start:z_start + 1]
                    label = [1, 0]
                images.append(self._preprocess(image))
                labels.append(label)

                if len(labels) == batch_size:
                    yield np.asarray(images), np.asarray(labels)
                    images.clear()
                    labels.clear()

            if drain and labels:
                yield np.asarray(images), np.asarray(labels)
                images.clear()
                labels.clear()

    def train_generate(self, batch_size=1):
        """Endless random-slice batches over the training split (no drain:
        a trailing partial batch simply carries into the next pass)."""
        return self._generate(self.train_list, batch_size, drain=False)

    def val_generate(self, batch_size=1):
        """Random-slice batches over the validation split, flushing the
        partial batch at the end of every pass."""
        return self._generate(self.val_list, batch_size, drain=True)

    def predict_generate(self, batch_size=1, z_start=15):
        """Fixed-slice batches over ALL scans (label placeholder [1, 0]),
        flushing the partial batch at the end of every pass."""
        return self._generate(self.scan_list, batch_size, drain=True,
                              z_start=z_start)

    def test_generate(self, batch_size=1):
        """Random-slice batches over the test split, flushing the partial
        batch at the end of every pass."""
        return self._generate(self.test_list, batch_size, drain=True)


def train(hdf5_file, output_dir):
    """Train the flip-detection CNN and report test-split performance.

    Builds a small 3-stage conv net over single 512x512 slices, trains with
    SGD on batches from `Generator`, checkpoints weights after every epoch,
    and finally evaluates on the test split. Ctrl-C skips straight to the
    evaluation step.
    """
    data = Generator(hdf5_file)
    train_batches = data.train_generate(32)
    val_batches = data.val_generate(32)
    test_batches = data.test_generate(32)

    # Legacy Keras-1 style keyword args (init=/W_regularizer=) are used
    # consistently throughout this file; kept as-is for compatibility.
    net = Sequential()
    net.add(Conv2D(32, 9, 9, init='he_normal', activation='relu',
                   W_regularizer=l2(wd), input_shape=(512, 512, 1)))
    net.add(MaxPooling2D(pool_size=(4, 4), strides=(4, 4)))
    net.add(Conv2D(64, 7, 7, init='he_normal', activation='relu',
                   W_regularizer=l2(wd)))
    net.add(MaxPooling2D(pool_size=(4, 4), strides=(4, 4)))
    net.add(Conv2D(128, 7, 7, init='he_normal', activation='relu',
                   W_regularizer=l2(wd)))
    net.add(MaxPooling2D(pool_size=(4, 4), strides=(4, 4)))
    net.add(Flatten())
    net.add(Dense(256, init='he_normal', activation='relu',
                  W_regularizer=l2(wd)))
    net.add(Dropout(0.5, seed=2112))
    net.add(Dense(2, init='he_normal', activation='softmax',
                  W_regularizer=l2(wd)))

    net.compile(optimizer=SGD(1e-2, momentum=0.9, decay=1e-3),
                loss='categorical_crossentropy', metrics=['accuracy'])
    net.summary()

    if not os.path.isdir(output_dir):
        os.makedirs(output_dir)

    checkpoint = ModelCheckpoint(os.path.join(output_dir, '{epoch:02d}.hdf5'),
                                 monitor='val_acc', save_weights_only=True)

    try:
        net.fit_generator(train_batches, len(data.train_list), 1000, verbose=1,
                          validation_data=val_batches,
                          nb_val_samples=len(data.val_list),
                          callbacks=[checkpoint], initial_epoch=0)
    except KeyboardInterrupt:
        # Manual early stop: fall through to the final evaluation.
        pass

    scores = net.evaluate_generator(test_batches, len(data.test_list))
    print(scores)
    K.clear_session()


def predict(hdf5_file, output_dir):
    """Detect flipped scans with a trained model and save their ids.

    Rebuilds the training architecture, loads fixed pretrained weights, and
    scores every scan at several slice depths near both ends of the volume.
    A scan whose averaged score falls below 0.5 is recorded as flipped in
    ``<output_dir>/flip_list.npy``.
    """
    data = Generator(hdf5_file)

    net = Sequential()
    net.add(Conv2D(32, 9, 9, init='he_normal', activation='relu',
                   W_regularizer=l2(wd), input_shape=(512, 512, 1)))
    net.add(MaxPooling2D(pool_size=(4, 4), strides=(4, 4)))
    net.add(Conv2D(64, 7, 7, init='he_normal', activation='relu',
                   W_regularizer=l2(wd)))
    net.add(MaxPooling2D(pool_size=(4, 4), strides=(4, 4)))
    net.add(Conv2D(128, 7, 7, init='he_normal', activation='relu',
                   W_regularizer=l2(wd)))
    net.add(MaxPooling2D(pool_size=(4, 4), strides=(4, 4)))
    net.add(Flatten())
    net.add(Dense(256, init='he_normal', activation='relu',
                  W_regularizer=l2(wd)))
    net.add(Dropout(0.5, seed=2112))
    net.add(Dense(2, init='he_normal', activation='softmax',
                  W_regularizer=l2(wd)))

    net.load_weights('../../data/flip_regression/model/70.hdf5', by_name=True)
    if not os.path.isdir(output_dir):
        os.makedirs(output_dir)

    n_scans = len(list(data.dataset.keys()))
    votes = []

    # Slices near the top of the volume: take the "top" class probability
    # (column 0) — a correctly oriented scan should score high here.
    for z_start in [11, 13, 15, 17, 19]:
        print(z_start)
        batches = data.predict_generate(32, z_start)
        votes.append(net.predict_generator(batches, n_scans)[:, 0])

    # Slices near the bottom (negative z_start indexes from the end): take
    # the "bottom" class probability (column 1) instead.
    for z_start in [-11, -13, -15, -17, -19]:
        print(z_start)
        batches = data.predict_generate(32, z_start)
        votes.append(net.predict_generator(batches, n_scans)[:, 1])

    votes = np.asarray(votes)
    flipped = [scan_id for ind, scan_id in enumerate(data.scan_list)
               if np.mean(votes[:, ind]) < 0.5]

    np.save(os.path.join(output_dir, 'flip_list.npy'), flipped)
    K.clear_session()


if __name__ == '__main__':
    # Command-line entry point: pin the GPU, open a growth-limited TF1
    # session, then either train or predict depending on the -t flag.
    parser = argparse.ArgumentParser(
        description="train a network to regression flip")
    parser.add_argument("-g", "--gpu", required=True, dest="gpu",
                        metavar="gpu", help="gpu device to train")
    parser.add_argument("-d", "--data", required=False,
                        default='../../data/kaggle/vol.hdf5',
                        dest="hdf5_file", metavar="hdf5 file",
                        help="train data")
    parser.add_argument("-o", "--output", required=False, default='.',
                        dest="output_dir", metavar="output dir",
                        help="output directory")
    parser.add_argument("-t", "--train", required=True, type=int,
                        dest="flag", metavar="flag to train",
                        help="flag to train")
    args = parser.parse_args()

    # Restrict TF to the requested device and let GPU memory grow on demand.
    os.environ["CUDA_VISIBLE_DEVICES"] = args.gpu
    session_config = tf.ConfigProto()
    session_config.log_device_placement = False
    session_config.gpu_options.allow_growth = True
    K.set_session(tf.Session(config=session_config))

    # Non-zero -t means train; zero means run flip prediction.
    if args.flag:
        train(args.hdf5_file, args.output_dir)
    else:
        predict(args.hdf5_file, args.output_dir)