# coding=utf-8

import os
import shutil
import logging

import numpy as np
import SimpleITK as sitk
import tensorflow as tf

import sys
sys.path.append("..")

from Utils import write_image

class Trainer(object):
    """Trains a unet instance.

    :param config: dict with keys 'batch_size', 'iterations',
        'learning_rate' and 'learning_rate_decay'
    :param net: the unet instance to train; must expose ``cost``,
        ``accuracy``, the feed placeholders used in :meth:`train`,
        and a ``save(sess, path, index)`` method
    """

    prediction_path = "./Prediction"

    def __init__(self, config, net):
        self.net = net
        self.batch_size = config['batch_size']
        self.iterations = config['iterations']

        lr_decay = config['learning_rate_decay']
        lr = config['learning_rate']
        self.optimizer = self._get_optimizer(lr, lr_decay)

    def _get_optimizer(self, lr, lr_decay=False):
        """Build the Adam training op, optionally with staircase lr decay.

        :param lr: initial learning rate
        :param lr_decay: if True, multiply the rate by 0.9 every 2500 steps
        :return: training op that minimizes ``self.net.cost``
        """
        global_step = None
        if lr_decay:
            # BUGFIX: global_step must also be handed to minimize() below --
            # the original never incremented it, so exponential_decay always
            # evaluated at step 0 and the learning rate never decayed.
            global_step = tf.Variable(0, trainable=False)
            lr = tf.train.exponential_decay(lr, global_step, 2500, 0.9,
                                            staircase=True)

        raw_op = tf.train.AdamOptimizer(learning_rate=lr)

        # Run UPDATE_OPS (e.g. batch-norm moving-average updates) together
        # with every optimization step.
        update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
        with tf.control_dependencies(update_ops):
            # global_step=None is a valid argument and leaves counting off.
            train_op = raw_op.minimize(self.net.cost, global_step=global_step)

        return train_op

    def _initialize(self, output_path):
        """Create summary ops and (re)create the output directories.

        WARNING: deletes any existing content of both ``output_path`` and
        ``self.prediction_path``.

        :param output_path: path where checkpoints/summaries are stored
        :return: the global-variable initializer op
        """
        tf.summary.scalar('loss', self.net.cost)
        tf.summary.scalar('accuracy', self.net.accuracy)

        self.summary_op = tf.summary.merge_all()
        init = tf.global_variables_initializer()

        prediction_path = os.path.abspath(self.prediction_path)
        output_path = os.path.abspath(output_path)

        # Wipe and recreate both directories so each run starts clean.
        for path in (prediction_path, output_path):
            logging.info("Removing '{:}'".format(path))
            shutil.rmtree(path, ignore_errors=True)
            if not os.path.exists(path):
                logging.info("Allocating '{:}'".format(path))
                os.makedirs(path)

        return init

    def train(self, data_provider, output_path, keep_prob=0.75, display_step=20):
        """
        Launches the training process.

        :param data_provider: object providing ``get_data(iteration,
            batch_size)`` -> (inputs, label, distance) and
            ``get_testdata(batch_size)`` -> (inputs, label)
        :param output_path: path where to store checkpoints
        :param keep_prob: dropout keep probability used during training
        :param display_step: number of steps between summary/verification runs
        """
        init = self._initialize(output_path)

        with tf.Session() as sess:
            sess.run(init)
            summary_writer = tf.summary.FileWriter(
                output_path, graph=sess.graph)
            logging.info("Start optimization")

            for numIter in range(self.iterations):

                (batch_inputs,
                 batch_label,
                 batch_distance) = data_provider.get_data(numIter,
                                       batch_size=self.batch_size)

                training = True

                # Classify the batch for logging/summary filtering:
                # 0 = empty label, 1 = second input channel empty
                # (assumes channel-last layout -- confirm with data_provider),
                # 2 = regular sample.
                if batch_label.sum() == 0:
                    sampleType = 0
                elif batch_inputs[..., 1].sum() == 0:
                    sampleType = 1
                else:
                    sampleType = 2

                summary_str, _, total_loss, accuracy = sess.run(
                    (self.summary_op, self.optimizer,
                     self.net.cost, self.net.accuracy),
                     feed_dict={self.net.inputs:    batch_inputs,
                                self.net.label:     batch_label,
                                self.net.distance:  batch_distance,
                                self.net.iteration: numIter,
                                self.net.training:  training,
                                self.net.keep_prob: keep_prob})

                print("Iter:%6d  Type:%-2d Loss:%1.8f  Acc:%5.2f " %
                      (numIter, sampleType, total_loss, accuracy))

                if numIter % display_step == 0:
                    if sampleType != 0:  # skip entries whose label is empty
                        summary_writer.add_summary(summary_str, numIter)
                        summary_writer.flush()

                    # Verification pass: no dropout, inference mode.
                    batch_inputs, batch_label = data_provider.get_testdata(self.batch_size)
                    accuracy = sess.run(self.net.accuracy,
                                        feed_dict={self.net.inputs:    batch_inputs,
                                                   self.net.label:     batch_label,
                                                   self.net.training:  False,
                                                   self.net.keep_prob: 1.0})
                    print("Verification Accuracy:%5.2f" % accuracy)

                if numIter % 1500 == 0:
                    # BUGFIX: use floor division -- on Python 3 '/' produced a
                    # float checkpoint index (e.g. 0.00066...).
                    self.net.save(sess, output_path, (numIter + 1) // 1500)
            print("Optimization Finished!")
