import os
import time
import datetime
import copy
import jittor
import functools
import numpy as np
import pandas as pd
from tqdm import tqdm
from pprint import pprint
from prettytable import PrettyTable


def get_describle(a: dict = None, detail: bool = True) -> str:
    """Build a filename-safe description suffix from a dict of settings.

    Each ``key: value`` pair becomes ``_keyvalue`` with underscores and
    spaces stripped from both parts; a boolean ``True`` contributes just
    ``_key`` and ``False`` contributes nothing (flag-style).

    Args:
        a: settings to describe; ``None`` is treated as empty.
           (Was a mutable default ``{}`` — fixed to avoid sharing state
           across calls.)
        detail: when falsy, return ``''`` regardless of ``a``.

    Returns:
        The concatenated ``_...`` description string, or ``''``.
    """
    if not detail:
        return ''
    desc = ''
    for key, value in (a or {}).items():
        if isinstance(value, bool):
            # Flags: only a True flag shows up, and only by its name.
            if value:
                desc += "_{}".format(str(key).replace('_', '').replace(' ', ''))
        else:
            name = str(key).replace('_', '').replace(' ', '')
            val = str(value).replace('_', '').replace(' ', '')
            desc += f"_{name}{val}"
    return desc


def forwardfunction(net, features, labels, loss_func):
    """Default forward pass: run the network and score it against labels.

    Returns a ``(output, loss)`` tuple where ``output = net.execute(features)``
    and ``loss = loss_func(output, labels)``.
    """
    predictions = net.execute(features)
    loss = loss_func(predictions, labels)
    return predictions, loss


def stepforward(net, loader, loss_func, optimizer=None, metrics_dict=None, verbose=True, desc=''):
    """Run one training epoch over ``loader`` and return averaged metrics.

    Args:
        net: model exposing ``train()`` and either a custom
            ``forwardfunction(features, labels)`` hook (which reads
            ``net.loss_func``) or a standard ``execute(features)``.
        loader: iterable of ``(features, labels)`` batches.
        loss_func: callable ``(out, labels) -> loss`` (jittor Var expected;
            ``loss.data[0]`` is read for display).
        optimizer: jittor-style optimizer whose ``step(loss)`` performs the
            backward pass; skipped when ``None`` (the default).
        metrics_dict: mapping name -> ``metric(out, labels)``; ``None``
            means no extra metrics (was a mutable default ``{}`` — fixed).
        verbose: show the tqdm progress bar.
        desc: text shown in the progress-bar description.

    Returns:
        dict of per-epoch metrics (loss plus ``metrics_dict`` entries),
        each a sample-weighted average over the epoch.
    """
    if metrics_dict is None:
        metrics_dict = {}
    epoch_metrics, total = {}, 0
    prefix = ''
    net.train()
    batch_metrics = {}
    # Hoisted out of the loop: whether the model supplies its own
    # forward/loss hook, and the loss_func attribute that hook reads.
    custom_forward = hasattr(net, 'forwardfunction')
    if custom_forward:
        setattr(net, 'loss_func', loss_func)
    tqdm_iterator = tqdm(loader,
                         desc='[{}]'.format(desc, ),
                         ncols=200,
                         leave=True,
                         unit='batch',
                         disable=not verbose)
    for features, labels in tqdm_iterator:
        # assumes labels.size(0) is the batch size — TODO confirm for jittor Vars
        total += labels.size(0)
        if custom_forward:
            out, _batch_loss = net.forwardfunction(features, labels)
        else:
            out, _batch_loss = forwardfunction(net, features, labels, loss_func)
        batch_loss = _batch_loss
        batch_metrics[prefix + "loss"] = batch_loss.data[0]
        # Accumulate the sample-weighted loss; divided by `total` at the end.
        epoch_metrics[prefix + "loss"] = epoch_metrics.get(prefix + "loss", 0.0) + \
                                         batch_loss * labels.size(0)
        if optimizer is not None:
            # jittor optimizers take the loss directly (backward + update).
            optimizer.step(_batch_loss)
        for name, metric_func in metrics_dict.items():
            calculated_metric = metric_func(out, labels)
            batch_metrics[prefix + name] = calculated_metric
            epoch_metrics[prefix + name] = epoch_metrics.get(prefix + name, 0.0) + \
                                           calculated_metric * labels.size(0)
        tqdm_iterator.set_postfix(dict(batch_metrics))

    # Turn weighted sums into averages over the samples actually seen.
    for name, metric_value in epoch_metrics.items():
        epoch_metrics[name] = metric_value / total
    return epoch_metrics


@jittor.no_grad()
def stepforward_test(net, loader, loss_func, metrics_dict=None, verbose=True, desc=''):
    """Run one gradient-free validation epoch and return averaged metrics.

    Mirrors :func:`stepforward` but puts the model in ``eval()`` mode,
    calls it directly (``net(features)``), never steps an optimizer, and
    prefixes every metric name with ``'val_'``.

    Args:
        net: model callable as ``net(features)`` and exposing ``eval()``.
        loader: iterable of ``(features, labels)`` batches.
        loss_func: callable ``(out, labels) -> loss``.
        metrics_dict: mapping name -> ``metric(out, labels)``; ``None``
            means no extra metrics (was a mutable default ``{}`` — fixed).
        verbose: show the tqdm progress bar.
        desc: text shown in the progress-bar description.

    Returns:
        dict of ``'val_'``-prefixed sample-weighted averages.
    """
    if metrics_dict is None:
        metrics_dict = {}
    prefix = 'val_'
    epoch_metrics, total = {}, 0
    net.eval()
    batch_metrics = {}
    tqdm_iterator = tqdm(loader,
                         desc='{}'.format(desc),
                         ncols=200,
                         leave=True,
                         unit='batch',
                         disable=not verbose)
    for features, labels in tqdm_iterator:
        # assumes labels.size(0) is the batch size — TODO confirm for jittor Vars
        total += labels.size(0)
        out = net(features)
        _batch_loss = loss_func(out, labels)
        batch_loss = _batch_loss
        batch_metrics[prefix + "loss"] = batch_loss.data[0]
        # Accumulate the sample-weighted loss; divided by `total` at the end.
        epoch_metrics[prefix + "loss"] = epoch_metrics.get(prefix + "loss", 0.0) + \
                                         batch_loss * labels.size(0)
        for name, metric_func in metrics_dict.items():
            calculated_metric = metric_func(out, labels)
            batch_metrics[prefix + name] = calculated_metric
            epoch_metrics[prefix + name] = epoch_metrics.get(prefix + name, 0.0) + \
                                           calculated_metric * labels.size(0)
        tqdm_iterator.set_postfix(dict(batch_metrics))

    # Turn weighted sums into averages over the samples actually seen.
    for name, metric_value in epoch_metrics.items():
        epoch_metrics[name] = metric_value / total
    return epoch_metrics


class Trainer:
    """Keras-style training harness for a jittor model.

    Typical usage::

        trainer = Trainer(net)
        trainer.compile(loss_dict=..., optimizer_dict=..., metrics_dict=...)
        history, state_dict, best = trainer.fit(epochs, train_loader, val_loader)
    """

    def __init__(self, net):
        # `net` is expected to expose train()/eval()/execute(),
        # parameters(), state_dict() and a `desc` attribute used to build
        # the run description in fit().
        super(Trainer, self).__init__()
        self.net = net

    def compile(self,
                loss_dict=None,
                optimizer_dict=None,
                lr_scheduler_dict=None,
                metrics_dict=None,
                numpy_metric=True,
                monitor='val_loss',
                monitor_mode='min'):
        """Configure loss, optimizer, lr-scheduler, metrics and monitoring.

        Each ``*_dict`` bundles a factory under a well-known key
        (``'loss_func'`` / ``'optimizer'`` / ``'lr_scheduler'``) plus its
        keyword arguments; missing dicts fall back to MSELoss / Adam / no
        scheduler. Input dicts are copied before use so callers' dicts are
        never mutated (the original popped keys from them in place).

        Args:
            loss_dict: ``{'loss_func': cls, **kwargs}`` or None.
            optimizer_dict: ``{'optimizer': cls, **kwargs}``, optionally with
                ``'enhancer'``/``'enhancer_dict'`` for a wrapper optimizer.
            lr_scheduler_dict: ``{'lr_scheduler': cls, **kwargs}`` or None
                (was a mutable default ``{}`` — fixed).
            metrics_dict: mapping name -> ``metric(out, labels)``; None means
                no extra metrics.
            numpy_metric: stored as-is for callers to inspect.
            monitor: metric name to track; must be a known metric name,
                'loss', or a 'val_'-prefixed variant.
            monitor_mode: 'min' or 'max'.
        """
        # Normalize metrics_dict BEFORE the monitor assert: the original
        # crashed on `metrics_dict.keys()` when the default None was used.
        metrics_dict = dict(metrics_dict) if metrics_dict else {}
        assert monitor_mode in ['min', 'max']
        metric_names = list(metrics_dict.keys()) + ['loss']
        assert monitor in metric_names + ['val_' + name for name in metric_names]

        if loss_dict and 'loss_func' in loss_dict:
            loss_dict = dict(loss_dict)  # copy: don't mutate the caller's dict
            loss_func = loss_dict.pop('loss_func')
            self.loss_func = loss_func(**loss_dict)
            self.loss_desc = f'-{type(self.loss_func).__name__}{get_describle(loss_dict)}'
        else:
            self.loss_func = jittor.nn.MSELoss()
            self.loss_desc = f'-{type(self.loss_func).__name__}'

        if optimizer_dict and 'optimizer' in optimizer_dict:
            optimizer_dict = dict(optimizer_dict)  # copy before popping
            optimizer = optimizer_dict.pop('optimizer')
            enhancer, enhancer_dict = None, None
            if 'enhancer' in optimizer_dict and 'enhancer_dict' in optimizer_dict:
                enhancer = optimizer_dict.pop('enhancer')
                enhancer_dict = optimizer_dict.pop('enhancer_dict')
            self.optimizer = optimizer(self.net.parameters(), **optimizer_dict)
            self.optim_desc = f'-{type(self.optimizer).__name__}{get_describle(optimizer_dict)}'
            if enhancer and enhancer_dict:
                # e.g. a Lookahead-style wrapper built on top of the base optimizer
                self.optimizer = enhancer(self.optimizer, **enhancer_dict)
                self.optim_desc = f'{self.optim_desc}-{type(self.optimizer).__name__}{get_describle(enhancer_dict)}'
        else:
            self.optimizer = jittor.optim.Adam(self.net.parameters())
            self.optim_desc = f'-{type(self.optimizer).__name__}'

        if lr_scheduler_dict and 'lr_scheduler' in lr_scheduler_dict:
            lr_scheduler_dict = dict(lr_scheduler_dict)  # copy before popping
            lr_scheduler = lr_scheduler_dict.pop('lr_scheduler')
            self.lr_scheduler = lr_scheduler(self.optimizer, **lr_scheduler_dict)
            self.lr_scheduler_desc = f'-{type(self.lr_scheduler).__name__}{get_describle(lr_scheduler_dict)}'
        else:
            self.lr_scheduler = None
            self.lr_scheduler_desc = ''

        self.numpy_metric = numpy_metric
        self.monitor = monitor if monitor else 'loss'
        self.monitor_mode = monitor_mode if monitor_mode else 'min'
        self.metrics_dict = metrics_dict
        self.history = {}

    def fit(self, epochs, T_loader, V_loader, verbose=True):
        """Train for `epochs` epochs, validating after each one.

        Tracks ``self.monitor`` across epochs, keeping deep copies of the
        best model/optimizer state.

        Returns:
            (history DataFrame, state_dict with best/final model and
            optimizer states, best monitored value).
        """
        self.train_desc = f"{self.net.desc}{self.loss_desc}{self.optim_desc}{self.lr_scheduler_desc}-epk{epochs}"
        if verbose:
            pprint(self.net)
            print("\n[{}]\tTraining Started!".format(self.train_desc))
        monitor_best = float("inf") if self.monitor_mode == 'min' else -float("inf")
        model_best = copy.deepcopy(self.net.state_dict())
        optimizer_best = copy.deepcopy(self.optimizer.state_dict())

        start = time.time()
        for epoch in range(1, epochs + 1):
            # 1. training loop -------------------------------------------------
            train_metrics = stepforward(self.net,
                                        loss_func=self.loss_func,
                                        optimizer=self.optimizer,
                                        metrics_dict=self.metrics_dict,
                                        loader=T_loader,
                                        verbose=verbose)
            for name, train_metric in train_metrics.items():
                self.history[name] = self.history.get(name, []) + [train_metric]
            # 2. validate loop -------------------------------------------------
            val_metrics = stepforward_test(self.net,
                                           loss_func=self.loss_func,
                                           metrics_dict=self.metrics_dict,
                                           loader=V_loader,
                                           verbose=verbose)
            for name, val_metric in val_metrics.items():
                self.history[name] = self.history.get(name, []) + [val_metric]

            assert self.monitor in self.history.keys()
            # Snapshot the state whenever the monitored metric is at least
            # as good as the best seen so far.
            compared = self.history[self.monitor][-1] <= monitor_best if self.monitor_mode == 'min' else \
                self.history[self.monitor][-1] >= monitor_best
            if compared:
                if verbose:
                    print("\n{} update: {} --> {}".format(self.monitor, monitor_best, self.history[self.monitor][-1]))
                model_best = copy.deepcopy(self.net.state_dict())
                optimizer_best = copy.deepcopy(self.optimizer.state_dict())

            monitor_best = min([monitor_best, self.history[self.monitor][-1]]) if self.monitor_mode == 'min' else max(
                [monitor_best, self.history[self.monitor][-1]])

            # 3. print logs -------------------------------------------------
            if verbose:
                infos = {'time': datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
                         "epoch": '{}/{}'.format(epoch, epochs)}
                infos.update({k: round(self.history[k][-1], 5) for k in self.history.keys()})
                infos.update({'best_{}'.format(self.monitor): round(monitor_best, 5)})
                tb = PrettyTable()
                tb.field_names = infos.keys()
                tb.add_row(infos.values())
                print(tb)

        end = time.time()

        if verbose:
            # Fixed: the original referenced the nonexistent `self.desc`.
            print("[{}]\tTraining Finished!".format(self.train_desc))
            print(f'Running time ({epochs} Epochs): %s Seconds' % round(end - start, 5))
            print('Best_{}: {}'.format(self.monitor, round(monitor_best, 5)))
        model_final = copy.deepcopy(self.net.state_dict())
        optimizer_final = copy.deepcopy(self.optimizer.state_dict())
        state_dict = {
            'model_best': model_best,
            'model_final': model_final,
            'optimizer_best': optimizer_best,
            'optimizer_final': optimizer_final,
        }
        return pd.DataFrame(self.history), state_dict, monitor_best

    def evaluate(self, dataloader):
        """Run one validation pass; returns the 'val_'-prefixed metric dict.

        Fixed: the original called the nonexistent ``self.stepforward``;
        delegate to the module-level validation loop instead.
        """
        return stepforward_test(self.net,
                                loader=dataloader,
                                loss_func=self.loss_func,
                                metrics_dict=self.metrics_dict,
                                verbose=False)

    def predict(self, dataloader):
        """Return (true_labels, predicted_labels) arrays for a classifier.

        Predictions are the argmax over the class axis of the network output.
        Fixed: the original called the nonexistent ``self.eval()`` and
        ``self.execute()``; these now go through ``self.net``.
        """
        self.net.eval()
        true_labels = []
        pred_labels = []
        for features, labels in dataloader:
            outputs = self.net.execute(features)
            preds = np.argmax(outputs.data, axis=1)
            true_labels.extend(list(labels.data))
            pred_labels.extend(list(preds))

        return np.array(true_labels), np.array(pred_labels)
