import colorama
import torch
import copy
import os
import pandas as pd
import time
import matplotlib.pyplot as plt


def train(model, train_loader, test_loader, epoches, lr, read_model_path=None, batch_debug=False):
    """Train a classification model and track per-epoch loss/accuracy.

    Args:
        model: a torch.nn.Module whose forward returns class logits.
        train_loader: DataLoader yielding (inputs, integer labels) batches for training.
        test_loader: DataLoader yielding (inputs, integer labels) batches for evaluation.
        epoches: number of epochs to run.
        lr: Adam learning rate.
        read_model_path: optional path to a state_dict checkpoint to resume from.
        batch_debug: if True, print a colored per-batch progress line with an ETA.

    Returns:
        (best_model_wts, train_process): a deep copy of the state_dict with the
        highest test accuracy, and a pandas DataFrame with per-epoch
        train/test loss and accuracy.
    """
    device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
    if batch_debug:
        # colorama is only needed for the colored progress output below,
        # so only initialize it (it wraps stdout on Windows) when used.
        colorama.init()
    if read_model_path is not None:
        # map_location lets a checkpoint saved on GPU load on a CPU-only machine.
        model.load_state_dict(torch.load(read_model_path, map_location=device))

    model = model.to(device)
    optimizer = torch.optim.Adam(model.parameters(), lr=lr)

    # Fix: the original used bare `nn.CrossEntropyLoss()` but this file never
    # imports torch.nn as nn, which raises NameError at runtime.
    criterion = torch.nn.CrossEntropyLoss()

    best_model_wts = copy.deepcopy(model.state_dict())
    best_acc = 0.0

    train_loss_list = []
    test_loss_list = []
    train_acc_list = []
    test_acc_list = []

    since = time.time()
    for epoch in range(epoches):
        print('-' * 30)
        print('Epoch {}/{}'.format(epoch + 1, epoches))
        train_loss = 0.0
        train_correct = 0
        test_loss = 0.0
        test_correct = 0
        train_num = 0
        test_num = 0

        model.train()  # set once per epoch, not per batch
        for i, (bx, by) in enumerate(train_loader):
            batch_since = time.time()
            bx = bx.to(device)
            by = by.to(device)

            output = model(bx)
            pre_label = torch.argmax(output, dim=1)
            loss = criterion(output, by)

            optimizer.zero_grad()
            loss.backward()
            optimizer.step()

            # Accumulate sum of per-sample losses so the epoch average is
            # correct even when the last batch is smaller.
            train_loss += loss.item() * bx.size(0)
            train_correct += torch.sum(pre_label == by).item()
            train_num += bx.size(0)
            batch_time_used = time.time() - batch_since
            if batch_debug:
                print('\r\ttrain Batch Debug: {} [{}/{} bt]\t{:.2f}s'.format(
                    colorama.Fore.GREEN + str(
                        round((i + 1) / len(train_loader) * 100)) + '%' + colorama.Style.RESET_ALL,
                    i + 1, len(train_loader), batch_time_used * (len(train_loader) - i - 1)), end='')

        print()
        model.eval()
        # no_grad: evaluation does not need autograd graphs; saves memory/time.
        with torch.no_grad():
            for i, (bx, by) in enumerate(test_loader):
                batch_since = time.time()
                bx = bx.to(device)
                by = by.to(device)

                output = model(bx)
                pre_label = torch.argmax(output, dim=1)
                loss = criterion(output, by)

                test_loss += loss.item() * bx.size(0)
                test_correct += torch.sum(pre_label == by).item()
                test_num += bx.size(0)
                batch_time_used = time.time() - batch_since
                if batch_debug:
                    print('\r\ttest Batch Debug: {} [{}/{} bt]\t{:.2f}s'.format(
                        colorama.Fore.GREEN + str(round((i + 1) / len(test_loader) * 100)) + '%' + colorama.Style.RESET_ALL,
                        i + 1, len(test_loader), batch_time_used * (len(test_loader) - i - 1)), end='')

        train_loss_list.append(train_loss / train_num)
        test_loss_list.append(test_loss / test_num)
        train_acc_list.append(float(train_correct) / train_num)
        test_acc_list.append(float(test_correct) / test_num)
        print()
        print('Train Loss: {:.4f} Acc: {:.4f} '.format(train_loss_list[-1], train_acc_list[-1]))
        print('Test Loss: {:.4f} Acc: {:.4f} '.format(test_loss_list[-1], test_acc_list[-1]))

        # Keep a snapshot of the weights with the best test accuracy so far.
        if test_acc_list[-1] > best_acc:
            best_acc = test_acc_list[-1]
            best_model_wts = copy.deepcopy(model.state_dict())

        time_used = time.time() - since
        print('All time used: {}h {}m {:.2f}s'.format(int(time_used // 3600), int((time_used % 3600) // 60),
                                                      time_used % 60))

    train_process = pd.DataFrame(
        {'epoch': range(1, epoches + 1), 'train_loss': train_loss_list, 'train_acc': train_acc_list,
         'test_loss': test_loss_list, 'test_acc': test_acc_list})

    return best_model_wts, train_process


def save_best_model(best_model_wts, path):
    """Serialize the given model state_dict to *path* via torch.save."""
    torch.save(obj=best_model_wts, f=path)


def plot_train_process(train_process):
    """Show a two-panel figure of train/eval loss and accuracy per epoch.

    The figure title is the name of the current working directory, so plots
    are labeled by the project folder they were produced in.
    """
    title = os.path.basename(os.getcwd())
    epochs = train_process['epoch']

    # (y-axis label, [(column, matplotlib format, legend label), ...])
    panels = [
        ('loss', [('train_loss', 'ro-', 'train_loss'),
                  ('test_loss', 'bo-', 'eval_loss')]),
        ('accuracy', [('train_acc', 'ro-', 'train_acc'),
                      ('test_acc', 'bo-', 'eval_acc')]),
    ]

    plt.figure(figsize=(12, 4))
    for position, (ylab, curves) in enumerate(panels, start=1):
        plt.subplot(1, 2, position)
        for column, fmt, label in curves:
            plt.plot(epochs, train_process[column], fmt, label=label)
        plt.xlabel('epoch')
        plt.ylabel(ylab)
        plt.legend()
        plt.title(title)
    plt.show()


if __name__ == '__main__':
    # This module is a library of training helpers; nothing runs when it is
    # executed directly.
    pass
