#!/usr/bin/env python
# -*- coding:utf-8 -*-
'''
@File   :   train.py
@Author :   Song
@Time   :   2022/2/28 21:48
@Contact:   songjian@westlake.edu.cn
@intro  :   Train and evaluate the Model_Filter spectrum classifier.
'''
import sys
import numpy as np
import einops
import matplotlib.pyplot as plt
import torch.optim
import torch.utils.data
import torch.nn.functional as F
import torch.nn.utils.rnn as rnn_utils
from pathlib import Path
import predifine
import models
from dataloader import *

# `profile` is injected into builtins by line_profiler's kernprof when the
# script is profiled; otherwise define a no-op decorator so `@profile`-decorated
# functions still run normally.
try:
    profile
except NameError:  # narrowed from a bare `except:`, which hid unrelated errors
    def profile(func):
        return func


# @profile
def train_one_epoch(train_loader, model, optimizer, epoch):
    """Run one training epoch and return the mean per-batch loss.

    Args:
        train_loader: iterable yielding (spectra, precursor_charge, label)
            batches; must support ``len()`` for progress logging.
        model: classifier mapping (spectra, charge) -> class logits.
        optimizer: torch optimizer over ``model``'s parameters.
        epoch: current epoch index (used for logging only).

    Returns:
        float: epoch loss averaged over batches (0.0 for an empty loader).

    Note:
        relies on the module-level ``device`` assigned in ``__main__``.
    """
    model.train()

    epoch_loss = 0.
    num_batches = 0

    for batch_idx, (batch_spectra, batch_pr_charge, batch_label) in enumerate(train_loader):

        batch_spectra = batch_spectra.float().to(device)
        batch_pr_charge = batch_pr_charge.long().to(device)
        batch_label = batch_label.long().to(device)

        # forward pass -> raw class logits
        pred_label = model(batch_spectra, batch_pr_charge)

        # cross_entropy expects raw logits, so no softmax here
        batch_loss = F.cross_entropy(pred_label, batch_label)

        # backward pass and parameter update
        optimizer.zero_grad()
        batch_loss.backward()
        optimizer.step()

        # accumulate for the epoch average
        epoch_loss += batch_loss.item()
        num_batches += 1

        if batch_idx % 10000 == 0:
            print('Training epoch: [{}], Batch: [{}/{}], loss: {:.3f}'.format(
                epoch, batch_idx, len(train_loader), batch_loss.item(), ))

    # guard against an empty loader: the original divided by (batch_idx + 1),
    # which raised NameError when the loop body never ran
    return epoch_loss / num_batches if num_batches else 0.


# @profile
def eval_one_epoch(dataloader, model, epoch):
    """Evaluate the classifier for one epoch and return overall accuracy.

    Args:
        dataloader: iterable yielding (spectra, precursor_charge, label)
            batches; must expose ``.dataset`` (with ``len()``) and ``len()``.
        model: classifier mapping (spectra, charge) -> class logits.
        epoch: current epoch index (used for logging only).

    Returns:
        float: fraction of correctly classified samples over the full dataset.

    Note:
        relies on the module-level ``device`` assigned in ``__main__``.
    """
    model.eval()
    batch_match_num_v = []
    for batch_idx, (batch_spectra, batch_pr_charge, batch_label) in enumerate(dataloader):

        batch_spectra = batch_spectra.float().to(device)
        batch_pr_charge = batch_pr_charge.long().to(device)
        batch_label = batch_label.long().to(device)

        with torch.no_grad():
            # forward pass -> raw class logits
            pred_label = model(batch_spectra, batch_pr_charge)

            # softmax is monotone, so argmax over the raw logits yields the
            # same prediction -- the redundant softmax pass is removed
            pred_label = torch.argmax(pred_label, dim=1)
            batch_match_num = (pred_label == batch_label).sum()
            batch_acc = batch_match_num / len(batch_label)
            batch_match_num_v.append(batch_match_num.item())

            if batch_idx % 10000 == 0:
                print('Eval epoch: [{}], Batch: [{}/{}], '
                      'batch acc: {:.3f}'.format(
                    epoch, batch_idx, len(dataloader),
                    batch_acc.item()))

    epoch_acc = sum(batch_match_num_v) / len(dataloader.dataset)
    return epoch_acc



# @profile
def test_one_epoch(dataloader, model, device):
    model.eval()
    score_v = []
    for batch_idx, (batch_spectra, batch_pr_charge, batch_label) in enumerate(dataloader):

        batch_spectra = batch_spectra.float().to(device)
        batch_pr_charge = batch_pr_charge.long().to(device)

        # forward
        with torch.no_grad():
            # forward
            pred = model(batch_spectra, batch_pr_charge)

            # for pred len
            pred = torch.softmax(pred, dim=1)
            pred = pred[:, 1]
            score_v.extend(pred.cpu().numpy())

    score_v = np.array(score_v)
    return score_v


def my_collate(items):
    """Collate (spectrum, precursor_charge, label) samples into batch tensors.

    Spectra may differ in length, so they are zero-padded to the longest
    spectrum in the batch (batch-first). Charges and labels become 1-D tensors.
    """
    spectra, charges, labels = zip(*items)
    padded_spectra = rnn_utils.pad_sequence(spectra, batch_first=True)
    return padded_spectra, torch.tensor(charges), torch.tensor(labels)


if __name__ == '__main__':
    # train and valid
    # predifine.device = torch.device('cuda:' + str(sys.argv[1]))
    device = predifine.device

    train_dataset = Train_Dataset('train', path=predifine.train_npz)
    eval_dataset = Eval_Dataset('eval', path=predifine.eval_npz)

    train_loader = torch.utils.data.DataLoader(train_dataset,
                                               batch_size=predifine.batch_size,
                                               num_workers=predifine.num_workers,
                                               shuffle=predifine.shuffle,
                                               pin_memory=True,
                                               collate_fn=my_collate)  # custom collate: pads variable-length spectra
    # NOTE(review): the eval loader reuses predifine.shuffle; shuffling does not
    # affect accuracy but is usually unnecessary for evaluation -- confirm intent.
    eval_loader = torch.utils.data.DataLoader(eval_dataset,
                                              batch_size=predifine.batch_size,
                                              num_workers=predifine.num_workers,
                                              shuffle=predifine.shuffle,
                                              pin_memory=True,
                                              collate_fn=my_collate)

    # %% model
    model = models.Model_Filter(
        dim_model=predifine.dim_model,
        n_head=predifine.n_head,
        dim_feedforward=predifine.dim_feedforward,
        n_layers=predifine.n_layers,
        dropout=predifine.dropout,
        dim_intensity=predifine.dim_intensity,
        max_length=predifine.max_length,
        max_charge=predifine.max_charge
    ).to(device)

    # optimizer (Adam with default hyperparameters)
    optimizer = torch.optim.Adam(model.parameters())

    train_loss_v, eval_loss_v = [], []
    eval_acc_v, eval_acc_bias_v = [], []

    for epoch in range(predifine.epochs):
        train_loss = train_one_epoch(train_loader, model, optimizer, epoch)
        train_loss_v.append(train_loss)

        eval_acc = eval_one_epoch(eval_loader, model, epoch)
        eval_acc_v.append(eval_acc)

        print('Epoch {}, train loss: {:.3f}, eval acc: {:.3f}'.format(
            epoch, train_loss, eval_acc))

        # save a checkpoint every epoch
        save_dir = Path(predifine.train_output_dir) / ('Filter_epoch_' + str(epoch) + '.pt')
        torch.save(model.state_dict(), save_dir)

        # plot training curves accumulated so far
        plt.figure()
        plt.subplot(211)
        plt.plot(train_loss_v, label='training loss')
        plt.legend()
        plt.ylabel('loss')
        plt.subplot(212)
        plt.plot(eval_acc_v, label='eval acc')
        plt.legend()
        plt.ylabel('acc')

        plt.savefig(Path(predifine.train_output_dir) / ('Filter_epoch_' + str(epoch) + '.png'))
        # release the figure: the original leaked one open figure per epoch,
        # which matplotlib warns about and which grows memory over long runs
        plt.close()