# -*- coding:utf8 -*-
# @Time : 2023/2/1 16:44
# @Author : WanJie Wu

import torch
from torch import optim
from loguru import logger
import torch.nn.functional as func
from dataclasses import dataclass
from tensorboardX import SummaryWriter
from sklearn.metrics import accuracy_score

from app.src.clf.bert import BertClfModel
from app.src.clf.dataset import gen_dataloader


@dataclass
class MidVariables:
    """Mutable bookkeeping shared across the training loop.

    A single instance is created in ``main`` and updated in place by
    ``train`` as epochs/steps progress.
    """

    # Presumably the filename of the best checkpoint; never assigned in this
    # file — TODO confirm against the rest of the project.
    model_file_name: str = ""
    # 1-based epoch counter, incremented at the top of each epoch in ``train``.
    cur_epoch: int = 0
    # Total optimizer steps taken, incremented once per training batch.
    global_step: int = 0
    # Best dev-set score seen so far; not updated anywhere in this chunk —
    # NOTE(review): looks like the evaluation result was meant to feed this.
    best_score: float = 0.0


def train(args, model, optimizer, criterion, train_loader, dev_loader, writer, mid_vars):
    """Run the full training loop, periodically evaluating on the dev set.

    :param args: namespace with ``epochs``, ``device``, ``log_iter_num`` and
        ``evaluation_interval_steps`` attributes.
    :param model: classifier returning logits for
        ``(input_ids, attention_mask, token_type_ids)``.
    :param optimizer: torch optimizer over ``model``'s parameters.
    :param criterion: loss callable taking ``input=logits, target=labels``.
    :param train_loader: iterable of training batches (dicts of tensors).
    :param dev_loader: iterable of dev batches, forwarded to ``evaluation``.
    :param writer: tensorboard(-X) writer for scalar logging.
    :param mid_vars: ``MidVariables`` instance updated in place.
    """
    logger.info("模型开始训练...")
    model.train()
    for _ in range(args.epochs):
        mid_vars.cur_epoch += 1
        logger.info(f"--- train start epoch {mid_vars.cur_epoch}---\n")
        for batch_idx, item in enumerate(train_loader):
            input_ids = item["input_ids"].to(args.device)
            attention_mask = item["attention_mask"].to(args.device)
            token_type_ids = item["token_type_ids"].to(args.device)
            labels = item["labels"].to(args.device)
            out = model(input_ids, attention_mask, token_type_ids)
            loss = criterion(input=out, target=labels)
            optimizer.zero_grad()
            loss.backward()
            optimizer.step()
            mid_vars.global_step += 1
            # Log the Python float, not the live tensor, so the writer does
            # not keep the autograd graph alive.
            writer.add_scalar("train_loss", loss.item(), mid_vars.global_step)
            if mid_vars.global_step % args.log_iter_num == 0:
                logger.info(f"global_steps: {mid_vars.global_step}, loss: {loss.cpu().item(): .4f}")

            # Guard against a zero interval (len(train_loader) may be smaller
            # than evaluation_per_epoch in main) before taking the modulo.
            if not args.evaluation_interval_steps or \
                    mid_vars.global_step % args.evaluation_interval_steps != 0:
                continue
            score = evaluation(args, model, criterion, dev_loader)
            # Track the best dev score; the None-check keeps this safe even if
            # evaluation returns nothing.
            if score is not None and score > mid_vars.best_score:
                mid_vars.best_score = score
            model.train()


def evaluation(args, model, criterion, dev_loader):
    """Evaluate ``model`` on the dev set and return the mean batch accuracy.

    Puts the model in eval mode and leaves it there — the caller is
    responsible for switching back to train mode.

    :param args: namespace providing ``device``.
    :param model: classifier returning logits for
        ``(input_ids, attention_mask, token_type_ids)``.
    :param criterion: kept for signature compatibility; the original code's
        bare ``criterion()`` call was a TypeError and is removed here.
    :param dev_loader: iterable of dev batches (dicts of tensors).
    :return: average per-batch accuracy as a float (0.0 for an empty loader).
    """
    model.eval()
    accuracy_lst = []

    with torch.no_grad():
        for item in dev_loader:
            input_ids = item["input_ids"].to(args.device)
            attention_mask = item["attention_mask"].to(args.device)
            token_type_ids = item["token_type_ids"].to(args.device)
            labels = item["labels"].to(args.device)
            out = model(input_ids, attention_mask, token_type_ids)
            pred = torch.argmax(out, dim=1)
            # Torch-native accuracy; equivalent to sklearn's accuracy_score
            # but avoids the device->numpy round trip.
            acc = (pred == labels).float().mean().item()
            # Bug fix: the original computed acc but never accumulated it,
            # so the whole pass produced nothing.
            accuracy_lst.append(acc)

    if not accuracy_lst:
        return 0.0
    return sum(accuracy_lst) / len(accuracy_lst)


def main(args):
    """Wire up data, model, optimizer and launch training.

    :param args: namespace with ``summary_log_dir``, ``dataset_dir``,
        ``model_name_or_path``, ``batch_size``, ``max_seq_length``,
        ``num_classes``, ``device``, ``learning_rate`` and
        ``evaluation_per_epoch``. ``evaluation_interval_steps`` is
        computed here and attached for ``train``.
    """
    writer = SummaryWriter(logdir=args.summary_log_dir)
    try:
        # test_loader is produced by gen_dataloader but unused here.
        train_loader, dev_loader, test_loader = gen_dataloader(
            dataset_dir=args.dataset_dir,
            model_name_or_path=args.model_name_or_path,
            batch_size=args.batch_size,
            max_seq_length=args.max_seq_length
        )
        model = BertClfModel(
            model_name_or_path=args.model_name_or_path,
            num_classes=args.num_classes
        )
        model.to(args.device)
        optimizer = optim.AdamW(params=model.parameters(), lr=args.learning_rate)
        criterion = func.cross_entropy
        mid_vars = MidVariables()
        # Clamp to >= 1: the original int(len/x) could be 0 when the loader
        # is shorter than evaluation_per_epoch, crashing train's modulo.
        args.evaluation_interval_steps = max(
            1, len(train_loader) // args.evaluation_per_epoch
        )
        train(args, model, optimizer, criterion, train_loader, dev_loader, writer, mid_vars)
    finally:
        # Flush and release the tensorboard event file even on failure.
        writer.close()
