# -*- coding: utf-8 -*-

import torch
from tqdm import tqdm
from translator import Translator
from torch.utils.data import DataLoader
from torch.optim import Adam
from util import len2mask, shift
import torch.nn.functional as F


class Engine:
    """Training / evaluation driver for a sequence-to-sequence Translator.

    ``train`` runs one teacher-forced epoch over ``train_dataloader``;
    ``test`` runs a free-running decoding pass over ``test_dataloader``.
    """

    def __init__(self,
                 translator: Translator,
                 train_dataloader: DataLoader,
                 test_dataloader: DataLoader,
                 lr=1e-4,
                 max_len=200,
                 device=None,
                 sos_id=0,
                 pad_id=2):
        """
        :param translator: seq2seq model; with ``shift_tgt`` given its forward
            is expected to return log-probabilities of shape (B, L, V) — they
            are fed straight to ``F.nll_loss`` (TODO confirm against model)
        :param train_dataloader: yields (src_token_ids, src_len, tgt_token_ids, _)
        :param test_dataloader: same batch layout as ``train_dataloader``
        :param lr: Adam learning rate
        :param max_len: fixed sequence length used to build the source mask
        :param device: torch device batches are moved to (None = default)
        :param sos_id: start-of-sequence token id used when shifting targets
            (default 0, matching previous hard-coded behavior)
        :param pad_id: padding token id ignored by the loss
            (default 2, matching previous hard-coded behavior)
        """
        self.translator = translator
        self.train_dataloader = train_dataloader
        self.test_dataloader = test_dataloader
        self.max_len = max_len
        self.optimizer = Adam(params=translator.parameters(), lr=lr)
        self.device = device
        self.sos_id = sos_id
        self.pad_id = pad_id

    def train(self):
        """Run one epoch of teacher-forced training over ``train_dataloader``."""
        self.translator.train()
        train_bar = tqdm(self.train_dataloader)

        for src_token_ids, src_len, tgt_token_ids, _ in train_bar:
            src_token_ids = torch.tensor(src_token_ids, device=self.device)
            src_len = torch.tensor(src_len, device=self.device)
            tgt_token_ids = torch.tensor(tgt_token_ids, device=self.device)

            # Teacher forcing: decoder input is the target prefixed with <sos>.
            shift_tgt = shift(tgt_token_ids, sos_id=self.sos_id)
            # (B, 1, 1, max_len) mask so attention ignores source padding
            # -- shape assumed from the unsqueeze pattern; confirm in model.
            src_mask = len2mask(src_len, max_len=self.max_len).unsqueeze(1).unsqueeze(2)
            # (B, L, V) log-probabilities. Call the module itself rather than
            # .forward() so registered nn.Module hooks still run.
            tgt_prob = self.translator(src_token_ids,
                                       shift_tgt=shift_tgt,
                                       src_mask=src_mask)
            # NLL over all positions, ignoring <pad> targets.
            vocab_size = tgt_prob.size(-1)
            loss = F.nll_loss(tgt_prob.reshape(-1, vocab_size),
                              tgt_token_ids.reshape(-1),
                              ignore_index=self.pad_id)
            self.optimizer.zero_grad()
            loss.backward()
            self.optimizer.step()
            train_bar.set_description("nll loss : {:.4f}".format(loss.item()))

    def test(self, is_test=True):
        """Free-running decoding pass over ``test_dataloader``.

        :param is_test: bool, True if using test-dataset else eval-dataset
            (currently unused — both branches read ``self.test_dataloader``)
        :return: None
        """
        self.translator.eval()

        test_bar = tqdm(self.test_dataloader)
        # Inference only: no_grad avoids building the autograd graph, which
        # the previous version did needlessly on every eval batch.
        with torch.no_grad():
            for src_token_ids, src_len, tgt_token_ids, _ in test_bar:
                src_token_ids = torch.tensor(src_token_ids, device=self.device)
                src_len = torch.tensor(src_len, device=self.device)

                # tgt_token_ids is the reference translation and therefore
                # unavailable at inference time; intentionally unused here.

                # (B, 1, 1, max_len) source-padding mask.
                src_mask = len2mask(src_len, max_len=self.max_len).unsqueeze(1).unsqueeze(2)
                # shift_tgt=None signals the model to decode on its own.
                tgt_pred_ids = self.translator(src_token_ids, shift_tgt=None, src_mask=src_mask)
                # NOTE(review): placeholder — predictions are only printed,
                # never detokenized or scored.
                print(tgt_pred_ids.shape)

        self.translator.train()
