import torch
import torch.nn.functional as F
from ignite.engine.engine import Engine, State, Events
from ignite._utils import convert_tensor

class SinglePatchTrainer:
    """Trainer for a single-patch matching model, driven by ignite engines.

    Wraps one feature/scoring network (``Fts``), its optimizer and the
    matching loss, and exposes ignite process functions (``train``,
    ``evaluate``) plus engine event handlers (``on_epoch_start`` etc.)
    that print/record progress through ``logger``.
    """

    def __init__(self, models, optimizers, loss_fns, logger, config):
        """
        Args:
            models: dict with key ``"Fts1"`` -> feature/scoring network.
            optimizers: dict with key ``"Fts"`` -> optimizer, or falsy when
                the trainer is used for evaluation/testing only.
            loss_fns: dict with key ``"Lmatch"`` -> sequence whose first
                element is the matching loss, or falsy for eval-only use.
            logger: experiment logger providing ``add_scalars``,
                ``log_image_grid`` and a ``counters`` dict.
            config: object with ``trainer_config``, ``device`` and
                ``log_freq`` attributes.
        """
        self.opts = config.trainer_config

        self.log_eval_batch = False

        self.Fts = models["Fts1"]

        # Default to None so _zero_grad() and eval-only usage can safely
        # test for a missing optimizer/loss instead of hitting an
        # AttributeError when `optimizers`/`loss_fns` were not provided.
        self.optim_fts = optimizers["Fts"] if optimizers else None
        self.Lmatch = loss_fns["Lmatch"][0] if loss_fns else None

        self.device = torch.device(config.device)
        self.logger = logger
        self.log_freq = config.log_freq
        self.attached = {}   # named external objects (e.g. data loaders), see attach()
        self.curr_epoch = 0
        self.log_str = ""    # latest training-loss summary, printed by on_iteration_start

    def _prepare_batch(self, batch, non_blocking=True):
        """Move an (inputs, targets) batch from CPU to the configured device.

        Inputs are cast to float; targets keep their original dtype.
        """
        xs, ys = batch

        xs = convert_tensor(xs, self.device, non_blocking=non_blocking).float()

        ys = convert_tensor(ys, self.device, non_blocking=non_blocking)

        return xs, ys

    def _zero_grad(self):
        # No-op when the trainer was built without an optimizer.
        if self.optim_fts:
            self.optim_fts.zero_grad()

    def train(self, engine, batch):
        """Ignite process function: one optimization step on one batch.

        Computes the matching loss on the batch, backpropagates, steps the
        optimizer, and records the loss (and periodically, image grids)
        through the logger. Returns ``(None, None)`` as the engine output.
        """
        self.Fts.train()

        log_str = ""
        curr_step = self.logger.counters["train"]

        patch, y = self._prepare_batch(batch)

        self._zero_grad()

        scores, _, feature_map, atten = self.Fts(patch)

        # Periodically log input patches and attention maps for inspection.
        if engine.state.iteration % 20 == 0:
            if patch.size()[1] == 4:
                # 4-channel input: select 3 channels so the grid renders
                # as an RGB image.
                # NOTE(review): channel order [2, 1, 3] looks unusual —
                # confirm it is intended (not e.g. [2, 1, 0]).
                self.logger.log_image_grid("patch", patch[:, [2, 1, 3]], "train")
            else:
                self.logger.log_image_grid("patch", patch, "train")
            self.logger.log_image_grid("Attention", atten, "train")

        loss = self.Lmatch(scores, y)

        self.logger.add_scalars('train/match_loss', {'Lmatch': loss.item()}, curr_step)

        log_str += "Loss: {:.5f} \t".format(loss.item())

        loss.backward()
        self.optim_fts.step()

        # Stored so on_iteration_start can print it at the next log tick.
        self.log_str = log_str

        return None, None

    def on_epoch_start(self, engine, phase=None):
        """Engine handler: track the current epoch / arm eval-batch logging."""
        if phase == "train":
            self.curr_epoch = engine.state.epoch

        if phase == "evaluate":
            self.log_eval_batch = True

    def on_epoch_end(self, engine, phase=None):
        """Engine handler: print the epoch's metrics after eval/test."""
        if phase in ["evaluate", "test"]:
            metrics = engine.state.metrics
            log = ""
            for k, v in metrics.items():
                log += "{}: {:.5f}  ".format(k, v)

            print("{} Results - Epoch: {}  {}".format(phase.capitalize(), self.curr_epoch, log))

    def on_iteration_start(self, engine, phase=None):
        """Engine handler: print progress every ``log_freq`` iterations.

        Note: ``self.log_str`` holds the loss of the PREVIOUS training
        iteration, since this handler fires before the current step runs.
        """
        if phase == "train":
            # Iteration index within the current epoch (ignite's counter
            # is cumulative across epochs).
            curr_iter = (engine.state.iteration - 1) % len(self.attached["train_loader"]) + 1

            if curr_iter % self.log_freq == 0:
                print("Epoch[{}] Iteration[{}/{}] {}".format(engine.state.epoch, curr_iter, len(self.attached["train_loader"]), self.log_str))

        elif phase == "test":
            curr_iter = (engine.state.iteration - 1) % len(self.attached["test_loader"]) + 1
            if curr_iter % self.log_freq == 0:
                print("Iteration[{}/{}]".format(curr_iter, len(self.attached["test_loader"])))

    def on_iteration_end(self, engine, phase=None):
        """Engine handler: intentionally a no-op."""
        pass

    def evaluate(self, engine, batch):
        """Ignite process function for evaluation: currently a no-op."""
        return None, None

    def attach(self, name, obj):
        """Register an external object (e.g. a data loader) under ``name``."""
        self.attached[name] = obj