import os
import torch

from .plugins_base import PluginBase
from trainer.trainer import PluginType, TrainContext


class BestModelSaverPlugin(PluginBase):
    """Plugin that checkpoints the model whenever validation mIoU improves.

    Hooks into ``EPOCH_END``: reads ``val_miou`` from the training workspace
    and, on strict improvement over the best value seen so far, writes a
    checkpoint (epoch, model/optimizer/scheduler state, mIoU) under
    ``<results_folder>/checkpoints``.
    """

    plugin_hooks = {
        PluginType.EPOCH_END: "check_and_save"
    }

    def __init__(self):
        # Best validation mIoU observed so far; a checkpoint is written only
        # on strict improvement, so an initial 0.0 means the first non-zero
        # validation result always triggers a save.
        self.best_miou = 0.0

    def check_and_save(self, ctx: TrainContext):
        """Save a checkpoint if this epoch's validation mIoU beats the best so far.

        No-op when the workspace has no ``val_miou`` entry (e.g. an epoch
        without a validation pass). Logs the new best value if a ``logger``
        callable is present in the workspace.
        """
        val_miou = ctx.workspace.get("val_miou")
        if val_miou is None:
            return

        if val_miou > self.best_miou:
            if self.check_key(ctx.workspace, "logger"):
                ctx.workspace["logger"](f'    New best miou in val: {val_miou:.6f}')
            self.best_miou = val_miou
            self._save_model(ctx, val_miou)

    def _save_model(self, ctx: TrainContext, val_miou: float):
        """Write model/optimizer/scheduler state to ``<results_folder>/checkpoints``.

        Silently returns when ``results_folder`` is missing from the workspace
        (``check_key`` is expected to surface the supplied message).
        """
        # Bug fix: the original message rendered as
        # "BestModelSaverPluginneed variable: results_folder" (missing space,
        # wrong verb form) because of `__name__ }need`.
        if not self.check_key(ctx.workspace, "results_folder",
                              f"{type(self).__name__} needs variable: results_folder"):
            return
        checkpoint_path = os.path.join(ctx.workspace["results_folder"], "checkpoints")
        os.makedirs(checkpoint_path, exist_ok=True)

        # File name encodes model id, score, and epoch so checkpoints never
        # overwrite each other and the best can be identified at a glance.
        save_path = os.path.join(
            checkpoint_path,
            f"{ctx.cfg.get('model').value}_miou_{val_miou:.6f}_epoch_{ctx.epoch}.pth"
        )

        # The scheduler is optional in the workspace; store None in that case
        # so loading code can distinguish "no scheduler" from a missing key.
        torch.save({
            "epoch": ctx.epoch,
            "model_state_dict": ctx.model.state_dict(),
            "optimizer_state_dict": ctx.optimizer.state_dict(),
            "scheduler_state_dict": ctx.workspace["scheduler"].state_dict()
            if self.check_key(ctx.workspace, "scheduler") else None,
            "miou": val_miou,
        }, save_path)
