from abc import ABC, abstractmethod
from copy import deepcopy
from typing import Optional, List
from pathlib import Path

import torch
import torch.nn as nn
from torch.optim import Optimizer
from torch.optim.lr_scheduler import _LRScheduler

from .loss import NNLoss
from .model_utils import initialize_weights


class NNModel(ABC):
    """Abstract bundle of a network, loss, optimizer, LR scheduler and decoder.

    Subclasses implement the ``init_*`` factory methods; ``__init__`` wires the
    pieces together and applies the project's weight initialization. The
    optimizer and scheduler factories may return ``None`` (e.g. for
    inference-only models); ``state_dict``/``load`` tolerate that.
    """

    def __init__(self, lr: float = 0.01) -> None:
        super().__init__()
        self.network = self.init_network()
        self.loss = self.init_loss()
        # NOTE: `lr` is forwarded to the optimizer factory; the scheduler
        # factory takes no arguments (it reads `self.optimizer` itself).
        # The abstract signatures previously had these two swapped, which
        # made any conforming subclass raise TypeError here.
        self.optimizer = self.init_optimizer(lr)
        self.scheduler = self.init_lr_scheduler()
        self.decoder = self.init_decoder()
        initialize_weights(self.network)

    @abstractmethod
    def init_network(self) -> nn.Module:
        """Build and return the underlying ``nn.Module``."""
        raise NotImplementedError

    @abstractmethod
    def init_loss(self) -> NNLoss:
        """Build and return the loss callable used in ``forward``."""
        raise NotImplementedError

    @abstractmethod
    def init_optimizer(self, lr) -> Optional[Optimizer]:
        """Build the optimizer for ``self.network``, or return ``None``.

        Args:
            lr: Learning rate forwarded from ``__init__``.

        Example:
            >>> from torch.optim import SGD
            >>> return SGD(params=self.network.parameters(), lr=lr)
        """
        raise NotImplementedError

    @abstractmethod
    def init_lr_scheduler(self) -> Optional[_LRScheduler]:
        """Build the LR scheduler bound to ``self.optimizer``, or return ``None``.

        Example:
            >>> from torch.optim.lr_scheduler import ExponentialLR
            >>> return ExponentialLR(optimizer=self.optimizer, gamma=0.95)
        """
        raise NotImplementedError

    @abstractmethod
    def init_decoder(self):
        """Build the decoder used to post-process network outputs."""
        raise NotImplementedError

    def forward(self, inputs, targets, use_amp: bool = False):
        """Run a forward pass and compute losses.

        Args:
            inputs: Batch fed to the network.
            targets: Ground truth fed to the loss.
            use_amp: Enable CUDA automatic mixed precision for this pass.

        Returns:
            Tuple of (network outputs, loss value(s)).
        """
        with torch.cuda.amp.autocast(enabled=use_amp):
            outputs = self.network(inputs)
            losses = self.loss(outputs, targets)
            return outputs, losses

    def train(self):
        """Put the network in training mode."""
        self.network.train()

    def eval(self):
        """Put the network in evaluation mode."""
        self.network.eval()

    def to(self, device: torch.device):
        """Move network and loss to *device*; returns self for chaining."""
        self.network.to(device)
        self.loss.to(device)
        return self

    def state_dict(self) -> dict:
        """Checkpoint dict with 'network', 'optimizer' and 'scheduler' entries.

        Optimizer/scheduler entries are ``None`` when the corresponding
        component was not created (their factories may return ``None``).
        """
        return {
            "network": self.network.state_dict(),
            "optimizer": self.optimizer.state_dict() if self.optimizer is not None else None,
            "scheduler": self.scheduler.state_dict() if self.scheduler is not None else None,
        }

    def load(self, state_dict):
        """Restore network/optimizer/scheduler state saved by ``state_dict``.

        Entries that are missing or ``None``, or components that do not
        exist on this model, are skipped instead of raising.
        """
        self.network.load_state_dict(state_dict["network"])
        if self.optimizer is not None and state_dict.get("optimizer") is not None:
            self.optimizer.load_state_dict(state_dict["optimizer"])
        if self.scheduler is not None and state_dict.get("scheduler") is not None:
            self.scheduler.load_state_dict(state_dict["scheduler"])

    def load_network_weights(self, state_dict, excluded):
        """Load weights into the network, skipping excluded layers.

        Args:
            state_dict: Source mapping of layer name -> tensor.
            excluded: Substrings; any layer whose name contains one is skipped.
        """
        # Filter instead of deepcopy-then-delete: same result, no tensor copies.
        loaded = {
            name: tensor
            for name, tensor in state_dict.items()
            if not any(tag in name for tag in excluded)
        }
        info = self.network.load_state_dict(loaded, strict=False)
        print("  Missing:", info.missing_keys)
        print("  Unexpected:", info.unexpected_keys)

    def freeze_network_weights(self, tag_lists):
        """Disable gradients for parameters whose name contains any given tag."""
        for layer_name, parameter in self.network.named_parameters():
            if any(tag in layer_name for tag in tag_lists):
                parameter.requires_grad = False

    def export_onnx(
        self,
        output_path,
        input_size,
        output_names: List[str],
        input_names=("input",),
        opset_version=12,
        dynamic_axes=None,
    ):
        """Export the network to ONNX.

        Args:
            output_path: Target path; suffix is forced to ``.onnx``.
            input_size: Shape of the random dummy input used for tracing.
            output_names: Names assigned to the graph outputs.
            input_names: Names assigned to the graph inputs.
            opset_version: ONNX opset to target.
            dynamic_axes: Optional ``torch.onnx.export`` dynamic-axes mapping.
        """
        onnx_file_path = Path(output_path).with_suffix(".onnx")
        onnx_file_path.parent.mkdir(parents=True, exist_ok=True)

        torch.onnx.export(
            model=self.network,
            args=torch.randn(input_size),
            f=str(onnx_file_path),
            opset_version=opset_version,
            input_names=list(input_names),
            output_names=list(output_names),
            dynamic_axes=dynamic_axes,
        )
        print(f"Export onnx to: {onnx_file_path}")

    def export_pth(self, output_path):
        """Save the network's state dict to *output_path* with a ``.pth`` suffix."""
        pth_file_path = Path(output_path).with_suffix(".pth")
        pth_file_path.parent.mkdir(parents=True, exist_ok=True)
        torch.save(self.network.state_dict(), str(pth_file_path))
        print(f"Export pth to: {pth_file_path}")
