from torch.nn import Module
from torch.optim import Optimizer
from torch import save as _save
from torch.optim import Adam, SGD, RMSprop, AdamW

__all__ = [
    "BaseNet",
]


class BaseNet(Module):
    """Base network that loads hyper-parameters from a config dict and
    builds its own optimizer.

    Subclasses define the actual layers; this base only carries the
    training configuration (`batch`, `epoch`, `lr`, `lossFunc`, `optim`)
    and the save/optimizer helpers.
    """

    def __init__(self) -> None:
        super().__init__()

    def readArgsDict(self, diction: dict) -> "BaseNet":
        """Load hyper-parameters from ``diction`` and return ``self`` (fluent).

        Required keys (a ``KeyError`` propagates if missing):
        ``BATCH_SIZE``, ``LEARNING_RATE``, ``LOSS_FUNC``.
        Optional keys: ``EPOCH`` (default 1) and ``OPTIMIZER``
        (default ``"sgd"``, matched case-insensitively).
        """
        self.batch: int = diction["BATCH_SIZE"]
        self.epoch: int = diction.get("EPOCH", 1)
        self.lr: float = diction["LEARNING_RATE"]
        # Stored as-is; presumably a callable loss module — not validated here.
        self.lossFunc = diction["LOSS_FUNC"]
        self.optim: str = diction.get("OPTIMIZER", "sgd").lower()
        return self

    def saveModel(self, fp: str) -> None:
        """Interactively save the model in two forms.

        On a ``y`` answer, writes the whole pickled module to ``{fp}.pkl``
        and the state dict alone to ``{fp}_param.pkl``; any other answer
        is a no-op.
        """
        if input("Save this model or not?(y/n):").lower() == 'y':
            _save(self, f"{fp}.pkl")
            _save(self.state_dict(), f"{fp}_param.pkl")

    def getOptimizer(self) -> Optimizer:
        """Build the optimizer selected by ``self.optim``.

        Recognized names: ``adam``, ``adamw``, ``momentum`` (SGD with
        momentum 0.8), ``rmsprop``. Any other value falls back to plain
        SGD, matching the original behavior.
        """
        # Dispatch table beats a growing if/elif chain; only the chosen
        # factory runs, so self.parameters() is consumed exactly once.
        factory = {
            "adam": lambda p: Adam(p, lr=self.lr, betas=(0.9, 0.99)),
            "adamw": lambda p: AdamW(p, lr=self.lr, betas=(0.9, 0.99)),
            "momentum": lambda p: SGD(p, lr=self.lr, momentum=0.8),
            "rmsprop": lambda p: RMSprop(p, lr=self.lr, alpha=0.9),
        }.get(self.optim, lambda p: SGD(p, lr=self.lr))
        return factory(self.parameters())
