from os.path import dirname as _dirname
from time import time as _time
from torch import Tensor as _Tensor, no_grad as _no_grad
from torch.nn import Sequential as _Sequential, LeakyReLU as _LeakyReLU
from torch.nn import Softmax as _Softmax, Dropout as _Dropout
from torch.nn import Linear as _Linear
from torch.nn import Conv2d as _Conv2d, MaxPool2d as _MaxPool2d
from torch import device

# from torch.cuda import is_available
from torch import argmax
from torchvision.transforms import ToTensor as _ToTensor
from torch.utils.data import DataLoader as _DataLoader
from torchvision.datasets import MNIST as _MNIST
from BaseNet import *
from os.path import isfile as _isfile, exists as _exists
from torch import load as _load

__all__ = [
    "TIMER",
    "UNLOCK_DEVICE",
    # "DEVICE",
    "MLP",
    "LeNet",
    "AutoEncoder",
    "myTrains",
    "myTest",
    "MNISTloader",
    "loadModel",
]

# Whether to download the MNIST dataset when it is missing locally
_DOWNLOAD_DATA: bool = False
# Filesystem path where the MNIST dataset is stored (next to this file)
_DATA_PATH: str = f"{_dirname(__file__)}/MNIST"
# Whether to print wall-clock timing for training epochs and test runs
TIMER: bool = True
# Whether to consider using the GPU (device selection is currently disabled;
# see the commented-out block below)
UNLOCK_DEVICE: bool = False

# if is_available() and UNLOCK_DEVICE:
#     DEVICE = _device("cuda")
# else:
#     DEVICE = _device("cpu")


class MLP(BaseNet):
    """Four-layer perceptron classifier for flattened 28x28 MNIST digits."""

    def __init__(self):
        super(MLP, self).__init__()
        # Dense stack: 784 -> 512 -> 128 -> 10 class probabilities.
        # The attribute name `fc` must stay fixed for state_dict compatibility.
        layers = [
            _Linear(784, 512),  # 784 = 28 * 28 input pixels
            _Dropout(p=0.5),
            _LeakyReLU(),
            _Linear(512, 128),
            _Dropout(p=0.1),
            _LeakyReLU(),
            _Linear(128, 10),
            _Softmax(dim=1),
        ]
        self.fc = _Sequential(*layers)

    def forward(self, din: _Tensor):
        """Flatten each image to a 784-vector and run the dense stack."""
        flat = din.view(din.shape[0], 784)
        return self.fc(flat)


class LeNet(BaseNet):
    """Small LeNet-style convolutional classifier for MNIST."""

    def __init__(self):
        super(LeNet, self).__init__()
        # Convolutional feature extractor: 1x28x28 input -> 120x1x1 output.
        # Attribute names `conv`/`fc` must stay fixed for state_dict compatibility.
        self.conv = _Sequential(
            _Conv2d(1, 6, kernel_size=5, padding=2),
            _LeakyReLU(),
            _MaxPool2d(2, 2),
            _Conv2d(6, 16, kernel_size=5),
            _LeakyReLU(),
            _MaxPool2d(2, 2),
            _Conv2d(16, 120, kernel_size=5),
        )
        # Dense classification head: 120 features -> 10 class probabilities.
        self.fc = _Sequential(
            _LeakyReLU(),
            _Linear(120, 96),
            _Dropout(p=0.12),
            _LeakyReLU(),
            _Linear(96, 58),
            _Dropout(p=0.09),
            _LeakyReLU(),
            _Linear(58, 10),
            _Softmax(dim=1),
        )

    def forward(self, din: _Tensor):
        """Extract conv features, flatten to 120 values, then classify."""
        features = self.conv(din)
        return self.fc(features.view(din.shape[0], 120))


class AutoEncoder(BaseNet):
    """Fully-connected auto-encoder: compresses a 28x28 image to 3 values.

    ``forward`` returns both the 3-dim code and the 784-dim reconstruction.
    """

    def __init__(self, hyper: dict):
        # BUG FIX: the original called super(LeNet, self).__init__(), which
        # raises TypeError because `self` is an AutoEncoder, not a LeNet.
        super(AutoEncoder, self).__init__()
        # NOTE(review): `hyper` is accepted but never used in this block —
        # presumably consumed by BaseNet or reserved for future use; confirm.
        self.encoder = _Sequential(
            _Linear(in_features=784, out_features=128),
            _Dropout(p=0.12),
            _LeakyReLU(),
            _Linear(in_features=128, out_features=64),
            _Dropout(p=0.06),
            _LeakyReLU(),
            _Linear(in_features=64, out_features=12),
            _Dropout(p=0.01),
            _LeakyReLU(),
            _Linear(in_features=12, out_features=3),
        )
        self.decoder = _Sequential(
            _Linear(in_features=3, out_features=12),
            _Dropout(p=0.12),
            _LeakyReLU(),
            _Linear(in_features=12, out_features=64),
            _Dropout(p=0.06),
            _LeakyReLU(),
            _Linear(in_features=64, out_features=128),
            _Dropout(p=0.01),
            _LeakyReLU(),
            _Linear(in_features=128, out_features=784),
        )

    def forward(self, din: _Tensor):
        """Encode the flattened input, decode it, and return (code, recon)."""
        encoded = self.encoder(din.view(din.shape[0], 784))
        decoded = self.decoder(encoded)
        return encoded, decoded


def myTrains(mod: BaseNet) -> None:
    """Train the network for ``mod.epoch`` epochs on the MNIST training set.

    Uses the optimizer and loss function supplied by the model itself
    (``mod.getOptimizer()`` / ``mod.lossFunc``), and prints per-epoch loss,
    accuracy and — when ``TIMER`` is set — wall-clock time.
    """
    mod.train()
    optimizer = mod.getOptimizer()
    loader = MNISTloader(mod.batch, train=True)
    total = len(loader.dataset)

    for epoch in range(1, mod.epoch + 1):
        t1 = _time()
        correct = 0.0
        train_loss = 0.0
        for data, target in loader:
            # if UNLOCK_DEVICE:
            #     data, target = data.to(DEVICE), target.to(DEVICE)
            outputs = mod(data)
            predicted = argmax(outputs, 1)
            # Tensor-level .sum() instead of the Python builtin sum(): same
            # result, but one vectorized call rather than per-element iteration.
            correct += (predicted == target).sum().item()
            optimizer.zero_grad(set_to_none=True)
            loss = mod.lossFunc(outputs, target)
            loss.backward()
            optimizer.step()
            train_loss += loss.item()

        t2 = _time()
        # Average loss per batch (assumes total divides evenly by mod.batch;
        # a short final batch makes this a slight underestimate).
        train_loss /= total / mod.batch
        print(f"Epoch: {epoch}/{mod.epoch}")
        print(f"Training Loss: {train_loss:.8}")
        print(f"Accuracy rate: {correct / total:.6%}")
        if TIMER:
            print(f"Cost times: {t2-t1:.3f} second.\n")


def myTest(mod: BaseNet) -> None:
    """Evaluate the network on the MNIST test set and print its accuracy.

    Runs under ``no_grad`` in eval mode; prints wall-clock time when
    ``TIMER`` is set.
    """
    t1 = _time()
    with _no_grad():
        mod.eval()
        loader = MNISTloader(mod.batch)
        total = len(loader.dataset)
        correct = 0
        for data, target in loader:
            # if UNLOCK_DEVICE:
            #     data, target = data.to(DEVICE), target.to(DEVICE)
            outputs = mod(data)
            predicted = outputs.argmax(dim=1)
            # Tensor-level .sum() instead of the Python builtin sum(): same
            # result, but one vectorized call rather than per-element iteration.
            correct += (predicted == target).sum().item()
    t2 = _time()
    print(f"Accuracy rate: {correct / total:.6%}")
    if TIMER:
        print(f"Cost times: {t2-t1:.3f} second.\n")


def MNISTloader(batch: int, train: bool = False) -> _DataLoader:
    """Build a DataLoader over the local MNIST dataset.

    The training split is shuffled; the test split is not.
    """
    dataset = _MNIST(
        root=_DATA_PATH,
        train=train,
        download=_DOWNLOAD_DATA,
        transform=_ToTensor(),
    )
    return _DataLoader(
        dataset,
        batch_size=batch,
        shuffle=train,
        num_workers=0,
        pin_memory=False,
    )


def loadModel(
    fp: str, model: BaseNet, argDict: dict, fromParam: bool = True
) -> BaseNet:
    """加载模型"""
    mod: BaseNet = model()
    fp += "_param.pkl" if fromParam else ".pkl"
    if _isfile(fp) & _exists(fp):
        loaded: BaseNet = _load(fp)
        if fromParam:
            mod.load_state_dict(loaded)
        else:
            mod = loaded
    # if UNLOCK_DEVICE:
    #     mod = mod.to(DEVICE)
    mod.readArgsDict(argDict)
    return mod
