import sys
import warnings

import torch

# Cache of instantiated backends, keyed by framework name (e.g. "numpy",
# "torch").  Populated lazily by get_backend()/get_backend_by_name() the
# first time a concrete backend's framework module appears in sys.modules.
known_backends: dict = {}


def get_backend_by_name(name: str) -> 'BackendBase':
    """Return the backend registered under *name*.

    Checks the ``known_backends`` cache first.  On a miss, walks the whole
    subclass tree of :class:`BackendBase` with an explicit stack, lazily
    instantiating and caching every concrete backend whose framework module
    is already imported, and returns the one whose ``framework_name``
    matches.

    :param name: framework name, e.g. ``"numpy"`` or ``"torch"``.
    :return: the matching backend, or a bare :class:`BackendBase` instance
        as a fallback when no backend matches.
    """
    cached = known_backends.get(name)
    if cached is not None:
        return cached

    # Collect every concrete (non-abstract) subclass, stack-based traversal.
    concrete_backends = []
    stack = list(BackendBase.__subclasses__())
    while stack:
        candidate = stack.pop()
        if not candidate.is_abstract_class():
            concrete_backends.append(candidate)
        stack.extend(candidate.__subclasses__())

    for backend_cls in concrete_backends:
        if backend_cls.framework_name in known_backends:
            continue
        # Only instantiate backends whose framework is already imported,
        # so we never force-import heavy frameworks.
        if backend_cls.framework_name in sys.modules:
            instance = backend_cls()
            known_backends[instance.framework_name] = instance
            if instance.framework_name == name:
                return instance
    return BackendBase()


def get_backend(tensor) -> 'BackendBase':
    """Return the backend whose native tensor type matches *tensor*.

    Checks the cached backends first, then walks the subclass tree of
    :class:`BackendBase` with an explicit stack, lazily instantiating and
    caching each concrete backend whose framework module is already
    imported, until one recognizes *tensor*.

    :param tensor: tensor-like object (e.g. ``numpy.ndarray``,
        ``torch.Tensor``).
    :return: the matching backend, or a bare :class:`BackendBase` instance
        as a fallback when nothing matches.
    """
    # Fast path: a cached backend already recognizes this tensor type.
    for cached in known_backends.values():
        if cached.check_appropriate_type(tensor):
            return cached

    # Collect every concrete (non-abstract) subclass, stack-based traversal.
    concrete_backends = []
    stack = list(BackendBase.__subclasses__())
    while stack:
        candidate = stack.pop()
        if not candidate.is_abstract_class():
            concrete_backends.append(candidate)
        stack.extend(candidate.__subclasses__())

    for backend_cls in concrete_backends:
        if backend_cls.framework_name in known_backends:
            continue
        # Only instantiate backends whose framework is already imported.
        if backend_cls.framework_name in sys.modules:
            instance = backend_cls()
            known_backends[instance.framework_name] = instance
            if instance.check_appropriate_type(tensor):
                return instance

    return BackendBase()


class BackendBase:
    """Abstract interface for a tensor-framework backend (numpy, torch, ...).

    Concrete subclasses set :attr:`framework_name`, override
    :meth:`is_abstract_class` to return ``False``, and implement the tensor
    operations below.  Every operation on this base class raises
    :class:`NotImplementedError`.
    """

    # Name of the framework module as it appears in sys.modules
    # (e.g. "numpy", "torch"); None for the abstract base.
    framework_name = None

    def __init__(self):
        pass

    def check_appropriate_type(self, tensor):
        """Return whether *tensor* is this backend's native tensor type.

        :param tensor: object to test.
        :return: bool.
        """
        raise NotImplementedError("Abstract class, cannot be invoked!")

    @staticmethod
    def is_abstract_class():
        """Whether this backend class is abstract.

        Some backends may be abstract classes; concrete backends override
        this to return ``False`` so that the discovery logic in
        ``get_backend``/``get_backend_by_name`` instantiates them.

        :return: ``True`` for the abstract base.
        """
        return True

    def from_numpy(self, tensor):
        """Convert a numpy array into this backend's tensor type.

        :param tensor: numpy array.
        :return: backend-native tensor.
        """
        raise NotImplementedError("Abstract class, cannot be invoked!")

    def to_numpy(self, tensor):
        """Convert a backend-native tensor into a numpy array.

        :param tensor: backend-native tensor.
        :return: numpy array.
        """
        raise NotImplementedError("Abstract class, cannot be invoked!")

    def device(self, tensor):
        """Return the device on which *tensor* lives.

        :param tensor: backend-native tensor.
        :return: device identifier (e.g. ``"cpu"``).
        """
        raise NotImplementedError("Abstract class, cannot be invoked!")

    def to_device(self, tensor, device="cpu"):
        """Move *tensor* to *device*.

        :param tensor: backend-native tensor.
        :param device: target device identifier.
        :return: tensor on the target device.
        """
        raise NotImplementedError("Abstract class, cannot be invoked!")

    def normal(self, loc, scale, shape, device="cpu"):
        """Sample from a normal distribution.

        :param device: device to place the result on.
        :param loc: mean of the distribution.
        :param scale: standard deviation of the distribution.
        :param shape: output shape.
        :return: tensor of samples.
        """
        raise NotImplementedError("Abstract class, cannot be invoked!")

    def uniform(self, low, high, shape, device="cpu"):
        """Sample uniformly from ``[low, high)``.

        :param device: device to place the result on.
        :param low: lower bound (inclusive).
        :param high: upper bound (exclusive).
        :param shape: output shape.
        :return: tensor of samples.
        """
        raise NotImplementedError("Abstract class, cannot be invoked!")

    def laplace(self, loc, scale, shape, device="cpu"):
        """Sample from a Laplace distribution.

        :param device: device to place the result on.
        :param loc: location parameter of the distribution.
        :param scale: scale parameter of the distribution.
        :param shape: output shape.
        :return: tensor of samples.
        """
        raise NotImplementedError("Abstract class, cannot be invoked!")

    def zeros(self, shape, device="cpu"):
        """Return a tensor of zeros.

        :param device: device to place the result on.
        :param shape: output shape.
        :return: zero-filled tensor.
        """
        raise NotImplementedError("Abstract class, cannot be invoked!")

    def ones(self, shape, device="cpu"):
        """Return a tensor of ones.

        :param device: device to place the result on.
        :param shape: output shape.
        :return: one-filled tensor.
        """
        raise NotImplementedError("Abstract class, cannot be invoked!")

    def eye(self, n, device="cpu"):
        """Return an identity matrix.

        :param device: device to place the result on.
        :param n: number of rows, or a tuple of (rows, cols).
        :return: identity matrix tensor.
        """
        raise NotImplementedError("Abstract class, cannot be invoked!")

    def squeeze(self, tensor, dim):
        """Remove a size-1 dimension at position *dim*.

        :param tensor: backend-native tensor.
        :param dim: dimension index to remove.
        :return: squeezed tensor.
        """
        raise NotImplementedError("Abstract class, cannot be invoked!")

    def unsqueeze(self, tensor, dim):
        """Insert a size-1 dimension at position *dim*.

        :param tensor: backend-native tensor.
        :param dim: dimension index to insert.
        :return: unsqueezed tensor.
        """
        raise NotImplementedError("Abstract class, cannot be invoked!")

    def matmul(self, a, b):
        """Matrix multiplication.

        :param a: left operand.
        :param b: right operand.
        :return: product tensor.
        """
        raise NotImplementedError("Abstract class, cannot be invoked!")

    def transpose(self, tensor, axes=None):
        """Transpose *tensor*, optionally with an explicit axis permutation.

        :param tensor: backend-native tensor.
        :param axes: permutation of axes, or ``None`` for the default.
        :return: transposed tensor.
        """
        raise NotImplementedError("Abstract class, cannot be invoked!")

    def inv(self, tensor):
        """Matrix inverse.

        :param tensor: square matrix tensor.
        :return: inverse matrix tensor.
        """
        raise NotImplementedError("Abstract class, cannot be invoked!")

    def shape(self, tensor):
        """Return the shape of *tensor*.

        :param tensor: backend-native tensor.
        :return: shape of the tensor.
        """
        raise NotImplementedError("Abstract class, cannot be invoked!")

    def reshape(self, tensor, shape):
        """Reshape *tensor* to *shape*.

        :param tensor: backend-native tensor.
        :param shape: target shape.
        :return: reshaped tensor.
        """
        raise NotImplementedError("Abstract class, cannot be invoked!")

    def rand(self, shape, device="cpu"):
        """Sample uniformly from ``[0, 1)``.

        :param device: device to place the result on.
        :param shape: output shape.
        :return: tensor of samples.
        """
        raise NotImplementedError("Abstract class, cannot be invoked!")

    def logic_and(self, a, b):
        """Element-wise logical AND.

        :param a: left operand.
        :param b: right operand.
        :return: boolean tensor.
        """
        raise NotImplementedError("Abstract class, cannot be invoked!")

    def logic_or(self, a, b):
        """Element-wise logical OR.

        :param a: left operand.
        :param b: right operand.
        :return: boolean tensor.
        """
        raise NotImplementedError("Abstract class, cannot be invoked!")

    def logic_not(self, a):
        """Element-wise logical NOT.

        :param a: operand.
        :return: boolean tensor.
        """
        raise NotImplementedError("Abstract class, cannot be invoked!")

    def norm(self, a):
        """Norm of *a* (framework default norm).

        :param a: backend-native tensor.
        :return: scalar norm.
        """
        raise NotImplementedError("Abstract class, cannot be invoked!")

    def exp(self, tensor):
        """Element-wise exponential.

        :param tensor: backend-native tensor.
        :return: tensor of ``e**x`` values.
        """
        raise NotImplementedError("Abstract class, cannot be invoked!")

    def set_require_grad(self, tensor, require_grad=False):
        """Set whether gradients should be tracked for *tensor*.

        No-op for backends without autograd (e.g. numpy).

        :param tensor: backend-native tensor.
        :param require_grad: whether to track gradients.
        :return: the tensor.
        """
        raise NotImplementedError("Abstract class, cannot be invoked!")

    def from_python(self, vector):
        """Generate a tensor from a list or tuple.

        :param vector: Python list or tuple of values.
        :return: backend-native tensor.
        """
        raise NotImplementedError("Abstract class, cannot be invoked!")


class NumpyBackend(BackendBase):
    """Backend implementation backed by ``numpy.ndarray``."""

    framework_name = 'numpy'

    def __init__(self):
        super().__init__()
        # Import lazily so the module loads even without numpy installed.
        import numpy
        self.np = numpy

    def check_appropriate_type(self, tensor):
        """Return True if *tensor* is a ``numpy.ndarray``."""
        return isinstance(tensor, self.np.ndarray)

    @staticmethod
    def is_abstract_class():
        """Concrete backend: discoverable by get_backend()."""
        return False

    def device(self, tensor):
        return "cpu"  # numpy is only on cpu

    def to_device(self, tensor, device="cpu"):
        # No-op: numpy arrays cannot move off the CPU.
        return tensor

    def normal(self, loc, scale, shape, device="cpu"):
        return self.np.random.normal(loc=loc, scale=scale, size=shape)

    def uniform(self, low, high, shape, device="cpu"):
        return self.np.random.uniform(low=low, high=high, size=shape)

    def laplace(self, loc, scale, shape, device="cpu"):
        return self.np.random.laplace(loc=loc, scale=scale, size=shape)

    def zeros(self, shape, device="cpu"):
        return self.np.zeros(shape=shape)

    def ones(self, shape, device="cpu"):
        return self.np.ones(shape=shape)

    def eye(self, n, device="cpu"):
        # A tuple is interpreted as (rows, cols) for a rectangular identity.
        if isinstance(n, tuple):
            return self.np.eye(*n)
        else:
            return self.np.eye(n)

    def squeeze(self, tensor, dim):
        return self.np.squeeze(tensor, dim)

    def unsqueeze(self, tensor, dim):
        return self.np.expand_dims(tensor, axis=dim)

    def matmul(self, a, b):
        return self.np.matmul(a, b)

    def transpose(self, tensor, axes=None):
        return self.np.transpose(tensor, axes=axes)

    def inv(self, tensor):
        return self.np.linalg.inv(tensor)

    def shape(self, tensor):
        return tensor.shape

    def reshape(self, tensor, shape):
        return self.np.reshape(tensor, shape)

    def rand(self, shape, device="cpu"):
        return self.np.random.rand(*shape)

    def logic_and(self, a, b):
        return self.np.logical_and(a, b)

    def logic_or(self, a, b):
        # BUG FIX: previously called self.logic_or(a, b), recursing forever
        # (RecursionError on every call).
        return self.np.logical_or(a, b)

    def logic_not(self, a):
        # BUG FIX: numpy's function is logical_not; np.logic_not does not
        # exist and raised AttributeError.
        return self.np.logical_not(a)

    def norm(self, a):
        return self.np.linalg.norm(a)

    def exp(self, tensor):
        return self.np.exp(tensor)

    def to_numpy(self, tensor):
        # Already a numpy array.
        return tensor

    def from_numpy(self, tensor):
        # Already a numpy array.
        return tensor

    def set_require_grad(self, tensor, require_grad=False):
        # No-op: numpy has no autograd.
        return tensor

    def from_python(self, vector):
        return self.np.array(vector)


class PytorchBackend(BackendBase):
    """Backend implementation backed by ``torch.Tensor``."""

    framework_name = "torch"

    def __init__(self):
        super().__init__()
        # Import lazily so the module loads even without torch installed.
        import torch
        self.torch = torch

    def check_appropriate_type(self, tensor):
        """Return True if *tensor* is a ``torch.Tensor``."""
        return isinstance(tensor, self.torch.Tensor)

    @staticmethod
    def is_abstract_class():
        """Concrete backend: discoverable by get_backend()."""
        return False

    def from_numpy(self, tensor):
        return self.torch.from_numpy(tensor)

    def to_numpy(self, tensor):
        # detach() drops the autograd graph; cpu() is required before numpy().
        return tensor.detach().cpu().numpy()

    def device(self, tensor):
        return tensor.device

    def to_device(self, tensor, device="cpu"):
        return tensor.to(device=device)

    def normal(self, loc, scale, shape, device="cpu"):
        return self.torch.normal(mean=loc, std=scale, size=shape, device=device)

    def uniform(self, low, high, shape, device="cpu"):
        # torch has no direct uniform(low, high) factory; rescale rand().
        return (high - low) * self.torch.rand(size=shape, device=device) + low

    def laplace(self, loc, scale, shape, device="cpu"):
        return self.torch.distributions.laplace.Laplace(loc=loc, scale=scale).sample(sample_shape=shape).to(device)

    def zeros(self, shape, device="cpu"):
        return self.torch.zeros(size=shape, device=device)

    def ones(self, shape, device="cpu"):
        return self.torch.ones(size=shape, device=device)

    def eye(self, n, device="cpu"):
        # A tuple is interpreted as (rows, cols) for a rectangular identity.
        if isinstance(n, tuple):
            # BUG FIX: the column count was n[0] (always square), which
            # disagreed with NumpyBackend.eye(*n); use n[1] for columns.
            return self.torch.eye(n=n[0], m=n[1], device=device)
        else:
            return self.torch.eye(n=n, device=device)

    def squeeze(self, tensor, dim):
        return self.torch.squeeze(tensor, dim=dim)

    def unsqueeze(self, tensor, dim):
        return self.torch.unsqueeze(tensor, dim=dim)

    def matmul(self, a, b):
        # torch.matmul (not torch.mm) to support batched (>2-D) operands,
        # consistent with NumpyBackend.matmul which uses np.matmul.
        return self.torch.matmul(a, b)

    def transpose(self, tensor, axes=None):
        if axes is None:
            # Default: swap the first two dimensions, like a 2-D transpose.
            return self.torch.transpose(tensor, dim0=0, dim1=1)
        else:
            return self.torch.permute(tensor, axes)

    def inv(self, tensor):
        return self.torch.linalg.inv(tensor)

    def shape(self, tensor):
        return tensor.shape

    def reshape(self, tensor, shape):
        return self.torch.reshape(tensor, shape=shape)

    def rand(self, shape, device="cpu"):
        return self.torch.rand(size=shape, device=device)

    def logic_and(self, a, b):
        return self.torch.logical_and(a, b)

    def logic_or(self, a, b):
        return self.torch.logical_or(a, b)

    def logic_not(self, a):
        return self.torch.logical_not(a)

    def norm(self, a):
        return self.torch.norm(a)

    def exp(self, tensor):
        return self.torch.exp(tensor)

    def set_require_grad(self, tensor, require_grad=False):
        # BUG FIX: the attribute was misspelled "require_grad", which just
        # attached a dead attribute and never enabled/disabled autograd.
        tensor.requires_grad = require_grad
        return tensor

    def from_python(self, vector):
        return self.torch.tensor(data=vector)
