
from core.cuda import cuda, cuda_module,cuda_array_types
from core.config import using_config
from utils.common import to_tuple
from utils.functions_collect import add, mul, neg, sub, rsub, div, rdiv, pow, reshape, transpose, sum, \
        get_item, matmul

# Operator overloading: bind the autodiff functions onto Variable so that
# ordinary expressions (a + b, a * b, a[i], a @ b, ...) build the graph.
def setup_variable():
    """Install operator overloads and helper methods on the Variable class.

    Idempotent: rebinding the same function objects is harmless, so this
    may safely be called more than once.
    """
    Variable.__add__ = add
    Variable.__radd__ = add          # addition is commutative, reuse add
    Variable.__mul__ = mul
    Variable.__rmul__ = mul          # multiplication is commutative, reuse mul
    Variable.__neg__ = neg
    Variable.__sub__ = sub
    Variable.__rsub__ = rsub
    Variable.__truediv__ = div
    Variable.__rtruediv__ = rdiv
    Variable.__pow__ = pow
    Variable.__getitem__ = get_item
    # FIX: bind the `@` operator as well — matmul was available only as a
    # method before, so `a @ b` did not dispatch to the autodiff matmul.
    Variable.__matmul__ = matmul

    Variable.matmul = matmul
    Variable.dot = matmul


class Variable:
    """A tensor-like container that records the computation graph for autodiff.

    Wraps an ndarray (NumPy or CuPy, depending on the cuda backend) together
    with its gradient, the function that created it, and its topological
    level (``generation``) in the graph.
    """

    # Higher priority than ndarray so that mixed expressions such as
    # ``ndarray + Variable`` dispatch to Variable's reflected operators.
    __array_priority__ = 200

    # Class-level flag: operator overloads only need to be installed once,
    # not on every instantiation (see __init__).
    _ops_installed = False

    def __init__(self, data, name=None):
        """
        Initialize a Variable.

        Args:
            data (ndarray-like or scalar): The data of the variable. If a scalar is passed,
                                           it will be automatically converted to a numpy array.
            name (str, optional): The name of the variable. Defaults to None.
        """
        # Normalize scalars / lists to the backend's array type; None is
        # allowed (e.g. for uninitialized parameters).
        if data is not None and not isinstance(data, cuda_array_types):
            data = cuda.to_array(data)

        self.data = data
        self.grad = None        # Variable holding dL/dself, filled in by backward()
        self.creator = None     # the Function that produced this variable (None for leaves)
        # generation marks which level of the graph this variable belongs to
        self.generation = 0
        self.name = name
        self.is_variable = True

        # FIX: previously setup_variable() ran unconditionally on every
        # construction; binding the operators once is sufficient.
        if not Variable._ops_installed:
            setup_variable()
            Variable._ops_installed = True

    def cleargrad(self):
        """Reset the accumulated gradient to None."""
        self.grad = None

    def set_creator(self, func):
        """
        Record the variable's creator and place it one generation after it.
        """
        self.creator = func
        self.generation = func.generation + 1

    def unchain(self):
        """
        Unchain the relation between this Variable and its creator.
        """
        self.creator = None

    def unchain_backward(self):
        """Cut all creator links upstream of this variable (frees the graph)."""
        if self.creator is not None:
            funcs = [self.creator]
            while funcs:
                f = funcs.pop()
                for x in f.inputs:
                    if x.creator is not None:
                        funcs.append(x.creator)
                        x.unchain()

    def _add_func_to_queue(self, func, queue, seen_set):
        """Insert func into the pending queue (deduplicated, sorted by generation).

        Keeping the queue sorted guarantees that ``queue.pop()`` always yields
        the function with the highest generation, i.e. the one closest to the
        output, so gradients are fully accumulated before being propagated.
        """
        if func not in seen_set:
            queue.append(func)
            seen_set.add(func)
            queue.sort(key=lambda x: x.generation)
        return queue, seen_set

    def backward(self, retain_grad=False, create_graph=False):
        """
        Compute gradients of this variable w.r.t. all upstream variables.

        Parameters:
            retain_grad (bool, optional): Whether to keep gradients of
                intermediate variables. Defaults to False.
            create_graph (bool, optional): Whether to build a graph for the
                backward pass itself (needed for higher-order derivatives).
                Defaults to False.
        """
        # FIX: a leaf variable has no creator; previously this crashed with
        # AttributeError when popping None from the queue.
        if self.creator is None:
            return

        if self.grad is None:
            # Seed with dL/dL = 1, wrapped so create_graph can differentiate it.
            self.grad = Variable(cuda_module.ones_like(self.data))

        funcs = []
        seen_set = set()
        # FIX: route every enqueue through _add_func_to_queue so functions are
        # processed in descending generation order. The original appended to a
        # plain list and popped LIFO, which on diamond-shaped graphs could run
        # a function before all of its output gradients were accumulated.
        funcs, seen_set = self._add_func_to_queue(self.creator, funcs, seen_set)

        while funcs:
            f = funcs.pop()  # highest generation first (queue kept sorted)
            # f.outputs holds weakrefs; call them to get the Variables back.
            gys = [output().grad for output in f.outputs]

            # Only record the backward computation itself when building a
            # graph for higher-order gradients.
            with using_config('enable_backprop', create_graph):
                gxs = to_tuple(f.backward(*gys))

            for x, gx in zip(f.inputs, gxs):
                # Accumulate: a variable may feed several functions.
                x.grad = gx if x.grad is None else x.grad + gx
                if x.creator is not None:
                    funcs, seen_set = self._add_func_to_queue(
                        x.creator, funcs, seen_set)

            if not retain_grad:
                # Drop intermediate gradients to save memory.
                for y in f.outputs:
                    y().grad = None

    def reshape(self, *shape):
        """
        Reshape the array to the specified shape.

        Args:
            *shape: Integers or a tuple/list of integers representing the desired shape.

        Returns:
            Reshaped array.
        """
        # Accept both reshape(2, 3) and reshape((2, 3)).
        if len(shape) == 1 and isinstance(shape[0], (tuple, list)):
            shape = shape[0]

        return reshape(self, shape)

    def sum(self, axis=None, keepdims=False):
        """Sum the elements over the given axis (see functions_collect.sum)."""
        return sum(self, axis, keepdims)

    def to_cpu(self) -> None:
        """
        Move the data to CPU memory.
        """
        if self.data is not None:
            self.data = cuda.to_numpy(self.data)

    def to_gpu(self):
        """
        Move the data to GPU memory.
        """
        if self.data is not None:
            self.data = cuda.to_cupy(self.data)

    def transpose(self, *axes):
        """
        Transpose the array. Accepts at most one argument which can be a tuple or list of
        integers representing the desired permutation of axes.

        Args:
            *axes: Optional. Tuple or list of integers representing the desired permutation
                   of axes.

        Returns:
            Transposed array.
        """
        # Normalize the three call styles: t(), t((1, 0)) / t([1, 0]) / t(None),
        # and t(1, 0). An empty call means "reverse all axes" (axes=None).
        if len(axes) == 0:
            axes = None
        elif len(axes) == 1:
            if isinstance(axes[0], (tuple, list)) or axes[0] is None:
                axes = axes[0]

        # axes may legitimately be None here.
        return transpose(self, axes)

    @property
    def T(self):
        """
        Transposed view of this variable (all axes reversed).
        """
        return transpose(self)

    @property
    def shape(self):
        """
        Shape of the underlying data.
        """
        return self.data.shape

    @property
    def ndim(self):
        """
        Number of dimensions of the underlying data.

        Returns:
            int: Number of dimensions.
        """
        return self.data.ndim

    @property
    def size(self):
        """
        Number of elements in the underlying data.

        Returns:
            int: Number of elements.
        """
        return self.data.size

    @property
    def dtype(self):
        """
        Data type of the underlying data.

        Returns:
            Data type of the underlying data.
        """
        return self.data.dtype

    def __len__(self) -> int:
        """
        Length of the first axis of the data.

        Returns:
            int: Length of the data.
        """
        return len(self.data)

    def __repr__(self):
        """Debug representation, e.g. ``variable([1 2 3])``."""
        if self.data is None:
            return 'variable(None)'
        # Indent continuation lines so multi-line arrays align under 'variable('.
        p = str(self.data).replace('\n', '\n' + ' ' * 9)
        return 'variable(' + p + ')'

    # FIX: __str__ was a byte-for-byte duplicate of __repr__; alias it instead.
    __str__ = __repr__