from core.function import Function
from core.cuda import cuda
import utils

# Aggregation functions


class Sum(Function):
    """Sum the elements of an array along the given axis.

    The backward pass reshapes the incoming gradient so the reduced
    axes reappear as size-1 dimensions, then broadcasts it back to the
    input's original shape (the gradient of sum copies ``gy`` to every
    summed element).
    """

    def __init__(self, axis=None, keepdims=False):
        """Create a sum operation.

        Args:
            axis (None or int or tuple of ints): Axis (or axes) along
                which to sum. ``None`` sums all elements to a scalar.
                Defaults mirror ``numpy.ndarray.sum``.
            keepdims (bool): If True, the reduced axes are kept with
                size 1 so the output has the same ndim as the input.
        """
        self.x_shape = None  # input shape, remembered for backward
        self.axis = axis
        self.keepdims = keepdims

    def forward(self, x):
        x = cuda.to_array(x)
        self.x_shape = x.shape
        y = x.sum(axis=self.axis, keepdims=self.keepdims)
        return y

    def backward(self, gy):
        # Re-insert the summed axes as size-1 dims so broadcasting can
        # expand the gradient back to the input's shape.
        gy = reshape_sum_backward(gy, self.x_shape, self.axis, self.keepdims)
        gx = utils.functions_collect.broadcast_to(gy, self.x_shape)
        return gx

    def sum(self, x):
        # Convenience wrapper: apply this Function instance to x.
        return self(x)


def reshape_sum_backward(gy, x_shape, axis, keepdims):
    """Reshape gradient appropriately for functions.sum's backward.

    Args:
        gy (Variable): Gradient variable from the output by backprop.
        x_shape (tuple): Shape used at sum function's forward.
        axis (None or int or tuple of ints): Axis used at sum function's
            forward.
        keepdims (bool): Keepdims used at sum function's forward.

    Returns:
        Variable: Gradient variable which is reshaped appropriately
    """
    ndim = len(x_shape)

    # Normalize `axis` to either None or a tuple of ints.
    if axis is None or isinstance(axis, tuple):
        axes = axis
    else:
        axes = (axis,)

    # When the input was 0-d, everything was summed, or keepdims held,
    # gy already has a usable shape; otherwise re-insert each reduced
    # axis as a size-1 dimension (negative axes normalized via modulo).
    if ndim == 0 or axes is None or keepdims:
        target_shape = gy.shape
    else:
        target_shape = list(gy.shape)
        for a in sorted(ax % ndim for ax in axes):
            target_shape.insert(a, 1)

    return gy.reshape(target_shape)
