import numpy as np

from autograd.functions import Function


'''
class Function:
    def __init__(self):
        self.saved_tensors = []

    def save_for_backward(self, *x):
        self.saved_tensors.extend(x)

    def forward(self, *args, **kwargs):
        raise NotImplementedError("You must implement the forward function for custom Function.")

    def backward(self, *args, **kwargs):
        raise NotImplementedError("You must implement the forward function for custom Function.")

    def __call__(self, *ts, **kwargs):
        data = self.forward(*[t.data for t in ts], **kwargs)
        requires_grad = any([t.requires_grad for t in ts])
        outputs = tensor.Tensor(data, requires_grad=requires_grad, grad_fn=self)

        self.inputs = ts
        
        return outputs
'''

# All computation below operates directly on ndarrays.
"""
https://numpy.org/doc/stable/user/basics.broadcasting.html
Two dimensions are compatible when:
1.they are equal, or
2.one of them is 1
When an operation involves broadcasting, the output array's shape differs
from an input's shape, so the gradient we compute has a mismatched shape
as well; the incoming grad must therefore be reduced back accordingly.
eg.
Forward:
A:   8 * 1
B:   1 * 5
res: 8 * 5 
==========
Backward:  sum (accumulate) the gradient along the broadcast dimensions
grad_res: 8 * 5 
grad_A:   8 * 1
grad_B:   1 * 5
"""

def handle_broadcasting(grad, shape):
    """Reduce `grad` back to `shape` after a broadcast forward pass.

    When broadcasting expanded an input during forward, the upstream
    gradient carries the broadcast (larger) shape; summing over the
    added and stretched axes recovers a gradient matching the input.
    """
    # Collapse the extra leading axes that broadcasting prepended.
    excess = grad.ndim - len(shape)
    for _ in range(excess):
        grad = grad.sum(axis=0)

    # Axes of size 1 were stretched during forward; sum them back down
    # while keeping the dimension so the shape matches exactly.
    for axis, size in enumerate(shape):
        if size == 1:
            grad = grad.sum(axis=axis, keepdims=True)
    return grad

class Add(Function):
    """Elementwise addition with broadcasting support."""

    def forward(self, x1, x2):
        # Only the shapes are needed to un-broadcast the gradient.
        self.save_for_backward(x1.shape, x2.shape)
        return x1 + x2

    def backward(self, grad):
        shape1, shape2 = self.saved_tensors
        # d(x1 + x2)/dx1 = d(x1 + x2)/dx2 = 1
        return (handle_broadcasting(grad, shape1),
                handle_broadcasting(grad, shape2))

class Sub(Function):
    """Elementwise subtraction with broadcasting support."""

    def forward(self, x1, x2):
        # Only the shapes are needed to un-broadcast the gradient.
        self.save_for_backward(x1.shape, x2.shape)
        return x1 - x2

    def backward(self, grad):
        shape1, shape2 = self.saved_tensors
        # d(x1 - x2)/dx1 = 1, d(x1 - x2)/dx2 = -1
        return (handle_broadcasting(grad, shape1),
                handle_broadcasting(-grad, shape2))

class Mul(Function):
    """Elementwise multiplication with broadcasting support."""

    def forward(self, x1, x2):
        # Both operands are needed in backward (product rule).
        self.save_for_backward(x1, x2)
        return x1 * x2

    def backward(self, grad):
        a, b = self.saved_tensors
        # d(a*b)/da = b, d(a*b)/db = a
        grad_a = handle_broadcasting(grad * b, a.shape)
        grad_b = handle_broadcasting(grad * a, b.shape)
        return grad_a, grad_b

class TrueDiv(Function):
    """Elementwise true division: (u/v)' = (u'v - v'u) / v^2."""

    def forward(self, x1, x2):
        self.save_for_backward(x1, x2)
        return x1 / x2

    def backward(self, grad):
        num, den = self.saved_tensors
        # d(u/v)/du = 1/v
        grad_num = handle_broadcasting(grad / den, num.shape)
        # d(u/v)/dv = -u / v^2
        grad_den = handle_broadcasting(-grad * num / den ** 2, den.shape)
        return grad_num, grad_den

class Neg(Function):
    """Elementwise negation; no state needs saving."""

    def forward(self, x):
        return -x

    def backward(self, grad):
        # d(-x)/dx = -1
        return -grad

class Matmul(Function):
    """Matrix multiplication (x1 @ x2).

    Fix/generalization: the original backward used ``.T``, which reverses
    *all* axes and is therefore wrong for stacked (ndim > 2) operands.
    ``np.swapaxes(..., -1, -2)`` transposes only the matrix axes, and
    ``handle_broadcasting`` folds gradients of broadcast batch dimensions
    back to each input's shape. For plain 2-D operands this is identical
    to ``grad @ x2.T`` / ``x1.T @ grad``.
    """

    def forward(self, x1, x2):
        self.save_for_backward(x1, x2)
        return x1 @ x2

    def backward(self, grad):
        x1, x2 = self.saved_tensors
        grad_x1 = handle_broadcasting(grad @ np.swapaxes(x2, -1, -2), x1.shape)
        grad_x2 = handle_broadcasting(np.swapaxes(x1, -1, -2) @ grad, x2.shape)
        return grad_x1, grad_x2

class Sum(Function):
    """Summation over all elements (axis=None) or one integer axis.

    NOTE(review): `axis` is assumed to be None or a single int — a tuple
    of axes would break np.repeat in backward; confirm against callers.
    """

    def forward(self, x, axis=None):
        # Only the shape is required to rebuild the gradient; saving it
        # instead of `x` avoids keeping the whole array alive.
        self.save_for_backward(x.shape, axis)
        return x.sum(axis=axis)

    def backward(self, grad):
        shape, axis = self.saved_tensors
        if axis is None:
            # Every element contributed with weight 1: broadcast the
            # scalar gradient out to the input shape.
            grad = grad * np.ones(shape)
        else:
            # Reinsert the reduced axis, then tile the gradient along it.
            grad = np.expand_dims(grad, axis)
            grad = np.repeat(grad, shape[axis], axis)
        return grad

class Mean(Function):
    """Mean over all elements (axis=None) or one integer axis.

    Fix: the `keepdims` parameter was accepted but silently ignored by
    forward; it is now passed through to np.mean and honoured when the
    gradient is expanded in backward. Behavior for the default
    keepdims=False is unchanged.
    """

    def forward(self, x, axis=None, keepdims=False):
        # Shape alone suffices for backward; no need to hold onto x.
        self.save_for_backward(x.shape, axis, keepdims)
        return x.mean(axis=axis, keepdims=keepdims)

    def backward(self, grad):
        shape, axis, keepdims = self.saved_tensors
        if axis is None:
            # grad is scalar-like (or all-ones shape with keepdims);
            # either broadcasts cleanly against np.ones(shape).
            n = int(np.prod(shape))
            grad = grad * np.ones(shape)
        else:
            n = shape[axis]
            if not keepdims:
                # The reduced axis was dropped; put it back first.
                grad = np.expand_dims(grad, axis)
            grad = np.repeat(grad, n, axis)
        # Each input element contributed 1/n to the mean.
        return grad / n


class Max(Function):
    """Maximum over all elements (axis=None) or one integer axis.

    Fix: with an explicit `axis`, the reduced-shape grad was multiplied
    against the full-shape mask without reinserting the reduced axis,
    which fails to broadcast for any axis other than 0. The axis is now
    expanded first. Note: ties route the full gradient to every maximal
    position (same as the original's mask semantics).
    """

    def forward(self, x, axis=None):
        self.save_for_backward(x, axis)
        return np.max(x, axis=axis)

    def backward(self, grad):
        x, axis = self.saved_tensors
        # True at positions holding the (per-slice) maximum.
        mask = (np.max(x, axis=axis, keepdims=True) == x)
        if axis is not None:
            # Reinsert the reduced axis so grad broadcasts against mask.
            grad = np.expand_dims(grad, axis)
        return grad * mask

class Min(Function):
    """Minimum over all elements (axis=None) or one integer axis.

    Fix: same broadcast defect as Max — with an explicit `axis`, the
    reduced grad did not broadcast against the full-shape mask for any
    axis other than 0; the reduced axis is now reinserted first.
    """

    def forward(self, x, axis=None):
        self.save_for_backward(x, axis)
        return np.min(x, axis=axis)

    def backward(self, grad):
        x, axis = self.saved_tensors
        # True at positions holding the (per-slice) minimum.
        mask = (np.min(x, axis=axis, keepdims=True) == x)
        if axis is not None:
            grad = np.expand_dims(grad, axis)
        return grad * mask

class Log(Function):
    """Natural logarithm.

    Fix: backward bound `self.saved_tensors` — a *list* — to `x` instead
    of unpacking the single saved array, so `grad / x` divided by a
    one-element list and produced a wrongly shaped result.
    """

    def forward(self, x):
        self.save_for_backward(x)
        return np.log(x)

    def backward(self, grad):
        x, = self.saved_tensors
        # d(log x)/dx = 1/x
        return grad / x

class Exp(Function):
    """Elementwise exponential.

    Fix: backward bound `self.saved_tensors` (a list) instead of
    unpacking the saved array. Since d(e^x)/dx = e^x, the forward result
    is now saved and reused, also avoiding a second np.exp evaluation.
    """

    def forward(self, x):
        out = np.exp(x)
        self.save_for_backward(out)
        return out

    def backward(self, grad):
        out, = self.saved_tensors
        return grad * out

class Pow(Function):
    """Raise a tensor to a constant (non-tensor) power `c`."""

    def forward(self, x, c: float):
        self.save_for_backward(x, c)
        return x ** c

    def backward(self, grad):
        base, exponent = self.saved_tensors
        # d(x^c)/dx = c * x^(c-1); the constant exponent gets no gradient.
        grad_x = grad * exponent * base ** (exponent - 1)
        return grad_x, None

class Pad(Function):
    """Pad a tensor via np.pad.

    Fixes in backward: `slices` was initialised as a dict but used with
    .append() (AttributeError); the builtin `slice` was indexed instead
    of the collected list; and the slice stop `size - after` cut into
    the original data — on a padded axis of length before+size+after the
    unpadded region is [before, before + size).

    NOTE(review): assumes `pad_width` is a per-axis sequence of
    (before, after) pairs — confirm callers never pass the scalar forms
    np.pad also accepts.
    """

    def forward(self, x, pad_width, mode):
        self.save_for_backward(x.shape, pad_width)
        return np.pad(x, pad_width=pad_width, mode=mode)

    def backward(self, grad):
        shape, pad_width = self.saved_tensors
        # Crop the gradient back to the original (unpadded) region.
        slices = tuple(
            slice(before, before + size)
            for size, (before, _after) in zip(shape, pad_width)
        )
        # pad_width and mode are non-tensor arguments: no gradient.
        return grad[slices], None, None

class Transpose(Function):
    """Axis permutation via ndarray.transpose.

    Fix: the `axes is None` branch returned a bare gradient while the
    explicit-axes branch returned (grad, None); both branches now return
    a gradient for `x` plus None for the non-tensor `axes` argument,
    matching the convention of Pow/Pad/Reshape.
    """

    def forward(self, x, axes):
        self.save_for_backward(axes)
        return x.transpose(axes)

    def backward(self, grad):
        axes, = self.saved_tensors
        if axes is None:
            # A full axis reversal is its own inverse.
            return grad.transpose(), None
        # Invert the permutation to route gradients back to their axes.
        return grad.transpose(np.argsort(axes)), None

class Reshape(Function):
    """Reshape a tensor; the gradient is reshaped back to the input shape."""

    def forward(self, x, shape=None):
        # Remember the original shape so backward can undo the reshape.
        self.save_for_backward(x.shape)
        return x.reshape(shape)

    def backward(self, grad):
        original_shape, = self.saved_tensors
        # `shape` is a non-tensor argument: no gradient for it.
        return grad.reshape(original_shape), None

class Getitem(Function):
    """Indexing / slicing (x[idx]).

    Fix: backward scattered with `grad_ori[idx] = grad`, which keeps
    only the *last* write when `idx` contains repeated indices;
    np.add.at performs an unbuffered scatter-add so duplicated positions
    accumulate their gradients.

    NOTE(review): grad buffer inherits x's dtype via zeros_like — if x
    can be integer-typed, float gradients would be truncated; confirm.
    """

    def forward(self, x, idx):
        self.save_for_backward(x, idx)
        return x[idx]

    def backward(self, grad):
        x, idx = self.saved_tensors
        grad_ori = np.zeros_like(x)
        # Unbuffered scatter-add: repeated indices accumulate correctly.
        np.add.at(grad_ori, idx, grad)
        return grad_ori