import numpy as np

def _numerical_gradient_1d(f,x):
    h = 1e-4 # 0.0001
    grad = np.zeros_like(x)

    for idx in range(x.size):
        tmp_val = x[idx]
        x[idx] = tmp_val + h
        fxh1 = f(x)

        x[idx] = tmp_val - h
        fxh2 = f(x)

        grad[idx] = (fxh1 - fxh2)/(2*h)

        x[idx] = tmp_val
    return grad

def numerical_gradient_2d(f, X):
    """Central-difference gradient of ``f`` at ``X`` (1-D or 2-D array).

    For a 2-D ``X``, the gradient is computed row by row, each row being
    treated as an independent 1-D point. Rows are perturbed in place and
    restored, so ``X`` is unchanged on return. Assumes a float dtype.
    """
    h = 1e-4  # finite-difference step size

    def _grad_row(row):
        # Gradient of f along a single 1-D slice, component by component.
        g = np.zeros_like(row)
        for i in range(row.size):
            saved = row[i]
            row[i] = saved + h
            ahead = f(row)
            row[i] = saved - h
            behind = f(row)
            g[i] = (ahead - behind) / (2 * h)
            row[i] = saved  # restore the perturbed component
        return g

    if X.ndim == 1:
        return _grad_row(X)

    grad = np.zeros_like(X)
    for k, row in enumerate(X):
        grad[k] = _grad_row(row)
    return grad
# The two functions above together can compute the gradient of an arbitrary function.
# The single function below can also compute the gradient of an arbitrary function.
def numerical_gradient(f, x):
    """Central-difference gradient of ``f`` at ``x`` for an array of any rank.

    Each element of ``x`` is perturbed by ``±h`` in turn and the symmetric
    difference quotient is taken. The array is perturbed in place and each
    element is restored afterwards.

    Fix over the original: an integer-dtype ``x`` used to truncate
    ``tmp_val ± h`` back to the original integer, silently producing an
    all-zero integer gradient. We now promote to float first
    (``np.asarray(..., dtype=float)`` is a no-copy pass-through for
    float64 input, so existing float callers see identical behavior).
    """
    h = 1e-4  # finite-difference step size
    x = np.asarray(x, dtype=float)  # promote ints so x ± h is representable
    grad = np.zeros_like(x)

    # np.ndindex walks all element indices in C order, same traversal as
    # nditer with flags=['multi_index'].
    for idx in np.ndindex(x.shape):
        tmp_val = x[idx]
        x[idx] = tmp_val + h
        fxh1 = f(x)

        x[idx] = tmp_val - h
        fxh2 = f(x)
        grad[idx] = (fxh1 - fxh2) / (2 * h)

        x[idx] = tmp_val  # restore the element before moving on
    return grad
    
def function_2(x):
    """Sum of squares, f(x) = sum(x_i^2) — toy objective for gradient checks."""
    return (x ** 2).sum()

if __name__ == '__main__':
    # Demo: gradient of the sum-of-squares objective at two 2-D points.
    points = np.array([[3.0, 4.0], [0.0, 2.0]])
    print(numerical_gradient(function_2, points))