import numpy as np 

def _numerical_gradient_1d(f,x):
    # 一维变量的梯度计算
    h = 1e-4 # 0.0001
    grad = np.zeros_like(x)

    for idx in range(x.size):
        tmp_val = x[idx]
        x[idx] = tmp_val + h
        fxh1 = f(x) # f(x+h)

        x[idx] = tmp_val - h
        fxh2 = f(x) # f(x-h)
        grad[idx] = (fxh1-fxh2)/(2*h)

        x[idx] = tmp_val
    return grad 

def numerical_gradient_2d(f, X):
    """Gradient of ``f`` at ``X``; a 2-D ``X`` is treated as a batch of rows.

    Args:
        f: callable taking a 1-D numpy array and returning a scalar.
        X: 1-D point or 2-D batch of points (one per row).

    Returns:
        Array of the same shape as ``X`` with the per-element gradients.
    """
    # Single sample: delegate straight to the 1-D routine.
    if X.ndim == 1:
        return _numerical_gradient_1d(f, X)

    # Batch: differentiate each row independently into a matching output.
    grad = np.zeros_like(X)
    for row_idx, row in enumerate(X):
        grad[row_idx] = _numerical_gradient_1d(f, row)

    return grad
    
def numerical_gradient(f, x):
    """Central-difference gradient of ``f`` at ``x`` for an array of any rank.

    Each element of ``x`` is perturbed by +/- h in turn (the array is
    restored afterwards), so ``f`` must be a scalar-valued function of
    the whole array.
    """
    h = 1e-4  # perturbation size
    grad = np.zeros_like(x)

    # Visit every element position regardless of the array's dimensionality.
    for idx in np.ndindex(x.shape):
        saved = x[idx]

        x[idx] = saved + h
        forward = f(x)    # f(x + h * e_idx)

        x[idx] = saved - h
        backward = f(x)   # f(x - h * e_idx)

        grad[idx] = (forward - backward) / (2 * h)
        x[idx] = saved    # undo the perturbation
    return grad


if __name__ == '__main__':
    # Quick sanity check: the gradient of x0^2 + x1^2 at (3, 4) is (6, 8).
    def squared_norm(x):
        return x[0] ** 2 + x[1] ** 2

    grad = numerical_gradient(squared_norm, np.array([3.0, 4.0]))
    print(grad)

