import numpy as np

from wanmu.nb.utils.funs import _softmax, cross_entropy_error


class simpleNet(object):
    """Minimal single-layer network: scores = x @ W with a fixed (2, 3) weight matrix.

    The loss is softmax cross-entropy computed via the project helpers
    ``_softmax`` and ``cross_entropy_error`` (imported at module level).
    """

    def __init__(self):
        # NOTE: this seeds NumPy's *global* RNG — a side effect visible to every
        # other user of np.random in the process. Kept for reproducibility of W.
        np.random.seed(0)
        self.W = np.random.randn(2, 3)

    def forward(self, x):
        """Return the raw (pre-activation) scores ``x @ W``.

        ``x`` is expected to have shape (..., 2) so the product with the
        (2, 3) weight matrix is defined.
        """
        return np.dot(x, self.W)

    def loss(self, x, y):
        """Softmax cross-entropy loss of the scores for input ``x`` against labels ``y``.

        ``y`` is whatever label format ``cross_entropy_error`` accepts
        (one-hot in the example usage below).
        """
        z = self.forward(x)
        p = _softmax(z)
        loss = cross_entropy_error(p, y)
        return loss

    def numerical_gradient(self, f, x, h=1e-4):
        """Central-difference gradient of scalar function ``f`` at array ``x``.

        Each element of ``x`` is perturbed in place by ±``h`` and restored
        afterwards, so ``f`` may (and in this file does) read ``x`` through a
        closure rather than through its argument. ``x`` should have a float
        dtype; integer perturbation would truncate.

        Args:
            f: callable mapping the (whole) array to a scalar.
            h: finite-difference step (previously hard-coded; default keeps
               the original 1e-4 behavior).

        Returns:
            Array of the same shape as ``x`` holding df/dx.
        """
        # Float buffer even for integer input — zeros_like(x) alone would
        # silently truncate every gradient entry for int arrays.
        grad = np.zeros_like(x, dtype=float)
        it = np.nditer(x, flags=['multi_index'], op_flags=['readwrite'])
        while not it.finished:
            idx = it.multi_index
            tmp_val = x[idx]

            x[idx] = float(tmp_val) + h
            fxh1 = f(x)

            # Symmetric with the + side: cast before subtracting too.
            x[idx] = float(tmp_val) - h
            fxh2 = f(x)

            grad[idx] = (fxh1 - fxh2) / (2 * h)

            x[idx] = tmp_val  # restore the original entry
            it.iternext()
        return grad

    def gradient_descent(self, f, init_x, lr=0.01, step_num=1000):
        """Run ``step_num`` steps of vanilla gradient descent on ``f``.

        IMPORTANT: ``init_x`` is updated IN PLACE (``x -= lr * grad``). This
        is deliberate — the caller in this file passes ``net.W`` and a loss
        closure that reads ``self.W`` directly, so every step must be visible
        through the shared array.

        Returns:
            The same (mutated) array after ``step_num`` updates.
        """
        x = init_x
        for _ in range(step_num):
            grad = self.numerical_gradient(f, x)
            x -= lr * grad
        return x


if __name__ == '__main__':
    net = simpleNet()
    print(net.W)

    X = np.array([[0.6, 0.9]])
    p = net.forward(X)
    print("预测值为：", p)
    print("预测的类别为：", np.argmax(p))

    # One-hot target: class 2 is the correct label.
    y = np.array([0, 0, 1])

    # Loss as a function of the weights. The argument is intentionally
    # ignored: gradient_descent perturbs net.W in place and net.loss reads
    # self.W directly. A def instead of an assigned lambda (PEP 8 / E731).
    def f(w):
        return net.loss(X, y)

    # gradient_descent mutates net.W in place and returns that same array —
    # this is the TRAINED weight matrix, not a gradient (the old name `dw`
    # was misleading).
    trained_W = net.gradient_descent(f, net.W)
    print(trained_W)
