import numpy as np

from common import *  # softmax, cross_entropy_error, numerical_gradient (fixed typo: was "coommon")

class SimpleNet:
    """Minimal one-layer network: an affine transform followed by softmax.

    There is no bias term; the prediction is y = softmax(x @ W).

    Attributes:
        W: weight matrix of shape (2, 3), drawn from a standard normal.
    """

    def __init__(self):
        self.W = np.random.randn(2, 3)

    def forward(self, x):
        """Return the softmax probabilities for input x.

        Args:
            x: input of shape (2,) or (N, 2) — anything `np.dot(x, W)` accepts.

        Returns:
            Softmax output of `x @ W` (softmax comes from the `common` module).
        """
        a = np.dot(x, self.W)
        y = softmax(a)
        return y

    def loss(self, x, t):
        """Return the cross-entropy loss of the prediction for x against target t.

        Args:
            x: input sample(s), as accepted by `forward`.
            t: target label(s); presumably one-hot, matching how
               `cross_entropy_error` from `common` interprets them.
        """
        y = self.forward(x)
        return cross_entropy_error(y, t)

if __name__ == '__main__':
    net = SimpleNet()
    x = np.array([0.6, 0.9])   # a single input sample
    t = np.array([0, 0, 1])    # one-hot target label

    # numerical_gradient perturbs net.W in place and re-evaluates f, so the
    # lambda deliberately ignores its argument and reads the live weights.
    f = lambda w: net.loss(x, t)

    print(f(net.W))

    # Compute the gradient once; the original called numerical_gradient twice
    # and discarded the first result.
    dW = numerical_gradient(f, net.W)
    print(dW)