import pickle

import numpy as np

from dataset.mnist import load_mnist


def get_dataset():
    """Load MNIST and return only the normalized, flattened training split.

    Returns:
        Tuple ``(x_train, t_train)``: training images and integer labels.
        The test split returned by ``load_mnist`` is discarded.
    """
    (x_train, t_train), _ = load_mnist(normalize=True, flatten=True, one_hot_label=False)
    return x_train, t_train


def init_network():
    """Load pretrained network parameters from ``sample_weight.pkl``.

    The weights come from prior training, so deserializing them replaces
    the learning step entirely.

    Returns:
        The unpickled object (a dict of weight/bias arrays W1..W3, b1..b3).
    """
    with open('sample_weight.pkl', 'rb') as fh:
        return pickle.load(fh)


def sigmod(x):
    """Sigmoid activation: 1 / (1 + e^-x), elementwise for arrays.

    NOTE(review): the name is a typo for "sigmoid", but it is kept
    unchanged because callers in this file use it.
    """
    return 1.0 / (1.0 + np.exp(-x))


def softmax(x):
    """Numerically stable softmax along the last axis.

    Fixes two defects in the original:
    - ``np.exp(x)`` overflows for moderately large scores; subtracting
      the per-row maximum leaves the result mathematically unchanged
      (softmax is shift-invariant) while keeping exponentials bounded.
    - The original summed over the WHOLE array, so a 2-D batch of scores
      (as produced by ``predict`` on the full dataset) was normalized
      across the entire batch instead of per sample. Normalizing over
      the last axis gives each row its own probability distribution and
      is identical to the old behavior for 1-D input.

    Args:
        x: scores, shape (n,) or (batch, n).

    Returns:
        Array of the same shape whose last axis sums to 1.
    """
    shifted = x - np.max(x, axis=-1, keepdims=True)
    exp_x = np.exp(shifted)
    return exp_x / np.sum(exp_x, axis=-1, keepdims=True)


def predict(network, x):
    """Forward pass of the 3-layer network; returns class probabilities.

    Args:
        network: dict of parameters ``W1``..``W3`` and ``b1``..``b3``.
        x: flattened input image(s); a single vector or a batch matrix.

    Returns:
        Softmax output of the final affine layer.
    """
    W1, W2, W3 = network['W1'], network['W2'], network['W3']
    b1, b2, b3 = network['b1'], network['b2'], network['b3']

    # Two sigmoid hidden layers followed by a softmax output layer.
    z1 = sigmod(np.dot(x, W1) + b1)
    z2 = sigmod(np.dot(z1, W2) + b2)
    return softmax(np.dot(z2, W3) + b3)


def mini_batch_cross_entropy_error1(y, t):
    """Mini-batch cross-entropy loss, spelled out step by step.

    Bug fix: the original read the *global* ``y_batch`` for the batch
    size instead of the ``y`` parameter, so the function only worked
    when a global of that exact name happened to exist (and be the same
    batch). The batch size now comes from ``y`` itself.

    Args:
        y: predicted probabilities, shape (batch, classes).
        t: integer class labels, shape (batch,).

    Returns:
        Average negative log-likelihood of the true classes.
    """
    batch_size = y.shape[0]
    # Pick each sample's predicted probability for its true class.
    picked = y[np.arange(batch_size), t]
    # 1e-7 guards against log(0) blowing up to -inf.
    log_likelihood = np.log(picked + 1e-7)
    molecule = np.sum(log_likelihood)

    return -molecule / batch_size


def mini_batch_cross_entropy_error(y, t):
    """Mini-batch cross-entropy loss (compact one-expression form).

    Bug fix: the batch size is taken from the ``y`` parameter rather
    than the global ``y_batch`` the original implicitly depended on.

    Args:
        y: predicted probabilities, shape (batch, classes).
        t: integer class labels, shape (batch,).

    Returns:
        Average negative log-likelihood of the true classes; 1e-7
        prevents log(0).
    """
    batch_size = y.shape[0]
    return -np.sum(np.log(y[np.arange(batch_size), t] + 1e-7)) / batch_size


# main
if __name__ == '__main__':
    # Run inference over the whole training set with pretrained weights.
    x, t = get_dataset()
    network = init_network()
    y = predict(network, x)  # inference

    # Sample a random mini-batch of predictions and matching labels.
    batch_size = 10
    sample_idx = np.random.choice(a=y.shape[0], size=batch_size)
    y_batch = y[sample_idx]
    t_batch = t[sample_idx]

    # Loss Function
    loss_result = mini_batch_cross_entropy_error1(y_batch, t_batch)

    print('The loss function value is : ' + str(loss_result))
