"""
    损失函数
"""
import numpy as np

from common.functions import softmax_function_v2
# 1. MSE
def mean_squared_error(y, t):
    """Return half the sum of squared differences between y and t."""
    diff = y - t
    return 0.5 * np.sum(diff ** 2)


# 2. Cross-entropy error

def cross_entropy_error(y, t):
    """
    Cross-entropy loss averaged over the batch.

    NOTE: the previous docstring had the parameters swapped — ``y`` is the
    *prediction* (it is what gets indexed and logged below), ``t`` the target.

    :param y:   predicted probabilities, shape (N, C) or (C,) for one sample
    :param t:   true labels, either one-hot with the same shape as ``y`` or
                an array of class indices of shape (N,)
    :return:    mean cross-entropy loss over the N samples
    """

    # Promote a single sample to a batch of one so the 2-D indexing works.
    if y.ndim == 1:
        y = y.reshape(1, y.size)
        t = t.reshape(1, t.size)

    # If t is one-hot encoded, convert it to class-index labels.
    if t.size == y.size:
        t = t.argmax(axis=1)

    n = y.shape[0]

    # 1e-7 guards against log(0) when a predicted probability is exactly 0.
    return -np.sum(np.log(y[np.arange(n), t] + 1e-7)) / n

if __name__ == '__main__':
    # Quick smoke check of the project's softmax implementation.
    sample = np.array([[0, 2, 4], [3, 4, 5], [-1, -1, -2], [-6, -7, -3]])
    print(softmax_function_v2(sample))