import numpy as np


def _softmax(x):
    """
    softmax函数
    Args:
        x:

    Returns:

    """
    c = np.max(x)
    exp_x = np.exp(x - c)
    return exp_x / np.sum(exp_x)


def cross_entropy_error(p, y):
    """
    Cross-entropy error between predictions and one-hot labels.

    Computes ``-sum(y * log(p))``. A small epsilon is added inside the
    log so that a predicted probability of exactly 0 yields a large
    finite loss instead of ``-inf`` / ``nan``.

    Args:
        p: array-like of predicted probabilities (e.g. softmax output).
        y: array-like of one-hot encoded target labels, same shape as ``p``.

    Returns:
        Scalar cross-entropy error (numpy float).
    """
    # Guard against log(0); 1e-7 is small enough not to disturb normal values.
    delta = 1e-7
    return -np.sum(y * np.log(p + delta))
