import numpy as np

def sigmoid(x):
    """Numerically stable logistic sigmoid, 1 / (1 + exp(-x)).

    The naive form overflows ``np.exp`` for large-magnitude negative
    inputs.  Here ``z = exp(-|x|)`` is always in (0, 1], so neither
    branch of the equivalent identity can overflow:
        x >= 0:  1 / (1 + exp(-x))
        x <  0:  exp(x) / (1 + exp(x))

    Parameters
    ----------
    x : scalar or np.ndarray
        Input value(s).

    Returns
    -------
    np.ndarray
        Sigmoid of ``x``, same shape as the input (0-d for scalars).
    """
    z = np.exp(-np.abs(x))
    return np.where(x >= 0, 1.0 / (1.0 + z), z / (1.0 + z))

def sigmoid_grad(x):
    """Derivative of the sigmoid: s(x) * (1 - s(x)).

    Evaluates ``sigmoid`` once and reuses the result, instead of
    calling it twice as the textbook one-liner does.

    Parameters
    ----------
    x : scalar or np.ndarray
        Input value(s).

    Returns
    -------
    np.ndarray
        Element-wise gradient of the sigmoid at ``x``.
    """
    s = sigmoid(x)
    return s * (1.0 - s)

def softmax(x):
    """Numerically stable softmax over the last axis.

    Subtracting the row-wise maximum before exponentiating prevents
    overflow without changing the result (softmax is shift-invariant).
    Using ``axis=-1, keepdims=True`` handles the 1-D vector and 2-D
    (batch, classes) cases identically to the transpose-based original,
    and additionally gives a correct per-row softmax for higher-rank
    inputs.

    Parameters
    ----------
    x : np.ndarray
        Scores; the last axis is the class axis.

    Returns
    -------
    np.ndarray
        Probabilities of the same shape as ``x``; each slice along the
        last axis sums to 1.
    """
    shifted = x - np.max(x, axis=-1, keepdims=True)
    e = np.exp(shifted)
    return e / np.sum(e, axis=-1, keepdims=True)

def cross_entropy_error(y, t):
    """Mean cross-entropy loss over a mini-batch.

    Parameters
    ----------
    y : np.ndarray
        Network output: either a single probability vector of length C,
        or an (N, C) matrix of probabilities.
    t : np.ndarray
        Targets: either one-hot (same shape as ``y``) or an array of
        integer class labels of length N.

    Returns
    -------
    float
        Average cross-entropy loss per sample.
    """
    # Promote a single sample (1-D vector) to a one-row batch.
    if y.ndim == 1:
        y = y.reshape(1, -1)
        t = t.reshape(1, -1)

    # One-hot targets have as many entries as y; reduce them to labels.
    if t.size == y.size:
        t = t.argmax(axis=1)

    batch_size = y.shape[0]
    # Pick each sample's predicted probability for its true class;
    # 1e-7 guards log(0).
    picked = y[np.arange(batch_size), t]
    return -np.sum(np.log(picked + 1e-7)) / batch_size

if __name__ == '__main__':
    # Quick demo: one-hot target for class 2 against a probability-like
    # prediction; prints the cross-entropy loss for this single sample.
    target = np.array([0, 0, 1, 0, 0, 0, 0, 0, 0, 0])
    prediction = np.array([0.1, 0.05, 0.6, 0.0, 0.05, 0.1, 0.0, 0.1, 0.0, 0.0])
    print(cross_entropy_error(prediction, target))