import numpy as np

'''
Gradient of the softmax loss function.
'''


def soft_max(input_y):
    """Return the softmax probability distribution over `input_y`.

    Parameters
    ----------
    input_y : np.ndarray
        1-D array of raw scores (logits).

    Returns
    -------
    np.ndarray
        Array of the same shape whose entries are non-negative and sum to 1.
    """
    # Subtract the max before exponentiating: softmax is invariant under a
    # constant shift, and this prevents np.exp from overflowing for large
    # scores (the original version produced inf/nan for inputs ~>700).
    y_exp = np.exp(input_y - np.max(input_y))
    # Reuse y_exp instead of recomputing np.exp(input_y) a second time.
    return y_exp / np.sum(y_exp)


def delta(i, j):
    """Kronecker delta: 1 when ``i == j``, otherwise 0."""
    return 1 if i == j else 0


def soft_max_gradient(p, i, j):
    """Partial derivative d p[i] / d y[j] of the softmax output.

    Equals ``p[i] * (delta(i, j) - p[j])`` where delta is the Kronecker
    delta; the delta is inlined here as a conditional expression.
    """
    kronecker = 1 if i == j else 0
    return p[i] * (kronecker - p[j])


# Demo: softmax of a small score vector and one gradient entry.
logits = np.array([2., 1., 0])
probabilities = soft_max(logits)
print(probabilities)

# d p[0] / d y[1] for the distribution above.
print(soft_max_gradient(probabilities, 0, 1))
