# SoftMax
import numpy as np
from Layer import Layer
NEAR_0 = 1e-10


class Softmax(Layer):
    """Softmax activation layer with cross-entropy loss/gradient helpers.

    ``forward`` caches the (max-shifted) input and the output probabilities;
    ``backward`` turns the cached probabilities into the gradient of the
    softmax + cross-entropy loss with respect to the layer input.
    """

    def __init__(self):
        super().__init__()
        # Caches filled by forward(); backward() reuses output_tensor.
        self.input_tensor = []
        self.output_tensor = []

    def forward(self, input_tensor):
        """
        Compute class probabilities for a batch of score vectors.

        :param input_tensor: input tensor x, shape (batch, classes)
        :return: predicted output y_pred, same shape; each row is a
                 probability distribution over the classes (rows sum to 1)
        """
        # Work in float64 so the cached tensors match the original
        # implementation (which wrote into np.zeros arrays) even for
        # integer input.
        input_tensor = np.asarray(input_tensor, dtype=float)
        # Subtract the per-row maximum before exponentiating to avoid
        # overflow; this shift provably does not change the softmax output.
        self.input_tensor = input_tensor - np.max(input_tensor, axis=1, keepdims=True)
        exp = np.exp(self.input_tensor)
        self.output_tensor = exp / np.sum(exp, axis=1, keepdims=True)
        return self.output_tensor

    def predict(self, input_tensor, labels):
        """
        Predict class probabilities and compute the cross-entropy loss.

        softmax loss = sum over batch of -log(y_pred at the true class)

        :param input_tensor: input tensor x, shape (batch, classes)
        :param labels: true labels y, one-hot encoded, same shape as x
        :return: (prediction, softmax loss)
        """
        self.output_tensor = self.forward(input_tensor)
        # NEAR_0 guards against log(0) when a true class gets ~0 probability.
        loss = np.sum(-np.log(self.output_tensor[labels == 1] + NEAR_0))
        return self.output_tensor, loss

    def backward(self, label):
        """
        Start of backpropagation: return d(loss)/d(input) = y_pred - y_true.

        For softmax followed by cross-entropy the gradient is simply the
        predicted probability minus 1 at the true class, and the predicted
        probability elsewhere.

        NOTE: mutates the cached ``output_tensor`` in place, so it must be
        called at most once per forward pass.

        :param label: true label, one-hot encoded
        :return: error tensor, shape (batch, classes)
        """
        self.output_tensor[label == 1] -= 1
        return self.output_tensor

