import numpy as np

"""
softmax and cross entropy loss layer
"""


class Softmax_cross_entropy:
    """Fused softmax + cross-entropy loss layer.

    Forward: probs = softmax(x); loss = -sum(y * ln(probs)).
    Backward: with the natural-log loss, the gradient w.r.t. the logits
    collapses to the well-known ``probs - y`` form.
    """

    def __init__(self):
        self.x = None        # logits from the most recent forward pass
        self.y = None        # target vector from the most recent forward pass
        self.outputs = None  # cached softmax probabilities, used by backward()

    def __call__(self, x, y):
        """Run the forward pass.

        Args:
            x: 1-D array-like of logits.
            y: 1-D array-like of targets (one-hot or a probability
               distribution), same length as ``x``.

        Returns:
            Tuple ``(probs, loss)``: softmax probabilities (ndarray) and
            the scalar cross-entropy loss.
        """
        self.x = x
        self.y = y
        self.outputs = self.__softmax(x)
        loss_value = self.__cross_entropy()
        return self.outputs, loss_value

    def __softmax(self, x):
        # Subtract the max for numerical stability (avoids exp overflow).
        # Vectorized: exp is evaluated once per element instead of twice
        # as in a per-element map over x.
        shifted = np.asarray(x, dtype=float) - np.max(x)
        exps = np.exp(shifted)
        return exps / exps.sum()

    def __cross_entropy(self):
        # BUG FIX: the original used np.log10 here, which is inconsistent
        # with backward()'s gradient (outputs - y); that closed form is
        # only valid for the natural logarithm. 1e-10 guards log(0).
        y = np.asarray(self.y, dtype=float)
        mask = y != 0  # skip zero targets so 0 * log(0) never yields nan
        return -np.sum(y[mask] * np.log(self.outputs[mask] + 1e-10))

    def backward(self):
        """Gradient of the loss w.r.t. the logits, shape ``(1, n)``."""
        return (self.outputs - self.y).reshape(1, -1)

