# Activation functions
import numpy as np
from Layer import Layer


# ReLU
class ReLU(Layer):
    """Rectified Linear Unit activation layer: f(x) = max(0, x)."""

    def __init__(self):
        super().__init__()
        # Cached forward input; backward() uses it to mask gradients.
        self.input_tensor = []

    def forward(self, input_tensor):
        '''
        ReLU activation function: f(x) = max(0,x)

        :param input_tensor: array-like input
        :return: new ndarray with negative entries clamped to zero
        '''
        # Cache the raw input for the backward pass. np.array() copies,
        # so the cache is independent of the caller's buffer.
        self.input_tensor = np.array(input_tensor)
        # Bug fix: the original zeroed the caller's array in place
        # (and crashed on plain Python lists). Return a fresh array instead.
        return np.maximum(self.input_tensor, 0)

    def backward(self, error):
        '''
        derivative: f'(x) = 1 ,if x > 0
                            0 ,else
        :param error: gradient of loss with respect to y = f(x)
        :return: gradient of loss with respect to x (new ndarray; the
                 caller's buffer is not modified in place)
        '''
        # Bug fix: the original zeroed entries of `error` in place,
        # clobbering the caller's upstream-gradient buffer.
        return np.where(self.input_tensor > 0, np.asarray(error), 0)


# Sigmoid
class Sigmoid(Layer):
    """Logistic sigmoid activation layer: f(x) = 1 / (1 + exp(-x))."""

    def __init__(self):
        super().__init__()
        # Cached forward output; the derivative is expressed through the
        # output itself: f'(x) = f(x) * (1 - f(x)).
        self.output_tensor = []

    def forward(self, input_tensor):
        '''
        Sigmoid activation function: f(x) = 1 / (1 + exp(-x))

        :param input_tensor: array-like input
        :return: ndarray of values in (0, 1)
        '''
        # np.asarray lets plain Python lists work too (the original
        # `-input_tensor` required an ndarray).
        output_tensor = 1.0 / (1.0 + np.exp(-np.asarray(input_tensor)))
        self.output_tensor = output_tensor
        return output_tensor

    def backward(self, error_tensor):
        '''
        Chain rule: dL/dx = dL/dy * f(x) * (1 - f(x))

        :param error_tensor: gradient of loss with respect to y = f(x)
        :return: gradient of loss with respect to x (new ndarray; the
                 caller's buffer is not modified in place)
        '''
        # Bug fix: the original used *=, mutating the caller's
        # upstream-gradient buffer in place.
        return error_tensor * (self.output_tensor * (1 - self.output_tensor))
