#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @File  : 4.PyTorch激活函数原理和使用.py
# @Author: Richard Chiming Xu
# @Date  : 2021/11/7
# @Desc  :

import numpy as np
import matplotlib.pyplot as plt


def elu(x, alpha=1):
    """Exponential Linear Unit: x for positive inputs, alpha*(e^x - 1) otherwise."""
    positive_part = max(0, x)
    negative_part = min(0, alpha * (np.exp(x) - 1))
    return positive_part + negative_part
def leaky_relu(x, negative_slope=1e-2):
    """Leaky ReLU: identity for positive x, a small linear slope for negative x."""
    positive_part = max(0, x)
    leaked_part = negative_slope * min(0, x)
    return positive_part + leaked_part
def p_relu(x, a=0.25):
    """Parametric ReLU: identity for positive x, slope ``a`` for negative x."""
    scaled_negative = a * min(0, x)
    return max(0, x) + scaled_negative
def relu(x):
    """Rectified Linear Unit: zero for non-positive inputs, identity otherwise."""
    return x if x > 0 else 0
def relu_6(x):
    """ReLU clipped from above at 6: output lies in [0, 6]."""
    clipped_below = max(0, x)
    return min(clipped_below, 6)
def selu(x):
    """Scaled Exponential Linear Unit (Klambauer et al., 2017).

    selu(x) = scale * (max(0, x) + min(0, alpha * (exp(x) - 1)))

    Bug fix: the original reused the ``scale`` constant (1.0507...) in place
    of ``alpha`` (1.6732...) inside the exponential term, giving wrong
    negative-branch values.
    """
    scale = 1.0507009873554804934193349852946
    alpha = 1.6732632423543772848170429916717
    return scale * (max(0, x) + min(0, alpha * (np.exp(x) - 1)))
def celu(x, alpha=0.25):
    """Continuously differentiable ELU.

    celu(x) = max(0, x) + min(0, alpha * (exp(x / alpha) - 1))

    Bug fix: the original computed ``min(0, alpha*exp(x/alpha) - 1)`` — the
    ``- 1`` must sit inside the parentheses multiplied by alpha (the old
    version returned -0.75 for x = 0 instead of 0).
    NOTE(review): PyTorch's default alpha is 1.0; 0.25 is kept here for
    backward compatibility with existing callers.
    """
    return max(0, x) + min(0, alpha * (np.exp(x / alpha) - 1))
def sigmoid(x):
    """Logistic sigmoid: maps any real number into the open interval (0, 1)."""
    denominator = 1 + np.exp(-x)
    return 1 / denominator
def log_sigmoid(x):
    """Natural logarithm of the logistic sigmoid; always non-positive."""
    sig = 1 / (1 + np.exp(-x))
    return np.log(sig)
def tanh(x):
    """Hyperbolic tangent via its exponential definition: (e^x - e^-x)/(e^x + e^-x)."""
    e_pos = np.e ** x
    e_neg = np.e ** -x
    return (e_pos - e_neg) / (e_pos + e_neg)
def tanh_shrink(x):
    """Tanhshrink: the input minus its hyperbolic tangent, x - tanh(x)."""
    e_pos = np.e ** x
    e_neg = np.e ** -x
    hyperbolic_tangent = (e_pos - e_neg) / (e_pos + e_neg)
    return x - hyperbolic_tangent
def softplus(x, beta=1):
    """Smooth approximation of ReLU: (1/beta) * ln(1 + e^(beta*x))."""
    inv_beta = 1 / beta
    exponential = np.exp(beta * x)
    return inv_beta * np.log(1 + exponential)
def soft_shrink(x, lambd=0.5):
    """Softshrink: shift x toward zero by lambd; zero inside [-lambd, lambd]."""
    # Guard clauses: shrink values outside the dead zone toward zero.
    if x > lambd:
        return x - lambd
    if x < -lambd:
        return x + lambd
    # Inside the [-lambd, lambd] band the output is exactly zero.
    return 0
# Integer sample points from -25 to 24 used as the shared x-axis.
x = [i - 25 for i in range(50)]

# Plot every activation function on the same axes for visual comparison.
plt.plot(x, [elu(i) for i in x], label='elu')  # bug fix: was mislabelled 'relu'
plt.plot(x, [leaky_relu(i) for i in x], label='leaky_relu')
plt.plot(x, [p_relu(i) for i in x], label='p_relu')
plt.plot(x, [relu(i) for i in x], label='relu')
plt.plot(x, [relu_6(i) for i in x], label='relu_6')
plt.plot(x, [selu(i) for i in x], label='selu')
plt.plot(x, [celu(i) for i in x], label='celu')
plt.plot(x, [sigmoid(i) for i in x], label='sigmoid')
plt.plot(x, [log_sigmoid(i) for i in x], label='log_sigmoid')
plt.plot(x, [tanh(i) for i in x], label='tanh')
plt.plot(x, [tanh_shrink(i) for i in x], label='tanh_shrink')
plt.plot(x, [soft_shrink(i) for i in x], label='soft_shrink')
plt.plot(x, [softplus(i) for i in x], label='softplus')

plt.legend()
plt.show()