# -*- coding: utf-8 -*-
"""
------------------------------------------------------------------------------
    File Name:  activation_demo
    Author   :  wanwei1029
    Date     :  2018/12/20
    Desc     :  Activation functions demo
------------------------------------------------------------------------------
"""
from keras.layers import Activation, Dense
from keras.models import Sequential

def demo():
    """Demonstrate the two ways of applying an activation function in Keras.

    An activation can be attached either via the ``activation=`` keyword of a
    layer, or by appending a standalone ``Activation`` layer. Custom activation
    callables are also accepted. Built-in activation names include:

    - softmax: applied over the last axis; input shaped
      (nb_samples, nb_timesteps, nb_dims) or (nb_samples, nb_dims)
    - elu, selu, softplus, softsign, relu, tanh, sigmoid, hard_sigmoid, linear
    """
    # Build a tiny model: a 50-dim input projected to 10 units, followed by
    # a separate tanh Activation layer.
    net = Sequential()
    dense = Dense(10, input_shape=(50,))
    net.add(dense)
    net.add(Activation('tanh'))
    # Equivalent one-liner: net.add(Dense(10, input_shape=(50,), activation='tanh'))


if __name__ == '__main__':
    # Select which demo to run; extend this table as more demos are added.
    test_method = "demo"
    dispatch = {"demo": demo}
    handler = dispatch.get(test_method)
    if handler is not None:
        handler()
