#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Created by Happiness on 2017/11/5
# 指数下降法优化学习率

import tensorflow as tf

TRAINING_STEPS = 100

# Global step counter driven by the optimizer.
# trainable=False is required so the optimizer never treats this counter
# as a model parameter to be updated by gradients.
global_step = tf.Variable(0, trainable=False)

# Exponentially decaying learning rate:
#   lr = 0.1 * 0.96 ** (global_step / decay_steps)
# staircase=False gives a smooth (per-step continuous) decay rather than
# a stepwise one.
decay_learning_rate = tf.train.exponential_decay(0.1, global_step=global_step, decay_steps=1, decay_rate=0.96,
                                                 staircase=False)

# Toy objective: minimize y = x^2, starting from x = 5.
x = tf.Variable(tf.constant(5, tf.float32), name='x')
y = tf.square(x, name='y')

# Passing global_step to minimize() makes the optimizer increment the
# counter on every update, which in turn advances the decay schedule.
trail_step = tf.train.AdamOptimizer(decay_learning_rate).minimize(y, global_step)

# Run the training loop, logging the decayed learning rate and the current
# value of x every 10 steps.
with tf.Session() as sess:
    init_op = tf.global_variables_initializer()
    sess.run(init_op)
    for i in range(TRAINING_STEPS):
        sess.run(trail_step)
        if i % 10 == 0:
            # Fetch both tensors in one run() call; the original issued two
            # separate run() calls and then never used x_value.
            dla, x_value = sess.run([decay_learning_rate, x])
            # Fixed: "learing" -> "learning", and x_value is now reported.
            print("decay_learning_rate : %f , x : %f" % (dla, x_value))

# NOTE(review): this guard is a no-op — all of the work above executes at
# module import time, so `pass` here does nothing. If import-time side
# effects are undesirable, the script body should be moved into a main()
# function invoked from this guard.
if __name__ == '__main__':
    pass
