import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt

"""
    防止过拟合：
    
        dropout 使用百分之 keep_prob用来计算模型，防止过拟合
"""


def dropout_test():
    """Train a tiny 1-100-1 sigmoid network on y = x^2 + noise with dropout,
    log TensorBoard summaries, and plot the fitted curve.

    Side effects: writes event files under 'logs/' and shows a matplotlib figure.
    """
    x_data = np.linspace(-1, 1, 500)[:, None]
    noise_data = np.random.uniform(-0.02, 0.02, x_data.shape)
    y_data = np.square(x_data) + noise_data

    # Graph inputs: features, targets, a decaying learning rate and the
    # dropout keep probability (fed per run).
    with tf.name_scope("input"):
        x = tf.placeholder(tf.float32, [None, 1], "x_input")
        y = tf.placeholder(tf.float32, [None, 1], "y_input")
        # BUG FIX: this Variable was mistakenly named "y_input" (copy-paste).
        learning_rate = tf.Variable(0.01, name="learning_rate")
        keep_prob = tf.placeholder(tf.float32, name="keep_dropout")

    # Hidden layer: 1 -> 100, sigmoid activation followed by dropout.
    with tf.name_scope("layer_1"):
        Weight_L1 = tf.Variable(tf.random_normal([1, 100]), name="Weight_L1")
        Basic_L1 = tf.Variable(tf.zeros([1, 100]), name="Basic_L1")
        Publish_L1 = tf.nn.sigmoid(tf.matmul(x, Weight_L1) + Basic_L1)
        Dropout_L1 = tf.nn.dropout(Publish_L1, keep_prob, name="dropout_L1")
        scalar_summaries(Weight_L1, "Weight_L1")
        scalar_summaries(Basic_L1, "Basic_L1")

    # Output layer: 100 -> 1, producing logits (sigmoid is applied inside
    # the loss and in the prediction op).
    with tf.name_scope("layer_2"):
        Weight_L2 = tf.Variable(tf.random_normal([100, 1]), name="Weight_L2")
        Basic_L2 = tf.Variable(tf.zeros([1, 1]), name="Basic_L2")
        scalar_summaries(Weight_L2, "Weight_L2")
        scalar_summaries(Basic_L2, "Basic_L2")
        with tf.name_scope("logits"):
            logits = tf.matmul(Dropout_L1, Weight_L2) + Basic_L2

    # Sigmoid cross-entropy loss; targets lie in [0, 1] since y = x^2 on [-1, 1].
    with tf.name_scope("loss"):
        loss = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(labels=y, logits=logits))
        # BUG FIX: loss is already a scalar — the reduce_min wrapper was a no-op.
        tf.summary.scalar("loss", loss)

    # Prediction in (0, 1).
    with tf.name_scope("prediction"):
        prediction = tf.nn.sigmoid(logits)
        # BUG FIX: this summary previously logged the loss under the
        # "prediction" tag instead of the mean prediction.
        tf.summary.scalar("prediction", tf.reduce_mean(prediction))

    # Optimizer (learning rate is a Variable so it can be decayed in-session).
    with tf.name_scope("AdamOptimizer"):
        train = tf.train.AdamOptimizer(learning_rate).minimize(loss)

    # BUG FIX: create the decay op ONCE; building a new tf.assign inside the
    # training loop grows the graph on every decay step.
    lr_decay = tf.assign(learning_rate, learning_rate * 0.99)

    # Merge all summaries registered above.
    merged = tf.summary.merge_all()

    with tf.Session() as session:
        session.run(tf.global_variables_initializer())

        # Write the graph and per-step summaries for TensorBoard.
        writer = tf.summary.FileWriter('logs/', session.graph)
        try:
            feed = {x: x_data, y: y_data, keep_prob: 0.93}
            for step in range(1, 5000):
                # BUG FIX: fetch train and merged in ONE run call — the old
                # code ran the graph twice per step, so the logged summaries
                # came from a different dropout mask than the training update.
                _, summary = session.run([train, merged], feed_dict=feed)
                writer.add_summary(summary, step)

                if step % 1000 == 0:
                    # Decay the learning rate by 1% every 1000 steps.
                    session.run(lr_decay)
                    print("step:", step, "learning_rate:", session.run(learning_rate), "loss:",
                          session.run(loss, feed_dict=feed))
        finally:
            # BUG FIX: flush and close the event file (was never closed).
            writer.close()

        # Show the noisy data and the fitted curve.
        plt.figure()
        plt.scatter(x_data, y_data)
        plt.plot(x_data, session.run(prediction, feed_dict=feed), c="r")
        plt.show()


"""
    概要标量图的
"""


def scalar_summaries(var, name):
    """Attach scalar summaries (mean, variance, min, max) and a histogram
    for *var*, tagged with *name*, so they appear in TensorBoard."""
    mu = tf.reduce_mean(var)
    variance = tf.reduce_mean(tf.square(var - mu))
    tf.summary.scalar("mean_%s" % name, mu)
    tf.summary.scalar("Mean_square_%s" % name, variance)
    tf.summary.scalar("min_%s" % name, tf.reduce_min(var))
    tf.summary.scalar("max_%s" % name, tf.reduce_max(var))
    tf.summary.histogram('histogram_%s' % name, var)  # value distribution


"""
    启动：F:\Idea workspace\tensorflow-learning\lession\logs>tensorboard --logdir=./
"""
if __name__ == "__main__":
    dropout_test()
