# -*- coding: utf-8 -*-
import tensorflow as tf
import numpy as np
from tqdm import tqdm
import RBM
import rnn_rbm  # The hyperparameters of the RBM and RNN-RBM are specified in the rnn_rbm file
import midi_manipulation
import summaries

# Directory where TensorBoard summary logs for RBM pre-training are written.
SUMMARY_RBM_DIR = "SUMMARY_LOGS/RBM"

# Number of passes over the full song list during RBM pre-training.
num_epochs = 100

# Learning rate for the contrastive-divergence parameter updates.
lr = 0.01

def main():
    """Pre-train the RBM component of the RNN-RBM on the Pop_Music_Midi set.

    Builds the TF1 graph (RNN state/weights, RNN->RBM connection weights,
    and the RBM itself), trains only the RBM parameters (W, bv, bh) with
    one-step contrastive divergence over every song for `num_epochs` epochs,
    writes TensorBoard summaries, and saves the initialized model to
    "parameter_checkpoints/initialized.ckpt" for the later RNN-RBM stage.
    """
    ####
    ##  Input layer: `songs` is the training data, `x` the feed placeholder.
    ####
    with tf.name_scope("input_layer"):
        songs = midi_manipulation.get_songs('Pop_Music_Midi')

        # Holds one song's (timesteps x n_visible) note matrix per step.
        x = tf.placeholder(tf.float32, shape=[None, rnn_rbm.n_visible], name="x")

    #####
    ## RNN layer
    #####
    with tf.name_scope("RNN_layer"):

        with tf.name_scope("initial_state"):
            # The initial state of the RNN.
            u0 = tf.Variable(tf.zeros([1, rnn_rbm.n_hidden_recurrent], tf.float32), name="u0")
            summaries.variable_summaries("initialStateU0", u0)

        with tf.name_scope("weight"):
            # The data -> RNN weight matrix.
            # NOTE: the original code passed 0.0001 positionally, which is the
            # *mean* argument of tf.random_normal (stddev stayed at 1.0).
            # The intent is clearly a small-stddev init, so name it explicitly.
            Wvu = tf.Variable(
                tf.random_normal([rnn_rbm.n_visible, rnn_rbm.n_hidden_recurrent], stddev=0.0001),
                name="Wvu")
            summaries.variable_summaries("Wvu", Wvu)

        with tf.name_scope("RNN_hidden_layer"):

            with tf.name_scope("weight"):
                # The RNN hidden-unit weight matrix.
                Wuu = tf.Variable(
                    tf.random_normal([rnn_rbm.n_hidden_recurrent, rnn_rbm.n_hidden_recurrent], stddev=0.0001),
                    name="Wuu")
                summaries.variable_summaries("Wuu", Wuu)

            with tf.name_scope("bias"):
                # The RNN hidden-unit bias vector.
                bu = tf.Variable(tf.zeros([1, rnn_rbm.n_hidden_recurrent], tf.float32), name="bu")
                summaries.variable_summaries("bu", bu)

    #########
    ## RNN-RBM connection layer
    #########
    with tf.name_scope("RNN_RBM_Connect_layer"):

        with tf.name_scope("vision"):
            ####
            # Weights connecting the RNN hidden state to the RBM visible layer.
            ####
            with tf.name_scope("weight"):
                Wuv = tf.Variable(
                    tf.random_normal([rnn_rbm.n_hidden_recurrent, rnn_rbm.n_visible], stddev=0.0001),
                    name="Wuv")
                summaries.variable_summaries("Wuv", Wuv)

            #####
            ## Bias from the RNN hidden state to the RBM visible layer.
            #####
            with tf.name_scope("bias"):
                bv = tf.Variable(tf.zeros([1, rnn_rbm.n_visible], tf.float32), name="bv")
                summaries.variable_summaries("bv", bv)

        with tf.name_scope("hidden"):

            with tf.name_scope("weight"):
                # The RNN -> RBM hidden weight matrix.
                Wuh = tf.Variable(
                    tf.random_normal([rnn_rbm.n_hidden_recurrent, rnn_rbm.n_hidden], stddev=0.0001),
                    name="Wuh")
                summaries.variable_summaries("Wuh", Wuh)

            #####
            ## Bias from the RNN hidden state to the RBM hidden layer.
            #####
            with tf.name_scope("bias"):
                bh = tf.Variable(tf.zeros([1, rnn_rbm.n_hidden], tf.float32), name="bh")
                summaries.variable_summaries("bh", bh)

    #########
    ## RBM layer
    #########
    with tf.name_scope("RBM_layer"):
        #######
        ## RBM weight matrix (visible <-> hidden).
        #######
        with tf.name_scope("weight"):
            W = tf.Variable(tf.random_normal([rnn_rbm.n_visible, rnn_rbm.n_hidden], stddev=0.01), name="W")
            summaries.variable_summaries("W", W)

        with tf.name_scope("RBM_vision_layer"):
            with tf.name_scope("bias"):
                # Per-timestep visible bias; populated during RNN-RBM
                # training and generation.
                BV_t = tf.Variable(tf.ones([1, rnn_rbm.n_visible], tf.float32), name="BV_t")
                summaries.variable_summaries("BV_t", BV_t)

        with tf.name_scope("RBM_hidden_layer"):
            with tf.name_scope("bias"):
                # Per-timestep hidden bias; populated during RNN-RBM
                # training and generation.
                BH_t = tf.Variable(tf.ones([1, rnn_rbm.n_hidden], tf.float32), name="BH_t")
                summaries.variable_summaries("BH_t", BH_t)

        # Saver captures all variables declared above.
        # Note that we initialize the RNN->RBM bias vectors with the bias
        # vectors of the trained RBM; they form the templates for the bv_t
        # and bh_t of each RBM created when we run the RNN-RBM.
        saver = tf.train.Saver()

    with tf.name_scope("training"):
        # One step of contrastive divergence (k=1) on the RBM (W, bv, bh).
        updt = RBM.get_cd_update(x, W, bv, bh, 1, lr)

    # Merge all summary ops into a single fetchable node.
    merged = tf.summary.merge_all()

    # Run the session
    with tf.Session() as sess:
        summary_writer = tf.summary.FileWriter(SUMMARY_RBM_DIR, sess.graph)

        # Initialize the variables of the model.
        sess.run(tf.global_variables_initializer())

        # Running step counter so each (epoch, song) step gets its own
        # TensorBoard point instead of overwriting within an epoch.
        step = 0

        # Run over each song num_epochs times.
        for epoch in tqdm(range(num_epochs)):
            for song in songs:
                # Fetch the summaries together with the update op: one graph
                # evaluation per step instead of two. (The old per-iteration
                # FULL_TRACE RunOptions/RunMetadata were never consumed and
                # have been removed — pure overhead.)
                _, summary = sess.run([updt, merged], feed_dict={x: song})
                summary_writer.add_summary(summary, step)
                step += 1

        # Save the pre-trained model; RNN-RBM training resumes from it.
        save_path = saver.save(sess, "parameter_checkpoints/initialized.ckpt")

        summary_writer.close()

if __name__ == "__main__":
    # Standard script entry point (PEP 8 4-space indent).
    main()