# -*- coding: utf-8 -*-
# @Author: lidongdong
# @time  : 18-12-16 下午10:09
# @file  : train.py

import os
import numpy as np
import tensorflow as tf
from dataloader import DataLoader
# from mnist_loader import MnistLoader
from Model import FGan
from data_prepare.parameters import config


os.environ["CUDA_VISIBLE_DEVICES"] = "0"


def main():
    """Train the flower text-to-image GAN.

    Per batch: one discriminator update followed by two generator updates
    (a common balancing trick so G is not overpowered by D), with TensorBoard
    summaries written every step and checkpoints saved periodically.
    Resumes from the newest checkpoint in ``model_path`` when configured.
    """
    dataloader = DataLoader("Data/images.h5", config.train.batch_size, load_all_to_memory=True)
    # dataloader = MnistLoader("/home/jack/hdd_download/", config.train.batch_size)
    model = FGan(config)
    input_tensor, variables, losses, outputs, checks = model.build_model()
    check_ts = [checks["d_loss1"], checks["d_loss2"], checks["d_loss3"]]
    # beta1=0.5 as the original comment intended ("optimizer momentum is 0.5");
    # the previous code silently used Adam's default beta1=0.9.
    d_optimizer = tf.train.AdamOptimizer(config.train.lr_rate, beta1=0.5).minimize(
        losses["d_loss"], var_list=variables["d_vars"])
    g_optimizer = tf.train.AdamOptimizer(config.train.lr_rate, beta1=0.5).minimize(
        losses["g_loss"], var_list=variables["g_vars"])

    # session: allocate GPU memory on demand instead of grabbing it all up front
    con = tf.ConfigProto()
    con.gpu_options.allow_growth = True
    session = tf.Session(config=con)

    # saver / checkpoint restore
    saver = tf.train.Saver()
    model_path = "/dl_data/flower-gan/Data/Models"
    latest_epoch, pkl_filename = latest_pkl(model_path)
    if config.train.reload_pkl and pkl_filename:
        print("restore weight from {}".format(pkl_filename))
        saver.restore(session, pkl_filename)
    else:
        print("no pkl file, initialize variables")
        latest_epoch = 0
        session.run(tf.global_variables_initializer())

    # TensorBoard summaries
    fw = tf.summary.FileWriter("/tmp/flower", session.graph)
    tf.summary.scalar("d_loss", losses["d_loss"])
    tf.summary.scalar("d_loss1", checks["d_loss1"])
    tf.summary.scalar("d_loss2", checks["d_loss2"])
    tf.summary.scalar("d_loss3", checks["d_loss3"])
    tf.summary.scalar("g_loss", losses["g_loss"])
    tf.summary.image("fake_image", outputs["fake_image"], max_outputs=3)
    merged = tf.summary.merge_all()

    index = 0  # global step counter across all epochs (summary x-axis)
    for epoch in range(latest_epoch + 1, config.train.epoches):
        print("[Train Epoch: {:>3}]".format(epoch))
        for real_image, wrong_image, caption in dataloader.get_batch():
            noise = np.random.uniform(-1, 1, [real_image.shape[0], config.dimensions.z_dim])
            index += 1
            # one feed dict shared by the D step and both G steps (same noise,
            # exactly as the original duplicated dicts did)
            feed = {
                input_tensor["real_image"]: real_image,
                input_tensor["wrong_image"]: wrong_image,
                input_tensor["noise"]: noise,
                input_tensor["text_embedding"]: caption,
            }

            # discriminator update
            _, merged_summary, d_all, d1, d2, d3 = session.run(
                [d_optimizer, merged, losses["d_loss"]] + check_ts, feed_dict=feed)

            # two generator updates per discriminator update
            for _ in range(2):
                _, g_loss = session.run([g_optimizer, losses["g_loss"]], feed_dict=feed)
            fw.add_summary(summary=merged_summary, global_step=index)

            print("discriminator >>> d_loss: {:.2f}   d1: {:.2f}   d2: {:.2f}    d3: {:.2f}".format(d_all, d1, d2, d3))
            # fixed format spec: was "{:2f}" (width 2), meant "{:.2f}" (2 decimals)
            print("generator     >>> g_loss: {:.2f}".format(g_loss))
            print("\n")

        # SAVE WEIGHT -- save into model_path so latest_pkl() finds it on
        # restart (the old relative "Data/Models" only matched model_path when
        # cwd was /dl_data/flower-gan).  range() stops at epoches-1, so the
        # old "epoch == epoches" final-save condition could never fire.
        if epoch % config.train.save_interval == 0 or epoch == config.train.epoches - 1:
            saver.save(session, os.path.join(model_path, "model_epoch{}.pkl".format(epoch)))


def latest_pkl(epoch_path):
    """Locate the newest saved checkpoint under *epoch_path*.

    Scans for TF checkpoint meta files named ``model_epoch<N>.pkl.meta`` and
    returns ``(latest_epoch, checkpoint_prefix)``, or ``(0, None)`` when no
    checkpoint exists.  The returned prefix is what ``tf.train.Saver.restore``
    expects (the path without the ``.meta`` suffix).
    """
    import glob
    import re
    meta_files = glob.glob(os.path.join(epoch_path, "model_epoch*.pkl.meta"))
    epoches = []
    for filename in meta_files:
        m = re.match(r".*model_epoch(\d+)\.pkl\.meta", filename)
        # guard: skip any glob hit that does not carry a numeric epoch,
        # instead of crashing on .group(1) of None
        if m:
            epoches.append(int(m.group(1)))
    if epoches:
        best = max(epoches)
        # Reconstruct the name exactly as saver.save() writes it (unpadded).
        # The old "{:0>3}" zero-padding produced e.g. "model_epoch090.pkl"
        # for a file saved as "model_epoch90.pkl", breaking restore.
        return best, os.path.join(epoch_path, "model_epoch{}.pkl".format(best))
    return 0, None


if __name__ == '__main__':
    main()
