# -*- coding: UTF-8 -*-

import sys

sys.path.append('/home/cnn/pro/yazif/vqa/')

import tensorflow as tf
from utils.config import process_config
from utils.dirs import create_dirs
from models.vis_lstm_model import VisLstmModel
from models.vgg_model import VggModel
from data_loader.data_loader import DataLoader
import numpy as np
import os

# Training hyper-parameters and experiment configuration.
epoch_num = 20   # number of training epochs (also caps how many checkpoints are kept)
batch_size = 200
config = process_config('../configs/lstm.json')

# create the experiments dirs
create_dirs([config.summary_dir, config.checkpoint_dir])

vgg = None

# When the dataset does not ship precomputed image features, build a VGG-19
# network so raw RGB images can be encoded into fc7 features at train time.
# NOTE(review): assumes `is_have_img_data` means "image features already extracted" — confirm.
if not config.is_have_img_data:
    vgg = VggModel(config.vgg19_npy_path)
    vgg.build()

# Build the Vis-LSTM VQA model graph before any session is created.
model = VisLstmModel(config)
model.build_model()
# create your data generator
data = DataLoader(config)
# Keep up to one checkpoint per epoch.
saver = tf.train.Saver(max_to_keep=epoch_num)
save_path = config.checkpoint_dir + '/vqa.ckpt'

with tf.Session() as sess:
    # Scalar placeholders fed once per epoch with the epoch-mean loss/accuracy,
    # so TensorBoard shows per-epoch curves rather than noisy per-batch values.
    loss_summary = tf.placeholder(dtype=tf.float32, shape=[], name='loss_summary')
    tf.summary.scalar('loss', loss_summary)
    # BUG FIX: this placeholder was created with the duplicate name
    # 'loss_summary' (copy-paste error); TF silently renamed it to
    # 'loss_summary_1' in the graph. Give it its own name.
    acc_summary = tf.placeholder(dtype=tf.float32, shape=[], name='acc_summary')
    tf.summary.scalar('accuracy', acc_summary)
    train_summary_writer = tf.summary.FileWriter(os.path.join(config.summary_dir, "train"),
                                                 sess.graph)

    merged = tf.summary.merge_all()

    # Optimizer. Must be created before global_variables_initializer() so
    # Adam's slot variables are included in the init op.
    opt = tf.train.AdamOptimizer(0.001).minimize(model.loss)

    init = tf.global_variables_initializer()
    sess.run(init)

    # Best epoch-mean accuracy seen so far; used to checkpoint only on improvement.
    total_acc = 0.0

    for i in range(epoch_num):
        batch_no = 0
        accs = []
        losses = []
        while batch_no * batch_size < data.get_data_size():
            ques, img, answer = data.get_next_batch(batch_no, batch_size)
            # If only raw images are available, encode them to VGG-19 fc7
            # features before feeding the VQA model.
            if not config.is_have_img_data:
                img = sess.run(vgg.fc7, feed_dict={vgg.rgb: img})
            acc, loss, _ = sess.run([model.accuracy, model.loss, opt], feed_dict={
                model.img_feat: img,
                model.ques: ques,
                model.answer: answer,
                model.batch_size : batch_size
            })

            batch_no += 1

            accs.append(acc)
            losses.append(loss)

        # Epoch-level metrics: mean over all batches in this epoch.
        acc = np.mean(accs)
        loss = np.mean(losses)
        print('acc', acc, 'loss', loss, i, epoch_num)

        summary = sess.run(merged, feed_dict={
            loss_summary: loss,
            acc_summary: acc
        })
        train_summary_writer.add_summary(summary, i)

        # Save a checkpoint only when the epoch-mean accuracy improves.
        if total_acc < acc:
            saver.save(sess=sess, global_step=i, save_path=save_path)
            total_acc = acc
