#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Sun Oct 23 13:30:31 2016

@author: kevin
"""
from getStrokeLabel import genStrokes
import tensorflow as tf
import numpy as np
from utils import weight_variable, bias_variable, loglikelihood
from config import Config
from myGlimpse import GlimpseNet, LocNet
from myCNN import CNN2
import os
import h5py
import matplotlib.pyplot as plt
from LastDraw import manualDraw as lastDraw
from manualDraw import *
import cv2,datetime


# ---- Script setup: timing, legacy-TF aliases, config, and placeholders ----
start_time=datetime.datetime.now()

# Aliases for the legacy TF 0.x RNN APIs (these moved/changed in TF >= 1.0).
rnn_cell = tf.nn.rnn_cell
seq2seq = tf.nn.seq2seq

config = Config()
n_steps = config.step  # number of epochs iterated in the training loop below

# Per-glimpse tensors accumulated while the graph is built (filled by the
# "first glimpse" section and by get_next_input on every decoder step).
loc_mean_arr = []     # mean of the location policy at each step
sampled_loc_arr = []  # stochastic location samples at each step
alpha_est_arr = []    # estimated alpha mattes, one per glimpse

draw = genStrokes(config, )  # stroke generator (see getStrokeLabel)
cnn2 = CNN2(config,)         # matting CNN applied to (image, strokeMap) pairs

# placeholders
image_ph = tf.placeholder(tf.float32, [None, config.img_sz, config.img_sz, config.img_channels])
strokeMap_ph = tf.placeholder(tf.float32, [None, config.img_sz, config.img_sz, config.img_channels])
# Per-example feature vector used as the very first RNN input.
init_feature=tf.placeholder(tf.float32,[None,config.rnn_input_sz])


def get_next_input(rnn_output, i):
    """Loop function for the RNN decoder.

    Given the previous step's RNN output, sample the next stroke location,
    rasterize it onto the running stroke maps, re-estimate the alpha matte,
    and return the glimpse vector fed to the next RNN step.  Also records
    the location statistics / matte estimate in the module-level lists.
    """
    global strokeMaps
    global images
    global gtAlphas
    sampled_loc, mean_loc, draw_loc, ratio = loc_net(rnn_output)
    # Record policy statistics for the REINFORCE log-likelihood term.
    loc_mean_arr.append(mean_loc)
    sampled_loc_arr.append(sampled_loc)
    # Draw the new stroke into the accumulated stroke maps (numpy side),
    # then bring the result back to float32 for the rest of the graph.
    strokeMaps = tf.cast(
        tf.py_func(myDraw, [strokeMaps, draw_loc, ratio], tf.double),
        tf.float32)
    # Re-run the matting CNN on the updated stroke maps.
    matte = tf.cast(
        tf.py_func(cnn2.op, [images, strokeMaps], tf.double),
        tf.float32)
    alpha_est_arr.append(matte)
    return glimpse_net(draw_loc, matte)

# build the auxiliary glimpse-net && loc-net
with tf.variable_scope('glimpse_net'):
    glimpse_net = GlimpseNet(config, )
with tf.variable_scope('loc_net'):
    loc_net = LocNet(config, )


# number of examples: N == M*batch
N = tf.shape(image_ph)[0]

#first_loc,_ = loc_net(init_feature)




# Module-level tensors that get_next_input rebinds on every decoder step.
strokeMaps = strokeMap_ph
images = image_ph


# RNN --- LSTM
lstm_cell = rnn_cell.BasicLSTMCell(config.rnn_input_sz, forget_bias=1.0, state_is_tuple=True)


init_state=lstm_cell.zero_state(N,tf.float32)

# A one-step "warm-up" decoder: feed the init_feature vector once so the
# LSTM state and the first predicted location are conditioned on it.
rnn_init_input = [init_feature]

with tf.variable_scope('first_rnn'):
      rnn_init_output, rnn_init_states = seq2seq.rnn_decoder(rnn_init_input, init_state, lstm_cell)
rnn_init_output=tf.squeeze(rnn_init_output)
# loc_net returns (sampled_loc, loc_mean, draw_loc, size_ratio); only the
# drawing location and size ratio are needed for the first stroke.
_,_,first_loc,first_size_ratio= loc_net([rnn_init_output])


# Draw the first stroke and estimate the first alpha matte (numpy-side ops
# wrapped via tf.py_func, then cast back to float32 for the graph).
strokeMaps=tf.py_func(myDraw,[strokeMaps,first_loc,first_size_ratio],tf.double)
strokeMaps=tf.cast(strokeMaps,tf.float32)
alpha_est = tf.py_func(cnn2.op,[images,strokeMaps],tf.double)

alpha_est = tf.cast(alpha_est,tf.float32)
alpha_est_arr.append(alpha_est)

first_gl = glimpse_net(first_loc, alpha_est)


# Main decoder: first input is the first glimpse; the remaining inputs are
# dummies (0) because loop_function=get_next_input supplies the real ones.
rnn_inputs = [first_gl]   
rnn_inputs.extend([0]*(config.num_glimpses))
rnn_outputs, rnn_states = seq2seq.rnn_decoder(rnn_inputs, rnn_init_states, lstm_cell, loop_function=get_next_input)



# Baseline Network for b_t
# Linear baseline b_t = W*h_t + b on each RNN output, used as the variance
# reduction term for REINFORCE.
with tf.variable_scope('baseline'):
    w_baseline = weight_variable((config.rnn_output_sz, 1))
    b_baseline = bias_variable((1,))
baselines = []
for t, output in enumerate(rnn_outputs[1:]):
    baseline_t = tf.nn.xw_plus_b(output, w_baseline, b_baseline)
    baseline_t = tf.squeeze(baseline_t) # remove dimensions of size 1
    baselines.append(baseline_t)
baselines = tf.pack(baselines)      # [timesteps, batch*M]
baselines = tf.transpose(baselines) # [batch*M, timesteps]


# NOTE(review): this loop computes alpha_last/alpha_this but never appends
# anything to `rewards` — it looks like an incomplete/leftover reward
# computation; `rewards` stays empty and is unused below.
rewards = []
for i in range(config.num_glimpses):
     alpha_last = tf.squeeze(alpha_est_arr[i])
     alpha_this = tf.squeeze(alpha_est_arr[i+1])

     
     
loc_compute = tf.pack(sampled_loc_arr) # [timesteps, batch_sz, loc_dim]

loc_avg = tf.reduce_mean(loc_compute) # average over all dims
loc_var = tf.reduce_mean(tf.square(tf.reduce_mean(loc_compute, reduction_indices=[1,2]) - loc_avg))



# Log-likelihood of the sampled locations under the Gaussian location
# policy; computed in float64 then cast back.
logll = loglikelihood(tf.cast(loc_mean_arr,tf.float64), tf.cast(sampled_loc_arr,tf.float64), tf.cast(config.loc_std,tf.float64))     # [batch_sz*M, timesteps]
logll = tf.cast(logll,tf.float32)
   # average reward over batch_sz*M
# baseline_mse

# collect all trainable variables
var_list = tf.trainable_variables()

# hybrid loss

# learning rate
global_step = tf.get_variable('global_step',[],initializer=tf.constant_initializer(0), trainable=False)
starter_learning_rate = config.lr_start

# Exponentially decayed LR, floored at config.lr_min.
learning_rate = tf.train.exponential_decay(starter_learning_rate, global_step, config.lrDecayFreq, 0.97, staircase=True)
learning_rate = tf.maximum(learning_rate, config.lr_min)
saver=tf.train.Saver()

def read_data(path):
     """Load and return the 'images' dataset from the HDF5 file at *path*.

     Uses a context manager so the file handle is closed deterministically
     (the original left the h5py.File open).
     """
     with h5py.File(path, 'r') as hf:
          return np.array(hf.get('images'))
     
def read_prob(path):
     """Load and return the 'probs' dataset from the HDF5 file at *path*.

     Uses a context manager so the file handle is closed deterministically
     (the original left the h5py.File open).
     """
     with h5py.File(path, 'r') as hf:
          return np.array(hf.get('probs'))

def read_loc(path):
    """Load and return the 'locs' dataset from the HDF5 file at *path*.

    Uses a context manager so the file handle is closed deterministically
    (the original left the h5py.File open).
    """
    with h5py.File(path, 'r') as hf:
        return np.array(hf.get('locs'))

def pre_processStroke(images, alphas, border=10):
    """Paint ground-truth stroke pixels onto copies of the input images.

    Only the top `border` and bottom `border` rows of each image are
    touched: pixels whose alpha label is exactly 0 are painted black
    (background stroke) and pixels whose alpha is exactly 255 are painted
    white (foreground stroke); all other pixels are left unchanged.

    Args:
        images: (batch, rows, cols, channels) array.
        alphas: (batch, rows, cols) per-pixel alpha labels.
        border: number of rows at the top and bottom edges to process.
            Defaults to 10, matching the previously hard-coded value.

    Returns:
        A copy of `images` with the stroke pixels painted in.
    """
    background = (0, 0, 0)
    foreground = (255, 255, 255)
    # NOTE(review): the original unpacked dim1 as "width" and dim2 as
    # "height" but indexed rows with both — correct only for square
    # images (config.img_sz x img_sz). Unpacking kept for compatibility.
    [batch_size, width, height, ch] = images.shape
    strokes = images.copy()
    for i in range(batch_size):
        alpha = alphas[i]
        for y in range(border):
            for x in range(width):
                # Top band.
                if alpha[y, x] == 0:
                    strokes[i, y, x] = background
                elif alpha[y, x] == 255:
                    strokes[i, y, x] = foreground
                # Mirrored bottom band.
                if alpha[height-y-1, x] == 0:
                    strokes[i, height-y-1, x] = background
                elif alpha[height-y-1, x] == 255:
                    strokes[i, height-y-1, x] = foreground
    return strokes


# ---- Inference/visualization loop: restore weights, run the glimpse RNN
# ---- on each batch, and display the resulting mattes and strokes.
with tf.Session() as sess:

     saver.restore(sess, "./net/save_net.ckpt")
      
     data_dir = os.path.join(os.getcwd(), 'train.h5')
     prob_dir = os.path.join(os.getcwd(), 'probs.h5')
     
     train_images= read_data(data_dir)
     train_strokeMaps=train_images.copy()
     train_probs = read_prob(prob_dir)
     
     
     # draw figure for loss
     plt.axis([0,20,-5,5])
     fig_loss = []

     counter = 0
     for ep in xrange(n_steps):
          batch_idxs = len(train_images) // config.batch_sz
          print ('batch_idxs==', batch_idxs)
          for idx in xrange(0, batch_idxs):
               print ('idx==', idx)
               counter = counter + 1
               
               # Slice out the current mini-batch.
               batch_images = train_images[idx*config.batch_sz : (idx+1)*config.batch_sz]
               batch_strokeMaps= train_strokeMaps[idx*config.batch_sz : (idx+1)*config.batch_sz]
               batch_probs  = train_probs[idx*config.batch_sz : (idx+1)*config.batch_sz]
               
               # Flatten, replicate each example M times (Monte Carlo
               # samples for the stochastic location policy), then restore
               # the image shape.
               batch_images = np.reshape(batch_images, (-1, config.img_sz * config.img_sz * config.img_channels))
               batch_strokeMaps = np.reshape(batch_strokeMaps, (-1, config.img_sz * config.img_sz * config.img_channels))
               
               
               batch_images = np.tile(batch_images, [config.M, 1]).astype('float32')
               batch_strokeMaps = np.tile(batch_strokeMaps, [config.M, 1]).astype('float32')

               batch_probs = np.tile(batch_probs, [config.M, 1]).astype('float32')

               batch_images = np.reshape(batch_images, (-1, config.img_sz, config.img_sz, config.img_channels))
               batch_strokeMaps = np.reshape(batch_strokeMaps, (-1, config.img_sz, config.img_sz, config.img_channels))
               
               # Sample locations stochastically during this run.
               loc_net.sampling = True
               strokeMaps_val,alphas_val,alpha_est_val= sess.run([strokeMaps,alpha_est_arr,alpha_est,], 
                                        feed_dict={image_ph: batch_images,
                                                   strokeMap_ph: batch_strokeMaps,
                                                   init_feature: batch_probs,
                                            })


               alphas_val=np.array(alphas_val)         #(num_glimpse,batch_sz*M,img_sz,img_sz,alMatte_channel)
               #result_path = os.path.join(os.getcwd(), 'matte.h5')
               #with h5py.File(result_path, 'w') as hf:
                     #hf.create_dataset('matte', data=alphas_val)
               # Show the matte estimated after the last glimpse for the
               # first example in the batch (blocks until a key is pressed).
               lastMatte=alphas_val[-1,0,:,:]
               cv2.imshow('matte',np.uint8(lastMatte))
               cv2.waitKey(0)
               strokeMaps_val=np.array(strokeMaps_val)
               #cv2.destroyAllWindows()
               # Re-draw the final stroke map for display, then run the
               # matting CNN once more on it to get the final matte.
               lastStroke=lastDraw(np.uint8(strokeMaps_val[0]))
               cv2.imshow('LastStroke',lastStroke)
               cv2.waitKey(0)
               lastStrokes=np.random.random((1,config.img_sz,config.img_sz,config.strokeMap_channels))
               lastStrokes[0]=np.array(lastStroke)
               print lastStrokes.shape
               FinalMatte=cnn2.op(batch_images,lastStrokes)
               FinalMatte=np.array(FinalMatte)
               FinalMatte=np.uint8(FinalMatte)
               print FinalMatte.shape
               cv2.imshow('FinalMatte',FinalMatte[0])
               cv2.waitKey(0)
               cv2.destroyAllWindows()
               
                  
end_time=datetime.datetime.now()
print 'total time is: ',(end_time-start_time).seconds











