 # -*- coding: utf-8 -*-
"""
 #    Powered by AIgames of machine intelligence lab
 #    Author: Justin Xu

Build the Recurrent Highway Network.

#  compute input and labels for training.
#  compute the loss of the prediction with respect to the labels
#  compute the rhn cell
#  compute  the rhn_unroll with the variables reused
"""
import tensorflow as tf
import reader
import numpy

tf.app.flags.DEFINE_boolean('use_fp16', False,
                            """Train the model using fp16.""")

def _variable_on_cpu(name, shape, initializer):
  """Helper to create a Variable stored on CPU memory.

  Args:
    name: name of the variable
    shape: list of ints
    initializer: initializer for Variable

  Returns:
    Variable Tensor
  """
  with tf.device('/cpu:0'):
    # Honor the --use_fp16 flag declared at the top of this file.  The
    # previous `tf.float16 if True else tf.float32` always chose fp16 and
    # silently ignored the flag.
    dtype = tf.float16 if tf.app.flags.FLAGS.use_fp16 else tf.float32
    var = tf.get_variable(name, shape, initializer=initializer, dtype=dtype)
  return var


def rhn_cell(step_input,config):
    """Apply the stack of inner (recurrence-depth) layers of the RHN cell.

    Args:
      step_input: tensor of shape [batch_size, hidden_size] entering the cell.
      config: object providing inner_layers, hidden_size, stddev, dtype,
        bias and activation.

    Returns:
      List of tensors: the input followed by the state after each inner layer
      (so the final cell state is the last element).
    """
    states = [step_input]
    current = step_input
    for depth in range(config.inner_layers):
        scope_name = 'rhn_inner_state_%d' % depth
        with tf.variable_scope(scope_name):
            w = _variable_on_cpu(
                'weights', [config.hidden_size, config.hidden_size],
                tf.truncated_normal_initializer(stddev=config.stddev,
                                                dtype=config.dtype))
            b = _variable_on_cpu('biases', [config.hidden_size],
                                 tf.constant_initializer(config.bias))
            # Fully-connected layer followed by the configured nonlinearity.
            current = config.activation(tf.matmul(current, w) + b)
            states.append(current)
    return states

def rhn(inputs,config):
    """Unroll the RHN over time with weights shared across steps.

    Args:
      inputs: tensor of shape [batch_size, num_steps, hidden_size]
        (indexed as inputs[:, step, :] below — confirm against caller).
      config: object providing batch_size, hidden_size, num_steps, stddev,
        dtype, bias, activation and inner_layers.

    Returns:
      List of length config.num_steps with the final cell state
      ([batch_size, hidden_size]) at each time step.
    """
    outputs = []
    with tf.variable_scope('initial_state'):
        with tf.device('/cpu:0'):
            # Use the model-wide dtype.  The previous
            # `tf.float16 if True else tf.float32` hard-coded fp16
            # regardless of config.dtype, unlike every other variable
            # in this file.
            initial_state = tf.get_variable(
                'initial_state', [config.batch_size, config.hidden_size],
                initializer=tf.constant_initializer(0.0),
                trainable=False, dtype=config.dtype)

    prev_state = initial_state
    for index in range(config.num_steps):
        # Create the step weights at index 0 and reuse them afterwards so
        # the recurrence shares parameters across time.  The original code
        # duplicated this whole body in two branches differing only in the
        # reuse flag and the previous-state tensor.
        with tf.variable_scope('rhn_external_input', reuse=(index > 0)):
            weights = _variable_on_cpu(
                'weights_0', [config.hidden_size, config.hidden_size],
                tf.truncated_normal_initializer(stddev=config.stddev,
                                                dtype=config.dtype))
            biases = _variable_on_cpu('biases_0', [config.hidden_size],
                                      tf.constant_initializer(config.bias))
            # Projection of the current external input.
            external_input = tf.matmul(inputs[:, index, :], weights) + biases

            weights_1 = _variable_on_cpu(
                'weights_1', [config.hidden_size, config.hidden_size],
                tf.truncated_normal_initializer(stddev=config.stddev,
                                                dtype=config.dtype))
            biases_1 = _variable_on_cpu('biases_1', [config.hidden_size],
                                        tf.constant_initializer(config.bias))

            # Combine the recurrent state with the external input, then run
            # the inner highway layers.
            external_state = tf.matmul(prev_state, weights_1) + biases_1 + external_input
            concats = rhn_cell(step_input=config.activation(external_state),
                               config=config)
        prev_state = concats[-1]
        outputs.append(concats[-1])
    return outputs

def rhn_loss(targets,outputs,config):
    """Compute the per-batch cross-entropy loss of the unrolled outputs.

    Args:
      targets: int tensor of target token ids, reshaped to a flat vector.
      outputs: list of [batch_size, hidden_size] step outputs from rhn().
      config: object providing hidden_size, vocab_size, batch_size,
        num_steps and dtype.

    Returns:
      Scalar tensor: summed sequence loss divided by the batch size.
    """
    # Flatten [batch, steps, hidden] -> [batch * steps, hidden].
    flat_output = tf.reshape(tf.stack(axis=1, values=outputs),
                             [-1, config.hidden_size])
    softmax_w = tf.get_variable(
        "softmax_w", [config.hidden_size, config.vocab_size],
        dtype=config.dtype)
    softmax_b = tf.get_variable("softmax_b", [config.vocab_size],
                                dtype=config.dtype)
    logits = tf.matmul(flat_output, softmax_w) + softmax_b
    # Uniform per-position weights: every target counts equally.
    weights = tf.ones([config.batch_size * config.num_steps],
                      dtype=config.dtype)
    per_example = tf.contrib.legacy_seq2seq.sequence_loss_by_example(
        [logits], [tf.reshape(targets, [-1])], [weights])
    return tf.reduce_sum(per_example) / config.batch_size

def rhn_update(loss,learning_rate):
    """Build the training op: plain SGD minimizing `loss`.

    Args:
      loss: scalar loss tensor to minimize.
      learning_rate: float or scalar tensor learning rate.

    Returns:
      A train_op that applies the gradients and increments the global step.
    """
    tf.summary.scalar('loss', loss)
    optimizer = tf.train.GradientDescentOptimizer(learning_rate)
    # Create a variable to track the global step.
    global_step = tf.Variable(0, name='global_step', trainable=False)
    # NOTE(review): the original carried a commented-out gradient-clipping
    # variant (clip_by_global_norm + apply_gradients); removed as dead code.
    return optimizer.minimize(loss, global_step=global_step)

def pdb_inputs(batch_size,num_steps,epoch_size,data,name=None):
    """Produce (input, target) batch tensors via the PTB reader.

    NOTE(review): `epoch_size` is accepted but never used, and the name
    looks like a typo for `ptb_inputs`; both are kept unchanged so existing
    callers keep working.

    Returns:
      The (input_data, targets) pair from reader.ptb_producer.
    """
    return reader.ptb_producer(data, batch_size, num_steps, name=name)
