# -*- coding: utf-8 -*-
"""
Updated on Sun Mar  3 21:39:40 2019

@author: william

Email: hua_yan_tsn@163.com
"""
import tensorflow as tf
import numpy as np
from WordEmbeddings import laptop_model, restaurant_model
def sigmoid(x):
    """Logistic sigmoid: 1 / (1 + e^(-x)), monotonically increasing in x.

    :param x: scalar or numpy array
    :return: value(s) in (0, 1)
    """
    # BUG FIX: the original computed 1/(1 + exp(x)) == sigmoid(-x); the
    # exponent sign was inverted, so the gate activations were flipped.
    # The parameter is also renamed so it no longer shadows builtin `input`.
    return 1 / (1 + np.exp(-x))
def updateGate(x_t, h_t_1, W):
    """Compute the GRU update-gate activation.

    :param x_t: input embedding at the current step
    :param h_t_1: hidden state from the previous step
    :param W: weight vector spanning the stacked [h_t_1; x_t]
    :return: scalar gate activation u_t = sigmoid(sum(W * [h_t_1; x_t]))
    """
    stacked = np.concatenate((h_t_1, x_t), axis=0)
    weighted_sum = (W * stacked).sum()
    return sigmoid(weighted_sum)

def resetGate(x_t, h_t_1, W):
    """Compute the GRU reset-gate activation.

    Mathematically identical to the update gate — only the weight
    vector supplied by the caller differs.
    """
    return updateGate(x_t, h_t_1, W)

def getHiddenState(updates, resets, x_t, h_t_1, W):
    """Blend the previous hidden state with the candidate state.

    :param updates: update-gate activation u_t (scalar)
    :param resets: reset-gate activation r_t (scalar)
    :param x_t: input embedding at the current step
    :param h_t_1: previous hidden state, shape (1,)
    :param W: weight vector spanning the stacked [r_t * h_t_1; x_t]
    :return: new hidden state h_t = (1 - u_t) * h_t_1 + u_t * h_hat
    """
    gated_prev = np.zeros(1,)
    gated_prev[0] = resets * h_t_1
    stacked = np.concatenate((gated_prev, x_t))
    candidate = np.tanh((W * stacked).sum())
    # Convex combination of the old state and the tanh candidate.
    return (1 - updates) * h_t_1 + updates * candidate

def output(h_t, W):
    """Map the hidden state to the cell output through a sigmoid."""
    return sigmoid(h_t * W)

def computeCell(x_t, h_t_1, W):
    """
    Run a single GRU cell step.

    :param x_t: the original input of word_embeddings at this step
    :param h_t_1: the hidden state of the last iteration, shape (1,)
    :param W: the dict containing four weight vectors: 'update', 'reset',
              'hidden', 'output'
    :return out: the cell output (scalar when len(o) == 1, else 1-D array)
    :return h: the hidden state produced by this iteration, shape (1,)
    """
    u = updateGate(x_t, h_t_1, W['update'])
    r = resetGate(x_t, h_t_1, W['reset'])
    h = np.zeros(1,)
    h[0] = getHiddenState(u, r, x_t, h_t_1, W['hidden'])
    o = output(h, W['output'])
    # BUG FIX: the original did np.concatenate((out, o[i])) on the 0-d
    # elements o[i], which raises "zero-dimensional arrays cannot be
    # concatenated" whenever len(o) > 1.  Wrapping both sides in
    # np.atleast_1d makes the multi-element case work while keeping the
    # len(o) == 1 result identical (out stays the bare o[0]).
    out = o[0]
    for i in range(1, len(o)):
        out = np.concatenate((np.atleast_1d(out), np.atleast_1d(o[i])), axis=0)
    return out, h

def forwardGRU(inputEmbeddings, W):
    """Run the GRU left-to-right over the token sequence.

    :param inputEmbeddings: iterable of per-token embedding vectors
    :param W: weight dict forwarded to computeCell
    :return: list with one cell output per token, in sentence order
    """
    # CONSISTENCY/BUG FIX: start from a zero hidden state like backwardGRU
    # does; the original used np.random.rand, which made every forward
    # pass nondeterministic.
    h_t_1 = np.zeros(1,)
    sentence_out = []
    for embedding in inputEmbeddings:
        o, h_t_1 = computeCell(embedding, h_t_1, W)
        sentence_out.append(o)
    return sentence_out

def backwardGRU(inputEmbeddings, W):
    """Run the GRU right-to-left over the token sequence.

    :param inputEmbeddings: 2-D array of per-token embedding vectors
    :param W: weight dict forwarded to computeCell
    :return: list of cell outputs, last token first
    """
    hidden = np.zeros(1, )
    outputs = []
    for step in reversed(range(inputEmbeddings.shape[0])):
        o, hidden = computeCell(inputEmbeddings[step], hidden, W)
        outputs.append(o)
    return outputs

def getWordEmbeddings(sentence):
    """
    Look up a 300-d embedding for each whitespace-separated token.

    Tries the laptop model first, then falls back to the restaurant model;
    out-of-vocabulary words keep their zero-initialized row.

    :param sentence: the sentence with blank as its delimiter
    :return word_embeddings: array of shape (n_tokens, 300)
    """
    sentence_list = sentence.split()
    word_embeddings = np.zeros((len(sentence_list), 300))
    print('word embeddings shape is ', word_embeddings.shape)
    for i, word in enumerate(sentence_list):
        # gensim KeyedVectors.get_vector raises KeyError for unknown words;
        # the original bare `except:` also swallowed unrelated failures.
        try:
            word_embeddings[i] = laptop_model.get_vector(word)
        except KeyError:
            try:
                word_embeddings[i] = restaurant_model.get_vector(word)
            except KeyError:
                # Row is already zeros; the original's explicit zero
                # assignment (via get_vector('much').shape) was a no-op.
                pass
    return word_embeddings

def getInputEmbeddings(word_embeddings, position_embeddings):
    """
    :param word_embeddings: array of shape (n_tokens, d_word)
    :param position_embeddings: array of shape (n_tokens, d_pos)
    :return input_embeddings: the row-wise concatenation of word_embeddings
            and position_embeddings, shape (n_tokens, d_word + d_pos)
    :raises ValueError: on mismatched shapes (re-raised after logging)
    """
    try:
        return np.concatenate((word_embeddings, position_embeddings), axis=1)
    except ValueError:
        # np.concatenate raises ValueError on shape mismatch; the original
        # bare `except:` caught everything, hiding unrelated failures.
        # Print the operands for debugging, then propagate.
        print(word_embeddings.shape)
        print(position_embeddings)
        print('error information end.')
        raise
def concatenating(h1, h2):
    """Stack the forward-pass outputs ahead of the backward-pass outputs.

    :param h1: per-token outputs produced by forwardGRU
    :param h2: per-token outputs produced by backwardGRU
    :return: np.concatenate of (h1, h2) along axis 0, with h1 first
    """
    joined = np.concatenate((h1, h2), axis=0)
    return joined

def BiDirectionalGRU(word_embeddings, position_embeddings, W):
    """
    :param word_embeddings: the embeddings of each word in sentence
    :param position_embeddings: the position embeddings of each word in sentence
    :param W: weight dict with 'forward' and 'backward' sub-dicts, each
            holding the update/reset/hidden/output weights for one direction
    :return h: forward-pass outputs followed by backward-pass outputs
    """
    inputs = getInputEmbeddings(word_embeddings, position_embeddings)
    print('input embeddings shape is ', inputs.shape)
    print('====get input embeddings success====')
    forward_states = forwardGRU(inputs, W['forward'])
    print('====forward process completed====\n', forward_states)
    backward_states = backwardGRU(inputs, W['backward'])
    print('====backward process completed====', backward_states)
    # NOTE(review): backwardGRU yields outputs last-token-first and they are
    # concatenated as-is, without reversing — confirm this is intended.
    return concatenating(forward_states, backward_states)