#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# @Time    : 2019/12/17 上午10:57
# @Author  : fugang_le
# @Software: PyCharm

from keras.models import Model
from keras.layers import Lambda
from keras.layers import Input, LSTM, Embedding, Bidirectional
from keras import backend as K

from src.lstm.config import Config


def exponent_neg_manhattan_distance(left, right):
    """Similarity score in (0, 1]: exp of the negative L1 (Manhattan) distance.

    Identical vectors score 1.0; the score decays toward 0 as the
    distance grows. Returns a tensor of shape (batch, 1).
    """
    l1_distance = K.sum(K.abs(left - right), axis=1, keepdims=True)
    return K.exp(-l1_distance)


def exponent_neg_cosine_distance(left, right):
    """Cosine similarity of the two LSTM output vectors.

    Both inputs are L2-normalised along the feature axis, so summing their
    elementwise product yields the cosine of the angle between them.
    Returns a tensor of shape (batch, 1).

    NOTE(review): despite the name, no exponent is applied here — this is
    plain cosine similarity; confirm whether exp(-...) was intended.
    """
    left_unit = K.l2_normalize(left, axis=-1)
    right_unit = K.l2_normalize(right, axis=-1)
    # Elementwise product + sum along the feature axis == dot product of
    # the unit vectors (equivalent to the original K.prod over a stacked
    # pair, written directly).
    return K.sum(left_unit * right_unit, axis=1, keepdims=True)


def build_model():
    """Build the Siamese LSTM sentence-similarity model.

    Two integer token-id inputs of length ``Config.max_sequence_length``
    pass through one shared Embedding layer and one shared LSTM encoder
    (bidirectional when ``Config.bidirectional`` is set); the model output
    is the cosine similarity of the two encodings, shape (batch, 1).

    Returns:
        keras.models.Model with inputs ``[left, right]`` and a single
        similarity output.
    """
    left_input = Input(shape=(Config.max_sequence_length,), dtype='int32')
    right_input = Input(shape=(Config.max_sequence_length,), dtype='int32')

    # One embedding shared by both branches.
    embedding_layer = Embedding(Config.nb_words,
                                Config.embedding_dim,
                                input_length=Config.max_sequence_length,
                                trainable=True)
    encoded_left = embedding_layer(left_input)
    encoded_right = embedding_layer(right_input)

    # One encoder applied to both branches so the twin towers share weights.
    # Fix: dropout=0.5 was previously applied only in the bidirectional
    # branch; apply the same regularisation in both configurations.
    if Config.bidirectional:
        shared_lstm = Bidirectional(LSTM(Config.num_lstm, dropout=0.5))
    else:
        shared_lstm = LSTM(Config.num_lstm, dropout=0.5)

    left_output = shared_lstm(encoded_left)
    right_output = shared_lstm(encoded_right)

    # Merge the two encodings into a single similarity score per sample.
    cos_distance = Lambda(
        function=lambda pair: exponent_neg_cosine_distance(pair[0], pair[1]),
        output_shape=lambda shapes: (shapes[0][0], 1),
    )([left_output, right_output])

    model = Model(inputs=[left_input, right_input], outputs=[cos_distance])
    return model

    