# -*- coding: utf-8 -*-
# @Time    : 2018/4/2 15:29
# @Author  : Tianchiyue
# @File    : tan.py
# @Software: PyCharm Community Edition

from models.model import BaseModel
from keras.layers import Bidirectional, LSTM, Lambda, dot
from models.layers import *


class TAN(BaseModel):

    def build(self):
        """
        Build the computation graph for the TAN model
        ("Stance Classification with Target-Specific Neural Attention Networks").

        :return: the attention-weighted sentence representation tensor
        """
        # TODO mask
        # Strip the mask from the target tokens, then average them along the
        # time axis into a single target representation (keepdims=True so the
        # result still has a length-1 time dimension for the fuse step below).
        unmasked_target = ClearMaskLayer()(self.target)
        avg_target = Lambda(lambda t: K.mean(t, axis=1, keepdims=True))(unmasked_target)

        # Fuse the averaged target with the sentence sequence and compute
        # per-position attention weights from the fused representation.
        fused = ConnectAspectLayer()([self.sentence, avg_target])
        attention = LocationAttentionLayer(name='tan')(fused)

        # Contextual encoding of the raw sentence with a BiLSTM.
        # NOTE(review): original comment said 140*300 — presumably
        # (seq_len, 2*lstm_output_size); confirm against the config.
        encoded = Bidirectional(
            LSTM(self.config['lstm_output_size'],
                 dropout=self.config['dropout_rate'],
                 recurrent_dropout=self.config['dropout_rate'],
                 return_sequences=True))(self.sentence)

        # Attention-weighted sum over the time axis (axes=1).  # None*300
        return dot([attention, encoded], axes=1, name='attention_mul')
