# -*- coding: utf-8 -*-
# @Time    : 2018/3/31 19:24
# @Author  : Tianchiyue
# @File    : ata.py
# @Software: PyCharm Community Edition

from models.model import BaseModel
from keras.layers import Bidirectional, LSTM, Dense, Reshape, Activation, dot
from models.layers import *


class ATA(BaseModel):

    def build(self):
        """
        A Context-aware Attention Network for Interactive Question Answering.

        Builds the ATA graph: a softmax-weighted pooling of the target tokens
        produces a single target vector, which attends over the sentence; the
        resulting attention is combined with a BiLSTM encoding of the sentence.

        Expects the following to be provided by BaseModel (TODO confirm):
            self.target:   embedded target-phrase tensor
            self.sentence: embedded sentence tensor
            self.config:   dict with 'target_nums', 'embedding_dims',
                           'hidden_dims'; optionally 'dropout' and
                           'recurrent_dropout' (both default to 0.2,
                           preserving the original hard-coded values)

        :return: attention-weighted sentence representation tensor to be fed
                 into the downstream classifier head.
        """
        # Strip any propagated Keras mask so Dense/Reshape below apply cleanly.
        target = ClearMaskLayer()(self.target)
        # Score each target token with a learned (bias-free) scalar weight...
        target_weight = Dense(1, use_bias=False, name='target_weights')(target)
        target_weight = Reshape((self.config['target_nums'],))(target_weight)
        # ...then normalize the scores into attention weights: (None, target_nums)
        target_weight = Activation('softmax', name='target_softmax')(target_weight)
        # Weighted sum over target tokens -> one pooled target vector.
        target_rep = dot([target_weight, target], axes=1)
        target_rep = Dense(self.config['embedding_dims'], activation='tanh')(target_rep)
        # Target-conditioned attention over the sentence tokens.
        att_c = AttentionLayer(name='att')([self.sentence, target_rep])
        # BiLSTM sentence encoder. Dropout rates are now configurable via
        # self.config but default to the original 0.2 for compatibility.
        left_x = Bidirectional(LSTM(self.config['hidden_dims'],
                                    dropout=self.config.get('dropout', 0.2),
                                    recurrent_dropout=self.config.get('recurrent_dropout', 0.2),
                                    return_sequences=True))(self.sentence)
        # Attention-weighted sum of the BiLSTM states
        # (presumably (None, 2*hidden_dims) given Bidirectional — verify).
        cr = dot([att_c, left_x], axes=1, name='attention_mul')
        return cr
