# -*- coding: utf-8 -*-  
'''
seq2seq零件

    - Encoder
    
    - Decoder

@author: luoyi
Created on 2021年3月17日
'''
import tensorflow as tf


#    编码器
class EncoderSubModel(tf.keras.Model):
    """Seq2seq encoder: token ids -> embedding -> GRU.

    Produces the per-timestep GRU outputs (used as attention values by the
    decoder) and the final GRU hidden state (used to initialize the decoder).
    """
    def __init__(self,
                 name='EncoderSubModel',
                 dict_w2i=None,
                 embedding_dims=10,
                 hidden_units=4,
                 **kwargs):
        """
        :param name: model name
        :param dict_w2i: word->index vocabulary dict; its size is the embedding input_dim
        :param embedding_dims: embedding output dimension
        :param hidden_units: number of GRU hidden units
        """
        super(EncoderSubModel, self).__init__(name=name, **kwargs)
        #    avoid the shared-mutable-default-argument pitfall
        dict_w2i = {} if dict_w2i is None else dict_w2i
        
        #    token id -> dense vector.
        #    FIX: dict has no .count() method — vocabulary size is len(dict_w2i).
        self._embedding = tf.keras.layers.Embedding(name='encoder_embedding',
                                                    input_dim=len(dict_w2i),
                                                    output_dim=embedding_dims,
                                                    mask_zero=True)
        #    GRU over the embedded sequence; returns all timestep outputs plus
        #    the final state
        self._grus = tf.keras.layers.GRU(name='encoder_grus',
                                         units=hidden_units,
                                         return_sequences=True,
                                         return_state=True,
                                         kernel_initializer=tf.initializers.he_normal(),
                                         bias_initializer=tf.initializers.zeros())
        pass
    
    def call(self, encoder_inputs, training=None, mask=None):
        """Encode a batch of token-id sequences.

        :param encoder_inputs: integer tensor of token ids, shape (batch, seq_len)
        :return: (encoder_outputs, state) — per-timestep outputs
                 (batch, seq_len, hidden_units) and the final GRU state
                 (batch, hidden_units)
        """
        #    FIX: layers are stored as self._embedding / self._grus; the original
        #    referenced non-existent self._encoder_embedding / self._encoder_grus.
        x = self._embedding(encoder_inputs)
        #    FIX: a GRU has a single state tensor (unlike an LSTM's h and c),
        #    so return_state=True yields exactly (outputs, state).
        encoder_outputs, state = self._grus(x)
        return encoder_outputs, state
    pass


#    解码器
class DecoderSubModel(tf.keras.Model):
    """Seq2seq decoder: token ids -> embedding -> GRU -> attention over encoder.

    The GRU is initialized from a state handed in by the caller (typically the
    encoder's final state, or the decoder's own previous state during stepwise
    decoding), and dot-product attention mixes the decoder outputs with the
    encoder outputs.
    """
    def __init__(self,
                 name='DecoderSubModel',
                 dict_w2i=None,
                 embedding_dims=10,
                 hidden_units=4,
                 **kwargs):
        """
        :param name: model name
        :param dict_w2i: word->index vocabulary dict; its size is the embedding input_dim
        :param embedding_dims: embedding output dimension
        :param hidden_units: number of GRU hidden units
        """
        super(DecoderSubModel, self).__init__(name=name, **kwargs)
        #    avoid the shared-mutable-default-argument pitfall
        dict_w2i = {} if dict_w2i is None else dict_w2i
        
        #    token id -> dense vector.
        #    FIX: dict has no .count() method — vocabulary size is len(dict_w2i).
        self._embedding = tf.keras.layers.Embedding(name='decoder_embedding',
                                                    input_dim=len(dict_w2i),
                                                    output_dim=embedding_dims,
                                                    mask_zero=True)
        #    GRU over the embedded target sequence
        self._grus = tf.keras.layers.GRU(name='decoder_grus',
                                         units=hidden_units,
                                         return_sequences=True,
                                         return_state=True,
                                         kernel_initializer=tf.initializers.he_normal(),
                                         bias_initializer=tf.initializers.zeros())
        #    dot-product attention (query = decoder outputs, value = encoder outputs)
        self._attention = tf.keras.layers.Attention()
        pass
    
    def call(self, encoder_outputs, decoder_inputs, decoder_state_hc, training=None, mask=None):
        """Decode one batch of target sequences with attention.

        :param encoder_outputs: encoder per-timestep outputs, attention values
        :param decoder_inputs: integer tensor of target token ids
        :param decoder_state_hc: initial GRU state — a single tensor
                                 (batch, hidden_units); name kept for
                                 backward compatibility even though a GRU
                                 has no separate h/c states
        :return: (decoder_outputs, decoder_state) — attention-weighted outputs
                 and the final GRU state
        """
        x = self._embedding(decoder_inputs)
        #    FIX: a GRU takes/returns a single state tensor (not an LSTM's
        #    (h, c) pair), so return_state=True yields exactly (outputs, state).
        decoder_outputs, decoder_state = self._grus(x, initial_state=decoder_state_hc)
        #    Attention([query, value]): decoder outputs attend over encoder outputs
        decoder_outputs = self._attention([decoder_outputs, encoder_outputs])
        return decoder_outputs, decoder_state
    pass

