# -*- coding: utf-8 -*-
"""
Created on Wed Sep 30 21:04:48 2020

@author: DELL
"""
import tensorflow as tf
from tensorflow.keras import layers
from Model_modules import Highway_layers,ProjectionLayer,All_Con_MP_Layers,LSTM_Layers

class ELMO(tf.keras.Model):
    def __init__(self, para, word_to_char_ids_matrix, char_ids_to_vector_matrix):
        '''
        Builds the complete ELMo model: a character-level CNN token encoder,
        highway layers, a projection layer, and the LSTM language-model stack.

        Parameters
        ----------
        para : object
            Parameter container holding the hyper-parameters below.

            n_highway_layers : int
                Number of highway layers to stack.
            n_filters : int
                Total number of convolution output channels (summed over all
                kernel sizes).
            model_dim : int
                Dimension of the word vectors fed into the LSTM.
            filters : 2d-list
                Kernel sizes and output channel counts of the convolutions.
            drop_rate : float
                Dropout rate.
            vocab_size : int
                Vocabulary size.
            max_sen_len, max_word_len, char_nums, char_embedding_len : int
                Sequence/word lengths and character-embedding dimensions used
                by the two embedding lookups below.
        word_to_char_ids_matrix : array-like
            Lookup table mapping each word id to its sequence of character
            ids; frozen (trainable=False).
        char_ids_to_vector_matrix : array-like
            Initial character-id -> embedding-vector matrix; trainable, since
            it is randomly initialized upstream.

        Returns (from call)
        -------
        [batch_size, max_sen_len, vocab_size + 1] — predicted probability for
        each word position.
        '''
        # BUG FIX: the original called super().__init__(self), which passes
        # the instance itself as a positional argument to
        # tf.keras.Model.__init__. The zero-argument form is correct.
        super().__init__()

        # Map word ids to their character-id sequences (fixed lookup table).
        self.word_embedding = layers.Embedding(
            input_dim=para.vocab_size + 1,
            output_dim=para.max_word_len,
            input_length=para.max_sen_len,
            weights=[word_to_char_ids_matrix],
            trainable=False,
        )
        # Map character ids to embedding vectors; trainable because the
        # initial matrix is random.
        self.char_embedding = layers.Embedding(
            input_dim=para.char_nums,
            output_dim=para.char_embedding_len,
            input_length=para.max_word_len,
            weights=[char_ids_to_vector_matrix],
            trainable=True,
        )

        self.HighWayLayers = [Highway_layers(para.n_filters)
                              for _ in range(para.n_highway_layers)]
        self.Projection = ProjectionLayer(para.model_dim)
        self.con = All_Con_MP_Layers(para.filters)
        self.lstm = LSTM_Layers(para.model_dim, para.drop_rate, para.vocab_size)

    def call(self, inputs):
        # word ids -> char ids -> char embeddings -> conv + max-pool encoder
        out = self.word_embedding(inputs)
        out = self.char_embedding(out)
        out = self.con(out)
        # Idiom fix: iterate the layer list directly instead of indexing
        # with range(len(...)).
        for highway in self.HighWayLayers:
            out = highway(out)
        out = self.Projection(out)
        out = self.lstm(out)

        return out
        