import pickle

from keras.models import Model
from keras.layers import Input, Dense, Embedding, Dropout, Bidirectional, LSTM, TimeDistributed, Activation

from keras_ver.decoder import LSTMDecoder_tag

# Paths and model hyperparameters.
BASE_DIR = '../data/'  # root directory for preprocessed data files
vocab_size = 174743    # number of distinct tokens; index 0 is reserved for padding (mask_zero)
embed_dim = 300        # word-embedding dimensionality
hidden_dim = 300       # LSTM hidden-state size (per direction)
# NOTE(review): removed the unused `embed_layer = Embedding(vocab_size+1, embed_dim)`;
# the model below constructs its own Embedding layer ("txt_embed"), so it was dead code.

# Load the pretrained word-vector matrix used to initialise the embedding layer.
# Presumably shaped (vocab_size + 1, embed_dim) to match the Embedding below —
# TODO confirm against how wv_matrix.pkl is produced.
# NOTE(review): pickle.load is unsafe on untrusted input; acceptable only because
# wv_matrix.pkl is a locally generated artifact.
with open(BASE_DIR + 'wv_matrix.pkl', 'rb') as f:  # was an unclosed open(); context manager fixes the leak
    word_vec = pickle.load(f)

# Input: fixed-length sequences of 50 token ids.
txt_input = Input(shape=(50,), name="txt_input")
# Trainable embedding initialised from the pretrained vectors; mask_zero=True makes
# downstream recurrent layers skip padded (id 0) timesteps.
txt_embed = Embedding(vocab_size + 1, embed_dim, input_length=50,
                      weights=[word_vec],
                      name="txt_embed", trainable=True, mask_zero=True)(txt_input)
txt_drop = Dropout(0.3, name="txt_drop")(txt_embed)

# Bidirectional LSTM encoder; return_sequences=True keeps one state per timestep.
encoder = Bidirectional(LSTM(hidden_dim, return_sequences=True), name="encoder")(txt_drop)

# Project-local decoder layer; emits one hidden vector for each of the 50 timesteps.
decoder = LSTMDecoder_tag(hidden_dim=hidden_dim, output_dim=hidden_dim, output_length=50,
                          return_sequences=True)(encoder)

# Per-timestep projection to 168 tag classes, then softmax over the tag dimension.
tag_dense = TimeDistributed(Dense(168))(decoder)
tag_score = Activation('softmax')(tag_dense)

model = Model(inputs=txt_input, outputs=tag_score)
model.summary()