from jy.med_qa.med_constant import Med_Const
from tensorflow.keras.layers import Dense, Input
from tensorflow.keras.layers import Embedding, LSTM, Bidirectional
from tensorflow.keras.models import Model
from tensorflow.keras.callbacks import ModelCheckpoint
from jy.med_qa.med_word2vector import build_matrix, get_train_val

# Load the pretrained word-embedding matrix and the train/val/test splits.
embedding_matrix = build_matrix()
x_train, y_train, x_test, y_test, x_val, y_val = get_train_val()

# Embedding layer initialized from the pretrained matrix; the weights are
# fine-tuned during training (trainable=True). Row count of the matrix is
# the vocabulary size.
vocab_size = len(embedding_matrix)
embedding_layer = Embedding(
    vocab_size,
    Med_Const.EMBEDDING_DIM,
    weights=[embedding_matrix],
    input_length=Med_Const.MAX_SEQUENCE_LENGTH,
    trainable=True,
)

# Classifier architecture: token ids -> embeddings -> BiLSTM(100) -> 23-way softmax.
sequence_input = Input(shape=(Med_Const.MAX_SEQUENCE_LENGTH,), dtype='int32')
embedded_sequences = embedding_layer(sequence_input)
lstm_out = Bidirectional(LSTM(100))(embedded_sequences)
preds = Dense(23, activation='softmax')(lstm_out)

model = Model(inputs=sequence_input, outputs=preds)
model.compile(
    optimizer='rmsprop',
    loss='categorical_crossentropy',
    metrics=['acc'],
)

print("Bidirectional LSTM")
model.summary()

# Persist only the best model (highest validation accuracy) seen during
# training. NOTE(review): `monitor='val_acc'` must match the metric name
# logged by compile(metrics=['acc']) — confirm against the installed
# Keras version, which may log it as 'val_accuracy'.
cp = ModelCheckpoint('../jy/mymodel/med.h5', monitor='val_acc', verbose=1,
                     save_best_only=True)

# FIX: the original called model.save() BEFORE fit(), writing the
# UNTRAINED model to the same path the checkpoint writes to. That save
# was mis-ordered and redundant — the checkpoint callback already saves
# the best weights to '../jy/mymodel/med.h5' during training — so it is
# removed.
history = model.fit(x_train, y_train,
                    validation_data=(x_val, y_val),
                    epochs=20, batch_size=16,
                    callbacks=[cp])
