from typing import Dict, Any

from tensorflow import keras
from tensorflow.python.keras.callbacks import EarlyStopping, TensorBoard
from tensorflow.python.keras.layers import Attention

import kashgari
from kashgari.embeddings import BERTEmbedding
from kashgari.layers import L
from kashgari.layers.crf import CRF
from kashgari.tasks.labeling.base_model import BaseLabelingModel


def read_file(path):
    """Read a tab-separated sequence-labeling file.

    Each valid line has the form ``"tok1 tok2 ...\tlab1 lab2 ..."`` — a
    whitespace-separated token sequence, a single tab, then the matching
    whitespace-separated label sequence.  Lines that do not split into
    exactly two tab-separated fields are silently skipped.

    Args:
        path: Path to the UTF-8 encoded data file.

    Returns:
        Tuple ``(features, labels)`` of parallel lists; ``features[i]`` is
        the token list and ``labels[i]`` the label list of sample ``i``.
    """
    # BUGFIX: previously initialized as [[]], which injected a spurious
    # empty sample at index 0 of every split.
    features = []
    labels = []
    with open(path, 'r', encoding='utf-8') as f:
        # Iterate lazily instead of materializing the file via readlines().
        for line in f:
            parts = line.split("\t")
            if len(parts) == 2:
                # str.split() with no argument also strips the trailing newline.
                features.append(parts[0].split())
                labels.append(parts[1].split())
    return features, labels


# Load the three dataset splits ("tokens\tlabels" per line; see read_file).
train_data, train_labels = read_file("medicinal_data_indication/train_data.txt")
test_data, test_labels = read_file("medicinal_data_indication/test_data.txt")
valid_data, valid_labels = read_file("medicinal_data_indication/valid_data.txt")

# BERT embedding loaded from a local checkpoint directory (Windows path;
# directory name translates to "pretrained language model/wwm", presumably a
# whole-word-masking BERT — TODO confirm).  trainable=True fine-tunes BERT
# during training; inputs are padded/truncated to 256 tokens.
bert_embed = BERTEmbedding('D:\预训练语言模型\wwm',
                           trainable=True,
                           task=kashgari.LABELING,
                           sequence_length=256)


class BiLSTM_CRF_Model(BaseLabelingModel):
    """Sequence labeling model: BiLSTM -> self-attention -> dense -> CRF."""

    @classmethod
    def get_default_hyper_parameters(cls) -> Dict[str, Dict[str, Any]]:
        """Return the default keyword arguments for each configurable layer.

        Returns:
            Mapping from layer name to the kwargs used to build that layer.
        """
        blstm_conf = {'units': 128, 'return_sequences': True}
        dense_conf = {'units': 64, 'activation': 'tanh'}
        return {'layer_blstm': blstm_conf, 'layer_dense': dense_conf}

    def build_model_arc(self):
        """Assemble the network graph on top of the embedding model."""
        params = self.hyper_parameters
        n_labels = len(self.processor.label2idx)
        base = self.embedding.embed_model

        # Named layers, constructed up front.
        bilstm = L.Bidirectional(L.LSTM(**params['layer_blstm']),
                                 name='layer_blstm')
        dense = L.Dense(**params['layer_dense'], name='layer_dense')
        crf_dense = L.Dense(n_labels, name='layer_crf_dense')
        crf = CRF(n_labels, name='layer_crf')

        # Wire the graph: embedding -> BiLSTM -> self-attention -> dense -> CRF.
        x = bilstm(base.output)
        x = Attention()([x, x])  # query == value: self-attention over BiLSTM states
        x = dense(x)
        x = crf_dense(x)
        out = crf(x)

        # Keep a handle on the CRF layer so compile_model can reach its loss.
        self.layer_crf = crf
        self.tf_model = keras.Model(base.inputs, out)

    def compile_model(self, **kwargs):
        """Compile the model, defaulting to the CRF loss and viterbi accuracy
        when the caller supplies neither."""
        if kwargs.get('loss') is None:
            kwargs['loss'] = self.layer_crf.loss
        if kwargs.get('metrics') is None:
            kwargs['metrics'] = [self.layer_crf.viterbi_accuracy]
        super().compile_model(**kwargs)


# Build the labeling model on top of the BERT embedding.
model = BiLSTM_CRF_Model(bert_embed)
# Stop training once validation loss has failed to improve for 8 epochs.
early_stopping = EarlyStopping(
    monitor='val_loss',
    min_delta=0,
    patience=8,
    verbose=1,
    mode='auto'
)
# TensorBoard callback writing logs (incl. layer images and weight
# histograms every epoch) to ./log_dir.
tb_cb = TensorBoard(log_dir="log_dir", write_images=1, histogram_freq=1)

# Train with the validation split; early stopping will normally end the run
# well before the 500-epoch cap.
model.fit(train_data,
          train_labels,
          valid_data,
          valid_labels,
          epochs=500,
          batch_size=256,
          callbacks=[early_stopping, tb_cb]
          )
# Report test-set metrics, then persist the trained model to ./Indication.
model.evaluate(test_data, test_labels)
model.save("Indication")
