import sys
sys.path.append("/data/python_project/qiufengfeng/nlp_tools")
from nlp_tools.corpus.ner.corpus_loader import ChineseDailyNerCorpus,JsonNerCorpus
from nlp_tools.tasks.labeling import BiLSTM_CRF_Model
from nlp_tools.embeddings import BertEmbedding
from nlp_tools.processors import SequenceProcessor,NerLabelProcessor
from nlp_tools.callbacks.eval_callback import NerF1ScoreSaveCallBack
from tensorflow import keras
import os

import sys

# Select platform-specific locations for the pretrained BERT checkpoint and
# the directory where trained NER models are written.
#
# BUG FIX: the original test was `'win' in sys.platform`, which also matches
# macOS (sys.platform == 'darwin' contains the substring 'win') and would
# silently pick the Windows paths there. `startswith('win')` matches only
# Windows platforms ('win32', 'cygwin' is NOT matched; 'win32' is the
# canonical value on Windows).
if sys.platform.startswith('win'):
    bert_model_path = r"F:\pretrain_model\bert\chinese_L-12_H-768_A-12"
    save_path = r'E:\model_output\ner'
else:
    bert_model_path = r'/home/qiufengfeng/nlp/pre_trained_model/chinese_L-12_H-768_A-12/chinese_L-12_H-768_A-12/'
    save_path = '/home/qiufengfeng/nlp/train_models/ner/'
# Vocabulary file shipped alongside the pretrained BERT checkpoint.
bert_model_token_path = os.path.join(bert_model_path, 'vocab.txt')




# Load the train/dev/test NER corpora. All three splits are JSON files
# selected by glob patterns, so all three must be loaded through
# JsonNerCorpus.
#
# BUG FIX: dev and test previously went through
# ChineseDailyNerCorpus.load_data(...), whose interface takes a named split
# ('train'/'eval'/'test'), not a glob of JSON file paths — mixing the two
# loaders on the same file format was almost certainly unintended.
glob_path_test = '/home/qiufengfeng/nlp/nlp_project/github_third/TransformersNer/ner_train/data/*test.json'
glob_path_train = '/home/qiufengfeng/nlp/nlp_project/github_third/TransformersNer/ner_train/data/*train.json'
glob_path_dev = '/home/qiufengfeng/nlp/nlp_project/github_third/TransformersNer/ner_train/data/*dev.json'

json_corpus = JsonNerCorpus()
x_train, y_train = json_corpus.load_data(glob_path_train)
x_eval, y_eval = json_corpus.load_data(glob_path_dev)
x_test, y_test = json_corpus.load_data(glob_path_test)



from nlp_tools.tokenizer.bert_tokenizer import BertTokenizer

# Tokenizer built from the pretrained BERT vocabulary file.
# NOTE(review): `simplified=True` presumably trims the vocab to the tokens
# actually used — confirm against BertTokenizer's implementation.
bert_tokenizer = BertTokenizer(token_dict=bert_model_token_path, simplified=True)

# Processors that convert raw text sequences and NER label sequences into
# model-ready inputs.
sequenceProcessor = SequenceProcessor(text_tokenizer=bert_tokenizer)
labelProcessor = NerLabelProcessor()

# Embedding layer backed by the pretrained BERT checkpoint.
bert_embedding = BertEmbedding(model_folder=bert_model_path)

# BiLSTM-CRF sequence-labeling model on top of the BERT embedding.
model = BiLSTM_CRF_Model(
    embedding=bert_embedding,
    text_processor=sequenceProcessor,
    label_processor=labelProcessor,
)

# Training callbacks: stop early when validation stalls, decay the learning
# rate on plateau, and save the best model by NER F1 on the test set.
early_stop = keras.callbacks.EarlyStopping(patience=10)
reduse_lr_callback = keras.callbacks.ReduceLROnPlateau(factor=0.1, patience=5)
ner_f1_save_callback = NerF1ScoreSaveCallBack(model, x_test, y_test, save_path)

# BUG FIX: reduse_lr_callback was constructed but never passed to fit(), so
# the learning-rate schedule had no effect; it is now in the callback list.
model.fit(
    x_train, y_train, x_eval, y_eval,
    epochs=30,
    callbacks=[early_stop, reduse_lr_callback, ner_f1_save_callback],
    batch_size=64,
)
# model.evaluate(x_test, y_test)

model.save(save_path)