import os
from nlp_tools.corpus.classify.competition import DataFoundClassify
from nlp_tools.metrics.classification import F1CategoryCallback
from nlp_tools.tasks.classification import ClassificationCnn
from nlp_tools.tasks.classification.cnn_attention_model import CNN_Attention_Model
from nlp_tools.tasks.classification.cls_mlp_model import ClsMlpModel
from nlp_tools.tasks.classification.dpcnn_model import DPCNN_Model
from nlp_tools.tasks.classification.cnn_lstm_model import CNN_LSTM_Model
from nlp_tools.processors.sequence_processor import SequenceProcessor
from nlp_tools.processors.classification.classification_label_processor import ClassificationLabelProcessor
from nlp_tools.tokenizer.hugging_tokenizer import HuggingTokenizer
from nlp_tools.embeddings.hugginface.autoembedding import AutoEmbedding

from nlp_tools.callbacks.classification.f1score_save_callback import F1SaveCallback

import random
import numpy as np
import tensorflow as tf
def seed_tensorflow(seed=42):
    """Seed every RNG source (Python, NumPy, TensorFlow) for reproducible runs.

    Also requests deterministic TF ops via the TF_DETERMINISTIC_OPS env var
    (older TF versions need the tensorflow-determinism package for this).

    :param seed: integer seed applied to all random number generators.
    """
    # NOTE(review): setting PYTHONHASHSEED at runtime does not change str
    # hashing of the already-started interpreter — it only affects subprocesses.
    os.environ['PYTHONHASHSEED'] = str(seed)
    os.environ['TF_DETERMINISTIC_OPS'] = '1'
    random.seed(seed)
    np.random.seed(seed)
    tf.random.set_seed(seed)

# Fix all RNG seeds up front so the training run is repeatable.
seed_tensorflow(2021)
# NOTE(review): defined but never used below — the F1 callback saves to
# model_save_path1 instead; confirm whether this constant is still needed.
model_save_path = 'temp/sentiment'


# Absolute, machine-specific path to the labelled sentiment CSV.
file_name = "/home/fanfanfeng/PycharmProjects/pythonProject/torch_study/sentiment.csv"
# Load the corpus and split it into train/validation sets; max_length=32
# presumably truncates each text to 32 tokens — TODO confirm against
# DataFoundClassify.
(train_data,valid_data) = DataFoundClassify(file_name,split_train_test=True,max_length=32).load_data()



# Pretrained Chinese BERT (whole-word-masking) checkpoint name on the HF hub.
bert_model_path = "hfl/chinese-bert-wwm-ext"
# Binary sentiment labels, mapped to contiguous indices: {"0": 0, "1": 1}.
label_list = ["0","1"]
label_dict = {key:index for index,key in enumerate(label_list)}



text_tokenizer  = HuggingTokenizer(bert_model_path)
# Embedding built from the same checkpoint; the tokenizer's
# model_input_names tells it which input tensors the model expects.
embedding  = AutoEmbedding(bert_model_path,text_tokenizer.tokenizer.model_input_names)

# By default the training data needs no extra word segmentation or
# preprocessing; if it does, override text_tokenizer and the
# corresponding processor.
sequenceProcessor = SequenceProcessor(text_tokenizer=text_tokenizer)
labelProcessor = ClassificationLabelProcessor(vocab2idx=label_dict)
# MLP classification head built on top of the BERT embedding.
# NOTE(review): max_sequence_length=34 is presumably the data max_length (32)
# plus the [CLS]/[SEP] special tokens — TODO confirm.
model = ClsMlpModel(
    embedding=embedding,
    text_processor=sequenceProcessor,
    label_processor=labelProcessor,
    use_rdrop=False,  # R-Drop regularization disabled
    use_FGM=False,    # FGM adversarial training disabled
    max_sequence_length=34,
)



# NOTE(review): a different path than model_save_path above — confirm which
# save location is actually intended.
model_save_path1 = 'contraband/tf_models/train_model_f1_random/'
# Tracks F1 on valid_data during training and saves the best model.
f1_callback = F1SaveCallback(model,model_save_path1,valid_data,label_names=label_list)

# NOTE(review): TensorBoard is imported but never used below, and this reaches
# into keras' private _v2 API — consider removing it or importing the public
# keras.callbacks.TensorBoard instead.
from keras.api._v2.keras.callbacks import  TensorBoard

# Train for 60 epochs; the F1 callback handles checkpointing.
model.fit(train_data,validate_data=valid_data,epochs=60,callbacks=[f1_callback],batch_size=200)

