"""
配置文件
"""
import torch
import pickle


device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

# ------- User dictionaries ---------
# Custom keyword dictionary and stop-word list for tokenization.
user_dict_path = "corpus/user_dict/keywords.txt"
stopword_path = "corpus/user_dict/stopwords.txt"
# Classification corpora; the "_by_word" variants are tokenized at the
# single-character/word level (presumably — verify against the corpus
# preparation scripts).
classify_corpus_train_path = "corpus/classify/classify_train.txt"
classify_corpus_test_path = "corpus/classify/classify_test.txt"
classify_corpus_train_by_word_path = "corpus/classify/classify_train_by_word.txt"
classify_corpus_test_by_word_path = "corpus/classify/classify_test_by_word.txt"

# --------- Classification-related -------------
# Saved classifier models (sentence-level and word-level variants).
classify_model_path = "model/classify.model"
classify_model_by_word_path = "model/classify_by_word.model"

# --------- Chatbot-related ---------
# Parallel input/target corpus for the seq2seq chatbot.
# NOTE(review): these are absolute, machine-specific Windows paths
# (unlike every other path in this file, which is relative) — consider
# making them relative so the project is portable.
chatbot_input_by_word_path = r"F:\virtual_environment\AI_Study\AI_study_code" \
             r"\人工智能NLP项目\案例-chat_service\corpus\chatbot/input_by_word.txt"
chatbot_target_by_word_path = r"F:\virtual_environment\AI_Study\AI_study_code" \
              r"\人工智能NLP项目\案例-chat_service\corpus\chatbot/target_by_word.txt"

# --------- Word-sequence (ws) vocab models -----------
# Pickled word<->index mapping models for encoder input and decoder target.
chatbot_ws_input_by_word_model_path = "model/chatbot/input_by_word_model.pkl"
chatbot_ws_target_by_word_model_path = "model/chatbot/target_by_word_model.pkl"

# Load the pickled word-sequence models once at import time.
# Fixed: the original used pickle.load(open(...)) and never closed the
# file handles; `with` guarantees they are closed even if loading fails.
# NOTE(review): pickle is unsafe on untrusted data — acceptable here
# only because these are project-local artifacts.
with open(chatbot_ws_input_by_word_model_path, "rb") as _f:
    chatbot_ws_input_by_word_model = pickle.load(_f)
with open(chatbot_ws_target_by_word_model_path, "rb") as _f:
    chatbot_ws_target_by_word_model = pickle.load(_f)

# Training batch size and maximum sequence length (longer sequences are
# presumably truncated/padded by the dataset code — verify there).
chatbot_batch_size = 128
chatbot_max_len = 20

# ----------- Embedding-related ---------------
chatbot_embedding_dim = 300

# ----------- GRU-related -------------
chatbot_encoder_hidden_size = 128
chatbot_encoder_num_layers = 1
chatbot_decoder_hidden_size = 128
chatbot_decoder_num_layers = 1

# ---------- Model training and evaluation -------------
# NOTE(review): "chabot" looks like a typo for "chatbot"; left unchanged
# because existing saved artifacts may already use this file name.
chatbot_save_model_path = "model/chatbot/chabot_train_model.model"
chatbot_save_optimizer_path = "model/chatbot/chabot_train_optimizer.model"

# Probability of using teacher forcing during seq2seq training.
chatbot_teacher_forcing_rate = 0.7

# ------------- Beam-search-related ----------
chatbot_beam_width = 3

# ------------- Recall-related -------------
# QA corpora and the saved search-index models (TF-IDF, BM25, fastText).
recall_corpus_tfidf_path = "corpus/dnn/recall/qa.json"
recall_corpus_bm25_path = "corpus/dnn/recall/qa_bm25.json"
recall_search_index_tfidf_model_path = "model/dnn/recall/search_index_tfidf.model"
recall_search_index_bm25_model_path = "model/dnn/recall/search_index_bm25.model"
recall_search_index_fasttext_model_path = "model/dnn/recall/search_index_fasttext.model"
# Number of candidates returned by recall, and cluster count for the index.
recall_topk = 10
recall_clusters = 8

# fastText vectorizer: training data and saved-model location.
recall_fasttext_model_train_data_path = "corpus/dnn/sort/sim_q_cut_by_word.txt"
recall_fasttext_model_save_path = "model/dnn/recall/fasttext_vectorizer.model"

# ------------ Sort (ranking)-related ------------------
# Question / similar-question / label files for the ranking model.
sort_q_data_path = "corpus/dnn/sort/q_cut_by_word.txt"
sort_sim_q_data_path = "corpus/dnn/sort/sim_q_cut_by_word.txt"
sort_v_data_path = "corpus/dnn/sort/target.txt"

# Pickled word-sequence models for the two ranking inputs.
sort_q_ws_model_path = "model/dnn/sort/sort_q.pkl"
sort_sim_q_ws_model_path = "model/dnn/sort/sort_sim_q.pkl"

# Load the pickled ranking word-sequence models once at import time.
# Fixed: the original used pickle.load(open(...)) and never closed the
# file handles; `with` guarantees they are closed even if loading fails.
with open(sort_q_ws_model_path, mode="rb") as _f:
    sort_q_ws_model = pickle.load(_f)
with open(sort_sim_q_ws_model_path, mode="rb") as _f:
    sort_sim_q_ws_model = pickle.load(_f)

# Ranking-model hyper-parameters.
sort_max_len = 20
sort_batch_size = 128
sort_embedding_dim = 300
sort_hidden_size = 256
sort_num_layers = 2
sort_drop_out = 0.3

# Saved ranking model and optimizer state.
sort_save_model_path = "model/dnn/sort/train.model"
sort_save_optimizer_path = "model/dnn/sort/optimizer.model"




