# Corpus and model configuration
import os.path
import pickle

import torch

# Run on the first GPU when available, otherwise fall back to CPU.
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")

# Word-segmentation resources: custom user dictionary and stopword list.
user_dict_path = "corpus/user_dict/user_dict.txt"
stopwords_dict_path = "corpus/user_dict/stopwords.txt"
# Classification corpora; the "_by_word" variants are presumably the
# finer-grained (character-level) segmentation — TODO confirm against the
# corpus-preparation code.
classify_corpus_train_path = "corpus/classify/classify_train.txt"
classify_corpus_by_word_train_path = "corpus/classify/classify_train_by_word.txt"
classify_corpus_test_path = "corpus/classify/classify_test.txt"
classify_corpus_by_word_test_path = "corpus/classify/classify_test_by_word.txt"

# Classification model save paths.
classify_model_path = "model/classify.model"
classify_model_path_by_word = "model/classify_by_word.model"

# NOTE(review): the "final" paths are identical to the checkpoint paths above,
# so intermediate saves and the final model overwrite each other — confirm
# this is intended.
classify_model_final_path = "model/classify.model"
classify_model_by_word_final_path = "model/classify_by_word.model"

# chatbot
# chatbot_by_word selects the segmentation granularity; the corpus paths and
# the vocabulary (ws) paths below must agree on the same granularity.
chatbot_batch_size = 128
chatbot_by_word = True
if chatbot_by_word:
    chatbot_input_path = "corpus/chatbot/input_by_word.txt"
    chatbot_target_path = "corpus/chatbot/target_by_word.txt"
else:
    chatbot_input_path = "corpus/chatbot/input.txt"
    chatbot_target_path = "corpus/chatbot/target.txt"

# ws (fitted vocabulary pickles)
# FIX: the branches were inverted — chatbot_by_word=True previously selected
# the NON-by-word vocabularies, mismatching the by-word corpora chosen above
# (and the save paths further down, which use the opposite/correct polarity).
if chatbot_by_word:
    chatbot_ws_input_path = "model/chatbot/ws_input_by_word.model"
    chatbot_ws_target_path = "model/chatbot/ws_target_by_word.model"
else:
    chatbot_ws_input_path = "model/chatbot/ws_input.model"
    chatbot_ws_target_path = "model/chatbot/ws_target.model"

# Load the fitted vocabulary objects for the chatbot encoder/decoder.
# FIX: use context managers so the file handles are closed deterministically
# (the originals left the open handles to the garbage collector).
# NOTE(review): these loads are unguarded — importing this module fails if the
# pickles have not been generated yet; pickle is only safe on trusted,
# project-generated files.
with open(chatbot_ws_input_path, "rb") as _f:
    chatbot_ws_input = pickle.load(_f, encoding="utf-8")
with open(chatbot_ws_target_path, "rb") as _f:
    chatbot_ws_target = pickle.load(_f, encoding="utf-8")

# Maximum sequence lengths: character-level ("by word") sequences need more
# positions than word-level ones for the same sentence.
chatbot_input_max_len = 20 if chatbot_by_word else 12
chatbot_target_max_len = 20 if chatbot_by_word else 12

# Seq2seq network hyper-parameters.
chatbot_embedding_dim = 100
chatbot_encoder_hidden_size = 128  # LSTM units per hidden layer
chatbot_encoder_num_layers = 2  # number of stacked layers
chatbot_decoder_hidden_size = 128  # LSTM units per hidden layer
chatbot_decoder_num_layers = 2  # number of stacked layers
chatbot_dropout = 0.5
chatbot_bidirectional = True
chatbot_teacher_forcing_ratio = 0.7

# Save paths for the seq2seq model and its optimizer state, matching the
# segmentation granularity selected by chatbot_by_word.
if chatbot_by_word:
    chatbot_model_save_path = "model/chatbot/seq2seq_by_word.model"
    chatbot_optimizer_save_path = "model/chatbot/optimizer_by_word.model"
else:
    chatbot_model_save_path = "model/chatbot/seq2seq.model"
    chatbot_optimizer_save_path = "model/chatbot/optimizer.model"

# Decoding / training knobs: beam width for beam search, gradient-clip norm.
beam_width = 3
clip = 0.01

# recall (candidate-retrieval stage)
recall_qa_dict_path = "corpus/dnn/recall/QA.txt"
recall_search_index_path = "model/dnn/recall/search.index"
recall_topk = 4  # number of candidates returned per query
recall_clusters = 10  # number of index clusters — presumably for the search index; confirm against the index builder

# fastText sentence-embedding model save paths (word-level and by-word variants).
recall_fasttext_by_word_save_path = "model/dnn/recall/fasttext_by_word.model"
recall_fasttext_save_path = "model/dnn/recall/fasttext.model"

# Ranking ("sort") stage configuration.
sort_ws_save_path = "model/dnn/sort/sort.model"
sort_ws_by_word_save_path = "model/dnn/sort/sort_by_word.model"
sort_q_path = "corpus/dnn/sort/q_cuted.txt"
sort_q_by_word_path = "corpus/dnn/sort/q_cuted_words.txt"
sort_sim_q_path = "corpus/dnn/sort/sim_q_cuted.txt"
sort_sim_q_by_word_path = "corpus/dnn/sort/sim_q_cuted_words.txt"
sort_target_path = "corpus/dnn/sort/merged_v.txt"

# Load the fitted sort vocabulary if it has been built; None otherwise.
# FIX: use a context manager so the file handle is closed deterministically
# (the original `pickle.load(open(...))` leaked the handle to the GC).
if os.path.isfile(sort_ws_by_word_save_path):
    with open(sort_ws_by_word_save_path, "rb") as _sort_ws_file:
        sort_ws = pickle.load(_sort_ws_file)
else:
    sort_ws = None

sort_batch_size = 128
sort_q_max_len = 20
sort_sim_q_max_len = 20

# Ranking-network hyper-parameters.
sort_input_size = 300
sort_hidden_size = 256
sort_num_layers = 2
sort_bidirectional = True

sort_pooling_stride = 2
sort_pooling_kernel_size = 2
sort_drop_out = 0.3

# NOTE(review): sort_model_save_path equals sort_ws_save_path above — the
# vocabulary pickle and the model weights would share one file and overwrite
# each other; confirm which path is actually intended.
sort_model_save_path = "model/dnn/sort/sort.model"
sort_optimizer_save_path = "model/dnn/sort/optimizer.model"
