from tqdm import tqdm
import json
import random

from lib import cut
import config

# Path to the Xiaohuangji (小黄鸡) chit-chat corpus (.conv, not yet tokenized)
xiaohuangji_path = r"F:\virtual_environment\AI_Study\AI_study_code\人工智能NLP项目\案例-chat_service" \
                   r"\corpus\classify\origin_corpus/小黄鸡未分词.conv"
# Path to the hand-crafted questions (JSON)
by_hand_path = r"F:\virtual_environment\AI_Study\AI_study_code\人工智能NLP项目\案例-chat_service" \
               r"\corpus\classify\origin_corpus/手动构造的问题.json"
# Path to the crawler-collected questions (CSV)
crawl_path = r"F:\virtual_environment\AI_Study\AI_study_code\人工智能NLP项目\案例-chat_service" \
             r"\corpus\classify\origin_corpus/爬虫抓取的问题.csv"

# Sampling pool for the train/test split: random.choice(flags) == 0 with
# probability 4/5 (-> training set), == 1 with probability 1/5 (-> test set).
flags = [0, 0, 0, 0, 1]  # one fifth goes to the test set, four fifths to the training set


def keywords_in_line(line):
    """Return True if any token in *line* matches a filtered keyword.

    Sentences mentioning the training school's brand or course names are
    dropped so they do not pollute the chat/QA corpus.

    :param line: iterable of word tokens (output of ``cut``)
    :return: True when at least one token is in the keyword set, else False
    """
    # A set gives O(1) membership tests (the original rebuilt a list and
    # scanned it per token, and listed "前端" twice).
    keywords_set = {"传智播客", "传智", "黑马程序员", "黑马", "python",
                    "人工智能", "c语言", "c++", "java", "javaee", "前端",
                    "移动开发", "ui", "ue", "大数据", "软件测试", "php",
                    "h5", "产品经理", "linux", "运维", "go语言", "区块链",
                    "影视制作", "pmp", "项目管理", "新媒体", "小程序"}
    return any(word in keywords_set for word in line)


def process_xiaohuangji(f_train, f_test, by_word=False):
    """Process the Xiaohuangji (小黄鸡) chit-chat corpus.

    The ``.conv`` file alternates "E" separator lines with "M <sentence>"
    lines; only the first "M" line after each "E" (the question side) is
    kept, tokenized, and written with the ``__label__chat`` label.

    :param f_train: writable file handle for the training split
    :param f_test: writable file handle for the test split
    :param by_word: forwarded to ``cut`` — tokenize per character when True
    :return: tuple ``(num_train, num_test)`` of lines written to each split
    """
    num_train = 0
    num_test = 0
    # Initialised before the loop: the original only set `flag` inside the
    # "E" branch, raising UnboundLocalError if the file did not start with
    # an "E" line. Starting at 1 skips content until the first separator.
    flag = 1
    # `with` + lazy iteration closes the handle and avoids loading the
    # whole corpus into memory via readlines().
    with open(xiaohuangji_path, encoding="UTF-8") as f:
        for line in tqdm(f, desc="小黄鸡"):
            if line.startswith("E"):
                flag = 0
                continue
            if flag == 0:
                cut_list = cut(line[1:].strip(), by_word=by_word)
                flag = 1
                if not keywords_in_line(cut_list):
                    out = " ".join(cut_list) + "\t" + "__label__chat"
                    if random.choice(flags) == 0:  # 4/5 probability: training split
                        f_train.write(out + "\n")
                        num_train += 1
                    else:
                        f_test.write(out + "\n")
                        num_test += 1

    return num_train, num_test


def process_by_hand(f_train, f_test, by_word=False):
    """Process the hand-crafted questions (JSON file).

    Each sentence is tokenized and written with the ``__label__qa`` label.
    Sentences containing "校区" (campus) or any filtered keyword are
    skipped.

    :param f_train: writable file handle for the training split
    :param f_test: writable file handle for the test split
    :param by_word: forwarded to ``cut`` — tokenize per character when True
    :return: tuple ``(num_train, num_test)`` of lines written to each split
    """
    num_train = 0
    num_test = 0
    # `with` + json.load: the original left the file handle unclosed.
    with open(by_hand_path, encoding="UTF-8") as f:
        total_lines = json.load(f)
    # NOTE(review): assumes the JSON is {key: [list of line-groups]} with
    # each group iterable of sentences — confirm against the corpus file.
    for key in total_lines:
        for lines in tqdm(total_lines[key], desc="手动构造的问题"):
            for line in lines:
                cut_list = cut(line.strip(), by_word=by_word)
                # Campus-location questions are deliberately excluded.
                if "校区" in cut_list:
                    continue
                if not keywords_in_line(cut_list):
                    out = " ".join(cut_list) + "\t" + "__label__qa" + "\n"
                    if random.choice(flags) == 0:  # 4/5 probability: training split
                        f_train.write(out)
                        num_train += 1
                    else:
                        f_test.write(out)
                        num_test += 1

    return num_train, num_test


def process_crawl(f_train, f_test, by_word=False):
    """Process the crawler-collected questions (one question per line).

    Each line is tokenized and written with the ``__label__qa`` label
    unless it contains a filtered keyword.

    :param f_train: writable file handle for the training split
    :param f_test: writable file handle for the test split
    :param by_word: forwarded to ``cut`` — tokenize per character when True
    :return: tuple ``(num_train, num_test)`` of lines written to each split
    """
    num_train = 0
    num_test = 0
    # `with` + lazy iteration: the original leaked the handle and read the
    # whole file into memory via readlines().
    with open(crawl_path, encoding="UTF-8") as f:
        for line in tqdm(f, desc="爬虫问题"):
            cut_list = cut(line.strip(), by_word=by_word)
            if not keywords_in_line(cut_list):
                out = " ".join(cut_list) + "\t" + "__label__qa" + "\n"
                if random.choice(flags) == 0:  # 4/5 probability: training split
                    f_train.write(out)
                    num_train += 1
                else:
                    f_test.write(out)
                    num_test += 1

    return num_train, num_test


def process():
    """Build the word-level classification corpus (appends to the output files).

    Runs the hand-crafted and crawled processors; the Xiaohuangji chat
    processor is currently disabled. Output files are opened in append
    mode, so repeated runs accumulate lines.
    """
    # Context managers guarantee the handles close even if a processor
    # raises (the original's explicit close() calls would be skipped).
    with open(config.classify_corpus_train_path, "a", encoding="UTF-8") as f_train, \
            open(config.classify_corpus_test_path, "a", encoding="UTF-8") as f_test:
        # Xiaohuangji chat corpus (disabled for now)
        # num_chat_train, num_chat_test = process_xiaohuangji(f_train, f_test)
        # Hand-crafted questions
        num_qa_train, num_qa_test = process_by_hand(f_train, f_test)
        # Crawled questions
        _a, _b = process_crawl(f_train, f_test)

        # Totals are accumulated but currently unused (kept for parity
        # with the by-word variant; could be logged).
        num_qa_train += _a
        num_qa_test += _b


def process_by_word():
    """Build the character-level classification corpus (appends to the output files).

    Same pipeline as :func:`process`, but every processor tokenizes per
    character (``by_word=True``) and writes to the ``*_by_word`` paths.
    """
    # Context managers guarantee the handles close even if a processor
    # raises (the original's explicit close() calls would be skipped).
    with open(config.classify_corpus_train_by_word_path, "a", encoding="UTF-8") as f_train_by_word, \
            open(config.classify_corpus_test_by_word_path, "a", encoding="UTF-8") as f_test_by_word:
        # Xiaohuangji chat corpus
        num_chat_train, num_chat_test = process_xiaohuangji(f_train_by_word, f_test_by_word,
                                                            by_word=True)
        # Hand-crafted questions
        num_qa_train, num_qa_test = process_by_hand(f_train_by_word, f_test_by_word, by_word=True)
        # Crawled questions
        _a, _b = process_crawl(f_train_by_word, f_test_by_word, by_word=True)

        # Totals are accumulated but currently unused (could be logged).
        num_qa_train += _a
        num_qa_test += _b
