import os
import random
import re
import string

import tqdm
from gxl_ai_utils.utils import utils_file
from torch.utils.data import DistributedSampler

# A pool of common Chinese words (pronouns, verbs, time words, adjectives,
# nouns...) used by generate_sentence() to assemble synthetic sentences.
words = ["我", "你", "他", "她", "它", "我们", "你们", "他们", "她们", "它们",
         "喜欢", "爱", "讨厌", "吃", "喝", "玩", "学习", "工作", "休息",
         "今天", "昨天", "明天", "早上", "中午", "晚上", "下午",
         "很", "非常", "有时候", "总是", "从来不", "一直",
         "快乐", "悲伤", "兴奋", "紧张", "害怕", "愤怒", "困惑",
         "天空", "大海", "山川", "花草", "小鸟", "猫狗", "老鼠",
         "城市", "乡村", "学校", "医院", "超市", "火车", "汽车",
         "书籍", "音乐", "电影", "游戏", "运动", "旅行", "冒险",
         "梦想", "希望", "努力", "坚持", "成功", "失败", "幸福",
         "家庭", "友谊", "爱情", "婚姻", "孤独", "自由", "责任",
         "未来", "过去", "现在", "时光", "岁月", "生活", "人生"]


def generate_sentence():
    """Build one synthetic sentence by concatenating 5-15 randomly chosen
    words from the module-level ``words`` pool (no separators)."""
    word_count = random.randint(5, 15)
    chosen = [random.choice(words) for _ in range(word_count)]
    return ''.join(chosen)


def _write_random_corpus(output_file, num_sentences):
    """Generate `num_sentences` random sentences and write them, one per
    line, to `output_file` via utils_file."""
    sentences = [generate_sentence() for _ in range(num_sentences)]
    utils_file.write_list_to_file(sentences, output_file)


def make_test_data():
    """Create three synthetic corpora (train/dev/test) of random sentences.

    Writes ./train_test.txt (100k lines), ./dev_test.txt (1k lines) and
    ./test_test.txt (1k lines). The original body repeated the same
    generate-and-write loop three times; it is now a single helper.
    """
    _write_random_corpus("./train_test.txt", 100000)
    _write_random_corpus("./dev_test.txt", 1000)
    _write_random_corpus("./test_test.txt", 1000)


def prepare_data():
    """Pool transcript lines from every dataset under the ASR corpus root,
    then split them 95% / 4% / 1% into ./train.txt, ./dev.txt, ./test.txt.

    Each dataset directory is expected to contain a kaldi-style ``text``
    scp file; directories without one are skipped with a warning.
    """
    asr_data_dir = "/home/work_nfs5_ssd/hfxue/data/data4w/source_1"
    all_texts = []
    for dirname in os.listdir(asr_data_dir):
        text_path = os.path.join(asr_data_dir, dirname, "text")
        if not os.path.exists(text_path):
            utils_file.logging_print(f"warning:{text_path} not exists")
            continue
        utils_file.logging_print(f"开始处理:text_path:{text_path}")
        # load_dict_from_scp maps utterance-id -> transcript; keep only texts.
        all_texts.extend(utils_file.load_dict_from_scp(text_path).values())
    total = len(all_texts)
    utils_file.logging_print(f"all_lens:{total}")
    cut_train = int(total * 0.95)
    cut_dev = int(total * 0.99)
    utils_file.write_list_to_file(all_texts[:cut_train], "./train.txt")
    utils_file.write_list_to_file(all_texts[cut_train:cut_dev], "./dev.txt")
    utils_file.write_list_to_file(all_texts[cut_dev:], "./test.txt")


# Compiled once at import time; the original recompiled/looked up the pattern
# on every call, and this predicate runs per-sentence over 100k+ lines.
_ASCII_LETTER_RE = re.compile(r'[a-zA-Z]')


def contains_letters(s):
    """Return True if `s` contains at least one ASCII letter (a-z or A-Z).

    Intentionally ASCII-only: CJK characters are Unicode-alphabetic, so
    ``str.isalpha`` would not work here.
    """
    return bool(_ASCII_LETTER_RE.search(s))
def _drop_sentences_with_letters(sentences):
    """Return only the sentences containing no ASCII letters, with a
    tqdm progress bar over the input."""
    return [s for s in tqdm.tqdm(sentences, total=len(sentences))
            if not contains_letters(s)]


def filter_data():
    """Remove every sentence containing ASCII letters from the train/dev/test
    splits and write the cleaned lists to ./new_train.txt, ./new_dev.txt and
    ./new_test.txt.

    The original body repeated the same filter loop three times; it is now a
    single helper applied to each split.
    """
    train_list = utils_file.load_list_file_clean("./train.txt")
    dev_list = utils_file.load_list_file_clean("./dev.txt")
    test_list = utils_file.load_list_file_clean("./test.txt")
    utils_file.write_list_to_file(
        _drop_sentences_with_letters(train_list), "./new_train.txt")
    utils_file.write_list_to_file(
        _drop_sentences_with_letters(dev_list), "./new_dev.txt")
    utils_file.write_list_to_file(
        _drop_sentences_with_letters(test_list), "./new_test.txt")


def chongxin_fenpei_bili_of_data():
    """Re-split the filtered data with new proportions.

    Pools new_train/new_dev/new_test, then re-partitions the combined list
    as 99.9% train, the next 0.09% test, and the final 0.01% dev, writing
    the results back over the same new_*.txt files.

    NOTE(review): the middle slice goes to *test* and the tail to *dev*,
    the opposite order from prepare_data()'s dev-then-test split — confirm
    this asymmetry is intentional.
    """
    pool = (utils_file.load_list_file_clean("./new_train.txt")
            + utils_file.load_list_file_clean("./new_dev.txt")
            + utils_file.load_list_file_clean("./new_test.txt"))
    n = len(pool)
    first_cut = int(n * 0.999)
    second_cut = int(n * 0.9999)
    utils_file.write_list_to_file(pool[:first_cut], "./new_train.txt")
    utils_file.write_list_to_file(pool[second_cut:], "./new_dev.txt")
    utils_file.write_list_to_file(pool[first_cut:second_cut], "./new_test.txt")

if __name__ == "__main__":
    # Entry point: currently runs only the re-split step; the earlier stages
    # (prepare_data, filter_data) are invoked by editing this call.
    chongxin_fenpei_bili_of_data()
