import glob
import os.path

from gxl_ai_utils.utils import utils_file


def do_handle(
    input_root_dir: str = "/home/work_nfs13/znlin/wenet_whisper_finetune/examples/wenetspeech/whisper/SOT/test/",
    output_dir: str = "/home/work_nfs8/xlgeng/data/scp_test",
    datasets: list = None,
):
    """Copy each dataset's shard directory and write a list of its tar files.

    For every dataset, the ``shards`` sub-directory under *input_root_dir* is
    copied to ``<output_dir>/<dataset>/shards`` via ``utils_file.do_copy_dir``,
    then the absolute paths of all ``*.tar`` files in the copied directory are
    written (one per line) to ``<output_dir>/<dataset>/shards_list.txt``.

    Args:
        input_root_dir: Root directory containing one sub-directory per dataset.
        output_dir: Destination root; per-dataset output goes under it.
        datasets: Dataset names to process; defaults to the original
            hard-coded trio when None.
    """
    if datasets is None:
        # Preserve the original hard-coded dataset list as the default.
        datasets = ["aishell4", "alimeeting", "ami"]
    for dataset in datasets:
        temp_input_dir = os.path.join(input_root_dir, dataset, "shards")
        output_dir_temp = os.path.join(output_dir, dataset, 'shards')
        utils_file.do_copy_dir(temp_input_dir, output_dir_temp)
        shard_list_path = os.path.join(output_dir, dataset, 'shards_list.txt')
        # Enumerate the tar shards in the *copied* location so the list file
        # references paths that exist under output_dir.
        tar_file_list = glob.glob(os.path.join(output_dir_temp, "*.tar"))
        utils_file.write_list_to_file(tar_file_list, shard_list_path)

def do_handle2(
    output_dir: str = "/home/work_nfs8/asr_data/data/asr_test_sets",
    datasets: list = None,
):
    """Rewrite each dataset's ``text`` scp, replacing ``$`` with ``<SC>``.

    For every dataset: load ``<output_dir>/<dataset>/text`` as a key->value
    dict, back up the original content to ``<output_dir>/<dataset>/text_old``,
    then overwrite ``text`` with every ``$`` in the values replaced by the
    ``<SC>`` speaker-change token.

    NOTE(review): running this twice backs up already-converted text into
    ``text_old`` — confirm whether a re-run guard is desired.

    Args:
        output_dir: Root directory holding one sub-directory per dataset.
        datasets: Dataset names to process; defaults to the original
            hard-coded trio when None.
    """
    if datasets is None:
        datasets = ["aishell4", "alimeeting", "ami"]
    for dataset in datasets:
        text_path_i = os.path.join(output_dir, dataset, 'text')
        text_dict = utils_file.load_dict_from_scp(text_path_i)
        # Idiomatic dict comprehension instead of a manual build loop.
        new_text_dict = {key: value.replace("$", "<SC>")
                         for key, value in text_dict.items()}
        # Write the backup of the original *before* overwriting in place.
        old_text_path = os.path.join(output_dir, dataset, 'text_old')
        utils_file.write_dict_to_scp(text_dict, old_text_path)
        utils_file.write_dict_to_scp(new_text_dict, text_path_i)

# Script entry point: only the text-rewrite pass runs; do_handle() is kept
# for manual use but is not invoked here.
if __name__ == "__main__":
    do_handle2()
