"""
将shard包解压到一个目录， 目录内部结构保留shard的的名字
"""
import time

import tqdm
from gxl_ai_utils.utils import utils_file

# Root directory that receives one sub-directory per extracted shard.
output_dir_root = "/home/node27_tmpdata/xlgeng/pachong_10W_data/raw_wav/ximalaya_redian_2T"
utils_file.makedir(output_dir_root)

# Directory holding the input shard archives; collect the top-level
# ``*.tar`` files only (recursive=False) for extraction.
input_shard_dir = "/home/work_nfs15/asr_data/data/pachong_data_shard/ximalaya_redian_2T"
tar_file_list = utils_file.get_file_path_list_for_wav_dir(input_shard_dir, suffix='tar', recursive=False)

def little_func(little_tar_file_path_list):
    """Extract every tar archive in *little_tar_file_path_list*.

    Each archive is decompressed into ``<output_dir_root>/<archive_stem>``
    so the shard name is preserved as a sub-directory name.

    Args:
        little_tar_file_path_list: paths of the ``.tar`` shard files this
            worker is responsible for.
    """
    now = time.time()
    for i, tar_tmp in enumerate(
            tqdm.tqdm(little_tar_file_path_list,
                      total=len(little_tar_file_path_list),
                      desc='extracting tar'),
            start=1):
        file_name_pure = utils_file.get_file_pure_name_from_path(tar_tmp)
        tmp_output_dir = utils_file.join_path(output_dir_root, file_name_pure)
        utils_file.do_decompression_tar(tar_tmp, tmp_output_dir)
        # Report throughput every 10 archives.
        if i % 10 == 0:
            the_time = time.time()
            print(f"进度：{i}/{len(little_tar_file_path_list)}, 每10条 cost {the_time-now}")
            # Reset the window start so the next report measures only the
            # cost of the NEXT 10 archives; previously `now` was never
            # reset, so the message reported cumulative time instead of
            # the advertised per-10-item cost.
            now = the_time


import multiprocessing

# Guard the process-spawning code with ``__main__`` as required by the
# multiprocessing programming guidelines: under the "spawn" start method
# (default on Windows/macOS) each child re-imports this module, and
# without the guard the children would recursively spawn more processes.
if __name__ == "__main__":
    # One worker process per CPU core.
    thread_num = multiprocessing.cpu_count()
    print(f"使用的进程数（线程数）: {thread_num}")
    # Split the tar list into one roughly equal chunk per worker.
    list_list = utils_file.do_split_list(tar_file_list, thread_num)
    processes = []

    # Create and start one extraction process per chunk.
    for little_list in list_list:
        process = multiprocessing.Process(target=little_func, args=(little_list,))
        processes.append(process)
        process.start()

    # Wait for all workers to finish.
    for process in processes:
        process.join()

    print("所有进程已完成")



