import os
import random
import shutil

from gxl_ai_utils.utils import utils_file
# Detect whether we are running on a Huawei NPU host: `torch_npu` is only
# installed on those machines, so a failed import means we are on a
# non-NPU (lab) host and `is_npu` is flipped to False.
is_npu = True
try:
    import torch
    import torch_npu
except Exception as e:
    print(e)
    is_npu = False

def do_split_file(input_file, num_parts, output_dir):
    """Split ``input_file`` into ``num_parts`` binary part files.

    Each of the first ``num_parts - 1`` parts is ``file_size // num_parts``
    bytes; the last part also receives the remainder, so concatenating all
    parts in index order reproduces the original file exactly.  Parts are
    written as ``<output_dir>/<basename>_<i>.gxl_part``.

    Args:
        input_file: Path of the file to split.
        num_parts: Number of parts to produce (must be >= 1).
        output_dir: Directory that receives the part files (created if needed).

    Raises:
        ValueError: If ``num_parts`` is less than 1.
    """
    if num_parts < 1:
        raise ValueError(f"num_parts must be >= 1, got {num_parts}")
    utils_file.makedir_sil(output_dir)
    # os.path.basename is portable, unlike splitting on '/' by hand.
    base_name = os.path.basename(input_file)
    # Copy in bounded buffers instead of f.read(chunk_size): a single chunk of
    # a checkpoint file can be many gigabytes.
    buffer_size = 64 * 1024 * 1024
    with open(input_file, 'rb') as f:
        f.seek(0, 2)  # move to end of file to measure its size
        file_size = f.tell()
        chunk_size = file_size // num_parts
        utils_file.logging_limit_print(f'chunk_size:{chunk_size}')
        f.seek(0)  # rewind to start before copying
        for i in range(num_parts):
            part_file = f"{output_dir}/{base_name}_{i}.gxl_part"
            utils_file.logging_limit_print(f'part_file:{part_file}')
            # The last part absorbs the remainder when the size does not
            # divide evenly.
            if i == num_parts - 1:
                remaining = file_size - i * chunk_size
            else:
                remaining = chunk_size
            with open(part_file, 'wb') as part:
                while remaining > 0:
                    data = f.read(min(buffer_size, remaining))
                    if not data:  # defensive: file shrank under us
                        break
                    part.write(data)
                    remaining -= len(data)


def do_combine_files(output_file, split_files_dir, old_file_name, num_parts):
    """Reassemble a file from parts produced by ``do_split_file``.

    Concatenates ``<split_files_dir>/<old_file_name>_<i>.gxl_part`` for
    ``i`` in ``0..num_parts-1`` into ``output_file``.

    Args:
        output_file: Path of the combined file to write.
        split_files_dir: Directory containing the ``.gxl_part`` files.
        old_file_name: Original base name, e.g. ``001.tar``.
        num_parts: Number of parts to concatenate, in index order.
    """
    with open(output_file, 'wb') as out:
        for i in range(num_parts):
            part_file = f"{split_files_dir}/{old_file_name}_{i}.gxl_part"
            print(part_file)
            with open(part_file, 'rb') as part:
                # Stream the part instead of part.read(): checkpoint parts
                # can be many gigabytes, and copyfileobj copies in buffers.
                shutil.copyfileobj(part, out)

def do_download_parts(split_files_dir, old_input_name, parts_num, output_dir, num_thread,
                      username="root", password="Fy!mATB@QE", remote_host="139.210.101.41"):
    """Download the split part files from a remote host to a local directory.

    Args:
        split_files_dir: Remote directory holding the ``.gxl_part`` files.
        old_input_name: Original base name, e.g. ``001.tar``.
        parts_num: Number of parts to fetch.
        output_dir: Local directory to download into (created if needed).
        num_thread: Number of parallel download threads.
        username: Remote login user.
        password: Remote login password.
        remote_host: Remote host address.

    SECURITY: the default credentials above are hard-coded secrets checked
    into source (preserved only for backward compatibility).  Move them to
    environment variables or a secrets store and rotate the password.
    """
    utils_file.makedir_sil(output_dir)
    file_list = [f"{split_files_dir}/{old_input_name}_{i}.gxl_part"
                 for i in range(parts_num)]
    # Shuffle the download order — presumably to spread load when several
    # machines pull in parallel; confirm before removing.
    random.shuffle(file_list)
    fake_path = utils_file.do_get_fake_file()
    utils_file.write_list_to_file(file_list, fake_path)
    utils_file.do_sync_files_download_data_multi_thread(
        file_list_path=fake_path,
        username=username,
        password=password,
        remote_host=remote_host,
        local_directory=output_dir,
        num_thread=num_thread,
    )



# --- Script entry -----------------------------------------------------------
# NOTE(review): this runs at import time; consider wrapping it in an
# `if __name__ == "__main__":` guard so the module can be imported safely.
input_ckpt_path, output_split_dir, num_parts, download_split_ckpt_dir, download_ckpt_path =  utils_file.do_get_commandline_param(5, ["input_ckpt_path", "output_split_dir", "num_parts", "download_split_ckpt_dir", "download_ckpt_path"])
utils_file.logging_info(f"input_ckpt_path: {input_ckpt_path}")
utils_file.logging_info(f"output_split_dir: {output_split_dir}")
utils_file.logging_info(f"num_parts: {num_parts}")
utils_file.logging_info(f"download_split_ckpt_dir: {download_split_ckpt_dir}")
utils_file.logging_info(f"download_ckpt_path: {download_ckpt_path}")
num_parts = int(num_parts)  # command-line values arrive as strings

utils_file.logging_info(f"is_npu: {is_npu}")
if is_npu:
    # On the Huawei (NPU) machine: split the checkpoint into parts for transfer.
    do_split_file(input_ckpt_path, num_parts, output_split_dir)
else:
    # On the lab machine: download the parts, then reassemble the checkpoint.
    do_download_parts(output_split_dir, input_ckpt_path.split('/')[-1], num_parts, download_split_ckpt_dir, num_thread=10)
    do_combine_files(download_ckpt_path, download_split_ckpt_dir, input_ckpt_path.split('/')[-1], num_parts)
