# Script to run N rclone commands on Linux, each in its own process, so the
# available bandwidth is fully used and transfers finish faster.

# 1. Read all commands to run from a file.
# 2. Decide how many rclone processes run concurrently from the CPU core count.
# 3. Every rclone process appends its output to the same log file.
# 4. When an rclone process exits, automatically start a new one from the queue.
# 5. When all rclone processes have exited, the script exits.

import subprocess
import time
from multiprocessing import cpu_count
from queue import Queue
from threading import Thread


def read_commands(file_path):
    """Read shell commands from *file_path*, one command per line.

    Blank lines and comment lines (first non-whitespace character is ``#``)
    are skipped.  The original version kept blank lines as empty strings,
    which would later be launched as empty shell processes, and failed to
    skip comments that were indented.

    Args:
        file_path: Path of the text file listing the commands.

    Returns:
        list[str]: The stripped command lines, in file order.
    """
    commands = []
    with open(file_path, "r") as file:
        for line in file:
            stripped = line.strip()
            # Keep only non-empty, non-comment lines.
            if stripped and not stripped.startswith("#"):
                commands.append(stripped)
    return commands


def run_command(command, log_file):
    """Launch *command* through the shell, appending its output to *log_file*.

    stdout and stderr are merged into the log.  Closing the log handle right
    after the launch is safe: Popen duplicates the file descriptor into the
    child, so the child keeps writing after the parent's handle is closed.

    Args:
        command: Shell command string to execute.
        log_file: Path of the log file to append to.

    Returns:
        subprocess.Popen: The running child process; the caller must reap it.
    """
    log = open(log_file, "a")
    try:
        child = subprocess.Popen(
            command,
            shell=True,
            stdout=log,
            stderr=subprocess.STDOUT,
        )
    finally:
        log.close()
    return child


def monitor_processes(queue, log_file, num_processes):
    """Keep up to *num_processes* commands from *queue* running at once.

    Starts a new process (via ``run_command``) whenever a worker slot is
    free and the queue still holds commands, reaps children that have
    exited, and returns once the queue is drained and every child is gone.

    Args:
        queue: queue.Queue of shell command strings.
        log_file: Path of the shared log file passed to ``run_command``.
        num_processes: Maximum number of concurrently running children.
    """
    active_processes = []
    while not queue.empty() or active_processes:
        # Fill free slots while commands remain in the queue.
        while len(active_processes) < num_processes and not queue.empty():
            command = queue.get()
            process = run_command(command, log_file)
            active_processes.append(process)
            print(f"启动了新的Rclone进程: {command}")

        # Reap finished children; iterate over a copy because we mutate
        # the list while scanning it.
        for process in active_processes[:]:
            if process.poll() is not None:
                active_processes.remove(process)
                print("一个Rclone进程已退出。")

        # Bug fix: without a pause this polling loop busy-spins at 100% CPU
        # for the whole lifetime of the transfers.
        time.sleep(0.5)

    print("所有Rclone进程都已退出。")


def main():
    """Entry point: read commands and run them with bounded parallelism.

    Reads commands from ``commands.sh``, queues them, and lets
    ``monitor_processes`` keep one rclone process per CPU core running,
    all appending to ``rclone_log.txt``.
    """
    commands_file = "commands.sh"
    log_file = "rclone_log.txt"

    commands = read_commands(commands_file)
    commands_queue = Queue()
    # Plain loop: the original list comprehension was used only for its
    # side effects and built a throwaway list of None values.
    for cmd in commands:
        commands_queue.put(cmd)

    # One worker slot per CPU core.
    num_processes = cpu_count()
    print(f"根据CPU核心数，将同时运行 {num_processes} 个Rclone进程。")

    monitor_thread = Thread(target=monitor_processes, args=(commands_queue, log_file, num_processes))
    monitor_thread.start()
    monitor_thread.join()

    print("脚本执行完毕。")


# Run only when executed as a script, not when imported as a module.
if __name__ == "__main__":
    main()
