# This package will contain the spiders of your Scrapy project
#
# Please refer to the documentation for information on how to create and manage
# your spiders.
import multiprocessing


class CustomException(Exception):
    """Custom exception type for this package.

    NOTE(review): not raised anywhere in the code visible here — presumably
    reserved for callers/spiders elsewhere in the project.
    """

    pass


def run_mul(target, max_workers=5, args=()):
    """Run *target* concurrently in ``max_workers`` child processes.

    Starts ``max_workers`` processes that each execute ``target(*args)``
    and blocks until all of them finish.  If the parent receives a
    KeyboardInterrupt (Ctrl+C) while waiting, every still-running child
    is terminated instead.

    Args:
        target: Picklable callable executed in each child process.
        max_workers: Number of child processes to start (default 5).
        args: Positional arguments forwarded to ``target`` (default
            ``()`` — backward-compatible generalization; previously the
            target could only be called with no arguments).

    Returns:
        None.
    """
    tasks = []
    try:
        for _ in range(max_workers):
            sub_process = multiprocessing.Process(target=target, args=args)
            sub_process.start()
            tasks.append(sub_process)
        for task in tasks:
            task.join()
    except KeyboardInterrupt:
        # Parent was interrupted: kill any children that are still running.
        print("主进程检测到 KeyboardInterrupt")
        for task in tasks:
            task.terminate()
        print("所有子进程已终止")
    else:
        print("所有子进程正常完成")
    finally:
        # Reap every child: a no-op after the normal join above, but
        # required after terminate() so no zombie processes are left.
        for task in tasks:
            task.join()
