import asyncio
import multiprocessing
import os
from queue import Empty

from lib import http_fetch
from lib.log import DBG, INFO, ERROR, EXCEPTION


# Multiple tasks drain the queue concurrently, implemented with coroutines —
# intended for I/O-heavy workloads. (NOTE(review): original comment was
# ambiguous about when NOT to use this interface — confirm with the author.)

class ProcessQueue(object):
    """Drain a queue with several concurrent asyncio worker tasks.

    Each worker loops: pop one item, ``await async_do_process(item)``.
    A worker exits after the queue has been empty for ``max_empty_count``
    consecutive polls.  Coroutine-based, so suited to I/O-bound processing.
    """

    def __init__(self, queue, queue_params, task_count, async_do_process,
                 max_empty_count=60, idle_sleep=1):
        """
        :param queue: queue to consume; must expose ``get_nowait()`` raising
            ``queue.Empty`` (both ``queue.Queue`` and ``multiprocessing.Queue`` do)
        :param queue_params: extra context, typically attributes of the object
            that created the queue (used only in log output here)
        :param task_count: number of worker tasks consuming the queue concurrently
        :param async_do_process: coroutine function called per item; a ``str``
            return value is treated as a failure message and logged
        :param max_empty_count: consecutive empty polls before a worker stops
            (default 60, matching the previous hard-coded limit)
        :param idle_sleep: seconds to sleep between polls while the queue is
            empty (default 1, matching the previous hard-coded value)
        """
        self.queue_params = queue_params
        self.queue = queue
        self.task_count = task_count
        self.async_do_process = async_do_process
        self.max_empty_count = max_empty_count
        self.idle_sleep = idle_sleep

    async def process_queue_sub_task(self, sub_task_no):
        """Worker coroutine: consume items until the queue stays empty too long.

        :param sub_task_no: worker label, used only in log lines
        :return: None
        """
        DBG(f"启动队列任务 {sub_task_no}: {self.queue_params}....")
        empty_count = 0
        while True:
            if empty_count > self.max_empty_count:
                break

            # EAFP: get_nowait() closes the empty()/get() race where a sibling
            # worker drains the queue between the check and a blocking get(),
            # which would hang this coroutine (and block the event loop) forever.
            try:
                queue_item = self.queue.get_nowait()
            except Empty:
                DBG(f"队列空了({sub_task_no}-->{empty_count})")
                # await (not time.sleep) so the other workers keep running
                await asyncio.sleep(self.idle_sleep)
                empty_count += 1
                continue

            empty_count = 0
            result = await self.async_do_process(queue_item)
            if isinstance(result, str):  # isinstance, not type()==, per PEP 8
                DBG(f"处理失败({sub_task_no}) {result}...")

    async def process_queue_tasks(self):
        """Run ``task_count`` workers concurrently and wait for all to finish.

        :return: None
        """
        tasks = [self.process_queue_sub_task(f"{i + 1}") for i in range(self.task_count)]
        await asyncio.gather(*tasks)

    def run(self):
        """Blocking entry point: drive the worker tasks with ``asyncio.run``."""
        asyncio.run(self.process_queue_tasks())


# class ProcessQueueTest(object):
#     def __init__(self):
#         self.fetch_queue = multiprocessing.Queue()  # {url:xxx, callback:"", type:"fetch"}
#
#     async def do_process_fetch(self, item):
#         url = item
#         stream = False
#         save_path = None
#
#         # content = http_fetch.fetch(url=url, stream=stream, save_path=save_path)
#         content = await http_fetch.async_fetch(url=url, stream=stream, save_path=save_path)
#
#     def fetch_work(self, work_name, work_alias):
#         pid = os.getpid()
#         # pname = psutil.Process(pid).name()
#         DBG(f"加载网页进程启动({work_name}): {work_alias}: {pid}")
#
#         pqt = ProcessQueue(
#             queue=self.fetch_queue,
#             queue_params={"work_name": work_name, "work_alias": work_alias},
#             task_count=5,
#             async_do_process=self.do_process_fetch
#         )
#         pqt.run()
#
#     def run(self):
#
#         for i in range(2, 100):
#             self.fetch_queue.put(f"https://www.baidu.com")
#
#         p_params = [
#             {"name": "fetch", "alias": "请求网页", "target": self.fetch_work}
#         ]
#
#         processes = []
#         for i in range(len(p_params)):
#             p = multiprocessing.Process(
#                 target=p_params[i]["target"],
#                 args=(p_params[i]["name"], p_params[i]["alias"]))
#
#             processes.append(p)
#             p.start()
#
#         for p in processes:
#             p.join()
#
#         self.fetch_queue.close()
#
#         print("======")
#
#
# if __name__ == '__main__':
#     test = ProcessQueueTest()
#     test.run()
