# -*- coding: utf-8 -*-
# @Time    : 2022/8/9
# @Author  : szw
# @Email   : 1259577135@qq.com
# @File    : fans_util
# @Software:
# @desc :

# gevent monkey patching must run before any other imports
from gevent import monkey; monkey.patch_all()

import time
from concurrent.futures import ThreadPoolExecutor
from datetime import datetime
from queue import Queue, Empty

import gevent
import requests
from tqdm import tqdm


def now():
    """Return the current local time formatted as 'YYYY-MM-DD HH:MM:SS'."""
    return datetime.now().strftime("%Y-%m-%d %H:%M:%S")


class Base_Model():
    """Gevent-based crawler skeleton.

    Seeds are loaded into a queue, then consumed concurrently by a pool of
    greenlets, with progress reported through a shared tqdm bar.

    Usage: subclass and override ``crawler``, then call ``init_seed``
    followed by ``start``.
    """

    class tqdm_util():
        """Thin wrapper around tqdm that prefixes updates with a timestamp."""

        def __init__(self, total=None, desc=None, alias=None):
            """
            :param total: expected number of updates; defaults to 100 when
                the caller cannot size the task up front
            :param desc: description prefix shown on the bar
            :param alias: optional label kept for callers (not shown by tqdm)
            """
            self.alias = alias
            if total is None:
                total = 100
            self.tq = tqdm(total=total)

            # Fall back to a default description when none is supplied.
            if desc:
                self.desc = desc
            else:
                self.desc = "当前更新进度"

            # Bug fix: the original passed the raw (possibly None) ``desc``
            # instead of the resolved ``self.desc``.
            self.tq.set_description_str(self.desc)

        def update(self, _update: int, suffix=None):
            """Advance the bar by ``_update`` and refresh the description.

            :param _update: increment for the progress counter
            :param suffix: optional prefix (e.g. the worker alias) shown
                before the timestamped description
            """
            desc = f"当前时间:{now()} -------- {self.desc} "
            if suffix:
                desc = f"{suffix} {desc}"
            self.tq.set_description_str(desc)
            self.tq.update(_update)

        def set_desc(self, desc: str):
            # Replace the bar description outright (no timestamp added).
            self.tq.set_description_str(desc)

        def set_alias(self, alias: str):
            self.alias = alias

        def set_total(self, total: int):
            self.tq.total = total

    def __init__(self, event_size=90, alias="爬虫任务"):
        """
        :param event_size: number of greenlets spawned by ``start``
        :param alias: base name used to label each greenlet
        """
        self.event_size = event_size
        self.alias = alias

    def init_seed(self, seed_list):
        """Load the seeds to crawl and size the progress bar accordingly.

        Must be called before ``start``; it creates ``self.seed_list`` and
        ``self.tq``, which the worker loop reads.

        :param seed_list: iterable of seeds (anything ``crawler`` accepts)
        """
        self.tq = self.tqdm_util(total=len(seed_list))
        self.seed_list = Queue()
        for uid in seed_list:
            self.seed_list.put(uid)

        self.process_status = "未开始"

    def crawler(self, seed):
        """Process a single seed; subclasses override this hook."""
        pass

    def start(self):
        """Spawn ``event_size`` worker greenlets and block until all finish."""
        self.process_status = "进行中"
        event_list = []
        for i in range(self.event_size):
            alias = f"{self.alias}-{i}"
            event_list.append(gevent.spawn(self.run, alias))

        gevent.joinall(event_list)

        self.process_status = "完成"

    def get_process_status(self):
        # Current task status string ("未开始" / "进行中" / "完成").
        return self.process_status

    def set_process_status(self, process_status):
        self.process_status = process_status

    def run(self, alias):
        """Worker loop: drain the seed queue, crawling one seed at a time.

        :param alias: label for this worker, shown in progress output
        """
        print(f"{alias} 启动")
        while True:
            # Bug fix: the original checked qsize() then called a blocking
            # get(); with many workers the queue can empty between the two
            # calls and the worker would block forever. get_nowait() makes
            # the drain race-free.
            try:
                seed = self.seed_list.get_nowait()
            except Empty:
                break
            self.crawler(seed)
            self.tq.update(1, alias)

class dy_crawler(Base_Model):
    """Demo crawler: simply echoes each seed it receives."""

    def crawler(self, seed):
        # Override of the Base_Model hook; real fetching logic would go here.
        print(seed)




if __name__ == '__main__':

    # Demo run: three sample seeds consumed by 10 greenlets.
    seed_items = [123, "http://www.baidu.com", "http://www.baidu.com"]
    # event_size=10 -> 10 workers; alias labels each worker in the progress bar.
    spider = dy_crawler(event_size=10, alias="爬虫任务别名")
    spider.init_seed(seed_items)
    spider.start()