# -*- coding: utf-8 -*-
"""
:File: database.py
:Author: cfp
:Date: 2025/9/10 09:41
:LastEditTime: 2025/9/10 09:41
:LastEditors: cfp
:Description:
"""

from application.common.redis_url_pool import RedisUrlPool
from application.common.config import Config
from application.models import AmazonListing


class GetFast(RedisUrlPool):
    """Demo crawler: enqueues a batch of slow demo URLs into the Redis URL
    pool and handles the fetched responses via a named callback method."""

    def __init__(self, host="127.0.0.1", db=0, port=6379):
        """Connect the underlying Redis URL pool and configure this crawler.

        :param host: Redis server address.
        :param db: Redis database index.
        :param port: Redis server port.
        """
        super().__init__(host=host, db=db, port=port)
        self._redisKey = "BaiduList"            # Redis key of the URL pool
        self._max_workers = Config.MAX_WORKERS  # number of concurrent crawl tasks

    async def load_url(self, count=10):
        """Enqueue ``count`` demo URL items into the pool.

        http://www.httpbin.org/delay/2 takes ~2 seconds per response, so
        fetching 10 of them sequentially would take ~20 seconds; the async
        pool fetches them concurrently instead.

        :param count: number of URL items to enqueue (defaults to 10,
            matching the original behaviour).
        """
        # NOTE(review): this query result is never used — confirm whether the
        # listings were meant to drive the URL list below instead of the
        # hard-coded demo URL.
        data = await AmazonListing.get_all()
        for _ in range(count):
            url_item = {
                "url": "http://www.httpbin.org/delay/2",
                # Method name on this class that the pool invokes with the response.
                "backfunc": "parse_baidu",
            }
            await self._addurl(url_item)

    async def parse_baidu(self, r):
        """Response callback: log the keys and the fetched HTML length.

        :param r: response mapping from the pool; must contain an ``"html"`` entry.
        """
        print(r.keys())
        print("收到html长度：", len(r["html"]))

    async def run(self):
        """Load the URL items, then start the crawl loop."""
        await self.load_url()    # enqueue the demo URLs
        await self.crawl_main()  # start the crawler workers

