import asyncio
from loguru import logger

from com.arcfox.middleware.async_redis_middleware import open_redis
from com.arcfox.util import async_request as requests, constant

from com.arcfox.base.base_spider import BaseSpider, ExecuteType

# https://ugc.map.baidu.com/cube/ncp/boundsearch?bottom_left_point=13486678%2C3561756&top_right_point=13561558%2C3723804&zoom=1
from com.arcfox.yiqing.process.baidu_yiqing_processer import BaiduYiqingProceser
from com.arcfox.yiqing.task.yiqing_task import YiQingTask


class BaiduYiQingSpider(BaseSpider):
    """One-shot spider that pulls Baidu NCP (epidemic) POI data and persists it.

    Fetches the Baidu UGC bound-search endpoint once (the bounding box spans
    the entire coordinate range, so a single request returns all POIs) and
    hands the resulting POI list to ``BaiduYiqingProceser`` for storage.
    """

    def __init__(self):
        super().__init__()
        # Bottom-left (0,0) to top-right (1e9,1e9): one request covers the whole map.
        self.url = "https://ugc.map.baidu.com/cube/ncp/boundsearch?bottom_left_point=0,0&top_right_point=1000000000,1000000000&zoom=12"
        self.processer = BaiduYiqingProceser()
        self.task = YiQingTask()
        # HTTP session, created lazily by init_session() in _crawl_by_task —
        # presumably an aiohttp ClientSession; TODO confirm in BaseSpider.
        self.session = None

    async def _pull_task(self):
        """Signal a single-run crawl: one task, executed once."""
        return True, ExecuteType.ONCE

    def get_headers(self):
        """Return request headers; the Cookie is required by the Baidu UGC endpoint."""
        return {
            'Cookie': 'BAIDUID=4EBDB329AE7AF098EFD4043297CDDA23:FG=1; ugcid=1-655625122161dbba-1649484310%7C898938703'
        }

    async def _crawl_by_task(self, task):
        """Fetch the bound-search endpoint and store the returned POI list.

        Any failure (network, JSON, schema) is logged with its traceback and
        swallowed so the surrounding crawl loop is not interrupted.
        """
        await self.init_session()
        try:
            response = await requests.get(self.session, self.url,
                                          headers=self.get_headers(), timeout=5)
            result_json = response.json()
            logger.info(result_json)
            # code == 0 is the endpoint's success indicator.
            if result_json['code'] == 0:
                items = result_json['result']["poi_list"]
                await self.processer.save_baidu_yiqing_data(items)
        except Exception:
            # logger.exception preserves the traceback, unlike logger.error(e).
            logger.exception("baidu yiqing crawl failed")

    @open_redis
    async def init_data_version(self, client):
        """No data-version bookkeeping is needed for this one-shot spider."""
        pass

if __name__ == "__main__":
    # Run the one-shot crawl to completion on a fresh event loop.
    spider = BaiduYiQingSpider()
    asyncio.run(spider.start_crawl())
