import asyncio
import json
import time
import os
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), '/root/work/arcfox-crawler/'))
from loguru import logger
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.firefox.service import Service

from com.arcfox.manager.redis_task_manager import RedisTaskManager

# Redis-backed queue named "aqc_cookies" acting as the shared cookie pool;
# save_cookies() pushes into it and crawl_cookies() throttles on its size.
task_manager = RedisTaskManager("aqc_cookies")
async def save_cookies(driver):
    """Extract the BAIDUID and ab_sr cookies from *driver* and persist them.

    The BAIDUID value is stored under the key ``BAIDUID_BFESS`` (the name
    used for the same cookie on Baidu's BFESS domain). If a BAIDUID was
    found, the collected pair is appended to ``cookies.txt`` as one JSON
    line and pushed into the Redis cookie pool.

    Args:
        driver: a live Selenium WebDriver whose session holds the cookies.
    """
    cookies = driver.get_cookies()
    if not cookies:
        return
    my_cookie = {}
    for cookie in cookies:
        # Cookie names are unique per cookie, so elif avoids a pointless
        # second comparison on the matched entry.
        if cookie['name'] == 'BAIDUID':
            my_cookie['BAIDUID_BFESS'] = cookie['value']
        elif cookie['name'] == 'ab_sr':
            my_cookie['ab_sr'] = cookie['value']
    # Only save when BAIDUID is present; ab_sr alone is not a usable cookie.
    if 'BAIDUID_BFESS' in my_cookie:
        logger.info(f"save cookies: {my_cookie}")
        # Explicit encoding so the file is UTF-8 regardless of platform
        # default (Windows would otherwise use a legacy codepage).
        with open("cookies.txt", "a", encoding="utf-8") as f:
            f.write(json.dumps(my_cookie) + "\n")

        await task_manager.add_tasks([my_cookie])


async def crawl_cookies():
    """Continuously harvest aiqicha.baidu.com cookies with headless Firefox.

    Loops forever. When the Redis cookie pool already holds more than 50
    entries, waits 10 seconds and re-checks. Otherwise it launches a fresh
    headless Firefox, performs a company search so Baidu issues the
    BAIDUID/ab_sr cookies, saves them via save_cookies(), and shuts the
    browser down before the next round.
    """
    while True:
        task_size = await task_manager.task_size()
        if task_size > 50:
            # Log first, then wait — the message announces the upcoming
            # 10-second sleep. asyncio.sleep keeps the event loop alive;
            # time.sleep here would block every coroutine on this loop.
            logger.info("Cookie池已满, 休眠10秒后重新检测")
            await asyncio.sleep(10)
            continue
        service = Service(executable_path='./geckodriver.exe')
        options = webdriver.FirefoxOptions()
        options.add_argument('--headless')
        driver = webdriver.Firefox(service=service, options=options)
        try:
            driver.implicitly_wait(3)
            driver.get('https://aiqicha.baidu.com/')
            driver.find_element(By.ID, "aqc-search-input").send_keys("上海欣索霓文化传播有限责任公司")
            # Trigger the search
            driver.find_element(By.CLASS_NAME, "search-btn").click()
            # Open the first company card's detail page
            driver.find_element(By.CLASS_NAME, "company-list").find_elements(By.CLASS_NAME, "card")[0].click()
            await asyncio.sleep(2)
            # Reload so the server sets/refreshes the ab_sr cookie.
            driver.refresh()
            await asyncio.sleep(2)
            await save_cookies(driver)
        finally:
            # Always quit: a failed page interaction must not leak a
            # headless Firefox process between iterations.
            driver.quit()
        await asyncio.sleep(1)


if __name__ == "__main__":
    # Script entry point: run the harvesting loop on a fresh event loop.
    # crawl_cookies() never returns, so this blocks until interrupted.
    asyncio.run(crawl_cookies())
