#!/usr/bin/env python 
# -*- coding: utf-8 -*-
# @Time    : 2023/3/4 17:49
# @Author  : WJQ
# @Site    : 
# @File    : crawl.py
# @Software: PyCharm
from typing import Dict, List, Optional, Union

from browser.driver import DriverHandler
from browser.driver_wx import DriverHandlerWx
from browser.driver_zfb import DriverHandlerZfb
from browser.driver_kuaiapp import DriverHandlerKApp
from crawler.modules.click_handler import ClickHandler
from crawler.modules.form_handler import FormHandler
from crawler.modules.popup_handler import PopupHandler


class Crawler:
    """Base crawler that navigates pages through a platform-specific driver.

    Holds one driver handler plus the popup/form/click sub-handlers that
    operate through it. Concrete subclasses implement ``crawl_pages`` with
    their own page-crawling strategy.
    """

    def __init__(self, driver_handler):
        """Initialize the crawler and its sub-handlers.

        :param driver_handler: one of the supported driver handlers
            (``DriverHandler``, ``DriverHandlerWx``, ``DriverHandlerZfb``,
            or ``DriverHandlerKApp``).
        """
        # FIX: Optional[A, B, C, D] is invalid typing syntax (Optional takes
        # exactly one argument); Union is the correct construct here.
        self.driver_handler: Union[DriverHandler, DriverHandlerWx, DriverHandlerZfb, DriverHandlerKApp] = driver_handler
        # Sub-handlers all share the same underlying driver instance.
        self.popup_handler = PopupHandler(self.driver_handler)
        self.form_handler = FormHandler(self.driver_handler)
        self.click_handler = ClickHandler(self.driver_handler)
        # Whether a login has been completed in this session.
        self.login_state = False
        # Elements collected so far during crawling.
        self.collect_element_list: List = []
        self.driver_path: str = ""
        self.driver_query: str = ""
        self.node_id: int = 0
        # Timing info keyed by path — schema depends on subclass usage.
        self.path_time: Dict = {}

    def crawl_pages(self, click_element, previous_title):
        """Crawl the new page opened for the input request.

        Different crawler implementations use their own strategy to crawl
        the page newly created from the input request. To be overridden by
        subclasses; this base implementation does nothing.

        :param click_element: the element whose click produced the new page.
        :param previous_title: title of the page before the click.
        :return: None
        """
        pass