from typing import Union

from requests.cookies import RequestsCookieJar

from sam.util.crawler.lite.spider import BaseSpider


class FactorySpider(BaseSpider):
    """Spider for hrs.com.cn product listings.

    Starting from the seed listing page, it queues the numbered list pages
    ``/productlist/0/1.html`` .. ``/productlist/0/{MAX_PAGES}.html`` and
    extracts the product title (``h4``) text from each page.
    """

    # Number of additional numbered list pages to follow after the seed page.
    # Was a hard-coded ``12`` inside parse_url; lifted here so subclasses or
    # callers can tune the crawl depth without touching the logic.
    MAX_PAGES = 12

    def __init__(self):
        super().__init__(name="factory", level="debug")
        # Count of list pages queued so far; drives pagination in parse_url.
        self.index = 0

    def prepare_cookies(self) -> Union[dict, RequestsCookieJar, None]:
        """Return cookies for requests; this spider uses none (None)."""
        # NOTE(review): assumes BaseSpider treats None as "no cookies" — the
        # base class is not visible here, so this mirrors the original no-op.
        pass

    def prepare_proxies(self) -> Union[dict, None]:
        """Return proxies for requests; this spider uses none (None)."""
        pass

    def parse_url(self, url, soup) -> Union[list, None]:
        """Produce the next list-page URL to crawl.

        Returns a single-element list with the next numbered page URL, or
        None once MAX_PAGES pages have been queued (stops pagination).
        The annotation was previously plain ``list`` even though the
        fall-through path returned None.
        """
        self.index += 1
        if self.index <= self.MAX_PAGES:
            return [f"https://www.hrs.com.cn/productlist/0/{self.index}.html"]
        return None

    def parse_data(self, url, soup) -> Union[list, None]:
        """Extract product titles from one listing page.

        Returns the list of title strings, or None when the selector matches
        nothing (empty page or changed layout) — matching the original
        implicit-None behavior, now reflected in the annotation.
        """
        print(url)
        headings = soup.select('body > div.middle-content.row > div > ul > li > a > h4')
        if headings:
            # First child of each <h4> is the title text node.
            return [f"{h.contents[0]}" for h in headings]
        return None


def crawl():
    """Run the factory spider from the product-list landing page."""
    FactorySpider().start("https://www.hrs.com.cn/productlist/0.html")


if __name__ == "__main__":
    # Allow running this module directly as a one-off crawl script.
    crawl()
