"""
实现通用代理ip爬虫, 拥有基本的代理ip爬虫功能
可以用来被继承并添加特定的方法, 如添加对应特定网站的反爬方法
"""
import requests
from utils.http import get_request_headers
from lxml import etree
from domain import Proxy
# from tqdm import tqdm
import random
import time
from settings import MAX_SLEEP_TIME


class BaseSpider(object):
    """Generic proxy-IP spider with the basic crawling workflow.

    Fetches each page in ``urls``, selects proxy rows with ``group_xpath``
    and extracts per-row fields with the XPaths in ``detail_xpath``.
    Intended to be subclassed to add site-specific anti-scraping tricks.
    """

    def __init__(self, urls: list[str], group_xpath: str, detail_xpath: dict[str, str]):
        # NOTE: original annotation was dict[str: str] (a slice, not a type).
        # detail_xpath must provide the keys 'ip', 'port' and 'area'.
        self.urls = urls
        self.group_xpath = group_xpath
        self.detail_xpath = detail_xpath

    def get_page_from_url(self, url: str) -> bytes:
        """Send a GET request and return the raw response body.

        A timeout is set so a dead or throttling server cannot hang the
        whole crawl indefinitely (the original request had none).
        """
        response = requests.get(url, headers=get_request_headers(), timeout=10)
        return response.content

    def get_info(self, lis) -> str:
        """Return the first element of *lis*, or '' when it is empty."""
        return lis[0] if lis else ''

    def get_proxies_from_page(self, page):
        """Parse one HTML page and yield a ``Proxy`` per matched table row.

        Rows missing the ip or port cell are skipped instead of crashing
        on ``int('')``.
        """
        html = etree.HTML(page)
        for tr in html.xpath(self.group_xpath):
            ip = self.get_info(tr.xpath(self.detail_xpath['ip'])).strip()
            port_text = self.get_info(tr.xpath(self.detail_xpath['port'])).strip()
            area = self.get_info(tr.xpath(self.detail_xpath['area'])).strip()
            if not ip or not port_text:
                # Malformed row — best effort: skip it rather than abort the page.
                continue
            yield Proxy(ip=ip, port=int(port_text), area=area)

    def get_proxies(self):
        """Crawl every URL in order and yield all extracted proxies.

        Sleeps a random interval *before* each request after the first, so
        consecutive requests are actually rate-limited. (The original slept
        after fetching, leaving the first two requests back-to-back.)
        """
        for i, url in enumerate(self.urls):
            if i:  # throttle every request except the very first
                time.sleep(random.uniform(0, MAX_SLEEP_TIME))
            page = self.get_page_from_url(url)
            yield from self.get_proxies_from_page(page)
        # tq.close()


if __name__ == '__main__':
    # Smoke test: crawl the first two listing pages of 66ip.cn and print
    # every proxy extracted from them.
    page_urls = [f'http://www.66ip.cn/{page}.html' for page in range(1, 3)]
    field_xpaths = {
        'ip': './td[1]/text()',
        'port': './td[2]/text()',
        'area': './td[3]/text()'
    }
    spider = BaseSpider(
        urls=page_urls,
        group_xpath=r"//*[text()='端口号']//..//..//*[name()='tr' and position()>1]",
        detail_xpath=field_xpaths,
    )
    for proxy in spider.get_proxies():
        print(proxy)
