import requests
from utils.http import get_request_header
from lxml import etree
from domain import Proxy

"""
  1. 在base_spider.py文件中,定义一个BaseSpider类, 继承object
  2. 提供三个类成员变量:
     - urls: 代理IP网址的URL的列表
     - group_xpath: 分组XPATH, 获取包含代理IP信息标签列表的XPATH
     - detail_xpath: 组内XPATH, 获取代理IP详情的信息XPATH, 格式为: {'ip':'xx', 'port':'xx', 'area':'xx'}
  3. 提供初始方法, 传入爬虫URL列表, 分组XPATH, 详情(组内)XPATH
  4. 对外提供一个获取代理IP的方法
     - 遍历URL列表, 获取URL
     - 根据发送请求, 获取页面数据
     - 解析页面, 提取数据, 封装为Proxy对象
     - 返回Proxy对象列表

"""


class BaseSpider(object):
    """Generic XPath-driven spider for free proxy-IP listing sites.

    Class attributes (overridable per subclass or via ``__init__``):
        urls: list of page URLs to crawl.
        group_xpath: XPath selecting the elements that each contain one
            proxy entry (typically table rows).
        detail_xpath: dict of XPaths evaluated relative to each group
            element, with the keys ``'ip'``, ``'port'`` and ``'area'``.
    """

    urls = []
    group_xpath = ''
    detail_xpath = {}

    def __init__(self, urls=None, group_xpath='', detail_xpath=None):
        # None sentinels instead of mutable default arguments ([] / {}),
        # which would be a single object shared across every call.
        # Falsy arguments fall through to the class attributes, matching
        # the original behavior.
        if urls:
            self.urls = urls
        if group_xpath:
            self.group_xpath = group_xpath
        if detail_xpath:
            self.detail_xpath = detail_xpath

    def get_proxies(self):
        """Yield Proxy objects scraped from every configured URL."""
        for url in self.urls:
            content = self.get_response_from_url(url)
            yield from self.get_proxies_from_response(content)

    def get_response_from_url(self, url):
        """Fetch *url* and return the raw response body as bytes.

        A timeout is set so that a single unresponsive site cannot hang
        the whole crawl indefinitely (requests has no default timeout).
        """
        response = requests.get(url, headers=get_request_header(), timeout=10)
        return response.content

    def get_proxies_from_response(self, content):
        """Parse an HTML page and yield one Proxy per group element."""
        ele = etree.HTML(content)
        for tr in ele.xpath(self.group_xpath):
            ip = BaseSpider.get_first(tr.xpath(self.detail_xpath["ip"]))
            port = BaseSpider.get_first(tr.xpath(self.detail_xpath["port"]))
            area = BaseSpider.get_first(tr.xpath(self.detail_xpath["area"]))
            yield Proxy(ip, port, area=area)

    @staticmethod
    def get_first(lis):
        """Return the first element of *lis* stripped, or '' when empty."""
        return lis[0].strip() if lis else ''


if __name__ == '__main__':
    # Smoke test: crawl the first three pages of a free proxy list site
    # and print every proxy that gets extracted.
    page_urls = ['http://www.ip3366.net/free/?stype=1&page={}'.format(i)
                 for i in range(1, 4)]

    spider = BaseSpider(
        urls=page_urls,
        group_xpath='//*[@id="list"]/table/tbody/tr',
        detail_xpath={'ip': './td[1]/text()',
                      'port': './td[2]/text()',
                      'area': './td[5]/text()'},
    )

    for proxy in spider.get_proxies():
        print(proxy)
