#!/usr/bin/env python
# encoding: utf-8
'''
@author: aliax
@license: (C) Copyright 2018-2020.
@contact: a1048327635@gmail.com
@file: Test_89ip.py
@time: 2020/7/12 0012 15:01
@desc:
'''
import threading
import time

import requests
from parsel import Selector

import Config.config as con
import Config.db as db



class Get_proxy():
    """
    Scrape proxy (ip:port) entries from a proxy-listing website
    configured in Config.config (``con.website``).
    """

    def get_proxy(self, name, max_page=2000):
        """
        Crawl proxy ip:port pairs from the site selected by *name*.

        :param name: key into ``con.website`` choosing which site to crawl;
                     its URL template is formatted with the page number
        :param max_page: last page number to request (default 2000, matching
                         the original hard-coded ``range(1, 2001)``)
        :return: None — results are only logged; the Redis writes via ``db``
                 are currently commented out
        """
        # threading.currentThread() is deprecated; current_thread() is the
        # supported spelling and behaves identically.
        thread_name = threading.current_thread().name
        for page in range(1, max_page + 1):
            time.sleep(1)  # throttle between page requests
            headers = con.headers
            url = con.website.get(name).format(str(page))
            # timeout so a stalled server cannot hang this thread forever
            data = requests.get(url=url, headers=headers, timeout=10).text
            par = Selector(data)
            tr_par = par.xpath('//table[@class="layui-table"]/tbody/tr')
            # Second-stage extraction: ip and port from each table row.
            for tr in tr_par:
                proxy_ip = tr.xpath('./td[1]/text()').extract_first()
                proxy_port = tr.xpath('./td[2]/text()').extract_first()
                if not proxy_ip or not proxy_port:
                    # Malformed row: extract_first() returned None, which
                    # would previously crash on .strip(). Skip it instead.
                    continue
                # "ip:port" — used as the key in Redis when persistence
                # (db.set, currently disabled) is re-enabled.
                proxy = proxy_ip.strip() + ':' + proxy_port.strip()
                # db.delete("all", proxy)
                # db.set("all", proxy, proxy)
                con.log.info('[{}] - [{}] {} 爬取成功'.format(thread_name, name, proxy))
                time.sleep(0.5)  # throttle between rows
# Guard the crawl behind __main__ so merely importing this module does not
# kick off a 2000-page scrape as a side effect.
if __name__ == '__main__':
    g = Get_proxy()
    g.get_proxy("89代理")