#!/usr/bin/env python
# encoding: utf-8
'''
@author: aliax
@license: (C) Copyright 2018-2020.
@contact: a1048327635@gmail.com
@file: Test_proxylists.py
@time: 2020/7/12 0012 13:43
@desc:
'''
import threading
import time

import requests
from parsel import Selector

import Config.config as con
import Config.db as db
class Get_proxy():
    """
    Scrape proxy entries from the proxy-list websites configured in
    ``Config.config`` (kuaidaili / qiyun / yundaili / proxylists, ...).
    """

    def get_proxy(self, name):
        """
        Fetch and print proxies from one configured site.

        :param name: key into ``con.website`` selecting which site to scrape
        :raises ValueError: if ``name`` is not a configured website
        :return: None -- proxies are printed as a side effect
        """
        # currentThread() is deprecated since Python 3.10; use current_thread().
        thread_name = threading.current_thread().name
        # Page range to crawl; currently only page 1.
        for page in range(1, 2):
            time.sleep(1)  # be polite: throttle requests to the remote site
            headers = con.headers
            url_template = con.website.get(name)
            if url_template is None:
                # Fail fast with a clear message instead of an opaque
                # AttributeError from calling .format() on None.
                raise ValueError('unknown website name: {}'.format(name))
            url = url_template.format(str(page))
            # timeout prevents a dead/slow site from hanging this thread forever
            data = requests.get(url=url, headers=headers, timeout=10).text
            # Response body is a plain CRLF-separated proxy list; the final
            # split element is an empty string, hence [:-1].
            for proxy in data.split('\r\n')[:-1]:
                print(proxy)
                # db.set('all', proxy, proxy)  # TODO: enable persistence when ready
            # con.log.info('[{}] - [{}] - the page {} success！'.format(thread_name, name, page))
if __name__ == '__main__':
    # Guard the entry point so importing this module does not trigger a
    # network scrape as a side effect; only direct execution runs it.
    g = Get_proxy()
    g.get_proxy('proxylists')