#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time    : 2018/11/12 6:47
# @Author  : lizhen
# @Site    : 
# @File    : PerpareInfo.py
# @Software: PyCharm
import time

import pyautogui as pag
import requests
from lxml import etree
from selenium import webdriver


class PerpareInfo:
    """Collect User-Agent strings and free HTTP/HTTPS proxies from the web.

    NOTE(review): the class name keeps the original (misspelled) spelling
    "PerpareInfo" so existing callers keep working; "PrepareInfo" was
    presumably intended.
    """

    # Prefixes that valid User-Agent strings are expected to start with.
    uas_prefix = ['Mozilla/', 'Nokia']
    # Pages to scrape User-Agent strings from.
    uas_init_urls = ['https://blog.csdn.net/bone_ace/article/details/52476016']
    # Default User-Agent header *value* for our own requests.
    # Bug fix: the original string started with "User-Agent," (the header
    # name), so the header sent in save_proxies() was malformed.
    uas_default = ('Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 '
                   '(KHTML, like Gecko) Chrome/69.0.3497.92 Safari/537.36')
    # Output file per device class for collected User-Agent strings.
    uas_end_file = {'pc': 'doc/useragent_pc.txt'}
    # Free-proxy list categories on data5u.com (substituted into the URL).
    proxies_params = ['gngn', 'gnpt', 'gwgn', 'gwpt']
    proxies_init_url = 'http://www.data5u.com/free/{type}/index.shtml'
    # Proxy schemes we collect, and where each scheme's list is persisted.
    proxies_types = ['http', 'https']
    proxies_type_file = {'http': 'doc/proxy_http.txt', 'https': 'doc/proxy_https.txt'}

    def __init__(self):
        # Collected User-Agent strings.
        self.uas = []
        # Mapping of scheme ('http'/'https') -> list of "ip:port" strings.
        self.proxies = {}

    def save_useragents(self):
        """Open each page in ``uas_init_urls`` in Chrome to expose its content.

        Drives a real browser so JavaScript-gated content ("read more")
        is expanded.  Extraction/persistence is not implemented yet —
        ``__save_useragents`` is still a stub.
        """
        for url in self.uas_init_urls:
            driver = webdriver.Chrome()
            try:
                driver.maximize_window()
                driver.get(url)
                # Scroll so the "read more" button comes into view.
                pag.hotkey('pagedown')
                el = driver.find_element_by_xpath('//*[@id="btn-readmore"]')
                el.click()
                # Give the lazily-loaded content time to render.
                time.sleep(15)
            finally:
                # Bug fix: the original leaked one Chrome process per URL.
                driver.quit()

    def __save_useragents(self):
        """Persist collected User-Agent strings (not implemented yet)."""
        pass

    def save_proxies(self):
        """Scrape free proxies from data5u.com and persist them to disk.

        Keeps only high-anonymity ("高匿") proxies whose reported response
        time is <= 1 second, groups them by scheme, stores the result in
        ``self.proxies`` and writes it out via ``__save_proxies``.
        """
        proxies_http = []
        proxies_https = []
        for param in self.proxies_params:
            url = self.proxies_init_url.format(type=param)
            response = requests.get(url, headers={'User-Agent': self.uas_default})
            html = etree.HTML(response.text)
            # The first <ul> row is the table header; skip it.
            nodes = html.xpath('//ul/li[2]/ul')[1:]
            for node in nodes:
                try:
                    # span[8] holds e.g. "0.5秒"; drop the trailing unit.
                    timeout = node.xpath('./span[8]/li/text()')[0][:-2]
                    anonymous = node.xpath('./span[3]/li/text()')[0]
                    if float(timeout) > 1 or anonymous != '高匿':
                        continue
                    # Normalize so "HTTP " etc. still matches the keys below.
                    proxy_type = node.xpath('./span[4]/li/text()')[0].strip().lower()
                    addr = '{ip}:{port}'.format(ip=node.xpath('./span[1]/li/text()')[0],
                                                port=node.xpath('./span[2]/li/text()')[0])
                except (IndexError, ValueError):
                    # Bug fix: a single malformed row used to raise and abort
                    # the whole scrape; skip the bad row instead.
                    continue
                if proxy_type == 'http':
                    proxies_http.append(addr)
                elif proxy_type == 'https':
                    proxies_https.append(addr)
        self.proxies = {'http': proxies_http, 'https': proxies_https}
        self.__save_proxies()

    def __save_proxies(self):
        """Write each scheme's proxy list to its file, one "ip:port" per line."""
        for proxies_type in self.proxies_types:
            with open(self.proxies_type_file[proxies_type], encoding='utf-8', mode='w') as f:
                for item in self.proxies[proxies_type]:
                    f.write(item + '\n')


prepare_info = PerpareInfo()


def main():
    """Entry point: refresh the proxy list on disk.

    User-Agent scraping is currently disabled; uncomment to re-enable.
    """
    # prepare_info.save_useragents()
    prepare_info.save_proxies()


# Run only when executed as a script, not when imported as a module.
if __name__ == '__main__':
    main()
