#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time    : 2018/11/12 7:12
# @Author  : lizhen
# @Site    : 
# @File    : ExtractInfo.py
# @Software: PyCharm
import inspect
import random
import requests
import os

from Utils.PrepareInfo import prepare_info


class ExtractInfo:
    """Supply randomized User-Agent headers and validated proxies for HTTP requests."""

    # Maps a device category to a file holding one User-Agent string per line.
    ua_end_file = {'pc': 'doc/useragent_pc.txt'}
    # Fallback header, also used for the HEAD request that validates proxies.
    ua_default = {
        'User-Agent': 'User-Agent,Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/69.0.3497.92 Safari/537.36'}
    # Maps a proxy scheme to the file holding candidate proxies for that scheme.
    proxy_type_file = {'http': 'doc/proxy_http.txt', 'https': 'doc/proxy_https.txt'}

    def get_useragent(self, ua_end='pc'):
        """Return a random User-Agent header dict.

        :param ua_end: key into ``ua_end_file`` selecting the device
            category (currently only ``'pc'``).
        :return: ``{'User-Agent': <agent string>}``; falls back to a copy of
            ``ua_default`` when the configured file is empty.
        """
        with open(self.ua_end_file[ua_end], encoding='utf-8', mode='r') as f:
            user_agents = f.readlines()
        # BUGFIX: the original spun forever (`while True` + `continue`) on an
        # empty file; return the default header instead.
        if not user_agents:
            return dict(self.ua_default)
        return {'User-Agent': random.choice(user_agents).strip()}

    def get_proxy(self, proxy_type='http', server='https://www.baidu.com/'):
        """Return a random working proxy mapping, e.g. ``{'http': 'ip:port'}``.

        Candidates are read from the file(s) for ``proxy_type`` (both schemes
        when ``proxy_type`` is falsy) and validated with a HEAD request to
        ``server``.  After three failed candidates — or an empty pool — the
        proxy files are refreshed via ``prepare_info.save_proxies()`` and the
        lookup retries recursively.

        :param proxy_type: ``'http'``, ``'https'``, or falsy for both.
        :param server: URL used to verify that a candidate proxy works.
        :return: single-entry dict mapping scheme to proxy address.
        """
        proxy_types = [proxy_type] if proxy_type else ['http', 'https']
        urls = []
        for _proxy_type in proxy_types:
            with open(self.proxy_type_file[_proxy_type], encoding='utf-8', mode='r') as f:
                urls.extend(_proxy_type + '::' + item for item in f.readlines())
        if not urls:
            # Empty pool: refresh the proxy files, then retry.
            # NOTE(review): recurses without bound if the refresh yields
            # nothing usable — confirm save_proxies() always produces entries.
            prepare_info.save_proxies()
            return self.get_proxy(proxy_type, server)
        count = 0
        while True:
            # maxsplit=1 keeps the whole address even if it contains '::'.
            scheme, address = random.choice(urls).strip().split('::', 1)
            proxy = {scheme: address}
            try:
                # Timeout keeps one dead proxy from hanging the whole lookup.
                response = requests.head(server, proxies=proxy,
                                         headers=self.ua_default, timeout=5)
                if response.status_code == 200:
                    return proxy
            except requests.RequestException:
                # BUGFIX: a dead proxy previously raised and crashed the
                # caller; count it as a failed attempt and keep trying.
                pass
            count += 1
            if count >= 3:
                # Too many failures: refresh the pool and start over.
                prepare_info.save_proxies()
                return self.get_proxy(proxy_type, server)


extract_info = ExtractInfo()


def main():
    """Manual smoke test: fetch a verified proxy and print it."""
    # print(extract_info.get_useragent())
    proxy = extract_info.get_proxy()
    print(proxy)


# Allow running this module directly for a quick manual check.
if __name__ == '__main__':
    main()
