import random
from urllib import request
from bs4 import BeautifulSoup
import ssl
# WARNING: globally disables TLS certificate verification for all HTTPS
# requests made via urllib in this process. Convenient for scraping sites
# with bad certs, but it exposes every request to man-in-the-middle attacks.
ssl._create_default_https_context = ssl._create_unverified_context


# Return a fake User-Agent header for outgoing requests
def getAgent():
    """Return a request-header dict with a randomly chosen User-Agent.

    Picking a different browser signature per request makes the scraper
    less likely to be blocked by simple bot detection.

    Returns:
        dict: ``{'User-Agent': <browser UA string>}``
    """
    agents = ['Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) '
              'Chrome/73.0.3683.103 Safari/537.36',
              'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_0) AppleWebKit/535.11 (KHTML, like Gecko) '
              'Chrome/17.0.963.56 Safari/535.11',
              'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.6; rv:2.0.1) Gecko/20100101 Firefox/4.0.1',
              'Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_8; en-us) AppleWebKit/534.50 (KHTML, like Gecko) '
              'Version/5.1 Safari/534.50',
              'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_6) AppleWebKit/537.36 (KHTML, like Gecko) '
              'Chrome/86.0.4240.111 Safari/537.36',
              'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:6.0) Gecko/20100101 Firefox/6.0',
              'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Maxthon 2.0)',
              'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1)']
    # random.choice is the idiomatic (and off-by-one-proof) way to pick one
    fakeHeader = {'User-Agent': random.choice(agents)}
    return fakeHeader


# Scrape proxy IP addresses from a free-proxy listing site
def getProxyIp():
    """Scrape the mimvp.com free-proxy page and return proxy URLs.

    Fetches the first page of the free-proxy table with a fake User-Agent,
    parses every table row after the header, and collects the IP found in
    the second ``<td>`` cell of each row.

    Returns:
        list[str]: proxy addresses, each prefixed with ``http://``.

    Raises:
        urllib.error.URLError: if the proxy site cannot be reached.
    """
    proxy = []
    url = 'https://proxy.mimvp.com/freeopen?proxy=in_hp&sort=&page=1'
    req = request.Request(url, headers=getAgent())
    # Use a context manager so the HTTP response is always closed
    # (the original leaked the connection).
    with request.urlopen(req) as res:
        data = res.read().decode('utf-8')
    soup = BeautifulSoup(data, "html.parser")
    # Skip the first <tr> (table header); iterate the data rows directly.
    for row in soup.findAll('tr')[1:]:
        tds = row.findAll("td")
        # NOTE(review): assumes the IP lives in the 2nd cell of every row —
        # verify against the site's current table layout.
        ip_temp = tds[1].contents[0]
        proxy.append('http://' + ip_temp)
    return proxy


# Return one randomly chosen scraped proxy, in urllib proxy-dict form
def getProxy():
    """Return a randomly selected proxy from the freshly scraped list.

    Calls :func:`getProxyIp` (a network request) on every invocation,
    then picks one address at random.

    Returns:
        dict: ``{'http': 'http://<ip>'}`` suitable for urllib proxy handlers.
    """
    proxy = getProxyIp()
    # random.choice replaces the manual randint-indexing idiom
    fakeProxy = {'http': random.choice(proxy)}
    return fakeProxy