import urllib.request
import random
import re
from pachou.proxyDemo import proxyDemo02
from bs4 import BeautifulSoup

# Pool of real-browser User-Agent headers; one is chosen at random per
# request so the crawler looks less like a bot.
userPool = [
    {
        "User-Agent": "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/61.0.3163.100 Safari/537.36"},
    {
        "User-Agent": "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/49.0.2623.221 Safari/537.36 SE 2.X MetaSr 1.0"},
    {
        "User-Agent": "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/47.0.2526.106 BIDUBrowser/8.7 Safari/537.36"}
]
# Hard-coded HTTPS proxies ({scheme: "ip:port"}).  NOTE(review): these free
# proxies are almost certainly stale — verify before relying on them.
ipPool = [{'https': "114.225.171.133:53128"}, {'https': "101.236.43.153:8866"}]


def getUser():
    """Pick a random User-Agent entry from ``userPool`` and return it as a
    list of ``(header_name, header_value)`` tuples — the format expected by
    ``opener.addheaders``.

    Returns:
        list[tuple[str, str]] on success, or None on failure (the error is
        printed rather than raised, matching the script's best-effort style).
    """
    try:
        ruser = random.choice(userPool)
        print(ruser)
        # Bug fix: the original returned from inside the item loop, so only
        # the first header pair was ever collected.  Collect them all.
        return list(ruser.items())
    except Exception as err:
        print("获取userpool失败", err)


# IP地址取自国内髙匿代理IP网站：http://www.xicidaili.com/nn/
# 仅仅爬取首页IP地址就足够一般使用
def getIp(html):
    """Extract ``ip</td>...<td>port`` fragments from the xicidaili listing
    page and return one of them at random as an ``{ip: port}`` dict.

    Args:
        html: raw response body as bytes (decoded here as UTF-8).

    Returns:
        dict[str, str] with a single ip -> port entry, or None on failure
        (the error is printed, not raised).
    """
    try:
        # Raw strings avoid the invalid "\." / "\s" escape sequences the
        # original non-raw patterns relied on; compile once and reuse.
        pair_pattern = re.compile(r"\d+\.\d+\.\d+\.\d+</td>\s+<td>\d+")
        splitter = re.compile(r"</td>\s+<td>")
        matches = pair_pattern.findall(html.decode('utf-8'))
        print(matches)
        ipArray = []
        # Loop variable renamed: the original shadowed the builtin `str`.
        for fragment in matches:
            ip, port = splitter.split(fragment)
            # Store each entry as {ip: port}.
            ipArray.append({ip: port})
        print(ipArray)

        return random.choice(ipArray)
    except Exception as err:
        print("获取ip失败", err)


# 获取网页内容函数
def getHTMLText(url):
    """Fetch *url* and return the raw response body as bytes.

    Args:
        url: any URL scheme ``urllib.request`` understands.

    Returns:
        bytes on success, or None on failure (error printed, not raised).
    """
    try:
        # Context manager closes the response even if read() fails —
        # the original leaked the response object.
        with urllib.request.urlopen(url) as response:
            html = response.read()
    except Exception as err:
        print("获取页面内容失败", err)
        return None
    else:
        return html


def paseHtml(html):
    """Parse the proxy-list page and collect ``{protocol: "ip:port"}`` dicts
    from every table row.

    Args:
        html: page HTML (bytes or str) with <tr>/<td> rows where
              td[1]=ip, td[2]=port, td[5]=protocol — TODO confirm against
              the live page layout.

    Returns:
        list of single-entry dicts (also printed, as before).  Previously
        the list was only printed and silently discarded.
    """
    # Explicit "html.parser" makes parsing deterministic; the feature name
    # "html" picks whichever optional parser (lxml, html5lib) is installed.
    soup = BeautifulSoup(html, "html.parser")
    rows = soup.find_all("tr")
    ipList = []
    for row in rows[1:]:  # row 0 is the table header
        tds = row.find_all("td")
        # Skip header/malformed rows lacking the expected cells — the
        # original raised IndexError on them.
        if len(tds) < 6:
            continue
        ipList.append({tds[5].text: str(tds[1].text) + ":" + str(tds[2].text)})
    print(ipList)
    return ipList


def demo():
    """Install a process-wide urllib opener carrying the random request
    headers produced by ``getUser()``."""
    header_opener = urllib.request.build_opener()
    header_opener.addheaders = getUser()
    # From here on every urllib.request.urlopen() call in this process
    # goes through this opener and sends these headers.
    urllib.request.install_opener(header_opener)


# Fetch the listing page through the project-local proxy helper, using the
# first pooled proxy and User-Agent.  NOTE(review): this runs at import
# time; consider wrapping in an `if __name__ == "__main__":` guard.
html = proxyDemo02("http://www.xicidaili.com/nn/", ipPool[0], isSwitchProxy=True, headers=userPool[0])

# Alternative path: extract a single random ip via the regex-based getIp().
# print(getIp(html))
paseHtml(html)
