import requests
import random
from bs4 import BeautifulSoup
from FileManager import save_html
from lxml import etree

def get_ip_list():
    """Scrape www.xicidaili.com and build a list of proxy URLs.

    Fetches the site's front page, saves the raw HTML via save_html,
    then parses the table with id='ip_list' row by row.

    Returns:
        list[str]: proxies of the form '<protocol>://<ip>:<port>'
        (protocol lower-cased, e.g. 'http://1.2.3.4:80').

    Bug fix: the original built the list but never returned it, so
    callers always received None.
    """
    headers = {
               'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.99 Safari/537.36'}

    print("正在获取代理列表...")
    url = 'http://www.xicidaili.com'

    target_response = requests.get(url=url, headers=headers)
    target_response.encoding = 'utf-8'
    target_html = target_response.text
    save_html(target_html, 'iplist.html')

    # First pass isolates the <table id="ip_list">; second pass re-parses
    # just that fragment so .table.contents indexes only proxy rows.
    bf1_ip_list = BeautifulSoup(target_html, 'lxml')
    bf2_ip_list = BeautifulSoup(str(bf1_ip_list.find_all(id='ip_list')), 'lxml')
    ip_list_info = bf2_ip_list.table.contents

    proxys_list = []
    # NOTE(review): 177 was hard-coded to stop before the trailing SOCKS
    # entries, which this scraper does not use. Keep that cap, but never
    # index past the rows actually present (the original could IndexError
    # if the page shrank).
    for index in range(min(177, len(ip_list_info))):
        # Odd indices are <tr> elements (even ones are whitespace text
        # nodes); indices 1-3 of each 44-entry section are header/banner
        # rows — presumably tied to the site's page layout, verify if the
        # page structure changes.
        if index % 2 == 1 and index % 44 != 1 and index % 44 != 2 and index % 44 != 3:
            dom = etree.HTML(str(ip_list_info[index]))
            ip = dom.xpath('//td[2]')
            port = dom.xpath('//td[3]')
            protocol = dom.xpath('//td[6]')
            # Skip malformed rows instead of crashing on a missing cell.
            if ip and port and protocol and protocol[0].text:
                proxys_list.append(protocol[0].text.lower() + '://' + ip[0].text + ':' + port[0].text)
    print(proxys_list)
    return proxys_list

def _unpack(row, kind='td'):
    """Return the text content of every *kind* descendant of *row*.

    Args:
        row: an lxml element (e.g. a table row).
        kind: tag name to collect, defaults to 'td'.

    Returns:
        list[str]: one entry per matched element, in document order.
    """
    texts = []
    for element in row.xpath('.//%s' % kind):
        texts.append(element.text_content())
    return texts

#根据timeout判断ip是否可行
def testIpAvailable(ip):
    """Probe whether the proxy *ip* can fetch a page within the timeout.

    Args:
        ip: proxy address string (used as the 'http' entry of the
            requests proxies dict).

    Returns:
        bool: True when the request completes within 1 second through
        the proxy, False on timeout or any other connection failure.

    Bug fix: the original referenced REQUEST_TIMEOUT, which was only
    bound inside the Timeout handler — the success path raised
    NameError — and the flag it did return was inverted (True meant
    the proxy had timed out).
    """
    url = 'http://www.baidu.com'
    headers = {
               'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.99 Safari/537.36'}

    available = False
    try:
        requests.get(url=url, headers=headers, proxies={'http': ip}, timeout=1.0)
        available = True
    except requests.exceptions.RequestException:
        # Covers ConnectTimeout, ReadTimeout, ConnectionError, etc. —
        # any failure means the proxy is not usable.
        available = False
    print(available)
    return available

def get_random_ip(self, ip_list):
    """Pick one address from *ip_list* at random and wrap it as a
    requests-style proxies dict.

    Args:
        ip_list: iterable of '<ip>:<port>' strings (no scheme).

    Returns:
        dict: {'http': 'http://<chosen address>'}

    NOTE(review): the unused *self* parameter suggests this function was
    lifted out of a class; kept for caller compatibility.
    """
    print("正在设置随机代理...")
    candidates = ['http://' + addr for addr in ip_list]
    proxies = {'http': random.choice(candidates)}
    print("代理设置成功.")
    return proxies

if __name__ == '__main__':
    # Quick manual smoke test: fetch and print the scraped proxy list.
    print('ha')
    ip_list = get_ip_list()