#!/usr/bin/python
# coding=utf-8
'''
Scrape proxy IPs and ports from kuaidaili's free list, so they don't have
to be collected by hand every time.
'''

# Third-party and standard-library dependencies
import requests
import random
from bs4 import BeautifulSoup
import time
import telnetlib
import re
import json

import sys

print(sys.path)  # debug: show the module search path before modifying it
sys.path.append('../Day04/myShell') # add the directory containing our own modules to the search path

# Pool of User-Agent strings; one is chosen at random per request so the
# scraper looks less like a bot.
headers = [
    'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/54.0.2840.99 Safari/537.36',
    "Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.1916.153 Safari/537.36",
    "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:30.0) Gecko/20100101 Firefox/30.0",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_2) AppleWebKit/537.75.14 (KHTML, like Gecko) Version/7.0.3 Safari/537.75.14",
    "Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.2; Win64; x64; Trident/6.0)",
    'Mozilla/5.0 (Windows; U; Windows NT 5.1; it; rv:1.8.1.11) Gecko/20071127 Firefox/2.0.0.11',
    'Opera/9.25 (Windows NT 5.1; U; en)',
    'Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1; .NET CLR 1.1.4322; .NET CLR 2.0.50727)',
    'Mozilla/5.0 (compatible; Konqueror/3.5; Linux) KHTML/3.5.5 (like Gecko) (Kubuntu)',
    'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.8.0.12) Gecko/20070731 Ubuntu/dapper-security Firefox/1.5.0.12',
    'Lynx/2.8.5rel.1 libwww-FM/2.14 SSL-MM/1.4.1 GNUTLS/1.2.9',
    "Mozilla/5.0 (X11; Linux i686) AppleWebKit/535.7 (KHTML, like Gecko) Ubuntu/11.04 Chromium/16.0.912.77 Chrome/16.0.912.77 Safari/535.7",
    "Mozilla/5.0 (X11; Ubuntu; Linux i686; rv:10.0) Gecko/20100101 Firefox/10.0",
    'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/54.0.2840.99 Safari/537.36']

# Free proxy list page template; "$item" is replaced with a 1-based page number.
url = 'https://www.kuaidaili.com/ops/proxylist/$item/'

def getProxyIP():
    """Scrape the first 10 pages of kuaidaili's free proxy list.

    :return: list of proxy entries shaped like ``{'http': 'http://ip:port'}``
    """
    proxies = []
    for page in range(1, 11):
        page_url = url.replace("$item", str(page))
        req_headers = {'User-Agent': random.choice(headers)}

        # Random delay between requests so we do not hammer the site.
        time.sleep(random.uniform(1, 3))
        res = requests.get(page_url, headers=req_headers)
        html_doc = str(res.content, "UTF-8")
        soup = BeautifulSoup(html_doc, 'html.parser')

        # NOTE(review): assumes the free list lives under
        # <div id="freeList"><table><tbody>…</tbody> — verify if the page breaks.
        rows = soup.find('div', id='freeList').find('tbody').find_all('tr')

        # Each row's text splits on newlines into ["", ip, port, ...].
        for row in rows:
            cells = str(row.text).split("\n")
            # was: a local named ``dict`` — shadowed the builtin
            proxies.append({'http': 'http://' + cells[1] + ":" + cells[2]})

    print(proxies)
    return proxies

# Extract the ip and port out of a proxy dict entry and probe it
def testIP(dict,i):
    url = dict['http']
    myUrl = str(url)
    # myUrl=myUrl.replace('\\','')
    newUrl = re.sub('//','',myUrl)
    l = newUrl.split(":")
    ip = l[1]
    port = l[2]
    try:
        telnetlib.Telnet(ip,port,timeout=2)
        return True 
    except:
        print("第%d个IP异常%s"%(i,myUrl))
    return False

def testIP2(dict, i):
    """Check a proxy by fetching http://icanhazip.com/ through it.

    :param dict: proxy entry shaped like ``{'http': 'http://ip:port'}``
    :param i: zero-based index, used only for progress messages
    :return: True if the proxied request returns HTTP 200, otherwise False
    """
    # NOTE(review): the parameter shadows the builtin ``dict``; the name is
    # kept unchanged for backward compatibility with existing callers.
    myUrl = str(dict['http'])
    # 'http://ip:port' -> 'http:ip:port' -> ['http', ip, port]
    parts = re.sub('//', '', myUrl).split(":")
    ip = parts[1]
    newHeaders = {'User-Agent': random.choice(headers)}
    try:
        res = requests.get(url="http://icanhazip.com/", headers=newHeaders,
                           proxies={"http": myUrl}, timeout=6)
        if 200 == res.status_code:
            print('第' + str(i + 1) + "个" + "代理IP:'" + myUrl + "'有效！")
            return True
        else:
            print('第' + str(i + 1) + "个" + "代理IP无效！")
            return False
    # was a bare except; only request/connection failures are expected
    except requests.exceptions.RequestException:
        print('第' + str(i + 1) + "个" + ip + ':无效！！!')
        return False




# Build request headers with a randomly chosen User-Agent
def get_random_header():
    """Return a fresh header dict whose User-Agent is picked at random."""
    user_agent = random.choice(headers)
    return {
        "User-Agent": user_agent,
        "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8",
        "Accept-Encoding": "gzip"
    }


def testIP3(http, ip, port):
    '''
    Check a proxy by requesting httpbin's /ip echo endpoint through it.

    :param http: scheme string, 'http' or 'https'
    :param ip: proxy IP address as a string
    :param port: proxy port as a string
    :return: the response status code if the proxy works and is
        high-anonymity (does not leak our real address), otherwise None
    '''
    proxies = {
        http: http + '://' + ip + ':' + port
    }
    newHeaders = {'User-Agent': random.choice(headers)}
    print(proxies)
    test_url = 'http://httpbin.org/ip'  # was a one-element list for no reason
    try:
        response = requests.get(test_url, headers=newHeaders, proxies=proxies,
                                allow_redirects=False, timeout=6)
        # Only accept high-anonymity proxies: the origin httpbin reports
        # must be exactly the proxy's own IP (no forwarded client address).
        if json.loads(response.text)['origin'].split(', ')[0] == ip:
            print(response.text)
            return response.status_code
    except Exception:
        # Best effort: any network or parse failure just means "not usable".
        pass
    return None  # fix: failure result is now explicit instead of implicit


# Filter the scraped proxy IPs. Many still fail in practice: a proxy can
# pass the telnet connectivity check yet be unusable for real requests
# (easy to reproduce in a Windows terminal).
# Method 1: probe each proxy with a telnet (raw TCP) connection.
def fliterProxyIpByTelnet():
    """Scrape the proxy list and keep only entries that pass the telnet probe.

    :return: list of proxy entries shaped like ``{'http': 'http://ip:port'}``
    """
    # BUG FIX: the old code did ``ip2 = ip`` (an alias, not a copy) and then
    # deleted from ip2 while iterating ip with a manually advanced index —
    # mutating the list being iterated skips entries and can raise IndexError.
    # Building a new filtered list avoids all of that.
    candidates = getProxyIP()
    return [proxy for i, proxy in enumerate(candidates) if testIP(proxy, i)]

# Method 2: verify each proxy by making a real HTTP request through it
def fliterProxyIPByBaidu():
    """Scrape the proxy list and keep only entries that pass a live request.

    :return: list of proxy entries shaped like ``{'http': 'http://ip:port'}``
    """
    # BUG FIX: the old code did ``ip2 = ip`` (an alias, not a copy) and then
    # deleted from ip2 while iterating ip with a manually advanced index —
    # mutating the list being iterated skips entries and can raise IndexError.
    # Building a new filtered list avoids all of that.
    candidates = getProxyIP()
    # ip = [{'http': 'http://120.196.112.6:3128'}]  # handy single-proxy probe
    return [proxy for i, proxy in enumerate(candidates) if testIP2(proxy, i)]


'''
When a program that guards main() with ``if __name__ == '__main__'`` is
imported as a module, __name__ is set to the module's name rather than
'__main__', so main() below is not executed on import.
'''
def main():
    """Entry point: scrape the free proxy list and print the working subset."""
    # Verify each proxy with a live HTTP request (method 2).
    working = fliterProxyIPByBaidu()
    print("ip2 %s" % (working))


if __name__ == '__main__':
    main()