
import urllib.request
import random
import re
import requests
from settings import UAPOOL
import time

# API endpoint of 66ip.cn: returns a page listing up to 50 free HTTP
# proxies (query string pins region/ISP/anonymity; ipaddress is the
# GBK-encoded region name).
url = "http://www.66ip.cn/nmtq.php?getnum=50&isp=0&anonymoustype=0&start=&ports=&export=&ipaddress=%D5%E3%BD%AD&area=1&proxytype=0&api=66ip"

req = urllib.request.Request(url)
# Rotate the User-Agent per run to look less like a bot.
req.add_header('User-Agent', random.choice(UAPOOL))

# Compiled once (was rebuilt every loop iteration). Raw string avoids
# invalid-escape warnings; ports can be up to 5 digits (1-65535) — the
# original {1,4} silently dropped proxies listening on ports >= 10000.
pattern = re.compile(r"\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}:\d{1,5}")

count = 0    # completed fetch/validate rounds
success = 0  # proxies that passed validation so far

# Truncate the output file so each run starts with a fresh pool.
with open("ippool.txt", 'w'):
    pass

while True:
    # Fetch a fresh batch of candidate "ip:port" strings. str() on the
    # raw bytes yields "b'...'" but the regex still matches inside it.
    datas = str(urllib.request.urlopen(req, timeout=60).read())

    result = pattern.findall(datas)
    print(result)

    with open("ippool.txt", 'a') as fhandle:
        for wr in result:
            print(str(wr))
            proxies = {'http': 'http://' + str(wr)}
            try:
                # Validate the proxy by routing a quick request through it.
                rr = requests.get("http://www.baidu.com", timeout=6,
                                  proxies=proxies).status_code
            except requests.RequestException:
                # Dead or too-slow proxy — skip it (best-effort check).
                # Narrowed from a bare `except:` so Ctrl-C still works.
                print("连接失败")
            else:
                print("Succe:", rr)
                if rr == 200:  # status_code is an int; compare as int
                    success += 1
                    fhandle.write(str(wr) + "\n")
                    print("成功个数:", success, "休息中:", count)

    count += 1
    print("休息中……：", count)
    time.sleep(10)  # throttle: be polite to the source site between rounds