import requests
from multiprocessing import Pool
import traceback
import json
import random
import os
import time

# NOTE(review): these module-level globals appear unused by the functions
# below — `main` defines its own local `count`, and `ok_data` is never
# referenced in this file. Candidates for removal; kept in case another
# module imports them.
ok_data=[]
count=0

def efficacy_ip(proxy_ip):
    """Check whether a proxy works by fetching several test sites through it.

    Args:
        proxy_ip: requests-style proxy mapping, e.g. {'http': '123.123.123.123:8080'}.

    Returns:
        The proxy mapping unchanged if every test URL responds successfully
        through the proxy, otherwise None.
    """
    USER_AGENTS = [
            "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Win64; x64; Trident/5.0; .NET CLR 3.5.30729; .NET CLR 3.0.30729; .NET CLR 2.0.50727; Media Center PC 6.0)",
            "Mozilla/5.0 (compatible; MSIE 8.0; Windows NT 6.0; Trident/4.0; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; .NET CLR 1.0.3705; .NET CLR 1.1.4322)",
            "Mozilla/4.0 (compatible; MSIE 7.0b; Windows NT 5.2; .NET CLR 1.1.4322; .NET CLR 2.0.50727; InfoPath.2; .NET CLR 3.0.04506.30)",
            "Mozilla/5.0 (Windows; U; Windows NT 5.1; zh-CN) AppleWebKit/523.15 (KHTML, like Gecko, Safari/419.3) Arora/0.3 (Change: 287 c9dfb30)",
            "Mozilla/5.0 (X11; U; Linux; en-US) AppleWebKit/527+ (KHTML, like Gecko, Safari/419.3) Arora/0.6",
            "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1.2pre) Gecko/20070215 K-Ninja/2.1.1",
            "Mozilla/5.0 (Windows; U; Windows NT 5.1; zh-CN; rv:1.9) Gecko/20080705 Firefox/3.0 Kapiko/3.0",
            "Mozilla/5.0 (X11; Linux i686; U;) Gecko/20070322 Kazehakase/0.4.5"
        ]

    url_list = [
        "https://www.baidu.com",
        "https://www.jd.com",
        "https://www.douban.com",
    ]
    for url in url_list:
        # Rotate the User-Agent so repeated checks look less like a bot.
        headers = {'User-Agent': random.choice(USER_AGENTS)}
        try:
            # timeout keeps a dead proxy from hanging the worker forever.
            r = requests.get(url=url, headers=headers, proxies=proxy_ip, timeout=10)
            r.raise_for_status()
        except requests.RequestException:
            # Log the failure BEFORE returning — the original logged after
            # `return`, so these lines were unreachable dead code.
            with open("a.txt", "a", encoding='utf-8') as log_file:
                traceback.print_exc(file=log_file)
            print("{} request error".format(proxy_ip))
            return None
    return proxy_ip

def main():
    """Validate candidate proxies from ip_data.json in parallel, merge the
    survivors with the existing pool file, and write the result to ip.json.

    Side effects: reads ip_data.json and xicidiali_eff_ip.json, writes ip.json,
    and prints a summary to stdout.
    """
    st_time = time.time()

    with open('ip_data.json', 'r', encoding='utf8') as file:
        data = json.load(file)

    # Fan the checks out over 10 worker processes; close/join so the workers
    # are reaped even if map() raises (the original leaked the pool).
    pool = Pool(10)
    try:
        result = pool.map(efficacy_ip, data)
    finally:
        pool.close()
        pool.join()

    # efficacy_ip returns the proxy dict on success, None on failure.
    data_list = [each for each in result if each]
    count = len(data_list)

    # Merge with the previously known-good proxies.
    with open('xicidiali_eff_ip.json', 'r', encoding='utf8') as file:
        origin_data = json.load(file)
    data_list.extend(origin_data)
    with open('ip.json', 'w', encoding='utf8') as file:
        json.dump(data_list, file)
    print('OK')

    end_time = time.time()
    print("校验Ip {} 个 有效Ip {} 个 耗时 ：{}  ".format(len(data), count, end_time - st_time))
    print("目前代理池有效 IP {} ".format(len(data_list)))
def temp():
    """Debug helper: print every entry of temp_data.json along with its type."""
    with open('temp_data.json', 'r', encoding='utf8') as fp:
        entries = json.load(fp)
    for entry in entries:
        print(entry)
        print(type(entry))
    print('OK')
# Script entry point: validate the candidate proxies and refresh the pool files.
if __name__ == '__main__':
    #temp()  # debug helper, normally disabled
    main()