# coding=utf-8

import aiohttp
import asyncio
from fake_useragent import UserAgent
import requests
import re
import time
from collections import Counter
from random import choice

"""A standalone thread that checks the IP every five/ten minutes and updates the whitelist.

First query our own public IP via IP-lookup websites.
Then verify whether the crawler's IP is already in the proxy provider's
whitelist; if not, add it via the provider's API.
Afterwards delete the previous IP via the API so that the provider's
whitelist does not exceed its size limit.
"""
# base_urls = ["http://www.baidu.com/", "www.taobao.com", "www.qq.com", "www.163.com"]
# url = "http://ip.11jsq.com/index.php/api/entry?method=proxyServer.generate_api_url&packid=0&fa=0&fetch_key=&qty=1&time=101&pro=&city=&port=1&format=txt&ss=1&css=&dt=1&specialTxt=3&specialJson="
# url = "http://mvip.piping.mogumiao.com/proxy/api/main_fetcher_bs?appKey=1ed3519b6a6a4c019bea4fd97cab3542&count=20&expiryDate=0&format=2&newLine=2"
# whitelist_add_url = "http://http.zhiliandaili.com/Users-whiteIpAddNew.html?appid=1348&appkey=60247e140488dc4aafedbcdad4ecae0b&whiteip="
# whitelist_chk_url = "http://http.zhiliandaili.com/Users-whiteIpListNew.html?appid=1348&appkey=60247e140488dc4aafedbcdad4ecae0b"
# whitelist_del_url = "http://http.zhiliandaili.com/Users-whiteIpDelNew.html?appid=1348&appkey=60247e140488dc4aafedbcdad4ecae0b&whiteip="

# ua = UserAgent()

# Hard-coded pool of proxy tunnel endpoints ("host:port" strings).
# fetch_tunnels() below picks one of these at random per call.
tunnels = ["117.44.243.140:18081", "47.106.230.172:10000", "47.106.230.172:10001", "47.106.230.172:10002", "47.106.230.172:10003", "47.106.230.172:10004", "47.106.230.172:10005", "47.106.230.172:10006", "47.106.230.172:10007", "47.106.230.172:10008", "47.106.230.172:10009", "117.44.243.140:18082", "117.44.243.140:18083", "117.44.243.140:18084", "117.44.243.140:18085", "117.44.243.140:18086",
           "117.44.243.140:18087", "117.44.243.140:18088", "117.44.243.140:18089", "117.44.243.140:18090", "117.44.243.140:18091", "117.44.243.140:18092", "117.44.243.140:18093", "117.44.243.140:18094", "117.44.243.140:18095", "117.44.243.140:18096", "117.44.243.140:18097", "117.44.243.140:18098", "117.44.243.140:18099", "117.44.243.140:18100", "117.44.243.140:18101", "117.44.243.140:18102",
           "117.44.243.140:18103", "117.44.243.140:18104", "117.44.243.140:18105", "117.44.243.140:18106", "117.44.243.140:18107", "117.44.243.140:18108", "117.44.243.140:18109", "117.44.243.140:18110"]

#
# def test_proxy_list(proxy_list_pa):
#     proxy_list3 = list(set(proxy_list_pa))
#     proxy_list3 = ["http://" + x for x in proxy_list3]
#     loop = asyncio.get_event_loop()
#     good_proxies1 = loop.run_until_complete(main(loop, proxy_list3))
#     return good_proxies1
#

# async def crawl(proxy, url, session):
#     headers = {'User-Agent': ua.random}
#     try:
#         r = await session.get(url, proxy=proxy, timeout=timeout, headers=headers)
#         return proxy
#     except Exception as _:
#         pass

#
# async def main(loop1, proxy_list1):
#     async with aiohttp.ClientSession() as session:
#         tasks = [loop1.create_task(crawl(proxy, url_test, session)) for proxy in proxy_list1 for url_test in base_urls]
#         finished, unfinished = await asyncio.wait(tasks)
#         good_proxies = [r.result() for r in finished if r.result() is not None]
#         return good_proxies
#
#
# def fetch_ipport_list():
#     r = requests.get(url)
#     proxy_list2 = re.split(r"\n", r.text)
#     return proxy_list2

#
# def get_proxy_gen():
#     proxy_list_init = []
#     start_time = time.time()
#     while len(proxy_list_init) == 0 and time.time() - start_time < 30:
#         proxy_list = fetch_ipport_list()
#         proxy_list_init = test_proxy_list(proxy_list)
#     return Counter(proxy_list_init).most_common()[0][0]


##################################################################

# ip_added = []
# """The regexes below match, in order: https://ip.cn | http://www.882667.com/ | http://ddns.nat123.com | baidu.com"""
# regex = "(?<=您现在的 IP：<code>)\d+\.\d+\.\d+\.\d+|(?<=你的ip是：</b><a href=\"ip_)\d+\.\d+\.\d+\.\d+|(?<=nat123动态域名解析http版ip检测\nCurrent IP Address:\s)\d+\.\d+\.\d+\.\d+|(?<=本机IP:&nbsp;)\d+\.\d+\.\d+\.\d+"
#
# list_req_webs = ["https://www.baidu.com/s?wd=IP", "https://ip.cn", "http://ddns.nat123.com", "http://www.882667.com/"]


# def fetch_ips():
#     loop = asyncio.get_event_loop()
#     ip_s = loop.run_until_complete(main_fetcher(loop, list_req_webs))
#     return ip_s


# async def main_fetcher(loop5, list_req_webs1):
#     async with aiohttp.ClientSession() as session:
#         tasks = [loop5.create_task(pingpong(session, x)) for x in list_req_webs1]
#         finished, unfinished = await asyncio.wait(tasks)
#         res_ips = [x.result() for x in finished]  # if x.result() is not None]
#         return res_ips


# async def pingpong(session1, req_web):
#     headers = {'user-agen': ua.random}
#     r = await session1.get(req_web, headers=headers, timeout=10)
#     rtext = await r.text()
#     # print(rtext)
#
#     refind_list = re.findall(r"{}".format(regex), rtext)
#     # print(len(refind_list), req_web, rtext[:50])
#     # ip = refind_list[0] if refind_list else None
#     return refind_list[0] if len(refind_list) == 1 else None
#

def fetch_tunnels(pool=None):
    """Return one randomly chosen proxy tunnel endpoint ("host:port" string).

    Args:
        pool: Optional sequence of "host:port" strings to pick from.
            Defaults to the module-level ``tunnels`` list, preserving
            the original no-argument behavior.

    Returns:
        One entry of *pool*, chosen uniformly at random.

    Raises:
        IndexError: If *pool* is an empty sequence.
    """
    return choice(tunnels if pool is None else pool)


# while True:
#     myip_2add = fetch_ips()
#     if len(myip_2add) > 0:
#
#     time.sleep(600)
# def get_fangnanproxypool():
#     files = {'file': open("image/Screenshot from 2018-07-24 15-20-13.png", 'rb')}
#     url1 = "http://192.168.10.172:8002/middleware/identifying/upload.go?filename=ZhiXingWang"
#     response = requests.post(url1, files=files)
#     print(response.text)


# def proxy_gen_infi():
#     ll = tunnels
#     index = 0
#     while True:
#         if index == len(ll):
#             index = 0
#         result = ll[index]
#         index += 1
#         yield result


if __name__ == "__main__":
    # Manual smoke-test entry point for the (currently disabled) proxy
    # helpers above; nothing runs when the module is executed directly.
    # print(get_proxy_gen())
    # print(fetch_ips())
    pass