import re
from concurrent import futures
# Get a random User-Agent (currently unused)
# from fake_useragent import UserAgent
import requests
from vip_request_ini import vip_request



def get_proxy():
    """Fetch one proxy address ("host:port") from the local proxy-pool service."""
    response = requests.get("http://127.0.0.1:5010/get/")
    return response.content.decode('utf8')

def get_allproxy():
    """Return the full proxy list from the local proxy-pool service.

    The original implementation hand-munged quotes/whitespace in the raw
    response body and then ``eval()``-ed it — which would execute arbitrary
    code if the service (or anything on the wire) were compromised.  The
    endpoint returns JSON, so parse it as JSON instead.

    :return: list of proxy strings ("host:port").
    """
    response = requests.get("http://127.0.0.1:5010/get_all/")
    return response.json()

def delete_proxy(proxy):
    """Ask the local proxy-pool service to remove *proxy* from its pool."""
    delete_url = "http://127.0.0.1:5010/delete/?proxy={}".format(proxy)
    requests.get(delete_url)

def get_one_cookie(proxy):
    """Fetch the Sogou Weixin search page through *proxy* and return its cookies.

    Makes at most two attempts.  The original loop only incremented the
    counter inside the ``except`` branch, so a SUCCESSFUL request looped
    forever re-fetching the page; we now ``break`` on success.  It also used
    ``global req``, which raised NameError if both attempts failed — replaced
    with an explicit ``RuntimeError``.

    If the response contains the captcha prompt, the proxy is considered
    burned and removed from the pool.

    :param proxy: "host:port" string, used as the HTTPS proxy.
    :return: cookie dict augmented with fixed browser-fingerprint values.
    :raises RuntimeError: if both request attempts fail.
    """
    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/75.0.3770.142 Safari/537.36',
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3',
        'Accept-Encoding': 'gzip, deflate, br',
        'Content-Type': 'application/x-www-form-urlencoded',
        'Accept-Language': 'zh-CN,zh;q=0.9',
        'Cache-Control': 'max-age=0',
        'Host': 'weixin.sogou.com',
        'Connection': 'keep-alive'
    }
    url = "https://weixin.sogou.com/weixin?type=1&s_from=input&query=%E5%B0%8F%E9%B8%9F&ie=utf8&_sug_=n&_sug_type_="
    req = None
    for attempt in range(2):
        try:
            req = requests.get(url, headers=headers,
                               proxies={"https": "http://{}".format(proxy)},
                               timeout=(50, 50))
            req.encoding = 'utf-8'
            break  # success — stop retrying (previously looped forever here)
        except requests.exceptions.RequestException:
            print(f"重连{attempt}")
    if req is None:
        raise RuntimeError("both attempts failed via proxy {}".format(proxy))
    cookies = req.cookies.get_dict()
    print(f"原始cookie------{cookies}")
    # Fixed browser-fingerprint values expected by Sogou.
    cookies['browerV'] = '3'
    cookies['osV'] = '1'
    cookies['sct'] = '3'
    cookies['sst0'] = '552'
    cookies['SUV'] = '005E5558458CAE4B5B248FD6FBCA1033'
    snuid_values = []  # SNUID values actually issued by the server
    if 'SNUID' in cookies:
        snuid_values.append(cookies['SNUID'])
    else:
        # No SNUID issued — fall back to a known placeholder value.
        cookies['SNUID'] = 'EB457B901F2D940A000000005D3597EF'
    print(snuid_values)
    print(f"更新后的cookie------{cookies}")
    # Log only genuinely server-issued SNUIDs (the original compared a list
    # against '' here, which was always True).
    if snuid_values and cookies['SNUID'] != 'EB457B901F2D940A000000005D3597EF':
        # NOTE(review): writelogtxt is neither defined nor imported in this
        # file — confirm it is provided elsewhere before relying on this path.
        writelogtxt(str(snuid_values))
    if re.search('请输入图中的验证码', req.text) is not None:
        # Captcha page — Sogou has flagged this proxy; drop it from the pool.
        print(f'----X----X----X----X----Runtime--验证码异常结束')
        delete_proxy(proxy)
    else:
        # Proxy is still usable for producing SNUIDs.
        print(f'----S----S----S----S----Runtime--可用ip')
    print(req.text)
    req.close()
    return cookies
# for i in range(1200):
#     proxy = get_proxy()
#     print(proxy)
#     try:
#         print(get_one_cookie(proxy))
#     except:
#         delete_proxy(proxy)
#         print("删除proxy")
#         continue
def run(proxy):
    """Try to obtain cookies through *proxy*; on failure, drop it from the pool.

    Uses ``except Exception`` rather than the original bare ``except``, which
    also swallowed SystemExit/KeyboardInterrupt and made the pool unstoppable.
    """
    try:
        print(get_one_cookie(proxy))
    except Exception:
        delete_proxy(proxy)
        print("删除proxy")
def ThreadLine():
    """Fan out one worker per pooled proxy on a thread pool (I/O-bound work)."""
    with futures.ThreadPoolExecutor(max_workers=90) as executor:
        for proxy in get_allproxy():
            print(f'{proxy}-------RUNTIME')
            # Submit run() rather than get_one_cookie() directly: run() catches
            # failures and deletes the bad proxy, whereas exceptions raised in
            # a bare submit would vanish inside the unread Future objects.
            executor.submit(run, proxy)

# Script entry point: harvest cookies through every proxy in the pool.
if __name__ == '__main__':
    ThreadLine()