import random
import time
import traceback

import requests
from requests.adapters import HTTPAdapter
from requests_file import FileAdapter
from requests_html import HTMLSession
import asyncio

from common.request import ippool
from common.utils import timeutil


def get_default_header(h=None):
    """Return browser-like default HTTP headers merged with *h*.

    Args:
        h: optional dict of extra headers (e.g. ``{'Host': ..., 'Referer': ...}``);
           entries in *h* override the defaults. ``None`` (new default) means
           "no extras" — previously passing nothing raised a TypeError.

    Returns:
        A new dict of headers; the defaults are never mutated across calls.
    """
    headers = {
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
        'Accept-Encoding': 'gzip,deflate,sdch,br',
        'Accept-Language': 'zh-CN,zh;q=0.8,en;q=0.6,zh-TW;q=0.4',
        'Cache-Control': 'no-cache',
        'Connection': 'keep-alive',
        'Pragma': 'no-cache',
        'Upgrade-Insecure-Requests': '1',
        'User-Agent': 'Mozilla/5.0(WindowsNT6.1;WOW64)AppleWebKit/537.36(KHTML,likeGecko)Chrome/55.0.2883.87Safari/537.36'
    }
    if h:
        headers.update(h)
    return headers


def randomheader():
    """Build a headers dict carrying a User-Agent chosen at random from a pool."""
    ua_pool = [
        "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/22.0.1207.1 Safari/537.1",
    ]
    return {'User-Agent': random.choice(ua_pool)}


def get_session():
    """Create a fresh requests-html session; the caller owns and must close it."""
    return HTMLSession()


def get(url, max_retries=0, timeout=2, keep_session=None, **kwargs):
    """GET *url* through a requests-html session.

    Args:
        url: target URL.
        max_retries: retry count installed on the http/https adapters.
        timeout: per-request timeout in seconds.
        keep_session: optional existing session to reuse. When given, cookies
            set by the response are merged back into it and it is left OPEN
            for the caller; otherwise a throwaway session is created and
            always closed, even on error (the original leaked it on failure).
        **kwargs: passed through to ``session.get`` (proxies, verify, ...).

    Returns:
        The response object.
    """
    owns_session = not keep_session
    session = HTMLSession() if owns_session else keep_session
    try:
        session.mount('http://', HTTPAdapter(max_retries=max_retries))
        session.mount('https://', HTTPAdapter(max_retries=max_retries))
        response = session.get(url, timeout=timeout, **kwargs)
        if not owns_session:
            # Persist any cookies the server set onto the long-lived session.
            cookie_dict = requests.utils.dict_from_cookiejar(response.cookies)
            jar = requests.utils.cookiejar_from_dict(cookie_dict, cookiejar=None, overwrite=True)
            if len(jar) > 0:
                session.cookies.update(jar)
        return response
    finally:
        # Only close sessions we created here; a caller-supplied session is
        # the caller's responsibility.
        if owns_session:
            session.close()


def get_local(path):
    """Load a local file through requests-html via the ``file://`` adapter.

    Args:
        path: absolute filesystem path (without the ``file://`` scheme).

    Returns:
        The response for ``file:///<path>``.
    """
    session = HTMLSession()
    session.mount('file://', FileAdapter())
    try:
        # Plain string concat — the original used an f-string with no placeholders.
        return session.get('file:///' + path)
    finally:
        # Close even if the read raises (the original leaked the session on error).
        session.close()


def is_proxy_exception(proxy=None):
    """Guess from the active exception's traceback whether a proxy caused it.

    Args:
        proxy: optional proxy dict; when given, its ``server_url`` appearing
            in the traceback text also counts as a proxy failure.

    Returns:
        True if the current exception text looks proxy-related, else False.
    """
    trace_text = traceback.format_exc()
    markers = ("proxy", "Proxy", "代理错误")
    if any(marker in trace_text for marker in markers):
        return True
    return bool(proxy and proxy["server_url"] in trace_text)


def get_by_proxy(url, timeout=8, max_retries=0, **kwargs):
    """GET *url* through a proxy drawn from the shared ip pool.

    On a proxy-related failure the bad proxy is evicted from the pool and an
    ``Exception`` prefixed with "代理错误" is raised; any other error
    propagates unchanged.

    Args:
        url: target URL.
        timeout: per-request timeout in seconds.
        max_retries: retry count forwarded to :func:`get`.
        **kwargs: passed through to :func:`get`.

    Returns:
        The response object from :func:`get`.
    """
    proxy = ippool.get_proxy()
    try:
        return get(url, timeout=timeout, max_retries=max_retries, proxies=proxy, **kwargs)
    except Exception as e:
        if is_proxy_exception(proxy):
            ippool.remove_bad_proxy(proxy)
            # Chain the original error so the real cause stays in the traceback.
            raise Exception("代理错误" + str(e)) from e
        raise


def cookie_dict_from_str(cookie_strs):
    """Parse a ``"k=v; k2=v2"`` Cookie-header string into a dict.

    Splits each entry on the FIRST '=' only, so values that themselves
    contain '=' (base64 padding, JWT-style auth tokens — common in the
    cookie strings this module handles) are kept intact; the original
    ``split("=")`` truncated them at the first '='. Entries without '='
    map to "".

    Args:
        cookie_strs: raw cookie string, entries separated by ';'.

    Returns:
        dict mapping cookie name to value (both stripped of spaces).
    """
    cookies_dict = {}
    for item in cookie_strs.split(";"):
        if not item.strip():
            continue
        name, sep, value = item.partition("=")
        cookies_dict[name.strip(" ")] = value.strip(" ") if sep else ""
    return cookies_dict

#[{'domain': 'www.sudai9.com', 'expiry': 1569313050.337803, 'httpOnly': False, 'name': 'page_num', 'path': '/', 'secure': False, 'value': '10'}, {'domain': 'www.sudai9.com', 'expiry': 1569248249.740175, 'httpOnly': False, 'name': 'kefuname', 'path': '/', 'secure': False, 'value': '%E6%9D%8E%E6%98%8E%E7%9C%9F'}, {'domain': 'www.sudai9.com', 'expiry': 1569248249.740222, 'httpOnly': False, 'name': 'kefucityid', 'path': '/', 'secure': False, 'value': '3101'}, {'domain': 'www.sudai9.com', 'expiry': 1569248249.740145, 'httpOnly': False, 'name': 'account', 'path': '/', 'secure': False, 'value': '13003230983'}, {'domain': 'www.sudai9.com', 'expiry': 1569248249.74012, 'httpOnly': False, 'name': 'usertype', 'path': '/', 'secure': False, 'value': '3'}, {'domain': 'www.sudai9.com', 'expiry': 1569946648.597585, 'httpOnly': False, 'name': 'ci_session', 'path': '/', 'secure': False, 'value': 'a%3A5%3A%7Bs%3A10%3A%22session_id%22%3Bs%3A32%3A%2259e97055a49e358a487808172aab4e15%22%3Bs%3A10%3A%22ip_address%22%3Bs%3A13%3A%22101.81.64.189%22%3Bs%3A10%3A%22user_agent%22%3Bs%3A114%3A%22Mozilla%2F5.0+%28Windows+NT+10.0%3B+Win64%3B+x64%29+AppleWebKit%2F537.36+%28KHTML%2C+like+Gecko%29+Chrome%2F77.0.3865.90+Safari%2F537.36%22%3Bs%3A13%3A%22last_activity%22%3Bi%3A1569226652%3Bs%3A9%3A%22user_data%22%3Bs%3A0%3A%22%22%3B%7Db8f32e408022f27ed932953a73f60ecff9adb17d'}, {'domain': 'www.sudai9.com', 'expiry': 1569248249.740093, 'httpOnly': False, 'name': 'kefuid', 'path': '/', 'secure': False, 'value': 'LNVizonP6AL0RVRIMBTjVA%3D%3D'}, {'domain': 'www.sudai9.com', 'expiry': 1569248249.7402, 'httpOnly': False, 'name': 'kefucity', 'path': '/', 'secure': False, 'value': '%E4%B8%8A%E6%B5%B7'}, {'domain': 'www.sudai9.com', 'expiry': 1569248249.740066, 'httpOnly': False, 'name': 'father_kefuid', 'path': '/', 'secure': False, 'value': '130873'}, {'domain': 'www.sudai9.com', 'expiry': 1569313049.74004, 'httpOnly': False, 'name': 'brand2', 'path': '/', 'secure': False, 'value': '30'}, {'domain': 
# 'www.sudai9.com', 'expiry': 1569248249.739994, 'httpOnly': False, 'name': 'laurence', 'path': '/', 'secure': False, 'value': '0141d3d0774b46b340f55d31b320dab3'}]

def cookie_dict_from_list(cookie_list):
    """Collapse browser-style cookie dicts (see sample above) into {name: value}."""
    return {entry["name"]: entry["value"] for entry in cookie_list}


if __name__ == '__main__':
    # Ad-hoc smoke test: fetch a qichacha company page through a local
    # debugging proxy, render the JS, then drive the pager via pyppeteer.
    # NOTE(review): depends on a proxy listening on 127.0.0.1:8888 and on
    # the hard-coded cookies below, which are long expired — refresh both
    # before running.
    url = "https://www.qichacha.com/firm_1a99c4c9a28a9bd19ba99e688e9e8ac7.html"
    session = HTMLSession()
    # Seed the session with a captured login cookie string (tianyancha/qichacha
    # auth tokens etc.), parsed into a plain dict.
    cookies = cookie_dict_from_str(
        "aliyungf_tc=AQAAABAJ32TLPQ4A9k33dMk4fhS8G1Bm; ssuid=4220356200; csrfToken=pcenZMQMg1ISp7BnnS0HkAx7; TYCID=e9a5a90092fd11e9bdfb51a17a08499c; undefined=e9a5a90092fd11e9bdfb51a17a08499c; jsid=SEM-BAIDU-PP-VI-001065; tyc-user-info=%257B%2522claimEditPoint%2522%253A%25220%2522%252C%2522myAnswerCount%2522%253A%25220%2522%252C%2522myQuestionCount%2522%253A%25220%2522%252C%2522signUp%2522%253A%25220%2522%252C%2522explainPoint%2522%253A%25220%2522%252C%2522privateMessagePointWeb%2522%253A%25220%2522%252C%2522nickname%2522%253A%2522%25E6%2589%25B6%25E8%258B%258F%2522%252C%2522integrity%2522%253A%25220%2525%2522%252C%2522privateMessagePoint%2522%253A%25220%2522%252C%2522state%2522%253A0%252C%2522announcementPoint%2522%253A%25220%2522%252C%2522isClaim%2522%253A%25220%2522%252C%2522vipManager%2522%253A%25220%2522%252C%2522discussCommendCount%2522%253A%25220%2522%252C%2522monitorUnreadCount%2522%253A%25220%2522%252C%2522onum%2522%253A%25220%2522%252C%2522claimPoint%2522%253A%25220%2522%252C%2522token%2522%253A%2522eyJhbGciOiJIUzUxMiJ9.eyJzdWIiOiIxMzkxNjg2ODY0OSIsImlhdCI6MTU2MDk5NjcyNywiZXhwIjoxNTkyNTMyNzI3fQ.tX3F8d4-r5SeGuONW9WVJK4zUshZSaYWkBYHvRWa3Fjqi6A3Moi6YmfsM89F5e8LaV7fj8w5413QJcQGDoTRCQ%2522%252C%2522pleaseAnswerCount%2522%253A%25220%2522%252C%2522redPoint%2522%253A%25220%2522%252C%2522bizCardUnread%2522%253A%25220%2522%252C%2522vnum%2522%253A%25220%2522%252C%2522mobile%2522%253A%252213916868649%2522%257D; auth_token=eyJhbGciOiJIUzUxMiJ9.eyJzdWIiOiIxMzkxNjg2ODY0OSIsImlhdCI6MTU2MDk5NjcyNywiZXhwIjoxNTkyNTMyNzI3fQ.tX3F8d4-r5SeGuONW9WVJK4zUshZSaYWkBYHvRWa3Fjqi6A3Moi6YmfsM89F5e8LaV7fj8w5413QJcQGDoTRCQ; bannerFlag=undefined; Hm_lvt_e92c8d65d92d534b0fc290df538b4758=1560836752,1560995484,1560999149,1561094271; Hm_lpvt_e92c8d65d92d534b0fc290df538b4758=1561094271; _ga=GA1.2.893090315.1560995484; _gid=GA1.2.2118363815.1560995484; _gat_gtag_UA_123487620_1=1")
    session.cookies.update(cookies)

    session.mount('http://', HTTPAdapter())
    session.mount('https://', HTTPAdapter())
    # Route through a local HTTP debugging proxy (Fiddler/Charles style);
    # verify=False because such proxies re-sign TLS with their own cert.
    proxies = {'http': 'http://127.0.0.1:8888', 'https': 'http://127.0.0.1:8888'}
    response = session.get(url, timeout=5, proxies=proxies, verify=False)

    dom = response.html
    # keep_page=True leaves the headless-browser page open so it can be
    # driven with pyppeteer calls below.
    dom.render(keep_page=True)


    async def research(page):
        # Click the pager element to load the second results page.
        await page.click('#ajaxpage[text="2"]')


    asyncio.get_event_loop().run_until_complete(research(dom.page))


    async def close(page):
        # Helper kept for manual use: closes the headless-browser page.
        await page.close()


    # asyncio.get_event_loop().run_until_complete(close(dom.page))
    print(response.text)
    print(timeutil.datetime_to_ymdhms())
