import asyncio
import json
import sys

import aiohttp as aiohttp
import cchardet
import urllib3
from loguru import logger

from com.arcfox.source.cookie_jar import MyCookieJar

# Silence urllib3's InsecureRequestWarning — requests in this module run with ssl=False.
urllib3.disable_warnings()
if sys.platform == 'win32':
    # NOTE(review): selector loop forced on Windows — presumably for aiohttp
    # compatibility with the default Proactor loop; confirm before removing.
    asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())


class Response:
    """Lightweight container for a downloaded HTTP response."""

    def __init__(self, code, response, url):
        self.response = response      # body: decoded str, or raw bytes for binary downloads
        self.code = code              # HTTP status, or -100 (timeout) / -200 (other failure)
        self.download_url = url       # URL the content was actually fetched from

    def json(self):
        """Parse the stored body as JSON and return the resulting object."""
        return json.loads(self.response)


async def session():
    """Build and return a new aiohttp ClientSession whose connector skips SSL verification."""
    connector = aiohttp.TCPConnector(ssl=False)
    return aiohttp.ClientSession(connector=connector)


async def get_with_session(url=None, params=None, headers=None, cookies=None, proxy=None, binary=False,
                           timeout=7):
    '''
    GET with a freshly created session that is closed automatically afterwards.
    :param url: request URL
    :param params: query-string parameters
    :param headers: request headers
    :param cookies: request cookies
    :param proxy: proxy URL
    :param binary: True to keep the raw bytes instead of decoding to str
    :param timeout: request timeout in seconds
    :return: Response object produced by get()
    '''
    async with aiohttp.ClientSession() as fresh_session:
        return await get(
            session=fresh_session,
            url=url,
            params=params,
            headers=headers,
            cookies=cookies,
            proxy=proxy,
            binary=binary,
            timeout=timeout,
        )


async def post_with_session(url=None, headers=None, data=None, proxy=None, binary=False, timeout=7):
    '''
    POST with a freshly created session that is closed automatically afterwards.
    :param url: request URL
    :param headers: request headers
    :param data: POST payload
    :param proxy: proxy URL
    :param binary: True to keep the raw bytes instead of decoding to str
    :param timeout: request timeout in seconds
    :return: Response object produced by post()
    '''
    async with aiohttp.ClientSession() as fresh_session:
        return await post(
            session=fresh_session,
            url=url,
            headers=headers,
            data=data,
            proxy=proxy,
            binary=binary,
            timeout=timeout,
        )


async def get(session=None, url=None, params=None, headers=None, cookies=None, proxy=None, binary=False,
              timeout=7):
    '''
    GET an HTML page (or a binary resource) through an existing session.
    :param session: aiohttp ClientSession used to issue the request
    :param url: request URL (str or URL)
    :param params: query-string parameters
    :param headers: request headers
    :param cookies: request cookies
    :param proxy: proxy URL
    :param binary: True to keep the raw bytes (e.g. images); default False decodes to str
    :param timeout: request timeout in seconds
    :return: Response(status, body, final url); status is -100 on timeout, -200 on any other error
    '''
    try:
        async with session.get(url, params=params, headers=headers, cookies=cookies, proxy=proxy,
                               timeout=timeout, ssl=False) as response:
            status = response.status
            html = await response.read()  # raw response body as bytes
            if not binary:
                # Not a binary download: sniff the charset and decode to str.
                encoding = cchardet.detect(html)['encoding']
                if encoding:
                    html = html.decode(encoding, errors='ignore')
            download_url = str(response.url)  # final URL after any redirects
    except asyncio.TimeoutError:
        # Fix: don't pass the exception as a positional arg — loguru would treat it
        # as a str.format argument (the message has no placeholder for it), and
        # logger.exception already records the active traceback.
        logger.exception(f"请求超时: {url}")
        html = ''
        status = -100  # sentinel: timed out
        download_url = url
    except Exception:
        logger.exception(f"下载失败: {url}")
        html = ''
        status = -200  # sentinel: any other download failure
        download_url = url
    return Response(status, html, download_url)


async def post(session=None, url=None, headers=None, data=None, proxy=None, binary=False, timeout=7):
    '''
    POST to a URL through an existing session and return the response page.
    :param session: aiohttp ClientSession used to issue the request
    :param url: request URL (str)
    :param headers: request headers
    :param data: POST payload
    :param proxy: proxy URL
    :param binary: True to keep the raw bytes (e.g. images); default False decodes to str
    :param timeout: request timeout in seconds
    :return: Response(status, body, final url); status is -100 on timeout, -200 on any other error
    '''
    try:
        async with session.post(url, headers=headers, data=data, proxy=proxy, timeout=timeout,
                                ssl=False) as response:
            status = response.status
            html = await response.read()  # raw response body as bytes
            if not binary:
                # Not a binary download: sniff the charset and decode to str.
                encoding = cchardet.detect(html)['encoding']
                if encoding:
                    html = html.decode(encoding, errors='ignore')
            download_url = str(response.url)  # final URL after any redirects
    except asyncio.TimeoutError:
        logger.exception(f"请求超时: {url}")
        html = ''
        status = -100  # sentinel: timed out
        download_url = url
    except Exception:
        # Fix: don't pass the exception as a positional arg — loguru would treat it
        # as a str.format argument (the message has no placeholder for it), and
        # logger.exception already records the active traceback.
        logger.exception(f"下载失败: {url}")
        html = ''
        status = -200  # sentinel: any other download failure
        download_url = url
    return Response(status, html, download_url)
