# -*- coding: UTF-8 -*-
import asyncio
import typing

import httpx
from httpx._client import USE_CLIENT_DEFAULT, UseClientDefault
from httpx._types import (
    AuthTypes,
    CookieTypes,
    HeaderTypes,
    QueryParamTypes,
    RequestContent,
    RequestData,
    RequestFiles,
    TimeoutTypes,
    URLTypes,
)


# TODO: should the network request be more than just a helper package — e.g. also
#       a Celery task invoked via chained calls?

class HttpClient(object):
    """Async HTTP helper that reuses a single ``httpx.AsyncClient``.

    The shared client keeps a connection pool alive across requests, which is
    what makes batched calls fast.  Call :meth:`aclose` — or use the instance
    as an async context manager — so the pool is released when you are done.
    """

    def __init__(self):
        self.client = httpx.AsyncClient()

    async def __aenter__(self):
        return self

    async def __aexit__(self, exc_type, exc, tb):
        await self.aclose()

    async def aclose(self):
        """Close the shared client and release its pooled connections."""
        await self.client.aclose()

    async def handle_get(
            self,
            url: URLTypes,
            *args,
            params: typing.Optional[QueryParamTypes] = None,
            headers: typing.Optional[HeaderTypes] = None,
            cookies: typing.Optional[CookieTypes] = None,
            auth: typing.Union[AuthTypes, UseClientDefault, None] = USE_CLIENT_DEFAULT,
            timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT,
            **kwargs
    ):
        """Send a GET request through the shared client.

        Returns:
            The JSON-decoded response body.

        Raises:
            RuntimeError: if the response status code is not 200.
        """
        r = await self.client.get(url, *args, params=params, headers=headers, cookies=cookies, auth=auth,
                                  timeout=timeout, **kwargs)
        # `assert` is stripped under `python -O`, so raise explicitly instead.
        if r.status_code != 200:
            raise RuntimeError(f"GET {url!r} returned status {r.status_code}")
        return r.json()

    async def handle_post(
            self,
            url: URLTypes,
            *args,
            content: typing.Optional[RequestContent] = None,
            data: typing.Optional[RequestData] = None,
            files: typing.Optional[RequestFiles] = None,
            json: typing.Optional[typing.Any] = None,
            params: typing.Optional[QueryParamTypes] = None,
            headers: typing.Optional[HeaderTypes] = None,
            cookies: typing.Optional[CookieTypes] = None,
            auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT,
            timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT,
            **kwargs
    ):
        """Send a POST request through the shared client.

        Returns:
            The JSON-decoded response body.

        Raises:
            RuntimeError: if the response status code is not 200.
        """
        r = await self.client.post(url, *args, content=content, data=data, files=files, json=json, params=params,
                                   headers=headers, cookies=cookies, auth=auth, timeout=timeout, **kwargs)
        # `assert` is stripped under `python -O`, so raise explicitly instead.
        if r.status_code != 200:
            raise RuntimeError(f"POST {url!r} returned status {r.status_code}")
        return r.json()

    async def handle_stream(self, method, url):
        """Stream a response and drain its body chunk by chunk.

        A dedicated client is used and properly closed on exit — the original
        version created a client that was never closed (connection-pool leak).
        """
        async with httpx.AsyncClient() as client:
            async with client.stream(method, url) as response:
                async for _ in response.aiter_bytes():
                    pass


async def handle_get(
        url: URLTypes,
        *args,
        params: typing.Optional[QueryParamTypes] = None,
        headers: typing.Optional[HeaderTypes] = None,
        cookies: typing.Optional[CookieTypes] = None,
        auth: typing.Union[AuthTypes, UseClientDefault, None] = USE_CLIENT_DEFAULT,
        timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT,
        **kwargs
):
    """Send a single GET request using a throwaway client.

    A fresh ``httpx.AsyncClient`` is created (and closed) per call, so
    concurrent calls do not share a connection pool — slower than reusing
    one client across requests (see ``handle_get1``).

    Returns:
        The JSON-decoded response body.

    Raises:
        RuntimeError: if the response status code is not 200.
    """
    async with httpx.AsyncClient() as client:
        r = await client.get(url, *args, params=params, headers=headers, cookies=cookies, auth=auth, timeout=timeout,
                             **kwargs)
        # `assert` is stripped under `python -O`, so raise explicitly instead.
        if r.status_code != 200:
            raise RuntimeError(f"GET {url!r} returned status {r.status_code}")
        return r.json()

async def handle_post(
        url: URLTypes,
        *args,
        content: typing.Optional[RequestContent] = None,
        data: typing.Optional[RequestData] = None,
        files: typing.Optional[RequestFiles] = None,
        json: typing.Optional[typing.Any] = None,
        params: typing.Optional[QueryParamTypes] = None,
        headers: typing.Optional[HeaderTypes] = None,
        cookies: typing.Optional[CookieTypes] = None,
        auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT,
        timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT,
        **kwargs
):
    """Send a single POST request using a throwaway client.

    A fresh ``httpx.AsyncClient`` is created (and closed) per call.

    Returns:
        The JSON-decoded response body.

    Raises:
        RuntimeError: if the response status code is not 200.
    """
    async with httpx.AsyncClient() as client:
        r = await client.post(url, *args, content=content, data=data, files=files, json=json, params=params,
                              headers=headers, cookies=cookies, auth=auth, timeout=timeout, **kwargs)
        # `assert` is stripped under `python -O`, so raise explicitly instead.
        if r.status_code != 200:
            raise RuntimeError(f"POST {url!r} returned status {r.status_code}")
        return r.json()


async def handle_get1(
        url: URLTypes,
        *args,
        params: typing.Optional[QueryParamTypes] = None,
        headers: typing.Optional[HeaderTypes] = None,
        cookies: typing.Optional[CookieTypes] = None,
        auth: typing.Union[AuthTypes, UseClientDefault, None] = USE_CLIENT_DEFAULT,
        timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT,
        repeat: int = 10,
        **kwargs
):
    """Send ``repeat`` sequential GET requests over one shared client.

    Reusing a single ``httpx.AsyncClient`` keeps the connection pool warm,
    which is why this is much faster than ``repeat`` calls to ``handle_get``.

    Args:
        repeat: number of requests to send (keyword-only; was hard-coded to 10).

    Returns:
        A list with the JSON-decoded body of each response.

    Raises:
        RuntimeError: if any response status code is not 200.
    """
    async with httpx.AsyncClient() as client:
        results = []
        for _ in range(repeat):
            r = await client.get(url, *args, params=params, headers=headers, cookies=cookies, auth=auth,
                                 timeout=timeout, **kwargs)
            # `assert` is stripped under `python -O`, so raise explicitly instead.
            if r.status_code != 200:
                raise RuntimeError(f"GET {url!r} returned status {r.status_code}")
            results.append(r.json())
        return results

async def handle_stream(method, url):
    """Stream a response and drain its body chunk by chunk.

    The client is opened with ``async with`` so it is always closed — the
    original version created a client that was never closed (connection-pool
    leak).
    """
    async with httpx.AsyncClient() as client:
        async with client.stream(method, url) as response:
            async for _ in response.aiter_bytes():
                pass


async def aa():
    """Fire 50 concurrent GETs, each through its own throwaway client."""
    coros = []
    for _ in range(50):
        coros.append(handle_get(url="http://www.baidu.com"))
    return await asyncio.gather(*coros)


async def bb():
    """Run 10 concurrent batches; each batch reuses one client for its requests."""
    coros = []
    for _ in range(10):
        coros.append(handle_get1(url="http://www.baidu.com"))
    return await asyncio.gather(*coros)


if __name__ == "__main__":
    import asyncio
    import time

    # NOTE: uvloop would make the event loop faster, but it does not support
    # Windows, so it is not enabled here.
    #
    # Benchmark observations (100 GET requests to baidu):
    #   - asyncio.run(handle_get1(url=...))          ~5-7 s  (one client, sequential)
    #   - 50x asyncio.run(handle_get(url=...))       ~15 s   (new loop + client per call)
    #   - asyncio.run(aa())                          ~3 s    (gather, client per request)
    #   - asyncio.run(bb())                          ~3 s    (gather, client per batch)
    start = time.time()
    asyncio.run(bb())
    print(time.time() - start)
    # FIXME: with gather + multiple clients (each client handling several
    # requests), 100 requests complete in roughly 3 seconds at best.
