# Third-party packages
import aiohttp
import asyncio
import datetime
from http.cookiejar import CookieJar
import json
import requests
import sys
import urllib3

# Local modules
from BrowserCookiesGetter import BrowserCookiesGetter
import PackagedInfo
import PublicConstant

urllib3.disable_warnings()


class PixivAPI:
    """Thin wrapper around a handful of pixiv.net ajax endpoints.

    Every network-facing method retries forever on failure (the original
    design), printing the error to stderr between attempts.  Per-request
    timeouts keep a single hung connection from blocking a retry loop
    indefinitely.
    """

    def __init__(self, custom_proxies: dict | None, custom_cookies: CookieJar | None):
        """Store proxy mapping (requests-style ``{'http': url, 'https': url}``)
        and a cookie jar.  Cookies are optional but some endpoints
        (e.g. search) may return reduced results without them.
        """
        self.custom_proxies = custom_proxies
        if custom_cookies is None:
            print('Cookies not set. Some functions may not work properly.',
                  file=sys.stderr)
        self.custom_cookies = custom_cookies

    def _aiohttp_proxy(self) -> str | None:
        # aiohttp takes a single proxy URL (or None), unlike requests'
        # scheme->url mapping.  Passing '' would make aiohttp try to parse
        # an empty URL, so return None when no proxy is configured.
        return self.custom_proxies['http'] if self.custom_proxies else None

    def _pack_pic_info(self, pic_id: int, thumb_url: str | None,
                       details: dict) -> PackagedInfo.PackagedPicInfo:
        """Build a PackagedPicInfo from an ``illust_details`` json node.

        Shared by the sync and async fetchers so the field mapping cannot
        drift between them.
        """
        return PackagedInfo.PackagedPicInfo(
            _id=pic_id,
            # Prefer the caller-supplied thumbnail (250x250 from search)
            # over the endpoint's 128x128 one.
            _thumbnail_url=details['url'] if thumb_url is None else thumb_url,
            _is_r18=details['x_restrict'] != '0',  # '0' means safe-for-work
            _like_num=details['bookmark_user_total'],
            _upload_date=details['upload_timestamp'],  # unix timestamp
            _is_ai=(details['ai_type'] == 2)  # 2 == AI-generated
        )

    def get_page_info(
            self, search_keyword: str, page_index: int,
            ecd: datetime.datetime | None
    ) -> PackagedInfo.PackagedPageInfo:
        """Fetch one page (up to 60 artworks) of search results.

        :param search_keyword: tag / keyword to search for
        :param page_index: 1-based page number
        :param ecd: optional upper bound on creation date (inclusive)
        :return: packaged page info with ids, thumbnail urls, create dates
                 and the total hit count
        """
        while True:
            try:
                '''
                https://www.pixiv.net/ajax/search/artworks/ドーラ?
                order=date_d&mode=all&scd=2000-01-01&ecd=2022-11-25&
                p=1&s_mode=s_tag_full&type=all&lang=zh
                '''
                my_params = {'p': page_index}
                if ecd is not None:
                    # The server treats ecd as exclusive of in-flight
                    # timezone offsets, so widen by one day; results may
                    # overlap the previous window.
                    ecd_plus_one = ecd + datetime.timedelta(days=1)
                    my_params['ecd'] = f'{ecd_plus_one.year}-{ecd_plus_one.month:02d}-{ecd_plus_one.day:02d}'
                r = requests.get(
                    url=f'https://www.pixiv.net/ajax/search/artworks/{search_keyword}',
                    proxies=self.custom_proxies,
                    headers=PublicConstant.headers,
                    params=my_params,
                    cookies=self.custom_cookies,
                    timeout=10,  # avoid hanging forever inside the retry loop
                    verify=False
                )
                page_info: dict = json.loads(r.content)
                '''
                page_info
                | -- body
                | -- | -- illustManga
                | -- | -- | -- data [           /* up to 60 artworks */
                | -- | -- | -- | -- id
                | -- | -- | -- | -- url         /* thumbnail, 250x250 */
                | -- | -- | -- | -- createDate  /* 'createDate': '2022-11-04T13:40:35+09:00' */
                | -- | -- | -- ]
                | -- | -- | -- total /* total number of artworks */
                '''
                rebased_root: dict = page_info['body']['illustManga']
                pic_data_list: list[PackagedInfo.PackagedPageItemInfo] = []
                for each in rebased_root['data']:
                    # Some entries (ads/placeholders) carry no id; skip them.
                    if not each.get('id', False):
                        print(f'Found no id in {each}', file=sys.stderr)
                        continue
                    pic_data_list.append(PackagedInfo.PackagedPageItemInfo(
                        _id=each['id'],
                        _thumbnail_url=each['url'],
                        _create_date=datetime.datetime.fromisoformat(
                            each['createDate'])
                    ))
                return PackagedInfo.PackagedPageInfo(
                    _search_keyword=search_keyword,
                    _page_index=page_index,
                    _artworks=pic_data_list,
                    _total=rebased_root['total']
                )
            except Exception as e:
                print(
                    f'Failed when executing function PixivAPI.get_page_info({type(e)}): {e}', file=sys.stderr)

    def get_pic_info(self, pic_id: int, thumb_url: str | None) -> PackagedInfo.PackagedPicInfo | None:
        """Fetch detail info for a single artwork (blocking).

        :param pic_id: the artwork id
        :param thumb_url: optional pre-known thumbnail url to keep instead
                          of the 128x128 one this endpoint returns
        """
        while True:
            try:
                r = requests.get(
                    url='https://www.pixiv.net/touch/ajax/illust/details',
                    proxies=self.custom_proxies,
                    headers=PublicConstant.headers,
                    params={'illust_id': pic_id},
                    timeout=3,
                    verify=False
                )
                pic_info: dict = json.loads(r.content)
                '''
                pic_info
                | -- body
                | -- | illust_details
                | -- | -- url                  /* url of the thumbnail, 128x128 */
                | -- | -- tags                 /* may include `R-18` or `R-18G` */
                | -- | -- bookmark_user_total  /* like_num */
                | -- | -- upload_timestamp     /* upload time, timestamp */
                | -- | -- ai_type              /* 2 for ai-created */
                | -- | -- x_restrict           /* '0' for safe */
                '''
                return self._pack_pic_info(
                    pic_id, thumb_url, pic_info['body']['illust_details'])
            except Exception as e:
                print(
                    f'Failed when executing function PixivAPI.get_pic_info({type(e)}): {e}',
                    file=sys.stderr)

    async def get_pic_info_async(
            self, session: aiohttp.ClientSession,
            pic_id: int, thumb_url: str | None, sema: asyncio.Semaphore) -> PackagedInfo.PackagedPicInfo | None:
        """Async variant of get_pic_info, concurrency-limited by ``sema``.

        ``async with sema`` guarantees the permit is released on every
        path; the original manual acquire/release leaked a permit when a
        proxy error coincided with pixiv being reachable, eventually
        deadlocking the semaphore pool.
        """
        while True:
            async with sema:
                try:
                    async with session.get(
                            'https://www.pixiv.net/touch/ajax/illust/details',
                            proxy=self._aiohttp_proxy(),
                            params={'illust_id': pic_id},
                            # positional arg is ClientTimeout.total
                            timeout=aiohttp.ClientTimeout(PublicConstant.aiohttp_timeout),
                            ssl=False) as response:
                        pic_info: dict = json.loads(await response.read())
                        '''
                        pic_info
                        | -- body
                        | -- | illust_details
                        | -- | -- url                  /* url of the thumbnail, 128x128 */
                        | -- | -- tags                 /* may include `R-18` or `R-18G` */
                        | -- | -- bookmark_user_total  /* like_num */
                        | -- | -- upload_timestamp     /* upload time, timestamp */
                        | -- | -- ai_type              /* 2 for ai-created */
                        | -- | -- x_restrict           /* '0' for safe */
                        '''
                        return self._pack_pic_info(
                            pic_id, thumb_url, pic_info['body']['illust_details'])
                except (aiohttp.ClientProxyConnectionError, aiohttp.ClientHttpProxyError):
                    # NOTE(review): this blocking requests.get stalls the
                    # event loop while it runs; kept for behavior parity.
                    # The timeout bounds how long the loop can be stalled.
                    if requests.get('https://www.pixiv.net',
                                    proxies=self.custom_proxies,
                                    timeout=10).status_code != 200:
                        print('! PROXY ERROR ! Cannot connect to pixiv.net through the proxy!',
                              file=sys.stderr)
                except Exception as e:
                    print(
                        f'Failed when executing function PixivAPI.get_pic_info_async({type(e)}): {e}',
                        file=sys.stderr
                    )

    def get_pic_thumbnail(self, thumbnail_url: str) -> bytes:
        """Download a thumbnail image (blocking) and return its raw bytes."""
        while True:
            try:
                return requests.get(
                    url=thumbnail_url,
                    proxies=self.custom_proxies,
                    headers=PublicConstant.headers,
                    timeout=10,  # avoid hanging forever inside the retry loop
                    verify=False
                ).content
            except Exception as e:
                # Message format aligned with the other methods.
                print(f'Failed when executing function PixivAPI.get_pic_thumbnail({type(e)}): {e}',
                      file=sys.stderr)

    async def get_pic_thumbnail_async(self, thumbnail_url: str, session: aiohttp.ClientSession) -> bytes:
        """Async variant of get_pic_thumbnail."""
        while True:
            try:
                async with session.get(
                    thumbnail_url,
                    headers=PublicConstant.headers,
                    proxy=self._aiohttp_proxy(),
                    ssl=False
                ) as res:
                    return await res.read()
            except Exception as e:
                print(
                    f'Failed when executing function PixivAPI.get_pic_thumbnail_async({type(e)}): {e}', file=sys.stderr
                )


if __name__ == '__main__':
    # Smoke test: requires a local proxy on 127.0.0.1:20171 and a browser
    # with pixiv cookies.
    cookies_getter = BrowserCookiesGetter()
    my_cookies = cookies_getter.get_cookies_automatically_from_browser()
    # `assert` is stripped under `python -O`; fail explicitly instead.
    if my_cookies is None:
        sys.exit('Could not obtain pixiv cookies from any browser.')

    api = PixivAPI(
        {'http': 'http://127.0.0.1:20171', 'https': 'http://127.0.0.1:20171'},
        my_cookies
    )

    page_test = api.get_page_info(
        'ドーラ(にじさんじSEEDs)',
        1,
        None
    )
    print(page_test)
    # id = 95808388 for not-r18, 100712258 for r18, 102477979 for ai-created
    # pic_test = api.get_pic_info('102502656')
    # print(pic_test)
