import functools
import logging
import shutil
import time
import typing as t
from dataclasses import dataclass
from pathlib import Path
import tqdm

import pandas as pd
import requests

# Root-logger setup: timestamped records that include level, function and line.
logging.basicConfig(
    format="[%(asctime)s] [%(levelname)s] [%(funcName)s:%(lineno)d] - %(message)s",
    datefmt="%Y-%m-%d %H:%M:%S",
    level=logging.INFO,
)

# Module-level logger named after this module, per the standard convention.
logger = logging.getLogger(__name__)


class LimitedError(Exception):
    """Raised when the remote API reports a failure (e.g. rate limiting)."""


def re_try(max_retries=3, delay=300):
    """Decorator factory: retry a callable when it raises ``LimitedError``.

    The wrapped function is attempted up to ``max_retries`` times in total;
    after the final failed attempt the ``LimitedError`` is re-raised.

    Args:
        max_retries: total number of attempts before giving up.
        delay: seconds to sleep between attempts (long by default, since the
            API failure is presumably rate limiting — TODO confirm).
    """

    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            retries = 0
            while retries < max_retries:
                try:
                    return func(*args, **kwargs)
                except LimitedError:
                    retries += 1
                    # Lazy %-args: the message is only formatted if this
                    # record is actually emitted (was an eager f-string).
                    logging.info(
                        "call %s retry %d/%d after exception.",
                        func.__name__,
                        retries,
                        max_retries,
                    )
                    if retries >= max_retries:
                        raise
                    time.sleep(delay)

        return wrapper

    return decorator


@dataclass
class Spider:
    """Scraper for the lingxigames item-data tables.

    For every named table in ``APIS`` it lists all row ids page by page,
    fetches each row's detail record, and writes one CSV per table under
    ``<work_path>/results``.
    """

    # Game and endpoint identifiers for every API call.
    GAME_ID: t.ClassVar[int] = 10000100
    URL: t.ClassVar[str] = "https://galaxias-api.lingxigames.com/ds/ajax/endpoint.json"
    # Browser-like request headers, sent with every API call.
    HEADERS: t.ClassVar[dict] = {
        "Host": "galaxias-api.lingxigames.com",
        "Connection": "keep-alive",
        "accept": "application/json, text/plain, */*",
        "accept-encoding": "gzip, deflate, br, zstd",
        "accept-language": "zh-CN,zh;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6",
        "content-type": "application/json",
        "origin": "https://sgzzlb.lingxigames.com",
        "referer": "https://sgzzlb.lingxigames.com/",
        "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/129.0.0.0 Safari/537.36 Edg/129.0.0.0",
        "sec-ch-ua": '"Microsoft Edge";v="129", "Not=A?Brand";v="8", "Chromium";v="129"',
        "sec-ch-ua-mobile": "?0",
        "sec-ch-ua-platform": '"Windows"',
        "sec-fetch-dest": "empty",
        "sec-fetch-mode": "cors",
        "sec-fetch-site": "same-site",
        "Upgrade-Insecure-Requests": "1",
        "Pragma": "no-cache",
        "Cache-Control": "no-cache",
    }
    # Each entry maps an output file name to its table id and category ids.
    APIS: t.ClassVar[dict] = {
        "wjk": {
            "tb_id": "350659087930767364",
            "category_ids": "350958444404029449,350958444404029450,350958444404029451,350958444404029452",
        },
        "zfk": {
            "tb_id": "350659307657771017",
            "category_ids": "352052655832446980,352052655832446981",
        },
        "zbk": {
            "tb_id": "350659411548294145",
            "category_ids": "352054216407136265,352054216407136266,352054216407136267,352054216407136268",
        },
        "bzk": {
            "tb_id": "553976671589912577",
            "category_ids": "553977003162226697,553977003162226698,553977003162226699",
        },
    }
    # Rows requested per listing page; also used to detect the last page.
    PAGE_SIZE: t.ClassVar[int] = 20
    # Seconds before a hung HTTP request is aborted instead of blocking forever.
    TIMEOUT: t.ClassVar[int] = 60
    # Output root; results are written to <work_path>/results.
    work_path: Path = Path(".")

    def __post_init__(self):
        # Resolve the output directory once, up front.
        self.work_path = Path(self.work_path).joinpath("results").absolute()

    def start(self):
        """Scrape every configured table and write one CSV per table."""
        logger.info("init work path %s", self.work_path)
        self.init_work_path()

        for name, api_info in self.APIS.items():
            tb_id = api_info["tb_id"]

            details = self.get_details(**api_info)

            data = []
            # Iterating the materialized list (not a generator) lets tqdm
            # show a total and an ETA.
            with tqdm.tqdm(details, desc=name) as bar:
                for detail in bar:
                    data.append(self.get_detail(tb_id=tb_id, row_id=detail["priRowId"]))

            df = pd.DataFrame(data)
            to = self.work_path / f"{name}.csv"
            # utf-8-sig adds a BOM so spreadsheet apps detect the encoding.
            df.to_csv(to, index=False, encoding="utf-8-sig")

    def init_work_path(self):
        """(Re)create the output directory, discarding any previous results."""
        if self.work_path.is_dir():
            shutil.rmtree(self.work_path)

        self.work_path.mkdir(parents=True, exist_ok=True)

    def _post(self, api: str, params: dict) -> dict:
        """POST one API call and return its ``result`` payload.

        Raises:
            LimitedError: when the API reports failure, so ``re_try``-decorated
                callers back off and retry.
        """
        resp = requests.post(
            url=self.URL,
            # NOTE(review): HEADERS was declared but never sent before; the
            # browser-mimicking headers are now attached to every request.
            headers=self.HEADERS,
            json={"api": api, "params": params},
            # Without a timeout a stalled server would hang the scraper forever.
            timeout=self.TIMEOUT,
        )
        json_resp = resp.json()

        if not json_resp["success"]:
            raise LimitedError(f"{json_resp['resultCode']} {json_resp['resultDesc']}")

        return json_resp["result"]

    @re_try()
    def get_details_by_page(self, tb_id: str, category_ids: str, page: int):
        """Fetch one listing page; returns ``(items, totalCount)``."""
        result = self._post(
            "/api/l/owresource/getQueryDataInfoListByCategory",
            {
                "gameId": self.GAME_ID,
                "tbId": tb_id,
                "categoryIds": category_ids,
                "page": page,
                "size": self.PAGE_SIZE,
            },
        )
        return result["items"], result["totalCount"]

    def get_details(self, tb_id: str, category_ids: str):
        """Collect the row listings of every page for one table."""
        all_results = []
        page = 0
        while True:
            items, count = self.get_details_by_page(
                tb_id=tb_id, category_ids=category_ids, page=page
            )
            all_results.extend(items)

            # >= (not >) stops on the exact last page; the original issued one
            # extra empty request whenever count was a multiple of the page size.
            if (page + 1) * self.PAGE_SIZE >= count:
                break

            page += 1

        return all_results

    @re_try()
    def get_detail(self, tb_id: str, row_id: str):
        """Fetch one row's detail record, flattened into a single dict."""
        result = self._post(
            "/api/l/owresource/getQueryDataInfoByPriId",
            {
                "gameId": self.GAME_ID,
                "tbId": tb_id,
                "priRowId": row_id,
            },
        )
        item = result["items"][0]
        return {
            "table": item["priKey"],
            "name": item["priVal"],
            **{detail["key"]: detail["val"] for detail in item["details"]},
        }


if __name__ == "__main__":
    Spider().start()
