import warnings
from pathlib import Path
import re
import json
import asyncio
from collections import defaultdict

#
import pydash
import pandas as pd
from humanfriendly import format_size

#
from webconn import *
from load_data import *

# Globally silence warnings so library noise does not clutter the script's log output.
warnings.filterwarnings("ignore")


def _get_general_platform(plaform: str):
    return (plaform).split(" ")[0] if " " in plaform else plaform


def _generate_detailed_version(version, mydate: datetime):
    version = "" if version in {"latest", ""} else version
    if type(mydate) is not str:
        dtstr = "" if pd.isna(mydate) else mydate.strftime("%Y-%m-%d")
    else:
        dtstr = mydate
    return ", ".join([i for i in (version, dtstr) if i])


def _calc_UIicon_path(UIfunc: str):
    return "/icons/{}.png".format(UIfunc)


def _trans_to_unique_list(iterable, *, key=None, reverse=False):
    dataset = set(iterable)
    dataset.discard("")
    return sorted(dataset, key=key, reverse=reverse)


def _calc_github_mirror_url(github_release_url: str):
    """Rewrite a GitHub release URL through the mirror configured in CONFIG.

    "github_download.url_mode" selects the strategy:
    "concat" prefixes the mirror host, "replace" swaps the github.com host;
    any other mode leaves the URL untouched.
    """
    mode = pydash.get(CONFIG, "github_download.url_mode")
    mirror_host = pydash.get(CONFIG, "github_download.host")
    if mode == "replace":
        return github_release_url.replace("https://github.com", mirror_host)
    if mode == "concat":
        return "{}/{}".format(mirror_host, github_release_url)
    return github_release_url


# ========组合前端信息========
def _generate_DL_release(record: dict, info_webfile: dict):
    """Build the release fragment for a direct-download ("download") record.

    :param record: spreadsheet row for this software release
    :param info_webfile: probed metadata of the linked file ("name"/"time"/"size"),
        or None when the probe failed
    :return: release fragment dict, or {} when the link looks dead
    """
    if info_webfile is None:
        # the metadata probe failed, so the direct link is probably broken
        WEB_LOGGER.warning("{} 的直链 {} 可能失效".format(record["name"], record["releaseUrl"]))
        return {}

    if record["version"] in {"latest", ""}:
        version = calc_version_from_filename(record["releasePattern"], info_webfile["name"])
    else:
        version = record["version"]

    file_size = info_webfile["size"]
    return {
        "version": _generate_detailed_version(version, info_webfile["time"]),
        "asset": {
            "url": record["releaseUrl"],
            "icon": _calc_UIicon_path("download"),
            "size": format_size(file_size, binary=True) if file_size > 0 else "",
        },
    }


def _generate_mirrorFTP_release(record: dict, release_infos: list):
    """Build the release fragment for a "mirrorFTP" record.

    :param record: spreadsheet row for this software release
    :param release_infos: release entries from the mirror, newest first; each
        needs "version", "time" (date string; text before the first space is used),
        "url" and "size"
    :return: release fragment dict, or {"valid": False} when nothing was found
    """
    if not release_infos:
        return {"valid": False}
    latest_release = release_infos[0]
    raw_version = latest_release["version"]
    # prefix a "v" if missing; guard against an empty version string, which
    # would have crashed the old `raw_version[0]` access with IndexError
    normalized_version = raw_version if (not raw_version or raw_version[0].lower() == "v") else "v" + raw_version
    return {
        "documentation": {
            "url": record["documentation"] or record["releaseUrl"],
            "icon": _calc_UIicon_path("documentation"),
        },
        "version": _generate_detailed_version(
            (normalized_version if record["version"] in {"latest", ""} else record["version"]),
            latest_release["time"].split(" ")[0],
        ),
        "asset": {
            "url": latest_release["url"],
            "icon": _calc_UIicon_path("download"),
            "size": format_size(latest_release["size"], binary=True) if latest_release["size"] > 0 else "",
        },
    }


def _generate_git_release(record: dict, releases_data: list):
    """Build the release fragment for a "github"/"gitee" record.

    :param record: spreadsheet row; releaseUrl points at the project page
    :param releases_data: release list from the forge API (index 0 is treated as latest)
    :return: release fragment dict, or {"valid": False} when no asset matches
    """
    pattern = record["releasePattern"]
    project_url = record["releaseUrl"]
    # BUG FIX: the old code called dict.get() with a re.Match object as the key,
    # so host_short was always None in the log messages below.  releaseType is
    # already "github" or "gitee" for records routed here, so use it directly.
    host_short = record["releaseType"]
    project = "/".join(project_url.split("/")[-2:])
    if not pattern:
        WEB_LOGGER.error("{}的 {} 存在忘记填写的pattern，请手动检查".format(host_short, project))
        return {"valid": False}

    if len(releases_data) == 0:
        WEB_LOGGER.warning("{}的 {} 没有releases，请手动检查".format(host_short, project))
        return {"valid": False}

    reobj = get_version_matcher(pattern)
    # first asset, scanning releases in order, whose filename matches the pattern
    asset: dict = next(
        (
            candidate
            for release in releases_data
            for candidate in release["assets"]
            if reobj.match(candidate["name"])
        ),
        None,
    )

    if asset is None:
        WEB_LOGGER.warning("{}的 {} 无法匹配到pattern【{}】，请手动检查".format(host_short, project, pattern))
        return {"valid": False}

    latest_release_time = datetime_from_isoz(releases_data[0]["time"])
    release_url = asset["browser_download_url"]
    version = calc_version_from_filename(pattern, asset["name"], releases_data[0]["tag_name"])

    if record["releaseType"] == "github":
        # rewrite through the configured GitHub mirror, if any
        release_url = _calc_github_mirror_url(release_url)

    return {
        "platform": record["platform"],
        "version": _generate_detailed_version(version, latest_release_time),
        "asset": {
            "url": release_url,
            "icon": _calc_UIicon_path("download"),
            "size": format_size(asset["size"], binary=True) if asset.get("size", 0) > 0 else "",
        },
        "gitProject": {"url": record["releaseUrl"], "icon": _calc_UIicon_path(record["releaseType"])},
    }


def _generate_software_release(record: dict, network_result=None):
    """Assemble the full frontend release entry for one record.

    Builds the fields shared by all release types, then overlays the
    type-specific fragment produced by the matching generator.

    :param record: spreadsheet row for this software release
    :param network_result: type-dependent network payload — releases_data for
        github/gitee, info_webfile for download, release_infos for mirrorFTP
    """
    release = {
        "documentation": {
            "url": record["documentation"],
            "icon": _calc_UIicon_path("home" if record["maintainer"] == "Official" else "documentation"),
        },
        "filePassword": record["filePassword"],
        "sharePassword": record["sharePassword"],
        "platform": record["platform"],
        "version": _generate_detailed_version(record["version"], record["checkDate"]),
        "maintainer": "" if record["maintainer"] in {"Official", ""} else "by: " + record["maintainer"],
        "asset": {
            "url": record["releaseUrl"],
            "icon": _calc_UIicon_path(record["releaseType"]),
            "size": record["fileSize"],
        },
        "gitProject": {"url": "", "icon": ""},
    }
    # dispatch to the type-specific generator; unknown types keep only the common part
    generator = {
        "github": _generate_git_release,
        "gitee": _generate_git_release,
        "download": _generate_DL_release,
        "mirrorFTP": _generate_mirrorFTP_release,
    }.get(record["releaseType"])
    if generator is not None:
        release |= generator(record, network_result)
    return release


def group_df_records_by_title(df: pd.DataFrame) -> dict[str, list[dict]]:
    """Group dataframe rows by software name, keeping first-seen name order.

    :return: {"name1":[records],"name2":[records]}
    """
    sort_columns = ["key", "platform", "releaseType", "version"]
    grouped: dict[str, list[dict]] = {}
    for title, sub_df in df.groupby("name", sort=False):
        grouped[title] = sub_df.sort_values(by=sort_columns).to_dict("records")
    return grouped


def group_software_from_records(records: list, network_results_map: dict):
    """Merge all records sharing one software name into a single frontend item.

    :param records: all spreadsheet rows for this software title
    :param network_results_map: id(record) -> fetched network payload for that record
    :return: dict with base fields, alias list, platform icons and valid releases
    """
    result = {}
    # base fields: take the first non-empty value across the records
    # (iterate a tuple, not a set literal, so iteration order is deterministic)
    for col in ("icon", "name", "comment"):
        result[col] = next((record[col] for record in records if record[col]), "")
    # aliases: split on ";" / "|", plus the quick-install tag when flagged
    result["alias"] = _trans_to_unique_list(
        pydash.flatten(
            (re.split(r"[;|]", record["alias"]) + (["装机"] if record["quick"] else [])) for record in records
        )
    )
    # platform icons
    result["platforms"] = _trans_to_unique_list(
        (
            _calc_UIicon_path("platform/{}".format(_get_general_platform(record["platform"])))
            for record in records
            if record["platform"]
        ),
        reverse=True,
    )
    # ======== releases ========
    network_types = {"github", "gitee", "download", "mirrorFTP"}
    for record in records:
        # only network-backed release types have an entry in network_results_map;
        # others fall back to the generator's default of None
        network_result = network_results_map.get(id(record)) if record["releaseType"] in network_types else None
        release = _generate_software_release(record, network_result)
        # a generator signals failure with {"valid": False}; skip those releases
        if release.get("valid", True):
            result.setdefault("releases", []).append(release)
    return result


# =================== 收集和执行任务 ===================
async def get_network_results(records: list[dict]):
    """Fetch the network payload for every record.

    Records are bucketed by their releaseUrl; distinct URLs are fetched
    concurrently while records inside one bucket are awaited sequentially.

    :return: {id(record): network_result} covering all input records
    """
    by_url: defaultdict[str, list[dict]] = defaultdict(list)
    for record in records:
        by_url[record["releaseUrl"]].append(record)

    async def fetch_group(group: list[dict]):
        results = {}
        for record in group:
            results[id(record)] = await get_network_result(record)
        return results

    partial_maps = await asyncio.gather(*(fetch_group(group) for group in by_url.values()))
    merged: dict = {}
    for partial in partial_maps:
        merged.update(partial)
    return merged


async def main():
    """Build software_data.json for the frontend from the manual spreadsheet."""
    redirect_logger("web")
    redirect_logger("main")
    # ======== output paths ========
    Path("../frontend/dist").mkdir(parents=True, exist_ok=True)
    debug_path = Path("../frontend/software_data.json").resolve()
    dist_path = Path("../frontend/dist/software_data.json").resolve()
    # ======== load and group the source records ========
    software_grecords = group_df_records_by_title(load_records(Path("manual_data.xlsx"), "software"))
    # ======== fetch all network information up front, concurrently ========
    # (was a manual extend loop over .items() that ignored the key)
    all_records: list[dict] = [record for records in software_grecords.values() for record in records]
    network_results_map = await get_network_results(all_records)
    # ======== assemble the per-software items ========
    software_data = []
    for records in software_grecords.values():
        software_item = group_software_from_records(records, network_results_map)
        # drop software whose releases all failed validation
        if len(software_item.get("releases", ())) > 0:
            software_data.append(software_item)

    LOGGER.info("已生成 {} 个软件的信息".format(len(software_data)))
    target_text = json.dumps(software_data, ensure_ascii=False, sort_keys=True)
    # ======== write to both the debug and the dist frontend directories ========
    debug_path.write_text(target_text, "utf-8")
    dist_path.write_text(target_text, "utf-8")
    LOGGER.info("已在前端调试目录和前端编译目录生成 software_data.json")


# Script entry point: run the whole async build pipeline.
if __name__ == "__main__":
    asyncio.run(main())
