"""
从 Bilbao Crystallographic Server 抓取 32 个晶体点群的对称操作数据

运行方式：
    python -m app.utils.fetch_bilbao_symmetry_operations

特点：
    - 自动从 point_group_bilbao_params.json 读取点群配置
    - 抓取详细页面的完整对称操作信息
    - 包含坐标三元组、矩阵、ITA符号、Seitz符号
"""

import asyncio
import json
from pathlib import Path
from typing import Any, Iterable
from urllib.parse import parse_qs, urlparse

import httpx
from bs4 import BeautifulSoup, Tag

BASE_URL = "http://webbdcrista2.ehu.es"
DETAIL_URL = f"{BASE_URL}/cgi-bin/cryst/programs/nph-point_genpos"
POINT_GROUP_URL = f"{BASE_URL}/cgi-bin/rep/programs/sam/point.py"


def normalize_triplet(value: str) -> str:
    """Canonicalize a coordinate-triplet string so equivalent renderings compare equal.

    Removes every space and newline, then strips any remaining edge whitespace.
    """
    cleaned = value.replace(" ", "")
    cleaned = cleaned.replace("\n", "")
    return cleaned.strip()


def find_operations_table(soup: BeautifulSoup) -> Tag | None:
    """Locate the table listing the symmetry operations, or None.

    The wanted table is recognised by a header row containing both
    "No." and "(x,y,z)"; tables with fewer than three rows are skipped.
    """
    for candidate in soup.find_all("table"):
        all_rows = candidate.find_all("tr")
        if len(all_rows) >= 3:
            header = all_rows[0].get_text()
            if "No." in header and "(x,y,z)" in header:
                return candidate
    return None


def parse_operations_table(table: Tag) -> list[dict[str, Any]]:
    """Parse a symmetry-operations table whose rows carry (x,y,z) triplets.

    The first two rows are headers; data rows appear to alternate with
    spacer rows, hence the stride-2 advance after each parsed row.
    Rows without exactly 8 cells are skipped one at a time.
    """

    def _parse_matrix(cell_text: str) -> list[list[int]] | None:
        # The matrix cell holds nine integers; "−" is a Unicode minus sign.
        tokens = [t for t in cell_text.replace("(", "").replace(")", "").split() if t]
        if len(tokens) < 9:
            return None
        try:
            nums = [int(t.replace("−", "-")) for t in tokens[:9]]
        except ValueError:
            return None
        return [nums[0:3], nums[3:6], nums[6:9]]

    rows = table.find_all("tr")
    parsed: list[dict[str, Any]] = []
    cursor = 2  # skip the two header rows

    while cursor < len(rows):
        cells = rows[cursor].find_all("td")

        if len(cells) != 8:
            cursor += 1
            continue

        raw_id = cells[0].get_text().strip()
        try:
            op_id = int(raw_id)
        except ValueError:
            # Empty or non-numeric id: skip this row plus its spacer.
            cursor += 2
            continue

        parsed.append(
            {
                "id": op_id,
                "coordinate_triplet": cells[1].get_text().strip(),
                "matrix": _parse_matrix(cells[2].get_text().strip()),
                "ita_symbol": cells[6].get_text().strip(),
                "seitz": cells[7].get_text().strip(),
            }
        )
        cursor += 2

    return parsed


def match_generators_to_operations(
    generator_ops: Iterable[dict[str, Any]], operations: list[dict[str, Any]]
) -> list[int]:
    """Map each generator operation onto the id of a matching operation.

    Matching is tried first on the normalized coordinate triplet, then
    on the rotation matrix; generators that match nothing are skipped
    with a console warning.
    """
    # Index operation ids by their normalized triplet text.
    by_triplet: dict[str, list[int]] = {}
    for operation in operations:
        triplet = normalize_triplet(operation.get("coordinate_triplet", ""))
        by_triplet.setdefault(triplet, []).append(operation["id"])

    matched_ids: list[int] = []
    for gen in generator_ops:
        ids = by_triplet.get(normalize_triplet(gen.get("coordinate_triplet", "")))
        if ids:
            matched_ids.append(ids[0])
            continue

        gen_matrix = gen.get("matrix")
        found = next(
            (op["id"] for op in operations if gen_matrix and op.get("matrix") == gen_matrix),
            None,
        )
        if found is not None:
            matched_ids.append(found)
        else:
            print(f"  ⚠️  无法匹配生成元 {gen.get('coordinate_triplet')}, 将跳过")

    return matched_ids


def find_subgroups_table(soup: BeautifulSoup) -> Tag | None:
    """Return the table whose caption marks it as the subgroup listing."""
    for candidate in soup.find_all("table"):
        cap = candidate.find("caption")
        if cap and "Subgroups of the group" in cap.get_text(strip=True):
            return candidate
    return None


def extract_num_from_href(href: str | None) -> int | None:
    if not href:
        return None
    try:
        parsed = urlparse(href)
        query = parse_qs(parsed.query)
        if "num" in query:
            return int(query["num"][0])
    except (ValueError, KeyError):
        return None
    return None


def safe_int(value: str | None) -> int | None:
    if not value:
        return None
    try:
        return int(value.strip())
    except ValueError:
        return None


def parse_subgroups_table(table: Tag, num_to_schoenflies: dict[int, str]) -> list[dict[str, Any]]:
    """Parse the subgroup table, keeping only Schoenflies symbol and index.

    Args:
        table: The subgroup <table> element located by ``find_subgroups_table``.
        num_to_schoenflies: Mapping from Bilbao point-group number to
            Schoenflies symbol, used to resolve linked subgroup entries.

    Returns:
        A list of ``{"schoenflies": str, "index": int}`` dicts; rows with
        no resolvable name or index are dropped.
    """
    rows = table.find_all("tr")
    subgroups: list[dict[str, Any]] = []

    for row in rows[1:]:  # skip the header row
        cols = row.find_all("td")
        if len(cols) < 3:
            continue

        link = cols[0].find("a")
        subgroup_num = extract_num_from_href(str(link.get("href")) if link else None)
        # Plain membership test: the previous `if subgroup_num and ...`
        # truthiness check would wrongly reject a valid num of 0.
        if subgroup_num in num_to_schoenflies:
            subgroup_name = num_to_schoenflies[subgroup_num]
        else:
            # Fall back to the cell text, e.g. "C2v (mm2)" -> "C2v".
            raw_text = cols[0].get_text(strip=True)
            subgroup_name = raw_text.split("(")[0].replace(" ", "")

        index_value = safe_int(cols[2].get_text())
        if not subgroup_name or index_value is None:
            continue

        subgroups.append({"schoenflies": subgroup_name, "index": index_value})

    return subgroups


async def fetch_subgroups(
    client: httpx.AsyncClient,
    sg: int,
    num: int,
    schoenflies: str,
    num_to_schoenflies: dict[int, str],
) -> tuple[list[dict[str, Any]], str]:
    """Fetch the subgroup list for one point group from the point.py page.

    Returns the (possibly empty) deduplicated subgroup list together with
    the queried URL so callers can record the data provenance.
    """
    url = f"{POINT_GROUP_URL}?sg={sg}&num={num}"
    try:
        response = await client.get(url, timeout=30.0)
        response.raise_for_status()
    except Exception as exc:
        print(f"  ⚠️  子群抓取失败: {exc}")
        return [], url

    table = find_subgroups_table(BeautifulSoup(response.text, "html.parser"))
    if not table:
        print("  ⚠️  未找到子群表")
        return [], url

    # Drop the group itself and duplicate Schoenflies symbols,
    # keeping the first occurrence of each name.
    unique: list[dict[str, Any]] = []
    kept: set[str] = set()
    for entry in parse_subgroups_table(table, num_to_schoenflies):
        name = entry.get("schoenflies")
        if name and name != schoenflies and name not in kept:
            kept.add(name)
            unique.append(entry)

    return unique, url


async def fetch_point_group_operations(
    client: httpx.AsyncClient,
    params: dict[str, Any],
    num_to_schoenflies: dict[int, str],
) -> dict[str, Any]:
    """Fetch the full symmetry-operation data for a single point group.

    Args:
        client: Shared HTTP client used for all requests.
        params: Point-group parameters; must contain "num" (Bilbao
            point-group number, the URL ``?num=`` parameter),
            "schoenflies" (Schoenflies symbol) and "sg" (the ``sg``
            parameter of the subgroup page).
        num_to_schoenflies: Mapping from Bilbao number to Schoenflies
            symbol, forwarded to the subgroup parser.

    Returns:
        A dict with the operations, matched generator ids, subgroups and
        the URLs they were fetched from; on any failure the same keys are
        returned with empty lists plus an "error" message.
    """
    num = params["num"]
    schoenflies = params["schoenflies"]
    sg = params["sg"]

    url = f"{DETAIL_URL}?num={num}"
    print(f"抓取 {schoenflies} (num={num})...")

    try:
        response = await client.get(url, timeout=30.0)
        response.raise_for_status()

        soup = BeautifulSoup(response.text, "html.parser")

        table = find_operations_table(soup)
        if not table:
            raise ValueError("未找到对称操作表格")

        operations = parse_operations_table(table)

        # Second request: the generators-only view of the same page.
        generators_url = f"{DETAIL_URL}?w2do=gens&num={num}&what="
        gen_response = await client.get(generators_url, timeout=30.0)
        gen_response.raise_for_status()
        gen_soup = BeautifulSoup(gen_response.text, "html.parser")

        gen_table = find_operations_table(gen_soup)
        generator_ops = parse_operations_table(gen_table) if gen_table else []
        generator_ids = match_generators_to_operations(generator_ops, operations)

        subgroups, subgroups_url = await fetch_subgroups(client, sg, num, schoenflies, num_to_schoenflies)

        print(f"  ✓ 找到 {len(operations)} 个对称操作, 生成元 {len(generator_ids)} 个, 子群 {len(subgroups)} 个")

        return {
            "num": num,
            "schoenflies": schoenflies,
            "url": url,
            "operations": operations,
            "generators": generator_ids,
            "generators_url": generators_url,
            "subgroups": subgroups,
            "subgroups_url": subgroups_url,
        }

    except Exception as e:
        # Any failure yields a same-shaped record with empty data so the
        # caller can still aggregate and report it.
        print(f"  ✗ 错误: {e}")
        return {
            "num": num,
            "schoenflies": schoenflies,
            "url": url,
            "generators": [],
            "generators_url": f"{DETAIL_URL}?w2do=gens&num={num}&what=",
            "subgroups": [],
            "subgroups_url": f"{POINT_GROUP_URL}?sg={sg}&num={num}",
            "error": str(e),
            "operations": [],
        }


async def fetch_all_operations(point_group_params: list[dict]) -> dict[str, Any]:
    """Fetch the symmetry operations of every configured point group.

    Args:
        point_group_params: Parameter dicts loaded from
            point_group_bilbao_params.json.

    Returns:
        Mapping from Schoenflies symbol to the fetched data dict.
    """
    num_to_schoenflies = {p["num"]: p["schoenflies"] for p in point_group_params}
    collected: dict[str, Any] = {}

    async with httpx.AsyncClient() as client:
        for params in point_group_params:
            collected[params["schoenflies"]] = await fetch_point_group_operations(
                client, params, num_to_schoenflies
            )
            # Throttle to avoid hammering the server.
            await asyncio.sleep(1)

    return collected


def update_character_tables(operations_data: dict[str, Any], data_dir: Path):
    """Merge fetched operation data into bilbao_point_groups.json.

    Args:
        operations_data: Data fetched from Bilbao, keyed by Schoenflies symbol.
        data_dir: Directory containing bilbao_point_groups.json.
    """
    char_tables_file = data_dir / "bilbao_point_groups.json"

    print(f"\n更新 {char_tables_file.name}...")

    # Load the existing character tables.
    with open(char_tables_file, "r", encoding="utf-8") as f:
        char_tables = json.load(f)

    print(f"  读取到 {len(char_tables)} 个点群的特征标表")

    # Merge the freshly fetched data into each matching entry.
    updated_count = 0
    for table in char_tables:
        schoenflies = table["schoenflies"]
        data = operations_data.get(schoenflies)
        if data is None:
            continue

        table["operations"] = data["operations"]
        table["operations_url"] = data["url"]
        table["generators"] = data.get("generators", [])
        table["generators_url"] = data.get("generators_url")
        table["subgroups"] = data.get("subgroups", [])
        table["subgroups_url"] = data.get("subgroups_url")
        # Drop the legacy key superseded by "subgroups" (no-op if absent).
        table.pop("subgroups_detail", None)
        updated_count += 1
        print(
            f"  ✓ {schoenflies}: {len(table['operations'])} 个操作, "
            f"生成元 {len(table['generators'])} 个, 子群 {len(table['subgroups'])} 个"
        )

    # Persist the merged tables.
    with open(char_tables_file, "w", encoding="utf-8") as f:
        json.dump(char_tables, f, ensure_ascii=False, indent=2)

    print(f"\n  ✓ 已更新 {updated_count}/{len(char_tables)} 个点群")

    # Data-quality summary.
    total_ops = sum(len(pg["operations"]) for pg in char_tables)
    ita_filled = sum(1 for pg in char_tables for op in pg["operations"] if op.get("ita_symbol", "").strip())
    seitz_filled = sum(1 for pg in char_tables for op in pg["operations"] if op.get("seitz", "").strip())

    print(f"\n数据完整性:")
    print(f"  - 总操作数: {total_ops}")
    if total_ops:
        print(f"  - ITA symbol: {ita_filled}/{total_ops} ({ita_filled*100//total_ops}%)")
        print(f"  - Seitz: {seitz_filled}/{total_ops} ({seitz_filled*100//total_ops}%)")
    else:
        # Guard: the original raised ZeroDivisionError here when no
        # point group had any operations at all.
        print("  - ITA symbol: 0/0")
        print("  - Seitz: 0/0")


async def main():
    """Entry point: load parameters, scrape Bilbao, update the data file, report."""
    banner = "=" * 70
    print(banner)
    print("从 Bilbao Crystallographic Server 抓取对称操作数据")
    print(banner)

    # Resolve data paths relative to this module.
    data_dir = Path(__file__).parent.parent / "data"
    params_file = data_dir / "point_group_bilbao_params.json"

    # Load the point-group parameter list.
    print(f"\n读取点群参数：{params_file.name}")
    with open(params_file, "r", encoding="utf-8") as f:
        point_group_params = json.load(f)

    print(f"  ✓ 找到 {len(point_group_params)} 个点群")

    # Scrape every point group, then merge into bilbao_point_groups.json.
    print("\n开始抓取对称操作...")
    all_data = await fetch_all_operations(point_group_params)

    update_character_tables(all_data, data_dir)

    # Per-group summary, ordered by Bilbao number.
    print("\n" + banner)
    print("统计信息")
    print(banner)

    success_count = error_count = total_operations = 0

    for schoenflies, data in sorted(all_data.items(), key=lambda item: item[1].get("num", 0)):
        op_count = len(data.get("operations", []))
        gen_count = len(data.get("generators", []))
        subgroup_count = len(data.get("subgroups", []))

        if "error" in data:
            print(f"  ✗ {schoenflies}: 抓取失败 - {data['error']}")
            error_count += 1
        else:
            print(f"  ✓ {schoenflies}: {op_count} 个操作, 生成元 {gen_count} 个, 子群 {subgroup_count} 个")
            success_count += 1
            total_operations += op_count

    print("\n" + banner)
    print(f"成功: {success_count}/{len(all_data)}")
    print(f"失败: {error_count}/{len(all_data)}")
    print(f"总对称操作数: {total_operations}")
    print(banner)


# Script entry point: run the async scraper end to end.
if __name__ == "__main__":
    asyncio.run(main())
