#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
合并 original_path 与 crop_info 到标注文件
===========================================

支持两类数据集：
1. public_processed （有 annotations.json，含 original_path & crop_info）
2. aier_processed   （annotations.json 没有 original_path & crop_info，需要动态构建）

功能：
- 遍历 data_path / aier_process_dir，构建 image_path -> {original_path, crop_info}
- 支持缓存映射文件，避免重复扫描
- 合并到 input_json 的 list[{"image_path", "bboxes"}]

作者: zym1105
时间: 2025-9-20
"""

import argparse
import json
import os
from typing import Dict, Any, List
from PIL import Image


def abspath_norm(p: str) -> str:
    """Return *p* as a normalized absolute path (collapses '.', '..', dup slashes)."""
    normalized = os.path.normpath(p)
    return os.path.abspath(normalized)


def load_subset_annotations_map(data_path: str, original_root: str, strict: bool = True) -> Dict[str, Dict[str, Any]]:
    """Scan the public_processed subsets and build an image-path -> meta mapping.

    Every sub-directory of *data_path* is expected to hold an
    ``annotations.json`` whose entries carry ``image_path``,
    ``original_path`` and ``crop_info``.

    Args:
        data_path: root of the processed dataset, one sub-directory per subset.
        original_root: root of the original (uncropped) dataset.
        strict: raise on a missing annotations file or missing fields
            instead of silently skipping the entry.

    Returns:
        Mapping of absolute processed-image path to
        ``{"original_path": <abs path>, "crop_info": <dict>}``.
    """
    root = abspath_norm(data_path)
    ori_root = abspath_norm(original_root)
    result: Dict[str, Dict[str, Any]] = {}

    subsets = [name for name in os.listdir(root) if os.path.isdir(os.path.join(root, name))]
    print(f"[INFO] 子集数量: {len(subsets)} (扫描 {root})")

    for name in subsets:
        subset_dir = os.path.join(root, name)
        ann_path = os.path.join(subset_dir, "annotations.json")
        if not os.path.isfile(ann_path):
            if strict:
                raise FileNotFoundError(f"{subset_dir} 下无 annotations.json")
            continue

        with open(ann_path, "r", encoding="utf-8") as fh:
            annotations = json.load(fh)

        for image_name, entry in annotations.items():
            img_rel = entry.get("image_path")
            ori_rel = entry.get("original_path")
            crop_info = entry.get("crop_info")
            # All three fields are required to produce a usable meta record.
            if not (img_rel and ori_rel and crop_info):
                if strict:
                    raise KeyError(f"缺少必要字段于 {ann_path} -> {image_name}")
                continue

            img_abs = abspath_norm(os.path.join(subset_dir, img_rel))
            ori_abs = abspath_norm(os.path.join(ori_root, name, ori_rel))
            result[img_abs] = {"original_path": ori_abs, "crop_info": crop_info}

    print(f"[INFO] public_processed 映射完成，载入 {len(result)} 条")
    return result


def build_aier_meta(aier_process_dir: str, aier_orignal_dir: str, strict: bool = True) -> Dict[str, Dict[str, Any]]:
    """Build image-path -> meta mapping for the Aier dataset.

    Aier annotations carry neither ``original_path`` nor ``crop_info``, so
    both are synthesized here:

    - ``original_path``: resolved by matching the processed file's basename
      against the files found (recursively) under *aier_orignal_dir*.
    - ``crop_info``: a whole-image (no-op) crop over the original, i.e.
      ``crop_box=[0, H, 0, W]`` (top, bottom, left, right),
      ``original_size=[H, W]`` and ``cropped_size=[H, W]``.

    Args:
        aier_process_dir: directory containing the processed ``annotations.json``.
        aier_orignal_dir: root of the original Aier images.
        strict: raise on duplicate basenames, missing originals or unreadable
            images instead of skipping them.

    Returns:
        Mapping of absolute processed-image path to
        ``{"original_path": <abs path>, "crop_info": <dict>}``.

    Raises:
        ValueError: duplicate basename among original images (strict only).
        FileNotFoundError: no original image matches a processed file (strict only).
        RuntimeError: an original image could not be opened (strict only).
    """
    mapping: Dict[str, Dict[str, Any]] = {}

    # Basename -> absolute path of every original image.
    # NOTE: in non-strict mode a duplicate basename silently keeps whichever
    # file os.walk visits last.
    fname2ori: Dict[str, str] = {}
    for root, _, files in os.walk(aier_orignal_dir):
        for f in files:
            if f.lower().endswith((".jpg", ".jpeg", ".png")):
                if f in fname2ori and strict:
                    raise ValueError(f"同名文件冲突: {f}")
                fname2ori[f] = abspath_norm(os.path.join(root, f))

    ann_file = os.path.join(aier_process_dir, "annotations.json")
    with open(ann_file, "r", encoding="utf-8") as f:
        data = json.load(f)

    for _, item in data.items():
        abs_img_path = abspath_norm(item["image_path"])
        fname = os.path.basename(abs_img_path)

        if fname not in fname2ori:
            if strict:
                raise FileNotFoundError(f"找不到 {fname} 对应的原始图像")
            continue
        abs_ori_path = fname2ori[fname]

        # The original size is needed to synthesize the whole-image crop box.
        try:
            with Image.open(abs_ori_path) as img:
                W, H = img.size  # PIL reports (width, height)
        except Exception as e:
            if strict:
                # Chain the underlying error so the root cause stays visible.
                raise RuntimeError(f"无法读取原始图像尺寸 {abs_ori_path}: {e}") from e
            continue

        crop_info = {
            "crop_box": [0, H, 0, W],  # top, bottom, left, right
            "original_size": [H, W],
            "cropped_size": [H, W],  # no-op crop: identical to original_size
        }

        mapping[abs_img_path] = {"original_path": abs_ori_path, "crop_info": crop_info}

    print(f"[INFO] Aier 映射完成，载入 {len(mapping)} 条")
    return mapping


def merge_meta_into_dataset(input_json: str,
                            output_json: str,
                            img_meta_map: Dict[str, Dict[str, Any]],
                            strict: bool = True) -> None:
    """Attach ``original_path``/``crop_info`` meta to every matching dataset item.

    Items in *input_json* (a list of ``{"image_path", ...}`` dicts) are matched
    against *img_meta_map* primarily by absolute image path; when that fails,
    a unique basename match is accepted as a fallback. Unmatched items are left
    untouched. The enriched dataset is written to *output_json*.

    Raises:
        ValueError: ambiguous basename fallback match (strict only).
    """
    with open(input_json, "r", encoding="utf-8") as fh:
        dataset = json.load(fh)

    # Basename -> candidate absolute paths, for the fallback lookup.
    by_basename: Dict[str, List[str]] = {}
    for path in img_meta_map:
        by_basename.setdefault(os.path.basename(path), []).append(path)

    merged_cnt = 0
    for record in dataset:
        raw_path = record.get("image_path")
        if not raw_path:
            continue
        key = abspath_norm(raw_path)

        meta = img_meta_map.get(key)
        if meta is None:
            base = os.path.basename(key)
            candidates = by_basename.get(base, [])
            if len(candidates) == 1:
                meta = img_meta_map[candidates[0]]
            elif len(candidates) > 1 and strict:
                raise ValueError(f"同名图片冲突: {base}")
            else:
                continue

        record["meta"] = {"original_path": meta["original_path"], "crop_info": meta["crop_info"]}
        merged_cnt += 1

    os.makedirs(os.path.dirname(os.path.abspath(output_json)), exist_ok=True)
    with open(output_json, "w", encoding="utf-8") as fh:
        json.dump(dataset, fh, ensure_ascii=False, indent=2)

    print(f"[INFO] 合并完成：成功 {merged_cnt} 条，输出 {output_json}")


def parse_args() -> argparse.Namespace:
    """Build and parse the command-line arguments for this script."""
    p = argparse.ArgumentParser(description="Merge original_path & crop_info into annotation JSON.")
    # Dataset locations.
    p.add_argument("--data_path", type=str, default="/data0/zhangpinglu/gy/Dataset/public_processed")
    p.add_argument("--original_data_dir", type=str, default="/data0/zhangpinglu/gy/Dataset/public_dataset")
    p.add_argument("--aier_process_dir", type=str, default="/data0/zhangpinglu/gy/Dataset/aier_processed")
    p.add_argument("--aier_orignal_dir", type=str, default="/data0/zhangpinglu/gy/Dataset/aier_orignal")
    # Input/output annotation files.
    p.add_argument("--input_json", type=str, default="./experiments/mix_cot_only.json")
    p.add_argument("--output_json", type=str, default="./experiments/merged_annotations.json")
    p.add_argument("--media_dir", type=str, default="./experiments/media_dir", help="缓存映射存放目录")
    # Cache-rebuild switches.
    p.add_argument("--rebuild_mapping_public", action="store_true", default=False, help="是否强制重建 public 映射")
    p.add_argument("--rebuild_mapping_aier", action="store_true", default=False, help="是否强制重建 aier 映射")
    # Strict mode defaults to on; --no_strict turns it off.
    p.add_argument("--strict", action="store_true", default=True)
    p.add_argument("--no_strict", dest="strict", action="store_false")
    return p.parse_args()


def _load_or_build_mapping(cache_file: str, rebuild: bool, builder, label: str) -> Dict[str, Dict[str, Any]]:
    """Return the mapping cached in *cache_file*, or build and persist it.

    Args:
        cache_file: path of the JSON cache.
        rebuild: when True, ignore any existing cache and rebuild.
        builder: zero-argument callable producing the mapping.
        label: dataset label used in the log message ("public" / "aier").
    """
    if os.path.exists(cache_file) and not rebuild:
        print(f"[INFO] 加载缓存 {label} 映射: {cache_file}")
        with open(cache_file, "r", encoding="utf-8") as f:
            return json.load(f)
    mapping = builder()
    with open(cache_file, "w", encoding="utf-8") as f:
        json.dump(mapping, f, ensure_ascii=False, indent=2)
    return mapping


def main():
    """Entry point: load/build both dataset mappings and merge them into the dataset."""
    args = parse_args()
    os.makedirs(args.media_dir, exist_ok=True)
    public_map_file = os.path.join(args.media_dir, "public_dataset_mapping.json")
    aier_map_file = os.path.join(args.media_dir, "aier_dataset_mapping.json")

    public_map = _load_or_build_mapping(
        public_map_file,
        args.rebuild_mapping_public,
        lambda: load_subset_annotations_map(args.data_path, args.original_data_dir, strict=args.strict),
        "public",
    )
    aier_map = _load_or_build_mapping(
        aier_map_file,
        args.rebuild_mapping_aier,
        lambda: build_aier_meta(args.aier_process_dir, args.aier_orignal_dir, strict=args.strict),
        "aier",
    )

    # aier entries take precedence on key collisions (same order as the
    # original {**public, **aier} merge).
    merged_map = {**public_map, **aier_map}
    merge_meta_into_dataset(args.input_json, args.output_json, merged_map, strict=args.strict)


if __name__ == "__main__":
    main()
