import sys
from pathlib import Path
import torch
import json
from typing import Dict, Union, List

# LAYER_INT_RANGE = {"act_quantizer":}


def check_tensor(tensor, path):
    """Check a single tensor for inf values or suspiciously large magnitudes.

    Prints a colored diagnostic report for any problematic tensor.  NaN
    entries are excluded from the magnitude check and are not themselves
    reported (NOTE(review): NaNs are silently ignored — confirm intended).

    Args:
        tensor: Object to inspect; anything other than a torch.Tensor
            (including None) is skipped.
        path: Label (file path / dict key) used in the diagnostic output.

    Returns:
        True if the tensor contains inf values or finite values with
        absolute value > 128; False otherwise (including for non-tensors).
    """
    if tensor is None:
        return False

    if not isinstance(tensor, torch.Tensor):
        return False

    # Compute the finite-value view once instead of rebuilding the
    # NaN/inf mask for every report line.
    finite = tensor[torch.isfinite(tensor)]

    has_inf = bool(torch.isinf(tensor).any())
    has_large_values = bool((finite.abs() > 128).any())

    if has_inf or has_large_values:
        print(f"Found problematic tensor at: \x1b[031m{path}\x1b[0m")
        if has_inf:
            print("  - Contains \x1b[034minf\x1b[0m values")
        if has_large_values:
            # Report the largest *absolute* finite value: the previous code
            # printed finite.max(), which understates the problem when the
            # offending values are large negatives.
            print(
                f"  - Contains values with absolute \x1b[033mvalue > 128\x1b[0m (max: \x1b[032m{finite.abs().max().item()}\x1b[0m)"
            )
        return True

    return False


def scan_directory(base_path: Path):
    """Walk layer dump folders under *base_path* and report problematic tensors.

    Scans every ``*.pth`` file inside folders matching
    ``language_model.model.layers.*`` and runs ``check_tensor`` on each
    payload: bare tensors, ``None`` placeholders, and dict values.

    Args:
        base_path: Root directory containing the per-layer dump folders.
    """
    for layer_dir in base_path.glob("language_model.model.layers.*"):
        for pth_file in layer_dir.glob("*.pth"):
            try:
                # map_location="cpu" so CUDA-saved dumps load on CPU-only
                # hosts.  NOTE(review): torch.load unpickles arbitrary
                # objects — only run this on trusted dump directories.
                data = torch.load(pth_file, map_location="cpu")
                if isinstance(data, (torch.Tensor, type(None))):
                    check_tensor(data, pth_file)
                elif isinstance(data, dict):
                    for key, value in data.items():
                        if isinstance(value, (torch.Tensor, type(None))):
                            if check_tensor(value, f"{pth_file} (key: {key})"):
                                print(f"  - In dictionary with key: {key}")
                # Other container types (list, tuple, ...) could be handled here.
            except Exception as e:
                print(f"Error loading {pth_file}: {str(e)}")


def get_tensor_range(tensor: torch.Tensor) -> Dict[str, Union[float, List[int]]]:
    """Summarize a tensor's value range and shape.

    Args:
        tensor: Tensor to summarize.  Must be non-empty — ``min``/``max``
            raise on empty tensors.

    Returns:
        Dict with "min" and "max" (Python floats) and "shape" (list of ints).
        Note: the previous ``Dict[str, float]`` annotation was wrong — the
        "shape" entry is a list.
    """
    return {
        "min": tensor.min().item(),
        "max": tensor.max().item(),
        "shape": list(tensor.shape),
    }


def process_pth_file(pth_file: Path) -> Union[Dict[str, float], None]:
    """Load a single .pth file and return its tensor range summary.

    Args:
        pth_file: Path to the .pth file.

    Returns:
        The range dict from ``get_tensor_range`` when the payload is a bare
        tensor; None when the payload is None, a non-tensor object, or the
        file fails to load (the error is printed, not raised).
    """
    try:
        # map_location="cpu" so CUDA-saved tensors load on CPU-only hosts.
        data = torch.load(pth_file, map_location="cpu")
        if data is None:
            return None
        if isinstance(data, torch.Tensor):
            return get_tensor_range(data)
        return None  # Non-tensor payloads (dicts, lists, ...) are skipped.
    except Exception as e:
        print(f"Error processing {pth_file}: {e}")
        return None


def scan_folder(folder: Path) -> Dict[str, Dict[str, float]]:
    """Recursively collect tensor ranges for every .pth file under *folder*.

    Keys in the returned mapping are file paths relative to *folder*;
    files whose payload yields no range (non-tensors, load errors) are
    omitted.
    """
    results: Dict[str, Dict[str, float]] = {}
    for candidate in folder.rglob("*.pth"):
        summary = process_pth_file(candidate)
        if summary is None:
            continue
        # Key by the path relative to the scanned folder.
        results[str(candidate.relative_to(folder))] = summary
    return results


def find_transformer_folders(root_dir: Union[str, Path]) -> List[Path]:
    """Find all folders matching ``language_model.model.layers.*`` under *root_dir*.

    (The previous docstring described a ``transformer.h.0.*`` pattern that
    does not match the glob actually used.)

    Args:
        root_dir: Directory to search, as a string or Path.

    Returns:
        List of matching directory paths.  Stray *files* matching the
        pattern are excluded, since all callers treat results as folders.
    """
    root_path = Path(root_dir)
    return [
        p for p in root_path.glob("language_model.model.layers.*") if p.is_dir()
    ]


def save_tensor_ranges_to_json(
    root_dir: Union[str, Path], output_json: str = "tensor_ranges.json"
):
    """Scan all ``language_model.model.layers.*`` folders and save ranges to JSON.

    Args:
        root_dir: Root directory containing the per-layer dump folders.
        output_json: Destination JSON file path.
    """
    root_path = Path(root_dir)
    all_results = {}

    # Find all folders matching the layer-dump pattern.
    transformer_folders = find_transformer_folders(root_path)
    if not transformer_folders:
        print(
            f"No folders matching 'language_model.model.layers.*' found in {root_path}"
        )
        return

    # Sort folders so the JSON output is deterministic: glob order is
    # filesystem-dependent.
    for folder in sorted(transformer_folders):
        print(f"Scanning folder: {folder.name}")
        tensor_ranges = scan_folder(folder)
        if tensor_ranges:
            all_results[folder.name] = tensor_ranges

    # Explicit encoding so the output is UTF-8 regardless of locale.
    with open(output_json, "w", encoding="utf-8") as f:
        json.dump(all_results, f, indent=2)

    print(f"Saved tensor ranges to {output_json}")


if __name__ == "__main__":
    if len(sys.argv) < 2:
        print("Usage: python script.py <directory_path>")
        sys.exit(1)

    base = Path(sys.argv[1])
    if not base.exists():
        print(f"Directory not found: {base}")
        sys.exit(1)

    scan_directory(base)
    save_tensor_ranges_to_json(base)
