"""Check model version and parameter freeze status."""

import torch
import sys
from pathlib import Path

# Make the repository root importable so the `mini_vggt.*` package resolves
# when this script is run directly (not via `python -m`). parents[2] assumes
# this file sits two directory levels below the repo root — TODO confirm if
# the file is ever moved.
REPO_ROOT = Path(__file__).resolve().parents[2]
if str(REPO_ROOT) not in sys.path:
    sys.path.insert(0, str(REPO_ROOT))

from mini_vggt.Distill.vggt_mini import MiniVGGT, MiniVGGTConfig
from mini_vggt.Distill.vggt_mini_gqa import MiniVGGTGQA, MiniVGGTGQAConfig

def _extract_model_state(checkpoint: dict) -> dict:
    """Return the model state dict from *checkpoint*.

    Handles both wrapped checkpoints ({'model': state_dict, ...}) and bare
    state dicts saved directly with torch.save.
    """
    if "model" in checkpoint:
        print("✓ Found 'model' key in checkpoint")
        return checkpoint["model"]
    print("⚠ No 'model' key, using checkpoint directly")
    return checkpoint


def _build_student(model_state: dict):
    """Instantiate the matching student architecture for *model_state*.

    Detection heuristic: GQA checkpoints contain 'query_proj' parameter names.
    Returns (model, model_type_label).
    """
    if any("query_proj" in k for k in model_state):
        print("\n✓ Detected MiniVGGTGQA model (has 'query_proj' keys)")
        return MiniVGGTGQA(MiniVGGTGQAConfig()), "GQA"
    print("\n✓ Detected MiniVGGT model (no 'query_proj' keys)")
    return MiniVGGT(MiniVGGTConfig()), "Regular"


def _report_blocks(student) -> None:
    """Print per-block parameter counts, probing common fallbacks if needed."""
    if hasattr(student, 'blocks'):
        print(f"\nNumber of blocks: {len(student.blocks)}")
        for idx, block in enumerate(student.blocks):
            num_params = sum(p.numel() for p in block.parameters())
            print(f"  Block {idx}: {num_params:,} parameters")
        return

    print("\n⚠ Model has no 'blocks' attribute!")
    print(f"Available attributes: {[attr for attr in dir(student) if not attr.startswith('_')][:20]}")

    # Some wrappers nest the transformer under a backbone/encoder attribute.
    if hasattr(student, 'backbone'):
        print("\n✓ Model has 'backbone' attribute")
        if hasattr(student.backbone, 'blocks'):
            print(f"  backbone.blocks exists with {len(student.backbone.blocks)} blocks")

    if hasattr(student, 'vision_encoder'):
        print("\n✓ Model has 'vision_encoder' attribute")
        if hasattr(student.vision_encoder, 'blocks'):
            print(f"  vision_encoder.blocks exists with {len(student.vision_encoder.blocks)} blocks")


def _report_extras(checkpoint: dict) -> None:
    """Print which auxiliary components the checkpoint carries."""
    if "criterion" in checkpoint:
        print("\n✓ Checkpoint contains 'criterion' (can load pretrained projections)")
        criterion_keys = list(checkpoint["criterion"].keys())
        print(f"  Criterion keys: {criterion_keys[:5]}..." if len(criterion_keys) > 5 else f"  Criterion keys: {criterion_keys}")

    if "camera_criterion" in checkpoint:
        print("✓ Checkpoint contains 'camera_criterion'")

    if "projector" in checkpoint:
        print("✓ Checkpoint contains 'projector'")

    if "optimizer" in checkpoint:
        print("✓ Checkpoint contains 'optimizer'")

    if "epoch" in checkpoint:
        print(f"✓ Checkpoint at epoch: {checkpoint['epoch']}")


def check_model(checkpoint_path: str) -> None:
    """Inspect a saved checkpoint: detect model type, load weights strictly,
    and print architecture/parameter diagnostics.

    Args:
        checkpoint_path: Path to a ``torch.save``-ed checkpoint, either a bare
            state dict or a dict with a 'model' key (plus optional extras such
            as 'criterion', 'optimizer', 'epoch').

    Raises:
        RuntimeError: if the state dict does not exactly match the detected
            architecture (``strict=True`` load).
    """
    print(f"\n{'='*80}")
    print(f"Checking checkpoint: {checkpoint_path}")
    print(f"{'='*80}\n")

    # SECURITY NOTE(review): torch.load unpickles arbitrary objects — only run
    # this on checkpoints you trust. On torch >= 2.6 the default flips to
    # weights_only=True, which may reject checkpoints carrying criterion /
    # optimizer objects; pass weights_only=False explicitly if that happens.
    checkpoint = torch.load(checkpoint_path, map_location="cpu")

    model_state = _extract_model_state(checkpoint)
    print(f"\nCheckpoint keys: {list(checkpoint.keys())}")

    student, model_type = _build_student(model_state)

    # strict=True so any architecture mismatch surfaces immediately.
    student.load_state_dict(model_state, strict=True)

    print(f"\nModel type: {model_type}")
    print(f"Embed dim: {student.config.embed_dim}")
    print(f"Student dim (embed_dim * 2): {student.config.embed_dim * 2}")

    _report_blocks(student)

    # patch_start_idx marks where patch tokens begin (after camera/register
    # tokens) — presumably; verify against the model definition.
    if hasattr(student, 'patch_start_idx'):
        print(f"\nPatch start index: {student.patch_start_idx}")
    else:
        print("\n⚠ Model has no 'patch_start_idx' attribute!")

    total_params = sum(p.numel() for p in student.parameters())
    print(f"\nTotal parameters: {total_params:,}")

    _report_extras(checkpoint)

    print(f"\n{'='*80}\n")


if __name__ == "__main__":
    import argparse

    # Single optional flag: which checkpoint file to inspect.
    cli = argparse.ArgumentParser()
    cli.add_argument("--checkpoint", default="/data0/liqifeng/ZYC/mini_vggt_finetuned.pt")
    options = cli.parse_args()

    check_model(options.checkpoint)
