#!/usr/bin/env python3

import os
import json
from safetensors import safe_open

def detailed_comparison():
    """Compare a base model directory against a fine-tuned (LoRA) model directory.

    Prints, to stdout:
      1. the file listing of the base model directory and the extra files
         the fine-tuned directory adds (with sizes in MB),
      2. a per-layer breakdown of the LoRA adapter tensors and the total
         trainable-parameter count (if ``adapter_model.safetensors`` exists),
      3. a shape sample of the first few adapter tensors, and
      4. the key LoRA hyper-parameters from ``adapter_config.json`` (if present).

    Both model paths are hard-coded relative to the current working directory.
    """
    print("=== 详细模型比较 ===\n")

    # Hard-coded model locations, relative to the working directory.
    base_model_path = "models/Qwen1.5-1.8B"
    finetuned_model_path = "training/models/finetuned_model_tianqi"

    print("1. 模型文件结构比较:")
    print("   基础模型文件:")
    base_files = sorted(os.listdir(base_model_path))
    _print_files(base_model_path, base_files)

    print("\n   微调模型额外文件:")
    finetuned_files = sorted(os.listdir(finetuned_model_path))
    # Only show files the fine-tuned directory adds on top of the base model.
    extra_files = [f for f in finetuned_files if f not in base_files]
    _print_files(finetuned_model_path, extra_files)

    # Analyze the LoRA adapter weights, if present.
    adapter_path = os.path.join(finetuned_model_path, "adapter_model.safetensors")
    if os.path.exists(adapter_path):
        print("\n2. LoRA适配器详细分析:")
        with safe_open(adapter_path, framework="pt", device="cpu") as f:
            keys = list(f.keys())
            total_params = 0

            # Group tensor names by transformer layer index.  Keys are assumed
            # to look like "base_model.model.model.layers.<N>...." so the layer
            # index is the fifth dot-separated component — TODO confirm against
            # the actual adapter key naming scheme.
            layers = {}
            for key in keys:
                parts = key.split('.')
                if len(parts) > 5:
                    layers.setdefault(parts[4], []).append(key)

            print(f"   总参数矩阵数: {len(keys)}")
            # Sort numeric layer indices numerically and put any non-numeric
            # names after them, alphabetically.  A plain int-or-str key would
            # raise TypeError on a mixed set of layer names in Python 3.
            for layer_num in sorted(
                layers,
                key=lambda x: (0, int(x), "") if x.isdigit() else (1, 0, x),
            ):
                print(f"   Layer {layer_num}: {len(layers[layer_num])} 个矩阵")

            # Sum element counts over every adapter tensor.
            for key in keys:
                total_params += f.get_tensor(key).numel()

            print(f"   总参数量: {total_params:,}")
            print(f"   参数占比: {total_params/1_800_000_000*100:.4f}% (相对于1.8B基础模型)")

            # Show the shapes of the first few tensors as examples.
            print("\n3. 参数矩阵示例:")
            for key in keys[:6]:  # first 6 only
                tensor = f.get_tensor(key)
                print(f"   {key}: {list(tensor.shape)}")

    # Report LoRA hyper-parameters from the adapter config, if present.
    adapter_config_path = os.path.join(finetuned_model_path, "adapter_config.json")
    if os.path.exists(adapter_config_path):
        print("\n4. LoRA配置信息:")
        # JSON is UTF-8 by specification; don't depend on the platform locale.
        with open(adapter_config_path, 'r', encoding='utf-8') as f:
            config = json.load(f)
        print(f"   LoRA秩 (r): {config.get('r', 'N/A')}")
        print(f"   LoRA Alpha: {config.get('lora_alpha', 'N/A')}")
        print(f"   Dropout: {config.get('lora_dropout', 'N/A')}")
        print(f"   目标模块: {config.get('target_modules', 'N/A')}")

    print("\n=== 比较完成 ===")


def _print_files(directory, names):
    """Print each name in *names*, appending its size in MB for regular files."""
    for name in names:
        full = os.path.join(directory, name)
        size = os.path.getsize(full) if os.path.isfile(full) else 0
        print(f"     {name} ({size/1024/1024:.1f} MB)" if size > 0 else f"     {name}")

# Entry point: run the comparison when executed as a script.
if __name__ == "__main__":
    detailed_comparison()