import psutil
import platform
import subprocess
import re
from typing import Dict

def get_hardware_info() -> Dict:
    """Collect CPU, memory, GPU and disk information for the local machine.

    Returns:
        dict with keys:
            'cpu':       {'name', 'cores', 'threads'}
            'memory':    {'total', 'available'}   (GiB, 1 decimal)
            'gpu':       list of {'name', 'vram'} (GiB) -- present only when
                         nvidia-smi runs successfully
            'gpu_error': error text when GPU detection fails (no exception
                         escapes this function for the GPU probe)
            'storage':   {'total', 'free'}        (GiB, root filesystem)
    """
    info = {}

    # CPU information.  NOTE: platform.processor() may be an empty string
    # on some platforms (e.g. certain Linux distros).
    info['cpu'] = {
        'name': platform.processor(),
        'cores': psutil.cpu_count(logical=False),
        'threads': psutil.cpu_count(logical=True)
    }

    # Memory information (GiB).
    mem = psutil.virtual_memory()
    info['memory'] = {
        'total': round(mem.total / (1024**3), 1),
        'available': round(mem.available / (1024**3), 1)
    }

    # GPU information via nvidia-smi.  A missing binary, a hung driver, or
    # a parse failure is reported under 'gpu_error' instead of being raised.
    try:
        result = subprocess.run(
            ['nvidia-smi', '--query-gpu=name,memory.total', '--format=csv,noheader'],
            stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True,
            timeout=10)  # don't hang the whole probe on a wedged driver

        if result.returncode == 0:
            gpus = []
            for line in result.stdout.strip().split('\n'):
                if not line.strip():
                    continue  # tolerate trailing/blank lines in the CSV output
                # Split on the LAST comma so GPU names that themselves
                # contain commas are parsed correctly.
                name, vram = line.rsplit(',', 1)
                # vram looks like "24576 MiB" -> take the number, MiB -> GiB.
                vram_gb = round(int(vram.split()[0]) / 1024, 1)
                gpus.append({'name': name.strip(), 'vram': vram_gb})
            info['gpu'] = gpus
        else:
            info['gpu_error'] = result.stderr.strip()
    except Exception as e:
        # Broad catch is deliberate: GPU detection is best-effort.
        info['gpu_error'] = str(e)

    # Disk usage of the root filesystem (GiB).
    disk = psutil.disk_usage('/')
    info['storage'] = {
        'total': round(disk.total / (1024**3), 1),
        'free': round(disk.free / (1024**3), 1)
    }

    return info

def recommend_llm_models(hw_info: Dict) -> Dict:
    """Map detected hardware to LLM deployment feasibility flags.

    Args:
        hw_info: dict produced by get_hardware_info().

    Returns:
        dict with:
            'memory_based': {model size -> bool} based on total RAM (GB)
            'gpu_based':    {quantized model -> bool} based on summed VRAM,
                            present only when GPU info is available
            'storage':      {'min_required': bool} -- at least 20 GB free
    """
    # Thresholds expressed as data so the comparison logic is written once.
    ram_thresholds = (('7B', 8), ('13B', 16), ('30B', 32), ('65B', 64))
    vram_thresholds = (('7B-4bit', 6), ('13B-4bit', 10),
                       ('30B-4bit', 20), ('7B-fp16', 14))

    ram_gb = hw_info['memory']['total']
    result = {
        'memory_based': {label: ram_gb >= need for label, need in ram_thresholds}
    }

    # VRAM-based suggestions only make sense when at least one GPU was found.
    gpus = hw_info.get('gpu')
    if gpus:
        total_vram = sum(gpu['vram'] for gpu in gpus)
        result['gpu_based'] = {
            label: total_vram >= need for label, need in vram_thresholds
        }

    # Rough lower bound on disk space for model weights.
    result['storage'] = {'min_required': hw_info['storage']['free'] >= 20}

    return result

if __name__ == '__main__':
    # Entry point: probe the hardware, print a summary, then print which
    # model sizes the machine can plausibly host.
    hardware = get_hardware_info()
    print('\n硬件检测结果:')

    cpu = hardware['cpu']
    print(f"CPU: {cpu['name']} ({cpu['cores']}核/{cpu['threads']}线程)")
    mem = hardware['memory']
    print(f"内存: {mem['total']}GB (可用 {mem['available']}GB)")

    # GPU data may be absent if nvidia-smi failed or is not installed.
    if 'gpu' not in hardware:
        print("GPU信息获取失败:", hardware.get('gpu_error', '未知错误'))
    else:
        for idx, gpu in enumerate(hardware['gpu'], start=1):
            print(f"GPU #{idx}: {gpu['name']} ({gpu['vram']}GB VRAM)")

    storage = hardware['storage']
    print(f"存储: 剩余 {storage['free']}GB / 总计 {storage['total']}GB")

    recs = recommend_llm_models(hardware)
    print('\n推荐部署的模型:')
    for label, fits in recs['memory_based'].items():
        if fits:
            print(f"- {label} 模型 (CPU/内存部署)")

    if 'gpu_based' in recs:
        print("\nGPU加速建议:")
        for label, fits in recs['gpu_based'].items():
            if fits:
                print(f"- {label} 可以尝试")

    if not recs['storage']['min_required']:
        print("\n警告: 可用存储空间不足20GB，建议清理空间")