import os
import json
import platform
import os

class ConfigManager:
    """
    Configuration manager: loads, persists, and serves model/runtime
    settings from a JSON configuration file.
    """

    def __init__(self, config_file=None):
        """
        Args:
            config_file: path to the JSON config file. Defaults to
                ``config.json`` one directory above this module.
        """
        self.config_file = config_file or os.path.join(
            os.path.dirname(os.path.dirname(__file__)), 'config.json')
        self.config = self.load_config()

    def get_default_config(self):
        """
        Build the default configuration dict.

        Side effect: creates the default model/output/image directories
        if they do not already exist.

        Returns:
            dict: the complete default configuration.
        """
        # Platform-specific defaults: a fixed drive layout on Windows,
        # the HuggingFace cache plus project-relative dirs elsewhere.
        if platform.system().lower() == 'windows':
            default_model_dir = os.path.join('F:\\AI', 'models', 'minicpm-v')
            default_output_dir = os.path.join('F:\\AI', 'outputs')
            default_image_path = os.path.join('F:\\AI', 'test_images', 'test1.jpg')
        else:
            project_root = os.path.dirname(os.path.dirname(__file__))
            default_model_dir = os.path.join(
                os.path.expanduser('~'), '.cache', 'huggingface', 'models', 'minicpm-v')
            default_output_dir = os.path.join(project_root, 'outputs')
            default_image_path = os.path.join(project_root, 'test_images', 'test1.jpg')

        # Create the directories up front so later code can rely on them.
        os.makedirs(default_model_dir, exist_ok=True)
        os.makedirs(default_output_dir, exist_ok=True)
        os.makedirs(os.path.dirname(default_image_path), exist_ok=True)

        return {
            "model": {
                "name": "openbmb/MiniCPM-V-2B-dpo-bf16",
                "cache_dir": default_model_dir,
                "revision": None,
                "local_files_only": True
            },
            "gpu": {
                "device_id": 0,
                "dtype": "bfloat16",
                "enable_amp": True,
                "max_memory": "auto"
            },
            "paths": {
                "model_dir": default_model_dir,
                "output_dir": default_output_dir,
                "default_image_path": default_image_path
            },
            "inference": {
                "max_new_tokens": 1024,
                "temperature": 0.8,
                "top_p": 0.95,
                "enable_thinking": False
            },
            "video": {
                "sample_interval": 5,
                "max_frames": 8
            }
        }

    def _migrate_legacy_model_dir(self, config):
        """
        Backfill ``paths.model_dir`` for configs written before that key
        existed. Mutates ``config`` in place; no-op for current-format
        configs or configs lacking a ``model`` section.
        """
        if 'paths' not in config or 'model_dir' in config['paths']:
            return
        if 'model' not in config:
            return
        model_cfg = config['model']
        # Preference order: explicit local path, then cache dir, then the
        # platform default location.
        if model_cfg.get('local_model_path'):
            model_dir = model_cfg['local_model_path']
        elif model_cfg.get('cache_dir'):
            model_dir = model_cfg['cache_dir']
        elif platform.system().lower() == 'windows':
            model_dir = os.path.join('F:\\AI', 'models', 'minicpm-v')
        else:
            model_dir = os.path.join(
                os.path.expanduser('~'), '.cache', 'huggingface', 'models', 'minicpm-v')
        config['paths']['model_dir'] = model_dir
        print(f"已添加model_dir配置: {model_dir}")

    def load_config(self):
        """
        Load settings from the config file; create the file with default
        settings when it does not exist.

        Returns:
            dict: the loaded (or default) configuration. Never raises —
            falls back to the default config on any error.
        """
        try:
            if os.path.exists(self.config_file):
                with open(self.config_file, 'r', encoding='utf-8') as f:
                    config = json.load(f)
                print(f"已从{self.config_file}加载配置")
                # Upgrade old config layouts in place.
                self._migrate_legacy_model_dir(config)
            else:
                config = self.get_default_config()
                # Write the freshly built defaults back to disk. Guard the
                # makedirs call: a bare filename has an empty dirname, and
                # os.makedirs('') raises FileNotFoundError.
                cfg_dir = os.path.dirname(self.config_file)
                if cfg_dir:
                    os.makedirs(cfg_dir, exist_ok=True)
                with open(self.config_file, 'w', encoding='utf-8') as f:
                    json.dump(config, f, indent=4, ensure_ascii=False)
                print(f"配置文件不存在，已创建默认配置到{self.config_file}")

            # Ensure the directories referenced by the config exist.
            paths = config.get('paths', {})
            if 'model_dir' in paths:
                os.makedirs(paths['model_dir'], exist_ok=True)
            if 'output_dir' in paths:
                os.makedirs(paths['output_dir'], exist_ok=True)

            return config
        except Exception as e:
            # Best-effort: construction must never fail, so report the
            # problem and fall back to in-memory defaults.
            print(f"加载配置时出错: {e}")
            return self.get_default_config()

    def save_config(self):
        """
        Persist the current configuration to the config file.

        Returns:
            bool: True on success, False if writing failed.
        """
        try:
            with open(self.config_file, 'w', encoding='utf-8') as f:
                json.dump(self.config, f, indent=4, ensure_ascii=False)
            print(f"配置已保存到{self.config_file}")
            return True
        except Exception as e:
            print(f"保存配置时出错: {e}")
            return False

    def update_config(self, updates):
        """
        Merge ``updates`` into the current config and persist it.

        Dict values are merged one level deep into matching dict sections;
        any other value — and any previously unknown top-level key — is
        assigned directly. (Previously, unknown top-level keys were
        silently dropped; now they are added.)

        Args:
            updates: mapping of top-level config keys to new values.

        Returns:
            bool: True if saving the merged config succeeded.
        """
        for key, value in updates.items():
            if isinstance(value, dict) and isinstance(self.config.get(key), dict):
                self.config[key].update(value)
            else:
                self.config[key] = value
        return self.save_config()

    def get(self, key_path, default=None):
        """
        Look up a config value by dot-separated path.

        Example: ``config.get('gpu.dtype', 'float32')``

        Args:
            key_path: dot-separated key path into the nested config dict.
            default: value returned when the path does not resolve.

        Returns:
            The resolved value, or ``default`` on any missing key or
            non-dict intermediate node.
        """
        value = self.config
        try:
            for part in key_path.split('.'):
                value = value[part]
            return value
        except (KeyError, TypeError):
            return default

# Module-level singleton configuration manager shared across the application.
# NOTE(review): instantiating here runs load_config at import time, which may
# create config.json and several directories as a side effect — confirm this
# is intended for all import contexts (e.g. test runs).
config_manager = ConfigManager()