import os
from datetime import timedelta

basedir = os.path.abspath(os.path.dirname(__file__))

# Best-effort environment bootstrap: load .env first, then let
# transfer_optimization.env (when present) override its values.
try:
    from dotenv import load_dotenv

    # Project-root .env
    load_dotenv(os.path.join(basedir, '.env'))
    # Optimization overrides take precedence over .env
    opt_env = os.path.join(basedir, 'transfer_optimization.env')
    if os.path.exists(opt_env):
        load_dotenv(opt_env, override=True)
except Exception:
    # Deliberately ignored: python-dotenv may be absent or files missing.
    pass

class Config:
    """Flask application configuration.

    Settings are read from the environment (optionally populated from
    ``.env`` / ``transfer_optimization.env`` at import time) with fixed
    defaults.  The ``get_*`` classmethods re-read the environment on every
    call so :meth:`reload_config` can pick up changes made after import.
    """

    # NOTE(review): fallback secret key is a placeholder — must be set via
    # the SECRET_KEY environment variable in production.
    SECRET_KEY = os.environ.get('SECRET_KEY') or 'your-secret-key-here'
    SQLALCHEMY_DATABASE_URI = os.environ.get('DATABASE_URL') or \
        'sqlite:///' + os.path.join(basedir, 'app.db')
    SQLALCHEMY_TRACK_MODIFICATIONS = False

    # Celery configuration (env-overridable; defaults unchanged from the
    # previous hard-coded values, so existing deployments are unaffected).
    CELERY_BROKER_URL = os.environ.get('CELERY_BROKER_URL', 'redis://localhost:6379/0')
    CELERY_RESULT_BACKEND = os.environ.get('CELERY_RESULT_BACKEND', 'redis://localhost:6379/0')

    # File upload configuration
    MAX_CONTENT_LENGTH = 16 * 1024 * 1024  # 16MB max file size
    UPLOAD_FOLDER = os.path.join(basedir, 'uploads')

    # SSH configuration
    SSH_KEY_FOLDER = os.path.join(basedir, 'ssh_keys')

    # Docker configuration
    DOCKER_TIMEOUT = int(os.environ.get('DOCKER_TIMEOUT', 300))  # default timeout: 5 minutes
    DOCKER_PULL_TIMEOUT = int(os.environ.get('DOCKER_PULL_TIMEOUT', 600))  # pull timeout: 10 minutes
    DOCKER_SAVE_TIMEOUT = int(os.environ.get('DOCKER_SAVE_TIMEOUT', 900))  # save timeout: 15 minutes

    # Transfer task configuration
    TRANSFER_CHUNK_SIZE = int(os.environ.get('TRANSFER_CHUNK_SIZE', 8 * 1024 * 1024))  # 8MB chunks (raised from the old 1MB default)
    TRANSFER_PROGRESS_INTERVAL = int(os.environ.get('TRANSFER_PROGRESS_INTERVAL', 2))  # progress update interval in seconds

    # Transfer optimization configuration
    TRANSFER_USE_COMPRESSION = os.environ.get('TRANSFER_USE_COMPRESSION', 'true').lower() == 'true'  # enable compression
    TRANSFER_COMPRESSION_LEVEL = int(os.environ.get('TRANSFER_COMPRESSION_LEVEL', 6))  # compression level 1-9
    TRANSFER_USE_PARALLEL = os.environ.get('TRANSFER_USE_PARALLEL', 'true').lower() == 'true'  # enable parallel transfer
    TRANSFER_PARALLEL_CHUNKS = int(os.environ.get('TRANSFER_PARALLEL_CHUNKS', 3))  # number of parallel chunks

    # Alternative transfer method configuration
    TRANSFER_USE_HTTP = os.environ.get('TRANSFER_USE_HTTP', 'false').lower() == 'true'  # use HTTP transfer
    TRANSFER_HTTP_PORT = int(os.environ.get('TRANSFER_HTTP_PORT', 8080))  # HTTP transfer port
    TRANSFER_USE_RSYNC = os.environ.get('TRANSFER_USE_RSYNC', 'false').lower() == 'true'  # use rsync

    @classmethod
    def get_transfer_chunk_size(cls):
        """Return the current transfer chunk size in bytes (re-reads env)."""
        return int(os.environ.get('TRANSFER_CHUNK_SIZE', 8 * 1024 * 1024))

    @classmethod
    def get_transfer_use_compression(cls):
        """Return whether compression is currently enabled (re-reads env)."""
        return os.environ.get('TRANSFER_USE_COMPRESSION', 'true').lower() == 'true'

    @classmethod
    def get_transfer_compression_level(cls):
        """Return the current compression level (re-reads env)."""
        return int(os.environ.get('TRANSFER_COMPRESSION_LEVEL', 6))

    @classmethod
    def get_transfer_use_parallel(cls):
        """Return whether parallel transfer is currently enabled (re-reads env)."""
        return os.environ.get('TRANSFER_USE_PARALLEL', 'true').lower() == 'true'

    @classmethod
    def get_transfer_parallel_chunks(cls):
        """Return the current number of parallel chunks (re-reads env)."""
        return int(os.environ.get('TRANSFER_PARALLEL_CHUNKS', 3))

    @classmethod
    def get_transfer_use_http(cls):
        """Return whether HTTP transfer is currently enabled (re-reads env)."""
        return os.environ.get('TRANSFER_USE_HTTP', 'false').lower() == 'true'

    @classmethod
    def get_transfer_use_rsync(cls):
        """Return whether rsync transfer is currently enabled (re-reads env)."""
        return os.environ.get('TRANSFER_USE_RSYNC', 'false').lower() == 'true'

    @classmethod
    def get_transfer_http_port(cls):
        """Return the current HTTP transfer port (re-reads env)."""
        return int(os.environ.get('TRANSFER_HTTP_PORT', 8080))

    @classmethod
    def reload_config(cls):
        """Re-read the env files and refresh the cached transfer settings.

        All TRANSFER_* class attributes are updated in place so code that
        reads them (rather than the ``get_*`` accessors) sees fresh values.
        """
        # Re-load the env files so changes on disk take effect (overrides
        # from transfer_optimization.env win, matching import-time behavior).
        try:
            from dotenv import load_dotenv
            load_dotenv(os.path.join(basedir, '.env'))
            opt_env = os.path.join(basedir, 'transfer_optimization.env')
            if os.path.exists(opt_env):
                load_dotenv(opt_env, override=True)
        except Exception:
            # Best-effort: python-dotenv may not be installed.
            pass

        cls.TRANSFER_CHUNK_SIZE = cls.get_transfer_chunk_size()
        cls.TRANSFER_USE_COMPRESSION = cls.get_transfer_use_compression()
        cls.TRANSFER_COMPRESSION_LEVEL = cls.get_transfer_compression_level()
        cls.TRANSFER_USE_PARALLEL = cls.get_transfer_use_parallel()
        cls.TRANSFER_PARALLEL_CHUNKS = cls.get_transfer_parallel_chunks()
        cls.TRANSFER_USE_HTTP = cls.get_transfer_use_http()
        cls.TRANSFER_USE_RSYNC = cls.get_transfer_use_rsync()
        # Fix: TRANSFER_HTTP_PORT was previously never refreshed here even
        # though get_transfer_http_port() exists for exactly this purpose.
        cls.TRANSFER_HTTP_PORT = cls.get_transfer_http_port()

        print(f"配置已重新加载:")
        print(f"  切片大小: {cls.TRANSFER_CHUNK_SIZE // (1024*1024)}MB")
        print(f"  压缩: {'启用' if cls.TRANSFER_USE_COMPRESSION else '禁用'}")
        print(f"  压缩级别: {cls.TRANSFER_COMPRESSION_LEVEL}")
        print(f"  并行传输: {'启用' if cls.TRANSFER_USE_PARALLEL else '禁用'}")
        print(f"  并行数量: {cls.TRANSFER_PARALLEL_CHUNKS}")
        print(f"  HTTP传输: {'启用' if cls.TRANSFER_USE_HTTP else '禁用'}")
        print(f"  rsync: {'启用' if cls.TRANSFER_USE_RSYNC else '禁用'}")

    @staticmethod
    def init_app(app):
        """Create the directories the app needs; safe to call repeatedly.

        Args:
            app: Flask application whose config supplies the folder paths.
        """
        for folder in (app.config['UPLOAD_FOLDER'], app.config['SSH_KEY_FOLDER']):
            # exist_ok avoids the check-then-create race of the old
            # os.path.exists() guard and is a no-op if the dir exists.
            os.makedirs(folder, exist_ok=True)

# Module-level shared configuration instance imported by the rest of the app.
config = Config()
