# main.py
from concurrent.futures import ProcessPoolExecutor, as_completed
from utils.analyzer4qwen import alanyze_runner
from utils.minio_client import ensure_bucket
import logging
import sys
from dotenv import load_dotenv
import hydra
from configs import initialize_config
from omegaconf import DictConfig
# from configs import get_config
from pathlib import Path
import os
import json

logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
logger = logging.getLogger(__name__)

# Load the .env file
def load_environment():
    """Explicitly load environment variables from the .env next to this script.

    Uses python-dotenv to populate ``os.environ`` (``override=True`` so .env
    values win over pre-existing ones), then reports which of the expected
    variables are present.  Credential-like values are masked before printing
    so secrets never land in logs or captured stdout.
    """
    # Resolve the .env path relative to this file, not the current directory.
    env_path = Path(__file__).parent / '.env'
    print(f"Looking for .env at: {env_path}")

    if not env_path.exists():
        print(".env file not found!")
        return

    print(".env file found, loading...")
    load_dotenv(dotenv_path=env_path, override=True)

    # Variables the application expects; secrets below are printed masked.
    env_vars = ['API_BASE_URL', 'API_TOKEN', 'MINIO_ACCESS_KEY', 'OLLAMA_MODEL']
    sensitive = {'API_TOKEN', 'MINIO_ACCESS_KEY'}

    print("Environment variables from .env:")
    for var in env_vars:
        value = os.getenv(var)
        if not value:
            print(f"  {var}: NOT SET")
        elif var in sensitive:
            # Never echo credentials in full; show only a short prefix.
            print(f"  {var}: {value[:4]}*** (masked)")
        else:
            print(f"  {var}: {value}")

# Load environment variables before Hydra runs, so the composed config and
# any env-based interpolation can see the values from .env.
load_environment()

def run_single_task(config: DictConfig):
    """Run one analysis task: init config, ensure the MinIO bucket, analyze.

    Returns a small status dict instead of killing the worker process:
    ``{"status": "ok"}`` on success, ``{"error": "<message>"}`` on failure.
    The parent checks ``"error" in result``, so returning a dict (rather than
    the old implicit ``None`` plus ``sys.exit(1)``, which both broke that
    membership test and tore down the pool worker) makes the check valid and
    keeps the other workers alive when one task fails.
    """
    try:
        # Re-initialize config in this child process (ProcessPoolExecutor
        # workers do not share the parent's initialized state).
        initialize_config(config)
        ensure_bucket()
        alanyze_runner(config)
    except Exception as e:
        logger.critical(f"🛑 系统错误: {e}")
        return {"error": str(e)}
    return {"status": "ok"}

@hydra.main(version_base=None, config_path="./configs", config_name="config")
def main(config: DictConfig):
    """Entry point: fan out ``max_workers`` parallel analysis tasks.

    Each worker process runs ``run_single_task`` with the same Hydra config;
    results are logged as they complete.  Exits with status 1 on any
    system-level failure (e.g. a broken worker pool).
    """
    initialize_config(config)
    # Number of concurrent tasks; keep at or below the CPU core count.
    num_workers = int(config['app']['max_workers'])

    try:
        logger.info(f"🚀 启动 {num_workers} 路并发分析任务...")

        with ProcessPoolExecutor(max_workers=num_workers) as executor:
            # Submit one task per worker, all sharing the same config.
            futures = [
                executor.submit(run_single_task, config)
                for _ in range(num_workers)
            ]

            # Handle tasks in completion order rather than submission order.
            for i, future in enumerate(as_completed(futures)):
                result = future.result()
                # Guard against workers that return None instead of a status
                # dict — the bare `"error" in result` raised TypeError then.
                if isinstance(result, dict) and "error" in result:
                    logger.warning(f"⚠️ 任务 {i+1} 出错: {result}")
                else:
                    logger.info(f"✅ 任务 {i+1} 完成")

        logger.info("🎉 所有任务执行完毕")

    except Exception as e:
        logger.critical(f"🛑 系统级错误: {e}")
        sys.exit(1)
        
# Script entry point: Hydra parses CLI overrides and invokes main() with the
# composed configuration object.
if __name__ == '__main__':
    main()