#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
统一任务处理器
整合批处理、负载均衡、编译器优化等功能
"""

import os
import time
from typing import Dict, List, Optional, Any, Callable
from concurrent.futures import ThreadPoolExecutor, as_completed

from ..interfaces_module.processor import (
    ITaskProcessor,
    ProcessingResult,
    ProcessingStrategy,
    TaskItem,
    TaskStatus
)
from ..builtin_config_module.builtin_config import BuiltInConfig, ProcessingConfig
from ..utils_module.logger import ObfuscationLogger


class UnifiedTaskProcessor(ITaskProcessor):
    """Unified task processor.

    Combines batch processing, load balancing, and a compiler-batch fast
    path behind a single ``process()`` entry point.  Tasks are dispatched
    by ``task_type`` to handlers registered via ``register_handler``;
    unregistered types fall back to a default echo handler.
    """

    def __init__(self, config: BuiltInConfig, logger: ObfuscationLogger):
        """Initialise the unified task processor.

        Args:
            config: Configuration object.  When it exposes a truthy
                ``processing`` attribute, that ``ProcessingConfig`` is used
                directly; otherwise one is synthesised from legacy flat
                attributes for backward compatibility.
            logger: Logger used for operation / warning / error reporting.
        """
        self.config = config
        self.logger = logger

        # Prefer the structured ProcessingConfig when the config provides one.
        if getattr(config, 'processing', None):
            self.processor_config = config.processing
        else:
            # Backward compatibility: build a ProcessingConfig from the
            # legacy flat attributes, falling back to sensible defaults.
            self.processor_config = ProcessingConfig(
                max_workers=getattr(config, 'max_workers', 4),
                min_batch_size=getattr(config, 'min_batch_size', 10),
                max_batch_size=getattr(config, 'max_batch_size', 50),
                batch_timeout=getattr(config, 'batch_timeout', 0.5),
                enable_caching=getattr(config, 'use_cache', True),
                enable_load_balancing=getattr(config, 'use_load_balancing', True),
                retry_count=getattr(config, 'retry_count', 3),
                retry_delay=getattr(config, 'retry_delay', 1.0)
            )

        # task_type -> handler callable registry.
        self.handlers: Dict[str, Callable] = {}

        # task_id -> TaskStatus, updated as tasks move through processing.
        self.task_status: Dict[str, TaskStatus] = {}

        # Reserved for a long-lived executor; the strategy methods currently
        # create short-lived executors per call.
        self.executor = None

        # Lifetime counters exposed through get_statistics().
        self.statistics = {
            'total_tasks': 0,
            'completed_tasks': 0,
            'failed_tasks': 0,
            'total_batches': 0,
            'avg_batch_size': 0,
            'processing_time': 0
        }
        # Cumulative number of tasks routed through batching; used to keep
        # avg_batch_size accurate across multiple process() calls.
        self._batched_task_count = 0

        # Map each ProcessingStrategy to its implementation.
        self._init_strategies()

    def _init_strategies(self):
        """Build the ProcessingStrategy -> implementation dispatch table."""
        self.strategies = {
            ProcessingStrategy.SERIAL: self._process_serial,
            ProcessingStrategy.PARALLEL: self._process_parallel,
            ProcessingStrategy.BATCH: self._process_batch,
            ProcessingStrategy.ADAPTIVE: self._process_adaptive
        }

    def process(self, tasks: List[TaskItem],
                strategy: ProcessingStrategy = ProcessingStrategy.ADAPTIVE) -> ProcessingResult:
        """Process a list of tasks with the given strategy.

        Args:
            tasks: Tasks to execute.
            strategy: Processing strategy; unknown values fall back to the
                adaptive strategy.

        Returns:
            ProcessingResult: Aggregated results, per-task errors and timing.
        """
        start_time = time.time()

        self.logger.log_operation(
            "任务处理",
            f"开始处理 {len(tasks)} 个任务，策略: {strategy.value}"
        )

        # Mark every submitted task as pending before dispatch.
        for task in tasks:
            self.task_status[task.task_id] = TaskStatus.PENDING

        # Unknown strategies degrade gracefully to the adaptive one.
        strategy_handler = self.strategies.get(strategy, self._process_adaptive)

        try:
            result = strategy_handler(tasks)
        except Exception as e:
            # A strategy-level failure marks every submitted task as failed.
            self.logger.log_error(f"任务处理失败: {e}")
            # Fix: populate total_tasks here too, consistent with the
            # per-strategy paths.
            result = ProcessingResult(total_tasks=len(tasks))
            result.errors = {task.task_id: str(e) for task in tasks}
            result.failed_tasks = len(tasks)

        # Update lifetime statistics.  (Fix: the total/completed/failed task
        # counters were previously never incremented even though
        # get_statistics() exposes them.)
        result.processing_time = time.time() - start_time
        self.statistics['processing_time'] += result.processing_time
        self.statistics['total_tasks'] += len(tasks)
        self.statistics['completed_tasks'] += result.completed_tasks
        self.statistics['failed_tasks'] += result.failed_tasks

        self.logger.log_operation(
            "处理完成",
            f"成功: {result.completed_tasks}, 失败: {result.failed_tasks}, "
            f"耗时: {result.processing_time:.2f}秒"
        )

        return result

    def _process_serial(self, tasks: List[TaskItem]) -> ProcessingResult:
        """Process tasks one at a time on the calling thread."""
        result = ProcessingResult(total_tasks=len(tasks))

        for task in tasks:
            self.task_status[task.task_id] = TaskStatus.RUNNING
            try:
                result.results[task.task_id] = self.process_single(task)
                result.completed_tasks += 1
                self.task_status[task.task_id] = TaskStatus.COMPLETED
            except Exception as e:
                result.errors[task.task_id] = str(e)
                result.failed_tasks += 1
                self.task_status[task.task_id] = TaskStatus.FAILED

        return result

    def _process_parallel(self, tasks: List[TaskItem]) -> ProcessingResult:
        """Process tasks concurrently on a thread pool."""
        result = ProcessingResult(total_tasks=len(tasks))

        with ThreadPoolExecutor(max_workers=self.processor_config.max_workers) as executor:
            # Submit everything up front, then harvest in completion order.
            future_to_task = {
                executor.submit(self.process_single, task): task
                for task in tasks
            }

            for future in as_completed(future_to_task):
                task = future_to_task[future]
                try:
                    result.results[task.task_id] = future.result()
                    result.completed_tasks += 1
                    self.task_status[task.task_id] = TaskStatus.COMPLETED
                except Exception as e:
                    result.errors[task.task_id] = str(e)
                    result.failed_tasks += 1
                    self.task_status[task.task_id] = TaskStatus.FAILED

        return result

    def _process_batch(self, tasks: List[TaskItem]) -> ProcessingResult:
        """Group tasks into batches and process the batches in parallel."""
        result = ProcessingResult(total_tasks=len(tasks))

        batches = self._create_batches(tasks)
        self.statistics['total_batches'] += len(batches)
        # Fix: maintain avg_batch_size (it was exposed but never computed).
        self._batched_task_count += len(tasks)
        if self.statistics['total_batches']:
            self.statistics['avg_batch_size'] = (
                self._batched_task_count / self.statistics['total_batches']
            )

        self.logger.log_operation(
            "批处理",
            f"创建了 {len(batches)} 个批次"
        )

        with ThreadPoolExecutor(max_workers=self.processor_config.max_workers) as executor:
            future_to_batch = {
                executor.submit(self._process_batch_items, batch): batch
                for batch in batches
            }

            for future in as_completed(future_to_batch):
                batch = future_to_batch[future]
                try:
                    # Merge per-task outcomes; a stored Exception means the
                    # task failed inside the batch.
                    for task_id, task_result in future.result().items():
                        if isinstance(task_result, Exception):
                            result.errors[task_id] = str(task_result)
                            result.failed_tasks += 1
                        else:
                            result.results[task_id] = task_result
                            result.completed_tasks += 1
                except Exception as e:
                    # The whole batch failed: mark every task in it failed.
                    for task in batch:
                        result.errors[task.task_id] = f"Batch failed: {e}"
                        result.failed_tasks += 1

        return result

    def _process_adaptive(self, tasks: List[TaskItem]) -> ProcessingResult:
        """Pick the cheapest adequate strategy based on task count."""
        task_count = len(tasks)

        if task_count == 0:
            return ProcessingResult()

        if task_count < self.processor_config.min_batch_size:
            # Few tasks: thread-pool overhead is not worth it.
            self.logger.log_operation("自适应策略", f"任务数少于{self.processor_config.min_batch_size}，使用串行处理")
            return self._process_serial(tasks)

        if task_count < self.processor_config.max_batch_size:
            # Moderate load: plain parallelism without batching.
            self.logger.log_operation("自适应策略", "任务数适中，使用并行处理")
            return self._process_parallel(tasks)

        # Heavy load: batch to amortise per-task overhead.
        self.logger.log_operation("自适应策略", "任务数较多，使用批处理")
        return self._process_batch(tasks)

    def _create_batches(self, tasks: List[TaskItem]) -> List[List[TaskItem]]:
        """Split tasks into batches grouped by (task_type, priority).

        Each batch holds at most ``max_batch_size`` tasks and never mixes
        groups, so homogeneous batches (e.g. compiles) can be optimised.
        """
        # Group tasks so that each batch is homogeneous.
        grouped: Dict[Any, List[TaskItem]] = {}
        for task in tasks:
            grouped.setdefault((task.task_type, task.priority), []).append(task)

        # Chunk each group into max_batch_size-sized slices.
        max_size = self.processor_config.max_batch_size
        batches: List[List[TaskItem]] = []
        for group_tasks in grouped.values():
            for start in range(0, len(group_tasks), max_size):
                batches.append(group_tasks[start:start + max_size])

        return batches

    def _process_batch_items(self, batch: List[TaskItem]) -> Dict[str, Any]:
        """Process one batch; return task_id -> result or Exception."""
        # Compiler tasks get the batched fast path.
        if batch and batch[0].task_type == 'compile':
            return self._process_compiler_batch(batch)

        results: Dict[str, Any] = {}
        for task in batch:
            try:
                self.task_status[task.task_id] = TaskStatus.RUNNING
                results[task.task_id] = self.process_single(task)
                self.task_status[task.task_id] = TaskStatus.COMPLETED
            except Exception as e:
                # Store the exception object itself so the caller can
                # distinguish failures from regular results.
                results[task.task_id] = e
                self.task_status[task.task_id] = TaskStatus.FAILED

        return results

    def _process_compiler_batch(self, batch: List[TaskItem]) -> Dict[str, Any]:
        """Process a compiler batch, grouping tasks by source language."""
        results: Dict[str, Any] = {}

        # Same-language files compile together for batched invocation.
        language_groups: Dict[str, List[TaskItem]] = {}
        for task in batch:
            lang = task.data.get('language', 'unknown')
            language_groups.setdefault(lang, []).append(task)

        for lang, lang_tasks in language_groups.items():
            try:
                self.logger.log_operation(
                    "编译器批处理",
                    f"批量编译 {len(lang_tasks)} 个 {lang} 文件"
                )

                # Placeholder for the real batched compiler invocation.
                for task in lang_tasks:
                    results[task.task_id] = {'compiled': True}

            except Exception as e:
                # On failure record the exception for every task in the group.
                for task in lang_tasks:
                    results[task.task_id] = e

        return results

    def process_single(self, task: TaskItem) -> Dict[str, Any]:
        """Process a single task with its registered handler, retrying on error.

        Args:
            task: The task to process.

        Returns:
            The handler's result dictionary.

        Raises:
            Exception: The last handler error once all retries are exhausted.
        """
        # Fall back to the default echo handler for unregistered types.
        handler = self.handlers.get(task.task_type) or self._default_handler

        last_error = None
        # One initial attempt plus retry_count retries.
        for attempt in range(self.processor_config.retry_count + 1):
            try:
                return handler(task)
            except Exception as e:
                last_error = e
                if attempt < self.processor_config.retry_count:
                    self.logger.log_warning(
                        f"任务 {task.task_id} 失败，重试 {attempt + 1}/{self.processor_config.retry_count}"
                    )
                    time.sleep(self.processor_config.retry_delay)

        raise last_error

    def _default_handler(self, task: TaskItem) -> Dict[str, Any]:
        """Fallback handler: echo the task data back unchanged."""
        return {
            'task_id': task.task_id,
            'processed': True,
            'data': task.data
        }

    def register_handler(self, task_type: str, handler: Callable[[TaskItem], Dict]):
        """Register (or replace) the handler for a task type."""
        self.handlers[task_type] = handler
        self.logger.log_operation("处理器注册", f"已注册 {task_type} 处理器")

    def set_max_workers(self, max_workers: int):
        """Set the thread-pool size used by the parallel/batch strategies."""
        self.processor_config.max_workers = max_workers

    def set_batch_size(self, min_size: int, max_size: int):
        """Set the batch-size bounds used by batching and adaptation."""
        self.processor_config.min_batch_size = min_size
        self.processor_config.max_batch_size = max_size

    def cancel_task(self, task_id: str) -> bool:
        """Cancel a task; only possible while it is still pending."""
        if self.task_status.get(task_id) == TaskStatus.PENDING:
            self.task_status[task_id] = TaskStatus.CANCELLED
            return True
        return False

    def get_task_status(self, task_id: str) -> TaskStatus:
        """Return the tracked status of a task (PENDING if unknown)."""
        return self.task_status.get(task_id, TaskStatus.PENDING)

    def get_statistics(self) -> Dict:
        """Return a shallow copy of the lifetime statistics."""
        return self.statistics.copy()