#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
批量处理优化器
将小任务合并为批次，减少开销并提高处理效率
"""

import os
import time
import threading
from typing import Dict, List, Any, Optional, Callable, Tuple
from dataclasses import dataclass, field
from collections import defaultdict
from enum import Enum

from ..utils_module.logger import ObfuscationLogger
from .load_balancer import LoadBalancer, Task
from ..builtin_config_module.builtin_config import ProcessingConfig


class BatchStrategy(Enum):
    """Batch-creation strategies: how pending items are grouped into batches."""
    SIZE_BASED = "size_based"       # flush when accumulated byte size crosses max_batch_memory
    TIME_BASED = "time_based"       # flush only via the wait-time timeout in the batch loop
    COUNT_BASED = "count_based"     # flush when the item count reaches max_batch_size
    ADAPTIVE = "adaptive"           # combine count/size thresholds with priority heuristics


@dataclass
class BatchItem:
    """A single unit of work queued for batch processing.

    item_id/type/data identify and carry the work; size (bytes) feeds the
    size-based batching thresholds; lower priority values are more urgent
    (priority < 3 triggers faster adaptive flushing).
    """
    item_id: str
    type: str
    data: Any
    size: int = 0
    priority: int = 5
    metadata: Dict[str, Any] = field(default_factory=dict)
    # BUG FIX: enqueue timestamp. _should_create_batch_timeout() probes an
    # 'add_time' attribute to measure how long the oldest item has waited;
    # without this field that check always failed and the time-based flush
    # never fired. compare=False/repr=False keep equality and repr
    # behavior identical to the original dataclass.
    add_time: float = field(default_factory=time.time, compare=False, repr=False)


@dataclass
class Batch:
    """A group of BatchItems processed together as one unit of work."""
    batch_id: str                                       # unique id, "batch_<n>_<type>"
    items: List[BatchItem] = field(default_factory=list)
    total_size: int = 0                                 # sum of item sizes, in bytes
    created_time: float = field(default_factory=time.time)
    status: str = "pending"                             # "pending" | "completed" | "failed"
    result: Optional[Any] = None                        # processor output on success
    error: Optional[str] = None                         # error text on failure


class BatchProcessor:
    """批量处理优化器"""

    def __init__(self, config: ProcessingConfig = None,
                 logger: ObfuscationLogger = None):
        """
        初始化批量处理器

        Args:
            config: 处理配置 (ProcessingConfig)
            logger: 日志记录器
        """
        self.config = config or ProcessingConfig()
        self.logger = logger

        # 将strategy字符串转换为枚举
        strategy_str = self.config.strategy.lower()
        if strategy_str == "size_based":
            self.strategy = BatchStrategy.SIZE_BASED
        elif strategy_str == "time_based":
            self.strategy = BatchStrategy.TIME_BASED
        elif strategy_str == "count_based":
            self.strategy = BatchStrategy.COUNT_BASED
        else:
            self.strategy = BatchStrategy.ADAPTIVE

        # 批次管理
        self.pending_items: Dict[str, List[BatchItem]] = defaultdict(list)
        self.active_batches: Dict[str, Batch] = {}
        self.completed_batches: Dict[str, Batch] = {}
        self.batch_counter = 0
        self.batch_lock = threading.RLock()

        # 处理器
        self.processors: Dict[str, Callable] = {}

        # 负载均衡器
        self.load_balancer = LoadBalancer(
            logger=logger,
            max_workers=config.parallel_batches if config else 4
        )

        # 批处理线程
        self.batch_thread = None
        self.running = False
        self.stop_event = threading.Event()

        # 统计信息
        self.stats = {
            'total_items': 0,
            'total_batches': 0,
            'processed_items': 0,
            'failed_items': 0,
            'avg_batch_size': 0,
            'avg_process_time': 0,
            'total_bytes': 0
        }

        if logger:
            logger.log_operation("批处理器", f"初始化完成: 策略={self.strategy}")

    def register_processor(self, item_type: str, processor: Callable):
        """
        注册类型处理器

        Args:
            item_type: 项目类型
            processor: 处理函数
        """
        self.processors[item_type] = processor
        if self.logger:
            self.logger.log_operation("注册处理器", f"类型: {item_type}")

    def add_item(self, item: BatchItem) -> bool:
        """
        Add an item to the batch-processing queue.

        NOTE(review): this definition is shadowed by a second `add_item`
        defined later in this class, so at runtime the later definition
        wins and the `total_bytes` accounting below never executes —
        confirm which definition is intended and remove the duplicate.

        Args:
            item: batch item to enqueue

        Returns:
            True when the item was queued
        """
        with self.batch_lock:
            # Queue under the item's type so the matching registered
            # processor is used when the batch runs.
            self.pending_items[item.type].append(item)
            self.stats['total_items'] += 1
            self.stats['total_bytes'] += item.size

            # Flush immediately if the active strategy's threshold is met
            if self._should_create_batch(item.type):
                self._create_batch(item.type)

            return True

    def add_items(self, items: List[BatchItem]):
        """
        批量添加项目

        Args:
            items: 批处理项列表
        """
        for item in items:
            self.add_item(item)

    def start(self):
        """启动批处理器"""
        if self.running:
            return

        self.running = True
        self.stop_event.clear()

        # 启动批处理线程
        self.batch_thread = threading.Thread(
            target=self._batch_loop,
            daemon=True
        )
        self.batch_thread.start()

        # 启动负载均衡器
        self.load_balancer.set_task_processor(self._process_batch_task)
        self.load_balancer.start()

        if self.logger:
            self.logger.log_operation("批处理器", "已启动")

    def stop(self, timeout: float = 30):
        """
        停止批处理器

        Args:
            timeout: 超时时间
        """
        self.running = False
        self.stop_event.set()

        # 处理剩余项目
        self._flush_all()

        # 等待批处理线程
        if self.batch_thread and self.batch_thread.is_alive():
            self.batch_thread.join(timeout=timeout)

        # 停止负载均衡器
        self.load_balancer.stop(timeout=timeout)

        if self.logger:
            self.logger.log_operation(
                "批处理器",
                f"已停止: 处理={self.stats['processed_items']}, "
                f"失败={self.stats['failed_items']}"
            )

    def _batch_loop(self):
        """批处理循环"""
        while self.running:
            # 定期检查并创建批次
            with self.batch_lock:
                for item_type in list(self.pending_items.keys()):
                    if self._should_create_batch_timeout(item_type):
                        self._create_batch(item_type)

            # 等待间隔
            self.stop_event.wait(0.1)

    def _should_create_batch(self, item_type: str) -> bool:
        """
        判断是否应该创建批次

        Args:
            item_type: 项目类型

        Returns:
            是否创建
        """
        items = self.pending_items[item_type]
        if not items:
            return False

        if self.strategy == BatchStrategy.COUNT_BASED:
            return len(items) >= self.config.max_batch_size

        elif self.strategy == BatchStrategy.SIZE_BASED:
            total_size = sum(item.size for item in items)
            return total_size >= self.config.max_batch_memory

        elif self.strategy == BatchStrategy.TIME_BASED:
            return False  # 仅通过超时触发

        elif self.strategy == BatchStrategy.ADAPTIVE:
            # 自适应策略
            count_threshold = len(items) >= self.config.max_batch_size
            size_threshold = sum(item.size for item in items) >= self.config.max_batch_memory

            # 高优先级项目更快处理
            high_priority = any(item.priority < 3 for item in items)
            if high_priority and len(items) >= self.config.min_batch_size:
                return True

            return count_threshold or size_threshold

        return False

    def _should_create_batch_timeout(self, item_type: str) -> bool:
        """
        判断是否因超时创建批次

        Args:
            item_type: 项目类型

        Returns:
            是否创建
        """
        items = self.pending_items[item_type]
        if not items or len(items) < self.config.min_batch_size:
            return False

        # 检查最早项目的等待时间
        current_time = time.time()
        # 假设items按添加顺序排列
        if items and hasattr(items[0], 'add_time'):
            wait_time = current_time - items[0].add_time
            return wait_time >= self.config.max_wait_time

        return False

    def _create_batch(self, item_type: str) -> Optional[Batch]:
        """
        创建批次

        Args:
            item_type: 项目类型

        Returns:
            创建的批次
        """
        items = self.pending_items[item_type]
        if not items:
            return None

        # 确定批次大小
        batch_size = min(len(items), self.config.max_batch_size)

        # 提取项目
        batch_items = items[:batch_size]
        self.pending_items[item_type] = items[batch_size:]

        # 创建批次
        self.batch_counter += 1
        batch = Batch(
            batch_id=f"batch_{self.batch_counter}_{item_type}",
            items=batch_items,
            total_size=sum(item.size for item in batch_items)
        )

        self.active_batches[batch.batch_id] = batch
        self.stats['total_batches'] += 1

        # 创建任务并提交到负载均衡器
        task = Task(
            task_id=batch.batch_id,
            file_path="",  # 批处理不需要文件路径
            priority=min(item.priority for item in batch_items),
            estimated_time=self._estimate_batch_time(batch),
            metadata={'batch': batch, 'type': item_type}
        )

        self.load_balancer.add_task(task)

        if self.logger:
            self.logger.log_operation(
                "创建批次",
                f"{batch.batch_id}: {len(batch_items)}项, {batch.total_size}字节"
            )

        return batch

    def _process_batch_task(self, task: Task) -> Any:
        """
        处理批次任务

        Args:
            task: 任务对象

        Returns:
            处理结果
        """
        batch = task.metadata.get('batch')
        item_type = task.metadata.get('type')

        if not batch or not item_type:
            raise ValueError("无效的批次任务")

        # 获取处理器
        processor = self.processors.get(item_type)
        if not processor:
            raise ValueError(f"未找到类型 {item_type} 的处理器")

        start_time = time.time()

        try:
            # 处理批次
            if self.strategy == BatchStrategy.ADAPTIVE:
                # 自适应处理
                result = self._adaptive_process(batch, processor)
            else:
                # 标准批处理
                result = processor(batch.items)

            # 更新批次状态
            batch.status = "completed"
            batch.result = result

            # 移到已完成
            with self.batch_lock:
                if batch.batch_id in self.active_batches:
                    del self.active_batches[batch.batch_id]
                self.completed_batches[batch.batch_id] = batch

            # 更新统计
            elapsed = time.time() - start_time
            self.stats['processed_items'] += len(batch.items)
            self._update_avg_stats(len(batch.items), elapsed)

            if self.logger:
                self.logger.log_operation(
                    "批次完成",
                    f"{batch.batch_id}: {len(batch.items)}项, 耗时{elapsed:.2f}秒"
                )

            return result

        except Exception as e:
            # 处理失败
            batch.status = "failed"
            batch.error = str(e)

            self.stats['failed_items'] += len(batch.items)

            if self.logger:
                self.logger.log_error(f"批次处理失败 {batch.batch_id}: {e}")

            raise

    def _adaptive_process(self, batch: Batch, processor: Callable) -> Any:
        """
        自适应批处理

        Args:
            batch: 批次
            processor: 处理器

        Returns:
            处理结果
        """
        items = batch.items

        # 根据项目特征分组
        groups = self._group_items(items)

        results = []
        for group in groups:
            # 并行处理组
            if len(group) > 20:  # 大组并行
                # 分成子批次
                sub_batches = [group[i:i+10] for i in range(0, len(group), 10)]
                sub_results = []

                # 使用线程池处理
                import concurrent.futures
                with concurrent.futures.ThreadPoolExecutor(max_workers=4) as executor:
                    futures = [executor.submit(processor, sub_batch)
                              for sub_batch in sub_batches]
                    for future in concurrent.futures.as_completed(futures):
                        sub_results.append(future.result())

                # 合并结果
                results.extend(self._merge_results(sub_results))
            else:
                # 串行处理小组
                result = processor(group)
                results.append(result)

        return self._merge_results(results)

    def _group_items(self, items: List[BatchItem]) -> List[List[BatchItem]]:
        """
        将项目分组以优化处理

        Args:
            items: 项目列表

        Returns:
            分组后的项目
        """
        # 按优先级和大小分组
        groups = defaultdict(list)

        for item in items:
            # 组合键：优先级和大小范围
            size_range = item.size // (10 * 1024)  # 10KB为一个范围
            key = (item.priority, size_range)
            groups[key].append(item)

        return list(groups.values())

    def _merge_results(self, results: List[Any]) -> Any:
        """
        合并处理结果

        Args:
            results: 结果列表

        Returns:
            合并后的结果
        """
        if not results:
            return None

        # 如果结果是字典，合并字典
        if all(isinstance(r, dict) for r in results):
            merged = {}
            for r in results:
                merged.update(r)
            return merged

        # 如果结果是列表，连接列表
        if all(isinstance(r, list) for r in results):
            merged = []
            for r in results:
                merged.extend(r)
            return merged

        # 其他情况返回列表
        return results

    def _estimate_batch_time(self, batch: Batch) -> float:
        """
        估算批次处理时间

        Args:
            batch: 批次

        Returns:
            预估时间（秒）
        """
        # 基于项目数量和大小估算
        base_time = 0.01  # 基础开销
        per_item_time = 0.005  # 每项目时间
        size_factor = batch.total_size / (1024 * 1024)  # MB

        estimated = base_time + len(batch.items) * per_item_time + size_factor * 0.1

        # 使用历史数据调整
        if self.stats['avg_process_time'] > 0:
            # 加权平均
            estimated = estimated * 0.3 + self.stats['avg_process_time'] * 0.7

        return estimated

    def _update_avg_stats(self, items_count: int, process_time: float):
        """
        更新平均统计

        Args:
            items_count: 项目数量
            process_time: 处理时间
        """
        total_batches = self.stats['total_batches']

        # 更新平均批大小
        self.stats['avg_batch_size'] = (
            (self.stats['avg_batch_size'] * (total_batches - 1) + items_count)
            / total_batches
        )

        # 更新平均处理时间
        self.stats['avg_process_time'] = (
            (self.stats['avg_process_time'] * (total_batches - 1) + process_time)
            / total_batches
        )

    def _flush_all(self):
        """刷新所有待处理项目"""
        with self.batch_lock:
            for item_type in list(self.pending_items.keys()):
                while self.pending_items[item_type]:
                    self._create_batch(item_type)

        # 等待所有批次完成
        self.load_balancer.wait_for_completion(timeout=10)

    def process_batches(self, tasks: List[Any], batch_size: int = None) -> Any:
        """处理批任务"""
        from core.interfaces_module.processor import ProcessingResult

        batch_size = batch_size or self.config.max_batch_size
        processed_count = 0
        failed_count = 0

        start_time = time.time()

        # 分批处理
        for i in range(0, len(tasks), batch_size):
            batch = tasks[i:i + batch_size]

            try:
                # 实际处理批次
                self._process_single_batch(batch)
                processed_count += len(batch)

            except Exception as e:
                self.logger.log_error(f"批处理失败: {e}")
                failed_count += len(batch)

        processing_time = time.time() - start_time

        return ProcessingResult(
            total_tasks=len(tasks),
            completed_tasks=processed_count,
            failed_tasks=failed_count,
            processing_time=processing_time
        )

    def _process_single_batch(self, batch: List[Any]):
        """处理单个批次"""
        # 简单的批次处理逻辑
        for item in batch:
            # 这里可以添加具体的处理逻辑
            time.sleep(0.001)  # 模拟处理时间

    def add_item(self, item: Any) -> bool:
        """添加项目到批处理队列"""
        with self.batch_lock:
            item_type = getattr(item, 'task_type', 'default')
            self.pending_items[item_type].append(item)
            self.stats['total_items'] += 1

            # 检查是否需要创建批次
            if self._should_create_batch(item_type):
                self._create_batch(item_type)

            return True

    def get_batch_result(self, batch_id: str) -> Optional[Any]:
        """
        获取批次结果

        Args:
            batch_id: 批次ID

        Returns:
            批次结果
        """
        batch = self.completed_batches.get(batch_id)
        if batch:
            return batch.result
        return None

    def get_statistics(self) -> Dict[str, Any]:
        """
        获取统计信息

        Returns:
            统计信息字典
        """
        # 获取负载均衡器统计
        lb_stats = self.load_balancer.get_statistics()

        return {
            **self.stats,
            'pending_items': sum(len(items) for items in self.pending_items.values()),
            'active_batches': len(self.active_batches),
            'completed_batches': len(self.completed_batches),
            'load_balancer': lb_stats
        }

    def optimize_config(self):
        """根据运行统计优化配置"""
        if self.stats['total_batches'] < 10:
            return  # 数据不足

        avg_size = self.stats['avg_batch_size']
        avg_time = self.stats['avg_process_time']

        # 调整批大小
        if avg_time > 2.0:  # 处理时间过长
            self.config.max_batch_size = max(
                self.config.min_batch_size,
                int(self.config.max_batch_size * 0.8)
            )
        elif avg_time < 0.5:  # 处理时间过短
            self.config.max_batch_size = min(
                200,
                int(self.config.max_batch_size * 1.2)
            )

        # 调整并行数
        lb_stats = self.load_balancer.get_statistics()
        if lb_stats['performance']['cpu_usage'] < 50:
            self.config.parallel_batches = min(
                16,
                self.config.parallel_batches + 1
            )
        elif lb_stats['performance']['cpu_usage'] > 80:
            self.config.parallel_batches = max(
                2,
                self.config.parallel_batches - 1
            )

        if self.logger:
            self.logger.log_operation(
                "配置优化",
                f"批大小={self.config.max_batch_size}, "
                f"并行数={self.config.parallel_batches}"
            )