"""
数据管道

提供数据处理和传输的管道功能。
"""

from typing import Dict, List, Any, Optional, Callable, AsyncGenerator, Union
from datetime import datetime, timedelta
from dataclasses import dataclass, field
from enum import Enum
import asyncio
from abc import ABC, abstractmethod


class PipelineStatus(Enum):
    """Lifecycle states a data pipeline can be in."""

    IDLE = "idle"            # waiting for work
    RUNNING = "running"      # actively processing
    PAUSED = "paused"        # temporarily suspended
    STOPPED = "stopped"      # halted by request
    ERROR = "error"          # terminated by a failure
    COMPLETED = "completed"  # finished successfully


class ProcessorType(Enum):
    """Categories of data processors a pipeline step may use."""

    FILTER = "filter"            # drops items failing a predicate
    TRANSFORMER = "transformer"  # maps items to new values
    VALIDATOR = "validator"      # checks items against rules
    AGGREGATOR = "aggregator"    # buffers items into batches
    ENRICHER = "enricher"        # augments items with extra data
    CLEANER = "cleaner"          # normalizes / sanitizes items


@dataclass
class PipelineMetrics:
    """Counters and timing statistics accumulated during a pipeline run."""

    total_processed: int = 0
    successful_processed: int = 0
    failed_processed: int = 0
    start_time: Optional[datetime] = None
    end_time: Optional[datetime] = None
    processing_time: timedelta = field(default_factory=timedelta)
    throughput: float = 0.0  # items handled per second
    error_rate: float = 0.0  # fraction of items that failed

    @property
    def success_rate(self) -> float:
        """Fraction of items processed successfully (0.0 when nothing ran)."""
        if not self.total_processed:
            return 0.0
        return self.successful_processed / self.total_processed

    def update_throughput(self) -> None:
        """Recompute items-per-second from the accumulated processing time."""
        elapsed = self.processing_time.total_seconds()
        if elapsed > 0:
            self.throughput = self.total_processed / elapsed

    def update_error_rate(self) -> None:
        """Recompute the fraction of items that failed."""
        self.error_rate = (
            self.failed_processed / self.total_processed if self.total_processed else 0.0
        )


@dataclass
class ProcessingContext:
    """Mutable context that travels with one data item through the pipeline."""

    pipeline_id: str  # owning pipeline's id
    step_id: str      # id of the step currently handling this item
    data: Any         # the payload being processed
    metadata: Dict[str, Any] = field(default_factory=dict)
    timestamp: datetime = field(default_factory=datetime.now)
    retry_count: int = 0
    max_retries: int = 3

    def add_metadata(self, key: str, value: Any) -> None:
        """Record a metadata entry on this context."""
        self.metadata[key] = value

    def get_metadata(self, key: str, default: Any = None) -> Any:
        """Return the metadata entry for *key*, or *default* if absent."""
        return self.metadata.get(key, default)


class DataProcessor(ABC):
    """Abstract base class for all pipeline data processors.

    Subclasses implement :meth:`process`; this base supplies per-processor
    metrics, basic input validation, and error-metadata recording.
    """

    def __init__(self, name: str, processor_type: ProcessorType):
        """Create a processor.

        Args:
            name: Human-readable processor name.
            processor_type: Category of processing this instance performs.
        """
        self.name = name
        self.processor_type = processor_type
        self._metrics = PipelineMetrics()

    @abstractmethod
    async def process(self, context: ProcessingContext) -> ProcessingContext:
        """Process one item.

        Args:
            context: Processing context carrying the item.

        Returns:
            The (possibly mutated) context.
        """
        ...

    async def validate_input(self, context: ProcessingContext) -> bool:
        """Return True when the context carries non-None data.

        Args:
            context: Processing context to check.

        Returns:
            Whether the input is considered valid.
        """
        return context.data is not None

    async def handle_error(self, context: ProcessingContext, error: Exception) -> ProcessingContext:
        """Attach error details to the context's metadata.

        Args:
            context: Processing context in which the error occurred.
            error: The raised exception.

        Returns:
            The context, annotated with "error" and "error_type" metadata.
        """
        context.add_metadata("error", str(error))
        context.add_metadata("error_type", type(error).__name__)
        return context

    def get_metrics(self) -> PipelineMetrics:
        """Return this processor's accumulated metrics."""
        return self._metrics

    def reset_metrics(self) -> None:
        """Discard accumulated metrics and start from zero."""
        self._metrics = PipelineMetrics()


class FilterProcessor(DataProcessor):
    """Processor that drops items which fail a predicate."""

    def __init__(self, name: str, filter_func: Callable[[Any], bool]):
        """Create a filter processor.

        Args:
            name: Processor name.
            filter_func: Predicate; items for which it returns False are dropped.
        """
        super().__init__(name, ProcessorType.FILTER)
        self.filter_func = filter_func

    async def process(self, context: ProcessingContext) -> ProcessingContext:
        """Apply the predicate and mark/drop the item accordingly.

        Args:
            context: Processing context carrying the item.

        Returns:
            The context; ``data`` is set to None when the item is filtered out.
        """
        try:
            if await self.validate_input(context):
                keep = self.filter_func(context.data)
                context.add_metadata("filtered", not keep)
                if not keep:
                    # Dropped items carry None data so later steps can skip them.
                    context.data = None
            self._metrics.successful_processed += 1
        except Exception as exc:
            self._metrics.failed_processed += 1
            context = await self.handle_error(context, exc)
        finally:
            # Counted regardless of outcome.
            self._metrics.total_processed += 1
        return context


class TransformerProcessor(DataProcessor):
    """Processor that maps each item through a transform function."""

    def __init__(self, name: str, transform_func: Callable[[Any], Any]):
        """Create a transformer processor.

        Args:
            name: Processor name.
            transform_func: Callable applied to each item's data.
        """
        super().__init__(name, ProcessorType.TRANSFORMER)
        self.transform_func = transform_func

    async def process(self, context: ProcessingContext) -> ProcessingContext:
        """Apply the transform and record before/after type names.

        Args:
            context: Processing context carrying the item.

        Returns:
            The context with ``data`` replaced by the transformed value.
        """
        try:
            if await self.validate_input(context):
                before = context.data
                after = self.transform_func(before)
                context.data = after
                context.add_metadata("transformed", True)
                context.add_metadata("original_type", type(before).__name__)
                context.add_metadata("new_type", type(after).__name__)
            self._metrics.successful_processed += 1
        except Exception as exc:
            self._metrics.failed_processed += 1
            context = await self.handle_error(context, exc)
        finally:
            # Counted regardless of outcome.
            self._metrics.total_processed += 1
        return context


class ValidatorProcessor(DataProcessor):
    """Processor that checks each item against a validation predicate."""

    def __init__(self, name: str, validation_func: Callable[[Any], bool]):
        """Create a validator processor.

        Args:
            name: Processor name.
            validation_func: Predicate returning True for valid items.
        """
        super().__init__(name, ProcessorType.VALIDATOR)
        self.validation_func = validation_func

    async def process(self, context: ProcessingContext) -> ProcessingContext:
        """Run the predicate and record the result in metadata.

        Args:
            context: Processing context carrying the item.

        Returns:
            The context, annotated with "validation_passed" (and
            "validation_error" when the check failed). The data itself
            is never modified.
        """
        try:
            if await self.validate_input(context):
                passed = self.validation_func(context.data)
                context.add_metadata("validation_passed", passed)
                if not passed:
                    context.add_metadata("validation_error", "Data validation failed")
            self._metrics.successful_processed += 1
        except Exception as exc:
            self._metrics.failed_processed += 1
            context = await self.handle_error(context, exc)
        finally:
            # Counted regardless of outcome.
            self._metrics.total_processed += 1
        return context


class AggregatorProcessor(DataProcessor):
    """Processor that buffers items and emits them as batches."""

    def __init__(self, name: str, batch_size: int = 100, timeout: timedelta = timedelta(seconds=30)):
        """Create an aggregator processor.

        Args:
            name: Processor name.
            batch_size: Number of buffered items that triggers a flush.
            timeout: Maximum age of the buffer before a flush is forced.
        """
        super().__init__(name, ProcessorType.AGGREGATOR)
        self.batch_size = batch_size
        self.timeout = timeout
        self._batch: List[Any] = []
        self._last_batch_time = datetime.now()

    async def process(self, context: ProcessingContext) -> ProcessingContext:
        """Buffer the item; flush when the batch is full or the timeout elapsed.

        Args:
            context: Processing context carrying the item.

        Returns:
            The context. On flush, ``data`` holds the batch list; while
            still buffering, ``data`` is None and "batching" is set.
        """
        try:
            if await self.validate_input(context):
                self._batch.append(context.data)

                batch_full = len(self._batch) >= self.batch_size
                timed_out = datetime.now() - self._last_batch_time >= self.timeout
                if batch_full or timed_out:
                    # Emit a copy of the buffer, then reset it.
                    context.data = list(self._batch)
                    context.add_metadata("batch_size", len(self._batch))
                    context.add_metadata("aggregated", True)
                    self._batch.clear()
                    self._last_batch_time = datetime.now()
                else:
                    # Not yet time to emit; pass along empty data.
                    context.data = None
                    context.add_metadata("batching", True)
            self._metrics.successful_processed += 1
        except Exception as exc:
            self._metrics.failed_processed += 1
            context = await self.handle_error(context, exc)
        finally:
            # Counted regardless of outcome.
            self._metrics.total_processed += 1
        return context


class DataPipelineStep:
    """One named stage of a data pipeline, wrapping a processor."""

    def __init__(self, step_id: str, processor: DataProcessor, parallel: bool = False):
        """Create a pipeline step.

        Args:
            step_id: Identifier of this step.
            processor: Processor that performs the step's work.
            parallel: Whether the step is marked for parallel execution.
        """
        self.step_id = step_id
        self.processor = processor
        self.parallel = parallel
        self._enabled = True

    async def execute(self, context: ProcessingContext) -> ProcessingContext:
        """Run the step's processor on the context.

        Args:
            context: Processing context carrying the item.

        Returns:
            The processed context; disabled steps pass it through untouched.
        """
        if not self._enabled:
            return context
        context.step_id = self.step_id
        return await self.processor.process(context)

    def enable(self) -> None:
        """Enable this step."""
        self._enabled = True

    def disable(self) -> None:
        """Disable this step (execute becomes a pass-through)."""
        self._enabled = False

    @property
    def is_enabled(self) -> bool:
        """Whether this step is currently enabled."""
        return self._enabled


class DataPipeline:
    """
    Data pipeline.

    Runs items through an ordered list of steps, supporting single-item,
    concurrent batch, and streaming execution, with per-run metrics and
    pluggable error handlers.
    """
    
    def __init__(self, pipeline_id: str, name: str):
        """
        Initialize the data pipeline.
        
        Args:
            pipeline_id: Unique pipeline identifier.
            name: Human-readable pipeline name.
        """
        self.pipeline_id = pipeline_id
        self.name = name
        self._steps: List[DataPipelineStep] = []
        self._status = PipelineStatus.IDLE
        self._metrics = PipelineMetrics()
        self._error_handlers: List[Callable[[ProcessingContext, Exception], ProcessingContext]] = []
        self._max_concurrent = 10
        # Bounds how many process_single calls may run concurrently.
        self._semaphore = asyncio.Semaphore(self._max_concurrent)
    
    def add_step(self, step: DataPipelineStep) -> None:
        """
        Append a step to the pipeline.
        
        Args:
            step: Pipeline step to add.
        """
        self._steps.append(step)
    
    def remove_step(self, step_id: str) -> bool:
        """
        Remove the first step with the given id.
        
        Args:
            step_id: Step identifier.
            
        Returns:
            True if a step was removed, False otherwise.
        """
        for i, step in enumerate(self._steps):
            if step.step_id == step_id:
                del self._steps[i]
                return True
        return False
    
    def get_step(self, step_id: str) -> Optional[DataPipelineStep]:
        """
        Look up a step by id.
        
        Args:
            step_id: Step identifier.
            
        Returns:
            The matching step, or None if not found.
        """
        for step in self._steps:
            if step.step_id == step_id:
                return step
        return None
    
    def add_error_handler(self, handler: Callable[[ProcessingContext, Exception], ProcessingContext]) -> None:
        """
        Register an error handler invoked when a step raises.
        
        Args:
            handler: Callable receiving (context, exception) and returning
                the (possibly updated) context.
        """
        self._error_handlers.append(handler)
    
    async def process_single(self, data: Any, metadata: Optional[Dict[str, Any]] = None) -> ProcessingContext:
        """
        Run a single data item through all pipeline steps.
        
        Args:
            data: The item to process.
            metadata: Initial metadata for the context.
            
        Returns:
            The resulting processing context.
        """
        context = ProcessingContext(
            pipeline_id=self.pipeline_id,
            step_id="",
            data=data,
            metadata=metadata or {}
        )
        
        async with self._semaphore:
            for step in self._steps:
                try:
                    context = await step.execute(context)
                    
                    # If the item was filtered out, skip the remaining steps.
                    if context.data is None and context.get_metadata("filtered", False):
                        break
                    
                except Exception as e:
                    # Give every registered error handler a chance to react;
                    # handler failures are deliberately ignored (best-effort).
                    for handler in self._error_handlers:
                        try:
                            context = handler(context, e)
                        except Exception:
                            pass
                    
                    # Retry the current step once per failure while the
                    # retry budget lasts; a second failure aborts the run.
                    if context.retry_count < context.max_retries:
                        context.retry_count += 1
                        try:
                            context = await step.execute(context)
                        except Exception:
                            break
                    else:
                        break
        
        return context
    
    async def process_batch(self, data_batch: List[Any], metadata: Optional[Dict[str, Any]] = None) -> List[ProcessingContext]:
        """
        Process a batch of items concurrently.
        
        Args:
            data_batch: Items to process.
            metadata: Metadata shared by all items (NOTE(review): the same
                dict object is passed to every context — confirm callers
                do not rely on per-item isolation).
            
        Returns:
            One processing context per input item, in order.
        """
        self._status = PipelineStatus.RUNNING
        self._metrics.start_time = datetime.now()
        
        try:
            # Fan out: each item is processed concurrently, bounded by the
            # pipeline's semaphore inside process_single.
            tasks = [
                self.process_single(data, metadata)
                for data in data_batch
            ]
            
            results = await asyncio.gather(*tasks, return_exceptions=True)
            
            # Fold results into contexts, converting raised exceptions
            # into error contexts so callers get one entry per item.
            processed_results = []
            for result in results:
                if isinstance(result, Exception):
                    self._metrics.failed_processed += 1
                    # Build a placeholder context describing the failure.
                    error_context = ProcessingContext(
                        pipeline_id=self.pipeline_id,
                        step_id="error",
                        data=None,
                        metadata={"error": str(result)}
                    )
                    processed_results.append(error_context)
                else:
                    self._metrics.successful_processed += 1
                    processed_results.append(result)
                
                self._metrics.total_processed += 1
            
            self._status = PipelineStatus.COMPLETED
            
        except Exception as e:
            self._status = PipelineStatus.ERROR
            raise e
        
        finally:
            self._metrics.end_time = datetime.now()
            if self._metrics.start_time:
                self._metrics.processing_time = self._metrics.end_time - self._metrics.start_time
                self._metrics.update_throughput()
                self._metrics.update_error_rate()
        
        # Safe: on the exception path above we re-raise, so this line is
        # only reached when processed_results was assigned.
        return processed_results
    
    async def process_stream(self, data_stream: AsyncGenerator[Any, None]) -> AsyncGenerator[ProcessingContext, None]:
        """
        Process items from an async stream, yielding results as they finish.
        
        Args:
            data_stream: Async generator producing items.
            
        Yields:
            A processing context per item (an error context when
            process_single raised).
        """
        self._status = PipelineStatus.RUNNING
        self._metrics.start_time = datetime.now()
        
        try:
            async for data in data_stream:
                try:
                    result = await self.process_single(data)
                    self._metrics.successful_processed += 1
                    yield result
                except Exception as e:
                    self._metrics.failed_processed += 1
                    # Build a placeholder context describing the failure.
                    error_context = ProcessingContext(
                        pipeline_id=self.pipeline_id,
                        step_id="error",
                        data=None,
                        metadata={"error": str(e)}
                    )
                    yield error_context
                
                self._metrics.total_processed += 1
            
            self._status = PipelineStatus.COMPLETED
            
        except Exception as e:
            self._status = PipelineStatus.ERROR
            raise e
        
        finally:
            self._metrics.end_time = datetime.now()
            if self._metrics.start_time:
                self._metrics.processing_time = self._metrics.end_time - self._metrics.start_time
                self._metrics.update_throughput()
                self._metrics.update_error_rate()
    
    def pause(self) -> None:
        """Pause the pipeline (status flag only; running tasks continue)."""
        if self._status == PipelineStatus.RUNNING:
            self._status = PipelineStatus.PAUSED
    
    def resume(self) -> None:
        """Resume a paused pipeline (status flag only)."""
        if self._status == PipelineStatus.PAUSED:
            self._status = PipelineStatus.RUNNING
    
    def stop(self) -> None:
        """Mark the pipeline as stopped."""
        self._status = PipelineStatus.STOPPED
    
    def reset(self) -> None:
        """Reset status and metrics, including every step's processor metrics."""
        self._status = PipelineStatus.IDLE
        self._metrics = PipelineMetrics()
        
        # Also clear the per-processor counters of each step.
        for step in self._steps:
            step.processor.reset_metrics()
    
    @property
    def status(self) -> PipelineStatus:
        """Current pipeline status."""
        return self._status
    
    def get_metrics(self) -> PipelineMetrics:
        """Return the pipeline-level metrics object."""
        return self._metrics
    
    def get_step_metrics(self) -> Dict[str, PipelineMetrics]:
        """Return each step's processor metrics, keyed by step id."""
        return {
            step.step_id: step.processor.get_metrics()
            for step in self._steps
        }
    
    def get_pipeline_info(self) -> Dict[str, Any]:
        """Return a JSON-friendly summary of the pipeline's configuration and metrics."""
        return {
            "pipeline_id": self.pipeline_id,
            "name": self.name,
            "status": self._status.value,
            "steps": [
                {
                    "step_id": step.step_id,
                    "processor_name": step.processor.name,
                    "processor_type": step.processor.processor_type.value,
                    "enabled": step.is_enabled,
                    "parallel": step.parallel
                }
                for step in self._steps
            ],
            "metrics": {
                "total_processed": self._metrics.total_processed,
                "successful_processed": self._metrics.successful_processed,
                "failed_processed": self._metrics.failed_processed,
                "success_rate": self._metrics.success_rate,
                "error_rate": self._metrics.error_rate,
                "throughput": self._metrics.throughput,
                "processing_time": str(self._metrics.processing_time)
            }
        }


class PipelineBuilder:
    """Fluent builder that assembles a DataPipeline step by step."""

    def __init__(self, pipeline_id: str, name: str):
        """Create a builder for a new pipeline.

        Args:
            pipeline_id: Pipeline identifier.
            name: Pipeline name.
        """
        self._pipeline = DataPipeline(pipeline_id, name)

    def _append(self, step_id: str, processor: DataProcessor, parallel: bool) -> 'PipelineBuilder':
        """Wrap *processor* in a step, add it, and return self (fluent)."""
        self._pipeline.add_step(DataPipelineStep(step_id, processor, parallel))
        return self

    def add_filter(self, step_id: str, filter_func: Callable[[Any], bool], parallel: bool = False) -> 'PipelineBuilder':
        """Add a filter step.

        Args:
            step_id: Step identifier.
            filter_func: Predicate; items failing it are dropped.
            parallel: Whether the step is marked parallel.

        Returns:
            This builder.
        """
        return self._append(step_id, FilterProcessor(f"filter_{step_id}", filter_func), parallel)

    def add_transformer(self, step_id: str, transform_func: Callable[[Any], Any], parallel: bool = False) -> 'PipelineBuilder':
        """Add a transform step.

        Args:
            step_id: Step identifier.
            transform_func: Callable applied to each item.
            parallel: Whether the step is marked parallel.

        Returns:
            This builder.
        """
        return self._append(step_id, TransformerProcessor(f"transformer_{step_id}", transform_func), parallel)

    def add_validator(self, step_id: str, validation_func: Callable[[Any], bool], parallel: bool = False) -> 'PipelineBuilder':
        """Add a validation step.

        Args:
            step_id: Step identifier.
            validation_func: Predicate returning True for valid items.
            parallel: Whether the step is marked parallel.

        Returns:
            This builder.
        """
        return self._append(step_id, ValidatorProcessor(f"validator_{step_id}", validation_func), parallel)

    def add_aggregator(self, step_id: str, batch_size: int = 100, timeout: timedelta = timedelta(seconds=30), parallel: bool = False) -> 'PipelineBuilder':
        """Add an aggregation step.

        Args:
            step_id: Step identifier.
            batch_size: Items per emitted batch.
            timeout: Maximum buffer age before a forced flush.
            parallel: Whether the step is marked parallel.

        Returns:
            This builder.
        """
        return self._append(step_id, AggregatorProcessor(f"aggregator_{step_id}", batch_size, timeout), parallel)

    def add_custom_processor(self, step_id: str, processor: DataProcessor, parallel: bool = False) -> 'PipelineBuilder':
        """Add a step using a caller-supplied processor.

        Args:
            step_id: Step identifier.
            processor: Custom processor instance.
            parallel: Whether the step is marked parallel.

        Returns:
            This builder.
        """
        return self._append(step_id, processor, parallel)

    def build(self) -> DataPipeline:
        """Return the assembled pipeline.

        Returns:
            The DataPipeline configured so far.
        """
        return self._pipeline