"""
执行器基础类和接口定义
"""

import logging
import threading
import time
from abc import ABC, abstractmethod
from contextlib import contextmanager
from typing import Any, Dict, Iterator, List, Optional

from .types import (
    Record, RecordBatch, ExecutionContext, ExecutionStats,
    ExecutionError, TimeoutException
)
from .memory_manager import get_memory_manager

class ExecutorBase(ABC):
    """Abstract base class for all executors.

    Provides child bookkeeping, execution statistics, timeout checking,
    and an ``execution_context`` context manager pairing open/close.
    Subclasses implement ``open``, ``next_batch`` and ``close``.
    """

    def __init__(self, executor_id: str, context: ExecutionContext):
        self.executor_id = executor_id  # unique identifier of this executor
        self.context = context  # shared execution context (timeout, start_time, ...)
        self.stats = ExecutionStats()  # accumulated execution statistics
        self.children: List['ExecutorBase'] = []  # child executors
        self.parent: Optional['ExecutorBase'] = None  # parent executor, if any
        self.memory_manager = get_memory_manager()
        self.is_open = False  # maintained by subclasses' open()/close()
        self.is_finished = False  # True once the executor is exhausted
        self.lock = threading.RLock()  # guards children list and stats updates

    @abstractmethod
    def open(self):
        """Open the executor and perform initialization."""

    @abstractmethod
    def next_batch(self) -> Optional[RecordBatch]:
        """Return the next batch of records, or None when exhausted."""

    @abstractmethod
    def close(self):
        """Close the executor and release its resources."""

    def add_child(self, child: 'ExecutorBase'):
        """Attach *child* to this executor and set its parent pointer."""
        with self.lock:
            self.children.append(child)
            child.parent = self

    def remove_child(self, child: 'ExecutorBase'):
        """Detach *child* if present, clearing its parent pointer."""
        with self.lock:
            if child in self.children:
                self.children.remove(child)
                child.parent = None

    def get_stats(self) -> ExecutionStats:
        """Return the accumulated execution statistics."""
        return self.stats

    def reset_stats(self):
        """Discard accumulated statistics and start fresh."""
        self.stats = ExecutionStats()

    def check_timeout(self):
        """Raise TimeoutException if the configured timeout has elapsed."""
        if self.context.timeout is not None:
            elapsed = time.time() - self.context.start_time
            if elapsed > self.context.timeout:
                raise TimeoutException(f"Execution timeout after {elapsed:.2f} seconds")

    def update_stats(self, rows_processed: int = 0, rows_returned: int = 0,
                    memory_used: int = 0, io_ops: int = 0):
        """Thread-safely accumulate statistics counters."""
        with self.lock:
            self.stats.rows_processed += rows_processed
            self.stats.rows_returned += rows_returned
            self.stats.memory_used_bytes += memory_used
            self.stats.io_operations += io_ops

    @contextmanager
    def execution_context(self):
        """Context manager that opens the executor on entry, closes it on
        exit, and records wall-clock execution time in the stats."""
        start_time = time.time()
        try:
            if not self.is_open:
                self.open()
            yield self
        finally:
            end_time = time.time()
            # Fix: update stats under the lock, consistent with update_stats();
            # previously this mutated shared stats without synchronization.
            with self.lock:
                self.stats.execution_time_ms += (end_time - start_time) * 1000
            if self.is_open:
                self.close()

class IteratorExecutor(ExecutorBase):
    """Executor that produces records from a Python iterator.

    Subclasses set ``self.current_iterator`` (typically in ``open``); this
    class turns it into fixed-size batches and a per-record iterator.
    """

    def __init__(self, executor_id: str, context: ExecutionContext):
        super().__init__(executor_id, context)
        self.current_iterator: Optional[Iterator[Record]] = None  # record source
        self.batch_size = 1000  # default number of records per batch

    def __iter__(self):
        """Iterate over individual records, opening and closing the executor."""
        try:
            self.open()
            while True:
                batch = self.next_batch()
                if not batch:
                    break
                yield from batch
        finally:
            self.close()

    def next_record(self) -> Optional[Record]:
        """Return the next record, or None when the iterator is absent/exhausted."""
        if self.current_iterator is None:
            return None
        # Keep the try body minimal: only next() can raise StopIteration here;
        # previously the None-check sat inside the try for no reason.
        try:
            return next(self.current_iterator)
        except StopIteration:
            return None

    def next_batch(self) -> Optional[RecordBatch]:
        """Collect up to ``batch_size`` records into a list, or None when done."""
        if self.is_finished:
            return None

        self.check_timeout()

        batch = []
        for _ in range(self.batch_size):
            record = self.next_record()
            if record is None:
                self.is_finished = True
                break
            batch.append(record)

        if batch:
            self.update_stats(rows_returned=len(batch))
            return batch

        return None

class PipelineExecutor(ExecutorBase):
    """Executor that processes batches in a streaming pipeline.

    Batches are pushed into an input queue, transformed by
    ``process_batch``, and made available through an output queue.
    """

    def __init__(self, executor_id: str, context: ExecutionContext):
        super().__init__(executor_id, context)
        self.input_queue = []  # pending input batches (guarded by self.lock)
        self.output_queue = []  # processed batches ready to pull (guarded by self.lock)
        self.pipeline_depth = 0  # batches handled by the current process_pipeline() call
        self.max_pipeline_depth = 10  # max batches per process_pipeline() call

    def push_batch(self, batch: RecordBatch):
        """Queue *batch* for processing."""
        with self.lock:
            self.input_queue.append(batch)

    def pull_batch(self) -> Optional[RecordBatch]:
        """Return the oldest processed batch, or None if none is ready."""
        with self.lock:
            if self.output_queue:
                return self.output_queue.pop(0)
            return None

    def process_pipeline(self):
        """Drain up to ``max_pipeline_depth`` input batches through process_batch.

        The depth counter is reset on every call; previously it accumulated
        across calls, so the pipeline stalled permanently once
        ``max_pipeline_depth`` batches had ever been processed.
        """
        self.pipeline_depth = 0
        while self.pipeline_depth < self.max_pipeline_depth:
            # Pop under the lock, consistent with push_batch()/pull_batch();
            # previously the queues were mutated here without synchronization.
            with self.lock:
                if not self.input_queue:
                    break
                batch = self.input_queue.pop(0)
            processed_batch = self.process_batch(batch)
            if processed_batch:
                with self.lock:
                    self.output_queue.append(processed_batch)
            self.pipeline_depth += 1

    @abstractmethod
    def process_batch(self, batch: RecordBatch) -> Optional[RecordBatch]:
        """Transform one batch; return the result, or None to drop it."""

class ParallelExecutor(ExecutorBase):
    """Executor that fans tasks out to a pool of worker threads.

    Tasks are submitted via ``submit_task``, executed by ``execute_task``
    (implemented by subclasses), and collected in completion order via
    ``get_result``.
    """

    def __init__(self, executor_id: str, context: ExecutionContext,
                 parallelism: int = 4):
        super().__init__(executor_id, context)
        self.parallelism = parallelism  # number of worker threads to spawn
        self.worker_threads = []
        self.task_queue = []  # pending tasks (guarded by self.lock)
        self.result_queue = []  # completed results (guarded by self.lock)
        self.stop_event = threading.Event()

    def start_workers(self):
        """Spawn the worker threads.

        Clears ``stop_event`` first so a stop/start cycle works; previously
        a restarted pool exited immediately because the event stayed set
        from the prior stop_workers() call.
        """
        self.stop_event.clear()
        for i in range(self.parallelism):
            worker = threading.Thread(
                target=self._worker_loop,
                name=f"{self.executor_id}-worker-{i}",
                daemon=True
            )
            worker.start()
            self.worker_threads.append(worker)

    def stop_workers(self):
        """Signal workers to exit and join them (5s timeout each)."""
        self.stop_event.set()
        for worker in self.worker_threads:
            worker.join(timeout=5.0)
        self.worker_threads.clear()

    def _worker_loop(self):
        """Worker thread main loop: pop a task, run it, queue the result."""
        while not self.stop_event.is_set():
            try:
                # Cheap unlocked peek first; re-check under the lock before popping.
                if self.task_queue:
                    with self.lock:
                        if self.task_queue:
                            task = self.task_queue.pop(0)
                        else:
                            continue

                    # Execute the task outside the lock.
                    result = self.execute_task(task)

                    with self.lock:
                        self.result_queue.append(result)
                else:
                    time.sleep(0.001)  # avoid busy-waiting
            except Exception:
                # Log with traceback but keep this worker (and its siblings)
                # alive; previously errors were print()ed and lost tracebacks.
                logging.getLogger(__name__).exception(
                    "Worker error in %s", self.executor_id
                )

    @abstractmethod
    def execute_task(self, task: Any) -> Any:
        """Execute a single task and return its result."""

    def submit_task(self, task: Any):
        """Queue *task* for execution by a worker thread."""
        with self.lock:
            self.task_queue.append(task)

    def get_result(self) -> Optional[Any]:
        """Return the oldest completed result, or None if none is ready."""
        with self.lock:
            if self.result_queue:
                return self.result_queue.pop(0)
            return None

class ExecutorFactory:
    """Registry-based factory for executor classes."""

    _executor_classes = {}

    @classmethod
    def register_executor(cls, executor_type: str, executor_class: type):
        """Register *executor_class* under the name *executor_type*."""
        cls._executor_classes[executor_type] = executor_class

    @classmethod
    def create_executor(cls, executor_type: str, executor_id: str,
                       context: ExecutionContext, **kwargs) -> ExecutorBase:
        """Instantiate a registered executor type.

        Raises:
            ExecutionError: if *executor_type* has not been registered.
        """
        executor_class = cls._executor_classes.get(executor_type)
        if executor_class is None:
            raise ExecutionError(f"Unknown executor type: {executor_type}")
        return executor_class(executor_id, context, **kwargs)

    @classmethod
    def get_registered_types(cls) -> List[str]:
        """Return the names of all registered executor types."""
        return list(cls._executor_classes.keys())

class ExecutionPipeline:
    """Coordinates multiple executors running as one pipeline."""

    def __init__(self, pipeline_id: str, context: ExecutionContext):
        self.pipeline_id = pipeline_id
        self.context = context
        self.executors: List[ExecutorBase] = []  # pipeline stages, root first
        self.is_running = False
        self.lock = threading.RLock()  # guards executors list and run state

    def add_executor(self, executor: ExecutorBase):
        """Append *executor* to the pipeline."""
        with self.lock:
            self.executors.append(executor)

    def remove_executor(self, executor: ExecutorBase):
        """Remove *executor* from the pipeline if present."""
        with self.lock:
            if executor in self.executors:
                self.executors.remove(executor)

    def start(self):
        """Open all executors in order. No-op if already running."""
        with self.lock:
            if self.is_running:
                return

            self.is_running = True
            for executor in self.executors:
                executor.open()

    def stop(self):
        """Close all executors in reverse order. No-op if not running.

        Every executor gets a close() attempt even if an earlier one raises
        (previously the first failure left the remaining executors open);
        the first error is re-raised after all closes were attempted.
        """
        with self.lock:
            if not self.is_running:
                return

            self.is_running = False
            first_error = None
            for executor in reversed(self.executors):
                try:
                    executor.close()
                except Exception as exc:
                    # Remember the first failure but keep closing the rest.
                    if first_error is None:
                        first_error = exc
            if first_error is not None:
                raise first_error

    def execute(self) -> Iterator[RecordBatch]:
        """Yield batches from the root executor until exhausted; always stops."""
        try:
            self.start()

            # Simple sequential execution: drain the first (root) executor.
            if self.executors:
                root_executor = self.executors[0]
                while True:
                    batch = root_executor.next_batch()
                    if not batch:
                        break
                    yield batch
        finally:
            self.stop()

    def get_pipeline_stats(self) -> Dict[str, ExecutionStats]:
        """Map each executor's id to its ExecutionStats."""
        return {executor.executor_id: executor.get_stats()
                for executor in self.executors}

