#!/usr/bin/env python3
"""
流式结果处理器
实现内存高效的流式处理和结果收集
"""

import time
import threading
from typing import Iterator, List, Dict, Any, Callable, Optional
from dataclasses import dataclass
from queue import Queue, Empty
from collections import deque
import json


@dataclass
class StreamResult:
    """A single result record flowing through the stream pipeline.

    Attributes:
        repo_path: path of the repository this result refers to
        status: outcome label; 'success' counts as success, anything else
            is treated as an error by StreamProcessor
        output: captured stdout / success message
        error: captured error message (empty on success)
        timestamp: creation time (epoch seconds); filled in automatically
            when omitted or passed as None
        metadata: free-form extra data; a fresh dict is created per instance
            when omitted or passed as None
    """
    repo_path: str
    status: str
    output: str = ""
    error: str = ""
    # Annotations fixed: these defaults are None, so the types must be
    # Optional (the old `float = None` / `Dict = None` hints were wrong).
    timestamp: Optional[float] = None
    metadata: Optional[Dict[str, Any]] = None

    def __post_init__(self):
        # Normalize the None defaults so every instance carries a concrete
        # timestamp and its own metadata dict (no shared mutable default).
        if self.timestamp is None:
            self.timestamp = time.time()
        if self.metadata is None:
            self.metadata = {}


class StreamProcessor:
    """Memory-efficient streaming result processor.

    Producers enqueue results; a background thread drains them in batches,
    fans them out to optional callbacks, and retains them in a bounded ring
    buffer for later inspection/export. A second thread flushes the queue
    periodically.

    Fixes over the previous version:
    - callbacks are now invoked OUTSIDE the internal lock (the lock is
      non-reentrant, so a callback calling get_stats()/get_summary() used
      to deadlock);
    - the public ``processed_count`` / ``error_count`` counters are kept in
      sync with the stats dict (they were never updated before);
    - shutdown uses a threading.Event so stop() does not have to out-wait a
      full flush_interval before join() can succeed.
    """

    def __init__(self,
                 buffer_size: int = 1000,
                 batch_size: int = 100,
                 flush_interval: float = 1.0):
        """
        Initialize the stream processor.

        Args:
            buffer_size: capacity of the retained-results ring buffer
            batch_size: number of results processed per batch
            flush_interval: seconds between periodic flushes
        """
        self.buffer_size = buffer_size
        self.batch_size = batch_size
        self.flush_interval = flush_interval

        # Bounded ring buffer: once full, the oldest results are evicted.
        self.result_buffer = deque(maxlen=buffer_size)
        # Public convenience counters, mirrored from self.stats.
        self.processed_count = 0
        self.error_count = 0

        # Flow control.
        self.result_queue = Queue()
        self.running = False
        self.processor_thread = None
        self.flush_thread = None
        # Set by stop(); background threads wait on this instead of sleeping
        # so shutdown wakes them immediately.
        self._stop_event = threading.Event()

        # Optional user callbacks.
        self.result_callback: Optional[Callable] = None
        self.batch_callback: Optional[Callable] = None
        self.error_callback: Optional[Callable] = None

        # Guards self.stats and self.result_buffer. Non-reentrant: never
        # hold it while running user callbacks.
        self.lock = threading.Lock()

        # Aggregate statistics.
        self.stats = {
            'total_processed': 0,
            'success_count': 0,
            'error_count': 0,
            'processing_time': 0.0,
            'avg_processing_time': 0.0
        }

    def start(self):
        """Start the background processing and periodic-flush threads."""
        self._stop_event.clear()
        self.running = True

        # Batch-processing thread.
        self.processor_thread = threading.Thread(target=self._process_results)
        self.processor_thread.daemon = True
        self.processor_thread.start()

        # Periodic-flush thread.
        self.flush_thread = threading.Thread(target=self._flush_periodically)
        self.flush_thread.daemon = True
        self.flush_thread.start()

    def stop(self):
        """Stop the background threads and flush any remaining results."""
        self.running = False
        self._stop_event.set()  # wake sleeping background threads immediately

        if self.processor_thread:
            self.processor_thread.join(timeout=1.0)
        if self.flush_thread:
            self.flush_thread.join(timeout=1.0)

        # Drain whatever is still queued.
        self.flush()

    def add_result(self, result: "StreamResult"):
        """
        Add a single result to the stream.

        Args:
            result: the result to enqueue
        """
        self.result_queue.put(result)

    def add_results(self, results: "List[StreamResult]"):
        """
        Add multiple results to the stream, in order.

        Args:
            results: results to enqueue
        """
        for result in results:
            self.add_result(result)

    def set_result_callback(self, callback: "Callable[[StreamResult], None]"):
        """
        Set the per-result callback, invoked once for every processed result.

        Args:
            callback: callable receiving a single StreamResult
        """
        self.result_callback = callback

    def set_batch_callback(self, callback: "Callable[[List[StreamResult]], None]"):
        """
        Set the per-batch callback, invoked once per processed batch.

        Args:
            callback: callable receiving a list of StreamResult
        """
        self.batch_callback = callback

    def set_error_callback(self, callback: "Callable[[StreamResult], None]"):
        """
        Set the error callback, invoked for each non-'success' result.

        Args:
            callback: callable receiving a single StreamResult
        """
        self.error_callback = callback

    def stream_results(self) -> "Iterator[StreamResult]":
        """
        Iterate results as they arrive, until stopped and the queue is empty.

        NOTE(review): this competes with the background processor thread for
        the same queue; use one consumption style or the other.

        Yields:
            StreamResult: the next queued result
        """
        while self.running or not self.result_queue.empty():
            try:
                yield self.result_queue.get(timeout=0.1)
            except Empty:
                continue

    def get_batch_results(self, batch_size: int = None) -> "List[StreamResult]":
        """
        Drain up to ``batch_size`` results from the queue without blocking.

        Args:
            batch_size: maximum batch size; defaults to self.batch_size

        Returns:
            List[StreamResult]: possibly-empty batch of results
        """
        if batch_size is None:
            batch_size = self.batch_size

        batch = []
        while len(batch) < batch_size:
            try:
                batch.append(self.result_queue.get_nowait())
            except Empty:
                break

        return batch

    def flush(self):
        """Drain the queue (up to buffer_size results) and process them."""
        batch = self.get_batch_results(self.buffer_size)
        if batch:
            self._process_batch(batch)

    def _process_results(self):
        """Background loop: drain batches and process them until stopped."""
        while self.running:
            try:
                batch = self.get_batch_results()

                if batch:
                    self._process_batch(batch)
                else:
                    # Short wait instead of busy-spinning; returns early
                    # when stop() sets the event.
                    self._stop_event.wait(0.01)

            except Exception as e:
                print(f"结果处理错误: {e}")
                self._stop_event.wait(0.1)

    def _process_batch(self, batch: "List[StreamResult]"):
        """
        Process one batch: update statistics, fire callbacks, buffer results.

        Args:
            batch: non-empty list of results
        """
        start_time = time.time()

        # Phase 1: classify and count under the lock.
        error_results = []
        with self.lock:
            self.stats['total_processed'] += len(batch)

            for result in batch:
                if result.status == 'success':
                    self.stats['success_count'] += 1
                else:
                    error_results.append(result)
                    self.stats['error_count'] += 1

            # Mirror the public convenience counters.
            self.processed_count = self.stats['total_processed']
            self.error_count = self.stats['error_count']

        # Phase 2: fan out to callbacks WITHOUT holding the lock, so a
        # callback may safely call get_stats()/get_summary().
        if self.result_callback:
            for result in batch:
                try:
                    self.result_callback(result)
                except Exception as e:
                    print(f"结果回调错误: {e}")

        if self.batch_callback:
            try:
                self.batch_callback(batch)
            except Exception as e:
                print(f"批处理回调错误: {e}")

        if self.error_callback and error_results:
            for result in error_results:
                try:
                    self.error_callback(result)
                except Exception as e:
                    print(f"错误回调错误: {e}")

        # Phase 3: retain results and update timing stats under the lock.
        with self.lock:
            for result in batch:
                self.result_buffer.append(result)

            processing_time = time.time() - start_time
            self.stats['processing_time'] += processing_time
            total = self.stats['total_processed']
            self.stats['avg_processing_time'] = (
                self.stats['processing_time'] / total if total > 0 else 0
            )

    def _flush_periodically(self):
        """Background loop: flush every flush_interval seconds until stopped."""
        while self.running:
            # wait() returns True (early) when stop() sets the event.
            if self._stop_event.wait(self.flush_interval):
                break
            self.flush()

    def get_stats(self) -> Dict[str, Any]:
        """
        Get a snapshot of the raw statistics counters.

        Returns:
            Dict[str, Any]: copy of the internal stats dict
        """
        with self.lock:
            return self.stats.copy()

    def get_summary(self) -> Dict[str, Any]:
        """
        Get a derived processing summary (rates, buffer/queue sizes).

        Returns:
            Dict[str, Any]: summary with success/error rates as percentages
        """
        with self.lock:
            total = self.stats['total_processed']
            return {
                'total_processed': total,
                'success_rate': (
                    self.stats['success_count'] / total * 100 if total > 0 else 0
                ),
                'error_rate': (
                    self.stats['error_count'] / total * 100 if total > 0 else 0
                ),
                'avg_processing_time': self.stats['avg_processing_time'],
                'buffer_size': len(self.result_buffer),
                'queue_size': self.result_queue.qsize()
            }

    def export_results(self, format: str = 'json', output_file: str = None) -> str:
        """
        Export the buffered results.

        Args:
            format: output format ('json' or 'csv'); note the parameter name
                shadows the builtin but is kept for interface compatibility
            output_file: optional path to also write the output to

        Returns:
            str: the serialized results

        Raises:
            ValueError: if ``format`` is not 'json' or 'csv'
        """
        # Snapshot under the lock so export works while processing continues.
        with self.lock:
            results = list(self.result_buffer)

        if format == 'json':
            serializable_results = [
                {
                    'repo_path': result.repo_path,
                    'status': result.status,
                    'output': result.output,
                    'error': result.error,
                    'timestamp': result.timestamp,
                    'metadata': result.metadata
                }
                for result in results
            ]
            output = json.dumps(serializable_results, indent=2, ensure_ascii=False)

        elif format == 'csv':
            import csv
            import io

            buf = io.StringIO()
            writer = csv.writer(buf)
            writer.writerow(['Repository', 'Status', 'Output', 'Error', 'Timestamp'])

            for result in results:
                writer.writerow([
                    result.repo_path,
                    result.status,
                    # Keep each record on one CSV row.
                    result.output.replace('\n', '\\n'),
                    result.error.replace('\n', '\\n'),
                    result.timestamp
                ])

            output = buf.getvalue()
        else:
            raise ValueError(f"不支持的格式: {format}")

        if output_file:
            with open(output_file, 'w', encoding='utf-8') as f:
                f.write(output)

        return output

    def clear_buffer(self):
        """Discard all retained results."""
        with self.lock:
            self.result_buffer.clear()

class ProgressTracker:
    """Thread-safe progress tracker with throughput and ETA estimates."""

    def __init__(self, total_items: int):
        """
        Initialize the progress tracker.

        Args:
            total_items: total number of items expected
        """
        self.total_items = total_items
        self.processed_items = 0
        self.start_time = time.time()
        self.last_update_time = time.time()
        self.lock = threading.Lock()

    def update(self, count: int = 1):
        """
        Record progress.

        Args:
            count: number of items just processed
        """
        with self.lock:
            self.processed_items += count
            self.last_update_time = time.time()

    def get_progress(self) -> Dict[str, Any]:
        """
        Get a snapshot of the current progress.

        Returns:
            Dict[str, Any]: processed/total counts, completion percentage,
            elapsed seconds, estimated remaining seconds (None before any
            progress has been made), and items-per-second throughput.
        """
        with self.lock:
            elapsed = time.time() - self.start_time
            progress = self.processed_items / self.total_items if self.total_items > 0 else 0

            # Linear extrapolation from the average throughput so far.
            if progress > 0:
                remaining_time = (elapsed / progress) * (1 - progress)
            else:
                remaining_time = None

            return {
                'processed': self.processed_items,
                'total': self.total_items,
                'percentage': progress * 100,
                'elapsed_time': elapsed,
                'estimated_remaining': remaining_time,
                'items_per_second': self.processed_items / elapsed if elapsed > 0 else 0
            }

    def print_progress(self):
        """Print a single-line, in-place progress update to stdout."""
        progress = self.get_progress()

        print(f"\r进度: {progress['processed']}/{progress['total']} "
              f"({progress['percentage']:.1f}%) | "
              f"速度: {progress['items_per_second']:.1f} 项目/秒", end="")

        if progress['estimated_remaining'] is not None:
            print(f" | 剩余时间: {progress['estimated_remaining']:.0f}秒", end="")

        # BUG FIX: the old final print() emitted a newline, defeating the
        # leading '\r' in-place update; flush with no newline instead.
        print(end="", flush=True)

def test_stream_processor():
    """测试流式处理器"""
    print("测试流式处理器...")

    # Build a small processor and wire up all three callbacks as lambdas.
    processor = StreamProcessor(buffer_size=100, batch_size=10)
    processor.set_result_callback(
        lambda result: print(f"处理结果: {result.repo_path} - {result.status}"))
    processor.set_batch_callback(
        lambda batch: print(f"批处理: 收到 {len(batch)} 个结果"))
    processor.set_error_callback(
        lambda result: print(f"错误: {result.repo_path} - {result.error}"))

    processor.start()

    try:
        # Feed 25 simulated results; every fifth one is a failure.
        for i in range(25):
            failed = i % 5 == 0
            processor.add_result(StreamResult(
                repo_path=f"/path/to/repo{i}",
                status="failed" if failed else "success",
                output="" if failed else f"更新成功 {i}",
                error=f"错误信息 {i}" if failed else "",
            ))
            time.sleep(0.1)

        # Give the background threads time to drain the queue.
        time.sleep(1)

        print("\n统计信息:")
        for key, value in processor.get_stats().items():
            print(f"  {key}: {value}")

        print("\n处理摘要:")
        for key, value in processor.get_summary().items():
            print(f"  {key}: {value}")

        json_output = processor.export_results('json')
        print(f"\nJSON 导出（前200字符）: {json_output[:200]}...")

    finally:
        processor.stop()


if __name__ == "__main__":
    test_stream_processor()