"""
数据处理流程

定义数据采集、清洗、转换和存储的工作流程。
"""

from typing import Dict, List, Any, Optional
from datetime import datetime, timedelta

from .workflow_types import (
    WorkflowDefinition,
    WorkflowNode,
    WorkflowEdge,
    WorkflowType,
    NodeType,
    EdgeType
)


class DataProcessingFlow:
    """
    Builder for data-processing workflow definitions.

    Each public factory method assembles a complete ``WorkflowDefinition``
    (nodes plus edges) for one pipeline:

    - :meth:`create_market_data_flow` -- real-time + historical market data
    - :meth:`create_alternative_data_flow` -- news / social / satellite / macro data
    - :meth:`create_realtime_data_flow` -- streaming market-data processing

    The repeated start/end-node and edge-registration boilerplate is factored
    into private helpers so each factory only declares what is unique to it.
    """

    @staticmethod
    def _add_start_node(workflow: WorkflowDefinition,
                        description: Optional[str] = None) -> None:
        """Register the START node and mark it as the workflow entry point.

        Args:
            workflow: Definition to mutate in place.
            description: Optional node description. When ``None`` the keyword
                is omitted entirely so the ``WorkflowNode`` constructor default
                is preserved (matches the original call sites, which sometimes
                did not pass ``description`` at all).
        """
        kwargs: Dict[str, Any] = {
            "node_id": "start",
            "name": "开始",
            "node_type": NodeType.START,
        }
        if description is not None:
            kwargs["description"] = description
        workflow.add_node(WorkflowNode(**kwargs))
        workflow.start_node_id = "start"

    @staticmethod
    def _add_end_node(workflow: WorkflowDefinition,
                      description: Optional[str] = None) -> None:
        """Register the END node and record it as the sole terminal node.

        Args:
            workflow: Definition to mutate in place.
            description: Optional node description; omitted when ``None``
                (same rationale as :meth:`_add_start_node`).
        """
        kwargs: Dict[str, Any] = {
            "node_id": "end",
            "name": "结束",
            "node_type": NodeType.END,
        }
        if description is not None:
            kwargs["description"] = description
        workflow.add_node(WorkflowNode(**kwargs))
        workflow.end_node_ids = ["end"]

    @staticmethod
    def _add_edges(workflow: WorkflowDefinition,
                   edges: List[WorkflowEdge]) -> None:
        """Register every edge in *edges* on *workflow*, in order."""
        for edge in edges:
            workflow.add_edge(edge)

    @staticmethod
    def create_market_data_flow() -> WorkflowDefinition:
        """
        Build the market-data processing workflow.

        The flow checks data-source availability, branches on the result,
        collects real-time / historical / fundamental data in parallel,
        then merges, quality-checks, (conditionally) cleans, transforms,
        validates and stores the data. Source-outage failures are routed
        to a dedicated error-handling node.

        Returns:
            Fully wired market-data-processing workflow definition.
        """
        workflow = WorkflowDefinition(
            workflow_id="market_data_processing",
            name="市场数据处理流程",
            workflow_type=WorkflowType.DATA_PROCESSING,
            description="处理实时和历史市场数据的完整流程",
            timeout=timedelta(hours=1)
        )

        DataProcessingFlow._add_start_node(workflow, "数据处理流程开始")

        # Probe every configured upstream source before collecting anything.
        source_check_node = WorkflowNode(
            node_id="source_check",
            name="数据源检查",
            node_type=NodeType.TASK,
            description="检查各数据源的可用性和连接状态",
            executor="data_source_checker",
            timeout=timedelta(minutes=5),
            retry_count=3,
            parameters={
                "sources": ["wind", "tushare", "akshare", "yahoo_finance"],
                "check_connectivity": True,
                "check_data_freshness": True
            }
        )
        workflow.add_node(source_check_node)

        # Decision: proceed with collection (all/partial sources up) or fail
        # over to error handling (no sources up). The three conditions are
        # mutually exclusive expressions evaluated against the check result.
        collection_decision = WorkflowNode(
            node_id="collection_decision",
            name="数据采集决策",
            node_type=NodeType.DECISION,
            description="根据数据源状态决定采集策略",
            conditions=[
                {
                    "name": "all_sources_available",
                    "expression": "all(source['status'] == 'available' for source in source_check_result['sources'])"
                },
                {
                    "name": "partial_sources_available", 
                    "expression": "any(source['status'] == 'available' for source in source_check_result['sources'])"
                },
                {
                    "name": "no_sources_available",
                    "expression": "not any(source['status'] == 'available' for source in source_check_result['sources'])"
                }
            ]
        )
        workflow.add_node(collection_decision)

        # Fan-out node: the three collect_* branches below run concurrently.
        parallel_collection = WorkflowNode(
            node_id="parallel_collection",
            name="并行数据采集",
            node_type=NodeType.PARALLEL,
            description="并行从多个数据源采集数据",
            parallel_branches=["collect_realtime", "collect_historical", "collect_fundamental"]
        )
        workflow.add_node(parallel_collection)

        # Branch 1: real-time tick/kline/depth collection.
        realtime_collection = WorkflowNode(
            node_id="collect_realtime",
            name="实时数据采集",
            node_type=NodeType.TASK,
            description="采集实时市场数据",
            executor="realtime_data_collector",
            timeout=timedelta(minutes=10),
            parameters={
                "data_types": ["tick", "kline_1m", "depth"],
                "symbols": "from_context",
                "start_time": "now",
                "buffer_size": 10000
            }
        )
        workflow.add_node(realtime_collection)

        # Branch 2: historical bars (252 trading days ~ one year lookback).
        historical_collection = WorkflowNode(
            node_id="collect_historical",
            name="历史数据采集",
            node_type=NodeType.TASK,
            description="采集历史市场数据",
            executor="historical_data_collector",
            timeout=timedelta(minutes=30),
            parameters={
                "data_types": ["kline_1d", "kline_1h"],
                "symbols": "from_context",
                "lookback_days": 252,
                "adjust": "qfq"
            }
        )
        workflow.add_node(historical_collection)

        # Branch 3: fundamentals (reports, company and industry data).
        fundamental_collection = WorkflowNode(
            node_id="collect_fundamental",
            name="基本面数据采集",
            node_type=NodeType.TASK,
            description="采集基本面数据",
            executor="fundamental_data_collector",
            timeout=timedelta(minutes=20),
            parameters={
                "data_types": ["financial_report", "company_info", "industry_data"],
                "symbols": "from_context",
                "report_periods": ["latest", "last_4_quarters"]
            }
        )
        workflow.add_node(fundamental_collection)

        # Fan-in: merge the three branch outputs on a shared time axis.
        data_merge = WorkflowNode(
            node_id="data_merge",
            name="数据合并",
            node_type=NodeType.MERGE,
            description="合并来自不同源的数据",
            executor="data_merger",
            parameters={
                "merge_strategy": "outer_join",
                "time_alignment": True,
                "conflict_resolution": "latest_wins"
            }
        )
        workflow.add_node(data_merge)

        # Quality gate: thresholds feed the cleaning decision downstream.
        quality_check = WorkflowNode(
            node_id="quality_check",
            name="数据质量检查",
            node_type=NodeType.TASK,
            description="检查数据质量和完整性",
            executor="data_quality_checker",
            parameters={
                "checks": [
                    "missing_values",
                    "outliers",
                    "duplicates",
                    "data_consistency",
                    "time_series_continuity"
                ],
                "thresholds": {
                    "missing_rate": 0.05,
                    "outlier_zscore": 3.0
                }
            }
        )
        workflow.add_node(quality_check)

        # Decision: clean only when the overall quality score is below 0.9.
        cleaning_decision = WorkflowNode(
            node_id="cleaning_decision",
            name="数据清洗决策",
            node_type=NodeType.DECISION,
            description="根据质量检查结果决定清洗策略",
            conditions=[
                {
                    "name": "needs_cleaning",
                    "expression": "quality_check_result['overall_score'] < 0.9"
                },
                {
                    "name": "quality_acceptable",
                    "expression": "quality_check_result['overall_score'] >= 0.9"
                }
            ]
        )
        workflow.add_node(cleaning_decision)

        # Optional cleaning step, entered only via the needs_cleaning edge.
        data_cleaning = WorkflowNode(
            node_id="data_cleaning",
            name="数据清洗",
            node_type=NodeType.TASK,
            description="清洗和修复数据问题",
            executor="data_cleaner",
            parameters={
                "cleaning_rules": [
                    "fill_missing_values",
                    "remove_outliers",
                    "deduplicate",
                    "normalize_formats"
                ],
                "fill_methods": {
                    "price": "forward_fill",
                    "volume": "zero_fill",
                    "fundamental": "interpolate"
                }
            }
        )
        workflow.add_node(data_cleaning)

        # Derive returns, technical indicators and model features.
        data_transformation = WorkflowNode(
            node_id="data_transformation",
            name="数据转换",
            node_type=NodeType.TASK,
            description="转换数据格式和计算衍生指标",
            executor="data_transformer",
            parameters={
                "transformations": [
                    "calculate_returns",
                    "calculate_technical_indicators",
                    "normalize_data",
                    "create_features"
                ],
                "technical_indicators": [
                    "sma_20", "sma_50", "ema_12", "ema_26",
                    "rsi_14", "macd", "bollinger_bands",
                    "atr_14", "volume_sma_20"
                ]
            }
        )
        workflow.add_node(data_transformation)

        # Post-transformation validation against business rules.
        data_validation = WorkflowNode(
            node_id="data_validation",
            name="数据验证",
            node_type=NodeType.TASK,
            description="验证处理后的数据",
            executor="data_validator",
            parameters={
                "validation_rules": [
                    "check_data_ranges",
                    "validate_calculations",
                    "check_business_rules",
                    "verify_data_integrity"
                ],
                "business_rules": {
                    "price_change_limit": 0.2,
                    "volume_spike_threshold": 10.0
                }
            }
        )
        workflow.add_node(data_validation)

        # Persist to three tiers: time-series DB, hot cache, and parquet files.
        data_storage = WorkflowNode(
            node_id="data_storage",
            name="数据存储",
            node_type=NodeType.TASK,
            description="存储处理后的数据",
            executor="data_storage_manager",
            parameters={
                "storage_targets": [
                    {
                        "type": "timescaledb",
                        "table": "market_data",
                        "partition_by": "time",
                        "compression": True
                    },
                    {
                        "type": "redis",
                        "key_pattern": "realtime:{symbol}:{data_type}",
                        "ttl": 3600
                    },
                    {
                        "type": "file",
                        "format": "parquet",
                        "path": "/data/processed/{date}/{symbol}.parquet"
                    }
                ]
            }
        )
        workflow.add_node(data_storage)

        # Error sink reached when no data source is available.
        error_handling = WorkflowNode(
            node_id="error_handling",
            name="错误处理",
            node_type=NodeType.TASK,
            description="处理数据处理过程中的错误",
            executor="error_handler",
            parameters={
                "error_strategies": {
                    "data_source_error": "fallback_to_backup",
                    "quality_error": "partial_processing",
                    "storage_error": "retry_with_backoff"
                },
                "notification_channels": ["email", "slack", "webhook"]
            }
        )
        workflow.add_node(error_handling)

        DataProcessingFlow._add_end_node(workflow, "数据处理流程结束")

        DataProcessingFlow._add_edges(workflow, [
            # Main path.
            WorkflowEdge("edge_1", "start", "source_check", EdgeType.SEQUENCE),
            WorkflowEdge("edge_2", "source_check", "collection_decision", EdgeType.SEQUENCE),

            # Collection branch: any usable source -> collect; none -> error path.
            WorkflowEdge("edge_3", "collection_decision", "parallel_collection", EdgeType.CONDITION,
                        condition="collection_decision_result['all_sources_available'] or collection_decision_result['partial_sources_available']"),
            WorkflowEdge("edge_4", "collection_decision", "error_handling", EdgeType.CONDITION,
                        condition="collection_decision_result['no_sources_available']"),

            # Parallel collection fans in to the merge node.
            WorkflowEdge("edge_5", "parallel_collection", "data_merge", EdgeType.SEQUENCE),

            # Core processing chain.
            WorkflowEdge("edge_6", "data_merge", "quality_check", EdgeType.SEQUENCE),
            WorkflowEdge("edge_7", "quality_check", "cleaning_decision", EdgeType.SEQUENCE),

            # Cleaning branch: both paths converge on data_transformation.
            WorkflowEdge("edge_8", "cleaning_decision", "data_cleaning", EdgeType.CONDITION,
                        condition="cleaning_decision_result['needs_cleaning']"),
            WorkflowEdge("edge_9", "cleaning_decision", "data_transformation", EdgeType.CONDITION,
                        condition="cleaning_decision_result['quality_acceptable']"),
            WorkflowEdge("edge_10", "data_cleaning", "data_transformation", EdgeType.SEQUENCE),

            # Transform -> validate -> store -> end.
            WorkflowEdge("edge_11", "data_transformation", "data_validation", EdgeType.SEQUENCE),
            WorkflowEdge("edge_12", "data_validation", "data_storage", EdgeType.SEQUENCE),
            WorkflowEdge("edge_13", "data_storage", "end", EdgeType.SEQUENCE),

            # Error path also terminates at the end node.
            WorkflowEdge("edge_14", "error_handling", "end", EdgeType.SEQUENCE),
        ])

        # JSON-schema style contracts for workflow inputs and outputs.
        workflow.input_schema = {
            "type": "object",
            "properties": {
                "symbols": {
                    "type": "array",
                    "items": {"type": "string"},
                    "description": "要处理的股票代码列表"
                },
                "data_types": {
                    "type": "array", 
                    "items": {"type": "string"},
                    "description": "要采集的数据类型"
                },
                "date_range": {
                    "type": "object",
                    "properties": {
                        "start_date": {"type": "string", "format": "date"},
                        "end_date": {"type": "string", "format": "date"}
                    }
                }
            },
            "required": ["symbols"]
        }

        workflow.output_schema = {
            "type": "object",
            "properties": {
                "processed_records": {"type": "integer"},
                "data_quality_score": {"type": "number"},
                "storage_locations": {
                    "type": "array",
                    "items": {"type": "string"}
                },
                "processing_summary": {"type": "object"}
            }
        }

        return workflow

    @staticmethod
    def create_alternative_data_flow() -> WorkflowDefinition:
        """
        Build the alternative-data processing workflow.

        Configures news / social / satellite / macro-economic sources,
        collects all four categories in parallel, then runs a linear
        preprocess -> feature-extraction -> fusion pipeline.

        Returns:
            Fully wired alternative-data-processing workflow definition.
        """
        workflow = WorkflowDefinition(
            workflow_id="alternative_data_processing",
            name="另类数据处理流程",
            workflow_type=WorkflowType.DATA_PROCESSING,
            description="处理新闻、社交媒体、卫星图像等另类数据",
            timeout=timedelta(hours=2)
        )

        DataProcessingFlow._add_start_node(workflow)

        # Declare the catalogue of alternative data providers per category.
        source_config = WorkflowNode(
            node_id="source_config",
            name="数据源配置",
            node_type=NodeType.TASK,
            description="配置另类数据源",
            executor="alt_data_configurator",
            parameters={
                "sources": {
                    "news": ["reuters", "bloomberg", "sina_finance"],
                    "social": ["weibo", "twitter", "reddit"],
                    "satellite": ["planet", "maxar"],
                    "economic": ["fred", "oecd", "imf"]
                }
            }
        )
        workflow.add_node(source_config)

        # Fan-out over the four category-specific collectors below.
        parallel_collection = WorkflowNode(
            node_id="parallel_collection",
            name="并行数据采集",
            node_type=NodeType.PARALLEL,
            description="并行采集不同类型的另类数据",
            parallel_branches=["collect_news", "collect_social", "collect_satellite", "collect_economic"]
        )
        workflow.add_node(parallel_collection)

        # Branch: news articles with inline sentiment scoring.
        news_collection = WorkflowNode(
            node_id="collect_news",
            name="新闻数据采集",
            node_type=NodeType.TASK,
            description="采集新闻数据",
            executor="news_collector",
            parameters={
                "keywords": "from_context",
                "languages": ["zh", "en"],
                "sentiment_analysis": True
            }
        )
        workflow.add_node(news_collection)

        # Branch: social-media posts, restricted to verified accounts.
        social_collection = WorkflowNode(
            node_id="collect_social",
            name="社交媒体数据采集",
            node_type=NodeType.TASK,
            description="采集社交媒体数据",
            executor="social_collector",
            parameters={
                "platforms": ["weibo", "twitter"],
                "hashtags": "from_context",
                "user_filters": "verified_only"
            }
        )
        workflow.add_node(social_collection)

        # Branch: satellite imagery filtered by cloud coverage.
        satellite_collection = WorkflowNode(
            node_id="collect_satellite",
            name="卫星数据采集",
            node_type=NodeType.TASK,
            description="采集卫星图像数据",
            executor="satellite_collector",
            parameters={
                "regions": "from_context",
                "resolution": "high",
                "cloud_coverage": "max_20_percent"
            }
        )
        workflow.add_node(satellite_collection)

        # Branch: monthly macro indicators for major economies.
        economic_collection = WorkflowNode(
            node_id="collect_economic",
            name="经济数据采集",
            node_type=NodeType.TASK,
            description="采集宏观经济数据",
            executor="economic_collector",
            parameters={
                "indicators": ["gdp", "inflation", "unemployment", "interest_rates"],
                "countries": ["CN", "US", "EU", "JP"],
                "frequency": "monthly"
            }
        )
        workflow.add_node(economic_collection)

        # Modality-specific preprocessing (text NLP, image resize/normalize).
        data_preprocessing = WorkflowNode(
            node_id="data_preprocessing",
            name="数据预处理",
            node_type=NodeType.TASK,
            description="预处理另类数据",
            executor="alt_data_preprocessor",
            parameters={
                "text_processing": {
                    "tokenization": True,
                    "stop_words_removal": True,
                    "stemming": True,
                    "entity_recognition": True
                },
                "image_processing": {
                    "resize": [224, 224],
                    "normalization": True,
                    "augmentation": False
                }
            }
        )
        workflow.add_node(data_preprocessing)

        # Per-modality feature extraction.
        feature_extraction = WorkflowNode(
            node_id="feature_extraction",
            name="特征提取",
            node_type=NodeType.TASK,
            description="从另类数据中提取特征",
            executor="feature_extractor",
            parameters={
                "text_features": ["tfidf", "word2vec", "bert_embeddings"],
                "image_features": ["cnn_features", "object_detection"],
                "time_series_features": ["trend", "seasonality", "volatility"]
            }
        )
        workflow.add_node(feature_extraction)

        # Cross-modality fusion, aligned on time.
        data_fusion = WorkflowNode(
            node_id="data_fusion",
            name="数据融合",
            node_type=NodeType.TASK,
            description="融合不同来源的另类数据",
            executor="data_fusion_engine",
            parameters={
                "fusion_methods": ["weighted_average", "ensemble", "attention_mechanism"],
                "alignment_strategy": "time_based",
                "missing_data_handling": "interpolation"
            }
        )
        workflow.add_node(data_fusion)

        DataProcessingFlow._add_end_node(workflow)

        DataProcessingFlow._add_edges(workflow, [
            WorkflowEdge("edge_1", "start", "source_config", EdgeType.SEQUENCE),
            WorkflowEdge("edge_2", "source_config", "parallel_collection", EdgeType.SEQUENCE),
            WorkflowEdge("edge_3", "parallel_collection", "data_preprocessing", EdgeType.SEQUENCE),
            WorkflowEdge("edge_4", "data_preprocessing", "feature_extraction", EdgeType.SEQUENCE),
            WorkflowEdge("edge_5", "feature_extraction", "data_fusion", EdgeType.SEQUENCE),
            WorkflowEdge("edge_6", "data_fusion", "end", EdgeType.SEQUENCE),
        ])

        return workflow

    @staticmethod
    def create_realtime_data_flow() -> WorkflowDefinition:
        """
        Build the real-time streaming data workflow.

        Attaches a stream listener, then processes incoming batches inside
        a loop node that stays active while the stream is open.

        Returns:
            Fully wired real-time data-processing workflow definition.
        """
        workflow = WorkflowDefinition(
            workflow_id="realtime_data_processing",
            name="实时数据处理流程",
            workflow_type=WorkflowType.DATA_PROCESSING,
            description="处理实时市场数据流",
            timeout=timedelta(minutes=30)
        )

        DataProcessingFlow._add_start_node(workflow)

        # Subscribe to the configured stream transports and buffer batches.
        stream_listener = WorkflowNode(
            node_id="stream_listener",
            name="数据流监听",
            node_type=NodeType.TASK,
            description="监听实时数据流",
            executor="stream_listener",
            parameters={
                "stream_sources": ["websocket", "kafka", "redis_stream"],
                "buffer_size": 1000,
                "batch_timeout": 1000  # milliseconds
            }
        )
        workflow.add_node(stream_listener)

        # Loop while the stream is active; max_iterations is set very high
        # to approximate an unbounded loop within the engine's limits.
        processing_loop = WorkflowNode(
            node_id="processing_loop",
            name="实时处理循环",
            node_type=NodeType.LOOP,
            description="循环处理实时数据",
            loop_condition="stream_active",
            max_iterations=1000000,
            parallel_branches=["process_batch"]
        )
        workflow.add_node(processing_loop)

        # Body of the loop: validate, enrich and cache each batch.
        batch_processing = WorkflowNode(
            node_id="process_batch",
            name="批量处理",
            node_type=NodeType.TASK,
            description="处理数据批次",
            executor="batch_processor",
            parameters={
                "processing_steps": [
                    "validate_data",
                    "calculate_indicators",
                    "detect_anomalies",
                    "update_cache"
                ]
            }
        )
        workflow.add_node(batch_processing)

        DataProcessingFlow._add_end_node(workflow)

        DataProcessingFlow._add_edges(workflow, [
            WorkflowEdge("edge_1", "start", "stream_listener", EdgeType.SEQUENCE),
            WorkflowEdge("edge_2", "stream_listener", "processing_loop", EdgeType.SEQUENCE),
            WorkflowEdge("edge_3", "processing_loop", "end", EdgeType.SEQUENCE),
        ])

        return workflow