import networkx as nx
import numpy as np
from collections import defaultdict
from typing import Dict, Any, List, Optional, Tuple
import asyncio
from app.services.dhgam.hgpms_service import HeterogeneousGraphPatternMining
from app.schemas.dhgam.hgpms_schema import (
    HGPMSInputParams, HGPMSOutputParams, PatternRepresentation, 
    PatternInstance, TaskStatus, OutputParams, AlgorithmResponse
)
from app.schemas.metrics_schema import BarChart, LineChart
from app.utils.logger import logger


class HGPMSService:
    """Heterogeneous graph pattern mining (HGPMS) service.

    Builds a NetworkX graph from the request payload, runs
    HeterogeneousGraphPatternMining over it, and packages the mined
    patterns, their instances, and chart metrics into the response schema.
    """

    # Post-processing caps (hoisted from inline magic numbers) to keep
    # response latency bounded.
    _MAX_PATTERNS_FOR_INSTANCES = 5   # only materialize instances for the first N patterns
    _MAX_INSTANCES_PER_PATTERN = 10   # keep at most N instances per pattern

    async def run_hgpms(
        self, 
        input_params: Dict[str, Any], 
        task_id: str
    ) -> AlgorithmResponse:
        """Run the heterogeneous graph pattern mining algorithm.

        Args:
            input_params: Raw request parameters; the relevant settings are
                read from ``input_params["hgpms_params"]`` (graph_data,
                min_support, max_pattern_size), all with defaults.
            task_id: Identifier of the task being executed.

        Returns:
            AlgorithmResponse: COMPLETED response carrying results and chart
            metrics on success; FAILED response with an error message if any
            exception is raised.
        """
        try:
            logger.info(f"开始执行HGPMS任务: {task_id}")

            # Unpack algorithm settings, falling back to defaults.
            hgpms_params = input_params.get("hgpms_params", {})
            graph_data = hgpms_params.get("graph_data", {})
            min_support = hgpms_params.get("min_support", 0.05)
            max_pattern_size = hgpms_params.get("max_pattern_size", 4)

            # Build the graph structure from the payload.
            graph = self._create_graph_from_data(graph_data)

            miner = HeterogeneousGraphPatternMining(
                min_support=min_support,
                max_pattern_size=max_pattern_size
            )

            # Mining is CPU-bound; run it in a worker thread so this async
            # endpoint does not block the event loop while it works.
            frequent_patterns = await asyncio.to_thread(miner.mine_patterns, graph)

            # Materialize instances for only the first few patterns, and only
            # a handful of instances each, for performance.
            pattern_instances: Dict[str, List[PatternInstance]] = {}
            for pattern_key in list(frequent_patterns.keys())[:self._MAX_PATTERNS_FOR_INSTANCES]:
                instances = miner.find_pattern_instances(graph, pattern_key)
                pattern_instances[str(pattern_key)] = [
                    self._convert_instance_to_schema(instance, pattern_key, idx)
                    for idx, instance in enumerate(instances[:self._MAX_INSTANCES_PER_PATTERN])
                ]

            # Convert the frequent patterns into schema representations.
            pattern_representations = self._convert_patterns_to_schema(
                frequent_patterns, pattern_instances
            )

            output_params = HGPMSOutputParams(
                frequent_patterns=pattern_representations,
                pattern_growth_history=miner.pattern_growth_history,
                # Pass the dict directly; the previous identity dict-comprehension
                # copy added nothing (pydantic validates/copies on construction).
                pattern_instances=pattern_instances,
                algorithm="HGPMS",
                parameters={
                    "min_support": min_support,
                    "max_pattern_size": max_pattern_size,
                    "total_nodes": graph.number_of_nodes(),
                    "total_edges": graph.number_of_edges(),
                }
            )

            # Build visualization metrics from the mined results.
            metrics = self._generate_metrics(output_params)

            response = AlgorithmResponse(
                task_id=task_id,
                task_status=TaskStatus.COMPLETED,
                task_progress=100,
                output_params=OutputParams(hgpms_results=output_params),
                metrics=metrics
            )

            logger.info(f"完成HGPMS任务: {task_id}")
            return response

        except Exception as e:
            # logger.exception preserves the traceback, which plain .error lost.
            logger.exception(f"HGPMS任务执行失败: {str(e)}")
            return AlgorithmResponse(
                task_id=task_id,
                task_status=TaskStatus.FAILED,
                error_message=f"算法执行失败: {str(e)}",
                output_params=OutputParams()
            )

    def _create_graph_from_data(self, graph_data: Dict[str, Any]) -> nx.Graph:
        """Build an undirected NetworkX graph from node/edge dictionaries.

        Args:
            graph_data: Dict with optional "nodes" (each entry needs an "id";
                all other keys become node attributes) and "edges" (each entry
                needs "source"/"target"; all other keys become edge attributes).

        Returns:
            nx.Graph: The assembled graph.
        """
        G = nx.Graph()

        # NOTE(review): entries missing "id"/"source"/"target" are added under
        # the key None rather than rejected — assumed not to occur in valid
        # payloads; confirm against the API contract.
        for node in graph_data.get("nodes", []):
            node_attrs = {k: v for k, v in node.items() if k != "id"}
            G.add_node(node.get("id"), **node_attrs)

        for edge in graph_data.get("edges", []):
            edge_attrs = {k: v for k, v in edge.items() if k not in ("source", "target")}
            G.add_edge(edge.get("source"), edge.get("target"), **edge_attrs)

        return G

    def _convert_patterns_to_schema(
        self, 
        frequent_patterns: Dict[Any, float],
        pattern_instances: Dict[str, List[PatternInstance]]
    ) -> List[PatternRepresentation]:
        """Convert mined frequent patterns into schema objects.

        Args:
            frequent_patterns: Mapping of pattern key -> support value. Each
                key is assumed to be a pair of (node-type counts, iterable of
                ((source_type, edge_type, target_type), count)) — the format
                produced by the miner.
            pattern_instances: Mapping of str(pattern) -> its instances; used
                only for the per-pattern instance count.

        Returns:
            List[PatternRepresentation]: Representations sorted by support,
            highest first.
        """
        pattern_representations = []

        for pattern, support in frequent_patterns.items():
            # Node-type distribution of the pattern.
            node_types = dict(pattern[0])

            # Typed edge signatures with their multiplicities.
            edge_patterns = []
            for edge_pattern, count in pattern[1]:
                u_type, edge_type, v_type = edge_pattern
                edge_patterns.append({
                    "source_type": u_type,
                    "edge_type": edge_type,
                    "target_type": v_type,
                    "count": count
                })

            # Instance count for this pattern (0 if none were materialized).
            instances_count = len(pattern_instances.get(str(pattern), []))

            pattern_representations.append(PatternRepresentation(
                node_types=node_types,
                edge_patterns=edge_patterns,
                support=support,
                instances_count=instances_count
            ))

        # Sort by support, descending.
        pattern_representations.sort(key=lambda x: x.support, reverse=True)

        return pattern_representations

    def _convert_instance_to_schema(
        self, 
        instance_data: Tuple[Any, nx.Graph],
        pattern: Any,
        instance_id: int
    ) -> PatternInstance:
        """Convert one pattern-instance tuple into a schema object.

        Args:
            instance_data: (center node, matched subgraph) pair.
            pattern: The pattern this instance matches (currently unused here).
            instance_id: Index of the instance within its pattern's list.

        Returns:
            PatternInstance: Schema object with string node ids, typed edges,
            and a similarity score.
        """
        center_node, subgraph = instance_data

        nodes = list(subgraph.nodes())

        # Edge list with the edge's "type" attribute ("unknown" if absent).
        edges = []
        for u, v, attr in subgraph.edges(data=True):
            edges.append({
                "source": u,
                "target": v,
                "type": attr.get("type", "unknown")
            })

        # TODO(review): similarity is simulated per instance index, clamped to
        # 1.0 — wire in the real score from find_pattern_instances when available.
        similarity = min(1.0, 0.7 + instance_id * 0.02)

        return PatternInstance(
            center_node=str(center_node),
            nodes=[str(n) for n in nodes],
            edges=edges,
            similarity=similarity
        )

    def _generate_metrics(self, output_params: HGPMSOutputParams) -> List[Any]:
        """Generate chart metrics for visualization.

        Args:
            output_params: Algorithm output to visualize.

        Returns:
            List[Any]: Chart dicts — a support bar chart for the top patterns
            (if any) and a growth-history line chart (if any history).
        """
        metrics = []

        # 1. Bar chart of pattern support (top 10 patterns; already sorted
        # by support, descending).
        pattern_supports = []
        pattern_labels = []

        for idx, pattern in enumerate(output_params.frequent_patterns[:10]):
            pattern_supports.append(pattern.support)
            pattern_labels.append(f"Pattern {idx+1}")

        if pattern_supports:
            support_chart = BarChart(
                title="频繁模式支持度",
                type="bar",
                xAxis={
                    "title": "模式ID",
                    "data": pattern_labels,
                    "label": "模式"
                },
                yAxis={
                    "title": "支持度",
                    "min": 0,
                    # Non-empty here (guarded above), so max() is safe.
                    "max": max(pattern_supports),
                    "label": "支持度值",
                    "data": pattern_supports
                },
                series=[
                    {
                        "name": "支持度值",
                        "data": pattern_supports,
                        "type": "bar"
                    }
                ]
            )
            # Serialize the pydantic model to a plain dict.
            metrics.append(support_chart.model_dump())

        # 2. Line chart of pattern-growth history across iterations.
        if output_params.pattern_growth_history:
            growth_chart = LineChart(
                title="模式增长历史",
                type="line",
                xAxis={
                    "data": list(range(len(output_params.pattern_growth_history))),
                    "label": "迭代轮次"
                },
                yAxis={
                    "data": output_params.pattern_growth_history,
                    "label": "频繁模式数量"
                },
                series=[
                    {
                        "name": "频繁模式数量",
                        "data": output_params.pattern_growth_history,
                        "type": "line"
                    }
                ]
            )
            metrics.append(growth_chart.model_dump())

        return metrics