import torch
import torch.nn.functional as F
from torch_geometric.nn import GCNConv
from torch_geometric.data import Data, InMemoryDataset
import numpy as np
import networkx as nx
import matplotlib.pyplot as plt
import matplotlib.patches as mpatches
import json
from sklearn.preprocessing import MultiLabelBinarizer
from sklearn.preprocessing import normalize
import os
from auto_label import auto_label

class NmapGraphProcessorJSON:
    """Builds PyTorch-Geometric graph data from Nmap results stored as node-link JSON."""

    # TCP ports commonly associated with exploitable or high-value services.
    RISK_PORT = ['20', '21', '22', '23', '25', '111', '2049', '135', '137', '139', '445', '161', '389', '512', '513', '514',
                 '873', '1194', '1352', '1433', '1521', '2181', '3128', '2601', '2604', '3306', '3389', '3690', '4848',
                 '5000', '5432', '5900', '5901', '5902', '5984', '6379', '7001', '7002', '8069', '8080', '8081', '8082',
                 '8083', '8084', '8085', '8086', '8087', '8088', '8089', '9080', '9081', '9090', '9200', '9300', '11211',
                 '27017', '27018', '50070', '50030']
    # Service names treated as high-risk when exposed.
    RISK_SERV = ['ssh', 'mysql', 'redis', 'mongodb', 'vnc', 'ftp']

    def __init__(self):
        # Fit each binarizer on its full vocabulary up front so every later
        # transform() call yields a fixed-width multi-hot vector
        # (sklearn's fit() returns the estimator, so chaining is safe).
        self.mlb_ports = MultiLabelBinarizer(classes=self.RISK_PORT).fit([self.RISK_PORT])
        self.mlb_services = MultiLabelBinarizer(classes=self.RISK_SERV).fit([self.RISK_SERV])

    @staticmethod
    def calculate_os_security_index(os_string):
        if not os_string:
            return 3.0
        os_parts = os_string.split()
        os_type = os_parts[0].lower()
        os_version = None
        if len(os_parts) > 1:
            try:
                os_version = float(os_parts[1].split('.')[0])
            except ValueError:
                os_version = None

        if os_type in ['windows','microsoft']:
            if os_version is None:
                return 3.0
            if os_version >= 10.0:
                return 0.0
            elif os_version >= 6.0:
                return 1.0
            elif os_version >= 5.0:
                return 2.0
            else:
                return 3.0
        elif os_type in ['linux']:
            if os_version is None:
                return 3.0
            if os_version >= 5.0:
                return 0.0
            elif os_version >= 3.0:
                return 1.0
            elif os_version >= 2.0:
                return 2.0
            else:
                return 3.0
        else:
            return 3.0

    def get_host_features(self, node):
        """Assemble the per-host feature vector for one JSON node.

        Concatenates: multi-hot risky-port flags, multi-hot risky-service
        flags, the precomputed graph-structure metrics, and three scalars
        (risky port count, risky service count, OS security index).
        """
        info = node.get('node_feature', {})

        # Ports may arrive as "445/tcp" — keep only the numeric part.
        port_numbers = [str(p).split('/')[0] for p in info.get('ports', [])]
        risky_ports = set(port_numbers) & set(self.RISK_PORT)
        risky_services = [s for s in info.get('service', []) if s in self.RISK_SERV]

        os_security_index = self.calculate_os_security_index(info.get('os', 'unknown'))

        ports_encoded = self.mlb_ports.transform([list(risky_ports)])[0]
        services_encoded = self.mlb_services.transform([risky_services])[0]

        # Graph-structure metrics computed upstream; default to 0 when absent.
        metric_names = (
            "degree", "neighbor_count", "centrality", "pagerank",
            "betweenness_centrality", "closeness_centrality",
            "degree_centrality", "eigenvector_centrality",
            "katz_centrality", "triangles", "clustering",
        )
        graph_features = [info.get(name, 0) for name in metric_names]

        # Final layout: [ports | services | graph metrics | 3 summary scalars].
        return np.concatenate((
            ports_encoded,
            services_encoded,
            graph_features,
            [len(risky_ports), len(risky_services), os_security_index],
        ))

    def get_edge_features(self, link):
        """Return the numeric feature list for one edge dict.

        Missing attributes default to 0; ``is_bridge`` is coerced to float.
        """
        scalar_keys = (
            "weight",
            "centrality_sum",
            "centrality_diff",
            "degree_sum",
            "vulnerability_transmission",
        )
        features = [link.get(key, 0) for key in scalar_keys]
        features.append(float(link.get("is_bridge", False)))  # bool -> 0.0/1.0
        return features

    def create_graph_data(self, json_data):
        """Convert a node-link JSON dict into a PyG ``Data`` object.

        :param json_data: dict with "nodes" (each carrying an "id") and
            "links" (each carrying "source"/"target" plus edge attributes).
        :return: ``Data`` with node features ``x``, bidirectional
            ``edge_index`` of shape (2, 2E) and ``edge_attr`` of shape (2E, 6).
        """
        nodes = json_data["nodes"]
        edges = json_data["links"]

        node_features = []
        edge_index_list = []
        edge_attr_list = []
        ip_to_node_map = {}

        for node_counter, node in enumerate(nodes):
            node_features.append(self.get_host_features(node))
            ip_to_node_map[node["id"]] = node_counter

        for edge in edges:
            from_node = edge["source"]
            to_node = edge["target"]

            # Skip links whose endpoints are not among the known nodes.
            if from_node in ip_to_node_map and to_node in ip_to_node_map:
                from_node_idx = ip_to_node_map[from_node]
                to_node_idx = ip_to_node_map[to_node]
                edge_features = self.get_edge_features(edge)
                # Undirected graph: store both directions and duplicate the
                # attribute row so edge_attr stays aligned with edge_index.
                # (Bug fix: previously only ONE attribute row was appended
                # per pair, leaving edge_attr with E rows vs 2E edges.)
                edge_index_list.append([from_node_idx, to_node_idx])
                edge_index_list.append([to_node_idx, from_node_idx])
                edge_attr_list.append(edge_features)
                edge_attr_list.append(edge_features)

        node_features = torch.tensor(node_features, dtype=torch.float)
        if edge_index_list:
            edge_index = torch.tensor(edge_index_list, dtype=torch.long).t().contiguous()
            edge_attr = torch.tensor(edge_attr_list, dtype=torch.float)
        else:
            # Keep the canonical (2, 0) / (0, 6) shapes for edgeless graphs
            # instead of producing malformed 1-D tensors.
            edge_index = torch.empty((2, 0), dtype=torch.long)
            edge_attr = torch.empty((0, 6), dtype=torch.float)

        data = Data(x=node_features, edge_index=edge_index, edge_attr=edge_attr)
        return data

    def load_data(self, file_path):
        """Read a node-link JSON file and convert it into a PyG Data object."""
        with open(file_path, 'r') as fp:
            raw = fp.read()
        return self.create_graph_data(json.loads(raw))

    def save_data(self, data_list, save_path):
        """Collate one or more PyG ``Data`` samples and persist them as one .pt file."""
        # Accept a single Data object as a convenience.
        if not isinstance(data_list, list):
            data_list = [data_list]

        # Merge the samples into one big Data plus slicing metadata.
        collated, slices = InMemoryDataset.collate(data_list)

        torch.save((collated, slices), save_path)
        print(f"Saved {len(data_list)} graphs to {save_path}")
    @staticmethod
    def visualize_graph(data,title, pos=None):
        """
        Visualize a PyG graph; nodes are colored by their y label (0/1) when present.

        :param data: PyG Data object; may carry ``y`` labels or ``masked_info``.
        :param title: text inserted into the (Chinese) figure suptitle.
        :param pos: optional networkx layout to reuse across plots; a new
            spring layout is generated when None.
        :return: the layout dict actually used, so callers can reuse it.
        """
        G = nx.Graph()

        node_features = data.x.cpu().numpy()  # node feature matrix (N, F)
        edge_index = data.edge_index.cpu().numpy()  # edge endpoints, shape (2, E)

        # Add one graph node per feature row
        for i in range(node_features.shape[0]):
            G.add_node(i, feature=node_features[i])

        # Add edges
        for edge in edge_index.T:
            G.add_edge(int(edge[0]), int(edge[1]))

        # Generate a layout only when the caller did not supply one
        if pos is None:
            pos = nx.spring_layout(G, seed=42, k=0.2)

        # Case 1: binary y labels -> red (vulnerable) vs green (safe)
        if hasattr(data, 'y') and data.y is not None:
            y_labels = data.y.cpu().numpy()
            color_map = ['green' if label == 0 else 'red' for label in y_labels]
            # Legend (labels are user-facing Chinese strings; do not translate)
            legend_patches = [
                mpatches.Patch(color='red', label='有脆弱性的节点'),
                mpatches.Patch(color='green', label='无脆弱性的节点')
            ]
            suptitle = f"基于{title}的危险节点打标"

        # Case 2: pruning info present -> gray out the masked nodes
        elif hasattr(data, 'masked_info') and data.masked_info is not None:
            masked_node_indices = data.masked_info.get('masked_node_indices', [])
            color_map = ['green'] * node_features.shape[0]  # default: green
            for idx in masked_node_indices:
                color_map[idx] = 'gray'  # pruned (masked) nodes drawn gray
            # Legend
            legend_patches = [
                mpatches.Patch(color='green', label='保留节点'),
                mpatches.Patch(color='gray', label='剪枝节点')
            ]
            suptitle = f"基于{title}的剪枝节点打标"

        # Case 3: no labels at all -> plain topology, everything green
        else:
            color_map = ['green'] * node_features.shape[0]  # no y: all green
            # Legend
            legend_patches = [mpatches.Patch(color='green', label='图节点')]
            suptitle = f"基于{title}的拓扑图"

        # Draw the graph
        plt.figure(figsize=(16, 12))
        nx.draw(G, pos, with_labels=True, node_size=500, node_color=color_map,
                font_size=8, font_weight='bold', edge_color='gray')
        plt.rcParams['font.sans-serif'] = ['SimHei']  # CJK-capable font for the Chinese titles
        plt.rcParams['axes.unicode_minus'] = False
        plt.suptitle(suptitle, fontsize=14)
        plt.legend(handles=legend_patches, loc='upper right')
        plt.show(block=True)

        return pos
        
    def subgraph_aggregation_gcn(self, data, num_layers=2):
        """
        Aggregate subgraph features with a Graph Convolutional Network (GCN).

        :param data: graph data object with ``x`` and ``edge_index``.
        :param num_layers: number of GCNConv layers.
        :return: aggregated per-node feature tensor of shape (N, 128).

        NOTE(review): the network is freshly initialized and never trained,
        so the output depends on random initialization.
        """
        class _GCN(torch.nn.Module):
            def __init__(self, in_dim, hid_dim, out_dim, depth):
                super().__init__()
                self.convs = torch.nn.ModuleList()
                self.convs.append(GCNConv(in_dim, hid_dim))
                for _ in range(depth - 1):
                    self.convs.append(GCNConv(hid_dim, hid_dim))
                self.fc = torch.nn.Linear(hid_dim, out_dim)

            def forward(self, x, edge_index):
                # ReLU-activated conv per layer, then a linear projection.
                for conv in self.convs:
                    x = F.relu(conv(x, edge_index))
                return self.fc(x)

        hidden_dim, output_dim = 64, 128
        device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
        model = _GCN(data.x.shape[1], hidden_dim, output_dim, num_layers).to(device)
        data = data.to(device)

        # Forward pass only.
        return model(data.x, data.edge_index)

    def visualize_agg_graph(self, data, agg_features):
        """
        Visualize GCN-aggregated node features.

        Feature dimension 0 drives node color; dimension 1 (when present)
        drives node size.

        :param data: PyG Data providing ``edge_index``.
        :param agg_features: (N, D) tensor of aggregated node features,
            e.g. the output of ``subgraph_aggregation_gcn``.
        """
        G = nx.Graph()

        node_features = agg_features.detach().numpy()
        edge_index = data.edge_index.numpy()

        for i in range(node_features.shape[0]):
            G.add_node(i, feature=node_features[i])

        for edge in edge_index.T:
            G.add_edge(edge[0], edge[1])

        pos = nx.spring_layout(G, seed=42)

        # Change node_features[:, 0] below to visualize a different dimension.
        # Node color follows feature dimension 0.
        colors = node_features[:, 0]  # dimension 0 chosen for display
        norm = plt.Normalize(colors.min(), colors.max())
        cmap = plt.cm.viridis  # chosen colormap

        # Node size follows feature dimension 1 when it exists.
        if node_features.shape[1] > 1:
            sizes = node_features[:, 1]
            sizes = normalize([sizes])[0] * 1500 + 300  # scale up, with a minimum size
        else:
            sizes = np.ones(node_features.shape[0]) * 500  # fallback: uniform size 500

        # Create the figure with an explicit Axes (needed by the colorbar).
        fig, ax = plt.subplots(figsize=(12, 12))
        nx.draw(
            G, pos,
            with_labels=True,
            node_size=sizes,
            node_color=[cmap(norm(color)) for color in colors],
            font_size=8,
            font_weight='bold',
            edge_color='gray',
            ax=ax  # draw into the explicit Axes
        )

        # ScalarMappable supplies the color scale for the colorbar.
        sm = plt.cm.ScalarMappable(cmap=cmap, norm=norm)
        sm.set_array([])

        # Attach the colorbar to the same Axes explicitly.
        cbar = plt.colorbar(sm, ax=ax, label="Node Feature Value (Dimension 0)")
        cbar.set_label("Node Feature Value (Dimension 0)")  # label the colorbar

        plt.title("Graph Visualization After GCN Aggregation", fontsize=16)
        plt.show(block=True)

class NmapGraphProcessorXML:
    """Builds PyTorch-Geometric graph data from libnmap XML scan results."""

    # TCP ports commonly associated with exploitable or high-value services.
    RISK_PORT = ['20', '21', '22', '23', '25', '111', '2049', '135', '137', '139', '445', '161', '389', '512', '513',
                 '514', '873', '1194', '1352', '1433', '1521', '2181', '3128', '2601', '2604', '3306', '3389', '3690',
                 '4848', '5000', '5432', '5900', '5901', '5902', '5984', '6379', '7001', '7002', '8069', '8080',
                 '8081', '8082', '8083', '8084', '8085', '8086', '8087', '8088', '8089', '9080', '9081', '9090',
                 '9200', '9300', '11211', '27017', '27018', '50070', '50030']
    # Service names treated as high-risk when exposed.
    RISK_SERV = ['ssh', 'mysql', 'redis', 'mongodb', 'vnc', 'ftp']

    def __init__(self):
        # Fit on the full vocabularies so transform() yields fixed-width vectors.
        self.mlb_ports = MultiLabelBinarizer(classes=self.RISK_PORT).fit([self.RISK_PORT])
        self.mlb_services = MultiLabelBinarizer(classes=self.RISK_SERV).fit([self.RISK_SERV])

    @staticmethod
    def calculate_os_security_index(os_matches):
        if not os_matches or not os_matches[0].osclasses:
            return 3.0  # 无匹配项时，安全性最低
        os_class = os_matches[0].osclasses[0]
        version = None
        is_windows = any(keyword in os_class.vendor.lower() for keyword in ['windows', 'microsoft'])

        if is_windows:
            version = os_class.osgen
        else:
            version = os_class.cpelist[0].cpedict.get('version', None) if os_class.cpelist else None

        if version:
            try:
                max_version = float(version)
            except ValueError:
                return 3.0
        else:
            return 3.0

    def get_host_features(self, host):
        """Build the feature vector for one libnmap host object.

        Layout: [risky-port multi-hot | risky-service multi-hot |
                 OS security index | traceroute hop count].
        """
        open_ports = [str(svc.port) for svc in host.services]
        risky_ports = set(open_ports) & set(self.RISK_PORT)
        risky_services = [svc.service for svc in host.services if svc.service in self.RISK_SERV]

        os_security_index = self.calculate_os_security_index(host.os.osmatches)

        ports_encoded = self.mlb_ports.transform([list(risky_ports)])[0]
        services_encoded = self.mlb_services.transform([risky_services])[0]

        # Hop count from the traceroute extras (0 when no trace was recorded).
        hop_count = len(host._extras['trace']) - 1 if 'trace' in host._extras else 0

        return np.concatenate((ports_encoded, services_encoded, [os_security_index, hop_count]))

    def create_graph_data(self, hosts):
        """Turn a collection of libnmap hosts into a PyG ``Data`` object.

        Edges are stored once per (host, neighbor) pair, i.e. directed.
        """
        node_features = []
        edge_index_list = []
        ip_to_node_map = {}

        for idx, host in enumerate(hosts):
            ip_to_node_map[host.address] = idx
            node_features.append(self.get_host_features(host))

        for host in hosts:
            src = ip_to_node_map[host.address]
            for neighbor in host.hosts:
                dst = ip_to_node_map.get(neighbor.address)
                # Only connect neighbors that were scanned themselves.
                if dst is not None:
                    edge_index_list.append([src, dst])

        x = torch.tensor(node_features, dtype=torch.float)
        edge_index = torch.tensor(edge_index_list, dtype=torch.long).t().contiguous()

        return Data(x=x, edge_index=edge_index)

    def load_data(self, file_path):
        # NOTE(review): this class is selected for .xml input in __main__,
        # yet this method parses the file with json.load, and
        # create_graph_data expects libnmap-style host objects — these do
        # not line up; confirm the intended loader (e.g. libnmap's
        # NmapParser) before relying on this path.
        with open(file_path, 'r') as f:
            json_data = json.load(f)
        return self.create_graph_data(json_data)

    @staticmethod
    def visualize_graph(data):
        """Draw the graph with a spring layout; every node in light blue."""
        graph = nx.Graph()

        features = data.x.numpy()
        edges = data.edge_index.numpy()

        # One graph node per feature row.
        for idx in range(features.shape[0]):
            graph.add_node(idx, feature=features[idx])

        # Edge endpoints come as the two rows of edge_index.
        for src, dst in edges.T:
            graph.add_edge(src, dst)

        layout = nx.spring_layout(graph, seed=42)

        plt.figure(figsize=(14, 14))
        nx.draw(graph, layout, with_labels=True, node_size=500, node_color='lightblue', font_size=8, font_weight='bold', edge_color='gray')
        plt.title("Graph Visualization", fontsize=16)
        plt.show(block=True)

    def subgraph_aggregation_gcn(self, data, num_layers=2):
        """
        Aggregate subgraph features with a Graph Convolutional Network (GCN).

        Parameters:
        - data: data object holding the graph structure and node features.
        - num_layers: number of GCN layers.

        Returns:
        - agg_features: aggregated per-node feature tensor (N, 128).

        NOTE(review): the model is freshly initialized and never trained, so
        the output depends on random initialization.
        """
        class GCN(torch.nn.Module):
            def __init__(self, input_dim, hidden_dim, output_dim, num_layers):
                super(GCN, self).__init__()
                self.convs = torch.nn.ModuleList()
                self.convs.append(GCNConv(input_dim, hidden_dim))
                for _ in range(num_layers - 1):
                     self.convs.append(GCNConv(hidden_dim, hidden_dim))
                self.fc = torch.nn.Linear(hidden_dim, output_dim)

            def forward(self, x, edge_index, num_layers):
                # One ReLU-activated conv per layer, then a linear projection.
                for i in range(num_layers):
                    x = F.relu(self.convs[i](x, edge_index))
                x = self.fc(x)
                return x

        input_dim = data.x.shape[1]
        hidden_dim = 64
        output_dim = 128

        device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
        model = GCN(input_dim, hidden_dim, output_dim, num_layers).to(device)
        data = data.to(device)

        # Forward pass
        agg_features = model(data.x, data.edge_index, num_layers)

        return agg_features
        
    def visualize_agg_graph(self, data, agg_features):
        """
        Visualize GCN-aggregated node features.

        Feature dimension 0 drives node color; dimension 1 (when present)
        drives node size.

        :param data: PyG Data providing ``edge_index``.
        :param agg_features: (N, D) tensor of aggregated node features.
        """
        G = nx.Graph()

        node_features = agg_features.detach().numpy()
        edge_index = data.edge_index.numpy()

        for i in range(node_features.shape[0]):
            G.add_node(i, feature=node_features[i])

        for edge in edge_index.T:
            G.add_edge(edge[0], edge[1])

        pos = nx.spring_layout(G, seed=42)

        # Change node_features[:, 0] below to visualize a different dimension.
        # Node color follows feature dimension 0.
        colors = node_features[:, 0]
        norm = plt.Normalize(colors.min(), colors.max())
        cmap = plt.cm.viridis

        # Node size follows feature dimension 1 when it exists.
        if node_features.shape[1] > 1:
            sizes = node_features[:, 1]
            sizes = normalize([sizes])[0] * 1500 + 300  # scale up, with a minimum size
        else:
            sizes = np.ones(node_features.shape[0]) * 500  # fallback: uniform size 500

        # Bug fix: create an explicit Axes and attach the colorbar to it —
        # plt.colorbar(sm, label=...) without ax fails on modern matplotlib
        # ("unable to determine Axes"); this also matches
        # NmapGraphProcessorJSON.visualize_agg_graph.
        fig, ax = plt.subplots(figsize=(12, 12))
        nx.draw(
            G, pos,
            with_labels=True,
            node_size=sizes,
            node_color=[cmap(norm(color)) for color in colors],
            font_size=8,
            font_weight='bold',
            edge_color='gray',
            ax=ax
        )
        sm = plt.cm.ScalarMappable(cmap=cmap, norm=norm)
        sm.set_array([])
        plt.colorbar(sm, ax=ax, label="Node Feature Value (Dimension 0)")
        plt.title("Graph Visualization After GCN Aggregation", fontsize=16)
        plt.show(block=True)



if __name__ == '__main__':
    # Input scan file; swap in your own path as needed.
    file = "./Topology_simulation/custom_hierarchical_graph_data_11.json"

    # Choose the processor class from the file extension.
    file_extension = os.path.splitext(file)[1].lower()
    if file_extension == '.xml':
        processor = NmapGraphProcessorXML()
    elif file_extension == '.json':
        processor = NmapGraphProcessorJSON()
    else:
        raise ValueError("Unsupported file format! Supported formats are .xml and .json")

    # Load the graph; node features = 59 (risky ports) + 6 (risky services) + extras.
    graph_data = processor.load_data(file)

    # Label nodes two different ways, each on an independent copy of the graph.
    d1_kmeans = auto_label.generate_y_kmeans(graph_data.clone())
    d2_thr = auto_label.generate_y_threshold(graph_data.clone())

    # Optional subgraph aggregation:
    # agg_features = processor.subgraph_aggregation_gcn(graph_data, num_layers=2)

    # Visualize the raw topology and both labelings.
    processor.visualize_graph(graph_data, '原始')
    processor.visualize_graph(d1_kmeans, 'kmeans')
    processor.visualize_graph(d2_thr, '阈值threshold')
    # processor.visualize_agg_graph(graph_data, agg_features)
