import torch
from torch_geometric.data import Data
from torch_geometric.nn import GCNConv
import torch.nn.functional as F
from torch_geometric.nn import global_mean_pool


class VersionedDependencyGraph:
    """Snapshot builder for a versioned dependency system as a PyG graph.

    Holds the static dependency/jump/upgrade-time tensors and, for a given
    per-component version state, produces a single-graph ``Data`` object with
    node features, dependency edges, and required-version edge features.
    """

    def __init__(self, dependency_matrix, jump_matrix, upgrade_time, device):
        """
        Args:
            dependency_matrix: array-like, shape (N, N, V).
                ``dependency_matrix[i, j, v]`` is the version of component j
                required when component i is at version v; a negative value
                means "no dependency" (inferred from the ``>= 0`` edge mask
                in ``build_graph`` — TODO confirm with data producer).
            jump_matrix: array-like, shape (N, V, V). Per-component
                version-jump capability matrix.
            upgrade_time: array-like, shape (N, V, V). Per-component upgrade
                durations between version pairs.
            device: torch device on which all derived tensors live.
        """
        self.dep  = torch.tensor(dependency_matrix, dtype=torch.long, device=device)
        self.jump = torch.tensor(jump_matrix,       dtype=torch.float, device=device)
        self.ut   = torch.tensor(upgrade_time,      dtype=torch.float, device=device)
        self.N, _, self.V = self.dep.shape
        self.device = device

    def build_graph(self, cur_versions):
        """Build a single-graph PyG ``Data`` for the current version state.

        Args:
            cur_versions: LongTensor of shape (N,), the current version index
                of each component.

        Returns:
            Data with
              x:          [N, 2V+1] node features
                          (version one-hot ‖ jump row ‖ mean upgrade time)
              edge_index: [2, E]    edge i→j wherever dep[i, j, v_i] >= 0
              edge_attr:  [E, 1]    required version dep[i, j, v_i], as float
              batch:      [N]       all zeros (every node belongs to graph 0)
        """
        rows = torch.arange(self.N, device=self.device)

        # ---- 1) node features ----
        # (a) one-hot of the current version
        v_oh = torch.nn.functional.one_hot(cur_versions, num_classes=self.V).float()  # [N, V]
        # (b) jump capability row jump[i, v_i, :]
        jm = self.jump[rows, cur_versions]                                            # [N, V]
        # (c) mean upgrade duration over ut[i, v_i, :]
        avg_ut = self.ut[rows, cur_versions].mean(dim=1, keepdim=True)                # [N, 1]
        x = torch.cat([v_oh, jm, avg_ut], dim=1)                                      # [N, 2V+1]

        # ---- 2) edges and edge features, fully vectorized ----
        # Replaces the former per-node Python loop: deps[i, j] = dep[i, j, v_i]
        # (advanced indices around a slice move their dim to the front → [N, N]).
        deps = self.dep[rows, :, cur_versions]                                        # [N, N]
        # nonzero walks the mask in row-major order, i.e. the same (i, then j
        # ascending) order the old loop produced.
        src_idx, dst_idx = (deps >= 0).nonzero(as_tuple=True)
        edge_index = torch.stack([src_idx, dst_idx], dim=0)                           # [2, E]
        edge_attr  = deps[src_idx, dst_idx].unsqueeze(1).float()                      # [E, 1]
        # Note: when no dependency is active, these are (2, 0) / (0, 1) tensors,
        # identical to the old explicit empty-edge branch.

        # ---- 3) batch vector: all zeros => single graph ----
        batch = torch.zeros(self.N, dtype=torch.long, device=self.device)

        return Data(x=x, edge_index=edge_index, edge_attr=edge_attr, batch=batch)
    



class GNNEncoder(torch.nn.Module):
    """Two-layer GCN encoder that maps one graph to a fixed-size vector."""

    def __init__(self, in_dim, hid_dim):
        super().__init__()
        # Stacked graph convolutions: in_dim -> hid_dim -> hid_dim.
        self.conv1 = GCNConv(in_dim, hid_dim)
        self.conv2 = GCNConv(hid_dim, hid_dim)

    def forward(self, data: Data):
        """Encode ``data`` into a single (hid_dim,) vector.

        Expects:
            data.x:          [N, in_dim] node features
            data.edge_index: [2, E] edge indices
            data.batch:      [N] graph-membership vector (all nodes map to
                             graph 0, i.e. a single graph)
        """
        features, edges, membership = data.x, data.edge_index, data.batch

        hidden = F.relu(self.conv1(features, edges))
        hidden = F.relu(self.conv2(hidden, edges))

        # Mean-pool node embeddings into a graph-level representation;
        # shape is [num_graphs, hid_dim] with num_graphs == 1 here.
        pooled = global_mean_pool(hidden, membership)
        # Drop the singleton graph dimension -> (hid_dim,).
        return pooled.squeeze(0)