from torch import nn
from torch.nn import functional as F
from torch.nn import BatchNorm1d
from torch_geometric.nn.conv import GCNConv
from models import gcn_conv


class nps_graph_representation(nn.Module):  # 719
    """Graph encoder for the NPS data: turns a batched graph into a
    (batch, 1, 32, 32) image-like representation.

    Assumes each graph in the batch has exactly 64 nodes with 152-dim
    node features (required by ``bn_feat`` and ``trans_layer``).
    """

    def __init__(self):
        super(nps_graph_representation, self).__init__()
        self.bn_feat = BatchNorm1d(152)  # normalizes 152-dim node features
        # Project-local GCNConv variant; gfn=True selects its feature-only
        # ("graph-free") mode — see models.gcn_conv for semantics.
        self.conv_feat = gcn_conv.GCNConv(152, 128, gfn=True)
        # 64 nodes per graph x 128 conv features, projected to a 32x32 map.
        self.trans_layer = nn.Linear(in_features=64 * 128, out_features=32 * 32)

    def get_rep(self, data_x):
        """Encode a batched graph.

        Args:
            data_x: PyG-style batch with ``.x`` (or ``.feat``) node features
                of shape (num_nodes, 152) and ``.edge_index``; assumes 64
                nodes per graph — TODO confirm against the dataset loader.

        Returns:
            Tensor of shape (batch, 1, 32, 32).
        """
        x = data_x.x if data_x.x is not None else data_x.feat
        edge_index = data_x.edge_index

        x = self.bn_feat(x)
        x = F.relu(self.conv_feat(x, edge_index))

        # Regroup node embeddings per graph: (num_nodes, 128) ->
        # (batch, 64 * 128). Batch size is inferred with -1 instead of
        # being hard-coded to 64, so any batch size works.
        x = x.view(-1, 64 * 128)
        x = self.trans_layer(x)

        # (batch, 32, 32) -> add channel dim -> (batch, 1, 32, 32).
        x = x.view(-1, 32, 32).unsqueeze(1)
        return x


class tff_graph_representation(nn.Module):  # TFF
    """Graph encoder for the TFF data: turns a batched graph into a
    (batch, 1, 32, 32) image-like representation.

    Assumes each graph in the batch has exactly 24 nodes with 152-dim
    node features (required by ``bn_feat`` and ``trans_layer``).
    """

    def __init__(self):
        super(tff_graph_representation, self).__init__()
        self.bn_feat = BatchNorm1d(152)  # normalizes 152-dim node features
        # BUG FIX: this previously constructed torch_geometric's GCNConv,
        # which does not accept a `gfn` keyword (MessagePassing raises
        # TypeError on unknown kwargs). Use the project-local variant,
        # consistent with nps_graph_representation above.
        self.conv_feat = gcn_conv.GCNConv(152, 128, gfn=True)
        # 24 nodes per graph x 128 conv features, projected to a 32x32 map.
        self.trans_layer = nn.Linear(in_features=24 * 128, out_features=32 * 32)

    def get_rep(self, data_x):
        """Encode a batched graph.

        Args:
            data_x: PyG-style batch with ``.x`` (or ``.feat``) node features
                of shape (num_nodes, 152) and ``.edge_index``; assumes 24
                nodes per graph — TODO confirm against the dataset loader.

        Returns:
            Tensor of shape (batch, 1, 32, 32).
        """
        x = data_x.x if data_x.x is not None else data_x.feat
        edge_index = data_x.edge_index

        x = self.bn_feat(x)
        x = F.relu(self.conv_feat(x, edge_index))

        # Regroup node embeddings per graph: (num_nodes, 128) ->
        # (batch, 24 * 128). Batch size is inferred with -1 instead of
        # being hard-coded to 64, so any batch size works.
        x = x.view(-1, 24 * 128)
        x = self.trans_layer(x)

        # (batch, 32, 32) -> add channel dim -> (batch, 1, 32, 32).
        x = x.view(-1, 32, 32).unsqueeze(1)
        return x
