import torch
import torch.nn as nn
from torch_geometric.nn import GCNConv, SAGEConv, GATConv
import torch.nn.functional as F
#device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
device=torch.device("cpu")
from Network import SLOT_NUM

class Model(nn.Module):
    """Two-layer GAT encoder followed by a two-layer MLP head that scores a
    whole graph with a single scalar.

    Assumes every input graph has exactly ``node_size`` nodes, each with
    ``SLOT_NUM`` features: the flattened node embeddings feed a fixed-width
    linear layer.
    """

    def __init__(self, node_size):
        # node_size: number of nodes per input graph (fixes fc1's input width).
        hidden1 = 50   # per-head output width of the first GAT layer
        head = 6       # number of attention heads in the first GAT layer
        hidden2 = 10   # output width of the second GAT layer
        hidden3 = 100  # width of the hidden fully-connected layer
        super(Model, self).__init__()
        self.gat1 = GATConv(SLOT_NUM, hidden1, heads=head)
        # GATConv concatenates heads by default, so gat2 sees hidden1*head features.
        self.gat2 = GATConv(hidden1 * head, hidden2)
        self.fc1 = nn.Linear(hidden2 * node_size, hidden3)
        self.fc2 = nn.Linear(hidden3, 1)
        #self.param_init()

    def param_init(self):
        """Optionally re-initialize parameters: Xavier-normal weights and
        constant-0.5 biases for both GAT layers, Xavier-normal weights for
        all linear layers. (Disabled by default; see the commented-out call
        in __init__.)"""
        # Both GAT layers get identical treatment — one loop instead of two copies.
        for layer in (self.gat1, self.gat2):
            for name, param in layer.named_parameters():
                if 'weight' in name:
                    torch.nn.init.xavier_normal_(param.data)
                elif 'bias' in name:
                    torch.nn.init.constant_(param.data, 0.5)
        for m in self.modules():
            if isinstance(m, nn.Linear):
                torch.nn.init.xavier_normal_(m.weight.data, gain=1.0)

    def forward(self, x, adj):
        """Score one graph.

        Args:
            x: node feature matrix, shape (node_size, SLOT_NUM); array-like
               or tensor.
            adj: edge index in COO format, shape (2, num_edges); array-like
               or tensor of integer node indices.

        Returns:
            1-D tensor holding a single scalar score. ``x.view(-1)`` flattens
            all node embeddings into one vector, so this handles exactly one
            graph per call.
        """
        # as_tensor accepts arrays AND tensors without an unnecessary copy
        # (torch.FloatTensor(x) fails when x is already a tensor).
        x = torch.as_tensor(x, dtype=torch.float32, device=device)
        # edge_index must be int64 (long): torch_geometric rejects int32
        # indices, so IntTensor here was a latent bug.
        adj = torch.as_tensor(adj, dtype=torch.long, device=device)
        # torch.sigmoid replaces the deprecated F.sigmoid.
        x = torch.sigmoid(self.gat1(x, adj))
        x = torch.sigmoid(self.gat2(x, adj))
        x = x.view(-1)  # (node_size, hidden2) -> (node_size * hidden2,)
        x = torch.sigmoid(self.fc1(x))
        x = self.fc2(x)
        return x