import torch
import torch.nn as nn
from torch_geometric.nn import GCNConv, SAGEConv, GATConv
from GAT1 import GAT
from RNN1 import RNN
import torch.nn.functional as F
# Select GPU 0 when CUDA is available, otherwise fall back to CPU.
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
#device=torch.device("cpu")
# Shape constants consumed by Model.init_hidden for the h0/c0 state tensors.
RNN_LAYER=1
RNN_OUT=1
class Model(nn.Module):
    """Six-layer fully-connected head: 1344 -> 2560 -> 256 -> 1280 -> 64 -> 32 -> out.

    Each stage applies normalize -> linear -> relu, including the final
    layer, so the network's outputs are non-negative.
    """

    def __init__(self, out):
        """Build the six linear layers.

        Args:
            out: width of the final layer (number of model outputs).
        """
        super(Model, self).__init__()
        # Kept as individual attributes (not nn.Sequential) so existing
        # state_dicts keyed on fc1..fc6 continue to load.
        self.fc1 = nn.Linear(1344, 2560)
        self.fc2 = nn.Linear(2560, 256)
        self.fc3 = nn.Linear(256, 1280)
        self.fc4 = nn.Linear(1280, 64)
        self.fc5 = nn.Linear(64, 32)
        self.fc6 = nn.Linear(32, out)

    def init_hidden(self):
        """Create random hidden/cell state tensors on the module-level device.

        NOTE(review): the (RNN_LAYER, RNN_OUT) shape is 2-D; nn.LSTM expects
        (num_layers, batch, hidden_size) states — confirm against the caller.
        """
        self.h0 = torch.randn(size=(RNN_LAYER, RNN_OUT)).to(device)
        self.c0 = torch.randn(size=(RNN_LAYER, RNN_OUT)).to(device)

    def init_GAT(self):
        """Initialise self.gat1 / self.gat2: xavier-normal weights, constant biases.

        NOTE(review): self.gat1 and self.gat2 are never assigned in __init__;
        calling this on a plain Model raises AttributeError — they must be
        created elsewhere (e.g. by a subclass) before use.
        """
        # gat1 gets 0.5 biases, gat2 gets 0 biases (preserved from original).
        self._init_params(self.gat1, bias_val=0.5)
        self._init_params(self.gat2, bias_val=0)

    @staticmethod
    def _init_params(module, bias_val):
        # Shared helper: xavier-normal for weight params, constant for biases.
        for name, param in module.named_parameters():
            if 'weight' in name:
                torch.nn.init.xavier_normal_(param.data)
            elif 'bias' in name:
                torch.nn.init.constant_(param.data, bias_val)

    def init_linear(self):
        """Xavier-normal init for every nn.Linear weight (biases left as-is)."""
        for m in self.modules():
            if isinstance(m, nn.Linear):
                torch.nn.init.xavier_normal_(m.weight.data, gain=1.0)

    def init_weight(self):
        """Standard-normal init for every nn.Linear weight (biases left as-is)."""
        for m in self.modules():
            if isinstance(m, nn.Linear):
                nn.init.normal_(m.weight.data)

    def forward(self, x):
        """Run x through the six normalize -> linear -> relu stages.

        Execution order is identical to the original unrolled code.

        NOTE(review): dim=0 normalizes across the batch dimension; for a
        (batch, features) input, dim=1/-1 is the usual choice — confirm intent.
        NOTE(review): the final relu clamps outputs to >= 0 — confirm this is
        appropriate for the training targets.
        """
        for layer in (self.fc1, self.fc2, self.fc3, self.fc4, self.fc5, self.fc6):
            x = F.normalize(x, dim=0)
            x = F.relu(layer(x))
        return x
