import torch
import torch.nn as nn
import torch.nn.functional as F
import math

class gcns_layer(nn.Module):
    """Single parameter-free GCN propagation step.

    Multiplies the (typically normalized) adjacency matrix into the
    current node-feature matrix: H_{l+1} = A @ H_l.
    """

    def __init__(self):
        super(gcns_layer, self).__init__()

    def forward(self, hl, adj):
        """Propagate features: returns adj @ hl (2-D dense matmul)."""
        return torch.mm(adj, hl)

class MLP(nn.Module):
    """Multi-layer perceptron with ReLU between layers.

    Layout by ``layers_num``:
      1 -> single Linear(insize, outsize)
      2 -> Linear(insize, hidsize) -> ReLU -> Linear(hidsize, outsize)
      k>2 -> first layer, (k-2) hidden Linear(hidsize, hidsize) layers,
             then the output layer, with ReLU after every non-final layer.

    Raises:
        ValueError: if layers_num < 1.
    """

    def __init__(self, insize, outsize, hidsize, layers_num):
        super(MLP, self).__init__()
        self.insize = insize
        self.outsize = outsize
        self.hidsize = hidsize
        self.layers_num = layers_num
        if layers_num < 1:
            raise ValueError("layers_num >= 1")
        elif layers_num == 1:
            self.firstlayer = nn.Linear(insize, outsize)
        elif layers_num == 2:
            self.firstlayer = nn.Linear(insize, hidsize)
            self.lastlayer = nn.Linear(hidsize, outsize)
        else:
            self.firstlayer = nn.Linear(insize, hidsize)
            self.lastlayer = nn.Linear(hidsize, outsize)
            # BUG FIX: a plain Python list does not register its Linear
            # layers as submodules, so their parameters were excluded from
            # model.parameters() (never optimized) and ignored by .to(device).
            # nn.ModuleList registers them properly.
            self.hidlayers = nn.ModuleList(
                nn.Linear(hidsize, hidsize) for _ in range(layers_num - 2)
            )

    def forward(self, inmat):
        """Apply the MLP to ``inmat`` and return the output tensor."""
        # layers_num < 1 is rejected in __init__, so no re-check is needed.
        if self.layers_num == 1:
            return self.firstlayer(inmat)
        hidmat = F.relu(self.firstlayer(inmat))
        for layer in self.hidlayers if self.layers_num > 2 else ():
            hidmat = F.relu(layer(hidmat))
        return self.lastlayer(hidmat)

class gcns(nn.Module):
    def __init__(self, insize, outsize, hidsize, hidlayernum):
        super(gcns, self).__init__()
        self.ly1 = gcns_layer()
        self.ly2 = gcns_layer()
        self.hidlayernum = hidlayernum
        if hidlayernum < 0:
            raise ValueError("hidlayernum < 0")
        else:
            self.hid = []
            for i in range(hidlayernum):
                self.hid.append(gcns_layer())
        self.mlp = MLP(insize, outsize, hidsize, hidlayernum + 2)


    def forward(self, feature, adj):
        hid_out = self.ly1(feature, adj)
        for i in range(self.hidlayernum):
            hid_out = self.hid[i](hid_out, adj)
        ly2_out = self.ly2(hid_out, adj)
        res = F.log_softmax(self.mlp(ly2_out), dim=1)
        return res

