import torch
import torch.nn as nn
import torch.nn.functional as F
import math

class gin_layer(nn.Module):
    """One GIN aggregation step: h' = MLP((1 + eps) * h + A @ h).

    Args:
        mlp: module applied to the aggregated node features.
        eps: initial value of the learnable self-weight scalar.
    """

    def __init__(self, mlp, eps=0):
        super().__init__()
        self.mlp = mlp
        # eps is learnable, initialized from the given value.
        self.eps = nn.Parameter(torch.FloatTensor([eps]))

    def forward(self, hl, adj):
        """Aggregate neighbors via adj, re-weight self features, apply MLP."""
        neighbor_sum = torch.mm(adj, hl)
        aggregated = hl * (1 + self.eps) + neighbor_sum
        return self.mlp(aggregated)

class MLP(nn.Module):
    """Multi-layer perceptron with ReLU between layers (no final activation).

    Args:
        insize: input feature dimension.
        outsize: output feature dimension.
        hidsize: hidden-layer width (unused when layers_num == 1).
        layers_num: total number of Linear layers; must be >= 1.

    Raises:
        ValueError: if layers_num < 1.
    """

    def __init__(self, insize, outsize, hidsize, layers_num):
        super(MLP, self).__init__()
        self.insize = insize
        self.outsize = outsize
        self.hidsize = hidsize
        self.layers_num = layers_num
        if layers_num < 1:
            raise ValueError("layers_num >= 1")
        # BUGFIX: hidden layers must live in an nn.ModuleList, not a plain
        # Python list — otherwise their parameters are not registered, so
        # they are skipped by .parameters() (never optimized), .to(device),
        # and state_dict(). Always present (possibly empty) so forward()
        # can iterate it uniformly.
        self.hidlayers = nn.ModuleList()
        if layers_num == 1:
            # Single layer maps input directly to output.
            self.firstlayer = nn.Linear(insize, outsize)
        else:
            self.firstlayer = nn.Linear(insize, hidsize)
            self.lastlayer = nn.Linear(hidsize, outsize)
            for _ in range(layers_num - 2):
                self.hidlayers.append(nn.Linear(hidsize, hidsize))

    def forward(self, inmat):
        """Apply the MLP; ReLU after every layer except the last.

        layers_num was validated in __init__, so no re-check is needed here.
        """
        if self.layers_num == 1:
            return self.firstlayer(inmat)
        hidmat = F.relu(self.firstlayer(inmat))
        for layer in self.hidlayers:
            hidmat = F.relu(layer(hidmat))
        return self.lastlayer(hidmat)


class gin(nn.Module):
    """Graph Isomorphism Network for node classification.

    Stacks: an input GIN layer, `hidlayernum` hidden GIN layers, and an
    output GIN layer followed by log-softmax over classes.

    Args:
        insize: input node-feature dimension.
        outsize: number of output classes.
        hidsize: hidden node-feature dimension between GIN layers.
        mlp_layer_num: number of Linear layers in each per-layer MLP.
        mlp_layer_size: hidden width of each per-layer MLP.
        hidlayernum: number of hidden GIN layers; must be >= 0.

    Raises:
        ValueError: if hidlayernum < 0.
    """

    def __init__(self, insize, outsize, hidsize, mlp_layer_num, mlp_layer_size, hidlayernum):
        super(gin, self).__init__()
        self.mlp1 = MLP(insize=insize, outsize=hidsize, hidsize=mlp_layer_size, layers_num=mlp_layer_num)
        self.mlp2 = MLP(insize=hidsize, outsize=outsize, hidsize=mlp_layer_size, layers_num=mlp_layer_num)
        self.ly1 = gin_layer(mlp=self.mlp1)
        self.ly2 = gin_layer(mlp=self.mlp2)
        self.hidlayernum = hidlayernum
        if hidlayernum < 0:
            raise ValueError("hidlayernum < 0")
        # BUGFIX: the hidden GIN layers must live in nn.ModuleList, not
        # plain Python lists — otherwise their parameters (each layer's MLP
        # weights and its learnable eps) are unregistered: never optimized,
        # not moved by .to(device), and absent from state_dict().
        self.hid = nn.ModuleList()
        self.hidmlp = nn.ModuleList()
        for _ in range(hidlayernum):
            thismlp = MLP(insize=hidsize, outsize=hidsize, hidsize=mlp_layer_size, layers_num=mlp_layer_num)
            self.hidmlp.append(thismlp)
            self.hid.append(gin_layer(mlp=thismlp))

    def forward(self, feature, adj):
        """Run the GIN stack.

        Args:
            feature: node-feature matrix; mm with adj implies shape
                (num_nodes, insize).
            adj: dense adjacency matrix, (num_nodes, num_nodes).

        Returns:
            Log-probabilities per node, shape (num_nodes, outsize).
        """
        hid_out = F.relu(self.ly1(feature, adj))
        for layer in self.hid:
            hid_out = F.relu(layer(hid_out, adj))
        return F.log_softmax(self.ly2(hid_out, adj), dim=1)
