import torch
import torch.nn as nn
import torch.nn.functional as F

import dgl
import dgl.function as fn
from dgl.nn.pytorch import GraphConv

"""
    GCN: Graph Convolutional Networks
    Thomas N. Kipf, Max Welling, Semi-Supervised Classification with Graph Convolutional Networks (ICLR 2017)
    http://arxiv.org/abs/1609.02907
"""


class GCNLayer(nn.Module):
    """A single Graph Convolutional Network layer (Kipf & Welling, ICLR 2017).

    Wraps DGL's built-in ``GraphConv`` and optionally applies, in order:
    batch normalization, an activation function, a residual connection,
    and dropout.

    Args:
        in_dim: input feature dimension.
        out_dim: output feature dimension.
        activation: callable applied to the convolved features, or a falsy
            value to skip activation.
        dropout: dropout probability applied to the layer output.
        batch_norm: if truthy, apply ``BatchNorm1d`` after the convolution.
        residual: if True, add the input features to the output
            (automatically disabled when ``in_dim != out_dim``).
        dgl_builtin: stored for API compatibility; not used by this layer's
            logic (the DGL built-in ``GraphConv`` is always used here).
    """

    def __init__(self, in_dim, out_dim, activation, dropout, batch_norm, residual=False, dgl_builtin=False):
        super().__init__()
        # Input / output feature dimensions (also reported by __repr__).
        self.in_channels = in_dim
        self.out_channels = out_dim
        # Whether to apply batch normalization after the convolution.
        self.batch_norm = batch_norm
        # Whether to add a residual (skip) connection.
        self.residual = residual
        # Kept for interface compatibility; unused in this implementation.
        self.dgl_builtin = dgl_builtin

        # A residual connection is only shape-compatible when the input and
        # output dimensions match; otherwise silently disable it.
        if in_dim != out_dim:
            self.residual = False

        # Created unconditionally (even when batch_norm is falsy) so that
        # state_dict keys stay stable regardless of the flag.
        self.batchnorm_h = nn.BatchNorm1d(out_dim)
        self.activation = activation
        self.dropout = nn.Dropout(dropout)

        # `allow_zero_in_degree` was added in DGL 0.5. The original check
        # `dgl.__version__ < "0.5"` compared version strings
        # lexicographically, which is wrong for e.g. "0.10" ("0.10" < "0.5"
        # is True), so newer DGL would miss the flag and raise on graphs
        # containing 0-in-degree nodes. EAFP sidesteps version parsing
        # entirely: try the keyword, fall back if this DGL doesn't know it.
        try:
            self.conv = GraphConv(in_dim, out_dim, allow_zero_in_degree=True)
        except TypeError:
            self.conv = GraphConv(in_dim, out_dim)

    def forward(self, g, feature):
        """Run one GCN layer.

        Args:
            g: the DGL graph over which to convolve.
            feature: node feature tensor; first dimension is the node axis
                (exact shape contract comes from ``GraphConv``).

        Returns:
            The transformed node feature tensor of width ``out_channels``.
        """
        h_in = feature  # saved for the residual connection
        h = self.conv(g, feature)

        if self.batch_norm:
            h = self.batchnorm_h(h)

        if self.activation:
            h = self.activation(h)

        if self.residual:
            h = h_in + h  # residual (skip) connection

        h = self.dropout(h)
        return h

    def __repr__(self):
        """Return a summary string with the layer's key configuration."""
        return '{}(in_channels={}, out_channels={}, residual={})'.format(self.__class__.__name__,
                                                                         self.in_channels,
                                                                         self.out_channels,
                                                                         self.residual)