import torch
import torch.nn.functional as F
from torch_geometric.datasets import *
from torch_geometric.nn import GCNConv # noqa

from myGCNConv import GCNConv as myConv, ChGCNConv as ChConv, GCOOConv
from dgl.nn import GraphConv


class AbstractGCN(torch.nn.Module):
    """Common base for the GCN variants in this file.

    Exposes a class-level ``hidden_size`` shared by every subclass; mutating
    it on this base (as the ``__main__`` block does) changes the hidden width
    of any subclass constructed afterwards, since subclasses read it through
    attribute lookup.
    """

    # Width of the single hidden layer used by all concrete nets.
    hidden_size = 16

    def __init__(self):
        super().__init__()

class Net(AbstractGCN):
    """Two-layer GCN built from torch_geometric's ``GCNConv``.

    Both convolutions cache their normalized adjacency (``cached=True``),
    so the model assumes a fixed graph across forward passes.
    """

    def __init__(self, input_dim, output_dim, do_normalize=False):
        super(Net, self).__init__()
        width = __class__.hidden_size
        self.conv1 = GCNConv(input_dim, width, cached=True,
                             normalize=do_normalize)
        self.conv2 = GCNConv(width, output_dim, cached=True,
                             normalize=do_normalize)

    def forward(self, edge_index, edge_attr, x):
        """Return raw per-node scores (no softmax applied)."""
        hidden = F.relu(self.conv1(x, edge_index, edge_attr))
        hidden = F.dropout(hidden, training=self.training)
        return self.conv2(hidden, edge_index, edge_attr)


class DGLNet(AbstractGCN):
    """Two-layer GCN built on DGL's ``GraphConv``."""

    def __init__(self, input_dim, output_dim, do_normalize):
        super(DGLNet, self).__init__()
        width = __class__.hidden_size
        # allow_zero_in_degree suppresses DGL's error for isolated nodes.
        self.conv1 = GraphConv(input_dim, width, norm=do_normalize,
                               allow_zero_in_degree=True)
        self.conv2 = GraphConv(width, output_dim, norm=do_normalize,
                               allow_zero_in_degree=True)

    def forward(self, graph, x, edge_attr=None):
        """Return raw per-node scores (no softmax applied)."""
        hidden = F.relu(self.conv1(graph, x, edge_weight=edge_attr))
        hidden = F.dropout(hidden, training=self.training)
        return self.conv2(graph, hidden, edge_weight=edge_attr)


class MyNet(AbstractGCN):
    """Two-layer GCN using the custom ``myConv`` layer.

    The class-level ``run_type`` flag selects the 'reverse' execution path;
    it must be set *before* instantiation, because ``__init__`` reads it
    when constructing the layers.
    """

    # Execution mode consulted at construction time ('reverse' enables
    # use_reverse on both convolutions).
    run_type = 'reverse'

    @classmethod
    def set_runtype(cls, string):
        """Set the shared run mode on MyNet.

        Fixed: the original definition had no ``self``/``cls`` parameter,
        so calling it on an *instance* silently passed the instance as
        ``string``. ``MyNet.set_runtype('x')`` keeps working unchanged.
        """
        # Write through __class__ (i.e. MyNet itself), as the original did,
        # so subclass calls update MyNet.run_type instead of shadowing it.
        __class__.run_type = string

    def __init__(self, input_dim, output_dim, do_normalize=True):
        super(MyNet, self).__init__()
        # Evaluate the flag once so both layers are built consistently.
        use_reverse = __class__.run_type == 'reverse'
        self.conv1 = myConv(input_dim, __class__.hidden_size,
                            normalize=do_normalize, use_reverse=use_reverse)
        self.conv2 = myConv(__class__.hidden_size, output_dim,
                            normalize=do_normalize, use_reverse=use_reverse)

    def forward(self, graph, x):
        """Return raw per-node scores (no softmax applied)."""
        x = F.relu(self.conv1(graph, x))
        x = F.dropout(x, training=self.training)
        x = self.conv2(graph, x)
        return x


class MyNetGCOO(AbstractGCN):
    """Two-layer GCN using the COO-format ``GCOOConv`` layer."""

    def __init__(self, input_dim, output_dim, do_normalize):
        # Fixed: the original called super(MyNet, self).__init__(), which
        # raises TypeError at construction because a MyNetGCOO instance is
        # not an instance of MyNet.
        super().__init__()
        self.conv1 = GCOOConv(input_dim, __class__.hidden_size,
                              normalize=do_normalize)
        self.conv2 = GCOOConv(__class__.hidden_size, output_dim,
                              normalize=do_normalize)

    def forward(self, graph, x):
        """Return raw per-node scores (no softmax applied)."""
        x = F.relu(self.conv1(graph, x))
        x = F.dropout(x, training=self.training)
        x = self.conv2(graph, x)
        return x
        

class MyNetC(AbstractGCN):
    """Two-layer GCN using the custom Chebyshev-style ``ChConv`` layer."""

    def __init__(self, input_dim, output_dim):
        super(MyNetC, self).__init__()
        width = __class__.hidden_size
        self.conv1 = ChConv(input_dim, width)
        self.conv2 = ChConv(width, output_dim)

    def forward(self, graph, x):
        """Return raw per-node scores (no softmax applied).

        NOTE(review): ``ChConv`` is called as (features, graph) — the
        reverse of the (graph, features) order used by the other nets in
        this file; confirm against the ChConv definition.
        """
        hidden = F.relu(self.conv1(x, graph))
        hidden = F.dropout(hidden, training=self.training)
        return self.conv2(hidden, graph)

if __name__ == '__main__':
    # Override the shared hidden width on the base class before building a
    # model; MyNetC reads it via attribute lookup at construction time.
    AbstractGCN.hidden_size = 17
    model = MyNetC(10, 10)
    print(model)