import torch, time
from torch.nn import Linear
from torch_geometric.nn import GCNConv
from Visualize import visualize_embedding


# GCNConv
# softmax(A_hat * RELU(A_hat * X * W_0) * W_1)
# 1.  A_hat = N*N, X = N*C, so A_hat * X = N*C
# 2.  W_0 = C*H, so RELU(A_hat * X * W_0) = N*H
# 3.  W_1 = H*F, so A_hat * RELU(A_hat * X * W_0) * W_1 = N*F

class GCN(torch.nn.Module):
    """Three stacked GCNConv layers (num_features -> 4 -> 4 -> 2) with tanh
    activations, followed by a linear classifier on the 2-d embedding.

    forward() returns both the class logits and the 2-d embedding so the
    embedding can be visualized during training.
    """

    def __init__(self, dataset):
        super().__init__()
        # Fixed seed so repeated runs start from identical weights.
        torch.manual_seed(1234)
        self.conv1 = GCNConv(dataset.num_features, 4)
        self.conv2 = GCNConv(4, 4)
        self.conv3 = GCNConv(4, 2)
        # Final fully-connected layer: 2-d embedding -> class scores.
        self.classifier = Linear(2, dataset.num_classes)

    def forward(self, x, edge_index):
        """Return (logits, embedding) for node features x and edge_index."""
        embedding = x
        # Each graph convolution is followed by a tanh non-linearity.
        for conv in (self.conv1, self.conv2, self.conv3):
            embedding = conv(embedding, edge_index).tanh()

        out = self.classifier(embedding)
        return out, embedding


from torch_geometric.datasets import KarateClub

# Zachary's karate club graph (per the shape notes below: 34 nodes,
# 34-d one-hot node features).
dataset = KarateClub()
model = GCN(dataset)
print(model)

# Snapshot the embedding once BEFORE training, i.e. with the randomly
# initialized weights, for comparison against later epochs.
_, h = model(dataset.x, dataset.edge_index)
visualize_embedding(h, color=dataset.y)

criterion = torch.nn.CrossEntropyLoss()
optimizer = torch.optim.Adam(model.parameters(), lr=0.01)

"""
#data = KarateClub()
#print(data.x.shape, data.edge_index.shape, data.y.shape, data.train_mask.shape)
#torch.Size([34, 34]) torch.Size([2, 156]) torch.Size([34]) torch.Size([34])
"""


def train(data):
    """Run a single optimization step on the labelled nodes.

    Uses the module-level `model`, `optimizer`, and `criterion`.
    Returns (loss, embedding) from the forward pass.
    """
    optimizer.zero_grad()
    logits, embedding = model(data.x, data.edge_index)
    # Only the masked (labelled) nodes contribute to the loss — this is
    # the semi-supervised aspect of the demo.
    mask = data.train_mask
    loss = criterion(logits[mask], data.y[mask])
    loss.backward()
    optimizer.step()
    return loss, embedding


def epoch_train(data, epochs=1, viz_every=40):
    """Train the global `model` on `data` for `epochs` iterations.

    Every `viz_every` epochs the current 2-d embedding is plotted so the
    clustering progress can be watched.

    NOTE(review): the original hard-coded `range(1)` — together with the
    `epoch % 40` visualization cadence this looks like a debug leftover;
    pass e.g. epochs=401 to actually train. The default of 1 preserves
    the original behaviour for existing callers.
    """
    for epoch in range(epochs):
        loss, h = train(data)
        if epoch % viz_every == 0:
            visualize_embedding(h, color=data.y, epoch=epoch, loss=loss)
            time.sleep(0.3)  # brief pause so successive plots are visible


# With train_mask, only 4 of the 34 nodes carry class labels, yet the
# task is learned well — a nice demonstration of semi-supervised learning.
epoch_train(dataset)
