#!/usr/bin/env python
# -*- coding:utf-8 -*-
# @Time    : 2022/3/20 1:28 PM
# @Author  : WangZhixing
import argparse
import os
import shutil
import sys

from ProcessData.Process import SymbolVector, FileVector

curPath = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
rootPath = os.path.split(curPath)[0]
sys.path.append(rootPath)

import argparse
from sklearn.cluster import KMeans
from Visualization.Visualize import visualize

from Metric import Metric
from Output.output_mehod.result2rsf_file import result2rsf_file
from ProcessData import DependenceGraph,FileVectorDependenceGraph
from Utils import ConfigFile
from Model.Module.Gcnconv1 import Gcnconv1
from Model.Module.Gcnconv2 import Gcnconv2
from Model.Module.Gatconv1 import Gatconv1
from Model.Module.Gatconv2 import Gatconv2
import torch
from Model.Module.GraphSageconv1 import GraphSageconv1
from Model.Module.GraphSageconv2 import GraphSageconv2


def train(model, loader, optimizer, data, device):
    """Run one optimization epoch over *loader*; return the mean batch loss.

    Args:
        model: module exposing ``train()``, ``__call__(x, edge_index, edge_attr)``
            and a node2vec-style ``loss(out, pos_rw, neg_rw)``.
        loader: sized iterable yielding (positive_walks, negative_walks) batches.
        optimizer: torch optimizer over ``model``'s parameters.
        data: graph object with ``x``, ``edge_index`` and ``edge_attr``.
        device: device the walk batches are moved onto before the loss.
    """
    model.train()
    running_loss = 0.0
    for positive_walks, negative_walks in loader:
        optimizer.zero_grad()
        embeddings = model(data.x, data.edge_index, data.edge_attr)
        batch_loss = model.loss(
            embeddings, positive_walks.to(device), negative_walks.to(device)
        )
        batch_loss.backward()
        optimizer.step()
        running_loss += batch_loss.item()
    return running_loss / len(loader)


def NodeGCN(data, **kwarg):
    """Embed the graph nodes with a GraphSAGE model and cluster them with KMeans.

    Args:
        data: graph object with ``x``, ``edge_index``, ``edge_attr`` and
            ``num_features`` (torch-geometric ``Data``-like — TODO confirm).
        **kwarg: configuration; must provide ``device``, ``train_epoch`` and
            ``cluster``. Random-walk hyper-parameters are overwritten below.

    Returns:
        tuple: ``(z, preds)`` — the node-embedding tensor and one cluster
        label per node.
    """
    # Random-walk / sampler hyper-parameters are fixed here, overriding any
    # values from the config file.
    kwarg['num_features'] = data.num_features
    kwarg['walk_length'] = 10
    kwarg['context_size'] = 10
    kwarg['walks_per_node'] = 10
    kwarg['num_negative_samples'] = 1
    kwarg['num_nodes'] = None
    kwarg['p'] = 1
    kwarg['q'] = 1
    kwarg['sparse'] = None
    kwarg['model_out_layer'] = 20

    model = GraphSageconv1(edge_index=data.edge_index, **kwarg).to(kwarg['device'])

    loader = model.loader(batch_size=128, shuffle=False)
    optimizer = torch.optim.Adam(model.parameters(), lr=0.01, weight_decay=5e-4)
    # NOTE(review): range(1, N) runs N-1 epochs — kept for backward
    # compatibility; confirm whether `train_epoch` is meant to be inclusive.
    for epoch in range(1, kwarg['train_epoch']):
        train(model, loader, optimizer, data, kwarg['device'])

    # Switch to eval mode so dropout/batchnorm-style layers are deterministic
    # during the final embedding pass.
    model.eval()
    with torch.no_grad():
        z = model(data.x, data.edge_index, data.edge_attr)

    # sklearn requires a host-side NumPy array: a CUDA tensor would raise
    # here, and even on CPU the conversion should be explicit.
    features = z.detach().cpu().numpy()
    kmeans = KMeans(n_clusters=kwarg['cluster'], random_state=0).fit(features)
    preds = kmeans.predict(features)
    return z, preds


if __name__ == '__main__':
    # Read the experiment configuration file passed via -c/--config.
    arg_parser = argparse.ArgumentParser(description="ReadConfig")
    arg_parser.add_argument("-c", "--config", type=str)
    cli_args = arg_parser.parse_args()
    config = ConfigFile(cli_args.config).ReadConfig()

    # if config["data_type"] == "symbol":
    #     FileVector(config['root'])

    # Drop any stale processed dataset so the graph is rebuilt from raw files.
    processed_dir = os.path.join(config["root"], "processed")
    if os.path.exists(processed_dir):
        shutil.rmtree(processed_dir)

    # data = FileVectorDependenceGraph(config["root"]).data
    graph_data = DependenceGraph(config["root"]).data
    embedding, preds = NodeGCN(graph_data, **config)

    # Persist clustering as an .rsf file, score it, and plot the embedding.
    result2rsf_file(config["root"], preds, config["outfile_path"])
    Metric(
        config["project"],
        config["outfile_path"],
        config["ground_path"],
        dep_file=os.path.join(config["root"], "raw", "edge.rsf"),
    )
    visualize(embedding, preds)
