import sys

import networkx.classes.graph
import numpy as np
from matplotlib import pyplot as plt
import networkx as nx
from graphdot import Graph
from graphdot.kernel.marginalized import MarginalizedGraphKernel
from graphdot.microkernel import (
    TensorProduct,
    SquareExponential,
    KroneckerDelta,
    Constant
)

# The first CLI argument is expected to be a stringified Python list of
# gpickle paths, e.g. "[path_a, path_b, path_c]".
graphlist = {}
readfile = list(sys.argv[1:])

# Drop the surrounding "[" and "]", remove all spaces, and split on commas
# to recover the individual file paths.
readstr = readfile[0][1:-1]
readstr1 = readstr.replace(" ", "")
readlist = readstr1.split(",")
#################################################################################################
# Load each pickled networkx graph, keyed by its position in the path list.
# NOTE(review): nx.read_gpickle was removed in networkx 3.0 — this assumes
# networkx 2.x; confirm the pinned version.
for idx, path in enumerate(readlist):
    graphlist[idx] = nx.read_gpickle(path)
#################################################################################################
#################################################################################################
# MNIST
# graphlist[1] = nx.read_gpickle(readlist[0])
# #Tiny
# graphlist[0] = nx.read_gpickle(readlist[1])
# #FER
# graphlist[2] = nx.read_gpickle(readlist[2])
# #inception
# graphlist[3] = nx.read_gpickle(readlist[4])
# #Desene
# graphlist[4] = nx.read_gpickle(readlist[3])
# #squeezenet
# graphlist[5] = nx.read_gpickle(readlist[5])
#####################################################################################

# Define the node and edge microkernels for the marginalized graph kernel.
# Node similarity is a tensor product of a squared-exponential kernel on the
# "radius" attribute (length scale 0.5) and a Kronecker delta on the
# "category" attribute (mismatch value 0.07) — assumes every loaded graph's
# nodes carry "radius" and "category" attributes; TODO confirm against the
# gpickle producers.
knode = TensorProduct(radius=SquareExponential(0.5),
                      category=KroneckerDelta(0.07))

# Edges contribute a constant similarity of 1.0 (edge attributes ignored).
kedge = Constant(1.0)

# Compose the marginalized graph kernel; q=0.05 is the random-walk stopping
# probability, so walks are long and structure-sensitive.
mlgk = MarginalizedGraphKernel(knode, kedge, q=0.05)

#################################################################################################
# Convert every loaded networkx graph to a graphdot Graph, then evaluate the
# full pairwise kernel matrix R (n x n, unnormalized).
graphdot_graphs = [Graph.from_networkx(graphlist[k]) for k in range(len(readlist))]
R = mlgk(graphdot_graphs)
#################################################################################################

# R = mlgk([Graph.from_networkx(g) for g in [graphlist[0], graphlist[1], graphlist[2],graphlist[3]]])
#
# Normalize the similarity matrix so the diagonal becomes (approximately) 1:
# K[i][j] = R[i][j] / sqrt(R[i][i] * R[j][j])
d = np.diag(R) ** -0.5
K = np.diag(d).dot(R).dot(np.diag(d))

# Similarity between the first graph and the second one.
print(K[0][1])

# Report the highest similarity between graph 0 and any OTHER graph.
# BUG FIX: the previous loop tried to exclude the self-similarity by testing
# `count == 1` with exact float equality.  After normalization K[0][0] is
# only *approximately* 1 due to rounding, so the self-similarity could be
# wrongly reported as the maximum (and a value of exactly 1 assigned on the
# last iteration was never reset either).  Skipping index 0 explicitly is
# both simpler and robust.
count = np.max(K[0][1:])
print(count)
