import collections
import math
from LSN2Vec import graph_util
import numpy as np
import random
from sklearn.model_selection import train_test_split
class net2vecclass:
    '''
    Node-embedding trainer in the spirit of DeepWalk/node2vec:
    samples random walks over a graph, extracts context windows, and
    trains embeddings with hierarchical softmax over a Huffman tree.
    '''

    def __init__(self):
        # Embedding matrix (nodeNum x m); filled by initParams().
        self.M = []
        # Random-walk sequences produced by buildTrainningSet().
        self.walkGraph = []
        # Network read from file (project helper; assumed NetworkX-like:
        # exposes .nodes and .neighbors()).
        self.mygraph = graph_util.build_graph_from_file('data/facebook_combined.txt')
        # self.mygraph = graph_util.build_graph_from_file_yutube('data/com-youtube.ungraph.txt')
        # self.mygraph = graph_util.build_graph_from_file_yutube1('data/youtubeCoded.csv')

    def initParams(self, m):
        '''
        Initialize model parameters: the embedding matrix M with uniform
        random values in [0, 1).
        :param m: embedding dimension
        :return: None
        '''
        nodeNum = len(self.mygraph.nodes)
        self.M = np.random.rand(nodeNum * m).reshape(nodeNum, m)

    def buildTrainningSet(self, maxLength, walkTimes, windowsSize=10):
        '''
        Sample random walks and extract a context window around the first
        occurrence of each node in every walk.
        :param maxLength: length of each random walk
        :param walkTimes: number of walks to sample
        :param windowsSize: total number of context nodes per window
        :return: (walks, windowTrainSet); each windowTrainSet entry is
                 [center, ctx1, ctx2, ...]
        '''
        # BUG FIX: was self.mygraph.node — attribute removed in
        # NetworkX >= 2.4; use .nodes, consistent with initParams().
        nodeList = list(self.mygraph.nodes)
        randomGraph = []
        windowTrainSet = []
        for _ in range(walkTimes):
            currNode = random.choice(nodeList)
            currSamplePath = [currNode]
            for _ in range(maxLength - 1):
                # NOTE(review): raises if a node has no neighbors —
                # assumes the graph has no isolated nodes; confirm input.
                currNode = random.choice(list(self.mygraph.neighbors(currNode)))
                currSamplePath.append(currNode)
            randomGraph.append(currSamplePath)
        self.walkGraph = randomGraph
        # Extract one window per distinct node per walk.
        for s in randomGraph:
            seen = set()  # nodes already windowed within this walk
            for i in range(len(s)):
                if s[i] in seen:
                    continue
                seen.add(s[i])
                minSide = min(i, len(s) - i - 1)  # length of the shorter side
                maxSide = max(i, len(s) - i - 1)  # length of the longer side
                # Number of context nodes drawn from the shorter side.
                # BUG FIX: the original lower bound windowsSize - maxSide
                # could exceed minSide when the walk is shorter than the
                # window, making random.randint raise ValueError; clamp it.
                low = min(minSide, max(0, windowsSize - maxSide))
                minSideElems = random.randint(low, minSide)
                # BUG FIX: cap the take from the longer side too, so the
                # left slice start can never go negative and wrap around.
                longSideElems = min(windowsSize - minSideElems, maxSide)
                if i < len(s) - i - 1:  # left side is the shorter one
                    leftPart = s[i - minSideElems:i]
                    rightPart = s[i + 1:i + 1 + longSideElems]
                else:
                    leftPart = s[i - longSideElems:i]
                    rightPart = s[i + 1:i + 1 + minSideElems]
                windowTrainSet.append([s[i]] + leftPart + rightPart)
        return randomGraph, windowTrainSet

    def updateParams(self, yita, windowSet, m, hfTree, nodeCodeDic):
        '''
        One pass of hierarchical-softmax training over the window set.
        :param yita: learning rate (eta)
        :param windowSet: list of [center, ctx...] training windows
        :param m: embedding dimension
        :param hfTree: root of the Huffman tree built over the graph
        :param nodeCodeDic: dict mapping str(node) -> Huffman code string
        :return: None
        '''
        for elem in windowSet:  # one training window at a time
            currNode = elem[0]
            currNeighbors = elem[1:]
            # Vs: sum of the context embeddings; q: accumulated gradient
            # to be applied back to every context embedding.
            Vs = np.array([0.0] * m)
            q = np.array([0.0] * m)
            for node in currNeighbors:
                # NOTE(review): assumes node ids are ints in [0, nodeNum)
                # usable as row indices into M — confirm graph loader.
                Vs += np.array(self.M[node])
            if str(currNode) not in nodeCodeDic:
                continue
            nodeCode = nodeCodeDic[str(currNode)]
            nowNode = hfTree
            for ch in nodeCode:
                preNode = nowNode
                # Walk down the Huffman tree: '0' -> left, '1' -> right.
                nowNode = nowNode.left if ch == '0' else nowNode.right
                # Gradient term; both updates below read the pre-update
                # thita, so computing it once preserves the original math.
                # NOTE(review): label is int(ch); classic word2vec uses
                # 1 - code — confirm against graph_util's code convention.
                g = yita * (int(ch) - graph_util.Sigmoid(np.dot(Vs, preNode.thita)))
                q = q + g * preNode.thita
                preNode.thita = preNode.thita + g * Vs
                # BUG FIX: the original additionally did
                # currNode = currNode.left / currNode.right here, but
                # currNode is a node id (not a tree node) — that raised
                # AttributeError on the first iteration. Removed.
            for node in currNeighbors:
                self.M[node] = self.M[node] + q





if __name__ == '__main__':
    '''
    Execution flow:
    0. Read the network structure from file
    1. Build the Huffman tree
    2. Get the Huffman codes of the leaf nodes
    3. Generate the raw training data (random walks)
    4. Extract structurally adjacent nodes (context windows)
    5. Initialize parameters
    6. Update parameters
    -------------------------------
    7. Validate the result
    7.1 Build the edge set and the no-edge set
    ------------------------------
    '''
    classIns1 = net2vecclass()
    classIns1.initParams(100)
    structureMatrix0 = classIns1.M  # embedding snapshot before training
    g = classIns1.mygraph
    degreeInfo = list(g.degree)
    # Average degree — informational only for now.
    aveDegree = math.floor(sum(elem[1] for elem in degreeInfo) / len(degreeInfo))
    graphList, windowSet = classIns1.buildTrainningSet(200, 500)
    hfTree = graph_util.huffmanTree()
    htreeRoot = hfTree.buildHuffTree(degreeInfo, theta_w=100)
    # HuffmanCodeDic populates graph_util.codeDic1 in place, so read the
    # dict only after the call (the original also read it before, which
    # was redundant and immediately overwritten).
    graph_util.HuffmanCodeDic(htreeRoot, '')
    nodeCodes = graph_util.codeDic1  # {'node': code}
    # Debug info: node count vs. number of Huffman-coded nodes.
    nodeNum = len(g.nodes)
    codeNum = len(set(nodeCodes.keys()))
    # BUG FIX: the separator was '' which glued the two counts together.
    print(str(nodeNum) + ' ' + str(codeNum))
    # End of debug info.
    classIns1.updateParams(yita=0.2, windowSet=windowSet, m=100,
                           hfTree=htreeRoot, nodeCodeDic=nodeCodes)
    structureMatrix = classIns1.M
    # Build the edge set and the no-edge (absent edge) set for evaluation.
    edge_set, ab_set = graph_util.find_no_edge(g)
    y = [1] * len(edge_set)
    _, test_set, _, _ = train_test_split(edge_set, y, test_size=0.2)
    auc_result = graph_util.cal_auc(test_set, ab_set, structureMatrix)
    print(auc_result)
    # Persist artifacts only for a sufficiently good embedding.
    if auc_result > 0.90:
        np.savetxt('edgeset.csv', np.matrix(edge_set), fmt="%d", delimiter=",")
        np.savetxt('abset.csv', np.matrix(ab_set), fmt="%d", delimiter=",")
        graph_util.saveMatrixData(data=structureMatrix, fileName='vecedNodes.csv')