# -*- coding: utf-8 -*-
# @Time : 2021/12/29 14:46
# @Author : Huang
# @Email : HuangMJ6016@foxmail.com
# @File : main.py

import os
import networkx as nx
import numpy as np
from tqdm import tqdm
import node2vec

pseudo_count = 0.01


def gen_global_graph(file_path: str) -> nx.DiGraph:
    """Build the global diffusion graph from a tab-separated edge file.

    Each non-empty line has the form ``source\\t\\ttarget1:freq1\\ttarget2:freq2…``
    or ``source\\t\\tnull`` when the source has no outgoing edges.  Every edge
    frequency is stored as the ``global_weight`` edge attribute.

    Args:
        file_path: path to the global-graph text file (UTF-8).

    Returns:
        A directed graph with every listed node and its weighted edges.
    """
    _graph = nx.DiGraph()
    with open(file_path, 'r', encoding='utf-8') as f:
        for line in f:
            line = line.strip()
            # Fix: skip blank lines (e.g. a trailing newline at EOF) instead
            # of crashing on int('') / a missing parts[1].
            if not line:
                continue
            parts = line.split("\t\t")
            source = int(parts[0])
            _graph.add_node(source)
            if parts[1] != "null":
                for node_freq_str in parts[1].split("\t"):
                    # "target:frequency" pair
                    node_freq = node_freq_str.split(":")
                    weight = int(node_freq[1])
                    target = int(node_freq[0])
                    _graph.add_node(target)
                    _graph.add_edge(source, target, global_weight=weight)
    return _graph


def parse_graph(graph_string: str, global_graph: nx.DiGraph) -> tuple:
    """Parse one cascade line into a local graph annotated with global stats.

    The line is tab-separated; field 4 holds space-separated ``src:dst:x``
    edge triples, field 3 the cascade size, and the last field's final token
    the prediction target.  Each edge gets the global edge frequency
    (``edge_weight``), the global out-degree of its head (``global_degree``)
    and the local out-degree of its head (``local_degree``), all smoothed by
    ``pseudo_count``.

    Returns:
        (cascade graph, cascade size, target) tuple.
    """
    fields = graph_string.strip().split('\t')
    cascade = nx.DiGraph()
    for edge_str in fields[4].split():
        src_str, dst_str, _ = edge_str.split(':')
        src = int(src_str)
        dst = int(dst_str)
        cascade.add_node(src)
        cascade.add_node(dst)
        # Absent global edges fall back to a zero weight before smoothing.
        global_attrs = global_graph.edges.get((src, dst), {'global_weight': 0})
        cascade.add_edge(src, dst,
                         edge_weight=global_attrs['global_weight'] + pseudo_count,
                         global_degree=global_graph.out_degree[dst] + pseudo_count)
    for src, dst in cascade.edges:
        cascade.add_edge(src, dst,
                         local_degree=cascade.out_degree[dst] + pseudo_count)
    return cascade, int(fields[3]), int(fields[-1].split()[-1])


def random_walk(graph: nx.DiGraph, args):
    """Sample node2vec walks from ``graph`` and return them as an ndarray.

    Root nodes are drawn without replacement, proportionally to their
    (weighted) out-degree.  Leaf roots (zero out-weight) receive a small
    pseudo-count so they can appear in the first sampling round; once every
    node has been tried, later rounds draw only from non-leaf roots until
    ``args.walk_count`` walks have been generated.

    Args:
        graph: cascade graph to walk on.
        args: namespace providing ``weight_type``, ``p``, ``q``,
            ``walk_count`` and ``walk_length``.

    Returns:
        np.ndarray of the collected walks.
    """
    # Single pass over the weighted out-degree view (the original code
    # traversed it twice with identical smoothing logic).  Each entry is
    # (node, smoothed weight, had positive raw weight).
    weighted = []
    for node, weight in graph.out_degree(weight=args.weight_type):
        smoothed = weight + pseudo_count if weight == 0 else weight
        weighted.append((node, smoothed, weight > 0))

    weight_sum = sum(w for _, w, _ in weighted)
    weight_sum_noleaf = sum(w for _, w, nonleaf in weighted if nonleaf)

    roots, probs = list(), list()
    roots_noleaf, probs_noleaf = list(), list()
    for node, w, nonleaf in weighted:
        roots.append(node)
        probs.append(w / weight_sum)
        if nonleaf:
            roots_noleaf.append(node)
            probs_noleaf.append(w / weight_sum_noleaf)

    sample_total = args.walk_count
    first_time = True
    G = node2vec.Graph(graph, True, args.p, args.q, args.weight_type)
    G.preprocess_transition_probs()
    walk_res = []
    while True:
        if first_time:
            first_time = False
            node_list, prob_list = roots, probs
        else:
            node_list, prob_list = roots_noleaf, probs_noleaf
        n_sample = min(len(node_list), sample_total)
        if n_sample <= 0:
            break
        sample_total -= n_sample

        sampled_nodes = np.random.choice(node_list, n_sample,
                                         replace=False, p=prob_list)
        walks = G.simulate_walks(len(sampled_nodes), args.walk_length,
                                 sampled_nodes)
        walk_res.extend(walks)
    return np.asarray(walk_res)


def preprocess(args):
    """Convert each raw cascade file into an ``.npz`` of features and labels.

    Builds the global graph once, records its node count on ``args``, then
    for every postfix in ``args.data_postfix`` reads the raw text file line
    by line, parses each cascade against the global graph, generates its
    random-walk feature matrix, and saves features, cascade sizes and
    targets together as a compressed numpy archive.
    """
    global_graph = gen_global_graph(
        os.path.join(args.root_path, args.global_graph_path))
    args.num_nodes = global_graph.number_of_nodes()

    for postfix in args.data_postfix:
        base_name = args.data_prefix + postfix
        read_cas_file = os.path.join(args.root_path, args.data_dir,
                                     args.rawdata_dir, base_name + '.txt')
        write_cas_file = os.path.join(args.root_path, args.data_dir,
                                      args.preprocess_dir, base_name + '.npz')

        features, graph_sizes, targets = [], [], []
        with open(read_cas_file, 'r', encoding='utf-8') as rf:
            for line in tqdm(rf):
                cas_graph, size, target = parse_graph(line, global_graph)
                features.append(random_walk(cas_graph, args))
                graph_sizes.append(size)
                targets.append(target)

        np.savez(write_cas_file,
                 features=np.stack(features, 0),
                 graph_sizes=np.stack(graph_sizes, 0),
                 targets=np.stack(targets, 0))
