# coding=utf-8
import json
import os
import subprocess
import platform
import networkx as nx
import numpy as np
import pandas as pd
from flask import jsonify
from flask import request
from flask_login import login_required
from flask_socketio import emit
from sklearn.decomposition import PCA

from app import socket
from . import networkapi
from .network import Network
from .task import Task
from app import app

sysstr = platform.system()


def server_response(data=None, code=0, msg=''):
    """Wrap an API payload in the standard ``{code, msg, data}`` JSON envelope.

    ``code`` 0 means success by convention elsewhere in this module; a
    non-zero code carries a human-readable reason in ``msg``.
    """
    return jsonify({'code': code, 'msg': msg, 'data': data})


def _dispatch_network_api(list_data):
    """Run the analysis selected by ``list_data['api']`` on ``list_data['data']``.

    ``list_data`` is the decoded request payload: ``data`` holds the graph
    edges consumed by :class:`Network`, ``api`` is an integer function code,
    and ``para`` carries the extra parameter required by codes 1 and 15.
    Returns a Flask JSON response, or ``None`` for an unknown code (matching
    the original fall-through behaviour, which lets Flask raise its usual
    "view returned None" error).
    """
    graph = Network(list_data['data'])
    func = list_data['api']

    # Codes 18/19 both return (user_changed, sub_graphs) where each subgraph
    # supports .tolist(); serialize them identically.
    if func in (18, 19):
        user_changed, sub_graphs = (graph.user_changed() if func == 19
                                    else graph.user_changed_sensertive())
        return jsonify({
            "user_changed": user_changed,
            "sub_graphs": [g.tolist() for g in sub_graphs],
        })

    # Codes taking an extra parameter.
    if func == 1:
        return jsonify(graph.num_changed(list_data['para']))
    if func == 15:  # community detection: fluid communities needs a size parameter
        return jsonify(graph.asynFluidc(list_data['para']))

    # Edge betweenness returns a {(u, v): score} dict; flatten to triples.
    if func == 5:
        scores = graph.getEBetweennessCentrality()
        return jsonify([[edge[0], edge[1], score] for edge, score in scores.items()])

    # Remaining codes are zero-argument calls whose result is JSON-serializable:
    # 2-10 network centrality measures, 11-14 link prediction, 16-17 community
    # detection.
    zero_arg = {
        2: graph.getDegreeCentrality,
        3: graph.getClosenessCentrality,
        4: graph.getBetweennessCentrality,
        6: graph.getCurrentFlowClosenessCentrality,
        7: graph.getCurrentFlowBetweennessCentrality,
        8: graph.getEigenvectorCentrality,
        9: graph.getEigenvectorCentralityNumpy,
        10: graph.getLoadCentrality,
        11: graph.resourceAllocationIndex,
        12: graph.jaccardCoefficient,
        13: graph.adamicAdarIndex,
        14: graph.preferentialAttachment,
        16: graph.asynLpaCommunities,
        17: graph.labelPropagation,
    }
    if func in zero_arg:
        return jsonify(zero_arg[func]())
    return None


@networkapi.route('/', methods=['POST', 'GET'])
@login_required
def networkapi_num_changed():
    """Single entry point for all graph-analysis calls.

    POST carries the JSON payload in the request body; GET carries the same
    JSON string in the ``data`` query parameter.  The dispatch itself is
    identical for both methods (the original duplicated ~80 lines per method).
    """
    if request.method == 'POST':
        raw = request.get_data()
        list_data = json.loads(raw.decode('utf-8'))
    else:
        list_data = json.loads(request.args.get('data'))
    return _dispatch_network_api(list_data)


@networkapi.route('/dataset', methods=['GET'])
@login_required
def choose_dataset():
    """Load the edge list and labels of ``?dataset=`` and return them as JSON.

    The response carries the node list, edge list, sorted label rows, and the
    number of distinct classes (labels are assumed 0-based, hence max + 1).
    """
    dataset = request.args.get('dataset')
    # These three heterogeneous datasets use string node ids; the rest are ints.
    node_type = str if dataset in ('dblp', 'review-graph', 'upaper') else int
    graph = nx.read_edgelist('app/static/data/complex/{}.edge'.format(dataset),
                             nodetype=node_type)
    label = pd.read_csv('app/static/data/complex/{}.lbl'.format(dataset),
                        sep=' ', header=None)
    label = label.sort_values([0]).values
    return jsonify({
        'nodes': [{'name': node} for node in nx.nodes(graph)],
        'edges': [{'source': src, 'target': dst} for src, dst in nx.edges(graph)],
        'label': label.tolist(),
        'classes': int(label[:, 1].max() + 1),
    })


# Static metadata for the heterogeneous datasets:
# dataset name -> list of (node type name, node count as string) pairs.
# Counts mirror the on-disk datasets and are intentionally kept as strings,
# matching what the frontend already consumes.
_DATASET_ATTRIBUTES = {
    'dblp1': [('author', '160713'), ('paper', '111409'), ('conference', '150')],
    'review-graph1': [('user', '18340'), ('movie', '56361')],
    'upaper1': [('paper', '2076'), ('tutor', '1080'), ('academy', '184')],
}


@networkapi.route('/get_attribute', methods=['GET'])
@login_required
def get_attribute():
    """Return node-type names and counts for the requested dataset.

    Unknown datasets yield empty ``nodetypes``/``nodenums`` lists, exactly as
    the original if/elif chain did.  (The leftover debug ``print`` was removed.)
    """
    dataset = request.args.get('dataset')
    attributes = _DATASET_ATTRIBUTES.get(dataset, [])
    data = {
        'dataset': dataset,
        'nodetypes': [{'name': name} for name, _ in attributes],
        'nodenums': [{'number': num} for _, num in attributes],
    }
    return jsonify(data)


@networkapi.route('/statistic_info', methods=['GET'])
@login_required
def statistic_info():
    """Summarize node/edge counts, density, degree stats and clustering
    for every ``.edge`` dataset under the complex-data directory."""
    result = {
        "code": 0,
        'count': 0,
        "msg": '',
        'data': []
    }
    data_path = 'app/static/data/complex'
    # These datasets use string node ids (see choose_dataset); reading them
    # with nodetype=int would make read_edgelist raise on conversion.
    str_id_datasets = {'dblp', 'review-graph', 'upaper'}
    for filename in os.listdir(data_path):
        # endswith, not substring: only true ".edge" files, and stripping the
        # suffix by slicing cannot mangle names containing ".edge" elsewhere.
        if not filename.endswith('.edge'):
            continue
        dataset = filename[:-len('.edge')]
        node_type = str if dataset in str_id_datasets else int
        graph = nx.read_edgelist(os.path.join(data_path, filename),
                                 nodetype=node_type)
        # Compute the degree view once instead of three times per file.
        degrees = dict(nx.degree(graph)).values()
        result['data'].append({
            'name': dataset,
            'nodes': graph.number_of_nodes(),
            'edges': graph.number_of_edges(),
            'density': nx.density(graph),
            'degree_max': max(degrees),
            'degree_min': min(degrees),
            'degree_avg': float(sum(degrees)) / graph.number_of_nodes(),
            'clustering': nx.average_clustering(graph),
        })
        result['count'] += 1
    return jsonify(result)


@networkapi.route('/upload', methods=['POST'])
@login_required
def upload():
    """Validate an uploaded ``.edge``/``.lbl`` file and store it in the
    complex-data directory.

    Returns the standard envelope: code 1 + message on any validation
    failure, code 0 on success.
    """
    data_dir = 'app/static/data/complex'
    file = request.files['file']
    # Require a real extension; the original substring test accepted names
    # like "x.edge.exe".
    if not file.filename.endswith(('.edge', '.lbl')):
        return server_response(msg='文件扩展名不正确', code=1)
    # NOTE(review): file.filename is client-controlled and joined into paths
    # unsanitized — consider werkzeug.utils.secure_filename to block traversal.
    target = os.path.join(data_dir, file.filename)
    # Reject duplicates before saving/parsing anything (the original only
    # checked after the file was already saved and read).
    if os.path.exists(target):
        return server_response(msg='数据{}已存在'.format(file.filename), code=1)
    filepath = os.path.join(app.config['UPLOAD_FOLDER'], file.filename)
    file.save(filepath)
    df = pd.read_csv(filepath, sep=' ', header=None)
    # First column must be 0-based; for label files the second column
    # (class ids) must be 0-based too.
    if df[0].min() != 0 or (df[1].min() != 0 and 'lbl' in file.filename):
        return server_response(msg='数据格式不正确', code=1)
    df.to_csv(target, index=False, header=False, sep=' ')
    return server_response(msg='上传成功')


# Default hyper-parameters exposed to the frontend for each embedding method.
_METHOD_DEFAULT_ARGS = {
    'refex': {
        'lpmethod': 'Hadamard',
        'recursive-iterations': 3,
        'aggregator': 'simple',
        'bins': 4,
        'pruning-cutoff': 0.5,
    },
    'deepwalk': {
        'number-walks': 10,
        'walk-length': 80,
        'representation-size': 128,
        'window-size': 10,
    },
    'node2vec': {
        'number-walks': 10,
        'walk-length': 80,
        'representation-size': 128,
        'window-size': 10,
        'p': 1.0,
        'q': 1.0,
    },
    'nfgae': {
        'struct': '-1,64,64',
        'num_hist': 20,
        'epoch': 50,
        'batch-size': 32,
        'learning-rate': 0.001,
        'gamma': 1,
        'weight-decay': 0.001,
    },
    'GraphWave': {},
}


@networkapi.route('/args', methods=['GET'])
@login_required
def role_args():
    """Return the default hyper-parameters for ``?method=``.

    Unknown methods get an empty dict, like the original if/elif chain.
    A copy is returned so callers can never mutate the shared defaults.
    """
    method = request.args.get('method')
    return jsonify(dict(_METHOD_DEFAULT_ARGS.get(method, {})))


@networkapi.route('/role', methods=['POST'])
@login_required
def role_embedding():
    """Persist the submitted edge list for a later embedding run.

    The JSON body carries ``data`` (edge rows) and ``task_id``; only the
    first two columns (source, target) are kept and written as a
    space-separated ``.edge`` file under ``temp/dataset``.
    """
    payload = json.loads(request.get_data().decode('utf-8'))
    edges = np.array(payload['data'])[:, :2]
    out_path = './temp/dataset/{}.edge'.format(payload['task_id'])
    pd.DataFrame(edges).to_csv(out_path, sep=' ', index=False, header=False)
    return jsonify({'result': 'success'})


@socket.on('status', namespace='/status')
def get_embedding(msg):
    """Socket.IO handler: run an embedding job as a subprocess, stream its
    output over ``embed_log``, then (if labels were supplied) run
    classification, PCA and K-Means on the produced embedding.

    ``msg`` keys: ``method`` (embedding algorithm name), ``args`` (dict of
    command-line overrides), ``task_id`` (dataset id), and optionally
    ``label`` (rows of [node_id, class]).
    """
    method = msg['method']
    args = msg['args']
    task_id = msg['task_id']

    # Shell command template per method; {0} is the task/dataset id.
    command_templates = {
        'refex': "cd app/networkapi/RolX && python3 -u main.py --dataset {0} ",
        'deepwalk': "python3 -u -m openne --dataset {0} --method deepwalk ",
        'node2vec': "python3 -u -m openne --dataset {0} --method node2vec ",
        'nfgae': "cd app/networkapi/NFGAE && python3 -u main.py --dataset {0} "
                 "--model Test3 --device -1 ",
        'GraphWave': "cd app/networkapi/graphwave && python3 main.py --dataset {0} ",
    }
    if method not in command_templates:
        # The original raised NameError on an unknown method (command unbound);
        # report it to the client instead.
        emit('embed_log', 'Unknown embedding method: {}'.format(method))
        return
    command = command_templates[method].format(task_id)
    for key, value in args.items():
        command += "--{}={} ".format(key, value)
    print(command)

    # SECURITY: shell=True with client-supplied task_id/args interpolated into
    # the command string allows shell injection; the "cd ... &&" form depends
    # on a shell, so sanitize task_id/args upstream or rework with cwd= lists.
    p = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE,
                         stderr=subprocess.STDOUT)
    emit('embed_log', 'Run embedding')
    # Windows consoles emit GBK-encoded output; everything else is UTF-8.
    encoding = 'gbk' if sysstr == 'Windows' else 'utf-8'
    while p.poll() is None:
        line = p.stdout.readline().decode(encoding)
        if line:
            emit('embed_log', line)
    emit('embed_log', 'Embedding done!!')

    if 'label' in msg.keys():
        # Node classification over a range of train/test split ratios.
        emit('embed_log', 'Run classification')
        label = np.array(msg['label'])
        embed = pd.read_csv('temp/embed/{}.emb'.format(task_id))
        embed = embed.drop(['id'], axis=1).values
        task = Task('clf')
        data = []
        for ratio in np.round(np.linspace(0.1, 0.9, 9), decimals=1):
            # 'classfication' [sic] is the Task API's method name.
            result = task.classfication(embed, label, ratio, loop=20)
            data.append([result['split_ratio'], result['f1-micro'],
                         result['f1-macro']])
            emit('embed_log', json.dumps(result))
        emit('classification', {
            'f1-micro': [[each[0], each[1]] for each in data],
            'f1-macro': [[each[0], each[2]] for each in data]
        })
        emit('embed_log', 'Classification done!!')

        # PCA visualization (3 components fitted, first 2 sent to the client).
        emit('embed_log', 'Run PCA')
        pca = PCA(n_components=3)
        emb = pca.fit_transform(embed)
        emit('pca', emb[:, :2].tolist())
        emit('embed_log', 'PCA done!!')

        # K-means clustering with as many clusters as distinct labels.
        emit('embed_log', 'Run K-Means')
        classes = len(np.unique(label[:, 1]))
        kmeans_label = task.kmeans(embed, classes)
        emit('k-means', kmeans_label.tolist())
        emit('embed_log', 'K-Means done!!')