import copy
import functools
import logging
import random
import time

import igraph as ig
import leidenalg as la
import networkx as nx
import numpy as np

from semantic_similarity import BGE, TFIDF, sim_model_dot, sim_model_sim_mat


def timeit(func):
    """Decorator that prints the wall-clock execution time of ``func``.

    :param func: callable to wrap
    :return: a wrapper that forwards all arguments/return value unchanged
        and prints the elapsed seconds after each call
    """

    # Bug fix: without functools.wraps the wrapper clobbered the wrapped
    # function's __name__/__doc__, which also made the printed name wrong
    # for doubly-decorated functions.
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        start_time = time.time()
        result = func(*args, **kwargs)
        execution_time = time.time() - start_time
        print(f"函数 {func.__name__} 的执行时间为: {execution_time} 秒")
        return result

    return wrapper


class AlignAlgorithm:
    """Base class for aligning annotation sentences to classic-text sentences.

    Graph layout convention: node ids ``0..classic_num-1`` are classic
    sentences; ids ``classic_num..classic_num+annotation_num-1`` are
    annotation sentences. A "community" is the list of node ids grouped
    around one classic sentence, indexed by that classic sentence's id.
    """

    def __init__(self, graph: nx.Graph, classics, annotations):
        """
        :param graph: alignment graph built by ``build_graph_from_doc``
        :param classics: list of classic sentence strings
        :param annotations: list of annotation sentence strings
        """
        self.graph = graph
        # Half the total non-self-loop edge weight, used as "m" in the
        # modularity formula. NOTE(review): nx.Graph.edges() yields each
        # undirected edge once, so this is total_weight / 2 — confirm this is
        # the normalization calculate_modularity expects.
        self.m = sum(graph.get_edge_data(u, v).get('weight') for u, v in graph.edges() if u != v) / 2
        self.classics = classics
        self.annotations = annotations

    def get_classic_id(self, classic_id: int):
        """Graph node id of a classic sentence (identity, with a bounds check)."""
        assert classic_id < self.graph.graph["classic_num"]
        return classic_id

    def get_annotation_id(self, annotation_id: int):
        """Graph node id of an annotation sentence (offset by classic_num)."""
        return self.graph.graph["classic_num"] + annotation_id

    def get_original_id(self, id: int):
        """Map a graph node id back to its index in classics/annotations."""
        classic_num = self.graph.graph["classic_num"]
        if id < classic_num:
            return id
        else:
            return id - classic_num

    def community_to_id(self, communities):
        """Return {annotation index: community id}, dropping each community's own classic node."""
        # Bug fix: the assert's failure message called len() on the int
        # graph.graph["classic_num"], which raised TypeError instead of
        # showing the mismatched counts.
        assert len(communities) == self.graph.graph["classic_num"], (
            len(communities), self.graph.graph["classic_num"])
        annotation_nodes = {}
        for com_id, com in enumerate(communities):
            for node in com:
                if node != com_id:  # drop the community's own classic node
                    annotation_nodes[self.get_original_id(node)] = com_id
        return annotation_nodes

    def greedy(self, classic, annotation, sim_mat):
        """Assign every annotation to the classic sentence it is most similar to."""
        community_ids = list(range(len(classic)))
        for i in range(len(classic), len(classic) + len(annotation)):
            community_ids.append(np.argmax(sim_mat[i][:len(classic)]))
        return self.position_to_community(community_ids)

    def accumulate_initialize_position(self):
        """Spread annotations over classics in document order with random steps."""
        position = [i for i in range(self.graph.graph["classic_num"])]
        remain_annotation_num = self.graph.graph["annotation_num"]
        remain_classic_num = self.graph.graph["classic_num"]
        current_position = 0
        while remain_annotation_num > 0:
            # When annotations far outnumber classics, the average step per
            # annotation is tiny — close to 0.
            # Bug fix: the original used min(ratio, 0), which pinned avg_step
            # at 0 (and went negative once remain_classic_num did, crashing
            # randint); max() is the intended non-negative clamp.
            # NOTE(review): current_position can still overshoot
            # classic_num - 1 by one step — confirm downstream tolerance.
            avg_step = max(float(remain_classic_num) / remain_annotation_num, 0)
            velocity = random.randint(0, int(2 * avg_step))
            position.append(current_position)
            current_position += velocity
            remain_annotation_num -= 1
            remain_classic_num -= velocity
        assert len(position) == self.graph.graph["classic_num"] + self.graph.graph["annotation_num"]
        return position

    def dp_pass(self, classic, annotation, sim_mat):
        """Order-preserving DP alignment of annotations to classics.

        Recurrence: dp[i][j] = max(dp[i][j-1] + sim(i, j),  # j joins classic i
                                   dp[i-1][j])              # skip classic i
        Backtracking the stored paths recovers the assignment.
        """
        dp_mat = [[{"value": 0, "path": None} for j in range(len(annotation) + 1)] for i in
                  range(len(classic) + 1)]
        communities = [[] for i in range(len(classic))]
        assert len(classic) == self.graph.graph["classic_num"] and len(annotation) == self.graph.graph["annotation_num"]
        for i in range(1, len(classic) + 1):
            for j in range(1, len(annotation) + 1):
                # option 1: annotation j-1 is matched to classic i-1
                op_1 = dp_mat[i][j - 1]["value"] + sim_mat[self.get_classic_id(i - 1)][self.get_annotation_id(j - 1)]
                op_2 = dp_mat[i - 1][j]["value"]  # option 2: skip classic i-1
                if op_1 >= op_2:
                    # j belongs to i
                    dp_mat[i][j]["value"] = op_1
                    dp_mat[i][j]["path"] = (i, j - 1)
                else:
                    # j does not belong to i
                    dp_mat[i][j]["value"] = op_2
                    dp_mat[i][j]["path"] = (i - 1, j)
        i = len(classic)
        j = len(annotation)
        while i > 0 and j > 0:
            path = dp_mat[i][j]["path"]
            if path[0] == i and path[1] == j - 1:
                # path indices are 1-based; node ids are 0-based
                communities[self.get_classic_id(i - 1)].append(self.get_annotation_id(j - 1))
            elif path[0] == i - 1 and path[1] == j:
                # cannot `continue` here: (i, j) still has to take the path value below
                pass
            else:
                raise Exception("wrong path")
            i, j = path
        for i in range(len(communities)):
            communities[i].append(i)  # add the classic node itself; removed during evaluation
            communities[i].reverse()
        return communities

    def calculate_community_similarity(self, classics, annotations, communities, sim_model):
        """Mean similarity between each classic and its community's concatenated annotation text.

        Empty communities contribute 0 and are excluded from the denominator.
        """
        assert len(classics) == len(communities)
        community_text = []
        empty_communities = []
        for i in range(len(classics)):
            if len(communities[i]) > 0:
                community_text.append("。".join([annotations[self.get_original_id(node)] for node in communities[i] if
                                                self.graph.nodes[node]["node_type"] == "annotation"]))
            else:
                community_text.append("")
                empty_communities.append(i)
        sim = sim_model_dot(sim_model, classics, community_text)
        for community_id in empty_communities:
            sim[community_id] = 0
        return sum(sim) / (len(classics) - len(empty_communities))

    @staticmethod
    def community_to_position(communities):
        """Convert [[node, ...], ...] into a flat node->community label list."""
        num_of_nodes = sum([len(c) for c in communities])
        position = [-1 for _ in range(num_of_nodes)]
        for id, community in enumerate(communities):
            for node in community:
                position[node] = id
        assert min(position) >= 0  # every node must belong to some community
        return position

    @staticmethod
    def position_to_community(position):
        """Convert a flat node->community label list into [[node, ...], ...]."""
        community_num = max(position) + 1
        communities = [[] for _ in range(community_num)]
        for i, label in enumerate(position):
            communities[label].append(i)
        return communities

    def calculate_modularity(self, graph, m, communities: list = None, position: list = None, classics=None,
                             annotations=None, sim_model=None, panalty=0, use_community_sim=False):
        """Weighted modularity of a partition, optionally penalized and/or
        boosted with the community/classic similarity term.

        :param graph: the alignment graph
        :param m: half the total edge weight (see __init__)
        :param communities: partition as [[node_1, node_2], ...]
        :param position: partition as [community_of_node_0, ..., community_of_node_n];
            takes precedence over ``communities`` when given
        :param panalty: non-negative penalty subtracted from the score
            (spelling kept — it is part of the keyword interface)
        :param use_community_sim: add the community similarity term
        :return: modularity - panalty + community_sim
        """
        modularity = 0
        if position is not None:
            communities = self.position_to_community(position)
        for community in communities:
            # Intra-community weight: sum of A_ij over ordered pairs i, j in
            # the community (each undirected edge counted twice).
            deg_in = sum(sum(
                graph.get_edge_data(node1, node2).get('weight') for node2 in community if
                graph.has_edge(node1, node2) and node1 != node2) for node1 in community)
            # Total weighted degree of the community's nodes — stands in for
            # the k_i * k_j expectation term of the classic formula.
            deg_sum = sum(
                sum(graph.get_edge_data(node1, node2).get('weight') for node2 in graph.neighbors(node1) if
                    node1 != node2) for node1 in community)
            modularity += (deg_in / m) - (deg_sum / (2 * m)) ** 2
        assert panalty >= 0, panalty
        if use_community_sim:
            community_sim = self.calculate_community_similarity(classics, annotations, communities, sim_model)
        else:
            community_sim = 0
        return modularity - panalty + community_sim

    def forward(self, sim_mat):
        """Run the DP alignment over the documents in original order."""
        communities = self.dp_pass(self.classics, self.annotations, sim_mat)
        return communities

    def backward(self, sim_model):
        """Run the DP alignment on the reversed documents and map ids back."""
        classic_num = len(self.classics)
        annotation_num = len(self.annotations)
        all_sent = self.classics[::-1] + self.annotations[::-1]
        all_sim = sim_model_sim_mat(sim_model, all_sent)
        communities = self.dp_pass(self.classics[::-1], self.annotations[::-1], all_sim)
        reverse_communities = communities[::-1]
        for community_id, community in enumerate(reverse_communities):
            for i in range(len(community)):
                # undo the reversal to recover the true node ids
                if community[i] >= classic_num:
                    community[i] = (classic_num + annotation_num - 1) - (community[i] - classic_num)
                else:
                    community[i] = classic_num - community[i] - 1
                    assert community[i] == community_id
        return reverse_communities


class OldLeiden(AlignAlgorithm):
    """Baseline alignment using the stock Leiden algorithm (leidenalg) on an igraph copy."""

    def __init__(self, graph: nx.Graph, classics, annotations):
        super().__init__(graph, classics, annotations)
        # Replace the networkx graph with its igraph equivalent; base-class
        # metrics (self.m) were already computed from the networkx graph.
        self.graph = self.convert_graph(graph)

    @staticmethod
    def convert_graph(graph: nx.Graph):
        """Convert a networkx graph to an igraph graph, preserving edge weights.

        Perf fix: the original called ``nodes.index(u)`` per edge endpoint,
        an O(V) scan inside the edge loop (O(V*E) total); a node->index dict
        makes the conversion O(V + E).
        """
        nodes = list(graph.nodes())
        node_index = {node: idx for idx, node in enumerate(nodes)}
        G_ig = ig.Graph()
        G_ig.add_vertices(len(nodes))
        for u, v, data in graph.edges(data=True):
            G_ig.add_edge(node_index[u], node_index[v], weight=data['weight'])
        return G_ig

    def forward(self, sim_mat=None):
        """Partition with plain Leiden; keep only clusters anchored at a classic node.

        :param sim_mat: unused, kept for interface parity with the base class
        """
        partition = la.find_partition(self.graph, la.ModularityVertexPartition)
        clusters = {}
        for cluster in partition:
            # Key each cluster by its smallest node so clusters whose minimum
            # is a classic id line up with that classic's community slot.
            min_node = min(cluster)
            clusters[min_node] = list(cluster)
        communities = [[] for _ in range(len(self.classics))]
        for i in range(len(communities)):
            if i in clusters:
                communities[i] = clusters[i]
        return communities

    def community_to_id(self, communities):
        """Return {annotation index: community id}.

        Unlike the base class this does not assert on the community count —
        Leiden may produce clusters not anchored at a classic node.
        """
        annotation_nodes = {}
        for com_id, com in enumerate(communities):
            for node in com:
                if node != com_id:  # drop the community's own classic node
                    annotation_nodes[self.get_original_id(node)] = com_id
        return annotation_nodes

    def get_original_id(self, id: int):
        """Map node id back to a list index; uses len(classics) since self.graph
        is now an igraph object without the classic_num attribute."""
        classic_num = len(self.classics)
        if id < classic_num:
            return id
        else:
            return id - classic_num

class StructuralAwareLeiden(AlignAlgorithm):
    """DP-initialized alignment refined with a Leiden-style local-move phase."""

    def __init__(self, graph: nx.Graph, classics, annotations, sim_model=None, use_community_sim=False,
                 use_bidirection=False, use_dp=True):
        super().__init__(graph, classics, annotations)
        self.use_community_sim = use_community_sim
        self.use_bidirection = use_bidirection  # only effective together with use_dp
        self.use_dp = use_dp
        self.sim_model = sim_model
        self.communities = []  # initial (pre-optimization) partition

    def forward(self, sim_mat):
        """Initialize communities (DP or accumulate) and locally maximize modularity.

        :return: ``(communities, original_communities)`` for the forward
            result, or just ``backward_communities`` when the backward pass
            wins. NOTE(review): the two branches return different shapes —
            callers must handle both.
        """
        # Initialize the partition, then optimize modularity from it.
        if self.use_dp:
            self.communities = super().forward(sim_mat)
            communities = copy.deepcopy(self.communities)
            if self.use_bidirection:
                backward_communities = super().backward(self.sim_model)
        else:
            position = self.accumulate_initialize_position()
            self.communities = self.position_to_community(position)
            communities = copy.deepcopy(self.communities)

        def optimize(communities):
            current_modularity = self.calculate_modularity(self.graph, self.m, communities=communities,
                                                           classics=self.classics, annotations=self.annotations,
                                                           sim_model=self.sim_model,
                                                           use_community_sim=self.use_community_sim)
            while True:
                # Try a Leiden-style pass to find a higher-modularity partition.
                new_communities = self.single_pass(communities)
                new_modularity = self.calculate_modularity(self.graph, self.m, communities=new_communities,
                                                           classics=self.classics, annotations=self.annotations,
                                                           sim_model=self.sim_model,
                                                           use_community_sim=self.use_community_sim)
                if new_modularity > current_modularity:
                    communities = new_communities
                    current_modularity = new_modularity
                else:
                    break

            return communities, current_modularity

        original_communities = copy.deepcopy(communities)
        communities, modularity = optimize(communities)
        # Bug fix: the backward pass only exists when use_dp is set; the
        # original referenced backward_modularity (NameError) whenever
        # use_bidirection was set without use_dp.
        use_backward = self.use_bidirection and self.use_dp
        if use_backward:
            backward_communities, backward_modularity = optimize(backward_communities)
        if not use_backward or modularity > backward_modularity:
            return communities, original_communities
        else:
            return backward_communities

    def modularity_gain(self, communities, node, current_community_id, target_community_id, current_modularity):
        """Modularity delta from moving ``node`` between two communities.

        Non-destructive: works on a deep copy and returns (gain, new_partition).
        """
        new_communities = copy.deepcopy(communities)
        new_communities[current_community_id].remove(node)
        new_communities[target_community_id].append(node)
        new_modularity = self.calculate_modularity(self.graph, self.m, communities=new_communities,
                                                   classics=self.classics, annotations=self.annotations,
                                                   sim_model=self.sim_model, use_community_sim=self.use_community_sim)
        return new_modularity - current_modularity, new_communities

    def single_pass(self, communities):
        """One greedy local-move phase over boundary annotation nodes.

        ``communities`` is a parameter (rather than self.communities) so the
        method can later be extended to optimize from a seed set instead of a
        purely greedy choice.
        :param communities: current partition, one list of nodes per classic
        :return: the (possibly improved) partition
        """
        improvements = True
        while improvements:
            improvements = False
            # Evaluate the current partition once per sweep.
            current_modularity = self.calculate_modularity(self.graph, self.m, communities=communities,
                                                           classics=self.classics, annotations=self.annotations,
                                                           sim_model=self.sim_model,
                                                           use_community_sim=self.use_community_sim)
            best_gain = 0
            best_communities = None
            for community_idx, community in enumerate(communities):
                # Classic Leiden iterates every node and tries moving it to a
                # neighboring community; here, to preserve document order,
                # only boundary nodes (the smallest/largest non-classic node
                # of a community) may move.
                assert community_idx == min(community), (community_idx, community)
                nodes = [node for node in community if self.graph.nodes[node]["node_type"] == "annotation"]
                if len(nodes) == 0:
                    continue
                # Find the move with the largest modularity gain.
                # todo: neighbor communities are fixed here; allow exploratory
                # moves such as shifting a contiguous span at once
                if community_idx > 0:
                    # Not the first community, so the head node can move back.
                    head_node = min(nodes)
                    head_neighbor_community_idx = community_idx - 1
                    # Only move to the community holding the adjacent node;
                    # check whether modularity increases.
                    gain, new_communities = self.modularity_gain(communities, head_node, community_idx,
                                                                 head_neighbor_community_idx,
                                                                 current_modularity)
                    if gain > best_gain:  # modularity must strictly improve
                        best_gain = gain
                        best_communities = new_communities
                if community_idx < len(communities) - 1:
                    tail_node = max(nodes)
                    tail_neighbor_community_idx = community_idx + 1
                    gain, new_communities = self.modularity_gain(communities, tail_node, community_idx,
                                                                 tail_neighbor_community_idx,
                                                                 current_modularity)
                    if gain > best_gain:
                        best_gain = gain
                        best_communities = new_communities
                # Greedily apply the single best positive move per sweep.
                # todo: true Leiden moves nodes probabilistically rather than
                # always to the best community — consider randomizing
            if best_gain > 0:
                communities = best_communities
                improvements = True
        return communities


class ParticleSwarmOptimization(AlignAlgorithm):
    """PSO over annotation->classic assignments, maximizing penalized modularity.

    A particle's position is a flat label vector (one classic id per node);
    the classic dimensions are frozen and only annotation dimensions move.
    """

    def __init__(self, graph, classics, annotations, num_particles, num_iterations, sim_model, use_community_sim):
        super().__init__(graph, classics=classics, annotations=annotations)
        self.sim_model = sim_model
        self.num_particles = num_particles
        self.dimension = len(self.graph.nodes)
        self.velocity_bound = 5  # max |initial velocity| per dimension
        self.use_community_sim = use_community_sim
        # todo: position-bounds handling is still open
        self.num_iterations = num_iterations
        self.particles = []
        self.global_best_position = None
        # Bug fix: this is a maximization problem, so the sentinel must be
        # -inf; +inf would block update_global_best if it ever ran before
        # initialize_particles overwrote the value.
        self.global_best_value = float('-inf')
        self.c1 = 1.5  # cognitive coefficient
        self.c2 = 1.5  # social coefficient

    def random_initialize_position(self):
        """Assign every annotation a uniformly random classic sentence id."""
        position = [i for i in range(self.graph.graph["classic_num"])] + random.choices(
            list(range(self.graph.graph["classic_num"])), k=self.graph.graph["annotation_num"])
        return position

    def greedy_initialize_position(self):
        """Label each annotation with the classic sharing its heaviest edge."""
        position = [i for i in range(self.graph.graph["classic_num"])]
        for i in range(self.graph.graph["annotation_num"]):
            node1 = self.get_annotation_id(i)
            max_weight = -1
            max_community = -1
            for node2 in range(self.graph.graph["classic_num"]):
                if self.graph.has_edge(node1, node2):
                    weight = self.graph.get_edge_data(node1, node2).get('weight')
                    if weight > max_weight:
                        max_weight = weight
                        max_community = node2
            assert max_community >= 0  # every annotation must touch some classic
            position.append(max_community)
        assert len(position) == self.dimension
        return position

    def initialize_particles(self, sim_mat, initial_method=None):
        """Seed the swarm: one DP particle plus num_particles-1 per initial_method.

        :param initial_method: "random" | "greedy" | "accumulate" | "dp", or
            "all" to pick one of the four at random per particle
        :raises ValueError: for any other value (incl. None with more than one
            particle) — the original left ``particle_position`` unbound and
            died with a confusing NameError instead
        """
        dp_communities = super().forward(sim_mat)
        self.global_best_position = self.community_to_position(dp_communities)
        assert len(self.global_best_position) == self.dimension
        self.global_best_value = self.calculate_modularity(self.graph, self.m, communities=dp_communities,
                                                           classics=self.classics, annotations=self.annotations,
                                                           sim_model=self.sim_model,
                                                           use_community_sim=self.use_community_sim)
        # Classic dimensions never move, so their velocity stays 0.
        particle_velocity = ([0 for _ in range(self.graph.graph["classic_num"])] +
                             [random.uniform(-self.velocity_bound, self.velocity_bound) for _ in
                              range(self.graph.graph["classic_num"], self.dimension)])
        self.particles.append({'position': copy.deepcopy(self.global_best_position), 'velocity': particle_velocity,
                               'pbest_position': copy.deepcopy(self.global_best_position),
                               'pbest_value': self.global_best_value})
        for _ in range(self.num_particles - 1):
            initialize_method_seed = -1
            if initial_method == "all":
                initialize_method_seed = random.randint(0, 3)
            if initial_method == "random" or initialize_method_seed == 0:
                particle_position = self.random_initialize_position()
            elif initial_method == "greedy" or initialize_method_seed == 1:
                particle_position = self.greedy_initialize_position()
            elif initial_method == "accumulate" or initialize_method_seed == 2:
                particle_position = self.accumulate_initialize_position()
            elif initial_method == "dp" or initialize_method_seed == 3:
                particle_position = copy.deepcopy(self.global_best_position)
            else:
                # Bug fix: unknown methods previously fell through with
                # particle_position unbound (NameError below).
                raise ValueError(f"unknown initial_method: {initial_method}")
            assert len(particle_position) == len(self.global_best_position)
            particle_velocity = ([0 for _ in range(self.graph.graph["classic_num"])] +
                                 [random.uniform(-self.velocity_bound, self.velocity_bound) for _ in
                                  range(self.graph.graph["classic_num"], self.dimension)])
            assert len(particle_velocity) == self.dimension
            particle_pbest_position = copy.deepcopy(particle_position)
            particle_pbest_value = self.calculate_modularity(self.graph, self.m, position=particle_position,
                                                             classics=self.classics, annotations=self.annotations,
                                                             sim_model=self.sim_model,
                                                             use_community_sim=self.use_community_sim,
                                                             panalty=self.calculate_panalty(particle_position))
            self.particles.append({'position': particle_position, 'velocity': particle_velocity,
                                   'pbest_position': particle_pbest_position, 'pbest_value': particle_pbest_value})

    def update_global_best(self):
        """Adopt the best personal best across the swarm (maximization)."""
        for particle in self.particles:
            if particle['pbest_value'] > self.global_best_value:
                self.global_best_value = particle['pbest_value']
                self.global_best_position = copy.deepcopy(particle['pbest_position'])

    def update_particle(self, particle):
        """Standard PSO velocity/position update on the annotation dimensions only."""
        w = 0.5  # inertia weight
        r1 = random.random()
        r2 = random.random()
        for i in range(self.graph.graph["classic_num"], self.dimension):
            particle['velocity'][i] = w * particle['velocity'][i] + self.c1 * r1 * (
                    particle['pbest_position'][i] - particle['position'][i]) + self.c2 * r2 * (
                                              self.global_best_position[i] - particle['position'][i])
            particle['position'][i] += int(particle['velocity'][i])
            # Clamp positions to valid classic ids.
            # todo: consider dynamic bounds instead of a hard clamp
            particle['position'][i] = max(0, min(particle['position'][i], self.graph.graph["classic_num"] - 1))
        particle_value = self.calculate_modularity(self.graph, self.m, position=particle['position'],
                                                   classics=self.classics, annotations=self.annotations,
                                                   sim_model=self.sim_model, use_community_sim=self.use_community_sim,
                                                   panalty=self.calculate_panalty(particle["position"]))
        if particle_value > particle['pbest_value']:
            particle['pbest_value'] = particle_value
            particle['pbest_position'] = copy.deepcopy(particle['position'])

    def calculate_panalty(self, position):
        """Penalty for annotation assignments that violate document order.

        For every out-of-order annotation pair (i before j in the document but
        position[i] > position[j]) the gap is accumulated. Name spelling kept:
        it matches the ``panalty`` keyword of calculate_modularity.
        """
        panalty = 0
        for i in range(self.graph.graph["classic_num"], len(position)):
            for j in range(i + 1, len(position)):
                if 0 <= position[j] < position[i]:
                    panalty += position[i] - position[j]
        return panalty

    def forward(self, sim_mat, initial_method=None):
        """Run PSO for num_iterations and return the best community partition."""
        self.initialize_particles(sim_mat, initial_method)
        for _ in range(self.num_iterations):
            self.update_global_best()
            for particle in self.particles:
                self.update_particle(particle)
        return self.position_to_community(self.global_best_position)


def build_graph_from_doc(classic: list, annotation: list, gamma: float, sim_model: dict):
    """Build the alignment graph over classic and annotation sentences.

    Nodes 0..len(classic)-1 are classic sentences; the rest are annotations.
    Edges: adjacent annotations are linked with weight = their similarity;
    annotation-annotation (non-adjacent) and annotation-classic pairs get
    semantic edges (the latter scaled by ``gamma``); classic-classic pairs
    get no semantic edge.

    :param classic: classic sentence strings
    :param annotation: annotation sentence strings
    :param gamma: scale factor for classic-annotation edge weights
    :param sim_model: model dict consumed by ``sim_model_sim_mat``
    :return: (graph, similarity matrix over classic + annotation sentences)
    """
    classic_num = len(classic)
    annotation_num = len(annotation)
    G = nx.Graph(classic_num=classic_num, annotation_num=annotation_num)
    for sent_id, classic_sent in enumerate(classic):
        G.add_node(sent_id, text=classic_sent, node_type="classic")
    for sent_id, annotation_sent in enumerate(annotation):
        G.add_node(sent_id + classic_num, text=annotation_sent, node_type="annotation")
    # Pairwise similarities for every sentence.
    all_sent = classic + annotation
    sim_mat = sim_model_sim_mat(sim_model, all_sent)
    # Adjacent annotation nodes are connected, weighted by their similarity.
    # todo: several options here — thresholding, or constant weights
    for i in range(classic_num, G.number_of_nodes() - 1):
        G.add_edge(i, i + 1, weight=sim_mat[i, i + 1])
    # todo: several options — connect all classic/annotation pairs? threshold
    # annotation pairs, and should that threshold match the classic one?
    for i in range(classic_num, G.number_of_nodes()):
        assert G.nodes[i]["node_type"] == "annotation"
        for j in range(G.number_of_nodes()):
            if i == j:
                # no self-loops
                continue
            if abs(i - j) == 1 and G.nodes[i]["node_type"] == G.nodes[j]["node_type"]:
                # Adjacent annotations already have their adjacency edge.
                # Bug fix: the original only skipped j == i + 1, so the
                # j == i - 1 pass silently re-added (overwrote) the adjacency
                # edge, relying on the similarity matrix being symmetric.
                continue
            semantic_weight = sim_mat[i][j]
            if G.nodes[j]["node_type"] == "classic":
                # Classic-annotation edges are scaled by gamma.
                G.add_edge(i, j, weight=semantic_weight * gamma)
            else:
                G.add_edge(i, j, weight=semantic_weight)
    return G, sim_mat


if __name__ == "__main__":
    # No CLI entry point yet: this module is intended to be imported as a library.
    pass