import os
from json import load as json_load
from json import dump as json_dump
from math import exp
from torch import chunk
import numpy as np
from pathlib import Path
from sklearn.neighbors import KDTree
from mine_rule import rule_mining
from completion import Graph_Completion

class excavatePossibleTriple(object):
    """Dispatcher that mines candidate triples with either an embedding-based
    or a rule-based (AMIE) back end, and yields them in fixed-size chunks.

    Defaults added for backward compatibility: ``amie_config`` is only needed
    for the rule-based path, and ``use_embedding`` defaults to the embedding
    path (the file's ``__main__`` omits both arguments).
    """

    def __init__(self, data_path, dataset, model_path, gamma, embedding_model, amie_config=None):
        self.dataset = dataset
        self.data_path = data_path
        self.model_load_path = model_path
        self.gamma = gamma
        self.embed_model = embedding_model
        self.amie_config = amie_config

    def reasoning(self, topk_per_epoch, pool_topk, k_kdt, use_embedding=True):
        """Generator yielding ``(triples, probabilities, n_remaining)`` chunks.

        Each chunk holds at most ``topk_per_epoch`` triples; ``n_remaining``
        is how many candidates are still queued after this chunk (0 when the
        pool is exhausted).  A final sentinel ``([], [], 0)`` is yielded once
        all candidates have been emitted.
        """
        if use_embedding:
            model = excavatePossibleTripleByEmbedding(
                self.model_load_path, self.data_path, self.dataset, self.gamma, self.embed_model)
            possible_triples, probabilities = model.reasoning(pool_topk, k_kdt)
        else:
            model = excavatePossibleTripleByRule(self.amie_config)
            possible_triples, probabilities = model.reasoning()

        # Drain the candidate pool topk_per_epoch at a time.  The original
        # duplicated this loop in both branches; factored into one loop here.
        while probabilities:
            remaining = len(possible_triples) - topk_per_epoch \
                if len(possible_triples) >= topk_per_epoch else 0
            yield possible_triples[:topk_per_epoch], probabilities[:topk_per_epoch], remaining
            possible_triples = possible_triples[topk_per_epoch:]
            probabilities = probabilities[topk_per_epoch:]

        print('结束')
        # BUG FIX: the original sentinel was ([], [], []) although the third
        # element is an int (remaining count) in every other yield.
        yield [], [], 0


class excavatePossibleTripleByRule(object):
    """Mines logical rules with AMIE and applies them to propose new triples."""

    def __init__(self, config):
        # config exposes .data_dir and .result_dir (see reasoning below)
        self.amie_config = config

    def reasoning(self):
        """Run rule mining, then rule-based graph completion.

        Returns:
            (triples, probabilities): parallel lists of proposed triples and
            their confidence scores; both empty when mining found nothing.
        """
        mine_success = rule_mining(self.amie_config.data_dir, self.amie_config.result_dir)
        if not mine_success:
            print('未挖掘出知识...')
            return [], []

        data_path = Path(self.amie_config.data_dir)
        result_directory = self.amie_config.result_dir
        # BUG FIX: the original tested `os.path.join(result_directory)`, which
        # is always a non-empty (truthy) string, so the directory was never
        # created.  The intended check is for existence.
        if not os.path.exists(result_directory):
            os.makedirs(result_directory)

        GC = Graph_Completion(data_path)
        triples, probabilities = GC.completion(data_path, Path(result_directory))
        return triples, probabilities


class excavatePossibleTripleByEmbedding(object):
    """Proposes new triples by nearest-neighbour search in embedding space.

    For every head entity, predicts a tail embedding per relation with the
    chosen model (RotatE / TransE / ComplEx), finds the k closest *not yet
    connected* entities with a KD-tree, and scores each candidate with a
    Gaussian density of its TransE residual.
    """

    def __init__(self, model_load_path, data_path, dataset, gamma, embed_model):
        self.dataset = dataset
        self.embed_model = embed_model  # one of "RotatE", "TransE", "ComplEx"

        self.entity_embedding = np.load(os.path.join(model_load_path, "entity_embedding.npy"))
        self.relation_embedding = np.load(os.path.join(model_load_path, "relation_embedding.npy"))
        self.hidden_dim = self.relation_embedding.shape[1]
        # Phase scale for RotatE: maps raw relation values into [-pi, pi].
        self.relation_range = (gamma + 2.0) / self.hidden_dim

        # BUG FIX: the original left all these file handles open; use context
        # managers so they are closed deterministically.
        with open(os.path.join(data_path, "info.json"), "r") as fin:
            info = json_load(fin)
        self.entity_num = info["n_entity"]
        self.relation_num = info["n_relation"]

        self.result_path = os.path.join("..", "result", dataset)
        self.record_path = os.path.join("..", "record", dataset)

        with open(os.path.join(data_path, "ent_id2text.dict"), "r", encoding="utf-8") as fin:
            self.id_entity_dict = json_load(fin)
        with open(os.path.join(data_path, "rel_id2text.dict"), "r", encoding="utf-8") as fin:
            self.id_relation_dict = json_load(fin)

        connected_file = os.path.join(self.record_path, "connected.json")
        unconnected_file = os.path.join(self.record_path, "Unconnected.json")
        if not os.path.exists(connected_file):
            # BUG FIX: np.bool / np.int were removed in NumPy >= 1.24; the
            # builtin dtypes are equivalent here.
            adjacent = np.zeros([self.entity_num, self.entity_num], dtype=bool)
            triples = np.loadtxt(os.path.join(data_path, "train_id.txt"), dtype=int)

            # connected: head-id (str) -> list of tail-ids observed in training.
            # BUG FIX: replaced the bare except (which hid any error, not just
            # a missing key) with setdefault.
            self.connected = {}
            for triple in triples:
                self.connected.setdefault(str(triple[0]), []).append(int(triple[2]))
                adjacent[triple[0], triple[2]] = True

            # unconnected: head-id -> every entity with no observed edge.
            self.unconnected = {}
            for i in range(self.entity_num):
                self.unconnected[i] = [idx for idx, value in enumerate(adjacent[i]) if not value]

            # BUG FIX: ensure the record directory exists before writing.
            if not os.path.exists(self.record_path):
                os.makedirs(self.record_path)
            with open(unconnected_file, "w", encoding="utf-8") as fout:
                json_dump(self.unconnected, fout)
            with open(connected_file, "w", encoding="utf-8") as fout:
                json_dump(self.connected, fout)
        else:
            # NOTE: keys are strings after the JSON round-trip; reasoning()
            # converts with int(entity) where needed.
            with open(unconnected_file, "r") as fin:
                self.unconnected = json_load(fin)
            with open(connected_file, "r") as fin:
                self.connected = json_load(fin)

    def reasoning(self, pool_topk, k=5):
        """Mine up to ``pool_topk`` candidate triples.

        Args:
            pool_topk: size of the candidate pool kept while scanning.
            k: number of nearest neighbours queried per (head, relation).

        Returns:
            (triples, probabilities): tab-joined human-readable triples and
            their Gaussian-residual scores, sorted by descending probability.
        """
        # Triples introduced by earlier runs are skipped (persisted on disk).
        self.introduced_triples = set()
        introduced_file = os.path.join(self.result_path, "introduced_triples.txt")
        if os.path.exists(introduced_file):
            with open(introduced_file, "r", encoding="utf-8") as fin:
                for triple in fin:
                    self.introduced_triples.add(triple.strip())

        triple_distance = {}
        for entity in self.unconnected:
            head_embedding = self.entity_embedding[int(entity)]
            candidates = self.unconnected[entity]
            if not candidates:
                continue  # fully connected head: nothing to propose
            candidate_embeddings = self.entity_embedding[candidates]
            # BUG FIX: leaf_size must be >= 1; the original passed
            # len(candidates)//2, which is 0 for a single candidate.
            self.kdtree = KDTree(candidate_embeddings, metric="euclidean",
                                 leaf_size=max(1, len(candidates) // 2))

            for relation_id in range(self.relation_num):
                relation_embedding = self.relation_embedding[relation_id]
                if self.embed_model == "RotatE":
                    predicted_tail = self.RotatE(head_embedding, relation_embedding)
                elif self.embed_model == "TransE":
                    predicted_tail = self.TransE(head_embedding, relation_embedding)
                elif self.embed_model == "ComplEx":
                    predicted_tail = self.ComplEx(head_embedding, relation_embedding)
                else:
                    # BUG FIX: an unknown model previously fell through and
                    # raised a confusing NameError on the next line.
                    raise ValueError("unsupported embedding model: %s" % self.embed_model)

                dists, ids = self.kdtree.query(predicted_tail.reshape(1, -1),
                                               k=min(k, len(candidates)))
                for dist, idx in zip(dists[0], ids[0]):
                    tail_id = candidates[idx]
                    triple_str = "\t".join((str(entity), str(relation_id), str(tail_id)))
                    if triple_str in self.introduced_triples:
                        continue
                    triple_distance[(int(entity), relation_id, int(tail_id))] = dist
                # Keep only the pool_topk closest candidates seen so far.
                best = sorted(triple_distance, key=triple_distance.get)[:pool_topk]
                triple_distance = {t: triple_distance[t] for t in best}

        # Score each surviving candidate with a standard-normal density of its
        # TransE residual (t - h - r); larger probability = better fit.
        triple_probabilities = {}
        for head, relation, tail in triple_distance:
            residual = (self.entity_embedding[tail]
                        - self.entity_embedding[head]
                        - self.relation_embedding[relation])
            probability = 1 / np.sqrt(2 * np.pi) * np.exp(-np.sum(np.power(residual, 2)) / 2)
            triple_probabilities[(head, relation, tail)] = probability

        triple_probability_sorted = sorted(triple_probabilities,
                                           key=triple_probabilities.get, reverse=True)
        temp_triples = [[str(h), str(r), str(t)] for h, r, t in triple_probability_sorted]

        # Persist the introduced id-triples so later runs skip them.
        if not os.path.exists(self.result_path):
            os.makedirs(self.result_path)
        with open(introduced_file, "a+", encoding="utf-8") as fout:
            for triple in temp_triples:
                fout.write("\t".join(triple) + "\n")

        # Map ids back to their textual names for the returned triples.
        triples = ["\t".join((str(self.id_entity_dict[h]),
                              str(self.id_relation_dict[r]),
                              str(self.id_entity_dict[t])))
                   for h, r, t in temp_triples]
        probabilities = [triple_probabilities[key] for key in triple_probability_sorted]
        return triples, probabilities

    def RotatE(self, head_embedding, relation_embedding):
        """Rotate the complex head embedding by the relation phase.

        The first/second halves of ``head_embedding`` are the real/imaginary
        parts; the relation is interpreted as a phase angle.
        """
        re_head, im_head = head_embedding[:self.hidden_dim], head_embedding[self.hidden_dim:]
        relation_phase = relation_embedding / (self.relation_range / np.pi)
        re_relation, im_relation = np.cos(relation_phase), np.sin(relation_phase)

        re_tail = re_head * re_relation + im_head * im_relation
        im_tail = re_head * im_relation - im_head * re_relation
        return np.concatenate([re_tail, im_tail])

    def TransE(self, head_embedding, relation_embedding):
        """Predict the tail as head + relation (translation model)."""
        return head_embedding + relation_embedding

    def ComplEx(self, head, relation):
        """Complex bilinear score.

        NOTE(review): this uses torch.chunk with dim=2 and returns a tuple,
        which is incompatible with the numpy 1-D vectors reasoning() passes
        (and with its .reshape call) — looks unfinished; confirm before using
        embed_model == "ComplEx".
        """
        re_head, im_head = chunk(head, 2, dim=2)
        re_relation, im_relation = chunk(relation, 2, dim=2)

        re_score = re_head * re_relation - im_head * im_relation
        im_score = re_head * im_relation + im_head * re_relation
        return re_score, im_score

    def softmax(self, lst):
        """Return the softmax normalization of a list of scores."""
        expsum = sum([exp(elem) for elem in lst])
        return [exp(elem) / expsum for elem in lst]


def estimateDistance(dataset, n_entity, model_load_path, hidden_dim, gamma, model):
    """Compute per-triple embedding-space distances over a small training file.

    Loads entity/relation embeddings from ``model_load_path`` and the triples
    from ``../data/<dataset>/<n_entity>/tiny_train_id.txt``, then measures the
    L2 distance between the model-predicted tail and the actual tail.
    Prints the mean and standard deviation and returns the list of distances.

    NOTE(review): ids are offset by -1 when indexing the embedding tables —
    presumably the id files are 1-based; confirm against the data format.
    """
    relation_range = (gamma + 2.0) / hidden_dim
    entity_embeddings = np.load(os.path.join(model_load_path, "entity_embedding.npy"))
    relation_embeddings = np.load(os.path.join(model_load_path, "relation_embedding.npy"))
    # BUG FIX: np.int was removed in NumPy >= 1.24; the builtin int is the
    # documented replacement.
    triples = np.loadtxt(os.path.join("..", "data", dataset, str(n_entity), "tiny_train_id.txt"),
                         dtype=int)
    dists = []
    for head, relation, tail in triples:
        head_embedding = entity_embeddings[head - 1]
        relation_embedding = relation_embeddings[relation - 1]
        tail_embedding = entity_embeddings[tail - 1]
        if model == "RotatE":
            re_head, im_head = head_embedding[:hidden_dim], head_embedding[hidden_dim:]
            relation_phase = relation_embedding / (relation_range / np.pi)
            re_relation, im_relation = np.cos(relation_phase), np.sin(relation_phase)
            re_score = re_head * re_relation + im_head * im_relation
            im_score = re_head * im_relation - im_head * re_relation
            score = np.concatenate([re_score, im_score]) - tail_embedding
            dists.append(np.linalg.norm(score))
        elif model == "TransE":
            score = head_embedding + relation_embedding - tail_embedding
            dists.append(np.linalg.norm(score))
    print("mean: ", np.mean(dists))
    print("std: ", np.std(dists))
    return dists


def caldist(embed1, embed2):
    """Return the Euclidean (L2) distance between two embedding vectors."""
    # np.linalg.norm replaces the original hand-rolled sqrt-of-squared-sum,
    # which looped over elements in Python.
    return np.linalg.norm(embed1 - embed2)


if __name__ == "__main__":
    dataSet = "FB15k"
    topk_per_epoch = 10
    data_path = os.path.join("..", "data", dataSet)
    gamma = 24.0
    use_delta = True
    model_path = os.path.join("..", "model", dataSet, "delta") if use_delta \
        else os.path.join("..", "model", dataSet)
    model_name = "TransE"
    pool_topk, k_kdtree = 50, 5
    # BUG FIX: the original omitted the required amie_config constructor
    # argument and the required use_embedding argument of reasoning(), so
    # this script raised TypeError before doing any work.  amie_config is
    # only needed for the rule-based path, so None is fine here.
    excavateModel = excavatePossibleTriple(data_path, dataSet, model_path, gamma,
                                           model_name, None)
    triples_g = excavateModel.reasoning(topk_per_epoch, pool_topk, k_kdtree,
                                        use_embedding=True)
    for _ in range(5):
        print(next(triples_g))