from os import path as os_path
from os import makedirs
import json


def _load_or_build_mapping(data_dir, text2id_file, id2text_file, source_file, label):
    """Load a cached name->id mapping, or build it from a newline-separated file.

    If ``text2id_file`` (JSON) exists in ``data_dir`` it is loaded and returned.
    Otherwise the names are read from ``source_file`` (one per line), both the
    name->id and id->name dicts are cached as JSON, and name->id is returned.
    ``label`` is the prefix used when printing the item count.
    """
    text2id_path = os_path.join(data_dir, text2id_file)
    try:
        with open(text2id_path, "r", encoding="utf-8") as fin:
            return json.load(fin)
    except FileNotFoundError:
        # Cache miss: fall through and build the mapping from the source file.
        pass

    with open(os_path.join(data_dir, source_file), "r", encoding="utf-8") as fin:
        names = [line.strip() for line in fin]
    print(label, len(names))
    name2id = {name: idx for idx, name in enumerate(names)}
    id2name = {idx: name for idx, name in enumerate(names)}
    with open(text2id_path, "w", encoding="utf-8") as fout:
        json.dump(name2id, fout)
    with open(os_path.join(data_dir, id2text_file), "w", encoding="utf-8") as fout:
        json.dump(id2name, fout)
    return name2id


def _write_id_file(path, mapping):
    """Write a mapping as '<count>\\n' followed by '<name>\\t<id>' lines."""
    with open(path, "w", encoding="utf-8") as fout:
        fout.write(str(len(mapping)) + "\n")
        for key, value in mapping.items():
            fout.write(key + "\t" + str(value) + "\n")


def data_preprocess_amie(data_dir):
    """Convert a KG dataset directory into the input format expected by AMIE.

    Reads ``entities.txt``/``relations.txt`` (building JSON id caches on first
    run), ``info.json`` (for ``n_entity``/``n_relation``) and ``train.txt``
    (comma-separated head,relation,tail — converted to ``train_id.txt`` on
    first run), then writes:

    - ``entity2id.txt`` / ``relation2id.txt``  — count header + tab-separated ids
    - ``amie/all2id.txt``  — entities and relations in one id space
      (relation ids shifted by the entity count)
    - ``amie/triples.txt`` — training triples as ``<h>\\t<r>\\t<t>`` with ids
      in the shared id space and each id wrapped in angle brackets

    Raises ``FileNotFoundError`` if the required source files are missing and
    ``KeyError`` if ``train.txt`` mentions an unknown entity/relation.
    """
    ent2id = _load_or_build_mapping(
        data_dir, "ent_text2id.dict", "ent_id2text.dict", "entities.txt", "n_entities: ")
    rel2id = _load_or_build_mapping(
        data_dir, "rel_text2id.dict", "rel_id2text.dict", "relations.txt", "n_relations: ")

    _write_id_file(os_path.join(data_dir, "entity2id.txt"), ent2id)
    _write_id_file(os_path.join(data_dir, "relation2id.txt"), rel2id)

    with open(os_path.join(data_dir, "info.json"), "r", encoding="utf-8") as fin:
        info = json.load(fin)
    ent_num, rel_num = info["n_entity"], info["n_relation"]

    # exist_ok avoids the exists()/makedirs() race of the original check.
    makedirs(os_path.join(data_dir, "amie"), exist_ok=True)

    # Single id space: entities keep their ids, relation ids are shifted up.
    with open(os_path.join(data_dir, "amie", "all2id.txt"), "w", encoding="utf-8") as fout:
        fout.write(str(ent_num + rel_num) + "\n")
        for ent in ent2id:
            fout.write("\t".join([ent, str(ent2id[ent])]) + "\n")
        for rel in rel2id:
            fout.write("\t".join([rel, str(rel2id[rel] + ent_num)]) + "\n")

    try:
        with open(os_path.join(data_dir, "train_id.txt"), "r", encoding="utf-8") as fin:
            triples = fin.readlines()
    except FileNotFoundError:
        # First run: translate the textual train.txt into id triples and cache.
        with open(os_path.join(data_dir, "train.txt"), "r", encoding="utf-8") as fin:
            all_triples = fin.readlines()
        triples = []
        for triple in all_triples:
            elems = triple.strip().split(',')
            triples.append("\t".join(
                [str(ent2id[elems[0]]), str(rel2id[elems[1]]), str(ent2id[elems[2]])]))
        with open(os_path.join(data_dir, "train_id.txt"), "w", encoding="utf-8") as fout:
            for line in triples:
                fout.write(line + '\n')

    # Shift the relation id into the shared id space and wrap every id in <>.
    for idx, triple in enumerate(triples):
        elems = triple.strip().split("\t")
        elems[1] = str(int(elems[1]) + ent_num)
        triples[idx] = "\t".join(['<' + elem + '>' for elem in elems])

    with open(os_path.join(data_dir, "amie", "triples.txt"), "w", encoding="utf-8") as fout:
        for line in triples:
            fout.write(line + "\n")


if __name__ == "__main__":
    # Default dataset location, relative to this script's directory.
    target_dir = os_path.join("..", "..", "data", "PowerGrid")
    data_preprocess_amie(target_dir)