import json
import os
import random
from glob import glob

# Locations of the QA annotation files whose entries carry the logical-form
# fields ("LF_original" / "LF_processed") joined onto FiD records below.
paths = [
    "data_dir/tasks/QA/WebQSP/train.json",
    "data_dir/tasks/QA/WebQSP/test.json",
    "data_dir/tasks/QA/CWQ/train.json",
    "data_dir/tasks/QA/CWQ/dev.json",
    "data_dir/tasks/QA/CWQ/test.json",
]

# Index every annotated example by its "QuestionId" so FiD records can be
# joined against it in add_lf_to_fid_data().
lf_map = {}
for p in paths:
    # Context manager closes the handle deterministically; the original
    # json.load(open(p)) leaked it.
    with open(p) as f:
        data = json.load(f)
    for d in data:
        lf_map[d["QuestionId"]] = d

# Running count of FiD examples left without a processed logical form;
# reported at the end of the script.
empty_lf = 0


def add_lf_to_fid_data(data):
    """Join logical-form annotations from ``lf_map`` onto FiD records in place.

    Each record's "id" looks like "<QuestionId>:QA"; stripping the ":QA"
    suffix yields the key into ``lf_map`` (a missing key raises KeyError,
    as in the original). Records whose annotation lacks "LF_processed" get
    empty answers and bump the module-level ``empty_lf`` counter.
    """
    global empty_lf
    for d in data:
        # Hoist the shared lookup: the original computed the same
        # lf_map[...] entry twice per record.
        anno = lf_map[d["id"].replace(":QA", "")]
        d["LF_original"] = anno.get("LF_original", [])
        d["answers"] = anno.get("LF_processed", [])
        if not d["answers"]:
            empty_lf += 1

# Collect the ids present in either the cleaned-annotation train files or the
# prepared test files, per dataset.
# NOTE(review): `valid_ids` is never read afterwards, so this loop looks like
# leftover scratch code — kept for parity, but it is likely deletable.
for dataset in ["WebQSP", "CWQ"]:
    paths = glob(f"../LLM_KGQA/save-anno-clean/{dataset.lower()}/**/*.json")
    train_data = []
    for path in paths:
        # Context manager instead of json.load(open(path)): no leaked handles.
        with open(path) as f:
            train_data.append(json.load(f))
    train_ids = {d["id"].replace(".P0", "") for d in train_data}

    paths = glob(f"../LLM_KGQA/data/{dataset.lower()}/test/*.json")
    test_data = []
    for path in paths:
        with open(path) as f:
            test_data += json.load(f)
    test_ids = {d["id"].replace(".P0", "") for d in test_data}

    # BUG FIX: sets do not support `+` — the original `test_ids + train_ids`
    # raised TypeError at import time. Set union is what was intended.
    valid_ids = test_ids | train_ids

def _load_json(path):
    """Load one JSON file with a properly closed handle."""
    with open(path) as f:
        return json.load(f)


def _save_json(obj, path):
    """Write ``obj`` as pretty-printed UTF-8 JSON to ``path``."""
    print(f"Saving to {path}")
    with open(path, "w") as f:
        json.dump(obj, f, indent=4, ensure_ascii=False)


def make_small_data(dataset):
    """Build the small-data train/test FiD splits for one dataset.

    Filters the full FiD retrieval results down to the question ids found in
    the cleaned annotations (train) and the prepared test files (test), joins
    logical forms onto each kept record via ``add_lf_to_fid_data``, and writes
    the results under ``save_dir_fid_smalldata/``.
    """
    assert dataset in ["WebQSP", "CWQ"]

    # --- train split: each annotation file holds one record; ids carry a
    # ".P0"-style part suffix that must be stripped before matching.
    paths = glob(f"../LLM_KGQA/save-anno-clean/{dataset.lower()}/**/*.json")
    train_ids = {_load_json(p)["id"].replace(".P0", "") for p in paths}
    print("len(train_ids)", len(train_ids))

    fid_dir = f"save_dir/Retrieval/pyserini/search_results/QA_{dataset}_Freebase_BM25"
    fid_data_train = _load_json(f"{fid_dir}/train_fid_SPQA.json")
    # FiD ids look like "<QuestionId>:QA"; strip the suffix to match.
    fid_data_train = [d for d in fid_data_train if d["id"].replace(":QA", "") in train_ids]
    add_lf_to_fid_data(fid_data_train)
    print("len(fid_data_train)", len(fid_data_train))

    save_dir = "save_dir_fid_smalldata"
    os.makedirs(save_dir, exist_ok=True)
    _save_json(fid_data_train, f"{save_dir}/{dataset}_train_SPQA.json")

    # --- test split: each test file holds a list of records.
    paths = glob(f"../LLM_KGQA/data/{dataset.lower()}/test/*.json")
    test_data = []
    for path in paths:
        test_data += _load_json(path)
    test_ids = {d["id"].replace(".P0", "") for d in test_data}
    print("len(test_ids)", len(test_ids))

    fid_data_test = _load_json(f"{fid_dir}/test_fid_SPQA.json")
    if dataset == "CWQ":
        # CWQ's dev split is folded into its test split.
        fid_data_test += _load_json(f"{fid_dir}/dev_fid_SPQA.json")
    fid_data_test = [d for d in fid_data_test if d["id"].replace(":QA", "") in test_ids]
    add_lf_to_fid_data(fid_data_test)
    print("len(fid_data_test)", len(fid_data_test))

    _save_json(fid_data_test, f"{save_dir}/{dataset}_test_SPQA.json")

def merge_webqsp_cwq():
    """Merge the WebQSP and CWQ small-data splits into combined train/test
    files, shuffled with a fixed seed for reproducibility."""

    def load(path):
        # Context manager so handles are closed; the original used
        # json.load(open(...)) and leaked them.
        with open(path) as f:
            return json.load(f)

    save_dir = "save_dir_fid_smalldata"
    merge_train = load(f"{save_dir}/WebQSP_train_SPQA.json") + load(f"{save_dir}/CWQ_train_SPQA.json")
    merge_test = load(f"{save_dir}/WebQSP_test_SPQA.json") + load(f"{save_dir}/CWQ_test_SPQA.json")

    # Fixed seed keeps the shuffled order reproducible across runs.
    random.seed(42)
    random.shuffle(merge_train)
    random.shuffle(merge_test)
    print("len(merge_train)", len(merge_train))
    print("len(merge_test)", len(merge_test))

    os.makedirs(save_dir, exist_ok=True)
    with open(f"{save_dir}/merge_train_SPQA.json", "w") as f:
        json.dump(merge_train, f, indent=4, ensure_ascii=False)
    with open(f"{save_dir}/merge_test_SPQA.json", "w") as f:
        json.dump(merge_test, f, indent=4, ensure_ascii=False)

if __name__ == "__main__":
    """
    python small_data.py
    """
    # Build the per-dataset small splits, then the merged splits.
    for ds in ("WebQSP", "CWQ"):
        make_small_data(ds)
    merge_webqsp_cwq()

    # tar cvzf save_dir_fid_smalldata.tar.gz save_dir_fid_smalldata
    print("empty_lf", empty_lf)