import json
import os
import time

# Names of the config fields to sweep (currently only the 2-field case is supported).
# batch_build_mode: "batch" rewrites the swept fields and builds once per value
# combination; "once" writes the base config as-is and triggers a single build.
batch_build_mode = ["batch", "once"][1]  # currently "once"
field_names = ['build_mode', 'l1_graph_sample_ratio']
# Each entry pairs a fixed value for field_names[0] with the list of candidate
# values tried for field_names[1] (see get_all_combinations below).
field_values = [
    ["annlite", [0.05, 0.1, 0.15, 0.2, 0.25]],
]

# Expand the sweep specification into concrete per-build value tuples.
def get_all_combinations(field_values):
    """Return every (line[0], candidate) pair, one per candidate in line[1].

    Each element of *field_values* is a 2-item sequence: a fixed value for the
    first swept field and a list of candidate values for the second one.
    """
    return [(line[0], value) for line in field_values for value in line[1]]

# In batch mode, resolve all value combinations up front and echo them so the
# operator can see what will be built; in once mode just announce the build.
value_sets = None
if batch_build_mode == "once":
    print("Build with config:")
elif batch_build_mode == "batch":
    value_sets = get_all_combinations(field_values)
    print("All Test Values:")
    for combo in value_sets:
        print(combo)

# Path of the JSON build-config file that is rewritten before each build.
json_file_path = 'build_config.json'
# Base parameter sets, one per dataset. Exactly one of them is assigned to
# `values` below and copied into the build config verbatim.
# BigANN 1M subset (uint8 features).
values_bigann1m = {
    "build_mode": "annlite",
    "dataset_path": "../../dataset/vector-ssd/bigann/bigann_1M.bbin",
    "query_path": "../../dataset/vector-ssd/bigann/bigann_query.bbin",
    "gt_path": "../../dataset/vector-ssd/bigann/bigann_1M_gt.bin",
    "feature_type": "uint8",
    "cluster_count": 32000,
    "train_ratio": 1,
    "pq_bucket": 32,
    "pq_bit": 8,
    "cluster_redundancy": 2,
    "alignment_size": 4096,
    "l1_graph_R": 30,
    "l2_graph_R": 30,
    "l1_graph_sample_ratio": 0.1,
    "evaluation": 0,
    "overwrite": 1,
    "graph_type": "nsg",
    "diskann_executor_path": "/home/ljl/Code/DiskANN/build/apps/build_disk_index",
    "diskann_build_L": 100,
    "diskann_build_dram_limit": 120,
    "diskann_page_size": 4096,
    "max_query": 1000,
    "gt_k": 100
}

# SIFT 1M dataset (float32 features).
values_sift1m = {
    "build_mode": "annlite",
    "dataset_path": "../../dataset/vector-ssd/sift1m/sift_1m_base.fbin",
    "query_path": "../../dataset/vector-ssd/sift1m/sift_query.fbin",
    "gt_path": "../../dataset/vector-ssd/sift1m/sift_1m_groundtruth.fbin",
    "feature_type": "float32",
    "cluster_count": 32000,
    "train_ratio": 1,
    "pq_bucket": 32,
    "pq_bit": 8,
    "cluster_redundancy": 2,
    "alignment_size": 4096,
    "l1_graph_R": 30,
    "l2_graph_R": 30,
    "l1_graph_sample_ratio": 0.1,
    "evaluation": 0,
    "overwrite": 0,
    "graph_type": "nsg",
    "diskann_executor_path": "/home/ljl/Code/DiskANN/build/apps/build_disk_index",
    "diskann_build_L": 100,
    "diskann_build_dram_limit": 120,
    "diskann_page_size": 4096,
    "max_query": 1000,
    "gt_k": 100
}

# BigANN 100M subset (uint8 features; larger cluster_count for the bigger set).
values_bigann100m = {
    "build_mode": "annlite",
    "dataset_path": "../../dataset/vector-ssd/bigann/bigann_100M.bbin",
    "query_path": "../../dataset/vector-ssd/bigann/bigann_query.bbin",
    "gt_path": "../../dataset/vector-ssd/bigann/bigann_100M_gt.bin",
    "feature_type": "uint8",
    "cluster_count": 1600000,
    "train_ratio": 1,
    "pq_bucket": 32,
    "pq_bit": 8,
    "cluster_redundancy": 2,
    "alignment_size": 4096,
    "l1_graph_R": 30,
    "l2_graph_R": 30,
    "l1_graph_sample_ratio": 0.1,
    "evaluation": 0,
    "overwrite": 0,
    "graph_type": "nsg",
    "diskann_executor_path": "/home/ljl/Code/DiskANN/build/apps/build_disk_index",
    "diskann_build_L": 100,
    "diskann_build_dram_limit": 120,
    "diskann_page_size": 4096,
    "max_query": 1000,
    "gt_k": 100
}

# BigANN 10M subset (uint8 features).
values_bigann10m = {
    "build_mode": "annlite",
    "dataset_path": "../../dataset/vector-ssd/bigann/bigann_10M.bbin",
    "query_path": "../../dataset/vector-ssd/bigann/bigann_query.bbin",
    "gt_path": "../../dataset/vector-ssd/bigann/bigann_10M_gt.bin",
    "feature_type": "uint8",
    "cluster_count": 230000,
    "train_ratio": 1,
    "pq_bucket": 32,
    "pq_bit": 8,
    "cluster_redundancy": 2,
    "alignment_size": 4096,
    "l1_graph_R": 30,
    "l2_graph_R": 30,
    "l1_graph_sample_ratio": 0.1,
    "evaluation": 0,
    "overwrite": 1,
    "graph_type": "nsg",
    "diskann_executor_path": "/home/ljl/Code/DiskANN/build/apps/build_disk_index",
    "diskann_build_L": 100,
    "diskann_build_dram_limit": 120,
    "diskann_page_size": 4096,
    "max_query": 1000,
    "gt_k": 100
}

# Select the active dataset parameter set (edit here to build a different dataset).
values = values_bigann1m
print(values)

if __name__ == "__main__":
    def _write_config_and_build(config):
        """Persist *config* to the JSON build-config file, then run the builder.

        Sleeps briefly before the run so the config file is flushed to disk,
        and after it so the builder's result files have time to be written.
        """
        with open(json_file_path, 'w') as f:
            json.dump(config, f, indent=4)
        time.sleep(2)
        print("start building")
        # -u keeps the builder's output unbuffered so progress shows up live.
        os.system("python -u annlite_builder.py")
        time.sleep(1)  # wait briefly for result files to be written

    # Reset the index-path CSV (the builder presumably appends to it — TODO confirm).
    with open("indexpath.csv", "w") as f:
        f.write("output folder,\n")

    # Load the existing build configuration as the starting point.
    with open(json_file_path, 'r') as f:
        build_config = json.load(f)

    # Overlay the selected dataset's base parameters.
    build_config.update(values)

    start_time = time.time()

    if batch_build_mode == "batch":
        # One build per value combination: set each swept field, then run.
        for value_set in value_sets:
            for field_name, field_value in zip(field_names, value_set):
                build_config[field_name] = field_value
                print(f"set {field_name} to {field_value}")
            _write_config_and_build(build_config)
    elif batch_build_mode == "once":
        # Single build with the base configuration only.
        _write_config_and_build(build_config)

    end_time = time.time()
    print("Batch build done")
    if batch_build_mode == "batch":
        # Recap the tested combinations at the end of the run.
        print("All Test Values:")
        for value_set in value_sets:
            print(value_set)
    print(f"Total time: {round(end_time - start_time, 2)} seconds")
    
