import json
import sys
import ir_datasets

# Template for an OpenSearch neural (k-NN) search request body.
# main() fills in "query_text" and "model_id" before serializing one
# JSON line per dataset query.
query = {
    "query": {"neural": {"body_knn": {"query_text": "", "model_id": ""}}},
}


def main(write_file_name, model_id):
    """Write OpenSearch neural-query JSON lines built from MS MARCO QnA eval queries.

    Parameters
    ----------
    write_file_name : str
        Path of the output file; one JSON query object per line.
    model_id : str
        OpenSearch ML model id inserted into every neural query.
    """
    dataset = ir_datasets.load("msmarco-qna/eval")

    max_lines = 100000  # cap on the number of output lines
    lines_count = 0

    # `with` guarantees the file is closed even if iteration raises
    # (original left the handle open on an exception).
    with open(write_file_name, 'w') as transformed_file:
        for qq in dataset.queries_iter():
            if lines_count >= max_lines:
                # Original checked `> 100000` *after* writing, letting an
                # extra 100001st line through; check before writing instead.
                break

            # Build a fresh request per query instead of mutating the shared
            # module-level template (avoids cross-call shared state).
            # qq[1] is the query text field of the dataset's query tuple.
            body = {
                "query": {
                    "neural": {
                        "body_knn": {
                            "query_text": qq[1],
                            "model_id": model_id,
                        }
                    }
                }
            }
            line = json.dumps(body)

            # Strip escape sequences from the serialized JSON so downstream
            # tooling doesn't choke on them.
            # NOTE(review): this edits the JSON *text*, so it also mangles
            # legitimate \uXXXX escapes and can yield invalid JSON for
            # queries containing special characters — consider sanitizing
            # qq[1] before json.dumps instead.
            # (The original's final replace('\\r\\n', ' ') was dead code:
            # '\\n' and '\\' were already removed earlier in the chain.)
            for escaped in ('\\t', '\\n', '\\u', '\\"', '\\'):
                line = line.replace(escaped, '')

            transformed_file.write(line)
            transformed_file.write("\n")
            lines_count += 1

    print("Line added {}".format(lines_count))


if __name__ == "__main__":
    main(sys.argv[1], sys.argv[2])
