import argparse
import os
from datetime import datetime
from concurrent.futures import ThreadPoolExecutor


def run_single_testing(constraint_file_list, args):
    """Run the two-step fuzzing pipeline for each constraint file.

    For every constraint file: (1) batch-generate seed inputs via
    ``batch_generate_seed_inputs.py``, then (2) batch-generate and execute
    the PyTorch/ONNX/CANN models via
    ``batch_generate_and_run_pytorch_models.py``. Each run gets a fresh
    timestamped output subdirectory.

    Args:
        constraint_file_list: file names (relative to ``args.c``) of the
            per-API constraint files to process.
        args: parsed CLI namespace providing all input/output paths and
            ``max_iter``.
    """
    for constraint_file_name in constraint_file_list:
        # Strip the trailing ".json" extension to recover the API name.
        # NOTE(review): assumes every listed file has a 5-char extension
        # like ".json" — confirm the constraint directory holds only these.
        full_api_name = constraint_file_name[:-5]

        # Step 1: batch generate seeds.
        constraint_path = os.path.join(args.c, constraint_file_name)
        # Timestamp makes each invocation's output directory unique.
        exp_str = datetime.now().strftime('%Y%m%d_%H%M%S')
        testing_path = os.path.join(full_api_name, exp_str)
        seed_output_path = os.path.join(args.dataset, "pytorch", "seed", testing_path)
        # Use os.makedirs instead of shelling out to `mkdir -p`: no shell
        # spawn, and permission errors raise instead of being ignored.
        os.makedirs(seed_output_path, exist_ok=True)
        os.system("cd ~/onnx_samples/onnx_transformer;"
                  "python3 batch_generate_seed_inputs.py "
                  "-d=%s -c=%s -o=%s --max_iter=%d" % (args.d, constraint_path, seed_output_path, args.max_iter))

        # Step 2: batch generate pytorch models && model inputs && execute pytorch models
        seed_json_name = full_api_name + "_seeds.json"
        seed_npz_name = full_api_name + "_seeds.npz"
        pt_model_path = os.path.join(args.pt_path, testing_path)
        onnx_model_path = os.path.join(args.onnx_path, testing_path)
        cann_model_path = os.path.join(args.om_path, testing_path)
        model_input_path = os.path.join(args.dataset, "pytorch", "input", testing_path)
        model_output_path = os.path.join(args.pt_result_path, testing_path)
        for out_dir in (pt_model_path, onnx_model_path, cann_model_path,
                        model_input_path, model_output_path):
            os.makedirs(out_dir, exist_ok=True)
        os.system("cd ~/onnx_samples/onnx_transformer;"
                  "python3 batch_generate_and_run_pytorch_models.py "
                  "-s=%s -j=%s -n=%s -m=%s -o=%s -i=%s -r=%s --onnx_path=%s" %
                  (seed_output_path, seed_json_name, seed_npz_name, pt_model_path, cann_model_path, model_input_path,
                   model_output_path, onnx_model_path))


if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("-c", default="/home/ubuntu/onnx_samples/onnx_transformer/DocTer/constraints/pytorch",
                        help="constraint path")
    parser.add_argument("-d", default="/home/ubuntu/onnx_samples/onnx_transformer/DocTer/pytorch_dtypes.yml",
                        help="dtype path")
    parser.add_argument("--dataset", default="/home/ubuntu/onnx_samples/onnx_transformer/pytorch2tf/source")
    parser.add_argument("--pt_path", default="/home/ubuntu/onnx_samples/onnx_transformer/pytorch2tf/models/pytorch")
    parser.add_argument("--onnx_path", default="/home/ubuntu/onnx_samples/onnx_transformer/pytorch2tf/models/onnx")
    parser.add_argument("--om_path", default="/home/ubuntu/Ascend/models/om")
    parser.add_argument("--pt_result_path", default="/home/ubuntu/onnx_samples/onnx_transformer/pytorch2tf/results/pytorch")
    parser.add_argument("--om_result_path", default="/home/ubuntu/Ascend/results/pt2cann/om")
    parser.add_argument("--max_iter", "-m", type=int, default=1500)
    args = parser.parse_args()

    # Slice the constraint file list into fixed-size batches; each batch is
    # processed sequentially by one worker thread.
    chunk_size = 5
    file_list = os.listdir(args.c)
    constraint_files_list = [file_list[i:i + chunk_size]
                             for i in range(0, len(file_list), chunk_size)]

    # Context manager guarantees the pool is shut down and all submitted
    # work has finished before the script exits.
    with ThreadPoolExecutor(max_workers=30) as pool:
        futures = [pool.submit(run_single_testing, chunk, args)
                   for chunk in constraint_files_list]
        # Re-raise any exception from a worker; without this, failures in
        # run_single_testing would be silently swallowed by the executor.
        for future in futures:
            future.result()
