import argparse
import os
import subprocess
from concurrent.futures import ThreadPoolExecutor
from datetime import datetime


def run_single_testing(constraint_file_list, args):
    """Run the seed- and model-generation pipeline for each constraint file.

    For every file name in *constraint_file_list* this:
      1. invokes ``batch_generate_seed_inputs.py`` to produce seed inputs, and
      2. invokes ``batch_generate_tf_models.py`` to build TF models, CANN (om)
         models and the corresponding model inputs.

    Each run writes under a fresh ``<api_name>/<timestamp>`` subdirectory so
    repeated runs never clobber each other.

    Args:
        constraint_file_list: file names (not paths) of constraint files
            located inside ``args.c``; the stem (name without extension) is
            taken as the API name.
        args: parsed CLI namespace providing ``c``, ``d``, ``dataset``,
            ``tf_path``, ``om_path`` and ``max_iter``.

    Returns:
        None.  Helper-script failures are not raised (matching the previous
        fire-and-forget ``os.system`` behavior); output goes to stdout/stderr.
    """
    # Both helper scripts must be run from the onnx_transformer checkout.
    transformer_dir = os.path.expanduser("~/onnx_samples/onnx_transformer")
    for constraint_file_name in constraint_file_list:
        # Strip the extension (previously a hard-coded "[:-5]" for ".json")
        # to recover the raw-op API name.
        full_api_name = os.path.splitext(constraint_file_name)[0]
        constraint_path = os.path.join(args.c, constraint_file_name)
        # Timestamped subdirectory keeps runs for the same API separate.
        exp_str = datetime.now().strftime('%Y%m%d_%H%M%S')
        testing_path = os.path.join(full_api_name, exp_str)

        # Step 1: batch generate seeds.
        seed_output_path = os.path.join(args.dataset, "tensorflow", "seed", testing_path)
        os.makedirs(seed_output_path, exist_ok=True)
        # Argument list + cwd instead of a "cd ...; python3 ..." shell string:
        # no shell injection surface, no quoting pitfalls.  check=False keeps
        # the old best-effort semantics (exit status ignored).
        subprocess.run(
            ["python3", "batch_generate_seed_inputs.py",
             "-d=%s" % args.d,
             "-c=%s" % constraint_path,
             "-o=%s" % seed_output_path,
             "--max_iter=%d" % args.max_iter],
            cwd=transformer_dir, check=False)

        # Step 2: batch generate tf models && model inputs.
        seed_json_name = full_api_name + "_seeds.json"
        seed_npz_name = full_api_name + "_seeds.npz"
        tf_model_path = os.path.join(args.tf_path, testing_path)
        os.makedirs(tf_model_path, exist_ok=True)
        cann_model_path = os.path.join(args.om_path, testing_path)
        os.makedirs(cann_model_path, exist_ok=True)
        model_input_path = os.path.join(args.dataset, "tensorflow", "input", testing_path)
        os.makedirs(model_input_path, exist_ok=True)
        subprocess.run(
            ["python3", "batch_generate_tf_models.py",
             "-s=%s" % seed_output_path,
             "-j=%s" % seed_json_name,
             "-n=%s" % seed_npz_name,
             "-m=%s" % tf_model_path,
             "-o=%s" % cann_model_path,
             "-i=%s" % model_input_path],
            cwd=transformer_dir, check=False)

        # Steps 3 and 4 (executing the generated TF and CANN models via
        # batch_run_tf_models.py / batch_run_cann_models.py) are currently
        # disabled in this pipeline.


if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("-c", default="/home/ubuntu/onnx_samples/onnx_transformer/DocTer/constraints/tf_raw_ops",
                        help="constraint path")
    parser.add_argument("-d", default="/home/ubuntu/onnx_samples/onnx_transformer/DocTer/tensorflow_dtypes.yml",
                        help="dtype path")
    parser.add_argument("--dataset", default="/home/ubuntu/Ascend/dataset", help="dataset path")
    parser.add_argument("--tf_path", default="/home/ubuntu/Ascend/models/tensorflow")
    parser.add_argument("--om_path", default="/home/ubuntu/Ascend/models/om")
    parser.add_argument("--tf_result_path", default="/home/ubuntu/Ascend/results/tf2cann/tensorflow")
    parser.add_argument("--om_result_path", default="/home/ubuntu/Ascend/results/tf2cann/om")
    parser.add_argument("--max_iter", "-m", type=int, default=1)
    args = parser.parse_args()

    # One task per constraint file found in the constraint directory.
    file_list = os.listdir(args.c)

    # The context manager guarantees the pool is shut down and all tasks
    # finish before the script exits; the original code never joined the
    # pool.  Calling result() on each future surfaces worker exceptions,
    # which ThreadPoolExecutor otherwise swallows silently.
    with ThreadPoolExecutor(max_workers=30) as pool:
        # run_single_testing expects a list of file names, so each task
        # gets a single-element list.
        futures = [pool.submit(run_single_testing, [constraint_file], args)
                   for constraint_file in file_list]
        for future in futures:
            future.result()
