import json
import os
import argparse

import numpy as np

import DocTer.fuzzer.util as util


def get_array_str(arr):
    """Return the elements of *arr* joined into a comma-separated string.

    Used to render a tensor shape tuple as constructor arguments in the
    generated script, e.g. ``get_array_str((2, 3))`` -> ``"2, 3"``.

    Args:
        arr: any sequence; each element is rendered with ``str()``.

    Returns:
        str: ``", "``-joined element strings; empty string for an empty
        sequence.
    """
    # str.join is linear; the previous manual "+=" loop was quadratic.
    return ", ".join(str(item) for item in arr)


def generate_seed(test_case, numpy_arr_path):
    """Resolve string placeholders in *test_case* to arrays from an .npz file.

    Any string value that matches a key in the .npz archive is replaced
    (in place) by the corresponding ``np.ndarray``; other values are left
    untouched.

    Args:
        test_case: dict mapping parameter name -> seed value; mutated in place.
        numpy_arr_path: path to the .npz archive holding the seed arrays.

    Returns:
        dict: the same *test_case* object, with placeholders resolved.
    """
    # np.load on an .npz returns an NpzFile that keeps the file handle open;
    # use it as a context manager so the handle is released. Indexing the
    # archive materializes the array, so closing afterwards is safe.
    with np.load(numpy_arr_path) as nd_arrays:
        for param, val in test_case.items():
            if isinstance(val, str) and val in nd_arrays:
                test_case[param] = nd_arrays[val]
    return test_case


def multi_inputs_generator(test_case, target_config, numpy_arr_path, model_path, onnx_path, om_path, output_path, index):
    """Generate and execute a one-off script wrapping a torch API call.

    Writes ``pytorch_cann_generator.py``, which defines an ``nn.Module``
    whose ``forward`` invokes the API named by ``target_config["title"]``,
    saves the model (.pth), exports it to ONNX, evaluates it on the seed
    inputs loaded from ``numpy_arr_path``, and stores the result as an
    ``*_results`` .npz file. The script is then run via ``os.system``.

    Args:
        test_case: dict of parameter name -> seed value; ``np.ndarray``
            values become model inputs, all other values become constant
            constructor attributes of the generated module.
        target_config: DocTer constraint config; only ``"title"`` is used.
        numpy_arr_path: path of the .npz seed archive read by the
            generated script.
        model_path: directory for the saved ``.pth`` model.
        onnx_path: directory for the exported ``.onnx`` model.
        om_path: unused here (reserved for an .om conversion step, see the
            commented-out ``full_om_path`` below).
        output_path: directory for the ``*_results`` .npz output.
        index: test-case index, appended to names to keep files unique.

    Returns:
        dict: ``{"test_case_file_name": <model name>, "inputs": [...]}``
        mapping each array parameter to its npz key for this test case.
    """
    all_name = target_config.get("title")
    constraints = target_config.get("constraints")  # NOTE(review): fetched but never used below
    # Model's inputs (ndarray-valued parameters)
    input_list = []
    # Model's attributes (constant, non-array parameters)
    attribute_list = []
    # set True once at least one ndarray input is found (currently unused)
    flag = False
    # manifest entry written back to the *_inputs.json file by the caller
    input_dict = {
        "test_case_file_name": "",
        "inputs": []
    }
    # analyze test case: build the kwargs / .cuda() snippets for the script
    kwargs_str = ""
    cuda_str = ""
    param_list = test_case.keys()
    for param in param_list:
        seed = test_case[param]
        # ignore the parameter "name"
        if param == "name":
            continue
        # skip suspiciously large integer seeds (> 1e6)
        if type(seed) is int and seed > int(1e6):
            continue

        # np.array is considered as an input
        if type(seed) is np.ndarray:
            # reject arrays whose element count exceeds ~1e5.
            # NOTE(review): the bound is tested before the final multiply,
            # so the accepted product can somewhat exceed 1e5 — confirm
            # whether that slack is intentional.
            is_too_large = False
            space = 1
            for s in seed.shape:
                if space > int(1e5):
                    is_too_large = True
                    break
                else:
                    space *= s
            if is_too_large is True:
                continue
            # record param -> npz key (param name suffixed with the index)
            input_dict["inputs"].append({param: param + "_" + str(index)})
            # generated line moving this kwarg onto the GPU when available
            cuda_str += "        kwargs[\"%s\"] = kwargs[\"%s\"].cuda()\n" % (param, param)
            kwargs_str += "\"%s\": torch.as_tensor(input_dict[\"%s\"]), " % \
                          (param, param + "_" + str(index))
            input_list.append(param)
            flag = True
        else:
            attribute_list.append(param)
    if len(kwargs_str) > 0:
        kwargs_str = kwargs_str[:-2]  # drop trailing ", "

    # code fragments derived from the ndarray inputs
    forward_params = ""
    invoke_str = ""
    model_inputs_str = ""
    onnx_input_list = ""
    onnx_input_names = ""
    for input_param in input_list:
        forward_params += ", " + input_param
        invoke_str += "%s=%s, " % (input_param, input_param)
        # random tensors with the seed's shape, used to trace the ONNX export
        model_inputs_str += "    %s = torch.randn(%s)\n" % (input_param, get_array_str(test_case[input_param].shape))
        onnx_input_list += input_param + ", "
        onnx_input_names += "\"%s\", " % input_param

    # code fragments derived from the constant attributes
    net_params = ""
    init_params = ""
    construct_params = ""
    for attribute in attribute_list:
        net_params += str(test_case[attribute]) + ", "
        init_params += ", " + attribute
        construct_params += "        self.%s = %s\n" % (attribute, attribute)
        invoke_str += "%s=self.%s, " % (attribute, attribute)

    construct_params += "\n"
    # strip the trailing ", " separators and wrap the ONNX lists
    net_params = net_params[:-2]
    invoke_str = invoke_str[:-2]
    onnx_input_list = "(%s)" % onnx_input_list[:-2]
    onnx_input_names = "[%s]" % onnx_input_names[:-2]

    # create a new onnx builder script and the output paths it will use
    model_name = all_name + "_" + str(index)
    input_dict["test_case_file_name"] = model_name
    full_pytorch_path = os.path.join(model_path, model_name + ".pth")
    full_onnx_path = os.path.join(onnx_path, model_name + ".onnx")
    # full_om_path = os.path.join(om_path, model_name)
    full_output_path = os.path.join(output_path, model_name + "_results")
    with open("pytorch_cann_generator.py", "w+") as f:
        f.write("import numpy as np\n"
                "import torch\n"
                "from torch import nn\n\n\n"
                "class Net(nn.Module):\n"
                "    def __init__(self%s) -> None:\n" % init_params +
                "        super().__init__()\n" + construct_params +
                "    def forward(self%s):\n" % forward_params +
                "        out = %s(%s)\n" % (all_name, invoke_str) +
                "        return out\n\n\n"
                "if __name__ == \"__main__\":\n"
                "    model = Net(%s)\n" % net_params +
                "    torch.save(model, \"%s\")\n\n" % full_pytorch_path + model_inputs_str +
                "\n"
                "    # transfer *.pth model to *.onnx model\n"
                "    input_list = %s\n" % onnx_input_list +
                "    input_names = %s\n" % onnx_input_names +
                "    torch.onnx.export(model, input_list, \"%s\", input_names=input_names, output_names=[\"output\"], opset_version=14)\n\n" % full_onnx_path +
                "    input_dict = np.load(\"%s\")\n" % numpy_arr_path +
                "    kwargs = {%s}\n" % kwargs_str +
                "    model = torch.load(\"%s\")\n" % full_pytorch_path +
                "    if torch.cuda.is_available():\n"
                "        model = model.cuda()\n" + cuda_str +
                "    model.eval()\n"
                "    res = model(**kwargs).cpu()\n"
                "    result_dict = {\"%s\": res}\n" % model_name +
                "    np.savez(\"%s\", **result_dict)\n" % full_output_path)

    # execute the onnx builder
    os.system("python pytorch_cann_generator.py")

    return input_dict


def generate_models(seed_path, numpy_arr_path, model_path, onnx_path, om_path, input_path, output_path, config):
    """Generate one PyTorch/ONNX model per seed test case and record inputs.

    Loads the JSON list of seed test cases, resolves any .npz placeholders
    via :func:`generate_seed`, runs :func:`multi_inputs_generator` for each
    case, and writes the collected input manifests to
    ``<input_path>/<title>_inputs.json``.

    Args:
        seed_path: path of the JSON file holding the list of seed test cases.
        numpy_arr_path: path of the .npz seed archive; empty string skips
            placeholder resolution.
        model_path: directory for the saved ``.pth`` models.
        onnx_path: directory for the exported ``.onnx`` models.
        om_path: forwarded to the generator (currently unused there).
        input_path: directory for the ``*_inputs.json`` manifest.
        output_path: directory for the per-model result .npz files.
        config: DocTer constraint config; ``config["title"]`` names the API.
    """
    test_inputs = []
    with open(seed_path, "r") as f:
        seeds = json.load(f)
    # enumerate gives the per-case index used to keep generated files unique
    for i, seed in enumerate(seeds):
        if numpy_arr_path != "":
            seed = generate_seed(seed, numpy_arr_path)
        input_dict = multi_inputs_generator(seed, config, numpy_arr_path, model_path, onnx_path, om_path, output_path, i)
        test_inputs.append(input_dict)
    full_input_path = os.path.join(input_path, config.get("title") + "_inputs.json")
    # save input dicts to json
    with open(full_input_path, "w+") as f:
        json.dump(test_inputs, f)

if __name__ == "__main__":
    # CLI flags; every path flag defaults to the same sample directory.
    arg_parser = argparse.ArgumentParser()
    for flags, default_value in (
            (("--seed_path", "-s"), "/home/ubuntu/onnx_samples/onnx_transformer/source"),
            (("--json_path", "-j"), "torch.abs_seeds.json"),
            (("--npz_path", "-n"), "torch.abs_seeds.npz"),
            (("--model_path", "-m"), "/home/ubuntu/onnx_samples/onnx_transformer/source"),
            (("--onnx_path",), "/home/ubuntu/onnx_samples/onnx_transformer/source"),
            (("--om_path", "-o"), "/home/ubuntu/onnx_samples/onnx_transformer/source"),
            (("--input_path", "-i"), "/home/ubuntu/onnx_samples/onnx_transformer/source"),
            (("--result_path", "-r"), "/home/ubuntu/onnx_samples/onnx_transformer/source"),
    ):
        arg_parser.add_argument(*flags, default=default_value)
    args = arg_parser.parse_args()
    # Drop the trailing "_seeds.json" (11 characters) to recover the API name.
    api_name = args.json_path[:-11]
    print("[PYTORCH Model] Api Name: " + api_name)
    # Load the DocTer constraint config for this API.
    config = util.read_yaml(os.path.abspath("DocTer/constraints/pytorch/%s.yaml" % api_name))
    generate_models(os.path.join(args.seed_path, args.json_path),
                    os.path.join(args.seed_path, args.npz_path),
                    args.model_path, args.onnx_path, args.om_path, args.input_path, args.result_path, config)
