import sys
import os
import json
import logging
import numpy as np
import torch

from dataclasses import dataclass

from ascendebug.features.dump.dump_accuracy_checkpoint import set_acc_dump_dir

# Route all diagnostic output of this script to a local log file.
logging.basicConfig(filename='gen_bin_golden.log', level=logging.DEBUG)

# Maps dtype names as they appear in the attribute JSON to numpy dtypes.
DTYPE = {"float16": np.float16, "float32": np.float32, "int16": np.int16, "int32": np.int32, "int64": np.int64}


def get_input_attrs_content(input_json: str) -> dict:
    """Load the operator attribute description from a JSON file.

    Args:
        input_json: Path to the JSON file describing the operator's
            inputs/outputs (symlinks are resolved first).

    Returns:
        The parsed JSON content as a dict, or an empty dict when the
        file is not valid JSON (the error is logged, not raised).
    """
    content = {}
    input_json = os.path.realpath(input_json)
    with open(input_json, "r", encoding="utf-8") as in_handle:
        try:
            content = json.load(in_handle)
        except json.JSONDecodeError as err:
            # Log the offending file and parse position instead of the
            # original constant message, which carried no diagnostics.
            logging.error("invalid JSON in %s: %s", input_json, err)
    return content


def get_data_file(res_path, contents):
    """Map tensor names to the paths of their backing data files.

    Args:
        res_path: Directory in which the data files will live.
        contents: Iterable of tensor descriptors (dicts); each may carry
            a "data_file" entry naming its binary file.

    Returns:
        A dict mapping each descriptor's "name" to the joined file path.
        Descriptors whose "data_file" is None or absent are skipped —
        ``.get()`` tolerates a missing key, where the original subscript
        would have raised KeyError.
    """
    return {
        item["name"]: os.path.join(res_path, item["data_file"])
        for item in contents
        if item.get("data_file") is not None
    }


@dataclass
class OperatorInfo:
    """Parameters of one as_strided test case, parsed from the attribute JSON."""

    x_shape: tuple  # shape of the random input tensor x
    x_dtype: np.dtype  # numpy dtype of x (golden output is cast back to it)
    y_shape: tuple  # "size" argument of as_strided (output shape)
    stride_values: list  # "stride" argument of as_strided
    so_values: list  # storage_offset values; only element 0 is used downstream


def get_args():
    """Parse command-line arguments and load the operator description.

    Command line (user-configurable):
        sys.argv[1]: path of the attribute JSON describing inputs/outputs
        sys.argv[2]: directory where generated data files are written
        sys.argv[3]: optional accuracy-checkpoint dump directory

    Returns:
        A tuple ``(op_info, input_files, output_files)`` where ``op_info``
        is an OperatorInfo and the two dicts map tensor names to data-file
        paths inside the output directory.
    """
    logging.info("data file path: %s", sys.argv[2])
    input_file = sys.argv[1]
    res_path = sys.argv[2]
    if len(sys.argv) > 3:
        # Optional third argument redirects the accuracy-checkpoint dump.
        set_acc_dump_dir(sys.argv[3])
    content = get_input_attrs_content(input_file)
    res_abs_path = os.path.abspath(res_path)
    # os.makedirs replaces the original os.popen("mkdir -p ..."), which
    # spawned a shell (command-injection prone for paths with metacharacters)
    # and returned without waiting for the directory to actually exist.
    os.makedirs(res_abs_path, exist_ok=True)

    in_content = content.get("inputs")
    out_content = content.get("outputs")
    input_files = get_data_file(res_path, in_content)
    output_files = get_data_file(res_path, out_content)

    # Positional layout of "inputs" is fixed: [x, size, stride, storage_offset].
    x_shape = in_content[0].get("shape")
    x_dtype = DTYPE.get(in_content[0].get("dtype"), np.float32)
    y_shape = out_content[0].get("shape")
    stride_values = in_content[2].get("values")
    sot_values = in_content[3].get("values")
    op_info = OperatorInfo(x_shape, x_dtype, y_shape, stride_values, sot_values)
    return op_info, input_files, output_files


def gen_data_and_golden(op_info, input_paths, golden_paths):
    """Generate random input binaries and the as_strided golden output.

    Args:
        op_info: OperatorInfo carrying shapes, dtype, stride and
            storage_offset for the test case.
        input_paths: dict with keys "x", "size", "stride",
            "storage_offset" giving the file paths to write raw inputs to.
        golden_paths: dict with key "y" giving the golden output path.

    Side effects:
        Writes five binary files (raw ``tofile`` dumps, no header).
    """
    logging.info("op_info=%s", op_info)
    x = np.random.uniform(-10, 10, op_info.x_shape).astype(op_info.x_dtype)
    x.tofile(input_paths["x"])
    logging.info("x=%s", x)
    size = np.array(op_info.y_shape).astype(np.int32)
    size.tofile(input_paths["size"])
    logging.info("size=%s", size)
    stride = np.array(op_info.stride_values).astype(np.int32)
    stride.tofile(input_paths["stride"])
    logging.info("stride=%s", stride)
    storage_offset = np.array(op_info.so_values).astype(np.int32)
    storage_offset.tofile(input_paths["storage_offset"])
    logging.info("storage_offset=%s", storage_offset)

    # torch.from_numpy keeps x's dtype end to end; the original
    # torch.Tensor(x) always produced a float32 tensor, silently losing
    # precision for int64 (and changing intermediate precision for float16).
    # size/stride/offset are converted to plain Python ints for torch.
    res = torch.as_strided(
        torch.from_numpy(x),
        tuple(int(v) for v in size),
        tuple(int(v) for v in stride),
        int(storage_offset[0]),
    )
    golden_y = res.numpy().astype(op_info.x_dtype)

    logging.info("golden_y=%s", golden_y)
    golden_y.tofile(golden_paths["y"])


if __name__ == '__main__':
    # Usage:
    #   python <script> <attr_json> <output_dir> [<checkpoint_dump_dir>]
    # The original no-op string here described argv[3] as a "gen data type",
    # which contradicted get_args (it is the accuracy-checkpoint dump dir).
    gen_data_and_golden(*get_args())
