# SPDX-License-Identifier: Apache-2.0
#
# Copyright (C) Arm Ltd. 2023

import pydoc
from typing import Any, Dict, Optional

# Schema for every column spe-parser can emit: a human-readable description,
# the Python type of the value and, for enumerated fields, the meaning of
# each value. This text is rendered verbatim to users by the schema renderer,
# so descriptions must read cleanly.
__spe_parser_schema: Dict[str, Any] = {
    "cpu": {
        "description": "CPU number.",
        "type": int,
    },
    # Address Packet
    "pc": {
        "description": "Issued instruction virtual address (PC). Included for all operations.",
        "type": str,
    },
    "el": {
        "description": "Exception level which the instruction was executed in.",
        "values": {
            0: "EL0",
            1: "EL1",
            2: "EL2",
            3: "EL3",
        },
        "type": int,
    },
    "br_tgt": {
        "description": "Branch target address. Included for all other branch and exception return instructions.",
        "type": str,
    },
    "br_tgt_lvl": {
        "description": "Exception level at the target of the branch.",
        "values": {
            0: "EL0",
            1: "EL1",
            2: "EL2",
            3: "EL3",
        },
        "type": int,
    },
    "pbt": {
        "description": "Previous branch target address. The target virtual address of the most recently taken branch operation in program order before the sampled operation",
        "type": str,
    },
    "pbt_lvl": {
        "description": "Exception level at the previous branch target.",
        "values": {
            0: "EL0",
            1: "EL1",
            2: "EL2",
            3: "EL3",
        },
        "type": int,
    },
    "vaddr": {
        "description": "Data access virtual address. Included for all load, store and atomic operations.",
        "type": str,
    },
    "paddr": {
        "description": "Data access physical address. Included for all other load, store and atomic operations. Not included for all other accesses that generate an abort, or if disabled by CollectPhysicalAddress.",
        "type": str,
    },
    # Counter Packet
    "total_lat": {
        "description": "Total latency. Cycle count from the operation being dispatched for issue to the operation being microarchitecturally-finished. Included for all operations.",
        "type": int,
    },
    "issue_lat": {
        "description": "Issue latency. Cycle count from the operation being dispatched for issue to the operation being issued for execution. This counts any delay in waiting for the operation to be ready to issue. Included for all operations.",
        "type": int,
    },
    "xlat_lat": {
        "description": "Translation latency. Cycle count from a virtual address being passed to the MMU for translation to the result of the translation being available. Included for all load, store and atomic operations.",
        "type": int,
    },
    # Data Source Packet
    "data_source": {
        "description": "Data Source. Indicate where the data returned for a load operation was sourced. Refer to each CPU’s Technical Reference Manual for its supported sources. Below are data sources that N1 includes.",
        "values": {
            "L1D": "",
            "L2D": "",
            "PEER-CPU": "Snoop from peer CPU within the same DSU, NOT another CPU socket.",
            "LOCAL-CLUSTER": "L3 cache if there is any in the DSU.",
            "LL-CACHE": "",
            "PEER-CLUSTER": "Peer cluster should be a core in another DSU of the same CPU chip.",
            "REMOTE": "Remote CPUs in another socket.",
            "DRAM": "",
        },
        "type": str,
    },
    # Events Packet
    "event": {
        "description": "Indicates up to 64 events generated by the sampled operation.",
        "values": {
            "EXCEPTION-GEN": "Completed with a synchronous exception.",
            "RETIRED": "Committed its results to the architectural state of the PE, or completed with a synchronous architectural exception.",
            "L1D-ACCESS": "Load/store operation caused a cache access to at least the first level of data or unified cache.",
            "L1D-REFILL": "Load/store operation caused a refill of at least the first level of data or unified cache. This includes each data memory access that causes a refill from outside the cache. It excludes accesses that do not cause a new cache refill but are satisfied from refilling data of a previous miss.",
            "TLB-ACCESS": "Load/store operation caused an access to at least the first level of data or unified TLB.",
            "TLB-REFILL": "Load/store operation that causes a refill of a data or unified TLB, involving at least one translation table walk access. This includes each complete or partial translation table walk that causes an access to memory, including to data or translation table walk caches.",
            "NOT-TAKEN": "A conditional instruction that failed its condition code check.",
            "MISPRED": "A branch that caused a correction to the predicted program flow.",
            "LLC-ACCESS": "Load/store operation caused a cache access to at least the Last Level data or unified cache.",
            "LLC-REFILL": "Load/store operation caused an access to at least the Last Level cache but is not completed by the Last Level cache.",
            "REMOTE-ACCESS": "Load/store operation caused an access to another socket in a multi-socket system. This includes each data memory access that accesses another socket in a multi-socket system, including those that do not return data.",
            "ALIGNMENT": "Load/store operation that, due to the alignment of the address and size of data being accessed, incurred additional latency.",
            "SVE-PARTIAL-PRED": "Predicated SVE operation executed with at least one Inactive element.",
            "SVE-EMPTY-PRED": "SVE operation executed with all elements Inactive.",
        },
        "type": str,
    },
    # Operation Type Packet
    "op": {  # available in all packets
        "description": "Defines the type of operation sampled. Included for all operations.",
        "values": {
            "LD": "Load operation.",
            "ST": "Store operation.",
            "B": "Branch operation.",
        },
        "type": str,
    },
    "sve_evl": {  # available in other and ldst packets
        "description": "Effective Vector Length. the length of vector operated on by the sampled operation",
        "type": int,
    },
    "sve_pred": {  # available in other and ldst packets
        "description": "Predicated SVE operation",
        "values": {
            True: "Predicated SVE operation. The operation is an SVE operation that writes to a vector destination register under a Governing predicate using either zeroing or merging predication",
            False: "Not predicated",
        },
        "type": bool,
    },
    "condition": {  # available in other and branch packets
        "description": "Operation conditional.",
        "values": {
            True: "Conditional operation or select.",
            False: "Unconditional operation.",
        },
        "type": bool,
    },
    "subclass": {  # available in other and ldst packets
        "description": "Second-level instruction class. Defines the type of instruction.",
        "values": {
            "(LDST)GP-REG": "A load/store targeting the general-purpose registers, other than an atomic operation, load-acquire, store-release or exclusive.",
            "(LDST)SIMD-FP": "A load/store targeting the SIMD&FP registers.",
            "(LDST)UNSPEC-REG": "A load/store targeting unspecified registers.",
            "(LDST)NV-SYSREG": "An MRS or MSR operation at EL1 transformed to a load/store when HCR_EL2.NV2 is 1.",
            "(LDST)MTE-TAG": "A load/store of an Allocation Tag or multiple Allocation Tags.",
            "(LDST)MEMCPY": "A load/store from a Memory Copy operation.",
            "(LDST)MEMSET": "A store from a Memory Set operation.",
            "(LDST/OTHER)SVE": "SVE operation",
            "(OTHER)OTHER": "Other operation",
        },
        "type": str,
    },
    # Operation Type Packet: Load/Store
    "ar": {
        "description": "Acquire/Release.",
        "values": {
            True: "Load/store/atomic with Acquire or Release semantics.",
            False: "Load/store/atomic without Acquire or Release semantics.",
        },
        "type": bool,
    },
    "excl": {
        "description": "Exclusive.",
        "values": {
            True: "Load/store with Exclusive.",
            False: "Load/store/atomic without Exclusive.",
        },
        "type": bool,
    },
    "atomic": {
        "description": "Atomic load/store.",
        "values": {
            True: "Atomic.",
            False: "Not atomic.",
        },
        "type": bool,
    },
    "sve_sg": {
        "description": "Gather/scatter load/store",
        "values": {
            True: "Gather load or scatter store.",
            False: "Not gather load or scatter store.",
        },
        "type": bool,
    },
    # Operation Type Packet: Branch
    "indirect": {
        "description": "Branch type.",
        "values": {
            True: "Indirect branch.",
            False: "Direct branch.",
        },
        "type": bool,
    },
    # Operation Type Packet: Other
    "sve_fp": {
        "description": "Floating-point operation",
        "values": {
            True: "Floating-point",
            False: "Integer",
        },
        "type": bool,
    },
    # Timestamp Packet
    "ts": {
        "description": "Timestamp. When the operation was sampled.",
        "type": int,
    },
    # Context Packet
    "context": {
        "description": "Context. Provides context information for the record",
        "type": str,
    },
}

# Columns emitted for a load/store record, in output order.
LDST_COLS = (
    "cpu op pc el atomic excl ar subclass sve_evl sve_pred sve_sg "
    "event issue_lat total_lat vaddr xlat_lat paddr data_source context ts"
).split()

# Columns emitted for a branch record, in output order.
BRANCH_COLS = (
    "cpu op pc el condition indirect event issue_lat total_lat "
    "br_tgt br_tgt_lvl pbt pbt_lvl context ts"
).split()

# Columns emitted for an "other" (non load/store, non branch) record,
# in output order.
OTHER_COLS = (
    "cpu op pc el subclass sve_evl sve_pred sve_fp condition "
    "event issue_lat total_lat context ts"
).split()


def __gen_default_record(cols) -> Dict[str, Any]:
    """Build a record mapping each column in *cols* to its type's zero value.

    The zero value is obtained by calling the schema type with no arguments
    ("" for str, 0 for int, False for bool); columns whose schema type is not
    one of those three are omitted, matching the schema's declared types.
    """
    record: Dict[str, Any] = {}
    for name in cols:
        col_type = __spe_parser_schema[name]["type"]
        if col_type in (str, int, bool):
            record[name] = col_type()
    return record


def get_branch_default_record() -> Dict[str, Any]:
    """Return a branch record with every column set to its default value."""
    defaults = __gen_default_record(BRANCH_COLS)
    return defaults


def get_ldst_default_record() -> Dict[str, Any]:
    """Return a load/store record with every column set to its default value."""
    defaults = __gen_default_record(LDST_COLS)
    return defaults


def get_other_default_record() -> Dict[str, Any]:
    """Return an "other" record with every column set to its default value."""
    defaults = __gen_default_record(OTHER_COLS)
    return defaults


def get_schema_renderer():
    """Factory: return a plain-text renderer bound to the SPE parser schema."""
    return PlainSchemaRenderer(__spe_parser_schema)


class SchemaRenderer:
    """Base class for schema renderers.

    Holds the schema mapping; subclasses override render() to produce
    output in a particular format.
    """

    def __init__(self, schema) -> None:
        # Mapping of column name -> metadata (description, values, type).
        self.schema = schema

    def render(self, additional_contents: Optional[str] = None) -> None:
        """Render the schema content.

        If additional_contents is non-empty, render it together as well.
        Base implementation is a no-op; concrete renderers override it.
        """


class PlainSchemaRenderer(SchemaRenderer):
    """Render the schema as plain text through the pydoc terminal pager."""

    def render(self, additional_contents: Optional[str] = None) -> None:
        """Render the schema content, with optional extra text first.

        If additional_contents is non-empty it is shown before the schema.
        The assembled text is displayed via pydoc.pager.
        """
        parts = []
        if additional_contents:
            parts.append(f"{additional_contents}\n\n")
        parts.append("File Schema:\n")
        # Sections appear in the same order the parser documents them.
        parts.append(self.__section("Load/Store Schema:\n", LDST_COLS))
        parts.append(self.__section("Branch Schema:\n", BRANCH_COLS))
        parts.append(self.__section("Other Schema:\n", OTHER_COLS))
        pydoc.pager("".join(parts))

    def __section(self, header: str, cols) -> str:
        # One titled section: the header line followed by every column entry.
        pieces = [header]
        for col in cols:
            pieces.append(self.__describe(col))
        return "".join(pieces)

    def __describe(self, col) -> str:
        # Format a single column: name, description and (if enumerated)
        # the permitted values with their meanings.
        entry = self.schema[col]
        text = f"{col}:\n"
        text += f"  description: {entry['description']}\n"
        values = entry.get("values")
        if values:
            text += f"  values({entry['type'].__name__}):\n"
            text += "".join(f"      {k}: {v}\n" for k, v in values.items())
        text += "\n"
        return text
