{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "190dc715",
   "metadata": {
    "scrolled": false
   },
   "outputs": [],
   "source": [
    "# Export an ONNX model to a plain-text description (model.txt) plus one raw\n",
    "# .bin file per weight tensor, all written into DEST_FOLDER.\n",
    "import onnx\n",
    "import onnx.numpy_helper\n",
    "import os\n",
    "import string\n",
    "import numpy as np\n",
    "import math\n",
    "\n",
    "# Input model path and output folder name (resolved relative to the CWD).\n",
    "ONNX_FILENAME = \"<ONNX_FILENAME.onnx>\"\n",
    "DEST_FOLDER = \"<DEST_FOLDER>\"\n",
    "# Optional weight transforms: fp32 -> fp16 cast, fp32 -> uint8 quantization.\n",
    "CONVERT_TO_FP16 = False\n",
    "QUANTIZE_UINT8 = False\n",
    "# When True, echo every emitted model.txt line to stdout.\n",
    "DBG_PRINT_OUT = False\n",
    "\n",
    "model = onnx.load(ONNX_FILENAME)\n",
    "output_dir = os.path.join(os.getcwd(), DEST_FOLDER)\n",
    "\n",
    "# Create the output folder, or empty it if it already exists, so stale files\n",
    "# from a previous run cannot mix into the new export.\n",
    "try:\n",
    "    os.mkdir(output_dir)\n",
    "except FileExistsError:\n",
    "    for filename in os.listdir(output_dir):\n",
    "         os.remove(os.path.join(output_dir, filename))\n",
    "            \n",
    "def quantize(a: np.ndarray, t: str, op_type: str, in_out_index: int, from_left: float = 0.001, from_right: float = 0.001):\n",
    "    \n",
    "    q = True\n",
    "    \n",
    "    if (op_type == \"Conv\" and in_out_index == 2) or \\\n",
    "        (op_type == \"InstanceNormalization\" and in_out_index != 0) or \\\n",
    "        (op_type == \"Resize\" and in_out_index == 2):\n",
    "        q = False\n",
    "    \n",
    "    if q:\n",
    "        flat = a.flatten().tolist()\n",
    "        s = [f for f in flat if math.isfinite(f)]\n",
    "        s.sort()\n",
    "        if len(s) == 1 and len(flat) == 1:\n",
    "            scale = abs(flat[0])\n",
    "            zero = 0 if flat[0] >= 0 else 2\n",
    "            a = np.array([1], dtype=\"ubyte\")\n",
    "            t = \"uint8[\" + str(scale) + \",\" + str(zero) + \"]\"\n",
    "        elif len(s) >= 2:\n",
    "            left = s[int(len(s) * from_left)]\n",
    "            right = s[int(len(s) * from_right * -1 - 1)]\n",
    "            if left > 0 and right > 0:\n",
    "                left = 0\n",
    "            elif left < 0 and right < 0:\n",
    "                right = 0\n",
    "            if right > left:\n",
    "                scale = (right - left) / 255.0\n",
    "                zero = int(abs(left) / scale)\n",
    "                if zero > 255: zero = 255\n",
    "                a = (a / scale) + zero\n",
    "                a = np.clip(a, 0, 255).astype(\"ubyte\")\n",
    "                t = \"uint8[\" + str(scale) + \",\" + str(zero) + \"]\"\n",
    "\n",
    "    return a, t\n",
    "            \n",
    "def add_line_to_model(line: str):\n",
    "    \"\"\"Append a single line of text to model.txt in the global output_dir.\"\"\"\n",
    "    model_txt = os.path.join(output_dir, \"model.txt\")\n",
    "    with open(model_txt, \"a\") as out:\n",
    "        out.write(line + \"\\n\")\n",
    "\n",
    "def get_final_name(name: str):\n",
    "    \n",
    "    final_name = \"\"\n",
    "    for c in name:\n",
    "        if c in string.ascii_letters + string.digits:\n",
    "            final_name += c\n",
    "        else:\n",
    "            final_name += \"_\" + format(ord(c), 'X') + \"_\"\n",
    "    \n",
    "    return final_name\n",
    "\n",
    "def search_name(name: str, node: onnx.NodeProto, in_out_index: int):\n",
    "    \"\"\"Resolve a tensor name and, for initializers, export its data to disk.\n",
    "\n",
    "    The name must match exactly one of: value_info entry, graph input,\n",
    "    graph output, or initializer (otherwise ValueError). Initializer data\n",
    "    is written as <sanitized_name>.bin into output_dir, optionally cast to\n",
    "    fp16 / quantized to uint8; Gemm weights with transA/transB set are\n",
    "    stored pre-transposed and the attribute is removed from `node`.\n",
    "\n",
    "    in_out_index is the slot of `name` within node.input (>= 0); callers\n",
    "    pass negative values for output slots.\n",
    "\n",
    "    Returns (final_name, weights, input_idxs, output_idxs, graph_inputs,\n",
    "    graph_outputs, shape) where shape is \"d0,d1,...\" for activations or\n",
    "    \"dtype:d0,d1,...\" for exported weights.\n",
    "    \"\"\"\n",
    "\n",
    "    # Every place the raw name can occur in the graph.\n",
    "    weights = [t for t in model.graph.initializer if t.name == name]\n",
    "    input_idxs = [i for i, n in enumerate(model.graph.node) for x in n.input if x == name]\n",
    "    output_idxs = [i for i, n in enumerate(model.graph.node) for o in n.output if o == name]\n",
    "    graph_inputs = [i for i in model.graph.input if i.name == name]\n",
    "    graph_outputs = [o for o in model.graph.output if o.name == name]\n",
    "\n",
    "    shapes = [i for i in model.graph.value_info if i.name == name]\n",
    "    shape = \"\"\n",
    "    \n",
    "    name = get_final_name(name)\n",
    "    \n",
    "    # An initializer can also carry a value_info entry; treat it as a weight.\n",
    "    if len(shapes) == 1 and len(weights) == 1:\n",
    "        shapes = []\n",
    "\n",
    "    # The name must resolve to exactly one role.\n",
    "    if len(shapes) + len(graph_inputs) + len(graph_outputs) + len(weights) != 1:\n",
    "        raise ValueError(\"Error: \" + name)\n",
    "    elif len(shapes) == 1:\n",
    "        shape = \",\".join(str(d.dim_value) for d in shapes[0].type.tensor_type.shape.dim)\n",
    "    elif len(graph_inputs) == 1:\n",
    "        shape = \",\".join(str(d.dim_value) for d in graph_inputs[0].type.tensor_type.shape.dim)\n",
    "    elif len(graph_outputs) == 1:\n",
    "        shape = \",\".join(str(d.dim_value) for d in graph_outputs[0].type.tensor_type.shape.dim)\n",
    "    elif len(weights) == 1:\n",
    "        \n",
    "        a = onnx.numpy_helper.to_array(weights[0])\n",
    "        \n",
    "        # Optional fp32 -> fp16 down-cast of the weight data.\n",
    "        if CONVERT_TO_FP16 == True and str(a.dtype) == \"float32\":\n",
    "            a = a.astype(\"float16\")\n",
    "        \n",
    "        # Only these dtypes are supported by the .bin writer below.\n",
    "        t = str(a.dtype)\n",
    "        if t != \"float32\" and t != \"int64\" and t != \"float16\":\n",
    "            raise ValueError(\"Error\")\n",
    "        \n",
    "        # Optional fp32 -> uint8 quantization (may rewrite both a and t).\n",
    "        if QUANTIZE_UINT8 == True and str(a.dtype) == \"float32\":\n",
    "            a, t = quantize(a, t, node.op_type, in_out_index)\n",
    "            \n",
    "        # Write arr as raw bytes to <n>.bin and record \"dtype:shape\" in the\n",
    "        # enclosing `shape` variable (via nonlocal).\n",
    "        def save_to_disk(n, arr):\n",
    "            nonlocal shape\n",
    "            shape = t + \":\" + \",\".join(str(d) for d in arr.shape)\n",
    "            n = n + \".bin\"\n",
    "            arr.tofile(os.path.join(output_dir, n))\n",
    "            return n\n",
    "            \n",
    "        if node.op_type == \"Gemm\":\n",
    "            # transA applies to input slot 0, transB to slot 1. When set,\n",
    "            # store the weight already transposed and drop the attribute so\n",
    "            # the emitted node no longer requests a transpose.\n",
    "            transA = next(iter(a for a in node.attribute if a.name == \"transA\" and a.i != 0 and in_out_index == 0), None)\n",
    "            transB = next(iter(a for a in node.attribute if a.name == \"transB\" and a.i != 0 and in_out_index == 1), None)\n",
    "            trans = False\n",
    "            if transA is not None:\n",
    "                node.attribute.remove(transA)\n",
    "                trans = True\n",
    "            if transB is not None:\n",
    "                node.attribute.remove(transB)\n",
    "                trans = True\n",
    "            if trans:\n",
    "                a = np.transpose(a)\n",
    "                name = name + \"_transposed\"\n",
    "        elif node.op_type == \"Conv\":\n",
    "            if in_out_index == 0 or in_out_index == 1:\n",
    "                if len(a.shape) != 4:\n",
    "                    raise ValueError('Error')\n",
    "                # Also export a (0,2,3,1)-transposed copy under the _nhwc\n",
    "                # suffix; the untransposed tensor is written below as _nchw.\n",
    "                save_to_disk(name + \"_nhwc\", np.transpose(a, (0, 2, 3, 1)))\n",
    "                name = name + \"_nchw\"\n",
    "                \n",
    "        name = save_to_disk(name, a)\n",
    "\n",
    "    else:\n",
    "        raise ValueError(\"Error\")\n",
    "\n",
    "    return name, weights, input_idxs, output_idxs, graph_inputs, graph_outputs, shape\n",
    "\n",
    "# Number of nodes seen per operator type (for the final summary).\n",
    "op_stats = {}\n",
    "\n",
    "# Emit one model.txt line per node:\n",
    "#   <name>:<op>*input:<in>(<shape>);...*output:<out>(<shape>);...[*attrs]\n",
    "for idx, node in enumerate(model.graph.node):\n",
    "    \n",
    "    if node.op_type in op_stats:\n",
    "        op_stats[node.op_type] += 1;\n",
    "    else:\n",
    "        op_stats[node.op_type] = 1;\n",
    "    \n",
    "    line = []\n",
    "    \n",
    "    line.append(node.name + \":\" + node.op_type)\n",
    "\n",
    "    if len(node.input) == 0 or len(node.output) == 0:\n",
    "        raise ValueError(\"Error\")\n",
    "\n",
    "    # Each input must be a weight, a graph input, or the output of a\n",
    "    # strictly earlier node (the node list is assumed topologically sorted).\n",
    "    inputs = []\n",
    "    for input_index, input_name in enumerate(node.input):\n",
    "        \n",
    "        # Optional inputs can be empty; keep the slot as an empty field.\n",
    "        if len(input_name) == 0:\n",
    "            inputs.append(\"\")\n",
    "            continue\n",
    "        \n",
    "        input_name, weights, input_idxs, output_idxs, graph_inputs, graph_outputs, shape = search_name(input_name, node, input_index)\n",
    "        \n",
    "        if len(output_idxs) >= 2:\n",
    "            raise ValueError(\"Error\")\n",
    "        elif len(output_idxs) == 1 and output_idxs[0] >= idx:\n",
    "            raise ValueError(\"Error\")\n",
    "        elif len(weights) == 0 and len(output_idxs) == 0 and len(graph_inputs) == 0:\n",
    "            raise ValueError(\"Error\")\n",
    "            \n",
    "        inputs.append(input_name + \"(\" + shape + \")\")\n",
    "        \n",
    "    if len(inputs) == 0:\n",
    "        raise ValueError(\"Error\")\n",
    "    else:\n",
    "        line.append(\"input:\" + \";\".join(inputs))\n",
    "\n",
    "    # Each output must feed a strictly later node or be a graph output, and\n",
    "    # must not be an initializer. Output slots are passed to search_name as\n",
    "    # negative indices (-1, -2, ...), distinct from non-negative input slots.\n",
    "    outputs = []\n",
    "    for output_index, output_name in enumerate(node.output):\n",
    "        \n",
    "        if len(output_name) == 0: raise ValueError(\"Error\")\n",
    "            \n",
    "        output_name, weights, input_idxs, output_idxs, graph_inputs, graph_outputs, shape = search_name(output_name, node, -output_index-1)\n",
    "        \n",
    "        if any(i <= idx for i in input_idxs):\n",
    "            raise ValueError(\"Error\")\n",
    "        elif len(input_idxs) == 0 and len(graph_outputs) == 0:\n",
    "            raise ValueError(\"Error\")\n",
    "        elif len(weights) != 0:\n",
    "            raise ValueError(\"Error\")\n",
    "            \n",
    "        outputs.append(output_name + \"(\" + shape + \")\")\n",
    "        \n",
    "    if len(outputs) == 0:\n",
    "        raise ValueError(\"Error\")\n",
    "    else:\n",
    "        line.append(\"output:\" + \";\".join(outputs))\n",
    "                \n",
    "    # Serialize attributes; only scalar-valued attribute kinds (and\n",
    "    # one-element tensors) are supported.\n",
    "    attrs = []\n",
    "    for a in node.attribute:\n",
    "        \n",
    "        attr = \"\"\n",
    "        if a.type == onnx.AttributeProto.INT:\n",
    "            attr = str(a.i)\n",
    "        elif a.type == onnx.AttributeProto.FLOAT:\n",
    "            attr = str(a.f)\n",
    "        elif a.type == onnx.AttributeProto.STRING:\n",
    "            if isinstance(a.s, str):\n",
    "                attr = a.s\n",
    "            elif isinstance(a.s, bytes):\n",
    "                attr = a.s.decode(\"utf-8\", errors=\"ignore\")\n",
    "            else:\n",
    "                attr = str(a.s)\n",
    "        elif a.type == onnx.AttributeProto.INTS:\n",
    "            attr = \",\".join(str(x) for x in a.ints)\n",
    "        elif a.type == onnx.AttributeProto.TENSOR:\n",
    "            v = onnx.numpy_helper.to_array(a.t).flatten().tolist()\n",
    "            if len(v) != 1:\n",
    "                raise ValueError(\"Error\")\n",
    "            attr = str(v[0])\n",
    "        else:\n",
    "            raise ValueError(\"Error\")\n",
    "            \n",
    "        attrs.append(a.name + \":\" + attr)\n",
    "        \n",
    "    if len(attrs) != 0:\n",
    "        line.append(\";\".join(attrs))\n",
    "        \n",
    "    # '*' is the field separator of a model.txt line, so no field may\n",
    "    # contain it.\n",
    "    if any(\"*\" in t for t in line):\n",
    "        raise ValueError(\"Error\")\n",
    "    else:\n",
    "        line_str = \"*\".join(line)\n",
    "        add_line_to_model(line_str)\n",
    "        if DBG_PRINT_OUT == True: print(line_str)\n",
    "\n",
    "# Per-op-type node counts and the grand total.\n",
    "total = 0\n",
    "for name, count in op_stats.items():\n",
    "    total += count\n",
    "    print(name, \"->\", count)\n",
    "print(\"TOTAL\", \"->\", total)\n"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3 (ipykernel)",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.9.16"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
