{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# ADT\n",
    "\n",
    "参考：`tvm/tests/python/relay/test_adt.py`"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [],
   "source": [
    "import tvm\n",
    "from tvm import relay\n",
    "from tvm.relay.backend.interpreter import ConstructorValue\n",
    "from tvm.relay import create_executor\n",
    "from tvm.relay.prelude import Prelude\n",
    "from tvm.relay.testing import count as count_, make_nat_value, make_nat_expr\n",
    "\n",
    "import numpy as np\n",
    "\n",
    "prelude = p = Prelude(tvm.IRModule({}))\n",
    "p.mod.import_from_std(\"nat.rly\")\n",
    "\n",
    "def count(e):\n",
    "    return count_(p, e)\n",
    "\n",
    "dev = tvm.device(\"llvm\", 0)\n",
    "\n",
     "def eval(expr):\n",
     "    \"\"\"Evaluate a Relay expression with the interpreter (shadows the builtin ``eval``).\"\"\"\n",
     "    # NOTE: these tests reprocess the entire prelude for each test expression.\n",
     "    # Hoisting create_executor would not improve this, since preprocessing\n",
     "    # does not begin until evaluation time.\n",
     "    return create_executor(mod=prelude.mod, device=dev, target=\"llvm\").evaluate(expr)\n",
    "\n",
    "nat, z, s = prelude.mod.get_type(\"nat\")\n",
    "\n",
    "double = p.mod.get_global_var(\"nat_double\")\n",
    "add = p.mod.get_global_var(\"nat_add\")\n",
    "\n",
    "optional, some, none = prelude.mod.get_type(\"Option\")\n",
    "rlist, cons, nil = prelude.mod.get_type(\"List\")\n",
    "\n",
    "hd = p.hd\n",
    "tl = p.tl\n",
    "nth = p.nth\n",
    "update = p.update\n",
    "length = p.length\n",
     "# NOTE: `map`, `sum`, `filter`, and `zip` below intentionally shadow the\n",
     "# Python builtins with their Relay-prelude counterparts, mirroring the\n",
     "# upstream test_adt.py; later cells rely on these names.\n",
     "map = p.map\n",
     "foldl = p.foldl\n",
     "foldr = p.foldr\n",
     "foldr1 = p.foldr1\n",
     "sum = p.sum\n",
     "\n",
     "concat = p.concat\n",
     "filter = p.filter\n",
     "zip = p.zip\n",
    "rev = p.rev\n",
    "unfoldl = p.unfoldl\n",
    "unfoldr = p.unfoldr\n",
    "map_accumr = p.map_accumr\n",
    "map_accuml = p.map_accuml\n",
    "\n",
    "tree, rose = prelude.mod.get_type(\"Tree\")\n",
    "\n",
    "tmap = p.tmap\n",
    "size = p.size\n",
    "\n",
    "compose = p.compose\n",
    "iterate = p.iterate"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [],
   "source": [
     "def to_list(l):\n",
     "    \"\"\"Convert an interpreter List ConstructorValue into a Python list of its elements.\"\"\"\n",
     "    assert isinstance(l, ConstructorValue)\n",
     "    val = l\n",
     "    ret = []\n",
     "    while True:\n",
     "        if val.tag == cons.tag:\n",
     "            # Cons(head, tail): collect the head, then walk down the tail.\n",
     "            ret.append(val.fields[0])\n",
     "            val = val.fields[1]\n",
     "        else:\n",
     "            # The only other List constructor is Nil, which terminates the list.\n",
     "            assert val.tag == nil.tag\n",
     "            break\n",
     "    return ret\n",
    "\n",
    "\n",
     "def tree_to_dict(t):\n",
     "    \"\"\"Recursively convert a Rose-tree ConstructorValue into a nested dict\n",
     "    with keys 'member' (the node value) and 'children' (list of subtree dicts).\"\"\"\n",
     "    assert isinstance(t, ConstructorValue)\n",
     "    ret = {}\n",
     "    assert t.tag == rose.tag\n",
     "    ret[\"member\"] = t.fields[0]\n",
     "    ret[\"children\"] = []\n",
     "    # fields[1] is the List of child subtrees; convert each one recursively.\n",
     "    for subtree in to_list(t.fields[1]):\n",
     "        l = tree_to_dict(subtree)\n",
     "        ret[\"children\"].append(l)\n",
     "    return ret\n",
    "\n",
    "\n",
     "def vmobj_to_list(o, dtype=\"float32\"):\n",
     "    \"\"\"Flatten a runtime object (NDArray, ADT, or List/tensor ConstructorValue)\n",
     "    into a flat Python list; raises RuntimeError for unknown object types.\"\"\"\n",
     "    if isinstance(o, tvm.nd.NDArray):\n",
     "        return [o.numpy().tolist()]\n",
     "    elif isinstance(o, tvm.runtime.container.ADT):\n",
     "        if len(o) == 0:\n",
     "            # Empty ADT: distinguish a tensor_nil value (returns [0]) from an\n",
     "            # ordinary empty container (returns []).\n",
     "            tensor_nil = p.get_var(\"tensor_nil\", dtype=dtype)\n",
     "            if tensor_nil.tag == o.tag:\n",
     "                return [0]\n",
     "            return []\n",
     "\n",
     "        result = []\n",
     "        for f in o:\n",
     "            result.extend(vmobj_to_list(f, dtype))\n",
     "        return result\n",
     "    elif isinstance(o, tvm.relay.backend.interpreter.ConstructorValue):\n",
     "        if o.constructor.name_hint == \"Cons\":\n",
     "            # Cons cell: flatten head and tail, head elements first.\n",
     "            tl = vmobj_to_list(o.fields[1], dtype)\n",
     "            hd = vmobj_to_list(o.fields[0], dtype)\n",
     "            hd.extend(tl)\n",
     "            return hd\n",
     "        elif o.constructor.name_hint == \"Nil\":\n",
     "            return []\n",
     "        elif \"tensor_nil\" in o.constructor.name_hint:\n",
     "            return [0]\n",
     "        elif \"tensor\" in o.constructor.name_hint:\n",
     "            return [o.fields[0].numpy()]\n",
     "        else:\n",
     "            raise RuntimeError(\"Unknown object type: %s\" % o.constructor.name_hint)\n",
     "    else:\n",
     "        raise RuntimeError(\"Unknown object type: %s\" % type(o))\n",
    "\n",
    "\n",
    "# turns a scalar-valued relay tensor value into a python number\n",
    "def get_scalar(tv):\n",
    "    return tv.numpy().item()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## 测试自然数"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "测试自然数值："
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [],
   "source": [
    "assert count(make_nat_value(p, 10)) == 10\n",
    "assert count(eval(s(s(z())))) == 2"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "测试自然数构造器："
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [],
   "source": [
    "func = relay.Function([], z())\n",
    "test_z = relay.GlobalVar(\"test_z\")\n",
    "test_sz = relay.GlobalVar(\"test_sz\")\n",
    "prelude.mod[test_z] = func\n",
    "func = relay.Function([], s(z()))\n",
    "prelude.mod[test_sz] = func\n",
    "ck_mod = relay.transform.InferType()(prelude.mod)\n",
    "assert ck_mod[test_z].body.checked_type == nat()\n",
    "assert ck_mod[test_sz].body.checked_type == nat()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## ``double``"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [],
   "source": [
    "assert prelude.mod[double].checked_type == relay.FuncType([nat()], nat())\n",
    "res = eval(double(s(z())))\n",
    "assert count(res) == 2"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## `add`"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {},
   "outputs": [],
   "source": [
    "assert prelude.mod[add].checked_type == relay.FuncType([nat(), nat()], nat())\n",
    "res = eval(add(s(z()), s(z())))\n",
    "assert count(res) == 2"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## `list`"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [],
   "source": [
    "test_consz = relay.GlobalVar(\"test_consz\")\n",
    "func = relay.Function([], cons(z(), nil()))\n",
    "prelude.mod[test_consz] = func\n",
    "ck_mod = relay.transform.InferType()(prelude.mod)\n",
    "assert ck_mod[test_consz].body.checked_type == rlist(nat())"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## 其他"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "@tvm.testing.uses_gpu\n",
    "def test_hd_tl():\n",
    "    expected = list(range(10))\n",
    "    l = nil()\n",
    "    for i in reversed(expected):\n",
    "        l = cons(make_nat_expr(prelude, i), l)\n",
    "\n",
    "    got = []\n",
    "    for i in range(len(expected)):\n",
    "        got.append(count(eval(hd(l))))\n",
    "        l = tl(l)\n",
    "\n",
    "    assert got == expected\n",
    "\n",
    "\n",
    "@tvm.testing.uses_gpu\n",
    "def test_nth():\n",
    "    expected = list(range(10))\n",
    "    l = nil()\n",
    "    for i in reversed(expected):\n",
    "        l = cons(relay.const(i), l)\n",
    "\n",
    "    for i in range(len(expected)):\n",
    "        nth = prelude.mod.get_global_var(\"nth\")\n",
    "        item = eval(nth(l, relay.const(i)))\n",
    "        assert get_scalar(item) == i\n",
    "\n",
    "\n",
    "@tvm.testing.uses_gpu\n",
    "def test_update():\n",
    "    expected = list(range(10))\n",
    "    l = nil()\n",
    "    # create zero initialized list\n",
    "    for i in range(len(expected)):\n",
    "        l = cons(make_nat_expr(prelude, 0), l)\n",
    "\n",
    "    # set value\n",
    "    for i, v in enumerate(expected):\n",
    "        l = update(l, relay.const(i), make_nat_expr(prelude, v))\n",
    "\n",
    "    got = []\n",
    "    for i in range(len(expected)):\n",
    "        got.append(count(eval(nth(l, relay.const(i)))))\n",
    "\n",
    "    assert got == expected\n",
    "\n",
    "\n",
    "@tvm.testing.uses_gpu\n",
    "def test_length():\n",
    "    a = relay.TypeVar(\"a\")\n",
    "    assert prelude.mod[length].checked_type == relay.FuncType(\n",
    "        [rlist(a)], relay.scalar_type(\"int32\"), [a]\n",
    "    )\n",
    "    res = eval(length(cons(z(), cons(z(), cons(z(), nil())))))\n",
    "    assert get_scalar(res) == 3\n",
    "\n",
    "\n",
    "@tvm.testing.uses_gpu\n",
    "def test_map():\n",
    "    a = relay.TypeVar(\"a\")\n",
    "    b = relay.TypeVar(\"b\")\n",
    "    lhs = prelude.mod[map].checked_type\n",
    "    rhs = relay.FuncType([relay.FuncType([a], b), rlist(a)], rlist(b), [a, b])\n",
    "    assert lhs == rhs\n",
    "\n",
    "    x = relay.Var(\"x\")\n",
    "    add_one = relay.Function([x], s(x))\n",
    "    res = eval(map(add_one, cons(z(), cons(z(), nil()))))\n",
    "    ones = to_list(res)\n",
    "    assert len(ones) == 2\n",
    "    assert count(ones[0]) == 1 and count(ones[1]) == 1\n",
    "\n",
    "\n",
    "@tvm.testing.uses_gpu\n",
    "def test_foldl():\n",
    "    a = relay.TypeVar(\"a\")\n",
    "    b = relay.TypeVar(\"b\")\n",
    "\n",
    "    lhs = prelude.mod[foldl].checked_type\n",
    "    rhs = relay.FuncType([relay.FuncType([a, b], a), a, rlist(b)], a, [a, b])\n",
    "    assert lhs == rhs\n",
    "\n",
    "    x = relay.Var(\"x\")\n",
    "    y = relay.Var(\"y\")\n",
    "    rev_dup = relay.Function([y, x], cons(x, cons(x, y)))\n",
    "    res = eval(\n",
    "        foldl(\n",
    "            rev_dup,\n",
    "            nil(),\n",
    "            cons(\n",
    "                make_nat_expr(prelude, 1),\n",
    "                cons(make_nat_expr(prelude, 2), cons(make_nat_expr(prelude, 3), nil())),\n",
    "            ),\n",
    "        )\n",
    "    )\n",
    "    reversed = to_list(res)\n",
    "    assert len(reversed) == 6\n",
    "    assert count(reversed[0]) == 3 and count(reversed[1]) == 3\n",
    "    assert count(reversed[2]) == 2 and count(reversed[3]) == 2\n",
    "    assert count(reversed[4]) == 1 and count(reversed[5]) == 1\n",
    "\n",
    "\n",
    "@tvm.testing.uses_gpu\n",
    "def test_foldr():\n",
    "    a = relay.TypeVar(\"a\")\n",
    "    b = relay.TypeVar(\"b\")\n",
    "    lhs = prelude.mod[foldr].checked_type\n",
    "    rhs = relay.FuncType([relay.FuncType([a, b], b), b, rlist(a)], b, [a, b])\n",
    "    assert lhs == rhs\n",
    "\n",
    "    x = relay.Var(\"x\")\n",
    "    y = relay.Var(\"y\")\n",
    "    identity = relay.Function([x, y], cons(x, y))\n",
    "    res = eval(\n",
    "        foldr(\n",
    "            identity,\n",
    "            nil(),\n",
    "            cons(\n",
    "                make_nat_expr(prelude, 1),\n",
    "                cons(make_nat_expr(prelude, 2), cons(make_nat_expr(prelude, 3), nil())),\n",
    "            ),\n",
    "        )\n",
    "    )\n",
    "    same = to_list(res)\n",
    "    assert len(same) == 3\n",
    "    assert count(same[0]) == 1 and count(same[1]) == 2 and count(same[2]) == 3\n",
    "\n",
    "\n",
    "@tvm.testing.uses_gpu\n",
    "def test_foldr1():\n",
    "    a = relay.TypeVar(\"a\")\n",
    "    lhs = prelude.mod[foldr1].checked_type\n",
    "    rhs = relay.FuncType([relay.FuncType([a, a], a), rlist(a)], a, [a])\n",
    "    assert lhs == rhs\n",
    "\n",
    "    x = relay.Var(\"x\")\n",
    "    y = relay.Var(\"y\")\n",
    "    f = relay.Function([x, y], add(x, y))\n",
    "    res = eval(\n",
    "        foldr1(\n",
    "            f,\n",
    "            cons(\n",
    "                make_nat_expr(prelude, 1),\n",
    "                cons(make_nat_expr(prelude, 2), cons(make_nat_expr(prelude, 3), nil())),\n",
    "            ),\n",
    "        )\n",
    "    )\n",
    "\n",
    "    assert count(res) == 6\n",
    "\n",
    "\n",
    "@tvm.testing.uses_gpu\n",
    "def test_sum():\n",
    "    assert prelude.mod[sum].checked_type == relay.FuncType(\n",
    "        [rlist(relay.scalar_type(\"int32\"))], relay.scalar_type(\"int32\")\n",
    "    )\n",
    "    res = eval(sum(cons(relay.const(1), cons(relay.const(2), nil()))))\n",
    "    assert get_scalar(res) == 3\n",
    "\n",
    "\n",
    "@tvm.testing.uses_gpu\n",
    "def test_concat():\n",
    "    a = relay.TypeVar(\"a\")\n",
    "    assert prelude.mod[concat].checked_type == relay.FuncType([rlist(a), rlist(a)], rlist(a), [a])\n",
    "\n",
    "    l1 = cons(make_nat_expr(prelude, 1), cons(make_nat_expr(prelude, 2), nil()))\n",
    "    l2 = cons(make_nat_expr(prelude, 3), cons(make_nat_expr(prelude, 4), nil()))\n",
    "    res = eval(concat(l1, l2))\n",
    "\n",
    "    catted = to_list(res)\n",
    "    assert len(catted) == 4\n",
    "    assert count(catted[0]) == 1\n",
    "    assert count(catted[1]) == 2\n",
    "    assert count(catted[2]) == 3\n",
    "    assert count(catted[3]) == 4\n",
    "\n",
    "\n",
    "@tvm.testing.uses_gpu\n",
    "def test_filter():\n",
    "    a = relay.TypeVar(\"a\")\n",
    "    expected_type = relay.FuncType(\n",
    "        [relay.FuncType([a], relay.scalar_type(\"bool\")), rlist(a)], rlist(a), [a]\n",
    "    )\n",
    "    assert prelude.mod[filter].checked_type == expected_type\n",
    "\n",
    "    x = relay.Var(\"x\", nat())\n",
    "    greater_than_one = relay.Function(\n",
    "        [x],\n",
    "        relay.Match(\n",
    "            x,\n",
    "            [\n",
    "                relay.Clause(\n",
    "                    relay.PatternConstructor(\n",
    "                        s, [relay.PatternConstructor(s, [relay.PatternWildcard()])]\n",
    "                    ),\n",
    "                    relay.const(True),\n",
    "                ),\n",
    "                relay.Clause(relay.PatternWildcard(), relay.const(False)),\n",
    "            ],\n",
    "        ),\n",
    "    )\n",
    "    res = eval(\n",
    "        filter(\n",
    "            greater_than_one,\n",
    "            cons(\n",
    "                make_nat_expr(prelude, 1),\n",
    "                cons(\n",
    "                    make_nat_expr(prelude, 1),\n",
    "                    cons(\n",
    "                        make_nat_expr(prelude, 3),\n",
    "                        cons(\n",
    "                            make_nat_expr(prelude, 1),\n",
    "                            cons(make_nat_expr(prelude, 5), cons(make_nat_expr(prelude, 1), nil())),\n",
    "                        ),\n",
    "                    ),\n",
    "                ),\n",
    "            ),\n",
    "        )\n",
    "    )\n",
    "    filtered = to_list(res)\n",
    "    assert len(filtered) == 2\n",
    "    assert count(filtered[0]) == 3\n",
    "    assert count(filtered[1]) == 5\n",
    "\n",
    "\n",
    "@tvm.testing.uses_gpu\n",
    "def test_zip():\n",
    "    a = relay.TypeVar(\"a\")\n",
    "    b = relay.TypeVar(\"b\")\n",
    "    expected_type = relay.FuncType([rlist(a), rlist(b)], rlist(relay.TupleType([a, b])), [a, b])\n",
    "    assert prelude.mod[zip].checked_type == expected_type\n",
    "\n",
    "    l1 = cons(\n",
    "        make_nat_expr(prelude, 1),\n",
    "        cons(make_nat_expr(prelude, 2), cons(make_nat_expr(prelude, 3), nil())),\n",
    "    )\n",
    "    l2 = cons(nil(), cons(cons(nil(), nil()), cons(cons(nil(), cons(nil(), nil())), nil())))\n",
    "\n",
    "    res = eval(zip(l1, l2))\n",
    "    zipped = to_list(res)\n",
    "    assert len(zipped) == 3\n",
    "    assert count(zipped[0][0]) == 1\n",
    "    assert len(to_list(zipped[0][1])) == 0\n",
    "    assert count(zipped[1][0]) == 2\n",
    "    assert len(to_list(zipped[1][1])) == 1\n",
    "    assert count(zipped[2][0]) == 3\n",
    "    assert len(to_list(zipped[2][1])) == 2\n",
    "\n",
    "    # test truncation\n",
    "    l3 = cons(make_nat_expr(prelude, 4), cons(make_nat_expr(prelude, 5), nil()))\n",
    "    shorter_res = eval(zip(l3, l2))\n",
    "    truncated = to_list(shorter_res)\n",
    "    assert len(truncated) == 2\n",
    "    assert count(truncated[0][0]) == 4\n",
    "    assert len(to_list(truncated[0][1])) == 0\n",
    "    assert count(truncated[1][0]) == 5\n",
    "    assert len(to_list(truncated[1][1])) == 1\n",
    "\n",
    "    l4 = cons(nil(), nil())\n",
    "    shortest_res = eval(zip(l3, l4))\n",
    "    singleton = to_list(shortest_res)\n",
    "    assert len(singleton) == 1\n",
    "    assert count(singleton[0][0]) == 4\n",
    "    assert len(to_list(singleton[0][1])) == 0\n",
    "\n",
    "\n",
    "@tvm.testing.uses_gpu\n",
    "def test_rev():\n",
    "    a = relay.TypeVar(\"a\")\n",
    "    assert prelude.mod[rev].checked_type == relay.FuncType([rlist(a)], rlist(a), [a])\n",
    "\n",
    "    res = eval(\n",
    "        rev(\n",
    "            cons(\n",
    "                make_nat_expr(prelude, 1),\n",
    "                cons(make_nat_expr(prelude, 2), cons(make_nat_expr(prelude, 3), nil())),\n",
    "            )\n",
    "        )\n",
    "    )\n",
    "    reversed = to_list(res)\n",
    "\n",
    "    assert len(reversed) == 3\n",
    "    assert count(reversed[0]) == 3\n",
    "    assert count(reversed[1]) == 2\n",
    "    assert count(reversed[2]) == 1\n",
    "\n",
    "\n",
    "@tvm.testing.uses_gpu\n",
    "def test_unfoldr():\n",
    "    a = relay.TypeVar(\"a\")\n",
    "    b = relay.TypeVar(\"b\")\n",
    "    expected_type = relay.FuncType(\n",
    "        [relay.FuncType([a], optional(relay.TupleType([a, b]))), a], rlist(b), [a, b]\n",
    "    )\n",
    "\n",
    "    x = relay.Var(\"x\", nat())\n",
    "    n = relay.Var(\"n\", nat())\n",
    "    count_down = relay.Function(\n",
    "        [x],\n",
    "        relay.Match(\n",
    "            x,\n",
    "            [\n",
    "                relay.Clause(\n",
    "                    relay.PatternConstructor(s, [relay.PatternVar(n)]), some(relay.Tuple([n, x]))\n",
    "                ),\n",
    "                relay.Clause(relay.PatternConstructor(z, []), none()),\n",
    "            ],\n",
    "        ),\n",
    "    )\n",
    "\n",
    "    res = eval(unfoldr(count_down, make_nat_expr(prelude, 3)))\n",
    "    unfolded = to_list(res)\n",
    "\n",
    "    assert len(unfolded) == 3\n",
    "    assert count(unfolded[0]) == 3\n",
    "    assert count(unfolded[1]) == 2\n",
    "    assert count(unfolded[2]) == 1\n",
    "\n",
    "\n",
    "@tvm.testing.uses_gpu\n",
    "def test_unfoldl():\n",
    "    a = relay.TypeVar(\"a\")\n",
    "    b = relay.TypeVar(\"b\")\n",
    "    expected_type = relay.FuncType(\n",
    "        [relay.FuncType([a], optional(relay.TupleType([a, b]))), a], rlist(b), [a, b]\n",
    "    )\n",
    "\n",
    "    x = relay.Var(\"x\", nat())\n",
    "    n = relay.Var(\"n\", nat())\n",
    "    count_down = relay.Function(\n",
    "        [x],\n",
    "        relay.Match(\n",
    "            x,\n",
    "            [\n",
    "                relay.Clause(\n",
    "                    relay.PatternConstructor(s, [relay.PatternVar(n)]), some(relay.Tuple([n, x]))\n",
    "                ),\n",
    "                relay.Clause(relay.PatternConstructor(z, []), none()),\n",
    "            ],\n",
    "        ),\n",
    "    )\n",
    "\n",
    "    res = eval(unfoldl(count_down, make_nat_expr(prelude, 3)))\n",
    "    unfolded = to_list(res)\n",
    "\n",
    "    assert len(unfolded) == 3\n",
    "    assert count(unfolded[0]) == 1\n",
    "    assert count(unfolded[1]) == 2\n",
    "    assert count(unfolded[2]) == 3\n",
    "\n",
    "\n",
    "@tvm.testing.uses_gpu\n",
    "def test_map_accumr():\n",
    "    a = relay.TypeVar(\"a\")\n",
    "    b = relay.TypeVar(\"b\")\n",
    "    c = relay.TypeVar(\"c\")\n",
    "    expected_type = relay.FuncType(\n",
    "        [relay.FuncType([a, b], relay.TupleType([a, c])), a, rlist(b)],\n",
    "        relay.TupleType([a, rlist(c)]),\n",
    "        [a, b, c],\n",
    "    )\n",
    "    assert prelude.mod[map_accumr].checked_type == expected_type\n",
    "\n",
    "    acc = relay.Var(\"acc\", nat())\n",
    "    x = relay.Var(\"x\", nat())\n",
    "    add_acc_to_each = relay.Function([acc, x], relay.Tuple([add(x, acc), add(x, acc)]))\n",
    "\n",
    "    vals = cons(\n",
    "        make_nat_expr(prelude, 1),\n",
    "        cons(make_nat_expr(prelude, 2), cons(make_nat_expr(prelude, 3), nil())),\n",
    "    )\n",
    "    res = eval(map_accumr(add_acc_to_each, z(), vals))\n",
    "\n",
    "    sum = count(res[0])\n",
    "    new_vals = to_list(res[1])\n",
    "\n",
    "    assert sum == 6\n",
    "    assert len(new_vals) == 3\n",
    "    assert count(new_vals[0]) == 6\n",
    "    assert count(new_vals[1]) == 5\n",
    "    assert count(new_vals[2]) == 3\n",
    "\n",
    "\n",
    "@tvm.testing.uses_gpu\n",
    "def test_map_accuml():\n",
    "    a = relay.TypeVar(\"a\")\n",
    "    b = relay.TypeVar(\"b\")\n",
    "    c = relay.TypeVar(\"c\")\n",
    "    expected_type = relay.FuncType(\n",
    "        [relay.FuncType([a, b], relay.TupleType([a, c])), a, rlist(b)],\n",
    "        relay.TupleType([a, rlist(c)]),\n",
    "        [a, b, c],\n",
    "    )\n",
    "    assert prelude.mod[map_accuml].checked_type == expected_type\n",
    "\n",
    "    acc = relay.Var(\"acc\", nat())\n",
    "    x = relay.Var(\"x\", nat())\n",
    "    add_to_acc = relay.Function([acc, x], relay.Tuple([add(x, acc), x]))\n",
    "\n",
    "    vals = cons(\n",
    "        make_nat_expr(prelude, 1),\n",
    "        cons(make_nat_expr(prelude, 2), cons(make_nat_expr(prelude, 3), nil())),\n",
    "    )\n",
    "    res = eval(map_accuml(add_to_acc, z(), vals))\n",
    "\n",
    "    sum = count(res[0])\n",
    "    new_vals = to_list(res[1])\n",
    "\n",
    "    assert sum == 6\n",
    "    assert len(new_vals) == 3\n",
    "    assert count(new_vals[0]) == 3\n",
    "    assert count(new_vals[1]) == 2\n",
    "    assert count(new_vals[2]) == 1\n",
    "\n",
    "\n",
    "@tvm.testing.uses_gpu\n",
    "def test_optional_matching():\n",
    "    x = relay.Var(\"x\")\n",
    "    y = relay.Var(\"y\")\n",
    "    v = relay.Var(\"v\")\n",
    "    condense = relay.Function(\n",
    "        [x, y],\n",
    "        relay.Match(\n",
    "            x,\n",
    "            [\n",
    "                relay.Clause(relay.PatternConstructor(some, [relay.PatternVar(v)]), cons(v, y)),\n",
    "                relay.Clause(relay.PatternConstructor(none), y),\n",
    "            ],\n",
    "        ),\n",
    "    )\n",
    "\n",
    "    res = eval(\n",
    "        foldr(\n",
    "            condense,\n",
    "            nil(),\n",
    "            cons(\n",
    "                some(make_nat_expr(prelude, 3)),\n",
    "                cons(none(), cons(some(make_nat_expr(prelude, 1)), nil())),\n",
    "            ),\n",
    "        )\n",
    "    )\n",
    "\n",
    "    reduced = to_list(res)\n",
    "    assert len(reduced) == 2\n",
    "    assert count(reduced[0]) == 3\n",
    "    assert count(reduced[1]) == 1\n",
    "\n",
    "\n",
    "@tvm.testing.uses_gpu\n",
    "def test_tmap():\n",
    "    a = relay.TypeVar(\"a\")\n",
    "    b = relay.TypeVar(\"b\")\n",
    "    lhs = prelude.mod[tmap].checked_type\n",
    "    rhs = relay.FuncType([relay.FuncType([a], b), tree(a)], tree(b), [a, b])\n",
    "    assert lhs == rhs\n",
    "\n",
    "    x = relay.Var(\"x\")\n",
    "    add_one = relay.Function([x], s(x))\n",
    "    res = eval(tmap(add_one, rose(z(), cons(rose(z(), nil()), cons(rose(z(), nil()), nil())))))\n",
    "\n",
    "    tree_dict = tree_to_dict(res)\n",
    "    assert count(tree_dict[\"member\"]) == 1\n",
    "    assert len(tree_dict[\"children\"]) == 2\n",
    "    for subtree in tree_dict[\"children\"]:\n",
    "        assert count(subtree[\"member\"]) == 1\n",
    "        assert len(subtree[\"children\"]) == 0\n",
    "\n",
    "\n",
    "@tvm.testing.uses_gpu\n",
    "def test_size():\n",
    "    a = relay.TypeVar(\"a\")\n",
    "    lhs = prelude.mod[size].checked_type\n",
    "    rhs = relay.FuncType([tree(a)], relay.scalar_type(\"int32\"), [a])\n",
    "    assert lhs == rhs\n",
    "\n",
    "    root = rose(z(), cons(rose(z(), nil()), cons(rose(z(), nil()), nil())))\n",
    "    t = rose(z(), cons(root, cons(root, cons(root, nil()))))\n",
    "    res = eval(size(t))\n",
    "    assert get_scalar(res) == 10\n",
    "\n",
    "\n",
    "@tvm.testing.uses_gpu\n",
    "def test_wildcard_match_solo():\n",
    "    x = relay.Var(\"x\", nat())\n",
    "    copy = relay.Function([x], relay.Match(x, [relay.Clause(relay.PatternWildcard(), x)]), nat())\n",
    "\n",
    "    res = eval(copy(s(s(s(z())))))\n",
    "    assert count(res) == 3\n",
    "\n",
    "\n",
    "@tvm.testing.uses_gpu\n",
    "def test_wildcard_match_order():\n",
    "    x = relay.Var(\"x\", rlist(nat()))\n",
    "    y = relay.Var(\"y\")\n",
    "    a = relay.Var(\"a\")\n",
    "    return_zero = relay.Function(\n",
    "        [x],\n",
    "        relay.Match(\n",
    "            x,\n",
    "            [\n",
    "                relay.Clause(relay.PatternWildcard(), z()),\n",
    "                relay.Clause(\n",
    "                    relay.PatternConstructor(cons, [relay.PatternVar(y), relay.PatternVar(a)]), y\n",
    "                ),\n",
    "                relay.Clause(relay.PatternConstructor(nil), s(z())),\n",
    "            ],\n",
    "        ),\n",
    "        nat(),\n",
    "    )\n",
    "\n",
    "    res = eval(return_zero(cons(s(z()), nil())))\n",
    "    # wildcard pattern is evaluated first\n",
    "    assert count(res) == 0\n",
    "\n",
    "\n",
    "@tvm.testing.uses_gpu\n",
    "def test_nested_matches():\n",
    "    a = relay.TypeVar(\"a\")\n",
    "    # TODO(@jroesch): inference should be able to handle this one\n",
    "    x = relay.Var(\"x\", type_annotation=rlist(rlist(a)))\n",
    "    y = relay.Var(\"y\")\n",
    "    w = relay.Var(\"w\")\n",
    "    h = relay.Var(\"h\")\n",
    "    t = relay.Var(\"t\")\n",
    "    flatten = relay.GlobalVar(\"flatten\")\n",
    "\n",
    "    # flatten could be written using a fold, but this way has nested matches\n",
    "    inner_match = relay.Match(\n",
    "        y,\n",
    "        [\n",
    "            relay.Clause(relay.PatternConstructor(nil), flatten(w)),\n",
    "            relay.Clause(\n",
    "                relay.PatternConstructor(cons, [relay.PatternVar(h), relay.PatternVar(t)]),\n",
    "                cons(h, flatten(cons(t, w))),\n",
    "            ),\n",
    "        ],\n",
    "    )\n",
    "\n",
    "    prelude.mod[flatten] = relay.Function(\n",
    "        [x],\n",
    "        relay.Match(\n",
    "            x,\n",
    "            [\n",
    "                relay.Clause(relay.PatternConstructor(nil), nil()),\n",
    "                relay.Clause(\n",
    "                    relay.PatternConstructor(cons, [relay.PatternVar(y), relay.PatternVar(w)]),\n",
    "                    inner_match,\n",
    "                ),\n",
    "            ],\n",
    "        ),\n",
    "        rlist(a),\n",
    "        [a],\n",
    "    )\n",
    "\n",
    "    first_list = cons(\n",
    "        make_nat_expr(prelude, 1),\n",
    "        cons(make_nat_expr(prelude, 2), cons(make_nat_expr(prelude, 3), nil())),\n",
    "    )\n",
    "    second_list = cons(\n",
    "        make_nat_expr(prelude, 4),\n",
    "        cons(make_nat_expr(prelude, 5), cons(make_nat_expr(prelude, 6), nil())),\n",
    "    )\n",
    "    final_list = cons(first_list, cons(second_list, nil()))\n",
    "\n",
    "    res = eval(flatten(final_list))\n",
    "\n",
    "    flat = to_list(res)\n",
    "    assert len(flat) == 6\n",
    "    for i in range(6):\n",
    "        assert count(flat[i]) == i + 1\n",
    "\n",
    "\n",
    "@tvm.testing.uses_gpu\n",
    "def test_match_full_var():\n",
    "    x = relay.Var(\"x\")\n",
    "    v = relay.Var(\"v\")\n",
    "    id_func = relay.Function([x], relay.Match(x, [relay.Clause(relay.PatternVar(v), v)]))\n",
    "\n",
    "    res1 = eval(id_func(nil()))\n",
    "    res2 = eval(id_func(cons(z(), cons(z(), nil()))))\n",
    "\n",
    "    empty = to_list(res1)\n",
    "    assert len(empty) == 0\n",
    "\n",
    "    zeroes = to_list(res2)\n",
    "    assert len(zeroes) == 2\n",
    "    assert count(zeroes[0]) == 0\n",
    "    assert count(zeroes[1]) == 0\n",
    "\n",
    "\n",
    "@tvm.testing.uses_gpu\n",
    "def test_nested_pattern_match():\n",
    "    x = relay.Var(\"x\", rlist(nat()))\n",
    "    h1 = relay.Var(\"h1\")\n",
    "    h2 = relay.Var(\"h2\")\n",
    "    t = relay.Var(\"t\")\n",
    "    match = relay.Match(\n",
    "        x,\n",
    "        [\n",
    "            relay.Clause(\n",
    "                relay.PatternConstructor(\n",
    "                    cons,\n",
    "                    [\n",
    "                        relay.PatternVar(h1),\n",
    "                        relay.PatternConstructor(cons, [relay.PatternVar(h2), relay.PatternVar(t)]),\n",
    "                    ],\n",
    "                ),\n",
    "                h2,\n",
    "            ),\n",
    "            relay.Clause(relay.PatternWildcard(), z()),\n",
    "        ],\n",
    "    )\n",
    "    get_second = relay.Function([x], match)\n",
    "\n",
    "    res = eval(get_second(cons(s(z()), cons(s(s(z())), nil()))))\n",
    "\n",
    "    assert count(res) == 2\n",
    "\n",
    "\n",
    "@tvm.testing.uses_gpu\n",
    "def test_compose():\n",
    "    n = relay.Var(\"n\")\n",
    "    inc = relay.Function([n], s(n))\n",
    "    x = relay.Var(\"x\")\n",
    "    res = eval(relay.Call(compose(inc, double), [s(s(z()))]))\n",
    "    assert count(res) == 5\n",
    "\n",
    "\n",
    "@tvm.testing.uses_gpu\n",
    "def test_iterate():\n",
    "    expr = relay.Call(iterate(double, relay.const(2)), [make_nat_expr(prelude, 3)])\n",
    "    res = eval(relay.Function([], expr)())\n",
    "    assert count(res) == 12\n",
    "\n",
    "\n",
    "if __name__ == \"__main__\":\n",
    "    tvm.testing.main()\n"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "ai",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.12.2"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
