{
 "nbformat": 4,
 "nbformat_minor": 0,
 "metadata": {
  "colab": {
   "provenance": [],
   "collapsed_sections": [],
   "toc_visible": true
  },
  "kernelspec": {
   "name": "python3",
   "display_name": "Python 3"
  },
  "language_info": {
   "name": "python"
  },
  "accelerator": "GPU"
 },
 "cells": [
  {
   "cell_type": "markdown",
   "source": [
    "# **Setup**"
   ],
   "metadata": {
    "id": "MX5Sdk7L9pfN",
    "pycharm": {
     "name": "#%% md\n"
    }
   }
  },
  {
   "cell_type": "code",
   "source": [
    "print('Installing torchquantum...')\n",
    "!git clone https://github.com/mit-han-lab/torchquantum.git\n",
    "%cd /content/torchquantum\n",
     "%pip install --editable . 1>/dev/null\n",
     "%pip install 'matplotlib>=3.3.2' 1>/dev/null\n",
    "%matplotlib inline\n",
    "print('All required packages have been successfully installed!')"
   ],
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/"
    },
    "id": "f0jC7W3B9nDe",
    "outputId": "2066973c-6bb9-4207-e1ed-5aec9e7016ac",
    "pycharm": {
     "name": "#%%\n"
    }
   },
   "execution_count": 1,
   "outputs": [
    {
     "output_type": "stream",
     "name": "stdout",
     "text": [
      "Installing torchquantum...\n",
      "Cloning into 'torchquantum'...\n",
      "remote: Enumerating objects: 11836, done.\u001B[K\n",
      "remote: Counting objects: 100% (726/726), done.\u001B[K\n",
      "remote: Compressing objects: 100% (306/306), done.\u001B[K\n",
      "remote: Total 11836 (delta 435), reused 685 (delta 405), pack-reused 11110\u001B[K\n",
      "Receiving objects: 100% (11836/11836), 33.59 MiB | 25.33 MiB/s, done.\n",
      "Resolving deltas: 100% (6593/6593), done.\n",
      "/content/torchquantum\n",
      "\u001B[31mERROR: pip's dependency resolver does not currently take into account all the packages that are installed. This behaviour is the source of the following dependency conflicts.\n",
      "torchquantum 0.1.2 requires matplotlib>=3.3.2, but you have matplotlib 3.1.3 which is incompatible.\u001B[0m\n",
      "All required packages have been successfully installed!\n"
     ]
    }
   ]
  },
  {
   "cell_type": "code",
   "source": [
    "import torchquantum as tq\n",
    "import torchquantum.functional as tqf\n",
    "import numpy as np\n",
    "import matplotlib.pyplot as plt\n",
    "import torch"
   ],
   "metadata": {
    "id": "10RsI2oaDXEI",
    "pycharm": {
     "name": "#%%\n"
    }
   },
   "execution_count": 2,
   "outputs": []
  },
  {
   "cell_type": "markdown",
   "source": [
    "# **1. TorchQuantum basic operations**"
   ],
   "metadata": {
    "id": "I3Vi2I17jo86",
    "pycharm": {
     "name": "#%% md\n"
    }
   }
  },
  {
   "cell_type": "markdown",
   "source": [
    "## 1.2 TorchQuantum Operations"
   ],
   "metadata": {
    "id": "Fu9gqh2XNeqM",
    "pycharm": {
     "name": "#%% md\n"
    }
   }
  },
  {
   "cell_type": "markdown",
   "source": [
    "tq.QuantumDevice Usage"
   ],
   "metadata": {
    "id": "abV1dwlE0Ksq",
    "pycharm": {
     "name": "#%% md\n"
    }
   }
  },
  {
   "cell_type": "markdown",
   "source": [
    "Method 1 of using quantum gates through torchquantum.functional"
   ],
   "metadata": {
    "id": "DQHkBqqW0d4C",
    "pycharm": {
     "name": "#%% md\n"
    }
   }
  },
  {
   "cell_type": "code",
   "source": [
    "q_dev = tq.QuantumDevice(n_wires=1)\n",
    "q_dev.reset_states(bsz=1)\n",
    "print(f\"all zero state: {q_dev}\")\n",
    "tqf.h(q_dev, wires=0)\n",
    "print(f\"after h gate: {q_dev}\")\n",
    "\n",
    "tqf.rx(q_dev, wires=0, params=[0.3])\n",
    "\n",
    "print(f\"after rx gate: {q_dev}\")"
   ],
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/"
    },
    "id": "LXuCcc31NeKJ",
    "outputId": "49f1447c-97ec-4af7-ee43-d8b03ee210d1",
    "pycharm": {
     "name": "#%%\n"
    }
   },
   "execution_count": 16,
   "outputs": [
    {
     "output_type": "stream",
     "name": "stdout",
     "text": [
      "all zero state: QuantumDevice 1 wires with states: tensor([[1.+0.j, 0.+0.j]])\n",
      "after h gate: QuantumDevice 1 wires with states: tensor([[0.7071+0.j, 0.7071+0.j]])\n",
      "after rx gate: QuantumDevice 1 wires with states: tensor([[0.6992-0.1057j, 0.6992-0.1057j]])\n"
     ]
    }
   ]
  },
  {
   "cell_type": "code",
   "source": [
    "# method 2 of using tq.Operator\n",
    "q_dev.reset_states(bsz=1)\n",
    "print(f\"all zero state: {q_dev}\")\n",
    "\n",
    "h_gate = tq.H()\n",
    "h_gate(q_dev, wires=0)\n",
    "\n",
    "print(f\"after h gate: {q_dev}\")\n",
    "\n",
    "rx_gate = tq.RX(has_params=True, init_params=[0.3])\n",
    "\n",
    "rx_gate(q_dev, wires=0)\n",
    "\n",
    "print(f\"after rx gate: {q_dev}\")\n",
    "bitstring = tq.measure(q_dev, n_shots=1024, draw_id=0)\n",
    "\n",
    "print(bitstring)"
   ],
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/",
     "height": 361
    },
    "id": "L-UjU64i0czW",
    "outputId": "691e2f60-3054-4917-d341-4531f0ae446c",
    "pycharm": {
     "name": "#%%\n"
    }
   },
   "execution_count": 19,
   "outputs": [
    {
     "output_type": "stream",
     "name": "stdout",
     "text": [
      "all zero state: QuantumDevice 1 wires with states: tensor([[1.+0.j, 0.+0.j]])\n",
      "after h gate: QuantumDevice 1 wires with states: tensor([[0.7071+0.j, 0.7071+0.j]])\n",
      "after rx gate: QuantumDevice 1 wires with states: tensor([[0.6992-0.1057j, 0.6992-0.1057j]])\n"
     ]
    },
    {
     "output_type": "display_data",
     "data": {
      "text/plain": [
       "<Figure size 432x288 with 1 Axes>"
      ],
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXcAAAETCAYAAADNpUayAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAZu0lEQVR4nO3dfZwcVZ3v8c/XJAQENYSMEfLAIERdXK8IkQUEL8K6EkTDywvytBjYaPa6+FJE0ah3F9yFu+HqXYRl1csCS5TnBVkisCwRwQAaJIQQiQEyYGISHhICCcTwkMTf/aPOhEqne7pnpnseTr7v12teU3XOqapT3TXfrj5VPa2IwMzM8vKm/u6AmZk1n8PdzCxDDnczsww53M3MMuRwNzPLkMPdzCxDDvcBSNKVks5L04dJeryJ6/5PSVPS9GmS7mviuk+RdGez1teN7X5I0hJJ6yUd29fbHwgkLZX05zXqthxPNerXS3pnC/u25ZizvjO0vztgXYuIe4F312sn6Vxgn4j4yzrrm9SMfklqB34HDIuITWndVwNXN2P93fT3wCURcVE/bHvQi4hdOqclXQmsiIj/VW+5vj7mrHt85r6dUCHX53tPYFF/d6JZJGVx0pX5MTfg+YEfACR9QNJ8SS9Luh7YsVR3uKQVpfmvS1qZ2j4u6UhJRwHfBE5Ib7EfSW3vkXS+pPuBDcA7U9lnt968LpG0TtJjko4sVWz1Vl/SuZKuSrNz0u+1aZsHVw7zSDpE0oNp3Q9KOqRUd4+kf5B0f9qXOyWN6uIx+pykDkkvSJolaY9U/iTwTuCnqR/Dqyy7VNLZkhZK+oOkyyWNTsMFL0v6maRdS+0PkvRLSWslPSLp8FLd6ZIWp+WekvTXpbpRkm5Ny70g6d7OcJMUkvYptS0PvR0uaUV6bp8F/k3SmyRNl/SkpDWSbpA0srT8qZKWpbpv1XrcSkZJmp36/QtJe5bWFZL2kTQNOAX4Wnosf5rqm3bMdR4jkr4r6UVJv5M0qdSXvSTNKT0v/9J5zEnaUdJVaZ/XpmNqdAP7vn2KCP/04w+wA7AM+DIwDDgO2Aicl+oPp3ibDMXwzHJgjzTfDuydps8FrqpY9z3A74H3UgzBDUtln031pwGbSts+AVgHjEz1S4E/L61vyzbStgMYWqo/DbgvTY8EXgROTds+Kc3vVurbk8C7gJ3S/Iwaj9ERwPPA/sBw4J+BOaX6rfpZZfmlwFxgNDAGWAXMBz5A8UL6c+Cc1HYMsAY4muLk56Npvi3VfxzYGxDw3ykCbP9U94/AD9NjOQw4DFCqC4ohjM4+XVnxHG8CLkj7txPwpdTnsans/wHXpvb7AuuBD6e6f0rLV30M0rZeLrW/qPN5quxbuV8tPOY2Ap8DhgCfB54uPU6/Ar5L8XdxKPASbxxzfw38FHhzWvYA4K39/Tc8UH985t7/DqL4A/heRGyMiBuBB2u03Uzxx7mvpGERsTQinqyz/isjYlFEbIqIjVXqV5W2fT3wOEWA9dbHgSUR8eO07WuBx4BPlNr8W0Q8ERGvADcA+9VY1ynAFRExPyJeA74BHKxi3L9R/xwRz0XESuBe4IGIeDgiXgVupgh6gL8Ebo+I2yPijxExG5hHEfZExG0R8WQUfgHcSRHiUITW7sCe6fG8N1IqNeCPFC8wr6XH438C34qIFWmfzwWOUzFkcxxwa0TMSXV/m5bvym2l9t+iePzGNdCvVhxzyyLiXyNiMzCT4jEbLWk88EHg7yLi9Yi4D5hVWm4jsBvFC9HmiHgoIl5qYB+2Sw73/rcHsLIiBJZVaxgRHcCZFH/oqyRd1zk80YXldeqrbbveOhuxB9vuxzKKM+NOz5amNwC7UN1W64qI9RRn02NqtK/mudL0K1XmO7e9J3B8etu/VtJaijPI3QEkTZI0Nw27rKUI/c7hpO8AHcCdachmejf6tzq90HTaE7i51IfFFEE7
muLx2PK8RsQfKB6PrpTbrwdeoIHnuUXH3JbnPSI2pMldUn9eKJVVruvHwH8B10l6WtL/kTSs3j5srxzu/e8ZYIwklcrG12ocEddExKEUf/xB8VaeNF11kTrbr7btp9P0HyjeAnd6RzfW+3TqY9l4YGWd5equS9LOFGdwPVlXPcuBH0fEiNLPzhExI43n30QxbDA6IkYAt1MM0RARL0fEVyLincAngbP0xjWMDdR+LGHbx3M5MKmiHzumdx7PAFvOuiW9meLx6Eq5/S4Uw2ZPV2m3zfPagmOulmeAkWl/Om3pd3o39O2I2Bc4BDgG+EwPt5U9h3v/+xXFeOkXJQ2T9CngwGoNJb1b0hEpZF6lOOPsfDv+HNCu7t+d8PbSto8H/oQisAAWACemuokUwwGdVqdt17o/+nbgXZJOljRU0gkUY8W3drN/ANcCp0vaL+37/6YYVlnag3XVcxXwCUkfkzQkXcQ7XNJYinHg4RT7vildCPyLzgUlHZMuTIri2sVm3nh+FgAnp3UeRTFe35UfAud3XviU1CZpcqq7EThG0qGSdqC4FbTe8350qf0/AHMjotoZ9nOUntMWHXNVRcQyiiGwcyXtIOlgSsN4kj4i6X2ShlCMxW+k/nDUdsvh3s8i4nXgUxQXml6guKj5kxrNhwMzKC4uPksRzN9Idf+efq+RNL8bXXgAmJDWeT5wXER0vsX/W4qLhy8C3wauKfV7Q2p/fxo6OKhiv9ZQnFl9hWLI4GvAMRHxfDf61rmun6W+3ERxdrc3cGJ319PgtpYDkynuBFlNcQZ9NvCmiHgZ+CLF9YEXgZPZekx4AvAzioudvwK+HxF3p7ovUQTVWoprCP9RpysXpXXfKelliourf5b6uAg4g+L5eCb1ZUWN9XS6BjiH4hg7gOLaQjWXU4yvr5X0H7TmmOvKKcDBFMfMecD1wGup7h0UL2wvUQxT/YJiqMaq6LxCbWY24Ki4NfixiDinv/sy2PjM3cwGDEkflLS3ivv8j6J4F1XvXY5VkcUn4cwsG++gGJbcjWKo6fMR8XD/dmlw8rCMmVmGPCxjZpYhh7uZWYYGxJj7qFGjor29vb+7YWY2qDz00EPPR0RbtboBEe7t7e3Mmzevv7thZjaoSKr6r0rAwzJmZllyuJuZZcjhbmaWIYe7mVmGHO5mZhlyuJuZZcjhbmaWIYe7mVmGBsSHmMxy1j79tv7ugg1gS2c04/vot+UzdzOzDDnczcwy5HA3M8uQw93MLEMNhbukpZJ+I2mBpHmpbKSk2ZKWpN+7pnJJulhSh6SFkvZv5Q6Ymdm2unPm/pGI2C8iJqb56cBdETEBuCvNA0wCJqSfacAPmtVZMzNrTG9uhZwMHJ6mZwL3AF9P5T+K4stZ50oaIWn3iHimNx2txbeZWVdadZuZ2UDX6Jl7AHdKekjStFQ2uhTYzwKj0/QYYHlp2RWpzMzM+kijZ+6HRsRKSW8HZkt6rFwZESEpurPh9CIxDWD8+PHdWdTMzOpo6Mw9Ilam36uAm4EDgeck7Q6Qfq9KzVcC40qLj01lleu8NCImRsTEtraqXwFoZmY9VDfcJe0s6S2d08BfAI8Cs4ApqdkU4JY0PQv4TLpr5iBgXavG283MrLpGhmVGAzdL6mx/TUTcIelB4AZJU4FlwKdT+9uBo4EOYANwetN7bWZmXaob7hHxFPD+KuVrgCOrlAdwRlN6Z2ZmPeJPqJqZZcjhbmaWIYe7mVmGHO5mZhlyuJuZZcjhbmaWIYe7mVmGHO5mZhlyuJuZZcjhbmaWIYe7mVmGHO5mZhlyuJuZZcjhbmaWIYe7mVmGHO5mZhlyuJuZZcjhbmaWIYe7mVmGHO5mZhlyuJuZZcjhbmaWIYe7mVmGHO5mZhlyuJuZZcjhbmaWIYe7mVmGHO5mZhlyuJuZZcjhbmaWIYe7mVmGGg53SUMkPSzp1jS/l6QHJHVIul7SDql8eJrvSPXtrem6mZnV0p0z9y8Bi0vzFwAXRsQ+
wIvA1FQ+FXgxlV+Y2pmZWR9qKNwljQU+DlyW5gUcAdyYmswEjk3Tk9M8qf7I1N7MzPpIo2fu3wO+Bvwxze8GrI2ITWl+BTAmTY8BlgOk+nWp/VYkTZM0T9K81atX97D7ZmZWTd1wl3QMsCoiHmrmhiPi0oiYGBET29ramrlqM7Pt3tAG2nwI+KSko4EdgbcCFwEjJA1NZ+djgZWp/UpgHLBC0lDgbcCapvfczMxqqnvmHhHfiIixEdEOnAj8PCJOAe4GjkvNpgC3pOlZaZ5U//OIiKb22szMutSb+9y/DpwlqYNiTP3yVH45sFsqPwuY3rsumplZdzUyLLNFRNwD3JOmnwIOrNLmVeD4JvTNzMx6yJ9QNTPLkMPdzCxDDnczsww53M3MMuRwNzPLkMPdzCxDDnczsww53M3MMuRwNzPLkMPdzCxDDnczsww53M3MMuRwNzPLkMPdzCxDDnczsww53M3MMuRwNzPLkMPdzCxDDnczsww53M3MMuRwNzPLkMPdzCxDDnczsww53M3MMuRwNzPLkMPdzCxDDnczsww53M3MMuRwNzPLkMPdzCxDdcNd0o6Sfi3pEUmLJH07le8l6QFJHZKul7RDKh+e5jtSfXtrd8HMzCo1cub+GnBERLwf2A84StJBwAXAhRGxD/AiMDW1nwq8mMovTO3MzKwP1Q33KKxPs8PSTwBHADem8pnAsWl6cpon1R8pSU3rsZmZ1dXQmLukIZIWAKuA2cCTwNqI2JSarADGpOkxwHKAVL8O2K2ZnTYzs641FO4RsTki9gPGAgcC7+nthiVNkzRP0rzVq1f3dnVmZlbSrbtlImItcDdwMDBC0tBUNRZYmaZXAuMAUv3bgDVV1nVpREyMiIltbW097L6ZmVXTyN0ybZJGpOmdgI8CiylC/rjUbApwS5qeleZJ9T+PiGhmp83MrGtD6zdhd2CmpCEULwY3RMStkn4LXCfpPOBh4PLU/nLgx5I6gBeAE1vQbzMz60LdcI+IhcAHqpQ/RTH+Xln+KnB8U3pnZmY94k+ompllyOFuZpYhh7uZWYYc7mZmGXK4m5llyOFuZpYhh7uZWYYc7mZmGXK4m5llyOFuZpYhh7uZWYYc7mZmGXK4m5llyOFuZpYhh7uZWYYc7mZmGXK4m5llyOFuZpYhh7uZWYYc7mZmGXK4m5llyOFuZpYhh7uZWYYc7mZmGXK4m5llyOFuZpYhh7uZWYYc7mZmGXK4m5llyOFuZpYhh7uZWYbqhrukcZLulvRbSYskfSmVj5Q0W9KS9HvXVC5JF0vqkLRQ0v6t3gkzM9taI2fum4CvRMS+wEHAGZL2BaYDd0XEBOCuNA8wCZiQfqYBP2h6r83MrEt1wz0inomI+Wn6ZWAxMAaYDMxMzWYCx6bpycCPojAXGCFp96b33MzMaurWmLukduADwAPA6Ih4JlU9C4xO02OA5aXFVqQyMzPrIw2Hu6RdgJuAMyPipXJdRAQQ3dmwpGmS5kmat3r16u4samZmdTQU7pKGUQT71RHxk1T8XOdwS/q9KpWvBMaVFh+byrYSEZdGxMSImNjW1tbT/puZWRWN3C0j4HJgcUT8U6lqFjAlTU8BbimVfybdNXMQsK40fGNmZn1gaANtPgScCvxG0oJU9k1gBnCDpKnAMuDTqe524GigA9gAnN7UHpuZWV11wz0i7gNUo/rIKu0DOKOX/TIzs17wJ1TNzDLkcDczy5DD3cwsQw53M7MMOdzNzDLkcDczy5DD3cwsQw53M7MMOdzNzDLkcDczy5DD3cwsQw53M7MMOdzNzDLkcDczy5DD3cwsQw53M7MMOdzNzDLkcDczy5DD3cwsQw53M7MMOdzNzDLkcDczy5DD3cwsQw53M7MMOdzNzDLkcDczy5DD3cwsQw53M7MMOdzNzDLkcDczy5DD3cwsQ3XDXdIVklZJerRUNlLSbElL0u9dU7kkXSypQ9JCSfu3svNmZlZdI2fuVwJHVZRNB+6KiAnA
XWkeYBIwIf1MA37QnG6amVl31A33iJgDvFBRPBmYmaZnAseWyn8UhbnACEm7N6uzZmbWmJ6OuY+OiGfS9LPA6DQ9BlhearcilZmZWR/q9QXViAggurucpGmS5kmat3r16t52w8zMSnoa7s91Drek36tS+UpgXKnd2FS2jYi4NCImRsTEtra2HnbDzMyq6Wm4zwKmpOkpwC2l8s+ku2YOAtaVhm/MzKyPDK3XQNK1wOHAKEkrgHOAGcANkqYCy4BPp+a3A0cDHcAG4PQW9NnMzOqoG+4RcVKNqiOrtA3gjN52yszMesefUDUzy5DD3cwsQw53M7MMOdzNzDLkcDczy5DD3cwsQw53M7MMOdzNzDLkcDczy5DD3cwsQw53M7MMOdzNzDLkcDczy5DD3cwsQw53M7MMOdzNzDLkcDczy5DD3cwsQw53M7MMOdzNzDLkcDczy5DD3cwsQw53M7MMOdzNzDLkcDczy5DD3cwsQw53M7MMOdzNzDLkcDczy5DD3cwsQw53M7MMtSTcJR0l6XFJHZKmt2IbZmZWW9PDXdIQ4F+AScC+wEmS9m32dszMrLZWnLkfCHRExFMR8TpwHTC5BdsxM7MahrZgnWOA5aX5FcCfVTaSNA2YlmbXS3q8BX3ZHo0Cnu/vTgwUuqC/e2BV+Bgt6eUxumetilaEe0Mi4lLg0v7afq4kzYuIif3dD7NafIz2jVYMy6wExpXmx6YyMzPrI60I9weBCZL2krQDcCIwqwXbMTOzGpo+LBMRmyR9AfgvYAhwRUQsavZ2rCYPddlA52O0Dygi+rsPZmbWZP6EqplZhhzuZmYZcribmWWo3+5zt+aQ9B6KTwCPSUUrgVkRsbj/emVm/c1n7oOYpK9T/HsHAb9OPwKu9T9ss4FO0un93Yec+W6ZQUzSE8B7I2JjRfkOwKKImNA/PTOrT9LvI2J8f/cjVx6WGdz+COwBLKso3z3VmfUrSQtrVQGj+7Iv2xuH++B2JnCXpCW88c/axgP7AF/ot16ZvWE08DHgxYpyAb/s++5sPxzug1hE3CHpXRT/Zrl8QfXBiNjcfz0z2+JWYJeIWFBZIemevu/O9sNj7mZmGfLdMmZmGXK4m5llyOE+iElql/RojbrLOr+7VtI3G1jXmZLe3EX9Zc34LtzU51ckbTMG2411nCbpkhp1vyxt5+SKum+kL21/XNLHerr9LvrVo+dD0hWSVtVadiCQdI+kbb5gQ9InOz9TIenY8jEi6UpJKyUNT/OjJC1N03tLWiBpfR/twnbH4Z6piPhsRPw2zdYNd4o7b6qGu6QhFevrrScjYr8mrWsrEXFImmwHtoR7Cp0TgfcCRwHfT1/m3ifqPB9Xpj4NOhExKyJmpNljgcoTgM3AX1VZrmXHgBUc7oPfUElXS1os6cbOs+/OMy1JM4Cd0lnS1ZJ2lnSbpEckPSrpBElfpLhf/m5Jd6fl10v6v5IeAQ4un7mluvPTOuZKGp3K907zv5F0XqNnZZK+JekJSfdJulbSV8v7kKa3nPUl41L9EknnlNbVuc0ZwGFpv79M8S8arouI1yLid0AHxV1GXfXrKEmPSZov6WJJt6byczv7mOYfldTek+cDICLmAC808lg1g6TdJN0paVF6R7EsPb5bvfOQ9FVJ55YWPTX1+1FJB6Y2p0m6RNIhwCeB76Q2e6dlvgd8WZLvzOtjDvfB793A9yPiT4CXgL8pV0bEdOCViNgvIk6hOEN8OiLeHxF/CtwRERcDTwMfiYiPpEV3Bh5I7e6r2ObOwNyIeD8wB/hcKr8IuCgi3kfxxeh1STqA4ox6P+Bo4IMN7veBwP8A/htwfJUhg+nAvWm/L6T6F7ePoQZJOwL/CnwCOAB4R4P96u7z0R/OAe6LiPcCN1N8NqIRb05n238DXFGuiIhfUnzj2tlp355MVb8H7gNObUrPrWEO98FveUTcn6avAg6t0/43wEclXSDpsIhYV6PdZuCmGnWvU9y/DPAQxRAIwMHA
v6fpa+p1PDkMuDkiNkTESzT+lYyzI2JNRLwC/IT6+91d7wF+FxFLorhf+KoGl+vu89EfPkzan4i4jW0/YFTLtWmZOcBbJY1ocLl/BM7GedOn/GAPfpUfVOjygwsR8QSwP0XInyfp72o0fbWLD0JtjDc+ILGZ1n0YbhNvHKM7VtR1a79p7he3l/sFW/etu/0aSLraL+jhvkXEEmAB8Omed826y+E++I2XdHCaPpniLXCljZKGAUjaA9gQEVcB36EIeoCXgbf0si9zKYZKoBhqacQc4FhJO0l6C8UwSKelFEMiAMdVLPdRSSMl7URxIe/+ivrK/ZkFnChpuKS9gAkU/0UTSXdJqhyieQxoL40dn1TRr/3TsvsDe5XquvV8dEXSF1R8H3GPdLH8nNQ3JE0Cdk3lzwFvT2Pyw4FjKpY7IS1zKLCuyru+ro6h84Gv1qizFnC4D36PA2dIWkzxR/qDKm0uBRamC3jvA36t4lbEc4DzSm3u6Lyg2kNnAmep+GdR+wC1hny2iIj5wPXAI8B/Ag+Wqr8LfF7Sw8CoikV/TTFstBC4KSLmVdQvBDani75fTl/SfgPwW+AO4IyI2CzpTamvW13QjIhXgWnAbZLmA6tK1TcBIyUtovgfPk+U6rr7fCDpWuBXwLslrZA0NbV7D7CmcuF0Yfay0vyC0vRlpesPVZcHvg18OPX/UxTj4qT/Lvr3FI/tbIoXuLJX03PxQ2Aq27oOOFvSw6UXRdK6FwHzqyxjLeJ/P2BNk+4MeSUiQtKJwEkRMbmiTTtwa7qYW20d5wLrI+K7Le5u5/b+FPiriDirTrvDga9GROXZbMuku3M+FRGvt3L5dBfSxIh4vifb6Q1J6yNil77e7vbAtydZMx0AXCJJwFqq3N9MMUb/NkkLBsJ9zhHxKNBlsPeX3r6Q9OULUXelM/ubKIaCrAV85m5mliGPuZuZZcjhbmaWIYe7mVmGHO5mZhlyuJuZZcjhbmaWof8PYQ1dpam8JcoAAAAASUVORK5CYII=\n"
     },
     "metadata": {
      "needs_background": "light"
     }
    },
    {
     "output_type": "stream",
     "name": "stdout",
     "text": [
      "[OrderedDict([('0', 503), ('1', 521)])]\n"
     ]
    }
   ]
  },
  {
   "cell_type": "code",
   "source": [
     "# tq.QuantumState to prepare an EPR pair\n",
    "\n",
    "q_state = tq.QuantumState(n_wires=2)\n",
    "q_state.h(wires=0)\n",
    "q_state.cnot(wires=[0, 1])\n",
    "\n",
    "print(q_state)\n",
    "bitstring = tq.measure(q_state, n_shots=1024, draw_id=0)\n",
    "print(bitstring)\n"
   ],
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/",
     "height": 350
    },
    "id": "DSxQlQ7C0wrG",
    "outputId": "af933737-4234-4da8-9312-6d7d58378925",
    "pycharm": {
     "name": "#%%\n"
    }
   },
   "execution_count": 20,
   "outputs": [
    {
     "output_type": "stream",
     "name": "stdout",
     "text": [
      "QuantumState 2 wires \n",
      " state: tensor([[0.7071+0.j, 0.0000+0.j, 0.0000+0.j, 0.7071+0.j]])\n"
     ]
    },
    {
     "output_type": "display_data",
     "data": {
      "text/plain": [
       "<Figure size 432x288 with 1 Axes>"
      ],
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXcAAAEZCAYAAABsPmXUAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAbdUlEQVR4nO3dfZgcZZ3u8e9tEgKCGkJCgCQwCFHEdUWILCB4EFYliCaXC/K2GFg0rouXIooEPbvgLpwNq2cRFl8OKyxR3hdEIiAL8mIADRIgBGJAAiYmAZIBEiCGt4Tf+aOeDpVJz3T3TPd05sn9ua6+pqqep6p+Xd1zT/XT1T2KCMzMLC9vaXcBZmbWfA53M7MMOdzNzDLkcDczy5DD3cwsQw53M7MMOdw3QpIukXRWmj5A0mNN3PYvJU1O08dLuruJ2z5W0i3N2l4D+/2QpMclrZI0qb/3vzGQtFDSX3fTtu751E37KknvbGFt655z1n8Gt7sA61lE3AW8u1Y/SWcCu0bE39bY3oRm1CWpA/gjMCQi1qRtXwZc1oztN+ifgQsi4rw27HvAi4itKtOSLgGWRMT/rrVefz/nrDE+c99EqJDr470TMK/dRTSLpCxOujJ/zm30fOA3ApI+IOkBSS9JugrYvNR2oKQlpfnTJC1NfR+TdLCkQ4BvAkeml9gPpb53Sjpb0j3AauCdadnn1t+9LpD0gqRHJR1caljvpb6kMyVdmmZnpp8r0z737TrMI2k/Sfelbd8nab9S252S/kXSPem+3CJpRA/H6POSFkh6XtIMSTuk5U8A7wR+keoYWmXdhZJOlTRX0p8lXSRpVBoueEnSryRtXeq/j6TfSFop6SFJB5baTpA0P633pKQvlNpGSLohrfe8pLsq4SYpJO1a6lseejtQ0pL02D4D/Jekt0iaKukJSc9JulrS8NL6x0lalNq+1d1xKxkh6dZU968l7VTaVkjaVdIU4FjgG+lY/iK1N+05V3mOSPqupBWS/ihpQqmWnSXNLD0u36885yRtLunSdJ9XpufUqDru+6YpInxr4w3YDFgEfBUYAhwOvA6cldoPpHiZDMXwzGJghzTfAeySps8ELu2y7TuBPwHvpRiCG5KWfS61Hw+sKe37SOAFYHhqXwj8dWl76/aR9h3A4FL78cDdaXo4sAI4Lu376DS/Tam2J4B3AVuk+WndHKODgGeBPYGhwH8AM0vt69VZZf2FwCxgFDAaWA48AHyA4g/p7cAZqe9o4DngUIqTn4+m+ZGp/RPALoCA/0URYHumtn8FfpSO5RDgAECpLSiGMCo1XdLlMV4DnJPu3xbAV1LNY9Ky/wdckfrvDqwCPpza/j2tX/UYpH29VOp/XuVx6lpbua4WPudeBz4PDAK+CDxVOk6/Bb5L8XuxP/Aibz7nvgD8AnhrWncv4O3t/h3eWG8+c2+/fSh+Ab4XEa9HxDXAfd30XUvxy7m7pCERsTAinqix/UsiYl5ErImI16u0Ly/t+yrgMYoA66tPAI9HxE/Tvq8AHgU+WerzXxHxh4h4Gbga2KObbR0LXBwRD0TEq8DpwL4qxv3r9R8RsSwilgJ3AfdGxIMR8QpwHUXQA/wtcFNE3BQRb0TErcBsirAnIm6MiCei8GvgFooQhyK0tgd2SsfzrkipVIc3KP7AvJqOx98D34qIJek+nwkcrmLI5nDghoiYmdr+Ma3fkxtL/b9FcfzG1lFXK55ziyLiPyNiLTCd4piNkrQj8EHgnyLitYi4G5hRWu91YBuKP0RrI+L+iHixjvuwSXK4t98OwNIuIbCoWseIWACcTPGLvlzSlZXhiR4srtFebd+1tlmPHdjwfiyiODOueKY0vRrYiurW21ZErKI4mx7dTf9qlpWmX64yX9n3TsAR6WX/SkkrKc4gtweQNEHSrDTsspIi9CvDSd8BFgC3pCGbqQ3U15n+0FTsBFxXqmE+RdCOojge
6x7XiPgzxfHoSbn/KuB56nicW/ScW/e4R8TqNLlVquf50rKu2/op8D/AlZKekvRvkobUug+bKod7+z0NjJak0rIdu+scEZdHxP4Uv/xB8VKeNF11lRr7r7bvp9L0nyleAlds18B2n0o1lu0ILK2xXs1tSdqS4gyuN9uqZTHw04gYVrptGRHT0nj+tRTDBqMiYhhwE8UQDRHxUkR8LSLeCXwKOEVvvoexmu6PJWx4PBcDE7rUsXl65fE0sO6sW9JbKY5HT8r9t6IYNnuqSr8NHtcWPOe68zQwPN2finV1p1dD346I3YH9gMOAz/ZyX9lzuLffbynGS78saYikTwN7V+so6d2SDkoh8wrFGWfl5fgyoEONX52wbWnfRwDvoQgsgDnAUaltPMVwQEVn2nd310ffBLxL0jGSBks6kmKs+IYG6wO4AjhB0h7pvv8fimGVhb3YVi2XAp+U9HFJg9KbeAdKGkMxDjyU4r6vSW8EfqyyoqTD0huTonjvYi1vPj5zgGPSNg+hGK/vyY+AsytvfEoaKWliarsGOEzS/pI2o7gUtNbjfmip/78AsyKi2hn2MkqPaYuec1VFxCKKIbAzJW0maV9Kw3iSPiLpfZIGUYzFv07t4ahNlsO9zSLiNeDTFG80PU/xpubPuuk+FJhG8ebiMxTBfHpq++/08zlJDzRQwr3AuLTNs4HDI6LyEv8fKd48XAF8G7i8VPfq1P+eNHSwT5f79RzFmdXXKIYMvgEcFhHPNlBbZVu/SrVcS3F2twtwVKPbqXNfi4GJFFeCdFKcQZ8KvCUiXgK+TPH+wArgGNYfEx4H/Irizc7fAj+IiDtS21cogmolxXsIP69Rynlp27dIeonizdW/SjXOA06ieDyeTrUs6WY7FZcDZ1A8x/aieG+hmosoxtdXSvo5rXnO9eRYYF+K58xZwFXAq6ltO4o/bC9SDFP9mmKoxqqovENtZrbRUXFp8KMRcUa7axlofOZuZhsNSR+UtIuK6/wPoXgVVetVjlWRxSfhzCwb21EMS25DMdT0xYh4sL0lDUweljEzy5CHZczMMuRwNzPL0EYx5j5ixIjo6OhodxlmZgPK/fff/2xEjKzWtlGEe0dHB7Nnz253GWZmA4qkql9VAh6WMTPLksPdzCxDDnczsww53M3MMuRwNzPLkMPdzCxDDnczsww53M3MMrRRfIjJzDZdHVNvbHcJbbVwWjP+H/2GfOZuZpYhh7uZWYYc7mZmGXK4m5llyOFuZpYhh7uZWYYc7mZmGXK4m5llqK5wl7RQ0sOS5kianZYNl3SrpMfTz63Tckk6X9ICSXMl7dnKO2BmZhtq5Mz9IxGxR0SMT/NTgdsiYhxwW5oHmACMS7cpwA+bVayZmdWnL8MyE4HpaXo6MKm0/CdRmAUMk7R9H/ZjZmYNqjfcA7hF0v2SpqRloyLi6TT9DDAqTY8GFpfWXZKWrUfSFEmzJc3u7OzsRelmZtader84bP+IWCppW+BWSY+WGyMiJEUjO46IC4ELAcaPH9/QumZm1rO6ztwjYmn6uRy4DtgbWFYZbkk/l6fuS4GxpdXHpGVmZtZPaoa7pC0lva0yDXwMeASYAUxO3SYD16fpGcBn01Uz+wAvlIZvzMysH9QzLDMKuE5Spf/lEXGzpPuAqyWdCCwCPpP63wQcCiwAVgMnNL3qEn8XdGu+C9rMBraa4R4RTwLvr7L8OeDgKssDOKkp1ZmZWa/4E6pmZhlyuJuZZcjhbmaWIYe7mVmGHO5mZhlyuJuZZcjhbmaWIYe7mVmGHO5mZhlyuJuZZcjhbmaWIYe7mVmGHO5mZhlyuJuZZcjhbmaWIYe7mVmGHO5mZhlyuJuZZcjhbmaWIYe7mVmGHO5mZhlyuJuZZcjhbmaWIYe7mVmGHO5mZhlyuJuZZcjhbmaWIYe7mVmGHO5mZhlyuJuZZajucJc0SNKDkm5I8ztLulfSAklXSdosLR+a5hek9o7WlG5mZt1p5Mz9K8D80vw5
wLkRsSuwAjgxLT8RWJGWn5v6mZlZP6or3CWNAT4B/DjNCzgIuCZ1mQ5MStMT0zyp/eDU38zM+km9Z+7fA74BvJHmtwFWRsSaNL8EGJ2mRwOLAVL7C6n/eiRNkTRb0uzOzs5elm9mZtXUDHdJhwHLI+L+Zu44Ii6MiPERMX7kyJHN3LSZ2SZvcB19PgR8StKhwObA24HzgGGSBqez8zHA0tR/KTAWWCJpMPAO4LmmV25mZt2qeeYeEadHxJiI6ACOAm6PiGOBO4DDU7fJwPVpekaaJ7XfHhHR1KrNzKxHfbnO/TTgFEkLKMbUL0rLLwK2SctPAab2rUQzM2tUPcMy60TEncCdafpJYO8qfV4BjmhCbWZm1kv+hKqZWYYc7mZmGXK4m5llyOFuZpYhh7uZWYYc7mZmGXK4m5llyOFuZpYhh7uZWYYc7mZmGXK4m5llyOFuZpYhh7uZWYYc7mZmGXK4m5llyOFuZpYhh7uZWYYc7mZmGXK4m5llyOFuZpYhh7uZWYYc7mZmGXK4m5llyOFuZpYhh7uZWYYc7mZmGXK4m5llyOFuZpYhh7uZWYYc7mZmGaoZ7pI2l/Q7SQ9Jmifp22n5zpLulbRA0lWSNkvLh6b5Bam9o7V3wczMuqrnzP1V4KCIeD+wB3CIpH2Ac4BzI2JXYAVwYup/IrAiLT839TMzs35UM9yjsCrNDkm3AA4CrknLpwOT0vTENE9qP1iSmlaxmZnVVNeYu6RBkuYAy4FbgSeAlRGxJnVZAoxO06OBxQCp/QVgmyrbnCJptqTZnZ2dfbsXZma2nrrCPSLWRsQewBhgb2C3vu44Ii6MiPERMX7kyJF93ZyZmZU0dLVMRKwE7gD2BYZJGpyaxgBL0/RSYCxAan8H8FxTqjUzs7rUc7XMSEnD0vQWwEeB+RQhf3jqNhm4Pk3PSPOk9tsjIppZtJmZ9Wxw7S5sD0yXNIjij8HVEXGDpN8DV0o6C3gQuCj1vwj4qaQFwPPAUS2o28zMelAz3CNiLvCBKsufpBh/77r8FeCIplRnZma94k+ompllyOFuZpYhh7uZWYYc7mZmGXK4m5llyOFuZpYhh7uZWYYc7mZmGXK4m5llyOFuZpYhh7uZWYYc7mZmGXK4m5llyOFuZpYhh7uZWYYc7mZmGXK4m5llyOFuZpYhh7uZWYYc7mZmGXK4m5llyOFuZpYhh7uZWYYc7mZmGXK4m5llyOFuZpYhh7uZWYYc7mZmGXK4m5llyOFuZpahmuEuaaykOyT9XtI8SV9Jy4dLulXS4+nn1mm5JJ0vaYGkuZL2bPWdMDOz9dVz5r4G+FpE7A7sA5wkaXdgKnBbRIwDbkvzABOAcek2Bfhh06s2M7Me1Qz3iHg6Ih5I0y8B84HRwERgeuo2HZiUpicCP4nCLGCYpO2bXrmZmXWroTF3SR3AB4B7gVER8XRqegYYlaZHA4tLqy1Jy7pua4qk2ZJmd3Z2Nli2mZn1pO5wl7QVcC1wckS8WG6LiACikR1HxIURMT4ixo8cObKRVc3MrIa6wl3SEIpgvywifpYWL6sMt6Sfy9PypcDY0upj0jIzM+sn9VwtI+AiYH5E/HupaQYwOU1PBq4vLf9sumpmH+CF0vCNmZn1g8F19PkQcBzwsKQ5adk3gWnA1ZJOBBYBn0ltNwGHAguA1cAJTa3YzMxqqhnuEXE3oG6aD67SP4CT+liXmZn1gT+hamaWIYe7mVmGHO5mZhlyuJuZZcjhbmaWIYe7mVmGHO5mZhlyuJuZZcjhbmaWIYe7mVmGHO5mZhlyuJuZZcjhbmaWIYe7mVmGHO5mZhlyuJuZZcjhbmaWIYe7mVmGHO5mZhlyuJuZZcjhbmaWIYe7mVmGHO5mZhlyuJuZZcjhbmaWIYe7mVmGHO5mZhlyuJuZZcjhbmaWIYe7mVmGaoa7pIslLZf0SGnZcEm3Sno8/dw6LZek8yUtkDRX0p6tLN7MzKqr58z9EuCQLsumArdFxDjgtjQP
MAEYl25TgB82p0wzM2tEzXCPiJnA810WTwSmp+npwKTS8p9EYRYwTNL2zSrWzMzq09sx91ER8XSafgYYlaZHA4tL/ZakZRuQNEXSbEmzOzs7e1mGmZlV0+c3VCMigOjFehdGxPiIGD9y5Mi+lmFmZiW9DfdlleGW9HN5Wr4UGFvqNyYtMzOzftTbcJ8BTE7Tk4HrS8s/m66a2Qd4oTR8Y2Zm/WRwrQ6SrgAOBEZIWgKcAUwDrpZ0IrAI+EzqfhNwKLAAWA2c0IKazcyshprhHhFHd9N0cJW+AZzU16LMzKxv/AlVM7MMOdzNzDLkcDczy5DD3cwsQw53M7MMOdzNzDLkcDczy5DD3cwsQw53M7MMOdzNzDLkcDczy5DD3cwsQw53M7MMOdzNzDLkcDczy5DD3cwsQw53M7MMOdzNzDLkcDczy5DD3cwsQw53M7MMOdzNzDLkcDczy5DD3cwsQw53M7MMOdzNzDLkcDczy5DD3cwsQw53M7MMOdzNzDLUknCXdIikxyQtkDS1FfswM7PuNT3cJQ0Cvg9MAHYHjpa0e7P3Y2Zm3WvFmfvewIKIeDIiXgOuBCa2YD9mZtaNwS3Y5mhgcWl+CfBXXTtJmgJMSbOrJD3Wglr6wwjg2XbtXOe0a89N09bjlwkfw74ZyL/DO3XX0Ipwr0tEXAhc2K79N4uk2RExvt11DFQ+fn3nY9g3uR6/VgzLLAXGlubHpGVmZtZPWhHu9wHjJO0saTPgKGBGC/ZjZmbdaPqwTESskfQl4H+AQcDFETGv2fvZiAz4oaU28/HrOx/Dvsny+Cki2l2DmZk1mT+hamaWIYe7mVmGHO5mZhlyuJuZZcjh3gBJgyV9QdLNkuam2y8l/b2kIe2ubyCTlOUVC2bt4qtlGiDpCmAlMJ3iaxWg+JDWZGB4RBzZrtoGAknDu2sCHoqIMf1Zz0Ak6R3A6cAkYFsggOXA9cC0iFjZxvIGNEm/jIgJ7a6jWdr29QMD1F4R8a4uy5YAsyT9oR0FDTCdwCKKMK+INL9tWyoaeK4GbgcOjIhnACRtR3GCcTXwsTbWttGTtGd3TcAe/VlLqzncG/O8pCOAayPiDQBJbwGOAFa0tbKB4Ung4Ij4U9cGSYur9LcNdUTEel81lUL+HEl/16aaBpL7gF+z/glGxbB+rqWlHO6NOQo4B/i+pMrL32HAHanNevY9YGtgg3AH/q2faxmoFkn6BjA9IpYBSBoFHM/638Zq1c0HvhARj3dtyO0Ew2PuDZL0Horvpx+dFi0Fro+I+e2rauCQtBsbHr8ZPn71kbQ1MJXiGFaGspZRfH/TtIjwK8geSDoceDgiNviKcUmTIuLnbSirJXy1TAMknQZcTjFOfG+6AVzhfydYWzrjvJLiJfHv0k34+NUtIlZExGkRsVtEDE+390TEaRRvsloPIuKaasGebN2vxbSYz9wbkN40fW9EvN5l+WbAvIgY157KBgYfv9aS9KeI2LHddQxUuR0/j7k35g1gB4orPsq2T23WMx+/PpI0t7smYFR/1jIQbUrHz+HemJOB2yQ9zptvXu0I7Ap8qW1VDRw+fn03Cvg4G16dJeA3/V/OgLPJHD+HewMi4mZJ76L4J+DlNwTvi4i17atsYPDxa4obgK0iYk7XBkl39n85A84mc/w85m5mliFfLWNmliGHu5lZhhzuA5ikDkmPdNP2Y0m7p+lv1rGtkyW9tYf2ddvri1Tzy5I2GPNsYBvHS7qgm7bflPZzTJe20yUtkPSYpI/3dv891NWrx0PSxZKWd7fuxkDSnZLGV1n+qcpnFCRNKj9HJF0iaamkoWl+hKSFaXoXSXMkreqnu7DJcbhnKiI+FxG/T7M1w53iSpaq4S5pUJft9dUTEdGSL2mKiP3SZAewLtxT6BwFvBc4BPiBpEGtqKGbunp6PC5JNQ04ETEjIqal2UlA1xOAtcAG33kTES17DljB4T7wDZZ0maT5kq6pnH1XzrQkTQO2SGdJl0naUtKN
kh6S9IikIyV9meL68zsk3ZHWXyXp/0p6CNi3fOaW2s5O25iVvtukcjY2S9LDks6q96xM0rck/UHS3ZKukPT18n1I0+vO+pKxqf1xSWeUtlXZ5zTggHS/v0rxcf0rI+LViPgjsIDiqp2e6jpE0qOSHpB0vqQb0vIzKzWm+UckdfTm8QCIiJnA8/Ucq2aQtI2kWyTNS68oFqXju94rD0lfl3RmadXjUt2PSNo79Tle0gWS9gM+BXwn9dklrfM94KuSfGVeP3O4D3zvBn4QEe8BXgT+odwYEVOBlyNij4g4luIM8amIeH9E/AVwc0ScDzwFfCQiPpJW3RK4N/W7u8s+twRmRcT7gZnA59Py84DzIuJ9vPl99z2StBfFGfUewKHAB+u833sDfwP8JXBElSGDqcBd6X6fS3HpZfmLoZbw5uWY1eraHPhP4JPAXsB2ddbV6OPRDmcAd0fEe4HrKD5rUI+3prPtfwAuLjdExG8ovt/m1HTfnkhNfwLuBo5rSuVWN4f7wLc4Iu5J05cC+9fo/zDwUUnnSDogIl7opt9a4Npu2l6juF4Y4H6KIRCAfYH/TtOX1yo8OQC4LiJWR8SLFAFRj1sj4rmIeBn4GbXvd6N2A/4YEY9Hcb3wpXWu1+jj0Q4fJt2fiLiR+r+u+oq0zkzg7ZLq/YrcfwVOxXnTr3ywB76uH1To8YMLEfEHYE+KkD9L0j910/WVHj5Y9Hq8+QGJtbTuw3BrePM5unmXtobuN8WHpcaW5sekZX2tC9avrdG6NiY93S/o5X1LX687B/hM70uzRjncB74dJe2bpo+heAnc1etK/+NV0g7A6oi4FPgORdADvAS8rY+1zKIYKoH6v99+JjBJ0haS3kYxDFKxkGJIBODwLut9VNJwSVtQvJF3T5f2rvdnBnCUpKGSdgbGUXwrJZJuk9R1iOZRoKM0dnx0l7r2TOvuCexcamvo8eiJpC9J6vXXMvSw/sxUG5Im8Oa3IS4Dtk1j8kOBw7qsd2RaZ3/ghSqv+np6Dp0NfL2bNmsBh/vA9xhwkqT5FL+kP6zS50JgbnoD733A71RcingGcFapz82VN1R76WTgFBVfzrQr0N2QzzoR8QBwFfAQ8EuK/5RT8V3gi5IeBEZ0WfV3FMNGcyn+M9bsLu1zgbXpTd+vRsQ8in9D93vgZuCkiFir4j9p7UqXNzQj4hVgCnCjpAco/k9pxbXAcEnzKL4Tp/wvFht9PCr/m/e3wLslLZF0Yuq3G/Bc15XTG7M/Ls3PKU3/uPT+Q9X1gW8DH071f5r0z1PSt3X+M8WxvZXiD1zZK+mx+BFwIhu6EjhV0oOlP4qkbc8DHqiyjrWIv37AmiZdGfJyRISko4CjI2Jilz4dwA3pzdxq2zgTWBUR321xuZX9/QXwdxFxSo1+BwJfj4iuZ7Mtk67O+XREvNbK9dNVSOMj4tne7KcvJK2KiK36e7+bAl+eZM20F3CBJAErqXJ9M8UY/TskzdkYrnOOiEeAHoO9Xfr6h6Q//xA1Kp3ZX0sxFGQt4DN3M7MMeczdzCxDDnczsww53M3MMuRwNzPLkMPdzCxDDnczswz9f2/AIduoHL2uAAAAAElFTkSuQmCC\n"
     },
     "metadata": {
      "needs_background": "light"
     }
    },
    {
     "output_type": "stream",
     "name": "stdout",
     "text": [
      "[OrderedDict([('00', 492), ('01', 0), ('10', 0), ('11', 532)])]\n"
     ]
    }
   ]
  },
  {
   "cell_type": "code",
   "source": [
    "# tq.QuantumState\n",
    "q_state = tq.QuantumState(n_wires=3)\n",
    "q_state.x(wires=1)\n",
    "q_state.rx(wires=2, params=0.6 * np.pi)\n",
    "print(q_state)\n",
    "\n",
    "q_state.ry(wires=0, params=0.3 * np.pi)\n",
    "\n",
    "q_state.qubitunitary(wires=1, params=[[0, 1j], [-1j, 0]])\n",
    "\n",
    "q_state.cnot(wires=[0, 1])\n",
    "\n",
    "print(q_state)\n",
    "bitstring = tq.measure(q_state, n_shots=1024, draw_id=0)\n",
    "\n",
    "print(bitstring)"
   ],
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/",
     "height": 446
    },
    "id": "VJhfSURF06lP",
    "outputId": "e81c9476-4c85-4f33-f58d-ef2bd64fd82f",
    "pycharm": {
     "name": "#%%\n"
    }
   },
   "execution_count": 21,
   "outputs": [
    {
     "output_type": "stream",
     "name": "stdout",
     "text": [
      "QuantumState 3 wires \n",
      " state: tensor([[0.0000+0.0000j, 0.0000+0.0000j, 0.5878+0.0000j, 0.0000-0.8090j,\n",
      "         0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j]])\n",
      "QuantumState 3 wires \n",
      " state: tensor([[0.0000+0.5237j, 0.7208+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j,\n",
      "         0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.2668j, 0.3673+0.0000j]])\n"
     ]
    },
    {
     "output_type": "display_data",
     "data": {
      "text/plain": [
       "<Figure size 432x288 with 1 Axes>"
      ],
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXcAAAEfCAYAAAC6Z4bJAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAgAElEQVR4nO3de7wdZX3v8c9XiIBghUCMkASCEFGsx4gRwUsPghdAK9QDGrQIFBvb4qmIt6DtAVtoY9Ui1qqNgkRBLgWVFNCCXETUAAFCJFwkSGISLtkC4VIESfieP+bZZGVl7b3Xvq6d2d/367Vee+Z5npn5zVpr/2bWMzfZJiIi6uV5nQ4gIiKGXpJ7REQNJblHRNRQkntERA0luUdE1FCSe0REDSW5j0KSzpJ0Shl+s6S7hnDeP5J0VBk+WtJ1QzjvD0i6fKjm14/lvlHS3ZKekHToSC9/NJC0TNJbe6h77vvUQ/0Tkl46jLE9952LkbN5pwOI3tn+GbBHX+0knQzsbvvP+5jfQUMRl6SpwL3AONtry7zPAc4Zivn30z8AX7V9egeWvcmzvU33sKSzgJW2/66v6Ub6Oxf9kz33MUKVun7euwBLOh3EUJFUi52umn/nRr288aOApNdIulnS45LOB7ZsqNtP0sqG8U9LWlXa3iXpAEkHAp8B3ld+Yt9a2l4j6VRJPweeBF5ayj604eL1VUmPSrpT0gENFRv81Jd0sqSzy+i15e+assx9m7t5JL1B0o1l3jdKekND3TWS/lHSz8u6XC5ph17eo7+UtFTSw5LmS9qplN8DvBT4rxLHFi2mXSbpk5IWS/ofSWdImli6Cx6X9BNJ2zW030fSLyStkXSrpP0a6o6RdEeZ7jeSPtxQt4OkS8p0D0v6WXdyk2RJuze0bex620/SyvLZPgB8W9LzJM2WdI+khyRdIGl8w/RHSlpe6j7b0/vWYAdJV5S4fyppl4Z5WdLukmYBHwA+Vd7L/yr1Q/ad6/6OSPqipEck3SvpoIZYdpV0bcPn8u/d3zlJW0o6u6zzmvKdmtjGuo9NtvPq4At4PrAc+BgwDjgMeAY4pdTvR/UzGarumRXATmV8KrBbGT4ZOLtp3tcAvwVeSdUFN66UfajUHw2sbVj2+4BHgfGlfhnw1ob5PbeMsmwDmzfUHw1cV4bHA48AR5ZlH1HGt2+I7R7gZcBWZXxOD+/R/sDvgL2ALYB/A65tqN8gzhbTLwMWABOBScBq4GbgNVQb0quAk0rbScBDwMFUOz9vK+MTSv07gd0AAf+bKoHtVer+GfhGeS/HAW8GVOpM1YXRHdNZTZ/xWuDzZf22Aj5aYp5cyv4DOLe03xN4AviTUvevZfqW70FZ1uMN7U/v/pyaY2uMaxi/c88AfwlsBvw1cF/D+/RL4ItU/xdvAh5j/Xfuw8B/AS8o074W+KNO/w+P1lf23DtvH6p/gC/bfsb2hcCNPbRdR/XPuaekcbaX2b6nj/mfZXuJ7bW2n2lRv7ph2ecDd1ElsMF6J3C37e+WZZ8L3An8aUObb9v+te3fAxcA03uY1weAM23fbPtp4ERgX1X9/u36N9sP2l4F/Ay43vYttp8CfkCV6AH+HLjM9mW2n7V9BbCQKtlj+1Lb97jyU+ByqiQOVdLaEdilvJ8/c8lKbXiWagPzdHk//gr4rO2VZZ1PBg5T1WVzGHCJ7WtL3d+X6XtzaUP7z1K9f1PaiGs4vnPLbX/T9jpgHtV7NlHSzsDrgP9n+w+2rwPmN0z3DLA91YZone2bbD/WxjqMSUnunbcTsKopCSxv1dD2UuB4qn/01ZLO6+6e6MWKPupbLbuvebZjJzZej+VUe8bdHmgYfhLYhtY2mJftJ6j2pif10L6VBxuGf99ivHvZuwCHl5/9ayStodqD3BFA0kGSFpRulzVUSb+7O+kLwFLg8tJlM7sf8XWVDU23XYAfNMRwB1WinUj1fjz3
udr+H6r3ozeN7Z8AHqaNz3mYvnPPfe62nyyD25R4Hm4oa57Xd4H/Bs6TdJ+kf5E0rq91GKuS3DvvfmCSJDWU7dxTY9vfs/0mqn9+U/2Upwy3nKSP5bda9n1l+H+ofgJ3e0k/5ntfibHRzsCqPqbrc16StqbagxvIvPqyAviu7W0bXlvbnlP68y+i6jaYaHtb4DKqLhpsP27747ZfCrwbOEHrj2E8Sc/vJWz8fq4ADmqKY8vyy+N+4Lm9bkkvoHo/etPYfhuqbrP7WrTb6HMdhu9cT+4Hxpf16fZc3OXX0Ods7wm8AXgX8MEBLqv2ktw775dU/aV/K2mcpPcAe7dqKGkPSfuXJPMU1R5n98/xB4Gp6v/ZCS9uWPbhwCuoEhbAImBmqZtB1R3Qrassu6fzoy8DXibp/ZI2l/Q+qr7iS/oZH8C5wDGSppd1/yeqbpVlA5hXX84G/lTSOyRtVg7i7SdpMlU/8BZU6762HAh8e/eEkt5VDkyK6tjFOtZ/PouA95d5HkjVX9+bbwCndh/4lDRB0iGl7kLgXZLeJOn5VKeC9vW5H9zQ/h+BBbZb7WE/SMNnOkzfuZZsL6fqAjtZ0vMl7UtDN56kt0h6laTNqPrin6Hv7qgxK8m9w2z/AXgP1YGmh6kOan6/h+ZbAHOoDi4+QJWYTyx1/1n+PiTp5n6EcD0wrczzVOAw290/8f+e6uDhI8DngO81xP1kaf/z0nWwT9N6PUS1Z/Vxqi6DTwHvsv27fsTWPa+flFguotq72w2Y2d/5tLmsFcAhVGeCdFHtQX8SeJ7tx4G/pTo+8AjwfjbsE54G/ITqYOcvga/ZvrrUfZQqUa2hOobwwz5COb3M+3JJj1MdXH19iXEJcBzV53F/iWVlD/Pp9j3gJKrv2Gupji20cgZV//oaST9keL5zvfkAsC/Vd+YU4Hzg6VL3EqoN22NU3VQ/peqqiRa6j1BHRIw6qk4NvtP2SZ2OZVOTPfeIGDUkvU7SbqrO8z+Q6ldUX79yooVaXAkXEbXxEqpuye2pupr+2vYtnQ1p05RumYiIGkq3TEREDY2KbpkddtjBU6dO7XQYERGblJtuuul3tie0qhsVyX3q1KksXLiw02FERGxSJLW8mh3SLRMRUUtJ7hERNZTkHhFRQ0nuERE1lOQeEVFDSe4RETWU5B4RUUNJ7hERNZTkHhFRQ6PiCtW6mjr70o4uf9mcoXjOdURsirLnHhFRQ0nuERE1lOQeEVFDbSf38tT2WyRdUsZ3lXS9pKWSzi9PVUfSFmV8aamfOjyhR0RET/qz5/5RqieOd/s8cJrt3amevn5sKT8WeKSUn1baRUTECGoruUuaDLwT+FYZF7A/cGFpMg84tAwfUsYp9QeU9hERMULa3XP/MvAp4Nkyvj2wxvbaMr4SmFSGJwErAEr9o6X9BiTNkrRQ0sKurq4Bhh8REa30mdwlvQtYbfumoVyw7bm2Z9ieMWFCy6dERUTEALVzEdMbgXdLOhjYEvgj4HRgW0mbl73zycCq0n4VMAVYKWlz4EXAQ0MeeURE9KjPPXfbJ9qebHsqMBO4yvYHgKuBw0qzo4CLy/D8Mk6pv8q2hzTqiIjo1WDOc/80cIKkpVR96meU8jOA7Uv5CcDswYUYERH91a97y9i+BrimDP8G2LtFm6eAw4cgtoiIGKBcoRoRUUNJ7hERNZTkHhFRQ0nuERE1lOQeEVFDSe4RETWU5B4RUUNJ7hERNZTkHhFRQ0nuERE1lOQeEVFDSe4RETWU5B4RUUNJ7hERNZTkHhFRQ0nuERE11M4DsreUdIOkWyUtkfS5Un6WpHslLSqv6aVckr4iaamkxZL2Gu6ViIiIDbXzJKangf1tPyFpHHCdpB+Vuk/avrCp/UHAtPJ6PfD18jciIkZIOw/Itu0nyui48urtgdeHAN8p0y0AtpW04+BDjYiIdrXV5y5pM0mLgNXAFbavL1Wnlq6X0yRtUcomASsaJl9ZyprnOUvSQkkLu7q6
BrEKERHRrK3kbnud7enAZGBvSX8MnAi8HHgdMB74dH8WbHuu7Rm2Z0yYMKGfYUdERG/6dbaM7TXA1cCBtu8vXS9PA98G9i7NVgFTGiabXMoiImKEtHO2zARJ25bhrYC3AXd296NLEnAocFuZZD7wwXLWzD7Ao7bvH5boIyKipXbOltkRmCdpM6qNwQW2L5F0laQJgIBFwF+V9pcBBwNLgSeBY4Y+7IiI6E2fyd32YuA1Lcr376G9geMGH1pERAxUrlCNiKihJPeIiBpKco+IqKEk94iIGkpyj4iooST3iIgaSnKPiKihJPeIiBpKco+IqKEk94iIGkpyj4iooST3iIgaSnKPiKihJPeIiBpKco+IqKEk94iIGmrnMXtbSrpB0q2Slkj6XCnfVdL1kpZKOl/S80v5FmV8aamfOryrEBERzdrZc38a2N/2q4HpwIHl2aifB06zvTvwCHBsaX8s8EgpP620i4iIEdRncnfliTI6rrwM7A9cWMrnUT0kG+CQMk6pP6A8RDsiIkZIW33ukjaTtAhYDVwB3AOssb22NFkJTCrDk4AVAKX+UWD7FvOcJWmhpIVdXV2DW4uIiNhAW8nd9jrb04HJwN7Aywe7YNtzbc+wPWPChAmDnV1ERDTo19kyttcAVwP7AttK2rxUTQZWleFVwBSAUv8i4KEhiTYiItrSztkyEyRtW4a3At4G3EGV5A8rzY4CLi7D88s4pf4q2x7KoCMioneb992EHYF5kjaj2hhcYPsSSbcD50k6BbgFOKO0PwP4rqSlwMPAzGGIOyIietFncre9GHhNi/LfUPW/N5c/BRw+JNFFRMSA5ArViIgaSnKPiKihJPeIiBpKco+IqKEk94iIGkpyj4iooST3iIgaauciplFt6uxLO7r8ZXPe2dHlR0S0kj33iIgaSnKPiKihJPeIiBpKco+IqKEk94iIGkpyj4iooST3iIgaSnKPiKihdh6zN0XS1ZJul7RE0kdL+cmSVklaVF4HN0xzoqSlku6S9I7hXIGIiNhYO1eorgU+bvtmSS8EbpJ0Rak7zfYXGxtL2pPq0XqvBHYCfiLpZbbXDWXgERHRsz733G3fb/vmMvw41cOxJ/UyySHAebaftn0vsJQWj+OLiIjh068+d0lTqZ6nen0p+oikxZLOlLRdKZsErGiYbCUtNgaSZklaKGlhV1dXvwOPiIietZ3cJW0DXAQcb/sx4OvAbsB04H7gS/1ZsO25tmfYnjFhwoT+TBoREX1oK7lLGkeV2M+x/X0A2w/aXmf7WeCbrO96WQVMaZh8cimLiIgR0s7ZMgLOAO6w/a8N5Ts2NPsz4LYyPB+YKWkLSbsC04Abhi7kiIjoSztny7wROBL4laRFpewzwBGSpgMGlgEfBrC9RNIFwO1UZ9oclzNlIiJGVp/J3fZ1gFpUXdbLNKcCpw4iroiIGIRcoRoRUUNJ7hERNZTkHhFRQ0nuERE1lOQeEVFDSe4RETWU5B4RUUNJ7hERNZTkHhFRQ0nuERE1lOQeEVFDSe4RETWU5B4RUUNJ7hERNZTkHhFRQ0nuERE11M5j9qZIulrS7ZKWSPpoKR8v6QpJd5e/25VySfqKpKWSFkvaa7hXIiIiNtTOnvta4OO29wT2AY6TtCcwG7jS9jTgyjIOcBDVc1OnAbOArw951BER0as+k7vt+23fXIYfB+4AJgGHAPNKs3nAoWX4EOA7riwAtm16mHZERAyzfvW5S5oKvAa4Hpho+/5S9QAwsQxPAlY0TLaylDXPa5akhZIWdnV19TPsiIjoTZ8PyO4maRvgIuB4249J65+ZbduS3J8F254LzAWYMWNGv6aNiBhKU2df2rFlL5vzzmGZb1t77pLGUSX2c2x/vxQ/2N3dUv6uLuWrgCkNk08uZRERMULaOVtGwBnAHbb/taFqPnBUGT4KuLih/IPlrJl9gEcbum8iImIEtNMt80bgSOBXkhaVss8Ac4ALJB0LLAfeW+ouAw4GlgJPAscM
acQREdGnPpO77esA9VB9QIv2Bo4bZFwRETEIuUI1IqKGktwjImooyT0iooaS3CMiaijJPSKihpLcIyJqKMk9IqKGktwjImooyT0iooaS3CMiaijJPSKihpLcIyJqKMk9IqKGktwjImooyT0iooaS3CMiaqidx+ydKWm1pNsayk6WtErSovI6uKHuRElLJd0l6R3DFXhERPSsnT33s4ADW5SfZnt6eV0GIGlPYCbwyjLN1yRtNlTBRkREe/pM7ravBR5uc36HAOfZftr2vVTPUd17EPFFRMQADKbP/SOSFpdum+1K2SRgRUOblaVsI5JmSVooaWFXV9cgwoiIiGYDTe5fB3YDpgP3A1/q7wxsz7U9w/aMCRMmDDCMiIhoZUDJ3faDttfZfhb4Juu7XlYBUxqaTi5lERExggaU3CXt2DD6Z0D3mTTzgZmStpC0KzANuGFwIUZERH9t3lcDSecC+wE7SFoJnATsJ2k6YGAZ8GEA20skXQDcDqwFjrO9bnhCj4iInvSZ3G0f0aL4jF7anwqcOpigIiJicPpM7hERQ2Hq7Es7tuxlc97ZsWV3Sm4/EBFRQ0nuERE1lOQeEVFDSe4RETWU5B4RUUNJ7hERNZTkHhFRQ0nuERE1lOQeEVFDSe4RETWU5B4RUUNJ7hERNZTkHhFRQ0nuERE1lOQeEVFDfSZ3SWdKWi3ptoay8ZKukHR3+btdKZekr0haKmmxpL2GM/iIiGitnT33s4ADm8pmA1fangZcWcYBDqJ6buo0YBbw9aEJMyIi+qPP5G77WuDhpuJDgHlleB5waEP5d1xZAGzb9DDtiIgYAQPtc59o+/4y/AAwsQxPAlY0tFtZyjYiaZakhZIWdnV1DTCMiIhoZdAHVG0b8ACmm2t7hu0ZEyZMGGwYERHRYKDJ/cHu7pbyd3UpXwVMaWg3uZRFRMQIGmhynw8cVYaPAi5uKP9gOWtmH+DRhu6biIgYIZv31UDSucB+wA6SVgInAXOACyQdCywH3luaXwYcDCwFngSOGYaYIyKiD30md9tH9FB1QIu2Bo4bbFARETE4uUI1IqKGktwjImooyT0iooaS3CMiaijJPSKihpLcIyJqKMk9IqKGktwjImooyT0iooaS3CMiaijJPSKihpLcIyJqKMk9IqKGktwjImooyT0iooaS3CMiaqjPh3X0RtIy4HFgHbDW9gxJ44HzganAMuC9th8ZXJgREdEfQ7Hn/hbb023PKOOzgSttTwOuLOMRETGChqNb5hBgXhmeBxw6DMuIiIheDDa5G7hc0k2SZpWyibbvL8MPABNbTShplqSFkhZ2dXUNMoyIiGg0qD534E22V0l6MXCFpDsbK21bkltNaHsuMBdgxowZLdtERMTADGrP3faq8nc18ANgb+BBSTsClL+rBxtkRET0z4CTu6StJb2wexh4O3AbMB84qjQ7Crh4sEFGRET/DKZbZiLwA0nd8/me7R9LuhG4QNKxwHLgvYMPMyIi+mPAyd32b4BXtyh/CDhgMEFFRMTg5ArViIgaSnKPiKihJPeIiBpKco+IqKEk94iIGkpyj4iooST3iIgaSnKPiKihJPeIiBpKco+IqKEk94iIGkpyj4iooST3iIgaSnKPiKihJPeIiBpKco+IqKFhS+6SDpR0l6SlkmYP13IiImJjw5LcJW0G/DtwELAncISkPYdjWRERsbHh2nPfG1hq+ze2/wCcBxwyTMuKiIgmsj30M5UOAw60/aEyfiTwetsfaWgzC5hVRvcA7hryQNqzA/C7Di27L4ltYBLbwCS2gelkbLvYntCqYsAPyB4s23OBuZ1afjdJC23P6HQcrSS2gUlsA5PYBma0xjZc3TKrgCkN45NLWUREjIDhSu43AtMk7Srp+cBMYP4wLSsiIpoMS7eM7bWSPgL8N7AZcKbtJcOxrCHQ8a6hXiS2gUlsA5PYBmZUxjYsB1QjIqKzcoVqREQNJblHRNRQkntERA2N2eQuabyk8Z2OIyJi
OIyp5C5pZ0nnSeoCrgdukLS6lE3tbHSjn6SJkvYqr4mdjqcvkrbpdAwRnTKmzpaR9Evgy8CFtteVss2Aw4Hjbe/Tyfh6IulXtl/VweVPB74BvIj1F6NNBtYAf2P75k7F1htJv7W98yiIYyIwqYyusv1gJ+Ppi6RtbD/R4RhEdY+q59434AaP4oQl6eW27+x0HN3GWnK/2/a0/taNBEnv6akK+EZP948YCZIWAR+2fX1T+T7Af9h+dWciA0kn9FQFfNZ2x7reslEc8PLfDnwNuJsN37fdqd63yzsVW286/b4169i9ZTrkJklfA+YBK0rZFOAo4JaORVU5HzgHaLW13XKEY2m2dXNiB7C9QNLWnQiowT8BXwDWtqjrdLfjWfS8Ufw2MFo3ip3uzjodeKvtZY2FknYFLgNe0YmgSgxf6akK2HYkY+nLWEvuHwSOBT7Hhj/35gNndCqoYjHwRdu3NVdIemsH4mn0I0mXAt9hw43iB4Efdyyqys3AD23f1Fwh6UMdiKdRNooDszmwskX5KmDcCMfS7Bjg48DTLeqOGOFYejWmumVGM0lvBpbb/m2Luhm2F3YgrMYYDqK6J/8GG0Xbl3UuKpC0B/CQ7Y1uuSppYif7t8te3m603ije23gL7A7E9gvg//awUVxhe0qLyUaEpBOB91I9B6LxfZsJXGD7nzsY21XA39n+RYu6e23v2oGwWhpTyV3S5lR77oeyYZK6GDjD9jOdii3qaZRvFB+23dWirqMbxRLDK2j9vt3euaiqU6iBp2w/2ck42jHWkvu5VAez5rH+Z99kqj738bbf18HYujc8fwbsVIpH/YZH0lzbs/puOfJGc2wRw22sJfdf235Zf+tGwijf8PR0xomAW21PHsl4NghgdMf2IuBEqj3QiVQHy1dTbbDn2F4zCmI7FHjxaIqtN5J+ZPugTsfRymiLbawdUH1Y0uHARbafBZD0PKrz3B/paGTw2hYbl5XAAkm/7kRADbqA5VQJs5vL+Is7EtF6ozm2C4CrgLfYfgBA0kuAo0vd2zsX2nOx7dcU21Gdjk3SXj1VAdNHMpaNAhjFsTUba3vuU4HPA2+h2kuG6vSlq4HZtu/tTGQgaQHwJVpveE6w/foOxnY3cEAPB3s7ffBtNMd2l+09+ls3EkZ5bOuAn7LhBrvbPra3GuGQnjOaY2s2pvbcbS+TdDLVOe0bHFDtZGIvZlJteP5dUvOGZ2bHoqp8GdgO2CiBAv8ywrE0G82xLZf0KWBe9wHKcrXq0aw/C6RTRnNsd1BdH3B3c4WkxNamsbbn/mmqRHkeG175NhM4z/acTsUGPZ4hcLHtOzoXVUXSy2l99kJi64Gk7YDZVLF1dxE9SHVdxRzbHesKHOWxHQb8yvZdLeoOtf3DDoTVvfxRG1uzsZbcfw28svnMk/Kc1yUdvv3AqN3wlD2895fYGg/2JrYBknSM7W93Oo5WEtvAjLbYxlpyvxN4h+3lTeW7AJd3uJ9xNG94EtsQG233IWmU2AZmtMU2pvrcgeOBK8tBuO7+sZ2pbkjUsasFi2epzm9f3lS+Y6nrpMQ2AJIW91RFdWpkxyS2gRnNsTUbU8nd9o8lvYyNbyV6Y/ctgDtoNG94EtvATATewcan2QrY6PL1EZbYBmY0x7aBMZXcAcpphgs6HUez0bzhSWwDdgmwje1FzRWSrhn5cDaQ2AZmNMe2gTHV5x4RMVZ0+taeERExDJLcIyJqKMl9EyZpqqSNHu5R6r4lac8y/Jk25nW8pBf0Uv/c/AajxPx7VY/uG+g8jpb01R7qftGwnPc31Z0oaamkuyS9Y6DL7yWuAX0eks5U9aD2ltOOBpKukTSjRfm7Jc0uw4c2fkcknSVplaQtyvgOkpaV4d0kLZLU0We11lmSe03Z/lDDva/7TO5UZ520TO6SNmua32DdY3tYbrJk+w1lcCrVxU0AlKQzE3glcCDwNVUPRx8RfXweZ5WYNjm25zdcKHYo0LwDsA74ixbTDdt3
ICpJ7pu+zSWdI+kOSRd2731372lJmgNsVfaSzpG0taRLJd0q6TZJ75P0t1Tnil8t6eoy/ROSviTpVmDfxj23UndqmceCck+S7r2xBZJ+JemUdvfKJH1W0q8lXSfpXEmfaFyHMvzcXl8xpdTfLemkhnl1L3MO8Oay3h+jusz+PNtPl/sILaU6w6a3uA6UdKekmyV9RdIlpfzk7hjL+G2qbkrX788DwPa1wMPtvFdDQdL2ki6XtKT8olhe3t8NfnlI+oSqezF1O7LEfZukvUuboyV9VdIbgHcDXyhtdivTfBn4mKrnFcQISnLf9O0BfM32K4DHgL9prLQ9G/i97em2P0C1h3if7Vfb/mPgx7a/AtxHdWvat5RJtwauL+2ua1rm1sAC268GrgX+spSfDpxu+1W0fgbmRiS9lmqPejpwMPC6Ntd7b+D/AP8LOLxFl8Fs4GdlvU+jOk2y8cZOK1l/6mSruLYEvgn8KfBa4CVtxtXfz6MTTgKus/1K4AdU1wW04wVlb/tvgDMbK1w9dm4+8MmybveUqt8C1wFHDknk0bYk903fCts/L8NnA2/qo/2vgLdJ+rykN9t+tId264CLeqj7A9X5vgA3UXWBAOwL/GcZ/l5fgRdvBn5g+0nbj1EliHZcYfsh278Hvk/f691fL6d6zundrs4XPrvN6fr7eXTCn1DWx/altP8sg3PLNNcCfyRp2zan+2fgkyTfjKi82Zu+5gsVer1wwfavgb2okvwpkv5fD02f6uUioGe8/gKJdQzfxXBrWf8d3bKprl/rTXVhU+O93Sez/gZtg4kLNoytv3GNJr2tFwxw3crtcRdRPfQ6RkiS+6ZvZ0n7luH3U/0EbvaMpHEAknYCnrR9NvAFqkQP8DjwwkHGsoCqqwTavwf9tcChkraS9EKqbpBuy6i6RAAOa5rubZLGS9qK6kDez5vqm9dnPjBT0haSdgWmATcASLpSUnMXzZ3A1Ia+4yOa4tqrTLsX0PjE+359Hr2R9BFJA76FQi/TX1tiQ9UDvLcr5Q8CLy598lsA72qa7n1lmjcBj7b41dfbd+hU4BM91MUwSHLf9N0FHCfpDqp/0q+3aDMXWFwO4L0KuEHVqYgnAac0tPlx9wHVAToeOEHVzZV2B/FBG+kAAAFoSURBVHrq8nmO7ZuB84FbgR8BNzZUfxH4a0m3ADs0TXoDVbfRYqqnVy1sql8MrCsHfT9mewnV4+NuB34MHGd7naqnXe1O0wFN208Bs4BLJd1M9XzRbhcB4yUtobp/TeNjEPv7eaDq+bm/BPaQtFLSsaXdy4GHmicuB2a/1TC+qGH4Ww3HH1pOD3wO+JMS/3soDzopd9b8B6r39gqqDVyjp8pn8Q2qh7k3Ow/4pKRbGjaKlHkvAW5uMU0Mk9x+IIZMOTPk97YtaSZwhO1DmtpMBS4pB3NbzeNk4AnbXxzmcLuX98fAX9g+oY92+wGfsN28Nztsytk577H9h+GcvpyFNMP27waynMGQ9ITtbUZ6uWNBTk+KofRa4KuSRPWM2o3Ob6bqo3+RpEWj4Txn27cBvSb2ThnshmQkN0T9VfbsL6LqCophkD33iIgaSp97REQNJblHRNRQkntERA0luUdE1FCSe0REDf1/t7CDW216o8EAAAAASUVORK5CYII=\n"
     },
     "metadata": {
      "needs_background": "light"
     }
    },
    {
     "output_type": "stream",
     "name": "stdout",
     "text": [
      "[OrderedDict([('000', 273), ('001', 415), ('010', 0), ('011', 0), ('100', 0), ('101', 0), ('110', 138), ('111', 198)])]\n"
     ]
    }
   ]
  },
  {
   "cell_type": "markdown",
   "source": [
    "Batch mode process different states"
   ],
   "metadata": {
    "id": "rYQ1mg1XCt5P",
    "pycharm": {
     "name": "#%% md\n"
    }
   }
  },
  {
   "cell_type": "code",
   "source": [
    "# batch mode processing\n",
    "\n",
    "q_state = tq.QuantumState(n_wires=3, bsz=64)\n",
    "q_state.x(wires=1)\n",
    "q_state.rx(wires=2, params=0.6 * np.pi)\n",
    "print(q_state)\n"
   ],
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/"
    },
    "id": "l3ffmGshCrkQ",
    "outputId": "18ae0a4f-1b00-4c27-fb4b-ca394c3ddaab",
    "pycharm": {
     "name": "#%%\n"
    }
   },
   "execution_count": 22,
   "outputs": [
    {
     "output_type": "stream",
     "name": "stdout",
     "text": [
      "QuantumState 3 wires \n",
      " state: tensor([[0.0000+0.0000j, 0.0000+0.0000j, 0.5878+0.0000j, 0.0000-0.8090j,\n",
      "         0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j],\n",
      "        [0.0000+0.0000j, 0.0000+0.0000j, 0.5878+0.0000j, 0.0000-0.8090j,\n",
      "         0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j],\n",
      "        [0.0000+0.0000j, 0.0000+0.0000j, 0.5878+0.0000j, 0.0000-0.8090j,\n",
      "         0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j],\n",
      "        [0.0000+0.0000j, 0.0000+0.0000j, 0.5878+0.0000j, 0.0000-0.8090j,\n",
      "         0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j],\n",
      "        [0.0000+0.0000j, 0.0000+0.0000j, 0.5878+0.0000j, 0.0000-0.8090j,\n",
      "         0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j],\n",
      "        [0.0000+0.0000j, 0.0000+0.0000j, 0.5878+0.0000j, 0.0000-0.8090j,\n",
      "         0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j],\n",
      "        [0.0000+0.0000j, 0.0000+0.0000j, 0.5878+0.0000j, 0.0000-0.8090j,\n",
      "         0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j],\n",
      "        [0.0000+0.0000j, 0.0000+0.0000j, 0.5878+0.0000j, 0.0000-0.8090j,\n",
      "         0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j],\n",
      "        [0.0000+0.0000j, 0.0000+0.0000j, 0.5878+0.0000j, 0.0000-0.8090j,\n",
      "         0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j],\n",
      "        [0.0000+0.0000j, 0.0000+0.0000j, 0.5878+0.0000j, 0.0000-0.8090j,\n",
      "         0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j],\n",
      "        [0.0000+0.0000j, 0.0000+0.0000j, 0.5878+0.0000j, 0.0000-0.8090j,\n",
      "         0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j],\n",
      "        [0.0000+0.0000j, 0.0000+0.0000j, 0.5878+0.0000j, 0.0000-0.8090j,\n",
      "         0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j],\n",
      "        [0.0000+0.0000j, 0.0000+0.0000j, 0.5878+0.0000j, 0.0000-0.8090j,\n",
      "         0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j],\n",
      "        [0.0000+0.0000j, 0.0000+0.0000j, 0.5878+0.0000j, 0.0000-0.8090j,\n",
      "         0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j],\n",
      "        [0.0000+0.0000j, 0.0000+0.0000j, 0.5878+0.0000j, 0.0000-0.8090j,\n",
      "         0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j],\n",
      "        [0.0000+0.0000j, 0.0000+0.0000j, 0.5878+0.0000j, 0.0000-0.8090j,\n",
      "         0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j],\n",
      "        [0.0000+0.0000j, 0.0000+0.0000j, 0.5878+0.0000j, 0.0000-0.8090j,\n",
      "         0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j],\n",
      "        [0.0000+0.0000j, 0.0000+0.0000j, 0.5878+0.0000j, 0.0000-0.8090j,\n",
      "         0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j],\n",
      "        [0.0000+0.0000j, 0.0000+0.0000j, 0.5878+0.0000j, 0.0000-0.8090j,\n",
      "         0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j],\n",
      "        [0.0000+0.0000j, 0.0000+0.0000j, 0.5878+0.0000j, 0.0000-0.8090j,\n",
      "         0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j],\n",
      "        [0.0000+0.0000j, 0.0000+0.0000j, 0.5878+0.0000j, 0.0000-0.8090j,\n",
      "         0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j],\n",
      "        [0.0000+0.0000j, 0.0000+0.0000j, 0.5878+0.0000j, 0.0000-0.8090j,\n",
      "         0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j],\n",
      "        [0.0000+0.0000j, 0.0000+0.0000j, 0.5878+0.0000j, 0.0000-0.8090j,\n",
      "         0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j],\n",
      "        [0.0000+0.0000j, 0.0000+0.0000j, 0.5878+0.0000j, 0.0000-0.8090j,\n",
      "         0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j],\n",
      "        [0.0000+0.0000j, 0.0000+0.0000j, 0.5878+0.0000j, 0.0000-0.8090j,\n",
      "         0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j],\n",
      "        [0.0000+0.0000j, 0.0000+0.0000j, 0.5878+0.0000j, 0.0000-0.8090j,\n",
      "         0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j],\n",
      "        [0.0000+0.0000j, 0.0000+0.0000j, 0.5878+0.0000j, 0.0000-0.8090j,\n",
      "         0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j],\n",
      "        [0.0000+0.0000j, 0.0000+0.0000j, 0.5878+0.0000j, 0.0000-0.8090j,\n",
      "         0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j],\n",
      "        [0.0000+0.0000j, 0.0000+0.0000j, 0.5878+0.0000j, 0.0000-0.8090j,\n",
      "         0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j],\n",
      "        [0.0000+0.0000j, 0.0000+0.0000j, 0.5878+0.0000j, 0.0000-0.8090j,\n",
      "         0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j],\n",
      "        [0.0000+0.0000j, 0.0000+0.0000j, 0.5878+0.0000j, 0.0000-0.8090j,\n",
      "         0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j],\n",
      "        [0.0000+0.0000j, 0.0000+0.0000j, 0.5878+0.0000j, 0.0000-0.8090j,\n",
      "         0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j],\n",
      "        [0.0000+0.0000j, 0.0000+0.0000j, 0.5878+0.0000j, 0.0000-0.8090j,\n",
      "         0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j],\n",
      "        [0.0000+0.0000j, 0.0000+0.0000j, 0.5878+0.0000j, 0.0000-0.8090j,\n",
      "         0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j],\n",
      "        [0.0000+0.0000j, 0.0000+0.0000j, 0.5878+0.0000j, 0.0000-0.8090j,\n",
      "         0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j],\n",
      "        [0.0000+0.0000j, 0.0000+0.0000j, 0.5878+0.0000j, 0.0000-0.8090j,\n",
      "         0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j],\n",
      "        [0.0000+0.0000j, 0.0000+0.0000j, 0.5878+0.0000j, 0.0000-0.8090j,\n",
      "         0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j],\n",
      "        [0.0000+0.0000j, 0.0000+0.0000j, 0.5878+0.0000j, 0.0000-0.8090j,\n",
      "         0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j],\n",
      "        [0.0000+0.0000j, 0.0000+0.0000j, 0.5878+0.0000j, 0.0000-0.8090j,\n",
      "         0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j],\n",
      "        [0.0000+0.0000j, 0.0000+0.0000j, 0.5878+0.0000j, 0.0000-0.8090j,\n",
      "         0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j],\n",
      "        [0.0000+0.0000j, 0.0000+0.0000j, 0.5878+0.0000j, 0.0000-0.8090j,\n",
      "         0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j],\n",
      "        [0.0000+0.0000j, 0.0000+0.0000j, 0.5878+0.0000j, 0.0000-0.8090j,\n",
      "         0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j],\n",
      "        [0.0000+0.0000j, 0.0000+0.0000j, 0.5878+0.0000j, 0.0000-0.8090j,\n",
      "         0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j],\n",
      "        [0.0000+0.0000j, 0.0000+0.0000j, 0.5878+0.0000j, 0.0000-0.8090j,\n",
      "         0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j],\n",
      "        [0.0000+0.0000j, 0.0000+0.0000j, 0.5878+0.0000j, 0.0000-0.8090j,\n",
      "         0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j],\n",
      "        [0.0000+0.0000j, 0.0000+0.0000j, 0.5878+0.0000j, 0.0000-0.8090j,\n",
      "         0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j],\n",
      "        [0.0000+0.0000j, 0.0000+0.0000j, 0.5878+0.0000j, 0.0000-0.8090j,\n",
      "         0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j],\n",
      "        [0.0000+0.0000j, 0.0000+0.0000j, 0.5878+0.0000j, 0.0000-0.8090j,\n",
      "         0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j],\n",
      "        [0.0000+0.0000j, 0.0000+0.0000j, 0.5878+0.0000j, 0.0000-0.8090j,\n",
      "         0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j],\n",
      "        [0.0000+0.0000j, 0.0000+0.0000j, 0.5878+0.0000j, 0.0000-0.8090j,\n",
      "         0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j],\n",
      "        [0.0000+0.0000j, 0.0000+0.0000j, 0.5878+0.0000j, 0.0000-0.8090j,\n",
      "         0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j],\n",
      "        [0.0000+0.0000j, 0.0000+0.0000j, 0.5878+0.0000j, 0.0000-0.8090j,\n",
      "         0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j],\n",
      "        [0.0000+0.0000j, 0.0000+0.0000j, 0.5878+0.0000j, 0.0000-0.8090j,\n",
      "         0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j],\n",
      "        [0.0000+0.0000j, 0.0000+0.0000j, 0.5878+0.0000j, 0.0000-0.8090j,\n",
      "         0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j],\n",
      "        [0.0000+0.0000j, 0.0000+0.0000j, 0.5878+0.0000j, 0.0000-0.8090j,\n",
      "         0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j],\n",
      "        [0.0000+0.0000j, 0.0000+0.0000j, 0.5878+0.0000j, 0.0000-0.8090j,\n",
      "         0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j],\n",
      "        [0.0000+0.0000j, 0.0000+0.0000j, 0.5878+0.0000j, 0.0000-0.8090j,\n",
      "         0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j],\n",
      "        [0.0000+0.0000j, 0.0000+0.0000j, 0.5878+0.0000j, 0.0000-0.8090j,\n",
      "         0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j],\n",
      "        [0.0000+0.0000j, 0.0000+0.0000j, 0.5878+0.0000j, 0.0000-0.8090j,\n",
      "         0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j],\n",
      "        [0.0000+0.0000j, 0.0000+0.0000j, 0.5878+0.0000j, 0.0000-0.8090j,\n",
      "         0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j],\n",
      "        [0.0000+0.0000j, 0.0000+0.0000j, 0.5878+0.0000j, 0.0000-0.8090j,\n",
      "         0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j],\n",
      "        [0.0000+0.0000j, 0.0000+0.0000j, 0.5878+0.0000j, 0.0000-0.8090j,\n",
      "         0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j],\n",
      "        [0.0000+0.0000j, 0.0000+0.0000j, 0.5878+0.0000j, 0.0000-0.8090j,\n",
      "         0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j],\n",
      "        [0.0000+0.0000j, 0.0000+0.0000j, 0.5878+0.0000j, 0.0000-0.8090j,\n",
      "         0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j, 0.0000+0.0000j]])\n"
     ]
    }
   ]
  },
  {
   "cell_type": "code",
   "source": [
    "q_state = tq.QuantumState(n_wires=2)\n",
    "print(q_state)\n",
    "q_state.set_states(torch.tensor([[0, 0, 1, 0], [0, 1, 0, 0]]))\n",
    "print(q_state)\n",
    "\n",
    "q_state.x(wires=0)\n",
    "print(q_state)"
   ],
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/"
    },
    "id": "eCtQvKMH1JjI",
    "outputId": "86f9ca9d-e3c8-4c34-9fa6-82911b581ac2",
    "pycharm": {
     "name": "#%%\n"
    }
   },
   "execution_count": 23,
   "outputs": [
    {
     "output_type": "stream",
     "name": "stdout",
     "text": [
      "QuantumState 2 wires \n",
      " state: tensor([[1.+0.j, 0.+0.j, 0.+0.j, 0.+0.j]])\n",
      "QuantumState 2 wires \n",
      " state: tensor([[0.+0.j, 0.+0.j, 1.+0.j, 0.+0.j],\n",
      "        [0.+0.j, 1.+0.j, 0.+0.j, 0.+0.j]])\n",
      "QuantumState 2 wires \n",
      " state: tensor([[1.+0.j, 0.+0.j, 0.+0.j, 0.+0.j],\n",
      "        [0.+0.j, 0.+0.j, 0.+0.j, 1.+0.j]])\n"
     ]
    },
    {
     "output_type": "stream",
     "name": "stderr",
     "text": [
      "/content/torchquantum/torchquantum/states.py:47: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  states = torch.tensor(states, dtype=C_DTYPE).to(self.state.device)\n"
     ]
    }
   ]
  },
  {
   "cell_type": "code",
   "source": [
    "# demonstrate the GPU processing\n",
    "\n",
    "n_qubits = 10\n",
    "bsz = 8\n",
    "run_iters = 5\n",
    "use_gpu = False\n",
    "\n",
    "q_state = tq.QuantumState(n_wires=n_qubits, bsz=bsz)\n",
    "if use_gpu:\n",
    "    q_state.to(torch.device('cuda'))\n",
    "\n",
    "# start = time.time()\n",
    "\n",
    "start = torch.cuda.Event(enable_timing=True)\n",
    "end = torch.cuda.Event(enable_timing=True)\n",
    "\n",
    "start.record()\n",
    "for k in range(run_iters):\n",
    "    print(k)\n",
    "    for qid in range(n_qubits):\n",
    "        q_state.rx(qid, params=np.random.rand())\n",
    "        q_state.cnot(wires=[qid, (qid+1) % n_qubits])\n",
    "end.record()\n",
    "\n",
    "torch.cuda.synchronize()\n",
    "\n",
    "print(f\"Use GPU: {use_gpu}, avg runtime for circuit with {n_qubits} qubits, {2*n_qubits} gates, {bsz} batch size is {start.elapsed_time(end) / run_iters / 1000:.2f} second\")\n",
    "\n"
   ],
   "metadata": {
    "id": "FCD00B-f1R14",
    "pycharm": {
     "name": "#%%\n"
    }
   },
   "execution_count": null,
   "outputs": []
  },
  {
   "cell_type": "code",
   "source": [
    "# automatic gradient computation\n",
    "q_state = tq.QuantumState(n_wires=2)\n",
    "q_state._states.requires_grad = True\n",
    "\n",
    "q_state.x(wires=0)\n",
    "q_state.rx(wires=1, params=0.6 * np.pi)\n",
    "print(q_state)\n",
    "target_quantum_state = torch.tensor([0, 0, 0, 1], dtype=torch.complex64)\n",
    "loss = 1 - (q_state.get_states_1d()[0] @ target_quantum_state).abs()\n",
    "\n",
    "print(loss)\n",
    "\n",
    "loss.backward()\n",
    "\n",
    "print(q_state._states.grad)\n"
   ],
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/"
    },
    "id": "FrmkOuSw1lOI",
    "outputId": "063d3d28-9a16-435c-ecf7-b16baaae2880",
    "pycharm": {
     "name": "#%%\n"
    }
   },
   "execution_count": 3,
   "outputs": [
    {
     "output_type": "stream",
     "name": "stdout",
     "text": [
      "QuantumState 2 wires \n",
      " state: tensor([[0.0000+0.0000j, 0.0000+0.0000j, 0.5878+0.0000j, 0.0000-0.8090j]],\n",
      "       grad_fn=<UnsafeViewBackward0>)\n",
      "tensor(0.1910, grad_fn=<RsubBackward1>)\n",
      "tensor([[[-0.8090+0.0000j,  0.0000+0.5878j],\n",
      "         [ 0.0000+0.0000j,  0.0000+0.0000j]]])\n"
     ]
    }
   ]
  },
  {
   "cell_type": "code",
   "source": [
    "# build a circuit\n",
    "\n",
    "class QModel(tq.QuantumModule):\n",
    "    def __init__(self):\n",
    "        super().__init__()\n",
    "        self.n_wires = 2\n",
    "        self.u3_0 = tq.U3(has_params=True, trainable=True)\n",
    "        self.u3_1 = tq.U3(has_params=True, trainable=True)\n",
    "        self.cu3_0 = tq.CU3(has_params=True, trainable=True)\n",
    "        self.cu3_1 = tq.CU3(has_params=True, trainable=True)\n",
    "        self.u3_2 = tq.U3(has_params=True, trainable=True)\n",
    "        self.u3_3 = tq.U3(has_params=True, trainable=True)\n",
    "        self.random_layer = tq.RandomLayer(n_ops=10,\n",
    "                                           wires=list(range(self.n_wires)))\n",
    "\n",
    "    def forward(self, q_device: tq.QuantumDevice):\n",
    "        self.u3_0(q_device, wires=0)\n",
    "        self.u3_1(q_device, wires=1)\n",
    "        self.cu3_0(q_device, wires=[0, 1])\n",
    "        self.u3_2(q_device, wires=0)\n",
    "        self.u3_3(q_device, wires=1)\n",
    "        self.cu3_1(q_device, wires=[1, 0])\n",
    "        self.random_layer(q_device)\n",
    "\n",
    "\n",
    "q_dev = tq.QuantumDevice(n_wires=2)\n",
    "q_dev.reset_states(bsz=3)\n",
    "print(q_dev)\n",
    "\n",
    "model = QModel()\n",
    "model(q_dev)\n",
    "print(q_dev)"
   ],
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/"
    },
    "id": "11F-rQRN1q1g",
    "outputId": "6568e55e-408c-44d0-fee6-9cd544b62f17",
    "pycharm": {
     "name": "#%%\n"
    }
   },
   "execution_count": 4,
   "outputs": [
    {
     "output_type": "stream",
     "name": "stdout",
     "text": [
      "QuantumDevice 2 wires with states: tensor([[1.+0.j, 0.+0.j, 0.+0.j, 0.+0.j],\n",
      "        [1.+0.j, 0.+0.j, 0.+0.j, 0.+0.j],\n",
      "        [1.+0.j, 0.+0.j, 0.+0.j, 0.+0.j]])\n",
      "QuantumDevice 2 wires with states: tensor([[ 0.1543-0.2309j,  0.2192-0.5838j, -0.4387-0.5519j, -0.0495+0.1859j],\n",
      "        [ 0.1543-0.2309j,  0.2192-0.5838j, -0.4387-0.5519j, -0.0495+0.1859j],\n",
      "        [ 0.1543-0.2309j,  0.2192-0.5838j, -0.4387-0.5519j, -0.0495+0.1859j]],\n",
      "       grad_fn=<UnsafeViewBackward0>)\n"
     ]
    }
   ]
  },
  {
   "cell_type": "code",
   "source": [
    "# easy conversion to qiskit\n",
    "from torchquantum.plugin.qiskit_plugin import tq2qiskit\n",
    "\n",
    "circ = tq2qiskit(q_dev, model)\n",
    "circ.draw('mpl')"
   ],
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/",
     "height": 140
    },
    "id": "ZLa5glSA1s-J",
    "outputId": "a4d5c348-3a67-4a71-b2e1-acb43e7251d5",
    "pycharm": {
     "name": "#%%\n"
    }
   },
   "execution_count": 5,
   "outputs": [
    {
     "output_type": "execute_result",
     "data": {
      "text/plain": [
       "<Figure size 1170.05x144.48 with 1 Axes>"
      ],
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAA4EAAAB7CAYAAADKS4UuAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjMsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+AADFEAAAgAElEQVR4nO3dd3gU1frA8e/upockJAQIBAgt9CK9S1VA5AIqSBFFuYiAKLb7u17kihfFBqgXERuCAsJVVEAFkZYAUiQgvQQILZCEEhJISNvd/P4YElK2Jezu7LLv53nyQKaceWdydnbemTnnaPLz8/MRQgghhBBCCOERtGoHIIQQQgghhBDCeSQJFEIIIYQQQggPIkmgEEIIIYQQQngQSQKFEEIIIYQQwoNIEiiEEEIIIYQQHkSSQCGEEEIIIYTwIJIECiGEEEIIIYQHkSRQCCGEEEIIITyIJIFCCCGEEEII4UEkCRRCCCGEEEIIDyJJoBBCCCGEEEJ4EEkChRBCCCGEEMKDSBIohBBCCCGEEB5EkkAhhBBCCCGE8CCSBAohhBBCCCGEB5EkUAghhBBCCCE8iCSBQgghhBBCCOFBJAkUQgghhBBCCA8iSaAQQgghhBBCeBBJAoUQQgghhBDCg0gSKIQQQgghhBAeRJJAIYQQQgghhPAgkgQKIYQQQgghhAeRJFAIIYQQQgghPIgkgUIIIYQQQgjhQSQJFEIIIYQQQggP4qV2AK7CsGoP+RfT1A7jrqWpXhHdoDZqh+FxpF47li31+vgmuHHJSQG5uaAq0LBX+ddX61jfadxCCCGEs0kSeEv+xTTyE+RKTdxdpF6r78YlSEtUOwrPIMdaCCGEsI28DiqEEEIIIYQQHkSSQCGEEEIIIYTwIPI6qBBCCCGEEELYWU4eJKdDrh68dFAlGAJ91Y5KIUmgEEIIIYQQQtjBjWzYdQriTkNKOuSXmB8WCC1rQZdoCA9SJURAXgd1edGfP8fSI9tsni6EO5B67TyPzazNhj1LbJ5+t3tpfg8e+KcvA6dWYNC0EMbPuYfY/d+rHZYQQgg3ZzTCpiPwxkr4ZZ/yBLBkAgiQmgmbj8Jbq+GH3ZCjd3qogDwJFEII4WFG9ZnGqD6vYTDoWbX9Y97+diT1I1sRGV5f7dCEEEK4oZu5sCAWTpWhQ/Z8YGs8HE2C8T2hspOfCsqTQCGEEB5Jp/Oif4dxGIx6Tl3cp3Y4Qggh3FB2HszfWLYEsKgrN2DueriaYd+4rJEkUAghhEfK0+fyy/b5ANQIb6ByNEIIIdzRT3vgfKrlZT4cpfyYcz0Lvt4GBqN9Y7NEXgd1cymZ6QxdNQcfrRdZ+lxmdHuUXlHN1A7rrmHQgz4HvHxA5612NJ5D6rXzXLuRwvSvh+Cl8yE3L4sn+8+kdXRvtcNyqG83vsX3sbPIyrmBTufNi0O/pG71FgCs/XMBG/YsLlw2KTWB5nW68erIpWqFa5IxH27mgEYDAT7Kv0IUlZOn9Ejo76P0SigcJzsP8gzKZ1HnJo9XDEblHOLjBb5yfVNux5KUTmDs4dxViD0GvZrYpzxrXDoJNBqNzJkzh88++4zz58/TsGFD/vvf//L000/TvXt3Pv/8c7VDdDhvrQ69oXSL0TyjAW+tjnD/IDYPfx2dVktCWgqjfp7LjtFvqhDp3SXzKpz5E5KPQb5BucCqHA2120NwhNrRuT+p186j03mjN+aVmq435OGl8yY4MJw5E7ei0+pIuprAm0sepfXzu1WI1HlG9p7KqD6vcePmNWZ/P5b9JzfTv/1YAPq3H1v4/9Trybz8WU+e7PeWmuEWYzAqbUi2Hr/96lDlIOjWUOlpzl0uQIXjxCcrnVMcS1J+9/GCDnWVC8vQQHVju9scPK908JFwWfk9wAc61odejaGCn7qxmZN2U6kfu07d7pCkYYRSPxpWUzc2d7T+kH3L23QU7m3onBs3Lv11MXbs
WGbMmMH48eNZu3Ytw4YNY8SIESQkJNCmTRu1w3OKqJDKnExLKTYtIzeb5Mw06oZUQafVotMqf8b0nJs0r1xLjTDvKmkXYNcSSDqiJIAA+flw6QTs/hYu2+mOjyeTeu08EaG1uXjlZLFpWTkZXLuRTLVKddFpdei0yrdNRlYadau1UCNMVQQFhPLi0C/ZdexXth9aVWye0Wjk7WWjGNv/bSLCaqsTYAl6A3wRAyv3QGqRtiNXbsCPcbBwi3NfJRKuZ8dJ+GQjHE++PS1Xr9w4mL0WUq6rF9vd5veDsGALnL5ye9rNXCXBmvMbpN9ULzZzLt9Q6sGW48V7pIxPhvmbYFu8erG5o+T08rcDNCcjGw6ct2+Z5rhsErhs2TIWLVrE6tWrefnll+nZsydTp06lU6dO6PV6WrdurXaITjG66b0sOLCJbYnHMBiNXMvO4MVN39AsvBb3VK0NwOm0S/RYNp0BK95hUHRbdQN2c0YDHFgFRj2l+/XNh3wjHPwZ8rLUiO7uIfXaee5vO4Y1uz7nYMJWDEYDN25e45NVz1M7ojn1q7cCICn1NFPmdeXVL/vSpdkQlSN2ruCAMB7u9iJf/fYvjMbbGdTi9W9QJ6I5XZoNVjG64jYUebpT9PRU8P9DF5RXiYRnunQdvtul/D/fRL/0mbmwaKvpeaJsEi7BmgPK/00dz2uZsHyXc2OyxTfbICOn9PSCXVixG5LSnBqSW4tPtr6MK5Vbksu+Djpz5kz69etH9+7di02vX78+3t7etGih3K0+c+YMTzzxBElJSfj6+vLJJ5/QrVs3NUJ2iJFNupKlz+W5DQs5d/0KFXz86FajMT8NeRmvW3fv61SsQsyI6SSkpdD3u7cYUM8zEmRHuHQCcq3cvTPq4eJhiJK8pNykXjtP79ajyMm7ydyfJpGSdhZ/nwq0qNudGU/9jE6nfAVUC6vDh5O2kXQ1gVc+60XHJg+qHLVzDen2PD9u/YD1e76hb7sx7D2xkT3xvzN7QqzaoRUyGG27S7/1OPRoBFqXvcUrHOWPE6bHJCuQn69c4J++DHWrOC2su9LWeNBg/njnA0cvKk/p1RwMvKhzV613XqIB/oiHR9o7JSS3Z+14ulq5JblkEpiYmMihQ4d44YUXSs07d+4cTZs2xdfXF4Dx48fz6KOPMnHiRLZv387QoUM5ffo0Pj4+FrehKdGKfv2jr9G9ppNaYpbR2Ba9GNuil8l5Ofo8fL2UFr3BPv4EervmS+gxsTHcN7GP2mFY9fzDn9K//Vh0WvMfDYPRwDfzfuXfiwY5MbLykXrtWLbU61nPbKZlvR7OCciMBzqM44EO40zOy9Xn4OOlnE8D/ILx96ngzNCKiY2Nod2InuVe35ZjPXtCTKlpgX7B/Pgf5Vs39XoyH698lplj1+LtZfl7pMCdxm2LsOqNGf3eEavLXbsJoRF1uH75jEPjEa5n1Mz9hNey/jr38HFT2b16phMiuns9Pf8y/kHhVpfr/uBTHNmy0AkRWdd6wMt0G/G+xWXygZ+3HGdoh0bOCcrNDfnnemo1K34NYKkHUEvzpxTpe+zEmUtoNFXLHVe+jY/7XTYJBIiIKN4DR1ZWFrGxsfTv3x+AK1eusG3bNlavXg1A586dqV69Ops3b6Zv377ODVolccmnmLbtO3QaLXlGPXN6Pa52SG5Np7HeElcDaLXS1ZojSb12nvjzu/lq7b/QanXoDXlMGPSR2iGpasmGGWRmp/P+/8YUTqtZuSFTHvlMvaAATRnOOWVZVtw9NDrb/u5SP+6crcfQlY61rdctrhSzqyv5QMmOBTum3JKbybc1XXSikydPEh0dzQcffMCUKVMKp7/xxhtMnz6defPmMXHiRPbu3csjjzxCQkJC4TLDhg2jT58+PP3002Xapn7+RvIT7Ny6UxTS1K2C1wTX73Y+cR8c22B9uTodoV5Xx8dzp6ReO5Yt9TpuOaQlOikgN1exBrQdXv711TrWdxq3LXL1MO2H4p05mBLg
A/95SIYE8ETf7oDdCZZfCQUY1wOaRjojorvX/I1Kuy1rx/ql/lAzzCkhWXU8Sen8xRIN0CoKHneD6xtX8PU2+OusbcsWPAGcYsNoQ1WD4dWB5Y/LVi75JLBu3bq0aNGCmTNnEhYWRmRkJCtWrGDNmjUAHtMzqHC+iCZwIhYMpXvUv00DkZ7TgaIQwgX4eEGHekqvfpZ0qi8JoKfqEg1/JpifrwFCAqCxDANwx7o2KN4Da0kaDdQMdZ0EECA6AipVUHoWttSWsWsDZ0bl3mqG2Z4ElrVcZ3DJpuNarZbvv/+epk2bMmHCBJ588knCw8OZNGkSOp2usFOYWrVqkZKSQk7O7a6OTp8+TVRUlFqhCzfn5QNN+t36xczT+AY9wS/YaSEJIQQA/Zord4jNiQyF+5o5Lx7hWqLCzQ8yrdEonQWN7iydBtlDsxrQro7peRrA1wuGd3RqSFZpNfBYZ9DpzF7e0L0R1Kns1LDcmqM6WHJWx00u+SQQoEGDBmzevLnYtNGjR9OkSRP8/f0BCA8Pp0uXLixYsKCwY5gLFy7Qs6djG+iLu1vVhuDlC6f+gOtJt6cHhkPdTsp8IYRwtgBfeP5++GUf7D4NebfGMfXWKU8JH7wH/LzVjVGoa+A9ytOejYchNfP29OiqMKClkiiKO6fRwIhOEBECMcfgRvat6UDTGspnMSJE1RBNqlNZOYf8uu/2cDMAoQHKDYSuDZzWHO2uEFUJqleEi3YcVsPXC1rXtl95lrhsEmhKXFwcHTsWv7Xy6aefMmbMGD788EN8fHxYtmyZ1Z5B78SfSSd5efNitBoNbSPqMavnaKvz153ez/u7lM5r4q8lMbfPkwyKblfmbb+8eTF7khNoVbU2c3o9UTjdXPk383IYsfojMvNyCPYNYNnA5/D18mbx4S0sObwVg9HI1wMmERlU+rnzxYxrDP7xfY5evcC1578q7LYfMFuupXXcTaXayk9mKuz4SpnW8Ym79+Ro7W+nNxp44td5XLp5nTYRdXmn+0irn4WizNVdgDPpl+m6dBqNwiLx0XmxZuirJtcxFYM999Pc/tjr8wtwJf0i0xY+yNmUI/z8ZkbhEA0Au4/9xvLN7wCQePk4zz00n4Y125td3tZyrc3/YcsHbD34Ax9O2mbX/TE3Pzv3JjMWDyU7N5NAvxBeG/1dYe+kjmYp3nmrnufUxX3k5WUzfuAcmtXpwtKNb7F6+zz6tXuKJ/u96ZQYrQnwhWEdYGArePV7ZdqMhyX5EwqNRnkttFN9ePFbZdq0QUpiKOxLq4HeTaFHY3hpmTJt+hDllVtXVjMMnumljGX4xkpl2rTByv6IstFooGdjWLrDfmV2jnbe+dxtXgrIyMggPj6+1CDxdevWZcuWLcTHx3Po0KFS4wraW63gcH4fNpWYEdO5dDOdg5fPWZ3ft05LNgyfxobh06gZVIneUc3LvN2/Uk6TkZvN5hGvk2vQE5d0qnCeufLXnd5Pu2r12TB8Gu2q1WPdmf1cuJHK1vPHWDdsKhuGTzOZAAKE+QWybti/6FCtfql5psq1to67CixyeO7WBBCs/+1WnthNiypRrH/0NbL1uey/dNbqZ6GApbpboHdUczYMn1aYAJpax1QM9txPc/tjj89vgeCAMN57eiONa5V+T6hdo37MnhDD7AkxVKlYi9bRfSwub2u5lubn6nM4dXGfQ/bH3Pzdx3+jUa0OzJ4QQ8Na7Yk79lu5t19WluId/+As5kyI5bXR37Fsk9J9/gPt/86rI2xoxa8C/yL3OiUBFCUVvaCXBNCxdEWupF09ASwqNPD2/yUBLL+2daCRndrZhleAfk7sc8JtksAKFSpgMBiYPHmyqnFEBFbE79bYUd5aL3Qarc3zE9JSqBIYQgWfso95tuviycKLz15RzdmZdKLUMiXLr1uxKpl5SnvJ9OxMKvlVYP2ZAxjyjfT97i2mbFyEwWg0uT0/Lx9C/Ux/c5gq19o6wrVZ+9udTrtE8/BaALSsEsXO
i/FWPwsFbKm7seeP0HPZG3wUt8bsOqZisOd+WtufO/n8FvDx9iMoINTiMklXE6gYVBV/3wo2LW9Luebm//bnAu5r+4SJNWxTnu1Wr1SP7FzlPbXMrDSCAyuVe/tlZSleL52SSWXlZFC3eksAQoOqOq4LcCGEEG5Po4ERHZVXai2ZstRyz6A+XjC6i/I6qLO4TRLoag5cPseVm9dpEl7D5vkrT+xmcP225dpeWk4mwb5KW8gQX3/Ssm+WWqZk+dGhEexKOkHLha+wJ+U0nSIbkHIznVyDnnXDpuLv5cvqk3FljsVUueLu1iCsGlsSjwIQc+4IaTm365+1z4K1ulstsCKHn5rN+kdfY9PZQxy4fM7kOpZisCdz+3Mnn9+y2HbwR7o0G+Lw7egNeew/FUOr+r0cvq2iIsOjOXp2B3+f1ZT4xDiaRHV26vYtmb5oCP/84n5aR/exvrAQQgiB8gR4Up/yP3X384bxPZ3fZtet2gQ6U3JmGo/9PLfYtKqBISwd+BypWRlM2biIbwc+Z3Jdc/N/PbWX7wa9UK5thvgGcD0nC4DrOVlU9Ct9y6Fk+YsPb2FA3Va81H4gc3b/wtIj2wjxCeDemo0B6FmrCXtSTls4CqaZKnd003vLXI5wPkt1zJIH67Vh87nD9P3uLaKCK1M1QGnxbu2zAFitu75e3viiPIV5oF4rDl85b3IdczHYax+t7Y+1z6+97Dj6M9Mf/9Hh29mwZzG9WllvV5l6PZm3lhYfBC8sKIKpjy0v13bXx31NxyYDGdbjFb6PmcXGvUu4r+3j5SrL3qaP+YnLaYn8Z/EjzJ28U+1whBBCuInwIHi5P6zaCztLt3oxq2GE0pNs0ddznUWSQDMiAiuyYfi0UtP1RgNj1szj3e4jiQisaPP85Mw0fHReVPIPKlzualYGVQNvX8ia2yZAx+rRfLF/I0MbdWTT2UM83qx40lWyfID8fAjzV25LhPsHcT0niy41GvLVAaXX1f2XzlI7pLLJWCwxVa5wD5bqmCU6rZYPe48BYMLvX3Bf7RYm67qpumSt7t7IzSLIR3nqt/1CPJNa98Vbqyu1jrkYyvI5ssTSZ9vU58sRUq8n463zsfiKpMGg5/rNq4QGVb2jbZ2/fJxTF/fxy45POZtymJXb5jKw04RSZYcFRzB7QswdbauofPIJClAa2wYHhpOZnW63su9Erj4HHy9f/H0r4OejwrexEEIIt+bvoyR0naNhWzzsPQt6Q+nlNBpoUl3pxKlxdfX6nJDXQctoxfFdxCUn8GrsMvosn8HOi/EkZ6bx9s6VZucD/HxyDwPr3R7k/kz6ZV7f9p3N221VtQ5+Xt70XPYGOq2WdtXqF9tuyfIBhjfuzIrjO+mzfAbLjv7BiMZduKdKbfy9fOizfAZxyQk83KCDyVjyDHr6ffcWBy6fZcCKd/gz6WTh9kyVa24d4R4s/b0BLtxIpc/yGdz/vzfpVL0BkUFhJuu6qbpkre5uSzxGh8X/4t5vX6d6hTDaV6tvch1TMZT1c2RpP819dsH056s89IY8/vFZHxKS9vPPL/ty9NwuUq8ns3TjWwBsP7yKTk0HWVw++doZFv72WpnKNTV/3IB3eWfcOt4e9xtRVZsyuOtkk2Xfyf6Ymt+r1Uhi93/HS/N7sOmvpfRqPepOD2u5491/KrYw1reWPMpL83swbeFAnrj/DQDW/rmAz35+iU17l/LfHyc5LU4hhBDuq1YlGNkJ3hmqPB0c2en2vOfvh3eGwbge0CRS3U4HNfn5+fnqbd516OdvJD/hktO292P8n4T6BdKzVlOnbVPNWDR1q+A1obfDynekDbOUf/u8rG4c5XG312u1P0e21Ou45ZCWaL9tbj3wAxUCQh3Sls+RZduiYg1oO9z6cubY+1jb6k7jLq+CTgY+dF4eLdyI1A/ncddj7a5xuyNXPNbyOqhKHmrQXu0QCrlSLMK9ObsueWLd7dbiYbcsWwghhBCuQ14HFUIIIYQQQggPIk8C
b9FUL93Ji7AfOb7qkOPuWLYc36AqTgikjApemaxoelQP1dzpsVLrWLvi31gIIYSwRJLAW3SD7rzTByFcjdRr9TVUp3mdRQXtXNVox+ZIrnishRBCCFckr4MKIYQQQgghhAeRJFAIIYQQQgghPIgkgUIIIYQQQgjhQSQJFEIIIYQQQggPIkmgEEIIIYQQQngQSQKFEEIIIYQQwoNIEiiEEEIIIYQQHkSSQCGEEEIIIYTwIJIECiGEEEIIIYQHkSRQCCGEEEIIITyIJIFCCCGEEEII4UEkCRRCCCGEEEIIDyJJoBBCCCGEEEJ4EC+1A3AXhlV7yL+YpnYYLk1TvSK6QW3KvN7xTXDjkgMCsrO45WpHYFpQFWjYy7ZlpR7fmfLWcSHuNmqdt8tyvitJze+aO4nbHcmxFpZI/XANkgTaKP9iGvkJbpCpuKEblyAtUe0orHOHGK2ReiyEsAd3OW8X5Y4xuys51sISqR+uQV4HFUIIIYQQQggPIkmgEEIIIYQQQngQeR1UCCGEcAPGfDiZovwkpt6e/kUM1AiD6KpQrwpoNKqFKFR25QYcugCJV29Pm7seqodCVCVoXgN8vdWL726SnA5HLsD5Ip/FeRsgMhRqV4amkeCtUy8+U3L0cOg8nL0KF67dnr74D6gZBk1rQOUg9eITziVJoIuL/vw5pncdxqgmXW2aLsrvsZm1GdP3Tfq0ecym6aL8pF4LYTtjPuw6BZuOwOUbpecfvqD8rDsIVYOhd1NoV0f9ZPCl+T04enYHOp03Wq2OiNA6jOw9le4th6obmAXuGDMoNwXW7IejFyG/xLxTl5SfrYCfN3SoB32bQ4CPGpHe5q7H+lQKrD2o3Iwp6USK8sMxCPSFLtHQpyn4qHy1nZ2nnB92nFT+X9KeM8rPyr3QqBo80BJqVXJ2lKW5ax1xF5IECiGEEC4q7SYs3X7rwtIGKdfh2x2w9wyM7ATB/g4Nz6pRfaYxqs9rGAx6Vm3/mLe/HUn9yFZEhtdXNzAL3ClmoxHWHYL1h5SbBdZk50HsMdh3FkZ0Ui741eROx1pvgFV/wdbjti2fmQO/H4K/zsKozlA73LHxmROfrJwT0m7atvyxJGWd3k2gXwvQqdxwzJ3qiLuRNoFCCCGEC7pyAz5cZ3sCWNSxJPjod7iWaf+4ykOn86J/h3EYjHpOXdyndjg2cfWYjUZYsl15wmNLAlhUehZ8vll5+uMKXP1Y5xmU165tTQCLunwDPt4Ax5PsHpZV+8/Bp5tsTwALGPNh/WH45g8wGB0TW1m5eh1xR5IECiGEEC4mO698F29FXc1QysjV2y+u8srT5/LL9vkA1AhvoHI0tnH1mFf/BXvPln99Y77ylPlUOW4y2JurH+tlO+B4cvnX1xvgy1hIcuIwvacvwzfbyn6DoKj95+CnPfaL6U64eh1xR/I6qJtLyUxn6Ko5+Gi9yNLnMqPbo/SKaqZ2WHelazdSmP71ELx0PuTmZfFk/5m0ju6tdlh3JanXwtOt/guuZFhe5sNRyr9TlppfJuU6/LofhrSxX2xl8e3Gt/g+dhZZOTfQ6bx5ceiX1K3eAoC1fy5gw57FhcsmpSbQvE43Xh1pYYecwFLMF66c5K0lj/LRszvw9vLhu5j3uZlzgzF9/+PUGE+mQMwxy8vYUj+M+fDtTvjHAPBV4YrQHY71X2etJ9u2HOs8g/Ja5pS+jn/FMlevbMtgJQG0Je5t8dCiJjSIsF98ZWGpjsxcOpJerUbSscmDALy+aDADO02kbcP71QnWzbj0k0Cj0cisWbOIjo7Gz8+Pli1bEhsbS8OGDXn66afVDs8pvLU69IbSt3HzjAa8tTrC/YPYPPx1NgyfxuIHn2XqluUqRHl30Om80RtLt5jWG/Lw0nkTHBjOnIlbmT0hhn+NWsaCNf9UIcq7g9RrdeTnQ+q527+f3Ao3r5lfXqjj4jXYfsJ+5W05Bpev26+8shjZeyorZ6SxYvoV2jd6gP0nNxfO699+LLMn
xDB7QgxTRy3HzyeQJ/u9pU6gRViKOTK8Pl2bP8zyTW+TlHqamH3LGdl7qlPjy8+HH+PsV97VDIg5ar/yysLVj7XBaN8nYedT4c8E+5VnztbjpjuRKq8fdiv1Tg2W6siEQR+yaN00snIy2HrwRwL9QlwuAUxJh5//uv376cvqHcuSXDoJHDt2LDNmzGD8+PGsXbuWYcOGMWLECBISEmjTRqXbmk4WFVKZk2nF39XIyM0mOTONuiFV0Gm16LTKnzE95ybNK9dSI8y7QkRobS5eOVlsWlZOBtduJFOtUl10Wh06rdLfc0ZWGnWrtVAjzLuC1Gvny8mEP5fA3u9uTzuzC7YvgGMbIN9F2n0I+MOOCSAovUXau8yyCgoI5cWhX7Lr2K9sP7Sq2Dyj0cjby0Yxtv/bRITVVidAE8zFPKzHK+w8+gszl45gwt8+xMfL16lxnb4MF+38WuH2E+q2/XLVY33wPFzPsm+Z2+IdmwQYjfb/vKdcN90bqjOZqiOhFaowpOvzzFv1HN9ufJNn/vaBukEWob/15PftX2DjkdvTP/pdGbYlM0e92Aq4bBK4bNkyFi1axOrVq3n55Zfp2bMnU6dOpVOnTuj1elq3bq12iE4xuum9LDiwiW2JxzAYjVzLzuDFTd/QLLwW91StDcDptEv0WDadASveYVB0W3UDdmP3tx3Dml2fczBhKwajgRs3r/HJquepHdGc+tVbAZCUepop87ry6pd96dJsiMoRuy+p185l1CvJ341Lpucn7oMTsc6NSZhmzL+zdl7m7Dmj/t3n4IAwHu72Il/99i+MxtsZx+L1b1Anojldmg1WMTrTTMXspfOmed17yci6RrM6zh/OxhGduaRnKcNIqMlTjvWFa0pS5ShnrkCqAzqEcoVOhEzVkb7txpB4OZ7BXZ4jOCBM5QhvW7Hb/FPf05fhs81Kwq4ml00CZ86cSb9+/ejevXux6fXr18fb25sWLZSnMP/+979p0KABWq2WFStWqBGqQ41s0pUZ3R7luQ0LqfrxOFot+j+y9Ln8NORlvG49lapTsQoxI6azbdR/mLJxkboBu7HerUfxVP+ZzP1pEg+9Hsa42c3IyctixlM/o9MpjSWqhdXhw0nbmDt5Fx+vfFbliN2X1A/WUsoAABcvSURBVGvnunQCMq9SegCxIs7/pTwtFOq6egOycu1f7o1s5UJfbUO6PU/q9STW7/kGgL0nNrIn/nfGDXhP5cjMKxnzmeTDHD7zB63q92HNri+cHs+5q9aXKY/zDiq3LORY37lzqdaXcaVyy6pkHQGoXqm+Sw0ZcTUDdp4yPz8fpW4dvei0kExyyY5hEhMTOXToEC+88EKpeefOnaNp06b4+iqvBPTr148xY8bw1FNPOTtMpxnbohdjW/QyOS9Hn4evlzcAwT7+BHr7OTO0u84DHcbxQIdxJufl6nMKX0UJ8AvG36eCM0O760i9dp6LhwANFpPAfCOkHIdanvGShctKSndg2WlQMcBx5Zc0e0JMqWmBfsH8+B/lajL1ejIfr3yWmWPX4u2l8sjlt1iL2Wg08tGPzzB5yDxqhDfg+Xmd6dx0EKFBVZ0WY7KD6ogj654prn6ss3Idd+PEkcc62UE9kCanKW8TaDSOKd8Ua3XEVcWdtr6MBth9GprWcHg4ZrlsEggQEVG8K6KsrCxiY2Pp379/4bTOnTuXaxuaMtbi9Y++RveaTcq1LUeKSz7FtG3fodNoyTPqmdPrcdViiYmN4b6Jfcq83qxnNtOyXg/7B2Rn8ed389Xaf6HV6tAb8pgw6CO1QwIgNjaGdiN62rSsq9bjklypXhdV3jquts9e3G+1DWt+fj5vTH2br35zbscLoriGnUbQb9K3xaYV9OBnjrn5JXv8GzhoCKfiVt5BdLfZ47y9ZMMMMrPTef9/Ywqn1azckCmPfGZ2nbKc70qyR8w/75hPdGQbGtRQ+iUY03cGn6yewtRRyyyudydxl/T8kuJ3c+xVP5Yt/57RXYbd
QWS33Q3HOiCkKuPmFR8Xwl7Hes4H/2XQ4ufvIDrz+k5YQqMuxQOxR9zGfPDy9sFoKN2BXlmped1nz8+iOT0en0vz3s+g1ZlPs/KBX3/fwpP3dje7THnl2/juv0smgeHh4QDEx8fzwAMPFE5/7733SEpK8phOYWzRpUYjNg3/t9pheIRmdboyZ+IWtcPwCFKv7Ss94xIGo6GwYyNTNBoN6TevODEqYYo+L9txZee6wPugRTz30Dyee2ie2mGUyaAuk4r93qXZYKe3ZdTnZuHl42//ch1Y98pD7WPt0M+iA8s2OKhso9FglwTQEf4xfJHaIRSTlXEFjYXvW1COZ9YNdb9zNfm2potOZDQaadWqFUlJScyaNYvIyEhWrFjBmjVrOHfuHDt37qRDhw7F1unRowfPPvssjzzyiENi0s/fSH6Cyq2mXZymbhW8JpR93Ly45ZCW6ICAPETFGtB2uG3LSj2+M+Wt42q7eBCOrLOykAa6Pg1+QU4JSZiRkq70JmcLW8b4KurfgyDMTm+xq3XeLsv5riQ1v2vuJO6SZq2BRBuGdilr/RjQEu6z03Csd8ux/vcPcN2GnKqsx3pER+hQr/xxWbL5KKzaa9uyZYm7Wgj834Plj6uou6V+mHPpOsz82fpyT3aDlip2fu6SHcNotVq+//57mjZtyoQJE3jyyScJDw9n0qRJ6HS6wk5hhBBCWFe1EfhXRGmEYEZkC0kAXUHlYMcM2h3oC6GB9i9XOF/NSo4pt5aDynVn7nisazqog0xHHYu7UZVgaB1lfr4GqFYRmqnYHhBcNAkEaNCgAZs3byYzM5Nz584xY8YMDh48SJMmTfD3t/9rEEIIcbfSeUProRAQemuChmIJYURjaGi6jx7hZFoN3GPh4qG8WkU5t0MH4TitHFA/KvhBvSr2L9fdOeJYR4QoP45SpzKEOOAy2RHH4m42vCM0v5XkFXzlFpyDq4fCM71Ap3IW5pJtAs2Ji4ujY8eOxaZNmzaNhQsXcvnyZQ4ePMiUKVOIjY2lXj0HPWe/5eXNi9mTnECrqrWZ0+uJYvNSszKYtH4BV7Nu0DOqGa92VN5hz8rLpcEXz7NowER6RzU3u1xJg398n/Scm/jovFjQ/xlqBBW/HbP48BaWHN6KwWjk6wGTyDMa6Lp0Go3CIvHRebFm6Ksmy72YcY3BP77P0asXuPb8V4Vd8xc4dPk8z25YQH4+zL3vKVpUrsXE37/k8JVENBr4bx9lmhDC9fmHQMcxcCUBLh0Hfa7y5K96cwh2XseGwgZdo2GXhe7Fy6NLtH3LuxsdPbeLT1e/gEajpWHNdkwoMvD0yQv7mPvTJLRaLU/1m0nzut04m3KEOd8rvUm3qt+LMf1mOCXO6KrKk4ZLdhxrrlM98LLchMkjtawFK/dAhh0H9u4S7dgbMjotdIqG3w7Yr8zwCtCwmv3K8wQ+XvDUvcpQELsSIP0m+PsoTwgbVVdu+KnNbZLAjIwM4uPjmThxYrHpM2bMYMYM55x4C/yVcpqM3Gw2j3idZ9cvIC7pFG2r3U4639zxA693eYRGlSKLrffVwc00q1zT6nIlfdDrCepUrMKGMwf5b9xa3uv5WOG8CzdS2Xr+GOuG3e7R70z6ZXpHNefrAZNMFVcozC+QdcP+xdCVH5icP/2P71k8YDJajYbJGxby45CXeKX936hTsQonriUxdctyvhtUehgPR5m/+gXiE+OoH9maSSV65szOvcmMxUPJzs0k0C+E10Z/h9FoKDWtYIiHspS9+9hvLN/8DgCJl4/z3EPz6dJssNl1ftjyAVsP/sCHk7aZ3JalC40C6+O+4fc9X2M0Gnh15FIupydaXaesDl0+z8T1X6LTaKlXsSpf9BtfrNfckjc6zqRftunmgql1i1p3ej/v71oNQPy1JOb2eZJB0e0A+ChuDT+d+JOYEdO5mZfDiNUfkZmXQ7BvAMsGPlc4bIS99tFUrJbic3daLVSpr/wI11WzErSro3Qfbg+d6yuv
HjnblfSLTFv4IGdTjvDzmxmF460WOJ18iA9XPI1Wq6N6pfq8POwrNBpNqfNfeIjl70h7qVoxivfHb8LH24+3vx3F6aSD1KnWHICvf/83rz32P4ICwnjj64d4u+5v/LLjU8Y+8DYt6t7L/31+HxlZaVTwd/yB1mhgSBtlsGl7qBgAvVTqNLq8dQSsf9fag7cOBraCZTvtU161EOjkhPNvj0bKjaRrdhr7dUgbdZIWa9dLyalnmDy3A7WqNMZL58O7T/+OwaDnnWWPcS0jhYY12jHuQfXGINVoICpc+XFFLvs6aEkVKlTAYDAwefJktUNh18WT9I5Svhh6RTVnZ9KJYvMPX0nk3V2ruO9/b7LzYjwAuQY9u5JO0Kl6A4vLmVKnovKOhrdWh1Zb/E+2/swBDPlG+n73FlM2LsJgNAIQe/4IPZe9wUdxa8yW6+flQ6if+V4C0rIzqRlcicigMNJvjSJ9OxYvdBrnVZ8TiXvJysngg4lb0etzOX5+d7H5u4//RqNaHZg9IYaGtdoTd+w3k9PKU3a7Rv2YPSGG2RNiqFKxFq2j+5hdJ1efw6mL+yzuS8GFxoeTtpGWcYnTSQeLzb+SfoEDCbG8P34jsyfEEB4SaXWd8mgYVo0tI99g84jXAdiTnFA4r+iNjlyDnrgk5bFE76jmbBg+zWICaG7dAn3rtGTD8GlsGD6NmkGVCj9LOfo89l86W7jcutP7aVetPhuGT6NdtXqsO7PfrvtoLlZz8QnhTEPaWB/Tb8pS6x06VKoAf1Np7MfggDDee3ojjWt1NDm/ZuWGfPTsdj6YuBWA+MQ4k+c/ZwkLjsDn1pikOq032iJvx2RkXaNyxRr4+QSQnZdJTl4WNSo3JDM7HYPRAIC3mZuMjtC4uvVkwpb6oQEe7aA8oVBDeeoI2PZday/t61pvu2XLsdZpYWQn5zxx9fNWOp+xlrfZEnf7uuqNZWfLtU+b6PuYPSGGd5/+HYBth36ibvWWzHpmMzn6LE5dLPu1g6dwmyTQlaTlZBLsq7xwHeLrT1r2zWLzd1yM5x8dBrHkwcn8M1YZ7+mbQ7GMbNzV6nLmGIxG3t75E+NaFu+ZMOVmOrkGPeuGTcXfy5fVJ+OoFliRw0/NZv2jr7Hp7CEOXD5Xrv00FhlZumQXsq9tXc6zrfuWq9zyOHpuJ20a3AdA6+g+HDm7o9j86pXqkZ2rJKqZWWkEB1YyOa08ZRdIuppAxaCq+PtWMLvOb38u4L62T5hcv4ClCw2AuOPrMBgNvPJZbz5eORmD0WB1nfLwLnLH1dfLmxrBt4+PuRsdttxcsHaTpEBCWgpVAkOo4KPs18KDMYxudm/h/LoVq5KZp7yDk56dSSULNyzKs4/WYi0ZnxDOFOCrtBkJuoPqF+IPz/RULgjV4OPtR1BhQ9TSvHS3A/P28qVySE2T5z9nS7h4gPTMy0RVvf14LCSwMqeTD5GWcZkzyYfIyEqjTYP7+GTlczz1XkMaR3XC19u5/RU83Baa3mGOPKyDklCqpTx1BGz7rrUXjQZGd4Y6d/A0R6eBJ7o6t3OVBhEwopP1RNCSRtVgWHu7hVRmtlz77Du1mRc+6cYPW5SnhEmpCYXj4tarfg9Hzmx3XsBuRpLAcgjxDeB6jjLe0vWcLCr6Fb9dGx1ajcaVIqkaGIJWo0VvNPD7mQP0q3uPxeUs+UfMEkY17Ua9isUb74T4BHBvzcYA9KzVhGOpF/H18ibQxw8vrY4H6rXi8JXz5drPoicObZHf/rtnLY0rRdKlRqNylVseGVlpBPgGAxDoF0JGVlqx+ZHh0Rw9u4O/z2pKfGIcTaI6m5xWnrILbDv4I12aDTG7jt6Qx/5TMbSqb1sPG6YuNACuZaSgN+Ty/viN+HoHsP3wKqvrlNfPJ/dwz8J/kJKZXizJMnWjw9abC9ZukhRYeWI3g+u3BSDPoFcSzFpNC+dHh0awK+kELRe+wp6U03SKbGCynPLuo7VYi8YnhBoiQuC5+8vXk2Cd
ysq6lYPtH5c9bT+8mnGzmpF2I4XgwEoWz3/OcP1mKh+vfJaXhi4oNv3vD7zDp6tf5KMfnqFOtRaEBIazaN00Xhv9HQv/Ec+ZpIMkp55xaqxeOqXNUfdGZb/QD/RRuqd3xquJd6pkHSnrd609+HrDM72V17TLqmIAjO8FLWpaX9be2teFsd2hQjkeUndrAH/v7hptRc1d+4QFV2Ph/8Uza/xm9p7YQMLFA9Ss3JADp2IB2H9yMxnZpq/phBu1CXQlHatH88X+jQxt1JFNZw/xeJGnF6BcvCZlXCPYxx+90UBKZjrnr1/lwRXvcCothbUJ+2g9tE6p5QAuZaYT6hdY7AnGwoOb0Wg0jG5afDsAHSOj+eqA0jBg/6Wz1A6pzI3cLIJuDSS7/UI8k249sbtwI5XIINv7Dg71q0DijatoNVqCbl0orz9zgB0X4vl24HNlOGK2S72ezFtLiw/gEhYUQbM63biZo7SCz8y5Xqrdxfq4r+nYZCDDerzC9zGz2Lh3Cdm5maWm3df28VLbDPQLsVh2gR1Hf2b64z+aXWfDnsX0ajXSpv0suNB47bHvTMbTom53AO6p36vw9RdL65TXwPptGFi/DVM2LuLXhL8YfKvtm6kbHb5e3vii3JUtuLlgqmMgazdJCvx6am9hm9KlR7YxvHHxJH3x4S0MqNuKl9oPZM7uX1h6ZJvJz0ByZhqP/Ty32LSqgSEsvVVHze2jtViLxieEWioHwfP3w5bjEHMU0q2M917QvqtrtNIG1BnMnbenPrbc6rqdm/6Nzk3/xscrJ7PzyC9mz3/OUNCW6OkHZxEWHFFsXo3KDXj36d9Jz7zC/NUv4KXzJj8/nyD/MLRaLQF+IWTl3HBarAV0WuXV4RY14dd9kHDZ8vJeWmhdGwbeA0FOfHBpzzqSkXXN5u9ae/L1glGdld571+63Plajr5cyFmD/Fuq9bgvKq6z/fBB+2QdxZ0Bv5eF67XAYcI/SAZGzWKoflq59lH4elAy3Y+MHOZNyiO4tH+Wvkxt55bPeRITWJrSC9HxmjiSB5dCqah38vLzpuewNWlaJol21+iRnprHwYAyvdhzMvzs/wuhfPiZLn8trnR8iMiiMHaPfBOA/f6ygS42GhPpVKLUcwCsxS5h574hiydrkDQtpF1GPPstn0K1mY17v8gjv7VrNqCZduadKbfy9fOizfAaV/IN4vu0DbDx7kOl/fI+vzpsukY1oX60+eqOBv//2KWuH/quw3DyDnoE/vMuBy2cZsOIdZnR7lFrB4bf3o8sjjLp1cf1RnzEATNn4NcE+/tz3vzdpEFaNT+7/u12PbVhwBLMnxJSafiJxL7/u/IzuLYfx14kN3N92TLH5+eQTFKAcs+DAcDKz09FqdaWmGQx6rt+8SmjQ7ZNCk6hOFssG5QTlrfMpfKXU1DpbD/7AqYv7+GXHp5xNOczKbXMZ2GlCqe1ZutBQyu7Mml1fAHDq4j6qhdWxuk555OjzCjtaCfLxx9/r9reUqRsdpm4u6I0GrmZlUDUwxOK6JSVnpuGj86KSvzIw3fHUJA5cPsMX+zdy5Eoi8/auQ6fREuavPLkL9w/iek6Wye1FBFZkw/BpZd5HS7GWjE8INem00LMx3NsQDl+AkymQmKoMYq1BeWW0ZiXloq1xded3O27uvG1Nrj6nsLOuAN9gfL39TZ7/nCX2wPfEn9/NF7/+A4Cx/d9m075veXbwXNb+uYCNe5fg4+3P5CHzAHi05//x7vLRaLU6alVpXNiJjBrqVVGe/F68BgcSlfpx6ToYjMrrwNVDIaqS0s1/oPOaLhayZx05em5nqe/awV2d119E00hoUh3OXoUjF+B8KlzNAKNRSfYiQ5VE6p4o9V7FLqmCnzJkwcBWsO+sEvvFa5CVp5wvKgcp4ws2qwE1HDTOoCXm6oe1a5+b2TcIuDXA7eEzfzCo62R0Wh3PDlauXT9Y8TRtGzqv6ZK70eTn55ds7iVM0M/fSH7CJYdvZ/L6r5h7
31N2L3dvcgIHLp9jTPMedi+7gKZuFbwm9La+YAlxyyEt0fpy81Y9z8kLe6lX/R6eHTyX1OvJrN29gFG9p5KRlcabSx4lT5+Dl86bqY/9D61GW2rajZup/G/zu7w49Aubywb4Zcdn6I15DO7yrNl1ipoyrysfTtrGhSsnS21v01/L+GTVc0RVVV59HNv/bSLC6hTb3mc/v0x8YhwhgeG8OvJbth78odQ6TWp3AqBiDWhb/AaaWUXr8eqTcYVt++qHRjD//r9z6eb1wpsAL276mr9SztCyShQf9h7D2oS/it1ceLv7CE5eS2bWnz/zad9xxbZTct2iN0kAvti/kTyDnokm2pX2WDadmBHTScvOZNQv/yVHr8dbp2Ppg8+Rmp1hcnvmWNtHU7Faiq+8dVyIu40t5229IY9/fdmfExf2UD+yNU/1n0nVilGF57rth1axYuscQHml/4WHP0er1ZY6/3kXuXlTlvNdeWJ2lDuJ2x3ZeqzLW0cKFHzXFuVpx9od2Vo/rF0v7Tq6hq/XTcPby5dmdboxbsC7XEm/wNvfjkKr0dKnzeP0bTemWJlSP26TJNBGzkoC3Zmjk0B72HrgByoEhDqtLYEztlfeJNAefoz/k1C/wGJt+RzJ2dsrSZJAIRRqJVSSBLoHOdbCEqkfrkFeBxUepVuLh+/q7TnbQw2c222Ys7cnhBBCCHE3kiTQRprqKoy062bKe4yCqtg5EA9TluMn9fjOyPETQqHWeftOtqvmd42nfc/JsRaWSP1wDfI6qBBCCCGEEEJ4EBknUAghhBBCCCE8iCSBQgghhBBCCOFBJAkUQgghhBBCCA8iSaAQQgghhBBCeBBJAoUQQgghhBDCg0gSKIQQQgghhBAeRJJAIYQQQgghhPAgkgQKIYQQQgghhAeRJFAIIYQQQgghPIgkgUIIIYQQQgjhQSQJFEIIIYQQQggPIkmgEEIIIYQQQngQSQKFEEIIIYQQwoNIEiiEEEIIIYQQHkSSQCGEEEIIIYTwIJIECiGEEEIIIYQHkSRQCCGEEEIIITyIJIFCCCGEEEII4UH+H/Tv5YufplJTAAAAAElFTkSuQmCC\n"
     },
     "metadata": {},
     "execution_count": 5
    }
   ]
  },
  {
   "cell_type": "code",
   "source": [
    "#"
   ],
   "metadata": {
    "id": "qXO5aA1p27_L",
    "pycharm": {
     "name": "#%%\n"
    }
   },
   "execution_count": null,
   "outputs": []
  },
  {
   "cell_type": "code",
   "source": [
    "! pip install pennylane"
   ],
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/"
    },
    "id": "eYQShCDEMA2O",
    "outputId": "a32f8801-1253-4447-e480-58552dea6a29",
    "pycharm": {
     "name": "#%%\n"
    }
   },
   "execution_count": 3,
   "outputs": [
    {
     "output_type": "stream",
     "name": "stdout",
     "text": [
      "Looking in indexes: https://pypi.org/simple, https://us-python.pkg.dev/colab-wheels/public/simple/\n",
      "Collecting pennylane\n",
      "  Downloading PennyLane-0.25.1-py3-none-any.whl (1.0 MB)\n",
      "\u001B[K     |████████████████████████████████| 1.0 MB 35.4 MB/s \n",
      "\u001B[?25hRequirement already satisfied: appdirs in /usr/local/lib/python3.7/dist-packages (from pennylane) (1.4.4)\n",
      "Requirement already satisfied: autograd in /usr/local/lib/python3.7/dist-packages (from pennylane) (1.4)\n",
      "Requirement already satisfied: scipy in /usr/local/lib/python3.7/dist-packages (from pennylane) (1.7.3)\n",
      "Requirement already satisfied: cachetools in /usr/local/lib/python3.7/dist-packages (from pennylane) (4.2.4)\n",
      "Requirement already satisfied: toml in /usr/local/lib/python3.7/dist-packages (from pennylane) (0.10.2)\n",
      "Requirement already satisfied: networkx in /usr/local/lib/python3.7/dist-packages (from pennylane) (2.6.3)\n",
      "Collecting pennylane-lightning>=0.25\n",
      "  Downloading PennyLane_Lightning-0.25.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (13.6 MB)\n",
      "\u001B[K     |████████████████████████████████| 13.6 MB 29.3 MB/s \n",
      "\u001B[?25hRequirement already satisfied: numpy in /usr/local/lib/python3.7/dist-packages (from pennylane) (1.21.6)\n",
      "Collecting semantic-version>=2.7\n",
      "  Downloading semantic_version-2.10.0-py2.py3-none-any.whl (15 kB)\n",
      "Collecting autoray>=0.3.1\n",
      "  Downloading autoray-0.3.2-py3-none-any.whl (36 kB)\n",
      "Requirement already satisfied: retworkx in /usr/local/lib/python3.7/dist-packages (from pennylane) (0.11.0)\n",
      "Collecting ninja\n",
      "  Downloading ninja-1.10.2.3-py2.py3-none-manylinux_2_5_x86_64.manylinux1_x86_64.whl (108 kB)\n",
      "\u001B[K     |████████████████████████████████| 108 kB 68.7 MB/s \n",
      "\u001B[?25hRequirement already satisfied: future>=0.15.2 in /usr/local/lib/python3.7/dist-packages (from autograd->pennylane) (0.16.0)\n",
      "Installing collected packages: ninja, semantic-version, pennylane-lightning, autoray, pennylane\n",
      "Successfully installed autoray-0.3.2 ninja-1.10.2.3 pennylane-0.25.1 pennylane-lightning-0.25.1 semantic-version-2.10.0\n"
     ]
    }
   ]
  },
  {
   "cell_type": "code",
   "source": [
    "# Speed comparison with pennylane\n",
    "\n",
    "import pennylane as qml\n",
    "from pennylane import numpy as np\n",
    "import torch\n",
    "import torchquantum as tq\n",
    "import random\n",
    "import time\n"
   ],
   "metadata": {
    "id": "iAsj8ImRQ2e4",
    "pycharm": {
     "name": "#%%\n"
    }
   },
   "execution_count": 12,
   "outputs": []
  },
  {
   "cell_type": "code",
   "source": [
    "n_wires = 10\n",
    "bsz = 32\n",
    "use_gpu=False"
   ],
   "metadata": {
    "id": "DCr7hQ_MROPU",
    "pycharm": {
     "name": "#%%\n"
    }
   },
   "execution_count": 18,
   "outputs": []
  },
  {
   "cell_type": "code",
   "source": [
    "dev=qml.device(\"default.qubit\",wires=n_wires)\n",
    "\n",
    "@qml.qnode(dev,interface=\"torch\")\n",
    "def pennylane_circ(params):\n",
    "    qml.Rot(params[0],params[1],params[2],wires=0)\n",
    "    qml.Rot(params[3],params[4],params[5],wires=1)\n",
    "    qml.ctrl(qml.Rot,control=0)(params[6],params[7],params[8],wires=1)\n",
    "    qml.Rot(params[9],params[10],params[11],wires=0)\n",
    "    qml.Rot(params[12],params[13],params[14],wires=1)  \n",
    "    qml.ctrl(qml.Rot,control=1)(params[15],params[16],params[17],wires=0)\n",
    "    return qml.state()\n",
    "\n",
    "\n",
    "\n",
    "if use_gpu:\n",
    "  device = torch.device('cuda')\n",
    "else:\n",
    "  device = torch.device('cpu')\n",
    "\n",
    "params=np.zeros(18)\n",
    "\n",
    "reps = 20\n",
    "start = time.time()\n",
    "for _ in range(reps):\n",
    "  for k in range(bsz):\n",
    "    pennylane_circ(params)\n",
    "\n",
    "end = time.time()\n",
    "pennylane_time = (end-start)/reps\n",
    "print(f\"Pennylane inference time: {pennylane_time}\")\n"
   ],
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/"
    },
    "id": "C0Vf_Kte29Xt",
    "outputId": "d989a826-c7cc-4860-dc8f-19a730135be7",
    "pycharm": {
     "name": "#%%\n"
    }
   },
   "execution_count": 19,
   "outputs": [
    {
     "output_type": "stream",
     "name": "stdout",
     "text": [
      "Pennylane inference time: 0.3734148144721985\n"
     ]
    }
   ]
  },
  {
   "cell_type": "code",
   "source": [
    "reps = 1000\n",
    "'''\n",
    "Circuit definition in torchquantum\n",
    "'''\n",
    "class QModel(tq.QuantumModule):\n",
    "    def __init__(self, bsz, n_wires):\n",
    "        super().__init__()\n",
    "        self.bsz = bsz\n",
    "        self.n_wires = n_wires\n",
    "        self.u3_0 = tq.U3(has_params=True, trainable=True)\n",
    "        self.u3_1 = tq.U3(has_params=True, trainable=True)\n",
    "        self.cu3_0 = tq.CU3(has_params=True, trainable=True)\n",
    "        self.cu3_1 = tq.CU3(has_params=True, trainable=True)\n",
    "        self.u3_2 = tq.U3(has_params=True, trainable=True)\n",
    "        self.u3_3 = tq.U3(has_params=True, trainable=True)\n",
    "        \n",
    "    def forward(self, q_device: tq.QuantumDevice):\n",
    "        q_device.reset_states(self.bsz)\n",
    "        self.u3_0(q_device, wires=0)\n",
    "        self.u3_1(q_device, wires=1)\n",
    "        self.cu3_0(q_device, wires=[0, 1])\n",
    "        self.u3_2(q_device, wires=0)\n",
    "        self.u3_3(q_device, wires=1)\n",
    "        self.cu3_1(q_device, wires=[1, 0])\n",
    "\n",
    "tq_circ = QModel(n_wires=n_wires, bsz=bsz).to(device)\n",
    "q_device = tq.QuantumDevice(n_wires=n_wires)\n",
    "\n",
    "\n",
    "start = time.time()\n",
    "for _ in range(reps):\n",
    "  tq_circ(q_device)\n",
    "\n",
    "end = time.time()\n",
    "tq_time = (end-start)/reps\n",
    "\n",
    "print(f\"TorchQuantum inference time {tq_time}; is {pennylane_time/tq_time} X faster\")"
   ],
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/"
    },
    "id": "-bH438r0Q5gV",
    "outputId": "00b1edc2-9dd9-4c65-e16e-e12ade91f6a6",
    "pycharm": {
     "name": "#%%\n"
    }
   },
   "execution_count": 20,
   "outputs": [
    {
     "output_type": "stream",
     "name": "stdout",
     "text": [
      "TorchQuantum inference time 0.004048892259597778; is 92.22641417218001 X faster\n"
     ]
    }
   ]
  },
  {
   "cell_type": "code",
   "source": [
    "# basic pulse\n",
    "pulse = tq.QuantumPulseDirect(n_steps=4,\n",
    "                                  hamil=[[0, 1], [1, 0]])\n",
    "pulse.get_unitary()\n"
   ],
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/"
    },
    "id": "pW7OxsW55K4G",
    "outputId": "cffffadd-cf6a-4e89-a037-a97ed8a90492",
    "pycharm": {
     "name": "#%%\n"
    }
   },
   "execution_count": 26,
   "outputs": [
    {
     "output_type": "execute_result",
     "data": {
      "text/plain": [
       "tensor([[-0.6536+0.0000j,  0.0000+0.7568j],\n",
       "        [ 0.0000+0.7568j, -0.6536+0.0000j]], grad_fn=<MmBackward0>)"
      ]
     },
     "metadata": {},
     "execution_count": 26
    }
   ]
  },
  {
   "cell_type": "code",
   "source": [
    "theta = 0.6 * np.pi\n",
    "target_unitary = torch.tensor([[np.cos(theta/2), -1j*np.sin(theta/2)], [-1j*np.sin(theta/2), np.cos(theta/2)]], dtype=torch.complex64)\n",
    "loss = 1 - (torch.trace(pulse.get_unitary() @ target_unitary.conj().T) / target_unitary.shape[0]).abs() ** 2\n",
    "loss.backward()\n",
    "print(pulse.pulse_shape.grad)\n"
   ],
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/"
    },
    "id": "baPhKQj3_YZP",
    "outputId": "ee4fd4ce-9f61-48cb-d9c1-60056488f705",
    "pycharm": {
     "name": "#%%\n"
    }
   },
   "execution_count": 28,
   "outputs": [
    {
     "output_type": "stream",
     "name": "stdout",
     "text": [
      "tensor([-0.4441, -0.4441, -0.4441, -0.4441])\n"
     ]
    }
   ]
  },
  {
   "cell_type": "markdown",
   "source": [
    "## 1.3 TorchQuantum for state preparation circuit"
   ],
   "metadata": {
    "id": "ElNAsYJLj8J9",
    "pycharm": {
     "name": "#%% md\n"
    }
   }
  },
  {
   "cell_type": "code",
   "source": [
    "import torch\n",
    "import torch.optim as optim\n",
    "import argparse\n",
    "\n",
    "import torchquantum as tq\n",
    "from torch.optim.lr_scheduler import CosineAnnealingLR\n",
    "\n",
    "import random\n",
    "import numpy as np"
   ],
   "metadata": {
    "id": "8ngaSqT-iItk",
    "pycharm": {
     "name": "#%%\n"
    }
   },
   "execution_count": 7,
   "outputs": []
  },
  {
   "cell_type": "code",
   "source": [
    "\n",
    "class QModel(tq.QuantumModule):\n",
    "    def __init__(self):\n",
    "        super().__init__()\n",
    "        self.n_wires = 2\n",
    "        self.u3_0 = tq.U3(has_params=True, trainable=True)\n",
    "        self.u3_1 = tq.U3(has_params=True, trainable=True)\n",
    "        self.cu3_0 = tq.CU3(has_params=True, trainable=True)\n",
    "        self.cu3_1 = tq.CU3(has_params=True, trainable=True)\n",
    "        self.u3_2 = tq.U3(has_params=True, trainable=True)\n",
    "        self.u3_3 = tq.U3(has_params=True, trainable=True)\n",
    "\n",
    "    def forward(self, q_state: tq.QuantumState):\n",
    "        q_state.reset_states(1)\n",
    "        self.u3_0(q_state, wires=0)\n",
    "        self.u3_1(q_state, wires=1)\n",
    "        self.cu3_0(q_state, wires=[0, 1])\n",
    "        self.u3_2(q_state, wires=0)\n",
    "        self.u3_3(q_state, wires=1)\n",
    "        self.cu3_1(q_state, wires=[1, 0])\n",
    "\n",
    "def train(target_state, state, model, optimizer):\n",
    "    model(state)\n",
    "    result_state = state.get_states_1d()[0]\n",
    "\n",
    "    # compute the state infidelity\n",
    "    loss = 1 - torch.dot(result_state.conj(), target_state).abs() ** 2\n",
    "\n",
    "    optimizer.zero_grad()\n",
    "    loss.backward()\n",
    "    optimizer.step()\n",
    "    print(f\"infidelity (loss): {loss.item()}, \\n target state : \"\n",
    "          f\"{target_state.detach().cpu().numpy()}, \\n \"\n",
    "          f\"result state : {result_state.detach().cpu().numpy()}\\n\")"
   ],
   "metadata": {
    "id": "kJ64ckPTiZtM",
    "pycharm": {
     "name": "#%%\n"
    }
   },
   "execution_count": 8,
   "outputs": []
  },
  {
   "cell_type": "code",
   "source": [
    "def main(n_epochs=3000):\n",
    "    seed = 42\n",
    "    random.seed(seed)\n",
    "    np.random.seed(seed)\n",
    "    torch.manual_seed(seed)\n",
    "\n",
    "    use_cuda = torch.cuda.is_available()\n",
    "    device = torch.device(\"cuda\" if use_cuda else \"cpu\")\n",
    "\n",
    "    model = QModel().to(device)\n",
    "\n",
    "    optimizer = optim.Adam(model.parameters(), lr=1e-2, weight_decay=0)\n",
    "    scheduler = CosineAnnealingLR(optimizer, T_max=n_epochs)\n",
    "\n",
    "    q_device = tq.QuantumState(n_wires=2)\n",
    "    target_state = torch.tensor([0, 1, 0, 0], dtype=torch.complex64)\n",
    "\n",
    "    for epoch in range(1, n_epochs + 1):\n",
    "        print(f\"Epoch {epoch}, LR: {optimizer.param_groups[0]['lr']}\")\n",
    "        train(target_state, q_device, model, optimizer)\n",
    "        scheduler.step()"
   ],
   "metadata": {
    "id": "85BzTkY0io0o",
    "pycharm": {
     "name": "#%%\n"
    }
   },
   "execution_count": 35,
   "outputs": []
  },
  {
   "cell_type": "code",
   "source": [
    "main(n_epochs=3000)"
   ],
   "metadata": {
    "id": "NyMvW0pai_lO",
    "pycharm": {
     "name": "#%%\n"
    }
   },
   "execution_count": null,
   "outputs": []
  },
  {
   "cell_type": "markdown",
   "source": [
    "## 1.4 TorchQuantum for VQE circuit "
   ],
   "metadata": {
    "id": "6QeYK4OjA9qB",
    "pycharm": {
     "name": "#%% md\n"
    }
   }
  },
  {
   "cell_type": "code",
   "source": [
    "! wget https://www.dropbox.com/s/1rtttfxoo02s09e/h2_new.txt"
   ],
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/"
    },
    "id": "FkF4XlbcVg0G",
    "outputId": "5e702e7c-34d8-40a4-def5-980621b0262b",
    "pycharm": {
     "name": "#%%\n"
    }
   },
   "execution_count": 10,
   "outputs": [
    {
     "output_type": "stream",
     "name": "stdout",
     "text": [
      "--2022-09-19 15:25:09--  https://www.dropbox.com/s/1rtttfxoo02s09e/h2_new.txt\n",
      "Resolving www.dropbox.com (www.dropbox.com)... 162.125.65.18, 2620:100:6017:18::a27d:212\n",
      "Connecting to www.dropbox.com (www.dropbox.com)|162.125.65.18|:443... connected.\n",
      "HTTP request sent, awaiting response... 302 Found\n",
      "Location: /s/raw/1rtttfxoo02s09e/h2_new.txt [following]\n",
      "--2022-09-19 15:25:10--  https://www.dropbox.com/s/raw/1rtttfxoo02s09e/h2_new.txt\n",
      "Reusing existing connection to www.dropbox.com:443.\n",
      "HTTP request sent, awaiting response... 302 Found\n",
      "Location: https://ucfcd04121af2228bb42634017f1.dl.dropboxusercontent.com/cd/0/inline/BtNQ0j4Qw_P3NDqdfHMScfqMtF5UMizmFhmybBzezDMfQxVT-6XxJ8L4v68idx990zBZGgjFv_daTOhOCPhY7HqN47VGL7WU3mzIkkumskCkzELS-C8msPgRwrGbBLvst8KeznexC4Dk4dfyqQyM9YOjytB_H_HBaSmwsn9xn-VSGg/file# [following]\n",
      "--2022-09-19 15:25:10--  https://ucfcd04121af2228bb42634017f1.dl.dropboxusercontent.com/cd/0/inline/BtNQ0j4Qw_P3NDqdfHMScfqMtF5UMizmFhmybBzezDMfQxVT-6XxJ8L4v68idx990zBZGgjFv_daTOhOCPhY7HqN47VGL7WU3mzIkkumskCkzELS-C8msPgRwrGbBLvst8KeznexC4Dk4dfyqQyM9YOjytB_H_HBaSmwsn9xn-VSGg/file\n",
      "Resolving ucfcd04121af2228bb42634017f1.dl.dropboxusercontent.com (ucfcd04121af2228bb42634017f1.dl.dropboxusercontent.com)... 162.125.3.15, 2620:100:6017:15::a27d:20f\n",
      "Connecting to ucfcd04121af2228bb42634017f1.dl.dropboxusercontent.com (ucfcd04121af2228bb42634017f1.dl.dropboxusercontent.com)|162.125.3.15|:443... connected.\n",
      "HTTP request sent, awaiting response... 200 OK\n",
      "Length: 139 [text/plain]\n",
      "Saving to: ‘h2_new.txt’\n",
      "\n",
      "h2_new.txt          100%[===================>]     139  --.-KB/s    in 0s      \n",
      "\n",
      "2022-09-19 15:25:11 (26.7 MB/s) - ‘h2_new.txt’ saved [139/139]\n",
      "\n"
     ]
    }
   ]
  },
  {
   "cell_type": "code",
   "source": [
    "import torchquantum as tq\n",
    "import torch\n",
    "import torch.nn.functional as F\n",
    "from torchquantum.vqe_utils import parse_hamiltonian_file\n",
    "from torchquantum.datasets import VQE\n",
    "import random\n",
    "import numpy as np\n",
    "import argparse\n",
    "import torch.optim as optim\n",
    "\n",
    "from torch.optim.lr_scheduler import CosineAnnealingLR, ConstantLR\n",
    "\n"
   ],
   "metadata": {
    "id": "-plW3t-BBDKG",
    "pycharm": {
     "name": "#%%\n"
    }
   },
   "execution_count": 4,
   "outputs": []
  },
  {
   "cell_type": "code",
   "source": [
    "class QVQEModel(tq.QuantumModule):\n",
    "    def __init__(self, arch, hamil_info):\n",
    "        super().__init__()\n",
    "        self.arch = arch\n",
    "        self.hamil_info = hamil_info\n",
    "        self.n_wires = hamil_info['n_wires']\n",
    "        self.n_blocks = arch['n_blocks']\n",
    "        self.u3_layers = tq.QuantumModuleList()\n",
    "        self.cu3_layers = tq.QuantumModuleList()\n",
    "        for _ in range(self.n_blocks):\n",
    "            self.u3_layers.append(tq.Op1QAllLayer(op=tq.U3,\n",
    "                                                  n_wires=self.n_wires,\n",
    "                                                  has_params=True,\n",
    "                                                  trainable=True,\n",
    "                                                  ))\n",
    "            self.cu3_layers.append(tq.Op2QAllLayer(op=tq.CU3,\n",
    "                                                   n_wires=self.n_wires,\n",
    "                                                   has_params=True,\n",
    "                                                   trainable=True,\n",
    "                                                   circular=True\n",
    "                                                   ))\n",
    "        self.measure = tq.MeasureMultipleTimes(\n",
    "            obs_list=hamil_info['hamil_list'])\n",
    "\n",
    "    def forward(self, q_device):\n",
    "        q_device.reset_states(bsz=1)\n",
    "        for k in range(self.n_blocks):\n",
    "            self.u3_layers[k](q_device)\n",
    "            self.cu3_layers[k](q_device)\n",
    "        x = self.measure(q_device)\n",
    "\n",
    "        hamil_coefficients = torch.tensor([hamil['coefficient'] for hamil in\n",
    "                                           self.hamil_info['hamil_list']],\n",
    "                                          device=x.device).double()\n",
    "\n",
    "        for k, hamil in enumerate(self.hamil_info['hamil_list']):\n",
    "            for wire, observable in zip(hamil['wires'], hamil['observables']):\n",
    "                if observable == 'i':\n",
    "                    x[k][wire] = 1\n",
    "            for wire in range(q_device.n_wires):\n",
    "                if wire not in hamil['wires']:\n",
    "                    x[k][wire] = 1\n",
    "\n",
    "        x = torch.cumprod(x, dim=-1)[:, -1].double()\n",
    "        x = torch.dot(x, hamil_coefficients)\n",
    "\n",
    "        if x.dim() == 0:\n",
    "            x = x.unsqueeze(0)\n",
    "\n",
    "        return x\n",
    "\n",
    "\n",
    "def train(dataflow, q_device, model, device, optimizer):\n",
    "    for _ in dataflow['train']:\n",
    "        outputs = model(q_device)\n",
    "        loss = outputs.mean()\n",
    "\n",
    "        optimizer.zero_grad()\n",
    "        loss.backward()\n",
    "        optimizer.step()\n",
    "        print(f\"Expectation of energy: {loss.item()}\")\n",
    "\n",
    "\n",
    "def valid_test(dataflow, q_device, split, model, device):\n",
    "    with torch.no_grad():\n",
    "        for _ in dataflow[split]:\n",
    "            outputs = model(q_device)\n",
    "    loss = outputs.mean()\n",
    "\n",
    "    print(f\"Expectation of energy: {loss}\")\n"
   ],
   "metadata": {
    "id": "Psb0lOq3BSbQ",
    "pycharm": {
     "name": "#%%\n"
    }
   },
   "execution_count": 11,
   "outputs": []
  },
  {
   "cell_type": "code",
   "source": [
    "class Args(object):\n",
    "  \"\"\"Minimal stand-in for an argparse.Namespace; attributes set in main().\"\"\"\n",
    "  def __init__(self):\n",
    "    pass\n",
    "\n",
    "def main():\n",
    "    \"\"\"Train a VQE ansatz on the H2 Hamiltonian.\n",
    "\n",
    "    Builds the train/valid dataflow, parses the Hamiltonian file,\n",
    "    constructs QVQEModel and optimizes it with Adam + cosine LR\n",
    "    annealing, printing the expected energy as it trains.\n",
    "    \"\"\"\n",
    "    # Hyper-parameters, kept as attributes so the cell mirrors the\n",
    "    # original argparse-driven script this notebook was adapted from.\n",
    "    args = Args()\n",
    "    args.n_blocks = 2\n",
    "    args.steps_per_epoch = 100\n",
    "    args.epochs = 100\n",
    "    args.hamil_filename = '/content/torchquantum/h2_new.txt'\n",
    "\n",
    "    # Seed all RNGs for reproducibility.\n",
    "    seed = 0\n",
    "    random.seed(seed)\n",
    "    np.random.seed(seed)\n",
    "    torch.manual_seed(seed)\n",
    "\n",
    "    dataset = VQE(steps_per_epoch=args.steps_per_epoch)\n",
    "\n",
    "    dataflow = dict()\n",
    "\n",
    "    # Random order for training, deterministic order elsewhere.\n",
    "    for split in dataset:\n",
    "        if split == 'train':\n",
    "            sampler = torch.utils.data.RandomSampler(dataset[split])\n",
    "        else:\n",
    "            sampler = torch.utils.data.SequentialSampler(dataset[split])\n",
    "        dataflow[split] = torch.utils.data.DataLoader(\n",
    "            dataset[split],\n",
    "            batch_size=1,\n",
    "            sampler=sampler,\n",
    "            num_workers=1,\n",
    "            pin_memory=True)\n",
    "\n",
    "    hamil_info = parse_hamiltonian_file(args.hamil_filename)\n",
    "\n",
    "    use_cuda = torch.cuda.is_available()\n",
    "    device = torch.device(\"cuda\" if use_cuda else \"cpu\")\n",
    "    model = QVQEModel(arch={\"n_blocks\": args.n_blocks},\n",
    "                       hamil_info=hamil_info)\n",
    "\n",
    "    model.to(device)\n",
    "\n",
    "    n_epochs = args.epochs\n",
    "    optimizer = optim.Adam(model.parameters(), lr=5e-3, weight_decay=1e-4)\n",
    "    scheduler = CosineAnnealingLR(optimizer, T_max=n_epochs)\n",
    "\n",
    "    # One quantum device, sized by the Hamiltonian, reused every step.\n",
    "    q_device = tq.QuantumDevice(n_wires=hamil_info['n_wires'])\n",
    "    q_device.reset_states(bsz=1)\n",
    "\n",
    "    for epoch in range(1, n_epochs + 1):\n",
    "        # train\n",
    "        print(f\"Epoch {epoch}, LR: {optimizer.param_groups[0]['lr']}\")\n",
    "        train(dataflow, q_device, model, device, optimizer)\n",
    "\n",
    "        # valid\n",
    "        valid_test(dataflow, q_device, 'valid', model, device)\n",
    "        scheduler.step()\n",
    "\n",
    "    # final valid\n",
    "    valid_test(dataflow, q_device, 'valid', model, device)"
   ],
   "metadata": {
    "id": "UTTikHR1BZnV",
    "pycharm": {
     "name": "#%%\n"
    }
   },
   "execution_count": 14,
   "outputs": []
  },
  {
   "cell_type": "code",
   "source": [
    "# Run the VQE training loop defined in the previous cell.\n",
    "main()"
   ],
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/",
     "height": 1000
    },
    "id": "TCEvpt3ECZhX",
    "outputId": "a9e0a50e-1b46-4995-88b7-a856b022c198",
    "pycharm": {
     "name": "#%%\n"
    }
   },
   "execution_count": 15,
   "outputs": [
    {
     "output_type": "stream",
     "name": "stdout",
     "text": [
      "Epoch 1, LR: 0.005\n",
      "Expectation of energy: -0.308297323072801\n",
      "Expectation of energy: -0.3150710661499416\n",
      "Expectation of energy: -0.3222581990990716\n",
      "Expectation of energy: -0.3298538992153188\n",
      "Expectation of energy: -0.3378549958850836\n",
      "Expectation of energy: -0.3462580818360128\n",
      "Expectation of energy: -0.35506080617605584\n",
      "Expectation of energy: -0.3642611309815864\n",
      "Expectation of energy: -0.37385944604789345\n",
      "Expectation of energy: -0.3838581631128637\n",
      "Expectation of energy: -0.39426284856410665\n",
      "Expectation of energy: -0.40508255287436734\n",
      "Expectation of energy: -0.41632968098560474\n",
      "Expectation of energy: -0.4280217907429371\n",
      "Expectation of energy: -0.4401789326278298\n",
      "Expectation of energy: -0.4528248988932262\n",
      "Expectation of energy: -0.4659814090349086\n",
      "Expectation of energy: -0.4796671753594215\n",
      "Expectation of energy: -0.49389354536284613\n",
      "Expectation of energy: -0.5086673454400741\n",
      "Expectation of energy: -0.5239896705033897\n",
      "Expectation of energy: -0.5398575345128833\n",
      "Expectation of energy: -0.5562654188755087\n",
      "Expectation of energy: -0.5732025354929666\n",
      "Expectation of energy: -0.5906582814303059\n",
      "Expectation of energy: -0.6086202753284728\n",
      "Expectation of energy: -0.6270759125807044\n",
      "Expectation of energy: -0.6460131499584596\n",
      "Expectation of energy: -0.6654197248861027\n",
      "Expectation of energy: -0.6852816790901917\n",
      "Expectation of energy: -0.7055832865251654\n",
      "Expectation of energy: -0.726304925766776\n",
      "Expectation of energy: -0.7474252975601621\n",
      "Expectation of energy: -0.7689180912762086\n",
      "Expectation of energy: -0.7907567045661701\n",
      "Expectation of energy: -0.8129120128083609\n",
      "Expectation of energy: -0.835353362605799\n",
      "Expectation of energy: -0.8580469167888662\n",
      "Expectation of energy: -0.880957539042001\n",
      "Expectation of energy: -0.904049305753527\n",
      "Expectation of energy: -0.9272828409789278\n",
      "Expectation of energy: -0.9506195169472338\n",
      "Expectation of energy: -0.9740194497956998\n",
      "Expectation of energy: -0.997441343767171\n",
      "Expectation of energy: -1.02084374128577\n",
      "Expectation of energy: -1.044184479376973\n",
      "Expectation of energy: -1.0674212651913764\n",
      "Expectation of energy: -1.0905136089809482\n",
      "Expectation of energy: -1.1134209261619075\n",
      "Expectation of energy: -1.1361055443887715\n",
      "Expectation of energy: -1.1585309511002313\n",
      "Expectation of energy: -1.180663069118116\n",
      "Expectation of energy: -1.2024708840223917\n",
      "Expectation of energy: -1.2239266828663689\n",
      "Expectation of energy: -1.2450046695668322\n",
      "Expectation of energy: -1.2656832661683326\n",
      "Expectation of energy: -1.2859423163229518\n",
      "Expectation of energy: -1.305764713513226\n",
      "Expectation of energy: -1.3251346780127407\n",
      "Expectation of energy: -1.3440385075139858\n",
      "Expectation of energy: -1.3624633073585322\n",
      "Expectation of energy: -1.3803982401215706\n",
      "Expectation of energy: -1.3978330414034656\n",
      "Expectation of energy: -1.4147587108826005\n",
      "Expectation of energy: -1.4311684817015302\n",
      "Expectation of energy: -1.4470565159801254\n",
      "Expectation of energy: -1.4624191672379407\n",
      "Expectation of energy: -1.4772557899128564\n",
      "Expectation of energy: -1.491566312061463\n",
      "Expectation of energy: -1.5053538021332036\n",
      "Expectation of energy: -1.518623216448085\n",
      "Expectation of energy: -1.5313816111045064\n",
      "Expectation of energy: -1.543637582764402\n",
      "Expectation of energy: -1.5554010688327917\n",
      "Expectation of energy: -1.566683542253599\n",
      "Expectation of energy: -1.577497960326701\n",
      "Expectation of energy: -1.58785648515607\n",
      "Expectation of energy: -1.5977735926623289\n",
      "Expectation of energy: -1.6072629639575209\n",
      "Expectation of energy: -1.6163387675503167\n",
      "Expectation of energy: -1.6250154649982833\n",
      "Expectation of energy: -1.6333083096243592\n",
      "Expectation of energy: -1.641231861539749\n",
      "Expectation of energy: -1.6488010623233564\n",
      "Expectation of energy: -1.6560317510167233\n",
      "Expectation of energy: -1.6629380850645679\n",
      "Expectation of energy: -1.6695359678676125\n",
      "Expectation of energy: -1.6758402260292533\n",
      "Expectation of energy: -1.6818649964748809\n",
      "Expectation of energy: -1.6876251155377096\n",
      "Expectation of energy: -1.6931347620651545\n",
      "Expectation of energy: -1.6984065362320644\n",
      "Expectation of energy: -1.7034537412957578\n",
      "Expectation of energy: -1.7082888786232986\n",
      "Expectation of energy: -1.712923466852262\n",
      "Expectation of energy: -1.7173688625873273\n",
      "Expectation of energy: -1.721636092546762\n",
      "Expectation of energy: -1.7257351333265731\n",
      "Expectation of energy: -1.7296762987476333\n",
      "Expectation of energy: -1.7334682116242563\n",
      "Expectation of energy: -1.7371199700047164\n",
      "Epoch 2, LR: 0.0049987664009143295\n",
      "Expectation of energy: -1.7371199700047164\n",
      "Expectation of energy: -1.7406386912828493\n",
      "Expectation of energy: -1.7440335320517497\n",
      "Expectation of energy: -1.7473111102056609\n",
      "Expectation of energy: -1.7504782222782216\n",
      "Expectation of energy: -1.7535412787390154\n",
      "Expectation of energy: -1.7565058968835403\n",
      "Expectation of energy: -1.759377074174433\n",
      "Expectation of energy: -1.7621601697925975\n",
      "Expectation of energy: -1.7648592241832979\n",
      "Expectation of energy: -1.767479029402394\n",
      "Expectation of energy: -1.7700228962512232\n",
      "Expectation of energy: -1.7724949221862025\n",
      "Expectation of energy: -1.7748978987552877\n",
      "Expectation of energy: -1.7772350746150434\n",
      "Expectation of energy: -1.7795094983733064\n",
      "Expectation of energy: -1.7817232498377027\n",
      "Expectation of energy: -1.7838785571014317\n",
      "Expectation of energy: -1.7859782904080883\n",
      "Expectation of energy: -1.7880235033033915\n",
      "Expectation of energy: -1.7900170050286142\n",
      "Expectation of energy: -1.7919597975826154\n",
      "Expectation of energy: -1.7938537272504917\n",
      "Expectation of energy: -1.7957003241548335\n",
      "Expectation of energy: -1.797500955558857\n",
      "Expectation of energy: -1.7992573516104649\n",
      "Expectation of energy: -1.8009698268775123\n",
      "Expectation of energy: -1.8026405103866445\n",
      "Expectation of energy: -1.804269877594594\n",
      "Expectation of energy: -1.8058594881338932\n",
      "Expectation of energy: -1.8074098600566129\n",
      "Expectation of energy: -1.8089226514277743\n",
      "Expectation of energy: -1.810398198739981\n",
      "Expectation of energy: -1.8118380108759542\n",
      "Expectation of energy: -1.8132425321819088\n",
      "Expectation of energy: -1.814613142876332\n",
      "Expectation of energy: -1.8159501233090745\n",
      "Expectation of energy: -1.8172551329175004\n",
      "Expectation of energy: -1.81852813868538\n",
      "Expectation of energy: -1.8197706051011815\n",
      "Expectation of energy: -1.8209829790204506\n",
      "Expectation of energy: -1.822166207271819\n",
      "Expectation of energy: -1.823321290024784\n",
      "Expectation of energy: -1.8244487293863783\n",
      "Expectation of energy: -1.825549328783198\n",
      "Expectation of energy: -1.826623644407344\n",
      "Expectation of energy: -1.827672846005034\n",
      "Expectation of energy: -1.8286970344156346\n",
      "Expectation of energy: -1.8296970745633039\n",
      "Expectation of energy: -1.8306736130123746\n",
      "Expectation of energy: -1.8316279059184533\n",
      "Expectation of energy: -1.8325600031308331\n",
      "Expectation of energy: -1.8334701107569\n",
      "Expectation of energy: -1.8343597108248098\n",
      "Expectation of energy: -1.835228860710742\n",
      "Expectation of energy: -1.8360780770628609\n",
      "Expectation of energy: -1.8369080639366242\n",
      "Expectation of energy: -1.8377194101268466\n",
      "Expectation of energy: -1.8385121571643477\n",
      "Expectation of energy: -1.839287746366928\n",
      "Expectation of energy: -1.8400452195649581\n",
      "Expectation of energy: -1.8407862108857265\n",
      "Expectation of energy: -1.8415108212650382\n",
      "Expectation of energy: -1.8422191268123873\n",
      "Expectation of energy: -1.842912218641005\n",
      "Expectation of energy: -1.843589837129386\n",
      "Expectation of energy: -1.8442525461550558\n",
      "Expectation of energy: -1.8449012251925572\n",
      "Expectation of energy: -1.8455352366198263\n",
      "Expectation of energy: -1.8461557895161853\n",
      "Expectation of energy: -1.8467627958509403\n",
      "Expectation of energy: -1.8473564416067503\n",
      "Expectation of energy: -1.8479375633844264\n",
      "Expectation of energy: -1.8485057965199916\n",
      "Expectation of energy: -1.8490621453479181\n",
      "Expectation of energy: -1.8496065068888792\n",
      "Expectation of energy: -1.8501391349165532\n",
      "Expectation of energy: -1.850660377615957\n",
      "Expectation of energy: -1.8511703488993607\n",
      "Expectation of energy: -1.8516692967983088\n",
      "Expectation of energy: -1.8521575221191722\n",
      "Expectation of energy: -1.852635510121204\n",
      "Expectation of energy: -1.8531035138774734\n",
      "Expectation of energy: -1.8535609310452\n",
      "Expectation of energy: -1.8540091911125138\n",
      "Expectation of energy: -1.854447580289549\n",
      "Expectation of energy: -1.8548766488154402\n",
      "Expectation of energy: -1.8552966706316276\n",
      "Expectation of energy: -1.8557072927267524\n",
      "Expectation of energy: -1.8561093597275444\n",
      "Expectation of energy: -1.8565031928483973\n",
      "Expectation of energy: -1.8568882184730278\n",
      "Expectation of energy: -1.8572652794722904\n",
      "Expectation of energy: -1.8576336223059016\n",
      "Expectation of energy: -1.857994803989237\n",
      "Expectation of energy: -1.8583482620165608\n",
      "Expectation of energy: -1.8586940939268743\n",
      "Expectation of energy: -1.8590323445415504\n",
      "Expectation of energy: -1.8593632780779858\n",
      "Expectation of energy: -1.8596873680752544\n",
      "Expectation of energy: -1.8600042583654561\n",
      "Epoch 3, LR: 0.00499506682107068\n",
      "Expectation of energy: -1.8600042583654561\n",
      "Expectation of energy: -1.8603142172677518\n",
      "Expectation of energy: -1.860617707234636\n",
      "Expectation of energy: -1.8609146078146557\n",
      "Expectation of energy: -1.8612047972227\n",
      "Expectation of energy: -1.861489111623201\n",
      "Expectation of energy: -1.8617669604698208\n",
      "Expectation of energy: -1.8620390353977796\n",
      "Expectation of energy: -1.8623051691146995\n",
      "Expectation of energy: -1.862565688347211\n",
      "Expectation of energy: -1.8628202542502552\n",
      "Expectation of energy: -1.8630695684233585\n",
      "Expectation of energy: -1.8633132516951922\n",
      "Expectation of energy: -1.8635518523975556\n",
      "Expectation of energy: -1.8637851993068324\n",
      "Expectation of energy: -1.8640135702935257\n",
      "Expectation of energy: -1.8642365658098885\n",
      "Expectation of energy: -1.8644549037971103\n",
      "Expectation of energy: -1.8646687229796104\n",
      "Expectation of energy: -1.864877684421353\n",
      "Expectation of energy: -1.8650817248095304\n",
      "Expectation of energy: -1.8652815026994112\n",
      "Expectation of energy: -1.8654769467233923\n",
      "Expectation of energy: -1.8656678268330742\n",
      "Expectation of energy: -1.8658546957856224\n",
      "Expectation of energy: -1.8660376180907576\n",
      "Expectation of energy: -1.866216213133451\n",
      "Expectation of energy: -1.866390751170692\n",
      "Expectation of energy: -1.8665617093688085\n",
      "Expectation of energy: -1.86672876160534\n",
      "Expectation of energy: -1.8668919349778421\n",
      "Expectation of energy: -1.8670513010336973\n",
      "Expectation of energy: -1.8672075259416876\n",
      "Expectation of energy: -1.8673598074301596\n",
      "Expectation of energy: -1.867508964918732\n",
      "Expectation of energy: -1.8676544929727523\n",
      "Expectation of energy: -1.8677965953155866\n",
      "Expectation of energy: -1.8679359598789396\n",
      "Expectation of energy: -1.8680717979680017\n",
      "Expectation of energy: -1.8682042940612478\n",
      "Expectation of energy: -1.8683345937557774\n",
      "Expectation of energy: -1.8684613316493213\n",
      "Expectation of energy: -1.8685852106391083\n",
      "Expectation of energy: -1.868706061337477\n",
      "Expectation of energy: -1.8688243485956573\n",
      "Expectation of energy: -1.8689398058035107\n",
      "Expectation of energy: -1.8690525834900675\n",
      "Expectation of energy: -1.8691624684803183\n",
      "Expectation of energy: -1.869270188715714\n",
      "Expectation of energy: -1.869375385808313\n",
      "Expectation of energy: -1.8694779908720567\n",
      "Expectation of energy: -1.8695781291362437\n",
      "Expectation of energy: -1.8696761419427235\n",
      "Expectation of energy: -1.8697714187081478\n",
      "Expectation of energy: -1.8698645602351573\n",
      "Expectation of energy: -1.8699556702287476\n",
      "Expectation of energy: -1.8700443964500817\n",
      "Expectation of energy: -1.8701308651455928\n",
      "Expectation of energy: -1.8702161275244586\n",
      "Expectation of energy: -1.870298457945267\n",
      "Expectation of energy: -1.8703787897589728\n",
      "Expectation of energy: -1.8704577178424848\n",
      "Expectation of energy: -1.8705343695584336\n",
      "Expectation of energy: -1.870609235947981\n",
      "Expectation of energy: -1.8706823050632249\n",
      "Expectation of energy: -1.8707531503177026\n",
      "Expectation of energy: -1.8708231370286865\n",
      "Expectation of energy: -1.870890831030976\n",
      "Expectation of energy: -1.8709570938094435\n",
      "Expectation of energy: -1.8710214026011474\n",
      "Expectation of energy: -1.8710843600539504\n"
     ]
    },
    {
     "output_type": "error",
     "ename": "KeyboardInterrupt",
     "evalue": "ignored",
     "traceback": [
      "\u001B[0;31m---------------------------------------------------------------------------\u001B[0m",
      "\u001B[0;31mKeyboardInterrupt\u001B[0m                         Traceback (most recent call last)",
      "\u001B[0;32m<ipython-input-15-263240bbee7e>\u001B[0m in \u001B[0;36m<module>\u001B[0;34m\u001B[0m\n\u001B[0;32m----> 1\u001B[0;31m \u001B[0mmain\u001B[0m\u001B[0;34m(\u001B[0m\u001B[0;34m)\u001B[0m\u001B[0;34m\u001B[0m\u001B[0;34m\u001B[0m\u001B[0m\n\u001B[0m",
      "\u001B[0;32m<ipython-input-14-ee35acbb01d2>\u001B[0m in \u001B[0;36mmain\u001B[0;34m()\u001B[0m\n\u001B[1;32m     67\u001B[0m         \u001B[0;31m# train\u001B[0m\u001B[0;34m\u001B[0m\u001B[0;34m\u001B[0m\u001B[0;34m\u001B[0m\u001B[0m\n\u001B[1;32m     68\u001B[0m         \u001B[0mprint\u001B[0m\u001B[0;34m(\u001B[0m\u001B[0;34mf\"Epoch {epoch}, LR: {optimizer.param_groups[0]['lr']}\"\u001B[0m\u001B[0;34m)\u001B[0m\u001B[0;34m\u001B[0m\u001B[0;34m\u001B[0m\u001B[0m\n\u001B[0;32m---> 69\u001B[0;31m         \u001B[0mtrain\u001B[0m\u001B[0;34m(\u001B[0m\u001B[0mdataflow\u001B[0m\u001B[0;34m,\u001B[0m \u001B[0mq_device\u001B[0m\u001B[0;34m,\u001B[0m \u001B[0mmodel\u001B[0m\u001B[0;34m,\u001B[0m \u001B[0mdevice\u001B[0m\u001B[0;34m,\u001B[0m \u001B[0moptimizer\u001B[0m\u001B[0;34m)\u001B[0m\u001B[0;34m\u001B[0m\u001B[0;34m\u001B[0m\u001B[0m\n\u001B[0m\u001B[1;32m     70\u001B[0m \u001B[0;34m\u001B[0m\u001B[0m\n\u001B[1;32m     71\u001B[0m         \u001B[0;31m# valid\u001B[0m\u001B[0;34m\u001B[0m\u001B[0;34m\u001B[0m\u001B[0;34m\u001B[0m\u001B[0m\n",
      "\u001B[0;32m<ipython-input-11-8bc26e701b2e>\u001B[0m in \u001B[0;36mtrain\u001B[0;34m(dataflow, q_device, model, device, optimizer)\u001B[0m\n\u001B[1;32m     57\u001B[0m \u001B[0;34m\u001B[0m\u001B[0m\n\u001B[1;32m     58\u001B[0m         \u001B[0moptimizer\u001B[0m\u001B[0;34m.\u001B[0m\u001B[0mzero_grad\u001B[0m\u001B[0;34m(\u001B[0m\u001B[0;34m)\u001B[0m\u001B[0;34m\u001B[0m\u001B[0;34m\u001B[0m\u001B[0m\n\u001B[0;32m---> 59\u001B[0;31m         \u001B[0mloss\u001B[0m\u001B[0;34m.\u001B[0m\u001B[0mbackward\u001B[0m\u001B[0;34m(\u001B[0m\u001B[0;34m)\u001B[0m\u001B[0;34m\u001B[0m\u001B[0;34m\u001B[0m\u001B[0m\n\u001B[0m\u001B[1;32m     60\u001B[0m         \u001B[0moptimizer\u001B[0m\u001B[0;34m.\u001B[0m\u001B[0mstep\u001B[0m\u001B[0;34m(\u001B[0m\u001B[0;34m)\u001B[0m\u001B[0;34m\u001B[0m\u001B[0;34m\u001B[0m\u001B[0m\n\u001B[1;32m     61\u001B[0m         \u001B[0mprint\u001B[0m\u001B[0;34m(\u001B[0m\u001B[0;34mf\"Expectation of energy: {loss.item()}\"\u001B[0m\u001B[0;34m)\u001B[0m\u001B[0;34m\u001B[0m\u001B[0;34m\u001B[0m\u001B[0m\n",
      "\u001B[0;32m/usr/local/lib/python3.7/dist-packages/torch/_tensor.py\u001B[0m in \u001B[0;36mbackward\u001B[0;34m(self, gradient, retain_graph, create_graph, inputs)\u001B[0m\n\u001B[1;32m    394\u001B[0m                 \u001B[0mcreate_graph\u001B[0m\u001B[0;34m=\u001B[0m\u001B[0mcreate_graph\u001B[0m\u001B[0;34m,\u001B[0m\u001B[0;34m\u001B[0m\u001B[0;34m\u001B[0m\u001B[0m\n\u001B[1;32m    395\u001B[0m                 inputs=inputs)\n\u001B[0;32m--> 396\u001B[0;31m         \u001B[0mtorch\u001B[0m\u001B[0;34m.\u001B[0m\u001B[0mautograd\u001B[0m\u001B[0;34m.\u001B[0m\u001B[0mbackward\u001B[0m\u001B[0;34m(\u001B[0m\u001B[0mself\u001B[0m\u001B[0;34m,\u001B[0m \u001B[0mgradient\u001B[0m\u001B[0;34m,\u001B[0m \u001B[0mretain_graph\u001B[0m\u001B[0;34m,\u001B[0m \u001B[0mcreate_graph\u001B[0m\u001B[0;34m,\u001B[0m \u001B[0minputs\u001B[0m\u001B[0;34m=\u001B[0m\u001B[0minputs\u001B[0m\u001B[0;34m)\u001B[0m\u001B[0;34m\u001B[0m\u001B[0;34m\u001B[0m\u001B[0m\n\u001B[0m\u001B[1;32m    397\u001B[0m \u001B[0;34m\u001B[0m\u001B[0m\n\u001B[1;32m    398\u001B[0m     \u001B[0;32mdef\u001B[0m \u001B[0mregister_hook\u001B[0m\u001B[0;34m(\u001B[0m\u001B[0mself\u001B[0m\u001B[0;34m,\u001B[0m \u001B[0mhook\u001B[0m\u001B[0;34m)\u001B[0m\u001B[0;34m:\u001B[0m\u001B[0;34m\u001B[0m\u001B[0;34m\u001B[0m\u001B[0m\n",
      "\u001B[0;32m/usr/local/lib/python3.7/dist-packages/torch/autograd/__init__.py\u001B[0m in \u001B[0;36mbackward\u001B[0;34m(tensors, grad_tensors, retain_graph, create_graph, grad_variables, inputs)\u001B[0m\n\u001B[1;32m    173\u001B[0m     Variable._execution_engine.run_backward(  # Calls into the C++ engine to run the backward pass\n\u001B[1;32m    174\u001B[0m         \u001B[0mtensors\u001B[0m\u001B[0;34m,\u001B[0m \u001B[0mgrad_tensors_\u001B[0m\u001B[0;34m,\u001B[0m \u001B[0mretain_graph\u001B[0m\u001B[0;34m,\u001B[0m \u001B[0mcreate_graph\u001B[0m\u001B[0;34m,\u001B[0m \u001B[0minputs\u001B[0m\u001B[0;34m,\u001B[0m\u001B[0;34m\u001B[0m\u001B[0;34m\u001B[0m\u001B[0m\n\u001B[0;32m--> 175\u001B[0;31m         allow_unreachable=True, accumulate_grad=True)  # Calls into the C++ engine to run the backward pass\n\u001B[0m\u001B[1;32m    176\u001B[0m \u001B[0;34m\u001B[0m\u001B[0m\n\u001B[1;32m    177\u001B[0m def grad(\n",
      "\u001B[0;31mKeyboardInterrupt\u001B[0m: "
     ]
    }
   ]
  },
  {
   "cell_type": "markdown",
   "source": [
    "## 1.5 TorchQuantum for QNN circuit"
   ],
   "metadata": {
    "id": "4k_7FrcQBCtl",
    "pycharm": {
     "name": "#%% md\n"
    }
   }
  },
  {
   "cell_type": "code",
   "source": [
    "import torch\n",
    "import torch.nn.functional as F\n",
    "import torch.optim as optim\n",
    "import argparse\n",
    "\n",
    "import torchquantum as tq\n",
    "import torchquantum.functional as tqf\n",
    "\n",
    "from torchquantum.plugin import (tq2qiskit_expand_params,\n",
    "                                  tq2qiskit,\n",
    "                                  tq2qiskit_measurement,\n",
    "                                  qiskit_assemble_circs)\n",
    "\n",
    "from torchquantum.datasets import MNIST\n",
    "from torch.optim.lr_scheduler import CosineAnnealingLR\n",
    "\n",
    "import random\n",
    "import numpy as np"
   ],
   "metadata": {
    "id": "n1U42zhEA6w3",
    "pycharm": {
     "name": "#%%\n"
    }
   },
   "execution_count": 47,
   "outputs": []
  },
  {
   "cell_type": "code",
   "source": [
    "class QFCModel(tq.QuantumModule):\n",
    "    \"\"\"4-wire quantum classifier: encode pooled image pixels, run a\n",
    "    trainable quantum layer, measure PauliZ on all wires, and map the\n",
    "    expectations to 2 class logits.\n",
    "    \"\"\"\n",
    "    class QLayer(tq.QuantumModule):\n",
    "        def __init__(self):\n",
    "            super().__init__()\n",
    "            self.n_wires = 4\n",
    "            self.random_layer = tq.RandomLayer(n_ops=50,\n",
    "                                               wires=list(range(self.n_wires)))\n",
    "\n",
    "            # gates with trainable parameters\n",
    "            self.rx0 = tq.RX(has_params=True, trainable=True)\n",
    "            self.ry0 = tq.RY(has_params=True, trainable=True)\n",
    "            self.rz0 = tq.RZ(has_params=True, trainable=True)\n",
    "            self.crx0 = tq.CRX(has_params=True, trainable=True)\n",
    "\n",
    "        @tq.static_support\n",
    "        def forward(self, q_device: tq.QuantumDevice):\n",
    "            \"\"\"\n",
    "            1. To convert tq QuantumModule to qiskit or run in the static\n",
    "            model, need to:\n",
    "                (1) add @tq.static_support before the forward\n",
    "                (2) make sure to add\n",
    "                    static=self.static_mode and\n",
    "                    parent_graph=self.graph\n",
    "                    to all the tqf functions, such as tqf.hadamard below\n",
    "            \"\"\"\n",
    "            self.q_device = q_device\n",
    "\n",
    "            self.random_layer(self.q_device)\n",
    "\n",
    "            # some trainable gates (instantiated ahead of time)\n",
    "            self.rx0(self.q_device, wires=0)\n",
    "            self.ry0(self.q_device, wires=1)\n",
    "            self.rz0(self.q_device, wires=3)\n",
    "            self.crx0(self.q_device, wires=[0, 2])\n",
    "\n",
    "            # add some more non-parameterized gates (add on-the-fly)\n",
    "            tqf.hadamard(self.q_device, wires=3, static=self.static_mode,\n",
    "                         parent_graph=self.graph)\n",
    "            tqf.sx(self.q_device, wires=2, static=self.static_mode,\n",
    "                   parent_graph=self.graph)\n",
    "            tqf.cnot(self.q_device, wires=[3, 0], static=self.static_mode,\n",
    "                     parent_graph=self.graph)\n",
    "            tqf.rx(self.q_device, wires=1, params=torch.tensor([0.1]),\n",
    "                   static=self.static_mode, parent_graph=self.graph)\n",
    "\n",
    "    def __init__(self):\n",
    "        super().__init__()\n",
    "        self.n_wires = 4\n",
    "        self.q_device = tq.QuantumDevice(n_wires=self.n_wires)\n",
    "        # Encode the 16 pooled pixel values with the 4x4 ry-z-x-y scheme.\n",
    "        self.encoder = tq.GeneralEncoder(\n",
    "            tq.encoder_op_list_name_dict['4x4_ryzxy'])\n",
    "\n",
    "        self.q_layer = self.QLayer()\n",
    "        self.measure = tq.MeasureAll(tq.PauliZ)\n",
    "\n",
    "    def forward(self, x, use_qiskit=False):\n",
    "        \"\"\"Return (bsz, 2) class log-probabilities for image batch `x`.\n",
    "\n",
    "        Each image is average-pooled down to 16 values to fit the 4x4\n",
    "        encoder. When `use_qiskit` is True the circuits run through the\n",
    "        attached Qiskit processor instead of the tq simulator.\n",
    "        \"\"\"\n",
    "        bsz = x.shape[0]\n",
    "        x = F.avg_pool2d(x, 6).view(bsz, 16)\n",
    "        devi = x.device\n",
    "\n",
    "        if use_qiskit:\n",
    "            # Assemble encoder + trainable layer + measurement into one\n",
    "            # qiskit circuit per sample and execute them on the processor.\n",
    "            encoder_circs = tq2qiskit_expand_params(self.q_device, x,\n",
    "                                                    self.encoder.func_list)\n",
    "            q_layer_circ = tq2qiskit(self.q_device, self.q_layer)\n",
    "            measurement_circ = tq2qiskit_measurement(self.q_device,\n",
    "                                                     self.measure)\n",
    "            assembled_circs = qiskit_assemble_circs(encoder_circs,\n",
    "                                                    q_layer_circ,\n",
    "                                                    measurement_circ)\n",
    "            x = self.qiskit_processor.process_ready_circs(\n",
    "                self.q_device, assembled_circs).to(devi)\n",
    "\n",
    "        else:\n",
    "            self.encoder(self.q_device, x)\n",
    "            self.q_layer(self.q_device)\n",
    "            x = self.measure(self.q_device)\n",
    "\n",
    "        # Sum pairs of wire expectations into 2 logits. Note: the former\n",
    "        # trailing .squeeze() dropped the batch dim when bsz == 1, which\n",
    "        # broke log_softmax(..., dim=1); sum(-1) already gives (bsz, 2).\n",
    "        x = x.reshape(bsz, 2, 2).sum(-1)\n",
    "        x = F.log_softmax(x, dim=1)\n",
    "\n",
    "        return x\n",
    "\n",
    "\n",
    "def train(dataflow, model, device, optimizer):\n",
    "    \"\"\"Run one epoch of NLL-loss training over dataflow['train'].\"\"\"\n",
    "    for feed_dict in dataflow['train']:\n",
    "        inputs = feed_dict['image'].to(device)\n",
    "        targets = feed_dict['digit'].to(device)\n",
    "\n",
    "        outputs = model(inputs)\n",
    "        loss = F.nll_loss(outputs, targets)\n",
    "        optimizer.zero_grad()\n",
    "        loss.backward()\n",
    "        optimizer.step()\n",
    "        print(f\"loss: {loss.item()}\", end='\\r')\n",
    "\n",
    "\n",
    "def valid_test(dataflow, split, model, device, qiskit=False):\n",
    "    \"\"\"Evaluate the model on `split` and print accuracy and NLL loss.\"\"\"\n",
    "    target_all = []\n",
    "    output_all = []\n",
    "    with torch.no_grad():\n",
    "        for feed_dict in dataflow[split]:\n",
    "            inputs = feed_dict['image'].to(device)\n",
    "            targets = feed_dict['digit'].to(device)\n",
    "\n",
    "            outputs = model(inputs, use_qiskit=qiskit)\n",
    "\n",
    "            target_all.append(targets)\n",
    "            output_all.append(outputs)\n",
    "        target_all = torch.cat(target_all, dim=0)\n",
    "        output_all = torch.cat(output_all, dim=0)\n",
    "\n",
    "    # Top-1 prediction vs. target.\n",
    "    _, indices = output_all.topk(1, dim=1)\n",
    "    masks = indices.eq(target_all.view(-1, 1).expand_as(indices))\n",
    "    size = target_all.shape[0]\n",
    "    corrects = masks.sum().item()\n",
    "    accuracy = corrects / size\n",
    "    loss = F.nll_loss(output_all, target_all).item()\n",
    "\n",
    "    print(f\"{split} set accuracy: {accuracy}\")\n",
    "    print(f\"{split} set loss: {loss}\")\n"
   ],
   "metadata": {
    "id": "srvo_I_sDWv5",
    "pycharm": {
     "name": "#%%\n"
    }
   },
   "execution_count": 49,
   "outputs": []
  },
  {
   "cell_type": "code",
   "source": [
    "def main():\n",
    "    \"\"\"Train QFCModel on MNIST digits {3, 6}, then evaluate.\n",
    "\n",
    "    Trains with Adam + cosine LR annealing, validates each epoch, tests\n",
    "    on the tq simulator, and — if qiskit is installed — re-tests on the\n",
    "    Qiskit simulator and a real IBM Q backend.\n",
    "    \"\"\"\n",
    "    n_epochs = 5\n",
    "\n",
    "    # Seed all RNGs for reproducibility.\n",
    "    seed = 0\n",
    "    random.seed(seed)\n",
    "    np.random.seed(seed)\n",
    "    torch.manual_seed(seed)\n",
    "\n",
    "    # Binary-classification subset of MNIST; 75 test images.\n",
    "    dataset = MNIST(\n",
    "        root='./mnist_data',\n",
    "        train_valid_split_ratio=[0.9, 0.1],\n",
    "        digits_of_interest=[3, 6],\n",
    "        n_test_samples=75,\n",
    "    )\n",
    "    dataflow = dict()\n",
    "\n",
    "    for split in dataset:\n",
    "        sampler = torch.utils.data.RandomSampler(dataset[split])\n",
    "        dataflow[split] = torch.utils.data.DataLoader(\n",
    "            dataset[split],\n",
    "            batch_size=256,\n",
    "            sampler=sampler,\n",
    "            num_workers=8,\n",
    "            pin_memory=True)\n",
    "\n",
    "    use_cuda = torch.cuda.is_available()\n",
    "    device = torch.device(\"cuda\" if use_cuda else \"cpu\")\n",
    "\n",
    "    model = QFCModel().to(device)\n",
    "\n",
    "    optimizer = optim.Adam(model.parameters(), lr=5e-3, weight_decay=1e-4)\n",
    "    scheduler = CosineAnnealingLR(optimizer, T_max=n_epochs)\n",
    "\n",
    "    for epoch in range(1, n_epochs + 1):\n",
    "        # train\n",
    "        print(f\"Epoch {epoch}:\")\n",
    "        train(dataflow, model, device, optimizer)\n",
    "        print(optimizer.param_groups[0]['lr'])\n",
    "\n",
    "        # valid\n",
    "        valid_test(dataflow, 'valid', model, device)\n",
    "        scheduler.step()\n",
    "\n",
    "    # test\n",
    "    valid_test(dataflow, 'test', model, device, qiskit=False)\n",
    "\n",
    "    # run on Qiskit simulator and real Quantum Computers\n",
    "    try:\n",
    "        from qiskit import IBMQ\n",
    "        from torchquantum.plugin import QiskitProcessor\n",
    "\n",
    "        # firstly perform simulate\n",
    "        print(\"\\nTest with Qiskit Simulator\")\n",
    "        processor_simulation = QiskitProcessor(use_real_qc=False)\n",
    "        model.set_qiskit_processor(processor_simulation)\n",
    "        valid_test(dataflow, 'test', model, device, qiskit=True)\n",
    "\n",
    "        # then try to run on REAL QC\n",
    "        backend_name = 'ibmq_lima'\n",
    "        print(f\"\\nTest on Real Quantum Computer {backend_name}\")\n",
    "        # Please specify your own hub group and project if you have the\n",
    "        # IBMQ premium plan to access more machines.\n",
    "        processor_real_qc = QiskitProcessor(use_real_qc=True,\n",
    "                                            backend_name=backend_name,\n",
    "                                            hub='ibm-q',\n",
    "                                            group='open',\n",
    "                                            project='main',\n",
    "                                            )\n",
    "        model.set_qiskit_processor(processor_real_qc)\n",
    "        valid_test(dataflow, 'test', model, device, qiskit=True)\n",
    "    except ImportError:\n",
    "        print(\"Please install qiskit, create an IBM Q Experience Account and \"\n",
    "              \"save the account token according to the instruction at \"\n",
    "              \"'https://github.com/Qiskit/qiskit-ibmq-provider', \"\n",
    "              \"then try again.\")"
   ],
   "metadata": {
    "id": "oBmCC02LDl25",
    "pycharm": {
     "name": "#%%\n"
    }
   },
   "execution_count": 52,
   "outputs": []
  },
  {
   "cell_type": "code",
   "source": [
    "# Run the full pipeline: train, validate each epoch, then evaluate on the\n",
    "# test set (and on Qiskit backends when qiskit is installed).\n",
    "main()"
   ],
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/",
     "height": 416
    },
    "id": "-MLaB9HTEkG_",
    "outputId": "3358a3f8-ce09-4ce1-cff6-2064f992f99b",
    "pycharm": {
     "name": "#%%\n"
    }
   },
   "execution_count": 53,
   "outputs": [
    {
     "output_type": "stream",
     "name": "stderr",
     "text": [
      "[2022-09-18 05:29:24.683] Only use the front 75 images as TEST set.\n"
     ]
    },
    {
     "output_type": "stream",
     "name": "stdout",
     "text": [
      "Epoch 1:\n",
      "0.005\n",
      "valid set accuracy: 0.700414937759336\n",
      "valid set loss: 0.6310521364212036\n",
      "Epoch 2:\n"
     ]
    },
    {
     "output_type": "error",
     "ename": "KeyboardInterrupt",
     "evalue": "ignored",
     "traceback": [
      "\u001B[0;31m---------------------------------------------------------------------------\u001B[0m",
      "\u001B[0;31mKeyboardInterrupt\u001B[0m                         Traceback (most recent call last)",
      "\u001B[0;32m<ipython-input-53-263240bbee7e>\u001B[0m in \u001B[0;36m<module>\u001B[0;34m\u001B[0m\n\u001B[0;32m----> 1\u001B[0;31m \u001B[0mmain\u001B[0m\u001B[0;34m(\u001B[0m\u001B[0;34m)\u001B[0m\u001B[0;34m\u001B[0m\u001B[0;34m\u001B[0m\u001B[0m\n\u001B[0m",
      "\u001B[0;32m<ipython-input-52-c815ef425ce3>\u001B[0m in \u001B[0;36mmain\u001B[0;34m()\u001B[0m\n\u001B[1;32m     49\u001B[0m         \u001B[0;31m# train\u001B[0m\u001B[0;34m\u001B[0m\u001B[0;34m\u001B[0m\u001B[0;34m\u001B[0m\u001B[0m\n\u001B[1;32m     50\u001B[0m         \u001B[0mprint\u001B[0m\u001B[0;34m(\u001B[0m\u001B[0;34mf\"Epoch {epoch}:\"\u001B[0m\u001B[0;34m)\u001B[0m\u001B[0;34m\u001B[0m\u001B[0;34m\u001B[0m\u001B[0m\n\u001B[0;32m---> 51\u001B[0;31m         \u001B[0mtrain\u001B[0m\u001B[0;34m(\u001B[0m\u001B[0mdataflow\u001B[0m\u001B[0;34m,\u001B[0m \u001B[0mmodel\u001B[0m\u001B[0;34m,\u001B[0m \u001B[0mdevice\u001B[0m\u001B[0;34m,\u001B[0m \u001B[0moptimizer\u001B[0m\u001B[0;34m)\u001B[0m\u001B[0;34m\u001B[0m\u001B[0;34m\u001B[0m\u001B[0m\n\u001B[0m\u001B[1;32m     52\u001B[0m         \u001B[0mprint\u001B[0m\u001B[0;34m(\u001B[0m\u001B[0moptimizer\u001B[0m\u001B[0;34m.\u001B[0m\u001B[0mparam_groups\u001B[0m\u001B[0;34m[\u001B[0m\u001B[0;36m0\u001B[0m\u001B[0;34m]\u001B[0m\u001B[0;34m[\u001B[0m\u001B[0;34m'lr'\u001B[0m\u001B[0;34m]\u001B[0m\u001B[0;34m)\u001B[0m\u001B[0;34m\u001B[0m\u001B[0;34m\u001B[0m\u001B[0m\n\u001B[1;32m     53\u001B[0m \u001B[0;34m\u001B[0m\u001B[0m\n",
      "\u001B[0;32m<ipython-input-49-7f3a43b36b13>\u001B[0m in \u001B[0;36mtrain\u001B[0;34m(dataflow, model, device, optimizer)\u001B[0m\n\u001B[1;32m     91\u001B[0m         \u001B[0mtargets\u001B[0m \u001B[0;34m=\u001B[0m \u001B[0mfeed_dict\u001B[0m\u001B[0;34m[\u001B[0m\u001B[0;34m'digit'\u001B[0m\u001B[0;34m]\u001B[0m\u001B[0;34m.\u001B[0m\u001B[0mto\u001B[0m\u001B[0;34m(\u001B[0m\u001B[0mdevice\u001B[0m\u001B[0;34m)\u001B[0m\u001B[0;34m\u001B[0m\u001B[0;34m\u001B[0m\u001B[0m\n\u001B[1;32m     92\u001B[0m \u001B[0;34m\u001B[0m\u001B[0m\n\u001B[0;32m---> 93\u001B[0;31m         \u001B[0moutputs\u001B[0m \u001B[0;34m=\u001B[0m \u001B[0mmodel\u001B[0m\u001B[0;34m(\u001B[0m\u001B[0minputs\u001B[0m\u001B[0;34m)\u001B[0m\u001B[0;34m\u001B[0m\u001B[0;34m\u001B[0m\u001B[0m\n\u001B[0m\u001B[1;32m     94\u001B[0m         \u001B[0mloss\u001B[0m \u001B[0;34m=\u001B[0m \u001B[0mF\u001B[0m\u001B[0;34m.\u001B[0m\u001B[0mnll_loss\u001B[0m\u001B[0;34m(\u001B[0m\u001B[0moutputs\u001B[0m\u001B[0;34m,\u001B[0m \u001B[0mtargets\u001B[0m\u001B[0;34m)\u001B[0m\u001B[0;34m\u001B[0m\u001B[0;34m\u001B[0m\u001B[0m\n\u001B[1;32m     95\u001B[0m         \u001B[0moptimizer\u001B[0m\u001B[0;34m.\u001B[0m\u001B[0mzero_grad\u001B[0m\u001B[0;34m(\u001B[0m\u001B[0;34m)\u001B[0m\u001B[0;34m\u001B[0m\u001B[0;34m\u001B[0m\u001B[0m\n",
      "\u001B[0;32m/usr/local/lib/python3.7/dist-packages/torch/nn/modules/module.py\u001B[0m in \u001B[0;36m_call_impl\u001B[0;34m(self, *input, **kwargs)\u001B[0m\n\u001B[1;32m   1128\u001B[0m         if not (self._backward_hooks or self._forward_hooks or self._forward_pre_hooks or _global_backward_hooks\n\u001B[1;32m   1129\u001B[0m                 or _global_forward_hooks or _global_forward_pre_hooks):\n\u001B[0;32m-> 1130\u001B[0;31m             \u001B[0;32mreturn\u001B[0m \u001B[0mforward_call\u001B[0m\u001B[0;34m(\u001B[0m\u001B[0;34m*\u001B[0m\u001B[0minput\u001B[0m\u001B[0;34m,\u001B[0m \u001B[0;34m**\u001B[0m\u001B[0mkwargs\u001B[0m\u001B[0;34m)\u001B[0m\u001B[0;34m\u001B[0m\u001B[0;34m\u001B[0m\u001B[0m\n\u001B[0m\u001B[1;32m   1131\u001B[0m         \u001B[0;31m# Do not call functions when jit is used\u001B[0m\u001B[0;34m\u001B[0m\u001B[0;34m\u001B[0m\u001B[0;34m\u001B[0m\u001B[0m\n\u001B[1;32m   1132\u001B[0m         \u001B[0mfull_backward_hooks\u001B[0m\u001B[0;34m,\u001B[0m \u001B[0mnon_full_backward_hooks\u001B[0m \u001B[0;34m=\u001B[0m \u001B[0;34m[\u001B[0m\u001B[0;34m]\u001B[0m\u001B[0;34m,\u001B[0m \u001B[0;34m[\u001B[0m\u001B[0;34m]\u001B[0m\u001B[0;34m\u001B[0m\u001B[0;34m\u001B[0m\u001B[0m\n",
      "\u001B[0;32m<ipython-input-49-7f3a43b36b13>\u001B[0m in \u001B[0;36mforward\u001B[0;34m(self, x, use_qiskit)\u001B[0m\n\u001B[1;32m     76\u001B[0m \u001B[0;34m\u001B[0m\u001B[0m\n\u001B[1;32m     77\u001B[0m         \u001B[0;32melse\u001B[0m\u001B[0;34m:\u001B[0m\u001B[0;34m\u001B[0m\u001B[0;34m\u001B[0m\u001B[0m\n\u001B[0;32m---> 78\u001B[0;31m             \u001B[0mself\u001B[0m\u001B[0;34m.\u001B[0m\u001B[0mencoder\u001B[0m\u001B[0;34m(\u001B[0m\u001B[0mself\u001B[0m\u001B[0;34m.\u001B[0m\u001B[0mq_device\u001B[0m\u001B[0;34m,\u001B[0m \u001B[0mx\u001B[0m\u001B[0;34m)\u001B[0m\u001B[0;34m\u001B[0m\u001B[0;34m\u001B[0m\u001B[0m\n\u001B[0m\u001B[1;32m     79\u001B[0m             \u001B[0mself\u001B[0m\u001B[0;34m.\u001B[0m\u001B[0mq_layer\u001B[0m\u001B[0;34m(\u001B[0m\u001B[0mself\u001B[0m\u001B[0;34m.\u001B[0m\u001B[0mq_device\u001B[0m\u001B[0;34m)\u001B[0m\u001B[0;34m\u001B[0m\u001B[0;34m\u001B[0m\u001B[0m\n\u001B[1;32m     80\u001B[0m             \u001B[0mx\u001B[0m \u001B[0;34m=\u001B[0m \u001B[0mself\u001B[0m\u001B[0;34m.\u001B[0m\u001B[0mmeasure\u001B[0m\u001B[0;34m(\u001B[0m\u001B[0mself\u001B[0m\u001B[0;34m.\u001B[0m\u001B[0mq_device\u001B[0m\u001B[0;34m)\u001B[0m\u001B[0;34m\u001B[0m\u001B[0;34m\u001B[0m\u001B[0m\n",
      "\u001B[0;32m/usr/local/lib/python3.7/dist-packages/torch/nn/modules/module.py\u001B[0m in \u001B[0;36m_call_impl\u001B[0;34m(self, *input, **kwargs)\u001B[0m\n\u001B[1;32m   1128\u001B[0m         if not (self._backward_hooks or self._forward_hooks or self._forward_pre_hooks or _global_backward_hooks\n\u001B[1;32m   1129\u001B[0m                 or _global_forward_hooks or _global_forward_pre_hooks):\n\u001B[0;32m-> 1130\u001B[0;31m             \u001B[0;32mreturn\u001B[0m \u001B[0mforward_call\u001B[0m\u001B[0;34m(\u001B[0m\u001B[0;34m*\u001B[0m\u001B[0minput\u001B[0m\u001B[0;34m,\u001B[0m \u001B[0;34m**\u001B[0m\u001B[0mkwargs\u001B[0m\u001B[0;34m)\u001B[0m\u001B[0;34m\u001B[0m\u001B[0;34m\u001B[0m\u001B[0m\n\u001B[0m\u001B[1;32m   1131\u001B[0m         \u001B[0;31m# Do not call functions when jit is used\u001B[0m\u001B[0;34m\u001B[0m\u001B[0;34m\u001B[0m\u001B[0;34m\u001B[0m\u001B[0m\n\u001B[1;32m   1132\u001B[0m         \u001B[0mfull_backward_hooks\u001B[0m\u001B[0;34m,\u001B[0m \u001B[0mnon_full_backward_hooks\u001B[0m \u001B[0;34m=\u001B[0m \u001B[0;34m[\u001B[0m\u001B[0;34m]\u001B[0m\u001B[0;34m,\u001B[0m \u001B[0;34m[\u001B[0m\u001B[0;34m]\u001B[0m\u001B[0;34m\u001B[0m\u001B[0;34m\u001B[0m\u001B[0m\n",
      "\u001B[0;32m/content/torchquantum/torchquantum/graph.py\u001B[0m in \u001B[0;36mforward_register_graph\u001B[0;34m(*args, **kwargs)\u001B[0m\n\u001B[1;32m     23\u001B[0m         \u001B[0;32mif\u001B[0m \u001B[0margs\u001B[0m\u001B[0;34m[\u001B[0m\u001B[0;36m0\u001B[0m\u001B[0;34m]\u001B[0m\u001B[0;34m.\u001B[0m\u001B[0mstatic_mode\u001B[0m \u001B[0;32mand\u001B[0m \u001B[0margs\u001B[0m\u001B[0;34m[\u001B[0m\u001B[0;36m0\u001B[0m\u001B[0;34m]\u001B[0m\u001B[0;34m.\u001B[0m\u001B[0mparent_graph\u001B[0m \u001B[0;32mis\u001B[0m \u001B[0;32mnot\u001B[0m \u001B[0;32mNone\u001B[0m\u001B[0;34m:\u001B[0m\u001B[0;34m\u001B[0m\u001B[0;34m\u001B[0m\u001B[0m\n\u001B[1;32m     24\u001B[0m             \u001B[0margs\u001B[0m\u001B[0;34m[\u001B[0m\u001B[0;36m0\u001B[0m\u001B[0;34m]\u001B[0m\u001B[0;34m.\u001B[0m\u001B[0mparent_graph\u001B[0m\u001B[0;34m.\u001B[0m\u001B[0madd_op\u001B[0m\u001B[0;34m(\u001B[0m\u001B[0margs\u001B[0m\u001B[0;34m[\u001B[0m\u001B[0;36m0\u001B[0m\u001B[0;34m]\u001B[0m\u001B[0;34m)\u001B[0m\u001B[0;34m\u001B[0m\u001B[0;34m\u001B[0m\u001B[0m\n\u001B[0;32m---> 25\u001B[0;31m         \u001B[0mres\u001B[0m \u001B[0;34m=\u001B[0m \u001B[0mf\u001B[0m\u001B[0;34m(\u001B[0m\u001B[0;34m*\u001B[0m\u001B[0margs\u001B[0m\u001B[0;34m,\u001B[0m \u001B[0;34m**\u001B[0m\u001B[0mkwargs\u001B[0m\u001B[0;34m)\u001B[0m\u001B[0;34m\u001B[0m\u001B[0;34m\u001B[0m\u001B[0m\n\u001B[0m\u001B[1;32m     26\u001B[0m         \u001B[0;32mif\u001B[0m \u001B[0margs\u001B[0m\u001B[0;34m[\u001B[0m\u001B[0;36m0\u001B[0m\u001B[0;34m]\u001B[0m\u001B[0;34m.\u001B[0m\u001B[0mstatic_mode\u001B[0m \u001B[0;32mand\u001B[0m \u001B[0margs\u001B[0m\u001B[0;34m[\u001B[0m\u001B[0;36m0\u001B[0m\u001B[0;34m]\u001B[0m\u001B[0;34m.\u001B[0m\u001B[0mis_graph_top\u001B[0m\u001B[0;34m:\u001B[0m\u001B[0;34m\u001B[0m\u001B[0;34m\u001B[0m\u001B[0m\n\u001B[1;32m     27\u001B[0m             \u001B[0;31m# finish build graph, set flag\u001B[0m\u001B[0;34m\u001B[0m\u001B[0;34m\u001B[0m\u001B[0;34m\u001B[0m\u001B[0m\n",
      "\u001B[0;32m/content/torchquantum/torchquantum/encoding.py\u001B[0m in \u001B[0;36mforward\u001B[0;34m(self, q_device, x)\u001B[0m\n\u001B[1;32m     69\u001B[0m                 \u001B[0mparams\u001B[0m\u001B[0;34m=\u001B[0m\u001B[0mparams\u001B[0m\u001B[0;34m,\u001B[0m\u001B[0;34m\u001B[0m\u001B[0;34m\u001B[0m\u001B[0m\n\u001B[1;32m     70\u001B[0m                 \u001B[0mstatic\u001B[0m\u001B[0;34m=\u001B[0m\u001B[0mself\u001B[0m\u001B[0;34m.\u001B[0m\u001B[0mstatic_mode\u001B[0m\u001B[0;34m,\u001B[0m\u001B[0;34m\u001B[0m\u001B[0;34m\u001B[0m\u001B[0m\n\u001B[0;32m---> 71\u001B[0;31m                 \u001B[0mparent_graph\u001B[0m\u001B[0;34m=\u001B[0m\u001B[0mself\u001B[0m\u001B[0;34m.\u001B[0m\u001B[0mgraph\u001B[0m\u001B[0;34m\u001B[0m\u001B[0;34m\u001B[0m\u001B[0m\n\u001B[0m\u001B[1;32m     72\u001B[0m             )\n\u001B[1;32m     73\u001B[0m \u001B[0;34m\u001B[0m\u001B[0m\n",
      "\u001B[0;32m/content/torchquantum/torchquantum/functional.py\u001B[0m in \u001B[0;36mry\u001B[0;34m(q_device, wires, params, n_wires, static, parent_graph, inverse, comp_method)\u001B[0m\n\u001B[1;32m   1685\u001B[0m         \u001B[0mstatic\u001B[0m\u001B[0;34m=\u001B[0m\u001B[0mstatic\u001B[0m\u001B[0;34m,\u001B[0m\u001B[0;34m\u001B[0m\u001B[0;34m\u001B[0m\u001B[0m\n\u001B[1;32m   1686\u001B[0m         \u001B[0mparent_graph\u001B[0m\u001B[0;34m=\u001B[0m\u001B[0mparent_graph\u001B[0m\u001B[0;34m,\u001B[0m\u001B[0;34m\u001B[0m\u001B[0;34m\u001B[0m\u001B[0m\n\u001B[0;32m-> 1687\u001B[0;31m         \u001B[0minverse\u001B[0m\u001B[0;34m=\u001B[0m\u001B[0minverse\u001B[0m\u001B[0;34m\u001B[0m\u001B[0;34m\u001B[0m\u001B[0m\n\u001B[0m\u001B[1;32m   1688\u001B[0m     )\n\u001B[1;32m   1689\u001B[0m \u001B[0;34m\u001B[0m\u001B[0m\n",
      "\u001B[0;32m/content/torchquantum/torchquantum/functional.py\u001B[0m in \u001B[0;36mgate_wrapper\u001B[0;34m(name, mat, method, q_device, wires, params, n_wires, static, parent_graph, inverse)\u001B[0m\n\u001B[1;32m    260\u001B[0m                     name in ['qubitunitary', 'qubitunitaryfast',\n\u001B[1;32m    261\u001B[0m                              'qubitunitarystrict']:\n\u001B[0;32m--> 262\u001B[0;31m                 \u001B[0mmatrix\u001B[0m \u001B[0;34m=\u001B[0m \u001B[0mmat\u001B[0m\u001B[0;34m(\u001B[0m\u001B[0mparams\u001B[0m\u001B[0;34m)\u001B[0m\u001B[0;34m\u001B[0m\u001B[0;34m\u001B[0m\u001B[0m\n\u001B[0m\u001B[1;32m    263\u001B[0m             \u001B[0;32melif\u001B[0m \u001B[0mname\u001B[0m \u001B[0;32min\u001B[0m \u001B[0;34m[\u001B[0m\u001B[0;34m'multicnot'\u001B[0m\u001B[0;34m,\u001B[0m \u001B[0;34m'multixcnot'\u001B[0m\u001B[0;34m]\u001B[0m\u001B[0;34m:\u001B[0m\u001B[0;34m\u001B[0m\u001B[0;34m\u001B[0m\u001B[0m\n\u001B[1;32m    264\u001B[0m                 \u001B[0;31m# this is for gates that can be applied to arbitrary numbers of\u001B[0m\u001B[0;34m\u001B[0m\u001B[0;34m\u001B[0m\u001B[0;34m\u001B[0m\u001B[0m\n",
      "\u001B[0;32m/content/torchquantum/torchquantum/functional.py\u001B[0m in \u001B[0;36mry_matrix\u001B[0;34m(params)\u001B[0m\n\u001B[1;32m    354\u001B[0m     \u001B[0mtheta\u001B[0m \u001B[0;34m=\u001B[0m \u001B[0mparams\u001B[0m\u001B[0;34m.\u001B[0m\u001B[0mtype\u001B[0m\u001B[0;34m(\u001B[0m\u001B[0mC_DTYPE\u001B[0m\u001B[0;34m)\u001B[0m\u001B[0;34m\u001B[0m\u001B[0;34m\u001B[0m\u001B[0m\n\u001B[1;32m    355\u001B[0m \u001B[0;34m\u001B[0m\u001B[0m\n\u001B[0;32m--> 356\u001B[0;31m     \u001B[0mco\u001B[0m \u001B[0;34m=\u001B[0m \u001B[0mtorch\u001B[0m\u001B[0;34m.\u001B[0m\u001B[0mcos\u001B[0m\u001B[0;34m(\u001B[0m\u001B[0mtheta\u001B[0m \u001B[0;34m/\u001B[0m \u001B[0;36m2\u001B[0m\u001B[0;34m)\u001B[0m\u001B[0;34m\u001B[0m\u001B[0;34m\u001B[0m\u001B[0m\n\u001B[0m\u001B[1;32m    357\u001B[0m     \u001B[0msi\u001B[0m \u001B[0;34m=\u001B[0m \u001B[0mtorch\u001B[0m\u001B[0;34m.\u001B[0m\u001B[0msin\u001B[0m\u001B[0;34m(\u001B[0m\u001B[0mtheta\u001B[0m \u001B[0;34m/\u001B[0m \u001B[0;36m2\u001B[0m\u001B[0;34m)\u001B[0m\u001B[0;34m\u001B[0m\u001B[0;34m\u001B[0m\u001B[0m\n\u001B[1;32m    358\u001B[0m \u001B[0;34m\u001B[0m\u001B[0m\n",
      "\u001B[0;31mKeyboardInterrupt\u001B[0m: "
     ]
    }
   ]
  },
  {
   "cell_type": "code",
   "source": [],
   "metadata": {
    "id": "Oi0O1RF2Eksg",
    "pycharm": {
     "name": "#%%\n"
    }
   },
   "execution_count": null,
   "outputs": []
  }
 ]
}
