{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [],
   "source": [
    "import torch"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "tensor(3.5000)\n"
     ]
    }
   ],
   "source": [
    "x = torch.tensor(3.5)\n",
    "print(x)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "tensor(6.5000)\n"
     ]
    }
   ],
   "source": [
    "y = x + 3\n",
    "print(y)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "tensor(3.5000, requires_grad=True)\n"
     ]
    }
   ],
   "source": [
    "x = torch.tensor(3.5, requires_grad=True)\n",
    "print(x)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "tensor(1.8750, grad_fn=<MulBackward0>)\n"
     ]
    }
   ],
   "source": [
    "y = (x - 1) * (x - 2) * (x - 3)\n",
    "print(y)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [],
   "source": [
    "y.backward()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "tensor(5.7500)"
      ]
     },
     "execution_count": 8,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "x.grad"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {},
   "outputs": [],
   "source": [
    "x = torch.tensor(3.5, requires_grad=True)\n",
    "y = x*x\n",
    "z = 2*y + 3"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "metadata": {},
   "outputs": [],
   "source": [
    "z.backward()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "tensor(14.)"
      ]
     },
     "execution_count": 11,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "x.grad"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "metadata": {},
   "outputs": [],
   "source": [
    "a = torch.tensor(2.0, requires_grad=True)\n",
    "b = torch.tensor(1.0, requires_grad=True)\n",
    "x = 2*a + 3*b\n",
    "y = 5*a*a + 3*b*b*b\n",
    "z = 2*x + 3*y"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "metadata": {},
   "outputs": [],
   "source": [
    "z.backward()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "tensor(64.)"
      ]
     },
     "execution_count": 14,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "a.grad"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "metadata": {},
   "outputs": [],
   "source": [
    "import pandas as pd"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 16,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style scoped>\n",
       "    .dataframe tbody tr th:only-of-type {\n",
       "        vertical-align: middle;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: right;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>0</th>\n",
       "      <th>1</th>\n",
       "      <th>2</th>\n",
       "      <th>3</th>\n",
       "      <th>4</th>\n",
       "      <th>5</th>\n",
       "      <th>6</th>\n",
       "      <th>7</th>\n",
       "      <th>8</th>\n",
       "      <th>9</th>\n",
       "      <th>...</th>\n",
       "      <th>775</th>\n",
       "      <th>776</th>\n",
       "      <th>777</th>\n",
       "      <th>778</th>\n",
       "      <th>779</th>\n",
       "      <th>780</th>\n",
       "      <th>781</th>\n",
       "      <th>782</th>\n",
       "      <th>783</th>\n",
       "      <th>784</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>0</th>\n",
       "      <td>7</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>...</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>2</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>...</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2</th>\n",
       "      <td>1</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>...</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>...</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>4</th>\n",
       "      <td>4</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>...</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "<p>5 rows × 785 columns</p>\n",
       "</div>"
      ],
      "text/plain": [
       "   0    1    2    3    4    5    6    7    8    9    ...  775  776  777  778  \\\n",
       "0    7    0    0    0    0    0    0    0    0    0  ...    0    0    0    0   \n",
       "1    2    0    0    0    0    0    0    0    0    0  ...    0    0    0    0   \n",
       "2    1    0    0    0    0    0    0    0    0    0  ...    0    0    0    0   \n",
       "3    0    0    0    0    0    0    0    0    0    0  ...    0    0    0    0   \n",
       "4    4    0    0    0    0    0    0    0    0    0  ...    0    0    0    0   \n",
       "\n",
       "   779  780  781  782  783  784  \n",
       "0    0    0    0    0    0    0  \n",
       "1    0    0    0    0    0    0  \n",
       "2    0    0    0    0    0    0  \n",
       "3    0    0    0    0    0    0  \n",
       "4    0    0    0    0    0    0  \n",
       "\n",
       "[5 rows x 785 columns]"
      ]
     },
     "execution_count": 16,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "df = pd.read_csv('mnist_dataset/mnist_test_10.csv', header=None)\n",
    "df.head()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 17,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "<class 'pandas.core.frame.DataFrame'>\n",
      "RangeIndex: 10 entries, 0 to 9\n",
      "Columns: 785 entries, 0 to 784\n",
      "dtypes: int64(785)\n",
      "memory usage: 61.5 KB\n"
     ]
    }
   ],
   "source": [
    "df.info()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 18,
   "metadata": {},
   "outputs": [],
   "source": [
    "import matplotlib.pyplot as plt"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 20,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAPsAAAEICAYAAACZA4KlAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/YYfK9AAAACXBIWXMAAAsTAAALEwEAmpwYAAAP4klEQVR4nO3dfZBV9X3H8c8HfIoPUcluCEUEa7QqTkWzQROtD7VxhI4D6EQlbYZMbLFVZ2Im6dTYdGImiWPbGOuMVouFkdgENT4yVuMDSWujGetiNwqCDzFQoSvsBhuWxAfAb/+4h7ji3nMv59yn5fd+zdzZe8/3PHz3sh/Ovefcc3+OCAHY/Y1pdwMAWoOwA4kg7EAiCDuQCMIOJIKwA4kg7KOQ7TW2/6jOecP2Rwtup/Cy6DyEHS1je2/bC22vtT1ku8/2jHb3lQrCjlbaQ9Krkk6TdKCkr0q60/aUdjaVCsI+ytmebvuntv/Pdr/tG2zvtdNsM22/YnvQ9j/YHjNs+c/bXmX7ddsP257crF4j4tcRcVVErImIdyLiAUm/kPSxZm0T7yLso992SV+U1CXpE5LOlHTJTvPMkdQj6QRJsyR9XpJsz5J0paRzJXVL+k9JS+rZqO1/yv6DGen2bJ3rGC/pSEkr65kf5ZjPxo8+ttdI+rOIeGyE2uWSTouIOdnjkDQjIn6YPb5E0nkRcabthyTdFRELs9oYSVskHR0Ra7Nlj4iIl5vwO+wp6SFJP4+Iixu9frwfe/ZRzvaRth+w/ZrtzZKuVmUvP9yrw+6vlfQ72f3Jkq7fsUeWtEmSJU1scs9jJN0m6W1JlzVzW3gXYR/9bpK0WpU98AdVeVnuneaZNOz+oZL+N7v/qqSLI+KgYbcPRMSTtTZq+2bbW6rcqr4st21JCyWNV+UVxtb6f1WUQdhHvwMkbZa0xfZRkv5yhHn+yvbBtidJ+oKkO7LpN0v6iu2pkmT7QNufrmejEfEXEbF/ldvUnEVvknS0pHMi4o06f0c0AGEf/b4s6TOShiTdoneDPNz9kpZL6pP0b6rsWRUR90r6O0m3Z28BVkhq2nnv7Ej/xZKmSXpt2CuBP2nWNvEuDtABiWDPDiSCsAOJIOxAIgg7kIg9Wrmxrq6umDx5Sis3CSRl7do1Ghwc3PlzFpJKht322ZKulzRW0r9ExDV580+ePEVPPNVbZpMAcpx8Yk/VWuGX8bbHSrpRlfOyx0iaa/uYousD0Fxl3rNPl/RyRLwSEW9Lul2VK6oAdKAyYZ+o915gsU4jXEBhe77tXtu9A4MDJTYHoIymH42PiAUR0RMRPd1d3c3eHIAqyoR9vd57NdUh2TQAHahM2J+WdITtw7KvQbpQ0tLGtAWg0QqfeouIbbYvk/SwKqfeFkUEXy8EdKhS59kj4kFJDzaoFwBNxMdlgUQQdiARhB1IBGEHEkHYgUQQdiARhB1IBGEHEkHYgUQQdiARhB1IBGEHEkHYgUS09KukUczVy17MrW9+c3vV2pMrN+Qu+9wP7irU0w5nXTIvt37e8R+pWjt/2qSqNTQee3YgEYQdSARhBxJB2IFEEHYgEYQdSARhBxLBefYOcMa1/5Fb77vznuZtfMzYUos/cvO/5tZ/9NETqtZOvfGC3GU/ctA+hXrCyNizA4kg7EAiCDuQCMIOJIKwA4kg7EAiCDuQCM6zt0A7z6PvffTHc+szzpqaW1/9i9fz60vvz61ve/mZqrUbfnpi7rLfnHFUbh27plTYba+RNCRpu6RtEdHTiKYANF4j9uxnRMRgA9YDoIl4zw4komzYQ9Ijtpfbnj/SDLbn2+613TswOFBycwCKKhv2UyLiBEkzJF1q+9SdZ4iIBRHRExE93V3dJTcHoKhSYY+I9dnPjZLulTS9EU0BaLzCYbe9n+0DdtyXdJakFY1qDEBjlTkaP17SvbZ3rOf7EfHDhnQ1yqxavzm33nf30lLr3/fYk3LrT397dtXaQfvu
mb/uvfP/BLZueye3fvj/5J+IGep7omqt/1dv5i6Lxioc9oh4RdJxDewFQBNx6g1IBGEHEkHYgUQQdiARhB1IBJe4NkD/UI1TSBG55Vqn1n72j+fm1rsO2Dt/+yV8/dH84aKHVvYWXvclJ04uvCx2HXt2IBGEHUgEYQcSQdiBRBB2IBGEHUgEYQcSwXn2BvjDoz6cW3/+oatz6/vulT9s8oE1LlNtplvv7sufYetbLekD5bFnBxJB2IFEEHYgEYQdSARhBxJB2IFEEHYgEZxnb4EJB+3T7haq+uZj+der//qFvlLr/9BJZ1StTZ34wVLrxq5hzw4kgrADiSDsQCIIO5AIwg4kgrADiSDsQCI4z76b+/ELG3Pr1359Uf4K3n4jv/7hw3LLP/ji6VVr+9S4jh+NVXPPbnuR7Y22VwybNs72o7Zfyn4e3Nw2AZRVz8v4WyWdvdO0KyQti4gjJC3LHgPoYDXDHhGPS9q00+RZkhZn9xdLmt3YtgA0WtEDdOMjoj+7/5qk8dVmtD3fdq/t3oHBgYKbA1BW6aPxERGSqo5cGBELIqInInq6u7rLbg5AQUXDvsH2BEnKfuYf8gXQdkXDvlTSvOz+PEn3N6YdAM1S8zy77SWSTpfUZXudpK9JukbSnbYvkrRW0vnNbBLFPfDCYP4Mtc6j13DOBafm1o+fclCp9aNxaoY9IuZWKZ3Z4F4ANBEflwUSQdiBRBB2IBGEHUgEYQcSwSWuu4FPfOtHVWurH3601Lo/+bkLc+s3f/r3S60frcOeHUgEYQcSQdiBRBB2IBGEHUgEYQcSQdiBRHCefRQYHHort776iWeqF9/6Tf7Kuw7NLf/zBdNy6/vuzZ/QaMGeHUgEYQcSQdiBRBB2IBGEHUgEYQcSQdiBRHCSdBQ46asP5c/wy1cLr3vOn+Z/SfAh4z5QeN3oLOzZgUQQdiARhB1IBGEHEkHYgUQQdiARhB1IBOfZO8Cy1Rty679c/mThdU884+zc+g3nHlt43Rhdau7ZbS+yvdH2imHTrrK93nZfdpvZ3DYBlFXPy/hbJY20e7guIqZltwcb2xaARqsZ9oh4XNKmFvQCoInKHKC7zPaz2cv8g6vNZHu+7V7bvQODAyU2B6CMomG/SdLhkqZJ6pd0bbUZI2JBRPRERE93V3fBzQEoq1DYI2JDRGyPiHck3SJpemPbAtBohcJue8Kwh3Mkrag2L4DOUPM8u+0lkk6X1GV7naSvSTrd9jRJIWmNpIub1+Lo96vfbM2tz7+xxnn0rfnfG59n+nETcut873s6av5LR8TcESYvbEIvAJqIj8sCiSDsQCIIO5AIwg4kgrADieC8Swtc+dDq3Pqm//r3UuufOmd21RqXsGIH9uxAIgg7kAjCDiSCsAOJIOxAIgg7kAjCDiSC8+wt8P3rbmvq+u+79JNVa1zCih3YswOJIOxAIgg7kAjCDiSCsAOJIOxAIgg7kAhOwu4GNr+xrWptjzFuYSfvt/8+1f/E9hibv6/Ztv2d3PqWN6v/3rUM1Vj2y0tXFl53PcaOqf67L5o7LXfZffYaW2ib7NmBRBB2IBGEHUgEYQcSQdiBRBB2IBGEHUhEPUM2T5L0XUnjVRmieUFEXG97nKQ7JE1RZdjm8yPi9ea1imo+ds6V7W6hquMvOK9q7dDxB+Quu25gS259+ZK7CvXU6b7RvV9u/Vszjyq03nr27NskfSkijpF0kqRLbR8j6QpJyyLiCEnLsscAOlTNsEdEf0Q8k90fkrRK0kRJsyQtzmZbLGl2k3oE0AC79J7d9hRJx0t6StL4iOjPSq+p8jIfQIeqO+y295d0t6TLI2Lz8FpEhCrv50dabr7tXtu9A4MDpZoFUFxdYbe9pypB/15E3JNN3mB7QlafIGnjSMtGxIKI6ImInu6u7kb0DKCAmmG3bUkLJa2KiO8MKy2VNC+7P0/S/Y1vD0Cj1HOJ68mSPivpOdt92bQrJV0j6U7bF0laK+n8pnS4Gzjmj2fk
1p+/777WNNIG/33H3dVrzd74HntVr40pdpnoDtNm5/+bnnnchMLrnv17zTn8VTPsEfETSdUuij6zse0AaBY+QQckgrADiSDsQCIIO5AIwg4kgrADieCrpFvgia+ckVv/Rs/E3PqbW/O/UrmMp17M/whzMy8jPe2iz+TWpx5yYKn1//nHJ1WtTalxGenuiD07kAjCDiSCsAOJIOxAIgg7kAjCDiSCsAOJ4Dx7B/jbTx3Zvo3X+lriy/+gNX2g6dizA4kg7EAiCDuQCMIOJIKwA4kg7EAiCDuQCMIOJIKwA4kg7EAiCDuQCMIOJIKwA4kg7EAiCDuQiJphtz3J9o9tP297pe0vZNOvsr3edl92m9n8dgEUVc+XV2yT9KWIeMb2AZKW2340q10XEd9uXnsAGqVm2COiX1J/dn/I9ipJ+UOYAOg4u/Se3fYUScdLeiqbdJntZ20vsn1wlWXm2+613TswmD/UEIDmqTvstveXdLekyyNis6SbJB0uaZoqe/5rR1ouIhZERE9E9HR3dZfvGEAhdYXd9p6qBP17EXGPJEXEhojYHhHvSLpF0vTmtQmgrHqOxlvSQkmrIuI7w6ZPGDbbHEkrGt8egEap52j8yZI+K+k5233ZtCslzbU9TVJIWiPp4ib0B6BB6jka/xNJHqH0YOPbAdAsfIIOSARhBxJB2IFEEHYgEYQdSARhBxJB2IFEEHYgEYQdSARhBxJB2IFEEHYgEYQdSARhBxLhiGjdxuwBSWuHTeqSNNiyBnZNp/bWqX1J9FZUI3ubHBEjfv9bS8P+vo3bvRHR07YGcnRqb53al0RvRbWqN17GA4kg7EAi2h32BW3efp5O7a1T+5LoraiW9NbW9+wAWqfde3YALULYgUS0Jey2z7b9gu2XbV/Rjh6qsb3G9nPZMNS9be5lke2NtlcMmzbO9qO2X8p+jjjGXpt664hhvHOGGW/rc9fu4c9b/p7d9lhJL0r6lKR1kp6WNDcinm9pI1XYXiOpJyLa/gEM26dK2iLpuxFxbDbt7yVtiohrsv8oD46Iv+6Q3q6StKXdw3hnoxVNGD7MuKTZkj6nNj53OX2drxY8b+3Ys0+X9HJEvBIRb0u6XdKsNvTR8SLicUmbdpo8S9Li7P5iVf5YWq5Kbx0hIvoj4pns/pCkHcOMt/W5y+mrJdoR9omSXh32eJ06a7z3kPSI7eW257e7mRGMj4j+7P5rksa3s5kR1BzGu5V2Gma8Y567IsOfl8UBuvc7JSJOkDRD0qXZy9WOFJX3YJ107rSuYbxbZYRhxn+rnc9d0eHPy2pH2NdLmjTs8SHZtI4QEeuznxsl3avOG4p6w44RdLOfG9vcz2910jDeIw0zrg547to5/Hk7wv60pCNsH2Z7L0kXSlrahj7ex/Z+2YET2d5P0lnqvKGol0qal92fJ+n+NvbyHp0yjHe1YcbV5ueu7cOfR0TLb5JmqnJE/ueS/qYdPVTp63cl/Sy7rWx3b5KWqPKybqsqxzYukvQhScskvSTpMUnjOqi32yQ9J+lZVYI1oU29naLKS/RnJfVlt5ntfu5y+mrJ88bHZYFEcIAOSARhBxJB2IFEEHYgEYQdSARhBxJB2IFE/D9YIsggbVuzJAAAAABJRU5ErkJggg==",
      "text/plain": [
       "<Figure size 432x288 with 1 Axes>"
      ]
     },
     "metadata": {
      "needs_background": "light"
     },
     "output_type": "display_data"
    }
   ],
   "source": [
     "# Row layout: column 0 is the digit label, columns 1-784 are the 28x28 pixels.\n",
     "data = df.iloc[1]\n",
    "label = data[0]\n",
    "img = data[1:].values.reshape(28, 28)\n",
    "plt.title(f'label = {label}')\n",
    "plt.imshow(img, interpolation='none', cmap='Blues')\n",
    "plt.show()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 21,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0.])"
      ]
     },
     "execution_count": 21,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
     "# Legacy constructor: torch.FloatTensor(10) allocates an *uninitialized*\n",
     "# length-10 tensor -- the zeros in the output are coincidental.\n",
     "# Prefer torch.zeros(10), shown in the next cell.\n",
     "torch.FloatTensor(10)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 22,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0.])"
      ]
     },
     "execution_count": 22,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "torch.zeros(10)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [
    {
     "ename": "ModuleNotFoundError",
     "evalue": "No module named 'torch'",
     "output_type": "error",
     "traceback": [
      "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m",
      "\u001b[1;31mModuleNotFoundError\u001b[0m                       Traceback (most recent call last)",
      "\u001b[1;32m~\\AppData\\Local\\Temp\\ipykernel_6976\\3390982276.py\u001b[0m in \u001b[0;36m<cell line: 2>\u001b[1;34m()\u001b[0m\n\u001b[0;32m      1\u001b[0m \u001b[1;32mimport\u001b[0m \u001b[0mnumpy\u001b[0m \u001b[1;32mas\u001b[0m \u001b[0mnp\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m----> 2\u001b[1;33m \u001b[1;32mimport\u001b[0m \u001b[0mtorch\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m",
      "\u001b[1;31mModuleNotFoundError\u001b[0m: No module named 'torch'"
     ]
    }
   ],
   "source": [
     "# NOTE(review): the saved error below (ModuleNotFoundError) is from a later\n",
     "# session whose kernel environment lacked torch; install torch there to re-run.\n",
     "import numpy as np\n",
     "import torch"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [],
   "source": [
    "size = 600\n",
    "a = np.random.rand(size, size)\n",
    "b = np.random.rand(size, size)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "5.25 ms ± 930 µs per loop (mean ± std. dev. of 7 runs, 100 loops each)\n"
     ]
    }
   ],
   "source": [
    "%%timeit\n",
    "x = np.dot(a, b)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
     "# %%timeit\n",
     "# Naive O(n^3) pure-Python matmul, kept for comparison with the vectorized\n",
     "# np.dot cell above; at size = 600 it takes minutes, so it stays commented out.\n",
    "# c = np.zeros((size, size))\n",
    "# for i in range(size):\n",
    "#     for j in range(size):\n",
    "#         for k in range(size):\n",
    "#             c[i,j] += a[i,k] * b[k,j]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "('torch.FloatTensor', device(type='cpu'))"
      ]
     },
     "execution_count": 4,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "x = torch.FloatTensor([3.5])\n",
    "x.type(), x.device"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [
    {
     "ename": "TypeError",
     "evalue": "type torch.cuda.FloatTensor not available. Torch not compiled with CUDA enabled.",
     "output_type": "error",
     "traceback": [
      "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m",
      "\u001b[1;31mTypeError\u001b[0m                                 Traceback (most recent call last)",
      "\u001b[1;32m~\\AppData\\Local\\Temp\\ipykernel_11012\\3692756368.py\u001b[0m in \u001b[0;36m<cell line: 1>\u001b[1;34m()\u001b[0m\n\u001b[1;32m----> 1\u001b[1;33m \u001b[0mx\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mtorch\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mcuda\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mFloatTensor\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m[\u001b[0m\u001b[1;36m3.5\u001b[0m\u001b[1;33m]\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m      2\u001b[0m \u001b[0mx\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mtype\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mx\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mdevice\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
      "\u001b[1;31mTypeError\u001b[0m: type torch.cuda.FloatTensor not available. Torch not compiled with CUDA enabled."
     ]
    }
   ],
   "source": [
     "# torch.cuda.FloatTensor needs a CUDA-enabled torch build; on a CPU-only\n",
     "# install it raises the TypeError shown below.\n",
     "# Modern, device-aware form: torch.tensor([3.5], device='cuda').\n",
     "x = torch.cuda.FloatTensor([3.5])\n",
    "x.type(), x.device"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [],
   "source": [
     "# cupy provides a NumPy-compatible array API on CUDA GPUs; importing it\n",
     "# requires a CUDA-capable environment.\n",
     "import cupy"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3.8.8 ('base')",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.8.8"
  },
  "orig_nbformat": 4,
  "vscode": {
   "interpreter": {
    "hash": "20aabe8190757add272c93e171c72baada0dcd125271d96e437739185f4dbda6"
   }
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
