{
 "cells": [
  {
   "attachments": {},
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "`HackGPT Environment Configuration`"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {
    "vscode": {
     "languageId": "shellscript"
    }
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Collecting directory_structure\n",
      "  Using cached directory_structure-1.1.2-py3-none-any.whl (4.3 kB)\n",
      "Collecting alive_progress\n",
      "  Downloading alive_progress-3.0.1-py3-none-any.whl (72 kB)\n",
      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m73.0/73.0 kB\u001b[0m \u001b[31m673.6 kB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0ma \u001b[36m0:00:01\u001b[0m\n",
      "\u001b[?25hCollecting fade\n",
      "  Using cached fade-0.0.9.tar.gz (2.2 kB)\n",
      "  Preparing metadata (setup.py) ... \u001b[?25ldone\n",
      "\u001b[?25hCollecting python-dotenv\n",
      "  Downloading python_dotenv-1.0.0-py3-none-any.whl (19 kB)\n",
      "Collecting pathlib\n",
      "  Using cached pathlib-1.0.1-py3-none-any.whl (14 kB)\n",
      "Collecting openai\n",
      "  Downloading openai-0.27.0-py3-none-any.whl (70 kB)\n",
      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m70.1/70.1 kB\u001b[0m \u001b[31m2.5 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
      "\u001b[?25hCollecting gradio\n",
      "  Downloading gradio-3.20.0-py3-none-any.whl (14.3 MB)\n",
      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m14.3/14.3 MB\u001b[0m \u001b[31m4.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m00:01\u001b[0m00:01\u001b[0m\n",
      "\u001b[?25hCollecting ipywidgets\n",
      "  Downloading ipywidgets-8.0.4-py3-none-any.whl (137 kB)\n",
      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m137.8/137.8 kB\u001b[0m \u001b[31m4.7 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
      "\u001b[?25hCollecting emoji\n",
      "  Using cached emoji-2.2.0.tar.gz (240 kB)\n",
      "  Preparing metadata (setup.py) ... \u001b[?25ldone\n",
      "\u001b[?25hCollecting grapheme==0.6.0\n",
      "  Using cached grapheme-0.6.0.tar.gz (207 kB)\n",
      "  Preparing metadata (setup.py) ... \u001b[?25ldone\n",
      "\u001b[?25hCollecting about-time==4.2.1\n",
      "  Downloading about_time-4.2.1-py3-none-any.whl (13 kB)\n",
      "Collecting tqdm\n",
      "  Downloading tqdm-4.65.0-py3-none-any.whl (77 kB)\n",
      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m77.1/77.1 kB\u001b[0m \u001b[31m4.5 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
      "\u001b[?25hCollecting requests>=2.20\n",
      "  Downloading requests-2.28.2-py3-none-any.whl (62 kB)\n",
      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m62.8/62.8 kB\u001b[0m \u001b[31m4.4 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
      "\u001b[?25hCollecting aiohttp\n",
      "  Downloading aiohttp-3.8.4-cp310-cp310-macosx_11_0_arm64.whl (336 kB)\n",
      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m336.9/336.9 kB\u001b[0m \u001b[31m5.7 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0ma \u001b[36m0:00:01\u001b[0m\n",
      "\u001b[?25hCollecting httpx\n",
      "  Using cached httpx-0.23.3-py3-none-any.whl (71 kB)\n",
      "Collecting pyyaml\n",
      "  Downloading PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl (173 kB)\n",
      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m174.0/174.0 kB\u001b[0m \u001b[31m5.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
      "\u001b[?25hCollecting ffmpy\n",
      "  Using cached ffmpy-0.3.0.tar.gz (4.8 kB)\n",
      "  Preparing metadata (setup.py) ... \u001b[?25ldone\n",
      "\u001b[?25hCollecting pycryptodome\n",
      "  Downloading pycryptodome-3.17-cp35-abi3-macosx_10_9_universal2.whl (2.4 MB)\n",
      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m2.4/2.4 MB\u001b[0m \u001b[31m4.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m00:01\u001b[0m00:01\u001b[0m\n",
      "\u001b[?25hCollecting numpy\n",
      "  Downloading numpy-1.24.2-cp310-cp310-macosx_11_0_arm64.whl (13.9 MB)\n",
      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m13.9/13.9 MB\u001b[0m \u001b[31m4.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m00:01\u001b[0m00:01\u001b[0m\n",
      "\u001b[?25hCollecting jinja2\n",
      "  Using cached Jinja2-3.1.2-py3-none-any.whl (133 kB)\n",
      "Collecting pydub\n",
      "  Using cached pydub-0.25.1-py2.py3-none-any.whl (32 kB)\n",
      "Collecting fsspec\n",
      "  Downloading fsspec-2023.3.0-py3-none-any.whl (145 kB)\n",
      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m145.4/145.4 kB\u001b[0m \u001b[31m5.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
      "\u001b[?25hCollecting markupsafe\n",
      "  Downloading MarkupSafe-2.1.2-cp310-cp310-macosx_10_9_universal2.whl (17 kB)\n",
      "Collecting mdit-py-plugins<=0.3.3\n",
      "  Using cached mdit_py_plugins-0.3.3-py3-none-any.whl (50 kB)\n",
      "Collecting pillow\n",
      "  Downloading Pillow-9.4.0-cp310-cp310-macosx_11_0_arm64.whl (3.0 MB)\n",
      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m3.0/3.0 MB\u001b[0m \u001b[31m5.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0ma \u001b[36m0:00:01\u001b[0m\n",
      "\u001b[?25hCollecting pandas\n",
      "  Downloading pandas-1.5.3-cp310-cp310-macosx_11_0_arm64.whl (10.9 MB)\n",
      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m10.9/10.9 MB\u001b[0m \u001b[31m5.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m00:01\u001b[0m00:01\u001b[0m\n",
      "\u001b[?25hCollecting typing-extensions\n",
      "  Downloading typing_extensions-4.5.0-py3-none-any.whl (27 kB)\n",
      "Collecting pydantic\n",
      "  Downloading pydantic-1.10.5-cp310-cp310-macosx_11_0_arm64.whl (2.5 MB)\n",
      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m2.5/2.5 MB\u001b[0m \u001b[31m5.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0ma \u001b[36m0:00:01\u001b[0m\n",
      "\u001b[?25hCollecting orjson\n",
      "  Downloading orjson-3.8.7-cp310-cp310-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl (489 kB)\n",
      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m489.3/489.3 kB\u001b[0m \u001b[31m5.3 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0ma \u001b[36m0:00:01\u001b[0m\n",
      "\u001b[?25hCollecting fastapi\n",
      "  Using cached fastapi-0.92.0-py3-none-any.whl (56 kB)\n",
      "Collecting python-multipart\n",
      "  Downloading python_multipart-0.0.6-py3-none-any.whl (45 kB)\n",
      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m45.7/45.7 kB\u001b[0m \u001b[31m3.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
      "\u001b[?25hCollecting altair>=4.2.0\n",
      "  Using cached altair-4.2.2-py3-none-any.whl (813 kB)\n",
      "Collecting matplotlib\n",
      "  Downloading matplotlib-3.7.1-cp310-cp310-macosx_11_0_arm64.whl (7.3 MB)\n",
      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m7.3/7.3 MB\u001b[0m \u001b[31m5.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m00:01\u001b[0m00:01\u001b[0m\n",
      "\u001b[?25hCollecting markdown-it-py[linkify]>=2.0.0\n",
      "  Downloading markdown_it_py-2.2.0-py3-none-any.whl (84 kB)\n",
      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m84.5/84.5 kB\u001b[0m \u001b[31m4.5 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
      "\u001b[?25hCollecting aiofiles\n",
      "  Using cached aiofiles-23.1.0-py3-none-any.whl (14 kB)\n",
      "Collecting websockets>=10.0\n",
      "  Downloading websockets-10.4-cp310-cp310-macosx_11_0_arm64.whl (97 kB)\n",
      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m97.9/97.9 kB\u001b[0m \u001b[31m4.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
      "\u001b[?25hCollecting uvicorn\n",
      "  Using cached uvicorn-0.20.0-py3-none-any.whl (56 kB)\n",
      "Requirement already satisfied: ipykernel>=4.5.1 in /Users/0xdeadbeef/Library/Python/3.10/lib/python/site-packages (from ipywidgets->-r https://raw.githubusercontent.com/NoDataFound/hackGPT/main/requirements.txt (line 8)) (6.21.2)\n",
      "Collecting widgetsnbextension~=4.0\n",
      "  Downloading widgetsnbextension-4.0.5-py3-none-any.whl (2.0 MB)\n",
      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m2.0/2.0 MB\u001b[0m \u001b[31m5.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m00:01\u001b[0m00:01\u001b[0m\n",
      "\u001b[?25hRequirement already satisfied: ipython>=6.1.0 in /Users/0xdeadbeef/Library/Python/3.10/lib/python/site-packages (from ipywidgets->-r https://raw.githubusercontent.com/NoDataFound/hackGPT/main/requirements.txt (line 8)) (8.11.0)\n",
      "Collecting jupyterlab-widgets~=3.0\n",
      "  Downloading jupyterlab_widgets-3.0.5-py3-none-any.whl (384 kB)\n",
      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m384.3/384.3 kB\u001b[0m \u001b[31m4.3 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0ma \u001b[36m0:00:01\u001b[0m\n",
      "\u001b[?25hRequirement already satisfied: traitlets>=4.3.1 in /Users/0xdeadbeef/Library/Python/3.10/lib/python/site-packages (from ipywidgets->-r https://raw.githubusercontent.com/NoDataFound/hackGPT/main/requirements.txt (line 8)) (5.9.0)\n",
      "Collecting jsonschema>=3.0\n",
      "  Using cached jsonschema-4.17.3-py3-none-any.whl (90 kB)\n",
      "Collecting toolz\n",
      "  Downloading toolz-0.12.0-py3-none-any.whl (55 kB)\n",
      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m55.8/55.8 kB\u001b[0m \u001b[31m4.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
      "\u001b[?25hCollecting entrypoints\n",
      "  Using cached entrypoints-0.4-py3-none-any.whl (5.3 kB)\n",
      "Requirement already satisfied: tornado>=6.1 in /Users/0xdeadbeef/Library/Python/3.10/lib/python/site-packages (from ipykernel>=4.5.1->ipywidgets->-r https://raw.githubusercontent.com/NoDataFound/hackGPT/main/requirements.txt (line 8)) (6.2)\n",
      "Requirement already satisfied: nest-asyncio in /Users/0xdeadbeef/Library/Python/3.10/lib/python/site-packages (from ipykernel>=4.5.1->ipywidgets->-r https://raw.githubusercontent.com/NoDataFound/hackGPT/main/requirements.txt (line 8)) (1.5.6)\n",
      "Requirement already satisfied: jupyter-client>=6.1.12 in /Users/0xdeadbeef/Library/Python/3.10/lib/python/site-packages (from ipykernel>=4.5.1->ipywidgets->-r https://raw.githubusercontent.com/NoDataFound/hackGPT/main/requirements.txt (line 8)) (8.0.3)\n",
      "Requirement already satisfied: packaging in /Users/0xdeadbeef/Library/Python/3.10/lib/python/site-packages (from ipykernel>=4.5.1->ipywidgets->-r https://raw.githubusercontent.com/NoDataFound/hackGPT/main/requirements.txt (line 8)) (23.0)\n",
      "Requirement already satisfied: comm>=0.1.1 in /Users/0xdeadbeef/Library/Python/3.10/lib/python/site-packages (from ipykernel>=4.5.1->ipywidgets->-r https://raw.githubusercontent.com/NoDataFound/hackGPT/main/requirements.txt (line 8)) (0.1.2)\n",
      "Requirement already satisfied: matplotlib-inline>=0.1 in /Users/0xdeadbeef/Library/Python/3.10/lib/python/site-packages (from ipykernel>=4.5.1->ipywidgets->-r https://raw.githubusercontent.com/NoDataFound/hackGPT/main/requirements.txt (line 8)) (0.1.6)\n",
      "Requirement already satisfied: psutil in /Users/0xdeadbeef/Library/Python/3.10/lib/python/site-packages (from ipykernel>=4.5.1->ipywidgets->-r https://raw.githubusercontent.com/NoDataFound/hackGPT/main/requirements.txt (line 8)) (5.9.4)\n",
      "Requirement already satisfied: appnope in /Users/0xdeadbeef/Library/Python/3.10/lib/python/site-packages (from ipykernel>=4.5.1->ipywidgets->-r https://raw.githubusercontent.com/NoDataFound/hackGPT/main/requirements.txt (line 8)) (0.1.3)\n",
      "Requirement already satisfied: debugpy>=1.6.5 in /Users/0xdeadbeef/Library/Python/3.10/lib/python/site-packages (from ipykernel>=4.5.1->ipywidgets->-r https://raw.githubusercontent.com/NoDataFound/hackGPT/main/requirements.txt (line 8)) (1.6.6)\n",
      "Requirement already satisfied: pyzmq>=20 in /Users/0xdeadbeef/Library/Python/3.10/lib/python/site-packages (from ipykernel>=4.5.1->ipywidgets->-r https://raw.githubusercontent.com/NoDataFound/hackGPT/main/requirements.txt (line 8)) (25.0.0)\n",
      "Requirement already satisfied: jupyter-core!=5.0.*,>=4.12 in /Users/0xdeadbeef/Library/Python/3.10/lib/python/site-packages (from ipykernel>=4.5.1->ipywidgets->-r https://raw.githubusercontent.com/NoDataFound/hackGPT/main/requirements.txt (line 8)) (5.2.0)\n",
      "Requirement already satisfied: pickleshare in /Users/0xdeadbeef/Library/Python/3.10/lib/python/site-packages (from ipython>=6.1.0->ipywidgets->-r https://raw.githubusercontent.com/NoDataFound/hackGPT/main/requirements.txt (line 8)) (0.7.5)\n",
      "Requirement already satisfied: prompt-toolkit!=3.0.37,<3.1.0,>=3.0.30 in /Users/0xdeadbeef/Library/Python/3.10/lib/python/site-packages (from ipython>=6.1.0->ipywidgets->-r https://raw.githubusercontent.com/NoDataFound/hackGPT/main/requirements.txt (line 8)) (3.0.38)\n",
      "Requirement already satisfied: decorator in /Users/0xdeadbeef/Library/Python/3.10/lib/python/site-packages (from ipython>=6.1.0->ipywidgets->-r https://raw.githubusercontent.com/NoDataFound/hackGPT/main/requirements.txt (line 8)) (5.1.1)\n",
      "Requirement already satisfied: backcall in /Users/0xdeadbeef/Library/Python/3.10/lib/python/site-packages (from ipython>=6.1.0->ipywidgets->-r https://raw.githubusercontent.com/NoDataFound/hackGPT/main/requirements.txt (line 8)) (0.2.0)\n",
      "Requirement already satisfied: pexpect>4.3 in /Users/0xdeadbeef/Library/Python/3.10/lib/python/site-packages (from ipython>=6.1.0->ipywidgets->-r https://raw.githubusercontent.com/NoDataFound/hackGPT/main/requirements.txt (line 8)) (4.8.0)\n",
      "Requirement already satisfied: jedi>=0.16 in /Users/0xdeadbeef/Library/Python/3.10/lib/python/site-packages (from ipython>=6.1.0->ipywidgets->-r https://raw.githubusercontent.com/NoDataFound/hackGPT/main/requirements.txt (line 8)) (0.18.2)\n",
      "Requirement already satisfied: stack-data in /Users/0xdeadbeef/Library/Python/3.10/lib/python/site-packages (from ipython>=6.1.0->ipywidgets->-r https://raw.githubusercontent.com/NoDataFound/hackGPT/main/requirements.txt (line 8)) (0.6.2)\n",
      "Requirement already satisfied: pygments>=2.4.0 in /Users/0xdeadbeef/Library/Python/3.10/lib/python/site-packages (from ipython>=6.1.0->ipywidgets->-r https://raw.githubusercontent.com/NoDataFound/hackGPT/main/requirements.txt (line 8)) (2.14.0)\n",
      "Collecting mdurl~=0.1\n",
      "  Using cached mdurl-0.1.2-py3-none-any.whl (10.0 kB)\n",
      "Collecting linkify-it-py<3,>=1\n",
      "  Downloading linkify_it_py-2.0.0-py3-none-any.whl (19 kB)\n",
      "Collecting pytz>=2020.1\n",
      "  Downloading pytz-2022.7.1-py2.py3-none-any.whl (499 kB)\n",
      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m499.4/499.4 kB\u001b[0m \u001b[31m4.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0ma \u001b[36m0:00:01\u001b[0m\n",
      "\u001b[?25hRequirement already satisfied: python-dateutil>=2.8.1 in /Users/0xdeadbeef/Library/Python/3.10/lib/python/site-packages (from pandas->gradio->-r https://raw.githubusercontent.com/NoDataFound/hackGPT/main/requirements.txt (line 7)) (2.8.2)\n",
      "Collecting urllib3<1.27,>=1.21.1\n",
      "  Downloading urllib3-1.26.14-py2.py3-none-any.whl (140 kB)\n",
      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m140.6/140.6 kB\u001b[0m \u001b[31m4.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
      "\u001b[?25hCollecting idna<4,>=2.5\n",
      "  Using cached idna-3.4-py3-none-any.whl (61 kB)\n",
      "Collecting charset-normalizer<4,>=2\n",
      "  Downloading charset_normalizer-3.0.1-cp310-cp310-macosx_11_0_arm64.whl (122 kB)\n",
      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m122.5/122.5 kB\u001b[0m \u001b[31m4.5 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
      "\u001b[?25hCollecting certifi>=2017.4.17\n",
      "  Downloading certifi-2022.12.7-py3-none-any.whl (155 kB)\n",
      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m155.3/155.3 kB\u001b[0m \u001b[31m5.3 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
      "\u001b[?25hCollecting async-timeout<5.0,>=4.0.0a3\n",
      "  Using cached async_timeout-4.0.2-py3-none-any.whl (5.8 kB)\n",
      "Collecting attrs>=17.3.0\n",
      "  Downloading attrs-22.2.0-py3-none-any.whl (60 kB)\n",
      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m60.0/60.0 kB\u001b[0m \u001b[31m3.7 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
      "\u001b[?25hCollecting yarl<2.0,>=1.0\n",
      "  Downloading yarl-1.8.2-cp310-cp310-macosx_11_0_arm64.whl (57 kB)\n",
      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m57.9/57.9 kB\u001b[0m \u001b[31m3.5 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
      "\u001b[?25hCollecting multidict<7.0,>=4.5\n",
      "  Downloading multidict-6.0.4-cp310-cp310-macosx_11_0_arm64.whl (29 kB)\n",
      "Collecting aiosignal>=1.1.2\n",
      "  Downloading aiosignal-1.3.1-py3-none-any.whl (7.6 kB)\n",
      "Collecting frozenlist>=1.1.1\n",
      "  Downloading frozenlist-1.3.3-cp310-cp310-macosx_11_0_arm64.whl (34 kB)\n",
      "Collecting starlette<0.26.0,>=0.25.0\n",
      "  Using cached starlette-0.25.0-py3-none-any.whl (66 kB)\n",
      "Collecting sniffio\n",
      "  Using cached sniffio-1.3.0-py3-none-any.whl (10 kB)\n",
      "Collecting rfc3986[idna2008]<2,>=1.3\n",
      "  Using cached rfc3986-1.5.0-py2.py3-none-any.whl (31 kB)\n",
      "Collecting httpcore<0.17.0,>=0.15.0\n",
      "  Using cached httpcore-0.16.3-py3-none-any.whl (69 kB)\n",
      "Collecting contourpy>=1.0.1\n",
      "  Downloading contourpy-1.0.7-cp310-cp310-macosx_11_0_arm64.whl (229 kB)\n",
      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m229.7/229.7 kB\u001b[0m \u001b[31m5.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0ma \u001b[36m0:00:01\u001b[0m\n",
      "\u001b[?25hCollecting pyparsing>=2.3.1\n",
      "  Using cached pyparsing-3.0.9-py3-none-any.whl (98 kB)\n",
      "Collecting fonttools>=4.22.0\n",
      "  Using cached fonttools-4.38.0-py3-none-any.whl (965 kB)\n",
      "Collecting kiwisolver>=1.0.1\n",
      "  Downloading kiwisolver-1.4.4-cp310-cp310-macosx_11_0_arm64.whl (63 kB)\n",
      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m63.2/63.2 kB\u001b[0m \u001b[31m3.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
      "\u001b[?25hCollecting cycler>=0.10\n",
      "  Using cached cycler-0.11.0-py3-none-any.whl (6.4 kB)\n",
      "Collecting h11>=0.8\n",
      "  Using cached h11-0.14.0-py3-none-any.whl (58 kB)\n",
      "Collecting click>=7.0\n",
      "  Using cached click-8.1.3-py3-none-any.whl (96 kB)\n",
      "Collecting anyio<5.0,>=3.0\n",
      "  Using cached anyio-3.6.2-py3-none-any.whl (80 kB)\n",
      "Requirement already satisfied: parso<0.9.0,>=0.8.0 in /Users/0xdeadbeef/Library/Python/3.10/lib/python/site-packages (from jedi>=0.16->ipython>=6.1.0->ipywidgets->-r https://raw.githubusercontent.com/NoDataFound/hackGPT/main/requirements.txt (line 8)) (0.8.3)\n",
      "Collecting pyrsistent!=0.17.0,!=0.17.1,!=0.17.2,>=0.14.0\n",
      "  Downloading pyrsistent-0.19.3-cp310-cp310-macosx_10_9_universal2.whl (82 kB)\n",
      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m82.5/82.5 kB\u001b[0m \u001b[31m4.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
      "\u001b[?25hRequirement already satisfied: platformdirs>=2.5 in /Users/0xdeadbeef/Library/Python/3.10/lib/python/site-packages (from jupyter-core!=5.0.*,>=4.12->ipykernel>=4.5.1->ipywidgets->-r https://raw.githubusercontent.com/NoDataFound/hackGPT/main/requirements.txt (line 8)) (3.1.0)\n",
      "Collecting uc-micro-py\n",
      "  Using cached uc_micro_py-1.0.1-py3-none-any.whl (6.2 kB)\n",
      "Requirement already satisfied: ptyprocess>=0.5 in /Users/0xdeadbeef/Library/Python/3.10/lib/python/site-packages (from pexpect>4.3->ipython>=6.1.0->ipywidgets->-r https://raw.githubusercontent.com/NoDataFound/hackGPT/main/requirements.txt (line 8)) (0.7.0)\n",
      "Requirement already satisfied: wcwidth in /Users/0xdeadbeef/Library/Python/3.10/lib/python/site-packages (from prompt-toolkit!=3.0.37,<3.1.0,>=3.0.30->ipython>=6.1.0->ipywidgets->-r https://raw.githubusercontent.com/NoDataFound/hackGPT/main/requirements.txt (line 8)) (0.2.6)\n",
      "Requirement already satisfied: six>=1.5 in /Users/0xdeadbeef/Library/Python/3.10/lib/python/site-packages (from python-dateutil>=2.8.1->pandas->gradio->-r https://raw.githubusercontent.com/NoDataFound/hackGPT/main/requirements.txt (line 7)) (1.16.0)\n",
      "Requirement already satisfied: executing>=1.2.0 in /Users/0xdeadbeef/Library/Python/3.10/lib/python/site-packages (from stack-data->ipython>=6.1.0->ipywidgets->-r https://raw.githubusercontent.com/NoDataFound/hackGPT/main/requirements.txt (line 8)) (1.2.0)\n",
      "Requirement already satisfied: pure-eval in /Users/0xdeadbeef/Library/Python/3.10/lib/python/site-packages (from stack-data->ipython>=6.1.0->ipywidgets->-r https://raw.githubusercontent.com/NoDataFound/hackGPT/main/requirements.txt (line 8)) (0.2.2)\n",
      "Requirement already satisfied: asttokens>=2.1.0 in /Users/0xdeadbeef/Library/Python/3.10/lib/python/site-packages (from stack-data->ipython>=6.1.0->ipywidgets->-r https://raw.githubusercontent.com/NoDataFound/hackGPT/main/requirements.txt (line 8)) (2.2.1)\n",
      "Building wheels for collected packages: grapheme, fade, emoji, ffmpy\n",
      "  Building wheel for grapheme (setup.py) ... \u001b[?25ldone\n",
      "\u001b[?25h  Created wheel for grapheme: filename=grapheme-0.6.0-py3-none-any.whl size=210079 sha256=e79b2ac757ba12b9477c8a8e13ea9b7000ed4f9a8112dfb1e82965c3c4bde7c4\n",
      "  Stored in directory: /Users/0xdeadbeef/Library/Caches/pip/wheels/01/e1/49/37e6bde9886439057450c494a79b0bef8bbe897a54aebfc757\n",
      "  Building wheel for fade (setup.py) ... \u001b[?25ldone\n",
      "\u001b[?25h  Created wheel for fade: filename=fade-0.0.9-py3-none-any.whl size=2225 sha256=89accae9439e84a1329dc8d465cebe2a4aefb3516a27095428f1a0a212780c47\n",
      "  Stored in directory: /Users/0xdeadbeef/Library/Caches/pip/wheels/68/9b/3c/1b9630f8c3fae92316b3f257fea1742ddf1347d610692bac2e\n",
      "  Building wheel for emoji (setup.py) ... \u001b[?25ldone\n",
      "\u001b[?25h  Created wheel for emoji: filename=emoji-2.2.0-py3-none-any.whl size=234912 sha256=656ca965fdb263b2aacdcdc091685be60dd3c4809c6d9af6c94058ddb7f4cd5a\n",
      "  Stored in directory: /Users/0xdeadbeef/Library/Caches/pip/wheels/02/3d/88/51a592b9ad17e7899126563698b4e3961983ebe85747228ba6\n",
      "  Building wheel for ffmpy (setup.py) ... \u001b[?25ldone\n",
      "\u001b[?25h  Created wheel for ffmpy: filename=ffmpy-0.3.0-py3-none-any.whl size=4693 sha256=b06c52ccf94e41b24232f1843f746f2c01d216da0ce3b53d67986a87d5afec40\n",
      "  Stored in directory: /Users/0xdeadbeef/Library/Caches/pip/wheels/0c/c2/0e/3b9c6845c6a4e35beb90910cc70d9ac9ab5d47402bd62af0df\n",
      "Successfully built grapheme fade emoji ffmpy\n",
      "Installing collected packages: rfc3986, pytz, pydub, pathlib, grapheme, ffmpy, fade, charset-normalizer, widgetsnbextension, websockets, urllib3, uc-micro-py, typing-extensions, tqdm, toolz, sniffio, pyyaml, python-multipart, python-dotenv, pyrsistent, pyparsing, pycryptodome, pillow, orjson, numpy, multidict, mdurl, markupsafe, kiwisolver, jupyterlab-widgets, idna, h11, fsspec, frozenlist, fonttools, entrypoints, emoji, cycler, click, certifi, attrs, async-timeout, aiofiles, about-time, yarl, uvicorn, requests, pydantic, pandas, markdown-it-py, linkify-it-py, jsonschema, jinja2, directory_structure, contourpy, anyio, alive_progress, aiosignal, starlette, mdit-py-plugins, matplotlib, httpcore, altair, aiohttp, openai, httpx, fastapi, ipywidgets, gradio\n",
      "Successfully installed about-time-4.2.1 aiofiles-23.1.0 aiohttp-3.8.4 aiosignal-1.3.1 alive_progress-3.0.1 altair-4.2.2 anyio-3.6.2 async-timeout-4.0.2 attrs-22.2.0 certifi-2022.12.7 charset-normalizer-3.0.1 click-8.1.3 contourpy-1.0.7 cycler-0.11.0 directory_structure-1.1.2 emoji-2.2.0 entrypoints-0.4 fade-0.0.9 fastapi-0.92.0 ffmpy-0.3.0 fonttools-4.38.0 frozenlist-1.3.3 fsspec-2023.3.0 gradio-3.20.0 grapheme-0.6.0 h11-0.14.0 httpcore-0.16.3 httpx-0.23.3 idna-3.4 ipywidgets-8.0.4 jinja2-3.1.2 jsonschema-4.17.3 jupyterlab-widgets-3.0.5 kiwisolver-1.4.4 linkify-it-py-2.0.0 markdown-it-py-2.2.0 markupsafe-2.1.2 matplotlib-3.7.1 mdit-py-plugins-0.3.3 mdurl-0.1.2 multidict-6.0.4 numpy-1.24.2 openai-0.27.0 orjson-3.8.7 pandas-1.5.3 pathlib-1.0.1 pillow-9.4.0 pycryptodome-3.17 pydantic-1.10.5 pydub-0.25.1 pyparsing-3.0.9 pyrsistent-0.19.3 python-dotenv-1.0.0 python-multipart-0.0.6 pytz-2022.7.1 pyyaml-6.0 requests-2.28.2 rfc3986-1.5.0 sniffio-1.3.0 starlette-0.25.0 toolz-0.12.0 tqdm-4.65.0 typing-extensions-4.5.0 uc-micro-py-1.0.1 urllib3-1.26.14 uvicorn-0.20.0 websockets-10.4 widgetsnbextension-4.0.5 yarl-1.8.2\n",
      "\n",
      "\u001b[1m[\u001b[0m\u001b[34;49mnotice\u001b[0m\u001b[1;39;49m]\u001b[0m\u001b[39;49m A new release of pip available: \u001b[0m\u001b[31;49m22.2.2\u001b[0m\u001b[39;49m -> \u001b[0m\u001b[32;49m23.0.1\u001b[0m\n",
      "\u001b[1m[\u001b[0m\u001b[34;49mnotice\u001b[0m\u001b[1;39;49m]\u001b[0m\u001b[39;49m To update, run: \u001b[0m\u001b[32;49mpython3.10 -m pip install --upgrade pip\u001b[0m\n"
     ]
    }
   ],
   "source": [
    "#@title 1: Install dependencies\n",
    "!pip3 install -r https://raw.githubusercontent.com/NoDataFound/hackGPT/main/requirements.txt\n",
    "!mkdir input\n",
    "!mkdir output"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "𝙰𝚙𝚙𝚕𝚢𝚒𝚗𝚐 𝙰𝙿𝙸 𝚃𝚘𝚔𝚎𝚗\n",
      "▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒ 100%\n",
      "\n",
      "𝙰𝙿𝙸 𝙲𝚘𝚗𝚏𝚒𝚐𝚞𝚛𝚊𝚝𝚒𝚘𝚗 𝚂𝚊𝚟𝚎𝚍 𝚝𝚘 .𝚎𝚗𝚟\n"
     ]
    }
   ],
   "source": [
    "#@title 2. Setting hackGPT Environment with OpenAI API key (Generate one here: https://platform.openai.com/account/api-keys )\n",
    "#OpenAI API Setup\n",
    "from dotenv import load_dotenv\n",
    "import os\n",
    "import fade\n",
    "from pathlib import Path\n",
    "import openai\n",
    "from time import sleep\n",
    "# Load API key from an environment variable or secret management service\n",
    "\n",
    "load_dotenv(\".env\")\n",
    "apiToken = os.environ.get('OPENAI_TOKEN')\n",
    "openai.api_key = apiToken\n",
    "\n",
    "if 'OPENAI_TOKEN' in os.environ:\n",
    "   pass\n",
    "else:\n",
    "  error='''           \n",
    "                     *   )           )            (   \n",
    "                     `(     ( /((        (  (      )\\   \n",
    "                      )\\(   )\\())\\  (    )\\))(  ((((_) \n",
    "                     ((_)\\ (_))((_) )\\ ) ((   ))\\  )\\) \n",
    "                     8\"\"\"\" 8\"\"\"8  8\"\"\"8  8\"\"\"88 8\"\"\"8  \n",
    "                     8     8   8  8   8  8    8 8   8  \n",
    "                     8eeee 8eee8e 8eee8e 8    8 8eee8e \n",
    "                     88    88   8 88   8 8    8 88   8 \n",
    "                     88    88   8 88   8 8    8 88   8 \n",
    "                     88eee 88   8 88   8 8eeee8 88   8 \n",
    "                                  \n",
    "   \\033[1;33mAttempting to Set OpenAI system variable with API key.'''\n",
    "  fadederror = fade.fire(error)\n",
    "  print(fadederror)\n",
    "  Path(\".env\").touch()\n",
    "  setting_token = open(\".env\", \"a\")\n",
    "  userkey = input('Enter OpenAI API Key: ').replace(\" \",\"\")\n",
    "  setting_token.write(\"OPENAI_TOKEN=\"+'\"'+userkey+'\"\\n')\n",
    "def progress(percent=0, width=15):\n",
    "    hashes = width * percent // 100\n",
    "    blanks = width - hashes\n",
    "\n",
    "    print('\\r', hashes*'▒', blanks*' ', '', f' {percent:.0f}%', sep='',\n",
    "        end='', flush=True)\n",
    "print('𝙰𝚙𝚙𝚕𝚢𝚒𝚗𝚐 𝙰𝙿𝙸 𝚃𝚘𝚔𝚎𝚗')\n",
    "for i in range(101):\n",
    "    progress(i)\n",
    "    sleep(.01)\n",
    "print('\\n')\n",
    "print(\"𝙰𝙿𝙸 𝙲𝚘𝚗𝚏𝚒𝚐𝚞𝚛𝚊𝚝𝚒𝚘𝚗 𝚂𝚊𝚟𝚎𝚍 𝚝𝚘 .𝚎𝚗𝚟\")                      "
   ]
  },
  {
   "attachments": {},
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "`HackGPT Chatbot`"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "#@title ChatBot and Web UI for HackGPT\n",
    "#@title 4: This is the Hack button. Press it.\n",
    "#!/usr/bin/env python3\n",
    "# -*- coding: utf-8 -*-\n",
    "import os\n",
    "import fade\n",
    "from pathlib import Path\n",
    "import openai\n",
    "import requests\n",
    "import urllib.parse\n",
    "import urllib.request\n",
    "import openai\n",
    "from dotenv import load_dotenv\n",
    "import gradio as gr\n",
    "import pandas as pd\n",
    "import matplotlib.pyplot as plt\n",
    "import json\n",
    "import csv\n",
    "import datetime\n",
    "from ipywidgets import interact, Dropdown, Select\n",
    "\n",
     "# Persona name -> list of versions; drives the two linked dropdowns below.\n",
     "persona = {'HackGPT':['v0','v1 (not active)'],'ChatGPT':['Stock (not active)','DEV (not active)']}\n",
     "personaV = Dropdown(options = persona.keys())\n",
     "versionP = Dropdown()  # options are filled in by update_persona_options\n",
     "\n",
     "# Keep the version dropdown in sync with the currently selected persona.\n",
     "def update_persona_options(*args): \n",
     "    versionP.options = persona[personaV.value]\n",
     "\n",
     "personaV.observe(update_persona_options) \n",
    "@interact(Persona = personaV, Version = versionP)\n",
    "def print_city(Persona, Version):\n",
    "    if Persona == 'HackGPT':\n",
    "        if Version == 'v0':\n",
    "            hackGPT_mode = open('personas/hackGPTv1.md' ,\"r\")\n",
    "            hackGPT_mode = hackGPT_mode.read()\n",
    "\n",
     "# Timestamp available for log entries written later in this cell.\n",
     "date_string = datetime.datetime.now()\n",
     "\n",
     "load_dotenv()  \n",
     "apiToken = os.environ.get(\"OPENAI_TOKEN\")\n",
     "# NOTE(review): the Authorization value concatenates \"Token\" and the key with\n",
     "# no separating space, and the OpenAI REST API uses the \"Bearer <key>\" scheme;\n",
     "# confirm whether this headers dict is actually used anywhere before relying on it.\n",
     "headers = {\n",
     "                    \"Accept\": \"application/json; charset=utf-8\",\n",
     "                    \"Authorization\": \"Token\" + str(apiToken)\n",
     "                }\n",
     "\n",
     "\n",
     "# Fall back to an interactive prompt when no key was loaded from .env.\n",
     "if 'OPENAI_TOKEN' in os.environ:\n",
     "    pass\n",
     "else:\n",
     "    os.environ['OPENAI_TOKEN'] = input('Enter API Key: ').replace(\" \",\"\")\n",
     "token = os.environ.get(\"OPENAI_TOKEN\")\n",
     "# Two ASCII-art banner halves ('hack' and 'gpt'); printed side-by-side below.\n",
     "hack=  \"\\n\"*7 + r\"\"\" \n",
     "\n",
     "\n",
     "\n",
     "                          |¯¯¯¯| |¯¯¯¯| '/¯¯¯/.\\¯¯¯\\‚ '/¯¯¯¯/\\¯¯¯¯\\  |¯¯¯¯| |¯¯¯¯|\n",
     "                          |:·.·|_|:·.·| |:·.·|_|:·.·| |:·.·|  |____| |:·.·|./____/ \n",
     "                          |:·.·|¯|:·.·| |:·.·|¯|:·.·| |:·.·|__|¯¯¯¯| |:·.·|.\\¯¯¯¯\\ \n",
     "                          |____| |____| |____|:|____|  \\__ _\\/____/  |____| |_____|\n",
     "        \n",
     "        \n",
     "                                                                                  \"\"\" + \"\\n\"*12\n",
     "\n",
     "gpt = \"\\n\"*4 +r\"\"\" \n",
     "\n",
     "                                                                                     ______  _______  ________ \n",
     "                                                                                   /      \\|       \\|        \\\n",
     "                                                                                   |  ▓▓▓▓▓▓\\ ▓▓▓▓▓▓▓\\\\▓▓▓▓▓▓▓▓\n",
     "                                                                                   | ▓▓ __\\▓▓ ▓▓__/ ▓▓  | ▓▓   \n",
     "                                                                                   | ▓▓|    \\ ▓▓    ▓▓  | ▓▓   \n",
     " | ▓▓ \\▓▓▓▓ ▓▓▓▓▓▓▓   | ▓▓   \n",
     "| ▓▓__| ▓▓ ▓▓        | ▓▓   \n",
     " \\▓▓    ▓▓ ▓▓        | ▓▓ \n",
     "  \\▓▓▓▓▓▓ \\▓▓         \\▓▓\n",
     "                      \"\"\"                                                                                                 \n",
     "\n",
     "fadedhack = fade.water(hack)\n",
     "fadedgpt = fade.random(gpt)\n",
     "\n",
     "\n",
     "# Interleave the two colored banners line-by-line so they render as one wide logo.\n",
     "for pair in zip(*map(str.splitlines, (fadedhack, fadedgpt))): \n",
     "  print(*pair)                                                                                                \n",
    "\n",
    "#----------------------------------hackchatGPT---------------------------------------------------\n",
    "# Create the chat log; write the header row only when the file is empty so\n",
    "# repeated launches do not append duplicate header lines. The 'with' block\n",
    "# closes the file -- no explicit close() is needed.\n",
    "with open('output/chat_hackGPT_log.csv', 'a+', encoding='UTF8', newline='') as f:\n",
    "    if f.tell() == 0:\n",
    "        w = csv.writer(f, delimiter=',', quotechar='\"', quoting=csv.QUOTE_MINIMAL)\n",
    "        w.writerow(['Date', 'Persona', 'Query', 'Response'])\n",
    "    \n",
    "def add_text(state, text):\n",
    "    \"\"\"Send the user's query, prefixed with the active persona prompt, to the\n",
    "    OpenAI completion endpoint and append the exchange to the chat state.\n",
    "\n",
    "    Returns the state twice so gradio can update both the chatbot display\n",
    "    and the state component.\n",
    "    \"\"\"\n",
    "    completion = openai.Completion.create(\n",
    "        model=\"text-davinci-003\",\n",
    "        prompt=str(hackGPT_mode) + str(text),\n",
    "        temperature=0,\n",
    "        max_tokens=3000,\n",
    "        top_p=1,\n",
    "        frequency_penalty=0,\n",
    "        presence_penalty=0,\n",
    "        stop=[\"\\\"\\\"\\\"\"]\n",
    "        )\n",
    "    # Keep the raw API object and its text field in separate names.\n",
    "    response = completion['choices'][0]['text']\n",
    "    state = state + [(str(response), str(text))]\n",
    "    # Best-effort logging: a logging failure must not break the chat flow\n",
    "    # (the return inside 'finally' deliberately swallows any I/O error).\n",
    "    try:\n",
    "        with open('output/chat_hackGPT_log.csv', 'a+', encoding='UTF8', newline='') as f:\n",
    "            w = csv.writer(f, delimiter=',', quotechar='\"', quoting=csv.QUOTE_MINIMAL)\n",
    "            w.writerow([date_string, 'hackGPTv1', str(text).strip('\\n'), str(response).lstrip('\\n')])\n",
    "    finally:\n",
    "        return state, state\n",
    "\n",
    "def add_file(file_state, file):\n",
    "    \"\"\"Run the contents of an uploaded file through the completion endpoint\n",
    "    and append the result to the chat state.\n",
    "\n",
    "    Returns the state twice so gradio can update both the state component\n",
    "    and the chatbot display.\n",
    "    \"\"\"\n",
    "    # Read the whole upload first; the handle is closed before the API call.\n",
    "    with open(file.name, 'r') as targets:\n",
    "        search = targets.read()\n",
    "    completion = openai.Completion.create(\n",
    "        model=\"text-davinci-003\",\n",
    "        prompt=str(search)+\"\\n\",\n",
    "        temperature=0,\n",
    "        max_tokens=3000,\n",
    "        top_p=1,\n",
    "        frequency_penalty=0,\n",
    "        presence_penalty=0,\n",
    "        stop=[\"\\\"\\\"\\\"\"]\n",
    "        )\n",
    "    file_response = completion['choices'][0]['text']\n",
    "    file_state = file_state + [(\"\" + str(file_response), \"Processed file: \"+ str(file.name))]\n",
    "    # Best-effort logging: mirror add_text() and record the completion TEXT\n",
    "    # (previously this logged the raw API object's repr by mistake).\n",
    "    try:\n",
    "        with open('output/chat_hackGPT_file_log.csv', 'a+', encoding='UTF8', newline='') as f:\n",
    "            w = csv.writer(f, delimiter=',', quotechar='\"', quoting=csv.QUOTE_MINIMAL)\n",
    "            w.writerow([date_string, 'hackGPTv1', str(search).strip('\\n'), str(file_response).lstrip('\\n')])\n",
    "    finally:\n",
    "        return file_state, file_state\n",
    "            \n",
    "\n",
    "\n",
    "# Gradio UI: a chatbot pane, a query textbox, and a file-upload button.\n",
    "with gr.Blocks(css=\"#chatbot .output::-webkit-scrollbar {display: none;}\") as hackerchat:\n",
    "    # Conversation history shared between the two event handlers.\n",
    "    state = gr.State([])\n",
    "    chatbot = gr.Chatbot()\n",
    "\n",
    "    with gr.Row():\n",
    "        with gr.Column(scale=0.85):\n",
    "            txt = gr.Textbox(show_label=False, placeholder=\"Enter query and press enter\").style(container=False)\n",
    "        with gr.Column(scale=0.15, min_width=0):\n",
    "            btn = gr.UploadButton(\"📁\", file_types=[\"file\"])\n",
    "\n",
    "    # Submitting the textbox sends the query; a second handler then clears it.\n",
    "    txt.submit(add_text, [state, txt], [ chatbot, state])\n",
    "    txt.submit(lambda :\"\", None, txt)\n",
    "    # Uploading a file routes it through add_file().\n",
    "    btn.upload(add_file, [state, btn], [state, chatbot])\n",
    "     \n",
    "if __name__ == \"__main__\":\n",
    "    hackerchat.launch(height=1000, quiet=True, favicon_path=\"res/hackgpt_fav.png\")"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.10.8"
  },
  "vscode": {
   "interpreter": {
    "hash": "eda7e54fe21129b67f77862937907ee926f057597a3e2fa1e18ac955e40912b3"
   }
  }
 },
 "nbformat": 4,
 "nbformat_minor": 4
}
