{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# Knowledge Graph Construction w/ WikiData Filtering\n",
    "\n",
    "In this notebook, we compare using [REBEL](https://huggingface.co/Babelscape/rebel-large) for knowledge graph construction with and without filtering from wikidata.\n",
    "\n",
    "This is a simplified version, find out more about using wikipedia for filtering, check here\n",
    "- [Make Meaningful Knowledge Graph from OpenSource REBEL Model](https://medium.com/@haiyangli_38602/make-meaningful-knowledge-graph-from-opensource-rebel-model-6f9729a55527)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## Setup"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "%pip install llama-index-llms-openai\n",
    "%pip install llama-index-readers-web\n",
    "%pip install llama-index-readers-papers"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Requirement already satisfied: llama_index in /usr/local/lib/python3.10/dist-packages (0.8.37)\n",
      "Requirement already satisfied: transformers in /usr/local/lib/python3.10/dist-packages (4.33.3)\n",
      "Requirement already satisfied: wikipedia in /usr/local/lib/python3.10/dist-packages (1.4.0)\n",
      "Requirement already satisfied: html2text in /usr/local/lib/python3.10/dist-packages (2020.1.16)\n",
      "Requirement already satisfied: pyvis in /usr/local/lib/python3.10/dist-packages (0.3.2)\n",
      "Requirement already satisfied: tiktoken in /usr/local/lib/python3.10/dist-packages (from llama_index) (0.5.1)\n",
      "Requirement already satisfied: dataclasses-json in /usr/local/lib/python3.10/dist-packages (from llama_index) (0.6.1)\n",
      "Requirement already satisfied: langchain>=0.0.303 in /usr/local/lib/python3.10/dist-packages (from llama_index) (0.0.305)\n",
      "Requirement already satisfied: sqlalchemy>=2.0.15 in /usr/local/lib/python3.10/dist-packages (from llama_index) (2.0.20)\n",
      "Requirement already satisfied: numpy in /usr/local/lib/python3.10/dist-packages (from llama_index) (1.23.5)\n",
      "Requirement already satisfied: tenacity<9.0.0,>=8.2.0 in /usr/local/lib/python3.10/dist-packages (from llama_index) (8.2.3)\n",
      "Requirement already satisfied: openai>=0.26.4 in /usr/local/lib/python3.10/dist-packages (from llama_index) (0.28.1)\n",
      "Requirement already satisfied: pandas in /usr/local/lib/python3.10/dist-packages (from llama_index) (1.5.3)\n",
      "Requirement already satisfied: urllib3<2 in /usr/local/lib/python3.10/dist-packages (from llama_index) (1.26.16)\n",
      "Requirement already satisfied: fsspec>=2023.5.0 in /usr/local/lib/python3.10/dist-packages (from llama_index) (2023.6.0)\n",
      "Requirement already satisfied: typing-inspect>=0.8.0 in /usr/local/lib/python3.10/dist-packages (from llama_index) (0.9.0)\n",
      "Requirement already satisfied: typing-extensions>=4.5.0 in /usr/local/lib/python3.10/dist-packages (from llama_index) (4.5.0)\n",
      "Requirement already satisfied: beautifulsoup4 in /usr/local/lib/python3.10/dist-packages (from llama_index) (4.11.2)\n",
      "Requirement already satisfied: nest-asyncio in /usr/local/lib/python3.10/dist-packages (from llama_index) (1.5.7)\n",
      "Requirement already satisfied: nltk in /usr/local/lib/python3.10/dist-packages (from llama_index) (3.8.1)\n",
      "Requirement already satisfied: tree-sitter-languages in /usr/local/lib/python3.10/dist-packages (from llama_index) (1.7.0)\n",
      "Requirement already satisfied: filelock in /usr/local/lib/python3.10/dist-packages (from transformers) (3.12.2)\n",
      "Requirement already satisfied: huggingface-hub<1.0,>=0.15.1 in /usr/local/lib/python3.10/dist-packages (from transformers) (0.17.3)\n",
      "Requirement already satisfied: packaging>=20.0 in /usr/local/lib/python3.10/dist-packages (from transformers) (23.1)\n",
      "Requirement already satisfied: pyyaml>=5.1 in /usr/local/lib/python3.10/dist-packages (from transformers) (6.0.1)\n",
      "Requirement already satisfied: regex!=2019.12.17 in /usr/local/lib/python3.10/dist-packages (from transformers) (2023.6.3)\n",
      "Requirement already satisfied: requests in /usr/local/lib/python3.10/dist-packages (from transformers) (2.31.0)\n",
      "Requirement already satisfied: tokenizers!=0.11.3,<0.14,>=0.11.1 in /usr/local/lib/python3.10/dist-packages (from transformers) (0.13.3)\n",
      "Requirement already satisfied: safetensors>=0.3.1 in /usr/local/lib/python3.10/dist-packages (from transformers) (0.3.3)\n",
      "Requirement already satisfied: tqdm>=4.27 in /usr/local/lib/python3.10/dist-packages (from transformers) (4.66.1)\n",
      "Requirement already satisfied: ipython>=5.3.0 in /usr/local/lib/python3.10/dist-packages (from pyvis) (7.34.0)\n",
      "Requirement already satisfied: jinja2>=2.9.6 in /usr/local/lib/python3.10/dist-packages (from pyvis) (3.1.2)\n",
      "Requirement already satisfied: jsonpickle>=1.4.1 in /usr/local/lib/python3.10/dist-packages (from pyvis) (3.0.2)\n",
      "Requirement already satisfied: networkx>=1.11 in /usr/local/lib/python3.10/dist-packages (from pyvis) (3.1)\n",
      "Requirement already satisfied: setuptools>=18.5 in /usr/local/lib/python3.10/dist-packages (from ipython>=5.3.0->pyvis) (67.7.2)\n",
      "Requirement already satisfied: jedi>=0.16 in /usr/local/lib/python3.10/dist-packages (from ipython>=5.3.0->pyvis) (0.19.0)\n",
      "Requirement already satisfied: decorator in /usr/local/lib/python3.10/dist-packages (from ipython>=5.3.0->pyvis) (4.4.2)\n",
      "Requirement already satisfied: pickleshare in /usr/local/lib/python3.10/dist-packages (from ipython>=5.3.0->pyvis) (0.7.5)\n",
      "Requirement already satisfied: traitlets>=4.2 in /usr/local/lib/python3.10/dist-packages (from ipython>=5.3.0->pyvis) (5.7.1)\n",
      "Requirement already satisfied: prompt-toolkit!=3.0.0,!=3.0.1,<3.1.0,>=2.0.0 in /usr/local/lib/python3.10/dist-packages (from ipython>=5.3.0->pyvis) (3.0.39)\n",
      "Requirement already satisfied: pygments in /usr/local/lib/python3.10/dist-packages (from ipython>=5.3.0->pyvis) (2.16.1)\n",
      "Requirement already satisfied: backcall in /usr/local/lib/python3.10/dist-packages (from ipython>=5.3.0->pyvis) (0.2.0)\n",
      "Requirement already satisfied: matplotlib-inline in /usr/local/lib/python3.10/dist-packages (from ipython>=5.3.0->pyvis) (0.1.6)\n",
      "Requirement already satisfied: pexpect>4.3 in /usr/local/lib/python3.10/dist-packages (from ipython>=5.3.0->pyvis) (4.8.0)\n",
      "Requirement already satisfied: MarkupSafe>=2.0 in /usr/local/lib/python3.10/dist-packages (from jinja2>=2.9.6->pyvis) (2.1.3)\n",
      "Requirement already satisfied: aiohttp<4.0.0,>=3.8.3 in /usr/local/lib/python3.10/dist-packages (from langchain>=0.0.303->llama_index) (3.8.5)\n",
      "Requirement already satisfied: anyio<4.0 in /usr/local/lib/python3.10/dist-packages (from langchain>=0.0.303->llama_index) (3.7.1)\n",
      "Requirement already satisfied: async-timeout<5.0.0,>=4.0.0 in /usr/local/lib/python3.10/dist-packages (from langchain>=0.0.303->llama_index) (4.0.3)\n",
      "Requirement already satisfied: jsonpatch<2.0,>=1.33 in /usr/local/lib/python3.10/dist-packages (from langchain>=0.0.303->llama_index) (1.33)\n",
      "Requirement already satisfied: langsmith<0.1.0,>=0.0.38 in /usr/local/lib/python3.10/dist-packages (from langchain>=0.0.303->llama_index) (0.0.41)\n",
      "Requirement already satisfied: numexpr<3.0.0,>=2.8.4 in /usr/local/lib/python3.10/dist-packages (from langchain>=0.0.303->llama_index) (2.8.5)\n",
      "Requirement already satisfied: pydantic<3,>=1 in /usr/local/lib/python3.10/dist-packages (from langchain>=0.0.303->llama_index) (1.10.12)\n",
      "Requirement already satisfied: marshmallow<4.0.0,>=3.18.0 in /usr/local/lib/python3.10/dist-packages (from dataclasses-json->llama_index) (3.20.1)\n",
      "Requirement already satisfied: charset-normalizer<4,>=2 in /usr/local/lib/python3.10/dist-packages (from requests->transformers) (3.2.0)\n",
      "Requirement already satisfied: idna<4,>=2.5 in /usr/local/lib/python3.10/dist-packages (from requests->transformers) (3.4)\n",
      "Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.10/dist-packages (from requests->transformers) (2023.7.22)\n",
      "Requirement already satisfied: greenlet!=0.4.17 in /usr/local/lib/python3.10/dist-packages (from sqlalchemy>=2.0.15->llama_index) (2.0.2)\n",
      "Requirement already satisfied: mypy-extensions>=0.3.0 in /usr/local/lib/python3.10/dist-packages (from typing-inspect>=0.8.0->llama_index) (1.0.0)\n",
      "Requirement already satisfied: soupsieve>1.2 in /usr/local/lib/python3.10/dist-packages (from beautifulsoup4->llama_index) (2.5)\n",
      "Requirement already satisfied: click in /usr/local/lib/python3.10/dist-packages (from nltk->llama_index) (8.1.7)\n",
      "Requirement already satisfied: joblib in /usr/local/lib/python3.10/dist-packages (from nltk->llama_index) (1.3.2)\n",
      "Requirement already satisfied: python-dateutil>=2.8.1 in /usr/local/lib/python3.10/dist-packages (from pandas->llama_index) (2.8.2)\n",
      "Requirement already satisfied: pytz>=2020.1 in /usr/local/lib/python3.10/dist-packages (from pandas->llama_index) (2023.3.post1)\n",
      "Requirement already satisfied: tree-sitter in /usr/local/lib/python3.10/dist-packages (from tree-sitter-languages->llama_index) (0.20.2)\n",
      "Requirement already satisfied: attrs>=17.3.0 in /usr/local/lib/python3.10/dist-packages (from aiohttp<4.0.0,>=3.8.3->langchain>=0.0.303->llama_index) (23.1.0)\n",
      "Requirement already satisfied: multidict<7.0,>=4.5 in /usr/local/lib/python3.10/dist-packages (from aiohttp<4.0.0,>=3.8.3->langchain>=0.0.303->llama_index) (6.0.4)\n",
      "Requirement already satisfied: yarl<2.0,>=1.0 in /usr/local/lib/python3.10/dist-packages (from aiohttp<4.0.0,>=3.8.3->langchain>=0.0.303->llama_index) (1.9.2)\n",
      "Requirement already satisfied: frozenlist>=1.1.1 in /usr/local/lib/python3.10/dist-packages (from aiohttp<4.0.0,>=3.8.3->langchain>=0.0.303->llama_index) (1.4.0)\n",
      "Requirement already satisfied: aiosignal>=1.1.2 in /usr/local/lib/python3.10/dist-packages (from aiohttp<4.0.0,>=3.8.3->langchain>=0.0.303->llama_index) (1.3.1)\n",
      "Requirement already satisfied: sniffio>=1.1 in /usr/local/lib/python3.10/dist-packages (from anyio<4.0->langchain>=0.0.303->llama_index) (1.3.0)\n",
      "Requirement already satisfied: exceptiongroup in /usr/local/lib/python3.10/dist-packages (from anyio<4.0->langchain>=0.0.303->llama_index) (1.1.3)\n",
      "Requirement already satisfied: parso<0.9.0,>=0.8.3 in /usr/local/lib/python3.10/dist-packages (from jedi>=0.16->ipython>=5.3.0->pyvis) (0.8.3)\n",
      "Requirement already satisfied: jsonpointer>=1.9 in /usr/local/lib/python3.10/dist-packages (from jsonpatch<2.0,>=1.33->langchain>=0.0.303->llama_index) (2.4)\n",
      "Requirement already satisfied: ptyprocess>=0.5 in /usr/local/lib/python3.10/dist-packages (from pexpect>4.3->ipython>=5.3.0->pyvis) (0.7.0)\n",
      "Requirement already satisfied: wcwidth in /usr/local/lib/python3.10/dist-packages (from prompt-toolkit!=3.0.0,!=3.0.1,<3.1.0,>=2.0.0->ipython>=5.3.0->pyvis) (0.2.6)\n",
      "Requirement already satisfied: six>=1.5 in /usr/local/lib/python3.10/dist-packages (from python-dateutil>=2.8.1->pandas->llama_index) (1.16.0)\n"
     ]
    }
   ],
   "source": [
    "!pip install llama_index transformers wikipedia html2text pyvis"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "import logging\n",
    "import sys\n",
    "\n",
    "logging.basicConfig(stream=sys.stdout, level=logging.INFO)\n",
    "logging.getLogger().addHandler(logging.StreamHandler(stream=sys.stdout))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "from llama_index.core import KnowledgeGraphIndex\n",
    "from llama_index.readers.web import SimpleWebPageReader\n",
    "from llama_index.core.graph_stores import SimpleGraphStore\n",
    "from llama_index.core import StorageContext\n",
    "from llama_index.llms.openai import OpenAI"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## 1. extract via huggingface pipeline\n",
    "\n",
    "The initial pipeline uses the provided extraction code from the [HuggingFace model card](https://huggingface.co/Babelscape/rebel-large)."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "from transformers import pipeline\n",
    "\n",
    "triplet_extractor = pipeline(\n",
    "    \"text2text-generation\",\n",
    "    model=\"Babelscape/rebel-large\",\n",
    "    tokenizer=\"Babelscape/rebel-large\",\n",
    "    # comment this line to run on CPU\n",
    "    device=\"cuda:0\",\n",
    ")\n",
    "\n",
    "\n",
    "def extract_triplets(input_text):\n",
    "    text = triplet_extractor.tokenizer.batch_decode(\n",
    "        [\n",
    "            triplet_extractor(\n",
    "                input_text, return_tensors=True, return_text=False\n",
    "            )[0][\"generated_token_ids\"]\n",
    "        ]\n",
    "    )[0]\n",
    "\n",
    "    triplets = []\n",
    "    relation, subject, relation, object_ = \"\", \"\", \"\", \"\"\n",
    "    text = text.strip()\n",
    "    current = \"x\"\n",
    "    for token in (\n",
    "        text.replace(\"<s>\", \"\")\n",
    "        .replace(\"<pad>\", \"\")\n",
    "        .replace(\"</s>\", \"\")\n",
    "        .split()\n",
    "    ):\n",
    "        if token == \"<triplet>\":\n",
    "            current = \"t\"\n",
    "            if relation != \"\":\n",
    "                triplets.append(\n",
    "                    (subject.strip(), relation.strip(), object_.strip())\n",
    "                )\n",
    "                relation = \"\"\n",
    "            subject = \"\"\n",
    "        elif token == \"<subj>\":\n",
    "            current = \"s\"\n",
    "            if relation != \"\":\n",
    "                triplets.append(\n",
    "                    (subject.strip(), relation.strip(), object_.strip())\n",
    "                )\n",
    "            object_ = \"\"\n",
    "        elif token == \"<obj>\":\n",
    "            current = \"o\"\n",
    "            relation = \"\"\n",
    "        else:\n",
    "            if current == \"t\":\n",
    "                subject += \" \" + token\n",
    "            elif current == \"s\":\n",
    "                object_ += \" \" + token\n",
    "            elif current == \"o\":\n",
    "                relation += \" \" + token\n",
    "\n",
    "    if subject != \"\" and relation != \"\" and object_ != \"\":\n",
    "        triplets.append((subject.strip(), relation.strip(), object_.strip()))\n",
    "\n",
    "    return triplets"
   ]
  },
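  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "As a quick sanity check (not part of the original model card code), we can run the extractor on a short sentence and inspect the `(subject, relation, object)` tuples it returns; the exact triplets depend on the model output."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# hypothetical sanity check; the exact triplets depend on the model output\n",
    "sample_text = \"Paris is the capital and most populous city of France.\"\n",
    "print(extract_triplets(sample_text))"
   ]
  },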
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## 2. Extract with wiki filtering\n",
    "\n",
    "Optionally, we can filter our extracted relations using data from wikipedia.\n",
    "\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "import wikipedia\n",
    "\n",
    "\n",
    "class WikiFilter:\n",
    "    def __init__(self):\n",
    "        self.cache = {}\n",
    "\n",
    "    def filter(self, candidate_entity):\n",
    "        # check the cache to avoid network calls\n",
    "        if candidate_entity in self.cache:\n",
    "            return self.cache[candidate_entity][\"title\"]\n",
    "\n",
    "        # pull the page from wikipedia -- if it exists\n",
    "        try:\n",
    "            page = wikipedia.page(candidate_entity, auto_suggest=False)\n",
    "            entity_data = {\n",
    "                \"title\": page.title,\n",
    "                \"url\": page.url,\n",
    "                \"summary\": page.summary,\n",
    "            }\n",
    "\n",
    "            # cache the page title and original entity\n",
    "            self.cache[candidate_entity] = entity_data\n",
    "            self.cache[page.title] = entity_data\n",
    "\n",
    "            return entity_data[\"title\"]\n",
    "        except:\n",
    "            return None\n",
    "\n",
    "\n",
    "wiki_filter = WikiFilter()\n",
    "\n",
    "\n",
    "def extract_triplets_wiki(text):\n",
    "    relations = extract_triplets(text)\n",
    "\n",
    "    filtered_relations = []\n",
    "    for relation in relations:\n",
    "        (subj, rel, obj) = relation\n",
    "        filtered_subj = wiki_filter.filter(subj)\n",
    "        filtered_obj = wiki_filter.filter(obj)\n",
    "\n",
    "        # skip if at least one entity not linked to wiki\n",
    "        if filtered_subj is None and filtered_obj is None:\n",
    "            continue\n",
    "\n",
    "        filtered_relations.append(\n",
    "            (\n",
    "                filtered_subj or subj,\n",
    "                rel,\n",
    "                filtered_obj or obj,\n",
    "            )\n",
    "        )\n",
    "\n",
    "    return filtered_relations"
   ]
  },
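  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "For example (assuming network access to Wikipedia), the filter resolves a raw entity string to its canonical page title, and repeated lookups are served from the in-memory cache instead of hitting the network again."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# hypothetical example; requires network access to wikipedia\n",
    "print(wiki_filter.filter(\"NYC\"))  # resolves to the canonical page title\n",
    "print(wiki_filter.filter(\"NYC\"))  # second lookup is served from the cache\n",
    "print(len(wiki_filter.cache))"
   ]
  },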
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## Run with Llama_Index"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "from llama_index.core import download_loader\n",
    "\n",
    "from llama_index.readers.papers import ArxivReader\n",
    "\n",
    "loader = ArxivReader()\n",
    "documents = loader.load_data(\n",
    "    search_query=\"Retrieval Augmented Generation\", max_results=1\n",
    ")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "import os\n",
    "import openai\n",
    "\n",
    "os.environ[\"OPENAI_API_KEY\"] = \"sk-...\"\n",
    "openai.api_key = os.environ[\"OPENAI_API_KEY\"]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "from llama_index.core import Document\n",
    "\n",
    "# merge all documents into one, since it's split by page\n",
    "documents = [Document(text=\"\".join([x.text for x in documents]))]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "[nltk_data] Downloading package punkt to /tmp/llama_index...\n",
      "[nltk_data]   Unzipping tokenizers/punkt.zip.\n"
     ]
    }
   ],
   "source": [
    "from llama_index.core import Settings\n",
    "\n",
    "# set global configs\n",
    "llm = OpenAI(temperature=0.1, model=\"gpt-3.5-turbo\")\n",
    "Settings.llm = llm\n",
    "Settings.chunk_size = 256\n",
    "\n",
    "# set up graph storage context\n",
    "graph_store = SimpleGraphStore()\n",
    "storage_context = StorageContext.from_defaults(graph_store=graph_store)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "NOTE: This next cell takes about 4mins on GPU."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/usr/local/lib/python3.10/dist-packages/transformers/pipelines/base.py:1101: UserWarning: You seem to be using the pipelines sequentially on GPU. In order to maximize efficiency please use a dataset\n",
      "  warnings.warn(\n",
      "/usr/local/lib/python3.10/dist-packages/wikipedia/wikipedia.py:389: GuessedAtParserWarning: No parser was explicitly specified, so I'm using the best available HTML parser for this system (\"lxml\"). This usually isn't a problem, but if you run this code on another system, or in a different virtual environment, it may use a different parser and behave differently.\n",
      "\n",
      "The code that caused this warning is on line 389 of the file /usr/local/lib/python3.10/dist-packages/wikipedia/wikipedia.py. To get rid of this warning, pass the additional argument 'features=\"lxml\"' to the BeautifulSoup constructor.\n",
      "\n",
      "  lis = BeautifulSoup(html).find_all('li')\n"
     ]
    }
   ],
   "source": [
    "index = KnowledgeGraphIndex.from_documents(\n",
    "    documents,\n",
    "    max_triplets_per_chunk=3,\n",
    "    kg_triplet_extract_fn=extract_triplets,\n",
    "    storage_context=storage_context,\n",
    "    include_embeddings=True,\n",
    ")\n",
    "\n",
    "index1 = KnowledgeGraphIndex.from_documents(\n",
    "    documents,\n",
    "    max_triplets_per_chunk=3,\n",
    "    kg_triplet_extract_fn=extract_triplets_wiki,\n",
    "    storage_context=storage_context,\n",
    "    include_embeddings=True,\n",
    ")"
   ]
  },
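  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Optionally, before visualizing, we can sanity-check either index with a query engine. This is a minimal sketch and is not required for the comparison below; the exact response will vary."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# optional sketch: query the unfiltered index using the OpenAI LLM configured above\n",
    "query_engine = index.as_query_engine(\n",
    "    include_text=False, response_mode=\"tree_summarize\"\n",
    ")\n",
    "response = query_engine.query(\"What is Retrieval Augmented Generation?\")\n",
    "print(response)"
   ]
  },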
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "## create graph\n",
    "from pyvis.network import Network\n",
    "\n",
    "g = index.get_networkx_graph()\n",
    "net = Network(notebook=True, cdn_resources=\"in_line\", directed=True)\n",
    "net.from_nx(g)\n",
    "net.save_graph(\"non_filtered_graph.html\")\n",
    "\n",
    "from IPython.display import HTML\n",
    "\n",
    "HTML(filename=\"non_filtered_graph.html\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "## create graph\n",
    "from pyvis.network import Network\n",
    "\n",
    "g = index1.get_networkx_graph()\n",
    "net = Network(notebook=True, cdn_resources=\"in_line\", directed=True)\n",
    "net.from_nx(g)\n",
    "net.save_graph(\"wiki_filtered_graph.html\")\n",
    "\n",
    "from IPython.display import HTML\n",
    "\n",
    "HTML(filename=\"wiki_filtered_graph.html\")"
   ]
  }
 ],
 "metadata": {
  "accelerator": "GPU",
  "colab": {
   "gpuType": "T4",
   "provenance": []
  },
  "kernelspec": {
   "display_name": "Python 3 (ipykernel)",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 4
}
