{ "nbformat": 4, "nbformat_minor": 0, "metadata": { "colab": { "provenance": [], "collapsed_sections": [ "HjI_gRaRutfj" ], "authorship_tag": "ABX9TyOzyI9AMRRR7iatfugkbz3Y", "include_colab_link": true }, "kernelspec": { "name": "python3", "display_name": "Python 3" }, "language_info": { "name": "python" } }, "cells": [ { "cell_type": "markdown", "metadata": { "id": "view-in-github", "colab_type": "text" }, "source": [ "\"Open" ] }, { "cell_type": "code", "source": [ "!pip install -q llama-index==0.10.37 openai==1.30.1 tiktoken==0.7.0 chromadb==0.5.0 llama-index-vector-stores-chroma==0.1.7 llama-index-readers-wikipedia==0.1.4 wikipedia==1.4.0" ], "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "y_GAV7-zos0Y", "outputId": "aafcb14a-2caf-4424-c772-1d2bf6246cb2" }, "execution_count": 1, "outputs": [ { "output_type": "stream", "name": "stdout", "text": [ " Preparing metadata (setup.py) ... \u001b[?25l\u001b[?25hdone\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m67.3/67.3 kB\u001b[0m \u001b[31m1.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[?25h Installing build dependencies ... \u001b[?25l\u001b[?25hdone\n", " Getting requirements to build wheel ... \u001b[?25l\u001b[?25hdone\n", " Preparing metadata (pyproject.toml) ... \u001b[?25l\u001b[?25hdone\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m50.4/50.4 kB\u001b[0m \u001b[31m219.0 kB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m320.6/320.6 kB\u001b[0m \u001b[31m1.3 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m1.1/1.1 MB\u001b[0m \u001b[31m1.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m526.8/526.8 kB\u001b[0m \u001b[31m1.5 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m2.4/2.4 MB\u001b[0m \u001b[31m1.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m273.8/273.8 kB\u001b[0m \u001b[31m1.7 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m92.2/92.2 kB\u001b[0m \u001b[31m1.5 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m75.6/75.6 kB\u001b[0m \u001b[31m1.3 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m77.9/77.9 kB\u001b[0m \u001b[31m1.4 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m1.7/1.7 MB\u001b[0m \u001b[31m2.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m15.5/15.5 MB\u001b[0m \u001b[31m2.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m2.0/2.0 MB\u001b[0m \u001b[31m3.4 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m67.6/67.6 kB\u001b[0m \u001b[31m2.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[2K 
\u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m6.8/6.8 MB\u001b[0m \u001b[31m3.7 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m59.9/59.9 kB\u001b[0m \u001b[31m2.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m52.5/52.5 kB\u001b[0m \u001b[31m2.4 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m130.5/130.5 kB\u001b[0m \u001b[31m4.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m107.0/107.0 kB\u001b[0m \u001b[31m3.7 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m141.1/141.1 kB\u001b[0m \u001b[31m4.3 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m41.3/41.3 kB\u001b[0m \u001b[31m2.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m62.8/62.8 kB\u001b[0m \u001b[31m2.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m58.3/58.3 kB\u001b[0m \u001b[31m2.7 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m341.4/341.4 kB\u001b[0m \u001b[31m4.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m141.9/141.9 kB\u001b[0m \u001b[31m1.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m295.8/295.8 kB\u001b[0m \u001b[31m1.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m71.9/71.9 kB\u001b[0m \u001b[31m3.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m3.4/3.4 MB\u001b[0m \u001b[31m4.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m1.2/1.2 MB\u001b[0m \u001b[31m5.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m130.2/130.2 kB\u001b[0m \u001b[31m4.5 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m46.0/46.0 kB\u001b[0m \u001b[31m2.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m307.7/307.7 kB\u001b[0m \u001b[31m3.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m86.8/86.8 kB\u001b[0m \u001b[31m283.9 kB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m49.2/49.2 kB\u001b[0m \u001b[31m3.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[?25h Building wheel for wikipedia (setup.py) ... \u001b[?25l\u001b[?25hdone\n", " Building wheel for pypika (pyproject.toml) ... 
\u001b[?25l\u001b[?25hdone\n" ] } ] }, { "cell_type": "code", "source": [ "# Allows running asyncio in environments with an existing event loop, like Jupyter notebooks.\n", "\n", "import nest_asyncio\n", "\n", "nest_asyncio.apply()" ], "metadata": { "id": "Ua0KNwgvyCaj" }, "execution_count": 2, "outputs": [] }, { "cell_type": "code", "source": [ "import os\n", "\n", "os.environ['OPENAI_API_KEY'] = '[OPENAI_API_KEY]'" ], "metadata": { "id": "--Q2zk06wElp" }, "execution_count": 3, "outputs": [] }, { "cell_type": "code", "source": [ "import logging\n", "import sys\n", "\n", "#You can set the logging level to DEBUG for more verbose output,\n", "# or use level=logging.INFO for less detailed information.\n", "logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)\n", "logging.getLogger().addHandler(logging.StreamHandler(stream=sys.stdout))" ], "metadata": { "id": "tjwZjA8-wITr" }, "execution_count": 4, "outputs": [] }, { "cell_type": "markdown", "source": [ "# Wikipedia Example" ], "metadata": { "id": "HjI_gRaRutfj" } }, { "cell_type": "markdown", "source": [ "## LlamaHub Wikipedia Integration" ], "metadata": { "id": "PLUDcXpI41Q_" } }, { "cell_type": "code", "source": [ "from llama_index.readers.wikipedia import WikipediaReader\n", "\n", "# Initialize WikipediaReader\n", "reader = WikipediaReader()" ], "metadata": { "id": "2gko9Q3hrlMh" }, "execution_count": 12, "outputs": [] }, { "cell_type": "code", "source": [ "# Load data from Wikipedia\n", "documents = reader.load_data(pages=['Natural Language Processing', 'Artificial Intelligence'])" ], "metadata": { "id": "Z35ot7P1wIO0" }, "execution_count": 13, "outputs": [] }, { "cell_type": "code", "source": [ "len( documents )" ], "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "0i9Zp6BJwILk", "outputId": "a6a1e0a7-98cf-4ba4-d48a-e4f5833b4967" }, "execution_count": 14, "outputs": [ { "output_type": "execute_result", "data": { "text/plain": [ "2" ] }, "metadata": {}, "execution_count": 14 } ] }, { "cell_type": "markdown", "source": [ "## Save on DeepLake" ], "metadata": { "id": "03lff4VUTaN9" } }, { "cell_type": "code", "source": [ "import chromadb\n", "from llama_index.vector_stores.chroma import ChromaVectorStore\n", "\n", "# Load the vector store from the local storage.\n", "db = chromadb.PersistentClient(path=\"./wikipedia-articles\")\n", "chroma_collection = db.get_or_create_collection(\"wikipedia-articles\")\n", "vector_store = ChromaVectorStore(chroma_collection=chroma_collection)" ], "metadata": { "id": "eo8CTHSFTcaR" }, "execution_count": 15, "outputs": [] }, { "cell_type": "markdown", "source": [ "## Create Nodes" ], "metadata": { "id": "qkKPAnIl44ss" } }, { "cell_type": "code", "source": [ "from llama_index.core.node_parser import SimpleNodeParser\n", "\n", "# Initialize the parser\n", "parser = SimpleNodeParser.from_defaults(chunk_size=512, chunk_overlap=20)\n", "\n", "# Parse documents into nodes\n", "nodes = parser.get_nodes_from_documents(documents)\n", "print( len( nodes ) )" ], "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "eB6Rc0U0wII_", "outputId": "ec338be1-deca-45a7-e6ba-9997e4b7e25a" }, "execution_count": 20, "outputs": [ { "output_type": "stream", "name": "stdout", "text": [ "45\n" ] } ] }, { "cell_type": "markdown", "source": [ "## Storage Context" ], "metadata": { "id": "E8tHMS5ZucFE" } }, { "cell_type": "code", "source": [ "from llama_index.core import StorageContext\n", "\n", "storage_context = StorageContext.from_defaults(vector_store=vector_store)" ], "metadata": { 
"id": "eWFtVpM_TcTQ" }, "execution_count": 18, "outputs": [] }, { "cell_type": "markdown", "source": [ "## Create index from Documents" ], "metadata": { "id": "kCgdd197CTDt" } }, { "cell_type": "code", "source": [ "from llama_index.core import VectorStoreIndex\n", "\n", "index = VectorStoreIndex(\n", " nodes=nodes, storage_context=storage_context\n", ")" ], "metadata": { "id": "g3GCf8LrULIW" }, "execution_count": 24, "outputs": [] }, { "cell_type": "code", "source": [ "query_engine = index.as_query_engine()\n", "response = query_engine.query(\"What does NLP stands for?\")\n", "response.response" ], "metadata": { "colab": { "base_uri": "https://localhost:8080/", "height": 35 }, "id": "G7BdNn-Q5AlG", "outputId": "a311ec41-6cdc-4fe1-fb59-ad338d0b6149" }, "execution_count": 25, "outputs": [ { "output_type": "execute_result", "data": { "text/plain": [ "'NLP stands for Natural Language Processing.'" ], "application/vnd.google.colaboratory.intrinsic+json": { "type": "string" } }, "metadata": {}, "execution_count": 25 } ] }, { "cell_type": "markdown", "source": [ "## Store/Load Vector Store" ], "metadata": { "id": "r6cGiUtxu5ga" } }, { "cell_type": "code", "source": [ "# Index Storage Checks\n", "import os.path\n", "from llama_index.core import StorageContext, load_index_from_storage\n", "\n", "# Let's see if our index already exists in storage.\n", "if not os.path.exists(\"./storage\"):\n", " index.storage_context.persist()\n", "\n", "else:\n", " # If the index already exists, we'll just load it:\n", " storage_context = StorageContext.from_defaults(persist_dir=\"./storage\")\n", " index = load_index_from_storage(storage_context)" ], "metadata": { "id": "GHtB0C0mu7f6" }, "execution_count": 28, "outputs": [] }, { "cell_type": "markdown", "source": [ "# Paul Graham Essay" ], "metadata": { "id": "iF8hwfMKuzst" } }, { "cell_type": "code", "source": [ "!mkdir -p './paul_graham/'\n", "!wget 'https://raw.githubusercontent.com/run-llama/llama_index/main/docs/docs/examples/data/paul_graham/paul_graham_essay.txt' -O './paul_graham/paul_graham_essay.txt'" ], "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "DrzbBAglwUo0", "outputId": "5bd17a72-3733-4b8c-e421-5f363f05895f" }, "execution_count": 6, "outputs": [ { "output_type": "stream", "name": "stdout", "text": [ "--2024-07-24 18:48:21-- https://raw.githubusercontent.com/run-llama/llama_index/main/docs/docs/examples/data/paul_graham/paul_graham_essay.txt\n", "Resolving raw.githubusercontent.com (raw.githubusercontent.com)... 185.199.109.133, 185.199.108.133, 185.199.111.133, ...\n", "Connecting to raw.githubusercontent.com (raw.githubusercontent.com)|185.199.109.133|:443... connected.\n", "HTTP request sent, awaiting response... 
200 OK\n", "Length: 75042 (73K) [text/plain]\n", "Saving to: ‘./paul_graham/paul_graham_essay.txt’\n", "\n", "\r ./paul_gr 0%[ ] 0 --.-KB/s \r./paul_graham/paul_ 100%[===================>] 73.28K --.-KB/s in 0.02s \n", "\n", "2024-07-24 18:48:21 (2.95 MB/s) - ‘./paul_graham/paul_graham_essay.txt’ saved [75042/75042]\n", "\n" ] } ] }, { "cell_type": "code", "source": [ "from llama_index.core import SimpleDirectoryReader\n", "\n", "# load documents\n", "documents = SimpleDirectoryReader(\"./paul_graham\").load_data()" ], "metadata": { "id": "S8-QmnkCwIiU" }, "execution_count": 7, "outputs": [] }, { "cell_type": "code", "source": [ "import chromadb\n", "from llama_index.vector_stores.chroma import ChromaVectorStore\n", "\n", "# Load the vector store from the local storage.\n", "db = chromadb.PersistentClient(path=\"./paul-graham\")\n", "chroma_collection = db.get_or_create_collection(\"paul-graham\")\n", "vector_store = ChromaVectorStore(chroma_collection=chroma_collection)" ], "metadata": { "id": "DfWglp75xc5f" }, "execution_count": 8, "outputs": [] }, { "cell_type": "code", "source": [ "from llama_index.core import StorageContext\n", "\n", "storage_context = StorageContext.from_defaults(vector_store=vector_store)" ], "metadata": { "id": "-EVBlUC-xcj1" }, "execution_count": 11, "outputs": [] }, { "cell_type": "code", "source": [ "from llama_index.core import VectorStoreIndex\n", "\n", "index = VectorStoreIndex.from_documents(\n", " documents, storage_context=storage_context\n", ")" ], "metadata": { "id": "8lMa4h9Cwn8b" }, "execution_count": 12, "outputs": [] }, { "cell_type": "code", "source": [ "query_engine = index.as_query_engine(similarity_top_k=10)" ], "metadata": { "id": "tJsfskjHxj0e" }, "execution_count": 13, "outputs": [] }, { "cell_type": "code", "source": [ "from llama_index.core.tools import QueryEngineTool, ToolMetadata\n", "from llama_index.core.query_engine import SubQuestionQueryEngine\n", "\n", "query_engine_tools = [\n", " QueryEngineTool(\n", " query_engine=query_engine,\n", " metadata=ToolMetadata(\n", " name=\"pg_essay\",\n", " description=\"Paul Graham essay on What I Worked On\",\n", " ),\n", " ),\n", "]\n", "\n", "query_engine = SubQuestionQueryEngine.from_defaults(\n", " query_engine_tools=query_engine_tools,\n", " use_async=True,\n", ")" ], "metadata": { "id": "yL9TsFwxxuoA" }, "execution_count": 14, "outputs": [] }, { "cell_type": "code", "source": [ "response = query_engine.query(\n", " \"How was Paul Grahams life different before, during, and after YC?\"\n", ")" ], "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "JWc_n5Lhx2bq", "outputId": "972fa40f-065b-420a-d5e8-af36c1a2ad2d" }, "execution_count": 15, "outputs": [ { "output_type": "stream", "name": "stdout", "text": [ "Generated 3 sub questions.\n", "\u001b[1;3;38;2;237;90;200m[pg_essay] Q: What did Paul Graham work on before Y Combinator?\n", "\u001b[0m\u001b[1;3;38;2;90;149;237m[pg_essay] Q: What did Paul Graham work on during Y Combinator?\n", "\u001b[0m\u001b[1;3;38;2;11;159;203m[pg_essay] Q: What did Paul Graham work on after Y Combinator?\n", "\u001b[0m\u001b[1;3;38;2;237;90;200m[pg_essay] A: Paul Graham worked on building online stores through a web app called Viaweb before starting Y Combinator.\n", "\u001b[0m\u001b[1;3;38;2;11;159;203m[pg_essay] A: After Y Combinator, Paul Graham started painting.\n", "\u001b[0m\u001b[1;3;38;2;90;149;237m[pg_essay] A: Paul Graham worked on various aspects during Y Combinator, including being an angel firm, funding startups in batches, providing 
{ "cell_type": "code", "source": [ "print(\">>> The final response:\\n\", response)" ], "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "T-ZC66Ltx5Za", "outputId": "f45ec6bb-ee45-4526-80bd-ca7c30cdbf8f" }, "execution_count": 16, "outputs": [ { "output_type": "stream", "name": "stdout", "text": [ ">>> The final response:\n", " Paul Graham's life involved building online stores through Viaweb before Y Combinator, working on various aspects within Y Combinator such as funding startups and providing support, and then transitioning to painting after Y Combinator.\n" ] } ] },
{ "cell_type": "code", "source": [], "metadata": { "id": "27fS3JcDyFSj" }, "execution_count": null, "outputs": [] }
] }