AlaFalaki commited on
Commit
7e62fee
·
1 Parent(s): 14a73a7

Created using Colab

Browse files
Files changed (1) hide show
  1. notebooks/LlamaIndex_101.ipynb +586 -0
notebooks/LlamaIndex_101.ipynb ADDED
@@ -0,0 +1,586 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "nbformat": 4,
3
+ "nbformat_minor": 0,
4
+ "metadata": {
5
+ "colab": {
6
+ "provenance": [],
7
+ "authorship_tag": "ABX9TyNzaxKiokXX5SPot1IBiMhR",
8
+ "include_colab_link": true
9
+ },
10
+ "kernelspec": {
11
+ "name": "python3",
12
+ "display_name": "Python 3"
13
+ },
14
+ "language_info": {
15
+ "name": "python"
16
+ }
17
+ },
18
+ "cells": [
19
+ {
20
+ "cell_type": "markdown",
21
+ "metadata": {
22
+ "id": "view-in-github",
23
+ "colab_type": "text"
24
+ },
25
+ "source": [
26
+ "<a href=\"https://colab.research.google.com/github/towardsai/ai-tutor-rag-system/blob/main/notebooks/LlamaIndex_101.ipynb\" target=\"_parent\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/></a>"
27
+ ]
28
+ },
29
+ {
30
+ "cell_type": "code",
31
+ "source": [
32
+ "!pip install -q llama-index==0.10.37 openai==1.30.1 tiktoken==0.7.0 chromadb==0.5.0 llama-index-vector-stores-chroma==0.1.7 llama-index-readers-wikipedia==0.1.4 wikipedia==1.4.0"
33
+ ],
34
+ "metadata": {
35
+ "colab": {
36
+ "base_uri": "https://localhost:8080/"
37
+ },
38
+ "id": "y_GAV7-zos0Y",
39
+ "outputId": "74d4a3c4-3576-455b-fbe2-1b8b67bf20d5"
40
+ },
41
+ "execution_count": 10,
42
+ "outputs": [
43
+ {
44
+ "output_type": "stream",
45
+ "name": "stdout",
46
+ "text": [
47
+ " Preparing metadata (setup.py) ... \u001b[?25l\u001b[?25hdone\n",
48
+ " Building wheel for wikipedia (setup.py) ... \u001b[?25l\u001b[?25hdone\n"
49
+ ]
50
+ }
51
+ ]
52
+ },
53
+ {
54
+ "cell_type": "code",
55
+ "source": [
56
+ "# Allows running asyncio in environments with an existing event loop, like Jupyter notebooks.\n",
57
+ "\n",
58
+ "import nest_asyncio\n",
59
+ "\n",
60
+ "nest_asyncio.apply()"
61
+ ],
62
+ "metadata": {
63
+ "id": "Ua0KNwgvyCaj"
64
+ },
65
+ "execution_count": 49,
66
+ "outputs": []
67
+ },
68
+ {
69
+ "cell_type": "code",
70
+ "source": [
71
+ "import os\n",
72
+ "\n",
73
+ "os.environ['OPENAI_API_KEY'] = '<YOUR_OPENAI_API_KEY>'  # SECURITY: never commit a real API key — load it from an environment variable, getpass, or Colab secrets"
74
+ ],
75
+ "metadata": {
76
+ "id": "--Q2zk06wElp"
77
+ },
78
+ "execution_count": 2,
79
+ "outputs": []
80
+ },
81
+ {
82
+ "cell_type": "code",
83
+ "source": [
84
+ "import logging\n",
85
+ "import sys\n",
86
+ "\n",
87
+ "#You can set the logging level to DEBUG for more verbose output,\n",
88
+ "# or use level=logging.INFO for less detailed information.\n",
89
+ "logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)\n",
90
+ "logging.getLogger().addHandler(logging.StreamHandler(stream=sys.stdout))"
91
+ ],
92
+ "metadata": {
93
+ "id": "tjwZjA8-wITr"
94
+ },
95
+ "execution_count": 3,
96
+ "outputs": []
97
+ },
98
+ {
99
+ "cell_type": "markdown",
100
+ "source": [
101
+ "# Wikipedia Example"
102
+ ],
103
+ "metadata": {
104
+ "id": "HjI_gRaRutfj"
105
+ }
106
+ },
107
+ {
108
+ "cell_type": "markdown",
109
+ "source": [
110
+ "## LlamaHub Wikipedia Integration"
111
+ ],
112
+ "metadata": {
113
+ "id": "PLUDcXpI41Q_"
114
+ }
115
+ },
116
+ {
117
+ "cell_type": "code",
118
+ "source": [
119
+ "from llama_index.readers.wikipedia import WikipediaReader\n",
120
+ "\n",
121
+ "# Initialize WikipediaReader\n",
122
+ "reader = WikipediaReader()"
123
+ ],
124
+ "metadata": {
125
+ "id": "2gko9Q3hrlMh"
126
+ },
127
+ "execution_count": 12,
128
+ "outputs": []
129
+ },
130
+ {
131
+ "cell_type": "code",
132
+ "source": [
133
+ "# Load data from Wikipedia\n",
134
+ "documents = reader.load_data(pages=['Natural Language Processing', 'Artificial Intelligence'])"
135
+ ],
136
+ "metadata": {
137
+ "id": "Z35ot7P1wIO0"
138
+ },
139
+ "execution_count": 13,
140
+ "outputs": []
141
+ },
142
+ {
143
+ "cell_type": "code",
144
+ "source": [
145
+ "len( documents )"
146
+ ],
147
+ "metadata": {
148
+ "colab": {
149
+ "base_uri": "https://localhost:8080/"
150
+ },
151
+ "id": "0i9Zp6BJwILk",
152
+ "outputId": "a6a1e0a7-98cf-4ba4-d48a-e4f5833b4967"
153
+ },
154
+ "execution_count": 14,
155
+ "outputs": [
156
+ {
157
+ "output_type": "execute_result",
158
+ "data": {
159
+ "text/plain": [
160
+ "2"
161
+ ]
162
+ },
163
+ "metadata": {},
164
+ "execution_count": 14
165
+ }
166
+ ]
167
+ },
168
+ {
169
+ "cell_type": "markdown",
170
+ "source": [
171
+ "## Save on ChromaDB"
172
+ ],
173
+ "metadata": {
174
+ "id": "03lff4VUTaN9"
175
+ }
176
+ },
177
+ {
178
+ "cell_type": "code",
179
+ "source": [
180
+ "import chromadb\n",
181
+ "from llama_index.vector_stores.chroma import ChromaVectorStore\n",
182
+ "\n",
183
+ "# Load the vector store from the local storage.\n",
184
+ "db = chromadb.PersistentClient(path=\"./wikipedia-articles\")\n",
185
+ "chroma_collection = db.get_or_create_collection(\"wikipedia-articles\")\n",
186
+ "vector_store = ChromaVectorStore(chroma_collection=chroma_collection)"
187
+ ],
188
+ "metadata": {
189
+ "id": "eo8CTHSFTcaR"
190
+ },
191
+ "execution_count": 15,
192
+ "outputs": []
193
+ },
194
+ {
195
+ "cell_type": "markdown",
196
+ "source": [
197
+ "## Create Nodes"
198
+ ],
199
+ "metadata": {
200
+ "id": "qkKPAnIl44ss"
201
+ }
202
+ },
203
+ {
204
+ "cell_type": "code",
205
+ "source": [
206
+ "from llama_index.core.node_parser import SimpleNodeParser\n",
207
+ "\n",
208
+ "# Initialize the parser\n",
209
+ "parser = SimpleNodeParser.from_defaults(chunk_size=512, chunk_overlap=20)\n",
210
+ "\n",
211
+ "# Parse documents into nodes\n",
212
+ "nodes = parser.get_nodes_from_documents(documents)\n",
213
+ "print( len( nodes ) )"
214
+ ],
215
+ "metadata": {
216
+ "colab": {
217
+ "base_uri": "https://localhost:8080/"
218
+ },
219
+ "id": "eB6Rc0U0wII_",
220
+ "outputId": "ec338be1-deca-45a7-e6ba-9997e4b7e25a"
221
+ },
222
+ "execution_count": 20,
223
+ "outputs": [
224
+ {
225
+ "output_type": "stream",
226
+ "name": "stdout",
227
+ "text": [
228
+ "45\n"
229
+ ]
230
+ }
231
+ ]
232
+ },
233
+ {
234
+ "cell_type": "markdown",
235
+ "source": [
236
+ "## Storage Context"
237
+ ],
238
+ "metadata": {
239
+ "id": "E8tHMS5ZucFE"
240
+ }
241
+ },
242
+ {
243
+ "cell_type": "code",
244
+ "source": [
245
+ "from llama_index.core import StorageContext\n",
246
+ "\n",
247
+ "storage_context = StorageContext.from_defaults(vector_store=vector_store)"
248
+ ],
249
+ "metadata": {
250
+ "id": "eWFtVpM_TcTQ"
251
+ },
252
+ "execution_count": 18,
253
+ "outputs": []
254
+ },
255
+ {
256
+ "cell_type": "markdown",
257
+ "source": [
258
+ "## Create index from Documents"
259
+ ],
260
+ "metadata": {
261
+ "id": "kCgdd197CTDt"
262
+ }
263
+ },
264
+ {
265
+ "cell_type": "code",
266
+ "source": [
267
+ "from llama_index.core import VectorStoreIndex\n",
268
+ "\n",
269
+ "index = VectorStoreIndex(\n",
270
+ " nodes=nodes, storage_context=storage_context\n",
271
+ ")"
272
+ ],
273
+ "metadata": {
274
+ "id": "g3GCf8LrULIW"
275
+ },
276
+ "execution_count": 24,
277
+ "outputs": []
278
+ },
279
+ {
280
+ "cell_type": "code",
281
+ "source": [
282
+ "query_engine = index.as_query_engine()\n",
283
+ "response = query_engine.query(\"What does NLP stand for?\")\n",
284
+ "response.response"
285
+ ],
286
+ "metadata": {
287
+ "colab": {
288
+ "base_uri": "https://localhost:8080/",
289
+ "height": 35
290
+ },
291
+ "id": "G7BdNn-Q5AlG",
292
+ "outputId": "a311ec41-6cdc-4fe1-fb59-ad338d0b6149"
293
+ },
294
+ "execution_count": 25,
295
+ "outputs": [
296
+ {
297
+ "output_type": "execute_result",
298
+ "data": {
299
+ "text/plain": [
300
+ "'NLP stands for Natural Language Processing.'"
301
+ ],
302
+ "application/vnd.google.colaboratory.intrinsic+json": {
303
+ "type": "string"
304
+ }
305
+ },
306
+ "metadata": {},
307
+ "execution_count": 25
308
+ }
309
+ ]
310
+ },
311
+ {
312
+ "cell_type": "markdown",
313
+ "source": [
314
+ "## Store/Load Vector Store"
315
+ ],
316
+ "metadata": {
317
+ "id": "r6cGiUtxu5ga"
318
+ }
319
+ },
320
+ {
321
+ "cell_type": "code",
322
+ "source": [
323
+ "# Index Storage Checks\n",
324
+ "import os.path\n",
325
+ "from llama_index.core import StorageContext, load_index_from_storage\n",
326
+ "\n",
327
+ "# Let's see if our index already exists in storage.\n",
328
+ "if not os.path.exists(\"./storage\"):\n",
329
+ " index.storage_context.persist()\n",
330
+ "\n",
331
+ "else:\n",
332
+ " # If the index already exists, we'll just load it:\n",
333
+ " storage_context = StorageContext.from_defaults(persist_dir=\"./storage\")\n",
334
+ " index = load_index_from_storage(storage_context)"
335
+ ],
336
+ "metadata": {
337
+ "id": "GHtB0C0mu7f6"
338
+ },
339
+ "execution_count": 28,
340
+ "outputs": []
341
+ },
342
+ {
343
+ "cell_type": "markdown",
344
+ "source": [
345
+ "# Paul Graham Essay"
346
+ ],
347
+ "metadata": {
348
+ "id": "iF8hwfMKuzst"
349
+ }
350
+ },
351
+ {
352
+ "cell_type": "code",
353
+ "source": [
354
+ "!mkdir -p './paul_graham/'\n",
355
+ "!wget 'https://raw.githubusercontent.com/run-llama/llama_index/main/docs/docs/examples/data/paul_graham/paul_graham_essay.txt' -O './paul_graham/paul_graham_essay.txt'"
356
+ ],
357
+ "metadata": {
358
+ "colab": {
359
+ "base_uri": "https://localhost:8080/"
360
+ },
361
+ "id": "DrzbBAglwUo0",
362
+ "outputId": "73f30202-a708-4112-8491-9152e228c6cb"
363
+ },
364
+ "execution_count": 35,
365
+ "outputs": [
366
+ {
367
+ "output_type": "stream",
368
+ "name": "stdout",
369
+ "text": [
370
+ "--2024-07-24 17:20:40-- https://raw.githubusercontent.com/run-llama/llama_index/main/docs/docs/examples/data/paul_graham/paul_graham_essay.txt\n",
371
+ "Resolving raw.githubusercontent.com (raw.githubusercontent.com)... 185.199.110.133, 185.199.109.133, 185.199.111.133, ...\n",
372
+ "Connecting to raw.githubusercontent.com (raw.githubusercontent.com)|185.199.110.133|:443... connected.\n",
373
+ "HTTP request sent, awaiting response... 200 OK\n",
374
+ "Length: 75042 (73K) [text/plain]\n",
375
+ "Saving to: ‘./paul_graham/paul_graham_essay.txt’\n",
376
+ "\n",
377
+ "\r ./paul_gr 0%[ ] 0 --.-KB/s \r./paul_graham/paul_ 100%[===================>] 73.28K --.-KB/s in 0.02s \n",
378
+ "\n",
379
+ "2024-07-24 17:20:40 (3.33 MB/s) - ‘./paul_graham/paul_graham_essay.txt’ saved [75042/75042]\n",
380
+ "\n"
381
+ ]
382
+ }
383
+ ]
384
+ },
385
+ {
386
+ "cell_type": "code",
387
+ "source": [
388
+ "from llama_index.core import SimpleDirectoryReader\n",
389
+ "\n",
390
+ "# load documents\n",
391
+ "documents = SimpleDirectoryReader(\"./paul_graham\").load_data()"
392
+ ],
393
+ "metadata": {
394
+ "id": "S8-QmnkCwIiU"
395
+ },
396
+ "execution_count": 37,
397
+ "outputs": []
398
+ },
399
+ {
400
+ "cell_type": "code",
401
+ "source": [
402
+ "import chromadb\n",
403
+ "from llama_index.vector_stores.chroma import ChromaVectorStore\n",
404
+ "\n",
405
+ "# Load the vector store from the local storage.\n",
406
+ "db = chromadb.PersistentClient(path=\"./paul-graham\")\n",
407
+ "chroma_collection = db.get_or_create_collection(\"paul-graham\")\n",
408
+ "vector_store = ChromaVectorStore(chroma_collection=chroma_collection)"
409
+ ],
410
+ "metadata": {
411
+ "id": "DfWglp75xc5f"
412
+ },
413
+ "execution_count": 38,
414
+ "outputs": []
415
+ },
416
+ {
417
+ "cell_type": "code",
418
+ "source": [
419
+ "from llama_index.core import StorageContext\n",
420
+ "\n",
421
+ "storage_context = StorageContext.from_defaults(vector_store=vector_store)\n",
422
+ "storage_context.docstore.add_documents(nodes)"
423
+ ],
424
+ "metadata": {
425
+ "id": "-EVBlUC-xcj1"
426
+ },
427
+ "execution_count": 39,
428
+ "outputs": []
429
+ },
430
+ {
431
+ "cell_type": "code",
432
+ "source": [
433
+ "from llama_index.core import VectorStoreIndex\n",
434
+ "\n",
435
+ "index = VectorStoreIndex.from_documents(\n",
436
+ " documents, storage_context=storage_context\n",
437
+ ")"
438
+ ],
439
+ "metadata": {
440
+ "id": "8lMa4h9Cwn8b"
441
+ },
442
+ "execution_count": 40,
443
+ "outputs": []
444
+ },
445
+ {
446
+ "cell_type": "code",
447
+ "source": [
448
+ "query_engine = index.as_query_engine(similarity_top_k=10)"
449
+ ],
450
+ "metadata": {
451
+ "id": "tJsfskjHxj0e"
452
+ },
453
+ "execution_count": 44,
454
+ "outputs": []
455
+ },
456
+ {
457
+ "cell_type": "code",
458
+ "source": [
459
+ "from llama_index.core.tools import QueryEngineTool, ToolMetadata\n",
460
+ "from llama_index.core.query_engine import SubQuestionQueryEngine\n",
461
+ "\n",
462
+ "query_engine_tools = [\n",
463
+ " QueryEngineTool(\n",
464
+ " query_engine=query_engine,\n",
465
+ " metadata=ToolMetadata(\n",
466
+ " name=\"pg_essay\",\n",
467
+ " description=\"Paul Graham essay on What I Worked On\",\n",
468
+ " ),\n",
469
+ " ),\n",
470
+ "]\n",
471
+ "\n",
472
+ "query_engine = SubQuestionQueryEngine.from_defaults(\n",
473
+ " query_engine_tools=query_engine_tools,\n",
474
+ " use_async=True,\n",
475
+ ")"
476
+ ],
477
+ "metadata": {
478
+ "id": "yL9TsFwxxuoA"
479
+ },
480
+ "execution_count": 50,
481
+ "outputs": []
482
+ },
483
+ {
484
+ "cell_type": "code",
485
+ "source": [
486
+ "response = query_engine.query(\n",
487
+ "    \"How was Paul Graham's life different before, during, and after YC?\"\n",
488
+ ")"
489
+ ],
490
+ "metadata": {
491
+ "colab": {
492
+ "base_uri": "https://localhost:8080/"
493
+ },
494
+ "id": "JWc_n5Lhx2bq",
495
+ "outputId": "cd992d20-c701-4eb7-aaf2-30f790d1ca24"
496
+ },
497
+ "execution_count": 51,
498
+ "outputs": [
499
+ {
500
+ "output_type": "stream",
501
+ "name": "stdout",
502
+ "text": [
503
+ "Generated 3 sub questions.\n",
504
+ "\u001b[1;3;38;2;237;90;200m[pg_essay] Q: What did Paul Graham work on before Y Combinator?\n",
505
+ "\u001b[0m\u001b[1;3;38;2;90;149;237m[pg_essay] Q: What did Paul Graham work on during Y Combinator?\n",
506
+ "\u001b[0m\u001b[1;3;38;2;11;159;203m[pg_essay] Q: What did Paul Graham work on after Y Combinator?\n",
507
+ "\u001b[0mGenerated 1 sub questions.\n",
508
+ "\u001b[1;3;38;2;237;90;200m[pg_essay] Q: What did Paul Graham work on after Y Combinator?\n",
509
+ "\u001b[0mGenerated 1 sub questions.\n",
510
+ "\u001b[1;3;38;2;237;90;200m[pg_essay] Q: What is the title of Paul Graham's essay on What I Worked On?\n",
511
+ "\u001b[0mGenerated 1 sub questions.\n",
512
+ "\u001b[1;3;38;2;237;90;200m[pg_essay] Q: What is the title of Paul Graham's essay on What I Worked On?\n",
513
+ "\u001b[0mGenerated 1 sub questions.\n",
514
+ "\u001b[1;3;38;2;237;90;200m[pg_essay] Q: What did Paul Graham work on after Y Combinator?\n",
515
+ "\u001b[0mGenerated 1 sub questions.\n",
516
+ "\u001b[1;3;38;2;237;90;200m[pg_essay] Q: What is the title of Paul Graham's essay on What I Worked On?\n",
517
+ "\u001b[0m"
518
+ ]
519
+ },
520
+ {
521
+ "output_type": "stream",
522
+ "name": "stderr",
523
+ "text": [
524
+ "/usr/lib/python3.10/abc.py:123: RuntimeWarning: coroutine 'run_async_tasks.<locals>._gather' was never awaited\n",
525
+ " return _abc_subclasscheck(cls, subclass)\n",
526
+ "RuntimeWarning: Enable tracemalloc to get the object allocation traceback\n",
527
+ "/usr/lib/python3.10/abc.py:123: RuntimeWarning: coroutine 'SubQuestionQueryEngine._aquery_subq' was never awaited\n",
528
+ " return _abc_subclasscheck(cls, subclass)\n",
529
+ "RuntimeWarning: Enable tracemalloc to get the object allocation traceback\n"
530
+ ]
531
+ },
532
+ {
533
+ "output_type": "stream",
534
+ "name": "stdout",
535
+ "text": [
536
+ "Generated 1 sub questions.\n",
537
+ "\u001b[1;3;38;2;237;90;200m[pg_essay] Q: What is the title of Paul Graham's essay on What I Worked On?\n",
538
+ "\u001b[0m\u001b[1;3;38;2;237;90;200m[pg_essay] A: After Y Combinator, Paul Graham started painting.\n",
539
+ "\u001b[0m\u001b[1;3;38;2;237;90;200m[pg_essay] A: The title of Paul Graham's essay on What I Worked On is \"What I Worked On\".\n",
540
+ "\u001b[0m\u001b[1;3;38;2;237;90;200m[pg_essay] A: After Y Combinator, Paul Graham started painting.\n",
541
+ "\u001b[0m\u001b[1;3;38;2;237;90;200m[pg_essay] A: What I Worked On\n",
542
+ "\u001b[0m\u001b[1;3;38;2;237;90;200m[pg_essay] A: What I Worked On\n",
543
+ "\u001b[0m\u001b[1;3;38;2;237;90;200m[pg_essay] A: The title of Paul Graham's essay on What I Worked On is \"What I Worked On\".\n",
544
+ "\u001b[0m\u001b[1;3;38;2;11;159;203m[pg_essay] A: After Y Combinator, Paul Graham started painting.\n",
545
+ "\u001b[0m\u001b[1;3;38;2;90;149;237m[pg_essay] A: Paul Graham worked on various projects during his time at Y Combinator.\n",
546
+ "\u001b[0m\u001b[1;3;38;2;237;90;200m[pg_essay] A: Paul Graham worked on developing Viaweb before Y Combinator.\n",
547
+ "\u001b[0m"
548
+ ]
549
+ }
550
+ ]
551
+ },
552
+ {
553
+ "cell_type": "code",
554
+ "source": [
555
+ "print( \">>> The final response:\\n\", response )"
556
+ ],
557
+ "metadata": {
558
+ "colab": {
559
+ "base_uri": "https://localhost:8080/"
560
+ },
561
+ "id": "T-ZC66Ltx5Za",
562
+ "outputId": "d7a1c85d-d73c-467d-e0df-9e06078622e2"
563
+ },
564
+ "execution_count": 52,
565
+ "outputs": [
566
+ {
567
+ "output_type": "stream",
568
+ "name": "stdout",
569
+ "text": [
570
+ ">>> The final response:\n",
571
+ " Paul Graham worked on developing Viaweb before Y Combinator, on various projects during his time at Y Combinator, and started painting after Y Combinator.\n"
572
+ ]
573
+ }
574
+ ]
575
+ },
576
+ {
577
+ "cell_type": "code",
578
+ "source": [],
579
+ "metadata": {
580
+ "id": "27fS3JcDyFSj"
581
+ },
582
+ "execution_count": null,
583
+ "outputs": []
584
+ }
585
+ ]
586
+ }