{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "543ba231",
   "metadata": {},
   "outputs": [],
   "source": [
    "#%pip install langchain openai chromadb tiktoken pypdf panel bokeh"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 21,
   "id": "4545d020",
   "metadata": {},
   "outputs": [],
   "source": [
    "from langchain.chains import RetrievalQA\n",
    "from langchain.llms import OpenAI\n",
    "from langchain.document_loaders import PyPDFLoader\n",
    "from langchain.indexes import VectorstoreIndexCreator\n",
    "from langchain.text_splitter import CharacterTextSplitter\n",
    "from langchain.embeddings import OpenAIEmbeddings\n",
    "from langchain.vectorstores import Chroma\n",
    "import panel as pn\n",
    "import os\n",
    "import dotenv\n",
    "dotenv.load_dotenv()\n",
    "api_key = os.environ.get(\"API_KEY\")"
   ]
  },
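  {
   "cell_type": "code",
   "execution_count": null,
   "id": "dotenv-sketch",
   "metadata": {},
   "outputs": [],
   "source": [
    "# A minimal sketch of the .env file that dotenv.load_dotenv() reads in the\n",
    "# cell above. The variable name API_KEY matches that cell; the value shown\n",
    "# here is a placeholder assumption, not a real key.\n",
    "#\n",
    "# API_KEY=sk-your-openai-key-here"
   ]
  },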
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "2a643296",
   "metadata": {},
   "outputs": [],
   "source": [
    "\n",
    "pn.extension('texteditor', template=\"bootstrap\", sizing_mode='stretch_width')\n",
    "pn.state.template.param.update(\n",
    "    main_max_width=\"690px\",\n",
    "    header_background=\"#F08080\",\n",
    ")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 23,
   "id": "f843fe71",
   "metadata": {},
   "outputs": [],
   "source": [
    "file_input = pn.widgets.FileInput(width=300)\n",
    "\n",
    "openaikey = pn.widgets.PasswordInput(\n",
    "    value=api_key, placeholder=\"Insert your Openai API key here\", width=250\n",
    ")\n",
    "prompt = pn.widgets.TextEditor(\n",
    "    value=\"\", placeholder=\"How do you become a good programmer?\", height=160, toolbar=False\n",
    ")\n",
    "search_button = pn.widgets.Button(name=\"Search!\")\n",
    "\n",
    "select_k = pn.widgets.IntSlider(\n",
    "    name=\"Number of relevant chunks\", start=1, end=10, step=1, value=2\n",
    ")\n",
    "select_chain_type = pn.widgets.RadioButtonGroup(\n",
    "    name='Chain type', \n",
    "    options=['stuff', 'map_reduce', \"refine\"]\n",
    ")\n",
    "\n",
    "legend = pn.pane.Markdown(\"\"\"\n",
    "**Chain types:**\n",
    "\n",
    "- `stuff`: Parse the entire document at one go. (for smaller documents)\n",
    "- `map_reduce`: Split the document into smaller chunks in parallel. (faster processing but less detailed answer)\n",
    "- `refine`: Split the document into smaller chunks subsequently. (slower processing but more detailed answer)\n",
    "\"\"\")\n",
    "\n",
    "widgets = pn.Row(\n",
    "    pn.Column(prompt, search_button, margin=5),\n",
    "    pn.Card(\n",
    "        \"Chain type:\",\n",
    "        pn.Column(select_chain_type, select_k),\n",
    "        title=\"Advanced settings\", margin=20,\n",
    "        style={\"width\": \"3000px\"}\n",
    "    ), \n",
    "    pn.Column(legend)\n",
    ")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 24,
   "id": "7e7daca0",
   "metadata": {},
   "outputs": [],
   "source": [
    "def qa(file, query, chain_type, k):\n",
    "    # load document pdf\n",
    "    loader = PyPDFLoader(file)\n",
    "    documents = loader.load()\n",
    "    # split the documents into chunks\n",
    "    text_splitter = CharacterTextSplitter(chunk_size=1000, chunk_overlap=0)\n",
    "    texts = text_splitter.split_documents(documents)\n",
    "    # select which embeddings we want to use\n",
    "    embeddings = OpenAIEmbeddings()\n",
    "    # create the vectorestore to use as the index\n",
    "    db = Chroma.from_documents(texts, embeddings)\n",
    "    # expose this index in a retriever interface\n",
    "    retriever = db.as_retriever(search_type=\"similarity\", search_kwargs={\"k\": k})\n",
    "    # create a chain to answer questions \n",
    "    qa = RetrievalQA.from_chain_type(\n",
    "        llm=OpenAI(), chain_type=chain_type, retriever=retriever, return_source_documents=True)\n",
    "    result = qa({\"query\": query})\n",
    "    print(result['result'])\n",
    "    return result"
   ]
  },
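  {
   "cell_type": "code",
   "execution_count": null,
   "id": "qa-usage-sketch",
   "metadata": {},
   "outputs": [],
   "source": [
    "# A minimal sketch of calling qa() directly, outside the Panel UI, to sanity-\n",
    "# check the chain. The file name \"example.pdf\" and the query below are\n",
    "# placeholder assumptions; OPENAI_API_KEY must already be set in the environment.\n",
    "# result = qa(file=\"example.pdf\", query=\"What is this document about?\",\n",
    "#             chain_type=\"stuff\", k=2)\n",
    "# result[\"source_documents\"]  # inspect the retrieved chunks"
   ]
  },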
  {
   "cell_type": "code",
   "execution_count": 25,
   "id": "1ddd25b6",
   "metadata": {},
   "outputs": [],
   "source": [
    "convos = []  # store all panel objects in a list\n",
    "\n",
    "def qa_result(_):\n",
    "    os.environ[\"OPENAI_API_KEY\"] = openaikey.value\n",
    "    \n",
    "    # save pdf file to a temp file \n",
    "    if file_input.value is not None:\n",
    "        file_input.save(\"/.cache/temp.pdf\")\n",
    "    \n",
    "        prompt_text = prompt.value\n",
    "        if prompt_text:\n",
    "            result = qa(file=\"/.cache/temp.pdf\", query=prompt_text, chain_type=select_chain_type.value, k=select_k.value)\n",
    "            convos.extend([\n",
    "                pn.Row(\n",
    "                    pn.panel(\"\\U0001F60A\", width=10),\n",
    "                    prompt_text,\n",
    "                    width=600\n",
    "                ),\n",
    "                pn.Row(\n",
    "                    pn.panel(\"\\U0001F916\", width=10),\n",
    "                    pn.Column(\n",
    "                        result[\"result\"],\n",
    "                        \"Relevant source text:\",\n",
    "                        pn.pane.Markdown('\\n--------------------------------------------------------------------\\n'.join(doc.page_content for doc in result[\"source_documents\"]))\n",
    "                    )\n",
    "                )\n",
    "            ])\n",
    "            #return convos\n",
    "    return pn.Column(*convos, margin=15, width=575, min_height=400)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 26,
   "id": "7341d7df",
   "metadata": {},
   "outputs": [],
   "source": [
    "qa_interactive = pn.panel(\n",
    "    pn.bind(qa_result, search_button),\n",
    "    loading_indicator=True,\n",
    ")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 27,
   "id": "88d43948",
   "metadata": {},
   "outputs": [],
   "source": [
    "output = pn.WidgetBox('*Output will show up here:*', qa_interactive, width=630, scroll=True)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "19041fc0",
   "metadata": {},
   "outputs": [],
   "source": [
    "# layout\n",
    "pn.Column(\n",
    "    pn.pane.Markdown(\"\"\"\n",
    "    ## Query anything in your files \\U0001F60A!\n",
    "    \n",
    "    1) Upload a pdf file. \\n\n",
    "    2) Enter OpenAI API key(Using Aaron's by default). \\n\n",
    "    3) Ask any question and click \"Search\". (Click twice on first startup)\n",
    "    \n",
    "    \"\"\"),\n",
    "    pn.Row(file_input,pn.pane.HTML(\"OpenAI API Key: \", width=110, align=\"end\"), openaikey),\n",
    "    output,\n",
    "    widgets\n",
    "\n",
    ").servable()"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3 (ipykernel)",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.10.8"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}