CSAle committed
Commit d3942c3 • 1 Parent(s): 29b7439

Releasing ChatWithMyPDF
.chainlit/config.toml ADDED
@@ -0,0 +1,62 @@
+[project]
+# Whether to enable telemetry (default: true). No personal data is collected.
+enable_telemetry = true
+
+# List of environment variables to be provided by each user to use the app.
+user_env = []
+
+# Duration (in seconds) during which the session is saved when the connection is lost
+session_timeout = 3600
+
+# Enable third-party caching (e.g. LangChain cache)
+cache = false
+
+# Follow symlink for asset mount (see https://github.com/Chainlit/chainlit/issues/317)
+# follow_symlink = false
+
+[features]
+# Show the prompt playground
+prompt_playground = true
+
+[UI]
+# Name of the app and chatbot.
+name = "Chatbot"
+
+# Description of the app and chatbot. This is used for HTML tags.
+# description = ""
+
+# Large-size content is collapsed by default for a cleaner UI
+default_collapse_content = true
+
+# The default value for the expand messages setting.
+default_expand_messages = false
+
+# Hide the chain-of-thought details from the user in the UI.
+hide_cot = false
+
+# Link to your GitHub repo. This will add a GitHub button in the UI's header.
+# github = ""
+
+# Override default MUI light theme. (Check theme.ts)
+[UI.theme.light]
+#background = "#FAFAFA"
+#paper = "#FFFFFF"
+
+[UI.theme.light.primary]
+#main = "#F80061"
+#dark = "#980039"
+#light = "#FFE7EB"
+
+# Override default MUI dark theme. (Check theme.ts)
+[UI.theme.dark]
+#background = "#FAFAFA"
+#paper = "#FFFFFF"
+
+[UI.theme.dark.primary]
+#main = "#F80061"
+#dark = "#980039"
+#light = "#FFE7EB"
+
+
+[meta]
+generated_by = "0.7.0"
__pycache__/app.cpython-39.pyc ADDED
Binary file (4.24 kB).
 
app.py ADDED
@@ -0,0 +1,146 @@
+import os
+from typing import List
+
+from langchain.embeddings.openai import OpenAIEmbeddings
+from langchain.text_splitter import RecursiveCharacterTextSplitter
+from langchain.vectorstores import Chroma
+from langchain.chains import (
+    ConversationalRetrievalChain,
+)
+from langchain.document_loaders import PyPDFLoader
+from langchain.chat_models import ChatOpenAI
+from langchain.prompts.chat import (
+    ChatPromptTemplate,
+    SystemMessagePromptTemplate,
+    HumanMessagePromptTemplate,
+)
+from langchain.docstore.document import Document
+from langchain.memory import ChatMessageHistory, ConversationBufferMemory
+from chainlit.types import AskFileResponse
+
+import chainlit as cl
+
+text_splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=100)
+
+system_template = """Use the following pieces of context to answer the user's question.
+If you don't know the answer, just say that you don't know; don't try to make up an answer.
+ALWAYS return a "SOURCES" part in your answer.
+The "SOURCES" part should be a reference to the source of the document from which you got your answer.
+
+If the user greets you with a greeting like Hi, Hello, or How are you, reply accordingly.
+
+An example of your response should be:
+
+The answer is foo
+SOURCES: xyz
+
+
+Begin!
+----------------
+{summaries}"""
+messages = [
+    SystemMessagePromptTemplate.from_template(system_template),
+    HumanMessagePromptTemplate.from_template("{question}"),
+]
+prompt = ChatPromptTemplate.from_messages(messages)
+chain_type_kwargs = {"prompt": prompt}
+
+
+def process_file(file: AskFileResponse):
+    import tempfile
+
+    # Persist the uploaded bytes to disk so PyPDFLoader can read them.
+    with tempfile.NamedTemporaryFile(mode="wb", delete=False) as temp_file:
+        temp_file.write(file.content)
+
+    pypdf_loader = PyPDFLoader(temp_file.name)
+    texts = pypdf_loader.load_and_split()
+    texts = [text.page_content for text in texts]
+    return texts
+
+
+@cl.on_chat_start
+async def on_chat_start():
+    files = None
+
+    # Wait for the user to upload a file
+    while files is None:
+        files = await cl.AskFileMessage(
+            content="Please upload a PDF file to begin!",
+            accept=["application/pdf"],
+            max_size_mb=20,
+            timeout=180,
+        ).send()
+
+    file = files[0]
+
+    msg = cl.Message(
+        content=f"Processing `{file.name}`...", disable_human_feedback=True
+    )
+    await msg.send()
+
+    # Load the file and split it into page-sized chunks
+    texts = process_file(file)
+
+    print(texts[0])
+
+    # Create metadata for each chunk
+    metadatas = [{"source": f"{i}-pl"} for i in range(len(texts))]
+
+    # Create a Chroma vector store
+    embeddings = OpenAIEmbeddings()
+    docsearch = await cl.make_async(Chroma.from_texts)(
+        texts, embeddings, metadatas=metadatas
+    )
+
+    message_history = ChatMessageHistory()
+
+    memory = ConversationBufferMemory(
+        memory_key="chat_history",
+        output_key="answer",
+        chat_memory=message_history,
+        return_messages=True,
+    )
+
+    # Create a chain that uses the Chroma vector store
+    chain = ConversationalRetrievalChain.from_llm(
+        ChatOpenAI(model_name="gpt-3.5-turbo", temperature=0, streaming=True),
+        chain_type="stuff",
+        retriever=docsearch.as_retriever(),
+        memory=memory,
+        return_source_documents=True,
+    )
+
+    # Let the user know that the system is ready
+    msg.content = f"Processing `{file.name}` done. You can now ask questions!"
+    await msg.update()
+
+    cl.user_session.set("chain", chain)
+
+
+@cl.on_message
+async def main(message):
+    chain = cl.user_session.get("chain")  # type: ConversationalRetrievalChain
+    cb = cl.AsyncLangchainCallbackHandler()
+
+    res = await chain.acall(message, callbacks=[cb])
+    answer = res["answer"]
+    source_documents = res["source_documents"]  # type: List[Document]
+
+    text_elements = []  # type: List[cl.Text]
+
+    if source_documents:
+        for source_idx, source_doc in enumerate(source_documents):
+            source_name = f"source_{source_idx}"
+            # Create the text element referenced in the message
+            text_elements.append(
+                cl.Text(content=source_doc.page_content, name=source_name)
+            )
+        source_names = [text_el.name for text_el in text_elements]
+
+        if source_names:
+            answer += f"\nSources: {', '.join(source_names)}"
+        else:
+            answer += "\nNo sources found"
+
+    await cl.Message(content=answer, elements=text_elements).send()
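
For quick experimentation, the same retrieval pipeline can be exercised outside Chainlit. The following is a minimal sketch, not part of the commit: the sample texts, the question, and the placeholder API key are all hypothetical, and it assumes the same (unpinned) `langchain` API used in `app.py` above.

```python
# Minimal offline sketch of the pipeline in app.py (hypothetical sample data).
import os

from langchain.chains import ConversationalRetrievalChain
from langchain.chat_models import ChatOpenAI
from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.memory import ConversationBufferMemory
from langchain.vectorstores import Chroma

os.environ.setdefault("OPENAI_API_KEY", "sk-...")  # placeholder; export a real key

texts = [
    "Chainlit provides a chat UI for LLM applications.",   # hypothetical chunk
    "Chroma stores embeddings in a local vector database.",  # hypothetical chunk
]
metadatas = [{"source": f"{i}-pl"} for i in range(len(texts))]

# Build the vector store and conversational chain exactly as on_chat_start does.
docsearch = Chroma.from_texts(texts, OpenAIEmbeddings(), metadatas=metadatas)
memory = ConversationBufferMemory(
    memory_key="chat_history", output_key="answer", return_messages=True
)
chain = ConversationalRetrievalChain.from_llm(
    ChatOpenAI(model_name="gpt-3.5-turbo", temperature=0),
    chain_type="stuff",
    retriever=docsearch.as_retriever(),
    memory=memory,
    return_source_documents=True,
)

res = chain({"question": "What does Chroma do?"})
print(res["answer"])
print([doc.metadata["source"] for doc in res["source_documents"]])
```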
chainlit.md ADDED
@@ -0,0 +1,14 @@
+# Welcome to Chainlit! 🚀🤖
+
+Hi there, Developer! 👋 We're excited to have you on board. Chainlit is a powerful tool designed to help you prototype, debug and share applications built on top of LLMs.
+
+## Useful Links 🔗
+
+- **Documentation:** Get started with our comprehensive [Chainlit Documentation](https://docs.chainlit.io) 📚
+- **Discord Community:** Join our friendly [Chainlit Discord](https://discord.gg/ZThrUxbAYw) to ask questions, share your projects, and connect with other developers! 💬
+
+We can't wait to see what you create with Chainlit! Happy coding! 💻😊
+
+## Welcome screen
+
+To modify the welcome screen, edit the `chainlit.md` file at the root of your project. If you do not want a welcome screen, just leave this file empty.
requirements.txt ADDED
@@ -0,0 +1,6 @@
+langchain
+chromadb
+tiktoken
+pypdf
+chainlit
+openai
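
With these dependencies installed (`pip install -r requirements.txt`) and an `OPENAI_API_KEY` set in the environment, the app should start with the Chainlit CLI, e.g. `chainlit run app.py -w` (the `-w` flag reloads the app on file changes). Note that the versions above are unpinned, so the exact `langchain` and `chainlit` APIs the code relies on may drift over time.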