Commit db0cc84
committed by alexzhuzhou
1 Parent(s): 7316fdd
updates
Files changed:
- app.py (+11 -4)
- makechain.py (+2 -5)
- requirements.txt (+1 -1)
app.py
CHANGED
@@ -45,14 +45,21 @@ def main():
     query = user_query.strip().replace('\n', ' ')
     response = qa_chain(
         {
-            'question': query,
+            'query': query,
         }
     )
-    st.write(f"{response['answer']}")
+    st.write(f"{response['result']}")
     st.write("Sources: ")
-    st.write(f"{response['sources']}")
-
 
+    documents = response['source_documents']
+    for document in documents:
+        page_content = document.page_content
+        source_url = document.metadata['source']
+        st.write("Page Content")
+        st.write(page_content)
+        st.write("Source URL:")
+        st.write(source_url)
+        st.markdown("""---""")
 try:
     main()
 except Exception as e:
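For reference, a minimal sketch of the response shape the updated app.py consumes: with return_source_documents=True, RetrievalQA returns a dict containing a 'result' string and a 'source_documents' list of Documents. The literal values below are placeholders for the example, not output from this Space.

# Placeholder response illustrating the keys app.py reads;
# the answer text, chunk text and URL are invented for the example.
from langchain.schema import Document

response = {
    "result": "Answer text produced by the LLM.",
    "source_documents": [
        Document(
            page_content="Relevant chunk retrieved from the vectorstore.",
            metadata={"source": "https://example.com/page"},
        ),
    ],
}

print(response["result"])
for document in response["source_documents"]:
    print("Page Content:", document.page_content)
    print("Source URL:", document.metadata["source"])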
makechain.py
CHANGED
@@ -1,14 +1,11 @@
 from langchain.chat_models import ChatOpenAI
 from langchain.vectorstores.pinecone import Pinecone
 import openai
-from langchain.chains import RetrievalQAWithSourcesChain
+from langchain.chains import RetrievalQA
 import os
 OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY")
 def get_chain(vectorstore: Pinecone):
     openai.api_key = OPENAI_API_KEY
     llm = ChatOpenAI(temperature=0, model_name="gpt-3.5-turbo")
-
-    qa_chain = RetrievalQAWithSourcesChain.from_chain_type(llm=llm, chain_type="stuff",
-                                                           retriever=vectorstore.as_retriever())
-
+    qa_chain = RetrievalQA.from_chain_type(llm=llm, chain_type="stuff", retriever=vectorstore.as_retriever(), return_source_documents=True)
     return qa_chain
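For illustration, a hedged sketch of how get_chain might be wired to an existing Pinecone index; the Pinecone environment, index name and embedding model below are assumptions for the example, not part of this commit.

# Illustrative wiring only: the environment and index name are assumed.
import os

import pinecone
from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.vectorstores.pinecone import Pinecone

from makechain import get_chain

pinecone.init(
    api_key=os.environ["PINECONE_API_KEY"],
    environment="us-west1-gcp",  # assumed Pinecone environment
)
embeddings = OpenAIEmbeddings()  # reads OPENAI_API_KEY from the environment
vectorstore = Pinecone.from_existing_index("example-index", embeddings)  # assumed index name

qa_chain = get_chain(vectorstore)
response = qa_chain({"query": "What does the documentation say about deployment?"})
print(response["result"])
print(len(response["source_documents"]), "source documents returned")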
requirements.txt
CHANGED
@@ -1,5 +1,5 @@
 streamlit~=1.22.0
 pinecone-client
-langchain~=0.0.
+langchain~=0.0.215
 openai~=0.27.7
 tqdm~=4.65.0