joyson072 committed on
Commit
96702a6
1 Parent(s): b11cf64

Update app.py

Browse files
Files changed (1)
  1. app.py +65 -0
app.py CHANGED
@@ -0,0 +1,65 @@
+ import langchain
+ from langchain.embeddings.openai import OpenAIEmbeddings
+ # from langchain.vectorstores import Chroma
+ from langchain.vectorstores import FAISS
+ from langchain.text_splitter import CharacterTextSplitter
+ from langchain.llms import OpenAI
+ from langchain.chains import VectorDBQA
+ from langchain.chains import RetrievalQA
+ from langchain.document_loaders import DirectoryLoader
+ from langchain.chains import ConversationalRetrievalChain
+ from langchain.memory import ConversationBufferMemory
+ from langchain.evaluation.qa import QAGenerateChain
+ import magic
+ import os
+ import streamlit as st
+ from streamlit_chat import message
+
+ st.title("Welcome to BhubBot")
+
+ if 'responses' not in st.session_state:
+     st.session_state['responses'] = ["How can I assist you?"]
+
+ if 'requests' not in st.session_state:
+     st.session_state['requests'] = []
+
+ openai_api_key = os.getenv("OPENAI_API_KEY")
+ embeddings = OpenAIEmbeddings(openai_api_key=openai_api_key)
+ new_db = FAISS.load_local("faiss_leave_policy_RCV", embeddings)
+ llm = OpenAI(openai_api_key=openai_api_key, temperature=0.0)
+
+ # if 'buffer_memory' not in st.session_state:
+ memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True)
+ retriever = new_db.as_retriever()
+ chain = ConversationalRetrievalChain.from_llm(llm=llm, chain_type="stuff", memory=memory, retriever=retriever, verbose=False)
+
+ # container for chat history
+ response_container = st.container()
+ # container for text box
+ textcontainer = st.container()
+
+
+ with textcontainer:
+     query = st.text_input(label="Please Enter Your Prompt Here: ", placeholder="Ask me")
+     if query:
+         with st.spinner("Cooking..."):
+             # conversation_string = get_conversation_string()
+             # st.code(conversation_string)
+             # refined_query = query_refiner(conversation_string, query)
+             # st.subheader("Refined Query:")
+             # st.write(refined_query)
+             # context = find_match(refined_query)
+             # print(context)
+             response = chain.run(query)
+         st.session_state.requests.append(query)
+         st.session_state.responses.append(response)
+ with response_container:
+     if st.session_state['responses']:
+
+         for i in range(len(st.session_state['responses'])):
+             message(st.session_state['responses'][i], key=str(i))
+             if i < len(st.session_state['requests']):
+                 message(st.session_state["requests"][i], is_user=True, key=str(i) + '_user')
+
+ # with st.expander('Message history'):
+ #     st.info(memory.buffer)
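
Note: app.py only loads the prebuilt index "faiss_leave_policy_RCV"; the indexing step is not part of this commit. Below is a minimal sketch of how such an index could be built with the same legacy LangChain APIs the app imports. The script name, the docs/leave_policy directory, and the .txt glob are assumptions for illustration, not taken from the repository.

# build_index.py - hypothetical helper, not part of this commit.
# Builds the "faiss_leave_policy_RCV" directory that app.py loads via FAISS.load_local.
import os

from langchain.document_loaders import DirectoryLoader, TextLoader
from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.text_splitter import CharacterTextSplitter
from langchain.vectorstores import FAISS

openai_api_key = os.getenv("OPENAI_API_KEY")

# Assumed location of the raw leave-policy documents (plain-text files).
loader = DirectoryLoader("docs/leave_policy", glob="**/*.txt", loader_cls=TextLoader)
documents = loader.load()

# Split into overlapping chunks so each embedding covers a manageable span.
splitter = CharacterTextSplitter(chunk_size=1000, chunk_overlap=100)
chunks = splitter.split_documents(documents)

# Embed the chunks and persist the FAISS index to disk.
embeddings = OpenAIEmbeddings(openai_api_key=openai_api_key)
db = FAISS.from_documents(chunks, embeddings)
db.save_local("faiss_leave_policy_RCV")

Running a script like this once, before starting the app with streamlit run app.py, produces the on-disk index that FAISS.load_local expects.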