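"""Streamlit chat app for the BuyProperly AI Assistant.

Answers user questions with a LangChain "stuff" question-answering chain over
an OpenAI model, using documents retrieved from a locally persisted FAISS index.
"""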
import os

import streamlit as st
from dotenv import load_dotenv
from langchain.chains.question_answering import load_qa_chain
from langchain.document_loaders import TextLoader
from langchain.embeddings import OpenAIEmbeddings
from langchain.llms import OpenAI
from langchain.prompts import PromptTemplate
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain.vectorstores import FAISS

# Read OPENAI_API_KEY (and any other settings) from a local .env file.
load_dotenv()
# Never hard-code the API key; pull it from the environment loaded above.
embeddings = OpenAIEmbeddings(openai_api_key=os.getenv("OPENAI_API_KEY"))
# Load the FAISS index that was previously built and saved to "faiss_index".
new_db = FAISS.load_local("faiss_index", embeddings)
def get_text_chunks(documents):
    """Split loaded documents into small overlapping chunks for embedding."""
    text_splitter = RecursiveCharacterTextSplitter(
        separators=["\n\n", "\n", " "],
        chunk_size=200,
        chunk_overlap=50,
        length_function=len,
    )
    chunks = text_splitter.split_documents(documents)
    return chunks
# def get_vector_store(text_chunks):
#     vectorstore_openai = FAISS.from_documents(text_chunks, embeddings)
#     vectorstore_openai.save_local("faiss_index")
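# Sketch of a one-off index rebuild (assumes a hypothetical local file
# "buyproperly_faq.txt"; uncomment get_vector_store above before running):
#   loader = TextLoader("buyproperly_faq.txt", encoding="UTF-8")
#   chunks = get_text_chunks(loader.load())
#   get_vector_store(chunks)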
def get_conversational_chain():
    """Build a "stuff" QA chain that answers only from the retrieved context."""
    prompt_template = """
    Answer the question as briefly as possible from the provided context, making sure to include all relevant details.
    If the answer is not in the provided context, just say "answer is not available in the context"; do not provide a wrong answer.
    Answer as BuyProperly customer care addressing a Canadian customer.\n\n
    Context:\n {context}\n
    Question: \n{question}\n
    Answer:
    """
    model = OpenAI(temperature=0.6, max_tokens=500, model='gpt-3.5-turbo-instruct')
    prompt = PromptTemplate(template=prompt_template, input_variables=["context", "question"])
    chain = load_qa_chain(model, chain_type="stuff", prompt=prompt)
    return chain
def user_input(user_question):
    """Answer a user question from the indexed documents and render the reply."""
    # Retrieve the most relevant chunks from the FAISS index.
    docs = new_db.similarity_search(user_question)
    chain = get_conversational_chain()
    response = chain(
        {"input_documents": docs, "question": user_question},
        return_only_outputs=True,
    )
    st.write("Reply: ", response["output_text"])
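# Quick manual check outside the Streamlit UI (sketch; the question below is
# only an illustrative example and the saved FAISS index must already exist):
#   docs = new_db.similarity_search("How do I start investing?")
#   print(get_conversational_chain()(
#       {"input_documents": docs, "question": "How do I start investing?"},
#       return_only_outputs=True))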
def main():
    st.set_page_config(page_title="Chat with BuyProperly AI Assistant")
    # Sidebar flow for (re)building the index from a local text file, currently disabled:
    # with st.sidebar:
    #     st.title("Menu:")
    #     input_file_path = st.sidebar.text_input("Enter the path of the text file:")
    #     process_url_clicked = st.sidebar.button("Process URLs")
    #     if process_url_clicked:
    #         loader = TextLoader(input_file_path, encoding='UTF-8')
    #         raw_text = loader.load()
    #         text_chunks = get_text_chunks(raw_text)
    #         get_vector_store(text_chunks)
    #         st.success("Done")
    user_question = st.text_input("Ask a Question:")
    if st.button("Submit & Process"):
        with st.spinner("Processing..."):
            if user_question:
                user_input(user_question)


if __name__ == "__main__":
    main()