# NOTE(review): the original paste carried HuggingFace Spaces page residue here
# ("Spaces:" / "Runtime error" x2) — it is not part of the program and has been
# reduced to this comment.
"""Streamlit app "Ask the Docs": retrieval QA over a directory of .txt files.

Loads text documents, embeds them with OpenAI, indexes them in a FAISS vector
store, and answers chat questions through a LangChain RetrievalQA chain.
"""
import os

import streamlit as st
from dotenv import load_dotenv
from langchain.chains import RetrievalQA
from langchain.document_loaders import DirectoryLoader, TextLoader
from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.llms import OpenAI as LangchainOpenAI
from langchain.vectorstores import FAISS

# Load environment variables (OPENAI_API_KEY, optionally DOCS_DIR).
load_dotenv()

# Document directory is configurable via DOCS_DIR; falls back to the original
# hard-coded path so existing deployments keep working.
DOCS_DIR = os.getenv(
    "DOCS_DIR",
    r"C:\Users\iamma\Documents\JupyterHub\MODELS\V1\credit_card_texts17AUG",
)

# Page chrome.
st.set_page_config(page_title="Ask the Docs")
st.header("Ask the Docs π")
st.write('Unleash your credit card superpowers')
st.write('Revolutionize the way you track and manage your credit cards.')
st.write('Experience hassle-free financial control with Anek.')

# Fail fast with a clear message if the API key is missing; the original
# would only surface a confusing error deep inside the embedding call.
openai_api_key = os.getenv("OPENAI_API_KEY")
if not openai_api_key:
    st.write("OPENAI_API_KEY is not set. Add it to your environment or .env file.")
    st.stop()


@st.cache_resource(show_spinner="Indexing documents...")
def build_qa_chain(docs_dir: str, api_key: str) -> RetrievalQA:
    """Load .txt docs from *docs_dir*, embed them into FAISS, return a QA chain.

    Cached with ``st.cache_resource`` so the expensive load/embed/index work
    runs once, not on every Streamlit rerun (the original rebuilt the whole
    index on every chat interaction).
    """
    loader = DirectoryLoader(
        docs_dir,
        glob="**/*.txt",
        loader_cls=TextLoader,
        loader_kwargs={"encoding": "utf-8"},
    )
    docs = loader.load()
    embedding = OpenAIEmbeddings(openai_api_key=api_key)
    # FAISS.from_documents embeds the documents itself — no separate
    # embed_documents() pass is needed (the original embedded everything
    # twice, doubling the OpenAI embedding cost).
    db = FAISS.from_documents(docs, embedding)
    llm = LangchainOpenAI(openai_api_key=api_key)
    return RetrievalQA.from_chain_type(llm, retriever=db.as_retriever())


try:
    qa_chain = build_qa_chain(DOCS_DIR, openai_api_key)
except Exception as e:
    st.write(f"An error occurred while preparing the document index: {e}")
    st.stop()

# Initialize chat history with a welcome message on the first run.
if "messages" not in st.session_state:
    st.session_state.messages = [{
        "role": "assistant",
        "content": "Welcome to CreditCardChat! Your personal credit card advisor π. Drop in your queries.",
    }]

# Replay existing chat history (Streamlit reruns the whole script on every
# interaction, so this loop restores what was already said).
for message in st.session_state.messages:
    with st.chat_message(message["role"]):
        st.write(message["content"])

# Handle a newly submitted question.
if user_question := st.chat_input("Ask a question about the documents: "):
    st.session_state.messages.append({"role": "user", "content": user_question})
    # History above is already rendered; show only the NEW user message here.
    # (The original re-rendered the entire history at this point, so every
    # message appeared twice on screen after each question.)
    with st.chat_message("user"):
        st.write(user_question)

    with st.chat_message("assistant"):
        assistant_placeholder = st.empty()
        try:
            # Run the retrieval-QA chain on the question.
            result_dict = qa_chain({"query": user_question})
            result = result_dict.get("result", "I don't know.")
            assistant_placeholder.write(result)
            st.session_state.messages.append({"role": "assistant", "content": result})
        except Exception as e:
            error_msg = f"An error occurred: {e}"
            assistant_placeholder.write(error_msg)
            st.session_state.messages.append({"role": "assistant", "content": error_msg})