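"""Streamlit entry point for the 3Step AI Chatbot.

Initializes per-session state and routes between the RAG chatbot and the
Academic Research Assistant interfaces.
"""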
import uuid
import streamlit as st
from dotenv import load_dotenv
import scholar_integration
import rag_chatbot
from document_processor import DocumentProcessor
from utils import sample_suggestions
# Load environment variables from .env file
load_dotenv()
st.set_page_config(page_title="3Step AI Chatbot", layout="wide")
# Initialize session variables
if "messages" not in st.session_state:
st.session_state.messages = []
if "toggle" not in st.session_state:
st.session_state.toggle = False
if "vector_store" not in st.session_state:
st.session_state.vector_store = None
if "user_id" not in st.session_state:
st.session_state.user_id = uuid.uuid4().hex[:8] # Unique ID per user
if "processing_canceled" not in st.session_state:
st.session_state.processing_canceled = False
if "selected_question" not in st.session_state:
st.session_state.selected_question = None
if "suggested_questions" not in st.session_state:
st.session_state.suggested_questions = sample_suggestions
if "processing_question" not in st.session_state:
st.session_state.processing_question = False
if "current_question" not in st.session_state:
st.session_state.current_question = None
def main():
    # Initialize document processor
    doc_processor = DocumentProcessor()

    mode = st.sidebar.radio("Choose mode:", ["RAG Chatbot", "Academic Research Assistant"])

    if mode == "RAG Chatbot":
        # Render the RAG chatbot interface
        rag_chatbot.add_rag_chatbot_interface()
    else:
        st.empty()
        # Render the academic research assistant interface
        scholar_integration.add_scholarly_chat_interface()
if __name__ == "__main__":
    main()