from datetime import datetime

import streamlit as st
from openai import OpenAI
from langchain.document_loaders import UnstructuredPDFLoader, OnlinePDFLoader

from Obnoxious_Agent import Obnoxious_Agent
from Relevant_Documents_Agent import Relevant_Documents_Agent
from Query_Agent import Query_Agent
from Answering_Agent import Answering_Agent
from Head_Agent import Head_Agent
st.title("Mini Project 2: Streamlit Chatbot")

# TODO: Replace with your actual OpenAI API key
client = OpenAI(api_key='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')
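# A commented-out alternative (sketch, not part of the original assignment): load the key
# from Streamlit secrets or an environment variable instead of hardcoding it. The name
# "OPENAI_API_KEY" is an assumed secret/env-var name, not something this project defines.
# import os
# client = OpenAI(api_key=st.secrets.get("OPENAI_API_KEY", os.environ.get("OPENAI_API_KEY")))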
# Define a function to get the conversation history (Not required for Part-2, will be useful in Part-3)
def get_conversation():
    """Return the stored chat history in the OpenAI chat format.

    Each entry is a dict like {"role": "user" | "assistant", "content": "..."}.
    """
    history_conversation = []
    for message in st.session_state.messages:
        if message["sender"] in ("user", "assistant"):
            history_conversation.append({"role": message["sender"], "content": message["content"]})
    return history_conversation
def display_all_chat_messages():
    """Render every stored message in the chat interface with its timestamp."""
    for message in st.session_state.messages:
        # st.text_area("", value=message["content"], key=message["sender"] + str(message["id"]))
        if message["sender"] == "user":
            with st.chat_message("user"):  # show the user avatar
                st.markdown(f"**You [{message['timestamp']}]:** {message['content']}")
        elif message["sender"] == "assistant":
            with st.chat_message("assistant"):  # show the assistant avatar
                st.markdown(f"**Assistant [{message['timestamp']}]:** {message['content']}")
# Initialize the Head Agent with necessary parameters
if 'head_agent' not in st.session_state:
    openai_key = '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'
    pinecone_key = "52ef9136-6188-4e51-af13-9639bf95c163"
    pinecone_index_name = "ee596llm-project2"
    st.session_state.head_agent = Head_Agent(openai_key, pinecone_key, pinecone_index_name)
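# Note: Streamlit reruns this script on every user interaction, so caching the agent in
# st.session_state (as above) means the OpenAI/Pinecone clients are constructed once per
# browser session rather than on every rerun.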
# Your existing code for handling user input and displaying messages
# Replace the direct call to `get_completion` with `st.session_state.head_agent.process_query(prompt)`
# Example:

# Initialize the message history before handling input, so the chat can render on first load.
if "messages" not in st.session_state:
    st.session_state.messages = []

if prompt := st.chat_input("What would you like to chat about?"):
    try:
        message_id = len(st.session_state.messages)
        current_time = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        user_message = {"id": message_id, "sender": "user", "content": prompt, "timestamp": current_time}
        st.session_state.messages.append(user_message)
        # Instantiate the Obnoxious Agent and screen the query
        obnoxious_agent = Obnoxious_Agent()
        is_obnoxious = obnoxious_agent.check_query(prompt)

        # Respond based on the check
        response = "Yes" if is_obnoxious else "No"

        # You can then display this response to the user or use it as part of your application logic
        is_obnoxious_response = "Is the query obnoxious? " + response
        # st.write("Is the query obnoxious? " + response)
        # display_message(user_message)
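        # A possible extension (hedged sketch, not part of the original flow): surface the
        # check to the user and skip the Head Agent call for obnoxious queries, e.g.
        #     if is_obnoxious:
        #         st.warning(is_obnoxious_response)
        # The exact handling is left to the application logic mentioned above.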
        # ... (display user message in the chat interface)
        # display_message(user_message)  # Use the display_message function to show the user's message

        # Generate AI response
        # Note: the earlier `with st.chat_message("assistant"):` wrapper was removed here
        # because chat message containers cannot be nested; rendering is handled by
        # display_all_chat_messages() below.
        # ... (send request to OpenAI API)
        # ... (get AI response and display it)
        ai_response = st.session_state.head_agent.process_query(prompt, get_conversation())

        # ... (append AI response to messages)
        ai_message = {"id": len(st.session_state.messages), "sender": "assistant", "content": ai_response,
                      "timestamp": datetime.now().strftime("%Y-%m-%d %H:%M:%S")}
        st.session_state.messages.append(ai_message)
        print(ai_message)
        # display_message(ai_message)
    except Exception as e:
        st.error("Failed to process your message. Please try again.")
display_all_chat_messages()
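# To run the app locally (standard Streamlit invocation; the filename "app.py" is assumed):
#   streamlit run app.py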