```python
import os

import streamlit as st
from langchain import HuggingFaceHub, PromptTemplate, LLMChain

# Define your AI assistant setup and configurations here
os.environ['API_KEY'] = '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'
model_id = 'tiiuae/falcon-7b-instruct'

falcon_llm = HuggingFaceHub(
    huggingfacehub_api_token=os.environ['API_KEY'],
    repo_id=model_id,
    model_kwargs={"temperature": 0.8, "max_new_tokens": 2000},
)

template = """
You are an AI assistant that provides helpful answers to user queries.
{conversation}
"""
prompt = PromptTemplate(template=template, input_variables=['conversation'])

falcon_chain = LLMChain(llm=falcon_llm, prompt=prompt, verbose=True)


# Define the Streamlit app
def main():
    st.title("Mouli's AI Assistant")

    # Initialize the conversation history as a list
    conversation_history = st.session_state.get("conversation_history", [])

    # Create an input box at the bottom for the user's message
    user_message = st.text_input("Your message:")

    # If the user's message is not empty, process it
    if user_message:
        # Add the user's message to the conversation history
        conversation_history.append(("user", user_message))

        # Combine the conversation history to use as input for the AI assistant
        conversation_input = "\n".join(
            f"{author}: {message}" for author, message in conversation_history
        )

        # Use the AI assistant to generate a response based on the conversation
        response = falcon_chain.run(conversation_input)

        # Add the AI's response to the conversation history
        conversation_history.append(("AI", response))

        # Store the updated conversation history in session state
        st.session_state.conversation_history = conversation_history

    # Display the conversation history
    display_conversation(conversation_history)


def display_conversation(conversation_history):
    st.markdown("<style>.message-container { display: flex; flex-direction: row; padding: 16px; }</style>", unsafe_allow_html=True)
    st.markdown("<style>.user-message { text-align: left; background-color: green; padding: 8px; border-radius: 8px; margin: 4px; }</style>", unsafe_allow_html=True)
    st.markdown("<style>.ai-message { text-align: right; background-color: black; padding: 8px; border-radius: 8px; margin: 4px; }</style>", unsafe_allow_html=True)

    st.markdown("<div class='message-container'>", unsafe_allow_html=True)
    for author, message in conversation_history:
        if author == "AI":
            st.markdown(f"<div class='ai-message'>{message}</div>", unsafe_allow_html=True)
        else:
            st.markdown(f"<div class='user-message'>{message}</div>", unsafe_allow_html=True)
    st.markdown("</div>", unsafe_allow_html=True)


if __name__ == "__main__":
    main()
```
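To run the app locally, save the script as, say, `app.py` (filename assumed here) and start it with `streamlit run app.py`. One caveat about the script above: the Hugging Face API token is hardcoded, so it would be exposed in the repository. A safer pattern is to read it from an environment variable instead. Below is a minimal sketch of that variant, assuming the token has been exported as `HUGGINGFACEHUB_API_TOKEN` (the variable LangChain's `HuggingFaceHub` wrapper typically checks by default); it is not part of the original app.

```python
# Sketch only: build the same Falcon LLM, but pull the token from the
# environment instead of hardcoding it. Assumes HUGGINGFACEHUB_API_TOKEN
# has been exported before the Streamlit app is launched.
import os

from langchain import HuggingFaceHub

hf_token = os.environ["HUGGINGFACEHUB_API_TOKEN"]

falcon_llm = HuggingFaceHub(
    huggingfacehub_api_token=hf_token,
    repo_id="tiiuae/falcon-7b-instruct",
    model_kwargs={"temperature": 0.8, "max_new_tokens": 2000},
)
```

When deploying to a Hugging Face Space, the token can be added as a secret in the Space's settings, which the Space then exposes to the app as an environment variable at runtime.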