import streamlit as st
from transformers import pipeline

# Load the BlenderBot pipeline once and cache it so the model is not reloaded on every rerun
@st.cache_resource
def load_pipeline():
    return pipeline("text2text-generation", model="facebook/blenderbot-400M-distill")

pipe = load_pipeline()

# Initialize conversation history
if 'conversation_history' not in st.session_state:
    st.session_state.conversation_history = ""

def converse(user_message):
    # Append the user's message to the conversation history
    st.session_state.conversation_history += f"User: {user_message}\n"
    # Generate a response conditioned on the full conversation history
    response = pipe(st.session_state.conversation_history)[0]['generated_text']
    # Append the model's response to the conversation history
    st.session_state.conversation_history += f"Bot: {response}\n"
    return response

# Streamlit app
st.title("Conversational AI with Streamlit")
user_message = st.text_input("You:", "")

if st.button("Send"):
    if user_message:
        bot_response = converse(user_message)
        st.text_area("Bot Response", value=bot_response, height=100, max_chars=None, key="response")
    else:
        st.error("Please enter a message before sending.")

if st.button("Show Conversation History"):
    st.text_area("Conversation History", value=st.session_state.conversation_history, height=300, max_chars=None, key="history")