File size: 2,823 Bytes
5251091
 
 
 
fe2c3a0
 
 
5251091
 
 
 
 
b2c0bfa
 
5251091
 
 
 
fe2c3a0
9571559
5251091
 
 
 
 
9571559
 
 
5251091
9571559
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
5251091
9571559
 
 
 
 
 
 
 
 
 
 
4086994
9571559
 
 
5251091
9571559
4086994
9571559
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
import html
import os

import google.generativeai as genai
import streamlit as st

# Configure the Gemini client with the API key from the environment
# (set GOOGLE_KEY in Hugging Face Spaces secrets or as a local env var).
api_key = os.getenv("GOOGLE_KEY")
genai.configure(api_key=api_key)

# One model and one persistent chat session for the whole app lifetime.
model = genai.GenerativeModel("gemini-pro")
chat = model.start_chat(history=[])

def get_gemini_response(prompt):
    """Send *prompt* to the Gemini chat session and return the reply text.

    The reply is requested in streaming mode; every streamed chunk's text
    is collected and the pieces are joined with a single space. Any failure
    is converted into a human-readable error string rather than raised.
    """
    try:
        pieces = []
        for part in chat.send_message(prompt, stream=True):
            pieces.append(part.text)
        return " ".join(pieces)
    except Exception as e:
        return f"An error occurred: {str(e)}"

# Streamlit page setup.
st.set_page_config(page_title="Med ChatBot")
st.title("Medical ChatBot")

# Make sure the chat-history list exists in session state; it survives
# Streamlit reruns so the conversation persists across interactions.
st.session_state.setdefault("chat_history", [])

# Callback for the chat text input (fires on Enter via on_change).
def submit_input():
    """Handle a submitted chat message.

    Builds a prompt embedding the system instructions plus the prior
    conversation, asks Gemini for a reply, records both sides of the
    exchange in session state, and clears the input box.
    """
    # Strip so a whitespace-only submission is treated as empty input.
    input_text = st.session_state["input"].strip()

    if input_text:
        # Flatten prior turns into a single string for prompt context.
        chat_history_text = " ".join(
            f"{role}: {text}" for role, text in st.session_state["chat_history"]
        )

        context = (
            "You are a medical chatbot designed to assist users in understanding their symptoms. "
            "Provide clear, concise, and informative responses based on NHS guidelines. "
            "Avoid technical jargon and code snippets. If asked a question unrelated to medical topics, "
            "respond with: 'I am a medical bot and I don't have that knowledge.' "
            f"Previous conversation: {chat_history_text} "
        )

        prompt = f"{context} User's latest input: {input_text}"  # Include the latest user input
        # get_gemini_response always returns a str (it joins the streamed
        # chunks itself), so no list-handling branch is needed here.
        response = get_gemini_response(prompt)

        # Record both sides of the exchange in the session chat history.
        st.session_state["chat_history"].append(("You", input_text))
        st.session_state["chat_history"].append(("Bot", response))

        # Clear the input field so the widget is empty on the next rerun.
        st.session_state["input"] = ""

# Chat input: pressing Enter triggers submit_input via on_change (no button).
st.text_input("Enter your message:", key="input", on_change=submit_input)

# Display chat history in a chatbox style.
st.subheader("Chat History")
for role, text in st.session_state['chat_history']:
    # SECURITY: messages contain untrusted user/model text and are rendered
    # with unsafe_allow_html=True, so escape them first — otherwise any HTML
    # or <script> in a message would be executed in the page (XSS).
    safe_text = html.escape(text)
    if role == "You":
        st.markdown(f"<div style='text-align: right; color: red;'>{role}: {safe_text}</div>", unsafe_allow_html=True)
    else:
        st.markdown(f"<div style='text-align: left; color: green;'>{role}: {safe_text}</div>", unsafe_allow_html=True)