import os

import streamlit as st
from transformers import AutoModelForCausalLM, AutoTokenizer, BitsAndBytesConfig, pipeline
from datasets import load_dataset

# Direct URL of the background image
flower_image_url = "https://i.postimg.cc/hG2FG85D/2.png"

# Inject custom CSS for a centered, blurred background image
st.markdown(
    f"""
    <style>
    /* Container for background */
    html, body {{
        margin: 0;
        padding: 0;
        overflow: hidden;
    }}
    [data-testid="stAppViewContainer"] {{
        position: relative;
        z-index: 1; /* Keep UI elements above the background */
    }}
    /* Blurred background image */
    .blurred-background {{
        position: fixed;
        top: 0;
        left: 0;
        width: 100%;
        height: 100%;
        z-index: -1; /* Send the background image behind all UI elements */
        background-image: url("{flower_image_url}");
        background-size: cover;
        background-position: center;
        filter: blur(10px); /* Adjust the blur radius here */
        opacity: 0.8; /* Optional: slight transparency for a subtler effect */
    }}
    </style>
    """,
    unsafe_allow_html=True,
)

# Add the blurred background div
st.markdown('<div class="blurred-background"></div>', unsafe_allow_html=True)
#""""""""""""""""""""""""" Application Code Starts here """"""""""""""""""""""""""""""""""""""""""""" | |
# Hugging Face access token | |
HF_TOKEN = "HF_TOKEN" # Replace with your actual token or set it as an environment variable | |
# Load the text-generation pipeline (model + tokenizer), cached so the model is
# only loaded once per session
@st.cache_resource
def load_text_generation_pipeline():
    model_name = "google/gemma-2-9b-it"
    tokenizer = AutoTokenizer.from_pretrained(model_name, token=HF_TOKEN)
    model = AutoModelForCausalLM.from_pretrained(
        model_name,
        quantization_config=BitsAndBytesConfig(load_in_8bit=True),  # requires bitsandbytes and a CUDA GPU
        device_map="auto",
        token=HF_TOKEN,
    )
    return pipeline("text-generation", model=model, tokenizer=tokenizer)

text_generator = load_text_generation_pipeline()
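# The pipeline returns a list with one dict per requested sequence; the text
# lives under the "generated_text" key, e.g. (illustrative output):
#   text_generator("Hello", max_new_tokens=5)
#   -> [{"generated_text": "Hello there, how are"}]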
# Load the counseling dataset (cached so it is only fetched once per session)
@st.cache_resource
def load_counseling_dataset():
    return load_dataset("Amod/mental_health_counseling_conversations")

dataset = load_counseling_dataset()
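# Each record pairs a user message with a counselor reply. The column names may
# be capitalized ("Context"/"Response") depending on the dataset version on the
# Hub, so the display code below checks both spellings.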
# Streamlit app UI
st.title("Mental Health Counseling Chat")
st.markdown("""
Welcome to the **Mental Health Counseling Chat Application**.
This platform is designed to provide **supportive, positive, and encouraging responses** using a fast and efficient language model.
""")
# Display example dataset entries
if st.checkbox("Show Example Questions and Answers from Dataset"):
    sample = dataset["train"].shuffle(seed=42).select(range(3))  # Display 3 random samples
    for example in sample:
        st.markdown(f"**Question:** {example.get('Context', example.get('context', 'N/A'))}")
        st.markdown(f"**Answer:** {example.get('Response', example.get('response', 'N/A'))}")
        st.markdown("---")
# User input for mental health concerns
user_input = st.text_area("Your question or concern:", placeholder="Type your question here...")

if st.button("Get Supportive Response"):
    if user_input.strip():
        try:
            # Generate a response using the text-generation pipeline
            prompt = f"User: {user_input}\nCounselor:"
            response = text_generator(prompt, max_new_tokens=150, num_return_sequences=1)
            # The pipeline echoes the prompt, so strip it before displaying the reply
            counselor_reply = response[0]["generated_text"][len(prompt):].strip()
            st.subheader("Counselor's Response:")
            st.write(counselor_reply)
        except Exception as e:
            st.error(f"An error occurred while generating the response: {e}")
    else:
        st.error("Please enter a question or concern to receive a response.")
# Sidebar resources
st.sidebar.header("Additional Mental Health Resources")
st.sidebar.markdown("""
- [Mental Health Foundation](https://www.mentalhealth.org)
- [Mind](https://www.mind.org.uk)
- [National Suicide Prevention Lifeline](https://suicidepreventionlifeline.org)
""")
st.sidebar.info("This application is not a replacement for professional counseling. If you are in crisis, please seek professional help immediately.")