# NOTE(review): the six lines below are non-Python residue scraped from the
# hosting site's file viewer (status text, commit hashes, line-number gutter).
# They are commented out so the module parses; they carry no program meaning.
# Spaces:
# Running
# Running
# File size: 4,473 Bytes
# 10e4cb6 5878a82 de3564a c207ffc 3a64fb1 be722e2 10e4cb6 34a4f65 a4cce9a 34a4f65 9903fee 72d0e7a 34a4f65 72d0e7a 9903fee 34a4f65 9903fee be8b77d 9903fee 61d9513 72d0e7a 9903fee adf24b1 de3564a 1946379 cca43a4 1946379 ab6d6cd 1946379 3a64fb1 1946379 10e4cb6 de3564a d02fed4 3a64fb1 de3564a 1946379 3a64fb1 1946379 5878a82 87a519d 5878a82 3a64fb1 5878a82 1946379 5878a82 1946379 5878a82 1946379 5878a82 3a64fb1 5878a82 |
# 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 |
import streamlit as st
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
#from transformers import AutoTokenizer
#from llama_cpp import Llama
from datasets import load_dataset
import os
import requests
# Direct URL of the image used as the app's blurred backdrop.
# Replace with the direct image URL
flower_image_url = "https://i.postimg.cc/hG2FG85D/2.png"
# Inject custom CSS for the background with a centered and blurred image.
# The <style> block is raw CSS inside an f-string: only {flower_image_url}
# is interpolated; all other braces are doubled ({{ }}) to escape f-string
# substitution.
st.markdown(
f"""
<style>
/* Container for background */
html, body {{
margin: 0;
padding: 0;
overflow: hidden;
}}
[data-testid="stAppViewContainer"] {{
position: relative;
z-index: 1; /* Ensure UI elements are above the background */
}}
/* Blurred background image */
.blurred-background {{
position: fixed;
top: 0;
left: 0;
width: 100%;
height: 100%;
z-index: -1; /* Send background image behind all UI elements */
background-image: url("{flower_image_url}");
background-size: cover;
background-position: center;
filter: blur(10px); /* Adjust blur ratio here */
opacity: 0.8; /* Optional: Add slight transparency for a subtle effect */
}}
</style>
""",
unsafe_allow_html=True
)
# Add the div that the .blurred-background CSS rule targets; it sits behind
# all UI elements because of its z-index: -1.
st.markdown('<div class="blurred-background"></div>', unsafe_allow_html=True)
#""""""""""""""""""""""""" Application Code Starts here """""""""""""""""""""""""""""""""""""""""""""
# Groq API Configuration
api_key = os.environ.get("LawersGuideAPIKey") # Ensure LawersGuideAPIKey is set in your environment variables
# NOTE(review): Groq's OpenAI-compatible API normally exposes
# /openai/v1/chat/completions (model passed in the JSON body); this
# per-model ".../completions" path looks suspect — confirm against the
# Groq API reference before relying on it.
base_url = "https://api.groq.com/openai/v1/models/google/gemma-2-9b-it/completions"
# Standard bearer-token auth header; api_key is None if the env var is unset,
# which would send "Bearer None" — the API would then reject the request.
headers = {
"Authorization": f"Bearer {api_key}",
"Content-Type": "application/json"
}
# Function to query Groq model
# cache_data (not cache_resource) is Streamlit's documented cache for
# functions returning serializable values such as strings; identical
# (prompt, max_tokens, temperature) calls are served from cache.
@st.cache_data
def query_groq_model(prompt, max_tokens=100, temperature=0.7):
    """Send a completion request to the Groq API and return the generated text.

    Args:
        prompt: Full prompt string sent to the model.
        max_tokens: Maximum number of tokens to generate.
        temperature: Sampling temperature (higher = more varied output).

    Returns:
        The stripped completion text on success, or a human-readable
        "Error querying the model: ..." string on failure. Errors are
        reported in-band (not raised) because callers display whatever
        comes back directly in the UI.
    """
    # Payload construction cannot raise; keep it outside the try block.
    payload = {
        "prompt": prompt,
        "max_tokens": max_tokens,
        "temperature": temperature,
        "top_p": 1.0,
        "frequency_penalty": 0.0,
        "presence_penalty": 0.0,
        "n": 1
    }
    try:
        # timeout prevents the Streamlit script from hanging forever on a
        # stalled connection (requests has no default timeout).
        response = requests.post(base_url, headers=headers, json=payload, timeout=30)
        response.raise_for_status()
        result = response.json()
        return result["choices"][0]["text"].strip()
    except (requests.RequestException, KeyError, IndexError, ValueError) as e:
        # Narrowed from a bare `except Exception`: network/HTTP failures,
        # invalid JSON, or an unexpected response shape.
        return f"Error querying the model: {e}"
# Streamlit App
# Page title and a short description of what the app offers.
st.title("Mental Health Counseling Chat")
st.markdown("""
Welcome to the **Mental Health Counseling Chat Application**.
This platform is designed to provide **supportive, positive, and encouraging responses** using the Groq `google/gemma-2-9b-it` model.
""")
# Load example dataset for user exploration (optional)
@st.cache_resource
def load_counseling_dataset():
    """Download (and cache) the Amod mental-health counseling dataset.

    Returns the DatasetDict from the Hugging Face hub. cache_resource is
    appropriate here: the dataset is a heavyweight object that should be
    loaded once per session rather than on every script rerun.
    """
    # Uses the module-level `load_dataset` import; the previous
    # function-local re-import was redundant.
    return load_dataset("Amod/mental_health_counseling_conversations")
dataset = load_counseling_dataset()
# Optionally surface a few dataset entries so users can explore the data.
if st.checkbox("Show Example Questions and Answers from Dataset"):
    # Deterministic shuffle (fixed seed) so the same 3 examples appear each run.
    examples = dataset["train"].shuffle(seed=42).select(range(3))
    for ex in examples:
        question = ex.get("context", "N/A")
        answer = ex.get("response", "N/A")
        st.markdown(f"**Question:** {question}")
        st.markdown(f"**Answer:** {answer}")
        st.markdown("---")
# Collect the user's concern and generate a supportive reply on demand.
user_input = st.text_area("Your question or concern:", placeholder="Type your question here...")
if st.button("Get Supportive Response"):
    # Guard clause: reject empty/whitespace-only input before calling the model.
    if not user_input.strip():
        st.error("Please enter a question or concern to receive a response.")
    else:
        try:
            # Frame the input as a counseling exchange for the model.
            prompt = f"User: {user_input}\nCounselor:"
            counselor_reply = query_groq_model(prompt, max_tokens=150, temperature=0.7)
            st.subheader("Counselor's Response:")
            st.write(counselor_reply)
        except Exception as e:
            st.error(f"An error occurred while querying the model: {e}")
# Sidebar resources
# External support links plus a standing disclaimer; rendered on every run.
st.sidebar.header("Additional Mental Health Resources")
st.sidebar.markdown("""
- [Mental Health Foundation](https://www.mentalhealth.org)
- [Mind](https://www.mind.org.uk)
- [National Suicide Prevention Lifeline](https://suicidepreventionlifeline.org)
""")
st.sidebar.info("This application is not a replacement for professional counseling. If you are in crisis, please seek professional help immediately.")
# |  (NOTE(review): trailing file-viewer residue, commented out so the module parses)