import nltk
import numpy as np
import random
import json
import pickle
import gradio as gr
import requests
import folium
from nltk.tokenize import word_tokenize
from nltk.stem.lancaster import LancasterStemmer
from transformers import AutoTokenizer, AutoModelForSequenceClassification, pipeline
import tensorflow as tf
import tflearn
import torch
import pandas as pd
import time
from bs4 import BeautifulSoup
import re  # Added for regex operations
import os

# Google Places API endpoints
url = "https://maps.googleapis.com/maps/api/place/textsearch/json"
places_details_url = "https://maps.googleapis.com/maps/api/place/details/json"

# Initialize NLP resources for the chatbot
nltk.download('punkt')
stemmer = LancasterStemmer()

# Load the chatbot intents file
with open("intents.json") as file:
    data = json.load(file)

# Load preprocessed data from pickle
with open("data.pickle", "rb") as f:
    words, labels, training, output = pickle.load(f)
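
# Note: data.pickle is assumed to hold the artifacts produced when intents.json
# was preprocessed for training -- the stemmed vocabulary (words), the intent
# tags (labels), and the encoded training/output arrays. If the pickle was built
# differently, the unpacking above needs to change to match.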

# Build the chatbot model
net = tflearn.input_data(shape=[None, len(training[0])])
net = tflearn.fully_connected(net, 8)
net = tflearn.fully_connected(net, 8)
net = tflearn.fully_connected(net, len(output[0]), activation="softmax")
net = tflearn.regression(net)
model = tflearn.DNN(net)
model.load("MentalHealthChatBotmodel.tflearn")
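
# Note: the layer layout above must mirror the architecture used when
# MentalHealthChatBotmodel.tflearn was trained; tflearn's model.load() restores
# weights only, so a mismatched network would fail to load or mis-predict.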

# Emotion detection model (loaded once at startup; sentiment analysis uses its own model below)
def load_model():
    tokenizer = AutoTokenizer.from_pretrained("j-hartmann/emotion-english-distilroberta-base")
    model = AutoModelForSequenceClassification.from_pretrained("j-hartmann/emotion-english-distilroberta-base")
    return tokenizer, model

tokenizer, emotion_model = load_model()

# Google Places API query function
def get_places_data(query, location, radius=5000, api_key=os.environ.get("GOOGLE_API_KEY", "")):
    # The API key defaults to the GOOGLE_API_KEY environment variable.
    params = {
        "query": query,
        "location": location,
        "radius": radius,
        "key": api_key
    }
    response = requests.get(url, params=params)
    if response.status_code == 200:
        data = response.json()
        return data.get('results', [])
    else:
        return []
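
# Illustrative call (hypothetical values; requires a valid GOOGLE_API_KEY):
#   places = get_places_data("therapist", "21.3,-157.8")
#   for place in places:
#       print(place.get("name"), place.get("formatted_address"))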

# Map generation function
def create_map(locations):
    # Default view centered on Honolulu (21.3, -157.8)
    m = folium.Map(location=[21.3, -157.8], zoom_start=12)
    for loc in locations:
        name = loc.get("name", "No Name")
        lat = loc['geometry']['location']['lat']
        lng = loc['geometry']['location']['lng']
        folium.Marker([lat, lng], popup=name).add_to(m)
    return m._repr_html_()  # Return HTML representation of the map

# Sentiment Analysis function
def analyze_sentiment(user_input):
    tokenizer = AutoTokenizer.from_pretrained("cardiffnlp/twitter-roberta-base-sentiment")
    model = AutoModelForSequenceClassification.from_pretrained("cardiffnlp/twitter-roberta-base-sentiment")
    inputs = tokenizer(user_input, return_tensors="pt")
    with torch.no_grad():
        outputs = model(**inputs)
    predicted_class = torch.argmax(outputs.logits, dim=1).item()
    sentiment = ["Negative", "Neutral", "Positive"][predicted_class]
    return sentiment
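
# Design note: this function downloads/loads the sentiment model on every call,
# which is slow; caching it at module level (as done for the emotion model
# above) would likely be the better choice in a long-running Space.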

# Build a bag-of-words vector over the training vocabulary
def bag_of_words(s, words):
    bag = [0 for _ in range(len(words))]
    s_words = word_tokenize(s)
    # Stem first, then compare against the (already stemmed) vocabulary
    s_words = [stemmer.stem(word.lower()) for word in s_words]
    for se in s_words:
        for i, w in enumerate(words):
            if w == se:
                bag[i] = 1
    return np.array(bag)
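
# Illustrative behaviour (hypothetical vocabulary, not the one from data.pickle):
# if words were ["help", "sleep"] and the stemmed tokens of the message included
# "help", the function would return array([1, 0]) -- a 1 wherever a vocabulary
# entry occurs in the sentence.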

# Chatbot function for user interaction
def chatbot(message, history):
    history = history or []
    message = message.lower()
    try:
        results = model.predict([bag_of_words(message, words)])
        results_index = np.argmax(results)
        tag = labels[results_index]
        for tg in data["intents"]:
            if tg['tag'] == tag:
                responses = tg['responses']
                response = random.choice(responses)
                break
        else:
            response = "I'm sorry, I didn't understand that. Could you please rephrase?"
    except Exception as e:
        response = f"An error occurred: {str(e)}"
    history.append((message, response))
    return history, history
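
# The returned history is a list of (user_message, bot_response) tuples, which
# matches the message-pair format the gr.Chatbot component displays below.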

# Emotion Detection function
def detect_emotion(user_input):
    pipe = pipeline("text-classification", model=emotion_model, tokenizer=tokenizer)
    result = pipe(user_input)
    emotion = result[0]['label']
    return emotion
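
# The j-hartmann model classifies text into seven emotion labels (anger,
# disgust, fear, joy, neutral, sadness, surprise), so `emotion` should be one
# of those strings.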

# Scrape a website to extract a phone number and email address
def scrape_website_for_contact_info(website):
    phone_number = "Not available"
    email = "Not available"
    try:
        response = requests.get(website, timeout=5)
        soup = BeautifulSoup(response.content, 'html.parser')
        # Loose phone pattern: optional "+"/"(", then at least 8 characters of digits, spaces, dashes, or parentheses
        phone_match = re.search(r'\+?\(?\d[\d\-\(\) ]{6,}\d', soup.get_text())
        if phone_match:
            phone_number = phone_match.group()
        email_match = re.search(r'[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}', soup.get_text())
        if email_match:
            email = email_match.group()
    except Exception as e:
        print(f"Error scraping website {website}: {e}")
    return phone_number, email
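
# Illustrative call (hypothetical URL). Note this helper is not wired into the
# Gradio interface below, so it has to be invoked manually:
#   phone, email = scrape_website_for_contact_info("https://example.com")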

# Main handler: emotion detection, sentiment analysis, place search, and the chatbot
def emotion_and_chatbot(user_input, history, query, location):
    # Emotion and sentiment detection
    emotion = detect_emotion(user_input)
    sentiment = analyze_sentiment(user_input)
    emotion_response = f"Emotion Detected: {emotion}. Sentiment: {sentiment}"

    # Search places (for wellness or other queries)
    places_data = get_places_data(query, location)
    places_map = create_map(places_data) if places_data else "No places found."

    # Chatbot response
    history, _ = chatbot(user_input, history)

    return emotion_response, places_map, history, history
# Gradio interface setup | |
iface = gr.Interface( | |
fn=emotion_and_chatbot, | |
inputs=[ | |
gr.Textbox(label="Enter your message", placeholder="How are you feeling?"), | |
"state", # Chat history | |
gr.Textbox(label="Search Query (e.g. wellness)", placeholder="e.g. therapist"), | |
gr.Textbox(label="Location (latitude,longitude)", placeholder="e.g. 21.3,-157.8") | |
], | |
outputs=[ | |
gr.Textbox(label="Emotion and Sentiment"), | |
gr.HTML(label="Places Map"), | |
gr.Chatbot(label="Chatbot History"), | |
"state" | |
], | |
title="Wellbeing Chatbot with Emotion Detection & Location Search", | |
description="A chatbot that provides mental health support, analyzes emotions, and helps find wellness professionals near you." | |
) | |

# Launch Gradio app
if __name__ == "__main__":
    iface.launch(debug=True)