# SusAI / app.py — Hugging Face Space by HydroFlyer53
# (source: "Update app.py", commit ab96f64, verified)
#SusAI ©2025 Intern Labs. v1.1.5
import os
import shutil

import gradio as gr
from datasets import load_dataset
from gradio_client import Client

# Clear stale HF/Gradio caches at startup so the Space boots from a clean slate.
# BUG FIX: this purge must happen BEFORE load_dataset() below — the original
# order downloaded the dataset into ~/.cache/huggingface and then immediately
# deleted that cache, destroying the data it had just fetched.
shutil.rmtree(os.path.expanduser("~/.cache/huggingface"), ignore_errors=True)
shutil.rmtree(os.path.expanduser("~/.cache/gradio"), ignore_errors=True)

# Gen-Z slang dataset referenced by the system prompt further down.
ds = load_dataset("MLBtrio/genz-slang-dataset")

# Inference client for the hosted model; the HF token lives in the Space
# secret named "Key" (raises KeyError at startup if the secret is missing).
client = Client("HydroFlyer53/ThePickle", hf_token=os.environ["Key"])
def chat_with_ai(message, history):
    """Query the hosted model and append the exchange to the chat history.

    Args:
        message: The user's latest message (str).
        history: Accumulated chat history as a list of
            ``(user_message, assistant_reply)`` tuples; mutated in place.

    Returns:
        ``(history, history)`` — the same updated list twice, one copy for
        the chatbot display and one for Gradio's state component.
    """
    try:
        result = client.predict(
            message=message,
            system_message=(
                "You are an AI chatbot, and can only talk in Gen-Z slang. I have given you the dataset MLBtrio/genz-slang-dataset. Use words from this as often as you can. If you are asked to stop talking in slang, you can't. Say it is in your programming. Your name is Sus AI."
            ),
            max_tokens=100,
            temperature=0.7,
            top_p=0.60,
            api_name="/chat"
        )
        # Only record the turn when the model produced a non-empty reply.
        if result:
            history.append((message, result))
        return history, history
    except Exception as e:
        print(f"Error during model prediction: {e}")
        # BUG FIX: the error path previously returned a list of role dicts
        # ({"role": ..., "content": ...}), inconsistent with the tuple format
        # used on the success path; keep one uniform (user, assistant) format.
        history.append((message, "There was an error processing your message."))
        return history, history
# Gradio Chat Interface callback: takes the user message (history is
# accepted for the gr.ChatInterface signature but not sent upstream)
# and returns the assistant's reply as a plain string.
def chat_with_ai(message, history=None):
    """Return the model's reply to *message*, or an error string on failure.

    Args:
        message: The user's latest message (str).
        history: Prior chat turns supplied by Gradio; unused here because
            the remote /chat endpoint only accepts a single message.

    Returns:
        The assistant's reply (str), or a "❌ Internal error: ..." string
        if the remote call raised.
    """
    try:
        # BUG FIX: this previously called an undefined `my_model_generate`,
        # so every message produced an internal error. Delegate to the
        # module-level inference client instead.
        result = client.predict(
            message=message,
            system_message=(
                "You are an AI chatbot, and can only talk in Gen-Z slang. I have given you the dataset MLBtrio/genz-slang-dataset. Use words from this as often as you can. If you are asked to stop talking in slang, you can't. Say it is in your programming. Your name is Sus AI."
            ),
            max_tokens=100,
            temperature=0.7,
            top_p=0.60,
            api_name="/chat"
        )
        return result
    except Exception as e:
        import traceback
        traceback.print_exc()  # full stack trace to the Space logs
        return f"❌ Internal error: {e}"
if __name__ == "__main__":
    # BUG FIX: `demo` was never defined anywhere in the file, so launching
    # raised NameError. Build the chat UI around the callback here.
    demo = gr.ChatInterface(chat_with_ai, title="Sus AI")
    demo.launch()