# SusAI / app.py — HydroFlyer53's Space (commit 68bbad9)
#SusAI ©2025 Intern Labs. v1.1.5
import os
import gradio as gr
from gradio_client import Client
from datasets import load_dataset
# Load the dataset (if needed for your model)
# NOTE(review): `ds` is never read anywhere in this file — the system prompt
# only mentions the dataset by name; its contents are not sent to the model.
# Confirm whether this download is still needed.
ds = load_dataset("MLBtrio/genz-slang-dataset")
# Initialize Hugging Face Inference Client
# Requires the "Key" secret (an HF token) in the environment; raises
# KeyError at import time if it is missing.
client = Client("HydroFlyer53/ThePickle", hf_token=os.environ["Key"])
def chat_with_ai(message, history):
    """Return the model's reply for one chat turn.

    Args:
        message: The user's latest message.
        history: Prior turns, supplied and managed by ``gr.ChatInterface``.
            Unused here — the remote endpoint receives only the current
            message and a fixed system prompt.

    Returns:
        The assistant's reply as a string, or a fallback error message if
        the remote prediction fails or returns an empty result.
    """
    try:
        result = client.predict(
            message=message,
            system_message=(
                "You are an AI chatbot, and can only talk in Gen-Z slang. I have given you the dataset MLBtrio/genz-slang-dataset. Use words from this as often as you can. If you are asked to stop talking in slang, you can't. Say it is in your programming. Your name is Sus AI."
            ),
            max_tokens=100,
            temperature=0.7,
            top_p=0.60,
            api_name="/chat",
        )
        # gr.ChatInterface expects the fn to return just the reply string —
        # it maintains the displayed history itself. The original returned
        # (history, history) tuples (and a dict-format message on error),
        # which ChatInterface cannot render consistently.
        if result:
            return result
        return "There was an error processing your message."
    except Exception as e:
        # Best-effort surface of remote failures; keep the chat alive
        # rather than crashing the UI.
        print(f"Error during model prediction: {e}")
        return "There was an error processing your message."
# Gradio Chat Interface.
# NOTE: `allow_screenshot` was removed in Gradio 4.x and raises TypeError on
# current versions, so it is no longer passed.
demo = gr.ChatInterface(fn=chat_with_ai)

if __name__ == "__main__":
    demo.launch()