import gradio as gr
from gradio_client import Client

SYSTEM_PROMPT = "Your bot's primary function is to have meaningful conversations about scooped bagels. Your prompts should be engaging, and should help users explore their thoughts and feelings about this delicious breakfast food."
TITLE = "[Tulu](https://huggingface.co/allenai/tulu-2-dpo-13b) Bagel Buddy"
EXAMPLE_INPUT = "How do you like your scooped bagels topped?"

# URL of the hosted Tulu Space used for inference
tulu = "https://tonic1-tulu.hf.space/--replicas/5cpc5/"

def predict_beta(message, chatbot=None, system_prompt=""):
    """Send a message to the remote Tulu Space and return the model's reply (None on failure)."""
    client = Client(tulu)
    try:
        # Generation settings
        max_new_tokens = 800
        temperature = 0.4
        top_p = 0.9
        repetition_penalty = 0.9
        advanced = True

        # Call the Space's prediction endpoint
        result = client.predict(
            message,
            system_prompt,
            max_new_tokens,
            temperature,
            top_p,
            repetition_penalty,
            advanced,
            fn_index=0,
        )

        print("Raw API Response:", result)  # Debugging print
        if result is not None:
            print("Processed bot_message:", result)  # Debugging print
            return result
        else:
            print("No response or empty response from the model.")  # Debugging print
            return None
    except Exception as e:
        error_msg = f"An error occurred: {str(e)}"
        print(error_msg)  # Debugging print
        return None

def test_preview_chatbot(message, history):
    """Gradio ChatInterface callback: forward the user message and history to predict_beta."""
    response = predict_beta(message, history, SYSTEM_PROMPT)
    return response
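
# Greeting shown in the chat panel before the user sends a message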
welcome_preview_message = f"""
Welcome to **{TITLE}**! Say something like:
''{EXAMPLE_INPUT}''
"""
chatbot_preview = gr.Chatbot(layout="panel", value=[(None, welcome_preview_message)])
textbox_preview = gr.Textbox(scale=7, container=False, value=EXAMPLE_INPUT)
demo = gr.ChatInterface(test_preview_chatbot, chatbot=chatbot_preview, textbox=textbox_preview)
demo.launch()