Spaces:
Sleeping
Sleeping
File size: 1,979 Bytes
5dae27e 992a491 5dae27e 01015c9 5dae27e |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 |
# System prompt sent to the model on every request (see predict_beta).
SYSTEM_PROMPT = "Your bot's primary function is to have meaningful conversations about scooped bagels. Your prompts should be engaging, and should help users explore their thoughts and feelings about this delicious breakfast food."
# App title, rendered as markdown in the welcome message (links to the model card).
TITLE = "[Tulu](https://huggingface.co/allenai/tulu-2-dpo-13b) Bagel Buddy"
# Sample prompt shown in the welcome message and pre-filled in the textbox.
EXAMPLE_INPUT = "How do you like your scooped bagels topped?"
import gradio as gr
from gradio_client import Client
# NOTE(review): `os` and `requests` appear unused in this file — confirm before removing.
import os
import requests
# Endpoint of the hosted Tulu Space (pinned to a specific replica);
# consumed by gradio_client.Client in predict_beta.
tulu = "https://tonic1-tulu.hf.space/--replicas/5cpc5/"
def predict_beta(message, chatbot=None, system_prompt=""):
    """Send *message* to the hosted Tulu Space and return the model's reply.

    Parameters:
        message: The user's input text.
        chatbot: Chat history (currently unused by the remote call; kept for
            interface compatibility with gr.ChatInterface callers).
        system_prompt: System prompt forwarded to the model.

    Returns:
        The raw prediction result from the Space, or None on empty response
        or any error (errors are printed, not raised — this is a deliberate
        best-effort boundary for the UI).
    """
    # Fix: avoid a mutable default argument ([] is shared across calls).
    if chatbot is None:
        chatbot = []
    client = Client(tulu)
    try:
        # Generation hyperparameters for the remote endpoint.
        max_new_tokens = 800
        temperature = 0.4
        top_p = 0.9
        # NOTE(review): repetition_penalty < 1.0 *encourages* repetition on
        # most backends — confirm this value is intentional.
        repetition_penalty = 0.9
        advanced = True
        # Making the prediction
        result = client.predict(
            message,
            system_prompt,
            max_new_tokens,
            temperature,
            top_p,
            repetition_penalty,
            advanced,
            fn_index=0
        )
        print("Raw API Response:", result)  # Debugging print
        if result is not None:
            print("Processed bot_message:", result)  # Debugging print
            return result
        else:
            print("No response or empty response from the model.")  # Debugging print
            return None
    except Exception as e:
        error_msg = f"An error occurred: {str(e)}"
        print(error_msg)  # Debugging print
        return None
def test_preview_chatbot(message, history):
    """ChatInterface adapter: forward the user message and history to the
    remote model using the module-level SYSTEM_PROMPT."""
    return predict_beta(message, history, SYSTEM_PROMPT)
# Greeting shown as the assistant's first chat bubble; interpolates the
# markdown title and the sample prompt defined above.
welcome_preview_message = f"""
Welcome to **{TITLE}**! Say something like:
''{EXAMPLE_INPUT}''
"""
# UI wiring: panel-style chat seeded with the welcome message, and a
# textbox pre-filled with the example prompt.
# Fix: removed a stray trailing "|" after demo.launch() that made the
# file a syntax error.
chatbot_preview = gr.Chatbot(layout="panel", value=[(None, welcome_preview_message)])
textbox_preview = gr.Textbox(scale=7, container=False, value=EXAMPLE_INPUT)
demo = gr.ChatInterface(test_preview_chatbot, chatbot=chatbot_preview, textbox=textbox_preview)
demo.launch()