import gradio as gr
import requests
import os
import json
import random
from elo import update_elo_ratings # Custom function for ELO ratings
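# NOTE: the custom `elo` module is not included in this file. As a rough sketch
# (an assumption about what update_elo_ratings does, not the Space's actual code),
# a standard Elo update with a conventional K-factor of 32 would look like:
#
#     def update_elo_ratings(elo_ratings, winner, loser, k=32):
#         # Expected score of the winner under the logistic Elo model
#         expected = 1 / (1 + 10 ** ((elo_ratings[loser] - elo_ratings[winner]) / 400))
#         elo_ratings[winner] += k * (1 - expected)
#         elo_ratings[loser] -= k * (1 - expected)
#         return elo_ratings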
# Load the chatbot URLs and their respective model names from a JSON file
with open('chatbot_urls.json', 'r') as file:
    chatbots = json.load(file)
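# chatbot_urls.json is expected to map model names to inference endpoint URLs,
# e.g. (illustrative placeholders, not the Space's real endpoints):
# {
#     "model-a": "https://<endpoint-for-model-a>",
#     "model-b": "https://<endpoint-for-model-b>"
# }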
# Initialize or get user-specific ELO ratings kept in the Gradio session state
def get_user_elo_ratings(state):
    if 'elo_ratings' not in state:
        state['elo_ratings'] = read_elo_ratings()
    return state['elo_ratings']
# Read ELO ratings from file, defaulting every model to 1200 if the file is missing
def read_elo_ratings():
    try:
        with open('elo_ratings.json', 'r') as file:
            elo_ratings = json.load(file)
    except FileNotFoundError:
        elo_ratings = {model: 1200 for model in chatbots.keys()}
    return elo_ratings
# Write ELO ratings back to file
def write_elo_ratings(elo_ratings):
    with open('elo_ratings.json', 'w') as file:
        json.dump(elo_ratings, file, indent=4)
def get_bot_response(url, prompt):
    payload = {
        "input": {
            "prompt": prompt,
            "sampling_params": {
                "max_new_tokens": 16,
                "temperature": 0.7,
            }
        }
    }
    headers = {
        "accept": "application/json",
        "content-type": "application/json",
        "authorization": os.environ.get("RUNPOD_TOKEN")
    }
    response = requests.post(url, json=payload, headers=headers)
    return response.json()['output'][0]['generated_text'].replace(prompt, "")
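# Note: the parsing in get_bot_response assumes the endpoint responds with JSON
# shaped like {"output": [{"generated_text": "<prompt + completion>"}]}; any other
# shape will raise a KeyError or IndexError.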
# Send the same prompt to two randomly chosen bots and remember which they were
def chat_with_bots(state, user_input):
    bot_names = list(chatbots.keys())
    random.shuffle(bot_names)
    bot1_url, bot2_url = chatbots[bot_names[0]], chatbots[bot_names[1]]
    state['last_bots'] = [bot_names[0], bot_names[1]]
    bot1_response = get_bot_response(bot1_url, user_input)
    bot2_response = get_bot_response(bot2_url, user_input)
    return bot1_response, bot2_response
# Apply an ELO update for the selected winner and persist the new ratings
def update_ratings(state, winner_index):
    elo_ratings = get_user_elo_ratings(state)
    winner = state['last_bots'][winner_index]
    loser = state['last_bots'][1 - winner_index]
    elo_ratings = update_elo_ratings(elo_ratings, winner, loser)
    write_elo_ratings(elo_ratings)
    return f"Updated ELO ratings:\n{winner}: {elo_ratings[winner]}\n{loser}: {elo_ratings[loser]}"
# A vote for Model A: the bot shown first (index 0) wins this round
def vote_up_model(state, chatbot):
    update_message = update_ratings(state, 0)
    chatbot.append((None, update_message))
    return chatbot

# A vote for Model B: the bot shown second (index 1) wins this round
def vote_down_model(state, chatbot):
    update_message = update_ratings(state, 1)
    chatbot.append((None, update_message))
    return chatbot
# Handle a user prompt: query both bots, append the exchange to each chat window,
# and enable the voting buttons
def user_ask(state, chatbot1, chatbot2, textbox, upvote_btn_a, upvote_btn_b):
    user_input = textbox
    bot1_response, bot2_response = chat_with_bots(state, user_input)
    chatbot1.append((user_input, bot1_response))
    chatbot2.append((user_input, bot2_response))
    state['elo_ratings'] = get_user_elo_ratings(state)
    # Clear the textbox and enable both voting buttons
    return state, chatbot1, chatbot2, "", gr.update(interactive=True), gr.update(interactive=True)
with gr.Blocks() as demo:
    state = gr.State({})
    with gr.Row():
        # First column for Model A
        with gr.Column():
            chatbot1 = gr.Chatbot(label='Model A')
            with gr.Row():
                upvote_btn_a = gr.Button(value="👍 Upvote A", interactive=False)
        # Second column for Model B
        with gr.Column():
            chatbot2 = gr.Chatbot(label='Model B')
            with gr.Row():
                upvote_btn_b = gr.Button(value="👍 Upvote B", interactive=False)
    # Textbox and submit button at the bottom
    with gr.Row():
        textbox = gr.Textbox(placeholder="Enter your prompt and press ENTER")
        submit_btn = gr.Button(value="Send")
    # Interaction logic
    textbox.submit(user_ask, [state, chatbot1, chatbot2, textbox, upvote_btn_a, upvote_btn_b], [state, chatbot1, chatbot2, textbox, upvote_btn_a, upvote_btn_b])
    submit_btn.click(user_ask, [state, chatbot1, chatbot2, textbox, upvote_btn_a, upvote_btn_b], [state, chatbot1, chatbot2, textbox, upvote_btn_a, upvote_btn_b])
    upvote_btn_a.click(vote_up_model, [state, chatbot1], [chatbot1])
    # An upvote for Model B means the second bot won, so route it to the index-1 handler
    upvote_btn_b.click(vote_down_model, [state, chatbot2], [chatbot2])

# Start the interface
demo.queue()
demo.launch(server_name='0.0.0.0', server_port=7860)