# Hugging Face Spaces status banner ("Spaces: Sleeping") captured by the page
# scrape — not part of the program; kept here as a comment so the module parses.
import gradio as gr | |
import os | |
from evaluation_logic import run_evaluation | |
from eval.predict import PROMPT_FORMATTERS | |
# Preset prompt templates shown in the UI, keyed by formatter name.
# Each formatter class exposes its template via the PROMPT_TEMPLATE attribute.
PROMPT_TEMPLATES = {
    preset: PROMPT_FORMATTERS[preset]().PROMPT_TEMPLATE
    for preset in ("duckdbinstgraniteshort", "duckdbinstgranitebench", "duckdbinst")
}
def gradio_run_evaluation(inference_api, model_name, prompt_format, openrouter_token=None, custom_prompt=None):
    """Run the SQL evaluation and stream cumulative output to the UI.

    Args:
        inference_api: Backend identifier ("openrouter" or "inference_api").
        model_name: Model identifier; surrounding whitespace is stripped.
        prompt_format: Name of the prompt format preset, or "custom".
        openrouter_token: OpenRouter API key; only used when inference_api
            is "openrouter".
        custom_prompt: Custom prompt template text passed through to the
            evaluation.

    Yields:
        The accumulated output lines joined by newlines, so the Gradio
        textbox shows progress as results arrive.
    """
    # Export the token only when one was actually supplied. The previous code
    # ran str(openrouter_token) unconditionally, which wrote the literal
    # string "None" into the environment when no token was given.
    if inference_api == "openrouter" and openrouter_token:
        os.environ["OPENROUTER_API_KEY"] = str(openrouter_token).strip()
    lines = []
    for result in run_evaluation(inference_api, str(model_name).strip(), prompt_format, custom_prompt):
        lines.append(result)
        yield "\n".join(lines)
def update_token_visibility(api):
    """Show the OpenRouter token textbox only for the OpenRouter backend."""
    show_token = (api == "openrouter")
    return gr.update(visible=show_token)
def on_prompt_format_change(choice):
    """Load the preset template for *choice*; leave the textarea alone otherwise."""
    template = PROMPT_TEMPLATES.get(choice)
    if template is None:
        # "custom" (or any unknown value): keep whatever the user typed.
        return gr.update()
    return template
# --- Gradio UI definition (executed at import time) ---
with gr.Blocks(gr.themes.Soft()) as demo:
    gr.Markdown("# DuckDB SQL Evaluation App")

    with gr.Row():
        with gr.Column():
            # Backend selector; the token box below is only relevant for OpenRouter.
            inference_api = gr.Dropdown(
                label="Inference API",
                choices=['openrouter', 'inference_api'],
                value="openrouter"
            )
            # Visible initially because the default backend is "openrouter";
            # toggled by the inference_api.change handler below.
            openrouter_token = gr.Textbox(
                label="OpenRouter API Token",
                placeholder="Enter your OpenRouter API token",
                type="password",
                visible=True
            )
            model_name = gr.Textbox(
                label="Model Name (e.g., qwen/qwen-2.5-72b-instruct)"
            )
            gr.Markdown("[View OpenRouter Models](https://openrouter.ai/models?order=top-weekly)")

    with gr.Row():
        with gr.Column():
            prompt_format = gr.Dropdown(
                label="Prompt Format",
                choices=['duckdbinst', 'duckdbinstgraniteshort', 'duckdbinstgranitebench', 'custom'],
                value="duckdbinstgraniteshort"
            )
            # Pre-filled with the default preset's template text; editing it
            # flips the dropdown above to "custom" (see custom_prompt.input).
            custom_prompt = gr.TextArea(
                label="Prompt Template Content",
                placeholder="Enter your custom prompt template here or select a preset format above.",
                lines=10,
                value=PROMPT_TEMPLATES['duckdbinstgraniteshort']
            )

    # Clickable example rows that populate the three inputs listed below.
    gr.Examples(
        examples=[
            ["openrouter", "qwen/qwen-2.5-72b-instruct", "duckdbinst"],
            ["openrouter", "meta-llama/llama-3.2-3b-instruct:free", "duckdbinstgraniteshort"],
            ["openrouter", "mistralai/mistral-nemo", "duckdbinst"],
        ],
        inputs=[inference_api, model_name, prompt_format],
    )

    start_btn = gr.Button("Start Evaluation")
    output = gr.Textbox(label="Output", lines=20)

    # Event handlers
    # Show/hide the token box depending on the selected backend.
    inference_api.change(
        fn=update_token_visibility,
        inputs=[inference_api],
        outputs=[openrouter_token]
    )
    # Selecting a preset loads its template text into the textarea.
    prompt_format.change(
        fn=on_prompt_format_change,
        inputs=[prompt_format],
        outputs=[custom_prompt]
    )
    # Manually editing the template marks the format selection as "custom".
    custom_prompt.input(
        fn=lambda: "custom",
        outputs=prompt_format
    )
    # gradio_run_evaluation is a generator, so the output textbox streams
    # the accumulated results as they are produced.
    start_btn.click(
        fn=gradio_run_evaluation,
        inputs=[inference_api, model_name, prompt_format, openrouter_token, custom_prompt],
        outputs=output
    )

# queue() enables streaming/generator outputs; launch() starts the server.
demo.queue().launch()