import gradio as gr
from huggingface_hub import HfApi
from loguru import logger

from display.formatting import styled_error
from submission.submit import submit_hf_pipeline_agent, validate_model_name


def error_html(msg: str, model_id: str) -> str:
    """Render *msg* as a red HTML error fragment.

    Args:
        msg: Message template; may contain a ``{model_id}`` placeholder.
        model_id: Hugging Face model repo id substituted into the template.

    Returns:
        An HTML string suitable for a ``gr.HTML`` status component.
    """
    # NOTE(review): the inline markup was garbled in this copy of the file;
    # reconstructed as a simple red <div> with the model id in <code> —
    # confirm the exact styling against the deployed UI.
    error_msg = msg.format(model_id=f"<code>{model_id}</code>")
    return (
        f"<div style='color: #cc0000; font-weight: bold;'>"
        f"{error_msg}"
        f"</div>"
    )


def verify_pipeline(model_id: str) -> tuple[bool, str]:
    """Check that *model_id* is a public, non-gated model repo on the HF Hub.

    Returns:
        ``(success, error_message)`` — ``error_message`` is an HTML fragment
        (from :func:`error_html`) when ``success`` is ``False``, else ``""``.
    """
    try:
        api = HfApi()
        info = api.model_info(model_id)
        # `private` can in principle be None; only an explicit False counts
        # as public (preserves the original `info.private is False` check).
        is_public = info.private is False
        if not is_public:
            return False, error_html(
                "Model {model_id} is private. Please make your model public.", model_id
            )
        # Older hub client versions may not expose `gated`; missing => not gated.
        if getattr(info, "gated", False):
            return False, error_html(
                "Model {model_id} is gated. Please use a non-gated model.", model_id
            )
        return True, ""
    except Exception as e:
        # Network failures, nonexistent repos, auth errors all land here.
        logger.exception(e)
        return False, error_html(
            "Could not verify model {model_id}. Please check if the model is public and not gated.",
            model_id,
        )


def attempt_submission(
    model_id: str,
    description: str,
    competition_type: str,
    profile: gr.OAuthProfile | None,
):
    """Validate ownership/visibility and submit a HF pipeline model.

    Args:
        model_id: Either ``<model-name>`` (implicitly owned by the logged-in
            user) or ``<username>/<model-name>`` (ownership is enforced).
        description: Free-text pipeline description from the user.
        competition_type: ``"tossup"`` or ``"bonus"``.
        profile: OAuth profile of the logged-in user, or ``None``.

    Returns:
        An HTML string describing the submission outcome.
    """
    if profile is None:
        return styled_error("Authentication required. Please log in first to submit your model.")

    if "/" not in model_id:
        # Bare model name: qualify it with the user's namespace.
        full_model_id = f"{profile.username}/{model_id}"
    elif model_id.split("/")[0] != profile.username:
        # `{{model_id}}` is escaped here so error_html's .format fills it in.
        return error_html(
            f"Model {{model_id}} is not owned by you (username: {profile.username}).", model_id
        )
    else:
        full_model_id = model_id
        model_id = model_id.split("/", 1)[1]

    valid, msg = validate_model_name(model_id)
    if not valid:
        return error_html(msg, model_id)

    success, msg = verify_pipeline(full_model_id)
    if not success:
        return msg

    try:
        return submit_hf_pipeline_agent(model_id, description, competition_type, profile)
    except Exception as e:
        return styled_error(f"Error: Could not submit model '{model_id}': {e}")


def create_model_submission_panel(app: gr.Blocks, competition_type: str):
    """Build the model-id/description inputs plus submit button for one task.

    Returns:
        ``(model_name_input, description_input, submit_btn, submit_status)``.
    """
    with gr.Column():
        model_name_input = gr.Textbox(
            label="Hugging Face Model ID",
            # NOTE(review): placeholder text was stripped of its angle-bracket
            # tokens in this copy; reconstructed from the surrounding docs.
            placeholder="<username>/<model-name> OR <model-name>",
        )
        description_input = gr.Textbox(
            label="Pipeline Description",
            placeholder=(
                "e.g. My QBT model is a simple model that uses a pipeline "
                "to predict the answer to a question."
            ),
        )
        submit_btn = gr.Button("📤 Submit", interactive=False)
        submit_status = gr.HTML(label="Submission Status", visible=False)

        def check_user_login(profile: gr.OAuthProfile | None):
            # Only logged-in users may submit; reflect that in the button.
            if profile is not None:
                return gr.update(interactive=True, value="📤 Submit")
            return gr.update(interactive=False, value="🔒 Login to submit")

        gr.on(triggers=app.load, fn=check_user_login, inputs=[], outputs=[submit_btn])

        submit_btn.click(
            attempt_submission,
            inputs=[model_name_input, description_input, gr.State(competition_type)],
            outputs=[submit_status],
            concurrency_limit=1,  # serialize submissions to avoid races
        )
    return model_name_input, description_input, submit_btn, submit_status


def create_hf_pipeline_submission_interface(demo: gr.Blocks):
    """Build the full HF pipeline submission page (tossup + bonus tabs)."""
    gr.Markdown(
        """
        # Submit Your Hugging Face Pipeline Model

        Welcome to the Hugging Face pipeline submission interface for the QANTA 2025 competition!
        This page allows you to submit your models for both Tossup and Bonus tasks.

        **General Requirements:**
        - Your model must be a public, non-gated repository on the Hugging Face Hub.
        - Ensure your model can be loaded using the `pipeline()` function from the `transformers` library.
        - Adhere to the specified input/output formats for each task.

        For help getting started, check out our [Starter Code](https://github.com/qanta-challenge/qanta25-starter).

        You can also refer to Hugging Face's [custom pipeline creation guide](https://huggingface.co/docs/transformers/en/add_new_pipeline)
        for more information on how to create a custom pipeline.

        Select the appropriate tab below based on the type of question your model is designed for.
        """
    )
    with gr.Row():
        with gr.Column():
            # NOTE(review): the angle-bracket placeholders in the code samples
            # below were stripped in this copy; reconstructed to match the
            # described input/output contracts.
            gr.Markdown(
                """
                ## 🛎️ QuizBowl Tossup – Submit your model

                Tossup questions are individual questions progressively revealed where you need to provide
                an answer, a confidence score, and decide whether to buzz.

                **Pipeline Loading:** Your model repository **must** be loadable with:
                ```python
                from transformers import pipeline
                model = pipeline(task="quizbowl-tossup", model="<username>/<model-name>")
                ```

                **Input:** The pipeline will receive a dictionary with the key `question_text` (string)
                which contains the progressively revealed question so far.
                ```python
                {
                    "question_text": "In 1900, this city hosted a world's fair that introduced the public to the first escalator. Its famous tower, designed by Gustave Eiffel, was initially criticized by artists but is now a global icon. Name this European capital."
                }
                ```

                **Output:** Similar to our agents, the pipeline **must** return a dictionary with the following keys:
                ```python
                {
                    "answer": <answer string>,
                    "confidence": <confidence score between 0.0 and 1.0>,
                    "buzz": <True/False whether to buzz in>
                }
                ```

                Enter your Hugging Face model repository ID (`<username>/<model-name>`) and a brief
                description below, then click "Submit".
                """
            )
            create_model_submission_panel(demo, "tossup")
        with gr.Column():
            gr.Markdown(
                """
                ## 🧐 QuizBowl Bonus – Submit your model

                Bonus questions consist of a lead-in paragraph followed by multiple parts.
                Your model will be called for each part.

                **Pipeline Loading:** Your model repository **must** be loadable with:
                ```python
                from transformers import pipeline
                model = pipeline(task="quizbowl-bonus", model="<username>/<model-name>")
                ```

                **Input:** The pipeline will receive a dictionary with two keys:
                - `leadin` (str): The introductory paragraph for the bonus question.
                - `part` (str): The specific part of the bonus question to answer.
                ```python
                {
                    "leadin": "This author wrote about a young wizard attending a magical school.",
                    "part": "For 10 points, name this author."
                }
                ```

                **Output:** Similar to our agents, the pipeline **must** return a dictionary with the
                following keys for each part:
                ```python
                {
                    "answer": <answer string>,
                    "confidence": <confidence score between 0.0 and 1.0>,
                    "explanation": <explanation string>
                }
                ```

                Enter your Hugging Face model repository ID (`<username>/<model-name>`) and a brief
                description below, then click "Submit".
                """
            )
            create_model_submission_panel(demo, "bonus")