import gradio as gr
import torch
from diffusers import FluxPipeline
from huggingface_hub.utils import GatedRepoError, HfHubHTTPError, RepositoryNotFoundError
from transformers import AutoModel
def check_model_access(model_id: str, oauth_token: gr.OAuthToken | None):
    """
    Check whether the logged-in user can download a given model from the Hub.

    Attempts to instantiate the Flux pipeline with the user's token; the
    download fails with a hub error if the user has not been granted access
    to a gated or private repository. NOTE: this downloads the full pipeline
    weights, not just `config.json` — it is a heavyweight but definitive check.

    Args:
        model_id: The ID of the model to check (e.g., "black-forest-labs/FLUX.1-dev").
        oauth_token: The user's OAuth token, automatically injected by Gradio.

    Returns:
        A string with the result of the access check, formatted for Markdown.
    """
    # 1. The user must be logged in before there is a token to test with.
    if oauth_token is None:
        return "### Authentication Error 🔴\nPlease log in using the button above to check model access."
    # 2. An empty model ID cannot be checked.
    if not model_id:
        return "### Input Missing 🟡\nPlease enter a model ID to check."
    try:
        # 3. The core test: try to load the pipeline using the user's token.
        # This will fail if the user doesn't have access to the gated repo's files.
        pipe = FluxPipeline.from_pretrained(
            pretrained_model_name_or_path=model_id,
            torch_dtype=torch.bfloat16,
            token=oauth_token.token,
        )
        # 4. Success. SECURITY: never echo oauth_token.token back into the
        # response — it is a live credential and this Markdown is user-visible.
        return f"""
### Access Granted ✅
Successfully loaded **{model_id}**.
- **Pipeline Class:** `{type(pipe).__name__}`
"""
    except GatedRepoError:
        # 5a. GatedRepoError subclasses RepositoryNotFoundError, so it must be
        # caught FIRST — otherwise a gated repo is misreported as nonexistent.
        return f"""
### Access Denied 🔴
You do not have permission to download files from **{model_id}**.
- Please ensure you have accepted the terms and conditions on the model's page.
- This might be a private model you don't have access to.
"""
    except RepositoryNotFoundError:
        # 5b. The repository genuinely does not exist (or is fully hidden).
        return f"### Not Found 🔴\nThe repository **{model_id}** does not exist."
    except HfHubHTTPError as e:
        # 6. Other HTTP errors; 401/403 still indicate permission issues.
        if e.response.status_code in [401, 403]:
            return f"""
### Access Denied 🔴
You do not have permission to download files from **{model_id}**.
- Please ensure you have accepted the terms and conditions on the model's page.
- This might be a private model you don't have access to.
- **Status Code:** {e.response.status_code}
"""
        else:
            return f"### An Error Occurred 🔴\n**Details:** {str(e)}"
# --- Gradio Interface ---
with gr.Blocks(css="h1 { text-align: center; }") as demo:
    # Title and usage instructions.
    gr.Markdown("# Gated Model Access Tester")
    gr.Markdown("Log in with your Hugging Face account and enter a model ID. This will attempt to load the model's `config.json` file to verify access.")
    # Hugging Face OAuth login; supplies the token injected into the handler.
    gr.LoginButton()
    with gr.Row():
        model_box = gr.Textbox(
            scale=3,
            label="Model ID",
            placeholder="e.g., meta-llama/Llama-2-7b-chat-hf",
        )
        run_button = gr.Button("Check Access", variant="primary", scale=1)
    # Markdown panel that receives the formatted result string.
    result_md = gr.Markdown("### Result will be displayed here.")
    # Wire the button to the checker; the OAuth-token parameter is injected
    # automatically by Gradio and is deliberately absent from `inputs`.
    run_button.click(
        fn=check_model_access,
        inputs=[model_box],
        outputs=[result_md],
    )
demo.launch()