Update ui_components.py
Browse files — ui_components.py (+68 lines, −6 lines)
ui_components.py
CHANGED
|
@@ -112,7 +112,41 @@ class UIComponents:
|
|
| 112 |
#
|
| 113 |
# On Spaces, Gradio auto-configures OAuth to the Space subdomain, so we intentionally do NOT pass redirect_url.
|
| 114 |
# If running locally, OAuth via HF won't work; use HF_TOKEN instead.
|
| 115 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 116 |
|
| 117 |
def _create_provider_model_selection(self):
|
| 118 |
"""Create provider and model selection dropdowns with defaults"""
|
|
@@ -253,6 +287,24 @@ class UIComponents:
|
|
| 253 |
logger.info("✅ HuggingFace Inference client updated with OAuth token")
|
| 254 |
except Exception as e:
|
| 255 |
logger.error(f"❌ Failed to update HF client: {e}")
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 256 |
|
| 257 |
# Provider selection with auto-model loading
|
| 258 |
def handle_provider_change(provider_id):
|
|
@@ -418,11 +470,21 @@ class UIComponents:
|
|
| 418 |
label=f"Active Servers ({len(server_choices)} loaded)"
|
| 419 |
)
|
| 420 |
|
| 421 |
-
# Connect OAuth
|
| 422 |
-
|
| 423 |
-
|
| 424 |
-
|
| 425 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 426 |
|
| 427 |
# Connect provider/model dropdowns with auto-selection on load
|
| 428 |
demo.load(
|
|
|
|
| 112 |
#
|
| 113 |
# On Spaces, Gradio auto-configures OAuth to the Space subdomain, so we intentionally do NOT pass redirect_url.
|
| 114 |
# If running locally, OAuth via HF won't work; use HF_TOKEN instead.
|
| 115 |
+
# Default: let Gradio handle redirect on Spaces
|
| 116 |
+
login_kwargs: Dict[str, Any] = {"value": "Sign in for Inference"}
|
| 117 |
+
|
| 118 |
+
# Opt-in: allow forcing a normalized redirect if needed
|
| 119 |
+
if os.getenv("FORCE_HF_REDIRECT", "0") == "1":
|
| 120 |
+
raw_host = os.getenv("SPACE_HOST", "").strip()
|
| 121 |
+
if raw_host:
|
| 122 |
+
# Normalize host (strip scheme and trailing slashes)
|
| 123 |
+
norm_host = raw_host
|
| 124 |
+
for prefix in ("https://", "http://"):
|
| 125 |
+
if norm_host.startswith(prefix):
|
| 126 |
+
norm_host = norm_host[len(prefix):]
|
| 127 |
+
norm_host = norm_host.rstrip("/")
|
| 128 |
+
if norm_host:
|
| 129 |
+
login_kwargs["redirect_url"] = f"https://{norm_host}/login/callback"
|
| 130 |
+
|
| 131 |
+
self.login_button = gr.LoginButton(**login_kwargs)
|
| 132 |
+
|
| 133 |
+
# Manual token fallback for cases where OAuth redirect is blocked/mismatched
|
| 134 |
+
with gr.Row():
|
| 135 |
+
self.token_input = gr.Textbox(
|
| 136 |
+
label="Or paste an HF access token",
|
| 137 |
+
placeholder="hf_...",
|
| 138 |
+
type="password",
|
| 139 |
+
scale=3
|
| 140 |
+
)
|
| 141 |
+
self.use_token_btn = gr.Button("Use token", variant="secondary")
|
| 142 |
+
|
| 143 |
+
# Status/debug info
|
| 144 |
+
server_seen_host = os.getenv("SPACE_HOST") or "(not on Spaces)"
|
| 145 |
+
self.login_status = gr.Markdown(
|
| 146 |
+
f"Server detected host: `{server_seen_host}`\n\nIf OAuth fails, paste a token from https://huggingface.co/settings/tokens.",
|
| 147 |
+
visible=True,
|
| 148 |
+
container=True
|
| 149 |
+
)
|
| 150 |
|
| 151 |
def _create_provider_model_selection(self):
|
| 152 |
"""Create provider and model selection dropdowns with defaults"""
|
|
|
|
| 287 |
logger.info("✅ HuggingFace Inference client updated with OAuth token")
|
| 288 |
except Exception as e:
|
| 289 |
logger.error(f"❌ Failed to update HF client: {e}")
|
| 290 |
+
|
| 291 |
+
# Update UI status optimistically
|
| 292 |
+
return gr.Markdown("✅ Logged in via Hugging Face OAuth", visible=True)
|
| 293 |
+
|
| 294 |
+
def handle_manual_token(token_text: str):
|
| 295 |
+
token_text = (token_text or "").strip()
|
| 296 |
+
if not token_text:
|
| 297 |
+
return gr.Markdown("❌ Please paste a valid HF token", visible=True)
|
| 298 |
+
os.environ["HF_TOKEN"] = token_text
|
| 299 |
+
try:
|
| 300 |
+
self.mcp_client.hf_client = OpenAI(
|
| 301 |
+
base_url="https://router.huggingface.co/v1",
|
| 302 |
+
api_key=token_text
|
| 303 |
+
)
|
| 304 |
+
return gr.Markdown("✅ Token set. Inference provider ready.", visible=True)
|
| 305 |
+
except Exception as e:
|
| 306 |
+
logger.error(f"Failed to set HF client with manual token: {e}")
|
| 307 |
+
return gr.Markdown(f"❌ Failed to initialize client: {e}", visible=True)
|
| 308 |
|
| 309 |
# Provider selection with auto-model loading
|
| 310 |
def handle_provider_change(provider_id):
|
|
|
|
| 470 |
label=f"Active Servers ({len(server_choices)} loaded)"
|
| 471 |
)
|
| 472 |
|
| 473 |
+
# Connect OAuth button explicitly and provide manual token fallback
|
| 474 |
+
if hasattr(self, "login_button"):
|
| 475 |
+
try:
|
| 476 |
+
self.login_button.login(
|
| 477 |
+
fn=handle_oauth_profile,
|
| 478 |
+
outputs=[self.login_status]
|
| 479 |
+
)
|
| 480 |
+
except Exception as e:
|
| 481 |
+
logger.warning(f"LoginButton.login hookup failed: {e}")
|
| 482 |
+
if hasattr(self, "use_token_btn"):
|
| 483 |
+
self.use_token_btn.click(
|
| 484 |
+
fn=handle_manual_token,
|
| 485 |
+
inputs=[self.token_input],
|
| 486 |
+
outputs=[self.login_status]
|
| 487 |
+
)
|
| 488 |
|
| 489 |
# Connect provider/model dropdowns with auto-selection on load
|
| 490 |
demo.load(
|