Update ui_components.py

ui_components.py (CHANGED: +8 -73)
@@ -106,46 +106,9 @@ class UIComponents:
     def _create_login_section(self):
         """Create HuggingFace OAuth login section"""
         with gr.Group(elem_classes="login-section"):
-
-
-
-            #
-            # On Spaces, Gradio auto-configures OAuth to the Space subdomain, so we intentionally do NOT pass redirect_url.
-            # If running locally, OAuth via HF won't work; use HF_TOKEN instead.
-            # Default: let Gradio handle redirect on Spaces
-            login_kwargs: Dict[str, Any] = {"value": "Sign in for Inference"}
-
-            # Opt-in: allow forcing a normalized redirect if needed
-            if os.getenv("FORCE_HF_REDIRECT", "0") == "1":
-                raw_host = os.getenv("SPACE_HOST", "").strip()
-                if raw_host:
-                    # Normalize host (strip scheme and trailing slashes)
-                    norm_host = raw_host
-                    for prefix in ("https://", "http://"):
-                        if norm_host.startswith(prefix):
-                            norm_host = norm_host[len(prefix):]
-                    norm_host = norm_host.rstrip("/")
-                    if norm_host:
-                        login_kwargs["redirect_url"] = f"https://{norm_host}/login/callback"
-
-            self.login_button = gr.LoginButton(**login_kwargs)
-
-            # Manual token fallback for cases where OAuth redirect is blocked/mismatched
-            with gr.Row():
-                self.token_input = gr.Textbox(
-                    label="Or paste an HF access token",
-                    placeholder="hf_...",
-                    type="password",
-                    scale=3
-                )
-                self.use_token_btn = gr.Button("Use token", variant="secondary")
-
-            # Status/debug info
-            server_seen_host = os.getenv("SPACE_HOST") or "(not on Spaces)"
-            self.login_status = gr.Markdown(
-                f"Server detected host: `{server_seen_host}`\n\nIf OAuth fails, paste a token from https://huggingface.co/settings/tokens.",
-                visible=True,
-                container=True
+            self.login_button = gr.LoginButton(
+                value="Sign in for Inference",
+                redirect_url="https://gradio-chat-gradio-app-hfips.hf.space/"
             )
 
     def _create_provider_model_selection(self):
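
The new version pins the OAuth redirect to one specific Space URL. If the redirect ever needs to track the Space again (for example after a rename or duplication), the host can be derived at runtime from the SPACE_HOST environment variable, which is the approach the removed block took. A minimal sketch, assuming SPACE_HOST is set as it is on Hugging Face Spaces and that LoginButton accepts the same redirect_url argument used above; space_redirect_url is a hypothetical helper, not part of this file:

import os
from typing import Optional

def space_redirect_url() -> Optional[str]:
    # Hypothetical helper: rebuild the OAuth callback URL from the Space host,
    # normalizing it the same way the removed block did.
    host = os.getenv("SPACE_HOST", "").strip()
    if not host:
        return None  # running locally: let Gradio use its default behaviour
    for prefix in ("https://", "http://"):
        if host.startswith(prefix):
            host = host[len(prefix):]
    host = host.rstrip("/")
    return f"https://{host}/login/callback" if host else None

Hardcoding avoids any dependence on SPACE_HOST, at the cost of the URL going stale if the Space moves.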
@@ -287,24 +250,6 @@ class UIComponents:
                 logger.info("✅ HuggingFace Inference client updated with OAuth token")
             except Exception as e:
                 logger.error(f"❌ Failed to update HF client: {e}")
-
-            # Update UI status optimistically
-            return gr.Markdown("✅ Logged in via Hugging Face OAuth", visible=True)
-
-        def handle_manual_token(token_text: str):
-            token_text = (token_text or "").strip()
-            if not token_text:
-                return gr.Markdown("❌ Please paste a valid HF token", visible=True)
-            os.environ["HF_TOKEN"] = token_text
-            try:
-                self.mcp_client.hf_client = OpenAI(
-                    base_url="https://router.huggingface.co/v1",
-                    api_key=token_text
-                )
-                return gr.Markdown("✅ Token set. Inference provider ready.", visible=True)
-            except Exception as e:
-                logger.error(f"Failed to set HF client with manual token: {e}")
-                return gr.Markdown(f"❌ Failed to initialize client: {e}", visible=True)
 
         # Provider selection with auto-model loading
         def handle_provider_change(provider_id):
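
With the manual-token fallback gone, sign-in now relies entirely on the OAuth flow. For context, a minimal sketch of what an OAuth-aware handler such as handle_oauth_profile might look like, assuming it follows Gradio's convention of injecting gr.OAuthProfile / gr.OAuthToken into handlers that declare them (available only on Spaces with OAuth enabled); the router base URL comes from the removed fallback, and hf_client stands in for self.mcp_client.hf_client:

from __future__ import annotations

import gradio as gr
from openai import OpenAI

hf_client: OpenAI | None = None  # stand-in for self.mcp_client.hf_client

def handle_oauth_profile(profile: gr.OAuthProfile | None = None,
                         token: gr.OAuthToken | None = None) -> None:
    # Sketch: pick up the signed-in user's OAuth token on page load.
    global hf_client
    if profile is None or token is None:
        # Not signed in, or running outside a Space with OAuth enabled.
        return
    # Route inference through the HF router with the user's token, mirroring
    # the client update logged in the surrounding handler.
    hf_client = OpenAI(
        base_url="https://router.huggingface.co/v1",
        api_key=token.token,
    )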
@@ -470,21 +415,11 @@ class UIComponents:
                 label=f"Active Servers ({len(server_choices)} loaded)"
             )
 
-        # Connect OAuth
-
-
-
-
-            outputs=[self.login_status]
-        )
-        except Exception as e:
-            logger.warning(f"LoginButton.login hookup failed: {e}")
-        if hasattr(self, "use_token_btn"):
-            self.use_token_btn.click(
-                fn=handle_manual_token,
-                inputs=[self.token_input],
-                outputs=[self.login_status]
-            )
+        # Connect OAuth
+        demo.load(
+            fn=handle_oauth_profile,
+            outputs=[]
+        )
 
         # Connect provider/model dropdowns with auto-selection on load
         demo.load(
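
For reference, the new demo.load hookup in isolation. This is a sketch that assumes the handle_oauth_profile outlined above; on Spaces, Gradio fills in the OAuth arguments automatically each time the page loads, so no inputs need to be wired:

import gradio as gr

with gr.Blocks() as demo:
    # Same shape as the new code: a login button plus a load-time OAuth check.
    login_button = gr.LoginButton(value="Sign in for Inference")
    demo.load(fn=handle_oauth_profile, outputs=[])

if __name__ == "__main__":
    demo.launch()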