Joseph Pollack committed
Commit 3ed9e8d · 1 Parent(s): 026ee5d

unfuck the git history and fork networks so our build renders
README.md CHANGED
@@ -7,7 +7,11 @@ sdk: gradio
 sdk_version: "6.0.1"
 python_version: "3.11"
 app_file: src/app.py
-pinned: false
+hf_oauth: true
+hf_oauth_expiration_minutes: 480
+hf_oauth_scopes:
+- inference-api
+pinned: true
 license: mit
 tags:
 - mcp-in-action-track-enterprise
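The new `hf_oauth*` keys switch on "Sign in with Hugging Face" for the Space: an 8-hour token scoped to `inference-api`, which Gradio then passes into any event handler that declares `gr.OAuthToken` / `gr.OAuthProfile` parameters. A minimal sketch of that wiring (illustrative only, not this repo's code; it assumes nothing beyond the stock Gradio OAuth helpers):

```python
import gradio as gr


def whoami(profile: gr.OAuthProfile | None, token: gr.OAuthToken | None) -> str:
    # Gradio injects both arguments automatically because of the type hints;
    # they are None when the visitor has not signed in.
    if profile is None or token is None:
        return "Please sign in with Hugging Face first."
    # token.token holds the short-lived access token for the requested
    # scopes (here: inference-api); it can be forwarded as an API key.
    return f"Signed in as {profile.username}"


with gr.Blocks() as demo:
    gr.LoginButton()
    status = gr.Textbox(label="Status")
    gr.Button("Check login").click(whoami, inputs=None, outputs=status)

if __name__ == "__main__":
    demo.launch()
```

This is the token that `configure_orchestrator` in src/app.py below folds into `effective_api_key` ahead of the environment variables.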
src/app.py CHANGED
@@ -79,7 +79,7 @@ def configure_orchestrator(
     # Priority: oauth_token > env vars
     # On HuggingFace Spaces, OAuth token is available via request.oauth_token
     effective_api_key = oauth_token or os.getenv("HF_TOKEN") or os.getenv("HUGGINGFACE_API_KEY")
-
+
     if effective_api_key:
         # We have an API key (OAuth or env) - use pydantic-ai with JudgeHandler
         # This uses HuggingFace's own inference API, not third-party providers
@@ -435,27 +435,29 @@ async def research_agent(
     # According to Gradio docs: OAuthToken and OAuthProfile are None if user not logged in
     token_value: str | None = None
     username: str | None = None
-
+
     if oauth_token is not None:
         # OAuthToken has a .token attribute containing the access token
         token_value = oauth_token.token if hasattr(oauth_token, "token") else None
-
+
     if oauth_profile is not None:
         # OAuthProfile has .username, .name, .profile_image attributes
         username = (
-            oauth_profile.username
-            if hasattr(oauth_profile, "username") and oauth_profile.username
-            else (oauth_profile.name if hasattr(oauth_profile, "name") and oauth_profile.name else None)
+            oauth_profile.username
+            if hasattr(oauth_profile, "username") and oauth_profile.username
+            else (
+                oauth_profile.name
+                if hasattr(oauth_profile, "name") and oauth_profile.name
+                else None
+            )
         )
-
+
     # Check if user is logged in (OAuth token or env var)
     # Fallback to env vars for local development or Spaces with HF_TOKEN secret
     has_authentication = bool(
-        token_value
-        or os.getenv("HF_TOKEN")
-        or os.getenv("HUGGINGFACE_API_KEY")
+        token_value or os.getenv("HF_TOKEN") or os.getenv("HUGGINGFACE_API_KEY")
    )
-
+
     if not has_authentication:
         yield {
             "role": "assistant",
@@ -467,7 +469,7 @@ async def research_agent(
             ),
         }
         return
-
+
     if not message.strip():
         yield {
             "role": "assistant",
@@ -494,7 +496,7 @@ async def research_agent(
     # Convert empty strings from Textbox to None for defaults
     model_id = hf_model if hf_model and hf_model.strip() else None
     provider_name = hf_provider if hf_provider and hf_provider.strip() else None
-
+
     orchestrator, backend_name = configure_orchestrator(
         use_mock=False,  # Never use mock in production - HF Inference is the free fallback
         mode=effective_mode,
@@ -542,7 +544,7 @@ def create_demo() -> gr.Blocks:
             variant="huggingface",
             size="lg",
         )
-
+
         # Create settings components (hidden - used only for additional_inputs)
         # Model/provider selection removed to avoid dropdown value mismatch errors
         # Settings will use defaults from configure_orchestrator
@@ -591,22 +593,21 @@ def create_demo() -> gr.Blocks:
             [
                 "What drugs could be repurposed for Alzheimer's disease?",
                 "simple",
-                "Qwen/Qwen3-Next-80B-A3B-Thinking",
-                "",
+                "Qwen/Qwen3-Next-80B-A3B-Thinking",
+                "",
             ],
             [
                 "Is metformin effective for treating cancer?",
                 "simple",
-                "Qwen/Qwen3-235B-A22B-Instruct-2507",
-                "",
+                "Qwen/Qwen3-235B-A22B-Instruct-2507",
+                "",
             ],
             [
                 "What medications show promise for Long COVID treatment?",
                 "simple",
-                "zai-org/GLM-4.5-Air",
-                "nebius",
+                "zai-org/GLM-4.5-Air",
+                "nebius",
             ],
-
         ],
         cache_examples=False,  # CRITICAL: Disable example caching to prevent examples from running at startup
         # Examples will only run when user explicitly clicks them (after login)
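The reformatted hunks keep the same behaviour: take `.token` off the injected `gr.OAuthToken`, prefer `OAuthProfile.username` over `.name`, and fall back to the `HF_TOKEN` / `HUGGINGFACE_API_KEY` environment variables. A compressed restatement of that precedence as a standalone helper (the function name is hypothetical and does not exist in the codebase):

```python
import os

import gradio as gr


def resolve_credentials(
    oauth_token: gr.OAuthToken | None,
    oauth_profile: gr.OAuthProfile | None,
) -> tuple[str | None, str | None]:
    """Return (api_key, username) with the same precedence research_agent uses."""
    # getattr on None just yields the default, so the explicit None checks collapse.
    token_value = getattr(oauth_token, "token", None)
    username = (
        getattr(oauth_profile, "username", None)
        or getattr(oauth_profile, "name", None)
        or None  # normalize empty strings to None, matching the original ternary
    )
    api_key = token_value or os.getenv("HF_TOKEN") or os.getenv("HUGGINGFACE_API_KEY")
    return api_key, username
```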
src/middleware/state_machine.py CHANGED
@@ -127,7 +127,3 @@ def get_workflow_state() -> WorkflowState:
127
  logger.debug("Workflow state not found, auto-initializing")
128
  return init_workflow_state()
129
  return state
130
-
131
-
132
-
133
-
 
127
  logger.debug("Workflow state not found, auto-initializing")
128
  return init_workflow_state()
129
  return state
 
 
 
 
src/tools/crawl_adapter.py CHANGED
@@ -56,7 +56,3 @@ async def crawl_website(starting_url: str) -> str:
56
  except Exception as e:
57
  logger.error("Crawl failed", error=str(e), url=starting_url)
58
  return f"Error crawling website: {e!s}"
59
-
60
-
61
-
62
-
 
56
  except Exception as e:
57
  logger.error("Crawl failed", error=str(e), url=starting_url)
58
  return f"Error crawling website: {e!s}"
 
 
 
 
src/tools/web_search_adapter.py CHANGED
@@ -61,7 +61,3 @@ async def web_search(query: str) -> str:
61
  except Exception as e:
62
  logger.error("Web search failed", error=str(e), query=query)
63
  return f"Error performing web search: {e!s}"
64
-
65
-
66
-
67
-
 
61
  except Exception as e:
62
  logger.error("Web search failed", error=str(e), query=query)
63
  return f"Error performing web search: {e!s}"