Commit ·
bb7e939
1
Parent(s): 076039f
Use HF_TOKEN env var to authenticate for gated MedGemma model
Browse files — models/medgemma_agent.py (+8 −0)
models/medgemma_agent.py
CHANGED
|
@@ -203,9 +203,17 @@ class MedGemmaAgent:
|
|
| 203 |
|
| 204 |
self._print("Initializing MedGemma agent...")
|
| 205 |
|
|
|
|
| 206 |
import torch
|
| 207 |
from transformers import pipeline
|
| 208 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 209 |
self._print(f"Loading model: {self.model_id}")
|
| 210 |
|
| 211 |
if torch.cuda.is_available():
|
|
|
|
| 203 |
|
| 204 |
self._print("Initializing MedGemma agent...")
|
| 205 |
|
| 206 |
+
import os
|
| 207 |
import torch
|
| 208 |
from transformers import pipeline
|
| 209 |
|
| 210 |
+
# Authenticate with HF Hub if a token is provided (required for gated models)
|
| 211 |
+
hf_token = os.environ.get("HF_TOKEN")
|
| 212 |
+
if hf_token:
|
| 213 |
+
from huggingface_hub import login
|
| 214 |
+
login(token=hf_token, add_to_git_credential=False)
|
| 215 |
+
self._print("Authenticated with HF Hub")
|
| 216 |
+
|
| 217 |
self._print(f"Loading model: {self.model_id}")
|
| 218 |
|
| 219 |
if torch.cuda.is_available():
|