fix-trust-remote-code (#1044)
- Refactor model service status mapping and improve validation logs (5ccae0d3fab066f240f805c2965edd89c076f5ec)
- Improve error messages for model validation and gated model handling (d852c46e7b27e6c80ba20422143e8631c4ced3ef)
- Add python-dotenv dependency to backend configuration (eb6e17b451246174a4b6135e210a958130ff9a2f)
- Add datetime module (741988b8e01f511c6cd1f8826871f5931e94b10a)
- Fix weight types (682889985c337f827df7fdb6b43370932a3acc45)
backend/app/services/models.py
CHANGED
@@ -9,8 +9,7 @@ import asyncio
 import time
 from huggingface_hub import HfApi, CommitOperationAdd
 from huggingface_hub.utils import build_hf_headers
-import
-from datasets import load_dataset, disable_progress_bar
+from datasets import disable_progress_bar
 import sys
 import contextlib
 from concurrent.futures import ThreadPoolExecutor

@@ -158,9 +157,9 @@ class ModelService(HuggingFaceService):
         status = content.get("status", "PENDING").upper()
         target_status = None
         status_map = {
-            "PENDING": ["PENDING"
+            "PENDING": ["PENDING"],
             "EVALUATING": ["RUNNING"],
-            "FINISHED": ["FINISHED"
+            "FINISHED": ["FINISHED"]
         }

         for target, source_statuses in status_map.items():

@@ -425,6 +424,17 @@ class ModelService(HuggingFaceService):
             logger.error(LogFormatter.error("Failed to check existing submissions", e))
             raise

+        # Check that model on hub and valid
+        valid, error, model_config = await self.validator.is_model_on_hub(
+            model_data["model_id"],
+            model_data["revision"],
+            test_tokenizer=True
+        )
+        if not valid:
+            logger.error(LogFormatter.error("Model on hub validation failed", error))
+            raise Exception(error)
+        logger.info(LogFormatter.success("Model on hub validation passed"))
+
         # Validate model card
         valid, error, model_card = await self.validator.check_model_card(
             model_data["model_id"]

@@ -541,7 +551,7 @@ class ModelService(HuggingFaceService):

         return {
             "status": "success",
-            "message": "
+            "message": "The model was submitted successfully, and the vote has been recorded"
         }

     async def get_model_status(self, model_id: str) -> Dict[str, Any]:
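For context, a minimal sketch of how the status_map lookup above can resolve an upstream status into a leaderboard bucket. Only the mapping dict and the loop header appear in the diff; the helper name and the membership check are illustrative assumptions, not the service's code.

# Minimal sketch of the status_map resolution shown in the hunk above.
# The function name and the `status in source_statuses` check are assumptions.
def resolve_target_status(raw_status: str) -> str | None:
    status = (raw_status or "PENDING").upper()
    status_map = {
        "PENDING": ["PENDING"],
        "EVALUATING": ["RUNNING"],
        "FINISHED": ["FINISHED"]
    }
    for target, source_statuses in status_map.items():
        if status in source_statuses:
            return target
    return None  # unrecognized statuses stay unmapped

assert resolve_target_status("running") == "EVALUATING"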
backend/app/utils/model_validation.py
CHANGED
@@ -196,15 +196,15 @@ class ModelValidator:
                         token=self.token
                     )
                 except ValueError as e:
-                    return False, f"
+                    return False, f"The tokenizer is not available in an official Transformers release: {e}", None
                 except Exception:
-                    return False, "
+                    return False, "The tokenizer cannot be loaded. Ensure the tokenizer class is part of a stable Transformers release and correctly configured.", None

             return True, None, config

         except ValueError:
-            return False, "
+            return False, "The model requires `trust_remote_code=True` to launch, and for safety reasons, we don't accept such models automatically.", None
         except Exception as e:
             if "You are trying to access a gated repo." in str(e):
-                return True, "
+                return True, "The model is gated and requires special access permissions.", None
-            return False, f"was not found or misconfigured on the
+            return False, f"The model was not found or is misconfigured on the Hub. Error: {e.args[0]}", None
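These messages are returned by the validator's hub check. Below is a rough, synchronous sketch of the error mapping they belong to, assuming the validator wraps AutoConfig/AutoTokenizer loading with trust_remote_code disabled; the function name and call structure are assumptions, and only the messages are taken from the diff.

# Rough synchronous sketch of the error mapping above; check_model_loads is
# an illustrative name, not this repository's API.
from transformers import AutoConfig, AutoTokenizer

def check_model_loads(model_id: str, revision: str, token: str | None = None):
    try:
        config = AutoConfig.from_pretrained(
            model_id, revision=revision, trust_remote_code=False, token=token
        )
        try:
            AutoTokenizer.from_pretrained(
                model_id, revision=revision, trust_remote_code=False, token=token
            )
        except ValueError as e:
            return False, f"The tokenizer is not available in an official Transformers release: {e}", None
        return True, None, config
    except ValueError:
        # transformers raises ValueError when the repo ships custom code
        # but trust_remote_code=False is passed
        return False, (
            "The model requires `trust_remote_code=True` to launch, and for safety "
            "reasons, we don't accept such models automatically."
        ), None
    except Exception as e:
        if "You are trying to access a gated repo." in str(e):
            return True, "The model is gated and requires special access permissions.", None
        return False, f"The model was not found or is misconfigured on the Hub. Error: {e.args[0]}", None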
backend/pyproject.toml
CHANGED
@@ -18,6 +18,7 @@ transformers = "^4.47.0"
 safetensors = "^0.4.5"
 aiofiles = "^24.1.0"
 fastapi-cache2 = "^0.2.1"
+python-dotenv = "^1.0.1"

 [tool.poetry.group.dev.dependencies]
 pytest = "^8.3.4"
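For reference, a minimal usage sketch of the new python-dotenv dependency; the HF_TOKEN variable name is an assumption, not something this diff configures.

# Minimal python-dotenv usage sketch; HF_TOKEN is an assumed variable name.
import os
from dotenv import load_dotenv

load_dotenv()  # reads a local .env file into the process environment
hf_token = os.environ.get("HF_TOKEN")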
backend/utils/analyze_prod_models.py
CHANGED
@@ -1,6 +1,7 @@
 import os
 import json
 import logging
+from datetime import datetime
 from pathlib import Path
 from huggingface_hub import HfApi
 from dotenv import load_dotenv
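Illustrative only: one plausible reason for the new datetime import is timestamping the analysis output. The snippet below is an assumption about usage, not code from the script.

# Assumed usage of the new import: tagging results with a UTC timestamp.
from datetime import datetime, timezone

run_started_at = datetime.now(timezone.utc).isoformat(timespec="seconds")
print(f"Analysis started at {run_started_at}")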
frontend/src/pages/AddModelPage/components/ModelSubmissionForm/ModelSubmissionForm.js
CHANGED
@@ -26,9 +26,9 @@ import { SUBMISSION_PRECISIONS } from "../../../../pages/LeaderboardPage/compone
 import AuthContainer from "../../../../components/shared/AuthContainer";

 const WEIGHT_TYPES = [
-  { value: "
-  { value: "
-  { value: "
+  { value: "Original", label: "Original" },
+  { value: "Delta", label: "Delta" },
+  { value: "Adapter", label: "Adapter" },
 ];

 const HELP_TEXTS = {
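As a hedged illustration of why the exact value strings matter, a backend-side check against the same set could look like the sketch below; VALID_WEIGHT_TYPES and validate_weight_type are hypothetical and not part of this PR.

# Hypothetical backend check mirroring the form's weight type options.
VALID_WEIGHT_TYPES = {"Original", "Delta", "Adapter"}

def validate_weight_type(weight_type: str) -> None:
    if weight_type not in VALID_WEIGHT_TYPES:
        raise ValueError(
            f"Unknown weight type {weight_type!r}; expected one of {sorted(VALID_WEIGHT_TYPES)}"
        )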