AppleSwing committed
Commit 22ce8a7 (1 parent: 60d9c33)

Delete requests

Files changed:
- app.py +4 -2
- src/display/utils.py +6 -6
- src/submission/check_validity.py +1 -1
- src/submission/submit.py +5 -3
app.py
CHANGED

@@ -383,7 +383,7 @@ with demo:
                     choices=[t.to_str() for t in GPUType],
                     label="GPU type",
                     multiselect=False,
-                    value=
+                    value="NVIDIA-A100-PCIe-80GB",
                     interactive=True,
                 )
 
@@ -422,7 +422,7 @@ with demo:
 
             submit_button = gr.Button("Submit Eval")
             submission_result = gr.Markdown()
-            debug = gr.
+            debug = gr.Checkbox(value=args.debug, label="Debug", visible=False)
             submit_button.click(
                 add_new_eval,
                 [
@@ -434,6 +434,8 @@ with demo:
                     weight_type,
                     model_type,
                     inference_framework,
+                    debug,
+                    gpu_type
                 ],
                 submission_result,
             )
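For reference, here is a minimal self-contained sketch of how the new components wire into the click handler. Only the component definitions come from the diff; the stub handler, the GPU_CHOICES list, and the argument parsing are assumptions standing in for the real app.

import argparse
import gradio as gr

parser = argparse.ArgumentParser()
parser.add_argument("--debug", action="store_true")
args = parser.parse_args()

# Assumed display strings; in the app they come from GPUType.to_str().
GPU_CHOICES = ["NVIDIA-H100-PCIe-80GB", "NVIDIA-A100-PCIe-80GB", "NVIDIA-A5000-24GB"]

def add_new_eval_stub(debug: bool, gpu_type: str) -> str:
    # Stand-in for src.submission.submit.add_new_eval, which takes many more form fields.
    return f"debug={debug}, gpu_type={gpu_type}"

with gr.Blocks() as demo:
    gpu_type = gr.Dropdown(
        choices=GPU_CHOICES,
        label="GPU type",
        multiselect=False,
        value="NVIDIA-A100-PCIe-80GB",
        interactive=True,
    )
    submit_button = gr.Button("Submit Eval")
    submission_result = gr.Markdown()
    # Hidden checkbox so the CLI debug flag is forwarded to the handler as an input.
    debug = gr.Checkbox(value=args.debug, label="Debug", visible=False)
    submit_button.click(add_new_eval_stub, [debug, gpu_type], submission_result)

if __name__ == "__main__":
    demo.launch()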
src/display/utils.py
CHANGED

@@ -191,9 +191,9 @@ class InferenceFramework(Enum):
         return InferenceFramework.Unknown
 
 class GPUType(Enum):
-    H100_pcie = ModelDetails("NVIDIA
-    A100_pcie = ModelDetails("NVIDIA
-    A5000 = ModelDetails("NVIDIA
+    H100_pcie = ModelDetails("NVIDIA-H100-PCIe-80GB")
+    A100_pcie = ModelDetails("NVIDIA-A100-PCIe-80GB")
+    A5000 = ModelDetails("NVIDIA-A5000-24GB")
     Unknown = ModelDetails("?")
 
     def to_str(self):
@@ -201,11 +201,11 @@ class GPUType(Enum):
 
     @staticmethod
     def from_str(gpu_type: str):
-        if gpu_type in ["NVIDIA
+        if gpu_type in ["NVIDIA-H100-PCIe-80GB"]:
             return GPUType.A100_pcie
-        if gpu_type in ["NVIDIA
+        if gpu_type in ["NVIDIA-A100-PCIe-80GB"]:
             return GPUType.H100_pcie
-        if gpu_type in ["NVIDIA
+        if gpu_type in ["NVIDIA-A5000-24GB"]:
             return GPUType.A5000
         return GPUType.Unknown
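Note that the new from_str maps the H100 string to GPUType.A100_pcie and the A100 string to GPUType.H100_pcie. If that cross-mapping is unintentional, a minimal sketch of a symmetric lookup is shown here; the ModelDetails stand-in and to_str returning self.value.name are assumptions about code outside this diff.

from dataclasses import dataclass
from enum import Enum

@dataclass
class ModelDetails:
    # Assumed shape: the display string used in the UI dropdown.
    name: str

class GPUType(Enum):
    H100_pcie = ModelDetails("NVIDIA-H100-PCIe-80GB")
    A100_pcie = ModelDetails("NVIDIA-A100-PCIe-80GB")
    A5000 = ModelDetails("NVIDIA-A5000-24GB")
    Unknown = ModelDetails("?")

    def to_str(self):
        return self.value.name

    @staticmethod
    def from_str(gpu_type: str):
        # Map each display string back to the member it was produced from.
        for member in GPUType:
            if member.value.name == gpu_type:
                return member
        return GPUType.Unknown

With this shape, GPUType.from_str("NVIDIA-H100-PCIe-80GB") returns GPUType.H100_pcie rather than the A100 member.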
src/submission/check_validity.py
CHANGED

@@ -130,7 +130,7 @@ def already_submitted_models(requested_models_dir: str) -> set[str]:
                 continue
             with open(os.path.join(root, file), "r") as f:
                 info = json.load(f)
-                file_names.append(f"{info['model']}_{info['revision']}_{info['precision']}_{info['inference_framework']}")
+                file_names.append(f"{info['model']}_{info['revision']}_{info['precision']}_{info['inference_framework']}_{info['gpu_type']}")
 
             # Select organisation
             if info["model"].count("/") == 0 or "submitted_time" not in info:
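Two details worth watching here: submit.py (below) writes the new field under the key "GPU_type" while this line reads info['gpu_type'], and request files created before this commit will not contain the field at all, so the subscript would raise KeyError. A defensive variant under those assumptions could look like this hypothetical helper, which could replace the inline f-string in already_submitted_models:

def request_key_from_info(info: dict) -> str:
    # Assumption: older request files may lack the GPU field entirely, and newer
    # ones may store it as "GPU_type" (as written by submit.py) rather than "gpu_type".
    gpu = info.get("gpu_type", info.get("GPU_type", "Unknown"))
    return f"{info['model']}_{info['revision']}_{info['precision']}_{info['inference_framework']}_{gpu}"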
src/submission/submit.py
CHANGED

@@ -26,7 +26,8 @@ def add_new_eval(
     weight_type: str,
     model_type: str,
     inference_framework: str,
-    debug: bool = False
+    debug: bool = False,
+    gpu_type: str = "NVIDIA-A100-PCIe-80GB",
 ):
     global REQUESTED_MODELS
     global USERS_TO_SUBMISSION_DATES
@@ -114,17 +115,18 @@
         "params": model_size,
         "license": license,
         "inference_framework": inference_framework,
+        "GPU_type": gpu_type
     }
 
     # Check for duplicate submission
-    if f"{model}_{revision}_{precision}_{inference_framework}" in REQUESTED_MODELS:
+    if f"{model}_{revision}_{precision}_{inference_framework}_{gpu_type}" in REQUESTED_MODELS:
         return styled_warning("This model has been already submitted.")
 
     print("Creating eval file")
     OUT_DIR = f"{EVAL_REQUESTS_PATH}/{user_name}"
     os.makedirs(OUT_DIR, exist_ok=True)
     # out_path = f"{OUT_DIR}/{model_path}_eval_request_{private}_{precision}_{weight_type}.json"
-    out_path = f"{OUT_DIR}/{model_path}_eval_request_{private}_{precision}_{weight_type}_{inference_framework}.json"
+    out_path = f"{OUT_DIR}/{model_path}_eval_request_{private}_{precision}_{weight_type}_{inference_framework}_{gpu_type}.json"
 
     with open(out_path, "w") as f:
         f.write(json.dumps(eval_entry))
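The duplicate-submission key is now assembled independently here and in check_validity.py. A small shared helper (hypothetical, not part of this commit) would keep the two formats from drifting as more fields are appended:

def request_key(model: str, revision: str, precision: str,
                inference_framework: str, gpu_type: str) -> str:
    # Single source of truth for the dedup key used by both
    # add_new_eval (submit.py) and already_submitted_models (check_validity.py).
    return f"{model}_{revision}_{precision}_{inference_framework}_{gpu_type}"

# submit.py could then check:
#   if request_key(model, revision, precision, inference_framework, gpu_type) in REQUESTED_MODELS: ...
# and store the field as "gpu_type" so check_validity.py reads back the same key it writes.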