Handle cases for first submissions for a task
Browse files — src/submission_uploader.py: +11 −0
src/submission_uploader.py
CHANGED
@@ -90,6 +90,9 @@ class SubmissionUploader:
|
|
90 |
with open(os.path.join(temp_directory, "request_metadata.json"), "w") as f:
|
91 |
json.dump(request_metadata, f)
|
92 |
|
|
|
|
|
|
|
93 |
num_requests_already_present = len(self._fs.ls(f"datasets/{self._private_dataset_id}/{task_id}/"))
|
94 |
commit_operations = [
|
95 |
CommitOperationAdd(
|
@@ -236,6 +239,14 @@ class SubmissionUploader:
|
|
236 |
|
237 |
task_id = TASKS_PRETTY_REVERSE[task_pretty]
|
238 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
239 |
logging.info("Checking if this request has already been submitted...")
|
240 |
if not force:
|
241 |
if model_folder in self._fs.ls(f"datasets/{self._dataset_id}/{task_id}/predictions"):
|
|
|
90 |
with open(os.path.join(temp_directory, "request_metadata.json"), "w") as f:
|
91 |
json.dump(request_metadata, f)
|
92 |
|
93 |
+
if task_id not in self._fs.ls(f"datasets/{self._private_dataset_id}"):
|
94 |
+
self._fs.makedirs(f"datasets/{self._private_dataset_id}/{task_id}", exist_ok=True)
|
95 |
+
|
96 |
num_requests_already_present = len(self._fs.ls(f"datasets/{self._private_dataset_id}/{task_id}/"))
|
97 |
commit_operations = [
|
98 |
CommitOperationAdd(
|
|
|
239 |
|
240 |
task_id = TASKS_PRETTY_REVERSE[task_pretty]
|
241 |
|
242 |
+
if (
|
243 |
+
task_id not in self._fs.ls(f"datasets/{self._dataset_id}")
|
244 |
+
or "predictions" not in self._fs.ls(f"datasets/{self._dataset_id}/{task_id}")
|
245 |
+
or "results" not in self._fs.ls(f"datasets/{self._dataset_id}/{task_id}")
|
246 |
+
):
|
247 |
+
self._fs.makedirs(f"datasets/{self._dataset_id}/{task_id}/predictions", exist_ok=True)
|
248 |
+
self._fs.makedirs(f"datasets/{self._dataset_id}/{task_id}/results", exist_ok=True)
|
249 |
+
|
250 |
logging.info("Checking if this request has already been submitted...")
|
251 |
if not force:
|
252 |
if model_folder in self._fs.ls(f"datasets/{self._dataset_id}/{task_id}/predictions"):
|