Commit 8e32a09
ZeroCommand committed
1 Parent(s): 02cf07d
GSK-2852-hide-unused-config-files (#131)

- only show the folder where submitted config files are (79d13cdbf5e4d75e5bee945cb98fcd9ac69a4c8c)
- check if config submitted path exist (2f7dd9a64c5b3f35e2fe4e52e6931f7e5bd159ff)

Files changed:
- app_debug.py +4 -2
- io_utils.py +11 -1
- run_jobs.py +2 -2
- text_classification.py +1 -1
- text_classification_ui_helpers.py +2 -1
app_debug.py CHANGED
@@ -3,12 +3,12 @@ from os.path import isfile, join
 import html
 
 import gradio as gr
-
+import os
 import pipe
 from io_utils import get_logs_file
 
 LOG_PATH = "./tmp"
-CONFIG_PATH = "./cicd/configs/"
+CONFIG_PATH = "./cicd/configs/submitted/"
 MAX_FILES_NUM = 20
 
 
@@ -69,6 +69,8 @@ def get_queue_status():
 
 
 def get_demo():
+    if not os.path.exists(CONFIG_PATH):
+        os.makedirs(CONFIG_PATH)
     with gr.Row():
         gr.HTML(
             value=get_queue_status,
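The get_demo() change guards against the submitted-config folder not existing when the Space starts. A minimal standalone sketch of the same guard (the helper name ensure_config_dir is illustrative, not part of the repository); os.makedirs(..., exist_ok=True) folds the existence check and the creation into one call:

import os

CONFIG_PATH = "./cicd/configs/submitted/"

def ensure_config_dir(path: str = CONFIG_PATH) -> str:
    # Same effect as the check-then-create added in get_demo(); exist_ok=True
    # makes the call a no-op when the folder is already there.
    os.makedirs(path, exist_ok=True)
    return path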
io_utils.py CHANGED
@@ -1,15 +1,25 @@
 import os
-
+import logging
 import yaml
 
 YAML_PATH = "./cicd/configs"
 LOG_FILE = "temp_log"
 
+logger = logging.getLogger(__name__)
 
 class Dumper(yaml.Dumper):
     def increase_indent(self, flow=False, *args, **kwargs):
         return super().increase_indent(flow=flow, indentless=False)
 
+def get_submitted_yaml_path(uid):
+    if not os.path.exists(f"{YAML_PATH}/submitted"):
+        os.makedirs(f"{YAML_PATH}/submitted")
+    if not os.path.exists(f"{YAML_PATH}/{uid}_config.yaml"):
+        logger.error(f"config.yaml does not exist for {uid}")
+        os.system(f"cp config.yaml {YAML_PATH}/{uid}_config.yaml")
+    if not os.path.exists(f"{YAML_PATH}/submitted/{uid}_config.yaml"):
+        os.system(f"cp {YAML_PATH}/{uid}_config.yaml {YAML_PATH}/submitted/{uid}_config.yaml")
+    return f"{YAML_PATH}/submitted/{uid}_config.yaml"
 
 def get_yaml_path(uid):
     if not os.path.exists(YAML_PATH):
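The new get_submitted_yaml_path() shells out to cp to mirror the per-uid config into cicd/configs/submitted/, which is the only folder the debug UI now lists. A behaviour-equivalent sketch using only the standard library (shutil and pathlib instead of os.system), offered as an alternative rather than what the repository does:

import logging
import shutil
from pathlib import Path

YAML_PATH = Path("./cicd/configs")
logger = logging.getLogger(__name__)

def get_submitted_yaml_path_sketch(uid: str) -> str:
    # Missing per-uid configs fall back to the repo-level config.yaml, and the
    # resulting file is then mirrored into the submitted/ folder shown by the UI.
    submitted_dir = YAML_PATH / "submitted"
    submitted_dir.mkdir(parents=True, exist_ok=True)

    source = YAML_PATH / f"{uid}_config.yaml"
    if not source.exists():
        logger.error("config.yaml does not exist for %s", uid)
        shutil.copyfile("config.yaml", source)

    target = submitted_dir / f"{uid}_config.yaml"
    if not target.exists():
        shutil.copyfile(source, target)
    return str(target)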
run_jobs.py CHANGED
@@ -17,7 +17,7 @@ from app_env import (
     HF_SPACE_ID,
     HF_WRITE_TOKEN,
 )
-from io_utils import LOG_FILE,
+from io_utils import LOG_FILE, get_submitted_yaml_path, write_log_to_user_file
 from isolated_env import prepare_venv
 from leaderboard import LEADERBOARD
 
@@ -98,7 +98,7 @@ def prepare_env_and_get_command(
         "--label_mapping",
         json.dumps(label_mapping),
         "--scan_config",
-
+        get_submitted_yaml_path(uid),
         "--inference_type",
         inference_type,
         "--inference_api_token",
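With the import switched to get_submitted_yaml_path, the scanner is pointed at the copy under cicd/configs/submitted/ rather than the working config. A hypothetical fragment showing where that call sits in the command list (build_scan_command and its parameters are illustrative; only the argument names and get_submitted_yaml_path come from the diff):

import json

from io_utils import get_submitted_yaml_path

def build_scan_command(executable, uid, label_mapping, inference_type, api_token):
    # Assembles the CLI arguments in the same order the diff shows around
    # "--scan_config"; everything else about this helper is an assumption.
    return [
        executable,
        "--label_mapping",
        json.dumps(label_mapping),
        "--scan_config",
        get_submitted_yaml_path(uid),  # path under ./cicd/configs/submitted/
        "--inference_type",
        inference_type,
        "--inference_api_token",
        api_token,
    ]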
text_classification.py CHANGED
@@ -7,9 +7,9 @@ import pandas as pd
 from transformers import pipeline
 import requests
 import os
+from app_env import HF_WRITE_TOKEN
 
 logger = logging.getLogger(__name__)
-HF_WRITE_TOKEN = "HF_WRITE_TOKEN"
 AUTH_CHECK_URL = "https://huggingface.co/api/whoami-v2"
 
 logger = logging.getLogger(__file__)
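The module-level constant HF_WRITE_TOKEN = "HF_WRITE_TOKEN" is dropped in favour of the shared definition in app_env. The diff only shows the import, so the sketch below assumes app_env.HF_WRITE_TOKEN still holds the environment-variable name, exactly like the removed local constant:

import os

# Assumption: this mirrors app_env.HF_WRITE_TOKEN, i.e. the *name* of the
# environment variable rather than the token value itself.
HF_WRITE_TOKEN = "HF_WRITE_TOKEN"

# How both refactored modules read the token after the change.
hf_token = os.environ.get(HF_WRITE_TOKEN, "")
print(bool(hf_token))  # False unless the Space exposes a write token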
text_classification_ui_helpers.py CHANGED
@@ -35,6 +35,7 @@ from wordings import (
     get_dataset_fetch_error_raw,
 )
 import os
+from app_env import HF_WRITE_TOKEN
 
 MAX_LABELS = 40
 MAX_FEATURES = 20
@@ -268,7 +269,7 @@ def align_columns_and_show_prediction(
         gr.Dropdown(visible=False) for _ in range(MAX_LABELS + MAX_FEATURES)
     ]
 
-    hf_token = os.environ.get(
+    hf_token = os.environ.get(HF_WRITE_TOKEN, default="")
 
     prediction_input, prediction_response = get_example_prediction(
         model_id, dataset_id, dataset_config, dataset_split, hf_token
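The updated lookup uses the keyword form os.environ.get(HF_WRITE_TOKEN, default=""). That works because os.environ is a MutableMapping whose get() is implemented in Python; a plain dict would reject the keyword. A quick self-contained check:

import os

# dict.get() takes no keyword arguments, but os.environ.get() does, so the
# diff's default="" form is valid and yields "" when the variable is unset.
value = os.environ.get("HF_WRITE_TOKEN", default="")
assert isinstance(value, str)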