Marcos12886 committed · Commit 95fada5 · Parent(s): 1aa470d
Update app.py
app.py CHANGED
@@ -8,13 +8,13 @@ token = os.getenv("HF_TOKEN")
 client = InferenceClient("meta-llama/Meta-Llama-3-8B-Instruct", token=token)
 device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
 model_class, id2label_class = predict_params(
-    model_path="distilhubert-finetuned-mixed-data",
+    model_path="A-POR-LOS-8000/distilhubert-finetuned-mixed-data",
     dataset_path="data/mixed_data",
     filter_white_noise=True,
     undersample_normal=True
 )
 model_mon, id2label_mon = predict_params(
-    model_path="distilhubert-finetuned-cry-detector",
+    model_path="A-POR-LOS-8000/distilhubert-finetuned-cry-detector",
     dataset_path="data/baby_cry_detection",
     filter_white_noise=False,
     undersample_normal=False
@@ -231,4 +231,3 @@ with gr.Blocks(theme=my_theme) as demo:
     boton_predictor.click(cambiar_pestaña, outputs=[chatbot, pag_predictor])
     boton_monitor.click(cambiar_pestaña, outputs=[chatbot, pag_monitor])
     demo.launch(share=True)
-
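The only substantive change is prefixing both checkpoint names with the A-POR-LOS-8000 namespace, so each model_path is a fully qualified Hugging Face Hub repo id rather than a bare folder name. A minimal sketch of what such an id enables, assuming the repos are public Hub checkpoints; the pipeline call and the example file name below are illustrative assumptions, not code from this commit (the Space's own predict_params helper is not shown here):

# Minimal sketch, not part of this commit: load one of the finetuned checkpoints
# directly by its fully qualified Hub id. A bare name such as
# "distilhubert-finetuned-cry-detector" would instead be looked up as a local
# directory or a repo under the default namespace.
from transformers import pipeline

monitor = pipeline(
    "audio-classification",
    model="A-POR-LOS-8000/distilhubert-finetuned-cry-detector",
)

# "baby_cry.wav" is a hypothetical local audio file.
print(monitor("baby_cry.wav"))  # prints predicted labels with scores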