gorkemgoknar
committed on
Commit
•
1fdfba2
1
Parent(s):
cce82f4
Update app.py
Browse files
app.py
CHANGED
@@ -76,10 +76,11 @@ model.load_checkpoint(
|
|
76 |
model.cuda()
|
77 |
print("Done loading TTS")
|
78 |
|
|
|
79 |
|
80 |
-
title = "Voice chat with
|
81 |
|
82 |
-
DESCRIPTION = """# Voice chat with
|
83 |
css = """.toast-wrap { display: none !important } """
|
84 |
|
85 |
from huggingface_hub import HfApi
|
@@ -127,7 +128,6 @@ ROLE_PROMPTS["AI Assistant"]=system_message
|
|
127 |
from huggingface_hub import hf_hub_download
|
128 |
print("Downloading LLM")
|
129 |
|
130 |
-
llm_model = os.environ.get("LLM_MODEL", "mistral") # or "zephyr"
|
131 |
|
132 |
if llm_model == "zephyr":
|
133 |
#Zephyr
|
|
|
76 |
model.cuda()
|
77 |
print("Done loading TTS")
|
78 |
|
79 |
+
llm_model = os.environ.get("LLM_MODEL", "mistral") # or "zephyr"
|
80 |
|
81 |
+
title = f"Voice chat with {llm_model.upper()} and Coqui XTTS"
|
82 |
|
83 |
+
DESCRIPTION = f"""# Voice chat with {llm_model.upper()} and Coqui XTTS"""
|
84 |
css = """.toast-wrap { display: none !important } """
|
85 |
|
86 |
from huggingface_hub import HfApi
|
|
|
128 |
from huggingface_hub import hf_hub_download
|
129 |
print("Downloading LLM")
|
130 |
|
|
|
131 |
|
132 |
if llm_model == "zephyr":
|
133 |
#Zephyr
|