gorkemgoknar committed
Commit • ef86a55
1 Parent(s): 76aea32

Update app.py
app.py CHANGED
```diff
@@ -169,7 +169,7 @@ print("Running LLM Zephyr")
 llm_zephyr = Llama(model_path=zephyr_model_path,n_gpu_layers=GPU_LAYERS-15,max_new_tokens=256, context_window=4096, n_ctx=4096,n_batch=128,verbose=LLAMA_VERBOSE)
 
 print("Running Yi LLM")
-
+llm_yi = Llama(model_path=yi_model_path,n_gpu_layers=GPU_LAYERS-15,max_new_tokens=256, context_window=4096, n_ctx=4096,n_batch=128,verbose=LLAMA_VERBOSE)
 
 
 # Mistral formatter
```
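The added line mirrors the `llm_zephyr` load just above it. A minimal standalone sketch of that load, assuming llama-cpp-python is installed; `GPU_LAYERS`, `LLAMA_VERBOSE`, and `yi_model_path` are defined elsewhere in app.py, so the values below are placeholders. One hedged caveat: `max_new_tokens` and `context_window` do not appear to be `llama_cpp.Llama` constructor parameters (context size is `n_ctx`, already passed; output length is `max_tokens` at call time), so the sketch omits them.

```python
# Sketch of the new Yi model load (placeholder values throughout).
from llama_cpp import Llama

GPU_LAYERS = 35                            # placeholder; app.py defines this earlier
LLAMA_VERBOSE = False                      # placeholder
yi_model_path = "yi-6b-chat.Q5_K_M.gguf"   # placeholder path to a local GGUF file

llm_yi = Llama(
    model_path=yi_model_path,
    n_gpu_layers=GPU_LAYERS - 15,  # offload 15 fewer layers than GPU_LAYERS, per the diff
    n_ctx=4096,                    # context window
    n_batch=128,
    verbose=LLAMA_VERBOSE,
)

# Generation length is a call-time argument in llama-cpp-python:
out = llm_yi("Hello", max_tokens=256)
```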
```diff
@@ -235,16 +235,18 @@ def generate_local(
         formatted_prompt = format_prompt_zephyr(prompt, history,system_message=sys_message)
         llm = llm_zephyr
     else:
+        sys_message= system_message.replace("##LLM_MODEL###",llm_model).replace("##LLM_MODEL_PROVIDER###",llm_provider)
+        sys_system_understand_message = system_understand_message.replace("##LLM_MODEL###",llm_model).replace("##LLM_MODEL_PROVIDER###",llm_provider)
+        formatted_prompt = format_prompt_mistral(prompt, history,system_message=sys_message,system_understand_message=sys_system_understand_message)
+
         if "yi" in llm_model.lower():
             llm_provider= "01.ai"
             llm_model = "Yi"
+            llm = llm_yi
         else:
             llm_provider= "Mistral"
             llm_model = "Mistral"
-        sys_message= system_message.replace("##LLM_MODEL###",llm_model).replace("##LLM_MODEL_PROVIDER###",llm_provider)
-        sys_system_understand_message = system_understand_message.replace("##LLM_MODEL###",llm_model).replace("##LLM_MODEL_PROVIDER###",llm_provider)
-        formatted_prompt = format_prompt_mistral(prompt, history,system_message=sys_message,system_understand_message=sys_system_understand_message)
-        llm = llm_mistral
+            llm = llm_mistral
 
 
     try:
```
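Net effect of the second hunk: the Mistral-style prompt formatting moves ahead of the Yi/Mistral split, and each branch now picks its own `Llama` instance (`llm_yi` is new; `llm = llm_mistral` moves inside the `else`). A standalone sketch of the resulting control flow, reconstructed from the diff; the real code assigns `llama_cpp.Llama` objects and calls `format_prompt_mistral()`, so the template text, default provider, and string "backends" here are placeholders.

```python
# Sketch of the post-commit dispatch in generate_local() (placeholders throughout).
SYSTEM_MESSAGE = "You are ##LLM_MODEL### from ##LLM_MODEL_PROVIDER###."  # placeholder template


def pick_backend(llm_model: str, llm_provider: str = "local") -> tuple[str, str]:
    # 1) Fill the system-message template first, as the commit now does,
    #    using the *incoming* llm_model / llm_provider values.
    sys_message = (SYSTEM_MESSAGE
                   .replace("##LLM_MODEL###", llm_model)
                   .replace("##LLM_MODEL_PROVIDER###", llm_provider))

    # 2) Then normalize provider/name and choose the engine per branch.
    if "yi" in llm_model.lower():
        llm_provider, llm_model = "01.ai", "Yi"
        backend = "llm_yi"        # new in this commit
    else:
        llm_provider, llm_model = "Mistral", "Mistral"
        backend = "llm_mistral"   # moved inside the branch by this commit

    return backend, sys_message


if __name__ == "__main__":
    print(pick_backend("yi-6b-chat"))   # -> ('llm_yi', ...)
    print(pick_backend("mistral-7b"))   # -> ('llm_mistral', ...)
```

One side effect of the reordering, visible in the diff itself: the `##LLM_MODEL###`/`##LLM_MODEL_PROVIDER###` placeholders are now filled with the incoming `llm_model`/`llm_provider` values rather than the normalized "Yi"/"Mistral" names assigned just below them.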