gorkemgoknar commited on
Commit
8a37abb
1 Parent(s): a09bf7b

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +3 -2
app.py CHANGED
@@ -122,6 +122,7 @@ ROLE_PROMPTS = {}
122
  ROLE_PROMPTS["AI Assistant"]=system_message
123
  ##"You are an AI assistant with the Zephyr model by Mistral and Hugging Face and speech from Coqui XTTS. The user will give you a task. Your goal is to complete the task as faithfully as you can. While performing the task think step-by-step and justify your steps; your answers should be clear and short sentences"
124
 
 
125
 
126
 
127
  ### WILL USE LOCAL MISTRAL OR ZEPHYR
@@ -191,7 +192,7 @@ def generate_local(
191
  temperature=0.8,
192
  max_tokens=256,
193
  top_p=0.95,
194
- stop = ["</s>","<|user|>"]
195
  ):
196
  temperature = float(temperature)
197
  if temperature < 1e-2:
@@ -225,7 +226,7 @@ def generate_local(
225
  return
226
 
227
 
228
- output += response["choices"][0]["text"].replace("<|assistant|>","").replace("<|user|>","")
229
  yield output
230
 
231
  except Exception as e:
 
122
  ROLE_PROMPTS["AI Assistant"]=system_message
123
  ##"You are an AI assistant with the Zephyr model by Mistral and Hugging Face and speech from Coqui XTTS. The user will give you a task. Your goal is to complete the task as faithfully as you can. While performing the task think step-by-step and justify your steps; your answers should be clear and short sentences"
124
 
125
+ LLM_STOP_WORDS= ["</s>","<|user|>","/s>"]
126
 
127
 
128
  ### WILL USE LOCAL MISTRAL OR ZEPHYR
 
192
  temperature=0.8,
193
  max_tokens=256,
194
  top_p=0.95,
195
+ stop = LLM_STOP_WORDS
196
  ):
197
  temperature = float(temperature)
198
  if temperature < 1e-2:
 
226
  return
227
 
228
 
229
+ output += response["choices"][0]["text"].replace("<|assistant|>","").replace("<|user|>","").replace("/s>","")
230
  yield output
231
 
232
  except Exception as e: