BenBranyon committed (verified)
Commit 3251e75 · Parent: b17453b

Update app.py

Files changed (1): app.py (+1 -1)
app.py CHANGED
@@ -28,13 +28,13 @@ def generater(message, history, temperature, top_p, top_k):
         # If chat_history is empty, return instructions and message
         prompt = f"<s>[INST] {instructions} Hi [/INST] Hello! how can I help you</s>[INST] {message} [/INST]"
         print("sending this prompt\n==============\n",prompt,'\n---------\n')
-        return prompt
     else:
         prompt = "<s>"
         for user_message, assistant_message in history:
             prompt += model.config["promptTemplate"].format(user_message)
             prompt += assistant_message + "</s>"
         prompt += model.config["promptTemplate"].format(message)
+        print("sending this prompt\n==============\n",prompt,'\n---------\n')
 
     outputs = []
     for token in model.generate(prompt=prompt, temp=temperature, top_k = top_k, top_p = top_p, max_tokens = max_new_tokens, streaming=True):
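
For context: `generater()` is a streaming generator, so the removed early `return prompt` appears to have stopped it before any tokens were produced on a fresh chat; after this patch both branches fall through to the `model.generate()` loop, and the added `print` gives the multi-turn branch the same debug output the first-turn branch already had. Below is a minimal, self-contained sketch of the prompt assembly this hunk touches. The `instructions` string and the Mistral-style template value are assumptions standing in for what the app reads from `model.config["promptTemplate"]`; they are not taken from the commit.

    # Standalone sketch of the patched prompt assembly (not the app's actual module).
    # Assumptions: `instructions` is the app's system prompt (not shown in the hunk),
    # and promptTemplate follows the Mistral-instruct format "[INST] {0} [/INST]".
    instructions = "You are a helpful assistant."  # assumed placeholder
    prompt_template = "[INST] {0} [/INST]"         # assumed template value

    def build_prompt(message, history):
        if not history:
            # First turn: seed the conversation with the instructions.
            return (f"<s>[INST] {instructions} Hi [/INST] Hello! how can I help you</s>"
                    f"[INST] {message} [/INST]")
        # Later turns: replay each (user, assistant) pair, then append the new message.
        prompt = "<s>"
        for user_message, assistant_message in history:
            prompt += prompt_template.format(user_message)
            prompt += assistant_message + "</s>"
        prompt += prompt_template.format(message)
        return prompt

    print(build_prompt("What changed?", [("Hi", "Hello! how can I help you")]))
    # <s>[INST] Hi [/INST]Hello! how can I help you</s>[INST] What changed? [/INST]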
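The `model.generate(..., temp=..., top_k=..., top_p=..., max_tokens=..., streaming=True)` call and the `model.config["promptTemplate"]` lookup are consistent with the gpt4all Python bindings, though the commit itself doesn't show the import. Assuming that library, a hedged sketch of how the surrounding loop would consume the stream (the model filename is an illustrative assumption):

    # Hedged usage sketch, assuming the gpt4all Python bindings.
    from gpt4all import GPT4All

    model = GPT4All("mistral-7b-instruct-v0.1.Q4_0.gguf")  # assumed model file
    outputs = []
    for token in model.generate(prompt="<s>[INST] Hi [/INST]", temp=0.7,
                                top_k=40, top_p=0.4, max_tokens=64, streaming=True):
        outputs.append(token)
        # a Gradio streaming handler would `yield "".join(outputs)` here
    print("".join(outputs))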