audrito committed
Commit ba3e48c
1 Parent(s): b0a6243

Update app.py

Files changed (1)
  1. app.py +4 -11
app.py CHANGED
@@ -28,17 +28,10 @@ def generater(message, history, temperature, top_p, top_k):
         prompt += model.config["promptTemplate"].format(user_message)
         prompt += assistant_message + "</s>"
     prompt += model.config["promptTemplate"].format(message)
-
-    try:
-        generated_tokens = model.generate(prompt=prompt, temp=temperature, top_k=top_k, top_p=top_p, max_tokens=max_new_tokens, streaming=True)
-
-        # Convert the generated tokens to a string
-        generated_text = model.decode(generated_tokens, skip_special_tokens=True)[0]
-        return generated_text
-
-    except Exception as e:
-        print("Error during generation:", str(e))
-        return "An error occurred during generation."
+    outputs = []
+    for token in model.generate(prompt=prompt, temp=temperature, top_k = top_k, top_p = top_p, max_tokens = max_new_tokens, streaming=True):
+        outputs.append(token)
+        yield "".join(outputs)
 
 
 
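
The new body turns generater() into a generator: instead of one blocking generate()/decode() call that returned the full reply, it yields the text accumulated so far after every token, which is what lets a Gradio chat UI stream the answer as it is produced. The sketch below shows how the updated function typically sits in the rest of app.py, assuming the gpt4all Python bindings and a gr.ChatInterface front end; the model file name, the max_new_tokens value, the "<s>" prompt prefix, and the slider wiring are illustrative assumptions, not part of this diff.

# Minimal sketch of app.py around the updated generater(); names marked
# "assumed" are not shown in the diff and may differ in the real Space.
import gradio as gr
from gpt4all import GPT4All

model = GPT4All("mistral-7b-instruct-v0.1.Q4_0.gguf")  # assumed model file
max_new_tokens = 512  # assumed; defined elsewhere in app.py

def generater(message, history, temperature, top_p, top_k):
    prompt = "<s>"  # assumed prefix, matching the "</s>" closing tag used above
    for user_message, assistant_message in history:
        prompt += model.config["promptTemplate"].format(user_message)
        prompt += assistant_message + "</s>"
    prompt += model.config["promptTemplate"].format(message)
    outputs = []
    # Yield the accumulated text after every token so Gradio streams it.
    for token in model.generate(prompt=prompt, temp=temperature, top_k=top_k,
                                top_p=top_p, max_tokens=max_new_tokens, streaming=True):
        outputs.append(token)
        yield "".join(outputs)

# A generator fn makes gr.ChatInterface render the reply incrementally.
demo = gr.ChatInterface(
    fn=generater,
    additional_inputs=[
        gr.Slider(0.1, 2.0, value=0.7, label="temperature"),
        gr.Slider(0.1, 1.0, value=0.95, label="top_p"),
        gr.Slider(1, 100, value=40, step=1, label="top_k"),
    ],
)

if __name__ == "__main__":
    demo.queue().launch()

Yielding the joined string rather than each individual token matters here: ChatInterface replaces the displayed message with every yielded value instead of appending to it, so each yield must carry the full partial reply.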