StevenChen16 committed on
Commit
69325c8
1 Parent(s): 349c813

Delete the max_new_tokens limit

Files changed (1)
  1. app.py +1 -1
app.py CHANGED
@@ -83,7 +83,7 @@ def query_model(user_input, history):
     messages = [{"role": "user", "content": combined_query}]
 
     response = ""
-    for new_text in chat_model.stream_chat(messages, max_new_tokens=512, temperature=0.9):
+    for new_text in chat_model.stream_chat(messages, temperature=0.9):
        response += new_text
        yield response
 
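For context, a minimal hedged sketch of what this change means for callers: with the hard-coded max_new_tokens=512 removed from query_model, generation length falls back to the backend's default, and a caller can still cap output per request by passing max_new_tokens explicitly. This is a hypothetical usage example, not part of the commit; it assumes a LLaMA-Factory-style ChatModel whose stream_chat(messages, **kwargs) yields incremental text chunks, as app.py's code suggests (the construction of chat_model happens elsewhere in app.py).

# Hypothetical usage sketch (not part of this commit).
# Assumes chat_model is the LLaMA-Factory-style ChatModel already built in app.py.
messages = [{"role": "user", "content": "Explain the change in one line."}]

response = ""
# No hard-coded cap in query_model anymore; a per-call limit is still possible.
for new_text in chat_model.stream_chat(messages, max_new_tokens=1024, temperature=0.9):
    response += new_text  # accumulate streamed chunks, as query_model does
print(response)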