Spaces:
Sleeping
Sleeping
StevenChen16
committed on
Commit
•
69325c8
1
Parent(s):
349c813
delete the limit of max_new_token
Browse files
app.py
CHANGED
@@ -83,7 +83,7 @@ def query_model(user_input, history):
|
|
83 |
messages = [{"role": "user", "content": combined_query}]
|
84 |
|
85 |
response = ""
|
86 |
-
for new_text in chat_model.stream_chat(messages,
|
87 |
response += new_text
|
88 |
yield response
|
89 |
|
|
|
83 |
messages = [{"role": "user", "content": combined_query}]
|
84 |
|
85 |
response = ""
|
86 |
+
for new_text in chat_model.stream_chat(messages, temperature=0.9):
|
87 |
response += new_text
|
88 |
yield response
|
89 |
|