ysharma (HF staff) committed
Commit 090e9aa • 1 Parent(s): a58bd0b

update stream code

Files changed (1):
  1. app.py +3 -2
app.py CHANGED
@@ -113,8 +113,9 @@ def predict_glm_stream(input, history=[]): #, top_p, temperature):
     response, history = model_glm.chat(tokenizer_glm, input, history)
     print(f"outside for loop resonse is ^^- {response}")
     print(f"outside for loop history is ^^- {history}")
-    top_p, temperature = 1.0, 1.0
-    for response, history in model.stream_chat(tokenizer_glm, input, history, top_p=top_p, temperature=temperature): #max_length=max_length,
+    top_p = 1.0
+    temperature = 1.0
+    for response, history in model.stream_chat(tokenizer_glm, input, history, top_p=1.0, temperature=1.0): #max_length=max_length,
         print(f"In for loop resonse is ^^- {response}")
         print(f"In for loop history is ^^- {history}")
         # translate Chinese to English
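
For context: stream_chat is a generator method on the ChatGLM model that yields progressively longer (response, history) pairs as tokens are produced, which is what enables the token-by-token streaming this commit touches. Below is a minimal consumption sketch, not the Space's full app.py; it assumes the THUDM/chatglm-6b checkpoint, a CUDA device, and the top_p/temperature values hardcoded in the diff, and it names the loaded model model_glm to match the non-streaming call above.

# Minimal sketch of the streaming pattern edited above, not the Space's exact code.
# Assumption: the THUDM/chatglm-6b checkpoint and a CUDA-capable GPU are available.
from transformers import AutoTokenizer, AutoModel

tokenizer_glm = AutoTokenizer.from_pretrained("THUDM/chatglm-6b", trust_remote_code=True)
model_glm = AutoModel.from_pretrained("THUDM/chatglm-6b", trust_remote_code=True).half().cuda()
model_glm = model_glm.eval()

history = []
query = "Hello"

# stream_chat yields (partial_response, updated_history) on each step; every yield
# carries the text generated so far, so the final yield holds the complete reply.
for response, history in model_glm.stream_chat(
    tokenizer_glm, query, history, top_p=1.0, temperature=1.0
):
    print(response)  # prints an increasingly complete response on each iteration

In a Gradio app such as this Space, that loop typically sits inside a generator event handler that yields the partial output, so the UI component updates incrementally instead of waiting for the full reply.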