FINGU-AI committed
Commit eaf592b
1 Parent(s): 9834006

Update app.py

Files changed (1):
  app.py  +6 -5
app.py CHANGED
@@ -49,8 +49,8 @@ def inference(query):
     outputs = model.generate(tokenized_chat, **generation_params)
     decoded_outputs = tokenizer.batch_decode(outputs, skip_special_tokens=False)
     assistant_response = decoded_outputs[0].split("<|im_start|>assistant\n")[-1].strip()
-    # response_ = assistant_response.replace('<|im_end|>', "")
-    return assistant_response
+    response_ = assistant_response.replace('<|im_end|>', "")
+    return response_
     # outputs = model.generate(tokenized_chat, **generation_params, streamer=streamer)
     # return outputs
 
@@ -63,7 +63,8 @@ examples = ['How can options strategies such as straddles, strangles, and spread
 
 def response(message, history):
     text = inference(message)
-    for i in range(len(text)):
-        time.sleep(0.01)
-        yield text[: i + 1]
+    return text
+    # for i in range(len(text)):
+    #     time.sleep(0.01)
+    #     yield text[: i + 1]
 gr.ChatInterface(response,examples=examples).launch()
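
In effect, the commit un-comments the `<|im_end|>` cleanup inside inference() so the end-of-turn token no longer leaks into the chat window, and swaps the character-by-character streaming generator in response() for a single return of the full string. Below is a rough, self-contained sketch of the before/after behaviour; inference() is replaced by a hypothetical stub, since the real function calls model.generate() with tokenized_chat and generation_params, which are not part of this hunk.

import time

def inference(query):
    # Hypothetical stand-in for the real inference() in app.py, which runs
    # model.generate() and decodes the output with the chat template; here it
    # just returns a fixed string ending in the end-of-turn token.
    assistant_response = "Options spreads cap both risk and reward.<|im_end|>"
    # The first hunk's change: strip the end token before returning.
    response_ = assistant_response.replace("<|im_end|>", "")
    return response_

def response_streaming(message, history):
    # Old behaviour (now commented out): yield progressively longer prefixes,
    # which gr.ChatInterface renders as a character-by-character stream.
    text = inference(message)
    for i in range(len(text)):
        time.sleep(0.01)
        yield text[: i + 1]

def response(message, history):
    # New behaviour after this commit: return the whole reply at once.
    text = inference(message)
    return text

if __name__ == "__main__":
    print(response("What is a straddle?", history=[]))
    # Drain the generator to confirm it ends at the same full string.
    print(list(response_streaming("What is a straddle?", history=[]))[-1])
    # In app.py the wiring is the same for either style:
    # gr.ChatInterface(response, examples=examples).launch()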