ClueAI committed
Commit 29c6f43
1 Parent(s): 8ceafaf

Update README.md

Files changed (1)
  1. README.md +3 -2
README.md CHANGED
@@ -48,7 +48,7 @@ def preprocess(text):
 def postprocess(text):
     return text.replace("\\n", "\n").replace("\\t", "\t")
 
-def answer(text, sample=True, top_p=0.9):
+def answer(text, sample=True, top_p=1, temperature=0.7):
     '''sample: whether to sample. For generation tasks, this can be set to True;
     top_p: between 0 and 1; the larger the value, the more diverse the generated content'''
     text = preprocess(text)
@@ -56,9 +56,10 @@ def answer(text, sample=True, top_p=0.9):
     if not sample:
         out = model.generate(**encoding, return_dict_in_generate=True, output_scores=False, max_new_tokens=512, num_beams=1, length_penalty=0.6)
     else:
-        out = model.generate(**encoding, return_dict_in_generate=True, output_scores=False, max_new_tokens=512, do_sample=True, top_p=top_p, no_repeat_ngram_size=3)
+        out = model.generate(**encoding, return_dict_in_generate=True, output_scores=False, max_new_tokens=512, do_sample=True, top_p=top_p, temperature=temperature, no_repeat_ngram_size=3)
     out_text = tokenizer.batch_decode(out["sequences"], skip_special_tokens=True)
     return postprocess(out_text[0])
+print("end...")
 ```
 
 # Q&A, writing, and functional assistant
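For reference, a minimal sketch of how the updated `answer()` signature might be called, assuming the tokenizer, model, and the `preprocess`/`postprocess` helpers are already defined as in the surrounding README; the prompt string below is illustrative only and not part of this commit.

```python
# Minimal usage sketch (assumption: tokenizer, model, preprocess and postprocess
# are already set up as in the README; the prompt is illustrative only).
prompt = "Write a short poem about spring."

# With sample=False the function uses greedy decoding (num_beams=1),
# so top_p and temperature have no effect.
deterministic = answer(prompt, sample=False)

# With sample=True and the new defaults from this commit (top_p=1, temperature=0.7),
# nucleus filtering is effectively disabled and output diversity is controlled
# mainly by temperature scaling of the logits.
sampled = answer(prompt, sample=True, top_p=1, temperature=0.7)

print(deterministic)
print(sampled)
```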