qgyd2021 committed on
Commit 58ab7a5
1 Parent(s): 9d715da

[update]add main

Files changed (1)
main.py +4 -0
main.py CHANGED
@@ -98,6 +98,8 @@ def chat_with_llm_non_stream(question: str,
     utterances.append(question)
 
     encoded_utterances = tokenizer.__call__(utterances, add_special_tokens=False)
+    encoded_utterances = encoded_utterances["input_ids"]
+
     for encoded_utterance in encoded_utterances:
         input_ids.extend(encoded_utterance)
         if model.config.model_type == "chatglm":
@@ -150,6 +152,8 @@ def chat_with_llm_streaming(question: str,
     utterances.append(question)
 
     encoded_utterances = tokenizer.__call__(utterances, add_special_tokens=False)
+    encoded_utterances = encoded_utterances["input_ids"]
+
     for encoded_utterance in encoded_utterances:
         input_ids.extend(encoded_utterance)
         if model.config.model_type == "chatglm":
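
For context, a minimal sketch (not part of the commit) of why the added indexing is needed: calling a Hugging Face tokenizer on a list of strings returns a dict-like BatchEncoding, so iterating the result directly yields its keys rather than the per-utterance token ids. The "gpt2" checkpoint below is only an illustrative stand-in for whatever model main.py actually loads.

# Minimal sketch, assuming a Hugging Face AutoTokenizer; "gpt2" is an example checkpoint.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("gpt2")

utterances = ["hello", "how are you"]
encoded_utterances = tokenizer.__call__(utterances, add_special_tokens=False)

# BatchEncoding is dict-like: iterating it yields keys such as "input_ids"
# and "attention_mask", not the encoded utterances themselves.
print(list(encoded_utterances))

# Selecting "input_ids" gives one list of token ids per utterance, which is
# what the loop in main.py extends input_ids with.
encoded_utterances = encoded_utterances["input_ids"]
for encoded_utterance in encoded_utterances:
    print(encoded_utterance)  # a list of ints for each utterance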