artistypl committed on
Commit
51abe5c
•
1 Parent(s): ac65081

Update app.py

Browse files

增加 chatGPT 的能力 (add ChatGPT capability)

Files changed (1) hide show
  1. app.py +4 -3
app.py CHANGED
@@ -33,7 +33,8 @@ embedding_model_dict = {
33
  llm_model_dict = {
34
  "ChatGLM-6B-int8": "THUDM/chatglm-6b-int8",
35
  "ChatGLM-6B-int4": "THUDM/chatglm-6b-int4",
36
- "ChatGLM-6b-int4-qe": "THUDM/chatglm-6b-int4-qe"
 
37
  }
38
 
39
  DEVICE = "cuda" if torch.cuda.is_available(
@@ -109,9 +110,9 @@ def get_knowledge_based_answer(query,
109
  prompt = PromptTemplate(template=prompt_template,
110
  input_variables=["context", "question"])
111
  chatLLM = ChatLLM()
112
- chatLLM.history = chat_history[-history_len:] if history_len > 0 else []
113
  if large_language_model == "ChatGPT":
114
- chatLLM.model = OpenAI()
115
  else:
116
  chatLLM.load_model(
117
  model_name_or_path=llm_model_dict[large_language_model])
 
33
  llm_model_dict = {
34
  "ChatGLM-6B-int8": "THUDM/chatglm-6b-int8",
35
  "ChatGLM-6B-int4": "THUDM/chatglm-6b-int4",
36
+ "ChatGLM-6b-int4-qe": "THUDM/chatglm-6b-int4-qe",
37
+ "ChatGPT": "ChatGPT"
38
  }
39
 
40
  DEVICE = "cuda" if torch.cuda.is_available(
 
110
  prompt = PromptTemplate(template=prompt_template,
111
  input_variables=["context", "question"])
112
  chatLLM = ChatLLM()
113
+ chatLLM.history = chat_history[-history_len:] if history_len > 0 else chat_history
114
  if large_language_model == "ChatGPT":
115
+ chatLLM.model = "ChatGPT"
116
  else:
117
  chatLLM.load_model(
118
  model_name_or_path=llm_model_dict[large_language_model])