Spaces:
Runtime error
Runtime error
Update app.py
Browse files — 加入 chatGPT 的能力 (add ChatGPT capability)
app.py
CHANGED
@@ -33,7 +33,8 @@ embedding_model_dict = {
|
|
33 |
llm_model_dict = {
|
34 |
"ChatGLM-6B-int8": "THUDM/chatglm-6b-int8",
|
35 |
"ChatGLM-6B-int4": "THUDM/chatglm-6b-int4",
|
36 |
-
"ChatGLM-6b-int4-qe": "THUDM/chatglm-6b-int4-qe"
|
|
|
37 |
}
|
38 |
|
39 |
DEVICE = "cuda" if torch.cuda.is_available(
|
@@ -109,9 +110,9 @@ def get_knowledge_based_answer(query,
|
|
109 |
prompt = PromptTemplate(template=prompt_template,
|
110 |
input_variables=["context", "question"])
|
111 |
chatLLM = ChatLLM()
|
112 |
-
chatLLM.history = chat_history[-history_len:] if history_len > 0 else
|
113 |
if large_language_model == "ChatGPT":
|
114 |
-
chatLLM.model =
|
115 |
else:
|
116 |
chatLLM.load_model(
|
117 |
model_name_or_path=llm_model_dict[large_language_model])
|
|
|
33 |
llm_model_dict = {
|
34 |
"ChatGLM-6B-int8": "THUDM/chatglm-6b-int8",
|
35 |
"ChatGLM-6B-int4": "THUDM/chatglm-6b-int4",
|
36 |
+
"ChatGLM-6b-int4-qe": "THUDM/chatglm-6b-int4-qe",
|
37 |
+
"ChatGPT": "ChatGPT"
|
38 |
}
|
39 |
|
40 |
DEVICE = "cuda" if torch.cuda.is_available(
|
|
|
110 |
prompt = PromptTemplate(template=prompt_template,
|
111 |
input_variables=["context", "question"])
|
112 |
chatLLM = ChatLLM()
|
113 |
+
chatLLM.history = chat_history[-history_len:] if history_len > 0 else chat_history
|
114 |
if large_language_model == "ChatGPT":
|
115 |
+
chatLLM.model = "ChatGPT"
|
116 |
else:
|
117 |
chatLLM.load_model(
|
118 |
model_name_or_path=llm_model_dict[large_language_model])
|