NCTCMumbai
committed on
Update backend/query_llm.py
backend/query_llm.py CHANGED (+1 -1)
@@ -108,7 +108,7 @@ def generate_hf(prompt: str, history: str, temperature: float = 0.5, max_new_tok
 def generate_qwen(formatted_prompt: str, history: str):
     response = client.predict(
         query=formatted_prompt,
-        history=
+        history=history,
         system='You are wonderful',
         api_name="/model_chat"
     )
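For context, a minimal sketch of how the fixed function might be wired up, assuming `client` is a `gradio_client.Client` connected to a Space that exposes the `/model_chat` endpoint seen in the diff. The Space name, the trailing `return`, and the example call are illustrative assumptions, not part of the commit.

from gradio_client import Client

# Hypothetical Space name; the backend's real target is not shown in this commit.
client = Client("Qwen/Qwen1.5-110B-Chat-demo")

def generate_qwen(formatted_prompt: str, history: str):
    # The fix: forward the caller's `history` to the endpoint instead of
    # leaving the keyword empty, which previously did not even parse.
    response = client.predict(
        query=formatted_prompt,
        history=history,
        system='You are wonderful',
        api_name="/model_chat"
    )
    return response  # assumed; the hunk ends before the function's return

# Example call: an empty history starts a fresh conversation.
print(generate_qwen("Hello, who are you?", history=""))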