Update app.py

app.py CHANGED
@@ -102,11 +102,11 @@ def build_model(model_choice, temperature=0.7):
         llm = HuggingFaceEndpoint(
             repo_id="google/flan-t5-base",
             huggingfacehub_api_token=api_key,
-            task="
+            task="text-generation",
             max_new_tokens=500,
             temperature=temperature
         )
-        return llm,
+        return ChatHuggingFace(llm=llm, temperature=temperature), True  # (model, is_chat)
 
     elif model_choice == "DeepSeek":
         llm = HuggingFaceEndpoint(
@@ -189,7 +189,7 @@ if st.button("Run Chatbot"):
     relevant_docs = retriever.invoke(query)
     context_text = "\n\n".join(doc.page_content for doc in relevant_docs)
 
-    prompt = prompt_template.
+    prompt = prompt_template.invoke(context=context_text, question=query)
 
     with st.spinner(f"Generating response using {model_choice}..."):
         model, is_chat = build_model(model_choice, temperature)
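The first hunk gives the Flan-T5 endpoint an explicit task and wraps it in ChatHuggingFace, so build_model now returns a (model, is_chat) tuple that callers can use to pick the right invocation style. Below is a minimal sketch of how that branch could look in context; it assumes the langchain_huggingface package and an api_key read from the environment (the real app defines api_key elsewhere), and it sets the sampling temperature on the endpoint only, since whether ChatHuggingFace accepts a temperature keyword depends on the installed version.

# Sketch only: assumes langchain_huggingface is installed and that api_key
# holds a Hugging Face token (read from the environment here for illustration).
import os

from langchain_huggingface import ChatHuggingFace, HuggingFaceEndpoint

api_key = os.environ.get("HUGGINGFACEHUB_API_TOKEN", "")


def build_model(model_choice, temperature=0.7):
    if model_choice == "Flan-T5":
        llm = HuggingFaceEndpoint(
            repo_id="google/flan-t5-base",
            huggingfacehub_api_token=api_key,
            task="text-generation",
            max_new_tokens=500,
            temperature=temperature,
        )
        # Wrap the raw endpoint in a chat interface; the sampling temperature
        # is already configured on the endpoint above.
        return ChatHuggingFace(llm=llm), True  # (model, is_chat)
    # The DeepSeek branch (not shown) follows the same pattern in the full file.
    raise ValueError(f"Unsupported model choice: {model_choice}")

Returning the flag alongside the model keeps the caller's model, is_chat = build_model(...) unpacking intact while signaling whether the result is a chat model (returns messages) or a plain LLM (returns a string).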
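The second hunk rebuilds the prompt from the retrieved context before the generation spinner. The sketch below shows that flow end to end, assuming retriever, prompt_template (with context and question input variables), query, model_choice, and temperature are already defined as in the surrounding app; note that a LangChain prompt template's invoke() takes a single dict of variables rather than keyword arguments.

# Sketch only: retrieval, prompt construction, and generation inside the
# "Run Chatbot" button handler. All names below except `answer` are assumed
# to exist in the surrounding app.
import streamlit as st

relevant_docs = retriever.invoke(query)
context_text = "\n\n".join(doc.page_content for doc in relevant_docs)

# Prompt templates are invoked with a dict of their input variables.
prompt = prompt_template.invoke({"context": context_text, "question": query})

with st.spinner(f"Generating response using {model_choice}..."):
    model, is_chat = build_model(model_choice, temperature)
    if is_chat:
        # Chat models return a message object; the generated text is .content.
        answer = model.invoke(prompt).content
    else:
        # Plain LLM endpoints return the generated string directly.
        answer = model.invoke(prompt)

st.write(answer)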