Update agent.py
agent.py CHANGED
@@ -209,11 +209,13 @@ def initialize_models(use_api_mode=False):
         # Embedding model
         embed_model = HuggingFaceEmbedding(
             model_name="llamaindex/vdr-2b-multi-v1",
-            device="
+            device="cpu",
             trust_remote_code=True,
-            model_kwargs={
+            model_kwargs={
+                "torch_dtype": "auto"
+            }
         )
-
+
         return proj_llm, code_llm, embed_model
     except Exception as e:
         print(f"Error initializing models: {e}")

@@ -903,7 +905,6 @@ async def main():
     }

     print(question_data)
-    dynamic_qe_manager = DynamicQueryEngineManager()
     content = enhanced_web_search_and_update("How many studio albums were published by Mercedes Sosa between 2000 and 2009 (included)? List them !")
     print(content)
     #answer = await agent.solve_gaia_question(question_data)
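For context, a minimal sketch of the embedding setup as it stands after this change. It assumes the llama-index-embeddings-huggingface package provides the HuggingFaceEmbedding import used in agent.py; the proj_llm and code_llm objects returned by initialize_models are out of scope here, and the get_text_embedding call at the end is only an illustrative smoke test, not part of the committed code.

    # Sketch of the post-change embedding configuration (assumes the
    # llama-index-embeddings-huggingface package is installed; the two LLMs
    # returned by initialize_models are omitted).
    from llama_index.embeddings.huggingface import HuggingFaceEmbedding

    embed_model = HuggingFaceEmbedding(
        model_name="llamaindex/vdr-2b-multi-v1",
        device="cpu",                          # run on CPU, as set in this commit
        trust_remote_code=True,
        model_kwargs={"torch_dtype": "auto"},  # forwarded to the underlying model load
    )

    # Illustrative smoke test: embed one string and inspect the vector length.
    vec = embed_model.get_text_embedding("Mercedes Sosa studio albums 2000-2009")
    print(len(vec))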