Update agent.py
agent.py CHANGED
@@ -54,12 +54,10 @@ from llama_index.core.query_pipeline import QueryPipeline
 import importlib.util
 import sys
 
-
-
-
-
-    llama_debug # For general debugging
-])
+wandb.init(project="gaia-llamaindex-agents")  # Choose your project name
+wandb_callback = WandbCallbackHandler(run_args={"project": "gaia-llamaindex-agents"})
+llama_debug = LlamaDebugHandler(print_trace_on_end=True)
+callback_manager = CallbackManager([wandb_callback, llama_debug])
 
 logging.basicConfig(level=logging.INFO)
 logging.getLogger("llama_index.core.agent").setLevel(logging.DEBUG)
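Note: this hunk replaces the old multi-line CallbackManager block with an explicit W&B plus debug-handler setup, but the diff itself never shows the manager being registered with LlamaIndex. A minimal wiring sketch follows; the import paths and the Settings.callback_manager assignment are assumptions for illustration, not part of this commit:

# Sketch only: wire the handlers into LlamaIndex's global settings so that
# agent/query traces reach both W&B and the debug handler.
import wandb
from llama_index.core import Settings
from llama_index.core.callbacks import CallbackManager, LlamaDebugHandler
from llama_index.callbacks.wandb import WandbCallbackHandler

wandb.init(project="gaia-llamaindex-agents")  # Choose your project name
wandb_callback = WandbCallbackHandler(run_args={"project": "gaia-llamaindex-agents"})
llama_debug = LlamaDebugHandler(print_trace_on_end=True)  # prints a trace after each query
callback_manager = CallbackManager([wandb_callback, llama_debug])

Settings.callback_manager = callback_manager  # assumed registration step, not shown in the diff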
@@ -99,12 +97,7 @@
     generate_kwargs={"temperature": 0.0, "do_sample": False}
 )
 
-embed_model = HuggingFaceEmbedding("BAAI/bge-
-
-wandb.init(project="gaia-llamaindex-agents")  # Choose your project name
-wandb_callback = WandbCallbackHandler(run_args={"project": "gaia-llamaindex-agents"})
-llama_debug = LlamaDebugHandler(print_trace_on_end=True)
-callback_manager = CallbackManager([wandb_callback, llama_debug])
+embed_model = HuggingFaceEmbedding("BAAI/bge-visualized-m3")
 
 Settings.llm = proj_llm
 Settings.embed_model = embed_model
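Note: once Settings.llm and Settings.embed_model are set globally, any index or query engine built later in agent.py picks them up automatically. A small smoke-test sketch under that assumption; the sample document and query are made up, and it presumes the settings (and the callback wiring sketched above) are already in place:

from llama_index.core import Document, VectorStoreIndex

# Uses the globally configured embed_model for embeddings and the global LLM
# for answering; no per-index overrides are needed.
docs = [Document(text="GAIA is a benchmark for general-purpose AI assistants.")]
index = VectorStoreIndex.from_documents(docs)

# If the callback manager is registered, this query is traced to W&B and
# printed by the LlamaDebugHandler.
print(index.as_query_engine().query("What is GAIA?"))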