Update agent.py
agent.py CHANGED
@@ -181,6 +181,7 @@ def build_graph(provider: str = "huggingface"):
         llm = ChatGroq(model="qwen-qwq-32b", temperature=0) # optional : qwen-qwq-32b gemma2-9b-it
     elif provider == "huggingface":
         # TODO: Add huggingface endpoint
+        """
         llm = ChatHuggingFace(
             llm=HuggingFaceEndpoint(
                 #endpoint_url="https://api-inference.huggingface.co/models/Meta-DeepLearning/llama-2-7b-chat-hf",
@@ -189,6 +190,15 @@ def build_graph(provider: str = "huggingface"):
                 temperature=0,
             ),
         )
+        """
+        llm_id=HuggingFaceEndpoint(
+            repo_id="Qwen/Qwen2.5-Coder-32B.Instruct",
+            task="text-generation",
+            temperature = 0,
+        )
+
+        llm=ChatHuggingFace(llm=llm_id)
+
     else:
         raise ValueError("Invalid provider. Choose 'google', 'groq' or 'huggingface'.")
     # Bind tools to LLM
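For reference, a minimal sketch of the new huggingface branch run on its own, assuming HUGGINGFACEHUB_API_TOKEN is set in the environment and that the intended repo id is Qwen/Qwen2.5-Coder-32B-Instruct (hyphenated, rather than the ".Instruct" spelling that appears in the diff):

from langchain_huggingface import ChatHuggingFace, HuggingFaceEndpoint

# Serverless inference endpoint addressed by repo_id instead of the old
# commented-out endpoint_url.
endpoint = HuggingFaceEndpoint(
    repo_id="Qwen/Qwen2.5-Coder-32B-Instruct",  # assumed spelling of the model id
    task="text-generation",
    temperature=0,
)
llm = ChatHuggingFace(llm=endpoint)

# Quick smoke test; in agent.py this llm is subsequently bound to the tools
# further down in build_graph(provider="huggingface").
print(llm.invoke("Reply with the single word: ready").content)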