Spaces:
Sleeping
Sleeping
Change llm model
Browse files
agent.py
CHANGED
|
@@ -131,13 +131,11 @@ tools = [
 
 def build_graph():
     """Build the graph"""
-    # Initialize Groq LLM
     llm = ChatGroq(
-        model="
+        model="llama2-70b-4096",
         temperature=0.1
     )
 
-    # Bind tools to LLM
     llm_with_tools = llm.bind_tools(tools)
 
     # Node
app.py
CHANGED
|
@@ -25,7 +25,6 @@ class BasicAgent:
 
     def __call__(self, question: str) -> str:
         print(f"Agent received question (first 50 chars): {question[:50]}...")
-        # Wrap the question in a HumanMessage from langchain_core
         messages = [HumanMessage(content=question)]
         result = self.graph.invoke({"messages": messages})
         last_message = result['messages'][-1]