darabos committed on
Commit
95f0558
·
1 Parent(s): 0eac0f8

Use LLM instead of TaskSolver.

Browse files
lynxkite-lynxscribe/src/lynxkite_lynxscribe/agentic.py CHANGED
@@ -5,12 +5,12 @@ import os
5
  import typing
6
 
7
  from lynxkite_core import ops
8
- from lynxscribe.components.task_solver import TaskSolver
9
  from lynxscribe.components.tool_use import LLM
10
  from lynxscribe.components.mcp_client import MCPClient
11
  from lynxscribe.components.rag.rag_chatbot import RAGChatbotResponse
12
  from lynxscribe.core.llm.base import get_llm_engine
13
- from lynxscribe.core.models.prompts import Function, Tool
 
14
 
15
  if typing.TYPE_CHECKING:
16
  import fastapi
@@ -33,10 +33,7 @@ def gradio_chat(agent: dict):
33
 
34
  async def respond(message, chat_history):
35
  await ag.init()
36
- response = await ag.llm.ask(
37
- [*chat_history, {"role": "user", "content": message}],
38
- expose_tool_calls=True,
39
- )
40
  async for messages in rag_chatbot_response_to_gradio(response):
41
  yield messages
42
 
@@ -56,6 +53,7 @@ def agent_from_dict(agent: dict, default_model: dict) -> "Agent":
56
  mcp_servers=agent["mcp_servers"],
57
  agents=[agent_from_dict(a, default_model=model) for a in agent["sub_agents"]],
58
  model=model,
 
59
  )
60
 
61
 
@@ -153,6 +151,7 @@ class Agent:
153
  mcp_servers: list[list[str]],
154
  agents: list["Agent"],
155
  model: dict,
 
156
  ):
157
  self.name = name
158
  self.description = description
@@ -160,29 +159,22 @@ class Agent:
160
  self.mcp_servers = mcp_servers
161
  self.agents = agents
162
  self.mcp_client = None
163
- self.task_solver = None
164
  self.llm_engine_params = {**model}
165
  self.model_name = self.llm_engine_params.pop("model_name")
 
166
 
167
  async def init(self):
168
- if self.task_solver is not None:
169
  return
170
  self.mcp_client = MCPClient(*self.mcp_servers)
171
  await self.mcp_client.aenter()
172
  agents_as_functions = [agent.as_function() for agent in self.agents]
173
- self.task_solver = TaskSolver(
174
- llm=get_llm_engine(**self.llm_engine_params),
175
- model=self.model_name,
176
- initial_messages=[self.prompt],
177
- functions=[*self.mcp_client.functions, *agents_as_functions],
178
- tool_choice="required",
179
- temperature=0.0,
180
- max_tool_call_steps=999,
181
- )
182
  self.llm = LLM(
183
  engine=get_llm_engine(**self.llm_engine_params),
184
  model=self.model_name,
185
  tools=[*self.mcp_client.functions, *agents_as_functions],
 
186
  )
187
 
188
  def get_description(self, url: str) -> str:
@@ -206,22 +198,28 @@ class Agent:
206
 
207
  async def post(self, request: "fastapi.Request") -> dict:
208
  if request.state.remaining_path == "chat/completions":
209
- request = await request.json()
210
- assert not request["stream"]
 
211
  await self.init()
212
- res = await self.task_solver.solve(request["messages"][-1]["content"])
213
- return {"choices": [{"message": {"role": "assistant", "content": res}}]}
214
 
215
  return {"error": "Not found"}
216
 
 
 
 
 
 
217
  def as_function(self):
218
  """A callable that can be used as a tool by another Agent."""
219
 
220
- # Find the value of x given that 4*x^4 = 44. Compute the numerical value.
221
  async def ask(message):
222
  print(f"Calling agent {self.name} with message: {message}")
223
  await self.init()
224
- res = await self.task_solver.solve(message)
 
225
  print(f"Agent {self.name} response: {res}")
226
  return res
227
 
@@ -267,7 +265,9 @@ async def rag_chatbot_response_to_gradio(response: RAGChatbotResponse):
267
  role="assistant",
268
  content="",
269
  metadata=dict(
270
- title=f"Using tool: {tool_call.function.name}",
 
 
271
  status="pending",
272
  ),
273
  )
 
5
  import typing
6
 
7
  from lynxkite_core import ops
 
8
  from lynxscribe.components.tool_use import LLM
9
  from lynxscribe.components.mcp_client import MCPClient
10
  from lynxscribe.components.rag.rag_chatbot import RAGChatbotResponse
11
  from lynxscribe.core.llm.base import get_llm_engine
12
+ from lynxscribe.core.models.prompts import Function, Tool, ChatCompletionPrompt
13
+ from lynxscribe.core import router
14
 
15
  if typing.TYPE_CHECKING:
16
  import fastapi
 
33
 
34
  async def respond(message, chat_history):
35
  await ag.init()
36
+ response = await ag.ask([*chat_history, {"role": "user", "content": message}])
 
 
 
37
  async for messages in rag_chatbot_response_to_gradio(response):
38
  yield messages
39
 
 
53
  mcp_servers=agent["mcp_servers"],
54
  agents=[agent_from_dict(a, default_model=model) for a in agent["sub_agents"]],
55
  model=model,
56
+ expose_tool_calls=True,
57
  )
58
 
59
 
 
151
  mcp_servers: list[list[str]],
152
  agents: list["Agent"],
153
  model: dict,
154
+ expose_tool_calls: bool,
155
  ):
156
  self.name = name
157
  self.description = description
 
159
  self.mcp_servers = mcp_servers
160
  self.agents = agents
161
  self.mcp_client = None
162
+ self.llm = None
163
  self.llm_engine_params = {**model}
164
  self.model_name = self.llm_engine_params.pop("model_name")
165
+ self.expose_tool_calls = expose_tool_calls
166
 
167
  async def init(self):
168
+ if self.llm is not None:
169
  return
170
  self.mcp_client = MCPClient(*self.mcp_servers)
171
  await self.mcp_client.aenter()
172
  agents_as_functions = [agent.as_function() for agent in self.agents]
 
 
 
 
 
 
 
 
 
173
  self.llm = LLM(
174
  engine=get_llm_engine(**self.llm_engine_params),
175
  model=self.model_name,
176
  tools=[*self.mcp_client.functions, *agents_as_functions],
177
+ expose_tool_calls=self.expose_tool_calls,
178
  )
179
 
180
  def get_description(self, url: str) -> str:
 
198
 
199
  async def post(self, request: "fastapi.Request") -> dict:
200
  if request.state.remaining_path == "chat/completions":
201
+ request: dict = await request.json()
202
+ stream: bool = request.pop("stream", False)
203
+ request: ChatCompletionPrompt = ChatCompletionPrompt.model_validate(request)
204
  await self.init()
205
+ res = await self.ask(request.messages)
206
+ return await router.rag_chatbot_response_to_http(res, stream)
207
 
208
  return {"error": "Not found"}
209
 
210
+ async def ask(self, messages: list[str], **kwargs) -> RAGChatbotResponse:
211
+ await self.init()
212
+ res = await self.llm.ask([self.prompt, *messages], **kwargs)
213
+ return res
214
+
215
  def as_function(self):
216
  """A callable that can be used as a tool by another Agent."""
217
 
 
218
  async def ask(message):
219
  print(f"Calling agent {self.name} with message: {message}")
220
  await self.init()
221
+ res = await self.ask([message])
222
+ res = await res.get_answer()
223
  print(f"Agent {self.name} response: {res}")
224
  return res
225
 
 
265
  role="assistant",
266
  content="",
267
  metadata=dict(
268
+ title=f"Asking {tool_call.function.name.removeprefix('ask_').replace('_', ' ')}"
269
+ if tool_call.function.name.startswith("ask_")
270
+ else f"Using {tool_call.function.name}",
271
  status="pending",
272
  ),
273
  )