darabos committed on
Commit
0eac0f8
·
1 Parent(s): caaccd3

Show tool calls on the Gradio chat interface.

Browse files
lynxkite-lynxscribe/src/lynxkite_lynxscribe/agentic.py CHANGED
@@ -8,6 +8,7 @@ from lynxkite_core import ops
8
  from lynxscribe.components.task_solver import TaskSolver
9
  from lynxscribe.components.tool_use import LLM
10
  from lynxscribe.components.mcp_client import MCPClient
 
11
  from lynxscribe.core.llm.base import get_llm_engine
12
  from lynxscribe.core.models.prompts import Function, Tool
13
 
@@ -32,12 +33,12 @@ def gradio_chat(agent: dict):
32
 
33
  async def respond(message, chat_history):
34
  await ag.init()
35
- response = await ag.llm.ask([*chat_history, {"role": "user", "content": message}])
36
- answer = ""
37
- async for chunk in response.answer:
38
- if chunk.choices and chunk.choices[0].delta.content:
39
- answer += chunk.choices[0].delta.content
40
- yield answer
41
 
42
  ag = agent_from_dict(agent, default_model={"name": "openai", "model_name": "gpt-4.1-nano"})
43
  with gr.Blocks() as demo:
@@ -75,7 +76,7 @@ def agent(
75
  models = []
76
  for tool in tools:
77
  if "model" in tool:
78
- models.append(tool["llm"]["model"])
79
  elif tool.get("extra_prompt"):
80
  prompt.append(tool["extra_prompt"])
81
  params = {
@@ -243,3 +244,42 @@ class Agent:
243
  ),
244
  )
245
  return ask
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
8
  from lynxscribe.components.task_solver import TaskSolver
9
  from lynxscribe.components.tool_use import LLM
10
  from lynxscribe.components.mcp_client import MCPClient
11
+ from lynxscribe.components.rag.rag_chatbot import RAGChatbotResponse
12
  from lynxscribe.core.llm.base import get_llm_engine
13
  from lynxscribe.core.models.prompts import Function, Tool
14
 
 
33
 
34
async def respond(message, chat_history):
    """Stream the agent's reply (including tool-call progress) to the Gradio chat UI.

    NOTE(review): this is a closure over ``ag``, the agent constructed later in
    the enclosing ``gradio_chat`` scope — Gradio only invokes it after setup.
    """
    # Initialize the agent before asking; presumably idempotent — TODO confirm.
    await ag.init()
    # Ask the LLM with the full prior history plus the new user message.
    # expose_tool_calls=True surfaces tool invocations in the response stream.
    response = await ag.llm.ask(
        [*chat_history, {"role": "user", "content": message}],
        expose_tool_calls=True,
    )
    # Re-yield each progressively updated message list so the UI streams live.
    async for messages in rag_chatbot_response_to_gradio(response):
        yield messages
42
 
43
  ag = agent_from_dict(agent, default_model={"name": "openai", "model_name": "gpt-4.1-nano"})
44
  with gr.Blocks() as demo:
 
76
  models = []
77
  for tool in tools:
78
  if "model" in tool:
79
+ models.append(tool["model"])
80
  elif tool.get("extra_prompt"):
81
  prompt.append(tool["extra_prompt"])
82
  params = {
 
244
  ),
245
  )
246
  return ask
247
+
248
+
249
async def rag_chatbot_response_to_gradio(response: RAGChatbotResponse):
    """Translate a streaming RAGChatbotResponse into Gradio chat messages.

    The Gradio chatbot interface expects a list of ChatMessage objects to be
    yielded. We keep mutating and re-yielding the same list to stream the
    response. Tool calls are shown as separate messages carrying ``metadata``
    with a title and a "pending"/"done" status.
    """
    import gradio as gr

    answer: list[gr.ChatMessage] = []
    yield answer
    async for chunk in response.answer:
        if not chunk.choices:
            continue
        delta = chunk.choices[0].delta
        if delta.tool_calls:
            for tool_call in delta.tool_calls:
                # Only open a new tool message when we are not already inside
                # one — messages with metadata represent tool activity.
                if not answer or not answer[-1].metadata:
                    answer.append(
                        gr.ChatMessage(
                            role="assistant",
                            content="",
                            metadata=dict(
                                title=f"Using tool: {tool_call.function.name}",
                                status="pending",
                            ),
                        )
                    )
        if delta.content is None:
            pass
        elif answer and bool(answer[-1].metadata) == (delta.role == "tool"):
            # This chunk continues the current message: tool output extends a
            # tool (metadata) message, normal text extends a plain message.
            answer[-1].content += delta.content
        else:
            if answer and answer[-1].metadata:
                # Content switched back to plain text — the tool is finished.
                answer[-1].metadata["status"] = "done"
            answer.append(gr.ChatMessage(role="assistant", content=delta.content))
        yield answer
    # Bug fix: if the stream ends while a tool message is still open, it was
    # left "pending" forever (perpetual spinner in the UI). Close it out.
    if answer and answer[-1].metadata and answer[-1].metadata.get("status") == "pending":
        answer[-1].metadata["status"] = "done"
        yield answer