from langchain import hub
from langchain.agents import AgentExecutor, create_openai_tools_agent, load_tools
from langchain_openai import ChatOpenAI
from gradio import ChatMessage
import gradio as gr
import os

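# If the required API keys are not configured (as environment variables / Space
# secrets), show a setup notice instead of the agent UI. demo.launch() blocks
# here, so the agent below is never constructed without the keys.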
if not (os.getenv("SERPAPI_API_KEY") and os.getenv("OPENAI_API_KEY")):
    with gr.Blocks() as demo:
        gr.Markdown("""
# Chat with a LangChain Agent πŸ¦œβ›“οΈ and see its thoughts πŸ’­

In order to run this Space, duplicate it and add the following Space secrets:

* SERPAPI_API_KEY - create an account at serpapi.com and get an API key
* OPENAI_API_KEY - create an OpenAI account and get an API key
""")
    demo.launch()

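# The LLM that drives the agent; streaming=True lets intermediate output be
# surfaced as it is produced instead of only after the run finishes.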
model = ChatOpenAI(temperature=0, streaming=True)

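# Give the agent a single tool: web search via SerpAPI (uses SERPAPI_API_KEY).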
tools = load_tools(["serpapi"])

# Get the prompt to use - you can modify this!
prompt = hub.pull("hwchase17/openai-tools-agent")
# print(prompt.messages) -- to see the prompt
agent = create_openai_tools_agent(
    model.with_config({"tags": ["agent_llm"]}), tools, prompt
)
agent_executor = AgentExecutor(agent=agent, tools=tools).with_config(
    {"run_name": "Agent"}
)


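# Stream the agent's run into the chatbot: each intermediate tool call ("steps")
# becomes an assistant message whose metadata title is rendered by Gradio as a
# collapsible "thought" bubble, and the final answer arrives in the "output" chunk.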
async def interact_with_langchain_agent(prompt, messages):
    messages.append(ChatMessage(role="user", content=prompt))
    yield messages
    async for chunk in agent_executor.astream({"input": prompt}):
        if "steps" in chunk:
            for step in chunk["steps"]:
                messages.append(
                    ChatMessage(
                        role="assistant",
                        content=step.action.log,
                        metadata={"title": f"πŸ› οΈ Used tool {step.action.tool}"},
                    )
                )
                yield messages
        if "output" in chunk:
            messages.append(ChatMessage(role="assistant", content=chunk["output"]))
            yield messages


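# Chat UI: a messages-format Chatbot plus a textbox that feeds the agent handler.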
with gr.Blocks() as demo:
    gr.Markdown("# Chat with a LangChain Agent πŸ¦œβ›“οΈ and see its thoughts πŸ’­")
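    # `msg_format="messages"` makes the Chatbot accept ChatMessage objects; note
    # that later Gradio releases rename this parameter to `type`.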
    chatbot_2 = gr.Chatbot(
        msg_format="messages",
        label="Agent",
        avatar_images=(
            None,
            "https://em-content.zobj.net/source/twitter/141/parrot_1f99c.png",
        ),
    )
    input_2 = gr.Textbox(lines=1, label="Chat Message")
    input_2.submit(interact_with_langchain_agent, [input_2, chatbot_2], [chatbot_2])

demo.launch()