from langchain_huggingface import HuggingFacePipeline as HF
from subprocess import Popen, PIPE as P
from langchain_experimental.tools.python.tool import PythonREPLTool as PYT
from langchain.agents import load_tools, initialize_agent as Agent, AgentExecutor as Ex, AgentType as Type
from langchain.agents.agent_toolkits import create_retriever_tool as crt
from langchain_community.agent_toolkits import FileManagementToolkit as FMT
from langchain.tools import Tool
from langchain.memory import ConversationBufferMemory as MEM, RedisChatMessageHistory as HIS
from langchain.schema import SystemMessage as SM,HumanMessage as HM, AIMessage as AM
from langchain import hub
import os
from langchain.retrievers import WikipediaRetriever as Wiki
import gradio as gr
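# Chat display panel reused by the gr.ChatInterface defined at the bottom of the file.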
chatbot = gr.Chatbot(
    label="SYAI4.1",
    show_copy_button=True,
    layout="panel"
)
def terminal(c):
    # Run a shell command and return combined stdout/stderr as text.
    a = Popen(c, shell=True, stdin=P, stdout=P, stderr=P)
    out, err = a.communicate()
    return (out + err).decode(errors="replace")
# Local Hugging Face pipeline LLM; from_model_id requires the pipeline task (text-generation here).
llm = HF.from_model_id(model_id="peterpeter8585/syai4.1", task="text-generation")
# Agent tools: Python REPL, HTTP requests, math, DuckDuckGo search,
# the shell helper above, and a Korean-Wikipedia retriever.
tools = [PYT()]
tools.extend(load_tools(["requests_all"], allow_dangerous_tools=True))
tools.extend(load_tools(["llm-math", "ddg-search"], llm=llm))
tools.append(Tool.from_function(func=terminal, name="terminal",
                                description="Suitable for running terminal commands"))
tools.append(crt(name="wiki",
                 description="Searches Wikipedia and retrieves information",
                 retriever=Wiki(lang="ko", top_k_results=1)))
def chat(message,
         history: list[tuple[str, str]],
         system_message,
         max_tokens,
         temperature,
         top_p,
         chat_session=""):
    # max_tokens / temperature / top_p / chat_session come from the UI but are
    # not yet wired into the generation pipeline.
    # Replay the system prompt and prior turns into the agent's conversation memory.
    memory = MEM(memory_key="history", return_messages=True)
    memory.chat_memory.add_message(SM(content=system_message + " And, your name is Chatchat."))
    for user_turn, ai_turn in history:
        if user_turn:
            memory.chat_memory.add_message(HM(content=user_turn))
        if ai_turn:
            memory.chat_memory.add_message(AM(content=ai_turn))
    agent = Agent(llm=llm, tools=tools, memory=memory, verbose=True,
                  agent=Type.CHAT_ZERO_SHOT_REACT_DESCRIPTION)
    # AgentExecutor.invoke expects a dict keyed by "input"; yield only the final answer text.
    yield agent.invoke({"input": message})["output"]
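# Gradio ChatInterface: wires the chat() generator to the UI and exposes the
# system message, generation sliders, and chat id as additional inputs.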
ai1=gr.ChatInterface(
    chat,
    chatbot=chatbot,
    additional_inputs=[
        gr.Textbox(value="You are a helpful assistant.", label="System message",  interactive=True),
        gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
        gr.Slider(minimum=0.1, maximum=4.0, value=0.1, step=0.1, label="Temperature"),
        gr.Slider(
            minimum=0.1,
            maximum=1.0,
            value=0.1,
            step=0.05,
            label="Top-p (nucleus sampling)",
        ),
        gr.Textbox(label="chat_id(please enter the chat id!)")
    ],
    
)
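# launch() starts the local Gradio web server (pass share=True for a public link).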
ai1.launch()