from flaml import autogen

# Load API configurations from the OAI_CONFIG_LIST file (or environment variable),
# keeping only the GPT-4 endpoints named in filter_dict.
config_list = autogen.config_list_from_json(
    "OAI_CONFIG_LIST",
    filter_dict={
        "model": ["gpt4", "gpt-4-32k", "gpt-4-32k-0314", "gpt-4-32k-v0314"],
    },
)
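
# For reference only: an illustrative sketch of what the OAI_CONFIG_LIST JSON could look like
# for this filter. The keys follow flaml.autogen's config format; the values are placeholders,
# not real credentials or endpoints.
#
# [
#     {"model": "gpt4", "api_key": "<your OpenAI API key>"},
#     {"model": "gpt-4-32k", "api_key": "<your Azure OpenAI key>", "api_base": "<endpoint URL>",
#      "api_type": "azure", "api_version": "<API version>"}
# ]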

# Shared LLM inference settings: "seed" enables cached, reproducible completions,
# and temperature 0 keeps the replies as deterministic as possible.
llm_config = {
    "request_timeout": 600,
    "seed": 42,
    "config_list": config_list,
    "temperature": 0,
}

# The assistant agent plans the task and writes code for the user proxy to run.
assistant = autogen.AssistantAgent(
    name="assistant",
    llm_config=llm_config,
)

# The user proxy agent executes the assistant's code in the "web" working directory and,
# with human_input_mode="TERMINATE", asks for human feedback only when a termination
# message arrives or the auto-reply limit is reached.
user_proxy = autogen.UserProxyAgent(
    name="user_proxy",
    human_input_mode="TERMINATE",
    max_consecutive_auto_reply=10,
    is_termination_msg=lambda x: x.get("content", "").rstrip().endswith("TERMINATE"),
    code_execution_config={"work_dir": "web"},
    llm_config=llm_config,
    system_message="""Reply TERMINATE if the task has been solved at full satisfaction.
Otherwise, reply CONTINUE, or the reason why the task is not solved yet.""",
)

# Start the conversation by sending the task to the assistant.
user_proxy.initiate_chat(
    assistant,
    message="""
Tell me about this project and the library, and then tell me what I can use it for: https://www.gradio.app/guides/quickstart
""",
)
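
# When run, the assistant typically replies with code (for example, Python or shell commands to
# fetch and summarize the page), user_proxy executes that code inside ./web and reports the
# output back, and the exchange repeats until a reply ending in "TERMINATE" is detected by
# is_termination_msg or the 10-auto-reply limit is hit.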