from autogen_agentchat.agents import AssistantAgent, UserProxyAgent
from autogen_agentchat.base import TaskResult
from autogen_agentchat.conditions import TextMentionTermination, ExternalTermination, TextMessageTermination
from autogen_agentchat.teams import RoundRobinGroupChat
from autogen_agentchat.ui import Console
from autogen_ext.models.openai import OpenAIChatCompletionClient
# Chat-completion client pointing at a local Ollama server through its
# OpenAI-compatible /v1 endpoint. Ollama does not check the API key, so
# "ollama" is just a placeholder value.
model_client = OpenAIChatCompletionClient(model="modelscope.cn/Qwen/Qwen2.5-7B-Instruct-GGUF:q5_k_m",
                                                 model_info={
                                                     "vision": False,
                                                     "function_calling": True,
                                                     # NOTE(review): the model string says Qwen2.5 but the declared
                                                     # family is "Qwen3" — confirm which is intended.
                                                     "family": "Qwen3",
                                                     "structured_output": True,
                                                     "json_output": True,
                                                 },
                                                 api_key="ollama",
                                          base_url="http://127.0.0.1:11434/v1")
assistant = AssistantAgent("assistant", model_client=model_client)
user_proxy = UserProxyAgent("user_proxy", input_func=input)  # Use input() to read the user's reply from the console.

# Termination condition: the conversation ends when any message mentions "APPROVE".
termination = TextMentionTermination("APPROVE")

# Create the team: the two agents take turns in round-robin order until termination fires.
team = RoundRobinGroupChat([assistant, user_proxy], termination_condition=termination)

# Run the conversation and stream its output to the console.

async def main() -> None:
    """Run the round-robin chat to completion, streaming output to the console.

    The task asks the assistant for a quatrain about the ocean; the run ends
    when the team's termination condition fires (the user types "APPROVE").
    """
    try:
        await Console(team.run_stream(task="写一首关于海洋的绝句."))
    finally:
        # Fix: the model client was never closed, leaking the underlying HTTP
        # session. Close it even if the streamed run raises.
        await model_client.close()

if __name__ == "__main__":
    import asyncio
    asyncio.run(main())