import autogen
from src.agent.config import llm_config


def analyze_requirement(prompt: str) -> str:
    """
    Analyze the user's requirement.

    This is a stand-in implementation; a production version would call an
    LLM or apply more elaborate analysis logic.

    Args:
        prompt: The raw requirement text supplied by the user.

    Returns:
        A placeholder requirement document.
    """
    # Echo the prompt to simulate the analysis step.
    print("Analyzing prompt: " + prompt)
    return "这是分析后的需求文档..."


def analyze_requirement_autogen(prompt: str) -> str:
    """
    Analyze the user's requirement using an autogen assistant agent.

    Creates an AssistantAgent acting as a requirement analyst and a fully
    automated UserProxyAgent, runs a single-round chat with the given
    prompt, and returns the chat summary.

    Args:
        prompt: The raw requirement text to analyze.

    Returns:
        The summary text of the chat result; an empty string if autogen
        produced no summary.
    """
    assistant = autogen.AssistantAgent(
        name="需求分析师",
        llm_config=llm_config,
        system_message="你是一个专业的需求分析师, 你需要分析用户的需求, 并输出一份完整的需求文档。",
    )

    user_proxy = autogen.UserProxyAgent(
        name="user_proxy",
        human_input_mode="NEVER",  # fully automated: never prompt a human
        max_consecutive_auto_reply=1,  # single exchange only
        # Message "content" can be an explicit None (e.g. tool-call
        # messages), so `x.get("content", "")` alone is not safe — the
        # default only applies when the key is missing. Coalesce None to ""
        # before calling str methods.
        is_termination_msg=lambda x: (x.get("content") or "").rstrip().endswith("TERMINATE"),
        code_execution_config=False,  # the proxy must not execute generated code
    )

    chat_result = user_proxy.initiate_chat(
        assistant,
        message=f"请分析以下需求：\n\n{prompt}",
    )

    # ChatResult.summary may be None when no summarizable reply was
    # produced; normalize so the declared `-> str` contract holds.
    return chat_result.summary or ""