
from autogen import ConversableAgent
# LLM endpoint configuration for AutoGen's Ollama client.
# Each entry describes one model/server; agents pick from this list.
config_list = [
    {
        # Model name must match the Ollama model tag exactly.
        # Alternatives tried previously:
        #   "deepseek-r1:32b", "llama3.1:latest"
        "model": "qwen2.5-coder:latest",
        # 'ollama' selects AutoGen's Ollama client class.
        "api_type": "ollama",
        # Ollama server address (host:port of the local/remote daemon).
        "client_host": "http://192.168.99.142:11434",
        # Disable token streaming; responses arrive as complete messages.
        # (Was listed twice in an earlier revision — duplicate key removed.)
        "stream": False,
        # -1 lets the model generate until it decides to stop.
        "num_predict": -1,
        "repeat_penalty": 1.1,
        # Fixed seed for reproducible generations.
        "seed": 42,
        "temperature": 0.5,
        "top_k": 50,
        "top_p": 0.8,
    }
]
# Agent "Cathy": one half of the comedian duo. Fully automated —
# human_input_mode="NEVER" means the framework never pauses for user input.
cathy = ConversableAgent(
    name="cathy",
    human_input_mode="NEVER",
    llm_config={"config_list": config_list},
    system_message="Your name is Cathy and you are a part of a duo of comedians.",
)

# Agent "Joe": the other half of the duo, configured identically to Cathy
# apart from the persona in the system message. No human input requested.
joe = ConversableAgent(
    name="joe",
    human_input_mode="NEVER",
    llm_config={"config_list": config_list},
    system_message="Your name is Joe and you are a part of a duo of comedians.",
)
# Joe opens the conversation with Cathy; max_turns=2 caps the exchange
# at two back-and-forth turns before the chat ends.
opening_message = "Cathy, tell me a joke in chinese."
result = joe.initiate_chat(cathy, message=opening_message, max_turns=2)