import os

from autogen import ConversableAgent

# LLM endpoint configuration for AutoGen's Ollama client.
# Model and host can be overridden via environment variables so the script
# works against any Ollama instance without editing source; the defaults
# preserve the original hardcoded values.
config_list = [
    {
        # Model name must match the Ollama model tag exactly (e.g. "llama3.1:8b").
        "model": os.environ.get("OLLAMA_MODEL", "qwen2.5-coder:latest"),
        # 'ollama' selects AutoGen's Ollama client class.
        "api_type": "ollama",
        # Disable token streaming; generate_reply returns the full completion.
        "stream": False,
        # Base URL of the Ollama server.
        "client_host": os.environ.get("OLLAMA_HOST", "http://192.168.99.142:11434"),
    }
]
# Build the agent's LLM configuration from the endpoint list above.
llm_config = {"config_list": config_list}

# A single conversational agent with everything but LLM replies disabled:
# no code execution, no registered tools, and no human-in-the-loop prompts.
agent = ConversableAgent(
    "chatbot",
    llm_config=llm_config,
    code_execution_config=False,  # Turn off code execution, by default it is off.
    function_map=None,  # No registered functions, by default it is None.
    human_input_mode="NEVER",  # Never ask for human input.
)

# Send one user message and print the model's reply.
prompt = [{"content": "Tell me a joke.", "role": "user"}]
reply = agent.generate_reply(messages=prompt)
print(reply)