import os

from dotenv import load_dotenv
from langchain_ollama import ChatOllama

if __name__ == '__main__':
    # Load variables from a local .env file; override=True lets the .env
    # values take precedence over anything already in the environment.
    load_dotenv(override=True)

    # Model name and server URL are configurable via the environment
    # (OLLAMA_MODEL / OLLAMA_BASE_URL); the defaults preserve the original
    # hard-coded values so existing setups keep working. Previously these
    # were hard-coded, which made the load_dotenv() call above pointless.
    model = ChatOllama(
        model=os.getenv("OLLAMA_MODEL", "qwen3:latest"),
        base_url=os.getenv("OLLAMA_BASE_URL", "http://192.168.97.217:11434"),
    )

    # Send a single prompt ("Hello, please introduce yourself" in Chinese)
    # and print the model's text reply.
    response = model.invoke("你好，请介绍一下你自己")
    print(response.content)
