import { Ollama,ChatOllama } from"@langchain/ollama"
import { SystemMessage, HumanMessage } from"@langchain/core/messages"; 


/**
 * Streams a completion from a locally running Ollama server and writes each
 * chunk to stdout as it arrives (no buffering of the full response).
 *
 * The server URL and model name default to the original hard-coded values but
 * can be overridden via the OLLAMA_BASE_URL and OLLAMA_MODEL environment
 * variables, so the script works against other hosts/models without edits.
 *
 * @returns a promise that resolves once the stream is fully consumed
 * @throws whatever the Ollama client raises (e.g. connection refused when no
 *         server is listening on the configured base URL)
 */
async function main(): Promise<void> {
    const ollamaLlm = new Ollama({
        // ?? keeps the original defaults when the env vars are unset.
        baseUrl: process.env.OLLAMA_BASE_URL ?? "http://127.0.0.1:11434",
        model: process.env.OLLAMA_MODEL ?? "deepseek-r1:7b",
    });

    // Fixed prompt typo: "你谁" -> "你是谁" ("Who are you, and what are you
    // good at?").
    const stream = await ollamaLlm.stream("你是谁，擅长什么？");

    // The stream is an async iterable of string chunks; write them through
    // verbatim so the answer appears incrementally.
    for await (const chunk of stream) {
        process.stdout.write(chunk);
    }
}

// Entry point: run the script and report any rejection instead of letting it
// become an unhandled promise rejection. `void` marks the fire-and-forget
// intent explicitly.
void main().catch((error: unknown) => {
    console.error("程序执行出错:");
    console.error(error);
});