// Chat-style usage: multiple rounds of input/output, streamed back via callback.
// import { ChatOllama } from '@langchain/ollama';
const { ChatOllama } = require('@langchain/ollama');
// Ollama model tag used for all chat completions from this module.
const model = 'codegeex4:9b';

// Shared chat client; uses the default local Ollama endpoint since no baseUrl is given.
const llm = new ChatOllama({ model });

/**
 * Streams a chat completion for `input`, invoking `cb` once per partial chunk.
 *
 * @param {*} input - Prompt forwarded verbatim to `llm.stream()` (string or
 *   LangChain message list — not validated here).
 * @param {(text: string) => void} cb - Receives each partial text chunk, then
 *   the sentinel string '[DONE]' after the final chunk.
 */
const chatData = async (input, cb) => {
    // llm.stream() resolves to an async iterable of message chunks.
    const stream = await llm.stream(input);
    for await (const chunk of stream) {
        if (chunk?.response_metadata?.done) {
            // Fix: the final (done) chunk can still carry text; the previous
            // code dropped it silently. Forward it before signalling completion.
            if (chunk.content) {
                cb(chunk.content);
            }
            cb('[DONE]');
        } else {
            cb(chunk.content);
        }
    }
};

// Public API: the streaming chat helper.
module.exports = { chatData };
