
var { Ollama } = require('ollama')
// qwen2.5:7b                           845dbda0ea48    4.7 GB    6 hours ago
// deepseek-r1:8b-llama-distill-q8_0    0db4dcd4c434    8.5 GB    8 hours ago
// qwen2.5:14b                          7cdf5a0187d5    9.0 GB    8 hours ago
// llama3.1:8b                          46e0c10c039e    4.9 GB    9 hours ago
// qwen2.5:1.5b                         65ec06548149    986 MB    20 hours ago
// qwen2.5:3b                           357c53fb659c    1.9 GB    20 hours ago
// qwen2.5:0.5b                         a8b0c5157701    397 MB    22 hours ago
// deepseek-llm:7b                      9aab369a853b    4.0 GB    24 hours ago
// deepseek-r1:14b                      ea35dfe18182    9.0 GB    31 hours ago
// deepseek-r1:8b                       28f8fd6cdc67    4.9 GB    44 hours ago
// deepseek-coder-v2:16b                63fb193b3a9b    8.9 GB    2 days ago
// qwen2:7b                             dd314f039b9d    4.4 GB    2 days ago
// qwen:7b                              2091ee8c8d8f    4.5 GB    2 days ago
// llama3.2:3b                          a80c4f17acd5    2.0 GB    2 days ago
// qwen:4b                              d53d04290064    2.3 GB    2 days ago
// llama3.2:1b                          baf6a787fdff    1.3 GB    2 days ago
// qwen2:1.5b                           f6daf2b25194    934 MB    2 days ago
// qwen:1.8b                            b6e8ec2e7126    1.1 GB    2 days ago
// qwen2:0.5b                           6f48b936a09f    352 MB    2 days ago
// qwen:0.5b                            b5dc5e784f2a    394 MB    2 days ago
// deepseek-r1:7b                       0a8c26691023    4.7 GB    6 days ago
// deepseek-r1:1.5b                     a42b25d8c10a    1.1 GB    7 days ago

// Ad-hoc experiment runner against a local Ollama server.
// Flip the `if (true)` toggle below to switch between the streaming
// line-break-classification prompt and the embeddings/chat probe.
const aa = async () => {
    // 10.0.2.2 is the Android-emulator alias for the host machine;
    // swap in the LAN address below when testing from another device.
    const ollama = new Ollama({ host: 'http://10.0.2.2:11434' });
    //const ollama = new Ollama({ host: 'http://192.168.1.40:11434' });
    if (true) {
        const response = await ollama.chat({
            //model: 'llama3.1',
            //model: 'deepseek-r1:1.5b',
            model: 'qwen2:7b',

            // NOTE: `\\n` (not `\n`) so the prompt contains the literal two
            // characters "\n" that the instructions refer to — a bare \n in a
            // template literal would render as a real newline and garble the sentence.
            messages: [{
                role: 'user', content: `
你是一个高级语言模型，擅长分析文本的换行方式。我会给你一段文本，其中包含换行符 \\n，你需要判断换行符是 自然断句的换行（如句号、问号、感叹号后）还是 被截断的换行（如句子中途被打断）。
你的任务是：
分析两行英文文本，判断它们之间的换行符的类型。
如果换行是 自然断句（例如在句号、问号、感叹号后换行），请标记 "自然换行"。
如果换行是 被截断（例如一个句子中途断开），请标记 "被截断换行"。
你可以参考标点符号、上下文语义来进行判断。
这是两行英文文本：
“Who cares?” said Ron irritably, while his teacup stood drunkenly
again, trembling violently at the knees. “Montague shouldn’t have
      ` }],
            stream: true,
            options: { temperature: 0 } // deterministic output for comparison runs
        });
        // Print the reply incrementally as chunks arrive.
        for await (const part of response) {
            console.log(part.message.content);
        }
    } else {
        // Probe the embeddings endpoint with a sample sentence.
        const response1 = await ollama.embeddings(
            {
                model: 'qwen2:7b',
                prompt: "She turned away, leaving Professor Trelawney standing rooted to the spot, her chest heaving.",
                options: { temperature: 0, embedding_only: true }
            });
        console.log(response1);

        // Non-streaming chat probe with a throwaway prompt.
        const response = await ollama.chat({
            //model: 'llama3.1',
            //model: 'deepseek-r1:1.5b',
            model: 'qwen2:7b',

            messages: [{
                role: 'user', content: `fdsfds` }],
            //stream: true,
            options: { temperature: 0, embedding_only: true }
        });
        console.log(response.message.content);
        // var all = "";
        // for await (const part of response) {
        //     all += part.message.content;
        //     console.log(part.message.content);
        // }
        // console.log(all);
    }
};


/**
 * Translate `word` into Chinese using a local Ollama model.
 *
 * @param {string} word - Text to translate.
 * @param {string} model - Ollama model name (e.g. 'qwen2:7b').
 * @param {string} [host] - Ollama server URL; defaults to the
 *   Android-emulator host alias used elsewhere in this file.
 * @returns {Promise<string>} The model's reply text.
 */
const translate = async (word, model, host = 'http://10.0.2.2:11434') => {
    const ollama = new Ollama({ host });

    const response = await ollama.chat({
        //model: 'llama3.1',
        //model: 'deepseek-r1:1.5b',
        model,

        messages: [{
            role: 'user', content: `
翻译成中文：
${word}
` }],
        options: { temperature: 0 } // deterministic translation
    });
    return response.message.content;
};

module.exports = { translate };

// Kick off the ad-hoc experiment. The promise must not be left floating:
// an unreachable Ollama server would otherwise surface as an unhandled
// rejection, which newer Node.js versions treat as a fatal error.
aa().catch((err) => console.error(err));