package com.zjj.lbw.ai.ollama;

import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.model.StreamingResponseHandler;
import dev.langchain4j.model.language.LanguageModel;
import dev.langchain4j.model.ollama.OllamaLanguageModel;
import dev.langchain4j.model.ollama.OllamaStreamingChatModel;
import dev.langchain4j.model.openai.OpenAiStreamingChatModel;
import dev.langchain4j.model.output.Response;

import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;

/**
 * Demo: streams a chat completion from a locally deployed Ollama model.
 *
 * <p>Requires a running Ollama server on {@code localhost:11434} with the
 * {@code llama3:8b} model pulled. Tokens are printed to stdout as they arrive.
 */
public class OllamaChatTest {

    public static void main(String[] args) throws InterruptedException {
        // NOTE(review): OllamaStreamingChatModel talks to Ollama's native API,
        // so the base URL must NOT include the OpenAI-compat "/v1/" path.
        OllamaStreamingChatModel model = OllamaStreamingChatModel.builder()
                .baseUrl("http://localhost:11434")
                .modelName("llama3:8b")
                .build();

        // generate(...) is asynchronous: without waiting, main could return
        // before the stream finishes. Block until onComplete/onError fires.
        CountDownLatch done = new CountDownLatch(1);

        model.generate("你是谁", new StreamingResponseHandler<AiMessage>() {
            @Override
            public void onNext(String token) {
                // Print each partial token without a newline to form a
                // continuous streamed response.
                System.out.print(token);
            }

            @Override
            public void onComplete(Response<AiMessage> response) {
                System.out.println();
                done.countDown();
            }

            @Override
            public void onError(Throwable error) {
                // Errors go to stderr so they are not interleaved with tokens.
                System.err.println(error);
                done.countDown();
            }
        });

        // Bounded wait so a hung server cannot block the JVM forever.
        if (!done.await(2, TimeUnit.MINUTES)) {
            System.err.println("Timed out waiting for the streaming response");
        }
    }
}
