package com.zhm.langchain4j;

import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.chat.StreamingChatLanguageModel;
import dev.langchain4j.model.chat.response.ChatResponse;
import dev.langchain4j.model.chat.response.StreamingChatResponseHandler;
import dev.langchain4j.model.ollama.OllamaChatModel;
import dev.langchain4j.model.ollama.OllamaStreamingChatModel;
import jakarta.annotation.PostConstruct;
import java.util.concurrent.CountDownLatch;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
import org.springframework.util.StringUtils;

/**
 * Spring component that owns the langchain4j Ollama clients.
 *
 * <p>Builds one blocking {@link ChatLanguageModel} and one
 * {@link StreamingChatLanguageModel} against the same Ollama server during
 * bean initialization, and exposes them via getters. The static {@code t}/
 * {@code t2}/{@code main} methods are ad-hoc smoke tests that construct the
 * client outside Spring (so {@code baseUrl} is {@code null} and falls back to
 * {@link #DEFAULT_HOST}).
 */
@Component
public class AiModelClient {

    /** Host used when no {@code ollama.base-url} property is set (matches the @Value default). */
    private static final String DEFAULT_HOST = "external-ollama";

    /** Standard Ollama HTTP port. */
    private static final int OLLAMA_PORT = 11434;

    /** Model name served by the Ollama instance; shared by both clients. */
    private static final String MODEL_NAME = "llama3.2:1b";

    @Value("${ollama.base-url:external-ollama}")
    private String baseUrl;

    private ChatLanguageModel model;
    private StreamingChatLanguageModel streamingChatModel;

    /**
     * Builds both Ollama clients once configuration has been injected.
     *
     * <p>Also guards against a missing/blank {@code baseUrl} for the manual
     * (non-Spring) construction path used by {@link #t()} and {@link #t2()}.
     */
    @PostConstruct
    public void init() {
        // hasText() also rejects whitespace-only values; the previously used
        // StringUtils.isEmpty is deprecated in Spring.
        if (!StringUtils.hasText(baseUrl)) {
            baseUrl = DEFAULT_HOST;
        }
        String url = "http://" + baseUrl + ":" + OLLAMA_PORT;
        System.out.println("url:" + url);

        model = OllamaChatModel.builder()
                .baseUrl(url)
                .temperature(0.0) // deterministic output for reproducible tests
                .logRequests(true)
                .logResponses(true)
                .modelName(MODEL_NAME)
                .build();

        streamingChatModel = OllamaStreamingChatModel.builder()
                .baseUrl(url)
                .temperature(0.0)
                .logRequests(true)
                .logResponses(true)
                .modelName(MODEL_NAME)
                .build();
    }

    /** @return the blocking chat client built by {@link #init()} */
    public ChatLanguageModel getModel() {
        return model;
    }

    /** @return the streaming chat client built by {@link #init()} */
    public StreamingChatLanguageModel getStreamModel() {
        return streamingChatModel;
    }

    /** Smoke test: one blocking chat round-trip, answer printed to stdout. */
    public static void t() {
        AiModelClient aiModelClient = new AiModelClient();
        aiModelClient.init();
        ChatLanguageModel model = aiModelClient.getModel();
        String answer = model.chat("Say 'Hello World'");
        System.out.println(answer);
    }

    /**
     * Smoke test: one streaming chat round-trip, tokens printed as they
     * arrive. Blocks until the stream completes or fails, so callers (e.g.
     * {@link #main(String[])}) need no busy-wait to keep the JVM alive.
     */
    public static void t2() {
        AiModelClient aiModelClient = new AiModelClient();
        aiModelClient.init();
        StreamingChatLanguageModel model = aiModelClient.getStreamModel();
        // Released by either terminal callback so t2() can return.
        CountDownLatch done = new CountDownLatch(1);
        model.chat("what can you do", new StreamingChatResponseHandler() {
            @Override
            public void onPartialResponse(String s) {
                System.out.println("onNext: " + s);
            }

            @Override
            public void onCompleteResponse(ChatResponse chatResponse) {
                System.out.println("onComplete: " + chatResponse.toString());
                done.countDown();
            }

            @Override
            public void onError(Throwable throwable) {
                throwable.printStackTrace();
                // Must release the latch on failure too, or t2() would hang.
                done.countDown();
            }
        });
        try {
            done.await();
        } catch (InterruptedException e) {
            // Restore the interrupt flag rather than swallowing it.
            Thread.currentThread().interrupt();
        }
    }

    public static void main(String[] args) {
        // t2() now blocks until the stream finishes; the previous
        // `while (true);` busy-wait pegged a CPU core and never exited.
        t2();
    }
}
