package com.liu.ai.chat;

import com.liu.ai.advisor.SimpleLoggerAdvisor;
import com.liu.ai.tools.AnalysisDataTools;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import org.springframework.ai.chat.client.ChatClient;
import org.springframework.ai.ollama.OllamaChatModel;
import org.springframework.ai.ollama.api.OllamaApi;
import org.springframework.ai.ollama.api.OllamaOptions;

/**
 * Demo entry point that streams a chat completion from a local Ollama server.
 *
 * <p>The request is logged through {@link SimpleLoggerAdvisor}, and
 * {@link AnalysisDataTools} is registered so the model can invoke internal
 * tool calls during generation.
 */
public class OllamaChat {

    /** Upper bound (seconds) to wait for the streamed response before exiting. */
    private static final long STREAM_TIMEOUT_SECONDS = 60;

    public static void main(String[] args) throws InterruptedException {
        // REST endpoint of a locally running Ollama instance (default port).
        OllamaApi ollamaApi = OllamaApi.builder()
                .baseUrl("http://localhost:11434")
                .build();

        // Relatively high temperature (0.9) favors more varied output.
        OllamaOptions options = OllamaOptions.builder()
                .model("llama3.2:latest")
                .temperature(0.9)
                .build();

        OllamaChatModel chatModel = OllamaChatModel.builder()
                .ollamaApi(ollamaApi)
                .defaultOptions(options)
                .build();

        // Released when the stream completes or errors, so the JVM exits as
        // soon as the response is done instead of always sleeping 60 s.
        CountDownLatch done = new CountDownLatch(1);

        ChatClient.builder(chatModel).build()
                .prompt("现在是什么时间？ ")
                .advisors(new SimpleLoggerAdvisor())
                // Internal system tool call; later this could invoke external
                // interfaces or other tools via the MCP protocol.
                .tools(new AnalysisDataTools())
                .stream()
                .content()
                .subscribe(
                        System.out::print,
                        error -> {
                            // Surface stream failures instead of hanging until timeout.
                            error.printStackTrace();
                            done.countDown();
                        },
                        done::countDown);

        // Block the main thread until the stream terminates, bailing out
        // after the timeout if it never does.
        done.await(STREAM_TIMEOUT_SECONDS, TimeUnit.SECONDS);
    }
}
