package org.liu.knowledge.chat;

import dev.langchain4j.data.message.UserMessage;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.chat.request.ChatRequest;
import dev.langchain4j.model.chat.request.ChatRequestParameters;
import dev.langchain4j.model.chat.response.ChatResponse;
import dev.langchain4j.model.chat.response.StreamingChatResponseHandler;
import dev.langchain4j.model.ollama.OllamaChatModel;
import dev.langchain4j.model.ollama.OllamaStreamingChatModel;

import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;

/**
 * Demo of chatting with a locally running Ollama server via LangChain4j,
 * showing both the blocking and the streaming chat clients.
 *
 * <p>Requires an Ollama instance listening at {@link #BASE_URL} with the
 * model named {@link #MODEL_NAME} already pulled (e.g. {@code ollama pull qwen2.5:14b}).
 */
class OllamaChat {

  /** Local Ollama model to chat with; swap for any other pulled model (e.g. "deepseek-r1:14b"). */
  static final String MODEL_NAME = "qwen2.5:14b";

  /** Base URL of the local Ollama HTTP endpoint (Ollama's default port). */
  static final String BASE_URL = "http://localhost:11434";

  /**
   * Streams a chat completion from the local Ollama model, printing tokens as
   * they arrive, then exits as soon as the response completes or fails.
   *
   * @param args unused
   * @throws InterruptedException if the wait for the streaming response is interrupted
   */
  public static void main(String[] args) throws InterruptedException {
      // Blocking client — kept for reference; uncomment the chat() call below to use it.
      ChatLanguageModel model = OllamaChatModel.builder()
              .baseUrl(BASE_URL)
              .modelName(MODEL_NAME)
              //.responseFormat(ResponseFormat.JSON) // force JSON-formatted output
              .build();
      // String answer = model.chat("介绍一下你自己");
      // System.out.println(answer);

      OllamaStreamingChatModel streaming = OllamaStreamingChatModel.builder()
              .baseUrl(BASE_URL)
              .modelName(MODEL_NAME)
              .build();

      // Latch lets main() return as soon as the stream finishes (or fails),
      // instead of the original unconditional 100-second sleep.
      CountDownLatch done = new CountDownLatch(1);
      streaming.chat("介绍一下你自己", new StreamingChatResponseHandler() {
          @Override
          public void onPartialResponse(String token) {
              // Tokens arrive incrementally; print without a newline to build up the answer.
              System.out.print(token);
          }

          @Override
          public void onCompleteResponse(ChatResponse chatResponse) {
              System.out.println();
              System.out.println("complete response: " + chatResponse);
              done.countDown();
          }

          @Override
          public void onError(Throwable throwable) {
              throwable.printStackTrace();
              // Release the latch on failure too, so main() does not hang until the timeout.
              done.countDown();
          }
      });

      // Wait for completion, bailing out after 100 s (same upper bound as before).
      if (!done.await(100, TimeUnit.SECONDS)) {
          System.err.println("Timed out waiting for streaming response");
      }
  }
}