package com.example.demo.ai.llm.service;

import java.io.IOException;
import org.noear.solon.ai.chat.ChatModel;
import org.noear.solon.ai.chat.ChatResponse;
import org.noear.solon.annotation.Component;
import org.noear.solon.annotation.Inject;
import reactor.core.publisher.Flux;

/**
 * @author airhead
 */
/**
 * Thin service wrapper around the injected {@link ChatModel}, exposing blocking chat,
 * streaming chat, and function-calling entry points.
 *
 * @author airhead
 */
@Component
public class LlmService {
  @Inject private ChatModel chatModel;

  /**
   * Sends a single prompt and blocks until the full reply is available.
   *
   * @param prompt user prompt forwarded verbatim to the model
   * @return the content of the model's reply message
   * @throws RuntimeException wrapping any {@link IOException} raised by the underlying call
   */
  public String chat(String prompt) {
    try {
      // One prompt in, one message content out — chain straight through.
      return chatModel.prompt(prompt).call().getMessage().getContent();
    } catch (IOException e) {
      throw new RuntimeException(e);
    }
  }

  /**
   * Streams the model's reply as it is generated, one content fragment per element.
   * Responses without choices are filtered out of the stream.
   *
   * @param prompt user prompt forwarded verbatim to the model
   * @return a {@link Flux} of reply-content fragments
   * @throws IOException propagated from the underlying stream call
   */
  public Flux<String> stream(String prompt) throws IOException {
    Flux<ChatResponse> responses = Flux.from(chatModel.prompt(prompt).stream());
    return responses
        .filter(r -> r.hasChoices())
        .map(r -> r.getMessage().getContent());
  }

  /**
   * Sends a prompt with a {@code Tools} instance attached via {@code functionAdd}, allowing
   * the model to invoke its functions, then blocks for the final reply text.
   *
   * @param prompt user prompt forwarded verbatim to the model
   * @return the content of the model's reply message
   * @throws RuntimeException wrapping any {@link IOException} raised by the underlying call
   */
  public String functionCall(String prompt) {
    try {
      return chatModel
          .prompt(prompt)
          .options(opts -> opts.functionAdd(new Tools()))
          .call()
          .getMessage()
          .getContent();
    } catch (IOException e) {
      throw new RuntimeException(e);
    }
  }
}
