package com.my.ollama.service.impl;

import com.my.ollama.model.OllamaRequest;
import com.my.ollama.service.OllamaService;
import lombok.RequiredArgsConstructor;
import org.springframework.ai.chat.model.ChatResponse;
import org.springframework.ai.chat.prompt.ChatOptions;
import org.springframework.ai.chat.prompt.Prompt;
import org.springframework.ai.ollama.OllamaChatModel;
import org.springframework.stereotype.Service;
import reactor.core.publisher.Flux;

/**
 * @author mars
 * @date 2025-01-27 20:40
 */
@Service
@RequiredArgsConstructor
public class OllamaServiceImpl implements OllamaService {

    private final OllamaChatModel chatModel;

    /**
     * Performs a blocking chat call and returns the model's text output.
     *
     * @param request carries the user prompt and the target model name
     * @return the text content of the first generation in the response
     */
    @Override
    public String chat(OllamaRequest request) {
        return chatModel.call(buildPrompt(request))
                .getResult()
                .getOutput()
                .getText();
    }

    /**
     * Performs a streaming chat call, emitting partial responses as they arrive.
     *
     * @param request carries the user prompt and the target model name
     * @return a reactive stream of {@link ChatResponse} chunks
     */
    @Override
    public Flux<ChatResponse> streamChat(OllamaRequest request) {
        return chatModel.stream(buildPrompt(request));
    }

    /**
     * Builds a {@link Prompt} from the request, selecting the model per call.
     * Extracted to avoid duplicating the same construction in both chat paths.
     */
    private Prompt buildPrompt(OllamaRequest request) {
        return new Prompt(
                request.getPrompt(),
                ChatOptions.builder()
                        .model(request.getModel())
                        .build());
    }
}
