package com.hxb.example.chatModel.ollama.service;

import com.hxb.example.service.ChatModelService;
import jakarta.annotation.Resource;
import lombok.extern.slf4j.Slf4j;
import org.springframework.ai.chat.model.ChatResponse;
import org.springframework.ai.chat.prompt.Prompt;
import org.springframework.ai.ollama.OllamaChatModel;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.stereotype.Service;
import reactor.core.publisher.Flux;

@Slf4j
@Service
@ConditionalOnProperty(value = "spring.ai.ollama.chat.enabled", havingValue = "true")
public class OllamaImpl implements ChatModelService {

    /**
     * Ollama chat model injected by Spring.
     * Only present when {@code spring.ai.ollama.chat.enabled=true} (see the
     * {@code @ConditionalOnProperty} on this class).
     */
    @Resource
    private OllamaChatModel chatModel;

    /**
     * Blocking (non-streaming) chat call.
     *
     * @param msg the user message to send to the model
     * @return the text content of the model's first generation
     */
    // NOTE(review): add @Override if ChatModelService declares this method — confirm against the interface.
    public String call(String msg) {
        Prompt prompt = new Prompt(msg);
        ChatResponse response = chatModel.call(prompt);
        // NOTE(review): getContent() is deprecated in newer Spring AI releases in
        // favor of getText() — confirm the project's Spring AI version before migrating.
        return response.getResult().getOutput().getContent();
    }

    /**
     * Streaming chat call; emits partial responses as the model generates them.
     *
     * @param msg the user message to send to the model
     * @return a reactive stream of {@link ChatResponse} chunks
     */
    // NOTE(review): add @Override if ChatModelService declares this method — confirm against the interface.
    public Flux<ChatResponse> streamCall(String msg) {
        Prompt prompt = new Prompt(msg);
        return chatModel.stream(prompt);
    }
}