package com.langChain4j.model.factory.impl;

import com.langChain4j.api.constants.ModelHostConstant;
import com.langChain4j.api.enums.ModelEnum;
import com.langChain4j.model.factory.AbsAgentLoader;
import com.langChain4j.api.vo.AgentChatVo;
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.data.message.ChatMessage;
import dev.langchain4j.data.message.UserMessage;
import dev.langchain4j.model.chat.ChatModel;
import dev.langchain4j.model.chat.StreamingChatModel;
import dev.langchain4j.model.chat.request.ChatRequest;
import dev.langchain4j.model.chat.request.ChatRequestParameters;
import dev.langchain4j.model.chat.response.ChatResponse;
import dev.langchain4j.model.chat.response.StreamingChatResponseHandler;
import dev.langchain4j.model.ollama.OllamaChatModel;
import dev.langchain4j.model.ollama.OllamaStreamingChatModel;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.ObjectUtils;
import org.springframework.stereotype.Component;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Sinks;

import java.util.List;

/**
 * Ollama model loader implementation.
 *
 * <p>Builds an Ollama chat model per request from the caller-supplied
 * {@link AgentChatVo} (base URL, model name, temperature — each falling back
 * to a project default when null), appends the new user message to the
 * session history, and delegates history persistence to
 * {@code AbsAgentLoader#contractSessionMessage}.
 *
 * @author linjun
 * @date 2025/8/21 09:26
 * @description Ollama模型加载器实现
 */
@Slf4j
@Component("ollama")
public class OllamaAgentLoader extends AbsAgentLoader {

    /**
     * Synchronous chat: sends the accumulated history plus the new user
     * message to Ollama and returns the assistant's reply text.
     *
     * @param chatVo request payload; {@code modelName}, {@code temperature}
     *               and {@code baseUrl} may be null, in which case project
     *               defaults are applied
     * @return the AI reply text
     */
    @Override
    public String chat(AgentChatVo chatVo) {
        // Resolve the defaults ONCE so the request parameters and the model
        // builder always agree. Previously the builder received the raw,
        // possibly-null values while ChatRequestParameters had defaults
        // applied — a null modelName/temperature reached the model builder.
        String modelName = ObjectUtils.defaultIfNull(chatVo.getModelName(), ModelEnum.MODEL_OLLAMA_DEFAULT.getCode());
        Double temperature = ObjectUtils.defaultIfNull(chatVo.getTemperature(), 1.0);

        ChatRequestParameters parameters = ChatRequestParameters.builder()
                .modelName(modelName)
                .temperature(temperature)
                .build();

        // NOTE(review): assumes getHistoryMessage() never returns null and is
        // mutable — confirm against AgentChatVo's contract.
        List<ChatMessage> historyMessage = chatVo.getHistoryMessage();
        historyMessage.add(UserMessage.from(chatVo.getMessage()));

        ChatRequest chatRequest = ChatRequest.builder()
                .messages(historyMessage)
                .parameters(parameters)
                .build();

        ChatModel chatModel = OllamaChatModel.builder()
                .baseUrl(ObjectUtils.defaultIfNull(chatVo.getBaseUrl(), ModelHostConstant.OLLAMA_API_URL))
                .modelName(modelName)
                .temperature(temperature)
                .logRequests(true)
                .build();

        ChatResponse chatResponse = chatModel.chat(chatRequest);
        AiMessage aiMessage = chatResponse.aiMessage();
        // Record the AI reply in the history before persisting the session.
        historyMessage.add(aiMessage);
        super.contractSessionMessage(chatVo);
        return aiMessage.text();
    }

    /**
     * Streaming chat: sends the accumulated history plus the new user message
     * and returns a {@link Flux} that emits each partial token as Ollama
     * produces it. The full AI reply is appended to the history and the
     * session persisted when the stream completes.
     *
     * @param chatVo request payload; null {@code modelName}, {@code temperature}
     *               and {@code baseUrl} fall back to project defaults
     * @return a single-subscriber stream of partial response tokens
     */
    @Override
    public Flux<String> streamChat(AgentChatVo chatVo) {
        List<ChatMessage> historyMessage = chatVo.getHistoryMessage();
        historyMessage.add(UserMessage.from(chatVo.getMessage()));

        // Apply the same null-defaults as chat() so a request without an
        // explicit model/temperature also works in streaming mode.
        StreamingChatModel chatModel = OllamaStreamingChatModel.builder()
                .baseUrl(ObjectUtils.defaultIfNull(chatVo.getBaseUrl(), ModelHostConstant.OLLAMA_API_URL))
                .modelName(ObjectUtils.defaultIfNull(chatVo.getModelName(), ModelEnum.MODEL_OLLAMA_DEFAULT.getCode()))
                .temperature(ObjectUtils.defaultIfNull(chatVo.getTemperature(), 1.0))
                .build();

        // Unicast sink: exactly one subscriber, tokens emitted before
        // subscription are buffered rather than dropped.
        Sinks.Many<String> sink = Sinks.many().unicast().onBackpressureBuffer();

        chatModel.chat(historyMessage, new StreamingChatResponseHandler() {
            @Override
            public void onPartialResponse(String partialResponse) {
                // Forward each token downstream; orThrow() surfaces emission
                // failures on the provider's callback thread.
                // NOTE(review): if the subscriber cancels mid-stream this
                // throws FAIL_CANCELLED into the Ollama callback — confirm
                // that is the intended abort mechanism.
                sink.tryEmitNext(partialResponse).orThrow();
            }

            @Override
            public void onCompleteResponse(ChatResponse completeResponse) {
                AiMessage aiMessage = completeResponse.aiMessage();
                // Persist the finished reply into the session history.
                historyMessage.add(aiMessage);
                OllamaAgentLoader.super.contractSessionMessage(chatVo);
                // Close the stream so the subscriber sees onComplete.
                sink.tryEmitComplete().orThrow();
            }

            @Override
            public void onError(Throwable error) {
                // Propagate provider errors to the subscriber as onError.
                sink.tryEmitError(error).orThrow();
            }
        });

        return sink.asFlux();
    }
}
