package com.lq.springai.service.impl;

import cn.hutool.core.util.StrUtil;
import cn.hutool.json.JSON;
import cn.hutool.json.JSONUtil;
import com.lq.springai.cache.ModelCache;
import com.lq.springai.entity.ModelParams;
import com.lq.springai.model.OllamaServiceModel;
import com.lq.springai.model.OnlineServiceModel;
import com.lq.springai.entity.ChatRequest;
import com.lq.springai.service.ModelService;
import jakarta.annotation.Resource;
import lombok.extern.slf4j.Slf4j;
import org.springframework.http.codec.ServerSentEvent;
import org.springframework.stereotype.Service;
import reactor.core.publisher.Flux;

import java.util.List;

/**
 * Author: 30783
 * Date: 2025/6/26 13:57
 */
@Slf4j
@Service
public class ModelServiceImpl implements ModelService {

    @Resource
    private OllamaServiceModel ollamaServiceModel;

    @Resource
    private OnlineServiceModel onlineServiceModel;

    @Override
    public Flux<ServerSentEvent<String>> aiModel(ChatRequest chatRequest) {

        //问题信息
        String message = chatRequest.getMessage();
        //大模型
        String model = chatRequest.getModel();

        if(StrUtil.isEmpty(message)){
            chatRequest.setMessage("你好");
        }

        ModelParams modelParams = ModelCache.getInstance().getModel(model);
        log.info("当前调用模型{}", JSONUtil.toJsonStr(modelParams));

        return modelParams.getOrigin().equals("ollama")?
                ollamaServiceModel.streamChat(chatRequest,modelParams):
                onlineServiceModel.streamChat(chatRequest,modelParams);
    }

    @Override
    public List<ModelParams> modelList() {
        return ModelCache.getInstance().modelList();
    }

}
