package com.kaf.springbootollama.service.Impl;

import com.kaf.springbootollama.config.OllamaConfig;
import com.kaf.springbootollama.controller.OllamaController;
import com.kaf.springbootollama.domain.ChatResult;
import com.kaf.springbootollama.service.OllamaService;
import io.micrometer.common.util.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.ai.chat.model.ChatResponse;
import org.springframework.ai.chat.prompt.Prompt;
import org.springframework.ai.ollama.api.OllamaOptions;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;

@Service
public class OllamaServiceImpl implements OllamaService {

    // FIX: was LoggerFactory.getLogger(OllamaController.class) — log lines were
    // attributed to the controller instead of this service.
    private static final Logger log = LoggerFactory.getLogger(OllamaServiceImpl.class);

    /** Sampling temperature applied to every chat request. */
    private static final double DEFAULT_TEMPERATURE = 0.4;

    @Value("${ai.ollama.chat.options.model}")
    private String defaultChatOptionsModel;

    @Autowired
    private OllamaConfig ollamaConfig;

    /**
     * Chats with the default model configured via {@code ai.ollama.chat.options.model}.
     *
     * @param msg user input; blank input short-circuits with a prompt-for-input message
     * @return the model's text output
     */
    @Override
    public String chat(String msg) {
        log.info("msg:{}", msg);
        if (StringUtils.isBlank(msg)) {
            return "请输入chat内容";
        }
        String result = callModel(msg, defaultChatOptionsModel);
        log.info("response:{}", result);
        return result;
    }

    /**
     * Chats with an explicitly chosen model, falling back to the configured
     * default when {@code model} is blank.
     *
     * @param msg   user input; blank input yields status "1" and an error message
     * @param model Ollama model name; may be blank to use the default
     * @return a {@link ChatResult} carrying input, output, model and timestamp;
     *         status is "0" on success, "1" when the input was blank
     */
    @Override
    public ChatResult chat(String msg, String model) {
        ChatResult result = new ChatResult();
        result.setStatus("0");
        // FIX: was log.info("msg:{}", msg, "model:{}", model) — SLF4J silently
        // drops arguments beyond the placeholders, so model was never logged.
        log.info("msg:{}, model:{}", msg, model);
        result.setYourInput(msg);
        if (StringUtils.isBlank(msg)) {
            result.setAiOutput("请输入chat内容");
            result.setStatus("1");
            return result;
        }
        if (StringUtils.isBlank(model)) {
            model = defaultChatOptionsModel;
        }
        String output = callModel(msg, model);
        // FIX: log the model output (like the other overload), not the whole ChatResult.
        log.info("response:{}", output);
        result.setAiOutput(output);
        result.setModel(model);
        // FIX: was System.nanoTime() — a monotonic counter unrelated to wall-clock
        // time; epoch milliseconds is the meaningful value for a timestamp field.
        result.setTimestamp(String.valueOf(System.currentTimeMillis()));
        return result;
    }

    /**
     * Builds the prompt, invokes the configured Ollama chat model, logs the
     * elapsed time, and returns the model's text output. Shared by both
     * {@code chat} overloads to remove duplicated prompt/timing code.
     *
     * @param msg   non-blank user input
     * @param model Ollama model name to use
     * @return the model's text output
     */
    private String callModel(String msg, String model) {
        Prompt prompt = new Prompt(
                msg,
                OllamaOptions.builder()
                        .withModel(model)
                        .withTemperature(DEFAULT_TEMPERATURE)
                        .build()
        );
        long startTime = System.nanoTime(); // nanoTime is correct here: elapsed-time measurement
        ChatResponse response = ollamaConfig.getOllamaChatModel().call(prompt);
        double timeTaken = (System.nanoTime() - startTime) / 1e9; // nanoseconds -> seconds
        log.info("耗时: {} 秒", timeTaken); // parameterized logging instead of string concatenation
        return response.getResult().getOutput().getContent();
    }
}
