package cn.iocoder.yudao.module.infra.llm.impl.mock;

import cn.hutool.core.util.RandomUtil;
import cn.iocoder.yudao.module.infra.llm.ILLMService;
import cn.iocoder.yudao.module.infra.llm.model.LLMMessage;
import cn.iocoder.yudao.module.infra.llm.model.LLMRes;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Service;

import java.util.List;
import java.util.function.Consumer;

/**
 * Mock {@link ILLMService} that simulates a streaming LLM call by sleeping
 * for a random delay and then emitting one fixed reply. Intended for testing
 * the streaming plumbing without a real model backend.
 */
@Service("llm-mock-sleep")
@Slf4j
public class MockSleepLLMServiceImpl implements ILLMService {

    /**
     * Simulates a chat response: sleeps 500–3000 ms, then invokes
     * {@code onMessage} exactly once with a terminal ({@code done = true})
     * mock reply.
     *
     * @param toneId          ignored by this mock
     * @param prompt          ignored by this mock
     * @param userMessage     used only for request logging
     * @param historyMessages ignored by this mock
     * @param onMessage       callback receiving the single final mock chunk
     * @throws RuntimeException if the simulated delay is interrupted; the
     *                          thread's interrupt status is restored first
     */
    @Override
    public void streamChatResponse(Integer toneId, String prompt, String userMessage,
                                   List<LLMMessage> historyMessages,
                                   Consumer<LLMRes> onMessage) {

        log.info("请求LLM开始, userMessage = {}", userMessage);

        try {
            // Simulate network/inference latency.
            Thread.sleep(RandomUtil.randomInt(500, 3000));
        } catch (InterruptedException e) {
            // Restore the interrupt flag so pools/callers can still observe
            // the cancellation request, then surface the failure.
            Thread.currentThread().interrupt();
            throw new RuntimeException(e);
        }

        final String reply = "我是MockLLM这是一个模拟的返回结果只是为了测试功能";
        log.info("请求LLM结束, reply = {}", reply);

        // No incremental streaming in the mock — a single terminal chunk.
        onMessage.accept(LLMRes.builder().done(true).replyText(reply).build());
    }

}