package com.webchat.aigc.llm;


import com.webchat.common.enums.PromptTemplateEnum;
import com.webchat.common.service.FreeMarkEngineService;
import com.webchat.common.util.JsonExtractorFromMarkdown;
import com.webchat.common.util.JsonUtil;
import com.webchat.domain.vo.llm.ChatCompletionChoice;
import com.webchat.domain.vo.llm.ChatCompletionMessage;
import com.webchat.domain.vo.llm.ChatCompletionResponse;
import com.webchat.domain.vo.llm.ChatMessageRole;
import com.webchat.domain.vo.llm.FunctionCallResponse;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

@Slf4j
@Service
public class AiFunctionCallService {


    /** LLM model identifier used for intent recognition, injected from config key {@code llm.config.model}. */
    @Value("${llm.config.model}")
    private String model;

    /** Template engine that renders the prompt from a FreeMarker template plus variables. */
    @Autowired
    private FreeMarkEngineService freeMarkEngineService;

    /**
     * Recognizes the intent of user input by rendering a prompt template, sending it to the
     * configured LLM, and parsing the function-call JSON embedded in the model's markdown reply.
     *
     * @param vars           variables substituted into the FreeMarker prompt template
     * @param promptTemplate the prompt template whose path is resolved via {@code getPath()}
     * @return the parsed function-call result; behavior for unparsable JSON depends on
     *         {@code JsonUtil.fromJson} — presumably returns {@code null}, TODO confirm
     * @throws IllegalStateException if the LLM response contains no choices
     * @throws Exception             if template rendering, the LLM call, or JSON parsing fails
     */
    public FunctionCallResponse getFunction(Map<String, Object> vars, PromptTemplateEnum promptTemplate) throws Exception {
        String prompt = freeMarkEngineService.getContentByTemplate(promptTemplate.getPath(), vars);
        // Call the LLM with a single user message for intent recognition
        final List<ChatCompletionMessage> messageList = Arrays.asList(
                new ChatCompletionMessage(ChatMessageRole.USER.value(), prompt));
        AbstractLLMChatService abstractLLMChatService = LLMServiceFactory.getLLMService(model);
        ChatCompletionResponse chatCompletionResponse = abstractLLMChatService.chat(messageList);
        // Guard against an empty/absent choice list instead of surfacing an opaque
        // IndexOutOfBoundsException from get(0)
        List<ChatCompletionChoice> choices =
                chatCompletionResponse == null ? null : chatCompletionResponse.getChoices();
        if (choices == null || choices.isEmpty()) {
            log.warn("LLM returned no choices for intent recognition, model={}", model);
            throw new IllegalStateException("LLM response contained no choices (model=" + model + ")");
        }
        String functionMdJson = choices.get(0).getMessage().getContent();
        // The model wraps its JSON answer in a markdown code fence; extract the raw JSON first
        String functionJson = JsonExtractorFromMarkdown.getJson(functionMdJson);
        return JsonUtil.fromJson(functionJson, FunctionCallResponse.class);
    }

}
