package com.pai4j.aigc.llm;



import com.pai4j.common.enums.PromptTemplateEnum;
import com.pai4j.common.service.FreeMarkEngineService;
import com.pai4j.common.util.JsonExtractorFromMarkdown;
import com.pai4j.common.util.JsonUtil;
import com.pai4j.domain.vo.llm.*;
import com.pai4j.aigc.llm.entity.LLMModelEntity;
import com.pai4j.aigc.llm.service.LLMModelService;
import com.pai4j.aigc.llm.service.LLMUsageService;
import com.pai4j.aigc.llm.service.MetricsService;
import com.pai4j.aigc.llm.service.PricingService;
import com.pai4j.aigc.llm.service.TokenEstimator;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.UUID;

@Slf4j
@Service
public class AiFunctionCallService {

    /** Global model code (from config) used to resolve the chat service and model metadata. */
    @Value("${llm.config.model}")
    private String model;

    @Autowired
    private FreeMarkEngineService freeMarkEngineService;

    @Autowired
    private LLMUsageService usageService;

    // NOTE(review): injected but never used in this class — confirm it is needed before removing.
    @Autowired
    private TokenEstimator tokenEstimator;

    @Autowired
    private PricingService pricingService;

    @Autowired
    private MetricsService metricsService;

    @Autowired
    private LLMModelService modelService;

    /**
     * Recognizes the intent (function call) behind a raw user input string.
     *
     * @param input    raw user input text, injected into the prompt template as {@code input}
     * @param senderId identifier of the message sender, recorded for usage accounting
     * @return the parsed function-call response
     * @throws Exception if the LLM call or JSON parsing fails; the failure is recorded
     *                   to usage/metrics services before being rethrown
     */
    public FunctionCallResponse getFunction(String input, String senderId) throws Exception {
        Map<String, Object> vars = new HashMap<>();
        vars.put("input", input);
        // Delegate to the Map-based overload so the call/record logic lives in one place.
        return getFunction(vars, senderId);
    }

    /**
     * Recognizes user intent, supporting additional prompt variables such as a plugin
     * function list.
     *
     * @param vars     prompt-template variables (e.g. {@code input}, {@code pluginFuncList})
     * @param senderId identifier of the message sender, recorded for usage accounting
     * @return the parsed function-call response
     * @throws Exception if the LLM call or JSON parsing fails; the failure is recorded
     *                   to usage/metrics services before being rethrown
     */
    public FunctionCallResponse getFunction(Map<String, Object> vars, String senderId) throws Exception {
        // Render the function-call prompt from the FreeMarker template.
        String prompt = freeMarkEngineService.getContentByTemplate(PromptTemplateEnum.ROBOT_FC.getPath(), vars);
        final List<ChatCompletionMessage> messageList = Arrays.asList(
                new ChatCompletionMessage(ChatMessageRole.USER.value(), prompt));
        AbstractLLMChatService chatService = LLMServiceFactory.getLLMService(model);

        // Resolve model metadata; fall back to the raw model code when unknown.
        LLMModelEntity modelEntity = modelService.findByGlobalCode(model).orElse(null);
        String providerKey = modelEntity != null ? modelEntity.getProvider() : model;
        String modelCode = modelEntity != null ? modelEntity.getCode() : providerKey;
        Long modelId = modelEntity != null ? modelEntity.getId() : -1L;
        String requestId = UUID.randomUUID().toString();

        long start = System.currentTimeMillis();
        try {
            ChatCompletionResponse response = chatService.chat(messageList);
            long latency = System.currentTimeMillis() - start;

            Usage usage = response.getUsage();
            int promptTokens = usage.getPromptTokens();
            int completionTokens = usage.getCompletionTokens();
            int totalTokens = usage.getTotalTokens();
            Integer costCents = pricingService.calcCostCents(modelEntity, promptTokens, completionTokens);

            usageService.recordSuccess(null, requestId, senderId, modelId, providerKey, modelCode,
                    promptTokens, completionTokens, totalTokens, latency, costCents);
            metricsService.recordSuccess(providerKey, null, modelCode, latency);

            // The model answers inside a Markdown code fence; extract the raw JSON payload first.
            String functionMdJson = response.getChoices().get(0).getMessage().getContent();
            String functionJson = JsonExtractorFromMarkdown.getJson(functionMdJson);
            return JsonUtil.fromJson(functionJson, FunctionCallResponse.class);
        } catch (Exception e) {
            long latency = System.currentTimeMillis() - start;
            // Fix: record the real senderId on failure too (was hard-coded null, which made
            // failed requests unattributable in usage accounting).
            usageService.recordFailure(null, requestId, senderId, modelId, providerKey, modelCode,
                    "LLM_ERROR", e.getMessage(), latency);
            metricsService.recordFailure(providerKey, null, modelCode, latency);
            throw e;
        }
    }

}
