package com.pai4j.aigc.llm;

import com.pai4j.common.enums.PromptTemplateEnum;
import com.pai4j.common.service.FreeMarkEngineService;
import com.pai4j.domain.vo.llm.*;
import com.pai4j.aigc.llm.entity.LLMModelEntity;
import com.pai4j.aigc.llm.service.LLMModelService;
import com.pai4j.aigc.llm.service.LLMUsageService;
import com.pai4j.aigc.llm.service.MetricsService;
import com.pai4j.aigc.llm.service.PricingService;
import com.pai4j.aigc.llm.service.TokenEstimator;
import jakarta.annotation.Resource;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import org.springframework.web.bind.annotation.RequestMapping;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * AI-backed PlantUML code generation service.
 *
 * <p>Renders a FreeMarker prompt template, sends it to the configured LLM via
 * {@link AbstractLLMChatService}, and records usage, cost and metrics for every
 * call (success and failure) through {@code LLMUsageService} / {@code MetricsService}.
 *
 * @author: CYM-pai
 * @date: 2025/06/01 20:25
 **/
@Service
public class AiPlantUmlService {
    @Resource
    private AbstractLLMChatService abstractLLMChatService;

    @Resource
    private FreeMarkEngineService freeMarkEngineService;

    @Resource
    private LLMUsageService usageService;

    @Resource
    private TokenEstimator tokenEstimator;

    @Resource
    private PricingService pricingService;

    @Resource
    private MetricsService metricsService;

    @Resource
    private LLMModelService modelService;

    // Global model code from configuration; resolved against LLMModelService on each call
    // so model metadata changes take effect without a restart.
    @Value("${llm.config.model}")
    private String model;

    /**
     * Generates PlantUML code from a free-form user prompt.
     *
     * @param userPrompt the user's natural-language description of the diagram
     * @return the PlantUML code produced by the model
     * @throws Exception if template rendering or the LLM call fails
     */
    public String generatePlantUmlCode(String userPrompt) throws Exception {
        // Render the PlantUml prompt template with the user's input.
        Map<String, Object> vars = new HashMap<>();
        vars.put("userPrompt", userPrompt);
        String prompt = freeMarkEngineService.getContentByTemplate(PromptTemplateEnum.PlantUml.getPath(), vars);

        final List<ChatCompletionMessage> messageList = Arrays.asList(
                new ChatCompletionMessage(ChatMessageRole.SYSTEM.value(), "你是一个专业的PlantUML代码生成专家"),
                new ChatCompletionMessage(ChatMessageRole.USER.value(), prompt)
        );

        // Estimate prompt tokens from the rendered prompt actually sent to the model.
        return chatWithTracking(messageList, prompt);
    }

    /**
     * Generates or modifies PlantUML code using conversational context
     * (the original prompt and the previously generated code).
     *
     * @param userPrompt     the user's new request
     * @param originalPrompt the original prompt, if any
     * @param originalCode   the previously generated code, if any
     * @return the generated PlantUML code
     * @throws Exception if template rendering or the LLM call fails
     */
    public String generatePlantUmlCodeWithContext(String userPrompt, String originalPrompt, String originalCode) throws Exception {
        Map<String, Object> vars = new HashMap<>();
        vars.put("userPrompt", userPrompt);
        vars.put("originalPrompt", originalPrompt);
        vars.put("originalCode", originalCode);

        // Render the context-aware PlantUml prompt template.
        String prompt = freeMarkEngineService.getContentByTemplate(PromptTemplateEnum.PlantUmlWithContext.getPath(), vars);

        List<ChatCompletionMessage> messageList = new ArrayList<>();
        messageList.add(new ChatCompletionMessage(ChatMessageRole.SYSTEM.value(), "你是一个专业的PlantUML代码生成专家，根据用户的新需求对已有代码进行修改"));

        // Track all text sent to the model so the prompt-token estimate covers the
        // context turns as well, not just the final user message.
        StringBuilder promptText = new StringBuilder();

        // Replay the original request/response as prior conversation turns when present.
        if (originalPrompt != null && !originalPrompt.isEmpty()) {
            String priorUser = "原始需求：" + originalPrompt;
            String priorAssistant = "生成的PlantUML代码：\n```plantuml\n" + originalCode + "\n```";
            messageList.add(new ChatCompletionMessage(ChatMessageRole.USER.value(), priorUser));
            messageList.add(new ChatCompletionMessage(ChatMessageRole.ASSISTANT.value(), priorAssistant));
            promptText.append(priorUser).append('\n').append(priorAssistant).append('\n');
        }

        messageList.add(new ChatCompletionMessage(ChatMessageRole.USER.value(), prompt));
        promptText.append(prompt);

        // Unlike before, this path now records usage/cost/metrics exactly like the
        // other generation methods (it previously bypassed tracking entirely).
        return chatWithTracking(messageList, promptText.toString());
    }

    /**
     * Optimizes existing PlantUML code.
     *
     * @param plantUmlCode the original PlantUML code
     * @return the optimized PlantUML code
     * @throws Exception if template rendering or the LLM call fails
     */
    public String optimizePlantUmlCode(String plantUmlCode) throws Exception {
        Map<String, Object> vars = new HashMap<>();
        vars.put("plantUmlCode", plantUmlCode);
        String prompt = freeMarkEngineService.getContentByTemplate(PromptTemplateEnum.OptimizationPlantUml.getPath(), vars);

        final List<ChatCompletionMessage> messageList = Arrays.asList(
                new ChatCompletionMessage(ChatMessageRole.SYSTEM.value(), "你是一个专业的PlantUML代码优化专家"),
                new ChatCompletionMessage(ChatMessageRole.USER.value(), prompt)
        );

        return chatWithTracking(messageList, prompt);
    }

    /**
     * Modifies PlantUML code according to user feedback.
     *
     * @param plantUmlCode   the original PlantUML code
     * @param userFeedback   the user's feedback
     * @param originalPrompt the original prompt
     * @return the modified PlantUML code
     * @throws Exception if template rendering or the LLM call fails
     */
    public String modifyPlantUmlCodeWithFeedback(String plantUmlCode, String userFeedback, String originalPrompt) throws Exception {
        Map<String, Object> vars = new HashMap<>();
        vars.put("plantUmlCode", plantUmlCode);
        vars.put("userFeedback", userFeedback);
        vars.put("originalPrompt", originalPrompt);
        String prompt = freeMarkEngineService.getContentByTemplate(PromptTemplateEnum.PlantUmlFeedback.getPath(), vars);

        final List<ChatCompletionMessage> messageList = Arrays.asList(
                new ChatCompletionMessage(ChatMessageRole.SYSTEM.value(), "你是一个专业的PlantUML代码修改专家，根据用户反馈对代码进行修改"),
                new ChatCompletionMessage(ChatMessageRole.USER.value(), prompt)
        );

        return chatWithTracking(messageList, prompt);
    }

    /**
     * Executes an LLM chat call and records usage, cost and latency metrics.
     *
     * <p>Common path shared by every public method: resolves the configured model,
     * times the call, estimates tokens, records success or failure, and rethrows
     * any error unchanged so callers keep their original exception contract.
     *
     * @param messageList the chat messages to send
     * @param promptText  the text used to estimate prompt-side tokens (the content
     *                    actually sent to the model)
     * @return the content of the first completion choice
     * @throws Exception propagated from the underlying chat service
     */
    private String chatWithTracking(List<ChatCompletionMessage> messageList, String promptText) throws Exception {
        // Model may be unregistered; fall back to the raw configured code as provider key.
        LLMModelEntity modelEntity = modelService.findByGlobalCode(model).orElse(null);
        String providerKey = modelEntity != null ? modelEntity.getProvider() : model;
        String modelCode = modelEntity != null ? modelEntity.getCode() : providerKey;
        Long modelId = modelEntity != null ? modelEntity.getId() : -1L;
        String requestId = java.util.UUID.randomUUID().toString();
        long start = System.currentTimeMillis();
        try {
            ChatCompletionResponse response = abstractLLMChatService.chat(messageList);
            String content = response.getChoices().get(0).getMessage().getContent();
            long latency = System.currentTimeMillis() - start;
            // Token counts are estimates, not provider-reported usage.
            int promptTokens = tokenEstimator.estimateTokens(promptText);
            int completionTokens = tokenEstimator.estimateTokens(content);
            int totalTokens = promptTokens + completionTokens;
            // NOTE(review): calcCostCents presumably tolerates a null modelEntity — confirm.
            Integer costCents = pricingService.calcCostCents(modelEntity, promptTokens, completionTokens);
            usageService.recordSuccess(null, requestId, null, modelId, providerKey, modelCode,
                    promptTokens, completionTokens, totalTokens, latency, costCents);
            metricsService.recordSuccess(providerKey, null, modelCode, latency);
            return content;
        } catch (Exception e) {
            long latency = System.currentTimeMillis() - start;
            usageService.recordFailure(null, requestId, null, modelId, providerKey, modelCode,
                    "LLM_ERROR", e.getMessage(), latency);
            metricsService.recordFailure(providerKey, null, modelCode, latency);
            // Rethrow unchanged so callers see the original failure.
            throw e;
        }
    }
}
