package com.gel.codegeneration.ai;

import com.gel.codegeneration.ai.model.enums.CodeGenTypeEnum;
import com.gel.codegeneration.ai.tools.FileWriteTool;
import com.gel.codegeneration.ai.tools.ToolManager;
import com.gel.codegeneration.common.enums.ErrorCode;
import com.gel.codegeneration.convention.exception.BusinessException;
import com.gel.codegeneration.service.ChatHistoryService;
import com.gel.codegeneration.utils.SpringContextUtil;
import com.github.benmanes.caffeine.cache.Cache;
import com.github.benmanes.caffeine.cache.Caffeine;
import dev.langchain4j.community.store.memory.chat.redis.RedisChatMemoryStore;
import dev.langchain4j.data.message.ToolExecutionResultMessage;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.chat.ChatModel;
import dev.langchain4j.model.chat.StreamingChatModel;
import dev.langchain4j.service.AiServices;
import jakarta.annotation.Resource;
import lombok.extern.slf4j.Slf4j;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Lazy;

import java.time.Duration;

/**
 * @BelongsProject: code-generation-platform
 * @BelongsPackage: com.gel.codegeneration.ai
 * @Author: gel
 * @CreateTime: 2025-08-22  10:49
 * @Description: Factory that creates and caches per-app AI code-generation service instances
 * @Version: 1.0
 */
@Configuration
@Slf4j
public class AiCodeGenerationServiceFactory {

    /** Blocking chat model used for non-streaming requests. */
    @Resource(name = "openAiChatModel")
    private ChatModel chatModel;

    /** Redis-backed memory store so conversation history survives restarts. */
    @Resource
    private RedisChatMemoryStore redisChatMemoryStore;

    // @Lazy breaks a startup circular dependency between this factory and the history service.
    @Resource
    @Lazy
    private ChatHistoryService chatHistoryService;

    @Resource
    private ToolManager toolManager;

    /**
     * Cache of AI service instances, keyed by {@code appId + "_" + codeGenType}.
     * Bounded to 1000 entries; evicted 60 min after creation or 10 min after last access.
     */
    private final Cache<String, AiCodeGenerationService> serviceCache = Caffeine.newBuilder()
            .maximumSize(1000)
            .expireAfterWrite(Duration.ofMinutes(60))
            .expireAfterAccess(Duration.ofMinutes(10))
            .removalListener((key, value, cause) ->
                    // Eviction is routine housekeeping, not a failure — log at DEBUG.
                    // SLF4J uses {} placeholders; the original %s specifiers were never interpolated.
                    log.debug("缓存ai服务实例被移除：缓存键={}, 原因={}", key, cause))
            .build();

    /**
     * Returns the cached AI service for the given app and generation type,
     * creating (and caching) one on first use. Conversation memory is scoped
     * per service instance, i.e. per appId + codeGenType.
     *
     * @param appId       application id used as chat-memory id and cache key part
     * @param codeGenType generation mode selecting the model configuration
     * @return cached or newly created service instance
     */
    public AiCodeGenerationService getAiCodeGenerationService(long appId, CodeGenTypeEnum codeGenType) {
        String cacheKey = buildCacheKey(appId, codeGenType);
        return serviceCache.get(cacheKey, key -> createAiCodeGenerationService(appId, codeGenType));
    }

    /**
     * Convenience overload defaulting to {@link CodeGenTypeEnum#HTML}.
     *
     * @param appId application id
     * @return cached or newly created service instance
     */
    public AiCodeGenerationService getAiCodeGenerationService(long appId) {
        return getAiCodeGenerationService(appId, CodeGenTypeEnum.HTML);
    }

    /**
     * Builds a new AI service wired with Redis-backed chat memory (pre-loaded
     * from the database) and a model configuration chosen by generation type.
     *
     * @throws BusinessException if the generation type is not supported
     */
    private AiCodeGenerationService createAiCodeGenerationService(long appId, CodeGenTypeEnum codeGenType) {
        MessageWindowChatMemory chatMemory = MessageWindowChatMemory.builder()
                .id(appId)
                .chatMemoryStore(redisChatMemoryStore)
                .maxMessages(50)
                .build();
        // Load the most recent persisted conversation turns into memory.
        chatHistoryService.loadChatHistoryToMemory(appId, chatMemory, 20);
        // Choose model configuration per generation type.
        return switch (codeGenType) {
            case VUE_PROJECT -> {
                // Prototype-scoped streaming model avoids sharing one instance across concurrent requests.
                StreamingChatModel reasoningStreamingChatModel =
                        SpringContextUtil.getBean("reasoningStreamingChatModelPrototype", StreamingChatModel.class);
                yield AiServices.builder(AiCodeGenerationService.class)
                        .chatModel(chatModel)
                        .streamingChatModel(reasoningStreamingChatModel)
                        // chatMemoryProvider binds memory per memoryId, preventing errors with tool calls
                        .chatMemoryProvider(memoryId -> chatMemory)
                        .tools(toolManager.getAllTools())
                        // Strategy for hallucinated tool names: return an error result instead of failing.
                        // Message fixed: was "this is no tool called" with a missing space before the name.
                        .hallucinatedToolNameStrategy(toolExecutionRequest -> ToolExecutionResultMessage.from(
                                toolExecutionRequest,
                                "Error: there is no tool called " + toolExecutionRequest.name()
                        ))
                        .build();
            }
            case HTML, MULTI_FILE -> {
                StreamingChatModel openAiStreamingChatModel =
                        SpringContextUtil.getBean("streamingChatModelPrototype", StreamingChatModel.class);
                yield AiServices.builder(AiCodeGenerationService.class)
                        .chatModel(chatModel)
                        .streamingChatModel(openAiStreamingChatModel)
                        .chatMemory(chatMemory)
                        .build();
            }
            default -> throw new BusinessException(ErrorCode.SYSTEM_ERROR,
                    "不支持的代码生成类型：" + codeGenType.getValue());
        };
    }

    /** Cache key format: {@code appId_codeGenTypeValue}. */
    private String buildCacheKey(long appId, CodeGenTypeEnum codeGenType) {
        return appId + "_" + codeGenType.getValue();
    }
}
