package com.allm.ai.common.strategy.impl;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;

import org.springframework.ai.chat.client.ChatClient;
import org.springframework.ai.chat.messages.AssistantMessage;
import org.springframework.ai.chat.messages.Message;
import org.springframework.ai.chat.messages.SystemMessage;
import org.springframework.ai.chat.messages.UserMessage;
import org.springframework.ai.chat.model.Generation;
import org.springframework.ai.deepseek.DeepSeekAssistantMessage;
import org.springframework.stereotype.Component;

import com.allm.ai.common.dto.ApiRequest;
import com.allm.ai.common.dto.StreamingOutput;
import com.allm.ai.common.entity.AiModel;
import com.allm.ai.common.service.ChatModelCacheService;
import com.allm.ai.common.strategy.AiModelStrategy;

import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import reactor.core.publisher.Flux;

/**
 * DeepSeek strategy implementation.
 *
 * <p>Built on Spring AI. ChatClient instances are obtained from
 * {@link ChatModelCacheService} (with or without conversation memory).
 * Supports true token-level streaming, with special handling for DeepSeek
 * reasoning models whose responses interleave a "thinking" phase
 * ({@link DeepSeekAssistantMessage#getReasoningContent()}) with the final answer.
 */
@Slf4j
@Component
@RequiredArgsConstructor
public class DeepSeekStrategy implements AiModelStrategy {

    private final ChatModelCacheService cacheService;

    /**
     * Converts the request's raw {@code Map}-based messages into Spring AI
     * {@link Message} objects.
     *
     * <p>Entries with a blank/absent {@code content} are skipped. A missing or
     * unknown {@code role} is treated as a user message (unknown roles are
     * logged at WARN level).
     *
     * @param messages raw messages; each map is expected to carry String values
     *                 under the keys {@code "role"} and {@code "content"}
     * @return the converted messages; never {@code null}, possibly empty
     */
    private List<Message> convertMessages(List<Map<String, Object>> messages) {
        if (messages == null || messages.isEmpty()) {
            return new ArrayList<>();
        }

        List<Message> springMessages = new ArrayList<>();

        for (Map<String, Object> msg : messages) {
            String role = (String) msg.get("role");
            String content = (String) msg.get("content");

            // Skip empty messages — the upstream API rejects blank content.
            if (content == null || content.trim().isEmpty()) {
                continue;
            }

            switch (role != null ? role.toLowerCase() : "user") {
                case "system" -> springMessages.add(new SystemMessage(content));
                case "user" -> springMessages.add(new UserMessage(content));
                case "assistant" -> springMessages.add(new AssistantMessage(content));
                default -> {
                    log.warn("未知的消息角色: {}, 作为用户消息处理", role);
                    springMessages.add(new UserMessage(content));
                }
            }
        }

        return springMessages;
    }

    /**
     * Resolves the conversation id to use: the caller-supplied id when non-blank,
     * otherwise a freshly generated id with the given prefix.
     *
     * @param candidate caller-supplied conversation id, may be null/blank
     * @param prefix    prefix for a generated fallback id
     * @return a non-blank conversation id
     */
    private static String resolveConversationId(String candidate, String prefix) {
        return (candidate != null && !candidate.trim().isEmpty())
                ? candidate
                : prefix + System.currentTimeMillis();
    }

    /**
     * Heuristically decides whether the given model is a DeepSeek reasoning model
     * (one that emits a separate reasoning/"thinking" stream).
     *
     * <p>NOTE(review): this helper was referenced but missing from the original
     * source. DeepSeek publishes its reasoning models as "deepseek-reasoner" /
     * "deepseek-r1" — TODO confirm the match rule against the actual
     * {@link AiModel} naming convention used in this project.
     *
     * @param model the model to inspect
     * @return true if the model name looks like a reasoning model
     */
    private boolean isReasoningModel(AiModel model) {
        String name = model.getName();
        if (name == null) {
            return false;
        }
        String lower = name.toLowerCase();
        return lower.contains("reasoner") || lower.contains("r1");
    }

    /**
     * Synchronous (blocking) call to the DeepSeek API, without conversation memory.
     *
     * @param model   target model configuration
     * @param request the request whose messages are sent
     * @return the model's full response text
     * @throws Exception wrapped as RuntimeException on any failure, or
     *                   IllegalArgumentException if the message list is empty
     */
    @Override
    public String callApi(AiModel model, ApiRequest request) throws Exception {
        log.info("调用 DeepSeek API (同步)，模型: {}, 端点: {}", model.getName(), model.getApiEndpoint());

        try {
            // Memory-less client: one-shot synchronous calls need no conversation state.
            ChatClient chatClient = cacheService.getOrCreateSimpleChatClient(model);

            List<Message> messages = convertMessages(request.getMessages());

            if (messages.isEmpty()) {
                throw new IllegalArgumentException("消息列表不能为空");
            }

            return chatClient.prompt()
                    .messages(messages)
                    .call()
                    .content();

        } catch (Exception e) {
            log.error("DeepSeek API 同步调用失败", e);
            throw new RuntimeException("DeepSeek API 调用失败: " + e.getMessage(), e);
        }
    }

    /**
     * Streaming call to the DeepSeek API with conversation memory.
     *
     * <p>Reasoning models are routed to
     * {@code handleReasoningModelStream} so the thinking phase is surfaced as
     * {@link StreamingOutput#THINKING} chunks; other models stream plain
     * {@link StreamingOutput#CONTENT} chunks.
     *
     * @param model   target model configuration
     * @param request the request; its conversationId is used when present,
     *                otherwise a "deepseek-&lt;timestamp&gt;" id is generated
     * @return a Flux of streaming chunks; errors are surfaced via {@code Flux.error}
     */
    @Override
    public Flux<StreamingOutput> callApiStream(AiModel model, ApiRequest request) {
        log.info("调用 DeepSeek API (流式)，模型: {}, 端点: {}", model.getName(), model.getApiEndpoint());

        try {
            // Memory-enabled client so multi-turn context is preserved per conversation id.
            ChatClient chatClient = cacheService.getOrCreateChatClient(model);

            List<Message> messages = convertMessages(request.getMessages());

            if (messages.isEmpty()) {
                return Flux.error(new IllegalArgumentException("消息列表不能为空"));
            }

            final String conversationId =
                    resolveConversationId(request.getConversationId(), "deepseek-");

            // Reasoning models need chatResponse()-level access to extract the
            // reasoning stream; plain content() streaming would drop it.
            if (isReasoningModel(model)) {
                log.info("使用推理模型，将特殊处理推理过程");
                try {
                    return handleReasoningModelStream(chatClient, messages, conversationId);
                } catch (Exception e) {
                    log.error("推理模型流式调用失败", e);
                    return Flux.error(new RuntimeException("推理模型流式调用失败: " + e.getMessage(), e));
                }
            }

            // Native Spring AI streaming for standard (non-reasoning) models.
            return chatClient.prompt()
                    .messages(messages)
                    .advisors(a -> a.param("chatMemoryConversationId", conversationId))
                    .stream()
                    .content()
                    .map(output -> new StreamingOutput(StreamingOutput.CONTENT, output))
                    .doOnNext(chunk -> log.debug("DeepSeek 流式响应片段: {}", chunk))
                    .doOnError(error -> log.error("DeepSeek 流式调用出错: ", error))
                    .doOnComplete(() -> log.info("DeepSeek 流式调用完成，会话ID: {}", conversationId))
                    .onErrorResume(error -> {
                        log.error("DeepSeek 流式调用失败", error);
                        return Flux.error(new RuntimeException("流式调用失败: " + error.getMessage(), error));
                    });

        } catch (Exception e) {
            log.error("DeepSeek 流式调用初始化失败", e);
            return Flux.error(new RuntimeException("流式调用初始化失败: " + e.getMessage(), e));
        }
    }

    /**
     * Streams a reasoning model's output, splitting each chunk into THINKING
     * (reasoning content) or CONTENT (final answer) via
     * {@link DeepSeekAssistantMessage}.
     *
     * <p>Chunks carrying neither reasoning content nor text are emitted empty so
     * the blank-filter below drops them. (Bug fix: the original emitted the
     * literal string "done" for such frames, injecting spurious "done" chunks
     * into the user-visible stream.)
     *
     * @param chatClient     memory-enabled client to stream from
     * @param messages       converted conversation messages
     * @param conversationId conversation id for the chat-memory advisor
     * @return a Flux of THINKING/CONTENT chunks with blank chunks filtered out
     */
    private Flux<StreamingOutput> handleReasoningModelStream(ChatClient chatClient, List<Message> messages, String conversationId) {
        return chatClient.prompt()
            .messages(messages)
            .advisors(a -> a.param("chatMemoryConversationId", conversationId))
            .stream().chatResponse()
            .map(chatResponse -> {
                Generation generation = chatResponse.getResult();
                if (generation == null || generation.getOutput() == null) {
                    return new StreamingOutput(StreamingOutput.CONTENT, "");
                }

                if (generation.getOutput() instanceof DeepSeekAssistantMessage dsam) {
                    // Reasoning phase takes precedence: surface it as THINKING.
                    String reasoningContent = dsam.getReasoningContent();
                    if (reasoningContent != null && !reasoningContent.trim().isEmpty()) {
                        log.debug("检测到推理过程: {}", reasoningContent);
                        return new StreamingOutput(StreamingOutput.THINKING, reasoningContent);
                    }

                    // Final-answer phase.
                    String text = dsam.getText();
                    if (text != null && !text.trim().isEmpty()) {
                        log.debug("检测到最终答案: {}", text);
                        return new StreamingOutput(StreamingOutput.CONTENT, text);
                    }
                }

                // Nothing usable in this frame — emit empty; filtered out below.
                return new StreamingOutput(StreamingOutput.CONTENT, "");
            })
            .filter(output -> output != null && !output.data().isBlank())
            .doOnNext(chunk -> log.debug("DeepSeek 推理流式响应: {}", chunk))
            .doOnError(error -> log.error("DeepSeek 推理流式调用出错: ", error))
            .doOnComplete(() -> log.info("DeepSeek 推理流式调用完成"));
    }

    /**
     * Simplified streaming call without conversation memory.
     *
     * @param model   target model configuration
     * @param request the request whose messages are sent
     * @return a Flux of raw content chunks
     */
    public Flux<String> callApiStreamSimple(AiModel model, ApiRequest request) {
        log.info("调用 DeepSeek API (简化流式)，模型: {}", model.getName());

        try {
            ChatClient chatClient = cacheService.getOrCreateSimpleChatClient(model);

            List<Message> messages = convertMessages(request.getMessages());

            if (messages.isEmpty()) {
                return Flux.error(new IllegalArgumentException("消息列表不能为空"));
            }

            return chatClient.prompt()
                    .messages(messages)
                    .stream()
                    .content()
                    .doOnNext(chunk -> log.debug("DeepSeek 简化流式响应: {}", chunk))
                    .onErrorResume(error -> {
                        log.error("DeepSeek 简化流式调用失败", error);
                        return Flux.error(new RuntimeException("简化流式调用失败: " + error.getMessage(), error));
                    });

        } catch (Exception e) {
            log.error("DeepSeek 简化流式调用初始化失败", e);
            return Flux.error(new RuntimeException("简化流式调用初始化失败: " + e.getMessage(), e));
        }
    }

    /**
     * Streaming call with a caller-supplied conversation id.
     *
     * @param model          target model configuration
     * @param request        the request whose messages are sent
     * @param conversationId desired conversation id; when blank a
     *                       "deepseek-custom-&lt;timestamp&gt;" id is generated
     * @return a Flux of raw content chunks
     */
    public Flux<String> callApiStreamWithConversation(AiModel model, ApiRequest request, String conversationId) {
        log.info("调用 DeepSeek API (自定义会话流式)，模型: {}, 会话ID: {}", model.getName(), conversationId);

        try {
            ChatClient chatClient = cacheService.getOrCreateChatClient(model);
            List<Message> messages = convertMessages(request.getMessages());

            if (messages.isEmpty()) {
                return Flux.error(new IllegalArgumentException("消息列表不能为空"));
            }

            final String finalConversationId =
                    resolveConversationId(conversationId, "deepseek-custom-");

            return chatClient.prompt()
                    .messages(messages)
                    .advisors(a -> a.param("chatMemoryConversationId", finalConversationId))
                    .stream()
                    .content()
                    .doOnNext(chunk -> log.debug("DeepSeek 自定义会话流式响应: {}", chunk))
                    .doOnComplete(() -> log.info("DeepSeek 自定义会话流式调用完成，会话ID: {}", finalConversationId))
                    .onErrorResume(error -> {
                        log.error("DeepSeek 自定义会话流式调用失败", error);
                        return Flux.error(new RuntimeException("自定义会话流式调用失败: " + error.getMessage(), error));
                    });

        } catch (Exception e) {
            log.error("DeepSeek 自定义会话流式调用初始化失败", e);
            return Flux.error(new RuntimeException("自定义会话流式调用初始化失败: " + e.getMessage(), e));
        }
    }

    /**
     * Clears the entire ChatClient/ChatModel cache (delegates to the cache service).
     */
    public void clearCache() {
        log.info("清理 DeepSeek 缓存");
        cacheService.clearAllCache();
    }

    /**
     * Clears the cache entries for one specific model.
     *
     * @param model the model whose cached clients should be evicted
     */
    public void clearCache(AiModel model) {
        log.info("清理特定模型的缓存，模型: {}", model.getName());
        cacheService.clearCache(model);
    }

    /**
     * @return cache statistics as reported by the cache service
     */
    public Map<String, Object> getCacheStats() {
        return cacheService.getCacheStats();
    }

    /**
     * @return cache health status as reported by the cache service
     */
    public Map<String, Object> getCacheHealth() {
        return cacheService.getHealthStatus();
    }

    /**
     * Performs a quick connectivity check against the model with a fixed prompt.
     *
     * @param model the model to test
     * @return the model's response text
     * @throws RuntimeException if the call fails
     */
    public String testConnection(AiModel model) {
        try {
            ChatClient chatClient = cacheService.getOrCreateSimpleChatClient(model);
            return chatClient.prompt()
                    .user("Hello, please respond with 'Connection successful'")
                    .call()
                    .content();
        } catch (Exception e) {
            log.error("DeepSeek 连接测试失败", e);
            throw new RuntimeException("连接测试失败: " + e.getMessage(), e);
        }
    }

    /**
     * @param serviceProvider provider identifier from the model configuration
     * @return true for the DeepSeek provider aliases this strategy handles
     */
    @Override
    public boolean supports(String serviceProvider) {
        return "deepseek".equalsIgnoreCase(serviceProvider) ||
               "deepseek-ai".equalsIgnoreCase(serviceProvider) ||
               "deepseek-springai".equalsIgnoreCase(serviceProvider);
    }

    /**
     * @return always true — this strategy supports streaming
     */
    @Override
    public boolean supportsStreaming() {
        return true;
    }
}