package com.allm.ai.common.strategy.impl;

import java.time.Duration;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Optional;

import org.springframework.stereotype.Component;

import com.allm.ai.common.dto.ApiRequest;
import com.allm.ai.common.dto.StreamingOutput;
import com.allm.ai.common.entity.AiModel;
import com.allm.ai.common.strategy.BaseAiModelStrategy;
import com.openai.client.OpenAIClient;
import com.openai.client.okhttp.OpenAIOkHttpClient;
import com.openai.core.JsonValue;
import com.openai.core.http.AsyncStreamResponse;
import com.openai.models.chat.completions.ChatCompletion;
import com.openai.models.chat.completions.ChatCompletionAssistantMessageParam;
import com.openai.models.chat.completions.ChatCompletionChunk;
import com.openai.models.chat.completions.ChatCompletionCreateParams;
import com.openai.models.chat.completions.ChatCompletionMessageParam;
import com.openai.models.chat.completions.ChatCompletionSystemMessageParam;
import com.openai.models.chat.completions.ChatCompletionUserMessageParam;

import lombok.extern.slf4j.Slf4j;
import reactor.core.publisher.Flux;

/**
 * Kimi模型策略实现 - 基于OpenAI官方Java SDK v3.5.0
 * 支持月之暗面Kimi模型的调用，使用OpenAI兼容的API格式
 * 参考: https://github.com/openai/openai-java
 * 
 * 目前来看只有OpenAI官方Java SDK v3.5.0才能正常实现流式和思维链输出，但是输出方式也需要再调试，不够顺滑
 */
@Slf4j
@Component
public class KimiOpenAiStrategy extends BaseAiModelStrategy {

    /** Sampling temperature used when the request does not specify one. */
    private static final double DEFAULT_TEMPERATURE = 0.7;

    /** Completion-token budget used when the request does not specify one. */
    private static final int DEFAULT_MAX_TOKENS = 1000;

    /**
     * Synchronous chat-completion call.
     *
     * @param model   model configuration (endpoint, API key, model identifier)
     * @param request chat request (messages, temperature, max tokens)
     * @return the assistant message content of the first choice
     * @throws Exception if the HTTP call fails or the response carries no usable content
     */
    @Override
    public String callApi(AiModel model, ApiRequest request) throws Exception {
        log.info("调用Kimi API (同步)，模型: {}", model.getName());

        try {
            OpenAIClient client = createOpenAIClient(model);
            ChatCompletionCreateParams params = buildRequestParams(model, request);

            ChatCompletion response = client.chat().completions().create(params);

            if (response.choices() != null && !response.choices().isEmpty()) {
                var message = response.choices().get(0).message();

                // content() is an Optional in SDK v3 — guard with isPresent()
                // before get(), otherwise an empty Optional throws here.
                if (message != null && message.content() != null && message.content().isPresent()) {
                    return message.content().get();
                }
            }

            throw new Exception("API响应中没有有效内容");

        } catch (Exception e) {
            log.error("Kimi API调用失败", e);
            throw new Exception("Kimi API调用失败: " + e.getMessage(), e);
        }
    }

    /**
     * Streaming chat-completion call. Emits THINKING chunks (for reasoning
     * models), CONTENT chunks, and a final DONE marker on the returned Flux.
     *
     * @param model   model configuration
     * @param request chat request
     * @return a Flux of streaming output fragments; terminates with an error
     *         if the upstream call or chunk handling fails
     */
    @Override
    public Flux<StreamingOutput> callApiStream(AiModel model, ApiRequest request) {
        log.info("调用Kimi流式API，模型: {}", model.getName());

        return Flux.create(sink -> {
            try {
                OpenAIClient client = createOpenAIClient(model);
                ChatCompletionCreateParams params = buildRequestParams(model, request);

                client.async().chat().completions().createStreaming(params)
                    .subscribe(new AsyncStreamResponse.Handler<ChatCompletionChunk>() {
                        @Override
                        public void onNext(ChatCompletionChunk chunk) {
                            try {
                                if (chunk.choices() == null || chunk.choices().isEmpty()) {
                                    return;
                                }

                                ChatCompletionChunk.Choice choice = chunk.choices().get(0);
                                ChatCompletionChunk.Choice.Delta delta = choice.delta();

                                if (delta != null) {
                                    // Reasoning ("thinking") models ship their chain of
                                    // thought in a vendor extension field, not in content.
                                    if (isReasoningModel(model)) {
                                        String thinkingContent = extractThinkingContent(delta);
                                        if (thinkingContent != null && !thinkingContent.trim().isEmpty()) {
                                            // DEBUG, not INFO: per-chunk model output is noisy
                                            // and may contain sensitive conversation content.
                                            log.debug("推理内容: {}", thinkingContent);
                                            streamContentByTokenEstimate(thinkingContent, sink, StreamingOutput.THINKING);
                                        }
                                    }

                                    // Regular delta content — Optional-safe access.
                                    if (delta.content().isPresent()) {
                                        String content = delta.content().get();
                                        if (content != null && !content.trim().isEmpty()) {
                                            log.debug("消息内容: {}", content);
                                            streamContentByTokenEstimate(content, sink, StreamingOutput.CONTENT);
                                        }
                                    }
                                }

                                // A present finishReason marks the end of the stream.
                                if (choice.finishReason() != null && choice.finishReason().isPresent()) {
                                    log.info("流式调用完成，原因: {}", choice.finishReason().get());
                                    sink.next(new StreamingOutput(StreamingOutput.DONE, "DONE"));
                                    sink.complete();
                                }
                            } catch (Exception e) {
                                log.error("处理流式响应时出错", e);
                                sink.error(e);
                            }
                        }

                        @Override
                        public void onComplete(Optional<Throwable> error) {
                            if (error.isPresent()) {
                                log.error("流式响应处理出错", error.get());
                                sink.error(error.get());
                            } else {
                                log.info("流式响应处理完成");
                                // No-op if onNext already completed the sink on finishReason.
                                sink.complete();
                            }
                        }
                    });
            } catch (Exception e) {
                log.error("Kimi流式API调用失败", e);
                sink.error(e);
            }
        });
    }

    /**
     * @return true for Moonshot/Kimi provider identifiers (Chinese or English).
     */
    @Override
    public boolean supports(String serviceProvider) {
        return "月之暗面".equals(serviceProvider) ||
               "moonshot".equalsIgnoreCase(serviceProvider) ||
               "kimi".equalsIgnoreCase(serviceProvider);
    }

    /** This strategy supports server-sent streaming. */
    @Override
    public boolean supportsStreaming() {
        return true;
    }

    /**
     * Builds the chat-completion request shared by the sync and streaming paths.
     * Uses {@link AiModel#getRequestModelName()} — the identifier the remote API
     * expects — rather than the display name. (The sync path previously sent
     * {@code getName()}, inconsistent with the streaming path.)
     */
    private ChatCompletionCreateParams buildRequestParams(AiModel model, ApiRequest request) {
        return ChatCompletionCreateParams.builder()
            .model(model.getRequestModelName())
            .messages(buildChatMessages(request))
            .temperature(request.getTemperature() != null
                ? request.getTemperature().doubleValue()
                : DEFAULT_TEMPERATURE)
            .maxCompletionTokens(request.getMaxTokens() != null
                ? request.getMaxTokens()
                : DEFAULT_MAX_TOKENS)
            .build();
    }

    /**
     * Extracts reasoning ("thinking") content from a streamed delta.
     * Moonshot sends it as the non-standard {@code reasoning_content} field,
     * which the SDK surfaces via {@code _additionalProperties()}.
     *
     * @return the reasoning text, or {@code null} if absent or unreadable
     */
    private String extractThinkingContent(ChatCompletionChunk.Choice.Delta message) {
        try {
            JsonValue reasoningContent = message._additionalProperties().get("reasoning_content");
            if (reasoningContent != null && reasoningContent.asString().isPresent()) {
                return reasoningContent.asString().get();
            }
        } catch (Exception e) {
            log.debug("无法提取thinking内容: {}", e.getMessage());
        }

        return null;
    }

    /**
     * Creates an OpenAI-compatible client pointed at the model's endpoint.
     * The generous 30-minute timeout accommodates long streaming responses.
     */
    private OpenAIClient createOpenAIClient(AiModel model) {
        return OpenAIOkHttpClient.builder()
            .baseUrl(model.getApiEndpoint())
            .apiKey(model.getApiKey())
            .timeout(Duration.ofMinutes(30))
            .build();
    }

    /**
     * Converts the request's role/content message maps into SDK message params.
     * Blank-content messages are skipped; unknown roles fall back to "user".
     *
     * @throws IllegalArgumentException if no usable message remains
     */
    private List<ChatCompletionMessageParam> buildChatMessages(ApiRequest request) {
        List<ChatCompletionMessageParam> chatMessages = new ArrayList<>();

        for (Map<String, Object> msg : request.getMessages()) {
            String role = (String) msg.get("role");
            String content = (String) msg.get("content");

            if (content == null || content.trim().isEmpty()) {
                continue;
            }

            ChatCompletionMessageParam chatMessage;
            switch (role != null ? role.toLowerCase() : "user") {
                case "system":
                    ChatCompletionSystemMessageParam chatSystemMessage =
                        ChatCompletionSystemMessageParam.builder().content(content).build();
                    chatMessage = ChatCompletionMessageParam.ofSystem(chatSystemMessage);
                    break;
                case "user":
                    ChatCompletionUserMessageParam chatUserMessage =
                        ChatCompletionUserMessageParam.builder().content(content).build();
                    chatMessage = ChatCompletionMessageParam.ofUser(chatUserMessage);
                    break;
                case "assistant":
                    ChatCompletionAssistantMessageParam chatAssistantMessage =
                        ChatCompletionAssistantMessageParam.builder().content(content).build();
                    chatMessage = ChatCompletionMessageParam.ofAssistant(chatAssistantMessage);
                    break;
                default:
                    log.warn("未知的消息角色: {}, 作为用户消息处理", role);
                    ChatCompletionUserMessageParam defaultUserMessageParam =
                        ChatCompletionUserMessageParam.builder().content(content).build();
                    chatMessage = ChatCompletionMessageParam.ofUser(defaultUserMessageParam);
            }

            chatMessages.add(chatMessage);
        }

        if (chatMessages.isEmpty()) {
            throw new IllegalArgumentException("消息列表不能为空");
        }

        return chatMessages;
    }
}
