package com.example.airoles.service.impl;

import com.example.airoles.dto.ChatRequest;
import com.example.airoles.service.LlmService;
import com.example.airoles.controller.ChatController;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.stereotype.Service;
import org.springframework.web.reactive.function.client.WebClient;
import org.springframework.web.reactive.function.client.WebClientResponseException;
import reactor.core.publisher.Mono;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.time.Duration;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeoutException;

@Service
@ConditionalOnProperty(name = "llm.provider", havingValue = "qiniu")
public class QiniuLlmServiceImpl implements LlmService {
    private static final Logger logger = LoggerFactory.getLogger(QiniuLlmServiceImpl.class);
    private final WebClient client;

    // Retry configuration
    private static final int MAX_RETRIES = 3;
    private static final Duration TIMEOUT_DURATION = Duration.ofSeconds(30);
    // Base delay for exponential backoff: 1s, 2s, 4s, ...
    private static final long BASE_BACKOFF_MILLIS = 1000L;
    // Only the last N conversation messages are forwarded, to stay within token limits.
    private static final int MAX_HISTORY_MESSAGES = 10;

    /**
     * Builds a WebClient pre-configured for the Qiniu OpenAI-compatible endpoint.
     *
     * @param apiKey bearer token injected from the {@code qiniu.api.key} property
     */
    public QiniuLlmServiceImpl(@Value("${qiniu.api.key}") String apiKey) {
        this.client = WebClient.builder()
                .baseUrl("https://openai.qiniu.com/v1")
                .defaultHeader("Authorization", "Bearer " + apiKey)
                .defaultHeader("Content-Type", "application/json")
                .build();
    }

    /**
     * Sends a role-play chat completion request and returns the assistant's reply.
     * Never throws on transport failure: after all retries are exhausted a
     * user-friendly fallback response is returned instead.
     *
     * @param systemPrompt  role definition injected as the first system message
     * @param skillFragment optional skill guidance appended as a second system message (may be null/empty)
     * @param convo         prior conversation turns; only the most recent ones are forwarded
     * @return the assistant reply, or a fallback message if the request failed
     */
    @Override
    public ChatController.ChatResponse chatWithRole(
            String systemPrompt, String skillFragment, List<ChatRequest.Message> convo) {

        List<Map<String, Object>> messages = buildMessages(systemPrompt, skillFragment, convo);

        Map<String, Object> requestBody = Map.of(
                "model", "qwen2.5-72b-instruct",  // Correct Qiniu AI model name
                "messages", messages,
                "temperature", 0.8,
                "max_tokens", 800,
                "stream", false
        );

        return executeWithRetry(requestBody);
    }

    /**
     * Assembles the OpenAI-style message list: system prompt, optional skill
     * fragment, then the trailing window of the conversation history.
     * Messages with a blank text or a null role are skipped ({@code Map.of}
     * is null-hostile and would otherwise throw).
     */
    private List<Map<String, Object>> buildMessages(String systemPrompt, String skillFragment, List<ChatRequest.Message> convo) {
        List<Map<String, Object>> messages = new ArrayList<>();

        // Add system prompt
        messages.add(Map.of("role", "system", "content", systemPrompt));

        // Add skill fragment as additional system message if provided
        if (skillFragment != null && !skillFragment.isEmpty()) {
            messages.add(Map.of("role", "system", "content", "技能指导: " + skillFragment));
        }

        // Add conversation history with context awareness
        if (convo != null) {
            // Limit conversation history to the last MAX_HISTORY_MESSAGES to avoid token limits
            int startIndex = Math.max(0, convo.size() - MAX_HISTORY_MESSAGES);
            for (int i = startIndex; i < convo.size(); i++) {
                ChatRequest.Message msg = convo.get(i);
                // Guard both fields: Map.of rejects null values.
                if (msg.role != null && msg.text != null && !msg.text.trim().isEmpty()) {
                    messages.add(Map.of(
                        "role", msg.role,
                        "content", msg.text.trim()
                    ));
                }
            }
        }

        return messages;
    }

    /**
     * Executes the chat-completions call with up to {@link #MAX_RETRIES}
     * attempts and exponential backoff. 4xx responses are not retried
     * (they will not succeed on retry); 5xx and transport/timeout errors are.
     *
     * @return the parsed assistant reply, or a fallback response if all attempts fail
     */
    private ChatController.ChatResponse executeWithRetry(Map<String, Object> requestBody) {
        Exception lastException = null;

        for (int attempt = 1; attempt <= MAX_RETRIES; attempt++) {
            try {
                logger.debug("Attempting Qiniu LLM request, attempt {}/{}", attempt, MAX_RETRIES);

                Map<String, Object> response = client.post()
                        .uri("/chat/completions")
                        .body(Mono.just(requestBody), Map.class)
                        .retrieve()
                        .bodyToMono(Map.class)
                        .timeout(TIMEOUT_DURATION)
                        .block();

                return parseQiniuResponse(response);

            } catch (WebClientResponseException e) {
                lastException = e;
                logger.warn("Qiniu LLM request failed on attempt {}: HTTP {} - {}",
                           attempt, e.getStatusCode(), e.getMessage());

                // Don't retry on client errors (4xx) — the request itself is bad.
                if (e.getStatusCode().is4xxClientError()) {
                    break;
                }
                if (!backoffBeforeRetry(attempt)) {
                    break; // interrupted — stop retrying and fall through to fallback
                }

            } catch (Exception e) {
                lastException = e;
                logger.warn("Qiniu LLM request failed on attempt {}: {}", attempt, e.getMessage());

                if (!backoffBeforeRetry(attempt)) {
                    break; // interrupted — stop retrying and fall through to fallback
                }
            }
        }

        // All retries failed, return fallback response
        logger.error("All Qiniu LLM request attempts failed", lastException);
        return createFallbackResponse(lastException);
    }

    /**
     * Sleeps with exponential backoff (1s, 2s, 4s, ...) before the next attempt.
     * Does nothing after the final attempt.
     *
     * @return {@code false} if the thread was interrupted (interrupt flag is restored)
     */
    private boolean backoffBeforeRetry(int attempt) {
        if (attempt >= MAX_RETRIES) {
            return true; // no sleep needed; the loop will terminate anyway
        }
        try {
            Thread.sleep(BASE_BACKOFF_MILLIS << (attempt - 1)); // true exponential backoff
            return true;
        } catch (InterruptedException ie) {
            Thread.currentThread().interrupt();
            return false;
        }
    }

    /**
     * Extracts the assistant text from an OpenAI-compatible response body
     * ({@code choices[0].message.content}) and logs token usage when present.
     * Any parse problem yields a safe placeholder reply instead of throwing.
     */
    private ChatController.ChatResponse parseQiniuResponse(Map<String, Object> response) {
        String assistantText = "（七牛AI模型暂时无法回复，请稍后再试）";

        if (response == null) {
            // Mono.block() returns null for an empty body — treat as "no reply".
            logger.warn("Qiniu LLM returned an empty response body");
            ChatController.ChatResponse empty = new ChatController.ChatResponse();
            empty.assistant = assistantText;
            return empty;
        }

        try {
            // Use OpenAI-compatible response format
            @SuppressWarnings("unchecked")
            List<Map<String, Object>> choices = (List<Map<String, Object>>) response.get("choices");

            if (choices != null && !choices.isEmpty()) {
                Map<String, Object> firstChoice = choices.get(0);
                @SuppressWarnings("unchecked")
                Map<String, Object> message = (Map<String, Object>) firstChoice.get("message");

                if (message != null) {
                    String content = (String) message.get("content");
                    if (content != null && !content.trim().isEmpty()) {
                        assistantText = content.trim();
                    }
                }
            }

            // Log usage statistics if available (OpenAI-compatible format)
            @SuppressWarnings("unchecked")
            Map<String, Object> usage = (Map<String, Object>) response.get("usage");
            if (usage != null) {
                logger.debug("Qiniu Token usage - Prompt: {}, Completion: {}, Total: {}",
                           usage.get("prompt_tokens"),
                           usage.get("completion_tokens"),
                           usage.get("total_tokens"));
            }

        } catch (Exception e) {
            logger.error("Error parsing Qiniu LLM response", e);
            assistantText = "（回复解析出错，请重试）";
        }

        ChatController.ChatResponse result = new ChatController.ChatResponse();
        result.assistant = assistantText;
        return result;
    }

    /**
     * Builds a user-friendly fallback reply based on the failure type.
     * Walks the cause chain because {@code Mono.block()} rethrows the checked
     * {@link TimeoutException} wrapped in a {@code RuntimeException}, so a
     * plain {@code instanceof} on the top-level exception would miss it.
     */
    private ChatController.ChatResponse createFallbackResponse(Exception exception) {
        String fallbackMessage;

        // Unwrap to the most specific known failure (HTTP error or timeout).
        Throwable cause = exception;
        while (cause != null
                && !(cause instanceof WebClientResponseException)
                && !(cause instanceof TimeoutException)) {
            cause = cause.getCause();
        }

        if (cause instanceof WebClientResponseException) {
            WebClientResponseException webEx = (WebClientResponseException) cause;
            if (webEx.getStatusCode().value() == 429) {
                fallbackMessage = "抱歉，七牛AI服务当前繁忙，请稍后再试。我在这里等您。";
            } else if (webEx.getStatusCode().is5xxServerError()) {
                fallbackMessage = "七牛AI服务暂时不可用，请稍后再试。我会继续陪伴您的。";
            } else {
                fallbackMessage = "遇到了一些技术问题，但我仍然在这里倾听您的需要。请重新描述一下您的情况？";
            }
        } else if (cause instanceof TimeoutException) {
            fallbackMessage = "回复时间有点长，让我重新整理一下思路。请告诉我，您最希望我帮您解决什么问题？";
        } else {
            fallbackMessage = "虽然遇到了技术困难，但我依然关心您的感受。请继续和我分享，我会尽力帮助您。";
        }

        ChatController.ChatResponse response = new ChatController.ChatResponse();
        response.assistant = fallbackMessage;
        return response;
    }
}