package com.virtualperson.service.impl;

import com.alibaba.dashscope.aigc.generation.Generation;
import com.alibaba.dashscope.aigc.generation.GenerationOutput;
import com.alibaba.dashscope.aigc.generation.GenerationParam;
import com.alibaba.dashscope.aigc.generation.GenerationResult;
import com.alibaba.dashscope.common.Message;
import com.alibaba.dashscope.common.ResultCallback;
import com.alibaba.dashscope.common.Role;
import com.alibaba.dashscope.exception.InputRequiredException;
import com.alibaba.dashscope.exception.NoApiKeyException;
import com.virtualperson.config.AliyunConfig;
import com.virtualperson.dto.ChatRequest;
import com.virtualperson.dto.ChatResponse;
import com.virtualperson.service.AliyunAIService;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Service;

import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;

@Slf4j
@Service
@RequiredArgsConstructor
/**
 * {@link AliyunAIService} implementation backed by the Alibaba DashScope
 * Generation SDK (qwen models). Supports a synchronous one-shot chat and a
 * streaming chat that pushes incremental text fragments through a callback.
 *
 * <p>Thread-safety: stateless apart from the injected config and a single
 * daemon scheduler used for stream timeouts; safe to use as a singleton bean.
 */
public class AliyunAIServiceImpl implements AliyunAIService {

    private final AliyunConfig aliyunConfig;

    // Daemon thread so the timeout scheduler never keeps the JVM alive on shutdown.
    private final ScheduledExecutorService scheduler = Executors.newScheduledThreadPool(1, r -> {
        Thread t = new Thread(r, "aliyun-ai-timeout");
        t.setDaemon(true);
        return t;
    });

    private static final String DEFAULT_MODEL = "qwen-turbo";
    private static final Double DEFAULT_TEMPERATURE = 0.7;
    private static final int DEFAULT_MAX_TOKENS = 2048;
    private static final int MAX_WAIT_TIME = 60; // maximum wait time for a stream, in seconds

    /**
     * Synchronous chat: blocks until the model returns the full answer.
     * Never throws — all failures are converted into an unsuccessful
     * {@link ChatResponse} carrying an error message.
     *
     * @param request prompt, optional history and generation parameters
     * @return a successful response with the model text, or a failed response
     */
    @Override
    public ChatResponse chat(ChatRequest request) {
        try {
            log.info("开始AI聊天，请求参数: {}", request);

            // Synchronous mode: return the complete result in one shot.
            return handleSyncMode(request);

        } catch (NoApiKeyException e) {
            log.error("阿里云API密钥未配置", e);
            return failure("API密钥未配置，请联系管理员");
        } catch (InputRequiredException e) {
            log.error("输入参数错误", e);
            return failure("输入参数错误: " + e.getMessage());
        } catch (Exception e) {
            log.error("调用阿里云大模型失败", e);
            return failure("AI聊天失败: " + e.getMessage());
        }
    }

    /**
     * Streaming chat: pushes incremental text to {@code callback.onText}, then
     * exactly one terminal event — {@code onComplete} with the full text, or
     * {@code onError}. A {@value #MAX_WAIT_TIME}-second watchdog fires
     * {@code onError} if the stream stalls; it is cancelled once the stream
     * finishes, so a successful call can no longer be followed by a spurious
     * timeout error (previous bug: the scheduled task was never cancelled).
     *
     * @param request  prompt, optional history and generation parameters
     * @param callback receiver for incremental text and terminal events
     */
    @Override
    public void chatStream(ChatRequest request, AliyunAIService.StreamCallback callback) {
        // Ensures exactly one terminal callback (timeout, complete or error) is delivered.
        final AtomicBoolean finished = new AtomicBoolean(false);
        ScheduledFuture<?> scheduledTimeout = null;
        try {
            log.info("开始AI流式聊天，请求参数: {}", request);

            // Build the message list (history + current prompt).
            List<Message> messages = buildMessages(request);
            log.info("构建的消息列表: {}", messages);

            GenerationParam param = buildParam(request, messages);

            log.info("流式AI聊天参数: model={}, temperature={}, maxTokens={}, messagesCount={}, apiKey={}",
                param.getModel(), param.getTemperature(), param.getMaxTokens(), messages.size(),
                param.getApiKey() != null ? "已设置" : "未设置");

            // Watchdog: fail the stream if no terminal event arrives in time.
            final ScheduledFuture<?> timeoutTask = scheduler.schedule(() -> {
                if (finished.compareAndSet(false, true)) {
                    log.warn("流式AI聊天超时（{}秒）", MAX_WAIT_TIME);
                    callback.onError("AI聊天超时，请稍后重试");
                }
            }, MAX_WAIT_TIME, TimeUnit.SECONDS);
            scheduledTimeout = timeoutTask;

            Generation gen = new Generation();

            log.info("开始调用阿里云流式API...");

            gen.streamCall(param, new ResultCallback<GenerationResult>() {
                private final StringBuilder fullText = new StringBuilder();
                private boolean hasReceivedData = false;
                private int eventCount = 0;

                @Override
                public void onEvent(GenerationResult result) {
                    eventCount++;
                    log.info("收到第{}个流式响应事件: {}", eventCount, result);
                    hasReceivedData = true;

                    // Text may arrive as output.text or choices[0].message.content
                    // depending on the result format.
                    String text = extractText(result);
                    if (text == null) {
                        log.warn("流式响应中没有找到文本内容: {}", result);
                        return;
                    }

                    // Some models stream cumulative text rather than deltas;
                    // derive the incremental fragment relative to what we have.
                    String incrementalText = text;
                    if (fullText.length() > 0) {
                        if (text.startsWith(fullText.toString())) {
                            incrementalText = text.substring(fullText.length());
                        } else {
                            // The new text does not extend what we have — treat it
                            // as a fresh start and reset the accumulator.
                            log.warn("新文本不包含已有文本，重置: 新文本={}, 已有文本={}", text, fullText.toString());
                            fullText.setLength(0);
                        }
                    }

                    if (!incrementalText.isEmpty()) {
                        fullText.append(incrementalText);
                        // Push the incremental fragment to the caller in real time.
                        log.info("推送流式文本片段: {}", incrementalText);
                        callback.onText(incrementalText);
                    }
                }

                @Override
                public void onComplete() {
                    timeoutTask.cancel(false);
                    log.info("流式AI聊天完成，总事件数: {}, 总文本: {}", eventCount, fullText.toString());
                    if (finished.compareAndSet(false, true)) {
                        if (!hasReceivedData) {
                            log.warn("流式聊天完成但没有收到任何数据");
                            callback.onError("未收到AI回复，请稍后重试");
                        } else {
                            callback.onComplete(fullText.toString());
                        }
                    }
                }

                @Override
                public void onError(Exception e) {
                    timeoutTask.cancel(false);
                    log.error("流式AI聊天出错", e);
                    if (finished.compareAndSet(false, true)) {
                        callback.onError("流式AI聊天失败: " + e.getMessage());
                    }
                }
            });

            log.info("阿里云流式API调用已启动");

        } catch (NoApiKeyException e) {
            log.error("阿里云API密钥未配置", e);
            failStream(finished, scheduledTimeout, callback, "API密钥未配置，请联系管理员");
        } catch (InputRequiredException e) {
            log.error("输入参数错误", e);
            failStream(finished, scheduledTimeout, callback, "输入参数错误: " + e.getMessage());
        } catch (Exception e) {
            log.error("流式AI聊天失败", e);
            failStream(finished, scheduledTimeout, callback, "流式AI聊天失败: " + e.getMessage());
        }
    }

    /**
     * Handles the synchronous (non-streaming) call path.
     *
     * @throws NoApiKeyException      if no API key is configured
     * @throws InputRequiredException if the SDK rejects the input as incomplete
     */
    private ChatResponse handleSyncMode(ChatRequest request) throws NoApiKeyException, InputRequiredException {
        List<Message> messages = buildMessages(request);
        GenerationParam param = buildParam(request, messages);

        log.info("AI聊天参数: model={}, temperature={}, maxTokens={}, messagesCount={}",
            param.getModel(), param.getTemperature(), param.getMaxTokens(), messages.size());

        Generation gen = new Generation();
        GenerationResult result = gen.call(param);

        log.info("AI聊天完成，结果: {}", result);

        // With ResultFormat.MESSAGE the text lives in choices[0].message.content,
        // not output.text — extractText checks both (previous bug: only getText()
        // was read here, yielding a null response in MESSAGE format).
        String text = extractText(result);
        if (text == null) {
            return failure("未收到AI回复，请稍后重试");
        }

        ChatResponse response = new ChatResponse();
        response.setSuccess(true);
        response.setRequestId(UUID.randomUUID().toString());
        response.setResponse(text);
        return response;
    }

    /**
     * Convenience wrapper: single prompt, default parameters, synchronous mode.
     */
    @Override
    public ChatResponse simpleChat(String prompt) {
        ChatRequest request = new ChatRequest();
        request.setPrompt(prompt);
        request.setTemperature(DEFAULT_TEMPERATURE);
        request.setMaxTokens(DEFAULT_MAX_TOKENS);
        request.setStreamMode(false); // simple chat defaults to synchronous mode
        return chat(request);
    }

    @Override
    public AliyunConfig getAliyunConfig() {
        return aliyunConfig;
    }

    /**
     * Builds the shared {@link GenerationParam} used by both the sync and
     * streaming paths (previously duplicated in each).
     */
    private GenerationParam buildParam(ChatRequest request, List<Message> messages) {
        return GenerationParam.builder()
                .model(DEFAULT_MODEL)
                .messages(messages)
                .temperature(request.getTemperature() != null
                        ? request.getTemperature().floatValue()
                        : DEFAULT_TEMPERATURE.floatValue())
                .maxTokens(request.getMaxTokens() != null ? request.getMaxTokens() : DEFAULT_MAX_TOKENS)
                .apiKey(aliyunConfig.getApiKey())
                .resultFormat(GenerationParam.ResultFormat.MESSAGE)
                .build();
    }

    /**
     * Extracts the answer text from a result, checking {@code output.text}
     * first and falling back to {@code choices[0].message.content}.
     *
     * @return the text, or {@code null} if the result carries none
     */
    private static String extractText(GenerationResult result) {
        if (result == null || result.getOutput() == null) {
            return null;
        }
        GenerationOutput output = result.getOutput();
        if (output.getText() != null) {
            return output.getText();
        }
        if (output.getChoices() != null && !output.getChoices().isEmpty()) {
            GenerationOutput.Choice choice = output.getChoices().get(0);
            if (choice != null && choice.getMessage() != null) {
                return choice.getMessage().getContent();
            }
        }
        return null;
    }

    /**
     * Builds an unsuccessful {@link ChatResponse} with the given error message.
     */
    private static ChatResponse failure(String errorMessage) {
        ChatResponse response = new ChatResponse();
        response.setSuccess(false);
        response.setErrorMessage(errorMessage);
        return response;
    }

    /**
     * Terminates a stream on a synchronous failure: cancels the pending
     * timeout (if any) and delivers {@code onError} at most once.
     */
    private static void failStream(AtomicBoolean finished, ScheduledFuture<?> timeoutTask,
                                   AliyunAIService.StreamCallback callback, String errorMessage) {
        if (timeoutTask != null) {
            timeoutTask.cancel(false);
        }
        if (finished.compareAndSet(false, true)) {
            callback.onError(errorMessage);
        }
    }

    /**
     * Builds the DashScope message list: conversation history (if any)
     * followed by the current user prompt (if non-blank).
     */
    private List<Message> buildMessages(ChatRequest request) {
        List<Message> messages = new ArrayList<>();

        // Prior conversation turns, preserving their original roles.
        if (request.getHistory() != null && !request.getHistory().isEmpty()) {
            for (ChatRequest.Message historyMsg : request.getHistory()) {
                messages.add(Message.builder()
                        .role(historyMsg.getRole())
                        .content(historyMsg.getContent())
                        .build());
            }
        }

        // Current user message.
        if (request.getPrompt() != null && !request.getPrompt().trim().isEmpty()) {
            messages.add(Message.builder()
                    .role(Role.USER.getValue())
                    .content(request.getPrompt())
                    .build());
        }

        return messages;
    }
}