package com.starhub.application.agent.service.impl;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.starhub.application.agent.concurrent.ConcurrentTaskExecutor;
import com.starhub.application.agent.enums.StreamEventTypeEnum;
import com.starhub.application.agent.service.IModelService;
import com.starhub.application.function.biz.AiFunctionProvider;
import com.starhub.application.message.dto.Message;
import com.starhub.application.model.biz.ModelConnectionFactory;
import com.starhub.application.model.dto.ModelCallDTO;
import com.starhub.application.model.dto.ModelConfigDto;

import dev.langchain4j.agent.tool.ToolSpecification;
import dev.langchain4j.data.message.*;
import dev.langchain4j.memory.ChatMemory;
import dev.langchain4j.model.StreamingResponseHandler;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.chat.StreamingChatLanguageModel;
import dev.langchain4j.model.output.FinishReason;
import dev.langchain4j.model.output.Response;
import dev.langchain4j.service.AiServices;
import dev.langchain4j.service.Result;
import dev.langchain4j.service.TokenStream;
import dev.langchain4j.service.tool.ToolExecution;
import dev.langchain4j.service.tool.ToolExecutor;
import org.apache.commons.lang3.exception.ExceptionUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.web.servlet.mvc.method.annotation.SseEmitter;

import java.io.IOException;
import java.io.PrintWriter;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.function.Consumer;


@Service
public class ModelServiceImpl implements IModelService {

    private static final Logger log = LoggerFactory.getLogger(ModelServiceImpl.class);
    /** Default SSE connection timeout: 30 minutes. */
    private static final long DEFAULT_TIMEOUT = 30 * 60 * 1000L;
    /** Pause between flushed token batches in the character-buffered SSE path, in milliseconds. */
    private static final long MESSAGE_INTERVAL = 50L;
    /** How long the blocking PrintWriter-based SSE call waits for the stream to finish. */
    private static final long SSE_RESULT_TIMEOUT_MINUTES = 10L;

    /** Synchronous AiServices facade: one user message in, full answer (plus metadata) out. */
    interface Assistant {
        Result<String> chat(String message);
    }

    /** Streaming AiServices facade: one user message in, a {@link TokenStream} of tokens out. */
    interface StreamingAssistant {
        TokenStream chat(String message);
    }

    @Autowired
    private ModelConnectionFactory connectionFactory;

    @Autowired
    private ConcurrentTaskExecutor taskExecutor;

    // NOTE(review): injected but not referenced anywhere in this class — confirm whether
    // function/tool wiring was meant to use it (the ".tools(...)" calls below are commented out).
    @Autowired
    private AiFunctionProvider aiFunctionProvider;

    /**
     * Simple one-shot, non-streaming call.
     *
     * @param mark    model identifier used to look up a connection from the factory
     * @param message raw user message
     * @return the model's full text response
     */
    @Override
    public String call(String mark, String message) {
        // Resolve a non-streaming model instance by its mark.
        ChatLanguageModel chatModel = connectionFactory.getChatConnection(mark);
        return chatModel.generate(message);
    }

    /**
     * Executes a non-streaming chat turn through the per-agent task executor,
     * using the messages already accumulated in {@code chatMemory}.
     *
     * @param mark               model identifier
     * @param chatMemory         conversation memory whose messages are sent to the model
     * @param agentId            agent id used as the concurrency key for the task executor
     * @param availableFunctions currently unused here — NOTE(review): confirm whether tool
     *                           specifications were meant to be passed to the model
     * @param historyMessages    currently unused here — NOTE(review): confirm intent
     * @return the model's {@link AiMessage}
     * @throws RuntimeException wrapping any failure from the executor or the model call
     */
    @Override
    public AiMessage call(String mark, ChatMemory chatMemory, Long agentId, List availableFunctions, List<Message> historyMessages) {
        // Resolve a non-streaming model instance.
        ChatLanguageModel chatModel = connectionFactory.getChatConnection(mark);

        // Snapshot of the conversation so far.
        List<ChatMessage> chatMessages = chatMemory.messages();

        try {
            // Run the blocking model call under the per-agent executor.
            return taskExecutor.execute(agentId, () -> chatModel.generate(chatMessages).content());
        } catch (Exception e) {
            log.error("执行对话任务失败", e);
            // Chain the cause so callers keep the full stack trace.
            throw new RuntimeException("执行对话任务失败: " + e.getMessage(), e);
        }
    }

    /**
     * Non-streaming chat through an AiServices-generated {@link Assistant}.
     *
     * @param modelCallDTO carries the model mark, message, memory, tools and agent id
     * @return the assistant's {@link Result} (content plus metadata)
     * @throws RuntimeException wrapping any failure from the executor or the model call
     */
    @Override
    public Result<String> call(ModelCallDTO modelCallDTO) {
        // Per-call model configuration (e.g. max tokens) taken from the agent's settings.
        ModelConfigDto modelConfigDto = ModelConfigDto.builder()
                .maxTokens(modelCallDTO.getMaxtoken())
                .build();
        ChatLanguageModel chatModel = connectionFactory.getChatConnection(modelCallDTO.getMark(), modelConfigDto);

        String message = modelCallDTO.getMessage();
        ChatMemory chatMemory = modelCallDTO.getChatMemory();
        // NOTE(review): tools are fetched but wiring is disabled below — confirm intent.
        Map<ToolSpecification, ToolExecutor> tools = modelCallDTO.getTools();

        try {
            Assistant assistant = AiServices.builder(Assistant.class)
                                            .chatLanguageModel(chatModel)
                                            .chatMemory(chatMemory)
//                                            .tools(tools)
                                            .build();
            // Run the blocking call under the per-agent executor.
            return taskExecutor.execute(modelCallDTO.getAgentId(), () -> assistant.chat(message));
        } catch (Exception e) {
            log.error("执行对话任务失败", e);
            // Chain the cause so callers keep the full stack trace.
            throw new RuntimeException("执行对话任务失败: " + e.getMessage(), e);
        }
    }

    /**
     * Streaming chat exposed as a Spring {@link SseEmitter}.
     * <p>
     * Event names come from {@link StreamEventTypeEnum}: START is sent before the stream
     * begins, TEXT per token, TOOL_CALLS when a tool is executed, STOP on completion and
     * ERROR on failure.
     *
     * @param modelCallDTO carries the model mark, message, memory, tools and agent id
     * @return an emitter that the caller returns to the HTTP layer; events are pushed
     *         asynchronously by the token stream callbacks
     */
    @Override
    public SseEmitter callSse(ModelCallDTO modelCallDTO) {
        // Emitter with an explicit timeout; Spring completes it when the client disconnects.
        SseEmitter emitter = new SseEmitter(DEFAULT_TIMEOUT);

        emitter.onTimeout(() -> {
            log.info("SSE connection timed out");
            emitter.complete();
        });

        emitter.onCompletion(() -> {
            log.info("SSE connection completed");
        });

        try {
            // Per-call model configuration taken from the agent's settings.
            ModelConfigDto modelConfigDto = ModelConfigDto.builder()
                .maxTokens(modelCallDTO.getMaxtoken())
                .build();
            StreamingChatLanguageModel chatModel = connectionFactory.getStreamingConnection(modelCallDTO.getMark(), modelConfigDto);
            StreamingAssistant assistant = AiServices.builder(StreamingAssistant.class)
                                            .streamingChatLanguageModel(chatModel)
                                            .chatMemory(modelCallDTO.getChatMemory())
//                                            .tools(modelCallDTO.getTools())
                                            .build();
            TokenStream tokenStream = assistant.chat(modelCallDTO.getMessage());

            // Stream failure: forward the message as an ERROR event, then close the emitter.
            Consumer<Throwable> onError = (Throwable error) -> {
                // Pass the throwable as the last argument so SLF4J logs the stack trace
                // (the previous "%s" placeholder is not SLF4J syntax and was ignored).
                log.error("Error in streaming response", error);
                try {
                    emitter.send(SseEmitter.event()
                                           .data("Error: " + error.getMessage())
                                           .id(String.valueOf(System.currentTimeMillis()))
                                           .name(StreamEventTypeEnum.ERROR.getCode()));
                    emitter.complete();
                } catch (IOException e) {
                    log.error("Error sending error event", e);
                    emitter.completeWithError(e);
                }
            };

            // One TEXT event per token.
            Consumer<String> onNext = (String token) -> {
                try {
                    emitter.send(SseEmitter.event()
                                           .data(token)
                                           .id(String.valueOf(System.currentTimeMillis()))
                                           .name(StreamEventTypeEnum.TEXT.getCode()));
                } catch (Exception e) {
                    log.error(ExceptionUtils.getStackTrace(e));
                }
            };

            // Normal end of stream: STOP event, token-usage accounting, then close.
            Consumer<Response<AiMessage>> onComplete = (Response<AiMessage> response) -> {
                try {
                    emitter.send(SseEmitter.event()
                                           .data("stop")
                                           .id(String.valueOf(System.currentTimeMillis()))
                                           .name(StreamEventTypeEnum.STOP.getCode()));

                    if (response.tokenUsage() != null) {
                        log.info("Token usage - Input: {}, Output: {}, Total: {}",
                                response.tokenUsage().inputTokenCount(),
                                response.tokenUsage().outputTokenCount(),
                                response.tokenUsage().totalTokenCount());
                    }

                    emitter.complete();

                } catch (IOException e) {
                    log.error(ExceptionUtils.getStackTrace(e));
                }
            };

            // Tool execution notification: only the tool name is forwarded.
            Consumer<ToolExecution> onToolExecuted = (ToolExecution execution) -> {
                try {
                    emitter.send(SseEmitter.event()
                                           .data(execution.request().name())
                                           .id(String.valueOf(System.currentTimeMillis()))
                                           .name(StreamEventTypeEnum.TOOL_CALLS.getCode()));

                } catch (IOException e) {
                    log.error(ExceptionUtils.getStackTrace(e));
                }
            };

            // Tell the client the stream is about to start.
            emitter.send(SseEmitter.event().name(StreamEventTypeEnum.START.getCode()).data(""));

            tokenStream.onNext(onNext).onError(onError).onComplete(onComplete).onToolExecuted(onToolExecuted);
            tokenStream.start();
        } catch (Exception e) {
            log.error("Error in callSse", e);
            try {
                emitter.send(SseEmitter.event()
                    .data("Error: " + e.getMessage())
                    .id(String.valueOf(System.currentTimeMillis()))
                    .name("error"));
                emitter.complete();
            } catch (IOException ex) {
                log.error("Error sending error event", ex);
                emitter.completeWithError(ex);
            }
        }

        return emitter;
    }

    /**
     * Streaming chat written directly to a servlet {@link PrintWriter} in SSE wire format,
     * blocking until the stream finishes (or a tool is executed — see
     * {@link #onSSEToolExecuted}).
     *
     * @param modelCallDTO carries the model mark, message, memory, tools and agent id
     * @param ioWriter     the response writer; events are written via {@link #sendSSEEvent}
     * @return the final model {@link Response}
     * @throws TimeoutException if the stream does not finish within
     *                          {@link #SSE_RESULT_TIMEOUT_MINUTES} minutes
     */
    @Override
    public Response<AiMessage> callSse(ModelCallDTO modelCallDTO, PrintWriter ioWriter) throws IOException, ExecutionException, InterruptedException, TimeoutException {
        // One id per call, reused as the SSE event id for every event of this stream.
        String uuid = UUID.randomUUID().toString();

        // Per-call model configuration taken from the agent's settings.
        ModelConfigDto modelConfigDto = ModelConfigDto.builder()
                .maxTokens(modelCallDTO.getMaxtoken())
                .build();
        StreamingChatLanguageModel chatModel = connectionFactory.getStreamingConnection(modelCallDTO.getMark(), modelConfigDto);
        StreamingAssistant assistant = AiServices.builder(StreamingAssistant.class)
                .streamingChatLanguageModel(chatModel)
                .chatMemory(modelCallDTO.getChatMemory())
//                .tools(modelCallDTO.getTools())
                .build();
        TokenStream tokenStream = assistant.chat(modelCallDTO.getMessage());

        // Bridges the async callbacks to this blocking method.
        CompletableFuture<Response<AiMessage>> futureResponse = new CompletableFuture<>();

        Consumer<Throwable> onError = onSSEError(ioWriter, uuid, futureResponse);
        Consumer<String> onNext = onSSENext(ioWriter, uuid, futureResponse);
        Consumer<ToolExecution> onToolExecuted = onSSEToolExecuted(ioWriter, uuid, futureResponse);
        Consumer<Response<AiMessage>> onComplete = onSSEComplete(ioWriter, uuid, futureResponse);

        // Tell the client the stream is about to start.
        sendSSEEvent(ioWriter, uuid, StreamEventTypeEnum.START.getCode(), "");
        tokenStream.onNext(onNext).onError(onError).onComplete(onComplete).onToolExecuted(onToolExecuted);
        tokenStream.start();

        return futureResponse.get(SSE_RESULT_TIMEOUT_MINUTES, TimeUnit.MINUTES);
    }

    /**
     * Streaming chat via {@link SseEmitter}, buffering tokens and flushing on punctuation,
     * whitespace, or every 10 buffered characters, with a small delay between flushes.
     * <p>
     * NOTE(review): this path uses literal event names ("start"/"message"/"complete"/"error")
     * rather than {@link StreamEventTypeEnum}; clients may depend on them, so they are kept.
     *
     * @param mark    model identifier
     * @param message raw user message
     * @return an emitter the caller returns to the HTTP layer
     */
    @Override
    public SseEmitter callSse(String mark, String message) {
        SseEmitter emitter = new SseEmitter(DEFAULT_TIMEOUT);

        emitter.onTimeout(() -> {
            log.info("SSE connection timed out");
            emitter.complete();
        });

        emitter.onCompletion(() -> {
            log.info("SSE connection completed");
        });

        try {
            // Resolve a streaming model instance by its mark.
            StreamingChatLanguageModel chatModel = connectionFactory.getStreamingConnection(mark);

            // Tell the client the stream is about to start.
            emitter.send(SseEmitter.event().name("start").data(""));

            chatModel.generate(message, new StreamingResponseHandler<AiMessage>() {
                // Accumulates tokens until a flush boundary is reached.
                private final StringBuilder responseBuilder = new StringBuilder();

                @Override
                public void onNext(String token) {
                    try {
                        responseBuilder.append(token);

                        // Flush on punctuation/space, or once 10+ characters are buffered.
                        if (isPunctuationOrSpace(token) || responseBuilder.length() >= 10) {
                            emitter.send(SseEmitter.event()
                                .data(responseBuilder.toString())
                                .id(String.valueOf(System.currentTimeMillis()))
                                .name("message"));
                            responseBuilder.setLength(0);

                            // Throttle flushes so the client renders smoothly.
                            TimeUnit.MILLISECONDS.sleep(MESSAGE_INTERVAL);
                        }
                    } catch (InterruptedException e) {
                        // Restore the interrupt flag before delegating to the error path.
                        Thread.currentThread().interrupt();
                        onError(e);
                    } catch (Exception e) {
                        onError(e);
                    }
                }

                @Override
                public void onComplete(Response<AiMessage> response) {
                    try {
                        // Flush whatever is still buffered.
                        if (responseBuilder.length() > 0) {
                            emitter.send(SseEmitter.event()
                                .data(responseBuilder.toString())
                                .id(String.valueOf(System.currentTimeMillis()))
                                .name("message"));
                        }

                        // Signal normal end of stream.
                        emitter.send(SseEmitter.event()
                            .data("")
                            .id(String.valueOf(System.currentTimeMillis()))
                            .name("complete"));

                        emitter.complete();

                        if (response.tokenUsage() != null) {
                            log.info("Token usage - Input: {}, Output: {}, Total: {}",
                                response.tokenUsage().inputTokenCount(),
                                response.tokenUsage().outputTokenCount(),
                                response.tokenUsage().totalTokenCount());
                        }
                    } catch (IOException e) {
                        onError(e);
                    }
                }

                @Override
                public void onError(Throwable error) {
                    log.error("Error in streaming response", error);
                    try {
                        emitter.send(SseEmitter.event()
                            .data("Error: " + error.getMessage())
                            .id(String.valueOf(System.currentTimeMillis()))
                            .name("error"));
                        emitter.complete();
                    } catch (IOException e) {
                        log.error("Error sending error event", e);
                        emitter.completeWithError(e);
                    }
                }
            });

        } catch (Exception e) {
            log.error("Error in callSse", e);
            try {
                emitter.send(SseEmitter.event()
                    .data("Error: " + e.getMessage())
                    .id(String.valueOf(System.currentTimeMillis()))
                    .name("error"));
                emitter.complete();
            } catch (IOException ex) {
                log.error("Error sending error event", ex);
                emitter.completeWithError(ex);
            }
        }

        return emitter;
    }

    /**
     * Returns {@code true} if {@code token} is a single ASCII punctuation character,
     * space, or line break — the flush boundaries for the buffered SSE path.
     */
    private boolean isPunctuationOrSpace(String token) {
        if (token.length() != 1) {
            return false;
        }
        char c = token.charAt(0);
        return c == ' ' || c == ',' || c == '.' || c == '!' || c == '?' ||
               c == ';' || c == ':' || c == '\n' || c == '\r';
    }

    /**
     * Writes one event to {@code writer} in SSE wire format
     * ({@code event:}/{@code data:}/{@code id:} lines terminated by a blank line)
     * and flushes it.
     *
     * @param writer    destination writer
     * @param id        event id (one UUID per stream in this class)
     * @param eventName optional event name; omitted when null/empty
     * @param data      payload; newlines are escaped, then the value is JSON-quoted
     * @throws IOException if the writer reports an error after flushing
     */
    public void sendSSEEvent(PrintWriter writer, String id, String eventName, String data) throws IOException {
        if (eventName != null && !eventName.isEmpty()) {
            writer.write("event: " + eventName + "\n");
        }

        // Escape line breaks so a payload never terminates the SSE frame early,
        // then JSON-encode so the client can decode it uniformly.
        data = data.replace("\n", "\\n").replace("\r", "");
        writer.write("data: " + JSON.toJSONString(data) + "\n");

        writer.write("id: " + id + "\n");

        // Blank line terminates the event.
        writer.write("\n");
        writer.flush();
        // PrintWriter swallows IOExceptions; surface them via checkError().
        if (writer.checkError()) {
            throw new IOException("Error writing SSE event");
        }
    }

    /** Builds the per-token callback: forwards each token as a TEXT event unless the future is already settled. */
    private Consumer<String> onSSENext(PrintWriter ioWriter, String id, CompletableFuture<Response<AiMessage>> future) {
        return (String token) -> {
            try {
                // isDone() also covers cancellation and exceptional completion.
                if (future.isDone()) {
                    return;
                }
                sendSSEEvent(ioWriter, id, StreamEventTypeEnum.TEXT.getCode(), token);
            } catch (IOException e) {
                log.error(ExceptionUtils.getStackTrace(e));
                future.completeExceptionally(e);
            }
        };
    }

    /** Builds the error callback: forwards an ERROR event, then fails the future with the original cause. */
    private Consumer<Throwable> onSSEError(PrintWriter ioWriter, String id, CompletableFuture<Response<AiMessage>> future) {
        return (Throwable error) -> {
            // Pass the throwable as the last argument so SLF4J logs the stack trace
            // (the previous "%s" placeholder is not SLF4J syntax and was ignored).
            log.error("Error in streaming response", error);
            try {
                sendSSEEvent(ioWriter, id, StreamEventTypeEnum.ERROR.getCode(), "Error: " + error.getMessage());
            } catch (IOException e) {
                log.error("Error sending SSE error event", e);
            } finally {
                // Always unblock the waiting caller with the original error.
                future.completeExceptionally(error);
            }
        };
    }

    /**
     * Builds the tool-execution callback: forwards a TOOL_CALLS event and completes the
     * future immediately with a synthetic TOOL_EXECUTION response.
     * <p>
     * NOTE(review): completing here returns control to the blocking caller before the
     * underlying stream finishes — confirm this early-return is intentional.
     */
    private Consumer<ToolExecution> onSSEToolExecuted(PrintWriter ioWriter, String id, CompletableFuture<Response<AiMessage>> future) {
        return (ToolExecution execution) -> {
            try {
                sendSSEEvent(ioWriter, id, StreamEventTypeEnum.TOOL_CALLS.getCode(), execution.request().name());
                future.complete(new Response<>(new AiMessage(execution.result(), List.of(execution.request())), null, FinishReason.TOOL_EXECUTION));
            } catch (IOException e) {
                log.error(ExceptionUtils.getStackTrace(e));
                future.completeExceptionally(e);
            }
        };
    }

    /** Builds the completion callback: STOP event, USAGE event with token accounting, then completes the future. */
    private Consumer<Response<AiMessage>> onSSEComplete(PrintWriter ioWriter, String id, CompletableFuture<Response<AiMessage>> future) {
        return (Response<AiMessage> response) -> {
            try {
                sendSSEEvent(ioWriter, id, StreamEventTypeEnum.STOP.getCode(), "stop");

                if (response.tokenUsage() != null) {
                    log.info("Token usage - Input: {}, Output: {}, Total: {}",
                            response.tokenUsage().inputTokenCount(),
                            response.tokenUsage().outputTokenCount(),
                            response.tokenUsage().totalTokenCount());
                    JSONObject usageObj = new JSONObject();
                    usageObj.put("input", response.tokenUsage().inputTokenCount());
                    usageObj.put("output", response.tokenUsage().outputTokenCount());
                    usageObj.put("total", response.tokenUsage().totalTokenCount());
                    sendSSEEvent(ioWriter, id, StreamEventTypeEnum.USAGE.getCode(), usageObj.toJSONString());
                }
            } catch (IOException e) {
                log.error(ExceptionUtils.getStackTrace(e));
            } finally {
                // Always unblock the waiting caller, even if the usage event failed to send.
                future.complete(response);
            }
        };
    }
}