package com.example.aigc_education.integration.impl;

import com.agentsflex.core.llm.ChatContext;
import com.agentsflex.core.llm.StreamResponseListener;
import com.agentsflex.core.llm.response.AiMessageResponse;
import com.agentsflex.core.message.AiMessage;
import com.agentsflex.core.message.HumanMessage;
import com.agentsflex.core.prompt.HistoriesPrompt;
import com.agentsflex.llm.chatglm.ChatglmLlm;
import com.agentsflex.llm.chatglm.ChatglmLlmConfig;
import com.alibaba.fastjson.JSON;
import com.example.aigc_education.common.BusinessException;
import com.example.aigc_education.constant.CacheConstants;
import com.example.aigc_education.domain.request.ChatAiContentRequest;
import com.example.aigc_education.integration.ChatGLMFileIntegration;
import com.example.aigc_education.integration.config.RestTemplateUtils;
import com.example.aigc_education.integration.request.ChatMessageRequest;
import com.example.aigc_education.integration.response.DeleteFileResponse;
import com.example.aigc_education.utils.*;
import com.fasterxml.jackson.core.type.TypeReference;
import com.zhipu.oapi.ClientV4;
import com.zhipu.oapi.Constants;
import com.zhipu.oapi.core.response.HttpxBinaryResponseContent;
import com.zhipu.oapi.service.v4.file.FileApiResponse;
import com.zhipu.oapi.service.v4.file.FileDeleted;
import com.zhipu.oapi.service.v4.file.UploadFileRequest;
import com.zhipu.oapi.service.v4.model.*;
import io.reactivex.Flowable;
import lombok.extern.slf4j.Slf4j;
import org.springframework.http.*;
import org.springframework.stereotype.Service;
import org.springframework.web.client.RestClientException;
import org.springframework.web.client.RestTemplate;
import org.springframework.web.servlet.mvc.method.annotation.SseEmitter;

import javax.annotation.Resource;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;

import static com.example.aigc_education.integration.request.ChatMessageRequest.Role.AI;
import static com.example.aigc_education.integration.request.ChatMessageRequest.Role.USER;

@Service
@Slf4j
public class ChatGLMFileIntegrationImpl implements ChatGLMFileIntegration {

    // SECURITY NOTE(review): API key is hard-coded in source. Move it to external
    // configuration (application.yml / environment variable) and rotate this key.
    private static final String API_KEY = "3f27ec6531bfcfde704b7d3dc9979a02.xej75qtuJfG84U2K";

    /** Model name shared by every chat entry point in this class. */
    private static final String MODEL_NAME = "glm-4-long";

    @Resource
    private RestTemplate restTemplate;

    @Resource
    private RedisCache redisCache;

    /** Zhipu open-platform SDK client, shared as a single static instance. */
    private static final ClientV4 client = new ClientV4.Builder(API_KEY)
            .build();

    /**
     * Downloads the extraction result of a previously uploaded file, then deletes
     * the remote copy.
     *
     * @param fileId id returned by {@link #uploadFile(String)}
     * @return the {@code content} field of the extraction result JSON
     * @throws BusinessException if the content cannot be read or the remote file
     *                           cannot be deleted
     */
    @Override
    public String ocrResult(String fileId) {
        String result;
        try {
            HttpxBinaryResponseContent responseContent = client.fileContent(fileId);
            String contentText = responseContent.getText();
            result = JSON.parseObject(contentText).getString("content");

            // The extracted text has been read; the remote copy is no longer needed.
            DeleteFileResponse response = deleteFile(fileId);
            // Null-safe check: the HTTP body (and its "deleted" flag) may be absent.
            if (response != null && Boolean.TRUE.equals(response.getDeleted())) {
                log.info("ChatGLMFileIntegrationImpl File deleted successfully: {}", fileId);
            } else {
                log.error("ChatGLMFileIntegrationImpl Failed to delete file: {}", fileId);
                throw new BusinessException("Failed to delete file: " + fileId);
            }
        } catch (IOException e) {
            // Log the root cause before wrapping: BusinessException only carries a message.
            log.error("ChatGLMFileIntegrationImpl ocrResult failed, fileId: {}", fileId, e);
            throw new BusinessException("文件解析失败");
        }
        return result;
    }

    /**
     * Uploads a local file for text extraction (purpose {@code file-extract}).
     *
     * @param filePath local path of the file to upload
     * @return the server-side file id
     * @throws BusinessException if the upload response carries no data
     */
    @Override
    public String uploadFile(String filePath) {
        UploadFileRequest request = new UploadFileRequest();
        request.setFilePath(filePath);
        request.setPurpose("file-extract");
        FileApiResponse fileApiResponse = client.invokeUploadFileApi(request);
        // Fail fast with a meaningful message instead of an NPE on getData().getId().
        if (fileApiResponse == null || fileApiResponse.getData() == null) {
            throw new BusinessException("Failed to upload file: " + filePath);
        }
        return fileApiResponse.getData().getId();
    }

    /**
     * Queries the processing status of an uploaded file.
     *
     * @param fileId server-side file id
     * @return currently always {@code null} — not implemented yet
     */
    @Override
    public String queryFileStatus(String fileId) {
        // TODO: implement file-status polling against the open-platform API.
        return null;
    }

    /**
     * Deletes a remote file via the raw REST endpoint, equivalent to:
     * <pre>
     * curl DELETE 'https://open.bigmodel.cn/api/paas/v4/files/{fileID}' \
     *      --header 'Authorization: Bearer &lt;apikey&gt;' \
     *      --header 'Content-Type: application/json'
     * </pre>
     *
     * @param fileId server-side file id
     * @return the parsed deletion response body (may be {@code null} on an empty body)
     * @throws BusinessException on a non-OK status or transport error
     */
    private DeleteFileResponse deleteFile(String fileId) {
        String url = "https://open.bigmodel.cn/api/paas/v4/files/" + fileId;
        log.info("ChatGLMFileIntegrationImpl Deleting file url: {}", url);
        HttpHeaders headers = new HttpHeaders();
        headers.set("Authorization", "Bearer " + API_KEY);
        headers.setContentType(MediaType.APPLICATION_JSON);

        HttpEntity<String> entity = new HttpEntity<>(headers);

        try {
            ResponseEntity<DeleteFileResponse> response = restTemplate.exchange(url, HttpMethod.DELETE, entity, DeleteFileResponse.class);
            if (response.getStatusCode() == HttpStatus.OK) {
                return response.getBody();
            } else {
                throw new BusinessException("Failed to delete file: " + response.getStatusCode());
            }
        } catch (RestClientException e) {
            // Preserve the cause in the log; BusinessException only accepts a message.
            log.error("ChatGLMFileIntegrationImpl deleteFile error, fileId: {}", fileId, e);
            throw new BusinessException("Error occurred while deleting file: " + e.getMessage());
        }
    }

    /**
     * Streams an AI reply over an SSE connection keyed by a timestamp-based message id,
     * then records the user's message in the cached conversation history.
     *
     * @param request user input (conversation id + content)
     * @param userId  id of the requesting user
     * @return the persisted user-side chat message record
     */
    @Override
    public ChatMessageRequest chat(ChatAiContentRequest request, String userId) {
        Long messageId = System.currentTimeMillis();
        // Reuse an already-open SSE connection for this message id, or open a new one.
        SseEmitter emitter = getOrCreateEmitter(messageId);

        // Conversation history is cached in Redis per (convId, userId).
        List<ChatMessageRequest> historyChatMessageList = getChatMessagesFromCache(request.getConvId(), userId);

        // NOTE(review): the raw-SDK path (buildChatMessages + buildChatCompletionRequest +
        // the ChatCompletionRequest overload of handleModelApiResponse) is currently
        // disabled in favour of the agents-flex overload below; the dead computation of
        // an unused ChatCompletionRequest has been removed.
        handleModelApiResponse(emitter, messageId, request.getConvId(), historyChatMessageList, userId);

        return buildChatMessageRequest(messageId, request, historyChatMessageList, userId);
    }

    /**
     * Returns the existing SSE connection for {@code messageId}, creating one if absent.
     */
    private SseEmitter getOrCreateEmitter(Long messageId) {
        return Optional.ofNullable(SseServer.getConnect(messageId))
                .orElseGet(() -> {
                    log.info("ChatGLMFileIntegrationImpl-chat sseEmitter is null, create new connect");
                    return SseServer.createConnect(messageId);
                });
    }

    /**
     * Loads the cached conversation history for (convId, userId) from Redis.
     *
     * @return the deserialized history, or an empty mutable list when no cache entry exists
     */
    private List<ChatMessageRequest> getChatMessagesFromCache(String convId, String userId) {
        String chatMessages = (String) Optional.ofNullable(redisCache.getCacheObject(String.format(CacheConstants.CHAT_RECORDS_KEY, convId, userId)))
                .orElse("");
        log.info("ChatGLMFileIntegrationImpl getChatMessagesFromCache chatMessages: {}", chatMessages);
        if (StringUtils.isEmpty(chatMessages)) {
            return new ArrayList<>();
        }
        return JsonUtils.fromJson(chatMessages, ArrayList.class, ChatMessageRequest.class);
    }

    /**
     * Builds the SDK message list: system prompt, then the cached history
     * (AI entries as ASSISTANT, everything else as USER), then the new user input.
     */
    private List<ChatMessage> buildChatMessages(List<ChatMessageRequest> historyMessageList, ChatAiContentRequest request) {
        List<ChatMessage> messages = new ArrayList<>();
        ChatMessage systemMessage = new ChatMessage(ChatMessageRole.SYSTEM.value(), "你是一个能够基于文档跟踪用户输入的AI助手，你可以回答用户的问题，也可以提供一些有用的信息。");
        messages.add(systemMessage);

        if (historyMessageList != null && !historyMessageList.isEmpty()) {
            historyMessageList.forEach(chatMessageRequest -> {
                // Map the stored role onto the SDK's role enum.
                ChatMessage chatMessage;
                if (chatMessageRequest.getRole() == AI) {
                    chatMessage = new ChatMessage(ChatMessageRole.ASSISTANT.value(), chatMessageRequest.getContent());
                } else {
                    chatMessage = new ChatMessage(ChatMessageRole.USER.value(), chatMessageRequest.getContent());
                }
                messages.add(chatMessage);
            });
        }
        ChatMessage userMessage = new ChatMessage(ChatMessageRole.USER.value(), request.getContent());
        messages.add(userMessage);
        return messages;
    }

    /**
     * Builds a streaming completion request for the raw Zhipu SDK path.
     * The conversation id doubles as the SDK request id.
     */
    private ChatCompletionRequest buildChatCompletionRequest(ChatAiContentRequest request, List<ChatMessage> messages) {
        HashMap<String, Object> extraJson = new HashMap<>();
        extraJson.put("temperature", 0.5);
        extraJson.put("max_tokens", 2000);

        return ChatCompletionRequest.builder()
                .model(MODEL_NAME)
                .stream(Boolean.TRUE)
                .messages(messages)
                .requestId(request.getConvId())
                .extraJson(extraJson)
                .build();
    }

    /**
     * Raw-SDK streaming path: forwards each delta chunk over SSE, then on completion
     * persists the full accumulated AI reply into the cached history.
     * NOTE(review): currently unused — {@link #chat} calls the agents-flex overload.
     */
    private void handleModelApiResponse(ChatCompletionRequest chatCompletionRequest, SseEmitter emitter,
                                        Long messageId, String convId, List<ChatMessageRequest> historyMessageList, String userId) {
        ModelApiResponse sseModelApiResp = client.invokeModelApi(chatCompletionRequest);

        if (sseModelApiResp.isSuccess()) {
            AtomicBoolean isFirst = new AtomicBoolean(true);
            List<Choice> choices = new ArrayList<>();
            // Accumulates the full AI reply. Must start as "" — a null initial value
            // would prefix the reply with the literal string "null" on concatenation.
            AtomicReference<String> aiContent = new AtomicReference<>("");
            mapStreamToAccumulator(sseModelApiResp.getFlowable())
                    .doOnNext(accumulator -> {
                        if (isFirst.getAndSet(false)) {
                            log.info("accumulator.getUsage(): {}", accumulator.getUsage());
                        }
                        if (accumulator.getDelta() != null && accumulator.getDelta().getContent() != null) {
                            log.info("accumulator.getDelta().getContent(): {}", accumulator.getDelta().getContent());
                            try {
                                ChatMessageRequest response = ChatMessageRequest.builder()
                                        .messageId(String.valueOf(messageId))
                                        .content(accumulator.getDelta().getContent())
                                        .role(AI)
                                        .userId(userId)
                                        .convId(convId)
                                        .chunkResp(true)
                                        .build();

                                aiContent.set(aiContent.get() + response.getContent());
                                emitter.send(JsonUtils.toJson(response));
                            } catch (IOException e) {
                                emitter.completeWithError(e);
                            }
                        } else {
                            log.info("没有数据了");
                        }
                        choices.add(accumulator.getChoice());
                    })
                    .doOnComplete(() -> {
                        ModelData data = new ModelData();
                        data.setChoices(choices);
                        data.setRequestId(chatCompletionRequest.getRequestId());
                        sseModelApiResp.setFlowable(null);
                        sseModelApiResp.setData(data);
                        // todo: 是否该关闭连接 — confirm whether the emitter should close here
                        // or stay open for further messages on the same connection.
                        emitter.complete();
                        // Persist the full AI reply as a single history entry.
                        ChatMessageRequest aiResponse = ChatMessageRequest.builder()
                                .messageId(String.valueOf(messageId))
                                .content(aiContent.get())
                                .role(AI)
                                .userId(userId)
                                .convId(convId)
                                .chunkResp(true)
                                .build();
                        historyMessageList.add(aiResponse);
                        // Write the updated history back to Redis.
                        redisCache.setCacheObject(String.format(CacheConstants.CHAT_RECORDS_KEY, aiResponse.getConvId(), aiResponse.getUserId()), JsonUtils.toJson(historyMessageList));
                    })
                    .lastElement()
                    .blockingGet();
        } else {
            emitter.completeWithError(new BusinessException("Failed to invoke model API"));
        }
    }

    /**
     * Agents-flex streaming path used by {@link #chat}.
     *
     * <p>WARNING(review): this is work-in-progress stub code — it ignores
     * {@code historyMessageList} and streams a hard-coded prompt; the built
     * {@link HistoriesPrompt} is never passed to the model, and nothing is saved
     * to Redis here. TODO: stream the real conversation (see {@link #docChatSse})
     * and persist the AI reply on completion.
     */
    private void handleModelApiResponse(SseEmitter emitter,
                                        Long messageId, String convId, List<ChatMessageRequest> historyMessageList, String userId) {

        ChatglmLlmConfig config = new ChatglmLlmConfig();
        config.setModel(MODEL_NAME);
        config.setApiKey(API_KEY);
        ChatglmLlm llm = new ChatglmLlm(config);

        // TODO(review): placeholder prompt — currently built but never used.
        HistoriesPrompt historiesPrompt = new HistoriesPrompt();
        historiesPrompt.addMessage(new HumanMessage("你好"));
        final String[] aiContent = {""};
        // TODO(review): hard-coded test prompt; should use the real user input/history.
        llm.chatStream("你现在在干嘛", new StreamResponseListener() {
            @Override
            public void onMessage(ChatContext chatContext, AiMessageResponse aiMessageResponse) {
                log.info(">>>> {}", aiContent[0]);
                aiContent[0] += aiMessageResponse.getMessage().getContent();
                try {
                    // Sends the cumulative text so far (not just the delta).
                    emitter.send(aiContent[0]);
                } catch (IOException e) {
                    throw new BusinessException(e.getMessage());
                }
            }
        });
    }

    /**
     * Records the user's message into the conversation history and persists the
     * updated history to Redis.
     *
     * @return the user-side chat message record that was appended
     */
    private ChatMessageRequest buildChatMessageRequest(Long messageId, ChatAiContentRequest request, List<ChatMessageRequest> historyMessageList, String userId) {
        ChatMessageRequest userRequest = ChatMessageRequest.builder()
                .messageId(String.valueOf(messageId))
                .userId(userId)
                .chunkResp(true)
                .content(request.getContent())
                .role(USER)
                .convId(request.getConvId())
                .build();
        if (historyMessageList == null) {
            historyMessageList = new ArrayList<>();
        }
        historyMessageList.add(userRequest);
        // Persist the updated history (now ending with the user's message) to Redis.
        redisCache.setCacheObject(String.format(CacheConstants.CHAT_RECORDS_KEY, userRequest.getConvId(), userRequest.getUserId()), JsonUtils.toJson(historyMessageList));
        return userRequest;
    }

    /**
     * Adapts the SDK's raw {@link ModelData} stream into per-chunk accumulators.
     * Assumes every chunk carries at least one choice — TODO confirm against the SDK.
     */
    private Flowable<ChatMessageAccumulator> mapStreamToAccumulator(Flowable<ModelData> flowable) {
        return flowable.map(chunk -> new ChatMessageAccumulator(
                chunk.getChoices().get(0).getDelta(), null, chunk.getChoices().get(0),
                chunk.getUsage(), chunk.getCreated(), chunk.getId()));
    }

    /**
     * Document-chat endpoint: streams the AI reply for {@code request} over a fresh
     * SSE connection, replaying the cached conversation history as context, and
     * persists both the user message and the full AI reply when the stream completes.
     *
     * @param request user input (conversation id + content)
     * @param userId  id of the requesting user
     * @return the SSE emitter the reply is streamed through
     */
    @Override
    public SseEmitter docChatSse(ChatAiContentRequest request, String userId) {
        ChatglmLlmConfig config = new ChatglmLlmConfig();
        config.setModel(MODEL_NAME);
        config.setApiKey(API_KEY);
        ChatglmLlm llm = new ChatglmLlm(config);

        String startTime = String.valueOf(System.currentTimeMillis());
        ChatMessageRequest userRequest = ChatMessageRequest.builder()
                .messageId(startTime)
                .userId(userId)
                .content(request.getContent())
                .role(USER)
                .convId(request.getConvId())
                .chunkResp(true)
                .build();

        // History now ends with the new user message; reused below for both the
        // model prompt and the final Redis write (one cache read instead of two).
        List<ChatMessageRequest> historyMessageList = getChatMessagesFromCache(request.getConvId(), userId);
        historyMessageList.add(userRequest);

        // NOTE(review): default SseEmitter timeout may be too short for long
        // streaming replies — confirm and consider new SseEmitter(0L).
        SseEmitter emitter = new SseEmitter();
        HistoriesPrompt historiesPrompt = new HistoriesPrompt();
        // Replay the whole conversation (including the new user message) into the prompt.
        historyMessageList.forEach(chatMessage -> {
            if (chatMessage.getRole() == AI) {
                AiMessage aiMessage = new AiMessage();
                aiMessage.setContent(chatMessage.getContent());
                historiesPrompt.addMessage(aiMessage);
            } else {
                historiesPrompt.addMessage(new HumanMessage(chatMessage.getContent()));
            }
        });

        final String[] aiContent = {""};
        emitter.onCompletion(() -> {
            log.info("ChatGLMFileIntegrationImpl docChatSse onCompletion");
            // Persist the full accumulated AI reply as one history entry.
            ChatMessageRequest aiResponse = ChatMessageRequest.builder()
                    .messageId(startTime)
                    .content(aiContent[0])
                    .role(AI)
                    .userId(userId)
                    .convId(request.getConvId())
                    .chunkResp(true)
                    .build();
            historyMessageList.add(aiResponse);

            redisCache.setCacheObject(
                    String.format(CacheConstants.CHAT_RECORDS_KEY, request.getConvId(), userId),
                    JsonUtils.toJson(historyMessageList)
            );
            log.info("ChatGLMFileIntegrationImpl docChatSse chatMessage saved to redis");
        });
        log.info("ChatGLMFileIntegrationImpl docChatSse start historyPromp {}", historiesPrompt.toString());
        llm.chatStream(historiesPrompt, (chatContext, aiMessageResponse) -> {
            log.info(">>>> {}", aiContent[0]);
            aiContent[0] += aiMessageResponse.getMessage().getContent();
            try {
                // Each event carries the cumulative reply so far (original behavior).
                emitter.send(
                        JsonUtils.toJson(
                                ChatMessageRequest.builder()
                                        .messageId(startTime)
                                        .content(aiContent[0])
                                        .role(AI)
                                        .userId(userId)
                                        .convId(request.getConvId())
                                        .chunkResp(true)
                                        .build()
                        )
                );
            } catch (IOException e) {
                throw new BusinessException(e.getMessage());
            }
        });
        return emitter;
    }
}
