package cn.iocoder.yudao.module.dify.service.translationWorkflow;

import cn.iocoder.yudao.framework.common.pojo.CommonResult;
import cn.iocoder.yudao.module.dify.config.DifyWorkflowManager;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import io.github.imfangs.dify.client.DifyClientFactory;
import io.github.imfangs.dify.client.DifyWorkflowClient;
import io.github.imfangs.dify.client.callback.WorkflowStreamCallback;
import io.github.imfangs.dify.client.enums.FileTransferMethod;
import io.github.imfangs.dify.client.enums.FileType;
import io.github.imfangs.dify.client.enums.ResponseMode;
import io.github.imfangs.dify.client.event.*;
import io.github.imfangs.dify.client.exception.DifyApiException;
import io.github.imfangs.dify.client.model.file.FileInfo;
import io.github.imfangs.dify.client.model.file.FileUploadRequest;
import io.github.imfangs.dify.client.model.file.FileUploadResponse;
import io.github.imfangs.dify.client.model.workflow.WorkflowRunRequest;
import io.github.imfangs.dify.client.model.workflow.WorkflowRunResponse;
import okhttp3.MediaType;
import org.apache.poi.xwpf.usermodel.*;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import org.springframework.web.multipart.MultipartFile;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Sinks;

import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.regex.Pattern;

@Service
public class DifyTranslationWorkflowServiceImpl implements DifyTranslationWorkflowService {

    /** Maximum time to wait for the streaming workflow to finish (10 hours). */
    private static final long STREAM_TIMEOUT_SECONDS = 36000L;

    @Autowired
    private DifyWorkflowManager difyWorkflowManager;

    /**
     * Uploads the given document to the Dify "translate" workflow and runs it in
     * streaming mode, emitting each node output chunk through the returned {@link Flux}.
     * <p>
     * NOTE(review): this method blocks the calling thread (up to
     * {@value #STREAM_TIMEOUT_SECONDS} seconds) until the workflow finishes and only
     * then returns the Flux; events are buffered in the sink in the meantime.
     * Preserved as-is because callers may rely on completion-before-return —
     * TODO confirm whether the Flux should instead be returned immediately.
     *
     * @param file   document to translate (uploaded to Dify as {@code text/plain})
     * @param userId id of the requesting user, forwarded to Dify as the user tag
     * @return a Flux of translated output chunks wrapped in {@link CommonResult}
     * @throws DifyApiException     if the Dify upload or workflow call fails
     * @throws IOException          if reading the multipart file fails
     * @throws InterruptedException if the wait for stream completion is interrupted
     */
    public Flux<CommonResult<String>> translateWorkflow(MultipartFile file, Long userId) throws DifyApiException, IOException, InterruptedException {
        FileUploadRequest fileUploadRequest = FileUploadRequest.builder()
                .user(userId.toString())
                // Defaults to application/octet-stream when not set explicitly.
                .mediaType(MediaType.parse("text/plain"))
                .build();
        DifyWorkflowClient translateWorkflowClient = difyWorkflowManager.getClient("translate");
        FileUploadResponse uploadResponse = translateWorkflowClient.uploadFile(fileUploadRequest, file.getInputStream(), file.getOriginalFilename());
        // Reference the uploaded document in the workflow input.
        FileInfo fileInfo = FileInfo.builder()
                .type(FileType.DOCUMENT)
                .transferMethod(FileTransferMethod.LOCAL_FILE)
                .uploadFileId(uploadResponse.getId())
                .build();

        Map<String, Object> inputs = new HashMap<>();
        // The workflow's start node expects a "translatedDocument" file variable.
        inputs.put("translatedDocument", fileInfo);
        WorkflowRunRequest request = WorkflowRunRequest.builder()
                .inputs(inputs)
                .responseMode(ResponseMode.STREAMING)
                .user(userId.toString())
                .build();

        // Latch released when the stream terminates (finish, error, or exception).
        CountDownLatch latch = new CountDownLatch(1);
        StringBuilder outputBuilder = new StringBuilder();
        // Sink bridging the callback-style Dify client to a reactive Flux.
        Sinks.Many<CommonResult<String>> sink = Sinks.many().multicast().onBackpressureBuffer();
        translateWorkflowClient.runWorkflowStream(request, new WorkflowStreamCallback() {
            @Override
            public void onWorkflowStarted(WorkflowStartedEvent event) {
                System.out.println("工作流开始: " + event);
            }

            @Override
            public void onNodeStarted(NodeStartedEvent event) {
                System.out.println("节点开始: " + event);
            }

            @Override
            public void onNodeFinished(NodeFinishedEvent event) {
                System.out.println("节点完成: " + event);
                // NOTE(review): relies on event.toString() being valid JSON containing
                // an "output" field — fragile if the SDK's toString format changes.
                // TODO: use the event's typed accessors instead, if available.
                if (event.toString().contains("output")) {
                    JSONObject jsonObject = JSON.parseObject(event.toString());
                    // Push the chunk to subscribers of the Flux.
                    sink.tryEmitNext(CommonResult.success(jsonObject.getString("output")));
                    outputBuilder.append(event.toString()).append("\n");
                }
            }

            @Override
            public void onWorkflowFinished(WorkflowFinishedEvent event) {
                System.out.println("工作流完成: " + event);
                // Complete the Flux so subscribers receive onComplete, then unblock the caller.
                sink.tryEmitComplete();
                latch.countDown();
            }

            @Override
            public void onWorkflowTextChunk(WorkflowTextChunkEvent event) {
                System.out.println("工作流DDL执行过程: " + event);
            }

            @Override
            public void onTtsMessage(TtsMessageEvent event) {
                System.out.println("收到TTS消息: " + event);
            }

            @Override
            public void onTtsMessageEnd(TtsMessageEndEvent event) {
                System.out.println("TTS消息结束: " + event);
            }

            @Override
            public void onError(ErrorEvent event) {
                System.out.println("错误事件: " + event);
                // A terminal error must fail the Flux and release the waiting caller;
                // previously neither happened, leaving the caller blocked until timeout.
                sink.tryEmitError(new RuntimeException("Dify workflow error event: " + event));
                latch.countDown();
            }

            @Override
            public void onPing(PingEvent event) {
                System.out.println("心跳: " + event);
            }

            @Override
            public void onException(Throwable throwable) {
                System.out.println("异常: " + throwable.getMessage());
                // Propagate the failure to Flux subscribers before unblocking the caller.
                sink.tryEmitError(throwable);
                latch.countDown();
            }
        });

        // Wait for the streaming response to terminate.
        boolean completed = latch.await(STREAM_TIMEOUT_SECONDS, TimeUnit.SECONDS);
        if (!completed) {
            // Bug fix: the exception was previously constructed but never thrown,
            // so a timeout was silently ignored.
            throw new RuntimeException("流式响应超时");
        }
        System.out.println("完整输出: " + outputBuilder);
        return sink.asFlux();
    }

    /**
     * Translates a plain-text snippet through the Dify "translate" workflow in
     * blocking mode and concatenates all output values into a single string.
     *
     * @param context text to translate, passed as the workflow's "query" input
     * @param userId  id of the requesting user, forwarded to Dify as the user tag
     * @return concatenated workflow outputs; empty string when there is no data
     * @throws DifyApiException     if the Dify workflow call fails
     * @throws IOException          declared by the client API
     * @throws InterruptedException declared by the client API
     */
    @Override
    public String translateWorkflowGradually(String context, Long userId) throws DifyApiException, IOException, InterruptedException {
        Map<String, Object> inputs = new HashMap<>();
        inputs.put("query", context);

        WorkflowRunRequest request = WorkflowRunRequest.builder()
                .inputs(inputs)
                .responseMode(ResponseMode.BLOCKING)
                .user(userId.toString())
                .build();

        DifyWorkflowClient translateWorkflowClient = difyWorkflowManager.getClient("translate");
        WorkflowRunResponse response = translateWorkflowClient.runWorkflow(request);
        // StringBuilder: no synchronization needed in this local, single-threaded scope.
        StringBuilder result = new StringBuilder();
        // Guard against a null outputs map as well as null data (previously an NPE risk).
        if (response.getData() != null && response.getData().getOutputs() != null) {
            for (Map.Entry<String, Object> entry : response.getData().getOutputs().entrySet()) {
                System.out.println(entry.getKey() + ": " + entry.getValue());
                // append(Object) renders null values as "null" instead of throwing.
                result.append(entry.getValue());
            }
        }
        return result.toString();
    }

}
