package com.knowledge.business.controller;


import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.io.IOException;
import java.util.concurrent.CompletableFuture;

import cn.hutool.core.collection.CollectionUtil;
import com.knowledge.business.domain.TCp;
import com.knowledge.business.service.ITCpService;
import com.knowledge.business.service.OpenAiService;
import com.knowledge.business.service.impl.DocumentVectorService;
import com.knowledge.common.utils.SecurityUtils;
import io.milvus.v2.client.ConnectConfig;
import io.milvus.v2.client.MilvusClientV2;
import io.milvus.v2.service.database.request.CreateDatabaseReq;
import lombok.extern.log4j.Log4j2;

import com.knowledge.business.service.IKnowledgeDocumentSegmentService;
import com.knowledge.common.annotation.Log;
import com.knowledge.common.core.controller.BaseController;
import com.knowledge.common.core.domain.AjaxResult;
import com.knowledge.common.enums.BusinessType;
import com.knowledge.common.utils.StringUtils;
import com.knowledge.common.utils.poi.ExcelUtil;
import jakarta.servlet.http.HttpServletResponse;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.PutMapping;
import org.springframework.web.bind.annotation.DeleteMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import com.knowledge.business.domain.KnowledgeDocument;
import com.knowledge.business.service.IKnowledgeDocumentService;
import com.knowledge.common.core.page.TableDataInfo;

/**
 * 知识库文档Controller
 *
 * Exposes CRUD endpoints for knowledge-base documents and triggers an
 * asynchronous background pipeline (parse-wait → segmentation → vectorization)
 * when a document is added.
 *
 * @author zhaoyk
 * @date 2025-09-13
 */
@Log4j2
@RestController
@RequestMapping("/business/document")
public class KnowledgeDocumentController extends BaseController {

    /**
     * Maximum number of 1-second polls while waiting for document parsing.
     * NOTE(review): an earlier inline comment claimed "30 seconds" but the
     * code always allowed 500 iterations (~500s); the code's value is kept.
     */
    private static final int MAX_PARSE_WAIT_SECONDS = 500;

    @Autowired
    private IKnowledgeDocumentService knowledgeDocumentService;

    @Autowired
    private IKnowledgeDocumentSegmentService knowledgeDocumentSegmentService;

    @Autowired
    private DocumentVectorService documentVectorService;

    @Autowired
    private OpenAiService openAiService;

    /**
     * 查询知识库文档列表 — paged list of plain documents (folders excluded).
     */
    //@RequiresPermissions("business:document:list")
    @GetMapping("/list")
    public TableDataInfo list(KnowledgeDocument knowledgeDocument) {
        startPage();
        // Force the filter so folder rows never appear in the document list.
        knowledgeDocument.setIsFolder("0");
        List<KnowledgeDocument> list = knowledgeDocumentService.selectKnowledgeDocumentList(knowledgeDocument);
        return getDataTable(list);
    }

    /**
     * 导出知识库文档列表 — writes matching documents to the response as Excel.
     */
    //@RequiresPermissions("business:document:export")
    @Log(title = "知识库文档", businessType = BusinessType.EXPORT)
    @PostMapping("/export")
    public void export(HttpServletResponse response, KnowledgeDocument knowledgeDocument) {
        List<KnowledgeDocument> list = knowledgeDocumentService.selectKnowledgeDocumentList(knowledgeDocument);
        ExcelUtil<KnowledgeDocument> util = new ExcelUtil<>(KnowledgeDocument.class);
        util.exportExcel(response, list, "知识库文档数据");
    }

    /**
     * 获取知识库文档详细信息 — fetch a single document by id.
     */
    //@RequiresPermissions("business:document:query")
    @GetMapping(value = "view/{id}")
    public AjaxResult getInfo(@PathVariable("id") String id) {
        return AjaxResult.success(knowledgeDocumentService.getById(id));
    }

    /**
     * 新增知识库文档 — inserts the document, then runs parsing-wait,
     * segmentation and vectorization asynchronously so the HTTP request
     * returns immediately.
     */
    //@RequiresPermissions("business:document:add")
    @Log(title = "知识库文档", businessType = BusinessType.INSERT)
    @PostMapping("/add")
    public AjaxResult add(@RequestBody KnowledgeDocument knowledgeDocument) {
        try {
            knowledgeDocument.setUserId(SecurityUtils.getUserIdStr());
            knowledgeDocumentService.insertKnowledgeDocument(knowledgeDocument);
            // Heavy work runs off the request thread; the caller only gets an
            // acknowledgement that processing has started.
            CompletableFuture.runAsync(() -> processDocumentAsync(knowledgeDocument));
            return AjaxResult.success("文档添加成功，正在后台进行解析、分段和向量化处理");
        } catch (Exception e) {
            log.error("添加文档失败: {}", e.getMessage(), e);
            return AjaxResult.error("添加文档失败: " + e.getMessage());
        }
    }

    /**
     * Background pipeline for a freshly inserted document: polls until parsing
     * completes (or times out after {@link #MAX_PARSE_WAIT_SECONDS} polls),
     * then segments and vectorizes the parsed content, or records the failure.
     */
    private void processDocumentAsync(KnowledgeDocument knowledgeDocument) {
        try {
            int waitCount = 0;
            KnowledgeDocument currentDocument;
            do {
                Thread.sleep(1000); // poll once per second
                currentDocument = knowledgeDocumentService.getById(knowledgeDocument.getId());
                if (currentDocument == null) {
                    // The row disappeared (e.g. deleted mid-wait); stop instead of NPE-ing.
                    log.error("文档 {} 不存在，终止后台处理", knowledgeDocument.getId());
                    return;
                }
                if (StringUtils.isEmpty(currentDocument.getParseStatus())) {
                    // Treat a missing status as still in progress so polling continues.
                    currentDocument.setParseStatus("Processing");
                }
                waitCount++;
                log.info("等待文档 {} ，当前等待次数: {}", knowledgeDocument.getId(), waitCount);
            } while ("Processing".equals(currentDocument.getParseStatus()) && waitCount < MAX_PARSE_WAIT_SECONDS);

            String documentContent = currentDocument.getDocumentContent();
            if ("Success".equals(currentDocument.getParseStatus()) && StringUtils.isNotEmpty(documentContent)) {
                log.info("文档 {} 解析完成，开始进行分段处理", knowledgeDocument.getId());
                segmentAndVectorize(knowledgeDocument, documentContent);
            } else {
                markParseFailed(knowledgeDocument.getId(), currentDocument, documentContent, waitCount);
            }
        } catch (InterruptedException ie) {
            // Restore the interrupt flag rather than swallowing it.
            Thread.currentThread().interrupt();
            log.error("文档 {} 后台处理被中断", knowledgeDocument.getId(), ie);
        } catch (Exception e) {
            log.error("文档 {} 分段或向量化处理失败: {}", knowledgeDocument.getId(), e.getMessage(), e);
        }
    }

    /** Segments the parsed content, then vectorizes the resulting segments. */
    private void segmentAndVectorize(KnowledgeDocument knowledgeDocument, String documentContent) {
        try {
            knowledgeDocumentSegmentService.segmentDocumentContent(
                    knowledgeDocument.getId(),
                    knowledgeDocument.getDocumentType(),
                    documentContent
            );
            log.info("开始对文档 {} 进行向量化处理", knowledgeDocument.getId());
            try {
                documentVectorService.vectorizeDocumentSegments(knowledgeDocument.getId());
                log.info("文档 {} 向量化处理完成", knowledgeDocument.getId());
            } catch (Exception vectorException) {
                // A vectorization failure must not undo the completed segmentation.
                log.error("文档 {} 向量化处理失败: {}", knowledgeDocument.getId(), vectorException.getMessage(), vectorException);
            }
        } catch (Exception segmentException) {
            log.error("文档 {} 分段处理失败: {}", knowledgeDocument.getId(), segmentException.getMessage(), segmentException);
        }
    }

    /** Builds a detailed failure reason and persists it on the document row. */
    private void markParseFailed(String documentId, KnowledgeDocument currentDocument,
                                 String documentContent, int waitCount) {
        log.error("文档 {} 解析失败或超时，状态: {}，内容是否为空: {}",
                documentId,
                currentDocument.getParseStatus(),
                StringUtils.isEmpty(documentContent));

        StringBuilder errorMsg = new StringBuilder("文档解析失败: ");
        // NOTE(review): the status vocabulary is inconsistent — "Fail" is read
        // here but "Failed" is written below. Kept as-is to avoid breaking
        // other components; confirm which value the parser actually writes.
        if ("Fail".equals(currentDocument.getParseStatus())) {
            errorMsg.append("解析过程出错：").append(currentDocument.getParseMsg());
        } else if (waitCount >= MAX_PARSE_WAIT_SECONDS) {
            errorMsg.append("解析超时");
        } else if (StringUtils.isEmpty(documentContent)) {
            errorMsg.append("解析内容为空");
        } else {
            errorMsg.append("未知错误");
        }
        currentDocument.setParseStatus("Failed");
        currentDocument.setParseMsg(errorMsg.toString());
        knowledgeDocumentService.updateById(currentDocument);
    }

    /**
     * 获取知识库文档的AI回复补充意见 — unauthenticated AI chat reply for the
     * supplied content.
     */
    @GetMapping("/nologin/getAiReply")
    public AjaxResult getAiReply(String content) {
        String chatReply = openAiService.getBcChatReply(content);
        return AjaxResult.success(chatReply);
    }

    /**
     * 新增知识库文档分段(异步执行向量化处理)。
     *
     * NOTE(review): the actual segmentation/vectorization calls were commented
     * out in the original, so this endpoint currently only validates that the
     * document exists and has content; its success message overstates what
     * happens. Re-enable the pipeline or retire the endpoint.
     */
    @Log(title = "知识库文档", businessType = BusinessType.INSERT)
    @PostMapping("/addToSegment")
    public AjaxResult addToSegment(@RequestBody KnowledgeDocument knowledgeDocument) throws IOException {
        KnowledgeDocument byId = knowledgeDocumentService.getById(knowledgeDocument.getId());
        if (byId == null) {
            // Guard: the original dereferenced a possibly-null lookup result.
            return error("文档不存在");
        }
        String documentContent = byId.getDocumentContent();
        if (StringUtils.isEmpty(documentContent)) {
            return error("文档内容不能为空");
        }
        return AjaxResult.success("分段创建成功，正在执行向量化处理");
    }

    /**
     * 修改知识库文档 — updates the document's stored fields.
     */
    //@RequiresPermissions("business:document:edit")
    @Log(title = "知识库文档", businessType = BusinessType.UPDATE)
    @PutMapping("/update")
    public AjaxResult edit(@RequestBody KnowledgeDocument knowledgeDocument) throws Exception {
        knowledgeDocumentService.updateKnowledgeDocument(knowledgeDocument);
        return AjaxResult.success();
    }

    /**
     * 删除知识库文档 — bulk delete by path-variable id list.
     */
    //@RequiresPermissions("business:document:remove")
    @Log(title = "知识库文档", businessType = BusinessType.DELETE)
    @DeleteMapping("/delete/{ids}")
    public AjaxResult remove(@PathVariable String[] ids) {
        knowledgeDocumentService.deleteSj(ids);
        return AjaxResult.success();
    }
}
