package com.btl.service.ai.biz.queue;

import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.btl.service.ai.biz.constants.SegmentStatusEnum;
import com.btl.service.ai.biz.constants.TaskStatusEnum;
import com.btl.service.ai.biz.entity.KwDocument;
import com.btl.service.ai.biz.entity.KwSegment;
import com.btl.service.ai.biz.service.IKwDocumentService;
import com.btl.service.ai.biz.service.IKwSegmentService;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.Metadata;
import dev.langchain4j.store.embedding.IngestionResult;
import jakarta.annotation.PostConstruct;
import jakarta.annotation.Resource;
import lombok.extern.slf4j.Slf4j;
import org.springframework.core.task.VirtualThreadTaskExecutor;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
import org.springframework.stereotype.Component;
import org.springframework.util.StringUtils;

import java.time.LocalDateTime;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.Executor;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.Semaphore;
import java.util.concurrent.atomic.AtomicInteger;

@Slf4j
@Component
public class DocumentIngestQueue {

    /** Number of WAITING segments vectorized per batch query. */
    private static final int BATCH_SIZE = 400;

    /** One virtual thread per task; actual concurrency is bounded by {@link #semaphore}. */
    private static final Executor executor = new VirtualThreadTaskExecutor();

    /** Caps concurrent document-ingest tasks at 10. */
    private static final Semaphore semaphore = new Semaphore(10);

    /** Bounded backlog; {@link #enqueue} drops tasks (returns {@code false}) when full. */
    private static final BlockingQueue<DocumentIngestTask> queue = new LinkedBlockingQueue<>(40);

    @Resource
    private IKwDocumentService iKwDocumentService;
    @Resource
    private IKwSegmentService iKwSegmentService;

    /**
     * Starts the single consumer thread once dependency injection has completed.
     * Starting it from the constructor (as before) leaked {@code this} and raced
     * against injection of the {@code @Resource} services used by the consumer.
     */
    @PostConstruct
    public void start() {
        Thread consumer = new Thread(this::consumeLoop, "document-ingest-consumer");
        consumer.setDaemon(true);
        consumer.start();
    }

    /**
     * Offers a task to the bounded queue without blocking.
     *
     * @param task the document-ingest task to schedule
     * @return {@code true} if the task was accepted; {@code false} if the queue
     *         was full and the task was dropped
     */
    public Boolean enqueue(DocumentIngestTask task) {
        boolean accepted = queue.offer(task);
        if (accepted) {
            log.info("DocumentIngestTask added to queue: {}", task.fileName());
        } else {
            log.warn("DocumentIngestQueue is full, task: {} will be dropped.", task.fileName());
        }
        log.info("DocumentIngestQueue size: {}", queue.size());
        return accepted;
    }

    /**
     * Consumer loop: blocks on the queue and dispatches each task onto a virtual
     * thread. Exits cleanly when the consumer thread is interrupted — the previous
     * version caught {@code InterruptedException} in the broad {@code catch} and
     * spun forever.
     */
    private void consumeLoop() {
        while (!Thread.currentThread().isInterrupted()) {
            DocumentIngestTask task;
            try {
                task = queue.take();
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
                break;
            }
            try {
                executor.execute(() -> runGuarded(task));
            } catch (Exception e) {
                log.error("提取队列-文档任务-异常", e);
            }
        }
    }

    /**
     * Runs one task under the concurrency semaphore and marks the document FAILED
     * if processing throws. The permit is released only when it was actually
     * acquired — the previous version released in a {@code finally} even when
     * {@code acquire()} itself was interrupted, leaking extra permits.
     */
    private void runGuarded(DocumentIngestTask task) {
        try {
            semaphore.acquire();
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            return; // never held a permit — nothing to release
        }
        try {
            processTask(task);
        } catch (Exception e) {
            iKwDocumentService.updateById(KwDocument.builder()
                    .status(TaskStatusEnum.FAILED.name())
                    .id(task.documentId())
                    .build());
            log.error("处理任务失败: {}", task.fileName(), e);
        } finally {
            semaphore.release();
        }
    }

    /**
     * Vectorizes all WAITING segments of the task's document in batches of
     * {@link #BATCH_SIZE}, then marks the document COMPLETED. Only documents
     * currently PENDING or FAILED are processed, so a task cannot run twice.
     */
    private void processTask(DocumentIngestTask task) {
        KwDocument kwDocument = iKwDocumentService.getById(task.documentId());
        if (Objects.isNull(kwDocument)) {
            log.warn("文档ID:{}不存在，忽略任务: {}", task.documentId(), task.fileName());
            return;
        }
        boolean runnable = Objects.equals(TaskStatusEnum.PENDING.name(), kwDocument.getStatus())
                || Objects.equals(TaskStatusEnum.FAILED.name(), kwDocument.getStatus());
        if (!runnable) {
            log.warn("文档ID:{}已执行，忽略任务: {}", task.documentId(), task.fileName());
            return;
        }
        iKwDocumentService.updateById(KwDocument.builder()
                .status(TaskStatusEnum.RUNNING.name())
                .id(task.documentId())
                .build());
        log.info("开始处理队列中的文档: {}", task.fileName());
        int batchIndex = 1;
        while (true) {
            // Fetch the next batch of not-yet-vectorized segments for this document.
            List<KwSegment> segments = iKwSegmentService.list(new LambdaQueryWrapper<KwSegment>()
                    .eq(KwSegment::getDocumentId, kwDocument.getId())
                    .eq(KwSegment::getStatus, SegmentStatusEnum.WAITING.name())
                    .last("LIMIT " + BATCH_SIZE));
            if (segments.isEmpty()) {
                log.warn("文档ID:{},无可执行分段，忽略任务: {}", task.documentId(), task.fileName());
                break;
            }
            log.info("文档ID:{},分段任务开始: {}-批次:{}", task.documentId(), task.fileName(), batchIndex);
            List<Document> documents = segments.stream()
                    .map(DocumentIngestQueue::toDocument)
                    .toList();
            // Embed the batch into the task's vector collection.
            IngestionResult result = task.ingestor()
                    .embed(task.collectionName(), documents);
            // Mark the embedded segments VECTORED so the next batch query skips them.
            List<Long> segmentIds = segments.stream()
                    .map(KwSegment::getId)
                    .toList();
            iKwSegmentService.update(KwSegment.builder()
                    .status(SegmentStatusEnum.VECTORED.name())
                    .updateTime(LocalDateTime.now())
                    .build(), new LambdaQueryWrapper<KwSegment>()
                    .eq(KwSegment::getDocumentId, task.documentId())
                    .in(KwSegment::getId, segmentIds));
            log.info("文档ID:{},分段任务结束: {}-批次:{},token: {}", task.documentId(), task.fileName(), batchIndex,
                    result.tokenUsage().totalTokenCount());
            batchIndex++;
        }
        iKwDocumentService.updateById(KwDocument.builder()
                .status(TaskStatusEnum.COMPLETED.name())
                .id(task.documentId())
                .build());
    }

    /**
     * Converts a segment row into a langchain4j {@link Document}: the title (when
     * present) is prefixed to the segment text, and the row ids are carried as
     * metadata for later retrieval.
     */
    private static Document toDocument(KwSegment kwSegment) {
        Metadata metadata = new Metadata(Map.of(
                "vectorId", kwSegment.getVectorId(),
                "segmentId", kwSegment.getId().toString(),
                "index", kwSegment.getSeqId(),
                "documentId", kwSegment.getDocumentId().toString()));
        String content = StringUtils.hasText(kwSegment.getTitle())
                ? kwSegment.getTitle() + "\n\n" + kwSegment.getSegment()
                : kwSegment.getSegment();
        return Document.from(content, metadata);
    }
}
