package org.gwh.airagknowledge.core.document;

import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.DocumentParser;
import dev.langchain4j.data.document.DocumentSplitter;
import dev.langchain4j.data.document.parser.TextDocumentParser;
import dev.langchain4j.data.document.splitter.DocumentSplitters;
import dev.langchain4j.data.segment.TextSegment;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.gwh.airagknowledge.core.embedding.EmbeddingGenerator;
import org.gwh.airagknowledge.entity.DocumentChunk;
import org.gwh.airagknowledge.repository.DocumentChunkRepository;
import org.gwh.airagknowledge.repository.DocumentRepository;
import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Component;
import org.springframework.transaction.annotation.Transactional;

import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;

@Slf4j
@Component
@RequiredArgsConstructor
public class DocumentProcessor {

    private final DocumentRepository documentRepository;
    private final DocumentChunkRepository documentChunkRepository;
    private final EmbeddingGenerator embeddingGenerator;
    private final DocumentParser documentParser =new TextDocumentParser();
    private final DocumentSplitter splitter = DocumentSplitters.recursive(1000, 200);

    @Async
    @Transactional
    public void processDocumentAsync(org.gwh.airagknowledge.entity.Document dbDocument) {
        try {
            log.info("Processing document: {}", dbDocument.getFileName());
            
            // 解析文档
            Path path = Paths.get(dbDocument.getFilePath());
            File file = path.toFile();
            Document doc = documentParser.parse(new FileInputStream(file));
            
            // 分块文档
            List<TextSegment> chunks = splitter.split(doc);
            log.info("Document split into {} chunks", chunks.size());
            
            // 保存块并生成向量
            AtomicInteger index = new AtomicInteger(0);
            chunks.forEach(chunk -> {
                // 创建并保存文档块
                DocumentChunk docChunk = DocumentChunk.builder()
                        .document(dbDocument)
                        .content(chunk.text())
                        .chunkIndex(index.getAndIncrement())
                        .build();
                
                documentChunkRepository.save(docChunk);
                
                // 生成并保存向量嵌入
                embeddingGenerator.generateEmbedding(docChunk);
            });
            
            // 更新文档状态为完成
            dbDocument.setStatus("COMPLETED");
            documentRepository.save(dbDocument);
            
            log.info("Document processing completed: {}", dbDocument.getFileName());
        } catch (Exception e) {
            log.error("Error processing document: {}", dbDocument.getFileName(), e);
            dbDocument.setStatus("ERROR");
            documentRepository.save(dbDocument);
        }
    }
} 