package com.cy.ragbase.service;

import com.cy.ragbase.dto.DocumentProcessingTask;
import com.cy.ragbase.entity.Document;
import com.cy.ragbase.entity.DocumentChunk;
import dev.langchain4j.data.document.splitter.DocumentSplitters;
import dev.langchain4j.data.document.parser.apache.tika.ApacheTikaDocumentParser;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.data.embedding.Embedding;
import dev.langchain4j.model.embedding.EmbeddingModel;
import jakarta.annotation.Resource;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.stereotype.Service;
import org.springframework.web.multipart.MultipartFile;

import java.io.*;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.time.LocalDateTime;
import java.util.*;
import java.util.concurrent.TimeUnit;
import java.util.stream.IntStream;

@Service
@RequiredArgsConstructor
@Slf4j
public class DocumentService {

    // Document lifecycle states stored in the Redis hash's "status" field.
    private static final String STATUS_PENDING = "PENDING";
    private static final String STATUS_PROCESSING = "PROCESSING";
    private static final String STATUS_COMPLETED = "COMPLETED";
    private static final String STATUS_FAILED = "FAILED";

    /** Key prefix for per-document metadata hashes in Redis. */
    private static final String REDIS_KEY_PREFIX = "document:";

    /** Maximum accepted upload size: 50 MB. */
    private static final long MAX_FILE_SIZE_BYTES = 50L * 1024 * 1024;

    /** Retention period for document metadata in Redis. */
    private static final long DOCUMENT_TTL_DAYS = 30;

    // NOTE(review): field injection kept (instead of folding into the
    // @RequiredArgsConstructor constructor) to avoid changing the generated
    // constructor signature existing callers/tests may rely on.
    @Resource
    private EmbeddingModel embeddingModel;

    private final MilvusService milvusService;
    private final RedisQueueService redisQueueService;
    private final RedisTemplate<String, String> redisTemplate;
    private final ApacheTikaDocumentParser documentParser = new ApacheTikaDocumentParser();

    @Value("${app.document.upload-path}")
    private String uploadPath;

    @Value("${app.document.chunk-size}")
    private int chunkSize;

    @Value("${app.document.chunk-overlap}")
    private int chunkOverlap;

    /**
     * 上传文档并加入处理队列
     *
     * <p>Validates the upload, stores the file under {@code uploadPath} with a
     * UUID prefix, persists metadata to Redis and enqueues a processing task.
     *
     * @param file the multipart upload; must be non-empty, named, and ≤ 50 MB
     * @return the newly created document record (status {@code PENDING})
     * @throws IOException if the upload directory or file cannot be written
     */
    public Document uploadDocument(MultipartFile file) throws IOException {
        validateFile(file);

        // Ensure the upload directory exists.
        Path uploadDir = Paths.get(uploadPath);
        if (!Files.exists(uploadDir)) {
            Files.createDirectories(uploadDir);
        }

        String documentId = UUID.randomUUID().toString();
        // Strip any directory components from the client-supplied name so a
        // crafted filename (e.g. "../../etc/passwd") cannot escape uploadDir.
        String fileName = Paths.get(file.getOriginalFilename()).getFileName().toString();

        // UUID prefix makes the stored name unique even for duplicate uploads.
        Path destinationPath = uploadDir.resolve(documentId + "_" + fileName);
        String filePath = destinationPath.toString();

        Files.copy(file.getInputStream(), destinationPath);

        Document document = Document.builder()
                .id(documentId)
                .fileName(fileName)
                .filePath(filePath)
                .contentType(file.getContentType())
                .fileSize(file.getSize())
                .status(STATUS_PENDING)
                .uploadTime(LocalDateTime.now())
                .build();

        // Persist metadata first so consumers can always resolve the task's document.
        saveDocumentToRedis(document);

        String taskId = redisQueueService.enqueueDocumentProcessing(documentId, filePath, fileName);
        log.info("上传文档并创建处理任务: {} (文件: {})", taskId, fileName);
        return document;
    }

    /**
     * 处理文档（由队列消费者调用）
     *
     * <p>Parses the stored file, splits it into chunks, embeds each chunk and
     * inserts the vectors into Milvus. Status transitions are persisted to
     * Redis at each stage (PROCESSING → COMPLETED/FAILED).
     *
     * @param task the queued processing task (document id, file path, file name)
     * @throws RuntimeException if the document is unknown or processing fails;
     *                          the failure reason is recorded on the document
     */
    public void processDocument(DocumentProcessingTask task) {
        Document document = getDocumentFromRedis(task.getDocumentId());
        if (document == null) {
            throw new RuntimeException("Document not found: " + task.getDocumentId());
        }

        try {
            document.setStatus(STATUS_PROCESSING);
            document.setProcessTime(LocalDateTime.now());
            saveDocumentToRedis(document);

            dev.langchain4j.data.document.Document langchainDoc = parseDocument(task.getFilePath());

            List<TextSegment> segments = splitDocument(langchainDoc);

            List<DocumentChunk> chunks = generateChunks(task, segments);
            milvusService.insertDocumentChunks(chunks);

            document.setStatus(STATUS_COMPLETED);
            saveDocumentToRedis(document);

            log.info("文档处理成功: {}", task.getDocumentId());

        } catch (Exception e) {
            // Record the failure so the UI/poller can surface the reason.
            document.setStatus(STATUS_FAILED);
            document.setErrorMessage(e.getMessage());
            saveDocumentToRedis(document);

            log.error("文档处理失败: {} (文件: {})", task.getDocumentId(), task.getFileName(), e);
            throw new RuntimeException("Document processing failed", e);
        }
    }

    /**
     * 获取文档信息
     *
     * @param documentId the document id
     * @return the document, or {@code null} if unknown or the lookup failed
     */
    public Document getDocument(String documentId) {
        return getDocumentFromRedis(documentId);
    }

    /**
     * 删除文档
     *
     * <p>Removes the document's vectors from Milvus, deletes the stored file
     * and drops the Redis metadata hash.
     *
     * @param documentId the document id
     * @throws RuntimeException if the document is unknown or deletion fails
     */
    public void deleteDocument(String documentId) {
        Document document = getDocumentFromRedis(documentId);
        if (document == null) {
            throw new RuntimeException("Document not found: " + documentId);
        }

        try {
            milvusService.deleteDocumentChunks(documentId);

            // deleteIfExists avoids the exists()/delete() TOCTOU race.
            Files.deleteIfExists(Paths.get(document.getFilePath()));

            redisTemplate.delete(REDIS_KEY_PREFIX + documentId);

            log.info("删除文档: {}", documentId);

        } catch (Exception e) {
            log.error("删除文档失败: {}", documentId, e);
            throw new RuntimeException("Document deletion failed", e);
        }
    }

    /**
     * 验证上传文件
     *
     * @param file the candidate upload
     * @throws IllegalArgumentException if the file is empty, unnamed,
     *                                  or larger than {@link #MAX_FILE_SIZE_BYTES}
     */
    private void validateFile(MultipartFile file) {
        if (file.isEmpty()) {
            throw new IllegalArgumentException("File is empty");
        }

        String fileName = file.getOriginalFilename();
        if (fileName == null || fileName.trim().isEmpty()) {
            throw new IllegalArgumentException("File name is empty");
        }

        if (file.getSize() > MAX_FILE_SIZE_BYTES) {
            throw new IllegalArgumentException("File size exceeds limit");
        }
    }

    /**
     * 解析文档
     *
     * <p>Parses the file at {@code filePath} with Apache Tika.
     *
     * @param filePath absolute path of the stored file
     * @return the parsed langchain4j document
     * @throws UncheckedIOException if the file cannot be read
     */
    private dev.langchain4j.data.document.Document parseDocument(String filePath) {
        // NoSuchFileException is an IOException, so a single catch covers
        // both the missing-file and read-failure cases.
        try (InputStream inputStream = Files.newInputStream(Paths.get(filePath))) {
            return documentParser.parse(inputStream);
        } catch (IOException e) {
            throw new UncheckedIOException("Failed to read document: " + filePath, e);
        }
    }

    /**
     * 分割文档
     *
     * <p>Splits the document recursively using the configured chunk size and overlap.
     */
    private List<TextSegment> splitDocument(dev.langchain4j.data.document.Document document) {
        var splitter = DocumentSplitters.recursive(chunkSize, chunkOverlap);
        return splitter.split(document);
    }

    /**
     * 生成文档块和向量
     *
     * <p>Embeds each text segment and wraps it in a {@link DocumentChunk}
     * whose id is {@code <documentId>_chunk_<index>}.
     *
     * @param task     the processing task (provides document id and file name)
     * @param segments the split text segments, in document order
     * @return one chunk per segment, in the same order
     */
    private List<DocumentChunk> generateChunks(DocumentProcessingTask task, List<TextSegment> segments) {
        List<DocumentChunk> chunks = new ArrayList<>(segments.size());

        String documentId = task.getDocumentId();

        for (int i = 0; i < segments.size(); i++) {
            TextSegment segment = segments.get(i);
            String content = segment.text();

            // One embedding call per chunk; vector dimension is set by the model.
            Embedding embedding = embeddingModel.embed(content).content();
            float[] vector = embedding.vector();

            DocumentChunk chunk = DocumentChunk.builder()
                    .id(documentId + "_chunk_" + i)
                    .documentId(documentId)
                    .chunkIndex(i)
                    .content(content)
                    .vector(vector)
                    .build();

            chunks.add(chunk);
        }

        log.info("为文档 {} 生成了 {} 个块 (文件: {})", documentId, chunks.size(), task.getFileName());

        return chunks;
    }

    /**
     * 从文件路径提取文件名
     *
     * <p>Strips the {@code <uuid>_} prefix added by {@link #uploadDocument}.
     * NOTE(review): not referenced within this file — verify external use
     * before removing.
     *
     * @param filePath stored file path, may be {@code null}
     * @return the original file name, or "未知文件" when {@code filePath} is null
     */
    private String getFileNameFromPath(String filePath) {
        if (filePath == null) return "未知文件";

        String fileName = Paths.get(filePath).getFileName().toString();
        int underscoreIndex = fileName.indexOf('_');
        if (underscoreIndex > 0 && underscoreIndex < fileName.length() - 1) {
            return fileName.substring(underscoreIndex + 1);
        }
        return fileName;
    }

    /**
     * 保存文档信息到Redis
     *
     * <p>Writes all fields in a single {@code HMSET} (putAll) instead of one
     * Redis round-trip per field, then refreshes the TTL.
     *
     * @param document the document to persist; id and required fields non-null
     * @throws RuntimeException if the Redis write fails
     */
    private void saveDocumentToRedis(Document document) {
        try {
            String key = REDIS_KEY_PREFIX + document.getId();

            Map<String, String> fields = new HashMap<>();
            fields.put("id", document.getId());
            fields.put("fileName", document.getFileName());
            fields.put("filePath", document.getFilePath());
            fields.put("contentType", document.getContentType());
            fields.put("fileSize", String.valueOf(document.getFileSize()));
            fields.put("status", document.getStatus());
            fields.put("uploadTime", document.getUploadTime().toString());

            if (document.getProcessTime() != null) {
                fields.put("processTime", document.getProcessTime().toString());
            }
            if (document.getErrorMessage() != null) {
                fields.put("errorMessage", document.getErrorMessage());
            }

            redisTemplate.opsForHash().putAll(key, fields);
            redisTemplate.expire(key, DOCUMENT_TTL_DAYS, TimeUnit.DAYS);

        } catch (Exception e) {
            log.error("保存文档到Redis失败: {}", document.getId(), e);
            throw new RuntimeException("Failed to save document", e);
        }
    }

    /**
     * 从Redis获取文档信息
     *
     * <p>Rebuilds a {@link Document} from its Redis hash. Optional fields
     * (processTime, errorMessage) are only set when present.
     *
     * @param documentId the document id
     * @return the document, or {@code null} if the hash is missing or malformed
     */
    private Document getDocumentFromRedis(String documentId) {
        try {
            String key = REDIS_KEY_PREFIX + documentId;
            Map<Object, Object> hash = redisTemplate.opsForHash().entries(key);

            if (hash.isEmpty()) {
                return null;
            }

            Document.DocumentBuilder builder = Document.builder()
                    .id((String) hash.get("id"))
                    .fileName((String) hash.get("fileName"))
                    .filePath((String) hash.get("filePath"))
                    .contentType((String) hash.get("contentType"))
                    .fileSize(Long.parseLong((String) hash.get("fileSize")))
                    .status((String) hash.get("status"))
                    .uploadTime(LocalDateTime.parse((String) hash.get("uploadTime")));

            if (hash.containsKey("processTime")) {
                builder.processTime(LocalDateTime.parse((String) hash.get("processTime")));
            }
            if (hash.containsKey("errorMessage")) {
                builder.errorMessage((String) hash.get("errorMessage"));
            }

            return builder.build();

        } catch (Exception e) {
            // Treat malformed/partial hashes as absent rather than failing the caller.
            log.error("从Redis获取文档失败: {}", documentId, e);
            return null;
        }
    }
}