import os
from pocketflow import AsyncNode
import uuid
from scorpio.services.database import DocumentProcessor
from scorpio.services.ingestion import IngestionService
from scorpio.core.common import get_logger
logger = get_logger(__name__)

# RAG ingest flow
class IngestAction(AsyncNode):
    """Prepare a document for ingestion: chunk the source file and insert the
    document's metadata record so downstream nodes can stream-embed the chunks.
    """

    async def prep_async(self, shared):
        """Pull the execution context and document descriptor from the shared store.

        Expected shape of ``shared``::

            {
                "context": {
                    "embedding_config": self.embedding_config,
                    "ingestion": self.ingestion_service,
                    "processor": DocumentProcessor,  # read by exec_async
                },
                "document": {
                    "file_path": file_path,
                    "title": title or os.path.basename(file_path),
                },
                "embedding_config": self.embedding_config,
            }

        Returns:
            Tuple of ``(context, document)`` dicts (either may be ``None`` if absent).
        """
        context = shared.get("context")
        document = shared.get("document")
        logger.info(f"document info: {document}.")
        return context, document

    async def exec_async(self, inputs):
        """Chunk the document as a stream and register its metadata record.

        Args:
            inputs: ``(context, document)`` tuple produced by :meth:`prep_async`.

        Returns:
            ``"prepared"`` on success, ``"failed"`` on a known processing error.

        Raises:
            RuntimeError: on any unexpected error, chained to the original cause.
        """
        context, document = inputs
        ingestion: IngestionService = context.get("ingestion")
        processor: DocumentProcessor = context.get("processor")
        file_path = document.get("file_path")
        # Fall back to values derived from the path when the caller omitted them.
        title = document.get("title", os.path.basename(file_path))
        content_type = document.get("content_type", os.path.splitext(file_path)[1].lower())
        file_size = document.get("file_size", os.path.getsize(file_path))
        logger.info(f"开始流式处理文档: {title} ({file_path})")
        try:
            # Keep `chunks` as a generator so the downstream EmbeddingAction can
            # stream it instead of materializing every chunk up front.
            chunks = ingestion.chunk(file_path)
            logger.info(f"文档分块完成，共 {ingestion.chunk_count} 个块")
            document_data = {
                "file_path": file_path,
                "title": title or os.path.basename(file_path),
                "file_size": file_size,
                "content_type": content_type,
                "external_id": str(uuid.uuid4()),
                "metadata": {
                    "chunk_size": ingestion.chunk_config.chunk_size,
                    "chunk_overlap": ingestion.chunk_config.chunk_overlap,
                    "strategy": ingestion.chunk_config.strategy,
                },
            }

            document["id"] = await processor.insert(document_data)
            document["external_id"] = document_data["external_id"]
            document["total_chunks"] = ingestion.chunk_count
            # Hand the generator to the next node rather than waiting for all
            # chunks to complete here.
            document["chunk"] = chunks
            return "prepared"
        except ValueError as e:
            logger.error(f"文档处理错误: {e}")
            return "failed"
        except Exception as e:
            # Chain the original exception so the full traceback is preserved.
            raise RuntimeError(f"文档处理异常: {e}") from e

    async def post_async(self, shared, prep_res, exec_res):
        """Log the preparation outcome and forward it as the next flow action."""
        if exec_res != "prepared":
            logger.error("failed to prepare agent resources in Pocketflow FlowPreparation")
        logger.info(f"IngestAction post exec res: {exec_res}")
        return exec_res

# RAG upsert action
class UpsertAction(AsyncNode):
    """Persist a document's chunk embeddings through the DocumentProcessor."""

    async def prep_async(self, shared):
        """Pull the execution context and document descriptor from the shared store.

        Returns:
            Tuple of ``(context, document)`` dicts (either may be ``None`` if absent).
        """
        context = shared.get("context")
        document = shared.get("document")
        return context, document

    async def exec_async(self, inputs):
        """Insert the document's chunk embeddings into storage.

        Args:
            inputs: ``(context, document)`` tuple produced by :meth:`prep_async`.
                ``document`` must carry ``"id"`` and ``"chunk_embeddings"``.

        Returns:
            ``"success"`` when all chunks are stored.

        Raises:
            RuntimeError: when the insert fails, chained to the original cause.
        """
        context, document = inputs
        processor: DocumentProcessor = context.get("processor")
        # Single quotes inside the f-string: nested double quotes are a
        # SyntaxError on Python < 3.12 (only PEP 701 / 3.12 allows them).
        logger.info(f"upsert chunks for document with id={document['id']}.")
        chunk_embeddings = document.get("chunk_embeddings")
        try:
            await processor.insert_chunks(document["id"], chunk_embeddings)
            # TODO: update document status once chunks are stored, e.g.
            # await processor.update(document["id"], {"status": "processed"})
        except Exception as e:
            logger.error(f"Failed to insert document: {e}")
            # Chain the original exception so the full traceback is preserved.
            raise RuntimeError(f"Failed to insert document: {e}") from e
        # TODO: insert into vector db
        return "success"

    async def post_async(self, shared, prep_res, exec_res):
        """Log the upsert outcome and forward it as the next flow action."""
        if exec_res != "success":
            logger.error("failed to upsert document and chunks into vector db")
        logger.info(f"UpsertAction node -- post exec res: {exec_res}")
        return exec_res

class Completed(AsyncNode):
    """Terminal node of the ingest flow.

    PocketFlow ends a flow when ``post_async`` yields no follow-up action,
    so this node simply returns ``None``.
    """

    async def post_async(self, shared, prep_res, exec_res):
        """Emit no next action, signalling that the flow is complete."""
        return None
