from langchain_core.documents import Document
from langchain_text_splitters import RecursiveCharacterTextSplitter


class DocsProcessor:
    """Utility for splitting LangChain documents into smaller chunks."""

    @classmethod
    def split_docs(cls,
                   docs_list: list[Document],
                   chunk_size: int = 1000,
                   chunk_overlap: int = 0) -> list[Document]:
        """
        Split a list of documents into chunks.

        :param docs_list: documents to split; must be non-empty
        :param chunk_size: maximum characters per chunk; must be > 0
        :param chunk_overlap: character overlap between adjacent chunks;
            must be >= 0 and must not exceed ``chunk_size``
        :return: the resulting list of chunked ``Document`` objects
        :raises Exception: any validation or splitting failure, wrapped
            with the "分词处理失败" prefix (message format preserved for
            existing callers); the original cause is chained via ``from``
        """
        try:
            # Validate parameters up front; ValueError is still caught by
            # the wrapper below, so callers see the same message text.
            if chunk_size <= 0 or chunk_overlap < 0:
                raise ValueError("分词参数不可小于0")

            if chunk_overlap > chunk_size:
                raise ValueError("重叠窗口大小不可大于分词长度")

            if not docs_list:
                raise ValueError("文档列表为空")

            # Create the splitter and split the documents. (The previous
            # "splitter is None" check was unreachable: a constructor call
            # either returns an instance or raises.)
            splitter = RecursiveCharacterTextSplitter(
                chunk_size=chunk_size, chunk_overlap=chunk_overlap)
            return splitter.split_documents(docs_list)
        except Exception as e:
            # Chain the cause so the original traceback is not lost.
            raise Exception(f"分词处理失败：{e}") from e