DeLaw_ollama / split_document.py
from langchain_experimental.text_splitter import SemanticChunker


def split_docs(docs, embedder):
    # Split the documents into semantically coherent chunks using the
    # SemanticChunker, which relies on the provided embedding model to
    # detect topic boundaries.
    print("Splitting documents into chunks...")
    text_splitter = SemanticChunker(embeddings=embedder)
    documents = text_splitter.split_documents(docs)
    return documents
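

if __name__ == "__main__":
    # Minimal usage sketch for illustration only. The loader and embedding
    # model below are assumptions; the real ones are constructed in the
    # other files of this project and passed into split_docs().
    from langchain_community.document_loaders import TextLoader
    from langchain_community.embeddings import HuggingFaceEmbeddings

    docs = TextLoader("sample.txt").load()  # hypothetical input document
    embedder = HuggingFaceEmbeddings()      # hypothetical embedding model
    chunks = split_docs(docs, embedder)
    print(f"Produced {len(chunks)} chunks")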