"""Build or refresh the vector store from local documents."""

from __future__ import annotations

from pathlib import Path
from typing import Iterable, List

try:
    from langchain_text_splitters import RecursiveCharacterTextSplitter
except ImportError:
    from langchain.text_splitter import RecursiveCharacterTextSplitter  # type: ignore
from langchain_community.document_loaders import DirectoryLoader, TextLoader
from langchain_community.vectorstores import Chroma

try:
    from langchain_openai import OpenAIEmbeddings
except ImportError:  # pragma: no cover - fallback for older LangChain installs
    from langchain.embeddings import OpenAIEmbeddings  # type: ignore

from settings import load_settings


def load_documents(source_dir: Path) -> List:
    """Recursively read every file under *source_dir* into LangChain documents.

    Files are decoded as UTF-8; unreadable files are skipped rather than
    aborting the whole run (``silent_errors=True``).

    Raises:
        FileNotFoundError: if *source_dir* does not exist.
    """
    if not source_dir.exists():
        raise FileNotFoundError(f"Source directory not found: {source_dir}")

    # Collect the loader configuration once, then construct and run it.
    loader_options = dict(
        glob="**/*.*",
        loader_cls=TextLoader,
        loader_kwargs={"encoding": "utf-8"},
        show_progress=True,
        silent_errors=True,
        use_multithreading=True,
    )
    directory_loader = DirectoryLoader(str(source_dir), **loader_options)
    return directory_loader.load()


def split_documents(documents: Iterable, chunk_size: int, chunk_overlap: int) -> List:
    """Break *documents* into overlapping chunks sized for embedding.

    ``add_start_index=True`` records each chunk's character offset within its
    source document in the chunk metadata.
    """
    materialized = list(documents)
    chunker = RecursiveCharacterTextSplitter(
        chunk_size=chunk_size,
        chunk_overlap=chunk_overlap,
        add_start_index=True,
    )
    return chunker.split_documents(materialized)


def build_embeddings(api_key: str, api_base: str | None, model: str) -> OpenAIEmbeddings:
    """Construct the embeddings backend with compatibility fallbacks.

    Newer ``langchain_openai`` releases accept ``api_key`` / ``base_url``;
    older LangChain builds expect ``openai_api_key`` / ``openai_api_base``.
    The modern spelling is tried first, then the legacy one.

    Args:
        api_key: API key; omitted from kwargs when falsy so the library's
            own environment-variable lookup can apply.
        api_base: Optional custom endpoint URL.
        model: Embedding model identifier.

    Raises:
        Exception: the error from the modern constructor when both
            constructor variants fail.
    """
    primary_kwargs: dict = {"model": model}
    if api_key:
        primary_kwargs["api_key"] = api_key
    if api_base:
        primary_kwargs["base_url"] = api_base

    try:
        return OpenAIEmbeddings(**primary_kwargs)
    except Exception as primary_err:
        legacy_kwargs: dict = {"model": model}
        if api_key:
            legacy_kwargs["openai_api_key"] = api_key
        if api_base:
            legacy_kwargs["openai_api_base"] = api_base
        try:
            return OpenAIEmbeddings(**legacy_kwargs)
        except Exception:
            # Re-raise the modern-constructor error without implicit chaining:
            # otherwise the traceback headline would be the legacy fallback's
            # failure ("During handling of the above exception..."), which is
            # noise — the legacy attempt was only a compatibility shim.
            raise primary_err from None


def main() -> None:
    """Ingest local documents into the persistent Chroma vector store.

    Loads files from the configured source directory, splits them into
    chunks, embeds them, and persists the result. Prints a summary, or a
    hint when no documents are found.

    Raises:
        RuntimeError: if no API key is configured.
    """
    settings = load_settings()

    # SECURITY: never fall back to a hard-coded API key baked into source —
    # a committed credential is effectively public. Require configuration.
    api_key = settings.api_key
    if not api_key:
        raise RuntimeError(
            "No embeddings API key configured; set it in settings before running ingestion."
        )
    api_base = settings.api_base or "https://api.siliconflow.cn/v1/"

    documents = load_documents(settings.source_dir)
    if not documents:
        print("No documents found. Add files to data/source_documents and re-run.")
        return

    chunks = split_documents(documents, settings.chunk_size, settings.chunk_overlap)
    embeddings = build_embeddings(api_key, api_base, settings.embed_model)

    # Persistence is handled by Chroma itself via persist_directory.
    Chroma.from_documents(
        documents=chunks,
        embedding=embeddings,
        persist_directory=str(settings.persist_dir),
    )

    print(f"Ingestion complete. Stored {len(chunks)} chunks in {settings.persist_dir}.")


if __name__ == "__main__":
    main()
