from typing import Any, List, Optional

import torch
from langchain_core.embeddings import Embeddings
from pydantic import BaseModel

from patagent.constant import EMBEDDING_MODEL, LOCAL_DEVICE


class LocalEmbeddings(BaseModel, Embeddings):
    """Local embedding model run on this machine (no remote API calls).

    Setup:
        To use, you should deploy on server with GPU resource.

    Instantiate:
        .. code-block:: python

            from embedding import LocalEmbeddings

            embeddings = LocalEmbeddings()

    Embed:
        .. code-block:: python

            # embed the documents
            vectors = embeddings.embed_documents([text1, text2, ...])

            # embed the query
            vectors = embeddings.embed_query(text)
    """

    # Typed as Any because the concrete classes come from modelscope,
    # an optional dependency imported lazily inside __init__.
    tokenizer: Any = None
    model: Any = None

    def __init__(self, **kwargs: Any):
        """Load the tokenizer and model, and move the model to LOCAL_DEVICE.

        Raises:
            ImportError: If the optional ``modelscope`` package is not installed.
        """
        super().__init__(**kwargs)

        try:
            from modelscope import AutoTokenizer, AutoModel
        except ImportError as exc:
            raise ImportError(
                "Could not import modelscope python package. "
                "Please install it with `pip install modelscope`."
            ) from exc

        self.tokenizer = AutoTokenizer.from_pretrained(EMBEDDING_MODEL)
        self.model = AutoModel.from_pretrained(EMBEDDING_MODEL)
        self.model.to(LOCAL_DEVICE)
        # Inference only: put the model in eval mode so dropout and similar
        # training-time layers are disabled and embeddings are deterministic.
        self.model.eval()

    def _embed(self, texts: List[str]) -> List[List[float]]:
        """Run the local model and return L2-normalized embeddings.

        Args:
            texts: A list of texts to embed.

        Returns:
            One embedding (list of floats) per input text, in order.
            An empty list if ``texts`` is empty.
        """
        # Guard: the tokenizer raises on an empty batch.
        if not texts:
            return []

        inputs = self.tokenizer(
            texts, padding=True, truncation=True, max_length=512, return_tensors="pt"
        )
        inputs_on_device = {k: v.to(LOCAL_DEVICE) for k, v in inputs.items()}

        # no_grad: this is pure inference — skip building the autograd graph,
        # which would otherwise hold extra GPU memory on every call.
        with torch.no_grad():
            outputs = self.model(**inputs_on_device, return_dict=True)
        # Use the first ([CLS]) token's hidden state as the sentence embedding.
        embeddings = outputs.last_hidden_state[:, 0]
        # L2-normalize so dot products between embeddings equal cosine similarity.
        embeddings = embeddings / embeddings.norm(dim=1, keepdim=True)
        return embeddings.tolist()

    def embed_documents(self, texts: List[str]) -> List[List[float]]:
        """Embed a list of documents.

        Args:
            texts: The list of texts to embed.

        Returns:
            A list of embeddings, one for each text.
        """
        return self._embed(texts)

    def embed_query(self, text: str) -> List[float]:
        """Embed a single query text.

        Args:
            text: The text to embed.

        Returns:
            The embedding for the text.
        """
        # _embed always returns one embedding per input, so [0] is safe.
        return self._embed([text])[0]
