from typing import Optional, Any, List

from langchain_ollama import OllamaEmbeddings
from llama_index.core.bridge.pydantic import Field, PrivateAttr

from llama_index.core.base.embeddings.base import BaseEmbedding

# NOTE: the *_OPENAI_* constant names are historical — this module originally
# targeted the OpenAI API (base "https://api.openai.com/v1", model
# "text-embedding-ada-002"). They are kept for backward compatibility but now
# point at a local Ollama server.
DEFAULT_OPENAI_API_BASE = "http://localhost:11434"
# Lighter-weight alternative model: "qllama/bge-small-en-v1.5:latest"
DEFAULT_OPENAI_MODEL = "bge-m3:latest"

from langchain_openai import OpenAIEmbeddings


async def aget_embedding(
        aclient: "OllamaEmbeddings", text: str
) -> List[float]:
    """Asynchronously embed ``text`` with the given Ollama embeddings client.

    Adapted from openai-python's ``embeddings_utils`` (to avoid pulling in
    matplotlib/plotly/scipy/sklearn), but the actual call now goes through
    langchain's ``aembed_query`` rather than the OpenAI client.

    Args:
        aclient: Embeddings client exposing an async ``aembed_query`` method.
        text: Raw input text to embed as a single query.

    Returns:
        The embedding vector as a list of floats.
    """
    # Collapse newlines to spaces — legacy normalization kept for parity with
    # the original OpenAI-based implementation.
    text = text.replace("\n", " ")
    return await aclient.aembed_query(text=text)

def get_embedding(aclient: "OllamaEmbeddings", text: str) -> List[float]:
    """Embed ``text`` with the given Ollama embeddings client (synchronous).

    Adapted from openai-python's ``embeddings_utils`` (to avoid pulling in
    matplotlib/plotly/scipy/sklearn), but the actual call now goes through
    langchain's ``embed_query`` rather than the OpenAI client.

    Args:
        aclient: Embeddings client exposing a sync ``embed_query`` method.
            (The parameter name is historical — kept so existing
            ``aclient=...`` keyword callers keep working.)
        text: Raw input text to embed as a single query.

    Returns:
        The embedding vector as a list of floats.
    """
    # Collapse newlines to spaces — legacy normalization kept for parity with
    # the original OpenAI-based implementation.
    text = text.replace("\n", " ")
    return aclient.embed_query(text=text)


class SksOllamaEmbedding(BaseEmbedding):
    """llama-index ``BaseEmbedding`` adapter backed by an Ollama server.

    Keeps an OpenAI-style configuration surface (``api_key`` / ``api_base`` /
    ``model``) for compatibility, but delegates all embedding work to
    ``langchain_ollama.OllamaEmbeddings``.
    """

    # Ollama ignores the key; kept so OpenAI-style configuration still works.
    api_key: str = Field(default='ollama', description="The OpenAI API key.")
    api_base: Optional[str] = Field(
        default=DEFAULT_OPENAI_API_BASE, description="The base URL for OpenAI API."
    )
    model: Optional[str] = Field(
        # Fixed: description was copy-pasted from ``api_base``.
        default=DEFAULT_OPENAI_MODEL, description="The embedding model name."
    )
    # Underlying langchain Ollama client; constructed in ``__init__``.
    _client: Optional[OllamaEmbeddings] = PrivateAttr()

    def __init__(
            self,
            api_base: Optional[str] = DEFAULT_OPENAI_API_BASE,
            api_key: Optional[str] = 'ollama',
            model: Optional[str] = DEFAULT_OPENAI_MODEL,
            **kwargs,
    ):
        """Initialize the embedding adapter and its Ollama client.

        Args:
            api_base: Base URL of the Ollama server.
            api_key: Accepted for API compatibility; not used by Ollama.
            model: Name of the Ollama embedding model to use.
            **kwargs: Forwarded to ``BaseEmbedding`` (e.g. ``embed_batch_size``).
        """
        super().__init__(
            api_key=api_key,
            api_base=api_base,
            model=model,
            **kwargs,
        )
        self._client = OllamaEmbeddings(base_url=api_base, model=model)

    async def _aget_query_embedding(self, query: str) -> List[float]:
        """Asynchronously embed a query string."""
        return await aget_embedding(
            aclient=self._client,
            text=query,
        )

    def _get_query_embedding(self, query: str) -> List[float]:
        """Embed a query string."""
        return get_embedding(
            aclient=self._client,
            text=query,
        )

    def _get_text_embedding(self, text: str) -> List[float]:
        """Embed a document/text chunk."""
        return get_embedding(
            aclient=self._client,
            text=text,
        )

    async def _aget_text_embedding(self, text: str) -> List[float]:
        """Asynchronously embed a document/text chunk.

        Added so async ingestion paths use the async Ollama call instead of
        falling back to the base class's sync implementation.
        """
        return await aget_embedding(
            aclient=self._client,
            text=text,
        )
