import chromadb,time,hashlib
from support.ai.localai import Localai
from support.ai.ollama import Ollama
from chromadb import Documents, EmbeddingFunction, Embeddings

class MyEmbeddingFunction(EmbeddingFunction):
    """Chroma embedding function that delegates to a local model backend.

    Retries until the backend returns embeddings, sleeping WAIT_TIMEOUT
    seconds between failed attempts.
    """

    # Seconds to wait between retries when the backend returns None.
    WAIT_TIMEOUT = 5

    def __init__(self, model):
        """model: name of the embedding model passed to the backend on each call."""
        # Alternative backend: self.models = Localai()
        self.models = Ollama()
        self.embeddingsModel = model

    def __call__(self, input: Documents) -> Embeddings:
        """Embed *input* documents, retrying until the backend answers.

        NOTE(review): loops forever if the backend never recovers —
        consider adding a max-retry cap.
        """
        while True:
            embeddings = self.models.embeddings(input, model=self.embeddingsModel)
            if embeddings is not None:
                return embeddings
            time.sleep(MyEmbeddingFunction.WAIT_TIMEOUT)


class VectorDB():
    """Thin wrapper around a Chroma HTTP collection with content-hash dedup."""

    # Chroma server location (hostname resolves inside the compose network).
    CHROMA_SERVER_URL = "chroma"
    CHROMA_SERVER_PORT = 8000

    def __init__(self, name, *, embeddings=None):
        """Connect to the Chroma server and open (or create) collection *name*.

        embeddings: embedding model name forwarded to MyEmbeddingFunction.
        """
        self.client = chromadb.HttpClient(host=VectorDB.CHROMA_SERVER_URL, port=VectorDB.CHROMA_SERVER_PORT)
        self.embeddingFunction = MyEmbeddingFunction(embeddings)
        self.collection = self.client.get_or_create_collection(name=name, embedding_function=self.embeddingFunction)

    def clean(self, name):
        """Delete collection *name* from the server."""
        self.client.delete_collection(name=name)

    def __isExist__(self, _id):
        """Return True if a record with id *_id* already exists in the collection."""
        ret = self.collection.get(ids=[_id])
        return len(ret.get('ids')) > 0

    def addData(self, metadatas, _documents):
        """Add one document with its metadata, skipping duplicates.

        The record id is the MD5 hex digest of the document text, so
        identical documents are stored only once. (MD5 is used purely as
        a dedup key, not for anything security-sensitive.)
        """
        _id = hashlib.md5(_documents.encode()).hexdigest()
        if not self.__isExist__(_id):
            self.collection.add(
                ids=[_id],
                metadatas=[metadatas],
                documents=[_documents]
            )

    def query(self, _text, *, num=10):
        """Return up to *num* nearest-neighbor results for *_text*."""
        return self.collection.query(
            query_texts=[_text],
            n_results=num
        )
