# Standard library
import os.path
import uuid
from abc import ABC, abstractmethod
from datetime import datetime
from typing import Dict, List, Optional

# Third-party
import chromadb
import numpy as np
from chromadb.api.types import QueryResult
from chromadb.utils import embedding_functions
from openai import OpenAI

# Quiet chromadb's logger as early as possible, before project imports that may log.
chromadb.logger.setLevel(chromadb.logging.ERROR)

# Local
from research_agent.constant import API_BASE_URL

class Memory:
    """Vector-store-backed memory for queries and their responses.

    Wraps a persistent ChromaDB client plus an embedding backend (OpenAI API
    or a local SentenceTransformer) and exposes simple add/query/peek/get/
    delete/count helpers over named collections.
    """

    def __init__(
            self,
            project_path: str,
            db_name: str = '.sa',
            platform: str = 'OpenAI',
            api_key: Optional[str] = None,
            embedding_model: str = "text-embedding-3-small"
    ):
        """
        Memory: memory and external knowledge management.
        Args:
            project_path: the path under which the ChromaDB data is stored.
            db_name: name of the database directory created inside project_path.
            platform: 'OpenAI' to embed via the OpenAI API; any other value
                falls back to a local SentenceTransformer model
                ("all-MiniLM-L6-v2").
            api_key: OpenAI API key; when not provided, falls back to the
                OPENAI_API_KEY environment variable (OpenAI platform only;
                raises KeyError if neither is set).
            embedding_model: the OpenAI embedding model to use, default
                "text-embedding-3-small".
        """
        self.db_name = db_name
        self.collection_name = 'memory'
        self.client = chromadb.PersistentClient(path=os.path.join(project_path, self.db_name))
        # Ensure the default collection exists up front.
        self.client.get_or_create_collection(
                self.collection_name,
            )
        # use the OpenAI embedding function if the openai section is set in the configuration.
        if platform == 'OpenAI':
            openai_client = OpenAI(api_key=api_key or os.environ["OPENAI_API_KEY"], base_url=API_BASE_URL)
            # Normalize the API response to a plain list of embedding vectors so
            # both backends are callable as `self.embedder(list_of_texts)`.
            self.embedder = lambda x: [i.embedding for i in openai_client.embeddings.create(input=x, model=embedding_model).data]
        else:
            self.embedder = embedding_functions.SentenceTransformerEmbeddingFunction(model_name="all-MiniLM-L6-v2")

    def add_query(
            self,
            queries: List[Dict[str, str]],
            collection: Optional[str] = None,
            idx: Optional[List[str]] = None
    ) -> List[str]:
        """
        add_query: add the queries to the memory.
        Args:
            queries: the queries to add to the memory. Each item should be in
                the format of
                {
                    "query": "the query",
                    "response": "the response"
                }
            collection: the name of the collection to add the queries to;
                defaults to the instance's default collection.
            idx: the ids of the queries, must be the same length as queries.
                If not provided, the ids will be generated by UUID.

        Returns: the list of record IDs used.
        Raises:
            ValueError: if idx is given but its length does not match queries.
        """
        if idx:
            # Caller-supplied ids must line up one-to-one with the queries;
            # otherwise ChromaDB fails later with an opaque error.
            if len(idx) != len(queries):
                raise ValueError(
                    f"idx has {len(idx)} ids but {len(queries)} queries were given"
                )
            ids = idx
        else:
            ids = [str(uuid.uuid4()) for _ in range(len(queries))]

        if not collection:
            collection = self.collection_name

        query_list = [query['query'] for query in queries]
        embeddings = self.embedder(query_list)
        # All records added in one batch share a single creation timestamp.
        added_time = datetime.now().isoformat()
        resp_list = [{'response': query['response'], 'created_at': added_time} for query in queries]
        # insert the record into the database
        self.client.get_or_create_collection(collection).add(
            documents=query_list,
            metadatas=resp_list,
            ids=ids,
            embeddings=embeddings
        )

        return ids

    def query(self, query_texts: List[str], collection: Optional[str] = None, n_results: int = 5) -> QueryResult:
        """
        query: query the memory.
        Args:
            query_texts: the query texts to search in the memory.
            collection: the name of the collection to search;
                defaults to the instance's default collection.
            n_results: the number of results to return.

        Returns: QueryResult
        class QueryResult(TypedDict):
            ids: List[IDs]
            embeddings: Optional[
                Union[
                    List[Embeddings],
                    List[PyEmbeddings],
                    List[NDArray[Union[np.int32, np.float32]]],
                ]
            ]
            documents: Optional[List[List[Document]]]
            uris: Optional[List[List[URI]]]
            data: Optional[List[Loadable]]
            metadatas: Optional[List[List[Metadata]]]
            distances: Optional[List[List[float]]]
            included: Include
        """
        if not collection:
            collection = self.collection_name
        # Embed the query texts with the same backend used at insertion time.
        query_embedding = self.embedder(query_texts)
        return self.client.get_or_create_collection(collection).query(query_embeddings=query_embedding, n_results=n_results)

    def peek(self, collection: Optional[str] = None, n_results: int = 20):
        """
        peek: peek at the memory.
        Args:
            collection: the name of the collection to peek at;
                defaults to the instance's default collection.
            n_results: the number of results to return.

        Returns: the top n_results records in the collection.
        """
        if not collection:
            collection = self.collection_name
        return self.client.get_or_create_collection(collection).peek(limit=n_results)

    def get(self, collection: Optional[str] = None, record_id: Optional[str] = None):
        """
        get: get the record by its id.
        Args:
            collection: the name of the collection to get the record from;
                defaults to the instance's default collection.
            record_id: the id of the record; if omitted, every record in the
                collection is returned.

        Returns: the record(s).
        """
        if not collection:
            collection = self.collection_name
        # NOTE: get_collection (not get_or_create) — raises if the collection
        # does not exist, rather than silently creating an empty one.
        collection = self.client.get_collection(collection)
        if not record_id:
            return collection.get()

        return collection.get(record_id)

    def delete(self, collection_name: Optional[str] = None):
        """
        delete: delete a memory collection.
        Args:
            collection_name: the name of the collection to delete;
                defaults to the instance's default collection.
        """
        if not collection_name:
            collection_name = self.collection_name
        return self.client.delete_collection(name=collection_name)

    def count(self, collection_name: Optional[str] = None) -> int:
        """
        count: count the number of records in the memory.
        Args:
            collection_name: the name of the collection to count;
                defaults to the instance's default collection.

        Returns: the number of records in the collection.
        """
        if not collection_name:
            collection_name = self.collection_name
        return self.client.get_or_create_collection(name=collection_name).count()

    def reset(self):
        """
        reset: reset the memory, deleting all stored data.
        Notice: You may need to set the environment variable `ALLOW_RESET` to `TRUE` to enable this function.
        """
        self.client.reset()

class Reranker(ABC):
    """Abstract base class for rerankers.

    Subclasses must implement `rerank` to reorder vector-search results by
    relevance to the query text. Inheriting from ABC makes `@abstractmethod`
    effective: concrete subclasses that fail to implement `rerank` cannot be
    instantiated (previously the decorator was a no-op without ABC).
    """

    def __init__(self, model: str) -> None:
        # Identifier of the underlying rerank model.
        self.model = model

    @abstractmethod
    def rerank(self, query_text: str, query_results: List[Dict]) -> List[Dict]:
        """Reorder query_results by relevance to query_text.

        Args:
            query_text: the original query string.
            query_results: the candidate results to reorder.

        Returns: the reordered list of results.
        """
        raise NotImplementedError("Reranker is not implemented")