# -*- coding: utf-8 -*-
# @Author: Tim Liu
# @Date: 2024-06-04
# @Last Modified by: Tim Liu
# @Last Modified time: 2024-06-04

# @Description: vector database service

import os

from sqlalchemy.ext.asyncio import AsyncSession

from fastapi import Depends

# from langchain_community.vectorstores import Milvus
from langchain_milvus import Milvus, Zilliz
from langchain_core.embeddings import Embeddings
from langchain_openai import AzureOpenAIEmbeddings

from config.settings import *
from core.database import db_getter
from core.exception import CustomException
from utils import status

from crewplus.apps.rag.cruds.knowledge_base import KnowledgeBaseDal
from crewplus.apps.rag.models.knowledge_base import KnowledgeBaseDB

import logging

class VDBService(object):
    """Vector database service.

    Wraps construction of the Azure OpenAI embedding model and access to
    Milvus/Zilliz vector stores backing knowledge-base collections.
    """

    def get_embeddings(self) -> Embeddings:
        """Build the Azure OpenAI embedding model used for indexing and search.

        Returns:
            Embeddings: a configured ``AzureOpenAIEmbeddings`` instance.
        """
        # TODO: implement Embedding API pool (due to azure api unstable issue)

        # AzureOpenAIEmbeddings reads its credentials from the environment;
        # export them from settings before constructing the client.
        os.environ["AZURE_OPENAI_API_KEY"] = OPENAI_EMBEDDING_API_KEY
        os.environ["AZURE_OPENAI_ENDPOINT"] = OPENAI_EMBEDDING_API_BASE

        embeddings = AzureOpenAIEmbeddings(
            azure_deployment=OPENAI_EMBEDDING_DEPLOYMENT,
            openai_api_version=OPENAI_EMBEDDING_API_VERSION,
            chunk_size=16,        # texts embedded per request
            request_timeout=60,
            max_retries=2,
        )

        return embeddings

    async def get_vector_store_by_kbase(self, kbase_id: int = 0, kbase_name: str = None, db: AsyncSession = Depends(db_getter)) -> Milvus:
        """Resolve a knowledge base (by id first, then by name) to its vector store.

        Args:
            kbase_id (int, optional): knowledge base primary key; takes
                precedence when > 0. Defaults to 0.
            kbase_name (str, optional): knowledge base name, used only when
                no id resolved a collection. Defaults to None.
            db (AsyncSession, optional): async DB session (FastAPI dependency).

        Returns:
            Milvus: vector store bound to the knowledge base's collection,
            or None when neither id nor name was provided.

        Raises:
            CustomException: when the given id or name does not exist.
        """
        collection_name = None

        if kbase_id > 0:
            # check if the kbase_id does exist, if not, report error
            kbase: KnowledgeBaseDB = await KnowledgeBaseDal(db).get_data(id=kbase_id)

            if kbase is None:
                raise CustomException("Invalid knowledge base id", code=status.HTTP_ERROR)

            collection_name = kbase.name

        if not collection_name and kbase_name is not None:
            # check if the kbase_name does exist, if not, report error
            kbase: KnowledgeBaseDB = await KnowledgeBaseDal(db).get_data(name=kbase_name)

            if kbase is None:
                raise CustomException("knowledge base name does not exist", code=status.HTTP_ERROR)

            collection_name = kbase.name

        # get vector store instance
        vdb = self.get_vector_store(collection_name)

        return vdb

    def get_vector_store(self, collection_name: str = None, embeddings: Embeddings = None) -> Zilliz:
        """Set up a vector store used to save the vector embeddings. Here we use Milvus as the vector store.

        Args:
            collection_name (str, optional): collection name in vector db. Defaults to None.
            embeddings (Embeddings, optional): embedding model. Defaults to None.

        Returns:
            Zilliz: vector store instance, or None when no collection name is given.
        """
        if collection_name is None:
            return None

        if embeddings is None:
            embeddings = self.get_embeddings()

        vdb = Zilliz(
            embedding_function=embeddings,
            collection_name=collection_name,
            connection_args=MILVUS_CONNECTION_ARGS,
        )

        return vdb

    def delete_old_indexes(self, url: str = None, vdb: Milvus = None) -> None:
        """Delete old indexes of the same source_url.

        Args:
            url (str, optional): source url whose previously ingested entries
                should be removed. Defaults to None.
            vdb (Milvus, optional): vector store to delete from. Defaults to None.
        """
        if url is None or vdb is None:
            return

        # Escape backslashes and double quotes so the Milvus boolean
        # expression stays well-formed even for urls containing them
        # (raw concatenation produced a broken/injectable expression).
        safe_url = url.replace("\\", "\\\\").replace('"', '\\"')

        # Delete indexes of the same source_url
        expr = f'source in ["{safe_url}"]'
        pks = vdb.get_pks(expr)

        # Delete entities by pks
        if pks:
            old_items = vdb.delete(pks)
            # Lazy %-formatting avoids building the message unless emitted.
            logging.info("ingesting document -- delete old indexes -- %s", old_items)