#!/usr/local/bin/python
# -*- coding: utf-8 -*-
# Author: yuxiang yu

'''
国产向量数据库

安装依赖
pip install pymilvus==2.2.13

milvus 版本
2.2.11

# sort
1. create collection
2. create index
3. load
4. search
'''
from pymilvus import connections, utility, FieldSchema, CollectionSchema, DataType, Collection, Partition
import datetime


# Connection and index-tuning constants.
# NOTE: the original code declared these with `global` inside the class body,
# so they have always lived at module scope; defining them here preserves
# that interface without the confusing class-level `global` statements.
_ALIAS = "default"  # connection alias, also reused as db_name
_HOST = "106.75.215.89"  # development environment host
_PORT = "24006"
_USER = "root"
# NOTE(review): credentials hard-coded in source — move to env vars / config.
_PASSWORD = "Huayuan@2022"

_INDEX_FILE_SIZE = 1024
_NLIST = 2048
_NPROBE = 16


class MilvusDB(object):
    """Thin wrapper around a Milvus 2.x deployment (pymilvus 2.2.x).

    Maintains two parallel collections with identical schemas — one meant to
    be indexed with the inner-product (IP) metric, one with L2 — so callers
    can pick the similarity measure per query.

    Lifecycle (see module docstring): connect -> load/create collections ->
    build indexes -> load into memory -> insert / search.
    """

    def __init__(
        self,
        dimension=768,
        collection_name_ip="history_dialogue_ip",
        collection_name_l2="history_dialogue_l2",
        output_fields=None,
        only_search=False
    ):
        """Connect to Milvus and prepare both collections.

        :param dimension: dimensionality of the `feature` vector field
        :param collection_name_ip: name of the IP-metric collection
        :param collection_name_l2: name of the L2-metric collection
        :param output_fields: fields returned by searches; defaults to the
            id/code/question/user/session_id/project_name/source set
        :param only_search: when True, make sure indexes exist and load both
            collections into memory for read-only use
        """
        self.rq = datetime.datetime.now()  # instantiation timestamp, used in log lines
        self._DIM = dimension
        self.collection_name_ip = collection_name_ip
        self.collection_name_l2 = collection_name_l2

        assert self._conn() is True
        self.load_collection()
        self.set_index()
        if only_search:  # only search
            # BUGFIX: original checked collection_ip.has_index() twice (the
            # L2 collection was never checked) and then loaded a
            # non-existent `self.collection` attribute.
            if not self.collection_ip.has_index() or not self.collection_l2.has_index():
                self.set_index()
            self.collection_ip.load()
            self.collection_l2.load()
        if output_fields is None:
            self.output_fields = ["id", "code", "question", "user", "session_id", "project_name", "source"]
        else:
            self.output_fields = output_fields

    def load_collection(self):
        """Bind self.collection_ip / self.collection_l2, creating the
        collections on first use; on failure, release both and retry once."""
        try:
            self.collection_ip, self.collection_l2 = self.load_or_create_collection()
        except Exception as e:  # BUGFIX: was a bare `except:`
            print(f"load collection failed, retrying after release: {e}")
            # BUGFIX: load_or_create_collection() returns a (ip, l2) tuple,
            # which has no .release(); release each collection individually.
            for col in self.load_or_create_collection():
                try:
                    col.release()
                except Exception as release_err:
                    print(f"release during retry failed: {release_err}")
            self.collection_ip, self.collection_l2 = self.load_or_create_collection()

    def __del__(self):
        """Best-effort disconnect when the wrapper is garbage-collected."""
        try:
            self._disconnect()
        except Exception as e:
            print(f"auto release failed {e}")

    def __repr__(self):
        return f"db [{_HOST}], rq {datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')}"

    def _conn(self):
        """Open the connection under alias _ALIAS; return True on success.

        On an unexpected error while probing the connection the process
        exits (kept from the original behavior: callers assert the result).
        """
        connections.connect(
            alias=_ALIAS,
            host=_HOST,
            port=_PORT,
            user=_USER,
            password=_PASSWORD,
            db_name=_ALIAS,  # default database
        )
        try:
            if connections.has_connection(_ALIAS):
                print("connection success")
                return True
            return False
        except Exception as e:
            print(f"connection error: {e}")
            # NOTE(review): exiting from a library method is harsh; kept for
            # backward compatibility. (The old `return False` after exit()
            # was unreachable and has been removed.)
            exit(-1)

    def _disconnect(self):
        """Drop the _ALIAS connection, swallowing (but logging) errors."""
        try:
            connections.disconnect(_ALIAS)
        except Exception as e:
            print(f"disconnect error: {e}")

    def manual_close(self):
        """Release both collections from memory and close the connection."""
        try:
            self.collection_ip.release()
            self.collection_l2.release()
            print("collection release success")
            connections.disconnect(_ALIAS)
            print("connection close success")
        except Exception as e:
            print(f"disconnect error: {e}")

    @property
    def show_all_columns(self):
        """Schema field names writable by callers (excludes auto `id`)."""
        return ["code", "question", "feature", "user", "session_id", "embedding_type", "project_name", "status", "create_time"]

    def set_schema(self):
        """Build the shared field list and one CollectionSchema per metric.

        :return: (schema_ip, schema_l2) — identical fields, distinct
            descriptions naming their target collection.
        """
        fields_list = [
            FieldSchema(name="id", dtype=DataType.INT64, is_primary=True, auto_id=True),
            FieldSchema(name="code", dtype=DataType.VARCHAR, max_length=32),
            FieldSchema(name="question", dtype=DataType.VARCHAR, max_length=2048),
            FieldSchema(name="feature", dtype=DataType.FLOAT_VECTOR, dim=self._DIM),
            FieldSchema(name="user", dtype=DataType.VARCHAR, max_length=64),
            FieldSchema(name="session_id", dtype=DataType.VARCHAR, max_length=64),
            FieldSchema(name="embedding_type", dtype=DataType.VARCHAR, max_length=64),
            FieldSchema(name="project_name", dtype=DataType.VARCHAR, max_length=64),
            FieldSchema(name="source", dtype=DataType.VARCHAR, max_length=64),
            # BUGFIX: dropped the meaningless `max_length` kwarg on an INT64 field.
            FieldSchema(name="status", dtype=DataType.INT64),
            FieldSchema(name="create_time", dtype=DataType.VARCHAR, max_length=50)
        ]
        schema_ip = CollectionSchema(
            fields=fields_list,
            description=f"collection: {self.collection_name_ip}",
            enable_dynamic_field=True
            )
        schema_l2 = CollectionSchema(
            fields=fields_list,
            description=f"collection: {self.collection_name_l2}",
            enable_dynamic_field=True
            )
        return schema_ip, schema_l2

    def load_or_create_collection(self):
        """Return (collection_ip, collection_l2), creating both with the
        shared schema when the IP collection does not exist yet."""
        if utility.has_collection(self.collection_name_ip, using=_ALIAS):
            collection_ip = Collection(self.collection_name_ip, using=_ALIAS)
            collection_l2 = Collection(self.collection_name_l2, using=_ALIAS)
            return collection_ip, collection_l2

        print(f"init databse, start create collection")
        schema_ip, schema_l2 = self.set_schema()
        collection_ip = Collection(
                name=self.collection_name_ip,
                schema=schema_ip,
                using=_ALIAS,
                shards_num=2,
            )
        collection_l2 = Collection(
                name=self.collection_name_l2,
                schema=schema_l2,
                using=_ALIAS,
                shards_num=2,
            )

        return collection_ip, collection_l2

    def modify_collection(self):
        """Set a 1800s TTL on both collections (entity time-to-live probe)."""
        # BUGFIX: original referenced non-existent `self.collection`.
        self.collection_ip.set_properties(properties={"collection.ttl.seconds": 1800})
        self.collection_l2.set_properties(properties={"collection.ttl.seconds": 1800})
        return True

    def del_collection(self, collection_name="gptcache_test"):
        """Drop an arbitrary collection by name (no error handling)."""
        utility.drop_collection(collection_name)

    def check_collection(self, collection):
        """Print a human-readable summary of a collection's state."""
        desc = f"""schema: {collection.schema}\ndescription: {collection.description}\nname: {collection.name}\nis_empty: {collection.is_empty}\nnum_entities: {collection.num_entities}\nprimary_field: {collection.primary_field}\npartitions: {collection.partitions}\nindexes: {collection.indexes}\n"""
        print(desc)

    def get_num_entities(self, collection):
        """Return the entity count of `collection`."""
        return collection.num_entities

    def get_primary_field(self, collection):
        """Return the primary-key field of `collection`."""
        return collection.primary_field

    def get_partitions(self, collection):
        """Return the partition list of `collection`."""
        return collection.partitions

    def list_collection(self):
        """List all collection names, or None on error."""
        try:
            return utility.list_collections()
        except Exception as e:
            print(f"list_collection: {e}")
            return None

    def list_partition(self, collection):
        """Return the partitions of `collection`, or False on error."""
        try:
            return collection.partitions
        except Exception as e:
            print(f"list partition error: {e}")
            return False

    def drop_collection(self, collection_name=None):
        """Drop a collection (defaults to "models"); True on success."""
        if collection_name is None:
            collection_name = "models"

        try:
            utility.drop_collection(collection_name)
            print(f"drop collection success")
            return True
        except Exception as e:
            print(f"drop collection error: {e}")
            return False

    def drop_partition(self, partition_name):
        """Drop a partition from both collections.

        On failure, releases each collection and retries the drop once, but
        still returns False so callers know the first attempt failed.
        """
        try:
            self.collection_ip.drop_partition(partition_name)
            self.collection_l2.drop_partition(partition_name)
            return True
        except Exception as e:
            print(f"drop partition error: {e}")
            self.collection_ip.release()
            self.collection_ip.drop_partition(partition_name)
            self.collection_l2.release()
            self.collection_l2.drop_partition(partition_name)
            return False

    def load_partition(self, partition_name):
        """Load the named partition(s) into memory on both collections."""
        if isinstance(partition_name, str):
            partition_name = [str(partition_name).strip()]

        if not isinstance(partition_name, list):
            partition_name = [partition_name]

        try:
            self.collection_ip.load(partition_name, replica_number=2)
            self.collection_l2.load(partition_name, replica_number=2)
            return True
        except Exception as e:
            print(f"load partition error: {e}")
            return False

    def release_collection(self):
        """Release both collections from memory; True on success."""
        try:
            self.collection_ip.release()
            self.collection_l2.release()
            return True
        except Exception as e:
            print(f"release collection error: {e}")
            return False

    def release_partition(self, partition_name):
        """Release one or more named partitions on both collections.

        :param partition_name: str, or tuple/list of str
        :return: True when the input was usable, False otherwise
        """
        if isinstance(partition_name, str):
            partition_name = [str(partition_name).strip()]

        if isinstance(partition_name, (tuple, list)):
            for name in partition_name:
                # BUGFIX: original built Partition(self.collection, ...) on a
                # non-existent attribute; release on both real collections.
                for collection in (self.collection_ip, self.collection_l2):
                    try:
                        Partition(collection, name).release()
                    except Exception as e:
                        print(f"release partition error: {e}")
            return True
        print(f"partition_name: {partition_name}")
        return False

    def delete_entity(self, expr):
        """Delete entities matching `expr` (e.g. "sku in [0,1]") from both
        collections; True on success."""
        try:
            self.collection_ip.delete(expr)
            self.collection_l2.delete(expr)
            return True
        except Exception as e:
            print(f"delete entity error: {e}")
            return False

    def set_partition(self, collection, partition_name=None, timeout=20):
        """Create `partition_name` on `collection` if it does not exist.

        None-ish names fall back to "part1". Always returns True (creation
        errors are only logged — kept from the original behavior).
        """
        if not collection.has_partition(
                partition_name=partition_name,
                timeout=timeout):
            try:
                if partition_name is None or str(partition_name).startswith("None"):
                    partition_name = "part1"
                collection.create_partition(
                    partition_name=partition_name,
                    description=partition_name
                    )
                return True
            except Exception as e:
                print(f"set partition error:{e}, rq: {self.rq}")
        print(f"set_partition: {partition_name} 已存在, rq:{self.rq}")
        return True

    def set_index(self):
        """Ensure an IVF_FLAT index exists on `feature` for both collections.

        Builds whichever index is missing and reloads that collection so the
        new index is served.

        :return: True when both collections end up indexed, False if any
            build failed.
        """
        index_params_ip = {
            "metric_type": "IP",
            "index_type": "IVF_FLAT",
            "params": {"nlist": 128},
        }
        index_params_l2 = {
            "metric_type": "L2",
            "index_type": "IVF_FLAT",
            "params": {"nlist": 128},
        }
        ok = True
        # BUGFIX: original tested `has_index_ip` twice in its first branch
        # and returned after the first build, so a failure on one collection
        # skipped the other entirely.
        for collection, params, label in (
            (self.collection_ip, index_params_ip, "ip"),
            (self.collection_l2, index_params_l2, "l2"),
        ):
            if collection.has_index():
                continue
            try:
                collection.create_index(field_name="feature", index_params=params)
                print(f"build or rebuild index {label} success")
                collection.load()  # reload so the fresh index is usable
            except Exception as e:
                print(f"build index error: {e}")
                ok = False
        return ok

    def drop_index(self):
        """Remove the vector index (and its files) from both collections."""
        try:
            # BUGFIX: original referenced non-existent `self.collection`.
            self.collection_ip.drop_index()
            self.collection_l2.drop_index()
            return True
        except Exception as e:
            print(f"drop index error: {e}")
            return False

    def delete_data(self, data, partition_name: str):
        """Delete rows whose `no` value appears in data[0] (list of ids).

        NOTE(review): the schema defines no `no` field — confirm the
        intended unique column before relying on this expression.
        """
        idx_list = data[0]
        if not isinstance(idx_list, list):
            print("idx list must be type of list")
            exit(-1)
        expr = f"no in {idx_list}"
        try:
            # BUGFIX: original referenced non-existent `self.collection`;
            # delete from both metric collections, as delete_entity does.
            self.collection_ip.delete(expr, partition_name=partition_name)
            self.collection_l2.delete(expr, partition_name=partition_name)
            print("Remove duplicates finished")
            return True
        except Exception as e:
            print(f"delete data error {e}")
            return False

    def insert_data(self, data, partition_name: str, timeout=30):
        """Insert one batch into the named partition of both collections.

        Reconnects and rebuilds state if the connection was lost; on a
        failed insert, releases/reloads both collections and retries once.
        Returns True unless the retry itself raises.
        """
        if not connections.has_connection(_ALIAS):
            self._conn()
            self.load_collection()
            self.set_index()
            self.collection_ip.load()
            self.collection_l2.load()
        self.set_partition(self.collection_ip, partition_name=partition_name, timeout=timeout)
        self.set_partition(self.collection_l2, partition_name=partition_name, timeout=timeout)
        try:
            self.set_index()
            self.collection_ip.insert(data, partition_name=partition_name)
            self.collection_l2.insert(data, partition_name=partition_name)
            return True
        except Exception as e:
            self.set_index()
            print(f"[insert error] {e} [partition_name] {partition_name} with data {data[0]} ")
            # Release/reload both collections, then retry the insert once.
            self.collection_ip.release()
            self.collection_ip.load()
            self.collection_l2.release()
            self.collection_l2.load()
            self.collection_ip.insert(data, partition_name=partition_name)
            self.collection_l2.insert(data, partition_name=partition_name)
            return True

    def _result_dict(self, hit):
        """Flatten one search hit into a plain dict (score = rounded distance)."""
        tmp_dict = {
            "id": hit.entity.get("id"),
            "code": hit.entity.get("code"),
            "question": hit.entity.get("question"),
            "user": hit.entity.get("user"),
            "session_id": hit.entity.get("session_id"),
            "project_name": hit.entity.get("project_name"),
            "source": hit.entity.get("source"),
            "score": round(hit.distance, 3)
            }
        return tmp_dict

    def get_hybrid_vector_search(self, query_vector, embedding_type, project_name, session_id, similar_type, topk=1, timeout=10, nprobe=10, is_dict=True):
        """Vector search filtered by embedding_type / session_id / status.

        :param query_vector: list of query vectors for ANN search
        :param embedding_type: required filter on the `embedding_type` field
        :param project_name: partition to search (one partition per project)
        :param session_id: optional extra filter; None searches all sessions
        :param similar_type: "ip" or "l2"; anything else defaults to "ip"
        :param topk: number of hits to return
        :param is_dict: when True, return the first query's hits as a list
            of plain dicts; otherwise the raw pymilvus result
        """
        metric = "L2" if similar_type == "l2" else "IP"  # unknown types default to IP
        similar_param = {
            "metric_type": metric,
            "params": {"nprobe": nprobe},
        }
        if session_id is None:
            expr = f"embedding_type == '{embedding_type}' and status == 1"
        else:
            expr = f"embedding_type == '{embedding_type}' and session_id == '{session_id}' and status == 1"
        partition_names = [project_name]
        search_param = {
            "data": query_vector,
            "anns_field": "feature",
            "param": similar_param,
            "offset": 0,
            "limit": int(topk),
            # BUGFIX: pymilvus 2.x expects `partition_names`; the original
            # `partition_tags` (Milvus 1.x vocabulary) was swallowed by
            # **kwargs and ignored, so the search spanned every partition.
            "partition_names": partition_names,
            "expr": expr,
            "output_fields": self.output_fields,
            "timeout": timeout,
            "consistency_level": "Strong"
        }
        if not connections.has_connection(_ALIAS):
            self._conn()
            self.load_collection()
            self.set_index()
            self.collection_ip.load()
            self.collection_l2.load()
        collection = self.collection_l2 if similar_type == "l2" else self.collection_ip
        result = collection.search(**search_param)
        if is_dict:
            res_list = [list(map(self._result_dict, list(hits))) for hits in result]
            # BUGFIX: guard the empty-result case instead of IndexError-ing.
            return res_list[0] if res_list else []

        return result


if __name__ == "__main__":
    # Smoke test: connects to the dev Milvus host and sets up collections.
    vector_db = MilvusDB()
    # print(vector_db.get_hybrid_vector_search(self, query_vector, embedding_type, project_name, session_id, similar_type, topk=1, timeout=10, nprobe=10, is_dict=True))
    # vector_db.release_collection()
    # status = vector_db.drop_index()
    # print(status)
#     from uuid import uuid4
#     import numpy as np
#     import datetime
#     nums = 3

#     # TODO: insert data
#     db = MilvusDB()
#     # show all writable columns
#     print(db.show_all_columns)
#     # db.del_collection()
#     # mock data
#     tmp_result_list = []
#     ids = [i for i in range(3, 6)]
#     question = [f"question_{i}" for i in range(3, 6)]
#     feature = [np.random.random((1, 768)).tolist()[0] for _ in range(nums)]
#     status = [str(1) for _ in range(nums)]
#     create_time = [datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S') for _ in range(nums)]
#     tmp_result_list.extend([ids, question, feature, status, create_time])
#     # write the batch
#     db.insert_data(data=tmp_result_list, partition_name='test')
    # manual close
    # db.manual_close()
    # del db

    # TODO: search data
    # search
#     db_search = MilvusDB(only_search=True)
#     result = db_search.get_search([feature[0]], partition_names=['test', '_default'], topk=10, nprobe=10, is_dict=True)
#     print(f"识别结果", result)

#     result = db_search.get_search([feature[1]], partition_names=['test', '_default'], topk=10, nprobe=10, is_dict=True)
#     print(f"识别结果", result)
#     # manual release
#     db_search.manual_close()
#     del db_search

    # TODO: other metrics
    # print("=== 其他指标 ===")
    # db_info = MilvusDB()
    # print(db_info.get_num_entities())
    # db_info.manual_close()
    # del db_info