import numpy as np
from pymilvus import connections, db, FieldSchema, CollectionSchema, DataType, Collection
from pymilvus.orm import utility


def create_db():
    """
    Create the database and switch to it.

    Connects to the local Milvus server, creates the ``db_yudao_ai``
    database if it does not already exist, makes it the active database
    for this connection, and prints the database list.
    :return: None
    """
    connections.connect(host="127.0.0.1", port=19530)
    # create_database raises if the database already exists; check first
    # so this setup step is safe to run repeatedly.
    if "db_yudao_ai" not in db.list_database():
        db.create_database("db_yudao_ai")
    # Switch to the database and show all databases
    db.using_database("db_yudao_ai")
    db_list = db.list_database()
    print(db_list)
    # ['default', 'db_yudao_ai']


def create_collection():
    """
    Create the Collection.

    Defines a schema with an INT64 primary key, a 768-dim float vector,
    a VARCHAR description and an INT64 counter, then creates the
    ``ai_word_vector`` collection — analogous to creating a table in a
    relational database.
    :return: None
    """
    connections.connect(host="127.0.0.1", port=19530)
    db.using_database("db_yudao_ai")

    fields = [
        FieldSchema(name="m_id", dtype=DataType.INT64, is_primary=True),
        FieldSchema(name="embedding", dtype=DataType.FLOAT_VECTOR, dim=768),
        FieldSchema(name="desc", dtype=DataType.VARCHAR, max_length=256),
        FieldSchema(name="count", dtype=DataType.INT64),
    ]
    schema = CollectionSchema(
        fields=fields,
        description="测试创建 Collection",
        enable_dynamic_field=True
    )

    # Like CREATE TABLE in a relational database
    Collection(name="ai_word_vector", schema=schema, shards_num=2)


def create_index():
    """
    Create a vector index.

    Builds an IVF_FLAT index with inner-product (IP) similarity on the
    ``embedding`` field of the ``ai_word_vector`` collection, then
    prints the index build progress.
    :return: None
    """
    connections.connect(host="127.0.0.1", port=19530)
    db.using_database("db_yudao_ai")
    index_params = {
        "metric_type": "IP",        # inner-product similarity
        "index_type": "IVF_FLAT",
        "params": {"nlist": 1024}   # number of cluster units
    }

    collection = Collection("ai_word_vector")
    collection.create_index(
        field_name="embedding",
        index_params=index_params
    )

    # index_building_progress returns the build status; the original
    # discarded it — print it so the caller can actually see progress.
    progress = utility.index_building_progress("ai_word_vector")
    print(progress)


def insert_data():
    """
    Insert sample data.

    Generates ``data_num`` rows with random 768-dim vectors and inserts
    them into the ``ai_word_vector`` collection, then flushes (so the
    inserted segment is persisted) and loads the collection into memory.
    :return: None
    """
    connections.connect(host="127.0.0.1", port=19530)
    db.using_database("db_yudao_ai")

    data_num = 18
    m_ids = list(range(data_num))
    embeddings = [np.random.normal(0, 0.1, 768).tolist() for _ in range(data_num)]
    descs = [f"m_id为 {m_id}" for m_id in m_ids]
    counts = list(m_ids)

    collection = Collection("ai_word_vector")
    # Column order must match the schema: m_id, embedding, desc, count
    result = collection.insert([m_ids, embeddings, descs, counts])
    print(result)

    # flush() seals the inserted segment so the rows are persisted and
    # visible to index building; the original relied on auto-flush.
    collection.flush()
    collection.load()


def search_data():
    """
    Search the collection.

    Runs an approximate nearest-neighbour search with a random query
    vector against the ``ai_word_vector`` collection and prints the
    top-3 hits (ids, distances and entity fields).
    :return: None
    """
    connections.connect(host="127.0.0.1", port=19530)
    db.using_database("db_yudao_ai")

    params = {
        "metric_type": "IP",
        "offset": 0,
        "ignore_growing": False,
        "params": {"nprobe": 16},
    }

    collection = Collection("ai_word_vector")
    collection.load()

    query_vector = np.random.normal(0, 0.1, 768).tolist()
    results = collection.search(
        data=[query_vector],
        anns_field="embedding",
        param=params,
        limit=3,
        expr=None,
        output_fields=["m_id", "desc", "count"],
        consistency_level="Strong"
    )
    collection.release()

    top = results[0]
    print(top.ids)
    # e.g. [8, 10, 16]
    print(top.distances)
    # e.g. [0.2971017360687256, 0.20072953402996063, 0.19959932565689087]
    best_hit = top[0]
    # e.g. m_id为 8
    print(best_hit.entity.get("desc"))
    print(results)
    # data: [[{'m_id': 8, 'distance': 0.2971017360687256, 'entity': {'desc': 'm_id为 8', 'count': 8, 'm_id': 8}}, ...]]


def delete_data():
    """
    Delete data.

    Removes the entities whose primary key ``m_id`` is 0, 1 or 2 from
    the ``ai_word_vector`` collection, prints the delete result, and
    reloads the collection.
    :return: None
    """
    connections.connect(host="127.0.0.1", port=19530)
    db.using_database("db_yudao_ai")

    collection = Collection("ai_word_vector")

    # Builds the boolean expression "m_id in [0,1,2]"
    id_list = ",".join(str(pk) for pk in range(3))
    result = collection.delete(f"m_id in [{id_list}]")
    print(result)

    collection.load()

if __name__ == '__main__':
    # Uncomment the step to run. On a fresh server the steps are meant
    # to be executed in order: create_db -> create_collection ->
    # create_index -> insert_data -> search_data -> delete_data.
    # create_db()
    # create_collection()
    # create_index()
    # insert_data()
    # search_data()
    delete_data()
