/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package io.milvus.v2.bulkwriter;

import com.fasterxml.jackson.dataformat.csv.CsvMapper;
import com.fasterxml.jackson.dataformat.csv.CsvSchema;
import com.google.common.collect.Lists;
import com.google.gson.Gson;
import com.google.gson.JsonElement;
import com.google.gson.JsonNull;
import com.google.gson.JsonObject;
import io.milvus.bulkwriter.BulkWriter;
import io.milvus.bulkwriter.RemoteBulkWriter;
import io.milvus.bulkwriter.RemoteBulkWriterParam;
import io.milvus.bulkwriter.common.clientenum.BulkFileType;
import io.milvus.bulkwriter.common.clientenum.CloudStorage;
import io.milvus.bulkwriter.common.utils.GeneratorUtils;
import io.milvus.bulkwriter.connect.AzureConnectParam;
import io.milvus.bulkwriter.connect.S3ConnectParam;
import io.milvus.bulkwriter.connect.StorageConnectParam;
import io.milvus.bulkwriter.request.describe.CloudDescribeImportRequest;
import io.milvus.bulkwriter.request.describe.MilvusDescribeImportRequest;
import io.milvus.bulkwriter.request.import_.CloudImportRequest;
import io.milvus.bulkwriter.request.import_.MilvusImportRequest;
import io.milvus.bulkwriter.request.list.CloudListImportJobsRequest;
import io.milvus.bulkwriter.request.list.MilvusListImportJobsRequest;
import io.milvus.bulkwriter.restful.BulkImportUtils;
import io.milvus.v1.CommonUtils;
import io.milvus.v2.client.ConnectConfig;
import io.milvus.v2.client.MilvusClientV2;
import io.milvus.v2.common.ConsistencyLevel;
import io.milvus.v2.common.DataType;
import io.milvus.v2.common.IndexParam;
import io.milvus.v2.service.collection.request.*;
import io.milvus.v2.service.database.request.CreateDatabaseReq;
import io.milvus.v2.service.database.response.ListDatabasesResp;
import io.milvus.v2.service.index.request.CreateIndexReq;
import io.milvus.v2.service.vector.request.QueryReq;
import io.milvus.v2.service.vector.response.QueryResp;

import java.io.File;
import java.io.IOException;
import java.net.URL;
import java.nio.ByteBuffer;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import java.util.*;
import java.util.concurrent.TimeUnit;


public class BulkWriterRemoteExample {
    // Milvus server connection settings — defaults target a local standalone deployment.
    public static final String HOST = "127.0.0.1";
    public static final Integer PORT = 19530;
    public static final String USER_NAME = "user.name";
    public static final String PASSWORD = "password";

    // Shared Gson instance (thread-safe) used for building JsonObject rows.
    private static final Gson GSON_INSTANCE = new Gson();

    /**
     * If you need to transfer the files generated by bulkWriter to the corresponding remote storage (AWS S3, GCP GCS, Azure Blob, Aliyun OSS, Tencent Cloud TOS),
     * you need to configure it accordingly; Otherwise, you can ignore it.
     */
    public static class StorageConsts {
        // Which storage backend the RemoteBulkWriter targets; MINIO works with a local Milvus standalone.
        public static final CloudStorage cloudStorage = CloudStorage.MINIO;

        /**
         * S3-compatible storage settings (AWS S3, GCP GCS, Aliyun OSS, Tencent Cloud TOS, MinIO).
         * Ignored when cloudStorage is an Azure flavor.
         */
        public static final String STORAGE_ENDPOINT = cloudStorage.getEndpoint("http://127.0.0.1:9000");
        public static final String STORAGE_BUCKET = "a-bucket"; // default bucket name of MinIO/Milvus standalone
        public static final String STORAGE_ACCESS_KEY = "minioadmin"; // default ak of MinIO/Milvus standalone
        public static final String STORAGE_SECRET_KEY = "minioadmin"; // default sk of MinIO/Milvus standalone
        /**
         * Region for remote cloud storage.
         * For local storage such as a local MinIO, leave this empty.
         */
        public static final String STORAGE_REGION = "";

        /**
         * Azure Blob storage settings.
         * Only used when cloudStorage is an Azure flavor (see buildStorageConnectParam()).
         */
        public static final String AZURE_CONTAINER_NAME = "azure.container.name";
        public static final String AZURE_ACCOUNT_NAME = "azure.account.name";
        public static final String AZURE_ACCOUNT_KEY = "azure.account.key";
    }


    /**
     * If you have used remoteBulkWriter to generate remote data and want to import data using the Import interface on Zilliz Cloud after generation,
     * you don't need to configure the following object-related parameters (OBJECT_URL, OBJECT_ACCESS_KEY, OBJECT_SECRET_KEY). You can call the callCloudImport method, as the internal logic has been encapsulated for you.
     * <p>
     * If you already have data stored in remote storage (not generated through remoteBulkWriter), and you want to invoke the Import interface on Zilliz Cloud to import data,
     * you need to configure the following parameters and then follow the exampleCloudBulkInsert method.
     * <p>
     * If you do not need to import data through the Import interface on Zilliz Cloud, you can ignore this.
     */
    public static class CloudImportConsts {

        /**
         * Zilliz Cloud API endpoint — the value is fixed per region:
         * overseas regions: https://api.cloud.zilliz.com
         * regions in China: https://api.cloud.zilliz.com.cn
         */
        public static final String CLOUD_ENDPOINT = "https://api.cloud.zilliz.com";
        // API key with import permission on the target cluster/organization.
        public static final String API_KEY = "_api_key_for_cluster_org_";
        public static final String CLUSTER_ID = "_your_cloud_cluster_id_";
        public static final String COLLECTION_NAME = "_collection_name_on_the_cluster_id_";
        // If partition_name is not specified, use ""
        public static final String PARTITION_NAME = "_partition_name_on_the_collection_";

        /**
         * Complete URL for the file or folder to import, similar to
         * https://bucket-name.s3.region-code.amazonaws.com/object-name.
         * Only needed when the data was NOT generated by RemoteBulkWriter.
         * For more details, see https://docs.zilliz.com/docs/import-data-on-web-ui.
         */
        public static final String OBJECT_URL = "_your_storage_object_url_";
        public static final String OBJECT_ACCESS_KEY = "_your_storage_access_key_";
        public static final String OBJECT_SECRET_KEY = "_your_storage_secret_key_";
    }

    // Names and sizing shared by the example collections.
    private static final String DATABASE_NAME = "java_sdk_db";
    private static final String SIMPLE_COLLECTION_NAME = "java_sdk_bulkwriter_simple_v2";
    private static final String ALL_TYPES_COLLECTION_NAME = "java_sdk_bulkwriter_all_v2";
    private static final Integer DIM = 512;                 // dimension of all dense vector fields
    private static final Integer ARRAY_CAPACITY = 10;       // max capacity of array fields in the schema
    private static final String TIME_ZONE = "Asia/Shanghai"; // zone used for timestamptz sample values

    // Initialized by createConnection(); all helpers below assume it is non-null.
    private static MilvusClientV2 milvusClient;

    /**
     * Entry point: connects to Milvus, then runs both examples once per supported file format.
     */
    public static void main(String[] args) throws Exception {
        createConnection();

        // Exercise every output format supported by the bulk writer.
        List<BulkFileType> fileTypes = Arrays.asList(
                BulkFileType.PARQUET,
                BulkFileType.JSON,
                BulkFileType.CSV);

        exampleSimpleCollection(fileTypes);
        exampleAllTypesCollectionRemote(fileTypes);

        // to call cloud import api, you need to apply a cloud service from Zilliz Cloud(https://zilliz.com/cloud)
        // exampleCloudImport();
    }

    /**
     * Connects to Milvus, creates the example database if it does not exist,
     * and switches the client to that database.
     */
    private static void createConnection() {
        System.out.println("\nCreate connection...");
        String url = String.format("http://%s:%s", HOST, PORT);
        milvusClient = new MilvusClientV2(ConnectConfig.builder()
                .uri(url)
                .username(USER_NAME)
                .password(PASSWORD)
                .build());
        System.out.println("\nConnected");

        ListDatabasesResp dbs = milvusClient.listDatabases();
        if (!dbs.getDatabaseNames().contains(DATABASE_NAME)) {
            milvusClient.createDatabase(CreateDatabaseReq.builder()
                    .databaseName(DATABASE_NAME)
                    .build());
        }
        // BUG FIX: useDatabase() was previously called only when the database had just been
        // created, so reruns against an existing database silently stayed on the default one.
        // Switch unconditionally so every run operates on DATABASE_NAME.
        try {
            milvusClient.useDatabase(DATABASE_NAME);
        } catch (Exception e) {
            System.out.println("Unable to switch database, error: " + e);
        }
    }

    /**
     * Creates the simple example collection, then runs the remote writer demo once per file type.
     */
    private static void exampleSimpleCollection(List<BulkFileType> fileTypes) throws Exception {
        CreateCollectionReq.CollectionSchema schema = buildSimpleSchema();
        createCollection(SIMPLE_COLLECTION_NAME, schema);

        for (BulkFileType type : fileTypes) {
            remoteWriter(schema, type);
        }
    }

    /**
     * End-to-end demo for the all-types collection: generates sample data once, then for
     * each file type writes the data to remote storage, recreates the collection,
     * bulk-imports the files, and verifies the imported rows.
     */
    private static void exampleAllTypesCollectionRemote(List<BulkFileType> fileTypes) throws Exception {
        List<Map<String, Object>> originalData = genOriginalData(5);
        List<JsonObject> importRows = genImportData(originalData, true);

        // 4 types vectors + all scalar types + dynamic field enabled, use bulkInsert interface
        for (BulkFileType fileType : fileTypes) {
            CreateCollectionReq.CollectionSchema schema = buildAllTypesSchema();
            List<List<String>> remoteFiles = allTypesRemoteWriter(schema, fileType, importRows);
            createCollection(ALL_TYPES_COLLECTION_NAME, schema);
            callBulkInsert(remoteFiles);
            verifyImportData(schema, originalData);
        }

        // To import through the Zilliz Cloud API instead, apply a cloud service from
        // https://zilliz.com/cloud and replace callBulkInsert(remoteFiles) above with:
        //     callCloudImport(remoteFiles, ALL_TYPES_COLLECTION_NAME, "");
    }

    /**
     * Writes rows for the simple collection to remote storage: first loads sample rows
     * from a bundled CSV, then appends generated rows, and finally commits the batch.
     */
    private static void remoteWriter(CreateCollectionReq.CollectionSchema collectionSchema, BulkFileType fileType) throws Exception {
        System.out.printf("\n===================== remote writer (%s) ====================%n", fileType.name());

        try (RemoteBulkWriter writer = buildRemoteBulkWriter(collectionSchema, fileType)) {
            // Seed the writer with rows parsed from the sample CSV resource.
            readCsvSampleData("data/train_embeddings.csv", writer);

            // Append additional generated rows.
            for (int idx = 0; idx < 100000; idx++) {
                JsonObject record = new JsonObject();
                record.addProperty("path", "path_" + idx);
                record.add("vector", GSON_INSTANCE.toJsonTree(GeneratorUtils.genFloatVector(DIM)));
                record.addProperty("label", "label_" + idx);
                writer.appendRow(record);
            }

            System.out.printf("%s rows appends%n", writer.getTotalRowCount());

            // Flush all buffered rows to remote files.
            writer.commit(false);
            List<List<String>> files = writer.getBatchFiles();
            System.out.printf("Remote writer done! output remote files: %s%n", files);
        } catch (Exception e) {
            System.out.println("Remote writer catch exception: " + e);
            throw e;
        }
    }

    /**
     * Generates one struct-field element with deterministic values derived from {@code seed}.
     *
     * @param seed value used to derive each member's content
     * @return map of struct member name to value
     */
    private static Map<String, Object> genOriginStruct(int seed) {
        Map<String, Object> st = new HashMap<>();
        st.put("st_bool", seed % 3 == 0);
        st.put("st_int8", seed % 128);
        st.put("st_int16", seed % 16384);
        st.put("st_int32", seed % 65536);
        st.put("st_int64", seed);
        st.put("st_float", (float) seed / 4);
        // BUG FIX: "seed / 3" was integer division, storing an Integer (fraction dropped)
        // for a double-typed struct member; cast first like st_float does above.
        st.put("st_double", (double) seed / 3);
        st.put("st_string", String.format("dummy_%d", seed));
        st.put("st_float_vector", CommonUtils.generateFloatVector(DIM));
        return st;
    }

    /**
     * Builds an ISO-8601 offset timestamp string for 2025-01-01 plus {@code i} days,
     * rendered in the example's configured time zone (TIME_ZONE).
     */
    private static String genTimestamptz(int i) {
        LocalDateTime base = LocalDateTime.of(2025, 1, 1, 0, 0, 0).plusDays(i);
        ZonedDateTime zoned = base.atZone(ZoneId.of(TIME_ZONE));
        return DateTimeFormatter.ISO_OFFSET_DATE_TIME.format(zoned);
    }

    /**
     * Generates {@code count} fully-populated raw data rows covering every supported field
     * type, plus one extra trailing row exercising null values for nullable fields.
     * Values here are plain Java objects; genImportData() converts them to JSON rows.
     *
     * @param count number of fully-populated rows (the returned list has count + 1 entries)
     * @return mutable list of field-name -> value maps
     */
    private static List<Map<String, Object>> genOriginalData(int count) {
        List<Map<String, Object>> data = new ArrayList<>();
        for (int i = 0; i < count; ++i) {
            Map<String, Object> row = new HashMap<>();
            // scalar field
            row.put("id", (long) i);
            row.put("bool", i % 5 == 0);
            row.put("int8", i % 128);
            row.put("int16", i % 1000);
            row.put("int32", i % 100000);
            row.put("float", (float) i / 3);
            row.put("double", (double) i / 7);
            row.put("varchar", "varchar_" + i);
            row.put("json", String.format("{\"dummy\": %s, \"ok\": \"name_%s\"}", i, i));
            row.put("geometry", String.format("POINT (%d %d)", i, i));

            // timestamptz field — ISO-8601 offset string in TIME_ZONE

            row.put("timestamp", genTimestamptz(i));

            // vector field
            row.put("float_vector", CommonUtils.generateFloatVector(DIM));
            row.put("binary_vector", CommonUtils.generateBinaryVector(DIM).array());
//            row.put("int8_vector", CommonUtils.generateInt8Vector(DIM).array());
            row.put("sparse_vector", CommonUtils.generateSparseVector());

            // array field — each with a different element count (all <= ARRAY_CAPACITY)
            row.put("array_bool", GeneratorUtils.generatorBoolValue(3));
            row.put("array_int8", GeneratorUtils.generatorInt8Value(4));
            row.put("array_int16", GeneratorUtils.generatorInt16Value(5));
            row.put("array_int32", GeneratorUtils.generatorInt32Value(6));
            row.put("array_int64", GeneratorUtils.generatorLongValue(7));
            row.put("array_varchar", GeneratorUtils.generatorVarcharValue(8, 10));
            row.put("array_float", GeneratorUtils.generatorFloatValue(9));
            row.put("array_double", GeneratorUtils.generatorDoubleValue(10));

            // struct field — 1 to 4 elements depending on the row index
            List<Map<String, Object>> structList = new ArrayList<>();
            for (int k = 0; k < i % 4 + 1; k++) {
                structList.add(genOriginStruct(i + k));
            }
            row.put("struct_field", structList);

            data.add(row);
        }
        // a special record with null/default values
        {
            Map<String, Object> row = new HashMap<>();
            // scalar field — mostly nulls to exercise nullable/default-value handling
            row.put("id", (long) data.size());
            row.put("bool", null);
            row.put("int8", null);
            row.put("int16", 16);
            row.put("int32", null);
            row.put("float", null);
            row.put("double", null);
            row.put("varchar", null);
            row.put("json", null);
            row.put("geometry", null);
            row.put("timestamp", null);

            // vector field — vectors cannot be null, so always populated
            row.put("float_vector", CommonUtils.generateFloatVector(DIM));
            row.put("binary_vector", CommonUtils.generateBinaryVector(DIM).array());
//            row.put("int8_vector", CommonUtils.generateInt8Vector(DIM).array());
            row.put("sparse_vector", CommonUtils.generateSparseVector());

            // array field — two arrays left null to exercise nullable arrays
            row.put("array_bool", GeneratorUtils.generatorBoolValue(10));
            row.put("array_int8", GeneratorUtils.generatorInt8Value(9));
            row.put("array_int16", null);
            row.put("array_int32", GeneratorUtils.generatorInt32Value(7));
            row.put("array_int64", GeneratorUtils.generatorLongValue(6));
            row.put("array_varchar", GeneratorUtils.generatorVarcharValue(5, 10));
            row.put("array_float", GeneratorUtils.generatorFloatValue(4));
            row.put("array_double", null);

            // struct field
            row.put("struct_field", Collections.singletonList(genOriginStruct(0)));

            data.add(row);
        }
        return data;
    }

    /**
     * Converts raw rows from genOriginalData() into Gson JsonObjects accepted by the bulk
     * writer. Deliberately demonstrates both ways of expressing a null value: omitting the
     * key entirely ("bool", "double") and adding an explicit JSON null (other scalars —
     * Gson's addProperty(name, null) stores JsonNull).
     *
     * @param originalData rows produced by genOriginalData()
     * @param isEnableDynamicField when true, adds an extra "dynamic" key not in the schema
     * @return JSON rows ready for RemoteBulkWriter.appendRow()
     */
    private static List<JsonObject> genImportData(List<Map<String, Object>> originalData, boolean isEnableDynamicField) {
        List<JsonObject> data = new ArrayList<>();
        for (Map<String, Object> row : originalData) {
            JsonObject rowObject = new JsonObject();

            // scalar field
            rowObject.addProperty("id", (Number) row.get("id"));
            if (row.get("bool") != null) { // nullable value can be missed
                rowObject.addProperty("bool", (Boolean) row.get("bool"));
            }
            rowObject.addProperty("int8", row.get("int8") == null ? null : (Number) row.get("int8"));
            rowObject.addProperty("int16", row.get("int16") == null ? null : (Number) row.get("int16"));
            rowObject.addProperty("int32", row.get("int32") == null ? null : (Number) row.get("int32"));
            rowObject.addProperty("float", row.get("float") == null ? null : (Number) row.get("float"));
            if (row.get("double") != null) { // nullable value can be missed
                rowObject.addProperty("double", (Number) row.get("double"));
            }
            rowObject.addProperty("varchar", row.get("varchar") == null ? null : (String) row.get("varchar"));
            rowObject.addProperty("geometry", row.get("geometry") == null ? null : (String) row.get("geometry"));
            rowObject.addProperty("timestamp", row.get("timestamp") == null ? null : (String) row.get("timestamp"));

            // Note: for JSON field, use gson.fromJson() to construct a real JsonObject
            // don't use rowObject.addProperty("json", jsonContent) since the value is treated as a string, not a JsonObject
            Object jsonContent = row.get("json");
            rowObject.add("json", jsonContent == null ? null : GSON_INSTANCE.fromJson((String) jsonContent, JsonElement.class));

            // vector field
            rowObject.add("float_vector", GSON_INSTANCE.toJsonTree(row.get("float_vector")));
            rowObject.add("binary_vector", GSON_INSTANCE.toJsonTree(row.get("binary_vector")));
//            rowObject.add("int8_vector", GSON_INSTANCE.toJsonTree(row.get("int8_vector")));
            rowObject.add("sparse_vector", GSON_INSTANCE.toJsonTree(row.get("sparse_vector")));

            // array field — toJsonTree(null) yields JsonNull, so null arrays pass through
            rowObject.add("array_bool", GSON_INSTANCE.toJsonTree(row.get("array_bool")));
            rowObject.add("array_int8", GSON_INSTANCE.toJsonTree(row.get("array_int8")));
            rowObject.add("array_int16", GSON_INSTANCE.toJsonTree(row.get("array_int16")));
            rowObject.add("array_int32", GSON_INSTANCE.toJsonTree(row.get("array_int32")));
            rowObject.add("array_int64", GSON_INSTANCE.toJsonTree(row.get("array_int64")));
            rowObject.add("array_varchar", GSON_INSTANCE.toJsonTree(row.get("array_varchar")));
            rowObject.add("array_float", GSON_INSTANCE.toJsonTree(row.get("array_float")));
            rowObject.add("array_double", GSON_INSTANCE.toJsonTree(row.get("array_double")));

            // struct field
            rowObject.add("struct_field", GSON_INSTANCE.toJsonTree(row.get("struct_field")));

            // dynamic fields
            if (isEnableDynamicField) {
                rowObject.addProperty("dynamic", "dynamic_" + row.get("id"));
            }

            data.add(rowObject);
        }
        return data;
    }

    /**
     * Writes the pre-built all-types rows to remote storage and returns the generated
     * remote file batches.
     *
     * @return remote file paths grouped by batch, as reported by the writer
     */
    private static List<List<String>> allTypesRemoteWriter(CreateCollectionReq.CollectionSchema collectionSchema,
                                                           BulkFileType fileType,
                                                           List<JsonObject> data) throws Exception {
        System.out.printf("\n===================== all field types (%s) ====================%n", fileType.name());

        try (RemoteBulkWriter writer = buildRemoteBulkWriter(collectionSchema, fileType)) {
            for (JsonObject record : data) {
                writer.appendRow(record);
            }
            System.out.printf("%s rows appends%n", writer.getTotalRowCount());
            System.out.println("Generate data files...");
            writer.commit(false);

            List<List<String>> files = writer.getBatchFiles();
            System.out.printf("Data files have been uploaded: %s%n", files);
            return files;
        } catch (Exception e) {
            System.out.println("allTypesRemoteWriter catch exception: " + e);
            throw e;
        }
    }

    /**
     * Assembles a RemoteBulkWriter that uploads generated files to the configured
     * remote storage under the "bulk_data" prefix.
     */
    private static RemoteBulkWriter buildRemoteBulkWriter(CreateCollectionReq.CollectionSchema collectionSchema, BulkFileType fileType) throws IOException {
        final int chunkSize = 512 * 1024 * 1024; // max bytes per generated data chunk
        RemoteBulkWriterParam params = RemoteBulkWriterParam.newBuilder()
                .withCollectionSchema(collectionSchema)
                .withRemotePath("bulk_data")
                .withFileType(fileType)
                .withChunkSize(chunkSize)
                .withConnectParam(buildStorageConnectParam())
                .withConfig("sep", "|") // only take effect for CSV file
                .build();
        return new RemoteBulkWriter(params);
    }

    /**
     * Builds the storage connection parameters for the configured backend:
     * Azure Blob gets a connection-string based param, everything else is S3-compatible.
     */
    private static StorageConnectParam buildStorageConnectParam() {
        if (CloudStorage.isAzCloud(StorageConsts.cloudStorage.getCloudName())) {
            String connectionStr = "DefaultEndpointsProtocol=https;AccountName=" + StorageConsts.AZURE_ACCOUNT_NAME +
                    ";AccountKey=" + StorageConsts.AZURE_ACCOUNT_KEY + ";EndpointSuffix=core.windows.net";
            return AzureConnectParam.newBuilder()
                    .withConnStr(connectionStr)
                    .withContainerName(StorageConsts.AZURE_CONTAINER_NAME)
                    .build();
        }
        return S3ConnectParam.newBuilder()
                .withEndpoint(StorageConsts.STORAGE_ENDPOINT)
                .withCloudName(StorageConsts.cloudStorage.getCloudName())
                .withBucketName(StorageConsts.STORAGE_BUCKET)
                .withAccessKey(StorageConsts.STORAGE_ACCESS_KEY)
                .withSecretKey(StorageConsts.STORAGE_SECRET_KEY)
                .withRegion(StorageConsts.STORAGE_REGION)
                .build();
    }

    /**
     * Reads sample rows from a CSV resource on the classpath and appends each row
     * (vector, label, path) to the given writer.
     *
     * @param filePath classpath-relative path of the CSV resource
     * @param writer   destination bulk writer
     * @throws IOException          if the resource is missing or cannot be parsed
     * @throws InterruptedException if appending a row is interrupted
     */
    private static void readCsvSampleData(String filePath, BulkWriter writer) throws IOException, InterruptedException {
        ClassLoader classLoader = BulkWriterRemoteExample.class.getClassLoader();
        URL resourceUrl = classLoader.getResource(filePath);
        if (resourceUrl == null) {
            // BUG FIX: getResource() returns null for a missing resource; the original code
            // then threw a context-free NullPointerException. Fail fast with a clear message.
            throw new IOException("CSV resource not found on classpath: " + filePath);
        }
        File csvFile = new File(resourceUrl.getFile());

        CsvMapper csvMapper = new CsvMapper();
        CsvSchema csvSchema = CsvSchema.builder().setUseHeader(true).build();
        Iterator<CsvDataObject> iterator = csvMapper.readerFor(CsvDataObject.class).with(csvSchema).readValues(csvFile);
        while (iterator.hasNext()) {
            CsvDataObject dataObject = iterator.next();
            JsonObject row = new JsonObject();

            row.add("vector", GSON_INSTANCE.toJsonTree(dataObject.toFloatArray()));
            row.addProperty("label", dataObject.getLabel());
            row.addProperty("path", dataObject.getPath());

            writer.appendRow(row);
        }
    }

    /**
     * Imports the generated remote files into Milvus via the bulk-import RESTful API,
     * then polls the job state every 5 seconds until it completes or fails.
     *
     * @param batchFiles remote file batches produced by the RemoteBulkWriter
     * @throws InterruptedException if the polling sleep is interrupted
     * @throws RuntimeException     if the import job reports the "Failed" state
     */
    private static void callBulkInsert(List<List<String>> batchFiles) throws InterruptedException {
        String url = String.format("http://%s:%s", HOST, PORT);
        System.out.println("\n===================== import files to milvus ====================");
        Map<String, Object> options = new HashMap<>();
        options.put("sep", "|"); // this option only take effect for CSV
        MilvusImportRequest milvusImportRequest = MilvusImportRequest.builder()
                .collectionName(ALL_TYPES_COLLECTION_NAME)
                .dbName(DATABASE_NAME)
                .files(batchFiles)
                .apiKey(USER_NAME + ":" + PASSWORD) // RESTful API authenticates with "user:password"
                .options(options)
                .build();
        String bulkImportResult = BulkImportUtils.bulkImport(url, milvusImportRequest);
        System.out.println(bulkImportResult);

        JsonObject bulkImportObject = convertJsonObject(bulkImportResult);
        String jobId = bulkImportObject.getAsJsonObject("data").get("jobId").getAsString();
        // Typo fix: message previously read "bulkInert".
        System.out.println("Create a bulkInsert task, job id: " + jobId);

        System.out.println("\n===================== listBulkInsertJobs() ====================");
        MilvusListImportJobsRequest listImportJobsRequest = MilvusListImportJobsRequest.builder()
                .collectionName(ALL_TYPES_COLLECTION_NAME)
                .dbName(DATABASE_NAME)
                .apiKey(USER_NAME + ":" + PASSWORD)
                .build();
        String listImportJobsResult = BulkImportUtils.listImportJobs(url, listImportJobsRequest);
        System.out.println(listImportJobsResult);
        // Poll until the job reaches a terminal state ("Completed" or "Failed").
        while (true) {
            System.out.println("Wait 5 second to check bulkInsert job state...");
            TimeUnit.SECONDS.sleep(5);

            System.out.println("\n===================== getBulkInsertState() ====================");
            MilvusDescribeImportRequest request = MilvusDescribeImportRequest.builder()
                    .jobId(jobId)
                    .apiKey(USER_NAME + ":" + PASSWORD)
                    .build();
            String getImportProgressResult = BulkImportUtils.getImportProgress(url, request);
            System.out.println(getImportProgressResult);

            JsonObject getImportProgressObject = convertJsonObject(getImportProgressResult);
            String state = getImportProgressObject.getAsJsonObject("data").get("state").getAsString();
            String progress = getImportProgressObject.getAsJsonObject("data").get("progress").getAsString();
            if ("Failed".equals(state)) {
                String reason = getImportProgressObject.getAsJsonObject("data").get("reason").getAsString();
                throw new RuntimeException(String.format("The job %s failed, reason: %s", jobId, reason));
            } else if ("Completed".equals(state)) {
                System.out.printf("The job %s completed%n", jobId);
                break;
            } else {
                System.out.printf("The job %s is running, state:%s progress:%s%n", jobId, state, progress);
            }
        }
    }

//    private static void callCloudImport(List<List<String>> batchFiles, String collectionName, String partitionName) throws InterruptedException {
//        String objectUrl = StorageConsts.cloudStorage == CloudStorage.AZURE
//                ? StorageConsts.cloudStorage.getAzureObjectUrl(StorageConsts.AZURE_ACCOUNT_NAME, StorageConsts.AZURE_CONTAINER_NAME, ImportUtils.getCommonPrefix(batchFiles))
//                : StorageConsts.cloudStorage.getS3ObjectUrl(StorageConsts.STORAGE_BUCKET, ImportUtils.getCommonPrefix(batchFiles), StorageConsts.STORAGE_REGION);
//        String accessKey = StorageConsts.cloudStorage == CloudStorage.AZURE ? StorageConsts.AZURE_ACCOUNT_NAME : StorageConsts.STORAGE_ACCESS_KEY;
//        String secretKey = StorageConsts.cloudStorage == CloudStorage.AZURE ? StorageConsts.AZURE_ACCOUNT_KEY : StorageConsts.STORAGE_SECRET_KEY;
//
//        System.out.println("\n===================== call cloudImport ====================");
//        CloudImportRequest bulkImportRequest = CloudImportRequest.builder()
//                .objectUrl(objectUrl).accessKey(accessKey).secretKey(secretKey)
//                .clusterId(CloudImportConsts.CLUSTER_ID).collectionName(collectionName).partitionName(partitionName)
//                .apiKey(CloudImportConsts.API_KEY)
//                .build();
//        String bulkImportResult = BulkImportUtils.bulkImport(CloudImportConsts.CLOUD_ENDPOINT, bulkImportRequest);
//        JsonObject bulkImportObject = convertJsonObject(bulkImportResult);
//
//        String jobId = bulkImportObject.getAsJsonObject("data").get("jobId").getAsString();
//        System.out.println("Create a cloudImport job, job id: " + jobId);
//
//        System.out.println("\n===================== call cloudListImportJobs ====================");
//        CloudListImportJobsRequest listImportJobsRequest = CloudListImportJobsRequest.builder().clusterId(CloudImportConsts.CLUSTER_ID).currentPage(1).pageSize(10).apiKey(CloudImportConsts.API_KEY).build();
//        String listImportJobsResult = BulkImportUtils.listImportJobs(CloudImportConsts.CLOUD_ENDPOINT, listImportJobsRequest);
//        System.out.println(listImportJobsResult);
//        while (true) {
//            System.out.println("Wait 5 second to check bulkInsert job state...");
//            TimeUnit.SECONDS.sleep(5);
//
//            System.out.println("\n===================== call cloudGetProgress ====================");
//            CloudDescribeImportRequest request = CloudDescribeImportRequest.builder().clusterId(CloudImportConsts.CLUSTER_ID).jobId(jobId).apiKey(CloudImportConsts.API_KEY).build();
//            String getImportProgressResult = BulkImportUtils.getImportProgress(CloudImportConsts.CLOUD_ENDPOINT, request);
//            JsonObject getImportProgressObject = convertJsonObject(getImportProgressResult);
//            String importProgressState = getImportProgressObject.getAsJsonObject("data").get("state").getAsString();
//            String progress = getImportProgressObject.getAsJsonObject("data").get("progress").getAsString();
//
//            if ("Failed" .equals(importProgressState)) {
//                String reason = getImportProgressObject.getAsJsonObject("data").get("reason").getAsString();
//                System.out.printf("The job %s failed, reason: %s%n", jobId, reason);
//                break;
//            } else if ("Completed" .equals(importProgressState)) {
//                System.out.printf("The job %s completed%n", jobId);
//                break;
//            } else {
//                System.out.printf("The job %s is running, state:%s progress:%s%n", jobId, importProgressState, progress);
//            }
//        }
//    }

    /**
     * Drops any existing collection with the given name, then creates it from scratch.
     *
     * @param collectionName   name of the collection to (re)create
     * @param collectionSchema collection schema definition
     */
    private static void createCollection(String collectionName, CreateCollectionReq.CollectionSchema collectionSchema) {
        System.out.println("\n===================== create collection ====================");
        checkMilvusClientIfExist();

        // Remove any stale collection first so the example always starts from a clean state.
        DropCollectionReq dropReq = DropCollectionReq.builder()
                .collectionName(collectionName)
                .databaseName(DATABASE_NAME)
                .build();
        milvusClient.dropCollection(dropReq);

        CreateCollectionReq createReq = CreateCollectionReq.builder()
                .collectionName(collectionName)
                .databaseName(DATABASE_NAME)
                .collectionSchema(collectionSchema)
                .consistencyLevel(ConsistencyLevel.BOUNDED)
                .build();
        milvusClient.createCollection(createReq);

        System.out.printf("Collection %s created%n", collectionName);
    }

    /**
     * Compares one field's value between the expected (originally written) row and the
     * entity fetched back from the server, throwing {@code RuntimeException} on mismatch.
     *
     * <p>Handles schema default values, nullable fields, float/double tolerance, JSON
     * fields (expected value stored as a JSON string), binary vectors ({@code ByteBuffer}
     * vs {@code byte[]}) and array fields (server returns an empty list for null arrays).
     *
     * @param collectionSchema schema used to look up the field's default/nullable settings
     * @param expectedData     the row as originally written
     * @param fetchedData      the entity returned by the query
     * @param fieldName        name of the field to compare
     */
    private static void comparePrint(CreateCollectionReq.CollectionSchema collectionSchema,
                                     Map<String, Object> expectedData, Map<String, Object> fetchedData,
                                     String fieldName) {
        CreateCollectionReq.FieldSchema field = collectionSchema.getField(fieldName);
        Object expectedValue = expectedData.get(fieldName);
        if (expectedValue == null) {
            // a missing expected value falls back to the schema-declared default (if any)
            if (field.getDefaultValue() != null) {
                expectedValue = field.getDefaultValue();
                // for Int8/Int16 value, the default value is Short type, the returned value is Integer type
                if (expectedValue instanceof Short) {
                    expectedValue = ((Short) expectedValue).intValue();
                }
            }
        }

        Object fetchedValue = fetchedData.get(fieldName);
        if (fetchedValue == null || fetchedValue instanceof JsonNull) {
            if (!field.getIsNullable()) {
                throw new RuntimeException("Field is not nullable but fetched data is null");
            }
            if (expectedValue != null) {
                throw new RuntimeException("Expected value is not null but fetched data is null");
            }
            return; // both fetchedValue and expectedValue are null
        }

        boolean matched;
        if (fetchedValue instanceof Float) {
            // instanceof guard: avoids an unboxing NPE/ClassCastException (and reports a
            // clean mismatch instead) when the expected value is absent or mistyped
            matched = (expectedValue instanceof Float)
                    && Math.abs((Float) fetchedValue - (Float) expectedValue) < 1e-4;
        } else if (fetchedValue instanceof Double) {
            matched = (expectedValue instanceof Double)
                    && Math.abs((Double) fetchedValue - (Double) expectedValue) < 1e-8;
        } else if (fetchedValue instanceof JsonElement) {
            // expected JSON is stored as a string; parse it before comparing
            JsonElement expectedJson = GSON_INSTANCE.fromJson((String) expectedValue, JsonElement.class);
            matched = fetchedValue.equals(expectedJson);
        } else if (fetchedValue instanceof ByteBuffer) {
            byte[] bb = ((ByteBuffer) fetchedValue).array();
            matched = Arrays.equals(bb, (byte[]) expectedValue);
        } else if (fetchedValue instanceof List) {
            matched = fetchedValue.equals(expectedValue);
            // currently, for array field, null value, the server returns an empty list
            if (((List<?>) fetchedValue).isEmpty() && expectedValue == null) {
                matched = true;
            }
        } else {
            matched = fetchedValue.equals(expectedValue);
        }

        if (!matched) {
            System.out.print("Fetched value:");
            System.out.println(fetchedValue);
            System.out.print("Expected value:");
            System.out.println(expectedValue);
            throw new RuntimeException("Fetched data is unmatched");
        }
    }

    /**
     * Compares a struct-array field between the expected row and the fetched entity.
     * Throws {@code RuntimeException} when the field is missing, is not a list, the
     * element counts differ, or any struct element differs.
     *
     * @param collectionSchema schema used to validate the struct field exists
     * @param expectedData     the row as originally written
     * @param fetchedData      the entity returned by the query
     * @param fieldName        name of the struct field to compare
     */
    private static void compareStruct(CreateCollectionReq.CollectionSchema collectionSchema,
                                      Map<String, Object> expectedData, Map<String, Object> fetchedData,
                                      String fieldName) {
        // the lookup validates that the struct field is declared in the schema
        CreateCollectionReq.StructFieldSchema field = collectionSchema.getStructField(fieldName);
        Object expectedValue = expectedData.get(fieldName);
        Object fetchedValue = fetchedData.get(fieldName);
        if (fetchedValue == null) {
            throw new RuntimeException(String.format("Struct field '%s' missed in fetched data", fieldName));
        }

        @SuppressWarnings("unchecked")
        List<Map<String, Object>> expectedList = (List<Map<String, Object>>) expectedValue;
        if (!(fetchedValue instanceof List<?>)) {
            throw new RuntimeException(String.format("Struct field '%s' value should be a list", fieldName));
        }

        @SuppressWarnings("unchecked")
        List<Map<String, Object>> fetchedList = (List<Map<String, Object>>) fetchedValue;
        if (expectedList.size() != fetchedList.size()) {
            throw new RuntimeException(String.format("Struct field '%s' list count unmatched", fieldName));
        }

        for (int i = 0; i < expectedList.size(); i++) {
            Map<String, Object> expectedStruct = expectedList.get(i);
            Map<String, Object> fetchedStruct = fetchedList.get(i);
            // BUGFIX: the original condition was inverted — it threw "value unmatched"
            // when the elements WERE equal; the check must fail only on inequality.
            if (!expectedStruct.equals(fetchedStruct)) {
                throw new RuntimeException(String.format("Struct field '%s' value unmatched", fieldName));
            }
        }
    }

    /**
     * Loads the collection, queries back two rows (id 1 and the last imported id),
     * and compares every field of the fetched entities against the original rows.
     * Throws {@code RuntimeException} if any field is unmatched.
     */
    private static void verifyImportData(CreateCollectionReq.CollectionSchema collectionSchema, List<Map<String, Object>> rows) {
        List<Long> queryIds = Lists.newArrayList(1L, (long) rows.get(rows.size() - 1).get("id"));
        System.out.printf("Load collection and query items %s%n", queryIds);
        createIndex(collectionSchema);
        loadCollection();
        String expr = String.format("id in %s", queryIds);
        System.out.println(expr);

        List<QueryResp.QueryResult> results = query(expr, Lists.newArrayList("*"));
        System.out.println("Verify data...");
        if (results.size() != queryIds.size()) {
            throw new RuntimeException("Result count is incorrect");
        }

        // All scalar, array and vector fields verified via comparePrint().
        // "int8_vector" stays disabled to mirror the commented-out schema field.
        List<String> fieldNames = Arrays.asList(
                "bool", "int8", "int16", "int32", "float", "double",
                "varchar", "json", "geometry", "timestamp",
                "array_bool", "array_int8", "array_int16", "array_int32",
                "array_int64", "array_varchar", "array_float", "array_double",
                "float_vector", "binary_vector", /* "int8_vector", */ "sparse_vector");
        for (QueryResp.QueryResult result : results) {
            Map<String, Object> fetchedEntity = result.getEntity();
            long id = (Long) fetchedEntity.get("id");
            // assumes the row generator used the list index as the "id" value — TODO confirm
            Map<String, Object> originalEntity = rows.get((int) id);
            for (String fieldName : fieldNames) {
                comparePrint(collectionSchema, originalEntity, fetchedEntity, fieldName);
            }
            compareStruct(collectionSchema, originalEntity, fetchedEntity, "struct_field");
            System.out.println(fetchedEntity);
        }
        System.out.println("Result is correct!");
    }

    /**
     * Declares an index for every vector field of the schema (including float-vector
     * sub-fields inside struct fields), creates them on the server, then loads the
     * collection.
     */
    private static void createIndex(CreateCollectionReq.CollectionSchema collectionSchema) {
        System.out.println("Create index...");
        checkMilvusClientIfExist();

        List<IndexParam> indexes = new ArrayList<>();
        // Top-level vector fields: one index/metric combination per vector type;
        // scalar fields are skipped.
        for (CreateCollectionReq.FieldSchema field : collectionSchema.getFieldSchemaList()) {
            DataType dataType = field.getDataType();
            IndexParam.IndexType indexType;
            IndexParam.MetricType metricType;
            if (dataType == DataType.FloatVector
                    || dataType == DataType.Float16Vector
                    || dataType == DataType.BFloat16Vector) {
                indexType = IndexParam.IndexType.IVF_FLAT;
                metricType = IndexParam.MetricType.L2;
            } else if (dataType == DataType.BinaryVector) {
                indexType = IndexParam.IndexType.BIN_FLAT;
                metricType = IndexParam.MetricType.HAMMING;
            } else if (dataType == DataType.Int8Vector) {
                indexType = IndexParam.IndexType.AUTOINDEX;
                metricType = IndexParam.MetricType.L2;
            } else if (dataType == DataType.SparseFloatVector) {
                indexType = IndexParam.IndexType.SPARSE_WAND;
                metricType = IndexParam.MetricType.IP;
            } else {
                continue; // not a vector field — no index needed here
            }
            indexes.add(IndexParam.builder()
                    .fieldName(field.getName())
                    .indexName(String.format("index_%s", field.getName()))
                    .indexType(indexType)
                    .metricType(metricType)
                    .build());
        }

        // Float-vector sub-fields inside struct fields are addressed as "struct[subField]".
        for (CreateCollectionReq.StructFieldSchema struct : collectionSchema.getStructFields()) {
            for (CreateCollectionReq.FieldSchema subField : struct.getFields()) {
                if (subField.getDataType() != DataType.FloatVector) {
                    continue;
                }
                indexes.add(IndexParam.builder()
                        .fieldName(String.format("%s[%s]", struct.getName(), subField.getName()))
                        .indexName(String.format("index_%s", subField.getName()))
                        .indexType(IndexParam.IndexType.HNSW)
                        .metricType(IndexParam.MetricType.MAX_SIM_COSINE)
                        .build());
            }
        }

        milvusClient.createIndex(CreateIndexReq.builder()
                .collectionName(ALL_TYPES_COLLECTION_NAME)
                .databaseName(DATABASE_NAME)
                .indexParams(indexes)
                .build());

        milvusClient.loadCollection(LoadCollectionReq.builder()
                .collectionName(ALL_TYPES_COLLECTION_NAME)
                .databaseName(DATABASE_NAME)
                .build());
    }

    /**
     * Refresh-loads the all-types collection and prints its row count.
     * refreshLoad (available since v2.5.3) forces segments newly produced by a
     * bulkinsert job to be loaded into memory.
     */
    private static void loadCollection() {
        System.out.println("Refresh load collection...");
        checkMilvusClientIfExist();
        RefreshLoadReq refreshReq = RefreshLoadReq.builder()
                .collectionName(ALL_TYPES_COLLECTION_NAME)
                .databaseName(DATABASE_NAME)
                .build();
        milvusClient.refreshLoad(refreshReq);
        System.out.println("Collection row number: " + getCollectionRowCount());
    }

    /**
     * Queries the all-types collection with the given filter expression.
     *
     * @param expr         boolean filter expression, e.g. {@code "id in [1, 2]"}
     * @param outputFields fields to return ({@code "*"} for all)
     * @return the matching query results
     */
    private static List<QueryResp.QueryResult> query(String expr, List<String> outputFields) {
        System.out.println("========== query() ==========");
        checkMilvusClientIfExist();
        // STRONG consistency so freshly imported data is visible to the query node
        QueryReq queryReq = QueryReq.builder()
                .collectionName(ALL_TYPES_COLLECTION_NAME)
                .databaseName(DATABASE_NAME)
                .filter(expr)
                .outputFields(outputFields)
                .consistencyLevel(ConsistencyLevel.STRONG)
                .timezone(TIME_ZONE)
                .build();
        return milvusClient.query(queryReq).getQueryResults();
    }

    /**
     * Returns the row count of the all-types collection via a {@code count(*)} query.
     */
    private static Long getCollectionRowCount() {
        System.out.println("========== getCollectionRowCount() ==========");
        checkMilvusClientIfExist();

        // Get row count, set ConsistencyLevel.STRONG to sync the data to query node so that data is visible
        QueryReq countRequest = QueryReq.builder()
                .collectionName(ALL_TYPES_COLLECTION_NAME)
                .databaseName(DATABASE_NAME)
                .filter("")
                .outputFields(Collections.singletonList("count(*)"))
                .consistencyLevel(ConsistencyLevel.STRONG)
                .build();
        QueryResp countResponse = milvusClient.query(countRequest);
        return (long) countResponse.getQueryResults().get(0).getEntity().get("count(*)");
    }

    /**
     * Demonstrates the cloud import workflow: submits a bulk-import job for files in
     * object storage, fetches the job's progress once, then lists recent import jobs.
     */
    private static void exampleCloudImport() {
        System.out.println("\n===================== import files to cloud vectordb ====================");
        List<String> objectUrls = Lists.newArrayList(CloudImportConsts.OBJECT_URL);
        // objectUrls takes a list of URL groups; wrap the single group in an outer list
        CloudImportRequest importRequest = CloudImportRequest.builder()
                .objectUrls(Lists.newArrayList(Collections.singleton(objectUrls)))
                .accessKey(CloudImportConsts.OBJECT_ACCESS_KEY)
                .secretKey(CloudImportConsts.OBJECT_SECRET_KEY)
                .clusterId(CloudImportConsts.CLUSTER_ID)
                .collectionName(CloudImportConsts.COLLECTION_NAME)
                .partitionName(CloudImportConsts.PARTITION_NAME)
                .apiKey(CloudImportConsts.API_KEY)
                .build();
        String importResult = BulkImportUtils.bulkImport(CloudImportConsts.CLOUD_ENDPOINT, importRequest);
        System.out.println(importResult);

        System.out.println("\n===================== get import job progress ====================");

        String jobId = convertJsonObject(importResult).getAsJsonObject("data").get("jobId").getAsString();
        CloudDescribeImportRequest describeRequest = CloudDescribeImportRequest.builder()
                .clusterId(CloudImportConsts.CLUSTER_ID)
                .jobId(jobId)
                .apiKey(CloudImportConsts.API_KEY)
                .build();
        String progressResult = BulkImportUtils.getImportProgress(CloudImportConsts.CLOUD_ENDPOINT, describeRequest);
        System.out.println(progressResult);

        System.out.println("\n===================== list import jobs ====================");
        CloudListImportJobsRequest listRequest = CloudListImportJobsRequest.builder()
                .clusterId(CloudImportConsts.CLUSTER_ID)
                .currentPage(1)
                .pageSize(10)
                .apiKey(CloudImportConsts.API_KEY)
                .build();
        String listResult = BulkImportUtils.listImportJobs(CloudImportConsts.CLOUD_ENDPOINT, listRequest);
        System.out.println(listResult);
    }

    /**
     * Builds a minimal schema: an auto-id Int64 primary key, two VarChar scalars
     * ("path" and "label") and a float vector of dimension DIM.
     *
     * @return the assembled collection schema
     */
    private static CreateCollectionReq.CollectionSchema buildSimpleSchema() {
        CreateCollectionReq.CollectionSchema schema = CreateCollectionReq.CollectionSchema.builder()
                .build();
        schema.addField(AddFieldReq.builder()
                .fieldName("id")
                .dataType(DataType.Int64)
                .isPrimaryKey(Boolean.TRUE)
                .autoID(true)
                .build());
        // the two string fields share identical settings
        for (String fieldName : Arrays.asList("path", "label")) {
            schema.addField(AddFieldReq.builder()
                    .fieldName(fieldName)
                    .dataType(DataType.VarChar)
                    .maxLength(512)
                    .build());
        }
        schema.addField(AddFieldReq.builder()
                .fieldName("vector")
                .dataType(DataType.FloatVector)
                .dimension(DIM)
                .build());

        return schema;
    }

    /**
     * Builds a schema exercising every supported field type: scalars (with nullable
     * and default-value variants), vectors, typed arrays, and one struct-array field
     * containing scalar and float-vector sub-fields. Dynamic fields are enabled.
     *
     * <p>NOTE: field declaration order defines the schema layout; do not reorder.
     *
     * @return the assembled all-types collection schema
     */
    private static CreateCollectionReq.CollectionSchema buildAllTypesSchema() {
        CreateCollectionReq.CollectionSchema schemaV2 = CreateCollectionReq.CollectionSchema.builder()
                .enableDynamicField(true)
                .build();
        // scalar field
        // explicit (non-auto) Int64 primary key: callers supply their own ids
        schemaV2.addField(AddFieldReq.builder()
                .fieldName("id")
                .dataType(DataType.Int64)
                .isPrimaryKey(Boolean.TRUE)
                .autoID(false)
                .build());
        schemaV2.addField(AddFieldReq.builder()
                .fieldName("bool")
                .dataType(DataType.Bool)
                .isNullable(true)
                .build());
        // Int8 default values are declared as Short (see comparePrint's conversion note)
        schemaV2.addField(AddFieldReq.builder()
                .fieldName("int8")
                .dataType(DataType.Int8)
                .defaultValue((short) 88)
                .build());
        schemaV2.addField(AddFieldReq.builder()
                .fieldName("int16")
                .dataType(DataType.Int16)
                .build());
        // nullable AND defaulted: exercises default-value substitution for null input
        schemaV2.addField(AddFieldReq.builder()
                .fieldName("int32")
                .dataType(DataType.Int32)
                .isNullable(true)
                .defaultValue(999999)
                .build());
        schemaV2.addField(AddFieldReq.builder()
                .fieldName("float")
                .dataType(DataType.Float)
                .isNullable(true)
                .defaultValue((float) 3.14159)
                .build());
        schemaV2.addField(AddFieldReq.builder()
                .fieldName("double")
                .dataType(DataType.Double)
                .isNullable(true)
                .build());
        schemaV2.addField(AddFieldReq.builder()
                .fieldName("varchar")
                .dataType(DataType.VarChar)
                .maxLength(512)
                .isNullable(true)
                .defaultValue("this is default value")
                .build());
        schemaV2.addField(AddFieldReq.builder()
                .fieldName("json")
                .dataType(DataType.JSON)
                .isNullable(true)
                .build());
        schemaV2.addField(AddFieldReq.builder()
                .fieldName("geometry")
                .dataType(DataType.Geometry)
                .isNullable(true)
                .build());
        schemaV2.addField(AddFieldReq.builder()
                .fieldName("timestamp")
                .dataType(DataType.Timestamptz)
                .isNullable(true)
                .build());

        // vector fields
        schemaV2.addField(AddFieldReq.builder()
                .fieldName("float_vector")
                .dataType(DataType.FloatVector)
                .dimension(DIM)
                .build());
        schemaV2.addField(AddFieldReq.builder()
                .fieldName("binary_vector")
                .dataType(DataType.BinaryVector)
                .dimension(DIM)
                .build());
        // int8_vector intentionally disabled; keep in sync with verifyImportData()
//        schemaV2.addField(AddFieldReq.builder()
//                .fieldName("int8_vector")
//                .dataType(DataType.Int8Vector)
//                .dimension(DIM)
//                .build());
        schemaV2.addField(AddFieldReq.builder()
                .fieldName("sparse_vector")
                .dataType(DataType.SparseFloatVector)
                .build());

        // array fields
        schemaV2.addField(AddFieldReq.builder()
                .fieldName("array_bool")
                .dataType(DataType.Array)
                .maxCapacity(ARRAY_CAPACITY)
                .elementType(DataType.Bool)
                .build());
        schemaV2.addField(AddFieldReq.builder()
                .fieldName("array_int8")
                .dataType(DataType.Array)
                .maxCapacity(ARRAY_CAPACITY)
                .elementType(DataType.Int8)
                .build());
        schemaV2.addField(AddFieldReq.builder()
                .fieldName("array_int16")
                .dataType(DataType.Array)
                .maxCapacity(ARRAY_CAPACITY)
                .elementType(DataType.Int16)
                .isNullable(true)
                .build());
        schemaV2.addField(AddFieldReq.builder()
                .fieldName("array_int32")
                .dataType(DataType.Array)
                .maxCapacity(ARRAY_CAPACITY)
                .elementType(DataType.Int32)
                .build());
        schemaV2.addField(AddFieldReq.builder()
                .fieldName("array_int64")
                .dataType(DataType.Array)
                .maxCapacity(ARRAY_CAPACITY)
                .elementType(DataType.Int64)
                .build());
        // array of strings also carries a per-element max length
        schemaV2.addField(AddFieldReq.builder()
                .fieldName("array_varchar")
                .dataType(DataType.Array)
                .maxCapacity(ARRAY_CAPACITY)
                .elementType(DataType.VarChar)
                .maxLength(512)
                .build());
        schemaV2.addField(AddFieldReq.builder()
                .fieldName("array_float")
                .dataType(DataType.Array)
                .maxCapacity(ARRAY_CAPACITY)
                .elementType(DataType.Float)
                .build());
        schemaV2.addField(AddFieldReq.builder()
                .fieldName("array_double")
                .dataType(DataType.Array)
                .maxCapacity(ARRAY_CAPACITY)
                .elementType(DataType.Double)
                .isNullable(true)
                .build());
        // struct-array field: each element is a struct with scalar sub-fields plus one
        // float-vector sub-field (indexed in createIndex() as "struct_field[st_float_vector]")
        schemaV2.addField(AddFieldReq.builder()
                .fieldName("struct_field")
                .dataType(DataType.Array)
                .elementType(DataType.Struct)
                .maxCapacity(100)
                .addStructField(AddFieldReq.builder()
                        .fieldName("st_bool")
                        .dataType(DataType.Bool)
                        .build())
                .addStructField(AddFieldReq.builder()
                        .fieldName("st_int8")
                        .dataType(DataType.Int8)
                        .build())
                .addStructField(AddFieldReq.builder()
                        .fieldName("st_int16")
                        .dataType(DataType.Int16)
                        .build())
                .addStructField(AddFieldReq.builder()
                        .fieldName("st_int32")
                        .dataType(DataType.Int32)
                        .build())
                .addStructField(AddFieldReq.builder()
                        .fieldName("st_int64")
                        .dataType(DataType.Int64)
                        .build())
                .addStructField(AddFieldReq.builder()
                        .fieldName("st_float")
                        .dataType(DataType.Float)
                        .build())
                .addStructField(AddFieldReq.builder()
                        .fieldName("st_double")
                        .dataType(DataType.Double)
                        .build())
                .addStructField(AddFieldReq.builder()
                        .fieldName("st_string")
                        .dataType(DataType.VarChar)
                        .maxLength(100)
                        .build())
                .addStructField(AddFieldReq.builder()
                        .fieldName("st_float_vector")
                        .dataType(DataType.FloatVector)
                        .dimension(DIM)
                        .build())
                .build());

        return schemaV2;
    }

    /** Fails fast with a descriptive error when the shared client has not been initialized. */
    private static void checkMilvusClientIfExist() {
        if (milvusClient != null) {
            return;
        }
        throw new RuntimeException(
                "milvusClient is null. Please initialize it by calling createConnection() first before use.");
    }

    /** Parses a JSON string into a Gson {@link JsonObject} using the shared Gson instance. */
    private static JsonObject convertJsonObject(String result) {
        JsonObject parsed = GSON_INSTANCE.fromJson(result, JsonObject.class);
        return parsed;
    }
}
