/*
* Copyright (c) 2022 Shenzhen Kaihong Digital Industry Development Co., Ltd. 
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
*     http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/


/**
 * @file ai_inference.h
 *
 * @brief Defines the basic functions for the AI inference.
 *
 * @since 2.2
 * @version 1.0
 */

#ifndef AI_INFERENCE_H
#define AI_INFERENCE_H

#include <cstdint>
#include <memory>
#include <string>
#include <vector>
#include <sys/types.h>
#include "ai_datatype.h"
#include "ai_callback.h"
#include "ai_constants.h"

#include "protocol/struct_definition/infer_info_define.h"
#include "utils/constants/constants.h"
#include "platform/os_wrapper/utils/single_instance.h"

namespace OHOS {
namespace AI_STD {

// NOTE(review): the destructor releases the plugin session via Destroy(), but copy and
// move operations are not deleted, so copying an AiInference would lead to a double
// Destroy() on the same session (Rule of Five). Consider deleting copy/move once it is
// confirmed no callers rely on them.
class AiInference {
public:
    /**
     * @brief Defines the constructor for the AI inference.
     *
     * Clears any stale model path entries so the instance starts empty.
     *
     * @since 2.2
     * @version 1.0
     */
    AiInference()
    {
        algorithmInfo_.modelPathInfos.clear();
    }

    /**
     * @brief Defines the destructor for the AI inference.
     *
     * Calls {@link Destroy} to release the session engaged with the plugin.
     *
     * @since 2.2
     * @version 1.0
     */
    ~AiInference()
    {
        Destroy();
    }

    /**
     * @brief Creates an inference instance.
     *
     * @param frameworkType Indicates the framework id defined by {@link Framework_type} for framework load.
     * @param algorithmName Indicates the neural network name.
     * @param modelPathInfos Indicates the paths of the inference models.
     * @param algorithmVersion Indicates the version of the neural network.
     * @return Returns {@link AI_RETCODE_SUCCESS} if the operation is successful;
     * returns a non-zero error code defined by {@link AiRetCode} otherwise.
     *
     * @since 2.2
     * @version 1.0
     */
    int32_t Create(int frameworkType, const std::string &algorithmName, std::vector<ModelPathInfo> &modelPathInfos,
        long long algorithmVersion = ALGOTYPE_VERSION_DEFAULT);

    /**
     * @brief Synchronously executes the inference task.
     *
     * @param input Indicates the input tensors defined by {@link IOTensors} for the inference task.
     * @param output Indicates the output tensors defined by {@link IOTensors} for the inference task.
     * @return Returns {@link AI_RETCODE_SUCCESS} if the operation is successful;
     * returns a non-zero error code defined by {@link AiRetCode} otherwise.
     *
     * @since 2.2
     * @version 1.0
     */
    int32_t SyncExecute(const IOTensors &input, IOTensors &output);

    /**
     * @brief Sets the callback for the inference task.
     *
     * @param callback Indicates the callback defined by {@link ICallback} for implementing the post-processing logic.
     * @return Returns {@link AI_RETCODE_SUCCESS} if the operation is successful;
     * returns a non-zero error code defined by {@link AiRetCode} otherwise.
     *
     * @since 2.2
     * @version 1.0
     */
    int32_t SetCallback(const std::shared_ptr<ICallback> &callback);

    /**
     * @brief Destroys the inference instance to release the session engaged with the plugin.
     *
     * @return Returns {@link AI_RETCODE_SUCCESS} if the operation is successful;
     * returns a non-zero error code defined by {@link AiRetCode} otherwise.
     *
     * @since 2.2
     * @version 1.0
     */
    int32_t Destroy();
private:
    // Post-processing callback; null until SetCallback() is invoked.
    std::shared_ptr<ICallback> callback_ = nullptr;
    ConfigInfo configInfo_ {
        .description = "Inference config description"
    };
    ClientInfo clientInfo_ {
        .clientVersion = CLIENT_VERSION_DEFAULT,
        .clientId = INVALID_CLIENT_ID,
        .sessionId = INVALID_SESSION_ID,
        .extendLen = EXT_MSG_LEN_DEFAULT,
        .extendMsg = nullptr
    };
    AlgorithmInfo algorithmInfo_ {
        .clientVersion = CLIENT_VERSION_DEFAULT,
        .isAsync = false,
        .frameworkType = INVALID_FRAMEWORK_TYPE,
        .algorithmName = ALGORITHM_TYPE_DEFAULT,
        .algorithmVersion = ALGOTYPE_VERSION_DEFAULT,
        .isCloud = false,
        .operateId = STARTING_OPERATE_ID,
        .requestId = STARTING_REQ_ID,
        .extendLen = EXT_MSG_LEN_DEFAULT,
        .extendMsg = nullptr
    };
};

} // namespace AI_STD
} // namespace OHOS
#endif // AI_INFERENCE_H
/** @} */
