/**
 * Copyright 2021 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/**
 * @addtogroup MindSpore
 * @{
 *
 * @brief Provides APIs related to MindSpore Lite model inference.
 * 
 * @Syscap SystemCapability.Ai.MindSpore
 * @since 9
 */

/**
 * @file context.h
 * 
 * @brief Provides **Context** APIs for configuring runtime information.
 *
 * File to include: \<mindspore/context.h>
 * @library libmindspore_lite_ndk.so
 * @since 9
 */
#ifndef MINDSPORE_INCLUDE_C_API_CONTEXT_C_H
#define MINDSPORE_INCLUDE_C_API_CONTEXT_C_H

#include <stddef.h>
#include <stdint.h>
#include <stdbool.h>
#include "mindspore/types.h"

#ifdef __cplusplus
extern "C"
{
#endif
/**
 * @brief Defines the pointer to the MindSpore context.
 *
 * This is an opaque handle: it is created by {@link OH_AI_ContextCreate} and
 * released by {@link OH_AI_ContextDestroy}. Callers must not dereference it.
 *
 * @since 9
 */
typedef void *OH_AI_ContextHandle;

/**
 * @brief Defines the pointer to the MindSpore device information.
 *
 * This is an opaque handle: it is created by {@link OH_AI_DeviceInfoCreate} and
 * released by {@link OH_AI_DeviceInfoDestroy} (unless ownership has been
 * transferred to a context via {@link OH_AI_ContextAddDeviceInfo}).
 *
 * @since 9
 */
typedef void *OH_AI_DeviceInfoHandle;

/**
 * @brief Creates a context object.
 *
 * Note: The returned handle should be released with {@link OH_AI_ContextDestroy}
 * when it is no longer needed.
 *
 * @return {@link OH_AI_ContextHandle} that points to the context.
 * @since 9
 */
OH_AI_API OH_AI_ContextHandle OH_AI_ContextCreate(void);

/**
 * @brief Destroys a context object.
 *
 * @param context Double pointer to {@link OH_AI_ContextHandle}. After the context is destroyed,
 * the handle pointed to by **context** is set to null.
 * @since 9
 */
OH_AI_API void OH_AI_ContextDestroy(OH_AI_ContextHandle *context);

/**
 * @brief Sets the number of runtime threads.
 *
 * @param context {@link OH_AI_ContextHandle} that points to the context instance.
 * @param thread_num Number of runtime threads.
 * @since 9
 */
OH_AI_API void OH_AI_ContextSetThreadNum(OH_AI_ContextHandle context, int32_t thread_num);

/**
 * @brief Obtains the number of runtime threads configured in the context.
 *
 * @param context {@link OH_AI_ContextHandle} that points to the context instance.
 * @return Number of threads.
 * @since 9
 */
OH_AI_API int32_t OH_AI_ContextGetThreadNum(const OH_AI_ContextHandle context);

/**
 * @brief Sets the affinity mode for binding runtime threads to CPU cores, which are classified into
 * large, medium, and small cores based on the CPU frequency. Only the large or medium cores
 * need to be bound, not the small cores.
 *
 * @param context {@link OH_AI_ContextHandle} that points to the context instance.
 * @param mode Affinity mode. **0**: no affinities; **1**: big cores first; **2**: medium cores first.
 * @since 9
 */
OH_AI_API void OH_AI_ContextSetThreadAffinityMode(OH_AI_ContextHandle context, int mode);

/**
 * @brief Obtains the affinity mode for binding runtime threads to CPU cores.
 *
 * @param context {@link OH_AI_ContextHandle} that points to the context instance.
 * @return Affinity mode. **0**: no affinities; **1**: big cores first; **2**: medium cores first.
 * @since 9
 */
OH_AI_API int OH_AI_ContextGetThreadAffinityMode(const OH_AI_ContextHandle context);

/**
 * @brief Sets the list of CPU cores bound to a runtime thread.
 *
 * For example, if **core_list** is set to **[2,6,8]**, threads run on the 2nd, 6th, and 8th CPU cores.
 * If {@link OH_AI_ContextSetThreadAffinityMode} and {@link OH_AI_ContextSetThreadAffinityCoreList}
 * are called for the same context object,
 * the **core_list** parameter of {@link OH_AI_ContextSetThreadAffinityCoreList} takes effect,
 * but the **mode** parameter of {@link OH_AI_ContextSetThreadAffinityMode} does not.
 *
 * @param context {@link OH_AI_ContextHandle} that points to the context instance.
 * @param core_list List of bound CPU cores.
 * @param core_num Number of cores, which indicates the length of {@link core_list}.
 * @since 9
 */
OH_AI_API void OH_AI_ContextSetThreadAffinityCoreList(OH_AI_ContextHandle context, const int32_t *core_list,
                                                        size_t core_num);

/**
 * @brief Obtains the list of bound CPU cores.
 *
 * @param context {@link OH_AI_ContextHandle} that points to the context instance.
 * @param core_num Output parameter that receives the number of CPU cores in the returned list.
 * @return List of bound CPU cores. This list is managed by {@link OH_AI_ContextHandle}.
 * The caller does not need to destroy it manually.
 * @since 9
 */
OH_AI_API const int32_t *OH_AI_ContextGetThreadAffinityCoreList(const OH_AI_ContextHandle context, size_t *core_num);

/**
 * @brief Sets whether to enable parallelism between operators. The setting is ineffective because
 * the feature of this API is not yet available.
 *
 * @param context {@link OH_AI_ContextHandle} that points to the context instance.
 * @param is_parallel Whether to enable parallelism between operators. The value **true** means to
 * enable parallelism between operators, and the value **false** means the opposite.
 * @since 9
 */
OH_AI_API void OH_AI_ContextSetEnableParallel(OH_AI_ContextHandle context, bool is_parallel);

/**
 * @brief Checks whether parallelism between operators is enabled.
 *
 * @param context {@link OH_AI_ContextHandle} that points to the context instance.
 * @return Whether parallelism between operators is enabled. The value **true** means that
 * parallelism between operators is enabled, and the value **false** means the opposite.
 * @since 9
 */
OH_AI_API bool OH_AI_ContextGetEnableParallel(const OH_AI_ContextHandle context);

/**
 * @brief Attaches the custom device information to the inference context.
 *
 * After this call, the device information is owned by the context; the caller
 * does not need to destroy it manually (see {@link OH_AI_DeviceInfoDestroy}).
 *
 * @param context {@link OH_AI_ContextHandle} that points to the context instance.
 * @param device_info {@link OH_AI_DeviceInfoHandle} that points to a device information instance.
 * @since 9
 */
OH_AI_API void OH_AI_ContextAddDeviceInfo(OH_AI_ContextHandle context, OH_AI_DeviceInfoHandle device_info);

/**
 * @brief Creates a device information object.
 *
 * @param device_type Device type. For details, see {@link OH_AI_DeviceType}.
 * @return {@link OH_AI_DeviceInfoHandle} that points to the device information instance.
 * @since 9
 */
OH_AI_API OH_AI_DeviceInfoHandle OH_AI_DeviceInfoCreate(OH_AI_DeviceType device_type);

/**
 * @brief Destroys a device information instance. Note: After the device information instance is added
 * to the context, the caller does not need to destroy it manually.
 *
 * @param device_info {@link OH_AI_DeviceInfoHandle} that points to a device information instance.
 * @since 9
 */
OH_AI_API void OH_AI_DeviceInfoDestroy(OH_AI_DeviceInfoHandle *device_info);

/**
 * @brief Sets the provider (manufacturer) name.
 *
 * @param device_info {@link OH_AI_DeviceInfoHandle} that points to a device information instance.
 * @param provider Provider (manufacturer) name.
 * @since 9
 */
OH_AI_API void OH_AI_DeviceInfoSetProvider(OH_AI_DeviceInfoHandle device_info, const char *provider);

/**
 * @brief Obtains the provider name.
 *
 * @param device_info {@link OH_AI_DeviceInfoHandle} that points to a device information instance.
 * @return Provider name.
 * @since 9
 */
OH_AI_API const char *OH_AI_DeviceInfoGetProvider(const OH_AI_DeviceInfoHandle device_info);

/**
 * @brief Sets the name of a provider device.
 *
 * @param device_info {@link OH_AI_DeviceInfoHandle} that points to a device information instance.
 * @param device Name of the provider device, for example, CPU.
 * @since 9
 */
OH_AI_API void OH_AI_DeviceInfoSetProviderDevice(OH_AI_DeviceInfoHandle device_info, const char *device);

/**
 * @brief Obtains the name of a provider device.
 *
 * @param device_info {@link OH_AI_DeviceInfoHandle} that points to a device information instance.
 * @return Name of the provider device.
 * @since 9
 */
OH_AI_API const char *OH_AI_DeviceInfoGetProviderDevice(const OH_AI_DeviceInfoHandle device_info);

/**
 * @brief Obtains the device type of a device information instance.
 *
 * @param device_info {@link OH_AI_DeviceInfoHandle} that points to a device information instance.
 * @return Device type enumerated by {@link OH_AI_DeviceType}.
 * @since 9
 */
OH_AI_API OH_AI_DeviceType OH_AI_DeviceInfoGetDeviceType(const OH_AI_DeviceInfoHandle device_info);

/**
 * @brief Sets whether to enable float16 inference. This function is available only for CPU/GPU devices.
 *
 * @param device_info {@link OH_AI_DeviceInfoHandle} that points to a device information instance.
 * @param is_fp16 Whether to enable float16 inference.
 * @since 9
 */
OH_AI_API void OH_AI_DeviceInfoSetEnableFP16(OH_AI_DeviceInfoHandle device_info, bool is_fp16);

/**
 * @brief Checks whether float16 inference is enabled. This function is available only for CPU/GPU devices.
 *
 * @param device_info {@link OH_AI_DeviceInfoHandle} that points to a device information instance.
 * @return Whether float16 inference is enabled.
 * @since 9
 */
OH_AI_API bool OH_AI_DeviceInfoGetEnableFP16(const OH_AI_DeviceInfoHandle device_info);

/**
 * @brief Sets the NPU frequency type. This function is available only for NPU devices.
 *
 * NOTE(review): the valid range includes **0**, but the meaning of **0** is not
 * documented here — confirm against the NPU device documentation.
 *
 * @param device_info {@link OH_AI_DeviceInfoHandle} that points to a device information instance.
 * @param frequency NPU frequency type. The value ranges from **0** to **4**. The default value is **3**.
 * **1**: low power consumption; **2**: balanced; **3**: high performance; **4**: ultra-high performance
 * @since 9
 */
OH_AI_API void OH_AI_DeviceInfoSetFrequency(OH_AI_DeviceInfoHandle device_info, int frequency);

/**
 * @brief Obtains the NPU frequency type. This function is available only for NPU devices.
 *
 * @param device_info {@link OH_AI_DeviceInfoHandle} that points to a device information instance.
 * @return Frequency type of the NPU. The value ranges from **0** to **4**. **1**: low power consumption;
 * **2**: balanced; **3**: high performance; **4**: ultra-high performance
 * @since 9
 */
OH_AI_API int OH_AI_DeviceInfoGetFrequency(const OH_AI_DeviceInfoHandle device_info);

/**
 * @brief Obtains the descriptions of all NNRt devices in the system.
 *
 * The returned array must be released with {@link OH_AI_DestroyAllNNRTDeviceDescs}.
 *
 * @param num Output parameter that receives the number of NNRt devices.
 * @return Pointer to the NNRt device description array. If the operation fails, **NULL** is returned.
 * @since 10
 */
OH_AI_API NNRTDeviceDesc *OH_AI_GetAllNNRTDeviceDescs(size_t *num);

/**
 * @brief Obtains the pointer to an element in the NNRt device description array.
 *
 * @param descs Array of NNRt device descriptions.
 * @param index Index of an array element.
 * @return Pointer to an element in the NNRt device description array.
 * @since 10
 */
OH_AI_API NNRTDeviceDesc *OH_AI_GetElementOfNNRTDeviceDescs(NNRTDeviceDesc *descs, size_t index);

/**
 * @brief Destroys the NNRt device description array obtained by {@link OH_AI_GetAllNNRTDeviceDescs}.
 *
 * @param desc Double pointer to the NNRt device description array. After the operation is complete,
 * the content pointed to by **desc** is set to **NULL**.
 * @since 10
 */
OH_AI_API void OH_AI_DestroyAllNNRTDeviceDescs(NNRTDeviceDesc **desc);

/**
 * @brief Obtains the NNRt device ID from the specified NNRt device description. Note that this ID is
 * valid only for NNRt devices.
 *
 * @param desc Pointer to the NNRt device description.
 * @return NNRt device ID.
 * @since 10
 */
OH_AI_API size_t OH_AI_GetDeviceIdFromNNRTDeviceDesc(const NNRTDeviceDesc *desc);

/**
 * @brief Obtains the NNRt device name from the specified NNRt device description.
 *
 * @param desc Pointer to the NNRt device description.
 * @return NNRt device name. The value is a pointer that points to a constant string,
 * which is held by **desc**. The caller does not need to destroy it separately.
 * @since 10
 */
OH_AI_API const char *OH_AI_GetNameFromNNRTDeviceDesc(const NNRTDeviceDesc *desc);

/**
 * @brief Obtains the NNRt device type from the specified NNRt device description.
 *
 * @param desc Pointer to the NNRt device description.
 * @return NNRt device type enumerated by {@link OH_AI_NNRTDeviceType}.
 * @since 10
 */
OH_AI_API OH_AI_NNRTDeviceType OH_AI_GetTypeFromNNRTDeviceDesc(const NNRTDeviceDesc *desc);

/**
 * @brief Searches for the NNRt device with the specified name and creates the NNRt device information
 * based on the information about the first found NNRt device.
 *
 * NOTE(review): the behavior when no matching device exists is not specified here —
 * presumably a null handle is returned; verify against the implementation.
 *
 * @param name NNRt device name.
 * @return {@link OH_AI_DeviceInfoHandle} that points to the device information instance.
 * @since 10
 */
OH_AI_API OH_AI_DeviceInfoHandle OH_AI_CreateNNRTDeviceInfoByName(const char *name);

/**
 * @brief Searches for the NNRt device of the specified type and creates the NNRt device information
 * based on the information about the first found NNRt device.
 *
 * @param type NNRt device type enumerated by {@link OH_AI_NNRTDeviceType}.
 * @return {@link OH_AI_DeviceInfoHandle} that points to the device information instance.
 * @since 10
 */
OH_AI_API OH_AI_DeviceInfoHandle OH_AI_CreateNNRTDeviceInfoByType(OH_AI_NNRTDeviceType type);

/**
 * @brief Sets the NNRt device ID. This function is available only for NNRt devices.
 *
 * @param device_info {@link OH_AI_DeviceInfoHandle} that points to a device information instance.
 * @param device_id NNRt device ID.
 * @since 10
 */
OH_AI_API void OH_AI_DeviceInfoSetDeviceId(OH_AI_DeviceInfoHandle device_info, size_t device_id);

/**
 * @brief Obtains the NNRt device ID. This function is available only for NNRt devices.
 *
 * @param device_info {@link OH_AI_DeviceInfoHandle} that points to a device information instance.
 * @return NNRt device ID.
 * @since 10
 */
OH_AI_API size_t OH_AI_DeviceInfoGetDeviceId(const OH_AI_DeviceInfoHandle device_info);

/**
 * @brief Sets the NNRt performance mode. This function is available only for NNRt devices.
 *
 * @param device_info {@link OH_AI_DeviceInfoHandle} that points to a device information instance.
 * @param mode Performance mode enumerated by {@link OH_AI_PerformanceMode}.
 * @since 10
 */
OH_AI_API void OH_AI_DeviceInfoSetPerformanceMode(OH_AI_DeviceInfoHandle device_info, OH_AI_PerformanceMode mode);

/**
 * @brief Obtains the NNRt performance mode. This function is available only for NNRt devices.
 *
 * @param device_info {@link OH_AI_DeviceInfoHandle} that points to a device information instance.
 * @return Performance mode enumerated by {@link OH_AI_PerformanceMode}.
 * @since 10
 */
OH_AI_API OH_AI_PerformanceMode OH_AI_DeviceInfoGetPerformanceMode(const OH_AI_DeviceInfoHandle device_info);


/**
 * @brief Sets the priority of an NNRt task. This function is available only for NNRt devices.
 *
 * @param device_info {@link OH_AI_DeviceInfoHandle} that points to a device information instance.
 * @param priority NNRt task priority enumerated by {@link OH_AI_Priority}.
 * @since 10
 */
OH_AI_API void OH_AI_DeviceInfoSetPriority(OH_AI_DeviceInfoHandle device_info, OH_AI_Priority priority);

/**
 * @brief Obtains the priority of an NNRt task. This function is available only for NNRt devices.
 *
 * @param device_info {@link OH_AI_DeviceInfoHandle} that points to a device information instance.
 * @return NNRt task priority enumerated by {@link OH_AI_Priority}.
 * @since 10
 */
OH_AI_API OH_AI_Priority OH_AI_DeviceInfoGetPriority(const OH_AI_DeviceInfoHandle device_info);

/**
 * @brief Adds extended configuration in the form of key/value pairs to the device information.
 * This function is available only for NNRt devices.
 *
 * Note: The key/value pairs currently supported include
 * {"CachePath": "YourCachePath"}, * {"CacheVersion": "YourCacheVersion"},
 * {"QuantBuffer": "YourQuantBuffer"}, {"ModelName": "YourModelName"},
 * {"isProfiling": "YourisProfiling"}, {"opLayout": "YouropLayout"},
 * {"InputDims": "YourInputDims"}，{"DynamicDims": "YourDynamicDims"}，
 * {"QuantConfigData": "YourQuantConfigData"}，{"BandMode": "YourBandMode"}，
 * {"NPU_FM_SHARED": "YourNPU_FM_SHARED"}
 *  A total of 11 key-value pairs are provided. You can replace the values as required.
 *
 * @param device_info {@link OH_AI_DeviceInfoHandle} that points to a device information instance.
 * @param name Key in an extended key/value pair. The value is a C string.
 * @param value Start address of the value in an extended key/value pair.
 * @param value_size Length of the value in an extended key/value pair.
 * @return Status code enumerated by {@link OH_AI_Status}. The value **OH_AI_STATUS_SUCCESS**
 * indicates that the operation is successful. If the operation fails, an error code is returned.
 * @since 10
 */
OH_AI_API OH_AI_Status OH_AI_DeviceInfoAddExtension(OH_AI_DeviceInfoHandle device_info, const char *name,
    const char *value, size_t value_size);
#ifdef __cplusplus
}
#endif

/** @} */
#endif // MINDSPORE_INCLUDE_C_API_CONTEXT_C_H

<!--no_check-->