// Copyright 2022 Huawei Cloud Computing Technology Co., Ltd.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "CaeEngineControl.h"
#include "../cae_dataPipe/CaeMsgHead.h"
#include "../cae_dataPipe/CaeDataPipe.h"
#include "../cae_CPHMediaEngine/CPHMediaEngine.h"
#include "../cae_agent/CaeAgent.h"
#include "../cae_common/CaeCommon.h"
#include "../cae_common/CaeDefine.h"
#include "../cae_dataPipe/CaeInput.h"
#include "CaeParamStorage.h"
#include "CaeMediaConfig.h"

using namespace std;

// JSON config used when the screenshot feature needs its own (frame-dropping)
// video session: raw YUV frames at 1 fps (see OpenScreenShots).
const char *SCREENSHOT_JSON_CONFIG = R"({"frame_type":"yuv","fps":1})";

// MSVC-style 64-bit alias used by the byte-order helpers below.
// A typedef is preferred over the original #define: it obeys scoping rules
// and is visible to the debugger. (Note: double-underscore names are
// reserved; kept only for compatibility with existing uses in this file.)
typedef long long __int64;

// int64网络字节序转换为主机字节序
// Converts a 64-bit integer from network byte order to host byte order.
// Relies on __BYTE_ORDER from <endian.h>; on big-endian hosts the value
// passes through unchanged.
// FIX: the swap is now done entirely in unsigned arithmetic. The original
// cast ntohl()'s result to a signed 64-bit value before shifting it left by
// 32, which is undefined behaviour whenever bit 31 of the low word is set
// (the shift pushes a 1 into the sign bit).
inline long long ntoh_int64(long long val)
{
    long long ret = val;
#if __BYTE_ORDER == __LITTLE_ENDIAN
    const auto u = static_cast<unsigned long long>(val);
    const unsigned long long lo = ntohl(static_cast<uint32_t>(u));
    const unsigned long long hi = ntohl(static_cast<uint32_t>(u >> 32));
    ret = static_cast<long long>((lo << 32) | hi);
#endif
    return ret;
}

// double网络字节序转换为主机字节序
inline double ntoh_double(double val)
{
    double ret = val;
#if __BYTE_ORDER == __LITTLE_ENDIAN
    __int64 tmp = ntoh_int64(*((__int64*)&val));
    ret = *((double*)&tmp);
#endif
    return ret;
}

// float网络字节序转换为主机字节序
// Converts a float from network byte order to host byte order.
// FIX: memcpy replaces the original reinterpret_cast pointer punning, which
// violated strict aliasing (undefined behaviour under optimisation).
float ntohl_float(float net_float) {
    uint32_t bits = 0;
    memcpy(&bits, &net_float, sizeof(bits));
    bits = ntohl(bits);
    float host_float = 0.0F;
    memcpy(&host_float, &bits, sizeof(host_float));
    return host_float;
}

// Default state: unknown orientation (255 = sentinel, see VideoEventCallback),
// no pending orientation change, zero geometry, no engine allocated yet
// (Init() creates it lazily).
CaeEngineControl::CaeEngineControl()
    : m_orientation(255),
      m_changeOrientation(false),
      m_width(0),
      m_height(0),
      m_cphMediaEngine(nullptr)
{
}

// Releases the media engine. delete on a null pointer is a no-op, so no
// guard is required; the pointer is reset defensively afterwards.
CaeEngineControl::~CaeEngineControl()
{
    delete m_cphMediaEngine;
    m_cphMediaEngine = nullptr;
}

// Meyers-singleton accessor. Note that Init() runs on EVERY call (it is
// idempotent and cheap once the engine exists, but it does take its lock).
CaeEngineControl &CaeEngineControl::GetInstance()
{
    static CaeEngineControl instance;
    (void) instance.Init();
    return instance;
}

// Lazily allocates the CPH media engine (thread-safe via m_initLock).
// Returns 0 on success, -1 when the engine could not be allocated.
// FIX: uses new(std::nothrow). With plain `new` the nullptr check below was
// dead code -- allocation failure throws std::bad_alloc instead of returning
// nullptr, so the logged -1 error path could never be reached.
int CaeEngineControl::Init()
{
    std::lock_guard<std::mutex> lock(m_initLock);
    if (m_cphMediaEngine == nullptr) {
        m_cphMediaEngine = new (std::nothrow) CPHMediaEngine();
        if (m_cphMediaEngine == nullptr) {
            LOGE("Init failed, failed to new CPHMediaEngine.");
            return -1;
        }
    }
    return 0;
}

// Engine callback: forwards one encoded audio packet into the data pipe as a
// CAE_MSG_AUDIO message. 'audioCallbackExt' carries extra metadata and is
// currently unused here.
void AudioEventCallback(uint8_t *data, uint32_t size, AudioCallbackExt &audioCallbackExt)
{
    CaePipeManager::AppendData(data, size, true, CAE_MSG_AUDIO);
}

// Engine callback: records per-frame orientation changes and latency
// timestamps on the controller, then forwards the encoded frame into the
// data pipe as a CAE_MSG_VIDEO message.
// FIX: GetInstance() was called seven times per frame; every call re-runs
// Init() (which takes its mutex), so the singleton is now fetched once.
void VideoEventCallback(uint8_t *data, uint32_t size, VideoCallbackExt &videoCallbackExt)
{
    auto &ctrl = CaeEngineControl::GetInstance();
    if (ctrl.m_orientation != videoCallbackExt.orientation) {
        LOGI("Old orientation is %d, new orientation is %d", ctrl.m_orientation,
             videoCallbackExt.orientation);
        ctrl.m_orientation = videoCallbackExt.orientation;
        ctrl.m_changeOrientation = true;
    }

    // save latency data for stat
    ctrl.m_startCaptureTimestamp = videoCallbackExt.videoLatencyExt.startCaptureTimestamp;
    ctrl.m_endCaptureTimestamp = videoCallbackExt.videoLatencyExt.endCaptureTimestamp;
    ctrl.m_startEncodeTimestamp = videoCallbackExt.videoLatencyExt.startEncodeTimestamp;
    ctrl.m_endEncodeTimestamp = videoCallbackExt.videoLatencyExt.endEncodeTimestamp;

    CaePipeManager::AppendData(data, size, true, CAE_MSG_VIDEO);
}

// Intentionally empty video sink. Used when a video session exists only to
// drive the screenshot pipeline (see OpenScreenShots): encoded frames are
// simply dropped.
void VideoEmptyCallback(uint8_t *data, uint32_t size, VideoCallbackExt &videoCallbackExt)
{}

// Engine callback: forwards one screenshot (JPEG payload, per OpenScreenshot)
// into the data pipe as a CAE_MSG_SCREENSHOT message.
void ScreenshotEventCallback(uint8_t *data, uint32_t size)
{
    CaePipeManager::AppendData(data, size, true, CAE_MSG_SCREENSHOT);
}

// Capture latency of the most recent video frame: the difference between the
// end/start capture timestamps recorded by VideoEventCallback.
uint32_t CaeEngineControl::GetCaptureLatency() const
{
    return m_endCaptureTimestamp - m_startCaptureTimestamp;
}

// Encode latency of the most recent video frame: the difference between the
// end/start encode timestamps recorded by VideoEventCallback.
uint32_t CaeEngineControl::GetEncodeLatency() const
{
    return m_endEncodeTimestamp - m_startEncodeTimestamp;
}

// Opens the audio/video media stream (idempotent). A running screenshot
// session is paused first and resumed afterwards, because it shares the
// video pipeline.
// FIX: on OpenAudio failure the original closed the video but still set
// m_isMediaStreamOpening = true and returned SUCCESS, leaving the object
// claiming an open stream that had just been torn down. The audio error is
// now propagated.
Result CaeEngineControl::OpenMediaStream()
{
    if (m_isMediaStreamOpening) {
        LOGI("Media stream already opened.");
        return Result::SUCCESS;
    }

    // Reset orientation tracking for the new session (255 = unknown).
    m_orientation = 255;
    m_changeOrientation = false;

    if (!IsSupportH265()) {
        return H265_NOT_SUPPORT;
    }

    char* mediaConfigJson = CaeMediaConfig::GetInstance().BuildVideoJsonConfig();
    if (mediaConfigJson == nullptr) {
        return FAILED;
    }

    // Pause the screenshot session; it rides on the video pipeline.
    bool isOpenScreenShot = false;
    if (m_isScreenShotOpening) {
        auto closeScreenShotRet = CloseScreenshot();
        isOpenScreenShot = true;
        LOGI("CloseScreenshot result : [%d].", closeScreenShotRet);
    }

    auto openRet = static_cast<Result>(m_cphMediaEngine->OpenVideo(mediaConfigJson, VideoEventCallback));
    if (openRet != SUCCESS) {
        return openRet;
    }

    auto audioRet = OpenAudio();
    if (audioRet != SUCCESS) {
        // Audio is mandatory here: roll back the video session and report
        // the failure instead of pretending the stream is open.
        m_cphMediaEngine->CloseVideo();
        return audioRet;
    }

    m_isMediaStreamOpening = true;
    // Resume the screenshot session on top of the new stream.
    if (isOpenScreenShot) {
        auto openScreenShotRet = OpenScreenShots(m_screenShotWidth, m_screenShotHeight);
        LOGI("OpenScreenShots result : [%d].", openScreenShotRet);
    }
    return SUCCESS;
}

// Starts the periodic-screenshot session at the given JPEG geometry
// (idempotent). If no media stream is active, a private low-rate YUV video
// session is opened first to feed the screenshot engine.
Result CaeEngineControl::OpenScreenShots(uint32_t jpegWidth, uint32_t jpegHeight)
{
    LOGI("OpenScreenShots begin.");
    if (m_isScreenShotOpening) {
        LOGI("ScreenShot already opened.");
        return SUCCESS;
    }

    // Without an active media stream the screenshot feature needs its own
    // video session (frames are discarded by VideoEmptyCallback).
    const bool ownsVideoSession = !m_isMediaStreamOpening;
    if (ownsVideoSession) {
        int ret = m_cphMediaEngine->OpenVideo(SCREENSHOT_JSON_CONFIG, VideoEmptyCallback);
        LOGI("ScreenShot openVideo result : [%d].", ret);
        if (ret != SUCCESS) {
            return static_cast<Result>(ret);
        }
    }

    const auto shotRet = static_cast<Result>(
        m_cphMediaEngine->OpenScreenshot(jpegWidth, jpegHeight, ScreenshotEventCallback));
    if (shotRet != SUCCESS) {
        // Roll back the private video session we just opened.
        if (ownsVideoSession) {
            m_cphMediaEngine->CloseVideo();
        }
        LOGE("ScreenShot OpenScreenshot failed, result : [%d].", shotRet);
        return shotRet;
    }

    // Remember the geometry so the session can be re-created transparently
    // when the media stream is opened or closed.
    m_screenShotWidth = jpegWidth;
    m_screenShotHeight = jpegHeight;
    m_isScreenShotOpening = true;
    return SUCCESS;
}

// Opens the audio side of the media stream. Muted sessions skip the audio
// pipeline entirely and report success.
Result CaeEngineControl::OpenAudio() const
{
    if (CaeMediaConfig::GetInstance().IsMute()) {
        return SUCCESS;
    }

    char *audioJsonConfig = CaeMediaConfig::GetInstance().BuildAudioJsonConfig();
    if (audioJsonConfig == nullptr) {
        return FAILED;
    }

    const auto result = static_cast<Result>(
        m_cphMediaEngine->OpenAudio(audioJsonConfig, AudioEventCallback));
    if (result != SUCCESS) {
        LOGE("Open audio failed, startRet = %d", result);
    }
    return result;
}

// Closes the audio/video media stream (idempotent). A running screenshot
// session is paused first and then re-opened afterwards, at which point it
// creates its own private video session.
Result CaeEngineControl::CloseMediaStream()
{
    if (!m_isMediaStreamOpening) {
        LOGI("Media stream already closed or not opened.");
        return Result::SUCCESS;
    }

    // Pause the screenshot session first; it rides on the video pipeline.
    bool resumeScreenShot = false;
    if (m_isScreenShotOpening) {
        auto closeScreenShotRet = CloseScreenshot();
        resumeScreenShot = true;
        LOGI("CloseScreenshot result : [%d].", closeScreenShotRet);
    }

    CloseAudio();
    const auto videoRet = static_cast<Result>(m_cphMediaEngine->CloseVideo());
    if (videoRet != Result::SUCCESS) {
        LOGE("Failed to stop video engines. %d", videoRet);
    }
    m_isMediaStreamOpening = false;

    // Resume the screenshot session (now backed by its own video stream).
    if (resumeScreenShot) {
        auto openScreenShotRet = OpenScreenShots(m_screenShotWidth, m_screenShotHeight);
        LOGI("OpenScreenShots result : [%d].", openScreenShotRet);
    }
    return videoRet;
}

// Closes the audio side of the media stream. Deliberately best-effort:
// engine failures are logged but the function always reports SUCCESS
// (callers such as CloseMediaStream ignore the result anyway).
Result CaeEngineControl::CloseAudio()
{
    const auto engineRet = static_cast<Result>(m_cphMediaEngine->CloseAudio());
    if (engineRet != Result::SUCCESS) {
        LOGE("Failed to stop engines. %d", engineRet);
    }
    return SUCCESS;
}

// Initialises and enables every supported virtual device (camera, mic,
// sensor, location, vibrator) when the feature flag allows it. Per-device
// failures are logged and skipped; the overall "opened" flag is still set.
// 'sdkVersion' selects the sensor protocol version (2.0 for new clients).
// FIXES: the enable-failure log contained a full-width comma '，' (mojibake
// in the log stream); device helpers are now called directly instead of via
// GetInstance() from inside a member function; guard clauses replace the
// whole-body if-nesting.
void CaeEngineControl::OpenVirtualDevices(string sdkVersion)
{
    if (!CaeConfigManage::EnableVirtualDevice()) {
        return;
    }
    if (m_isVirtualDeviceOpening) {
        LOGI("Virtual devices already opened.");
        return;
    }

    for (int deviceType = CAMERA; deviceType < DEVICE_MAX; ++deviceType) {
        if (GetVmiDeviceStatus((VmiDeviceType) deviceType) != VMI_INVALID) {
            LOGI("Virtual device %d is already enable.", deviceType);
            continue;
        }

        VmiConfigParams configParams;
        if (deviceType == CAMERA) {
            // Local-stream injection (debug) overrides the default H.264 feed.
            configParams.dataType = ENABLE_INJECT_LOCAL_CAMERA_STREAM
                ? (DataType) CaeVirtualDeviceConfig::GetLocalCameraStreamInjectFileType()
                : VMI_H264;
        }
        if (deviceType == SENSOR) {
            if (CaeCommon::CompareVersion(sdkVersion, SENSOR_CLIENT_KEY_VERSION) >= 0) {
                configParams.sensorVersion = 2;
                LOGI("Sensor version is 2.0");
            } else {
                LOGI("Sensor version is 1.0");
            }
        }

        Result ret = InitVmiDevice((VmiDeviceType) deviceType, configParams);
        if (ret == Result::FAILED) {
            LOGE("Failed to init virtual device %d.", deviceType);
            continue;
        }
        ret = EnableVmiDevice((VmiDeviceType) deviceType);
        if (ret == Result::FAILED) {
            LOGE("Failed to enable virtual device %d, destroy it", deviceType);
            DestroyVmiDevice((VmiDeviceType) deviceType);
        }
    }
    m_isVirtualDeviceOpening = true;
    LOGI("Open virtual devices successfully.");
}

void CaeEngineControl::CloseVirtualDevices()
{
    if (!CaeConfigManage::EnableVirtualDevice()) {
        return;
    }

    if (!m_isVirtualDeviceOpening) {
        LOGI("Virtual devices already closed or not opened.");
        return;
    }

    for (int type = CAMERA; type < DEVICE_MAX; ++type) {

        if (CaeEngineControl::GetInstance().GetVmiDeviceStatus((VmiDeviceType)type) == VMI_INVALID) {
            LOGE("Device %d is already invalid.", type);
            continue;
        }
        Result ret = CaeEngineControl::GetInstance().DisableVmiDevice((VmiDeviceType)type);
        if(ret == Result::FAILED) {
            LOGE("Failed to disable virtual device %d, ret is %d.", type, ret);
            continue;
        }
        ret = CaeEngineControl::GetInstance().DestroyVmiDevice((VmiDeviceType)type);
        if(ret == Result::FAILED) {
            LOGE("Failed to destroy virtual device %d, ret is %d.", type, ret);
        }
    }
    m_isVirtualDeviceOpening = false;
    LOGI("Destroy virtual device successfully.");
}

// Pushes the current dynamic video parameters to the media engine.
// FIX: Build*JsonConfig can fail -- the sibling call sites (OpenMediaStream,
// OpenAudio) treat a null config as FAILED, so the same null check is added
// here instead of handing nullptr to SetParams.
Result CaeEngineControl::SetMediaConfig() const
{
    char* videoParamsJson = CaeMediaConfig::GetInstance().BuildDynamicVideoJsonConfig();
    if (videoParamsJson == nullptr) {
        return FAILED;
    }
    return static_cast<Result>(m_cphMediaEngine->SetParams(videoParamsJson));
}

// Asks the encoder for a key frame. With no active stream there is nothing
// to do, which is reported as success.
Result CaeEngineControl::RequestIframe() {
    if (!m_isMediaStreamOpening) {
        return Result::SUCCESS;
    }
    return static_cast<Result>(m_cphMediaEngine->GenerateKeyFrame());
}

// H.265 is only usable when the remote encoder info is valid; any other
// frame type is always accepted.
bool CaeEngineControl::IsSupportH265() const
{
    const bool encoderInfoValid = CaeMediaConfig::GetInstance().IsRemoteEncoderInfoValid();
    const bool wantsH265 = CaeMediaConfig::GetInstance().GetVideoFrameType() == "h265";
    if (!encoderInfoValid && wantsH265) {
        LOGE("H265_NOT_SUPPORT");
        return false;
    }
    return true;
}

// Applies the configured physical resolution to the media engine. Only legal
// while no media stream is open; a running screenshot session is paused for
// the change and resumed afterwards.
Result CaeEngineControl::SetPhysicalResolution()
{
    if (m_isMediaStreamOpening) {
        LOGW("Media engine already opened, can't set physical resolution.");
        return Result::SUCCESS;
    }

    // Pause the screenshot session while the resolution changes.
    bool resumeScreenShot = false;
    if (m_isScreenShotOpening) {
        auto closeScreenShotRet = CloseScreenshot();
        resumeScreenShot = true;
        LOGI("CloseScreenshot result : [%d].", closeScreenShotRet);
    }

    CaeMediaConfig::GetInstance().SetNeedSetPhysicalResolution(false);
    const auto result = static_cast<Result>(m_cphMediaEngine->SetPhysicalResolution(
        CaeMediaConfig::GetInstance().GetPhysicalWidth(),
        CaeMediaConfig::GetInstance().GetPhysicalHeight()));

    // Resume the screenshot session.
    if (resumeScreenShot) {
        auto openScreenShotRet = OpenScreenShots(m_screenShotWidth, m_screenShotHeight);
        LOGI("OpenScreenShots result : [%d].", openScreenShotRet);
    }
    return result;
}

// Virtual-device callback: forwards microphone data from the media engine
// into the data pipe. 'callbackExt' is currently unused.
void VirtualMicrophoneCallback(uint8_t *data, uint32_t size, VmiDeviceCallbackExt &callbackExt)
{
    CaePipeManager::AppendData(data, size, true, CAE_MSG_VIRTUAL_MICROPHONE);
}

// Virtual-device callback: forwards camera data from the media engine into
// the data pipe. 'callbackExt' is currently unused.
void VirtualCameraCallback(uint8_t *data, uint32_t size, VmiDeviceCallbackExt &callbackExt)
{
    CaePipeManager::AppendData(data, size, true, CAE_MSG_VIRTUAL_CAMERA);
}

// Virtual-device callback: forwards sensor data from the media engine into
// the data pipe. 'callbackExt' is currently unused.
void VirtualSensorCallback(uint8_t *data, uint32_t size, VmiDeviceCallbackExt &callbackExt)
{
    CaePipeManager::AppendData(data, size, true, CAE_MSG_VIRTUAL_SENSOR);
}

// Virtual-device callback: forwards location data from the media engine into
// the data pipe. 'callbackExt' is currently unused.
void VirtualLocationCallback(uint8_t *data, uint32_t size, VmiDeviceCallbackExt &callbackExt)
{
    CaePipeManager::AppendData(data, size, true, CAE_MSG_VIRTUAL_LOCATION);
}

// Virtual-device callback: forwards vibrator data from the media engine into
// the data pipe. 'callbackExt' is currently unused.
// CONSISTENCY FIX: AppendData is invoked statically in every other callback
// in this file; the original went through GetInstance() here only.
void VirtualVibratorCallback(uint8_t *data, uint32_t size, VmiDeviceCallbackExt &callbackExt)
{
    CaePipeManager::AppendData(data, size, true, CAE_MSG_VIRTUAL_VIBRATOR);
}

// Initialises one virtual device in the media engine, wiring it to the pipe
// callback that forwards its data stream. Unknown device types fail.
Result CaeEngineControl::InitVmiDevice(VmiDeviceType deviceType, VmiConfigParams &params) const {
    VmiDeviceCallback callback = nullptr;
    switch (deviceType) {
        case CAMERA:
            callback = VirtualCameraCallback;
            break;
        case MICROPHONE:
            callback = VirtualMicrophoneCallback;
            break;
        case SENSOR:
            callback = VirtualSensorCallback;
            break;
        case LOCATION:
            callback = VirtualLocationCallback;
            break;
        case VIBRATOR:
            callback = VirtualVibratorCallback;
            break;
        default: {
            LOGE("virtual device type %d is not support.", deviceType);
            return Result::FAILED;
        }
    }
    const auto engineRet = m_cphMediaEngine->InitVmiDevice(deviceType, params, callback);
    return engineRet == 0 ? Result::SUCCESS : Result::FAILED;
}

// Enables a previously initialised virtual device; thin wrapper over the
// engine call, result cast to Result.
Result CaeEngineControl::EnableVmiDevice(VmiDeviceType deviceType) const {
    return static_cast<Result>(m_cphMediaEngine->EnableVmiDevice(deviceType));
}

// Disables a virtual device; thin wrapper over the engine call, result cast
// to Result.
Result CaeEngineControl::DisableVmiDevice(VmiDeviceType deviceType) const {
    return static_cast<Result>(m_cphMediaEngine->DisableVmiDevice(deviceType));
}

// Destroys a virtual device; thin wrapper over the engine call, result cast
// to Result.
Result CaeEngineControl::DestroyVmiDevice(VmiDeviceType deviceType) const {
    return static_cast<Result>(m_cphMediaEngine->DestroyVmiDevice(deviceType));
}

// Sends a raw payload of 'len' bytes to the given virtual device; thin
// wrapper over the engine call, result cast to Result.
Result CaeEngineControl::VmiDeviceSend(VmiDeviceType deviceType, uint8_t* data, uint32_t len) const {
    return static_cast<Result>(m_cphMediaEngine->VmiDeviceSend(deviceType, data, len));
}

// Returns the engine-reported status byte for the given virtual device;
// VMI_INVALID denotes a device that is not initialised (see
// OpenVirtualDevices / CloseVirtualDevices for how callers interpret it).
uint8_t CaeEngineControl::GetVmiDeviceStatus(VmiDeviceType deviceType) const {
    return m_cphMediaEngine->GetVmiDeviceStatus(deviceType);
}

// Repacks a pipe touch message into the engine's InputTouch struct and
// injects it. Returns the engine's result code.
// FIX: the final LOGD passed a size_t to "%d" -- a format/argument mismatch
// (undefined behaviour on LP64); the value is now cast to int explicitly.
int CaeEngineControl::HandleTouchMsg(CaeTouchEventMsg *caeTouchEventMsg) const
{
    InputTouch touchEventMsg;
    touchEventMsg.pointerID = caeTouchEventMsg->GetId();
    touchEventMsg.orientation = caeTouchEventMsg->GetOrientation();
    touchEventMsg.action = caeTouchEventMsg->GetAction();
    touchEventMsg.pressure = caeTouchEventMsg->GetPressure();
    touchEventMsg.touchX = caeTouchEventMsg->GetX();
    touchEventMsg.touchY = caeTouchEventMsg->GetY();
    touchEventMsg.physicalWidth = caeTouchEventMsg->GetWidth();
    touchEventMsg.physicalHeight = caeTouchEventMsg->GetHeight();
    touchEventMsg.touchTime = caeTouchEventMsg->GetTime();

    LOGD("touchEventMsg.pointerID: %d.", touchEventMsg.pointerID);
    LOGD("touchEventMsg.orientation: %d.", touchEventMsg.orientation);
    LOGD("touchEventMsg.action: %d.", touchEventMsg.action);
    LOGD("touchEventMsg.pressure: %d.", touchEventMsg.pressure);
    LOGD("touchEventMsg.touchX: %d.", touchEventMsg.touchX);
    LOGD("touchEventMsg.touchY: %d.", touchEventMsg.touchY);
    LOGD("touchEventMsg.physicalWidth: %d.", touchEventMsg.physicalWidth);
    LOGD("touchEventMsg.physicalHeight: %d.", touchEventMsg.physicalHeight);
    LOGD("touchEventMsg.touchTime: %d.", touchEventMsg.touchTime);
    LOGD("sizeof(touchEventMsg) %d", static_cast<int>(sizeof(touchEventMsg)));
    return m_cphMediaEngine->InjectTouchData(touchEventMsg);
}

// Handles an incoming location message: converts the header and, for GNSS
// measurement packets, every clock/measurement field from network to host
// byte order in place, then forwards the buffer to the LOCATION virtual
// device. The decoded payload is wiped after hand-off.
// BUG FIX: the timeOffsetNs conversion read
// gnssMeasurementPacketData[i].measurements->timeOffsetNs -- indexing the
// *packet* (out of bounds for i > 0) instead of the measurement array.
void CaeEngineControl::HandleLocationMsg(uint8_t *virtualDeviceData, uint32_t virtualDeviceDataSize) const
{
    // Convert the message header from network to host byte order.
    struct MSG_HEADER header;
    auto *rawHeader = (MSG_HEADER *) virtualDeviceData;
    header.version = ntohs(rawHeader->version);
    header.optType = ntohs(rawHeader->optType);
    header.devType = ntohs(rawHeader->devType);
    header.devId = ntohs(rawHeader->devId);
    header.len = ntohl(rawHeader->len);

    // GNSS location (position fix) data is forwarded untouched.
    if (header.optType == DEV_OPT_GNSS_LOCATION_DATA_REQ) {
        VmiDeviceSend(LOCATION, virtualDeviceData, virtualDeviceDataSize);
        LOGI("Operate gnss location data end");
        return;
    }

    // GNSS measurement (satellite) data: byte-swap every field in place.
    uint8_t *locationData = virtualDeviceData + sizeof(MSG_HEADER);
    GnssMeasurementPacket *gnssMeasurementPacketData = (GnssMeasurementPacket *) locationData;

    // GnssClock: network -> host byte order.
    auto &clk = gnssMeasurementPacketData->clock;
    clk.flags = ntohs(clk.flags);
    clk.leapSecond = ntohs(clk.leapSecond);
    clk.timeNs = ntoh_int64(clk.timeNs);
    clk.timeUncertaintyNs = ntoh_double(clk.timeUncertaintyNs);
    clk.fullBiasNs = ntoh_int64(clk.fullBiasNs);
    clk.biasNs = ntoh_double(clk.biasNs);
    clk.biasUncertaintyNs = ntoh_double(clk.biasUncertaintyNs);
    clk.driftNsps = ntoh_double(clk.driftNsps);
    clk.driftUncertaintyNsps = ntoh_double(clk.driftUncertaintyNsps);
    clk.hwClockDiscontinuityCount = ntohl(clk.hwClockDiscontinuityCount);

    // The HAL-side GnssMeasurement accepts at most 64 satellites.
    // NOTE(review): 'count' is used without byte-swapping here and in the
    // loop below; presumably it is a single-byte field -- confirm against
    // the GnssMeasurementPacket definition.
    gnssMeasurementPacketData->count = gnssMeasurementPacketData->count > 64 ? 64 : gnssMeasurementPacketData->count;

    // GnssMeasurement entries: network -> host byte order.
    for (int i = 0; i < gnssMeasurementPacketData->count; i++) {
        auto &meas = gnssMeasurementPacketData->measurements[i];
        meas.flags = ntohl(meas.flags);
        meas.svid = ntohs(meas.svid);
        meas.timeOffsetNs = ntoh_double(meas.timeOffsetNs);  // fixed indexing (see header comment)
        meas.state = ntohl(meas.state);
        meas.receivedSvTimeInNs = ntoh_int64(meas.receivedSvTimeInNs);
        meas.receivedSvTimeUncertaintyInNs = ntoh_int64(meas.receivedSvTimeUncertaintyInNs);
        meas.cN0Dbhz = ntoh_double(meas.cN0Dbhz);
        meas.pseudorangeRateMps = ntoh_double(meas.pseudorangeRateMps);
        meas.pseudorangeRateUncertaintyMps = ntoh_double(meas.pseudorangeRateUncertaintyMps);
        meas.accumulatedDeltaRangeState = ntohs(meas.accumulatedDeltaRangeState);
        meas.accumulatedDeltaRangeM = ntoh_double(meas.accumulatedDeltaRangeM);
        meas.accumulatedDeltaRangeUncertaintyM = ntoh_double(meas.accumulatedDeltaRangeUncertaintyM);
        meas.carrierFrequencyHz = ntohl_float(meas.carrierFrequencyHz);
        meas.carrierCycles = ntoh_int64(meas.carrierCycles);
        meas.carrierPhase = ntoh_double(meas.carrierPhase);
        meas.carrierPhaseUncertainty = ntoh_double(meas.carrierPhaseUncertainty);
        meas.snrDb = ntoh_double(meas.snrDb);
    }
    VmiDeviceSend(LOCATION, virtualDeviceData, virtualDeviceDataSize);

    // Wipe the decoded payload once it has been handed off.
    int locationDataSize = static_cast<int>(header.len - sizeof(MSG_HEADER));
    memset_s(locationData, locationDataSize, 0, locationDataSize);
}

// Repacks a pipe key message into the engine's InputKey struct and injects
// it. Returns the engine's result code.
int CaeEngineControl::HandleKeyMsg(CaeKeyEventMsg *caeKeyEventMsg) const
{
    InputKey keyEventMsg;
    keyEventMsg.keyCode = caeKeyEventMsg->GetKeycode();
    keyEventMsg.action = caeKeyEventMsg->GetAction();
    return m_cphMediaEngine->InjectKeyData(keyEventMsg);
}

// Forwards a raw axis/motion payload of 'length' bytes verbatim to the
// media engine; returns the engine's result code.
int CaeEngineControl::HandleMotionMsg(void* buf, uint64_t length) const
{
    return m_cphMediaEngine->InjectAxisData(buf, length);
}

// Returns the media engine's feature-description string; ownership stays
// with the engine (do not free).
const char* CaeEngineControl::GetMediaFeatures() const
{
    return m_cphMediaEngine->GetCPHMediaFeatures();
}

// Stops the screenshot session (idempotent) and, when the screenshot owned
// its private video session (no media stream active), tears that down too.
// Always reports SUCCESS; the engine result is only logged.
Result CaeEngineControl::CloseScreenshot()
{
    if (m_isScreenShotOpening) {
        LOGI("CloseScreenshot begin.");
        const auto closeRet = static_cast<Result>(m_cphMediaEngine->CloseScreenshot());
        LOGI("CloseScreenshot result: [%d].", closeRet);
        m_isScreenShotOpening = false;
    }
    if (!m_isMediaStreamOpening) {
        m_cphMediaEngine->CloseVideo();
    }
    return SUCCESS;
}