#include "server_socket.h"
#include "rk_defines.h"
#include "rk_debug.h"
#include <csignal> // 用于信号处理
#include <iostream>
#include "rknn_api.h"
#include "client_t.h"
#include <signal.h>
#include "rk_defines.h"
#include "taiic_key.h"
#include <fcntl.h>
#include "taiic_rknn.h"
#include "taiic_sensor.h"
#include <thread>
#include <cstdio> // 包含rename函数
#include <cstdlib> // atoi / calloc
#include <taiic_lm.h>
#include <taiic_vsr.h>
#include "taiic_avsr.h"
#include "taiic_mfcc_v3.h"
#include <taiic_opencv.h>
/*-------------------ISP header file start-------------------*/
#include <string.h>
#include <sys/mman.h>
#include <sys/ioctl.h>
#include <dirent.h>
#if ISPDEMO_ENABLE_DRM
#include "isp/drmDsp.h"
#endif
#include "isp/uAPI2/rk_aiq_user_api2_sysctl.h"
#include "isp/uAPI2/rk_aiq_user_api2_debug.h"
#include "isp/sample/sample_image_process.h"
#include "isp/sample/sample_smartIr.h"
#include "isp/rkisp_demo.h"
#include "isp/taiic_isp_model.h"
#include "taiic_uart/taiic_uart_sum.h"

#include "isp/ae_algo_demo/third_party_ae_algo.h"
#include "isp/awb_algo_demo/third_party_awbV32_algo.h" //for rv1106
#include "isp/af_algo_demo/third_party_af_algo.h"

#if ISPDEMO_ENABLE_RGA && ISPDEMO_ENABLE_DRM
#include "isp/display.h"
#include "isp/rga.h"
#endif
#include <mutex>
#include <condition_variable>
#include <vector>

using namespace std;
// Active streaming mode: 0 = idle; set per PTT branch in app_key_callback
// (1..7 = video/audio combinations); values 8/9 are also checked in the send path.
int message_type = 0;
bool only_result = false; // whether only the recognition result is sent

bool start_video_flg = false;              // true while flow-video capture is running
bool VideoModelInf_or_DataType = true;     // default true: model inference (vs. raw data streaming)
static void aivi_save_end(int vi, int ai); // asr||avsr
static void ai_frame_data_cback(uint8_t *data, unsigned int len, unsigned short frame_num, uint8_t type);

int ptt_down_num = 0;        // count of PTT key presses observed so far
int current_power_value = 0; // last battery level reported via power_callback
// NOTE(review): presumably the payload size of the final (tail) packet of a
// frame — confirm against the sender protocol.
#define TAIL_PKG_LENGTH 22552
uint8_t tail_pkg_public_soc_buffer[TAIL_PKG_LENGTH + SEND_FRAME_HEAD_LEN] = {0};

int current_img_pkg = 0;
int request_img_width = 0;  // client-requested image width (custom-video mode)
int request_img_height = 0; // client-requested image height (custom-video mode)
int total_pkg = 0;
long single_video_total_bytes = 0; // bytes per video frame: width*height*1.5 (NV12), set in main
int model_video_size = 7372800;    // 320*320*1.5*48
// Image scratch buffers used while a capture is in progress.
cv::Mat image_resize;      // resized image: 112*112
cv::Mat image_resize_send; // resized image: 160*160
// NOTE(review): unparenthesised macro — safe for current uses, but
// (160 * 160) would be more robust against operator-precedence surprises.
#define FLOW_VIDEO_LENGTH 160 * 160
cv::Mat image_dst;
RK_BOOL is_model_run = RK_FALSE; // guards against re-triggering model capture while one is in flight
TEST_BATTERY_QUERY cap_query;

void *taiic_video_socket_send_data(void *args); // socket sender thread for video data
/*-------------------------ISP image+audio related function-------------------------*/
TAIIC_MODE_STATE_CTX_S *mode_state; // mode-state context (allocated in main)

LM_TOOLKIT_MODEL_CTX_S *lm_ctx;            // lip-landmark model inference context
VSR_TOOLKIT_MODEL_CTX_S *vsr_ctx;          // VSR model inference context
AVSR_TOOLKIT_MODEL_CTX_S *taiic_asr_ctx;   // ASR model inference context
AVSR_TOOLKIT_MODEL_CTX_S *avsr_ctx = NULL; // AVSR model inference context

RK_U8 lm_input_data[LM_IMG_BATCH][LM_IMG_HEIGHT][LM_IMG_WIDTH][LM_IMG_CHANNEL];                  // 1*112*112*1
RK_U8 lm_output_data[RESIZE_IMG_BATCH][RESIZE_IMG_HEIGHT][RESIZE_IMG_WIDTH][RESIZE_IMG_CHANNEL]; // 1*32*32*1
RK_U8 vsr_input_data[VSR_INPUT_BATCH][VSR_INPUT_HEIGHT][VSR_INPUT_WIDTH][VSR_INPUT_CHANNEL];     // lip-image input, 1*384*32*4, NHWC
RK_U8 avsr_video_input_data[AVSR_V_BATCH][AVSR_V_HEIGHT][AVSR_V_WIDTH][AVSR_V_CHANNEL];
RK_U8 avsr_audio_input_data[ASR_BATCH_N][ASR_FRAMES_H][ASR_MFCC_W][ASR_CHANNEL_C] = {0}; // 1*48*64*1

static void *taiic_socket_type_data(void *args);

// NOTE(review): ai_vi_save_end(int ai, int vi) vs aivi_save_end(int vi, int ai)
// above — two near-identical names with swapped parameter order; verify both
// are intentional.
static void ai_vi_save_end(int ai, int vi);

static void app_key_callback(TAIIC_KEY_STATE old_state, TAIIC_KEY_NAME key_name, TAIIC_KEY_STATE key_state);
static void vi_frame_data_cback(void *data, RK_U32 wcount);
static void power_callback(int power_cap, long power_vol); // battery status query callback
/*-----------------------server socket define----------------------*/
void signalHandler(int signum); // signal handler for graceful server shutdown
void connect_state_cback(bool isConnect);
void message_cback(unsigned char msgType, Request request);
/*----------------------main function----------------------*/
int main(int argc, char **argv)
{
    // UART串口初始化
    serial_init();

    if (argc == 1)
    {
        ipSet = 43; // Honor 10 IP：192.168.43.79
    }
    else if (argc == 2)
    {
        // 将命令行参数转换为整数
        ipSet = atoi(argv[1]); // argv[1] 是第一个传入的参数
    }
    else
    {
        printf("Usage: %s <integer>\n", argv[0]);
        return 1; // 返回 1 表示错误
    }
    // 注册信号处理
    signal(SIGINT, signalHandler);  // Ctrl+C
    signal(SIGTERM, signalHandler); // kill命令
    /*-------------------isp image thread + audio-------------------*/
    strcpy(main_ctx.dev_name, DEV_NAME);
    strcpy(main_ctx.iqpath, IQ_PATH);
    strcpy(main_ctx.out_file, OUT_FILE_NAME);
    single_video_total_bytes = main_ctx.width * main_ctx.height * 3 / 2;

    if (main_ctx.writeFile)
    {
        main_ctx.fp = fopen(main_ctx.out_file, "w+");
    }

    // // 创建数据保存目录data/avi/
    // DIR *avidir = NULL;
    // if ((avidir = opendir(aviCmd)) == NULL) // 创建文件夹avi
    // {
    //     bool ret = mkdir(aviCmd, 0755);
    //     printf("%s created sucess!\n", aviCmd);
    // }

    // initial mode state
    mode_state = reinterpret_cast<TAIIC_MODE_STATE_CTX_S *>(malloc(sizeof(TAIIC_MODE_STATE_CTX_S)));
    memset(mode_state, 0, sizeof(TAIIC_MODE_STATE_CTX_S));

    // initial lm model+ config lm model
    lm_ctx = reinterpret_cast<LM_TOOLKIT_MODEL_CTX_S *>(malloc(sizeof(LM_TOOLKIT_MODEL_CTX_S))); // 分配内存空间
    memset(lm_ctx, 0, sizeof(LM_TOOLKIT_MODEL_CTX_S));
    lm_ctx->modelPath = LM_MODEL_PATH; // 模型存放路径
    lm_rknn_toolkit_config_init(lm_ctx);
    lm_rknn_toolkit_io_init(lm_ctx);

    // initial vsr model + config vsr300 model
    vsr_ctx = reinterpret_cast<VSR_TOOLKIT_MODEL_CTX_S *>(malloc(sizeof(VSR_TOOLKIT_MODEL_CTX_S))); // 分配内存空间
    memset(vsr_ctx, 0, sizeof(VSR_TOOLKIT_MODEL_CTX_S));
    vsr_ctx->modelPath = LM_VSR_MODEL_PATH_300; // TAIIC:300模型存放路径
    vsr_rknn_toolkit_config_init(vsr_ctx);
    vsr_rknn_toolkit_io_init(vsr_ctx);

    // taiic asr model initial
    taiic_asr_ctx = reinterpret_cast<AVSR_TOOLKIT_MODEL_CTX_S *>(malloc(sizeof(AVSR_TOOLKIT_MODEL_CTX_S))); // 分配内存空间
    memset(taiic_asr_ctx, 0, sizeof(AVSR_TOOLKIT_MODEL_CTX_S));
    taiic_asr_ctx->modelPath = TAIIC_ASR_MODEL_PATH_505;
    avsr_rknn_toolkit_config_init(taiic_asr_ctx);
    avsr_rknn_toolkit_io_init(taiic_asr_ctx);

    // initial avsr model + config avsr model
    avsr_ctx = reinterpret_cast<AVSR_TOOLKIT_MODEL_CTX_S *>(malloc(sizeof(AVSR_TOOLKIT_MODEL_CTX_S))); // 分配内存空间
    memset(avsr_ctx, 0, sizeof(AVSR_TOOLKIT_MODEL_CTX_S));
    avsr_ctx->modelPath = AVSR_MODEL_PATH_300;
    avsr_rknn_toolkit_config_init(avsr_ctx);
    avsr_rknn_toolkit_io_init(avsr_ctx);

    printf("====taiic init model successfully===\n");

    // 按键检测
    taiic_all_key_registers(mode_state, app_key_callback);

    // 电量查询回调
    taiic_batt_power_registers(power_callback);

    // ai_vi save end_callback
    aivi_save_end_callback(aivi_save_end);
    /*--------------------------start isp thread--------------------------------*/
    rkisp_routine(&main_ctx, 0);
    sample_smartIr_start(&main_ctx);
    // 环境状态判断线程
#ifdef TEST_BLOCKED_STATS_FUNC
    pthread_t stats_tid;
    pthread_create(&stats_tid, NULL, stats_thread, &main_ctx);
#endif

    taiic_vi_frame_data_callback(vi_frame_data_cback); // 注册vi数据回调事件
    taiic_ai_frame_data_callback(ai_frame_data_cback); // 注册ai数据回调事件

    // isp image capture thread
    pthread_create(&tid_image_cap, NULL, taiic_isp_image_capture_func, &main_ctx);

    // // flow:video socket send thread
    // pthread_t tid_video_soc_send;
    // pthread_create(&tid_video_soc_send, NULL, taiic_video_socket_send_data, NULL);
    /*-------------------Starting server-------------------*/
    std::cout << "Starting server...\n";
    // socket send thread
    pthread_t tid_socket;
    pthread_create(&tid_socket, NULL, taiic_socket_type_data, NULL);

    // 设备初始化完成。
    uart_main_func(1, 3, 0, 0);

    pthread_join(tid_image_cap, NULL);
    // pthread_join(tid_video_soc_send, NULL);
    pthread_join(tid_socket, NULL);
    return 0;
}

/*-------------------------function implement-------------------------*/
static void app_key_callback(TAIIC_KEY_STATE old_state, TAIIC_KEY_NAME key_name, TAIIC_KEY_STATE key_state)
{
    // 判断key_name去实现对应功能,key_name=0表示音频线模式
    printf("key_name, key_state,count is [%d, %d,%d]\n", key_name, key_state, cnt_ptt++);

    switch (key_name)
    {
    case TAIIC_PTT:
        mode_state->ptt_flag = key_state;
        if (mode_state->ptt_flag == PTT_DOWN)
        {
            ptt_down_num++;
            ptt_up_flag = false;

            is_model_run = RK_FALSE;
            // 嘀一声开始数据采集
            mtx_uart.lock();
            uart_main_func(1, 1, 0, 0);
            mtx_uart.unlock();

            server_client_frame_package_function(msg_presend_instruct, NULL, 0); // 0x31 00

            if (only_mic_custom_tl)
            {
                // 20250618 add by mtd////////////////////////////////////////
                custom_audio_presend_func(dataType_air, PER_AUDIO_FRAME_LEN); // 0x31 0x06

                uint8_t send_data[2] = {1, 1};
                ret = frame_package_function(mic_adta_request_cmd, &send_data, sizeof(send_data));
                this_thread::sleep_for(std::chrono::milliseconds(custom_tl));
                // 停止传输音频数据
                send_data[0] = 0;
                ret = frame_package_function(mic_adta_request_cmd, &send_data, sizeof(send_data));
                std::this_thread::sleep_for(std::chrono::milliseconds(100));

                data_type_finish_send_func(dataType_air);

                server_client_frame_package_function(msg_finish_send_instruct, NULL, 0); // 0x32 00
                only_mic_custom_tl = false;
                mtx_uart.lock();
                uart_main_func(1, 2, 0, 0); // 滴两声表示发送完成
                mtx_uart.unlock();
            }
            else
            {
                if ((only_custom_video) && (only_video)) // 更改图像请求分辨率（像素）
                {
                    message_type = 1;
                    custom_video_presend_func(request_img_width, request_img_height, 0);
                    // wake up isp image capture thread
                    start_video_flg = true;
                    start_vi = true;
                    cv_vi.notify_one(); // 通知等待的线程，条件已经满足
                }
                else if (only_air)
                {
                    message_type = 2;
                    uint8_t send_data_mic[2] = {1, 1}; // 气导：原始信号
                    printf("flow air audio ptt down\n");
                    custom_audio_presend_func(dataType_air, 0); // flow audio data len:0
                    // 开始传输音频数据
                    ret = frame_package_function(mic_adta_request_cmd, &send_data_mic, sizeof(send_data_mic));
                    std::this_thread::sleep_for(std::chrono::milliseconds(100));
                }
                else if (only_bone)
                {
                    message_type = 4;
                    printf("flow bone audio ptt down\n");
                    uint8_t send_data_mic[2] = {1, 2}; // 骨导：原始信号

                    custom_audio_presend_func(dataType_bone, 0); // flow audio data len:0
                    // 开始传输音频数据
                    ret = frame_package_function(mic_adta_request_cmd, &send_data_mic, sizeof(send_data_mic));
                    std::this_thread::sleep_for(std::chrono::milliseconds(100));
                }
                else if (only_air_bone)
                {
                    message_type = 6;
                    printf("flow air_bone audio ptt down\n");
                    uint8_t send_data_mic[2] = {1, 3}; // 气导+骨导

                    custom_audio_presend_func(dataType_air_bone, 0); // flow audio data len:480*2bytes
                    // 开始传输音频数据
                    ret = frame_package_function(mic_adta_request_cmd, &send_data_mic, sizeof(send_data_mic));
                    std::this_thread::sleep_for(std::chrono::milliseconds(100));
                }
                else if (only_video_air)
                {
                    message_type = 3;
                    printf("flow video_air_mic ptt down\n");
                    // start video
                    custom_video_presend_func(main_ctx.width, main_ctx.height, 0);
                    // wake up isp image capture thread
                    start_video_flg = true;
                    start_vi = true;
                    cv_vi.notify_one(); // 通知等待的线程，条件已经满足

                    // start air audio
                    uint8_t send_data_mic[2] = {1, 1};          // 气导：原始信号
                    custom_audio_presend_func(dataType_air, 0); // flow audio data len:0
                    // 开始传输音频数据
                    ret = frame_package_function(mic_adta_request_cmd, &send_data_mic, sizeof(send_data_mic));
                    std::this_thread::sleep_for(std::chrono::milliseconds(100));
                }
                else if (only_video_bone)
                {
                    message_type = 5;
                    printf("flow video_bone_mic ptt down\n");
                    // start video
                    custom_video_presend_func(main_ctx.width, main_ctx.height, 0);
                    // wake up isp image capture thread
                    start_video_flg = true;
                    start_vi = true;
                    cv_vi.notify_one(); // 通知等待的线程，条件已经满足

                    // start bone audio
                    uint8_t send_data_mic[2] = {1, 2};                             // 骨导：原始信号
                    custom_audio_presend_func(dataType_bone, PER_AUDIO_FRAME_LEN); // flow audio data len:480bytes
                    // 开始传输音频数据
                    ret = frame_package_function(mic_adta_request_cmd, &send_data_mic, sizeof(send_data_mic));
                    std::this_thread::sleep_for(std::chrono::milliseconds(100));
                }
                else if (only_video_air_bone)
                {
                    message_type = 7;
                    printf("flow video_air_bone_mic ptt down\n");
                    // start video
                    custom_video_presend_func(main_ctx.width, main_ctx.height, 0);
                    // wake up isp image capture thread
                    start_video_flg = true;
                    start_vi = true;
                    cv_vi.notify_one(); // 通知等待的线程，条件已经满足

                    // start bone audio
                    uint8_t send_data_mic[2] = {1, 3};                                     // 气导+骨导：原始信号
                    custom_audio_presend_func(dataType_air_bone, PER_AUDIO_FRAME_LEN * 2); // flow audio data len:480bytes
                    // 开始传输音频数据
                    ret = frame_package_function(mic_adta_request_cmd, &send_data_mic, sizeof(send_data_mic));
                    std::this_thread::sleep_for(std::chrono::milliseconds(100));
                }
                /*--------------------模型输入--------------------*/
                if ((only_video_model) && (!is_model_run)) // 视频模块的模型推理
                {
                    is_model_run = RK_TRUE;
                    custom_video_presend_func(main_ctx.width, main_ctx.height, model_video_size);

                    // wake up isp image capture thread
                    start_vi = true;
                    cv_vi.notify_one(); // 通知等待的线程，条件已经满足
                }
                else if ((only_air_model) && (!is_model_run)) // 气导音频模块的模型推理
                {
                    if (fileExists(air_pcm_path))
                    {
                        if (remove(air_pcm_path) == 0)
                        {
                            printf("air pcm file exists, and delete successfully\n");
                        }
                        else
                        {
                            printf("air pcm file delete failed\n");
                        }
                    }

                    is_model_run = RK_TRUE;

                    uint8_t send_data[2] = {1, 1};
                    ret = frame_package_function(mic_adta_request_cmd, &send_data, sizeof(send_data));
                    this_thread::sleep_for(std::chrono::seconds(1) + std::chrono::milliseconds(800)); // 延时1s+800ms=1.8s
                    // 停止传输音频数据
                    send_data[0] = 0;
                    ret = frame_package_function(mic_adta_request_cmd, &send_data, sizeof(send_data));
                }
                else if (only_bone_model) // bone音频模块的模型推理
                {
                    if (fileExists(bone_pcm_path))
                    {
                        if (remove(bone_pcm_path) == 0)
                        {
                            printf("bone pcm file exists, and delete successfully\n");
                        }
                        else
                        {
                            printf("bone pcm file delete failed\n");
                        }
                    }

                    uint8_t send_data[2] = {1, 2};
                    ret = frame_package_function(mic_adta_request_cmd, &send_data, sizeof(send_data));
                    this_thread::sleep_for(std::chrono::seconds(1) + std::chrono::milliseconds(800)); // 延时1s+800ms=1.8s
                    // 停止传输音频数据
                    send_data[0] = 0;
                    ret = frame_package_function(mic_adta_request_cmd, &send_data, sizeof(send_data));
                }
                else if (only_air_bone_model) // air+bone音频模块的模型推理
                {
                    if (fileExists(air_pcm_path))
                    {
                        if (remove(air_pcm_path) == 0)
                        {
                            printf("air pcm file exists, and delete successfully\n");
                        }
                        else
                        {
                            printf("air pcm file delete failed\n");
                        }
                    }
                    if (fileExists(bone_pcm_path))
                    {
                        if (remove(bone_pcm_path) == 0)
                        {
                            printf("bone pcm file exists, and delete successfully\n");
                        }
                        else
                        {
                            printf("bone pcm file delete failed\n");
                        }
                    }

                    uint8_t send_data[2] = {1, 3};
                    ret = frame_package_function(mic_adta_request_cmd, &send_data, sizeof(send_data));
                    this_thread::sleep_for(std::chrono::seconds(1) + std::chrono::milliseconds(800)); // 延时1s+800ms=1.8s
                    // 停止传输音频数据
                    send_data[0] = 0;
                    ret = frame_package_function(mic_adta_request_cmd, &send_data, sizeof(send_data));
                }
                else if ((only_video_air_model) && (!is_model_run)) // avsr model inference
                {
                    if (fileExists(air_pcm_path))
                    {
                        if (remove(air_pcm_path) == 0)
                        {
                            printf("air pcm file exists, and delete successfully\n");
                        }
                        else
                        {
                            printf("air pcm file delete failed\n");
                        }
                    }

                    is_model_run = RK_TRUE;
                    if (message_type == 9)
                    {
                        // wake up isp image capture thread
                        start_vi = true;
                        cv_vi.notify_one(); // 通知等待的线程，条件已经满足
                    }

                    uint8_t send_data[2] = {1, 1};
                    ret = frame_package_function(mic_adta_request_cmd, &send_data, sizeof(send_data));
                    this_thread::sleep_for(std::chrono::seconds(1) + std::chrono::milliseconds(800)); // 延时1s+800ms=1.8s
                    // 停止传输音频数据
                    send_data[0] = 0;
                    ret = frame_package_function(mic_adta_request_cmd, &send_data, sizeof(send_data));
                }
                else if (only_video_bone_model)
                {
                    if (fileExists(bone_pcm_path))
                    {
                        if (remove(bone_pcm_path) == 0)
                        {
                            printf("bone pcm file exists, and delete successfully\n");
                        }
                        else
                        {
                            printf("bone pcm file delete failed\n");
                        }
                    }

                    uint8_t send_data[2] = {1, 2};
                    ret = frame_package_function(mic_adta_request_cmd, &send_data, sizeof(send_data));
                    this_thread::sleep_for(std::chrono::seconds(1) + std::chrono::milliseconds(800)); // 延时1s+800ms=1.8s
                    // 停止传输音频数据
                    send_data[0] = 0;
                    ret = frame_package_function(mic_adta_request_cmd, &send_data, sizeof(send_data));
                }
                else if (only_video_air_bone_model)
                {
                    if (fileExists(air_pcm_path))
                    {
                        if (remove(air_pcm_path) == 0)
                        {
                            printf("air pcm file exists, and delete successfully\n");
                        }
                        else
                        {
                            printf("air pcm file delete failed\n");
                        }
                    }
                    if (fileExists(bone_pcm_path))
                    {
                        if (remove(bone_pcm_path) == 0)
                        {
                            printf("bone pcm file exists, and delete successfully\n");
                        }
                        else
                        {
                            printf("bone pcm file delete failed\n");
                        }
                    }
                    
                    uint8_t send_data[2] = {1, 3};
                    ret = frame_package_function(mic_adta_request_cmd, &send_data, sizeof(send_data));
                    this_thread::sleep_for(std::chrono::seconds(1) + std::chrono::milliseconds(800)); // 延时1s+800ms=1.8s
                    // 停止传输音频数据
                    send_data[0] = 0;
                    ret = frame_package_function(mic_adta_request_cmd, &send_data, sizeof(send_data));
                }
            }
        } // PTT DOWN
        else if ((mode_state->ptt_flag == PTT_UP) && (ptt_down_num > 0))
        {
            if (custom_tl == 0)
            {
                ptt_up_flag = true;

                if ((only_custom_video) && (only_video))
                {
                    start_video_flg = false;
                    only_custom_video = false;
                    only_video = false;
                    start_vi = false;

                    data_type_finish_send_func(dataType_video);
                }
                else if (only_air)
                {
                    printf("flow air ptt up\n");
                    // 停止传输音频数据
                    uint8_t send_data_mic[2] = {0, 1}; // 气导：原始信号
                    ret = frame_package_function(mic_adta_request_cmd, &send_data_mic, sizeof(send_data_mic));
                    std::this_thread::sleep_for(std::chrono::milliseconds(200));
                    data_type_finish_send_func(dataType_air);
                    only_air = false;
                }
                else if (only_bone)
                {
                    printf("flow bone ptt up\n");
                    // 停止传输音频数据
                    uint8_t send_data_mic[2] = {0, 2};
                    ret = frame_package_function(mic_adta_request_cmd, &send_data_mic, sizeof(send_data_mic));
                    std::this_thread::sleep_for(std::chrono::milliseconds(200));
                    data_type_finish_send_func(dataType_bone);
                    only_bone = false;
                }
                else if (only_air_bone)
                {
                    printf("flow air_bone ptt up\n");
                    // 停止传输音频数据
                    uint8_t send_data_mic[2] = {0, 3};
                    ret = frame_package_function(mic_adta_request_cmd, &send_data_mic, sizeof(send_data_mic));
                    std::this_thread::sleep_for(std::chrono::milliseconds(200));
                    data_type_finish_send_func(dataType_air_bone);
                    only_air_bone = false;
                }
                else if (only_video_air)
                {
                    // 1.stop video
                    start_video_flg = false;
                    start_vi = false;

                    // 2.停止传输音频数据
                    uint8_t send_data_mic[2] = {0, 1};
                    ret = frame_package_function(mic_adta_request_cmd, &send_data_mic, sizeof(send_data_mic));
                    std::this_thread::sleep_for(std::chrono::milliseconds(200));

                    data_type_finish_send_func(dataType_video);
                    std::this_thread::sleep_for(std::chrono::milliseconds(300));
                    data_type_finish_send_func(dataType_air);
                    only_video_air = false;
                }
                else if (only_video_bone)
                {
                    // 1.stop video
                    start_video_flg = false;
                    start_vi = false;

                    // 2.停止传输音频数据
                    uint8_t send_data_mic[2] = {0, 2};
                    ret = frame_package_function(mic_adta_request_cmd, &send_data_mic, sizeof(send_data_mic));
                    std::this_thread::sleep_for(std::chrono::milliseconds(200));

                    data_type_finish_send_func(dataType_video);
                    std::this_thread::sleep_for(std::chrono::milliseconds(300));
                    data_type_finish_send_func(dataType_bone);
                    only_video_bone = false;
                }
                else if (only_video_air_bone)
                {
                    // 1.stop video
                    start_video_flg = false;
                    start_vi = false;

                    // 2.停止传输音频数据
                    uint8_t send_data_mic[2] = {0, 3};
                    ret = frame_package_function(mic_adta_request_cmd, &send_data_mic, sizeof(send_data_mic));
                    std::this_thread::sleep_for(std::chrono::milliseconds(200));

                    data_type_finish_send_func(dataType_video);
                    std::this_thread::sleep_for(std::chrono::milliseconds(300));
                    data_type_finish_send_func(dataType_air_bone);
                    only_video_air_bone = false;
                }

                std::this_thread::sleep_for(std::chrono::milliseconds(100));
                if ((message_type >= 1) && (message_type <= 7))
                {
                    // 总的数据结束包发送
                    server_client_frame_package_function(msg_finish_send_instruct, NULL, 0); // 0x32 00
                    /*----------嘀两声:表示socket发送完成----------*/
                    mtx_uart.lock();
                    uart_main_func(1, 2, 0, 0); // 滴两声表示发送完成
                    mtx_uart.unlock();
                }
            }
        }
        break;
    }
}

// Model inference on one captured video frame: locate the lip region with the
// LM (landmark) model, crop/resize it to a 32*32 tile, place the tile into the
// (A)VSR input tensor, and on the final frame of the clip either run VSR and
// send the result (video-only mode) or set vi_end (video+air mode).
//
// @param data   pointer to the raw frame buffer (Y plane is wrapped below)
// @param wcount zero-based frame index within the clip (last = ORI_MODEL_C-1)
void video_model_inference_func(void *data, RK_U32 wcount)
{
    if (version == 0)
    {
        printf("ISP VI:%d\n", wcount);
    }
    // Wrap the frame as an 8-bit single-channel Mat; with CV_8UC1 and
    // height rows this covers only the Y (luma) plane of the NV12 buffer.
    cv::Mat image_src(main_ctx.height, main_ctx.width, CV_8UC1, (RK_U8 *)data); // NV12 -> Mat, 320*320
    // resize to the LM model's input resolution
    resize(image_src, image_resize, cv::Size(ORI_WIDTH, ORI_HEIGHT));

    // NOTE(review): assumes ORI_WIDTH*ORI_HEIGHT == LM_IMG_WIDTH*LM_IMG_HEIGHT
    // (112*112) so the copy exactly fills lm_input_data — confirm.
    memcpy(lm_input_data, reinterpret_cast<RK_U8 *>(image_resize.data), sizeof(RK_U8) * ORI_HEIGHT * ORI_WIDTH);

    lm_rknn_toolkit_data_refresh(lm_ctx, &lm_input_data[0][0][0][0]);
    int ret = rknn_run(lm_ctx->context, NULL); // NOTE(review): ret is never checked

    // Lip-corner coordinates predicted by the landmark model.
    LM_RESULT_S result = lm_toolkit_result_parameter_extend25(lm_ctx);
    RK_LOGI("wcount:%4d frame data result is [%d, %d, %d, %d]\n", wcount, result.left_x, result.left_y, result.right_x, result.right_y);

    if (result.right_x > result.left_x && result.right_y > result.left_y)
    {
        // Valid lip box: crop around the detected corners, then resize to 32*32.
        taiic_y_resize_crop_resize(&lm_input_data[0][0][0][0], &lm_output_data[0][0][0][0],
                                   LM_IMG_WIDTH, LM_IMG_HEIGHT,
                                   BEFORE_CROP, BEFORE_CROP,
                                   result.left_x, result.left_y,
                                   result.right_x, result.right_y,
                                   RESIZE_IMG_WIDTH, RESIZE_IMG_HEIGHT);
    }
    else
    {
        // Degenerate box: fall back to a plain full-frame resize.
        taiic_y_resize(&lm_input_data[0][0][0][0], &lm_output_data[0][0][0][0],
                       LM_IMG_WIDTH, LM_IMG_HEIGHT,
                       RESIZE_IMG_WIDTH, RESIZE_IMG_HEIGHT);
    }

    // Destination coordinates of this frame's 32*32 tile inside the model tensor.
    RK_U8 *pTmpData = &lm_output_data[0][0][0][0];
    int cnow = wcount / (RESHAPE_H * RESHAPE_W);               // channel index
    int hnow = (wcount % (RESHAPE_H * RESHAPE_W)) / RESHAPE_W; // tile row
    int wnow = (wcount % (RESHAPE_H * RESHAPE_W)) / RESHAPE_H; // tile column
    // NOTE(review): wnow divides by RESHAPE_H where '% RESHAPE_W' would be the
    // conventional column formula; the two only agree when RESHAPE_W == 1
    // (consistent with the 1*384*32*4 VSR input) — confirm the constants.
    RK_LOGD("==data location  c is %d, h is %d, w is %d===\n", cnow, hnow, wnow);
    if (only_video_model)
    {
        // Copy the 32*32 crop into its tile slot of the VSR input (NHWC layout).
        for (int i = 0; i < RESIZE_PIC_H; i++)
        {
            for (int j = 0; j < RESIZE_PIC_W; j++)
            {
                vsr_input_data[0][RESIZE_PIC_H * hnow + i][RESIZE_PIC_W * wnow + j][cnow] = *pTmpData;
                pTmpData += 1;
            }
        }
        // Last frame of the clip: run VSR inference and report the result.
        if (wcount == (ORI_MODEL_C - 1))
        {
            vsr_rknn_toolkit_data_refresh(vsr_ctx, &vsr_input_data[0][0][0][0]);
            rknn_run(vsr_ctx->context, NULL);

            MODEL_RESULT_S vsr_result = vsr_rknn_toolkit_result_int8_opt(vsr_ctx);
            vsr_result.intprob = static_cast<int>(round(vsr_result.prob * 100)); // probability as integer percent

            printf("VSR300:result_label=%d, prob=%f,intprob=%d\n", vsr_result.label, vsr_result.prob, vsr_result.intprob);
            // Send video label + confidence: presend, payload, then finish instruction.
            results_presend_func();
            results_scores_send_func(vsr_result.label, vsr_result.intprob);
            data_type_finish_send_func(dataType_result);
        }
    }
    else if (only_video_air_model)
    {
        // AVSR path: accumulate the tile into the video tensor only; presumably
        // the audio path consumes it after vi_end is set — confirm against caller.
        for (int i = 0; i < RESIZE_PIC_H; i++)
        {
            for (int j = 0; j < RESIZE_PIC_W; j++)
            {
                avsr_video_input_data[0][RESIZE_PIC_H * hnow + i][RESIZE_PIC_W * wnow + j][cnow] = *pTmpData;
                pTmpData += 1;
            }
        }

        if (wcount == (ORI_MODEL_C - 1))
        {
            vi_end = 1; // video capture finished
            printf("vi_end=%d\n", vi_end);
        }
    }
}

// video data send module
// Packetize one captured frame and push it to the connected socket client.
//
// data   : pointer to one NV12 frame (main_ctx.width x main_ctx.height).
// wcount : index of this frame within the capture sequence; frame 0
//          (re)initializes the shared packet headers.
//
// Two send paths:
//  - "flow" message types (1/3/5/7/8/9): downscale the luma plane to
//    width/2 x height/2 and send it as one FLOW_VIDEO_LENGTH packet.
//  - otherwise: split the full frame into (SEND_BUF_SIZE - header)-sized
//    packets plus one TAIL_PKG_LENGTH-sized tail packet.
void video_data_send_data(void *data, RK_U32 wcount)
{
    if (wcount == 0)
    {
        // First frame of a sequence: rebuild both packet headers.
        memset(public_soc_buffer, 0, SEND_BUF_SIZE);
        public_soc_buffer[0] = static_cast<uint8_t>(msg_sensor_data_instruct);
        public_soc_buffer[1] = 0x06;
        public_soc_buffer[3] = dataType_video; // 00 01 marks video data
        //
        memset(tail_pkg_public_soc_buffer, 0, TAIL_PKG_LENGTH + SEND_FRAME_HEAD_LEN);
        tail_pkg_public_soc_buffer[0] = static_cast<uint8_t>(msg_sensor_data_instruct);
        tail_pkg_public_soc_buffer[1] = 0x06;
        tail_pkg_public_soc_buffer[3] = dataType_video; // 00 01 marks video data
    }

    if ((message_type == 1) || (message_type == 3) || (message_type == 5) || (message_type == 7) || (message_type == 8) || (message_type == 9))
    {
        printf("ISP VI DATA IS:%d,%d,Send Size:160*160\n", wcount, message_type);
        cv::Mat image_src(main_ctx.height, main_ctx.width, CV_8UC1, (RK_U8 *)data); // wrap frame as single-channel Mat (NV12 luma plane)
        // resize to 160*160
        resize(image_src, image_resize_send, cv::Size(main_ctx.width / 2, main_ctx.height / 2));

        if (wcount == 0)
        {
            intToUint8Array(FLOW_VIDEO_LENGTH, &public_soc_buffer[8]); // send valid data length:160*160
        }
        uint8_t *byteData = static_cast<uint8_t *>(image_resize_send.data);
        memcpy(&public_soc_buffer[SEND_FRAME_HEAD_LEN], byteData, FLOW_VIDEO_LENGTH);

        // message_type 9 captures for model inference only; no live stream out.
        if ((message_type != 9) && (message_type > 0))
        {
            mtx_soc_sendData.lock();
            serverSocket.sendData(public_soc_buffer, FLOW_VIDEO_LENGTH + SEND_FRAME_HEAD_LEN);
            mtx_soc_sendData.unlock();
        }

        // Last frame of the sequence: notify the client the video data type is done.
        if ((VideoModelInf_or_DataType) && (wcount == (ORI_MODEL_C - 1)))
        {
            data_type_finish_send_func(dataType_video);
            vi_end = 1;
        }
    }
    else
    {
        printf("ISP VI DATA IS:%d,%d,Send Size:320*320\n", wcount, message_type);
        if (wcount == 0)
        {
            intToUint8Array(SEND_BUF_SIZE - SEND_FRAME_HEAD_LEN, &public_soc_buffer[8]); // send valid data length
            // NOTE(review): always adds 1, so a payload that divides evenly still
            // produces an extra tail packet -- confirm the client expects this.
            total_pkg = single_video_total_bytes / (SEND_BUF_SIZE - SEND_FRAME_HEAD_LEN) + 1;

            intToUint8Array(TAIL_PKG_LENGTH, &tail_pkg_public_soc_buffer[8]); // send valid data length
        }

        uint8_t *byteData = static_cast<uint8_t *>(data);
        // int len = 0;
        for (int i = 0; i < total_pkg; i++)
        {
            current_img_pkg = i; // current packet index
            // len = single_video_total_bytes - i * (SEND_BUF_SIZE - SEND_FRAME_HEAD_LEN);
            // printf("len=%d\n", len);

            if (i < (total_pkg - 1))
            {
                // Full-size middle packet.
                intToUint8Array(current_img_pkg, &public_soc_buffer[4]);
                memset(&public_soc_buffer[SEND_FRAME_HEAD_LEN], 0, SEND_BUF_SIZE - SEND_FRAME_HEAD_LEN);
                memcpy(&public_soc_buffer[SEND_FRAME_HEAD_LEN], byteData + i * (SEND_BUF_SIZE - SEND_FRAME_HEAD_LEN), SEND_BUF_SIZE - SEND_FRAME_HEAD_LEN);
                // send
                mtx_soc_sendData.lock();
                serverSocket.sendData(public_soc_buffer, SEND_BUF_SIZE);
                mtx_soc_sendData.unlock();
            }
            else if (i == (total_pkg - 1))
            {
                // Final (shorter) tail packet.
                intToUint8Array(current_img_pkg, &tail_pkg_public_soc_buffer[4]);
                memset(&tail_pkg_public_soc_buffer[SEND_FRAME_HEAD_LEN], 0, TAIL_PKG_LENGTH);
                memcpy(&tail_pkg_public_soc_buffer[SEND_FRAME_HEAD_LEN], byteData + i * (SEND_BUF_SIZE - SEND_FRAME_HEAD_LEN), TAIL_PKG_LENGTH);

                // send
                mtx_soc_sendData.lock();
                serverSocket.sendData(tail_pkg_public_soc_buffer, TAIL_PKG_LENGTH + SEND_FRAME_HEAD_LEN);
                mtx_soc_sendData.unlock();

                if ((VideoModelInf_or_DataType) && (wcount == (ORI_MODEL_C - 1))) // data capture for model inference (vsr)
                {
                    data_type_finish_send_func(dataType_video);
                    vi_end = 1;
                }
            }
        }
    }
}

// data   : pointer to one frame of image data
// wcount : index of the current frame within the capture sequence
// Per-frame video capture callback: dispatches the frame to model
// inference and/or socket streaming, and emits the end-of-transfer
// packet (plus buzzer feedback) after the last frame of a sequence.
static void vi_frame_data_cback(void *data, RK_U32 wcount)
{
    /*---------------------model inference module---------------------*/
    if ((message_type == 8) || (message_type == 9))
    {
        video_model_inference_func(data, wcount);
    }
    /*---------------------video data send module---------------------*/
    if (version != 0)
    {
        video_data_send_data(data, wcount);
    }
    // Send the overall end-of-data packet once the last frame has been handled.
    if ((only_video_model) && (wcount == (ORI_MODEL_C - 1))) // noflow: model inference on video model data (vsr)
    {
        server_client_frame_package_function(msg_finish_send_instruct, NULL, 0); // 0x32 00
        is_model_run = RK_FALSE;
        // only_video_model = false;

        mtx_uart.lock();
        uart_main_func(1, 2, 0, 0); // two beeps signal that sending is finished
        mtx_uart.unlock();
    }
    else if (((message_type == 90) || (only_video_bone_model) || (only_video_air_bone_model)) && (wcount == (ORI_MODEL_C - 1)) && (ai_end == 1))
    {
        // End-of-data packet for combined audio+video paths (requires ai_end).
        this_thread::sleep_for(std::chrono::milliseconds(300));
        server_client_frame_package_function(msg_finish_send_instruct, NULL, 0); // 0x32 00
        /*---------- two beeps: socket transfer finished ----------*/
        mtx_uart.lock();
        uart_main_func(1, 2, 0, 0); // two beeps signal that sending is finished
        mtx_uart.unlock();
    }
}

/*------------------------isp func start------------------------*/
// Return the V4L2 device node path selected by ctx->dev_using (1..4).
// Logs an error and returns NULL for any other value.
char *get_dev_name(demo_context_t *ctx)
{
    switch (ctx->dev_using)
    {
    case 1:
        return ctx->dev_name;
    case 2:
        return ctx->dev_name2;
    case 3:
        return ctx->dev_name3;
    case 4:
        return ctx->dev_name4;
    default:
        ERR("!!!dev_using is not supported!!!");
        return NULL;
    }
}

// Return the sensor name string stored in the context.
char *get_sensor_name(demo_context_t *ctx)
{
    return ctx->sns_name;
}
// Log a syscall/ioctl failure together with the current errno.
// NOTE: despite the name this no longer terminates the process -- the
// exit() call is commented out, so callers continue after the error.
static void errno_exit(demo_context_t *ctx, const char *s)
{
    ERR("%s: %s error %d, %s\n", get_sensor_name(ctx), s, errno, strerror(errno));
    // exit(EXIT_FAILURE);
}

// Dequeue one filled capture buffer, hand it to process_image(), optionally
// display it via DRM/RGA, then requeue the buffer. Returns 1.
// flg is forwarded unchanged to process_image().
static int read_frame(demo_context_t *ctx, int flg)
{
    struct v4l2_buffer buf;
    int i, bytesused;

    CLEAR(buf);

    buf.type = ctx->buf_type;
    buf.memory = V4L2_MEMORY_MMAP;

    // Multi-planar capture needs a plane descriptor array attached to the buffer.
    struct v4l2_plane planes[FMT_NUM_PLANES];
    memset(planes, 0, sizeof(struct v4l2_plane) * FMT_NUM_PLANES);
    if (V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE == ctx->buf_type)
    {
        buf.m.planes = planes;
        buf.length = FMT_NUM_PLANES;
    }

    // Blocking dequeue of the next filled buffer.
    if (-1 == xioctl(ctx->fd, VIDIOC_DQBUF, &buf))
        errno_exit(ctx, "VIDIOC_DQBUF");

    i = buf.index;

    // Payload size lives in the plane for MPLANE, in the buffer otherwise.
    if (V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE == ctx->buf_type)
        bytesused = buf.m.planes[0].bytesused;
    else
        bytesused = buf.bytesused;

#if ISPDEMO_ENABLE_DRM
    if (ctx->vop)
    {
        // Clamp display size to 1920x1088.
        int dispWidth, dispHeight;

        if (ctx->width > 1920)
            dispWidth = 1920;
        else
            dispWidth = ctx->width;

        if (ctx->height > 1088)
            dispHeight = 1088;
        else
            dispHeight = ctx->height;

#if ISPDEMO_ENABLE_RGA
        // Dual-sensor setup: route each camera to its own display window.
        if (strlen(ctx->dev_name) && strlen(ctx->dev_name2))
        {
            if (ctx->dev_using == 1)
                display_win1(ctx->buffers[i].start, ctx->buffers[i].export_fd, RK_FORMAT_YCbCr_420_SP, dispWidth, dispHeight, 0);
            else
                display_win2(ctx->buffers[i].start, ctx->buffers[i].export_fd, RK_FORMAT_YCbCr_420_SP, dispWidth, dispHeight, 0);
        }
        else
        {
#else
        {
#endif
            drmDspFrame(ctx->width, ctx->height, dispWidth, dispHeight, ctx->buffers[i].export_fd, DRM_FORMAT_NV12);
        }
    }
#endif

    // Consumer callback; buffer memory is only valid until the QBUF below.
    process_image(ctx->buffers[i].start, buf.sequence, bytesused, ctx, flg);

    // Return the buffer to the driver's queue.
    if (-1 == xioctl(ctx->fd, VIDIOC_QBUF, &buf))
        errno_exit(ctx, "VIDIOC_QBUF");

    return 1;
}
// Stop streaming on the context's main capture fd.
static void stop_capturing(demo_context_t *ctx)
{
    enum v4l2_buf_type buf_type = ctx->buf_type;

    if (xioctl(ctx->fd, VIDIOC_STREAMOFF, &buf_type) == -1)
        errno_exit(ctx, "VIDIOC_STREAMOFF");
}

// Stop streaming on both legs of the pp-oneframe path:
// first the pp input (output-mplane), then the ISP main path.
static void stop_capturing_pp_oneframe(demo_context_t *ctx)
{
    enum v4l2_buf_type buf_type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;

    if (xioctl(ctx->fd_pp_input, VIDIOC_STREAMOFF, &buf_type) == -1)
        errno_exit(ctx, "VIDIOC_STREAMOFF ppinput");

    buf_type = ctx->buf_type;
    if (xioctl(ctx->fd_isp_mp, VIDIOC_STREAMOFF, &buf_type) == -1)
        errno_exit(ctx, "VIDIOC_STREAMOFF ispmp");
}

// Queue every mmap'ed buffer to the driver, then start streaming on the
// main capture fd.
//
// BUGFIX: `planes` was previously declared inside the MPLANE `if` block,
// so `buf.m.planes` pointed at out-of-scope stack memory by the time
// VIDIOC_QBUF dereferenced it (and the array was never zeroed). Hoist it
// to function scope and clear it, matching read_frame().
static void start_capturing(demo_context_t *ctx)
{
    unsigned int i;
    enum v4l2_buf_type type;
    struct v4l2_plane planes[FMT_NUM_PLANES];

    for (i = 0; i < ctx->n_buffers; ++i)
    {
        struct v4l2_buffer buf;

        CLEAR(buf);
        buf.type = ctx->buf_type;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index = i;

        if (V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE == ctx->buf_type)
        {
            memset(planes, 0, sizeof(planes));
            buf.m.planes = planes;
            buf.length = FMT_NUM_PLANES;
        }
        if (-1 == xioctl(ctx->fd, VIDIOC_QBUF, &buf))
            errno_exit(ctx, "VIDIOC_QBUF");
    }
    type = ctx->buf_type;
    DBG("%s:-------- stream on output -------------\n", get_sensor_name(ctx));

    if (-1 == xioctl(ctx->fd, VIDIOC_STREAMON, &type))
        errno_exit(ctx, "VIDIOC_STREAMON");
}

// Start streaming on the pp-oneframe path: stream on the pp input first,
// then queue all ISP main-path buffers and stream that on too.
//
// BUGFIX: as in start_capturing(), `planes` used to live inside the
// MPLANE `if` block, leaving `buf.m.planes` dangling (and uninitialized)
// when VIDIOC_QBUF read it. Hoisted to function scope and zeroed.
static void start_capturing_pp_oneframe(demo_context_t *ctx)
{
    unsigned int i;
    enum v4l2_buf_type type;
    struct v4l2_plane planes[FMT_NUM_PLANES];

    type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
    DBG("%s:-------- stream on pp input -------------\n", get_sensor_name(ctx));
    if (-1 == xioctl(ctx->fd_pp_input, VIDIOC_STREAMON, &type))
        errno_exit(ctx, "VIDIOC_STREAMON pp input");

    type = ctx->buf_type;
    for (i = 0; i < ctx->n_buffers; ++i)
    {
        struct v4l2_buffer buf;

        CLEAR(buf);
        buf.type = ctx->buf_type;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index = i;

        if (V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE == ctx->buf_type)
        {
            memset(planes, 0, sizeof(planes));
            buf.m.planes = planes;
            buf.length = FMT_NUM_PLANES;
        }
        if (-1 == xioctl(ctx->fd_isp_mp, VIDIOC_QBUF, &buf))
            errno_exit(ctx, "VIDIOC_QBUF");
    }
    DBG("%s:-------- stream on isp mp -------------\n", get_sensor_name(ctx));
    if (-1 == xioctl(ctx->fd_isp_mp, VIDIOC_STREAMON, &type))
        errno_exit(ctx, "VIDIOC_STREAMON ispmp");
}

// Unmap and release every main-path capture buffer, then free the
// bookkeeping array. Safe to call repeatedly: n_buffers is zeroed after
// the release, and a zero count returns immediately.
static void uninit_device(demo_context_t *ctx)
{
    if (ctx->n_buffers == 0)
        return;

    for (unsigned int idx = 0; idx < ctx->n_buffers; ++idx)
    {
        if (munmap(ctx->buffers[idx].start, ctx->buffers[idx].length) == -1)
            errno_exit(ctx, "munmap");

        close(ctx->buffers[idx].export_fd);
    }

    free(ctx->buffers);
    ctx->n_buffers = 0;
}

// Unmap and release the pp-oneframe (ISP main path) buffers.
//
// FIX: guard against a NULL buffers_mp and clear the pointer after
// freeing, mirroring uninit_device()'s idempotence and preventing a
// double free if called twice.
// NOTE(review): ctx->n_buffers is the same counter used for the main
// buffers array; if uninit_device() has already zeroed it, this loop
// unmaps nothing -- call this function before uninit_device().
static void uninit_device_pp_oneframe(demo_context_t *ctx)
{
    unsigned int i;

    if (!ctx->buffers_mp)
        return;

    for (i = 0; i < ctx->n_buffers; ++i)
    {
        if (-1 == munmap(ctx->buffers_mp[i].start, ctx->buffers_mp[i].length))
            errno_exit(ctx, "munmap");

        close(ctx->buffers_mp[i].export_fd);
    }

    free(ctx->buffers_mp);
    ctx->buffers_mp = NULL;
}

// Request BUFFER_COUNT driver buffers on the chosen fd, mmap each one into
// this process, and export a DMA-BUF fd per buffer.
//
// pp_onframe : non-zero selects the ISP main-path fd (fd_isp_mp) and fills
//              ctx->buffers_mp; zero selects ctx->fd and fills ctx->buffers.
// On exit ctx->n_buffers holds the number of mapped buffers.
// NOTE: error paths only log (exit() calls are commented out), so a failed
// REQBUFS/calloc falls through -- callers get a partially initialized state.
static void init_mmap(int pp_onframe, demo_context_t *ctx)
{
    struct v4l2_requestbuffers req;
    int fd_tmp = -1;

    CLEAR(req);

    // Select which queue we are mapping.
    if (pp_onframe)
        fd_tmp = ctx->fd_isp_mp;
    else
        fd_tmp = ctx->fd;

    req.count = BUFFER_COUNT;
    req.type = ctx->buf_type;
    req.memory = V4L2_MEMORY_MMAP;

    struct buffer *tmp_buffers = NULL;

    if (-1 == xioctl(fd_tmp, VIDIOC_REQBUFS, &req))
    {
        if (EINVAL == errno)
        {
            ERR("%s: %s does not support "
                "memory mapping\n",
                get_sensor_name(ctx), get_dev_name(ctx));
            // exit(EXIT_FAILURE);
        }
        else
        {
            errno_exit(ctx, "VIDIOC_REQBUFS");
        }
    }

    // The driver may grant fewer buffers than requested.
    if (req.count < 2)
    {
        ERR("%s: Insufficient buffer memory on %s\n", get_sensor_name(ctx),
            get_dev_name(ctx));
        // exit(EXIT_FAILURE);
    }

    tmp_buffers = (struct buffer *)calloc(req.count, sizeof(struct buffer));

    if (!tmp_buffers)
    {
        ERR("%s: Out of memory\n", get_sensor_name(ctx));
        // exit(EXIT_FAILURE);
    }

    if (pp_onframe)
        ctx->buffers_mp = tmp_buffers;
    else
        ctx->buffers = tmp_buffers;

    // Query, mmap, and DMA-export each granted buffer.
    for (ctx->n_buffers = 0; ctx->n_buffers < req.count; ++ctx->n_buffers)
    {
        struct v4l2_buffer buf;
        struct v4l2_plane planes[FMT_NUM_PLANES];
        CLEAR(buf);
        CLEAR(planes);

        buf.type = ctx->buf_type;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index = ctx->n_buffers;

        if (V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE == ctx->buf_type)
        {
            buf.m.planes = planes;
            buf.length = FMT_NUM_PLANES;
        }

        if (-1 == xioctl(fd_tmp, VIDIOC_QUERYBUF, &buf))
            errno_exit(ctx, "VIDIOC_QUERYBUF");

        // mmap offset/length come from the plane for MPLANE, else the buffer.
        if (V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE == ctx->buf_type)
        {
            tmp_buffers[ctx->n_buffers].length = buf.m.planes[0].length;
            tmp_buffers[ctx->n_buffers].start =
                mmap(NULL /* start anywhere */,
                     buf.m.planes[0].length,
                     PROT_READ | PROT_WRITE /* required */,
                     MAP_SHARED /* recommended */,
                     fd_tmp, buf.m.planes[0].m.mem_offset);
        }
        else
        {
            tmp_buffers[ctx->n_buffers].length = buf.length;
            tmp_buffers[ctx->n_buffers].start =
                mmap(NULL /* start anywhere */,
                     buf.length,
                     PROT_READ | PROT_WRITE /* required */,
                     MAP_SHARED /* recommended */,
                     fd_tmp, buf.m.offset);
        }

        if (MAP_FAILED == tmp_buffers[ctx->n_buffers].start)
            errno_exit(ctx, "mmap");

        // export buf dma fd (used by the DRM/RGA display path)
        struct v4l2_exportbuffer expbuf;
        xcam_mem_clear(expbuf);
        expbuf.type = ctx->buf_type;
        expbuf.index = ctx->n_buffers;
        expbuf.flags = O_CLOEXEC;
        if (xioctl(fd_tmp, VIDIOC_EXPBUF, &expbuf) < 0)
        {
            errno_exit(ctx, "get dma buf failed\n");
        }
        else
        {
            DBG("%s: get dma buf(%d)-fd: %d\n", get_sensor_name(ctx), ctx->n_buffers, expbuf.fd);
        }
        tmp_buffers[ctx->n_buffers].export_fd = expbuf.fd;
    }
}

// Set up the pp-oneframe input: request DMABUF buffers on the pp input
// queue, then request/map the ISP main-path buffers via init_mmap(true, ...).
// Unlike most error paths in this file, failures here really do exit().
static void init_input_dmabuf_oneframe(demo_context_t *ctx)
{
    struct v4l2_requestbuffers req;

    CLEAR(req);

    printf("%s:-------- request pp input buffer -------------\n", get_sensor_name(ctx));
    req.count = BUFFER_COUNT;
    req.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
    req.memory = V4L2_MEMORY_DMABUF;

    if (-1 == xioctl(ctx->fd_pp_input, VIDIOC_REQBUFS, &req))
    {
        if (EINVAL == errno)
        {
            ERR("does not support "
                "DMABUF\n");
            exit(EXIT_FAILURE);
        }
        else
        {
            errno_exit(ctx, "VIDIOC_REQBUFS");
        }
    }

    if (req.count < 2)
    {
        ERR("Insufficient buffer memory on %s\n",
            get_dev_name(ctx));
        exit(EXIT_FAILURE);
    }
    printf("%s:-------- request isp mp buffer -------------\n", get_sensor_name(ctx));
    init_mmap(true, ctx);
}

// Query device capabilities, pick single- vs multi-planar capture, set the
// pixel format/resolution/quantization, and mmap the buffers.
//
// FIX: the exit() calls on the capability checks are commented out, so a
// device with neither capture capability used to fall through and pass an
// uninitialized `fmt` (stack garbage) to VIDIOC_S_FMT. Zero it up front so
// the fall-through path hands the driver a well-defined (all-zero) struct.
static void init_device(demo_context_t *ctx)
{
    struct v4l2_capability cap;
    struct v4l2_format fmt;

    CLEAR(fmt); // error paths below only log; never reach S_FMT with garbage

    if (-1 == xioctl(ctx->fd, VIDIOC_QUERYCAP, &cap))
    {
        if (EINVAL == errno)
        {
            ERR("%s: %s is no V4L2 device\n", get_sensor_name(ctx),
                get_dev_name(ctx));
            // exit(EXIT_FAILURE);
        }
        else
        {
            errno_exit(ctx, "VIDIOC_QUERYCAP");
        }
    }

    if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) &&
        !(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE_MPLANE))
    {
        ERR("%s: %s is not a video capture device, capabilities: %x\n",
            get_sensor_name(ctx), get_dev_name(ctx), cap.capabilities);
        // exit(EXIT_FAILURE);
    }

    if (!(cap.capabilities & V4L2_CAP_STREAMING))
    {
        ERR("%s: %s does not support streaming i/o\n", get_sensor_name(ctx),
            get_dev_name(ctx));
        // exit(EXIT_FAILURE);
    }

    // Prefer the single-planar API when available, else multi-planar.
    if (cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)
    {
        ctx->buf_type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        CLEAR(fmt);
        fmt.type = ctx->buf_type;
        fmt.fmt.pix.width = ctx->width;
        fmt.fmt.pix.height = ctx->height;
        fmt.fmt.pix.pixelformat = ctx->format;
        fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;
        if (ctx->limit_range)
            fmt.fmt.pix.quantization = V4L2_QUANTIZATION_LIM_RANGE;
        else
            fmt.fmt.pix.quantization = V4L2_QUANTIZATION_FULL_RANGE;
    }
    else if (cap.capabilities & V4L2_CAP_VIDEO_CAPTURE_MPLANE)
    {
        ctx->buf_type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
        CLEAR(fmt);
        fmt.type = ctx->buf_type;
        fmt.fmt.pix_mp.width = ctx->width;
        fmt.fmt.pix_mp.height = ctx->height;
        fmt.fmt.pix_mp.pixelformat = ctx->format;
        fmt.fmt.pix_mp.field = V4L2_FIELD_INTERLACED;
        if (ctx->limit_range)
            fmt.fmt.pix_mp.quantization = V4L2_QUANTIZATION_LIM_RANGE;
        else
            fmt.fmt.pix_mp.quantization = V4L2_QUANTIZATION_FULL_RANGE;
    }

    if (-1 == xioctl(ctx->fd, VIDIOC_S_FMT, &fmt))
        errno_exit(ctx, "VIDIOC_S_FMT");

    init_mmap(false, ctx);
}

// Initialize the pp-oneframe device path (buffer setup only).
static void init_device_pp_oneframe(demo_context_t *ctx)
{
    // TODO, set format and link, now do with setup_link.sh
    init_input_dmabuf_oneframe(ctx);
}

// Close the main capture fd and mark it invalid.
static void close_device(demo_context_t *ctx)
{
    if (close(ctx->fd) == -1)
        errno_exit(ctx, "close");

    ctx->fd = -1;
}

// Open the selected output device node read/write in blocking mode.
// Exits the process if the node cannot be opened.
static void open_device(demo_context_t *ctx)
{
    printf("-------- open output dev_name:%s -------------\n", get_dev_name(ctx));

    ctx->fd = open(get_dev_name(ctx), O_RDWR /* required */ /*| O_NONBLOCK*/, 0);
    if (ctx->fd == -1)
    {
        ERR("Cannot open '%s': %d, %s\n",
            get_dev_name(ctx), errno, strerror(errno));
        exit(EXIT_FAILURE);
    }
}

// Close both pp-oneframe fds (pp input and ISP main path) and mark them invalid.
static void close_device_pp_oneframe(demo_context_t *ctx)
{
    if (close(ctx->fd_pp_input) == -1)
        errno_exit(ctx, "close");
    ctx->fd_pp_input = -1;

    if (close(ctx->fd_isp_mp) == -1)
        errno_exit(ctx, "close");
    ctx->fd_isp_mp = -1;
}

// Open the two fixed device nodes of the pp-oneframe path:
// /dev/video13 (pp input) and /dev/video0 (ISP main path).
// Exits the process if either open fails.
//
// BUGFIX: the error messages used to print get_dev_name(ctx) -- the main
// capture node -- instead of the device that actually failed to open,
// making the diagnostics misleading. Report the real path.
static void open_device_pp_oneframe(demo_context_t *ctx)
{
    static const char *pp_input_dev = "/dev/video13";
    static const char *isp_mp_dev = "/dev/video0";

    printf("-------- open pp input(video13) -------------\n");
    ctx->fd_pp_input = open(pp_input_dev, O_RDWR /* required */ /*| O_NONBLOCK*/, 0);

    if (-1 == ctx->fd_pp_input)
    {
        ERR("Cannot open '%s': %d, %s\n",
            pp_input_dev, errno, strerror(errno));
        exit(EXIT_FAILURE);
    }

    printf("-------- open isp mp(video0) -------------\n");
    ctx->fd_isp_mp = open(isp_mp_dev, O_RDWR /* required */ /*| O_NONBLOCK*/, 0);

    if (-1 == ctx->fd_isp_mp)
    {
        ERR("Cannot open '%s': %d, %s\n",
            isp_mp_dev, errno, strerror(errno));
        exit(EXIT_FAILURE);
    }
}

// Full teardown for one capture context: stop streaming, stop/deinit the
// AIQ (or camgroup) engine, release buffers, close fds and the dump file.
//
// BUGFIX: uninit_device() zeroes ctx->n_buffers, which is also the loop
// bound uninit_device_pp_oneframe() iterates over -- so the old order
// (main path first) made the pp loop run zero times and leaked every
// buffers_mp mapping. Release the pp-oneframe buffers first.
static void deinit(demo_context_t *ctx)
{
    if (!ctx->camgroup_ctx)
        stop_capturing(ctx);

    if (ctx->pponeframe)
        stop_capturing_pp_oneframe(ctx);
    if (ctx->aiq_ctx)
    {
        printf("%s:-------- stop aiq -------------\n", get_sensor_name(ctx));
        rk_aiq_uapi2_sysctl_stop(ctx->aiq_ctx, false); // stop the AIQ control system
    }
    else if (ctx->camgroup_ctx)
    {
        // Camgroup mode: only the first device stops/destroys the group.
        if (ctx->dev_using == 1)
        {
            printf("%s:-------- stop aiq camgroup -------------\n", get_sensor_name(ctx));
            rk_aiq_uapi2_camgroup_stop(ctx->camgroup_ctx);
#ifdef CUSTOM_GROUP_AE_DEMO_TEST
            rk_aiq_uapi2_customAE_unRegister((const rk_aiq_sys_ctx_t *)(ctx->camgroup_ctx));
#endif
#ifdef CUSTOM_GROUP_AWB_DEMO_TEST
            rk_aiq_uapi2_customAWB_unRegister((const rk_aiq_sys_ctx_t *)(ctx->camgroup_ctx));
#endif
        }
    }

    if (ctx->aiq_ctx)
    {
        printf("%s:-------- deinit aiq -------------\n", get_sensor_name(ctx));
#ifdef CUSTOM_AE_DEMO_TEST
        // rk_aiq_AELibunRegCallBack(ctx->aiq_ctx, 0);
        rk_aiq_uapi2_customAE_unRegister(ctx->aiq_ctx);
#endif
#ifdef CUSTOM_AWB_DEMO_TEST
        // rk_aiq_AELibunRegCallBack(ctx->aiq_ctx, 0);
        rk_aiq_uapi2_customAWB_unRegister(ctx->aiq_ctx);
#endif
        rk_aiq_uapi2_sysctl_deinit(ctx->aiq_ctx);
        printf("%s:-------- deinit aiq end -------------\n", get_sensor_name(ctx));
    }
    else if (ctx->camgroup_ctx)
    {
        if (ctx->dev_using == 1)
        {
            printf("%s:-------- deinit aiq camgroup -------------\n", get_sensor_name(ctx));
            rk_aiq_uapi2_camgroup_destroy(ctx->camgroup_ctx);
            ctx->camgroup_ctx = NULL;
            printf("%s:-------- deinit aiq camgroup end -------------\n", get_sensor_name(ctx));
        }
    }

    // pp buffers must be released while n_buffers is still valid (see above).
    if (ctx->pponeframe)
        uninit_device_pp_oneframe(ctx);
    uninit_device(ctx);
    close_device(ctx);
    if (ctx->pponeframe)
        close_device_pp_oneframe(ctx);

    if (ctx->fp)
    {
        fclose(ctx->fp);
        ctx->fp = NULL;
    }
}
// Signal handler for forced shutdown: stops capture on both contexts,
// tears them down via deinit(), then exits the process.
// NOTE(review): calls printf/deinit from a signal handler, which is not
// async-signal-safe -- acceptable for a demo, but worth confirming.
static void signal_handle(int signo)
{
    printf("force exit signo %d !!!\n", signo);

    if (g_main_ctx)
    {
#ifdef ENABLE_UAPI_TEST
        // Ask the uAPI test loop to finish and busy-wait for its ack.
        _if_quit = true;
        while (!_quit_done)
            printf("wait quit done !\n");
#endif
        g_main_ctx->frame_count = 0;
        stop_capturing(g_main_ctx);
        if (g_main_ctx->camGroup && g_second_ctx)
            stop_capturing(g_second_ctx);
        deinit(g_main_ctx);
        g_main_ctx = NULL;
    }
    if (g_second_ctx)
    {
        g_second_ctx->frame_count = 0;
        deinit(g_second_ctx);
        g_second_ctx = NULL;
    }
    exit(0);
}

#if 0
// NOTE: disabled legacy helper, kept for reference only -- toggles AE
// on/off via the v1 uAPI (the active code uses the uAPI2 family).
static int set_ae_onoff(const rk_aiq_sys_ctx_t* ctx, bool onoff)
{
    XCamReturn ret = XCAM_RETURN_NO_ERROR;
    Uapi_ExpSwAttr_t expSwAttr;

    ret = rk_aiq_user_api_ae_getExpSwAttr(ctx, &expSwAttr);
    expSwAttr.enable = onoff;
    ret = rk_aiq_user_api_ae_setExpSwAttr(ctx, expSwAttr);

    return 0;
}
#endif

// Poll the AE exposure-result info (debug/heartbeat hook for the stats
// thread). The query result itself is intentionally discarded; always
// returns 0.
//
// FIX: removed the dead local `ret` -- it was assigned but never read,
// triggering an unused-variable warning.
static int query_ae_state(const rk_aiq_sys_ctx_t *ctx)
{
    Uapi_ExpQueryInfo_t queryInfo;

    (void)rk_aiq_user_api2_ae_queryExpResInfo(ctx, &queryInfo);
    // printf("ae IsConverged: %d\n", queryInfo.IsConverged);

    return 0;
}

// Put AF into fixed mode and program a full manual contrast-AF measurement
// configuration (windows, gamma curve, gaussian filter, variance shifts,
// and the spotlight/LDG measurement block).
// NOTE(review): the numeric window sizes/offsets appear tuned for a
// specific sensor resolution -- confirm before reusing on other sensors.
static void set_af_manual_meascfg(const rk_aiq_sys_ctx_t *ctx)
{
    rk_aiq_af_attrib_t attr;
    // 17-point gamma LUT applied to the AF luma input.
    uint16_t gamma_y[RKAIQ_RAWAF_GAMMA_NUM] =
        {0, 45, 108, 179, 245, 344, 409, 459, 500, 567, 622, 676, 759, 833, 896, 962, 1023};

    // Read-modify-write the current AF attributes.
    rk_aiq_user_api2_af_GetAttrib(ctx, &attr);
    attr.AfMode = RKAIQ_AF_MODE_FIXED;

    attr.manual_meascfg.contrast_af_en = 1;
    attr.manual_meascfg.rawaf_sel = 0; // normal = 0; hdr = 1

    // Window A: near-full-frame; window B: small center region.
    attr.manual_meascfg.window_num = 2;
    attr.manual_meascfg.wina_h_offs = 2;
    attr.manual_meascfg.wina_v_offs = 2;
    attr.manual_meascfg.wina_h_size = 2580;
    attr.manual_meascfg.wina_v_size = 1935;

    attr.manual_meascfg.winb_h_offs = 1146;
    attr.manual_meascfg.winb_v_offs = 972;
    attr.manual_meascfg.winb_h_size = 300;
    attr.manual_meascfg.winb_v_size = 300;

    attr.manual_meascfg.gamma_flt_en = 1;
    memcpy(attr.manual_meascfg.gamma_y, gamma_y, RKAIQ_RAWAF_GAMMA_NUM * sizeof(uint16_t));

    // 3-tap gaussian pre-filter coefficients.
    attr.manual_meascfg.gaus_flt_en = 1;
    attr.manual_meascfg.gaus_h0 = 0x20;
    attr.manual_meascfg.gaus_h1 = 0x10;
    attr.manual_meascfg.gaus_h2 = 0x08;

    attr.manual_meascfg.afm_thres = 4;

    attr.manual_meascfg.lum_var_shift[0] = 0;
    attr.manual_meascfg.afm_var_shift[0] = 0;
    attr.manual_meascfg.lum_var_shift[1] = 4;
    attr.manual_meascfg.afm_var_shift[1] = 4;

    // Spotlight measurement with light-dependent gain (LDG) curve and
    // highlight thresholds.
    attr.manual_meascfg.sp_meas.enable = true;
    attr.manual_meascfg.sp_meas.ldg_xl = 10;
    attr.manual_meascfg.sp_meas.ldg_yl = 28;
    attr.manual_meascfg.sp_meas.ldg_kl = (255 - 28) * 256 / 45;
    attr.manual_meascfg.sp_meas.ldg_xh = 118;
    attr.manual_meascfg.sp_meas.ldg_yh = 8;
    attr.manual_meascfg.sp_meas.ldg_kh = (255 - 8) * 256 / 15;
    attr.manual_meascfg.sp_meas.highlight_th = 245;
    attr.manual_meascfg.sp_meas.highlight2_th = 200;
    rk_aiq_user_api2_af_SetAttrib(ctx, &attr);
}

/*-------------------------isp func end-------------------------*/
/*----------------sample_smartIr start----------------------------*/
// Drive the mechanical IR-cut filter through its v4l subdev control.
// on = true  -> filter IR (daytime); on = false -> let IR in (night).
// Best-effort: failures are logged but never fatal.
//
// FIX: a failed open() of the ir-cut device used to be silently ignored;
// now it is logged so misconfigured device paths are visible.
static void enableIrCutter(bool on)
{
    sample_smartIr_t *smartIr_ctx = &g_sample_smartIr_ctx;

    struct v4l2_control control;

    control.id = V4L2_CID_BAND_STOP_FILTER;
    if (on)
        control.value = 3; // filter ir
    else
        control.value = 0; // ir in

    int _fd = open(smartIr_ctx->ir_cut_v4ldev, O_RDWR | O_CLOEXEC);
    if (_fd != -1)
    {
        if (ioctl(_fd, VIDIOC_S_CTRL, &control) < 0)
        {
            printf("failed to set ircut value %d to device!\n", control.value);
        }
        close(_fd);
    }
    else
    {
        printf("failed to open ir-cut device '%s': %s\n",
               smartIr_ctx->ir_cut_v4ldev, strerror(errno));
    }
}

// Day mode: engage the IR-cut filter. The IR LED and ISP day parameters
// are handled by the caller (see stats_thread).
void switch_to_day()
{
    // ir-cutter on
    enableIrCutter(true);
    // ir off
    // switch to isp day params
}

// Night mode: disengage the IR-cut filter so IR light reaches the sensor.
// The IR LED and ISP night parameters are handled by the caller.
void switch_to_night()
{
    // switch to isp night params
    // ir-cutter off
    enableIrCutter(false);
    // ir on
}
//////////////////////////////////////////////////////////////
// Detached worker thread: pulls 3A stats from the AIQ engine, runs the
// smart-IR day/night decision, and on a state change toggles the IR-cut
// filter, IR LED (pwm + gpio) and the ISP day/night scene parameters.
// Runs until _if_quit is set or the AIQ engine stops/fails.
static void *stats_thread(void *args)
{
    demo_context_t *ctx = (demo_context_t *)args;
    XCamReturn ret;
    pthread_detach(pthread_self()); // detach so the thread's resources are reclaimed automatically
    printf("begin stats thread\n");
    rk_smart_ir_result_t ir_res;

    set_af_manual_meascfg(ctx->aiq_ctx);

    while (!_if_quit)
    {
        std::unique_lock<std::mutex> lock(mtx_stats);
        // Block until start_stats becomes true.
        cv_stats.wait(lock, []()
                      { return start_stats; });

        rk_aiq_isp_stats_t *stats_ref = NULL;
        // printf("{td-313} %s(%d): .............................666 \n", __FUNCTION__, __LINE__);
        ret = rk_aiq_uapi2_sysctl_get3AStatsBlk(ctx->aiq_ctx, &stats_ref, -1); // fetch 3A statistics (blocking, -1 = no timeout)

        if (stats_ref)
            rk_smart_ir_runOnce(ctx->ir_ctx, stats_ref, &ir_res); // run one day/night decision step
        // printf("SAMPLE_SMART_IR: NOW is %s\n", ir_res.status == RK_SMART_IR_STATUS_DAY ? "DAY" : "Night");
        /* Uses AE luminance statistics to gauge ambient brightness and AWB
           statistics to gauge the infrared-light share; these are combined
           to decide whether to switch between day and night mode. */
        // printf("SAMPLE_SMART_IR: Now is %s\n", ir_res.status == RK_SMART_IR_STATUS_DAY ? stats_day : stats_night);

        if (ir_res.status == RK_SMART_IR_STATUS_DAY)
        {
            // Edge-triggered: act only on the first DAY result after a change.
            flag_day++;
            flag_night = 0;
            if (flag_day == 1)
            {
                // 1) ir-cutter on; soft light-sensing enabled (an automatic flash working mode)
                // printf("Day:1. ir-cutter on!\n");
                switch_to_day();

                // 2) ir-led off
                // printf("Day:2. Day status:need to close led\n");
                system("echo 0 >/sys/class/pwm/pwmchip1/pwm0/enable");
                system("echo a > /sys/devices/virtual/adw/adwdev/adwgpio");

                // 3)switch to isp day params
                //  printf("Day:3. Switch to day paragram ^^^!\n");
                switch_rst = rk_aiq_uapi2_sysctl_switch_scene(ctx->aiq_ctx, main_scene, stats_day);
                // printf("Day: switch_rst=%d\n",switch_rst);
            }
        }
        else if (ir_res.status == RK_SMART_IR_STATUS_NIGHT)
        {
            flag_night++;
            flag_day = 0;
            if (flag_night == 1)
            {
                // 1) switch to isp night params
                // printf("Night:1.Switch to night paragram!\n");
                switch_rst = rk_aiq_uapi2_sysctl_switch_scene(ctx->aiq_ctx, main_scene, stats_night);
                // printf("Night: switch_rst=%d\n",switch_rst);

                // 2) ir-cutter off; soft light-sensing disabled
                // printf("Night:2.ir-cutter off!\n");
                switch_to_night();

                // 3) ir-led on
                // printf("Night: 3.status:need to open led!\n");
                system("echo 1 >/sys/class/pwm/pwmchip1/pwm0/enable");
                system("echo b > /sys/devices/virtual/adw/adwdev/adwgpio");
            }
        }

        if (ret == XCAM_RETURN_NO_ERROR && stats_ref != NULL)
        {
            // printf("get one stats frame id %d \n", stats_ref->frame_id);
            query_ae_state(ctx->aiq_ctx);
            // print_af_stats(stats_ref);
            rk_aiq_uapi2_sysctl_release3AStatsRef(ctx->aiq_ctx, stats_ref); // must release every stats block we obtained
        }
        else
        {
            // No stats: decide whether to retry or bail out based on ret.
            if (ret == XCAM_RETURN_NO_ERROR)
            {
                printf("aiq has stopped !\n");
                break;
            }
            else if (ret == XCAM_RETURN_ERROR_TIMEOUT)
            {
                printf("aiq timeout!\n");
                continue;
            }
            else if (ret == XCAM_RETURN_ERROR_FAILED)
            {
                printf("aiq failed!\n");
                break;
            }
        }
    }
    printf("end stats thread\n");

    _quit_done = true;
    pthread_exit(NULL);
    return 0;
}

// Buffer-release callback stub: only logs the address; no actual freeing
// happens here.
void release_buffer(void *addr)
{
    printf("release buffer called: addr=%p\n", addr);
}

// Exercise the runtime tuning API with two JSON-patch payloads:
// one against the sensor calibration (resolution + CISFlip) and one
// against the AWB v21 parameters. Debug/demo helper only.
static void test_tuning_api(demo_context_t *ctx)
{
    // JSON patch: replace sensor resolution and CIS flip setting.
    std::string json_sensor_str = " \n\
        [{ \n\
            \"op\":\"replace\", \n\
            \"path\": \"/sensor_calib/resolution\", \n\
            \"value\": \n\
            { \"width\": 2222, \"height\": 2160} \n\
        }, { \n\
            \"op\":\"replace\", \n\
            \"path\": \"/sensor_calib/CISFlip\", \n\
            \"value\": 6\n\
        }]";

    printf("%s\n", json_sensor_str.c_str());
    rk_aiq_uapi2_sysctl_tuning(ctx->aiq_ctx, const_cast<char *>(json_sensor_str.c_str()));

    // JSON patch: replace an AWB gain-clip CCT table and a light-source name.
    std::string json_awb_str = " \n\
        [{ \n\
            \"op\":\"replace\", \n\
            \"path\": \"/wb_v21/autoExtPara/wbGainClip/cct\", \n\
            \"value\": \n\
            [100,200,300,40,50,60] \n\
        },{ \n\
            \"op\":\"replace\", \n\
            \"path\": \"/wb_v21/autoPara/lightSources/0/name\", \n\
            \"value\": \"aaaaaaaaa\" \n\
        }]";
    printf("%s\n", json_awb_str.c_str());
    rk_aiq_uapi2_sysctl_tuning(ctx->aiq_ctx, const_cast<char *>(json_awb_str.c_str()));

    printf("%s done ..\n", __func__);
}

// Configure the smart-IR day/night engine: set the ir-cut subdev path,
// the luminance/AWB switching thresholds, push the config into the IR
// context, and force the initial state to DAY.
static void load_ir_configs(demo_context_t *ctx)
{
    // sample_smartIr_t* smartIr_ctx = &g_sample_smartIr_ctx;
    // printf("{ccckkk} %s(%d): .............................666 \n", __FUNCTION__, __LINE__);
    rk_smart_ir_result_t ir_init_res;
    int ret = 1;
    ctx->ir_cut_v4ldev = NULL;
    ctx->ir_v4ldev = NULL;

    ctx->ir_cut_v4ldev = "/dev/v4l-subdev3";
    ctx->ir_configs.d2n_envL_th = 0.08f; // day-to-night luminance threshold, default 0.08f
    ctx->ir_configs.n2d_envL_th = 0.20f; // night-to-day luminance threshold, default 0.20f
    ctx->ir_configs.rggain_base = 1.0f;  // Rgain/Ggain baseline for night-to-day switching, default 1.0f
    ctx->ir_configs.bggain_base = 1.0f;  // Bgain/Ggain baseline for night-to-day switching, default 1.0f
    ctx->ir_configs.awbgain_rad = 0.10f; // awbgain filter radius for night-to-day switching, default 0
    ctx->ir_configs.awbgain_dis = 0.22f; // awbgain dispersion threshold for night-to-day switching, needs tuning (0.22f)
    ctx->ir_configs.switch_cnts_th = 30; // consecutive same-state results required before switching (was 100)
    ret = rk_smart_ir_config(ctx->ir_ctx, &ctx->ir_configs);
    // NOTE(review): this logs when ret == 0 -- presumably the success code
    // given the XCamReturn convention elsewhere; confirm intended polarity.
    if (!ret)
    {
        printf("{tttddd} %s(%d): .............................666 \n", __FUNCTION__, __LINE__);
    }
    // set initial status to day
    ir_init_res.status = RK_SMART_IR_STATUS_DAY;
    rk_smart_ir_set_status(ctx->ir_ctx, ir_init_res);
    switch_to_day();
}

/*
 * Bring up the ISP capture pipeline for one sensor:
 *  - resolve the sensor media entity bound to the video device and validate
 *    the requested HDR working mode against the sensor's supported formats;
 *  - open the output device(s);
 *  - when AIQ is enabled, pre-init / init / prepare / start the AIQ context
 *    (single camera or camera group), then init the device and start capture;
 *  - for TEST_CTL_TYPE_* control modes, loop forever exercising the selected
 *    stop / deinit / init / prepare / start sequence via `goto restart`
 *    (this branch never returns — presumably test-only; confirm).
 *
 * @param ctx per-camera demo context (device names, resolution, iq path, ...)
 * @param flg forwarded to read_frame() in the test-control loop
 */
static void rkisp_routine(demo_context_t *ctx, int flg)
{
    // char sns_entity_name[64];
    rk_aiq_working_mode_t work_mode = RK_AIQ_WORKING_MODE_NORMAL;

    // Map the requested hdrmode (2/3) onto the AIQ HDR working modes.
    if (ctx->hdrmode == 2)
        work_mode = RK_AIQ_WORKING_MODE_ISP_HDR2;
    else if (ctx->hdrmode == 3)
        work_mode = RK_AIQ_WORKING_MODE_ISP_HDR3;

    printf("work_mode %d\n", work_mode);

    // Resolve the sensor entity bound to our video device node
    // (sns_entity_name is a file-level buffer; its declaration is commented
    // out above).
    strcpy(sns_entity_name, rk_aiq_uapi2_sysctl_getBindedSnsEntNmByVd(get_dev_name(ctx)));
    printf("sns_entity_name:%s\n", sns_entity_name);
    // Extract the bare sensor name by skipping a fixed 6-character entity
    // prefix — assumes the "mXX_b_"-style naming; TODO confirm.
    sscanf(&sns_entity_name[6], "%s", ctx->sns_name);
    printf("sns_name:%s\n", ctx->sns_name);
    rk_aiq_static_info_t s_info;
    rk_aiq_uapi2_sysctl_getStaticMetas(sns_entity_name, &s_info);
    // check if hdr mode is supported
    if (work_mode != 0)
    {
        bool b_work_mode_supported = false;
        rk_aiq_sensor_info_t *sns_info = &s_info.sensor_info;
        for (int i = 0; i < SUPPORT_FMT_MAX; i++)
            // TODO, should decide the resolution firstly,
            // then check if the mode is supported on this
            // resolution
            if ((sns_info->support_fmt[i].hdr_mode == 5 /*HDR_X2*/ &&
                 work_mode == RK_AIQ_WORKING_MODE_ISP_HDR2) ||
                (sns_info->support_fmt[i].hdr_mode == 6 /*HDR_X3*/ &&
                 work_mode == RK_AIQ_WORKING_MODE_ISP_HDR3))
            {
                b_work_mode_supported = true;
                break;
            }

        // Fall back to normal mode if the sensor cannot do the requested HDR.
        if (!b_work_mode_supported)
        {
            printf("\nWARNING !!!"
                   "work mode %d is not supported, changed to normal !!!\n\n",
                   work_mode);
            work_mode = RK_AIQ_WORKING_MODE_NORMAL;
        }
    }

    printf("%s:-------- open output dev -------------\n", get_sensor_name(ctx));
    open_device(ctx);
    if (ctx->pponeframe)
        open_device_pp_oneframe(ctx);

    if (ctx->rkaiq)
    {
        XCamReturn ret = XCAM_RETURN_NO_ERROR;
        rk_aiq_tb_info_t tb_info;
        // NOTE(review): magic is set to sizeof(struct) - 2 — looks like a
        // version/ABI handshake convention expected by the AIQ library; confirm.
        tb_info.magic = sizeof(rk_aiq_tb_info_t) - 2;
        tb_info.is_pre_aiq = false;
        ret = rk_aiq_uapi2_sysctl_preInit_tb_info(sns_entity_name, &tb_info);
        // Scene pre-init (optional; defaults to normal/day). Selects the
        // scene parameter set inside the json IQ file.
        if (work_mode == RK_AIQ_WORKING_MODE_NORMAL)
            ret = rk_aiq_uapi2_sysctl_preInit_scene(sns_entity_name, "normal", "day");
        else
            ret = rk_aiq_uapi2_sysctl_preInit_scene(sns_entity_name, "hdr", "day");
        if (ret < 0)
            ERR("%s: failed to set %s scene\n",
                get_sensor_name(ctx),
                work_mode == RK_AIQ_WORKING_MODE_NORMAL ? "normal" : "hdr");

        if (strlen(ctx->iqpath))
        {
            if (!ctx->camGroup)
            {
                // Single-camera path: create the AIQ context directly.
                printf("{tttddd-0319} %s(%d): .............................666 \n", __FUNCTION__, __LINE__);
                ctx->aiq_ctx = rk_aiq_uapi2_sysctl_init(sns_entity_name, ctx->iqpath, NULL, NULL);
            }
            else
            {
                // create once for mulitple cams
                if (ctx->dev_using == 1)
                {
                    char sns_entity_name2[64] = {'\0'};
                    bool has_dev2 = false;
                    if (strlen(ctx->dev_name2))
                    {
                        strcpy(sns_entity_name2, rk_aiq_uapi2_sysctl_getBindedSnsEntNmByVd(ctx->dev_name2));
                        printf("sns_entity_name2:%s\n", sns_entity_name2);
                        // sscanf(&sns_entity_name2[6], "%s", ctx->sns_name);
                        // printf("sns_name2:%s\n", ctx->sns_name);
                        has_dev2 = true;
                    }

                    // Build the camera-group configuration (1 or 2 sensors).
                    rk_aiq_camgroup_instance_cfg_t camgroup_cfg;
                    memset(&camgroup_cfg, 0, sizeof(camgroup_cfg));
                    camgroup_cfg.sns_num = 1;
                    if (has_dev2)
                        camgroup_cfg.sns_num++;
                    camgroup_cfg.sns_ent_nm_array[0] = sns_entity_name;
                    if (has_dev2)
                        camgroup_cfg.sns_ent_nm_array[1] = sns_entity_name2;
                    camgroup_cfg.config_file_dir = ctx->iqpath;
                    camgroup_cfg.overlap_map_file = "srcOverlapMap.bin";
                    ctx->camgroup_ctx = rk_aiq_uapi2_camgroup_create(&camgroup_cfg);
                    if (!ctx->camgroup_ctx)
                    {
                        printf("create camgroup ctx error !\n");
                        exit(1);
                    }

#ifdef CUSTOM_GROUP_AE_DEMO_TEST
                    // Register/enable the third-party AE algorithm for the group.
                    rk_aiq_customeAe_cbs_t cbs = {
                        .pfn_ae_init = custom_ae_init,
                        .pfn_ae_run = custom_ae_run,
                        .pfn_ae_ctrl = custom_ae_ctrl,
                        .pfn_ae_exit = custom_ae_exit,
                    };
                    rk_aiq_uapi2_customAE_register((const rk_aiq_sys_ctx_t *)(ctx->camgroup_ctx), &cbs);
                    rk_aiq_uapi2_customAE_enable((const rk_aiq_sys_ctx_t *)(ctx->camgroup_ctx), true);
#endif
#ifdef CUSTOM_GROUP_AWB_DEMO_TEST
                    // Register/enable the third-party AWB algorithm for the group.
                    rk_aiq_customeAwb_cbs_t awb_cbs = {
                        .pfn_awb_init = custom_awb_init,
                        .pfn_awb_run = custom_awb_run,
                        .pfn_awb_ctrl = custom_awb_ctrl,
                        .pfn_awb_exit = custom_awb_exit,
                    };
                    rk_aiq_uapi2_customAWB_register((const rk_aiq_sys_ctx_t *)(ctx->camgroup_ctx), &awb_cbs);
                    rk_aiq_uapi2_customAWB_enable((const rk_aiq_sys_ctx_t *)(ctx->camgroup_ctx), true);
#endif
                }
            }
        }
        else
        {
            // No IQ path supplied: fall back to the built-in iqfiles location.
            if (ctx->camGroup)
            {
                printf("error! should specify iq path !\n");
                exit(1);
            }
#ifndef ANDROID
            printf("{ccckkk} %s(%d): .............................666 \n", __FUNCTION__, __LINE__);
            rk_aiq_uapi2_sysctl_preInit(sns_entity_name, RK_AIQ_WORKING_MODE_NORMAL, "ov5695_TongJu_CHT842-MD.xml");
            ctx->aiq_ctx = rk_aiq_uapi2_sysctl_init(sns_entity_name, "/oem/etc/iqfiles", NULL, NULL);
#else
            printf("{ccckkk} %s(%d): .............................666 \n", __FUNCTION__, __LINE__);
            // ctx->aiq_ctx = rk_aiq_uapi2_sysctl_init(sns_entity_name, "/data/etc/iqfiles", NULL, NULL);
            ctx->aiq_ctx = rk_aiq_uapi2_sysctl_init(sns_entity_name, "/oem/etc/iqfiles", NULL, NULL);
#endif
        }
        if (ctx->aiq_ctx)
        {
            // ---- single-camera AIQ prepare/start path ----
            printf("%s:-------- init mipi tx/rx -------------\n", get_sensor_name(ctx));
            if (ctx->writeFileSync)
                rk_aiq_uapi2_debug_captureRawYuvSync(ctx->aiq_ctx, CAPTURE_RAW_AND_YUV_SYNC);
#ifdef CUSTOM_AE_DEMO_TEST
            // ae_reg.stAeExpFunc.pfn_ae_init = ae_init;
            // ae_reg.stAeExpFunc.pfn_ae_run = ae_run;
            // ae_reg.stAeExpFunc.pfn_ae_ctrl = ae_ctrl;
            // ae_reg.stAeExpFunc.pfn_ae_exit = ae_exit;
            // rk_aiq_AELibRegCallBack(ctx->aiq_ctx, &ae_reg, 0);
            rk_aiq_customeAe_cbs_t cbs = {
                .pfn_ae_init = custom_ae_init,
                .pfn_ae_run = custom_ae_run,
                .pfn_ae_ctrl = custom_ae_ctrl,
                .pfn_ae_exit = custom_ae_exit,
            };
            rk_aiq_uapi2_customAE_register(ctx->aiq_ctx, &cbs);
            rk_aiq_uapi2_customAE_enable(ctx->aiq_ctx, true);
#endif
#ifdef CUSTOM_AWB_DEMO_TEST
            rk_aiq_customeAwb_cbs_t awb_cbs = {
                .pfn_awb_init = custom_awb_init,
                .pfn_awb_run = custom_awb_run,
                .pfn_awb_ctrl = custom_awb_ctrl,
                .pfn_awb_exit = custom_awb_exit,
            };
            rk_aiq_uapi2_customAWB_register(ctx->aiq_ctx, &awb_cbs);
            rk_aiq_uapi2_customAWB_enable(ctx->aiq_ctx, true);
#endif
            if (ctx->isOrp)
            {
                // Offline raw processing: map the fourcc string from the
                // command line to the corresponding raw pixel format.
                rk_aiq_raw_prop_t prop;
                if (strcmp(ctx->orpRawFmt, "BA81") == 0)
                    prop.format = RK_PIX_FMT_SBGGR8;
                else if (strcmp(ctx->orpRawFmt, "GBRG") == 0)
                    prop.format = RK_PIX_FMT_SGBRG8;
                else if (strcmp(ctx->orpRawFmt, "RGGB") == 0)
                    prop.format = RK_PIX_FMT_SRGGB8;
                else if (strcmp(ctx->orpRawFmt, "GRBG") == 0)
                    prop.format = RK_PIX_FMT_SGRBG8;
                else if (strcmp(ctx->orpRawFmt, "BG10") == 0)
                    prop.format = RK_PIX_FMT_SBGGR10;
                else if (strcmp(ctx->orpRawFmt, "GB10") == 0)
                    prop.format = RK_PIX_FMT_SGBRG10;
                else if (strcmp(ctx->orpRawFmt, "RG10") == 0)
                    prop.format = RK_PIX_FMT_SRGGB10;
                else if (strcmp(ctx->orpRawFmt, "BA10") == 0)
                    prop.format = RK_PIX_FMT_SGRBG10;
                else if (strcmp(ctx->orpRawFmt, "BG12") == 0)
                    prop.format = RK_PIX_FMT_SBGGR12;
                else if (strcmp(ctx->orpRawFmt, "GB12") == 0)
                    prop.format = RK_PIX_FMT_SGBRG12;
                else if (strcmp(ctx->orpRawFmt, "RG12") == 0)
                    prop.format = RK_PIX_FMT_SRGGB12;
                else if (strcmp(ctx->orpRawFmt, "BA12") == 0)
                    prop.format = RK_PIX_FMT_SGRBG12;
                else if (strcmp(ctx->orpRawFmt, "BG14") == 0)
                    prop.format = RK_PIX_FMT_SBGGR14;
                else if (strcmp(ctx->orpRawFmt, "GB14") == 0)
                    prop.format = RK_PIX_FMT_SGBRG14;
                else if (strcmp(ctx->orpRawFmt, "RG14") == 0)
                    prop.format = RK_PIX_FMT_SRGGB14;
                else if (strcmp(ctx->orpRawFmt, "BA14") == 0)
                    prop.format = RK_PIX_FMT_SGRBG14;
                else
                    prop.format = RK_PIX_FMT_SBGGR10; // default when the fourcc is unrecognized
                prop.frame_width = ctx->orpRawW;
                prop.frame_height = ctx->orpRawH;
                prop.rawbuf_type = RK_AIQ_RAW_FILE;
                rk_aiq_uapi2_sysctl_prepareRkRaw(ctx->aiq_ctx, prop);
            }
            /*
             * rk_aiq_uapi_setFecEn(ctx->aiq_ctx, true);
             * rk_aiq_uapi_setFecCorrectDirection(ctx->aiq_ctx, FEC_CORRECT_DIRECTION_Y);
             */
#ifdef TEST_MEMS_SENSOR_INTF
            rk_aiq_mems_sensor_intf_t g_rkiio_aiq_api;
            rk_aiq_uapi2_sysctl_regMemsSensorIntf(ctx->aiq_ctx, &g_rkiio_aiq_api);
#endif

#if 0
            test_tuning_api(ctx);
#endif
            XCamReturn ret = rk_aiq_uapi2_sysctl_prepare(ctx->aiq_ctx, ctx->width, ctx->height, work_mode);

            if (ret != XCAM_RETURN_NO_ERROR)
                ERR("%s:rk_aiq_uapi2_sysctl_prepare failed: %d\n", get_sensor_name(ctx), ret);
            else
            {
                ret = rk_aiq_uapi2_setMirrorFlip(ctx->aiq_ctx, false, false, 3);
                // Ignore failure

                if (ctx->isOrp)
                {
                    rk_aiq_uapi2_sysctl_registRkRawCb(ctx->aiq_ctx, release_buffer);
                }
                ret = rk_aiq_uapi2_sysctl_start(ctx->aiq_ctx);

                init_device(ctx);
                if (ctx->pponeframe)
                    init_device_pp_oneframe(ctx);
                if (ctx->ctl_type == TEST_CTL_TYPE_DEFAULT)
                {
                    start_capturing(ctx);
                }
                if (ctx->pponeframe)
                    start_capturing_pp_oneframe(ctx);
                printf("%s:-------- stream on mipi tx/rx -------------\n", get_sensor_name(ctx));

                // Stress-test modes: repeatedly capture 60 frames and cycle
                // the selected subset of the AIQ lifecycle. Loops forever.
                if (ctx->ctl_type != TEST_CTL_TYPE_DEFAULT)
                {
                restart:
                    static int test_ctl_cnts = 0;
                    ctx->frame_count = 60;
                    start_capturing(ctx);
                    while ((ctx->frame_count-- > 0))
                        read_frame(ctx, flg);
                    stop_capturing(ctx);
                    printf("+++++++ TEST SYSCTL COUNTS %d ++++++++++++ \n", test_ctl_cnts++);
                    printf("aiq stop .....\n");
                    rk_aiq_uapi2_sysctl_stop(ctx->aiq_ctx, false);
                    if (ctx->ctl_type == TEST_CTL_TYPE_REPEAT_INIT_PREPARE_START_STOP_DEINIT)
                    {
                        printf("aiq deinit .....\n");
                        rk_aiq_uapi2_sysctl_deinit(ctx->aiq_ctx);
                        printf("aiq init .....\n");
                        // NOTE(review): this alternates work_mode between the
                        // configured HDR mode and normal on each iteration —
                        // presumably deliberate for the repeat-init test; confirm.
                        if (work_mode == RK_AIQ_WORKING_MODE_NORMAL)
                        {
                            ret = rk_aiq_uapi2_sysctl_preInit_scene(sns_entity_name, "normal", "day");
                            if (ctx->hdrmode == 2)
                                work_mode = RK_AIQ_WORKING_MODE_ISP_HDR2;
                            else if (ctx->hdrmode == 3)
                                work_mode = RK_AIQ_WORKING_MODE_ISP_HDR3;
                        }
                        else
                        {
                            ret = rk_aiq_uapi2_sysctl_preInit_scene(sns_entity_name, "hdr", "day");
                            work_mode = RK_AIQ_WORKING_MODE_NORMAL;
                        }
                        if (ret < 0)
                            ERR("%s: failed to set %s scene\n",
                                get_sensor_name(ctx),
                                work_mode == RK_AIQ_WORKING_MODE_NORMAL ? "normal" : "hdr");
                        printf("{tttdddd} %s(%d): .............................666 \n", __FUNCTION__, __LINE__);
                        ctx->aiq_ctx = rk_aiq_uapi2_sysctl_init(sns_entity_name, ctx->iqpath, NULL, NULL);
                        printf("aiq prepare .....\n");
                        // (shadows the outer `ret`; result is not checked here)
                        XCamReturn ret = rk_aiq_uapi2_sysctl_prepare(ctx->aiq_ctx, ctx->width, ctx->height, work_mode);
                    }
                    else if (ctx->ctl_type == TEST_CTL_TYPE_REPEAT_PREPARE_START_STOP)
                    {
                        printf("aiq prepare .....\n");
                        XCamReturn ret = rk_aiq_uapi2_sysctl_prepare(ctx->aiq_ctx, ctx->width, ctx->height, work_mode);
                    }
                    else if (ctx->ctl_type == TEST_CTL_TYPE_REPEAT_START_STOP)
                    {
                        // do nothing
                    }
                    printf("aiq start .....\n");
                    ret = rk_aiq_uapi2_sysctl_start(ctx->aiq_ctx);
                    printf("aiq restart .....\n");
                    goto restart;
                }
            }
        }
        else if (ctx->camgroup_ctx)
        {
            // ---- camera-group prepare/start path ----
            // only do once for cam group
            if (ctx->dev_using == 1)
            {
                XCamReturn ret = rk_aiq_uapi2_camgroup_prepare(ctx->camgroup_ctx, work_mode);

                if (ret != XCAM_RETURN_NO_ERROR)
                    ERR("%s:rk_aiq_uapi2_camgroup_prepare failed: %d\n", get_sensor_name(ctx), ret);
                else
                {

                    ret = rk_aiq_uapi2_camgroup_start(ctx->camgroup_ctx);
                }
            }
            init_device(ctx);
            start_capturing(ctx);
        }
    }
    else
    {
        // AIQ disabled: plain V4L2 init + capture start.
        init_device(ctx);
        if (ctx->pponeframe)
            init_device_pp_oneframe(ctx);
        start_capturing(ctx);
        if (ctx->pponeframe)
            start_capturing_pp_oneframe(ctx);
    }
}

// Start the SmartIR day/night controller: build its runtime context on top of
// the AIQ context, push the IR switching configuration, and flag the demo
// context as started.
static void sample_smartIr_start(demo_context_t *ctx)
{
    if (!ctx->aiq_ctx)
    {
        // SmartIR expects a valid AIQ context; log and continue as before.
        printf("{tttddd} %s(%d): ...........aiq_ctx==NULL..................666 \n", __FUNCTION__, __LINE__);
    }

    // Create the SmartIR runtime environment, then apply our tuning values.
    ctx->ir_ctx = rk_smart_ir_init(ctx->aiq_ctx);
    load_ir_configs(ctx);

    // Reset the quit flag and mark the controller as running.
    ctx->tquit = false;
    ctx->started = true;
}

/*
 * Tear down the ISP pipeline: wait for the offline-raw-processing loop (if
 * active) to acknowledge the stop request, wait for the blocked-stats test
 * thread (if compiled in), then deinit the main context and release DRM /
 * display resources.
 */
static void taiic_isp_release()
{
    if (main_ctx.isOrp)
    {
        // Ask the offline-raw-processing loop to stop and poll until it acks.
        main_ctx.orpStop = true;
        while (!main_ctx.orpStopped)
        {
            printf("wait orp stopped ... \n");
            usleep(500000);
        }
    }

#ifdef TEST_BLOCKED_STATS_FUNC
    _if_quit = true;
    while (!_quit_done)
    {
        printf("wait quit done !\n");
        // fix: previously busy-spun with no delay, pegging a core and
        // flooding the log; sleep briefly between polls like the loop above.
        usleep(100000);
    }
#endif
    deinit(&main_ctx);

#if ISPDEMO_ENABLE_DRM
    if (strlen(main_ctx.dev_name) && strlen(main_ctx.dev_name2))
    {
        display_exit();
    }
    deInitDrmDsp();
#endif
}

/*
 * ISP capture-thread entry point. Blocks on cv_vi until start_vi becomes
 * true, then reads frames in one of two modes:
 *  - data-streaming mode (VideoModelInf_or_DataType == false): forward frames
 *    for as long as start_video_flg stays set;
 *  - model-inference mode (VideoModelInf_or_DataType == true): capture a
 *    fixed window of ORI_MODEL_C frames after skipping SKIP_ISP_FRAMES.
 *
 * NOTE(review): mtx_vi stays locked for the entire capture burst (the
 * unique_lock is released only at the end of each outer-loop iteration) —
 * confirm the writers of start_vi/start_video_flg do not need the mutex
 * while capture is in progress.
 *
 * @param args demo_context_t* for the camera being read
 */
static void *taiic_isp_image_capture_func(void *args)
{
    demo_context_t *ctx = (demo_context_t *)args;
    int isp_img_cnt = 0;
    // Fetch ISP images in an endless loop.
    while (1)
    {
        std::unique_lock<std::mutex> lock(mtx_vi);
        // Block until start_vi becomes true.
        cv_vi.wait(lock, []()
                   { return start_vi; });
        isp_img_cnt = 0;

        // flow video data
        while (start_video_flg)
        {
            // Skip warm-up frames; only read when streaming raw data.
            if ((isp_img_cnt >= SKIP_ISP_FRAMES) && (!VideoModelInf_or_DataType))
            {
                read_frame(ctx, isp_img_cnt);
            }
            isp_img_cnt++;
            // start_video_flg may be cleared by another thread mid-iteration;
            // reset state so the next wake-up starts a fresh session.
            if ((!start_video_flg))
            {
                isp_img_cnt = -1;
                start_vi = false;
            }
        }

        // model inference image module
        if (VideoModelInf_or_DataType)
        {
            while (isp_img_cnt < (ORI_MODEL_C + SKIP_ISP_FRAMES))
            {
                read_frame(ctx, isp_img_cnt);
                isp_img_cnt++;
            }
            start_vi = false; // capture window done; wait for the next wake-up
        }
    }
    pthread_exit(NULL); // unreachable: the loop above never exits
}
/*-------------------------ai function-----------------------------*/
static void ai_frame_data_cback(uint8_t *data, unsigned int len, unsigned short frame_num, uint8_t type)
{
    printf("ai callback:%d,%d,type=%d\n", len, frame_num, type);
    frameNum = frame_num;

    if ((message_type == 10) && (frameNum == (AI_COUNT - 1)))
    {
        if (version != 0)
        {
            data_type_finish_send_func(dataType_air);
        }

        ai_end = 1;

        aivi_save_end(vi_end, ai_end);
        ai_end = 0;
        // reset
        is_model_run = RK_FALSE;
        // only_air_model = false;
    }
    else if (message_type == 9) // avsr+result
    {
        if (frameNum == (AI_COUNT - 1))
        {
            ai_end = 1;
            if ((vi_end == 1) && (version == 0))
            {
                aivi_save_end(vi_end, ai_end);
            }

            if (version != 0)
            {
                data_type_finish_send_func(dataType_air);
            }
        }
    }
    else if (!only_result) // no request result
    {
        if ((frameNum == 105) && ((only_video_air_model) || (only_video_bone_model) || (only_video_air_bone_model)))
        {
            // wake up isp image capture thread
            start_vi = true;
            cv_vi.notify_one(); // 通知等待的线程，条件已经满足
        }
        else if (frameNum == (AI_COUNT - 1))
        {
            ai_end = 1;
            if (type == 1) // air
            {
                if (version != 0)
                {
                    data_type_finish_send_func(dataType_air); // 0x32 0x02 0x00 0x02
                }
            }
            else if (type == 2)
            {
                data_type_finish_send_func(dataType_bone); // 0x32 0x02 0x00 0x04
            }
            else if (type == 3)
            {
                data_type_finish_send_func(dataType_air_bone); // 0x32 0x02 0x00 0x06
            }

            if (AudioModelInf_or_DataType)
            {
                if (only_air_model || only_bone_model || only_air_bone_model)
                {
                    // only_air_model = false;
                    // only_bone_model = false;
                    // only_air_bone_model = false;

                    // 总的数据结束包发送
                    server_client_frame_package_function(msg_finish_send_instruct, NULL, 0); // 0x32 00
                    /*----------嘀两声:表示socket发送完成----------*/
                    mtx_uart.lock();
                    uart_main_func(1, 2, 0, 0); // 滴两声表示发送完成
                    mtx_uart.unlock();
                }
            }
            is_model_run = RK_FALSE;
        }
    }
}

/*------------------------Server socket-------------------------*/
// Termination-signal handler: report which signal arrived, shut the server
// socket down cleanly, then exit using the signal number as the exit status.
void signalHandler(int signum)
{
    std::cout << "\nInterrupt signal (" << signum << ") received.\n";
    serverSocket.stop(); // release the listening socket before terminating
    exit(signum);
}

// Connection-state callback from the server socket: log successful client
// attachments; disconnects are silently ignored (as before).
void connect_state_cback(bool isConnect)
{
    if (!isConnect)
        return;
    printf("server connected client successfully\n");
}
// Delete a leftover PCM capture file if it exists, logging the outcome.
// `label` ("air"/"bone") is interpolated into the log lines so the output
// matches the original per-file messages byte for byte.
static void remove_stale_pcm(const char *path, const char *label)
{
    if (fileExists(path))
    {
        if (remove(path) == 0)
        {
            printf("%s pcm file exists, and delete successfully\n", label);
        }
        else
        {
            printf("%s pcm file delete failed\n", label);
        }
    }
}

/*
 * Dispatch an incoming client request. Resets the per-session state, removes
 * stale PCM capture files, then configures the session flags according to the
 * message type:
 *  - MSG_REQUEST_BATTERY : reply with the cached battery level
 *  - MSG_SET_DATA_TYPE   : raw data streaming (video / air / bone mic combos)
 *  - MSG_SET_MODEL       : model inference, optionally with result labels
 *  - MSG_SET_DATA_MS     : custom audio capture duration
 *  - MSG_SET_PIXESL_WH   : custom image resolution
 *
 * @param msgType protocol message id
 * @param request parsed request payload (type / tag / version / timeLength)
 */
void message_cback(unsigned char msgType, Request request)
{
    // Reset per-session state before handling the new request.
    ai_end = 0;
    vi_end = 0;
    is_model_run = RK_FALSE;
    only_result = false;

    // Remove stale capture files so a new session starts clean.
    remove_stale_pcm(air_pcm_path, "air");
    remove_stale_pcm(bone_pcm_path, "bone");

    request.printInfo();
    switch (msgType)
    {
    case MSG_REQUEST_BATTERY:
        printf("Request battery\n");
        power_info_send_func(current_power_value);
        break;
    case MSG_SET_DATA_TYPE:
        printf("Request flow data type\n");
        // 1. Data type: decide which raw data stream(s) were requested.
        if (request.type == RequestType::VIDEO) // video data only
        {
            only_video = true;
            VideoModelInf_or_DataType = false;
        }
        else if (request.type == RequestType::MIC_AIR) // air-conduction mic only
        {
            only_air = true;
            AudioModelInf_or_DataType = false;
        }
        else if (request.type == RequestType::MIC_BONE) // bone-conduction mic only
        {
            only_bone = true;

            AudioModelInf_or_DataType = false;
        }
        else if (request.type == RequestType::MIC_AIR_AND_BONE) // air + bone mic data
        {
            printf("request air_bone_mic\n");
            only_air_bone = true;

            AudioModelInf_or_DataType = false;
        }
        else if (request.type == RequestType::VIDEO_AND_MIC_AIR) // video + air mic data
        {
            printf("request video_air_mic\n");
            only_video_air = true;

            VideoModelInf_or_DataType = false;
            AudioModelInf_or_DataType = false;
        }
        else if (request.type == RequestType::VIDEO_AND_MIC_BONE) // video + bone mic data
        {
            printf("request video_bone_mic\n");
            only_video_bone = true;

            VideoModelInf_or_DataType = false;
            AudioModelInf_or_DataType = false;
        }
        else if (request.type == RequestType::VIDEO_MIC_AIR_AND_BONE) // video + air + bone mic data
        {
            printf("request video_air_bone_mic\n");
            only_video_air_bone = true;

            VideoModelInf_or_DataType = false;
            AudioModelInf_or_DataType = false;
        }

        break;
    case MSG_SET_MODEL:
        printf("Request model inference\n");
        only_video_model = false;
        only_air_model = false;
        only_bone_model = false;
        only_video_air_model = false;
        only_video_bone_model = false;
        only_video_air_bone_model = false;

        version = request.version;
        printf("version:%d\n", version);
        // 2. Tag: does the client want recognition results back?
        if (request.tag == RequestTag::TAG)
        {
            only_result = true;
        }
        else if (request.tag == RequestTag::NONE)
        {
            only_result = false;
            message_type = 11;
        }
        // 3. Model selection.
        if (request.type == RequestType::VIDEO) // video model
        {
            only_video_model = true;
            VideoModelInf_or_DataType = true;

            if (only_result)
            {
                message_type = 8; // vsr:8
            }
        }
        else if (request.type == RequestType::MIC_AIR) // air audio model
        {
            only_air_model = true;
            AudioModelInf_or_DataType = true;
            if (only_result)
            {
                message_type = 10; // asr:10
            }
        }
        else if (request.type == RequestType::MIC_BONE) // bone audio model
        {
            only_bone_model = true;
            AudioModelInf_or_DataType = true;
        }
        else if (request.type == RequestType::MIC_AIR_AND_BONE) // air + bone audio model
        {
            only_air_bone_model = true;
            AudioModelInf_or_DataType = true;
        }
        else if (request.type == RequestType::VIDEO_AND_MIC_AIR) // video model + audio model
        {
            only_video_air_model = true;
            VideoModelInf_or_DataType = true;
            AudioModelInf_or_DataType = true;
            if (only_result)
            {
                message_type = 9; // avsr:9
            }
            else
            {
                message_type = 90; // avsr+no result:90
            }
        }
        else if (request.type == RequestType::VIDEO_AND_MIC_BONE) // video model + audio model
        {
            only_video_bone_model = true;
            VideoModelInf_or_DataType = true;
            AudioModelInf_or_DataType = true;
        }
        else if (request.type == RequestType::VIDEO_MIC_AIR_AND_BONE)
        {
            only_video_air_bone_model = true;
            VideoModelInf_or_DataType = true;
            AudioModelInf_or_DataType = true;
        }
        break;
    case MSG_SET_DATA_MS:
        printf("Request audio data time length\n");
        custom_tl = request.timeLength; // requested air-audio duration
        if (custom_tl > 0)
        {
            only_mic_custom_tl = true;
        }
        break;

    case MSG_HEARTBEAT:
        printf("MSG_HEARTBEAT:\n");
        break;
    case MSG_SET_PIXESL_WH: // change requested image resolution (pixels)
        printf("Request image width and height\n");

        only_custom_video = true;
        // fix: this case previously fell through into `default` and printed
        // "default" for a successfully handled message.
        break;
    default:
        printf("default\n");
        break;
    }
}

/*
 * Server-socket thread entry point: initialize the server with the connect
 * and message callbacks, then enter its run loop (blocks until the server
 * stops).
 *
 * @param args unused
 * @return NULL thread exit value
 */
static void *taiic_socket_type_data(void *args)
{
    (void)args; // unused

    if (!serverSocket.init(connect_state_cback, message_cback, ipSet))
    {
        std::cerr << "Server initialization failed!" << std::endl;
    }
    // Run the server loop.
    serverSocket.run();

    // fix: the function returns void* but previously had no return
    // statement, which is undefined behavior in C++.
    return NULL;
}

/*
 * Battery-status query callback: log capacity and voltage, and cache the
 * capacity so MSG_REQUEST_BATTERY can answer without re-querying hardware.
 *
 * @param power_cap battery capacity (percent)
 * @param power_vol battery voltage; divided by 1000 and printed as mV
 *                  (presumably supplied in microvolts — TODO confirm)
 */
static void power_callback(int power_cap, long power_vol)
{
    // fix: power_vol / 1000 is `long`, so it must be printed with %ld —
    // passing a long to %d is undefined behavior on LP64 targets.
    printf("power is %d,voltage=%ldmV\n", power_cap, power_vol / 1000);
    current_power_value = power_cap;
}

/*
 * Finalize a capture session and run speech recognition.
 * Loads the recorded air-conduction PCM, extracts uint8 MFCC features into
 * the shared audio input tensor, then runs either the audio-only ASR model
 * (vi==0, ai==1) or the audio+video AVSR model (vi==1, ai==1) and sends the
 * recognized label and confidence to the client, followed by the
 * end-of-result and overall end-of-data packets and a double beep.
 *
 * @param vi 1 if the video capture finished for this session
 * @param ai 1 if the audio capture finished for this session
 */
static void aivi_save_end(int vi, int ai)
{
    // Load the recorded air-conduction PCM as model input.
    audioData = loadAudioData(air_pcm_path);
    // MFCC feature extraction (output type: uint8).
    extract_mfcc(audioData, mfcc_feat_u8);
    // Copy the MFCC matrix into the [1][frames][coef][1] input tensor layout.
    for (int i = 0; i < numFrames; i++)
    {
        for (int j = 0; j < num_mfcc_coef; j++)
        {
            avsr_audio_input_data[0][i][j][0] = static_cast<RK_U8>(mfcc_feat_u8(i, j));
        }
    }
    // Announce that a recognition result is about to be sent.
    results_presend_func();
    // ASR model inference (audio only).
    if ((vi == 0) && (ai == 1))
    {
        // Bind the model input directly to the prepared audio tensor.
        // fix: a `new unsigned char[]` buffer was previously allocated here
        // and immediately overwritten by this assignment, leaking it.
        unsigned char *asr_input_data[taiic_asr_ctx->io_num.n_input];
        asr_input_data[0] = &avsr_audio_input_data[0][0][0][0];

        avsr_rknn_toolkit_data_refresh(taiic_asr_ctx, asr_input_data);
        int ret = rknn_run(taiic_asr_ctx->context, NULL);
        if (ret < 0)
            printf("asr rknn_run failed: %d\n", ret); // fix: failure was silently ignored
        MODEL_RESULT_S asr_result = avsr_rknn_toolkit_result_int8(taiic_asr_ctx);
        asr_result.intprob = static_cast<int>(round(asr_result.prob * 100));
        printf("ASR505:result_label=%d, prob=%f,intprob=%d\n", asr_result.label, asr_result.prob, asr_result.intprob);

        // Send label + confidence to the client.
        results_scores_send_func(asr_result.label, asr_result.intprob);
    }
    else if ((vi == 1) && (ai == 1)) // AVSR model inference (video + audio)
    {
        // Bind both inputs directly to the prepared tensors.
        // fix: both slots were previously `new`-allocated and immediately
        // overwritten, leaking both allocations.
        unsigned char *avsr_input_data[avsr_ctx->io_num.n_input];
        avsr_input_data[0] = &avsr_video_input_data[0][0][0][0];
        avsr_input_data[1] = &avsr_audio_input_data[0][0][0][0];

        avsr_rknn_toolkit_data_refresh(avsr_ctx, avsr_input_data);
        int ret = rknn_run(avsr_ctx->context, NULL);
        if (ret < 0)
            printf("avsr rknn_run failed: %d\n", ret); // fix: failure was silently ignored

        MODEL_RESULT_S avsr_result = avsr_rknn_toolkit_result_int8_opt(avsr_ctx);
        avsr_result.intprob = static_cast<int>(round(avsr_result.prob * 100));

        printf("AVSR300:result_label=%d, prob:%f,intprob=%d\n", avsr_result.label, avsr_result.prob, avsr_result.intprob);
        // Send label + confidence to the client.
        results_scores_send_func(avsr_result.label, avsr_result.intprob);
    }
    // Close out the result stream.
    data_type_finish_send_func(dataType_result);

    // Send the overall end-of-data packet.
    server_client_frame_package_function(msg_finish_send_instruct, NULL, 0); // 0x32 00
    /* beep twice: socket transmission finished */
    mtx_uart.lock();
    uart_main_func(1, 2, 0, 0); // two beeps = transmission complete
    mtx_uart.unlock();
}
