/*---------------------------------------------------------------------
erji_app
avsr大应用:avsr_taiic_lm
常规通话不受PTT控制
BIN:output/out/media_out/bin/avsr_taiic_lm
-----------------------------------------------------------------------*/
#include <iostream>
#include <stdio.h>
#include <errno.h>
#include <cstring>
#include <cstdlib> //包含system函数
#include <unistd.h>
#include "stdlib.h"
#include <pthread.h>
#include <signal.h>
#include <sys/poll.h>
#include <float.h>
#include <chrono>
#include <vector>
#include "taiic_key.h"
#include <fcntl.h>
#include "taiic_sensor.h"

#include <mutex>
#include <thread>
#include "taiic_opencv.h"
#include "taiic_lm.h"
#include "taiic_avsr.h"
#include "taiic_mfcc.h"
#include "../../include/rs_485/485_common.h" //485协议头文件
#include "client_t.h"

#include <condition_variable>
/*---------------------ISP Header File start---------------------*/
/*
 * ISP: V4L2 video capture example
 */
#include "../../include/isp/taiic_isp_model.h"
#include <string.h>
#include <assert.h>
#include <getopt.h> /* getopt_long() */
#include <sys/stat.h>
#include <sys/types.h>
#include <sys/time.h>
#include <sys/mman.h>
#include <sys/ioctl.h>
#include <dlfcn.h>
#include <dirent.h>
#if ISPDEMO_ENABLE_DRM
#include "isp/drmDsp.h"
#endif
#include "isp/uAPI2/rk_aiq_user_api2_sysctl.h"
#include "isp/uAPI2/rk_aiq_user_api2_debug.h"
#include "isp/sample/sample_image_process.h"
#include "isp/sample/sample_smartIr.h"
#include "isp/rkisp_demo.h"
#include <termios.h>

#include "isp/ae_algo_demo/third_party_ae_algo.h"
#include "isp/awb_algo_demo/third_party_awbV32_algo.h" //for rv1106
#include "isp/af_algo_demo/third_party_af_algo.h"

#if ISPDEMO_ENABLE_RGA && ISPDEMO_ENABLE_DRM
#include "isp/display.h"
#include "isp/rga.h"
#endif
#include <list>
#include <string>
#include <algorithm>
#include "../../include/taiic_uart/taiic_uart_sum.h"
using namespace std;
RK_BOOL is_model_run = RK_FALSE; // set while the AVSR model is running
long pcm_size = 0;               // current size (bytes) of the air-conduction PCM file being polled

unsigned int frameNum = 0; // global frame counter

std::ofstream outfile; // generic output stream (consumers elsewhere in this file)
std::ifstream wavFp;   // wav input stream
std::ofstream mfcFp;   // mfcc output stream

mutex mtx_uart; // serializes all calls to uart_main_func()

int ai_end = 0;     // ai end flag (audio capture finished)
int vi_end = 0;     // vi end flag (video capture finished)
int img_up_end = 0; // image upload finished flag

#define TEST_BLOCKED_STATS_FUNC

tAiViSaveEndCallback *s_aivi_save_end = NULL; // registered via aivi_save_end_callback(); may remain NULL
static void spp_frame_data_cback(uint8_t keyValue);
static void ai_frame_data_cback(uint8_t *data, unsigned int len, unsigned short frame_num);

void aivi_save_end_callback(tAiViSaveEndCallback aivi_save_end)
{
    // Register the AI/VI save-completion callback. A NULL argument is
    // ignored so a previously registered handler is never clobbered.
    if (aivi_save_end == NULL)
        return;
    s_aivi_save_end = aivi_save_end;
}

TAIIC_MODE_STATE_CTX_S *mode_state; // mode-state related parameters
TAIIC_MFCC mfccComputer;            // audio_mfcc
/*----------------------------avsr define---------------------*/
AVSR_TOOLKIT_MODEL_CTX_S *avsr_ctx; // avsr model inference context
RK_U8 avsr_video_input_data[AVSR_V_BATCH][AVSR_V_HEIGHT][AVSR_V_WIDTH][AVSR_V_CHANNEL];
RK_U8 avsr_audio_input_data[AVSR_A_BATCH][AVSR_A_HEIGHT][AVSR_A_WIDTH][AVSR_A_CHANNEL];
/*-------------------------------------------*/
// per-camera demo contexts (up to four capture devices)
static demo_context_t *g_main_ctx = NULL, *g_second_ctx = NULL, *g_third_ctx = NULL, *g_fourth_ctx = NULL;
/*----------------------ISP Header File end----------------------*/
TEST_BATTERY_QUERY cap_query;

void *ai_uart_thread_func(void *args) // sub-board -> main-board air-conduction audio capture
{
    // Worker loop: wait for ai_start, trigger audio capture over UART, poll
    // the PCM file until it reaches the expected size, and fire the
    // save-end callback once audio, video and image upload all finished.
    while (true)
    {
        std::unique_lock<std::mutex> lock(mtx_ai);
        // block until another thread sets ai_start and notifies cv_ai
        cv_ai.wait(lock, []()
                   { return ai_start; });

        // Remove any stale PCM file so the size poll below only sees fresh data.
        if (fileExists(air_pcm_path))
        {
            if (remove(air_pcm_path) == 0)
            {
                printf("air pcm file exists, and delete successfully\n");
            }
            else
            {
                printf("air pcm file delete failed\n");
            }
        }

        // Kick off audio capture on the sub-board (UART access is serialized).
        mtx_uart.lock();
        std::this_thread::sleep_for(std::chrono::milliseconds(100));
        uart_main_func(5, 1, 0, 0);
        sleep(1);
        mtx_uart.unlock();

        // Poll until the expected amount of PCM data has been written.
        // NOTE(fix): use >= rather than == so the loop cannot spin forever
        // if the writer overshoots the expected size between two polls.
        while (1)
        {
            pcm_size = getFileSize(air_pcm_path);
            printf("%ld\n", pcm_size);
            if (pcm_size >= PCM_SIZE_SOC) // 53280 bytes expected
            {
                ai_end = 1; // ai finish flag
                RK_LOGD("=====save ai finish==\n");

                break;
            }
            sleep(1);
        }

        printf("vi_end=%d,ai_end=%d,img_up_end=%d\n", vi_end, ai_end, img_up_end);
        // Audio, video and image upload all done -> start audio/video fusion.
        if ((vi_end == 1) && (ai_end == 1) && (img_up_end == 1))
        {
            // SPK beeps twice: lip-image capture and audio capture finished
            mtx_uart.lock();
            sleep(1);
            uart_main_func(1, 2, 0, 0);
            mtx_uart.unlock();

            // NOTE(fix): s_aivi_save_end starts out NULL and is only set by
            // aivi_save_end_callback(); guard against calling through NULL.
            if (s_aivi_save_end != NULL)
                s_aivi_save_end(vi_end, ai_end);
        }

        // Clear the start flag so the next cycle waits again.
        ai_start = false;
    }
    pthread_exit(NULL);
}
/*------------------------isp func start------------------------*/
char *get_dev_name(demo_context_t *ctx)
{
    // Select the device-node path that corresponds to the camera currently
    // in use (1..4); NULL with an error log for anything else.
    switch (ctx->dev_using)
    {
    case 1:
        return ctx->dev_name;
    case 2:
        return ctx->dev_name2;
    case 3:
        return ctx->dev_name3;
    case 4:
        return ctx->dev_name4;
    default:
        ERR("!!!dev_using is not supported!!!");
        return NULL;
    }
}

char *get_sensor_name(demo_context_t *ctx)
{
    // Accessor for the sensor name stored in the demo context.
    char *name = ctx->sns_name;
    return name;
}
/* Log a failed system/ioctl call with errno detail. Despite the name this
 * does NOT terminate: exit() is deliberately commented out, so callers
 * continue after logging (best-effort error policy used throughout). */
static void errno_exit(demo_context_t *ctx, const char *s)
{
    ERR("%s: %s error %d, %s\n", get_sensor_name(ctx), s, errno, strerror(errno));
    // exit(EXIT_FAILURE);
}

/* Dequeue one filled capture buffer from ctx->fd, optionally display it
 * via DRM/RGA (compile-time options), hand it to process_image(), then
 * re-queue the buffer. Always returns 1. */
static int read_frame(demo_context_t *ctx, int flg)
{
    struct v4l2_buffer buf;
    int i, bytesused;

    CLEAR(buf);

    buf.type = ctx->buf_type;
    buf.memory = V4L2_MEMORY_MMAP;

    // multi-planar capture requires a caller-provided plane array
    struct v4l2_plane planes[FMT_NUM_PLANES];
    memset(planes, 0, sizeof(struct v4l2_plane) * FMT_NUM_PLANES);
    if (V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE == ctx->buf_type)
    {
        buf.m.planes = planes;
        buf.length = FMT_NUM_PLANES;
    }

    if (-1 == xioctl(ctx->fd, VIDIOC_DQBUF, &buf))
        errno_exit(ctx, "VIDIOC_DQBUF");

    i = buf.index;

    // payload size lives in the first plane for mplane devices
    if (V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE == ctx->buf_type)
        bytesused = buf.m.planes[0].bytesused;
    else
        bytesused = buf.bytesused;

#if ISPDEMO_ENABLE_DRM
    if (ctx->vop)
    {
        // clamp display size to 1920x1088 (max of the display path)
        int dispWidth, dispHeight;

        if (ctx->width > 1920)
            dispWidth = 1920;
        else
            dispWidth = ctx->width;

        if (ctx->height > 1088)
            dispHeight = 1088;
        else
            dispHeight = ctx->height;

#if ISPDEMO_ENABLE_RGA
        // two cameras configured: split-screen via RGA windows
        if (strlen(ctx->dev_name) && strlen(ctx->dev_name2))
        {
            if (ctx->dev_using == 1)
                display_win1(ctx->buffers[i].start, ctx->buffers[i].export_fd, RK_FORMAT_YCbCr_420_SP, dispWidth, dispHeight, 0);
            else
                display_win2(ctx->buffers[i].start, ctx->buffers[i].export_fd, RK_FORMAT_YCbCr_420_SP, dispWidth, dispHeight, 0);
        }
        else
        {
#else
        {
#endif
            drmDspFrame(ctx->width, ctx->height, dispWidth, dispHeight, ctx->buffers[i].export_fd, DRM_FORMAT_NV12);
        }
    }
#endif

    // consume the frame (application-specific processing)
    process_image(ctx->buffers[i].start, buf.sequence, bytesused, ctx, flg);

    // return the buffer to the driver's queue
    if (-1 == xioctl(ctx->fd, VIDIOC_QBUF, &buf))
        errno_exit(ctx, "VIDIOC_QBUF");

    return 1;
}
static void stop_capturing(demo_context_t *ctx)
{
    // Turn streaming off on the main capture node.
    enum v4l2_buf_type type = ctx->buf_type;

    if (xioctl(ctx->fd, VIDIOC_STREAMOFF, &type) == -1)
        errno_exit(ctx, "VIDIOC_STREAMOFF");
}

static void stop_capturing_pp_oneframe(demo_context_t *ctx)
{
    // Stream off the pp-input node first, then the ISP main-path node.
    enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;

    if (xioctl(ctx->fd_pp_input, VIDIOC_STREAMOFF, &type) == -1)
        errno_exit(ctx, "VIDIOC_STREAMOFF ppinput");

    type = ctx->buf_type;
    if (xioctl(ctx->fd_isp_mp, VIDIOC_STREAMOFF, &type) == -1)
        errno_exit(ctx, "VIDIOC_STREAMOFF ispmp");
}

static void start_capturing(demo_context_t *ctx)
{
    // Queue every mmap'ed buffer, then stream on the capture node.
    unsigned int i;
    enum v4l2_buf_type type;
    // NOTE(fix): planes must outlive the VIDIOC_QBUF ioctl below. The old
    // code declared it inside the if-block, so buf.m.planes pointed at an
    // out-of-scope array when the ioctl ran (undefined behavior).
    struct v4l2_plane planes[FMT_NUM_PLANES];

    for (i = 0; i < ctx->n_buffers; ++i)
    {
        struct v4l2_buffer buf;

        CLEAR(buf);
        CLEAR(planes);
        buf.type = ctx->buf_type;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index = i;

        if (V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE == ctx->buf_type)
        {
            buf.m.planes = planes;
            buf.length = FMT_NUM_PLANES;
        }
        if (-1 == xioctl(ctx->fd, VIDIOC_QBUF, &buf))
            errno_exit(ctx, "VIDIOC_QBUF");
    }
    type = ctx->buf_type;
    DBG("%s:-------- stream on output -------------\n", get_sensor_name(ctx));

    if (-1 == xioctl(ctx->fd, VIDIOC_STREAMON, &type))
        errno_exit(ctx, "VIDIOC_STREAMON");
}

static void start_capturing_pp_oneframe(demo_context_t *ctx)
{
    // Stream on the pp-input node, queue the ISP main-path buffers, then
    // stream on the ISP main-path node.
    unsigned int i;
    enum v4l2_buf_type type;
    // NOTE(fix): planes must outlive the VIDIOC_QBUF ioctl below. The old
    // code declared it inside the if-block, so buf.m.planes pointed at an
    // out-of-scope array when the ioctl ran (undefined behavior).
    struct v4l2_plane planes[FMT_NUM_PLANES];

    type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
    DBG("%s:-------- stream on pp input -------------\n", get_sensor_name(ctx));
    if (-1 == xioctl(ctx->fd_pp_input, VIDIOC_STREAMON, &type))
        errno_exit(ctx, "VIDIOC_STREAMON pp input");

    type = ctx->buf_type;
    for (i = 0; i < ctx->n_buffers; ++i)
    {
        struct v4l2_buffer buf;

        CLEAR(buf);
        CLEAR(planes);
        buf.type = ctx->buf_type;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index = i;

        if (V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE == ctx->buf_type)
        {
            buf.m.planes = planes;
            buf.length = FMT_NUM_PLANES;
        }
        if (-1 == xioctl(ctx->fd_isp_mp, VIDIOC_QBUF, &buf))
            errno_exit(ctx, "VIDIOC_QBUF");
    }
    DBG("%s:-------- stream on isp mp -------------\n", get_sensor_name(ctx));
    if (-1 == xioctl(ctx->fd_isp_mp, VIDIOC_STREAMON, &type))
        errno_exit(ctx, "VIDIOC_STREAMON ispmp");
}

static void uninit_device(demo_context_t *ctx)
{
    // Unmap and close every capture buffer on the main node, then release
    // the buffer-descriptor array. Safe to call when nothing is allocated.
    unsigned int i;
    if (ctx->n_buffers == 0)
        return;

    for (i = 0; i < ctx->n_buffers; ++i)
    {
        if (-1 == munmap(ctx->buffers[i].start, ctx->buffers[i].length))
            errno_exit(ctx, "munmap");

        close(ctx->buffers[i].export_fd);
    }

    free(ctx->buffers);
    ctx->buffers = NULL; // NOTE(fix): don't leave a dangling pointer after free
    ctx->n_buffers = 0;
}

static void uninit_device_pp_oneframe(demo_context_t *ctx)
{
    // Unmap and close the ISP main-path buffers used in pp-oneframe mode,
    // then release the descriptor array.
    // NOTE(review): the loop bound is ctx->n_buffers, which uninit_device()
    // zeroes; when this runs after uninit_device() (as deinit() does) the
    // mappings are skipped and only the array is freed -- confirm whether
    // the contexts are meant to track their counts separately.
    unsigned int i;

    if (ctx->buffers_mp == NULL) // NOTE(fix): tolerate repeated / early calls
        return;

    for (i = 0; i < ctx->n_buffers; ++i)
    {
        if (-1 == munmap(ctx->buffers_mp[i].start, ctx->buffers_mp[i].length))
            errno_exit(ctx, "munmap");

        close(ctx->buffers_mp[i].export_fd);
    }

    free(ctx->buffers_mp);
    ctx->buffers_mp = NULL; // don't leave a dangling pointer after free
}

/* Request, query, mmap and DMA-export BUFFER_COUNT capture buffers on either
 * the ISP main-path node (pp_onframe) or the main capture node. Fills
 * ctx->buffers_mp or ctx->buffers and sets ctx->n_buffers.
 * NOTE(review): all exit() calls are commented out, so on REQBUFS/calloc
 * failure this continues with an invalid req.count or NULL buffer array --
 * confirm that callers rely on this best-effort behavior. */
static void init_mmap(int pp_onframe, demo_context_t *ctx)
{
    struct v4l2_requestbuffers req;
    int fd_tmp = -1;

    CLEAR(req);

    // pick which node the buffers belong to
    if (pp_onframe)
        fd_tmp = ctx->fd_isp_mp;
    else
        fd_tmp = ctx->fd;

    req.count = BUFFER_COUNT;
    req.type = ctx->buf_type;
    req.memory = V4L2_MEMORY_MMAP;

    struct buffer *tmp_buffers = NULL;

    if (-1 == xioctl(fd_tmp, VIDIOC_REQBUFS, &req))
    {
        if (EINVAL == errno)
        {
            ERR("%s: %s does not support "
                "memory mapping\n",
                get_sensor_name(ctx), get_dev_name(ctx));
            // exit(EXIT_FAILURE);
        }
        else
        {
            errno_exit(ctx, "VIDIOC_REQBUFS");
        }
    }

    // the driver may grant fewer buffers than requested
    if (req.count < 2)
    {
        ERR("%s: Insufficient buffer memory on %s\n", get_sensor_name(ctx),
            get_dev_name(ctx));
        // exit(EXIT_FAILURE);
    }

    tmp_buffers = (struct buffer *)calloc(req.count, sizeof(struct buffer));

    if (!tmp_buffers)
    {
        ERR("%s: Out of memory\n", get_sensor_name(ctx));
        // exit(EXIT_FAILURE);
    }

    if (pp_onframe)
        ctx->buffers_mp = tmp_buffers;
    else
        ctx->buffers = tmp_buffers;

    // query and map each granted buffer; n_buffers ends at req.count
    for (ctx->n_buffers = 0; ctx->n_buffers < req.count; ++ctx->n_buffers)
    {
        struct v4l2_buffer buf;
        struct v4l2_plane planes[FMT_NUM_PLANES];
        CLEAR(buf);
        CLEAR(planes);

        buf.type = ctx->buf_type;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index = ctx->n_buffers;

        if (V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE == ctx->buf_type)
        {
            buf.m.planes = planes;
            buf.length = FMT_NUM_PLANES;
        }

        if (-1 == xioctl(fd_tmp, VIDIOC_QUERYBUF, &buf))
            errno_exit(ctx, "VIDIOC_QUERYBUF");

        // map the buffer into our address space (plane 0 for mplane devices)
        if (V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE == ctx->buf_type)
        {
            tmp_buffers[ctx->n_buffers].length = buf.m.planes[0].length;
            tmp_buffers[ctx->n_buffers].start =
                mmap(NULL /* start anywhere */,
                     buf.m.planes[0].length,
                     PROT_READ | PROT_WRITE /* required */,
                     MAP_SHARED /* recommended */,
                     fd_tmp, buf.m.planes[0].m.mem_offset);
        }
        else
        {
            tmp_buffers[ctx->n_buffers].length = buf.length;
            tmp_buffers[ctx->n_buffers].start =
                mmap(NULL /* start anywhere */,
                     buf.length,
                     PROT_READ | PROT_WRITE /* required */,
                     MAP_SHARED /* recommended */,
                     fd_tmp, buf.m.offset);
        }

        if (MAP_FAILED == tmp_buffers[ctx->n_buffers].start)
            errno_exit(ctx, "mmap");

        // export buf dma fd (used by DRM/RGA display paths)
        struct v4l2_exportbuffer expbuf;
        xcam_mem_clear(expbuf);
        expbuf.type = ctx->buf_type;
        expbuf.index = ctx->n_buffers;
        expbuf.flags = O_CLOEXEC;
        if (xioctl(fd_tmp, VIDIOC_EXPBUF, &expbuf) < 0)
        {
            errno_exit(ctx, "get dma buf failed\n");
        }
        else
        {
            DBG("%s: get dma buf(%d)-fd: %d\n", get_sensor_name(ctx), ctx->n_buffers, expbuf.fd);
        }
        tmp_buffers[ctx->n_buffers].export_fd = expbuf.fd;
    }
}

static void init_input_dmabuf_oneframe(demo_context_t *ctx)
{
    // Request DMABUF buffers on the pp-input node, then mmap the ISP
    // main-path buffers (init_mmap with pp_onframe = true).
    struct v4l2_requestbuffers req;

    CLEAR(req);

    printf("%s:-------- request pp input buffer -------------\n", get_sensor_name(ctx));
    req.count = BUFFER_COUNT;
    req.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
    req.memory = V4L2_MEMORY_DMABUF;

    if (xioctl(ctx->fd_pp_input, VIDIOC_REQBUFS, &req) == -1)
    {
        if (errno == EINVAL)
        {
            ERR("does not support "
                "DMABUF\n");
            exit(EXIT_FAILURE);
        }
        errno_exit(ctx, "VIDIOC_REQBUFS");
    }

    if (req.count < 2)
    {
        ERR("Insufficient buffer memory on %s\n",
            get_dev_name(ctx));
        exit(EXIT_FAILURE);
    }
    printf("%s:-------- request isp mp buffer -------------\n", get_sensor_name(ctx));
    init_mmap(true, ctx);
}

static void init_device(demo_context_t *ctx)
{
    // Query device capabilities, pick single- vs multi-planar capture,
    // program the capture format and mmap the buffers.
    struct v4l2_capability cap;
    struct v4l2_format fmt;

    // NOTE(fix): zero fmt up front. Previously each capability branch did
    // its own CLEAR(fmt); if neither capture capability bit was set, an
    // uninitialized fmt was handed to VIDIOC_S_FMT below.
    CLEAR(fmt);

    if (-1 == xioctl(ctx->fd, VIDIOC_QUERYCAP, &cap))
    {
        if (EINVAL == errno)
        {
            ERR("%s: %s is no V4L2 device\n", get_sensor_name(ctx),
                get_dev_name(ctx));
            // exit(EXIT_FAILURE);
        }
        else
        {
            errno_exit(ctx, "VIDIOC_QUERYCAP");
        }
    }

    if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) &&
        !(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE_MPLANE))
    {
        ERR("%s: %s is not a video capture device, capabilities: %x\n",
            get_sensor_name(ctx), get_dev_name(ctx), cap.capabilities);
        // exit(EXIT_FAILURE);
    }

    if (!(cap.capabilities & V4L2_CAP_STREAMING))
    {
        ERR("%s: %s does not support streaming i/o\n", get_sensor_name(ctx),
            get_dev_name(ctx));
        // exit(EXIT_FAILURE);
    }

    // Prefer the single-planar API when available, fall back to mplane.
    if (cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)
    {
        ctx->buf_type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        fmt.type = ctx->buf_type;
        fmt.fmt.pix.width = ctx->width;
        fmt.fmt.pix.height = ctx->height;
        fmt.fmt.pix.pixelformat = ctx->format;
        fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;
        if (ctx->limit_range)
            fmt.fmt.pix.quantization = V4L2_QUANTIZATION_LIM_RANGE;
        else
            fmt.fmt.pix.quantization = V4L2_QUANTIZATION_FULL_RANGE;
    }
    else if (cap.capabilities & V4L2_CAP_VIDEO_CAPTURE_MPLANE)
    {
        ctx->buf_type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
        fmt.type = ctx->buf_type;
        fmt.fmt.pix_mp.width = ctx->width;
        fmt.fmt.pix_mp.height = ctx->height;
        fmt.fmt.pix_mp.pixelformat = ctx->format;
        fmt.fmt.pix_mp.field = V4L2_FIELD_INTERLACED;
        if (ctx->limit_range)
            fmt.fmt.pix_mp.quantization = V4L2_QUANTIZATION_LIM_RANGE;
        else
            fmt.fmt.pix_mp.quantization = V4L2_QUANTIZATION_FULL_RANGE;
    }

    if (-1 == xioctl(ctx->fd, VIDIOC_S_FMT, &fmt))
        errno_exit(ctx, "VIDIOC_S_FMT");

    init_mmap(false, ctx);
}

/* Initialize the pp-oneframe path. Format and media-link setup is done
 * externally by setup_link.sh (see TODO); only buffer setup happens here. */
static void init_device_pp_oneframe(demo_context_t *ctx)
{
    // TODO, set format and link, now do with setup_link.sh
    init_input_dmabuf_oneframe(ctx);
}

static void close_device(demo_context_t *ctx)
{
    // Close the main capture fd and mark it invalid.
    if (close(ctx->fd) == -1)
        errno_exit(ctx, "close");

    ctx->fd = -1;
}

static void open_device(demo_context_t *ctx)
{
    // Open the selected capture node in blocking R/W mode; abort on failure.
    char *dev = get_dev_name(ctx);

    printf("-------- open output dev_name:%s -------------\n", dev);
    ctx->fd = open(dev, O_RDWR /* required */ /*| O_NONBLOCK*/, 0);

    if (ctx->fd == -1)
    {
        ERR("Cannot open '%s': %d, %s\n",
            dev, errno, strerror(errno));
        exit(EXIT_FAILURE);
    }
}

static void close_device_pp_oneframe(demo_context_t *ctx)
{
    // Close both pp-oneframe fds and mark them invalid.
    if (close(ctx->fd_pp_input) == -1)
        errno_exit(ctx, "close");
    ctx->fd_pp_input = -1;

    if (close(ctx->fd_isp_mp) == -1)
        errno_exit(ctx, "close");
    ctx->fd_isp_mp = -1;
}

static void open_device_pp_oneframe(demo_context_t *ctx)
{
    // Open the pp-input (video13) and ISP main-path (video0) nodes used by
    // the pp-oneframe pipeline; abort on failure.
    printf("-------- open pp input(video13) -------------\n");
    ctx->fd_pp_input = open("/dev/video13", O_RDWR /* required */ /*| O_NONBLOCK*/, 0);

    if (-1 == ctx->fd_pp_input)
    {
        // NOTE(fix): report the node actually opened -- the old message
        // printed get_dev_name(ctx), which is a different device.
        ERR("Cannot open '%s': %d, %s\n",
            "/dev/video13", errno, strerror(errno));
        exit(EXIT_FAILURE);
    }

    printf("-------- open isp mp(video0) -------------\n");
    ctx->fd_isp_mp = open("/dev/video0", O_RDWR /* required */ /*| O_NONBLOCK*/, 0);

    if (-1 == ctx->fd_isp_mp)
    {
        ERR("Cannot open '%s': %d, %s\n",
            "/dev/video0", errno, strerror(errno));
        exit(EXIT_FAILURE);
    }
}

/* Full teardown for one demo context: stop streaming, stop and deinit the
 * AIQ (single-cam) or camgroup (multi-cam) control context, release buffers,
 * close device nodes and the output file. Order matters: streams are
 * stopped before AIQ, AIQ before buffer/device release. */
static void deinit(demo_context_t *ctx)
{
    // camgroup mode streams are stopped via the group API below instead
    if (!ctx->camgroup_ctx)
        stop_capturing(ctx);

    if (ctx->pponeframe)
        stop_capturing_pp_oneframe(ctx);
    if (ctx->aiq_ctx)
    {
        printf("%s:-------- stop aiq -------------\n", get_sensor_name(ctx));
        rk_aiq_uapi2_sysctl_stop(ctx->aiq_ctx, false); // stop the AIQ control system
    }
    else if (ctx->camgroup_ctx)
    {
        // group stop/unregister only once, driven by the first camera
        if (ctx->dev_using == 1)
        {
            printf("%s:-------- stop aiq camgroup -------------\n", get_sensor_name(ctx));
            rk_aiq_uapi2_camgroup_stop(ctx->camgroup_ctx);
#ifdef CUSTOM_GROUP_AE_DEMO_TEST
            rk_aiq_uapi2_customAE_unRegister((const rk_aiq_sys_ctx_t *)(ctx->camgroup_ctx));
#endif
#ifdef CUSTOM_GROUP_AWB_DEMO_TEST
            rk_aiq_uapi2_customAWB_unRegister((const rk_aiq_sys_ctx_t *)(ctx->camgroup_ctx));
#endif
        }
    }

    if (ctx->aiq_ctx)
    {
        printf("%s:-------- deinit aiq -------------\n", get_sensor_name(ctx));
#ifdef CUSTOM_AE_DEMO_TEST
        // rk_aiq_AELibunRegCallBack(ctx->aiq_ctx, 0);
        rk_aiq_uapi2_customAE_unRegister(ctx->aiq_ctx);
#endif
#ifdef CUSTOM_AWB_DEMO_TEST
        // rk_aiq_AELibunRegCallBack(ctx->aiq_ctx, 0);
        rk_aiq_uapi2_customAWB_unRegister(ctx->aiq_ctx);
#endif
        rk_aiq_uapi2_sysctl_deinit(ctx->aiq_ctx);
        printf("%s:-------- deinit aiq end -------------\n", get_sensor_name(ctx));
    }
    else if (ctx->camgroup_ctx)
    {
        if (ctx->dev_using == 1)
        {
            printf("%s:-------- deinit aiq camgroup -------------\n", get_sensor_name(ctx));
            rk_aiq_uapi2_camgroup_destroy(ctx->camgroup_ctx);
            ctx->camgroup_ctx = NULL;
            printf("%s:-------- deinit aiq camgroup end -------------\n", get_sensor_name(ctx));
        }
    }

    // release buffers and close device nodes
    uninit_device(ctx);
    if (ctx->pponeframe)
        uninit_device_pp_oneframe(ctx);
    close_device(ctx);
    if (ctx->pponeframe)
        close_device_pp_oneframe(ctx);

    if (ctx->fp)
    {
        fclose(ctx->fp);
        ctx->fp = NULL;
    }
}
/* Signal handler: request the stats thread to quit (when built with
 * ENABLE_UAPI_TEST), tear down both camera contexts and exit.
 * NOTE(review): calls printf/deinit from signal context, which is not
 * async-signal-safe -- acceptable for a demo's force-exit path. */
static void signal_handle(int signo)
{
    printf("force exit signo %d !!!\n", signo);

    if (g_main_ctx)
    {
#ifdef ENABLE_UAPI_TEST
        _if_quit = true;
        while (!_quit_done) // busy-wait for the stats thread to acknowledge
            printf("wait quit done !\n");
#endif
        g_main_ctx->frame_count = 0;
        stop_capturing(g_main_ctx);
        if (g_main_ctx->camGroup && g_second_ctx)
            stop_capturing(g_second_ctx);
        deinit(g_main_ctx);
        g_main_ctx = NULL;
    }
    if (g_second_ctx)
    {
        g_second_ctx->frame_count = 0;
        deinit(g_second_ctx);
        g_second_ctx = NULL;
    }
    exit(0);
}

static void *test_offline_thread(void *args)
{
    // Offline raw-replay thread: scan ctx->orppath for *.raw files, sort
    // them by the numeric prefix of their filename, and feed them to AIQ in
    // a loop until ctx->orpStop is set.
    pthread_detach(pthread_self());
    demo_context_t *demo_ctx = (demo_context_t *)args;
    DIR *dir = opendir(demo_ctx->orppath);
    struct dirent *dir_ent = NULL;
    std::vector<std::string> raw_files;
    if (dir)
    {
        while ((dir_ent = readdir(dir)))
        {
            if (dir_ent->d_type == DT_REG)
            {
                // collect regular files whose name contains ".raw"
                if (strstr(dir_ent->d_name, ".raw"))
                {
                    raw_files.push_back(dir_ent->d_name);
                }
            }
        }
        closedir(dir);
    }
    // NOTE(fix): parse with strtol instead of std::stoi -- stoi throws
    // std::invalid_argument for names without a leading digit, which would
    // terminate the process (strtol yields 0 instead). The comparator also
    // takes const references to avoid copying both strings per comparison.
    std::sort(raw_files.begin(), raw_files.end(),
              [](const std::string &str1, const std::string &str2) -> bool
              {
                  long ind1 = strtol(str1.c_str(), NULL, 10);
                  long ind2 = strtol(str2.c_str(), NULL, 10);
                  return ind1 < ind2;
              });
    while (!demo_ctx->orpStop)
    {
        for (const auto &raw_file : raw_files)
        {
            std::string full_name = demo_ctx->orppath + raw_file;
            printf("process raw : %s \n", full_name.c_str());
            rk_aiq_uapi2_sysctl_enqueueRkRawFile(demo_ctx->aiq_ctx, full_name.c_str());
            // usleep(500000);
        }
        usleep(500000);
    }
    demo_ctx->orpStopped = true;
    pthread_exit(NULL);
}
#if 0
/* Dead code (compiled out): toggles AE enable via the legacy v1 user API.
 * Kept for reference only. */
static int set_ae_onoff(const rk_aiq_sys_ctx_t* ctx, bool onoff)
{
    XCamReturn ret = XCAM_RETURN_NO_ERROR;
    Uapi_ExpSwAttr_t expSwAttr;

    ret = rk_aiq_user_api_ae_getExpSwAttr(ctx, &expSwAttr);
    expSwAttr.enable = onoff;
    ret = rk_aiq_user_api_ae_setExpSwAttr(ctx, expSwAttr);

    return 0;
}
#endif

/* Poll AE exposure-result info (e.g. convergence state). The fetched data
 * is currently unused apart from the commented-out debug print; always
 * returns 0. */
static int query_ae_state(const rk_aiq_sys_ctx_t *ctx)
{
    Uapi_ExpQueryInfo_t queryInfo;

    // NOTE(fix): the return code was stored in an unused local before;
    // it is intentionally ignored here.
    (void)rk_aiq_user_api2_ae_queryExpResInfo(ctx, &queryInfo);
    // printf("ae IsConverged: %d\n", queryInfo.IsConverged);

    return 0;
}

/* Program a fixed manual AF measurement configuration: two measurement
 * windows, a gamma curve, a gaussian filter and level-detection gradients.
 * The numeric values are sensor-tuning constants for this board.
 * NOTE(review): window sizes (2580x1935 etc.) presumably match the sensor
 * resolution -- confirm if the sensor changes. */
static void set_af_manual_meascfg(const rk_aiq_sys_ctx_t *ctx)
{
    rk_aiq_af_attrib_t attr;
    // 17-point gamma lookup applied to the AF luminance input
    uint16_t gamma_y[RKAIQ_RAWAF_GAMMA_NUM] =
        {0, 45, 108, 179, 245, 344, 409, 459, 500, 567, 622, 676, 759, 833, 896, 962, 1023};

    // read-modify-write the current AF attributes
    rk_aiq_user_api2_af_GetAttrib(ctx, &attr);
    attr.AfMode = RKAIQ_AF_MODE_FIXED;

    attr.manual_meascfg.contrast_af_en = 1;
    attr.manual_meascfg.rawaf_sel = 0; // normal = 0; hdr = 1

    // window A: near-full-frame; window B: centered 300x300 patch
    attr.manual_meascfg.window_num = 2;
    attr.manual_meascfg.wina_h_offs = 2;
    attr.manual_meascfg.wina_v_offs = 2;
    attr.manual_meascfg.wina_h_size = 2580;
    attr.manual_meascfg.wina_v_size = 1935;

    attr.manual_meascfg.winb_h_offs = 1146;
    attr.manual_meascfg.winb_v_offs = 972;
    attr.manual_meascfg.winb_h_size = 300;
    attr.manual_meascfg.winb_v_size = 300;

    attr.manual_meascfg.gamma_flt_en = 1;
    memcpy(attr.manual_meascfg.gamma_y, gamma_y, RKAIQ_RAWAF_GAMMA_NUM * sizeof(uint16_t));

    // 3-tap gaussian pre-filter coefficients
    attr.manual_meascfg.gaus_flt_en = 1;
    attr.manual_meascfg.gaus_h0 = 0x20;
    attr.manual_meascfg.gaus_h1 = 0x10;
    attr.manual_meascfg.gaus_h2 = 0x08;

    attr.manual_meascfg.afm_thres = 4;

    attr.manual_meascfg.lum_var_shift[0] = 0;
    attr.manual_meascfg.afm_var_shift[0] = 0;
    attr.manual_meascfg.lum_var_shift[1] = 4;
    attr.manual_meascfg.afm_var_shift[1] = 4;

    // sharpness/level-detection gradient configuration
    attr.manual_meascfg.sp_meas.enable = true;
    attr.manual_meascfg.sp_meas.ldg_xl = 10;
    attr.manual_meascfg.sp_meas.ldg_yl = 28;
    attr.manual_meascfg.sp_meas.ldg_kl = (255 - 28) * 256 / 45;
    attr.manual_meascfg.sp_meas.ldg_xh = 118;
    attr.manual_meascfg.sp_meas.ldg_yh = 8;
    attr.manual_meascfg.sp_meas.ldg_kh = (255 - 8) * 256 / 15;
    attr.manual_meascfg.sp_meas.highlight_th = 245;
    attr.manual_meascfg.sp_meas.highlight2_th = 200;
    rk_aiq_user_api2_af_SetAttrib(ctx, &attr);
}

/*-------------------------isp func end-------------------------*/
/*----------------sample_smartIr start----------------------------*/
#define RK_SMART_IR_AUTO_IRLED true
// Context for the smart-IR (automatic day/night switching) sample logic.
typedef struct sample_smartIr_s
{
    pthread_t tid;                   // worker thread id
    bool tquit;                      // quit request for the worker
    const rk_aiq_sys_ctx_t *aiq_ctx; // AIQ system context providing 3A stats
    bool started;
    const char *ir_cut_v4ldev;       // v4l subdev that drives the IR-cut filter
    const char *ir_v4ldev;           // v4l subdev for the IR LED (may be NULL)
    rk_smart_ir_ctx_t *ir_ctx;       // smart-IR algorithm context
    rk_smart_ir_params_t ir_configs; // thresholds / tuning parameters
} sample_smartIr_t;

static sample_smartIr_t g_sample_smartIr_ctx;

static void enableIrCutter(bool on)
{
    // Drive the IR-cut filter through the V4L2 band-stop-filter control:
    // value 3 = filter IR out (day), value 0 = let IR in (night).
    sample_smartIr_t *smartIr_ctx = &g_sample_smartIr_ctx;

    struct v4l2_control control;
    control.id = V4L2_CID_BAND_STOP_FILTER;
    control.value = on ? 3 : 0;

    int _fd = open(smartIr_ctx->ir_cut_v4ldev, O_RDWR | O_CLOEXEC);
    if (_fd == -1)
        return; // best effort: skip silently if the subdev cannot be opened

    if (ioctl(_fd, VIDIOC_S_CTRL, &control) < 0)
    {
        printf("failed to set ircut value %d to device!\n", control.value);
    }
    close(_fd);
}

void switch_to_day()
{
    // Day mode: engage the IR-cut filter so IR light is blocked.
    // (IR LED off and ISP day params are handled by the caller.)
    const bool filter_ir = true;
    enableIrCutter(filter_ir);
}

void switch_to_night()
{
    // Night mode: disengage the IR-cut filter so IR light reaches the sensor.
    // (IR LED on and ISP night params are handled by the caller.)
    const bool filter_ir = false;
    enableIrCutter(filter_ir);
}
//////////////////////////////////////////////////////////////
static void *stats_thread(void *args)
{
    // Day/night (smart-IR) decision loop: waits for start_stats, pulls 3A
    // stats blocks from AIQ, runs the smart-IR classifier and toggles the
    // IR-cut filter / IR LED / ISP scene parameters on day<->night
    // transitions (flag_day/flag_night debounce the edges).
    demo_context_t *ctx = (demo_context_t *)args;
    XCamReturn ret;
    pthread_detach(pthread_self()); // detached: nobody joins this thread
    printf("begin stats thread\n");
    // NOTE(fix): value-initialize -- ir_res.status was previously read even
    // on iterations where stats_ref was NULL and runOnce never filled it in.
    rk_smart_ir_result_t ir_res = {};

    set_af_manual_meascfg(ctx->aiq_ctx);

    while (!_if_quit)
    {
        std::unique_lock<std::mutex> lock(mtx_stats);
        // block until another thread sets start_stats and notifies cv_stats
        cv_stats.wait(lock, []()
                      { return start_stats; });

        rk_aiq_isp_stats_t *stats_ref = NULL;
        ret = rk_aiq_uapi2_sysctl_get3AStatsBlk(ctx->aiq_ctx, &stats_ref, -1); // fetch one 3A stats block (blocking)

        if (stats_ref)
            rk_smart_ir_runOnce(ctx->ir_ctx, stats_ref, &ir_res); // run the day/night classifier
        // The classifier combines AE luminance statistics with the AWB-based
        // IR-light ratio to decide whether a day/night switch is warranted.

        if (ir_res.status == RK_SMART_IR_STATUS_DAY)
        {
            flag_day++;
            flag_night = 0;
            if (flag_day == 1) // act only once, on the night->day edge
            {
                // 1) engage the IR-cut filter (soft light-sensor mode)
                switch_to_day();

                // 2) IR LED off
                system("echo 0 >/sys/class/pwm/pwmchip1/pwm0/enable");
                system("echo a > /sys/devices/virtual/adw/adwdev/adwgpio");

                // 3) switch the ISP to the day parameter scene
                switch_rst = rk_aiq_uapi2_sysctl_switch_scene(ctx->aiq_ctx, main_scene, stats_day);
            }
        }
        else if (ir_res.status == RK_SMART_IR_STATUS_NIGHT)
        {
            flag_night++;
            flag_day = 0;
            if (flag_night == 1) // act only once, on the day->night edge
            {
                // 1) switch the ISP to the night parameter scene
                switch_rst = rk_aiq_uapi2_sysctl_switch_scene(ctx->aiq_ctx, main_scene, stats_night);

                // 2) disengage the IR-cut filter
                switch_to_night();

                // 3) IR LED on
                system("echo 1 >/sys/class/pwm/pwmchip1/pwm0/enable");
                system("echo b > /sys/devices/virtual/adw/adwdev/adwgpio");
            }
        }

        if (ret == XCAM_RETURN_NO_ERROR && stats_ref != NULL)
        {
            query_ae_state(ctx->aiq_ctx);
            // every fetched stats block must be released back to AIQ
            rk_aiq_uapi2_sysctl_release3AStatsRef(ctx->aiq_ctx, stats_ref);
        }
        else
        {
            if (ret == XCAM_RETURN_NO_ERROR)
            {
                printf("aiq has stopped !\n");
                break;
            }
            else if (ret == XCAM_RETURN_ERROR_TIMEOUT)
            {
                printf("aiq timeout!\n");
                continue;
            }
            else if (ret == XCAM_RETURN_ERROR_FAILED)
            {
                printf("aiq failed!\n");
                break;
            }
        }
    }
    printf("end stats thread\n");

    _quit_done = true;
    pthread_exit(NULL);
    return 0;
}

void release_buffer(void *addr)
{
    // Debug hook: log the address of the buffer being released.
    void *released = addr;
    printf("release buffer called: addr=%p\n", released);
}

/* Exercise the AIQ runtime-tuning API with two JSON-patch payloads: one
 * replacing sensor resolution/CISFlip, one replacing AWB gain-clip and
 * light-source fields. Test/demo code only -- the values are arbitrary. */
static void test_tuning_api(demo_context_t *ctx)
{
    // JSON-patch (RFC 6902 style) applied to the sensor calib section
    std::string json_sensor_str = " \n\
        [{ \n\
            \"op\":\"replace\", \n\
            \"path\": \"/sensor_calib/resolution\", \n\
            \"value\": \n\
            { \"width\": 2222, \"height\": 2160} \n\
        }, { \n\
            \"op\":\"replace\", \n\
            \"path\": \"/sensor_calib/CISFlip\", \n\
            \"value\": 6\n\
        }]";

    printf("%s\n", json_sensor_str.c_str());
    rk_aiq_uapi2_sysctl_tuning(ctx->aiq_ctx, const_cast<char *>(json_sensor_str.c_str()));

    // second patch: AWB wb_v21 section
    std::string json_awb_str = " \n\
        [{ \n\
            \"op\":\"replace\", \n\
            \"path\": \"/wb_v21/autoExtPara/wbGainClip/cct\", \n\
            \"value\": \n\
            [100,200,300,40,50,60] \n\
        },{ \n\
            \"op\":\"replace\", \n\
            \"path\": \"/wb_v21/autoPara/lightSources/0/name\", \n\
            \"value\": \"aaaaaaaaa\" \n\
        }]";
    printf("%s\n", json_awb_str.c_str());
    rk_aiq_uapi2_sysctl_tuning(ctx->aiq_ctx, const_cast<char *>(json_awb_str.c_str()));

    printf("%s done ..\n", __func__);
}

/**
 * Configure the smart-IR (day/night switching) module: select the IR-cut
 * v4l sub-device, push the day/night decision thresholds into the SmartIr
 * context, and force the initial state to "day".
 */
static void load_ir_configs(demo_context_t *ctx)
{
    // sample_smartIr_t* smartIr_ctx = &g_sample_smartIr_ctx;
    // printf("{ccckkk} %s(%d): .............................666 \n", __FUNCTION__, __LINE__);
    rk_smart_ir_result_t ir_init_res;
    int ret = 1;
    ctx->ir_cut_v4ldev = NULL;
    ctx->ir_v4ldev = NULL;

    ctx->ir_cut_v4ldev = "/dev/v4l-subdev3";
    ctx->ir_configs.d2n_envL_th = 0.08f; // day->night luminance threshold: 0.08f (default)
    ctx->ir_configs.n2d_envL_th = 0.20f; // night->day luminance threshold: 0.20f (default)
    ctx->ir_configs.rggain_base = 1.0f;  // Rgain/Ggain baseline for night->day switching, default 1.0f
    ctx->ir_configs.bggain_base = 1.0f;  // Bgain/Ggain baseline for night->day switching, default 1.0f
    ctx->ir_configs.awbgain_rad = 0.10f; // awbgain filter radius for night->day switching, default 0
    ctx->ir_configs.awbgain_dis = 0.22f; // awbgain dispersion threshold for night->day switching; needs tuning, 0.22f
    ctx->ir_configs.switch_cnts_th = 30; // switch threshold: state may change only after staying stable more than this many counts (was 100)
    ret = rk_smart_ir_config(ctx->ir_ctx, &ctx->ir_configs);
    if (!ret)
    {
        printf("{tttddd} %s(%d): .............................666 \n", __FUNCTION__, __LINE__);
    }
    // set initial status to day
    ir_init_res.status = RK_SMART_IR_STATUS_DAY;
    rk_smart_ir_set_status(ctx->ir_ctx, ir_init_res);
    switch_to_day();
}

/**
 * Bring up the ISP capture pipeline for one camera context.
 *
 * Chooses the AIQ working mode from ctx->hdrmode (falling back to normal if
 * the sensor does not advertise the requested HDR mode), resolves the bound
 * sensor entity, initializes the AIQ engine — single camera or camera group —
 * optionally registers custom AE/AWB algorithms and offline-raw (orp)
 * playback, then prepares and starts capture. When ctl_type is non-default
 * it loops forever through a start/stop (and optionally deinit/init) stress
 * cycle via `goto restart` and never returns.
 *
 * @param ctx per-camera demo context
 * @param flg passed through to read_frame() in the stress-test loop
 */
static void rkisp_routine(demo_context_t *ctx, int flg)
{
    // char sns_entity_name[64];
    rk_aiq_working_mode_t work_mode = RK_AIQ_WORKING_MODE_NORMAL;

    if (ctx->hdrmode == 2)
        work_mode = RK_AIQ_WORKING_MODE_ISP_HDR2;
    else if (ctx->hdrmode == 3)
        work_mode = RK_AIQ_WORKING_MODE_ISP_HDR3;

    printf("work_mode %d\n", work_mode);

    // Resolve the sensor entity bound to our video device. sns_entity_name is
    // a file-scope buffer (declared elsewhere in this file); the sensor's
    // short name is parsed out of it starting at offset 6.
    strcpy(sns_entity_name, rk_aiq_uapi2_sysctl_getBindedSnsEntNmByVd(get_dev_name(ctx)));
    printf("sns_entity_name:%s\n", sns_entity_name);
    sscanf(&sns_entity_name[6], "%s", ctx->sns_name);
    printf("sns_name:%s\n", ctx->sns_name);
    rk_aiq_static_info_t s_info;
    rk_aiq_uapi2_sysctl_getStaticMetas(sns_entity_name, &s_info);
    // check if hdr mode is supported
    if (work_mode != 0)
    {
        bool b_work_mode_supported = false;
        rk_aiq_sensor_info_t *sns_info = &s_info.sensor_info;
        for (int i = 0; i < SUPPORT_FMT_MAX; i++)
            // TODO, should decide the resolution firstly,
            // then check if the mode is supported on this
            // resolution
            if ((sns_info->support_fmt[i].hdr_mode == 5 /*HDR_X2*/ &&
                 work_mode == RK_AIQ_WORKING_MODE_ISP_HDR2) ||
                (sns_info->support_fmt[i].hdr_mode == 6 /*HDR_X3*/ &&
                 work_mode == RK_AIQ_WORKING_MODE_ISP_HDR3))
            {
                b_work_mode_supported = true;
                break;
            }

        if (!b_work_mode_supported)
        {
            printf("\nWARNING !!!"
                   "work mode %d is not supported, changed to normal !!!\n\n",
                   work_mode);
            work_mode = RK_AIQ_WORKING_MODE_NORMAL;
        }
    }

    printf("%s:-------- open output dev -------------\n", get_sensor_name(ctx));
    open_device(ctx);
    if (ctx->pponeframe)
        open_device_pp_oneframe(ctx);

    if (ctx->rkaiq)
    {
        XCamReturn ret = XCAM_RETURN_NO_ERROR;
        rk_aiq_tb_info_t tb_info;
        // NOTE(review): magic is sizeof-2 — looks like a version handshake
        // expected by the AIQ library; confirm against the library header.
        tb_info.magic = sizeof(rk_aiq_tb_info_t) - 2;
        tb_info.is_pre_aiq = false;
        ret = rk_aiq_uapi2_sysctl_preInit_tb_info(sns_entity_name, &tb_info);
        // Pre-select the usage scene (optional; defaults to normal/day). It is
        // used to pick the scene parameters from the IQ json file.
        if (work_mode == RK_AIQ_WORKING_MODE_NORMAL)
            ret = rk_aiq_uapi2_sysctl_preInit_scene(sns_entity_name, "normal", "day");
        else
            ret = rk_aiq_uapi2_sysctl_preInit_scene(sns_entity_name, "hdr", "day");
        if (ret < 0)
            ERR("%s: failed to set %s scene\n",
                get_sensor_name(ctx),
                work_mode == RK_AIQ_WORKING_MODE_NORMAL ? "normal" : "hdr");

        if (strlen(ctx->iqpath))
        {
            if (!ctx->camGroup)
            {
                // Single-camera path: plain sysctl init with the given IQ path.
                printf("{tttddd-0319} %s(%d): .............................666 \n", __FUNCTION__, __LINE__);
                ctx->aiq_ctx = rk_aiq_uapi2_sysctl_init(sns_entity_name, ctx->iqpath, NULL, NULL);
            }
            else
            {
                // create once for multiple cams
                if (ctx->dev_using == 1)
                {
                    char sns_entity_name2[64] = {'\0'};
                    bool has_dev2 = false;
                    if (strlen(ctx->dev_name2))
                    {
                        strcpy(sns_entity_name2, rk_aiq_uapi2_sysctl_getBindedSnsEntNmByVd(ctx->dev_name2));
                        printf("sns_entity_name2:%s\n", sns_entity_name2);
                        // sscanf(&sns_entity_name2[6], "%s", ctx->sns_name);
                        // printf("sns_name2:%s\n", ctx->sns_name);
                        has_dev2 = true;
                    }

                    // Build the camera-group config: 1 or 2 sensors sharing
                    // one IQ directory and an overlap map.
                    rk_aiq_camgroup_instance_cfg_t camgroup_cfg;
                    memset(&camgroup_cfg, 0, sizeof(camgroup_cfg));
                    camgroup_cfg.sns_num = 1;
                    if (has_dev2)
                        camgroup_cfg.sns_num++;
                    camgroup_cfg.sns_ent_nm_array[0] = sns_entity_name;
                    if (has_dev2)
                        camgroup_cfg.sns_ent_nm_array[1] = sns_entity_name2;
                    camgroup_cfg.config_file_dir = ctx->iqpath;
                    camgroup_cfg.overlap_map_file = "srcOverlapMap.bin";
                    ctx->camgroup_ctx = rk_aiq_uapi2_camgroup_create(&camgroup_cfg);
                    if (!ctx->camgroup_ctx)
                    {
                        printf("create camgroup ctx error !\n");
                        exit(1);
                    }

#ifdef CUSTOM_GROUP_AE_DEMO_TEST
                    // Optional: replace the built-in AE with the demo custom AE.
                    rk_aiq_customeAe_cbs_t cbs = {
                        .pfn_ae_init = custom_ae_init,
                        .pfn_ae_run = custom_ae_run,
                        .pfn_ae_ctrl = custom_ae_ctrl,
                        .pfn_ae_exit = custom_ae_exit,
                    };
                    rk_aiq_uapi2_customAE_register((const rk_aiq_sys_ctx_t *)(ctx->camgroup_ctx), &cbs);
                    rk_aiq_uapi2_customAE_enable((const rk_aiq_sys_ctx_t *)(ctx->camgroup_ctx), true);
#endif
#ifdef CUSTOM_GROUP_AWB_DEMO_TEST
                    // Optional: replace the built-in AWB with the demo custom AWB.
                    rk_aiq_customeAwb_cbs_t awb_cbs = {
                        .pfn_awb_init = custom_awb_init,
                        .pfn_awb_run = custom_awb_run,
                        .pfn_awb_ctrl = custom_awb_ctrl,
                        .pfn_awb_exit = custom_awb_exit,
                    };
                    rk_aiq_uapi2_customAWB_register((const rk_aiq_sys_ctx_t *)(ctx->camgroup_ctx), &awb_cbs);
                    rk_aiq_uapi2_customAWB_enable((const rk_aiq_sys_ctx_t *)(ctx->camgroup_ctx), true);
#endif
                }
            }
        }
        else
        {
            // No IQ path given: fall back to the platform-default IQ directory
            // (camera groups require an explicit path).
            if (ctx->camGroup)
            {
                printf("error! should specify iq path !\n");
                exit(1);
            }
#ifndef ANDROID
            printf("{ccckkk} %s(%d): .............................666 \n", __FUNCTION__, __LINE__);
            rk_aiq_uapi2_sysctl_preInit(sns_entity_name, RK_AIQ_WORKING_MODE_NORMAL, "ov5695_TongJu_CHT842-MD.xml");
            ctx->aiq_ctx = rk_aiq_uapi2_sysctl_init(sns_entity_name, "/oem/etc/iqfiles", NULL, NULL);
#else
            printf("{ccckkk} %s(%d): .............................666 \n", __FUNCTION__, __LINE__);
            // ctx->aiq_ctx = rk_aiq_uapi2_sysctl_init(sns_entity_name, "/data/etc/iqfiles", NULL, NULL);
            ctx->aiq_ctx = rk_aiq_uapi2_sysctl_init(sns_entity_name, "/oem/etc/iqfiles", NULL, NULL);
#endif
        }
        if (ctx->aiq_ctx)
        {
            printf("%s:-------- init mipi tx/rx -------------\n", get_sensor_name(ctx));
            if (ctx->writeFileSync)
                rk_aiq_uapi2_debug_captureRawYuvSync(ctx->aiq_ctx, CAPTURE_RAW_AND_YUV_SYNC);
#ifdef CUSTOM_AE_DEMO_TEST
            // ae_reg.stAeExpFunc.pfn_ae_init = ae_init;
            // ae_reg.stAeExpFunc.pfn_ae_run = ae_run;
            // ae_reg.stAeExpFunc.pfn_ae_ctrl = ae_ctrl;
            // ae_reg.stAeExpFunc.pfn_ae_exit = ae_exit;
            // rk_aiq_AELibRegCallBack(ctx->aiq_ctx, &ae_reg, 0);
            rk_aiq_customeAe_cbs_t cbs = {
                .pfn_ae_init = custom_ae_init,
                .pfn_ae_run = custom_ae_run,
                .pfn_ae_ctrl = custom_ae_ctrl,
                .pfn_ae_exit = custom_ae_exit,
            };
            rk_aiq_uapi2_customAE_register(ctx->aiq_ctx, &cbs);
            rk_aiq_uapi2_customAE_enable(ctx->aiq_ctx, true);
#endif
#ifdef CUSTOM_AWB_DEMO_TEST
            rk_aiq_customeAwb_cbs_t awb_cbs = {
                .pfn_awb_init = custom_awb_init,
                .pfn_awb_run = custom_awb_run,
                .pfn_awb_ctrl = custom_awb_ctrl,
                .pfn_awb_exit = custom_awb_exit,
            };
            rk_aiq_uapi2_customAWB_register(ctx->aiq_ctx, &awb_cbs);
            rk_aiq_uapi2_customAWB_enable(ctx->aiq_ctx, true);
#endif
            if (ctx->isOrp)
            {
                // Offline raw playback: translate the fourcc-style string into
                // the AIQ pixel format, defaulting to SBGGR10.
                rk_aiq_raw_prop_t prop;
                if (strcmp(ctx->orpRawFmt, "BA81") == 0)
                    prop.format = RK_PIX_FMT_SBGGR8;
                else if (strcmp(ctx->orpRawFmt, "GBRG") == 0)
                    prop.format = RK_PIX_FMT_SGBRG8;
                else if (strcmp(ctx->orpRawFmt, "RGGB") == 0)
                    prop.format = RK_PIX_FMT_SRGGB8;
                else if (strcmp(ctx->orpRawFmt, "GRBG") == 0)
                    prop.format = RK_PIX_FMT_SGRBG8;
                else if (strcmp(ctx->orpRawFmt, "BG10") == 0)
                    prop.format = RK_PIX_FMT_SBGGR10;
                else if (strcmp(ctx->orpRawFmt, "GB10") == 0)
                    prop.format = RK_PIX_FMT_SGBRG10;
                else if (strcmp(ctx->orpRawFmt, "RG10") == 0)
                    prop.format = RK_PIX_FMT_SRGGB10;
                else if (strcmp(ctx->orpRawFmt, "BA10") == 0)
                    prop.format = RK_PIX_FMT_SGRBG10;
                else if (strcmp(ctx->orpRawFmt, "BG12") == 0)
                    prop.format = RK_PIX_FMT_SBGGR12;
                else if (strcmp(ctx->orpRawFmt, "GB12") == 0)
                    prop.format = RK_PIX_FMT_SGBRG12;
                else if (strcmp(ctx->orpRawFmt, "RG12") == 0)
                    prop.format = RK_PIX_FMT_SRGGB12;
                else if (strcmp(ctx->orpRawFmt, "BA12") == 0)
                    prop.format = RK_PIX_FMT_SGRBG12;
                else if (strcmp(ctx->orpRawFmt, "BG14") == 0)
                    prop.format = RK_PIX_FMT_SBGGR14;
                else if (strcmp(ctx->orpRawFmt, "GB14") == 0)
                    prop.format = RK_PIX_FMT_SGBRG14;
                else if (strcmp(ctx->orpRawFmt, "RG14") == 0)
                    prop.format = RK_PIX_FMT_SRGGB14;
                else if (strcmp(ctx->orpRawFmt, "BA14") == 0)
                    prop.format = RK_PIX_FMT_SGRBG14;
                else
                    prop.format = RK_PIX_FMT_SBGGR10;
                prop.frame_width = ctx->orpRawW;
                prop.frame_height = ctx->orpRawH;
                prop.rawbuf_type = RK_AIQ_RAW_FILE;
                rk_aiq_uapi2_sysctl_prepareRkRaw(ctx->aiq_ctx, prop);
            }
            /*
             * rk_aiq_uapi_setFecEn(ctx->aiq_ctx, true);
             * rk_aiq_uapi_setFecCorrectDirection(ctx->aiq_ctx, FEC_CORRECT_DIRECTION_Y);
             */
#ifdef TEST_MEMS_SENSOR_INTF
            rk_aiq_mems_sensor_intf_t g_rkiio_aiq_api;
            rk_aiq_uapi2_sysctl_regMemsSensorIntf(ctx->aiq_ctx, &g_rkiio_aiq_api);
#endif

#if 0
            test_tuning_api(ctx);
#endif
            XCamReturn ret = rk_aiq_uapi2_sysctl_prepare(ctx->aiq_ctx, ctx->width, ctx->height, work_mode);

            if (ret != XCAM_RETURN_NO_ERROR)
                ERR("%s:rk_aiq_uapi2_sysctl_prepare failed: %d\n", get_sensor_name(ctx), ret);
            else
            {
                ret = rk_aiq_uapi2_setMirrorFlip(ctx->aiq_ctx, false, false, 3);
                // Ignore failure

                if (ctx->isOrp)
                {
                    rk_aiq_uapi2_sysctl_registRkRawCb(ctx->aiq_ctx, release_buffer);
                }
                ret = rk_aiq_uapi2_sysctl_start(ctx->aiq_ctx);

                init_device(ctx);
                if (ctx->pponeframe)
                    init_device_pp_oneframe(ctx);
                if (ctx->ctl_type == TEST_CTL_TYPE_DEFAULT)
                {
                    start_capturing(ctx);
                }
                if (ctx->pponeframe)
                    start_capturing_pp_oneframe(ctx);
                printf("%s:-------- stream on mipi tx/rx -------------\n", get_sensor_name(ctx));

                // Non-default ctl_type: stress-test loop that repeatedly
                // captures 60 frames then stops/restarts (optionally also
                // deinit/init) the AIQ engine. Never exits.
                if (ctx->ctl_type != TEST_CTL_TYPE_DEFAULT)
                {
                restart:
                    static int test_ctl_cnts = 0;
                    ctx->frame_count = 60;
                    start_capturing(ctx);
                    while ((ctx->frame_count-- > 0))
                        read_frame(ctx, flg);
                    stop_capturing(ctx);
                    printf("+++++++ TEST SYSCTL COUNTS %d ++++++++++++ \n", test_ctl_cnts++);
                    printf("aiq stop .....\n");
                    rk_aiq_uapi2_sysctl_stop(ctx->aiq_ctx, false);
                    if (ctx->ctl_type == TEST_CTL_TYPE_REPEAT_INIT_PREPARE_START_STOP_DEINIT)
                    {
                        printf("aiq deinit .....\n");
                        rk_aiq_uapi2_sysctl_deinit(ctx->aiq_ctx);
                        printf("aiq init .....\n");
                        // NOTE(review): each cycle toggles between normal and
                        // HDR scenes — looks deliberate for coverage; confirm.
                        if (work_mode == RK_AIQ_WORKING_MODE_NORMAL)
                        {
                            ret = rk_aiq_uapi2_sysctl_preInit_scene(sns_entity_name, "normal", "day");
                            if (ctx->hdrmode == 2)
                                work_mode = RK_AIQ_WORKING_MODE_ISP_HDR2;
                            else if (ctx->hdrmode == 3)
                                work_mode = RK_AIQ_WORKING_MODE_ISP_HDR3;
                        }
                        else
                        {
                            ret = rk_aiq_uapi2_sysctl_preInit_scene(sns_entity_name, "hdr", "day");
                            work_mode = RK_AIQ_WORKING_MODE_NORMAL;
                        }
                        if (ret < 0)
                            ERR("%s: failed to set %s scene\n",
                                get_sensor_name(ctx),
                                work_mode == RK_AIQ_WORKING_MODE_NORMAL ? "normal" : "hdr");
                        printf("{tttdddd} %s(%d): .............................666 \n", __FUNCTION__, __LINE__);
                        ctx->aiq_ctx = rk_aiq_uapi2_sysctl_init(sns_entity_name, ctx->iqpath, NULL, NULL);
                        printf("aiq prepare .....\n");
                        XCamReturn ret = rk_aiq_uapi2_sysctl_prepare(ctx->aiq_ctx, ctx->width, ctx->height, work_mode);
                    }
                    else if (ctx->ctl_type == TEST_CTL_TYPE_REPEAT_PREPARE_START_STOP)
                    {
                        printf("aiq prepare .....\n");
                        XCamReturn ret = rk_aiq_uapi2_sysctl_prepare(ctx->aiq_ctx, ctx->width, ctx->height, work_mode);
                    }
                    else if (ctx->ctl_type == TEST_CTL_TYPE_REPEAT_START_STOP)
                    {
                        // do nothing
                    }
                    printf("aiq start .....\n");
                    ret = rk_aiq_uapi2_sysctl_start(ctx->aiq_ctx);
                    printf("aiq restart .....\n");
                    goto restart;
                }
            }
        }
        else if (ctx->camgroup_ctx)
        {
            // only do once for cam group
            if (ctx->dev_using == 1)
            {
                XCamReturn ret = rk_aiq_uapi2_camgroup_prepare(ctx->camgroup_ctx, work_mode);

                if (ret != XCAM_RETURN_NO_ERROR)
                    ERR("%s:rk_aiq_uapi2_camgroup_prepare failed: %d\n", get_sensor_name(ctx), ret);
                else
                {

                    ret = rk_aiq_uapi2_camgroup_start(ctx->camgroup_ctx);
                }
            }
            init_device(ctx);
            start_capturing(ctx);
        }
    }
    else
    {
        // AIQ disabled: bare V4L2 capture path.
        init_device(ctx);
        if (ctx->pponeframe)
            init_device_pp_oneframe(ctx);
        start_capturing(ctx);
        if (ctx->pponeframe)
            start_capturing_pp_oneframe(ctx);
    }
}

/**
 * Initialize the SmartIr (day/night) runtime against the AIQ context and
 * push the IR thresholds into it, then mark the module as started.
 * A NULL aiq_ctx is only warned about — initialization proceeds regardless.
 */
static void sample_smartIr_start(demo_context_t *ctx)
{
    if (ctx->aiq_ctx == NULL)
        printf("{tttddd} %s(%d): ...........aiq_ctx==NULL..................666 \n", __FUNCTION__, __LINE__);

    // Bring up the SmartIr environment, then load our day/night parameters.
    ctx->ir_ctx = rk_smart_ir_init(ctx->aiq_ctx);
    load_ir_configs(ctx);

    ctx->tquit = false;
    ctx->started = true;
}

/**
 * Install signal_handle for SIGINT / SIGQUIT / SIGTERM.
 *
 * On POSIX the three signals are first blocked in the calling thread (the
 * mask is inherited by threads created afterwards), then the handler is
 * installed for each signal unless it was explicitly set to SIG_IGN.
 * The previously triplicated query/install sequence is collapsed into one
 * loop over the signal list; per-signal behavior is unchanged.
 */
static void isp_signal_process()
{
#ifdef _WIN32
    signal(SIGINT, signal_handle);
    signal(SIGQUIT, signal_handle);
    signal(SIGTERM, signal_handle);
#else
    sigset_t mask;
    sigemptyset(&mask);
    sigaddset(&mask, SIGINT);
    sigaddset(&mask, SIGTERM);
    sigaddset(&mask, SIGQUIT);
    pthread_sigmask(SIG_BLOCK, &mask, NULL);

    struct sigaction new_action, old_action;
    new_action.sa_handler = signal_handle;
    sigemptyset(&new_action.sa_mask);
    new_action.sa_flags = 0;

    // Respect an inherited SIG_IGN disposition (e.g. when run via nohup).
    const int handled_signals[] = {SIGINT, SIGQUIT, SIGTERM};
    for (int sig : handled_signals)
    {
        sigaction(sig, NULL, &old_action);
        if (old_action.sa_handler != SIG_IGN)
            sigaction(sig, &new_action, NULL);
    }
#endif
}

/**
 * Initialize the local display path when DRM output is enabled and the user
 * requested VOP output. With RGA support and two camera devices configured,
 * the RGA composition path (display_init) is used; otherwise the plain DRM
 * display (initDrmDsp) is used.
 *
 * Note: the "{" / "}" pairing below is intentionally split across the
 * ISPDEMO_ENABLE_RGA #if/#else so exactly one opening brace is compiled in —
 * edit with care.
 */
static void isp_demo_enable()
{
#if ISPDEMO_ENABLE_DRM
    if (main_ctx.vop)
    {

#if ISPDEMO_ENABLE_RGA
        if (strlen(main_ctx.dev_name) && strlen(main_ctx.dev_name2))
        {
            if (display_init(720, 1280) < 0)
            {
                printf("display_init failed\n");
            }
        }
        else
        {
#else
        {
#endif
            if (initDrmDsp() < 0)
            {
                printf("initDrmDsp failed\n");
            }
        }
    }
#endif
}

/**
 * Tear down the ISP/demo pipeline: stop offline-raw playback (if running),
 * wait for the 3A-stats test thread to acknowledge shutdown, deinitialize
 * the main context, and release the DRM display resources.
 *
 * Fix: the _quit_done wait previously busy-spun calling printf with no
 * sleep, pegging a core and flooding stdout; it now sleeps between polls,
 * matching the style of the orp wait loop above it.
 */
static void taiic_isp_release()
{
    // Ask the offline-raw playback loop to stop, then wait for its ack.
    if (main_ctx.isOrp)
    {
        main_ctx.orpStop = true;
        while (!main_ctx.orpStopped)
        {
            printf("wait orp stopped ... \n");
            usleep(500000);
        }
    }

#ifdef TEST_BLOCKED_STATS_FUNC
    _if_quit = true;
    // Poll for the stats thread's acknowledgement with a short sleep
    // instead of busy-spinning.
    while (!_quit_done)
    {
        printf("wait quit done !\n");
        usleep(100000);
    }
#endif
    deinit(&main_ctx);

#if ISPDEMO_ENABLE_DRM
    // display_init() is only called for the dual-camera configuration,
    // so only tear it down in that case.
    if (strlen(main_ctx.dev_name) && strlen(main_ctx.dev_name2))
    {
        display_exit();
    }
    deInitDrmDsp();
#endif
}

/**
 * Thread: each time start_vi is signalled, capture one burst of lip images
 * from the ISP (model-frame count plus warm-up frames), then clear the flag
 * and go back to waiting. Runs until the process exits.
 *
 * @param args demo_context_t* for the camera being read
 */
static void *taiic_isp_image_capture_func(void *args)
{
    demo_context_t *ctx = (demo_context_t *)args;
    int isp_img_cnt = 0;
    // Loop forever, grabbing one batch of ISP frames per trigger.
    while (1)
    {
        // NOTE(review): mtx_vi stays held for the whole capture burst —
        // presumably to serialize triggers; confirm against the signaller.
        std::unique_lock<std::mutex> lock(mtx_vi);
        // Block until start_vi becomes true.
        cv_vi.wait(lock, []()
                   { return start_vi; });

        isp_img_cnt = 0;
        is_model_run = RK_TRUE;
        uart_main_func(1, 1, 0, 0); // SPK beeps once: lip-image capture starts
        // Read the model's frame count plus a few initial frames to skip.
        while (isp_img_cnt < (ORI_MODEL_C + SKIP_ISP_FRAMES))
        {
            read_frame(ctx, isp_img_cnt);
            isp_img_cnt++;
        }

        start_vi = false;
    }
    pthread_exit(NULL); // unreachable; kept for pthread convention
}

/*------------------sample_smartIr end------------------------------*/
/*---------------------------------------------------------------------
        Global variables / macros / structs / function declarations
-----------------------------------------------------------------------*/
static void sigterm_handler(int sig);
int cnt = 0; // debug counter of key-callback invocations

LM_TOOLKIT_MODEL_CTX_S *lm_ctx; // model-inference context (lip landmark model)

// Model input (raw Y plane) and resized/cropped output buffers, NHWC layout.
RK_U8 lm_input_data[LM_IMG_BATCH][LM_IMG_HEIGHT][LM_IMG_WIDTH][LM_IMG_CHANNEL]; //
RK_U8 lm_output_data[RESIZE_IMG_BATCH][RESIZE_IMG_HEIGHT][RESIZE_IMG_WIDTH][RESIZE_IMG_CHANNEL];

/*----------------------------------------------------------------------------------*/
/*------------------------------------------------function-----------------------------------------------------------*/
// Battery information query callback.
/**
 * Called periodically with the current battery reading; derives the charging
 * state from the capacity delta since the previous poll and, when state
 * reporting is enabled, forwards level/voltage upstream over 485 (wired) or
 * UART-to-SPP (Bluetooth).
 *
 * Fixes: print power_vol/1000 with %ld (it is long; %d was undefined
 * behavior where long is wider than int), and classify the charge state by
 * the sign of the delta instead of == ±1 so a jump of more than one percent
 * between polls still updates the state.
 *
 * @param power_cap battery capacity, percent
 * @param power_vol battery voltage (divided by 1000 to get mV)
 */
static void power_callback(int power_cap, long power_vol) // battery info query callback
{
    printf("current:power is %d,voltage=%ldmV\n", power_cap, power_vol / 1000);
    // Charging-state detection: compare with the capacity from the last poll.
    if (cap_query.count == 0)
    {
        // First sample: just record the baseline.
        cap_query.last_cap = power_cap;
        printf("初始电量:%d\n", power_cap);

        cap_query.init_cap = power_cap;
        cap_query.init_vol = power_vol / 1000;
    }
    else
    {
        /*------------------ charging state decision -------------------*/
        cap_query.compCap = power_cap - cap_query.last_cap;
        if (cap_query.compCap < 0)
        {
            cap_query.charge_state = 0; // discharging (not on charger)
        }
        else if (cap_query.compCap > 0)
        {
            cap_query.charge_state = 1; // charging
        }
        else
        {
            cap_query.charge_state = 2; // no change: treated as fully charged
        }
        cap_query.last_cap = power_cap;
    }

    // Battery-level reporting.
    if (state_send_flag) // true:send
    {
        /*--------------- wired 485 / Bluetooth transmission ---------------*/
        if (mode_state->audio_flag == AUDIO_CONN) // wired connection
        {
            bat_send_func_485(cap_query.charge_state, power_vol / 1000, power_cap);
        }
        else if (mode_state->audio_flag == AUDIO_NO_CONN) // Bluetooth: send battery info to the daughter board over UART
        {
            uart_main_func(6, cap_query.charge_state, power_vol / 1000, power_cap); // system(cap_query.bat_lable.c_str());
        }
    }

    cap_query.count = 1; // only the first sample is treated as the baseline
}

/**
 * Key-event dispatcher: reacts to PTT, mode (normal), mode1 (lip/special)
 * and audio-cable state changes by switching audio routing, starting or
 * stopping the stats/grip-control worker threads, and playing the matching
 * prompt tones over UART. key_name == 0 corresponds to audio-cable mode.
 *
 * @param old_state previous key state (unused)
 * @param key_name  which key/event fired
 * @param key_state new state of that key
 */
static void app_key_callback(TAIIC_KEY_STATE old_state, TAIIC_KEY_NAME key_name, TAIIC_KEY_STATE key_state)
{
    // Dispatch on key_name to run the matching function.
    RK_LOGI("key_name, key_state,count is [%d, %d,%d]\n", key_name, key_state, cnt++);

    switch (key_name)
    {
    case TAIIC_PTT:
        mode_state->ptt_flag = key_state;
        if (mode_state->ptt_flag == PTT_DOWN)
        {
            // Special (lip) mode only, and only when the model is idle.
            if ((mode_state->mode1_flag == MODE_DOWN) && (!is_model_run))
            {
                printf("PTT DOWN && lip mode\n");
                if (mode_state->audio_flag == AUDIO_NO_CONN)
                {
                    uart_main_func(3, PTT_DOWN, 0, 0); // send the PTT-down key value to the host
                }
                // Host SPP receives key value 0, triggering audio capture.
                {
                    std::lock_guard<std::mutex> lock(mtx_ai);
                    ai_start = true; // flag set: allow the AI path to start
                }
                cv_ai.notify_one(); // wake the waiting thread; condition met
            }
        }
        break;

    case TAIIC_MODE: // normal mode
        // Stop the state-judgement (stats) thread.
        start_stats = false;
        cv_stats.notify_one();
        sleep(1);

        // Stop the grip-control thread.
        start_woba = false;
        cv_woba.notify_one();

        mode_state->mode_flag = key_state;
        if (mode_state->mode_flag == MODE_DOWN)
        {
            mtx_uart.lock();
            uart_main_func(12, 1, 0, 0); // normal mode: call uplink data is sent continuously
            mtx_uart.unlock();

            current_mode_state = NORMAL_MODE; // 96
            if (mode_state->audio_flag == AUDIO_CONN)
            {
                mtx_uart.lock();
                uart_main_func(1, 4, 0, 0); // system("uart_commid 1 4"); // wired normal-mode prompt tone
                mtx_uart.unlock();

                mtx_uart.lock();
                uart_main_func(2, 1, 0, 0); // switch to wired comms; start wired downlink playback
                mtx_uart.unlock();
            }
            if (mode_state->audio_flag == AUDIO_NO_CONN)
            {
                mtx_uart.lock();
                uart_main_func(1, 6, 0, 0); // system("uart_commid 1 6"); // wireless normal-mode prompt tone
                mtx_uart.unlock();

                mtx_uart.lock();
                uart_main_func(2, 2, 0, 0); // switch to Bluetooth comms; start Bluetooth
                mtx_uart.unlock();
            }
        }
        break;

    case TAIIC_MODE1: // lip mode
        // Wake the state-judgement (stats) thread.
        start_stats = true;
        cv_stats.notify_one();
        sleep(1);

        mode_state->mode1_flag = key_state;
        if (mode_state->mode1_flag == MODE_DOWN)
        {
            mtx_uart.lock();
            uart_main_func(12, 0, 0, 0); // special mode: call uplink is not sent
            mtx_uart.unlock();

            // reset vi/ai flag
            vi_end = 0;
            ai_end = 0;
            img_up_end = 0;

            current_mode_state = LIP_MODE; // 97

            if (mode_state->audio_flag == AUDIO_CONN)
            {
                // Wake the grip-control thread.
                start_woba = true;
                cv_woba.notify_one();

                mtx_uart.lock();
                uart_main_func(1, 5, 0, 0); //("uart_commid 1 5");  // wired special-mode prompt tone
                mtx_uart.unlock();
            }
            if (mode_state->audio_flag == AUDIO_NO_CONN)
            {
                // Stop the grip-control thread.
                start_woba = false;
                cv_woba.notify_one();

                mtx_uart.lock();
                uart_main_func(1, 7, 0, 0); // system("uart_commid 1 7"); // wireless special-mode prompt tone
                mtx_uart.unlock();
            }
        }
        break;

    case TAIIC_AUDIO:
        mode_state->audio_flag = key_state;
        // Wired.
        if (mode_state->audio_flag == AUDIO_CONN)
        {
            mtx_uart.lock();
            uart_main_func(2, 1, 0, 0); // switch to wired comms; start wired downlink playback
            mtx_uart.unlock();

            if (mode_state->mode1_flag == MODE_DOWN)
            {
                // Wake the grip-control thread.
                start_woba = true;
                cv_woba.notify_one();

                mtx_uart.lock();
                uart_main_func(1, 5, 0, 0); // system("uart_commid 1 5"); // enter wired special mode
                mtx_uart.unlock();

                mtx_uart.lock();
                uart_main_func(12, 0, 0, 0); // special mode: call uplink is not sent
                mtx_uart.unlock();

                // reset vi/ai flag
                vi_end = 0;
                ai_end = 0;
                img_up_end = 0;
            }
            else if (mode_state->mode_flag == MODE_DOWN)
            {
                // Stop the grip-control thread.
                start_woba = false;
                cv_woba.notify_one();

                mtx_uart.lock();
                uart_main_func(1, 4, 0, 0); // system("uart_commid 1 4"); // enter wired normal mode
                mtx_uart.unlock();

                mtx_uart.lock();
                uart_main_func(12, 1, 0, 0); // normal mode: call uplink data is sent continuously
                mtx_uart.unlock();
            }
        }
        // Bluetooth.
        if (mode_state->audio_flag == AUDIO_NO_CONN)
        {
            // Stop the grip-control thread.
            start_woba = false;
            cv_woba.notify_one();

            mtx_uart.lock();
            uart_main_func(2, 2, 0, 0); // switch to Bluetooth comms; start Bluetooth
            mtx_uart.unlock();

            if (mode_state->mode1_flag == MODE_DOWN)
            {
                mtx_uart.lock();
                uart_main_func(1, 7, 0, 0); // system("uart_commid 1 7"); // enter wireless special mode
                mtx_uart.unlock();

                mtx_uart.lock();
                uart_main_func(12, 0, 0, 0); // special mode: call uplink is not sent
                mtx_uart.unlock();
                // reset vi/ai flag
                vi_end = 0;
                ai_end = 0;
                img_up_end = 0;
            }
            if (mode_state->mode_flag == MODE_DOWN)
            {
                mtx_uart.lock();
                uart_main_func(1, 6, 0, 0); // system("uart_commid 1 6"); // enter wireless normal mode
                mtx_uart.unlock();

                mtx_uart.lock();
                uart_main_func(12, 1, 0, 0); // normal mode: call uplink data is sent continuously
                mtx_uart.unlock();
            }
        }
        break;
    }
}

// 蓝牙SPP/485发送第一帧唇图线程,传输图像大小:112*112=12544bytes
void *spp485_send_img_thread_func_112_112(void *args)
{
    // 停止状态和电池电量的上传
    state_send_flag = false;
    cv_state.notify_one();

    RK_U8 *data = (RK_U8 *)args;
    memcpy(first_img_data_y, data, SEND_IMG_Y_SIZE); // 将第一帧数据拷贝到first_img_data_y数组中

    if (mode_state->audio_flag == AUDIO_NO_CONN) // 蓝牙模式第一帧图像通过SPP发往上位机
    {
        // 上传图像数据之前先上传图像信息
        uart_main_func(7, 0, 0, 0);
        std::this_thread::sleep_for(std::chrono::milliseconds(200)); // 延时200ms=0.2s

        // 分包数据处理
        for (int i = 0; i < SEND_PKG_CNT_Y; i++) // 13:0-12 pkgs
        {
            memset(per_pkg_data, 0, PER_PKG_IMG_SIZE); // 1024

            if (i < (SEND_PKG_CNT_Y - 1))
            {
                for (int j = 0; j < PER_PKG_IMG_SIZE; j++) // 1024
                {
                    per_pkg_data[j] = first_img_data_y[i * PER_PKG_IMG_SIZE + j];
                }
            }
            else if (i == (SEND_PKG_CNT_Y - 1))
            {
                int tmpSize = SEND_IMG_Y_SIZE - i * PER_PKG_IMG_SIZE;
                for (int j = 0; j < tmpSize; j++)
                {
                    per_pkg_data[j] = first_img_data_y[i * PER_PKG_IMG_SIZE + j];
                }
            }
            // 往上位机发送每包数据
            image_data_uart_upload_cmd(i + 1, PER_PKG_IMG_SIZE, per_pkg_data);
            std::this_thread::sleep_for(std::chrono::milliseconds(200)); // 0.2s
        }
    }else if (mode_state->audio_flag == AUDIO_CONN){  //有线模式通过485发送第一帧唇图
        // 上传图像数据之前先上传图像信息
        uart_main_func(7, 1, 0, 0);
        std::this_thread::sleep_for(std::chrono::milliseconds(200));

        // 有线485分包处理，每包1024byte
        for (int i = 0; i < SEND_PKG_CNT_Y; i++) // 13:0-12 pkgs
        {
            memset(per_pkg_data, 0, PER_PKG_IMG_SIZE); // 1024
            if (i < (SEND_PKG_CNT_Y - 1))
            {
                for (int j = 0; j < PER_PKG_IMG_SIZE; j++) // 1024
                {
                    per_pkg_data[j] = first_img_data_y[i * PER_PKG_IMG_SIZE + j];
                }
            }
            else if (i == (SEND_PKG_CNT_Y - 1))
            {
                int tmpSize = SEND_IMG_Y_SIZE - i * PER_PKG_IMG_SIZE;
                for (int j = 0; j < tmpSize; j++)
                {
                    per_pkg_data[j] = first_img_data_y[i * PER_PKG_IMG_SIZE + j];
                }
            }

            // 485 send image data
            image_data_send_func_485(i + 1, PER_PKG_IMG_SIZE, per_pkg_data);
            std::this_thread::sleep_for(std::chrono::milliseconds(200)); // 0.2s
        }
    }
    // 图像上传完成，开始状态和电池电量的上传
    img_up_end = 1;

    // 图像上传完成，开始状态和电池电量的上传
    state_send_flag = true;
    cv_state.notify_one();

    pthread_exit(NULL);
    return NULL;
}

// data 一帧图像的指针
// wcount 当前图像的顺序
// data   一帧图像的指针 — pointer to one captured frame; only the leading
//        ORI_WIDTH x ORI_HEIGHT bytes (the NV12 Y plane) are consumed below.
// wcount 当前图像的顺序 — zero-based frame index within the capture burst.
static void vi_frame_data_cback(void *data, RK_U32 wcount)
{
    printf("====ISP VI DATA IS %d==\n", wcount);
    // On the very first frame, spawn the thread that uploads the first lip
    // image to the host (SPP or 485 depending on the audio mode).
    if (wcount == 0)
    {
        pthread_create(&tid_spp_send_img, NULL, spp485_send_img_thread_func_112_112, data);
        // pthread_join(tid_spp_send_img, NULL);
    }
    // NV12 -> GRAY: copy just the Y plane into the landmark model input.
    memcpy(lm_input_data, (RK_U8 *)data, sizeof(RK_U8) * ORI_HEIGHT * ORI_WIDTH);

    // Run the lip-landmark model to locate the mouth ROI in this frame.
    lm_rknn_toolkit_data_refresh(lm_ctx, &lm_input_data[0][0][0][0]);
    int ret = rknn_run(lm_ctx->context, NULL);
    LM_RESULT_S result = lm_toolkit_result_parameter(lm_ctx);
    RK_LOGI("%4d frame data result is [%d, %d, %d, %d]", wcount, result.left_x, result.left_y, result.right_x, result.right_y);

    if (result.right_x > result.left_x && result.right_y > result.left_y)
    {
        // Valid ROI: crop to the detected mouth box, then resize.
        taiic_y_resize_crop_resize(&lm_input_data[0][0][0][0], &lm_output_data[0][0][0][0],
                                   LM_IMG_WIDTH, LM_IMG_HEIGHT,
                                   BEFORE_CROP, BEFORE_CROP,
                                   result.left_x, result.left_y,
                                   result.right_x, result.right_y,
                                   RESIZE_IMG_WIDTH, RESIZE_IMG_HEIGHT);
    }
    else
    {
        // Degenerate / missing ROI: fall back to resizing the full frame.
        taiic_y_resize(&lm_input_data[0][0][0][0], &lm_output_data[0][0][0][0],
                       LM_IMG_WIDTH, LM_IMG_HEIGHT,
                       RESIZE_IMG_WIDTH, RESIZE_IMG_HEIGHT);
    }

    // Scatter the resized patch into the AVSR video input tensor.
    // Frames appear to be tiled row-major on a RESHAPE_H x RESHAPE_W grid
    // per channel — TODO confirm against the model's expected layout.
    RK_U8 *pTmpData = &lm_output_data[0][0][0][0];
    int idx = wcount % (RESHAPE_H * RESHAPE_W);  // tile index within channel
    int cnow = wcount / (RESHAPE_H * RESHAPE_W); // 通道数 (channel)
    int hnow = idx / RESHAPE_W;                  // 高 (tile row)
    // BUGFIX: was `idx / RESHAPE_H`, which maps several frames onto the same
    // tile while leaving others unwritten; the row-major column is idx % RESHAPE_W.
    int wnow = idx % RESHAPE_W;                  // 宽 (tile column)

    RK_LOGD("==data location  c is %d, h is %d, w is %d===\n", cnow, hnow, wnow);
    for (int i = 0; i < RESIZE_PIC_H; i++)
    {
        for (int j = 0; j < RESIZE_PIC_W; j++)
        {
            avsr_video_input_data[0][RESIZE_PIC_H * hnow + i][RESIZE_PIC_W * wnow + j][cnow] = *pTmpData;
            pTmpData += 1;
        }
    }

    // Last frame of the burst: signal that video capture is complete.
    if (wcount == (ORI_MODEL_C - 1))
    {
        vi_end = 1; // vi end
    }
}
// youxian/bt/C/T判断
// 有线/bt/C/T判断 — initial mode detection.
// Reads the audio-jack and mode GPIO values from sysfs to initialize *ctx,
// plays the matching prompt tones, and (for special mode) wakes the
// state-upload / grip-control helper threads. Called once from main() with
// the global mode_state as ctx.
void init_mode_state_func(TAIIC_MODE_STATE_CTX_S *ctx)
{
    // Initialize all flags to "unknown".
    ctx->mode_flag = -1;
    ctx->audio_flag = -1;
    ctx->ptt_flag = -1;
    ctx->mode1_flag = -1;

    // Wired vs Bluetooth: read the audio-jack GPIO.
    // BUGFIX: the previous `new char[10]` was immediately overwritten by the
    // concatenate() result and leaked. concatenate()'s own return value is
    // still never freed here — TODO confirm its ownership contract.
    char *cmd_linx_mode = concatenate("cat ", SYSFS_GPIO_AUDIO);
    ExecCmd(cmd_linx_mode, mode_rst);
    init_mode_value = std::stoi(mode_rst); // NOTE(review): throws if mode_rst is non-numeric
    ctx->audio_flag = init_mode_value;
    if (ctx->audio_flag == AUDIO_CONN) // audio cable inserted: default to wired
    {
        printf("初始状态:有线模式\n");
    }
    else if (ctx->audio_flag == AUDIO_NO_CONN) // no audio cable: Bluetooth
    {
        printf("初始状态:蓝牙模式\n");

        mtx_uart.lock();
        uart_main_func(2, 2, 0, 0); // switch to Bluetooth link, start BT
        mtx_uart.unlock();
    }
    // Normal vs special mode: read the mode GPIO.
    cmd_linx_mode = concatenate("cat ", SYSFS_GPIO_MODE);
    ExecCmd(cmd_linx_mode, mode_rst);
    init_mode_value = std::stoi(mode_rst);
    ctx->mode_flag = init_mode_value;
    if (ctx->mode_flag == MODE_DOWN) // normal mode
    {
        printf("初始状态:常规模式\n");
        current_mode_state = NORMAL_MODE; // 96

        if (ctx->audio_flag == AUDIO_CONN)
        {
            mtx_uart.lock();
            uart_main_func(1, 4, 0, 0); // wired-normal-mode prompt tone
            mtx_uart.unlock();
        }
        else if (ctx->audio_flag == AUDIO_NO_CONN)
        {
            mtx_uart.lock();
            uart_main_func(1, 6, 0, 0); // wireless-normal-mode prompt tone
            mtx_uart.unlock();
        }
    }
    else if (ctx->mode_flag == MODE_UP) // mode key released: check the secondary mode GPIO
    {
        cmd_linx_mode = concatenate("cat ", SYSFS_GPIO_MODE1);
        ExecCmd(cmd_linx_mode, mode_rst);
        init_mode_value = std::stoi(mode_rst);
        ctx->mode1_flag = init_mode_value;
        if (ctx->mode1_flag == MODE_DOWN) // special (lip) mode
        {
            printf("初始状态:特殊模式\n");

            mtx_uart.lock();
            uart_main_func(12, 0, 0, 0); // special mode: suppress call uplink
            mtx_uart.unlock();

            // First entry into special mode: wake the stats thread.
            start_stats = true;
            cv_stats.notify_one();
            sleep(1);

            current_mode_state = LIP_MODE; // 97

            // CONSISTENCY FIX: was `mode_state->audio_flag`; use the ctx
            // parameter like the rest of this function (same object today,
            // since main() passes the global mode_state in).
            if (ctx->audio_flag == AUDIO_CONN) // wired special
            {
                mtx_uart.lock();
                uart_main_func(1, 5, 0, 0); // wired-special-mode prompt tone
                mtx_uart.unlock();

                // Enable the grip (woba) control thread.
                start_woba = true;
                cv_woba.notify_one(); // wake the waiting thread
            }
            else if (ctx->audio_flag == AUDIO_NO_CONN)
            {
                mtx_uart.lock();
                uart_main_func(1, 7, 0, 0); // wireless-special-mode prompt tone
                mtx_uart.unlock();
            }
        }
        else if (ctx->mode1_flag == MODE_UP)
        {
            printf("特殊模式键 释放状态\n");
        }
    }

    mtx_uart.lock();
    uart_main_func(1, 3, 0, 0); // device-initialization-complete prompt tone
    mtx_uart.unlock();
}

// 模式状态线程函数，每隔3秒发送一次
// 模式状态线程函数，每隔3秒发送一次
// Periodically uploads current_mode_state every 3 s once state_send_flag has
// been raised. The flag is never cleared here, so after the first
// notification the loop free-runs on its 3 s cadence. Transport depends on
// the audio mode: SPP over UART for Bluetooth, 485 for wired.
void *mode_state_upload_thread_func(void *arg)
{
    while (1)
    {
        {
            // Hold mtx_state only while evaluating the predicate.
            // BUGFIX: the lock was previously held across the UART I/O and
            // the 3 s sleep, needlessly blocking any notifier that locks
            // mtx_state for the whole period.
            std::unique_lock<std::mutex> lock(mtx_state);
            cv_state.wait(lock, []()
                          { return state_send_flag; });
        }

        if (mode_state->audio_flag == AUDIO_NO_CONN) // Bluetooth link
        {
            mtx_uart.lock();
            uart_main_func(8, current_mode_state, 0, 0); // Bluetooth:SPP
            mtx_uart.unlock();
        }
        else if (mode_state->audio_flag == AUDIO_CONN) // wired link
        {
            mode_state_send_func_485(current_mode_state); // 485 send
        }
        sleep(3); // pause 3 seconds between uploads
    }
    pthread_exit(NULL); // not reached
}

// AVSR inference entry: invoked once both video (vi) and audio (ai) capture
// have finished (the parameter values are unused; they satisfy the callback
// signature). Computes MFCC features from the recorded PCM, runs the
// two-input AVSR model, and forwards the recognized label to the host over
// 485 (wired) or UART/SPP (Bluetooth).
static void aivi_save_end(int vi, int ai) // avsr
{
    mtx_uart.lock();
    sleep(1);
    uart_main_func(1, 8, 0, 0); // inference-start prompt tone
    std::this_thread::sleep_for(std::chrono::milliseconds(100));
    mtx_uart.unlock();

    // wavFp is a long-lived global stream: clear stale error/EOF state and
    // close it after use. BUGFIX: it was never closed, so every call after
    // the first failed to re-open the PCM file.
    wavFp.clear();
    wavFp.open(air_pcm_path);
    if (wavFp.is_open())
    {
        mfccComputer.process(wavFp, mfcFp, &avsr_audio_input_data[0][0][0][0]);
        wavFp.close();
    }
    else
    {
        // Skip MFCC on failure instead of feeding a bad stream to process().
        std::cerr << "Unable to open input file: " << air_pcm_path << std::endl;
    }

    // The AVSR model reads directly from the static staging buffers.
    // BUGFIX: the previous `new unsigned char[...]` allocations were
    // immediately overwritten by these assignments and leaked every call.
    unsigned char *avsr_input_data[avsr_ctx->io_num.n_input];
    avsr_input_data[0] = &avsr_video_input_data[0][0][0][0]; // video tensor
    avsr_input_data[1] = &avsr_audio_input_data[0][0][0][0]; // audio tensor

    RK_LOGI("===MFCC END==\n");
    avsr_rknn_toolkit_data_refresh(avsr_ctx, avsr_input_data);
    int ret = rknn_run(avsr_ctx->context, NULL);
    if (ret != 0)
    {
        printf("===avsr rknn_run failed: %d===\n", ret);
    }
    MODEL_RESULT_S avsr_result = avsr_rknn_toolkit_result_int8(avsr_ctx);
    is_model_run = RK_FALSE; // allow the next PTT press to start a capture

    printf("===result label is %d, prob is %f====\n", avsr_result.label, avsr_result.prob);

    mtx_uart.lock();
    std::this_thread::sleep_for(std::chrono::milliseconds(100));
    uart_main_func(1, 9, 0, 0); // inference-complete prompt tone
    mtx_uart.unlock();

    // Forward the recognized label to the host.
    if (mode_state->audio_flag == AUDIO_CONN) // wired: 485
    {
        lip_label_send_func_485(avsr_result.label);
    }
    else if (mode_state->audio_flag == AUDIO_NO_CONN) // Bluetooth: label goes to the sub-board over UART
    {
        mtx_uart.lock();
        uart_main_func(9, avsr_result.label, 0, 0);
        mtx_uart.unlock();
    }

    // Reset capture/upload state for the next command.
    vi_end = 0;
    ai_end = 0;
    img_up_end = 0;

    // Resume the periodic state/battery upload.
    state_send_flag = true;
    cv_state.notify_one();

    mtx_uart.lock();
    std::this_thread::sleep_for(std::chrono::milliseconds(100));
    uart_main_func(1, 11, 0, 0); // "ready for next command" prompt tone
    mtx_uart.unlock();
}

// 485youxian recv function,WOBA PTT control lip capture
void *rs_recv(void *arg)
{
    uint8_t recv_buf[BUF_RECV_SIZE] = {0}; // 第一个元素被初始化为0，其余元素默认为0
    int rd = 0;
    int fs_sel;
    fd_set fs_read;
    struct timeval respTime;
    int fd = open(UART_DEV_NAME, O_RDWR | O_NOCTTY | O_NDELAY); // 打开串口
    UART0_INIT(fd);

    while (1)
    {
        std::unique_lock<std::mutex> lock(mtx_woba);
        // 等待start_woba变为true
        cv_woba.wait(lock, []()
                     { return start_woba; });
        printf("woba:start 485:youxian+lip\n");
        // receive buffer
        FD_ZERO(&fs_read);
        FD_SET(fd, &fs_read);
        respTime.tv_sec = 10;
        respTime.tv_usec = 0;

        fs_sel = select(FD_SETSIZE, &fs_read, NULL, NULL, &respTime);
        if (fs_sel == 0)
        {
            continue;
        }
        memset(recv_buf, 4, sizeof(recv_buf)); // recv_buf的所有元素都设置为4
        rd = read(fd, recv_buf, BUF_RECV_SIZE);

        printf("uint8_t recv_buffer:%d\n", recv_buf[0]);

        // WOBA PTT control lip capture
        if (recv_buf[0] == WOBA_KEY_DOWN) // 0: woba ptt down
        {
            if ((!is_model_run) && (mode_state->mode1_flag == MODE_DOWN))
            {
                printf("PTT DOWN && lip mode\n");

                // SPP收到键值0，触发音频采集
                {
                    std::lock_guard<std::mutex> lock(mtx_ai);
                    ai_start = true; // 设置标志位为true，允许ai开始执行
                }
                cv_ai.notify_one();
            }
        }
        else if (recv_buf[0] == WOBA_KEY_UP) // 1: woba ptt up
        {
            printf("woba PTT UP && lip mode\n");
        }
        else if (recv_buf[0] == CONNECT_IIIS) // 2:CONNECT IIIS
        {
            printf("connect iiis!\n");
            diantai_flag = false;
        }
        else if (recv_buf[0] == BATTERY_START_SEND) // 3:表示开始发送初始电量
        {
            bat_send_func_485(cap_query.charge_state, cap_query.init_vol, cap_query.init_cap);

            //有线485发送固件版本号
            uart_main_func(14,1,0,0);
        }
    }
    close(fd);
    pthread_exit(NULL);
}
/*----------------------------------------------------------------
                            主函数main
-----------------------------------------------------------------*/
int main(int argc, char **argv)
{
    /*----------------------isp image capture related start----------------------*/
    strcpy(main_ctx.dev_name, DEV_NAME);
    strcpy(main_ctx.iqpath, IQ_PATH);
    strcpy(main_ctx.out_file, OUT_FILE_NAME);
    main_ctx.width = ORI_WIDTH;   // 112
    main_ctx.height = ORI_HEIGHT; // 112

    if (main_ctx.writeFile)
    {
        main_ctx.fp = fopen(main_ctx.out_file, "w+");
    }
    // UART串口初始化
    serial_init();
    /*----------------------isp image capture related end----------------------*/
    // initial mode state
    mode_state = reinterpret_cast<TAIIC_MODE_STATE_CTX_S *>(malloc(sizeof(TAIIC_MODE_STATE_CTX_S)));
    memset(mode_state, 0, sizeof(TAIIC_MODE_STATE_CTX_S));

    // initial lm model
    lm_ctx = reinterpret_cast<LM_TOOLKIT_MODEL_CTX_S *>(malloc(sizeof(LM_TOOLKIT_MODEL_CTX_S))); // 分配内存空间
    memset(lm_ctx, 0, sizeof(LM_TOOLKIT_MODEL_CTX_S));

    // initial avsr model
    avsr_ctx = reinterpret_cast<AVSR_TOOLKIT_MODEL_CTX_S *>(malloc(sizeof(AVSR_TOOLKIT_MODEL_CTX_S))); // 分配内存空间
    memset(avsr_ctx, 0, sizeof(AVSR_TOOLKIT_MODEL_CTX_S));

    // config lm model
    lm_ctx->modelPath = LM_MODEL_PATH;
    lm_rknn_toolkit_config_init(lm_ctx);
    lm_rknn_toolkit_io_init(lm_ctx);

    // config avsr model
    avsr_ctx->modelPath = AVSR_MODEL_PATH;
    avsr_rknn_toolkit_config_init(avsr_ctx);
    avsr_rknn_toolkit_io_init(avsr_ctx);
    RK_LOGD("====taiic init model config successfully===\n");

    // Mode Initialization Judgment
    init_mode_state_func(mode_state);
    /*---------------485:recv thread---------------*/
    pthread_t tid_recv;
    pthread_create(&tid_recv, NULL, rs_recv, NULL);
    /* pthread_join(tid_recv,NULL);*/

    // 按键检测
    taiic_all_key_registers(mode_state, app_key_callback);
    // 电量查询回调
    taiic_batt_power_registers(power_callback);

    // 模式状态每隔3s上传一次状态值
    pthread_t tid_mode_state_upload;
    pthread_create(&tid_mode_state_upload, NULL, mode_state_upload_thread_func, NULL);
    // /*pthread_join(tid_mode_state_upload,NULL);*/
    /*--------------------------start isp thread--------------------------------*/
    rkisp_routine(&main_ctx, 0);
    sample_smartIr_start(&main_ctx);
    // 环境状态判断线程
#ifdef TEST_BLOCKED_STATS_FUNC
    pthread_t stats_tid;
    pthread_create(&stats_tid, NULL, stats_thread, &main_ctx);
#endif

    // isp image capture
    pthread_create(&tid_image_cap, NULL, taiic_isp_image_capture_func, &main_ctx);
    /*--------------------------start isp thread end--------------------------------*/
    // ai _save end_callback
    aivi_save_end_callback(aivi_save_end);

    taiic_vi_frame_data_callback(vi_frame_data_cback); // 注册vi数据回调事件
    taiic_ai_frame_data_callback(ai_frame_data_cback); // 注册ai数据回调事件

    // audio_capture
    pthread_t tid_ai_uart;
    pthread_create(&tid_ai_uart, NULL, ai_uart_thread_func, NULL);

    taiic_spp_frame_data_callback(spp_frame_data_cback);
    // spp receive data from host
    uart_main_func(10, 0, 0, 0);
    pthread_join(tid_ai_uart, NULL);
    pthread_join(tid_image_cap, NULL);
    // setup the SIGINT to ctrl+c and handing de-init process
    signal(SIGINT, sigterm_handler);

__FAILED_LM:
    if (lm_ctx)
    {
        free(lm_ctx);
        lm_ctx = RK_NULL;
    }
__FAILED_VSR:
    if (avsr_ctx)
    {
        free(avsr_ctx);
        avsr_ctx = RK_NULL;
    }

__FAILED_SYS:
    RK_MPI_SYS_Exit();
    if (mode_state)
    {
        free(mode_state);
        mode_state = RK_NULL;
    }
    taiic_isp_release(); // isp指针释放
    // 释放循环缓冲区
    serialData.releaseRingbuffer();

    return 0;
}
/*---------------------------------------------------------------
                          函数实现
-----------------------------------------------------------------*/
// SIGINT (Ctrl+C) handler: only logs the received signal number.
// NOTE(review): RK_LOGI is unlikely to be async-signal-safe — confirm;
// setting a volatile sig_atomic_t flag and logging outside the handler
// would be the safer pattern.
static void sigterm_handler(int sig)
{
    RK_LOGI("Catched SIGINT %d\n", sig);
}

// SPP数据接收回调函数
static void spp_frame_data_cback(uint8_t keyValue)
{
    if (mode_state->audio_flag == AUDIO_NO_CONN) // 蓝牙模式
    {
        printf("spp_receive_data: %d\n", keyValue);

        if ((keyValue == WOBA_KEY_DOWN) && (!is_model_run) && (mode_state->mode1_flag == MODE_DOWN))
        {
            printf("woba ptt down!\n");

            // SPP收到键值0，触发音频采集
            {
                std::lock_guard<std::mutex> lock(mtx_ai);
                ai_start = true; // 设置标志位为true，允许ai开始执行
            }
            cv_ai.notify_one();
        }
        else if (keyValue == WOBA_KEY_UP) // woba ptt up
        {
            printf("woba ptt up!\n");
        }
        else if (keyValue == CONNECT_IIIS) // 2:CONNECT IIIS
        {
            printf("connect iiis!\n");
            diantai_flag = false;
        }
        else if (keyValue == BATTERY_START_SEND) // 收到3表示开始通过SPP发送初始电量
        {
            uart_main_func(6, cap_query.charge_state, cap_query.init_vol, cap_query.init_cap);

            //通过SPP上传固件版本号
            uart_main_func(14,0,0,0);
        }
    }
}

// AI (audio) frame callback: records the running audio frame counter and, on
// the very first frame, wakes the ISP image-capture thread so video capture
// starts alongside the audio stream.
static void ai_frame_data_cback(uint8_t *data, unsigned int len, unsigned short frame_num)
{
    frameNum = frame_num;
    if (frameNum != 1)
    {
        return; // only the first frame triggers anything
    }
    // First audio frame: release the waiting video-capture thread.
    start_vi = true;
    cv_vi.notify_one();
}
