#include "core_wl.h"
#include "core/controller.h"
#include "controller.h"

#if HAVE_AUDIO
#include "core_wl/audio.hpp"
AudioReaderParams audioParams;
#endif
// Give up recording after this many consecutive screencopy frame failures.
#define MAX_FRAME_FAILURES 16
// Signals that trigger a clean shutdown of the capture loop.
static const int GRACEFUL_TERMINATION_SIGNALS[] = { SIGTERM, SIGINT, SIGHUP };

std::mutex frame_writer_mutex, frame_writer_pending_mutex;
std::unique_ptr<FrameWriter> frame_writer;
static int drm_fd = -1;                      // DRM fd for the dmabuf capture path
static struct gbm_device *gbm_device = NULL; // GBM device used to allocate dmabuf buffers
static std::string drm_device_name;          // DRM node path reported via dmabuf feedback

// Set to stop the capture loop (also set from the signal handler).
std::atomic<bool> exit_main_loop{false};

buffer_pool<Core_wl::wf_buffer, 16> *buffers = nullptr; // ring of capture buffers shared with the writer thread
bool buffer_copy_done = false;               // set by frame_handle_ready when a frame copy completes

static bool use_damage = true;               // prefer copy_with_damage over full-frame copies
static bool use_dmabuf = false;              // GPU (dmabuf) capture path toggle
static bool use_hwupload = false;

int32_t frame_failed_cnt = 0;                // consecutive frame-copy failures (reset on success)

Core_wl::capture_region selected_region{};
Core_wl::wf_recorder_output *chosen_output = nullptr;
zwlr_screencopy_frame_v1 *frame = NULL;

// Wayland registry globals and listener callbacks
static std::list<Core_wl::wf_recorder_output> available_outputs;
static wl_shm *shm = nullptr;                                                   // transport: shared memory
static zxdg_output_manager_v1 *xdg_output_manager = NULL;
static zwlr_screencopy_manager_v1 *screencopy_manager = NULL;                   // screen-capture requests
static zwp_linux_dmabuf_v1 *dmabuf = NULL;
static const wl_registry_listener registry_listener = {                         // registration/removal of globals
    .global = Core_wl::handle_global,
    .global_remove = Core_wl::handle_global_remove,
};
static const zwp_linux_dmabuf_feedback_v1_listener dmabuf_feedback_listener = { // dmabuf transport feedback
    .done = Core_wl::dmabuf_feedback_done,
    .format_table = Core_wl::dmabuf_feedback_format_table,
    .main_device = Core_wl::dmabuf_feedback_main_device,
    .tranche_done = Core_wl::dmabuf_feedback_tranche_done,
    .tranche_target_device = Core_wl::dmabuf_feedback_tranche_target_device,
    .tranche_formats = Core_wl::dmabuf_feedback_tranche_formats,
    .tranche_flags = Core_wl::dmabuf_feedback_tranche_flags,
};
const zxdg_output_v1_listener xdg_output_implementation = {                     // logical position/size of outputs
    .logical_position = Core_wl::handle_xdg_output_logical_position,
    .logical_size = Core_wl::handle_xdg_output_logical_size,
    .done = Core_wl::handle_xdg_output_done,
    .name = Core_wl::handle_xdg_output_name,
    .description = Core_wl::handle_xdg_output_description
};
static const struct zwlr_screencopy_frame_v1_listener frame_listener = {        // events for captured frames
    .buffer = Core_wl::frame_handle_buffer,
    .flags = Core_wl::frame_handle_flags,
    .ready = Core_wl::frame_handle_ready,
    .failed = Core_wl::frame_handle_failed,
    .damage = Core_wl::frame_handle_damage,
    .linux_dmabuf = Core_wl::frame_handle_linux_dmabuf,
    .buffer_done = Core_wl::frame_handle_buffer_done,
};
static const struct zwp_linux_buffer_params_v1_listener params_listener = {     // dmabuf buffer-params creation results
    .created = Core_wl::dmabuf_created,
    .failed = Core_wl::dmabuf_failed,
};

Core_wl *Core_wl::getInstance()
{
    // Meyers singleton: constructed on first use, thread-safe since C++11.
    static Core_wl instance;
    return &instance;
}

Core_wl::Core_wl(QObject *parent)
    : QObject{parent}
{
    // Prime the audio device lists up front so the name -> device map is
    // populated before any recording or level metering starts.
    QStringList sysDevices;
    QStringList micDevices;
    getSysDevices(sysDevices);
    getMicDevices(micDevices);
}

Core_wl::~Core_wl()
{
    // Tear down Wayland objects in reverse order of creation:
    // registry first, then the display connection itself.
    if (m_registry != nullptr) {
        wl_registry_destroy(m_registry);
        m_registry = nullptr;
    }
    if (m_display != nullptr) {
        wl_display_disconnect(m_display);
        m_display = nullptr;
    }
}

// Record the geometry of the area selected for capture.
// Only `rect` is consumed today; the other parameters (qId, fullscreen/window
// flags, screen name, window id, scale) are accepted for interface
// compatibility but currently ignored (see disabled assignments below).
void Core_wl::setPropertyOfSelectWid(uint qId, bool isfullscreen, bool isWindow, QString screen, uint id, QRect rect, double scale)
{
//    m_screen = screen;
//    m_isFullscreen = isfullscreen;
//    m_isWindow = isWindow;
//    m_xid = QString::number(id);;
    m_x = rect.x();
    m_y = rect.y();
    m_width = rect.width();
    m_height = rect.height();
//    m_scale = scale;
}

void Core_wl::setSavePath(QString savePath)
{
    // Directory the finished recording will be written into.
    qDebug()<<Q_FUNC_INFO<<"保存路径 m_sVideoPath = "<<savePath;
    m_videoPath = savePath;
}

// Remember both the raw extension string (used when building the output file
// name) and the matching SaveFormat enum value.
// An unrecognized extension leaves m_format untouched, as before.
// Improvement: the comparisons are mutually exclusive, so they now form an
// else-if chain instead of six independent `if` statements.
void Core_wl::setSaveFormat(QString saveformat)
{
    m_formatString = saveformat;
    if (saveformat == "mp4") {
        m_format = SaveFormat::MP4;
    } else if (saveformat == "mkv") {
        m_format = SaveFormat::MKV;
    } else if (saveformat == "avi") {
        m_format = SaveFormat::AVI;
    } else if (saveformat == "mov") {
        m_format = SaveFormat::MOV;
    } else if (saveformat == "webm") {
        m_format = SaveFormat::WEBM;
    } else if (saveformat == "gif") {
        m_format = SaveFormat::GIF;
    }
    qDebug()<<Q_FUNC_INFO<<"保存类型 m_formatString = "<<m_formatString;
}

// Map the requested quality preset onto encoder bitrate and QP (quantizer)
// range.  Lower QP values mean higher quality / larger files.
// NOTE(review): presets other than the three handled here leave the previous
// bitrate/QP values unchanged — confirm that is intended.
void Core_wl::setResolution(int resolution)
{
    if (resolution == static_cast<int>(SaveResolution::SUPER_OR_ORIGINAL_QUALITY))
    {
        m_bitrate = 8000; // original comment said "Mbps"; 8000 is presumably kbps — TODO confirm unit
        m_qpmin = 5;
        m_qpmax = 20;
    }
    else if(resolution == static_cast<int>(SaveResolution::HIGHT_QUALITY)){
        m_bitrate = 5000;
        m_qpmin = 20;
        m_qpmax = 40;
    }
    else if(resolution == static_cast<int>(SaveResolution::STANDARD_QUALITY)){
        m_bitrate = 2500;
        if(m_info.isHWWayland()){   // quality tiers differ on 9006C (HW Wayland) hardware
            m_qpmin = 60;
            m_qpmax = 60;
        }else{
            m_qpmin = 40;
            m_qpmax = 69;
        }
    }
    qDebug()<<Q_FUNC_INFO<<"保存质量 m_qpmin = "<<m_qpmin<<" m_qpmax = "<<m_qpmax;
}

void Core_wl::setFrameRate(QString frameRate)
{
    // Frame-rate presets: fps, key-frame interval (two seconds worth of
    // frames) and the per-frame encoder deadline in microseconds (~1e6/fps).
    struct FpsPreset {
        const char *key;
        int fps;
        int keyframeDist;
        int deadline;
    };
    static const FpsPreset presets[] = {
        { "15", 15,  30, 66666 },
        { "25", 25,  50, 40000 },
        { "30", 30,  60, 33333 },
        { "50", 50, 100, 20000 },
        { "60", 60, 120, 16666 },
    };

    // Deliberately NOT an else-if chain: like the original code, every
    // matching substring applies in order, so the last match wins.
    for (const auto &preset : presets) {
        if (frameRate.contains(preset.key)) {
            m_fps = preset.fps;
            m_keyframe_dist = preset.keyframeDist;
            m_deadline = preset.deadline;
        }
    }
    qDebug()<<Q_FUNC_INFO<<"保存帧率 m_fps = "<<m_fps;
}

void Core_wl::setMicrophone(QString microphone)
{
    // An empty selection or the "no microphone" sentinel disables the mic.
    if (microphone == "" || microphone == m_NoMic) {
        m_microphoneName = "";
    } else {
        m_microphoneName = microphone;
    }

    // Resolve the display name to the underlying PulseAudio source id.
    QString resolvedDevice = "";
    if (!mapNameToDevice.contains(m_microphoneName)) {
        qDebug()<<"选择的设备不存在";
    } else {
        resolvedDevice = mapNameToDevice.value(m_microphoneName);
    }

    // Retarget the level-meter pipeline and its callback at the new source.
    ModifylevelMeterDevice(resolvedDevice);
}

void Core_wl::setEncoderAndAudioCodec()
{
    // Pick GStreamer encoder elements matching the chosen container format.
    // MP4 uses H.264; MKV/AVI/MOV share VP8 + MP3; WebM pairs VP8 with Opus;
    // GIF has no audio track, so only the video codec is set for it.
    if (m_format == SaveFormat::MP4) {
        m_codec = "x264enc";
        m_audioCodec = "lamemp3enc";
    } else if (m_format == SaveFormat::MKV ||
               m_format == SaveFormat::AVI ||
               m_format == SaveFormat::MOV) {
        m_codec = "vp8enc";
        m_audioCodec = "lamemp3enc";
    } else if (m_format == SaveFormat::WEBM) {
        m_codec = "vp8enc";
        m_audioCodec = "opusenc";
    } else if (m_format == SaveFormat::GIF) {
        m_codec = "gifenc";
    }
    // Any other format leaves the codecs unchanged.
}

/**
 * Collect system (output/monitor) audio devices into `ls`.
 * Entries from getAllDevices() have the form "device:::name"; devices whose id
 * contains `soundtype` are kept.  With isName=true the human-readable names
 * are returned, otherwise the PulseAudio device ids.
 *
 * Bug fix: a stray `deviceContains = device.contains("echo-cancel");` line
 * unconditionally overwrote the Huawei (HW Wayland) specific filter computed
 * just above it, turning that branch into dead code.  The stray line has been
 * removed so the platform-specific exclusions take effect again.
 */
void Core_wl::getSysDevices(QStringList &ls, QString soundtype, bool isName)
{
    QStringList list;
    list << getAllDevices();
    if ( list.empty() )
        return;

    for ( int i = 0; i < list.count(); i++ ) {
        QString name = QString(list.at(i)).section( ":::", 1, 1 );
        QString device = QString(list.at(i)).section( ":::", 0, 0 );
        if (device.contains(soundtype))
        {
            // Filter out devices known to misbehave.  On Huawei (HW Wayland)
            // hardware more sources are excluded because using two sound
            // cards at once fails.
            bool deviceContains = false;
            if(m_info.isHWWayland()){
                deviceContains = device.contains("echo-cancel") || device.contains("alsa_input") || device.contains("histen");
            }else{
                deviceContains = device.contains("echo-cancel");
            }
            if (deviceContains)
                continue;

            if (isName)
            {
                ls << name;
            }
            else
            {
                ls << device;
                qDebug().noquote() << Global::nameOutput << "[Audio] Found:" << name << "Device:" << device;
            }
        }

    }
}

void Core_wl::getMicDevices(QStringList &ls, QString soundtype, bool isName)
{
    // Collect capture devices (those whose id does NOT contain `soundtype`,
    // i.e. everything that is not a monitor/loopback source).
    // Also records the name -> device mapping for later lookups.
    const QStringList allDevices = getAllDevices();
    if (allDevices.empty())
        return;

    for (const QString &entry : allDevices) {
        // Entries have the form "device:::name".
        const QString displayName = entry.section(":::", 1, 1);
        const QString deviceId = entry.section(":::", 0, 0);

        if (deviceId.contains(soundtype))
            continue;

        // Exclude devices that are known to misbehave.  On Huawei (HW
        // Wayland) hardware the "3a" input stutters when used together with
        // the microphone, so additional ids are filtered there.
        bool excluded;
        if (m_info.isHWWayland()) {
            excluded = deviceId.contains("echo-cancel") || displayName.contains("echo")
                     || deviceId.contains("histen") || deviceId.contains("3a");
        } else {
            excluded = deviceId.contains("echo-cancel") || displayName.contains("echo");
        }
        if (excluded)
            continue;

        mapNameToDevice.insert(displayName, deviceId);
        if (isName) {
            ls << displayName;
        } else {
            ls << deviceId;
            qDebug().noquote() << Global::nameOutput << "[Audio] Found:" << displayName << "Device:" << deviceId;
        }
    }
}

QString Core_wl::getUsingMicDevices(bool isName)
{
    // Microphone currently selected in the persisted configuration,
    // returned either as its display name or as its PulseAudio device id.
    const QString micName = ConfigHandler().getMicrophone();
    return isName ? micName : mapNameToDevice[micName];
}

QString Core_wl::getUsingSysDevices(bool isName)
{
    // Find the monitor source belonging to the current default output sink.
    // NOTE: isName is currently unused; the device id is always returned.
    QStringList monitors;
    getSysDevices(monitors, "monitor");
    const QString defaultSink = Controller::getInstance()->getDefultOutputDevice();
    for (const QString &device : monitors) {
        if (device.contains(defaultSink))
            return device;
    }
    return "";
}

void Core_wl::openMicrophone()
{
    qDebug()<<"开启麦克风 openMicrophone";
    // Unmute the selected source and remember the new state.
    m_closeMicrophone = false;
    toggle_microphone(false);
}

void Core_wl::closeMicrophone()
{
    qDebug()<<"关闭麦克风  - closeMicrophone";
    // Mute the selected source and remember the new state.
    m_closeMicrophone = true;
    toggle_microphone(true);
}

void Core_wl::toggle_microphone(bool mute)
{
    //获取设备
    QString usingMicName = ConfigHandler().getMicrophone();
    QString usingMicDevice = mapNameToDevice[usingMicName];
    if(usingMicDevice.isEmpty()){
        return;
    }
    const char * device = usingMicDevice.toUtf8().constData();

    //初始化连接
    if(!m_mainloop){
        m_mainloop = pa_mainloop_new();
        m_context = pa_context_new(pa_mainloop_get_api(m_mainloop), "Microphone Control");
        pa_context_connect(m_context, nullptr, PA_CONTEXT_NOFLAGS, nullptr);
        // Wait for context to be ready
        while (pa_context_get_state(m_context) != PA_CONTEXT_READY) {
            pa_mainloop_iterate(m_mainloop, 1, nullptr);
        }
    }

    //切换静音状态
    // Mute or unmute the default source
    pa_operation* op = pa_context_set_source_mute_by_name(m_context, device, mute, nullptr, nullptr);
    if (!op) {
        qCritical() << "Failed to create mute operation for device:" << device;
        return;
    }
    while (pa_operation_get_state(op) == PA_OPERATION_RUNNING) {
        pa_mainloop_iterate(m_mainloop, 1, nullptr);
    }

    pa_operation_unref(op);
}

/**
 * Build and start the GStreamer level-meter pipeline
 * (pulsesrc -> audioconvert -> level -> fakesink) for microphone `name`.
 * The `level` element posts element messages that message_handler() turns
 * into updateVolume() signals.
 *
 * Fixes: the GstCaps from gst_caps_from_string() and the GstBus reference
 * from gst_element_get_bus() were leaked; both are now unreffed once they
 * are no longer needed (link_filtered does not take ownership of the caps).
 */
void Core_wl::levelMeterStart(QString name)
{
    m_microphoneName = name;
    // Resolve the display name to the PulseAudio device id.
    QString device;
    if(mapNameToDevice.contains(name)){
        device = mapNameToDevice.value(name);
    }

    qDebug()<<"监测音量的设备 = "<< name << device;
    GstElement *audiosrc, *audioconvert, *level, *fakesink;
    GstCaps *caps;
    GstBus *bus;

    caps = gst_caps_from_string( "audio/x-raw,channels=2" );

    pipelineMic = gst_pipeline_new( NULL );
    g_assert (pipelineMic);
    audiosrc = gst_element_factory_make( "pulsesrc", "my_audiosrc" );
    g_assert (audiosrc);
    audioconvert = gst_element_factory_make( "audioconvert", NULL );
    g_assert (audioconvert);
    level = gst_element_factory_make( "level", NULL );
    g_assert (level);
    fakesink = gst_element_factory_make( "fakesink", NULL );
    g_assert (fakesink);

    gst_bin_add_many( GST_BIN( pipelineMic ), audiosrc, audioconvert, level, fakesink, NULL );
    if ( !gst_element_link( audiosrc, audioconvert ) ) {
        g_error( "Failed to link audiosrc and audioconvert" );
    }
    if (!gst_element_link_filtered( audioconvert, level, caps ) ) {
        g_error( "Failed to link audioconvert and level" );
    }
    if ( !gst_element_link( level, fakesink ) ) {
        g_error( "Failed to link level and fakesink" );
    }
    // link_filtered keeps its own reference to the caps; release ours.
    gst_caps_unref( caps );

    // The temporary from toUtf8() lives for the duration of the full
    // g_object_set() call expression, so taking constData() here is safe.
    g_object_set( G_OBJECT( audiosrc ), "device", device.toUtf8().constData(), NULL );

    QString m_name = "[kylinscreencap] " + name;
    g_object_set( G_OBJECT( audiosrc ), "client-name", m_name.toUtf8().constData(), NULL );
    g_object_set( G_OBJECT( level ), "post-messages", TRUE, NULL );
    g_object_set( G_OBJECT( fakesink ), "sync", TRUE, NULL );

    // Install the sync handler and start metering.  gst_element_get_bus()
    // returns a new reference, which must be dropped after use.
    bus = gst_element_get_bus (pipelineMic);
    gst_bus_set_sync_handler( bus, (GstBusSyncHandler)message_handler,NULL, NULL );
    gst_object_unref( bus );
    gst_element_set_state( pipelineMic, GST_STATE_PLAYING );
}

// GStreamer bus sync handler for the level-meter pipeline: extracts the
// per-channel "peak" values posted by the `level` element, smooths them with
// an exponential-decay envelope and emits updateVolume() with a 0..100 value.
// NOTE(review): installed via gst_bus_set_sync_handler, whose handler should
// return GstBusSyncReply; returning TRUE (== GST_BUS_PASS) forwards every
// message to an async queue that is never drained — confirm intended.
gboolean Core_wl::message_handler(GstBus *bus, GstMessage *message, gpointer data)
{
    Q_UNUSED(bus)

    if ( message->type == GST_MESSAGE_ELEMENT ) {
        const GstStructure *s = gst_message_get_structure( message );
        const gchar *name = gst_structure_get_name( s );
        if ( strcmp( name, "level" ) == 0 ) {
            gint channels;
            gdouble rms_dB;
            gdouble rms;
            const GValue *array_val;
            const GValue *value;
            GValueArray *rms_arr;

            // The values are packed into GValueArrays with one value per
            // channel.  Note: despite the rms_* naming, the "peak" field is
            // read here, not "rms".
            array_val = gst_structure_get_value( s, "peak" );
            rms_arr = (GValueArray *) g_value_get_boxed( array_val );

            // The channel count is the length of any of the value arrays.
            channels = rms_arr->n_values;
            // Decaying envelope shared across calls (single pipeline assumed).
            static double smoothed_rms = 0.0;
            for ( gint i = 0; i < channels; ++i ) {
                value = g_value_array_get_nth( rms_arr, i );
                rms_dB = g_value_get_double( value );

                // Converting from dB to linear gives a value in [0.0, 1.0].
                rms = pow( 10, rms_dB / 20 ) ;
                // Rise instantly, decay by 15% per message.
                smoothed_rms = std::max(rms, 0.85 * smoothed_rms);
                // Publish the level as a 0..100 integer.
                int int_rms = static_cast<int>(smoothed_rms *100);
                emit getInstance()->updateVolume(int_rms);
            }
        }
    }

    return TRUE;
}

/**
 * Retarget the running level-meter pipeline at a different PulseAudio source:
 * stop the pipeline, swap the pulsesrc "device" property, reinstall the sync
 * handler and resume playback.  No-op when the pipeline does not exist yet.
 *
 * Fixes: gst_bin_get_by_name() and gst_element_get_bus() both return new
 * references that were never released; they are now unreffed.
 */
void Core_wl::ModifylevelMeterDevice(QString newDevice)
{
    if(pipelineMic){
        GstElement *audiosrc = gst_bin_get_by_name(GST_BIN(pipelineMic), "my_audiosrc");
        if (!audiosrc) {
            return;
        }
        gst_element_set_state(pipelineMic, GST_STATE_NULL);
        g_object_set(G_OBJECT(audiosrc), "device", newDevice.toUtf8().constData(), NULL);
        gst_object_unref(audiosrc);  // release the ref added by gst_bin_get_by_name()
        // Reinstall the volume callback and restart the pipeline.
        GstBus *bus = gst_element_get_bus (pipelineMic);
        gst_bus_set_sync_handler( bus, (GstBusSyncHandler)message_handler,NULL, NULL );
        gst_object_unref(bus);       // release the ref added by gst_element_get_bus()
        gst_element_set_state(pipelineMic, GST_STATE_PLAYING);
        qDebug()<<"监测的麦克风设备已变更 = "<< newDevice;
    }
}

QStringList Core_wl::getAllDevices()
{
    // Ask the audio backend for its device list.  The C API returns all
    // entries joined by "---" with a trailing separator, which is stripped
    // before splitting.
    QStringList devices;
    if (isAvailable()) {
        const QString raw = QString::fromUtf8(get_all_audio_devices());
        const QString withoutTrailer = raw.left(raw.length() - 3);
        devices = withoutTrailer.split("---");
    }

    // An empty entry means the list was effectively empty: report none.
    if (devices.contains("")) {
        devices.clear();
    }
    return devices;
}

bool Core_wl::isAvailable()
{
    bool value = false;

    pa_mainloop *pa_ml;
    pa_mainloop_api *pa_mlapi;
    pa_context *context = NULL;

    // Create a mainloop API and connection to the default server
    pa_ml = pa_mainloop_new();
    pa_mlapi = pa_mainloop_get_api( pa_ml );
    context = pa_context_new( pa_mlapi, NULL );

    // This function connects to the pulse server
    int status = pa_context_connect( context, NULL, PA_CONTEXT_NOAUTOSPAWN, NULL );
    if ( status < 0 ) {
        value = false;
    } else {
        value = true;
    }

    pa_context_unref( context );
    pa_mainloop_free( pa_ml );

    return value;
}

// Start a recording session: connect to Wayland if needed, configure the
// frame writer (file name, region, fps, codec, audio source), choose the
// output to capture, then run the capture loop until exit_main_loop is set.
// Returns false on setup failure, true when the loop exits normally.
bool Core_wl::startRecord()
{
    qDebug()<<"Core_wl::startRecord() 开始录制 m_display="<<m_display;
    // Initialization: connect to the Wayland display on first use.
    if(!m_display){
        if (!initWayland())
            return false;
    }
    buffers = new buffer_pool<Core_wl::wf_buffer,16>(); // [resource release: freeing this caused the controller to destruct — TODO revisit ownership]
    exit_main_loop = false;
    buffer_copy_done = false;
    spawned_thread = false;

    // Recording settings.
    FrameWriterParams params = FrameWriterParams(exit_main_loop);
    // Output file name and container format.
    QString newVideoFilename = Global::name + "-" + QDateTime::currentDateTime().toString( "yyyy-MM-dd_hh-mm-ss" ) + "." + m_formatString;
    m_completePathRec = m_videoPath + "/" + newVideoFilename;
    params.file = m_completePathRec.toStdString();
//    params.muxer = "";
    // Capture region (set earlier via setPropertyOfSelectWid).
    std::stringstream selectedRegionString;
    selectedRegionString << m_x << "," << m_y << " " << m_width << "x" << m_height;
    selected_region.set_from_string(selectedRegionString.str());
    // Frame rate and quality (QP range) options.
    std::stringstream qminString, qmaxString;
    qminString << "qmin=" << m_qpmin;
    qmaxString << "qmax=" << m_qpmax;
    params.framerate = m_fps;
    // Video codec.
    params.codec = DEFAULT_CODEC;
    parse_codec_opts(params.codec_options, qminString.str());
    parse_codec_opts(params.codec_options, qmaxString.str());
    // System audio / microphone: at most one source is recorded, with system
    // sound taking priority over the microphone.
    params.enable_audio = false;
    params.audio_codec = DEFAULT_AUDIO_CODEC;
    params.sample_rate = DEFAULT_AUDIO_SAMPLE_RATE;
    if(Global::isOpenSysSound){
        params.enable_audio = true;
        std::string sysDeviceString = getUsingSysDevices().toStdString();
        audioParams.audio_source = !sysDeviceString.empty() ? strdup(sysDeviceString.c_str()) : NULL;
    }else if (Global::isOpenMic){
        params.enable_audio = true;
        std::string micDeviceString = getUsingMicDevices().toStdString();
        audioParams.audio_source = !micDeviceString.empty() ? strdup(micDeviceString.c_str()) : NULL;
    }

    params.pix_fmt = DEFAULT_PIX_FMT;
    params.enable_ffmpeg_debug_output = false;
    params.bframes = -1;

    constexpr const char* default_cmdline_output = "interactive";
    std::string cmdline_output = default_cmdline_output;
    bool force_no_dmabuf = false;
    bool force_overwrite = true;

    // NOTE(review): dead with force_overwrite=true, and EXIT_FAILURE (1)
    // converts to `true` from this bool function — confirm intended.
    if (!force_overwrite && !user_specified_overwrite(params.file))
    {
        return EXIT_FAILURE;
    }
    // Hardware acceleration (Huawei) — currently disabled.
    /*
    if (params.codec.find("vaapi") != std::string::npos)
    {

    }else{
        qDebug()<<"未使用硬件加速";
    }*/

    check_has_protos();
    load_output_info();

    // Select which output (monitor) to capture.
    if (available_outputs.size() == 1)
    {
        chosen_output = &available_outputs.front();
        if (chosen_output->name != cmdline_output &&
            cmdline_output != default_cmdline_output)
        {
            qDebug() << "Couldn't find requested output" << QString::fromStdString(cmdline_output);
            return false;
        }
    } else
    {
        for (auto& wo : available_outputs)
        {
            if (wo.name == cmdline_output)
                chosen_output = &wo;
        }

        if (chosen_output == NULL)
        {
            if (cmdline_output != default_cmdline_output)
            {
                qDebug()<<"Couldn't find requested output" << cmdline_output.c_str();
                return false;
            }

            if (selected_region.is_selected())
            {
                chosen_output = detect_output_from_region(selected_region);
            }
            else
            {
                chosen_output = choose_interactive();
            }
        }
    }

    if (chosen_output == nullptr)
    {
        qDebug()<<"Failed to select output, exiting";
        return false;
    }

    // Validate that the selected region fits inside the chosen output.
    if (selected_region.is_selected())
    {
        qDebug()<<"显示器 chosen_output = "<< chosen_output->x<<chosen_output->y<<
                  chosen_output->width<<chosen_output->height;
        qDebug()<<"选择区域 selected_region = "<< selected_region.x<<selected_region.y<<
                  selected_region.width<<selected_region.height;

        if (!selected_region.contained_in({chosen_output->x, chosen_output->y,
            chosen_output->width, chosen_output->height}))
        {
            qDebug()<<"Invalid region to capture: must be completely inside the output";
            selected_region = capture_region{};
        }
    }
    fprintf(stderr, "selected region %d,%d %dx%d\n", selected_region.x, selected_region.y, selected_region.width, selected_region.height);

    qDebug()<<"core_wl 开始录制" << exit_main_loop;
    // Make SIGTERM/SIGINT/SIGHUP stop the loop cleanly.
    for (auto signo : GRACEFUL_TERMINATION_SIGNALS)
    {
        signal(signo, Core_wl::handle_graceful_termination);
    }
    // Capture loop: wait for a free buffer, request a frame, dispatch
    // Wayland events until the copy finishes, then hand the buffer to the
    // writer thread (started lazily on the first frame).
    while(!exit_main_loop)
    {
            while(buffers->capture().ready_capture() != true) {
                std::this_thread::sleep_for(std::chrono::microseconds(500));
            }
            buffer_copy_done = false;
            request_next_frame();

            while (!buffer_copy_done && !exit_main_loop && wl_display_dispatch(m_display) != -1) {
                // This space is intentionally left blank
            }

            if (exit_main_loop) {
                break;
            }

            auto& buffer = buffers->capture();
            //std::cout << "first buffer at " << timespec_to_usec(get_ct()) / 1.0e6<< std::endl;

            if (!spawned_thread)
            {
                writer_thread = std::thread([=] () {
                    write_loop(params);
                });

                spawned_thread = true;
            }

            buffer.base_usec = timespec_to_usec(buffer.presented);
            buffers->next_capture();

    }
    return true;
}

void Core_wl::stopRecord(QString &completePath)
{
    qDebug()<<Q_FUNC_INFO<<"停止录制";
    // Ask the capture loop to stop, then release all recording resources.
    exit_main_loop = true;
    cleanup();
    // Report the finished file path back to the caller and clear any
    // outstanding pause state.
    completePath = m_completePathRec;
    if (is_paused) {
        is_paused = false;
    }
}

// Pause the recording.  NOTE(review): is_paused is consumed outside this
// chunk (presumably by the write loop) — confirm where it takes effect.
void Core_wl::pauseRecord()
{
    is_paused = true;
}

// Resume a paused recording.  (Name keeps the existing "contine" typo for
// caller compatibility.)
void Core_wl::contineRecord()
{
    is_paused = false;
}

// wl_registry "global" event: bind each Wayland global the recorder needs.
// `interface` is the interface name, `name` the numeric global id.
void Core_wl::handle_global(void *, wl_registry *registry, uint32_t name, const char *interface, uint32_t)
{
    // wl_output: one entry per monitor; remembered for later xdg-output lookup.
    if (strcmp(interface, wl_output_interface.name) == 0)
    {
        qDebug()<<Q_FUNC_INFO<<"全局对象注册-"<< interface<< name;
        auto output = (wl_output*)wl_registry_bind(registry, name, &wl_output_interface, 1);    // bind the wl_registry global to a wl_output proxy
        wf_recorder_output wro;
        wro.output = output;
        available_outputs.push_back(wro);
    }
    else if (strcmp(interface, wl_shm_interface.name) == 0)
    {
        shm = (wl_shm*) wl_registry_bind(registry, name, &wl_shm_interface, 1);
    }
    else if (strcmp(interface, zwlr_screencopy_manager_v1_interface.name) == 0)
    {
        qDebug()<<Q_FUNC_INFO<<"全局对象注册-"<< interface<< name;
        // Version 3 is required for copy_with_damage / linux_dmabuf events.
        screencopy_manager = (zwlr_screencopy_manager_v1*) wl_registry_bind(registry, name,
            &zwlr_screencopy_manager_v1_interface, 3);
    }
    else if (strcmp(interface, zxdg_output_manager_v1_interface.name) == 0)
    {
        qDebug()<<Q_FUNC_INFO<<"全局对象注册-"<< interface<< name;
        xdg_output_manager = (zxdg_output_manager_v1*) wl_registry_bind(registry, name,
            &zxdg_output_manager_v1_interface, 2); // version 2 for name & description, if available
    }
    else if (strcmp(interface, zwp_linux_dmabuf_v1_interface.name) == 0)
    {
        // Version 4 provides default-feedback (format table / main device).
        dmabuf = (zwp_linux_dmabuf_v1*) wl_registry_bind(registry, name,
            &zwp_linux_dmabuf_v1_interface, 4);
        if (dmabuf) {
            struct zwp_linux_dmabuf_feedback_v1 *feedback =
                zwp_linux_dmabuf_v1_get_default_feedback(dmabuf);
            zwp_linux_dmabuf_feedback_v1_add_listener(feedback, &dmabuf_feedback_listener, NULL);
        }
    }
}

// wl_registry "global_remove" event: intentionally ignored.
void Core_wl::handle_global_remove(void *, wl_registry *, uint32_t)
{
    // no-op
}

// All dmabuf feedback received; the feedback object is no longer needed.
void Core_wl::dmabuf_feedback_done(void *, zwp_linux_dmabuf_feedback_v1 *feedback)
{
    zwp_linux_dmabuf_feedback_v1_destroy(feedback);
}

// Format table is not consumed; just close the fd the compositor handed us
// so it does not leak.
void Core_wl::dmabuf_feedback_format_table(void *, zwp_linux_dmabuf_feedback_v1 *, int32_t fd, uint32_t)
{
    close(fd);
}

/**
 * dmabuf feedback "main_device" event: the compositor reports the DRM device
 * it renders on.  Remember a render (preferred) or primary node path in
 * drm_device_name for the dmabuf capture path.
 *
 * Fix: the original memcpy'd `device->size` bytes into a stack dev_t without
 * checking the size, a stack overflow if the payload is larger than
 * sizeof(dev_t).  The size is now validated before copying.
 */
void Core_wl::dmabuf_feedback_main_device(void *, zwp_linux_dmabuf_feedback_v1 *, wl_array *device)
{
    dev_t dev_id;
    if (device->size != sizeof(dev_id)) {
        qDebug() << "Unexpected dmabuf feedback main_device payload size" << device->size;
        return;
    }
    memcpy(&dev_id, device->data, sizeof(dev_id));

    drmDevice *dev = NULL;
    if (drmGetDeviceFromDevId(dev_id, 0, &dev) != 0) {
        qDebug() << "Failed to get DRM device from dev id " << strerror(errno) ;
        return;
    }

    if (dev->available_nodes & (1 << DRM_NODE_RENDER)) {
        drm_device_name = dev->nodes[DRM_NODE_RENDER];
    } else if (dev->available_nodes & (1 << DRM_NODE_PRIMARY)) {
        drm_device_name = dev->nodes[DRM_NODE_PRIMARY];
    }

    drmFreeDevice(&dev);
}

// Tranche events are not used: the recorder only needs the main device.
void Core_wl::dmabuf_feedback_tranche_done(void *, zwp_linux_dmabuf_feedback_v1 *)
{
    // no-op
}

void Core_wl::dmabuf_feedback_tranche_target_device(void *, zwp_linux_dmabuf_feedback_v1 *, wl_array *)
{
    // no-op
}

void Core_wl::dmabuf_feedback_tranche_formats(void *, zwp_linux_dmabuf_feedback_v1 *, wl_array *)
{
    // no-op
}

void Core_wl::dmabuf_feedback_tranche_flags(void *, zwp_linux_dmabuf_feedback_v1 *, uint32_t)
{
    // no-op
}

void Core_wl::handle_xdg_output_logical_position(void *, zxdg_output_v1 *zxdg_output, int32_t x, int32_t y)
{
    qDebug()<<Q_FUNC_INFO<< "获取显示器逻辑位置="<< x<< y;
    // Store the logical position on the output record this event belongs to.
    for (auto &output : available_outputs)
    {
        if (output.zxdg_output != zxdg_output)
            continue;
        output.x = x;
        output.y = y;
    }
}

void Core_wl::handle_xdg_output_logical_size(void *, zxdg_output_v1 *zxdg_output, int32_t w, int32_t h)
{
    qDebug()<<Q_FUNC_INFO<< "获取显示器逻辑大小="<< w<< h;
    // Store the logical size on the output record this event belongs to.
    for (auto &output : available_outputs)
    {
        if (output.zxdg_output != zxdg_output)
            continue;
        output.width = w;
        output.height = h;
    }
}

// All xdg-output properties delivered; nothing extra to do.
void Core_wl::handle_xdg_output_done(void *, zxdg_output_v1 *)
{
    // no-op
}

void Core_wl::handle_xdg_output_name(void *, zxdg_output_v1 *zxdg_output_v1, const char *name)
{
    qDebug()<<Q_FUNC_INFO<< "获取显示器名字="<< name;
    // Attach the advertised name to the matching output record.
    for (auto &output : available_outputs)
    {
        if (output.zxdg_output == zxdg_output_v1)
            output.name = name;
    }
}

void Core_wl::handle_xdg_output_description(void *, zxdg_output_v1 *zxdg_output_v1, const char *description)
{
    qDebug()<<Q_FUNC_INFO<< "获取显示器描述="<< description;
    // Attach the advertised description to the matching output record.
    for (auto &output : available_outputs)
    {
        if (output.zxdg_output == zxdg_output_v1)
            output.description = description;
    }
}

// Screencopy "buffer" event (shm path): the compositor announces the format,
// dimensions and stride it wants; allocate/reuse a matching shm buffer and
// ask for the frame to be copied into it.
void Core_wl::frame_handle_buffer(void *, zwlr_screencopy_frame_v1 *frame, uint32_t format, uint32_t width, uint32_t height, uint32_t stride)
{
    // The dmabuf path is driven by frame_handle_linux_dmabuf instead.
    if (use_dmabuf) {
        return;
    }

    auto& buffer = buffers->capture();
    auto old_format = buffer.format;
    buffer.format = (wl_shm_format)format;
    buffer.drm_format = wl_shm_to_drm_format(format);
    buffer.width = width;
    buffer.height = height;
    buffer.stride = stride;

    /* ffmpeg requires even width and height */
    // Note: only the recorded buffer dimensions are trimmed; the shm buffer
    // below is still created with the compositor's full width/height.
    if (buffer.width % 2)
        buffer.width -= 1;
    if (buffer.height % 2)
        buffer.height -= 1;

    // (Re)allocate the shm buffer on first use or when the format changed.
    if (!buffer.wl_buffer || old_format != format) {
        free_shm_buffer(buffer);
        buffer.wl_buffer =
            create_shm_buffer(format, width, height, stride, &buffer.data);
    }

    if (buffer.wl_buffer == NULL) {
        qDebug()<<"failed to create buffer";
        exit(EXIT_FAILURE);
    }

    // Kick off the copy; completion arrives via frame_handle_ready.
    if (use_damage) {
        zwlr_screencopy_frame_v1_copy_with_damage(frame, buffer.wl_buffer);
    } else {
        zwlr_screencopy_frame_v1_copy(frame, buffer.wl_buffer);
    }
}

// Screencopy "flags" event: remember whether the frame is vertically flipped.
void Core_wl::frame_handle_flags(void *, zwlr_screencopy_frame_v1 *, uint32_t flags)
{
    buffers->capture().y_invert = flags & ZWLR_SCREENCOPY_FRAME_V1_FLAGS_Y_INVERT;
}

// Screencopy "ready" event: the copy finished.  Record the presentation
// timestamp (seconds split across hi/low 32-bit halves), flag completion for
// the capture loop and reset the failure counter.
void Core_wl::frame_handle_ready(void *, zwlr_screencopy_frame_v1 *, uint32_t tv_sec_hi, uint32_t tv_sec_low, uint32_t tv_nsec)
{
    auto& buffer = buffers->capture();
    buffer_copy_done = true;
    buffer.presented.tv_sec = ((1ll * tv_sec_hi) << 32ll) | tv_sec_low;
    buffer.presented.tv_nsec = tv_nsec;
    frame_failed_cnt = 0;
}

// Screencopy "failed" event: retry, but give up (stop the capture loop)
// after MAX_FRAME_FAILURES consecutive failures.
void Core_wl::frame_handle_failed(void *, zwlr_screencopy_frame_v1 *)
{
    qDebug() << "Failed to copy frame, retrying..." ;
    ++frame_failed_cnt;
    // Retry immediately; note a retry is requested even on the final failure.
    request_next_frame();
    if (frame_failed_cnt > MAX_FRAME_FAILURES)
    {
        qDebug() << "Failed to copy frame too many times, exiting!" ;
        exit_main_loop = true;
    }
}

// Screencopy "damage" event: damage regions are not tracked per-frame here.
void Core_wl::frame_handle_damage(void *, zwlr_screencopy_frame_v1 *, uint32_t, uint32_t, uint32_t, uint32_t)
{
    // no-op
}

// Screencopy "linux_dmabuf" event (dmabuf path): the compositor announces the
// DRM format and dimensions.  Allocate (or reuse) a GBM buffer object, wrap
// it in a zwp_linux_buffer_params_v1, and ask the compositor to create a
// wl_buffer from it; the actual copy is started in dmabuf_created().
void Core_wl::frame_handle_linux_dmabuf(void *, zwlr_screencopy_frame_v1 *frame, uint32_t format, uint32_t width, uint32_t height)
{
    // The shm path is driven by frame_handle_buffer instead.
    if (!use_dmabuf) {
        return;
    }

    auto& buffer = buffers->capture();

    auto old_format = buffer.format;
    buffer.format = drm_to_wl_shm_format(format);
    buffer.drm_format = format;
    buffer.width = width;
    buffer.height = height;

    // (Re)allocate the GBM bo on first use or when the format changed.
    if (!buffer.wl_buffer || (old_format != buffer.format)) {
        // Release the previous wl_buffer / params / bo, if any.
        if (buffer.bo) {
            if (buffer.wl_buffer) {
                wl_buffer_destroy(buffer.wl_buffer);
            }

            zwp_linux_buffer_params_v1_destroy(buffer.params);
            gbm_bo_destroy(buffer.bo);
        }

        const uint64_t modifier = 0; // DRM_FORMAT_MOD_LINEAR
        // Prefer an explicit-modifier (linear) allocation; fall back to the
        // legacy usage-flag allocation when modifiers are unsupported.
        buffer.bo = gbm_bo_create_with_modifiers(gbm_device, buffer.width,
            buffer.height, format, &modifier, 1);
        if (buffer.bo == NULL)
        {
            buffer.bo = gbm_bo_create(gbm_device, buffer.width,
                buffer.height, format, GBM_BO_USE_LINEAR | GBM_BO_USE_RENDERING);
        }
        if (buffer.bo == NULL)
        {
            qDebug() << "Failed to create gbm bo" ;
            exit_main_loop = true;
            return;
        }

        buffer.stride = gbm_bo_get_stride(buffer.bo);

        buffer.params = zwp_linux_dmabuf_v1_create_params(dmabuf);

        // Describe plane 0 of the bo to the compositor.
        // NOTE(review): gbm_bo_get_fd() returns a dup'd fd that does not
        // appear to be closed after the add — possible fd leak; confirm.
        uint64_t mod = gbm_bo_get_modifier(buffer.bo);
        zwp_linux_buffer_params_v1_add(buffer.params,
            gbm_bo_get_fd(buffer.bo), 0,
            gbm_bo_get_offset(buffer.bo, 0),
            gbm_bo_get_stride(buffer.bo),
            mod >> 32, mod & 0xffffffff);

        // Asynchronous creation: dmabuf_created/dmabuf_failed fire next.
        zwp_linux_buffer_params_v1_add_listener(buffer.params, &params_listener, frame);
        zwp_linux_buffer_params_v1_create(buffer.params, buffer.width,
            buffer.height, format, 0);
    } else {
        // Buffer already exists — start the copy directly.
        if (use_damage) {
            zwlr_screencopy_frame_v1_copy_with_damage(frame, buffer.wl_buffer);
        } else {
            zwlr_screencopy_frame_v1_copy(frame, buffer.wl_buffer);
        }
    }
}

// Screencopy "buffer_done" event: all buffer variants announced; unused.
void Core_wl::frame_handle_buffer_done(void *, zwlr_screencopy_frame_v1 *)
{
    // no-op
}

void Core_wl::dmabuf_created(void *data, zwp_linux_buffer_params_v1 *, wl_buffer *wl_buffer)
{
    auto& buffer = buffers->capture();
    buffer.wl_buffer = wl_buffer;

    zwlr_screencopy_frame_v1 *frame = (zwlr_screencopy_frame_v1*) data;

    if (use_damage) {
        zwlr_screencopy_frame_v1_copy_with_damage(frame, buffer.wl_buffer);
    } else {
        zwlr_screencopy_frame_v1_copy(frame, buffer.wl_buffer);
    }
}

// dmabuf wl_buffer creation failed: abort the capture loop.
void Core_wl::dmabuf_failed(void *, zwp_linux_buffer_params_v1 *)
{
    qDebug() << "Failed to create dmabuf" ;
    exit_main_loop = true;
}

bool Core_wl::initWayland()
{
    m_display = wl_display_connect(nullptr);
    if (!m_display) {
        qWarning() << "Failed to connect to Wayland display.";
        return false;
    }

    m_registry = wl_display_get_registry(m_display);
    if (!m_registry) {
        qWarning() << "Failed to get Wayland registry.";
        return false;
    }

    // 注册监听器
    wl_registry_add_listener(m_registry, &registry_listener, nullptr);
    sync_wayland();

    return true;
}

void Core_wl::sync_wayland()
{
    // Dispatch any queued events, then block until the compositor has
    // processed all outstanding requests (the roundtrip acts as a barrier).
    wl_display_dispatch(m_display);
    wl_display_roundtrip(m_display);
}

bool Core_wl::check_has_protos()
{
    // Verify every Wayland global this recorder depends on was advertised.
    // A missing required protocol is fatal: log a diagnostic and terminate.
    const auto require = [] (bool ok, const char *msg)
    {
        if (!ok)
        {
            qDebug() << msg;
            exit(EXIT_FAILURE);
        }
    };

    require(shm != NULL, "compositor is missing wl_shm");
    require(screencopy_manager != NULL, "compositor doesn't support wlr-screencopy-unstable-v1");
    require(xdg_output_manager != NULL, "compositor doesn't support xdg-output-unstable-v1");
    // linux-dmabuf is only required when dmabuf capture was requested.
    require(!use_dmabuf || dmabuf != NULL, "compositor doesn't support linux-dmabuf-unstable-v1");
    require(!available_outputs.empty(), "no outputs available");

    return true;
}

void Core_wl::load_output_info()
{
    qDebug()<<Q_FUNC_INFO<<"加载输出信息";

    // Ask the xdg-output manager for the logical position/size of every
    // output; results arrive asynchronously via xdg_output_implementation.
    for (auto& out : available_outputs)
    {
        out.zxdg_output = zxdg_output_manager_v1_get_xdg_output(xdg_output_manager, out.output);
        zxdg_output_v1_add_listener(out.zxdg_output, &xdg_output_implementation, NULL);
    }

    // Block until all listeners fired, so the geometry is valid afterwards.
    sync_wayland();
}

bool Core_wl::user_specified_overwrite(std::string filename)
{
    // TODO: replace with a GUI confirmation dialog, or derive the file name
    // from a timestamp so collisions cannot occur. Until then the overwrite
    // check below stays disabled and we always allow writing.
    (void)filename; // silence unused-parameter warning while the check is disabled
    /*
    struct stat buffer;
    if (stat (filename.c_str(), &buffer) == 0 && !S_ISCHR(buffer.st_mode))
    {
        std::string input;
        std::cerr << "Output file \"" << filename << "\" exists. Overwrite? Y/n: ";
        std::getline(std::cin, input);
        if (input.size() && input[0] != 'Y' && input[0] != 'y')
        {
            std::cerr << "Use -f to specify the file name." << std::endl;
            return false;
    }
    }*/

    return true;
}

Core_wl::wf_recorder_output *Core_wl::detect_output_from_region(const capture_region &region)
{
    // Return the first output whose logical geometry fully contains the
    // requested capture region, or nullptr when no such output exists.
    for (auto& candidate : available_outputs)
    {
        const capture_region geometry{candidate.x, candidate.y, candidate.width, candidate.height};
        if (!region.contained_in(geometry))
        {
            continue;
        }

        qDebug() << "Detected output based on geometry: " << QString::fromStdString(candidate.name) ;
        return &candidate;
    }

    qDebug() << "Failed to detect output based on geometry (is your geometry overlapping outputs?)";
    return nullptr;
}

Core_wl::wf_recorder_output *Core_wl::choose_interactive()
{
    qDebug() << "Please select an output from the list to capture (enter output no.):";

    // Print every available output with a 1-based index.
    int index = 1;
    for (auto& out : available_outputs)
    {
        printf("%d. Name: %s Description: %s\n", index, out.name.c_str(), out.description.c_str());
        ++index;
    }

    printf("Enter output no.:");
    fflush(stdout);

    // Reject non-numeric input and out-of-range selections.
    int selection = 0;
    if (scanf("%d", &selection) != 1 || selection <= 0 || selection > (int)available_outputs.size())
    {
        return nullptr;
    }

    auto chosen = available_outputs.begin();
    std::advance(chosen, selection - 1);
    return &*chosen;
}

void Core_wl::handle_graceful_termination(int)
{
    // Signal handler for SIGTERM/SIGINT/SIGHUP: only sets an atomic flag
    // (async-signal-safe); the main loop performs the actual shutdown.
    exit_main_loop = true;
}

void Core_wl::request_next_frame()
{
    // Release the previous frame object before requesting a new one.
    if (frame != NULL)
    {
        zwlr_screencopy_frame_v1_destroy(frame);
    }

    if (selected_region.is_selected())
    {
        // Capture only the user-selected region, translated into
        // output-local coordinates.
        frame = zwlr_screencopy_manager_v1_capture_output_region(
            screencopy_manager, Global::isShowCursor, chosen_output->output,
            selected_region.x - chosen_output->x,
            selected_region.y - chosen_output->y,
            selected_region.width, selected_region.height);
    }
    else
    {
        /* Capture the whole output if the user hasn't provided a good geometry */
        frame = zwlr_screencopy_manager_v1_capture_output(
            screencopy_manager, Global::isShowCursor, chosen_output->output);
    }

    zwlr_screencopy_frame_v1_add_listener(frame, &frame_listener, NULL);
}

void Core_wl::parse_codec_opts(std::map<std::string, std::string> &options, const std::string param)
{
    // Parse a single "name=value" codec option into `options`.
    // Fix: also reject an empty option name ("=value"), not only a missing
    // '=' or an empty value ("name=").
    const size_t pos = param.find("=");
    if (pos != std::string::npos && pos != 0 && pos != param.length() - 1)
    {
        // Note: like the original insert(), an existing key is not overwritten.
        options.emplace(param.substr(0, pos), param.substr(pos + 1));
    }
    else
    {
        qDebug() << "Invalid codec option " << QString::fromStdString(param) ;
    }
}

void Core_wl::write_loop(FrameWriterParams params)
{
    /* Ignore SIGTERM/SIGINT/SIGHUP, main loop is responsible for the exit_main_loop signal */
    sigset_t sigset;
    sigemptyset(&sigset);
    for (auto signo : GRACEFUL_TERMINATION_SIGNALS)
    {
        sigaddset(&sigset, signo);
    }
    pthread_sigmask(SIG_BLOCK, &sigset, NULL);

#if HAVE_AUDIO
    std::unique_ptr<AudioReader> pr;
#endif

    // Timestamp (usec) of the first encoded frame.
    // Fix: this used to be a heap-allocated uint64_t* that was never freed
    // (the commented-out std::optional shows the original intent); a plain
    // value plus a flag avoids the leak and the indirection.
    uint64_t first_frame_ts = 0;
    bool have_first_frame_ts = false;

    while(!exit_main_loop)
    {
        // Wait (1 ms polling) for a captured frame to become available.
        while(buffers->encode().ready_encode() != true && !exit_main_loop) {
            std::this_thread::sleep_for(std::chrono::microseconds(1000));
        }
        if (exit_main_loop) {
            break;
        }

        auto& buffer = buffers->encode();

        // Two-mutex handshake: the pending mutex lets another thread signal
        // its intent before we grab the writer mutex.
        frame_writer_pending_mutex.lock();
        frame_writer_mutex.lock();
        frame_writer_pending_mutex.unlock();

        if (!frame_writer)
        {
            /* This is the first time buffer attributes are available */
            params.format = get_input_format(buffer);
            params.drm_format = buffer.drm_format;
            params.width = buffer.width;
            params.height = buffer.height;
            params.stride = buffer.stride;
            frame_writer = std::unique_ptr<FrameWriter> (new FrameWriter(params));

#if HAVE_AUDIO
            if (params.enable_audio)
            {
                audioParams.audio_frame_size = frame_writer->get_audio_buffer_size();
                audioParams.sample_rate = params.sample_rate;
                pr = std::unique_ptr<AudioReader> (AudioReader::create(audioParams));
                if (pr)
                {
                    pr->start();
                }
            }
#endif
        }
        bool drop = false;
        uint64_t sync_timestamp = 0;

        // Entering pause: remember when it started and pause audio capture.
        if (is_paused && pause_start == 0) {
            pause_start = buffer.base_usec;
            qDebug()<<"开始暂停 pause_start = " << pause_start;
#if HAVE_AUDIO
            if (pr) {
                pr->pause();
            }
#endif
        }
        // Leaving pause: accumulate the paused duration so output
        // timestamps remain continuous.
        else if (!is_paused && pause_start != 0) {
            qDebug()<<"停止暂停 pause_end = " << buffer.base_usec;
            pause_duration += buffer.base_usec - pause_start;
            pause_start = 0; // 重置暂停开始时间
            qDebug()<<"停止暂停的时长 pause_duration = " << pause_duration;
#if HAVE_AUDIO
            if (pr) {
                pr->resume();
            }
#endif
        }
        if (have_first_frame_ts) {
            // Normal case: timestamp relative to the first frame, excluding
            // any paused time.
            sync_timestamp = buffer.base_usec - first_frame_ts - pause_duration;
#if HAVE_AUDIO
        } else if (pr) {
            // Audio active: drop video frames until audio has a time base,
            // then use it as time zero so audio and video stay in sync.
            if (!pr->get_time_base() || pr->get_time_base() > buffer.base_usec) {
                drop = true;
            } else {
                first_frame_ts = pr->get_time_base();
                have_first_frame_ts = true;
                sync_timestamp = buffer.base_usec - first_frame_ts - pause_duration;
            }
#endif
        } else {
            // Video only: this frame defines time zero.
            sync_timestamp = 0;
            first_frame_ts = buffer.base_usec;
            have_first_frame_ts = true;
        }
        bool do_cont = false;

        if (!drop) {
            if (use_dmabuf) {
                if (use_hwupload) {
                    // Read the dmabuf contents back through a CPU mapping.
                    uint32_t stride = 0;
                    void *map_data = NULL;
                    void *data = gbm_bo_map(buffer.bo, 0, 0, buffer.width, buffer.height,
                                            GBM_BO_TRANSFER_READ, &stride, &map_data);
                    if (!data) {
                        qDebug() << "Failed to map bo" ;
                        // Fix: release the writer mutex before leaving the
                        // loop; the old code broke out while still holding
                        // it and then deadlocked on the lock_guard below.
                        frame_writer_mutex.unlock();
                        break;
                    }
                    if(!is_paused){
                        do_cont = frame_writer->add_frame((unsigned char*)data,sync_timestamp, buffer.y_invert);
                    }else{
                        do_cont = true;
                    }

                    gbm_bo_unmap(buffer.bo, map_data);
                } else {
                    if(!is_paused){
                        do_cont = frame_writer->add_frame(buffer.bo,sync_timestamp, buffer.y_invert);
                    }else{
                        do_cont = true;
                    }
                }
            } else {
                if(!is_paused){
                    do_cont = frame_writer->add_frame((unsigned char*)buffer.data,sync_timestamp, buffer.y_invert);
                }else{
                    do_cont = true;
                }
            }
        } else {
            // Dropped frames still advance the ring buffer below.
            do_cont = true;
        }

        frame_writer_mutex.unlock();

        if (!do_cont) {
            break;
        }

        buffers->next_encode();

    }

    std::lock_guard<std::mutex> lock(frame_writer_mutex);
    /* Free the AudioReader connection first. This way it'd flush any remaining
      * frames to the FrameWriter */
#if HAVE_AUDIO
    pr = nullptr;
#endif
    frame_writer = nullptr;
    pause_duration = 0;

}

uint64_t Core_wl::timespec_to_usec(const timespec &ts)
{
    // Convert seconds + nanoseconds into a single microsecond count.
    const long long micros = ts.tv_sec * 1000000ll + ts.tv_nsec / 1000ll;
    return static_cast<uint64_t>(micros);
}

InputFormat Core_wl::get_input_format(wf_buffer &buffer)
{
    // Map the captured buffer's wl_shm pixel format onto the encoder's
    // InputFormat. Direct dmabuf encoding bypasses the per-format mapping.
    if (use_dmabuf && !use_hwupload) {
        return INPUT_FORMAT_DMABUF;
    }
    switch (buffer.format) {
    case WL_SHM_FORMAT_ARGB8888:
    case WL_SHM_FORMAT_XRGB8888:
        return INPUT_FORMAT_BGR0;
    case WL_SHM_FORMAT_XBGR8888:
    case WL_SHM_FORMAT_ABGR8888:
        return INPUT_FORMAT_RGB0;
    case WL_SHM_FORMAT_BGR888:
        return INPUT_FORMAT_BGR8;
    case WL_SHM_FORMAT_RGB565:
        return INPUT_FORMAT_RGB565;
    case WL_SHM_FORMAT_BGR565:
        return INPUT_FORMAT_BGR565;
    case WL_SHM_FORMAT_ARGB2101010:
    case WL_SHM_FORMAT_XRGB2101010:
        return INPUT_FORMAT_X2RGB10;
    case WL_SHM_FORMAT_ABGR2101010:
    case WL_SHM_FORMAT_XBGR2101010:
        return INPUT_FORMAT_X2BGR10;
    case WL_SHM_FORMAT_ABGR16161616:
    case WL_SHM_FORMAT_XBGR16161616:
        return INPUT_FORMAT_RGBX64;
    case WL_SHM_FORMAT_ARGB16161616:
    case WL_SHM_FORMAT_XRGB16161616:
        return INPUT_FORMAT_BGRX64;
    case WL_SHM_FORMAT_ABGR16161616F:
    case WL_SHM_FORMAT_XBGR16161616F:
        return INPUT_FORMAT_RGBX64F;
    default:
        qDebug() << "Unsupported buffer format " << static_cast<int>(buffer.format) << ", exiting.";
        // Fix: this is an error path, so exit with a failure status instead
        // of std::exit(0), which signalled success to the caller's shell.
        std::exit(EXIT_FAILURE);
    }
}

uint32_t Core_wl::wl_shm_to_drm_format(uint32_t format)
{
    // wl_shm uses special enum values for the two 8888 ARGB/XRGB formats;
    // every other wl_shm code is passed through unchanged.
    switch (format)
    {
    case WL_SHM_FORMAT_ARGB8888:
        return GBM_FORMAT_ARGB8888;
    case WL_SHM_FORMAT_XRGB8888:
        return GBM_FORMAT_XRGB8888;
    default:
        return format;
    }
}

wl_buffer *Core_wl::create_shm_buffer(uint32_t fmt, int width, int height, int stride, void **data_out)
{
    const int size = stride * height;

    // Anonymous backing file for the shared-memory pool.
    const int fd = backingfile(size);
    if (fd < 0) {
        qDebug() << "creating a buffer file for" << size << "B failed:" << strerror(errno);

        return NULL;
    }

    // Map the file so compositor-written pixels are readable in-process.
    void *data = mmap(NULL, size, PROT_READ | PROT_WRITE, MAP_SHARED, fd, 0);
    if (data == MAP_FAILED) {
        qDebug().noquote() << "mmap failed:" << strerror(errno);
        close(fd);
        return NULL;
    }

    // The pool (and fd) are only needed long enough to create the buffer;
    // the mapping and the wl_buffer outlive them.
    struct wl_shm_pool *pool = wl_shm_create_pool(shm, fd, size);
    close(fd);
    struct wl_buffer *result = wl_shm_pool_create_buffer(pool, 0, width, height, stride, fmt);
    wl_shm_pool_destroy(pool);

    *data_out = data;
    return result;
}

int Core_wl::backingfile(off_t size)
{
    // Create an anonymous temp file to back a wl_shm pool. Returns an open
    // fd of the requested size, or -1 on failure.
    char name[] = "/tmp/wf-recorder-shared-XXXXXX";
    int fd = mkstemp(name);
    if (fd < 0) {
        return -1;
    }

    // Grow the file to `size`, retrying when interrupted by a signal.
    // Fix: ftruncate() returns -1 on failure with errno set; the previous
    // loop compared the return value against the errno constant EINTR, so
    // interrupted calls were never actually retried.
    int ret;
    do {
        ret = ftruncate(fd, size);
    } while (ret == -1 && errno == EINTR);
    if (ret < 0) {
        close(fd);
        return -1;
    }

    // Unlink so the file disappears once the last fd is closed.
    unlink(name);
    return fd;
}

wl_shm_format Core_wl::drm_to_wl_shm_format(uint32_t format)
{
    // Inverse of wl_shm_to_drm_format(): only the two 8888 ARGB/XRGB codes
    // differ between the enums; everything else maps through unchanged.
    switch (format)
    {
    case GBM_FORMAT_ARGB8888:
        return WL_SHM_FORMAT_ARGB8888;
    case GBM_FORMAT_XRGB8888:
        return WL_SHM_FORMAT_XRGB8888;
    default:
        return (wl_shm_format)format;
    }
}

void Core_wl::free_shm_buffer(wf_buffer &buffer)
{
    // Skip buffers that were never created (or were already released).
    if (buffer.wl_buffer != NULL)
    {
        // Tear down the CPU mapping first, then the protocol object, and
        // clear the handle so a repeated call is a harmless no-op.
        munmap(buffer.data, buffer.size);
        wl_buffer_destroy(buffer.wl_buffer);
        buffer.wl_buffer = NULL;
    }
}
void Core_wl::handleEvents()
{
    // Intentionally empty: appears to be an interface placeholder — event
    // processing currently happens via sync_wayland() and the capture loop.
}

void Core_wl::cleanup()
{
    qDebug()<<Q_FUNC_INFO<<"清理";
    // Wait for the encoder thread to drain and exit before touching any
    // state it might still be using.
    if (writer_thread.joinable())
    {
        writer_thread.join();
        qDebug()<<"writer_thread 释放";
    }
/*
    if(buffers){
        qDebug()<<"buffers 释放1";
        delete buffers;
        qDebug()<<"buffers 释放2";
        buffers = nullptr;
        qDebug()<<"buffers 释放3";
    }

    for (size_t i = 0; i < buffers.size(); ++i)
    {
        auto buffer = buffers.at(i);
        if (buffer && buffer->wl_buffer)
            wl_buffer_destroy(buffer->wl_buffer);
    }

    if (gbm_device) {
        gbm_device_destroy(gbm_device);
        close(drm_fd);
    }

    if (frame) {
        zwlr_screencopy_frame_v1_destroy(frame);
        frame = nullptr;
    }
    if (shm) {
        wl_shm_destroy(shm);
        shm = nullptr;
    }
    if (dmabuf) {
        zwp_linux_dmabuf_v1_destroy(dmabuf);
        dmabuf = nullptr;
    }
    if (xdg_output_manager) {
        zxdg_output_manager_v1_destroy(xdg_output_manager);
        xdg_output_manager = nullptr;
    }
    if (screencopy_manager) {
        zwlr_screencopy_manager_v1_destroy(screencopy_manager);
        screencopy_manager = nullptr;
    }

    if (params) {
        zwp_linux_buffer_params_v1_destroy(params);
        params = nullptr;
    }
    if (xdg_output) {
        zxdg_output_v1_destroy(xdg_output);
        xdg_output = nullptr;
    }

*/
#if HAVE_AUDIO
    // Fix: audioParams is only declared under #if HAVE_AUDIO (top of file),
    // so this access must be guarded or non-audio builds fail to compile.
    // Also null the pointer after free() so a second cleanup() call cannot
    // double-free it.
    if (audioParams.audio_source != NULL) {
        free(audioParams.audio_source);
        audioParams.audio_source = NULL;
    }
#endif
}
