#include "gstreamer_pipeline.h"
#include <QDebug>
#include <QDateTime>
#include <QTimer>
#include <QFile>
#define UDP_RTP 1
// Constructs the pipeline wrapper in an idle state; no GStreamer objects are
// created until initializePipeline() is called.
//
// @param parent  standard QObject parent for ownership/lifetime.
GStreamerPipeline::GStreamerPipeline(QObject *parent)
    : QObject(parent)
    , m_pipeline(nullptr)
    , m_appSink(nullptr)
    , m_tee(nullptr)
    , m_recordQueue(nullptr)
    , m_recordEncoder(nullptr)
    , m_recordMuxer(nullptr)
    , m_recordSink(nullptr)
    , m_recordTeePad(nullptr)
    , m_streamQueue(nullptr)
    , m_streamEncoder(nullptr)
    , m_streamSink(nullptr)
    , m_streamTeePad(nullptr)
    , m_width(640)
    , m_height(480)
    , m_fps(30)
    , m_isRunning(false)
    , m_isRecording(false)
    , m_isStreaming(false)
{
    // BUGFIX: the audio-branch members are created lazily in startRecording()
    // but are read by stopRecording()/cleanupRecordingElements(); they were
    // previously left uninitialized by this constructor. Zero them explicitly
    // so null checks on them are well-defined.
    m_audioSource = nullptr;
    m_audioQueue = nullptr;
    m_audioConvert = nullptr;
    m_audioResample = nullptr;
    m_audioEncoder = nullptr;

    qDebug() << "GStreamerPipeline constructor";
}

// Destructor: stopPipeline() also stops any in-progress recording/streaming
// branch and releases all held GStreamer element references.
GStreamerPipeline::~GStreamerPipeline()
{
    qDebug() << "GStreamerPipeline destructor";
    stopPipeline();
}

// Builds the base preview pipeline:
//
//   v4l2src (MJPEG) -> tee("main-tee") -> queue -> jpegdec -> videoconvert(BGR)
//                                      -> videoscale -> appsink("main-appsink")
//
// The tee is the attachment point for the dynamically added recording and
// streaming branches. Any previously built pipeline is torn down first.
//
// @param device  V4L2 device node, e.g. "/dev/video0"
// @param width   requested capture width (also the appsink output width)
// @param height  requested capture height
// @param fps     requested capture framerate
// @return true on success; emits errorOccurred() and returns false otherwise.
bool GStreamerPipeline::initializePipeline(const QString &device, int width, int height, int fps)
{
    stopPipeline();
    
    m_width = width;
    m_height = height;
    m_fps = fps;

    // Build the base preview pipeline description.
    QString pipelineStr = QString(
        "v4l2src device=%1 ! "
        "image/jpeg, width=%2, height=%3, framerate=%4/1 ! "
        "tee name=main-tee "
        "main-tee. ! queue name=preview-queue max-size-buffers=100 leaky=downstream ! "
        "jpegdec ! "
        "videoconvert ! "
        "video/x-raw, format=BGR ! "
        "videoscale ! "
        "video/x-raw, width=%2, height=%3 ! "
        "appsink name=main-appsink emit-signals=true sync=false "
    ).arg(device).arg(m_width).arg(m_height).arg(m_fps);
    
    qDebug() << "Initializing pipeline:" << pipelineStr;
    
    GError *error = nullptr;
    m_pipeline = gst_parse_launch(pipelineStr.toUtf8().constData(), &error);
    
    if (error) {
        qCritical() << "Failed to create pipeline:" << error->message;
        emit errorOccurred(QString("Failed to create pipeline: %1").arg(error->message));
        g_error_free(error);
        // BUGFIX: gst_parse_launch() can return a partially constructed
        // pipeline *together with* an error; previously this object was
        // leaked on the error path. Release it before bailing out.
        if (m_pipeline) {
            gst_object_unref(m_pipeline);
            m_pipeline = nullptr;
        }
        return false;
    }
    
    if (!m_pipeline) {
        qCritical() << "Failed to create pipeline: Unknown error";
        emit errorOccurred("Failed to create pipeline: Unknown error");
        return false;
    }
    
    // Look up the elements the dynamic branches and the frame callback need.
    // gst_bin_get_by_name() returns a new reference, released in stopPipeline().
    m_appSink = gst_bin_get_by_name(GST_BIN(m_pipeline), "main-appsink");
    m_tee = gst_bin_get_by_name(GST_BIN(m_pipeline), "main-tee");

    if (!m_appSink || !m_tee) {
        qCritical() << "Failed to get pipeline elements";
        emit errorOccurred("Failed to get pipeline elements");
        if (m_appSink) {
            gst_object_unref(m_appSink);
            m_appSink = nullptr;
        }
        if (m_tee) {
            gst_object_unref(m_tee);
            m_tee = nullptr;
        }
        if (m_pipeline) {
            gst_object_unref(m_pipeline);
            m_pipeline = nullptr;
        }
        return false;
    }
    
    // Configure the appsink and hook up the per-frame callback.
    // (emit-signals/sync are already set in the launch string; setting them
    // again here is harmless and keeps the configuration explicit.)
    g_object_set(m_appSink, "emit-signals", TRUE, "sync", FALSE, nullptr);
    g_signal_connect(m_appSink, "new-sample", G_CALLBACK(onNewSample), this);
    
    qDebug() << "Pipeline initialized successfully";
    return true;
}

bool GStreamerPipeline::startPipeline()
{
    if (!m_pipeline) {
        qCritical() << "Pipeline not initialized";
        emit errorOccurred("Pipeline not initialized");
        return false;
    }
    
    GstStateChangeReturn ret = gst_element_set_state(m_pipeline, GST_STATE_PLAYING);
    if (ret == GST_STATE_CHANGE_FAILURE) {
        qCritical() << "Failed to start pipeline";
        emit errorOccurred("Failed to start pipeline");
        return false;
    }
    
    m_isRunning = true;
    qDebug() << "Pipeline started successfully";
    return true;
}

void GStreamerPipeline::stopPipeline()
{
    // 停止录制和推流
    if (m_isRecording) {
        stopRecording();
    }
    if (m_isStreaming) {
        stopStreaming();
    }
    
    if (m_pipeline) {
        gst_element_set_state(m_pipeline, GST_STATE_NULL);
        
        if (m_appSink) {
            gst_object_unref(m_appSink);
            m_appSink = nullptr;
        }
        if (m_tee) {
            gst_object_unref(m_tee);
            m_tee = nullptr;
        }
        
        gst_object_unref(m_pipeline);
        m_pipeline = nullptr;
    }
    
    m_isRunning = false;
    qDebug() << "Pipeline stopped";
}

// 在 gstreamer_pipeline.cpp 中添加以下实现

// Dynamically attaches a recording branch to the running preview pipeline:
//
//   main-tee -> queue -> mppjpegdec -> videoconvert -> videoscale
//            -> capsfilter(NV12) -> mpph264enc -> h264parse --\
//                                                              mp4mux -> filesink
//   alsasrc -> queue -> audioconvert -> audioresample -> voaacenc
//
// Uses the Rockchip MPP hardware decoder/encoder. On any failure the partial
// setup is rolled back and false is returned.
//
// @param filename  output MP4 path.
// @return true if the branch is attached (or recording already active).
bool GStreamerPipeline::startRecording(const QString& filename)
{
    if (!m_isRunning || !m_tee) {
        qCritical() << "Pipeline not running or tee not available";
        return false;
    }
    
    if (m_isRecording) {
        qWarning() << "Recording already in progress";
        return true;
    }
    
    qDebug() << "Starting recording to:" << filename;
    
    // Create the video recording branch elements.
    m_recordQueue = gst_element_factory_make("queue", "record-queue");
    GstElement *recordDecode = gst_element_factory_make("mppjpegdec", "record-decode");
    GstElement *recordConvert = gst_element_factory_make("videoconvert", "record-convert");
    GstElement *recordScale = gst_element_factory_make("videoscale", "record-scale");  // rescale to target size
    GstElement *recordCapsFilter = gst_element_factory_make("capsfilter", "record-capsfilter");  // pin format for the encoder
    m_recordEncoder = gst_element_factory_make("mpph264enc", "record-encoder");
    GstElement *h264Parse = gst_element_factory_make("h264parse", "record-h264parse");  // required between encoder and mp4mux

    // Create the audio recording branch elements.
    m_audioSource = gst_element_factory_make("alsasrc", "audio-source");
    m_audioQueue = gst_element_factory_make("queue", "audio-queue");
    m_audioConvert = gst_element_factory_make("audioconvert", "audio-convert");
    m_audioResample = gst_element_factory_make("audioresample", "audio-resample");
    m_audioEncoder = gst_element_factory_make("voaacenc", "audio-encoder");
    
    m_recordMuxer = gst_element_factory_make("mp4mux", "record-muxer");
    m_recordSink = gst_element_factory_make("filesink", "record-sink");

    // Verify every element was created (a missing plugin yields NULL).
    if (!m_recordQueue || !recordDecode || !recordConvert || !recordScale || 
        !recordCapsFilter || !m_recordEncoder || !h264Parse ||
        !m_audioSource || !m_audioQueue || !m_audioConvert || !m_audioResample || !m_audioEncoder ||
        !m_recordMuxer || !m_recordSink) {
        qCritical() << "Failed to create recording elements";
        // Release the successfully created local elements (still floating refs
        // at this point, since nothing was added to the bin yet).
        // NOTE(review): the member-held elements (m_recordQueue, m_audio*, ...)
        // are only nulled in cleanupRecordingElements() without an unref here —
        // verify this does not leak the ones that were created.
        if (recordDecode) gst_object_unref(recordDecode);
        if (recordConvert) gst_object_unref(recordConvert);
        if (recordScale) gst_object_unref(recordScale);
        if (recordCapsFilter) gst_object_unref(recordCapsFilter);
        if (h264Parse) gst_object_unref(h264Parse);
        cleanupRecordingElements();
        return false;
    }
    
    // Configure the branch queue to drop old buffers instead of stalling the tee.
    g_object_set(m_recordQueue, "max-size-buffers", 100, "leaky", 2, nullptr);
    
    g_object_set(recordDecode, 
                 "fast-mode", TRUE,           // enable fast decode mode
                 "ignore-error", TRUE,        // tolerate decode errors
                 nullptr);
                 

    // Pin the raw video format the encoder expects.
    GstCaps *caps = gst_caps_new_simple("video/x-raw",
                                       "format", G_TYPE_STRING, "NV12",  // MPP encoders typically consume NV12
                                       "width", G_TYPE_INT, m_width,
                                       "height", G_TYPE_INT, m_height,
                                       "framerate", GST_TYPE_FRACTION, m_fps, 1,
                                       nullptr);
    g_object_set(recordCapsFilter, "caps", caps, nullptr);
    gst_caps_unref(caps);
    
    // Configure the MPP H.264 encoder.
    g_object_set(m_recordEncoder, 
                 "bps", 2000000,           // target bitrate 2000 kbps
                 "rc-mode", 1,             // CBR rate control
                 "gop", 30,                // keyframe interval
                 "profile", 66,            // baseline profile (66)
                 "level", 41,              // level 4.1
                 nullptr);
    
    g_object_set(m_recordMuxer, "faststart", TRUE, nullptr);
    g_object_set(m_recordSink, "location", filename.toUtf8().constData(), nullptr);
    
    // Configure the audio elements.
    g_object_set(m_audioSource, "device", "hw:2", nullptr);  // ALSA card 2 — device index is hard-coded
    g_object_set(m_audioQueue, "max-size-buffers", 100, "leaky", 2, nullptr);
    g_object_set(m_audioEncoder,
                 "bitrate", 128000,  // 128 kbps AAC
                 nullptr);

    // Add everything to the pipeline bin (the bin sinks the floating refs and
    // takes ownership of the elements).
    gst_bin_add_many(GST_BIN(m_pipeline), 
                     // video elements
                     m_recordQueue, recordDecode, recordConvert,
                     recordScale, recordCapsFilter, m_recordEncoder, h264Parse,
                     // audio elements
                     m_audioSource, m_audioQueue, m_audioConvert, m_audioResample, m_audioEncoder,
                     // muxer and output
                     m_recordMuxer, m_recordSink, nullptr);
    
    // Link the branch in stages so failures are attributable.
    bool linkSuccess = true;
    
    // Video chain: queue -> mppjpegdec -> convert -> scale -> capsfilter -> mpph264enc -> h264parse
    if (!gst_element_link_many(m_recordQueue, recordDecode, recordConvert, recordScale, recordCapsFilter, m_recordEncoder, h264Parse, nullptr)) {
        qCritical() << "Failed to link video recording elements";
        linkSuccess = false;
    }
    
    // Audio chain: alsasrc -> queue -> audioconvert -> audioresample -> voaacenc
    if (linkSuccess && !gst_element_link_many(m_audioSource, m_audioQueue, m_audioConvert, m_audioResample, m_audioEncoder, nullptr)) {
        qCritical() << "Failed to link audio recording elements";
        linkSuccess = false;
    }
    
    // Hook both chains into the muxer.
    if (linkSuccess) {
        // video -> muxer
        if (!gst_element_link(h264Parse, m_recordMuxer)) {
            qCritical() << "Failed to link video to muxer";
            linkSuccess = false;
        }
        
        // audio -> muxer
        if (linkSuccess && !gst_element_link(m_audioEncoder, m_recordMuxer)) {
            qCritical() << "Failed to link audio to muxer";
            linkSuccess = false;
        }
        
        // muxer -> filesink
        if (linkSuccess && !gst_element_link(m_recordMuxer, m_recordSink)) {
            qCritical() << "Failed to link muxer to filesink";
            linkSuccess = false;
        }
    }

    if (!linkSuccess) {
        qCritical() << "Failed to link recording elements";
        
        // Roll back: remove the branch elements from the pipeline.
        // NOTE(review): the audio elements added above are NOT removed here,
        // and the unrefs below run after gst_bin_remove_many() has already
        // dropped the bin's (typically only) reference to each element —
        // verify the refcount handling on this rollback path.
        gst_bin_remove_many(GST_BIN(m_pipeline), m_recordQueue, recordDecode, recordConvert,
                           recordScale, recordCapsFilter, m_recordEncoder, h264Parse,
                           m_recordMuxer, m_recordSink, nullptr);
        
        if (recordDecode) gst_object_unref(recordDecode);
        if (recordConvert) gst_object_unref(recordConvert);
        if (recordScale) gst_object_unref(recordScale);
        if (recordCapsFilter) gst_object_unref(recordCapsFilter);
        if (h264Parse) gst_object_unref(h264Parse);
        cleanupRecordingElements();
        return false;
    }
    
    // Request a new src pad from the tee and connect it to the branch queue.
    // NOTE(review): gst_element_get_request_pad() is deprecated since
    // GStreamer 1.20 in favor of gst_element_request_pad_simple().
    m_recordTeePad = gst_element_get_request_pad(m_tee, "src_%u");
    GstPad *queueSinkPad = gst_element_get_static_pad(m_recordQueue, "sink");
    
    if (!m_recordTeePad || !queueSinkPad) {
        qCritical() << "Failed to get pads for recording connection";
        if (m_recordTeePad) {
            gst_object_unref(m_recordTeePad);
            m_recordTeePad = nullptr;
        }
        if (queueSinkPad) gst_object_unref(queueSinkPad);
        
        // Roll back: remove the branch elements from the pipeline.
        // NOTE(review): same caveats as the rollback above (audio elements
        // left in the bin; unref-after-remove).
        gst_bin_remove_many(GST_BIN(m_pipeline), m_recordQueue, recordDecode, recordConvert,
                           recordScale, recordCapsFilter, m_recordEncoder, h264Parse,
                           m_recordMuxer, m_recordSink, nullptr);
        
        if (recordDecode) gst_object_unref(recordDecode);
        if (recordConvert) gst_object_unref(recordConvert);
        if (recordScale) gst_object_unref(recordScale);
        if (recordCapsFilter) gst_object_unref(recordCapsFilter);
        if (h264Parse) gst_object_unref(h264Parse);
        cleanupRecordingElements();
        return false;
    }
    
    GstPadLinkReturn linkRet = gst_pad_link(m_recordTeePad, queueSinkPad);
    if (linkRet != GST_PAD_LINK_OK) {
        qCritical() << "Failed to link tee to recording branch, error:" << linkRet;
        
        // Release the requested tee pad.
        if (m_recordTeePad) {
            gst_element_release_request_pad(m_tee, m_recordTeePad);
            gst_object_unref(m_recordTeePad);
            m_recordTeePad = nullptr;
        }
        
        // Drop the branch elements to NULL before removing them.
        gst_element_set_state(m_recordSink, GST_STATE_NULL);
        gst_element_set_state(m_recordMuxer, GST_STATE_NULL);
        gst_element_set_state(h264Parse, GST_STATE_NULL);
        gst_element_set_state(m_recordEncoder, GST_STATE_NULL);
        gst_element_set_state(recordCapsFilter, GST_STATE_NULL);
        gst_element_set_state(recordScale, GST_STATE_NULL);
        gst_element_set_state(recordConvert, GST_STATE_NULL);
        gst_element_set_state(recordDecode, GST_STATE_NULL);
        gst_element_set_state(m_recordQueue, GST_STATE_NULL);
        
        // NOTE(review): same caveats as the rollbacks above (audio elements
        // left in the bin; unref-after-remove).
        gst_bin_remove_many(GST_BIN(m_pipeline), m_recordQueue, recordDecode, recordConvert,
                           recordScale, recordCapsFilter, m_recordEncoder, h264Parse,
                           m_recordMuxer, m_recordSink, nullptr);
        
        gst_object_unref(queueSinkPad);
        if (recordDecode) gst_object_unref(recordDecode);
        if (recordConvert) gst_object_unref(recordConvert);
        if (recordScale) gst_object_unref(recordScale);
        if (recordCapsFilter) gst_object_unref(recordCapsFilter);
        if (h264Parse) gst_object_unref(h264Parse);
        cleanupRecordingElements();
        return false;
    }
    
    // Bring the branch up to the pipeline's current (PLAYING) state.
    gst_element_sync_state_with_parent(m_recordQueue);
    gst_element_sync_state_with_parent(recordDecode);
    gst_element_sync_state_with_parent(recordConvert);
    gst_element_sync_state_with_parent(recordScale);
    gst_element_sync_state_with_parent(recordCapsFilter);
    gst_element_sync_state_with_parent(m_recordEncoder);
    gst_element_sync_state_with_parent(h264Parse);

    
    // Audio elements.
    gst_element_sync_state_with_parent(m_audioSource);
    gst_element_sync_state_with_parent(m_audioQueue);
    gst_element_sync_state_with_parent(m_audioConvert);
    gst_element_sync_state_with_parent(m_audioResample);
    gst_element_sync_state_with_parent(m_audioEncoder);

        gst_element_sync_state_with_parent(m_recordMuxer);
    gst_element_sync_state_with_parent(m_recordSink);
    
    m_isRecording = true;
    m_currentOutputPath = filename;
    
    qDebug() << "Recording started successfully with MPP encoder";
    emit recordingStarted();
    
    gst_object_unref(queueSinkPad);
    return true;
}

// Detaches the recording branch: blocks the tee pad, sends EOS so mp4mux can
// write its index, waits (bounded) for the EOS to propagate, then unlinks,
// stops and removes all branch elements from the pipeline. The base preview
// pipeline keeps running throughout.
//
// @return true (also when no recording was in progress).
bool GStreamerPipeline::stopRecording()
{
    if (!m_isRecording) {
        qWarning() << "No recording in progress";
        return true;
    }
    
    qDebug() << "Stopping recording with audio";
    
    // Block the video tee pad so no more buffers enter the branch.
    // NOTE(review): a BLOCK_DOWNSTREAM probe with a NULL callback blocks the
    // pad; the probe id is discarded, so it is never explicitly removed —
    // this relies on the pad being released below.
    if (m_recordTeePad) {
        gst_pad_add_probe(m_recordTeePad, GST_PAD_PROBE_TYPE_BLOCK_DOWNSTREAM,
                         nullptr, nullptr, nullptr);
    }
    
    // Send EOS down the video branch so the muxer can finalize.
    if (m_recordQueue) {
        gst_element_send_event(m_recordQueue, gst_event_new_eos());
    }
    
    // Send EOS down the audio branch.
    if (m_audioSource) {
        gst_element_send_event(m_audioSource, gst_event_new_eos());
    }
    
    // Wait (up to 3 s) for EOS/ERROR on the pipeline bus.
    // NOTE(review): GST_MESSAGE_EOS is only posted once *all* sinks are EOS;
    // with the preview appsink still running this wait will normally time
    // out — confirm whether the timeout path is the intended behavior.
    GstBus *bus = gst_element_get_bus(m_pipeline);
    if (bus) {
        GstMessage *msg = gst_bus_timed_pop_filtered(bus, 3 * GST_SECOND, 
            static_cast<GstMessageType>(GST_MESSAGE_EOS | GST_MESSAGE_ERROR));
        
        if (msg) {
            if (GST_MESSAGE_TYPE(msg) == GST_MESSAGE_EOS) {
                qDebug() << "Recording EOS received";
            } else {
                GError *error = nullptr;
                gchar *debug = nullptr;
                gst_message_parse_error(msg, &error, &debug);
                qWarning() << "Recording EOS error:" << error->message;
                g_error_free(error);
                g_free(debug);
            }
            gst_message_unref(msg);
        } else {
            qWarning() << "Timeout waiting for recording EOS, proceeding with cleanup";
        }
        gst_object_unref(bus);
    }
    
    // Disconnect and release the requested tee pad.
    if (m_recordTeePad && m_tee) {
        GstPad *videoQueueSinkPad = gst_element_get_static_pad(m_recordQueue, "sink");
        if (videoQueueSinkPad) {
            gst_pad_unlink(m_recordTeePad, videoQueueSinkPad);
            gst_object_unref(videoQueueSinkPad);
        }
        gst_element_release_request_pad(m_tee, m_recordTeePad);
        gst_object_unref(m_recordTeePad);
        m_recordTeePad = nullptr;
    }
    
    // Stop and remove every recording element (video and audio).
    if (m_recordQueue) {
        // Re-acquire the locally-scoped video elements by name
        // (gst_bin_get_by_name() returns a new reference each).
        GstElement *recordDecode = gst_bin_get_by_name(GST_BIN(m_pipeline), "record-decode");
        GstElement *recordConvert = gst_bin_get_by_name(GST_BIN(m_pipeline), "record-convert");
        GstElement *recordScale = gst_bin_get_by_name(GST_BIN(m_pipeline), "record-scale");
        GstElement *recordCapsFilter = gst_bin_get_by_name(GST_BIN(m_pipeline), "record-capsfilter");
        GstElement *h264Parse = gst_bin_get_by_name(GST_BIN(m_pipeline), "record-h264parse");
        
        // Re-acquire the audio elements by name.
        GstElement *audioSource = gst_bin_get_by_name(GST_BIN(m_pipeline), "audio-source");
        GstElement *audioQueue = gst_bin_get_by_name(GST_BIN(m_pipeline), "audio-queue");
        GstElement *audioConvert = gst_bin_get_by_name(GST_BIN(m_pipeline), "audio-convert");
        GstElement *audioResample = gst_bin_get_by_name(GST_BIN(m_pipeline), "audio-resample");
        GstElement *audioEncoder = gst_bin_get_by_name(GST_BIN(m_pipeline), "audio-encoder");
        
        // Drop every element to NULL, sink-first.
        // Video elements.
        if (m_recordSink) gst_element_set_state(m_recordSink, GST_STATE_NULL);
        if (m_recordMuxer) gst_element_set_state(m_recordMuxer, GST_STATE_NULL);
        if (h264Parse) gst_element_set_state(h264Parse, GST_STATE_NULL);
        if (m_recordEncoder) gst_element_set_state(m_recordEncoder, GST_STATE_NULL);
        if (recordCapsFilter) gst_element_set_state(recordCapsFilter, GST_STATE_NULL);
        if (recordScale) gst_element_set_state(recordScale, GST_STATE_NULL);
        if (recordConvert) gst_element_set_state(recordConvert, GST_STATE_NULL);
        if (recordDecode) gst_element_set_state(recordDecode, GST_STATE_NULL);
        if (m_recordQueue) gst_element_set_state(m_recordQueue, GST_STATE_NULL);
        
        // Audio elements.
        if (audioEncoder) gst_element_set_state(audioEncoder, GST_STATE_NULL);
        if (audioResample) gst_element_set_state(audioResample, GST_STATE_NULL);
        if (audioConvert) gst_element_set_state(audioConvert, GST_STATE_NULL);
        if (audioQueue) gst_element_set_state(audioQueue, GST_STATE_NULL);
        if (audioSource) gst_element_set_state(audioSource, GST_STATE_NULL);
        
        // Remove the whole branch from the pipeline (drops the bin's refs;
        // the get_by_name refs taken above keep the locals valid).
        gst_bin_remove_many(GST_BIN(m_pipeline), 
                           // video elements
                           m_recordQueue, recordDecode, recordConvert,
                           recordScale, recordCapsFilter, m_recordEncoder, h264Parse,
                           // audio elements
                           audioSource, audioQueue, audioConvert, audioResample, audioEncoder,
                           // muxer and output
                           m_recordMuxer, m_recordSink, nullptr);
        
        // Release the get_by_name references.
        if (recordDecode) gst_object_unref(recordDecode);
        if (recordConvert) gst_object_unref(recordConvert);
        if (recordScale) gst_object_unref(recordScale);
        if (recordCapsFilter) gst_object_unref(recordCapsFilter);
        if (h264Parse) gst_object_unref(h264Parse);
        if (audioSource) gst_object_unref(audioSource);
        if (audioQueue) gst_object_unref(audioQueue);
        if (audioConvert) gst_object_unref(audioConvert);
        if (audioResample) gst_object_unref(audioResample);
        if (audioEncoder) gst_object_unref(audioEncoder);
        
        cleanupRecordingElements();
    }
    
    m_isRecording = false;
    qDebug() << "Recording stopped successfully. File saved to:" << m_currentOutputPath;
    emit recordingStopped();
    
    return true;
}

void GStreamerPipeline::cleanupRecordingElements()
{
    // 只需要重置指针，不需要手动unref
    // 视频元素
    m_recordQueue = nullptr;
    m_recordEncoder = nullptr;
    m_recordMuxer = nullptr;
    m_recordSink = nullptr;
    
    // 音频元素
    m_audioSource = nullptr;
    m_audioQueue = nullptr;
    m_audioConvert = nullptr;
    m_audioResample = nullptr;
    m_audioEncoder = nullptr;

}

#if UDP_RTP
bool GStreamerPipeline::startStreaming(const QString& streamUrl)
{
    if (!m_isRunning || !m_tee) {
        qCritical() << "Pipeline not running or tee not available";
        return false;
    }
    
    if (m_isStreaming) {
        qWarning() << "Streaming already in progress";
        return true;
    }
    
    QString host = "127.0.0.1";
    int port = 1234;
    
    if (streamUrl.startsWith("udp://")) {
        QStringList parts = streamUrl.mid(6).split(":");
        if (parts.size() == 2) {
            host = parts[0];
            port = parts[1].toInt();
        }
    }

    qDebug() << "Starting streaming to:" << streamUrl;
    
    // 创建推流分支
    m_streamQueue = gst_element_factory_make("queue", "stream-queue");
    GstElement *streamDecode = gst_element_factory_make("mppjpegdec", "stream-decode");
    GstElement *streamConvert = gst_element_factory_make("videoconvert", "stream-convert");
    GstElement *streamScale = gst_element_factory_make("videoscale", "stream-scale");
    GstElement *streamCapsFilter = gst_element_factory_make("capsfilter", "stream-capsfilter");

    m_streamEncoder = gst_element_factory_make("mpph264enc", "stream-encoder");
    GstElement *h264Parse = gst_element_factory_make("h264parse", "stream-h264parse");
    GstElement *rtpPay = gst_element_factory_make("rtph264pay", "stream-rtppay");
    m_streamSink = gst_element_factory_make("udpsink", "stream-sink");
    
     if (!m_streamQueue || !streamDecode || !streamConvert || !streamScale || 
        !streamCapsFilter || !m_streamEncoder || !h264Parse || !rtpPay || !m_streamSink) {
        qCritical() << "Failed to create streaming elements";
        cleanupStreamingElements();
        return false;
    }
    
    // 配置推流元素
    g_object_set(m_streamQueue, "max-size-buffers", 100, "leaky", 2, nullptr);

    GstCaps *caps = gst_caps_new_simple("video/x-raw",
                                       "format", G_TYPE_STRING, "NV12",  // MPP编码器通常使用NV12格式
                                       "width", G_TYPE_INT, m_width,
                                       "height", G_TYPE_INT, m_height,
                                       "framerate", GST_TYPE_FRACTION, m_fps, 1,
                                       nullptr);
    g_object_set(streamCapsFilter, "caps", caps, nullptr);
    gst_caps_unref(caps);

    g_object_set(m_streamEncoder,
                 "bitrate", 2000,        // 2000 kbps
                 "speed-preset", 1,      // ultrafast
                 "tune", 0x00000004,     // zerolatency
                 "threads", 2,
                 "key-int-max", 30,      // 关键帧间隔
                 nullptr);
    g_object_set(rtpPay, 
                 "pt", 96,                 // payload type
                 "config-interval", 1,     // 发送配置间隔
                 nullptr);

    g_object_set(m_streamSink, 
                 "host", host.toUtf8().constData(),
                 "port", port,
                 "sync", false,            // 不同步，实时流
                 nullptr);
    
    // 添加到管道
    gst_bin_add_many(GST_BIN(m_pipeline), 
                     m_streamQueue, streamDecode, streamConvert,
                     streamScale, streamCapsFilter, m_streamEncoder, 
                     h264Parse, rtpPay, m_streamSink, nullptr);
    
    // 连接推流分支
    bool linkSuccess = true;
    
    // 连接视频处理链：queue -> mppjpegdec -> convert -> scale -> capsfilter -> mpph264enc -> h264parse -> rtph264pay -> udpsink
    if (!gst_element_link_many(m_streamQueue, streamDecode, streamConvert, 
                              streamScale, streamCapsFilter, m_streamEncoder, 
                              h264Parse, rtpPay, m_streamSink, nullptr)) {
        qCritical() << "Failed to link streaming elements";
        linkSuccess = false;
    }
    
    if (!linkSuccess) {
        qCritical() << "Failed to link streaming elements";
        
        // 从管道中移除所有元素
        gst_bin_remove_many(GST_BIN(m_pipeline), 
                           m_streamQueue, streamDecode, streamConvert,
                           streamScale, streamCapsFilter, m_streamEncoder, 
                           h264Parse, rtpPay, m_streamSink, nullptr);
        cleanupStreamingElements();
        return false;
    }
    
    // 连接 tee
    m_streamTeePad = gst_element_get_request_pad(m_tee, "src_%u");
    GstPad *queueSinkPad = gst_element_get_static_pad(m_streamQueue, "sink");
    
    if (!m_streamTeePad || !queueSinkPad) {
        qCritical() << "Failed to get pads for streaming connection";
        if (m_streamTeePad) {
            gst_object_unref(m_streamTeePad);
            m_streamTeePad = nullptr;
        }
        if (queueSinkPad) gst_object_unref(queueSinkPad);
        
        gst_bin_remove_many(GST_BIN(m_pipeline), 
                           m_streamQueue, streamDecode, streamConvert,
                           streamScale, streamCapsFilter, m_streamEncoder, 
                           h264Parse, rtpPay, m_streamSink, nullptr);
        cleanupStreamingElements();
        return false;
    }
    
    GstPadLinkReturn linkRet = gst_pad_link(m_streamTeePad, queueSinkPad);
    if (linkRet != GST_PAD_LINK_OK) {
        qCritical() << "Failed to link tee to streaming branch, error:" << linkRet;
        
        // 清理
        if (m_streamTeePad) {
            gst_element_release_request_pad(m_tee, m_streamTeePad);
            gst_object_unref(m_streamTeePad);
            m_streamTeePad = nullptr;
        }
        
        // 停止并移除元素
        gst_element_set_state(m_streamSink, GST_STATE_NULL);
        gst_element_set_state(rtpPay, GST_STATE_NULL);
        gst_element_set_state(h264Parse, GST_STATE_NULL);
        gst_element_set_state(m_streamEncoder, GST_STATE_NULL);
        gst_element_set_state(streamCapsFilter, GST_STATE_NULL);
        gst_element_set_state(streamScale, GST_STATE_NULL);
        gst_element_set_state(streamConvert, GST_STATE_NULL);
        gst_element_set_state(streamDecode, GST_STATE_NULL);
        gst_element_set_state(m_streamQueue, GST_STATE_NULL);
        
        gst_bin_remove_many(GST_BIN(m_pipeline), 
                           m_streamQueue, streamDecode, streamConvert,
                           streamScale, streamCapsFilter, m_streamEncoder, 
                           h264Parse, rtpPay, m_streamSink, nullptr);
        
        gst_object_unref(queueSinkPad);
        cleanupStreamingElements();
        return false;
    }
    
    // 同步状态
    gst_element_sync_state_with_parent(m_streamQueue);
    gst_element_sync_state_with_parent(streamDecode);
    gst_element_sync_state_with_parent(streamConvert);
    gst_element_sync_state_with_parent(streamScale);
    gst_element_sync_state_with_parent(streamCapsFilter);
    gst_element_sync_state_with_parent(m_streamEncoder);
    gst_element_sync_state_with_parent(h264Parse);
    gst_element_sync_state_with_parent(rtpPay);
    gst_element_sync_state_with_parent(m_streamSink);
    
    m_isStreaming = true;
    m_currentStreamUrl = streamUrl;
    
    qDebug() << "UDP streaming started successfully to" << host << ":" << port;
    emit streamingStarted();
    
    gst_object_unref(queueSinkPad);
    return true;
}



// Detaches the streaming branch: blocks the tee pad, unlinks and releases the
// requested pad, then stops and removes all branch elements. The base preview
// pipeline keeps running. No EOS handshake is needed since udpsink has no
// file to finalize.
//
// @return true (also when no streaming was in progress).
bool GStreamerPipeline::stopStreaming()
{
    if (!m_isStreaming) {
        qWarning() << "No streaming in progress";
        return true;
    }
    
    qDebug() << "Stopping UDP streaming";
    
    // Block the streaming branch so no more buffers flow into it.
    // NOTE(review): BLOCK probe with NULL callback; probe id is discarded and
    // never removed — relies on the pad being released just below.
    if (m_streamTeePad) {
        gst_pad_add_probe(m_streamTeePad, GST_PAD_PROBE_TYPE_BLOCK_DOWNSTREAM,
                         nullptr, nullptr, nullptr);
    }
    
    // Disconnect and release the requested tee pad.
    if (m_streamTeePad && m_tee) {
        GstPad *queueSinkPad = gst_element_get_static_pad(m_streamQueue, "sink");
        if (queueSinkPad) {
            gst_pad_unlink(m_streamTeePad, queueSinkPad);
            gst_object_unref(queueSinkPad);
        }
        gst_element_release_request_pad(m_tee, m_streamTeePad);
        gst_object_unref(m_streamTeePad);
        m_streamTeePad = nullptr;
    }
    
    // Stop and remove the streaming elements.
    if (m_streamQueue) {
        // Re-acquire the locally-scoped elements by name
        // (gst_bin_get_by_name() returns a new reference each).
        GstElement *streamDecode = gst_bin_get_by_name(GST_BIN(m_pipeline), "stream-decode");
        GstElement *streamConvert = gst_bin_get_by_name(GST_BIN(m_pipeline), "stream-convert");
        GstElement *streamScale = gst_bin_get_by_name(GST_BIN(m_pipeline), "stream-scale");
        GstElement *streamCapsFilter = gst_bin_get_by_name(GST_BIN(m_pipeline), "stream-capsfilter");
        GstElement *h264Parse = gst_bin_get_by_name(GST_BIN(m_pipeline), "stream-h264parse");
        GstElement *rtpPay = gst_bin_get_by_name(GST_BIN(m_pipeline), "stream-rtppay");
        
        // Drop every element to NULL, sink-first.
        if (m_streamSink) gst_element_set_state(m_streamSink, GST_STATE_NULL);
        if (rtpPay) gst_element_set_state(rtpPay, GST_STATE_NULL);
        if (h264Parse) gst_element_set_state(h264Parse, GST_STATE_NULL);
        if (m_streamEncoder) gst_element_set_state(m_streamEncoder, GST_STATE_NULL);
        if (streamCapsFilter) gst_element_set_state(streamCapsFilter, GST_STATE_NULL);
        if (streamScale) gst_element_set_state(streamScale, GST_STATE_NULL);
        if (streamConvert) gst_element_set_state(streamConvert, GST_STATE_NULL);
        if (streamDecode) gst_element_set_state(streamDecode, GST_STATE_NULL);
        if (m_streamQueue) gst_element_set_state(m_streamQueue, GST_STATE_NULL);
        
        // Remove the whole branch from the pipeline (drops the bin's refs;
        // the get_by_name refs keep the locals valid).
        gst_bin_remove_many(GST_BIN(m_pipeline), 
                           m_streamQueue, streamDecode, streamConvert,
                           streamScale, streamCapsFilter, m_streamEncoder, 
                           h264Parse, rtpPay, m_streamSink, nullptr);
        
        // Release the get_by_name references.
        if (streamDecode) gst_object_unref(streamDecode);
        if (streamConvert) gst_object_unref(streamConvert);
        if (streamScale) gst_object_unref(streamScale);
        if (streamCapsFilter) gst_object_unref(streamCapsFilter);
        if (h264Parse) gst_object_unref(h264Parse);
        if (rtpPay) gst_object_unref(rtpPay);
        
        cleanupStreamingElements();
    }
    
    m_isStreaming = false;
    qDebug() << "UDP streaming stopped successfully";
    emit streamingStopped();
    
    return true;
}
#endif



// Resets the cached streaming-branch member pointers. The pipeline bin owns
// (or owned) the elements themselves, so only the pointers are cleared here —
// no unref is performed.
void GStreamerPipeline::cleanupStreamingElements()
{
    m_streamSink = nullptr;
    m_streamEncoder = nullptr;
    m_streamQueue = nullptr;
}

// Static trampoline for the appsink "new-sample" signal: forwards the call to
// the GStreamerPipeline instance passed as user_data.
GstFlowReturn GStreamerPipeline::onNewSample(GstAppSink *appsink, gpointer user_data)
{
    return static_cast<GStreamerPipeline*>(user_data)->handleNewSample(appsink);
}

// Pulls the decoded BGR frame from the appsink and hands it to consumers via
// newFrameDataAvailable() without copying the pixel data.
//
// Lifetime: an extra ref is taken on both the sample and the buffer before
// the FrameData is built, and both are released by the FrameData release
// callback. Previously the extra buffer ref was dropped immediately after the
// emit while the callback captured `buffer` without ever using it; the ref is
// now held for the FrameData's whole lifetime, which also keeps the frame
// valid if a consumer holds the FrameData past the emit (e.g. a queued
// connection).
//
// @return GST_FLOW_OK on success, GST_FLOW_ERROR if the sample/buffer/caps
//         could not be obtained.
GstFlowReturn GStreamerPipeline::handleNewSample(GstAppSink *appsink)
{
    GstSample *sample = gst_app_sink_pull_sample(appsink);
    if (!sample) {
        return GST_FLOW_ERROR;
    }
    
    GstBuffer *buffer = gst_sample_get_buffer(sample);
    GstMapInfo map;
    
    if (!gst_buffer_map(buffer, &map, GST_MAP_READ)) {
        qWarning() << "Failed to map buffer";
        gst_sample_unref(sample);
        return GST_FLOW_ERROR;
    }
    
    GstCaps *caps = gst_sample_get_caps(sample);
    GstStructure *structure = gst_caps_get_structure(caps, 0);
    
    gint width, height;
    if (!gst_structure_get_int(structure, "width", &width) ||
        !gst_structure_get_int(structure, "height", &height)) {
        qWarning() << "Failed to get width/height from caps";
        gst_buffer_unmap(buffer, &map);
        gst_sample_unref(sample);
        return GST_FLOW_ERROR;
    }
    
    // Keep buffer and sample alive for as long as the FrameData references
    // map.data; both refs are released by the FrameData release callback.
    gst_buffer_ref(buffer);
    gst_sample_ref(sample);
    
    // Zero-copy hand-off: FrameData wraps the mapped pixels directly.
    FrameData frameData(
        map.data, 
        width, 
        height, 
        QImage::Format_BGR888,
        [buffer, sample]() {
            gst_buffer_unref(buffer);
            gst_sample_unref(sample);
        }
    );
    
    emit newFrameDataAvailable(frameData);
    
    // Release this function's own mapping and sample reference; the FrameData
    // still holds the extra refs taken above.
    // NOTE(review): map.data is assumed to stay valid after unmap while the
    // buffer ref is held — true for system-memory buffers; confirm for the
    // memory type this pipeline produces.
    gst_buffer_unmap(buffer, &map);
    gst_sample_unref(sample);
    
    return GST_FLOW_OK;
}

// --- State accessors -------------------------------------------------------

/// Whether the base preview pipeline has been started (set by startPipeline()).
bool GStreamerPipeline::isRunning() const
{
    return m_isRunning;
}

/// Whether a recording branch is currently attached.
bool GStreamerPipeline::isRecording() const
{
    return m_isRecording;
}

/// Whether a UDP streaming branch is currently attached.
bool GStreamerPipeline::isStreaming() const
{
    return m_isStreaming;
}

