#include "gstvideoplayer.h"
#include <QDebug>
#include <QApplication>
#include <QGuiApplication>
#include <QWindow>

// Bus-watch trampoline: forwards every pipeline bus message to the owning
// GstVideoPlayer instance passed as user data.
static gboolean onBusMessage(GstBus *bus, GstMessage *msg, gpointer data) {
    Q_UNUSED(bus);
    auto *self = static_cast<GstVideoPlayer*>(data);
    self->handleGstMessage(msg);
    return true; // keep the watch installed
}

// GStreamer 回调函数：当 appsrc 需要数据时触发
// GStreamer callback: fired when appsrc wants more data.
// Intentionally empty — data is pushed from the QUdpSocket slot
// (onUdpDataReceived) instead of being pulled on demand.
void GstVideoPlayer::onNeedData(GstAppSrc *appsrc, guint size, gpointer data) {
    Q_UNUSED(appsrc);
    Q_UNUSED(size);
    Q_UNUSED(data); // silence unused-parameter warning
}

// GStreamer 回调函数：当 appsrc 缓冲区已满时触发
void GstVideoPlayer::onEnoughData(GstAppSrc *appsrc, gpointer data) {
    Q_UNUSED(appsrc);
    //GstVideoPlayer *player = static_cast<GstVideoPlayer*>(data);
    // 可以在此暂停 UDP 接收（可选）
}

// 新增: appsink 的新样本回调
// appsink "new-sample" callback (only used with the appsink pipeline variant).
// Propagates the flow result of onNewFrame() back to GStreamer instead of
// unconditionally reporting GST_FLOW_OK, so pull/map failures are visible
// upstream.
GstFlowReturn GstVideoPlayer::onNewSample(GstAppSink *appsink, gpointer data) {
    Q_UNUSED(appsink);
    GstVideoPlayer *player = static_cast<GstVideoPlayer*>(data);
    return player->onNewFrame();
}

// probe回调函数实现
GstPadProbeReturn GstVideoPlayer::capsProbeCallback(GstPad *pad, GstPadProbeInfo *info, gpointer user_data) {
    GstVideoPlayer *player = static_cast<GstVideoPlayer*>(user_data);

    if (GST_PAD_PROBE_INFO_TYPE(info) & GST_PAD_PROBE_TYPE_EVENT_UPSTREAM) {
        GstEvent *event = GST_PAD_PROBE_INFO_EVENT(info);
        if (GST_EVENT_TYPE(event) == GST_EVENT_CAPS) {
            GstCaps *caps;
            gst_event_parse_caps(event, &caps);
            if (caps) {
                player->parseVideoInfo(caps);
            }
        }
    }

    return GST_PAD_PROBE_OK;
}

/**
 * @brief Extract width/height/framerate from negotiated caps and emit
 *        videoInfoChanged() when any of them actually changed.
 * @param caps Negotiated video caps (not owned; read only).
 */
void GstVideoPlayer::parseVideoInfo(GstCaps *caps)
{
    GstStructure *structure = gst_caps_get_structure(caps, 0);
    if (!structure) return;

    // Initialize everything: gst_structure_get_* leaves outputs untouched
    // on failure, and width/height were previously read uninitialized style.
    int width = 0;
    int height = 0;
    gint fps_num = 0;
    gint fps_denom = 1;
    double fps = 0;

    // Width and height are mandatory; bail out if either is missing.
    if (!gst_structure_get_int(structure, "width", &width) ||
        !gst_structure_get_int(structure, "height", &height)) {
        return;
    }

    // Framerate is optional; guard against a zero denominator.
    if (gst_structure_get_fraction(structure, "framerate", &fps_num, &fps_denom) &&
        fps_denom > 0) {
        fps = static_cast<double>(fps_num) / fps_denom;
    }

    // Only notify the UI when something changed (0.1 fps tolerance).
    if (width != m_width || height != m_height || qAbs(fps - m_fps) > 0.1) {
        m_width = width;
        m_height = height;
        m_fps = fps;

        qDebug() << "Video info changed - Width:" << width
                 << "Height:" << height
                 << "FPS:" << fps;

        emit videoInfoChanged(width, height, fps);
    }
}

/**
 * @brief Construct the player bound to the widget that will display video.
 * @param videoWidget Widget whose native window id is handed to the sink
 *                    (not owned).
 * @param parent      Standard QObject parent.
 */
GstVideoPlayer::GstVideoPlayer(QWidget *videoWidget, QObject *parent)
    : QObject(parent),
     m_videoWidget(videoWidget),
     m_appSrc(nullptr),
     m_videoSink(nullptr),
     m_pipeline(nullptr)
{
    // Initialize GStreamer exactly once. A function-local static with a
    // lambda initializer is thread-safe under C++11 "magic statics",
    // unlike the previous check-then-set bool flag.
    static const bool gstInitialized = [] {
        gst_init(nullptr, nullptr);
        return true;
    }();
    Q_UNUSED(gstInitialized);
}

// Destructor: tears down the pipeline and releases the element references
// held by this object (see stopPlayback()).
GstVideoPlayer::~GstVideoPlayer() {
    stopPlayback();
}


void GstVideoPlayer::startPlayback(const QString &multicastAddress, quint16 port) {
    // 如果已有管道，先停止
    if (m_pipeline) {
        stopPlayback();
    }
    // 创建GStreamer管道 bottom top
//    QString pipelineStr =
//        "appsrc name=udpsource ! "
//        "h264parse ! "
//        "mppvideodec ! "
//        "textoverlay name=watermark text=\"ZXHL\" "
//        "valignment=top halignment=right "
//        "font-desc=\"Sans, 16\" "
//        "shaded-background=false ! "
//        "xvimagesink name=vsink sync=false";

    QString pipelineStr =
        "appsrc name=udpsource ! "
        "h264parse ! "
        "mppvideodec ! "
        "xvimagesink name=vsink sync=false";

    //ok but cpu high
//    QString pipelineStr =
//        "appsrc name=udpsource ! "
//        "h264parse ! "
//        "mppvideodec ! "
//        "video/x-raw,format=NV12 ! "
//        "videoconvert ! "
//        "video/x-raw,format=RGB ! "
//        "appsink name=vsink emit-signals=true sync=false";

//        QString pipelineStr =
//            "appsrc name=udpsource ! "
//            "h264parse ! "
//            "mppvideodec ! "
//            "appsink name=vsink emit-signals=true sync=false";

    qDebug() << "Creating pipeline:" << pipelineStr;

    GError *error = nullptr;
    m_pipeline = gst_parse_launch(pipelineStr.toUtf8().constData(), &error);

    if (error) {
        qCritical() << "Pipeline creation failed:" << error->message;
        g_error_free(error);
        return;
    }

    if (!m_pipeline) {
        qCritical() << "Failed to create pipeline";
        return;
    }

    //在解码器和 sink 之间添加probe来获取caps信息
    GstElement *decoder = gst_bin_get_by_name(GST_BIN(m_pipeline), "mppvideodec");
    if (decoder) {
        GstPad *srcpad = gst_element_get_static_pad(decoder, "src");
        if (srcpad) {
            gst_pad_add_probe(srcpad, GST_PAD_PROBE_TYPE_EVENT_UPSTREAM,
                             capsProbeCallback, this, nullptr);
            gst_object_unref(srcpad);
        }
        gst_object_unref(decoder);
    } else {
        qWarning() << "Could not find decoder element";
    }


    // 获取 m_appSrc 和 videosink 元素
    m_appSrc = gst_bin_get_by_name(GST_BIN(m_pipeline), "udpsource");
    m_videoSink = gst_bin_get_by_name(GST_BIN(m_pipeline), "vsink");
    if (!m_videoSink || !m_appSrc) {
        qCritical() << "Failed to get video sink element";
        gst_object_unref(m_pipeline);
        m_pipeline = nullptr;
        return;
    }

    // 配置 appsrc
    g_object_set(m_appSrc,
        "stream-type", 0,
        "format", GST_FORMAT_TIME,
        "is-live", TRUE,
        NULL);

    // 设置 appsrc 回调
    GstAppSrcCallbacks callbacks;
    callbacks.need_data = onNeedData;
    callbacks.enough_data = onEnoughData;
    callbacks.seek_data = nullptr;
    gst_app_src_set_callbacks(GST_APP_SRC(m_appSrc), &callbacks, this, nullptr);

    // 设置 appsink 回调
//    GstAppSinkCallbacks sinkCallbacks;
//    sinkCallbacks.eos = nullptr;
//    sinkCallbacks.new_preroll = nullptr;
//    sinkCallbacks.new_sample = onNewSample;  // 设置新样本回调
//    gst_app_sink_set_callbacks(GST_APP_SINK(m_videoSink), &sinkCallbacks, this, nullptr);


    //g_signal_connect(m_videoSink, "new-sample", G_CALLBACK(onNewSample), this);

// 设置Qt窗口ID
#if defined(Q_OS_LINUX) && !defined(Q_OS_ANDROID)
    // 对于RK3588平台，使用Wayland或X11
    if (QGuiApplication::platformName().contains("wayland")) {
        // Wayland系统
        QWindow *window = m_videoWidget->window()->windowHandle();
        if (window) {
            gst_video_overlay_set_window_handle(GST_VIDEO_OVERLAY(m_videoSink),
                (guintptr)window);
        } else {
            qWarning() << "Failed to get Wayland window handle";
        }
    } else {
        // X11系统
        WId xid = m_videoWidget->winId();
        gst_video_overlay_set_window_handle(GST_VIDEO_OVERLAY(m_videoSink), xid);
    }
#else
    // 其他平台
    WId winId = m_videoWidget->winId();
    gst_video_overlay_set_window_handle(GST_VIDEO_OVERLAY(m_videoSink), winId);
#endif

    // 设置总线消息处理器
    GstBus *bus = gst_element_get_bus(m_pipeline);
    gst_bus_add_watch(bus, onBusMessage, this);
    gst_object_unref(bus);

    // 启动管道
    GstStateChangeReturn ret = gst_element_set_state(m_pipeline, GST_STATE_PLAYING);
    if (ret == GST_STATE_CHANGE_FAILURE) {
        qCritical() << "Failed to start pipeline";
    } else {
        qDebug() << "Pipeline started successfully";
    }  
}

void GstVideoPlayer::stopPlayback() {
    if (m_pipeline) {
        gst_element_set_state(m_pipeline, GST_STATE_NULL);
        gst_object_unref(m_pipeline);
        m_pipeline = nullptr;
    }
    if (m_videoSink) {
        gst_object_unref(m_videoSink);
        m_videoSink = nullptr;
    }
    if (m_appSrc) {
        gst_object_unref(m_appSrc);
        m_appSrc = nullptr;
    }
}

// 处理接收到的 UDP 数据
void GstVideoPlayer::onUdpDataReceived(const QByteArray &datagram) {
    // 将数据推送到 appsrc
    GstBuffer *buffer = gst_buffer_new_allocate(nullptr, datagram.size(), nullptr);
    GstMapInfo map;
    gst_buffer_map(buffer, &map, GST_MAP_WRITE);
    memcpy(map.data, datagram.constData(), datagram.size());
    gst_buffer_unmap(buffer, &map);

    GstFlowReturn ret = gst_app_src_push_buffer(GST_APP_SRC(m_appSrc), buffer);
    if (ret != GST_FLOW_OK) {
        qWarning() << "Failed to push buffer to appsrc";
    }
}

/**
 * @brief Dispatch a pipeline bus message: log errors/warnings/EOS/state
 *        changes and answer the sink's window-handle request.
 * @param msg Bus message (not owned; valid for the duration of the call).
 */
void GstVideoPlayer::handleGstMessage(GstMessage *msg) {
    switch (GST_MESSAGE_TYPE(msg)) {
        case GST_MESSAGE_ERROR: {
            GError *err;
            gchar *debug;
            gst_message_parse_error(msg, &err, &debug);
            qCritical() << "GStreamer error:" << err->message;
            if (debug) qCritical() << "Debug info:" << debug;
            g_error_free(err);
            g_free(debug);
            break;
        }
        case GST_MESSAGE_WARNING: {
            GError *err;
            gchar *debug;
            gst_message_parse_warning(msg, &err, &debug);
            qWarning() << "GStreamer warning:" << err->message;
            if (debug) qWarning() << "Debug info:" << debug;
            g_error_free(err);
            g_free(debug);
            break;
        }
        case GST_MESSAGE_EOS:
            qInfo() << "End of stream reached";
            break;
        case GST_MESSAGE_STATE_CHANGED: {
            // Only log transitions of the top-level pipeline, not every element.
            if (GST_MESSAGE_SRC(msg) == GST_OBJECT(m_pipeline)) {
                GstState old_state, new_state, pending;
                gst_message_parse_state_changed(msg, &old_state, &new_state, &pending);
                qDebug() << "Pipeline state changed from"
                         << gst_element_state_get_name(old_state)
                         << "to"
                         << gst_element_state_get_name(new_state);
            }
            break;
        }
        case GST_MESSAGE_ELEMENT: {
            // gst_message_get_structure() may return NULL (not every element
            // message carries a structure) -- guard before dereferencing.
            const GstStructure *s = gst_message_get_structure(msg);
            if (s && gst_structure_has_name(s, "prepare-window-handle") && m_videoSink) {
                // The sink asks for a window to render into; hand it the
                // widget's native window id.
                gst_video_overlay_set_window_handle(GST_VIDEO_OVERLAY(m_videoSink),
                    (guintptr)m_videoWidget->winId());
            }
            break;
        }
        default:
            break;
    }
}

/**
 * @brief Pull one sample from the appsink, convert RGB frames to a QImage
 *        and emit newImageAvailable(). (Only used with the appsink variant.)
 * @return GST_FLOW_OK on success, GST_FLOW_ERROR when the sample/caps/map
 *         could not be obtained.
 *
 * Fixes over the previous version: the function always returns a value
 * (it previously fell off the end -- undefined behaviour), the buffer is
 * unmapped on every path (it leaked for I420/unknown formats), the inverted
 * strcmp() on "I420" is corrected, and a NULL format string is tolerated.
 */
GstFlowReturn GstVideoPlayer::onNewFrame()
{
    // Pull the next sample from the appsink.
    GstSample *sample = gst_app_sink_pull_sample(GST_APP_SINK(m_videoSink));
    if (!sample) {
        return GST_FLOW_ERROR;
    }

    // Buffer holding the raw frame data.
    GstBuffer *buffer = gst_sample_get_buffer(sample);
    if (!buffer) {
        gst_sample_unref(sample);
        return GST_FLOW_ERROR;
    }

    // Caps describing the frame (owned by the sample).
    GstCaps *caps = gst_sample_get_caps(sample);
    if (!caps) {
        gst_sample_unref(sample);
        return GST_FLOW_ERROR;
    }

    // Parse width/height from the caps.
    GstStructure *structure = gst_caps_get_structure(caps, 0);
    gint width = 0, height = 0;
    if (!gst_structure_get_int(structure, "width", &width) ||
        !gst_structure_get_int(structure, "height", &height)) {
        gst_sample_unref(sample);
        return GST_FLOW_ERROR;
    }

    // Map the buffer for reading.
    GstMapInfo map;
    if (!gst_buffer_map(buffer, &map, GST_MAP_READ)) {
        gst_sample_unref(sample);
        return GST_FLOW_ERROR;
    }

    const gchar *format = gst_structure_get_string(structure, "format");
    qDebug() << "format:" << format;
    if (format && strcmp(format, "RGB") == 0) {
        // NOTE(review): QImage assumes a stride of width*3 here; confirm the
        // decoder output has no row padding for odd widths.
        currentImg = QImage(map.data, width, height, QImage::Format_RGB888).copy();
        emit newImageAvailable(currentImg.copy());
    } else if (format && strcmp(format, "NV12") == 0) {
        // NV12 -> RGB conversion not implemented (previous attempt kept
        // disabled; a software convert was too CPU heavy).
    } else if (format && strcmp(format, "I420") == 0) {
        // I420 handling not implemented.
    }

    // Clean up on every path (the map previously leaked for non-RGB/NV12).
    gst_buffer_unmap(buffer, &map);
    gst_sample_unref(sample);
    return GST_FLOW_OK;
}

