#include <gst/gst.h>
#include <gst/rtsp-server/rtsp-server.h>
#include <gst/app/gstappsrc.h>
#include "opencv2/core.hpp"
#include "opencv2/highgui.hpp"
#include <queue>
#include <mutex>
#include <thread>
#include <chrono>
#include "../include/vedioDevice.h"
// TCP port the RTSP server listens on (GstRTSPServer "service" property).
#define DEFAULT_RTSP_PORT "8554"
// Frame resolution advertised in the appsrc caps (see media_configure).
#define CAP_WIDTH (800)
#define CAP_HIGH	(600)
static char *port = (char *)DEFAULT_RTSP_PORT;
using namespace cv;

// Frames produced by captureThread() and consumed on the GStreamer main
// loop; frameQueueMutex guards every access to frameQueue.
std::queue<cv::Mat> frameQueue;
std::mutex frameQueueMutex;
vedioDevice g_vd; // camera wrapper (declared in ../include/vedioDevice.h)

// Debug counters printed from captureThread().
int g_cnt = 0;        // printed only; never modified in this file
int g_camera_cnt = 0; // frames captured so far
int g_need_data = 0;  // printed only; never modified in this file


// Per-media streaming context, allocated in media_configure() and attached
// to the GstRTSPMedia instance (freed with the media via g_free).
typedef struct
{
    gboolean white;          // set FALSE in media_configure; never read in this file
    GstClockTime timestamp;  // running PTS for the next buffer pushed to appsrc
    GstElement *appsrc;      // the media's appsrc element, filled in by need_data()
} MyContext;

// Idle-source callback (scheduled by need_data): pop one frame from the
// shared queue and push it into appsrc as a timestamped GstBuffer.
//
// Returns G_SOURCE_CONTINUE while waiting for a frame and G_SOURCE_REMOVE
// once a buffer has been pushed (or the push failed).  The previous version
// returned G_SOURCE_CONTINUE unconditionally, so every "need-data" emission
// leaked another permanently-running idle source that busy-spun the main
// loop and kept pushing even after a downstream flow error.
static gboolean push_data(MyContext *ctx)
{
    cv::Mat frame;

    // Hold the lock only while touching the queue.
    {
        std::lock_guard<std::mutex> lock(frameQueueMutex);
        if (!frameQueue.empty())
        {
            frame = frameQueue.front();
            frameQueue.pop();
        }
    }

    if (frame.empty())
    {
        // No frame available yet: stay scheduled and retry on the next
        // main-loop idle iteration.
        return G_SOURCE_CONTINUE;
    }

    const gsize size = frame.total() * frame.elemSize();
    GstBuffer *buffer = gst_buffer_new_allocate(NULL, size, NULL);

    GstMapInfo map;
    if (gst_buffer_map(buffer, &map, GST_MAP_WRITE))
    {
        memcpy(map.data, frame.data, size);
        gst_buffer_unmap(buffer, &map);
    }

    // Timestamp at the advertised framerate (1/2 fps => 2 s per frame).
    GST_BUFFER_PTS(buffer) = ctx->timestamp;
    GST_BUFFER_DURATION(buffer) = gst_util_uint64_scale_int(1, GST_SECOND, 2);
    ctx->timestamp += GST_BUFFER_DURATION(buffer);

    // Initialize ret so a failed signal emission cannot leave it garbage.
    GstFlowReturn ret = GST_FLOW_OK;
    g_signal_emit_by_name(ctx->appsrc, "push-buffer", buffer, &ret);
    gst_buffer_unref(buffer);

    if (ret != GST_FLOW_OK)
        g_printerr("push-buffer failed: %s\n", gst_flow_get_name(ret));

    // One buffer per need-data request; appsrc re-emits "need-data" when it
    // wants more, which schedules a fresh idle source.
    return G_SOURCE_REMOVE;
}

// "need-data" signal handler for appsrc: record which appsrc wants data and
// schedule push_data() to run on the GLib main loop, as the original
// comment intended, instead of pushing from the signal-emitting thread.
static void need_data(GstElement *appsrc, guint unused, MyContext *ctx)
{
    ctx->appsrc = appsrc;

    // NOTE(review): each need-data emission adds a fresh idle source; if
    // push_data() returns G_SOURCE_CONTINUE these sources accumulate and
    // fire forever — confirm push_data removes itself once satisfied.
    g_idle_add((GSourceFunc)push_data, ctx);
}


// Camera-capture loop (runs forever on its own std::thread): grabs frames
// from the global video device g_vd and enqueues them for the streaming
// side, paced at ~2 fps to match the advertised framerate.
void captureThread()
{
    // Bound the queue so a slow or absent RTSP consumer cannot make it
    // (and process memory) grow without limit; drop the oldest frames.
    const size_t kMaxQueuedFrames = 8;

    while (true)
    {
        cv::Mat frame;
        g_vd.getFrame(frame);

        if (!frame.empty())
        {
            std::lock_guard<std::mutex> lock(frameQueueMutex);
            // Cast to unsigned for %u: frame.total()*elemSize() is size_t,
            // which the old "%d" mis-read on LP64 platforms (UB).
            g_print("2frame size is %u, need-data:%d, %d, cap:%d, \n",
                    (unsigned)(frame.total() * frame.elemSize()),
                    g_cnt, g_need_data, g_camera_cnt);
            while (frameQueue.size() >= kMaxQueuedFrames)
                frameQueue.pop();
            frameQueue.push(frame);
            g_camera_cnt++;
        }

        // Pace acquisition; streaming caps advertise 1/2 fps.
        std::this_thread::sleep_for(std::chrono::milliseconds(500));
    }
}

// "media-configure" callback: runs for every new RTSP media instance.
// Configures the pipeline's appsrc (time format, raw-BGR caps matching the
// capture size) and attaches a fresh MyContext whose lifetime is tied to
// the media object.
static void media_configure(GstRTSPMediaFactory *factory, GstRTSPMedia *media, gpointer user_data)
{
    GstElement *element, *appsrc;
    GstCaps *caps;
    MyContext *ctx;

    element = gst_rtsp_media_get_element(media);
    appsrc = gst_bin_get_by_name_recurse_up(GST_BIN(element), "mysrc");
    if (appsrc == NULL)
    {
        // Guard: the launch string may change; don't dereference NULL.
        g_printerr("media_configure: no element named \"mysrc\" found\n");
        gst_object_unref(element);
        return;
    }

    gst_util_set_object_arg(G_OBJECT(appsrc), "format", "time");

    caps = gst_caps_new_simple("video/x-raw",
                               "format", G_TYPE_STRING, "BGR",
                               "width", G_TYPE_INT, CAP_WIDTH,
                               "height", G_TYPE_INT, CAP_HIGH,
                               "framerate", GST_TYPE_FRACTION, 1, 2,
                               NULL);
    g_object_set(G_OBJECT(appsrc), "caps", caps,
                 "format", GST_FORMAT_TIME,
                 NULL);
    // g_object_set copies boxed GstCaps; the creating ref must be dropped
    // here or one caps is leaked per client connection (old code leaked it).
    gst_caps_unref(caps);

    ctx = g_new0(MyContext, 1);
    ctx->white = FALSE;
    ctx->timestamp = 0;
    // Free the context together with the media object.
    g_object_set_data_full(G_OBJECT(media), "my-extra-data", ctx,
                           (GDestroyNotify)g_free);

    g_signal_connect(appsrc, "need-data", (GCallback)need_data, ctx);
    gst_object_unref(appsrc);
    gst_object_unref(element);
}

int main(int argc, char *argv[])
{
	//putenv("GST_DEBUG=appsrc:5");
    putenv("GST_DEBUG_FILE=debug.log");
    putenv("GST_DEBUG_NO_COLOR=1");
    GMainLoop *loop;
    GstRTSPServer *server;
    GstRTSPMountPoints *mounts;
    GstRTSPMediaFactory *factory;

    gst_init(&argc, &argv);

//    cap.open(0); // Open default camera
//	cap.set(cv::CAP_PROP_FRAME_WIDTH, CAP_WIDTH);  // 设置宽度为1920像素
//	cap.set(cv::CAP_PROP_FRAME_HEIGHT, CAP_HIGH); // 设置高度为1080像素
//	cap.set(cv::CAP_PROP_FPS, 10);
//	double fps = cap.get(cv::CAP_PROP_FPS);
//	double width = cap.get(cv::CAP_PROP_FRAME_WIDTH);
//	double height = cap.get(cv::CAP_PROP_FRAME_HEIGHT);
//	g_print("%f, %f, %f.\n", fps, width, height);
//    if (!cap.isOpened())
//    {
//        g_print("Error: Couldn't open the camera.\n");
//        return -1;
//    }

    std::thread captureThreadObj(captureThread);

    loop = g_main_loop_new(NULL, FALSE);

    server = gst_rtsp_server_new();
    g_object_set(server, "service", port, NULL);
    gst_rtsp_server_set_address(server, "0.0.0.0");

    mounts = gst_rtsp_server_get_mount_points(server);

    factory = gst_rtsp_media_factory_new();
	gst_rtsp_media_factory_set_launch(factory,
    "( appsrc name=mysrc ! videoconvert ! capsfilter caps=\"video/x-raw,format=I420\" ! x264enc tune=zerolatency ! rtph264pay name=pay0 pt=96 )");

    g_signal_connect(factory, "media-configure", (GCallback)media_configure,
                     NULL);

    gst_rtsp_mount_points_add_factory(mounts, "/test", factory);
    g_object_unref(mounts);

    gst_rtsp_server_attach(server, NULL);

    g_print("stream ready at rtsp://127.0.0.1:8554/test\n");
    g_main_loop_run(loop);

    captureThreadObj.join(); // Wait for the capture thread to finish
//    cap.release();           // Release the camera

    return 0;
}
