#include "gstVideoReader.h"
#include <gst/app/app.h>
#include <gst/gst.h>
#include <gst/video/video.h>
#include <string.h>
#include <iostream>
#include "img-utils.h"
#include "spdlog/spdlog.h"
using namespace std;

// Aggregate holding every GStreamer object owned by one GstVideoReader.
// It is zero-filled with memset() in the GstVideoReader constructor, so it
// must remain a plain-old-data struct (pointers and a bool only).
struct GstObjElement {
    bool isLive;  // true for live sources; cbMessage then skips buffering pauses
    // videoconvert is never assigned in this file — presumably used elsewhere; TODO confirm.
    GstElement *pipeline, *source, *videoconvert, *appsink;
    GstCaps* caps;   // NOTE(review): never set in this translation unit — verify it is needed
    GstBus* bus;     // obtained in play() via gst_element_get_bus()
    GMainLoop* loop; // created in play(), run in loopRun(), quit from cbMessage()
};

// Deep-copies this image: allocates a fresh 3-bytes-per-pixel RGB buffer and
// memcpy()s the current pixel data into it. The copy owns its buffer through
// dataPtr (shared_ptr with an array deleter) while data aliases it raw.
//
// Throws std::bad_alloc on allocation failure. (BUG FIX: the previous
// new(std::nothrow) variant never checked for nullptr, so an allocation
// failure would have passed a null destination to memcpy — undefined
// behavior instead of a catchable exception.)
RGBImage RGBImage::clone() {
    RGBImage img;
    img.width = width;
    img.height = height;
    img.size = height * width * 3;
    // Array deleter ensures delete[] is used when the last owner goes away.
    img.dataPtr = std::shared_ptr<uint8_t>(new uint8_t[img.size], std::default_delete<uint8_t[]>());
    img.data = img.dataPtr.get();
    memcpy(img.data, data, img.size);

    return img;
}

// Constructs a reader and performs one-time global GStreamer initialization.
//
// BUG FIX: `toInit` was a plain (non-static) local, so the guard was dead
// code and gst_init() ran on every construction. A function-local static
// makes initialization happen once per process, as intended.
// NOTE(review): the check-then-set pair is not atomic — if readers can be
// constructed concurrently, guard this with std::call_once instead.
GstVideoReader::GstVideoReader() {
    static bool toInit = true;
    if (toInit) {
        gst_init(NULL, NULL);
        toInit = false;
    }

    // Value-initialization zeroes every member, replacing the memset.
    gstEle_ = new GstObjElement();
}

// Tears the reader down, releasing the GStreamer objects it owns.
//
// BUG FIX: previously only the holder struct was deleted, leaking the
// pipeline (and every element it contains), the bus reference taken in
// play(), and the GLib main loop.
GstVideoReader::~GstVideoReader() {
    if (gstEle_) {
        if (gstEle_->pipeline) {
            // Drive the pipeline to NULL before unref so all elements shut down.
            gst_element_set_state(gstEle_->pipeline, GST_STATE_NULL);
            gst_object_unref(gstEle_->pipeline);
        }
        if (gstEle_->bus) {
            // Balance the gst_bus_add_signal_watch()/gst_element_get_bus() from play().
            gst_bus_remove_signal_watch(gstEle_->bus);
            gst_object_unref(gstEle_->bus);
        }
        if (gstEle_->loop) {
            g_main_loop_unref(gstEle_->loop);
        }
        delete gstEle_;
        gstEle_ = nullptr;
    }
}

// Creates a GStreamer element from the named factory and stores it in
// `element`. Throws std::invalid_argument if the factory is unavailable
// (e.g. the plugin is not installed).
static void gstElementFactoryMake(GstElement*& element, const char* factoryName, const char* name) {
    element = gst_element_factory_make(factoryName, name);
    if (element != nullptr) {
        return;
    }
    SPDLOG_ERROR("{} create failed", factoryName);
    string errstr("failed to create ");
    errstr += factoryName;
    throw std::invalid_argument(errstr);
}

// Bus "message" callback: reacts to errors, end-of-stream, buffering
// progress and clock loss by adjusting pipeline state and, for fatal
// conditions, quitting the main loop so loopRun() returns.
static void cbMessage(GstBus* bus, GstMessage* msg, GstObjElement* data) {
    switch (GST_MESSAGE_TYPE(msg)) {
        case GST_MESSAGE_ERROR: {
            GError* err = nullptr;
            gchar* debug = nullptr;
            gst_message_parse_error(msg, &err, &debug);
            g_print("Error: %s\n", err->message);
            g_error_free(err);
            g_free(debug);
            // Fatal: park the pipeline and leave the main loop.
            gst_element_set_state(data->pipeline, GST_STATE_READY);
            g_main_loop_quit(data->loop);
            break;
        }
        case GST_MESSAGE_EOS:
            // End of stream: same shutdown path as an error.
            SPDLOG_INFO("GST_MESSAGE_EOS");
            gst_element_set_state(data->pipeline, GST_STATE_READY);
            g_main_loop_quit(data->loop);
            break;
        case GST_MESSAGE_BUFFERING: {
            SPDLOG_INFO("GST_MESSAGE_BUFFERING");
            // Live streams are never paused for buffering.
            if (data->isLive) {
                break;
            }
            gint percent = 0;
            gst_message_parse_buffering(msg, &percent);
            g_print("Buffering (%3d%%)\r", percent);
            // Hold in PAUSED until the buffer fills, then resume PLAYING.
            gst_element_set_state(data->pipeline,
                                  percent < 100 ? GST_STATE_PAUSED : GST_STATE_PLAYING);
            break;
        }
        case GST_MESSAGE_CLOCK_LOST:
            // Bounce PAUSED -> PLAYING so the pipeline selects a new clock.
            SPDLOG_INFO("GST_MESSAGE_CLOCK_LOST");
            gst_element_set_state(data->pipeline, GST_STATE_PAUSED);
            gst_element_set_state(data->pipeline, GST_STATE_PLAYING);
            break;
        default:
            // All other message types are ignored.
            break;
    }
}

// qtdemux "pad-added" callback: links the demuxer's newly exposed pad to the
// sink pad of `element` (the src-queue), but only for video pads. Audio and
// other pad types are ignored, as is a pad arriving after we already linked.
static void onPadAdded(GstElement* src, GstPad* srcpad, GstElement* element) {
    GstPad* sinkpad;
    GstPadLinkReturn ret;
    GstCaps* new_pad_caps = NULL;
    GstStructure* new_pad_struct = NULL;
    const gchar* new_pad_type = NULL;

    SPDLOG_INFO("Received new pad '{}' from '{}'", GST_PAD_NAME(srcpad), GST_ELEMENT_NAME(src));

    sinkpad = gst_element_get_static_pad(element, "sink");
    /* If our downstream element is already linked, we have nothing to do here */
    if (gst_pad_is_linked(sinkpad)) {
        SPDLOG_INFO("We are already linked. Ignoring.");
        goto exit;
    }
    /* Check the new pad's type. BUG FIX: gst_pad_get_current_caps() returns
     * NULL when no caps are negotiated on the pad yet; the original passed
     * that NULL straight to gst_caps_get_structure() and crashed. */
    new_pad_caps = gst_pad_get_current_caps(srcpad);
    if (new_pad_caps == NULL) {
        SPDLOG_WARN("Pad has no negotiated caps. Ignoring.");
        goto exit;
    }
    new_pad_struct = gst_caps_get_structure(new_pad_caps, 0);
    new_pad_type = gst_structure_get_name(new_pad_struct);
    SPDLOG_INFO("new pad type: {}", new_pad_type);

    if (!g_str_has_prefix(new_pad_type, "video")) {
        SPDLOG_WARN("It has type {} which is not video. Ignoring.\n", new_pad_type);
        goto exit;
    }

    ret = gst_pad_link(srcpad, sinkpad);
    if (GST_PAD_LINK_FAILED(ret)) {
        SPDLOG_INFO("Type is '{}' but link failed.", new_pad_type);
    } else {
        SPDLOG_INFO("Link succeeded (type '{}').", new_pad_type);
    }

exit:
    /* Unreference the new pad's caps, if we got them */
    if (new_pad_caps != NULL) {
        gst_caps_unref(new_pad_caps);
    }
    /* Unreference the sink pad */
    gst_object_unref(sinkpad);
}

// appsink "new-sample" callback: pulls one decoded frame, converts it from
// I420 YUV to RGB into a freshly allocated Tensor, and pushes the tensor
// onto the reader's frame queue. `user_data` is the owning GstVideoReader.
// Returns GST_FLOW_OK on success, GST_FLOW_ERROR on any failure.
static GstFlowReturn newSample(GstAppSink* appsink, gpointer user_data) {
    GstSample* sample;
    GstBuffer* buffer;
    GstMapInfo map;
    auto& obj = *(GstVideoReader*)user_data;

    ThreadSafeQueue<std::shared_ptr<Tensor<unsigned char>>>& frameQue = obj.frameQue_;
    /* Pull one sample from the appsink (we own the returned ref). */
    sample = gst_app_sink_pull_sample(appsink);
    if (sample == NULL) {
        g_print("无法获取样本\n");
        return GST_FLOW_ERROR;
    }

    GstCaps* caps = gst_sample_get_caps(sample);
    GstStructure* info = gst_caps_get_structure(caps, 0);
    gint height = 0, width = 0;
    /* Read the frame dimensions from the caps.
     * BUG FIX: the original combined the two lookups with |=, so a failed
     * width lookup was silently masked whenever the height lookup succeeded. */
    gboolean res = gst_structure_get_int(info, "width", &width) &&
                   gst_structure_get_int(info, "height", &height);
    if (!res) {
        SPDLOG_WARN("gst_structure_get_int fail");
        gst_sample_unref(sample);
        return GST_FLOW_ERROR;
    }
    auto img = make_shared<Tensor<unsigned char>>(1, height, width, 3, 0);
    /* The buffer is owned by the sample; it needs no separate unref. */
    buffer = gst_sample_get_buffer(sample);
    if (buffer == NULL) {
        SPDLOG_INFO("无法获取缓冲区\n");
        gst_sample_unref(sample); /* BUG FIX: sample was leaked on this path */
        return GST_FLOW_ERROR;
    }

    /* Map the buffer read-only to reach the raw pixel data. */
    if (!gst_buffer_map(buffer, &map, GST_MAP_READ)) {
        SPDLOG_INFO("无法映射缓冲区\n");
        gst_sample_unref(sample); /* BUG FIX: sample was leaked here too */
        return GST_FLOW_ERROR;
    }
    /* Convert I420 -> RGB into the tensor, hand it to the consumer queue. */
    yuv420ColorCvt(map.data, height, width, img->getData(), YUV2RGB_I420);
    frameQue.Push(img);
    gst_buffer_unmap(buffer, &map);

    gst_sample_unref(sample);
    return GST_FLOW_OK;
}

// Builds the decode pipeline for an MP4 file:
//   filesrc -> qtdemux -(pad-added)-> queue -> h264parse -> decoder -> appsink
// The decoder is the Rockchip hardware element (mppvideodec) when available,
// falling back to software avdec_h264. Throws std::invalid_argument when an
// element factory is missing and std::bad_exception on pipeline/link failure.
void GstVideoReader::open(const std::string& videoPath) {
    path_ = videoPath;
    gstEle_->pipeline = gst_pipeline_new("pipeline");
    if (!gstEle_->pipeline) {
        string errstr = "new source-pipeline failed";
        SPDLOG_ERROR("{}", errstr);
        // NOTE(review): std::bad_exception is conventionally reserved for
        // exception-spec violations; std::runtime_error would be clearer.
        // Left as-is in case callers catch bad_exception specifically.
        throw std::bad_exception();
    }
    GstElement *demuxer, *queue, *h264parse, *decoder;
    GstClock* clock = nullptr;
    gstElementFactoryMake(gstEle_->source, "filesrc", "inputFile");
    gstElementFactoryMake(gstEle_->appsink, "appsink", "app-sink");
    gstElementFactoryMake(queue, "queue", "src-queue");
    gstElementFactoryMake(demuxer, "qtdemux", "qt-demuxer");
    gstElementFactoryMake(h264parse, "h264parse", "parse");
    // NOTE(review): this sets a NULL clock on the pipeline; presumably the
    // intent is to disable clock sync so frames are decoded as fast as
    // possible — confirm, since gst_pipeline normally selects its own clock.
    gst_element_set_clock(gstEle_->pipeline, clock);

    // Prefer the hardware decoder; fall back to software when the plugin is absent.
    try {
        gstElementFactoryMake(decoder, "mppvideodec", "hw-mppvideodec");
    } catch (const std::exception& e) {
        gstElementFactoryMake(decoder, "avdec_h264", "sft-mppvideodec");
    }

    g_object_set(G_OBJECT(gstEle_->source), "location", videoPath.c_str(), nullptr);
    // emit-signals makes appsink fire "new-sample" instead of requiring pulls.
    g_object_set(gstEle_->appsink, "emit-signals", TRUE, nullptr);
    g_signal_connect(gstEle_->appsink, "new-sample", G_CALLBACK(newSample), this);
    gst_bin_add_many(GST_BIN(gstEle_->pipeline), gstEle_->source, demuxer, queue, h264parse, decoder, gstEle_->appsink, nullptr);
    if (gst_element_link(gstEle_->source, demuxer) != TRUE) {
        SPDLOG_ERROR("source and qtdemux could not be linked");
        throw std::bad_exception();
    }
    // qtdemux pads appear dynamically, so the demuxer->queue link is made
    // later in onPadAdded; here we only link the static downstream chain.
    if (gst_element_link_many(queue, h264parse, decoder, gstEle_->appsink, nullptr) != TRUE) {
        SPDLOG_ERROR("queue, h264parse, mppvideodec, and sink could not be linked");
        throw std::bad_exception();
    }
    g_signal_connect(demuxer, "pad-added", G_CALLBACK(onPadAdded), queue);
}

void GstVideoReader::play() {
    gstEle_->bus = gst_element_get_bus(gstEle_->pipeline);

    gstEle_->loop = g_main_loop_new(NULL, FALSE);
    gst_bus_add_signal_watch(gstEle_->bus);
    g_signal_connect(gstEle_->bus, "message", G_CALLBACK(cbMessage), gstEle_);

    GstStateChangeReturn ret = gst_element_set_state(gstEle_->pipeline, GST_STATE_PLAYING);
    if (ret == GST_STATE_CHANGE_FAILURE) {
        SPDLOG_ERROR("Unable to set the pipeline to the playing state");
        throw std::bad_exception();
    }
}

// Blocks the calling thread running the GLib main loop until
// g_main_loop_quit() is invoked (done by cbMessage on error or EOS).
void GstVideoReader::loopRun() {
    g_main_loop_run(gstEle_->loop);
    SPDLOG_INFO("out of loop");
}

// Stops the pipeline (transition to GST_STATE_NULL) and drops any frames
// still waiting in the queue. Throws std::bad_exception on failure.
void GstVideoReader::stop() {
    GstStateChangeReturn ret = gst_element_set_state(gstEle_->pipeline, GST_STATE_NULL);
    if (ret == GST_STATE_CHANGE_FAILURE) {
        // BUG FIX: the message previously said "playing state", copied from
        // play(); this transition is to the NULL (stopped) state.
        SPDLOG_ERROR("Unable to set the pipeline to the null state");
        throw std::bad_exception();
    }
    frameQue_.Clear();
}